diff --git a/.github/ISSUE_TEMPLATE/01-bug.yml b/.github/ISSUE_TEMPLATE/01-bug.yml index 3b8fa99f93..cff3e79c65 100644 --- a/.github/ISSUE_TEMPLATE/01-bug.yml +++ b/.github/ISSUE_TEMPLATE/01-bug.yml @@ -43,6 +43,12 @@ body: validations: required: true + - id: company-name + type: input + attributes: + label: Company Name + description: Optional field. Providing this information allows us to communicate faster which may lead to faster issue resolution. + - type: markdown attributes: value: | diff --git a/.github/ISSUE_TEMPLATE/02-general-usage.yml b/.github/ISSUE_TEMPLATE/02-general-usage.yml index a0bb158038..1ddd1d306f 100644 --- a/.github/ISSUE_TEMPLATE/02-general-usage.yml +++ b/.github/ISSUE_TEMPLATE/02-general-usage.yml @@ -43,6 +43,12 @@ body: description: The semantic version of Terraform Provider placeholder: "x.y.z" + - id: company-name + type: input + attributes: + label: Company Name + description: Optional field. Providing this information allows us to communicate faster which may lead to faster issue resolution. + - type: markdown attributes: value: | diff --git a/.github/ISSUE_TEMPLATE/03-documentation.yml b/.github/ISSUE_TEMPLATE/03-documentation.yml index 1a389292d3..d73349e095 100644 --- a/.github/ISSUE_TEMPLATE/03-documentation.yml +++ b/.github/ISSUE_TEMPLATE/03-documentation.yml @@ -18,6 +18,12 @@ body: Documentation edits are generally a bit less involved, so are often a great entrypoint if you've ever been interested in contributing! If you would like to contribute to the project, please let us know and refer to the [contribution guide](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/CONTRIBUTING.md) for tips on getting started. + - id: company-name + type: input + attributes: + label: Company Name + description: Optional field. Providing this information allows us to communicate faster which may lead to faster issue resolution. + - id: object_type type: dropdown attributes: diff --git a/.github/ISSUE_TEMPLATE/04-feature-request.yml b/.github/ISSUE_TEMPLATE/04-feature-request.yml index 454b34fe59..b653f251ae 100644 --- a/.github/ISSUE_TEMPLATE/04-feature-request.yml +++ b/.github/ISSUE_TEMPLATE/04-feature-request.yml @@ -19,6 +19,12 @@ body: - [Guide on creating issues + FAQ + Commonly Known Issues](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/CREATING_ISSUES.md) - If you would like to create a GitHub issue, please read our guide first. It contains useful links, FAQ, and commonly known issues with solutions that may already solve your case. - [Provider Roadmap](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/ROADMAP.md) + - id: company-name + type: input + attributes: + label: Company Name + description: Optional field. Providing this information allows us to communicate faster which may lead to faster issue resolution. 
+ - id: use-case type: textarea attributes: diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index f994b9b86d..3ec814e652 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -41,7 +41,11 @@ jobs: - name: Create and populate .snowflake/config file id: create_config - run: mkdir $HOME/.snowflake && echo "${{ secrets.SNOWFLAKE_CONFIG_FILE }}" > $HOME/.snowflake/config + run: mkdir -p $HOME/.snowflake && echo "${{ secrets.SNOWFLAKE_CONFIG_FILE }}" > $HOME/.snowflake/config + + - name: Create and populate .snowflake/config_v097_compatible file + id: create_config_v097_compatible + run: mkdir -p $HOME/.snowflake && echo "${{ secrets.SNOWFLAKE_CONFIG_FILE_V097_COMPATIBLE }}" > $HOME/.snowflake/config_v097_compatible - run: make test if: ${{ !cancelled() && steps.create_config.conclusion == 'success' }} diff --git a/CHANGELOG.md b/CHANGELOG.md index 304768ea0d..6ba8439c12 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,52 @@ # Changelog +## [0.98.0](https://github.com/Snowflake-Labs/terraform-provider-snowflake/compare/v0.97.0...v0.98.0) (2024-11-08) + + +### 🎉 **What's new:** + +* Add authentication policy resource ([#3098](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/3098)) ([ddea819](https://github.com/Snowflake-Labs/terraform-provider-snowflake/commit/ddea819cb9d0dd7918e0f4bbdaa0a2204da7b8b5)) +* Add Resource for External Volumes ([#3106](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/3106)) ([64ba674](https://github.com/Snowflake-Labs/terraform-provider-snowflake/commit/64ba6747f9b2364a1f84a43242472af3c4ebeca7)) +* Add stream on directory table ([#3129](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/3129)) ([4391473](https://github.com/Snowflake-Labs/terraform-provider-snowflake/commit/439147392436b9427e31ea578cc5bb971189a932)) +* Add stream on view ([#3150](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/3150)) ([494af6d](https://github.com/Snowflake-Labs/terraform-provider-snowflake/commit/494af6dd19d1098d76e7ea0451d1e144138c7a29)) +* Connection datasource ([#3173](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/3173)) ([4127b3f](https://github.com/Snowflake-Labs/terraform-provider-snowflake/commit/4127b3f782660772b77a72f9b38dafc728254de3)) +* Connection resource ([#3162](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/3162)) ([5aef117](https://github.com/Snowflake-Labs/terraform-provider-snowflake/commit/5aef117f415f238a0e786b9a063d44fadeb879e5)) +* Rework config hierarchy ([#3166](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/3166)) ([04cd9f0](https://github.com/Snowflake-Labs/terraform-provider-snowflake/commit/04cd9f04f1713b47f50c1931fd8955665ea8cbcc)) +* Rework provider configuration fields ([#3152](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/3152)) ([fd6af43](https://github.com/Snowflake-Labs/terraform-provider-snowflake/commit/fd6af43d5e50e83d247333e4a0edf85008538a9e)) +* Rework streams data source ([#3151](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/3151)) ([b18bf30](https://github.com/Snowflake-Labs/terraform-provider-snowflake/commit/b18bf30eee1b86e1f85c52e36122e9fa052053bd)) +* SDK Connection ([#3155](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/3155)) ([bd11e0f](https://github.com/Snowflake-Labs/terraform-provider-snowflake/commit/bd11e0f5eab8fe420f8af6644f4c1eb90910e69a)) +* Secret 
resource ([#3110](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/3110)) ([16a812d](https://github.com/Snowflake-Labs/terraform-provider-snowflake/commit/16a812dae20d59f31457790dcd99db03db697051)) +* Secrets datasource ([#3131](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/3131)) ([8110138](https://github.com/Snowflake-Labs/terraform-provider-snowflake/commit/811013887cdf1a6624c93481f18e12a183865463)) +* Upgrade tag SDK ([#3126](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/3126)) ([893b288](https://github.com/Snowflake-Labs/terraform-provider-snowflake/commit/893b288f91ee3d31c875f2e38c7346fe362632e6)) + + +### 🔧 **Misc** + +* Add a company name field to the issue templates ([#3182](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/3182)) ([0d3248a](https://github.com/Snowflake-Labs/terraform-provider-snowflake/commit/0d3248a2cb5323f94d8a984aaabe60857041fc3b)) +* Add object renaming research summary ([#3172](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/3172)) ([721ee40](https://github.com/Snowflake-Labs/terraform-provider-snowflake/commit/721ee40d6954254fc1f45eec50a7098b45d1afc3)) +* Add tests to 3117 and bump build time ([#3133](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/3133)) ([ca90fde](https://github.com/Snowflake-Labs/terraform-provider-snowflake/commit/ca90fdefc18f6c77627e602df46650e17cb54eaa)) +* Detect changes in lists and sets ([#3147](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/3147)) ([c3edb79](https://github.com/Snowflake-Labs/terraform-provider-snowflake/commit/c3edb79758e152a60370fcaa251fdbbdda9bcc56)) +* Exclude methods from test function checks in architest ([#3174](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/3174)) ([edc46cc](https://github.com/Snowflake-Labs/terraform-provider-snowflake/commit/edc46cc29fa9d0f19f5f62f04de71c5a56e4b902)) +* join object renaming tests into parameterized ones ([#3154](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/3154)) ([be13502](https://github.com/Snowflake-Labs/terraform-provider-snowflake/commit/be13502d7fc67c896ee1387297bddad904f515b0)) +* New roadmap entry ([#3158](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/3158)) ([d83cdde](https://github.com/Snowflake-Labs/terraform-provider-snowflake/commit/d83cdde59a5fb98fea5d31e5017869f841c678cb)) +* Test more authentication methods ([#3178](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/3178)) ([d345cd2](https://github.com/Snowflake-Labs/terraform-provider-snowflake/commit/d345cd291170db27e7ba1b83d69e613938fd9538)) +* Test support for object renaming ([#3130](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/3130)) ([d665419](https://github.com/Snowflake-Labs/terraform-provider-snowflake/commit/d6654195c099b81969bb8043e98771cf199ee0f4)) + + +### 🐛 **Bug fixes:** + +* Apply various fixes ([#3176](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/3176)) ([55591da](https://github.com/Snowflake-Labs/terraform-provider-snowflake/commit/55591da967db58a3ef11e3e454101e27ca7abb42)) +* Connection and secret-datasource tests ([#3177](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/3177)) ([167de4b](https://github.com/Snowflake-Labs/terraform-provider-snowflake/commit/167de4be15530d481b6e46953a7984d1b2777899)) +* Fix grant import docs 
([#3183](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/3183)) ([94ac910](https://github.com/Snowflake-Labs/terraform-provider-snowflake/commit/94ac910f490a4cc3d9975e073e7503781bab2ffd)) +* Fix main ([#3157](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/3157)) ([89b9705](https://github.com/Snowflake-Labs/terraform-provider-snowflake/commit/89b9705d2af43aad3f0e098e3b8af7ced0b3d406)) +* Fix main ([#3160](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/3160)) ([5b7412f](https://github.com/Snowflake-Labs/terraform-provider-snowflake/commit/5b7412fe00a3620d1428d61ce30f51232eccf213)) +* Fix main ([#3186](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/3186)) ([59a0a26](https://github.com/Snowflake-Labs/terraform-provider-snowflake/commit/59a0a2699cbbb0a18517b607e736a04b62f6c3ba)) +* Fix user resource import ([#3181](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/3181)) ([34bbbc1](https://github.com/Snowflake-Labs/terraform-provider-snowflake/commit/34bbbc18b61ce1224c4f8607a12a9f6dfd95c958)) +* handle external change of secret type ([#3141](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/3141)) ([649b839](https://github.com/Snowflake-Labs/terraform-provider-snowflake/commit/649b8397987d13fa53c67729a81dc7fceef218a7)) +* Handle external type changes in stream resources ([#3164](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/3164)) ([9fd8f88](https://github.com/Snowflake-Labs/terraform-provider-snowflake/commit/9fd8f887401c0931bbbd18d3b3c4d770b8410fd4)) +* merge diffs on test clients ([#3149](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/3149)) ([0f06b4a](https://github.com/Snowflake-Labs/terraform-provider-snowflake/commit/0f06b4a063c9a9f9922aa4d83c47935d09757571)) +* Skip connection data source acc test ([#3184](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/3184)) ([2942374](https://github.com/Snowflake-Labs/terraform-provider-snowflake/commit/2942374f668b4bdc48f81c7580262fc1c6473179)) + ## [0.97.0](https://github.com/Snowflake-Labs/terraform-provider-snowflake/compare/v0.96.0...v0.97.0) (2024-10-10) diff --git a/MIGRATION_GUIDE.md b/MIGRATION_GUIDE.md index 56a1af3788..d1d76a6b25 100644 --- a/MIGRATION_GUIDE.md +++ b/MIGRATION_GUIDE.md @@ -100,8 +100,63 @@ resource "snowflake_task" "example" { - `show_output` and `parameters` fields added for holding SHOW and SHOW PARAMETERS output (see [raw Snowflake output](./v1-preparations/CHANGES_BEFORE_V1.md#raw-snowflake-output)). - Added support for finalizer tasks with `finalize` field. It conflicts with `after` and `schedule` (see [finalizer tasks](https://docs.snowflake.com/en/user-guide/tasks-graphs#release-and-cleanup-of-task-graphs)). +## v0.98.0 ➞ v0.99.0 + +### snowflake_tag_masking_policy_association deprecation +`snowflake_tag_masking_policy_association` is now deprecated in favor of `snowflake_tag` with a new `masking_policies` field. It will be removed with the v1 release. Please adjust your configuration files. + +### snowflake_tag resource changes +New fields: + - `masking_policies` field that holds the associated masking policies. + - `show_output` field that holds the response from SHOW TAGS. + +#### *(breaking change)* Changed fields in snowflake_tag resource +Changed fields: + - `name` is no longer marked as ForceNew. When this value is changed, the resource is renamed with `ALTER TAG` instead of being recreated. 
+ - `allowed_values` type was changed from list to set. As a result, differences in ordering are ignored. +The state will be migrated automatically. + +#### *(breaking change)* Identifier-related changes +During [identifiers rework](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/ROADMAP.md#identifiers-rework) we decided to +migrate resource ids from pipe-separated to regular Snowflake identifiers (e.g. `<database>|<schema>|<name>` -> `"<database>"."<schema>"."<name>"`). Importing resources also needs to be adjusted (see [example](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/latest/docs/resources/tag#import)). + +Also, we added a diff suppression function that prevents Terraform from showing differences when only the quoting is different. + +No change is required; the state will be migrated automatically. + ## v0.97.0 ➞ v0.98.0 +### *(new feature)* snowflake_connections datasource +Added a new datasource enabling querying and filtering connections. Notes: +- all results are stored in the `connections` field. +- the `like` field enables connections filtering. +- SHOW CONNECTIONS output is enclosed in the `show_output` field inside `connections`. + It's important to limit the records and calls to Snowflake to the minimum. That's why we recommend assessing which information you need from the data source and then providing strong filters and turning off additional fields for better plan performance. + + +### *(new feature)* connection resources + +Added new resources for managing connections. We decided to split connections into two separate resources based on whether the connection is primary or replicated (secondary): + +- `snowflake_primary_connection` is used to manage a primary connection, with the ability to enable failover to other accounts. +- `snowflake_secondary_connection` is used to manage a replicated (secondary) connection. + +To promote `snowflake_secondary_connection` to `snowflake_primary_connection`, resources need to be removed from the state, altered manually using: +``` +ALTER CONNECTION <connection_name> PRIMARY; +``` +and then imported again, now as `snowflake_primary_connection`. + +To demote `snowflake_primary_connection` back to `snowflake_secondary_connection`, resources need to be removed from the state, re-created manually using: +``` +CREATE CONNECTION <connection_name> AS REPLICA OF <organization_name>.<account_name>.<connection_name>; +``` +and then imported as `snowflake_secondary_connection`. + +For guidance on removing and importing resources into the state, check [resource migration](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/resource_migration.md). + +See reference [docs](https://docs.snowflake.com/en/sql-reference/sql/create-connection). + ### snowflake_streams data source changes New filtering options: - `like` @@ -123,6 +178,31 @@ Please adjust your Terraform configuration files. ### *(behavior change)* Provider configuration rework On our road to v1, we have decided to rework configuration to address the most common issues (see a [roadmap entry](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/ROADMAP.md#providers-configuration-rework)). We have created a list of topics we wanted to address before v1. We will prepare an announcement soon. The following subsections describe the things addressed in v0.98.0. +#### *(behavior change)* new fields +We have added new fields to match the ones in [the driver](https://pkg.go.dev/github.com/snowflakedb/gosnowflake#Config) and to simplify setting the account name. 
Specifically: +- `include_retry_reason`, `max_retry_count`, `driver_tracing`, `tmp_directory_path` and `disable_console_login` are the new fields supported in the driver +- `disable_saml_url_check` will be added to the provider after upgrading the driver +- `account_name` and `organization_name` were added to improve the handling of account names. Read more in [docs](https://docs.snowflake.com/en/user-guide/admin-account-identifier#using-an-account-name-as-an-identifier). + +#### *(behavior change)* changed configuration of driver log level +To be more consistent with other configuration options, we have decided to add `driver_tracing` to the configuration schema. This value can also be configured with the `SNOWFLAKE_DRIVER_TRACING` environmental variable and the `drivertracing` field in the TOML file. The previous `SF_TF_GOSNOWFLAKE_LOG_LEVEL` environmental variable is no longer supported and has been removed from the provider. + +#### *(behavior change)* deprecated fields +Because of the new `account_name` and `organization_name` fields, `account` is now deprecated. It will be removed with the v1 release. Please adjust your configurations from +```terraform +provider "snowflake" { + account = "ORGANIZATION-ACCOUNT" +} +``` + +to +```terraform +provider "snowflake" { + organization_name = "ORGANIZATION" + account_name = "ACCOUNT" +} +``` + #### *(behavior change)* changed behavior of some fields For the fields that are not deprecated, we focused on improving validations and documentation. Also, we adjusted some fields to match our [driver's](https://github.com/snowflakedb/gosnowflake) defaults. Specifically: - Relaxed validations for enum fields like `protocol` and `authenticator`. Now, the case of such fields is ignored. @@ -211,6 +291,26 @@ This segregation was based on the secret flows in CREATE SECRET. i.e.: See reference [docs](https://docs.snowflake.com/en/sql-reference/sql/create-secret). +### *(bugfix)* Handle BCR Bundle 2024_08 in snowflake_user resource + +[bcr 2024_08](https://docs.snowflake.com/en/release-notes/bcr-bundles/2024_08/bcr-1798) changed the "empty" response in the `SHOW USERS` query. This provider version adapts to the new result types; it should be used if you want to have the 2024_08 Bundle enabled on your account. + +Note: Because [bcr 2024_07](https://docs.snowflake.com/en/release-notes/bcr-bundles/2024_07/bcr-1692) changes the way the `default_secondary_roles` attribute behaves, drift may be reported when enabling the 2024_08 Bundle. Check [Handling default secondary roles](#breaking-change-handling-default-secondary-roles) for more context. + +Connected issues: [#3125](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/3125) + +### *(bugfix)* Handle user import correctly + +#### Context before the change + +The password is empty after a `snowflake_user` import; we can't read it from the config or from Snowflake. +During the next terraform plan+apply it's updated to the "same" value. +This results in an error on the Snowflake side: `New password rejected by current password policy. Reason: 'PRIOR_USE'.` + +#### After the change + +The error will be ignored on the provider side (after all, it means that the password in the state is the same as the one on the Snowflake side). Still, a plan+apply is needed after importing a user. 
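As an illustration only (the resource address `snowflake_user.example` and the user name below are hypothetical, not taken from this guide), a minimal sketch of that import-then-apply flow could look like:

```shell
# Import the existing user into the Terraform state (hypothetical address and identifier).
terraform import snowflake_user.example '"EXAMPLE_USER"'

# A follow-up plan and apply are still required; the PRIOR_USE password error is now ignored by the provider.
terraform plan
terraform apply
```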
+ ## v0.96.0 ➞ v0.97.0 ### *(new feature)* snowflake_stream_on_table, snowflake_stream_on_external_table resource diff --git a/Makefile b/Makefile index c8feb876b8..66435284af 100644 --- a/Makefile +++ b/Makefile @@ -74,7 +74,7 @@ test-architecture: ## check architecture constraints between packages go test ./pkg/architests/... -v test-client: ## runs test that checks sdk.Client without instrumentedsql - SF_TF_NO_INSTRUMENTED_SQL=1 SF_TF_GOSNOWFLAKE_LOG_LEVEL=debug go test ./pkg/sdk/internal/client/... -v + SF_TF_NO_INSTRUMENTED_SQL=1 go test ./pkg/sdk/internal/client/... -v test-object-renaming: ## runs tests in object_renaming_acceptance_test.go TEST_SF_TF_ENABLE_OBJECT_RENAMING=1 go test ./pkg/resources/object_renaming_acceptace_test.go -v diff --git a/README.md b/README.md index 391118f254..9136352ea3 100644 --- a/README.md +++ b/README.md @@ -77,7 +77,7 @@ Some links that might help you: - The [issues section](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues) might already have an issue addressing your question. ## Would you like to create an issue? -If you would like to create a GitHub issue, please read our [guide](./CREATING_ISSUES.md) first. +If you would like to create a GitHub issue, please read our [guide](./CREATING_ISSUES.md) first. It contains useful links, FAQ, and commonly known issues with solutions that may already solve your case. ## Additional debug logs for `snowflake_grant_privileges_to_role` resource @@ -89,17 +89,17 @@ Set environment variable `SF_TF_ADDITIONAL_DEBUG_LOGGING` to a non-empty value. ## Additional SQL Client configuration Currently underlying sql [gosnowflake](https://github.com/snowflakedb/gosnowflake) driver is wrapped with [instrumentedsql](https://github.com/luna-duclos/instrumentedsql). In order to use raw [gosnowflake](https://github.com/snowflakedb/gosnowflake) driver, set environment variable `SF_TF_NO_INSTRUMENTED_SQL` to a non-empty value. -By default, the underlying driver is set to error level logging. It can be changed by setting `SF_TF_GOSNOWFLAKE_LOG_LEVEL` to one of: -- `panic` -- `fatal` -- `error` -- `warn` -- `warning` -- `info` -- `debug` +By default, the underlying driver is set to error level logging. It can be changed by setting `driver_tracing` field in the configuration to one of (from most to least verbose): - `trace` +- `debug` +- `info` +- `print` +- `warning` +- `error` +- `fatal` +- `panic` -*note*: It's possible it will be one of the provider config parameters in the future provider versions. +Read more in [provider configuration docs](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/latest/docs#schema). ## Contributing diff --git a/docs/data-sources/connections.md b/docs/data-sources/connections.md new file mode 100644 index 0000000000..5dff3c63ad --- /dev/null +++ b/docs/data-sources/connections.md @@ -0,0 +1,79 @@ +--- +page_title: "snowflake_connections Data Source - terraform-provider-snowflake" +subcategory: "" +description: |- + Datasource used to get details of filtered connections. Filtering is aligned with the current possibilities for SHOW CONNECTIONS https://docs.snowflake.com/en/sql-reference/sql/show-connections query. The results of SHOW is encapsulated in one output collection connections. +--- + +!> **V1 release candidate** This data source is a release candidate for the V1. We do not expect significant changes in it before the V1. We will welcome any feedback and adjust the data source if needed. Any errors reported will be resolved with a higher priority. 
We encourage checking this data source out before the V1 release. Please follow the [migration guide](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/MIGRATION_GUIDE.md#v0970--v0980) to use it. + +# snowflake_connections (Data Source) + +Datasource used to get details of filtered connections. Filtering is aligned with the current possibilities for [SHOW CONNECTIONS](https://docs.snowflake.com/en/sql-reference/sql/show-connections) query. The results of SHOW is encapsulated in one output collection `connections`. + +## Example Usage + +```terraform +# Simple usage +data "snowflake_connections" "simple" { +} + +output "simple_output" { + value = data.snowflake_connections.simple.connections +} + +# Filtering (like) +data "snowflake_connections" "like" { + like = "connection-name" +} + +output "like_output" { + value = data.snowflake_connections.like.connections +} + +# Filtering by prefix (like) +data "snowflake_connections" "like_prefix" { + like = "prefix%" +} + +output "like_prefix_output" { + value = data.snowflake_connections.like_prefix.connections +} +``` + + +## Schema + +### Optional + +- `like` (String) Filters the output with **case-insensitive** pattern, with support for SQL wildcard characters (`%` and `_`). + +### Read-Only + +- `connections` (List of Object) Holds the aggregated output of all connections details queries. (see [below for nested schema](#nestedatt--connections)) +- `id` (String) The ID of this resource. + + +### Nested Schema for `connections` + +Read-Only: + +- `show_output` (List of Object) (see [below for nested schema](#nestedobjatt--connections--show_output)) + + +### Nested Schema for `connections.show_output` + +Read-Only: + +- `account_locator` (String) +- `account_name` (String) +- `comment` (String) +- `connection_url` (String) +- `created_on` (String) +- `failover_allowed_to_accounts` (List of String) +- `is_primary` (Boolean) +- `name` (String) +- `organization_name` (String) +- `primary` (String) +- `region_group` (String) +- `snowflake_region` (String) diff --git a/docs/data-sources/grants.md b/docs/data-sources/grants.md index c7ccbfafce..6ffd89b195 100644 --- a/docs/data-sources/grants.md +++ b/docs/data-sources/grants.md @@ -5,6 +5,8 @@ description: |- --- +!> **V1 release candidate** This datasource was reworked and is a release candidate for the V1. We do not expect significant changes in it before the V1. We will welcome any feedback and adjust the resource if needed. Any errors reported will be resolved with a higher priority. We encourage checking this resource out before the V1 release. To migrate from older grant resources please follow the [migration guide](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/MIGRATION_GUIDE.md#migration-from-old-grant-resources-to-new-ones). + # snowflake_grants (Data Source) diff --git a/docs/index.md b/docs/index.md index cc619bb20a..518af87d7c 100644 --- a/docs/index.md +++ b/docs/index.md @@ -17,6 +17,8 @@ Coverage is focused on part of Snowflake related to access control. ## Example Provider Configuration +This is an example configuration of the provider in `main.tf` in a configuration directory. More examples are provided [below](#order-precedence). + ```terraform terraform { required_providers { @@ -26,31 +28,34 @@ terraform { } } +# A simple configuration of the provider with a default authentication. +# A default value for `authenticator` is `snowflake`, enabling authentication with `user` and `password`. provider "snowflake" { - account = "..." 
# required if not using profile. Can also be set via SNOWFLAKE_ACCOUNT env var - username = "..." # required if not using profile or token. Can also be set via SNOWFLAKE_USER env var - password = "..." - authenticator = "..." # required if not using password as auth method - oauth_access_token = "..." - private_key_path = "..." - private_key = "..." - private_key_passphrase = "..." - oauth_refresh_token = "..." - oauth_client_id = "..." - oauth_client_secret = "..." - oauth_endpoint = "..." - oauth_redirect_url = "..." + organization_name = "..." # required if not using profile. Can also be set via SNOWFLAKE_ORGANIZATION_NAME env var + account_name = "..." # required if not using profile. Can also be set via SNOWFLAKE_ACCOUNT_NAME env var + user = "..." # required if not using profile or token. Can also be set via SNOWFLAKE_USER env var + password = "..." // optional - region = "..." # required if using legacy format for account identifier role = "..." host = "..." warehouse = "..." - session_params = { + params = { query_tag = "..." } } +# A simple configuration of the provider with private key authentication. +provider "snowflake" { + organization_name = "..." # required if not using profile. Can also be set via SNOWFLAKE_ORGANIZATION_NAME env var + account_name = "..." # required if not using profile. Can also be set via SNOWFLAKE_ACCOUNT_NAME env var + user = "..." # required if not using profile or token. Can also be set via SNOWFLAKE_USER env var + authenticator = "SNOWFLAKE_JWT" + private_key = "-----BEGIN ENCRYPTED PRIVATE KEY-----..." + private_key_passphrase = "passphrase" +} + +# By using the `profile` field, missing fields will be populated from ~/.snowflake/config TOML file provider "snowflake" { profile = "securityadmin" } @@ -65,22 +70,27 @@ provider "snowflake" { ### Optional -- `account` (String) Specifies your Snowflake account identifier assigned, by Snowflake. The [account locator](https://docs.snowflake.com/en/user-guide/admin-account-identifier#format-2-account-locator-in-a-region) format is not supported. For information about account identifiers, see the [Snowflake documentation](https://docs.snowflake.com/en/user-guide/admin-account-identifier.html). Required unless using `profile`. Can also be sourced from the `SNOWFLAKE_ACCOUNT` environment variable. -- `authenticator` (String) Specifies the [authentication type](https://pkg.go.dev/github.com/snowflakedb/gosnowflake#AuthType) to use when connecting to Snowflake. Valid values include: Snowflake, OAuth, ExternalBrowser, Okta, JWT, TokenAccessor, UsernamePasswordMFA. It has to be set explicitly to JWT for private key authentication. Can also be sourced from the `SNOWFLAKE_AUTHENTICATOR` environment variable. +- `account` (String, Deprecated) Use `account_name` and `organization_name` instead. Specifies your Snowflake account identifier assigned, by Snowflake. The [account locator](https://docs.snowflake.com/en/user-guide/admin-account-identifier#format-2-account-locator-in-a-region) format is not supported. For information about account identifiers, see the [Snowflake documentation](https://docs.snowflake.com/en/user-guide/admin-account-identifier.html). Required unless using `profile`. Can also be sourced from the `SNOWFLAKE_ACCOUNT` environment variable. +- `account_name` (String) Specifies your Snowflake account name assigned by Snowflake. For information about account identifiers, see the [Snowflake documentation](https://docs.snowflake.com/en/user-guide/admin-account-identifier#account-name). 
Required unless using `profile`. Can also be sourced from the `SNOWFLAKE_ACCOUNT_NAME` environment variable. +- `authenticator` (String) Specifies the [authentication type](https://pkg.go.dev/github.com/snowflakedb/gosnowflake#AuthType) to use when connecting to Snowflake. Valid options are: `SNOWFLAKE` | `OAUTH` | `EXTERNALBROWSER` | `OKTA` | `JWT` | `SNOWFLAKE_JWT` | `TOKENACCESSOR` | `USERNAMEPASSWORDMFA`. Value `JWT` is deprecated and will be removed in future releases. Can also be sourced from the `SNOWFLAKE_AUTHENTICATOR` environment variable. - `browser_auth` (Boolean, Deprecated) Required when `oauth_refresh_token` is used. Can also be sourced from `SNOWFLAKE_USE_BROWSER_AUTH` environment variable. - `client_ip` (String) IP address for network checks. Can also be sourced from the `SNOWFLAKE_CLIENT_IP` environment variable. - `client_request_mfa_token` (String) When true the MFA token is cached in the credential manager. True by default in Windows/OSX. False for Linux. Can also be sourced from the `SNOWFLAKE_CLIENT_REQUEST_MFA_TOKEN` environment variable. - `client_store_temporary_credential` (String) When true the ID token is cached in the credential manager. True by default in Windows/OSX. False for Linux. Can also be sourced from the `SNOWFLAKE_CLIENT_STORE_TEMPORARY_CREDENTIAL` environment variable. - `client_timeout` (Number) The timeout in seconds for the client to complete the authentication. Can also be sourced from the `SNOWFLAKE_CLIENT_TIMEOUT` environment variable. +- `disable_console_login` (String) Indicates whether console login should be disabled in the driver. Can also be sourced from the `SNOWFLAKE_DISABLE_CONSOLE_LOGIN` environment variable. - `disable_query_context_cache` (Boolean) Disables HTAP query context cache in the driver. Can also be sourced from the `SNOWFLAKE_DISABLE_QUERY_CONTEXT_CACHE` environment variable. - `disable_telemetry` (Boolean) Disables telemetry in the driver. Can also be sourced from the `DISABLE_TELEMETRY` environment variable. +- `driver_tracing` (String) Specifies the logging level to be used by the driver. Valid options are: `trace` | `debug` | `info` | `print` | `warning` | `error` | `fatal` | `panic`. Can also be sourced from the `SNOWFLAKE_DRIVER_TRACING` environment variable. - `external_browser_timeout` (Number) The timeout in seconds for the external browser to complete the authentication. Can also be sourced from the `SNOWFLAKE_EXTERNAL_BROWSER_TIMEOUT` environment variable. - `host` (String) Specifies a custom host value used by the driver for privatelink connections. Can also be sourced from the `SNOWFLAKE_HOST` environment variable. +- `include_retry_reason` (String) Should retried request contain retry reason. Can also be sourced from the `SNOWFLAKE_INCLUDE_RETRY_REASON` environment variable. - `insecure_mode` (Boolean) If true, bypass the Online Certificate Status Protocol (OCSP) certificate revocation check. IMPORTANT: Change the default value for testing or emergency situations only. Can also be sourced from the `SNOWFLAKE_INSECURE_MODE` environment variable. - `jwt_client_timeout` (Number) The timeout in seconds for the JWT client to complete the authentication. Can also be sourced from the `SNOWFLAKE_JWT_CLIENT_TIMEOUT` environment variable. - `jwt_expire_timeout` (Number) JWT expire after timeout in seconds. Can also be sourced from the `SNOWFLAKE_JWT_EXPIRE_TIMEOUT` environment variable. - `keep_session_alive` (Boolean) Enables the session to persist even after the connection is closed. 
Can also be sourced from the `SNOWFLAKE_KEEP_SESSION_ALIVE` environment variable. - `login_timeout` (Number) Login retry timeout in seconds EXCLUDING network roundtrip and read out http response. Can also be sourced from the `SNOWFLAKE_LOGIN_TIMEOUT` environment variable. +- `max_retry_count` (Number) Specifies how many times a non-periodic HTTP request can be retried by the driver. Can also be sourced from the `SNOWFLAKE_MAX_RETRY_COUNT` environment variable. - `oauth_access_token` (String, Sensitive, Deprecated) Token for use with OAuth. Generating the token is left to other tools. Cannot be used with `browser_auth`, `private_key_path`, `oauth_refresh_token` or `password`. Can also be sourced from `SNOWFLAKE_OAUTH_ACCESS_TOKEN` environment variable. - `oauth_client_id` (String, Sensitive, Deprecated) Required when `oauth_refresh_token` is used. Can also be sourced from `SNOWFLAKE_OAUTH_CLIENT_ID` environment variable. - `oauth_client_secret` (String, Sensitive, Deprecated) Required when `oauth_refresh_token` is used. Can also be sourced from `SNOWFLAKE_OAUTH_CLIENT_SECRET` environment variable. @@ -88,25 +98,27 @@ provider "snowflake" { - `oauth_redirect_url` (String, Sensitive, Deprecated) Required when `oauth_refresh_token` is used. Can also be sourced from `SNOWFLAKE_OAUTH_REDIRECT_URL` environment variable. - `oauth_refresh_token` (String, Sensitive, Deprecated) Token for use with OAuth. Setup and generation of the token is left to other tools. Should be used in conjunction with `oauth_client_id`, `oauth_client_secret`, `oauth_endpoint`, `oauth_redirect_url`. Cannot be used with `browser_auth`, `private_key_path`, `oauth_access_token` or `password`. Can also be sourced from `SNOWFLAKE_OAUTH_REFRESH_TOKEN` environment variable. - `ocsp_fail_open` (String) True represents OCSP fail open mode. False represents OCSP fail closed mode. Fail open true by default. Can also be sourced from the `SNOWFLAKE_OCSP_FAIL_OPEN` environment variable. -- `okta_url` (String) The URL of the Okta server. e.g. https://example.okta.com. Can also be sourced from the `SNOWFLAKE_OKTA_URL` environment variable. -- `params` (Map of String) Sets other connection (i.e. session) parameters. [Parameters](https://docs.snowflake.com/en/sql-reference/parameters) +- `okta_url` (String) The URL of the Okta server. e.g. https://example.okta.com. The Okta URL host needs to have the suffix `okta.com`. Read more in Snowflake [docs](https://docs.snowflake.com/en/user-guide/oauth-okta). Can also be sourced from the `SNOWFLAKE_OKTA_URL` environment variable. +- `organization_name` (String) Specifies your Snowflake organization name assigned by Snowflake. For information about account identifiers, see the [Snowflake documentation](https://docs.snowflake.com/en/user-guide/admin-account-identifier#organization-name). Required unless using `profile`. Can also be sourced from the `SNOWFLAKE_ORGANIZATION_NAME` environment variable. +- `params` (Map of String) Sets other connection (i.e. session) parameters. [Parameters](https://docs.snowflake.com/en/sql-reference/parameters). This field cannot be set with environmental variables. - `passcode` (String) Specifies the passcode provided by Duo when using multi-factor authentication (MFA) for login. Can also be sourced from the `SNOWFLAKE_PASSCODE` environment variable. - `passcode_in_password` (Boolean) False by default. Set to true if the MFA passcode is embedded in the configured password. Can also be sourced from the `SNOWFLAKE_PASSCODE_IN_PASSWORD` environment variable. 
-- `password` (String, Sensitive) Password for username+password auth. Cannot be used with `browser_auth` or `private_key_path`. Can also be sourced from the `SNOWFLAKE_PASSWORD` environment variable. +- `password` (String, Sensitive) Password for user + password auth. Cannot be used with `browser_auth` or `private_key_path`. Can also be sourced from the `SNOWFLAKE_PASSWORD` environment variable. - `port` (Number) Specifies a custom port value used by the driver for privatelink connections. Can also be sourced from the `SNOWFLAKE_PORT` environment variable. - `private_key` (String, Sensitive) Private Key for username+private-key auth. Cannot be used with `browser_auth` or `password`. Can also be sourced from the `SNOWFLAKE_PRIVATE_KEY` environment variable. - `private_key_passphrase` (String, Sensitive) Supports the encryption ciphers aes-128-cbc, aes-128-gcm, aes-192-cbc, aes-192-gcm, aes-256-cbc, aes-256-gcm, and des-ede3-cbc. Can also be sourced from the `SNOWFLAKE_PRIVATE_KEY_PASSPHRASE` environment variable. - `private_key_path` (String, Sensitive, Deprecated) Path to a private key for using keypair authentication. Cannot be used with `browser_auth`, `oauth_access_token` or `password`. Can also be sourced from `SNOWFLAKE_PRIVATE_KEY_PATH` environment variable. - `profile` (String) Sets the profile to read from ~/.snowflake/config file. Can also be sourced from the `SNOWFLAKE_PROFILE` environment variable. -- `protocol` (String) A protocol used in the connection. Valid options are: `HTTP` | `HTTPS`. Can also be sourced from the `SNOWFLAKE_PROTOCOL` environment variable. +- `protocol` (String) A protocol used in the connection. Valid options are: `http` | `https`. Can also be sourced from the `SNOWFLAKE_PROTOCOL` environment variable. - `region` (String, Deprecated) Snowflake region, such as "eu-central-1", with this parameter. However, since this parameter is deprecated, it is best to specify the region as part of the account parameter. For details, see the description of the account parameter. [Snowflake region](https://docs.snowflake.com/en/user-guide/intro-regions.html) to use. Required if using the [legacy format for the `account` identifier](https://docs.snowflake.com/en/user-guide/admin-account-identifier.html#format-2-legacy-account-locator-in-a-region) in the form of `.`. Can also be sourced from the `SNOWFLAKE_REGION` environment variable. - `request_timeout` (Number) request retry timeout in seconds EXCLUDING network roundtrip and read out http response. Can also be sourced from the `SNOWFLAKE_REQUEST_TIMEOUT` environment variable. - `role` (String) Specifies the role to use by default for accessing Snowflake objects in the client session. Can also be sourced from the `SNOWFLAKE_ROLE` environment variable. - `session_params` (Map of String, Deprecated) Sets session parameters. [Parameters](https://docs.snowflake.com/en/sql-reference/parameters) +- `tmp_directory_path` (String) Sets temporary directory used by the driver for operations like encrypting, compressing etc. Can also be sourced from the `SNOWFLAKE_TMP_DIRECTORY_PATH` environment variable. - `token` (String, Sensitive) Token to use for OAuth and other forms of token based auth. Can also be sourced from the `SNOWFLAKE_TOKEN` environment variable. - `token_accessor` (Block List, Max: 1) (see [below for nested schema](#nestedblock--token_accessor)) - `user` (String) Username. Required unless using `profile`. Can also be sourced from the `SNOWFLAKE_USER` environment variable. 
-- `username` (String, Deprecated) Username for username+password authentication. Required unless using `profile`. Can also be sourced from the `SNOWFLAKE_USERNAME` environment variable. +- `username` (String, Deprecated) Username for user + password authentication. Required unless using `profile`. Can also be sourced from the `SNOWFLAKE_USERNAME` environment variable. - `validate_default_parameters` (String) True by default. If false, disables the validation checks for Database, Schema, Warehouse and Role at the time a connection is established. Can also be sourced from the `SNOWFLAKE_VALIDATE_DEFAULT_PARAMETERS` environment variable. - `warehouse` (String) Specifies the virtual warehouse to use by default for queries, loading, etc. in the client session. Can also be sourced from the `SNOWFLAKE_WAREHOUSE` environment variable. @@ -132,7 +144,7 @@ The Snowflake provider supports multiple ways to authenticate: * Private Key * Config File -In all cases account and username are required. +In all cases `organization_name`, `account_name` and `user` are required. ### Keypair Authentication Environment Variables @@ -209,30 +221,143 @@ export SNOWFLAKE_USER='...' export SNOWFLAKE_PASSWORD='...' ``` -### Config File +## Order Precedence -If you choose to use a config file, the optional `profile` attribute specifies the profile to use from the config file. If no profile is specified, the default profile is used. The Snowflake config file lives at `~/.snowflake/config` and uses [TOML](https://toml.io/) format. You can override this location by setting the `SNOWFLAKE_CONFIG_PATH` environment variable. If no username and account are specified, the provider will fall back to reading the config file. +Currently, the provider can be configured in three ways: +1. In a Terraform file located in the Terraform module with other resources. -```shell +Example content of the Terraform file configuration: + +```terraform +provider "snowflake" { + organization_name = "..." + account_name = "..." + user = "..." + password = "..." +} +``` + +2. In environmental variables (envs). This is mainly used to provide sensitive values. + + +```bash +export SNOWFLAKE_USER="..." +export SNOWFLAKE_PRIVATE_KEY_PATH="~/.ssh/snowflake_key" +``` + +3. In a TOML file (default in ~/.snowflake/config). Notice the use of different profiles. The profile name needs to be specified in the Terraform configuration file in the `profile` field. When this is not specified, the `default` profile is loaded. +When a `default` profile is not present in the TOML file, it is treated as "empty", without failing. + +Example content of the Terraform file configuration: + +```terraform +provider "snowflake" { + profile = "default" +} +``` + +Example content of the TOML file configuration: + +```toml [default] -account='TESTACCOUNT' -user='TEST_USER' -password='hunter2' +organizationname='organization_name' +accountname='account_name' +user='user' +password='password' role='ACCOUNTADMIN' -[securityadmin] -account='TESTACCOUNT' -user='TEST_USER' -password='hunter2' -role='SECURITYADMIN' +[secondary_test_account] +organizationname='organization_name' +accountname='account2_name' +user='user' +password='password' +role='ACCOUNTADMIN' ``` -## Order Precedence +Not all fields must be configured in one source; users can choose which fields are configured in which source. +The provider uses an established hierarchy of sources. The current behavior is that for each field: +1. Check if it is present in the provider configuration. If yes, use this value. 
If not, go to step 2. +1. Check if it is present in the environment variables. If yes, use this value. If not, go to step 3. +1. Check if it is present in the TOML config file (specifically, use the profile name configured in one of the steps above). If yes, use this value. If not, the value is considered empty. + +An example TOML file contents: + +```toml +[example] +accountname = 'account_name' +organizationname = 'organization_name' +user = 'user' +password = 'password' +warehouse = 'SNOWFLAKE' +role = 'ACCOUNTADMIN' +clientip = '1.2.3.4' +protocol = 'https' +port = 443 +oktaurl = 'https://example.com' +clienttimeout = 10 +jwtclienttimeout = 20 +logintimeout = 30 +requesttimeout = 40 +jwtexpiretimeout = 50 +externalbrowsertimeout = 60 +maxretrycount = 1 +authenticator = 'snowflake' +insecuremode = true +ocspfailopen = true +keepsessionalive = true +disabletelemetry = true +validatedefaultparameters = true +clientrequestmfatoken = true +clientstoretemporarycredential = true +tracing = 'info' +tmpdirpath = '/tmp/terraform-provider/' +disablequerycontextcache = true +includeretryreason = true +disableconsolelogin = true + +[example.params] +param_key = 'param_value' +``` + +An example terraform configuration file equivalent: -The Snowflake provider will use the following order of precedence when determining which credentials to use: -1) Provider Configuration -2) Environment Variables -3) Config File +```terraform +provider "snowflake" { + organization_name = "organization_name" + account_name = "account_name" + user = "user" + password = "password" + warehouse = "SNOWFLAKE" + protocol = "https" + port = "443" + role = "ACCOUNTADMIN" + validate_default_parameters = true + client_ip = "1.2.3.4" + authenticator = "snowflake" + okta_url = "https://example.com" + login_timeout = 10 + request_timeout = 20 + jwt_expire_timeout = 30 + client_timeout = 40 + jwt_client_timeout = 50 + external_browser_timeout = 60 + insecure_mode = true + ocsp_fail_open = true + keep_session_alive = true + disable_telemetry = true + client_request_mfa_token = true + client_store_temporary_credential = true + disable_query_context_cache = true + include_retry_reason = true + max_retry_count = 3 + driver_tracing = "info" + tmp_directory_path = "/tmp/terraform-provider/" + disable_console_login = true + params = { + param_key = "param_value" + } +} +``` ## Currently deprecated resources @@ -241,6 +366,7 @@ The Snowflake provider will use the following order of precedence when determini - [snowflake_role](./docs/resources/role) - use [snowflake_account_role](./docs/resources/account_role) instead - [snowflake_saml_integration](./docs/resources/saml_integration) - use [snowflake_saml2_integration](./docs/resources/saml2_integration) instead - [snowflake_stream](./docs/resources/stream) +- [snowflake_tag_masking_policy_association](./docs/resources/tag_masking_policy_association) ## Currently deprecated datasources diff --git a/docs/resources/account_role.md b/docs/resources/account_role.md index 1d5a3eec4e..e5c9a7068f 100644 --- a/docs/resources/account_role.md +++ b/docs/resources/account_role.md @@ -33,7 +33,7 @@ resource "snowflake_account_role" "complete" { ### Required -- `name` (String) Identifier for the role; must be unique for your account. 
Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `(`, `)`, `"` +- `name` (String) Identifier for the role; must be unique for your account. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"` ### Optional diff --git a/docs/resources/api_authentication_integration_with_authorization_code_grant.md b/docs/resources/api_authentication_integration_with_authorization_code_grant.md index af2ad46347..7ff4240139 100644 --- a/docs/resources/api_authentication_integration_with_authorization_code_grant.md +++ b/docs/resources/api_authentication_integration_with_authorization_code_grant.md @@ -45,7 +45,7 @@ resource "snowflake_api_authentication_integration_with_authorization_code_grant ### Required - `enabled` (Boolean) Specifies whether this security integration is enabled or disabled. -- `name` (String) Specifies the identifier (i.e. name) for the integration. This value must be unique in your account. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `(`, `)`, `"` +- `name` (String) Specifies the identifier (i.e. name) for the integration. This value must be unique in your account. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"` - `oauth_client_id` (String) Specifies the client ID for the OAuth application in the external service. - `oauth_client_secret` (String) Specifies the client secret for the OAuth application in the ServiceNow instance from the previous step. The connector uses this to request an access token from the ServiceNow instance. diff --git a/docs/resources/api_authentication_integration_with_client_credentials.md b/docs/resources/api_authentication_integration_with_client_credentials.md index 3813877b62..098bdf6ce8 100644 --- a/docs/resources/api_authentication_integration_with_client_credentials.md +++ b/docs/resources/api_authentication_integration_with_client_credentials.md @@ -43,7 +43,7 @@ resource "snowflake_api_authentication_integration_with_client_credentials" "tes ### Required - `enabled` (Boolean) Specifies whether this security integration is enabled or disabled. -- `name` (String) Specifies the identifier (i.e. name) for the integration. This value must be unique in your account. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `(`, `)`, `"` +- `name` (String) Specifies the identifier (i.e. name) for the integration. This value must be unique in your account. 
Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"` - `oauth_client_id` (String) Specifies the client ID for the OAuth application in the external service. - `oauth_client_secret` (String) Specifies the client secret for the OAuth application in the ServiceNow instance from the previous step. The connector uses this to request an access token from the ServiceNow instance. diff --git a/docs/resources/api_authentication_integration_with_jwt_bearer.md b/docs/resources/api_authentication_integration_with_jwt_bearer.md index 9b2024cbd4..c4cdee9bdf 100644 --- a/docs/resources/api_authentication_integration_with_jwt_bearer.md +++ b/docs/resources/api_authentication_integration_with_jwt_bearer.md @@ -46,7 +46,7 @@ resource "snowflake_api_authentication_integration_with_jwt_bearer" "test" { ### Required - `enabled` (Boolean) Specifies whether this security integration is enabled or disabled. -- `name` (String) Specifies the identifier (i.e. name) for the integration. This value must be unique in your account. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `(`, `)`, `"` +- `name` (String) Specifies the identifier (i.e. name) for the integration. This value must be unique in your account. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"` - `oauth_assertion_issuer` (String) - `oauth_client_id` (String) Specifies the client ID for the OAuth application in the external service. - `oauth_client_secret` (String) Specifies the client secret for the OAuth application in the ServiceNow instance from the previous step. The connector uses this to request an access token from the ServiceNow instance. diff --git a/docs/resources/authentication_policy.md b/docs/resources/authentication_policy.md index 99b64f0cfe..e53ccb42ca 100644 --- a/docs/resources/authentication_policy.md +++ b/docs/resources/authentication_policy.md @@ -41,9 +41,9 @@ resource "snowflake_authentication_policy" "complete" { ### Required -- `database` (String) The database in which to create the authentication policy. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `(`, `)`, `"` -- `name` (String) Specifies the identifier for the authentication policy. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `(`, `)`, `"` -- `schema` (String) The schema in which to create the authentication policy. 
Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `(`, `)`, `"` +- `database` (String) The database in which to create the authentication policy. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"` +- `name` (String) Specifies the identifier for the authentication policy. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"` +- `schema` (String) The schema in which to create the authentication policy. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"` ### Optional diff --git a/docs/resources/database.md b/docs/resources/database.md index a8a1596e1c..998d9ca232 100644 --- a/docs/resources/database.md +++ b/docs/resources/database.md @@ -84,7 +84,7 @@ resource "snowflake_database" "primary" { ### Required -- `name` (String) Specifies the identifier for the database; must be unique for your account. As a best practice for [Database Replication and Failover](https://docs.snowflake.com/en/user-guide/db-replication-intro), it is recommended to give each secondary database the same name as its primary database. This practice supports referencing fully-qualified objects (i.e. '..') by other objects in the same database, such as querying a fully-qualified table name in a view. If a secondary database has a different name from the primary database, then these object references would break in the secondary database. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `(`, `)`, `"` +- `name` (String) Specifies the identifier for the database; must be unique for your account. As a best practice for [Database Replication and Failover](https://docs.snowflake.com/en/user-guide/db-replication-intro), it is recommended to give each secondary database the same name as its primary database. This practice supports referencing fully-qualified objects (i.e. '..') by other objects in the same database, such as querying a fully-qualified table name in a view. If a secondary database has a different name from the primary database, then these object references would break in the secondary database. 
Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"` ### Optional diff --git a/docs/resources/database_role.md b/docs/resources/database_role.md index 0774975d4b..f0097ccb89 100644 --- a/docs/resources/database_role.md +++ b/docs/resources/database_role.md @@ -32,8 +32,8 @@ resource "snowflake_database_role" "test_database_role" { ### Required -- `database` (String) The database in which to create the database role. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `(`, `)`, `"` -- `name` (String) Specifies the identifier for the database role. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `(`, `)`, `"` +- `database` (String) The database in which to create the database role. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"` +- `name` (String) Specifies the identifier for the database role. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"` ### Optional diff --git a/docs/resources/external_oauth_integration.md b/docs/resources/external_oauth_integration.md index b76fc63e59..2d6473cfd9 100644 --- a/docs/resources/external_oauth_integration.md +++ b/docs/resources/external_oauth_integration.md @@ -70,7 +70,7 @@ resource "snowflake_external_oauth_integration" "test" { - `external_oauth_snowflake_user_mapping_attribute` (String) Indicates which Snowflake user record attribute should be used to map the access token to a Snowflake user record. Valid values are (case-insensitive): `LOGIN_NAME` | `EMAIL_ADDRESS`. - `external_oauth_token_user_mapping_claim` (Set of String) Specifies the access token claim or claims that can be used to map the access token to a Snowflake user record. If removed from the config, the resource is recreated. - `external_oauth_type` (String) Specifies the OAuth 2.0 authorization server to be Okta, Microsoft Azure AD, Ping Identity PingFederate, or a Custom OAuth 2.0 authorization server. Valid values are (case-insensitive): `OKTA` | `AZURE` | `PING_FEDERATE` | `CUSTOM`. -- `name` (String) Specifies the name of the External Oath integration. This name follows the rules for Object Identifiers. The name should be unique among security integrations in your account. 
Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `(`, `)`, `"` +- `name` (String) Specifies the name of the External OAuth integration. This name follows the rules for Object Identifiers. The name should be unique among security integrations in your account. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"` ### Optional diff --git a/docs/resources/external_volume.md b/docs/resources/external_volume.md index 5c5a0f8cca..249d2a60f4 100644 --- a/docs/resources/external_volume.md +++ b/docs/resources/external_volume.md @@ -16,7 +16,7 @@ Resource used to manage external volume objects. For more information, check [ex ### Required -- `name` (String) Identifier for the external volume; must be unique for your account. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `(`, `)`, `"` +- `name` (String) Identifier for the external volume; must be unique for your account. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"` - `storage_location` (Block List, Min: 1) List of named cloud storage locations in different regions and, optionally, cloud platforms. Minimum 1 required. The order of the list is important as it impacts the active storage location, and updates will be triggered if it changes. Note that not all parameter combinations are valid as they depend on the given storage_provider. Consult [the docs](https://docs.snowflake.com/en/sql-reference/sql/create-external-volume#cloud-provider-parameters-cloudproviderparams) for more details on this. (see [below for nested schema](#nestedblock--storage_location)) ### Optional @@ -37,7 +37,7 @@ Resource used to manage external volume objects. For more information, check [ex Required: - `storage_base_url` (String) Specifies the base URL for your cloud storage location. -- `storage_location_name` (String) Name of the storage location. Must be unique for the external volume. Do not use the name `terraform_provider_sentinel_storage_location` - this is reserved for the provider for performing update operations.
Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"` - `storage_provider` (String) Specifies the cloud storage provider that stores your data files. Valid values are (case-insensitive): `GCS` | `AZURE` | `S3` | `S3GOV`. Optional: diff --git a/docs/resources/grant_account_role.md b/docs/resources/grant_account_role.md index 89382c496d..b01aad31af 100644 --- a/docs/resources/grant_account_role.md +++ b/docs/resources/grant_account_role.md @@ -5,6 +5,8 @@ description: |- --- +!> **V1 release candidate** This resource was reworked and is a release candidate for the V1. We do not expect significant changes in it before the V1. We will welcome any feedback and adjust the resource if needed. Any errors reported will be resolved with a higher priority. We encourage checking this resource out before the V1 release. To migrate from older grant resources please follow the [migration guide](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/MIGRATION_GUIDE.md#migration-from-old-grant-resources-to-new-ones). + # snowflake_grant_account_role (Resource) @@ -73,5 +75,5 @@ Import is supported using the following syntax: ```shell # format is role_name (string) | grantee_object_type (ROLE|USER) | grantee_name (string) -terraform import "\"test_role\"|ROLE|\"test_parent_role\"" +terraform import snowflake_grant_account_role.example '"test_role"|ROLE|"test_parent_role"' ``` diff --git a/docs/resources/grant_application_role.md b/docs/resources/grant_application_role.md index 7787f033f5..9578e2b830 100644 --- a/docs/resources/grant_application_role.md +++ b/docs/resources/grant_application_role.md @@ -5,6 +5,8 @@ description: |- --- +!> **V1 release candidate** This resource was reworked and is a release candidate for the V1. We do not expect significant changes in it before the V1. We will welcome any feedback and adjust the resource if needed. Any errors reported will be resolved with a higher priority. We encourage checking this resource out before the V1 release. To migrate from older grant resources please follow the [migration guide](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/MIGRATION_GUIDE.md#migration-from-old-grant-resources-to-new-ones). + # snowflake_grant_application_role (Resource) @@ -65,5 +67,5 @@ Import is supported using the following syntax: ```shell # format is application_role_name (string) | object_type (ACCOUNT_ROLE|APPLICATION) | grantee_name (string) -terraform import "\"my_application\".\"app_role_1\"|ACCOUNT_ROLE|\"my_role\"" +terraform import snowflake_grant_application_role.example '"my_application"."app_role_1"|ACCOUNT_ROLE|"my_role"' ``` diff --git a/docs/resources/grant_database_role.md b/docs/resources/grant_database_role.md index 176fcc70e7..0071eaf5a0 100644 --- a/docs/resources/grant_database_role.md +++ b/docs/resources/grant_database_role.md @@ -5,6 +5,8 @@ description: |- --- +!> **V1 release candidate** This resource was reworked and is a release candidate for the V1. We do not expect significant changes in it before the V1. We will welcome any feedback and adjust the resource if needed. Any errors reported will be resolved with a higher priority. We encourage checking this resource out before the V1 release. 
To migrate from older grant resources please follow the [migration guide](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/MIGRATION_GUIDE.md#migration-from-old-grant-resources-to-new-ones). + # snowflake_grant_database_role (Resource) @@ -85,5 +87,5 @@ Import is supported using the following syntax: ```shell # format is database_role_name (string) | object_type (ROLE|DATABASE ROLE|SHARE) | grantee_name (string) -terraform import "\"ABC\".\"test_db_role\"|ROLE|\"test_parent_role\"" +terraform import snowflake_grant_database_role.example '"ABC"."test_db_role"|ROLE|"test_parent_role"' ``` diff --git a/docs/resources/grant_ownership.md b/docs/resources/grant_ownership.md index e9ac0afa06..03eb48c244 100644 --- a/docs/resources/grant_ownership.md +++ b/docs/resources/grant_ownership.md @@ -6,6 +6,8 @@ description: |- --- +!> **V1 release candidate** This resource was reworked and is a release candidate for the V1. We do not expect significant changes in it before the V1. We will welcome any feedback and adjust the resource if needed. Any errors reported will be resolved with a higher priority. We encourage checking this resource out before the V1 release. To migrate from older grant resources please follow the [migration guide](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/MIGRATION_GUIDE.md#migration-from-old-grant-resources-to-new-ones). + ~> **Note** For more details about granting ownership, please visit [`GRANT OWNERSHIP` Snowflake documentation page](https://docs.snowflake.com/en/sql-reference/sql/grant-ownership). !> **Warning** Grant ownership resource still has some limitations. Delete operation is not implemented for on_future grants (you have to remove the config and then revoke ownership grant on future X manually). @@ -299,7 +301,7 @@ Optional: Import is supported using the following syntax: -`terraform import "||||"` +`terraform import snowflake_grant_ownership.example '||||'` where: - role_type - string - type of granted role (either ToAccountRole or ToDatabaseRole) @@ -311,43 +313,43 @@ where: It has varying number of parts, depending on grant_type. 
All the possible types are: ### OnObject -`terraform import "|||OnObject||"` +`terraform import snowflake_grant_ownership.example '|||OnObject||'` ### OnAll (contains inner types: InDatabase | InSchema) #### InDatabase -`terraform import "|||OnAll||InDatabase|"` +`terraform import snowflake_grant_ownership.example '|||OnAll||InDatabase|'` #### InSchema -`terraform import "|||OnAll||InSchema|"` +`terraform import snowflake_grant_ownership.example '|||OnAll||InSchema|'` ### OnFuture (contains inner types: InDatabase | InSchema) #### InDatabase -`terraform import "|||OnFuture||InDatabase|"` +`terraform import snowflake_grant_ownership.example '|||OnFuture||InDatabase|'` #### InSchema -`terraform import "|||OnFuture||InSchema|"` +`terraform import snowflake_grant_ownership.example '|||OnFuture||InSchema|'` ### Import examples #### OnObject on Schema ToAccountRole -`terraform import "ToAccountRole|\"account_role\"|COPY|OnObject|SCHEMA|\"database_name\".\"schema_name\""` +`terraform import snowflake_grant_ownership.example 'ToAccountRole|"account_role"|COPY|OnObject|SCHEMA|"database_name"."schema_name"'` #### OnObject on Schema ToDatabaseRole -`terraform import "ToDatabaseRole|\"database_name\".\"database_role_name\"|COPY|OnObject|SCHEMA|\"database_name\".\"schema_name\""` +`terraform import snowflake_grant_ownership.example 'ToDatabaseRole|"database_name"."database_role_name"|COPY|OnObject|SCHEMA|"database_name"."schema_name"'` #### OnObject on Table -`terraform import "ToAccountRole|\"account_role\"|COPY|OnObject|TABLE|\"database_name\".\"schema_name\".\"table_name\""` +`terraform import snowflake_grant_ownership.example 'ToAccountRole|"account_role"|COPY|OnObject|TABLE|"database_name"."schema_name"."table_name"'` #### OnAll InDatabase -`terraform import "ToAccountRole|\"account_role\"|REVOKE|OnAll|TABLES|InDatabase|\"database_name\""` +`terraform import snowflake_grant_ownership.example 'ToAccountRole|"account_role"|REVOKE|OnAll|TABLES|InDatabase|"database_name"'` #### OnAll InSchema -`terraform import "ToAccountRole|\"account_role\"||OnAll|TABLES|InSchema|\"database_name\".\"schema_name\""` +`terraform import snowflake_grant_ownership.example 'ToAccountRole|"account_role"||OnAll|TABLES|InSchema|"database_name"."schema_name"'` #### OnFuture InDatabase -`terraform import "ToAccountRole|\"account_role\"||OnFuture|TABLES|InDatabase|\"database_name\""` +`terraform import snowflake_grant_ownership.example 'ToAccountRole|"account_role"||OnFuture|TABLES|InDatabase|"database_name"'` #### OnFuture InSchema -`terraform import "ToAccountRole|\"account_role\"|COPY|OnFuture|TABLES|InSchema|\"database_name\".\"schema_name\""` +`terraform import snowflake_grant_ownership.example 'ToAccountRole|"account_role"|COPY|OnFuture|TABLES|InSchema|"database_name"."schema_name"'` diff --git a/docs/resources/grant_privileges_to_account_role.md b/docs/resources/grant_privileges_to_account_role.md index a96306852a..a314c5dd16 100644 --- a/docs/resources/grant_privileges_to_account_role.md +++ b/docs/resources/grant_privileges_to_account_role.md @@ -6,6 +6,8 @@ description: |- --- +!> **V1 release candidate** This resource was reworked and is a release candidate for the V1. We do not expect significant changes in it before the V1. We will welcome any feedback and adjust the resource if needed. Any errors reported will be resolved with a higher priority. We encourage checking this resource out before the V1 release. 
To migrate from older grant resources please follow the [migration guide](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/MIGRATION_GUIDE.md#migration-from-old-grant-resources-to-new-ones). + !> **Warning** Be careful when using `always_apply` field. It will always produce a plan (even when no changes were made) and can be harmful in some setups. For more details why we decided to introduce it to go our document explaining those design decisions (coming soon). @@ -344,7 +346,7 @@ Optional: Import is supported using the following syntax: -`terraform import "|||||"` +`terraform import snowflake_grant_privileges_to_account_role.example '|||||'` where: - account_role_name - fully qualified identifier @@ -357,62 +359,62 @@ where: It has varying number of parts, depending on grant_type. All the possible types are: ### OnAccount -`terraform import "||||OnAccount` +`terraform import snowflake_grant_privileges_to_account_role.example '||||OnAccount'` ### OnAccountObject -`terraform import "||||OnAccountObject||` +`terraform import snowflake_grant_privileges_to_account_role.example '||||OnAccountObject||'` ### OnSchema On schema contains inner types for all options. #### OnSchema -`terraform import "||||OnSchema|OnSchema|"` +`terraform import snowflake_grant_privileges_to_account_role.example '||||OnSchema|OnSchema|'` #### OnAllSchemasInDatabase -`terraform import "||||OnSchema|OnAllSchemasInDatabase|"` +`terraform import snowflake_grant_privileges_to_account_role.example '||||OnSchema|OnAllSchemasInDatabase|'` #### OnFutureSchemasInDatabase -`terraform import "||||OnSchema|OnFutureSchemasInDatabase|"` +`terraform import snowflake_grant_privileges_to_account_role.example '||||OnSchema|OnFutureSchemasInDatabase|'` ### OnSchemaObject On schema object contains inner types for all options. #### OnObject -`terraform import "||||OnSchemaObject|OnObject||"` +`terraform import snowflake_grant_privileges_to_account_role.example '||||OnSchemaObject|OnObject||'` #### OnAll On all contains inner types for all options. ##### InDatabase -`terraform import "||||OnSchemaObject|OnAll||InDatabase|"` +`terraform import snowflake_grant_privileges_to_account_role.example '||||OnSchemaObject|OnAll||InDatabase|'` ##### InSchema -`terraform import "||||OnSchemaObject|OnAll||InSchema|"` +`terraform import snowflake_grant_privileges_to_account_role.example '||||OnSchemaObject|OnAll||InSchema|'` #### OnFuture On future contains inner types for all options. 
##### InDatabase -`terraform import "||||OnSchemaObject|OnFuture||InDatabase|"` +`terraform import snowflake_grant_privileges_to_account_role.example '||||OnSchemaObject|OnFuture||InDatabase|'` ##### InSchema -`terraform import "||||OnSchemaObject|OnFuture||InSchema|"` +`terraform import snowflake_grant_privileges_to_account_role.example '||||OnSchemaObject|OnFuture||InSchema|'` ### Import examples #### Grant all privileges OnAccountObject (Database) -`terraform import "\"test_db_role\"|false|false|ALL|OnAccountObject|DATABASE|\"test_db\""` +`terraform import snowflake_grant_privileges_to_account_role.example '"test_db_role"|false|false|ALL|OnAccountObject|DATABASE|"test_db"'` #### Grant list of privileges OnAllSchemasInDatabase -`terraform import "\"test_db_role\"|false|false|CREATE TAG,CREATE TABLE|OnSchema|OnAllSchemasInDatabase|\"test_db\""` +`terraform import snowflake_grant_privileges_to_account_role.example '"test_db_role"|false|false|CREATE TAG,CREATE TABLE|OnSchema|OnAllSchemasInDatabase|"test_db"'` #### Grant list of privileges on table -`terraform import "\"test_db_role\"|false|false|SELECT,DELETE,INSERT|OnSchemaObject|OnObject|TABLE|\"test_db\".\"test_schema\".\"test_table\""` +`terraform import snowflake_grant_privileges_to_account_role.example '"test_db_role"|false|false|SELECT,DELETE,INSERT|OnSchemaObject|OnObject|TABLE|"test_db"."test_schema"."test_table"'` #### Grant list of privileges OnAll tables in schema -`terraform import "\"test_db_role\"|false|false|SELECT,DELETE,INSERT|OnSchemaObject|OnAll|TABLES|InSchema|\"test_db\".\"test_schema\""` +`terraform import snowflake_grant_privileges_to_account_role.example '"test_db_role"|false|false|SELECT,DELETE,INSERT|OnSchemaObject|OnAll|TABLES|InSchema|"test_db"."test_schema"'` diff --git a/docs/resources/grant_privileges_to_database_role.md b/docs/resources/grant_privileges_to_database_role.md index f8011813ac..81f34a561a 100644 --- a/docs/resources/grant_privileges_to_database_role.md +++ b/docs/resources/grant_privileges_to_database_role.md @@ -6,6 +6,8 @@ description: |- --- +!> **V1 release candidate** This resource was reworked and is a release candidate for the V1. We do not expect significant changes in it before the V1. We will welcome any feedback and adjust the resource if needed. Any errors reported will be resolved with a higher priority. We encourage checking this resource out before the V1 release. To migrate from older grant resources please follow the [migration guide](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/MIGRATION_GUIDE.md#migration-from-old-grant-resources-to-new-ones). + !> **Warning** Be careful when using `always_apply` field. It will always produce a plan (even when no changes were made) and can be harmful in some setups. For more details why we decided to introduce it to go our document explaining those design decisions (coming soon). @@ -249,7 +251,7 @@ Optional: Import is supported using the following syntax: -`terraform import "|||||"` +`terraform import snowflake_grant_privileges_to_database_role.example '|||||'` where: - database_role_name - fully qualified identifier @@ -262,59 +264,59 @@ where: It has varying number of parts, depending on grant_type. All the possible types are: ### OnDatabase -`terraform import "||||OnDatabase|"` +`terraform import snowflake_grant_privileges_to_database_role.example '||||OnDatabase|'` ### OnSchema On schema contains inner types for all options. 
#### OnSchema -`terraform import "||||OnSchema|OnSchema|"` +`terraform import snowflake_grant_privileges_to_database_role.example '||||OnSchema|OnSchema|'` #### OnAllSchemasInDatabase -`terraform import "||||OnSchema|OnAllSchemasInDatabase|"` +`terraform import snowflake_grant_privileges_to_database_role.example '||||OnSchema|OnAllSchemasInDatabase|'` #### OnFutureSchemasInDatabase -`terraform import "||||OnSchema|OnFutureSchemasInDatabase|"` +`terraform import snowflake_grant_privileges_to_database_role.example '||||OnSchema|OnFutureSchemasInDatabase|'` ### OnSchemaObject On schema object contains inner types for all options. #### OnObject -`terraform import "||||OnSchemaObject|OnObject||"` +`terraform import snowflake_grant_privileges_to_database_role.example '||||OnSchemaObject|OnObject||'` #### OnAll On all contains inner types for all options. ##### InDatabase -`terraform import "||||OnSchemaObject|OnAll||InDatabase|"` +`terraform import snowflake_grant_privileges_to_database_role.example '||||OnSchemaObject|OnAll||InDatabase|'` ##### InSchema -`terraform import "||||OnSchemaObject|OnAll||InSchema|"` +`terraform import snowflake_grant_privileges_to_database_role.example '||||OnSchemaObject|OnAll||InSchema|'` #### OnFuture On future contains inner types for all options. ##### InDatabase -`terraform import "||||OnSchemaObject|OnFuture||InDatabase|"` +`terraform import snowflake_grant_privileges_to_database_role.example '||||OnSchemaObject|OnFuture||InDatabase|'` ##### InSchema -`terraform import "||||OnSchemaObject|OnFuture||InSchema|"` +`terraform import snowflake_grant_privileges_to_database_role.example '||||OnSchemaObject|OnFuture||InSchema|'` ### Import examples #### Grant all privileges OnDatabase -`terraform import "\"test_db\".\"test_db_role\"|false|false|ALL|OnDatabase|\"test_db\""` +`terraform import snowflake_grant_privileges_to_database_role.example '"test_db"."test_db_role"|false|false|ALL|OnDatabase|"test_db"'` #### Grant list of privileges OnAllSchemasInDatabase -`terraform import "\"test_db\".\"test_db_role\"|false|false|CREATE TAG,CREATE TABLE|OnSchema|OnAllSchemasInDatabase|\"test_db\""` +`terraform import snowflake_grant_privileges_to_database_role.example '"test_db"."test_db_role"|false|false|CREATE TAG,CREATE TABLE|OnSchema|OnAllSchemasInDatabase|"test_db"'` #### Grant list of privileges on table -`terraform import "\"test_db\".\"test_db_role\"|false|false|SELECT,DELETE,INSERT|OnSchemaObject|OnObject|TABLE|\"test_db\".\"test_schema\".\"test_table\""` +`terraform import snowflake_grant_privileges_to_database_role.example '"test_db"."test_db_role"|false|false|SELECT,DELETE,INSERT|OnSchemaObject|OnObject|TABLE|"test_db"."test_schema"."test_table"'` #### Grant list of privileges OnAll tables in schema -`terraform import "\"test_db\".\"test_db_role\"|false|false|SELECT,DELETE,INSERT|OnSchemaObject|OnAll|TABLES|InSchema|\"test_db\".\"test_schema\""` +`terraform import snowflake_grant_privileges_to_database_role.example '"test_db"."test_db_role"|false|false|SELECT,DELETE,INSERT|OnSchemaObject|OnAll|TABLES|InSchema|"test_db"."test_schema"'` diff --git a/docs/resources/grant_privileges_to_share.md b/docs/resources/grant_privileges_to_share.md index 89c64c7289..cbfb9e14fb 100644 --- a/docs/resources/grant_privileges_to_share.md +++ b/docs/resources/grant_privileges_to_share.md @@ -6,6 +6,8 @@ description: |- --- +!> **V1 release candidate** This resource was reworked and is a release candidate for the V1. We do not expect significant changes in it before the V1. 
We will welcome any feedback and adjust the resource if needed. Any errors reported will be resolved with a higher priority. We encourage checking this resource out before the V1 release. To migrate from older grant resources please follow the [migration guide](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/MIGRATION_GUIDE.md#migration-from-old-grant-resources-to-new-ones). + # snowflake_grant_privileges_to_share (Resource) @@ -129,7 +131,7 @@ resource "snowflake_grant_privileges_to_share" "example" { Import is supported using the following syntax: -`terraform import "|||"` +`terraform import snowflake_grant_privileges_to_share.example '|||'` where: - share_name - fully qualified identifier @@ -138,19 +140,19 @@ where: - grant_identifier - fully qualified identifier ### OnDatabase -`terraform import "||OnDatabase|"` +`terraform import snowflake_grant_privileges_to_share.example '||OnDatabase|'` ### OnSchema -`terraform import "||OnSchema|."` +`terraform import snowflake_grant_privileges_to_share.example '||OnSchema|.'` ### OnTable -`terraform import "||OnTable|.."` +`terraform import snowflake_grant_privileges_to_share.example '||OnTable|..'` ### OnSchema -`terraform import "||OnAllTablesInSchema|."` +`terraform import snowflake_grant_privileges_to_share.example '||OnAllTablesInSchema|.'` ### OnTag -`terraform import "||OnTag|.."` +`terraform import snowflake_grant_privileges_to_share.example '||OnTag|..'` ### OnView -`terraform import "||OnView|.."` +`terraform import snowflake_grant_privileges_to_share.example '||OnView|..'` diff --git a/docs/resources/legacy_service_user.md b/docs/resources/legacy_service_user.md index bd5865064b..57ef504370 100644 --- a/docs/resources/legacy_service_user.md +++ b/docs/resources/legacy_service_user.md @@ -123,7 +123,7 @@ resource "snowflake_legacy_service_user" "u" { ### Required -- `name` (String) Name of the user. Note that if you do not supply login_name this will be used as login_name. Check the [docs](https://docs.snowflake.net/manuals/sql-reference/sql/create-user.html#required-parameters). Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `(`, `)`, `"` +- `name` (String) Name of the user. Note that if you do not supply login_name this will be used as login_name. Check the [docs](https://docs.snowflake.net/manuals/sql-reference/sql/create-user.html#required-parameters). Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"` ### Optional @@ -1011,3 +1011,5 @@ Import is supported using the following syntax: ```shell terraform import snowflake_legacy_service_user.example '""' ``` + +Note: terraform plan+apply may be needed after successful import to fill out all the missing fields (like `password`) in state. diff --git a/docs/resources/masking_policy.md b/docs/resources/masking_policy.md index 0806f24147..aa9bb89af4 100644 --- a/docs/resources/masking_policy.md +++ b/docs/resources/masking_policy.md @@ -86,10 +86,10 @@ EOF - `argument` (Block List, Min: 1) List of the arguments for the masking policy. 
The first column and its data type always indicate the column data type values to mask or tokenize in the subsequent policy conditions. Note that you can not specify a virtual column as the first column argument in a conditional masking policy. (see [below for nested schema](#nestedblock--argument)) - `body` (String) Specifies the SQL expression that transforms the data. To mitigate permadiff on this field, the provider replaces blank characters with a space. This can lead to false positives in cases where a change in case or run of whitespace is semantically significant. -- `database` (String) The database in which to create the masking policy. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `(`, `)`, `"` -- `name` (String) Specifies the identifier for the masking policy; must be unique for the database and schema in which the masking policy is created. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `(`, `)`, `"` +- `database` (String) The database in which to create the masking policy. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"` +- `name` (String) Specifies the identifier for the masking policy; must be unique for the database and schema in which the masking policy is created. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"` - `return_data_type` (String) The return data type must match the input data type of the first column that is specified as an input column. For more information about data types, check [Snowflake docs](https://docs.snowflake.com/en/sql-reference/intro-summary-data-types). -- `schema` (String) The schema in which to create the masking policy. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `(`, `)`, `"` +- `schema` (String) The schema in which to create the masking policy. 
Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"` ### Optional diff --git a/docs/resources/network_policy.md b/docs/resources/network_policy.md index 706acad399..aaa3bc02c5 100644 --- a/docs/resources/network_policy.md +++ b/docs/resources/network_policy.md @@ -37,7 +37,7 @@ resource "snowflake_network_policy" "complete" { ### Required -- `name` (String) Specifies the identifier for the network policy; must be unique for the account in which the network policy is created. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `(`, `)`, `"` +- `name` (String) Specifies the identifier for the network policy; must be unique for the account in which the network policy is created. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"` ### Optional diff --git a/docs/resources/oauth_integration_for_custom_clients.md b/docs/resources/oauth_integration_for_custom_clients.md index c6f427e7b3..2ed8b0521e 100644 --- a/docs/resources/oauth_integration_for_custom_clients.md +++ b/docs/resources/oauth_integration_for_custom_clients.md @@ -50,7 +50,7 @@ resource "snowflake_oauth_integration_for_custom_clients" "complete" { ### Required - `blocked_roles_list` (Set of String) A set of Snowflake roles that a user cannot explicitly consent to using after authenticating. -- `name` (String) Specifies the name of the OAuth integration. This name follows the rules for Object Identifiers. The name should be unique among security integrations in your account. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `(`, `)`, `"` +- `name` (String) Specifies the name of the OAuth integration. This name follows the rules for Object Identifiers. The name should be unique among security integrations in your account. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"` - `oauth_client_type` (String) Specifies the type of client being registered. Snowflake supports both confidential and public clients. Valid options are: `PUBLIC` | `CONFIDENTIAL`. - `oauth_redirect_uri` (String) Specifies the client URI. After a user is authenticated, the web browser is redirected to this URI. 
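The identifier guidance repeated in the attribute descriptions above amounts to choosing names without `|`, `.`, or `"` (parentheses are no longer on the avoid list). A minimal sketch of what that looks like in practice, using a hypothetical network policy name and only the required `name` attribute:

```terraform
# Minimal network policy config; the name deliberately avoids `|`, `.`, and `"`,
# following the identifier guidance referenced in the descriptions above.
resource "snowflake_network_policy" "example" {
  name = "EXAMPLE_NETWORK_POLICY"
}
```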
diff --git a/docs/resources/oauth_integration_for_partner_applications.md b/docs/resources/oauth_integration_for_partner_applications.md index ae75802d54..0d9f1c139e 100644 --- a/docs/resources/oauth_integration_for_partner_applications.md +++ b/docs/resources/oauth_integration_for_partner_applications.md @@ -43,7 +43,7 @@ resource "snowflake_oauth_integration_for_partner_applications" "test" { ### Required - `blocked_roles_list` (Set of String) A set of Snowflake roles that a user cannot explicitly consent to using after authenticating. -- `name` (String) Specifies the name of the OAuth integration. This name follows the rules for Object Identifiers. The name should be unique among security integrations in your account. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `(`, `)`, `"` +- `name` (String) Specifies the name of the OAuth integration. This name follows the rules for Object Identifiers. The name should be unique among security integrations in your account. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"` - `oauth_client` (String) Creates an OAuth interface between Snowflake and a partner application. Valid options are: `LOOKER` | `TABLEAU_DESKTOP` | `TABLEAU_SERVER`. ### Optional diff --git a/docs/resources/primary_connection.md b/docs/resources/primary_connection.md new file mode 100644 index 0000000000..27d726a362 --- /dev/null +++ b/docs/resources/primary_connection.md @@ -0,0 +1,84 @@ +--- +page_title: "snowflake_primary_connection Resource - terraform-provider-snowflake" +subcategory: "" +description: |- + Resource used to manage primary connections. For managing replicated connection check resource snowflakesecondaryconnection ./secondary_connection. For more information, check connection documentation https://docs.snowflake.com/en/sql-reference/sql/create-connection.html. +--- + +!> **V1 release candidate** This resource is a release candidate for the V1. It is on the list of remaining GA objects for V1. We do not expect significant changes in it before the V1. We will welcome any feedback and adjust the resource if needed. Any errors reported will be resolved with a higher priority. We encourage checking this resource out before the V1 release. Please follow the [migration guide](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/MIGRATION_GUIDE.md#v0970--v0980) to use it. + +# snowflake_primary_connection (Resource) + +Resource used to manage primary connections. For managing replicated connection check resource [snowflake_secondary_connection](./secondary_connection). For more information, check [connection documentation](https://docs.snowflake.com/en/sql-reference/sql/create-connection.html). + +## Example Usage + +```terraform +## Minimal +resource "snowflake_primary_connection" "basic" { + name = "connection_name" +} + +## Complete (with every optional set) +resource "snowflake_primary_connection" "complete" { + name = "connection_name" + comment = "my complete connection" + enable_failover_to_accounts = [ + "." 
+ ] +} +``` + +-> **Note** Instead of using fully_qualified_name, you can reference objects managed outside Terraform by constructing a correct ID, consult [identifiers guide](../guides/identifiers#new-computed-fully-qualified-name-field-in-resources). + +-> **Note** To demote `snowflake_primary_connection` to [`snowflake_secondary_connection`](./secondary_connection), resources need to be migrated manually. For guidance on removing and importing resources into the state check [resource migration](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/resource_migration.md). Remove the resource from the state, then recreate it manually using: + ``` + CREATE CONNECTION AS REPLICA OF ..; + ``` +and then import it as the `snowflake_secondary_connection`. + + + +## Schema + +### Required + +- `name` (String) String that specifies the identifier (i.e. name) for the connection. Must start with an alphabetic character and may only contain letters, decimal digits (0-9), and underscores (_). For a primary connection, the name must be unique across connection names and account names in the organization. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"` + +### Optional + +- `comment` (String) Specifies a comment for the connection. +- `enable_failover_to_accounts` (List of String) Enables failover for given connection to provided accounts. Specifies a list of accounts in your organization where a secondary connection for this primary connection can be promoted to serve as the primary connection. Include your organization name for each account in the list. + +### Read-Only + +- `fully_qualified_name` (String) Fully qualified name of the resource. For more information, see [object name resolution](https://docs.snowflake.com/en/sql-reference/name-resolution). +- `id` (String) The ID of this resource. +- `is_primary` (Boolean) Indicates if the connection is primary. When Terraform detects that the connection is not primary, the resource is recreated. +- `show_output` (List of Object) Outputs the result of `SHOW CONNECTIONS` for the given connection. (see [below for nested schema](#nestedatt--show_output)) + + +### Nested Schema for `show_output` + +Read-Only: + +- `account_locator` (String) +- `account_name` (String) +- `comment` (String) +- `connection_url` (String) +- `created_on` (String) +- `failover_allowed_to_accounts` (List of String) +- `is_primary` (Boolean) +- `name` (String) +- `organization_name` (String) +- `primary` (String) +- `region_group` (String) +- `snowflake_region` (String) + +## Import + +Import is supported using the following syntax: + +```shell +terraform import snowflake_primary_connection.example 'connection_name' +``` diff --git a/docs/resources/resource_monitor.md b/docs/resources/resource_monitor.md index b603e6816a..c5eb401268 100644 --- a/docs/resources/resource_monitor.md +++ b/docs/resources/resource_monitor.md @@ -57,7 +57,7 @@ resource "snowflake_resource_monitor" "complete" { ### Required -- `name` (String) Identifier for the resource monitor; must be unique for your account.
Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `(`, `)`, `"` +- `name` (String) Identifier for the resource monitor; must be unique for your account. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"` ### Optional diff --git a/docs/resources/role.md b/docs/resources/role.md index 4b20e2d2c3..cf79b2cdf3 100644 --- a/docs/resources/role.md +++ b/docs/resources/role.md @@ -34,7 +34,7 @@ resource "snowflake_role" "complete" { ### Required -- `name` (String) Identifier for the role; must be unique for your account. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `(`, `)`, `"` +- `name` (String) Identifier for the role; must be unique for your account. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"` ### Optional diff --git a/docs/resources/row_access_policy.md b/docs/resources/row_access_policy.md index 3bd3afa7d3..1d1951dcb0 100644 --- a/docs/resources/row_access_policy.md +++ b/docs/resources/row_access_policy.md @@ -44,9 +44,9 @@ resource "snowflake_row_access_policy" "example_row_access_policy" { - `argument` (Block List, Min: 1) List of the arguments for the row access policy. A signature specifies a set of attributes that must be considered to determine whether the row is accessible. The attribute values come from the database object (e.g. table or view) to be protected by the row access policy. If any argument name or type is changed, the resource is recreated. (see [below for nested schema](#nestedblock--argument)) - `body` (String) Specifies the SQL expression. The expression can be any boolean-valued SQL expression. To mitigate permadiff on this field, the provider replaces blank characters with a space. This can lead to false positives in cases where a change in case or run of whitespace is semantically significant. -- `database` (String) The database in which to create the row access policy. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `(`, `)`, `"` -- `name` (String) Specifies the identifier for the row access policy; must be unique for the database and schema in which the row access policy is created. 
Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `(`, `)`, `"` -- `schema` (String) The schema in which to create the row access policy. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `(`, `)`, `"` +- `database` (String) The database in which to create the row access policy. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"` +- `name` (String) Specifies the identifier for the row access policy; must be unique for the database and schema in which the row access policy is created. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"` +- `schema` (String) The schema in which to create the row access policy. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"` ### Optional diff --git a/docs/resources/saml2_integration.md b/docs/resources/saml2_integration.md index 133eb33f4c..38e45dc4d8 100644 --- a/docs/resources/saml2_integration.md +++ b/docs/resources/saml2_integration.md @@ -53,7 +53,7 @@ resource "snowflake_saml2_integration" "test" { ### Required -- `name` (String) Specifies the name of the SAML2 integration. This name follows the rules for Object Identifiers. The name should be unique among security integrations in your account. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `(`, `)`, `"` +- `name` (String) Specifies the name of the SAML2 integration. This name follows the rules for Object Identifiers. The name should be unique among security integrations in your account. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"` - `saml2_issuer` (String) The string containing the IdP EntityID / Issuer. - `saml2_provider` (String) The string describing the IdP. Valid options are: `OKTA` | `ADFS` | `CUSTOM`. - `saml2_sso_url` (String) The string containing the IdP SSO URL, where the user should be redirected by Snowflake (the Service Provider) with a SAML AuthnRequest message. 
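The `argument` and `body` attributes described for `snowflake_row_access_policy` above work together: each argument declares a column (name and data type) from the protected object, and the body is a boolean-valued SQL expression evaluated against those arguments. A minimal sketch with hypothetical database, schema, and policy names, assuming the `argument` block takes `name` and `type` as in the provider's examples:

```terraform
# Row access policy sketch: one VARCHAR argument and a boolean-valued body.
resource "snowflake_row_access_policy" "example" {
  database = "EXAMPLE_DB"
  schema   = "EXAMPLE_SCHEMA"
  name     = "EXAMPLE_ROW_ACCESS_POLICY" # avoids `|`, `.`, and `"`
  argument {
    name = "ROLE_NAME"
    type = "VARCHAR"
  }
  body = "case when current_role() in ('ANALYST') then true else false end"
}
```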
diff --git a/docs/resources/scim_integration.md b/docs/resources/scim_integration.md index 19985cf3e1..c3e8ee35a9 100644 --- a/docs/resources/scim_integration.md +++ b/docs/resources/scim_integration.md @@ -41,7 +41,7 @@ resource "snowflake_scim_integration" "test" { ### Required - `enabled` (Boolean) Specify whether the security integration is enabled. -- `name` (String) String that specifies the identifier (i.e. name) for the integration; must be unique in your account. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `(`, `)`, `"` +- `name` (String) String that specifies the identifier (i.e. name) for the integration; must be unique in your account. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"` - `run_as_role` (String) Specify the SCIM role in Snowflake that owns any users and roles that are imported from the identity provider into Snowflake using SCIM. Provider assumes that the specified role is already provided. Valid options are: `OKTA_PROVISIONER` | `AAD_PROVISIONER` | `GENERIC_SCIM_PROVISIONER`. - `scim_client` (String) Specifies the client type for the scim integration. Valid options are: `OKTA` | `AZURE` | `GENERIC`. diff --git a/docs/resources/secondary_connection.md b/docs/resources/secondary_connection.md new file mode 100644 index 0000000000..416e593392 --- /dev/null +++ b/docs/resources/secondary_connection.md @@ -0,0 +1,83 @@ +--- +page_title: "snowflake_secondary_connection Resource - terraform-provider-snowflake" +subcategory: "" +description: |- + Resource used to manage secondary (replicated) connections. To manage primary connection check resource snowflakeprimaryconnection ./primary_connection. For more information, check connection documentation https://docs.snowflake.com/en/sql-reference/sql/create-connection.html. +--- + +!> **V1 release candidate** This resource is a release candidate for the V1. It is on the list of remaining GA objects for V1. We do not expect significant changes in it before the V1. We will welcome any feedback and adjust the resource if needed. Any errors reported will be resolved with a higher priority. We encourage checking this resource out before the V1 release. Please follow the [migration guide](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/MIGRATION_GUIDE.md#v0970--v0980) to use it. + +# snowflake_secondary_connection (Resource) + +Resource used to manage secondary (replicated) connections. To manage primary connection check resource [snowflake_primary_connection](./primary_connection). For more information, check [connection documentation](https://docs.snowflake.com/en/sql-reference/sql/create-connection.html). + +## Example Usage + +```terraform +## Minimal +resource "snowflake_secondary_connection" "basic" { + name = "connection_name" + as_replica_of = ".." +} + +## Complete (with every optional set) +resource "snowflake_secondary_connection" "complete" { + name = "connection_name" + as_replica_of = ".." 
+ comment = "my complete secondary connection" +} +``` + +-> **Note** Instead of using fully_qualified_name, you can reference objects managed outside Terraform by constructing a correct ID, consult [identifiers guide](../guides/identifiers#new-computed-fully-qualified-name-field-in-resources). + +-> **Note** To promote `snowflake_secondary_connection` to [`snowflake_primary_connection`](./primary_connection), resources need to be migrated manually. For guidance on removing and importing resources into the state check [resource migration](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/resource_migration.md). Remove the resource from the state, then promote it manually using: + ``` + ALTER CONNECTION PRIMARY; + ``` +and then import it as the `snowflake_primary_connection`. + + + +## Schema + +### Required + +- `as_replica_of` (String) Specifies the identifier for a primary connection from which to create a replica (i.e. a secondary connection). +- `name` (String) String that specifies the identifier (i.e. name) for the connection. Must start with an alphabetic character and may only contain letters, decimal digits (0-9), and underscores (_). For a secondary connection, the name must match the name of its primary connection. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"` + +### Optional + +- `comment` (String) Specifies a comment for the secondary connection. + +### Read-Only + +- `fully_qualified_name` (String) Fully qualified name of the resource. For more information, see [object name resolution](https://docs.snowflake.com/en/sql-reference/name-resolution). +- `id` (String) The ID of this resource. +- `is_primary` (Boolean) Indicates if the connection primary status has been changed. If change is detected, resource will be recreated. +- `show_output` (List of Object) Outputs the result of `SHOW CONNECTIONS` for the given connection. (see [below for nested schema](#nestedatt--show_output)) + + +### Nested Schema for `show_output` + +Read-Only: + +- `account_locator` (String) +- `account_name` (String) +- `comment` (String) +- `connection_url` (String) +- `created_on` (String) +- `failover_allowed_to_accounts` (List of String) +- `is_primary` (Boolean) +- `name` (String) +- `organization_name` (String) +- `primary` (String) +- `region_group` (String) +- `snowflake_region` (String) + +## Import + +Import is supported using the following syntax: + +```shell +terraform import snowflake_secondary_connection.example 'secondary_connection_name' +``` diff --git a/docs/resources/secondary_database.md b/docs/resources/secondary_database.md index 8e3c22cd65..95fbaf8815 100644 --- a/docs/resources/secondary_database.md +++ b/docs/resources/secondary_database.md @@ -93,7 +93,7 @@ resource "snowflake_task" "refresh_secondary_database" { ### Required - `as_replica_of` (String) A fully qualified path to a database to create a replica from. A fully qualified path follows the format of `""."".""`. -- `name` (String) Specifies the identifier for the database; must be unique for your account. As a best practice for [Database Replication and Failover](https://docs.snowflake.com/en/user-guide/db-replication-intro), it is recommended to give each secondary database the same name as its primary database. 
This practice supports referencing fully-qualified objects (i.e. '..') by other objects in the same database, such as querying a fully-qualified table name in a view. If a secondary database has a different name from the primary database, then these object references would break in the secondary database. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `(`, `)`, `"` +- `name` (String) Specifies the identifier for the database; must be unique for your account. As a best practice for [Database Replication and Failover](https://docs.snowflake.com/en/user-guide/db-replication-intro), it is recommended to give each secondary database the same name as its primary database. This practice supports referencing fully-qualified objects (i.e. '..') by other objects in the same database, such as querying a fully-qualified table name in a view. If a secondary database has a different name from the primary database, then these object references would break in the secondary database. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"` ### Optional diff --git a/docs/resources/secret_with_authorization_code_grant.md b/docs/resources/secret_with_authorization_code_grant.md index ff13b763fc..3217db19dc 100644 --- a/docs/resources/secret_with_authorization_code_grant.md +++ b/docs/resources/secret_with_authorization_code_grant.md @@ -44,11 +44,11 @@ resource "snowflake_secret_with_authorization_code_grant" "test" { ### Required - `api_authentication` (String) Specifies the name value of the Snowflake security integration that connects Snowflake to an external service. -- `database` (String) The database in which to create the secret Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `(`, `)`, `"` -- `name` (String) String that specifies the identifier (i.e. name) for the secret, must be unique in your schema. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `(`, `)`, `"` +- `database` (String) The database in which to create the secret Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"` +- `name` (String) String that specifies the identifier (i.e. name) for the secret, must be unique in your schema. 
Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"` - `oauth_refresh_token` (String, Sensitive) Specifies the token as a string that is used to obtain a new access token from the OAuth authorization server when the access token expires. External changes for this field won't be detected. In case you want to apply external changes, you can re-create the resource manually using "terraform taint". - `oauth_refresh_token_expiry_time` (String) Specifies the timestamp as a string when the OAuth refresh token expires. Accepted string formats: YYYY-MM-DD, YYYY-MM-DD HH:MI, YYYY-MM-DD HH:MI:SS, YYYY-MM-DD HH:MI -- `schema` (String) The schema in which to create the secret. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `(`, `)`, `"` +- `schema` (String) The schema in which to create the secret. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"` ### Optional diff --git a/docs/resources/secret_with_basic_authentication.md b/docs/resources/secret_with_basic_authentication.md index e7078451d4..66d99164b3 100644 --- a/docs/resources/secret_with_basic_authentication.md +++ b/docs/resources/secret_with_basic_authentication.md @@ -41,10 +41,10 @@ resource "snowflake_secret_with_basic_authentication" "test" { ### Required -- `database` (String) The database in which to create the secret Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `(`, `)`, `"` -- `name` (String) String that specifies the identifier (i.e. name) for the secret, must be unique in your schema. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `(`, `)`, `"` +- `database` (String) The database in which to create the secret Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"` +- `name` (String) String that specifies the identifier (i.e. name) for the secret, must be unique in your schema. 
Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"` - `password` (String, Sensitive) Specifies the password value to store in the secret. External changes for this field won't be detected. In case you want to apply external changes, you can re-create the resource manually using "terraform taint". -- `schema` (String) The schema in which to create the secret. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `(`, `)`, `"` +- `schema` (String) The schema in which to create the secret. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"` - `username` (String, Sensitive) Specifies the username value to store in the secret. ### Optional diff --git a/docs/resources/secret_with_client_credentials.md b/docs/resources/secret_with_client_credentials.md index d827292373..0e5ad14903 100644 --- a/docs/resources/secret_with_client_credentials.md +++ b/docs/resources/secret_with_client_credentials.md @@ -42,10 +42,10 @@ resource "snowflake_secret_with_client_credentials" "test" { ### Required - `api_authentication` (String) Specifies the name value of the Snowflake security integration that connects Snowflake to an external service. -- `database` (String) The database in which to create the secret Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `(`, `)`, `"` -- `name` (String) String that specifies the identifier (i.e. name) for the secret, must be unique in your schema. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `(`, `)`, `"` +- `database` (String) The database in which to create the secret Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"` +- `name` (String) String that specifies the identifier (i.e. name) for the secret, must be unique in your schema. 
Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"` - `oauth_scopes` (Set of String) Specifies a list of scopes to use when making a request from the OAuth server by a role with USAGE on the integration during the OAuth client credentials flow. -- `schema` (String) The schema in which to create the secret. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `(`, `)`, `"` +- `schema` (String) The schema in which to create the secret. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"` ### Optional diff --git a/docs/resources/secret_with_generic_string.md b/docs/resources/secret_with_generic_string.md index 3b1579c6ce..408e71592d 100644 --- a/docs/resources/secret_with_generic_string.md +++ b/docs/resources/secret_with_generic_string.md @@ -39,9 +39,9 @@ resource "snowflake_secret_with_generic_string" "test" { ### Required -- `database` (String) The database in which to create the secret Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `(`, `)`, `"` -- `name` (String) String that specifies the identifier (i.e. name) for the secret, must be unique in your schema. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `(`, `)`, `"` -- `schema` (String) The schema in which to create the secret. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `(`, `)`, `"` +- `database` (String) The database in which to create the secret Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"` +- `name` (String) String that specifies the identifier (i.e. name) for the secret, must be unique in your schema. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"` +- `schema` (String) The schema in which to create the secret. 
Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"` - `secret_string` (String, Sensitive) Specifies the string to store in the secret. The string can be an API token or a string of sensitive value that can be used in the handler code of a UDF or stored procedure. For details, see [Creating and using an external access integration](https://docs.snowflake.com/en/developer-guide/external-network-access/creating-using-external-network-access). You should not use this property to store any kind of OAuth token; use one of the other secret types for your OAuth use cases. External changes for this field won't be detected. In case you want to apply external changes, you can re-create the resource manually using "terraform taint". ### Optional diff --git a/docs/resources/service_user.md b/docs/resources/service_user.md index dd74130322..eba0597df1 100644 --- a/docs/resources/service_user.md +++ b/docs/resources/service_user.md @@ -120,7 +120,7 @@ resource "snowflake_service_user" "u" { ### Required -- `name` (String) Name of the user. Note that if you do not supply login_name this will be used as login_name. Check the [docs](https://docs.snowflake.net/manuals/sql-reference/sql/create-user.html#required-parameters). Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `(`, `)`, `"` +- `name` (String) Name of the user. Note that if you do not supply login_name this will be used as login_name. Check the [docs](https://docs.snowflake.net/manuals/sql-reference/sql/create-user.html#required-parameters). Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"` ### Optional diff --git a/docs/resources/shared_database.md b/docs/resources/shared_database.md index 09cf18a2fc..574ff4f28c 100644 --- a/docs/resources/shared_database.md +++ b/docs/resources/shared_database.md @@ -76,7 +76,7 @@ resource "snowflake_shared_database" "test" { ### Required - `from_share` (String) A fully qualified path to a share from which the database will be created. A fully qualified path follows the format of `""."".""`. -- `name` (String) Specifies the identifier for the database; must be unique for your account. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `(`, `)`, `"` +- `name` (String) Specifies the identifier for the database; must be unique for your account. 
Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"` ### Optional diff --git a/docs/resources/stream_on_directory_table.md b/docs/resources/stream_on_directory_table.md index ec6abfdb5e..1913610345 100644 --- a/docs/resources/stream_on_directory_table.md +++ b/docs/resources/stream_on_directory_table.md @@ -56,10 +56,10 @@ resource "snowflake_stream_on_directory_table" "stream" { ### Required -- `database` (String) The database in which to create the stream. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `(`, `)`, `"` -- `name` (String) Specifies the identifier for the stream; must be unique for the database and schema in which the stream is created. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `(`, `)`, `"` -- `schema` (String) The schema in which to create the stream. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `(`, `)`, `"` -- `stage` (String) Specifies an identifier for the stage the stream will monitor. Due to Snowflake limitations, the provider can not read the stage's database and schema. For stages, Snowflake returns only partially qualified name instead of fully qualified name. Please use stages located in the same schema as the stream. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `(`, `)`, `"` +- `database` (String) The database in which to create the stream. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"` +- `name` (String) Specifies the identifier for the stream; must be unique for the database and schema in which the stream is created. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"` +- `schema` (String) The schema in which to create the stream. 
Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"` +- `stage` (String) Specifies an identifier for the stage the stream will monitor. Due to Snowflake limitations, the provider can not read the stage's database and schema. For stages, Snowflake returns only partially qualified name instead of fully qualified name. Please use stages located in the same schema as the stream. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"` ### Optional diff --git a/docs/resources/stream_on_external_table.md b/docs/resources/stream_on_external_table.md index e885a93a91..a01c1a93bc 100644 --- a/docs/resources/stream_on_external_table.md +++ b/docs/resources/stream_on_external_table.md @@ -70,10 +70,10 @@ resource "snowflake_stream_on_external_table" "stream" { ### Required -- `database` (String) The database in which to create the stream. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `(`, `)`, `"` -- `external_table` (String) Specifies an identifier for the external table the stream will monitor. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `(`, `)`, `"` -- `name` (String) Specifies the identifier for the stream; must be unique for the database and schema in which the stream is created. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `(`, `)`, `"` -- `schema` (String) The schema in which to create the stream. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `(`, `)`, `"` +- `database` (String) The database in which to create the stream. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"` +- `external_table` (String) Specifies an identifier for the external table the stream will monitor. 
Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"` +- `name` (String) Specifies the identifier for the stream; must be unique for the database and schema in which the stream is created. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"` +- `schema` (String) The schema in which to create the stream. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"` ### Optional diff --git a/docs/resources/stream_on_table.md b/docs/resources/stream_on_table.md index 99ec11a4be..244f5b97a5 100644 --- a/docs/resources/stream_on_table.md +++ b/docs/resources/stream_on_table.md @@ -54,10 +54,10 @@ resource "snowflake_stream_on_table" "stream" { ### Required -- `database` (String) The database in which to create the stream. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `(`, `)`, `"` -- `name` (String) Specifies the identifier for the stream; must be unique for the database and schema in which the stream is created. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `(`, `)`, `"` -- `schema` (String) The schema in which to create the stream. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `(`, `)`, `"` -- `table` (String) Specifies an identifier for the table the stream will monitor. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `(`, `)`, `"` +- `database` (String) The database in which to create the stream. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"` +- `name` (String) Specifies the identifier for the stream; must be unique for the database and schema in which the stream is created. 
Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"` +- `schema` (String) The schema in which to create the stream. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"` +- `table` (String) Specifies an identifier for the table the stream will monitor. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"` ### Optional diff --git a/docs/resources/stream_on_view.md b/docs/resources/stream_on_view.md index 6795301683..ea8c406eb4 100644 --- a/docs/resources/stream_on_view.md +++ b/docs/resources/stream_on_view.md @@ -58,10 +58,10 @@ resource "snowflake_stream_on_view" "stream" { ### Required -- `database` (String) The database in which to create the stream. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `(`, `)`, `"` -- `name` (String) Specifies the identifier for the stream; must be unique for the database and schema in which the stream is created. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `(`, `)`, `"` -- `schema` (String) The schema in which to create the stream. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `(`, `)`, `"` -- `view` (String) Specifies an identifier for the view the stream will monitor. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `(`, `)`, `"` +- `database` (String) The database in which to create the stream. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"` +- `name` (String) Specifies the identifier for the stream; must be unique for the database and schema in which the stream is created. 
Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"` +- `schema` (String) The schema in which to create the stream. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"` +- `view` (String) Specifies an identifier for the view the stream will monitor. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"` ### Optional diff --git a/docs/resources/tag.md b/docs/resources/tag.md index b2352e260c..0d8c2ba31c 100644 --- a/docs/resources/tag.md +++ b/docs/resources/tag.md @@ -2,33 +2,35 @@ page_title: "snowflake_tag Resource - terraform-provider-snowflake" subcategory: "" description: |- - + Resource used to manage tags. For more information, check tag documentation https://docs.snowflake.com/en/sql-reference/sql/create-tag. --- -# snowflake_tag (Resource) +!> **V1 release candidate** This resource was reworked and is a release candidate for the V1. We do not expect significant changes in it before the V1. We will welcome any feedback and adjust the resource if needed. Any errors reported will be resolved with a higher priority. We encourage checking this resource out before the V1 release. Please follow the [migration guide](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/MIGRATION_GUIDE.md#v0980--v0990) to use it. +# snowflake_tag (Resource) +Resource used to manage tags. For more information, check [tag documentation](https://docs.snowflake.com/en/sql-reference/sql/create-tag). ## Example Usage ```terraform -resource "snowflake_database" "database" { - name = "database" -} - -resource "snowflake_schema" "schema" { - name = "schema" - database = snowflake_database.database.name +# basic resource +resource "snowflake_tag" "tag" { + name = "tag" + database = "database" + schema = "schema" } +# complete resource resource "snowflake_tag" "tag" { - name = "cost_center" - database = snowflake_database.database.name - schema = snowflake_schema.schema.name - allowed_values = ["finance", "engineering"] + name = "tag" + database = "database" + schema = "schema" + comment = "comment" + allowed_values = ["finance", "engineering", ""] + masking_policies = [snowflake_masking_policy.masking_policy.fully_qualified_name] } ``` - -> **Note** Instead of using fully_qualified_name, you can reference objects managed outside Terraform by constructing a correct ID, consult [identifiers guide](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/latest/docs/guides/identifiers#new-computed-fully-qualified-name-field-in-resources). @@ -37,25 +39,40 @@ resource "snowflake_tag" "tag" { ### Required -- `database` (String) The database in which to create the tag. -- `name` (String) Specifies the identifier for the tag; must be unique for the database in which the tag is created. -- `schema` (String) The schema in which to create the tag.
+- `database` (String) The database in which to create the tag. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"` +- `name` (String) Specifies the identifier for the tag; must be unique for the database in which the tag is created. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"` +- `schema` (String) The schema in which to create the tag. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"` ### Optional -- `allowed_values` (List of String) List of allowed values for the tag. +- `allowed_values` (Set of String) Set of allowed values for the tag. - `comment` (String) Specifies a comment for the tag. +- `masking_policies` (Set of String) Set of masking policies for the tag. A tag can support one masking policy for each data type. If masking policies are assigned to the tag, before dropping the tag, the provider automatically unassigns them. ### Read-Only - `fully_qualified_name` (String) Fully qualified name of the resource. For more information, see [object name resolution](https://docs.snowflake.com/en/sql-reference/name-resolution). - `id` (String) The ID of this resource. +- `show_output` (List of Object) Outputs the result of `SHOW TAGS` for the given tag. (see [below for nested schema](#nestedatt--show_output)) + + +### Nested Schema for `show_output` + +Read-Only: + +- `allowed_values` (Set of String) +- `comment` (String) +- `created_on` (String) +- `database_name` (String) +- `name` (String) +- `owner` (String) +- `owner_role_type` (String) +- `schema_name` (String) ## Import Import is supported using the following syntax: ```shell -# format is database name | schema name | tag name -terraform import snowflake_tag.example 'dbName|schemaName|tagName' +terraform import snowflake_tag.example '""."".""' ``` diff --git a/docs/resources/tag_masking_policy_association.md b/docs/resources/tag_masking_policy_association.md index 1b183e31f9..36d6c8943a 100644 --- a/docs/resources/tag_masking_policy_association.md +++ b/docs/resources/tag_masking_policy_association.md @@ -7,6 +7,8 @@ description: |- # snowflake_tag_masking_policy_association (Resource) +~> **Deprecation** This resource is deprecated and will be removed in a future major version release. Please use one of the new resources instead: `snowflake_tag` + Attach a masking policy to a tag. Requires a current warehouse to be set. Either with SNOWFLAKE_WAREHOUSE env variable or in current session. If no warehouse is provided, a temporary warehouse will be created. ## Example Usage diff --git a/docs/resources/user.md b/docs/resources/user.md index 48e3f1e7f8..0f542d36b9 100644 --- a/docs/resources/user.md +++ b/docs/resources/user.md @@ -128,7 +128,7 @@ resource "snowflake_user" "u" { ### Required -- `name` (String) Name of the user. Note that if you do not supply login_name this will be used as login_name. 
Check the [docs](https://docs.snowflake.net/manuals/sql-reference/sql/create-user.html#required-parameters). Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `(`, `)`, `"` +- `name` (String) Name of the user. Note that if you do not supply login_name this will be used as login_name. Check the [docs](https://docs.snowflake.net/manuals/sql-reference/sql/create-user.html#required-parameters). Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"` ### Optional @@ -1021,3 +1021,5 @@ Import is supported using the following syntax: ```shell terraform import snowflake_user.example '""' ``` + +Note: terraform plan+apply may be needed after successful import to fill out all the missing fields (like `password`) in state. diff --git a/docs/resources/view.md b/docs/resources/view.md index 46a880bab0..4295f33051 100644 --- a/docs/resources/view.md +++ b/docs/resources/view.md @@ -91,9 +91,9 @@ SQL ### Required -- `database` (String) The database in which to create the view. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `(`, `)`, `"` -- `name` (String) Specifies the identifier for the view; must be unique for the schema in which the view is created. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `(`, `)`, `"` -- `schema` (String) The schema in which to create the view. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `(`, `)`, `"` +- `database` (String) The database in which to create the view. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"` +- `name` (String) Specifies the identifier for the view; must be unique for the schema in which the view is created. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"` +- `schema` (String) The schema in which to create the view. 
Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"` - `statement` (String) Specifies the query used to create the view. ### Optional diff --git a/docs/resources/warehouse.md b/docs/resources/warehouse.md index 3a440710bd..af0fbe914d 100644 --- a/docs/resources/warehouse.md +++ b/docs/resources/warehouse.md @@ -28,7 +28,7 @@ resource "snowflake_warehouse" "warehouse" { ### Required -- `name` (String) Identifier for the virtual warehouse; must be unique for your account. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `(`, `)`, `"` +- `name` (String) Identifier for the virtual warehouse; must be unique for your account. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"` ### Optional diff --git a/docs/technical-documentation/object_renaming_research_summary.md b/docs/technical-documentation/object_renaming_research_summary.md new file mode 100644 index 0000000000..575770f1a0 --- /dev/null +++ b/docs/technical-documentation/object_renaming_research_summary.md @@ -0,0 +1,55 @@ +# Object Renaming Support + +The Terraform Provider team recently started a short research project on object renaming and other similar topics. This document will cover the topics we looked into, explain how we tested them, and discuss their effects on the provider. We'll also list the topics we want to explore further in future research. + +## Topics + +### Renaming higher-hierarchy objects + +**Description:** This problem relates to renaming objects that are higher in the object hierarchy (e.g. a database or schema) and how this affects the lower-hierarchy objects created on them (e.g. a schema or table) while they are present in the Terraform configuration. We decided to test this problem in depth, as we receive issues related to it from time to time. We wanted to get a better understanding of it, and of how the provider currently handles such situations, so that we could provide appropriate fixes if necessary. + +**Tests:** We prepared a [set of test cases](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/pkg/resources/object_renaming_acceptance_test.go) by combining permutations of different aspects, such as: + +- Depth (shallow connections like database and schema, or deep connections like database, schema, and table).
+- Higher hierarchy object placement (inside or outside of the Terraform configuration) +- Resource dependency (implicit, [depends_on](https://developer.hashicorp.com/terraform/language/meta-arguments/depends_on), or no dependency) +- Place of rename execution (within the Terraform configuration or manually outside of it) + +**Impact:** We decided to provide additional documentation on: + +- Best practices +- A guide on dealing with certain errors connected to object renaming +- Guidelines that may be useful in certain scenarios connected to object renaming + +In addition to improved documentation, the tests showed us that we need to improve our error handling in the Read and Delete operations. In certain scenarios, the resources failed to remove themselves from the state when they should have. This change should decrease the chances of resources trapping themselves in an infinite plan state where the only way out is through [manual state manipulation](https://developer.hashicorp.com/terraform/cli/commands/state). + +### Ignoring list order after creation + updating list items (mostly related to table columns) + +**Description:** The issues with table columns ([#420](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/420), [#753](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/753), [#2839](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2839)) have been on our minds for a very long time, and we finally had a chance to work on a solution that would improve them, and possibly some other resources as well. In short, the table columns use case consists of two connected requirements: + +- Ignoring the order of columns after creation. Users should be able to reorder, add, and remove columns from any place while still retaining some control over column order on the Snowflake side. +- Updating a given column in place instead of removing and re-adding it. Ignoring the order was an additional challenge here, because if someone reorders the columns and changes a column's name in one apply, we need a way to identify that column to perform the correct action. + +**Tests:** The tests were carried out on a resource created only for the purpose of this research ([resource reference](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/pkg/resources/object_renaming_lists_and_sets.go#L125)). Note that it won't normally be visible, and no changes were made to other resources. We tested a few approaches to order ignoring and one to updating items. + +**Impact:** The tests showed promising results and a potential improvement in how structures like table columns are managed. The adjustment of table columns will be done during the table refactoring we are doing as part of [preparing the GA object for V1](https://github.com/Snowflake-Labs/terraform-provider-snowflake/pull/3147). + +Additionally, this gives us more knowledge of how lists and their items are managed in Terraform SDKv2 and how we can interact with them to achieve certain behaviors. We confirmed their limitations and found solutions for most of them. We also have high hopes that once we can migrate to the newer Terraform Plugin Framework, there will be more tools to support even more demanding use cases. + +## Topics for future research + +### Diff suppression for the items of lists and sets + +We would like to research the usage of DiffSuppressFuncs for items on lists and sets; a rough sketch of the idea follows below.
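To make this more concrete, here is a rough, hypothetical sketch (not code from the provider) of what such a suppression could look like with the Terraform Plugin SDK v2 `schema.DiffSuppressFunc`. The field name and helper are invented for illustration, and the naive quote-stripping would need refinement for identifiers that themselves contain dots:

```go
package example

import (
	"strings"

	"github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema"
)

// suppressIdentifierQuoting treats `"DB"."SCHEMA"."OBJECT"` and DB.SCHEMA.OBJECT
// as equal by stripping double quotes from each identifier part before comparing.
// Note: splitting on "." is intentionally naive and only for illustration.
func suppressIdentifierQuoting(_, oldValue, newValue string, _ *schema.ResourceData) bool {
	normalize := func(id string) string {
		parts := strings.Split(id, ".")
		for i, p := range parts {
			parts[i] = strings.Trim(p, `"`)
		}
		return strings.Join(parts, ".")
	}
	return normalize(oldValue) == normalize(newValue)
}

// Hypothetical usage on a set of identifiers; for sets and lists the SDK invokes
// the function per element key, which is part of what makes this tricky.
var exampleSchema = map[string]*schema.Schema{
	"base_objects": {
		Type:             schema.TypeSet,
		Optional:         true,
		Elem:             &schema.Schema{Type: schema.TypeString},
		DiffSuppressFunc: suppressIdentifierQuoting,
	},
}
```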
In some of the resources we found that using them is tricky and may cause issues with the plan. Such suppression could enable us to ignore differences in, e.g., quoting in sets of identifiers. + +### Computed + Optional lists (and sets) + +During this research, the concept was only briefly touched on due to its complexity and the need to focus on other topics. However, Computed + Optional collections may be useful for cases where the list (or set) is optional and is computed on the Snowflake side when not specified. The recently refactored views are an example of where this approach could be used, but it needs further testing before we use it in the actual resource. + +### Item updates in sets + +In Terraform, the indexes of set items are calculated based on the item's hash. Because of that, it's hard to handle an item's update whenever one of the items changes. The topic wasn't covered in this research, because there's no real use case for it (yet), but we were already thinking about potentially switching some of the fields from lists to sets where this feature could be useful. + +## Summary + +This journey has been valuable, enhancing our understanding and guiding future improvements. We also outlined areas for further research, which we believe will bring even more benefits to our users. We are excited to keep improving your experience with our Terraform Provider. \ No newline at end of file diff --git a/examples/additional/deprecated_resources.MD b/examples/additional/deprecated_resources.MD index 1c5fb01acb..557f4af44a 100644 --- a/examples/additional/deprecated_resources.MD +++ b/examples/additional/deprecated_resources.MD @@ -5,3 +5,4 @@ - [snowflake_role](./docs/resources/role) - use [snowflake_account_role](./docs/resources/account_role) instead - [snowflake_saml_integration](./docs/resources/saml_integration) - use [snowflake_saml2_integration](./docs/resources/saml2_integration) instead - [snowflake_stream](./docs/resources/stream) +- [snowflake_tag_masking_policy_association](./docs/resources/tag_masking_policy_association) diff --git a/examples/additional/provider_config_tf.MD b/examples/additional/provider_config_tf.MD new file mode 100644 index 0000000000..2f1377939c --- /dev/null +++ b/examples/additional/provider_config_tf.MD @@ -0,0 +1,35 @@ +provider "snowflake" { + organization_name = "organization_name" + account_name = "account_name" + user = "user" + password = "password" + warehouse = "SNOWFLAKE" + protocol = "https" + port = "443" + role = "ACCOUNTADMIN" + validate_default_parameters = true + client_ip = "1.2.3.4" + authenticator = "snowflake" + okta_url = "https://example.com" + login_timeout = 10 + request_timeout = 20 + jwt_expire_timeout = 30 + client_timeout = 40 + jwt_client_timeout = 50 + external_browser_timeout = 60 + insecure_mode = true + ocsp_fail_open = true + keep_session_alive = true + disable_telemetry = true + client_request_mfa_token = true + client_store_temporary_credential = true + disable_query_context_cache = true + include_retry_reason = true + max_retry_count = 3 + driver_tracing = "info" + tmp_directory_path = "/tmp/terraform-provider/" + disable_console_login = true + params = { + param_key = "param_value" + } +} diff --git a/examples/additional/provider_config_toml.MD b/examples/additional/provider_config_toml.MD new file mode 100644 index 0000000000..e0f4039014 --- /dev/null +++ b/examples/additional/provider_config_toml.MD @@ -0,0 +1,34 @@ +[example] +accountname = 'account_name' +organizationname = 'organization_name' +user = 'user' +password = 
'password' +warehouse = 'SNOWFLAKE' +role = 'ACCOUNTADMIN' +clientip = '1.2.3.4' +protocol = 'https' +port = 443 +oktaurl = 'https://example.com' +clienttimeout = 10 +jwtclienttimeout = 20 +logintimeout = 30 +requesttimeout = 40 +jwtexpiretimeout = 50 +externalbrowsertimeout = 60 +maxretrycount = 1 +authenticator = 'snowflake' +insecuremode = true +ocspfailopen = true +keepsessionalive = true +disabletelemetry = true +validatedefaultparameters = true +clientrequestmfatoken = true +clientstoretemporarycredential = true +tracing = 'info' +tmpdirpath = '/tmp/terraform-provider/' +disablequerycontextcache = true +includeretryreason = true +disableconsolelogin = true + +[example.params] +param_key = 'param_value' diff --git a/examples/data-sources/snowflake_connections/data-source.tf b/examples/data-sources/snowflake_connections/data-source.tf new file mode 100644 index 0000000000..b32fd92e5e --- /dev/null +++ b/examples/data-sources/snowflake_connections/data-source.tf @@ -0,0 +1,25 @@ +# Simple usage +data "snowflake_connections" "simple" { +} + +output "simple_output" { + value = data.snowflake_connections.simple.connections +} + +# Filtering (like) +data "snowflake_connections" "like" { + like = "connection-name" +} + +output "like_output" { + value = data.snowflake_connections.like.connections +} + +# Filtering by prefix (like) +data "snowflake_connections" "like_prefix" { + like = "prefix%" +} + +output "like_prefix_output" { + value = data.snowflake_connections.like_prefix.connections +} diff --git a/examples/provider/provider.tf b/examples/provider/provider.tf index d7452fec32..0264c069ab 100644 --- a/examples/provider/provider.tf +++ b/examples/provider/provider.tf @@ -6,31 +6,34 @@ terraform { } } +# A simple configuration of the provider with a default authentication. +# A default value for `authenticator` is `snowflake`, enabling authentication with `user` and `password`. provider "snowflake" { - account = "..." # required if not using profile. Can also be set via SNOWFLAKE_ACCOUNT env var - username = "..." # required if not using profile or token. Can also be set via SNOWFLAKE_USER env var - password = "..." - authenticator = "..." # required if not using password as auth method - oauth_access_token = "..." - private_key_path = "..." - private_key = "..." - private_key_passphrase = "..." - oauth_refresh_token = "..." - oauth_client_id = "..." - oauth_client_secret = "..." - oauth_endpoint = "..." - oauth_redirect_url = "..." + organization_name = "..." # required if not using profile. Can also be set via SNOWFLAKE_ORGANIZATION_NAME env var + account_name = "..." # required if not using profile. Can also be set via SNOWFLAKE_ACCOUNT_NAME env var + user = "..." # required if not using profile or token. Can also be set via SNOWFLAKE_USER env var + password = "..." // optional - region = "..." # required if using legacy format for account identifier role = "..." host = "..." warehouse = "..." - session_params = { + params = { query_tag = "..." } } +# A simple configuration of the provider with private key authentication. +provider "snowflake" { + organization_name = "..." # required if not using profile. Can also be set via SNOWFLAKE_ORGANIZATION_NAME env var + account_name = "..." # required if not using profile. Can also be set via SNOWFLAKE_ACCOUNT_NAME env var + user = "..." # required if not using profile or token. Can also be set via SNOWFLAKE_USER env var + authenticator = "SNOWFLAKE_JWT" + private_key = "-----BEGIN ENCRYPTED PRIVATE KEY-----..." 
+ private_key_passphrase = "passphrase" +} + +# By using the `profile` field, missing fields will be populated from ~/.snowflake/config TOML file provider "snowflake" { profile = "securityadmin" } diff --git a/examples/resources/snowflake_grant_account_role/import.sh b/examples/resources/snowflake_grant_account_role/import.sh index f308c94fa5..f7bfaee609 100644 --- a/examples/resources/snowflake_grant_account_role/import.sh +++ b/examples/resources/snowflake_grant_account_role/import.sh @@ -1,2 +1,2 @@ # format is role_name (string) | grantee_object_type (ROLE|USER) | grantee_name (string) -terraform import "\"test_role\"|ROLE|\"test_parent_role\"" +terraform import snowflake_grant_account_role.example '"test_role"|ROLE|"test_parent_role"' diff --git a/examples/resources/snowflake_grant_application_role/import.sh b/examples/resources/snowflake_grant_application_role/import.sh index ad0caa05b1..7e7a9015c9 100644 --- a/examples/resources/snowflake_grant_application_role/import.sh +++ b/examples/resources/snowflake_grant_application_role/import.sh @@ -1,2 +1,2 @@ # format is application_role_name (string) | object_type (ACCOUNT_ROLE|APPLICATION) | grantee_name (string) -terraform import "\"my_application\".\"app_role_1\"|ACCOUNT_ROLE|\"my_role\"" +terraform import snowflake_grant_application_role.example '"my_application"."app_role_1"|ACCOUNT_ROLE|"my_role"' diff --git a/examples/resources/snowflake_grant_database_role/import.sh b/examples/resources/snowflake_grant_database_role/import.sh index 878d3c901a..9798e0298a 100644 --- a/examples/resources/snowflake_grant_database_role/import.sh +++ b/examples/resources/snowflake_grant_database_role/import.sh @@ -1,2 +1,2 @@ # format is database_role_name (string) | object_type (ROLE|DATABASE ROLE|SHARE) | grantee_name (string) -terraform import "\"ABC\".\"test_db_role\"|ROLE|\"test_parent_role\"" +terraform import snowflake_grant_database_role.example '"ABC"."test_db_role"|ROLE|"test_parent_role"' diff --git a/examples/resources/snowflake_grant_ownership/import.sh b/examples/resources/snowflake_grant_ownership/import.sh index e4745f24bc..f56c1db07f 100644 --- a/examples/resources/snowflake_grant_ownership/import.sh +++ b/examples/resources/snowflake_grant_ownership/import.sh @@ -1,41 +1,41 @@ ### OnObject -`terraform import "|||OnObject||"` +`terraform import snowflake_grant_ownership.example '|||OnObject||'` ### OnAll (contains inner types: InDatabase | InSchema) #### InDatabase -`terraform import "|||OnAll||InDatabase|"` +`terraform import snowflake_grant_ownership.example '|||OnAll||InDatabase|'` #### InSchema -`terraform import "|||OnAll||InSchema|"` +`terraform import snowflake_grant_ownership.example '|||OnAll||InSchema|'` ### OnFuture (contains inner types: InDatabase | InSchema) #### InDatabase -`terraform import "|||OnFuture||InDatabase|"` +`terraform import snowflake_grant_ownership.example '|||OnFuture||InDatabase|'` #### InSchema -`terraform import "|||OnFuture||InSchema|"` +`terraform import snowflake_grant_ownership.example '|||OnFuture||InSchema|'` ### Import examples #### OnObject on Schema ToAccountRole -`terraform import "ToAccountRole|\"account_role\"|COPY|OnObject|SCHEMA|\"database_name\".\"schema_name\""` +`terraform import snowflake_grant_ownership.example 'ToAccountRole|"account_role"|COPY|OnObject|SCHEMA|"database_name"."schema_name"'` #### OnObject on Schema ToDatabaseRole -`terraform import "ToDatabaseRole|\"database_name\".\"database_role_name\"|COPY|OnObject|SCHEMA|\"database_name\".\"schema_name\""` +`terraform import 
snowflake_grant_ownership.example 'ToDatabaseRole|"database_name"."database_role_name"|COPY|OnObject|SCHEMA|"database_name"."schema_name"'` #### OnObject on Table -`terraform import "ToAccountRole|\"account_role\"|COPY|OnObject|TABLE|\"database_name\".\"schema_name\".\"table_name\""` +`terraform import snowflake_grant_ownership.example 'ToAccountRole|"account_role"|COPY|OnObject|TABLE|"database_name"."schema_name"."table_name"'` #### OnAll InDatabase -`terraform import "ToAccountRole|\"account_role\"|REVOKE|OnAll|TABLES|InDatabase|\"database_name\""` +`terraform import snowflake_grant_ownership.example 'ToAccountRole|"account_role"|REVOKE|OnAll|TABLES|InDatabase|"database_name"'` #### OnAll InSchema -`terraform import "ToAccountRole|\"account_role\"||OnAll|TABLES|InSchema|\"database_name\".\"schema_name\""` +`terraform import snowflake_grant_ownership.example 'ToAccountRole|"account_role"||OnAll|TABLES|InSchema|"database_name"."schema_name"'` #### OnFuture InDatabase -`terraform import "ToAccountRole|\"account_role\"||OnFuture|TABLES|InDatabase|\"database_name\""` +`terraform import snowflake_grant_ownership.example 'ToAccountRole|"account_role"||OnFuture|TABLES|InDatabase|"database_name"'` #### OnFuture InSchema -`terraform import "ToAccountRole|\"account_role\"|COPY|OnFuture|TABLES|InSchema|\"database_name\".\"schema_name\""` +`terraform import snowflake_grant_ownership.example 'ToAccountRole|"account_role"|COPY|OnFuture|TABLES|InSchema|"database_name"."schema_name"'` diff --git a/examples/resources/snowflake_primary_connection/import.sh b/examples/resources/snowflake_primary_connection/import.sh new file mode 100644 index 0000000000..743bf79921 --- /dev/null +++ b/examples/resources/snowflake_primary_connection/import.sh @@ -0,0 +1 @@ +terraform import snowflake_primary_connection.example 'connection_name' diff --git a/examples/resources/snowflake_primary_connection/resource.tf b/examples/resources/snowflake_primary_connection/resource.tf new file mode 100644 index 0000000000..b9fe410b72 --- /dev/null +++ b/examples/resources/snowflake_primary_connection/resource.tf @@ -0,0 +1,13 @@ +## Minimal +resource "snowflake_primary_connection" "basic" { + name = "connection_name" +} + +## Complete (with every optional set) +resource "snowflake_primary_connection" "complete" { + name = "connection_name" + comment = "my complete connection" + enable_failover_to_accounts = [ + "." + ] +} diff --git a/examples/resources/snowflake_secondary_connection/import.sh b/examples/resources/snowflake_secondary_connection/import.sh new file mode 100644 index 0000000000..4de28135f7 --- /dev/null +++ b/examples/resources/snowflake_secondary_connection/import.sh @@ -0,0 +1 @@ +terraform import snowflake_secondary_connection.example 'secondary_connection_name' diff --git a/examples/resources/snowflake_secondary_connection/resource.tf b/examples/resources/snowflake_secondary_connection/resource.tf new file mode 100644 index 0000000000..17d32c0820 --- /dev/null +++ b/examples/resources/snowflake_secondary_connection/resource.tf @@ -0,0 +1,12 @@ +## Minimal +resource "snowflake_secondary_connection" "basic" { + name = "connection_name" + as_replica_of = ".." +} + +## Complete (with every optional set) +resource "snowflake_secondary_connection" "complete" { + name = "connection_name" + as_replica_of = ".." 
+ comment = "my complete secondary connection" +} diff --git a/examples/resources/snowflake_tag/import.sh b/examples/resources/snowflake_tag/import.sh index 0c75224ed4..784e71109b 100644 --- a/examples/resources/snowflake_tag/import.sh +++ b/examples/resources/snowflake_tag/import.sh @@ -1,2 +1 @@ -# format is database name | schema name | tag name -terraform import snowflake_tag.example 'dbName|schemaName|tagName' \ No newline at end of file +terraform import snowflake_tag.example '""."".""' diff --git a/examples/resources/snowflake_tag/resource.tf b/examples/resources/snowflake_tag/resource.tf index 465cd6741d..9c99ab0503 100644 --- a/examples/resources/snowflake_tag/resource.tf +++ b/examples/resources/snowflake_tag/resource.tf @@ -1,15 +1,16 @@ -resource "snowflake_database" "database" { - name = "database" -} - -resource "snowflake_schema" "schema" { - name = "schema" - database = snowflake_database.database.name +# basic resource +resource "snowflake_tag" "tag" { + name = "tag" + database = "database" + schema = "schema" } +# complete resource resource "snowflake_tag" "tag" { - name = "cost_center" - database = snowflake_database.database.name - schema = snowflake_schema.schema.name - allowed_values = ["finance", "engineering"] + name = "tag" + database = "database" + schema = "schema" + comment = "comment" + allowed_values = ["finance", "engineering", ""] + masking_policies = [snowflake_masking_policy.masking_policy.fully_qualified_name] } diff --git a/pkg/acceptance/bettertestspoc/README.md b/pkg/acceptance/bettertestspoc/README.md index 9d73168210..7b96af6362 100644 --- a/pkg/acceptance/bettertestspoc/README.md +++ b/pkg/acceptance/bettertestspoc/README.md @@ -351,5 +351,6 @@ func (w *WarehouseDatasourceShowOutputAssert) IsEmpty() { 1. Lists of objects are partially generated, and only parameter name is generated in some functions (the type has to be added manually). 2. `testing` is a package name that makes Go think that we want to have unnamed parameter there, but we just didn't generate the type for that field in the function argument. 
- generate assertions checking that time is not empty - we often do not compare time fields by value, but check if they are set +- utilize `ContainsExactlyInAnyOrder` function in `pkg/acceptance/bettertestspoc/assert/commons.go` to create asserts on collections that are order independent - Additional asserts for sets and lists that wouldn't rely on the order of items saved to the state (SNOW-1706544) - support generating provider config and use generated configs in `pkg/provider/provider_acceptance_test.go` diff --git a/pkg/acceptance/bettertestspoc/assert/commons.go b/pkg/acceptance/bettertestspoc/assert/commons.go index 59e1c86ffa..aeb44da985 100644 --- a/pkg/acceptance/bettertestspoc/assert/commons.go +++ b/pkg/acceptance/bettertestspoc/assert/commons.go @@ -3,8 +3,13 @@ package assert import ( "errors" "fmt" + "slices" + "strconv" + "strings" "testing" + "golang.org/x/exp/maps" + "github.com/hashicorp/terraform-plugin-testing/helper/resource" "github.com/hashicorp/terraform-plugin-testing/terraform" ) @@ -101,3 +106,81 @@ func AssertThatObject(t *testing.T, objectAssert InPlaceAssertionVerifier) { t.Helper() objectAssert.VerifyAll(t) } + +func ContainsExactlyInAnyOrder(resourceKey string, attributePath string, expectedItems []map[string]string) resource.TestCheckFunc { + return func(state *terraform.State) error { + var actualItems []map[string]string + var resourceValue *terraform.ResourceState + + if value, ok := state.RootModule().Resources[resourceKey]; ok { + resourceValue = value + } else { + return fmt.Errorf("resource %s not found", resourceKey) + } + + // Allocate space for actualItems and assert length + for attrKey, attrValue := range resourceValue.Primary.Attributes { + if strings.HasPrefix(attrKey, attributePath) { + attr := strings.TrimPrefix(attrKey, attributePath+".") + + if attr == "#" { + attrValueLen, err := strconv.Atoi(attrValue) + if err != nil { + return fmt.Errorf("failed to convert length of the attribute %s: %w", attrKey, err) + } + if len(expectedItems) != attrValueLen { + return fmt.Errorf("expected to find %d items in %s, but found %d", len(expectedItems), attributePath, attrValueLen) + } + + actualItems = make([]map[string]string, attrValueLen) + for i := range actualItems { + actualItems[i] = make(map[string]string) + } + } + } + } + + // Gather all actual items + for attrKey, attrValue := range resourceValue.Primary.Attributes { + if strings.HasPrefix(attrKey, attributePath) { + attr := strings.TrimPrefix(attrKey, attributePath+".") + + if strings.HasSuffix(attr, "%") || strings.HasSuffix(attr, "#") { + continue + } + + attrParts := strings.SplitN(attr, ".", 2) + index, indexErr := strconv.Atoi(attrParts[0]) + isIndex := indexErr == nil + + if len(attrParts) > 1 && isIndex { + itemKey := attrParts[1] + actualItems[index][itemKey] = attrValue + } + } + } + + errs := make([]error, 0) + for _, actualItem := range actualItems { + found := false + if slices.ContainsFunc(expectedItems, func(expected map[string]string) bool { return maps.Equal(expected, actualItem) }) { + found = true + } + if !found { + errs = append(errs, fmt.Errorf("unexpected item found: %s", actualItem)) + } + } + + for _, expectedItem := range expectedItems { + found := false + if slices.ContainsFunc(actualItems, func(actual map[string]string) bool { return maps.Equal(actual, expectedItem) }) { + found = true + } + if !found { + errs = append(errs, fmt.Errorf("expected item to be found, but it wasn't: %s", expectedItem)) + } + } + + return errors.Join(errs...) 
+ } +} diff --git a/pkg/acceptance/bettertestspoc/assert/objectassert/connection_snowflake_ext.go b/pkg/acceptance/bettertestspoc/assert/objectassert/connection_snowflake_ext.go index 4e237798ef..3ab10cc0e9 100644 --- a/pkg/acceptance/bettertestspoc/assert/objectassert/connection_snowflake_ext.go +++ b/pkg/acceptance/bettertestspoc/assert/objectassert/connection_snowflake_ext.go @@ -3,13 +3,12 @@ package objectassert import ( "fmt" "slices" - "strings" "testing" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk" ) -func (c *ConnectionAssert) HasFailoverAllowedToAccounts(expected []string) *ConnectionAssert { +func (c *ConnectionAssert) HasFailoverAllowedToAccounts(expected ...sdk.AccountIdentifier) *ConnectionAssert { c.AddAssertion(func(t *testing.T, o *sdk.Connection) error { t.Helper() if !slices.Equal(expected, o.FailoverAllowedToAccounts) { @@ -46,9 +45,8 @@ func (c *ConnectionAssert) HasConnectionUrlNotEmpty() *ConnectionAssert { func (c *ConnectionAssert) HasPrimaryIdentifier(expected sdk.ExternalObjectIdentifier) *ConnectionAssert { c.AddAssertion(func(t *testing.T, o *sdk.Connection) error { t.Helper() - expectedString := strings.ReplaceAll(expected.FullyQualifiedName(), `"`, "") - if o.Primary != expectedString { - return fmt.Errorf("expected primary identifier: %v; got: %v", expectedString, o.Primary) + if o.Primary != expected { + return fmt.Errorf("expected primary identifier: %v; got: %v", expected.FullyQualifiedName(), o.Primary) } return nil }) diff --git a/pkg/acceptance/bettertestspoc/assert/objectassert/connection_snowflake_gen.go b/pkg/acceptance/bettertestspoc/assert/objectassert/connection_snowflake_gen.go index d1c0946276..80b165243e 100644 --- a/pkg/acceptance/bettertestspoc/assert/objectassert/connection_snowflake_gen.go +++ b/pkg/acceptance/bettertestspoc/assert/objectassert/connection_snowflake_gen.go @@ -31,6 +31,20 @@ func ConnectionFromObject(t *testing.T, connection *sdk.Connection) *ConnectionA } } +func (c *ConnectionAssert) HasRegionGroup(expected string) *ConnectionAssert { + c.AddAssertion(func(t *testing.T, o *sdk.Connection) error { + t.Helper() + if o.RegionGroup == nil { + return fmt.Errorf("expected region group to have value; got: nil") + } + if *o.RegionGroup != expected { + return fmt.Errorf("expected region group: %v; got: %v", expected, *o.RegionGroup) + } + return nil + }) + return c +} + func (c *ConnectionAssert) HasSnowflakeRegion(expected string) *ConnectionAssert { c.AddAssertion(func(t *testing.T, o *sdk.Connection) error { t.Helper() @@ -100,7 +114,7 @@ func (c *ConnectionAssert) HasIsPrimary(expected bool) *ConnectionAssert { return c } -func (c *ConnectionAssert) HasPrimary(expected string) *ConnectionAssert { +func (c *ConnectionAssert) HasPrimary(expected sdk.ExternalObjectIdentifier) *ConnectionAssert { c.AddAssertion(func(t *testing.T, o *sdk.Connection) error { t.Helper() if o.Primary != expected { diff --git a/pkg/acceptance/bettertestspoc/assert/resourceassert/gen/resource_schema_def.go b/pkg/acceptance/bettertestspoc/assert/resourceassert/gen/resource_schema_def.go index a95d113b07..bcbe79ed5b 100644 --- a/pkg/acceptance/bettertestspoc/assert/resourceassert/gen/resource_schema_def.go +++ b/pkg/acceptance/bettertestspoc/assert/resourceassert/gen/resource_schema_def.go @@ -97,6 +97,18 @@ var allResourceSchemaDefs = []ResourceSchemaDef{ name: "StreamOnView", schema: resources.StreamOnView().Schema, }, + { + name: "PrimaryConnection", + schema: resources.PrimaryConnection().Schema, + }, + { + name: 
"SecondaryConnection", + schema: resources.SecondaryConnection().Schema, + }, + { + name: "Tag", + schema: resources.Tag().Schema, + }, { name: "Task", schema: resources.Task().Schema, diff --git a/pkg/acceptance/bettertestspoc/assert/resourceassert/primary_connection_resource_ext.go b/pkg/acceptance/bettertestspoc/assert/resourceassert/primary_connection_resource_ext.go new file mode 100644 index 0000000000..b22dbfd4ee --- /dev/null +++ b/pkg/acceptance/bettertestspoc/assert/resourceassert/primary_connection_resource_ext.go @@ -0,0 +1,21 @@ +package resourceassert + +import ( + "fmt" + + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/bettertestspoc/assert" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk" +) + +func (c *PrimaryConnectionResourceAssert) HasExactlyFailoverToAccountsInOrder(expected ...sdk.AccountIdentifier) *PrimaryConnectionResourceAssert { + c.AddAssertion(assert.ValueSet("enable_failover_to_accounts.#", fmt.Sprintf("%d", len(expected)))) + for i, v := range expected { + c.AddAssertion(assert.ValueSet(fmt.Sprintf("enable_failover_to_accounts.%d", i), v.Name())) + } + return c +} + +func (c *PrimaryConnectionResourceAssert) HasNoEnableFailoverToAccounts() *PrimaryConnectionResourceAssert { + c.AddAssertion(assert.ValueSet("enable_failover_to_accounts.#", "0")) + return c +} diff --git a/pkg/acceptance/bettertestspoc/assert/resourceassert/primary_connection_resource_gen.go b/pkg/acceptance/bettertestspoc/assert/resourceassert/primary_connection_resource_gen.go new file mode 100644 index 0000000000..9297a6408f --- /dev/null +++ b/pkg/acceptance/bettertestspoc/assert/resourceassert/primary_connection_resource_gen.go @@ -0,0 +1,79 @@ +// Code generated by assertions generator; DO NOT EDIT. + +package resourceassert + +import ( + "testing" + + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/bettertestspoc/assert" +) + +type PrimaryConnectionResourceAssert struct { + *assert.ResourceAssert +} + +func PrimaryConnectionResource(t *testing.T, name string) *PrimaryConnectionResourceAssert { + t.Helper() + + return &PrimaryConnectionResourceAssert{ + ResourceAssert: assert.NewResourceAssert(name, "resource"), + } +} + +func ImportedPrimaryConnectionResource(t *testing.T, id string) *PrimaryConnectionResourceAssert { + t.Helper() + + return &PrimaryConnectionResourceAssert{ + ResourceAssert: assert.NewImportedResourceAssert(id, "imported resource"), + } +} + +/////////////////////////////////// +// Attribute value string checks // +/////////////////////////////////// + +func (p *PrimaryConnectionResourceAssert) HasCommentString(expected string) *PrimaryConnectionResourceAssert { + p.AddAssertion(assert.ValueSet("comment", expected)) + return p +} + +func (p *PrimaryConnectionResourceAssert) HasEnableFailoverToAccountsString(expected string) *PrimaryConnectionResourceAssert { + p.AddAssertion(assert.ValueSet("enable_failover_to_accounts", expected)) + return p +} + +func (p *PrimaryConnectionResourceAssert) HasFullyQualifiedNameString(expected string) *PrimaryConnectionResourceAssert { + p.AddAssertion(assert.ValueSet("fully_qualified_name", expected)) + return p +} + +func (p *PrimaryConnectionResourceAssert) HasNameString(expected string) *PrimaryConnectionResourceAssert { + p.AddAssertion(assert.ValueSet("name", expected)) + return p +} + +//////////////////////////// +// Attribute empty checks // +//////////////////////////// + +func (p *PrimaryConnectionResourceAssert) HasNoComment() *PrimaryConnectionResourceAssert { 
+ p.AddAssertion(assert.ValueNotSet("comment")) + return p +} + +/* +func (p *PrimaryConnectionResourceAssert) HasNoEnableFailoverToAccounts() *PrimaryConnectionResourceAssert { + p.AddAssertion(assert.ValueNotSet("enable_failover_to_accounts")) + return p +} +*/ + +func (p *PrimaryConnectionResourceAssert) HasNoFullyQualifiedName() *PrimaryConnectionResourceAssert { + p.AddAssertion(assert.ValueNotSet("fully_qualified_name")) + return p +} + +func (p *PrimaryConnectionResourceAssert) HasNoName() *PrimaryConnectionResourceAssert { + p.AddAssertion(assert.ValueNotSet("name")) + return p +} diff --git a/pkg/acceptance/bettertestspoc/assert/resourceassert/secondary_connection_resource_ext.go b/pkg/acceptance/bettertestspoc/assert/resourceassert/secondary_connection_resource_ext.go new file mode 100644 index 0000000000..4bf24106c7 --- /dev/null +++ b/pkg/acceptance/bettertestspoc/assert/resourceassert/secondary_connection_resource_ext.go @@ -0,0 +1,11 @@ +package resourceassert + +import ( + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/bettertestspoc/assert" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk" +) + +func (s *SecondaryConnectionResourceAssert) HasAsReplicaOfIdentifier(expected sdk.ExternalObjectIdentifier) *SecondaryConnectionResourceAssert { + s.AddAssertion(assert.ValueSet("as_replica_of", expected.Name())) + return s +} diff --git a/pkg/acceptance/bettertestspoc/assert/resourceassert/secondary_connection_resource_gen.go b/pkg/acceptance/bettertestspoc/assert/resourceassert/secondary_connection_resource_gen.go new file mode 100644 index 0000000000..363cd9c804 --- /dev/null +++ b/pkg/acceptance/bettertestspoc/assert/resourceassert/secondary_connection_resource_gen.go @@ -0,0 +1,87 @@ +// Code generated by assertions generator; DO NOT EDIT. 
+ +package resourceassert + +import ( + "testing" + + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/bettertestspoc/assert" +) + +type SecondaryConnectionResourceAssert struct { + *assert.ResourceAssert +} + +func SecondaryConnectionResource(t *testing.T, name string) *SecondaryConnectionResourceAssert { + t.Helper() + + return &SecondaryConnectionResourceAssert{ + ResourceAssert: assert.NewResourceAssert(name, "resource"), + } +} + +func ImportedSecondaryConnectionResource(t *testing.T, id string) *SecondaryConnectionResourceAssert { + t.Helper() + + return &SecondaryConnectionResourceAssert{ + ResourceAssert: assert.NewImportedResourceAssert(id, "imported resource"), + } +} + +/////////////////////////////////// +// Attribute value string checks // +/////////////////////////////////// + +func (s *SecondaryConnectionResourceAssert) HasAsReplicaOfString(expected string) *SecondaryConnectionResourceAssert { + s.AddAssertion(assert.ValueSet("as_replica_of", expected)) + return s +} + +func (s *SecondaryConnectionResourceAssert) HasCommentString(expected string) *SecondaryConnectionResourceAssert { + s.AddAssertion(assert.ValueSet("comment", expected)) + return s +} + +func (s *SecondaryConnectionResourceAssert) HasFullyQualifiedNameString(expected string) *SecondaryConnectionResourceAssert { + s.AddAssertion(assert.ValueSet("fully_qualified_name", expected)) + return s +} + +func (s *SecondaryConnectionResourceAssert) HasIsPrimaryString(expected string) *SecondaryConnectionResourceAssert { + s.AddAssertion(assert.ValueSet("is_primary", expected)) + return s +} + +func (s *SecondaryConnectionResourceAssert) HasNameString(expected string) *SecondaryConnectionResourceAssert { + s.AddAssertion(assert.ValueSet("name", expected)) + return s +} + +//////////////////////////// +// Attribute empty checks // +//////////////////////////// + +func (s *SecondaryConnectionResourceAssert) HasNoAsReplicaOf() *SecondaryConnectionResourceAssert { + s.AddAssertion(assert.ValueNotSet("as_replica_of")) + return s +} + +func (s *SecondaryConnectionResourceAssert) HasNoComment() *SecondaryConnectionResourceAssert { + s.AddAssertion(assert.ValueNotSet("comment")) + return s +} + +func (s *SecondaryConnectionResourceAssert) HasNoFullyQualifiedName() *SecondaryConnectionResourceAssert { + s.AddAssertion(assert.ValueNotSet("fully_qualified_name")) + return s +} + +func (s *SecondaryConnectionResourceAssert) HasNoIsPrimary() *SecondaryConnectionResourceAssert { + s.AddAssertion(assert.ValueNotSet("is_primary")) + return s +} + +func (s *SecondaryConnectionResourceAssert) HasNoName() *SecondaryConnectionResourceAssert { + s.AddAssertion(assert.ValueNotSet("name")) + return s +} diff --git a/pkg/acceptance/bettertestspoc/assert/resourceassert/tag_resource_ext.go b/pkg/acceptance/bettertestspoc/assert/resourceassert/tag_resource_ext.go new file mode 100644 index 0000000000..d9ee8d7190 --- /dev/null +++ b/pkg/acceptance/bettertestspoc/assert/resourceassert/tag_resource_ext.go @@ -0,0 +1,17 @@ +package resourceassert + +import ( + "fmt" + + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/bettertestspoc/assert" +) + +func (t *TagResourceAssert) HasMaskingPoliciesLength(len int) *TagResourceAssert { + t.AddAssertion(assert.ValueSet("masking_policies.#", fmt.Sprintf("%d", len))) + return t +} + +func (t *TagResourceAssert) HasAllowedValuesLength(len int) *TagResourceAssert { + t.AddAssertion(assert.ValueSet("allowed_values.#", fmt.Sprintf("%d", len))) + return t +} diff --git 
a/pkg/acceptance/bettertestspoc/assert/resourceassert/tag_resource_gen.go b/pkg/acceptance/bettertestspoc/assert/resourceassert/tag_resource_gen.go new file mode 100644 index 0000000000..27102d9656 --- /dev/null +++ b/pkg/acceptance/bettertestspoc/assert/resourceassert/tag_resource_gen.go @@ -0,0 +1,107 @@ +// Code generated by assertions generator; DO NOT EDIT. + +package resourceassert + +import ( + "testing" + + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/bettertestspoc/assert" +) + +type TagResourceAssert struct { + *assert.ResourceAssert +} + +func TagResource(t *testing.T, name string) *TagResourceAssert { + t.Helper() + + return &TagResourceAssert{ + ResourceAssert: assert.NewResourceAssert(name, "resource"), + } +} + +func ImportedTagResource(t *testing.T, id string) *TagResourceAssert { + t.Helper() + + return &TagResourceAssert{ + ResourceAssert: assert.NewImportedResourceAssert(id, "imported resource"), + } +} + +/////////////////////////////////// +// Attribute value string checks // +/////////////////////////////////// + +func (t *TagResourceAssert) HasAllowedValuesString(expected string) *TagResourceAssert { + t.AddAssertion(assert.ValueSet("allowed_values", expected)) + return t +} + +func (t *TagResourceAssert) HasCommentString(expected string) *TagResourceAssert { + t.AddAssertion(assert.ValueSet("comment", expected)) + return t +} + +func (t *TagResourceAssert) HasDatabaseString(expected string) *TagResourceAssert { + t.AddAssertion(assert.ValueSet("database", expected)) + return t +} + +func (t *TagResourceAssert) HasFullyQualifiedNameString(expected string) *TagResourceAssert { + t.AddAssertion(assert.ValueSet("fully_qualified_name", expected)) + return t +} + +func (t *TagResourceAssert) HasMaskingPolicyString(expected string) *TagResourceAssert { + t.AddAssertion(assert.ValueSet("masking_policy", expected)) + return t +} + +func (t *TagResourceAssert) HasNameString(expected string) *TagResourceAssert { + t.AddAssertion(assert.ValueSet("name", expected)) + return t +} + +func (t *TagResourceAssert) HasSchemaString(expected string) *TagResourceAssert { + t.AddAssertion(assert.ValueSet("schema", expected)) + return t +} + +//////////////////////////// +// Attribute empty checks // +//////////////////////////// + +func (t *TagResourceAssert) HasNoAllowedValues() *TagResourceAssert { + t.AddAssertion(assert.ValueNotSet("allowed_values")) + return t +} + +func (t *TagResourceAssert) HasNoComment() *TagResourceAssert { + t.AddAssertion(assert.ValueNotSet("comment")) + return t +} + +func (t *TagResourceAssert) HasNoDatabase() *TagResourceAssert { + t.AddAssertion(assert.ValueNotSet("database")) + return t +} + +func (t *TagResourceAssert) HasNoFullyQualifiedName() *TagResourceAssert { + t.AddAssertion(assert.ValueNotSet("fully_qualified_name")) + return t +} + +func (t *TagResourceAssert) HasNoMaskingPolicies() *TagResourceAssert { + t.AddAssertion(assert.ValueNotSet("masking_policies")) + return t +} + +func (t *TagResourceAssert) HasNoName() *TagResourceAssert { + t.AddAssertion(assert.ValueNotSet("name")) + return t +} + +func (t *TagResourceAssert) HasNoSchema() *TagResourceAssert { + t.AddAssertion(assert.ValueNotSet("schema")) + return t +} diff --git a/pkg/acceptance/bettertestspoc/assert/resourceassert/user_resource_ext.go b/pkg/acceptance/bettertestspoc/assert/resourceassert/user_resource_ext.go index 076102926f..80d6dd7841 100644 --- a/pkg/acceptance/bettertestspoc/assert/resourceassert/user_resource_ext.go +++ 
b/pkg/acceptance/bettertestspoc/assert/resourceassert/user_resource_ext.go @@ -3,6 +3,8 @@ package resourceassert import ( "strconv" + r "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/resources" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/bettertestspoc/assert" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk" ) @@ -17,6 +19,11 @@ func (u *UserResourceAssert) HasEmptyPassword() *UserResourceAssert { return u } +func (u *UserResourceAssert) HasNotEmptyPassword() *UserResourceAssert { + u.AddAssertion(assert.ValuePresent("password")) + return u +} + func (u *UserResourceAssert) HasMustChangePassword(expected bool) *UserResourceAssert { u.AddAssertion(assert.ValueSet("must_change_password", strconv.FormatBool(expected))) return u @@ -25,3 +32,29 @@ func (u *UserResourceAssert) HasMustChangePassword(expected bool) *UserResourceA func (u *UserResourceAssert) HasDefaultSecondaryRolesOption(expected sdk.SecondaryRolesOption) *UserResourceAssert { return u.HasDefaultSecondaryRolesOptionString(string(expected)) } + +func (u *UserResourceAssert) HasAllDefaults(userId sdk.AccountObjectIdentifier, expectedDefaultSecondaryRoles sdk.SecondaryRolesOption) *UserResourceAssert { + return u. + HasNameString(userId.Name()). + HasNoPassword(). + HasNoLoginName(). + HasNoDisplayName(). + HasNoFirstName(). + HasNoMiddleName(). + HasNoLastName(). + HasNoEmail(). + HasMustChangePasswordString(r.BooleanDefault). + HasDisabledString(r.BooleanDefault). + HasNoDaysToExpiry(). + HasMinsToUnlockString(r.IntDefaultString). + HasNoDefaultWarehouse(). + HasNoDefaultNamespace(). + HasNoDefaultRole(). + HasDefaultSecondaryRolesOption(expectedDefaultSecondaryRoles). + HasMinsToBypassMfaString(r.IntDefaultString). + HasNoRsaPublicKey(). + HasNoRsaPublicKey2(). + HasNoComment(). + HasDisableMfaString(r.BooleanDefault). 
+ HasFullyQualifiedNameString(userId.FullyQualifiedName()) +} diff --git a/pkg/acceptance/bettertestspoc/assert/resourceshowoutputassert/connection_show_output_ext.go b/pkg/acceptance/bettertestspoc/assert/resourceshowoutputassert/connection_show_output_ext.go new file mode 100644 index 0000000000..1e45b6794e --- /dev/null +++ b/pkg/acceptance/bettertestspoc/assert/resourceshowoutputassert/connection_show_output_ext.go @@ -0,0 +1,26 @@ +package resourceshowoutputassert + +import ( + "fmt" + + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/bettertestspoc/assert" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk" +) + +func (c *ConnectionShowOutputAssert) HasCreatedOnNotEmpty() *ConnectionShowOutputAssert { + c.AddAssertion(assert.ResourceShowOutputValuePresent("created_on")) + return c +} + +func (c *ConnectionShowOutputAssert) HasPrimaryIdentifier(expected sdk.ExternalObjectIdentifier) *ConnectionShowOutputAssert { + c.AddAssertion(assert.ResourceShowOutputValueSet("primary", expected.FullyQualifiedName())) + return c +} + +func (c *ConnectionShowOutputAssert) HasFailoverAllowedToAccounts(expected ...sdk.AccountIdentifier) *ConnectionShowOutputAssert { + c.AddAssertion(assert.ResourceShowOutputValueSet("failover_allowed_to_accounts.#", fmt.Sprintf("%d", len(expected)))) + for i, v := range expected { + c.AddAssertion(assert.ResourceShowOutputValueSet(fmt.Sprintf("failover_allowed_to_accounts.%d", i), v.Name())) + } + return c +} diff --git a/pkg/acceptance/bettertestspoc/assert/resourceshowoutputassert/connection_show_output_gen.go b/pkg/acceptance/bettertestspoc/assert/resourceshowoutputassert/connection_show_output_gen.go new file mode 100644 index 0000000000..d0e76f836a --- /dev/null +++ b/pkg/acceptance/bettertestspoc/assert/resourceshowoutputassert/connection_show_output_gen.go @@ -0,0 +1,104 @@ +// Code generated by assertions generator; DO NOT EDIT. 
+ +package resourceshowoutputassert + +import ( + "testing" + "time" + + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/bettertestspoc/assert" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk" +) + +// to ensure sdk package is used +var _ = sdk.Object{} + +type ConnectionShowOutputAssert struct { + *assert.ResourceAssert +} + +func ConnectionShowOutput(t *testing.T, name string) *ConnectionShowOutputAssert { + t.Helper() + + c := ConnectionShowOutputAssert{ + ResourceAssert: assert.NewResourceAssert(name, "show_output"), + } + c.AddAssertion(assert.ValueSet("show_output.#", "1")) + return &c +} + +func ImportedConnectionShowOutput(t *testing.T, id string) *ConnectionShowOutputAssert { + t.Helper() + + c := ConnectionShowOutputAssert{ + ResourceAssert: assert.NewImportedResourceAssert(id, "show_output"), + } + c.AddAssertion(assert.ValueSet("show_output.#", "1")) + return &c +} + +//////////////////////////// +// Attribute value checks // +//////////////////////////// + +func (c *ConnectionShowOutputAssert) HasRegionGroup(expected string) *ConnectionShowOutputAssert { + c.AddAssertion(assert.ResourceShowOutputValueSet("region_group", expected)) + return c +} + +func (c *ConnectionShowOutputAssert) HasSnowflakeRegion(expected string) *ConnectionShowOutputAssert { + c.AddAssertion(assert.ResourceShowOutputValueSet("snowflake_region", expected)) + return c +} + +func (c *ConnectionShowOutputAssert) HasCreatedOn(expected time.Time) *ConnectionShowOutputAssert { + c.AddAssertion(assert.ResourceShowOutputValueSet("created_on", expected.String())) + return c +} + +func (c *ConnectionShowOutputAssert) HasAccountName(expected string) *ConnectionShowOutputAssert { + c.AddAssertion(assert.ResourceShowOutputValueSet("account_name", expected)) + return c +} + +func (c *ConnectionShowOutputAssert) HasName(expected string) *ConnectionShowOutputAssert { + c.AddAssertion(assert.ResourceShowOutputValueSet("name", expected)) + return c +} + +func (c *ConnectionShowOutputAssert) HasComment(expected string) *ConnectionShowOutputAssert { + c.AddAssertion(assert.ResourceShowOutputValueSet("comment", expected)) + return c +} + +func (c *ConnectionShowOutputAssert) HasIsPrimary(expected bool) *ConnectionShowOutputAssert { + c.AddAssertion(assert.ResourceShowOutputBoolValueSet("is_primary", expected)) + return c +} + +func (c *ConnectionShowOutputAssert) HasPrimary(expected string) *ConnectionShowOutputAssert { + c.AddAssertion(assert.ResourceShowOutputValueSet("primary", expected)) + return c +} + +/* +func (c *ConnectionShowOutputAssert) HasFailoverAllowedToAccounts(expected []string) *ConnectionShowOutputAssert { + c.AddAssertion(assert.ResourceShowOutputValueSet("failover_allowed_to_accounts", expected)) + return c +} +*/ + +func (c *ConnectionShowOutputAssert) HasConnectionUrl(expected string) *ConnectionShowOutputAssert { + c.AddAssertion(assert.ResourceShowOutputValueSet("connection_url", expected)) + return c +} + +func (c *ConnectionShowOutputAssert) HasOrganizationName(expected string) *ConnectionShowOutputAssert { + c.AddAssertion(assert.ResourceShowOutputValueSet("organization_name", expected)) + return c +} + +func (c *ConnectionShowOutputAssert) HasAccountLocator(expected string) *ConnectionShowOutputAssert { + c.AddAssertion(assert.ResourceShowOutputValueSet("account_locator", expected)) + return c +} diff --git a/pkg/acceptance/bettertestspoc/assert/resourceshowoutputassert/tag_show_output_ext.go 
b/pkg/acceptance/bettertestspoc/assert/resourceshowoutputassert/tag_show_output_ext.go new file mode 100644 index 0000000000..9fb4de087b --- /dev/null +++ b/pkg/acceptance/bettertestspoc/assert/resourceshowoutputassert/tag_show_output_ext.go @@ -0,0 +1,26 @@ +package resourceshowoutputassert + +import ( + "fmt" + "strconv" + + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/bettertestspoc/assert" +) + +func (s *TagShowOutputAssert) HasCreatedOnNotEmpty() *TagShowOutputAssert { + s.AddAssertion(assert.ResourceShowOutputValuePresent("created_on")) + return s +} + +func (s *TagShowOutputAssert) HasAllowedValues(expected ...string) *TagShowOutputAssert { + s.AddAssertion(assert.ResourceShowOutputValueSet("allowed_values.#", strconv.FormatInt(int64(len(expected)), 10))) + for i := range expected { + s.AddAssertion(assert.ResourceShowOutputValueSet(fmt.Sprintf("allowed_values.%d", i), expected[i])) + } + return s +} + +func (s *TagShowOutputAssert) HasNoAllowedValues(expected ...string) *TagShowOutputAssert { + s.AddAssertion(assert.ResourceShowOutputValueSet("allowed_values.#", "0")) + return s +} diff --git a/pkg/acceptance/bettertestspoc/assert/resourceshowoutputassert/tag_show_output_gen.go b/pkg/acceptance/bettertestspoc/assert/resourceshowoutputassert/tag_show_output_gen.go index 486e08488c..fbf79c735f 100644 --- a/pkg/acceptance/bettertestspoc/assert/resourceshowoutputassert/tag_show_output_gen.go +++ b/pkg/acceptance/bettertestspoc/assert/resourceshowoutputassert/tag_show_output_gen.go @@ -71,11 +71,6 @@ func (t *TagShowOutputAssert) HasComment(expected string) *TagShowOutputAssert { return t } -// func (t *TagShowOutputAssert) HasAllowedValues(expected []string) *TagShowOutputAssert { -// t.AddAssertion(assert.ResourceShowOutputValueSet("allowed_values", expected)) -// return t -// } - func (t *TagShowOutputAssert) HasOwnerRoleType(expected string) *TagShowOutputAssert { t.AddAssertion(assert.ResourceShowOutputValueSet("owner_role_type", expected)) return t diff --git a/pkg/acceptance/bettertestspoc/config/model/primary_connection_model_ext.go b/pkg/acceptance/bettertestspoc/config/model/primary_connection_model_ext.go new file mode 100644 index 0000000000..0e7da1b1dc --- /dev/null +++ b/pkg/acceptance/bettertestspoc/config/model/primary_connection_model_ext.go @@ -0,0 +1,17 @@ +package model + +import ( + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk" + "github.com/hashicorp/terraform-plugin-testing/config" +) + +func (c *PrimaryConnectionModel) WithEnableFailover(toAccount ...sdk.AccountIdentifier) *PrimaryConnectionModel { + variables := make([]config.Variable, 0) + for _, v := range toAccount { + variables = append(variables, config.StringVariable(v.Name())) + } + + c.EnableFailoverToAccounts = config.ListVariable(variables...) + + return c +} diff --git a/pkg/acceptance/bettertestspoc/config/model/primary_connection_model_gen.go b/pkg/acceptance/bettertestspoc/config/model/primary_connection_model_gen.go new file mode 100644 index 0000000000..f8f29bf1cf --- /dev/null +++ b/pkg/acceptance/bettertestspoc/config/model/primary_connection_model_gen.go @@ -0,0 +1,85 @@ +// Code generated by config model builder generator; DO NOT EDIT. 
+ +package model + +import ( + tfconfig "github.com/hashicorp/terraform-plugin-testing/config" + + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/bettertestspoc/config" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/provider/resources" +) + +type PrimaryConnectionModel struct { + Comment tfconfig.Variable `json:"comment,omitempty"` + EnableFailoverToAccounts tfconfig.Variable `json:"enable_failover_to_accounts,omitempty"` + FullyQualifiedName tfconfig.Variable `json:"fully_qualified_name,omitempty"` + Name tfconfig.Variable `json:"name,omitempty"` + + *config.ResourceModelMeta +} + +///////////////////////////////////////////////// +// Basic builders (resource name and required) // +///////////////////////////////////////////////// + +func PrimaryConnection( + resourceName string, + name string, +) *PrimaryConnectionModel { + p := &PrimaryConnectionModel{ResourceModelMeta: config.Meta(resourceName, resources.PrimaryConnection)} + p.WithName(name) + return p +} + +func PrimaryConnectionWithDefaultMeta( + name string, +) *PrimaryConnectionModel { + p := &PrimaryConnectionModel{ResourceModelMeta: config.DefaultMeta(resources.PrimaryConnection)} + p.WithName(name) + return p +} + +///////////////////////////////// +// below all the proper values // +///////////////////////////////// + +func (p *PrimaryConnectionModel) WithComment(comment string) *PrimaryConnectionModel { + p.Comment = tfconfig.StringVariable(comment) + return p +} + +// enable_failover_to_accounts attribute type is not yet supported, so WithEnableFailoverToAccounts can't be generated + +func (p *PrimaryConnectionModel) WithFullyQualifiedName(fullyQualifiedName string) *PrimaryConnectionModel { + p.FullyQualifiedName = tfconfig.StringVariable(fullyQualifiedName) + return p +} + +func (p *PrimaryConnectionModel) WithName(name string) *PrimaryConnectionModel { + p.Name = tfconfig.StringVariable(name) + return p +} + +////////////////////////////////////////// +// below it's possible to set any value // +////////////////////////////////////////// + +func (p *PrimaryConnectionModel) WithCommentValue(value tfconfig.Variable) *PrimaryConnectionModel { + p.Comment = value + return p +} + +func (p *PrimaryConnectionModel) WithEnableFailoverToAccountsValue(value tfconfig.Variable) *PrimaryConnectionModel { + p.EnableFailoverToAccounts = value + return p +} + +func (p *PrimaryConnectionModel) WithFullyQualifiedNameValue(value tfconfig.Variable) *PrimaryConnectionModel { + p.FullyQualifiedName = value + return p +} + +func (p *PrimaryConnectionModel) WithNameValue(value tfconfig.Variable) *PrimaryConnectionModel { + p.Name = value + return p +} diff --git a/pkg/acceptance/bettertestspoc/config/model/secondary_connection_model_gen.go b/pkg/acceptance/bettertestspoc/config/model/secondary_connection_model_gen.go new file mode 100644 index 0000000000..f1ee4f7024 --- /dev/null +++ b/pkg/acceptance/bettertestspoc/config/model/secondary_connection_model_gen.go @@ -0,0 +1,103 @@ +// Code generated by config model builder generator; DO NOT EDIT. 
+ +package model + +import ( + tfconfig "github.com/hashicorp/terraform-plugin-testing/config" + + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/bettertestspoc/config" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/provider/resources" +) + +type SecondaryConnectionModel struct { + AsReplicaOf tfconfig.Variable `json:"as_replica_of,omitempty"` + Comment tfconfig.Variable `json:"comment,omitempty"` + FullyQualifiedName tfconfig.Variable `json:"fully_qualified_name,omitempty"` + IsPrimary tfconfig.Variable `json:"is_primary,omitempty"` + Name tfconfig.Variable `json:"name,omitempty"` + + *config.ResourceModelMeta +} + +///////////////////////////////////////////////// +// Basic builders (resource name and required) // +///////////////////////////////////////////////// + +func SecondaryConnection( + resourceName string, + asReplicaOf string, + name string, +) *SecondaryConnectionModel { + s := &SecondaryConnectionModel{ResourceModelMeta: config.Meta(resourceName, resources.SecondaryConnection)} + s.WithAsReplicaOf(asReplicaOf) + s.WithName(name) + return s +} + +func SecondaryConnectionWithDefaultMeta( + asReplicaOf string, + name string, +) *SecondaryConnectionModel { + s := &SecondaryConnectionModel{ResourceModelMeta: config.DefaultMeta(resources.SecondaryConnection)} + s.WithAsReplicaOf(asReplicaOf) + s.WithName(name) + return s +} + +///////////////////////////////// +// below all the proper values // +///////////////////////////////// + +func (s *SecondaryConnectionModel) WithAsReplicaOf(asReplicaOf string) *SecondaryConnectionModel { + s.AsReplicaOf = tfconfig.StringVariable(asReplicaOf) + return s +} + +func (s *SecondaryConnectionModel) WithComment(comment string) *SecondaryConnectionModel { + s.Comment = tfconfig.StringVariable(comment) + return s +} + +func (s *SecondaryConnectionModel) WithFullyQualifiedName(fullyQualifiedName string) *SecondaryConnectionModel { + s.FullyQualifiedName = tfconfig.StringVariable(fullyQualifiedName) + return s +} + +func (s *SecondaryConnectionModel) WithIsPrimary(isPrimary bool) *SecondaryConnectionModel { + s.IsPrimary = tfconfig.BoolVariable(isPrimary) + return s +} + +func (s *SecondaryConnectionModel) WithName(name string) *SecondaryConnectionModel { + s.Name = tfconfig.StringVariable(name) + return s +} + +////////////////////////////////////////// +// below it's possible to set any value // +////////////////////////////////////////// + +func (s *SecondaryConnectionModel) WithAsReplicaOfValue(value tfconfig.Variable) *SecondaryConnectionModel { + s.AsReplicaOf = value + return s +} + +func (s *SecondaryConnectionModel) WithCommentValue(value tfconfig.Variable) *SecondaryConnectionModel { + s.Comment = value + return s +} + +func (s *SecondaryConnectionModel) WithFullyQualifiedNameValue(value tfconfig.Variable) *SecondaryConnectionModel { + s.FullyQualifiedName = value + return s +} + +func (s *SecondaryConnectionModel) WithIsPrimaryValue(value tfconfig.Variable) *SecondaryConnectionModel { + s.IsPrimary = value + return s +} + +func (s *SecondaryConnectionModel) WithNameValue(value tfconfig.Variable) *SecondaryConnectionModel { + s.Name = value + return s +} diff --git a/pkg/acceptance/bettertestspoc/config/model/tag_model_ext.go b/pkg/acceptance/bettertestspoc/config/model/tag_model_ext.go new file mode 100644 index 0000000000..81fd60aa63 --- /dev/null +++ b/pkg/acceptance/bettertestspoc/config/model/tag_model_ext.go @@ -0,0 +1,26 @@ +package model + +import ( + 
"github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk" + tfconfig "github.com/hashicorp/terraform-plugin-testing/config" +) + +func (t *TagModel) WithAllowedValues(allowedValues ...string) *TagModel { + allowedValuesStringVariables := make([]tfconfig.Variable, len(allowedValues)) + for i, v := range allowedValues { + allowedValuesStringVariables[i] = tfconfig.StringVariable(v) + } + + t.AllowedValues = tfconfig.SetVariable(allowedValuesStringVariables...) + return t +} + +func (t *TagModel) WithMaskingPolicies(maskingPolicies ...sdk.SchemaObjectIdentifier) *TagModel { + maskingPoliciesStringVariables := make([]tfconfig.Variable, len(maskingPolicies)) + for i, v := range maskingPolicies { + maskingPoliciesStringVariables[i] = tfconfig.StringVariable(v.FullyQualifiedName()) + } + + t.MaskingPolicies = tfconfig.SetVariable(maskingPoliciesStringVariables...) + return t +} diff --git a/pkg/acceptance/bettertestspoc/config/model/tag_model_gen.go b/pkg/acceptance/bettertestspoc/config/model/tag_model_gen.go new file mode 100644 index 0000000000..91b5bb9eff --- /dev/null +++ b/pkg/acceptance/bettertestspoc/config/model/tag_model_gen.go @@ -0,0 +1,123 @@ +// Code generated by config model builder generator; DO NOT EDIT. + +package model + +import ( + tfconfig "github.com/hashicorp/terraform-plugin-testing/config" + + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/bettertestspoc/config" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/provider/resources" +) + +type TagModel struct { + AllowedValues tfconfig.Variable `json:"allowed_values,omitempty"` + Comment tfconfig.Variable `json:"comment,omitempty"` + Database tfconfig.Variable `json:"database,omitempty"` + FullyQualifiedName tfconfig.Variable `json:"fully_qualified_name,omitempty"` + MaskingPolicies tfconfig.Variable `json:"masking_policies,omitempty"` + Name tfconfig.Variable `json:"name,omitempty"` + Schema tfconfig.Variable `json:"schema,omitempty"` + + *config.ResourceModelMeta +} + +///////////////////////////////////////////////// +// Basic builders (resource name and required) // +///////////////////////////////////////////////// + +func Tag( + resourceName string, + database string, + name string, + schema string, +) *TagModel { + t := &TagModel{ResourceModelMeta: config.Meta(resourceName, resources.Tag)} + t.WithDatabase(database) + t.WithName(name) + t.WithSchema(schema) + return t +} + +func TagWithDefaultMeta( + database string, + name string, + schema string, +) *TagModel { + t := &TagModel{ResourceModelMeta: config.DefaultMeta(resources.Tag)} + t.WithDatabase(database) + t.WithName(name) + t.WithSchema(schema) + return t +} + +///////////////////////////////// +// below all the proper values // +///////////////////////////////// + +// allowed_values attribute type is not yet supported, so WithAllowedValues can't be generated + +func (t *TagModel) WithComment(comment string) *TagModel { + t.Comment = tfconfig.StringVariable(comment) + return t +} + +func (t *TagModel) WithDatabase(database string) *TagModel { + t.Database = tfconfig.StringVariable(database) + return t +} + +func (t *TagModel) WithFullyQualifiedName(fullyQualifiedName string) *TagModel { + t.FullyQualifiedName = tfconfig.StringVariable(fullyQualifiedName) + return t +} + +// masking_policy attribute type is not yet supported, so WithMaskingPolicy can't be generated + +func (t *TagModel) WithName(name string) *TagModel { + t.Name = tfconfig.StringVariable(name) + return t +} + +func (t *TagModel) WithSchema(schema 
string) *TagModel { + t.Schema = tfconfig.StringVariable(schema) + return t +} + +////////////////////////////////////////// +// below it's possible to set any value // +////////////////////////////////////////// + +func (t *TagModel) WithAllowedValuesValue(value tfconfig.Variable) *TagModel { + t.AllowedValues = value + return t +} + +func (t *TagModel) WithCommentValue(value tfconfig.Variable) *TagModel { + t.Comment = value + return t +} + +func (t *TagModel) WithDatabaseValue(value tfconfig.Variable) *TagModel { + t.Database = value + return t +} + +func (t *TagModel) WithFullyQualifiedNameValue(value tfconfig.Variable) *TagModel { + t.FullyQualifiedName = value + return t +} + +func (t *TagModel) WithMaskingPoliciesValue(value tfconfig.Variable) *TagModel { + t.MaskingPolicies = value + return t +} + +func (t *TagModel) WithNameValue(value tfconfig.Variable) *TagModel { + t.Name = value + return t +} + +func (t *TagModel) WithSchemaValue(value tfconfig.Variable) *TagModel { + t.Schema = value + return t +} diff --git a/pkg/acceptance/check_destroy.go b/pkg/acceptance/check_destroy.go index f9fd627104..57145b726f 100644 --- a/pkg/acceptance/check_destroy.go +++ b/pkg/acceptance/check_destroy.go @@ -102,6 +102,9 @@ var showByIdFunctions = map[resources.Resource]showByIdFunc{ resources.AuthenticationPolicy: func(ctx context.Context, client *sdk.Client, id sdk.ObjectIdentifier) error { return runShowById(ctx, id, client.AuthenticationPolicies.ShowByID) }, + resources.PrimaryConnection: func(ctx context.Context, client *sdk.Client, id sdk.ObjectIdentifier) error { + return runShowById(ctx, id, client.Connections.ShowByID) + }, resources.CortexSearchService: func(ctx context.Context, client *sdk.Client, id sdk.ObjectIdentifier) error { return runShowById(ctx, id, client.CortexSearchServices.ShowByID) }, @@ -195,6 +198,9 @@ var showByIdFunctions = map[resources.Resource]showByIdFunc{ resources.ScimSecurityIntegration: func(ctx context.Context, client *sdk.Client, id sdk.ObjectIdentifier) error { return runShowById(ctx, id, client.SecurityIntegrations.ShowByID) }, + resources.SecondaryConnection: func(ctx context.Context, client *sdk.Client, id sdk.ObjectIdentifier) error { + return runShowById(ctx, id, client.Connections.ShowByID) + }, resources.SecondaryDatabase: func(ctx context.Context, client *sdk.Client, id sdk.ObjectIdentifier) error { return runShowById(ctx, id, client.Databases.ShowByID) }, diff --git a/pkg/acceptance/helpers/compute_pool_client.go b/pkg/acceptance/helpers/compute_pool_client.go new file mode 100644 index 0000000000..08affb0a2b --- /dev/null +++ b/pkg/acceptance/helpers/compute_pool_client.go @@ -0,0 +1,47 @@ +package helpers + +import ( + "context" + "fmt" + "testing" + + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk" + "github.com/stretchr/testify/require" +) + +// TODO [SNOW-1790174]: change raw sqls to proper client +type ComputePoolClient struct { + context *TestClientContext + ids *IdsGenerator +} + +func NewComputePoolClient(context *TestClientContext, idsGenerator *IdsGenerator) *ComputePoolClient { + return &ComputePoolClient{ + context: context, + ids: idsGenerator, + } +} + +func (c *ComputePoolClient) client() *sdk.Client { + return c.context.client +} + +func (c *ComputePoolClient) CreateComputePool(t *testing.T) (sdk.AccountObjectIdentifier, func()) { + t.Helper() + ctx := context.Background() + + id := c.ids.RandomAccountObjectIdentifier() + _, err := c.client().ExecForTests(ctx, fmt.Sprintf(`CREATE COMPUTE POOL %s MIN_NODES = 1 
MAX_NODES = 1 INSTANCE_FAMILY = CPU_X64_XS`, id.FullyQualifiedName())) + require.NoError(t, err) + return id, c.DropComputePoolFunc(t, id) +} + +func (c *ComputePoolClient) DropComputePoolFunc(t *testing.T, id sdk.AccountObjectIdentifier) func() { + t.Helper() + ctx := context.Background() + + return func() { + _, err := c.client().ExecForTests(ctx, fmt.Sprintf(`DROP COMPUTE POOL IF EXISTS %s`, id.FullyQualifiedName())) + require.NoError(t, err) + } +} diff --git a/pkg/acceptance/helpers/connection_client.go b/pkg/acceptance/helpers/connection_client.go index 24fdf9384a..72d37924b5 100644 --- a/pkg/acceptance/helpers/connection_client.go +++ b/pkg/acceptance/helpers/connection_client.go @@ -2,6 +2,8 @@ package helpers import ( "context" + "fmt" + "strings" "testing" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk" @@ -46,7 +48,7 @@ func (c *ConnectionClient) CreateReplication(t *testing.T, id sdk.AccountObjectI return connection, c.DropFunc(t, id) } -func (c *ConnectionClient) Alter(t *testing.T, id sdk.AccountObjectIdentifier, req *sdk.AlterConnectionRequest) { +func (c *ConnectionClient) Alter(t *testing.T, req *sdk.AlterConnectionRequest) { t.Helper() ctx := context.Background() @@ -70,3 +72,7 @@ func (c *ConnectionClient) Show(t *testing.T, id sdk.AccountObjectIdentifier) (* return c.client().ShowByID(ctx, id) } + +func (c *ConnectionClient) GetConnectionUrl(organizationName, objectName string) string { + return strings.ToLower(fmt.Sprintf("%s-%s.snowflakecomputing.com", organizationName, objectName)) +} diff --git a/pkg/acceptance/helpers/database_client.go b/pkg/acceptance/helpers/database_client.go index 640dbb896f..a0ea93ebab 100644 --- a/pkg/acceptance/helpers/database_client.go +++ b/pkg/acceptance/helpers/database_client.go @@ -48,14 +48,6 @@ func (c *DatabaseClient) CreateDatabaseWithOptions(t *testing.T, id sdk.AccountO return database, c.DropDatabaseFunc(t, id) } -func (c *DatabaseClient) Alter(t *testing.T, id sdk.AccountObjectIdentifier, opts *sdk.AlterDatabaseOptions) { - t.Helper() - ctx := context.Background() - - err := c.client().Alter(ctx, id, opts) - require.NoError(t, err) -} - func (c *DatabaseClient) DropDatabaseFunc(t *testing.T, id sdk.AccountObjectIdentifier) func() { t.Helper() return func() { require.NoError(t, c.DropDatabase(t, id)) } @@ -192,3 +184,11 @@ func (c *DatabaseClient) ShowAllReplicationDatabases(t *testing.T) ([]sdk.Replic return c.context.client.ReplicationFunctions.ShowReplicationDatabases(ctx, nil) } + +func (c *DatabaseClient) Alter(t *testing.T, id sdk.AccountObjectIdentifier, opts *sdk.AlterDatabaseOptions) { + t.Helper() + ctx := context.Background() + + err := c.client().Alter(ctx, id, opts) + require.NoError(t, err) +} diff --git a/pkg/acceptance/helpers/event_table_client..go b/pkg/acceptance/helpers/event_table_client..go new file mode 100644 index 0000000000..0c8e5d529d --- /dev/null +++ b/pkg/acceptance/helpers/event_table_client..go @@ -0,0 +1,49 @@ +package helpers + +import ( + "context" + "testing" + + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk" + "github.com/stretchr/testify/require" +) + +type EventTableClient struct { + context *TestClientContext + ids *IdsGenerator +} + +func NewEventTableClient(context *TestClientContext, idsGenerator *IdsGenerator) *EventTableClient { + return &EventTableClient{ + context: context, + ids: idsGenerator, + } +} + +func (c *EventTableClient) client() sdk.EventTables { + return c.context.client.EventTables +} + +func (c *EventTableClient) Create(t 
*testing.T) (*sdk.EventTable, func()) { + t.Helper() + ctx := context.Background() + + id := c.ids.RandomSchemaObjectIdentifier() + err := c.client().Create(ctx, sdk.NewCreateEventTableRequest(id)) + require.NoError(t, err) + + integration, err := c.client().ShowByID(ctx, id) + require.NoError(t, err) + + return integration, c.DropFunc(t, id) +} + +func (c *EventTableClient) DropFunc(t *testing.T, id sdk.SchemaObjectIdentifier) func() { + t.Helper() + ctx := context.Background() + + return func() { + err := c.client().Drop(ctx, sdk.NewDropEventTableRequest(id).WithIfExists(sdk.Bool(true))) + require.NoError(t, err) + } +} diff --git a/pkg/acceptance/helpers/function_client.go b/pkg/acceptance/helpers/function_client.go index 421623b17d..5b23afaf9f 100644 --- a/pkg/acceptance/helpers/function_client.go +++ b/pkg/acceptance/helpers/function_client.go @@ -64,12 +64,20 @@ func (c *FunctionClient) CreateWithRequest(t *testing.T, id sdk.SchemaObjectIden err := c.client().CreateForSQL(ctx, req.WithArguments(argumentRequests)) require.NoError(t, err) - t.Cleanup(func() { - require.NoError(t, c.context.client.Functions.Drop(ctx, sdk.NewDropFunctionRequest(id).WithIfExists(true))) - }) + t.Cleanup(c.DropFunctionFunc(t, id)) function, err := c.client().ShowByID(ctx, id) require.NoError(t, err) return function } + +func (c *FunctionClient) DropFunctionFunc(t *testing.T, id sdk.SchemaObjectIdentifierWithArguments) func() { + t.Helper() + ctx := context.Background() + + return func() { + err := c.client().Drop(ctx, sdk.NewDropFunctionRequest(id).WithIfExists(true)) + require.NoError(t, err) + } +} diff --git a/pkg/acceptance/helpers/notification_integration_client.go b/pkg/acceptance/helpers/notification_integration_client.go index e22f498140..7b0717e43c 100644 --- a/pkg/acceptance/helpers/notification_integration_client.go +++ b/pkg/acceptance/helpers/notification_integration_client.go @@ -8,9 +8,6 @@ import ( "github.com/stretchr/testify/require" ) -// TODO [SNOW-1017580]: replace with real value -const gcpPubsubSubscriptionName = "projects/project-1234/subscriptions/sub2" - type NotificationIntegrationClient struct { context *TestClientContext ids *IdsGenerator @@ -29,11 +26,21 @@ func (c *NotificationIntegrationClient) client() sdk.NotificationIntegrations { func (c *NotificationIntegrationClient) Create(t *testing.T) (*sdk.NotificationIntegration, func()) { t.Helper() - return c.CreateWithRequest(t, sdk.NewCreateNotificationIntegrationRequest(c.ids.RandomAccountObjectIdentifier(), true). - WithAutomatedDataLoadsParams(sdk.NewAutomatedDataLoadsParamsRequest(). - WithGoogleAutoParams(sdk.NewGoogleAutoParamsRequest(gcpPubsubSubscriptionName)), - ), - ) + ctx := context.Background() + + id := c.ids.RandomAccountObjectIdentifier() + + // TODO [SNOW-1007539]: use email of our service user + request := sdk.NewCreateNotificationIntegrationRequest(id, true). 
+ WithEmailParams(sdk.NewEmailParamsRequest().WithAllowedRecipients([]sdk.NotificationIntegrationAllowedRecipient{{Email: "artur.sawicki@snowflake.com"}})) + + err := c.client().Create(ctx, request) + require.NoError(t, err) + + integration, err := c.client().ShowByID(ctx, id) + require.NoError(t, err) + + return integration, c.DropFunc(t, id) } func (c *NotificationIntegrationClient) CreateWithRequest(t *testing.T, request *sdk.CreateNotificationIntegrationRequest) (*sdk.NotificationIntegration, func()) { diff --git a/pkg/acceptance/helpers/random/certs.go b/pkg/acceptance/helpers/random/certs.go index eb0cf4aaf9..e8a7697d70 100644 --- a/pkg/acceptance/helpers/random/certs.go +++ b/pkg/acceptance/helpers/random/certs.go @@ -2,6 +2,7 @@ package random import ( "bytes" + "crypto" "crypto/rand" "crypto/rsa" "crypto/sha256" @@ -16,6 +17,7 @@ import ( "time" "github.com/stretchr/testify/require" + "github.com/youmark/pkcs8" ) // GenerateX509 returns base64 encoded certificate on a single line without the leading -----BEGIN CERTIFICATE----- and ending -----END CERTIFICATE----- markers. @@ -43,8 +45,7 @@ func GenerateX509(t *testing.T) string { // GenerateRSAPublicKey returns an RSA public key without BEGIN and END markers, and key's hash. func GenerateRSAPublicKey(t *testing.T) (string, string) { t.Helper() - key, err := rsa.GenerateKey(rand.Reader, 2048) - require.NoError(t, err) + key := GenerateRSAPrivateKey(t) pub := key.Public() b, err := x509.MarshalPKIXPublicKey(pub.(*rsa.PublicKey)) @@ -52,6 +53,42 @@ func GenerateRSAPublicKey(t *testing.T) (string, string) { return encode(t, "RSA PUBLIC KEY", b), hash(t, b) } +// GenerateRSAPrivateKey returns an RSA private key. +func GenerateRSAPrivateKey(t *testing.T) *rsa.PrivateKey { + t.Helper() + key, err := rsa.GenerateKey(rand.Reader, 2048) + require.NoError(t, err) + return key +} + +// GenerateRSAPrivateKeyEncrypted returns a PEM-encoded pair of unencrypted and encrypted key with a given password +func GenerateRSAPrivateKeyEncrypted(t *testing.T, password string) (unencrypted, encrypted string) { + t.Helper() + rsaPrivateKey := GenerateRSAPrivateKey(t) + unencryptedDer, err := x509.MarshalPKCS8PrivateKey(rsaPrivateKey) + require.NoError(t, err) + privBlock := pem.Block{ + Type: "PRIVATE KEY", + Bytes: unencryptedDer, + } + unencrypted = string(pem.EncodeToMemory(&privBlock)) + + encryptedDer, err := pkcs8.MarshalPrivateKey(rsaPrivateKey, []byte(password), &pkcs8.Opts{ + Cipher: pkcs8.AES256CBC, + KDFOpts: pkcs8.PBKDF2Opts{ + SaltSize: 16, IterationCount: 2000, HMACHash: crypto.SHA256, + }, + }) + require.NoError(t, err) + privEncryptedBlock := pem.Block{ + Type: "ENCRYPTED PRIVATE KEY", + Bytes: encryptedDer, + } + encrypted = string(pem.EncodeToMemory(&privEncryptedBlock)) + + return +} + func hash(t *testing.T, b []byte) string { t.Helper() hash := sha256.Sum256(b) diff --git a/pkg/acceptance/helpers/schema_client.go b/pkg/acceptance/helpers/schema_client.go index c20f7a58fb..5373bb1156 100644 --- a/pkg/acceptance/helpers/schema_client.go +++ b/pkg/acceptance/helpers/schema_client.go @@ -55,14 +55,6 @@ func (c *SchemaClient) CreateSchemaWithOpts(t *testing.T, id sdk.DatabaseObjectI return schema, c.DropSchemaFunc(t, id) } -func (c *SchemaClient) Alter(t *testing.T, id sdk.DatabaseObjectIdentifier, opts *sdk.AlterSchemaOptions) { - t.Helper() - ctx := context.Background() - - err := c.client().Alter(ctx, id, opts) - require.NoError(t, err) -} - func (c *SchemaClient) DropSchemaFunc(t *testing.T, id sdk.DatabaseObjectIdentifier) func() { 
t.Helper() ctx := context.Background() @@ -108,3 +100,11 @@ func (c *SchemaClient) ShowWithOptions(t *testing.T, opts *sdk.ShowSchemaOptions require.NoError(t, err) return schemas } + +func (c *SchemaClient) Alter(t *testing.T, id sdk.DatabaseObjectIdentifier, opts *sdk.AlterSchemaOptions) { + t.Helper() + ctx := context.Background() + + err := c.client().Alter(ctx, id, opts) + require.NoError(t, err) +} diff --git a/pkg/acceptance/helpers/security_integration_client.go b/pkg/acceptance/helpers/security_integration_client.go index 5931b8fa9d..85115cf084 100644 --- a/pkg/acceptance/helpers/security_integration_client.go +++ b/pkg/acceptance/helpers/security_integration_client.go @@ -25,24 +25,46 @@ func (c *SecurityIntegrationClient) client() sdk.SecurityIntegrations { return c.context.client.SecurityIntegrations } -func (c *SecurityIntegrationClient) UpdateExternalOauth(t *testing.T, request *sdk.AlterExternalOauthSecurityIntegrationRequest) { +func (c *SecurityIntegrationClient) CreateApiAuthenticationWithClientCredentialsFlow(t *testing.T) (*sdk.SecurityIntegration, func()) { t.Helper() ctx := context.Background() - err := c.client().AlterExternalOauth(ctx, request) + id := c.ids.RandomAccountObjectIdentifier() + request := sdk.NewCreateApiAuthenticationWithClientCredentialsFlowSecurityIntegrationRequest(id, false, "foo", "foo") + err := c.client().CreateApiAuthenticationWithClientCredentialsFlow(ctx, request) + require.NoError(t, err) + + si, err := c.client().ShowByID(ctx, request.GetName()) require.NoError(t, err) + + return si, c.DropSecurityIntegrationFunc(t, request.GetName()) } -func (c *SecurityIntegrationClient) CreateSaml2(t *testing.T, id sdk.AccountObjectIdentifier) (*sdk.SecurityIntegration, func()) { +func (c *SecurityIntegrationClient) CreateApiAuthenticationWithAuthorizationCodeGrantFlow(t *testing.T) (*sdk.SecurityIntegration, func()) { t.Helper() - return c.CreateSaml2WithRequest(t, sdk.NewCreateSaml2SecurityIntegrationRequest(id, c.ids.Alpha(), "https://example.com", "Custom", random.GenerateX509(t))) + ctx := context.Background() + + id := c.ids.RandomAccountObjectIdentifier() + request := sdk.NewCreateApiAuthenticationWithAuthorizationCodeGrantFlowSecurityIntegrationRequest(id, false, "foo", "foo") + err := c.client().CreateApiAuthenticationWithAuthorizationCodeGrantFlow(ctx, request) + require.NoError(t, err) + + si, err := c.client().ShowByID(ctx, request.GetName()) + require.NoError(t, err) + + return si, c.DropSecurityIntegrationFunc(t, request.GetName()) } -func (c *SecurityIntegrationClient) CreateSaml2WithRequest(t *testing.T, request *sdk.CreateSaml2SecurityIntegrationRequest) (*sdk.SecurityIntegration, func()) { +func (c *SecurityIntegrationClient) CreateExternalOauth(t *testing.T) (*sdk.SecurityIntegration, func()) { t.Helper() ctx := context.Background() - err := c.client().CreateSaml2(ctx, request) + id := c.ids.RandomAccountObjectIdentifier() + issuer := random.String() + request := sdk.NewCreateExternalOauthSecurityIntegrationRequest(id, false, sdk.ExternalOauthSecurityIntegrationTypeCustom, + issuer, []sdk.TokenUserMappingClaim{{Claim: "foo"}}, sdk.ExternalOauthSecurityIntegrationSnowflakeUserMappingAttributeLoginName, + ).WithExternalOauthJwsKeysUrl([]sdk.JwsKeysUrl{{JwsKeyUrl: "http://example.com"}}) + err := c.client().CreateExternalOauth(ctx, request) require.NoError(t, err) si, err := c.client().ShowByID(ctx, request.GetName()) @@ -51,16 +73,29 @@ func (c *SecurityIntegrationClient) CreateSaml2WithRequest(t *testing.T, request return si, 
c.DropSecurityIntegrationFunc(t, request.GetName()) } -func (c *SecurityIntegrationClient) CreateScim(t *testing.T) (*sdk.SecurityIntegration, func()) { +func (c *SecurityIntegrationClient) CreateOauthForPartnerApplications(t *testing.T) (*sdk.SecurityIntegration, func()) { t.Helper() - return c.CreateScimWithRequest(t, sdk.NewCreateScimSecurityIntegrationRequest(c.ids.RandomAccountObjectIdentifier(), sdk.ScimSecurityIntegrationScimClientGeneric, sdk.ScimSecurityIntegrationRunAsRoleGenericScimProvisioner)) + ctx := context.Background() + + id := c.ids.RandomAccountObjectIdentifier() + request := sdk.NewCreateOauthForPartnerApplicationsSecurityIntegrationRequest(id, sdk.OauthSecurityIntegrationClientLooker). + WithOauthRedirectUri("http://example.com") + err := c.client().CreateOauthForPartnerApplications(ctx, request) + require.NoError(t, err) + + si, err := c.client().ShowByID(ctx, request.GetName()) + require.NoError(t, err) + + return si, c.DropSecurityIntegrationFunc(t, request.GetName()) } -func (c *SecurityIntegrationClient) CreateApiAuthenticationClientCredentialsWithRequest(t *testing.T, request *sdk.CreateApiAuthenticationWithClientCredentialsFlowSecurityIntegrationRequest) (*sdk.SecurityIntegration, func()) { +func (c *SecurityIntegrationClient) CreateOauthForCustomClients(t *testing.T) (*sdk.SecurityIntegration, func()) { t.Helper() ctx := context.Background() - err := c.client().CreateApiAuthenticationWithClientCredentialsFlow(ctx, request) + id := c.ids.RandomAccountObjectIdentifier() + request := sdk.NewCreateOauthForCustomClientsSecurityIntegrationRequest(id, sdk.OauthSecurityIntegrationClientTypePublic, "https://example.com") + err := c.client().CreateOauthForCustomClients(ctx, request) require.NoError(t, err) si, err := c.client().ShowByID(ctx, request.GetName()) @@ -69,25 +104,41 @@ func (c *SecurityIntegrationClient) CreateApiAuthenticationClientCredentialsWith return si, c.DropSecurityIntegrationFunc(t, request.GetName()) } -func (c *SecurityIntegrationClient) UpdateSaml2(t *testing.T, request *sdk.AlterSaml2SecurityIntegrationRequest) { +func (c *SecurityIntegrationClient) CreateSaml2(t *testing.T) (*sdk.SecurityIntegration, func()) { + t.Helper() + id := c.ids.RandomAccountObjectIdentifier() + return c.CreateSaml2WithRequest(t, sdk.NewCreateSaml2SecurityIntegrationRequest(id, c.ids.Alpha(), "https://example.com", "Custom", random.GenerateX509(t))) +} + +func (c *SecurityIntegrationClient) CreateSaml2WithRequest(t *testing.T, request *sdk.CreateSaml2SecurityIntegrationRequest) (*sdk.SecurityIntegration, func()) { t.Helper() ctx := context.Background() - err := c.client().AlterSaml2(ctx, request) + err := c.client().CreateSaml2(ctx, request) require.NoError(t, err) + + si, err := c.client().ShowByID(ctx, request.GetName()) + require.NoError(t, err) + + return si, c.DropSecurityIntegrationFunc(t, request.GetName()) } -func (c *SecurityIntegrationClient) UpdateSaml2ForceAuthn(t *testing.T, id sdk.AccountObjectIdentifier, forceAuthn bool) { +func (c *SecurityIntegrationClient) CreateScim(t *testing.T) (*sdk.SecurityIntegration, func()) { t.Helper() - c.UpdateSaml2(t, sdk.NewAlterSaml2SecurityIntegrationRequest(id).WithSet(*sdk.NewSaml2IntegrationSetRequest().WithSaml2ForceAuthn(forceAuthn))) + return c.CreateScimWithRequest(t, sdk.NewCreateScimSecurityIntegrationRequest(c.ids.RandomAccountObjectIdentifier(), sdk.ScimSecurityIntegrationScimClientGeneric, sdk.ScimSecurityIntegrationRunAsRoleGenericScimProvisioner)) } -func (c *SecurityIntegrationClient) 
UpdateOauthForPartnerApplications(t *testing.T, request *sdk.AlterOauthForPartnerApplicationsSecurityIntegrationRequest) { +func (c *SecurityIntegrationClient) CreateApiAuthenticationClientCredentialsWithRequest(t *testing.T, request *sdk.CreateApiAuthenticationWithClientCredentialsFlowSecurityIntegrationRequest) (*sdk.SecurityIntegration, func()) { t.Helper() ctx := context.Background() - err := c.client().AlterOauthForPartnerApplications(ctx, request) + err := c.client().CreateApiAuthenticationWithClientCredentialsFlow(ctx, request) + require.NoError(t, err) + + si, err := c.client().ShowByID(ctx, request.GetName()) require.NoError(t, err) + + return si, c.DropSecurityIntegrationFunc(t, request.GetName()) } func (c *SecurityIntegrationClient) CreateScimWithRequest(t *testing.T, request *sdk.CreateScimSecurityIntegrationRequest) (*sdk.SecurityIntegration, func()) { @@ -103,6 +154,35 @@ func (c *SecurityIntegrationClient) CreateScimWithRequest(t *testing.T, request return si, c.DropSecurityIntegrationFunc(t, request.GetName()) } +func (c *SecurityIntegrationClient) UpdateExternalOauth(t *testing.T, request *sdk.AlterExternalOauthSecurityIntegrationRequest) { + t.Helper() + ctx := context.Background() + + err := c.client().AlterExternalOauth(ctx, request) + require.NoError(t, err) +} + +func (c *SecurityIntegrationClient) UpdateSaml2(t *testing.T, request *sdk.AlterSaml2SecurityIntegrationRequest) { + t.Helper() + ctx := context.Background() + + err := c.client().AlterSaml2(ctx, request) + require.NoError(t, err) +} + +func (c *SecurityIntegrationClient) UpdateSaml2ForceAuthn(t *testing.T, id sdk.AccountObjectIdentifier, forceAuthn bool) { + t.Helper() + c.UpdateSaml2(t, sdk.NewAlterSaml2SecurityIntegrationRequest(id).WithSet(*sdk.NewSaml2IntegrationSetRequest().WithSaml2ForceAuthn(forceAuthn))) +} + +func (c *SecurityIntegrationClient) UpdateOauthForPartnerApplications(t *testing.T, request *sdk.AlterOauthForPartnerApplicationsSecurityIntegrationRequest) { + t.Helper() + ctx := context.Background() + + err := c.client().AlterOauthForPartnerApplications(ctx, request) + require.NoError(t, err) +} + func (c *SecurityIntegrationClient) UpdateOauthForClients(t *testing.T, request *sdk.AlterOauthForCustomClientsSecurityIntegrationRequest) { t.Helper() ctx := context.Background() diff --git a/pkg/acceptance/helpers/storage_integration_client.go b/pkg/acceptance/helpers/storage_integration_client.go new file mode 100644 index 0000000000..32041df07d --- /dev/null +++ b/pkg/acceptance/helpers/storage_integration_client.go @@ -0,0 +1,79 @@ +package helpers + +import ( + "context" + "testing" + + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk" + "github.com/stretchr/testify/require" +) + +type StorageIntegrationClient struct { + context *TestClientContext + ids *IdsGenerator +} + +func NewStorageIntegrationClient(context *TestClientContext, idsGenerator *IdsGenerator) *StorageIntegrationClient { + return &StorageIntegrationClient{ + context: context, + ids: idsGenerator, + } +} + +func (c *StorageIntegrationClient) client() sdk.StorageIntegrations { + return c.context.client.StorageIntegrations +} + +func (c *StorageIntegrationClient) CreateS3(t *testing.T, awsBucketUrl, awsRoleArn string) (*sdk.StorageIntegration, func()) { + t.Helper() + ctx := context.Background() + + allowedLocations := func(prefix string) []sdk.StorageLocation { + return []sdk.StorageLocation{ + { + Path: prefix + "/allowed-location", + }, + { + Path: prefix + "/allowed-location2", + }, + } + } + 
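+ // Resolve the allowed and (below) blocked storage locations under the provided bucket URL.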
s3AllowedLocations := allowedLocations(awsBucketUrl) + + blockedLocations := func(prefix string) []sdk.StorageLocation { + return []sdk.StorageLocation{ + { + Path: prefix + "/blocked-location", + }, + { + Path: prefix + "/blocked-location2", + }, + } + } + s3BlockedLocations := blockedLocations(awsBucketUrl) + + id := c.ids.RandomAccountObjectIdentifier() + req := sdk.NewCreateStorageIntegrationRequest(id, true, s3AllowedLocations). + WithIfNotExists(sdk.Bool(true)). + WithS3StorageProviderParams(sdk.NewS3StorageParamsRequest(awsRoleArn)). + WithStorageBlockedLocations(s3BlockedLocations). + WithComment(sdk.String("some comment")) + + err := c.client().Create(ctx, req) + require.NoError(t, err) + + integration, err := c.client().ShowByID(ctx, id) + require.NoError(t, err) + + return integration, c.DropFunc(t, id) +} + +func (c *StorageIntegrationClient) DropFunc(t *testing.T, id sdk.AccountObjectIdentifier) func() { + t.Helper() + ctx := context.Background() + + return func() { + err := c.client().Drop(ctx, sdk.NewDropStorageIntegrationRequest(id).WithIfExists(sdk.Bool(true))) + require.NoError(t, err) + } +} diff --git a/pkg/acceptance/helpers/tag_client.go b/pkg/acceptance/helpers/tag_client.go index ee2f01d46a..90d7cbcb8b 100644 --- a/pkg/acceptance/helpers/tag_client.go +++ b/pkg/acceptance/helpers/tag_client.go @@ -52,6 +52,13 @@ func (c *TagClient) CreateWithRequest(t *testing.T, req *sdk.CreateTagRequest) ( return tag, c.DropTagFunc(t, req.GetName()) } +func (c *TagClient) Alter(t *testing.T, req *sdk.AlterTagRequest) { + t.Helper() + ctx := context.Background() + err := c.client().Alter(ctx, req) + require.NoError(t, err) +} + func (c *TagClient) DropTagFunc(t *testing.T, id sdk.SchemaObjectIdentifier) func() { t.Helper() ctx := context.Background() diff --git a/pkg/acceptance/helpers/test_client.go b/pkg/acceptance/helpers/test_client.go index 9a50039711..53a9b6cb2d 100644 --- a/pkg/acceptance/helpers/test_client.go +++ b/pkg/acceptance/helpers/test_client.go @@ -17,6 +17,7 @@ type TestClient struct { ApplicationPackage *ApplicationPackageClient AuthenticationPolicy *AuthenticationPolicyClient BcrBundles *BcrBundlesClient + ComputePool *ComputePoolClient Connection *ConnectionClient Context *ContextClient CortexSearchService *CortexSearchServiceClient @@ -26,6 +27,7 @@ type TestClient struct { DataMetricFunctionClient *DataMetricFunctionClient DataMetricFunctionReferences *DataMetricFunctionReferencesClient DynamicTable *DynamicTableClient + EventTable *EventTableClient ExternalAccessIntegration *ExternalAccessIntegrationClient ExternalFunction *ExternalFunctionClient ExternalTable *ExternalTableClient @@ -54,6 +56,7 @@ type TestClient struct { SessionPolicy *SessionPolicyClient Share *ShareClient Stage *StageClient + StorageIntegration *StorageIntegrationClient Stream *StreamClient Streamlit *StreamlitClient Table *TableClient @@ -86,6 +89,7 @@ func NewTestClient(c *sdk.Client, database string, schema string, warehouse stri ApplicationPackage: NewApplicationPackageClient(context, idsGenerator), AuthenticationPolicy: NewAuthenticationPolicyClient(context, idsGenerator), BcrBundles: NewBcrBundlesClient(context), + ComputePool: NewComputePoolClient(context, idsGenerator), Connection: NewConnectionClient(context, idsGenerator), Context: NewContextClient(context), CortexSearchService: NewCortexSearchServiceClient(context, idsGenerator), @@ -95,6 +99,7 @@ func NewTestClient(c *sdk.Client, database string, schema string, warehouse stri DataMetricFunctionClient: 
NewDataMetricFunctionClient(context, idsGenerator), DataMetricFunctionReferences: NewDataMetricFunctionReferencesClient(context), DynamicTable: NewDynamicTableClient(context, idsGenerator), + EventTable: NewEventTableClient(context, idsGenerator), ExternalAccessIntegration: NewExternalAccessIntegrationClient(context, idsGenerator), ExternalFunction: NewExternalFunctionClient(context, idsGenerator), ExternalTable: NewExternalTableClient(context, idsGenerator), @@ -122,9 +127,10 @@ func NewTestClient(c *sdk.Client, database string, schema string, warehouse stri SecurityIntegration: NewSecurityIntegrationClient(context, idsGenerator), SessionPolicy: NewSessionPolicyClient(context, idsGenerator), Share: NewShareClient(context, idsGenerator), + Stage: NewStageClient(context, idsGenerator), + StorageIntegration: NewStorageIntegrationClient(context, idsGenerator), Stream: NewStreamClient(context, idsGenerator), Streamlit: NewStreamlitClient(context, idsGenerator), - Stage: NewStageClient(context, idsGenerator), Table: NewTableClient(context, idsGenerator), Tag: NewTagClient(context, idsGenerator), Task: NewTaskClient(context, idsGenerator), diff --git a/pkg/acceptance/testenvs/testing_environment_variables.go b/pkg/acceptance/testenvs/testing_environment_variables.go index 7b6e22a50b..997d8cd8a8 100644 --- a/pkg/acceptance/testenvs/testing_environment_variables.go +++ b/pkg/acceptance/testenvs/testing_environment_variables.go @@ -32,6 +32,7 @@ const ( EnableAcceptance env = resource.EnvTfAcc EnableSweep env = "TEST_SF_TF_ENABLE_SWEEP" + EnableManual env = "TEST_SF_TF_ENABLE_MANUAL_TESTS" ConfigureClientOnce env = "SF_TF_ACC_TEST_CONFIGURE_CLIENT_ONCE" TestObjectsSuffix env = "TEST_SF_TF_TEST_OBJECT_SUFFIX" RequireTestObjectsSuffix env = "TEST_SF_TF_REQUIRE_TEST_OBJECT_SUFFIX" diff --git a/pkg/acceptance/testing.go b/pkg/acceptance/testing.go index 79d52bd35f..3896228d0b 100644 --- a/pkg/acceptance/testing.go +++ b/pkg/acceptance/testing.go @@ -14,6 +14,7 @@ import ( "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/helpers/random" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/testenvs" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/testprofiles" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/internal/snowflakeenvs" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/provider" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk" "github.com/hashicorp/terraform-plugin-go/tfprotov5" @@ -21,7 +22,9 @@ import ( "github.com/hashicorp/terraform-plugin-mux/tf5to6server" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" "github.com/hashicorp/terraform-plugin-testing/config" + "github.com/hashicorp/terraform-plugin-testing/helper/resource" "github.com/snowflakedb/gosnowflake" + "github.com/stretchr/testify/require" ) const AcceptanceTestPrefix = "acc_test_" @@ -197,3 +200,28 @@ func TestClient() *helpers.TestClient { func SecondaryTestClient() *helpers.TestClient { return atc.secondaryTestClient } + +// ExternalProviderWithExactVersion returns a map of external providers with an exact version constraint +func ExternalProviderWithExactVersion(version string) map[string]resource.ExternalProvider { + return map[string]resource.ExternalProvider{ + "snowflake": { + VersionConstraint: fmt.Sprintf("=%s", version), + Source: "Snowflake-Labs/snowflake", + }, + } +} + +// SetV097CompatibleConfigPathEnv sets a new config path in a relevant env variable for a file that is compatible with v0.97. 
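+// It points the SNOWFLAKE_CONFIG_PATH environment variable at ~/.snowflake/config_v097_compatible for the duration of the test (t.Setenv restores the previous value afterwards).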
+func SetV097CompatibleConfigPathEnv(t *testing.T) { + t.Helper() + home, err := os.UserHomeDir() + require.NoError(t, err) + configPath := filepath.Join(home, ".snowflake", "config_v097_compatible") + t.Setenv(snowflakeenvs.ConfigPath, configPath) +} + +// UnsetConfigPathEnv unsets a config path env +func UnsetConfigPathEnv(t *testing.T) { + t.Helper() + t.Setenv(snowflakeenvs.ConfigPath, "") +} diff --git a/pkg/acceptance/testprofiles/testing_config_profiles.go b/pkg/acceptance/testprofiles/testing_config_profiles.go index 6bca3093c1..c671c4219c 100644 --- a/pkg/acceptance/testprofiles/testing_config_profiles.go +++ b/pkg/acceptance/testprofiles/testing_config_profiles.go @@ -6,4 +6,11 @@ const ( Third = "third_test_account" Fourth = "fourth_test_account" IncorrectUserAndPassword = "incorrect_test_profile" + CompleteFields = "complete_fields" + CompleteFieldsInvalid = "complete_fields_invalid" + DefaultWithPasscode = "default_with_passcode" + + JwtAuth = "jwt_auth" + EncryptedJwtAuth = "encrypted_jwt_auth" + Okta = "okta" ) diff --git a/pkg/architest/architest_test.go b/pkg/architest/architest_test.go index 419f78c503..b53a0d031d 100644 --- a/pkg/architest/architest_test.go +++ b/pkg/architest/architest_test.go @@ -120,6 +120,7 @@ func Test_Files(t *testing.T) { expectedMethodNames []string }{ {filePath: "testdata/dir1/sample1.go", expectedMethodNames: []string{}}, + // object methods are skipped {filePath: "testdata/dir1/sample2.go", expectedMethodNames: []string{"A"}}, } for _, tt := range tests1 { diff --git a/pkg/architest/file.go b/pkg/architest/file.go index a2d781a719..597402a868 100644 --- a/pkg/architest/file.go +++ b/pkg/architest/file.go @@ -42,7 +42,7 @@ func (f *File) ExportedMethods() Methods { for _, d := range f.fileSrc.Decls { if v, ok := d.(*ast.FuncDecl); ok { name := v.Name.Name - if ast.IsExported(name) { + if ast.IsExported(name) && v.Recv == nil { allExportedMethods = append(allExportedMethods, *NewMethod(name, f)) } } diff --git a/pkg/architest/testdata/dir1/sample2.go b/pkg/architest/testdata/dir1/sample2.go index 7ba18f8743..ccbd1b7318 100644 --- a/pkg/architest/testdata/dir1/sample2.go +++ b/pkg/architest/testdata/dir1/sample2.go @@ -3,3 +3,9 @@ package dir1 func A() {} func a() {} + +type obj struct{} + +func (o obj) Exported() {} + +func (o obj) private() {} diff --git a/pkg/datasources/connections.go b/pkg/datasources/connections.go new file mode 100644 index 0000000000..ef03ffa7f1 --- /dev/null +++ b/pkg/datasources/connections.go @@ -0,0 +1,68 @@ +package datasources + +import ( + "context" + + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/internal/provider" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/resources" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/schemas" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk" + "github.com/hashicorp/terraform-plugin-sdk/v2/diag" + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" +) + +var connectionsSchema = map[string]*schema.Schema{ + "like": likeSchema, + "connections": { + Type: schema.TypeList, + Computed: true, + Description: "Holds the aggregated output of all connections details queries.", + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + resources.ShowOutputAttributeName: { + Type: schema.TypeList, + Computed: true, + Description: "Holds the output of SHOW CONNECTIONS.", + Elem: &schema.Resource{ + Schema: schemas.ShowConnectionSchema, + }, + }, + }, + }, + }, +} + +func Connections() *schema.Resource { + return 
&schema.Resource{ + ReadContext: ReadConnections, + Schema: connectionsSchema, + Description: "Datasource used to get details of filtered connections. Filtering is aligned with the current possibilities for [SHOW CONNECTIONS](https://docs.snowflake.com/en/sql-reference/sql/show-connections) query. The results of SHOW is encapsulated in one output collection `connections`.", + } +} + +func ReadConnections(ctx context.Context, d *schema.ResourceData, meta any) diag.Diagnostics { + client := meta.(*provider.Context).Client + req := sdk.ShowConnectionRequest{} + + handleLike(d, &req.Like) + + connections, err := client.Connections.Show(ctx, &req) + if err != nil { + return diag.FromErr(err) + } + + d.SetId("connections_read") + + flattenedConnections := make([]map[string]any, len(connections)) + for i, connection := range connections { + connection := connection + flattenedConnections[i] = map[string]any{ + resources.ShowOutputAttributeName: []map[string]any{schemas.ConnectionToSchema(&connection)}, + } + } + if err := d.Set("connections", flattenedConnections); err != nil { + return diag.FromErr(err) + } + + return nil +} diff --git a/pkg/datasources/connections_acceptance_test.go b/pkg/datasources/connections_acceptance_test.go new file mode 100644 index 0000000000..5e71034391 --- /dev/null +++ b/pkg/datasources/connections_acceptance_test.go @@ -0,0 +1,223 @@ +package datasources_test + +import ( + "fmt" + "strings" + "testing" + + acc "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance" + accConfig "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/bettertestspoc/config" + + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/bettertestspoc/assert" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/bettertestspoc/assert/resourceshowoutputassert" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/bettertestspoc/config/model" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/helpers/random" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/testenvs" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/provider/resources" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk" + "github.com/hashicorp/terraform-plugin-testing/helper/resource" + "github.com/hashicorp/terraform-plugin-testing/tfversion" +) + +func connectionsData() string { + return ` + data "snowflake_connections" "test" { + depends_on = [snowflake_primary_connection.test] + }` +} + +func TestAcc_Connections_Minimal(t *testing.T) { + // TODO: [SNOW-1002023]: Unskip; Business Critical Snowflake Edition needed + _ = testenvs.GetOrSkipTest(t, testenvs.TestFailoverGroups) + + accountId := acc.TestClient().Account.GetAccountIdentifier(t) + id := acc.TestClient().Ids.RandomAccountObjectIdentifier() + connectionModel := model.PrimaryConnection("test", id.Name()) + + primaryConnectionAsExternalId := sdk.NewExternalObjectIdentifier(accountId, id) + + dataConnections := accConfig.FromModel(t, connectionModel) + connectionsData() + + resource.Test(t, resource.TestCase{ + ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, + PreCheck: func() { acc.TestAccPreCheck(t) }, + TerraformVersionChecks: []tfversion.TerraformVersionCheck{ + tfversion.RequireAbove(tfversion.Version1_5_0), + }, + CheckDestroy: acc.CheckDestroy(t, resources.PrimaryConnection), + Steps: []resource.TestStep{ + { + Config: dataConnections, + Check: assert.AssertThat(t, + 
assert.Check(resource.TestCheckResourceAttr("data.snowflake_connections.test", "connections.#", "1")), + resourceshowoutputassert.ConnectionShowOutput(t, "snowflake_primary_connection.test"). + HasName(id.Name()). + HasCreatedOnNotEmpty(). + HasSnowflakeRegion(acc.TestClient().Context.CurrentRegion(t)). + HasAccountLocator(acc.TestClient().GetAccountLocator()). + HasAccountName(accountId.AccountName()). + HasOrganizationName(accountId.OrganizationName()). + HasComment(""). + HasIsPrimary(true). + HasPrimaryIdentifier(primaryConnectionAsExternalId). + HasFailoverAllowedToAccounts(accountId). + HasConnectionUrl( + acc.TestClient().Connection.GetConnectionUrl(accountId.OrganizationName(), id.Name()), + ), + ), + }, + }, + }) +} + +func TestAcc_Connections_Complete(t *testing.T) { + // TODO: [SNOW-1002023]: Unskip; Business Critical Snowflake Edition needed + _ = testenvs.GetOrSkipTest(t, testenvs.TestFailoverGroups) + + accountId := acc.TestClient().Account.GetAccountIdentifier(t) + secondaryAccountId := acc.SecondaryTestClient().Account.GetAccountIdentifier(t) + + id := acc.TestClient().Ids.RandomAccountObjectIdentifier() + connectionModel := model.PrimaryConnection("test", id.Name()). + WithEnableFailover(secondaryAccountId). + WithComment("test comment") + + primaryConnectionAsExternalId := sdk.NewExternalObjectIdentifier(accountId, id) + + dataConnections := accConfig.FromModel(t, connectionModel) + connectionsData() + + resource.Test(t, resource.TestCase{ + ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, + PreCheck: func() { acc.TestAccPreCheck(t) }, + TerraformVersionChecks: []tfversion.TerraformVersionCheck{ + tfversion.RequireAbove(tfversion.Version1_5_0), + }, + CheckDestroy: acc.CheckDestroy(t, resources.PrimaryConnection), + Steps: []resource.TestStep{ + { + Config: dataConnections, + Check: assert.AssertThat(t, + assert.Check(resource.TestCheckResourceAttr("data.snowflake_connections.test", "connections.#", "1")), + resourceshowoutputassert.ConnectionShowOutput(t, "snowflake_connection.test"). + HasName(id.Name()). + HasCreatedOnNotEmpty(). + HasSnowflakeRegion(acc.TestClient().Context.CurrentRegion(t)). + HasAccountLocator(acc.TestClient().GetAccountLocator()). + HasAccountName(accountId.AccountName()). + HasOrganizationName(accountId.OrganizationName()). + HasComment("test comment"). + HasIsPrimary(true). + HasPrimaryIdentifier(primaryConnectionAsExternalId). + HasFailoverAllowedToAccounts(accountId, secondaryAccountId). 
+ HasConnectionUrl( + acc.TestClient().Connection.GetConnectionUrl(accountId.OrganizationName(), id.Name()), + ), + ), + }, + }, + }) +} + +func TestAcc_Connections_Filtering(t *testing.T) { + // TODO: [SNOW-1002023]: Unskip; Business Critical Snowflake Edition needed + _ = testenvs.GetOrSkipTest(t, testenvs.TestFailoverGroups) + + // TODO: [SNOW-1788041] - need to uppercase as connection name in snowflake is returned in uppercase + prefix := random.AlphaN(4) + prefix = strings.ToUpper(prefix) + + idOne := acc.TestClient().Ids.RandomAccountObjectIdentifierWithPrefix(prefix) + idTwo := acc.TestClient().Ids.RandomAccountObjectIdentifierWithPrefix(prefix) + idThree := acc.TestClient().Ids.RandomAccountObjectIdentifier() + + connectionModelOne := model.PrimaryConnection("c1", idOne.Name()) + connectionModelTwo := model.PrimaryConnection("c2", idTwo.Name()) + connectionModelThree := model.PrimaryConnection("c3", idThree.Name()) + + configWithLike := accConfig.FromModel(t, connectionModelOne) + + accConfig.FromModel(t, connectionModelTwo) + + accConfig.FromModel(t, connectionModelThree) + + resource.Test(t, resource.TestCase{ + ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, + PreCheck: func() { acc.TestAccPreCheck(t) }, + TerraformVersionChecks: []tfversion.TerraformVersionCheck{ + tfversion.RequireAbove(tfversion.Version1_5_0), + }, + CheckDestroy: acc.CheckDestroy(t, resources.PrimaryConnection), + Steps: []resource.TestStep{ + // with like + { + Config: configWithLike + connectionDatasourceWithLike(prefix+"%"), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr("data.snowflake_connections.test", "connections.#", "2"), + ), + }, + }, + }) +} + +func TestAcc_Connections_FilteringWithReplica(t *testing.T) { + // TODO: [SNOW-1002023]: Unskip; Business Critical Snowflake Edition needed + _ = testenvs.GetOrSkipTest(t, testenvs.TestFailoverGroups) + + // TODO: [SNOW-1788041] - need to uppercase as connection name in snowflake is returned in uppercase + prefix := random.AlphaN(4) + prefix = strings.ToUpper(prefix) + + idOne := acc.TestClient().Ids.RandomAccountObjectIdentifierWithPrefix(prefix) + idTwo := acc.SecondaryTestClient().Ids.RandomAccountObjectIdentifierWithPrefix(prefix) + + accountId := acc.TestClient().Account.GetAccountIdentifier(t) + + _, cleanup := acc.SecondaryTestClient().Connection.Create(t, idTwo) + t.Cleanup(cleanup) + + primaryConnectionAsExternalId := sdk.NewExternalObjectIdentifier(accountId, idTwo) + acc.SecondaryTestClient().Connection.Alter(t, sdk.NewAlterConnectionRequest(idTwo). 
+ WithEnableConnectionFailover(*sdk.NewEnableConnectionFailoverRequest([]sdk.AccountIdentifier{accountId}))) + + connectionModelOne := model.PrimaryConnection("c1", idOne.Name()) + connectionModelTwo := model.SecondaryConnection("c2", primaryConnectionAsExternalId.FullyQualifiedName(), idTwo.Name()) + + configWithLike := accConfig.FromModel(t, connectionModelOne) + + accConfig.FromModel(t, connectionModelTwo) + + resource.Test(t, resource.TestCase{ + ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, + PreCheck: func() { acc.TestAccPreCheck(t) }, + TerraformVersionChecks: []tfversion.TerraformVersionCheck{ + tfversion.RequireAbove(tfversion.Version1_5_0), + }, + CheckDestroy: acc.ComposeCheckDestroy(t, resources.PrimaryConnection, resources.SecondaryConnection), + Steps: []resource.TestStep{ + // with like + { + Config: configWithLike + connectionAndSecondaryConnectionDatasourceWithLike(prefix+"%"), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr("data.snowflake_connections.test", "connections.#", "2"), + ), + }, + }, + }) +} + +func connectionDatasourceWithLike(like string) string { + return fmt.Sprintf(` + data "snowflake_connections" "test" { + depends_on = [snowflake_primary_connection.c1, snowflake_primary_connection.c2, snowflake_primary_connection.c3] + + like = "%s" + } +`, like) +} + +func connectionAndSecondaryConnectionDatasourceWithLike(like string) string { + return fmt.Sprintf(` + data "snowflake_connections" "test" { + depends_on = [snowflake_primary_connection.c1, snowflake_secondary_connection.c2] + + like = "%s" + } +`, like) +} diff --git a/pkg/datasources/grants.go b/pkg/datasources/grants.go index 80ac1568aa..87ea97752f 100644 --- a/pkg/datasources/grants.go +++ b/pkg/datasources/grants.go @@ -378,13 +378,27 @@ func buildOptsForGrantsOn(grantsOn map[string]any) (*sdk.ShowGrantOptions, error if objectType == "" || objectName == "" { return nil, fmt.Errorf("object_type (%s) or object_name (%s) missing", objectType, objectName) } - objectId, err := helpers.DecodeSnowflakeParameterID(objectName) - if err != nil { - return nil, err + + sdkObjectType := sdk.ObjectType(objectType) + var objectId sdk.ObjectIdentifier + var err error + // TODO [SNOW-1569535]: use a mapper from object type to parsing function + // TODO [SNOW-1569535]: grant_ownership#getOnObjectIdentifier could be used but it is limited only to ownership-transferable objects (according to the docs) - we should add an integration test to verify if the docs are complete + if sdkObjectType.IsWithArguments() { + objectId, err = sdk.ParseSchemaObjectIdentifierWithArguments(objectName) + if err != nil { + return nil, err + } + } else { + objectId, err = helpers.DecodeSnowflakeParameterID(objectName) + if err != nil { + return nil, err + } } + opts.On = &sdk.ShowGrantsOn{ Object: &sdk.Object{ - ObjectType: sdk.ObjectType(objectType), + ObjectType: sdkObjectType, Name: objectId, }, } diff --git a/pkg/datasources/grants_acceptance_test.go b/pkg/datasources/grants_acceptance_test.go index 90478754ee..6e9774e285 100644 --- a/pkg/datasources/grants_acceptance_test.go +++ b/pkg/datasources/grants_acceptance_test.go @@ -6,6 +6,8 @@ import ( acc "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/testenvs" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk" "github.com/hashicorp/terraform-plugin-testing/config" "github.com/hashicorp/terraform-plugin-testing/helper/resource" 
"github.com/hashicorp/terraform-plugin-testing/tfversion" @@ -99,6 +101,32 @@ func TestAcc_Grants_On_SchemaObject(t *testing.T) { }) } +func TestAcc_Grants_On_SchemaObject_WithArguments(t *testing.T) { + _ = testenvs.GetOrSkipTest(t, testenvs.EnableAcceptance) + acc.TestAccPreCheck(t) + + function := acc.TestClient().Function.Create(t, sdk.DataTypeVARCHAR) + configVariables := config.Variables{ + "fully_qualified_function_name": config.StringVariable(function.ID().FullyQualifiedName()), + } + + resource.Test(t, resource.TestCase{ + ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, + PreCheck: func() { acc.TestAccPreCheck(t) }, + TerraformVersionChecks: []tfversion.TerraformVersionCheck{ + tfversion.RequireAbove(tfversion.Version1_5_0), + }, + CheckDestroy: nil, + Steps: []resource.TestStep{ + { + ConfigDirectory: acc.ConfigurationDirectory("TestAcc_Grants/On/SchemaObject_WithArguments"), + ConfigVariables: configVariables, + Check: checkAtLeastOneGrantPresent(), + }, + }, + }) +} + func TestAcc_Grants_On_Invalid_NoAttribute(t *testing.T) { resource.Test(t, resource.TestCase{ ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, diff --git a/pkg/datasources/secrets_acceptance_test.go b/pkg/datasources/secrets_acceptance_test.go index 41d454a29a..122755c052 100644 --- a/pkg/datasources/secrets_acceptance_test.go +++ b/pkg/datasources/secrets_acceptance_test.go @@ -20,13 +20,6 @@ import ( "github.com/hashicorp/terraform-plugin-testing/tfversion" ) -const ( - secretWithClientCredentials = "snowflake_secret_with_client_credentials" - secretWithAuthorizationCodeGrant = "snowflake_secret_with_authorization_code_grant" - secretWithBasicAuthentication = "snowflake_secret_with_basic_authentication" - secretWithGenericString = "snowflake_secret_with_generic_string" -) - func TestAcc_Secrets_WithClientCredentials(t *testing.T) { _ = testenvs.GetOrSkipTest(t, testenvs.EnableAcceptance) acc.TestAccPreCheck(t) @@ -42,7 +35,7 @@ func TestAcc_Secrets_WithClientCredentials(t *testing.T) { secretModel := model.SecretWithClientCredentials("test", integrationId.Name(), id.DatabaseName(), id.SchemaName(), id.Name(), []string{"username", "test_scope"}) - dataSecretsClientCredentials := accConfig.FromModel(t, secretModel) + secretsData(secretWithClientCredentials) + dataSecretsClientCredentials := accConfig.FromModel(t, secretModel) + secretsData(secretModel, id) dsName := "data.snowflake_secrets.test" resource.Test(t, resource.TestCase{ @@ -97,7 +90,7 @@ func TestAcc_Secrets_WithAuthorizationCodeGrant(t *testing.T) { secretModel := model.SecretWithAuthorizationCodeGrant("test", integrationId.Name(), id.DatabaseName(), id.SchemaName(), id.Name(), "test_token", time.Now().Add(24*time.Hour).Format(time.DateTime)).WithComment("test_comment") - dataSecretsAuthorizationCode := accConfig.FromModel(t, secretModel) + secretsData(secretWithAuthorizationCodeGrant) + dataSecretsAuthorizationCode := accConfig.FromModel(t, secretModel) + secretsData(secretModel, id) dsName := "data.snowflake_secrets.test" resource.Test(t, resource.TestCase{ @@ -138,7 +131,7 @@ func TestAcc_Secrets_WithBasicAuthentication(t *testing.T) { id := acc.TestClient().Ids.RandomSchemaObjectIdentifier() secretModel := model.SecretWithBasicAuthentication("test", id.DatabaseName(), id.Name(), "test_passwd", id.SchemaName(), "test_username") - dataSecretsAuthorizationCode := accConfig.FromModel(t, secretModel) + secretsData(secretWithBasicAuthentication) + dataSecretsAuthorizationCode := accConfig.FromModel(t, secretModel) + 
secretsData(secretModel, id) dsName := "data.snowflake_secrets.test" resource.Test(t, resource.TestCase{ @@ -179,7 +172,7 @@ func TestAcc_Secrets_WithGenericString(t *testing.T) { secretModel := model.SecretWithGenericString("test", id.DatabaseName(), id.Name(), id.SchemaName(), "test_secret_string") - dataSecretsAuthorizationCode := accConfig.FromModel(t, secretModel) + secretsData(secretWithGenericString) + dataSecretsAuthorizationCode := accConfig.FromModel(t, secretModel) + secretsData(secretModel, id) dsName := "data.snowflake_secrets.test" resource.Test(t, resource.TestCase{ @@ -216,11 +209,14 @@ func TestAcc_Secrets_WithGenericString(t *testing.T) { }) } -func secretsData(secretResourceName string) string { +func secretsData(secretModel accConfig.ResourceModel, secretId sdk.SchemaObjectIdentifier) string { return fmt.Sprintf(` data "snowflake_secrets" "test" { depends_on = [%s.test] - }`, secretResourceName) + in { + database = %s + } + }`, secretModel.Resource(), secretId.DatabaseId().FullyQualifiedName()) } func TestAcc_Secrets_Filtering(t *testing.T) { @@ -309,6 +305,23 @@ func TestAcc_Secrets_Filtering(t *testing.T) { }) } +func TestAcc_Secrets_EmptyIn(t *testing.T) { + resource.Test(t, resource.TestCase{ + ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, + PreCheck: func() { acc.TestAccPreCheck(t) }, + TerraformVersionChecks: []tfversion.TerraformVersionCheck{ + tfversion.RequireAbove(tfversion.Version1_5_0), + }, + CheckDestroy: nil, + Steps: []resource.TestStep{ + { + Config: secretDatasourceEmptyIn(), + ExpectError: regexp.MustCompile("Invalid combination of arguments"), + }, + }, + }) +} + func datasourceWithLikeMultipleSecretTypes(like string) string { return fmt.Sprintf(` data "snowflake_secrets" "test" { @@ -340,23 +353,6 @@ func secretDatasourceInAccountWithLike(prefix string) string { `, prefix) } -func TestAcc_Secrets_EmptyIn(t *testing.T) { - resource.Test(t, resource.TestCase{ - ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, - PreCheck: func() { acc.TestAccPreCheck(t) }, - TerraformVersionChecks: []tfversion.TerraformVersionCheck{ - tfversion.RequireAbove(tfversion.Version1_5_0), - }, - CheckDestroy: nil, - Steps: []resource.TestStep{ - { - Config: secretDatasourceEmptyIn(), - ExpectError: regexp.MustCompile("Invalid combination of arguments"), - }, - }, - }) -} - func secretDatasourceEmptyIn() string { return ` data "snowflake_secrets" "test" { diff --git a/pkg/datasources/testdata/TestAcc_Grants/On/SchemaObject_WithArguments/snowflake_grants_on_schema_object.tf b/pkg/datasources/testdata/TestAcc_Grants/On/SchemaObject_WithArguments/snowflake_grants_on_schema_object.tf new file mode 100644 index 0000000000..80d7df5fad --- /dev/null +++ b/pkg/datasources/testdata/TestAcc_Grants/On/SchemaObject_WithArguments/snowflake_grants_on_schema_object.tf @@ -0,0 +1,6 @@ +data "snowflake_grants" "test" { + grants_on { + object_name = var.fully_qualified_function_name + object_type = "FUNCTION" + } +} diff --git a/pkg/datasources/testdata/TestAcc_Grants/On/SchemaObject_WithArguments/variables.tf b/pkg/datasources/testdata/TestAcc_Grants/On/SchemaObject_WithArguments/variables.tf new file mode 100644 index 0000000000..8a903b6d62 --- /dev/null +++ b/pkg/datasources/testdata/TestAcc_Grants/On/SchemaObject_WithArguments/variables.tf @@ -0,0 +1,3 @@ +variable "fully_qualified_function_name" { + type = string +} diff --git a/pkg/helpers/helpers.go b/pkg/helpers/helpers.go index bde76ff381..6b3e39d4cf 100644 --- a/pkg/helpers/helpers.go +++ 
b/pkg/helpers/helpers.go @@ -142,20 +142,6 @@ func DecodeSnowflakeAccountIdentifier(identifier string) (sdk.AccountIdentifier, } } -// TODO(SNOW-1479870): Test -// MergeMaps takes any number of maps (of the same type) and concatenates them. -// In case of key collision, the value will be selected from the map that is provided -// later in the src function parameter. -func MergeMaps[M ~map[K]V, K comparable, V any](src ...M) M { - merged := make(M) - for _, m := range src { - for k, v := range m { - merged[k] = v - } - } - return merged -} - // TODO: use slices.Concat in Go 1.22 func ConcatSlices[T any](slices ...[]T) []T { var tmp []T diff --git a/pkg/internal/collections/collection_helpers.go b/pkg/internal/collections/collection_helpers.go index 9f07b94fe5..871b2f6dd6 100644 --- a/pkg/internal/collections/collection_helpers.go +++ b/pkg/internal/collections/collection_helpers.go @@ -35,3 +35,17 @@ func MapErr[T any, R any](collection []T, mapper func(T) (R, error)) ([]R, error } return result, errors.Join(errs...) } + +// TODO(SNOW-1479870): Test +// MergeMaps takes any number of maps (of the same type) and concatenates them. +// In case of key collision, the value will be selected from the map that is provided +// later in the src function parameter. +func MergeMaps[M ~map[K]V, K comparable, V any](src ...M) M { + merged := make(M) + for _, m := range src { + for k, v := range m { + merged[k] = v + } + } + return merged +} diff --git a/pkg/internal/snowflakeenvs/snowflake_environment_variables.go b/pkg/internal/snowflakeenvs/snowflake_environment_variables.go index e8443e8fdd..91f817c655 100644 --- a/pkg/internal/snowflakeenvs/snowflake_environment_variables.go +++ b/pkg/internal/snowflakeenvs/snowflake_environment_variables.go @@ -2,6 +2,8 @@ package snowflakeenvs const ( Account = "SNOWFLAKE_ACCOUNT" + AccountName = "SNOWFLAKE_ACCOUNT_NAME" + OrganizationName = "SNOWFLAKE_ORGANIZATION_NAME" User = "SNOWFLAKE_USER" Username = "SNOWFLAKE_USERNAME" Password = "SNOWFLAKE_PASSWORD" @@ -41,9 +43,12 @@ const ( DisableQueryContextCache = "SNOWFLAKE_DISABLE_QUERY_CONTEXT_CACHE" IncludeRetryReason = "SNOWFLAKE_INCLUDE_RETRY_REASON" Profile = "SNOWFLAKE_PROFILE" + MaxRetryCount = "SNOWFLAKE_MAX_RETRY_COUNT" + DriverTracing = "SNOWFLAKE_DRIVER_TRACING" + TmpDirectoryPath = "SNOWFLAKE_TMP_DIRECTORY_PATH" + DisableConsoleLogin = "SNOWFLAKE_DISABLE_CONSOLE_LOGIN" ConfigPath = "SNOWFLAKE_CONFIG_PATH" - NoInstrumentedSql = "SF_TF_NO_INSTRUMENTED_SQL" - GosnowflakeLogLevel = "SF_TF_GOSNOWFLAKE_LOG_LEVEL" + NoInstrumentedSql = "SF_TF_NO_INSTRUMENTED_SQL" ) diff --git a/pkg/manual_tests/README.md b/pkg/manual_tests/README.md index 69fc83e54d..64222ee991 100644 --- a/pkg/manual_tests/README.md +++ b/pkg/manual_tests/README.md @@ -5,4 +5,5 @@ Every test should be placed in the subfolder representing a particular test (mos and should contain a file describing the manual steps to perform the test. Here's the list of cases we currently cannot reproduce and write acceptance tests for: -- `user_default_database_and_role`: Setting up a user with default_namespace and default_role, then logging into that user to see what happens with those values in various scenarios (e.g. insufficient privileges on the role). \ No newline at end of file +- `user_default_database_and_role`: Setting up a user with default_namespace and default_role, then logging into that user to see what happens with those values in various scenarios (e.g. insufficient privileges on the role). 
+- `authentication_methods`: Some of the authentication methods require manual steps, like confirming MFA or setting up additional dependencies.
diff --git a/pkg/manual_tests/authentication_methods/README.md b/pkg/manual_tests/authentication_methods/README.md
new file mode 100644
index 0000000000..99d9d548b5
--- /dev/null
+++ b/pkg/manual_tests/authentication_methods/README.md
@@ -0,0 +1,53 @@
+# Authentication methods manual tests
+
+This directory holds the steps for manual authentication method tests in the provider that cannot be recreated in automated tests (or are very hard to set up). These tests are disabled by default and require the `TEST_SF_TF_ENABLE_MANUAL_TESTS` environment variable to be set.
+
+## Okta authenticator test
+This test checks the `Okta` authenticator option. It requires manual steps because of additional setup on the Okta side. It assumes that the `default` profile uses standard values for account name, user, password, etc.
+1. Set up a developer Okta account [here](https://developer.okta.com/signup/).
+2. Go to the admin panel and select Applications -> Create App Integration.
+3. Create a new application of the SAML 2.0 type and give it a unique name.
+4. Fill in the SAML settings - paste the URLs for the testing accounts, like `https://example.snowflakecomputing.com/fed/login`, for Single sign-on URL, Recipient URL, Destination URL, and Audience URI (SP Entity ID).
+5. Click Next and Finish.
+6. After the app gets created, click View SAML setup instructions.
+7. Save the values provided: IDP SSO URL, IDP Issuer, and X509 certificate.
+8. Create a new security integration in Snowflake:
+```
+CREATE SECURITY INTEGRATION MyIDP
+TYPE=SAML2
+ENABLED=true
+SAML2_ISSUER='http://www.okta.com/example'
+SAML2_SSO_URL='https://dev-123456.oktapreview.com/app/dev-123456_test_1/example/sso/saml'
+SAML2_PROVIDER='OKTA'
+SAML2_SP_INITIATED_LOGIN_PAGE_LABEL='myidp - okta'
+SAML2_ENABLE_SP_INITIATED=false
+SAML2_X509_CERT='';
+```
+9. Note that the Snowflake and Okta login names must match; otherwise, create a temporary user with a login name matching the one in Okta.
+10. Prepare a TOML config like:
+```
+[okta]
+organizationname='ORGANIZATION_NAME'
+accountname='ACCOUNT_NAME'
+user='LOGIN_NAME' # This is the value used to log in to Okta
+password='PASSWORD' # This is the Okta password
+oktaurl='https://dev-123456.okta.com' # URL of your Okta environment
+```
+11. Run the tests - you should be able to authenticate with Okta.
+
+
+## UsernamePasswordMFA authenticator test
+This test checks the `UsernamePasswordMFA` authenticator option. It requires manual steps because of additional verification via an MFA device. It assumes that the `default` profile uses standard values for account name, user, password, etc.
+1. Make sure the user you're testing with has MFA enabled (see [docs](https://docs.snowflake.com/en/user-guide/ui-snowsight-profile#enroll-in-multi-factor-authentication-mfa)) and an MFA bypass is not set (check `mins_to_bypass_mfa` in the `SHOW USERS` output for the given user).
+1. After running the test, you should get pinged 3 times in the MFA app:
+  - The first two notifications are just test setups, also present in other acceptance tests.
+  - The third notification verifies that MFA is used for the first test step.
+  - For the second test step we cache the MFA token, so there is no notification.
+
+## UsernamePasswordMFA authenticator with passcode test
+This test checks the `UsernamePasswordMFA` authenticator option using `passcode`. It requires manual steps because of additional verification via an MFA device. It assumes that the `default_with_passcode` profile uses standard values for account name, user, password, etc., with `passcode` set to a value from your MFA app.
+1. Make sure the user you're testing with has MFA enabled (see [docs](https://docs.snowflake.com/en/user-guide/ui-snowsight-profile#enroll-in-multi-factor-authentication-mfa)) and an MFA bypass is not set (check `mins_to_bypass_mfa` in the `SHOW USERS` output for the given user).
+1. After running the test, you should get pinged 2 times in the MFA app:
+  - The first two notifications are just test setups, also present in other acceptance tests.
+  - The first step asks for permission to access your device keychain.
+  - For the second test step we cache the MFA token, so there is no notification.
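The test file added next implements these flows end to end. As a rough, condensed sketch of the gating pattern only (the function name is illustrative, and the snippet assumes the same imports and the `providerConfigWithAuthenticator` helper defined in that file), a manual test is skipped unless `TEST_SF_TF_ENABLE_MANUAL_TESTS` is set and is pointed at a named TOML profile:

```go
// Condensed sketch only - the real tests below also add PreCheck and Terraform version checks.
func TestAcc_Provider_OktaAuth_Sketch(t *testing.T) {
	// Skips the test unless TEST_SF_TF_ENABLE_MANUAL_TESTS is set.
	_ = testenvs.GetOrSkipTest(t, testenvs.EnableManual)

	resource.Test(t, resource.TestCase{
		ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories,
		Steps: []resource.TestStep{
			{
				// Point the provider at the [okta] TOML profile prepared above.
				Config: providerConfigWithAuthenticator(testprofiles.Okta, sdk.AuthenticationTypeOkta),
			},
		},
	})
}
```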
diff --git a/pkg/manual_tests/authentication_methods/auth_test.go b/pkg/manual_tests/authentication_methods/auth_test.go
new file mode 100644
index 0000000000..5089009e3a
--- /dev/null
+++ b/pkg/manual_tests/authentication_methods/auth_test.go
@@ -0,0 +1,102 @@
+package manual
+
+import (
+	"fmt"
+	"testing"
+
+	acc "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance"
+	"github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk"
+
+	"github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/testenvs"
+	"github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/testprofiles"
+	"github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/internal/snowflakeenvs"
+	"github.com/hashicorp/terraform-plugin-testing/helper/resource"
+	"github.com/hashicorp/terraform-plugin-testing/tfversion"
+)
+
+// This is a manual test for authenticating with Okta.
+func TestAcc_Provider_OktaAuth(t *testing.T) {
+	_ = testenvs.GetOrSkipTest(t, testenvs.EnableManual)
+	t.Setenv(string(testenvs.ConfigureClientOnce), "")
+
+	resource.Test(t, resource.TestCase{
+		ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories,
+		PreCheck: func() {
+			acc.TestAccPreCheck(t)
+			testenvs.AssertEnvNotSet(t, snowflakeenvs.User)
+			testenvs.AssertEnvNotSet(t, snowflakeenvs.Password)
+		},
+		TerraformVersionChecks: []tfversion.TerraformVersionCheck{
+			tfversion.RequireAbove(tfversion.Version1_5_0),
+		},
+		Steps: []resource.TestStep{
+			{
+				Config: providerConfigWithAuthenticator(testprofiles.Okta, sdk.AuthenticationTypeOkta),
+			},
+		},
+	})
+}
+
+// This test requires manual action due to MFA. Make sure the user does not have a positive `mins_to_bypass_mfa` in `SHOW USERS`.
+func TestAcc_Provider_UsernamePasswordMfaAuth(t *testing.T) {
+	_ = testenvs.GetOrSkipTest(t, testenvs.EnableManual)
+	resource.Test(t, resource.TestCase{
+		ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories,
+		PreCheck: func() {
+			acc.TestAccPreCheck(t)
+		},
+		TerraformVersionChecks: []tfversion.TerraformVersionCheck{
+			tfversion.RequireAbove(tfversion.Version1_5_0),
+		},
+		Steps: []resource.TestStep{
+			// ensure MFA is checked here - accept login on your MFA device
+			{
+				Config: providerConfigWithAuthenticator(testprofiles.Default, sdk.AuthenticationTypeUsernamePasswordMfa),
+			},
+			// check that MFA login is cached - this step should not require manual action
+			{
+				Config: providerConfigWithAuthenticator(testprofiles.Default, sdk.AuthenticationTypeUsernamePasswordMfa),
+			},
+		},
+	})
+}
+
+// This test requires manual action due to MFA. Make sure the user does not have a positive `mins_to_bypass_mfa` in `SHOW USERS`.
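+// It uses the `default_with_passcode` profile; the first step may additionally ask for permission to access the device keychain, and the second step should reuse the cached MFA token without another prompt.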
+func TestAcc_Provider_UsernamePasswordMfaAuthWithPasscode(t *testing.T) { + _ = testenvs.GetOrSkipTest(t, testenvs.EnableManual) + resource.Test(t, resource.TestCase{ + ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, + PreCheck: func() { + acc.TestAccPreCheck(t) + }, + TerraformVersionChecks: []tfversion.TerraformVersionCheck{ + tfversion.RequireAbove(tfversion.Version1_5_0), + }, + Steps: []resource.TestStep{ + // ensure MFA is checked here - accept access to keychain on your device + { + Config: providerConfigWithAuthenticator(testprofiles.DefaultWithPasscode, sdk.AuthenticationTypeUsernamePasswordMfa), + }, + // check that MFA login is cached - this step should not require manual action + { + Config: providerConfigWithAuthenticator(testprofiles.DefaultWithPasscode, sdk.AuthenticationTypeUsernamePasswordMfa), + }, + }, + }) +} + +func providerConfigWithAuthenticator(profile string, authenticator sdk.AuthenticationType) string { + return fmt.Sprintf(` +provider "snowflake" { + profile = "%[1]s" + authenticator = "%[2]s" +} +`, profile, authenticator) + datasourceConfig() +} + +func datasourceConfig() string { + return ` +data snowflake_database "t" { + name = "SNOWFLAKE" +}` +} diff --git a/pkg/provider/provider.go b/pkg/provider/provider.go index 9369ca0451..ddc0512a20 100644 --- a/pkg/provider/provider.go +++ b/pkg/provider/provider.go @@ -8,8 +8,8 @@ import ( "net/url" "os" "strings" - "time" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/testenvs" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/datasources" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/internal/provider" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/internal/provider/docs" @@ -20,6 +20,7 @@ import ( "github.com/hashicorp/terraform-plugin-sdk/v2/diag" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/validation" + "github.com/hashicorp/terraform-plugin-testing/helper/resource" "github.com/snowflakedb/gosnowflake" ) @@ -47,11 +48,19 @@ func init() { func Provider() *schema.Provider { return &schema.Provider{ Schema: map[string]*schema.Schema{ - "account": { - Type: schema.TypeString, - Description: envNameFieldDescription("Specifies your Snowflake account identifier assigned, by Snowflake. The [account locator](https://docs.snowflake.com/en/user-guide/admin-account-identifier#format-2-account-locator-in-a-region) format is not supported. For information about account identifiers, see the [Snowflake documentation](https://docs.snowflake.com/en/user-guide/admin-account-identifier.html). Required unless using `profile`.", snowflakeenvs.Account), - Optional: true, - DefaultFunc: schema.EnvDefaultFunc(snowflakeenvs.Account, nil), + "account_name": { + Type: schema.TypeString, + Description: envNameFieldDescription("Specifies your Snowflake account name assigned by Snowflake. For information about account identifiers, see the [Snowflake documentation](https://docs.snowflake.com/en/user-guide/admin-account-identifier#account-name). Required unless using `profile`.", snowflakeenvs.AccountName), + Optional: true, + DefaultFunc: schema.EnvDefaultFunc(snowflakeenvs.AccountName, nil), + RequiredWith: []string{"account_name", "organization_name"}, + }, + "organization_name": { + Type: schema.TypeString, + Description: envNameFieldDescription("Specifies your Snowflake organization name assigned by Snowflake. 
For information about account identifiers, see the [Snowflake documentation](https://docs.snowflake.com/en/user-guide/admin-account-identifier#organization-name). Required unless using `profile`.", snowflakeenvs.OrganizationName), + Optional: true, + DefaultFunc: schema.EnvDefaultFunc(snowflakeenvs.OrganizationName, nil), + RequiredWith: []string{"account_name", "organization_name"}, }, "user": { Type: schema.TypeString, @@ -62,7 +71,7 @@ func Provider() *schema.Provider { }, "password": { Type: schema.TypeString, - Description: envNameFieldDescription("Password for username+password auth. Cannot be used with `browser_auth` or `private_key_path`.", snowflakeenvs.Password), + Description: envNameFieldDescription("Password for user + password auth. Cannot be used with `browser_auth` or `private_key_path`.", snowflakeenvs.Password), Optional: true, Sensitive: true, DefaultFunc: schema.EnvDefaultFunc(snowflakeenvs.Password, nil), @@ -89,9 +98,10 @@ func Provider() *schema.Provider { ValidateDiagFunc: validators.ValidateBooleanStringWithDefault, DefaultFunc: schema.EnvDefaultFunc(snowflakeenvs.ValidateDefaultParameters, provider.BooleanDefault), }, + // TODO(SNOW-999056): optionally rename to session_params "params": { Type: schema.TypeMap, - Description: "Sets other connection (i.e. session) parameters. [Parameters](https://docs.snowflake.com/en/sql-reference/parameters)", + Description: "Sets other connection (i.e. session) parameters. [Parameters](https://docs.snowflake.com/en/sql-reference/parameters). This field can not be set with environmental variables.", Optional: true, }, "client_ip": { @@ -123,10 +133,10 @@ func Provider() *schema.Provider { }, "authenticator": { Type: schema.TypeString, - Description: envNameFieldDescription("Specifies the [authentication type](https://pkg.go.dev/github.com/snowflakedb/gosnowflake#AuthType) to use when connecting to Snowflake. Valid values include: Snowflake, OAuth, ExternalBrowser, Okta, JWT, TokenAccessor, UsernamePasswordMFA. It has to be set explicitly to JWT for private key authentication.", snowflakeenvs.Authenticator), + Description: envNameFieldDescription(fmt.Sprintf("Specifies the [authentication type](https://pkg.go.dev/github.com/snowflakedb/gosnowflake#AuthType) to use when connecting to Snowflake. Valid options are: %v. Value `JWT` is deprecated and will be removed in future releases.", docs.PossibleValuesListed(sdk.AllAuthenticationTypes)), snowflakeenvs.Authenticator), Optional: true, - DefaultFunc: schema.EnvDefaultFunc(snowflakeenvs.Authenticator, string(authenticationTypeSnowflake)), - ValidateDiagFunc: validators.NormalizeValidation(toAuthenticatorType), + DefaultFunc: schema.EnvDefaultFunc(snowflakeenvs.Authenticator, string(sdk.AuthenticationTypeSnowflake)), + ValidateDiagFunc: validators.NormalizeValidation(sdk.ToAuthenticatorType), }, "passcode": { Type: schema.TypeString, @@ -144,7 +154,7 @@ func Provider() *schema.Provider { }, "okta_url": { Type: schema.TypeString, - Description: envNameFieldDescription("The URL of the Okta server. e.g. https://example.okta.com.", snowflakeenvs.OktaUrl), + Description: envNameFieldDescription("The URL of the Okta server. e.g. https://example.okta.com. Okta URL host needs to to have a suffix `okta.com`. 
Read more in Snowflake [docs](https://docs.snowflake.com/en/user-guide/oauth-okta).", snowflakeenvs.OktaUrl), Optional: true, DefaultFunc: schema.EnvDefaultFunc(snowflakeenvs.OktaUrl, nil), ValidateDiagFunc: validation.ToDiagFunc(validation.IsURLWithHTTPorHTTPS), @@ -304,6 +314,41 @@ func Provider() *schema.Provider { Optional: true, DefaultFunc: schema.EnvDefaultFunc(snowflakeenvs.DisableQueryContextCache, nil), }, + "include_retry_reason": { + Type: schema.TypeString, + Description: envNameFieldDescription("Should retried request contain retry reason.", snowflakeenvs.IncludeRetryReason), + Optional: true, + DefaultFunc: schema.EnvDefaultFunc(snowflakeenvs.IncludeRetryReason, resources.BooleanDefault), + ValidateDiagFunc: validators.ValidateBooleanStringWithDefault, + }, + "max_retry_count": { + Type: schema.TypeInt, + Description: envNameFieldDescription("Specifies how many times non-periodic HTTP request can be retried by the driver.", snowflakeenvs.MaxRetryCount), + Optional: true, + DefaultFunc: schema.EnvDefaultFunc(snowflakeenvs.MaxRetryCount, nil), + ValidateDiagFunc: validation.ToDiagFunc(validation.IntAtLeast(0)), + }, + "driver_tracing": { + Type: schema.TypeString, + Description: envNameFieldDescription(fmt.Sprintf("Specifies the logging level to be used by the driver. Valid options are: %v.", docs.PossibleValuesListed(sdk.AllDriverLogLevels)), snowflakeenvs.DriverTracing), + Optional: true, + DefaultFunc: schema.EnvDefaultFunc(snowflakeenvs.DriverTracing, nil), + ValidateDiagFunc: validators.NormalizeValidation(sdk.ToDriverLogLevel), + }, + "tmp_directory_path": { + Type: schema.TypeString, + Description: envNameFieldDescription("Sets temporary directory used by the driver for operations like encrypting, compressing etc.", snowflakeenvs.TmpDirectoryPath), + Optional: true, + DefaultFunc: schema.EnvDefaultFunc(snowflakeenvs.TmpDirectoryPath, nil), + }, + "disable_console_login": { + Type: schema.TypeString, + Description: envNameFieldDescription("Indicates whether console login should be disabled in the driver.", snowflakeenvs.DisableConsoleLogin), + Optional: true, + DefaultFunc: schema.EnvDefaultFunc(snowflakeenvs.DisableConsoleLogin, resources.BooleanDefault), + ValidateDiagFunc: validators.ValidateBooleanStringWithDefault, + }, + // TODO(SNOW-1761318): handle DisableSamlURLCheck after upgrading the driver to at least 1.10.1 "profile": { Type: schema.TypeString, // TODO(SNOW-1754364): Note that a default file path is already filled on sdk side. @@ -312,9 +357,16 @@ func Provider() *schema.Provider { DefaultFunc: schema.EnvDefaultFunc(snowflakeenvs.Profile, "default"), }, // Deprecated attributes + "account": { + Type: schema.TypeString, + Description: envNameFieldDescription("Use `account_name` and `organization_name` instead. Specifies your Snowflake account identifier assigned, by Snowflake. The [account locator](https://docs.snowflake.com/en/user-guide/admin-account-identifier#format-2-account-locator-in-a-region) format is not supported. For information about account identifiers, see the [Snowflake documentation](https://docs.snowflake.com/en/user-guide/admin-account-identifier.html). Required unless using `profile`.", snowflakeenvs.Account), + Optional: true, + DefaultFunc: schema.EnvDefaultFunc(snowflakeenvs.Account, nil), + Deprecated: "Use `account_name` and `organization_name` instead of `account`", + }, "username": { Type: schema.TypeString, - Description: envNameFieldDescription("Username for username+password authentication. 
Required unless using `profile`.", snowflakeenvs.Username), + Description: envNameFieldDescription("Username for user + password authentication. Required unless using `profile`.", snowflakeenvs.Username), Optional: true, DefaultFunc: schema.EnvDefaultFunc(snowflakeenvs.Username, nil), Deprecated: "Use `user` instead of `username`", @@ -417,7 +469,7 @@ func Provider() *schema.Provider { } func getResources() map[string]*schema.Resource { - return map[string]*schema.Resource{ + resourceList := map[string]*schema.Resource{ "snowflake_account": resources.Account(), "snowflake_account_authentication_policy_attachment": resources.AccountAuthenticationPolicyAttachment(), "snowflake_account_role": resources.AccountRole(), @@ -463,6 +515,7 @@ func getResources() map[string]*schema.Resource { "snowflake_object_parameter": resources.ObjectParameter(), "snowflake_password_policy": resources.PasswordPolicy(), "snowflake_pipe": resources.Pipe(), + "snowflake_primary_connection": resources.PrimaryConnection(), "snowflake_procedure": resources.Procedure(), "snowflake_resource_monitor": resources.ResourceMonitor(), "snowflake_role": resources.Role(), @@ -471,6 +524,7 @@ func getResources() map[string]*schema.Resource { "snowflake_saml2_integration": resources.SAML2Integration(), "snowflake_schema": resources.Schema(), "snowflake_scim_integration": resources.SCIMIntegration(), + "snowflake_secondary_connection": resources.SecondaryConnection(), "snowflake_secondary_database": resources.SecondaryDatabase(), "snowflake_secret_with_authorization_code_grant": resources.SecretWithAuthorizationCodeGrant(), "snowflake_secret_with_basic_authentication": resources.SecretWithBasicAuthentication(), @@ -504,12 +558,19 @@ func getResources() map[string]*schema.Resource { "snowflake_view": resources.View(), "snowflake_warehouse": resources.Warehouse(), } + + if os.Getenv(string(testenvs.EnableObjectRenamingTest)) != "" { + resourceList["snowflake_object_renaming"] = resources.ObjectRenamingListsAndSets() + } + + return resourceList } func getDataSources() map[string]*schema.Resource { return map[string]*schema.Resource{ "snowflake_accounts": datasources.Accounts(), "snowflake_alerts": datasources.Alerts(), + "snowflake_connections": datasources.Connections(), "snowflake_cortex_search_services": datasources.CortexSearchServices(), "snowflake_current_account": datasources.CurrentAccount(), "snowflake_current_role": datasources.CurrentRole(), @@ -571,49 +632,185 @@ func ConfigureProvider(ctx context.Context, s *schema.ResourceData) (any, diag.D } } - config := &gosnowflake.Config{ - Application: "terraform-provider-snowflake", + config, err := getDriverConfigFromTerraform(s) + if err != nil { + return nil, diag.FromErr(err) } - if v, ok := s.GetOk("account"); ok && v.(string) != "" { - config.Account = v.(string) + if v, ok := s.GetOk("profile"); ok && v.(string) != "" { + tomlConfig, err := getDriverConfigFromTOML(v.(string)) + if err != nil { + return nil, diag.FromErr(err) + } + config = sdk.MergeConfig(config, tomlConfig) } - // backwards compatibility until we can remove this - if v, ok := s.GetOk("username"); ok && v.(string) != "" { - config.User = v.(string) + client, clientErr := sdk.NewClient(config) + + // needed for tests verifying different provider setups + if os.Getenv(resource.EnvTfAcc) != "" && os.Getenv(string(testenvs.ConfigureClientOnce)) == "true" { + configuredClient = client + configureClientError = clientErr + } else { + configuredClient = nil + configureClientError = nil } - if v, ok := 
s.GetOk("user"); ok && v.(string) != "" { - config.User = v.(string) + if clientErr != nil { + return nil, diag.FromErr(clientErr) } - if v, ok := s.GetOk("password"); ok && v.(string) != "" { - config.Password = v.(string) + return &provider.Context{Client: client}, nil +} + +func getDriverConfigFromTOML(profile string) (*gosnowflake.Config, error) { + if profile == "default" { + return sdk.DefaultConfig(), nil + } + path, err := sdk.GetConfigFileName() + if err != nil { + return nil, err } - if v, ok := s.GetOk("warehouse"); ok && v.(string) != "" { - config.Warehouse = v.(string) + profileConfig, err := sdk.ProfileConfig(profile) + if err != nil { + return nil, fmt.Errorf(`could not retrieve "%s" profile config from file %s: %w`, profile, path, err) + } + if profileConfig == nil { + return nil, fmt.Errorf(`profile "%s" not found in file %s`, profile, path) } + return profileConfig, nil +} - if v, ok := s.GetOk("role"); ok && v.(string) != "" { - config.Role = v.(string) +func getDriverConfigFromTerraform(s *schema.ResourceData) (*gosnowflake.Config, error) { + config := &gosnowflake.Config{ + Application: "terraform-provider-snowflake", } - if v, ok := s.GetOk("region"); ok && v.(string) != "" { - config.Region = v.(string) + err := errors.Join( + // account_name and organization_name are handled below + handleStringField(s, "user", &config.User), + handleStringField(s, "password", &config.Password), + handleStringField(s, "warehouse", &config.Warehouse), + handleStringField(s, "role", &config.Role), + handleBooleanStringAttribute(s, "validate_default_parameters", &config.ValidateDefaultParameters), + // params are handled below + // client ip + func() error { + if v, ok := s.GetOk("client_ip"); ok && v.(string) != "" { + config.ClientIP = net.ParseIP(v.(string)) + } + return nil + }(), + // protocol + func() error { + if v, ok := s.GetOk("protocol"); ok && v.(string) != "" { + protocol, err := toProtocol(v.(string)) + if err != nil { + return err + } + config.Protocol = string(protocol) + } + return nil + }(), + handleStringField(s, "host", &config.Host), + handleIntAttribute(s, "port", &config.Port), + // authenticator + func() error { + if v, ok := s.GetOk("authenticator"); ok && v.(string) != "" { + authType, err := sdk.ToAuthenticatorType(v.(string)) + if err != nil { + return err + } + config.Authenticator = authType + } + return nil + }(), + handleStringField(s, "passcode", &config.Passcode), + handleBoolField(s, "passcode_in_password", &config.PasscodeInPassword), + // okta url + func() error { + if v, ok := s.GetOk("okta_url"); ok && v.(string) != "" { + oktaURL, err := url.Parse(v.(string)) + if err != nil { + return fmt.Errorf("could not parse okta_url err = %w", err) + } + config.OktaURL = oktaURL + } + return nil + }(), + handleDurationInSecondsAttribute(s, "login_timeout", &config.LoginTimeout), + handleDurationInSecondsAttribute(s, "request_timeout", &config.RequestTimeout), + handleDurationInSecondsAttribute(s, "jwt_expire_timeout", &config.JWTExpireTimeout), + handleDurationInSecondsAttribute(s, "client_timeout", &config.ClientTimeout), + handleDurationInSecondsAttribute(s, "jwt_client_timeout", &config.JWTClientTimeout), + handleDurationInSecondsAttribute(s, "external_browser_timeout", &config.ExternalBrowserTimeout), + handleBoolField(s, "insecure_mode", &config.InsecureMode), + // ocsp fail open + func() error { + if v := s.Get("ocsp_fail_open").(string); v != provider.BooleanDefault { + parsed, err := provider.BooleanStringToBool(v) + if err != nil { + return err + 
} + if parsed { + config.OCSPFailOpen = gosnowflake.OCSPFailOpenTrue + } else { + config.OCSPFailOpen = gosnowflake.OCSPFailOpenFalse + } + } + return nil + }(), + // token + func() error { + if v, ok := s.GetOk("token"); ok && v.(string) != "" { + config.Token = v.(string) + config.Authenticator = gosnowflake.AuthTypeOAuth + } + return nil + }(), + // token accessor is handled below + handleBoolField(s, "keep_session_alive", &config.KeepSessionAlive), + // private key and private key passphrase are handled below + handleBoolField(s, "disable_telemetry", &config.DisableTelemetry), + handleBooleanStringAttribute(s, "client_request_mfa_token", &config.ClientRequestMfaToken), + handleBooleanStringAttribute(s, "client_store_temporary_credential", &config.ClientStoreTemporaryCredential), + handleBoolField(s, "disable_query_context_cache", &config.DisableQueryContextCache), + handleBooleanStringAttribute(s, "include_retry_reason", &config.IncludeRetryReason), + handleIntAttribute(s, "max_retry_count", &config.MaxRetryCount), + // driver tracing + func() error { + if v, ok := s.GetOk("driver_tracing"); ok { + driverLogLevel, err := sdk.ToDriverLogLevel(v.(string)) + if err != nil { + return err + } + config.Tracing = string(driverLogLevel) + } + return nil + }(), + handleStringField(s, "tmp_directory_path", &config.TmpDirPath), + handleBooleanStringAttribute(s, "disable_console_login", &config.DisableConsoleLogin), + // profile is handled in the calling function + // TODO(SNOW-1761318): handle DisableSamlURLCheck after upgrading the driver to at least 1.10.1 + + // deprecated + handleStringField(s, "account", &config.Account), + handleStringField(s, "username", &config.User), + handleStringField(s, "region", &config.Region), + // session params are handled below + // browser auth is handled below + // private key path is handled below + ) + if err != nil { + return nil, err } - if v := s.Get("validate_default_parameters").(string); v != provider.BooleanDefault { - parsed, err := provider.BooleanStringToBool(v) - if err != nil { - return nil, diag.FromErr(err) - } - if parsed { - config.ValidateDefaultParameters = gosnowflake.ConfigBoolTrue - } else { - config.ValidateDefaultParameters = gosnowflake.ConfigBoolFalse - } + // account_name and organization_name override legacy account field + accountName := s.Get("account_name").(string) + organizationName := s.Get("organization_name").(string) + if accountName != "" && organizationName != "" { + config.Account = strings.Join([]string{organizationName, accountName}, "-") } m := make(map[string]interface{}) @@ -633,98 +830,14 @@ func ConfigureProvider(ctx context.Context, s *schema.ResourceData) (any, diag.D } config.Params = params - if v, ok := s.GetOk("client_ip"); ok && v.(string) != "" { - config.ClientIP = net.ParseIP(v.(string)) - } - - if v, ok := s.GetOk("protocol"); ok && v.(string) != "" { - config.Protocol = v.(string) - } - - if v, ok := s.GetOk("host"); ok && v.(string) != "" { - config.Host = v.(string) - } - - if v, ok := s.GetOk("port"); ok && v.(int) > 0 { - config.Port = v.(int) - } - // backwards compatibility until we can remove this if v, ok := s.GetOk("browser_auth"); ok && v.(bool) { config.Authenticator = gosnowflake.AuthTypeExternalBrowser } - if v, ok := s.GetOk("authenticator"); ok && v.(string) != "" { - authType, err := toAuthenticatorType(v.(string)) - if err != nil { - return "", diag.FromErr(err) - } - config.Authenticator = authType - } - - if v, ok := s.GetOk("passcode"); ok && v.(string) != "" { - config.Passcode = 
v.(string) - } - - if v, ok := s.GetOk("passcode_in_password"); ok && v.(bool) { - config.PasscodeInPassword = v.(bool) - } - if v, ok := s.GetOk("okta_url"); ok && v.(string) != "" { - oktaURL, err := url.Parse(v.(string)) - if err != nil { - return nil, diag.FromErr(fmt.Errorf("could not parse okta_url err = %w", err)) - } - config.OktaURL = oktaURL - } - - if v, ok := s.GetOk("login_timeout"); ok && v.(int) > 0 { - config.LoginTimeout = time.Second * time.Duration(int64(v.(int))) - } - - if v, ok := s.GetOk("request_timeout"); ok && v.(int) > 0 { - config.RequestTimeout = time.Second * time.Duration(int64(v.(int))) - } - - if v, ok := s.GetOk("jwt_expire_timeout"); ok && v.(int) > 0 { - config.JWTExpireTimeout = time.Second * time.Duration(int64(v.(int))) - } - - if v, ok := s.GetOk("client_timeout"); ok && v.(int) > 0 { - config.ClientTimeout = time.Second * time.Duration(int64(v.(int))) - } - - if v, ok := s.GetOk("jwt_client_timeout"); ok && v.(int) > 0 { - config.JWTClientTimeout = time.Second * time.Duration(int64(v.(int))) - } - - if v, ok := s.GetOk("external_browser_timeout"); ok && v.(int) > 0 { - config.ExternalBrowserTimeout = time.Second * time.Duration(int64(v.(int))) - } - - if v, ok := s.GetOk("insecure_mode"); ok && v.(bool) { - config.InsecureMode = v.(bool) - } - - if v := s.Get("ocsp_fail_open").(string); v != provider.BooleanDefault { - parsed, err := provider.BooleanStringToBool(v) - if err != nil { - return nil, diag.FromErr(err) - } - if parsed { - config.OCSPFailOpen = gosnowflake.OCSPFailOpenTrue - } else { - config.OCSPFailOpen = gosnowflake.OCSPFailOpenFalse - } - } - - if v, ok := s.GetOk("token"); ok && v.(string) != "" { - config.Token = v.(string) - config.Authenticator = gosnowflake.AuthTypeOAuth - } - if v, ok := s.GetOk("token_accessor"); ok { - if len(v.([]interface{})) > 0 { - tokenAccessor := v.([]interface{})[0].(map[string]interface{}) + if len(v.([]any)) > 0 { + tokenAccessor := v.([]any)[0].(map[string]any) tokenEndpoint := tokenAccessor["token_endpoint"].(string) refreshToken := tokenAccessor["refresh_token"].(string) clientID := tokenAccessor["client_id"].(string) @@ -732,92 +845,23 @@ func ConfigureProvider(ctx context.Context, s *schema.ResourceData) (any, diag.D redirectURI := tokenAccessor["redirect_uri"].(string) accessToken, err := GetAccessTokenWithRefreshToken(tokenEndpoint, clientID, clientSecret, refreshToken, redirectURI) if err != nil { - return nil, diag.FromErr(fmt.Errorf("could not retrieve access token from refresh token, err = %w", err)) + return nil, fmt.Errorf("could not retrieve access token from refresh token, err = %w", err) } config.Token = accessToken config.Authenticator = gosnowflake.AuthTypeOAuth } } - if v, ok := s.GetOk("keep_session_alive"); ok && v.(bool) { - config.KeepSessionAlive = v.(bool) - } - privateKeyPath := s.Get("private_key_path").(string) privateKey := s.Get("private_key").(string) privateKeyPassphrase := s.Get("private_key_passphrase").(string) v, err := getPrivateKey(privateKeyPath, privateKey, privateKeyPassphrase) if err != nil { - return nil, diag.FromErr(fmt.Errorf("could not retrieve private key: %w", err)) + return nil, fmt.Errorf("could not retrieve private key: %w", err) } if v != nil { config.PrivateKey = v } - if v, ok := s.GetOk("disable_telemetry"); ok && v.(bool) { - config.DisableTelemetry = v.(bool) - } - - if v := s.Get("client_request_mfa_token").(string); v != provider.BooleanDefault { - parsed, err := provider.BooleanStringToBool(v) - if err != nil { - return nil, 
diag.FromErr(err) - } - if parsed { - config.ClientRequestMfaToken = gosnowflake.ConfigBoolTrue - } else { - config.ClientRequestMfaToken = gosnowflake.ConfigBoolFalse - } - } - - if v := s.Get("client_store_temporary_credential").(string); v != provider.BooleanDefault { - parsed, err := provider.BooleanStringToBool(v) - if err != nil { - return nil, diag.FromErr(err) - } - if parsed { - config.ClientStoreTemporaryCredential = gosnowflake.ConfigBoolTrue - } else { - config.ClientStoreTemporaryCredential = gosnowflake.ConfigBoolFalse - } - } - - if v, ok := s.GetOk("disable_query_context_cache"); ok && v.(bool) { - config.DisableQueryContextCache = v.(bool) - } - - if v, ok := s.GetOk("profile"); ok && v.(string) != "" { - profile := v.(string) - if profile == "default" { - defaultConfig := sdk.DefaultConfig() - config = sdk.MergeConfig(config, defaultConfig) - } else { - profileConfig, err := sdk.ProfileConfig(profile) - if err != nil { - return "", diag.FromErr(errors.New("could not retrieve profile config: " + err.Error())) - } - if profileConfig == nil { - return "", diag.FromErr(errors.New("profile with name: " + profile + " not found in config file")) - } - // merge any credentials found in profile with config - config = sdk.MergeConfig(config, profileConfig) - } - } - - client, clientErr := sdk.NewClient(config) - - // needed for tests verifying different provider setups - if os.Getenv("TF_ACC") != "" && os.Getenv("SF_TF_ACC_TEST_CONFIGURE_CLIENT_ONCE") == "true" { - configuredClient = client - configureClientError = clientErr - } else { - configuredClient = nil - configureClientError = nil - } - - if clientErr != nil { - return nil, diag.FromErr(clientErr) - } - - return &provider.Context{Client: client}, nil + return config, nil } diff --git a/pkg/provider/provider_acceptance_test.go b/pkg/provider/provider_acceptance_test.go index 20f9b0cabe..0d0daf7b67 100644 --- a/pkg/provider/provider_acceptance_test.go +++ b/pkg/provider/provider_acceptance_test.go @@ -2,16 +2,25 @@ package provider_test import ( "fmt" + "net" + "net/url" "os" "regexp" + "strings" "testing" + "time" acc "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk" + "github.com/snowflakedb/gosnowflake" + "github.com/stretchr/testify/assert" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/testenvs" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/testprofiles" + internalprovider "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/internal/provider" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/internal/snowflakeenvs" "github.com/hashicorp/terraform-plugin-testing/helper/resource" + "github.com/hashicorp/terraform-plugin-testing/terraform" "github.com/hashicorp/terraform-plugin-testing/tfversion" "github.com/stretchr/testify/require" ) @@ -154,6 +163,300 @@ func TestAcc_Provider_configureClientOnceSwitching(t *testing.T) { }) } +func TestAcc_Provider_tomlConfig(t *testing.T) { + t.Setenv(string(testenvs.ConfigureClientOnce), "") + + user := acc.DefaultConfig(t).User + pass := acc.DefaultConfig(t).Password + account := acc.DefaultConfig(t).Account + + oktaUrl, err := url.Parse("https://example.com") + require.NoError(t, err) + + resource.Test(t, resource.TestCase{ + ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, + PreCheck: func() { + acc.TestAccPreCheck(t) + testenvs.AssertEnvNotSet(t, snowflakeenvs.User) + testenvs.AssertEnvNotSet(t, 
snowflakeenvs.Password) + }, + TerraformVersionChecks: []tfversion.TerraformVersionCheck{ + tfversion.RequireAbove(tfversion.Version1_5_0), + }, + Steps: []resource.TestStep{ + { + Config: providerConfig(testprofiles.CompleteFields), + Check: func(s *terraform.State) error { + config := acc.TestAccProvider.Meta().(*internalprovider.Context).Client.GetConfig() + assert.Equal(t, &gosnowflake.Config{ + Account: account, + User: user, + Password: pass, + Warehouse: "SNOWFLAKE", + Role: "ACCOUNTADMIN", + ValidateDefaultParameters: gosnowflake.ConfigBoolTrue, + ClientIP: net.ParseIP("1.2.3.4"), + Protocol: "https", + Host: fmt.Sprintf("%s.snowflakecomputing.com", account), + Params: map[string]*string{ + "foo": sdk.Pointer("bar"), + }, + Port: 443, + Authenticator: gosnowflake.AuthTypeSnowflake, + PasscodeInPassword: false, + OktaURL: oktaUrl, + LoginTimeout: 30 * time.Second, + RequestTimeout: 40 * time.Second, + JWTExpireTimeout: 50 * time.Second, + ClientTimeout: 10 * time.Second, + JWTClientTimeout: 20 * time.Second, + ExternalBrowserTimeout: 60 * time.Second, + MaxRetryCount: 1, + Application: "terraform-provider-snowflake", + InsecureMode: true, + OCSPFailOpen: gosnowflake.OCSPFailOpenTrue, + Token: "token", + KeepSessionAlive: true, + DisableTelemetry: true, + Tracing: string(sdk.DriverLogLevelInfo), + TmpDirPath: ".", + ClientRequestMfaToken: gosnowflake.ConfigBoolTrue, + ClientStoreTemporaryCredential: gosnowflake.ConfigBoolTrue, + DisableQueryContextCache: true, + IncludeRetryReason: gosnowflake.ConfigBoolTrue, + DisableConsoleLogin: gosnowflake.ConfigBoolTrue, + }, config) + assert.Equal(t, string(sdk.DriverLogLevelInfo), gosnowflake.GetLogger().GetLogLevel()) + + return nil + }, + }, + }, + }) +} + +func TestAcc_Provider_envConfig(t *testing.T) { + t.Setenv(string(testenvs.ConfigureClientOnce), "") + + user := acc.DefaultConfig(t).User + pass := acc.DefaultConfig(t).Password + account := acc.DefaultConfig(t).Account + + accountParts := strings.SplitN(account, "-", 2) + + oktaUrlFromEnv, err := url.Parse("https://example-env.com") + require.NoError(t, err) + + resource.Test(t, resource.TestCase{ + ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, + PreCheck: func() { + acc.TestAccPreCheck(t) + testenvs.AssertEnvNotSet(t, snowflakeenvs.User) + testenvs.AssertEnvNotSet(t, snowflakeenvs.Password) + }, + TerraformVersionChecks: []tfversion.TerraformVersionCheck{ + tfversion.RequireAbove(tfversion.Version1_5_0), + }, + Steps: []resource.TestStep{ + { + PreConfig: func() { + t.Setenv(snowflakeenvs.AccountName, accountParts[1]) + t.Setenv(snowflakeenvs.OrganizationName, accountParts[0]) + t.Setenv(snowflakeenvs.User, user) + t.Setenv(snowflakeenvs.Password, pass) + t.Setenv(snowflakeenvs.Warehouse, "SNOWFLAKE") + t.Setenv(snowflakeenvs.Protocol, "https") + t.Setenv(snowflakeenvs.Port, "443") + // do not set token - it should be propagated from TOML + t.Setenv(snowflakeenvs.Role, "ACCOUNTADMIN") + t.Setenv(snowflakeenvs.Authenticator, "snowflake") + t.Setenv(snowflakeenvs.ValidateDefaultParameters, "false") + t.Setenv(snowflakeenvs.ClientIp, "2.2.2.2") + t.Setenv(snowflakeenvs.Host, "") + t.Setenv(snowflakeenvs.Authenticator, "") + t.Setenv(snowflakeenvs.Passcode, "") + t.Setenv(snowflakeenvs.PasscodeInPassword, "false") + t.Setenv(snowflakeenvs.OktaUrl, "https://example-env.com") + t.Setenv(snowflakeenvs.LoginTimeout, "100") + t.Setenv(snowflakeenvs.RequestTimeout, "200") + t.Setenv(snowflakeenvs.JwtExpireTimeout, "300") + t.Setenv(snowflakeenvs.ClientTimeout, "400") + 
t.Setenv(snowflakeenvs.JwtClientTimeout, "500") + t.Setenv(snowflakeenvs.ExternalBrowserTimeout, "600") + t.Setenv(snowflakeenvs.InsecureMode, "false") + t.Setenv(snowflakeenvs.OcspFailOpen, "false") + t.Setenv(snowflakeenvs.KeepSessionAlive, "false") + t.Setenv(snowflakeenvs.DisableTelemetry, "false") + t.Setenv(snowflakeenvs.ClientRequestMfaToken, "false") + t.Setenv(snowflakeenvs.ClientStoreTemporaryCredential, "false") + t.Setenv(snowflakeenvs.DisableQueryContextCache, "false") + t.Setenv(snowflakeenvs.IncludeRetryReason, "false") + t.Setenv(snowflakeenvs.MaxRetryCount, "2") + t.Setenv(snowflakeenvs.DriverTracing, string(sdk.DriverLogLevelDebug)) + t.Setenv(snowflakeenvs.TmpDirectoryPath, "../") + t.Setenv(snowflakeenvs.DisableConsoleLogin, "false") + }, + Config: providerConfig(testprofiles.CompleteFieldsInvalid), + Check: func(s *terraform.State) error { + config := acc.TestAccProvider.Meta().(*internalprovider.Context).Client.GetConfig() + assert.Equal(t, &gosnowflake.Config{ + Account: account, + User: user, + Password: pass, + Warehouse: "SNOWFLAKE", + Role: "ACCOUNTADMIN", + ValidateDefaultParameters: gosnowflake.ConfigBoolFalse, + ClientIP: net.ParseIP("2.2.2.2"), + Protocol: "https", + Params: map[string]*string{ + "foo": sdk.Pointer("bar"), + }, + Host: fmt.Sprintf("%s.snowflakecomputing.com", account), + Port: 443, + Authenticator: gosnowflake.AuthTypeSnowflake, + PasscodeInPassword: false, + OktaURL: oktaUrlFromEnv, + LoginTimeout: 100 * time.Second, + RequestTimeout: 200 * time.Second, + JWTExpireTimeout: 300 * time.Second, + ClientTimeout: 400 * time.Second, + JWTClientTimeout: 500 * time.Second, + ExternalBrowserTimeout: 600 * time.Second, + MaxRetryCount: 2, + Application: "terraform-provider-snowflake", + InsecureMode: true, + OCSPFailOpen: gosnowflake.OCSPFailOpenFalse, + Token: "token", + KeepSessionAlive: true, + DisableTelemetry: true, + Tracing: string(sdk.DriverLogLevelDebug), + TmpDirPath: "../", + ClientRequestMfaToken: gosnowflake.ConfigBoolFalse, + ClientStoreTemporaryCredential: gosnowflake.ConfigBoolFalse, + DisableQueryContextCache: true, + IncludeRetryReason: gosnowflake.ConfigBoolFalse, + DisableConsoleLogin: gosnowflake.ConfigBoolFalse, + }, config) + assert.Equal(t, string(sdk.DriverLogLevelDebug), gosnowflake.GetLogger().GetLogLevel()) + + return nil + }, + }, + }, + }) +} + +func TestAcc_Provider_tfConfig(t *testing.T) { + t.Setenv(string(testenvs.ConfigureClientOnce), "") + + user := acc.DefaultConfig(t).User + pass := acc.DefaultConfig(t).Password + account := acc.DefaultConfig(t).Account + + accountParts := strings.SplitN(account, "-", 2) + orgName, accountName := accountParts[0], accountParts[1] + + oktaUrlFromTf, err := url.Parse("https://example-tf.com") + require.NoError(t, err) + + resource.Test(t, resource.TestCase{ + ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, + PreCheck: func() { + acc.TestAccPreCheck(t) + testenvs.AssertEnvNotSet(t, snowflakeenvs.User) + testenvs.AssertEnvNotSet(t, snowflakeenvs.Password) + }, + TerraformVersionChecks: []tfversion.TerraformVersionCheck{ + tfversion.RequireAbove(tfversion.Version1_5_0), + }, + Steps: []resource.TestStep{ + { + PreConfig: func() { + t.Setenv(snowflakeenvs.OrganizationName, "invalid") + t.Setenv(snowflakeenvs.AccountName, "invalid") + t.Setenv(snowflakeenvs.User, "invalid") + t.Setenv(snowflakeenvs.Password, "invalid") + t.Setenv(snowflakeenvs.Warehouse, "invalid") + t.Setenv(snowflakeenvs.Protocol, "invalid") + t.Setenv(snowflakeenvs.Port, "-1") + 
t.Setenv(snowflakeenvs.Token, "") + t.Setenv(snowflakeenvs.Role, "invalid") + t.Setenv(snowflakeenvs.ValidateDefaultParameters, "false") + t.Setenv(snowflakeenvs.ClientIp, "2.2.2.2") + t.Setenv(snowflakeenvs.Host, "") + t.Setenv(snowflakeenvs.Authenticator, "invalid") + t.Setenv(snowflakeenvs.Passcode, "") + t.Setenv(snowflakeenvs.PasscodeInPassword, "false") + t.Setenv(snowflakeenvs.OktaUrl, "https://example-env.com") + t.Setenv(snowflakeenvs.LoginTimeout, "100") + t.Setenv(snowflakeenvs.RequestTimeout, "200") + t.Setenv(snowflakeenvs.JwtExpireTimeout, "300") + t.Setenv(snowflakeenvs.ClientTimeout, "400") + t.Setenv(snowflakeenvs.JwtClientTimeout, "500") + t.Setenv(snowflakeenvs.ExternalBrowserTimeout, "600") + t.Setenv(snowflakeenvs.InsecureMode, "false") + t.Setenv(snowflakeenvs.OcspFailOpen, "false") + t.Setenv(snowflakeenvs.KeepSessionAlive, "false") + t.Setenv(snowflakeenvs.DisableTelemetry, "false") + t.Setenv(snowflakeenvs.ClientRequestMfaToken, "false") + t.Setenv(snowflakeenvs.ClientStoreTemporaryCredential, "false") + t.Setenv(snowflakeenvs.DisableQueryContextCache, "false") + t.Setenv(snowflakeenvs.IncludeRetryReason, "false") + t.Setenv(snowflakeenvs.MaxRetryCount, "2") + t.Setenv(snowflakeenvs.DriverTracing, string(sdk.DriverLogLevelDebug)) + t.Setenv(snowflakeenvs.TmpDirectoryPath, "../") + t.Setenv(snowflakeenvs.DisableConsoleLogin, "false") + }, + Config: providerConfigAllFields(testprofiles.CompleteFieldsInvalid, orgName, accountName, user, pass), + Check: func(s *terraform.State) error { + config := acc.TestAccProvider.Meta().(*internalprovider.Context).Client.GetConfig() + assert.Equal(t, &gosnowflake.Config{ + Account: account, + User: user, + Password: pass, + Warehouse: "SNOWFLAKE", + Role: "ACCOUNTADMIN", + ValidateDefaultParameters: gosnowflake.ConfigBoolTrue, + ClientIP: net.ParseIP("3.3.3.3"), + Protocol: "https", + Params: map[string]*string{ + "foo": sdk.Pointer("piyo"), + }, + Host: fmt.Sprintf("%s.snowflakecomputing.com", account), + Port: 443, + Authenticator: gosnowflake.AuthTypeSnowflake, + PasscodeInPassword: false, + OktaURL: oktaUrlFromTf, + LoginTimeout: 101 * time.Second, + RequestTimeout: 201 * time.Second, + JWTExpireTimeout: 301 * time.Second, + ClientTimeout: 401 * time.Second, + JWTClientTimeout: 501 * time.Second, + ExternalBrowserTimeout: 601 * time.Second, + MaxRetryCount: 3, + Application: "terraform-provider-snowflake", + InsecureMode: true, + OCSPFailOpen: gosnowflake.OCSPFailOpenTrue, + Token: "token", + KeepSessionAlive: true, + DisableTelemetry: true, + Tracing: string(sdk.DriverLogLevelInfo), + TmpDirPath: "../../", + ClientRequestMfaToken: gosnowflake.ConfigBoolTrue, + ClientStoreTemporaryCredential: gosnowflake.ConfigBoolTrue, + DisableQueryContextCache: true, + IncludeRetryReason: gosnowflake.ConfigBoolTrue, + DisableConsoleLogin: gosnowflake.ConfigBoolTrue, + }, config) + assert.Equal(t, string(sdk.DriverLogLevelInfo), gosnowflake.GetLogger().GetLogLevel()) + + return nil + }, + }, + }, + }) +} + func TestAcc_Provider_useNonExistentDefaultParams(t *testing.T) { t.Setenv(string(testenvs.ConfigureClientOnce), "") @@ -201,15 +504,15 @@ func TestAcc_Provider_triValueBoolean(t *testing.T) { }, Steps: []resource.TestStep{ { - ExternalProviders: map[string]resource.ExternalProvider{ - "snowflake": { - VersionConstraint: "=0.97.0", - Source: "Snowflake-Labs/snowflake", - }, - }, - Config: providerConfigWithClientStoreTemporaryCredential(testprofiles.Default, `true`), + PreConfig: func() { acc.SetV097CompatibleConfigPathEnv(t) }, + 
ExternalProviders: acc.ExternalProviderWithExactVersion("0.97.0"), + Config: providerConfigWithClientStoreTemporaryCredential(testprofiles.Default, `true`), }, { + // Use the default TOML config again. + PreConfig: func() { + t.Setenv(snowflakeenvs.ConfigPath, "") + }, ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, Config: providerConfigWithClientStoreTemporaryCredential(testprofiles.Default, `true`), }, @@ -221,6 +524,81 @@ func TestAcc_Provider_triValueBoolean(t *testing.T) { }) } +func TestAcc_Provider_sessionParameters(t *testing.T) { + t.Setenv(string(testenvs.ConfigureClientOnce), "") + + resource.Test(t, resource.TestCase{ + ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, + PreCheck: func() { + acc.TestAccPreCheck(t) + testenvs.AssertEnvNotSet(t, snowflakeenvs.User) + testenvs.AssertEnvNotSet(t, snowflakeenvs.Password) + }, + TerraformVersionChecks: []tfversion.TerraformVersionCheck{ + tfversion.RequireAbove(tfversion.Version1_5_0), + }, + Steps: []resource.TestStep{ + { + Config: providerWithParamsConfig(testprofiles.Default, 31337), + Check: resource.ComposeAggregateTestCheckFunc( + resource.TestCheckResourceAttr("snowflake_unsafe_execute.t", "query_results.#", "1"), + resource.TestCheckResourceAttr("snowflake_unsafe_execute.t", "query_results.0.value", "31337"), + ), + }, + }, + }) +} + +func TestAcc_Provider_JwtAuth(t *testing.T) { + t.Setenv(string(testenvs.ConfigureClientOnce), "") + + resource.Test(t, resource.TestCase{ + ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, + PreCheck: func() { + acc.TestAccPreCheck(t) + testenvs.AssertEnvNotSet(t, snowflakeenvs.User) + testenvs.AssertEnvNotSet(t, snowflakeenvs.Password) + }, + TerraformVersionChecks: []tfversion.TerraformVersionCheck{ + tfversion.RequireAbove(tfversion.Version1_5_0), + }, + Steps: []resource.TestStep{ + // authenticate with unencrypted private key + { + Config: providerConfigWithAuthenticator(testprofiles.JwtAuth, sdk.AuthenticationTypeJwt), + }, + // authenticate with unencrypted private key with a legacy authenticator value + // solves https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2983 + { + Config: providerConfigWithAuthenticator(testprofiles.JwtAuth, sdk.AuthenticationTypeJwtLegacy), + }, + // authenticate with encrypted private key + { + Config: providerConfigWithAuthenticator(testprofiles.EncryptedJwtAuth, sdk.AuthenticationTypeJwt), + }, + }, + }) +} + +func TestAcc_Provider_SnowflakeAuth(t *testing.T) { + t.Setenv(string(testenvs.ConfigureClientOnce), "") + + resource.Test(t, resource.TestCase{ + ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, + PreCheck: func() { + acc.TestAccPreCheck(t) + }, + TerraformVersionChecks: []tfversion.TerraformVersionCheck{ + tfversion.RequireAbove(tfversion.Version1_5_0), + }, + Steps: []resource.TestStep{ + { + Config: providerConfigWithAuthenticator(testprofiles.Default, sdk.AuthenticationTypeSnowflake), + }, + }, + }) +} + func TestAcc_Provider_invalidConfigurations(t *testing.T) { resource.Test(t, resource.TestCase{ ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, @@ -234,7 +612,7 @@ func TestAcc_Provider_invalidConfigurations(t *testing.T) { }, { Config: providerConfigWithProtocol(testprofiles.Default, "invalid"), - ExpectError: regexp.MustCompile("invalid protocol: INVALID"), + ExpectError: regexp.MustCompile("invalid protocol: invalid"), }, { Config: providerConfigWithPort(testprofiles.Default, 123456789), @@ -242,7 +620,7 @@ func TestAcc_Provider_invalidConfigurations(t 
*testing.T) { }, { Config: providerConfigWithAuthType(testprofiles.Default, "invalid"), - ExpectError: regexp.MustCompile("invalid authenticator type: INVALID"), + ExpectError: regexp.MustCompile("invalid authenticator type: invalid"), }, { Config: providerConfigWithOktaUrl(testprofiles.Default, "invalid"), @@ -256,12 +634,27 @@ func TestAcc_Provider_invalidConfigurations(t *testing.T) { Config: providerConfigWithTokenEndpoint(testprofiles.Default, "invalid"), ExpectError: regexp.MustCompile(`expected "token_endpoint" to have a host, got invalid`), }, + { + Config: providerConfigWithLogLevel(testprofiles.Default, "invalid"), + ExpectError: regexp.MustCompile(`invalid driver log level: invalid`), + }, + { + Config: providerConfig("non-existing"), + // .* is used to match the error message regarding of the home user location + ExpectError: regexp.MustCompile(`profile "non-existing" not found in file .*.snowflake/config`), + }, }, }) } -// TODO(SNOW-1754319): for JWT auth flow, check setting authenticator value as `SNOWFLAKE_JWT`. -// This will ensure https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2983 is solved. +func providerConfigWithAuthenticator(profile string, authenticator sdk.AuthenticationType) string { + return fmt.Sprintf(` +provider "snowflake" { + profile = "%[1]s" + authenticator = "%[2]s" +} +`, profile, authenticator) + datasourceConfig() +} func emptyProviderConfig() string { return ` @@ -374,6 +767,15 @@ provider "snowflake" { `, profile, tokenEndpoint) + datasourceConfig() } +func providerConfigWithLogLevel(profile, logLevel string) string { + return fmt.Sprintf(` +provider "snowflake" { + profile = "%[1]s" + driver_tracing = "%[2]s" +} +`, profile, logLevel) + datasourceConfig() +} + func providerConfigWithClientIp(profile, clientIp string) string { return fmt.Sprintf(` provider "snowflake" { @@ -402,9 +804,94 @@ provider "snowflake" { `, user, pass, profile) + datasourceConfig() } +func providerConfigWithNewAccountId(profile, orgName, accountName string) string { + return fmt.Sprintf(` +provider "snowflake" { + profile = "%[1]s" + account_name = "%[2]s" + organization_name = "%[3]s" +} +`, profile, accountName, orgName) + datasourceConfig() +} + +func providerConfigComplete(profile, user, password, orgName, accountName string) string { + return fmt.Sprintf(` +provider "snowflake" { + profile = "%[1]s" + user = "%[2]s" + password = "%[3]s" + organization_name = "%[4]s" + account_name = "%[5]s" + warehouse = "SNOWFLAKE" +} +`, profile, user, password, orgName, accountName) + datasourceConfig() +} + func datasourceConfig() string { return fmt.Sprintf(` data snowflake_database "t" { name = "%s" }`, acc.TestDatabaseName) } + +func providerConfigAllFields(profile, orgName, accountName, user, password string) string { + return fmt.Sprintf(` +provider "snowflake" { + profile = "%[1]s" + organization_name = "%[2]s" + account_name = "%[3]s" + user = "%[4]s" + password = "%[5]s" + warehouse = "SNOWFLAKE" + protocol = "https" + port = "443" + role = "ACCOUNTADMIN" + validate_default_parameters = true + client_ip = "3.3.3.3" + authenticator = "snowflake" + okta_url = "https://example-tf.com" + login_timeout = 101 + request_timeout = 201 + jwt_expire_timeout = 301 + client_timeout = 401 + jwt_client_timeout = 501 + external_browser_timeout = 601 + insecure_mode = true + ocsp_fail_open = true + keep_session_alive = true + disable_telemetry = true + client_request_mfa_token = true + client_store_temporary_credential = true + disable_query_context_cache = true + 
include_retry_reason = true + max_retry_count = 3 + driver_tracing = "info" + tmp_directory_path = "../../" + disable_console_login = true + params = { + foo = "piyo" + } +} +`, profile, orgName, accountName, user, password) + datasourceConfig() +} + +// TODO(SNOW-1348325): Use parameter data source with `IN SESSION` filtering. +func providerWithParamsConfig(profile string, statementTimeoutInSeconds int) string { + return fmt.Sprintf(` +provider "snowflake" { + profile = "%[1]s" + params = { + statement_timeout_in_seconds = %[2]d + } +} +`, profile, statementTimeoutInSeconds) + unsafeExecuteShowSessionParameter() +} + +func unsafeExecuteShowSessionParameter() string { + return ` +resource snowflake_unsafe_execute "t" { + execute = "SELECT 1" + query = "SHOW PARAMETERS LIKE 'STATEMENT_TIMEOUT_IN_SECONDS' IN SESSION" + revert = "SELECT 1" +}` +} diff --git a/pkg/provider/provider_helpers.go b/pkg/provider/provider_helpers.go index 0e8088cae4..dd6aa9ab92 100644 --- a/pkg/provider/provider_helpers.go +++ b/pkg/provider/provider_helpers.go @@ -3,7 +3,6 @@ package provider import ( "crypto/rsa" "encoding/json" - "encoding/pem" "errors" "fmt" "io" @@ -12,63 +11,21 @@ import ( "os" "strconv" "strings" + "time" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/internal/provider" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk" + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" "github.com/mitchellh/go-homedir" "github.com/snowflakedb/gosnowflake" - "github.com/youmark/pkcs8" - "golang.org/x/crypto/ssh" ) -type authenticationType string - -const ( - authenticationTypeSnowflake authenticationType = "SNOWFLAKE" - authenticationTypeOauth authenticationType = "OAUTH" - authenticationTypeExternalBrowser authenticationType = "EXTERNALBROWSER" - authenticationTypeOkta authenticationType = "OKTA" - authenticationTypeJwtLegacy authenticationType = "JWT" - authenticationTypeJwt authenticationType = "SNOWFLAKE_JWT" - authenticationTypeTokenAccessor authenticationType = "TOKENACCESSOR" - authenticationTypeUsernamePasswordMfa authenticationType = "USERNAMEPASSWORDMFA" -) - -var allAuthenticationTypes = []authenticationType{ - authenticationTypeSnowflake, - authenticationTypeOauth, - authenticationTypeExternalBrowser, - authenticationTypeOkta, - authenticationTypeJwt, - authenticationTypeTokenAccessor, - authenticationTypeUsernamePasswordMfa, -} - -func toAuthenticatorType(s string) (gosnowflake.AuthType, error) { - s = strings.ToUpper(s) - switch s { - case string(authenticationTypeSnowflake): - return gosnowflake.AuthTypeSnowflake, nil - case string(authenticationTypeOauth): - return gosnowflake.AuthTypeOAuth, nil - case string(authenticationTypeExternalBrowser): - return gosnowflake.AuthTypeExternalBrowser, nil - case string(authenticationTypeOkta): - return gosnowflake.AuthTypeOkta, nil - case string(authenticationTypeJwt), string(authenticationTypeJwtLegacy): - return gosnowflake.AuthTypeJwt, nil - case string(authenticationTypeTokenAccessor): - return gosnowflake.AuthTypeTokenAccessor, nil - case string(authenticationTypeUsernamePasswordMfa): - return gosnowflake.AuthTypeUsernamePasswordMFA, nil - default: - return gosnowflake.AuthType(0), fmt.Errorf("invalid authenticator type: %s", s) - } -} - type protocol string const ( - protocolHttp protocol = "HTTP" - protocolHttps protocol = "HTTPS" + // these values are lower case on purpose to match gosnowflake case + protocolHttp protocol = "http" + protocolHttps protocol = "https" ) var allProtocols = []protocol{ @@ 
-77,12 +34,11 @@ var allProtocols = []protocol{ } func toProtocol(s string) (protocol, error) { - s = strings.ToUpper(s) - switch s { - case string(protocolHttp): - return protocolHttp, nil - case string(protocolHttps): - return protocolHttps, nil + lowerCase := strings.ToLower(s) + switch lowerCase { + case string(protocolHttp), + string(protocolHttps): + return protocol(lowerCase), nil default: return "", fmt.Errorf("invalid protocol: %s", s) } @@ -100,7 +56,7 @@ func getPrivateKey(privateKeyPath, privateKeyString, privateKeyPassphrase string return nil, fmt.Errorf("private Key file could not be read err = %w", err) } } - return parsePrivateKey(privateKeyBytes, []byte(privateKeyPassphrase)) + return sdk.ParsePrivateKey(privateKeyBytes, []byte(privateKeyPassphrase)) } func readFile(privateKeyPath string) ([]byte, error) { @@ -121,35 +77,6 @@ func readFile(privateKeyPath string) ([]byte, error) { return privateKeyBytes, nil } -func parsePrivateKey(privateKeyBytes []byte, passhrase []byte) (*rsa.PrivateKey, error) { - privateKeyBlock, _ := pem.Decode(privateKeyBytes) - if privateKeyBlock == nil { - return nil, fmt.Errorf("could not parse private key, key is not in PEM format") - } - - if privateKeyBlock.Type == "ENCRYPTED PRIVATE KEY" { - if len(passhrase) == 0 { - return nil, fmt.Errorf("private key requires a passphrase, but private_key_passphrase was not supplied") - } - privateKey, err := pkcs8.ParsePKCS8PrivateKeyRSA(privateKeyBlock.Bytes, passhrase) - if err != nil { - return nil, fmt.Errorf("could not parse encrypted private key with passphrase, only ciphers aes-128-cbc, aes-128-gcm, aes-192-cbc, aes-192-gcm, aes-256-cbc, aes-256-gcm, and des-ede3-cbc are supported err = %w", err) - } - return privateKey, nil - } - - privateKey, err := ssh.ParseRawPrivateKey(privateKeyBytes) - if err != nil { - return nil, fmt.Errorf("could not parse private key err = %w", err) - } - - rsaPrivateKey, ok := privateKey.(*rsa.PrivateKey) - if !ok { - return nil, errors.New("privateKey not of type RSA") - } - return rsaPrivateKey, nil -} - type GetRefreshTokenResponseBody struct { AccessToken string `json:"access_token"` TokenType string `json:"token_type"` @@ -201,3 +128,47 @@ func GetAccessTokenWithRefreshToken( func envNameFieldDescription(description, envName string) string { return fmt.Sprintf("%s Can also be sourced from the `%s` environment variable.", description, envName) } + +// TODO(SNOW-1787926): reuse these handlers with the ones in resources +func handleStringField(d *schema.ResourceData, key string, field *string) error { + if v, ok := d.GetOk(key); ok { + *field = v.(string) + } + return nil +} + +func handleBoolField(d *schema.ResourceData, key string, field *bool) error { + if v, ok := d.GetOk(key); ok { + *field = v.(bool) + } + return nil +} + +func handleDurationInSecondsAttribute(d *schema.ResourceData, key string, field *time.Duration) error { + if v, ok := d.GetOk(key); ok { + *field = time.Second * time.Duration(int64(v.(int))) + } + return nil +} + +func handleIntAttribute(d *schema.ResourceData, key string, field *int) error { + if v, ok := d.GetOk(key); ok { + *field = v.(int) + } + return nil +} + +func handleBooleanStringAttribute(d *schema.ResourceData, key string, field *gosnowflake.ConfigBool) error { + if v := d.Get(key).(string); v != provider.BooleanDefault { + parsed, err := provider.BooleanStringToBool(v) + if err != nil { + return err + } + if parsed { + *field = gosnowflake.ConfigBoolTrue + } else { + *field = gosnowflake.ConfigBoolFalse + } + } + return nil +} 
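
Reviewer note (not part of this change): the new handle* helpers above centralize the repetitive GetOk handling that was previously inlined in ConfigureProvider. As a minimal sketch of how the tri-valued boolean mapping could be unit-tested alongside the remaining helper tests — assuming the helper stays unexported in package provider; the attribute name reuses an existing provider field, but the test itself is illustrative only:

package provider

import (
	"testing"

	"github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema"
	"github.com/snowflakedb/gosnowflake"
	"github.com/stretchr/testify/require"
)

// Sketch: an explicit "true"/"false" is parsed and mapped onto gosnowflake.ConfigBool,
// while the provider's "default" sentinel would leave the driver field untouched.
func Test_handleBooleanStringAttribute_sketch(t *testing.T) {
	s := map[string]*schema.Schema{
		"client_request_mfa_token": {Type: schema.TypeString, Optional: true},
	}
	d := schema.TestResourceDataRaw(t, s, map[string]interface{}{
		"client_request_mfa_token": "true",
	})

	var field gosnowflake.ConfigBool
	require.NoError(t, handleBooleanStringAttribute(d, "client_request_mfa_token", &field))
	require.Equal(t, gosnowflake.ConfigBoolTrue, field)
}
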
diff --git a/pkg/provider/provider_helpers_test.go b/pkg/provider/provider_helpers_test.go index eb9e2a8caf..64325e2e3f 100644 --- a/pkg/provider/provider_helpers_test.go +++ b/pkg/provider/provider_helpers_test.go @@ -3,52 +3,9 @@ package provider import ( "testing" - "github.com/snowflakedb/gosnowflake" "github.com/stretchr/testify/require" ) -func Test_Provider_toAuthenticationType(t *testing.T) { - type test struct { - input string - want gosnowflake.AuthType - } - - valid := []test{ - // Case insensitive. - {input: "snowflake", want: gosnowflake.AuthTypeSnowflake}, - - // Supported Values. - {input: "SNOWFLAKE", want: gosnowflake.AuthTypeSnowflake}, - {input: "OAUTH", want: gosnowflake.AuthTypeOAuth}, - {input: "EXTERNALBROWSER", want: gosnowflake.AuthTypeExternalBrowser}, - {input: "OKTA", want: gosnowflake.AuthTypeOkta}, - {input: "JWT", want: gosnowflake.AuthTypeJwt}, - {input: "SNOWFLAKE_JWT", want: gosnowflake.AuthTypeJwt}, - {input: "TOKENACCESSOR", want: gosnowflake.AuthTypeTokenAccessor}, - {input: "USERNAMEPASSWORDMFA", want: gosnowflake.AuthTypeUsernamePasswordMFA}, - } - - invalid := []test{ - {input: ""}, - {input: "foo"}, - } - - for _, tc := range valid { - t.Run(tc.input, func(t *testing.T) { - got, err := toAuthenticatorType(tc.input) - require.NoError(t, err) - require.Equal(t, tc.want, got) - }) - } - - for _, tc := range invalid { - t.Run(tc.input, func(t *testing.T) { - _, err := toAuthenticatorType(tc.input) - require.Error(t, err) - }) - } -} - func Test_Provider_toProtocol(t *testing.T) { type test struct { input string diff --git a/pkg/provider/resources/resources.go b/pkg/provider/resources/resources.go index 2076ccbe21..05757d5967 100644 --- a/pkg/provider/resources/resources.go +++ b/pkg/provider/resources/resources.go @@ -35,6 +35,7 @@ const ( OauthIntegrationForPartnerApplications resource = "snowflake_oauth_integration_for_partner_applications" PasswordPolicy resource = "snowflake_password_policy" Pipe resource = "snowflake_pipe" + PrimaryConnection resource = "snowflake_primary_connection" Procedure resource = "snowflake_procedure" ResourceMonitor resource = "snowflake_resource_monitor" Role resource = "snowflake_role" @@ -42,6 +43,7 @@ const ( Saml2SecurityIntegration resource = "snowflake_saml2_integration" Schema resource = "snowflake_schema" ScimSecurityIntegration resource = "snowflake_scim_integration" + SecondaryConnection resource = "snowflake_secondary_connection" SecondaryDatabase resource = "snowflake_secondary_database" SecretWithAuthorizationCodeGrant resource = "snowflake_secret_with_authorization_code_grant" SecretWithBasicAuthentication resource = "snowflake_secret_with_basic_authentication" @@ -61,6 +63,7 @@ const ( Streamlit resource = "snowflake_streamlit" Table resource = "snowflake_table" Tag resource = "snowflake_tag" + TagAssociation resource = "snowflake_tag_association" Task resource = "snowflake_task" User resource = "snowflake_user" View resource = "snowflake_view" diff --git a/pkg/provider/testdata/config.toml b/pkg/provider/testdata/config.toml new file mode 100644 index 0000000000..1396222be6 --- /dev/null +++ b/pkg/provider/testdata/config.toml @@ -0,0 +1,42 @@ +[basic_fields] +account = 'account' + +[complete_fields] +account='account' +accountname='accountname' +organizationname='organizationname' +user='user' +username='username' +password='password' +host='host' +warehouse='warehouse' +role='role' +clientip='clientip' +protocol='protocol' +passcode='passcode' +port=1000 +passcodeinpassword=true +oktaurl='oktaurl' 
+clienttimeout=10 +jwtclienttimeout=20 +logintimeout=30 +requesttimeout=40 +jwtexpiretimeout=50 +externalbrowsertimeout=60 +maxretrycount=70 +authenticator='snowflake' +insecuremode=true +ocspfailopen=true +token='token' +keepsessionalive=true +privatekey='privatekey' +privatekeypassphrase='privatekeypassphrase' +disabletelemetry=true +validatedefaultparameters=true +clientrequestmfatoken=true +clientstoretemporarycredential=true +tracing='tracing' +tmpdirpath='.' +disablequerycontextcache=true +includeretryreason=true +disableconsolelogin=true diff --git a/pkg/resources/account_role_acceptance_test.go b/pkg/resources/account_role_acceptance_test.go index cda06bec4d..7fffe1456b 100644 --- a/pkg/resources/account_role_acceptance_test.go +++ b/pkg/resources/account_role_acceptance_test.go @@ -217,6 +217,7 @@ func TestAcc_AccountRole_migrateFromV0941_ensureSmoothUpgradeWithNewResourceId(t CheckDestroy: acc.CheckDestroy(t, resources.AccountRole), Steps: []resource.TestStep{ { + PreConfig: func() { acc.SetV097CompatibleConfigPathEnv(t) }, ExternalProviders: map[string]resource.ExternalProvider{ "snowflake": { VersionConstraint: "=0.94.1", @@ -229,6 +230,7 @@ func TestAcc_AccountRole_migrateFromV0941_ensureSmoothUpgradeWithNewResourceId(t ), }, { + PreConfig: func() { acc.UnsetConfigPathEnv(t) }, ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, Config: accountRoleBasicConfig(id.Name(), comment), Check: resource.ComposeAggregateTestCheckFunc( @@ -252,6 +254,7 @@ func TestAcc_AccountRole_WithQuotedName(t *testing.T) { CheckDestroy: acc.CheckDestroy(t, resources.AccountRole), Steps: []resource.TestStep{ { + PreConfig: func() { acc.SetV097CompatibleConfigPathEnv(t) }, ExternalProviders: map[string]resource.ExternalProvider{ "snowflake": { VersionConstraint: "=0.94.1", @@ -266,6 +269,7 @@ func TestAcc_AccountRole_WithQuotedName(t *testing.T) { ), }, { + PreConfig: func() { acc.UnsetConfigPathEnv(t) }, ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, Config: accountRoleBasicConfig(quotedId, comment), ConfigPlanChecks: resource.ConfigPlanChecks{ diff --git a/pkg/resources/alert_acceptance_test.go b/pkg/resources/alert_acceptance_test.go index 7dd9853e6e..f021233f08 100644 --- a/pkg/resources/alert_acceptance_test.go +++ b/pkg/resources/alert_acceptance_test.go @@ -208,7 +208,6 @@ resource "snowflake_alert" "test_alert" { // Can't reproduce the issue, leaving the test for now. 
func TestAcc_Alert_Issue3117(t *testing.T) { id := acc.TestClient().Ids.RandomSchemaObjectIdentifierWithPrefix("small caps with spaces") - resource.Test(t, resource.TestCase{ PreCheck: func() { acc.TestAccPreCheck(t) }, TerraformVersionChecks: []tfversion.TerraformVersionCheck{ @@ -217,6 +216,7 @@ func TestAcc_Alert_Issue3117(t *testing.T) { CheckDestroy: acc.CheckDestroy(t, resources.Alert), Steps: []resource.TestStep{ { + PreConfig: func() { acc.SetV097CompatibleConfigPathEnv(t) }, ExternalProviders: map[string]resource.ExternalProvider{ "snowflake": { VersionConstraint: "=0.92.0", @@ -229,6 +229,7 @@ func TestAcc_Alert_Issue3117(t *testing.T) { ), }, { + PreConfig: func() { acc.UnsetConfigPathEnv(t) }, ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, Config: alertIssue3117Config(id, acc.TestClient().Ids.WarehouseId(), "test_alert"), Check: resource.ComposeTestCheckFunc( diff --git a/pkg/resources/api_authentication_integration_with_authorization_code_grant.go b/pkg/resources/api_authentication_integration_with_authorization_code_grant.go index c2086e4608..c261d19305 100644 --- a/pkg/resources/api_authentication_integration_with_authorization_code_grant.go +++ b/pkg/resources/api_authentication_integration_with_authorization_code_grant.go @@ -31,7 +31,7 @@ var apiAuthAuthorizationCodeGrantSchema = func() map[string]*schema.Schema { Description: "Specifies a list of scopes to use when making a request from the OAuth by a role with USAGE on the integration during the OAuth client credentials flow.", }, } - return helpers.MergeMaps(apiAuthCommonSchema, apiAuthAuthorizationCodeGrant) + return collections.MergeMaps(apiAuthCommonSchema, apiAuthAuthorizationCodeGrant) }() func ApiAuthenticationIntegrationWithAuthorizationCodeGrant() *schema.Resource { diff --git a/pkg/resources/api_authentication_integration_with_authorization_code_grant_acceptance_test.go b/pkg/resources/api_authentication_integration_with_authorization_code_grant_acceptance_test.go index e8ee7e82bb..d81c86eac6 100644 --- a/pkg/resources/api_authentication_integration_with_authorization_code_grant_acceptance_test.go +++ b/pkg/resources/api_authentication_integration_with_authorization_code_grant_acceptance_test.go @@ -271,6 +271,7 @@ func TestAcc_ApiAuthenticationIntegrationWithAuthorizationCodeGrant_migrateFromV CheckDestroy: acc.CheckDestroy(t, resources.ApiAuthenticationIntegrationWithAuthorizationCodeGrant), Steps: []resource.TestStep{ { + PreConfig: func() { acc.SetV097CompatibleConfigPathEnv(t) }, ExternalProviders: map[string]resource.ExternalProvider{ "snowflake": { VersionConstraint: "=0.94.1", @@ -283,6 +284,7 @@ func TestAcc_ApiAuthenticationIntegrationWithAuthorizationCodeGrant_migrateFromV ), }, { + PreConfig: func() { acc.UnsetConfigPathEnv(t) }, ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, Config: apiAuthenticationIntegrationWithAuthorizationCodeGrantBasicConfig(id.Name()), Check: resource.ComposeAggregateTestCheckFunc( @@ -305,6 +307,7 @@ func TestAcc_ApiAuthenticationIntegrationWithAuthorizationCodeGrant_WithQuotedNa CheckDestroy: acc.CheckDestroy(t, resources.ApiAuthenticationIntegrationWithAuthorizationCodeGrant), Steps: []resource.TestStep{ { + PreConfig: func() { acc.SetV097CompatibleConfigPathEnv(t) }, ExternalProviders: map[string]resource.ExternalProvider{ "snowflake": { VersionConstraint: "=0.94.1", @@ -319,6 +322,7 @@ func TestAcc_ApiAuthenticationIntegrationWithAuthorizationCodeGrant_WithQuotedNa ), }, { + PreConfig: func() { acc.UnsetConfigPathEnv(t) }, 
ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, Config: apiAuthenticationIntegrationWithAuthorizationCodeGrantBasicConfig(quotedId), ConfigPlanChecks: resource.ConfigPlanChecks{ diff --git a/pkg/resources/api_authentication_integration_with_client_credentials.go b/pkg/resources/api_authentication_integration_with_client_credentials.go index c98039dcfc..51c2209c3e 100644 --- a/pkg/resources/api_authentication_integration_with_client_credentials.go +++ b/pkg/resources/api_authentication_integration_with_client_credentials.go @@ -26,7 +26,7 @@ var apiAuthClientCredentialsSchema = func() map[string]*schema.Schema { Description: "Specifies a list of scopes to use when making a request from the OAuth by a role with USAGE on the integration during the OAuth client credentials flow.", }, } - return helpers.MergeMaps(apiAuthCommonSchema, apiAuthClientCredentials) + return collections.MergeMaps(apiAuthCommonSchema, apiAuthClientCredentials) }() func ApiAuthenticationIntegrationWithClientCredentials() *schema.Resource { diff --git a/pkg/resources/api_authentication_integration_with_client_credentials_acceptance_test.go b/pkg/resources/api_authentication_integration_with_client_credentials_acceptance_test.go index 799655a517..d221bacd85 100644 --- a/pkg/resources/api_authentication_integration_with_client_credentials_acceptance_test.go +++ b/pkg/resources/api_authentication_integration_with_client_credentials_acceptance_test.go @@ -264,6 +264,7 @@ func TestAcc_ApiAuthenticationIntegrationWithClientCredentials_migrateFromV0941_ CheckDestroy: acc.CheckDestroy(t, resources.ApiAuthenticationIntegrationWithClientCredentials), Steps: []resource.TestStep{ { + PreConfig: func() { acc.SetV097CompatibleConfigPathEnv(t) }, ExternalProviders: map[string]resource.ExternalProvider{ "snowflake": { VersionConstraint: "=0.94.1", @@ -276,6 +277,7 @@ func TestAcc_ApiAuthenticationIntegrationWithClientCredentials_migrateFromV0941_ ), }, { + PreConfig: func() { acc.UnsetConfigPathEnv(t) }, ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, Config: apiAuthenticationIntegrationWithClientCredentialsBasicConfig(id.Name()), Check: resource.ComposeAggregateTestCheckFunc( @@ -298,6 +300,7 @@ func TestAcc_ApiAuthenticationIntegrationWithClientCredentials_WithQuotedName(t CheckDestroy: acc.CheckDestroy(t, resources.ApiAuthenticationIntegrationWithClientCredentials), Steps: []resource.TestStep{ { + PreConfig: func() { acc.SetV097CompatibleConfigPathEnv(t) }, ExternalProviders: map[string]resource.ExternalProvider{ "snowflake": { VersionConstraint: "=0.94.1", @@ -312,6 +315,7 @@ func TestAcc_ApiAuthenticationIntegrationWithClientCredentials_WithQuotedName(t ), }, { + PreConfig: func() { acc.UnsetConfigPathEnv(t) }, ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, Config: apiAuthenticationIntegrationWithClientCredentialsBasicConfig(quotedId), ConfigPlanChecks: resource.ConfigPlanChecks{ diff --git a/pkg/resources/api_authentication_integration_with_jwt_bearer.go b/pkg/resources/api_authentication_integration_with_jwt_bearer.go index b3a508cb46..22703fe675 100644 --- a/pkg/resources/api_authentication_integration_with_jwt_bearer.go +++ b/pkg/resources/api_authentication_integration_with_jwt_bearer.go @@ -29,7 +29,7 @@ var apiAuthJwtBearerSchema = func() map[string]*schema.Schema { Required: true, }, } - return helpers.MergeMaps(apiAuthCommonSchema, apiAuthJwtBearer) + return collections.MergeMaps(apiAuthCommonSchema, apiAuthJwtBearer) }() func ApiAuthenticationIntegrationWithJwtBearer() 
*schema.Resource { diff --git a/pkg/resources/api_authentication_integration_with_jwt_bearer_acceptance_test.go b/pkg/resources/api_authentication_integration_with_jwt_bearer_acceptance_test.go index a9ce80b0f4..a653f71970 100644 --- a/pkg/resources/api_authentication_integration_with_jwt_bearer_acceptance_test.go +++ b/pkg/resources/api_authentication_integration_with_jwt_bearer_acceptance_test.go @@ -232,6 +232,7 @@ func TestAcc_ApiAuthenticationIntegrationWithJwtBearer_migrateFromV0941_ensureSm CheckDestroy: acc.CheckDestroy(t, resources.ApiAuthenticationIntegrationWithJwtBearer), Steps: []resource.TestStep{ { + PreConfig: func() { acc.SetV097CompatibleConfigPathEnv(t) }, ExternalProviders: map[string]resource.ExternalProvider{ "snowflake": { VersionConstraint: "=0.94.1", @@ -244,6 +245,7 @@ func TestAcc_ApiAuthenticationIntegrationWithJwtBearer_migrateFromV0941_ensureSm ), }, { + PreConfig: func() { acc.UnsetConfigPathEnv(t) }, ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, Config: apiAuthenticationIntegrationWithJwtBearerBasicConfig(id.Name()), Check: resource.ComposeAggregateTestCheckFunc( @@ -268,6 +270,7 @@ func TestAcc_ApiAuthenticationIntegrationWithJwtBearer_IdentifierQuotingDiffSupp CheckDestroy: acc.CheckDestroy(t, resources.ApiAuthenticationIntegrationWithJwtBearer), Steps: []resource.TestStep{ { + PreConfig: func() { acc.SetV097CompatibleConfigPathEnv(t) }, ExternalProviders: map[string]resource.ExternalProvider{ "snowflake": { VersionConstraint: "=0.94.1", @@ -282,6 +285,7 @@ func TestAcc_ApiAuthenticationIntegrationWithJwtBearer_IdentifierQuotingDiffSupp ), }, { + PreConfig: func() { acc.UnsetConfigPathEnv(t) }, ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, Config: apiAuthenticationIntegrationWithJwtBearerBasicConfig(quotedId), ConfigPlanChecks: resource.ConfigPlanChecks{ diff --git a/pkg/resources/custom_diffs.go b/pkg/resources/custom_diffs.go index 5d2ac7de3c..872c7ed24f 100644 --- a/pkg/resources/custom_diffs.go +++ b/pkg/resources/custom_diffs.go @@ -270,3 +270,17 @@ func RecreateWhenStreamIsStale() schema.CustomizeDiffFunc { return nil } } + +// RecreateWhenResourceBoolFieldChangedExternally recreates a resource when wantValue is different than value in boolField. 
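
Reviewer note (not part of this change): a hypothetical wiring of the new custom diff, mirroring the is_primary field exercised in the unit test below; the resource skeleton and function name are illustrative only and do not appear in this change:

package resources

import (
	"github.com/hashicorp/terraform-plugin-sdk/v2/helper/customdiff"
	"github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema"
)

// Sketch: a resource that must stay primary plans a ForceNew replacement whenever
// the refreshed is_primary value read back from Snowflake is no longer true.
func primaryConnectionSketch() *schema.Resource {
	return &schema.Resource{
		Schema: map[string]*schema.Schema{
			"is_primary": {Type: schema.TypeBool, Computed: true},
		},
		CustomizeDiff: customdiff.All(
			RecreateWhenResourceBoolFieldChangedExternally("is_primary", true),
		),
	}
}
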
+func RecreateWhenResourceBoolFieldChangedExternally(boolField string, wantValue bool) schema.CustomizeDiffFunc { + return func(_ context.Context, diff *schema.ResourceDiff, _ interface{}) error { + if n := diff.Get(boolField); n != nil { + logging.DebugLogger.Printf("[DEBUG] new external value for %v: %v\n", boolField, n.(bool)) + + if n.(bool) != wantValue { + return errors.Join(diff.SetNew(boolField, wantValue), diff.ForceNew(boolField)) + } + } + return nil + } +} diff --git a/pkg/resources/custom_diffs_test.go b/pkg/resources/custom_diffs_test.go index 8942eca24c..07c104de63 100644 --- a/pkg/resources/custom_diffs_test.go +++ b/pkg/resources/custom_diffs_test.go @@ -5,12 +5,11 @@ import ( "strings" "testing" - "github.com/hashicorp/terraform-plugin-sdk/v2/helper/customdiff" - "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/internal/provider" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/resources" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk" "github.com/hashicorp/go-cty/cty" + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/customdiff" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" "github.com/hashicorp/terraform-plugin-sdk/v2/terraform" "github.com/stretchr/testify/assert" @@ -1082,3 +1081,66 @@ func Test_RecreateWhenSecretTypeChangedExternallyForOAuth2(t *testing.T) { }) } } + +func Test_RecreateWhenResourceBoolFieldChangedExternally(t *testing.T) { + tests := []struct { + name string + isPrimary bool + stateValue map[string]string + wantForceNew bool + }{ + { + name: "changed is_primary from false to true", + isPrimary: false, + stateValue: map[string]string{ + "is_primary": "true", + }, + wantForceNew: true, + }, + { + name: "changed is_primary from true to false", + isPrimary: true, + stateValue: map[string]string{ + "is_primary": "false", + }, + wantForceNew: true, + }, + { + name: "no change in is_primary - true to true", + isPrimary: true, + stateValue: map[string]string{ + "is_primary": "true", + }, + wantForceNew: false, + }, + { + name: "no change in is_primary - false to false", + isPrimary: false, + stateValue: map[string]string{ + "is_primary": "false", + }, + wantForceNew: false, + }, + } + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + customDiff := resources.RecreateWhenResourceBoolFieldChangedExternally("is_primary", tt.isPrimary) + testProvider := createProviderWithCustomSchemaAndCustomDiff(t, + map[string]*schema.Schema{ + "is_primary": { + Type: schema.TypeBool, + Computed: true, + }, + }, + customDiff) + diff := calculateDiffFromAttributes( + t, + testProvider, + tt.stateValue, + map[string]any{}, + ) + assert.Equal(t, tt.wantForceNew, diff.RequiresNew()) + }) + } +} diff --git a/pkg/resources/database.go b/pkg/resources/database.go index 9330dc8290..f9de40def7 100644 --- a/pkg/resources/database.go +++ b/pkg/resources/database.go @@ -8,6 +8,7 @@ import ( "strings" "time" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/internal/collections" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/internal/util" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/schemas" @@ -95,7 +96,7 @@ func Database() *schema.Resource { DeleteContext: DeleteDatabase, Description: "Represents a standard database. 
If replication configuration is specified, the database is promoted to serve as a primary database for replication.", - Schema: helpers.MergeMaps(databaseSchema, databaseParametersSchema), + Schema: collections.MergeMaps(databaseSchema, databaseParametersSchema), Importer: &schema.ResourceImporter{ StateContext: ImportName[sdk.AccountObjectIdentifier], }, diff --git a/pkg/resources/database_acceptance_test.go b/pkg/resources/database_acceptance_test.go index a1df0a1266..d46f51d7c5 100644 --- a/pkg/resources/database_acceptance_test.go +++ b/pkg/resources/database_acceptance_test.go @@ -980,6 +980,7 @@ func TestAcc_Database_UpgradeWithTheSameFieldsAsInTheOldOne(t *testing.T) { CheckDestroy: acc.CheckDestroy(t, resources.Database), Steps: []resource.TestStep{ { + PreConfig: func() { acc.SetV097CompatibleConfigPathEnv(t) }, ExternalProviders: map[string]resource.ExternalProvider{ "snowflake": { VersionConstraint: "=0.92.0", @@ -1045,6 +1046,7 @@ func TestAcc_Database_UpgradeWithDataRetentionSet(t *testing.T) { CheckDestroy: acc.CheckDestroy(t, resources.Database), Steps: []resource.TestStep{ { + PreConfig: func() { acc.SetV097CompatibleConfigPathEnv(t) }, ExternalProviders: map[string]resource.ExternalProvider{ "snowflake": { VersionConstraint: "=0.92.0", @@ -1108,6 +1110,7 @@ func TestAcc_Database_WithReplication(t *testing.T) { CheckDestroy: acc.CheckDestroy(t, resources.Database), Steps: []resource.TestStep{ { + PreConfig: func() { acc.SetV097CompatibleConfigPathEnv(t) }, ExternalProviders: map[string]resource.ExternalProvider{ "snowflake": { VersionConstraint: "=0.92.0", @@ -1125,6 +1128,7 @@ func TestAcc_Database_WithReplication(t *testing.T) { ), }, { + PreConfig: func() { acc.UnsetConfigPathEnv(t) }, ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, Config: databaseStateUpgraderWithReplicationNew(id, secondaryAccountIdentifier), ConfigPlanChecks: resource.ConfigPlanChecks{ @@ -1267,6 +1271,7 @@ func TestAcc_Database_migrateFromV0941_ensureSmoothUpgradeWithNewResourceId(t *t CheckDestroy: acc.CheckDestroy(t, resources.Database), Steps: []resource.TestStep{ { + PreConfig: func() { acc.SetV097CompatibleConfigPathEnv(t) }, ExternalProviders: map[string]resource.ExternalProvider{ "snowflake": { VersionConstraint: "=0.94.1", @@ -1279,6 +1284,7 @@ func TestAcc_Database_migrateFromV0941_ensureSmoothUpgradeWithNewResourceId(t *t ), }, { + PreConfig: func() { acc.UnsetConfigPathEnv(t) }, ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, Config: databaseConfigBasic(id.Name()), Check: resource.ComposeAggregateTestCheckFunc( @@ -1315,6 +1321,7 @@ func TestAcc_Database_IdentifierQuotingDiffSuppression(t *testing.T) { CheckDestroy: acc.CheckDestroy(t, resources.Database), Steps: []resource.TestStep{ { + PreConfig: func() { acc.SetV097CompatibleConfigPathEnv(t) }, ExternalProviders: map[string]resource.ExternalProvider{ "snowflake": { VersionConstraint: "=0.94.1", @@ -1331,6 +1338,7 @@ func TestAcc_Database_IdentifierQuotingDiffSuppression(t *testing.T) { ), }, { + PreConfig: func() { acc.UnsetConfigPathEnv(t) }, ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, Config: databaseConfigBasicWithExternalVolumeAndCatalog(quotedId, quotedExternalVolumeId, quotedCatalogId), ConfigPlanChecks: resource.ConfigPlanChecks{ diff --git a/pkg/resources/database_role_acceptance_test.go b/pkg/resources/database_role_acceptance_test.go index 1d3552a86c..45ab1ddeac 100644 --- a/pkg/resources/database_role_acceptance_test.go +++ b/pkg/resources/database_role_acceptance_test.go @@ 
-161,6 +161,7 @@ func TestAcc_DatabaseRole_migrateFromV0941_ensureSmoothUpgradeWithNewResourceId( }, Steps: []resource.TestStep{ { + PreConfig: func() { acc.SetV097CompatibleConfigPathEnv(t) }, ExternalProviders: map[string]resource.ExternalProvider{ "snowflake": { VersionConstraint: "=0.94.1", @@ -173,6 +174,7 @@ func TestAcc_DatabaseRole_migrateFromV0941_ensureSmoothUpgradeWithNewResourceId( ), }, { + PreConfig: func() { acc.UnsetConfigPathEnv(t) }, ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, Config: config.FromModel(t, databaseRoleModelWithComment), ConfigPlanChecks: resource.ConfigPlanChecks{ @@ -204,6 +206,7 @@ func TestAcc_DatabaseRole_IdentifierQuotingDiffSuppression(t *testing.T) { }, Steps: []resource.TestStep{ { + PreConfig: func() { acc.SetV097CompatibleConfigPathEnv(t) }, ExternalProviders: map[string]resource.ExternalProvider{ "snowflake": { VersionConstraint: "=0.94.1", @@ -219,6 +222,7 @@ func TestAcc_DatabaseRole_IdentifierQuotingDiffSuppression(t *testing.T) { ), }, { + PreConfig: func() { acc.UnsetConfigPathEnv(t) }, ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, Config: config.FromModel(t, databaseRoleModelWithComment), ConfigPlanChecks: resource.ConfigPlanChecks{ diff --git a/pkg/resources/doc_helpers.go b/pkg/resources/doc_helpers.go index 8476ed050b..eec39dcf4a 100644 --- a/pkg/resources/doc_helpers.go +++ b/pkg/resources/doc_helpers.go @@ -32,7 +32,7 @@ func withPrivilegedRolesDescription(description, paramName string) string { } func blocklistedCharactersFieldDescription(description string) string { - return fmt.Sprintf(`%s Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: %s`, description, characterList([]rune{'|', '.', '(', ')', '"'})) + return fmt.Sprintf(`%s Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: %s`, description, characterList([]rune{'|', '.', '"'})) } func diffSuppressStatementFieldDescription(description string) string { diff --git a/pkg/resources/external_function_acceptance_test.go b/pkg/resources/external_function_acceptance_test.go index 1ce5b27cf1..c53b90167a 100644 --- a/pkg/resources/external_function_acceptance_test.go +++ b/pkg/resources/external_function_acceptance_test.go @@ -239,6 +239,7 @@ func TestAcc_ExternalFunction_migrateFromVersion085(t *testing.T) { Steps: []resource.TestStep{ { + PreConfig: func() { acc.SetV097CompatibleConfigPathEnv(t) }, ExternalProviders: map[string]resource.ExternalProvider{ "snowflake": { VersionConstraint: "=0.85.0", @@ -291,6 +292,7 @@ func TestAcc_ExternalFunction_migrateFromVersion085_issue2694_previousValuePrese Steps: []resource.TestStep{ { + PreConfig: func() { acc.SetV097CompatibleConfigPathEnv(t) }, ExternalProviders: map[string]resource.ExternalProvider{ "snowflake": { VersionConstraint: "=0.85.0", @@ -335,6 +337,7 @@ func TestAcc_ExternalFunction_migrateFromVersion085_issue2694_previousValueRemov Steps: []resource.TestStep{ { + PreConfig: func() { acc.SetV097CompatibleConfigPathEnv(t) }, ExternalProviders: map[string]resource.ExternalProvider{ "snowflake": { VersionConstraint: "=0.85.0", @@ -415,6 +418,7 @@ func 
TestAcc_ExternalFunction_HeaderParsing(t *testing.T) { CheckDestroy: acc.CheckDestroy(t, resources.ExternalFunction), Steps: []resource.TestStep{ { + PreConfig: func() { acc.SetV097CompatibleConfigPathEnv(t) }, ExternalProviders: map[string]resource.ExternalProvider{ "snowflake": { VersionConstraint: "=0.93.0", @@ -436,6 +440,7 @@ func TestAcc_ExternalFunction_HeaderParsing(t *testing.T) { ExpectNonEmptyPlan: true, }, { + PreConfig: func() { acc.UnsetConfigPathEnv(t) }, ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, Config: externalFunctionConfigIssueCurlyHeader(id), Check: resource.ComposeTestCheckFunc( @@ -583,6 +588,7 @@ func TestAcc_ExternalFunction_EnsureSmoothResourceIdMigrationToV0950(t *testing. CheckDestroy: acc.CheckDestroy(t, resources.ExternalFunction), Steps: []resource.TestStep{ { + PreConfig: func() { acc.SetV097CompatibleConfigPathEnv(t) }, ExternalProviders: map[string]resource.ExternalProvider{ "snowflake": { VersionConstraint: "=0.94.1", @@ -595,6 +601,7 @@ func TestAcc_ExternalFunction_EnsureSmoothResourceIdMigrationToV0950(t *testing. ), }, { + PreConfig: func() { acc.UnsetConfigPathEnv(t) }, ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, Config: externalFunctionConfigWithMoreArguments(acc.TestDatabaseName, acc.TestSchemaName, name), Check: resource.ComposeTestCheckFunc( @@ -655,6 +662,7 @@ func TestAcc_ExternalFunction_EnsureSmoothResourceIdMigrationToV0950_WithoutArgu CheckDestroy: acc.CheckDestroy(t, resources.ExternalFunction), Steps: []resource.TestStep{ { + PreConfig: func() { acc.SetV097CompatibleConfigPathEnv(t) }, ExternalProviders: map[string]resource.ExternalProvider{ "snowflake": { VersionConstraint: "=0.94.1", @@ -667,6 +675,7 @@ func TestAcc_ExternalFunction_EnsureSmoothResourceIdMigrationToV0950_WithoutArgu ), }, { + PreConfig: func() { acc.UnsetConfigPathEnv(t) }, ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, Config: externalFunctionConfigWithoutArguments(acc.TestDatabaseName, acc.TestSchemaName, name), Check: resource.ComposeTestCheckFunc( diff --git a/pkg/resources/external_oauth_integration_acceptance_test.go b/pkg/resources/external_oauth_integration_acceptance_test.go index 0732282702..4b4f90c415 100644 --- a/pkg/resources/external_oauth_integration_acceptance_test.go +++ b/pkg/resources/external_oauth_integration_acceptance_test.go @@ -773,6 +773,7 @@ func TestAcc_ExternalOauthIntegration_migrateFromVersion092_withRsaPublicKeysAnd Steps: []resource.TestStep{ { + PreConfig: func() { acc.SetV097CompatibleConfigPathEnv(t) }, ExternalProviders: map[string]resource.ExternalProvider{ "snowflake": { VersionConstraint: "=0.92.0", @@ -799,6 +800,7 @@ func TestAcc_ExternalOauthIntegration_migrateFromVersion092_withRsaPublicKeysAnd ), }, { + PreConfig: func() { acc.UnsetConfigPathEnv(t) }, ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, Config: externalOauthIntegrationWithRsaPublicKeysAndBlockedRolesListv093(id.Name(), issuer, rsaKey, role.Name), Check: resource.ComposeAggregateTestCheckFunc( @@ -889,6 +891,7 @@ func TestAcc_ExternalOauthIntegration_migrateFromVersion092_withJwsKeysUrlAndAll Steps: []resource.TestStep{ { + PreConfig: func() { acc.SetV097CompatibleConfigPathEnv(t) }, ExternalProviders: map[string]resource.ExternalProvider{ "snowflake": { VersionConstraint: "=0.92.0", @@ -915,6 +918,7 @@ func TestAcc_ExternalOauthIntegration_migrateFromVersion092_withJwsKeysUrlAndAll ), }, { + PreConfig: func() { acc.UnsetConfigPathEnv(t) }, ProtoV6ProviderFactories: 
acc.TestAccProtoV6ProviderFactories, Config: externalOauthIntegrationWithJwsKeysUrlAndAllowedRolesListv093(id.Name(), issuer, role.Name), Check: resource.ComposeAggregateTestCheckFunc( @@ -988,6 +992,7 @@ func TestAcc_ExternalOauthIntegration_migrateFromV0941_ensureSmoothUpgradeWithNe CheckDestroy: acc.CheckDestroy(t, resources.ExternalOauthSecurityIntegration), Steps: []resource.TestStep{ { + PreConfig: func() { acc.SetV097CompatibleConfigPathEnv(t) }, ExternalProviders: map[string]resource.ExternalProvider{ "snowflake": { VersionConstraint: "=0.94.1", @@ -1000,6 +1005,7 @@ func TestAcc_ExternalOauthIntegration_migrateFromV0941_ensureSmoothUpgradeWithNe ), }, { + PreConfig: func() { acc.UnsetConfigPathEnv(t) }, ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, Config: externalOauthIntegrationBasicConfig(id.Name()), Check: resource.ComposeAggregateTestCheckFunc( @@ -1022,6 +1028,7 @@ func TestAcc_ExternalOauthIntegration_WithQuotedName(t *testing.T) { CheckDestroy: acc.CheckDestroy(t, resources.ExternalOauthSecurityIntegration), Steps: []resource.TestStep{ { + PreConfig: func() { acc.SetV097CompatibleConfigPathEnv(t) }, ExternalProviders: map[string]resource.ExternalProvider{ "snowflake": { VersionConstraint: "=0.94.1", @@ -1036,6 +1043,7 @@ func TestAcc_ExternalOauthIntegration_WithQuotedName(t *testing.T) { ), }, { + PreConfig: func() { acc.UnsetConfigPathEnv(t) }, ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, Config: externalOauthIntegrationBasicConfig(quotedId), ConfigPlanChecks: resource.ConfigPlanChecks{ diff --git a/pkg/resources/function_acceptance_test.go b/pkg/resources/function_acceptance_test.go index 439d7cd47b..52d60e3717 100644 --- a/pkg/resources/function_acceptance_test.go +++ b/pkg/resources/function_acceptance_test.go @@ -203,6 +203,7 @@ func TestAcc_Function_migrateFromVersion085(t *testing.T) { // Added as subtask SNOW-1057066 to SNOW-926148. 
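// Editor's sketch (assumption, not the provider's actual helpers): every migration test in
// these hunks follows the same toggle — the step that pins an old provider release points it
// at a config file kept in the pre-v0.98 format, and the step that runs the current code
// removes that override. Helpers equivalent to acc.SetV097CompatibleConfigPathEnv and
// acc.UnsetConfigPathEnv could be as small as the following; the env var name and the file
// location are assumptions used only for illustration.
package example

import (
	"os"
	"path/filepath"
	"testing"
)

const configPathEnv = "SNOWFLAKE_CONFIG_PATH" // assumed env var consumed by the provider

func setV097CompatibleConfigPathEnv(t *testing.T) {
	t.Helper()
	home, err := os.UserHomeDir()
	if err != nil {
		t.Fatal(err)
	}
	// t.Setenv also registers a cleanup that restores the previous value after the test.
	t.Setenv(configPathEnv, filepath.Join(home, ".snowflake", "config_v097_compatible"))
}

func unsetConfigPathEnv(t *testing.T) {
	t.Helper()
	t.Setenv(configPathEnv, "") // remember the old value so it is restored after the test...
	os.Unsetenv(configPathEnv)  // ...then actually unset it for this test step
}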
Steps: []resource.TestStep{ { + PreConfig: func() { acc.SetV097CompatibleConfigPathEnv(t) }, ExternalProviders: map[string]resource.ExternalProvider{ "snowflake": { VersionConstraint: "=0.85.0", @@ -350,6 +351,7 @@ func TestAcc_Function_EnsureSmoothResourceIdMigrationToV0950(t *testing.T) { CheckDestroy: acc.CheckDestroy(t, resources.Function), Steps: []resource.TestStep{ { + PreConfig: func() { acc.SetV097CompatibleConfigPathEnv(t) }, ExternalProviders: map[string]resource.ExternalProvider{ "snowflake": { VersionConstraint: "=0.94.1", @@ -362,6 +364,7 @@ func TestAcc_Function_EnsureSmoothResourceIdMigrationToV0950(t *testing.T) { ), }, { + PreConfig: func() { acc.UnsetConfigPathEnv(t) }, ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, Config: functionConfigWithMoreArguments(acc.TestDatabaseName, acc.TestSchemaName, name), Check: resource.ComposeTestCheckFunc( @@ -410,6 +413,7 @@ func TestAcc_Function_EnsureSmoothResourceIdMigrationToV0950_WithoutArguments(t CheckDestroy: acc.CheckDestroy(t, resources.Function), Steps: []resource.TestStep{ { + PreConfig: func() { acc.SetV097CompatibleConfigPathEnv(t) }, ExternalProviders: map[string]resource.ExternalProvider{ "snowflake": { VersionConstraint: "=0.94.1", @@ -422,6 +426,7 @@ func TestAcc_Function_EnsureSmoothResourceIdMigrationToV0950_WithoutArguments(t ), }, { + PreConfig: func() { acc.UnsetConfigPathEnv(t) }, ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, Config: functionConfigWithoutArguments(acc.TestDatabaseName, acc.TestSchemaName, name), Check: resource.ComposeTestCheckFunc( diff --git a/pkg/resources/grant_account_role_acceptance_test.go b/pkg/resources/grant_account_role_acceptance_test.go index 3a05e90595..d9c8c786a3 100644 --- a/pkg/resources/grant_account_role_acceptance_test.go +++ b/pkg/resources/grant_account_role_acceptance_test.go @@ -103,6 +103,7 @@ func TestAcc_GrantAccountRole_migrateFromV0941_ensureSmoothUpgradeWithNewResourc }, Steps: []resource.TestStep{ { + PreConfig: func() { acc.SetV097CompatibleConfigPathEnv(t) }, ExternalProviders: map[string]resource.ExternalProvider{ "snowflake": { VersionConstraint: "=0.94.1", @@ -115,6 +116,7 @@ func TestAcc_GrantAccountRole_migrateFromV0941_ensureSmoothUpgradeWithNewResourc ), }, { + PreConfig: func() { acc.UnsetConfigPathEnv(t) }, ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, Config: grantAccountRoleBasicConfig(roleName.Name(), parentRoleName.Name()), ConfigPlanChecks: resource.ConfigPlanChecks{ @@ -164,6 +166,7 @@ func TestAcc_GrantAccountRole_IdentifierQuotingDiffSuppression(t *testing.T) { }, Steps: []resource.TestStep{ { + PreConfig: func() { acc.SetV097CompatibleConfigPathEnv(t) }, ExternalProviders: map[string]resource.ExternalProvider{ "snowflake": { VersionConstraint: "=0.94.1", @@ -174,6 +177,7 @@ func TestAcc_GrantAccountRole_IdentifierQuotingDiffSuppression(t *testing.T) { Config: grantAccountRoleBasicConfig(quotedRoleId, quotedParentRoleId), }, { + PreConfig: func() { acc.UnsetConfigPathEnv(t) }, ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, Config: grantAccountRoleBasicConfig(quotedRoleId, quotedParentRoleId), ConfigPlanChecks: resource.ConfigPlanChecks{ diff --git a/pkg/resources/grant_application_role_acceptance_test.go b/pkg/resources/grant_application_role_acceptance_test.go index fb70f56a2f..6068063592 100644 --- a/pkg/resources/grant_application_role_acceptance_test.go +++ b/pkg/resources/grant_application_role_acceptance_test.go @@ -140,6 +140,7 @@ func 
TestAcc_GrantApplicationRole_migrateFromV0941_ensureSmoothUpgradeWithNewRes }, Steps: []resource.TestStep{ { + PreConfig: func() { acc.SetV097CompatibleConfigPathEnv(t) }, ExternalProviders: map[string]resource.ExternalProvider{ "snowflake": { VersionConstraint: "=0.94.1", @@ -152,6 +153,7 @@ func TestAcc_GrantApplicationRole_migrateFromV0941_ensureSmoothUpgradeWithNewRes ), }, { + PreConfig: func() { acc.UnsetConfigPathEnv(t) }, ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, Config: grantApplicationRoleBasicConfig(fmt.Sprintf(`\"%s\".\"%s\"`, appRoleId.DatabaseName(), appRoleId.Name()), parentRoleId.Name()), ConfigPlanChecks: resource.ConfigPlanChecks{ @@ -200,6 +202,7 @@ func TestAcc_GrantApplicationRole_IdentifierQuotingDiffSuppression(t *testing.T) }, Steps: []resource.TestStep{ { + PreConfig: func() { acc.SetV097CompatibleConfigPathEnv(t) }, ExternalProviders: map[string]resource.ExternalProvider{ "snowflake": { VersionConstraint: "=0.94.1", @@ -210,6 +213,7 @@ func TestAcc_GrantApplicationRole_IdentifierQuotingDiffSuppression(t *testing.T) Config: grantApplicationRoleBasicConfig(unquotedApplicationRoleId, quotedParentRoleId), }, { + PreConfig: func() { acc.UnsetConfigPathEnv(t) }, ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, Config: grantApplicationRoleBasicConfig(unquotedApplicationRoleId, quotedParentRoleId), ConfigPlanChecks: resource.ConfigPlanChecks{ diff --git a/pkg/resources/grant_database_role_acceptance_test.go b/pkg/resources/grant_database_role_acceptance_test.go index 48120de920..ae43cc32fc 100644 --- a/pkg/resources/grant_database_role_acceptance_test.go +++ b/pkg/resources/grant_database_role_acceptance_test.go @@ -265,6 +265,7 @@ func TestAcc_GrantDatabaseRole_migrateFromV0941_ensureSmoothUpgradeWithNewResour }, Steps: []resource.TestStep{ { + PreConfig: func() { acc.SetV097CompatibleConfigPathEnv(t) }, ExternalProviders: map[string]resource.ExternalProvider{ "snowflake": { VersionConstraint: "=0.94.1", @@ -277,6 +278,7 @@ func TestAcc_GrantDatabaseRole_migrateFromV0941_ensureSmoothUpgradeWithNewResour ), }, { + PreConfig: func() { acc.UnsetConfigPathEnv(t) }, ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, Config: grantDatabaseRoleBasicConfigQuoted(databaseId.Name(), roleId.Name(), parentRoleId.Name()), ConfigPlanChecks: resource.ConfigPlanChecks{ @@ -353,6 +355,7 @@ func TestAcc_GrantDatabaseRole_IdentifierQuotingDiffSuppression(t *testing.T) { }, Steps: []resource.TestStep{ { + PreConfig: func() { acc.SetV097CompatibleConfigPathEnv(t) }, ExternalProviders: map[string]resource.ExternalProvider{ "snowflake": { VersionConstraint: "=0.94.1", @@ -367,6 +370,7 @@ func TestAcc_GrantDatabaseRole_IdentifierQuotingDiffSuppression(t *testing.T) { ), }, { + PreConfig: func() { acc.UnsetConfigPathEnv(t) }, ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, Config: grantDatabaseRoleBasicConfigUnquoted(databaseId.Name(), roleId.Name(), parentRoleId.Name()), ConfigPlanChecks: resource.ConfigPlanChecks{ diff --git a/pkg/resources/grant_ownership_acceptance_test.go b/pkg/resources/grant_ownership_acceptance_test.go index d9233fb1c7..c7b3455596 100644 --- a/pkg/resources/grant_ownership_acceptance_test.go +++ b/pkg/resources/grant_ownership_acceptance_test.go @@ -1360,6 +1360,7 @@ func TestAcc_GrantOwnership_migrateFromV0941_ensureSmoothUpgradeWithNewResourceI }, Steps: []resource.TestStep{ { + PreConfig: func() { acc.SetV097CompatibleConfigPathEnv(t) }, ExternalProviders: map[string]resource.ExternalProvider{ "snowflake": { 
VersionConstraint: "=0.94.1", @@ -1372,6 +1373,7 @@ func TestAcc_GrantOwnership_migrateFromV0941_ensureSmoothUpgradeWithNewResourceI ), }, { + PreConfig: func() { acc.UnsetConfigPathEnv(t) }, ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, Config: grantOwnershipOnTableBasicConfig(acc.TestDatabaseName, acc.TestSchemaName, tableId.Name(), accountRoleId.Name(), escapedFullyQualifiedName), ConfigPlanChecks: resource.ConfigPlanChecks{ @@ -1434,6 +1436,7 @@ func TestAcc_GrantOwnership_IdentifierQuotingDiffSuppression(t *testing.T) { }, Steps: []resource.TestStep{ { + PreConfig: func() { acc.SetV097CompatibleConfigPathEnv(t) }, ExternalProviders: map[string]resource.ExternalProvider{ "snowflake": { VersionConstraint: "=0.94.1", @@ -1447,6 +1450,7 @@ func TestAcc_GrantOwnership_IdentifierQuotingDiffSuppression(t *testing.T) { ), }, { + PreConfig: func() { acc.UnsetConfigPathEnv(t) }, ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, Config: grantOwnershipOnTableBasicConfigWithManagedDatabaseAndSchema(databaseId.Name(), schemaId.Name(), tableId.Name(), accountRoleId.Name(), unescapedFullyQualifiedName), ConfigPlanChecks: resource.ConfigPlanChecks{ diff --git a/pkg/resources/grant_privileges_to_account_role.go b/pkg/resources/grant_privileges_to_account_role.go index e314e067b0..9d8fd49a97 100644 --- a/pkg/resources/grant_privileges_to_account_role.go +++ b/pkg/resources/grant_privileges_to_account_role.go @@ -1183,6 +1183,9 @@ func createGrantPrivilegesToAccountRoleIdFromSchema(d *schema.ResourceData) (id case on.AccountObject.ReplicationGroup != nil: onAccountObjectGrantData.ObjectType = sdk.ObjectTypeReplicationGroup onAccountObjectGrantData.ObjectName = *on.AccountObject.ReplicationGroup + case on.AccountObject.ComputePool != nil: + onAccountObjectGrantData.ObjectType = sdk.ObjectTypeComputePool + onAccountObjectGrantData.ObjectName = *on.AccountObject.ComputePool case on.AccountObject.ExternalVolume != nil: onAccountObjectGrantData.ObjectType = sdk.ObjectTypeExternalVolume onAccountObjectGrantData.ObjectName = *on.AccountObject.ExternalVolume diff --git a/pkg/resources/grant_privileges_to_account_role_acceptance_test.go b/pkg/resources/grant_privileges_to_account_role_acceptance_test.go index 22a9dbeba1..c9a0a44ce2 100644 --- a/pkg/resources/grant_privileges_to_account_role_acceptance_test.go +++ b/pkg/resources/grant_privileges_to_account_role_acceptance_test.go @@ -8,11 +8,10 @@ import ( "strings" "testing" - "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/internal/collections" - acc "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/testenvs" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/internal/collections" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk" "github.com/hashicorp/terraform-plugin-testing/config" "github.com/hashicorp/terraform-plugin-testing/helper/resource" @@ -70,6 +69,45 @@ func TestAcc_GrantPrivilegesToAccountRole_OnAccount(t *testing.T) { }) } +func TestAcc_GrantPrivilegesToAccountRole_OnAccount_gh3153(t *testing.T) { + roleId := acc.TestClient().Ids.RandomAccountObjectIdentifier() + roleFullyQualifiedName := roleId.FullyQualifiedName() + configVariables := config.Variables{ + "name": config.StringVariable(roleFullyQualifiedName), + "privileges": config.ListVariable( + config.StringVariable(string(sdk.GlobalPrivilegeManageShareTarget)), + ), + } + resourceName := 
"snowflake_grant_privileges_to_account_role.test" + + resource.Test(t, resource.TestCase{ + ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, + PreCheck: func() { acc.TestAccPreCheck(t) }, + TerraformVersionChecks: []tfversion.TerraformVersionCheck{ + tfversion.RequireAbove(tfversion.Version1_5_0), + }, + CheckDestroy: acc.CheckAccountRolePrivilegesRevoked(t), + Steps: []resource.TestStep{ + { + PreConfig: func() { + _, roleCleanup := acc.TestClient().Role.CreateRoleWithIdentifier(t, roleId) + t.Cleanup(roleCleanup) + acc.TestClient().BcrBundles.EnableBcrBundle(t, "2024_07") + }, + ConfigDirectory: acc.ConfigurationDirectory("TestAcc_GrantPrivilegesToAccountRole/OnAccount_gh3153"), + ConfigVariables: configVariables, + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr(resourceName, "account_role_name", roleFullyQualifiedName), + resource.TestCheckResourceAttr(resourceName, "privileges.#", "1"), + resource.TestCheckResourceAttr(resourceName, "privileges.0", string(sdk.GlobalPrivilegeManageShareTarget)), + resource.TestCheckResourceAttr(resourceName, "on_account", "true"), + resource.TestCheckResourceAttr(resourceName, "id", fmt.Sprintf("%s|false|false|%s|OnAccount", roleFullyQualifiedName, sdk.GlobalPrivilegeManageShareTarget)), + ), + }, + }, + }) +} + func TestAcc_GrantPrivilegesToAccountRole_OnAccount_PrivilegesReversed(t *testing.T) { roleId := acc.TestClient().Ids.RandomAccountObjectIdentifier() roleFullyQualifiedName := roleId.FullyQualifiedName() @@ -177,6 +215,53 @@ func TestAcc_GrantPrivilegesToAccountRole_OnAccountObject(t *testing.T) { }) } +func TestAcc_GrantPrivilegesToAccountRole_OnAccountObject_gh2717(t *testing.T) { + _ = testenvs.GetOrSkipTest(t, testenvs.EnableAcceptance) + acc.TestAccPreCheck(t) + + computePoolId, computePoolCleanup := acc.TestClient().ComputePool.CreateComputePool(t) + t.Cleanup(computePoolCleanup) + + roleId := acc.TestClient().Ids.RandomAccountObjectIdentifier() + roleFullyQualifiedName := roleId.FullyQualifiedName() + configVariables := config.Variables{ + "name": config.StringVariable(roleFullyQualifiedName), + "compute_pool": config.StringVariable(computePoolId.Name()), + "privileges": config.ListVariable( + config.StringVariable(string(sdk.AccountObjectPrivilegeUsage)), + ), + } + resourceName := "snowflake_grant_privileges_to_account_role.test" + + resource.Test(t, resource.TestCase{ + ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, + PreCheck: func() { acc.TestAccPreCheck(t) }, + TerraformVersionChecks: []tfversion.TerraformVersionCheck{ + tfversion.RequireAbove(tfversion.Version1_5_0), + }, + CheckDestroy: acc.CheckAccountRolePrivilegesRevoked(t), + Steps: []resource.TestStep{ + { + PreConfig: func() { + _, roleCleanup := acc.TestClient().Role.CreateRoleWithIdentifier(t, roleId) + t.Cleanup(roleCleanup) + }, + ConfigDirectory: acc.ConfigurationDirectory("TestAcc_GrantPrivilegesToAccountRole/OnAccountObject_gh2717"), + ConfigVariables: configVariables, + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr(resourceName, "account_role_name", roleFullyQualifiedName), + resource.TestCheckResourceAttr(resourceName, "privileges.#", "1"), + resource.TestCheckResourceAttr(resourceName, "privileges.0", string(sdk.AccountObjectPrivilegeUsage)), + resource.TestCheckResourceAttr(resourceName, "on_account_object.#", "1"), + resource.TestCheckResourceAttr(resourceName, "on_account_object.0.object_type", string(sdk.ObjectTypeComputePool)), + resource.TestCheckResourceAttr(resourceName, 
"on_account_object.0.object_name", computePoolId.Name()), + resource.TestCheckResourceAttr(resourceName, "id", fmt.Sprintf("%s|false|false|USAGE|OnAccountObject|%s|%s", roleFullyQualifiedName, sdk.ObjectTypeComputePool, computePoolId.FullyQualifiedName())), + ), + }, + }, + }) +} + // This proves that infinite plan is not produced as in snowflake_grant_privileges_to_role. // More details can be found in the fix pr https://github.com/Snowflake-Labs/terraform-provider-snowflake/pull/2364. func TestAcc_GrantPrivilegesToApplicationRole_OnAccountObject_InfinitePlan(t *testing.T) { @@ -1665,6 +1750,7 @@ func TestAcc_GrantPrivilegesToAccountRole_migrateFromV0941_ensureSmoothUpgradeWi }, Steps: []resource.TestStep{ { + PreConfig: func() { acc.SetV097CompatibleConfigPathEnv(t) }, ExternalProviders: map[string]resource.ExternalProvider{ "snowflake": { VersionConstraint: "=0.94.1", @@ -1677,6 +1763,7 @@ func TestAcc_GrantPrivilegesToAccountRole_migrateFromV0941_ensureSmoothUpgradeWi ), }, { + PreConfig: func() { acc.UnsetConfigPathEnv(t) }, ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, Config: grantPrivilegesToAccountRoleBasicConfig(accountRoleId.Name(), accountRoleId.Name(), quotedSchemaId), ConfigPlanChecks: resource.ConfigPlanChecks{ @@ -1727,6 +1814,7 @@ func TestAcc_GrantPrivilegesToAccountRole_IdentifierQuotingDiffSuppression(t *te }, Steps: []resource.TestStep{ { + PreConfig: func() { acc.SetV097CompatibleConfigPathEnv(t) }, ExternalProviders: map[string]resource.ExternalProvider{ "snowflake": { VersionConstraint: "=0.94.1", @@ -1741,6 +1829,7 @@ func TestAcc_GrantPrivilegesToAccountRole_IdentifierQuotingDiffSuppression(t *te ), }, { + PreConfig: func() { acc.UnsetConfigPathEnv(t) }, ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, Config: grantPrivilegesToAccountRoleBasicConfig(accountRoleId.Name(), unquotedAccountRoleId, unquotedSchemaId), ConfigPlanChecks: resource.ConfigPlanChecks{ @@ -1828,6 +1917,7 @@ func TestAcc_GrantPrivilegesToAccountRole_OnFutureModels_issue3050(t *testing.T) CheckDestroy: acc.CheckAccountRolePrivilegesRevoked(t), Steps: []resource.TestStep{ { + PreConfig: func() { acc.SetV097CompatibleConfigPathEnv(t) }, ExternalProviders: map[string]resource.ExternalProvider{ "snowflake": { VersionConstraint: "=0.95.0", @@ -1838,6 +1928,7 @@ func TestAcc_GrantPrivilegesToAccountRole_OnFutureModels_issue3050(t *testing.T) ExpectNonEmptyPlan: true, }, { + PreConfig: func() { acc.UnsetConfigPathEnv(t) }, ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, Config: grantPrivilegesToAccountRoleOnFutureInDatabaseConfig(accountRoleName, []string{"USAGE"}, sdk.PluralObjectTypeModels, databaseName), }, diff --git a/pkg/resources/grant_privileges_to_database_role_acceptance_test.go b/pkg/resources/grant_privileges_to_database_role_acceptance_test.go index b3b491cfd8..c1da413547 100644 --- a/pkg/resources/grant_privileges_to_database_role_acceptance_test.go +++ b/pkg/resources/grant_privileges_to_database_role_acceptance_test.go @@ -1388,6 +1388,7 @@ func TestAcc_GrantPrivilegesToDatabaseRole_migrateFromV0941_ensureSmoothUpgradeW }, Steps: []resource.TestStep{ { + PreConfig: func() { acc.SetV097CompatibleConfigPathEnv(t) }, ExternalProviders: map[string]resource.ExternalProvider{ "snowflake": { VersionConstraint: "=0.94.1", @@ -1400,6 +1401,7 @@ func TestAcc_GrantPrivilegesToDatabaseRole_migrateFromV0941_ensureSmoothUpgradeW ), }, { + PreConfig: func() { acc.UnsetConfigPathEnv(t) }, ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, 
Config: grantPrivilegesToDatabaseRoleBasicConfig(acc.TestClient().Ids.DatabaseId().Name(), databaseRoleId.Name(), quotedDatabaseRoleId, quotedSchemaId), ConfigPlanChecks: resource.ConfigPlanChecks{ @@ -1451,6 +1453,7 @@ func TestAcc_GrantPrivilegesToDatabaseRole_IdentifierQuotingDiffSuppression(t *t }, Steps: []resource.TestStep{ { + PreConfig: func() { acc.SetV097CompatibleConfigPathEnv(t) }, ExternalProviders: map[string]resource.ExternalProvider{ "snowflake": { VersionConstraint: "=0.94.1", @@ -1465,6 +1468,7 @@ func TestAcc_GrantPrivilegesToDatabaseRole_IdentifierQuotingDiffSuppression(t *t ), }, { + PreConfig: func() { acc.UnsetConfigPathEnv(t) }, ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, Config: grantPrivilegesToDatabaseRoleBasicConfig(acc.TestClient().Ids.DatabaseId().Name(), databaseRoleId.Name(), unquotedDatabaseRoleId, unquotedSchemaId), ConfigPlanChecks: resource.ConfigPlanChecks{ @@ -1498,6 +1502,7 @@ func TestAcc_GrantPrivilegesToDatabaseRole_OnFutureModels_issue3050(t *testing.T CheckDestroy: acc.CheckAccountRolePrivilegesRevoked(t), Steps: []resource.TestStep{ { + PreConfig: func() { acc.SetV097CompatibleConfigPathEnv(t) }, ExternalProviders: map[string]resource.ExternalProvider{ "snowflake": { VersionConstraint: "=0.95.0", @@ -1508,6 +1513,7 @@ func TestAcc_GrantPrivilegesToDatabaseRole_OnFutureModels_issue3050(t *testing.T ExpectNonEmptyPlan: true, }, { + PreConfig: func() { acc.UnsetConfigPathEnv(t) }, ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, Config: grantPrivilegesToDatabaseRoleOnFutureInDatabaseConfig(databaseRoleId, []string{"USAGE"}, sdk.PluralObjectTypeModels, databaseName), }, diff --git a/pkg/resources/grant_privileges_to_share_acceptance_test.go b/pkg/resources/grant_privileges_to_share_acceptance_test.go index 78ae8a4a01..45c7bcd0c0 100644 --- a/pkg/resources/grant_privileges_to_share_acceptance_test.go +++ b/pkg/resources/grant_privileges_to_share_acceptance_test.go @@ -721,6 +721,7 @@ func TestAcc_GrantPrivilegesToShare_migrateFromV0941_ensureSmoothUpgradeWithNewR }, Steps: []resource.TestStep{ { + PreConfig: func() { acc.SetV097CompatibleConfigPathEnv(t) }, ExternalProviders: map[string]resource.ExternalProvider{ "snowflake": { VersionConstraint: "=0.94.1", @@ -733,6 +734,7 @@ func TestAcc_GrantPrivilegesToShare_migrateFromV0941_ensureSmoothUpgradeWithNewR ), }, { + PreConfig: func() { acc.UnsetConfigPathEnv(t) }, ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, Config: grantPrivilegesToShareBasicConfig(databaseId.Name(), shareId.Name()), ConfigPlanChecks: resource.ConfigPlanChecks{ @@ -783,6 +785,7 @@ func TestAcc_GrantPrivilegesToShare_IdentifierQuotingDiffSuppression(t *testing. }, Steps: []resource.TestStep{ { + PreConfig: func() { acc.SetV097CompatibleConfigPathEnv(t) }, ExternalProviders: map[string]resource.ExternalProvider{ "snowflake": { VersionConstraint: "=0.94.1", @@ -793,6 +796,7 @@ func TestAcc_GrantPrivilegesToShare_IdentifierQuotingDiffSuppression(t *testing. 
Config: grantPrivilegesToShareBasicConfig(quotedDatabaseId, quotedShareId), }, { + PreConfig: func() { acc.UnsetConfigPathEnv(t) }, ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, Config: grantPrivilegesToShareBasicConfig(quotedDatabaseId, quotedShareId), ConfigPlanChecks: resource.ConfigPlanChecks{ diff --git a/pkg/resources/helpers.go b/pkg/resources/helpers.go index a7c78d2532..3c47984e7e 100644 --- a/pkg/resources/helpers.go +++ b/pkg/resources/helpers.go @@ -330,3 +330,17 @@ func ListDiff[T comparable](beforeList []T, afterList []T) (added []T, removed [ return added, removed } + +// parseSchemaObjectIdentifierSet is a helper function to parse a given schema object identifier list from ResourceData. +func parseSchemaObjectIdentifierSet(v any) ([]sdk.SchemaObjectIdentifier, error) { + idsRaw := expandStringList(v.(*schema.Set).List()) + ids := make([]sdk.SchemaObjectIdentifier, len(idsRaw)) + for i, idRaw := range idsRaw { + id, err := sdk.ParseSchemaObjectIdentifier(idRaw) + if err != nil { + return nil, err + } + ids[i] = id + } + return ids, nil +} diff --git a/pkg/resources/masking_policy_acceptance_test.go b/pkg/resources/masking_policy_acceptance_test.go index 98a3648fb9..ac8e055173 100644 --- a/pkg/resources/masking_policy_acceptance_test.go +++ b/pkg/resources/masking_policy_acceptance_test.go @@ -413,6 +413,7 @@ func TestAcc_MaskingPolicy_migrateFromVersion_0_94_1(t *testing.T) { Steps: []resource.TestStep{ { + PreConfig: func() { acc.SetV097CompatibleConfigPathEnv(t) }, ExternalProviders: map[string]resource.ExternalProvider{ "snowflake": { VersionConstraint: "=0.94.1", @@ -426,6 +427,7 @@ func TestAcc_MaskingPolicy_migrateFromVersion_0_94_1(t *testing.T) { ), }, { + PreConfig: func() { acc.UnsetConfigPathEnv(t) }, ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, ConfigDirectory: acc.ConfigurationDirectory("TestAcc_MaskingPolicy/basic"), ConfigVariables: tfconfig.ConfigVariablesFromModel(t, policyModel), @@ -570,6 +572,7 @@ func TestAcc_MaskingPolicy_migrateFromVersion_0_95_0(t *testing.T) { PreCheck: func() { acc.TestAccPreCheck(t) }, Steps: []resource.TestStep{ { + PreConfig: func() { acc.SetV097CompatibleConfigPathEnv(t) }, ExternalProviders: map[string]resource.ExternalProvider{ "snowflake": { VersionConstraint: "=0.95.0", @@ -594,6 +597,7 @@ func TestAcc_MaskingPolicy_migrateFromVersion_0_95_0(t *testing.T) { ), }, { + PreConfig: func() { acc.UnsetConfigPathEnv(t) }, ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, ConfigDirectory: acc.ConfigurationDirectory("TestAcc_MaskingPolicy/complete"), ConfigVariables: tfconfig.ConfigVariablesFromModel(t, policyModel), diff --git a/pkg/resources/network_policy.go b/pkg/resources/network_policy.go index 7b590ce169..981107abb0 100644 --- a/pkg/resources/network_policy.go +++ b/pkg/resources/network_policy.go @@ -138,7 +138,7 @@ func CreateContextNetworkPolicy(ctx context.Context, d *schema.ResourceData, met } if v, ok := d.GetOk("allowed_network_rule_list"); ok { - allowedNetworkRuleList, err := parseNetworkRulesList(v) + allowedNetworkRuleList, err := parseSchemaObjectIdentifierSet(v) if err != nil { return diag.FromErr(err) } @@ -146,7 +146,7 @@ func CreateContextNetworkPolicy(ctx context.Context, d *schema.ResourceData, met } if v, ok := d.GetOk("blocked_network_rule_list"); ok { - blockedNetworkRuleList, err := parseNetworkRulesList(v) + blockedNetworkRuleList, err := parseSchemaObjectIdentifierSet(v) if err != nil { return diag.FromErr(err) } @@ -315,7 +315,7 @@ func 
UpdateContextNetworkPolicy(ctx context.Context, d *schema.ResourceData, met if d.HasChange("allowed_network_rule_list") { if v, ok := d.GetOk("allowed_network_rule_list"); ok { - allowedNetworkRuleList, err := parseNetworkRulesList(v) + allowedNetworkRuleList, err := parseSchemaObjectIdentifierSet(v) if err != nil { return diag.FromErr(err) } @@ -327,7 +327,7 @@ func UpdateContextNetworkPolicy(ctx context.Context, d *schema.ResourceData, met if d.HasChange("blocked_network_rule_list") { if v, ok := d.GetOk("blocked_network_rule_list"); ok { - blockedNetworkRuleList, err := parseNetworkRulesList(v) + blockedNetworkRuleList, err := parseSchemaObjectIdentifierSet(v) if err != nil { return diag.FromErr(err) } @@ -399,17 +399,3 @@ func parseIPList(v interface{}) []sdk.IPRequest { } return ipRequests } - -// parseNetworkRulesList is a helper function to parse a given network rule list from ResourceData. -func parseNetworkRulesList(v interface{}) ([]sdk.SchemaObjectIdentifier, error) { - networkRules := expandStringList(v.(*schema.Set).List()) - networkRuleIdentifiers := make([]sdk.SchemaObjectIdentifier, len(networkRules)) - for i, networkRuleFullyQualifiedName := range networkRules { - networkRuleId, err := sdk.ParseSchemaObjectIdentifier(networkRuleFullyQualifiedName) - if err != nil { - return nil, err - } - networkRuleIdentifiers[i] = networkRuleId - } - return networkRuleIdentifiers, nil -} diff --git a/pkg/resources/network_policy_acceptance_test.go b/pkg/resources/network_policy_acceptance_test.go index 7ba7a143f2..b205528ee0 100644 --- a/pkg/resources/network_policy_acceptance_test.go +++ b/pkg/resources/network_policy_acceptance_test.go @@ -479,6 +479,7 @@ func TestAcc_NetworkPolicy_Issue2236(t *testing.T) { // Identifier quoting mismatch (no diff suppression) ExpectNonEmptyPlan: true, PreConfig: func() { + func() { acc.SetV097CompatibleConfigPathEnv(t) }() acc.TestClient().NetworkRule.CreateWithIdentifier(t, allowedNetworkRuleId) acc.TestClient().NetworkRule.CreateWithIdentifier(t, allowedNetworkRuleId2) acc.TestClient().NetworkRule.CreateWithIdentifier(t, blockedNetworkRuleId) @@ -502,6 +503,7 @@ func TestAcc_NetworkPolicy_Issue2236(t *testing.T) { ), }, { + PreConfig: func() { acc.UnsetConfigPathEnv(t) }, ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, Config: networkPolicyConfigWithNetworkRules( id.Name(), @@ -554,6 +556,7 @@ func TestAcc_NetworkPolicy_migrateFromV0941_ensureSmoothUpgradeWithNewResourceId CheckDestroy: acc.CheckDestroy(t, resources.NetworkPolicy), Steps: []resource.TestStep{ { + PreConfig: func() { acc.SetV097CompatibleConfigPathEnv(t) }, ExternalProviders: map[string]resource.ExternalProvider{ "snowflake": { VersionConstraint: "=0.94.1", @@ -566,6 +569,7 @@ func TestAcc_NetworkPolicy_migrateFromV0941_ensureSmoothUpgradeWithNewResourceId ), }, { + PreConfig: func() { acc.UnsetConfigPathEnv(t) }, ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, Config: networkPolicyConfigBasic(id.Name()), Check: resource.ComposeAggregateTestCheckFunc( @@ -588,6 +592,7 @@ func TestAcc_NetworkPolicy_WithQuotedName(t *testing.T) { CheckDestroy: acc.CheckDestroy(t, resources.NetworkPolicy), Steps: []resource.TestStep{ { + PreConfig: func() { acc.SetV097CompatibleConfigPathEnv(t) }, ExternalProviders: map[string]resource.ExternalProvider{ "snowflake": { VersionConstraint: "=0.94.1", @@ -602,6 +607,7 @@ func TestAcc_NetworkPolicy_WithQuotedName(t *testing.T) { ), }, { + PreConfig: func() { acc.UnsetConfigPathEnv(t) }, ProtoV6ProviderFactories: 
acc.TestAccProtoV6ProviderFactories, Config: networkPolicyConfigBasic(quotedId), ConfigPlanChecks: resource.ConfigPlanChecks{ diff --git a/pkg/resources/network_rule_acceptance_test.go b/pkg/resources/network_rule_acceptance_test.go index 0c4e274d88..c6b6bea422 100644 --- a/pkg/resources/network_rule_acceptance_test.go +++ b/pkg/resources/network_rule_acceptance_test.go @@ -134,6 +134,7 @@ func TestAcc_NetworkRule_migrateFromVersion_0_94_1(t *testing.T) { Steps: []resource.TestStep{ { + PreConfig: func() { acc.SetV097CompatibleConfigPathEnv(t) }, ExternalProviders: map[string]resource.ExternalProvider{ "snowflake": { VersionConstraint: "=0.94.1", @@ -147,6 +148,7 @@ func TestAcc_NetworkRule_migrateFromVersion_0_94_1(t *testing.T) { ), }, { + PreConfig: func() { acc.UnsetConfigPathEnv(t) }, ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, Config: networkRuleIpv4(id.Name(), acc.TestDatabaseName, acc.TestSchemaName), Check: resource.ComposeTestCheckFunc( diff --git a/pkg/resources/notification_integration_acceptance_test.go b/pkg/resources/notification_integration_acceptance_test.go index fc473440aa..ffdb782367 100644 --- a/pkg/resources/notification_integration_acceptance_test.go +++ b/pkg/resources/notification_integration_acceptance_test.go @@ -232,6 +232,7 @@ func TestAcc_NotificationIntegration_migrateFromVersion085(t *testing.T) { CheckDestroy: acc.CheckDestroy(t, resources.NotificationIntegration), Steps: []resource.TestStep{ { + PreConfig: func() { acc.SetV097CompatibleConfigPathEnv(t) }, ExternalProviders: map[string]resource.ExternalProvider{ "snowflake": { VersionConstraint: "=0.85.0", @@ -246,6 +247,7 @@ func TestAcc_NotificationIntegration_migrateFromVersion085(t *testing.T) { ), }, { + PreConfig: func() { acc.UnsetConfigPathEnv(t) }, ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, Config: googleAutoConfigWithoutDirection(accName, gcpPubsubSubscriptionName), ConfigPlanChecks: resource.ConfigPlanChecks{ @@ -276,6 +278,7 @@ func TestAcc_NotificationIntegration_migrateFromVersion085_explicitType(t *testi CheckDestroy: acc.CheckDestroy(t, resources.NotificationIntegration), Steps: []resource.TestStep{ { + PreConfig: func() { acc.SetV097CompatibleConfigPathEnv(t) }, ExternalProviders: map[string]resource.ExternalProvider{ "snowflake": { VersionConstraint: "=0.85.0", @@ -288,6 +291,7 @@ func TestAcc_NotificationIntegration_migrateFromVersion085_explicitType(t *testi ), }, { + PreConfig: func() { acc.UnsetConfigPathEnv(t) }, ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, Config: googleAutoConfig(accName, gcpPubsubSubscriptionName), ConfigPlanChecks: resource.ConfigPlanChecks{ diff --git a/pkg/resources/oauth_integration_for_custom_clients_acceptance_test.go b/pkg/resources/oauth_integration_for_custom_clients_acceptance_test.go index 0f77f4e16b..be88ceab7c 100644 --- a/pkg/resources/oauth_integration_for_custom_clients_acceptance_test.go +++ b/pkg/resources/oauth_integration_for_custom_clients_acceptance_test.go @@ -682,6 +682,7 @@ func TestAcc_OauthIntegrationForCustomClients_migrateFromV0941_ensureSmoothUpgra CheckDestroy: acc.CheckDestroy(t, resourcenames.OauthIntegrationForCustomClients), Steps: []resource.TestStep{ { + PreConfig: func() { acc.SetV097CompatibleConfigPathEnv(t) }, ExternalProviders: map[string]resource.ExternalProvider{ "snowflake": { VersionConstraint: "=0.94.1", @@ -694,6 +695,7 @@ func TestAcc_OauthIntegrationForCustomClients_migrateFromV0941_ensureSmoothUpgra ), }, { + PreConfig: func() { acc.UnsetConfigPathEnv(t) }, 
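// Editor's sketch: the network policy hunks above replace the bespoke rule-list parser with
// the shared parseSchemaObjectIdentifierSet helper introduced in helpers.go. The underlying
// pattern is simply "expand a schema.Set of strings and map it through a fallible parser"; a
// generic standalone version of that pattern (illustrative, not provider API) could be:
package example

import (
	"github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema"
)

// mapSet converts the members of a *schema.Set (assumed to hold strings) with parse,
// stopping at the first element that fails to parse.
func mapSet[T any](set *schema.Set, parse func(string) (T, error)) ([]T, error) {
	raw := set.List()
	out := make([]T, 0, len(raw))
	for _, v := range raw {
		parsed, err := parse(v.(string))
		if err != nil {
			return nil, err
		}
		out = append(out, parsed)
	}
	return out, nil
}

// Usage would mirror the resource code above, e.g.:
//	ids, err := mapSet(d.Get("allowed_network_rule_list").(*schema.Set), sdk.ParseSchemaObjectIdentifier)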
ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, Config: oauthIntegrationForCustomClientsBasicConfig(id.Name()), Check: resource.ComposeAggregateTestCheckFunc( @@ -716,6 +718,7 @@ func TestAcc_OauthIntegrationForCustomClients_WithQuotedName(t *testing.T) { CheckDestroy: acc.CheckDestroy(t, resourcenames.OauthIntegrationForCustomClients), Steps: []resource.TestStep{ { + PreConfig: func() { acc.SetV097CompatibleConfigPathEnv(t) }, ExternalProviders: map[string]resource.ExternalProvider{ "snowflake": { VersionConstraint: "=0.94.1", @@ -730,6 +733,7 @@ func TestAcc_OauthIntegrationForCustomClients_WithQuotedName(t *testing.T) { ), }, { + PreConfig: func() { acc.UnsetConfigPathEnv(t) }, ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, Config: oauthIntegrationForCustomClientsBasicConfig(quotedId), ConfigPlanChecks: resource.ConfigPlanChecks{ diff --git a/pkg/resources/oauth_integration_for_partner_applications_acceptance_test.go b/pkg/resources/oauth_integration_for_partner_applications_acceptance_test.go index e9bfe4d778..0eccde347c 100644 --- a/pkg/resources/oauth_integration_for_partner_applications_acceptance_test.go +++ b/pkg/resources/oauth_integration_for_partner_applications_acceptance_test.go @@ -687,6 +687,7 @@ func TestAcc_OauthIntegrationForPartnerApplications_migrateFromV0941_ensureSmoot CheckDestroy: acc.CheckDestroy(t, resources.OauthIntegrationForPartnerApplications), Steps: []resource.TestStep{ { + PreConfig: func() { acc.SetV097CompatibleConfigPathEnv(t) }, ExternalProviders: map[string]resource.ExternalProvider{ "snowflake": { VersionConstraint: "=0.94.1", @@ -699,6 +700,7 @@ func TestAcc_OauthIntegrationForPartnerApplications_migrateFromV0941_ensureSmoot ), }, { + PreConfig: func() { acc.UnsetConfigPathEnv(t) }, ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, Config: oauthIntegrationForPartnerApplicationsBasicConfig(id.Name()), Check: resource.ComposeAggregateTestCheckFunc( @@ -721,6 +723,7 @@ func TestAcc_OauthIntegrationForPartnerApplications_WithQuotedName(t *testing.T) CheckDestroy: acc.CheckDestroy(t, resources.OauthIntegrationForPartnerApplications), Steps: []resource.TestStep{ { + PreConfig: func() { acc.SetV097CompatibleConfigPathEnv(t) }, ExternalProviders: map[string]resource.ExternalProvider{ "snowflake": { VersionConstraint: "=0.94.1", @@ -735,6 +738,7 @@ func TestAcc_OauthIntegrationForPartnerApplications_WithQuotedName(t *testing.T) ), }, { + PreConfig: func() { acc.UnsetConfigPathEnv(t) }, ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, Config: oauthIntegrationForPartnerApplicationsBasicConfig(quotedId), ConfigPlanChecks: resource.ConfigPlanChecks{ diff --git a/pkg/resources/oauth_integration_test.go b/pkg/resources/oauth_integration_test.go index bd9a5438c6..c8e2d9a126 100644 --- a/pkg/resources/oauth_integration_test.go +++ b/pkg/resources/oauth_integration_test.go @@ -10,7 +10,7 @@ import ( sqlmock "github.com/DATA-DOG/go-sqlmock" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/provider" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/resources" - . "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/testhelpers" + . 
"github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/testhelpers/mock" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" "github.com/stretchr/testify/require" ) diff --git a/pkg/resources/object_renaming_acceptance_test.go b/pkg/resources/object_renaming_acceptance_test.go index a74523e3eb..462035c8c8 100644 --- a/pkg/resources/object_renaming_acceptance_test.go +++ b/pkg/resources/object_renaming_acceptance_test.go @@ -136,283 +136,166 @@ const ( NoDependency DependencyType = "no_dependency" ) -func TestAcc_ShallowHierarchy_IsInConfig_RenamedInternally_WithImplicitDependency(t *testing.T) { +func TestAcc_ShallowHierarchy_IsInConfig_RenamedInternally(t *testing.T) { _ = testenvs.GetOrSkipTest(t, testenvs.EnableObjectRenamingTest) _ = testenvs.GetOrSkipTest(t, testenvs.EnableAcceptance) acc.TestAccPreCheck(t) - databaseId := acc.TestClient().Ids.RandomAccountObjectIdentifier() - newDatabaseId := acc.TestClient().Ids.RandomAccountObjectIdentifier() - schemaName := acc.TestClient().Ids.Alpha() + testCases := []struct { + Dependency DependencyType + IsRenamedDatabaseReferencedAfterRename bool + }{ + {Dependency: ImplicitDependency}, + // Error happens during schema's Read operation and then Delete operation (schema cannot be removed). + // Not able to handle the error produced by Delete operation that results in test always failing + // {Dependency: NoDependency, IsRenamedDatabaseReferencedAfterRename: false}, + // {Dependency: NoDependency, IsRenamedDatabaseReferencedAfterRename: true}, + // {Dependency: DependsOnDependency, IsRenamedDatabaseReferencedAfterRename: false}, + // {Dependency: DependsOnDependency, IsRenamedDatabaseReferencedAfterRename: true}, + } - databaseConfigModel := model.Database("test", databaseId.Name()) - databaseConfigModelWithNewId := model.Database("test", newDatabaseId.Name()) + for _, testCase := range testCases { + t.Run(fmt.Sprintf("TestAcc_ dependency: %s", testCase.Dependency), func(t *testing.T) { + databaseId := acc.TestClient().Ids.RandomAccountObjectIdentifier() + newDatabaseId := acc.TestClient().Ids.RandomAccountObjectIdentifier() + schemaName := acc.TestClient().Ids.Alpha() - resource.Test(t, resource.TestCase{ - ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, - TerraformVersionChecks: []tfversion.TerraformVersionCheck{ - tfversion.RequireAbove(tfversion.Version1_5_0), - }, - CheckDestroy: acc.CheckDestroy(t, resources.Schema), - Steps: []resource.TestStep{ - { - Config: config.FromModel(t, databaseConfigModel) + configSchemaWithDatabaseReference(databaseConfigModel.ResourceReference(), schemaName), - }, - { - ConfigPlanChecks: resource.ConfigPlanChecks{ - PreApply: []plancheck.PlanCheck{ + databaseConfigModel := model.Database("test", databaseId.Name()) + databaseConfigModelWithNewId := model.Database("test", newDatabaseId.Name()) + + var schemaConfigAfterRename string + var preApplyChecksAfterRename []plancheck.PlanCheck + + switch testCase.Dependency { + case ImplicitDependency: + schemaConfigAfterRename = configSchemaWithReferences(t, databaseConfigModelWithNewId.ResourceReference(), testCase.Dependency, databaseId.Name(), schemaName) + preApplyChecksAfterRename = []plancheck.PlanCheck{ + plancheck.ExpectResourceAction("snowflake_database.test", plancheck.ResourceActionUpdate), + plancheck.ExpectResourceAction("snowflake_schema.test", plancheck.ResourceActionDestroyBeforeCreate), + } + case DependsOnDependency, NoDependency: + if testCase.IsRenamedDatabaseReferencedAfterRename { + schemaConfigAfterRename = 
configSchemaWithReferences(t, databaseConfigModelWithNewId.ResourceReference(), testCase.Dependency, newDatabaseId.Name(), schemaName) + preApplyChecksAfterRename = []plancheck.PlanCheck{ plancheck.ExpectResourceAction("snowflake_database.test", plancheck.ResourceActionUpdate), plancheck.ExpectResourceAction("snowflake_schema.test", plancheck.ResourceActionDestroyBeforeCreate), - }, - }, - Config: config.FromModel(t, databaseConfigModelWithNewId) + configSchemaWithDatabaseReference(databaseConfigModelWithNewId.ResourceReference(), schemaName), - }, - }, - }) -} - -func TestAcc_ShallowHierarchy_IsInConfig_RenamedInternally_WithoutDependency_AfterRenameSchemaReferencingOldDatabaseName(t *testing.T) { - // Error happens during schema's Read operation and then Delete operation (schema cannot be removed). - t.Skip("Not able to handle the error produced by Delete operation that results in test always failing") - _ = testenvs.GetOrSkipTest(t, testenvs.EnableObjectRenamingTest) - _ = testenvs.GetOrSkipTest(t, testenvs.EnableAcceptance) - acc.TestAccPreCheck(t) - - databaseId := acc.TestClient().Ids.RandomAccountObjectIdentifier() - newDatabaseId := acc.TestClient().Ids.RandomAccountObjectIdentifier() - schemaName := acc.TestClient().Ids.Alpha() - - databaseConfigModel := model.Database("test", databaseId.Name()) - databaseConfigModelWithNewId := model.Database("test", newDatabaseId.Name()) - - schemaModelConfig := model.Schema("test", databaseId.Name(), schemaName) - - resource.Test(t, resource.TestCase{ - ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, - TerraformVersionChecks: []tfversion.TerraformVersionCheck{ - tfversion.RequireAbove(tfversion.Version1_5_0), - }, - CheckDestroy: acc.CheckDestroy(t, resources.Schema), - Steps: []resource.TestStep{ - { - Config: config.FromModels(t, databaseConfigModel, schemaModelConfig), - }, - { - ConfigPlanChecks: resource.ConfigPlanChecks{ - PreApply: []plancheck.PlanCheck{ + } + } else { + schemaConfigAfterRename = configSchemaWithReferences(t, databaseConfigModelWithNewId.ResourceReference(), testCase.Dependency, databaseId.Name(), schemaName) + preApplyChecksAfterRename = []plancheck.PlanCheck{ plancheck.ExpectResourceAction("snowflake_database.test", plancheck.ResourceActionUpdate), plancheck.ExpectResourceAction("snowflake_schema.test", plancheck.ResourceActionNoop), - }, - }, - Config: config.FromModels(t, databaseConfigModelWithNewId, schemaModelConfig), - ExpectError: regexp.MustCompile("does not exist or not authorized"), - }, - }, - }) -} - -func TestAcc_ShallowHierarchy_IsInConfig_RenamedInternally_WithoutDependency_AfterRenameSchemaReferencingNewDatabaseName(t *testing.T) { - // Error happens during schema's Read operation and then Delete operation (schema cannot be removed). 
- t.Skip("Not able to handle the error produced by Delete operation that results in test always failing") - _ = testenvs.GetOrSkipTest(t, testenvs.EnableObjectRenamingTest) - _ = testenvs.GetOrSkipTest(t, testenvs.EnableAcceptance) - acc.TestAccPreCheck(t) - - databaseId := acc.TestClient().Ids.RandomAccountObjectIdentifier() - newDatabaseId := acc.TestClient().Ids.RandomAccountObjectIdentifier() - schemaName := acc.TestClient().Ids.Alpha() - - databaseConfigModel := model.Database("test", databaseId.Name()) - databaseConfigModelWithNewId := model.Database("test", newDatabaseId.Name()) - - schemaModelConfig := model.Schema("test", databaseId.Name(), schemaName) - schemaModelConfigWithNewDatabaseId := model.Schema("test", newDatabaseId.Name(), schemaName) + } + } + } - resource.Test(t, resource.TestCase{ - ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, - TerraformVersionChecks: []tfversion.TerraformVersionCheck{ - tfversion.RequireAbove(tfversion.Version1_5_0), - }, - CheckDestroy: acc.CheckDestroy(t, resources.Schema), - Steps: []resource.TestStep{ - { - Config: config.FromModels(t, databaseConfigModel, schemaModelConfig), - }, - { - ConfigPlanChecks: resource.ConfigPlanChecks{ - PreApply: []plancheck.PlanCheck{ - plancheck.ExpectResourceAction("snowflake_database.test", plancheck.ResourceActionUpdate), - plancheck.ExpectResourceAction("snowflake_schema.test", plancheck.ResourceActionDestroyBeforeCreate), - }, + resource.Test(t, resource.TestCase{ + ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, + TerraformVersionChecks: []tfversion.TerraformVersionCheck{ + tfversion.RequireAbove(tfversion.Version1_5_0), }, - Config: config.FromModels(t, databaseConfigModelWithNewId, schemaModelConfigWithNewDatabaseId), - ExpectError: regexp.MustCompile("does not exist or not authorized"), - }, - }, - }) -} - -func TestAcc_ShallowHierarchy_IsInConfig_RenamedInternally_WithDependsOn_AfterRenameSchemaReferencingOldDatabaseName(t *testing.T) { - // Error happens during schema's Read operation and then Delete operation (schema cannot be removed). 
- t.Skip("Not able to handle the error produced by Delete operation that results in test always failing") - _ = testenvs.GetOrSkipTest(t, testenvs.EnableObjectRenamingTest) - _ = testenvs.GetOrSkipTest(t, testenvs.EnableAcceptance) - acc.TestAccPreCheck(t) - - databaseId := acc.TestClient().Ids.RandomAccountObjectIdentifier() - newDatabaseId := acc.TestClient().Ids.RandomAccountObjectIdentifier() - schemaName := acc.TestClient().Ids.Alpha() - - databaseConfigModel := model.Database("test", databaseId.Name()) - databaseConfigModelWithNewId := model.Database("test", newDatabaseId.Name()) - - schemaModelConfig := model.Schema("test", databaseId.Name(), schemaName) - schemaModelConfig.SetDependsOn(databaseConfigModel.ResourceReference()) - - resource.Test(t, resource.TestCase{ - ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, - TerraformVersionChecks: []tfversion.TerraformVersionCheck{ - tfversion.RequireAbove(tfversion.Version1_5_0), - }, - CheckDestroy: resource.ComposeAggregateTestCheckFunc(), - Steps: []resource.TestStep{ - { - Config: config.FromModels(t, databaseConfigModel, schemaModelConfig), - }, - { - ConfigPlanChecks: resource.ConfigPlanChecks{ - PreApply: []plancheck.PlanCheck{ - plancheck.ExpectResourceAction("snowflake_database.test", plancheck.ResourceActionUpdate), - plancheck.ExpectResourceAction("snowflake_schema.test", plancheck.ResourceActionNoop), + CheckDestroy: acc.CheckDestroy(t, resources.Schema), + Steps: []resource.TestStep{ + { + Config: config.FromModel(t, databaseConfigModel) + configSchemaWithReferences(t, databaseConfigModel.ResourceReference(), testCase.Dependency, databaseId.Name(), schemaName), }, - }, - Config: config.FromModels(t, databaseConfigModelWithNewId, schemaModelConfig), - ExpectError: regexp.MustCompile("does not exist or not authorized"), - }, - }, - }) -} - -func TestAcc_ShallowHierarchy_IsInConfig_RenamedInternally_WithDependsOn_AfterRenameSchemaReferencingNewDatabaseName(t *testing.T) { - _ = testenvs.GetOrSkipTest(t, testenvs.EnableObjectRenamingTest) - _ = testenvs.GetOrSkipTest(t, testenvs.EnableAcceptance) - acc.TestAccPreCheck(t) - - databaseId := acc.TestClient().Ids.RandomAccountObjectIdentifier() - newDatabaseId := acc.TestClient().Ids.RandomAccountObjectIdentifier() - schemaName := acc.TestClient().Ids.Alpha() - - databaseConfigModel := model.Database("test", databaseId.Name()) - databaseConfigModelWithNewId := model.Database("test", newDatabaseId.Name()) - - schemaModelConfig := model.Schema("test", databaseId.Name(), schemaName) - schemaModelConfig.SetDependsOn(databaseConfigModel.ResourceReference()) - schemaModelConfigWithNewDatabaseId := model.Schema("test", newDatabaseId.Name(), schemaName) - schemaModelConfigWithNewDatabaseId.SetDependsOn(databaseConfigModelWithNewId.ResourceReference()) - - resource.Test(t, resource.TestCase{ - ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, - TerraformVersionChecks: []tfversion.TerraformVersionCheck{ - tfversion.RequireAbove(tfversion.Version1_5_0), - }, - CheckDestroy: acc.CheckDestroy(t, resources.Schema), - Steps: []resource.TestStep{ - { - Config: config.FromModels(t, databaseConfigModel, schemaModelConfig), - }, - { - ConfigPlanChecks: resource.ConfigPlanChecks{ - PreApply: []plancheck.PlanCheck{ - plancheck.ExpectResourceAction("snowflake_database.test", plancheck.ResourceActionUpdate), - plancheck.ExpectResourceAction("snowflake_schema.test", plancheck.ResourceActionDestroyBeforeCreate), + { + ConfigPlanChecks: resource.ConfigPlanChecks{ + PreApply: 
preApplyChecksAfterRename, + }, + Config: config.FromModel(t, databaseConfigModelWithNewId) + schemaConfigAfterRename, }, }, - Config: config.FromModels(t, databaseConfigModelWithNewId, schemaModelConfigWithNewDatabaseId), - }, - }, - }) + }) + }) + } } -func TestAcc_ShallowHierarchy_IsInConfig_RenamedExternally_WithImplicitDependency_DatabaseHoldingTheOldNameInConfig(t *testing.T) { +func TestAcc_ShallowHierarchy_IsInConfig_RenamedExternally(t *testing.T) { _ = testenvs.GetOrSkipTest(t, testenvs.EnableObjectRenamingTest) _ = testenvs.GetOrSkipTest(t, testenvs.EnableAcceptance) acc.TestAccPreCheck(t) - databaseId := acc.TestClient().Ids.RandomAccountObjectIdentifier() - newDatabaseId := acc.TestClient().Ids.RandomAccountObjectIdentifier() - schemaName := acc.TestClient().Ids.Alpha() + testCases := []struct { + Dependency DependencyType + IsRenamedDatabaseReferencedAfterRenameInDatabase bool + IsRenamedDatabaseReferencedAfterRenameInSchema bool + ExpectedError *regexp.Regexp + }{ + // Errors with 'already exists' happen because we try to create databases or schemas when there's already an existing object in Snowflake with the same name. + {Dependency: ImplicitDependency, IsRenamedDatabaseReferencedAfterRenameInDatabase: false}, + {Dependency: ImplicitDependency, IsRenamedDatabaseReferencedAfterRenameInDatabase: true, ExpectedError: regexp.MustCompile(`Object '.*' already exists`)}, + + {Dependency: NoDependency, IsRenamedDatabaseReferencedAfterRenameInDatabase: false, IsRenamedDatabaseReferencedAfterRenameInSchema: true, ExpectedError: regexp.MustCompile("Failed to create schema")}, + {Dependency: NoDependency, IsRenamedDatabaseReferencedAfterRenameInDatabase: true, IsRenamedDatabaseReferencedAfterRenameInSchema: false, ExpectedError: regexp.MustCompile(`Object '.*' already exists`)}, + {Dependency: NoDependency, IsRenamedDatabaseReferencedAfterRenameInDatabase: true, IsRenamedDatabaseReferencedAfterRenameInSchema: true, ExpectedError: regexp.MustCompile(`Object '.*' already exists`)}, + + {Dependency: DependsOnDependency, IsRenamedDatabaseReferencedAfterRenameInDatabase: false, IsRenamedDatabaseReferencedAfterRenameInSchema: false}, + {Dependency: DependsOnDependency, IsRenamedDatabaseReferencedAfterRenameInDatabase: false, IsRenamedDatabaseReferencedAfterRenameInSchema: true, ExpectedError: regexp.MustCompile("Failed to create schema")}, + {Dependency: DependsOnDependency, IsRenamedDatabaseReferencedAfterRenameInDatabase: true, IsRenamedDatabaseReferencedAfterRenameInSchema: false, ExpectedError: regexp.MustCompile(`Object '.*' already exists`)}, + {Dependency: DependsOnDependency, IsRenamedDatabaseReferencedAfterRenameInDatabase: true, IsRenamedDatabaseReferencedAfterRenameInSchema: true, ExpectedError: regexp.MustCompile(`Object '.*' already exists`)}, + } - databaseConfigModel := model.Database("test", databaseId.Name()) + for _, testCase := range testCases { + t.Run(fmt.Sprintf("TestAcc_ dependency: %s, is using new database name in database resource: %t, in schema resource: %t", testCase.Dependency, testCase.IsRenamedDatabaseReferencedAfterRenameInDatabase, testCase.IsRenamedDatabaseReferencedAfterRenameInSchema), func(t *testing.T) { + databaseId := acc.TestClient().Ids.RandomAccountObjectIdentifier() + newDatabaseId := acc.TestClient().Ids.RandomAccountObjectIdentifier() + schemaName := acc.TestClient().Ids.Alpha() - resource.Test(t, resource.TestCase{ - ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, - TerraformVersionChecks: []tfversion.TerraformVersionCheck{ - 
tfversion.RequireAbove(tfversion.Version1_5_0), - }, - CheckDestroy: acc.CheckDestroy(t, resources.Schema), - Steps: []resource.TestStep{ - { - Config: config.FromModel(t, databaseConfigModel) + configSchemaWithDatabaseReference(databaseConfigModel.ResourceReference(), schemaName), - }, - { - PreConfig: func() { - acc.TestClient().Database.Alter(t, databaseId, &sdk.AlterDatabaseOptions{ - NewName: &newDatabaseId, - }) - t.Cleanup(acc.TestClient().Database.DropDatabaseFunc(t, newDatabaseId)) - }, - ConfigPlanChecks: resource.ConfigPlanChecks{ - PreApply: []plancheck.PlanCheck{ - plancheck.ExpectResourceAction("snowflake_database.test", plancheck.ResourceActionCreate), - plancheck.ExpectResourceAction("snowflake_schema.test", plancheck.ResourceActionCreate), - }, - }, - Config: config.FromModel(t, databaseConfigModel) + configSchemaWithDatabaseReference(databaseConfigModel.ResourceReference(), schemaName), - }, - }, - }) -} + databaseConfigModel := model.Database("test", databaseId.Name()) + databaseConfigModelWithNewId := model.Database("test", newDatabaseId.Name()) -func TestAcc_ShallowHierarchy_IsInConfig_RenamedExternally_WithImplicitDependency_DatabaseHoldingTheNewNameInConfig(t *testing.T) { - _ = testenvs.GetOrSkipTest(t, testenvs.EnableObjectRenamingTest) - _ = testenvs.GetOrSkipTest(t, testenvs.EnableAcceptance) - acc.TestAccPreCheck(t) + var databaseConfigAfterRename string + var schemaConfigAfterRename string - databaseId := acc.TestClient().Ids.RandomAccountObjectIdentifier() - newDatabaseId := acc.TestClient().Ids.RandomAccountObjectIdentifier() - schemaName := acc.TestClient().Ids.Alpha() + if testCase.IsRenamedDatabaseReferencedAfterRenameInDatabase { + databaseConfigAfterRename = config.FromModel(t, databaseConfigModelWithNewId) + } else { + databaseConfigAfterRename = config.FromModel(t, databaseConfigModel) + } - databaseConfigModel := model.Database("test", databaseId.Name()) - databaseConfigModelWithNewId := model.Database("test", newDatabaseId.Name()) + switch testCase.Dependency { + case ImplicitDependency: + schemaConfigAfterRename = configSchemaWithReferences(t, databaseConfigModel.ResourceReference(), testCase.Dependency, "", schemaName) + case DependsOnDependency, NoDependency: + if testCase.IsRenamedDatabaseReferencedAfterRenameInSchema { + schemaConfigAfterRename = configSchemaWithReferences(t, databaseConfigModel.ResourceReference(), testCase.Dependency, newDatabaseId.Name(), schemaName) + } else { + schemaConfigAfterRename = configSchemaWithReferences(t, databaseConfigModel.ResourceReference(), testCase.Dependency, databaseId.Name(), schemaName) + } + } - resource.Test(t, resource.TestCase{ - ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, - TerraformVersionChecks: []tfversion.TerraformVersionCheck{ - tfversion.RequireAbove(tfversion.Version1_5_0), - }, - CheckDestroy: acc.CheckDestroy(t, resources.Schema), - Steps: []resource.TestStep{ - { - Config: config.FromModel(t, databaseConfigModel) + configSchemaWithDatabaseReference(databaseConfigModel.ResourceReference(), schemaName), - }, - { - PreConfig: func() { - acc.TestClient().Database.Alter(t, databaseId, &sdk.AlterDatabaseOptions{ - NewName: &newDatabaseId, - }) - t.Cleanup(acc.TestClient().Database.DropDatabaseFunc(t, newDatabaseId)) + resource.Test(t, resource.TestCase{ + ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, + TerraformVersionChecks: []tfversion.TerraformVersionCheck{ + tfversion.RequireAbove(tfversion.Version1_5_0), }, - ConfigPlanChecks: resource.ConfigPlanChecks{ 
- PreApply: []plancheck.PlanCheck{ - plancheck.ExpectResourceAction("snowflake_database.test", plancheck.ResourceActionCreate), // Create is expected, because in refresh Read before apply the database is removing the unknown database from the state using d.SetId("") after failed ShowByID - plancheck.ExpectResourceAction("snowflake_schema.test", plancheck.ResourceActionCreate), // Create is expected, because in refresh Read before apply the schema is removing the unknown schema from the state using d.SetId("") after failed ShowByID + CheckDestroy: acc.CheckDestroy(t, resources.Schema), + Steps: []resource.TestStep{ + { + Config: config.FromModel(t, databaseConfigModel) + configSchemaWithDatabaseReference(databaseConfigModel.ResourceReference(), schemaName), + }, + { + PreConfig: func() { + acc.TestClient().Database.Alter(t, databaseId, &sdk.AlterDatabaseOptions{ + NewName: &newDatabaseId, + }) + t.Cleanup(acc.TestClient().Database.DropDatabaseFunc(t, newDatabaseId)) + }, + ConfigPlanChecks: resource.ConfigPlanChecks{ + PreApply: []plancheck.PlanCheck{ + // Creates are expected, because in refresh Read before apply the database is removing the unknown database from the state using d.SetId("") after failed ShowByID + plancheck.ExpectResourceAction("snowflake_database.test", plancheck.ResourceActionCreate), + plancheck.ExpectResourceAction("snowflake_schema.test", plancheck.ResourceActionCreate), + }, + }, + Config: databaseConfigAfterRename + schemaConfigAfterRename, + ExpectError: testCase.ExpectedError, }, }, - Config: config.FromModel(t, databaseConfigModelWithNewId) + configSchemaWithDatabaseReference(databaseConfigModelWithNewId.ResourceReference(), schemaName), - ExpectError: regexp.MustCompile(fmt.Sprintf(`Object '%s' already exists`, newDatabaseId.Name())), - }, - }, - }) + }) + }) + } } func TestAcc_ShallowHierarchy_IsInConfig_RenamedExternally_WithoutDependency_AfterRenameDatabaseReferencingOldNameAndSchemaReferencingOldDatabaseName(t *testing.T) { @@ -500,380 +383,64 @@ func TestAcc_ShallowHierarchy_IsInConfig_RenamedExternally_WithoutDependency_Aft }) } -func TestAcc_ShallowHierarchy_IsInConfig_RenamedExternally_WithoutDependency_AfterRenameDatabaseReferencingOldNameAndSchemaReferencingNewDatabaseName(t *testing.T) { +func TestAcc_ShallowHierarchy_IsNotInConfig_RenamedExternally(t *testing.T) { _ = testenvs.GetOrSkipTest(t, testenvs.EnableObjectRenamingTest) _ = testenvs.GetOrSkipTest(t, testenvs.EnableAcceptance) acc.TestAccPreCheck(t) - databaseId := acc.TestClient().Ids.RandomAccountObjectIdentifier() - newDatabaseId := acc.TestClient().Ids.RandomAccountObjectIdentifier() - schemaName := acc.TestClient().Ids.Alpha() + testCases := []struct { + IsReferencingNewDatabaseName bool + ExpectedError *regexp.Regexp + }{ + {IsReferencingNewDatabaseName: false, ExpectedError: regexp.MustCompile("object does not exist or not authorized")}, + {IsReferencingNewDatabaseName: true, ExpectedError: regexp.MustCompile("Failed to create schema")}, // already exists + } - databaseConfigModel := model.Database("test", databaseId.Name()) - schemaModelConfig := model.Schema("test", databaseId.Name(), schemaName) - schemaModelConfigWithNewDatabaseId := model.Schema("test", newDatabaseId.Name(), schemaName) - - resource.Test(t, resource.TestCase{ - ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, - TerraformVersionChecks: []tfversion.TerraformVersionCheck{ - tfversion.RequireAbove(tfversion.Version1_5_0), - }, - CheckDestroy: acc.CheckDestroy(t, resources.Schema), - Steps: 
[]resource.TestStep{ - { - Config: config.FromModels(t, databaseConfigModel, schemaModelConfig), - }, - { - PreConfig: func() { - acc.TestClient().Database.Alter(t, databaseId, &sdk.AlterDatabaseOptions{NewName: &newDatabaseId}) - t.Cleanup(acc.TestClient().Database.DropDatabaseFunc(t, newDatabaseId)) - }, - ConfigPlanChecks: resource.ConfigPlanChecks{ - PreApply: []plancheck.PlanCheck{ - plancheck.ExpectResourceAction("snowflake_database.test", plancheck.ResourceActionCreate), - plancheck.ExpectResourceAction("snowflake_schema.test", plancheck.ResourceActionCreate), - }, - }, - Config: config.FromModels(t, databaseConfigModel, schemaModelConfigWithNewDatabaseId), - ExpectError: regexp.MustCompile("Failed to create schema"), // already exists (because we try to create a schema on the renamed database that already has the schema that was previously created by terraform and wasn't removed) - }, - }, - }) -} - -func TestAcc_ShallowHierarchy_IsInConfig_RenamedExternally_WithoutDependency_AfterRenameDatabaseReferencingNewNameAndSchemaReferencingOldDatabaseName(t *testing.T) { - _ = testenvs.GetOrSkipTest(t, testenvs.EnableObjectRenamingTest) - _ = testenvs.GetOrSkipTest(t, testenvs.EnableAcceptance) - acc.TestAccPreCheck(t) - - databaseId := acc.TestClient().Ids.RandomAccountObjectIdentifier() - newDatabaseId := acc.TestClient().Ids.RandomAccountObjectIdentifier() - schemaName := acc.TestClient().Ids.Alpha() - - databaseConfigModel := model.Database("test", databaseId.Name()) - databaseConfigModelWithNewId := model.Database("test", newDatabaseId.Name()) - schemaModelConfig := model.Schema("test", databaseId.Name(), schemaName) - - resource.Test(t, resource.TestCase{ - ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, - TerraformVersionChecks: []tfversion.TerraformVersionCheck{ - tfversion.RequireAbove(tfversion.Version1_5_0), - }, - CheckDestroy: acc.CheckDestroy(t, resources.Schema), - Steps: []resource.TestStep{ - { - Config: config.FromModels(t, databaseConfigModel, schemaModelConfig), - }, - { - PreConfig: func() { - acc.TestClient().Database.Alter(t, databaseId, &sdk.AlterDatabaseOptions{NewName: &newDatabaseId}) - t.Cleanup(acc.TestClient().Database.DropDatabaseFunc(t, newDatabaseId)) - }, - ConfigPlanChecks: resource.ConfigPlanChecks{ - PreApply: []plancheck.PlanCheck{ - plancheck.ExpectResourceAction("snowflake_database.test", plancheck.ResourceActionCreate), - plancheck.ExpectResourceAction("snowflake_schema.test", plancheck.ResourceActionCreate), - }, - }, - Config: config.FromModels(t, databaseConfigModelWithNewId, schemaModelConfig), - ExpectError: regexp.MustCompile(fmt.Sprintf(`Object '%s' already exists`, newDatabaseId.Name())), - }, - }, - }) -} - -func TestAcc_ShallowHierarchy_IsInConfig_RenamedExternally_WithoutDependency_AfterRenameDatabaseReferencingNewNameAndSchemaReferencingNewDatabaseName(t *testing.T) { - _ = testenvs.GetOrSkipTest(t, testenvs.EnableObjectRenamingTest) - _ = testenvs.GetOrSkipTest(t, testenvs.EnableAcceptance) - acc.TestAccPreCheck(t) - - databaseId := acc.TestClient().Ids.RandomAccountObjectIdentifier() - newDatabaseId := acc.TestClient().Ids.RandomAccountObjectIdentifier() - schemaName := acc.TestClient().Ids.Alpha() - - databaseConfigModel := model.Database("test", databaseId.Name()) - databaseConfigModelWithNewId := model.Database("test", newDatabaseId.Name()) - schemaModelConfig := model.Schema("test", databaseId.Name(), schemaName) - schemaModelConfigWithNewDatabaseId := model.Schema("test", newDatabaseId.Name(), schemaName) - - 
resource.Test(t, resource.TestCase{ - ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, - TerraformVersionChecks: []tfversion.TerraformVersionCheck{ - tfversion.RequireAbove(tfversion.Version1_5_0), - }, - CheckDestroy: acc.CheckDestroy(t, resources.Schema), - Steps: []resource.TestStep{ - { - Config: config.FromModels(t, databaseConfigModel, schemaModelConfig), - }, - { - PreConfig: func() { - acc.TestClient().Database.Alter(t, databaseId, &sdk.AlterDatabaseOptions{NewName: &newDatabaseId}) - t.Cleanup(acc.TestClient().Database.DropDatabaseFunc(t, newDatabaseId)) - }, - ConfigPlanChecks: resource.ConfigPlanChecks{ - PreApply: []plancheck.PlanCheck{ - plancheck.ExpectResourceAction("snowflake_database.test", plancheck.ResourceActionCreate), - plancheck.ExpectResourceAction("snowflake_schema.test", plancheck.ResourceActionCreate), - }, - }, - Config: config.FromModels(t, databaseConfigModelWithNewId, schemaModelConfigWithNewDatabaseId), - ExpectError: regexp.MustCompile(fmt.Sprintf(`Object '%s' already exists`, newDatabaseId.Name())), - }, - }, - }) -} - -func TestAcc_ShallowHierarchy_IsInConfig_RenamedExternally_WithDependsOn_AfterRenameDatabaseReferencingOldNameAndSchemaReferencingOldDatabaseName(t *testing.T) { - _ = testenvs.GetOrSkipTest(t, testenvs.EnableObjectRenamingTest) - _ = testenvs.GetOrSkipTest(t, testenvs.EnableAcceptance) - acc.TestAccPreCheck(t) - - databaseId := acc.TestClient().Ids.RandomAccountObjectIdentifier() - schemaName := acc.TestClient().Ids.Alpha() - - databaseConfigModel := model.Database("test", databaseId.Name()) - schemaModelConfig := model.Schema("test", databaseId.Name(), schemaName) - schemaModelConfig.SetDependsOn(databaseConfigModel.ResourceReference()) - - resource.Test(t, resource.TestCase{ - ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, - TerraformVersionChecks: []tfversion.TerraformVersionCheck{ - tfversion.RequireAbove(tfversion.Version1_5_0), - }, - CheckDestroy: acc.CheckDestroy(t, resources.Schema), - Steps: []resource.TestStep{ - { - Config: config.FromModels(t, databaseConfigModel, schemaModelConfig), - }, - { - PreConfig: func() { - newDatabaseId := acc.TestClient().Ids.RandomAccountObjectIdentifier() - acc.TestClient().Database.Alter(t, databaseId, &sdk.AlterDatabaseOptions{NewName: &newDatabaseId}) - t.Cleanup(acc.TestClient().Database.DropDatabaseFunc(t, newDatabaseId)) - }, - ConfigPlanChecks: resource.ConfigPlanChecks{ - PreApply: []plancheck.PlanCheck{ - plancheck.ExpectResourceAction("snowflake_database.test", plancheck.ResourceActionCreate), - plancheck.ExpectResourceAction("snowflake_schema.test", plancheck.ResourceActionCreate), - }, - }, - Config: config.FromModels(t, databaseConfigModel, schemaModelConfig), - }, - }, - }) -} - -func TestAcc_ShallowHierarchy_IsInConfig_RenamedExternally_WithDependsOn_AfterRenameDatabaseReferencingOldNameAndSchemaReferencingNewDatabaseName(t *testing.T) { - _ = testenvs.GetOrSkipTest(t, testenvs.EnableObjectRenamingTest) - _ = testenvs.GetOrSkipTest(t, testenvs.EnableAcceptance) - acc.TestAccPreCheck(t) - - databaseId := acc.TestClient().Ids.RandomAccountObjectIdentifier() - newDatabaseId := acc.TestClient().Ids.RandomAccountObjectIdentifier() - schemaName := acc.TestClient().Ids.Alpha() - - databaseConfigModel := model.Database("test", databaseId.Name()) - schemaModelConfig := model.Schema("test", databaseId.Name(), schemaName) - schemaModelConfig.SetDependsOn(databaseConfigModel.ResourceReference()) - schemaModelConfigWithNewDatabaseId := model.Schema("test", 
newDatabaseId.Name(), schemaName) - schemaModelConfigWithNewDatabaseId.SetDependsOn(databaseConfigModel.ResourceReference()) - - resource.Test(t, resource.TestCase{ - ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, - TerraformVersionChecks: []tfversion.TerraformVersionCheck{ - tfversion.RequireAbove(tfversion.Version1_5_0), - }, - CheckDestroy: acc.CheckDestroy(t, resources.Schema), - Steps: []resource.TestStep{ - { - Config: config.FromModels(t, databaseConfigModel, schemaModelConfig), - }, - { - PreConfig: func() { - acc.TestClient().Database.Alter(t, databaseId, &sdk.AlterDatabaseOptions{NewName: &newDatabaseId}) - t.Cleanup(acc.TestClient().Database.DropDatabaseFunc(t, newDatabaseId)) - }, - ConfigPlanChecks: resource.ConfigPlanChecks{ - PreApply: []plancheck.PlanCheck{ - plancheck.ExpectResourceAction("snowflake_database.test", plancheck.ResourceActionCreate), - plancheck.ExpectResourceAction("snowflake_schema.test", plancheck.ResourceActionCreate), - }, - }, - Config: config.FromModels(t, databaseConfigModel, schemaModelConfigWithNewDatabaseId), - ExpectError: regexp.MustCompile("Failed to create schema"), // already exists - }, - }, - }) -} - -func TestAcc_ShallowHierarchy_IsInConfig_RenamedExternally_WithDependsOn_AfterRenameDatabaseReferencingNewNameAndSchemaReferencingOldDatabaseName(t *testing.T) { - _ = testenvs.GetOrSkipTest(t, testenvs.EnableObjectRenamingTest) - _ = testenvs.GetOrSkipTest(t, testenvs.EnableAcceptance) - acc.TestAccPreCheck(t) - - databaseId := acc.TestClient().Ids.RandomAccountObjectIdentifier() - newDatabaseId := acc.TestClient().Ids.RandomAccountObjectIdentifier() - schemaName := acc.TestClient().Ids.Alpha() - - databaseConfigModel := model.Database("test", databaseId.Name()) - databaseConfigModelWithNewId := model.Database("test", newDatabaseId.Name()) - schemaModelConfig := model.Schema("test", databaseId.Name(), schemaName) - schemaModelConfig.SetDependsOn(databaseConfigModel.ResourceReference()) - - resource.Test(t, resource.TestCase{ - ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, - TerraformVersionChecks: []tfversion.TerraformVersionCheck{ - tfversion.RequireAbove(tfversion.Version1_5_0), - }, - CheckDestroy: acc.CheckDestroy(t, resources.Schema), - Steps: []resource.TestStep{ - { - Config: config.FromModels(t, databaseConfigModel, schemaModelConfig), - }, - { - PreConfig: func() { - acc.TestClient().Database.Alter(t, databaseId, &sdk.AlterDatabaseOptions{NewName: &newDatabaseId}) - t.Cleanup(acc.TestClient().Database.DropDatabaseFunc(t, newDatabaseId)) - }, - ConfigPlanChecks: resource.ConfigPlanChecks{ - PreApply: []plancheck.PlanCheck{ - plancheck.ExpectResourceAction("snowflake_database.test", plancheck.ResourceActionCreate), - plancheck.ExpectResourceAction("snowflake_schema.test", plancheck.ResourceActionCreate), - }, - }, - Config: config.FromModels(t, databaseConfigModelWithNewId, schemaModelConfig), - ExpectError: regexp.MustCompile(fmt.Sprintf(`Object '%s' already exists`, newDatabaseId.Name())), - }, - }, - }) -} - -func TestAcc_ShallowHierarchy_IsInConfig_RenamedExternally_WithDependsOn_AfterRenameDatabaseReferencingNewNameAndSchemaReferencingNewDatabaseName(t *testing.T) { - _ = testenvs.GetOrSkipTest(t, testenvs.EnableObjectRenamingTest) - _ = testenvs.GetOrSkipTest(t, testenvs.EnableAcceptance) - acc.TestAccPreCheck(t) - - databaseId := acc.TestClient().Ids.RandomAccountObjectIdentifier() - newDatabaseId := acc.TestClient().Ids.RandomAccountObjectIdentifier() - schemaName := acc.TestClient().Ids.Alpha() 
- - databaseConfigModel := model.Database("test", databaseId.Name()) - databaseConfigModelWithNewId := model.Database("test", newDatabaseId.Name()) - schemaModelConfig := model.Schema("test", databaseId.Name(), schemaName) - schemaModelConfig.SetDependsOn(databaseConfigModel.ResourceReference()) - schemaModelConfigWithNewDatabaseId := model.Schema("test", newDatabaseId.Name(), schemaName) - schemaModelConfigWithNewDatabaseId.SetDependsOn(databaseConfigModelWithNewId.ResourceReference()) - - resource.Test(t, resource.TestCase{ - ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, - TerraformVersionChecks: []tfversion.TerraformVersionCheck{ - tfversion.RequireAbove(tfversion.Version1_5_0), - }, - CheckDestroy: acc.CheckDestroy(t, resources.Schema), - Steps: []resource.TestStep{ - { - Config: config.FromModels(t, databaseConfigModel, schemaModelConfig), - }, - { - PreConfig: func() { - acc.TestClient().Database.Alter(t, databaseId, &sdk.AlterDatabaseOptions{NewName: &newDatabaseId}) - t.Cleanup(acc.TestClient().Database.DropDatabaseFunc(t, newDatabaseId)) - }, - ConfigPlanChecks: resource.ConfigPlanChecks{ - PreApply: []plancheck.PlanCheck{ - plancheck.ExpectResourceAction("snowflake_database.test", plancheck.ResourceActionCreate), - plancheck.ExpectResourceAction("snowflake_schema.test", plancheck.ResourceActionCreate), - }, - }, - Config: config.FromModels(t, databaseConfigModelWithNewId, schemaModelConfigWithNewDatabaseId), - ExpectError: regexp.MustCompile(fmt.Sprintf(`Object '%s' already exists`, newDatabaseId.Name())), - }, - }, - }) -} - -func TestAcc_ShallowHierarchy_IsNotInConfig_RenamedExternally_ReferencingOldName(t *testing.T) { - _ = testenvs.GetOrSkipTest(t, testenvs.EnableObjectRenamingTest) - _ = testenvs.GetOrSkipTest(t, testenvs.EnableAcceptance) - acc.TestAccPreCheck(t) + for _, testCase := range testCases { + t.Run(fmt.Sprintf("TestAcc_ referencing new database name: %t", testCase.IsReferencingNewDatabaseName), func(t *testing.T) { + databaseId := acc.TestClient().Ids.RandomAccountObjectIdentifier() + newDatabaseId := acc.TestClient().Ids.RandomAccountObjectIdentifier() + schemaName := acc.TestClient().Ids.Alpha() + schemaModelConfig := model.Schema("test", databaseId.Name(), schemaName) - databaseId := acc.TestClient().Ids.RandomAccountObjectIdentifier() - newDatabaseId := acc.TestClient().Ids.RandomAccountObjectIdentifier() - schemaName := acc.TestClient().Ids.Alpha() - schemaModelConfig := model.Schema("test", databaseId.Name(), schemaName) + var schemaConfigAfterRename string + if testCase.IsReferencingNewDatabaseName { + schemaConfigAfterRename = configSchemaWithReferences(t, "", NoDependency, newDatabaseId.Name(), schemaName) + } else { + schemaConfigAfterRename = configSchemaWithReferences(t, "", NoDependency, databaseId.Name(), schemaName) + } - resource.Test(t, resource.TestCase{ - ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, - TerraformVersionChecks: []tfversion.TerraformVersionCheck{ - tfversion.RequireAbove(tfversion.Version1_5_0), - }, - CheckDestroy: acc.CheckDestroy(t, resources.Schema), - Steps: []resource.TestStep{ - { - PreConfig: func() { - _, databaseCleanup := acc.TestClient().Database.CreateDatabaseWithIdentifier(t, databaseId) - t.Cleanup(databaseCleanup) - }, - Config: config.FromModel(t, schemaModelConfig), - }, - { - PreConfig: func() { - acc.TestClient().Database.Alter(t, databaseId, &sdk.AlterDatabaseOptions{NewName: &newDatabaseId}) - t.Cleanup(acc.TestClient().Database.DropDatabaseFunc(t, newDatabaseId)) + 
resource.Test(t, resource.TestCase{ + ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, + TerraformVersionChecks: []tfversion.TerraformVersionCheck{ + tfversion.RequireAbove(tfversion.Version1_5_0), }, - ConfigPlanChecks: resource.ConfigPlanChecks{ - PreApply: []plancheck.PlanCheck{ - plancheck.ExpectResourceAction("snowflake_schema.test", plancheck.ResourceActionCreate), + CheckDestroy: acc.CheckDestroy(t, resources.Schema), + Steps: []resource.TestStep{ + { + PreConfig: func() { + _, databaseCleanup := acc.TestClient().Database.CreateDatabaseWithIdentifier(t, databaseId) + t.Cleanup(databaseCleanup) + }, + Config: config.FromModel(t, schemaModelConfig), }, - }, - Config: config.FromModel(t, schemaModelConfig), - ExpectError: regexp.MustCompile("object does not exist or not authorized"), - }, - }, - }) -} - -func TestAcc_ShallowHierarchy_IsNotInConfig_RenamedExternally_ReferencingNewName(t *testing.T) { - _ = testenvs.GetOrSkipTest(t, testenvs.EnableObjectRenamingTest) - _ = testenvs.GetOrSkipTest(t, testenvs.EnableAcceptance) - acc.TestAccPreCheck(t) - - databaseId := acc.TestClient().Ids.RandomAccountObjectIdentifier() - newDatabaseId := acc.TestClient().Ids.RandomAccountObjectIdentifier() - schemaName := acc.TestClient().Ids.Alpha() - schemaModelConfig := model.Schema("test", databaseId.Name(), schemaName) - schemaModelConfigWithNewDatabaseId := model.Schema("test", newDatabaseId.Name(), schemaName) - - resource.Test(t, resource.TestCase{ - ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, - TerraformVersionChecks: []tfversion.TerraformVersionCheck{ - tfversion.RequireAbove(tfversion.Version1_5_0), - }, - CheckDestroy: acc.CheckDestroy(t, resources.Schema), - Steps: []resource.TestStep{ - { - PreConfig: func() { - _, databaseCleanup := acc.TestClient().Database.CreateDatabaseWithIdentifier(t, databaseId) - t.Cleanup(databaseCleanup) - }, - Config: config.FromModel(t, schemaModelConfig), - }, - { - PreConfig: func() { - acc.TestClient().Database.Alter(t, databaseId, &sdk.AlterDatabaseOptions{NewName: &newDatabaseId}) - t.Cleanup(acc.TestClient().Database.DropDatabaseFunc(t, newDatabaseId)) - }, - ConfigPlanChecks: resource.ConfigPlanChecks{ - PreApply: []plancheck.PlanCheck{ - plancheck.ExpectResourceAction("snowflake_schema.test", plancheck.ResourceActionCreate), + { + PreConfig: func() { + acc.TestClient().Database.Alter(t, databaseId, &sdk.AlterDatabaseOptions{NewName: &newDatabaseId}) + t.Cleanup(acc.TestClient().Database.DropDatabaseFunc(t, newDatabaseId)) + }, + ConfigPlanChecks: resource.ConfigPlanChecks{ + PreApply: []plancheck.PlanCheck{ + plancheck.ExpectResourceAction("snowflake_schema.test", plancheck.ResourceActionCreate), + }, + }, + Config: schemaConfigAfterRename, + ExpectError: testCase.ExpectedError, }, }, - Config: config.FromModel(t, schemaModelConfigWithNewDatabaseId), - ExpectError: regexp.MustCompile("Failed to create schema"), // already exists - }, - }, - }) + }) + }) + } } func configSchemaWithDatabaseReference(databaseReference string, schemaName string) string { diff --git a/pkg/resources/object_renaming_lists_and_sets.go b/pkg/resources/object_renaming_lists_and_sets.go new file mode 100644 index 0000000000..7888018ce6 --- /dev/null +++ b/pkg/resources/object_renaming_lists_and_sets.go @@ -0,0 +1,709 @@ +package resources + +import ( + "context" + "errors" + "fmt" + "log" + "strconv" + "strings" + + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/internal/collections" + "github.com/hashicorp/go-cty/cty" + 
"github.com/hashicorp/terraform-plugin-sdk/v2/diag" + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" + "golang.org/x/exp/slices" +) + +type ObjectRenamingDatabaseListItem struct { + String string + Int int +} + +func mapObjectRenamingDatabaseListItemFromValue(items []cty.Value) []ObjectRenamingDatabaseListItem { + return collections.Map(items, func(item cty.Value) ObjectRenamingDatabaseListItem { + intValue, _ := item.AsValueMap()["int"].AsBigFloat().Int64() + return ObjectRenamingDatabaseListItem{ + String: item.AsValueMap()["string"].AsString(), + Int: int(intValue), + } + }) +} + +func objectRenamingDatabaseListFromSchema(items []any) []ObjectRenamingDatabaseListItem { + return collections.Map(items, func(item any) ObjectRenamingDatabaseListItem { + return ObjectRenamingDatabaseListItem{ + String: item.(map[string]any)["string"].(string), + Int: item.(map[string]any)["int"].(int), + } + }) +} + +type objectRenamingDatabaseOrderedListItem struct { + Name string + Order string +} + +func mapObjectRenamingDatabaseOrderedListItemFromValue(items []cty.Value) []objectRenamingDatabaseOrderedListItem { + return collections.Map(items, func(item cty.Value) objectRenamingDatabaseOrderedListItem { + var order string + if orderValue, ok := item.AsValueMap()["order"]; ok && !orderValue.IsNull() { + order = orderValue.AsString() + } + var name string + if nameValue, ok := item.AsValueMap()["name"]; ok && !nameValue.IsNull() { + name = nameValue.AsString() + } + return objectRenamingDatabaseOrderedListItem{ + Name: name, + Order: order, + } + }) +} + +func objectRenamingDatabaseOrderedListFromSchema(list []any) []objectRenamingDatabaseOrderedListItem { + objectRenamingDatabaseOrderedListItems := make([]objectRenamingDatabaseOrderedListItem, len(list)) + for index, item := range list { + var name string + if nameValue, ok := item.(map[string]any)["name"]; ok { + name = nameValue.(string) + } + objectRenamingDatabaseOrderedListItems[index] = objectRenamingDatabaseOrderedListItem{ + Name: name, + Order: strconv.Itoa(index), + } + } + return objectRenamingDatabaseOrderedListItems +} + +type ObjectRenamingDatabaseManuallyOrderedListItem struct { + Name string + Type string +} + +func objectRenamingDatabaseManuallyOrderedListFromSchema(list []any) []ObjectRenamingDatabaseManuallyOrderedListItem { + objectRenamingDatabaseOrderedListItems := make([]ObjectRenamingDatabaseManuallyOrderedListItem, len(list)) + slices.SortFunc(list, func(a, b any) int { + return a.(map[string]any)["order"].(int) - b.(map[string]any)["order"].(int) + }) + for index, item := range list { + objectRenamingDatabaseOrderedListItems[index] = ObjectRenamingDatabaseManuallyOrderedListItem{ + Name: item.(map[string]any)["name"].(string), + Type: item.(map[string]any)["type"].(string), + } + } + return objectRenamingDatabaseOrderedListItems +} + +type objectRenamingDatabase struct { + List []ObjectRenamingDatabaseListItem + OrderedList []objectRenamingDatabaseOrderedListItem + ManuallyOrderedList []ObjectRenamingDatabaseManuallyOrderedListItem + ChangeLog ObjectRenamingDatabaseChangelog +} + +type ObjectRenamingDatabaseChangelogChange struct { + Before map[string]any + After map[string]any +} + +// ObjectRenamingDatabaseChangelog is used for testing purposes to track actions taken in the Update method like Add/Remove/Change. +// It's only supported the manually_ordered_list option. 
+type ObjectRenamingDatabaseChangelog struct { + Added []map[string]any + Removed []map[string]any + Changed []ObjectRenamingDatabaseChangelogChange +} + +var ObjectRenamingDatabaseInstance = &objectRenamingDatabase{ + List: make([]ObjectRenamingDatabaseListItem, 0), + OrderedList: make([]objectRenamingDatabaseOrderedListItem, 0), + ManuallyOrderedList: make([]ObjectRenamingDatabaseManuallyOrderedListItem, 0), + ChangeLog: ObjectRenamingDatabaseChangelog{}, +} + +var objectRenamingListsAndSetsSchema = map[string]*schema.Schema{ + // The list field was tested to be used in places where the order of the items should be ignored. + // It was ignored by comparing hashes of the items to see if any changes were made on the items themselves + // (if the hashes before and after were the same, we know that nothing was changed, only the order). + // Also, it doesn't fully support repeating items. This is because they have the same hash and to fully support it, + // hash counting could be added (counting if the same hash occurs in state and config the number of times, otherwise cause update). + // Modifications of the items will still cause remove/add behavior. + "list": { + Optional: true, + Type: schema.TypeList, + DiffSuppressFunc: ignoreListOrderAfterFirstApply("list"), + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "string": { + Type: schema.TypeString, + Optional: true, + }, + "int": { + Type: schema.TypeInt, + Optional: true, + }, + }, + }, + }, + // The manually_ordered_list focuses on providing both aspects: + // - Immunity to item reordering after create. + // - Handling updates for changed items instead of removing the old item and adding a new one. + // It does it by providing the required order field that represents what should be the actual order of items + // on the Snowflake side. The order is ignored on the DiffSuppressFunc level, and the item update (renaming the item) + // is handled in the resource update function. This proposal is supposed to test the behavior of Snowflake columns needed for table refactor for v1. + // Here's the full list of what should be possible with this approach: + // Supported actions: + // - Drop item (any position). + // - Add item (at the end). + // - Rename item / Change item type (any position; compatible type change). + // - Reorder items. + // Unsupported actions: + // - Add item (in the middle). + // - Change item type (incompatible change). + // - External changes (with an option to set either ForceNew or error behavior). + // Assumptions: + // - The list "returned from Snowflake side" is ordered (or identifiable). + // - Order field is treated as an identifier that cannot be changed for the lifetime of a given item. + // - Items contain fields that are able to uniquely identify a given item (in this case, we have name + type). + "manually_ordered_list": { + Optional: true, + Type: schema.TypeList, + DiffSuppressFunc: ignoreOrderAfterFirstApplyWithManuallyOrderedList("manually_ordered_list"), + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "order": { + Type: schema.TypeInt, + Required: true, + // Improvement: + // Cause ForceNew behavior whenever any of the items change their order to different value than previously. + // It's not trivial as it cannot be achieved by putting ForceNew modifier in the schema (with the current implementation of Update/Read/SuppressDiff). + // It also cannot be achieved by creating custom diff. 
It seems the custom diff sees the changes + // too late to call ForceNew and for Terraform to show it during the plan or apply it during the apply. + // Currently, the only good way to prevent such changes is to describe them clearly in the documentation. + }, + "name": { + Type: schema.TypeString, + Required: true, + }, + "type": { + Type: schema.TypeString, + Required: true, + ValidateDiagFunc: sdkValidation(func(value string) (string, error) { + if slices.Contains([]string{"INT", "NUMBER", "STRING", "TEXT"}, value) { + return value, nil + } + return "", fmt.Errorf("invalid type: %s", value) + }), + }, + }, + }, + }, + // The invalid_operation_handler is a switch that we wanted to implement for the approach with manually_ordered_list. + // The idea behind it was to switch between different error handling behaviors for invalid operations we discussed + // (see unsupported actions in the manually_ordered_list description). For now, we wanted to have two options: + // error out or force re-creation (force new). Currently, the proposal uses only errors because during FORCE_NEW + // testing it seemed like it's impossible to force resource re-creation without adding more logic into the custom diff + // that would be able to calculate it at plan time, not apply time. + "invalid_operation_handler": { + Type: schema.TypeString, + Optional: true, + Default: "ERROR", + ValidateDiagFunc: sdkValidation(func(value string) (string, error) { + if slices.Contains([]string{"ERROR", "FORCE_NEW"}, value) { + return value, nil + } + return "", fmt.Errorf("invalid invalid operation handler: %s", value) + }), + }, + // The invalid_operation field was an attempt to collect error messages gathered in the Update and Read methods + // and use it in custom diff that would try to re-create the resource on non-empty invalid_operation field. + // The FORCE_NEW handler applied this way seemed to have no impact on the resource behavior because the information was + // transferred to the field too late. As mentioned, it happened during Update and Read when the `terraform apply` is + // already running. During the run, it's not valid to apply the force new, because it has to be known before the apply, so that + // Terraform would be able to show it during the plan. Because of that, we know that the logic inside the custom diff has to + // do much more guessing on its own, so we would be able to know those invalid operations at plan time. + // Only then would we be able to have invalid_operation_handler with FORCE_NEW as a valid option (and this field would be + // most likely useless and could be removed). + "invalid_operation": { + Type: schema.TypeString, + Computed: true, + }, + // The ordered_list field was an attempt at making the manual work done in manually_ordered_list automatic by making the order field computed. + // It didn't work because in DiffSuppressFunc it's hard to get the computed value in the "after" state to compare against. + // Possibly (but with very low probability), the solution could work by introducing a Computed + Optional list that would be managed by a custom diff function. + // Due to increased complexity, it was left as is and more research was dedicated to manually_ordered_list. 
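+	//
+	// For illustration, a hypothetical config using the manually_ordered_list approach described above
+	// (resource and field names as defined in this schema; the item values are made up):
+	//
+	//	resource "snowflake_object_renaming" "test" {
+	//		manually_ordered_list {
+	//			name  = "column_a"
+	//			type  = "INT"
+	//			order = 0
+	//		}
+	//		manually_ordered_list {
+	//			name  = "column_b"
+	//			type  = "TEXT"
+	//			order = 1
+	//		}
+	//	}
+	//
+	// Reordering these blocks in the config should be suppressed, while renaming an item or changing it to a
+	// compatible type should be handled as an in-place update keyed by the unchanged order value.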
+ "ordered_list": { + Optional: true, + Type: schema.TypeList, + DiffSuppressFunc: ignoreOrderAfterFirstApplyWithOrderedList("ordered_list"), + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "name": { + Type: schema.TypeString, + Optional: true, + }, + "order": { + Type: schema.TypeString, + Computed: true, + }, + }, + }, + }, +} + +func ObjectRenamingListsAndSets() *schema.Resource { + return &schema.Resource{ + CreateContext: CreateObjectRenamingListsAndSets, + UpdateContext: UpdateObjectRenamingListsAndSets, + ReadContext: ReadObjectRenamingListsAndSets(true), + DeleteContext: DeleteObjectRenamingListsAndSets, + + Schema: objectRenamingListsAndSetsSchema, + } +} + +func CreateObjectRenamingListsAndSets(ctx context.Context, d *schema.ResourceData, meta any) diag.Diagnostics { + ObjectRenamingDatabaseInstance.List = objectRenamingDatabaseListFromSchema(d.Get("list").([]any)) + ObjectRenamingDatabaseInstance.OrderedList = objectRenamingDatabaseOrderedListFromSchema(d.Get("ordered_list").([]any)) + ObjectRenamingDatabaseInstance.ManuallyOrderedList = objectRenamingDatabaseManuallyOrderedListFromSchema(d.Get("manually_ordered_list").([]any)) + + d.SetId("identifier") + + return ReadObjectRenamingListsAndSets(false)(ctx, d, meta) +} + +func UpdateObjectRenamingListsAndSets(ctx context.Context, d *schema.ResourceData, meta any) diag.Diagnostics { + if d.HasChange("list") { + // It wasn't working with d.getChange(). It was returning null elements in one of the test case's steps. + oldList := d.GetRawState().AsValueMap()["list"].AsValueSlice() + newList := d.GetRawConfig().AsValueMap()["list"].AsValueSlice() + + oldListMapped := mapObjectRenamingDatabaseListItemFromValue(oldList) + newListMapped := mapObjectRenamingDatabaseListItemFromValue(newList) + + addedItems, removedItems := ListDiff(oldListMapped, newListMapped) + + for _, removedItem := range removedItems { + ObjectRenamingDatabaseInstance.List = slices.DeleteFunc(ObjectRenamingDatabaseInstance.List, func(item ObjectRenamingDatabaseListItem) bool { + shouldRemove := item == removedItem + if shouldRemove { + ObjectRenamingDatabaseInstance.ChangeLog.Removed = append(ObjectRenamingDatabaseInstance.ChangeLog.Removed, map[string]any{ + "int": item.Int, + "string": item.String, + }) + } + return shouldRemove + }) + } + + for _, addedItem := range addedItems { + ObjectRenamingDatabaseInstance.ChangeLog.Added = append(ObjectRenamingDatabaseInstance.ChangeLog.Added, map[string]any{ + "int": addedItem.Int, + "string": addedItem.String, + }) + } + + ObjectRenamingDatabaseInstance.List = append(ObjectRenamingDatabaseInstance.List, addedItems...) 
+ } + + if d.HasChange("ordered_list") { + oldOrderedList := d.GetRawState().AsValueMap()["ordered_list"].AsValueSlice() + newOrderedList := d.GetRawConfig().AsValueMap()["ordered_list"].AsValueSlice() + oldOrderedListMapped := mapObjectRenamingDatabaseOrderedListItemFromValue(oldOrderedList) + newOrderedListMapped := mapObjectRenamingDatabaseOrderedListItemFromValue(newOrderedList) + + itemsToAdd, itemsToRemove := ListDiff(oldOrderedListMapped, newOrderedListMapped) + for _, removedItem := range itemsToRemove { + ObjectRenamingDatabaseInstance.OrderedList = slices.DeleteFunc(ObjectRenamingDatabaseInstance.OrderedList, func(item objectRenamingDatabaseOrderedListItem) bool { + return item == removedItem + }) + } + + for _, addedItem := range itemsToAdd { + addedItem.Order = "" + ObjectRenamingDatabaseInstance.OrderedList = append(ObjectRenamingDatabaseInstance.OrderedList, addedItem) + } + + // The implementation is not complete due to mentioned issues with computed order in DiffSuppressFunc + } + + if d.HasChange("manually_ordered_list") { + invalidOperations := make([]error, 0) + updateChangelog := ObjectRenamingDatabaseChangelog{} + + oldManuallyOrderedList := d.GetRawState().AsValueMap()["manually_ordered_list"].AsValueSlice() + newManuallyOrderedList := d.GetRawConfig().AsValueMap()["manually_ordered_list"].AsValueSlice() + + oldOrders := collections.Map(oldManuallyOrderedList, func(item cty.Value) int { + result, _ := item.AsValueMap()["order"].AsBigFloat().Int64() + return int(result) + }) + maxStateOrder := slices.MaxFunc(oldOrders, func(a, b int) int { return a - b }) + finalState := make([]ObjectRenamingDatabaseManuallyOrderedListItem, 0) + + for _, oldItem := range oldManuallyOrderedList { + oldItem := oldItem.AsValueMap() + newItemIndex := slices.IndexFunc(newManuallyOrderedList, func(newItem cty.Value) bool { + return oldItem["order"].AsBigFloat().Cmp(newItem.AsValueMap()["order"].AsBigFloat()) == 0 + }) + // Here we analyze already existing items and check if they need to be updated in any way. + if newItemIndex != -1 { + newItem := newManuallyOrderedList[newItemIndex] + newName := oldItem["name"].AsString() + newType := oldItem["type"].AsString() + wasChanged := false + + if oldItem["name"].AsString() != newItem.AsValueMap()["name"].AsString() { + // Change name + newName = newItem.AsValueMap()["name"].AsString() + wasChanged = true + } + + if oldItem["type"].AsString() != newItem.AsValueMap()["type"].AsString() { + // Change type + newType = newItem.AsValueMap()["type"].AsString() + wasChanged = true + + // Check for incompatible types + if slices.Contains([]string{"TEXT", "STRING"}, oldItem["type"].AsString()) && slices.Contains([]string{"INT", "NUMBER"}, newType) || + slices.Contains([]string{"INT", "NUMBER"}, oldItem["type"].AsString()) && slices.Contains([]string{"TEXT", "STRING"}, newType) { + invalidOperations = append(invalidOperations, fmt.Errorf("unable to change item type from %s to %s", oldItem["type"].AsString(), newType)) + } + } + + itemToAdd := ObjectRenamingDatabaseManuallyOrderedListItem{ + Name: newName, + Type: newType, + } + finalState = append(finalState, itemToAdd) + + if wasChanged { + updateChangelog.Changed = append(updateChangelog.Changed, ObjectRenamingDatabaseChangelogChange{ + Before: map[string]any{ + "name": oldItem["name"].AsString(), + "type": oldItem["type"].AsString(), + }, + After: map[string]any{ + "name": itemToAdd.Name, + "type": itemToAdd.Type, + }, + }) + } + } else { + // If given order wasn't found, it means this item was removed. 
+ updateChangelog.Removed = append(updateChangelog.Removed, map[string]any{ + "name": oldItem["name"].AsString(), + "type": oldItem["type"].AsString(), + }) + } + } + + // Here we analyze newly added items + for _, newItem := range newManuallyOrderedList { + newItem := newItem.AsValueMap() + if !slices.ContainsFunc(oldManuallyOrderedList, func(oldItem cty.Value) bool { + return oldItem.AsValueMap()["order"].AsBigFloat().Cmp(newItem["order"].AsBigFloat()) == 0 + }) { + newItemOrder, _ := newItem["order"].AsBigFloat().Int64() + itemToAdd := ObjectRenamingDatabaseManuallyOrderedListItem{ + Name: newItem["name"].AsString(), + Type: newItem["type"].AsString(), + } + + // Items can be only added at the end of the list, otherwise invalid operation will be reported. + if int(newItemOrder) > maxStateOrder { + finalState = append(finalState, itemToAdd) + updateChangelog.Added = append(updateChangelog.Added, map[string]any{ + "name": itemToAdd.Name, + "type": itemToAdd.Type, + }) + } else { + invalidOperations = append(invalidOperations, fmt.Errorf("unable to add a new item: %+v, in the middle", itemToAdd)) + } + } + } + + if len(invalidOperations) > 0 { + // Partial is essential in invalid operations because it will prevent invalid state from being saved. + // It was previously failing the tests because Terraform saves the state automatically despite errors being returned. + d.Partial(true) + return diag.FromErr(errors.Join(invalidOperations...)) + } else { + // Apply the changes. For "normal" implementation instead of sending whole state, single changes should be saved and applied here + // (places for single actions could be saved based on ObjectRenamingDatabaseInstance.Changelog modifications). + ObjectRenamingDatabaseInstance.ManuallyOrderedList = finalState + ObjectRenamingDatabaseInstance.ChangeLog = updateChangelog + } + } + + return ReadObjectRenamingListsAndSets(false)(ctx, d, meta) +} + +func ReadObjectRenamingListsAndSets(withExternalChangesMarking bool) schema.ReadContextFunc { + return func(ctx context.Context, d *schema.ResourceData, meta any) diag.Diagnostics { + list := collections.Map(ObjectRenamingDatabaseInstance.List, func(t ObjectRenamingDatabaseListItem) map[string]any { + return map[string]any{ + "string": t.String, + "int": t.Int, + } + }) + if err := d.Set("list", list); err != nil { + return diag.FromErr(err) + } + + orderedList := make([]map[string]any, len(ObjectRenamingDatabaseInstance.OrderedList)) + for index, item := range ObjectRenamingDatabaseInstance.OrderedList { + orderedList[index] = map[string]any{ + "name": item.Name, + "order": strconv.Itoa(index), + } + } + if err := d.Set("ordered_list", orderedList); err != nil { + return diag.FromErr(err) + } + + itemAdded := len(ObjectRenamingDatabaseInstance.ManuallyOrderedList) > len(d.Get("manually_ordered_list").([]any)) + itemRemoved := len(ObjectRenamingDatabaseInstance.ManuallyOrderedList) < len(d.Get("manually_ordered_list").([]any)) + if withExternalChangesMarking && d.Get("manually_ordered_list") != nil && (itemAdded || itemRemoved) { + // Detecting external changes by comparing current state with external source + // Improvements: + // - When the items' length is the same, try to match items by unique combinations (like name + type in this case). + // - If items were added externally, see if the item was added at the end (valid operation) or somewhere in the middle (invalid operation). + // - If items were removed externally, see if the items remained in the same order (valid operation). 
+ // - Handle cases where multiple external operations were done at once (e.g. added and removed an item). + return diag.FromErr(errors.New("detected external changes in manually_ordered_list")) + } + + if d.GetRawState().IsNull() { + // For the first read, let's "copy-paste" config into state + if err := setStateToValuesFromConfig(d, objectRenamingListsAndSetsSchema, []string{"manually_ordered_list"}); err != nil { + return diag.FromErr(err) + } + } else { + // For later reads, let's put external changes into the state. Because we don't get the information order + // from the external source, we have to guess it. We do it by matching first with state, later with config (if not found). + // To correctly find items and their order, you have to match by using fields that uniquely identify a given item (name + type in this case). + + manuallyOrderedList := make([]any, len(ObjectRenamingDatabaseInstance.ManuallyOrderedList)) + for index, item := range ObjectRenamingDatabaseInstance.ManuallyOrderedList { + var itemOrder int64 = -1 + + foundIndex := slices.IndexFunc(d.GetRawState().AsValueMap()["manually_ordered_list"].AsValueSlice(), func(value cty.Value) bool { + return value.AsValueMap()["name"].AsString() == item.Name && value.AsValueMap()["type"].AsString() == item.Type + }) + if foundIndex != -1 { + itemOrder, _ = d.GetRawState().AsValueMap()["manually_ordered_list"].AsValueSlice()[foundIndex].AsValueMap()["order"].AsBigFloat().Int64() + } + + if foundIndex == -1 && !d.GetRawConfig().IsNull() { + configFoundIndex := slices.IndexFunc(d.GetRawConfig().AsValueMap()["manually_ordered_list"].AsValueSlice(), func(value cty.Value) bool { + return value.AsValueMap()["name"].AsString() == item.Name && value.AsValueMap()["type"].AsString() == item.Type + }) + if configFoundIndex != -1 { + itemOrder, _ = d.GetRawConfig().AsValueMap()["manually_ordered_list"].AsValueSlice()[configFoundIndex].AsValueMap()["order"].AsBigFloat().Int64() + } + } + + manuallyOrderedList[index] = map[string]any{ + "name": item.Name, + "type": item.Type, + "order": itemOrder, + } + } + + if err := d.Set("manually_ordered_list", manuallyOrderedList); err != nil { + return diag.FromErr(err) + } + } + + return nil + } +} + +func DeleteObjectRenamingListsAndSets(ctx context.Context, d *schema.ResourceData, meta any) diag.Diagnostics { + ObjectRenamingDatabaseInstance.List = nil + ObjectRenamingDatabaseInstance.OrderedList = nil + ObjectRenamingDatabaseInstance.ManuallyOrderedList = nil + d.SetId("") + + return nil +} + +func ignoreListOrderAfterFirstApply(parentKey string) schema.SchemaDiffSuppressFunc { + return func(key string, oldValue string, newValue string, d *schema.ResourceData) bool { + if strings.HasSuffix(key, ".#") { + return false + } + + // Raw state is not null after first apply + if !d.GetRawState().IsNull() { + // Parse item index from the key + keyParts := strings.Split(strings.TrimLeft(key, parentKey+"."), ".") + if len(keyParts) >= 2 { + index, err := strconv.Atoi(keyParts[0]) + if err != nil { + log.Println("[DEBUG] Failed to convert list item index: ", err) + return false + } + + newItems := d.GetRawConfig().AsValueMap()[parentKey].AsValueSlice() + if len(newItems) <= index { + // item was removed + return false + } + newItemHash := newItems[index].Hash() + + newItemWasAlreadyPresent := false + + // Try to find the same hash in the state; if found, the new item was already present, and it only changed place in the list + for _, oldItem := range d.GetRawState().AsValueMap()[parentKey].AsValueSlice() { + // 
Matching hashes indicate the order changed, but the item stayed in the config, so suppress the change + if oldItem.Hash() == newItemHash { + newItemWasAlreadyPresent = true + } + } + + oldItemIsStillPresent := false + + // Sizes of config and state may not be the same + if len(d.GetRawState().AsValueMap()[parentKey].AsValueSlice()) > index { + // Get the hash of the whole item from state (because it represents old value) + oldItemHash := d.GetRawState().AsValueMap()[parentKey].AsValueSlice()[index].Hash() + + // Try to find the same hash in the config; if found, the old item still exists, but changed its place in the list + for _, newItem := range d.GetRawConfig().AsValueMap()[parentKey].AsValueSlice() { + if newItem.Hash() == oldItemHash { + oldItemIsStillPresent = true + } + } + } else if newItemWasAlreadyPresent { + // Happens in cases where there's a new item at the end of the list, but it was already present, so do nothing + return true + } + + if newItemWasAlreadyPresent && oldItemIsStillPresent { + return true + } + } + } + + return false + } +} + +func ignoreOrderAfterFirstApplyWithOrderedList(parentKey string) schema.SchemaDiffSuppressFunc { + return func(key string, oldValue string, newValue string, d *schema.ResourceData) bool { + if strings.HasSuffix(key, ".#") { + return false + } + + // Raw state is not null after first apply + if !d.GetRawState().IsNull() { + // Parse item index from the key + keyParts := strings.Split(strings.TrimLeft(key, parentKey+"."), ".") + if len(keyParts) >= 2 { + index, err := strconv.Atoi(keyParts[0]) + if err != nil { + log.Println("[DEBUG] Failed to convert list item index: ", err) + return false + } + + newItem := d.GetRawConfig().AsValueMap()[parentKey].AsValueSlice()[index] + + newItemOrder := -1 + // The new order value cannot be retrieved because it's not set on config level. + // There's also no other way (most likely) to get the newly computed order value for a given item, + // making this approach not possible. 
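+				// For example (illustrative): with two ordered_list items {name = "a"} and {name = "b"} in the
+				// config, GetRawConfig holds a null "order" for both entries (the field is Computed and never
+				// written by the user), so there is nothing here to compare against the "0"/"1" values that
+				// Read stored in the state.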
+ newItemOrderValue := newItem.AsValueMap()["order"] + if !newItemOrderValue.IsNull() { + newItemOrder, _ = strconv.Atoi(newItemOrderValue.AsString()) + } + + _ = newItemOrder + } + } + + return false + } +} + +func ignoreOrderAfterFirstApplyWithManuallyOrderedList(parentKey string) schema.SchemaDiffSuppressFunc { + return func(key string, oldValue string, newValue string, d *schema.ResourceData) bool { + if strings.HasSuffix(key, ".#") { + return false + } + + // Raw state is not null after first apply + if !d.GetRawState().IsNull() { + // Parse item index from the key + keyParts := strings.Split(strings.TrimLeft(key, parentKey+"."), ".") + if len(keyParts) >= 2 { + index, err := strconv.Atoi(keyParts[0]) + if err != nil { + log.Println("[DEBUG] Failed to convert list item index: ", err) + return false + } + + newItems := d.GetRawConfig().AsValueMap()[parentKey].AsValueSlice() + if len(newItems) <= index { + // item was removed + return false + } + + newItem := newItems[index] + itemWasAlreadyPresent := false + itemIsStillPresent := false + + var newItemOrder int64 + newItemOrderValue := newItem.AsValueMap()["order"] + if !newItemOrderValue.IsNull() { + newItemOrder, _ = newItemOrderValue.AsBigFloat().Int64() + } else { + // That's a new item + return false + } + + // It was already present, but we need to check the hash + for _, oldItem := range d.GetRawState().AsValueMap()[parentKey].AsValueSlice() { + oldItemOrder, _ := oldItem.AsValueMap()["order"].AsBigFloat().Int64() + if oldItemOrder == newItemOrder { + if oldItem.Hash() != newItem.Hash() { + // The item has the same order, but the values in other fields changed (different hash) + return false + } else { + itemWasAlreadyPresent = true + break + } + } + } + + // Check if a new item is indexable (with new items added at the end, it's not possible to index state value for them, because it doesn't exist yet) + if len(d.GetRawState().AsValueMap()[parentKey].AsValueSlice()) > index { + oldItem := d.GetRawState().AsValueMap()[parentKey].AsValueSlice()[index] + oldItemOrder, _ := oldItem.AsValueMap()["order"].AsBigFloat().Int64() + + // Check if this order is still present + for _, newItem := range d.GetRawConfig().AsValueMap()[parentKey].AsValueSlice() { + newItemOrder, _ := newItem.AsValueMap()["order"].AsBigFloat().Int64() + if oldItemOrder == newItemOrder { + if oldItem.Hash() != newItem.Hash() { + // The order is still present, but the values in other fields changed (different hash) + return false + } else { + itemIsStillPresent = true + break + } + } + } + } + + if itemWasAlreadyPresent && itemIsStillPresent { + return true + } + } + } + + return false + } +} diff --git a/pkg/resources/object_renaming_lists_and_sets_acceptance_test.go b/pkg/resources/object_renaming_lists_and_sets_acceptance_test.go new file mode 100644 index 0000000000..29b9169580 --- /dev/null +++ b/pkg/resources/object_renaming_lists_and_sets_acceptance_test.go @@ -0,0 +1,1217 @@ +package resources_test + +import ( + "context" + "fmt" + "reflect" + "regexp" + "testing" + + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/bettertestspoc/assert" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/planchecks" + tfjson "github.com/hashicorp/terraform-json" + "github.com/hashicorp/terraform-plugin-testing/plancheck" + + acc "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/testenvs" + 
"github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/resources" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk" + "github.com/hashicorp/terraform-plugin-testing/helper/resource" + "github.com/hashicorp/terraform-plugin-testing/tfversion" +) + +func TestAcc_BasicListFlow(t *testing.T) { + _ = testenvs.GetOrSkipTest(t, testenvs.EnableAcceptance) + _ = testenvs.GetOrSkipTest(t, testenvs.EnableObjectRenamingTest) + acc.TestAccPreCheck(t) + + resource.Test(t, resource.TestCase{ + ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, + TerraformVersionChecks: []tfversion.TerraformVersionCheck{ + tfversion.RequireAbove(tfversion.Version1_5_0), + }, + Steps: []resource.TestStep{ + { + Config: objectRenamingConfigList([]map[string]any{ + {"string": "111", "int": 111}, + {"string": "222", "int": 222}, + {"string": "333", "int": 333}, + }), + Check: resource.ComposeAggregateTestCheckFunc( + assert.ContainsExactlyInAnyOrder("snowflake_object_renaming.test", "list", []map[string]string{ + {"string": "111", "int": "111"}, + {"string": "222", "int": "222"}, + {"string": "333", "int": "333"}, + }), + ), + }, + // Remove, shift, and add one item (in the middle) + { + ConfigPlanChecks: resource.ConfigPlanChecks{ + PreApply: []plancheck.PlanCheck{ + plancheck.ExpectResourceAction("snowflake_object_renaming.test", plancheck.ResourceActionUpdate), + }, + PostApplyPreRefresh: []plancheck.PlanCheck{ + assertObjectRenamingDatabaseChangelogAndClearIt(resources.ObjectRenamingDatabaseChangelog{ + Removed: []map[string]any{ + {"string": "111", "int": 111}, + }, + Added: []map[string]any{ + {"string": "444", "int": 444}, + }, + }), + }, + }, + Config: objectRenamingConfigList([]map[string]any{ + {"string": "222", "int": 222}, + {"string": "444", "int": 444}, + {"string": "333", "int": 333}, + }), + Check: resource.ComposeAggregateTestCheckFunc( + assert.ContainsExactlyInAnyOrder("snowflake_object_renaming.test", "list", []map[string]string{ + {"string": "222", "int": "222"}, + {"string": "444", "int": "444"}, + {"string": "333", "int": "333"}, + }), + ), + }, + // Remove, shift, and add one item (at the end) + { + ConfigPlanChecks: resource.ConfigPlanChecks{ + PreApply: []plancheck.PlanCheck{ + plancheck.ExpectResourceAction("snowflake_object_renaming.test", plancheck.ResourceActionUpdate), + }, + PostApplyPreRefresh: []plancheck.PlanCheck{ + assertObjectRenamingDatabaseChangelogAndClearIt(resources.ObjectRenamingDatabaseChangelog{ + Removed: []map[string]any{ + {"string": "222", "int": 222}, + }, + Added: []map[string]any{ + {"string": "111", "int": 111}, + }, + }), + }, + }, + Config: objectRenamingConfigList([]map[string]any{ + {"string": "444", "int": 444}, + {"string": "333", "int": 333}, + {"string": "111", "int": 111}, + }), + Check: resource.ComposeAggregateTestCheckFunc( + assert.ContainsExactlyInAnyOrder("snowflake_object_renaming.test", "list", []map[string]string{ + {"string": "444", "int": "444"}, + {"string": "333", "int": "333"}, + {"string": "111", "int": "111"}, + }), + ), + }, + // Remove, shift, and add one item (at the beginning) + { + ConfigPlanChecks: resource.ConfigPlanChecks{ + PreApply: []plancheck.PlanCheck{ + plancheck.ExpectResourceAction("snowflake_object_renaming.test", plancheck.ResourceActionUpdate), + }, + PostApplyPreRefresh: []plancheck.PlanCheck{ + assertObjectRenamingDatabaseChangelogAndClearIt(resources.ObjectRenamingDatabaseChangelog{ + Removed: []map[string]any{ + {"string": "111", "int": 111}, + }, + Added: []map[string]any{ + {"string": 
"222", "int": 222}, + }, + }), + }, + }, + Config: objectRenamingConfigList([]map[string]any{ + {"string": "222", "int": 222}, + {"string": "333", "int": 333}, + {"string": "444", "int": 444}, + }), + Check: resource.ComposeAggregateTestCheckFunc( + assert.ContainsExactlyInAnyOrder("snowflake_object_renaming.test", "list", []map[string]string{ + {"string": "222", "int": "222"}, + {"string": "333", "int": "333"}, + {"string": "444", "int": "444"}, + }), + ), + }, + // Reorder items and add one + { + ConfigPlanChecks: resource.ConfigPlanChecks{ + PreApply: []plancheck.PlanCheck{ + plancheck.ExpectResourceAction("snowflake_object_renaming.test", plancheck.ResourceActionUpdate), + }, + PostApplyPreRefresh: []plancheck.PlanCheck{ + assertObjectRenamingDatabaseChangelogAndClearIt(resources.ObjectRenamingDatabaseChangelog{ + Added: []map[string]any{ + {"string": "555", "int": 555}, + }, + }), + }, + }, + Config: objectRenamingConfigList([]map[string]any{ + {"string": "444", "int": 444}, + {"string": "555", "int": 555}, + {"string": "333", "int": 333}, + {"string": "222", "int": 222}, + }), + Check: resource.ComposeAggregateTestCheckFunc( + assert.ContainsExactlyInAnyOrder("snowflake_object_renaming.test", "list", []map[string]string{ + {"string": "444", "int": "444"}, + {"string": "555", "int": "555"}, + {"string": "333", "int": "333"}, + {"string": "222", "int": "222"}, + }), + ), + }, + // Replace all items + { + ConfigPlanChecks: resource.ConfigPlanChecks{ + PreApply: []plancheck.PlanCheck{ + plancheck.ExpectResourceAction("snowflake_object_renaming.test", plancheck.ResourceActionUpdate), + }, + PostApplyPreRefresh: []plancheck.PlanCheck{ + assertObjectRenamingDatabaseChangelogAndClearIt(resources.ObjectRenamingDatabaseChangelog{ + Removed: []map[string]any{ + {"string": "333", "int": 333}, + {"string": "444", "int": 444}, + {"string": "222", "int": 222}, + {"string": "555", "int": 555}, + }, + Added: []map[string]any{ + {"string": "1111", "int": 1111}, + {"string": "2222", "int": 2222}, + {"string": "3333", "int": 3333}, + {"string": "4444", "int": 4444}, + }, + }), + }, + }, + Config: objectRenamingConfigList([]map[string]any{ + {"string": "1111", "int": 1111}, + {"string": "2222", "int": 2222}, + {"string": "3333", "int": 3333}, + {"string": "4444", "int": 4444}, + }), + Check: resource.ComposeAggregateTestCheckFunc( + assert.ContainsExactlyInAnyOrder("snowflake_object_renaming.test", "list", []map[string]string{ + {"string": "1111", "int": "1111"}, + {"string": "2222", "int": "2222"}, + {"string": "3333", "int": "3333"}, + {"string": "4444", "int": "4444"}, + }), + ), + }, + // Remove a few items + { + ConfigPlanChecks: resource.ConfigPlanChecks{ + PreApply: []plancheck.PlanCheck{ + plancheck.ExpectResourceAction("snowflake_object_renaming.test", plancheck.ResourceActionUpdate), + }, + PostApplyPreRefresh: []plancheck.PlanCheck{ + assertObjectRenamingDatabaseChangelogAndClearIt(resources.ObjectRenamingDatabaseChangelog{ + Removed: []map[string]any{ + {"string": "3333", "int": 3333}, + {"string": "4444", "int": 4444}, + }, + }), + }, + }, + Config: objectRenamingConfigList([]map[string]any{ + {"string": "1111", "int": 1111}, + {"string": "2222", "int": 2222}, + }), + Check: resource.ComposeAggregateTestCheckFunc( + assert.ContainsExactlyInAnyOrder("snowflake_object_renaming.test", "list", []map[string]string{ + {"string": "1111", "int": "1111"}, + {"string": "2222", "int": "2222"}, + }), + ), + }, + // Remove all items + { + ConfigPlanChecks: resource.ConfigPlanChecks{ + PreApply: 
[]plancheck.PlanCheck{ + plancheck.ExpectResourceAction("snowflake_object_renaming.test", plancheck.ResourceActionUpdate), + }, + PostApplyPreRefresh: []plancheck.PlanCheck{ + assertObjectRenamingDatabaseChangelogAndClearIt(resources.ObjectRenamingDatabaseChangelog{ + Removed: []map[string]any{ + {"string": "1111", "int": 1111}, + {"string": "2222", "int": 2222}, + }, + }), + }, + }, + Config: objectRenamingConfigList([]map[string]any{}), + Check: resource.ComposeAggregateTestCheckFunc( + resource.TestCheckResourceAttr("snowflake_object_renaming.test", "manually_ordered_list.#", "0"), + ), + }, + // Add few items + { + ConfigPlanChecks: resource.ConfigPlanChecks{ + PreApply: []plancheck.PlanCheck{ + plancheck.ExpectResourceAction("snowflake_object_renaming.test", plancheck.ResourceActionUpdate), + }, + PostApplyPreRefresh: []plancheck.PlanCheck{ + assertObjectRenamingDatabaseChangelogAndClearIt(resources.ObjectRenamingDatabaseChangelog{ + Added: []map[string]any{ + {"string": "1111", "int": 1111}, + {"string": "2222", "int": 2222}, + }, + }), + }, + }, + Config: objectRenamingConfigList([]map[string]any{ + {"string": "1111", "int": 1111}, + {"string": "2222", "int": 2222}, + }), + Check: resource.ComposeAggregateTestCheckFunc( + assert.ContainsExactlyInAnyOrder("snowflake_object_renaming.test", "list", []map[string]string{ + {"string": "1111", "int": "1111"}, + {"string": "2222", "int": "2222"}, + }), + ), + }, + // External changes: add item + { + PreConfig: func() { + resources.ObjectRenamingDatabaseInstance.List = append(resources.ObjectRenamingDatabaseInstance.List, resources.ObjectRenamingDatabaseListItem{ + String: "3333", + Int: 3333, + }) + }, + ConfigPlanChecks: resource.ConfigPlanChecks{ + PreApply: []plancheck.PlanCheck{ + plancheck.ExpectResourceAction("snowflake_object_renaming.test", plancheck.ResourceActionUpdate), + }, + PostApplyPreRefresh: []plancheck.PlanCheck{ + assertObjectRenamingDatabaseChangelogAndClearIt(resources.ObjectRenamingDatabaseChangelog{ + Removed: []map[string]any{ + {"string": "3333", "int": 3333}, + }, + }), + }, + }, + Config: objectRenamingConfigList([]map[string]any{ + {"string": "1111", "int": 1111}, + {"string": "2222", "int": 2222}, + }), + Check: resource.ComposeAggregateTestCheckFunc( + assert.ContainsExactlyInAnyOrder("snowflake_object_renaming.test", "list", []map[string]string{ + {"string": "1111", "int": "1111"}, + {"string": "2222", "int": "2222"}, + }), + ), + }, + // External changes: removed item + { + PreConfig: func() { + resources.ObjectRenamingDatabaseInstance.List = resources.ObjectRenamingDatabaseInstance.List[:1] + }, + ConfigPlanChecks: resource.ConfigPlanChecks{ + PreApply: []plancheck.PlanCheck{ + plancheck.ExpectResourceAction("snowflake_object_renaming.test", plancheck.ResourceActionUpdate), + }, + PostApplyPreRefresh: []plancheck.PlanCheck{ + assertObjectRenamingDatabaseChangelogAndClearIt(resources.ObjectRenamingDatabaseChangelog{ + Added: []map[string]any{ + {"string": "2222", "int": 2222}, + }, + }), + }, + }, + Config: objectRenamingConfigList([]map[string]any{ + {"string": "1111", "int": 1111}, + {"string": "2222", "int": 2222}, + }), + Check: resource.ComposeAggregateTestCheckFunc( + assert.ContainsExactlyInAnyOrder("snowflake_object_renaming.test", "list", []map[string]string{ + {"string": "1111", "int": "1111"}, + {"string": "2222", "int": "2222"}, + }), + ), + }, + // External changes: change item + { + PreConfig: func() { + resources.ObjectRenamingDatabaseInstance.List[1].String = "1010" + 
resources.ObjectRenamingDatabaseInstance.List[1].Int = 1010 + }, + ConfigPlanChecks: resource.ConfigPlanChecks{ + PreApply: []plancheck.PlanCheck{ + plancheck.ExpectResourceAction("snowflake_object_renaming.test", plancheck.ResourceActionUpdate), + }, + PostApplyPreRefresh: []plancheck.PlanCheck{ + assertObjectRenamingDatabaseChangelogAndClearIt(resources.ObjectRenamingDatabaseChangelog{ + Removed: []map[string]any{ + {"string": "1010", "int": 1010}, + }, + Added: []map[string]any{ + {"string": "2222", "int": 2222}, + }, + }), + }, + }, + Config: objectRenamingConfigList([]map[string]any{ + {"string": "1111", "int": 1111}, + {"string": "2222", "int": 2222}, + }), + Check: resource.ComposeAggregateTestCheckFunc( + assert.ContainsExactlyInAnyOrder("snowflake_object_renaming.test", "list", []map[string]string{ + {"string": "1111", "int": "1111"}, + {"string": "2222", "int": "2222"}, + }), + ), + }, + // Add an item that is identical to another one (currently, failing because hash duplicates are not handled) + // { + // Config: objectRenamingConfigList([]map[string]any{ + // {"string": "222", "int": 222}, + // {"string": "222", "int": 222}, + // }), + // Check: resource.ComposeAggregateTestCheckFunc( + // assert.HasListItemsOrderIndependent("snowflake_object_renaming.test", "list", []map[string]string{ + // {"string": "222", "int": "222"}, + // {"string": "222", "int": "222"}, + // }), + // ), + // }, + }, + }) +} + +// This test researches the possibility of performing update instead of remove + add item +func TestAcc_ListNameUpdate(t *testing.T) { + _ = testenvs.GetOrSkipTest(t, testenvs.EnableAcceptance) + _ = testenvs.GetOrSkipTest(t, testenvs.EnableObjectRenamingTest) + acc.TestAccPreCheck(t) + + resource.Test(t, resource.TestCase{ + ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, + TerraformVersionChecks: []tfversion.TerraformVersionCheck{ + tfversion.RequireAbove(tfversion.Version1_5_0), + }, + Steps: []resource.TestStep{ + { + Config: objectRenamingConfigList([]map[string]any{ + {"name": "column1", "string": "111", "int": 111}, + {"name": "column2", "string": "222", "int": 222}, + }), + Check: resource.ComposeAggregateTestCheckFunc( + assert.ContainsExactlyInAnyOrder("snowflake_object_renaming.test", "list", []map[string]string{ + {"name": "column1", "string": "111", "int": "111"}, + {"name": "column2", "string": "222", "int": "222"}, + }), + ), + }, + { + Config: objectRenamingConfigList([]map[string]any{ + {"name": "column2", "string": "222", "int": 222}, + {"name": "column1", "string": "111", "int": 111}, + }), + Check: resource.ComposeAggregateTestCheckFunc( + assert.ContainsExactlyInAnyOrder("snowflake_object_renaming.test", "list", []map[string]string{ + {"name": "column2", "string": "222", "int": "222"}, + {"name": "column1", "string": "111", "int": "111"}, + }), + ), + }, + // It's hard to handle reorder + rename with this approach, + // because without any additional metadata, we cannot identify a given list item. 
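+ // The step below renames an item in place (column2 -> column3) while keeping its other fields; as noted above, without extra identifying metadata this approach can only observe such a rename as removing one item and adding another.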
+ { + Config: objectRenamingConfigList([]map[string]any{ + {"name": "column3", "string": "222", "int": 222}, + {"name": "column1", "string": "111", "int": 111}, + }), + Check: resource.ComposeAggregateTestCheckFunc( + assert.ContainsExactlyInAnyOrder("snowflake_object_renaming.test", "list", []map[string]string{ + {"name": "column3", "string": "222", "int": "222"}, + {"name": "column1", "string": "111", "int": "111"}, + }), + ), + }, + }, + }) +} + +func TestAcc_ListsWithDuplicatedItems(t *testing.T) { + _ = testenvs.GetOrSkipTest(t, testenvs.EnableAcceptance) + _ = testenvs.GetOrSkipTest(t, testenvs.EnableObjectRenamingTest) + acc.TestAccPreCheck(t) + + // Fails, because the SuppressDiffFunc works on the hash of individual items. + // To correctly suppress such changes, the number of repeated hashes should be counted. + t.Skip("Currently failing, because duplicated hashes are not supported.") + + resource.Test(t, resource.TestCase{ + ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, + TerraformVersionChecks: []tfversion.TerraformVersionCheck{ + tfversion.RequireAbove(tfversion.Version1_5_0), + }, + Steps: []resource.TestStep{ + { + Config: objectRenamingConfigList([]map[string]any{ + {"string": "111", "int": 111}, + {"string": "222", "int": 222}, + {"string": "333", "int": 333}, + }), + Check: resource.ComposeAggregateTestCheckFunc( + assert.ContainsExactlyInAnyOrder("snowflake_object_renaming.test", "list", []map[string]string{ + {"string": "111", "int": "111"}, + {"string": "222", "int": "222"}, + {"string": "333", "int": "333"}, + }), + ), + }, + // Introduce duplicates (it would be enough just to introduce only one to break the approach assumptions) + { + Config: objectRenamingConfigList([]map[string]any{ + {"string": "111", "int": 111}, + {"string": "111", "int": 111}, + {"string": "222", "int": 222}, + {"string": "222", "int": 222}, + {"string": "333", "int": 333}, + {"string": "333", "int": 333}, + }), + Check: resource.ComposeAggregateTestCheckFunc( + assert.ContainsExactlyInAnyOrder("snowflake_object_renaming.test", "list", []map[string]string{ + {"string": "111", "int": "111"}, + {"string": "111", "int": "111"}, + {"string": "222", "int": "222"}, + {"string": "222", "int": "222"}, + {"string": "333", "int": "333"}, + {"string": "333", "int": "333"}, + }), + ), + }, + }, + }) +} + +func objectRenamingConfigList(listItems []map[string]any) string { + generateListItem := func(s string, i int) string { + return fmt.Sprintf(` + list { + string = "%[1]s" + int = %[2]d + } +`, s, i) + } + + generatedListItems := "" + for _, item := range listItems { + generatedListItems += generateListItem(item["string"].(string), item["int"].(int)) + } + + return fmt.Sprintf(` + + resource "snowflake_object_renaming" "test" { + %s + } + +`, generatedListItems) +} + +type objectRenamingPlanCheck func(ctx context.Context, req plancheck.CheckPlanRequest, resp *plancheck.CheckPlanResponse) + +func (fn objectRenamingPlanCheck) CheckPlan(ctx context.Context, req plancheck.CheckPlanRequest, resp *plancheck.CheckPlanResponse) { + fn(ctx, req, resp) +} + +func assertObjectRenamingDatabaseChangelogAndClearIt(changelog resources.ObjectRenamingDatabaseChangelog) plancheck.PlanCheck { + return objectRenamingPlanCheck(func(ctx context.Context, req plancheck.CheckPlanRequest, resp *plancheck.CheckPlanResponse) { + if !reflect.DeepEqual(resources.ObjectRenamingDatabaseInstance.ChangeLog, changelog) { + resp.Error = fmt.Errorf("expected %+v changelog for this step, but got: %+v", changelog, 
resources.ObjectRenamingDatabaseInstance.ChangeLog) + } + resources.ObjectRenamingDatabaseInstance.ChangeLog.Added = nil + resources.ObjectRenamingDatabaseInstance.ChangeLog.Removed = nil + resources.ObjectRenamingDatabaseInstance.ChangeLog.Changed = nil + }) +} + +func TestAcc_SupportedActions(t *testing.T) { + _ = testenvs.GetOrSkipTest(t, testenvs.EnableAcceptance) + _ = testenvs.GetOrSkipTest(t, testenvs.EnableObjectRenamingTest) + acc.TestAccPreCheck(t) + + resource.Test(t, resource.TestCase{ + ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, + TerraformVersionChecks: []tfversion.TerraformVersionCheck{ + tfversion.RequireAbove(tfversion.Version1_5_0), + }, + Steps: []resource.TestStep{ + { + Config: objectRenamingConfigManuallyOrderedList([]map[string]any{ + {"name": "nameOne", "type": "TEXT", "order": 10}, + {"name": "nameTwo", "type": "STRING", "order": 20}, + {"name": "nameThree", "type": "NUMBER", "order": 30}, + }), + Check: resource.ComposeAggregateTestCheckFunc( + resource.TestCheckResourceAttr("snowflake_object_renaming.test", "manually_ordered_list.0.name", "nameOne"), + resource.TestCheckResourceAttr("snowflake_object_renaming.test", "manually_ordered_list.0.type", "TEXT"), + resource.TestCheckResourceAttr("snowflake_object_renaming.test", "manually_ordered_list.0.order", "10"), + resource.TestCheckResourceAttr("snowflake_object_renaming.test", "manually_ordered_list.1.name", "nameTwo"), + resource.TestCheckResourceAttr("snowflake_object_renaming.test", "manually_ordered_list.1.type", "STRING"), + resource.TestCheckResourceAttr("snowflake_object_renaming.test", "manually_ordered_list.1.order", "20"), + resource.TestCheckResourceAttr("snowflake_object_renaming.test", "manually_ordered_list.2.name", "nameThree"), + resource.TestCheckResourceAttr("snowflake_object_renaming.test", "manually_ordered_list.2.type", "NUMBER"), + resource.TestCheckResourceAttr("snowflake_object_renaming.test", "manually_ordered_list.2.order", "30"), + ), + }, + // Drop item (any position) + { + ConfigPlanChecks: resource.ConfigPlanChecks{ + PreApply: []plancheck.PlanCheck{ + plancheck.ExpectResourceAction("snowflake_object_renaming.test", plancheck.ResourceActionUpdate), + }, + PostApplyPreRefresh: []plancheck.PlanCheck{ + assertObjectRenamingDatabaseChangelogAndClearIt(resources.ObjectRenamingDatabaseChangelog{ + Removed: []map[string]any{ + {"name": "nameTwo", "type": "STRING"}, + }, + }), + }, + }, + Config: objectRenamingConfigManuallyOrderedList([]map[string]any{ + {"name": "nameOne", "type": "TEXT", "order": 10}, + {"name": "nameThree", "type": "NUMBER", "order": 30}, + }), + Check: resource.ComposeAggregateTestCheckFunc( + resource.TestCheckResourceAttr("snowflake_object_renaming.test", "manually_ordered_list.0.name", "nameOne"), + resource.TestCheckResourceAttr("snowflake_object_renaming.test", "manually_ordered_list.0.type", "TEXT"), + resource.TestCheckResourceAttr("snowflake_object_renaming.test", "manually_ordered_list.0.order", "10"), + resource.TestCheckResourceAttr("snowflake_object_renaming.test", "manually_ordered_list.1.name", "nameThree"), + resource.TestCheckResourceAttr("snowflake_object_renaming.test", "manually_ordered_list.1.type", "NUMBER"), + resource.TestCheckResourceAttr("snowflake_object_renaming.test", "manually_ordered_list.1.order", "30"), + ), + }, + // Add item (at the end) + { + ConfigPlanChecks: resource.ConfigPlanChecks{ + PreApply: []plancheck.PlanCheck{ + plancheck.ExpectResourceAction("snowflake_object_renaming.test", 
plancheck.ResourceActionUpdate), + }, + PostApplyPreRefresh: []plancheck.PlanCheck{ + assertObjectRenamingDatabaseChangelogAndClearIt(resources.ObjectRenamingDatabaseChangelog{ + Added: []map[string]any{ + {"name": "nameFour", "type": "INT"}, + }, + }), + }, + }, + Config: objectRenamingConfigManuallyOrderedList([]map[string]any{ + {"name": "nameOne", "type": "TEXT", "order": 10}, + {"name": "nameThree", "type": "NUMBER", "order": 30}, + {"name": "nameFour", "type": "INT", "order": 40}, + }), + Check: resource.ComposeAggregateTestCheckFunc( + resource.TestCheckResourceAttr("snowflake_object_renaming.test", "manually_ordered_list.0.name", "nameOne"), + resource.TestCheckResourceAttr("snowflake_object_renaming.test", "manually_ordered_list.0.type", "TEXT"), + resource.TestCheckResourceAttr("snowflake_object_renaming.test", "manually_ordered_list.0.order", "10"), + resource.TestCheckResourceAttr("snowflake_object_renaming.test", "manually_ordered_list.1.name", "nameThree"), + resource.TestCheckResourceAttr("snowflake_object_renaming.test", "manually_ordered_list.1.type", "NUMBER"), + resource.TestCheckResourceAttr("snowflake_object_renaming.test", "manually_ordered_list.1.order", "30"), + resource.TestCheckResourceAttr("snowflake_object_renaming.test", "manually_ordered_list.2.name", "nameFour"), + resource.TestCheckResourceAttr("snowflake_object_renaming.test", "manually_ordered_list.2.type", "INT"), + resource.TestCheckResourceAttr("snowflake_object_renaming.test", "manually_ordered_list.2.order", "40"), + ), + }, + // Rename item / Change item type (any position; compatible type change) + { + ConfigPlanChecks: resource.ConfigPlanChecks{ + PreApply: []plancheck.PlanCheck{ + plancheck.ExpectResourceAction("snowflake_object_renaming.test", plancheck.ResourceActionUpdate), + }, + PostApplyPreRefresh: []plancheck.PlanCheck{ + assertObjectRenamingDatabaseChangelogAndClearIt(resources.ObjectRenamingDatabaseChangelog{ + Changed: []resources.ObjectRenamingDatabaseChangelogChange{ + { + Before: map[string]any{"name": "nameOne", "type": "TEXT"}, + After: map[string]any{"name": "nameOneV2", "type": "STRING"}, + }, + { + Before: map[string]any{"name": "nameThree", "type": "NUMBER"}, + After: map[string]any{"name": "nameThreeV2", "type": "INT"}, + }, + { + Before: map[string]any{"name": "nameFour", "type": "INT"}, + After: map[string]any{"name": "nameFourV2", "type": "NUMBER"}, + }, + }, + }), + }, + }, + Config: objectRenamingConfigManuallyOrderedList([]map[string]any{ + {"name": "nameOneV2", "type": "STRING", "order": 10}, + {"name": "nameThreeV2", "type": "INT", "order": 30}, + {"name": "nameFourV2", "type": "NUMBER", "order": 40}, + }), + Check: resource.ComposeAggregateTestCheckFunc( + resource.TestCheckResourceAttr("snowflake_object_renaming.test", "manually_ordered_list.0.name", "nameOneV2"), + resource.TestCheckResourceAttr("snowflake_object_renaming.test", "manually_ordered_list.0.type", "STRING"), + resource.TestCheckResourceAttr("snowflake_object_renaming.test", "manually_ordered_list.0.order", "10"), + resource.TestCheckResourceAttr("snowflake_object_renaming.test", "manually_ordered_list.1.name", "nameThreeV2"), + resource.TestCheckResourceAttr("snowflake_object_renaming.test", "manually_ordered_list.1.type", "INT"), + resource.TestCheckResourceAttr("snowflake_object_renaming.test", "manually_ordered_list.1.order", "30"), + resource.TestCheckResourceAttr("snowflake_object_renaming.test", "manually_ordered_list.2.name", "nameFourV2"), + 
resource.TestCheckResourceAttr("snowflake_object_renaming.test", "manually_ordered_list.2.type", "NUMBER"), + resource.TestCheckResourceAttr("snowflake_object_renaming.test", "manually_ordered_list.2.order", "40"), + ), + }, + // Reorder items in the configuration + { + ConfigPlanChecks: resource.ConfigPlanChecks{ + PreApply: []plancheck.PlanCheck{ + plancheck.ExpectResourceAction("snowflake_object_renaming.test", plancheck.ResourceActionNoop), + }, + PostApplyPreRefresh: []plancheck.PlanCheck{ + assertObjectRenamingDatabaseChangelogAndClearIt(resources.ObjectRenamingDatabaseChangelog{}), + }, + }, + Config: objectRenamingConfigManuallyOrderedList([]map[string]any{ + {"name": "nameFourV2", "type": "NUMBER", "order": 40}, + {"name": "nameThreeV2", "type": "INT", "order": 30}, + {"name": "nameOneV2", "type": "STRING", "order": 10}, + }), + Check: resource.ComposeAggregateTestCheckFunc( + resource.TestCheckResourceAttr("snowflake_object_renaming.test", "manually_ordered_list.0.name", "nameOneV2"), + resource.TestCheckResourceAttr("snowflake_object_renaming.test", "manually_ordered_list.0.type", "STRING"), + resource.TestCheckResourceAttr("snowflake_object_renaming.test", "manually_ordered_list.0.order", "10"), + resource.TestCheckResourceAttr("snowflake_object_renaming.test", "manually_ordered_list.1.name", "nameThreeV2"), + resource.TestCheckResourceAttr("snowflake_object_renaming.test", "manually_ordered_list.1.type", "INT"), + resource.TestCheckResourceAttr("snowflake_object_renaming.test", "manually_ordered_list.1.order", "30"), + resource.TestCheckResourceAttr("snowflake_object_renaming.test", "manually_ordered_list.2.name", "nameFourV2"), + resource.TestCheckResourceAttr("snowflake_object_renaming.test", "manually_ordered_list.2.type", "NUMBER"), + resource.TestCheckResourceAttr("snowflake_object_renaming.test", "manually_ordered_list.2.order", "40"), + ), + }, + // (after reorder) Drop item (any position) + { + ConfigPlanChecks: resource.ConfigPlanChecks{ + PreApply: []plancheck.PlanCheck{ + plancheck.ExpectResourceAction("snowflake_object_renaming.test", plancheck.ResourceActionUpdate), + }, + PostApplyPreRefresh: []plancheck.PlanCheck{ + assertObjectRenamingDatabaseChangelogAndClearIt(resources.ObjectRenamingDatabaseChangelog{ + Removed: []map[string]any{ + {"name": "nameThreeV2", "type": "INT"}, + }, + }), + }, + }, + Config: objectRenamingConfigManuallyOrderedList([]map[string]any{ + {"name": "nameFourV2", "type": "NUMBER", "order": 40}, + {"name": "nameOneV2", "type": "STRING", "order": 10}, + }), + Check: resource.ComposeAggregateTestCheckFunc( + resource.TestCheckResourceAttr("snowflake_object_renaming.test", "manually_ordered_list.0.name", "nameOneV2"), + resource.TestCheckResourceAttr("snowflake_object_renaming.test", "manually_ordered_list.0.type", "STRING"), + resource.TestCheckResourceAttr("snowflake_object_renaming.test", "manually_ordered_list.0.order", "10"), + resource.TestCheckResourceAttr("snowflake_object_renaming.test", "manually_ordered_list.1.name", "nameFourV2"), + resource.TestCheckResourceAttr("snowflake_object_renaming.test", "manually_ordered_list.1.type", "NUMBER"), + resource.TestCheckResourceAttr("snowflake_object_renaming.test", "manually_ordered_list.1.order", "40"), + ), + }, + // (after reorder) Add item (at the end) + { + ConfigPlanChecks: resource.ConfigPlanChecks{ + PreApply: []plancheck.PlanCheck{ + plancheck.ExpectResourceAction("snowflake_object_renaming.test", plancheck.ResourceActionUpdate), + }, + PostApplyPreRefresh: []plancheck.PlanCheck{ + 
assertObjectRenamingDatabaseChangelogAndClearIt(resources.ObjectRenamingDatabaseChangelog{ + Added: []map[string]any{ + {"name": "nameFive", "type": "INT"}, + }, + }), + }, + }, + Config: objectRenamingConfigManuallyOrderedList([]map[string]any{ + {"name": "nameFive", "type": "INT", "order": 50}, + {"name": "nameFourV2", "type": "NUMBER", "order": 40}, + {"name": "nameOneV2", "type": "STRING", "order": 10}, + }), + Check: resource.ComposeAggregateTestCheckFunc( + resource.TestCheckResourceAttr("snowflake_object_renaming.test", "manually_ordered_list.0.name", "nameOneV2"), + resource.TestCheckResourceAttr("snowflake_object_renaming.test", "manually_ordered_list.0.type", "STRING"), + resource.TestCheckResourceAttr("snowflake_object_renaming.test", "manually_ordered_list.0.order", "10"), + resource.TestCheckResourceAttr("snowflake_object_renaming.test", "manually_ordered_list.1.name", "nameFourV2"), + resource.TestCheckResourceAttr("snowflake_object_renaming.test", "manually_ordered_list.1.type", "NUMBER"), + resource.TestCheckResourceAttr("snowflake_object_renaming.test", "manually_ordered_list.1.order", "40"), + resource.TestCheckResourceAttr("snowflake_object_renaming.test", "manually_ordered_list.2.name", "nameFive"), + resource.TestCheckResourceAttr("snowflake_object_renaming.test", "manually_ordered_list.2.type", "INT"), + resource.TestCheckResourceAttr("snowflake_object_renaming.test", "manually_ordered_list.2.order", "50"), + ), + }, + // (after reorder) Rename item / Change item type (any position; compatible type change) + { + ConfigPlanChecks: resource.ConfigPlanChecks{ + PreApply: []plancheck.PlanCheck{ + plancheck.ExpectResourceAction("snowflake_object_renaming.test", plancheck.ResourceActionUpdate), + }, + PostApplyPreRefresh: []plancheck.PlanCheck{ + assertObjectRenamingDatabaseChangelogAndClearIt(resources.ObjectRenamingDatabaseChangelog{ + Changed: []resources.ObjectRenamingDatabaseChangelogChange{ + { + Before: map[string]any{"name": "nameOneV2", "type": "STRING"}, + After: map[string]any{"name": "nameOneV10", "type": "TEXT"}, + }, + { + Before: map[string]any{"name": "nameFourV2", "type": "NUMBER"}, + After: map[string]any{"name": "nameFourV10", "type": "INT"}, + }, + { + Before: map[string]any{"name": "nameFive", "type": "INT"}, + After: map[string]any{"name": "nameFiveV10", "type": "NUMBER"}, + }, + }, + }), + }, + }, + Config: objectRenamingConfigManuallyOrderedList([]map[string]any{ + {"name": "nameFiveV10", "type": "NUMBER", "order": 50}, + {"name": "nameFourV10", "type": "INT", "order": 40}, + {"name": "nameOneV10", "type": "TEXT", "order": 10}, + }), + Check: resource.ComposeAggregateTestCheckFunc( + resource.TestCheckResourceAttr("snowflake_object_renaming.test", "manually_ordered_list.0.name", "nameOneV10"), + resource.TestCheckResourceAttr("snowflake_object_renaming.test", "manually_ordered_list.0.type", "TEXT"), + resource.TestCheckResourceAttr("snowflake_object_renaming.test", "manually_ordered_list.0.order", "10"), + resource.TestCheckResourceAttr("snowflake_object_renaming.test", "manually_ordered_list.1.name", "nameFourV10"), + resource.TestCheckResourceAttr("snowflake_object_renaming.test", "manually_ordered_list.1.type", "INT"), + resource.TestCheckResourceAttr("snowflake_object_renaming.test", "manually_ordered_list.1.order", "40"), + resource.TestCheckResourceAttr("snowflake_object_renaming.test", "manually_ordered_list.2.name", "nameFiveV10"), + resource.TestCheckResourceAttr("snowflake_object_renaming.test", "manually_ordered_list.2.type", "NUMBER"), + 
resource.TestCheckResourceAttr("snowflake_object_renaming.test", "manually_ordered_list.2.order", "50"), + ), + }, + }, + }) +} + +func TestAcc_UnsupportedActions_AddItemsNotAtTheEnd(t *testing.T) { + _ = testenvs.GetOrSkipTest(t, testenvs.EnableAcceptance) + _ = testenvs.GetOrSkipTest(t, testenvs.EnableObjectRenamingTest) + acc.TestAccPreCheck(t) + + resource.Test(t, resource.TestCase{ + ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, + TerraformVersionChecks: []tfversion.TerraformVersionCheck{ + tfversion.RequireAbove(tfversion.Version1_5_0), + }, + Steps: []resource.TestStep{ + { + Config: objectRenamingConfigManuallyOrderedList([]map[string]any{ + {"name": "nameTwo", "type": "STRING", "order": 20}, + {"name": "nameOne", "type": "TEXT", "order": 10}, + }), + Check: resource.ComposeAggregateTestCheckFunc( + resource.TestCheckResourceAttr("snowflake_object_renaming.test", "manually_ordered_list.#", "2"), + resource.TestCheckResourceAttr("snowflake_object_renaming.test", "manually_ordered_list.0.name", "nameOne"), + resource.TestCheckResourceAttr("snowflake_object_renaming.test", "manually_ordered_list.0.type", "TEXT"), + resource.TestCheckResourceAttr("snowflake_object_renaming.test", "manually_ordered_list.0.order", "10"), + resource.TestCheckResourceAttr("snowflake_object_renaming.test", "manually_ordered_list.1.name", "nameTwo"), + resource.TestCheckResourceAttr("snowflake_object_renaming.test", "manually_ordered_list.1.type", "STRING"), + resource.TestCheckResourceAttr("snowflake_object_renaming.test", "manually_ordered_list.1.order", "20"), + ), + }, + // Add item (in the middle) + { + ConfigPlanChecks: resource.ConfigPlanChecks{ + PreApply: []plancheck.PlanCheck{ + plancheck.ExpectResourceAction("snowflake_object_renaming.test", plancheck.ResourceActionUpdate), + planchecks.ExpectChange( + "snowflake_object_renaming.test", + "manually_ordered_list", + tfjson.ActionUpdate, + sdk.String("[map[name:nameOne order:10 type:TEXT] map[name:nameTwo order:20 type:STRING]]"), + sdk.String("[map[name:atTheBeginning order:15 type:TEXT] map[name:nameTwo order:20 type:STRING] map[name:inTheMiddle order:17 type:INT] map[name:nameTwo order:20 type:STRING]]"), + ), + }, + // PostChecks don't apply when the expected error is set + }, + Config: objectRenamingConfigManuallyOrderedList([]map[string]any{ + {"name": "atTheBeginning", "type": "TEXT", "order": 15}, + {"name": "nameOne", "type": "TEXT", "order": 10}, + {"name": "inTheMiddle", "type": "INT", "order": 17}, + {"name": "nameTwo", "type": "STRING", "order": 20}, + }), + ExpectError: regexp.MustCompile("unable to add a new item: \\{Name:atTheBeginning Type:TEXT}, in the middle\nunable to add a new item: \\{Name:inTheMiddle Type:INT}, in the middle"), + }, + // Try to go back to the original state (with flipped items in config) + { + ConfigPlanChecks: resource.ConfigPlanChecks{ + PreApply: []plancheck.PlanCheck{ + plancheck.ExpectResourceAction("snowflake_object_renaming.test", plancheck.ResourceActionNoop), + }, + PostApplyPostRefresh: []plancheck.PlanCheck{ + assertObjectRenamingDatabaseChangelogAndClearIt(resources.ObjectRenamingDatabaseChangelog{}), + }, + }, + Config: objectRenamingConfigManuallyOrderedList([]map[string]any{ + {"name": "nameOne", "type": "TEXT", "order": 10}, + {"name": "nameTwo", "type": "STRING", "order": 20}, + }), + Check: resource.ComposeAggregateTestCheckFunc( + resource.TestCheckResourceAttr("snowflake_object_renaming.test", "manually_ordered_list.#", "2"), + 
resource.TestCheckResourceAttr("snowflake_object_renaming.test", "manually_ordered_list.0.name", "nameOne"), + resource.TestCheckResourceAttr("snowflake_object_renaming.test", "manually_ordered_list.0.type", "TEXT"), + resource.TestCheckResourceAttr("snowflake_object_renaming.test", "manually_ordered_list.0.order", "10"), + resource.TestCheckResourceAttr("snowflake_object_renaming.test", "manually_ordered_list.1.name", "nameTwo"), + resource.TestCheckResourceAttr("snowflake_object_renaming.test", "manually_ordered_list.1.type", "STRING"), + resource.TestCheckResourceAttr("snowflake_object_renaming.test", "manually_ordered_list.1.order", "20"), + ), + }, + }, + }) +} + +func TestAcc_UnsupportedActions_ChangeItemTypeToIncompatibleOne(t *testing.T) { + _ = testenvs.GetOrSkipTest(t, testenvs.EnableAcceptance) + _ = testenvs.GetOrSkipTest(t, testenvs.EnableObjectRenamingTest) + acc.TestAccPreCheck(t) + + resource.Test(t, resource.TestCase{ + ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, + TerraformVersionChecks: []tfversion.TerraformVersionCheck{ + tfversion.RequireAbove(tfversion.Version1_5_0), + }, + Steps: []resource.TestStep{ + { + Config: objectRenamingConfigManuallyOrderedList([]map[string]any{ + {"name": "nameTwo", "type": "STRING", "order": 20}, + {"name": "nameOne", "type": "TEXT", "order": 10}, + }), + Check: resource.ComposeAggregateTestCheckFunc( + resource.TestCheckResourceAttr("snowflake_object_renaming.test", "manually_ordered_list.#", "2"), + resource.TestCheckResourceAttr("snowflake_object_renaming.test", "manually_ordered_list.0.name", "nameOne"), + resource.TestCheckResourceAttr("snowflake_object_renaming.test", "manually_ordered_list.0.type", "TEXT"), + resource.TestCheckResourceAttr("snowflake_object_renaming.test", "manually_ordered_list.0.order", "10"), + resource.TestCheckResourceAttr("snowflake_object_renaming.test", "manually_ordered_list.1.name", "nameTwo"), + resource.TestCheckResourceAttr("snowflake_object_renaming.test", "manually_ordered_list.1.type", "STRING"), + resource.TestCheckResourceAttr("snowflake_object_renaming.test", "manually_ordered_list.1.order", "20"), + ), + }, + // Change item type (incompatible change) + { + ConfigPlanChecks: resource.ConfigPlanChecks{ + PreApply: []plancheck.PlanCheck{ + plancheck.ExpectResourceAction("snowflake_object_renaming.test", plancheck.ResourceActionUpdate), + planchecks.ExpectChange( + "snowflake_object_renaming.test", + "manually_ordered_list", + tfjson.ActionUpdate, + sdk.String("[map[name:nameOne order:10 type:TEXT] map[name:nameTwo order:20 type:STRING]]"), + sdk.String("[map[name:nameOne order:10 type:NUMBER] map[name:nameTwo order:20 type:STRING]]"), + ), + }, + // PostChecks don't apply when the expected error is set + }, + Config: objectRenamingConfigManuallyOrderedList([]map[string]any{ + {"name": "nameOne", "type": "NUMBER", "order": 10}, + {"name": "nameTwo", "type": "STRING", "order": 20}, + }), + ExpectError: regexp.MustCompile("unable to change item type from TEXT to NUMBER"), + }, + // Try to go back to the original state (with flipped items in config) + { + Config: objectRenamingConfigManuallyOrderedList([]map[string]any{ + {"name": "nameOne", "type": "TEXT", "order": 10}, + {"name": "nameTwo", "type": "STRING", "order": 20}, + }), + ConfigPlanChecks: resource.ConfigPlanChecks{ + PreApply: []plancheck.PlanCheck{ + plancheck.ExpectResourceAction("snowflake_object_renaming.test", plancheck.ResourceActionNoop), + 
assertObjectRenamingDatabaseChangelogAndClearIt(resources.ObjectRenamingDatabaseChangelog{}), + }, + }, + Check: resource.ComposeAggregateTestCheckFunc( + resource.TestCheckResourceAttr("snowflake_object_renaming.test", "manually_ordered_list.#", "2"), + resource.TestCheckResourceAttr("snowflake_object_renaming.test", "manually_ordered_list.0.name", "nameOne"), + resource.TestCheckResourceAttr("snowflake_object_renaming.test", "manually_ordered_list.0.type", "TEXT"), + resource.TestCheckResourceAttr("snowflake_object_renaming.test", "manually_ordered_list.0.order", "10"), + resource.TestCheckResourceAttr("snowflake_object_renaming.test", "manually_ordered_list.1.name", "nameTwo"), + resource.TestCheckResourceAttr("snowflake_object_renaming.test", "manually_ordered_list.1.type", "STRING"), + resource.TestCheckResourceAttr("snowflake_object_renaming.test", "manually_ordered_list.1.order", "20"), + ), + }, + }, + }) +} + +func TestAcc_UnsupportedActions_ExternalChange_AddNewItem(t *testing.T) { + _ = testenvs.GetOrSkipTest(t, testenvs.EnableAcceptance) + _ = testenvs.GetOrSkipTest(t, testenvs.EnableObjectRenamingTest) + acc.TestAccPreCheck(t) + + resource.Test(t, resource.TestCase{ + ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, + TerraformVersionChecks: []tfversion.TerraformVersionCheck{ + tfversion.RequireAbove(tfversion.Version1_5_0), + }, + Steps: []resource.TestStep{ + { + Config: objectRenamingConfigManuallyOrderedList([]map[string]any{ + {"name": "nameOne", "type": "TEXT", "order": 10}, + {"name": "nameTwo", "type": "STRING", "order": 20}, + }), + Check: resource.ComposeAggregateTestCheckFunc( + resource.TestCheckResourceAttr("snowflake_object_renaming.test", "manually_ordered_list.#", "2"), + resource.TestCheckResourceAttr("snowflake_object_renaming.test", "manually_ordered_list.0.name", "nameOne"), + resource.TestCheckResourceAttr("snowflake_object_renaming.test", "manually_ordered_list.0.type", "TEXT"), + resource.TestCheckResourceAttr("snowflake_object_renaming.test", "manually_ordered_list.0.order", "10"), + resource.TestCheckResourceAttr("snowflake_object_renaming.test", "manually_ordered_list.1.name", "nameTwo"), + resource.TestCheckResourceAttr("snowflake_object_renaming.test", "manually_ordered_list.1.type", "STRING"), + resource.TestCheckResourceAttr("snowflake_object_renaming.test", "manually_ordered_list.1.order", "20"), + ), + }, + // Add one item externally + { + PreConfig: func() { + resources.ObjectRenamingDatabaseInstance.ManuallyOrderedList = append(resources.ObjectRenamingDatabaseInstance.ManuallyOrderedList, resources.ObjectRenamingDatabaseManuallyOrderedListItem{ + Name: "externalItem", + Type: "INT", + }) + }, + ConfigPlanChecks: resource.ConfigPlanChecks{ + PreApply: []plancheck.PlanCheck{ + plancheck.ExpectResourceAction("snowflake_object_renaming.test", plancheck.ResourceActionUpdate), + planchecks.ExpectChange( + "snowflake_object_renaming.test", + "manually_ordered_list", + tfjson.ActionUpdate, + sdk.String("[map[name:nameOne order:10 type:TEXT] map[name:nameTwo order:20 type:STRING]]"), + sdk.String("[map[name:nameOne order:10 type:NUMBER] map[name:nameTwo order:20 type:STRING] map[name:externalItem order:-1 type:INT]]"), + ), + }, + }, + Config: objectRenamingConfigManuallyOrderedList([]map[string]any{ + {"name": "nameOne", "type": "NUMBER", "order": 10}, + {"name": "nameTwo", "type": "STRING", "order": 20}, + }), + ExpectError: regexp.MustCompile("detected external changes in manually_ordered_list"), + }, + // Try to go back to the 
original state (after external correction) + { + PreConfig: func() { + resources.ObjectRenamingDatabaseInstance.ManuallyOrderedList = resources.ObjectRenamingDatabaseInstance.ManuallyOrderedList[:len(resources.ObjectRenamingDatabaseInstance.ManuallyOrderedList)-1] + }, + ConfigPlanChecks: resource.ConfigPlanChecks{ + PreApply: []plancheck.PlanCheck{ + plancheck.ExpectResourceAction("snowflake_object_renaming.test", plancheck.ResourceActionNoop), + }, + PostApplyPostRefresh: []plancheck.PlanCheck{ + assertObjectRenamingDatabaseChangelogAndClearIt(resources.ObjectRenamingDatabaseChangelog{}), + }, + }, + Config: objectRenamingConfigManuallyOrderedList([]map[string]any{ + {"name": "nameOne", "type": "TEXT", "order": 10}, + {"name": "nameTwo", "type": "STRING", "order": 20}, + }), + Check: resource.ComposeAggregateTestCheckFunc( + resource.TestCheckResourceAttr("snowflake_object_renaming.test", "manually_ordered_list.#", "2"), + resource.TestCheckResourceAttr("snowflake_object_renaming.test", "manually_ordered_list.0.name", "nameOne"), + resource.TestCheckResourceAttr("snowflake_object_renaming.test", "manually_ordered_list.0.type", "TEXT"), + resource.TestCheckResourceAttr("snowflake_object_renaming.test", "manually_ordered_list.0.order", "10"), + resource.TestCheckResourceAttr("snowflake_object_renaming.test", "manually_ordered_list.1.name", "nameTwo"), + resource.TestCheckResourceAttr("snowflake_object_renaming.test", "manually_ordered_list.1.type", "STRING"), + resource.TestCheckResourceAttr("snowflake_object_renaming.test", "manually_ordered_list.1.order", "20"), + ), + }, + }, + }) +} + +func TestAcc_UnsupportedActions_ExternalChange_RemoveItem(t *testing.T) { + _ = testenvs.GetOrSkipTest(t, testenvs.EnableAcceptance) + _ = testenvs.GetOrSkipTest(t, testenvs.EnableObjectRenamingTest) + acc.TestAccPreCheck(t) + + resource.Test(t, resource.TestCase{ + ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, + TerraformVersionChecks: []tfversion.TerraformVersionCheck{ + tfversion.RequireAbove(tfversion.Version1_5_0), + }, + Steps: []resource.TestStep{ + { + Config: objectRenamingConfigManuallyOrderedList([]map[string]any{ + {"name": "nameOne", "type": "TEXT", "order": 10}, + {"name": "nameThree", "type": "INT", "order": 30}, + {"name": "nameTwo", "type": "STRING", "order": 20}, + }), + Check: resource.ComposeAggregateTestCheckFunc( + resource.TestCheckResourceAttr("snowflake_object_renaming.test", "manually_ordered_list.#", "3"), + resource.TestCheckResourceAttr("snowflake_object_renaming.test", "manually_ordered_list.0.name", "nameOne"), + resource.TestCheckResourceAttr("snowflake_object_renaming.test", "manually_ordered_list.0.type", "TEXT"), + resource.TestCheckResourceAttr("snowflake_object_renaming.test", "manually_ordered_list.0.order", "10"), + resource.TestCheckResourceAttr("snowflake_object_renaming.test", "manually_ordered_list.1.name", "nameTwo"), + resource.TestCheckResourceAttr("snowflake_object_renaming.test", "manually_ordered_list.1.type", "STRING"), + resource.TestCheckResourceAttr("snowflake_object_renaming.test", "manually_ordered_list.1.order", "20"), + resource.TestCheckResourceAttr("snowflake_object_renaming.test", "manually_ordered_list.2.name", "nameThree"), + resource.TestCheckResourceAttr("snowflake_object_renaming.test", "manually_ordered_list.2.type", "INT"), + resource.TestCheckResourceAttr("snowflake_object_renaming.test", "manually_ordered_list.2.order", "30"), + ), + }, + // Remove one item externally + { + PreConfig: func() { + // Remove middle item + 
resources.ObjectRenamingDatabaseInstance.ManuallyOrderedList = append(resources.ObjectRenamingDatabaseInstance.ManuallyOrderedList[:1], resources.ObjectRenamingDatabaseInstance.ManuallyOrderedList[2]) + }, + ConfigPlanChecks: resource.ConfigPlanChecks{ + PreApply: []plancheck.PlanCheck{ + plancheck.ExpectResourceAction("snowflake_object_renaming.test", plancheck.ResourceActionUpdate), + planchecks.ExpectChange( + "snowflake_object_renaming.test", + "manually_ordered_list", + tfjson.ActionUpdate, + sdk.String("[map[name:nameOne order:10 type:NUMBER] map[name:nameTwo order:20 type:STRING] map[name:nameThree order:30 type:INT]]"), + sdk.String("[map[name:nameOne order:10 type:NUMBER] map[name:nameThree order:30 type:INT]]"), + ), + }, + }, + Config: objectRenamingConfigManuallyOrderedList([]map[string]any{ + {"name": "nameOne", "type": "TEXT", "order": 10}, + {"name": "nameThree", "type": "INT", "order": 30}, + {"name": "nameTwo", "type": "STRING", "order": 20}, + }), + ExpectError: regexp.MustCompile("detected external changes in manually_ordered_list"), + }, + // Try to go back to the original state (after external correction) + { + PreConfig: func() { + // Bring the middle item back + resources.ObjectRenamingDatabaseInstance.ManuallyOrderedList = []resources.ObjectRenamingDatabaseManuallyOrderedListItem{ + resources.ObjectRenamingDatabaseInstance.ManuallyOrderedList[0], + { + Name: "nameTwo", + Type: "STRING", + }, + resources.ObjectRenamingDatabaseInstance.ManuallyOrderedList[1], + } + }, + ConfigPlanChecks: resource.ConfigPlanChecks{ + PreApply: []plancheck.PlanCheck{ + plancheck.ExpectResourceAction("snowflake_object_renaming.test", plancheck.ResourceActionNoop), + }, + PostApplyPostRefresh: []plancheck.PlanCheck{ + assertObjectRenamingDatabaseChangelogAndClearIt(resources.ObjectRenamingDatabaseChangelog{}), + }, + }, + Config: objectRenamingConfigManuallyOrderedList([]map[string]any{ + {"name": "nameOne", "type": "TEXT", "order": 10}, + {"name": "nameThree", "type": "INT", "order": 30}, + {"name": "nameTwo", "type": "STRING", "order": 20}, + }), + Check: resource.ComposeAggregateTestCheckFunc( + resource.TestCheckResourceAttr("snowflake_object_renaming.test", "manually_ordered_list.#", "3"), + resource.TestCheckResourceAttr("snowflake_object_renaming.test", "manually_ordered_list.0.name", "nameOne"), + resource.TestCheckResourceAttr("snowflake_object_renaming.test", "manually_ordered_list.0.type", "TEXT"), + resource.TestCheckResourceAttr("snowflake_object_renaming.test", "manually_ordered_list.0.order", "10"), + resource.TestCheckResourceAttr("snowflake_object_renaming.test", "manually_ordered_list.1.name", "nameTwo"), + resource.TestCheckResourceAttr("snowflake_object_renaming.test", "manually_ordered_list.1.type", "STRING"), + resource.TestCheckResourceAttr("snowflake_object_renaming.test", "manually_ordered_list.1.order", "20"), + resource.TestCheckResourceAttr("snowflake_object_renaming.test", "manually_ordered_list.2.name", "nameThree"), + resource.TestCheckResourceAttr("snowflake_object_renaming.test", "manually_ordered_list.2.type", "INT"), + resource.TestCheckResourceAttr("snowflake_object_renaming.test", "manually_ordered_list.2.order", "30"), + ), + }, + }, + }) +} + +func TestAcc_UnsupportedActions_ChangingTheOrderOfItem(t *testing.T) { + _ = testenvs.GetOrSkipTest(t, testenvs.EnableAcceptance) + _ = testenvs.GetOrSkipTest(t, testenvs.EnableObjectRenamingTest) + acc.TestAccPreCheck(t) + + resource.Test(t, resource.TestCase{ + ProtoV6ProviderFactories: 
acc.TestAccProtoV6ProviderFactories, + TerraformVersionChecks: []tfversion.TerraformVersionCheck{ + tfversion.RequireAbove(tfversion.Version1_5_0), + }, + Steps: []resource.TestStep{ + { + Config: objectRenamingConfigManuallyOrderedList([]map[string]any{ + {"name": "nameOne", "type": "TEXT", "order": 10}, + {"name": "nameThree", "type": "INT", "order": 30}, + {"name": "nameTwo", "type": "STRING", "order": 20}, + }), + Check: resource.ComposeAggregateTestCheckFunc( + resource.TestCheckResourceAttr("snowflake_object_renaming.test", "manually_ordered_list.#", "3"), + resource.TestCheckResourceAttr("snowflake_object_renaming.test", "manually_ordered_list.0.name", "nameOne"), + resource.TestCheckResourceAttr("snowflake_object_renaming.test", "manually_ordered_list.0.type", "TEXT"), + resource.TestCheckResourceAttr("snowflake_object_renaming.test", "manually_ordered_list.0.order", "10"), + resource.TestCheckResourceAttr("snowflake_object_renaming.test", "manually_ordered_list.1.name", "nameTwo"), + resource.TestCheckResourceAttr("snowflake_object_renaming.test", "manually_ordered_list.1.type", "STRING"), + resource.TestCheckResourceAttr("snowflake_object_renaming.test", "manually_ordered_list.1.order", "20"), + resource.TestCheckResourceAttr("snowflake_object_renaming.test", "manually_ordered_list.2.name", "nameThree"), + resource.TestCheckResourceAttr("snowflake_object_renaming.test", "manually_ordered_list.2.type", "INT"), + resource.TestCheckResourceAttr("snowflake_object_renaming.test", "manually_ordered_list.2.order", "30"), + ), + }, + { + ConfigPlanChecks: resource.ConfigPlanChecks{ + PreApply: []plancheck.PlanCheck{ + plancheck.ExpectResourceAction("snowflake_object_renaming.test", plancheck.ResourceActionUpdate), + }, + }, + Config: objectRenamingConfigManuallyOrderedList([]map[string]any{ + {"name": "nameOne", "type": "TEXT", "order": 15}, + {"name": "nameThree", "type": "INT", "order": 35}, + {"name": "nameTwo", "type": "STRING", "order": 25}, + }), + ExpectError: regexp.MustCompile("unable to add a new item: \\{Name:nameOne Type:TEXT}, in the middle\nunable to add a new item: \\{Name:nameTwo Type:STRING}, in the middle"), + }, + { + ConfigPlanChecks: resource.ConfigPlanChecks{ + PreApply: []plancheck.PlanCheck{ + plancheck.ExpectResourceAction("snowflake_object_renaming.test", plancheck.ResourceActionNoop), + }, + }, + Config: objectRenamingConfigManuallyOrderedList([]map[string]any{ + {"name": "nameOne", "type": "TEXT", "order": 10}, + {"name": "nameThree", "type": "INT", "order": 30}, + {"name": "nameTwo", "type": "STRING", "order": 20}, + }), + Check: resource.ComposeAggregateTestCheckFunc( + resource.TestCheckResourceAttr("snowflake_object_renaming.test", "manually_ordered_list.#", "3"), + resource.TestCheckResourceAttr("snowflake_object_renaming.test", "manually_ordered_list.0.name", "nameOne"), + resource.TestCheckResourceAttr("snowflake_object_renaming.test", "manually_ordered_list.0.type", "TEXT"), + resource.TestCheckResourceAttr("snowflake_object_renaming.test", "manually_ordered_list.0.order", "10"), + resource.TestCheckResourceAttr("snowflake_object_renaming.test", "manually_ordered_list.1.name", "nameTwo"), + resource.TestCheckResourceAttr("snowflake_object_renaming.test", "manually_ordered_list.1.type", "STRING"), + resource.TestCheckResourceAttr("snowflake_object_renaming.test", "manually_ordered_list.1.order", "20"), + resource.TestCheckResourceAttr("snowflake_object_renaming.test", "manually_ordered_list.2.name", "nameThree"), + 
resource.TestCheckResourceAttr("snowflake_object_renaming.test", "manually_ordered_list.2.type", "INT"), + resource.TestCheckResourceAttr("snowflake_object_renaming.test", "manually_ordered_list.2.order", "30"), + ), + }, + }, + }) +} + +func objectRenamingConfigManuallyOrderedList(listItems []map[string]any) string { + generateListItem := func(name string, itemType string, order int) string { + return fmt.Sprintf(` +manually_ordered_list { + name = "%s" + type = "%s" + order = %d +} +`, name, itemType, order) + } + + generatedListItems := "" + for _, item := range listItems { + generatedListItems += generateListItem(item["name"].(string), item["type"].(string), item["order"].(int)) + } + + return fmt.Sprintf(` +resource "snowflake_object_renaming" "test" { + %s +} +`, generatedListItems) +} diff --git a/pkg/resources/password_policy_acceptance_test.go b/pkg/resources/password_policy_acceptance_test.go index 6fef84a84d..cf978abc3d 100644 --- a/pkg/resources/password_policy_acceptance_test.go +++ b/pkg/resources/password_policy_acceptance_test.go @@ -197,6 +197,7 @@ func TestAcc_PasswordPolicy_migrateFromVersion_0_94_1(t *testing.T) { Steps: []resource.TestStep{ { + PreConfig: func() { acc.SetV097CompatibleConfigPathEnv(t) }, ExternalProviders: map[string]resource.ExternalProvider{ "snowflake": { VersionConstraint: "=0.94.1", @@ -210,6 +211,7 @@ func TestAcc_PasswordPolicy_migrateFromVersion_0_94_1(t *testing.T) { ), }, { + PreConfig: func() { acc.UnsetConfigPathEnv(t) }, ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, Config: passwordPolicyBasicConfig(id), Check: resource.ComposeTestCheckFunc( diff --git a/pkg/resources/primary_connection.go b/pkg/resources/primary_connection.go new file mode 100644 index 0000000000..0966e6dc5a --- /dev/null +++ b/pkg/resources/primary_connection.go @@ -0,0 +1,272 @@ +package resources + +import ( + "context" + "errors" + "fmt" + + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/helpers" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/internal/provider" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/schemas" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk" + "github.com/hashicorp/terraform-plugin-sdk/v2/diag" + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/customdiff" + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" +) + +var primaryConnectionSchema = map[string]*schema.Schema{ + "name": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + Description: blocklistedCharactersFieldDescription("String that specifies the identifier (i.e. name) for the connection. Must start with an alphabetic character and may only contain letters, decimal digits (0-9), and underscores (_). For a primary connection, the name must be unique across connection names and account names in the organization. "), + DiffSuppressFunc: suppressIdentifierQuoting, + }, + "is_primary": { + Type: schema.TypeBool, + Computed: true, + Description: "Indicates if the connection is primary. When Terraform detects that the connection is not primary, the resource is recreated.", + }, + "enable_failover_to_accounts": { + Type: schema.TypeList, + Optional: true, + Description: "Enables failover for given connection to provided accounts. Specifies a list of accounts in your organization where a secondary connection for this primary connection can be promoted to serve as the primary connection. 
Include your organization name for each account in the list.", + Elem: &schema.Schema{ + Type: schema.TypeString, + DiffSuppressFunc: suppressIdentifierQuoting, + }, + }, + "comment": { + Type: schema.TypeString, + Optional: true, + Description: "Specifies a comment for the connection.", + }, + ShowOutputAttributeName: { + Type: schema.TypeList, + Computed: true, + Description: "Outputs the result of `SHOW CONNECTIONS` for the given connection.", + Elem: &schema.Resource{ + Schema: schemas.ShowConnectionSchema, + }, + }, + FullyQualifiedNameAttributeName: schemas.FullyQualifiedNameSchema, +} + +func PrimaryConnection() *schema.Resource { + return &schema.Resource{ + CreateContext: CreateContextPrimaryConnection, + ReadContext: ReadContextPrimaryConnection, + UpdateContext: UpdateContextPrimaryConnection, + DeleteContext: DeleteContextPrimaryConnection, + + CustomizeDiff: customdiff.All( + ComputedIfAnyAttributeChanged(primaryConnectionSchema, ShowOutputAttributeName, "comment", "is_primary", "enable_failover_to_accounts"), + RecreateWhenResourceBoolFieldChangedExternally("is_primary", true), + ), + + Description: "Resource used to manage primary connections. For managing replicated connection check resource [snowflake_secondary_connection](./secondary_connection). For more information, check [connection documentation](https://docs.snowflake.com/en/sql-reference/sql/create-connection.html).", + Schema: primaryConnectionSchema, + Importer: &schema.ResourceImporter{ + StateContext: ImportName[sdk.AccountObjectIdentifier], + }, + } +} + +func CreateContextPrimaryConnection(ctx context.Context, d *schema.ResourceData, meta any) diag.Diagnostics { + client := meta.(*provider.Context).Client + + id, err := sdk.ParseAccountObjectIdentifier(d.Get("name").(string)) + if err != nil { + return diag.FromErr(err) + } + + request := sdk.NewCreateConnectionRequest(id) + + if v, ok := d.GetOk("comment"); ok { + request.WithComment(v.(string)) + } + + err = client.Connections.Create(ctx, request) + if err != nil { + return diag.FromErr(err) + } + + d.SetId(helpers.EncodeResourceIdentifier(id)) + + if v, ok := d.GetOk("enable_failover_to_accounts"); ok { + enableFailoverConfig := v.([]any) + + enableFailoverToAccountsList := make([]sdk.AccountIdentifier, 0) + for _, enableToAccount := range enableFailoverConfig { + accountInConfig := enableToAccount.(string) + accountIdentifier := sdk.NewAccountIdentifierFromFullyQualifiedName(accountInConfig) + + enableFailoverToAccountsList = append(enableFailoverToAccountsList, accountIdentifier) + } + + err := client.Connections.Alter(ctx, sdk.NewAlterConnectionRequest(id). + WithEnableConnectionFailover(*sdk.NewEnableConnectionFailoverRequest(enableFailoverToAccountsList))) + if err != nil { + return diag.FromErr(err) + } + } + + return ReadContextPrimaryConnection(ctx, d, meta) +} + +func ReadContextPrimaryConnection(ctx context.Context, d *schema.ResourceData, meta any) diag.Diagnostics { + client := meta.(*provider.Context).Client + id, err := sdk.ParseAccountObjectIdentifier(d.Id()) + if err != nil { + return diag.FromErr(err) + } + + connection, err := client.Connections.ShowByID(ctx, id) + if err != nil { + if errors.Is(err, sdk.ErrObjectNotFound) { + d.SetId("") + return diag.Diagnostics{ + diag.Diagnostic{ + Severity: diag.Warning, + Summary: "Failed to retrieve connection. Target object not found. 
Marking the resource as removed.", + Detail: fmt.Sprintf("Connection name: %s, Err: %s", id.FullyQualifiedName(), err), + }, + } + } + return diag.Diagnostics{ + diag.Diagnostic{ + Severity: diag.Error, + Summary: "Failed to retrieve connection.", + Detail: fmt.Sprintf("Connection name: %s, Err: %s", id.FullyQualifiedName(), err), + }, + } + } + + errs := errors.Join( + d.Set("is_primary", connection.IsPrimary), + d.Set(FullyQualifiedNameAttributeName, id.FullyQualifiedName()), + d.Set(ShowOutputAttributeName, []map[string]any{schemas.ConnectionToSchema(connection)}), + d.Set("comment", connection.Comment), + ) + if errs != nil { + return diag.FromErr(errs) + } + + sessionDetails, err := client.ContextFunctions.CurrentSessionDetails(ctx) + if err != nil { + return diag.FromErr(err) + } + currentAccountIdentifier := sdk.NewAccountIdentifier(sessionDetails.OrganizationName, sessionDetails.AccountName) + + enableFailoverToAccounts := make([]string, 0) + for _, allowedAccount := range connection.FailoverAllowedToAccounts { + if currentAccountIdentifier.FullyQualifiedName() == allowedAccount.FullyQualifiedName() { + continue + } + enableFailoverToAccounts = append(enableFailoverToAccounts, allowedAccount.Name()) + } + + if len(enableFailoverToAccounts) == 0 { + err := d.Set("enable_failover_to_accounts", []any{}) + if err != nil { + return diag.FromErr(err) + } + } else { + err := d.Set("enable_failover_to_accounts", enableFailoverToAccounts) + if err != nil { + return diag.FromErr(err) + } + } + + return nil +} + +func UpdateContextPrimaryConnection(ctx context.Context, d *schema.ResourceData, meta any) diag.Diagnostics { + client := meta.(*provider.Context).Client + id, err := sdk.ParseAccountObjectIdentifier(d.Id()) + if err != nil { + return diag.FromErr(err) + } + + connectionSetRequest := new(sdk.ConnectionSetRequest) + connectionUnsetRequest := new(sdk.ConnectionUnsetRequest) + + if d.HasChange("enable_failover_to_accounts") { + before, after := d.GetChange("enable_failover_to_accounts") + + getFailoverToAccounts := func(failoverConfig []any) []sdk.AccountIdentifier { + failoverEnabledToAccounts := make([]sdk.AccountIdentifier, 0) + + for _, allowedAccount := range failoverConfig { + accountIdentifier := sdk.NewAccountIdentifierFromFullyQualifiedName(allowedAccount.(string)) + failoverEnabledToAccounts = append(failoverEnabledToAccounts, accountIdentifier) + } + return failoverEnabledToAccounts + } + + beforeFailover := getFailoverToAccounts(before.([]any)) + afterFailover := getFailoverToAccounts(after.([]any)) + + addedFailovers, removedFailovers := ListDiff(beforeFailover, afterFailover) + + if len(addedFailovers) > 0 { + err := client.Connections.Alter(ctx, sdk.NewAlterConnectionRequest(id). + WithEnableConnectionFailover(*sdk.NewEnableConnectionFailoverRequest(addedFailovers)), + ) + if err != nil { + return diag.FromErr(err) + } + } + + if len(removedFailovers) > 0 { + err := client.Connections.Alter(ctx, sdk.NewAlterConnectionRequest(id). + WithDisableConnectionFailover(*sdk.NewDisableConnectionFailoverRequest(). 
+ WithToAccounts(*sdk.NewToAccountsRequest(removedFailovers)), + ), + ) + if err != nil { + return diag.FromErr(err) + } + } + } + + if d.HasChange("comment") { + comment := d.Get("comment").(string) + if len(comment) > 0 { + connectionSetRequest.WithComment(comment) + } else { + connectionUnsetRequest.WithComment(true) + } + } + + if (*connectionSetRequest != sdk.ConnectionSetRequest{}) { + err := client.Connections.Alter(ctx, sdk.NewAlterConnectionRequest(id).WithSet(*connectionSetRequest)) + if err != nil { + return diag.FromErr(err) + } + } + + if (*connectionUnsetRequest != sdk.ConnectionUnsetRequest{}) { + err := client.Connections.Alter(ctx, sdk.NewAlterConnectionRequest(id).WithUnset(*connectionUnsetRequest)) + if err != nil { + return diag.FromErr(err) + } + } + + return ReadContextPrimaryConnection(ctx, d, meta) +} + +func DeleteContextPrimaryConnection(ctx context.Context, d *schema.ResourceData, meta any) diag.Diagnostics { + client := meta.(*provider.Context).Client + id, err := sdk.ParseAccountObjectIdentifier(d.Id()) + if err != nil { + return diag.FromErr(err) + } + err = client.Connections.Drop(ctx, sdk.NewDropConnectionRequest(id).WithIfExists(true)) + if err != nil { + return diag.FromErr(err) + } + + d.SetId("") + return nil +} diff --git a/pkg/resources/primary_connection_acceptance_test.go b/pkg/resources/primary_connection_acceptance_test.go new file mode 100644 index 0000000000..8c2241a328 --- /dev/null +++ b/pkg/resources/primary_connection_acceptance_test.go @@ -0,0 +1,283 @@ +package resources_test + +import ( + "fmt" + "testing" + + acc "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/bettertestspoc/assert" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/bettertestspoc/assert/resourceassert" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/bettertestspoc/assert/resourceshowoutputassert" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/bettertestspoc/config" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/bettertestspoc/config/model" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/helpers/random" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/importchecks" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/planchecks" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/testenvs" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/helpers" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/provider/resources" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk" + tfjson "github.com/hashicorp/terraform-json" + "github.com/hashicorp/terraform-plugin-testing/helper/resource" + "github.com/hashicorp/terraform-plugin-testing/plancheck" + "github.com/hashicorp/terraform-plugin-testing/tfversion" +) + +func TestAcc_PrimaryConnection_Basic(t *testing.T) { + _ = testenvs.GetOrSkipTest(t, testenvs.EnableAcceptance) + acc.TestAccPreCheck(t) + + // TODO: [SNOW-1002023]: Unskip; Business Critical Snowflake Edition needed + _ = testenvs.GetOrSkipTest(t, testenvs.TestFailoverGroups) + + id := acc.TestClient().Ids.RandomAccountObjectIdentifier() + comment := random.Comment() + + accountId := acc.TestClient().Account.GetAccountIdentifier(t) + secondaryAccountId := acc.SecondaryTestClient().Account.GetAccountIdentifier(t) + primaryConnectionAsExternalId := 
sdk.NewExternalObjectIdentifier(accountId, id) + + connectionModel := model.PrimaryConnection("t", id.Name()) + connectionModelWithComment := model.PrimaryConnection("t", id.Name()).WithComment(comment) + connectionModelWithFailover := model.PrimaryConnection("t", id.Name()).WithEnableFailover(secondaryAccountId) + + resource.Test(t, resource.TestCase{ + ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, + PreCheck: func() { acc.TestAccPreCheck(t) }, + TerraformVersionChecks: []tfversion.TerraformVersionCheck{ + tfversion.RequireAbove(tfversion.Version1_5_0), + }, + CheckDestroy: acc.CheckDestroy(t, resources.PrimaryConnection), + Steps: []resource.TestStep{ + // create + { + Config: config.FromModel(t, connectionModel), + Check: resource.ComposeTestCheckFunc( + assert.AssertThat(t, + resourceassert.PrimaryConnectionResource(t, connectionModel.ResourceReference()). + HasNameString(id.Name()). + HasFullyQualifiedNameString(id.FullyQualifiedName()). + HasNoEnableFailoverToAccounts(). + HasCommentString(""), + + resourceshowoutputassert.ConnectionShowOutput(t, connectionModel.ResourceReference()). + HasName(id.Name()). + HasSnowflakeRegion(acc.TestClient().Context.CurrentRegion(t)). + HasAccountLocator(acc.TestClient().GetAccountLocator()). + HasAccountName(accountId.AccountName()). + HasOrganizationName(accountId.OrganizationName()). + HasComment(""). + HasIsPrimary(true). + HasPrimaryIdentifier(primaryConnectionAsExternalId). + HasFailoverAllowedToAccounts(accountId). + HasConnectionUrl( + acc.TestClient().Connection.GetConnectionUrl(accountId.OrganizationName(), id.Name()), + ), + ), + ), + }, + // set comment + { + Config: config.FromModel(t, connectionModelWithComment), + Check: resource.ComposeTestCheckFunc( + assert.AssertThat(t, + resourceassert.PrimaryConnectionResource(t, connectionModelWithComment.ResourceReference()). + HasNameString(id.Name()). + HasFullyQualifiedNameString(id.FullyQualifiedName()). + HasNoEnableFailoverToAccounts(). + HasCommentString(comment), + + resourceshowoutputassert.ConnectionShowOutput(t, connectionModelWithComment.ResourceReference()). + HasComment(comment), + ), + ), + }, + // import + { + ResourceName: connectionModelWithComment.ResourceReference(), + ImportState: true, + ImportStateVerify: true, + ImportStateCheck: importchecks.ComposeImportStateCheck( + importchecks.TestCheckResourceAttrInstanceState(helpers.EncodeResourceIdentifier(id), "name", id.Name()), + importchecks.TestCheckResourceAttrInstanceState(helpers.EncodeResourceIdentifier(id), "comment", comment), + ), + }, + // unset comment + { + Config: config.FromModel(t, connectionModel), + Check: resource.ComposeTestCheckFunc( + assert.AssertThat(t, + resourceassert.PrimaryConnectionResource(t, connectionModel.ResourceReference()). + HasCommentString(""), + + resourceshowoutputassert.ConnectionShowOutput(t, connectionModel.ResourceReference()). + HasComment(""), + ), + ), + }, + // enable failover to second account + { + Config: config.FromModel(t, connectionModelWithFailover), + Check: resource.ComposeTestCheckFunc( + assert.AssertThat(t, + resourceassert.PrimaryConnectionResource(t, connectionModelWithFailover.ResourceReference()). + HasNameString(id.Name()). + HasFullyQualifiedNameString(id.FullyQualifiedName()). + HasExactlyFailoverToAccountsInOrder(secondaryAccountId). + HasCommentString(""), + + resourceshowoutputassert.ConnectionShowOutput(t, connectionModelWithFailover.ResourceReference()). 
+ HasFailoverAllowedToAccounts(accountId, secondaryAccountId), + ), + ), + }, + // disable failover to second account + { + Config: config.FromModel(t, connectionModel), + Check: resource.ComposeTestCheckFunc( + assert.AssertThat(t, + resourceassert.PrimaryConnectionResource(t, connectionModel.ResourceReference()). + HasNameString(id.Name()). + HasFullyQualifiedNameString(id.FullyQualifiedName()). + HasNoEnableFailoverToAccounts(). + HasCommentString(""), + + resourceshowoutputassert.ConnectionShowOutput(t, connectionModel.ResourceReference()). + HasFailoverAllowedToAccounts(accountId), + ), + ), + }, + }, + }) +} + +func TestAcc_PrimaryConnection_ExternalChanges(t *testing.T) { + _ = testenvs.GetOrSkipTest(t, testenvs.EnableAcceptance) + acc.TestAccPreCheck(t) + + // TODO: [SNOW-1002023]: Unskip; Business Critical Snowflake Edition needed + _ = testenvs.GetOrSkipTest(t, testenvs.TestFailoverGroups) + + id := acc.TestClient().Ids.RandomAccountObjectIdentifier() + accountId := acc.TestClient().Account.GetAccountIdentifier(t) + secondaryAccountId := acc.SecondaryTestClient().Account.GetAccountIdentifier(t) + primaryConnectionAsExternalId := sdk.NewExternalObjectIdentifier(accountId, id) + + connectionModel := model.PrimaryConnection("t", id.Name()).WithComment("config comment") + connectionModelWithFailover := model.PrimaryConnection("t", id.Name()).WithEnableFailover(secondaryAccountId) + + resource.Test(t, resource.TestCase{ + ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, + PreCheck: func() { acc.TestAccPreCheck(t) }, + TerraformVersionChecks: []tfversion.TerraformVersionCheck{ + tfversion.RequireAbove(tfversion.Version1_5_0), + }, + CheckDestroy: acc.CheckDestroy(t, resources.PrimaryConnection), + Steps: []resource.TestStep{ + // create + { + Config: config.FromModel(t, connectionModel), + Check: resource.ComposeTestCheckFunc( + assert.AssertThat(t, + resourceassert.PrimaryConnectionResource(t, connectionModel.ResourceReference()). + HasNameString(id.Name()). + HasFullyQualifiedNameString(id.FullyQualifiedName()). + HasNoEnableFailoverToAccounts(). + HasCommentString("config comment"), + + resourceshowoutputassert.ConnectionShowOutput(t, connectionModel.ResourceReference()). + HasName(id.Name()). + HasSnowflakeRegion(acc.TestClient().Context.CurrentRegion(t)). + HasAccountLocator(acc.TestClient().GetAccountLocator()). + HasAccountName(accountId.AccountName()). + HasOrganizationName(accountId.OrganizationName()). + HasComment("config comment"). + HasIsPrimary(true). + HasPrimaryIdentifier(primaryConnectionAsExternalId). + HasFailoverAllowedToAccounts(accountId), + ), + ), + }, + // change comment externally + { + PreConfig: func() { + acc.TestClient().Connection.Alter(t, sdk.NewAlterConnectionRequest(id). + WithSet(*sdk.NewConnectionSetRequest(). + WithComment("external comment"))) + }, + Config: config.FromModel(t, connectionModel), + ConfigPlanChecks: resource.ConfigPlanChecks{ + PreApply: []plancheck.PlanCheck{ + plancheck.ExpectResourceAction(connectionModel.ResourceReference(), plancheck.ResourceActionUpdate), + planchecks.ExpectChange(connectionModel.ResourceReference(), "comment", tfjson.ActionUpdate, sdk.String("external comment"), sdk.String("config comment")), + }, + }, + Check: resource.ComposeTestCheckFunc( + assert.AssertThat(t, + resourceassert.PrimaryConnectionResource(t, connectionModel.ResourceReference()). + HasCommentString("config comment"), + resourceshowoutputassert.ConnectionShowOutput(t, connectionModel.ResourceReference()). 
+ HasComment("config comment"), + ), + ), + }, + // externally change enable failover accounts list - add second account + { + PreConfig: func() { + acc.TestClient().Connection.Alter(t, sdk.NewAlterConnectionRequest(id). + WithEnableConnectionFailover(*sdk.NewEnableConnectionFailoverRequest( + []sdk.AccountIdentifier{secondaryAccountId})), + ) + }, + Config: config.FromModel(t, connectionModel), + ConfigPlanChecks: resource.ConfigPlanChecks{ + PreApply: []plancheck.PlanCheck{ + plancheck.ExpectResourceAction(connectionModel.ResourceReference(), plancheck.ResourceActionUpdate), + planchecks.ExpectChange( + connectionModel.ResourceReference(), + "enable_failover_to_accounts", + tfjson.ActionUpdate, + sdk.String(fmt.Sprintf("[%s]", secondaryAccountId.FullyQualifiedName())), + nil, + ), + }, + }, + Check: resource.ComposeTestCheckFunc( + assert.AssertThat(t, + resourceassert.PrimaryConnectionResource(t, connectionModel.ResourceReference()). + HasNoEnableFailoverToAccounts(), + resourceshowoutputassert.ConnectionShowOutput(t, connectionModel.ResourceReference()). + HasFailoverAllowedToAccounts(accountId), + ), + ), + }, + // externally change disable failover + { + PreConfig: func() { + acc.TestClient().Connection.Alter(t, sdk.NewAlterConnectionRequest(id).WithDisableConnectionFailover(*sdk.NewDisableConnectionFailoverRequest())) + }, + Config: config.FromModel(t, connectionModelWithFailover), + ConfigPlanChecks: resource.ConfigPlanChecks{ + PreApply: []plancheck.PlanCheck{ + plancheck.ExpectResourceAction(connectionModelWithFailover.ResourceReference(), plancheck.ResourceActionUpdate), + planchecks.ExpectChange( + connectionModel.ResourceReference(), + "enable_failover_to_accounts", + tfjson.ActionUpdate, + nil, + sdk.String(fmt.Sprintf("[%s]", secondaryAccountId.FullyQualifiedName())), + ), + }, + }, + Check: resource.ComposeTestCheckFunc( + assert.AssertThat(t, + resourceassert.PrimaryConnectionResource(t, connectionModelWithFailover.ResourceReference()). + HasExactlyFailoverToAccountsInOrder(secondaryAccountId), + resourceshowoutputassert.ConnectionShowOutput(t, connectionModelWithFailover.ResourceReference()). + HasFailoverAllowedToAccounts(accountId, secondaryAccountId), + ), + ), + }, + }, + }) +} diff --git a/pkg/resources/privileges.go b/pkg/resources/privileges.go deleted file mode 100644 index 46adbfafe5..0000000000 --- a/pkg/resources/privileges.go +++ /dev/null @@ -1,112 +0,0 @@ -package resources - -type Privilege string - -func (p Privilege) String() string { - return string(p) -} - -const ( - privilegeAllPrivileges Privilege = "ALL PRIVILEGES" - privilegeAccountSupportCases Privilege = "MANAGE ACCOUNT SUPPORT CASES" - privilegeAddSearchOptimization Privilege = "ADD SEARCH OPTIMIZATION" - privilegeApply Privilege = "APPLY" - privilegeApplyMaskingPolicy Privilege = "APPLY MASKING POLICY" - privilegeApplyPasswordPolicy Privilege = "APPLY PASSWORD POLICY" - privilegeApplyRowAccessPolicy Privilege = "APPLY ROW ACCESS POLICY" - privilegeApplySessionPolicy Privilege = "APPLY SESSION POLICY" - privilegeApplyTag Privilege = "APPLY TAG" - privilegeAttachPolicy Privilege = "ATTACH POLICY" - privilegeAudit Privilege = "AUDIT" - privilegeCreateAccount Privilege = "CREATE ACCOUNT" - privilegeCreateCredential Privilege = "CREATE CREDENTIAL" //#nosec G101-- This is a false positive. 
- privilegeCreateDatabase Privilege = "CREATE DATABASE" - privilegeCreateDatabaseRole Privilege = "CREATE DATABASE ROLE" - privilegeCreateDataExchangeListing Privilege = "CREATE DATA EXCHANGE LISTING" - privilegeCreateDynamicTable Privilege = "CREATE DYNAMIC TABLE" - privilegeCreateExternalTable Privilege = "CREATE EXTERNAL TABLE" - privilegeCreateFailoverGroup Privilege = "CREATE FAILOVER GROUP" - privilegeCreateFileFormat Privilege = "CREATE FILE FORMAT" - privilegeCreateFunction Privilege = "CREATE FUNCTION" - privilegeCreateIntegration Privilege = "CREATE INTEGRATION" - privilegeCreateMaskingPolicy Privilege = "CREATE MASKING POLICY" - privilegeCreateMaterializedView Privilege = "CREATE MATERIALIZED VIEW" - privilegeCreateNetworkPolicy Privilege = "CREATE NETWORK POLICY" - privilegeCreatePipe Privilege = "CREATE PIPE" - privilegeCreateProcedure Privilege = "CREATE PROCEDURE" - privilegeCreateRole Privilege = "CREATE ROLE" - privilegeCreateRowAccessPolicy Privilege = "CREATE ROW ACCESS POLICY" - privilegeCreateSchema Privilege = "CREATE SCHEMA" - privilegeCreateSequence Privilege = "CREATE SEQUENCE" - privilegeCreateSessionPolicy Privilege = "CREATE SESSION POLICY" - privilegeCreateShare Privilege = "CREATE SHARE" - privilegeCreateStage Privilege = "CREATE STAGE" - privilegeCreateStream Privilege = "CREATE STREAM" - privilegeCreateStreamlit Privilege = "CREATE STREAMLIT" - privilegeCreateTable Privilege = "CREATE TABLE" - privilegeCreateTag Privilege = "CREATE TAG" - privilegeCreateTask Privilege = "CREATE TASK" - privilegeCreateTemporaryTable Privilege = "CREATE TEMPORARY TABLE" - privilegeCreateUser Privilege = "CREATE USER" - privilegeCreateView Privilege = "CREATE VIEW" - privilegeCreateWarehouse Privilege = "CREATE WAREHOUSE" - privilegeDelete Privilege = "DELETE" - privilegeExecuteManagedTask Privilege = "EXECUTE MANAGED TASK" - privilegeExecuteTask Privilege = "EXECUTE TASK" - privilegeFailover Privilege = "FAILOVER" - privilegeImportedPrivileges Privilege = "IMPORTED PRIVILEGES" - privilegeImportShare Privilege = "IMPORT SHARE" - privilegeInsert Privilege = "INSERT" - privilegeManageGrants Privilege = "MANAGE GRANTS" - privilegeManageWarehouses Privilege = "MANAGE WAREHOUSES" - privilegeModify Privilege = "MODIFY" - privilegeMonitor Privilege = "MONITOR" - privilegeMonitorExecution Privilege = "MONITOR EXECUTION" - privilegeMonitorSecurity Privilege = "MONITOR SECURITY" - privilegeMonitorUsage Privilege = "MONITOR USAGE" - privilegeOperate Privilege = "OPERATE" - privilegeOrganizationSupportCases Privilege = "MANAGE ORGANIZATION SUPPORT CASES" - privilegeOverrideShareRestrictions Privilege = "OVERRIDE SHARE RESTRICTIONS" - privilegeOwnership Privilege = "OWNERSHIP" - privilegeProvisionApplication Privilege = "PROVISION APPLICATION" - privilegePurchaseDataExchangeListing Privilege = "PURCHASE DATA EXCHANGE LISTING" - privilegeRead Privilege = "READ" - privilegeRebuild Privilege = "REBUILD" - privilegeReferences Privilege = "REFERENCES" - privilegeReferenceUsage Privilege = "REFERENCE_USAGE" - privilegeReplicate Privilege = "REPLICATE" - privilegeResolveAll Privilege = "RESOLVE ALL" - privilegeSelect Privilege = "SELECT" - privilegeTruncate Privilege = "TRUNCATE" - privilegeUpdate Privilege = "UPDATE" - privilegeUsage Privilege = "USAGE" - privilegeUserSupportCases Privilege = "MANAGE USER SUPPORT CASES" - privilegeWrite Privilege = "WRITE" -) - -type PrivilegeSet map[Privilege]struct{} - -func NewPrivilegeSet(privileges ...Privilege) PrivilegeSet { - ps := PrivilegeSet{} - for _, 
priv := range privileges { - ps[priv] = struct{}{} - } - return ps -} - -func (ps PrivilegeSet) ToList() []string { - privs := []string{} - for p := range ps { - privs = append(privs, string(p)) - } - return privs -} - -func (ps PrivilegeSet) addString(s string) { - ps[Privilege(s)] = struct{}{} -} - -func (ps PrivilegeSet) hasString(s string) bool { - _, ok := ps[Privilege(s)] - return ok -} diff --git a/pkg/resources/procedure_acceptance_test.go b/pkg/resources/procedure_acceptance_test.go index 6d0ec93b58..05cbfbfd73 100644 --- a/pkg/resources/procedure_acceptance_test.go +++ b/pkg/resources/procedure_acceptance_test.go @@ -212,6 +212,7 @@ func TestAcc_Procedure_migrateFromVersion085(t *testing.T) { Steps: []resource.TestStep{ { + PreConfig: func() { acc.SetV097CompatibleConfigPathEnv(t) }, ExternalProviders: map[string]resource.ExternalProvider{ "snowflake": { VersionConstraint: "=0.85.0", @@ -276,6 +277,7 @@ func TestAcc_Procedure_proveArgsPermanentDiff(t *testing.T) { CheckDestroy: acc.CheckDestroy(t, resources.Procedure), Steps: []resource.TestStep{ { + PreConfig: func() { acc.SetV097CompatibleConfigPathEnv(t) }, ExternalProviders: map[string]resource.ExternalProvider{ "snowflake": { VersionConstraint: "=0.89.0", @@ -292,6 +294,7 @@ func TestAcc_Procedure_proveArgsPermanentDiff(t *testing.T) { }, }, { + PreConfig: func() { acc.UnsetConfigPathEnv(t) }, ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, Config: sqlProcedureConfigArgsPermanentDiff(acc.TestDatabaseName, acc.TestSchemaName, name), ConfigPlanChecks: resource.ConfigPlanChecks{ @@ -319,6 +322,7 @@ func TestAcc_Procedure_returnTypePermanentDiff(t *testing.T) { CheckDestroy: acc.CheckDestroy(t, resources.Procedure), Steps: []resource.TestStep{ { + PreConfig: func() { acc.SetV097CompatibleConfigPathEnv(t) }, ExternalProviders: map[string]resource.ExternalProvider{ "snowflake": { VersionConstraint: "=0.89.0", @@ -335,6 +339,7 @@ func TestAcc_Procedure_returnTypePermanentDiff(t *testing.T) { }, }, { + PreConfig: func() { acc.UnsetConfigPathEnv(t) }, ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, Config: sqlProcedureConfigReturnTypePermanentDiff(acc.TestDatabaseName, acc.TestSchemaName, name), Check: resource.ComposeTestCheckFunc( @@ -408,6 +413,7 @@ func TestAcc_Procedure_EnsureSmoothResourceIdMigrationToV0950(t *testing.T) { CheckDestroy: acc.CheckDestroy(t, resources.Procedure), Steps: []resource.TestStep{ { + PreConfig: func() { acc.SetV097CompatibleConfigPathEnv(t) }, ExternalProviders: map[string]resource.ExternalProvider{ "snowflake": { VersionConstraint: "=0.94.1", @@ -420,6 +426,7 @@ func TestAcc_Procedure_EnsureSmoothResourceIdMigrationToV0950(t *testing.T) { ), }, { + PreConfig: func() { acc.UnsetConfigPathEnv(t) }, ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, Config: procedureConfigWithMoreArguments(acc.TestDatabaseName, acc.TestSchemaName, name), Check: resource.ComposeTestCheckFunc( @@ -472,6 +479,7 @@ func TestAcc_Procedure_EnsureSmoothResourceIdMigrationToV0950_WithoutArguments(t CheckDestroy: acc.CheckDestroy(t, resources.Function), Steps: []resource.TestStep{ { + PreConfig: func() { acc.SetV097CompatibleConfigPathEnv(t) }, ExternalProviders: map[string]resource.ExternalProvider{ "snowflake": { VersionConstraint: "=0.94.1", @@ -484,6 +492,7 @@ func TestAcc_Procedure_EnsureSmoothResourceIdMigrationToV0950_WithoutArguments(t ), }, { + PreConfig: func() { acc.UnsetConfigPathEnv(t) }, ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, Config: 
procedureConfigWithoutArguments(acc.TestDatabaseName, acc.TestSchemaName, name), Check: resource.ComposeTestCheckFunc( @@ -523,6 +532,7 @@ func TestAcc_Procedure_EnsureSmoothResourceIdMigrationToV0950_ArgumentSynonyms(t CheckDestroy: acc.CheckDestroy(t, resources.Procedure), Steps: []resource.TestStep{ { + PreConfig: func() { acc.SetV097CompatibleConfigPathEnv(t) }, ExternalProviders: map[string]resource.ExternalProvider{ "snowflake": { VersionConstraint: "=0.94.1", @@ -535,6 +545,7 @@ func TestAcc_Procedure_EnsureSmoothResourceIdMigrationToV0950_ArgumentSynonyms(t ), }, { + PreConfig: func() { acc.UnsetConfigPathEnv(t) }, ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, Config: procedureConfigWithArgumentSynonyms(acc.TestDatabaseName, acc.TestSchemaName, name), Check: resource.ComposeTestCheckFunc( diff --git a/pkg/resources/resource_monitor_acceptance_test.go b/pkg/resources/resource_monitor_acceptance_test.go index 0f7a02322a..00b99b9227 100644 --- a/pkg/resources/resource_monitor_acceptance_test.go +++ b/pkg/resources/resource_monitor_acceptance_test.go @@ -529,6 +529,7 @@ func TestAcc_ResourceMonitor_Issue1990_RemovingResourceMonitorOutsideOfTerraform Steps: []resource.TestStep{ // Create resource monitor { + PreConfig: func() { acc.SetV097CompatibleConfigPathEnv(t) }, ExternalProviders: map[string]resource.ExternalProvider{ "snowflake": { VersionConstraint: "=0.69.0", @@ -564,6 +565,7 @@ func TestAcc_ResourceMonitor_Issue1990_RemovingResourceMonitorOutsideOfTerraform }, // Same configuration, but it's the latest version of the provider (0.96.0 and above) { + PreConfig: func() { acc.UnsetConfigPathEnv(t) }, ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, Config: config.FromModel(t, configModel), }, @@ -599,6 +601,7 @@ func TestAcc_ResourceMonitor_Issue_TimestampInfinitePlan(t *testing.T) { Steps: []resource.TestStep{ // Create resource monitor without the timestamps { + PreConfig: func() { acc.SetV097CompatibleConfigPathEnv(t) }, ExternalProviders: map[string]resource.ExternalProvider{ "snowflake": { VersionConstraint: "=0.90.0", @@ -641,6 +644,7 @@ func TestAcc_ResourceMonitor_Issue_TimestampInfinitePlan(t *testing.T) { }, // Create resource monitor without the timestamps { + PreConfig: func() { acc.UnsetConfigPathEnv(t) }, ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, Config: config.FromModel(t, configModel), }, @@ -678,6 +682,7 @@ func TestAcc_ResourceMonitor_Issue1500_CreatingWithOnlyTriggers(t *testing.T) { Steps: []resource.TestStep{ // Create resource monitor with only triggers (old version) { + PreConfig: func() { acc.SetV097CompatibleConfigPathEnv(t) }, ExternalProviders: map[string]resource.ExternalProvider{ "snowflake": { VersionConstraint: "=0.90.0", @@ -689,6 +694,7 @@ func TestAcc_ResourceMonitor_Issue1500_CreatingWithOnlyTriggers(t *testing.T) { }, // Create resource monitor with only triggers (the latest version) { + PreConfig: func() { acc.UnsetConfigPathEnv(t) }, ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, Config: config.FromModel(t, configModel), ExpectError: regexp.MustCompile("due to Snowflake limiltations you cannot create Resource Monitor with only triggers set"), @@ -730,6 +736,7 @@ func TestAcc_ResourceMonitor_Issue1500_AlteringWithOnlyTriggers(t *testing.T) { CheckDestroy: acc.CheckDestroy(t, resources.ResourceMonitor), Steps: []resource.TestStep{ { + PreConfig: func() { acc.SetV097CompatibleConfigPathEnv(t) }, ExternalProviders: map[string]resource.ExternalProvider{ "snowflake": { 
VersionConstraint: "=0.90.0", @@ -763,6 +770,7 @@ func TestAcc_ResourceMonitor_Issue1500_AlteringWithOnlyTriggers(t *testing.T) { }, // Upgrade to the latest version { + PreConfig: func() { acc.UnsetConfigPathEnv(t) }, ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, Config: config.FromModel(t, configModelWithCreditQuota), }, @@ -898,6 +906,7 @@ func TestAcc_ResourceMonitor_SetForWarehouse(t *testing.T) { CheckDestroy: acc.CheckDestroy(t, resources.ResourceMonitor), Steps: []resource.TestStep{ { + PreConfig: func() { acc.SetV097CompatibleConfigPathEnv(t) }, ExternalProviders: map[string]resource.ExternalProvider{ "snowflake": { VersionConstraint: "=0.90.0", @@ -917,6 +926,7 @@ resource "snowflake_resource_monitor" "test" { ), }, { + PreConfig: func() { acc.UnsetConfigPathEnv(t) }, ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, Config: fmt.Sprintf(` resource "snowflake_resource_monitor" "test" { diff --git a/pkg/resources/row_access_policy_acceptance_test.go b/pkg/resources/row_access_policy_acceptance_test.go index 5dfeddcaed..e553d16a73 100644 --- a/pkg/resources/row_access_policy_acceptance_test.go +++ b/pkg/resources/row_access_policy_acceptance_test.go @@ -208,6 +208,7 @@ func TestAcc_RowAccessPolicy_Issue2053(t *testing.T) { PreCheck: func() { acc.TestAccPreCheck(t) }, Steps: []resource.TestStep{ { + PreConfig: func() { acc.SetV097CompatibleConfigPathEnv(t) }, ExternalProviders: map[string]resource.ExternalProvider{ "snowflake": { VersionConstraint: "=0.95.0", @@ -228,6 +229,7 @@ func TestAcc_RowAccessPolicy_Issue2053(t *testing.T) { ExpectNonEmptyPlan: true, }, { + PreConfig: func() { acc.UnsetConfigPathEnv(t) }, ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, ConfigDirectory: acc.ConfigurationDirectory("TestAcc_RowAccessPolicy/basic"), ConfigVariables: tfconfig.ConfigVariablesFromModel(t, policyModel), @@ -425,6 +427,7 @@ func TestAcc_RowAccessPolicy_migrateFromVersion_0_95_0_LowercaseArgName(t *testi PreCheck: func() { acc.TestAccPreCheck(t) }, Steps: []resource.TestStep{ { + PreConfig: func() { acc.SetV097CompatibleConfigPathEnv(t) }, ExternalProviders: map[string]resource.ExternalProvider{ "snowflake": { VersionConstraint: "=0.95.0", @@ -450,6 +453,7 @@ func TestAcc_RowAccessPolicy_migrateFromVersion_0_95_0_LowercaseArgName(t *testi ), }, { + PreConfig: func() { acc.UnsetConfigPathEnv(t) }, ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, ConfigDirectory: acc.ConfigurationDirectory("TestAcc_RowAccessPolicy/basic"), ConfigVariables: tfconfig.ConfigVariablesFromModel(t, policyModel), @@ -505,6 +509,7 @@ func TestAcc_RowAccessPolicy_migrateFromVersion_0_95_0_UppercaseArgName(t *testi PreCheck: func() { acc.TestAccPreCheck(t) }, Steps: []resource.TestStep{ { + PreConfig: func() { acc.SetV097CompatibleConfigPathEnv(t) }, ExternalProviders: map[string]resource.ExternalProvider{ "snowflake": { VersionConstraint: "=0.95.0", @@ -530,6 +535,7 @@ func TestAcc_RowAccessPolicy_migrateFromVersion_0_95_0_UppercaseArgName(t *testi ), }, { + PreConfig: func() { acc.UnsetConfigPathEnv(t) }, ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, ConfigDirectory: acc.ConfigurationDirectory("TestAcc_RowAccessPolicy/basic"), ConfigVariables: tfconfig.ConfigVariablesFromModel(t, policyModel), diff --git a/pkg/resources/saml2_integration_acceptance_test.go b/pkg/resources/saml2_integration_acceptance_test.go index 352c58c884..4493a6a248 100644 --- a/pkg/resources/saml2_integration_acceptance_test.go +++ 
b/pkg/resources/saml2_integration_acceptance_test.go @@ -1082,6 +1082,7 @@ func TestAcc_Saml2Integration_migrateFromV0941_ensureSmoothUpgradeWithNewResourc CheckDestroy: acc.CheckDestroy(t, resources.Saml2SecurityIntegration), Steps: []resource.TestStep{ { + PreConfig: func() { acc.SetV097CompatibleConfigPathEnv(t) }, ExternalProviders: map[string]resource.ExternalProvider{ "snowflake": { VersionConstraint: "=0.94.1", @@ -1094,6 +1095,7 @@ func TestAcc_Saml2Integration_migrateFromV0941_ensureSmoothUpgradeWithNewResourc ), }, { + PreConfig: func() { acc.UnsetConfigPathEnv(t) }, ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, Config: saml2IntegrationBasicConfig(id.Name(), cert), Check: resource.ComposeAggregateTestCheckFunc( @@ -1117,6 +1119,7 @@ func TestAcc_Saml2Integration_IdentifierQuotingDiffSuppression(t *testing.T) { CheckDestroy: acc.CheckDestroy(t, resources.Saml2SecurityIntegration), Steps: []resource.TestStep{ { + PreConfig: func() { acc.SetV097CompatibleConfigPathEnv(t) }, ExternalProviders: map[string]resource.ExternalProvider{ "snowflake": { VersionConstraint: "=0.94.1", @@ -1131,6 +1134,7 @@ func TestAcc_Saml2Integration_IdentifierQuotingDiffSuppression(t *testing.T) { ), }, { + PreConfig: func() { acc.UnsetConfigPathEnv(t) }, ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, Config: saml2IntegrationBasicConfig(quotedId, cert), ConfigPlanChecks: resource.ConfigPlanChecks{ diff --git a/pkg/resources/saml_integration_test.go b/pkg/resources/saml_integration_test.go index 30f3d1220c..d326c12212 100644 --- a/pkg/resources/saml_integration_test.go +++ b/pkg/resources/saml_integration_test.go @@ -10,7 +10,7 @@ import ( sqlmock "github.com/DATA-DOG/go-sqlmock" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/provider" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/resources" - . "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/testhelpers" + . 
"github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/testhelpers/mock" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" "github.com/stretchr/testify/require" ) diff --git a/pkg/resources/schema.go b/pkg/resources/schema.go index ff9d85efdb..27158b069d 100644 --- a/pkg/resources/schema.go +++ b/pkg/resources/schema.go @@ -103,7 +103,7 @@ func Schema() *schema.Resource { schemaParametersCustomDiff, ), - Schema: helpers.MergeMaps(schemaSchema, schemaParametersSchema), + Schema: collections.MergeMaps(schemaSchema, schemaParametersSchema), Importer: &schema.ResourceImporter{ StateContext: ImportSchema, }, diff --git a/pkg/resources/schema_acceptance_test.go b/pkg/resources/schema_acceptance_test.go index 03551b68f5..179df30392 100644 --- a/pkg/resources/schema_acceptance_test.go +++ b/pkg/resources/schema_acceptance_test.go @@ -465,6 +465,7 @@ func TestAcc_Schema_ManagePublicVersion_0_94_0(t *testing.T) { Steps: []resource.TestStep{ // PUBLIC can not be created in v0.93 { + PreConfig: func() { acc.SetV097CompatibleConfigPathEnv(t) }, ExternalProviders: map[string]resource.ExternalProvider{ "snowflake": { VersionConstraint: "=0.93.0", @@ -543,6 +544,7 @@ func TestAcc_Schema_ManagePublicVersion_0_94_1(t *testing.T) { Steps: []resource.TestStep{ // PUBLIC can not be created in v0.93 { + PreConfig: func() { acc.SetV097CompatibleConfigPathEnv(t) }, ExternalProviders: map[string]resource.ExternalProvider{ "snowflake": { VersionConstraint: "=0.93.0", @@ -553,6 +555,7 @@ func TestAcc_Schema_ManagePublicVersion_0_94_1(t *testing.T) { ExpectError: regexp.MustCompile("Error: error creating schema PUBLIC"), }, { + PreConfig: func() { acc.UnsetConfigPathEnv(t) }, ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, Config: schemav094WithPipeExecutionPaused(name, db.ID().Name(), true), Check: resource.ComposeTestCheckFunc( @@ -958,6 +961,7 @@ func TestAcc_Schema_migrateFromVersion093WithoutManagedAccess(t *testing.T) { Steps: []resource.TestStep{ { + PreConfig: func() { acc.SetV097CompatibleConfigPathEnv(t) }, ExternalProviders: map[string]resource.ExternalProvider{ "snowflake": { VersionConstraint: "=0.93.0", @@ -971,6 +975,7 @@ func TestAcc_Schema_migrateFromVersion093WithoutManagedAccess(t *testing.T) { ), }, { + PreConfig: func() { acc.UnsetConfigPathEnv(t) }, ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, Config: schemav094(id.Name(), databaseId.Name()), Check: resource.ComposeAggregateTestCheckFunc( @@ -1000,6 +1005,7 @@ func TestAcc_Schema_migrateFromVersion093(t *testing.T) { Steps: []resource.TestStep{ { + PreConfig: func() { acc.SetV097CompatibleConfigPathEnv(t) }, ExternalProviders: map[string]resource.ExternalProvider{ "snowflake": { VersionConstraint: "=0.93.0", @@ -1017,6 +1023,7 @@ func TestAcc_Schema_migrateFromVersion093(t *testing.T) { ), }, { + PreConfig: func() { acc.UnsetConfigPathEnv(t) }, ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, Config: schemav094WithManagedAccessAndDataRetentionTimeInDays(id.Name(), databaseId.Name(), true, 10), Check: resource.ComposeAggregateTestCheckFunc( @@ -1104,6 +1111,7 @@ func TestAcc_Schema_migrateFromV0941_ensureSmoothUpgradeWithNewResourceId(t *tes CheckDestroy: acc.CheckDestroy(t, resources.Schema), Steps: []resource.TestStep{ { + PreConfig: func() { acc.SetV097CompatibleConfigPathEnv(t) }, ExternalProviders: map[string]resource.ExternalProvider{ "snowflake": { VersionConstraint: "=0.94.1", @@ -1116,6 +1124,7 @@ func TestAcc_Schema_migrateFromV0941_ensureSmoothUpgradeWithNewResourceId(t 
*tes ), }, { + PreConfig: func() { acc.UnsetConfigPathEnv(t) }, ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, Config: schemaBasicConfig(id.DatabaseName(), id.Name()), Check: resource.ComposeAggregateTestCheckFunc( @@ -1148,6 +1157,7 @@ func TestAcc_Schema_IdentifierQuotingDiffSuppression(t *testing.T) { CheckDestroy: acc.CheckDestroy(t, resources.Schema), Steps: []resource.TestStep{ { + PreConfig: func() { acc.SetV097CompatibleConfigPathEnv(t) }, ExternalProviders: map[string]resource.ExternalProvider{ "snowflake": { VersionConstraint: "=0.94.1", @@ -1163,6 +1173,7 @@ func TestAcc_Schema_IdentifierQuotingDiffSuppression(t *testing.T) { ), }, { + PreConfig: func() { acc.UnsetConfigPathEnv(t) }, ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, Config: schemaBasicConfig(quotedDatabaseName, quotedName), ConfigPlanChecks: resource.ConfigPlanChecks{ diff --git a/pkg/resources/schema_parameters.go b/pkg/resources/schema_parameters.go index fe8deedbcc..1bb4d72f16 100644 --- a/pkg/resources/schema_parameters.go +++ b/pkg/resources/schema_parameters.go @@ -5,7 +5,7 @@ import ( "strconv" "strings" - "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/helpers" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/internal/collections" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/internal/provider" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk" "github.com/hashicorp/terraform-plugin-sdk/v2/diag" @@ -52,7 +52,7 @@ func init() { Optional: true, } } - schemaParametersSchema = helpers.MergeMaps(databaseParametersSchema, additionalSchemaParameters) + schemaParametersSchema = collections.MergeMaps(databaseParametersSchema, additionalSchemaParameters) } func schemaParametersProvider(ctx context.Context, d ResourceIdProvider, meta any) ([]*sdk.Parameter, error) { diff --git a/pkg/resources/scim_integration_acceptance_test.go b/pkg/resources/scim_integration_acceptance_test.go index a1a3812f82..aa8f838cdc 100644 --- a/pkg/resources/scim_integration_acceptance_test.go +++ b/pkg/resources/scim_integration_acceptance_test.go @@ -425,6 +425,7 @@ func TestAcc_ScimIntegration_migrateFromVersion092EnabledTrue(t *testing.T) { Steps: []resource.TestStep{ { + PreConfig: func() { acc.SetV097CompatibleConfigPathEnv(t) }, ExternalProviders: map[string]resource.ExternalProvider{ "snowflake": { VersionConstraint: "=0.92.0", @@ -438,6 +439,7 @@ func TestAcc_ScimIntegration_migrateFromVersion092EnabledTrue(t *testing.T) { ), }, { + PreConfig: func() { acc.UnsetConfigPathEnv(t) }, ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, Config: scimIntegrationv093(id.Name(), role.Name(), true, sdk.ScimSecurityIntegrationScimClientGeneric), ConfigPlanChecks: resource.ConfigPlanChecks{ @@ -473,6 +475,7 @@ func TestAcc_ScimIntegration_migrateFromVersion092EnabledFalse(t *testing.T) { Steps: []resource.TestStep{ { + PreConfig: func() { acc.SetV097CompatibleConfigPathEnv(t) }, ExternalProviders: map[string]resource.ExternalProvider{ "snowflake": { VersionConstraint: "=0.92.0", @@ -486,6 +489,7 @@ func TestAcc_ScimIntegration_migrateFromVersion092EnabledFalse(t *testing.T) { ), }, { + PreConfig: func() { acc.UnsetConfigPathEnv(t) }, ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, Config: scimIntegrationv093(id.Name(), role.Name(), false, sdk.ScimSecurityIntegrationScimClientGeneric), ConfigPlanChecks: resource.ConfigPlanChecks{ @@ -514,6 +518,7 @@ func TestAcc_ScimIntegration_migrateFromVersion093HandleSyncPassword(t *testing. 
Steps: []resource.TestStep{ // create resource with v0.92 { + PreConfig: func() { acc.SetV097CompatibleConfigPathEnv(t) }, ExternalProviders: map[string]resource.ExternalProvider{ "snowflake": { VersionConstraint: "=0.92.0", @@ -547,6 +552,7 @@ func TestAcc_ScimIntegration_migrateFromVersion093HandleSyncPassword(t *testing. }, // check with newest version - the value in state was set to boolean default, so there should be no diff { + PreConfig: func() { acc.UnsetConfigPathEnv(t) }, ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, Config: scimIntegrationv093(id.Name(), role.Name(), true, sdk.ScimSecurityIntegrationScimClientAzure), ConfigPlanChecks: resource.ConfigPlanChecks{ @@ -596,6 +602,7 @@ func TestAcc_ScimIntegration_migrateFromV0941_ensureSmoothUpgradeWithNewResource CheckDestroy: acc.CheckDestroy(t, resources.ScimSecurityIntegration), Steps: []resource.TestStep{ { + PreConfig: func() { acc.SetV097CompatibleConfigPathEnv(t) }, ExternalProviders: map[string]resource.ExternalProvider{ "snowflake": { VersionConstraint: "=0.94.1", @@ -608,6 +615,7 @@ func TestAcc_ScimIntegration_migrateFromV0941_ensureSmoothUpgradeWithNewResource ), }, { + PreConfig: func() { acc.UnsetConfigPathEnv(t) }, ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, Config: scimIntegrationBasicConfig(id.Name()), Check: resource.ComposeAggregateTestCheckFunc( @@ -630,6 +638,7 @@ func TestAcc_ScimIntegration_IdentifierQuotingDiffSuppression(t *testing.T) { CheckDestroy: acc.CheckDestroy(t, resources.ScimSecurityIntegration), Steps: []resource.TestStep{ { + PreConfig: func() { acc.SetV097CompatibleConfigPathEnv(t) }, ExternalProviders: map[string]resource.ExternalProvider{ "snowflake": { VersionConstraint: "=0.94.1", @@ -644,6 +653,7 @@ func TestAcc_ScimIntegration_IdentifierQuotingDiffSuppression(t *testing.T) { ), }, { + PreConfig: func() { acc.UnsetConfigPathEnv(t) }, ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, Config: scimIntegrationBasicConfig(quotedId), ConfigPlanChecks: resource.ConfigPlanChecks{ diff --git a/pkg/resources/secondary_connection.go b/pkg/resources/secondary_connection.go new file mode 100644 index 0000000000..c58b7a8ed3 --- /dev/null +++ b/pkg/resources/secondary_connection.go @@ -0,0 +1,191 @@ +package resources + +import ( + "context" + "errors" + "fmt" + + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/helpers" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/internal/provider" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/schemas" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk" + "github.com/hashicorp/terraform-plugin-sdk/v2/diag" + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/customdiff" + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" +) + +var secondaryConnectionSchema = map[string]*schema.Schema{ + "name": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + Description: blocklistedCharactersFieldDescription("String that specifies the identifier (i.e. name) for the connection. Must start with an alphabetic character and may only contain letters, decimal digits (0-9), and underscores (_). For a secondary connection, the name must match the name of its primary connection."), + DiffSuppressFunc: suppressIdentifierQuoting, + }, + "is_primary": { + Type: schema.TypeBool, + Computed: true, + Description: "Indicates if the connection primary status has been changed. 
If change is detected, resource will be recreated.", + }, + "as_replica_of": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + Description: "Specifies the identifier for a primary connection from which to create a replica (i.e. a secondary connection).", + DiffSuppressFunc: suppressIdentifierQuoting, + }, + "comment": { + Type: schema.TypeString, + Optional: true, + Description: "Specifies a comment for the secondary connection.", + }, + ShowOutputAttributeName: { + Type: schema.TypeList, + Computed: true, + Description: "Outputs the result of `SHOW CONNECTIONS` for the given connection.", + Elem: &schema.Resource{ + Schema: schemas.ShowConnectionSchema, + }, + }, + FullyQualifiedNameAttributeName: schemas.FullyQualifiedNameSchema, +} + +func SecondaryConnection() *schema.Resource { + return &schema.Resource{ + CreateContext: CreateContextSecondaryConnection, + ReadContext: ReadContextSecondaryConnection, + UpdateContext: UpdateContextSecondaryConnection, + DeleteContext: DeleteContextSecondaryConnection, + Description: "Resource used to manage secondary (replicated) connections. To manage primary connection check resource [snowflake_primary_connection](./primary_connection). For more information, check [connection documentation](https://docs.snowflake.com/en/sql-reference/sql/create-connection.html).", + + CustomizeDiff: customdiff.All( + ComputedIfAnyAttributeChanged(secondaryConnectionSchema, ShowOutputAttributeName, "comment", "is_primary"), + RecreateWhenResourceBoolFieldChangedExternally("is_primary", false), + ), + + Schema: secondaryConnectionSchema, + Importer: &schema.ResourceImporter{ + StateContext: ImportName[sdk.AccountObjectIdentifier], + }, + } +} + +func CreateContextSecondaryConnection(ctx context.Context, d *schema.ResourceData, meta any) diag.Diagnostics { + client := meta.(*provider.Context).Client + + id, err := sdk.ParseAccountObjectIdentifier(d.Get("name").(string)) + if err != nil { + return diag.FromErr(err) + } + + request := sdk.NewCreateConnectionRequest(id) + + if v, ok := d.GetOk("as_replica_of"); ok { + externalObjectId, err := sdk.ParseExternalObjectIdentifier(v.(string)) + if err != nil { + return diag.FromErr(err) + } + request.WithAsReplicaOf(externalObjectId) + } + + if v, ok := d.GetOk("comment"); ok { + request.WithComment(v.(string)) + } + + err = client.Connections.Create(ctx, request) + if err != nil { + return diag.FromErr(err) + } + + d.SetId(helpers.EncodeResourceIdentifier(id)) + + return ReadContextSecondaryConnection(ctx, d, meta) +} + +func ReadContextSecondaryConnection(ctx context.Context, d *schema.ResourceData, meta any) diag.Diagnostics { + client := meta.(*provider.Context).Client + id, err := sdk.ParseAccountObjectIdentifier(d.Id()) + if err != nil { + return diag.FromErr(err) + } + + connection, err := client.Connections.ShowByID(ctx, id) + if err != nil { + if errors.Is(err, sdk.ErrObjectNotFound) { + d.SetId("") + return diag.Diagnostics{ + diag.Diagnostic{ + Severity: diag.Warning, + Summary: "Failed to retrieve connection. Target object not found. 
Marking the resource as removed.", + Detail: fmt.Sprintf("Connection name: %s, Err: %s", id.FullyQualifiedName(), err), + }, + } + } + return diag.Diagnostics{ + diag.Diagnostic{ + Severity: diag.Error, + Summary: "Failed to retrieve connection.", + Detail: fmt.Sprintf("Connection name: %s, Err: %s", id.FullyQualifiedName(), err), + }, + } + } + + return diag.FromErr(errors.Join( + d.Set("is_primary", connection.IsPrimary), + d.Set(FullyQualifiedNameAttributeName, id.FullyQualifiedName()), + d.Set(ShowOutputAttributeName, []map[string]any{schemas.ConnectionToSchema(connection)}), + d.Set("comment", connection.Comment), + d.Set("as_replica_of", connection.Primary), + )) +} + +func UpdateContextSecondaryConnection(ctx context.Context, d *schema.ResourceData, meta any) diag.Diagnostics { + client := meta.(*provider.Context).Client + id, err := sdk.ParseAccountObjectIdentifier(d.Id()) + if err != nil { + return diag.FromErr(err) + } + + connectionSetRequest := new(sdk.ConnectionSetRequest) + connectionUnsetRequest := new(sdk.ConnectionUnsetRequest) + + if d.HasChange("comment") { + comment := d.Get("comment").(string) + if len(comment) > 0 { + connectionSetRequest.WithComment(comment) + } else { + connectionUnsetRequest.WithComment(true) + } + } + + if (*connectionSetRequest != sdk.ConnectionSetRequest{}) { + err := client.Connections.Alter(ctx, sdk.NewAlterConnectionRequest(id).WithSet(*connectionSetRequest)) + if err != nil { + return diag.FromErr(err) + } + } + + if (*connectionUnsetRequest != sdk.ConnectionUnsetRequest{}) { + err := client.Connections.Alter(ctx, sdk.NewAlterConnectionRequest(id).WithUnset(*connectionUnsetRequest)) + if err != nil { + return diag.FromErr(err) + } + } + + return ReadContextSecondaryConnection(ctx, d, meta) +} + +func DeleteContextSecondaryConnection(ctx context.Context, d *schema.ResourceData, meta any) diag.Diagnostics { + client := meta.(*provider.Context).Client + id, err := sdk.ParseAccountObjectIdentifier(d.Id()) + if err != nil { + return diag.FromErr(err) + } + err = client.Connections.Drop(ctx, sdk.NewDropConnectionRequest(id).WithIfExists(true)) + if err != nil { + return diag.FromErr(err) + } + + d.SetId("") + return nil +} diff --git a/pkg/resources/secondary_connection_acceptance_test.go b/pkg/resources/secondary_connection_acceptance_test.go new file mode 100644 index 0000000000..380f3b49b5 --- /dev/null +++ b/pkg/resources/secondary_connection_acceptance_test.go @@ -0,0 +1,138 @@ +package resources_test + +import ( + "testing" + + acc "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/bettertestspoc/assert" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/bettertestspoc/assert/resourceassert" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/bettertestspoc/assert/resourceshowoutputassert" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/bettertestspoc/config" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/bettertestspoc/config/model" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/importchecks" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/testenvs" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/helpers" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/provider/resources" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk" + 
"github.com/hashicorp/terraform-plugin-testing/helper/resource" + "github.com/hashicorp/terraform-plugin-testing/plancheck" + "github.com/hashicorp/terraform-plugin-testing/tfversion" +) + +func TestAcc_SecondaryConnection_Basic(t *testing.T) { + _ = testenvs.GetOrSkipTest(t, testenvs.EnableAcceptance) + acc.TestAccPreCheck(t) + + // TODO: [SNOW-1002023]: Unskip; Business Critical Snowflake Edition needed + _ = testenvs.GetOrSkipTest(t, testenvs.TestFailoverGroups) + + id := acc.SecondaryTestClient().Ids.RandomAccountObjectIdentifier() + accountId := acc.TestClient().Account.GetAccountIdentifier(t) + primaryConnectionAsExternalId := sdk.NewExternalObjectIdentifier(accountId, id) + + // create primary connection + _, cleanup := acc.SecondaryTestClient().Connection.Create(t, id) + t.Cleanup(cleanup) + acc.SecondaryTestClient().Connection.Alter(t, sdk.NewAlterConnectionRequest(id).WithEnableConnectionFailover( + *sdk.NewEnableConnectionFailoverRequest([]sdk.AccountIdentifier{accountId}))) + + secondartyConnectionModel := model.SecondaryConnection("t", primaryConnectionAsExternalId.FullyQualifiedName(), id.Name()) + secondartyConnectionModelWithComment := model.SecondaryConnection("t", primaryConnectionAsExternalId.FullyQualifiedName(), id.Name()).WithComment("secondary connection test comment") + + resource.Test(t, resource.TestCase{ + ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, + PreCheck: func() { acc.TestAccPreCheck(t) }, + TerraformVersionChecks: []tfversion.TerraformVersionCheck{ + tfversion.RequireAbove(tfversion.Version1_5_0), + }, + CheckDestroy: acc.CheckDestroy(t, resources.SecondaryConnection), + Steps: []resource.TestStep{ + { + Config: config.FromModel(t, secondartyConnectionModel), + Check: resource.ComposeTestCheckFunc( + assert.AssertThat(t, + resourceassert.SecondaryConnectionResource(t, secondartyConnectionModel.ResourceReference()). + HasNameString(id.Name()). + HasFullyQualifiedNameString(id.FullyQualifiedName()). + HasAsReplicaOfIdentifier(primaryConnectionAsExternalId). + HasIsPrimaryString("false"). + HasCommentString(""), + + resourceshowoutputassert.ConnectionShowOutput(t, secondartyConnectionModel.ResourceReference()). + HasName(id.Name()). + HasSnowflakeRegion(acc.SecondaryTestClient().Context.CurrentRegion(t)). + HasAccountLocator(acc.SecondaryTestClient().GetAccountLocator()). + HasAccountName(accountId.AccountName()). + HasOrganizationName(accountId.OrganizationName()). + HasComment(""). + HasIsPrimary(false). + HasPrimaryIdentifier(primaryConnectionAsExternalId). + HasFailoverAllowedToAccounts(accountId). + HasConnectionUrl( + acc.SecondaryTestClient().Connection.GetConnectionUrl(accountId.OrganizationName(), id.Name()), + ), + ), + ), + }, + // set comment + { + Config: config.FromModel(t, secondartyConnectionModelWithComment), + Check: resource.ComposeTestCheckFunc( + assert.AssertThat(t, + resourceassert.SecondaryConnectionResource(t, secondartyConnectionModelWithComment.ResourceReference()). + HasNameString(id.Name()). + HasFullyQualifiedNameString(id.FullyQualifiedName()). + HasCommentString("secondary connection test comment"), + + resourceshowoutputassert.ConnectionShowOutput(t, secondartyConnectionModelWithComment.ResourceReference()). 
+ HasComment("secondary connection test comment"), + ), + ), + }, + // import + { + ResourceName: secondartyConnectionModelWithComment.ResourceReference(), + ImportState: true, + ImportStateVerify: true, + ImportStateCheck: importchecks.ComposeImportStateCheck( + importchecks.TestCheckResourceAttrInstanceState(helpers.EncodeResourceIdentifier(id), "name", id.Name()), + importchecks.TestCheckResourceAttrInstanceState(helpers.EncodeResourceIdentifier(id), "comment", "secondary connection test comment"), + ), + }, + // unset comment + { + Config: config.FromModel(t, secondartyConnectionModel), + Check: resource.ComposeTestCheckFunc( + assert.AssertThat(t, + resourceassert.SecondaryConnectionResource(t, secondartyConnectionModel.ResourceReference()). + HasCommentString(""), + + resourceshowoutputassert.ConnectionShowOutput(t, secondartyConnectionModel.ResourceReference()). + HasComment(""), + ), + ), + }, + // recreate when exteranlly promoted to primary + { + PreConfig: func() { + acc.TestClient().Connection.Alter(t, sdk.NewAlterConnectionRequest(id).WithPrimary(true)) + }, + Config: config.FromModel(t, secondartyConnectionModel), + ConfigPlanChecks: resource.ConfigPlanChecks{ + PreApply: []plancheck.PlanCheck{ + plancheck.ExpectResourceAction(secondartyConnectionModel.ResourceReference(), plancheck.ResourceActionDestroyBeforeCreate), + }, + }, + Check: resource.ComposeTestCheckFunc( + assert.AssertThat(t, + resourceassert.SecondaryConnectionResource(t, secondartyConnectionModel.ResourceReference()). + HasIsPrimaryString("false"), + + resourceshowoutputassert.ConnectionShowOutput(t, secondartyConnectionModel.ResourceReference()). + HasIsPrimary(false), + ), + ), + }, + }, + }) +} diff --git a/pkg/resources/secondary_database.go b/pkg/resources/secondary_database.go index 155699d1c9..30298a4084 100644 --- a/pkg/resources/secondary_database.go +++ b/pkg/resources/secondary_database.go @@ -6,6 +6,7 @@ import ( "fmt" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/helpers" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/internal/collections" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/internal/provider" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/schemas" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk" @@ -55,7 +56,7 @@ func SecondaryDatabase() *schema.Resource { databaseParametersCustomDiff, ComputedIfAnyAttributeChanged(secondaryDatabaseSchema, FullyQualifiedNameAttributeName, "name"), ), - Schema: helpers.MergeMaps(secondaryDatabaseSchema, databaseParametersSchema), + Schema: collections.MergeMaps(secondaryDatabaseSchema, databaseParametersSchema), Importer: &schema.ResourceImporter{ StateContext: ImportName[sdk.AccountObjectIdentifier], }, diff --git a/pkg/resources/secondary_database_acceptance_test.go b/pkg/resources/secondary_database_acceptance_test.go index e499a08317..22cb7c5611 100644 --- a/pkg/resources/secondary_database_acceptance_test.go +++ b/pkg/resources/secondary_database_acceptance_test.go @@ -541,6 +541,7 @@ func TestAcc_SecondaryDatabase_migrateFromV0941_ensureSmoothUpgradeWithNewResour CheckDestroy: acc.CheckDestroy(t, resources.SecondaryDatabase), Steps: []resource.TestStep{ { + PreConfig: func() { acc.SetV097CompatibleConfigPathEnv(t) }, ExternalProviders: map[string]resource.ExternalProvider{ "snowflake": { VersionConstraint: "=0.94.1", @@ -553,6 +554,7 @@ func TestAcc_SecondaryDatabase_migrateFromV0941_ensureSmoothUpgradeWithNewResour ), }, { + PreConfig: func() { acc.UnsetConfigPathEnv(t) }, 
ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, Config: secondaryDatabaseConfigBasic(id.Name(), externalPrimaryId.FullyQualifiedName()), Check: resource.ComposeAggregateTestCheckFunc( @@ -591,6 +593,7 @@ func TestAcc_SecondaryDatabase_IdentifierQuotingDiffSuppression(t *testing.T) { CheckDestroy: acc.CheckDestroy(t, resources.SecondaryDatabase), Steps: []resource.TestStep{ { + PreConfig: func() { acc.SetV097CompatibleConfigPathEnv(t) }, ExternalProviders: map[string]resource.ExternalProvider{ "snowflake": { VersionConstraint: "=0.94.1", @@ -605,6 +608,7 @@ func TestAcc_SecondaryDatabase_IdentifierQuotingDiffSuppression(t *testing.T) { ), }, { + PreConfig: func() { acc.UnsetConfigPathEnv(t) }, ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, Config: secondaryDatabaseConfigBasic(quotedId, unquotedExternalPrimaryId), ConfigPlanChecks: resource.ConfigPlanChecks{ diff --git a/pkg/resources/secret_with_basic_authentication.go b/pkg/resources/secret_with_basic_authentication.go index 595fc6831d..6bec639e38 100644 --- a/pkg/resources/secret_with_basic_authentication.go +++ b/pkg/resources/secret_with_basic_authentication.go @@ -7,6 +7,7 @@ import ( "reflect" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/helpers" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/internal/collections" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/internal/logging" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/internal/provider" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk" @@ -30,7 +31,7 @@ var secretBasicAuthenticationSchema = func() map[string]*schema.Schema { Description: externalChangesNotDetectedFieldDescription("Specifies the password value to store in the secret."), }, } - return helpers.MergeMaps(secretCommonSchema, secretBasicAuthentication) + return collections.MergeMaps(secretCommonSchema, secretBasicAuthentication) }() func SecretWithBasicAuthentication() *schema.Resource { diff --git a/pkg/resources/secret_with_generic_string.go b/pkg/resources/secret_with_generic_string.go index ca9dfe55d6..7715abd818 100644 --- a/pkg/resources/secret_with_generic_string.go +++ b/pkg/resources/secret_with_generic_string.go @@ -7,6 +7,7 @@ import ( "reflect" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/helpers" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/internal/collections" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/internal/logging" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/internal/provider" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk" @@ -24,7 +25,7 @@ var secretGenericStringSchema = func() map[string]*schema.Schema { Description: externalChangesNotDetectedFieldDescription("Specifies the string to store in the secret. The string can be an API token or a string of sensitive value that can be used in the handler code of a UDF or stored procedure. For details, see [Creating and using an external access integration](https://docs.snowflake.com/en/developer-guide/external-network-access/creating-using-external-network-access). 
You should not use this property to store any kind of OAuth token; use one of the other secret types for your OAuth use cases."), }, } - return helpers.MergeMaps(secretCommonSchema, secretGenericString) + return collections.MergeMaps(secretCommonSchema, secretGenericString) }() func SecretWithGenericString() *schema.Resource { diff --git a/pkg/resources/secret_with_oauth_authorization_code_grant.go b/pkg/resources/secret_with_oauth_authorization_code_grant.go index 1797d51e8c..7ce5493ecf 100644 --- a/pkg/resources/secret_with_oauth_authorization_code_grant.go +++ b/pkg/resources/secret_with_oauth_authorization_code_grant.go @@ -7,6 +7,7 @@ import ( "reflect" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/helpers" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/internal/collections" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/internal/logging" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/internal/provider" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk" @@ -37,7 +38,7 @@ var secretAuthorizationCodeGrantSchema = func() map[string]*schema.Schema { DiffSuppressFunc: suppressIdentifierQuoting, }, } - return helpers.MergeMaps(secretCommonSchema, secretAuthorizationCodeGrant) + return collections.MergeMaps(secretCommonSchema, secretAuthorizationCodeGrant) }() func SecretWithAuthorizationCodeGrant() *schema.Resource { diff --git a/pkg/resources/secret_with_oauth_client_credentials.go b/pkg/resources/secret_with_oauth_client_credentials.go index 866083688d..1e46e40822 100644 --- a/pkg/resources/secret_with_oauth_client_credentials.go +++ b/pkg/resources/secret_with_oauth_client_credentials.go @@ -7,6 +7,7 @@ import ( "reflect" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/helpers" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/internal/collections" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/internal/logging" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/internal/provider" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk" @@ -31,7 +32,7 @@ var secretClientCredentialsSchema = func() map[string]*schema.Schema { Description: "Specifies a list of scopes to use when making a request from the OAuth server by a role with USAGE on the integration during the OAuth client credentials flow.", }, } - return helpers.MergeMaps(secretCommonSchema, secretClientCredentials) + return collections.MergeMaps(secretCommonSchema, secretClientCredentials) }() func SecretWithClientCredentials() *schema.Resource { diff --git a/pkg/resources/shared_database.go b/pkg/resources/shared_database.go index 8339148656..18a882d9a8 100644 --- a/pkg/resources/shared_database.go +++ b/pkg/resources/shared_database.go @@ -6,6 +6,7 @@ import ( "fmt" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/helpers" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/internal/collections" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/internal/provider" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/schemas" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk" @@ -56,7 +57,7 @@ func SharedDatabase() *schema.Resource { ComputedIfAnyAttributeChanged(sharedDatabaseSchema, FullyQualifiedNameAttributeName, "name"), ), - Schema: helpers.MergeMaps(sharedDatabaseSchema, sharedDatabaseParametersSchema), + Schema: collections.MergeMaps(sharedDatabaseSchema, sharedDatabaseParametersSchema), Importer: &schema.ResourceImporter{ StateContext: 
ImportName[sdk.AccountObjectIdentifier], }, diff --git a/pkg/resources/shared_database_acceptance_test.go b/pkg/resources/shared_database_acceptance_test.go index c31314899e..5c05b8dce2 100644 --- a/pkg/resources/shared_database_acceptance_test.go +++ b/pkg/resources/shared_database_acceptance_test.go @@ -306,6 +306,7 @@ func TestAcc_SharedDatabase_migrateFromV0941_ensureSmoothUpgradeWithNewResourceI CheckDestroy: acc.CheckDestroy(t, resources.SharedDatabase), Steps: []resource.TestStep{ { + PreConfig: func() { acc.SetV097CompatibleConfigPathEnv(t) }, ExternalProviders: map[string]resource.ExternalProvider{ "snowflake": { VersionConstraint: "=0.94.1", @@ -318,6 +319,7 @@ func TestAcc_SharedDatabase_migrateFromV0941_ensureSmoothUpgradeWithNewResourceI ), }, { + PreConfig: func() { acc.UnsetConfigPathEnv(t) }, ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, Config: sharedDatabaseConfigBasic(id.Name(), externalShareId.FullyQualifiedName()), Check: resource.ComposeAggregateTestCheckFunc( @@ -355,6 +357,7 @@ func TestAcc_SharedDatabase_IdentifierQuotingDiffSuppression(t *testing.T) { CheckDestroy: acc.CheckDestroy(t, resources.SharedDatabase), Steps: []resource.TestStep{ { + PreConfig: func() { acc.SetV097CompatibleConfigPathEnv(t) }, ExternalProviders: map[string]resource.ExternalProvider{ "snowflake": { VersionConstraint: "=0.94.1", @@ -369,6 +372,7 @@ func TestAcc_SharedDatabase_IdentifierQuotingDiffSuppression(t *testing.T) { ), }, { + PreConfig: func() { acc.UnsetConfigPathEnv(t) }, ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, Config: sharedDatabaseConfigBasic(quotedId, unquotedExternalShareId), ConfigPlanChecks: resource.ConfigPlanChecks{ diff --git a/pkg/resources/stream_on_directory_table.go b/pkg/resources/stream_on_directory_table.go index 9277c90616..491692d7dd 100644 --- a/pkg/resources/stream_on_directory_table.go +++ b/pkg/resources/stream_on_directory_table.go @@ -6,6 +6,7 @@ import ( "fmt" "log" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/internal/collections" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/internal/provider" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/helpers" @@ -26,7 +27,7 @@ var streamOnDirectoryTableSchema = func() map[string]*schema.Schema { ValidateDiagFunc: IsValidIdentifier[sdk.SchemaObjectIdentifier](), }, } - return helpers.MergeMaps(streamCommonSchema, streamOnDirectoryTable) + return collections.MergeMaps(streamCommonSchema, streamOnDirectoryTable) }() func StreamOnDirectoryTable() *schema.Resource { diff --git a/pkg/resources/stream_on_external_table.go b/pkg/resources/stream_on_external_table.go index 73748790eb..bee62563d7 100644 --- a/pkg/resources/stream_on_external_table.go +++ b/pkg/resources/stream_on_external_table.go @@ -6,6 +6,7 @@ import ( "fmt" "log" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/internal/collections" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/internal/provider" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/helpers" @@ -37,7 +38,7 @@ var streamOnExternalTableSchema = func() map[string]*schema.Schema { AtAttributeName: atSchema, BeforeAttributeName: beforeSchema, } - return helpers.MergeMaps(streamCommonSchema, streamOnExternalTable) + return collections.MergeMaps(streamCommonSchema, streamOnExternalTable) }() func StreamOnExternalTable() *schema.Resource { diff --git a/pkg/resources/stream_on_table.go b/pkg/resources/stream_on_table.go index 7dfaced2b1..18a99c1b55 100644 --- 
a/pkg/resources/stream_on_table.go +++ b/pkg/resources/stream_on_table.go @@ -6,6 +6,7 @@ import ( "fmt" "log" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/internal/collections" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/internal/provider" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/helpers" @@ -44,7 +45,7 @@ var streamOnTableSchema = func() map[string]*schema.Schema { AtAttributeName: atSchema, BeforeAttributeName: beforeSchema, } - return helpers.MergeMaps(streamCommonSchema, streamOnTable) + return collections.MergeMaps(streamCommonSchema, streamOnTable) }() func StreamOnTable() *schema.Resource { diff --git a/pkg/resources/stream_on_view.go b/pkg/resources/stream_on_view.go index c9e1bcb3a6..3b6eb72941 100644 --- a/pkg/resources/stream_on_view.go +++ b/pkg/resources/stream_on_view.go @@ -6,6 +6,7 @@ import ( "fmt" "log" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/internal/collections" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/internal/provider" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/helpers" @@ -44,7 +45,7 @@ var StreamOnViewSchema = func() map[string]*schema.Schema { AtAttributeName: atSchema, BeforeAttributeName: beforeSchema, } - return helpers.MergeMaps(streamCommonSchema, streamOnView) + return collections.MergeMaps(streamCommonSchema, streamOnView) }() func StreamOnView() *schema.Resource { diff --git a/pkg/resources/streamlit_acceptance_test.go b/pkg/resources/streamlit_acceptance_test.go index f18722a6c3..415bd2a22a 100644 --- a/pkg/resources/streamlit_acceptance_test.go +++ b/pkg/resources/streamlit_acceptance_test.go @@ -449,6 +449,7 @@ func TestAcc_Streamlit_migrateFromV0941_ensureSmoothUpgradeWithNewResourceId(t * CheckDestroy: acc.CheckDestroy(t, resources.Streamlit), Steps: []resource.TestStep{ { + PreConfig: func() { acc.SetV097CompatibleConfigPathEnv(t) }, ExternalProviders: map[string]resource.ExternalProvider{ "snowflake": { VersionConstraint: "=0.94.1", @@ -461,6 +462,7 @@ func TestAcc_Streamlit_migrateFromV0941_ensureSmoothUpgradeWithNewResourceId(t * ), }, { + PreConfig: func() { acc.UnsetConfigPathEnv(t) }, ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, Config: streamlitBasicConfig(id, stage.ID(), "main_file"), Check: resource.ComposeAggregateTestCheckFunc( @@ -501,6 +503,7 @@ func TestAcc_Streamlit_IdentifierQuotingDiffSuppression(t *testing.T) { CheckDestroy: acc.CheckDestroy(t, resources.Streamlit), Steps: []resource.TestStep{ { + PreConfig: func() { acc.SetV097CompatibleConfigPathEnv(t) }, ExternalProviders: map[string]resource.ExternalProvider{ "snowflake": { VersionConstraint: "=0.94.1", @@ -517,6 +520,7 @@ func TestAcc_Streamlit_IdentifierQuotingDiffSuppression(t *testing.T) { ), }, { + PreConfig: func() { acc.UnsetConfigPathEnv(t) }, ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, Config: streamlitBasicConfigWithRawIdentifierValues(quotedDatabaseName, quotedSchemaName, quotedName, stage.ID().Name(), "main_file"), ConfigPlanChecks: resource.ConfigPlanChecks{ diff --git a/pkg/resources/table_acceptance_test.go b/pkg/resources/table_acceptance_test.go index f0fc349780..caeeb1daaf 100644 --- a/pkg/resources/table_acceptance_test.go +++ b/pkg/resources/table_acceptance_test.go @@ -1994,6 +1994,7 @@ func TestAcc_Table_migrateFromVersion_0_94_1(t *testing.T) { Steps: []resource.TestStep{ { + PreConfig: func() { acc.SetV097CompatibleConfigPathEnv(t) }, ExternalProviders: map[string]resource.ExternalProvider{ "snowflake": { 
VersionConstraint: "=0.94.1", @@ -2007,6 +2008,7 @@ func TestAcc_Table_migrateFromVersion_0_94_1(t *testing.T) { ), }, { + PreConfig: func() { acc.UnsetConfigPathEnv(t) }, ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, Config: tableConfig(id.Name(), id.DatabaseName(), id.SchemaName()), Check: resource.ComposeTestCheckFunc( @@ -2032,6 +2034,7 @@ func TestAcc_Table_SuppressQuotingOnDefaultSequence_issue2644(t *testing.T) { }, Steps: []resource.TestStep{ { + PreConfig: func() { acc.SetV097CompatibleConfigPathEnv(t) }, ExternalProviders: map[string]resource.ExternalProvider{ "snowflake": { VersionConstraint: "=0.94.1", @@ -2042,6 +2045,7 @@ func TestAcc_Table_SuppressQuotingOnDefaultSequence_issue2644(t *testing.T) { Config: tableConfigWithSequence(name, databaseName, schemaName), }, { + PreConfig: func() { acc.UnsetConfigPathEnv(t) }, ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, Config: tableConfigWithSequence(name, databaseName, schemaName), Check: resource.ComposeTestCheckFunc( diff --git a/pkg/resources/table_constraint_acceptance_test.go b/pkg/resources/table_constraint_acceptance_test.go index 2feb8eca0c..89c83573a5 100644 --- a/pkg/resources/table_constraint_acceptance_test.go +++ b/pkg/resources/table_constraint_acceptance_test.go @@ -232,6 +232,7 @@ func TestAcc_Table_issue2535_newConstraint(t *testing.T) { CheckDestroy: nil, Steps: []resource.TestStep{ { + PreConfig: func() { acc.SetV097CompatibleConfigPathEnv(t) }, ExternalProviders: map[string]resource.ExternalProvider{ "snowflake": { VersionConstraint: "=0.86.0", @@ -254,6 +255,7 @@ func TestAcc_Table_issue2535_newConstraint(t *testing.T) { ), }, { + PreConfig: func() { acc.UnsetConfigPathEnv(t) }, ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, Config: tableConstraintUniqueConfigUsingTableId(accName, acc.TestDatabaseName, acc.TestSchemaName, "|"), ExpectError: regexp.MustCompile(`.*Expected SchemaObjectIdentifier identifier type, but got:.*`), @@ -282,6 +284,7 @@ func TestAcc_Table_issue2535_existingTable(t *testing.T) { Steps: []resource.TestStep{ // reference done by table.id in 0.85.0 { + PreConfig: func() { acc.SetV097CompatibleConfigPathEnv(t) }, ExternalProviders: map[string]resource.ExternalProvider{ "snowflake": { VersionConstraint: "=0.85.0", @@ -306,6 +309,7 @@ func TestAcc_Table_issue2535_existingTable(t *testing.T) { }, // fixed in the current version { + PreConfig: func() { acc.UnsetConfigPathEnv(t) }, ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, Config: tableConstraintUniqueConfigUsingFullyQualifiedName(accName, acc.TestDatabaseName, acc.TestSchemaName), Check: resource.ComposeTestCheckFunc( diff --git a/pkg/resources/tag.go b/pkg/resources/tag.go index 08f50c8116..39ba4e9990 100644 --- a/pkg/resources/tag.go +++ b/pkg/resources/tag.go @@ -2,10 +2,13 @@ package resources import ( "context" + "errors" "fmt" - "slices" + "log" + "github.com/hashicorp/go-cty/cty" "github.com/hashicorp/terraform-plugin-sdk/v2/diag" + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/customdiff" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/helpers" @@ -16,22 +19,24 @@ import ( var tagSchema = map[string]*schema.Schema{ "name": { - Type: schema.TypeString, - Required: true, - Description: "Specifies the identifier for the tag; must be unique for the database in which the tag is created.", - ForceNew: true, + Type: schema.TypeString, + Required: true, + Description: 
blocklistedCharactersFieldDescription("Specifies the identifier for the tag; must be unique for the database in which the tag is created."), + DiffSuppressFunc: suppressIdentifierQuoting, }, "database": { - Type: schema.TypeString, - Required: true, - Description: "The database in which to create the tag.", - ForceNew: true, + Type: schema.TypeString, + Required: true, + ForceNew: true, + Description: blocklistedCharactersFieldDescription("The database in which to create the tag."), + DiffSuppressFunc: suppressIdentifierQuoting, }, "schema": { - Type: schema.TypeString, - Required: true, - Description: "The schema in which to create the tag.", - ForceNew: true, + Type: schema.TypeString, + Required: true, + ForceNew: true, + Description: blocklistedCharactersFieldDescription("The schema in which to create the tag."), + DiffSuppressFunc: suppressIdentifierQuoting, }, "comment": { Type: schema.TypeString, @@ -39,12 +44,30 @@ var tagSchema = map[string]*schema.Schema{ Description: "Specifies a comment for the tag.", }, "allowed_values": { - Type: schema.TypeList, + Type: schema.TypeSet, Elem: &schema.Schema{Type: schema.TypeString}, Optional: true, - Description: "List of allowed values for the tag.", + Description: "Set of allowed values for the tag.", + }, + "masking_policies": { + Type: schema.TypeSet, + Elem: &schema.Schema{ + Type: schema.TypeString, + ValidateDiagFunc: IsValidIdentifier[sdk.SchemaObjectIdentifier](), + }, + Optional: true, + DiffSuppressFunc: NormalizeAndCompareIdentifiersInSet("masking_policies"), + Description: "Set of masking policies for the tag. A tag can support one masking policy for each data type. If masking policies are assigned to the tag, before dropping the tag, the provider automatically unassigns them.", }, FullyQualifiedNameAttributeName: schemas.FullyQualifiedNameSchema, + ShowOutputAttributeName: { + Type: schema.TypeList, + Computed: true, + Description: "Outputs the result of `SHOW TAGS` for the given tag.", + Elem: &schema.Resource{ + Schema: schemas.ShowTagSchema, + }, + }, } // TODO(SNOW-1348114, SNOW-1348110, SNOW-1348355, SNOW-1348353): remove after rework of external table, materialized view, stage and table @@ -83,14 +106,31 @@ var tagReferenceSchema = &schema.Schema{ // Schema returns a pointer to the resource representing a schema. func Tag() *schema.Resource { return &schema.Resource{ + SchemaVersion: 1, + CreateContext: CreateContextTag, ReadContext: ReadContextTag, UpdateContext: UpdateContextTag, DeleteContext: DeleteContextTag, + Description: "Resource used to manage tags. 
For more information, check [tag documentation](https://docs.snowflake.com/en/sql-reference/sql/create-tag).", + + CustomizeDiff: customdiff.All( + ComputedIfAnyAttributeChanged(tagSchema, ShowOutputAttributeName, "name", "comment", "allowed_values"), + ComputedIfAnyAttributeChanged(tagSchema, FullyQualifiedNameAttributeName, "name"), + ), Schema: tagSchema, Importer: &schema.ResourceImporter{ - StateContext: schema.ImportStatePassthroughContext, + StateContext: ImportName[sdk.SchemaObjectIdentifier], + }, + + StateUpgraders: []schema.StateUpgrader{ + { + Version: 0, + // setting type to cty.EmptyObject is a bit hacky here but following https://developer.hashicorp.com/terraform/plugin/framework/migrating/resources/state-upgrade#sdkv2-1 would require lots of repetitive code; this should work with cty.EmptyObject + Type: cty.EmptyObject, + Upgrade: migratePipeSeparatedObjectIdentifierResourceIdToFullyQualifiedName, + }, }, } } @@ -98,57 +138,98 @@ func Tag() *schema.Resource { func CreateContextTag(ctx context.Context, d *schema.ResourceData, meta any) diag.Diagnostics { client := meta.(*provider.Context).Client name := d.Get("name").(string) - schema := d.Get("schema").(string) + schemaName := d.Get("schema").(string) database := d.Get("database").(string) - id := sdk.NewSchemaObjectIdentifier(database, schema, name) + id := sdk.NewSchemaObjectIdentifier(database, schemaName, name) request := sdk.NewCreateTagRequest(id) if v, ok := d.GetOk("comment"); ok { request.WithComment(sdk.String(v.(string))) } if v, ok := d.GetOk("allowed_values"); ok { - request.WithAllowedValues(expandStringListAllowEmpty(v.([]any))) + request.WithAllowedValues(expandStringListAllowEmpty(v.(*schema.Set).List())) } if err := client.Tags.Create(ctx, request); err != nil { return diag.FromErr(err) } - d.SetId(helpers.EncodeSnowflakeID(id)) + d.SetId(helpers.EncodeResourceIdentifier(id)) + if v, ok := d.GetOk("masking_policies"); ok { + ids, err := parseSchemaObjectIdentifierSet(v) + if err != nil { + return diag.FromErr(err) + } + err = client.Tags.Alter(ctx, sdk.NewAlterTagRequest(id).WithSet(sdk.NewTagSetRequest().WithMaskingPolicies(ids))) + if err != nil { + return diag.FromErr(fmt.Errorf("error setting masking policies in tag %v err = %w", id.Name(), err)) + } + } return ReadContextTag(ctx, d, meta) } func ReadContextTag(ctx context.Context, d *schema.ResourceData, meta any) diag.Diagnostics { - diags := diag.Diagnostics{} client := meta.(*provider.Context).Client - id := helpers.DecodeSnowflakeID(d.Id()).(sdk.SchemaObjectIdentifier) - - tag, err := client.Tags.ShowByID(ctx, id) + id, err := sdk.ParseSchemaObjectIdentifier(d.Id()) if err != nil { return diag.FromErr(err) } - if err := d.Set(FullyQualifiedNameAttributeName, id.FullyQualifiedName()); err != nil { - return diag.FromErr(err) - } - if err := d.Set("name", tag.Name); err != nil { - return diag.FromErr(err) - } - if err := d.Set("database", tag.DatabaseName); err != nil { - return diag.FromErr(err) - } - if err := d.Set("schema", tag.SchemaName); err != nil { - return diag.FromErr(err) - } - if err := d.Set("comment", tag.Comment); err != nil { + + tag, err := client.Tags.ShowByID(ctx, id) + if err != nil { + if errors.Is(err, sdk.ErrObjectNotFound) { + d.SetId("") + return diag.Diagnostics{ + diag.Diagnostic{ + Severity: diag.Warning, + Summary: "Failed to query tag. 
Marking the resource as removed.", + Detail: fmt.Sprintf("Tag: %s, Err: %s", id.FullyQualifiedName(), err), + }, + } + } return diag.FromErr(err) } - if err := d.Set("allowed_values", tag.AllowedValues); err != nil { - return diag.FromErr(err) + + errs := errors.Join( + d.Set("name", tag.Name), + d.Set(FullyQualifiedNameAttributeName, id.FullyQualifiedName()), + d.Set(ShowOutputAttributeName, []map[string]any{schemas.TagToSchema(tag)}), + d.Set("comment", tag.Comment), + d.Set("allowed_values", tag.AllowedValues), + func() error { + policyRefs, err := client.PolicyReferences.GetForEntity(ctx, sdk.NewGetForEntityPolicyReferenceRequest(id, sdk.PolicyEntityDomainTag)) + if err != nil { + return fmt.Errorf("getting policy references for tag: %w", err) + } + policyIds := make([]string, 0, len(policyRefs)) + for _, p := range policyRefs { + if p.PolicyKind == sdk.PolicyKindMaskingPolicy { + policyId := sdk.NewSchemaObjectIdentifier(*p.PolicyDb, *p.PolicySchema, p.PolicyName) + policyIds = append(policyIds, policyId.FullyQualifiedName()) + } + } + return d.Set("masking_policies", policyIds) + }(), + ) + if errs != nil { + return diag.FromErr(errs) } - return diags + return nil } func UpdateContextTag(ctx context.Context, d *schema.ResourceData, meta any) diag.Diagnostics { client := meta.(*provider.Context).Client - id := helpers.DecodeSnowflakeID(d.Id()).(sdk.SchemaObjectIdentifier) + id, err := sdk.ParseSchemaObjectIdentifier(d.Id()) + if err != nil { + return diag.FromErr(err) + } + if d.HasChange("name") { + newId := sdk.NewSchemaObjectIdentifierInSchema(id.SchemaId(), d.Get("name").(string)) + + err := client.Tags.Alter(ctx, sdk.NewAlterTagRequest(id).WithRename(newId)) + if err != nil { + return diag.FromErr(fmt.Errorf("error renaming tag %v err = %w", d.Id(), err)) + } + d.SetId(helpers.EncodeResourceIdentifier(newId)) + id = newId + } if d.HasChange("comment") { comment, ok := d.GetOk("comment") if ok { @@ -165,30 +246,56 @@ func UpdateContextTag(ctx context.Context, d *schema.ResourceData, meta any) dia } if d.HasChange("allowed_values") { o, n := d.GetChange("allowed_values") - oldAllowedValues := expandStringListAllowEmpty(o.([]any)) - newAllowedValues := expandStringListAllowEmpty(n.([]any)) - var allowedValuesToAdd, allowedValuesToRemove []string + oldAllowedValues := expandStringListAllowEmpty(o.(*schema.Set).List()) + newAllowedValues := expandStringListAllowEmpty(n.(*schema.Set).List()) + + addedItems, removedItems := ListDiff(oldAllowedValues, newAllowedValues) + + if len(addedItems) > 0 { + if err := client.Tags.Alter(ctx, sdk.NewAlterTagRequest(id).WithAdd(addedItems)); err != nil { + return diag.FromErr(err) + } + } + + if len(removedItems) > 0 { + if err := client.Tags.Alter(ctx, sdk.NewAlterTagRequest(id).WithDrop(removedItems)); err != nil { + return diag.FromErr(err) + } + } + } + if d.HasChange("masking_policies") { + o, n := d.GetChange("masking_policies") + oldAllowedValues := expandStringList(o.(*schema.Set).List()) + newAllowedValues := expandStringList(n.(*schema.Set).List()) + + addedItems, removedItems := ListDiff(oldAllowedValues, newAllowedValues) - for _, oldAllowedValue := range oldAllowedValues { - if !slices.Contains(newAllowedValues, oldAllowedValue) { - allowedValuesToRemove = append(allowedValuesToRemove, oldAllowedValue) + removedids := make([]sdk.SchemaObjectIdentifier, len(removedItems)) + for i, idRaw := range removedItems { + id, err := sdk.ParseSchemaObjectIdentifier(idRaw) + if err != nil { + return diag.FromErr(err) } + removedids[i] = id } - for
_, newAllowedValue := range newAllowedValues { - if !slices.Contains(oldAllowedValues, newAllowedValue) { - allowedValuesToAdd = append(allowedValuesToAdd, newAllowedValue) + addedids := make([]sdk.SchemaObjectIdentifier, len(addedItems)) + for i, idRaw := range addedItems { + id, err := sdk.ParseSchemaObjectIdentifier(idRaw) + if err != nil { + return diag.FromErr(err) } + addedids[i] = id } - if len(allowedValuesToAdd) > 0 { - if err := client.Tags.Alter(ctx, sdk.NewAlterTagRequest(id).WithAdd(allowedValuesToAdd)); err != nil { + if len(removedItems) > 0 { + if err := client.Tags.Alter(ctx, sdk.NewAlterTagRequest(id).WithUnset(sdk.NewTagUnsetRequest().WithMaskingPolicies(removedids))); err != nil { return diag.FromErr(err) } } - if len(allowedValuesToRemove) > 0 { - if err := client.Tags.Alter(ctx, sdk.NewAlterTagRequest(id).WithDrop(allowedValuesToRemove)); err != nil { + if len(addedItems) > 0 { + if err := client.Tags.Alter(ctx, sdk.NewAlterTagRequest(id).WithSet(sdk.NewTagSetRequest().WithMaskingPolicies(addedids))); err != nil { return diag.FromErr(err) } } @@ -198,21 +305,31 @@ func UpdateContextTag(ctx context.Context, d *schema.ResourceData, meta any) dia } func DeleteContextTag(ctx context.Context, d *schema.ResourceData, meta any) diag.Diagnostics { client := meta.(*provider.Context).Client - id := helpers.DecodeSnowflakeID(d.Id()).(sdk.SchemaObjectIdentifier) - if err := client.Tags.Drop(ctx, sdk.NewDropTagRequest(id)); err != nil { + id, err := sdk.ParseSchemaObjectIdentifier(d.Id()) + if err != nil { + return diag.FromErr(err) + } + // before dropping the resource, all policies must be unset + policyRefs, err := client.PolicyReferences.GetForEntity(ctx, sdk.NewGetForEntityPolicyReferenceRequest(id, sdk.PolicyEntityDomainTag)) + if err != nil { + return diag.FromErr(fmt.Errorf("getting policy references for tag: %w", err)) + } + removedPolicies := make([]sdk.SchemaObjectIdentifier, 0, len(policyRefs)) + for _, p := range policyRefs { + if p.PolicyKind == sdk.PolicyKindMaskingPolicy { + policyName := sdk.NewSchemaObjectIdentifier(*p.PolicyDb, *p.PolicySchema, p.PolicyName) + removedPolicies = append(removedPolicies, policyName) + } + } + if len(removedPolicies) > 0 { + log.Printf("[DEBUG] unsetting masking policies before dropping tag: %s\n", id.FullyQualifiedName()) + if err := client.Tags.Alter(ctx, sdk.NewAlterTagRequest(id).WithUnset(sdk.NewTagUnsetRequest().WithMaskingPolicies(removedPolicies))); err != nil { + return diag.FromErr(err) + } + } + if err := client.Tags.Drop(ctx, sdk.NewDropTagRequest(id).WithIfExists(true)); err != nil { return diag.FromErr(err) } d.SetId("") return nil } - -// Returns the slice of strings for inputed allowed values.
-func expandAllowedValues(avChangeSet any) []string { - avList := avChangeSet.([]any) - newAvs := make([]string, len(avList)) - for idx, value := range avList { - newAvs[idx] = fmt.Sprintf("%v", value) - } - - return newAvs -} diff --git a/pkg/resources/tag_acceptance_test.go b/pkg/resources/tag_acceptance_test.go index f831685a6e..98ee76801e 100644 --- a/pkg/resources/tag_acceptance_test.go +++ b/pkg/resources/tag_acceptance_test.go @@ -1,34 +1,53 @@ package resources_test import ( + "fmt" "testing" acc "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/bettertestspoc/assert" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/bettertestspoc/assert/resourceassert" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/bettertestspoc/assert/resourceshowoutputassert" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/bettertestspoc/config" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/bettertestspoc/config/model" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/testenvs" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/internal/snowflakeroles" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/provider/resources" - "github.com/hashicorp/terraform-plugin-testing/config" + tfconfig "github.com/hashicorp/terraform-plugin-testing/config" "github.com/hashicorp/terraform-plugin-testing/helper/resource" + "github.com/hashicorp/terraform-plugin-testing/plancheck" "github.com/hashicorp/terraform-plugin-testing/tfversion" ) -func TestAcc_Tag(t *testing.T) { +func TestAcc_Tag_basic(t *testing.T) { + _ = testenvs.GetOrSkipTest(t, testenvs.EnableAcceptance) + acc.TestAccPreCheck(t) id := acc.TestClient().Ids.RandomSchemaObjectIdentifier() - resourceName := "snowflake_tag.t" - m := func() map[string]config.Variable { - return map[string]config.Variable{ - "name": config.StringVariable(id.Name()), - "database": config.StringVariable(acc.TestDatabaseName), - "schema": config.StringVariable(acc.TestSchemaName), - "comment": config.StringVariable("Terraform acceptance test"), - "allowed_values": config.ListVariable(config.StringVariable("")), - } - } - - variableSet2 := m() - variableSet2["allowed_values"] = config.ListVariable(config.StringVariable("alv1"), config.StringVariable("alv2")) - - variableSet3 := m() - variableSet3["comment"] = config.StringVariable("Terraform acceptance test - updated") + + maskingPolicy, maskingPolicyCleanup := acc.TestClient().MaskingPolicy.CreateMaskingPolicy(t) + t.Cleanup(maskingPolicyCleanup) + + maskingPolicy2, maskingPolicy2Cleanup := acc.TestClient().MaskingPolicy.CreateMaskingPolicy(t) + t.Cleanup(maskingPolicy2Cleanup) + + baseModel := model.Tag("test", id.DatabaseName(), id.Name(), id.SchemaName()) + + modelWithExtraFields := model.Tag("test", id.DatabaseName(), id.Name(), id.SchemaName()). + WithComment("foo"). + WithAllowedValues("foo", "", "bar"). + WithMaskingPolicies(maskingPolicy.ID()) + + modelWithDifferentListOrder := model.Tag("test", id.DatabaseName(), id.Name(), id.SchemaName()). + WithComment("foo"). + WithAllowedValues("", "bar", "foo"). + WithMaskingPolicies(maskingPolicy.ID()) + + modelWithDifferentValues := model.Tag("test", id.DatabaseName(), id.Name(), id.SchemaName()). + WithComment("bar"). + WithAllowedValues("abc", "def", ""). 
+ WithMaskingPolicies(maskingPolicy2.ID()) resource.Test(t, resource.TestCase{ ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, @@ -38,52 +57,334 @@ func TestAcc_Tag(t *testing.T) { }, CheckDestroy: acc.CheckDestroy(t, resources.Tag), Steps: []resource.TestStep{ + // base model { - ConfigDirectory: acc.ConfigurationDirectory("TestAcc_Tag/basic"), - ConfigVariables: m(), - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr(resourceName, "name", id.Name()), - resource.TestCheckResourceAttr(resourceName, "fully_qualified_name", id.FullyQualifiedName()), - resource.TestCheckResourceAttr(resourceName, "database", acc.TestDatabaseName), - resource.TestCheckResourceAttr(resourceName, "schema", acc.TestSchemaName), - resource.TestCheckResourceAttr(resourceName, "allowed_values.#", "1"), - resource.TestCheckResourceAttr(resourceName, "allowed_values.0", ""), - resource.TestCheckResourceAttr(resourceName, "comment", "Terraform acceptance test"), + Config: config.FromModel(t, baseModel), + Check: assert.AssertThat(t, resourceassert.TagResource(t, baseModel.ResourceReference()). + HasNameString(id.Name()). + HasDatabaseString(id.DatabaseName()). + HasSchemaString(id.SchemaName()). + HasFullyQualifiedNameString(id.FullyQualifiedName()). + HasCommentString(""). + HasNoMaskingPolicies(). + HasNoAllowedValues(), + resourceshowoutputassert.TagShowOutput(t, baseModel.ResourceReference()). + HasCreatedOnNotEmpty(). + HasName(id.Name()). + HasDatabaseName(id.DatabaseName()). + HasSchemaName(id.SchemaName()). + HasComment(""). + HasOwner(snowflakeroles.Accountadmin.Name()). + HasOwnerRoleType("ROLE"). + HasNoAllowedValues(), ), }, - - // test - change allowed values + // import without optionals + { + Config: config.FromModel(t, baseModel), + ResourceName: baseModel.ResourceReference(), + ImportState: true, + ImportStateVerify: true, + }, + // set all fields { - ConfigDirectory: acc.ConfigurationDirectory("TestAcc_Tag/basic"), - ConfigVariables: variableSet2, - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr(resourceName, "name", id.Name()), - resource.TestCheckResourceAttr(resourceName, "fully_qualified_name", id.FullyQualifiedName()), - resource.TestCheckResourceAttr(resourceName, "allowed_values.#", "2"), - resource.TestCheckResourceAttr(resourceName, "allowed_values.0", "alv1"), - resource.TestCheckResourceAttr(resourceName, "allowed_values.1", "alv2"), + Config: config.FromModel(t, modelWithExtraFields), + Check: assert.AssertThat(t, resourceassert.TagResource(t, modelWithExtraFields.ResourceReference()). + HasNameString(id.Name()). + HasDatabaseString(id.DatabaseName()). + HasSchemaString(id.SchemaName()). + HasFullyQualifiedNameString(id.FullyQualifiedName()). 
+ HasCommentString("foo"), + assert.Check(resource.TestCheckResourceAttr(modelWithExtraFields.ResourceReference(), "masking_policies.#", "1")), + assert.Check(resource.TestCheckTypeSetElemAttr(modelWithExtraFields.ResourceReference(), "masking_policies.*", maskingPolicy.ID().FullyQualifiedName())), + assert.Check(resource.TestCheckResourceAttr(modelWithExtraFields.ResourceReference(), "allowed_values.#", "3")), + assert.Check(resource.TestCheckTypeSetElemAttr(modelWithExtraFields.ResourceReference(), "allowed_values.*", "foo")), + assert.Check(resource.TestCheckTypeSetElemAttr(modelWithExtraFields.ResourceReference(), "allowed_values.*", "")), + assert.Check(resource.TestCheckTypeSetElemAttr(modelWithExtraFields.ResourceReference(), "allowed_values.*", "bar")), + resourceshowoutputassert.TagShowOutput(t, modelWithExtraFields.ResourceReference()). + HasCreatedOnNotEmpty(). + HasName(id.Name()). + HasDatabaseName(id.DatabaseName()). + HasSchemaName(id.SchemaName()). + HasComment("foo"). + HasOwner(snowflakeroles.Accountadmin.Name()). + HasOwnerRoleType("ROLE"), + assert.Check(resource.TestCheckResourceAttr(modelWithExtraFields.ResourceReference(), "show_output.0.allowed_values.#", "3")), + assert.Check(resource.TestCheckTypeSetElemAttr(modelWithExtraFields.ResourceReference(), "show_output.0.allowed_values.*", "foo")), + assert.Check(resource.TestCheckTypeSetElemAttr(modelWithExtraFields.ResourceReference(), "show_output.0.allowed_values.*", "")), + assert.Check(resource.TestCheckTypeSetElemAttr(modelWithExtraFields.ResourceReference(), "show_output.0.allowed_values.*", "bar")), ), }, - - // test - change comment + // external change + { + PreConfig: func() { + acc.TestClient().Tag.Alter(t, sdk.NewAlterTagRequest(id).WithDrop([]string{"foo"})) + }, + Config: config.FromModel(t, modelWithExtraFields), + Check: assert.AssertThat(t, resourceassert.TagResource(t, modelWithExtraFields.ResourceReference()). + HasNameString(id.Name()). + HasDatabaseString(id.DatabaseName()). + HasSchemaString(id.SchemaName()). + HasFullyQualifiedNameString(id.FullyQualifiedName()). + HasCommentString("foo"), + assert.Check(resource.TestCheckResourceAttr(modelWithExtraFields.ResourceReference(), "masking_policies.#", "1")), + assert.Check(resource.TestCheckTypeSetElemAttr(modelWithExtraFields.ResourceReference(), "masking_policies.*", maskingPolicy.ID().FullyQualifiedName())), + assert.Check(resource.TestCheckResourceAttr(modelWithExtraFields.ResourceReference(), "allowed_values.#", "3")), + assert.Check(resource.TestCheckTypeSetElemAttr(modelWithExtraFields.ResourceReference(), "allowed_values.*", "foo")), + assert.Check(resource.TestCheckTypeSetElemAttr(modelWithExtraFields.ResourceReference(), "allowed_values.*", "")), + assert.Check(resource.TestCheckTypeSetElemAttr(modelWithExtraFields.ResourceReference(), "allowed_values.*", "bar")), + resourceshowoutputassert.TagShowOutput(t, modelWithExtraFields.ResourceReference()). + HasCreatedOnNotEmpty(). + HasName(id.Name()). + HasDatabaseName(id.DatabaseName()). + HasSchemaName(id.SchemaName()). + HasComment("foo"). + HasOwner(snowflakeroles.Accountadmin.Name()). 
+ HasOwnerRoleType("ROLE"), + assert.Check(resource.TestCheckResourceAttr(modelWithExtraFields.ResourceReference(), "show_output.0.allowed_values.#", "3")), + assert.Check(resource.TestCheckTypeSetElemAttr(modelWithExtraFields.ResourceReference(), "show_output.0.allowed_values.*", "foo")), + assert.Check(resource.TestCheckTypeSetElemAttr(modelWithExtraFields.ResourceReference(), "show_output.0.allowed_values.*", "")), + assert.Check(resource.TestCheckTypeSetElemAttr(modelWithExtraFields.ResourceReference(), "show_output.0.allowed_values.*", "bar")), + ), + }, + // different set ordering + { + Config: config.FromModel(t, modelWithDifferentListOrder), + ConfigPlanChecks: resource.ConfigPlanChecks{ + PreApply: []plancheck.PlanCheck{ + plancheck.ExpectResourceAction(modelWithDifferentListOrder.ResourceReference(), plancheck.ResourceActionNoop), + }, + }, + Check: assert.AssertThat(t, resourceassert.TagResource(t, modelWithDifferentListOrder.ResourceReference()). + HasNameString(id.Name()). + HasDatabaseString(id.DatabaseName()). + HasSchemaString(id.SchemaName()). + HasFullyQualifiedNameString(id.FullyQualifiedName()). + HasCommentString("foo"), + assert.Check(resource.TestCheckResourceAttr(modelWithDifferentListOrder.ResourceReference(), "masking_policies.#", "1")), + assert.Check(resource.TestCheckTypeSetElemAttr(modelWithDifferentListOrder.ResourceReference(), "masking_policies.*", maskingPolicy.ID().FullyQualifiedName())), + assert.Check(resource.TestCheckResourceAttr(modelWithDifferentListOrder.ResourceReference(), "allowed_values.#", "3")), + assert.Check(resource.TestCheckTypeSetElemAttr(modelWithDifferentListOrder.ResourceReference(), "allowed_values.*", "foo")), + assert.Check(resource.TestCheckTypeSetElemAttr(modelWithDifferentListOrder.ResourceReference(), "allowed_values.*", "")), + assert.Check(resource.TestCheckTypeSetElemAttr(modelWithDifferentListOrder.ResourceReference(), "allowed_values.*", "bar")), + resourceshowoutputassert.TagShowOutput(t, modelWithDifferentListOrder.ResourceReference()). + HasCreatedOnNotEmpty(). + HasName(id.Name()). + HasDatabaseName(id.DatabaseName()). + HasSchemaName(id.SchemaName()). + HasComment("foo"). + HasOwner(snowflakeroles.Accountadmin.Name()). + HasOwnerRoleType("ROLE"), + assert.Check(resource.TestCheckResourceAttr(modelWithDifferentListOrder.ResourceReference(), "show_output.0.allowed_values.#", "3")), + assert.Check(resource.TestCheckTypeSetElemAttr(modelWithDifferentListOrder.ResourceReference(), "show_output.0.allowed_values.*", "foo")), + assert.Check(resource.TestCheckTypeSetElemAttr(modelWithDifferentListOrder.ResourceReference(), "show_output.0.allowed_values.*", "")), + assert.Check(resource.TestCheckTypeSetElemAttr(modelWithDifferentListOrder.ResourceReference(), "show_output.0.allowed_values.*", "bar")), + ), + }, + // change some values + { + Config: config.FromModel(t, modelWithDifferentValues), + Check: assert.AssertThat(t, resourceassert.TagResource(t, modelWithDifferentValues.ResourceReference()). + HasNameString(id.Name()). + HasDatabaseString(id.DatabaseName()). + HasSchemaString(id.SchemaName()). + HasFullyQualifiedNameString(id.FullyQualifiedName()). 
+ HasCommentString("bar"), + assert.Check(resource.TestCheckResourceAttr(modelWithDifferentValues.ResourceReference(), "masking_policies.#", "1")), + assert.Check(resource.TestCheckTypeSetElemAttr(modelWithDifferentValues.ResourceReference(), "masking_policies.*", maskingPolicy2.ID().FullyQualifiedName())), + assert.Check(resource.TestCheckResourceAttr(modelWithDifferentValues.ResourceReference(), "allowed_values.#", "3")), + assert.Check(resource.TestCheckTypeSetElemAttr(modelWithDifferentValues.ResourceReference(), "allowed_values.*", "abc")), + assert.Check(resource.TestCheckTypeSetElemAttr(modelWithDifferentValues.ResourceReference(), "allowed_values.*", "")), + assert.Check(resource.TestCheckTypeSetElemAttr(modelWithDifferentValues.ResourceReference(), "allowed_values.*", "def")), + resourceshowoutputassert.TagShowOutput(t, modelWithDifferentValues.ResourceReference()). + HasCreatedOnNotEmpty(). + HasName(id.Name()). + HasDatabaseName(id.DatabaseName()). + HasSchemaName(id.SchemaName()). + HasComment("bar"). + HasOwner(snowflakeroles.Accountadmin.Name()). + HasOwnerRoleType("ROLE"), + assert.Check(resource.TestCheckResourceAttr(modelWithDifferentValues.ResourceReference(), "show_output.0.allowed_values.#", "3")), + assert.Check(resource.TestCheckTypeSetElemAttr(modelWithDifferentValues.ResourceReference(), "show_output.0.allowed_values.*", "abc")), + assert.Check(resource.TestCheckTypeSetElemAttr(modelWithDifferentValues.ResourceReference(), "show_output.0.allowed_values.*", "")), + assert.Check(resource.TestCheckTypeSetElemAttr(modelWithDifferentValues.ResourceReference(), "show_output.0.allowed_values.*", "def")), + ), + }, + // unset optionals { - ConfigDirectory: acc.ConfigurationDirectory("TestAcc_Tag/basic"), - ConfigVariables: variableSet3, - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr(resourceName, "name", id.Name()), - resource.TestCheckResourceAttr(resourceName, "fully_qualified_name", id.FullyQualifiedName()), - resource.TestCheckResourceAttr(resourceName, "comment", "Terraform acceptance test - updated"), + Config: config.FromModel(t, baseModel), + Check: assert.AssertThat(t, resourceassert.TagResource(t, baseModel.ResourceReference()). + HasNameString(id.Name()). + HasDatabaseString(id.DatabaseName()). + HasSchemaString(id.SchemaName()). + HasFullyQualifiedNameString(id.FullyQualifiedName()). + HasCommentString(""). + HasMaskingPoliciesLength(0). + HasAllowedValuesLength(0), + resourceshowoutputassert.TagShowOutput(t, baseModel.ResourceReference()). + HasCreatedOnNotEmpty(). + HasName(id.Name()). + HasDatabaseName(id.DatabaseName()). + HasSchemaName(id.SchemaName()). + HasComment(""). + HasOwner(snowflakeroles.Accountadmin.Name()). + HasOwnerRoleType("ROLE"). + HasNoAllowedValues(), ), }, + }, + }) +} + +func TestAcc_Tag_complete(t *testing.T) { + _ = testenvs.GetOrSkipTest(t, testenvs.EnableAcceptance) + acc.TestAccPreCheck(t) + id := acc.TestClient().Ids.RandomSchemaObjectIdentifier() + + maskingPolicy, maskingPolicyCleanup := acc.TestClient().MaskingPolicy.CreateMaskingPolicy(t) + t.Cleanup(maskingPolicyCleanup) + + model := model.Tag("test", id.DatabaseName(), id.Name(), id.SchemaName()). + WithComment("foo"). + WithAllowedValuesValue(tfconfig.ListVariable(tfconfig.StringVariable("foo"), tfconfig.StringVariable(""), tfconfig.StringVariable("bar"))). 
+ WithMaskingPoliciesValue(tfconfig.ListVariable(tfconfig.StringVariable(maskingPolicy.ID().FullyQualifiedName()))) - // test - import + resource.Test(t, resource.TestCase{ + ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, + TerraformVersionChecks: []tfversion.TerraformVersionCheck{ + tfversion.RequireAbove(tfversion.Version1_5_0), + }, + CheckDestroy: acc.CheckDestroy(t, resources.Tag), + Steps: []resource.TestStep{ { - ConfigDirectory: acc.ConfigurationDirectory("TestAcc_Tag/basic"), - ConfigVariables: variableSet3, - ResourceName: resourceName, + Config: config.FromModel(t, model), + Check: assert.AssertThat(t, resourceassert.TagResource(t, model.ResourceReference()). + HasNameString(id.Name()). + HasDatabaseString(id.DatabaseName()). + HasSchemaString(id.SchemaName()). + HasFullyQualifiedNameString(id.FullyQualifiedName()). + HasCommentString("foo"), + assert.Check(resource.TestCheckResourceAttr(model.ResourceReference(), "masking_policies.#", "1")), + assert.Check(resource.TestCheckTypeSetElemAttr(model.ResourceReference(), "masking_policies.*", maskingPolicy.ID().FullyQualifiedName())), + assert.Check(resource.TestCheckResourceAttr(model.ResourceReference(), "allowed_values.#", "3")), + assert.Check(resource.TestCheckTypeSetElemAttr(model.ResourceReference(), "allowed_values.*", "foo")), + assert.Check(resource.TestCheckTypeSetElemAttr(model.ResourceReference(), "allowed_values.*", "")), + assert.Check(resource.TestCheckTypeSetElemAttr(model.ResourceReference(), "allowed_values.*", "bar")), + resourceshowoutputassert.TagShowOutput(t, model.ResourceReference()). + HasCreatedOnNotEmpty(). + HasName(id.Name()). + HasDatabaseName(id.DatabaseName()). + HasSchemaName(id.SchemaName()). + HasComment("foo"). + HasOwner(snowflakeroles.Accountadmin.Name()). + HasOwnerRoleType("ROLE"), + assert.Check(resource.TestCheckResourceAttr(model.ResourceReference(), "show_output.0.allowed_values.#", "3")), + assert.Check(resource.TestCheckTypeSetElemAttr(model.ResourceReference(), "show_output.0.allowed_values.*", "foo")), + assert.Check(resource.TestCheckTypeSetElemAttr(model.ResourceReference(), "show_output.0.allowed_values.*", "")), + assert.Check(resource.TestCheckTypeSetElemAttr(model.ResourceReference(), "show_output.0.allowed_values.*", "bar")), + ), + }, + { + Config: config.FromModel(t, model), + ResourceName: model.ResourceReference(), ImportState: true, ImportStateVerify: true, }, }, }) } + +func TestAcc_Tag_Rename(t *testing.T) { + oldId := acc.TestClient().Ids.RandomSchemaObjectIdentifier() + newId := acc.TestClient().Ids.RandomSchemaObjectIdentifier() + + modelWithOldId := model.Tag("test", oldId.DatabaseName(), oldId.Name(), oldId.SchemaName()) + modelWithNewId := model.Tag("test", newId.DatabaseName(), newId.Name(), newId.SchemaName()) + + resource.Test(t, resource.TestCase{ + ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, + PreCheck: func() { acc.TestAccPreCheck(t) }, + TerraformVersionChecks: []tfversion.TerraformVersionCheck{ + tfversion.RequireAbove(tfversion.Version1_5_0), + }, + CheckDestroy: acc.CheckDestroy(t, resources.Tag), + Steps: []resource.TestStep{ + { + Config: config.FromModel(t, modelWithOldId), + Check: assert.AssertThat(t, resourceassert.TagResource(t, modelWithOldId.ResourceReference()). + HasNameString(oldId.Name()). + HasDatabaseString(oldId.DatabaseName()). 
+ HasSchemaString(oldId.SchemaName()), + ), + }, + { + Config: config.FromModel(t, modelWithNewId), + ConfigPlanChecks: resource.ConfigPlanChecks{ + PreApply: []plancheck.PlanCheck{ + plancheck.ExpectResourceAction(modelWithOldId.ResourceReference(), plancheck.ResourceActionUpdate), + }, + }, + Check: assert.AssertThat(t, resourceassert.TagResource(t, modelWithNewId.ResourceReference()). + HasNameString(newId.Name()). + HasDatabaseString(newId.DatabaseName()). + HasSchemaString(newId.SchemaName()), + ), + }, + }, + }) +} + +func TestAcc_Tag_migrateFromVersion_0_98_0(t *testing.T) { + t.Setenv(string(testenvs.ConfigureClientOnce), "") + _ = testenvs.GetOrSkipTest(t, testenvs.EnableAcceptance) + acc.TestAccPreCheck(t) + id := acc.TestClient().Ids.RandomSchemaObjectIdentifier() + + model := model.Tag("test", id.DatabaseName(), id.Name(), id.SchemaName()). + WithAllowedValuesValue(tfconfig.ListVariable(tfconfig.StringVariable("foo"), tfconfig.StringVariable("bar"))) + + resource.Test(t, resource.TestCase{ + TerraformVersionChecks: []tfversion.TerraformVersionCheck{ + tfversion.RequireAbove(tfversion.Version1_5_0), + }, + Steps: []resource.TestStep{ + { + ExternalProviders: acc.ExternalProviderWithExactVersion("0.98.0"), + Config: tag_v_0_98_0(id), + Check: assert.AssertThat(t, resourceassert.TagResource(t, model.ResourceReference()). + HasNameString(id.Name()), + assert.Check(resource.TestCheckResourceAttr(model.ResourceReference(), "allowed_values.#", "2")), + assert.Check(resource.TestCheckResourceAttr(model.ResourceReference(), "allowed_values.0", "bar")), + assert.Check(resource.TestCheckResourceAttr(model.ResourceReference(), "allowed_values.1", "foo")), + ), + }, + { + ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, + Config: config.FromModel(t, model), + ConfigPlanChecks: resource.ConfigPlanChecks{ + PreApply: []plancheck.PlanCheck{ + plancheck.ExpectResourceAction(model.ResourceReference(), plancheck.ResourceActionNoop), + }, + }, + Check: assert.AssertThat(t, resourceassert.TagResource(t, model.ResourceReference()). 
+ HasNameString(id.Name()), + assert.Check(resource.TestCheckResourceAttr(model.ResourceReference(), "allowed_values.#", "2")), + assert.Check(resource.TestCheckTypeSetElemAttr(model.ResourceReference(), "allowed_values.*", "foo")), + assert.Check(resource.TestCheckTypeSetElemAttr(model.ResourceReference(), "allowed_values.*", "bar")), + ), + }, + }, + }) +} + +func tag_v_0_98_0(id sdk.SchemaObjectIdentifier) string { + s := ` +resource "snowflake_tag" "test" { + name = "%[1]s" + database = "%[2]s" + schema = "%[3]s" + allowed_values = ["bar", "foo"] +} +` + return fmt.Sprintf(s, id.Name(), id.DatabaseName(), id.SchemaName()) +} diff --git a/pkg/resources/tag_association_acceptance_test.go b/pkg/resources/tag_association_acceptance_test.go index 420116c57d..37e1d54804 100644 --- a/pkg/resources/tag_association_acceptance_test.go +++ b/pkg/resources/tag_association_acceptance_test.go @@ -17,11 +17,11 @@ import ( ) func TestAcc_TagAssociation(t *testing.T) { - tagName := acc.TestClient().Ids.Alpha() + tagId := acc.TestClient().Ids.RandomSchemaObjectIdentifier() resourceName := "snowflake_tag_association.test" m := func() map[string]config.Variable { return map[string]config.Variable{ - "tag_name": config.StringVariable(tagName), + "tag_name": config.StringVariable(tagId.Name()), "database": config.StringVariable(acc.TestDatabaseName), "schema": config.StringVariable(acc.TestSchemaName), } @@ -39,7 +39,7 @@ func TestAcc_TagAssociation(t *testing.T) { ConfigVariables: m(), Check: resource.ComposeTestCheckFunc( resource.TestCheckResourceAttr(resourceName, "object_type", "DATABASE"), - resource.TestCheckResourceAttr(resourceName, "tag_id", fmt.Sprintf("%s|%s|%s", acc.TestDatabaseName, acc.TestSchemaName, tagName)), + resource.TestCheckResourceAttr(resourceName, "tag_id", tagId.FullyQualifiedName()), resource.TestCheckResourceAttr(resourceName, "tag_value", "finance"), ), }, @@ -48,11 +48,11 @@ func TestAcc_TagAssociation(t *testing.T) { } func TestAcc_TagAssociationSchema(t *testing.T) { - tagName := acc.TestClient().Ids.Alpha() + tagId := acc.TestClient().Ids.RandomSchemaObjectIdentifier() resourceName := "snowflake_tag_association.test" m := func() map[string]config.Variable { return map[string]config.Variable{ - "tag_name": config.StringVariable(tagName), + "tag_name": config.StringVariable(tagId.Name()), "database": config.StringVariable(acc.TestDatabaseName), "schema": config.StringVariable(acc.TestSchemaName), } @@ -77,12 +77,12 @@ func TestAcc_TagAssociationSchema(t *testing.T) { } func TestAcc_TagAssociationColumn(t *testing.T) { - tagName := acc.TestClient().Ids.Alpha() + tagId := acc.TestClient().Ids.RandomSchemaObjectIdentifier() tableName := acc.TestClient().Ids.Alpha() resourceName := "snowflake_tag_association.test" m := func() map[string]config.Variable { return map[string]config.Variable{ - "tag_name": config.StringVariable(tagName), + "tag_name": config.StringVariable(tagId.Name()), "table_name": config.StringVariable(tableName), "database": config.StringVariable(acc.TestDatabaseName), "schema": config.StringVariable(acc.TestSchemaName), @@ -101,7 +101,7 @@ func TestAcc_TagAssociationColumn(t *testing.T) { ConfigVariables: m(), Check: resource.ComposeTestCheckFunc( resource.TestCheckResourceAttr(resourceName, "object_type", "COLUMN"), - resource.TestCheckResourceAttr(resourceName, "tag_id", fmt.Sprintf("%s|%s|%s", acc.TestDatabaseName, acc.TestSchemaName, tagName)), + resource.TestCheckResourceAttr(resourceName, "tag_id", tagId.FullyQualifiedName()), 
resource.TestCheckResourceAttr(resourceName, "tag_value", "TAG_VALUE"), resource.TestCheckResourceAttr(resourceName, "object_identifier.0.%", "3"), resource.TestCheckResourceAttr(resourceName, "object_identifier.0.name", fmt.Sprintf("%s.column_name", tableName)), @@ -113,12 +113,12 @@ func TestAcc_TagAssociationColumn(t *testing.T) { } func TestAcc_TagAssociationIssue1202(t *testing.T) { - tagName := acc.TestClient().Ids.Alpha() + tagId := acc.TestClient().Ids.RandomSchemaObjectIdentifier() tableName := acc.TestClient().Ids.Alpha() resourceName := "snowflake_tag_association.test" m := func() map[string]config.Variable { return map[string]config.Variable{ - "tag_name": config.StringVariable(tagName), + "tag_name": config.StringVariable(tagId.Name()), "table_name": config.StringVariable(tableName), "database": config.StringVariable(acc.TestDatabaseName), "schema": config.StringVariable(acc.TestSchemaName), @@ -137,7 +137,7 @@ func TestAcc_TagAssociationIssue1202(t *testing.T) { ConfigVariables: m(), Check: resource.ComposeTestCheckFunc( resource.TestCheckResourceAttr(resourceName, "object_type", "TABLE"), - resource.TestCheckResourceAttr(resourceName, "tag_id", fmt.Sprintf("%s|%s|%s", acc.TestDatabaseName, acc.TestSchemaName, tagName)), + resource.TestCheckResourceAttr(resourceName, "tag_id", tagId.FullyQualifiedName()), resource.TestCheckResourceAttr(resourceName, "tag_value", "v1"), ), }, @@ -147,7 +147,6 @@ func TestAcc_TagAssociationIssue1202(t *testing.T) { func TestAcc_TagAssociationIssue1909(t *testing.T) { tagId := acc.TestClient().Ids.RandomSchemaObjectIdentifier() - tagName := tagId.Name() tableName := acc.TestClient().Ids.Alpha() tableName2 := acc.TestClient().Ids.Alpha() columnName := "test.column" @@ -156,7 +155,7 @@ func TestAcc_TagAssociationIssue1909(t *testing.T) { objectID2 := sdk.NewTableColumnIdentifier(acc.TestDatabaseName, acc.TestSchemaName, tableName2, columnName) m := func() map[string]config.Variable { return map[string]config.Variable{ - "tag_name": config.StringVariable(tagName), + "tag_name": config.StringVariable(tagId.Name()), "table_name": config.StringVariable(tableName), "table_name2": config.StringVariable(tableName2), "column_name": config.StringVariable("test.column"), @@ -177,7 +176,7 @@ func TestAcc_TagAssociationIssue1909(t *testing.T) { ConfigVariables: m(), Check: resource.ComposeTestCheckFunc( resource.TestCheckResourceAttr(resourceName, "object_type", "COLUMN"), - resource.TestCheckResourceAttr(resourceName, "tag_id", fmt.Sprintf("%s|%s|%s", acc.TestDatabaseName, acc.TestSchemaName, tagName)), + resource.TestCheckResourceAttr(resourceName, "tag_id", tagId.FullyQualifiedName()), resource.TestCheckResourceAttr(resourceName, "tag_value", "v1"), testAccCheckTableColumnTagAssociation(tagId, objectID, "v1"), testAccCheckTableColumnTagAssociation(tagId, objectID2, "v1"), @@ -205,12 +204,12 @@ func testAccCheckTableColumnTagAssociation(tagID sdk.SchemaObjectIdentifier, obj func TestAcc_TagAssociationAccountIssues1910(t *testing.T) { // todo: use role with ORGADMIN in CI (SNOW-1165821) _ = testenvs.GetOrSkipTest(t, testenvs.TestAccountCreate) - tagName := acc.TestClient().Ids.Alpha() + tagId := acc.TestClient().Ids.RandomSchemaObjectIdentifier() accountName := acc.TestClient().Ids.Alpha() resourceName := "snowflake_tag_association.test" m := func() map[string]config.Variable { return map[string]config.Variable{ - "tag_name": config.StringVariable(tagName), + "tag_name": config.StringVariable(tagId.Name()), "account_name": config.StringVariable(accountName), 
"database": config.StringVariable(acc.TestDatabaseName), "schema": config.StringVariable(acc.TestSchemaName), @@ -230,7 +229,7 @@ func TestAcc_TagAssociationAccountIssues1910(t *testing.T) { ConfigVariables: m(), Check: resource.ComposeTestCheckFunc( resource.TestCheckResourceAttr(resourceName, "object_type", "ACCOUNT"), - resource.TestCheckResourceAttr(resourceName, "tag_id", fmt.Sprintf("%s|%s|%s", acc.TestDatabaseName, acc.TestSchemaName, tagName)), + resource.TestCheckResourceAttr(resourceName, "tag_id", tagId.Name()), resource.TestCheckResourceAttr(resourceName, "tag_value", "v1"), ), }, @@ -239,13 +238,13 @@ func TestAcc_TagAssociationAccountIssues1910(t *testing.T) { } func TestAcc_TagAssociationIssue1926(t *testing.T) { - tagName := acc.TestClient().Ids.Alpha() + tagId := acc.TestClient().Ids.RandomSchemaObjectIdentifier() tableName := acc.TestClient().Ids.Alpha() resourceName := "snowflake_tag_association.test" columnName := "test.column" m := func() map[string]config.Variable { return map[string]config.Variable{ - "tag_name": config.StringVariable(tagName), + "tag_name": config.StringVariable(tagId.Name()), "table_name": config.StringVariable(tableName), "column_name": config.StringVariable(columnName), "database": config.StringVariable(acc.TestDatabaseName), @@ -275,7 +274,7 @@ func TestAcc_TagAssociationIssue1926(t *testing.T) { ConfigVariables: m(), Check: resource.ComposeTestCheckFunc( resource.TestCheckResourceAttr(resourceName, "object_type", "COLUMN"), - resource.TestCheckResourceAttr(resourceName, "tag_id", fmt.Sprintf("%s|%s|%s", acc.TestDatabaseName, acc.TestSchemaName, tagName)), + resource.TestCheckResourceAttr(resourceName, "tag_id", tagId.FullyQualifiedName()), resource.TestCheckResourceAttr(resourceName, "tag_value", "v1"), resource.TestCheckResourceAttr(resourceName, "object_identifier.0.%", "3"), resource.TestCheckResourceAttr(resourceName, "object_identifier.0.name", fmt.Sprintf("%s.%s", tableName, columnName)), diff --git a/pkg/resources/tag_masking_policy_association.go b/pkg/resources/tag_masking_policy_association.go index f8f570f207..270ac027bd 100644 --- a/pkg/resources/tag_masking_policy_association.go +++ b/pkg/resources/tag_masking_policy_association.go @@ -13,6 +13,7 @@ import ( "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/helpers" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/internal/provider" + providerresources "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/provider/resources" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/snowflake" ) @@ -84,7 +85,8 @@ func TagMaskingPolicyAssociation() *schema.Resource { Importer: &schema.ResourceImporter{ StateContext: schema.ImportStatePassthroughContext, }, - Description: "Attach a masking policy to a tag. Requires a current warehouse to be set. Either with SNOWFLAKE_WAREHOUSE env variable or in current session. If no warehouse is provided, a temporary warehouse will be created.", + Description: "Attach a masking policy to a tag. Requires a current warehouse to be set. Either with SNOWFLAKE_WAREHOUSE env variable or in current session. 
If no warehouse is provided, a temporary warehouse will be created.", + DeprecationMessage: deprecatedResourceDescription(string(providerresources.Tag)), } } diff --git a/pkg/resources/tag_masking_policy_association_acceptance_test.go b/pkg/resources/tag_masking_policy_association_acceptance_test.go index 43da75a3a0..2493681dd1 100644 --- a/pkg/resources/tag_masking_policy_association_acceptance_test.go +++ b/pkg/resources/tag_masking_policy_association_acceptance_test.go @@ -5,12 +5,18 @@ import ( "testing" acc "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/testenvs" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk" "github.com/hashicorp/terraform-plugin-testing/config" "github.com/hashicorp/terraform-plugin-testing/helper/resource" "github.com/hashicorp/terraform-plugin-testing/tfversion" ) func TestAcc_TagMaskingPolicyAssociationBasic(t *testing.T) { + _ = testenvs.GetOrSkipTest(t, testenvs.EnableAcceptance) + acc.TestAccPreCheck(t) + tag, tagCleanup := acc.TestClient().Tag.CreateTag(t) + t.Cleanup(tagCleanup) accName := acc.TestClient().Ids.Alpha() resource.Test(t, resource.TestCase{ @@ -22,10 +28,10 @@ func TestAcc_TagMaskingPolicyAssociationBasic(t *testing.T) { CheckDestroy: nil, Steps: []resource.TestStep{ { - Config: tagAttachmentConfig(accName, acc.TestDatabaseName, acc.TestSchemaName), + Config: tagAttachmentConfig(accName, acc.TestDatabaseName, acc.TestSchemaName, tag.ID()), Check: resource.ComposeTestCheckFunc( resource.TestCheckResourceAttr("snowflake_tag_masking_policy_association.test", "masking_policy_id", fmt.Sprintf("%s.%s.%s", acc.TestDatabaseName, acc.TestSchemaName, accName)), - resource.TestCheckResourceAttr("snowflake_tag_masking_policy_association.test", "tag_id", fmt.Sprintf("%s.%s.%s", acc.TestDatabaseName, acc.TestSchemaName, accName)), + resource.TestCheckResourceAttr("snowflake_tag_masking_policy_association.test", "tag_id", tag.ID().FullyQualifiedName()), ), }, }, @@ -92,16 +98,8 @@ func TestAcc_TagMaskingPolicyAssociationsystem_functions_integration_testComplet }) } -func tagAttachmentConfig(n string, databaseName string, schemaName string) string { +func tagAttachmentConfig(n string, databaseName string, schemaName string, tagId sdk.SchemaObjectIdentifier) string { return fmt.Sprintf(` -resource "snowflake_tag" "test" { - name = "%[1]v" - database = "%[2]s" - schema = "%[3]s" - allowed_values = [] - comment = "Terraform acceptance test" -} - resource "snowflake_masking_policy" "test" { name = "%[1]v" database = "%[2]s" @@ -116,8 +114,8 @@ resource "snowflake_masking_policy" "test" { } resource "snowflake_tag_masking_policy_association" "test" { - tag_id = "${snowflake_tag.test.database}.${snowflake_tag.test.schema}.${snowflake_tag.test.name}" + tag_id = "\"%s\".\"%s\".\"%s\"" masking_policy_id = "${snowflake_masking_policy.test.database}.${snowflake_masking_policy.test.schema}.${snowflake_masking_policy.test.name}" } -`, n, databaseName, schemaName) +`, n, databaseName, schemaName, tagId.DatabaseName(), tagId.SchemaName(), tagId.Name()) } diff --git a/pkg/resources/testdata/TestAcc_GrantPrivilegesToAccountRole/OnAccountObject_gh2717/test.tf b/pkg/resources/testdata/TestAcc_GrantPrivilegesToAccountRole/OnAccountObject_gh2717/test.tf new file mode 100644 index 0000000000..89561deeab --- /dev/null +++ b/pkg/resources/testdata/TestAcc_GrantPrivilegesToAccountRole/OnAccountObject_gh2717/test.tf @@ -0,0 +1,8 @@ +resource 
"snowflake_grant_privileges_to_account_role" "test" { + account_role_name = var.name + privileges = var.privileges + on_account_object { + object_type = "COMPUTE POOL" + object_name = var.compute_pool + } +} diff --git a/pkg/resources/testdata/TestAcc_GrantPrivilegesToAccountRole/OnAccountObject_gh2717/variables.tf b/pkg/resources/testdata/TestAcc_GrantPrivilegesToAccountRole/OnAccountObject_gh2717/variables.tf new file mode 100644 index 0000000000..82329fbe6b --- /dev/null +++ b/pkg/resources/testdata/TestAcc_GrantPrivilegesToAccountRole/OnAccountObject_gh2717/variables.tf @@ -0,0 +1,11 @@ +variable "name" { + type = string +} + +variable "compute_pool" { + type = string +} + +variable "privileges" { + type = list(string) +} diff --git a/pkg/resources/testdata/TestAcc_GrantPrivilegesToAccountRole/OnAccount_gh3153/test.tf b/pkg/resources/testdata/TestAcc_GrantPrivilegesToAccountRole/OnAccount_gh3153/test.tf new file mode 100644 index 0000000000..8fcd187669 --- /dev/null +++ b/pkg/resources/testdata/TestAcc_GrantPrivilegesToAccountRole/OnAccount_gh3153/test.tf @@ -0,0 +1,5 @@ +resource "snowflake_grant_privileges_to_account_role" "test" { + account_role_name = var.name + privileges = var.privileges + on_account = true +} diff --git a/pkg/resources/testdata/TestAcc_GrantPrivilegesToAccountRole/OnAccount_gh3153/variables.tf b/pkg/resources/testdata/TestAcc_GrantPrivilegesToAccountRole/OnAccount_gh3153/variables.tf new file mode 100644 index 0000000000..45cb4745db --- /dev/null +++ b/pkg/resources/testdata/TestAcc_GrantPrivilegesToAccountRole/OnAccount_gh3153/variables.tf @@ -0,0 +1,7 @@ +variable "name" { + type = string +} + +variable "privileges" { + type = list(string) +} diff --git a/pkg/resources/testdata/TestAcc_Tag/basic/test.tf b/pkg/resources/testdata/TestAcc_Tag/basic/test.tf deleted file mode 100644 index 10673ee3b9..0000000000 --- a/pkg/resources/testdata/TestAcc_Tag/basic/test.tf +++ /dev/null @@ -1,7 +0,0 @@ -resource "snowflake_tag" "t" { - name = var.name - database = var.database - schema = var.schema - comment = var.comment - allowed_values = var.allowed_values -} diff --git a/pkg/resources/testdata/TestAcc_Tag/basic/variables.tf b/pkg/resources/testdata/TestAcc_Tag/basic/variables.tf deleted file mode 100644 index ba89ff9f86..0000000000 --- a/pkg/resources/testdata/TestAcc_Tag/basic/variables.tf +++ /dev/null @@ -1,19 +0,0 @@ -variable "database" { - type = string -} - -variable "schema" { - type = string -} - -variable "name" { - type = string -} - -variable "comment" { - type = string -} - -variable "allowed_values" { - type = list(string) -} \ No newline at end of file diff --git a/pkg/resources/testdata/TestAcc_TagMaskingPolicyAssociation/basic/test.tf b/pkg/resources/testdata/TestAcc_TagMaskingPolicyAssociation/basic/test.tf index ca8364a3ea..e4fe8efa84 100644 --- a/pkg/resources/testdata/TestAcc_TagMaskingPolicyAssociation/basic/test.tf +++ b/pkg/resources/testdata/TestAcc_TagMaskingPolicyAssociation/basic/test.tf @@ -1,9 +1,11 @@ + resource "snowflake_tag" "test" { - name = var.name - database = var.database - schema = var.schema - comment = var.comment - allowed_values = ["alv1", "alv2"] + name = var.name + database = var.database + schema = var.schema + comment = var.comment + masking_policies = [snowflake_masking_policy.test.fully_qualified_name] + allowed_values = ["alv1", "alv2"] } resource "snowflake_masking_policy" "test" { diff --git a/pkg/resources/user.go b/pkg/resources/user.go index cb4a0cfa6a..95ff6aeeea 100644 --- a/pkg/resources/user.go +++ 
b/pkg/resources/user.go @@ -191,7 +191,7 @@ func User() *schema.Resource { DeleteContext: DeleteUser, Description: "Resource used to manage user objects. For more information, check [user documentation](https://docs.snowflake.com/en/sql-reference/commands-user-role#user-management).", - Schema: helpers.MergeMaps(userSchema, userParametersSchema), + Schema: collections.MergeMaps(userSchema, userParametersSchema), Importer: &schema.ResourceImporter{ StateContext: GetImportUserFunc(sdk.UserTypePerson), }, @@ -224,7 +224,7 @@ func ServiceUser() *schema.Resource { DeleteContext: DeleteUser, Description: "Resource used to manage service user objects. For more information, check [user documentation](https://docs.snowflake.com/en/sql-reference/commands-user-role#user-management).", - Schema: helpers.MergeMaps(serviceUserSchema, userParametersSchema), + Schema: collections.MergeMaps(serviceUserSchema, userParametersSchema), Importer: &schema.ResourceImporter{ StateContext: GetImportUserFunc(sdk.UserTypeService), }, @@ -247,7 +247,7 @@ func LegacyServiceUser() *schema.Resource { DeleteContext: DeleteUser, Description: "Resource used to manage legacy service user objects. For more information, check [user documentation](https://docs.snowflake.com/en/sql-reference/commands-user-role#user-management).", - Schema: helpers.MergeMaps(legacyServiceUserSchema, userParametersSchema), + Schema: collections.MergeMaps(legacyServiceUserSchema, userParametersSchema), Importer: &schema.ResourceImporter{ StateContext: GetImportUserFunc(sdk.UserTypeLegacyService), }, @@ -512,17 +512,17 @@ func GetReadUserFunc(userType sdk.UserType, withExternalChangesMarking bool) sch // can't read disable_mfa d.Set("user_type", u.Type), - func() error { + func(rd *schema.ResourceData, ud *sdk.UserDetails) error { var errs error if userType == sdk.UserTypePerson { errs = errors.Join( - setFromStringPropertyIfNotEmpty(d, "first_name", userDetails.FirstName), - setFromStringPropertyIfNotEmpty(d, "middle_name", userDetails.MiddleName), - setFromStringPropertyIfNotEmpty(d, "last_name", userDetails.LastName), + setFromStringPropertyIfNotEmpty(rd, "first_name", ud.FirstName), + setFromStringPropertyIfNotEmpty(rd, "middle_name", ud.MiddleName), + setFromStringPropertyIfNotEmpty(rd, "last_name", ud.LastName), ) } return errs - }(), + }(d, userDetails), d.Set(FullyQualifiedNameAttributeName, id.FullyQualifiedName()), handleUserParameterRead(d, userParameters), @@ -607,7 +607,6 @@ func GetUpdateUserFunc(userType sdk.UserType) func(ctx context.Context, d *schem switch userType { case sdk.UserTypePerson: userTypeSpecificFieldsErrs = errors.Join( - stringAttributeUpdate(d, "password", &setObjectProperties.Password, &unsetObjectProperties.Password), stringAttributeUpdate(d, "first_name", &setObjectProperties.FirstName, &unsetObjectProperties.FirstName), stringAttributeUpdate(d, "middle_name", &setObjectProperties.MiddleName, &unsetObjectProperties.MiddleName), stringAttributeUpdate(d, "last_name", &setObjectProperties.LastName, &unsetObjectProperties.LastName), @@ -617,7 +616,6 @@ func GetUpdateUserFunc(userType sdk.UserType) func(ctx context.Context, d *schem ) case sdk.UserTypeLegacyService: userTypeSpecificFieldsErrs = errors.Join( - stringAttributeUpdate(d, "password", &setObjectProperties.Password, &unsetObjectProperties.Password), booleanStringAttributeUpdate(d, "must_change_password", &setObjectProperties.MustChangePassword, &unsetObjectProperties.MustChangePassword), ) } @@ -640,6 +638,10 @@ func GetUpdateUserFunc(userType sdk.UserType) 
func(ctx context.Context, d *schem } } + if err := handlePasswordUpdate(ctx, id, userType, d, client); err != nil { + return diag.FromErr(err) + } + set := &sdk.UserSet{ SessionParameters: &sdk.SessionParameters{}, ObjectParameters: &sdk.UserObjectParameters{}, @@ -677,6 +679,40 @@ func GetUpdateUserFunc(userType sdk.UserType) func(ctx context.Context, d *schem } } +// handlePasswordUpdate is a current workaround to handle user's password after import. +// Password is empty after the import, we can't read it from the config or from Snowflake. +// During the next terraform plan+apply it's updated to the "same" value. +// It results in an error on Snowflake side: New password rejected by current password policy. Reason: 'PRIOR_USE'. +// Current workaround is to ignore such an error. We will revisit it after migration to plugin framework. +func handlePasswordUpdate(ctx context.Context, id sdk.AccountObjectIdentifier, userType sdk.UserType, d *schema.ResourceData, client *sdk.Client) error { + if userType == sdk.UserTypePerson || userType == sdk.UserTypeLegacyService { + setPassword := sdk.UserAlterObjectProperties{} + unsetPassword := sdk.UserObjectPropertiesUnset{} + if err := stringAttributeUpdate(d, "password", &setPassword.Password, &unsetPassword.Password); err != nil { + return err + } + if (setPassword != sdk.UserAlterObjectProperties{}) { + err := client.Users.Alter(ctx, id, &sdk.AlterUserOptions{Set: &sdk.UserSet{ObjectProperties: &setPassword}}) + if err != nil { + if strings.Contains(err.Error(), "Error: 003002 (28P01)") || strings.Contains(err.Error(), "Reason: 'PRIOR_USE'") { + logging.DebugLogger.Printf("[DEBUG] Update to the same password is prohibited but it means we have a valid password in the current state. Continue.") + } else { + d.Partial(true) + return err + } + } + } + if (unsetPassword != sdk.UserObjectPropertiesUnset{}) { + err := client.Users.Alter(ctx, id, &sdk.AlterUserOptions{Unset: &sdk.UserUnset{ObjectProperties: &unsetPassword}}) + if err != nil { + d.Partial(true) + return err + } + } + } + return nil +} + func DeleteUser(ctx context.Context, d *schema.ResourceData, meta any) diag.Diagnostics { client := meta.(*provider.Context).Client id := helpers.DecodeSnowflakeID(d.Id()).(sdk.AccountObjectIdentifier) diff --git a/pkg/resources/user_acceptance_test.go b/pkg/resources/user_acceptance_test.go index dcf3266fa9..137dadcf2d 100644 --- a/pkg/resources/user_acceptance_test.go +++ b/pkg/resources/user_acceptance_test.go @@ -1350,6 +1350,7 @@ func TestAcc_User_migrateFromVersion094_noDefaultSecondaryRolesSet(t *testing.T) CheckDestroy: acc.CheckDestroy(t, resources.User), Steps: []resource.TestStep{ { + PreConfig: func() { acc.SetV097CompatibleConfigPathEnv(t) }, ExternalProviders: map[string]resource.ExternalProvider{ "snowflake": { VersionConstraint: "=0.94.1", @@ -1363,6 +1364,7 @@ func TestAcc_User_migrateFromVersion094_noDefaultSecondaryRolesSet(t *testing.T) ), }, { + PreConfig: func() { acc.UnsetConfigPathEnv(t) }, ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, Config: config.FromModel(t, userModel), ConfigPlanChecks: resource.ConfigPlanChecks{ @@ -1395,6 +1397,7 @@ func TestAcc_User_migrateFromVersion094_defaultSecondaryRolesSet(t *testing.T) { CheckDestroy: acc.CheckDestroy(t, resources.User), Steps: []resource.TestStep{ { + PreConfig: func() { acc.SetV097CompatibleConfigPathEnv(t) }, ExternalProviders: map[string]resource.ExternalProvider{ "snowflake": { VersionConstraint: "=0.94.1", @@ -1413,6 +1416,7 @@ resource "snowflake_user" "test" { 
), }, { + PreConfig: func() { acc.UnsetConfigPathEnv(t) }, ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, Config: config.FromModel(t, userModelWithOptionAll), ConfigPlanChecks: resource.ConfigPlanChecks{ @@ -1582,3 +1586,240 @@ func TestAcc_User_LoginNameAndDisplayName(t *testing.T) { }, }) } + +// https://docs.snowflake.com/en/release-notes/bcr-bundles/2024_08/bcr-1798 +// https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/3125 +func TestAcc_User_handleChangesToShowUsers_bcr202408_gh3125(t *testing.T) { + userId := acc.TestClient().Ids.RandomAccountObjectIdentifier() + + userModelNoAttributes := model.User("w", userId.Name()) + userModelWithNoneDefaultSecondaryRoles := model.User("w", userId.Name()).WithDefaultSecondaryRolesOptionEnum(sdk.SecondaryRolesOptionNone) + + resource.Test(t, resource.TestCase{ + ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, + TerraformVersionChecks: []tfversion.TerraformVersionCheck{ + tfversion.RequireAbove(tfversion.Version1_5_0), + }, + PreCheck: func() { acc.TestAccPreCheck(t) }, + CheckDestroy: acc.CheckDestroy(t, resources.User), + Steps: []resource.TestStep{ + { + PreConfig: func() { + acc.TestClient().BcrBundles.EnableBcrBundle(t, "2024_08") + }, + Config: config.FromModel(t, userModelNoAttributes), + Check: assert.AssertThat(t, + resourceassert.UserResource(t, userModelNoAttributes.ResourceReference()). + HasAllDefaults(userId, sdk.SecondaryRolesOptionDefault), + ), + }, + { + Config: config.FromModel(t, userModelWithNoneDefaultSecondaryRoles), + Check: assert.AssertThat(t, + resourceassert.UserResource(t, userModelWithNoneDefaultSecondaryRoles.ResourceReference()). + HasAllDefaults(userId, sdk.SecondaryRolesOptionNone), + ), + }, + }, + }) +} + +// https://docs.snowflake.com/en/release-notes/bcr-bundles/2024_08/bcr-1798 +// https://docs.snowflake.com/release-notes/bcr-bundles/2024_08/bcr-1692 +// https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/3125 +func TestAcc_User_handleChangesToShowUsers_bcr202408_gh3125_withbcr202407(t *testing.T) { + userId := acc.TestClient().Ids.RandomAccountObjectIdentifier() + + userModel := model.User("w", userId.Name()).WithDefaultSecondaryRolesOptionEnum(sdk.SecondaryRolesOptionNone) + + resource.Test(t, resource.TestCase{ + ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, + TerraformVersionChecks: []tfversion.TerraformVersionCheck{ + tfversion.RequireAbove(tfversion.Version1_5_0), + }, + PreCheck: func() { acc.TestAccPreCheck(t) }, + CheckDestroy: acc.CheckDestroy(t, resources.User), + Steps: []resource.TestStep{ + { + PreConfig: func() { + acc.TestClient().BcrBundles.EnableBcrBundle(t, "2024_07") + acc.TestClient().BcrBundles.EnableBcrBundle(t, "2024_08") + }, + Config: config.FromModel(t, userModel), + Check: assert.AssertThat(t, + resourceassert.UserResource(t, userModel.ResourceReference()). 
+ HasAllDefaults(userId, sdk.SecondaryRolesOptionNone), + ), + }, + }, + }) +} + +// https://docs.snowflake.com/en/release-notes/bcr-bundles/2024_08/bcr-1798 +// https://docs.snowflake.com/release-notes/bcr-bundles/2024_08/bcr-1692 +// https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/3125 +func TestAcc_User_handleChangesToShowUsers_bcr202408_migration_bcr202407_enabled(t *testing.T) { + userId := acc.TestClient().Ids.RandomAccountObjectIdentifier() + + userModel := model.User("w", userId.Name()) + + resource.Test(t, resource.TestCase{ + TerraformVersionChecks: []tfversion.TerraformVersionCheck{ + tfversion.RequireAbove(tfversion.Version1_5_0), + }, + PreCheck: func() { acc.TestAccPreCheck(t) }, + CheckDestroy: acc.CheckDestroy(t, resources.User), + Steps: []resource.TestStep{ + { + PreConfig: func() { + acc.TestClient().BcrBundles.EnableBcrBundle(t, "2024_07") + func() { acc.SetV097CompatibleConfigPathEnv(t) }() + }, + ExternalProviders: map[string]resource.ExternalProvider{ + "snowflake": { + VersionConstraint: "=0.97.0", + Source: "Snowflake-Labs/snowflake", + }, + }, + Config: config.FromModel(t, userModel), + Check: assert.AssertThat(t, + resourceassert.UserResource(t, userModel.ResourceReference()). + HasAllDefaults(userId, sdk.SecondaryRolesOptionDefault), + ), + }, + { + PreConfig: func() { + acc.TestClient().BcrBundles.EnableBcrBundle(t, "2024_08") + func() { acc.UnsetConfigPathEnv(t) }() + }, + ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, + Config: config.FromModel(t, userModel), + ConfigPlanChecks: resource.ConfigPlanChecks{ + PreApply: []plancheck.PlanCheck{ + plancheck.ExpectEmptyPlan(), + }, + }, + Check: assert.AssertThat(t, + resourceassert.UserResource(t, userModel.ResourceReference()). + HasAllDefaults(userId, sdk.SecondaryRolesOptionDefault), + ), + }, + }, + }) +} + +// https://docs.snowflake.com/en/release-notes/bcr-bundles/2024_08/bcr-1798 +// https://docs.snowflake.com/release-notes/bcr-bundles/2024_08/bcr-1692 +// https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/3125 +func TestAcc_User_handleChangesToShowUsers_bcr202408_migration_bcr202407_disabled(t *testing.T) { + userId := acc.TestClient().Ids.RandomAccountObjectIdentifier() + + userModel := model.User("w", userId.Name()) + + resource.Test(t, resource.TestCase{ + TerraformVersionChecks: []tfversion.TerraformVersionCheck{ + tfversion.RequireAbove(tfversion.Version1_5_0), + }, + PreCheck: func() { acc.TestAccPreCheck(t) }, + CheckDestroy: acc.CheckDestroy(t, resources.User), + Steps: []resource.TestStep{ + { + ExternalProviders: map[string]resource.ExternalProvider{ + "snowflake": { + VersionConstraint: "=0.97.0", + Source: "Snowflake-Labs/snowflake", + }, + }, + PreConfig: func() { acc.SetV097CompatibleConfigPathEnv(t) }, + Config: config.FromModel(t, userModel), + Check: assert.AssertThat(t, + resourceassert.UserResource(t, userModel.ResourceReference()). 
+ HasAllDefaults(userId, sdk.SecondaryRolesOptionDefault), + ), + }, + { + PreConfig: func() { + acc.TestClient().BcrBundles.EnableBcrBundle(t, "2024_08") + func() { acc.UnsetConfigPathEnv(t) }() + }, + ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, + Config: config.FromModel(t, userModel), + ConfigPlanChecks: resource.ConfigPlanChecks{ + PreApply: []plancheck.PlanCheck{ + planchecks.ExpectDrift(userModel.ResourceReference(), "default_secondary_roles_option", sdk.String(string(sdk.SecondaryRolesOptionDefault)), sdk.String(string(sdk.SecondaryRolesOptionAll))), + planchecks.ExpectChange(userModel.ResourceReference(), "default_secondary_roles_option", tfjson.ActionUpdate, sdk.String(string(sdk.SecondaryRolesOptionAll)), sdk.String(string(sdk.SecondaryRolesOptionDefault))), + }, + PostApplyPostRefresh: []plancheck.PlanCheck{ + plancheck.ExpectEmptyPlan(), + }, + }, + Check: assert.AssertThat(t, + resourceassert.UserResource(t, userModel.ResourceReference()). + HasAllDefaults(userId, sdk.SecondaryRolesOptionDefault), + ), + }, + }, + }) +} + +func TestAcc_User_importPassword(t *testing.T) { + _ = testenvs.GetOrSkipTest(t, testenvs.EnableAcceptance) + acc.TestAccPreCheck(t) + + userId := acc.TestClient().Ids.RandomAccountObjectIdentifier() + pass := random.Password() + firstName := random.AlphaN(6) + + _, userCleanup := acc.TestClient().User.CreateUserWithOptions(t, userId, &sdk.CreateUserOptions{ObjectProperties: &sdk.UserObjectProperties{ + Password: sdk.String(pass), + FirstName: sdk.String(firstName), + }}) + t.Cleanup(userCleanup) + + userModel := model.User("w", userId.Name()).WithPassword(pass).WithFirstName(firstName) + + resource.Test(t, resource.TestCase{ + ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, + TerraformVersionChecks: []tfversion.TerraformVersionCheck{ + tfversion.RequireAbove(tfversion.Version1_5_0), + }, + PreCheck: func() { acc.TestAccPreCheck(t) }, + CheckDestroy: acc.CheckDestroy(t, resources.User), + Steps: []resource.TestStep{ + // IMPORT + { + Config: config.FromModel(t, userModel), + ResourceName: userModel.ResourceReference(), + ImportState: true, + ImportStateId: userId.Name(), + ImportStateCheck: assert.AssertThatImport(t, + resourceassert.ImportedUserResource(t, userId.Name()). + HasNoPassword(). + HasFirstNameString(firstName), + ), + ImportStatePersist: true, + }, + { + Config: config.FromModel(t, userModel), + Check: assert.AssertThat(t, + resourceassert.UserResource(t, userModel.ResourceReference()). + HasNotEmptyPassword(). + HasFirstNameString(firstName), + ), + }, + { + ConfigPlanChecks: resource.ConfigPlanChecks{ + PreApply: []plancheck.PlanCheck{ + plancheck.ExpectEmptyPlan(), + }, + }, + Config: config.FromModel(t, userModel), + Check: assert.AssertThat(t, + resourceassert.UserResource(t, userModel.ResourceReference()). + HasNotEmptyPassword(). 
+ HasFirstNameString(firstName), + ), + }, + }, + }) +} diff --git a/pkg/resources/user_password_policy_attachment_acceptance_test.go b/pkg/resources/user_password_policy_attachment_acceptance_test.go index 1e466cc3c1..ce3b8e4335 100644 --- a/pkg/resources/user_password_policy_attachment_acceptance_test.go +++ b/pkg/resources/user_password_policy_attachment_acceptance_test.go @@ -6,12 +6,16 @@ import ( acc "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/testenvs" "github.com/hashicorp/terraform-plugin-testing/helper/resource" ) +// TODO [SNOW-1423486]: Fix using warehouse; remove unsetting testenvs.ConfigureClientOnce func TestAcc_UserPasswordPolicyAttachment(t *testing.T) { - // TODO [SNOW-1423486]: unskip - t.Skipf("Skip because error %s; will be fixed in SNOW-1423486", "Error: 000606 (57P03): No active warehouse selected in the current session. Select an active warehouse with the 'use warehouse' command.") + t.Setenv(string(testenvs.ConfigureClientOnce), "") + _ = testenvs.GetOrSkipTest(t, testenvs.EnableAcceptance) + acc.TestAccPreCheck(t) + userId := acc.TestClient().Ids.RandomAccountObjectIdentifier() userName := userId.Name() newUserId := acc.TestClient().Ids.RandomAccountObjectIdentifier() @@ -41,7 +45,49 @@ func TestAcc_UserPasswordPolicyAttachment(t *testing.T) { Check: resource.ComposeTestCheckFunc( resource.TestCheckResourceAttr("snowflake_user_password_policy_attachment.ppa", "user_name", newUserName), resource.TestCheckResourceAttr("snowflake_user_password_policy_attachment.ppa", "password_policy_name", newPasswordPolicyId.FullyQualifiedName()), - resource.TestCheckResourceAttr("snowflake_user_password_policy_attachment.ppa", "id", fmt.Sprintf("%s|%s", userId.FullyQualifiedName(), newPasswordPolicyId.FullyQualifiedName())), + resource.TestCheckResourceAttr("snowflake_user_password_policy_attachment.ppa", "id", fmt.Sprintf("%s|%s", newUserId.FullyQualifiedName(), newPasswordPolicyId.FullyQualifiedName())), + ), + }, + // IMPORT + { + ResourceName: "snowflake_user_password_policy_attachment.ppa", + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +// Adding this test to check if it will fail sometimes. It should, based on: +// - https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/3005 +// - https://github.com/Snowflake-Labs/terraform-provider-snowflake/pull/2627 +// but haven't (at least during manual runs). +// The behavior was fixed in https://github.com/Snowflake-Labs/terraform-provider-snowflake/pull/2627 +// so the problem should not occur in the newest provider versions. 
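// Sketch of the config-path pattern the migration tests in this changeset follow, using
// helpers that appear verbatim in the hunks (SetV097CompatibleConfigPathEnv presumably
// points SNOWFLAKE_CONFIG_PATH at a profile kept in the pre-0.98 format, and
// UnsetConfigPathEnv clears the override). The version constraint and the
// oldConfig/newConfig values are placeholders, not taken from the changeset.
Steps: []resource.TestStep{
	{
		PreConfig: func() { acc.SetV097CompatibleConfigPathEnv(t) },
		ExternalProviders: map[string]resource.ExternalProvider{
			"snowflake": {
				VersionConstraint: "=0.94.1",
				Source:            "Snowflake-Labs/snowflake",
			},
		},
		Config: oldConfig, // placeholder: config understood by the pinned release
	},
	{
		PreConfig:                func() { acc.UnsetConfigPathEnv(t) },
		ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories,
		Config:                   newConfig, // placeholder: config for the current build
	},
},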
+func TestAcc_UserPasswordPolicyAttachment_gh3005(t *testing.T) { + userId := acc.TestClient().Ids.RandomAccountObjectIdentifier() + userName := userId.Name() + passwordPolicyId := acc.TestClient().Ids.RandomSchemaObjectIdentifier() + passwordPolicyName := passwordPolicyId.Name() + + resource.Test(t, resource.TestCase{ + ExternalProviders: map[string]resource.ExternalProvider{ + "snowflake": { + VersionConstraint: "=0.87.0", + Source: "Snowflake-Labs/snowflake", + }, + }, + PreCheck: func() { acc.TestAccPreCheck(t) }, + CheckDestroy: acc.CheckUserPasswordPolicyAttachmentDestroy(t), + Steps: []resource.TestStep{ + // CREATE + { + PreConfig: func() { acc.SetV097CompatibleConfigPathEnv(t) }, + Config: userPasswordPolicyAttachmentConfigV087(userName, acc.TestDatabaseName, acc.TestSchemaName, passwordPolicyName), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr("snowflake_user_password_policy_attachment.ppa", "user_name", userName), + resource.TestCheckResourceAttr("snowflake_user_password_policy_attachment.ppa", "password_policy_name", passwordPolicyId.FullyQualifiedName()), + resource.TestCheckResourceAttr("snowflake_user_password_policy_attachment.ppa", "id", fmt.Sprintf("%s|%s", userId.FullyQualifiedName(), passwordPolicyId.FullyQualifiedName())), ), }, // IMPORT @@ -72,3 +118,23 @@ resource "snowflake_user_password_policy_attachment" "ppa" { } `, userName, databaseName, schemaName, passwordPolicyName) } + +func userPasswordPolicyAttachmentConfigV087(userName, databaseName, schemaName, passwordPolicyName string) string { + return fmt.Sprintf(` +resource "snowflake_user" "user" { + name = "%[1]s" +} + +resource "snowflake_password_policy" "pp" { + database = "%[2]s" + schema = "%[3]s" + name = "%[4]s" +} + +resource "snowflake_user_password_policy_attachment" "ppa" { + depends_on = [snowflake_password_policy.pp] + password_policy_name = "\"%[2]s\".\"%[3]s\".\"%[4]s\"" + user_name = snowflake_user.user.name +} +`, userName, databaseName, schemaName, passwordPolicyName) +} diff --git a/pkg/resources/view.go b/pkg/resources/view.go index 7c5b6d680f..9c782a71bf 100644 --- a/pkg/resources/view.go +++ b/pkg/resources/view.go @@ -124,13 +124,13 @@ var viewSchema = map[string]*schema.Schema{ Optional: true, Description: fmt.Sprintf("Specifies an interval (in minutes) of wait time inserted between runs of the data metric function. Conflicts with `using_cron`. Valid values are: %s. Due to Snowflake limitations, changes in this field are not managed by the provider. Please consider using [taint](https://developer.hashicorp.com/terraform/cli/commands/taint) command, `using_cron` field, or [replace_triggered_by](https://developer.hashicorp.com/terraform/language/meta-arguments/lifecycle#replace_triggered_by) metadata argument.", possibleValuesListed(sdk.AllViewDataMetricScheduleMinutes)), ValidateDiagFunc: IntInSlice(sdk.AllViewDataMetricScheduleMinutes), - ConflictsWith: []string{"data_metric_schedule.using_cron"}, + ConflictsWith: []string{"data_metric_schedule.0.using_cron"}, }, "using_cron": { Type: schema.TypeString, Optional: true, Description: "Specifies a cron expression and time zone for periodically running the data metric function. Supports a subset of standard cron utility syntax. 
Conflicts with `minutes`.", - ConflictsWith: []string{"data_metric_schedule.minutes"}, + ConflictsWith: []string{"data_metric_schedule.0.minutes"}, }, }, }, diff --git a/pkg/resources/view_acceptance_test.go b/pkg/resources/view_acceptance_test.go index 462f4ee6b8..2db7a4d7a1 100644 --- a/pkg/resources/view_acceptance_test.go +++ b/pkg/resources/view_acceptance_test.go @@ -1253,6 +1253,7 @@ func TestAcc_view_migrateFromVersion_0_94_1(t *testing.T) { Steps: []resource.TestStep{ { + PreConfig: func() { acc.SetV097CompatibleConfigPathEnv(t) }, ExternalProviders: map[string]resource.ExternalProvider{ "snowflake": { VersionConstraint: "=0.94.1", @@ -1269,6 +1270,7 @@ func TestAcc_view_migrateFromVersion_0_94_1(t *testing.T) { ), }, { + PreConfig: func() { acc.UnsetConfigPathEnv(t) }, ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, ConfigDirectory: acc.ConfigurationDirectory("TestAcc_View/basic"), ConfigVariables: viewConfig, diff --git a/pkg/resources/warehouse_acceptance_test.go b/pkg/resources/warehouse_acceptance_test.go index 2d49146eed..49f3d678ce 100644 --- a/pkg/resources/warehouse_acceptance_test.go +++ b/pkg/resources/warehouse_acceptance_test.go @@ -1452,6 +1452,7 @@ func TestAcc_Warehouse_migrateFromVersion092_withWarehouseSize(t *testing.T) { Steps: []resource.TestStep{ { + PreConfig: func() { acc.SetV097CompatibleConfigPathEnv(t) }, ExternalProviders: map[string]resource.ExternalProvider{ "snowflake": { VersionConstraint: "=0.92.0", @@ -1465,6 +1466,7 @@ func TestAcc_Warehouse_migrateFromVersion092_withWarehouseSize(t *testing.T) { ), }, { + PreConfig: func() { acc.UnsetConfigPathEnv(t) }, ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, ConfigPlanChecks: resource.ConfigPlanChecks{ PreApply: []plancheck.PlanCheck{ @@ -1493,6 +1495,7 @@ func TestAcc_Warehouse_migrateFromVersion092_allFieldsFilledBeforeMigration(t *t Steps: []resource.TestStep{ { + PreConfig: func() { acc.SetV097CompatibleConfigPathEnv(t) }, ExternalProviders: map[string]resource.ExternalProvider{ "snowflake": { VersionConstraint: "=0.92.0", @@ -1507,6 +1510,7 @@ func TestAcc_Warehouse_migrateFromVersion092_allFieldsFilledBeforeMigration(t *t ), }, { + PreConfig: func() { acc.UnsetConfigPathEnv(t) }, ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, Config: warehouseFullMigrationConfig(id.Name(), false), ConfigPlanChecks: resource.ConfigPlanChecks{ @@ -1553,6 +1557,7 @@ func TestAcc_Warehouse_migrateFromVersion092_allFieldsFilledBeforeMigration_bool Steps: []resource.TestStep{ { + PreConfig: func() { acc.SetV097CompatibleConfigPathEnv(t) }, ExternalProviders: map[string]resource.ExternalProvider{ "snowflake": { VersionConstraint: "=0.92.0", @@ -1569,6 +1574,7 @@ func TestAcc_Warehouse_migrateFromVersion092_allFieldsFilledBeforeMigration_bool }, // let's try to change the value of the parameter that was earlier a bool and now is a string { + PreConfig: func() { acc.UnsetConfigPathEnv(t) }, ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, ConfigPlanChecks: resource.ConfigPlanChecks{ PreApply: []plancheck.PlanCheck{ @@ -1600,6 +1606,7 @@ func TestAcc_Warehouse_migrateFromVersion092_queryAccelerationMaxScaleFactor_sam Steps: []resource.TestStep{ { + PreConfig: func() { acc.SetV097CompatibleConfigPathEnv(t) }, ExternalProviders: map[string]resource.ExternalProvider{ "snowflake": { VersionConstraint: "=0.92.0", @@ -1613,6 +1620,7 @@ func TestAcc_Warehouse_migrateFromVersion092_queryAccelerationMaxScaleFactor_sam ), }, { + PreConfig: func() { acc.UnsetConfigPathEnv(t) 
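// Sketch of why the ConflictsWith entries in the view schema above gained the ".0."
// segment: SDKv2 resolves ConflictsWith references by full attribute address, so values
// nested in a list block must be addressed through the element index. Everything here
// other than the two paths is illustrative, not the provider's actual field definitions.
"data_metric_schedule": {
	Type:     schema.TypeList,
	Optional: true,
	MaxItems: 1,
	Elem: &schema.Resource{
		Schema: map[string]*schema.Schema{
			"minutes": {
				Type:          schema.TypeInt,
				Optional:      true,
				ConflictsWith: []string{"data_metric_schedule.0.using_cron"},
			},
			"using_cron": {
				Type:          schema.TypeString,
				Optional:      true,
				ConflictsWith: []string{"data_metric_schedule.0.minutes"},
			},
		},
	},
},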
}, ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, Config: warehouseFullDefaultConfig(id.Name(), ""), ConfigPlanChecks: resource.ConfigPlanChecks{ @@ -1646,6 +1654,7 @@ func TestAcc_Warehouse_migrateFromVersion092_queryAccelerationMaxScaleFactor_noI Steps: []resource.TestStep{ { + PreConfig: func() { acc.SetV097CompatibleConfigPathEnv(t) }, ExternalProviders: map[string]resource.ExternalProvider{ "snowflake": { VersionConstraint: "=0.92.0", @@ -1659,6 +1668,7 @@ func TestAcc_Warehouse_migrateFromVersion092_queryAccelerationMaxScaleFactor_noI ), }, { + PreConfig: func() { acc.UnsetConfigPathEnv(t) }, ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, Config: warehouseFullDefaultConfigWithQueryAccelerationMaxScaleFactorRemoved(id.Name(), ""), ConfigPlanChecks: resource.ConfigPlanChecks{ @@ -1692,6 +1702,7 @@ func TestAcc_Warehouse_migrateFromVersion092_queryAccelerationMaxScaleFactor_dif Steps: []resource.TestStep{ { + PreConfig: func() { acc.SetV097CompatibleConfigPathEnv(t) }, ExternalProviders: map[string]resource.ExternalProvider{ "snowflake": { VersionConstraint: "=0.92.0", @@ -1705,6 +1716,7 @@ func TestAcc_Warehouse_migrateFromVersion092_queryAccelerationMaxScaleFactor_dif ), }, { + PreConfig: func() { acc.UnsetConfigPathEnv(t) }, ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, Config: warehouseFullDefaultConfigWithQueryAcceleration(id.Name(), "", true, 10), ConfigPlanChecks: resource.ConfigPlanChecks{ @@ -1737,6 +1749,7 @@ func TestAcc_Warehouse_migrateFromVersion092_noConfigToFullConfig(t *testing.T) Steps: []resource.TestStep{ { + PreConfig: func() { acc.SetV097CompatibleConfigPathEnv(t) }, ExternalProviders: map[string]resource.ExternalProvider{ "snowflake": { VersionConstraint: "=0.92.0", @@ -1750,6 +1763,7 @@ func TestAcc_Warehouse_migrateFromVersion092_noConfigToFullConfig(t *testing.T) ), }, { + PreConfig: func() { acc.UnsetConfigPathEnv(t) }, ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, Config: warehouseFullDefaultConfigWithQueryAcceleration(id.Name(), "", true, 8), ConfigPlanChecks: resource.ConfigPlanChecks{ @@ -1792,6 +1806,7 @@ func TestAcc_Warehouse_migrateFromVersion092_defaultsRemoved(t *testing.T) { Steps: []resource.TestStep{ { + PreConfig: func() { acc.SetV097CompatibleConfigPathEnv(t) }, ExternalProviders: map[string]resource.ExternalProvider{ "snowflake": { VersionConstraint: "=0.92.0", @@ -1821,6 +1836,7 @@ func TestAcc_Warehouse_migrateFromVersion092_defaultsRemoved(t *testing.T) { ), }, { + PreConfig: func() { acc.UnsetConfigPathEnv(t) }, ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, Config: warehouseWithSizeConfig(id.Name(), string(sdk.WarehouseSizeXSmall)), ConfigPlanChecks: resource.ConfigPlanChecks{ @@ -1860,6 +1876,7 @@ func TestAcc_Warehouse_migrateFromVersion092_warehouseSizeCausingForceNew(t *tes Steps: []resource.TestStep{ { + PreConfig: func() { acc.SetV097CompatibleConfigPathEnv(t) }, ExternalProviders: map[string]resource.ExternalProvider{ "snowflake": { VersionConstraint: "=0.92.0", @@ -1872,6 +1889,7 @@ func TestAcc_Warehouse_migrateFromVersion092_warehouseSizeCausingForceNew(t *tes ), }, { + PreConfig: func() { acc.UnsetConfigPathEnv(t) }, ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, Config: warehouseBasicConfig(id.Name()), ConfigPlanChecks: resource.ConfigPlanChecks{ @@ -1898,6 +1916,7 @@ func TestAcc_Warehouse_migrateFromV0941_ensureSmoothUpgradeWithNewResourceId(t * CheckDestroy: acc.CheckDestroy(t, resources.Warehouse), Steps: []resource.TestStep{ { 
+ PreConfig: func() { acc.SetV097CompatibleConfigPathEnv(t) }, ExternalProviders: map[string]resource.ExternalProvider{ "snowflake": { VersionConstraint: "=0.94.1", @@ -1910,6 +1929,7 @@ func TestAcc_Warehouse_migrateFromV0941_ensureSmoothUpgradeWithNewResourceId(t * ), }, { + PreConfig: func() { acc.UnsetConfigPathEnv(t) }, ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, Config: warehouseBasicConfig(id.Name()), Check: resource.ComposeAggregateTestCheckFunc( @@ -1932,6 +1952,7 @@ func TestAcc_Warehouse_IdentifierQuotingDiffSuppression(t *testing.T) { CheckDestroy: acc.CheckDestroy(t, resources.Warehouse), Steps: []resource.TestStep{ { + PreConfig: func() { acc.SetV097CompatibleConfigPathEnv(t) }, ExternalProviders: map[string]resource.ExternalProvider{ "snowflake": { VersionConstraint: "=0.94.1", @@ -1946,6 +1967,7 @@ func TestAcc_Warehouse_IdentifierQuotingDiffSuppression(t *testing.T) { ), }, { + PreConfig: func() { acc.UnsetConfigPathEnv(t) }, ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, Config: warehouseBasicConfig(quotedId), ConfigPlanChecks: resource.ConfigPlanChecks{ diff --git a/pkg/schemas/connection_gen.go b/pkg/schemas/connection_gen.go new file mode 100644 index 0000000000..0977ae88f5 --- /dev/null +++ b/pkg/schemas/connection_gen.go @@ -0,0 +1,90 @@ +// Code generated by sdk-to-schema generator; DO NOT EDIT. + +package schemas + +import ( + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk" + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" +) + +// ShowConnectionSchema represents output of SHOW query for the single Connection. +var ShowConnectionSchema = map[string]*schema.Schema{ + "region_group": { + Type: schema.TypeString, + Computed: true, + }, + "snowflake_region": { + Type: schema.TypeString, + Computed: true, + }, + "created_on": { + Type: schema.TypeString, + Computed: true, + }, + "account_name": { + Type: schema.TypeString, + Computed: true, + }, + "name": { + Type: schema.TypeString, + Computed: true, + }, + "comment": { + Type: schema.TypeString, + Computed: true, + }, + "is_primary": { + Type: schema.TypeBool, + Computed: true, + }, + "primary": { + Type: schema.TypeString, + Computed: true, + }, + "failover_allowed_to_accounts": { + Type: schema.TypeList, + Elem: &schema.Schema{Type: schema.TypeString}, + Computed: true, + }, + "connection_url": { + Type: schema.TypeString, + Computed: true, + }, + "organization_name": { + Type: schema.TypeString, + Computed: true, + }, + "account_locator": { + Type: schema.TypeString, + Computed: true, + }, +} + +var _ = ShowConnectionSchema + +func ConnectionToSchema(connection *sdk.Connection) map[string]any { + connectionSchema := make(map[string]any) + if connection.RegionGroup != nil { + connectionSchema["region_group"] = connection.RegionGroup + } + connectionSchema["snowflake_region"] = connection.SnowflakeRegion + connectionSchema["created_on"] = connection.CreatedOn.String() + connectionSchema["account_name"] = connection.AccountName + connectionSchema["name"] = connection.Name + if connection.Comment != nil { + connectionSchema["comment"] = connection.Comment + } + connectionSchema["is_primary"] = connection.IsPrimary + connectionSchema["primary"] = connection.Primary.FullyQualifiedName() + var allowedAccounts []string + for _, accountId := range connection.FailoverAllowedToAccounts { + allowedAccounts = append(allowedAccounts, accountId.Name()) + } + connectionSchema["failover_allowed_to_accounts"] = allowedAccounts + connectionSchema["connection_url"] = 
connection.ConnectionUrl + connectionSchema["organization_name"] = connection.OrganizationName + connectionSchema["account_locator"] = connection.AccountLocator + return connectionSchema +} + +var _ = ConnectionToSchema diff --git a/pkg/schemas/gen/sdk_show_result_structs.go b/pkg/schemas/gen/sdk_show_result_structs.go index 3af846ccc7..8aa4a5bd9e 100644 --- a/pkg/schemas/gen/sdk_show_result_structs.go +++ b/pkg/schemas/gen/sdk_show_result_structs.go @@ -10,6 +10,7 @@ var SdkShowResultStructs = []any{ sdk.ApplicationRole{}, sdk.Application{}, sdk.AuthenticationPolicy{}, + sdk.Connection{}, sdk.DatabaseRole{}, sdk.Database{}, sdk.DynamicTable{}, diff --git a/pkg/schemas/security_integration.go b/pkg/schemas/security_integration.go index 051b49ea5d..cc069a0199 100644 --- a/pkg/schemas/security_integration.go +++ b/pkg/schemas/security_integration.go @@ -6,11 +6,12 @@ import ( "strings" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/helpers" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/internal/collections" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk" ) var ( - SecurityIntegrationDescribeSchema = helpers.MergeMaps( + SecurityIntegrationDescribeSchema = collections.MergeMaps( DescribeApiAuthSecurityIntegrationSchema, DescribeExternalOauthSecurityIntegrationSchema, DescribeOauthIntegrationForCustomClients, diff --git a/pkg/schemas/tag_gen.go b/pkg/schemas/tag_gen.go index dc9cc242fe..68eee1df8b 100644 --- a/pkg/schemas/tag_gen.go +++ b/pkg/schemas/tag_gen.go @@ -34,7 +34,9 @@ var ShowTagSchema = map[string]*schema.Schema{ Computed: true, }, "allowed_values": { - Type: schema.TypeInvalid, + // Adjusted manually. + Type: schema.TypeSet, + Elem: &schema.Schema{Type: schema.TypeString}, Computed: true, }, "owner_role_type": { diff --git a/pkg/sdk/client.go b/pkg/sdk/client.go index 94b0f9f68a..8f6d66c8a4 100644 --- a/pkg/sdk/client.go +++ b/pkg/sdk/client.go @@ -14,14 +14,10 @@ import ( "github.com/snowflakedb/gosnowflake" ) -var ( - instrumentedSQL bool - gosnowflakeLoggingLevel string -) +var instrumentedSQL bool func init() { instrumentedSQL = os.Getenv(snowflakeenvs.NoInstrumentedSql) == "" - gosnowflakeLoggingLevel = os.Getenv(snowflakeenvs.GosnowflakeLogLevel) } type Client struct { @@ -146,10 +142,6 @@ func NewClient(cfg *gosnowflake.Config) (*Client, error) { driverName = "snowflake-instrumented" } - if gosnowflakeLoggingLevel != "" { - cfg.Tracing = gosnowflakeLoggingLevel - } - dsn, err := gosnowflake.DSN(cfg) if err != nil { return nil, err diff --git a/pkg/sdk/config.go b/pkg/sdk/config.go index 8c4441c330..c4f5ca99b7 100644 --- a/pkg/sdk/config.go +++ b/pkg/sdk/config.go @@ -1,12 +1,24 @@ package sdk import ( + "crypto/rsa" + "encoding/pem" + "errors" + "fmt" "log" + "net" + "net/url" "os" "path/filepath" + "slices" + "strings" + "time" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/internal/collections" "github.com/pelletier/go-toml/v2" "github.com/snowflakedb/gosnowflake" + "github.com/youmark/pkcs8" + "golang.org/x/crypto/ssh" ) func DefaultConfig() *gosnowflake.Config { @@ -19,20 +31,28 @@ func DefaultConfig() *gosnowflake.Config { } func ProfileConfig(profile string) (*gosnowflake.Config, error) { - configs, err := loadConfigFile() + path, err := GetConfigFileName() if err != nil { return nil, err } + configs, err := loadConfigFile(path) + if err != nil { + return nil, fmt.Errorf("could not load config file: %w", err) + } + if profile == "" { profile = "default" } var config *gosnowflake.Config if cfg, ok := 
configs[profile]; ok { log.Printf("[DEBUG] loading config for profile: \"%s\"", profile) - config = cfg + driverCfg, err := cfg.DriverConfig() + if err != nil { + return nil, fmt.Errorf("converting profile \"%s\" in file %s failed: %w", profile, path, err) + } + config = Pointer(driverCfg) } - if config == nil { log.Printf("[DEBUG] no config found for profile: \"%s\"", profile) return nil, nil @@ -60,6 +80,9 @@ func MergeConfig(baseConfig *gosnowflake.Config, mergeConfig *gosnowflake.Config if baseConfig.Password == "" { baseConfig.Password = mergeConfig.Password } + if baseConfig.Warehouse == "" { + baseConfig.Warehouse = mergeConfig.Warehouse + } if baseConfig.Role == "" { baseConfig.Role = mergeConfig.Role } @@ -69,11 +92,113 @@ func MergeConfig(baseConfig *gosnowflake.Config, mergeConfig *gosnowflake.Config if baseConfig.Host == "" { baseConfig.Host = mergeConfig.Host } + if !configBoolSet(baseConfig.ValidateDefaultParameters) { + baseConfig.ValidateDefaultParameters = mergeConfig.ValidateDefaultParameters + } + if mergedMap := collections.MergeMaps(mergeConfig.Params, baseConfig.Params); len(mergedMap) > 0 { + baseConfig.Params = mergedMap + } + if baseConfig.ClientIP == nil { + baseConfig.ClientIP = mergeConfig.ClientIP + } + if baseConfig.Protocol == "" { + baseConfig.Protocol = mergeConfig.Protocol + } + if baseConfig.Host == "" { + baseConfig.Host = mergeConfig.Host + } + if baseConfig.Port == 0 { + baseConfig.Port = mergeConfig.Port + } + if baseConfig.Authenticator == 0 { + baseConfig.Authenticator = mergeConfig.Authenticator + } + if baseConfig.Passcode == "" { + baseConfig.Passcode = mergeConfig.Passcode + } + if !baseConfig.PasscodeInPassword { + baseConfig.PasscodeInPassword = mergeConfig.PasscodeInPassword + } + if baseConfig.OktaURL == nil { + baseConfig.OktaURL = mergeConfig.OktaURL + } + if baseConfig.LoginTimeout == 0 { + baseConfig.LoginTimeout = mergeConfig.LoginTimeout + } + if baseConfig.RequestTimeout == 0 { + baseConfig.RequestTimeout = mergeConfig.RequestTimeout + } + if baseConfig.JWTExpireTimeout == 0 { + baseConfig.JWTExpireTimeout = mergeConfig.JWTExpireTimeout + } + if baseConfig.ClientTimeout == 0 { + baseConfig.ClientTimeout = mergeConfig.ClientTimeout + } + if baseConfig.JWTClientTimeout == 0 { + baseConfig.JWTClientTimeout = mergeConfig.JWTClientTimeout + } + if baseConfig.ExternalBrowserTimeout == 0 { + baseConfig.ExternalBrowserTimeout = mergeConfig.ExternalBrowserTimeout + } + if baseConfig.MaxRetryCount == 0 { + baseConfig.MaxRetryCount = mergeConfig.MaxRetryCount + } + if !baseConfig.InsecureMode { + baseConfig.InsecureMode = mergeConfig.InsecureMode + } + if baseConfig.OCSPFailOpen == 0 { + baseConfig.OCSPFailOpen = mergeConfig.OCSPFailOpen + } + if baseConfig.Token == "" { + baseConfig.Token = mergeConfig.Token + } + if !baseConfig.KeepSessionAlive { + baseConfig.KeepSessionAlive = mergeConfig.KeepSessionAlive + } + if baseConfig.PrivateKey == nil { + baseConfig.PrivateKey = mergeConfig.PrivateKey + } + if !baseConfig.DisableTelemetry { + baseConfig.DisableTelemetry = mergeConfig.DisableTelemetry + } + if baseConfig.Tracing == "" { + baseConfig.Tracing = mergeConfig.Tracing + } + if baseConfig.TmpDirPath == "" { + baseConfig.TmpDirPath = mergeConfig.TmpDirPath + } + if !configBoolSet(baseConfig.ClientRequestMfaToken) { + baseConfig.ClientRequestMfaToken = mergeConfig.ClientRequestMfaToken + } + if !configBoolSet(baseConfig.ClientStoreTemporaryCredential) { + baseConfig.ClientStoreTemporaryCredential = mergeConfig.ClientStoreTemporaryCredential + } 
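// Sketch of the precedence implemented by MergeConfig above, written from a caller's
// point of view: values already set on baseConfig win, mergeConfig only fills the gaps,
// and tri-state ConfigBool fields count as unset until they are explicitly true or false.
func exampleMergeConfig() {
	base := &gosnowflake.Config{Account: "myorg-myaccount", User: "TF_USER"}
	profile := &gosnowflake.Config{User: "IGNORED", Warehouse: "COMPUTE_WH", Role: "SYSADMIN"}
	merged := sdk.MergeConfig(base, profile)
	_ = merged.User      // "TF_USER" - kept from base
	_ = merged.Warehouse // "COMPUTE_WH" - filled in from the profile
	_ = merged.Role      // "SYSADMIN" - filled in from the profile
}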
+ if !baseConfig.DisableQueryContextCache { + baseConfig.DisableQueryContextCache = mergeConfig.DisableQueryContextCache + } + if !configBoolSet(baseConfig.IncludeRetryReason) { + baseConfig.IncludeRetryReason = mergeConfig.IncludeRetryReason + } + if !configBoolSet(baseConfig.DisableConsoleLogin) { + baseConfig.DisableConsoleLogin = mergeConfig.DisableConsoleLogin + } return baseConfig } -func configFile() (string, error) { - // has the user overwridden the default config path? +func configBoolSet(v gosnowflake.ConfigBool) bool { + // configBoolNotSet is unexported in the driver, so we check if it's neither true nor false + return slices.Contains([]gosnowflake.ConfigBool{gosnowflake.ConfigBoolFalse, gosnowflake.ConfigBoolTrue}, v) +} + +func boolToConfigBool(v bool) gosnowflake.ConfigBool { + if v { + return gosnowflake.ConfigBoolTrue + } + return gosnowflake.ConfigBoolFalse +} + +func GetConfigFileName() (string, error) { + // has the user overridden the default config path? if configPath, ok := os.LookupEnv("SNOWFLAKE_CONFIG_PATH"); ok { if configPath != "" { return configPath, nil @@ -87,20 +212,280 @@ func configFile() (string, error) { return filepath.Join(dir, ".snowflake", "config"), nil } -func loadConfigFile() (map[string]*gosnowflake.Config, error) { - path, err := configFile() +// TODO(SNOW-1787920): improve TOML parsing +type ConfigDTO struct { + Account *string `toml:"account"` + AccountName *string `toml:"accountname"` + OrganizationName *string `toml:"organizationname"` + User *string `toml:"user"` + Username *string `toml:"username"` + Password *string `toml:"password"` + Host *string `toml:"host"` + Warehouse *string `toml:"warehouse"` + Role *string `toml:"role"` + Params *map[string]*string `toml:"params"` + ClientIp *string `toml:"clientip"` + Protocol *string `toml:"protocol"` + Passcode *string `toml:"passcode"` + Port *int `toml:"port"` + PasscodeInPassword *bool `toml:"passcodeinpassword"` + OktaUrl *string `toml:"oktaurl"` + ClientTimeout *int `toml:"clienttimeout"` + JwtClientTimeout *int `toml:"jwtclienttimeout"` + LoginTimeout *int `toml:"logintimeout"` + RequestTimeout *int `toml:"requesttimeout"` + JwtExpireTimeout *int `toml:"jwtexpiretimeout"` + ExternalBrowserTimeout *int `toml:"externalbrowsertimeout"` + MaxRetryCount *int `toml:"maxretrycount"` + Authenticator *string `toml:"authenticator"` + InsecureMode *bool `toml:"insecuremode"` + OcspFailOpen *bool `toml:"ocspfailopen"` + Token *string `toml:"token"` + KeepSessionAlive *bool `toml:"keepsessionalive"` + PrivateKey *string `toml:"privatekey,multiline"` + PrivateKeyPassphrase *string `toml:"privatekeypassphrase"` + DisableTelemetry *bool `toml:"disabletelemetry"` + ValidateDefaultParameters *bool `toml:"validatedefaultparameters"` + ClientRequestMfaToken *bool `toml:"clientrequestmfatoken"` + ClientStoreTemporaryCredential *bool `toml:"clientstoretemporarycredential"` + Tracing *string `toml:"tracing"` + TmpDirPath *string `toml:"tmpdirpath"` + DisableQueryContextCache *bool `toml:"disablequerycontextcache"` + IncludeRetryReason *bool `toml:"includeretryreason"` + DisableConsoleLogin *bool `toml:"disableconsolelogin"` +} + +func (c *ConfigDTO) DriverConfig() (gosnowflake.Config, error) { + driverCfg := gosnowflake.Config{} + pointerAttributeSet(c.Account, &driverCfg.Account) + if c.AccountName != nil && c.OrganizationName != nil { + driverCfg.Account = fmt.Sprintf("%s-%s", *c.OrganizationName, *c.AccountName) + } + pointerAttributeSet(c.User, &driverCfg.User) + pointerAttributeSet(c.Username, 
&driverCfg.User) + pointerAttributeSet(c.Password, &driverCfg.Password) + pointerAttributeSet(c.Host, &driverCfg.Host) + pointerAttributeSet(c.Warehouse, &driverCfg.Warehouse) + pointerAttributeSet(c.Role, &driverCfg.Role) + pointerAttributeSet(c.Params, &driverCfg.Params) + pointerIpAttributeSet(c.ClientIp, &driverCfg.ClientIP) + pointerAttributeSet(c.Protocol, &driverCfg.Protocol) + pointerAttributeSet(c.Passcode, &driverCfg.Passcode) + pointerAttributeSet(c.Port, &driverCfg.Port) + pointerAttributeSet(c.PasscodeInPassword, &driverCfg.PasscodeInPassword) + err := pointerUrlAttributeSet(c.OktaUrl, &driverCfg.OktaURL) if err != nil { - return nil, err + return gosnowflake.Config{}, err + } + pointerTimeInSecondsAttributeSet(c.ClientTimeout, &driverCfg.ClientTimeout) + pointerTimeInSecondsAttributeSet(c.JwtClientTimeout, &driverCfg.JWTClientTimeout) + pointerTimeInSecondsAttributeSet(c.LoginTimeout, &driverCfg.LoginTimeout) + pointerTimeInSecondsAttributeSet(c.RequestTimeout, &driverCfg.RequestTimeout) + pointerTimeInSecondsAttributeSet(c.JwtExpireTimeout, &driverCfg.JWTExpireTimeout) + pointerTimeInSecondsAttributeSet(c.ExternalBrowserTimeout, &driverCfg.ExternalBrowserTimeout) + pointerAttributeSet(c.MaxRetryCount, &driverCfg.MaxRetryCount) + if c.Authenticator != nil { + authenticator, err := ToAuthenticatorType(*c.Authenticator) + if err != nil { + return gosnowflake.Config{}, err + } + driverCfg.Authenticator = authenticator + } + pointerAttributeSet(c.InsecureMode, &driverCfg.InsecureMode) + if c.OcspFailOpen != nil { + if *c.OcspFailOpen { + driverCfg.OCSPFailOpen = gosnowflake.OCSPFailOpenTrue + } else { + driverCfg.OCSPFailOpen = gosnowflake.OCSPFailOpenFalse + } } + pointerAttributeSet(c.Token, &driverCfg.Token) + pointerAttributeSet(c.KeepSessionAlive, &driverCfg.KeepSessionAlive) + if c.PrivateKey != nil { + passphrase := make([]byte, 0) + if c.PrivateKeyPassphrase != nil { + passphrase = []byte(*c.PrivateKeyPassphrase) + } + privKey, err := ParsePrivateKey([]byte(*c.PrivateKey), passphrase) + if err != nil { + return gosnowflake.Config{}, err + } + driverCfg.PrivateKey = privKey + } + pointerAttributeSet(c.DisableTelemetry, &driverCfg.DisableTelemetry) + pointerConfigBoolAttributeSet(c.ValidateDefaultParameters, &driverCfg.ValidateDefaultParameters) + pointerConfigBoolAttributeSet(c.ClientRequestMfaToken, &driverCfg.ClientRequestMfaToken) + pointerConfigBoolAttributeSet(c.ClientStoreTemporaryCredential, &driverCfg.ClientStoreTemporaryCredential) + pointerAttributeSet(c.Tracing, &driverCfg.Tracing) + pointerAttributeSet(c.TmpDirPath, &driverCfg.TmpDirPath) + pointerAttributeSet(c.DisableQueryContextCache, &driverCfg.DisableQueryContextCache) + pointerConfigBoolAttributeSet(c.IncludeRetryReason, &driverCfg.IncludeRetryReason) + pointerConfigBoolAttributeSet(c.DisableConsoleLogin, &driverCfg.DisableConsoleLogin) + + return driverCfg, nil +} + +func pointerAttributeSet[T any](src, dst *T) { + if src != nil { + *dst = *src + } +} + +func pointerTimeInSecondsAttributeSet(src *int, dst *time.Duration) { + if src != nil { + *dst = time.Second * time.Duration(*src) + } +} + +func pointerConfigBoolAttributeSet(src *bool, dst *gosnowflake.ConfigBool) { + if src != nil { + *dst = boolToConfigBool(*src) + } +} + +func pointerIpAttributeSet(src *string, dst *net.IP) { + if src != nil { + *dst = net.ParseIP(*src) + } +} + +func pointerUrlAttributeSet(src *string, dst **url.URL) error { + if src != nil { + url, err := url.Parse(*src) + if err != nil { + return err + } + *dst = url + } + return 
nil +} + +func loadConfigFile(path string) (map[string]ConfigDTO, error) { dat, err := os.ReadFile(path) if err != nil { return nil, err } - var s map[string]*gosnowflake.Config + var s map[string]ConfigDTO err = toml.Unmarshal(dat, &s) if err != nil { - log.Printf("[DEBUG] error unmarshalling config file: %v\n", err) - return nil, nil + return nil, fmt.Errorf("unmarshalling config file %s: %w", path, err) } return s, nil } + +func ParsePrivateKey(privateKeyBytes []byte, passphrase []byte) (*rsa.PrivateKey, error) { + privateKeyBlock, _ := pem.Decode(privateKeyBytes) + if privateKeyBlock == nil { + return nil, fmt.Errorf("could not parse private key, key is not in PEM format") + } + + if privateKeyBlock.Type == "ENCRYPTED PRIVATE KEY" { + if len(passphrase) == 0 { + return nil, fmt.Errorf("private key requires a passphrase, but private_key_passphrase was not supplied") + } + privateKey, err := pkcs8.ParsePKCS8PrivateKeyRSA(privateKeyBlock.Bytes, passphrase) + if err != nil { + return nil, fmt.Errorf("could not parse encrypted private key with passphrase, only ciphers aes-128-cbc, aes-128-gcm, aes-192-cbc, aes-192-gcm, aes-256-cbc, aes-256-gcm, and des-ede3-cbc are supported err = %w", err) + } + return privateKey, nil + } + + // TODO(SNOW-1754327): check if we can simply use ssh.ParseRawPrivateKeyWithPassphrase + privateKey, err := ssh.ParseRawPrivateKey(privateKeyBytes) + if err != nil { + return nil, fmt.Errorf("could not parse private key err = %w", err) + } + + rsaPrivateKey, ok := privateKey.(*rsa.PrivateKey) + if !ok { + return nil, errors.New("privateKey not of type RSA") + } + return rsaPrivateKey, nil +} + +type AuthenticationType string + +const ( + AuthenticationTypeSnowflake AuthenticationType = "SNOWFLAKE" + AuthenticationTypeOauth AuthenticationType = "OAUTH" + AuthenticationTypeExternalBrowser AuthenticationType = "EXTERNALBROWSER" + AuthenticationTypeOkta AuthenticationType = "OKTA" + AuthenticationTypeJwtLegacy AuthenticationType = "JWT" + AuthenticationTypeJwt AuthenticationType = "SNOWFLAKE_JWT" + AuthenticationTypeTokenAccessor AuthenticationType = "TOKENACCESSOR" + AuthenticationTypeUsernamePasswordMfa AuthenticationType = "USERNAMEPASSWORDMFA" +) + +var AllAuthenticationTypes = []AuthenticationType{ + AuthenticationTypeSnowflake, + AuthenticationTypeOauth, + AuthenticationTypeExternalBrowser, + AuthenticationTypeOkta, + AuthenticationTypeJwtLegacy, + AuthenticationTypeJwt, + AuthenticationTypeTokenAccessor, + AuthenticationTypeUsernamePasswordMfa, +} + +func ToAuthenticatorType(s string) (gosnowflake.AuthType, error) { + switch strings.ToUpper(s) { + case string(AuthenticationTypeSnowflake): + return gosnowflake.AuthTypeSnowflake, nil + case string(AuthenticationTypeOauth): + return gosnowflake.AuthTypeOAuth, nil + case string(AuthenticationTypeExternalBrowser): + return gosnowflake.AuthTypeExternalBrowser, nil + case string(AuthenticationTypeOkta): + return gosnowflake.AuthTypeOkta, nil + case string(AuthenticationTypeJwt), string(AuthenticationTypeJwtLegacy): + return gosnowflake.AuthTypeJwt, nil + case string(AuthenticationTypeTokenAccessor): + return gosnowflake.AuthTypeTokenAccessor, nil + case string(AuthenticationTypeUsernamePasswordMfa): + return gosnowflake.AuthTypeUsernamePasswordMFA, nil + default: + return gosnowflake.AuthType(0), fmt.Errorf("invalid authenticator type: %s", s) + } +} + +type DriverLogLevel string + +const ( + // these values are lower case on purpose to match gosnowflake case + DriverLogLevelTrace DriverLogLevel = "trace" + 
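// Sketch of how a parsed TOML profile lands on the driver config, written from a caller's
// point of view against the ConfigDTO and DriverConfig defined above (a fuller profile is
// exercised in config_test.go further down); the literal values are examples only.
func exampleDriverConfig() {
	profile := sdk.ConfigDTO{
		OrganizationName: sdk.Pointer("myorg"),
		AccountName:      sdk.Pointer("myaccount"),
		User:             sdk.Pointer("tf_user"),
		Authenticator:    sdk.Pointer("SNOWFLAKE_JWT"),
		ClientTimeout:    sdk.Pointer(10),
	}
	driverCfg, err := profile.DriverConfig()
	if err != nil {
		panic(err) // example only
	}
	_ = driverCfg.Account       // "myorg-myaccount": organizationname and accountname are combined
	_ = driverCfg.Authenticator // gosnowflake.AuthTypeJwt, via ToAuthenticatorType
	_ = driverCfg.ClientTimeout // 10 * time.Second
}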
DriverLogLevelDebug DriverLogLevel = "debug" + DriverLogLevelInfo DriverLogLevel = "info" + DriverLogLevelPrint DriverLogLevel = "print" + DriverLogLevelWarning DriverLogLevel = "warning" + DriverLogLevelError DriverLogLevel = "error" + DriverLogLevelFatal DriverLogLevel = "fatal" + DriverLogLevelPanic DriverLogLevel = "panic" +) + +var AllDriverLogLevels = []DriverLogLevel{ + DriverLogLevelTrace, + DriverLogLevelDebug, + DriverLogLevelInfo, + DriverLogLevelPrint, + DriverLogLevelWarning, + DriverLogLevelError, + DriverLogLevelFatal, + DriverLogLevelPanic, +} + +func ToDriverLogLevel(s string) (DriverLogLevel, error) { + lowerCase := strings.ToLower(s) + switch lowerCase { + case string(DriverLogLevelTrace), + string(DriverLogLevelDebug), + string(DriverLogLevelInfo), + string(DriverLogLevelPrint), + string(DriverLogLevelWarning), + string(DriverLogLevelError), + string(DriverLogLevelFatal), + string(DriverLogLevelPanic): + return DriverLogLevel(lowerCase), nil + default: + return "", fmt.Errorf("invalid driver log level: %s", s) + } +} diff --git a/pkg/sdk/config_test.go b/pkg/sdk/config_test.go index d34d812a88..8f67022ba0 100644 --- a/pkg/sdk/config_test.go +++ b/pkg/sdk/config_test.go @@ -1,11 +1,17 @@ package sdk import ( - "os" - "path/filepath" + "crypto/x509" + "encoding/pem" + "fmt" + "net" + "net/url" "testing" + "time" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/helpers/random" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/internal/snowflakeenvs" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/testhelpers" "github.com/snowflakedb/gosnowflake" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" @@ -25,40 +31,120 @@ func TestLoadConfigFile(t *testing.T) { password='abcd1234' role='SECURITYADMIN' ` - configPath := testFile(t, "config", []byte(c)) - t.Setenv(snowflakeenvs.ConfigPath, configPath) + configPath := testhelpers.TestFile(t, "config", []byte(c)) - m, err := loadConfigFile() + m, err := loadConfigFile(configPath) require.NoError(t, err) - assert.Equal(t, "TEST_ACCOUNT", m["default"].Account) - assert.Equal(t, "TEST_USER", m["default"].User) - assert.Equal(t, "abcd1234", m["default"].Password) - assert.Equal(t, "ACCOUNTADMIN", m["default"].Role) - assert.Equal(t, "TEST_ACCOUNT", m["securityadmin"].Account) - assert.Equal(t, "TEST_USER", m["securityadmin"].User) - assert.Equal(t, "abcd1234", m["securityadmin"].Password) - assert.Equal(t, "SECURITYADMIN", m["securityadmin"].Role) + assert.Equal(t, "TEST_ACCOUNT", *m["default"].Account) + assert.Equal(t, "TEST_USER", *m["default"].User) + assert.Equal(t, "abcd1234", *m["default"].Password) + assert.Equal(t, "ACCOUNTADMIN", *m["default"].Role) + assert.Equal(t, "TEST_ACCOUNT", *m["securityadmin"].Account) + assert.Equal(t, "TEST_USER", *m["securityadmin"].User) + assert.Equal(t, "abcd1234", *m["securityadmin"].Password) + assert.Equal(t, "SECURITYADMIN", *m["securityadmin"].Role) } func TestProfileConfig(t *testing.T) { - c := ` + unencryptedKey, encryptedKey := random.GenerateRSAPrivateKeyEncrypted(t, "password") + + c := fmt.Sprintf(` [securityadmin] - account='TEST_ACCOUNT' - user='TEST_USER' - password='abcd1234' - role='SECURITYADMIN' - ` - configPath := testFile(t, "config", []byte(c)) + account='account' + accountname='accountname' + organizationname='organizationname' + user='user' + password='password' + host='host' + warehouse='warehouse' + role='role' + clientip='1.1.1.1' + protocol='http' + passcode='passcode' + port=1 + 
passcodeinpassword=true
+	oktaurl='https://example.com'
+	clienttimeout=10
+	jwtclienttimeout=20
+	logintimeout=30
+	requesttimeout=40
+	jwtexpiretimeout=50
+	externalbrowsertimeout=60
+	maxretrycount=1
+	authenticator='jwt'
+	insecuremode=true
+	ocspfailopen=true
+	token='token'
+	keepsessionalive=true
+	privatekey="""%s"""
+	privatekeypassphrase='%s'
+	disabletelemetry=true
+	validatedefaultparameters=true
+	clientrequestmfatoken=true
+	clientstoretemporarycredential=true
+	tracing='tracing'
+	tmpdirpath='.'
+	disablequerycontextcache=true
+	includeretryreason=true
+	disableconsolelogin=true
+
+	[securityadmin.params]
+	foo = 'bar'
+	`, encryptedKey, "password")
+	configPath := testhelpers.TestFile(t, "config", []byte(c))

 	t.Run("with found profile", func(t *testing.T) {
 		t.Setenv(snowflakeenvs.ConfigPath, configPath)

 		config, err := ProfileConfig("securityadmin")
 		require.NoError(t, err)
-		assert.Equal(t, "TEST_ACCOUNT", config.Account)
-		assert.Equal(t, "TEST_USER", config.User)
-		assert.Equal(t, "abcd1234", config.Password)
-		assert.Equal(t, "SECURITYADMIN", config.Role)
+		require.NotNil(t, config.PrivateKey)
+
+		gotKey, err := x509.MarshalPKCS8PrivateKey(config.PrivateKey)
+		require.NoError(t, err)
+		gotUnencryptedKey := pem.EncodeToMemory(
+			&pem.Block{
+				Type: "PRIVATE KEY",
+				Bytes: gotKey,
+			},
+		)
+
+		assert.Equal(t, "organizationname-accountname", config.Account)
+		assert.Equal(t, "user", config.User)
+		assert.Equal(t, "password", config.Password)
+		assert.Equal(t, "warehouse", config.Warehouse)
+		assert.Equal(t, "role", config.Role)
+		assert.Equal(t, map[string]*string{"foo": Pointer("bar")}, config.Params)
+		assert.Equal(t, gosnowflake.ConfigBoolTrue, config.ValidateDefaultParameters)
+		assert.Equal(t, "1.1.1.1", config.ClientIP.String())
+		assert.Equal(t, "http", config.Protocol)
+		assert.Equal(t, "host", config.Host)
+		assert.Equal(t, 1, config.Port)
+		assert.Equal(t, gosnowflake.AuthTypeJwt, config.Authenticator)
+		assert.Equal(t, "passcode", config.Passcode)
+		assert.Equal(t, true, config.PasscodeInPassword)
+		assert.Equal(t, "https://example.com", config.OktaURL.String())
+		assert.Equal(t, 10*time.Second, config.ClientTimeout)
+		assert.Equal(t, 20*time.Second, config.JWTClientTimeout)
+		assert.Equal(t, 30*time.Second, config.LoginTimeout)
+		assert.Equal(t, 40*time.Second, config.RequestTimeout)
+		assert.Equal(t, 50*time.Second, config.JWTExpireTimeout)
+		assert.Equal(t, 60*time.Second, config.ExternalBrowserTimeout)
+		assert.Equal(t, 1, config.MaxRetryCount)
+		assert.Equal(t, true, config.InsecureMode)
+		assert.Equal(t, "token", config.Token)
+		assert.Equal(t, gosnowflake.OCSPFailOpenTrue, config.OCSPFailOpen)
+		assert.Equal(t, true, config.KeepSessionAlive)
+		assert.Equal(t, unencryptedKey, string(gotUnencryptedKey))
+		assert.Equal(t, true, config.DisableTelemetry)
+		assert.Equal(t, "tracing", config.Tracing)
+		assert.Equal(t, ".", config.TmpDirPath)
+		assert.Equal(t, gosnowflake.ConfigBoolTrue, config.ClientRequestMfaToken)
+		assert.Equal(t, gosnowflake.ConfigBoolTrue, config.ClientStoreTemporaryCredential)
+		assert.Equal(t, true, config.DisableQueryContextCache)
+		assert.Equal(t, gosnowflake.ConfigBoolTrue, config.IncludeRetryReason)
+		assert.Equal(t, gosnowflake.ConfigBoolTrue, config.DisableConsoleLogin)
 	})

 	t.Run("with not found profile", func(t *testing.T) {
@@ -70,71 +156,199 @@ func TestProfileConfig(t *testing.T) {
 	})

 	t.Run("with not found config", func(t *testing.T) {
-		dir, err :=
os.UserHomeDir() - require.NoError(t, err) - t.Setenv(snowflakeenvs.ConfigPath, dir) + filename := random.AlphaN(8) + t.Setenv(snowflakeenvs.ConfigPath, filename) config, err := ProfileConfig("orgadmin") - require.Error(t, err) + require.ErrorContains(t, err, fmt.Sprintf("could not load config file: open %s: no such file or directory", filename)) require.Nil(t, config) }) } func Test_MergeConfig(t *testing.T) { - createConfig := func(user string, password string, account string, region string) *gosnowflake.Config { - return &gosnowflake.Config{ - User: user, - Password: password, - Account: account, - Region: region, - } + oktaUrl1, err := url.Parse("https://example1.com") + require.NoError(t, err) + oktaUrl2, err := url.Parse("https://example2.com") + require.NoError(t, err) + + config1 := &gosnowflake.Config{ + Account: "account1", + User: "user1", + Password: "password1", + Warehouse: "warehouse1", + Role: "role1", + ValidateDefaultParameters: 1, + Params: map[string]*string{ + "foo": Pointer("1"), + }, + ClientIP: net.ParseIP("1.1.1.1"), + Protocol: "protocol1", + Host: "host1", + Port: 1, + Authenticator: 1, + Passcode: "passcode1", + PasscodeInPassword: false, + OktaURL: oktaUrl1, + LoginTimeout: 1, + RequestTimeout: 1, + JWTExpireTimeout: 1, + ClientTimeout: 1, + JWTClientTimeout: 1, + ExternalBrowserTimeout: 1, + MaxRetryCount: 1, + InsecureMode: false, + OCSPFailOpen: 1, + Token: "token1", + KeepSessionAlive: false, + PrivateKey: random.GenerateRSAPrivateKey(t), + DisableTelemetry: false, + Tracing: "tracing1", + TmpDirPath: "tmpdirpath1", + ClientRequestMfaToken: gosnowflake.ConfigBoolFalse, + ClientStoreTemporaryCredential: gosnowflake.ConfigBoolFalse, + DisableQueryContextCache: false, + IncludeRetryReason: 1, + DisableConsoleLogin: gosnowflake.ConfigBoolFalse, } - t.Run("merge configs", func(t *testing.T) { - config1 := createConfig("user", "password", "account", "") - config2 := createConfig("user2", "", "", "region2") + config2 := &gosnowflake.Config{ + Account: "account2", + User: "user2", + Password: "password2", + Warehouse: "warehouse2", + Role: "role2", + ValidateDefaultParameters: 1, + Params: map[string]*string{ + "foo": Pointer("2"), + }, + ClientIP: net.ParseIP("2.2.2.2"), + Protocol: "protocol2", + Host: "host2", + Port: 2, + Authenticator: 2, + Passcode: "passcode2", + PasscodeInPassword: true, + OktaURL: oktaUrl2, + LoginTimeout: 2, + RequestTimeout: 2, + JWTExpireTimeout: 2, + ClientTimeout: 2, + JWTClientTimeout: 2, + ExternalBrowserTimeout: 2, + MaxRetryCount: 2, + InsecureMode: true, + OCSPFailOpen: 2, + Token: "token2", + KeepSessionAlive: true, + PrivateKey: random.GenerateRSAPrivateKey(t), + DisableTelemetry: true, + Tracing: "tracing2", + TmpDirPath: "tmpdirpath2", + ClientRequestMfaToken: gosnowflake.ConfigBoolTrue, + ClientStoreTemporaryCredential: gosnowflake.ConfigBoolTrue, + DisableQueryContextCache: true, + IncludeRetryReason: gosnowflake.ConfigBoolTrue, + DisableConsoleLogin: gosnowflake.ConfigBoolTrue, + } - config := MergeConfig(config1, config2) + t.Run("base config empty", func(t *testing.T) { + config := MergeConfig(&gosnowflake.Config{}, config1) - require.Equal(t, "user", config.User) - require.Equal(t, "password", config.Password) - require.Equal(t, "account", config.Account) - require.Equal(t, "region2", config.Region) - require.Equal(t, "", config.Role) + require.Equal(t, config1, config) + }) + + t.Run("merge config empty", func(t *testing.T) { + config := MergeConfig(config1, &gosnowflake.Config{}) + + require.Equal(t, config1, config) 
+ }) + t.Run("both configs filled - base config takes precedence", func(t *testing.T) { + config := MergeConfig(config1, config2) require.Equal(t, config1, config) - require.Equal(t, "user", config1.User) - require.Equal(t, "password", config1.Password) - require.Equal(t, "account", config1.Account) - require.Equal(t, "region2", config1.Region) - require.Equal(t, "", config1.Role) }) +} - t.Run("merge configs inverted", func(t *testing.T) { - config1 := createConfig("user", "password", "account", "") - config2 := createConfig("user2", "", "", "region2") +func Test_toAuthenticationType(t *testing.T) { + type test struct { + input string + want gosnowflake.AuthType + } - config := MergeConfig(config2, config1) + valid := []test{ + // Case insensitive. + {input: "snowflake", want: gosnowflake.AuthTypeSnowflake}, - require.Equal(t, "user2", config.User) - require.Equal(t, "password", config.Password) - require.Equal(t, "account", config.Account) - require.Equal(t, "region2", config.Region) - require.Equal(t, "", config.Role) + // Supported Values. + {input: "SNOWFLAKE", want: gosnowflake.AuthTypeSnowflake}, + {input: "OAUTH", want: gosnowflake.AuthTypeOAuth}, + {input: "EXTERNALBROWSER", want: gosnowflake.AuthTypeExternalBrowser}, + {input: "OKTA", want: gosnowflake.AuthTypeOkta}, + {input: "JWT", want: gosnowflake.AuthTypeJwt}, + {input: "SNOWFLAKE_JWT", want: gosnowflake.AuthTypeJwt}, + {input: "TOKENACCESSOR", want: gosnowflake.AuthTypeTokenAccessor}, + {input: "USERNAMEPASSWORDMFA", want: gosnowflake.AuthTypeUsernamePasswordMFA}, + } - require.Equal(t, config2, config) - require.Equal(t, "user2", config2.User) - require.Equal(t, "password", config2.Password) - require.Equal(t, "account", config2.Account) - require.Equal(t, "region2", config2.Region) - require.Equal(t, "", config2.Role) - }) + invalid := []test{ + {input: ""}, + {input: "foo"}, + } + + for _, tc := range valid { + t.Run(tc.input, func(t *testing.T) { + got, err := ToAuthenticatorType(tc.input) + require.NoError(t, err) + require.Equal(t, tc.want, got) + }) + } + + for _, tc := range invalid { + t.Run(tc.input, func(t *testing.T) { + _, err := ToAuthenticatorType(tc.input) + require.Error(t, err) + }) + } } -func testFile(t *testing.T, filename string, dat []byte) string { - t.Helper() - path := filepath.Join(t.TempDir(), filename) - err := os.WriteFile(path, dat, 0o600) - require.NoError(t, err) - return path +func Test_Provider_toDriverLogLevel(t *testing.T) { + type test struct { + input string + want DriverLogLevel + } + + valid := []test{ + // Case insensitive. + {input: "WARNING", want: DriverLogLevelWarning}, + + // Supported Values. 
+ {input: "trace", want: DriverLogLevelTrace}, + {input: "debug", want: DriverLogLevelDebug}, + {input: "info", want: DriverLogLevelInfo}, + {input: "print", want: DriverLogLevelPrint}, + {input: "warning", want: DriverLogLevelWarning}, + {input: "error", want: DriverLogLevelError}, + {input: "fatal", want: DriverLogLevelFatal}, + {input: "panic", want: DriverLogLevelPanic}, + } + + invalid := []test{ + {input: ""}, + {input: "foo"}, + } + + for _, tc := range valid { + t.Run(tc.input, func(t *testing.T) { + got, err := ToDriverLogLevel(tc.input) + require.NoError(t, err) + require.Equal(t, tc.want, got) + }) + } + + for _, tc := range invalid { + t.Run(tc.input, func(t *testing.T) { + _, err := ToDriverLogLevel(tc.input) + require.Error(t, err) + }) + } } diff --git a/pkg/sdk/connections_def.go b/pkg/sdk/connections_def.go index 39f583aff2..cad11042c9 100644 --- a/pkg/sdk/connections_def.go +++ b/pkg/sdk/connections_def.go @@ -20,7 +20,7 @@ var ConnectionDef = g.NewInterface( OptionalIdentifier( "AsReplicaOf", g.KindOfT[ExternalObjectIdentifier](), - g.IdentifierOptions().Required().SQL("AS REPLICA OF")). + g.IdentifierOptions().SQL("AS REPLICA OF")). OptionalComment(). WithValidation(g.ValidIdentifier, "name"). WithValidation(g.ValidIdentifierIfSet, "AsReplicaOf"), @@ -34,7 +34,8 @@ var ConnectionDef = g.NewInterface( OptionalQueryStructField( "EnableConnectionFailover", g.NewQueryStruct("EnableConnectionFailover"). - List("ToAccounts", "AccountIdentifier", g.ListOptions().NoParentheses()), + List("ToAccounts", "AccountIdentifier", g.ListOptions().NoParentheses().Required()). + WithValidation(g.AtLeastOneValueSet, "ToAccounts"), g.KeywordOptions().SQL("ENABLE FAILOVER TO ACCOUNTS"), ). OptionalQueryStructField( @@ -43,7 +44,7 @@ var ConnectionDef = g.NewInterface( OptionalQueryStructField( "ToAccounts", g.NewQueryStruct("ToAccounts"). - List("Accounts", "AccountIdentifier", g.ListOptions().NoParentheses()), + List("Accounts", "AccountIdentifier", g.ListOptions().NoParentheses().Required()), g.KeywordOptions().SQL("TO ACCOUNTS"), ), g.KeywordOptions().SQL("DISABLE FAILOVER"), @@ -51,14 +52,14 @@ var ConnectionDef = g.NewInterface( OptionalSQL("PRIMARY"). OptionalQueryStructField( "Set", - g.NewQueryStruct("Set"). + g.NewQueryStruct("ConnectionSet"). OptionalComment(). WithValidation(g.AtLeastOneValueSet, "Comment"), g.KeywordOptions().SQL("SET"), ). OptionalQueryStructField( "Unset", - g.NewQueryStruct("Unset"). + g.NewQueryStruct("ConnectionUnset"). OptionalSQL("COMMENT"). WithValidation(g.AtLeastOneValueSet, "Comment"), g.KeywordOptions().SQL("UNSET"), @@ -95,8 +96,8 @@ var ConnectionDef = g.NewInterface( Text("Name"). OptionalText("Comment"). Bool("IsPrimary"). - Text("Primary"). - Field("FailoverAllowedToAccounts", "[]string"). + Field("Primary", "ExternalObjectIdentifier"). + Field("FailoverAllowedToAccounts", "[]AccountIdentifier"). Text("ConnectionUrl"). Text("OrganizationName"). 
Text("AccountLocator"), diff --git a/pkg/sdk/connections_dto_builders_gen.go b/pkg/sdk/connections_dto_builders_gen.go index eb226b05a2..029d361d4b 100644 --- a/pkg/sdk/connections_dto_builders_gen.go +++ b/pkg/sdk/connections_dto_builders_gen.go @@ -2,6 +2,8 @@ package sdk +import () + func NewCreateConnectionRequest( name AccountObjectIdentifier, ) *CreateConnectionRequest { @@ -53,23 +55,22 @@ func (s *AlterConnectionRequest) WithPrimary(Primary bool) *AlterConnectionReque return s } -func (s *AlterConnectionRequest) WithSet(Set SetRequest) *AlterConnectionRequest { +func (s *AlterConnectionRequest) WithSet(Set ConnectionSetRequest) *AlterConnectionRequest { s.Set = &Set return s } -func (s *AlterConnectionRequest) WithUnset(Unset UnsetRequest) *AlterConnectionRequest { +func (s *AlterConnectionRequest) WithUnset(Unset ConnectionUnsetRequest) *AlterConnectionRequest { s.Unset = &Unset return s } -func NewEnableConnectionFailoverRequest() *EnableConnectionFailoverRequest { - return &EnableConnectionFailoverRequest{} -} - -func (s *EnableConnectionFailoverRequest) WithToAccounts(ToAccounts []AccountIdentifier) *EnableConnectionFailoverRequest { +func NewEnableConnectionFailoverRequest( + ToAccounts []AccountIdentifier, +) *EnableConnectionFailoverRequest { + s := EnableConnectionFailoverRequest{} s.ToAccounts = ToAccounts - return s + return &s } func NewDisableConnectionFailoverRequest() *DisableConnectionFailoverRequest { @@ -81,29 +82,28 @@ func (s *DisableConnectionFailoverRequest) WithToAccounts(ToAccounts ToAccountsR return s } -func NewToAccountsRequest() *ToAccountsRequest { - return &ToAccountsRequest{} -} - -func (s *ToAccountsRequest) WithAccounts(Accounts []AccountIdentifier) *ToAccountsRequest { +func NewToAccountsRequest( + Accounts []AccountIdentifier, +) *ToAccountsRequest { + s := ToAccountsRequest{} s.Accounts = Accounts - return s + return &s } -func NewSetRequest() *SetRequest { - return &SetRequest{} +func NewConnectionSetRequest() *ConnectionSetRequest { + return &ConnectionSetRequest{} } -func (s *SetRequest) WithComment(Comment string) *SetRequest { +func (s *ConnectionSetRequest) WithComment(Comment string) *ConnectionSetRequest { s.Comment = &Comment return s } -func NewUnsetRequest() *UnsetRequest { - return &UnsetRequest{} +func NewConnectionUnsetRequest() *ConnectionUnsetRequest { + return &ConnectionUnsetRequest{} } -func (s *UnsetRequest) WithComment(Comment bool) *UnsetRequest { +func (s *ConnectionUnsetRequest) WithComment(Comment bool) *ConnectionUnsetRequest { s.Comment = &Comment return s } diff --git a/pkg/sdk/connections_dto_gen.go b/pkg/sdk/connections_dto_gen.go index 1323bf0af3..c3daa0a107 100644 --- a/pkg/sdk/connections_dto_gen.go +++ b/pkg/sdk/connections_dto_gen.go @@ -22,12 +22,12 @@ type AlterConnectionRequest struct { EnableConnectionFailover *EnableConnectionFailoverRequest DisableConnectionFailover *DisableConnectionFailoverRequest Primary *bool - Set *SetRequest - Unset *UnsetRequest + Set *ConnectionSetRequest + Unset *ConnectionUnsetRequest } type EnableConnectionFailoverRequest struct { - ToAccounts []AccountIdentifier + ToAccounts []AccountIdentifier // required } type DisableConnectionFailoverRequest struct { @@ -35,14 +35,14 @@ type DisableConnectionFailoverRequest struct { } type ToAccountsRequest struct { - Accounts []AccountIdentifier + Accounts []AccountIdentifier // required } -type SetRequest struct { +type ConnectionSetRequest struct { Comment *string } -type UnsetRequest struct { +type ConnectionUnsetRequest struct { Comment 
*bool } diff --git a/pkg/sdk/connections_gen.go b/pkg/sdk/connections_gen.go index 80885df87b..692f38f2f5 100644 --- a/pkg/sdk/connections_gen.go +++ b/pkg/sdk/connections_gen.go @@ -33,8 +33,8 @@ type AlterConnectionOptions struct { EnableConnectionFailover *EnableConnectionFailover `ddl:"keyword" sql:"ENABLE FAILOVER TO ACCOUNTS"` DisableConnectionFailover *DisableConnectionFailover `ddl:"keyword" sql:"DISABLE FAILOVER"` Primary *bool `ddl:"keyword" sql:"PRIMARY"` - Set *Set `ddl:"keyword" sql:"SET"` - Unset *Unset `ddl:"keyword" sql:"UNSET"` + Set *ConnectionSet `ddl:"keyword" sql:"SET"` + Unset *ConnectionUnset `ddl:"keyword" sql:"UNSET"` } type EnableConnectionFailover struct { ToAccounts []AccountIdentifier `ddl:"list,no_parentheses"` @@ -45,10 +45,10 @@ type DisableConnectionFailover struct { type ToAccounts struct { Accounts []AccountIdentifier `ddl:"list,no_parentheses"` } -type Set struct { +type ConnectionSet struct { Comment *string `ddl:"parameter,single_quotes" sql:"COMMENT"` } -type Unset struct { +type ConnectionUnset struct { Comment *bool `ddl:"keyword" sql:"COMMENT"` } @@ -88,8 +88,8 @@ type Connection struct { Name string Comment *string IsPrimary bool - Primary string - FailoverAllowedToAccounts []string + Primary ExternalObjectIdentifier + FailoverAllowedToAccounts []AccountIdentifier ConnectionUrl string OrganizationName string AccountLocator string diff --git a/pkg/sdk/connections_gen_test.go b/pkg/sdk/connections_gen_test.go index 8ee6f7441a..b0b48b8934 100644 --- a/pkg/sdk/connections_gen_test.go +++ b/pkg/sdk/connections_gen_test.go @@ -1,6 +1,8 @@ package sdk -import "testing" +import ( + "testing" +) func TestConnections_Create(t *testing.T) { id := randomAccountObjectIdentifier() @@ -78,20 +80,26 @@ func TestConnections_Alter(t *testing.T) { opts.EnableConnectionFailover = &EnableConnectionFailover{} opts.DisableConnectionFailover = &DisableConnectionFailover{} opts.Primary = Bool(true) - opts.Set = &Set{} - opts.Unset = &Unset{} + opts.Set = &ConnectionSet{} + opts.Unset = &ConnectionUnset{} assertOptsInvalidJoinedErrors(t, opts, errExactlyOneOf("AlterConnectionOptions", "EnableConnectionFailover", "DisableConnectionFailover", "Primary", "Set", "Unset")) }) + t.Run("validation: at least one of the fields [opts.EnableConnectionFailover.ToAccounts] should be set", func(t *testing.T) { + opts := defaultOpts() + opts.EnableConnectionFailover = &EnableConnectionFailover{} + assertOptsInvalidJoinedErrors(t, opts, errAtLeastOneOf("AlterConnectionOptions.EnableConnectionFailover", "ToAccounts")) + }) + t.Run("validation: at least one of the fields [opts.Set.Comment] should be set", func(t *testing.T) { opts := defaultOpts() - opts.Set = &Set{} + opts.Set = &ConnectionSet{} assertOptsInvalidJoinedErrors(t, opts, errAtLeastOneOf("AlterConnectionOptions.Set", "Comment")) }) t.Run("validation: at least one of the fields [opts.Unset.Comment] should be set", func(t *testing.T) { opts := defaultOpts() - opts.Unset = &Unset{} + opts.Unset = &ConnectionUnset{} assertOptsInvalidJoinedErrors(t, opts, errAtLeastOneOf("AlterConnectionOptions.Unset", "Comment")) }) @@ -128,13 +136,13 @@ func TestConnections_Alter(t *testing.T) { t.Run("set comment", func(t *testing.T) { opts := defaultOpts() - opts.Set = &Set{Comment: String("test comment")} + opts.Set = &ConnectionSet{Comment: String("test comment")} assertOptsValidAndSQLEquals(t, opts, "ALTER CONNECTION %s SET COMMENT = 'test comment'", id.FullyQualifiedName()) }) t.Run("unset comment", func(t *testing.T) { opts := defaultOpts() 
- opts.Unset = &Unset{Comment: Bool(true)} + opts.Unset = &ConnectionUnset{Comment: Bool(true)} assertOptsValidAndSQLEquals(t, opts, "ALTER CONNECTION %s UNSET COMMENT", id.FullyQualifiedName()) }) } diff --git a/pkg/sdk/connections_impl_gen.go b/pkg/sdk/connections_impl_gen.go index 325141116e..a1fb403c4c 100644 --- a/pkg/sdk/connections_impl_gen.go +++ b/pkg/sdk/connections_impl_gen.go @@ -2,6 +2,7 @@ package sdk import ( "context" + "log" "strconv" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/internal/collections" @@ -53,14 +54,9 @@ func (r *CreateConnectionRequest) toOpts() *CreateConnectionOptions { opts := &CreateConnectionOptions{ IfNotExists: r.IfNotExists, name: r.name, - - Comment: r.Comment, - } - - if r.AsReplicaOf != nil { - opts.AsReplicaOf = r.AsReplicaOf + AsReplicaOf: r.AsReplicaOf, + Comment: r.Comment, } - return opts } @@ -88,13 +84,13 @@ func (r *AlterConnectionRequest) toOpts() *AlterConnectionOptions { } if r.Set != nil { - opts.Set = &Set{ + opts.Set = &ConnectionSet{ Comment: r.Set.Comment, } } if r.Unset != nil { - opts.Unset = &Unset{ + opts.Unset = &ConnectionUnset{ Comment: r.Unset.Comment, } } @@ -119,18 +115,35 @@ func (r *ShowConnectionRequest) toOpts() *ShowConnectionOptions { func (r connectionRow) convert() *Connection { c := &Connection{ - SnowflakeRegion: r.SnowflakeRegion, - CreatedOn: r.CreatedOn, - AccountName: r.AccountName, - Name: r.Name, - Primary: r.Primary, - FailoverAllowedToAccounts: ParseCommaSeparatedStringArray(r.FailoverAllowedToAccounts, false), - ConnectionUrl: r.ConnectionUrl, - OrganizationName: r.OrganizationName, - AccountLocator: r.AccountLocator, + SnowflakeRegion: r.SnowflakeRegion, + CreatedOn: r.CreatedOn, + AccountName: r.AccountName, + Name: r.Name, + ConnectionUrl: r.ConnectionUrl, + OrganizationName: r.OrganizationName, + AccountLocator: r.AccountLocator, + } + + parsedIsPrimary, err := strconv.ParseBool(r.IsPrimary) + if err != nil { + log.Printf("unable to parse bool is_primary for connection: %v, err = %s", r.IsPrimary, err) + } else { + c.IsPrimary = parsedIsPrimary } - b, _ := strconv.ParseBool(r.IsPrimary) - c.IsPrimary = b + + primaryExternalId, err := ParseExternalObjectIdentifier(r.Primary) + if err != nil { + log.Printf("unable to parse primary connection external identifier: %v, err = %s", r.Primary, err) + } else { + c.Primary = primaryExternalId + } + + if allowedToAccounts, err := ParseCommaSeparatedAccountIdentifierArray(r.FailoverAllowedToAccounts); err != nil { + log.Printf("unable to parse account identifier list for 'enable failover to accounts': %s, err = %v", r.FailoverAllowedToAccounts, err) + } else { + c.FailoverAllowedToAccounts = allowedToAccounts + } + if r.Comment.Valid { c.Comment = String(r.Comment.String) } diff --git a/pkg/sdk/connections_validations_gen.go b/pkg/sdk/connections_validations_gen.go index df6b4e1d0f..f96dad1dd0 100644 --- a/pkg/sdk/connections_validations_gen.go +++ b/pkg/sdk/connections_validations_gen.go @@ -29,6 +29,11 @@ func (opts *AlterConnectionOptions) validate() error { if !exactlyOneValueSet(opts.EnableConnectionFailover, opts.DisableConnectionFailover, opts.Primary, opts.Set, opts.Unset) { errs = append(errs, errExactlyOneOf("AlterConnectionOptions", "EnableConnectionFailover", "DisableConnectionFailover", "Primary", "Set", "Unset")) } + if valueSet(opts.EnableConnectionFailover) { + if !anyValueSet(opts.EnableConnectionFailover.ToAccounts) { + errs = append(errs, errAtLeastOneOf("AlterConnectionOptions.EnableConnectionFailover", "ToAccounts")) + } + } 
if valueSet(opts.Set) { if !anyValueSet(opts.Set.Comment) { errs = append(errs, errAtLeastOneOf("AlterConnectionOptions.Set", "Comment")) diff --git a/pkg/sdk/identifier_parsers.go b/pkg/sdk/identifier_parsers.go index 709e473990..291d861b9e 100644 --- a/pkg/sdk/identifier_parsers.go +++ b/pkg/sdk/identifier_parsers.go @@ -41,15 +41,11 @@ func ParseIdentifierString(identifier string) ([]string, error) { if strings.Contains(part, `"`) { return nil, fmt.Errorf(`unable to parse identifier: %s, currently identifiers containing double quotes are not supported in the provider`, identifier) } - // TODO(SNOW-1571674): Remove the validation - if strings.ContainsAny(part, `()`) { - return nil, fmt.Errorf(`unable to parse identifier: %s, currently identifiers containing opening and closing parentheses '()' are not supported in the provider`, identifier) - } } return parts, nil } -func parseIdentifier[T ObjectIdentifier](identifier string, expectedParts int, expectedFormat string, constructFromParts func(parts []string) T) (T, error) { +func parseIdentifier[T AccountIdentifier | AccountObjectIdentifier | DatabaseObjectIdentifier | ExternalObjectIdentifier | SchemaObjectIdentifier | TableColumnIdentifier](identifier string, expectedParts int, expectedFormat string, constructFromParts func(parts []string) T) (T, error) { var emptyIdentifier T parts, err := ParseIdentifierString(identifier) if err != nil { diff --git a/pkg/sdk/identifier_parsers_test.go b/pkg/sdk/identifier_parsers_test.go index 375d3178a3..05d6b58e96 100644 --- a/pkg/sdk/identifier_parsers_test.go +++ b/pkg/sdk/identifier_parsers_test.go @@ -80,25 +80,34 @@ func Test_ParseIdentifierString(t *testing.T) { require.ErrorContains(t, err, `unable to parse identifier: "ab""c".def, currently identifiers containing double quotes are not supported in the provider`) }) - t.Run("returns error when identifier contains opening parenthesis", func(t *testing.T) { + t.Run("returns parts correctly when identifier contains opening parenthesis", func(t *testing.T) { input := `"ab(c".def` - _, err := ParseIdentifierString(input) + expected := []string{"ab(c", "def"} + + parts, err := ParseIdentifierString(input) - require.ErrorContains(t, err, `unable to parse identifier: "ab(c".def, currently identifiers containing opening and closing parentheses '()' are not supported in the provider`) + require.NoError(t, err) + containsAll(t, parts, expected) }) - t.Run("returns error when identifier contains closing parenthesis", func(t *testing.T) { + t.Run("returns parts correctly when identifier contains closing parenthesis", func(t *testing.T) { input := `"ab)c".def` - _, err := ParseIdentifierString(input) + expected := []string{"ab)c", "def"} + + parts, err := ParseIdentifierString(input) - require.ErrorContains(t, err, `unable to parse identifier: "ab)c".def, currently identifiers containing opening and closing parentheses '()' are not supported in the provider`) + require.NoError(t, err) + containsAll(t, parts, expected) }) - t.Run("returns error when identifier contains opening and closing parentheses", func(t *testing.T) { + t.Run("returns parts correctly when identifier contains opening and closing parentheses", func(t *testing.T) { input := `"ab()c".def` - _, err := ParseIdentifierString(input) + expected := []string{"ab()c", "def"} - require.ErrorContains(t, err, `unable to parse identifier: "ab()c".def, currently identifiers containing opening and closing parentheses '()' are not supported in the provider`) + parts, err := ParseIdentifierString(input) + 
+ require.NoError(t, err) + containsAll(t, parts, expected) }) t.Run("returns parts correctly with dots inside", func(t *testing.T) { diff --git a/pkg/sdk/internal/client/client_test.go b/pkg/sdk/internal/client/client_test.go index 982e581856..c410012a27 100644 --- a/pkg/sdk/internal/client/client_test.go +++ b/pkg/sdk/internal/client/client_test.go @@ -7,7 +7,6 @@ import ( "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/internal/snowflakeenvs" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk" - "github.com/snowflakedb/gosnowflake" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) @@ -22,6 +21,7 @@ func TestNewClientWithoutInstrumentedSQL(t *testing.T) { t.Run("registers snowflake-not-instrumented driver", func(t *testing.T) { config := sdk.DefaultConfig() + config.Tracing = string(sdk.DriverLogLevelDebug) _, err := sdk.NewClient(config) require.NoError(t, err) @@ -29,17 +29,3 @@ func TestNewClientWithoutInstrumentedSQL(t *testing.T) { assert.Contains(t, sql.Drivers(), "snowflake") }) } - -func TestNewClientWithDebugLoggingSetFromEnv(t *testing.T) { - t.Run("set gosnowflake driver logging to debug", func(t *testing.T) { - if os.Getenv(snowflakeenvs.GosnowflakeLogLevel) == "" { - t.Skipf("Skipping TestNewClientWithDebugLoggingSet, because %s is not set", snowflakeenvs.GosnowflakeLogLevel) - } - - config := sdk.DefaultConfig() - _, err := sdk.NewClient(config) - require.NoError(t, err) - - assert.Equal(t, "debug", gosnowflake.GetLogger().GetLogLevel()) - }) -} diff --git a/pkg/sdk/parsers.go b/pkg/sdk/parsers.go index 09952d9381..9495058b02 100644 --- a/pkg/sdk/parsers.go +++ b/pkg/sdk/parsers.go @@ -52,3 +52,18 @@ func ParseCommaSeparatedSchemaObjectIdentifierArray(value string) ([]SchemaObjec } return ids, nil } + +// ParseCommaSeparatedAccountIdentifierArray can be used to parse Snowflake output containing a list of account identifiers +// in the format of ["organization1.account1", "organization2.account2", ...], +func ParseCommaSeparatedAccountIdentifierArray(value string) ([]AccountIdentifier, error) { + idsRaw := ParseCommaSeparatedStringArray(value, false) + ids := make([]AccountIdentifier, len(idsRaw)) + for i := range idsRaw { + id, err := ParseAccountIdentifier(idsRaw[i]) + if err != nil { + return nil, err + } + ids[i] = id + } + return ids, nil +} diff --git a/pkg/sdk/parsers_test.go b/pkg/sdk/parsers_test.go index 314ef9892b..6ee19ca2ad 100644 --- a/pkg/sdk/parsers_test.go +++ b/pkg/sdk/parsers_test.go @@ -203,3 +203,101 @@ func TestParseCommaSeparatedSchemaObjectIdentifierArray_Invalid(t *testing.T) { }) } } + +func TestParseCommaSeparatedAccountIdentifierArray(t *testing.T) { + testCases := []struct { + Name string + Value string + Result []AccountIdentifier + }{ + { + Name: "empty list", + Value: "[]", + Result: []AccountIdentifier{}, + }, + { + Name: "empty string", + Value: "", + Result: []AccountIdentifier{}, + }, + { + Name: "one element in list", + Value: "[A.B]", + Result: []AccountIdentifier{NewAccountIdentifier("A", "B")}, + }, + { + Name: "one element in list - with mixed cases", + Value: `[A."b"]`, + Result: []AccountIdentifier{NewAccountIdentifier("A", "b")}, + }, + { + Name: "multiple elements in list", + Value: "[A.B, C.D]", + Result: []AccountIdentifier{NewAccountIdentifier("A", "B"), NewAccountIdentifier("C", "D")}, + }, + { + Name: "multiple elements in list - with mixed cases", + Value: `[A."b", "c"."d"]`, + Result: []AccountIdentifier{NewAccountIdentifier("A", "b"), NewAccountIdentifier("c", "d")}, + }, 
+		{
+			Name: "multiple elements in list - packed",
+			Value: "[A.B,C.D]",
+			Result: []AccountIdentifier{NewAccountIdentifier("A", "B"), NewAccountIdentifier("C", "D")},
+		},
+		{
+			Name: "multiple elements in list - additional spaces",
+			Value: "[A.B, C.D]",
+			Result: []AccountIdentifier{NewAccountIdentifier("A", "B"), NewAccountIdentifier("C", "D")},
+		},
+		{
+			Name: "list without brackets",
+			Value: "A.B, C.D",
+			Result: []AccountIdentifier{NewAccountIdentifier("A", "B"), NewAccountIdentifier("C", "D")},
+		},
+	}
+
+	for _, tc := range testCases {
+		t.Run(tc.Name, func(t *testing.T) {
+			ids, err := ParseCommaSeparatedAccountIdentifierArray(tc.Value)
+			require.NoError(t, err)
+			require.Equal(t, tc.Result, ids)
+		})
+	}
+}
+
+func TestParseCommaSeparatedAccountIdentifierArray_Invalid(t *testing.T) {
+	testCases := []struct {
+		Name string
+		Value string
+		Error string
+	}{
+		{
+			Name: "invalid quotes",
+			Value: `["a.b]`,
+			Error: "unable to read identifier: \"a.b, err = parse error on line 1, column 5: extraneous or missing \" in quoted-field",
+		},
+		{
+			Name: "missing parts",
+			Value: "[a.b, a]",
+			Error: "unexpected number of parts 1 in identifier a, expected 2 in a form of \".\"",
+		},
+		{
+			Name: "too many parts",
+			Value: "[a.b, a.b.c]",
+			Error: "unexpected number of parts 3 in identifier a.b.c, expected 2 in a form of \".\"",
+		},
+		{
+			Name: "missing parts - empty id",
+			Value: "[a.b, ]",
+			Error: "incompatible identifier",
+		},
+	}
+
+	for _, tc := range testCases {
+		t.Run(tc.Name, func(t *testing.T) {
+			_, err := ParseCommaSeparatedAccountIdentifierArray(tc.Value)
+			require.ErrorContains(t, err, tc.Error)
+		})
+	}
+}
diff --git a/pkg/sdk/privileges.go b/pkg/sdk/privileges.go
index 8b2d792ee6..7f8cd38f42 100644
--- a/pkg/sdk/privileges.go
+++ b/pkg/sdk/privileges.go
@@ -48,6 +48,7 @@ const (
 	GlobalPrivilegeManageOrganizationSupportCases GlobalPrivilege = "MANAGE ORGANIZATION SUPPORT CASES"
 	GlobalPrivilegeManageUserSupportCases GlobalPrivilege = "MANAGE USER SUPPORT CASES"
 	GlobalPrivilegeManageWarehouses GlobalPrivilege = "MANAGE WAREHOUSES"
+	GlobalPrivilegeManageShareTarget GlobalPrivilege = "MANAGE SHARE TARGET"
 	GlobalPrivilegeModifyLogLevel GlobalPrivilege = "MODIFY LOG LEVEL"
 	GlobalPrivilegeModifyTraceLevel GlobalPrivilege = "MODIFY TRACE LEVEL"
diff --git a/pkg/sdk/security_integrations_dto_gen.go b/pkg/sdk/security_integrations_dto_gen.go
index 43b2d73aae..db894c742b 100644
--- a/pkg/sdk/security_integrations_dto_gen.go
+++ b/pkg/sdk/security_integrations_dto_gen.go
@@ -61,6 +61,10 @@ type CreateApiAuthenticationWithAuthorizationCodeGrantFlowSecurityIntegrationReq
 	Comment *string
 }

+func (r *CreateApiAuthenticationWithAuthorizationCodeGrantFlowSecurityIntegrationRequest) GetName() AccountObjectIdentifier {
+	return r.name
+}
+
 type CreateApiAuthenticationWithJwtBearerFlowSecurityIntegrationRequest struct {
 	OrReplace *bool
 	IfNotExists *bool
@@ -78,6 +82,10 @@ type CreateApiAuthenticationWithJwtBearerFlowSecurityIntegrationRequest struct {
 	Comment *string
 }

+func (r *CreateApiAuthenticationWithJwtBearerFlowSecurityIntegrationRequest) GetName() AccountObjectIdentifier {
+	return r.name
+}
+
 type CreateExternalOauthSecurityIntegrationRequest struct {
 	OrReplace *bool
 	IfNotExists *bool
@@ -99,6 +107,10 @@ type CreateExternalOauthSecurityIntegrationRequest struct {
 	Comment *string
 }

+func (r *CreateExternalOauthSecurityIntegrationRequest) GetName() AccountObjectIdentifier {
+	return r.name
+}
+
 type BlockedRolesListRequest struct {
+	BlockedRolesList []AccountObjectIdentifier //
required } diff --git a/pkg/sdk/storage_integration_gen.go b/pkg/sdk/storage_integration_gen.go index 285f35092d..82b02c4547 100644 --- a/pkg/sdk/storage_integration_gen.go +++ b/pkg/sdk/storage_integration_gen.go @@ -121,6 +121,10 @@ type StorageIntegration struct { CreatedOn time.Time } +func (v *StorageIntegration) ID() AccountObjectIdentifier { + return NewAccountObjectIdentifier(v.Name) +} + // DescribeStorageIntegrationOptions is based on https://docs.snowflake.com/en/sql-reference/sql/desc-integration. type DescribeStorageIntegrationOptions struct { describe bool `ddl:"static" sql:"DESCRIBE"` diff --git a/pkg/sdk/sweepers_test.go b/pkg/sdk/sweepers_test.go index 49ce33c9dc..a9269a2337 100644 --- a/pkg/sdk/sweepers_test.go +++ b/pkg/sdk/sweepers_test.go @@ -104,6 +104,9 @@ func Test_Sweeper_NukeStaleObjects(t *testing.T) { }) // TODO [SNOW-867247]: nuke stale objects (e.g. created more than 2 weeks ago) + + // TODO [SNOW-867247]: nuke external oauth integrations because of errors like + // Error: 003524 (22023): SQL execution error: An integration with the given issuer already exists for this account } // TODO [SNOW-867247]: generalize nuke methods (sweepers too) diff --git a/pkg/sdk/system_functions.go b/pkg/sdk/system_functions.go index 4e179ee9ba..6777a013a4 100644 --- a/pkg/sdk/system_functions.go +++ b/pkg/sdk/system_functions.go @@ -27,13 +27,16 @@ type systemFunctions struct { } func (c *systemFunctions) GetTag(ctx context.Context, tagID ObjectIdentifier, objectID ObjectIdentifier, objectType ObjectType) (string, error) { - objectType = normalizeGetTagObjectType(objectType) + objectType, err := normalizeGetTagObjectType(objectType) + if err != nil { + return "", err + } s := &struct { Tag string `db:"TAG"` }{} sql := fmt.Sprintf(`SELECT SYSTEM$GET_TAG('%s', '%s', '%v') AS "TAG"`, tagID.FullyQualifiedName(), objectID.FullyQualifiedName(), objectType) - err := c.client.queryOne(ctx, s, sql) + err = c.client.queryOne(ctx, s, sql) if err != nil { return "", err } @@ -43,15 +46,18 @@ func (c *systemFunctions) GetTag(ctx context.Context, tagID ObjectIdentifier, ob // normalize object types for some values because of errors like below // SQL compilation error: Invalid value VIEW for argument OBJECT_TYPE. Please use object type TABLE for all kinds of table-like objects. 
// TODO [SNOW-1022645]: discuss how we handle situation like this in the SDK -func normalizeGetTagObjectType(objectType ObjectType) ObjectType { - if slices.Contains([]ObjectType{ObjectTypeView, ObjectTypeMaterializedView, ObjectTypeExternalTable}, objectType) { - return ObjectTypeTable +func normalizeGetTagObjectType(objectType ObjectType) (ObjectType, error) { + if !canBeAssociatedWithTag(objectType) { + return "", fmt.Errorf("tagging for object type %s is not supported", objectType) + } + if slices.Contains([]ObjectType{ObjectTypeView, ObjectTypeMaterializedView, ObjectTypeExternalTable, ObjectTypeEventTable}, objectType) { + return ObjectTypeTable, nil } if slices.Contains([]ObjectType{ObjectTypeExternalFunction}, objectType) { - return ObjectTypeFunction + return ObjectTypeFunction, nil } - return objectType + return objectType, nil } type PipeExecutionState string diff --git a/pkg/sdk/tag_association_validations.go b/pkg/sdk/tag_association_validations.go index b7f5c03b03..cb4c22f818 100644 --- a/pkg/sdk/tag_association_validations.go +++ b/pkg/sdk/tag_association_validations.go @@ -1,5 +1,7 @@ package sdk +import "slices" + var ( // based on https://docs.snowflake.com/en/user-guide/object-tagging.html#supported-objects TagAssociationAllowedObjectTypes = []ObjectType{ @@ -71,6 +73,10 @@ var ( TagAssociationAllowedObjectTypesString = make([]string, len(TagAssociationAllowedObjectTypes)) ) +func canBeAssociatedWithTag(o ObjectType) bool { + return slices.Contains(TagAssociationAllowedObjectTypes, o) +} + func init() { for i, v := range TagAssociationAllowedObjectTypes { TagAssociationAllowedObjectTypesString[i] = v.String() diff --git a/pkg/sdk/tags.go b/pkg/sdk/tags.go index 8e0c87fc29..0fb31322ce 100644 --- a/pkg/sdk/tags.go +++ b/pkg/sdk/tags.go @@ -15,6 +15,8 @@ type Tags interface { Undrop(ctx context.Context, request *UndropTagRequest) error Set(ctx context.Context, request *SetTagRequest) error Unset(ctx context.Context, request *UnsetTagRequest) error + SetOnCurrentAccount(ctx context.Context, request *SetTagOnCurrentAccountRequest) error + UnsetOnCurrentAccount(ctx context.Context, request *UnsetTagOnCurrentAccountRequest) error } type setTagOptions struct { diff --git a/pkg/sdk/tags_dto.go b/pkg/sdk/tags_dto.go index f79a6589a2..4aa1a50df4 100644 --- a/pkg/sdk/tags_dto.go +++ b/pkg/sdk/tags_dto.go @@ -25,6 +25,14 @@ type UnsetTagRequest struct { UnsetTags []ObjectIdentifier } +type SetTagOnCurrentAccountRequest struct { + SetTags []TagAssociation +} + +type UnsetTagOnCurrentAccountRequest struct { + UnsetTags []ObjectIdentifier +} + type CreateTagRequest struct { orReplace *bool ifNotExists *bool diff --git a/pkg/sdk/tags_dto_builders.go b/pkg/sdk/tags_dto_builders.go index b9aaa38e7a..4e8a155989 100644 --- a/pkg/sdk/tags_dto_builders.go +++ b/pkg/sdk/tags_dto_builders.go @@ -24,6 +24,24 @@ func (s *UnsetTagRequest) WithUnsetTags(tags []ObjectIdentifier) *UnsetTagReques return s } +func NewSetTagOnCurrentAccountRequest() *SetTagOnCurrentAccountRequest { + return &SetTagOnCurrentAccountRequest{} +} + +func (s *SetTagOnCurrentAccountRequest) WithSetTags(tags []TagAssociation) *SetTagOnCurrentAccountRequest { + s.SetTags = tags + return s +} + +func NewUnsetTagOnCurrentAccountRequest() *UnsetTagOnCurrentAccountRequest { + return &UnsetTagOnCurrentAccountRequest{} +} + +func (s *UnsetTagOnCurrentAccountRequest) WithUnsetTags(tags []ObjectIdentifier) *UnsetTagOnCurrentAccountRequest { + s.UnsetTags = tags + return s +} + func NewCreateTagRequest(name SchemaObjectIdentifier) 
*CreateTagRequest { s := CreateTagRequest{} s.name = name diff --git a/pkg/sdk/tags_impl.go b/pkg/sdk/tags_impl.go index 529dc851eb..9813b03803 100644 --- a/pkg/sdk/tags_impl.go +++ b/pkg/sdk/tags_impl.go @@ -57,17 +57,41 @@ func (v *tags) Undrop(ctx context.Context, request *UndropTagRequest) error { } func (v *tags) Set(ctx context.Context, request *SetTagRequest) error { + objectType, err := normalizeGetTagObjectType(request.objectType) + if err != nil { + return err + } + request.objectType = objectType + // TODO [SNOW-1022645]: use query from resource sdk - similarly to https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/0e88e082282adf35f605c323569908a99bd406f9/pkg/acceptance/check_destroy.go#L67 opts := request.toOpts() return validateAndExec(v.client, ctx, opts) } func (v *tags) Unset(ctx context.Context, request *UnsetTagRequest) error { + objectType, err := normalizeGetTagObjectType(request.objectType) + if err != nil { + return err + } + request.objectType = objectType + // TODO [SNOW-1022645]: use query from resource sdk - similarly to https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/0e88e082282adf35f605c323569908a99bd406f9/pkg/acceptance/check_destroy.go#L67 opts := request.toOpts() return validateAndExec(v.client, ctx, opts) } +func (v *tags) SetOnCurrentAccount(ctx context.Context, request *SetTagOnCurrentAccountRequest) error { + return v.client.Accounts.Alter(ctx, &AlterAccountOptions{ + SetTag: request.SetTags, + }) +} + +func (v *tags) UnsetOnCurrentAccount(ctx context.Context, request *UnsetTagOnCurrentAccountRequest) error { + return v.client.Accounts.Alter(ctx, &AlterAccountOptions{ + UnsetTag: request.UnsetTags, + }) +} + func (s *CreateTagRequest) toOpts() *createTagOptions { return &createTagOptions{ OrReplace: s.orReplace, diff --git a/pkg/sdk/tags_test.go b/pkg/sdk/tags_test.go index 5c3e01717a..0979537b27 100644 --- a/pkg/sdk/tags_test.go +++ b/pkg/sdk/tags_test.go @@ -1,6 +1,7 @@ package sdk import ( + "errors" "testing" ) @@ -370,6 +371,18 @@ func TestTagSet(t *testing.T) { assertOptsInvalidJoinedErrors(t, opts, ErrInvalidObjectIdentifier) }) + t.Run("validation: unsupported object type", func(t *testing.T) { + opts := defaultOpts() + opts.objectType = ObjectTypeSequence + assertOptsInvalidJoinedErrors(t, opts, errors.New("tagging for object type SEQUENCE is not supported")) + }) + + t.Run("validation: unsupported account", func(t *testing.T) { + opts := defaultOpts() + opts.objectType = ObjectTypeAccount + assertOptsInvalidJoinedErrors(t, opts, errors.New("tagging for object type ACCOUNT is not supported - use Tags.SetOnCurrentAccount instead")) + }) + t.Run("set with all optional", func(t *testing.T) { opts := defaultOpts() opts.SetTags = []TagAssociation{ @@ -415,6 +428,18 @@ func TestTagUnset(t *testing.T) { assertOptsInvalidJoinedErrors(t, opts, ErrInvalidObjectIdentifier) }) + t.Run("validation: unsupported object type", func(t *testing.T) { + opts := defaultOpts() + opts.objectType = ObjectTypeSequence + assertOptsInvalidJoinedErrors(t, opts, errors.New("tagging for object type SEQUENCE is not supported")) + }) + + t.Run("validation: unsupported account", func(t *testing.T) { + opts := defaultOpts() + opts.objectType = ObjectTypeAccount + assertOptsInvalidJoinedErrors(t, opts, errors.New("tagging for object type ACCOUNT is not supported - use Tags.UnsetOnCurrentAccount instead")) + }) + t.Run("unset with all optional", func(t *testing.T) { opts := defaultOpts() opts.UnsetTags = []ObjectIdentifier{ diff --git 
a/pkg/sdk/tags_validations.go b/pkg/sdk/tags_validations.go index cd0b55cd0a..f6fd74b83b 100644 --- a/pkg/sdk/tags_validations.go +++ b/pkg/sdk/tags_validations.go @@ -2,6 +2,7 @@ package sdk import ( "errors" + "fmt" ) var ( @@ -150,6 +151,12 @@ func (opts *setTagOptions) validate() error { if !ValidObjectIdentifier(opts.objectName) { errs = append(errs, ErrInvalidObjectIdentifier) } + if !canBeAssociatedWithTag(opts.objectType) { + return fmt.Errorf("tagging for object type %s is not supported", opts.objectType) + } + if opts.objectType == ObjectTypeAccount { + return fmt.Errorf("tagging for object type ACCOUNT is not supported - use Tags.SetOnCurrentAccount instead") + } return errors.Join(errs...) } @@ -161,5 +168,11 @@ func (opts *unsetTagOptions) validate() error { if !ValidObjectIdentifier(opts.objectName) { errs = append(errs, ErrInvalidObjectIdentifier) } + if !canBeAssociatedWithTag(opts.objectType) { + return fmt.Errorf("tagging for object type %s is not supported", opts.objectType) + } + if opts.objectType == ObjectTypeAccount { + return fmt.Errorf("tagging for object type ACCOUNT is not supported - use Tags.UnsetOnCurrentAccount instead") + } return errors.Join(errs...) } diff --git a/pkg/sdk/testint/accounts_integration_test.go b/pkg/sdk/testint/accounts_integration_test.go index 232465509c..3192bd27d1 100644 --- a/pkg/sdk/testint/accounts_integration_test.go +++ b/pkg/sdk/testint/accounts_integration_test.go @@ -288,32 +288,4 @@ func TestInt_AccountAlter(t *testing.T) { err = client.Accounts.Alter(ctx, opts) require.NoError(t, err) }) - - t.Run("set and unset tag", func(t *testing.T) { - tagTest1, tagCleanup1 := testClientHelper().Tag.CreateTag(t) - t.Cleanup(tagCleanup1) - tagTest2, tagCleanup2 := testClientHelper().Tag.CreateTag(t) - t.Cleanup(tagCleanup2) - - opts := &sdk.AlterAccountOptions{ - SetTag: []sdk.TagAssociation{ - { - Name: tagTest1.ID(), - Value: "abc", - }, - { - Name: tagTest2.ID(), - Value: "123", - }, - }, - } - err := client.Accounts.Alter(ctx, opts) - require.NoError(t, err) - tagValue, err := client.SystemFunctions.GetTag(ctx, tagTest1.ID(), testClientHelper().Ids.AccountIdentifierWithLocator(), sdk.ObjectTypeAccount) - require.NoError(t, err) - assert.Equal(t, "abc", tagValue) - tagValue, err = client.SystemFunctions.GetTag(ctx, tagTest2.ID(), testClientHelper().Ids.AccountIdentifierWithLocator(), sdk.ObjectTypeAccount) - require.NoError(t, err) - assert.Equal(t, "123", tagValue) - }) } diff --git a/pkg/sdk/testint/api_integrations_gen_integration_test.go b/pkg/sdk/testint/api_integrations_gen_integration_test.go index 484f5f3e81..b4c04c6e64 100644 --- a/pkg/sdk/testint/api_integrations_gen_integration_test.go +++ b/pkg/sdk/testint/api_integrations_gen_integration_test.go @@ -347,42 +347,6 @@ func TestInt_ApiIntegrations(t *testing.T) { assert.Contains(t, details, sdk.ApiIntegrationProperty{Name: "GOOGLE_AUDIENCE", Type: "String", Value: googleOtherAudience, Default: ""}) }) - t.Run("alter api integration: set and unset tags", func(t *testing.T) { - tag, tagCleanup := testClientHelper().Tag.CreateTag(t) - t.Cleanup(tagCleanup) - - integration := createAwsApiIntegration(t) - id := integration.ID() - - tagValue := "abc" - tags := []sdk.TagAssociation{ - { - Name: tag.ID(), - Value: tagValue, - }, - } - alterRequestSetTags := sdk.NewAlterApiIntegrationRequest(id).WithSetTags(tags) - - err := client.ApiIntegrations.Alter(ctx, alterRequestSetTags) - require.NoError(t, err) - - returnedTagValue, err := client.SystemFunctions.GetTag(ctx, tag.ID(), id, 
sdk.ObjectTypeIntegration) - require.NoError(t, err) - - assert.Equal(t, tagValue, returnedTagValue) - - unsetTags := []sdk.ObjectIdentifier{ - tag.ID(), - } - alterRequestUnsetTags := sdk.NewAlterApiIntegrationRequest(id).WithUnsetTags(unsetTags) - - err = client.ApiIntegrations.Alter(ctx, alterRequestUnsetTags) - require.NoError(t, err) - - _, err = client.SystemFunctions.GetTag(ctx, tag.ID(), id, sdk.ObjectTypeIntegration) - require.Error(t, err) - }) - t.Run("drop api integration: existing", func(t *testing.T) { request := createApiIntegrationAwsRequest(t) id := request.GetName() diff --git a/pkg/sdk/testint/application_packages_integration_test.go b/pkg/sdk/testint/application_packages_integration_test.go index 196d299974..a9e5e5b2fd 100644 --- a/pkg/sdk/testint/application_packages_integration_test.go +++ b/pkg/sdk/testint/application_packages_integration_test.go @@ -53,31 +53,6 @@ func TestInt_ApplicationPackages(t *testing.T) { return e } - assertApplicationPackage := func(t *testing.T, id sdk.AccountObjectIdentifier) { - t.Helper() - - param, err := client.Parameters.ShowAccountParameter(ctx, sdk.AccountParameterDataRetentionTimeInDays) - require.NoError(t, err) - - defaultDataRetentionTimeInDays, err := strconv.Atoi(param.Value) - require.NoError(t, err) - - e, err := client.ApplicationPackages.ShowByID(ctx, id) - require.NoError(t, err) - - assert.NotEmpty(t, e.CreatedOn) - assert.Equal(t, id.Name(), e.Name) - assert.Equal(t, false, e.IsDefault) - assert.Equal(t, true, e.IsCurrent) - assert.Equal(t, sdk.DistributionInternal, sdk.Distribution(e.Distribution)) - assert.Equal(t, "ACCOUNTADMIN", e.Owner) - assert.Empty(t, e.Comment) - assert.Equal(t, defaultDataRetentionTimeInDays, e.RetentionTime) - assert.Empty(t, e.Options) - assert.Empty(t, e.DroppedOn) - assert.Empty(t, e.ApplicationClass) - } - t.Run("create application package", func(t *testing.T) { id := testClientHelper().Ids.RandomAccountObjectIdentifier() comment := random.Comment() @@ -145,32 +120,6 @@ func TestInt_ApplicationPackages(t *testing.T) { require.Equal(t, sdk.DistributionInternal, sdk.Distribution(o.Distribution)) }) - t.Run("alter application package: set and unset tags", func(t *testing.T) { - e := createApplicationPackageHandle(t) - id := e.ID() - - setTags := []sdk.TagAssociation{ - { - Name: tagTest.ID(), - Value: "v1", - }, - } - err := client.ApplicationPackages.Alter(ctx, sdk.NewAlterApplicationPackageRequest(id).WithSetTags(setTags)) - require.NoError(t, err) - assertApplicationPackage(t, id) - - value, err := client.SystemFunctions.GetTag(ctx, tagTest.ID(), id, sdk.ObjectTypeApplicationPackage) - require.NoError(t, err) - assert.Equal(t, "v1", value) - - unsetTags := []sdk.ObjectIdentifier{ - tagTest.ID(), - } - err = client.ApplicationPackages.Alter(ctx, sdk.NewAlterApplicationPackageRequest(id).WithUnsetTags(unsetTags)) - require.NoError(t, err) - assertApplicationPackage(t, id) - }) - t.Run("show application package for SQL: with like", func(t *testing.T) { e := createApplicationPackageHandle(t) diff --git a/pkg/sdk/testint/applications_integration_test.go b/pkg/sdk/testint/applications_integration_test.go index 2ee81c8da6..4332983a97 100644 --- a/pkg/sdk/testint/applications_integration_test.go +++ b/pkg/sdk/testint/applications_integration_test.go @@ -236,33 +236,6 @@ func TestInt_Applications(t *testing.T) { require.Equal(t, strconv.FormatBool(false), pairs["debug_mode"]) }) - t.Run("alter application: set and unset tags", func(t *testing.T) { - version, patch := "V001", 0 - _, e, 
applicationPackage := createApplicationHandle(t, version, patch, false, true, false) - id := e.ID() - - setTags := []sdk.TagAssociation{ - { - Name: tagTest.ID(), - Value: "v1", - }, - } - err := client.Applications.Alter(ctx, sdk.NewAlterApplicationRequest(id).WithSetTags(setTags)) - require.NoError(t, err) - assertApplication(t, id, applicationPackage.Name, version, patch, "") - - // TODO(SNOW-1746420): adjust after this is fixed on Snowflake side - _, err = client.SystemFunctions.GetTag(ctx, tagTest.ID(), id, sdk.ObjectTypeApplication) - require.ErrorContains(t, err, "391801 (0A000): SQL compilation error: Object tagging not supported for object type APPLICATION") - - unsetTags := []sdk.ObjectIdentifier{ - tagTest.ID(), - } - err = client.Applications.Alter(ctx, sdk.NewAlterApplicationRequest(id).WithUnsetTags(unsetTags)) - require.NoError(t, err) - assertApplication(t, id, applicationPackage.Name, version, patch, "") - }) - t.Run("alter application: upgrade with version and patch", func(t *testing.T) { version, patch := "V001", 0 _, e, applicationPackage := createApplicationHandle(t, version, patch, false, true, true) diff --git a/pkg/sdk/testint/authentication_policies_gen_integration_test.go b/pkg/sdk/testint/authentication_policies_gen_integration_test.go index 67cf9efd92..6de2607031 100644 --- a/pkg/sdk/testint/authentication_policies_gen_integration_test.go +++ b/pkg/sdk/testint/authentication_policies_gen_integration_test.go @@ -58,10 +58,9 @@ func TestInt_AuthenticationPolicies(t *testing.T) { t.Run("Create - complete", func(t *testing.T) { id := testClientHelper().Ids.RandomSchemaObjectIdentifier() - saml2Id := testClientHelper().Ids.RandomAccountObjectIdentifier() comment := random.Comment() - _, cleanupSamlIntegration := testClientHelper().SecurityIntegration.CreateSaml2(t, saml2Id) + samlIntegration, cleanupSamlIntegration := testClientHelper().SecurityIntegration.CreateSaml2(t) t.Cleanup(cleanupSamlIntegration) err := client.AuthenticationPolicies.Create(ctx, sdk.NewCreateAuthenticationPolicyRequest(id). @@ -72,7 +71,7 @@ func TestInt_AuthenticationPolicies(t *testing.T) { {Method: sdk.MfaAuthenticationMethodsSaml}, }). WithSecurityIntegrations([]sdk.SecurityIntegrationsOption{ - {Name: saml2Id}, + {Name: samlIntegration.ID()}, }). WithClientTypes([]sdk.ClientTypes{ {ClientType: sdk.ClientTypesDrivers}, @@ -93,19 +92,18 @@ func TestInt_AuthenticationPolicies(t *testing.T) { assertProperty(t, desc, "COMMENT", comment) assertProperty(t, desc, "MFA_ENROLLMENT", "OPTIONAL") assertProperty(t, desc, "MFA_AUTHENTICATION_METHODS", "[PASSWORD, SAML]") - assertProperty(t, desc, "SECURITY_INTEGRATIONS", fmt.Sprintf("[%s]", saml2Id.Name())) + assertProperty(t, desc, "SECURITY_INTEGRATIONS", fmt.Sprintf("[%s]", samlIntegration.ID().Name())) assertProperty(t, desc, "CLIENT_TYPES", "[DRIVERS, SNOWSQL]") assertProperty(t, desc, "AUTHENTICATION_METHODS", "[PASSWORD, SAML]") }) t.Run("Alter - set and unset properties", func(t *testing.T) { - saml2Id := testClientHelper().Ids.RandomAccountObjectIdentifier() comment := random.Comment() authenticationPolicy, cleanupAuthPolicy := testClientHelper().AuthenticationPolicy.Create(t) t.Cleanup(cleanupAuthPolicy) - _, cleanupSamlIntegration := testClientHelper().SecurityIntegration.CreateSaml2(t, saml2Id) + samlIntegration, cleanupSamlIntegration := testClientHelper().SecurityIntegration.CreateSaml2(t) t.Cleanup(cleanupSamlIntegration) err := client.AuthenticationPolicies.Alter(ctx, sdk.NewAlterAuthenticationPolicyRequest(authenticationPolicy.ID()). 
@@ -117,7 +115,7 @@ func TestInt_AuthenticationPolicies(t *testing.T) { {Method: sdk.MfaAuthenticationMethodsSaml}, }). WithSecurityIntegrations([]sdk.SecurityIntegrationsOption{ - {Name: saml2Id}, + {Name: samlIntegration.ID()}, }). WithClientTypes([]sdk.ClientTypes{ {ClientType: sdk.ClientTypesDrivers}, @@ -136,7 +134,7 @@ func TestInt_AuthenticationPolicies(t *testing.T) { assertProperty(t, desc, "COMMENT", comment) assertProperty(t, desc, "MFA_ENROLLMENT", "REQUIRED") assertProperty(t, desc, "MFA_AUTHENTICATION_METHODS", "[PASSWORD, SAML]") - assertProperty(t, desc, "SECURITY_INTEGRATIONS", fmt.Sprintf("[%s]", saml2Id.Name())) + assertProperty(t, desc, "SECURITY_INTEGRATIONS", fmt.Sprintf("[%s]", samlIntegration.ID().Name())) assertProperty(t, desc, "CLIENT_TYPES", "[DRIVERS, SNOWSQL, SNOWFLAKE_UI]") assertProperty(t, desc, "AUTHENTICATION_METHODS", "[PASSWORD, SAML]") diff --git a/pkg/sdk/testint/connections_gen_integration_test.go b/pkg/sdk/testint/connections_gen_integration_test.go index 79573d1b6d..ff658dd622 100644 --- a/pkg/sdk/testint/connections_gen_integration_test.go +++ b/pkg/sdk/testint/connections_gen_integration_test.go @@ -16,6 +16,9 @@ import ( const ConnectionFailoverToAccountInSameRegionErrorMessage = "The connection cannot be failed over to an account in the same region" func TestInt_Connections(t *testing.T) { + // TODO: [SNOW-1002023]: Unskip; Business Critical Snowflake Edition needed + _ = testenvs.GetOrSkipTest(t, testenvs.TestFailoverGroups) + client := testClient(t) secondaryClient := testSecondaryClient(t) ctx := testContext(t) @@ -25,9 +28,6 @@ func TestInt_Connections(t *testing.T) { accountId := testClientHelper().Account.GetAccountIdentifier(t) t.Run("Create minimal", func(t *testing.T) { - // TODO: [SNOW-1002023]: Unskip; Business Critical Snowflake Edition needed - _ = testenvs.GetOrSkipTest(t, testenvs.TestFailoverGroups) - id := testClientHelper().Ids.RandomAccountObjectIdentifier() require.NoError(t, err) @@ -43,11 +43,7 @@ func TestInt_Connections(t *testing.T) { HasNoComment(). HasIsPrimary(true). HasPrimaryIdentifier(externalObjectIdentifier). - HasFailoverAllowedToAccounts( - []string{ - accountId.Name(), - }, - ). + HasFailoverAllowedToAccounts(accountId). HasOrganizationName(sessionDetails.OrganizationName). HasAccountLocator(client.GetAccountLocator()). HasConnectionUrl( @@ -59,9 +55,6 @@ func TestInt_Connections(t *testing.T) { }) t.Run("Create all options", func(t *testing.T) { - // TODO: [SNOW-1002023]: Unskip; Business Critical Snowflake Edition needed - _ = testenvs.GetOrSkipTest(t, testenvs.TestFailoverGroups) - id := testClientHelper().Ids.RandomAccountObjectIdentifier() err := client.Connections.Create(ctx, sdk.NewCreateConnectionRequest(id). WithIfNotExists(true). @@ -77,11 +70,7 @@ func TestInt_Connections(t *testing.T) { HasComment("test comment for connection"). HasIsPrimary(true). HasPrimaryIdentifier(externalObjectIdentifier). - HasFailoverAllowedToAccounts( - []string{ - accountId.Name(), - }, - ). + HasFailoverAllowedToAccounts(accountId). HasOrganizationName(sessionDetails.OrganizationName). HasAccountLocator(client.GetAccountLocator()). 
HasConnectionUrl( @@ -93,58 +82,45 @@ func TestInt_Connections(t *testing.T) { }) t.Run("Alter enable failover", func(t *testing.T) { - // TODO: [SNOW-1002023]: Unskip; Business Critical Snowflake Edition needed - _ = testenvs.GetOrSkipTest(t, testenvs.TestFailoverGroups) - id := testClientHelper().Ids.RandomAccountObjectIdentifier() - secondaryAccountId := secondaryTestClientHelper().Ids.AccountIdentifierWithLocator() + + secondaryAccountId := secondaryTestClientHelper().Account.GetAccountIdentifier(t) _, connectionCleanup := testClientHelper().Connection.Create(t, id) t.Cleanup(connectionCleanup) err := client.Connections.Alter(ctx, sdk.NewAlterConnectionRequest(id). WithEnableConnectionFailover( - *sdk.NewEnableConnectionFailoverRequest().WithToAccounts( - []sdk.AccountIdentifier{ - secondaryAccountId, - }, - ), - ), + *sdk.NewEnableConnectionFailoverRequest([]sdk.AccountIdentifier{secondaryAccountId})), ) + // TODO: [SNOW-1763442] + // require.NoError(t, err) require.ErrorContains(t, err, ConnectionFailoverToAccountInSameRegionErrorMessage) - // TODO: [SNOW-1763442] - /* - require.NoError(t, err) - externalObjectIdentifier := sdk.NewExternalObjectIdentifier(accountId, id) - assertions.AssertThatObject(t, objectassert.Connection(t, id). - HasSnowflakeRegion(sessionDetails.Region). - HasAccountName(sessionDetails.AccountName). - HasName(id.Name()). - HasNoComment(). - HasIsPrimary(true). - HasPrimaryIdentifier(externalObjectIdentifier). - HasFailoverAllowedToAccounts( - []string{ - accountId.Name(), - secondaryAccountId.Name(), - }, - ). - HasOrganizationName(sessionDetails.OrganizationName). - HasAccountLocator(client.GetAccountLocator()), - HasConnectionUrl( - strings.ToLower( - fmt.Sprintf("%s-%s.snowflakecomputing.com", sessionDetails.OrganizationName, id.Name()), - ), - ), - ) - */ + externalObjectIdentifier := sdk.NewExternalObjectIdentifier(accountId, id) + assertions.AssertThatObject(t, objectassert.Connection(t, id). + HasSnowflakeRegion(sessionDetails.Region). + HasAccountName(sessionDetails.AccountName). + HasName(id.Name()). + HasNoComment(). + HasIsPrimary(true). + HasPrimaryIdentifier(externalObjectIdentifier). + HasFailoverAllowedToAccounts( + accountId, + secondaryAccountId, + ). + HasOrganizationName(sessionDetails.OrganizationName). + HasAccountLocator(client.GetAccountLocator()). + HasConnectionUrl( + strings.ToLower( + fmt.Sprintf("%s-%s.snowflakecomputing.com", sessionDetails.OrganizationName, id.Name()), + ), + ), + ) }) t.Run("Create as replica of", func(t *testing.T) { - // TODO: [SNOW-1002023]: Unskip; Business Critical Snowflake Edition needed - _ = testenvs.GetOrSkipTest(t, testenvs.TestFailoverGroups) - + id := testClientHelper().Ids.RandomAccountObjectIdentifier() secondaryAccountId := secondaryTestClientHelper().Ids.AccountIdentifierWithLocator() primaryConn, connectionCleanup := testClientHelper().Connection.Create(t, testClientHelper().Ids.RandomAccountObjectIdentifier()) @@ -152,12 +128,7 @@ func TestInt_Connections(t *testing.T) { err := client.Connections.Alter(ctx, sdk.NewAlterConnectionRequest(primaryConn.ID()). 
WithEnableConnectionFailover( - *sdk.NewEnableConnectionFailoverRequest().WithToAccounts( - []sdk.AccountIdentifier{ - secondaryAccountId, - }, - ), - ), + *sdk.NewEnableConnectionFailoverRequest([]sdk.AccountIdentifier{secondaryAccountId})), ) require.ErrorContains(t, err, ConnectionFailoverToAccountInSameRegionErrorMessage) // TODO: [SNOW-1763442] @@ -165,43 +136,33 @@ func TestInt_Connections(t *testing.T) { // require.NoError(t, err) // create replica on secondary account - /* - externalObjectIdentifier := sdk.NewExternalObjectIdentifier(accountId, id) - err = secondaryClient.Connections.Create(ctx, sdk.NewCreateConnectionRequest(id). - WithAsReplicaOf(sdk.AsReplicaOfRequest{ - AsReplicaOf: externalObjectIdentifier, - })) - t.Cleanup(testClientHelper().Connection.DropFunc(t, id)) - require.NoError(t, err) - - assertions.AssertThatObject(t, objectassert.Connection(t, id). - HasSnowflakeRegion(sessionDetails.Region). - HasAccountName(sessionDetails.AccountName). - HasName(id.Name()). - HasNoComment(). - HasIsPrimary(false). - HasPrimaryIdentifier(externalObjectIdentifier). - HasFailoverAllowedToAccounts( - []string{ - accountId.Name(), - secondaryAccountId.Name(), - }, - ). - HasOrganizationName(sessionDetails.OrganizationName). - HasAccountLocator(client.GetAccountLocator()). - HasConnectionUrl( - strings.ToLower( - fmt.Sprintf("%s-%s.snowflakecomputing.com", sessionDetails.OrganizationName, id.Name()), - ), + externalObjectIdentifier := sdk.NewExternalObjectIdentifier(accountId, id) + err = secondaryClient.Connections.Create(ctx, sdk.NewCreateConnectionRequest(id).WithAsReplicaOf(externalObjectIdentifier)) + t.Cleanup(testClientHelper().Connection.DropFunc(t, id)) + require.NoError(t, err) + + assertions.AssertThatObject(t, objectassert.Connection(t, id). + HasSnowflakeRegion(sessionDetails.Region). + HasAccountName(sessionDetails.AccountName). + HasName(id.Name()). + HasNoComment(). + HasIsPrimary(false). + HasPrimaryIdentifier(externalObjectIdentifier). + HasFailoverAllowedToAccounts( + accountId, + secondaryAccountId, + ). + HasOrganizationName(sessionDetails.OrganizationName). + HasAccountLocator(client.GetAccountLocator()). + HasConnectionUrl( + strings.ToLower( + fmt.Sprintf("%s-%s.snowflakecomputing.com", sessionDetails.OrganizationName, id.Name()), ), - ) - */ + ), + ) }) t.Run("Alter disable failover", func(t *testing.T) { - // TODO: [SNOW-1763442]: Unskip; Business Critical Snowflake Edition needed - _ = testenvs.GetOrSkipTest(t, testenvs.TestFailoverGroups) - id := testClientHelper().Ids.RandomAccountObjectIdentifier() secondaryAccountId := secondaryTestClientHelper().Account.GetAccountIdentifier(t) @@ -211,12 +172,7 @@ func TestInt_Connections(t *testing.T) { // Add secondary account to failover list err := client.Connections.Alter(ctx, sdk.NewAlterConnectionRequest(id). WithEnableConnectionFailover( - *sdk.NewEnableConnectionFailoverRequest().WithToAccounts( - []sdk.AccountIdentifier{ - secondaryAccountId, - }, - ), - ), + *sdk.NewEnableConnectionFailoverRequest([]sdk.AccountIdentifier{secondaryAccountId})), ) require.ErrorContains(t, err, ConnectionFailoverToAccountInSameRegionErrorMessage) // TODO: [SNOW-1763442] @@ -232,11 +188,7 @@ func TestInt_Connections(t *testing.T) { externalObjectIdentifier := sdk.NewExternalObjectIdentifier(accountId, id) assertions.AssertThatObject(t, objectassert.Connection(t, primaryConn.ID()). HasPrimaryIdentifier(externalObjectIdentifier). 
- HasFailoverAllowedToAccounts( - []string{ - accountId.Name(), - }, - ), + HasFailoverAllowedToAccounts(accountId), ) // Try to create repllication on secondary account @@ -245,16 +197,13 @@ func TestInt_Connections(t *testing.T) { }) t.Run("Alter comment", func(t *testing.T) { - // TODO: [SNOW-1002023]: Unskip; Business Critical Snowflake Edition needed - _ = testenvs.GetOrSkipTest(t, testenvs.TestFailoverGroups) - id := testClientHelper().Ids.RandomAccountObjectIdentifier() _, connectionCleanup := testClientHelper().Connection.Create(t, id) t.Cleanup(connectionCleanup) // Set err := client.Connections.Alter(ctx, sdk.NewAlterConnectionRequest(id). - WithSet(*sdk.NewSetRequest(). + WithSet(*sdk.NewConnectionSetRequest(). WithComment("new integration test comment"))) require.NoError(t, err) @@ -265,7 +214,7 @@ func TestInt_Connections(t *testing.T) { // Unset err = client.Connections.Alter(ctx, sdk.NewAlterConnectionRequest(id). - WithUnset(*sdk.NewUnsetRequest(). + WithUnset(*sdk.NewConnectionUnsetRequest(). WithComment(true))) require.NoError(t, err) @@ -276,9 +225,6 @@ func TestInt_Connections(t *testing.T) { }) t.Run("Drop", func(t *testing.T) { - // TODO: [SNOW-1002023]: Unskip; Business Critical Snowflake Edition needed - _ = testenvs.GetOrSkipTest(t, testenvs.TestFailoverGroups) - id := testClientHelper().Ids.RandomAccountObjectIdentifier() _, connectionCleanup := testClientHelper().Connection.Create(t, id) t.Cleanup(connectionCleanup) @@ -296,9 +242,6 @@ func TestInt_Connections(t *testing.T) { }) t.Run("Drop with if exists", func(t *testing.T) { - // TODO: [SNOW-1002023]: Unskip; Business Critical Snowflake Edition needed - _ = testenvs.GetOrSkipTest(t, testenvs.TestFailoverGroups) - err = client.Connections.Drop(ctx, sdk.NewDropConnectionRequest(NonExistingAccountObjectIdentifier)) require.ErrorIs(t, err, sdk.ErrObjectNotExistOrAuthorized) @@ -307,9 +250,6 @@ func TestInt_Connections(t *testing.T) { }) t.Run("Show", func(t *testing.T) { - // TODO: [SNOW-1002023]: Unskip; Business Critical Snowflake Edition needed - _ = testenvs.GetOrSkipTest(t, testenvs.TestFailoverGroups) - id1 := testClientHelper().Ids.RandomAccountObjectIdentifier() id2 := testClientHelper().Ids.RandomAccountObjectIdentifier() @@ -326,9 +266,6 @@ func TestInt_Connections(t *testing.T) { }) t.Run("Show with Like", func(t *testing.T) { - // TODO: [SNOW-1002023]: Unskip; Business Critical Snowflake Edition needed - _ = testenvs.GetOrSkipTest(t, testenvs.TestFailoverGroups) - id1 := testClientHelper().Ids.RandomAccountObjectIdentifier() id2 := testClientHelper().Ids.RandomAccountObjectIdentifier() @@ -348,9 +285,6 @@ func TestInt_Connections(t *testing.T) { }) t.Run("ShowByID", func(t *testing.T) { - // TODO: [SNOW-1002023]: Unskip; Business Critical Snowflake Edition needed - _ = testenvs.GetOrSkipTest(t, testenvs.TestFailoverGroups) - id := testClientHelper().Ids.RandomAccountObjectIdentifier() _, connectionCleanup := testClientHelper().Connection.Create(t, id) diff --git a/pkg/sdk/testint/database_role_integration_test.go b/pkg/sdk/testint/database_role_integration_test.go index aa0f716150..9d38c2851a 100644 --- a/pkg/sdk/testint/database_role_integration_test.go +++ b/pkg/sdk/testint/database_role_integration_test.go @@ -163,39 +163,6 @@ func TestInt_DatabaseRoles(t *testing.T) { assert.ErrorIs(t, err, sdk.ErrDifferentDatabase) }) - t.Run("alter database_role: set and unset tag", func(t *testing.T) { - tag, tagCleanup := testClientHelper().Tag.CreateTag(t) - t.Cleanup(tagCleanup) - - databaseRole, 
cleanupDatabaseRole := testClientHelper().DatabaseRole.CreateDatabaseRole(t) - t.Cleanup(cleanupDatabaseRole) - - tagValue := "abc" - tags := []sdk.TagAssociation{ - { - Name: tag.ID(), - Value: tagValue, - }, - } - - err := client.DatabaseRoles.Alter(ctx, sdk.NewAlterDatabaseRoleRequest(databaseRole.ID()).WithSetTags(tags)) - require.NoError(t, err) - - returnedTagValue, err := client.SystemFunctions.GetTag(ctx, tag.ID(), databaseRole.ID(), sdk.ObjectTypeDatabaseRole) - require.NoError(t, err) - - assert.Equal(t, tagValue, returnedTagValue) - - unsetTags := []sdk.ObjectIdentifier{ - tag.ID(), - } - err = client.DatabaseRoles.Alter(ctx, sdk.NewAlterDatabaseRoleRequest(databaseRole.ID()).WithUnsetTags(unsetTags)) - require.NoError(t, err) - - _, err = client.SystemFunctions.GetTag(ctx, tag.ID(), databaseRole.ID(), sdk.ObjectTypeDatabaseRole) - require.Error(t, err) - }) - t.Run("show database_role: without like", func(t *testing.T) { role1 := createDatabaseRole(t) role2 := createDatabaseRole(t) diff --git a/pkg/sdk/testint/databases_integration_test.go b/pkg/sdk/testint/databases_integration_test.go index d6b45916b7..8dc2408eee 100644 --- a/pkg/sdk/testint/databases_integration_test.go +++ b/pkg/sdk/testint/databases_integration_test.go @@ -337,12 +337,8 @@ func TestInt_DatabasesCreateSecondary(t *testing.T) { func TestInt_DatabasesAlter(t *testing.T) { client := testClient(t) - secondaryClient := testSecondaryClient(t) ctx := testContext(t) - tagTest, tagCleanup := testClientHelper().Tag.CreateTag(t) - t.Cleanup(tagCleanup) - assertDatabaseParameterEquals := func(t *testing.T, params []*sdk.Parameter, parameterName sdk.AccountParameter, expected string) { t.Helper() assert.Equal(t, expected, helpers.FindParameter(t, params, parameterName).Value) @@ -372,74 +368,11 @@ func TestInt_DatabasesAlter(t *testing.T) { }, { DatabaseType: "From Share", - CreateFn: func(t *testing.T) (*sdk.Database, func()) { - t.Helper() - - shareTest, shareCleanup := secondaryTestClientHelper().Share.CreateShare(t) - t.Cleanup(shareCleanup) - - sharedDatabase, sharedDatabaseCleanup := secondaryTestClientHelper().Database.CreateDatabase(t) - t.Cleanup(sharedDatabaseCleanup) - - databaseId := sharedDatabase.ID() - - err := secondaryClient.Grants.GrantPrivilegeToShare(ctx, []sdk.ObjectPrivilege{sdk.ObjectPrivilegeUsage}, &sdk.ShareGrantOn{ - Database: sharedDatabase.ID(), - }, shareTest.ID()) - require.NoError(t, err) - t.Cleanup(func() { - err := secondaryClient.Grants.RevokePrivilegeFromShare(ctx, []sdk.ObjectPrivilege{sdk.ObjectPrivilegeUsage}, &sdk.ShareGrantOn{ - Database: sharedDatabase.ID(), - }, shareTest.ID()) - require.NoError(t, err) - }) - - err = secondaryClient.Shares.Alter(ctx, shareTest.ID(), &sdk.AlterShareOptions{ - IfExists: sdk.Bool(true), - Set: &sdk.ShareSet{ - Accounts: []sdk.AccountIdentifier{ - testClientHelper().Account.GetAccountIdentifier(t), - }, - }, - }) - require.NoError(t, err) - - err = client.Databases.CreateShared(ctx, databaseId, shareTest.ExternalID(), &sdk.CreateSharedDatabaseOptions{}) - require.NoError(t, err) - - database, err := client.Databases.ShowByID(ctx, databaseId) - require.NoError(t, err) - - return database, testClientHelper().Database.DropDatabaseFunc(t, database.ID()) - }, + CreateFn: createDatabaseFromShare, }, { DatabaseType: "Replica", - CreateFn: func(t *testing.T) (*sdk.Database, func()) { - t.Helper() - - sharedDatabase, sharedDatabaseCleanup := secondaryTestClientHelper().Database.CreateDatabase(t) - t.Cleanup(sharedDatabaseCleanup) - - err := 
secondaryClient.Databases.AlterReplication(ctx, sharedDatabase.ID(), &sdk.AlterDatabaseReplicationOptions{ - EnableReplication: &sdk.EnableReplication{ - ToAccounts: []sdk.AccountIdentifier{ - testClientHelper().Account.GetAccountIdentifier(t), - }, - IgnoreEditionCheck: sdk.Bool(true), - }, - }) - require.NoError(t, err) - - externalDatabaseId := sdk.NewExternalObjectIdentifier(secondaryTestClientHelper().Ids.AccountIdentifierWithLocator(), sharedDatabase.ID()) - err = client.Databases.CreateSecondary(ctx, sharedDatabase.ID(), externalDatabaseId, &sdk.CreateSecondaryDatabaseOptions{}) - require.NoError(t, err) - - database, err := client.Databases.ShowByID(ctx, sharedDatabase.ID()) - require.NoError(t, err) - - return database, testClientHelper().Database.DropDatabaseFunc(t, sharedDatabase.ID()) - }, + CreateFn: createDatabaseReplica, }, } @@ -585,38 +518,6 @@ func TestInt_DatabasesAlter(t *testing.T) { assert.Equal(t, "", database.Comment) }) - t.Run(fmt.Sprintf("Database: %s - setting and unsetting tags", testCase.DatabaseType), func(t *testing.T) { - if testCase.DatabaseType == "Replica" { - t.Skipf("Skipping database test because secondary databases cannot be modified") - } - databaseTest, databaseTestCleanup := testCase.CreateFn(t) - t.Cleanup(databaseTestCleanup) - - err := client.Databases.Alter(ctx, databaseTest.ID(), &sdk.AlterDatabaseOptions{ - SetTag: []sdk.TagAssociation{ - { - Name: tagTest.ID(), - Value: "v1", - }, - }, - }) - require.NoError(t, err) - - value, err := client.SystemFunctions.GetTag(ctx, tagTest.ID(), databaseTest.ID(), sdk.ObjectTypeDatabase) - require.NoError(t, err) - assert.Equal(t, "v1", value) - - err = client.Databases.Alter(ctx, databaseTest.ID(), &sdk.AlterDatabaseOptions{ - UnsetTag: []sdk.ObjectIdentifier{ - tagTest.ID(), - }, - }) - require.NoError(t, err) - - _, err = client.SystemFunctions.GetTag(ctx, tagTest.ID(), databaseTest.ID(), sdk.ObjectTypeDatabase) - require.Error(t, err) - }) - t.Run(fmt.Sprintf("Database: %s - swap with another database", testCase.DatabaseType), func(t *testing.T) { databaseTest, databaseTestCleanup := testCase.CreateFn(t) t.Cleanup(databaseTestCleanup) diff --git a/pkg/sdk/testint/event_tables_integration_test.go b/pkg/sdk/testint/event_tables_integration_test.go index 806095fe4b..434e0e60c0 100644 --- a/pkg/sdk/testint/event_tables_integration_test.go +++ b/pkg/sdk/testint/event_tables_integration_test.go @@ -147,28 +147,6 @@ func TestInt_EventTables(t *testing.T) { require.NoError(t, err) }) - t.Run("alter event table: set and unset tag", func(t *testing.T) { - dt := createEventTableHandle(t) - id := dt.ID() - - set := []sdk.TagAssociation{ - { - Name: tagTest.ID(), - Value: "v1", - }, - } - err := client.EventTables.Alter(ctx, sdk.NewAlterEventTableRequest(id).WithSetTags(set)) - require.NoError(t, err) - - value, err := client.SystemFunctions.GetTag(ctx, tagTest.ID(), id, sdk.ObjectTypeEventTable) - require.NoError(t, err) - assert.Equal(t, "v1", value) - - unset := []sdk.ObjectIdentifier{tagTest.ID()} - err = client.EventTables.Alter(ctx, sdk.NewAlterEventTableRequest(id).WithUnsetTags(unset)) - require.NoError(t, err) - }) - t.Run("alter event table: rename", func(t *testing.T) { id := testClientHelper().Ids.RandomSchemaObjectIdentifier() diff --git a/pkg/sdk/testint/external_functions_integration_test.go b/pkg/sdk/testint/external_functions_integration_test.go index a9858c5742..3d23fd6d6d 100644 --- a/pkg/sdk/testint/external_functions_integration_test.go +++ 
b/pkg/sdk/testint/external_functions_integration_test.go @@ -5,7 +5,6 @@ import ( "testing" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk" - "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) @@ -195,33 +194,6 @@ func TestInt_ExternalFunctions(t *testing.T) { assertExternalFunction(t, externalFunction.ID(), true) }) - t.Run("alter external function: set and unset tags", func(t *testing.T) { - tag, tagCleanup := testClientHelper().Tag.CreateTag(t) - t.Cleanup(tagCleanup) - - externalFunction := createExternalFunction(t) - - id := externalFunction.ID() - setTags := []sdk.TagAssociation{ - { - Name: tag.ID(), - Value: "v1", - }, - } - err := client.Functions.Alter(ctx, sdk.NewAlterFunctionRequest(id).WithSetTags(setTags)) - require.NoError(t, err) - - value, err := client.SystemFunctions.GetTag(ctx, tag.ID(), id, sdk.ObjectTypeExternalFunction) - require.NoError(t, err) - assert.Equal(t, "v1", value) - - unsetTags := []sdk.ObjectIdentifier{ - tag.ID(), - } - err = client.Functions.Alter(ctx, sdk.NewAlterFunctionRequest(id).WithUnsetTags(unsetTags)) - require.NoError(t, err) - }) - t.Run("show external function: with like", func(t *testing.T) { e1 := createExternalFunction(t) e2 := createExternalFunction(t) diff --git a/pkg/sdk/testint/external_tables_integration_test.go b/pkg/sdk/testint/external_tables_integration_test.go index 83832274d9..26f0e0a720 100644 --- a/pkg/sdk/testint/external_tables_integration_test.go +++ b/pkg/sdk/testint/external_tables_integration_test.go @@ -239,50 +239,6 @@ func TestInt_ExternalTables(t *testing.T) { require.NoError(t, err) }) - t.Run("Alter: set tags", func(t *testing.T) { - externalTableID := testClientHelper().Ids.RandomSchemaObjectIdentifier() - err := client.ExternalTables.Create(ctx, minimalCreateExternalTableReq(externalTableID)) - require.NoError(t, err) - - tagValue := "tag-value" - err = client.Tables.Alter( - ctx, - sdk.NewAlterTableRequest(externalTableID). - WithIfExists(sdk.Bool(true)). - WithSetTags([]sdk.TagAssociationRequest{*sdk.NewTagAssociationRequest(tag.ID(), tagValue)})) - require.NoError(t, err) - - tv, err := client.SystemFunctions.GetTag(ctx, tag.ID(), externalTableID, sdk.ObjectTypeExternalTable) - require.NoError(t, err) - assert.Equal(t, tagValue, tv) - }) - - t.Run("Alter: unset tags", func(t *testing.T) { - externalTableID := testClientHelper().Ids.RandomSchemaObjectIdentifier() - err := client.ExternalTables.Create( - ctx, - minimalCreateExternalTableReq(externalTableID). - WithTag([]*sdk.TagAssociationRequest{sdk.NewTagAssociationRequest(tag.ID(), "tag-value")}), - ) - require.NoError(t, err) - tv, err := client.SystemFunctions.GetTag(ctx, tag.ID(), externalTableID, sdk.ObjectTypeExternalTable) - require.NoError(t, err) - assert.Equal(t, "tag-value", tv) - - err = client.Tables.Alter( - ctx, - sdk.NewAlterTableRequest(externalTableID). - WithIfExists(sdk.Bool(true)). 
- WithUnsetTags([]sdk.ObjectIdentifier{ - tag.ID(), - }), - ) - require.NoError(t, err) - - _, err = client.SystemFunctions.GetTag(ctx, tag.ID(), externalTableID, sdk.ObjectTypeExternalTable) - require.Error(t, err) - }) - t.Run("Alter: add partitions", func(t *testing.T) { externalTableID := testClientHelper().Ids.RandomSchemaObjectIdentifier() err := client.ExternalTables.CreateWithManualPartitioning(ctx, createExternalTableWithManualPartitioningReq(externalTableID)) diff --git a/pkg/sdk/testint/functions_integration_test.go b/pkg/sdk/testint/functions_integration_test.go index 31e96153ca..1fe0a04bb9 100644 --- a/pkg/sdk/testint/functions_integration_test.go +++ b/pkg/sdk/testint/functions_integration_test.go @@ -189,9 +189,6 @@ func TestInt_OtherFunctions(t *testing.T) { client := testClient(t) ctx := testContext(t) - tagTest, tagCleanup := testClientHelper().Tag.CreateTag(t) - t.Cleanup(tagCleanup) - assertFunction := func(t *testing.T, id sdk.SchemaObjectIdentifierWithArguments, secure bool, withArguments bool) { t.Helper() @@ -362,32 +359,6 @@ func TestInt_OtherFunctions(t *testing.T) { assertFunction(t, id, false, true) }) - t.Run("alter function: set and unset tags", func(t *testing.T) { - f := createFunctionForSQLHandle(t, true, true) - - id := f.ID() - setTags := []sdk.TagAssociation{ - { - Name: tagTest.ID(), - Value: "v1", - }, - } - err := client.Functions.Alter(ctx, sdk.NewAlterFunctionRequest(id).WithSetTags(setTags)) - require.NoError(t, err) - assertFunction(t, id, false, true) - - value, err := client.SystemFunctions.GetTag(ctx, tagTest.ID(), id, sdk.ObjectTypeFunction) - require.NoError(t, err) - assert.Equal(t, "v1", value) - - unsetTags := []sdk.ObjectIdentifier{ - tagTest.ID(), - } - err = client.Functions.Alter(ctx, sdk.NewAlterFunctionRequest(id).WithUnsetTags(unsetTags)) - require.NoError(t, err) - assertFunction(t, id, false, true) - }) - t.Run("show function for SQL: without like", func(t *testing.T) { f1 := createFunctionForSQLHandle(t, true, true) f2 := createFunctionForSQLHandle(t, true, true) diff --git a/pkg/sdk/testint/helpers.go b/pkg/sdk/testint/helpers.go new file mode 100644 index 0000000000..5f5443d945 --- /dev/null +++ b/pkg/sdk/testint/helpers.go @@ -0,0 +1,155 @@ +package testint + +import ( + "context" + "fmt" + "testing" + + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk" + "github.com/stretchr/testify/require" +) + +func createDatabaseFromShare(t *testing.T) (*sdk.Database, func()) { + t.Helper() + client := testClient(t) + secondaryClient := testSecondaryClient(t) + ctx := testContext(t) + + shareTest, shareCleanup := secondaryTestClientHelper().Share.CreateShare(t) + t.Cleanup(shareCleanup) + + sharedDatabase, sharedDatabaseCleanup := secondaryTestClientHelper().Database.CreateDatabase(t) + t.Cleanup(sharedDatabaseCleanup) + + databaseId := sharedDatabase.ID() + + err := secondaryClient.Grants.GrantPrivilegeToShare(ctx, []sdk.ObjectPrivilege{sdk.ObjectPrivilegeUsage}, &sdk.ShareGrantOn{ + Database: sharedDatabase.ID(), + }, shareTest.ID()) + require.NoError(t, err) + t.Cleanup(func() { + err := secondaryClient.Grants.RevokePrivilegeFromShare(ctx, []sdk.ObjectPrivilege{sdk.ObjectPrivilegeUsage}, &sdk.ShareGrantOn{ + Database: sharedDatabase.ID(), + }, shareTest.ID()) + require.NoError(t, err) + }) + + err = secondaryClient.Shares.Alter(ctx, shareTest.ID(), &sdk.AlterShareOptions{ + IfExists: sdk.Bool(true), + Set: &sdk.ShareSet{ + Accounts: []sdk.AccountIdentifier{ + testClientHelper().Account.GetAccountIdentifier(t), + }, + }, 
+ }) + require.NoError(t, err) + + err = client.Databases.CreateShared(ctx, databaseId, shareTest.ExternalID(), &sdk.CreateSharedDatabaseOptions{}) + require.NoError(t, err) + + database, err := client.Databases.ShowByID(ctx, databaseId) + require.NoError(t, err) + + return database, testClientHelper().Database.DropDatabaseFunc(t, database.ID()) +} + +func createDatabaseReplica(t *testing.T) (*sdk.Database, func()) { + t.Helper() + client := testClient(t) + secondaryClient := testSecondaryClient(t) + ctx := testContext(t) + + sharedDatabase, sharedDatabaseCleanup := secondaryTestClientHelper().Database.CreateDatabase(t) + t.Cleanup(sharedDatabaseCleanup) + + err := secondaryClient.Databases.AlterReplication(ctx, sharedDatabase.ID(), &sdk.AlterDatabaseReplicationOptions{ + EnableReplication: &sdk.EnableReplication{ + ToAccounts: []sdk.AccountIdentifier{ + testClientHelper().Account.GetAccountIdentifier(t), + }, + IgnoreEditionCheck: sdk.Bool(true), + }, + }) + require.NoError(t, err) + + externalDatabaseId := sdk.NewExternalObjectIdentifier(secondaryTestClientHelper().Ids.AccountIdentifierWithLocator(), sharedDatabase.ID()) + err = client.Databases.CreateSecondary(ctx, sharedDatabase.ID(), externalDatabaseId, &sdk.CreateSecondaryDatabaseOptions{}) + require.NoError(t, err) + + database, err := client.Databases.ShowByID(ctx, sharedDatabase.ID()) + require.NoError(t, err) + + return database, testClientHelper().Database.DropDatabaseFunc(t, sharedDatabase.ID()) +} + +func createApplicationPackage(t *testing.T) (*sdk.ApplicationPackage, func()) { + t.Helper() + + stage, cleanupStage := testClientHelper().Stage.CreateStage(t) + t.Cleanup(cleanupStage) + + testClientHelper().Stage.PutOnStageWithContent(t, stage.ID(), "manifest.yml", "") + testClientHelper().Stage.PutOnStageWithContent(t, stage.ID(), "setup.sql", "CREATE APPLICATION ROLE IF NOT EXISTS APP_HELLO_SNOWFLAKE;") + + applicationPackage, cleanupApplicationPackage := testClientHelper().ApplicationPackage.CreateApplicationPackage(t) + t.Cleanup(cleanupApplicationPackage) + + testClientHelper().ApplicationPackage.AddApplicationPackageVersion(t, applicationPackage.ID(), stage.ID(), "V01") + + return applicationPackage, cleanupApplicationPackage +} + +func createShare(t *testing.T, ctx context.Context, client *sdk.Client) (*sdk.Share, func()) { + t.Helper() + object, objectCleanup := testClientHelper().Share.CreateShare(t) + t.Cleanup(objectCleanup) + + err := client.Grants.GrantPrivilegeToShare(ctx, []sdk.ObjectPrivilege{sdk.ObjectPrivilegeUsage}, &sdk.ShareGrantOn{ + Database: testClientHelper().Ids.DatabaseId(), + }, object.ID()) + require.NoError(t, err) + cleanup := func() { + err = client.Grants.RevokePrivilegeFromShare(ctx, []sdk.ObjectPrivilege{sdk.ObjectPrivilegeUsage}, &sdk.ShareGrantOn{ + Database: testClientHelper().Ids.DatabaseId(), + }, object.ID()) + require.NoError(t, err) + } + return object, cleanup +} + +func createPipe(t *testing.T) (*sdk.Pipe, func()) { + t.Helper() + table, tableCleanup := testClientHelper().Table.Create(t) + t.Cleanup(tableCleanup) + + stage, stageCleanup := testClientHelper().Stage.CreateStage(t) + t.Cleanup(stageCleanup) + + return testClientHelper().Pipe.CreatePipe(t, fmt.Sprintf("COPY INTO %s\nFROM @%s", table.ID().FullyQualifiedName(), stage.ID().FullyQualifiedName())) +} + +func createMaterializedView(t *testing.T) (*sdk.MaterializedView, func()) { + t.Helper() + table, tableCleanup := testClientHelper().Table.Create(t) + t.Cleanup(tableCleanup) + query := fmt.Sprintf(`SELECT * FROM %s`, 
table.ID().FullyQualifiedName()) + return testClientHelper().MaterializedView.CreateMaterializedView(t, query, false) +} + +func createStream(t *testing.T) (*sdk.Stream, func()) { + t.Helper() + table, tableCleanup := testClientHelper().Table.CreateInSchema(t, testClientHelper().Ids.SchemaId()) + t.Cleanup(tableCleanup) + + return testClientHelper().Stream.CreateOnTable(t, table.ID()) +} + +func createExternalTable(t *testing.T) (*sdk.ExternalTable, func()) { + t.Helper() + stageID := testClientHelper().Ids.RandomSchemaObjectIdentifier() + stageLocation := fmt.Sprintf("@%s", stageID.FullyQualifiedName()) + _, stageCleanup := testClientHelper().Stage.CreateStageWithURL(t, stageID) + t.Cleanup(stageCleanup) + + return testClientHelper().ExternalTable.CreateWithLocation(t, stageLocation) +} diff --git a/pkg/sdk/testint/masking_policy_integration_test.go b/pkg/sdk/testint/masking_policy_integration_test.go index e23dc59660..6a645c783a 100644 --- a/pkg/sdk/testint/masking_policy_integration_test.go +++ b/pkg/sdk/testint/masking_policy_integration_test.go @@ -343,40 +343,6 @@ func TestInt_MaskingPolicyAlter(t *testing.T) { require.NoError(t, err) }) - t.Run("setting and unsetting tags", func(t *testing.T) { - maskingPolicy, maskingPolicyCleanup := testClientHelper().MaskingPolicy.CreateMaskingPolicy(t) - id := maskingPolicy.ID() - t.Cleanup(maskingPolicyCleanup) - - tag, tagCleanup := testClientHelper().Tag.CreateTag(t) - t.Cleanup(tagCleanup) - - tag2, tag2Cleanup := testClientHelper().Tag.CreateTag(t) - t.Cleanup(tag2Cleanup) - - tagAssociations := []sdk.TagAssociation{{Name: tag.ID(), Value: "value1"}, {Name: tag2.ID(), Value: "value2"}} - alterOptions := &sdk.AlterMaskingPolicyOptions{ - SetTag: tagAssociations, - } - err := client.MaskingPolicies.Alter(ctx, id, alterOptions) - require.NoError(t, err) - tagValue, err := client.SystemFunctions.GetTag(ctx, tag.ID(), id, sdk.ObjectTypeMaskingPolicy) - require.NoError(t, err) - assert.Equal(t, tagAssociations[0].Value, tagValue) - tag2Value, err := client.SystemFunctions.GetTag(ctx, tag2.ID(), id, sdk.ObjectTypeMaskingPolicy) - require.NoError(t, err) - assert.Equal(t, tagAssociations[1].Value, tag2Value) - - // unset tag - alterOptions = &sdk.AlterMaskingPolicyOptions{ - UnsetTag: []sdk.ObjectIdentifier{tag.ID()}, - } - err = client.MaskingPolicies.Alter(ctx, id, alterOptions) - require.NoError(t, err) - _, err = client.SystemFunctions.GetTag(ctx, tag.ID(), id, sdk.ObjectTypeMaskingPolicy) - assert.Error(t, err) - }) - t.Run("set body", func(t *testing.T) { maskingPolicy, maskingPolicyCleanup := testClientHelper().MaskingPolicy.CreateMaskingPolicy(t) id := maskingPolicy.ID() diff --git a/pkg/sdk/testint/materialized_views_gen_integration_test.go b/pkg/sdk/testint/materialized_views_gen_integration_test.go index 08c625ad3a..98ba774f94 100644 --- a/pkg/sdk/testint/materialized_views_gen_integration_test.go +++ b/pkg/sdk/testint/materialized_views_gen_integration_test.go @@ -305,44 +305,6 @@ func TestInt_MaterializedViews(t *testing.T) { assert.Equal(t, false, alteredView.IsSecure) }) - // Based on usage notes, set/unset tags is done through VIEW (https://docs.snowflake.com/en/sql-reference/sql/alter-materialized-view#usage-notes). 
- // TODO [SNOW-1022645]: discuss how we handle situation like this in the SDK - t.Run("alter materialized view: set and unset tags", func(t *testing.T) { - tag, tagCleanup := testClientHelper().Tag.CreateTag(t) - t.Cleanup(tagCleanup) - - materializedView := createMaterializedView(t) - id := materializedView.ID() - - tagValue := "abc" - tags := []sdk.TagAssociation{ - { - Name: tag.ID(), - Value: tagValue, - }, - } - alterRequestSetTags := sdk.NewAlterViewRequest(id).WithSetTags(tags) - - err := client.Views.Alter(ctx, alterRequestSetTags) - require.NoError(t, err) - - returnedTagValue, err := client.SystemFunctions.GetTag(ctx, tag.ID(), id, sdk.ObjectTypeMaterializedView) - require.NoError(t, err) - - assert.Equal(t, tagValue, returnedTagValue) - - unsetTags := []sdk.ObjectIdentifier{ - tag.ID(), - } - alterRequestUnsetTags := sdk.NewAlterViewRequest(id).WithUnsetTags(unsetTags) - - err = client.Views.Alter(ctx, alterRequestUnsetTags) - require.NoError(t, err) - - _, err = client.SystemFunctions.GetTag(ctx, tag.ID(), id, sdk.ObjectTypeMaterializedView) - require.Error(t, err) - }) - t.Run("show materialized view: default", func(t *testing.T) { view1 := createMaterializedView(t) view2 := createMaterializedView(t) diff --git a/pkg/sdk/testint/notification_integrations_gen_integration_test.go b/pkg/sdk/testint/notification_integrations_gen_integration_test.go index 2a706790a9..e813680226 100644 --- a/pkg/sdk/testint/notification_integrations_gen_integration_test.go +++ b/pkg/sdk/testint/notification_integrations_gen_integration_test.go @@ -358,42 +358,6 @@ func TestInt_NotificationIntegrations(t *testing.T) { assert.Contains(t, details, sdk.NotificationIntegrationProperty{Name: "COMMENT", Type: "String", Value: "", Default: ""}) }) - t.Run("alter notification integration: set and unset tags", func(t *testing.T) { - tag, tagCleanup := testClientHelper().Tag.CreateTag(t) - t.Cleanup(tagCleanup) - - integration := createNotificationIntegrationEmail(t) - id := integration.ID() - - tagValue := "abc" - tags := []sdk.TagAssociation{ - { - Name: tag.ID(), - Value: tagValue, - }, - } - alterRequestSetTags := sdk.NewAlterNotificationIntegrationRequest(id).WithSetTags(tags) - - err := client.NotificationIntegrations.Alter(ctx, alterRequestSetTags) - require.NoError(t, err) - - returnedTagValue, err := client.SystemFunctions.GetTag(ctx, tag.ID(), id, sdk.ObjectTypeIntegration) - require.NoError(t, err) - - assert.Equal(t, tagValue, returnedTagValue) - - unsetTags := []sdk.ObjectIdentifier{ - tag.ID(), - } - alterRequestUnsetTags := sdk.NewAlterNotificationIntegrationRequest(id).WithUnsetTags(unsetTags) - - err = client.NotificationIntegrations.Alter(ctx, alterRequestUnsetTags) - require.NoError(t, err) - - _, err = client.SystemFunctions.GetTag(ctx, tag.ID(), id, sdk.ObjectTypeIntegration) - require.Error(t, err) - }) - t.Run("drop notification integration: existing", func(t *testing.T) { request := createNotificationIntegrationEmailRequest(t) id := request.GetName() diff --git a/pkg/sdk/testint/pipes_integration_test.go b/pkg/sdk/testint/pipes_integration_test.go index 2f8701e802..f352d06b26 100644 --- a/pkg/sdk/testint/pipes_integration_test.go +++ b/pkg/sdk/testint/pipes_integration_test.go @@ -281,44 +281,6 @@ func TestInt_PipeAlter(t *testing.T) { assert.Equal(t, "", alteredPipe.Comment) }) - t.Run("set and unset tag", func(t *testing.T) { - tag, tagCleanup := testClientHelper().Tag.CreateTag(t) - t.Cleanup(tagCleanup) - - pipe, pipeCleanup := testClientHelper().Pipe.CreatePipe(t, 
pipeCopyStatement) - t.Cleanup(pipeCleanup) - - tagValue := "abc" - alterOptions := &sdk.AlterPipeOptions{ - SetTag: []sdk.TagAssociation{ - { - Name: tag.ID(), - Value: tagValue, - }, - }, - } - - err := itc.client.Pipes.Alter(itc.ctx, pipe.ID(), alterOptions) - require.NoError(t, err) - - returnedTagValue, err := itc.client.SystemFunctions.GetTag(itc.ctx, tag.ID(), pipe.ID(), sdk.ObjectTypePipe) - require.NoError(t, err) - - assert.Equal(t, tagValue, returnedTagValue) - - alterOptions = &sdk.AlterPipeOptions{ - UnsetTag: []sdk.ObjectIdentifier{ - tag.ID(), - }, - } - - err = itc.client.Pipes.Alter(itc.ctx, pipe.ID(), alterOptions) - require.NoError(t, err) - - _, err = itc.client.SystemFunctions.GetTag(itc.ctx, tag.ID(), pipe.ID(), sdk.ObjectTypePipe) - assert.Error(t, err) - }) - t.Run("refresh with all", func(t *testing.T) { pipe, pipeCleanup := testClientHelper().Pipe.CreatePipe(t, pipeCopyStatement) t.Cleanup(pipeCleanup) diff --git a/pkg/sdk/testint/procedures_integration_test.go b/pkg/sdk/testint/procedures_integration_test.go index 687d80a2db..309a3db9f9 100644 --- a/pkg/sdk/testint/procedures_integration_test.go +++ b/pkg/sdk/testint/procedures_integration_test.go @@ -342,9 +342,6 @@ func TestInt_OtherProcedureFunctions(t *testing.T) { client := testClient(t) ctx := testContext(t) - tagTest, tagCleanup := testClientHelper().Tag.CreateTag(t) - t.Cleanup(tagCleanup) - assertProcedure := func(t *testing.T, id sdk.SchemaObjectIdentifierWithArguments, secure bool) { t.Helper() @@ -471,32 +468,6 @@ func TestInt_OtherProcedureFunctions(t *testing.T) { assertProcedure(t, id, true) }) - t.Run("alter procedure: set and unset tags", func(t *testing.T) { - f := createProcedureForSQLHandle(t, true) - - id := f.ID() - setTags := []sdk.TagAssociation{ - { - Name: tagTest.ID(), - Value: "v1", - }, - } - err := client.Procedures.Alter(ctx, sdk.NewAlterProcedureRequest(id).WithSetTags(setTags)) - require.NoError(t, err) - assertProcedure(t, id, true) - - value, err := client.SystemFunctions.GetTag(ctx, tagTest.ID(), id, sdk.ObjectTypeProcedure) - require.NoError(t, err) - assert.Equal(t, "v1", value) - - unsetTags := []sdk.ObjectIdentifier{ - tagTest.ID(), - } - err = client.Procedures.Alter(ctx, sdk.NewAlterProcedureRequest(id).WithUnsetTags(unsetTags)) - require.NoError(t, err) - assertProcedure(t, id, true) - }) - t.Run("show procedure for SQL: without like", func(t *testing.T) { f1 := createProcedureForSQLHandle(t, true) f2 := createProcedureForSQLHandle(t, true) diff --git a/pkg/sdk/testint/roles_integration_test.go b/pkg/sdk/testint/roles_integration_test.go index a27422745f..5c173a1ada 100644 --- a/pkg/sdk/testint/roles_integration_test.go +++ b/pkg/sdk/testint/roles_integration_test.go @@ -100,53 +100,6 @@ func TestInt_Roles(t *testing.T) { assert.Equal(t, newName.Name(), r.Name) }) - t.Run("alter set tags", func(t *testing.T) { - role, cleanup := testClientHelper().Role.CreateRole(t) - t.Cleanup(cleanup) - - _, err := client.SystemFunctions.GetTag(ctx, tag.ID(), role.ID(), "ROLE") - require.Error(t, err) - - tagValue := "new-tag-value" - err = client.Roles.Alter(ctx, sdk.NewAlterRoleRequest(role.ID()).WithSetTags([]sdk.TagAssociation{ - { - Name: tag.ID(), - Value: tagValue, - }, - })) - require.NoError(t, err) - - addedTag, err := client.SystemFunctions.GetTag(ctx, tag.ID(), role.ID(), sdk.ObjectTypeRole) - require.NoError(t, err) - assert.Equal(t, tagValue, addedTag) - - err = client.Roles.Alter(ctx, sdk.NewAlterRoleRequest(role.ID()).WithUnsetTags([]sdk.ObjectIdentifier{tag.ID()})) - 
require.NoError(t, err) - }) - - t.Run("alter unset tags", func(t *testing.T) { - tagValue := "tag-value" - id := testClientHelper().Ids.RandomAccountObjectIdentifier() - role, cleanup := testClientHelper().Role.CreateRoleWithRequest(t, sdk.NewCreateRoleRequest(id). - WithTag([]sdk.TagAssociation{ - { - Name: tag.ID(), - Value: tagValue, - }, - })) - t.Cleanup(cleanup) - - value, err := client.SystemFunctions.GetTag(ctx, tag.ID(), role.ID(), sdk.ObjectTypeRole) - require.NoError(t, err) - assert.Equal(t, tagValue, value) - - err = client.Roles.Alter(ctx, sdk.NewAlterRoleRequest(role.ID()).WithUnsetTags([]sdk.ObjectIdentifier{tag.ID()})) - require.NoError(t, err) - - _, err = client.SystemFunctions.GetTag(ctx, tag.ID(), role.ID(), sdk.ObjectTypeRole) - require.Error(t, err) - }) - t.Run("alter set comment", func(t *testing.T) { role, cleanupRole := testClientHelper().Role.CreateRole(t) t.Cleanup(cleanupRole) diff --git a/pkg/sdk/testint/row_access_policies_gen_integration_test.go b/pkg/sdk/testint/row_access_policies_gen_integration_test.go index 577bf14ef7..4241a7eab1 100644 --- a/pkg/sdk/testint/row_access_policies_gen_integration_test.go +++ b/pkg/sdk/testint/row_access_policies_gen_integration_test.go @@ -168,43 +168,6 @@ func TestInt_RowAccessPolicies(t *testing.T) { assert.Equal(t, "true", alteredRowAccessPolicyDescription.Body) }) - t.Run("alter row access policy: set and unset tags", func(t *testing.T) { - tag, tagCleanup := testClientHelper().Tag.CreateTag(t) - t.Cleanup(tagCleanup) - - rowAccessPolicy, cleanup := testClientHelper().RowAccessPolicy.CreateRowAccessPolicy(t) - t.Cleanup(cleanup) - id := rowAccessPolicy.ID() - - tagValue := "abc" - tags := []sdk.TagAssociation{ - { - Name: tag.ID(), - Value: tagValue, - }, - } - alterRequestSetTags := sdk.NewAlterRowAccessPolicyRequest(id).WithSetTags(tags) - - err := client.RowAccessPolicies.Alter(ctx, alterRequestSetTags) - require.NoError(t, err) - - returnedTagValue, err := client.SystemFunctions.GetTag(ctx, tag.ID(), id, sdk.ObjectTypeRowAccessPolicy) - require.NoError(t, err) - - assert.Equal(t, tagValue, returnedTagValue) - - unsetTags := []sdk.ObjectIdentifier{ - tag.ID(), - } - alterRequestUnsetTags := sdk.NewAlterRowAccessPolicyRequest(id).WithUnsetTags(unsetTags) - - err = client.RowAccessPolicies.Alter(ctx, alterRequestUnsetTags) - require.NoError(t, err) - - _, err = client.SystemFunctions.GetTag(ctx, tag.ID(), id, sdk.ObjectTypeRowAccessPolicy) - require.Error(t, err) - }) - t.Run("show row access policy: default", func(t *testing.T) { rowAccessPolicy1, cleanup1 := testClientHelper().RowAccessPolicy.CreateRowAccessPolicy(t) t.Cleanup(cleanup1) diff --git a/pkg/sdk/testint/schemas_integration_test.go b/pkg/sdk/testint/schemas_integration_test.go index 3b59001862..a1eaa75c3c 100644 --- a/pkg/sdk/testint/schemas_integration_test.go +++ b/pkg/sdk/testint/schemas_integration_test.go @@ -445,60 +445,6 @@ func TestInt_Schemas(t *testing.T) { }) }) - t.Run("alter: set tags", func(t *testing.T) { - schema, cleanupSchema := testClientHelper().Schema.CreateSchema(t) - t.Cleanup(cleanupSchema) - - tag, cleanupTag := testClientHelper().Tag.CreateTagInSchema(t, schema.ID()) - t.Cleanup(cleanupTag) - - tagValue := "tag-value" - err := client.Schemas.Alter(ctx, schema.ID(), &sdk.AlterSchemaOptions{ - SetTag: []sdk.TagAssociation{ - { - Name: tag.ID(), - Value: tagValue, - }, - }, - }) - require.NoError(t, err) - - tv, err := client.SystemFunctions.GetTag(ctx, tag.ID(), schema.ID(), sdk.ObjectTypeSchema) - require.NoError(t, err) - 
assert.Equal(t, tagValue, tv) - }) - - t.Run("alter: unset tags", func(t *testing.T) { - tag, tagCleanup := testClientHelper().Tag.CreateTag(t) - t.Cleanup(tagCleanup) - - schemaID := testClientHelper().Ids.RandomDatabaseObjectIdentifier() - tagValue := random.String() - err := client.Schemas.Create(ctx, schemaID, &sdk.CreateSchemaOptions{ - Tag: []sdk.TagAssociation{ - { - Name: tag.ID(), - Value: tagValue, - }, - }, - }) - require.NoError(t, err) - t.Cleanup(func() { - err = client.Schemas.Drop(ctx, schemaID, nil) - require.NoError(t, err) - }) - - err = client.Schemas.Alter(ctx, schemaID, &sdk.AlterSchemaOptions{ - UnsetTag: []sdk.ObjectIdentifier{ - tag.ID(), - }, - }) - require.NoError(t, err) - - _, err = client.SystemFunctions.GetTag(ctx, tag.ID(), schemaID, sdk.ObjectTypeSchema) - require.Error(t, err) - }) - t.Run("alter: enable managed access", func(t *testing.T) { schema, cleanupSchema := testClientHelper().Schema.CreateSchema(t) t.Cleanup(cleanupSchema) diff --git a/pkg/sdk/testint/security_integrations_gen_integration_test.go b/pkg/sdk/testint/security_integrations_gen_integration_test.go index cc2fd07ab4..3f6ee32ddf 100644 --- a/pkg/sdk/testint/security_integrations_gen_integration_test.go +++ b/pkg/sdk/testint/security_integrations_gen_integration_test.go @@ -642,41 +642,6 @@ func TestInt_SecurityIntegrations(t *testing.T) { assert.Contains(t, details, sdk.SecurityIntegrationProperty{Name: "COMMENT", Type: "String", Value: "", Default: ""}) }) - t.Run("AlterApiAuthenticationWithClientCredentialsFlow - set and unset tags", func(t *testing.T) { - tag, tagCleanup := testClientHelper().Tag.CreateTag(t) - t.Cleanup(tagCleanup) - - _, id := createApiAuthClientCred(t, nil) - - tagValue := "abc" - tags := []sdk.TagAssociation{ - { - Name: tag.ID(), - Value: tagValue, - }, - } - alterRequestSetTags := sdk.NewAlterApiAuthenticationWithClientCredentialsFlowSecurityIntegrationRequest(id).WithSetTags(tags) - - err := client.SecurityIntegrations.AlterApiAuthenticationWithClientCredentialsFlow(ctx, alterRequestSetTags) - require.NoError(t, err) - - returnedTagValue, err := client.SystemFunctions.GetTag(ctx, tag.ID(), id, sdk.ObjectTypeIntegration) - require.NoError(t, err) - - assert.Equal(t, tagValue, returnedTagValue) - - unsetTags := []sdk.ObjectIdentifier{ - tag.ID(), - } - alterRequestUnsetTags := sdk.NewAlterApiAuthenticationWithClientCredentialsFlowSecurityIntegrationRequest(id).WithUnsetTags(unsetTags) - - err = client.SecurityIntegrations.AlterApiAuthenticationWithClientCredentialsFlow(ctx, alterRequestUnsetTags) - require.NoError(t, err) - - _, err = client.SystemFunctions.GetTag(ctx, tag.ID(), id, sdk.ObjectTypeIntegration) - require.Error(t, err) - }) - t.Run("AlterApiAuthenticationWithAuthorizationCodeGrantFlow", func(t *testing.T) { _, id := createApiAuthCodeGrant(t, nil) setRequest := sdk.NewAlterApiAuthenticationWithAuthorizationCodeGrantFlowSecurityIntegrationRequest(id). 
@@ -728,41 +693,6 @@ func TestInt_SecurityIntegrations(t *testing.T) { assert.Contains(t, details, sdk.SecurityIntegrationProperty{Name: "COMMENT", Type: "String", Value: "", Default: ""}) }) - t.Run("AlterApiAuthenticationWithAuthorizationCodeGrantFlow - set and unset tags", func(t *testing.T) { - tag, tagCleanup := testClientHelper().Tag.CreateTag(t) - t.Cleanup(tagCleanup) - - _, id := createApiAuthCodeGrant(t, nil) - - tagValue := "abc" - tags := []sdk.TagAssociation{ - { - Name: tag.ID(), - Value: tagValue, - }, - } - alterRequestSetTags := sdk.NewAlterApiAuthenticationWithAuthorizationCodeGrantFlowSecurityIntegrationRequest(id).WithSetTags(tags) - - err := client.SecurityIntegrations.AlterApiAuthenticationWithAuthorizationCodeGrantFlow(ctx, alterRequestSetTags) - require.NoError(t, err) - - returnedTagValue, err := client.SystemFunctions.GetTag(ctx, tag.ID(), id, sdk.ObjectTypeIntegration) - require.NoError(t, err) - - assert.Equal(t, tagValue, returnedTagValue) - - unsetTags := []sdk.ObjectIdentifier{ - tag.ID(), - } - alterRequestUnsetTags := sdk.NewAlterApiAuthenticationWithAuthorizationCodeGrantFlowSecurityIntegrationRequest(id).WithUnsetTags(unsetTags) - - err = client.SecurityIntegrations.AlterApiAuthenticationWithAuthorizationCodeGrantFlow(ctx, alterRequestUnsetTags) - require.NoError(t, err) - - _, err = client.SystemFunctions.GetTag(ctx, tag.ID(), id, sdk.ObjectTypeIntegration) - require.Error(t, err) - }) - t.Run("AlterApiAuthenticationWithJwtBearerFlow", func(t *testing.T) { // TODO [SNOW-1452191]: unskip t.Skip("Skip because of the error: Invalid value specified for property 'OAUTH_CLIENT_SECRET'") @@ -817,44 +747,6 @@ func TestInt_SecurityIntegrations(t *testing.T) { assert.Contains(t, details, sdk.SecurityIntegrationProperty{Name: "COMMENT", Type: "String", Value: "", Default: ""}) }) - t.Run("AlterApiAuthenticationWithJwtBearerFlow - set and unset tags", func(t *testing.T) { - // TODO [SNOW-1452191]: unskip - t.Skip("Skip because of the error: Invalid value specified for property 'OAUTH_CLIENT_SECRET'") - - tag, tagCleanup := testClientHelper().Tag.CreateTag(t) - t.Cleanup(tagCleanup) - - _, id := createApiAuthJwtBearer(t, nil) - - tagValue := "abc" - tags := []sdk.TagAssociation{ - { - Name: tag.ID(), - Value: tagValue, - }, - } - alterRequestSetTags := sdk.NewAlterApiAuthenticationWithJwtBearerFlowSecurityIntegrationRequest(id).WithSetTags(tags) - - err := client.SecurityIntegrations.AlterApiAuthenticationWithJwtBearerFlow(ctx, alterRequestSetTags) - require.NoError(t, err) - - returnedTagValue, err := client.SystemFunctions.GetTag(ctx, tag.ID(), id, sdk.ObjectTypeIntegration) - require.NoError(t, err) - - assert.Equal(t, tagValue, returnedTagValue) - - unsetTags := []sdk.ObjectIdentifier{ - tag.ID(), - } - alterRequestUnsetTags := sdk.NewAlterApiAuthenticationWithJwtBearerFlowSecurityIntegrationRequest(id).WithUnsetTags(unsetTags) - - err = client.SecurityIntegrations.AlterApiAuthenticationWithJwtBearerFlow(ctx, alterRequestUnsetTags) - require.NoError(t, err) - - _, err = client.SystemFunctions.GetTag(ctx, tag.ID(), id, sdk.ObjectTypeIntegration) - require.Error(t, err) - }) - t.Run("AlterExternalOauth with other options", func(t *testing.T) { _, id, _ := createExternalOauth(t, func(r *sdk.CreateExternalOauthSecurityIntegrationRequest) { r.WithExternalOauthRsaPublicKey(rsaKey). 
@@ -888,6 +780,7 @@ func TestInt_SecurityIntegrations(t *testing.T) { enabled: "true", externalOauthIssuer: newIssuer, externalOauthAnyRoleMode: string(sdk.ExternalOauthSecurityIntegrationAnyRoleModeDisable), + externalOauthScopeMappingAttribute: "scp", externalOauthRsaPublicKey: rsaKey, externalOauthRsaPublicKey2: rsaKey, externalOauthBlockedRolesList: role1.Name, @@ -914,43 +807,6 @@ func TestInt_SecurityIntegrations(t *testing.T) { assert.Contains(t, details, sdk.SecurityIntegrationProperty{Name: "EXTERNAL_OAUTH_AUDIENCE_LIST", Type: "List", Value: "", Default: "[]"}) }) - t.Run("AlterExternalOauth - set and unset tags", func(t *testing.T) { - tag, tagCleanup := testClientHelper().Tag.CreateTag(t) - t.Cleanup(tagCleanup) - - _, id, _ := createExternalOauth(t, func(r *sdk.CreateExternalOauthSecurityIntegrationRequest) { - r.WithExternalOauthJwsKeysUrl([]sdk.JwsKeysUrl{{JwsKeyUrl: "http://example.com"}}) - }) - - tagValue := "abc" - tags := []sdk.TagAssociation{ - { - Name: tag.ID(), - Value: tagValue, - }, - } - alterRequestSetTags := sdk.NewAlterExternalOauthSecurityIntegrationRequest(id).WithSetTags(tags) - - err := client.SecurityIntegrations.AlterExternalOauth(ctx, alterRequestSetTags) - require.NoError(t, err) - - returnedTagValue, err := client.SystemFunctions.GetTag(ctx, tag.ID(), id, sdk.ObjectTypeIntegration) - require.NoError(t, err) - - assert.Equal(t, tagValue, returnedTagValue) - - unsetTags := []sdk.ObjectIdentifier{ - tag.ID(), - } - alterRequestUnsetTags := sdk.NewAlterExternalOauthSecurityIntegrationRequest(id).WithUnsetTags(unsetTags) - - err = client.SecurityIntegrations.AlterExternalOauth(ctx, alterRequestUnsetTags) - require.NoError(t, err) - - _, err = client.SystemFunctions.GetTag(ctx, tag.ID(), id, sdk.ObjectTypeIntegration) - require.Error(t, err) - }) - t.Run("AlterOauthPartner", func(t *testing.T) { _, id := createOauthPartner(t, func(r *sdk.CreateOauthForPartnerApplicationsSecurityIntegrationRequest) { r.WithOauthRedirectUri("http://example.com") @@ -1002,41 +858,6 @@ func TestInt_SecurityIntegrations(t *testing.T) { assert.Contains(t, details, sdk.SecurityIntegrationProperty{Name: "OAUTH_USE_SECONDARY_ROLES", Type: "String", Value: "NONE", Default: "NONE"}) }) - t.Run("AlterOauthPartner - set and unset tags", func(t *testing.T) { - tag, tagCleanup := testClientHelper().Tag.CreateTag(t) - t.Cleanup(tagCleanup) - - _, id := createOauthPartner(t, nil) - - tagValue := "abc" - tags := []sdk.TagAssociation{ - { - Name: tag.ID(), - Value: tagValue, - }, - } - alterRequestSetTags := sdk.NewAlterOauthForPartnerApplicationsSecurityIntegrationRequest(id).WithSetTags(tags) - - err := client.SecurityIntegrations.AlterOauthForPartnerApplications(ctx, alterRequestSetTags) - require.NoError(t, err) - - returnedTagValue, err := client.SystemFunctions.GetTag(ctx, tag.ID(), id, sdk.ObjectTypeIntegration) - require.NoError(t, err) - - assert.Equal(t, tagValue, returnedTagValue) - - unsetTags := []sdk.ObjectIdentifier{ - tag.ID(), - } - alterRequestUnsetTags := sdk.NewAlterOauthForPartnerApplicationsSecurityIntegrationRequest(id).WithUnsetTags(unsetTags) - - err = client.SecurityIntegrations.AlterOauthForPartnerApplications(ctx, alterRequestUnsetTags) - require.NoError(t, err) - - _, err = client.SystemFunctions.GetTag(ctx, tag.ID(), id, sdk.ObjectTypeIntegration) - require.Error(t, err) - }) - t.Run("AlterOauthCustom", func(t *testing.T) { _, id := createOauthCustom(t, nil) @@ -1103,41 +924,6 @@ func TestInt_SecurityIntegrations(t *testing.T) { assert.Contains(t, details, 
sdk.SecurityIntegrationProperty{Name: "OAUTH_CLIENT_RSA_PUBLIC_KEY_2_FP", Type: "String", Value: "", Default: ""}) }) - t.Run("AlterOauthCustom - set and unset tags", func(t *testing.T) { - tag, tagCleanup := testClientHelper().Tag.CreateTag(t) - t.Cleanup(tagCleanup) - - _, id := createOauthCustom(t, nil) - - tagValue := "abc" - tags := []sdk.TagAssociation{ - { - Name: tag.ID(), - Value: tagValue, - }, - } - alterRequestSetTags := sdk.NewAlterOauthForCustomClientsSecurityIntegrationRequest(id).WithSetTags(tags) - - err := client.SecurityIntegrations.AlterOauthForCustomClients(ctx, alterRequestSetTags) - require.NoError(t, err) - - returnedTagValue, err := client.SystemFunctions.GetTag(ctx, tag.ID(), id, sdk.ObjectTypeIntegration) - require.NoError(t, err) - - assert.Equal(t, tagValue, returnedTagValue) - - unsetTags := []sdk.ObjectIdentifier{ - tag.ID(), - } - alterRequestUnsetTags := sdk.NewAlterOauthForCustomClientsSecurityIntegrationRequest(id).WithUnsetTags(unsetTags) - - err = client.SecurityIntegrations.AlterOauthForCustomClients(ctx, alterRequestUnsetTags) - require.NoError(t, err) - - _, err = client.SystemFunctions.GetTag(ctx, tag.ID(), id, sdk.ObjectTypeIntegration) - require.Error(t, err) - }) - t.Run("AlterSAML2Integration", func(t *testing.T) { _, id, issuer := createSAML2Integration(t, nil) @@ -1213,124 +999,6 @@ func TestInt_SecurityIntegrations(t *testing.T) { require.NoError(t, err) }) - t.Run("AlterSAML2Integration - set and unset tags", func(t *testing.T) { - tag, tagCleanup := testClientHelper().Tag.CreateTag(t) - t.Cleanup(tagCleanup) - - _, id, _ := createSAML2Integration(t, nil) - - tagValue := "abc" - tags := []sdk.TagAssociation{ - { - Name: tag.ID(), - Value: tagValue, - }, - } - alterRequestSetTags := sdk.NewAlterSaml2SecurityIntegrationRequest(id).WithSetTags(tags) - - err := client.SecurityIntegrations.AlterSaml2(ctx, alterRequestSetTags) - require.NoError(t, err) - - returnedTagValue, err := client.SystemFunctions.GetTag(ctx, tag.ID(), id, sdk.ObjectTypeIntegration) - require.NoError(t, err) - - assert.Equal(t, tagValue, returnedTagValue) - - unsetTags := []sdk.ObjectIdentifier{ - tag.ID(), - } - alterRequestUnsetTags := sdk.NewAlterSaml2SecurityIntegrationRequest(id).WithUnsetTags(unsetTags) - - err = client.SecurityIntegrations.AlterSaml2(ctx, alterRequestUnsetTags) - require.NoError(t, err) - - _, err = client.SystemFunctions.GetTag(ctx, tag.ID(), id, sdk.ObjectTypeIntegration) - require.Error(t, err) - }) - - t.Run("AlterSCIMIntegration", func(t *testing.T) { - _, id := createSCIMIntegration(t, nil) - - networkPolicy, networkPolicyCleanup := testClientHelper().NetworkPolicy.CreateNetworkPolicy(t) - t.Cleanup(networkPolicyCleanup) - - setRequest := sdk.NewAlterScimSecurityIntegrationRequest(id). - WithSet( - *sdk.NewScimIntegrationSetRequest(). - WithNetworkPolicy(networkPolicy.ID()). - WithEnabled(false). - WithSyncPassword(false). - WithComment(sdk.StringAllowEmpty{Value: "altered"}), - ) - err := client.SecurityIntegrations.AlterScim(ctx, setRequest) - require.NoError(t, err) - - details, err := client.SecurityIntegrations.Describe(ctx, id) - require.NoError(t, err) - - assertSCIMDescribe(details, "false", networkPolicy.Name, "GENERIC_SCIM_PROVISIONER", "false", "altered") - - unsetRequest := sdk.NewAlterScimSecurityIntegrationRequest(id). - WithUnset( - *sdk.NewScimIntegrationUnsetRequest(). - WithEnabled(true). - WithNetworkPolicy(true). 
- WithSyncPassword(true), - ) - err = client.SecurityIntegrations.AlterScim(ctx, unsetRequest) - require.NoError(t, err) - - // check setting empty comment because of lacking UNSET COMMENT - // TODO(SNOW-1461780): change this to UNSET - setRequest = sdk.NewAlterScimSecurityIntegrationRequest(id). - WithSet( - *sdk.NewScimIntegrationSetRequest(). - WithComment(sdk.StringAllowEmpty{Value: ""}), - ) - err = client.SecurityIntegrations.AlterScim(ctx, setRequest) - require.NoError(t, err) - - details, err = client.SecurityIntegrations.Describe(ctx, id) - require.NoError(t, err) - - assertSCIMDescribe(details, "false", "", "GENERIC_SCIM_PROVISIONER", "true", "") - }) - - t.Run("AlterSCIMIntegration - set and unset tags", func(t *testing.T) { - tag, tagCleanup := testClientHelper().Tag.CreateTag(t) - t.Cleanup(tagCleanup) - - _, id := createSCIMIntegration(t, nil) - - tagValue := "abc" - tags := []sdk.TagAssociation{ - { - Name: tag.ID(), - Value: tagValue, - }, - } - alterRequestSetTags := sdk.NewAlterScimSecurityIntegrationRequest(id).WithSetTags(tags) - - err := client.SecurityIntegrations.AlterScim(ctx, alterRequestSetTags) - require.NoError(t, err) - - returnedTagValue, err := client.SystemFunctions.GetTag(ctx, tag.ID(), id, sdk.ObjectTypeIntegration) - require.NoError(t, err) - - assert.Equal(t, tagValue, returnedTagValue) - - unsetTags := []sdk.ObjectIdentifier{ - tag.ID(), - } - alterRequestUnsetTags := sdk.NewAlterScimSecurityIntegrationRequest(id).WithUnsetTags(unsetTags) - - err = client.SecurityIntegrations.AlterScim(ctx, alterRequestUnsetTags) - require.NoError(t, err) - - _, err = client.SystemFunctions.GetTag(ctx, tag.ID(), id, sdk.ObjectTypeIntegration) - require.Error(t, err) - }) - t.Run("Drop", func(t *testing.T) { _, id := createSCIMIntegration(t, nil) diff --git a/pkg/sdk/testint/session_policies_gen_integration_test.go b/pkg/sdk/testint/session_policies_gen_integration_test.go index 6f936e33ff..2c1aaf7678 100644 --- a/pkg/sdk/testint/session_policies_gen_integration_test.go +++ b/pkg/sdk/testint/session_policies_gen_integration_test.go @@ -140,45 +140,6 @@ func TestInt_SessionPolicies(t *testing.T) { assert.Equal(t, "", alteredSessionPolicy.Comment) }) - t.Run("set and unset tag", func(t *testing.T) { - tag, tagCleanup := testClientHelper().Tag.CreateTag(t) - t.Cleanup(tagCleanup) - - id := testClientHelper().Ids.RandomSchemaObjectIdentifier() - - err := client.SessionPolicies.Create(ctx, sdk.NewCreateSessionPolicyRequest(id)) - require.NoError(t, err) - t.Cleanup(cleanupSessionPolicyProvider(id)) - - tagValue := "abc" - tags := []sdk.TagAssociation{ - { - Name: tag.ID(), - Value: tagValue, - }, - } - alterRequestSetTags := sdk.NewAlterSessionPolicyRequest(id).WithSetTags(tags) - - err = client.SessionPolicies.Alter(ctx, alterRequestSetTags) - require.NoError(t, err) - - returnedTagValue, err := client.SystemFunctions.GetTag(ctx, tag.ID(), id, sdk.ObjectTypeSessionPolicy) - require.NoError(t, err) - - assert.Equal(t, tagValue, returnedTagValue) - - unsetTags := []sdk.ObjectIdentifier{ - tag.ID(), - } - alterRequestUnsetTags := sdk.NewAlterSessionPolicyRequest(id).WithUnsetTags(unsetTags) - - err = client.SessionPolicies.Alter(ctx, alterRequestUnsetTags) - require.NoError(t, err) - - _, err = client.SystemFunctions.GetTag(ctx, tag.ID(), id, sdk.ObjectTypeSessionPolicy) - require.Error(t, err) - }) - t.Run("alter session_policy: rename", func(t *testing.T) { id := testClientHelper().Ids.RandomSchemaObjectIdentifier() diff --git a/pkg/sdk/testint/shares_integration_test.go 
b/pkg/sdk/testint/shares_integration_test.go index ad54a6102a..91d6ddd455 100644 --- a/pkg/sdk/testint/shares_integration_test.go +++ b/pkg/sdk/testint/shares_integration_test.go @@ -276,61 +276,6 @@ func TestInt_SharesAlter(t *testing.T) { share = shares[0] assert.Equal(t, "", share.Comment) }) - - t.Run("set and unset tags", func(t *testing.T) { - shareTest, shareCleanup := testClientHelper().Share.CreateShare(t) - t.Cleanup(shareCleanup) - err := client.Grants.GrantPrivilegeToShare(ctx, []sdk.ObjectPrivilege{sdk.ObjectPrivilegeUsage}, &sdk.ShareGrantOn{ - Database: testClientHelper().Ids.DatabaseId(), - }, shareTest.ID()) - require.NoError(t, err) - t.Cleanup(func() { - err = client.Grants.RevokePrivilegeFromShare(ctx, []sdk.ObjectPrivilege{sdk.ObjectPrivilegeUsage}, &sdk.ShareGrantOn{ - Database: testClientHelper().Ids.DatabaseId(), - }, shareTest.ID()) - require.NoError(t, err) - }) - - tagTest, tagCleanup := testClientHelper().Tag.CreateTag(t) - t.Cleanup(tagCleanup) - tagTest2, tagCleanup2 := testClientHelper().Tag.CreateTag(t) - t.Cleanup(tagCleanup2) - tagAssociations := []sdk.TagAssociation{ - { - Name: tagTest.ID(), - Value: random.String(), - }, - { - Name: tagTest2.ID(), - Value: random.String(), - }, - } - err = client.Shares.Alter(ctx, shareTest.ID(), &sdk.AlterShareOptions{ - IfExists: sdk.Bool(true), - SetTag: tagAssociations, - }) - require.NoError(t, err) - tagValue, err := client.SystemFunctions.GetTag(ctx, tagTest.ID(), shareTest.ID(), sdk.ObjectTypeShare) - require.NoError(t, err) - assert.Equal(t, tagAssociations[0].Value, tagValue) - tagValue, err = client.SystemFunctions.GetTag(ctx, tagTest2.ID(), shareTest.ID(), sdk.ObjectTypeShare) - require.NoError(t, err) - assert.Equal(t, tagAssociations[1].Value, tagValue) - - // unset tags - err = client.Shares.Alter(ctx, shareTest.ID(), &sdk.AlterShareOptions{ - IfExists: sdk.Bool(true), - UnsetTag: []sdk.ObjectIdentifier{ - tagTest.ID(), - }, - }) - require.NoError(t, err) - _, err = client.SystemFunctions.GetTag(ctx, tagTest.ID(), shareTest.ID(), sdk.ObjectTypeShare) - require.Error(t, err) - tagValue, err = client.SystemFunctions.GetTag(ctx, tagTest2.ID(), shareTest.ID(), sdk.ObjectTypeShare) - require.NoError(t, err) - assert.Equal(t, tagAssociations[1].Value, tagValue) - }) } func TestInt_ShareDescribeProvider(t *testing.T) { diff --git a/pkg/sdk/testint/stages_gen_integration_test.go b/pkg/sdk/testint/stages_gen_integration_test.go index dd70aa4429..5bb3a94198 100644 --- a/pkg/sdk/testint/stages_gen_integration_test.go +++ b/pkg/sdk/testint/stages_gen_integration_test.go @@ -235,42 +235,6 @@ func TestInt_Stages(t *testing.T) { require.NoError(t, err) }) - t.Run("Alter - set unset tags", func(t *testing.T) { - id := testClientHelper().Ids.RandomSchemaObjectIdentifier() - tag, cleanupTag := testClientHelper().Tag.CreateTag(t) - t.Cleanup(cleanupTag) - - err := client.Stages.CreateInternal(ctx, sdk.NewCreateInternalStageRequest(id)) - require.NoError(t, err) - t.Cleanup(func() { - err := client.Stages.Drop(ctx, sdk.NewDropStageRequest(id)) - require.NoError(t, err) - }) - - _, err = client.SystemFunctions.GetTag(ctx, tag.ID(), id, sdk.ObjectTypeStage) - require.Error(t, err) - - err = client.Stages.Alter(ctx, sdk.NewAlterStageRequest(id).WithSetTags([]sdk.TagAssociation{ - { - Name: tag.ID(), - Value: "tag value", - }, - })) - require.NoError(t, err) - - value, err := client.SystemFunctions.GetTag(ctx, tag.ID(), id, sdk.ObjectTypeStage) - require.NoError(t, err) - assert.Equal(t, "tag value", value) - - err = 
client.Stages.Alter(ctx, sdk.NewAlterStageRequest(id).WithUnsetTags([]sdk.ObjectIdentifier{ - tag.ID(), - })) - require.NoError(t, err) - - _, err = client.SystemFunctions.GetTag(ctx, tag.ID(), id, sdk.ObjectTypeStage) - require.Error(t, err) - }) - t.Run("AlterInternalStage", func(t *testing.T) { id := testClientHelper().Ids.RandomSchemaObjectIdentifier() diff --git a/pkg/sdk/testint/storage_integration_gen_integration_test.go b/pkg/sdk/testint/storage_integration_gen_integration_test.go index cc3edad74d..5fb1839705 100644 --- a/pkg/sdk/testint/storage_integration_gen_integration_test.go +++ b/pkg/sdk/testint/storage_integration_gen_integration_test.go @@ -307,36 +307,6 @@ func TestInt_StorageIntegrations(t *testing.T) { assertS3StorageIntegrationDescResult(t, props, false, s3AllowedLocations, []sdk.StorageLocation{}, "") }) - t.Run("Alter - set and unset tags", func(t *testing.T) { - id := createS3StorageIntegration(t) - - tag, tagCleanup := testClientHelper().Tag.CreateTag(t) - t.Cleanup(tagCleanup) - - err := client.StorageIntegrations.Alter(ctx, sdk.NewAlterStorageIntegrationRequest(id). - WithSetTags([]sdk.TagAssociation{ - { - Name: tag.ID(), - Value: "tag-value", - }, - })) - require.NoError(t, err) - - tagValue, err := client.SystemFunctions.GetTag(ctx, tag.ID(), id, sdk.ObjectTypeIntegration) - require.NoError(t, err) - - assert.Equal(t, "tag-value", tagValue) - - err = client.StorageIntegrations.Alter(ctx, sdk.NewAlterStorageIntegrationRequest(id). - WithUnsetTags([]sdk.ObjectIdentifier{ - tag.ID(), - })) - require.NoError(t, err) - - _, err = client.SystemFunctions.GetTag(ctx, tag.ID(), id, sdk.ObjectTypeIntegration) - require.Error(t, err, sdk.ErrObjectNotExistOrAuthorized) - }) - t.Run("Describe - S3", func(t *testing.T) { id := createS3StorageIntegration(t) diff --git a/pkg/sdk/testint/streams_gen_integration_test.go b/pkg/sdk/testint/streams_gen_integration_test.go index 2e99ca4f05..f5cd1b4581 100644 --- a/pkg/sdk/testint/streams_gen_integration_test.go +++ b/pkg/sdk/testint/streams_gen_integration_test.go @@ -211,44 +211,6 @@ func TestInt_Streams(t *testing.T) { ) }) - t.Run("Alter tags", func(t *testing.T) { - table, cleanupTable := testClientHelper().Table.CreateInSchema(t, schemaId) - t.Cleanup(cleanupTable) - - id := testClientHelper().Ids.RandomSchemaObjectIdentifier() - req := sdk.NewCreateOnTableStreamRequest(id, table.ID()) - err := client.Streams.CreateOnTable(ctx, req) - require.NoError(t, err) - t.Cleanup(testClientHelper().Stream.DropFunc(t, id)) - - tag, cleanupTag := testClientHelper().Tag.CreateTag(t) - t.Cleanup(cleanupTag) - - _, err = client.SystemFunctions.GetTag(ctx, tag.ID(), id, sdk.ObjectTypeStream) - require.Error(t, err) - - err = client.Streams.Alter(ctx, sdk.NewAlterStreamRequest(id).WithSetTags([]sdk.TagAssociation{ - { - Name: tag.ID(), - Value: "tag_value", - }, - })) - require.NoError(t, err) - - tagValue, err := client.SystemFunctions.GetTag(ctx, tag.ID(), id, sdk.ObjectTypeStream) - require.NoError(t, err) - assert.Equal(t, "tag_value", tagValue) - - err = client.Streams.Alter(ctx, sdk.NewAlterStreamRequest(id).WithUnsetTags([]sdk.ObjectIdentifier{tag.ID()})) - require.NoError(t, err) - - _, err = client.SystemFunctions.GetTag(ctx, tag.ID(), id, sdk.ObjectTypeStream) - require.Error(t, err) - - _, err = client.Streams.ShowByID(ctx, id) - require.NoError(t, err) - }) - t.Run("Alter comment", func(t *testing.T) { table, cleanupTable := testClientHelper().Table.CreateInSchema(t, schemaId) t.Cleanup(cleanupTable) diff --git 
a/pkg/sdk/testint/system_functions_integration_test.go b/pkg/sdk/testint/system_functions_integration_test.go index 4b60bc016b..02208e84c2 100644 --- a/pkg/sdk/testint/system_functions_integration_test.go +++ b/pkg/sdk/testint/system_functions_integration_test.go @@ -45,6 +45,10 @@ func TestInt_GetTag(t *testing.T) { require.Error(t, err) assert.Equal(t, "", s) }) + t.Run("unsupported object type", func(t *testing.T) { + _, err := client.SystemFunctions.GetTag(ctx, tagTest.ID(), testClientHelper().Ids.RandomAccountObjectIdentifier(), sdk.ObjectTypeSequence) + require.ErrorContains(t, err, "tagging for object type SEQUENCE is not supported") + }) } func TestInt_PipeStatus(t *testing.T) { diff --git a/pkg/sdk/testint/tables_integration_test.go b/pkg/sdk/testint/tables_integration_test.go index 7ea659be23..19d7e8732f 100644 --- a/pkg/sdk/testint/tables_integration_test.go +++ b/pkg/sdk/testint/tables_integration_test.go @@ -540,114 +540,6 @@ func TestInt_Table(t *testing.T) { assert.Empty(t, tableDetails[0].PolicyName) }) - t.Run("alter table: set and unset tags", func(t *testing.T) { - id := testClientHelper().Ids.RandomSchemaObjectIdentifier() - columns := []sdk.TableColumnRequest{ - *sdk.NewTableColumnRequest("COLUMN_1", sdk.DataTypeVARCHAR), - *sdk.NewTableColumnRequest("COLUMN_2", sdk.DataTypeVARCHAR), - } - - err := client.Tables.Create(ctx, sdk.NewCreateTableRequest(id, columns)) - require.NoError(t, err) - t.Cleanup(cleanupTableProvider(id)) - - columnTags := []sdk.TagAssociationRequest{ - { - Name: tag1.ID(), - Value: "v1", - }, - { - Name: tag2.ID(), - Value: "v2", - }, - } - - alterRequest := sdk.NewAlterTableRequest(id).WithSetTags(columnTags) - err = client.Tables.Alter(ctx, alterRequest) - require.NoError(t, err) - - returnedTagValue, err := client.SystemFunctions.GetTag(ctx, tag1.ID(), id, sdk.ObjectTypeTable) - require.NoError(t, err) - - assert.Equal(t, "v1", returnedTagValue) - - returnedTagValue, err = client.SystemFunctions.GetTag(ctx, tag2.ID(), id, sdk.ObjectTypeTable) - require.NoError(t, err) - - assert.Equal(t, "v2", returnedTagValue) - - unsetTags := []sdk.ObjectIdentifier{ - tag1.ID(), - tag2.ID(), - } - alterRequestUnsetTags := sdk.NewAlterTableRequest(id).WithUnsetTags(unsetTags) - - err = client.Tables.Alter(ctx, alterRequestUnsetTags) - require.NoError(t, err) - - _, err = client.SystemFunctions.GetTag(ctx, tag1.ID(), id, sdk.ObjectTypeTable) - require.Error(t, err) - - _, err = client.SystemFunctions.GetTag(ctx, tag2.ID(), id, sdk.ObjectTypeTable) - require.Error(t, err) - }) - - t.Run("alter table: set and unset tags on columns", func(t *testing.T) { - id := testClientHelper().Ids.RandomSchemaObjectIdentifier() - columns := []sdk.TableColumnRequest{ - *sdk.NewTableColumnRequest("COLUMN_1", sdk.DataTypeVARCHAR), - *sdk.NewTableColumnRequest("COLUMN_2", sdk.DataTypeVARCHAR), - } - - err := client.Tables.Create(ctx, sdk.NewCreateTableRequest(id, columns)) - require.NoError(t, err) - t.Cleanup(cleanupTableProvider(id)) - - columnTags := []sdk.TagAssociation{ - { - Name: tag1.ID(), - Value: "v1", - }, - { - Name: tag2.ID(), - Value: "v2", - }, - } - - alterRequestSetTags := sdk.NewAlterTableRequest(id).WithColumnAction(sdk.NewTableColumnActionRequest(). 
- WithSetTags(sdk.NewTableColumnAlterSetTagsActionRequest("COLUMN_1", columnTags))) - err = client.Tables.Alter(ctx, alterRequestSetTags) - require.NoError(t, err) - - columnId := sdk.NewTableColumnIdentifier(id.DatabaseName(), id.SchemaName(), id.Name(), "COLUMN_1") - - returnedTagValue, err := client.SystemFunctions.GetTag(ctx, tag1.ID(), columnId, sdk.ObjectTypeColumn) - require.NoError(t, err) - - assert.Equal(t, "v1", returnedTagValue) - - returnedTagValue, err = client.SystemFunctions.GetTag(ctx, tag2.ID(), columnId, sdk.ObjectTypeColumn) - require.NoError(t, err) - - assert.Equal(t, "v2", returnedTagValue) - - unsetTags := []sdk.ObjectIdentifier{ - tag1.ID(), - tag2.ID(), - } - alterRequestUnsetTags := sdk.NewAlterTableRequest(id).WithColumnAction(sdk.NewTableColumnActionRequest(). - WithUnsetTags(sdk.NewTableColumnAlterUnsetTagsActionRequest("COLUMN_1", unsetTags))) - - err = client.Tables.Alter(ctx, alterRequestUnsetTags) - require.NoError(t, err) - - _, err = client.SystemFunctions.GetTag(ctx, tag1.ID(), id, sdk.ObjectTypeColumn) - require.Error(t, err) - - _, err = client.SystemFunctions.GetTag(ctx, tag2.ID(), id, sdk.ObjectTypeColumn) - require.Error(t, err) - }) - t.Run("alter table: drop columns", func(t *testing.T) { id := testClientHelper().Ids.RandomSchemaObjectIdentifier() columns := []sdk.TableColumnRequest{ diff --git a/pkg/sdk/testint/tags_integration_test.go b/pkg/sdk/testint/tags_integration_test.go index d1fc3ba772..d16d74d661 100644 --- a/pkg/sdk/testint/tags_integration_test.go +++ b/pkg/sdk/testint/tags_integration_test.go @@ -2,11 +2,13 @@ package testint import ( "context" + "fmt" "testing" assertions "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/bettertestspoc/assert" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/bettertestspoc/assert/objectassert" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/helpers/random" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/testenvs" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/internal/collections" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/internal/snowflakeroles" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk" @@ -281,3 +283,790 @@ func TestInt_TagsShowByID(t *testing.T) { require.Equal(t, id2, e2.ID()) }) } + +type IDProvider[T sdk.AccountObjectIdentifier | sdk.DatabaseObjectIdentifier | sdk.SchemaObjectIdentifier | sdk.TableColumnIdentifier] interface { + ID() T +} + +func TestInt_TagsAssociations(t *testing.T) { + client := testClient(t) + ctx := testContext(t) + + awsBucketUrl := testenvs.GetOrSkipTest(t, testenvs.AwsExternalBucketUrl) + awsRoleARN := testenvs.GetOrSkipTest(t, testenvs.AwsExternalRoleArn) + + tag, tagCleanup := testClientHelper().Tag.CreateTag(t) + t.Cleanup(tagCleanup) + + tagValue := "abc" + tags := []sdk.TagAssociation{ + { + Name: tag.ID(), + Value: tagValue, + }, + } + unsetTags := []sdk.ObjectIdentifier{ + tag.ID(), + } + + testTagSet := func(id sdk.ObjectIdentifier, objectType sdk.ObjectType) { + err := client.Tags.Set(ctx, sdk.NewSetTagRequest(objectType, id).WithSetTags(tags)) + require.NoError(t, err) + + returnedTagValue, err := client.SystemFunctions.GetTag(ctx, tag.ID(), id, objectType) + require.NoError(t, err) + assert.Equal(t, tagValue, returnedTagValue) + + err = client.Tags.Unset(ctx, sdk.NewUnsetTagRequest(objectType, id).WithUnsetTags(unsetTags)) + require.NoError(t, err) + + _, err = client.SystemFunctions.GetTag(ctx, tag.ID(), id, 
objectType) + require.ErrorContains(t, err, "sql: Scan error on column index 0, name \"TAG\": converting NULL to string is unsupported") + } + + t.Run("TestInt_TagAssociationForAccount", func(t *testing.T) { + id := testClientHelper().Ids.AccountIdentifierWithLocator() + err := client.Accounts.Alter(ctx, &sdk.AlterAccountOptions{ + SetTag: tags, + }) + require.NoError(t, err) + + returnedTagValue, err := client.SystemFunctions.GetTag(ctx, tag.ID(), id, sdk.ObjectTypeAccount) + require.NoError(t, err) + assert.Equal(t, tagValue, returnedTagValue) + + err = client.Accounts.Alter(ctx, &sdk.AlterAccountOptions{ + UnsetTag: unsetTags, + }) + require.NoError(t, err) + + _, err = client.SystemFunctions.GetTag(ctx, tag.ID(), id, sdk.ObjectTypeAccount) + require.ErrorContains(t, err, "sql: Scan error on column index 0, name \"TAG\": converting NULL to string is unsupported") + + // test tag sdk method + err = client.Tags.SetOnCurrentAccount(ctx, sdk.NewSetTagOnCurrentAccountRequest().WithSetTags(tags)) + require.NoError(t, err) + + returnedTagValue, err = client.SystemFunctions.GetTag(ctx, tag.ID(), id, sdk.ObjectTypeAccount) + require.NoError(t, err) + assert.Equal(t, tagValue, returnedTagValue) + + err = client.Tags.UnsetOnCurrentAccount(ctx, sdk.NewUnsetTagOnCurrentAccountRequest().WithUnsetTags(unsetTags)) + require.NoError(t, err) + + _, err = client.SystemFunctions.GetTag(ctx, tag.ID(), id, sdk.ObjectTypeAccount) + require.ErrorContains(t, err, "sql: Scan error on column index 0, name \"TAG\": converting NULL to string is unsupported") + }) + + accountObjectTestCases := []struct { + name string + objectType sdk.ObjectType + setupObject func() (IDProvider[sdk.AccountObjectIdentifier], func()) + setTags func(sdk.AccountObjectIdentifier, []sdk.TagAssociation) error + unsetTags func(sdk.AccountObjectIdentifier, []sdk.ObjectIdentifier) error + }{ + { + name: "ApplicationPackage", + objectType: sdk.ObjectTypeApplicationPackage, + setupObject: func() (IDProvider[sdk.AccountObjectIdentifier], func()) { + return testClientHelper().ApplicationPackage.CreateApplicationPackage(t) + }, + setTags: func(id sdk.AccountObjectIdentifier, tags []sdk.TagAssociation) error { + return client.ApplicationPackages.Alter(ctx, sdk.NewAlterApplicationPackageRequest(id).WithSetTags(tags)) + }, + unsetTags: func(id sdk.AccountObjectIdentifier, tags []sdk.ObjectIdentifier) error { + return client.ApplicationPackages.Alter(ctx, sdk.NewAlterApplicationPackageRequest(id).WithUnsetTags(tags)) + }, + }, + { + name: "NormalDatabase", + objectType: sdk.ObjectTypeDatabase, + setupObject: func() (IDProvider[sdk.AccountObjectIdentifier], func()) { + return testClientHelper().Database.CreateDatabase(t) + }, + setTags: func(id sdk.AccountObjectIdentifier, tags []sdk.TagAssociation) error { + return client.Databases.Alter(ctx, id, &sdk.AlterDatabaseOptions{ + SetTag: tags, + }) + }, + unsetTags: func(id sdk.AccountObjectIdentifier, tags []sdk.ObjectIdentifier) error { + return client.Databases.Alter(ctx, id, &sdk.AlterDatabaseOptions{ + UnsetTag: tags, + }) + }, + }, + { + name: "DatabaseFromShare", + objectType: sdk.ObjectTypeDatabase, + setupObject: func() (IDProvider[sdk.AccountObjectIdentifier], func()) { + return createDatabaseFromShare(t) + }, + setTags: func(id sdk.AccountObjectIdentifier, tags []sdk.TagAssociation) error { + return client.Databases.Alter(ctx, id, &sdk.AlterDatabaseOptions{ + SetTag: tags, + }) + }, + unsetTags: func(id sdk.AccountObjectIdentifier, tags []sdk.ObjectIdentifier) error { + return 
client.Databases.Alter(ctx, id, &sdk.AlterDatabaseOptions{ + UnsetTag: tags, + }) + }, + }, + // TODO [SNOW-1002023]: Add a test for failover groups; Business Critical Snowflake Edition needed + { + name: "ApiIntegration", + objectType: sdk.ObjectTypeIntegration, + setupObject: func() (IDProvider[sdk.AccountObjectIdentifier], func()) { + return testClientHelper().ApiIntegration.CreateApiIntegration(t) + }, + setTags: func(id sdk.AccountObjectIdentifier, tags []sdk.TagAssociation) error { + return client.ApiIntegrations.Alter(ctx, sdk.NewAlterApiIntegrationRequest(id).WithSetTags(tags)) + }, + unsetTags: func(id sdk.AccountObjectIdentifier, tags []sdk.ObjectIdentifier) error { + return client.ApiIntegrations.Alter(ctx, sdk.NewAlterApiIntegrationRequest(id).WithUnsetTags(tags)) + }, + }, + { + name: "NotificationIntegration", + objectType: sdk.ObjectTypeIntegration, + setupObject: func() (IDProvider[sdk.AccountObjectIdentifier], func()) { + return testClientHelper().NotificationIntegration.Create(t) + }, + setTags: func(id sdk.AccountObjectIdentifier, tags []sdk.TagAssociation) error { + return client.NotificationIntegrations.Alter(ctx, sdk.NewAlterNotificationIntegrationRequest(id).WithSetTags(tags)) + }, + unsetTags: func(id sdk.AccountObjectIdentifier, tags []sdk.ObjectIdentifier) error { + return client.NotificationIntegrations.Alter(ctx, sdk.NewAlterNotificationIntegrationRequest(id).WithUnsetTags(tags)) + }, + }, + { + name: "StorageIntegration", + objectType: sdk.ObjectTypeIntegration, + setupObject: func() (IDProvider[sdk.AccountObjectIdentifier], func()) { + return testClientHelper().StorageIntegration.CreateS3(t, awsBucketUrl, awsRoleARN) + }, + setTags: func(id sdk.AccountObjectIdentifier, tags []sdk.TagAssociation) error { + return client.StorageIntegrations.Alter(ctx, sdk.NewAlterStorageIntegrationRequest(id).WithSetTags(tags)) + }, + unsetTags: func(id sdk.AccountObjectIdentifier, tags []sdk.ObjectIdentifier) error { + return client.StorageIntegrations.Alter(ctx, sdk.NewAlterStorageIntegrationRequest(id).WithUnsetTags(tags)) + }, + }, + { + name: "ApiAuthenticationWithClientCredentialsFlow", + objectType: sdk.ObjectTypeIntegration, + setupObject: func() (IDProvider[sdk.AccountObjectIdentifier], func()) { + return testClientHelper().SecurityIntegration.CreateApiAuthenticationWithClientCredentialsFlow(t) + }, + setTags: func(id sdk.AccountObjectIdentifier, tags []sdk.TagAssociation) error { + return client.SecurityIntegrations.AlterApiAuthenticationWithClientCredentialsFlow(ctx, sdk.NewAlterApiAuthenticationWithClientCredentialsFlowSecurityIntegrationRequest(id).WithSetTags(tags)) + }, + unsetTags: func(id sdk.AccountObjectIdentifier, tags []sdk.ObjectIdentifier) error { + return client.SecurityIntegrations.AlterApiAuthenticationWithClientCredentialsFlow(ctx, sdk.NewAlterApiAuthenticationWithClientCredentialsFlowSecurityIntegrationRequest(id).WithUnsetTags(tags)) + }, + }, + { + name: "ApiAuthenticationWithAuthorizationCodeGrantFlow", + objectType: sdk.ObjectTypeIntegration, + setupObject: func() (IDProvider[sdk.AccountObjectIdentifier], func()) { + return testClientHelper().SecurityIntegration.CreateApiAuthenticationWithAuthorizationCodeGrantFlow(t) + }, + setTags: func(id sdk.AccountObjectIdentifier, tags []sdk.TagAssociation) error { + return client.SecurityIntegrations.AlterApiAuthenticationWithAuthorizationCodeGrantFlow(ctx, sdk.NewAlterApiAuthenticationWithAuthorizationCodeGrantFlowSecurityIntegrationRequest(id).WithSetTags(tags)) + }, + unsetTags: func(id 
sdk.AccountObjectIdentifier, tags []sdk.ObjectIdentifier) error { + return client.SecurityIntegrations.AlterApiAuthenticationWithAuthorizationCodeGrantFlow(ctx, sdk.NewAlterApiAuthenticationWithAuthorizationCodeGrantFlowSecurityIntegrationRequest(id).WithUnsetTags(tags)) + }, + }, + // TODO [SNOW-1452191]: add a test for jwt bearer integration + { + name: "ExternalOauth", + objectType: sdk.ObjectTypeIntegration, + setupObject: func() (IDProvider[sdk.AccountObjectIdentifier], func()) { + return testClientHelper().SecurityIntegration.CreateExternalOauth(t) + }, + setTags: func(id sdk.AccountObjectIdentifier, tags []sdk.TagAssociation) error { + return client.SecurityIntegrations.AlterExternalOauth(ctx, sdk.NewAlterExternalOauthSecurityIntegrationRequest(id).WithSetTags(tags)) + }, + unsetTags: func(id sdk.AccountObjectIdentifier, tags []sdk.ObjectIdentifier) error { + return client.SecurityIntegrations.AlterExternalOauth(ctx, sdk.NewAlterExternalOauthSecurityIntegrationRequest(id).WithUnsetTags(tags)) + }, + }, + { + name: "OauthForPartnerApplications", + objectType: sdk.ObjectTypeIntegration, + setupObject: func() (IDProvider[sdk.AccountObjectIdentifier], func()) { + return testClientHelper().SecurityIntegration.CreateOauthForPartnerApplications(t) + }, + setTags: func(id sdk.AccountObjectIdentifier, tags []sdk.TagAssociation) error { + return client.SecurityIntegrations.AlterOauthForPartnerApplications(ctx, sdk.NewAlterOauthForPartnerApplicationsSecurityIntegrationRequest(id).WithSetTags(tags)) + }, + unsetTags: func(id sdk.AccountObjectIdentifier, tags []sdk.ObjectIdentifier) error { + return client.SecurityIntegrations.AlterOauthForPartnerApplications(ctx, sdk.NewAlterOauthForPartnerApplicationsSecurityIntegrationRequest(id).WithUnsetTags(tags)) + }, + }, + { + name: "OauthForCustomClients", + objectType: sdk.ObjectTypeIntegration, + setupObject: func() (IDProvider[sdk.AccountObjectIdentifier], func()) { + return testClientHelper().SecurityIntegration.CreateOauthForCustomClients(t) + }, + setTags: func(id sdk.AccountObjectIdentifier, tags []sdk.TagAssociation) error { + return client.SecurityIntegrations.AlterOauthForCustomClients(ctx, sdk.NewAlterOauthForCustomClientsSecurityIntegrationRequest(id).WithSetTags(tags)) + }, + unsetTags: func(id sdk.AccountObjectIdentifier, tags []sdk.ObjectIdentifier) error { + return client.SecurityIntegrations.AlterOauthForCustomClients(ctx, sdk.NewAlterOauthForCustomClientsSecurityIntegrationRequest(id).WithUnsetTags(tags)) + }, + }, + { + name: "Saml2", + objectType: sdk.ObjectTypeIntegration, + setupObject: func() (IDProvider[sdk.AccountObjectIdentifier], func()) { + return testClientHelper().SecurityIntegration.CreateSaml2(t) + }, + setTags: func(id sdk.AccountObjectIdentifier, tags []sdk.TagAssociation) error { + return client.SecurityIntegrations.AlterSaml2(ctx, sdk.NewAlterSaml2SecurityIntegrationRequest(id).WithSetTags(tags)) + }, + unsetTags: func(id sdk.AccountObjectIdentifier, tags []sdk.ObjectIdentifier) error { + return client.SecurityIntegrations.AlterSaml2(ctx, sdk.NewAlterSaml2SecurityIntegrationRequest(id).WithUnsetTags(tags)) + }, + }, + { + name: "Scim", + objectType: sdk.ObjectTypeIntegration, + setupObject: func() (IDProvider[sdk.AccountObjectIdentifier], func()) { + return testClientHelper().SecurityIntegration.CreateScim(t) + }, + setTags: func(id sdk.AccountObjectIdentifier, tags []sdk.TagAssociation) error { + return client.SecurityIntegrations.AlterScim(ctx, sdk.NewAlterScimSecurityIntegrationRequest(id).WithSetTags(tags)) + }, 
+ unsetTags: func(id sdk.AccountObjectIdentifier, tags []sdk.ObjectIdentifier) error { + return client.SecurityIntegrations.AlterScim(ctx, sdk.NewAlterScimSecurityIntegrationRequest(id).WithUnsetTags(tags)) + }, + }, + { + name: "Role", + objectType: sdk.ObjectTypeRole, + setupObject: func() (IDProvider[sdk.AccountObjectIdentifier], func()) { + return testClientHelper().Role.CreateRole(t) + }, + setTags: func(id sdk.AccountObjectIdentifier, tags []sdk.TagAssociation) error { + return client.Roles.Alter(ctx, sdk.NewAlterRoleRequest(id).WithSetTags(tags)) + }, + unsetTags: func(id sdk.AccountObjectIdentifier, tags []sdk.ObjectIdentifier) error { + return client.Roles.Alter(ctx, sdk.NewAlterRoleRequest(id).WithUnsetTags(tags)) + }, + }, + { + name: "Share", + objectType: sdk.ObjectTypeShare, + setupObject: func() (IDProvider[sdk.AccountObjectIdentifier], func()) { + return createShare(t, ctx, client) + }, + setTags: func(id sdk.AccountObjectIdentifier, tags []sdk.TagAssociation) error { + return client.Shares.Alter(ctx, id, &sdk.AlterShareOptions{ + SetTag: tags, + }) + }, + unsetTags: func(id sdk.AccountObjectIdentifier, tags []sdk.ObjectIdentifier) error { + return client.Shares.Alter(ctx, id, &sdk.AlterShareOptions{ + UnsetTag: tags, + }) + }, + }, + { + name: "User", + objectType: sdk.ObjectTypeUser, + setupObject: func() (IDProvider[sdk.AccountObjectIdentifier], func()) { + return testClientHelper().User.CreateUser(t) + }, + setTags: func(id sdk.AccountObjectIdentifier, tags []sdk.TagAssociation) error { + return client.Users.Alter(ctx, id, &sdk.AlterUserOptions{ + SetTag: tags, + }) + }, + unsetTags: func(id sdk.AccountObjectIdentifier, tags []sdk.ObjectIdentifier) error { + return client.Users.Alter(ctx, id, &sdk.AlterUserOptions{ + UnsetTag: tags, + }) + }, + }, + { + name: "Warehouse", + objectType: sdk.ObjectTypeWarehouse, + setupObject: func() (IDProvider[sdk.AccountObjectIdentifier], func()) { + return testClientHelper().Warehouse.CreateWarehouse(t) + }, + setTags: func(id sdk.AccountObjectIdentifier, tags []sdk.TagAssociation) error { + return client.Warehouses.Alter(ctx, id, &sdk.AlterWarehouseOptions{ + SetTag: tags, + }) + }, + unsetTags: func(id sdk.AccountObjectIdentifier, tags []sdk.ObjectIdentifier) error { + return client.Warehouses.Alter(ctx, id, &sdk.AlterWarehouseOptions{ + UnsetTag: tags, + }) + }, + }, + } + + for _, tc := range accountObjectTestCases { + t.Run(fmt.Sprintf("account object %s", tc.name), func(t *testing.T) { + idProvider, cleanup := tc.setupObject() + t.Cleanup(cleanup) + id := idProvider.ID() + err := tc.setTags(id, tags) + require.NoError(t, err) + + returnedTagValue, err := client.SystemFunctions.GetTag(ctx, tag.ID(), id, tc.objectType) + require.NoError(t, err) + assert.Equal(t, tagValue, returnedTagValue) + + err = tc.unsetTags(id, unsetTags) + require.NoError(t, err) + + _, err = client.SystemFunctions.GetTag(ctx, tag.ID(), id, tc.objectType) + require.ErrorContains(t, err, "sql: Scan error on column index 0, name \"TAG\": converting NULL to string is unsupported") + + // test object methods + testTagSet(id, tc.objectType) + }) + } + + t.Run("account object Application: invalid operation", func(t *testing.T) { + applicationPackage, applicationPackageCleanup := createApplicationPackage(t) + t.Cleanup(applicationPackageCleanup) + db, dbCleanup := testClientHelper().Application.CreateApplication(t, applicationPackage.ID(), "V01") + t.Cleanup(dbCleanup) + id := db.ID() + + err := client.Applications.Alter(ctx, 
sdk.NewAlterApplicationRequest(id).WithSetTags(tags)) + require.NoError(t, err) + + // TODO(SNOW-1746420): adjust after this is fixed on Snowflake side + _, err = client.SystemFunctions.GetTag(ctx, tag.ID(), id, sdk.ObjectTypeApplication) + require.ErrorContains(t, err, "391801 (0A000): SQL compilation error: Object tagging not supported for object type APPLICATION") + + err = client.Applications.Alter(ctx, sdk.NewAlterApplicationRequest(id).WithUnsetTags(unsetTags)) + require.NoError(t, err) + }) + + t.Run("account object database replica: invalid operation", func(t *testing.T) { + db, dbCleanup := createDatabaseReplica(t) + t.Cleanup(dbCleanup) + id := db.ID() + + err := client.Databases.Alter(ctx, id, &sdk.AlterDatabaseOptions{ + SetTag: tags, + }) + require.ErrorContains(t, err, "is a read-only secondary database and cannot be modified.") + }) + + databaseObjectTestCases := []struct { + name string + objectType sdk.ObjectType + setupObject func() (IDProvider[sdk.DatabaseObjectIdentifier], func()) + setTags func(sdk.DatabaseObjectIdentifier, []sdk.TagAssociation) error + unsetTags func(sdk.DatabaseObjectIdentifier, []sdk.ObjectIdentifier) error + }{ + { + name: "DatabaseRole", + objectType: sdk.ObjectTypeDatabaseRole, + setupObject: func() (IDProvider[sdk.DatabaseObjectIdentifier], func()) { + return testClientHelper().DatabaseRole.CreateDatabaseRole(t) + }, + setTags: func(id sdk.DatabaseObjectIdentifier, tags []sdk.TagAssociation) error { + return client.DatabaseRoles.Alter(ctx, sdk.NewAlterDatabaseRoleRequest(id).WithSetTags(tags)) + }, + unsetTags: func(id sdk.DatabaseObjectIdentifier, tags []sdk.ObjectIdentifier) error { + return client.DatabaseRoles.Alter(ctx, sdk.NewAlterDatabaseRoleRequest(id).WithUnsetTags(unsetTags)) + }, + }, + { + name: "Schema", + objectType: sdk.ObjectTypeSchema, + setupObject: func() (IDProvider[sdk.DatabaseObjectIdentifier], func()) { + return testClientHelper().Schema.CreateSchema(t) + }, + setTags: func(id sdk.DatabaseObjectIdentifier, tags []sdk.TagAssociation) error { + return client.Schemas.Alter(ctx, id, &sdk.AlterSchemaOptions{ + SetTag: tags, + }) + }, + unsetTags: func(id sdk.DatabaseObjectIdentifier, tags []sdk.ObjectIdentifier) error { + return client.Schemas.Alter(ctx, id, &sdk.AlterSchemaOptions{ + UnsetTag: tags, + }) + }, + }, + } + + for _, tc := range databaseObjectTestCases { + t.Run(fmt.Sprintf("database object %s", tc.name), func(t *testing.T) { + idProvider, cleanup := tc.setupObject() + t.Cleanup(cleanup) + id := idProvider.ID() + err := tc.setTags(id, tags) + require.NoError(t, err) + + returnedTagValue, err := client.SystemFunctions.GetTag(ctx, tag.ID(), id, tc.objectType) + require.NoError(t, err) + assert.Equal(t, tagValue, returnedTagValue) + + err = tc.unsetTags(id, unsetTags) + require.NoError(t, err) + + _, err = client.SystemFunctions.GetTag(ctx, tag.ID(), id, tc.objectType) + require.ErrorContains(t, err, "sql: Scan error on column index 0, name \"TAG\": converting NULL to string is unsupported") + + // test object methods + testTagSet(id, tc.objectType) + }) + } + + schemaObjectTestCases := []struct { + name string + objectType sdk.ObjectType + setupObject func() (IDProvider[sdk.SchemaObjectIdentifier], func()) + setTags func(sdk.SchemaObjectIdentifier, []sdk.TagAssociation) error + unsetTags func(sdk.SchemaObjectIdentifier, []sdk.ObjectIdentifier) error + }{ + { + name: "ExternalTable", + objectType: sdk.ObjectTypeExternalTable, + setupObject: func() (IDProvider[sdk.SchemaObjectIdentifier], func()) { + return 
createExternalTable(t) + }, + setTags: func(id sdk.SchemaObjectIdentifier, tags []sdk.TagAssociation) error { + setTags := make([]sdk.TagAssociationRequest, len(tags)) + for i, tag := range tags { + setTags[i] = *sdk.NewTagAssociationRequest(tag.Name, tag.Value) + } + return client.Tables.Alter(ctx, sdk.NewAlterTableRequest(id).WithSetTags(setTags)) + }, + unsetTags: func(id sdk.SchemaObjectIdentifier, tags []sdk.ObjectIdentifier) error { + return client.Tables.Alter(ctx, sdk.NewAlterTableRequest(id).WithUnsetTags(tags)) + }, + }, + { + name: "MaterializedView", + objectType: sdk.ObjectTypeMaterializedView, + setupObject: func() (IDProvider[sdk.SchemaObjectIdentifier], func()) { + return createMaterializedView(t) + }, + setTags: func(id sdk.SchemaObjectIdentifier, tags []sdk.TagAssociation) error { + return client.Views.Alter(ctx, sdk.NewAlterViewRequest(id).WithSetTags(tags)) + }, + unsetTags: func(id sdk.SchemaObjectIdentifier, tags []sdk.ObjectIdentifier) error { + return client.Views.Alter(ctx, sdk.NewAlterViewRequest(id).WithUnsetTags(tags)) + }, + }, + { + name: "Pipe", + objectType: sdk.ObjectTypePipe, + setupObject: func() (IDProvider[sdk.SchemaObjectIdentifier], func()) { + return createPipe(t) + }, + setTags: func(id sdk.SchemaObjectIdentifier, tags []sdk.TagAssociation) error { + return client.Pipes.Alter(ctx, id, &sdk.AlterPipeOptions{ + SetTag: tags, + }) + }, + unsetTags: func(id sdk.SchemaObjectIdentifier, tags []sdk.ObjectIdentifier) error { + return client.Pipes.Alter(ctx, id, &sdk.AlterPipeOptions{ + UnsetTag: tags, + }) + }, + }, + { + name: "MaskingPolicy", + objectType: sdk.ObjectTypeMaskingPolicy, + setupObject: func() (IDProvider[sdk.SchemaObjectIdentifier], func()) { + return testClientHelper().MaskingPolicy.CreateMaskingPolicy(t) + }, + setTags: func(id sdk.SchemaObjectIdentifier, tags []sdk.TagAssociation) error { + return client.MaskingPolicies.Alter(ctx, id, &sdk.AlterMaskingPolicyOptions{ + SetTag: tags, + }) + }, + unsetTags: func(id sdk.SchemaObjectIdentifier, tags []sdk.ObjectIdentifier) error { + return client.MaskingPolicies.Alter(ctx, id, &sdk.AlterMaskingPolicyOptions{ + UnsetTag: tags, + }) + }, + }, + { + name: "RowAccessPolicy", + objectType: sdk.ObjectTypeRowAccessPolicy, + setupObject: func() (IDProvider[sdk.SchemaObjectIdentifier], func()) { + return testClientHelper().RowAccessPolicy.CreateRowAccessPolicy(t) + }, + setTags: func(id sdk.SchemaObjectIdentifier, tags []sdk.TagAssociation) error { + return client.RowAccessPolicies.Alter(ctx, sdk.NewAlterRowAccessPolicyRequest(id).WithSetTags(tags)) + }, + unsetTags: func(id sdk.SchemaObjectIdentifier, tags []sdk.ObjectIdentifier) error { + return client.RowAccessPolicies.Alter(ctx, sdk.NewAlterRowAccessPolicyRequest(id).WithUnsetTags(tags)) + }, + }, + { + name: "SessionPolicy", + objectType: sdk.ObjectTypeSessionPolicy, + setupObject: func() (IDProvider[sdk.SchemaObjectIdentifier], func()) { + return testClientHelper().SessionPolicy.CreateSessionPolicy(t) + }, + setTags: func(id sdk.SchemaObjectIdentifier, tags []sdk.TagAssociation) error { + return client.SessionPolicies.Alter(ctx, sdk.NewAlterSessionPolicyRequest(id).WithSetTags(tags)) + }, + unsetTags: func(id sdk.SchemaObjectIdentifier, tags []sdk.ObjectIdentifier) error { + return client.SessionPolicies.Alter(ctx, sdk.NewAlterSessionPolicyRequest(id).WithUnsetTags(tags)) + }, + }, + { + name: "Stage", + objectType: sdk.ObjectTypeStage, + setupObject: func() (IDProvider[sdk.SchemaObjectIdentifier], func()) { + return 
testClientHelper().Stage.CreateStage(t) + }, + setTags: func(id sdk.SchemaObjectIdentifier, tags []sdk.TagAssociation) error { + return client.Stages.Alter(ctx, sdk.NewAlterStageRequest(id).WithSetTags(tags)) + }, + unsetTags: func(id sdk.SchemaObjectIdentifier, tags []sdk.ObjectIdentifier) error { + return client.Stages.Alter(ctx, sdk.NewAlterStageRequest(id).WithUnsetTags(tags)) + }, + }, + { + name: "Stream", + objectType: sdk.ObjectTypeStream, + setupObject: func() (IDProvider[sdk.SchemaObjectIdentifier], func()) { + return createStream(t) + }, + setTags: func(id sdk.SchemaObjectIdentifier, tags []sdk.TagAssociation) error { + return client.Streams.Alter(ctx, sdk.NewAlterStreamRequest(id).WithSetTags(tags)) + }, + unsetTags: func(id sdk.SchemaObjectIdentifier, tags []sdk.ObjectIdentifier) error { + return client.Streams.Alter(ctx, sdk.NewAlterStreamRequest(id).WithUnsetTags(tags)) + }, + }, + { + name: "EventTable", + objectType: sdk.ObjectTypeEventTable, + setupObject: func() (IDProvider[sdk.SchemaObjectIdentifier], func()) { + return testClientHelper().EventTable.Create(t) + }, + setTags: func(id sdk.SchemaObjectIdentifier, tags []sdk.TagAssociation) error { + return client.EventTables.Alter(ctx, sdk.NewAlterEventTableRequest(id).WithSetTags(tags)) + }, + unsetTags: func(id sdk.SchemaObjectIdentifier, tags []sdk.ObjectIdentifier) error { + return client.EventTables.Alter(ctx, sdk.NewAlterEventTableRequest(id).WithUnsetTags(tags)) + }, + }, + { + name: "Table", + objectType: sdk.ObjectTypeTable, + setupObject: func() (IDProvider[sdk.SchemaObjectIdentifier], func()) { + return testClientHelper().Table.Create(t) + }, + setTags: func(id sdk.SchemaObjectIdentifier, tags []sdk.TagAssociation) error { + setTags := make([]sdk.TagAssociationRequest, len(tags)) + for i, tag := range tags { + setTags[i] = *sdk.NewTagAssociationRequest(tag.Name, tag.Value) + } + return client.Tables.Alter(ctx, sdk.NewAlterTableRequest(id).WithSetTags(setTags)) + }, + unsetTags: func(id sdk.SchemaObjectIdentifier, tags []sdk.ObjectIdentifier) error { + return client.Tables.Alter(ctx, sdk.NewAlterTableRequest(id).WithUnsetTags(tags)) + }, + }, + { + name: "Task", + objectType: sdk.ObjectTypeTask, + setupObject: func() (IDProvider[sdk.SchemaObjectIdentifier], func()) { + return testClientHelper().Task.Create(t) + }, + setTags: func(id sdk.SchemaObjectIdentifier, tags []sdk.TagAssociation) error { + return client.Tasks.Alter(ctx, sdk.NewAlterTaskRequest(id).WithSetTags(tags)) + }, + unsetTags: func(id sdk.SchemaObjectIdentifier, tags []sdk.ObjectIdentifier) error { + return client.Tasks.Alter(ctx, sdk.NewAlterTaskRequest(id).WithUnsetTags(tags)) + }, + }, + { + name: "View", + objectType: sdk.ObjectTypeView, + setupObject: func() (IDProvider[sdk.SchemaObjectIdentifier], func()) { + return testClientHelper().View.CreateView(t, "SELECT ROLE_NAME, ROLE_OWNER FROM INFORMATION_SCHEMA.APPLICABLE_ROLES") + }, + setTags: func(id sdk.SchemaObjectIdentifier, tags []sdk.TagAssociation) error { + return client.Views.Alter(ctx, sdk.NewAlterViewRequest(id).WithSetTags(tags)) + }, + unsetTags: func(id sdk.SchemaObjectIdentifier, tags []sdk.ObjectIdentifier) error { + return client.Views.Alter(ctx, sdk.NewAlterViewRequest(id).WithUnsetTags(tags)) + }, + }, + } + + for _, tc := range schemaObjectTestCases { + t.Run(fmt.Sprintf("schema object %s", tc.name), func(t *testing.T) { + idProvider, cleanup := tc.setupObject() + t.Cleanup(cleanup) + id := idProvider.ID() + err := tc.setTags(id, tags) + require.NoError(t, err) + + 
returnedTagValue, err := client.SystemFunctions.GetTag(ctx, tag.ID(), id, tc.objectType) + require.NoError(t, err) + assert.Equal(t, tagValue, returnedTagValue) + + err = tc.unsetTags(id, unsetTags) + require.NoError(t, err) + + _, err = client.SystemFunctions.GetTag(ctx, tag.ID(), id, tc.objectType) + require.ErrorContains(t, err, "sql: Scan error on column index 0, name \"TAG\": converting NULL to string is unsupported") + + // test object methods + testTagSet(id, tc.objectType) + }) + } + + columnTestCases := []struct { + name string + setupObject func() (sdk.TableColumnIdentifier, func()) + setTags func(sdk.TableColumnIdentifier, []sdk.TagAssociation) error + unsetTags func(sdk.TableColumnIdentifier, []sdk.ObjectIdentifier) error + }{ + { + name: "Table", + setupObject: func() (sdk.TableColumnIdentifier, func()) { + object, objectCleanup := testClientHelper().Table.Create(t) + columnId := sdk.NewTableColumnIdentifier(object.ID().DatabaseName(), object.ID().SchemaName(), object.ID().Name(), "ID") + return columnId, objectCleanup + }, + setTags: func(id sdk.TableColumnIdentifier, tags []sdk.TagAssociation) error { + return client.Tables.Alter(ctx, sdk.NewAlterTableRequest(id.SchemaObjectId()).WithColumnAction(sdk.NewTableColumnActionRequest(). + WithSetTags(sdk.NewTableColumnAlterSetTagsActionRequest(id.Name(), tags)))) + }, + unsetTags: func(id sdk.TableColumnIdentifier, tags []sdk.ObjectIdentifier) error { + return client.Tables.Alter(ctx, sdk.NewAlterTableRequest(id.SchemaObjectId()).WithColumnAction(sdk.NewTableColumnActionRequest(). + WithUnsetTags(sdk.NewTableColumnAlterUnsetTagsActionRequest(id.Name(), tags)))) + }, + }, + { + name: "View", + setupObject: func() (sdk.TableColumnIdentifier, func()) { + object, objectCleanup := testClientHelper().View.CreateView(t, "SELECT ROLE_NAME, ROLE_OWNER FROM INFORMATION_SCHEMA.APPLICABLE_ROLES") + t.Cleanup(objectCleanup) + columnId := sdk.NewTableColumnIdentifier(object.ID().DatabaseName(), object.ID().SchemaName(), object.ID().Name(), "ROLE_NAME") + return columnId, objectCleanup + }, + setTags: func(id sdk.TableColumnIdentifier, tags []sdk.TagAssociation) error { + return client.Views.Alter(ctx, sdk.NewAlterViewRequest(id.SchemaObjectId()).WithSetTagsOnColumn( + *sdk.NewViewSetColumnTagsRequest("ROLE_NAME", tags), + )) + }, + unsetTags: func(id sdk.TableColumnIdentifier, tags []sdk.ObjectIdentifier) error { + return client.Views.Alter(ctx, sdk.NewAlterViewRequest(id.SchemaObjectId()).WithUnsetTagsOnColumn( + *sdk.NewViewUnsetColumnTagsRequest("ROLE_NAME", tags), + )) + }, + }, + } + + for _, tc := range columnTestCases { + t.Run(fmt.Sprintf("column in %s", tc.name), func(t *testing.T) { + id, cleanup := tc.setupObject() + t.Cleanup(cleanup) + err := tc.setTags(id, tags) + require.NoError(t, err) + + returnedTagValue, err := client.SystemFunctions.GetTag(ctx, tag.ID(), id, sdk.ObjectTypeColumn) + require.NoError(t, err) + assert.Equal(t, tagValue, returnedTagValue) + + err = tc.unsetTags(id, unsetTags) + require.NoError(t, err) + + _, err = client.SystemFunctions.GetTag(ctx, tag.ID(), id, sdk.ObjectTypeColumn) + require.ErrorContains(t, err, "sql: Scan error on column index 0, name \"TAG\": converting NULL to string is unsupported") + + // test object methods + testTagSet(id, sdk.ObjectTypeColumn) + }) + } + + schemaObjectWithArgumentsTestCases := []struct { + name string + objectType sdk.ObjectType + setupObject func() sdk.SchemaObjectIdentifierWithArguments + setTags func(sdk.SchemaObjectIdentifierWithArguments, []sdk.TagAssociation) 
error + unsetTags func(sdk.SchemaObjectIdentifierWithArguments, []sdk.ObjectIdentifier) error + }{ + { + name: "Function", + objectType: sdk.ObjectTypeFunction, + setupObject: func() sdk.SchemaObjectIdentifierWithArguments { + // cleanup is set up in the Create function + function := testClientHelper().Function.Create(t, sdk.DataTypeInt) + return function.ID() + }, + setTags: func(id sdk.SchemaObjectIdentifierWithArguments, tags []sdk.TagAssociation) error { + return client.Functions.Alter(ctx, sdk.NewAlterFunctionRequest(id).WithSetTags(tags)) + }, + unsetTags: func(id sdk.SchemaObjectIdentifierWithArguments, tags []sdk.ObjectIdentifier) error { + return client.Functions.Alter(ctx, sdk.NewAlterFunctionRequest(id).WithUnsetTags(tags)) + }, + }, + { + name: "ExternalFunction", + objectType: sdk.ObjectTypeExternalFunction, + setupObject: func() sdk.SchemaObjectIdentifierWithArguments { + integration, integrationCleanup := testClientHelper().ApiIntegration.CreateApiIntegration(t) + t.Cleanup(integrationCleanup) + // cleanup is set up in the Create function + function := testClientHelper().ExternalFunction.Create(t, integration.ID(), sdk.DataTypeInt) + return function.ID() + }, + setTags: func(id sdk.SchemaObjectIdentifierWithArguments, tags []sdk.TagAssociation) error { + return client.Functions.Alter(ctx, sdk.NewAlterFunctionRequest(id).WithSetTags(tags)) + }, + unsetTags: func(id sdk.SchemaObjectIdentifierWithArguments, tags []sdk.ObjectIdentifier) error { + return client.Functions.Alter(ctx, sdk.NewAlterFunctionRequest(id).WithUnsetTags(tags)) + }, + }, + { + name: "Procedure", + objectType: sdk.ObjectTypeProcedure, + setupObject: func() sdk.SchemaObjectIdentifierWithArguments { + // cleanup is set up in the Create procedure + procedure := testClientHelper().Procedure.Create(t, sdk.DataTypeInt) + return procedure.ID() + }, + setTags: func(id sdk.SchemaObjectIdentifierWithArguments, tags []sdk.TagAssociation) error { + return client.Procedures.Alter(ctx, sdk.NewAlterProcedureRequest(id).WithSetTags(tags)) + }, + unsetTags: func(id sdk.SchemaObjectIdentifierWithArguments, tags []sdk.ObjectIdentifier) error { + return client.Procedures.Alter(ctx, sdk.NewAlterProcedureRequest(id).WithUnsetTags(tags)) + }, + }, + } + + for _, tc := range schemaObjectWithArgumentsTestCases { + t.Run(fmt.Sprintf("schema object with arguments %s", tc.name), func(t *testing.T) { + id := tc.setupObject() + err := tc.setTags(id, tags) + require.NoError(t, err) + + returnedTagValue, err := client.SystemFunctions.GetTag(ctx, tag.ID(), id, tc.objectType) + require.NoError(t, err) + assert.Equal(t, tagValue, returnedTagValue) + + err = tc.unsetTags(id, unsetTags) + require.NoError(t, err) + + _, err = client.SystemFunctions.GetTag(ctx, tag.ID(), id, tc.objectType) + require.ErrorContains(t, err, "sql: Scan error on column index 0, name \"TAG\": converting NULL to string is unsupported") + + // test object methods + testTagSet(id, tc.objectType) + }) + } +} diff --git a/pkg/sdk/testint/tasks_gen_integration_test.go b/pkg/sdk/testint/tasks_gen_integration_test.go index 5fee5e5bcf..b73c8484fd 100644 --- a/pkg/sdk/testint/tasks_gen_integration_test.go +++ b/pkg/sdk/testint/tasks_gen_integration_test.go @@ -721,36 +721,6 @@ func TestInt_Tasks(t *testing.T) { assertions.AssertThat(t, objectparametersassert.TaskParameters(t, task.ID()).HasAllDefaults()) }) - t.Run("alter task: set and unset tag", func(t *testing.T) { - tag, tagCleanup := testClientHelper().Tag.CreateTag(t) - t.Cleanup(tagCleanup) - - task, taskCleanup := 
testClientHelper().Task.Create(t) - t.Cleanup(taskCleanup) - - tagValue := "abc" - err := client.Tasks.Alter(ctx, sdk.NewAlterTaskRequest(task.ID()).WithSetTags([]sdk.TagAssociation{ - { - Name: tag.ID(), - Value: tagValue, - }, - })) - require.NoError(t, err) - - returnedTagValue, err := client.SystemFunctions.GetTag(ctx, tag.ID(), task.ID(), sdk.ObjectTypeTask) - require.NoError(t, err) - - assert.Equal(t, tagValue, returnedTagValue) - - err = client.Tasks.Alter(ctx, sdk.NewAlterTaskRequest(task.ID()).WithUnsetTags([]sdk.ObjectIdentifier{ - tag.ID(), - })) - require.NoError(t, err) - - _, err = client.SystemFunctions.GetTag(ctx, tag.ID(), task.ID(), sdk.ObjectTypeTask) - require.Error(t, err) - }) - t.Run("alter task: resume and suspend", func(t *testing.T) { id := testClientHelper().Ids.RandomSchemaObjectIdentifier() task, taskCleanup := testClientHelper().Task.CreateWithRequest(t, sdk.NewCreateTaskRequest(id, sql).WithSchedule("10 MINUTE")) diff --git a/pkg/sdk/testint/users_integration_test.go b/pkg/sdk/testint/users_integration_test.go index 60906fe118..87dc57819a 100644 --- a/pkg/sdk/testint/users_integration_test.go +++ b/pkg/sdk/testint/users_integration_test.go @@ -44,9 +44,6 @@ func TestInt_Users(t *testing.T) { tag, tagCleanup := testClientHelper().Tag.CreateTag(t) t.Cleanup(tagCleanup) - tag2, tag2Cleanup := testClientHelper().Tag.CreateTag(t) - t.Cleanup(tag2Cleanup) - networkPolicy, networkPolicyCleanup := testClientHelper().NetworkPolicy.CreateNetworkPolicy(t) t.Cleanup(networkPolicyCleanup) @@ -1583,49 +1580,6 @@ func TestInt_Users(t *testing.T) { require.NoError(t, err) }) - t.Run("alter: set and unset tags", func(t *testing.T) { - user, userCleanup := testClientHelper().User.CreateUser(t) - t.Cleanup(userCleanup) - - alterOptions := &sdk.AlterUserOptions{ - SetTag: []sdk.TagAssociation{ - { - Name: tag.ID(), - Value: "val", - }, - { - Name: tag2.ID(), - Value: "val2", - }, - }, - } - err := client.Users.Alter(ctx, user.ID(), alterOptions) - require.NoError(t, err) - - val, err := client.SystemFunctions.GetTag(ctx, tag.ID(), user.ID(), sdk.ObjectTypeUser) - require.NoError(t, err) - require.Equal(t, "val", val) - val2, err := client.SystemFunctions.GetTag(ctx, tag2.ID(), user.ID(), sdk.ObjectTypeUser) - require.NoError(t, err) - require.Equal(t, "val2", val2) - - alterOptions = &sdk.AlterUserOptions{ - UnsetTag: []sdk.ObjectIdentifier{ - tag.ID(), - tag2.ID(), - }, - } - err = client.Users.Alter(ctx, user.ID(), alterOptions) - require.NoError(t, err) - - val, err = client.SystemFunctions.GetTag(ctx, tag.ID(), user.ID(), sdk.ObjectTypeUser) - require.Error(t, err) - require.Equal(t, "", val) - val2, err = client.SystemFunctions.GetTag(ctx, tag2.ID(), user.ID(), sdk.ObjectTypeUser) - require.Error(t, err) - require.Equal(t, "", val2) - }) - t.Run("describe: when user exists", func(t *testing.T) { userDetails, err := client.Users.Describe(ctx, user.ID()) require.NoError(t, err) diff --git a/pkg/sdk/testint/views_gen_integration_test.go b/pkg/sdk/testint/views_gen_integration_test.go index e96b2666a6..7fda5f15ae 100644 --- a/pkg/sdk/testint/views_gen_integration_test.go +++ b/pkg/sdk/testint/views_gen_integration_test.go @@ -353,42 +353,6 @@ func TestInt_Views(t *testing.T) { assert.Equal(t, "OFF", alteredView.ChangeTracking) }) - t.Run("alter view: set and unset tag", func(t *testing.T) { - tag, tagCleanup := testClientHelper().Tag.CreateTag(t) - t.Cleanup(tagCleanup) - - view := createView(t) - id := view.ID() - - tagValue := "abc" - tags := []sdk.TagAssociation{ - { - 
Name: tag.ID(), - Value: tagValue, - }, - } - alterRequestSetTags := sdk.NewAlterViewRequest(id).WithSetTags(tags) - - err := client.Views.Alter(ctx, alterRequestSetTags) - require.NoError(t, err) - - returnedTagValue, err := client.SystemFunctions.GetTag(ctx, tag.ID(), id, sdk.ObjectTypeView) - require.NoError(t, err) - - assert.Equal(t, tagValue, returnedTagValue) - - unsetTags := []sdk.ObjectIdentifier{ - tag.ID(), - } - alterRequestUnsetTags := sdk.NewAlterViewRequest(id).WithUnsetTags(unsetTags) - - err = client.Views.Alter(ctx, alterRequestUnsetTags) - require.NoError(t, err) - - _, err = client.SystemFunctions.GetTag(ctx, tag.ID(), id, sdk.ObjectTypeView) - require.Error(t, err) - }) - t.Run("alter view: set and unset masking policy on column", func(t *testing.T) { maskingPolicy, maskingPolicyCleanup := testClientHelper().MaskingPolicy.CreateMaskingPolicyIdentity(t, sdk.DataTypeNumber) t.Cleanup(maskingPolicyCleanup) @@ -449,46 +413,6 @@ func TestInt_Views(t *testing.T) { require.Empty(t, references) }) - t.Run("alter view: set and unset tags on column", func(t *testing.T) { - tag, tagCleanup := testClientHelper().Tag.CreateTag(t) - t.Cleanup(tagCleanup) - - view := createView(t) - id := view.ID() - - tagValue := "abc" - tags := []sdk.TagAssociation{ - { - Name: tag.ID(), - Value: tagValue, - }, - } - - alterRequest := sdk.NewAlterViewRequest(id).WithSetTagsOnColumn( - *sdk.NewViewSetColumnTagsRequest("ID", tags), - ) - err := client.Views.Alter(ctx, alterRequest) - require.NoError(t, err) - - columnId := sdk.NewTableColumnIdentifier(id.DatabaseName(), id.SchemaName(), id.Name(), "ID") - returnedTagValue, err := client.SystemFunctions.GetTag(ctx, tag.ID(), columnId, sdk.ObjectTypeColumn) - require.NoError(t, err) - assert.Equal(t, tagValue, returnedTagValue) - - unsetTags := []sdk.ObjectIdentifier{ - tag.ID(), - } - - alterRequest = sdk.NewAlterViewRequest(id).WithUnsetTagsOnColumn( - *sdk.NewViewUnsetColumnTagsRequest("ID", unsetTags), - ) - err = client.Views.Alter(ctx, alterRequest) - require.NoError(t, err) - - _, err = client.SystemFunctions.GetTag(ctx, tag.ID(), columnId, sdk.ObjectTypeColumn) - require.Error(t, err) - }) - t.Run("alter view: add and drop row access policies", func(t *testing.T) { rowAccessPolicy, rowAccessPolicyCleanup := testClientHelper().RowAccessPolicy.CreateRowAccessPolicy(t) t.Cleanup(rowAccessPolicyCleanup) diff --git a/pkg/sdk/testint/warehouses_integration_test.go b/pkg/sdk/testint/warehouses_integration_test.go index f22832f0f3..e2dce5e08c 100644 --- a/pkg/sdk/testint/warehouses_integration_test.go +++ b/pkg/sdk/testint/warehouses_integration_test.go @@ -641,50 +641,6 @@ func TestInt_Warehouses(t *testing.T) { assert.Equal(t, 0, result.Queued) }) - t.Run("alter: set tags and unset tags", func(t *testing.T) { - // new warehouse created on purpose - warehouse, warehouseCleanup := testClientHelper().Warehouse.CreateWarehouse(t) - t.Cleanup(warehouseCleanup) - - alterOptions := &sdk.AlterWarehouseOptions{ - SetTag: []sdk.TagAssociation{ - { - Name: tag.ID(), - Value: "val", - }, - { - Name: tag2.ID(), - Value: "val2", - }, - }, - } - err := client.Warehouses.Alter(ctx, warehouse.ID(), alterOptions) - require.NoError(t, err) - - val, err := client.SystemFunctions.GetTag(ctx, tag.ID(), warehouse.ID(), sdk.ObjectTypeWarehouse) - require.NoError(t, err) - require.Equal(t, "val", val) - val2, err := client.SystemFunctions.GetTag(ctx, tag2.ID(), warehouse.ID(), sdk.ObjectTypeWarehouse) - require.NoError(t, err) - require.Equal(t, "val2", val2) - - 
alterOptions = &sdk.AlterWarehouseOptions{ - UnsetTag: []sdk.ObjectIdentifier{ - tag.ID(), - tag2.ID(), - }, - } - err = client.Warehouses.Alter(ctx, warehouse.ID(), alterOptions) - require.NoError(t, err) - - val, err = client.SystemFunctions.GetTag(ctx, tag.ID(), warehouse.ID(), sdk.ObjectTypeWarehouse) - require.Error(t, err) - require.Equal(t, "", val) - val2, err = client.SystemFunctions.GetTag(ctx, tag2.ID(), warehouse.ID(), sdk.ObjectTypeWarehouse) - require.Error(t, err) - require.Equal(t, "", val2) - }) - t.Run("describe: when warehouse exists", func(t *testing.T) { result, err := client.Warehouses.Describe(ctx, precreatedWarehouseId) require.NoError(t, err) diff --git a/pkg/sdk/users.go b/pkg/sdk/users.go index 263d80198e..9615aa8269 100644 --- a/pkg/sdk/users.go +++ b/pkg/sdk/users.go @@ -95,12 +95,12 @@ type userDBRow struct { MustChangePassword sql.NullString `db:"must_change_password"` SnowflakeLock sql.NullString `db:"snowflake_lock"` DefaultWarehouse sql.NullString `db:"default_warehouse"` - DefaultNamespace string `db:"default_namespace"` - DefaultRole string `db:"default_role"` + DefaultNamespace sql.NullString `db:"default_namespace"` + DefaultRole sql.NullString `db:"default_role"` DefaultSecondaryRoles string `db:"default_secondary_roles"` ExtAuthnDuo sql.NullString `db:"ext_authn_duo"` - ExtAuthnUid string `db:"ext_authn_uid"` - MinsToBypassMfa string `db:"mins_to_bypass_mfa"` + ExtAuthnUid sql.NullString `db:"ext_authn_uid"` + MinsToBypassMfa sql.NullString `db:"mins_to_bypass_mfa"` Owner string `db:"owner"` LastSuccessLogin sql.NullTime `db:"last_success_login"` ExpiresAtTime sql.NullTime `db:"expires_at_time"` @@ -116,11 +116,7 @@ func (row userDBRow) convert() *User { Name: row.Name, CreatedOn: row.CreatedOn, LoginName: row.LoginName, - DefaultNamespace: row.DefaultNamespace, - DefaultRole: row.DefaultRole, DefaultSecondaryRoles: row.DefaultSecondaryRoles, - ExtAuthnUid: row.ExtAuthnUid, - MinsToBypassMfa: row.MinsToBypassMfa, Owner: row.Owner, HasPassword: row.HasPassword, HasRsaPublicKey: row.HasRsaPublicKey, @@ -151,9 +147,21 @@ func (row userDBRow) convert() *User { handleNullableBoolString(row.MustChangePassword, &user.MustChangePassword) handleNullableBoolString(row.SnowflakeLock, &user.SnowflakeLock) handleNullableBoolString(row.ExtAuthnDuo, &user.ExtAuthnDuo) + if row.ExtAuthnUid.Valid { + user.ExtAuthnUid = row.ExtAuthnUid.String + } + if row.MinsToBypassMfa.Valid { + user.MinsToBypassMfa = row.MinsToBypassMfa.String + } if row.DefaultWarehouse.Valid { user.DefaultWarehouse = row.DefaultWarehouse.String } + if row.DefaultNamespace.Valid { + user.DefaultNamespace = row.DefaultNamespace.String + } + if row.DefaultRole.Valid { + user.DefaultRole = row.DefaultRole.String + } if row.LastSuccessLogin.Valid { user.LastSuccessLogin = row.LastSuccessLogin.Time } diff --git a/pkg/testhelpers/helpers.go b/pkg/testhelpers/helpers.go index 298658a63c..6bcd26ce6d 100644 --- a/pkg/testhelpers/helpers.go +++ b/pkg/testhelpers/helpers.go @@ -1,25 +1,18 @@ package testhelpers import ( - "database/sql" + "os" "testing" - sqlmock "github.com/DATA-DOG/go-sqlmock" "github.com/stretchr/testify/require" ) -func WithMockDb(t *testing.T, f func(*sql.DB, sqlmock.Sqlmock)) { +func TestFile(t *testing.T, filename string, data []byte) string { t.Helper() - r := require.New(t) - db, mock, err := sqlmock.New() - r.NoError(err) - defer db.Close() + f, err := os.CreateTemp(t.TempDir(), filename) + require.NoError(t, err) - // Because we are using TypeSet not TypeList, order is 
non-deterministic. - mock.MatchExpectationsInOrder(false) - - f(db, mock) - if err := mock.ExpectationsWereMet(); err != nil { - t.Errorf("there were unfulfilled expectations: %s", err) - } + err = os.WriteFile(f.Name(), data, 0o600) + require.NoError(t, err) + return f.Name() } diff --git a/pkg/testhelpers/mock/mock.go b/pkg/testhelpers/mock/mock.go new file mode 100644 index 0000000000..8d72aa8eda --- /dev/null +++ b/pkg/testhelpers/mock/mock.go @@ -0,0 +1,25 @@ +package mock + +import ( + "database/sql" + "testing" + + "github.com/DATA-DOG/go-sqlmock" + "github.com/stretchr/testify/require" +) + +func WithMockDb(t *testing.T, f func(*sql.DB, sqlmock.Sqlmock)) { + t.Helper() + r := require.New(t) + db, mock, err := sqlmock.New() + r.NoError(err) + defer db.Close() + + // Because we are using TypeSet not TypeList, order is non-deterministic. + mock.MatchExpectationsInOrder(false) + + f(db, mock) + if err := mock.ExpectationsWereMet(); err != nil { + t.Errorf("there were unfulfilled expectations: %s", err) + } +} diff --git a/templates/data-sources/connections.md.tmpl b/templates/data-sources/connections.md.tmpl new file mode 100644 index 0000000000..e974f2a33b --- /dev/null +++ b/templates/data-sources/connections.md.tmpl @@ -0,0 +1,24 @@ +--- +page_title: "{{.Name}} {{.Type}} - {{.ProviderName}}" +subcategory: "" +description: |- +{{ if gt (len (split .Description "")) 1 -}} +{{ index (split .Description "") 1 | plainmarkdown | trimspace | prefixlines " " }} +{{- else -}} +{{ .Description | plainmarkdown | trimspace | prefixlines " " }} +{{- end }} +--- + +!> **V1 release candidate** This data source is a release candidate for the V1. We do not expect significant changes in it before the V1. We will welcome any feedback and adjust the data source if needed. Any errors reported will be resolved with a higher priority. We encourage checking this data source out before the V1 release. Please follow the [migration guide](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/MIGRATION_GUIDE.md#v0970--v0980) to use it. + +# {{.Name}} ({{.Type}}) + +{{ .Description | trimspace }} + +{{ if .HasExample -}} +## Example Usage + +{{ tffile (printf "examples/data-sources/%s/data-source.tf" .Name)}} +{{- end }} + +{{ .SchemaMarkdown | trimspace }} diff --git a/templates/data-sources/grants.md.tmpl b/templates/data-sources/grants.md.tmpl new file mode 100644 index 0000000000..c49c97d64d --- /dev/null +++ b/templates/data-sources/grants.md.tmpl @@ -0,0 +1,32 @@ +--- +page_title: "{{.Name}} {{.Type}} - {{.ProviderName}}" +subcategory: "" +description: |- +{{ if gt (len (split .Description "")) 1 -}} +{{ index (split .Description "") 1 | plainmarkdown | trimspace | prefixlines " " }} +{{- else -}} +{{ .Description | plainmarkdown | trimspace | prefixlines " " }} +{{- end }} +--- + +!> **V1 release candidate** This datasource was reworked and is a release candidate for the V1. We do not expect significant changes in it before the V1. We will welcome any feedback and adjust the resource if needed. Any errors reported will be resolved with a higher priority. We encourage checking this resource out before the V1 release. To migrate from older grant resources please follow the [migration guide](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/MIGRATION_GUIDE.md#migration-from-old-grant-resources-to-new-ones). 
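The `pkg/testhelpers` changes above split the sqlmock wiring out of `pkg/testhelpers/helpers.go` into a dedicated `pkg/testhelpers/mock` package and add a `TestFile` helper that writes fixture bytes to a temp file. A minimal usage sketch of both helpers (not part of this diff; the import paths, package name, and test name are assumptions inferred from the repository's `pkg/` layout) could look like this:

```go
package testhelpers_usage_test // hypothetical package, for illustration only

import (
	"database/sql"
	"testing"

	sqlmock "github.com/DATA-DOG/go-sqlmock"
	"github.com/stretchr/testify/require"

	// Assumed import paths, based on the pkg/ layout shown in the file headers above.
	"github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/testhelpers"
	"github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/testhelpers/mock"
)

func TestHelperUsage(t *testing.T) {
	// TestFile writes fixture bytes into a temp file scoped to the test and
	// returns the generated path, e.g. for pointing tests at a TOML config file.
	configPath := testhelpers.TestFile(t, "config", []byte("[default]\nuser='user'"))
	require.NotEmpty(t, configPath)

	// WithMockDb builds a sqlmock-backed *sql.DB, runs the callback with
	// out-of-order expectation matching, and fails the test if any declared
	// expectation is left unmet.
	mock.WithMockDb(t, func(db *sql.DB, m sqlmock.Sqlmock) {
		m.ExpectQuery("SHOW USERS").
			WillReturnRows(sqlmock.NewRows([]string{"name"}).AddRow("TEST_USER"))

		rows, err := db.Query("SHOW USERS")
		require.NoError(t, err)
		defer rows.Close()
	})
}
```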
+
+# {{.Name}} ({{.Type}})
+
+{{ .Description | trimspace }}
+
+{{ if .HasExample -}}
+## Example Usage
+
+{{ tffile (printf "examples/data-sources/%s/data-source.tf" .Name)}}
+{{- end }}
+
+{{ .SchemaMarkdown | trimspace }}
+{{- if .HasImport }}
+
+## Import
+
+Import is supported using the following syntax:
+
+{{ codefile "shell" (printf "examples/data-sources/%s/import.sh" .Name)}}
+{{- end }}
diff --git a/templates/index.md.tmpl b/templates/index.md.tmpl
index 0aade73a3a..11f3ba84bb 100644
--- a/templates/index.md.tmpl
+++ b/templates/index.md.tmpl
@@ -17,6 +17,8 @@ Coverage is focused on part of Snowflake related to access control.
## Example Provider Configuration
+This is an example configuration of the provider in `main.tf` in a configuration directory. More examples are provided [below](#order-precedence).
+
{{tffile "examples/provider/provider.tf"}}
## Configuration Schema
@@ -36,7 +38,7 @@ The Snowflake provider support multiple ways to authenticate:
* Private Key
* Config File
-In all cases account and username are required.
+In all cases `organization_name`, `account_name` and `user` are required.
### Keypair Authentication Environment Variables
@@ -113,30 +115,72 @@ export SNOWFLAKE_USER='...'
export SNOWFLAKE_PASSWORD='...'
```
-### Config File
+## Order Precedence
-If you choose to use a config file, the optional `profile` attribute specifies the profile to use from the config file. If no profile is specified, the default profile is used. The Snowflake config file lives at `~/.snowflake/config` and uses [TOML](https://toml.io/) format. You can override this location by setting the `SNOWFLAKE_CONFIG_PATH` environment variable. If no username and account are specified, the provider will fall back to reading the config file.
+Currently, the provider can be configured in three ways:
+1. In a Terraform file located in the Terraform module with other resources.
-```shell
+Example content of the Terraform file configuration:
+
+```terraform
+provider "snowflake" {
+  organization_name = "..."
+  account_name = "..."
+  username = "..."
+  password = "..."
+}
+```
+
+2. In environment variables (envs). These are mainly used to provide sensitive values.
+
+
+```bash
+export SNOWFLAKE_USER="..."
+export SNOWFLAKE_PRIVATE_KEY_PATH="~/.ssh/snowflake_key"
+```
+
+3. In a TOML file (by default in ~/.snowflake/config). Notice the use of different profiles. The profile name needs to be specified in the `profile` field of the Terraform configuration. When it is not specified, the `default` profile is loaded.
+When a `default` profile is not present in the TOML file, it is treated as "empty", without failing.
+
+Example content of the Terraform file configuration:
+
+```terraform
+provider "snowflake" {
+  profile = "default"
+}
+```
+
+Example content of the TOML file configuration:
+
+```toml
[default]
-account='TESTACCOUNT'
-user='TEST_USER'
-password='hunter2'
+organizationname='organization_name'
+accountname='account_name'
+user='user'
+password='password'
role='ACCOUNTADMIN'
-[securityadmin]
-account='TESTACCOUNT'
-user='TEST_USER'
-password='hunter2'
-role='SECURITYADMIN'
+[secondary_test_account]
+organizationname='organization_name'
+accountname='account2_name'
+user='user'
+password='password'
+role='ACCOUNTADMIN'
```
-## Order Precedence
+Not all fields must be configured in one source; users can choose which fields are configured in which source.
+The provider uses an established hierarchy of sources. The current behavior is that, for each field:
1.
Check if it is present in the provider configuration. If yes, use this value. If not, go to step 2. +1. Check if it is present in the environment variables. If yes, use this value. If not, go to step 3. +1. Check if it is present in the TOML config file (specifically, use the profile name configured in one of the steps above). If yes, use this value. If not, the value is considered empty. + +An example TOML file contents: + +{{ codefile "toml" "examples/additional/provider_config_toml.MD" | trimspace }} + +An example terraform configuration file equivalent: -The Snowflake provider will use the following order of precedence when determining which credentials to use: -1) Provider Configuration -2) Environment Variables -3) Config File +{{ codefile "terraform" "examples/additional/provider_config_tf.MD" | trimspace }} {{ index (split (codefile "" "examples/additional/deprecated_resources.MD") "```") 1 | trimspace }} diff --git a/templates/resources/grant_account_role.md.tmpl b/templates/resources/grant_account_role.md.tmpl new file mode 100644 index 0000000000..d4fc5ab54a --- /dev/null +++ b/templates/resources/grant_account_role.md.tmpl @@ -0,0 +1,36 @@ +--- +page_title: "{{.Name}} {{.Type}} - {{.ProviderName}}" +subcategory: "" +description: |- +{{ if gt (len (split .Description "")) 1 -}} +{{ index (split .Description "") 1 | plainmarkdown | trimspace | prefixlines " " }} +{{- else -}} +{{ .Description | plainmarkdown | trimspace | prefixlines " " }} +{{- end }} +--- + +!> **V1 release candidate** This resource was reworked and is a release candidate for the V1. We do not expect significant changes in it before the V1. We will welcome any feedback and adjust the resource if needed. Any errors reported will be resolved with a higher priority. We encourage checking this resource out before the V1 release. To migrate from older grant resources please follow the [migration guide](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/MIGRATION_GUIDE.md#migration-from-old-grant-resources-to-new-ones). + +# {{.Name}} ({{.Type}}) + +{{ .Description | trimspace }} + +{{ if .HasExample -}} +## Example Usage + +{{ tffile (printf "examples/resources/%s/resource.tf" .Name)}} + +-> **Note** Instead of using fully_qualified_name, you can reference objects managed outside Terraform by constructing a correct ID, consult [identifiers guide](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/latest/docs/guides/identifiers#new-computed-fully-qualified-name-field-in-resources). + + +{{- end }} + +{{ .SchemaMarkdown | trimspace }} +{{- if .HasImport }} + +## Import + +Import is supported using the following syntax: + +{{ codefile "shell" (printf "examples/resources/%s/import.sh" .Name)}} +{{- end }} diff --git a/templates/resources/grant_application_role.md.tmpl b/templates/resources/grant_application_role.md.tmpl new file mode 100644 index 0000000000..d4fc5ab54a --- /dev/null +++ b/templates/resources/grant_application_role.md.tmpl @@ -0,0 +1,36 @@ +--- +page_title: "{{.Name}} {{.Type}} - {{.ProviderName}}" +subcategory: "" +description: |- +{{ if gt (len (split .Description "")) 1 -}} +{{ index (split .Description "") 1 | plainmarkdown | trimspace | prefixlines " " }} +{{- else -}} +{{ .Description | plainmarkdown | trimspace | prefixlines " " }} +{{- end }} +--- + +!> **V1 release candidate** This resource was reworked and is a release candidate for the V1. We do not expect significant changes in it before the V1. 
We will welcome any feedback and adjust the resource if needed. Any errors reported will be resolved with a higher priority. We encourage checking this resource out before the V1 release. To migrate from older grant resources please follow the [migration guide](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/MIGRATION_GUIDE.md#migration-from-old-grant-resources-to-new-ones). + +# {{.Name}} ({{.Type}}) + +{{ .Description | trimspace }} + +{{ if .HasExample -}} +## Example Usage + +{{ tffile (printf "examples/resources/%s/resource.tf" .Name)}} + +-> **Note** Instead of using fully_qualified_name, you can reference objects managed outside Terraform by constructing a correct ID, consult [identifiers guide](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/latest/docs/guides/identifiers#new-computed-fully-qualified-name-field-in-resources). + + +{{- end }} + +{{ .SchemaMarkdown | trimspace }} +{{- if .HasImport }} + +## Import + +Import is supported using the following syntax: + +{{ codefile "shell" (printf "examples/resources/%s/import.sh" .Name)}} +{{- end }} diff --git a/templates/resources/grant_database_role.md.tmpl b/templates/resources/grant_database_role.md.tmpl new file mode 100644 index 0000000000..d4fc5ab54a --- /dev/null +++ b/templates/resources/grant_database_role.md.tmpl @@ -0,0 +1,36 @@ +--- +page_title: "{{.Name}} {{.Type}} - {{.ProviderName}}" +subcategory: "" +description: |- +{{ if gt (len (split .Description "")) 1 -}} +{{ index (split .Description "") 1 | plainmarkdown | trimspace | prefixlines " " }} +{{- else -}} +{{ .Description | plainmarkdown | trimspace | prefixlines " " }} +{{- end }} +--- + +!> **V1 release candidate** This resource was reworked and is a release candidate for the V1. We do not expect significant changes in it before the V1. We will welcome any feedback and adjust the resource if needed. Any errors reported will be resolved with a higher priority. We encourage checking this resource out before the V1 release. To migrate from older grant resources please follow the [migration guide](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/MIGRATION_GUIDE.md#migration-from-old-grant-resources-to-new-ones). + +# {{.Name}} ({{.Type}}) + +{{ .Description | trimspace }} + +{{ if .HasExample -}} +## Example Usage + +{{ tffile (printf "examples/resources/%s/resource.tf" .Name)}} + +-> **Note** Instead of using fully_qualified_name, you can reference objects managed outside Terraform by constructing a correct ID, consult [identifiers guide](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/latest/docs/guides/identifiers#new-computed-fully-qualified-name-field-in-resources). + + +{{- end }} + +{{ .SchemaMarkdown | trimspace }} +{{- if .HasImport }} + +## Import + +Import is supported using the following syntax: + +{{ codefile "shell" (printf "examples/resources/%s/import.sh" .Name)}} +{{- end }} diff --git a/templates/resources/grant_ownership.md.tmpl b/templates/resources/grant_ownership.md.tmpl index 9689e950d4..59569cf852 100644 --- a/templates/resources/grant_ownership.md.tmpl +++ b/templates/resources/grant_ownership.md.tmpl @@ -10,6 +10,8 @@ description: |- {{- end }} --- +!> **V1 release candidate** This resource was reworked and is a release candidate for the V1. We do not expect significant changes in it before the V1. We will welcome any feedback and adjust the resource if needed. Any errors reported will be resolved with a higher priority. 
We encourage checking this resource out before the V1 release. To migrate from older grant resources please follow the [migration guide](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/MIGRATION_GUIDE.md#migration-from-old-grant-resources-to-new-ones). + ~> **Note** For more details about granting ownership, please visit [`GRANT OWNERSHIP` Snowflake documentation page](https://docs.snowflake.com/en/sql-reference/sql/grant-ownership). !> **Warning** Grant ownership resource still has some limitations. Delete operation is not implemented for on_future grants (you have to remove the config and then revoke ownership grant on future X manually). @@ -66,7 +68,7 @@ To set the `AUTO_REFRESH` property back to `TRUE` (after you transfer ownership) Import is supported using the following syntax: -`terraform import "||||"` +`terraform import snowflake_grant_ownership.example '||||'` where: - role_type - string - type of granted role (either ToAccountRole or ToDatabaseRole) diff --git a/templates/resources/grant_privileges_to_account_role.md.tmpl b/templates/resources/grant_privileges_to_account_role.md.tmpl index b84b50553e..f58bc379e2 100644 --- a/templates/resources/grant_privileges_to_account_role.md.tmpl +++ b/templates/resources/grant_privileges_to_account_role.md.tmpl @@ -10,6 +10,8 @@ description: |- {{- end }} --- +!> **V1 release candidate** This resource was reworked and is a release candidate for the V1. We do not expect significant changes in it before the V1. We will welcome any feedback and adjust the resource if needed. Any errors reported will be resolved with a higher priority. We encourage checking this resource out before the V1 release. To migrate from older grant resources please follow the [migration guide](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/MIGRATION_GUIDE.md#migration-from-old-grant-resources-to-new-ones). + {{/* SNOW-990811 */}} !> **Warning** Be careful when using `always_apply` field. It will always produce a plan (even when no changes were made) and can be harmful in some setups. For more details why we decided to introduce it to go our document explaining those design decisions (coming soon). @@ -40,7 +42,7 @@ description: |- Import is supported using the following syntax: -`terraform import "|||||"` +`terraform import snowflake_grant_privileges_to_account_role.example '|||||'` where: - account_role_name - fully qualified identifier @@ -53,62 +55,62 @@ where: It has varying number of parts, depending on grant_type. All the possible types are: ### OnAccount -`terraform import "||||OnAccount` +`terraform import snowflake_grant_privileges_to_account_role.example '||||OnAccount'` ### OnAccountObject -`terraform import "||||OnAccountObject||` +`terraform import snowflake_grant_privileges_to_account_role.example '||||OnAccountObject||'` ### OnSchema On schema contains inner types for all options. #### OnSchema -`terraform import "||||OnSchema|OnSchema|"` +`terraform import snowflake_grant_privileges_to_account_role.example '||||OnSchema|OnSchema|'` #### OnAllSchemasInDatabase -`terraform import "||||OnSchema|OnAllSchemasInDatabase|"` +`terraform import snowflake_grant_privileges_to_account_role.example '||||OnSchema|OnAllSchemasInDatabase|'` #### OnFutureSchemasInDatabase -`terraform import "||||OnSchema|OnFutureSchemasInDatabase|"` +`terraform import snowflake_grant_privileges_to_account_role.example '||||OnSchema|OnFutureSchemasInDatabase|'` ### OnSchemaObject On schema object contains inner types for all options. 
#### OnObject -`terraform import "||||OnSchemaObject|OnObject||"` +`terraform import snowflake_grant_privileges_to_account_role.example '||||OnSchemaObject|OnObject||'` #### OnAll On all contains inner types for all options. ##### InDatabase -`terraform import "||||OnSchemaObject|OnAll||InDatabase|"` +`terraform import snowflake_grant_privileges_to_account_role.example '||||OnSchemaObject|OnAll||InDatabase|'` ##### InSchema -`terraform import "||||OnSchemaObject|OnAll||InSchema|"` +`terraform import snowflake_grant_privileges_to_account_role.example '||||OnSchemaObject|OnAll||InSchema|'` #### OnFuture On future contains inner types for all options. ##### InDatabase -`terraform import "||||OnSchemaObject|OnFuture||InDatabase|"` +`terraform import snowflake_grant_privileges_to_account_role.example '||||OnSchemaObject|OnFuture||InDatabase|'` ##### InSchema -`terraform import "||||OnSchemaObject|OnFuture||InSchema|"` +`terraform import snowflake_grant_privileges_to_account_role.example '||||OnSchemaObject|OnFuture||InSchema|'` ### Import examples #### Grant all privileges OnAccountObject (Database) -`terraform import "\"test_db_role\"|false|false|ALL|OnAccountObject|DATABASE|\"test_db\""` +`terraform import snowflake_grant_privileges_to_account_role.example '"test_db_role"|false|false|ALL|OnAccountObject|DATABASE|"test_db"'` #### Grant list of privileges OnAllSchemasInDatabase -`terraform import "\"test_db_role\"|false|false|CREATE TAG,CREATE TABLE|OnSchema|OnAllSchemasInDatabase|\"test_db\""` +`terraform import snowflake_grant_privileges_to_account_role.example '"test_db_role"|false|false|CREATE TAG,CREATE TABLE|OnSchema|OnAllSchemasInDatabase|"test_db"'` #### Grant list of privileges on table -`terraform import "\"test_db_role\"|false|false|SELECT,DELETE,INSERT|OnSchemaObject|OnObject|TABLE|\"test_db\".\"test_schema\".\"test_table\""` +`terraform import snowflake_grant_privileges_to_account_role.example '"test_db_role"|false|false|SELECT,DELETE,INSERT|OnSchemaObject|OnObject|TABLE|"test_db"."test_schema"."test_table"'` #### Grant list of privileges OnAll tables in schema -`terraform import "\"test_db_role\"|false|false|SELECT,DELETE,INSERT|OnSchemaObject|OnAll|TABLES|InSchema|\"test_db\".\"test_schema\""` +`terraform import snowflake_grant_privileges_to_account_role.example '"test_db_role"|false|false|SELECT,DELETE,INSERT|OnSchemaObject|OnAll|TABLES|InSchema|"test_db"."test_schema"'` diff --git a/templates/resources/grant_privileges_to_database_role.md.tmpl b/templates/resources/grant_privileges_to_database_role.md.tmpl index e32a92b19f..3b3a2fcb70 100644 --- a/templates/resources/grant_privileges_to_database_role.md.tmpl +++ b/templates/resources/grant_privileges_to_database_role.md.tmpl @@ -10,6 +10,8 @@ description: |- {{- end }} --- +!> **V1 release candidate** This resource was reworked and is a release candidate for the V1. We do not expect significant changes in it before the V1. We will welcome any feedback and adjust the resource if needed. Any errors reported will be resolved with a higher priority. We encourage checking this resource out before the V1 release. To migrate from older grant resources please follow the [migration guide](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/MIGRATION_GUIDE.md#migration-from-old-grant-resources-to-new-ones). + {{/* SNOW-990811 */}} !> **Warning** Be careful when using `always_apply` field. It will always produce a plan (even when no changes were made) and can be harmful in some setups. 
For more details why we decided to introduce it to go our document explaining those design decisions (coming soon). @@ -35,7 +37,7 @@ description: |- Import is supported using the following syntax: -`terraform import "|||||"` +`terraform import snowflake_grant_privileges_to_database_role.example '|||||'` where: - database_role_name - fully qualified identifier @@ -48,59 +50,59 @@ where: It has varying number of parts, depending on grant_type. All the possible types are: ### OnDatabase -`terraform import "||||OnDatabase|"` +`terraform import snowflake_grant_privileges_to_database_role.example '||||OnDatabase|'` ### OnSchema On schema contains inner types for all options. #### OnSchema -`terraform import "||||OnSchema|OnSchema|"` +`terraform import snowflake_grant_privileges_to_database_role.example '||||OnSchema|OnSchema|'` #### OnAllSchemasInDatabase -`terraform import "||||OnSchema|OnAllSchemasInDatabase|"` +`terraform import snowflake_grant_privileges_to_database_role.example '||||OnSchema|OnAllSchemasInDatabase|'` #### OnFutureSchemasInDatabase -`terraform import "||||OnSchema|OnFutureSchemasInDatabase|"` +`terraform import snowflake_grant_privileges_to_database_role.example '||||OnSchema|OnFutureSchemasInDatabase|'` ### OnSchemaObject On schema object contains inner types for all options. #### OnObject -`terraform import "||||OnSchemaObject|OnObject||"` +`terraform import snowflake_grant_privileges_to_database_role.example '||||OnSchemaObject|OnObject||'` #### OnAll On all contains inner types for all options. ##### InDatabase -`terraform import "||||OnSchemaObject|OnAll||InDatabase|"` +`terraform import snowflake_grant_privileges_to_database_role.example '||||OnSchemaObject|OnAll||InDatabase|'` ##### InSchema -`terraform import "||||OnSchemaObject|OnAll||InSchema|"` +`terraform import snowflake_grant_privileges_to_database_role.example '||||OnSchemaObject|OnAll||InSchema|'` #### OnFuture On future contains inner types for all options. 
##### InDatabase -`terraform import "||||OnSchemaObject|OnFuture||InDatabase|"` +`terraform import snowflake_grant_privileges_to_database_role.example '||||OnSchemaObject|OnFuture||InDatabase|'` ##### InSchema -`terraform import "||||OnSchemaObject|OnFuture||InSchema|"` +`terraform import snowflake_grant_privileges_to_database_role.example '||||OnSchemaObject|OnFuture||InSchema|'` ### Import examples #### Grant all privileges OnDatabase -`terraform import "\"test_db\".\"test_db_role\"|false|false|ALL|OnDatabase|\"test_db\""` +`terraform import snowflake_grant_privileges_to_database_role.example '"test_db"."test_db_role"|false|false|ALL|OnDatabase|"test_db"'` #### Grant list of privileges OnAllSchemasInDatabase -`terraform import "\"test_db\".\"test_db_role\"|false|false|CREATE TAG,CREATE TABLE|OnSchema|OnAllSchemasInDatabase|\"test_db\""` +`terraform import snowflake_grant_privileges_to_database_role.example '"test_db"."test_db_role"|false|false|CREATE TAG,CREATE TABLE|OnSchema|OnAllSchemasInDatabase|"test_db"'` #### Grant list of privileges on table -`terraform import "\"test_db\".\"test_db_role\"|false|false|SELECT,DELETE,INSERT|OnSchemaObject|OnObject|TABLE|\"test_db\".\"test_schema\".\"test_table\""` +`terraform import snowflake_grant_privileges_to_database_role.example '"test_db"."test_db_role"|false|false|SELECT,DELETE,INSERT|OnSchemaObject|OnObject|TABLE|"test_db"."test_schema"."test_table"'` #### Grant list of privileges OnAll tables in schema -`terraform import "\"test_db\".\"test_db_role\"|false|false|SELECT,DELETE,INSERT|OnSchemaObject|OnAll|TABLES|InSchema|\"test_db\".\"test_schema\""` +`terraform import snowflake_grant_privileges_to_database_role.example '"test_db"."test_db_role"|false|false|SELECT,DELETE,INSERT|OnSchemaObject|OnAll|TABLES|InSchema|"test_db"."test_schema"'` diff --git a/templates/resources/grant_privileges_to_share.md.tmpl b/templates/resources/grant_privileges_to_share.md.tmpl index 3ac9e57792..c9c79fb2d3 100644 --- a/templates/resources/grant_privileges_to_share.md.tmpl +++ b/templates/resources/grant_privileges_to_share.md.tmpl @@ -10,6 +10,8 @@ description: |- {{- end }} --- +!> **V1 release candidate** This resource was reworked and is a release candidate for the V1. We do not expect significant changes in it before the V1. We will welcome any feedback and adjust the resource if needed. Any errors reported will be resolved with a higher priority. We encourage checking this resource out before the V1 release. To migrate from older grant resources please follow the [migration guide](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/MIGRATION_GUIDE.md#migration-from-old-grant-resources-to-new-ones). 
+ # {{.Name}} ({{.Type}}) {{ .Description | trimspace }} @@ -34,7 +36,7 @@ description: |- Import is supported using the following syntax: -`terraform import "|||"` +`terraform import snowflake_grant_privileges_to_share.example '|||'` where: - share_name - fully qualified identifier @@ -43,19 +45,19 @@ where: - grant_identifier - fully qualified identifier ### OnDatabase -`terraform import "||OnDatabase|"` +`terraform import snowflake_grant_privileges_to_share.example '||OnDatabase|'` ### OnSchema -`terraform import "||OnSchema|."` +`terraform import snowflake_grant_privileges_to_share.example '||OnSchema|.'` ### OnTable -`terraform import "||OnTable|.."` +`terraform import snowflake_grant_privileges_to_share.example '||OnTable|..'` ### OnSchema -`terraform import "||OnAllTablesInSchema|."` +`terraform import snowflake_grant_privileges_to_share.example '||OnAllTablesInSchema|.'` ### OnTag -`terraform import "||OnTag|.."` +`terraform import snowflake_grant_privileges_to_share.example '||OnTag|..'` ### OnView -`terraform import "||OnView|.."` +`terraform import snowflake_grant_privileges_to_share.example '||OnView|..'` diff --git a/templates/resources/legacy_service_user.md.tmpl b/templates/resources/legacy_service_user.md.tmpl index c07fef3dc5..e9e1ac462f 100644 --- a/templates/resources/legacy_service_user.md.tmpl +++ b/templates/resources/legacy_service_user.md.tmpl @@ -41,3 +41,5 @@ Import is supported using the following syntax: {{ codefile "shell" (printf "examples/resources/%s/import.sh" .Name)}} {{- end }} + +Note: terraform plan+apply may be needed after successful import to fill out all the missing fields (like `password`) in state. diff --git a/templates/resources/primary_connection.md.tmpl b/templates/resources/primary_connection.md.tmpl new file mode 100644 index 0000000000..a4e271811a --- /dev/null +++ b/templates/resources/primary_connection.md.tmpl @@ -0,0 +1,42 @@ +--- +page_title: "{{.Name}} {{.Type}} - {{.ProviderName}}" +subcategory: "" +description: |- +{{ if gt (len (split .Description "")) 1 -}} +{{ index (split .Description "") 1 | plainmarkdown | trimspace | prefixlines " " }} +{{- else -}} +{{ .Description | plainmarkdown | trimspace | prefixlines " " }} +{{- end }} +--- + +!> **V1 release candidate** This resource is a release candidate for the V1. It is on the list of remaining GA objects for V1. We do not expect significant changes in it before the V1. We will welcome any feedback and adjust the resource if needed. Any errors reported will be resolved with a higher priority. We encourage checking this resource out before the V1 release. Please follow the [migration guide](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/MIGRATION_GUIDE.md#v0970--v0980) to use it. + +# {{.Name}} ({{.Type}}) + +{{ .Description | trimspace }} + +{{ if .HasExample -}} +## Example Usage + +{{ tffile (printf "examples/resources/%s/resource.tf" .Name)}} + +-> **Note** Instead of using fully_qualified_name, you can reference objects managed outside Terraform by constructing a correct ID, consult [identifiers guide](../docs/guides/identifiers#new-computed-fully-qualified-name-field-in-resources). + +-> **Note** To demote `snowflake_primary_connection` to [`snowflake_secondary_connection`](./secondary_connection), resources need to be migrated manually. For guidance on removing and importing resources into the state check [resource migration](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/resource_migration.md). 
Remove the resource from the state, then recreate it in manually using: + ``` + CREATE CONNECTION AS REPLICA OF ..; + ``` +and then import it as the `snowflake_secondary_connection`. + + +{{- end }} + +{{ .SchemaMarkdown | trimspace }} +{{- if .HasImport }} + +## Import + +Import is supported using the following syntax: + +{{ codefile "shell" (printf "examples/resources/%s/import.sh" .Name)}} +{{- end }} diff --git a/templates/resources/secondary_connection.md.tmpl b/templates/resources/secondary_connection.md.tmpl new file mode 100644 index 0000000000..b6955369ea --- /dev/null +++ b/templates/resources/secondary_connection.md.tmpl @@ -0,0 +1,42 @@ +--- +page_title: "{{.Name}} {{.Type}} - {{.ProviderName}}" +subcategory: "" +description: |- +{{ if gt (len (split .Description "")) 1 -}} +{{ index (split .Description "") 1 | plainmarkdown | trimspace | prefixlines " " }} +{{- else -}} +{{ .Description | plainmarkdown | trimspace | prefixlines " " }} +{{- end }} +--- + +!> **V1 release candidate** This resource is a release candidate for the V1. It is on the list of remaining GA objects for V1. We do not expect significant changes in it before the V1. We will welcome any feedback and adjust the resource if needed. Any errors reported will be resolved with a higher priority. We encourage checking this resource out before the V1 release. Please follow the [migration guide](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/MIGRATION_GUIDE.md#v0970--v0980) to use it. + +# {{.Name}} ({{.Type}}) + +{{ .Description | trimspace }} + +{{ if .HasExample -}} +## Example Usage + +{{ tffile (printf "examples/resources/%s/resource.tf" .Name)}} + +-> **Note** Instead of using fully_qualified_name, you can reference objects managed outside Terraform by constructing a correct ID, consult [identifiers guide](../guides/identifiers#new-computed-fully-qualified-name-field-in-resources). + +-> **Note** To promote `snowflake_secondary_connection` to [`snowflake_primary_connection`](./primary_connection), resources need to be migrated manually. For guidance on removing and importing resources into the state check [resource migration](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/resource_migration.md). Remove the resource from the state, then promote it manually using: + ``` + ALTER CONNECTION PRIMARY; + ``` +and then import it as the `snowflake_primary_connection`. + + +{{- end }} + +{{ .SchemaMarkdown | trimspace }} +{{- if .HasImport }} + +## Import + +Import is supported using the following syntax: + +{{ codefile "shell" (printf "examples/resources/%s/import.sh" .Name)}} +{{- end }} diff --git a/templates/resources/tag.md.tmpl b/templates/resources/tag.md.tmpl new file mode 100644 index 0000000000..7a876a0017 --- /dev/null +++ b/templates/resources/tag.md.tmpl @@ -0,0 +1,35 @@ +--- +page_title: "{{.Name}} {{.Type}} - {{.ProviderName}}" +subcategory: "" +description: |- +{{ if gt (len (split .Description "")) 1 -}} +{{ index (split .Description "") 1 | plainmarkdown | trimspace | prefixlines " " }} +{{- else -}} +{{ .Description | plainmarkdown | trimspace | prefixlines " " }} +{{- end }} +--- + +!> **V1 release candidate** This resource was reworked and is a release candidate for the V1. We do not expect significant changes in it before the V1. We will welcome any feedback and adjust the resource if needed. Any errors reported will be resolved with a higher priority. We encourage checking this resource out before the V1 release. 
Please follow the [migration guide](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/MIGRATION_GUIDE.md#v0980--v0990) to use it. + +# {{.Name}} ({{.Type}}) + +{{ .Description | trimspace }} + +{{ if .HasExample -}} +## Example Usage + +{{ tffile (printf "examples/resources/%s/resource.tf" .Name)}} +-> **Note** Instead of using fully_qualified_name, you can reference objects managed outside Terraform by constructing a correct ID, consult [identifiers guide](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/latest/docs/guides/identifiers#new-computed-fully-qualified-name-field-in-resources). + + +{{- end }} + +{{ .SchemaMarkdown | trimspace }} +{{- if .HasImport }} + +## Import + +Import is supported using the following syntax: + +{{ codefile "shell" (printf "examples/resources/%s/import.sh" .Name)}} +{{- end }} diff --git a/templates/resources/user.md.tmpl b/templates/resources/user.md.tmpl index a15bbb722b..eeaa0b36ff 100644 --- a/templates/resources/user.md.tmpl +++ b/templates/resources/user.md.tmpl @@ -41,3 +41,5 @@ Import is supported using the following syntax: {{ codefile "shell" (printf "examples/resources/%s/import.sh" .Name)}} {{- end }} + +Note: terraform plan+apply may be needed after successful import to fill out all the missing fields (like `password`) in state. diff --git a/v1-preparations/ESSENTIAL_GA_OBJECTS.MD b/v1-preparations/ESSENTIAL_GA_OBJECTS.MD index b1b2065e68..96c1eea1d7 100644 --- a/v1-preparations/ESSENTIAL_GA_OBJECTS.MD +++ b/v1-preparations/ESSENTIAL_GA_OBJECTS.MD @@ -14,11 +14,11 @@ newer provider versions. We will address these while working on the given object | Object Type | Status | Known issues | |--------------------------|:------:|-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| ACCOUNT | ❌ | [#2030](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2030), [#2015](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2015), [#1891](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1891), [#1679](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1679), [#1671](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1671), [#1501](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1501), [#1062](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1062) | +| ACCOUNT | 👨‍💻 | [#2030](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2030), [#2015](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2015), [#1891](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1891), [#1679](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1679), [#1671](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1671), [#1501](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1501), [#1062](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1062) | | DATABASE | 🚀 | issues in the older versions: [resources](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues?q=label%3Aresource%3Adatabase+) and [datasources](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues?q=label%3Adata_source%3Adatabases+) | | DATABASE ROLE | 🚀 | - | | NETWORK POLICY | 🚀 | - | -| RESOURCE MONITOR | 👨‍💻 | [#1990](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1990), [#1832](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1832), [#1821](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1821), [#1754](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1754), [#1716](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1716), [#1714](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1714), [#1624](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1624), [#1500](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1500), [#1175](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1175) | +| RESOURCE MONITOR | 🚀 | [#1990](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1990), [#1832](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1832), [#1821](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1821), [#1754](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1754), [#1716](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1716), [#1714](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1714), [#1624](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1624), [#1500](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1500), [#1175](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1175) | | ROLE | 
🚀 | - | | SECURITY INTEGRATION | 🚀 | issues in the older versions: [resources](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues?q=label%3Aresource%3Aexternal_oauth_integration%2Cresource%3Asaml_integration%2Cresource%3Aoauth_integration%2Cresource%3Ascim_integration) | | USER | 🚀 | issues in the older versions: [resources](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues?q=label%3Aresource%3Auser+) and [datasources](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues?q=label%3Adata_source%3Ausers+) | @@ -29,11 +29,11 @@ newer provider versions. We will address these while working on the given object | ROW ACCESS POLICY | 🚀 | [#2053](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2053), [#1600](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1600), [#1151](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1151) | | SCHEMA | 🚀 | issues in the older versions: [resources](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues?q=label%3Aresource%3Aschema+) and [datasources](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues?q=label%3Adata_source%3Aschemas+) | | STAGE | ❌ | [#2995](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2995), [#2818](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2818), [#2679](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2679), [#2505](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2505), [#1911](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1911), [#1903](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1903), [#1795](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1795), [#1705](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1705), [#1544](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1544), [#1491](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1491), [#1087](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1087), [#265](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/265) | -| STREAM | 👨‍💻 | [#2975](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2975), [#2413](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2413), [#2201](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2201), [#1150](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1150) | +| STREAM | 🚀 | [#2975](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2975), [#2413](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2413), [#2201](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2201), [#1150](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1150) | | STREAMLIT | 🚀 | - | -| TABLE | ❌ | [#2997](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2997), [#2844](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2844), [#2839](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2839), [#2735](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2735), [#2733](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2733), [#2683](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2683), 
[#2676](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2676), [#2674](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2674), [#2629](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2629), [#2418](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2418), [#2415](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2415), [#2406](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2406), [#2236](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2236), [#2035](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2035), [#1823](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1823), [#1799](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1799), [#1764](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1764), [#1600](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1600), [#1387](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1387), [#1272](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1272), [#1271](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1271), [#1248](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1248), [#1241](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1241), [#1146](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1146), [#1032](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1032), [#420](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/420) | +| TABLE | 👨‍💻 | [#2997](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2997), [#2844](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2844), [#2839](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2839), [#2735](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2735), [#2733](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2733), [#2683](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2683), [#2676](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2676), [#2674](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2674), [#2629](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2629), [#2418](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2418), [#2415](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2415), [#2406](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2406), [#2236](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2236), [#2035](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2035), [#1823](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1823), [#1799](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1799), [#1764](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1764), [#1600](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1600), [#1387](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1387), [#1272](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1272), [#1271](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1271), 
[#1248](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1248), [#1241](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1241), [#1146](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1146), [#1032](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1032), [#420](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/420) | | TAG | 👨‍💻 | [#2943](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2902), [#2598](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2598), [#1910](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1910), [#1909](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1909), [#1862](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1862), [#1806](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1806), [#1657](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1657), [#1496](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1496), [#1443](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1443), [#1394](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1394), [#1372](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1372), [#1074](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1074) | | TASK | 👨‍💻 | [#3136](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/3136), [#1419](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1419), [#1250](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1250), [#1194](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1194), [#1088](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1088) | | VIEW | 🚀 | issues in the older versions: [resources](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues?q=label%3Aresource%3Aview+) and [datasources](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues?q=label%3Adata_source%3Aviews+) | -| snowflake_unsafe_execute | ❌ | [#2934](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2934) | +| snowflake_unsafe_execute | 👨‍💻 | [#2934](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2934) | diff --git a/v1-preparations/LIST_OF_PREVIEW_FEATURES_FOR_V1.md b/v1-preparations/LIST_OF_PREVIEW_FEATURES_FOR_V1.md index 4d449b68bd..1f51a82107 100644 --- a/v1-preparations/LIST_OF_PREVIEW_FEATURES_FOR_V1.md +++ b/v1-preparations/LIST_OF_PREVIEW_FEATURES_FOR_V1.md @@ -12,13 +12,14 @@ * [snowflake_dynamic_table](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.97.0/docs/resources/dynamic_table) * [snowflake_dynamic_tables](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.97.0/docs/data-sources/dynamic_tables) (datasource) * [snowflake_external_function](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.97.0/docs/resources/external_function) -* [snowflake_external_functions](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.97.0/docs/resources/external_functions) +* [snowflake_external_functions](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.97.0/docs/data-sources/external_functions) (datasource) * [snowflake_external_table](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.97.0/docs/resources/external_table) -* 
[snowflake_external_tables](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.97.0/docs/resources/external_tables)
+* [snowflake_external_tables](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.97.0/docs/data-sources/external_tables) (datasource)
+* [snowflake_external_volume](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.98.0/docs/resources/external_volume)
 * [snowflake_failover_group](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.97.0/docs/resources/failover_group)
-* [snowflake_failover_groups](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.97.0/docs/resources/failover_groups)
+* [snowflake_failover_groups](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.97.0/docs/data-sources/failover_groups) (datasource)
 * [snowflake_file_format](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.97.0/docs/resources/file_format)
-* [snowflake_file_formats](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.97.0/docs/resources/file_formats)
+* [snowflake_file_formats](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.97.0/docs/data-sources/file_formats) (datasource)
 * [snowflake_managed_account](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.97.0/docs/resources/managed_account)
 * [snowflake_materialized_view](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.97.0/docs/resources/materialized_view)
 * [snowflake_materialized_views](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.97.0/docs/data-sources/materialized_views) (datasource)
diff --git a/v1-preparations/LIST_OF_STABLE_RESOURCES_FOR_V1.md b/v1-preparations/LIST_OF_STABLE_RESOURCES_FOR_V1.md
index 4744d66775..b196a5a418 100644
--- a/v1-preparations/LIST_OF_STABLE_RESOURCES_FOR_V1.md
+++ b/v1-preparations/LIST_OF_STABLE_RESOURCES_FOR_V1.md
@@ -1,107 +1,108 @@
 We estimate the given list to be accurate, but it may be subject to small changes:
 * Account (in progress)
-  * [snowflake_account](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.97.0/docs/resources/account)
-  * [snowflake_accounts](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.97.0/docs/data-sources/accounts) (datasource)
-* Connection (in progress)
-  * snowflake_connection
-  * snowflake_connections (datasource)
+  * [snowflake_account](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.98.0/docs/resources/account)
+  * [snowflake_accounts](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.98.0/docs/data-sources/accounts) (datasource)
+* Connection
+  * [snowflake_primary_connection](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.98.0/docs/resources/primary_connection)
+  * [snowflake_secondary_connection](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.98.0/docs/resources/secondary_connection)
+  * [snowflake_connections](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.98.0/docs/data-sources/connections) (datasource)
 * Database
-  * [snowflake_database](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.97.0/docs/resources/database)
-  * [snowflake_secondary_database](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.97.0/docs/resources/secondary_database)
-  * [snowflake_shared_database](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.97.0/docs/resources/shared_database)
-  * [snowflake_databases](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.97.0/docs/data-sources/databases) (datasource)
+  * [snowflake_database](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.98.0/docs/resources/database)
+  * [snowflake_secondary_database](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.98.0/docs/resources/secondary_database)
+  * [snowflake_shared_database](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.98.0/docs/resources/shared_database)
+  * [snowflake_databases](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.98.0/docs/data-sources/databases) (datasource)
 * Database Role
-  * [snowflake_database_role](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.97.0/docs/resources/database_role)
-  * [snowflake_database_roles](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.97.0/docs/data-sources/database_roles) (datasource)
+  * [snowflake_database_role](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.98.0/docs/resources/database_role)
+  * [snowflake_database_roles](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.98.0/docs/data-sources/database_roles) (datasource)
 * Function (in progress)
   * snowflake_function_java
   * snowflake_function_javascript
   * snowflake_function_python
   * snowflake_function_scala
   * snowflake_function_sql
-  * [snowflake_functions](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.97.0/docs/data-sources/functions) (datasource)
+  * [snowflake_functions](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.98.0/docs/data-sources/functions) (datasource)
 * Grants
-  * [snowflake_grant_account_role](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.97.0/docs/resources/grant_account_role)
-  * [snowflake_grant_application_role](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.97.0/docs/resources/grant_application_role)
-  * [snowflake_grant_database_role](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.97.0/docs/resources/grant_database_role)
-  * [snowflake_grant_ownership](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.97.0/docs/resources/grant_ownership)
-  * [snowflake_grant_privileges_to_account_role](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.97.0/docs/resources/grant_privileges_to_account_role)
-  * [snowflake_grant_privileges_to_database_role](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.97.0/docs/resources/grant_privileges_to_database_role)
-  * [snowflake_grant_privileges_to_share](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.97.0/docs/resources/grant_privileges_to_share)
-  * [snowflake_grants](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.97.0/docs/data-sources/grants) (datasource)
+  * [snowflake_grant_account_role](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.98.0/docs/resources/grant_account_role)
+  * [snowflake_grant_application_role](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.98.0/docs/resources/grant_application_role)
+  * [snowflake_grant_database_role](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.98.0/docs/resources/grant_database_role)
+  * [snowflake_grant_ownership](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.98.0/docs/resources/grant_ownership)
+  * [snowflake_grant_privileges_to_account_role](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.98.0/docs/resources/grant_privileges_to_account_role)
+  * [snowflake_grant_privileges_to_database_role](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.98.0/docs/resources/grant_privileges_to_database_role)
+  * [snowflake_grant_privileges_to_share](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.98.0/docs/resources/grant_privileges_to_share)
+  * [snowflake_grants](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.98.0/docs/data-sources/grants) (datasource)
 * Masking Policy
-  * [snowflake_masking_policy](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.97.0/docs/resources/masking_policy)
-  * [snowflake_masking_policies](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.97.0/docs/data-sources/masking_policies) (datasource)
+  * [snowflake_masking_policy](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.98.0/docs/resources/masking_policy)
+  * [snowflake_masking_policies](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.98.0/docs/data-sources/masking_policies) (datasource)
 * Network Policy
-  * [snowflake_network_policy](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.97.0/docs/resources/network_policy)
-  * [snowflake_network_policies](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.97.0/docs/data-sources/network_policies) (datasource)
+  * [snowflake_network_policy](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.98.0/docs/resources/network_policy)
+  * [snowflake_network_policies](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.98.0/docs/data-sources/network_policies) (datasource)
 * Procedure (in progress)
   * snowflake_procedure_java
   * snowflake_procedure_javascript
   * snowflake_procedure_python
   * snowflake_procedure_scala
   * snowflake_procedure_sql
-  * [snowflake_procedures](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.97.0/docs/data-sources/procedures) (datasource)
+  * [snowflake_procedures](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.98.0/docs/data-sources/procedures) (datasource)
 * Resource Monitor
-  * [snowflake_resource_monitor](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.97.0/docs/resources/resource_monitor)
-  * [snowflake_resource_monitors](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.97.0/docs/data-sources/resource_monitors) (datasource)
+  * [snowflake_resource_monitor](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.98.0/docs/resources/resource_monitor)
+  * [snowflake_resource_monitors](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.98.0/docs/data-sources/resource_monitors) (datasource)
 * Role
-  * [snowflake_account_role](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.97.0/docs/resources/account_role)
-  * [snowflake_roles](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.97.0/docs/data-sources/roles) (datasource)
+  * [snowflake_account_role](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.98.0/docs/resources/account_role)
+  * [snowflake_roles](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.98.0/docs/data-sources/roles) (datasource)
 * Row Access Policy
-  * [snowflake_row_access_policy](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.97.0/docs/resources/row_access_policy)
-  * [snowflake_row_access_policies](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.97.0/docs/data-sources/row_access_policies) (datasource)
+  * [snowflake_row_access_policy](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.98.0/docs/resources/row_access_policy)
+  * [snowflake_row_access_policies](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.98.0/docs/data-sources/row_access_policies) (datasource)
 * Schema
-  * [snowflake_schema](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.97.0/docs/resources/schema)
-  * [snowflake_schemas](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.97.0/docs/data-sources/schemas) (datasource)
-* Secret (in progress)
-  * snowflake_secret_with_client_credentials
-  * snowflake_secret_with_authorization_code_grant
-  * snowflake_secret_with_basic_authentication
-  * snowflake_secret_with_generic_string
-  * snowflake_secrets (datasource)
+  * [snowflake_schema](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.98.0/docs/resources/schema)
+  * [snowflake_schemas](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.98.0/docs/data-sources/schemas) (datasource)
+* Secret
+  * [snowflake_secret_with_client_credentials](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.98.0/docs/resources/secret_with_client_credentials)
+  * [snowflake_secret_with_authorization_code_grant](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.98.0/docs/resources/secret_with_authorization_code_grant)
+  * [snowflake_secret_with_basic_authentication](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.98.0/docs/resources/secret_with_basic_authentication)
+  * [snowflake_secret_with_generic_string](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.98.0/docs/resources/secret_with_generic_string)
+  * [snowflake_secrets](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.98.0/docs/data-sources/secrets) (datasource)
 * Security Integration
-  * [snowflake_api_authentication_integration_with_authorization_code_grant](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.97.0/docs/resources/api_authentication_integration_with_authorization_code_grant)
-  * [snowflake_api_authentication_integration_with_client_credentials](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.97.0/docs/resources/api_authentication_integration_with_client_credentials)
-  * [snowflake_api_authentication_integration_with_jwt_bearer](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.97.0/docs/resources/api_authentication_integration_with_jwt_bearer)
-  * [snowflake_external_oauth_integration](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.97.0/docs/resources/external_oauth_integration)
-  * [snowflake_oauth_integration_for_custom_clients](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.97.0/docs/resources/oauth_integration_for_custom_clients)
-  * [snowflake_oauth_integration_for_partner_applications](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.97.0/docs/resources/oauth_integration_for_partner_applications)
-  * [snowflake_saml2_integration](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.97.0/docs/resources/saml2_integration)
-  * [snowflake_scim_integration](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.97.0/docs/resources/scim_integration)
-  * [snowflake_security_integrations](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.97.0/docs/data-sources/security_integrations) (datasource)
+  * [snowflake_api_authentication_integration_with_authorization_code_grant](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.98.0/docs/resources/api_authentication_integration_with_authorization_code_grant)
+  * [snowflake_api_authentication_integration_with_client_credentials](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.98.0/docs/resources/api_authentication_integration_with_client_credentials)
+  * [snowflake_api_authentication_integration_with_jwt_bearer](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.98.0/docs/resources/api_authentication_integration_with_jwt_bearer)
+  * [snowflake_external_oauth_integration](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.98.0/docs/resources/external_oauth_integration)
+  * [snowflake_oauth_integration_for_custom_clients](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.98.0/docs/resources/oauth_integration_for_custom_clients)
+  * [snowflake_oauth_integration_for_partner_applications](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.98.0/docs/resources/oauth_integration_for_partner_applications)
+  * [snowflake_saml2_integration](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.98.0/docs/resources/saml2_integration)
+  * [snowflake_scim_integration](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.98.0/docs/resources/scim_integration)
+  * [snowflake_security_integrations](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.98.0/docs/data-sources/security_integrations) (datasource)
 * Snowflake Parameters (in progress)
-  * [snowflake_account_parameter](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.97.0/docs/resources/account_parameter)
+  * [snowflake_account_parameter](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.98.0/docs/resources/account_parameter)
 * SQL Execute (in progress)
   * \ (no name yet)
-* Stream (in progress)
-  * [snowflake_stream_on_table](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.97.0/docs/resources/stream_on_table)
-  * [snowflake_stream_on_external_table](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.97.0/docs/resources/stream_on_external_table)
-  * snowflake_stream_on_directory_table
-  * snowflake_stream_on_view
-  * [snowflake_streams](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.97.0/docs/data-sources/streams) (datasource)
+* Stream
+  * [snowflake_stream_on_table](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.98.0/docs/resources/stream_on_table)
+  * [snowflake_stream_on_external_table](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.98.0/docs/resources/stream_on_external_table)
+  * [snowflake_stream_on_directory_table](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.98.0/docs/resources/stream_on_directory_table)
+  * [snowflake_stream_on_view](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.98.0/docs/resources/stream_on_view)
+  * [snowflake_streams](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.98.0/docs/data-sources/streams) (datasource)
 * Streamlit
-  * [snowflake_streamlit](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.97.0/docs/resources/streamlit)
-  * [snowflake_streamlits](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.97.0/docs/data-sources/streamlits) (datasource)
+  * [snowflake_streamlit](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.98.0/docs/resources/streamlit)
+  * [snowflake_streamlits](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.98.0/docs/data-sources/streamlits) (datasource)
 * Table (in progress)
-  * [snowflake_table](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.97.0/docs/resources/table)
-  * [snowflake_tables](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.97.0/docs/data-sources/tables) (datasource)
+  * [snowflake_table](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.98.0/docs/resources/table)
+  * [snowflake_tables](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.98.0/docs/data-sources/tables) (datasource)
 * Tag (in progress)
-  * [snowflake_tag](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.97.0/docs/resources/tag)
-  * [snowflake_tag_association](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.97.0/docs/resources/tag_association)
-  * [snowflake_tags](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.97.0/docs/data-sources/tags) (datasource)
+  * [snowflake_tag](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.98.0/docs/resources/tag)
+  * [snowflake_tag_association](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.98.0/docs/resources/tag_association)
+  * [snowflake_tags](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.98.0/docs/data-sources/tags) (datasource)
 * Task (in progress)
-  * [snowflake_task](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.97.0/docs/resources/task)
-  * [snowflake_tasks](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.97.0/docs/data-sources/tasks) (datasource)
+  * [snowflake_task](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.98.0/docs/resources/task)
+  * [snowflake_tasks](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.98.0/docs/data-sources/tasks) (datasource)
 * User
-  * [snowflake_user](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.97.0/docs/resources/user)
-  * [snowflake_service_user](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.97.0/docs/resources/service_user)
-  * [snowflake_legacy_service_user](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.97.0/docs/resources/legacy_service_user)
-  * [snowflake_users](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.97.0/docs/data-sources/users) (datasource)
+  * [snowflake_user](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.98.0/docs/resources/user)
+  * [snowflake_service_user](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.98.0/docs/resources/service_user)
+  * [snowflake_legacy_service_user](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.98.0/docs/resources/legacy_service_user)
+  * [snowflake_users](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.98.0/docs/data-sources/users) (datasource)
 * View
-  * [snowflake_view](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.97.0/docs/resources/view)
-  * [snowflake_views](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.97.0/docs/data-sources/views) (datasource)
+  * [snowflake_view](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.98.0/docs/resources/view)
+  * [snowflake_views](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.98.0/docs/data-sources/views) (datasource)
 * Warehouse
-  * [snowflake_warehouse](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.97.0/docs/resources/warehouse)
-  * [snowflake_warehouse](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.97.0/docs/data-sources/warehouse) (datasource)
+  * [snowflake_warehouse](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.98.0/docs/resources/warehouse)
+  * [snowflake_warehouse](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.98.0/docs/data-sources/warehouse) (datasource)
diff --git a/v1-preparations/REMAINING_GA_OBJECTS.MD b/v1-preparations/REMAINING_GA_OBJECTS.MD
index 8c8c01ce83..fae960f8d3 100644
--- a/v1-preparations/REMAINING_GA_OBJECTS.MD
+++ b/v1-preparations/REMAINING_GA_OBJECTS.MD
@@ -9,34 +9,34 @@ Status is one of:
 Known issues lists open issues touching the given object. Note that some of these issues may be already fixed in the newer provider versions. We will address these while working on the given object.
 
-| Object Type | Status | Known issues |
-|-----------------------------|:------:|------------------------------------------------------------------------------------------------------------------------|
-| snowflake_object_parameter | ❌ | [#2446](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2446), [#1848](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1848), [#1561](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1561), [#1457](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1457) |
-| snowflake_session_parameter | ❌ | [#1814](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1814), [#1783](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1783), [#1036](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1036) |
-| snowflake_account_parameter | ❌ | [#1679](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1679) |
-| API INTEGRATION | ❌ | [#2772](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2772), [#1445](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1445) |
-| APPLICATION | ❌ | - |
-| APPLICATION PACKAGE | ❌ | - |
-| APPLICATION ROLE | ❌ | [#3134](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/3134) - |
-| CONNECTION | ❌ | - |
-| EXTERNAL ACCESS INTEGRATION | ❌ | [#2546](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2546) |
-| FAILOVER GROUP | ❌ | [#2516](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2516), [#2332](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2332), [#1418](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1418) |
-| MANAGED ACCOUNT | ❌ | - |
-| NOTIFICATION INTEGRATION | ❌ | [#2966](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2966), [#2965](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2965), [#1051](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1051) |
-| REPLICATION GROUP | ❌ | [#1602](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1602) |
-| SHARE | ❌ | [#3051](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/3051), [#2189](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2189), [#1279](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1279), [#630](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/630) |
+| Object Type | Status | Known issues |
+|-----------------------------|:------:|---------------------------------------------------------------------------------------------------------------------------------------|
+| snowflake_object_parameter | 👨‍💻 | [#2446](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2446), [#1848](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1848), [#1561](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1561), [#1457](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1457) |
+| snowflake_session_parameter | 👨‍💻 | [#1814](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1814), [#1783](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1783), [#1036](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1036) |
+| snowflake_account_parameter | 👨‍💻 | [#1679](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1679) |
+| API INTEGRATION | ❌ | [#2772](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2772), [#1445](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1445) |
+| APPLICATION | ❌ | - |
+| APPLICATION PACKAGE | ❌ | - |
+| APPLICATION ROLE | ❌ | [#3134](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/3134) - |
+| CONNECTION | 🚀 | - |
+| EXTERNAL ACCESS INTEGRATION | ❌ | [#2546](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2546) |
+| FAILOVER GROUP | ❌ | [#2516](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2516), [#2332](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2332), [#1418](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1418) |
+| MANAGED ACCOUNT | ❌ | - |
+| NOTIFICATION INTEGRATION | ❌ | [#2966](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2966), [#2965](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2965), [#1051](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1051) |
+| REPLICATION GROUP | ❌ | [#1602](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1602) |
+| SHARE | ❌ | [#3051](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/3051), [#2189](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2189), [#1279](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1279), [#630](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/630) |
 | STORAGE INTEGRATION | ❌ | [#3082](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/3082), [#2624](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2624), [#1445](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1445) |
-| ALERT | ❌ | [#2963](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2963), [#2829](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2829), [#2541](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2541), [#1811](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1811), [#1753](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1753) |
-| DYNAMIC TABLE | ❌ | - |
-| EVENT TABLE | ❌ | [#1888](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1888) |
-| EXTERNAL FUNCTION | ❌ | [#1901](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1901) |
-| EXTERNAL TABLE | ❌ | [#2881](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2881), [#1564](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1564), [#1537](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1537), [#1416](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1416), [#1040](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1040) |
+| ALERT | ❌ | [#2963](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2963), [#2829](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2829), [#2541](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2541), [#1811](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1811), [#1753](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1753) |
+| DYNAMIC TABLE | ❌ | - |
+| EVENT TABLE | ❌ | [#1888](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1888) |
+| EXTERNAL FUNCTION | ❌ | [#1901](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1901) |
+| EXTERNAL TABLE | ❌ | [#2881](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2881), [#1564](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1564), [#1537](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1537), [#1416](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1416), [#1040](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1040) |
 | FILE FORMAT | ❌ | [#3115](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/3115), [#2154](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2154), [#1984](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1984), [#1820](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1820), [#1760](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1760), [#1614](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1614), [#1613](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1613), [#1609](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1609), [#1461](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1461) |
-| MATERIALIZED VIEW | ❌ | [#2397](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2397), [#1218](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1218) |
-| NETWORK RULE | ❌ | [#2593](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2593), [#2482](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2482) |
-| PACKAGES POLICY | ❌ | - |
-| PASSWORD POLICY | ❌ | [#2213](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2213), [#2162](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2162) |
-| PIPE | ❌ | [#2785](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2785), [#2075](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2075), [#1781](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1781), [#1707](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1707), [#1478](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1478), [#533](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/533) |
-| SECRET | ❌ | [#2545](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2545) |
-| SEQUENCE | ❌ | [#2589](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2589) |
-| SESSION POLICY | ❌ | [#2870](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2870) |
+| MATERIALIZED VIEW | ❌ | [#2397](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2397), [#1218](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1218) |
+| NETWORK RULE | ❌ | [#2593](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2593), [#2482](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2482) |
+| PACKAGES POLICY | ❌ | - |
+| PASSWORD POLICY | ❌ | [#2213](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2213), [#2162](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2162) |
+| PIPE | ❌ | [#2785](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2785), [#2075](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2075), [#1781](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1781), [#1707](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1707), [#1478](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1478), [#533](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/533) |
+| SECRET | 🚀 | [#2545](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2545) |
+| SEQUENCE | ❌ | [#2589](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2589) |
+| SESSION POLICY | ❌ | [#2870](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2870) |
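For readers following the documentation bumps above, a minimal root-module sketch pinning the provider to the 0.98.0 release that the refreshed links point at might look like the following. This block is illustrative only (the file layout and the exact version constraint are assumptions, not taken from the repository); the registry source address matches the links used throughout the changed docs.

```hcl
terraform {
  required_providers {
    snowflake = {
      # Same registry source address as the links in the updated docs.
      source = "Snowflake-Labs/snowflake"
      # Pin to the release the refreshed links refer to; relax the constraint
      # (for example "~> 0.98") once you are comfortable taking upgrades.
      version = "0.98.0"
    }
  }
}
```

With this pin in place, `terraform init -upgrade` fetches the 0.98.0 provider build so the behavior in your workspace matches the resource and data source pages linked above.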