From 6d4a10364276e92fa791eaa022c3bd7bce16228d Mon Sep 17 00:00:00 2001 From: Artur Sawicki Date: Thu, 12 Dec 2024 12:06:47 +0100 Subject: [PATCH 01/20] feat: Basic functions implementation (#3269) Prepare most of the java resource implementation: - add common drop method (also to procedures) - fix parameters schema and add mapping (also for procedures) - fix parameter handling - handle parameters and test - handle arguments, return type, runtime version, imports, target path, language - add default values to arguments - improve function details with mapping logic (for easier use in resource) - add a bunch of common functions to handle this family of resources - handle basic rename - add TABLE data type (needs logic and tests) - handle external language change - test no arguments - regenerate model builders and the docs - rename a few attributes - change requirements for some fields Next PRs: - TABLE function improvements and tests - handle secrets, external access integrations, packages, return not null, and comments - Add a similar PR for java procedure (reuse what we can) - Add PR with all other function types - datasources --- docs/resources/function_java.md | 86 +++- docs/resources/function_javascript.md | 62 ++- docs/resources/function_python.md | 75 ++- docs/resources/function_scala.md | 86 +++- docs/resources/function_sql.md | 62 ++- docs/resources/procedure_java.md | 56 ++- docs/resources/procedure_javascript.md | 56 ++- docs/resources/procedure_python.md | 56 ++- docs/resources/procedure_scala.md | 56 ++- docs/resources/procedure_sql.md | 56 ++- .../function_describe_snowflake_ext.go | 69 +++ .../function_java_resource_ext.go | 17 + .../function_resource_parameters_ext.go | 13 + .../config/model/function_java_model_ext.go | 72 +++ .../config/model/function_java_model_gen.go | 19 +- .../model/function_javascript_model_gen.go | 40 +- .../config/model/function_python_model_gen.go | 14 +- .../config/model/function_scala_model_gen.go | 19 +- 
.../config/model/function_sql_model_gen.go | 40 +- pkg/acceptance/check_destroy.go | 44 +- pkg/acceptance/helpers/function_client.go | 32 ++ .../helpers/function_setup_helpers.go | 92 ++-- pkg/acceptance/helpers/ids_generator.go | 12 + pkg/acceptance/helpers/stage_client.go | 15 + pkg/resources/custom_diffs.go | 12 + pkg/resources/doc_helpers.go | 3 +- pkg/resources/function.go | 16 +- pkg/resources/function_commons.go | 277 ++++++++++- pkg/resources/function_java.go | 165 ++++++- .../function_java_acceptance_test.go | 435 ++++++++++++++++++ pkg/resources/function_javascript.go | 2 +- pkg/resources/function_parameters.go | 12 +- pkg/resources/function_python.go | 2 +- pkg/resources/function_scala.go | 2 +- pkg/resources/function_sql.go | 2 +- pkg/resources/procedure.go | 16 +- pkg/resources/procedure_commons.go | 21 +- pkg/resources/procedure_java.go | 6 +- pkg/resources/procedure_javascript.go | 6 +- pkg/resources/procedure_python.go | 6 +- pkg/resources/procedure_scala.go | 6 +- pkg/resources/procedure_sql.go | 6 +- pkg/resources/resource_helpers_create.go | 29 ++ pkg/resources/resource_helpers_read.go | 19 + pkg/resources/user.go | 1 - pkg/schemas/function_parameters.go | 35 ++ pkg/schemas/procedure_parameters.go | 35 ++ pkg/sdk/data_types_deprecated.go | 4 + pkg/sdk/datatypes/legacy.go | 3 + pkg/sdk/datatypes/table.go | 39 ++ pkg/sdk/functions_ext.go | 152 +++++- pkg/sdk/functions_ext_test.go | 179 +++++++ pkg/sdk/identifier_helpers.go | 12 +- pkg/sdk/random_test.go | 12 +- pkg/sdk/testint/functions_integration_test.go | 209 ++++++++- .../testint/procedures_integration_test.go | 39 +- 56 files changed, 2638 insertions(+), 274 deletions(-) create mode 100644 pkg/acceptance/bettertestspoc/assert/resourceassert/function_java_resource_ext.go create mode 100644 pkg/acceptance/bettertestspoc/assert/resourceparametersassert/function_resource_parameters_ext.go create mode 100644 pkg/resources/function_java_acceptance_test.go create mode 100644 
pkg/schemas/function_parameters.go create mode 100644 pkg/schemas/procedure_parameters.go create mode 100644 pkg/sdk/datatypes/table.go create mode 100644 pkg/sdk/functions_ext_test.go diff --git a/docs/resources/function_java.md b/docs/resources/function_java.md index 23ab3b5dc2..820bb4e63d 100644 --- a/docs/resources/function_java.md +++ b/docs/resources/function_java.md @@ -17,7 +17,6 @@ Resource used to manage java function objects. For more information, check [func ### Required - `database` (String) The database in which to create the function. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"`. -- `function_definition` (String) Defines the handler code executed when the UDF is called. Wrapping `$$` signs are added by the provider automatically; do not include them. The `function_definition` value must be Java source code. For more information, see [Introduction to Java UDFs](https://docs.snowflake.com/en/developer-guide/udf/java/udf-java-introduction). To mitigate permadiff on this field, the provider replaces blank characters with a space. This can lead to false positives in cases where a change in case or run of whitespace is semantically significant. - `handler` (String) The name of the handler method or class. If the handler is for a scalar UDF, returning a non-tabular value, the HANDLER value should be a method name, as in the following form: `MyClass.myMethod`. If the handler is for a tabular UDF, the HANDLER value should be the name of a handler class. - `name` (String) The name of the function; the identifier does not need to be unique for the schema in which the function is created because UDFs are identified and resolved by the combination of the name and argument types. 
Check the [docs](https://docs.snowflake.com/en/sql-reference/sql/create-function#all-languages). Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"`. - `return_type` (String) Specifies the results returned by the UDF, which determines the UDF type. Use `` to create a scalar UDF that returns a single value with the specified data type. Use `TABLE (col_name col_data_type, ...)` to creates a table UDF that returns tabular results with the specified table column(s) and column type(s). For the details, consult the [docs](https://docs.snowflake.com/en/sql-reference/sql/create-function#all-languages). @@ -29,16 +28,17 @@ Resource used to manage java function objects. For more information, check [func - `comment` (String) Specifies a comment for the function. - `enable_console_output` (Boolean) Enable stdout/stderr fast path logging for anonyous stored procs. This is a public parameter (similar to LOG_LEVEL). For more information, check [ENABLE_CONSOLE_OUTPUT docs](https://docs.snowflake.com/en/sql-reference/parameters#enable-console-output). - `external_access_integrations` (Set of String) The names of [external access integrations](https://docs.snowflake.com/en/sql-reference/sql/create-external-access-integration) needed in order for this function’s handler code to access external networks. An external access integration specifies [network rules](https://docs.snowflake.com/en/sql-reference/sql/create-network-rule) and [secrets](https://docs.snowflake.com/en/sql-reference/sql/create-secret) that specify external locations and credentials (if any) allowed for use by handler code when making requests of an external network, such as an external REST API. 
-- `imports` (Set of String) The location (stage), path, and name of the file(s) to import. A file can be a JAR file or another type of file. If the file is a JAR file, it can contain one or more .class files and zero or more resource files. JNI (Java Native Interface) is not supported. Snowflake prohibits loading libraries that contain native code (as opposed to Java bytecode). Java UDFs can also read non-JAR files. For an example, see [Reading a file specified statically in IMPORTS](https://docs.snowflake.com/en/developer-guide/udf/java/udf-java-cookbook.html#label-reading-file-from-java-udf-imports). Consult the [docs](https://docs.snowflake.com/en/sql-reference/sql/create-function#java). +- `function_definition` (String) Defines the handler code executed when the UDF is called. Wrapping `$$` signs are added by the provider automatically; do not include them. The `function_definition` value must be Java source code. For more information, see [Introduction to Java UDFs](https://docs.snowflake.com/en/developer-guide/udf/java/udf-java-introduction). To mitigate permadiff on this field, the provider replaces blank characters with a space. This can lead to false positives in cases where a change in case or run of whitespace is semantically significant. +- `imports` (Block Set) The location (stage), path, and name of the file(s) to import. A file can be a JAR file or another type of file. If the file is a JAR file, it can contain one or more .class files and zero or more resource files. JNI (Java Native Interface) is not supported. Snowflake prohibits loading libraries that contain native code (as opposed to Java bytecode). Java UDFs can also read non-JAR files. For an example, see [Reading a file specified statically in IMPORTS](https://docs.snowflake.com/en/developer-guide/udf/java/udf-java-cookbook.html#label-reading-file-from-java-udf-imports). Consult the [docs](https://docs.snowflake.com/en/sql-reference/sql/create-function#java). 
(see [below for nested schema](#nestedblock--imports)) - `is_secure` (String) Specifies that the function is secure. By design, the Snowflake's `SHOW FUNCTIONS` command does not provide information about secure functions (consult [function docs](https://docs.snowflake.com/en/sql-reference/sql/create-function#id1) and [Protecting Sensitive Information with Secure UDFs and Stored Procedures](https://docs.snowflake.com/en/developer-guide/secure-udf-procedure)) which is essential to manage/import function with Terraform. Use the role owning the function while managing secure functions. Available options are: "true" or "false". When the value is not set in the configuration the provider will put "default" there which means to use the Snowflake default for this value. - `log_level` (String) LOG_LEVEL to use when filtering events For more information, check [LOG_LEVEL docs](https://docs.snowflake.com/en/sql-reference/parameters#log-level). - `metric_level` (String) METRIC_LEVEL value to control whether to emit metrics to Event Table For more information, check [METRIC_LEVEL docs](https://docs.snowflake.com/en/sql-reference/parameters#metric-level). - `null_input_behavior` (String) Specifies the behavior of the function when called with null inputs. Valid values are (case-insensitive): `CALLED ON NULL INPUT` | `RETURNS NULL ON NULL INPUT`. - `packages` (Set of String) The name and version number of Snowflake system packages required as dependencies. The value should be of the form `package_name:version_number`, where `package_name` is `snowflake_domain:package`. -- `return_behavior` (String) Specifies the behavior of the function when returning results. Valid values are (case-insensitive): `VOLATILE` | `IMMUTABLE`. +- `return_results_behavior` (String) Specifies the behavior of the function when returning results. Valid values are (case-insensitive): `VOLATILE` | `IMMUTABLE`. - `runtime_version` (String) Specifies the Java JDK runtime version to use. 
The supported versions of Java are 11.x and 17.x. If RUNTIME_VERSION is not set, Java JDK 11 is used. - `secrets` (Block Set) Assigns the names of [secrets](https://docs.snowflake.com/en/sql-reference/sql/create-secret) to variables so that you can use the variables to reference the secrets when retrieving information from secrets in handler code. Secrets you specify here must be allowed by the [external access integration](https://docs.snowflake.com/en/sql-reference/sql/create-external-access-integration) specified as a value of this CREATE FUNCTION command’s EXTERNAL_ACCESS_INTEGRATIONS parameter. (see [below for nested schema](#nestedblock--secrets)) -- `target_path` (String) The name of the handler method or class. If the handler is for a scalar UDF, returning a non-tabular value, the HANDLER value should be a method name, as in the following form: `MyClass.myMethod`. If the handler is for a tabular UDF, the HANDLER value should be the name of a handler class. +- `target_path` (Block Set, Max: 1) The name of the handler method or class. If the handler is for a scalar UDF, returning a non-tabular value, the HANDLER value should be a method name, as in the following form: `MyClass.myMethod`. If the handler is for a tabular UDF, the HANDLER value should be the name of a handler class. (see [below for nested schema](#nestedblock--target_path)) - `trace_level` (String) Trace level value to use when generating/filtering trace events For more information, check [TRACE_LEVEL docs](https://docs.snowflake.com/en/sql-reference/parameters#trace-level). ### Read-Only @@ -57,6 +57,19 @@ Required: - `arg_data_type` (String) The argument type. - `arg_name` (String) The argument name. +Optional: + +- `arg_default_value` (String) Optional default value for the argument. For text values use single quotes. Numeric values can be unquoted. External changes for this field won't be detected. 
In case you want to apply external changes, you can re-create the resource manually using "terraform taint". + + + +### Nested Schema for `imports` + +Required: + +- `path_on_stage` (String) Path for import on stage, without the leading `/`. +- `stage_location` (String) Stage location without leading `@`. To use your user's stage set this to `~`, otherwise pass fully qualified name of the stage (with every part contained in double quotes or use `snowflake_stage..fully_qualified_name` if you manage this stage through terraform). + ### Nested Schema for `secrets` @@ -67,15 +80,72 @@ Required: - `secret_variable_name` (String) The variable that will be used in handler code when retrieving information from the secret. + +### Nested Schema for `target_path` + +Required: + +- `path_on_stage` (String) Path for import on stage, without the leading `/`. +- `stage_location` (String) Stage location without leading `@`. To use your user's stage set this to `~`, otherwise pass fully qualified name of the stage (with every part contained in double quotes or use `snowflake_stage..fully_qualified_name` if you manage this stage through terraform). 
+ + ### Nested Schema for `parameters` Read-Only: -- `enable_console_output` (Boolean) -- `log_level` (String) -- `metric_level` (String) -- `trace_level` (String) +- `enable_console_output` (List of Object) (see [below for nested schema](#nestedobjatt--parameters--enable_console_output)) +- `log_level` (List of Object) (see [below for nested schema](#nestedobjatt--parameters--log_level)) +- `metric_level` (List of Object) (see [below for nested schema](#nestedobjatt--parameters--metric_level)) +- `trace_level` (List of Object) (see [below for nested schema](#nestedobjatt--parameters--trace_level)) + + +### Nested Schema for `parameters.enable_console_output` + +Read-Only: + +- `default` (String) +- `description` (String) +- `key` (String) +- `level` (String) +- `value` (String) + + + +### Nested Schema for `parameters.log_level` + +Read-Only: + +- `default` (String) +- `description` (String) +- `key` (String) +- `level` (String) +- `value` (String) + + + +### Nested Schema for `parameters.metric_level` + +Read-Only: + +- `default` (String) +- `description` (String) +- `key` (String) +- `level` (String) +- `value` (String) + + + +### Nested Schema for `parameters.trace_level` + +Read-Only: + +- `default` (String) +- `description` (String) +- `key` (String) +- `level` (String) +- `value` (String) + diff --git a/docs/resources/function_javascript.md b/docs/resources/function_javascript.md index 2680ff6653..1619ab3a06 100644 --- a/docs/resources/function_javascript.md +++ b/docs/resources/function_javascript.md @@ -31,7 +31,7 @@ Resource used to manage javascript function objects. For more information, check - `log_level` (String) LOG_LEVEL to use when filtering events For more information, check [LOG_LEVEL docs](https://docs.snowflake.com/en/sql-reference/parameters#log-level). 
- `metric_level` (String) METRIC_LEVEL value to control whether to emit metrics to Event Table For more information, check [METRIC_LEVEL docs](https://docs.snowflake.com/en/sql-reference/parameters#metric-level). - `null_input_behavior` (String) Specifies the behavior of the function when called with null inputs. Valid values are (case-insensitive): `CALLED ON NULL INPUT` | `RETURNS NULL ON NULL INPUT`. -- `return_behavior` (String) Specifies the behavior of the function when returning results. Valid values are (case-insensitive): `VOLATILE` | `IMMUTABLE`. +- `return_results_behavior` (String) Specifies the behavior of the function when returning results. Valid values are (case-insensitive): `VOLATILE` | `IMMUTABLE`. - `trace_level` (String) Trace level value to use when generating/filtering trace events For more information, check [TRACE_LEVEL docs](https://docs.snowflake.com/en/sql-reference/parameters#trace-level). ### Read-Only @@ -50,16 +50,68 @@ Required: - `arg_data_type` (String) The argument type. - `arg_name` (String) The argument name. +Optional: + +- `arg_default_value` (String) Optional default value for the argument. For text values use single quotes. Numeric values can be unquoted. External changes for this field won't be detected. In case you want to apply external changes, you can re-create the resource manually using "terraform taint". 
+ ### Nested Schema for `parameters` Read-Only: -- `enable_console_output` (Boolean) -- `log_level` (String) -- `metric_level` (String) -- `trace_level` (String) +- `enable_console_output` (List of Object) (see [below for nested schema](#nestedobjatt--parameters--enable_console_output)) +- `log_level` (List of Object) (see [below for nested schema](#nestedobjatt--parameters--log_level)) +- `metric_level` (List of Object) (see [below for nested schema](#nestedobjatt--parameters--metric_level)) +- `trace_level` (List of Object) (see [below for nested schema](#nestedobjatt--parameters--trace_level)) + + +### Nested Schema for `parameters.enable_console_output` + +Read-Only: + +- `default` (String) +- `description` (String) +- `key` (String) +- `level` (String) +- `value` (String) + + + +### Nested Schema for `parameters.log_level` + +Read-Only: + +- `default` (String) +- `description` (String) +- `key` (String) +- `level` (String) +- `value` (String) + + + +### Nested Schema for `parameters.metric_level` + +Read-Only: + +- `default` (String) +- `description` (String) +- `key` (String) +- `level` (String) +- `value` (String) + + + +### Nested Schema for `parameters.trace_level` + +Read-Only: + +- `default` (String) +- `description` (String) +- `key` (String) +- `level` (String) +- `value` (String) + diff --git a/docs/resources/function_python.md b/docs/resources/function_python.md index 5f68cfb014..21e4244789 100644 --- a/docs/resources/function_python.md +++ b/docs/resources/function_python.md @@ -17,7 +17,6 @@ Resource used to manage python function objects. For more information, check [fu ### Required - `database` (String) The database in which to create the function. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"`. 
-- `function_definition` (String) Defines the handler code executed when the UDF is called. Wrapping `$$` signs are added by the provider automatically; do not include them. The `function_definition` value must be Python source code. For more information, see [Introduction to Python UDFs](https://docs.snowflake.com/en/developer-guide/udf/python/udf-python-introduction). To mitigate permadiff on this field, the provider replaces blank characters with a space. This can lead to false positives in cases where a change in case or run of whitespace is semantically significant. - `handler` (String) The name of the handler function or class. If the handler is for a scalar UDF, returning a non-tabular value, the HANDLER value should be a function name. If the handler code is in-line with the CREATE FUNCTION statement, you can use the function name alone. When the handler code is referenced at a stage, this value should be qualified with the module name, as in the following form: `my_module.my_function`. If the handler is for a tabular UDF, the HANDLER value should be the name of a handler class. - `name` (String) The name of the function; the identifier does not need to be unique for the schema in which the function is created because UDFs are identified and resolved by the combination of the name and argument types. Check the [docs](https://docs.snowflake.com/en/sql-reference/sql/create-function#all-languages). Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"`. - `return_type` (String) Specifies the results returned by the UDF, which determines the UDF type. Use `` to create a scalar UDF that returns a single value with the specified data type. 
Use `TABLE (col_name col_data_type, ...)` to creates a table UDF that returns tabular results with the specified table column(s) and column type(s). For the details, consult the [docs](https://docs.snowflake.com/en/sql-reference/sql/create-function#all-languages). @@ -30,14 +29,15 @@ Resource used to manage python function objects. For more information, check [fu - `comment` (String) Specifies a comment for the function. - `enable_console_output` (Boolean) Enable stdout/stderr fast path logging for anonyous stored procs. This is a public parameter (similar to LOG_LEVEL). For more information, check [ENABLE_CONSOLE_OUTPUT docs](https://docs.snowflake.com/en/sql-reference/parameters#enable-console-output). - `external_access_integrations` (Set of String) The names of [external access integrations](https://docs.snowflake.com/en/sql-reference/sql/create-external-access-integration) needed in order for this function’s handler code to access external networks. An external access integration specifies [network rules](https://docs.snowflake.com/en/sql-reference/sql/create-network-rule) and [secrets](https://docs.snowflake.com/en/sql-reference/sql/create-secret) that specify external locations and credentials (if any) allowed for use by handler code when making requests of an external network, such as an external REST API. -- `imports` (Set of String) The location (stage), path, and name of the file(s) to import. A file can be a `.py` file or another type of file. Python UDFs can also read non-Python files, such as text files. For an example, see [Reading a file](https://docs.snowflake.com/en/developer-guide/udf/python/udf-python-examples.html#label-udf-python-read-files). Consult the [docs](https://docs.snowflake.com/en/sql-reference/sql/create-function#python). +- `function_definition` (String) Defines the handler code executed when the UDF is called. Wrapping `$$` signs are added by the provider automatically; do not include them. 
The `function_definition` value must be Python source code. For more information, see [Introduction to Python UDFs](https://docs.snowflake.com/en/developer-guide/udf/python/udf-python-introduction). To mitigate permadiff on this field, the provider replaces blank characters with a space. This can lead to false positives in cases where a change in case or run of whitespace is semantically significant. +- `imports` (Block Set) The location (stage), path, and name of the file(s) to import. A file can be a `.py` file or another type of file. Python UDFs can also read non-Python files, such as text files. For an example, see [Reading a file](https://docs.snowflake.com/en/developer-guide/udf/python/udf-python-examples.html#label-udf-python-read-files). Consult the [docs](https://docs.snowflake.com/en/sql-reference/sql/create-function#python). (see [below for nested schema](#nestedblock--imports)) - `is_aggregate` (String) Specifies that the function is an aggregate function. For more information about user-defined aggregate functions, see [Python user-defined aggregate functions](https://docs.snowflake.com/en/developer-guide/udf/python/udf-python-aggregate-functions). Available options are: "true" or "false". When the value is not set in the configuration the provider will put "default" there which means to use the Snowflake default for this value. - `is_secure` (String) Specifies that the function is secure. By design, the Snowflake's `SHOW FUNCTIONS` command does not provide information about secure functions (consult [function docs](https://docs.snowflake.com/en/sql-reference/sql/create-function#id1) and [Protecting Sensitive Information with Secure UDFs and Stored Procedures](https://docs.snowflake.com/en/developer-guide/secure-udf-procedure)) which is essential to manage/import function with Terraform. Use the role owning the function while managing secure functions. Available options are: "true" or "false". 
When the value is not set in the configuration the provider will put "default" there which means to use the Snowflake default for this value. - `log_level` (String) LOG_LEVEL to use when filtering events For more information, check [LOG_LEVEL docs](https://docs.snowflake.com/en/sql-reference/parameters#log-level). - `metric_level` (String) METRIC_LEVEL value to control whether to emit metrics to Event Table For more information, check [METRIC_LEVEL docs](https://docs.snowflake.com/en/sql-reference/parameters#metric-level). - `null_input_behavior` (String) Specifies the behavior of the function when called with null inputs. Valid values are (case-insensitive): `CALLED ON NULL INPUT` | `RETURNS NULL ON NULL INPUT`. - `packages` (Set of String) The name and version number of packages required as dependencies. The value should be of the form `package_name==version_number`. -- `return_behavior` (String) Specifies the behavior of the function when returning results. Valid values are (case-insensitive): `VOLATILE` | `IMMUTABLE`. +- `return_results_behavior` (String) Specifies the behavior of the function when returning results. Valid values are (case-insensitive): `VOLATILE` | `IMMUTABLE`. - `secrets` (Block Set) Assigns the names of [secrets](https://docs.snowflake.com/en/sql-reference/sql/create-secret) to variables so that you can use the variables to reference the secrets when retrieving information from secrets in handler code. Secrets you specify here must be allowed by the [external access integration](https://docs.snowflake.com/en/sql-reference/sql/create-external-access-integration) specified as a value of this CREATE FUNCTION command’s EXTERNAL_ACCESS_INTEGRATIONS parameter. (see [below for nested schema](#nestedblock--secrets)) - `trace_level` (String) Trace level value to use when generating/filtering trace events For more information, check [TRACE_LEVEL docs](https://docs.snowflake.com/en/sql-reference/parameters#trace-level). 
@@ -57,6 +57,19 @@ Required: - `arg_data_type` (String) The argument type. - `arg_name` (String) The argument name. +Optional: + +- `arg_default_value` (String) Optional default value for the argument. For text values use single quotes. Numeric values can be unquoted. External changes for this field won't be detected. In case you want to apply external changes, you can re-create the resource manually using "terraform taint". + + + +### Nested Schema for `imports` + +Required: + +- `path_on_stage` (String) Path for import on stage, without the leading `/`. +- `stage_location` (String) Stage location without leading `@`. To use your user's stage set this to `~`, otherwise pass fully qualified name of the stage (with every part contained in double quotes or use `snowflake_stage..fully_qualified_name` if you manage this stage through terraform). + ### Nested Schema for `secrets` @@ -72,10 +85,58 @@ Required: Read-Only: -- `enable_console_output` (Boolean) -- `log_level` (String) -- `metric_level` (String) -- `trace_level` (String) +- `enable_console_output` (List of Object) (see [below for nested schema](#nestedobjatt--parameters--enable_console_output)) +- `log_level` (List of Object) (see [below for nested schema](#nestedobjatt--parameters--log_level)) +- `metric_level` (List of Object) (see [below for nested schema](#nestedobjatt--parameters--metric_level)) +- `trace_level` (List of Object) (see [below for nested schema](#nestedobjatt--parameters--trace_level)) + + +### Nested Schema for `parameters.enable_console_output` + +Read-Only: + +- `default` (String) +- `description` (String) +- `key` (String) +- `level` (String) +- `value` (String) + + + +### Nested Schema for `parameters.log_level` + +Read-Only: + +- `default` (String) +- `description` (String) +- `key` (String) +- `level` (String) +- `value` (String) + + + +### Nested Schema for `parameters.metric_level` + +Read-Only: + +- `default` (String) +- `description` (String) +- `key` (String) +- `level` (String) 
+- `value` (String) + + + +### Nested Schema for `parameters.trace_level` + +Read-Only: + +- `default` (String) +- `description` (String) +- `key` (String) +- `level` (String) +- `value` (String) + diff --git a/docs/resources/function_scala.md b/docs/resources/function_scala.md index 9ec48d3866..01226e5512 100644 --- a/docs/resources/function_scala.md +++ b/docs/resources/function_scala.md @@ -17,7 +17,6 @@ Resource used to manage scala function objects. For more information, check [fun ### Required - `database` (String) The database in which to create the function. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"`. -- `function_definition` (String) Defines the handler code executed when the UDF is called. Wrapping `$$` signs are added by the provider automatically; do not include them. The `function_definition` value must be Scala source code. For more information, see [Introduction to Scala UDFs](https://docs.snowflake.com/en/developer-guide/udf/scala/udf-scala-introduction). To mitigate permadiff on this field, the provider replaces blank characters with a space. This can lead to false positives in cases where a change in case or run of whitespace is semantically significant. - `handler` (String) The name of the handler method or class. If the handler is for a scalar UDF, returning a non-tabular value, the HANDLER value should be a method name, as in the following form: `MyClass.myMethod`. - `name` (String) The name of the function; the identifier does not need to be unique for the schema in which the function is created because UDFs are identified and resolved by the combination of the name and argument types. Check the [docs](https://docs.snowflake.com/en/sql-reference/sql/create-function#all-languages). 
Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"`. - `return_type` (String) Specifies the results returned by the UDF, which determines the UDF type. Use `` to create a scalar UDF that returns a single value with the specified data type. Use `TABLE (col_name col_data_type, ...)` to creates a table UDF that returns tabular results with the specified table column(s) and column type(s). For the details, consult the [docs](https://docs.snowflake.com/en/sql-reference/sql/create-function#all-languages). @@ -30,15 +29,16 @@ Resource used to manage scala function objects. For more information, check [fun - `comment` (String) Specifies a comment for the function. - `enable_console_output` (Boolean) Enable stdout/stderr fast path logging for anonyous stored procs. This is a public parameter (similar to LOG_LEVEL). For more information, check [ENABLE_CONSOLE_OUTPUT docs](https://docs.snowflake.com/en/sql-reference/parameters#enable-console-output). - `external_access_integrations` (Set of String) The names of [external access integrations](https://docs.snowflake.com/en/sql-reference/sql/create-external-access-integration) needed in order for this function’s handler code to access external networks. An external access integration specifies [network rules](https://docs.snowflake.com/en/sql-reference/sql/create-network-rule) and [secrets](https://docs.snowflake.com/en/sql-reference/sql/create-secret) that specify external locations and credentials (if any) allowed for use by handler code when making requests of an external network, such as an external REST API. -- `imports` (Set of String) The location (stage), path, and name of the file(s) to import, such as a JAR or other kind of file. 
The JAR file might contain handler dependency libraries. It can contain one or more .class files and zero or more resource files. JNI (Java Native Interface) is not supported. Snowflake prohibits loading libraries that contain native code (as opposed to Java bytecode). A non-JAR file might a file read by handler code. For an example, see [Reading a file specified statically in IMPORTS](https://docs.snowflake.com/en/developer-guide/udf/java/udf-java-cookbook.html#label-reading-file-from-java-udf-imports). Consult the [docs](https://docs.snowflake.com/en/sql-reference/sql/create-function#scala). +- `function_definition` (String) Defines the handler code executed when the UDF is called. Wrapping `$$` signs are added by the provider automatically; do not include them. The `function_definition` value must be Scala source code. For more information, see [Introduction to Scala UDFs](https://docs.snowflake.com/en/developer-guide/udf/scala/udf-scala-introduction). To mitigate permadiff on this field, the provider replaces blank characters with a space. This can lead to false positives in cases where a change in case or run of whitespace is semantically significant. +- `imports` (Block Set) The location (stage), path, and name of the file(s) to import, such as a JAR or other kind of file. The JAR file might contain handler dependency libraries. It can contain one or more .class files and zero or more resource files. JNI (Java Native Interface) is not supported. Snowflake prohibits loading libraries that contain native code (as opposed to Java bytecode). A non-JAR file might a file read by handler code. For an example, see [Reading a file specified statically in IMPORTS](https://docs.snowflake.com/en/developer-guide/udf/java/udf-java-cookbook.html#label-reading-file-from-java-udf-imports). Consult the [docs](https://docs.snowflake.com/en/sql-reference/sql/create-function#scala). 
(see [below for nested schema](#nestedblock--imports)) - `is_secure` (String) Specifies that the function is secure. By design, the Snowflake's `SHOW FUNCTIONS` command does not provide information about secure functions (consult [function docs](https://docs.snowflake.com/en/sql-reference/sql/create-function#id1) and [Protecting Sensitive Information with Secure UDFs and Stored Procedures](https://docs.snowflake.com/en/developer-guide/secure-udf-procedure)) which is essential to manage/import function with Terraform. Use the role owning the function while managing secure functions. Available options are: "true" or "false". When the value is not set in the configuration the provider will put "default" there which means to use the Snowflake default for this value. - `log_level` (String) LOG_LEVEL to use when filtering events For more information, check [LOG_LEVEL docs](https://docs.snowflake.com/en/sql-reference/parameters#log-level). - `metric_level` (String) METRIC_LEVEL value to control whether to emit metrics to Event Table For more information, check [METRIC_LEVEL docs](https://docs.snowflake.com/en/sql-reference/parameters#metric-level). - `null_input_behavior` (String) Specifies the behavior of the function when called with null inputs. Valid values are (case-insensitive): `CALLED ON NULL INPUT` | `RETURNS NULL ON NULL INPUT`. - `packages` (Set of String) The name and version number of Snowflake system packages required as dependencies. The value should be of the form `package_name:version_number`, where `package_name` is `snowflake_domain:package`. -- `return_behavior` (String) Specifies the behavior of the function when returning results. Valid values are (case-insensitive): `VOLATILE` | `IMMUTABLE`. +- `return_results_behavior` (String) Specifies the behavior of the function when returning results. Valid values are (case-insensitive): `VOLATILE` | `IMMUTABLE`. 
- `secrets` (Block Set) Assigns the names of [secrets](https://docs.snowflake.com/en/sql-reference/sql/create-secret) to variables so that you can use the variables to reference the secrets when retrieving information from secrets in handler code. Secrets you specify here must be allowed by the [external access integration](https://docs.snowflake.com/en/sql-reference/sql/create-external-access-integration) specified as a value of this CREATE FUNCTION command’s EXTERNAL_ACCESS_INTEGRATIONS parameter. (see [below for nested schema](#nestedblock--secrets)) -- `target_path` (String) The name of the handler method or class. If the handler is for a scalar UDF, returning a non-tabular value, the HANDLER value should be a method name, as in the following form: `MyClass.myMethod`. +- `target_path` (Block Set, Max: 1) The name of the handler method or class. If the handler is for a scalar UDF, returning a non-tabular value, the HANDLER value should be a method name, as in the following form: `MyClass.myMethod`. (see [below for nested schema](#nestedblock--target_path)) - `trace_level` (String) Trace level value to use when generating/filtering trace events For more information, check [TRACE_LEVEL docs](https://docs.snowflake.com/en/sql-reference/parameters#trace-level). ### Read-Only @@ -57,6 +57,19 @@ Required: - `arg_data_type` (String) The argument type. - `arg_name` (String) The argument name. +Optional: + +- `arg_default_value` (String) Optional default value for the argument. For text values use single quotes. Numeric values can be unquoted. External changes for this field won't be detected. In case you want to apply external changes, you can re-create the resource manually using "terraform taint". + + + +### Nested Schema for `imports` + +Required: + +- `path_on_stage` (String) Path for import on stage, without the leading `/`. +- `stage_location` (String) Stage location without leading `@`. 
To use your user's stage set this to `~`, otherwise pass fully qualified name of the stage (with every part contained in double quotes or use `snowflake_stage..fully_qualified_name` if you manage this stage through terraform). + ### Nested Schema for `secrets` @@ -67,15 +80,72 @@ Required: - `secret_variable_name` (String) The variable that will be used in handler code when retrieving information from the secret. + +### Nested Schema for `target_path` + +Required: + +- `path_on_stage` (String) Path for import on stage, without the leading `/`. +- `stage_location` (String) Stage location without leading `@`. To use your user's stage set this to `~`, otherwise pass fully qualified name of the stage (with every part contained in double quotes or use `snowflake_stage..fully_qualified_name` if you manage this stage through terraform). + + ### Nested Schema for `parameters` Read-Only: -- `enable_console_output` (Boolean) -- `log_level` (String) -- `metric_level` (String) -- `trace_level` (String) +- `enable_console_output` (List of Object) (see [below for nested schema](#nestedobjatt--parameters--enable_console_output)) +- `log_level` (List of Object) (see [below for nested schema](#nestedobjatt--parameters--log_level)) +- `metric_level` (List of Object) (see [below for nested schema](#nestedobjatt--parameters--metric_level)) +- `trace_level` (List of Object) (see [below for nested schema](#nestedobjatt--parameters--trace_level)) + + +### Nested Schema for `parameters.enable_console_output` + +Read-Only: + +- `default` (String) +- `description` (String) +- `key` (String) +- `level` (String) +- `value` (String) + + + +### Nested Schema for `parameters.log_level` + +Read-Only: + +- `default` (String) +- `description` (String) +- `key` (String) +- `level` (String) +- `value` (String) + + + +### Nested Schema for `parameters.metric_level` + +Read-Only: + +- `default` (String) +- `description` (String) +- `key` (String) +- `level` (String) +- `value` (String) + + + +### 
Nested Schema for `parameters.trace_level` + +Read-Only: + +- `default` (String) +- `description` (String) +- `key` (String) +- `level` (String) +- `value` (String) + diff --git a/docs/resources/function_sql.md b/docs/resources/function_sql.md index 80d83727fb..4a48191740 100644 --- a/docs/resources/function_sql.md +++ b/docs/resources/function_sql.md @@ -31,7 +31,7 @@ Resource used to manage sql function objects. For more information, check [funct - `log_level` (String) LOG_LEVEL to use when filtering events For more information, check [LOG_LEVEL docs](https://docs.snowflake.com/en/sql-reference/parameters#log-level). - `metric_level` (String) METRIC_LEVEL value to control whether to emit metrics to Event Table For more information, check [METRIC_LEVEL docs](https://docs.snowflake.com/en/sql-reference/parameters#metric-level). - `null_input_behavior` (String) Specifies the behavior of the function when called with null inputs. Valid values are (case-insensitive): `CALLED ON NULL INPUT` | `RETURNS NULL ON NULL INPUT`. -- `return_behavior` (String) Specifies the behavior of the function when returning results. Valid values are (case-insensitive): `VOLATILE` | `IMMUTABLE`. +- `return_results_behavior` (String) Specifies the behavior of the function when returning results. Valid values are (case-insensitive): `VOLATILE` | `IMMUTABLE`. - `trace_level` (String) Trace level value to use when generating/filtering trace events For more information, check [TRACE_LEVEL docs](https://docs.snowflake.com/en/sql-reference/parameters#trace-level). ### Read-Only @@ -50,16 +50,68 @@ Required: - `arg_data_type` (String) The argument type. - `arg_name` (String) The argument name. +Optional: + +- `arg_default_value` (String) Optional default value for the argument. For text values use single quotes. Numeric values can be unquoted. External changes for this field won't be detected. 
In case you want to apply external changes, you can re-create the resource manually using "terraform taint". + ### Nested Schema for `parameters` Read-Only: -- `enable_console_output` (Boolean) -- `log_level` (String) -- `metric_level` (String) -- `trace_level` (String) +- `enable_console_output` (List of Object) (see [below for nested schema](#nestedobjatt--parameters--enable_console_output)) +- `log_level` (List of Object) (see [below for nested schema](#nestedobjatt--parameters--log_level)) +- `metric_level` (List of Object) (see [below for nested schema](#nestedobjatt--parameters--metric_level)) +- `trace_level` (List of Object) (see [below for nested schema](#nestedobjatt--parameters--trace_level)) + + +### Nested Schema for `parameters.enable_console_output` + +Read-Only: + +- `default` (String) +- `description` (String) +- `key` (String) +- `level` (String) +- `value` (String) + + + +### Nested Schema for `parameters.log_level` + +Read-Only: + +- `default` (String) +- `description` (String) +- `key` (String) +- `level` (String) +- `value` (String) + + + +### Nested Schema for `parameters.metric_level` + +Read-Only: + +- `default` (String) +- `description` (String) +- `key` (String) +- `level` (String) +- `value` (String) + + + +### Nested Schema for `parameters.trace_level` + +Read-Only: + +- `default` (String) +- `description` (String) +- `key` (String) +- `level` (String) +- `value` (String) + diff --git a/docs/resources/procedure_java.md b/docs/resources/procedure_java.md index 94490ed21e..dbb5f2eba3 100644 --- a/docs/resources/procedure_java.md +++ b/docs/resources/procedure_java.md @@ -73,10 +73,58 @@ Required: Read-Only: -- `enable_console_output` (Boolean) -- `log_level` (String) -- `metric_level` (String) -- `trace_level` (String) +- `enable_console_output` (List of Object) (see [below for nested schema](#nestedobjatt--parameters--enable_console_output)) +- `log_level` (List of Object) (see [below for nested 
schema](#nestedobjatt--parameters--log_level)) +- `metric_level` (List of Object) (see [below for nested schema](#nestedobjatt--parameters--metric_level)) +- `trace_level` (List of Object) (see [below for nested schema](#nestedobjatt--parameters--trace_level)) + + +### Nested Schema for `parameters.enable_console_output` + +Read-Only: + +- `default` (String) +- `description` (String) +- `key` (String) +- `level` (String) +- `value` (String) + + + +### Nested Schema for `parameters.log_level` + +Read-Only: + +- `default` (String) +- `description` (String) +- `key` (String) +- `level` (String) +- `value` (String) + + + +### Nested Schema for `parameters.metric_level` + +Read-Only: + +- `default` (String) +- `description` (String) +- `key` (String) +- `level` (String) +- `value` (String) + + + +### Nested Schema for `parameters.trace_level` + +Read-Only: + +- `default` (String) +- `description` (String) +- `key` (String) +- `level` (String) +- `value` (String) + diff --git a/docs/resources/procedure_javascript.md b/docs/resources/procedure_javascript.md index a562ad589d..a9364db4cf 100644 --- a/docs/resources/procedure_javascript.md +++ b/docs/resources/procedure_javascript.md @@ -56,10 +56,58 @@ Required: Read-Only: -- `enable_console_output` (Boolean) -- `log_level` (String) -- `metric_level` (String) -- `trace_level` (String) +- `enable_console_output` (List of Object) (see [below for nested schema](#nestedobjatt--parameters--enable_console_output)) +- `log_level` (List of Object) (see [below for nested schema](#nestedobjatt--parameters--log_level)) +- `metric_level` (List of Object) (see [below for nested schema](#nestedobjatt--parameters--metric_level)) +- `trace_level` (List of Object) (see [below for nested schema](#nestedobjatt--parameters--trace_level)) + + +### Nested Schema for `parameters.enable_console_output` + +Read-Only: + +- `default` (String) +- `description` (String) +- `key` (String) +- `level` (String) +- `value` (String) + + + +### Nested Schema 
for `parameters.log_level` + +Read-Only: + +- `default` (String) +- `description` (String) +- `key` (String) +- `level` (String) +- `value` (String) + + + +### Nested Schema for `parameters.metric_level` + +Read-Only: + +- `default` (String) +- `description` (String) +- `key` (String) +- `level` (String) +- `value` (String) + + + +### Nested Schema for `parameters.trace_level` + +Read-Only: + +- `default` (String) +- `description` (String) +- `key` (String) +- `level` (String) +- `value` (String) + diff --git a/docs/resources/procedure_python.md b/docs/resources/procedure_python.md index 7b6759ef75..a28cf0d0b5 100644 --- a/docs/resources/procedure_python.md +++ b/docs/resources/procedure_python.md @@ -72,10 +72,58 @@ Required: Read-Only: -- `enable_console_output` (Boolean) -- `log_level` (String) -- `metric_level` (String) -- `trace_level` (String) +- `enable_console_output` (List of Object) (see [below for nested schema](#nestedobjatt--parameters--enable_console_output)) +- `log_level` (List of Object) (see [below for nested schema](#nestedobjatt--parameters--log_level)) +- `metric_level` (List of Object) (see [below for nested schema](#nestedobjatt--parameters--metric_level)) +- `trace_level` (List of Object) (see [below for nested schema](#nestedobjatt--parameters--trace_level)) + + +### Nested Schema for `parameters.enable_console_output` + +Read-Only: + +- `default` (String) +- `description` (String) +- `key` (String) +- `level` (String) +- `value` (String) + + + +### Nested Schema for `parameters.log_level` + +Read-Only: + +- `default` (String) +- `description` (String) +- `key` (String) +- `level` (String) +- `value` (String) + + + +### Nested Schema for `parameters.metric_level` + +Read-Only: + +- `default` (String) +- `description` (String) +- `key` (String) +- `level` (String) +- `value` (String) + + + +### Nested Schema for `parameters.trace_level` + +Read-Only: + +- `default` (String) +- `description` (String) +- `key` (String) +- `level` (String) +- 
`value` (String) + diff --git a/docs/resources/procedure_scala.md b/docs/resources/procedure_scala.md index 1347bfb5cf..692fb569b1 100644 --- a/docs/resources/procedure_scala.md +++ b/docs/resources/procedure_scala.md @@ -73,10 +73,58 @@ Required: Read-Only: -- `enable_console_output` (Boolean) -- `log_level` (String) -- `metric_level` (String) -- `trace_level` (String) +- `enable_console_output` (List of Object) (see [below for nested schema](#nestedobjatt--parameters--enable_console_output)) +- `log_level` (List of Object) (see [below for nested schema](#nestedobjatt--parameters--log_level)) +- `metric_level` (List of Object) (see [below for nested schema](#nestedobjatt--parameters--metric_level)) +- `trace_level` (List of Object) (see [below for nested schema](#nestedobjatt--parameters--trace_level)) + + +### Nested Schema for `parameters.enable_console_output` + +Read-Only: + +- `default` (String) +- `description` (String) +- `key` (String) +- `level` (String) +- `value` (String) + + + +### Nested Schema for `parameters.log_level` + +Read-Only: + +- `default` (String) +- `description` (String) +- `key` (String) +- `level` (String) +- `value` (String) + + + +### Nested Schema for `parameters.metric_level` + +Read-Only: + +- `default` (String) +- `description` (String) +- `key` (String) +- `level` (String) +- `value` (String) + + + +### Nested Schema for `parameters.trace_level` + +Read-Only: + +- `default` (String) +- `description` (String) +- `key` (String) +- `level` (String) +- `value` (String) + diff --git a/docs/resources/procedure_sql.md b/docs/resources/procedure_sql.md index 3b078e3977..2533380779 100644 --- a/docs/resources/procedure_sql.md +++ b/docs/resources/procedure_sql.md @@ -56,10 +56,58 @@ Required: Read-Only: -- `enable_console_output` (Boolean) -- `log_level` (String) -- `metric_level` (String) -- `trace_level` (String) +- `enable_console_output` (List of Object) (see [below for nested schema](#nestedobjatt--parameters--enable_console_output)) 
+- `log_level` (List of Object) (see [below for nested schema](#nestedobjatt--parameters--log_level)) +- `metric_level` (List of Object) (see [below for nested schema](#nestedobjatt--parameters--metric_level)) +- `trace_level` (List of Object) (see [below for nested schema](#nestedobjatt--parameters--trace_level)) + + +### Nested Schema for `parameters.enable_console_output` + +Read-Only: + +- `default` (String) +- `description` (String) +- `key` (String) +- `level` (String) +- `value` (String) + + + +### Nested Schema for `parameters.log_level` + +Read-Only: + +- `default` (String) +- `description` (String) +- `key` (String) +- `level` (String) +- `value` (String) + + + +### Nested Schema for `parameters.metric_level` + +Read-Only: + +- `default` (String) +- `description` (String) +- `key` (String) +- `level` (String) +- `value` (String) + + + +### Nested Schema for `parameters.trace_level` + +Read-Only: + +- `default` (String) +- `description` (String) +- `key` (String) +- `level` (String) +- `value` (String) + diff --git a/pkg/acceptance/bettertestspoc/assert/objectassert/function_describe_snowflake_ext.go b/pkg/acceptance/bettertestspoc/assert/objectassert/function_describe_snowflake_ext.go index f540d487bd..a4c256b172 100644 --- a/pkg/acceptance/bettertestspoc/assert/objectassert/function_describe_snowflake_ext.go +++ b/pkg/acceptance/bettertestspoc/assert/objectassert/function_describe_snowflake_ext.go @@ -6,10 +6,12 @@ import ( "testing" acc "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance" + assert2 "github.com/stretchr/testify/assert" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/bettertestspoc/assert" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/internal/collections" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk/datatypes" ) // TODO [SNOW-1501905]: this file should be fully regenerated when adding and option to 
assert the results of describe @@ -405,3 +407,70 @@ func (f *FunctionDetailsAssert) HasExactlySecrets(expectedSecrets map[string]sdk }) return f } + +func (f *FunctionDetailsAssert) HasExactlyImportsNormalizedInAnyOrder(imports ...sdk.NormalizedPath) *FunctionDetailsAssert { + f.AddAssertion(func(t *testing.T, o *sdk.FunctionDetails) error { + t.Helper() + if o.NormalizedImports == nil { + return fmt.Errorf("expected imports to have value; got: nil") + } + if !assert2.ElementsMatch(t, imports, o.NormalizedImports) { + return fmt.Errorf("expected %v imports in task relations, got %v", imports, o.NormalizedImports) + } + return nil + }) + return f +} + +func (f *FunctionDetailsAssert) HasNormalizedTargetPath(expectedStageLocation string, expectedPathOnStage string) *FunctionDetailsAssert { + f.AddAssertion(func(t *testing.T, o *sdk.FunctionDetails) error { + t.Helper() + if o.NormalizedTargetPath == nil { + return fmt.Errorf("expected normalized target path to have value; got: nil") + } + if o.NormalizedTargetPath.StageLocation != expectedStageLocation { + return fmt.Errorf("expected %s stage location for target path, got %v", expectedStageLocation, o.NormalizedTargetPath.StageLocation) + } + if o.NormalizedTargetPath.PathOnStage != expectedPathOnStage { + return fmt.Errorf("expected %s path on stage for target path, got %v", expectedPathOnStage, o.NormalizedTargetPath.PathOnStage) + } + return nil + }) + return f +} + +func (f *FunctionDetailsAssert) HasNormalizedTargetPathNil() *FunctionDetailsAssert { + f.AddAssertion(func(t *testing.T, o *sdk.FunctionDetails) error { + t.Helper() + if o.NormalizedTargetPath != nil { + return fmt.Errorf("expected normalized target path to be nil, got: %s", *o.NormalizedTargetPath) + } + return nil + }) + return f +} + +func (f *FunctionDetailsAssert) HasReturnDataType(expectedDataType datatypes.DataType) *FunctionDetailsAssert { + f.AddAssertion(func(t *testing.T, o *sdk.FunctionDetails) error { + t.Helper() + if o.ReturnDataType 
== nil { + return fmt.Errorf("expected return data type to have value; got: nil") + } + if !datatypes.AreTheSame(o.ReturnDataType, expectedDataType) { + return fmt.Errorf("expected %s return data type, got %v", expectedDataType, o.ReturnDataType.ToSql()) + } + return nil + }) + return f +} + +func (f *FunctionDetailsAssert) HasReturnNotNull(expected bool) *FunctionDetailsAssert { + f.AddAssertion(func(t *testing.T, o *sdk.FunctionDetails) error { + t.Helper() + if o.ReturnNotNull != expected { + return fmt.Errorf("expected return not null %t; got: %t", expected, o.ReturnNotNull) + } + return nil + }) + return f +} diff --git a/pkg/acceptance/bettertestspoc/assert/resourceassert/function_java_resource_ext.go b/pkg/acceptance/bettertestspoc/assert/resourceassert/function_java_resource_ext.go new file mode 100644 index 0000000000..9a3bb1fa15 --- /dev/null +++ b/pkg/acceptance/bettertestspoc/assert/resourceassert/function_java_resource_ext.go @@ -0,0 +1,17 @@ +package resourceassert + +import ( + "strconv" + + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/bettertestspoc/assert" +) + +func (f *FunctionJavaResourceAssert) HasImportsLength(len int) *FunctionJavaResourceAssert { + f.AddAssertion(assert.ValueSet("imports.#", strconv.FormatInt(int64(len), 10))) + return f +} + +func (f *FunctionJavaResourceAssert) HasTargetPathEmpty() *FunctionJavaResourceAssert { + f.AddAssertion(assert.ValueSet("target_path.#", "0")) + return f +} diff --git a/pkg/acceptance/bettertestspoc/assert/resourceparametersassert/function_resource_parameters_ext.go b/pkg/acceptance/bettertestspoc/assert/resourceparametersassert/function_resource_parameters_ext.go new file mode 100644 index 0000000000..2bc66908df --- /dev/null +++ b/pkg/acceptance/bettertestspoc/assert/resourceparametersassert/function_resource_parameters_ext.go @@ -0,0 +1,13 @@ +package resourceparametersassert + +import ( + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk" +) + +func (f 
*FunctionResourceParametersAssert) HasAllDefaults() *FunctionResourceParametersAssert { + return f. + HasEnableConsoleOutput(false). + HasLogLevel(sdk.LogLevelOff). + HasMetricLevel(sdk.MetricLevelNone). + HasTraceLevel(sdk.TraceLevelOff) +} diff --git a/pkg/acceptance/bettertestspoc/config/model/function_java_model_ext.go b/pkg/acceptance/bettertestspoc/config/model/function_java_model_ext.go index 4bac27ada5..8579ea981a 100644 --- a/pkg/acceptance/bettertestspoc/config/model/function_java_model_ext.go +++ b/pkg/acceptance/bettertestspoc/config/model/function_java_model_ext.go @@ -2,6 +2,11 @@ package model import ( "encoding/json" + + tfconfig "github.com/hashicorp/terraform-plugin-testing/config" + + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk/datatypes" ) func (f *FunctionJavaModel) MarshalJSON() ([]byte, error) { @@ -14,3 +19,70 @@ func (f *FunctionJavaModel) MarshalJSON() ([]byte, error) { DependsOn: f.DependsOn(), }) } + +func FunctionJavaBasicInline( + resourceName string, + id sdk.SchemaObjectIdentifierWithArguments, + returnType datatypes.DataType, + handler string, + functionDefinition string, +) *FunctionJavaModel { + return FunctionJava(resourceName, id.DatabaseName(), handler, id.Name(), returnType.ToSql(), id.SchemaName()).WithFunctionDefinition(functionDefinition) +} + +func FunctionJavaBasicStaged( + resourceName string, + id sdk.SchemaObjectIdentifierWithArguments, + returnType datatypes.DataType, + handler string, + stageLocation string, + pathOnStage string, +) *FunctionJavaModel { + return FunctionJava(resourceName, id.DatabaseName(), handler, id.Name(), returnType.ToSql(), id.SchemaName()). 
+ WithImport(stageLocation, pathOnStage) +} + +func (f *FunctionJavaModel) WithArgument(argName string, argDataType datatypes.DataType) *FunctionJavaModel { + return f.WithArgumentsValue( + tfconfig.ObjectVariable( + map[string]tfconfig.Variable{ + "arg_name": tfconfig.StringVariable(argName), + "arg_data_type": tfconfig.StringVariable(argDataType.ToSql()), + }, + ), + ) +} + +func (f *FunctionJavaModel) WithArgumentWithDefaultValue(argName string, argDataType datatypes.DataType, value string) *FunctionJavaModel { + return f.WithArgumentsValue( + tfconfig.ObjectVariable( + map[string]tfconfig.Variable{ + "arg_name": tfconfig.StringVariable(argName), + "arg_data_type": tfconfig.StringVariable(argDataType.ToSql()), + "arg_default_value": tfconfig.StringVariable(value), + }, + ), + ) +} + +func (f *FunctionJavaModel) WithImport(stageLocation string, pathOnStage string) *FunctionJavaModel { + return f.WithImportsValue( + tfconfig.ObjectVariable( + map[string]tfconfig.Variable{ + "stage_location": tfconfig.StringVariable(stageLocation), + "path_on_stage": tfconfig.StringVariable(pathOnStage), + }, + ), + ) +} + +func (f *FunctionJavaModel) WithTargetPathParts(stageLocation string, pathOnStage string) *FunctionJavaModel { + return f.WithTargetPathValue( + tfconfig.ObjectVariable( + map[string]tfconfig.Variable{ + "stage_location": tfconfig.StringVariable(stageLocation), + "path_on_stage": tfconfig.StringVariable(pathOnStage), + }, + ), + ) +} diff --git a/pkg/acceptance/bettertestspoc/config/model/function_java_model_gen.go b/pkg/acceptance/bettertestspoc/config/model/function_java_model_gen.go index 704f6b2bcf..309a53b0a9 100644 --- a/pkg/acceptance/bettertestspoc/config/model/function_java_model_gen.go +++ b/pkg/acceptance/bettertestspoc/config/model/function_java_model_gen.go @@ -26,7 +26,7 @@ type FunctionJavaModel struct { Name tfconfig.Variable `json:"name,omitempty"` NullInputBehavior tfconfig.Variable `json:"null_input_behavior,omitempty"` Packages 
tfconfig.Variable `json:"packages,omitempty"` - ReturnBehavior tfconfig.Variable `json:"return_behavior,omitempty"` + ReturnResultsBehavior tfconfig.Variable `json:"return_results_behavior,omitempty"` ReturnType tfconfig.Variable `json:"return_type,omitempty"` RuntimeVersion tfconfig.Variable `json:"runtime_version,omitempty"` Schema tfconfig.Variable `json:"schema,omitempty"` @@ -44,7 +44,6 @@ type FunctionJavaModel struct { func FunctionJava( resourceName string, database string, - functionDefinition string, handler string, name string, returnType string, @@ -52,7 +51,6 @@ func FunctionJava( ) *FunctionJavaModel { f := &FunctionJavaModel{ResourceModelMeta: config.Meta(resourceName, resources.FunctionJava)} f.WithDatabase(database) - f.WithFunctionDefinition(functionDefinition) f.WithHandler(handler) f.WithName(name) f.WithReturnType(returnType) @@ -62,7 +60,6 @@ func FunctionJava( func FunctionJavaWithDefaultMeta( database string, - functionDefinition string, handler string, name string, returnType string, @@ -70,7 +67,6 @@ func FunctionJavaWithDefaultMeta( ) *FunctionJavaModel { f := &FunctionJavaModel{ResourceModelMeta: config.DefaultMeta(resources.FunctionJava)} f.WithDatabase(database) - f.WithFunctionDefinition(functionDefinition) f.WithHandler(handler) f.WithName(name) f.WithReturnType(returnType) @@ -150,8 +146,8 @@ func (f *FunctionJavaModel) WithNullInputBehavior(nullInputBehavior string) *Fun // packages attribute type is not yet supported, so WithPackages can't be generated -func (f *FunctionJavaModel) WithReturnBehavior(returnBehavior string) *FunctionJavaModel { - f.ReturnBehavior = tfconfig.StringVariable(returnBehavior) +func (f *FunctionJavaModel) WithReturnResultsBehavior(returnResultsBehavior string) *FunctionJavaModel { + f.ReturnResultsBehavior = tfconfig.StringVariable(returnResultsBehavior) return f } @@ -172,10 +168,7 @@ func (f *FunctionJavaModel) WithSchema(schema string) *FunctionJavaModel { // secrets attribute type is not yet 
supported, so WithSecrets can't be generated -func (f *FunctionJavaModel) WithTargetPath(targetPath string) *FunctionJavaModel { - f.TargetPath = tfconfig.StringVariable(targetPath) - return f -} +// target_path attribute type is not yet supported, so WithTargetPath can't be generated func (f *FunctionJavaModel) WithTraceLevel(traceLevel string) *FunctionJavaModel { f.TraceLevel = tfconfig.StringVariable(traceLevel) @@ -266,8 +259,8 @@ func (f *FunctionJavaModel) WithPackagesValue(value tfconfig.Variable) *Function return f } -func (f *FunctionJavaModel) WithReturnBehaviorValue(value tfconfig.Variable) *FunctionJavaModel { - f.ReturnBehavior = value +func (f *FunctionJavaModel) WithReturnResultsBehaviorValue(value tfconfig.Variable) *FunctionJavaModel { + f.ReturnResultsBehavior = value return f } diff --git a/pkg/acceptance/bettertestspoc/config/model/function_javascript_model_gen.go b/pkg/acceptance/bettertestspoc/config/model/function_javascript_model_gen.go index 5d8ad68aec..742dee099b 100644 --- a/pkg/acceptance/bettertestspoc/config/model/function_javascript_model_gen.go +++ b/pkg/acceptance/bettertestspoc/config/model/function_javascript_model_gen.go @@ -10,22 +10,22 @@ import ( ) type FunctionJavascriptModel struct { - Arguments tfconfig.Variable `json:"arguments,omitempty"` - Comment tfconfig.Variable `json:"comment,omitempty"` - Database tfconfig.Variable `json:"database,omitempty"` - EnableConsoleOutput tfconfig.Variable `json:"enable_console_output,omitempty"` - FullyQualifiedName tfconfig.Variable `json:"fully_qualified_name,omitempty"` - FunctionDefinition tfconfig.Variable `json:"function_definition,omitempty"` - FunctionLanguage tfconfig.Variable `json:"function_language,omitempty"` - IsSecure tfconfig.Variable `json:"is_secure,omitempty"` - LogLevel tfconfig.Variable `json:"log_level,omitempty"` - MetricLevel tfconfig.Variable `json:"metric_level,omitempty"` - Name tfconfig.Variable `json:"name,omitempty"` - NullInputBehavior tfconfig.Variable 
`json:"null_input_behavior,omitempty"` - ReturnBehavior tfconfig.Variable `json:"return_behavior,omitempty"` - ReturnType tfconfig.Variable `json:"return_type,omitempty"` - Schema tfconfig.Variable `json:"schema,omitempty"` - TraceLevel tfconfig.Variable `json:"trace_level,omitempty"` + Arguments tfconfig.Variable `json:"arguments,omitempty"` + Comment tfconfig.Variable `json:"comment,omitempty"` + Database tfconfig.Variable `json:"database,omitempty"` + EnableConsoleOutput tfconfig.Variable `json:"enable_console_output,omitempty"` + FullyQualifiedName tfconfig.Variable `json:"fully_qualified_name,omitempty"` + FunctionDefinition tfconfig.Variable `json:"function_definition,omitempty"` + FunctionLanguage tfconfig.Variable `json:"function_language,omitempty"` + IsSecure tfconfig.Variable `json:"is_secure,omitempty"` + LogLevel tfconfig.Variable `json:"log_level,omitempty"` + MetricLevel tfconfig.Variable `json:"metric_level,omitempty"` + Name tfconfig.Variable `json:"name,omitempty"` + NullInputBehavior tfconfig.Variable `json:"null_input_behavior,omitempty"` + ReturnResultsBehavior tfconfig.Variable `json:"return_results_behavior,omitempty"` + ReturnType tfconfig.Variable `json:"return_type,omitempty"` + Schema tfconfig.Variable `json:"schema,omitempty"` + TraceLevel tfconfig.Variable `json:"trace_level,omitempty"` *config.ResourceModelMeta } @@ -128,8 +128,8 @@ func (f *FunctionJavascriptModel) WithNullInputBehavior(nullInputBehavior string return f } -func (f *FunctionJavascriptModel) WithReturnBehavior(returnBehavior string) *FunctionJavascriptModel { - f.ReturnBehavior = tfconfig.StringVariable(returnBehavior) +func (f *FunctionJavascriptModel) WithReturnResultsBehavior(returnResultsBehavior string) *FunctionJavascriptModel { + f.ReturnResultsBehavior = tfconfig.StringVariable(returnResultsBehavior) return f } @@ -212,8 +212,8 @@ func (f *FunctionJavascriptModel) WithNullInputBehaviorValue(value tfconfig.Vari return f } -func (f *FunctionJavascriptModel) 
WithReturnBehaviorValue(value tfconfig.Variable) *FunctionJavascriptModel { - f.ReturnBehavior = value +func (f *FunctionJavascriptModel) WithReturnResultsBehaviorValue(value tfconfig.Variable) *FunctionJavascriptModel { + f.ReturnResultsBehavior = value return f } diff --git a/pkg/acceptance/bettertestspoc/config/model/function_python_model_gen.go b/pkg/acceptance/bettertestspoc/config/model/function_python_model_gen.go index 9d0ffbd348..17ae5eccaf 100644 --- a/pkg/acceptance/bettertestspoc/config/model/function_python_model_gen.go +++ b/pkg/acceptance/bettertestspoc/config/model/function_python_model_gen.go @@ -27,7 +27,7 @@ type FunctionPythonModel struct { Name tfconfig.Variable `json:"name,omitempty"` NullInputBehavior tfconfig.Variable `json:"null_input_behavior,omitempty"` Packages tfconfig.Variable `json:"packages,omitempty"` - ReturnBehavior tfconfig.Variable `json:"return_behavior,omitempty"` + ReturnResultsBehavior tfconfig.Variable `json:"return_results_behavior,omitempty"` ReturnType tfconfig.Variable `json:"return_type,omitempty"` RuntimeVersion tfconfig.Variable `json:"runtime_version,omitempty"` Schema tfconfig.Variable `json:"schema,omitempty"` @@ -44,7 +44,6 @@ type FunctionPythonModel struct { func FunctionPython( resourceName string, database string, - functionDefinition string, handler string, name string, returnType string, @@ -53,7 +52,6 @@ func FunctionPython( ) *FunctionPythonModel { f := &FunctionPythonModel{ResourceModelMeta: config.Meta(resourceName, resources.FunctionPython)} f.WithDatabase(database) - f.WithFunctionDefinition(functionDefinition) f.WithHandler(handler) f.WithName(name) f.WithReturnType(returnType) @@ -64,7 +62,6 @@ func FunctionPython( func FunctionPythonWithDefaultMeta( database string, - functionDefinition string, handler string, name string, returnType string, @@ -73,7 +70,6 @@ func FunctionPythonWithDefaultMeta( ) *FunctionPythonModel { f := &FunctionPythonModel{ResourceModelMeta: 
config.DefaultMeta(resources.FunctionPython)} f.WithDatabase(database) - f.WithFunctionDefinition(functionDefinition) f.WithHandler(handler) f.WithName(name) f.WithReturnType(returnType) @@ -159,8 +155,8 @@ func (f *FunctionPythonModel) WithNullInputBehavior(nullInputBehavior string) *F // packages attribute type is not yet supported, so WithPackages can't be generated -func (f *FunctionPythonModel) WithReturnBehavior(returnBehavior string) *FunctionPythonModel { - f.ReturnBehavior = tfconfig.StringVariable(returnBehavior) +func (f *FunctionPythonModel) WithReturnResultsBehavior(returnResultsBehavior string) *FunctionPythonModel { + f.ReturnResultsBehavior = tfconfig.StringVariable(returnResultsBehavior) return f } @@ -275,8 +271,8 @@ func (f *FunctionPythonModel) WithPackagesValue(value tfconfig.Variable) *Functi return f } -func (f *FunctionPythonModel) WithReturnBehaviorValue(value tfconfig.Variable) *FunctionPythonModel { - f.ReturnBehavior = value +func (f *FunctionPythonModel) WithReturnResultsBehaviorValue(value tfconfig.Variable) *FunctionPythonModel { + f.ReturnResultsBehavior = value return f } diff --git a/pkg/acceptance/bettertestspoc/config/model/function_scala_model_gen.go b/pkg/acceptance/bettertestspoc/config/model/function_scala_model_gen.go index 017c397af3..070933fd4e 100644 --- a/pkg/acceptance/bettertestspoc/config/model/function_scala_model_gen.go +++ b/pkg/acceptance/bettertestspoc/config/model/function_scala_model_gen.go @@ -26,7 +26,7 @@ type FunctionScalaModel struct { Name tfconfig.Variable `json:"name,omitempty"` NullInputBehavior tfconfig.Variable `json:"null_input_behavior,omitempty"` Packages tfconfig.Variable `json:"packages,omitempty"` - ReturnBehavior tfconfig.Variable `json:"return_behavior,omitempty"` + ReturnResultsBehavior tfconfig.Variable `json:"return_results_behavior,omitempty"` ReturnType tfconfig.Variable `json:"return_type,omitempty"` RuntimeVersion tfconfig.Variable `json:"runtime_version,omitempty"` Schema 
tfconfig.Variable `json:"schema,omitempty"` @@ -44,7 +44,6 @@ type FunctionScalaModel struct { func FunctionScala( resourceName string, database string, - functionDefinition string, handler string, name string, returnType string, @@ -53,7 +52,6 @@ func FunctionScala( ) *FunctionScalaModel { f := &FunctionScalaModel{ResourceModelMeta: config.Meta(resourceName, resources.FunctionScala)} f.WithDatabase(database) - f.WithFunctionDefinition(functionDefinition) f.WithHandler(handler) f.WithName(name) f.WithReturnType(returnType) @@ -64,7 +62,6 @@ func FunctionScala( func FunctionScalaWithDefaultMeta( database string, - functionDefinition string, handler string, name string, returnType string, @@ -73,7 +70,6 @@ func FunctionScalaWithDefaultMeta( ) *FunctionScalaModel { f := &FunctionScalaModel{ResourceModelMeta: config.DefaultMeta(resources.FunctionScala)} f.WithDatabase(database) - f.WithFunctionDefinition(functionDefinition) f.WithHandler(handler) f.WithName(name) f.WithReturnType(returnType) @@ -154,8 +150,8 @@ func (f *FunctionScalaModel) WithNullInputBehavior(nullInputBehavior string) *Fu // packages attribute type is not yet supported, so WithPackages can't be generated -func (f *FunctionScalaModel) WithReturnBehavior(returnBehavior string) *FunctionScalaModel { - f.ReturnBehavior = tfconfig.StringVariable(returnBehavior) +func (f *FunctionScalaModel) WithReturnResultsBehavior(returnResultsBehavior string) *FunctionScalaModel { + f.ReturnResultsBehavior = tfconfig.StringVariable(returnResultsBehavior) return f } @@ -176,10 +172,7 @@ func (f *FunctionScalaModel) WithSchema(schema string) *FunctionScalaModel { // secrets attribute type is not yet supported, so WithSecrets can't be generated -func (f *FunctionScalaModel) WithTargetPath(targetPath string) *FunctionScalaModel { - f.TargetPath = tfconfig.StringVariable(targetPath) - return f -} +// target_path attribute type is not yet supported, so WithTargetPath can't be generated func (f *FunctionScalaModel) 
WithTraceLevel(traceLevel string) *FunctionScalaModel { f.TraceLevel = tfconfig.StringVariable(traceLevel) @@ -270,8 +263,8 @@ func (f *FunctionScalaModel) WithPackagesValue(value tfconfig.Variable) *Functio return f } -func (f *FunctionScalaModel) WithReturnBehaviorValue(value tfconfig.Variable) *FunctionScalaModel { - f.ReturnBehavior = value +func (f *FunctionScalaModel) WithReturnResultsBehaviorValue(value tfconfig.Variable) *FunctionScalaModel { + f.ReturnResultsBehavior = value return f } diff --git a/pkg/acceptance/bettertestspoc/config/model/function_sql_model_gen.go b/pkg/acceptance/bettertestspoc/config/model/function_sql_model_gen.go index 14cbbe9136..0733c2add4 100644 --- a/pkg/acceptance/bettertestspoc/config/model/function_sql_model_gen.go +++ b/pkg/acceptance/bettertestspoc/config/model/function_sql_model_gen.go @@ -10,22 +10,22 @@ import ( ) type FunctionSqlModel struct { - Arguments tfconfig.Variable `json:"arguments,omitempty"` - Comment tfconfig.Variable `json:"comment,omitempty"` - Database tfconfig.Variable `json:"database,omitempty"` - EnableConsoleOutput tfconfig.Variable `json:"enable_console_output,omitempty"` - FullyQualifiedName tfconfig.Variable `json:"fully_qualified_name,omitempty"` - FunctionDefinition tfconfig.Variable `json:"function_definition,omitempty"` - FunctionLanguage tfconfig.Variable `json:"function_language,omitempty"` - IsSecure tfconfig.Variable `json:"is_secure,omitempty"` - LogLevel tfconfig.Variable `json:"log_level,omitempty"` - MetricLevel tfconfig.Variable `json:"metric_level,omitempty"` - Name tfconfig.Variable `json:"name,omitempty"` - NullInputBehavior tfconfig.Variable `json:"null_input_behavior,omitempty"` - ReturnBehavior tfconfig.Variable `json:"return_behavior,omitempty"` - ReturnType tfconfig.Variable `json:"return_type,omitempty"` - Schema tfconfig.Variable `json:"schema,omitempty"` - TraceLevel tfconfig.Variable `json:"trace_level,omitempty"` + Arguments tfconfig.Variable `json:"arguments,omitempty"` + 
Comment tfconfig.Variable `json:"comment,omitempty"` + Database tfconfig.Variable `json:"database,omitempty"` + EnableConsoleOutput tfconfig.Variable `json:"enable_console_output,omitempty"` + FullyQualifiedName tfconfig.Variable `json:"fully_qualified_name,omitempty"` + FunctionDefinition tfconfig.Variable `json:"function_definition,omitempty"` + FunctionLanguage tfconfig.Variable `json:"function_language,omitempty"` + IsSecure tfconfig.Variable `json:"is_secure,omitempty"` + LogLevel tfconfig.Variable `json:"log_level,omitempty"` + MetricLevel tfconfig.Variable `json:"metric_level,omitempty"` + Name tfconfig.Variable `json:"name,omitempty"` + NullInputBehavior tfconfig.Variable `json:"null_input_behavior,omitempty"` + ReturnResultsBehavior tfconfig.Variable `json:"return_results_behavior,omitempty"` + ReturnType tfconfig.Variable `json:"return_type,omitempty"` + Schema tfconfig.Variable `json:"schema,omitempty"` + TraceLevel tfconfig.Variable `json:"trace_level,omitempty"` *config.ResourceModelMeta } @@ -128,8 +128,8 @@ func (f *FunctionSqlModel) WithNullInputBehavior(nullInputBehavior string) *Func return f } -func (f *FunctionSqlModel) WithReturnBehavior(returnBehavior string) *FunctionSqlModel { - f.ReturnBehavior = tfconfig.StringVariable(returnBehavior) +func (f *FunctionSqlModel) WithReturnResultsBehavior(returnResultsBehavior string) *FunctionSqlModel { + f.ReturnResultsBehavior = tfconfig.StringVariable(returnResultsBehavior) return f } @@ -212,8 +212,8 @@ func (f *FunctionSqlModel) WithNullInputBehaviorValue(value tfconfig.Variable) * return f } -func (f *FunctionSqlModel) WithReturnBehaviorValue(value tfconfig.Variable) *FunctionSqlModel { - f.ReturnBehavior = value +func (f *FunctionSqlModel) WithReturnResultsBehaviorValue(value tfconfig.Variable) *FunctionSqlModel { + f.ReturnResultsBehavior = value return f } diff --git a/pkg/acceptance/check_destroy.go b/pkg/acceptance/check_destroy.go index 404ad98917..5959720df0 100644 --- 
a/pkg/acceptance/check_destroy.go +++ b/pkg/acceptance/check_destroy.go @@ -67,9 +67,19 @@ func decodeSnowflakeId(rs *terraform.ResourceState, resource resources.Resource) switch resource { case resources.ExternalFunction: return sdk.NewSchemaObjectIdentifierFromFullyQualifiedName(rs.Primary.ID), nil - case resources.Function: + case resources.Function, + resources.FunctionJava, + resources.FunctionJavascript, + resources.FunctionPython, + resources.FunctionScala, + resources.FunctionSql: return sdk.ParseSchemaObjectIdentifierWithArguments(rs.Primary.ID) - case resources.Procedure: + case resources.Procedure, + resources.ProcedureJava, + resources.ProcedureJavascript, + resources.ProcedurePython, + resources.ProcedureScala, + resources.ProcedureSql: return sdk.NewSchemaObjectIdentifierFromFullyQualifiedName(rs.Primary.ID), nil default: return helpers.DecodeSnowflakeID(rs.Primary.ID), nil @@ -145,6 +155,21 @@ var showByIdFunctions = map[resources.Resource]showByIdFunc{ resources.Function: func(ctx context.Context, client *sdk.Client, id sdk.ObjectIdentifier) error { return runShowById(ctx, id, client.Functions.ShowByID) }, + resources.FunctionJava: func(ctx context.Context, client *sdk.Client, id sdk.ObjectIdentifier) error { + return runShowById(ctx, id, client.Functions.ShowByID) + }, + resources.FunctionJavascript: func(ctx context.Context, client *sdk.Client, id sdk.ObjectIdentifier) error { + return runShowById(ctx, id, client.Functions.ShowByID) + }, + resources.FunctionPython: func(ctx context.Context, client *sdk.Client, id sdk.ObjectIdentifier) error { + return runShowById(ctx, id, client.Functions.ShowByID) + }, + resources.FunctionScala: func(ctx context.Context, client *sdk.Client, id sdk.ObjectIdentifier) error { + return runShowById(ctx, id, client.Functions.ShowByID) + }, + resources.FunctionSql: func(ctx context.Context, client *sdk.Client, id sdk.ObjectIdentifier) error { + return runShowById(ctx, id, client.Functions.ShowByID) + }, 
resources.LegacyServiceUser: func(ctx context.Context, client *sdk.Client, id sdk.ObjectIdentifier) error { return runShowById(ctx, id, client.Users.ShowByID) }, @@ -181,6 +206,21 @@ var showByIdFunctions = map[resources.Resource]showByIdFunc{ resources.Procedure: func(ctx context.Context, client *sdk.Client, id sdk.ObjectIdentifier) error { return runShowById(ctx, id, client.Procedures.ShowByID) }, + resources.ProcedureJava: func(ctx context.Context, client *sdk.Client, id sdk.ObjectIdentifier) error { + return runShowById(ctx, id, client.Procedures.ShowByID) + }, + resources.ProcedureJavascript: func(ctx context.Context, client *sdk.Client, id sdk.ObjectIdentifier) error { + return runShowById(ctx, id, client.Procedures.ShowByID) + }, + resources.ProcedurePython: func(ctx context.Context, client *sdk.Client, id sdk.ObjectIdentifier) error { + return runShowById(ctx, id, client.Procedures.ShowByID) + }, + resources.ProcedureScala: func(ctx context.Context, client *sdk.Client, id sdk.ObjectIdentifier) error { + return runShowById(ctx, id, client.Procedures.ShowByID) + }, + resources.ProcedureSql: func(ctx context.Context, client *sdk.Client, id sdk.ObjectIdentifier) error { + return runShowById(ctx, id, client.Procedures.ShowByID) + }, resources.ResourceMonitor: func(ctx context.Context, client *sdk.Client, id sdk.ObjectIdentifier) error { return runShowById(ctx, id, client.ResourceMonitors.ShowByID) }, diff --git a/pkg/acceptance/helpers/function_client.go b/pkg/acceptance/helpers/function_client.go index 4d9bf35aaa..36c5ffcfb4 100644 --- a/pkg/acceptance/helpers/function_client.go +++ b/pkg/acceptance/helpers/function_client.go @@ -138,6 +138,26 @@ func (c *FunctionClient) CreateJava(t *testing.T) (*sdk.Function, func()) { return function, c.DropFunctionFunc(t, id) } +func (c *FunctionClient) CreateScalaStaged(t *testing.T, id sdk.SchemaObjectIdentifierWithArguments, dataType datatypes.DataType, importPath string, handler string) (*sdk.Function, func()) { + 
t.Helper() + ctx := context.Background() + + argName := "x" + argument := sdk.NewFunctionArgumentRequest(argName, dataType) + + request := sdk.NewCreateForScalaFunctionRequest(id.SchemaObjectId(), dataType, handler, "2.12"). + WithArguments([]sdk.FunctionArgumentRequest{*argument}). + WithImports([]sdk.FunctionImportRequest{*sdk.NewFunctionImportRequest().WithImport(importPath)}) + + err := c.client().CreateForScala(ctx, request) + require.NoError(t, err) + + function, err := c.client().ShowByID(ctx, id) + require.NoError(t, err) + + return function, c.DropFunctionFunc(t, id) +} + func (c *FunctionClient) CreateWithRequest(t *testing.T, id sdk.SchemaObjectIdentifierWithArguments, req *sdk.CreateForSQLFunctionRequest) *sdk.Function { t.Helper() ctx := context.Background() @@ -192,6 +212,18 @@ func (c *FunctionClient) SampleJavaDefinition(t *testing.T, className string, fu `, className, funcName, argName) } +func (c *FunctionClient) SampleJavaDefinitionNoArgs(t *testing.T, className string, funcName string) string { + t.Helper() + + return fmt.Sprintf(` + class %[1]s { + public static String %[2]s() { + return "hello"; + } + } +`, className, funcName) +} + func (c *FunctionClient) SampleJavascriptDefinition(t *testing.T, argName string) string { t.Helper() diff --git a/pkg/acceptance/helpers/function_setup_helpers.go b/pkg/acceptance/helpers/function_setup_helpers.go index 8f0447e443..d0f34d6768 100644 --- a/pkg/acceptance/helpers/function_setup_helpers.go +++ b/pkg/acceptance/helpers/function_setup_helpers.go @@ -15,8 +15,20 @@ import ( "github.com/stretchr/testify/require" ) +func (c *TestClient) CreateSampleJavaFunctionAndJarOnUserStage(t *testing.T) *TmpFunction { + t.Helper() + + return c.CreateSampleJavaFunctionAndJarInLocation(t, "@~") +} + +func (c *TestClient) CreateSampleJavaFunctionAndJarOnStage(t *testing.T, stage *sdk.Stage) *TmpFunction { + t.Helper() + + return c.CreateSampleJavaFunctionAndJarInLocation(t, stage.Location()) +} + // TODO [SNOW-1827324]: 
add TestClient ref to each specific client, so that we enhance specific client and not the base one -func (c *TestClient) CreateSampleJavaFunctionAndJar(t *testing.T) *TmpFunction { +func (c *TestClient) CreateSampleJavaFunctionAndJarInLocation(t *testing.T, stageLocation string) *TmpFunction { t.Helper() ctx := context.Background() @@ -32,7 +44,7 @@ func (c *TestClient) CreateSampleJavaFunctionAndJar(t *testing.T) *TmpFunction { handler := fmt.Sprintf("%s.%s", className, funcName) definition := c.Function.SampleJavaDefinition(t, className, funcName, argName) jarName := fmt.Sprintf("tf-%d-%s.jar", time.Now().Unix(), random.AlphaN(5)) - targetPath := fmt.Sprintf("@~/%s", jarName) + targetPath := fmt.Sprintf("%s/%s", stageLocation, jarName) request := sdk.NewCreateForJavaFunctionRequest(id.SchemaObjectId(), *returns, handler). WithArguments([]sdk.FunctionArgumentRequest{*argument}). @@ -42,19 +54,32 @@ func (c *TestClient) CreateSampleJavaFunctionAndJar(t *testing.T) *TmpFunction { err := c.context.client.Functions.CreateForJava(ctx, request) require.NoError(t, err) t.Cleanup(c.Function.DropFunctionFunc(t, id)) - t.Cleanup(c.Stage.RemoveFromUserStageFunc(t, jarName)) + t.Cleanup(c.Stage.RemoveFromStageFunc(t, stageLocation, jarName)) return &TmpFunction{ - FunctionId: id, - ClassName: className, - FuncName: funcName, - ArgName: argName, - ArgType: dataType, - JarName: jarName, + FunctionId: id, + ClassName: className, + FuncName: funcName, + ArgName: argName, + ArgType: dataType, + JarName: jarName, + StageLocation: stageLocation, } } -func (c *TestClient) CreateSampleJavaProcedureAndJar(t *testing.T) *TmpFunction { +func (c *TestClient) CreateSampleJavaProcedureAndJarOnUserStage(t *testing.T) *TmpFunction { + t.Helper() + + return c.CreateSampleJavaProcedureAndJarInLocation(t, "@~") +} + +func (c *TestClient) CreateSampleJavaProcedureAndJarOnStage(t *testing.T, stage *sdk.Stage) *TmpFunction { + t.Helper() + + return c.CreateSampleJavaProcedureAndJarInLocation(t, 
stage.Location()) +} + +func (c *TestClient) CreateSampleJavaProcedureAndJarInLocation(t *testing.T, stageLocation string) *TmpFunction { t.Helper() ctx := context.Background() @@ -70,7 +95,7 @@ func (c *TestClient) CreateSampleJavaProcedureAndJar(t *testing.T) *TmpFunction handler := fmt.Sprintf("%s.%s", className, funcName) definition := c.Procedure.SampleJavaDefinition(t, className, funcName, argName) jarName := fmt.Sprintf("tf-%d-%s.jar", time.Now().Unix(), random.AlphaN(5)) - targetPath := fmt.Sprintf("@~/%s", jarName) + targetPath := fmt.Sprintf("%s/%s", stageLocation, jarName) packages := []sdk.ProcedurePackageRequest{*sdk.NewProcedurePackageRequest("com.snowflake:snowpark:1.14.0")} request := sdk.NewCreateForJavaProcedureRequest(id.SchemaObjectId(), *returns, "11", packages, handler). @@ -81,15 +106,16 @@ func (c *TestClient) CreateSampleJavaProcedureAndJar(t *testing.T) *TmpFunction err := c.context.client.Procedures.CreateForJava(ctx, request) require.NoError(t, err) t.Cleanup(c.Procedure.DropProcedureFunc(t, id)) - t.Cleanup(c.Stage.RemoveFromUserStageFunc(t, jarName)) + t.Cleanup(c.Stage.RemoveFromStageFunc(t, stageLocation, jarName)) return &TmpFunction{ - FunctionId: id, - ClassName: className, - FuncName: funcName, - ArgName: argName, - ArgType: dataType, - JarName: jarName, + FunctionId: id, + ClassName: className, + FuncName: funcName, + ArgName: argName, + ArgType: dataType, + JarName: jarName, + StageLocation: stageLocation, } } @@ -121,30 +147,32 @@ func (c *TestClient) CreateSamplePythonFunctionAndModule(t *testing.T) *TmpFunct moduleFileName := filepath.Base(modulePath) return &TmpFunction{ - FunctionId: id, - ModuleName: strings.TrimSuffix(moduleFileName, ".py"), - FuncName: funcName, - ArgName: argName, - ArgType: dataType, + FunctionId: id, + ModuleName: strings.TrimSuffix(moduleFileName, ".py"), + FuncName: funcName, + ArgName: argName, + ArgType: dataType, + StageLocation: "@~", } } type TmpFunction struct { - FunctionId 
sdk.SchemaObjectIdentifierWithArguments - ClassName string - ModuleName string - FuncName string - ArgName string - ArgType datatypes.DataType - JarName string + FunctionId sdk.SchemaObjectIdentifierWithArguments + ClassName string + ModuleName string + FuncName string + ArgName string + ArgType datatypes.DataType + JarName string + StageLocation string } func (f *TmpFunction) JarLocation() string { - return fmt.Sprintf("@~/%s", f.JarName) + return fmt.Sprintf("%s/%s", f.StageLocation, f.JarName) } func (f *TmpFunction) PythonModuleLocation() string { - return fmt.Sprintf("@~/%s", f.PythonFileName()) + return fmt.Sprintf("%s/%s", f.StageLocation, f.PythonFileName()) } func (f *TmpFunction) PythonFileName() string { diff --git a/pkg/acceptance/helpers/ids_generator.go b/pkg/acceptance/helpers/ids_generator.go index ade93d46bc..46b0e85d80 100644 --- a/pkg/acceptance/helpers/ids_generator.go +++ b/pkg/acceptance/helpers/ids_generator.go @@ -4,7 +4,9 @@ import ( "strings" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/helpers/random" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/internal/collections" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk/datatypes" ) type IdsGenerator struct { @@ -97,6 +99,11 @@ func (c *IdsGenerator) NewSchemaObjectIdentifierWithArguments(name string, argum return sdk.NewSchemaObjectIdentifierWithArguments(c.SchemaId().DatabaseName(), c.SchemaId().Name(), name, arguments...) } +func (c *IdsGenerator) NewSchemaObjectIdentifierWithArgumentsNewDataTypes(name string, arguments ...datatypes.DataType) sdk.SchemaObjectIdentifierWithArguments { + legacyDataTypes := collections.Map(arguments, sdk.LegacyDataTypeFrom) + return sdk.NewSchemaObjectIdentifierWithArguments(c.SchemaId().DatabaseName(), c.SchemaId().Name(), name, legacyDataTypes...) 
+} + func (c *IdsGenerator) NewSchemaObjectIdentifierWithArgumentsInSchema(name string, schemaId sdk.DatabaseObjectIdentifier, argumentDataTypes ...sdk.DataType) sdk.SchemaObjectIdentifierWithArguments { return sdk.NewSchemaObjectIdentifierWithArgumentsInSchema(schemaId, name, argumentDataTypes...) } @@ -105,6 +112,11 @@ func (c *IdsGenerator) RandomSchemaObjectIdentifierWithArguments(arguments ...sd return sdk.NewSchemaObjectIdentifierWithArguments(c.SchemaId().DatabaseName(), c.SchemaId().Name(), c.Alpha(), arguments...) } +func (c *IdsGenerator) RandomSchemaObjectIdentifierWithArgumentsNewDataTypes(arguments ...datatypes.DataType) sdk.SchemaObjectIdentifierWithArguments { + legacyDataTypes := collections.Map(arguments, sdk.LegacyDataTypeFrom) + return sdk.NewSchemaObjectIdentifierWithArguments(c.SchemaId().DatabaseName(), c.SchemaId().Name(), c.Alpha(), legacyDataTypes...) +} + func (c *IdsGenerator) Alpha() string { return c.AlphaN(6) } diff --git a/pkg/acceptance/helpers/stage_client.go b/pkg/acceptance/helpers/stage_client.go index 60bac47c90..5a1176b314 100644 --- a/pkg/acceptance/helpers/stage_client.go +++ b/pkg/acceptance/helpers/stage_client.go @@ -126,6 +126,21 @@ func (c *StageClient) RemoveFromUserStageFunc(t *testing.T, pathOnStage string) } } +func (c *StageClient) RemoveFromStage(t *testing.T, stageLocation string, pathOnStage string) { + t.Helper() + ctx := context.Background() + + _, err := c.context.client.ExecForTests(ctx, fmt.Sprintf(`REMOVE %s/%s`, stageLocation, pathOnStage)) + require.NoError(t, err) +} + +func (c *StageClient) RemoveFromStageFunc(t *testing.T, stageLocation string, pathOnStage string) func() { + t.Helper() + return func() { + c.RemoveFromStage(t, stageLocation, pathOnStage) + } +} + func (c *StageClient) PutOnStageWithContent(t *testing.T, id sdk.SchemaObjectIdentifier, filename string, content string) { t.Helper() ctx := context.Background() diff --git a/pkg/resources/custom_diffs.go b/pkg/resources/custom_diffs.go index 
eb33b246b7..1ea9025ac5 100644 --- a/pkg/resources/custom_diffs.go +++ b/pkg/resources/custom_diffs.go @@ -284,3 +284,15 @@ func RecreateWhenResourceBoolFieldChangedExternally(boolField string, wantValue return nil } } + +// RecreateWhenResourceStringFieldChangedExternally recreates a resource when wantValue is different from value in field. +// TODO [SNOW-1850370]: merge with above? test. +func RecreateWhenResourceStringFieldChangedExternally(field string, wantValue string) schema.CustomizeDiffFunc { + return func(_ context.Context, diff *schema.ResourceDiff, _ any) error { + if o, n := diff.GetChange(field); n != nil && o != nil && o != "" && n.(string) != wantValue { + log.Printf("[DEBUG] new external value for %s: %s (want: %s), recreating the resource...\n", field, n.(string), wantValue) + return errors.Join(diff.SetNew(field, wantValue), diff.ForceNew(field)) + } + return nil + } +} diff --git a/pkg/resources/doc_helpers.go b/pkg/resources/doc_helpers.go index eb437015f9..51142971c6 100644 --- a/pkg/resources/doc_helpers.go +++ b/pkg/resources/doc_helpers.go @@ -4,8 +4,9 @@ import ( "fmt" "strings" - "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/internal/provider/docs" providerresources "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/provider/resources" + + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/internal/provider/docs" ) func possibleValuesListed[T ~string | ~int](values []T) string { diff --git a/pkg/resources/function.go b/pkg/resources/function.go index 38c6619a37..69e972f7d5 100644 --- a/pkg/resources/function.go +++ b/pkg/resources/function.go @@ -171,7 +171,7 @@ func Function() *schema.Resource { CreateContext: TrackingCreateWrapper(resources.Function, CreateContextFunction), ReadContext: TrackingReadWrapper(resources.Function, ReadContextFunction), UpdateContext: TrackingUpdateWrapper(resources.Function, UpdateContextFunction), - DeleteContext: TrackingDeleteWrapper(resources.Function, 
DeleteContextFunction), + DeleteContext: TrackingDeleteWrapper(resources.Function, DeleteFunction), CustomizeDiff: TrackingCustomDiffWrapper(resources.Function, customdiff.All( // TODO(SNOW-1348103): add `arguments` to ComputedIfAnyAttributeChanged. This can't be done now because this function compares values without diff suppress. @@ -722,20 +722,6 @@ func UpdateContextFunction(ctx context.Context, d *schema.ResourceData, meta int return ReadContextFunction(ctx, d, meta) } -func DeleteContextFunction(ctx context.Context, d *schema.ResourceData, meta interface{}) diag.Diagnostics { - client := meta.(*provider.Context).Client - - id, err := sdk.ParseSchemaObjectIdentifierWithArguments(d.Id()) - if err != nil { - return diag.FromErr(err) - } - if err := client.Functions.Drop(ctx, sdk.NewDropFunctionRequest(id).WithIfExists(true)); err != nil { - return diag.FromErr(err) - } - d.SetId("") - return nil -} - func parseFunctionArguments(d *schema.ResourceData) ([]sdk.FunctionArgumentRequest, diag.Diagnostics) { args := make([]sdk.FunctionArgumentRequest, 0) if v, ok := d.GetOk("arguments"); ok { diff --git a/pkg/resources/function_commons.go b/pkg/resources/function_commons.go index fd4d57913e..ea005da2c2 100644 --- a/pkg/resources/function_commons.go +++ b/pkg/resources/function_commons.go @@ -1,11 +1,18 @@ package resources import ( + "context" + "errors" "fmt" + "log" "slices" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/internal/collections" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/internal/provider" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/schemas" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk/datatypes" + "github.com/hashicorp/terraform-plugin-sdk/v2/diag" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" ) @@ -20,6 +27,7 @@ func init() { type functionSchemaDef struct { additionalArguments []string 
functionDefinitionDescription string + functionDefinitionRequired bool runtimeVersionRequired bool runtimeVersionDescription string importsDescription string @@ -38,6 +46,11 @@ func setUpFunctionSchema(definition functionSchemaDef) map[string]*schema.Schema } if v, ok := currentSchema["function_definition"]; ok && v != nil { v.Description = diffSuppressStatementFieldDescription(definition.functionDefinitionDescription) + if definition.functionDefinitionRequired { + v.Required = true + } else { + v.Optional = true + } } if v, ok := currentSchema["runtime_version"]; ok && v != nil { if definition.runtimeVersionRequired { @@ -75,7 +88,7 @@ var ( "arguments", "return_type", "null_input_behavior", - "return_behavior", + "return_results_behavior", "comment", "function_definition", "function_language", @@ -94,16 +107,18 @@ var ( "target_path", }, functionDefinitionDescription: functionDefinitionTemplate("Java", "https://docs.snowflake.com/en/developer-guide/udf/java/udf-java-introduction"), - runtimeVersionRequired: false, - runtimeVersionDescription: "Specifies the Java JDK runtime version to use. The supported versions of Java are 11.x and 17.x. If RUNTIME_VERSION is not set, Java JDK 11 is used.", - importsDescription: "The location (stage), path, and name of the file(s) to import. A file can be a JAR file or another type of file. If the file is a JAR file, it can contain one or more .class files and zero or more resource files. JNI (Java Native Interface) is not supported. Snowflake prohibits loading libraries that contain native code (as opposed to Java bytecode). Java UDFs can also read non-JAR files. For an example, see [Reading a file specified statically in IMPORTS](https://docs.snowflake.com/en/developer-guide/udf/java/udf-java-cookbook.html#label-reading-file-from-java-udf-imports). 
Consult the [docs](https://docs.snowflake.com/en/sql-reference/sql/create-function#java).", - packagesDescription: "The name and version number of Snowflake system packages required as dependencies. The value should be of the form `package_name:version_number`, where `package_name` is `snowflake_domain:package`.", - handlerDescription: "The name of the handler method or class. If the handler is for a scalar UDF, returning a non-tabular value, the HANDLER value should be a method name, as in the following form: `MyClass.myMethod`. If the handler is for a tabular UDF, the HANDLER value should be the name of a handler class.", - targetPathDescription: "The TARGET_PATH clause specifies the location to which Snowflake should write the compiled code (JAR file) after compiling the source code specified in the `function_definition`. If this clause is included, the user should manually remove the JAR file when it is no longer needed (typically when the Java UDF is dropped). If this clause is omitted, Snowflake re-compiles the source code each time the code is needed. The JAR file is not stored permanently, and the user does not need to clean up the JAR file. Snowflake returns an error if the TARGET_PATH matches an existing file; you cannot use TARGET_PATH to overwrite an existing file.", + // May be optional for java because if it is not set, describe return empty version. + runtimeVersionRequired: false, + runtimeVersionDescription: "Specifies the Java JDK runtime version to use. The supported versions of Java are 11.x and 17.x. If RUNTIME_VERSION is not set, Java JDK 11 is used.", + importsDescription: "The location (stage), path, and name of the file(s) to import. A file can be a JAR file or another type of file. If the file is a JAR file, it can contain one or more .class files and zero or more resource files. JNI (Java Native Interface) is not supported. Snowflake prohibits loading libraries that contain native code (as opposed to Java bytecode). 
Java UDFs can also read non-JAR files. For an example, see [Reading a file specified statically in IMPORTS](https://docs.snowflake.com/en/developer-guide/udf/java/udf-java-cookbook.html#label-reading-file-from-java-udf-imports). Consult the [docs](https://docs.snowflake.com/en/sql-reference/sql/create-function#java).", + packagesDescription: "The name and version number of Snowflake system packages required as dependencies. The value should be of the form `package_name:version_number`, where `package_name` is `snowflake_domain:package`.", + handlerDescription: "The name of the handler method or class. If the handler is for a scalar UDF, returning a non-tabular value, the HANDLER value should be a method name, as in the following form: `MyClass.myMethod`. If the handler is for a tabular UDF, the HANDLER value should be the name of a handler class.", + targetPathDescription: "The TARGET_PATH clause specifies the location to which Snowflake should write the compiled code (JAR file) after compiling the source code specified in the `function_definition`. If this clause is included, the user should manually remove the JAR file when it is no longer needed (typically when the Java UDF is dropped). If this clause is omitted, Snowflake re-compiles the source code each time the code is needed. The JAR file is not stored permanently, and the user does not need to clean up the JAR file. 
Snowflake returns an error if the TARGET_PATH matches an existing file; you cannot use TARGET_PATH to overwrite an existing file.", } javascriptFunctionSchemaDefinition = functionSchemaDef{ additionalArguments: []string{}, functionDefinitionDescription: functionDefinitionTemplate("JavaScript", "https://docs.snowflake.com/en/developer-guide/udf/javascript/udf-javascript-introduction"), + functionDefinitionRequired: true, } pythonFunctionSchemaDefinition = functionSchemaDef{ additionalArguments: []string{ @@ -143,6 +158,7 @@ var ( sqlFunctionSchemaDefinition = functionSchemaDef{ additionalArguments: []string{}, functionDefinitionDescription: functionDefinitionTemplate("SQL", "https://docs.snowflake.com/en/developer-guide/udf/sql/udf-sql-introduction"), + functionDefinitionRequired: true, } ) @@ -214,6 +230,11 @@ func functionBaseSchema() map[string]schema.Schema { DiffSuppressFunc: DiffSuppressDataTypes, Description: "The argument type.", }, + "arg_default_value": { + Type: schema.TypeString, + Optional: true, + Description: externalChangesNotDetectedFieldDescription("Optional default value for the argument. For text values use single quotes. Numeric values can be unquoted."), + }, }, }, Optional: true, @@ -228,28 +249,26 @@ func functionBaseSchema() map[string]schema.Schema { ValidateDiagFunc: IsDataTypeValid, DiffSuppressFunc: DiffSuppressDataTypes, Description: "Specifies the results returned by the UDF, which determines the UDF type. Use `` to create a scalar UDF that returns a single value with the specified data type. Use `TABLE (col_name col_data_type, ...)` to creates a table UDF that returns tabular results with the specified table column(s) and column type(s). 
For the details, consult the [docs](https://docs.snowflake.com/en/sql-reference/sql/create-function#all-languages).", - // TODO [SNOW-1348103]: adjust DiffSuppressFunc }, "null_input_behavior": { Type: schema.TypeString, Optional: true, ForceNew: true, ValidateDiagFunc: sdkValidation(sdk.ToNullInputBehavior), - DiffSuppressFunc: SuppressIfAny(NormalizeAndCompare(sdk.ToNullInputBehavior), IgnoreChangeToCurrentSnowflakeValueInShow("null_input_behavior")), + DiffSuppressFunc: SuppressIfAny(NormalizeAndCompare(sdk.ToNullInputBehavior)), // TODO [SNOW-1348103]: IgnoreChangeToCurrentSnowflakeValueInShow("null_input_behavior") but not in show Description: fmt.Sprintf("Specifies the behavior of the function when called with null inputs. Valid values are (case-insensitive): %s.", possibleValuesListed(sdk.AllAllowedNullInputBehaviors)), }, - "return_behavior": { + "return_results_behavior": { Type: schema.TypeString, Optional: true, ForceNew: true, ValidateDiagFunc: sdkValidation(sdk.ToReturnResultsBehavior), - DiffSuppressFunc: SuppressIfAny(NormalizeAndCompare(sdk.ToReturnResultsBehavior), IgnoreChangeToCurrentSnowflakeValueInShow("return_behavior")), + DiffSuppressFunc: SuppressIfAny(NormalizeAndCompare(sdk.ToReturnResultsBehavior)), // TODO [SNOW-1348103]: IgnoreChangeToCurrentSnowflakeValueInShow("return_results_behavior") but not in show Description: fmt.Sprintf("Specifies the behavior of the function when returning results. Valid values are (case-insensitive): %s.", possibleValuesListed(sdk.AllAllowedReturnResultsBehaviors)), }, "runtime_version": { Type: schema.TypeString, ForceNew: true, - // TODO [SNOW-1348103]: may be optional for java without consequence because if it is not set, the describe is not returning any version. 
}, "comment": { Type: schema.TypeString, @@ -258,12 +277,26 @@ func functionBaseSchema() map[string]schema.Schema { Default: "user-defined function", Description: "Specifies a comment for the function.", }, - // TODO [SNOW-1348103]: because of https://docs.snowflake.com/en/sql-reference/sql/create-function#id6, maybe it will be better to split into stage_name + target_path + // split into two because of https://docs.snowflake.com/en/sql-reference/sql/create-function#id6 + // TODO [SNOW-1348103]: add validations preventing setting improper stage and path "imports": { Type: schema.TypeSet, - Elem: &schema.Schema{Type: schema.TypeString}, Optional: true, ForceNew: true, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "stage_location": { + Type: schema.TypeString, + Required: true, + Description: "Stage location without leading `@`. To use your user's stage set this to `~`, otherwise pass fully qualified name of the stage (with every part contained in double quotes or use `snowflake_stage..fully_qualified_name` if you manage this stage through terraform).", + }, + "path_on_stage": { + Type: schema.TypeString, + Required: true, + Description: "Path for import on stage, without the leading `/`.", + }, + }, + }, }, // TODO [SNOW-1348103]: what do we do with the version "latest". "packages": { @@ -308,15 +341,28 @@ func functionBaseSchema() map[string]schema.Schema { }, Description: "Assigns the names of [secrets](https://docs.snowflake.com/en/sql-reference/sql/create-secret) to variables so that you can use the variables to reference the secrets when retrieving information from secrets in handler code. 
Secrets you specify here must be allowed by the [external access integration](https://docs.snowflake.com/en/sql-reference/sql/create-external-access-integration) specified as a value of this CREATE FUNCTION command’s EXTERNAL_ACCESS_INTEGRATIONS parameter.", }, - // TODO [SNOW-1348103]: because of https://docs.snowflake.com/en/sql-reference/sql/create-function#id6, maybe it will be better to split into stage + path "target_path": { - Type: schema.TypeString, + Type: schema.TypeSet, + MaxItems: 1, Optional: true, ForceNew: true, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "stage_location": { + Type: schema.TypeString, + Required: true, + Description: "Stage location without leading `@`. To use your user's stage set this to `~`, otherwise pass fully qualified name of the stage (with every part contained in double quotes or use `snowflake_stage..fully_qualified_name` if you manage this stage through terraform).", + }, + "path_on_stage": { + Type: schema.TypeString, + Required: true, + Description: "Path for import on stage, without the leading `/`.", + }, + }, + }, }, "function_definition": { Type: schema.TypeString, - Required: true, ForceNew: true, DiffSuppressFunc: DiffSuppressStatement, }, @@ -338,9 +384,204 @@ func functionBaseSchema() map[string]schema.Schema { Computed: true, Description: "Outputs the result of `SHOW PARAMETERS IN FUNCTION` for the given function.", Elem: &schema.Resource{ - Schema: functionParametersSchema, + Schema: schemas.ShowFunctionParametersSchema, }, }, FullyQualifiedNameAttributeName: *schemas.FullyQualifiedNameSchema, } } + +func DeleteFunction(ctx context.Context, d *schema.ResourceData, meta any) diag.Diagnostics { + client := meta.(*provider.Context).Client + + id, err := sdk.ParseSchemaObjectIdentifierWithArguments(d.Id()) + if err != nil { + return diag.FromErr(err) + } + + err = client.Functions.Drop(ctx, sdk.NewDropFunctionRequest(id).WithIfExists(true)) + if err != nil { + return diag.FromErr(err) + } + + 
d.SetId("") + return nil +} + +func parseFunctionArgumentsCommon(d *schema.ResourceData) ([]sdk.FunctionArgumentRequest, error) { + args := make([]sdk.FunctionArgumentRequest, 0) + if v, ok := d.GetOk("arguments"); ok { + for _, arg := range v.([]any) { + argName := arg.(map[string]any)["arg_name"].(string) + argDataType := arg.(map[string]any)["arg_data_type"].(string) + dataType, err := datatypes.ParseDataType(argDataType) + if err != nil { + return nil, err + } + request := sdk.NewFunctionArgumentRequest(argName, dataType) + + if argDefaultValue, defaultValuePresent := arg.(map[string]any)["arg_default_value"]; defaultValuePresent && argDefaultValue.(string) != "" { + request.WithDefaultValue(argDefaultValue.(string)) + } + + args = append(args, *request) + } + } + return args, nil +} + +func parseFunctionImportsCommon(d *schema.ResourceData) ([]sdk.FunctionImportRequest, error) { + imports := make([]sdk.FunctionImportRequest, 0) + if v, ok := d.GetOk("imports"); ok { + for _, imp := range v.(*schema.Set).List() { + stageLocation := imp.(map[string]any)["stage_location"].(string) + pathOnStage := imp.(map[string]any)["path_on_stage"].(string) + imports = append(imports, *sdk.NewFunctionImportRequest().WithImport(fmt.Sprintf("@%s/%s", stageLocation, pathOnStage))) + } + } + return imports, nil +} + +func parseFunctionTargetPathCommon(d *schema.ResourceData) (string, error) { + var tp string + if v, ok := d.GetOk("target_path"); ok { + for _, p := range v.(*schema.Set).List() { + stageLocation := p.(map[string]any)["stage_location"].(string) + pathOnStage := p.(map[string]any)["path_on_stage"].(string) + tp = fmt.Sprintf("@%s/%s", stageLocation, pathOnStage) + } + } + return tp, nil +} + +func parseFunctionReturnsCommon(d *schema.ResourceData) (*sdk.FunctionReturnsRequest, error) { + returnTypeRaw := d.Get("return_type").(string) + dataType, err := datatypes.ParseDataType(returnTypeRaw) + if err != nil { + return nil, err + } + returns := 
sdk.NewFunctionReturnsRequest() + switch v := dataType.(type) { + case *datatypes.TableDataType: + var cr []sdk.FunctionColumnRequest + for _, c := range v.Columns() { + cr = append(cr, *sdk.NewFunctionColumnRequest(c.ColumnName(), c.ColumnType())) + } + returns.WithTable(*sdk.NewFunctionReturnsTableRequest().WithColumns(cr)) + default: + returns.WithResultDataType(*sdk.NewFunctionReturnsResultDataTypeRequest(dataType)) + } + return returns, nil +} + +func setFunctionImportsInBuilder[T any](d *schema.ResourceData, setImports func([]sdk.FunctionImportRequest) T) error { + imports, err := parseFunctionImportsCommon(d) + if err != nil { + return err + } + setImports(imports) + return nil +} + +func setFunctionTargetPathInBuilder[T any](d *schema.ResourceData, setTargetPath func(string) T) error { + tp, err := parseFunctionTargetPathCommon(d) + if err != nil { + return err + } + if tp != "" { + setTargetPath(tp) + } + return nil +} + +func queryAllFunctionsDetailsCommon(ctx context.Context, d *schema.ResourceData, client *sdk.Client, id sdk.SchemaObjectIdentifierWithArguments) (*allFunctionDetailsCommon, diag.Diagnostics) { + functionDetails, err := client.Functions.DescribeDetails(ctx, id) + if err != nil { + if errors.Is(err, sdk.ErrObjectNotExistOrAuthorized) { + log.Printf("[DEBUG] function (%s) not found or we are not authorized. Err: %s", d.Id(), err) + d.SetId("") + return nil, diag.Diagnostics{ + diag.Diagnostic{ + Severity: diag.Warning, + Summary: "Failed to query function. Marking the resource as removed.", + Detail: fmt.Sprintf("Function: %s, Err: %s", id.FullyQualifiedName(), err), + }, + } + } + return nil, diag.FromErr(err) + } + function, err := client.Functions.ShowByID(ctx, id) + if err != nil { + if errors.Is(err, sdk.ErrObjectNotFound) { + d.SetId("") + return nil, diag.Diagnostics{ + diag.Diagnostic{ + Severity: diag.Warning, + Summary: "Failed to query function. 
Marking the resource as removed.", + Detail: fmt.Sprintf("Function: %s, Err: %s", id.FullyQualifiedName(), err), + }, + } + } + return nil, diag.FromErr(err) + } + functionParameters, err := client.Functions.ShowParameters(ctx, id) + if err != nil { + return nil, diag.FromErr(err) + } + return &allFunctionDetailsCommon{ + function: function, + functionDetails: functionDetails, + functionParameters: functionParameters, + }, nil +} + +type allFunctionDetailsCommon struct { + function *sdk.Function + functionDetails *sdk.FunctionDetails + functionParameters []*sdk.Parameter +} + +func readFunctionArgumentsCommon(d *schema.ResourceData, args []sdk.NormalizedArgument) error { + if len(args) == 0 { + // TODO [SNOW-1348103]: handle empty list + return nil + } + // We do it the unusual way because the default values are not returned by SF. + // We update what we have - leaving the defaults unchanged. + if currentArgs, ok := d.Get("arguments").([]map[string]any); !ok { + return fmt.Errorf("arguments must be a list") + } else { + for i, arg := range args { + currentArgs[i]["arg_name"] = arg.Name + currentArgs[i]["arg_data_type"] = arg.DataType.ToSql() + } + return d.Set("arguments", currentArgs) + } +} + +func readFunctionImportsCommon(d *schema.ResourceData, imports []sdk.NormalizedPath) error { + if len(imports) == 0 { + // don't do anything if imports not present + return nil + } + imps := collections.Map(imports, func(imp sdk.NormalizedPath) map[string]any { + return map[string]any{ + "stage_location": imp.StageLocation, + "path_on_stage": imp.PathOnStage, + } + }) + return d.Set("imports", imps) +} + +func readFunctionTargetPathCommon(d *schema.ResourceData, normalizedPath *sdk.NormalizedPath) error { + if normalizedPath == nil { + // don't do anything if imports not present + return nil + } + tp := make([]map[string]any, 1) + tp[0] = map[string]any{ + "stage_location": normalizedPath.StageLocation, + "path_on_stage": normalizedPath.PathOnStage, + } + return 
d.Set("target_path", tp) +} diff --git a/pkg/resources/function_java.go b/pkg/resources/function_java.go index 5e05d3007f..b1e60da7cf 100644 --- a/pkg/resources/function_java.go +++ b/pkg/resources/function_java.go @@ -2,11 +2,18 @@ package resources import ( "context" + "errors" + "fmt" + "reflect" "strings" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/helpers" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/internal/collections" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/internal/provider" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/provider/resources" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/schemas" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk/datatypes" "github.com/hashicorp/terraform-plugin-sdk/v2/diag" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/customdiff" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" @@ -17,15 +24,19 @@ func FunctionJava() *schema.Resource { CreateContext: TrackingCreateWrapper(resources.FunctionJava, CreateContextFunctionJava), ReadContext: TrackingReadWrapper(resources.FunctionJava, ReadContextFunctionJava), UpdateContext: TrackingUpdateWrapper(resources.FunctionJava, UpdateContextFunctionJava), - DeleteContext: TrackingDeleteWrapper(resources.FunctionJava, DeleteContextFunctionJava), + DeleteContext: TrackingDeleteWrapper(resources.FunctionJava, DeleteFunction), Description: "Resource used to manage java function objects. 
For more information, check [function documentation](https://docs.snowflake.com/en/sql-reference/sql/create-function).", CustomizeDiff: TrackingCustomDiffWrapper(resources.FunctionJava, customdiff.All( - // TODO[SNOW-1348103]: ComputedIfAnyAttributeChanged(javaFunctionSchema, ShowOutputAttributeName, ...), + // TODO [SNOW-1348103]: ComputedIfAnyAttributeChanged(javaFunctionSchema, ShowOutputAttributeName, ...), ComputedIfAnyAttributeChanged(javaFunctionSchema, FullyQualifiedNameAttributeName, "name"), ComputedIfAnyAttributeChanged(functionParametersSchema, ParametersAttributeName, collections.Map(sdk.AsStringList(sdk.AllFunctionParameters), strings.ToLower)...), functionParametersCustomDiff, - // TODO[SNOW-1348103]: recreate when type changed externally + // The language check is more for the future. + // Currently, almost all attributes are marked as forceNew. + // When language changes, these attributes also change, causing the object to recreate either way. + // The only potential option is java staged -> scala staged (however scala need runtime_version which may interfere). 
+ RecreateWhenResourceStringFieldChangedExternally("function_language", "JAVA"), )), Schema: collections.MergeMaps(javaFunctionSchema, functionParametersSchema), @@ -36,17 +47,155 @@ func FunctionJava() *schema.Resource { } func CreateContextFunctionJava(ctx context.Context, d *schema.ResourceData, meta any) diag.Diagnostics { - return nil + client := meta.(*provider.Context).Client + database := d.Get("database").(string) + sc := d.Get("schema").(string) + name := d.Get("name").(string) + + argumentRequests, err := parseFunctionArgumentsCommon(d) + if err != nil { + return diag.FromErr(err) + } + returns, err := parseFunctionReturnsCommon(d) + if err != nil { + return diag.FromErr(err) + } + handler := d.Get("handler").(string) + + argumentDataTypes := collections.Map(argumentRequests, func(r sdk.FunctionArgumentRequest) datatypes.DataType { return r.ArgDataType }) + id := sdk.NewSchemaObjectIdentifierWithArgumentsNormalized(database, sc, name, argumentDataTypes...) + request := sdk.NewCreateForJavaFunctionRequest(id.SchemaObjectId(), *returns, handler). 
+ WithArguments(argumentRequests) + + errs := errors.Join( + booleanStringAttributeCreateBuilder(d, "is_secure", request.WithSecure), + attributeMappedValueCreateBuilder[string](d, "null_input_behavior", request.WithNullInputBehavior, sdk.ToNullInputBehavior), + attributeMappedValueCreateBuilder[string](d, "return_results_behavior", request.WithReturnResultsBehavior, sdk.ToReturnResultsBehavior), + stringAttributeCreateBuilder(d, "runtime_version", request.WithRuntimeVersion), + // TODO [SNOW-1348103]: handle the rest of the attributes + // comment + setFunctionImportsInBuilder(d, request.WithImports), + // packages + // external_access_integrations + // secrets + setFunctionTargetPathInBuilder(d, request.WithTargetPath), + stringAttributeCreateBuilder(d, "function_definition", request.WithFunctionDefinitionWrapped), + ) + if errs != nil { + return diag.FromErr(errs) + } + + if err := client.Functions.CreateForJava(ctx, request); err != nil { + return diag.FromErr(err) + } + d.SetId(helpers.EncodeResourceIdentifier(id)) + + // parameters do not work in create function (query does not fail but parameters stay unchanged) + setRequest := sdk.NewFunctionSetRequest() + if parametersCreateDiags := handleFunctionParametersCreate(d, setRequest); len(parametersCreateDiags) > 0 { + return parametersCreateDiags + } + if !reflect.DeepEqual(*setRequest, *sdk.NewFunctionSetRequest()) { + err := client.Functions.Alter(ctx, sdk.NewAlterFunctionRequest(id).WithSet(*setRequest)) + if err != nil { + return diag.FromErr(err) + } + } + + return ReadContextFunctionJava(ctx, d, meta) } func ReadContextFunctionJava(ctx context.Context, d *schema.ResourceData, meta any) diag.Diagnostics { + client := meta.(*provider.Context).Client + id, err := sdk.ParseSchemaObjectIdentifierWithArguments(d.Id()) + if err != nil { + return diag.FromErr(err) + } + + allFunctionDetails, diags := queryAllFunctionsDetailsCommon(ctx, d, client, id) + if diags != nil { + return diags + } + + // TODO 
[SNOW-1348103]: handle external changes marking + // TODO [SNOW-1348103]: handle setting state to value from config + + errs := errors.Join( + // TODO [SNOW-1348103]: set the rest of the fields + // not reading is_secure on purpose (handled as external change to show output) + readFunctionArgumentsCommon(d, allFunctionDetails.functionDetails.NormalizedArguments), + d.Set("return_type", allFunctionDetails.functionDetails.ReturnDataType.ToSql()), + // not reading null_input_behavior on purpose (handled as external change to show output) + // not reading return_results_behavior on purpose (handled as external change to show output) + setOptionalFromStringPtr(d, "runtime_version", allFunctionDetails.functionDetails.RuntimeVersion), + // comment + readFunctionImportsCommon(d, allFunctionDetails.functionDetails.NormalizedImports), + // packages + setRequiredFromStringPtr(d, "handler", allFunctionDetails.functionDetails.Handler), + // external_access_integrations + // secrets + readFunctionTargetPathCommon(d, allFunctionDetails.functionDetails.NormalizedTargetPath), + setOptionalFromStringPtr(d, "function_definition", allFunctionDetails.functionDetails.Body), + d.Set("function_language", allFunctionDetails.functionDetails.Language), + + handleFunctionParameterRead(d, allFunctionDetails.functionParameters), + d.Set(FullyQualifiedNameAttributeName, id.FullyQualifiedName()), + d.Set(ShowOutputAttributeName, []map[string]any{schemas.FunctionToSchema(allFunctionDetails.function)}), + d.Set(ParametersAttributeName, []map[string]any{schemas.FunctionParametersToSchema(allFunctionDetails.functionParameters)}), + ) + if errs != nil { + return diag.FromErr(err) + } + return nil } func UpdateContextFunctionJava(ctx context.Context, d *schema.ResourceData, meta any) diag.Diagnostics { - return nil -} + client := meta.(*provider.Context).Client + id, err := sdk.ParseSchemaObjectIdentifierWithArguments(d.Id()) + if err != nil { + return diag.FromErr(err) + } -func 
DeleteContextFunctionJava(ctx context.Context, d *schema.ResourceData, meta any) diag.Diagnostics { - return nil + if d.HasChange("name") { + newId := sdk.NewSchemaObjectIdentifierWithArgumentsInSchema(id.SchemaId(), d.Get("name").(string), id.ArgumentDataTypes()...) + + err := client.Functions.Alter(ctx, sdk.NewAlterFunctionRequest(id).WithRenameTo(newId.SchemaObjectId())) + if err != nil { + return diag.FromErr(fmt.Errorf("error renaming function %v err = %w", d.Id(), err)) + } + + d.SetId(helpers.EncodeResourceIdentifier(newId)) + id = newId + } + + // Batch SET operations and UNSET operations + setRequest := sdk.NewFunctionSetRequest() + unsetRequest := sdk.NewFunctionUnsetRequest() + + // TODO [SNOW-1348103]: handle all updates + // secure + // external access integration + // secrets + // comment + + if updateParamDiags := handleFunctionParametersUpdate(d, setRequest, unsetRequest); len(updateParamDiags) > 0 { + return updateParamDiags + } + + // Apply SET and UNSET changes + if !reflect.DeepEqual(*setRequest, *sdk.NewFunctionSetRequest()) { + err := client.Functions.Alter(ctx, sdk.NewAlterFunctionRequest(id).WithSet(*setRequest)) + if err != nil { + return diag.FromErr(err) + } + } + if !reflect.DeepEqual(*unsetRequest, *sdk.NewFunctionUnsetRequest()) { + err := client.Functions.Alter(ctx, sdk.NewAlterFunctionRequest(id).WithUnset(*unsetRequest)) + if err != nil { + return diag.FromErr(err) + } + } + + return ReadContextFunctionJava(ctx, d, meta) } diff --git a/pkg/resources/function_java_acceptance_test.go b/pkg/resources/function_java_acceptance_test.go new file mode 100644 index 0000000000..b805187b69 --- /dev/null +++ b/pkg/resources/function_java_acceptance_test.go @@ -0,0 +1,435 @@ +package resources_test + +import ( + "fmt" + "testing" + "time" + + acc "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance" + r "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/resources" + + 
"github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/bettertestspoc/assert" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/bettertestspoc/assert/objectassert" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/bettertestspoc/assert/objectparametersassert" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/bettertestspoc/assert/resourceassert" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/bettertestspoc/assert/resourceparametersassert" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/bettertestspoc/assert/resourceshowoutputassert" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/bettertestspoc/config" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/bettertestspoc/config/model" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/helpers/random" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/testdatatypes" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/testenvs" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/helpers" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/provider/resources" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk" + "github.com/hashicorp/terraform-plugin-testing/helper/resource" + "github.com/hashicorp/terraform-plugin-testing/plancheck" + "github.com/hashicorp/terraform-plugin-testing/tfversion" +) + +// TODO [SNOW-1348103]: test import +// TODO [SNOW-1348103]: test external changes +// TODO [SNOW-1348103]: test changes of attributes separately + +func TestAcc_FunctionJava_InlineBasic(t *testing.T) { + className := "TestFunc" + funcName := "echoVarchar" + argName := "x" + dataType := testdatatypes.DataTypeVarchar_100 + + id := acc.TestClient().Ids.RandomSchemaObjectIdentifierWithArgumentsNewDataTypes(dataType) + idWithChangedNameButTheSameDataType 
:= acc.TestClient().Ids.RandomSchemaObjectIdentifierWithArgumentsNewDataTypes(dataType) + + handler := fmt.Sprintf("%s.%s", className, funcName) + definition := acc.TestClient().Function.SampleJavaDefinition(t, className, funcName, argName) + + functionModel := model.FunctionJavaBasicInline("w", id, dataType, handler, definition). + WithArgument(argName, dataType) + functionModelRenamed := model.FunctionJavaBasicInline("w", idWithChangedNameButTheSameDataType, dataType, handler, definition). + WithArgument(argName, dataType) + + resource.Test(t, resource.TestCase{ + ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, + TerraformVersionChecks: []tfversion.TerraformVersionCheck{ + tfversion.RequireAbove(tfversion.Version1_5_0), + }, + PreCheck: func() { acc.TestAccPreCheck(t) }, + CheckDestroy: acc.CheckDestroy(t, resources.FunctionJava), + Steps: []resource.TestStep{ + // CREATE BASIC + { + Config: config.FromModels(t, functionModel), + Check: assert.AssertThat(t, + resourceassert.FunctionJavaResource(t, functionModel.ResourceReference()). + HasNameString(id.Name()). + HasIsSecureString(r.BooleanDefault). + HasCommentString(sdk.DefaultFunctionComment). + HasImportsLength(0). + HasTargetPathEmpty(). + HasNoRuntimeVersion(). + HasFunctionDefinitionString(definition). + HasFunctionLanguageString("JAVA"). + HasFullyQualifiedNameString(id.FullyQualifiedName()), + resourceshowoutputassert.FunctionShowOutput(t, functionModel.ResourceReference()). 
+ HasIsSecure(false), + assert.Check(resource.TestCheckResourceAttr(functionModel.ResourceReference(), "arguments.0.arg_name", argName)), + assert.Check(resource.TestCheckResourceAttr(functionModel.ResourceReference(), "arguments.0.arg_data_type", dataType.ToSql())), + assert.Check(resource.TestCheckResourceAttr(functionModel.ResourceReference(), "arguments.0.arg_default_value", "")), + ), + }, + // RENAME + { + Config: config.FromModels(t, functionModelRenamed), + Check: assert.AssertThat(t, + resourceassert.FunctionJavaResource(t, functionModelRenamed.ResourceReference()). + HasNameString(idWithChangedNameButTheSameDataType.Name()). + HasFullyQualifiedNameString(idWithChangedNameButTheSameDataType.FullyQualifiedName()), + ), + }, + }, + }) +} + +func TestAcc_FunctionJava_InlineEmptyArgs(t *testing.T) { + className := "TestFunc" + funcName := "echoVarchar" + returnDataType := testdatatypes.DataTypeVarchar_100 + + id := acc.TestClient().Ids.RandomSchemaObjectIdentifierWithArgumentsNewDataTypes() + + handler := fmt.Sprintf("%s.%s", className, funcName) + definition := acc.TestClient().Function.SampleJavaDefinitionNoArgs(t, className, funcName) + + functionModel := model.FunctionJavaBasicInline("w", id, returnDataType, handler, definition) + + resource.Test(t, resource.TestCase{ + ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, + TerraformVersionChecks: []tfversion.TerraformVersionCheck{ + tfversion.RequireAbove(tfversion.Version1_5_0), + }, + PreCheck: func() { acc.TestAccPreCheck(t) }, + CheckDestroy: acc.CheckDestroy(t, resources.FunctionJava), + Steps: []resource.TestStep{ + // CREATE BASIC + { + Config: config.FromModels(t, functionModel), + Check: assert.AssertThat(t, + resourceassert.FunctionJavaResource(t, functionModel.ResourceReference()). + HasNameString(id.Name()). + HasFunctionDefinitionString(definition). + HasFunctionLanguageString("JAVA"). 
+ HasFullyQualifiedNameString(id.FullyQualifiedName()), + ), + }, + }, + }) +} + +func TestAcc_FunctionJava_InlineBasicDefaultArg(t *testing.T) { + className := "TestFunc" + funcName := "echoVarchar" + argName := "x" + dataType := testdatatypes.DataTypeVarchar_100 + defaultValue := "'hello'" + + id := acc.TestClient().Ids.RandomSchemaObjectIdentifierWithArgumentsNewDataTypes(dataType) + + handler := fmt.Sprintf("%s.%s", className, funcName) + definition := acc.TestClient().Function.SampleJavaDefinition(t, className, funcName, argName) + + functionModel := model.FunctionJavaBasicInline("w", id, dataType, handler, definition). + WithArgumentWithDefaultValue(argName, dataType, defaultValue) + + resource.Test(t, resource.TestCase{ + ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, + TerraformVersionChecks: []tfversion.TerraformVersionCheck{ + tfversion.RequireAbove(tfversion.Version1_5_0), + }, + PreCheck: func() { acc.TestAccPreCheck(t) }, + CheckDestroy: acc.CheckDestroy(t, resources.FunctionJava), + Steps: []resource.TestStep{ + // CREATE BASIC + { + Config: config.FromModels(t, functionModel), + Check: assert.AssertThat(t, + resourceassert.FunctionJavaResource(t, functionModel.ResourceReference()). + HasNameString(id.Name()). + HasFunctionDefinitionString(definition). + HasFunctionLanguageString("JAVA"). 
+ HasFullyQualifiedNameString(id.FullyQualifiedName()), + assert.Check(resource.TestCheckResourceAttr(functionModel.ResourceReference(), "arguments.0.arg_name", argName)), + assert.Check(resource.TestCheckResourceAttr(functionModel.ResourceReference(), "arguments.0.arg_data_type", dataType.ToSql())), + assert.Check(resource.TestCheckResourceAttr(functionModel.ResourceReference(), "arguments.0.arg_default_value", defaultValue)), + ), + }, + }, + }) +} + +func TestAcc_FunctionJava_InlineFull(t *testing.T) { + _ = testenvs.GetOrSkipTest(t, testenvs.EnableAcceptance) + acc.TestAccPreCheck(t) + + stage, stageCleanup := acc.TestClient().Stage.CreateStage(t) + t.Cleanup(stageCleanup) + + className := "TestFunc" + funcName := "echoVarchar" + argName := "x" + dataType := testdatatypes.DataTypeVarchar_100 + + id := acc.TestClient().Ids.RandomSchemaObjectIdentifierWithArgumentsNewDataTypes(dataType) + + handler := fmt.Sprintf("%s.%s", className, funcName) + definition := acc.TestClient().Function.SampleJavaDefinition(t, className, funcName, argName) + // TODO [SNOW-1850370]: extract to helper + jarName := fmt.Sprintf("tf-%d-%s.jar", time.Now().Unix(), random.AlphaN(5)) + + functionModel := model.FunctionJavaBasicInline("w", id, dataType, handler, definition). + WithArgument(argName, dataType). + WithTargetPathParts(stage.ID().FullyQualifiedName(), jarName). + WithRuntimeVersion("11") + + resource.Test(t, resource.TestCase{ + ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, + TerraformVersionChecks: []tfversion.TerraformVersionCheck{ + tfversion.RequireAbove(tfversion.Version1_5_0), + }, + PreCheck: func() { acc.TestAccPreCheck(t) }, + CheckDestroy: acc.CheckDestroy(t, resources.FunctionJava), + Steps: []resource.TestStep{ + // CREATE BASIC + { + Config: config.FromModels(t, functionModel), + Check: assert.AssertThat(t, + resourceassert.FunctionJavaResource(t, functionModel.ResourceReference()). + HasNameString(id.Name()). + HasIsSecureString(r.BooleanDefault). 
+ HasCommentString(sdk.DefaultFunctionComment). + HasImportsLength(0). + HasRuntimeVersionString("11"). + HasFunctionDefinitionString(definition). + HasFunctionLanguageString("JAVA"). + HasFullyQualifiedNameString(id.FullyQualifiedName()), + assert.Check(resource.TestCheckResourceAttr(functionModel.ResourceReference(), "target_path.0.stage_location", stage.ID().FullyQualifiedName())), + assert.Check(resource.TestCheckResourceAttr(functionModel.ResourceReference(), "target_path.0.path_on_stage", jarName)), + resourceshowoutputassert.FunctionShowOutput(t, functionModel.ResourceReference()). + HasIsSecure(false), + ), + }, + }, + }) +} + +func TestAcc_FunctionJava_StagedBasic(t *testing.T) { + _ = testenvs.GetOrSkipTest(t, testenvs.EnableAcceptance) + acc.TestAccPreCheck(t) + + stage, stageCleanup := acc.TestClient().Stage.CreateStage(t) + t.Cleanup(stageCleanup) + + tmpJavaFunction := acc.TestClient().CreateSampleJavaFunctionAndJarOnStage(t, stage) + + dataType := tmpJavaFunction.ArgType + id := acc.TestClient().Ids.RandomSchemaObjectIdentifierWithArgumentsNewDataTypes(dataType) + + argName := "x" + handler := tmpJavaFunction.JavaHandler() + + functionModel := model.FunctionJavaBasicStaged("w", id, dataType, handler, stage.ID().FullyQualifiedName(), tmpJavaFunction.JarName). + WithArgument(argName, dataType) + + resource.Test(t, resource.TestCase{ + ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, + TerraformVersionChecks: []tfversion.TerraformVersionCheck{ + tfversion.RequireAbove(tfversion.Version1_5_0), + }, + PreCheck: func() { acc.TestAccPreCheck(t) }, + CheckDestroy: acc.CheckDestroy(t, resources.FunctionJava), + Steps: []resource.TestStep{ + // CREATE BASIC + { + Config: config.FromModels(t, functionModel), + Check: assert.AssertThat(t, + resourceassert.FunctionJavaResource(t, functionModel.ResourceReference()). + HasNameString(id.Name()). + HasIsSecureString(r.BooleanDefault). + HasCommentString(sdk.DefaultFunctionComment). 
+ HasImportsLength(1). + HasNoFunctionDefinition(). + HasFunctionLanguageString("JAVA"). + HasFullyQualifiedNameString(id.FullyQualifiedName()), + assert.Check(resource.TestCheckResourceAttr(functionModel.ResourceReference(), "imports.0.stage_location", stage.ID().FullyQualifiedName())), + assert.Check(resource.TestCheckResourceAttr(functionModel.ResourceReference(), "imports.0.path_on_stage", tmpJavaFunction.JarName)), + resourceshowoutputassert.FunctionShowOutput(t, functionModel.ResourceReference()). + HasIsSecure(false), + ), + }, + }, + }) +} + +func TestAcc_FunctionJava_AllParameters(t *testing.T) { + className := "TestFunc" + funcName := "echoVarchar" + argName := "x" + dataType := testdatatypes.DataTypeVarchar_100 + id := acc.TestClient().Ids.RandomSchemaObjectIdentifierWithArgumentsNewDataTypes(dataType) + + handler := fmt.Sprintf("%s.%s", className, funcName) + definition := acc.TestClient().Function.SampleJavaDefinition(t, className, funcName, argName) + + functionModel := model.FunctionJavaBasicInline("w", id, dataType, handler, definition). + WithArgument(argName, dataType) + functionModelWithAllParametersSet := model.FunctionJavaBasicInline("w", id, dataType, handler, definition). + WithArgument(argName, dataType). + WithEnableConsoleOutput(true). + WithLogLevel(string(sdk.LogLevelWarn)). + WithMetricLevel(string(sdk.MetricLevelAll)). + WithTraceLevel(string(sdk.TraceLevelAlways)) + + resource.Test(t, resource.TestCase{ + ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, + TerraformVersionChecks: []tfversion.TerraformVersionCheck{ + tfversion.RequireAbove(tfversion.Version1_5_0), + }, + PreCheck: func() { acc.TestAccPreCheck(t) }, + CheckDestroy: acc.CheckDestroy(t, resources.FunctionJava), + Steps: []resource.TestStep{ + // create with default values for all the parameters + { + Config: config.FromModels(t, functionModel), + Check: assert.AssertThat(t, + objectparametersassert.FunctionParameters(t, id). + HasAllDefaults(). 
+ HasAllDefaultsExplicit(), + resourceparametersassert.FunctionResourceParameters(t, functionModel.ResourceReference()). + HasAllDefaults(), + ), + }, + // import when no parameter set + { + ResourceName: functionModel.ResourceReference(), + ImportState: true, + ImportStateCheck: assert.AssertThatImport(t, + resourceparametersassert.ImportedFunctionResourceParameters(t, helpers.EncodeResourceIdentifier(id)). + HasAllDefaults(), + ), + }, + // set all parameters + { + Config: config.FromModels(t, functionModelWithAllParametersSet), + Check: assert.AssertThat(t, + objectparametersassert.FunctionParameters(t, id). + HasEnableConsoleOutput(true). + HasLogLevel(sdk.LogLevelWarn). + HasMetricLevel(sdk.MetricLevelAll). + HasTraceLevel(sdk.TraceLevelAlways), + resourceparametersassert.FunctionResourceParameters(t, functionModelWithAllParametersSet.ResourceReference()). + HasEnableConsoleOutput(true). + HasLogLevel(sdk.LogLevelWarn). + HasMetricLevel(sdk.MetricLevelAll). + HasTraceLevel(sdk.TraceLevelAlways), + ), + }, + // import when all parameters set + { + ResourceName: functionModelWithAllParametersSet.ResourceReference(), + ImportState: true, + ImportStateCheck: assert.AssertThatImport(t, + resourceparametersassert.ImportedFunctionResourceParameters(t, helpers.EncodeResourceIdentifier(id)). + HasEnableConsoleOutput(true). + HasLogLevel(sdk.LogLevelWarn). + HasMetricLevel(sdk.MetricLevelAll). + HasTraceLevel(sdk.TraceLevelAlways), + ), + }, + // unset all the parameters + { + Config: config.FromModels(t, functionModel), + Check: assert.AssertThat(t, + objectparametersassert.FunctionParameters(t, id). + HasAllDefaults(). + HasAllDefaultsExplicit(), + resourceparametersassert.FunctionResourceParameters(t, functionModel.ResourceReference()). 
+ HasAllDefaults(), + ), + }, + // destroy + { + Config: config.FromModels(t, functionModel), + Destroy: true, + }, + // create with all parameters set + { + Config: config.FromModels(t, functionModelWithAllParametersSet), + Check: assert.AssertThat(t, + objectparametersassert.FunctionParameters(t, id). + HasEnableConsoleOutput(true). + HasLogLevel(sdk.LogLevelWarn). + HasMetricLevel(sdk.MetricLevelAll). + HasTraceLevel(sdk.TraceLevelAlways), + resourceparametersassert.FunctionResourceParameters(t, functionModelWithAllParametersSet.ResourceReference()). + HasEnableConsoleOutput(true). + HasLogLevel(sdk.LogLevelWarn). + HasMetricLevel(sdk.MetricLevelAll). + HasTraceLevel(sdk.TraceLevelAlways), + ), + }, + }, + }) +} + +func TestAcc_FunctionJava_handleExternalLanguageChange(t *testing.T) { + _ = testenvs.GetOrSkipTest(t, testenvs.EnableAcceptance) + acc.TestAccPreCheck(t) + + tmpJavaFunction := acc.TestClient().CreateSampleJavaFunctionAndJarOnUserStage(t) + + dataType := tmpJavaFunction.ArgType + id := acc.TestClient().Ids.RandomSchemaObjectIdentifierWithArgumentsNewDataTypes(dataType) + + argName := "x" + handler := tmpJavaFunction.JavaHandler() + + functionModel := model.FunctionJavaBasicStaged("w", id, dataType, handler, "~", tmpJavaFunction.JarName). 
+ WithArgument(argName, dataType) + + resource.Test(t, resource.TestCase{ + ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, + TerraformVersionChecks: []tfversion.TerraformVersionCheck{ + tfversion.RequireAbove(tfversion.Version1_5_0), + }, + PreCheck: func() { acc.TestAccPreCheck(t) }, + CheckDestroy: acc.CheckDestroy(t, resources.FunctionJava), + Steps: []resource.TestStep{ + { + Config: config.FromModels(t, functionModel), + Check: assert.AssertThat(t, + objectassert.Function(t, id).HasLanguage("JAVA"), + resourceassert.FunctionJavaResource(t, functionModel.ResourceReference()).HasNameString(id.Name()).HasFunctionLanguageString("JAVA"), + resourceshowoutputassert.FunctionShowOutput(t, functionModel.ResourceReference()).HasLanguage("JAVA"), + ), + }, + // change type externally by creating a new function with the exact same id but using different language + { + PreConfig: func() { + acc.TestClient().Function.DropFunctionFunc(t, id)() + acc.TestClient().Function.CreateScalaStaged(t, id, dataType, tmpJavaFunction.JarLocation(), handler) + objectassert.Function(t, id).HasLanguage("SCALA") + }, + Config: config.FromModels(t, functionModel), + ConfigPlanChecks: resource.ConfigPlanChecks{ + PreApply: []plancheck.PlanCheck{ + plancheck.ExpectResourceAction(functionModel.ResourceReference(), plancheck.ResourceActionDestroyBeforeCreate), + }, + }, + Check: assert.AssertThat(t, + objectassert.Function(t, id).HasLanguage("JAVA"), + resourceassert.FunctionJavaResource(t, functionModel.ResourceReference()).HasNameString(id.Name()).HasFunctionLanguageString("JAVA"), + resourceshowoutputassert.FunctionShowOutput(t, functionModel.ResourceReference()).HasLanguage("JAVA"), + ), + }, + }, + }) +} diff --git a/pkg/resources/function_javascript.go b/pkg/resources/function_javascript.go index f1b3e17e2a..0ba7e955b7 100644 --- a/pkg/resources/function_javascript.go +++ b/pkg/resources/function_javascript.go @@ -17,7 +17,7 @@ func FunctionJavascript() *schema.Resource { 
CreateContext: TrackingCreateWrapper(resources.FunctionJavascript, CreateContextFunctionJavascript), ReadContext: TrackingReadWrapper(resources.FunctionJavascript, ReadContextFunctionJavascript), UpdateContext: TrackingUpdateWrapper(resources.FunctionJavascript, UpdateContextFunctionJavascript), - DeleteContext: TrackingDeleteWrapper(resources.FunctionJavascript, DeleteContextFunctionJavascript), + DeleteContext: TrackingDeleteWrapper(resources.FunctionJavascript, DeleteFunction), Description: "Resource used to manage javascript function objects. For more information, check [function documentation](https://docs.snowflake.com/en/sql-reference/sql/create-function).", CustomizeDiff: TrackingCustomDiffWrapper(resources.FunctionJavascript, customdiff.All( diff --git a/pkg/resources/function_parameters.go b/pkg/resources/function_parameters.go index bccbe0666a..3ff28095b5 100644 --- a/pkg/resources/function_parameters.go +++ b/pkg/resources/function_parameters.go @@ -80,16 +80,16 @@ func handleFunctionParameterRead(d *schema.ResourceData, functionParameters []*s } // They do not work in create, that's why are set in alter -func handleFunctionParametersCreate(d *schema.ResourceData, alterOpts *sdk.FunctionSet) diag.Diagnostics { +func handleFunctionParametersCreate(d *schema.ResourceData, set *sdk.FunctionSetRequest) diag.Diagnostics { return JoinDiags( - handleParameterCreate(d, sdk.FunctionParameterEnableConsoleOutput, &alterOpts.EnableConsoleOutput), - handleParameterCreateWithMapping(d, sdk.FunctionParameterLogLevel, &alterOpts.LogLevel, stringToStringEnumProvider(sdk.ToLogLevel)), - handleParameterCreateWithMapping(d, sdk.FunctionParameterMetricLevel, &alterOpts.MetricLevel, stringToStringEnumProvider(sdk.ToMetricLevel)), - handleParameterCreateWithMapping(d, sdk.FunctionParameterTraceLevel, &alterOpts.TraceLevel, stringToStringEnumProvider(sdk.ToTraceLevel)), + handleParameterCreate(d, sdk.FunctionParameterEnableConsoleOutput, &set.EnableConsoleOutput), + 
handleParameterCreateWithMapping(d, sdk.FunctionParameterLogLevel, &set.LogLevel, stringToStringEnumProvider(sdk.ToLogLevel)), + handleParameterCreateWithMapping(d, sdk.FunctionParameterMetricLevel, &set.MetricLevel, stringToStringEnumProvider(sdk.ToMetricLevel)), + handleParameterCreateWithMapping(d, sdk.FunctionParameterTraceLevel, &set.TraceLevel, stringToStringEnumProvider(sdk.ToTraceLevel)), ) } -func handleFunctionParametersUpdate(d *schema.ResourceData, set *sdk.FunctionSet, unset *sdk.FunctionUnset) diag.Diagnostics { +func handleFunctionParametersUpdate(d *schema.ResourceData, set *sdk.FunctionSetRequest, unset *sdk.FunctionUnsetRequest) diag.Diagnostics { return JoinDiags( handleParameterUpdate(d, sdk.FunctionParameterEnableConsoleOutput, &set.EnableConsoleOutput, &unset.EnableConsoleOutput), handleParameterUpdateWithMapping(d, sdk.FunctionParameterLogLevel, &set.LogLevel, &unset.LogLevel, stringToStringEnumProvider(sdk.ToLogLevel)), diff --git a/pkg/resources/function_python.go b/pkg/resources/function_python.go index e270f80ef6..cc6c137aff 100644 --- a/pkg/resources/function_python.go +++ b/pkg/resources/function_python.go @@ -17,7 +17,7 @@ func FunctionPython() *schema.Resource { CreateContext: TrackingCreateWrapper(resources.FunctionPython, CreateContextFunctionPython), ReadContext: TrackingReadWrapper(resources.FunctionPython, ReadContextFunctionPython), UpdateContext: TrackingUpdateWrapper(resources.FunctionPython, UpdateContextFunctionPython), - DeleteContext: TrackingDeleteWrapper(resources.FunctionPython, DeleteContextFunctionPython), + DeleteContext: TrackingDeleteWrapper(resources.FunctionPython, DeleteFunction), Description: "Resource used to manage python function objects. 
For more information, check [function documentation](https://docs.snowflake.com/en/sql-reference/sql/create-function).", CustomizeDiff: TrackingCustomDiffWrapper(resources.FunctionPython, customdiff.All( diff --git a/pkg/resources/function_scala.go b/pkg/resources/function_scala.go index 2c3adf0bc3..ff2bded481 100644 --- a/pkg/resources/function_scala.go +++ b/pkg/resources/function_scala.go @@ -17,7 +17,7 @@ func FunctionScala() *schema.Resource { CreateContext: TrackingCreateWrapper(resources.FunctionScala, CreateContextFunctionScala), ReadContext: TrackingReadWrapper(resources.FunctionScala, ReadContextFunctionScala), UpdateContext: TrackingUpdateWrapper(resources.FunctionScala, UpdateContextFunctionScala), - DeleteContext: TrackingDeleteWrapper(resources.FunctionScala, DeleteContextFunctionScala), + DeleteContext: TrackingDeleteWrapper(resources.FunctionScala, DeleteFunction), Description: "Resource used to manage scala function objects. For more information, check [function documentation](https://docs.snowflake.com/en/sql-reference/sql/create-function).", CustomizeDiff: TrackingCustomDiffWrapper(resources.FunctionScala, customdiff.All( diff --git a/pkg/resources/function_sql.go b/pkg/resources/function_sql.go index 48ea385f71..cd8cb31dc8 100644 --- a/pkg/resources/function_sql.go +++ b/pkg/resources/function_sql.go @@ -17,7 +17,7 @@ func FunctionSql() *schema.Resource { CreateContext: TrackingCreateWrapper(resources.FunctionSql, CreateContextFunctionSql), ReadContext: TrackingReadWrapper(resources.FunctionSql, ReadContextFunctionSql), UpdateContext: TrackingUpdateWrapper(resources.FunctionSql, UpdateContextFunctionSql), - DeleteContext: TrackingDeleteWrapper(resources.FunctionSql, DeleteContextFunctionSql), + DeleteContext: TrackingDeleteWrapper(resources.FunctionSql, DeleteFunction), Description: "Resource used to manage sql function objects. 
For more information, check [function documentation](https://docs.snowflake.com/en/sql-reference/sql/create-function).", CustomizeDiff: TrackingCustomDiffWrapper(resources.FunctionSql, customdiff.All( diff --git a/pkg/resources/procedure.go b/pkg/resources/procedure.go index 8665f71d09..fa986ae8f5 100644 --- a/pkg/resources/procedure.go +++ b/pkg/resources/procedure.go @@ -186,7 +186,7 @@ func Procedure() *schema.Resource { CreateContext: TrackingCreateWrapper(resources.Procedure, CreateContextProcedure), ReadContext: TrackingReadWrapper(resources.Procedure, ReadContextProcedure), UpdateContext: TrackingUpdateWrapper(resources.Procedure, UpdateContextProcedure), - DeleteContext: TrackingDeleteWrapper(resources.Procedure, DeleteContextProcedure), + DeleteContext: TrackingDeleteWrapper(resources.Procedure, DeleteProcedure), // TODO(SNOW-1348106): add `arguments` to ComputedIfAnyAttributeChanged for FullyQualifiedNameAttributeName. // This can't be done now because this function compares values without diff suppress. 
@@ -714,20 +714,6 @@ func UpdateContextProcedure(ctx context.Context, d *schema.ResourceData, meta in return ReadContextProcedure(ctx, d, meta) } -func DeleteContextProcedure(ctx context.Context, d *schema.ResourceData, meta interface{}) diag.Diagnostics { - client := meta.(*provider.Context).Client - - id, err := sdk.ParseSchemaObjectIdentifierWithArguments(d.Id()) - if err != nil { - return diag.FromErr(err) - } - if err := client.Procedures.Drop(ctx, sdk.NewDropProcedureRequest(id).WithIfExists(true)); err != nil { - return diag.FromErr(err) - } - d.SetId("") - return nil -} - func getProcedureArguments(d *schema.ResourceData) ([]sdk.ProcedureArgumentRequest, diag.Diagnostics) { args := make([]sdk.ProcedureArgumentRequest, 0) if v, ok := d.GetOk("arguments"); ok { diff --git a/pkg/resources/procedure_commons.go b/pkg/resources/procedure_commons.go index 88e815978b..163a33da0f 100644 --- a/pkg/resources/procedure_commons.go +++ b/pkg/resources/procedure_commons.go @@ -1,11 +1,14 @@ package resources import ( + "context" "fmt" "slices" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/internal/provider" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/schemas" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk" + "github.com/hashicorp/terraform-plugin-sdk/v2/diag" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" ) @@ -335,9 +338,25 @@ func procedureBaseSchema() map[string]schema.Schema { Computed: true, Description: "Outputs the result of `SHOW PARAMETERS IN PROCEDURE` for the given procedure.", Elem: &schema.Resource{ - Schema: procedureParametersSchema, + Schema: schemas.ShowProcedureParametersSchema, }, }, FullyQualifiedNameAttributeName: *schemas.FullyQualifiedNameSchema, } } + +func DeleteProcedure(ctx context.Context, d *schema.ResourceData, meta any) diag.Diagnostics { + client := meta.(*provider.Context).Client + + id, err := sdk.ParseSchemaObjectIdentifierWithArguments(d.Id()) + if err != nil { + return 
diag.FromErr(err) + } + + if err := client.Procedures.Drop(ctx, sdk.NewDropProcedureRequest(id).WithIfExists(true)); err != nil { + return diag.FromErr(err) + } + + d.SetId("") + return nil +} diff --git a/pkg/resources/procedure_java.go b/pkg/resources/procedure_java.go index 8019e72689..1804780de9 100644 --- a/pkg/resources/procedure_java.go +++ b/pkg/resources/procedure_java.go @@ -17,7 +17,7 @@ func ProcedureJava() *schema.Resource { CreateContext: TrackingCreateWrapper(resources.ProcedureJava, CreateContextProcedureJava), ReadContext: TrackingReadWrapper(resources.ProcedureJava, ReadContextProcedureJava), UpdateContext: TrackingUpdateWrapper(resources.ProcedureJava, UpdateContextProcedureJava), - DeleteContext: TrackingDeleteWrapper(resources.ProcedureJava, DeleteContextProcedureJava), + DeleteContext: TrackingDeleteWrapper(resources.ProcedureJava, DeleteProcedure), Description: "Resource used to manage java procedure objects. For more information, check [procedure documentation](https://docs.snowflake.com/en/sql-reference/sql/create-procedure).", CustomizeDiff: TrackingCustomDiffWrapper(resources.ProcedureJava, customdiff.All( @@ -46,7 +46,3 @@ func ReadContextProcedureJava(ctx context.Context, d *schema.ResourceData, meta func UpdateContextProcedureJava(ctx context.Context, d *schema.ResourceData, meta any) diag.Diagnostics { return nil } - -func DeleteContextProcedureJava(ctx context.Context, d *schema.ResourceData, meta any) diag.Diagnostics { - return nil -} diff --git a/pkg/resources/procedure_javascript.go b/pkg/resources/procedure_javascript.go index 8c3958b99e..5088b492f7 100644 --- a/pkg/resources/procedure_javascript.go +++ b/pkg/resources/procedure_javascript.go @@ -17,7 +17,7 @@ func ProcedureJavascript() *schema.Resource { CreateContext: TrackingCreateWrapper(resources.ProcedureJavascript, CreateContextProcedureJavascript), ReadContext: TrackingReadWrapper(resources.ProcedureJavascript, ReadContextProcedureJavascript), UpdateContext: 
TrackingUpdateWrapper(resources.ProcedureJavascript, UpdateContextProcedureJavascript), - DeleteContext: TrackingDeleteWrapper(resources.ProcedureJavascript, DeleteContextProcedureJavascript), + DeleteContext: TrackingDeleteWrapper(resources.ProcedureJavascript, DeleteProcedure), Description: "Resource used to manage javascript procedure objects. For more information, check [procedure documentation](https://docs.snowflake.com/en/sql-reference/sql/create-procedure).", CustomizeDiff: TrackingCustomDiffWrapper(resources.ProcedureJavascript, customdiff.All( @@ -46,7 +46,3 @@ func ReadContextProcedureJavascript(ctx context.Context, d *schema.ResourceData, func UpdateContextProcedureJavascript(ctx context.Context, d *schema.ResourceData, meta any) diag.Diagnostics { return nil } - -func DeleteContextProcedureJavascript(ctx context.Context, d *schema.ResourceData, meta any) diag.Diagnostics { - return nil -} diff --git a/pkg/resources/procedure_python.go b/pkg/resources/procedure_python.go index 48d70329e7..717cee32fe 100644 --- a/pkg/resources/procedure_python.go +++ b/pkg/resources/procedure_python.go @@ -17,7 +17,7 @@ func ProcedurePython() *schema.Resource { CreateContext: TrackingCreateWrapper(resources.ProcedurePython, CreateContextProcedurePython), ReadContext: TrackingReadWrapper(resources.ProcedurePython, ReadContextProcedurePython), UpdateContext: TrackingUpdateWrapper(resources.ProcedurePython, UpdateContextProcedurePython), - DeleteContext: TrackingDeleteWrapper(resources.ProcedurePython, DeleteContextProcedurePython), + DeleteContext: TrackingDeleteWrapper(resources.ProcedurePython, DeleteProcedure), Description: "Resource used to manage python procedure objects. 
For more information, check [procedure documentation](https://docs.snowflake.com/en/sql-reference/sql/create-procedure).", CustomizeDiff: TrackingCustomDiffWrapper(resources.ProcedurePython, customdiff.All( @@ -46,7 +46,3 @@ func ReadContextProcedurePython(ctx context.Context, d *schema.ResourceData, met func UpdateContextProcedurePython(ctx context.Context, d *schema.ResourceData, meta any) diag.Diagnostics { return nil } - -func DeleteContextProcedurePython(ctx context.Context, d *schema.ResourceData, meta any) diag.Diagnostics { - return nil -} diff --git a/pkg/resources/procedure_scala.go b/pkg/resources/procedure_scala.go index 3a7816b7d0..793663d0e1 100644 --- a/pkg/resources/procedure_scala.go +++ b/pkg/resources/procedure_scala.go @@ -17,7 +17,7 @@ func ProcedureScala() *schema.Resource { CreateContext: TrackingCreateWrapper(resources.ProcedureScala, CreateContextProcedureScala), ReadContext: TrackingReadWrapper(resources.ProcedureScala, ReadContextProcedureScala), UpdateContext: TrackingUpdateWrapper(resources.ProcedureScala, UpdateContextProcedureScala), - DeleteContext: TrackingDeleteWrapper(resources.ProcedureScala, DeleteContextProcedureScala), + DeleteContext: TrackingDeleteWrapper(resources.ProcedureScala, DeleteProcedure), Description: "Resource used to manage scala procedure objects. 
For more information, check [procedure documentation](https://docs.snowflake.com/en/sql-reference/sql/create-procedure).", CustomizeDiff: TrackingCustomDiffWrapper(resources.ProcedureScala, customdiff.All( @@ -46,7 +46,3 @@ func ReadContextProcedureScala(ctx context.Context, d *schema.ResourceData, meta func UpdateContextProcedureScala(ctx context.Context, d *schema.ResourceData, meta any) diag.Diagnostics { return nil } - -func DeleteContextProcedureScala(ctx context.Context, d *schema.ResourceData, meta any) diag.Diagnostics { - return nil -} diff --git a/pkg/resources/procedure_sql.go b/pkg/resources/procedure_sql.go index 0488941f03..11fcd69413 100644 --- a/pkg/resources/procedure_sql.go +++ b/pkg/resources/procedure_sql.go @@ -17,7 +17,7 @@ func ProcedureSql() *schema.Resource { CreateContext: TrackingCreateWrapper(resources.ProcedureSql, CreateContextProcedureSql), ReadContext: TrackingReadWrapper(resources.ProcedureSql, ReadContextProcedureSql), UpdateContext: TrackingUpdateWrapper(resources.ProcedureSql, UpdateContextProcedureSql), - DeleteContext: TrackingDeleteWrapper(resources.ProcedureSql, DeleteContextProcedureSql), + DeleteContext: TrackingDeleteWrapper(resources.ProcedureSql, DeleteProcedure), Description: "Resource used to manage sql procedure objects. 
For more information, check [procedure documentation](https://docs.snowflake.com/en/sql-reference/sql/create-procedure).", CustomizeDiff: TrackingCustomDiffWrapper(resources.ProcedureSql, customdiff.All( @@ -46,7 +46,3 @@ func ReadContextProcedureSql(ctx context.Context, d *schema.ResourceData, meta a func UpdateContextProcedureSql(ctx context.Context, d *schema.ResourceData, meta any) diag.Diagnostics { return nil } - -func DeleteContextProcedureSql(ctx context.Context, d *schema.ResourceData, meta any) diag.Diagnostics { - return nil -} diff --git a/pkg/resources/resource_helpers_create.go b/pkg/resources/resource_helpers_create.go index 837fada163..42c34ca0c7 100644 --- a/pkg/resources/resource_helpers_create.go +++ b/pkg/resources/resource_helpers_create.go @@ -12,6 +12,13 @@ func stringAttributeCreate(d *schema.ResourceData, key string, createField **str return nil } +func stringAttributeCreateBuilder[T any](d *schema.ResourceData, key string, setValue func(string) T) error { + if v, ok := d.GetOk(key); ok { + setValue(v.(string)) + } + return nil +} + func intAttributeCreate(d *schema.ResourceData, key string, createField **int) error { if v, ok := d.GetOk(key); ok { *createField = sdk.Int(v.(int)) @@ -37,6 +44,17 @@ func booleanStringAttributeCreate(d *schema.ResourceData, key string, createFiel return nil } +func booleanStringAttributeCreateBuilder[T any](d *schema.ResourceData, key string, setValue func(bool) T) error { + if v := d.Get(key).(string); v != BooleanDefault { + parsed, err := booleanStringToBool(v) + if err != nil { + return err + } + setValue(parsed) + } + return nil +} + func accountObjectIdentifierAttributeCreate(d *schema.ResourceData, key string, createField **sdk.AccountObjectIdentifier) error { if v, ok := d.GetOk(key); ok { *createField = sdk.Pointer(sdk.NewAccountObjectIdentifier(v.(string))) @@ -73,6 +91,17 @@ func attributeMappedValueCreate[T any](d *schema.ResourceData, key string, creat return nil } +func 
attributeMappedValueCreateBuilder[InputType any, MappedType any, RequestBuilder any](d *schema.ResourceData, key string, setValue func(MappedType) RequestBuilder, mapper func(value InputType) (MappedType, error)) error { + if v, ok := d.GetOk(key); ok { + value, err := mapper(v.(InputType)) + if err != nil { + return err + } + setValue(value) + } + return nil +} + func copyGrantsAttributeCreate(d *schema.ResourceData, isOrReplace bool, orReplaceField, copyGrantsField **bool) error { if isOrReplace { *orReplaceField = sdk.Bool(true) diff --git a/pkg/resources/resource_helpers_read.go b/pkg/resources/resource_helpers_read.go index b3dcfcebf1..20d1e69fc6 100644 --- a/pkg/resources/resource_helpers_read.go +++ b/pkg/resources/resource_helpers_read.go @@ -63,3 +63,22 @@ func attributeMappedValueReadOrDefault[T, R any](d *schema.ResourceData, key str } return d.Set(key, nil) } + +func setOptionalFromStringPtr(d *schema.ResourceData, key string, ptr *string) error { + if ptr != nil { + if err := d.Set(key, *ptr); err != nil { + return err + } + } + return nil +} + +// TODO [SNOW-1348103]: return error if nil +func setRequiredFromStringPtr(d *schema.ResourceData, key string, ptr *string) error { + if ptr != nil { + if err := d.Set(key, *ptr); err != nil { + return err + } + } + return nil +} diff --git a/pkg/resources/user.go b/pkg/resources/user.go index 1fb6f15127..c84040e642 100644 --- a/pkg/resources/user.go +++ b/pkg/resources/user.go @@ -199,7 +199,6 @@ func User() *schema.Resource { }, CustomizeDiff: TrackingCustomDiffWrapper(resources.User, customdiff.All( - // TODO [SNOW-1629468 - next pr]: test "default_role", "default_secondary_roles" ComputedIfAnyAttributeChanged(userSchema, ShowOutputAttributeName, userExternalChangesAttributes...), ComputedIfAnyAttributeChanged(userParametersSchema, ParametersAttributeName, collections.Map(sdk.AsStringList(sdk.AllUserParameters), strings.ToLower)...), ComputedIfAnyAttributeChanged(userSchema, FullyQualifiedNameAttributeName, 
"name"), diff --git a/pkg/schemas/function_parameters.go b/pkg/schemas/function_parameters.go new file mode 100644 index 0000000000..af7752c394 --- /dev/null +++ b/pkg/schemas/function_parameters.go @@ -0,0 +1,35 @@ +package schemas + +import ( + "slices" + "strings" + + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk" + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" +) + +var ( + ShowFunctionParametersSchema = make(map[string]*schema.Schema) + functionParameters = []sdk.FunctionParameter{ + sdk.FunctionParameterEnableConsoleOutput, + sdk.FunctionParameterLogLevel, + sdk.FunctionParameterMetricLevel, + sdk.FunctionParameterTraceLevel, + } +) + +func init() { + for _, param := range functionParameters { + ShowFunctionParametersSchema[strings.ToLower(string(param))] = ParameterListSchema + } +} + +func FunctionParametersToSchema(parameters []*sdk.Parameter) map[string]any { + functionParametersValue := make(map[string]any) + for _, param := range parameters { + if slices.Contains(functionParameters, sdk.FunctionParameter(param.Key)) { + functionParametersValue[strings.ToLower(param.Key)] = []map[string]any{ParameterToSchema(param)} + } + } + return functionParametersValue +} diff --git a/pkg/schemas/procedure_parameters.go b/pkg/schemas/procedure_parameters.go new file mode 100644 index 0000000000..7e9c5c1638 --- /dev/null +++ b/pkg/schemas/procedure_parameters.go @@ -0,0 +1,35 @@ +package schemas + +import ( + "slices" + "strings" + + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk" + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" +) + +var ( + ShowProcedureParametersSchema = make(map[string]*schema.Schema) + ProcedureParameters = []sdk.ProcedureParameter{ + sdk.ProcedureParameterEnableConsoleOutput, + sdk.ProcedureParameterLogLevel, + sdk.ProcedureParameterMetricLevel, + sdk.ProcedureParameterTraceLevel, + } +) + +func init() { + for _, param := range ProcedureParameters { + 
ShowProcedureParametersSchema[strings.ToLower(string(param))] = ParameterListSchema + } +} + +func ProcedureParametersToSchema(parameters []*sdk.Parameter) map[string]any { + ProcedureParametersValue := make(map[string]any) + for _, param := range parameters { + if slices.Contains(ProcedureParameters, sdk.ProcedureParameter(param.Key)) { + ProcedureParametersValue[strings.ToLower(param.Key)] = []map[string]any{ParameterToSchema(param)} + } + } + return ProcedureParametersValue +} diff --git a/pkg/sdk/data_types_deprecated.go b/pkg/sdk/data_types_deprecated.go index 0d0315ad5e..24149f8d9f 100644 --- a/pkg/sdk/data_types_deprecated.go +++ b/pkg/sdk/data_types_deprecated.go @@ -47,5 +47,9 @@ func IsStringType(_type string) bool { } func LegacyDataTypeFrom(newDataType datatypes.DataType) DataType { + // TODO [SNOW-1850370]: remove this check? + if newDataType == nil { + return "" + } return DataType(newDataType.ToLegacyDataTypeSql()) } diff --git a/pkg/sdk/datatypes/legacy.go b/pkg/sdk/datatypes/legacy.go index 5a0e249cd7..c77f286f9c 100644 --- a/pkg/sdk/datatypes/legacy.go +++ b/pkg/sdk/datatypes/legacy.go @@ -16,4 +16,7 @@ const ( TimestampNtzLegacyDataType = "TIMESTAMP_NTZ" TimestampTzLegacyDataType = "TIMESTAMP_TZ" VariantLegacyDataType = "VARIANT" + + // TableLegacyDataType was not a value of legacy data type in the old implementation. Left for now for an easier implementation. + TableLegacyDataType = "TABLE" ) diff --git a/pkg/sdk/datatypes/table.go b/pkg/sdk/datatypes/table.go new file mode 100644 index 0000000000..e7c398ec6d --- /dev/null +++ b/pkg/sdk/datatypes/table.go @@ -0,0 +1,39 @@ +package datatypes + +// TableDataType is based on TODO [SNOW-1348103] +// It does not have synonyms. +// It consists of a list of column name + column type; may be empty. 
// TODO [SNOW-1348103]: test and improve
// TableDataType represents the TABLE data type of a tabular function.
// It holds the ordered list of (column name, column type) pairs; the list may be empty.
type TableDataType struct {
	columns []TableDataTypeColumn
	// underlyingType is returned verbatim by ToSql; presumably the raw
	// TABLE(...) text this value was built from — TODO confirm at construction site.
	underlyingType string
}

// TableDataTypeColumn is a single (name, data type) pair of a TABLE data type.
type TableDataTypeColumn struct {
	name     string
	dataType DataType
}

// ColumnName returns the column's name.
func (c *TableDataTypeColumn) ColumnName() string {
	return c.name
}

// ColumnType returns the column's data type.
func (c *TableDataTypeColumn) ColumnType() DataType {
	return c.dataType
}

// ToSql returns the stored underlying type text unchanged.
func (t *TableDataType) ToSql() string {
	return t.underlyingType
}

// ToLegacyDataTypeSql returns the bare TABLE keyword; column details are dropped
// (TABLE had no legacy data type value in the old implementation — see TableLegacyDataType).
func (t *TableDataType) ToLegacyDataTypeSql() string {
	return TableLegacyDataType
}

// Canonical returns the bare TABLE keyword, like ToLegacyDataTypeSql.
func (t *TableDataType) Canonical() string {
	return TableLegacyDataType
}

// Columns returns the column list (may be empty).
func (t *TableDataType) Columns() []TableDataTypeColumn {
	return t.columns
}
+ +// NormalizedArgument does not contain default value because it is not returned in the Signature (or any other field). +type NormalizedArgument struct { + Name string + DataType datatypes.DataType } func functionDetailsFromRows(rows []FunctionDetail) (*FunctionDetails, error) { @@ -69,9 +92,136 @@ func functionDetailsFromRows(rows []FunctionDetail) (*FunctionDetails, error) { v.TargetPath = row.Value } } + if e := errors.Join(errs...); e != nil { + return nil, e + } + + if functionDetailsImports, err := parseFunctionDetailsImport(*v); err != nil { + errs = append(errs, err) + } else { + v.NormalizedImports = functionDetailsImports + } + + if v.TargetPath != nil { + if p, err := parseStageLocationPath(*v.TargetPath); err != nil { + errs = append(errs, err) + } else { + v.NormalizedTargetPath = p + } + } + + if dt, returnNotNull, err := parseFunctionOrProcedureReturns(v.Returns); err != nil { + errs = append(errs, err) + } else { + v.ReturnDataType = dt + v.ReturnNotNull = returnNotNull + } + + if args, err := parseFunctionOrProcedureSignature(v.Signature); err != nil { + errs = append(errs, err) + } else { + v.NormalizedArguments = args + } + return v, errors.Join(errs...) } +// TODO [SNOW-1850370]: use ParseCommaSeparatedStringArray + collections.MapErr combo here and in other methods? 
+func parseFunctionDetailsImport(details FunctionDetails) ([]NormalizedPath, error) { + functionDetailsImports := make([]NormalizedPath, 0) + if details.Imports == nil || *details.Imports == "" || *details.Imports == "[]" { + return functionDetailsImports, nil + } + if !strings.HasPrefix(*details.Imports, "[") || !strings.HasSuffix(*details.Imports, "]") { + return functionDetailsImports, fmt.Errorf("could not parse imports from Snowflake: %s, wrapping brackets not found", *details.Imports) + } + raw := (*details.Imports)[1 : len(*details.Imports)-1] + imports := strings.Split(raw, ",") + for _, imp := range imports { + p, err := parseStageLocationPath(imp) + if err != nil { + return nil, fmt.Errorf("could not parse imports from Snowflake: %s, err: %w", *details.Imports, err) + } + functionDetailsImports = append(functionDetailsImports, *p) + } + return functionDetailsImports, nil +} + +func parseStageLocationPath(location string) (*NormalizedPath, error) { + log.Printf("[DEBUG] parsing stage location path part: %s", location) + idx := strings.Index(location, "/") + if idx < 0 { + return nil, fmt.Errorf("part %s cannot be split into stage and path", location) + } + stageRaw := strings.TrimPrefix(strings.TrimSpace(location[:idx]), "@") + if stageRaw != "~" { + stageId, err := ParseSchemaObjectIdentifier(stageRaw) + if err != nil { + return nil, fmt.Errorf("part %s contains incorrect stage location: %w", location, err) + } + stageRaw = stageId.FullyQualifiedName() + } + pathRaw := strings.TrimPrefix(strings.TrimSpace(location[idx:]), "/") + if pathRaw == "" { + return nil, fmt.Errorf("part %s contains empty path", location) + } + return &NormalizedPath{stageRaw, pathRaw}, nil +} + +func parseFunctionOrProcedureReturns(returns string) (datatypes.DataType, bool, error) { + var returnNotNull bool + trimmed := strings.TrimSpace(returns) + if strings.HasSuffix(trimmed, " NOT NULL") { + returnNotNull = true + trimmed = strings.TrimSuffix(trimmed, " NOT NULL") + } + dt, err 
:= datatypes.ParseDataType(trimmed) + return dt, returnNotNull, err +} + +// Format in Snowflake DB is: (argName argType, argName argType, ...). +func parseFunctionOrProcedureSignature(signature string) ([]NormalizedArgument, error) { + normalizedArguments := make([]NormalizedArgument, 0) + trimmed := strings.TrimSpace(signature) + if trimmed == "" { + return normalizedArguments, fmt.Errorf("could not parse signature from Snowflake: %s, can't be empty", signature) + } + if trimmed == "()" { + return normalizedArguments, nil + } + if !strings.HasPrefix(trimmed, "(") || !strings.HasSuffix(trimmed, ")") { + return normalizedArguments, fmt.Errorf("could not parse signature from Snowflake: %s, wrapping parentheses not found", trimmed) + } + raw := (trimmed)[1 : len(trimmed)-1] + args := strings.Split(raw, ",") + + for _, arg := range args { + a, err := parseFunctionOrProcedureArgument(arg) + if err != nil { + return nil, fmt.Errorf("could not parse signature from Snowflake: %s, err: %w", trimmed, err) + } + normalizedArguments = append(normalizedArguments, *a) + } + return normalizedArguments, nil +} + +// TODO [SNOW-1850370]: test with strange arg names (first integration test) +func parseFunctionOrProcedureArgument(arg string) (*NormalizedArgument, error) { + log.Printf("[DEBUG] parsing argument: %s", arg) + trimmed := strings.TrimSpace(arg) + idx := strings.Index(trimmed, " ") + if idx < 0 { + return nil, fmt.Errorf("arg %s cannot be split into arg name, data type, and default", arg) + } + argName := trimmed[:idx] + rest := strings.TrimSpace(trimmed[idx:]) + dt, err := datatypes.ParseDataType(rest) + if err != nil { + return nil, fmt.Errorf("arg type %s cannot be parsed, err: %w", rest, err) + } + return &NormalizedArgument{argName, dt}, nil +} + func (v *functions) DescribeDetails(ctx context.Context, id SchemaObjectIdentifierWithArguments) (*FunctionDetails, error) { rows, err := v.Describe(ctx, id) if err != nil { diff --git a/pkg/sdk/functions_ext_test.go 
// TODO [SNOW-1850370]: test parsing single
// Test_parseFunctionDetailsImport covers parsing of the raw IMPORTS value:
// empty/`[]`/nil inputs, user-stage (`~`) and fully/partially quoted stage
// identifiers, plus malformed inputs that must fail with a descriptive error.
func Test_parseFunctionDetailsImport(t *testing.T) {
	inputs := []struct {
		rawInput string
		expected []NormalizedPath
	}{
		{"", []NormalizedPath{}},
		{`[]`, []NormalizedPath{}},
		{`[@~/abc]`, []NormalizedPath{{"~", "abc"}}},
		{`[@~/abc/def]`, []NormalizedPath{{"~", "abc/def"}}},
		{`[@"db"."sc"."st"/abc/def]`, []NormalizedPath{{`"db"."sc"."st"`, "abc/def"}}},
		{`[@db.sc.st/abc/def]`, []NormalizedPath{{`"db"."sc"."st"`, "abc/def"}}},
		{`[db.sc.st/abc/def]`, []NormalizedPath{{`"db"."sc"."st"`, "abc/def"}}},
		{`[@"db"."sc".st/abc/def]`, []NormalizedPath{{`"db"."sc"."st"`, "abc/def"}}},
		{`[@"db"."sc".st/abc/def, db."sc".st/abc]`, []NormalizedPath{{`"db"."sc"."st"`, "abc/def"}, {`"db"."sc"."st"`, "abc"}}},
	}

	badInputs := []struct {
		rawInput          string
		expectedErrorPart string
	}{
		{"[", "wrapping brackets not found"},
		{"]", "wrapping brackets not found"},
		{`[@~/]`, "contains empty path"},
		{`[@~]`, "cannot be split into stage and path"},
		{`[@"db"."sc"/abc]`, "contains incorrect stage location"},
		{`[@"db"/abc]`, "contains incorrect stage location"},
		{`[@"db"."sc"."st"."smth"/abc]`, "contains incorrect stage location"},
		{`[@"db/a"."sc"."st"/abc]`, "contains incorrect stage location"},
		{`[@"db"."sc"."st"/abc], @"db"."sc"/abc]`, "contains incorrect stage location"},
	}

	for _, tc := range inputs {
		tc := tc // capture range variable (pre-Go 1.22 semantics)
		t.Run(fmt.Sprintf("Snowflake raw imports: %s", tc.rawInput), func(t *testing.T) {
			details := FunctionDetails{Imports: &tc.rawInput}

			results, err := parseFunctionDetailsImport(details)
			require.NoError(t, err)
			require.Equal(t, tc.expected, results)
		})
	}

	for _, tc := range badInputs {
		tc := tc // capture range variable (pre-Go 1.22 semantics)
		t.Run(fmt.Sprintf("incorrect Snowflake input: %s, expecting error with: %s", tc.rawInput, tc.expectedErrorPart), func(t *testing.T) {
			details := FunctionDetails{Imports: &tc.rawInput}

			_, err := parseFunctionDetailsImport(details)
			require.Error(t, err)
			require.ErrorContains(t, err, "could not parse imports from Snowflake")
			require.ErrorContains(t, err, tc.expectedErrorPart)
		})
	}

	// nil Imports must behave like an empty list, not like an error.
	t.Run("Snowflake raw imports nil", func(t *testing.T) {
		details := FunctionDetails{Imports: nil}

		results, err := parseFunctionDetailsImport(details)
		require.NoError(t, err)
		require.Equal(t, []NormalizedPath{}, results)
	})
}

// Test_parseFunctionOrProcedureReturns covers parsing of the RETURNS clause:
// data type normalization (e.g. CHAR -> CHAR(1)) and detection of a trailing
// NOT NULL, plus inputs that must fail data type parsing.
func Test_parseFunctionOrProcedureReturns(t *testing.T) {
	inputs := []struct {
		rawInput              string
		expectedRawDataType   string
		expectedReturnNotNull bool
	}{
		{"CHAR", "CHAR(1)", false},
		{"CHAR(1)", "CHAR(1)", false},
		{"NUMBER(30, 2)", "NUMBER(30, 2)", false},
		{"NUMBER(30,2)", "NUMBER(30, 2)", false},
		{"NUMBER(30,2) NOT NULL", "NUMBER(30, 2)", true},
		{"CHAR NOT NULL", "CHAR(1)", true},
		{" CHAR NOT NULL ", "CHAR(1)", true},
		{"OBJECT", "OBJECT", false},
		{"OBJECT NOT NULL", "OBJECT", true},
	}

	badInputs := []struct {
		rawInput          string
		expectedErrorPart string
	}{
		{"", "invalid data type"},
		{"NOT NULL", "invalid data type"},
		{"CHA NOT NULL", "invalid data type"},
		{"CHA NOT NULLS", "invalid data type"},
	}

	for _, tc := range inputs {
		tc := tc // capture range variable (pre-Go 1.22 semantics)
		t.Run(fmt.Sprintf("return data type raw: %s", tc.rawInput), func(t *testing.T) {
			dt, returnNotNull, err := parseFunctionOrProcedureReturns(tc.rawInput)
			require.NoError(t, err)
			require.Equal(t, tc.expectedRawDataType, dt.ToSql())
			require.Equal(t, tc.expectedReturnNotNull, returnNotNull)
		})
	}

	for _, tc := range badInputs {
		tc := tc // capture range variable (pre-Go 1.22 semantics)
		t.Run(fmt.Sprintf("incorrect return data type raw: %s, expecting error with: %s", tc.rawInput, tc.expectedErrorPart), func(t *testing.T) {
			_, _, err := parseFunctionOrProcedureReturns(tc.rawInput)
			require.Error(t, err)
			require.ErrorContains(t, err, tc.expectedErrorPart)
		})
	}
}

// Test_parseFunctionOrProcedureSignature covers parsing of the `(name type, ...)`
// signature: empty signature `()`, whitespace tolerance, multi-word data types,
// plus malformed signatures (missing parentheses, missing name/type, defaults).
func Test_parseFunctionOrProcedureSignature(t *testing.T) {
	inputs := []struct {
		rawInput     string
		expectedArgs []NormalizedArgument
	}{
		{"()", []NormalizedArgument{}},
		{"(abc CHAR)", []NormalizedArgument{{"abc", dataTypeChar}}},
		{"(abc CHAR(1))", []NormalizedArgument{{"abc", dataTypeChar}}},
		{"(abc CHAR(100))", []NormalizedArgument{{"abc", dataTypeChar_100}}},
		{" ( abc CHAR(100 ) )", []NormalizedArgument{{"abc", dataTypeChar_100}}},
		{"( abc CHAR )", []NormalizedArgument{{"abc", dataTypeChar}}},
		{"(abc DOUBLE PRECISION)", []NormalizedArgument{{"abc", dataTypeDoublePrecision}}},
		{"(abc double precision)", []NormalizedArgument{{"abc", dataTypeDoublePrecision}}},
		{"(abc TIMESTAMP WITHOUT TIME ZONE(5))", []NormalizedArgument{{"abc", dataTypeTimestampWithoutTimeZone_5}}},
	}

	badInputs := []struct {
		rawInput          string
		expectedErrorPart string
	}{
		{"", "can't be empty"},
		{"(abc CHAR", "wrapping parentheses not found"},
		{"abc CHAR)", "wrapping parentheses not found"},
		{"(abc)", "cannot be split into arg name, data type, and default"},
		{"(CHAR)", "cannot be split into arg name, data type, and default"},
		{"(abc CHA)", "invalid data type"},
		{"(abc CHA(123))", "invalid data type"},
		{"(abc CHAR(1) DEFAULT)", "cannot be parsed"},
		{"(abc CHAR(1) DEFAULT 'a')", "cannot be parsed"},
		// TODO [SNOW-1850370]: Snowflake currently does not return concrete data types so we can fail on them currently but it should be improved in the future
		{"(abc NUMBER(30,2))", "cannot be parsed"},
		{"(abc NUMBER(30, 2))", "cannot be parsed"},
	}

	for _, tc := range inputs {
		tc := tc // capture range variable (pre-Go 1.22 semantics)
		t.Run(fmt.Sprintf("return data type raw: %s", tc.rawInput), func(t *testing.T) {
			args, err := parseFunctionOrProcedureSignature(tc.rawInput)

			require.NoError(t, err)
			require.Len(t, args, len(tc.expectedArgs))
			for i, arg := range args {
				require.Equal(t, tc.expectedArgs[i].Name, arg.Name)
				require.True(t, datatypes.AreTheSame(tc.expectedArgs[i].DataType, arg.DataType))
			}
		})
	}

	for _, tc := range badInputs {
		tc := tc // capture range variable (pre-Go 1.22 semantics)
		t.Run(fmt.Sprintf("incorrect signature raw: %s, expecting error with: %s", tc.rawInput, tc.expectedErrorPart), func(t *testing.T) {
			_, err := parseFunctionOrProcedureSignature(tc.rawInput)
			require.Error(t, err)
			require.ErrorContains(t, err, "could not parse signature from Snowflake")
			require.ErrorContains(t, err, tc.expectedErrorPart)
		})
	}
}
return NewSchemaObjectIdentifierWithArguments(schemaId.DatabaseName(), schemaId.Name(), name, argumentDataTypes...) } diff --git a/pkg/sdk/random_test.go b/pkg/sdk/random_test.go index 83880167df..cedf8c3985 100644 --- a/pkg/sdk/random_test.go +++ b/pkg/sdk/random_test.go @@ -17,10 +17,14 @@ var ( emptySchemaObjectIdentifierWithArguments = NewSchemaObjectIdentifierWithArguments("", "", "") // TODO [SNOW-1843440]: create using constructors (when we add them)? - dataTypeNumber, _ = datatypes.ParseDataType("NUMBER(36, 2)") - dataTypeVarchar, _ = datatypes.ParseDataType("VARCHAR(100)") - dataTypeFloat, _ = datatypes.ParseDataType("FLOAT") - dataTypeVariant, _ = datatypes.ParseDataType("VARIANT") + dataTypeNumber, _ = datatypes.ParseDataType("NUMBER(36, 2)") + dataTypeVarchar, _ = datatypes.ParseDataType("VARCHAR(100)") + dataTypeFloat, _ = datatypes.ParseDataType("FLOAT") + dataTypeVariant, _ = datatypes.ParseDataType("VARIANT") + dataTypeChar, _ = datatypes.ParseDataType("CHAR") + dataTypeChar_100, _ = datatypes.ParseDataType("CHAR(100)") + dataTypeDoublePrecision, _ = datatypes.ParseDataType("DOUBLE PRECISION") + dataTypeTimestampWithoutTimeZone_5, _ = datatypes.ParseDataType("TIMESTAMP WITHOUT TIME ZONE(5)") ) func randomSchemaObjectIdentifierWithArguments(argumentDataTypes ...DataType) SchemaObjectIdentifierWithArguments { diff --git a/pkg/sdk/testint/functions_integration_test.go b/pkg/sdk/testint/functions_integration_test.go index bb292cd627..022ba7592a 100644 --- a/pkg/sdk/testint/functions_integration_test.go +++ b/pkg/sdk/testint/functions_integration_test.go @@ -20,20 +20,20 @@ import ( "github.com/stretchr/testify/require" ) -// TODO [SNOW-1348103]: schemaName and catalog name are quoted (because we use lowercase) // TODO [SNOW-1850370]: HasArgumentsRawFrom(functionId, arguments, return) // TODO [SNOW-1850370]: extract show assertions with commons fields // TODO [SNOW-1850370]: test confirming that runtime version is required for Scala function -// TODO 
[SNOW-1348103 or SNOW-1850370]: test create or replace with name change, args change -// TODO [SNOW-1348103]: test rename more (arg stays, can't change arg, rename to different schema) -// TODO [SNOW-1348103]: test weird names for arg name - lower/upper if used with double quotes, to upper without quotes, dots, spaces, and both quotes not permitted +// TODO [SNOW-1850370]: test create or replace with name change, args change +// TODO [SNOW-1850370]: test rename more (arg stays, can't change arg, rename to different schema) // TODO [SNOW-1850370]: add test documenting that UNSET SECRETS does not work // TODO [SNOW-1850370]: add test documenting [JAVA]: 391516 (42601): SQL compilation error: Cannot specify TARGET_PATH without a function BODY. -// TODO [SNOW-1348103 or SNOW-1850370]: test secure -// TODO [SNOW-1348103]: python aggregate func (100357 (P0000): Could not find accumulate method in function CVVEMHIT_06547800_08D6_DBCA_1AC7_5E422AFF8B39 with handler dump) -// TODO [SNOW-1348103]: add a test documenting that we can't set parameters in create (and revert adding these parameters directly in object...) +// TODO [SNOW-1850370]: add a test documenting that we can't set parameters in create (and revert adding these parameters directly in object...) 
// TODO [SNOW-1850370]: active warehouse vs validations -// TODO [SNOW-1348103]: add a test documenting STRICT behavior +// TODO [SNOW-1850370]: add a test documenting STRICT behavior +// TODO [SNOW-1348103]: test weird names for arg name - lower/upper if used with double quotes, to upper without quotes, dots, spaces, and both quotes not permitted +// TODO [SNOW-1348103]: test secure +// TODO [SNOW-1348103]: python aggregate func (100357 (P0000): Could not find accumulate method in function CVVEMHIT_06547800_08D6_DBCA_1AC7_5E422AFF8B39 with handler dump) +// TODO [SNOW-1348103]: add test with multiple imports func TestInt_Functions(t *testing.T) { client := testClient(t) ctx := context.Background() @@ -48,7 +48,7 @@ func TestInt_Functions(t *testing.T) { externalAccessIntegration, externalAccessIntegrationCleanup := testClientHelper().ExternalAccessIntegration.CreateExternalAccessIntegrationWithNetworkRuleAndSecret(t, networkRule.ID(), secret.ID()) t.Cleanup(externalAccessIntegrationCleanup) - tmpJavaFunction := testClientHelper().CreateSampleJavaFunctionAndJar(t) + tmpJavaFunction := testClientHelper().CreateSampleJavaFunctionAndJarOnUserStage(t) tmpPythonFunction := testClientHelper().CreateSamplePythonFunctionAndModule(t) assertParametersSet := func(t *testing.T, functionParametersAssert *objectparametersassert.FunctionParametersAssert) { @@ -112,6 +112,8 @@ func TestInt_Functions(t *testing.T) { assertions.AssertThatObject(t, objectassert.FunctionDetails(t, function.ID()). HasSignature(fmt.Sprintf(`(%s %s)`, argName, dataType.ToLegacyDataTypeSql())). HasReturns(dataType.ToSql()). + HasReturnDataType(dataType). + HasReturnNotNull(false). HasLanguage("JAVA"). HasBody(definition). HasNullHandling(string(sdk.NullInputBehaviorCalledOnNullInput)). @@ -119,10 +121,12 @@ func TestInt_Functions(t *testing.T) { HasExternalAccessIntegrationsNil(). HasSecretsNil(). HasImports(`[]`). + HasExactlyImportsNormalizedInAnyOrder(). HasHandler(handler). HasRuntimeVersionNil(). 
HasPackages(`[]`). HasTargetPathNil(). + HasNormalizedTargetPathNil(). HasInstalledPackagesNil(). HasIsAggregateNil(), ) @@ -206,6 +210,8 @@ func TestInt_Functions(t *testing.T) { assertions.AssertThatObject(t, objectassert.FunctionDetails(t, function.ID()). HasSignature(fmt.Sprintf(`(%s %s)`, argName, dataType.ToLegacyDataTypeSql())). HasReturns(fmt.Sprintf(`%s NOT NULL`, dataType.ToSql())). + HasReturnDataType(dataType). + HasReturnNotNull(true). HasLanguage("JAVA"). HasBody(definition). HasNullHandling(string(sdk.NullInputBehaviorReturnsNullInput)). @@ -215,10 +221,14 @@ func TestInt_Functions(t *testing.T) { // TODO [SNOW-1348103]: check multiple secrets (to know how to parse) HasExactlySecrets(map[string]sdk.SchemaObjectIdentifier{"abc": secretId}). HasImports(fmt.Sprintf(`[%s]`, tmpJavaFunction.JarLocation())). + HasExactlyImportsNormalizedInAnyOrder(sdk.NormalizedPath{ + StageLocation: "~", PathOnStage: tmpJavaFunction.JarName, + }). HasHandler(handler). HasRuntimeVersion("11"). HasPackages(`[com.snowflake:snowpark:1.14.0,com.snowflake:telemetry:0.1.0]`). HasTargetPath(targetPath). + HasNormalizedTargetPath("~", jarName). HasInstalledPackagesNil(). HasIsAggregateNil(), ) @@ -278,6 +288,8 @@ func TestInt_Functions(t *testing.T) { assertions.AssertThatObject(t, objectassert.FunctionDetails(t, function.ID()). HasSignature(fmt.Sprintf(`(%s %s)`, argName, dataType.ToLegacyDataTypeSql())). HasReturns(dataType.ToSql()). + HasReturnDataType(dataType). + HasReturnNotNull(false). HasLanguage("JAVA"). HasBodyNil(). HasNullHandling(string(sdk.NullInputBehaviorCalledOnNullInput)). @@ -285,10 +297,14 @@ func TestInt_Functions(t *testing.T) { HasExternalAccessIntegrationsNil(). HasSecretsNil(). HasImports(fmt.Sprintf(`[%s]`, importPath)). + HasExactlyImportsNormalizedInAnyOrder(sdk.NormalizedPath{ + StageLocation: "~", PathOnStage: tmpJavaFunction.JarName, + }). HasHandler(handler). HasRuntimeVersionNil(). HasPackages(`[]`). HasTargetPathNil(). 
+ HasNormalizedTargetPathNil(). HasInstalledPackagesNil(). HasIsAggregateNil(), ) @@ -360,6 +376,8 @@ func TestInt_Functions(t *testing.T) { assertions.AssertThatObject(t, objectassert.FunctionDetails(t, function.ID()). HasSignature(fmt.Sprintf(`(%s %s)`, argName, dataType.ToLegacyDataTypeSql())). HasReturns(fmt.Sprintf(`%s NOT NULL`, dataType.ToSql())). + HasReturnDataType(dataType). + HasReturnNotNull(true). HasLanguage("JAVA"). HasBodyNil(). HasNullHandling(string(sdk.NullInputBehaviorReturnsNullInput)). @@ -367,10 +385,14 @@ func TestInt_Functions(t *testing.T) { HasExactlyExternalAccessIntegrations(externalAccessIntegration). HasExactlySecrets(map[string]sdk.SchemaObjectIdentifier{"abc": secretId}). HasImports(fmt.Sprintf(`[%s]`, tmpJavaFunction.JarLocation())). + HasExactlyImportsNormalizedInAnyOrder(sdk.NormalizedPath{ + StageLocation: "~", PathOnStage: tmpJavaFunction.JarName, + }). HasHandler(handler). HasRuntimeVersion("11"). HasPackages(`[com.snowflake:snowpark:1.14.0,com.snowflake:telemetry:0.1.0]`). HasTargetPathNil(). + HasNormalizedTargetPathNil(). HasInstalledPackagesNil(). 
HasIsAggregateNil(), ) @@ -381,6 +403,78 @@ func TestInt_Functions(t *testing.T) { ) }) + t.Run("create function for Java - different stage", func(t *testing.T) { + stage, stageCleanup := testClientHelper().Stage.CreateStage(t) + t.Cleanup(stageCleanup) + + tmpJavaFunctionDifferentStage := testClientHelper().CreateSampleJavaFunctionAndJarOnStage(t, stage) + + dataType := tmpJavaFunctionDifferentStage.ArgType + id := testClientHelper().Ids.RandomSchemaObjectIdentifierWithArgumentsNewDataTypes(dataType) + + argName := "x" + argument := sdk.NewFunctionArgumentRequest(argName, dataType) + dt := sdk.NewFunctionReturnsResultDataTypeRequest(dataType) + returns := sdk.NewFunctionReturnsRequest().WithResultDataType(*dt) + handler := tmpJavaFunctionDifferentStage.JavaHandler() + importPath := tmpJavaFunctionDifferentStage.JarLocation() + + requestStaged := sdk.NewCreateForJavaFunctionRequest(id.SchemaObjectId(), *returns, handler). + WithArguments([]sdk.FunctionArgumentRequest{*argument}). + WithImports([]sdk.FunctionImportRequest{*sdk.NewFunctionImportRequest().WithImport(importPath)}) + + err := client.Functions.CreateForJava(ctx, requestStaged) + require.NoError(t, err) + t.Cleanup(testClientHelper().Function.DropFunctionFunc(t, id)) + + function, err := client.Functions.ShowByID(ctx, id) + require.NoError(t, err) + + assertions.AssertThatObject(t, objectassert.FunctionDetails(t, function.ID()). + HasImports(fmt.Sprintf(`[@"%s"."%s".%s/%s]`, stage.ID().DatabaseName(), stage.ID().SchemaName(), stage.ID().Name(), tmpJavaFunctionDifferentStage.JarName)). + HasExactlyImportsNormalizedInAnyOrder(sdk.NormalizedPath{ + StageLocation: stage.ID().FullyQualifiedName(), PathOnStage: tmpJavaFunctionDifferentStage.JarName, + }). + HasHandler(handler). + HasTargetPathNil(). 
+ HasNormalizedTargetPathNil(), + ) + }) + + // proves that we don't get default argument values from SHOW and DESCRIBE + t.Run("create function for Java - default argument value", func(t *testing.T) { + className := "TestFunc" + funcName := "echoVarchar" + argName := "x" + dataType := testdatatypes.DataTypeVarchar_100 + + id := testClientHelper().Ids.RandomSchemaObjectIdentifierWithArguments(sdk.LegacyDataTypeFrom(dataType)) + argument := sdk.NewFunctionArgumentRequest(argName, dataType).WithDefaultValue(`'abc'`) + dt := sdk.NewFunctionReturnsResultDataTypeRequest(dataType) + returns := sdk.NewFunctionReturnsRequest().WithResultDataType(*dt) + handler := fmt.Sprintf("%s.%s", className, funcName) + definition := testClientHelper().Function.SampleJavaDefinition(t, className, funcName, argName) + + request := sdk.NewCreateForJavaFunctionRequest(id.SchemaObjectId(), *returns, handler). + WithArguments([]sdk.FunctionArgumentRequest{*argument}). + WithFunctionDefinitionWrapped(definition) + + err := client.Functions.CreateForJava(ctx, request) + require.NoError(t, err) + t.Cleanup(testClientHelper().Function.DropFunctionFunc(t, id)) + + function, err := client.Functions.ShowByID(ctx, id) + require.NoError(t, err) + + assertions.AssertThatObject(t, objectassert.FunctionFromObject(t, function). + HasArgumentsRaw(fmt.Sprintf(`%[1]s(DEFAULT %[2]s) RETURN %[2]s`, function.ID().Name(), dataType.ToLegacyDataTypeSql())), + ) + + assertions.AssertThatObject(t, objectassert.FunctionDetails(t, function.ID()). + HasSignature(fmt.Sprintf(`(%s %s)`, argName, dataType.ToLegacyDataTypeSql())), + ) + }) + t.Run("create function for Javascript - inline minimal", func(t *testing.T) { dataType := testdatatypes.DataTypeFloat id := testClientHelper().Ids.RandomSchemaObjectIdentifierWithArguments(sdk.LegacyDataTypeFrom(dataType)) @@ -428,6 +522,8 @@ func TestInt_Functions(t *testing.T) { assertions.AssertThatObject(t, objectassert.FunctionDetails(t, function.ID()). 
HasSignature(fmt.Sprintf(`(%s %s)`, argName, dataType.ToLegacyDataTypeSql())). HasReturns(dataType.ToSql()). + HasReturnDataType(dataType). + HasReturnNotNull(false). HasLanguage("JAVASCRIPT"). HasBody(definition). HasNullHandling(string(sdk.NullInputBehaviorCalledOnNullInput)). @@ -435,10 +531,12 @@ func TestInt_Functions(t *testing.T) { HasExternalAccessIntegrationsNil(). HasSecretsNil(). HasImportsNil(). + HasExactlyImportsNormalizedInAnyOrder(). HasHandlerNil(). HasRuntimeVersionNil(). HasPackagesNil(). HasTargetPathNil(). + HasNormalizedTargetPathNil(). HasInstalledPackagesNil(). HasIsAggregateNil(), ) @@ -501,6 +599,8 @@ func TestInt_Functions(t *testing.T) { assertions.AssertThatObject(t, objectassert.FunctionDetails(t, function.ID()). HasSignature(fmt.Sprintf(`(%s %s)`, argName, dataType.ToLegacyDataTypeSql())). HasReturns(fmt.Sprintf(`%s NOT NULL`, dataType.ToSql())). + HasReturnDataType(dataType). + HasReturnNotNull(true). HasLanguage("JAVASCRIPT"). HasBody(definition). HasNullHandling(string(sdk.NullInputBehaviorReturnsNullInput)). @@ -508,10 +608,12 @@ func TestInt_Functions(t *testing.T) { HasExternalAccessIntegrationsNil(). HasSecretsNil(). HasImportsNil(). + HasExactlyImportsNormalizedInAnyOrder(). HasHandlerNil(). HasRuntimeVersionNil(). HasPackagesNil(). HasTargetPathNil(). + HasNormalizedTargetPathNil(). HasInstalledPackagesNil(). HasIsAggregateNil(), ) @@ -569,7 +671,9 @@ func TestInt_Functions(t *testing.T) { assertions.AssertThatObject(t, objectassert.FunctionDetails(t, function.ID()). HasSignature(fmt.Sprintf(`(%s %s)`, argName, dataType.ToLegacyDataTypeSql())). - HasReturns(strings.ReplaceAll(dataType.ToSql(), " ", "")). // TODO [SNOW-1348103]: do we care about this whitespace? + HasReturns(strings.ReplaceAll(dataType.ToSql(), " ", "")). + HasReturnDataType(dataType). + HasReturnNotNull(false). HasLanguage("PYTHON"). HasBody(definition). HasNullHandling(string(sdk.NullInputBehaviorCalledOnNullInput)). 
@@ -577,10 +681,12 @@ func TestInt_Functions(t *testing.T) { HasExternalAccessIntegrationsNil(). HasSecretsNil(). HasImports(`[]`). + HasExactlyImportsNormalizedInAnyOrder(). HasHandler(funcName). HasRuntimeVersion("3.8"). HasPackages(`[]`). HasTargetPathNil(). + HasNormalizedTargetPathNil(). HasInstalledPackagesNotEmpty(). HasIsAggregate(false), ) @@ -651,7 +757,9 @@ func TestInt_Functions(t *testing.T) { assertions.AssertThatObject(t, objectassert.FunctionDetails(t, function.ID()). HasSignature(fmt.Sprintf(`(%s %s)`, argName, dataType.ToLegacyDataTypeSql())). - HasReturns(strings.ReplaceAll(dataType.ToSql(), " ", "")+" NOT NULL"). // TODO [SNOW-1348103]: do we care about this whitespace? + HasReturns(strings.ReplaceAll(dataType.ToSql(), " ", "")+" NOT NULL"). + HasReturnDataType(dataType). + HasReturnNotNull(true). HasLanguage("PYTHON"). HasBody(definition). HasNullHandling(string(sdk.NullInputBehaviorReturnsNullInput)). @@ -659,10 +767,14 @@ func TestInt_Functions(t *testing.T) { HasExactlyExternalAccessIntegrations(externalAccessIntegration). HasExactlySecrets(map[string]sdk.SchemaObjectIdentifier{"abc": secretId}). HasImports(fmt.Sprintf(`[%s]`, tmpPythonFunction.PythonModuleLocation())). + HasExactlyImportsNormalizedInAnyOrder(sdk.NormalizedPath{ + StageLocation: "~", PathOnStage: tmpPythonFunction.PythonFileName(), + }). HasHandler(funcName). HasRuntimeVersion("3.8"). HasPackages(`['absl-py==0.10.0','about-time==4.2.1']`). HasTargetPathNil(). + HasNormalizedTargetPathNil(). HasInstalledPackagesNotEmpty(). HasIsAggregate(false), ) @@ -719,6 +831,8 @@ func TestInt_Functions(t *testing.T) { assertions.AssertThatObject(t, objectassert.FunctionDetails(t, function.ID()). HasSignature(fmt.Sprintf(`(%s %s)`, argName, dataType.ToLegacyDataTypeSql())). HasReturns(strings.ReplaceAll(dataType.ToSql(), " ", "")). + HasReturnDataType(dataType). + HasReturnNotNull(false). HasLanguage("PYTHON"). HasBodyNil(). HasNullHandling(string(sdk.NullInputBehaviorCalledOnNullInput)). 
@@ -726,10 +840,14 @@ func TestInt_Functions(t *testing.T) { HasExternalAccessIntegrationsNil(). HasSecretsNil(). HasImports(fmt.Sprintf(`[%s]`, tmpPythonFunction.PythonModuleLocation())). + HasExactlyImportsNormalizedInAnyOrder(sdk.NormalizedPath{ + StageLocation: "~", PathOnStage: tmpPythonFunction.PythonFileName(), + }). HasHandler(tmpPythonFunction.PythonHandler()). HasRuntimeVersion("3.8"). HasPackages(`[]`). HasTargetPathNil(). + HasNormalizedTargetPathNil(). HasInstalledPackagesNotEmpty(). HasIsAggregate(false), ) @@ -798,6 +916,8 @@ func TestInt_Functions(t *testing.T) { assertions.AssertThatObject(t, objectassert.FunctionDetails(t, function.ID()). HasSignature(fmt.Sprintf(`(%s %s)`, argName, dataType.ToLegacyDataTypeSql())). HasReturns(strings.ReplaceAll(dataType.ToSql(), " ", "")+" NOT NULL"). + HasReturnDataType(dataType). + HasReturnNotNull(true). HasLanguage("PYTHON"). HasBodyNil(). HasNullHandling(string(sdk.NullInputBehaviorReturnsNullInput)). @@ -805,10 +925,14 @@ func TestInt_Functions(t *testing.T) { HasExactlyExternalAccessIntegrations(externalAccessIntegration). HasExactlySecrets(map[string]sdk.SchemaObjectIdentifier{"abc": secretId}). HasImports(fmt.Sprintf(`[%s]`, tmpPythonFunction.PythonModuleLocation())). + HasExactlyImportsNormalizedInAnyOrder(sdk.NormalizedPath{ + StageLocation: "~", PathOnStage: tmpPythonFunction.PythonFileName(), + }). HasHandler(tmpPythonFunction.PythonHandler()). HasRuntimeVersion("3.8"). HasPackages(`['absl-py==0.10.0','about-time==4.2.1']`). HasTargetPathNil(). + HasNormalizedTargetPathNil(). HasInstalledPackagesNotEmpty(). HasIsAggregate(false), ) @@ -868,6 +992,8 @@ func TestInt_Functions(t *testing.T) { assertions.AssertThatObject(t, objectassert.FunctionDetails(t, function.ID()). HasSignature(fmt.Sprintf(`(%s %s)`, argName, dataType.ToLegacyDataTypeSql())). HasReturns(dataType.ToSql()). + HasReturnDataType(dataType). + HasReturnNotNull(false). HasLanguage("SCALA"). HasBody(definition). 
HasNullHandling(string(sdk.NullInputBehaviorCalledOnNullInput)). @@ -875,10 +1001,12 @@ func TestInt_Functions(t *testing.T) { HasExternalAccessIntegrationsNil(). HasSecretsNil(). HasImports(`[]`). + HasExactlyImportsNormalizedInAnyOrder(). HasHandler(handler). HasRuntimeVersion("2.12"). HasPackages(`[]`). HasTargetPathNil(). + HasNormalizedTargetPathNil(). HasInstalledPackagesNil(). HasIsAggregateNil(), ) @@ -959,6 +1087,8 @@ func TestInt_Functions(t *testing.T) { assertions.AssertThatObject(t, objectassert.FunctionDetails(t, function.ID()). HasSignature(fmt.Sprintf(`(%s %s)`, argName, dataType.ToLegacyDataTypeSql())). HasReturns(fmt.Sprintf(`%s NOT NULL`, dataType.ToSql())). + HasReturnDataType(dataType). + HasReturnNotNull(true). HasLanguage("SCALA"). HasBody(definition). HasNullHandling(string(sdk.NullInputBehaviorReturnsNullInput)). @@ -966,10 +1096,14 @@ func TestInt_Functions(t *testing.T) { HasExactlyExternalAccessIntegrations(externalAccessIntegration). HasExactlySecrets(map[string]sdk.SchemaObjectIdentifier{"abc": secretId}). HasImports(fmt.Sprintf(`[%s]`, tmpJavaFunction.JarLocation())). + HasExactlyImportsNormalizedInAnyOrder(sdk.NormalizedPath{ + StageLocation: "~", PathOnStage: tmpJavaFunction.JarName, + }). HasHandler(handler). HasRuntimeVersion("2.12"). HasPackages(`[com.snowflake:snowpark:1.14.0,com.snowflake:telemetry:0.1.0]`). HasTargetPath(targetPath). + HasNormalizedTargetPath("~", jarName). HasInstalledPackagesNil(). HasIsAggregateNil(), ) @@ -1027,6 +1161,8 @@ func TestInt_Functions(t *testing.T) { assertions.AssertThatObject(t, objectassert.FunctionDetails(t, function.ID()). HasSignature(fmt.Sprintf(`(%s %s)`, argName, dataType.ToLegacyDataTypeSql())). HasReturns(dataType.ToSql()). + HasReturnDataType(dataType). + HasReturnNotNull(false). HasLanguage("SCALA"). HasBodyNil(). HasNullHandling(string(sdk.NullInputBehaviorCalledOnNullInput)). @@ -1034,10 +1170,14 @@ func TestInt_Functions(t *testing.T) { HasExternalAccessIntegrationsNil(). 
HasSecretsNil(). HasImports(fmt.Sprintf(`[%s]`, importPath)). + HasExactlyImportsNormalizedInAnyOrder(sdk.NormalizedPath{ + StageLocation: "~", PathOnStage: tmpJavaFunction.JarName, + }). HasHandler(handler). HasRuntimeVersion("2.12"). HasPackages(`[]`). HasTargetPathNil(). + HasNormalizedTargetPathNil(). HasInstalledPackagesNil(). HasIsAggregateNil(), ) @@ -1106,6 +1246,8 @@ func TestInt_Functions(t *testing.T) { assertions.AssertThatObject(t, objectassert.FunctionDetails(t, function.ID()). HasSignature(fmt.Sprintf(`(%s %s)`, argName, dataType.ToLegacyDataTypeSql())). HasReturns(fmt.Sprintf(`%s NOT NULL`, dataType.ToSql())). + HasReturnDataType(dataType). + HasReturnNotNull(true). HasLanguage("SCALA"). HasBodyNil(). HasNullHandling(string(sdk.NullInputBehaviorReturnsNullInput)). @@ -1113,10 +1255,14 @@ func TestInt_Functions(t *testing.T) { HasExactlyExternalAccessIntegrations(externalAccessIntegration). HasExactlySecrets(map[string]sdk.SchemaObjectIdentifier{"abc": secretId}). HasImports(fmt.Sprintf(`[%s]`, tmpJavaFunction.JarLocation())). + HasExactlyImportsNormalizedInAnyOrder(sdk.NormalizedPath{ + StageLocation: "~", PathOnStage: tmpJavaFunction.JarName, + }). HasHandler(handler). HasRuntimeVersion("2.12"). HasPackages(`[com.snowflake:snowpark:1.14.0,com.snowflake:telemetry:0.1.0]`). HasTargetPathNil(). + HasNormalizedTargetPathNil(). HasInstalledPackagesNil(). HasIsAggregateNil(), ) @@ -1173,6 +1319,8 @@ func TestInt_Functions(t *testing.T) { assertions.AssertThatObject(t, objectassert.FunctionDetails(t, function.ID()). HasSignature(fmt.Sprintf(`(%s %s)`, argName, dataType.ToLegacyDataTypeSql())). HasReturns(dataType.ToSql()). + HasReturnDataType(dataType). + HasReturnNotNull(false). HasLanguage("SQL"). HasBody(definition). HasNullHandlingNil(). @@ -1180,10 +1328,12 @@ func TestInt_Functions(t *testing.T) { HasExternalAccessIntegrationsNil(). HasSecretsNil(). HasImportsNil(). + HasExactlyImportsNormalizedInAnyOrder(). HasHandlerNil(). HasRuntimeVersionNil(). 
HasPackagesNil(). HasTargetPathNil(). + HasNormalizedTargetPathNil(). HasInstalledPackagesNil(). HasIsAggregateNil(), ) @@ -1194,6 +1344,35 @@ func TestInt_Functions(t *testing.T) { ) }) + // proves that we don't get default argument values from SHOW and DESCRIBE + t.Run("create function for SQL - default argument value", func(t *testing.T) { + argName := "x" + dataType := testdatatypes.DataTypeFloat + id := testClientHelper().Ids.RandomSchemaObjectIdentifierWithArguments(sdk.LegacyDataTypeFrom(dataType)) + + definition := testClientHelper().Function.SampleSqlDefinition(t) + dt := sdk.NewFunctionReturnsResultDataTypeRequest(dataType) + returns := sdk.NewFunctionReturnsRequest().WithResultDataType(*dt) + argument := sdk.NewFunctionArgumentRequest(argName, dataType).WithDefaultValue("3.123") + request := sdk.NewCreateForSQLFunctionRequestDefinitionWrapped(id.SchemaObjectId(), *returns, definition). + WithArguments([]sdk.FunctionArgumentRequest{*argument}) + + err := client.Functions.CreateForSQL(ctx, request) + require.NoError(t, err) + t.Cleanup(testClientHelper().Function.DropFunctionFunc(t, id)) + + function, err := client.Functions.ShowByID(ctx, id) + require.NoError(t, err) + + assertions.AssertThatObject(t, objectassert.FunctionFromObject(t, function). + HasArgumentsRaw(fmt.Sprintf(`%[1]s(DEFAULT %[2]s) RETURN %[2]s`, function.ID().Name(), dataType.ToLegacyDataTypeSql())), + ) + + assertions.AssertThatObject(t, objectassert.FunctionDetails(t, function.ID()). + HasSignature(fmt.Sprintf(`(%s %s)`, argName, dataType.ToLegacyDataTypeSql())), + ) + }) + t.Run("create function for SQL - inline full", func(t *testing.T) { argName := "x" dataType := testdatatypes.DataTypeFloat @@ -1246,6 +1425,8 @@ func TestInt_Functions(t *testing.T) { assertions.AssertThatObject(t, objectassert.FunctionDetails(t, function.ID()). HasSignature(fmt.Sprintf(`(%s %s)`, argName, dataType.ToLegacyDataTypeSql())). HasReturns(fmt.Sprintf(`%s NOT NULL`, dataType.ToSql())). 
+ HasReturnDataType(dataType). + HasReturnNotNull(true). HasLanguage("SQL"). HasBody(definition). HasNullHandlingNil(). @@ -1255,10 +1436,12 @@ func TestInt_Functions(t *testing.T) { HasExternalAccessIntegrationsNil(). HasSecretsNil(). HasImportsNil(). + HasExactlyImportsNormalizedInAnyOrder(). HasHandlerNil(). HasRuntimeVersionNil(). HasPackagesNil(). HasTargetPathNil(). + HasNormalizedTargetPathNil(). HasInstalledPackagesNil(). HasIsAggregateNil(), ) @@ -1312,6 +1495,8 @@ func TestInt_Functions(t *testing.T) { assertions.AssertThatObject(t, objectassert.FunctionDetails(t, function.ID()). HasSignature("()"). HasReturns(dataType.ToSql()). + HasReturnDataType(dataType). + HasReturnNotNull(false). HasLanguage("SQL"). HasBody(definition). HasNullHandlingNil(). @@ -1319,10 +1504,12 @@ func TestInt_Functions(t *testing.T) { HasExternalAccessIntegrationsNil(). HasSecretsNil(). HasImportsNil(). + HasExactlyImportsNormalizedInAnyOrder(). HasHandlerNil(). HasRuntimeVersionNil(). HasPackagesNil(). HasTargetPathNil(). + HasNormalizedTargetPathNil(). HasInstalledPackagesNil(). 
HasIsAggregateNil(), ) diff --git a/pkg/sdk/testint/procedures_integration_test.go b/pkg/sdk/testint/procedures_integration_test.go index 2a69ef42c2..6e0298308e 100644 --- a/pkg/sdk/testint/procedures_integration_test.go +++ b/pkg/sdk/testint/procedures_integration_test.go @@ -37,7 +37,7 @@ func TestInt_Procedures(t *testing.T) { externalAccessIntegration, externalAccessIntegrationCleanup := testClientHelper().ExternalAccessIntegration.CreateExternalAccessIntegrationWithNetworkRuleAndSecret(t, networkRule.ID(), secret.ID()) t.Cleanup(externalAccessIntegrationCleanup) - tmpJavaProcedure := testClientHelper().CreateSampleJavaProcedureAndJar(t) + tmpJavaProcedure := testClientHelper().CreateSampleJavaProcedureAndJarOnUserStage(t) tmpPythonFunction := testClientHelper().CreateSamplePythonFunctionAndModule(t) assertParametersSet := func(t *testing.T, procedureParametersAssert *objectparametersassert.ProcedureParametersAssert) { @@ -354,6 +354,43 @@ func TestInt_Procedures(t *testing.T) { ) }) + t.Run("create procedure for Java - different stage", func(t *testing.T) { + stage, stageCleanup := testClientHelper().Stage.CreateStage(t) + t.Cleanup(stageCleanup) + + tmpJavaProcedureDifferentStage := testClientHelper().CreateSampleJavaProcedureAndJarOnStage(t, stage) + + dataType := tmpJavaProcedureDifferentStage.ArgType + id := testClientHelper().Ids.RandomSchemaObjectIdentifierWithArgumentsNewDataTypes(dataType) + + argName := "x" + argument := sdk.NewProcedureArgumentRequest(argName, dataType) + dt := sdk.NewProcedureReturnsResultDataTypeRequest(dataType) + returns := sdk.NewProcedureReturnsRequest().WithResultDataType(*dt) + handler := tmpJavaProcedureDifferentStage.JavaHandler() + importPath := tmpJavaProcedureDifferentStage.JarLocation() + packages := []sdk.ProcedurePackageRequest{ + *sdk.NewProcedurePackageRequest("com.snowflake:snowpark:1.14.0"), + } + + requestStaged := sdk.NewCreateForJavaProcedureRequest(id.SchemaObjectId(), *returns, "11", packages, handler). 
+ WithArguments([]sdk.ProcedureArgumentRequest{*argument}). + WithImports([]sdk.ProcedureImportRequest{*sdk.NewProcedureImportRequest(importPath)}) + + err := client.Procedures.CreateForJava(ctx, requestStaged) + require.NoError(t, err) + t.Cleanup(testClientHelper().Procedure.DropProcedureFunc(t, id)) + + function, err := client.Procedures.ShowByID(ctx, id) + require.NoError(t, err) + + assertions.AssertThatObject(t, objectassert.ProcedureDetails(t, function.ID()). + HasImports(fmt.Sprintf(`[@"%s"."%s".%s/%s]`, stage.ID().DatabaseName(), stage.ID().SchemaName(), stage.ID().Name(), tmpJavaProcedureDifferentStage.JarName)). + HasHandler(handler). + HasTargetPathNil(), + ) + }) + t.Run("create procedure for Javascript - inline minimal", func(t *testing.T) { dataType := testdatatypes.DataTypeFloat id := testClientHelper().Ids.RandomSchemaObjectIdentifierWithArguments(sdk.LegacyDataTypeFrom(dataType)) From 933335f56d1e53bf3e95d1f552672f35425b4878 Mon Sep 17 00:00:00 2001 From: Artur Sawicki Date: Thu, 12 Dec 2024 12:15:24 +0100 Subject: [PATCH 02/20] feat: Basic procedures implementation (#3271) Prepare most of the java procedure resource implementation (based on #3269, check it for details); additionally: - extracted more common functions to reuse between functions and procedures - left TODO for some that we duplicate for now Next PRs: - handle secrets, external access integrations, packages, return not null, and comments - TABLE function improvements and tests - Add PR with all other function types - datasources --- docs/resources/procedure_java.md | 28 +- docs/resources/procedure_javascript.md | 4 + docs/resources/procedure_python.md | 17 +- docs/resources/procedure_scala.md | 28 +- docs/resources/procedure_sql.md | 4 + .../procedure_describe_snowflake_ext.go | 69 +++ .../procedure_java_resource_ext.go | 17 + .../procedure_resource_parameters_ext.go | 13 + .../config/model/procedure_java_model_ext.go | 73 +++ .../config/model/procedure_java_model_gen.go | 9 +- 
.../model/procedure_python_model_gen.go | 4 - .../config/model/procedure_scala_model_gen.go | 9 +- pkg/acceptance/helpers/procedure_client.go | 36 ++ .../function_and_procedure_commons.go | 54 +++ pkg/resources/function_commons.go | 50 +- pkg/resources/function_java.go | 10 +- pkg/resources/procedure_commons.go | 187 +++++++- pkg/resources/procedure_java.go | 159 ++++++- .../procedure_java_acceptance_test.go | 429 ++++++++++++++++++ pkg/resources/procedure_parameters.go | 12 +- pkg/sdk/functions_and_procedures_commons.go | 118 +++++ pkg/sdk/functions_ext.go | 117 +---- pkg/sdk/functions_ext_test.go | 14 +- pkg/sdk/procedures_ext.go | 39 +- .../testint/procedures_integration_test.go | 216 ++++++++- 25 files changed, 1495 insertions(+), 221 deletions(-) create mode 100644 pkg/acceptance/bettertestspoc/assert/resourceassert/procedure_java_resource_ext.go create mode 100644 pkg/acceptance/bettertestspoc/assert/resourceparametersassert/procedure_resource_parameters_ext.go create mode 100644 pkg/resources/function_and_procedure_commons.go create mode 100644 pkg/resources/procedure_java_acceptance_test.go create mode 100644 pkg/sdk/functions_and_procedures_commons.go diff --git a/docs/resources/procedure_java.md b/docs/resources/procedure_java.md index dbb5f2eba3..edc8047672 100644 --- a/docs/resources/procedure_java.md +++ b/docs/resources/procedure_java.md @@ -19,7 +19,6 @@ Resource used to manage java procedure objects. For more information, check [pro - `database` (String) The database in which to create the procedure. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"`. - `handler` (String) Use the fully qualified name of the method or function for the stored procedure. 
This is typically in the following form `com.my_company.my_package.MyClass.myMethod` where `com.my_company.my_package` corresponds to the package containing the object or class: `package com.my_company.my_package;`. - `name` (String) The name of the procedure; the identifier does not need to be unique for the schema in which the procedure is created because stored procedures are [identified and resolved by the combination of the name and argument types](https://docs.snowflake.com/en/developer-guide/udf-stored-procedure-naming-conventions.html#label-procedure-function-name-overloading). Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"`. -- `procedure_definition` (String) Defines the code executed by the stored procedure. The definition can consist of any valid code. Wrapping `$$` signs are added by the provider automatically; do not include them. The `procedure_definition` value must be Java source code. For more information, see [Java (using Snowpark)](https://docs.snowflake.com/en/developer-guide/stored-procedure/stored-procedures-java). To mitigate permadiff on this field, the provider replaces blank characters with a space. This can lead to false positives in cases where a change in case or run of whitespace is semantically significant. - `return_type` (String) Specifies the type of the result returned by the stored procedure. For ``, use the Snowflake data type that corresponds to the type of the language that you are using (see [SQL-Java Data Type Mappings](https://docs.snowflake.com/en/developer-guide/udf-stored-procedure-data-type-mapping.html#label-sql-java-data-type-mappings)). For `RETURNS TABLE ( [ col_name col_data_type [ , ... 
] ] )`, if you know the Snowflake data types of the columns in the returned table, specify the column names and types. Otherwise (e.g. if you are determining the column types during run time), you can omit the column names and types (i.e. `TABLE ()`). - `runtime_version` (String) The language runtime version to use. Currently, the supported versions are: 11. - `schema` (String) The schema in which to create the procedure. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"`. @@ -32,14 +31,15 @@ Resource used to manage java procedure objects. For more information, check [pro - `enable_console_output` (Boolean) Enable stdout/stderr fast path logging for anonyous stored procs. This is a public parameter (similar to LOG_LEVEL). For more information, check [ENABLE_CONSOLE_OUTPUT docs](https://docs.snowflake.com/en/sql-reference/parameters#enable-console-output). - `execute_as` (String) Specifies whether the stored procedure executes with the privileges of the owner (an “owner’s rights” stored procedure) or with the privileges of the caller (a “caller’s rights” stored procedure). If you execute the statement CREATE PROCEDURE … EXECUTE AS CALLER, then in the future the procedure will execute as a caller’s rights procedure. If you execute CREATE PROCEDURE … EXECUTE AS OWNER, then the procedure will execute as an owner’s rights procedure. For more information, see [Understanding caller’s rights and owner’s rights stored procedures](https://docs.snowflake.com/en/developer-guide/stored-procedure/stored-procedures-rights). Valid values are (case-insensitive): `EXECUTE AS CALLER` | `EXECUTE AS OWNER`. 
- `external_access_integrations` (Set of String) The names of [external access integrations](https://docs.snowflake.com/en/sql-reference/sql/create-external-access-integration) needed in order for this procedure’s handler code to access external networks. An external access integration specifies [network rules](https://docs.snowflake.com/en/sql-reference/sql/create-network-rule) and [secrets](https://docs.snowflake.com/en/sql-reference/sql/create-secret) that specify external locations and credentials (if any) allowed for use by handler code when making requests of an external network, such as an external REST API. -- `imports` (Set of String) The location (stage), path, and name of the file(s) to import. You must set the IMPORTS clause to include any files that your stored procedure depends on. If you are writing an in-line stored procedure, you can omit this clause, unless your code depends on classes defined outside the stored procedure or resource files. If you are writing a stored procedure with a staged handler, you must also include a path to the JAR file containing the stored procedure’s handler code. The IMPORTS definition cannot reference variables from arguments that are passed into the stored procedure. Each file in the IMPORTS clause must have a unique name, even if the files are in different subdirectories or different stages. +- `imports` (Block Set) The location (stage), path, and name of the file(s) to import. You must set the IMPORTS clause to include any files that your stored procedure depends on. If you are writing an in-line stored procedure, you can omit this clause, unless your code depends on classes defined outside the stored procedure or resource files. If you are writing a stored procedure with a staged handler, you must also include a path to the JAR file containing the stored procedure’s handler code. The IMPORTS definition cannot reference variables from arguments that are passed into the stored procedure. 
Each file in the IMPORTS clause must have a unique name, even if the files are in different subdirectories or different stages. (see [below for nested schema](#nestedblock--imports)) - `is_secure` (String) Specifies that the procedure is secure. For more information about secure procedures, see [Protecting Sensitive Information with Secure UDFs and Stored Procedures](https://docs.snowflake.com/en/developer-guide/secure-udf-procedure). Available options are: "true" or "false". When the value is not set in the configuration the provider will put "default" there which means to use the Snowflake default for this value. - `log_level` (String) LOG_LEVEL to use when filtering events For more information, check [LOG_LEVEL docs](https://docs.snowflake.com/en/sql-reference/parameters#log-level). - `metric_level` (String) METRIC_LEVEL value to control whether to emit metrics to Event Table For more information, check [METRIC_LEVEL docs](https://docs.snowflake.com/en/sql-reference/parameters#metric-level). - `null_input_behavior` (String) Specifies the behavior of the procedure when called with null inputs. Valid values are (case-insensitive): `CALLED ON NULL INPUT` | `RETURNS NULL ON NULL INPUT`. - `packages` (Set of String) List of the names of packages deployed in Snowflake that should be included in the handler code’s execution environment. The Snowpark package is required for stored procedures, but is specified in the `snowpark_package` attribute. For more information about Snowpark, see [Snowpark API](https://docs.snowflake.com/en/developer-guide/snowpark/index). +- `procedure_definition` (String) Defines the code executed by the stored procedure. The definition can consist of any valid code. Wrapping `$$` signs are added by the provider automatically; do not include them. The `procedure_definition` value must be Java source code. For more information, see [Java (using Snowpark)](https://docs.snowflake.com/en/developer-guide/stored-procedure/stored-procedures-java). 
To mitigate permadiff on this field, the provider replaces blank characters with a space. This can lead to false positives in cases where a change in case or run of whitespace is semantically significant. - `secrets` (Block Set) Assigns the names of [secrets](https://docs.snowflake.com/en/sql-reference/sql/create-secret) to variables so that you can use the variables to reference the secrets when retrieving information from secrets in handler code. Secrets you specify here must be allowed by the [external access integration](https://docs.snowflake.com/en/sql-reference/sql/create-external-access-integration) specified as a value of this CREATE FUNCTION command’s EXTERNAL_ACCESS_INTEGRATIONS parameter. (see [below for nested schema](#nestedblock--secrets)) -- `target_path` (String) Use the fully qualified name of the method or function for the stored procedure. This is typically in the following form `com.my_company.my_package.MyClass.myMethod` where `com.my_company.my_package` corresponds to the package containing the object or class: `package com.my_company.my_package;`. +- `target_path` (Block Set, Max: 1) Use the fully qualified name of the method or function for the stored procedure. This is typically in the following form `com.my_company.my_package.MyClass.myMethod` where `com.my_company.my_package` corresponds to the package containing the object or class: `package com.my_company.my_package;`. (see [below for nested schema](#nestedblock--target_path)) - `trace_level` (String) Trace level value to use when generating/filtering trace events For more information, check [TRACE_LEVEL docs](https://docs.snowflake.com/en/sql-reference/parameters#trace-level). ### Read-Only @@ -58,6 +58,19 @@ Required: - `arg_data_type` (String) The argument type. - `arg_name` (String) The argument name. +Optional: + +- `arg_default_value` (String) Optional default value for the argument. For text values use single quotes. Numeric values can be unquoted. 
External changes for this field won't be detected. In case you want to apply external changes, you can re-create the resource manually using "terraform taint". + + + +### Nested Schema for `imports` + +Required: + +- `path_on_stage` (String) Path for import on stage, without the leading `/`. +- `stage_location` (String) Stage location without leading `@`. To use your user's stage set this to `~`, otherwise pass fully qualified name of the stage (with every part contained in double quotes or use `snowflake_stage..fully_qualified_name` if you manage this stage through terraform). + ### Nested Schema for `secrets` @@ -68,6 +81,15 @@ Required: - `secret_variable_name` (String) The variable that will be used in handler code when retrieving information from the secret. + +### Nested Schema for `target_path` + +Required: + +- `path_on_stage` (String) Path for import on stage, without the leading `/`. +- `stage_location` (String) Stage location without leading `@`. To use your user's stage set this to `~`, otherwise pass fully qualified name of the stage (with every part contained in double quotes or use `snowflake_stage..fully_qualified_name` if you manage this stage through terraform). + + ### Nested Schema for `parameters` diff --git a/docs/resources/procedure_javascript.md b/docs/resources/procedure_javascript.md index a9364db4cf..a8d0ee9db2 100644 --- a/docs/resources/procedure_javascript.md +++ b/docs/resources/procedure_javascript.md @@ -50,6 +50,10 @@ Required: - `arg_data_type` (String) The argument type. - `arg_name` (String) The argument name. +Optional: + +- `arg_default_value` (String) Optional default value for the argument. For text values use single quotes. Numeric values can be unquoted. External changes for this field won't be detected. In case you want to apply external changes, you can re-create the resource manually using "terraform taint". 
+ ### Nested Schema for `parameters` diff --git a/docs/resources/procedure_python.md b/docs/resources/procedure_python.md index a28cf0d0b5..8761764754 100644 --- a/docs/resources/procedure_python.md +++ b/docs/resources/procedure_python.md @@ -19,7 +19,6 @@ Resource used to manage python procedure objects. For more information, check [p - `database` (String) The database in which to create the procedure. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"`. - `handler` (String) Use the name of the stored procedure’s function or method. This can differ depending on whether the code is in-line or referenced at a stage. When the code is in-line, you can specify just the function name. When the code is imported from a stage, specify the fully-qualified handler function name as `.`. - `name` (String) The name of the procedure; the identifier does not need to be unique for the schema in which the procedure is created because stored procedures are [identified and resolved by the combination of the name and argument types](https://docs.snowflake.com/en/developer-guide/udf-stored-procedure-naming-conventions.html#label-procedure-function-name-overloading). Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"`. -- `procedure_definition` (String) Defines the code executed by the stored procedure. The definition can consist of any valid code. Wrapping `$$` signs are added by the provider automatically; do not include them. The `procedure_definition` value must be Python source code. 
For more information, see [Python (using Snowpark)](https://docs.snowflake.com/en/developer-guide/stored-procedure/python/procedure-python-overview). To mitigate permadiff on this field, the provider replaces blank characters with a space. This can lead to false positives in cases where a change in case or run of whitespace is semantically significant. - `return_type` (String) Specifies the type of the result returned by the stored procedure. For ``, use the Snowflake data type that corresponds to the type of the language that you are using (see [SQL-Python Data Type Mappings](https://docs.snowflake.com/en/developer-guide/udf-stored-procedure-data-type-mapping.html#label-sql-python-data-type-mappings)). For `RETURNS TABLE ( [ col_name col_data_type [ , ... ] ] )`, if you know the Snowflake data types of the columns in the returned table, specify the column names and types. Otherwise (e.g. if you are determining the column types during run time), you can omit the column names and types (i.e. `TABLE ()`). - `runtime_version` (String) The language runtime version to use. Currently, the supported versions are: 3.9, 3.10, and 3.11. - `schema` (String) The schema in which to create the procedure. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"`. @@ -32,12 +31,13 @@ Resource used to manage python procedure objects. For more information, check [p - `enable_console_output` (Boolean) Enable stdout/stderr fast path logging for anonyous stored procs. This is a public parameter (similar to LOG_LEVEL). For more information, check [ENABLE_CONSOLE_OUTPUT docs](https://docs.snowflake.com/en/sql-reference/parameters#enable-console-output). 
- `execute_as` (String) Specifies whether the stored procedure executes with the privileges of the owner (an “owner’s rights” stored procedure) or with the privileges of the caller (a “caller’s rights” stored procedure). If you execute the statement CREATE PROCEDURE … EXECUTE AS CALLER, then in the future the procedure will execute as a caller’s rights procedure. If you execute CREATE PROCEDURE … EXECUTE AS OWNER, then the procedure will execute as an owner’s rights procedure. For more information, see [Understanding caller’s rights and owner’s rights stored procedures](https://docs.snowflake.com/en/developer-guide/stored-procedure/stored-procedures-rights). Valid values are (case-insensitive): `EXECUTE AS CALLER` | `EXECUTE AS OWNER`. - `external_access_integrations` (Set of String) The names of [external access integrations](https://docs.snowflake.com/en/sql-reference/sql/create-external-access-integration) needed in order for this procedure’s handler code to access external networks. An external access integration specifies [network rules](https://docs.snowflake.com/en/sql-reference/sql/create-network-rule) and [secrets](https://docs.snowflake.com/en/sql-reference/sql/create-secret) that specify external locations and credentials (if any) allowed for use by handler code when making requests of an external network, such as an external REST API. -- `imports` (Set of String) The location (stage), path, and name of the file(s) to import. You must set the IMPORTS clause to include any files that your stored procedure depends on. If you are writing an in-line stored procedure, you can omit this clause, unless your code depends on classes defined outside the stored procedure or resource files. If your stored procedure’s code will be on a stage, you must also include a path to the module file your code is in. The IMPORTS definition cannot reference variables from arguments that are passed into the stored procedure. 
Each file in the IMPORTS clause must have a unique name, even if the files are in different subdirectories or different stages. +- `imports` (Block Set) The location (stage), path, and name of the file(s) to import. You must set the IMPORTS clause to include any files that your stored procedure depends on. If you are writing an in-line stored procedure, you can omit this clause, unless your code depends on classes defined outside the stored procedure or resource files. If your stored procedure’s code will be on a stage, you must also include a path to the module file your code is in. The IMPORTS definition cannot reference variables from arguments that are passed into the stored procedure. Each file in the IMPORTS clause must have a unique name, even if the files are in different subdirectories or different stages. (see [below for nested schema](#nestedblock--imports)) - `is_secure` (String) Specifies that the procedure is secure. For more information about secure procedures, see [Protecting Sensitive Information with Secure UDFs and Stored Procedures](https://docs.snowflake.com/en/developer-guide/secure-udf-procedure). Available options are: "true" or "false". When the value is not set in the configuration the provider will put "default" there which means to use the Snowflake default for this value. - `log_level` (String) LOG_LEVEL to use when filtering events For more information, check [LOG_LEVEL docs](https://docs.snowflake.com/en/sql-reference/parameters#log-level). - `metric_level` (String) METRIC_LEVEL value to control whether to emit metrics to Event Table For more information, check [METRIC_LEVEL docs](https://docs.snowflake.com/en/sql-reference/parameters#metric-level). - `null_input_behavior` (String) Specifies the behavior of the procedure when called with null inputs. Valid values are (case-insensitive): `CALLED ON NULL INPUT` | `RETURNS NULL ON NULL INPUT`. 
- `packages` (Set of String) List of the names of packages deployed in Snowflake that should be included in the handler code’s execution environment. The Snowpark package is required for stored procedures, but is specified in the `snowpark_package` attribute. For more information about Snowpark, see [Snowpark API](https://docs.snowflake.com/en/developer-guide/snowpark/index). +- `procedure_definition` (String) Defines the code executed by the stored procedure. The definition can consist of any valid code. Wrapping `$$` signs are added by the provider automatically; do not include them. The `procedure_definition` value must be Python source code. For more information, see [Python (using Snowpark)](https://docs.snowflake.com/en/developer-guide/stored-procedure/python/procedure-python-overview). To mitigate permadiff on this field, the provider replaces blank characters with a space. This can lead to false positives in cases where a change in case or run of whitespace is semantically significant. - `secrets` (Block Set) Assigns the names of [secrets](https://docs.snowflake.com/en/sql-reference/sql/create-secret) to variables so that you can use the variables to reference the secrets when retrieving information from secrets in handler code. Secrets you specify here must be allowed by the [external access integration](https://docs.snowflake.com/en/sql-reference/sql/create-external-access-integration) specified as a value of this CREATE FUNCTION command’s EXTERNAL_ACCESS_INTEGRATIONS parameter. (see [below for nested schema](#nestedblock--secrets)) - `trace_level` (String) Trace level value to use when generating/filtering trace events For more information, check [TRACE_LEVEL docs](https://docs.snowflake.com/en/sql-reference/parameters#trace-level). @@ -57,6 +57,19 @@ Required: - `arg_data_type` (String) The argument type. - `arg_name` (String) The argument name. +Optional: + +- `arg_default_value` (String) Optional default value for the argument. 
For text values use single quotes. Numeric values can be unquoted. External changes for this field won't be detected. In case you want to apply external changes, you can re-create the resource manually using "terraform taint". + + + +### Nested Schema for `imports` + +Required: + +- `path_on_stage` (String) Path for import on stage, without the leading `/`. +- `stage_location` (String) Stage location without leading `@`. To use your user's stage set this to `~`, otherwise pass fully qualified name of the stage (with every part contained in double quotes or use `snowflake_stage..fully_qualified_name` if you manage this stage through terraform). + ### Nested Schema for `secrets` diff --git a/docs/resources/procedure_scala.md b/docs/resources/procedure_scala.md index 692fb569b1..ef76be8b1d 100644 --- a/docs/resources/procedure_scala.md +++ b/docs/resources/procedure_scala.md @@ -19,7 +19,6 @@ Resource used to manage scala procedure objects. For more information, check [pr - `database` (String) The database in which to create the procedure. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"`. - `handler` (String) Use the fully qualified name of the method or function for the stored procedure. This is typically in the following form: `com.my_company.my_package.MyClass.myMethod` where `com.my_company.my_package` corresponds to the package containing the object or class: `package com.my_company.my_package;`. 
- `name` (String) The name of the procedure; the identifier does not need to be unique for the schema in which the procedure is created because stored procedures are [identified and resolved by the combination of the name and argument types](https://docs.snowflake.com/en/developer-guide/udf-stored-procedure-naming-conventions.html#label-procedure-function-name-overloading). Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"`. -- `procedure_definition` (String) Defines the code executed by the stored procedure. The definition can consist of any valid code. Wrapping `$$` signs are added by the provider automatically; do not include them. The `procedure_definition` value must be Scala source code. For more information, see [Scala (using Snowpark)](https://docs.snowflake.com/en/developer-guide/stored-procedure/stored-procedures-scala). To mitigate permadiff on this field, the provider replaces blank characters with a space. This can lead to false positives in cases where a change in case or run of whitespace is semantically significant. - `return_type` (String) Specifies the type of the result returned by the stored procedure. For ``, use the Snowflake data type that corresponds to the type of the language that you are using (see [SQL-Scala Data Type Mappings](https://docs.snowflake.com/en/developer-guide/udf-stored-procedure-data-type-mapping.html#label-sql-types-to-scala-types)). For `RETURNS TABLE ( [ col_name col_data_type [ , ... ] ] )`, if you know the Snowflake data types of the columns in the returned table, specify the column names and types. Otherwise (e.g. if you are determining the column types during run time), you can omit the column names and types (i.e. `TABLE ()`). 
- `runtime_version` (String) The language runtime version to use. Currently, the supported versions are: 2.12. - `schema` (String) The schema in which to create the procedure. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"`. @@ -32,14 +31,15 @@ Resource used to manage scala procedure objects. For more information, check [pr - `enable_console_output` (Boolean) Enable stdout/stderr fast path logging for anonyous stored procs. This is a public parameter (similar to LOG_LEVEL). For more information, check [ENABLE_CONSOLE_OUTPUT docs](https://docs.snowflake.com/en/sql-reference/parameters#enable-console-output). - `execute_as` (String) Specifies whether the stored procedure executes with the privileges of the owner (an “owner’s rights” stored procedure) or with the privileges of the caller (a “caller’s rights” stored procedure). If you execute the statement CREATE PROCEDURE … EXECUTE AS CALLER, then in the future the procedure will execute as a caller’s rights procedure. If you execute CREATE PROCEDURE … EXECUTE AS OWNER, then the procedure will execute as an owner’s rights procedure. For more information, see [Understanding caller’s rights and owner’s rights stored procedures](https://docs.snowflake.com/en/developer-guide/stored-procedure/stored-procedures-rights). Valid values are (case-insensitive): `EXECUTE AS CALLER` | `EXECUTE AS OWNER`. - `external_access_integrations` (Set of String) The names of [external access integrations](https://docs.snowflake.com/en/sql-reference/sql/create-external-access-integration) needed in order for this procedure’s handler code to access external networks. 
An external access integration specifies [network rules](https://docs.snowflake.com/en/sql-reference/sql/create-network-rule) and [secrets](https://docs.snowflake.com/en/sql-reference/sql/create-secret) that specify external locations and credentials (if any) allowed for use by handler code when making requests of an external network, such as an external REST API. -- `imports` (Set of String) The location (stage), path, and name of the file(s) to import. You must set the IMPORTS clause to include any files that your stored procedure depends on. If you are writing an in-line stored procedure, you can omit this clause, unless your code depends on classes defined outside the stored procedure or resource files. If you are writing a stored procedure with a staged handler, you must also include a path to the JAR file containing the stored procedure’s handler code. The IMPORTS definition cannot reference variables from arguments that are passed into the stored procedure. Each file in the IMPORTS clause must have a unique name, even if the files are in different subdirectories or different stages. +- `imports` (Block Set) The location (stage), path, and name of the file(s) to import. You must set the IMPORTS clause to include any files that your stored procedure depends on. If you are writing an in-line stored procedure, you can omit this clause, unless your code depends on classes defined outside the stored procedure or resource files. If you are writing a stored procedure with a staged handler, you must also include a path to the JAR file containing the stored procedure’s handler code. The IMPORTS definition cannot reference variables from arguments that are passed into the stored procedure. Each file in the IMPORTS clause must have a unique name, even if the files are in different subdirectories or different stages. (see [below for nested schema](#nestedblock--imports)) - `is_secure` (String) Specifies that the procedure is secure. 
For more information about secure procedures, see [Protecting Sensitive Information with Secure UDFs and Stored Procedures](https://docs.snowflake.com/en/developer-guide/secure-udf-procedure). Available options are: "true" or "false". When the value is not set in the configuration the provider will put "default" there which means to use the Snowflake default for this value. - `log_level` (String) LOG_LEVEL to use when filtering events For more information, check [LOG_LEVEL docs](https://docs.snowflake.com/en/sql-reference/parameters#log-level). - `metric_level` (String) METRIC_LEVEL value to control whether to emit metrics to Event Table For more information, check [METRIC_LEVEL docs](https://docs.snowflake.com/en/sql-reference/parameters#metric-level). - `null_input_behavior` (String) Specifies the behavior of the procedure when called with null inputs. Valid values are (case-insensitive): `CALLED ON NULL INPUT` | `RETURNS NULL ON NULL INPUT`. - `packages` (Set of String) List of the names of packages deployed in Snowflake that should be included in the handler code’s execution environment. The Snowpark package is required for stored procedures, but is specified in the `snowpark_package` attribute. For more information about Snowpark, see [Snowpark API](https://docs.snowflake.com/en/developer-guide/snowpark/index). +- `procedure_definition` (String) Defines the code executed by the stored procedure. The definition can consist of any valid code. Wrapping `$$` signs are added by the provider automatically; do not include them. The `procedure_definition` value must be Scala source code. For more information, see [Scala (using Snowpark)](https://docs.snowflake.com/en/developer-guide/stored-procedure/stored-procedures-scala). To mitigate permadiff on this field, the provider replaces blank characters with a space. This can lead to false positives in cases where a change in case or run of whitespace is semantically significant. 
- `secrets` (Block Set) Assigns the names of [secrets](https://docs.snowflake.com/en/sql-reference/sql/create-secret) to variables so that you can use the variables to reference the secrets when retrieving information from secrets in handler code. Secrets you specify here must be allowed by the [external access integration](https://docs.snowflake.com/en/sql-reference/sql/create-external-access-integration) specified as a value of this CREATE FUNCTION command’s EXTERNAL_ACCESS_INTEGRATIONS parameter. (see [below for nested schema](#nestedblock--secrets)) -- `target_path` (String) Use the fully qualified name of the method or function for the stored procedure. This is typically in the following form: `com.my_company.my_package.MyClass.myMethod` where `com.my_company.my_package` corresponds to the package containing the object or class: `package com.my_company.my_package;`. +- `target_path` (Block Set, Max: 1) Use the fully qualified name of the method or function for the stored procedure. This is typically in the following form: `com.my_company.my_package.MyClass.myMethod` where `com.my_company.my_package` corresponds to the package containing the object or class: `package com.my_company.my_package;`. (see [below for nested schema](#nestedblock--target_path)) - `trace_level` (String) Trace level value to use when generating/filtering trace events For more information, check [TRACE_LEVEL docs](https://docs.snowflake.com/en/sql-reference/parameters#trace-level). ### Read-Only @@ -58,6 +58,19 @@ Required: - `arg_data_type` (String) The argument type. - `arg_name` (String) The argument name. +Optional: + +- `arg_default_value` (String) Optional default value for the argument. For text values use single quotes. Numeric values can be unquoted. External changes for this field won't be detected. In case you want to apply external changes, you can re-create the resource manually using "terraform taint". 
+ + + +### Nested Schema for `imports` + +Required: + +- `path_on_stage` (String) Path for import on stage, without the leading `/`. +- `stage_location` (String) Stage location without leading `@`. To use your user's stage set this to `~`, otherwise pass fully qualified name of the stage (with every part contained in double quotes or use `snowflake_stage..fully_qualified_name` if you manage this stage through terraform). + ### Nested Schema for `secrets` @@ -68,6 +81,15 @@ Required: - `secret_variable_name` (String) The variable that will be used in handler code when retrieving information from the secret. + +### Nested Schema for `target_path` + +Required: + +- `path_on_stage` (String) Path for import on stage, without the leading `/`. +- `stage_location` (String) Stage location without leading `@`. To use your user's stage set this to `~`, otherwise pass fully qualified name of the stage (with every part contained in double quotes or use `snowflake_stage..fully_qualified_name` if you manage this stage through terraform). + + ### Nested Schema for `parameters` diff --git a/docs/resources/procedure_sql.md b/docs/resources/procedure_sql.md index 2533380779..3dcc0fefb7 100644 --- a/docs/resources/procedure_sql.md +++ b/docs/resources/procedure_sql.md @@ -50,6 +50,10 @@ Required: - `arg_data_type` (String) The argument type. - `arg_name` (String) The argument name. +Optional: + +- `arg_default_value` (String) Optional default value for the argument. For text values use single quotes. Numeric values can be unquoted. External changes for this field won't be detected. In case you want to apply external changes, you can re-create the resource manually using "terraform taint". 
+ ### Nested Schema for `parameters` diff --git a/pkg/acceptance/bettertestspoc/assert/objectassert/procedure_describe_snowflake_ext.go index 64011d14f9..2319b30f7a 100644 --- a/pkg/acceptance/bettertestspoc/assert/objectassert/procedure_describe_snowflake_ext.go +++ b/pkg/acceptance/bettertestspoc/assert/objectassert/procedure_describe_snowflake_ext.go @@ -6,10 +6,12 @@ import ( "testing" acc "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance" + assert2 "github.com/stretchr/testify/assert" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/bettertestspoc/assert" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/internal/collections" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk/datatypes" ) // TODO [SNOW-1501905]: this file should be fully regenerated when adding and option to assert the results of describe @@ -391,3 +393,70 @@ func (f *ProcedureDetailsAssert) HasExactlySecrets(expectedSecrets map[string]sd }) return f } + +func (f *ProcedureDetailsAssert) HasExactlyImportsNormalizedInAnyOrder(imports ...sdk.NormalizedPath) *ProcedureDetailsAssert { + f.AddAssertion(func(t *testing.T, o *sdk.ProcedureDetails) error { + t.Helper() + if o.NormalizedImports == nil { + return fmt.Errorf("expected imports to have value; got: nil") + } + if !assert2.ElementsMatch(t, imports, o.NormalizedImports) { + return fmt.Errorf("expected %v imports, got %v", imports, o.NormalizedImports) + } + return nil + }) + return f +} + +func (f *ProcedureDetailsAssert) HasNormalizedTargetPath(expectedStageLocation string, expectedPathOnStage string) *ProcedureDetailsAssert { + f.AddAssertion(func(t *testing.T, o *sdk.ProcedureDetails) error { + t.Helper() + if o.NormalizedTargetPath == nil { + return fmt.Errorf("expected normalized target path to have
value; got: nil") + } + if o.NormalizedTargetPath.StageLocation != expectedStageLocation { + return fmt.Errorf("expected %s stage location for target path, got %v", expectedStageLocation, o.NormalizedTargetPath.StageLocation) + } + if o.NormalizedTargetPath.PathOnStage != expectedPathOnStage { + return fmt.Errorf("expected %s path on stage for target path, got %v", expectedPathOnStage, o.NormalizedTargetPath.PathOnStage) + } + return nil + }) + return f +} + +func (f *ProcedureDetailsAssert) HasNormalizedTargetPathNil() *ProcedureDetailsAssert { + f.AddAssertion(func(t *testing.T, o *sdk.ProcedureDetails) error { + t.Helper() + if o.NormalizedTargetPath != nil { + return fmt.Errorf("expected normalized target path to be nil, got: %s", *o.NormalizedTargetPath) + } + return nil + }) + return f +} + +func (f *ProcedureDetailsAssert) HasReturnDataType(expectedDataType datatypes.DataType) *ProcedureDetailsAssert { + f.AddAssertion(func(t *testing.T, o *sdk.ProcedureDetails) error { + t.Helper() + if o.ReturnDataType == nil { + return fmt.Errorf("expected return data type to have value; got: nil") + } + if !datatypes.AreTheSame(o.ReturnDataType, expectedDataType) { + return fmt.Errorf("expected %s return data type, got %v", expectedDataType, o.ReturnDataType.ToSql()) + } + return nil + }) + return f +} + +func (f *ProcedureDetailsAssert) HasReturnNotNull(expected bool) *ProcedureDetailsAssert { + f.AddAssertion(func(t *testing.T, o *sdk.ProcedureDetails) error { + t.Helper() + if o.ReturnNotNull != expected { + return fmt.Errorf("expected return not null %t; got: %t", expected, o.ReturnNotNull) + } + return nil + }) + return f +} diff --git a/pkg/acceptance/bettertestspoc/assert/resourceassert/procedure_java_resource_ext.go b/pkg/acceptance/bettertestspoc/assert/resourceassert/procedure_java_resource_ext.go new file mode 100644 index 0000000000..85de853dbe --- /dev/null +++ b/pkg/acceptance/bettertestspoc/assert/resourceassert/procedure_java_resource_ext.go @@ -0,0 +1,17 
@@ +package resourceassert + +import ( + "strconv" + + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/bettertestspoc/assert" +) + +func (f *ProcedureJavaResourceAssert) HasImportsLength(len int) *ProcedureJavaResourceAssert { + f.AddAssertion(assert.ValueSet("imports.#", strconv.FormatInt(int64(len), 10))) + return f +} + +func (f *ProcedureJavaResourceAssert) HasTargetPathEmpty() *ProcedureJavaResourceAssert { + f.AddAssertion(assert.ValueSet("target_path.#", "0")) + return f +} diff --git a/pkg/acceptance/bettertestspoc/assert/resourceparametersassert/procedure_resource_parameters_ext.go b/pkg/acceptance/bettertestspoc/assert/resourceparametersassert/procedure_resource_parameters_ext.go new file mode 100644 index 0000000000..e7090a0661 --- /dev/null +++ b/pkg/acceptance/bettertestspoc/assert/resourceparametersassert/procedure_resource_parameters_ext.go @@ -0,0 +1,13 @@ +package resourceparametersassert + +import ( + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk" +) + +func (f *ProcedureResourceParametersAssert) HasAllDefaults() *ProcedureResourceParametersAssert { + return f. + HasEnableConsoleOutput(false). + HasLogLevel(sdk.LogLevelOff). + HasMetricLevel(sdk.MetricLevelNone). 
+ HasTraceLevel(sdk.TraceLevelOff) +} diff --git a/pkg/acceptance/bettertestspoc/config/model/procedure_java_model_ext.go b/pkg/acceptance/bettertestspoc/config/model/procedure_java_model_ext.go index 1fa425aa28..cb6779784c 100644 --- a/pkg/acceptance/bettertestspoc/config/model/procedure_java_model_ext.go +++ b/pkg/acceptance/bettertestspoc/config/model/procedure_java_model_ext.go @@ -2,6 +2,11 @@ package model import ( "encoding/json" + + tfconfig "github.com/hashicorp/terraform-plugin-testing/config" + + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk/datatypes" ) func (f *ProcedureJavaModel) MarshalJSON() ([]byte, error) { @@ -14,3 +19,71 @@ func (f *ProcedureJavaModel) MarshalJSON() ([]byte, error) { DependsOn: f.DependsOn(), }) } + +func ProcedureJavaBasicInline( + resourceName string, + id sdk.SchemaObjectIdentifierWithArguments, + returnType datatypes.DataType, + handler string, + procedureDefinition string, +) *ProcedureJavaModel { + return ProcedureJava(resourceName, id.DatabaseName(), handler, id.Name(), returnType.ToSql(), "11", id.SchemaName(), "1.14.0"). + WithProcedureDefinition(procedureDefinition) +} + +func ProcedureJavaBasicStaged( + resourceName string, + id sdk.SchemaObjectIdentifierWithArguments, + returnType datatypes.DataType, + handler string, + stageLocation string, + pathOnStage string, +) *ProcedureJavaModel { + return ProcedureJava(resourceName, id.DatabaseName(), handler, id.Name(), returnType.ToSql(), "11", id.SchemaName(), "1.14.0"). 
+ WithImport(stageLocation, pathOnStage) +} + +func (f *ProcedureJavaModel) WithArgument(argName string, argDataType datatypes.DataType) *ProcedureJavaModel { + return f.WithArgumentsValue( + tfconfig.ObjectVariable( + map[string]tfconfig.Variable{ + "arg_name": tfconfig.StringVariable(argName), + "arg_data_type": tfconfig.StringVariable(argDataType.ToSql()), + }, + ), + ) +} + +func (f *ProcedureJavaModel) WithArgumentWithDefaultValue(argName string, argDataType datatypes.DataType, value string) *ProcedureJavaModel { + return f.WithArgumentsValue( + tfconfig.ObjectVariable( + map[string]tfconfig.Variable{ + "arg_name": tfconfig.StringVariable(argName), + "arg_data_type": tfconfig.StringVariable(argDataType.ToSql()), + "arg_default_value": tfconfig.StringVariable(value), + }, + ), + ) +} + +func (f *ProcedureJavaModel) WithImport(stageLocation string, pathOnStage string) *ProcedureJavaModel { + return f.WithImportsValue( + tfconfig.ObjectVariable( + map[string]tfconfig.Variable{ + "stage_location": tfconfig.StringVariable(stageLocation), + "path_on_stage": tfconfig.StringVariable(pathOnStage), + }, + ), + ) +} + +func (f *ProcedureJavaModel) WithTargetPathParts(stageLocation string, pathOnStage string) *ProcedureJavaModel { + return f.WithTargetPathValue( + tfconfig.ObjectVariable( + map[string]tfconfig.Variable{ + "stage_location": tfconfig.StringVariable(stageLocation), + "path_on_stage": tfconfig.StringVariable(pathOnStage), + }, + ), + ) +} diff --git a/pkg/acceptance/bettertestspoc/config/model/procedure_java_model_gen.go b/pkg/acceptance/bettertestspoc/config/model/procedure_java_model_gen.go index 5be880ae22..ed2be8286e 100644 --- a/pkg/acceptance/bettertestspoc/config/model/procedure_java_model_gen.go +++ b/pkg/acceptance/bettertestspoc/config/model/procedure_java_model_gen.go @@ -47,7 +47,6 @@ func ProcedureJava( database string, handler string, name string, - procedureDefinition string, returnType string, runtimeVersion string, schema string, @@ -57,7 
+56,6 @@ func ProcedureJava( p.WithDatabase(database) p.WithHandler(handler) p.WithName(name) - p.WithProcedureDefinition(procedureDefinition) p.WithReturnType(returnType) p.WithRuntimeVersion(runtimeVersion) p.WithSchema(schema) @@ -69,7 +67,6 @@ func ProcedureJavaWithDefaultMeta( database string, handler string, name string, - procedureDefinition string, returnType string, runtimeVersion string, schema string, @@ -79,7 +76,6 @@ func ProcedureJavaWithDefaultMeta( p.WithDatabase(database) p.WithHandler(handler) p.WithName(name) - p.WithProcedureDefinition(procedureDefinition) p.WithReturnType(returnType) p.WithRuntimeVersion(runtimeVersion) p.WithSchema(schema) @@ -186,10 +182,7 @@ func (p *ProcedureJavaModel) WithSnowparkPackage(snowparkPackage string) *Proced return p } -func (p *ProcedureJavaModel) WithTargetPath(targetPath string) *ProcedureJavaModel { - p.TargetPath = tfconfig.StringVariable(targetPath) - return p -} +// target_path attribute type is not yet supported, so WithTargetPath can't be generated func (p *ProcedureJavaModel) WithTraceLevel(traceLevel string) *ProcedureJavaModel { p.TraceLevel = tfconfig.StringVariable(traceLevel) diff --git a/pkg/acceptance/bettertestspoc/config/model/procedure_python_model_gen.go b/pkg/acceptance/bettertestspoc/config/model/procedure_python_model_gen.go index dfe2801f00..c69dcab167 100644 --- a/pkg/acceptance/bettertestspoc/config/model/procedure_python_model_gen.go +++ b/pkg/acceptance/bettertestspoc/config/model/procedure_python_model_gen.go @@ -46,7 +46,6 @@ func ProcedurePython( database string, handler string, name string, - procedureDefinition string, returnType string, runtimeVersion string, schema string, @@ -56,7 +55,6 @@ func ProcedurePython( p.WithDatabase(database) p.WithHandler(handler) p.WithName(name) - p.WithProcedureDefinition(procedureDefinition) p.WithReturnType(returnType) p.WithRuntimeVersion(runtimeVersion) p.WithSchema(schema) @@ -68,7 +66,6 @@ func ProcedurePythonWithDefaultMeta( database 
string, handler string, name string, - procedureDefinition string, returnType string, runtimeVersion string, schema string, @@ -78,7 +75,6 @@ func ProcedurePythonWithDefaultMeta( p.WithDatabase(database) p.WithHandler(handler) p.WithName(name) - p.WithProcedureDefinition(procedureDefinition) p.WithReturnType(returnType) p.WithRuntimeVersion(runtimeVersion) p.WithSchema(schema) diff --git a/pkg/acceptance/bettertestspoc/config/model/procedure_scala_model_gen.go b/pkg/acceptance/bettertestspoc/config/model/procedure_scala_model_gen.go index 01ff2f1107..9df0441308 100644 --- a/pkg/acceptance/bettertestspoc/config/model/procedure_scala_model_gen.go +++ b/pkg/acceptance/bettertestspoc/config/model/procedure_scala_model_gen.go @@ -47,7 +47,6 @@ func ProcedureScala( database string, handler string, name string, - procedureDefinition string, returnType string, runtimeVersion string, schema string, @@ -57,7 +56,6 @@ func ProcedureScala( p.WithDatabase(database) p.WithHandler(handler) p.WithName(name) - p.WithProcedureDefinition(procedureDefinition) p.WithReturnType(returnType) p.WithRuntimeVersion(runtimeVersion) p.WithSchema(schema) @@ -69,7 +67,6 @@ func ProcedureScalaWithDefaultMeta( database string, handler string, name string, - procedureDefinition string, returnType string, runtimeVersion string, schema string, @@ -79,7 +76,6 @@ func ProcedureScalaWithDefaultMeta( p.WithDatabase(database) p.WithHandler(handler) p.WithName(name) - p.WithProcedureDefinition(procedureDefinition) p.WithReturnType(returnType) p.WithRuntimeVersion(runtimeVersion) p.WithSchema(schema) @@ -186,10 +182,7 @@ func (p *ProcedureScalaModel) WithSnowparkPackage(snowparkPackage string) *Proce return p } -func (p *ProcedureScalaModel) WithTargetPath(targetPath string) *ProcedureScalaModel { - p.TargetPath = tfconfig.StringVariable(targetPath) - return p -} +// target_path attribute type is not yet supported, so WithTargetPath can't be generated func (p *ProcedureScalaModel) WithTraceLevel(traceLevel 
string) *ProcedureScalaModel { p.TraceLevel = tfconfig.StringVariable(traceLevel) diff --git a/pkg/acceptance/helpers/procedure_client.go b/pkg/acceptance/helpers/procedure_client.go index 019d5f9299..7e77e37782 100644 --- a/pkg/acceptance/helpers/procedure_client.go +++ b/pkg/acceptance/helpers/procedure_client.go @@ -87,6 +87,29 @@ func (c *ProcedureClient) CreateJava(t *testing.T) (*sdk.Procedure, func()) { return function, c.DropProcedureFunc(t, id) } +func (c *ProcedureClient) CreateScalaStaged(t *testing.T, id sdk.SchemaObjectIdentifierWithArguments, dataType datatypes.DataType, importPath string, handler string) (*sdk.Procedure, func()) { + t.Helper() + ctx := context.Background() + + argName := "x" + argument := sdk.NewProcedureArgumentRequest(argName, dataType) + dt := sdk.NewProcedureReturnsResultDataTypeRequest(dataType) + returns := sdk.NewProcedureReturnsRequest().WithResultDataType(*dt) + packages := []sdk.ProcedurePackageRequest{*sdk.NewProcedurePackageRequest("com.snowflake:snowpark:1.14.0")} + + request := sdk.NewCreateForScalaProcedureRequest(id.SchemaObjectId(), *returns, "2.12", packages, handler). + WithArguments([]sdk.ProcedureArgumentRequest{*argument}). 
+ WithImports([]sdk.ProcedureImportRequest{*sdk.NewProcedureImportRequest(importPath)}) + + err := c.client().CreateForScala(ctx, request) + require.NoError(t, err) + + function, err := c.client().ShowByID(ctx, id) + require.NoError(t, err) + + return function, c.DropProcedureFunc(t, id) +} + func (c *ProcedureClient) Create(t *testing.T, arguments ...sdk.DataType) *sdk.Procedure { t.Helper() return c.CreateWithIdentifier(t, c.ids.RandomSchemaObjectIdentifierWithArguments(arguments...)) @@ -156,6 +179,19 @@ func (c *ProcedureClient) SampleJavaDefinition(t *testing.T, className string, f `, className, funcName, argName) } +func (c *ProcedureClient) SampleJavaDefinitionNoArgs(t *testing.T, className string, funcName string) string { + t.Helper() + + return fmt.Sprintf(` + import com.snowflake.snowpark_java.*; + class %[1]s { + public static String %[2]s(Session session) { + return "hello"; + } + } +`, className, funcName) +} + // For more references: https://docs.snowflake.com/en/developer-guide/stored-procedure/stored-procedures-javascript func (c *ProcedureClient) SampleJavascriptDefinition(t *testing.T, argName string) string { t.Helper() diff --git a/pkg/resources/function_and_procedure_commons.go b/pkg/resources/function_and_procedure_commons.go new file mode 100644 index 0000000000..e21801813c --- /dev/null +++ b/pkg/resources/function_and_procedure_commons.go @@ -0,0 +1,54 @@ +package resources + +import ( + "fmt" + + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/internal/collections" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk" + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" +) + +func readFunctionOrProcedureArguments(d *schema.ResourceData, args []sdk.NormalizedArgument) error { + if len(args) == 0 { + // TODO [SNOW-1348103]: handle empty list + return nil + } + // We do it the unusual way because the default values are not returned by SF. + // We update what we have - leaving the defaults unchanged. 
+ if currentArgs, ok := d.Get("arguments").([]map[string]any); !ok { + return fmt.Errorf("arguments must be a list") + } else { + for i, arg := range args { + currentArgs[i]["arg_name"] = arg.Name + currentArgs[i]["arg_data_type"] = arg.DataType.ToSql() + } + return d.Set("arguments", currentArgs) + } +} + +func readFunctionOrProcedureImports(d *schema.ResourceData, imports []sdk.NormalizedPath) error { + if len(imports) == 0 { + // don't do anything if imports not present + return nil + } + imps := collections.Map(imports, func(imp sdk.NormalizedPath) map[string]any { + return map[string]any{ + "stage_location": imp.StageLocation, + "path_on_stage": imp.PathOnStage, + } + }) + return d.Set("imports", imps) +} + +func readFunctionOrProcedureTargetPath(d *schema.ResourceData, normalizedPath *sdk.NormalizedPath) error { + if normalizedPath == nil { + // don't do anything if imports not present + return nil + } + tp := make([]map[string]any, 1) + tp[0] = map[string]any{ + "stage_location": normalizedPath.StageLocation, + "path_on_stage": normalizedPath.PathOnStage, + } + return d.Set("target_path", tp) +} diff --git a/pkg/resources/function_commons.go b/pkg/resources/function_commons.go index ea005da2c2..fe5a097a45 100644 --- a/pkg/resources/function_commons.go +++ b/pkg/resources/function_commons.go @@ -7,7 +7,6 @@ import ( "log" "slices" - "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/internal/collections" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/internal/provider" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/schemas" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk" @@ -241,7 +240,6 @@ func functionBaseSchema() map[string]schema.Schema { ForceNew: true, Description: "List of the arguments for the function. 
Consult the [docs](https://docs.snowflake.com/en/sql-reference/sql/create-function#all-languages) for more details.", }, - // TODO [SNOW-1348103]: for now, the proposal is to leave return type as string, add TABLE to data types, and here always parse (easier handling and diff suppression) "return_type": { Type: schema.TypeString, Required: true, @@ -408,6 +406,7 @@ func DeleteFunction(ctx context.Context, d *schema.ResourceData, meta any) diag. return nil } +// TODO [SNOW-1850370]: Make the rest of the functions in this file generic (for reuse with procedures) func parseFunctionArgumentsCommon(d *schema.ResourceData) ([]sdk.FunctionArgumentRequest, error) { args := make([]sdk.FunctionArgumentRequest, 0) if v, ok := d.GetOk("arguments"); ok { @@ -494,7 +493,7 @@ func setFunctionTargetPathInBuilder[T any](d *schema.ResourceData, setTargetPath return nil } -func queryAllFunctionsDetailsCommon(ctx context.Context, d *schema.ResourceData, client *sdk.Client, id sdk.SchemaObjectIdentifierWithArguments) (*allFunctionDetailsCommon, diag.Diagnostics) { +func queryAllFunctionDetailsCommon(ctx context.Context, d *schema.ResourceData, client *sdk.Client, id sdk.SchemaObjectIdentifierWithArguments) (*allFunctionDetailsCommon, diag.Diagnostics) { functionDetails, err := client.Functions.DescribeDetails(ctx, id) if err != nil { if errors.Is(err, sdk.ErrObjectNotExistOrAuthorized) { @@ -540,48 +539,3 @@ type allFunctionDetailsCommon struct { functionDetails *sdk.FunctionDetails functionParameters []*sdk.Parameter } - -func readFunctionArgumentsCommon(d *schema.ResourceData, args []sdk.NormalizedArgument) error { - if len(args) == 0 { - // TODO [SNOW-1348103]: handle empty list - return nil - } - // We do it the unusual way because the default values are not returned by SF. - // We update what we have - leaving the defaults unchanged. 
- if currentArgs, ok := d.Get("arguments").([]map[string]any); !ok { - return fmt.Errorf("arguments must be a list") - } else { - for i, arg := range args { - currentArgs[i]["arg_name"] = arg.Name - currentArgs[i]["arg_data_type"] = arg.DataType.ToSql() - } - return d.Set("arguments", currentArgs) - } -} - -func readFunctionImportsCommon(d *schema.ResourceData, imports []sdk.NormalizedPath) error { - if len(imports) == 0 { - // don't do anything if imports not present - return nil - } - imps := collections.Map(imports, func(imp sdk.NormalizedPath) map[string]any { - return map[string]any{ - "stage_location": imp.StageLocation, - "path_on_stage": imp.PathOnStage, - } - }) - return d.Set("imports", imps) -} - -func readFunctionTargetPathCommon(d *schema.ResourceData, normalizedPath *sdk.NormalizedPath) error { - if normalizedPath == nil { - // don't do anything if imports not present - return nil - } - tp := make([]map[string]any, 1) - tp[0] = map[string]any{ - "stage_location": normalizedPath.StageLocation, - "path_on_stage": normalizedPath.PathOnStage, - } - return d.Set("target_path", tp) -} diff --git a/pkg/resources/function_java.go b/pkg/resources/function_java.go index b1e60da7cf..c8fca3c13f 100644 --- a/pkg/resources/function_java.go +++ b/pkg/resources/function_java.go @@ -35,7 +35,7 @@ func FunctionJava() *schema.Resource { // The language check is more for the future. // Currently, almost all attributes are marked as forceNew. // When language changes, these attributes also change, causing the object to recreate either way. - // The only potential option is java staged -> scala staged (however scala need runtime_version which may interfere). + // The only potential option is java staged <-> scala staged (however scala need runtime_version which may interfere). 
RecreateWhenResourceStringFieldChangedExternally("function_language", "JAVA"), )), @@ -112,7 +112,7 @@ func ReadContextFunctionJava(ctx context.Context, d *schema.ResourceData, meta a return diag.FromErr(err) } - allFunctionDetails, diags := queryAllFunctionsDetailsCommon(ctx, d, client, id) + allFunctionDetails, diags := queryAllFunctionDetailsCommon(ctx, d, client, id) if diags != nil { return diags } @@ -123,18 +123,18 @@ func ReadContextFunctionJava(ctx context.Context, d *schema.ResourceData, meta a errs := errors.Join( // TODO [SNOW-1348103]: set the rest of the fields // not reading is_secure on purpose (handled as external change to show output) - readFunctionArgumentsCommon(d, allFunctionDetails.functionDetails.NormalizedArguments), + readFunctionOrProcedureArguments(d, allFunctionDetails.functionDetails.NormalizedArguments), d.Set("return_type", allFunctionDetails.functionDetails.ReturnDataType.ToSql()), // not reading null_input_behavior on purpose (handled as external change to show output) // not reading return_results_behavior on purpose (handled as external change to show output) setOptionalFromStringPtr(d, "runtime_version", allFunctionDetails.functionDetails.RuntimeVersion), // comment - readFunctionImportsCommon(d, allFunctionDetails.functionDetails.NormalizedImports), + readFunctionOrProcedureImports(d, allFunctionDetails.functionDetails.NormalizedImports), // packages setRequiredFromStringPtr(d, "handler", allFunctionDetails.functionDetails.Handler), // external_access_integrations // secrets - readFunctionTargetPathCommon(d, allFunctionDetails.functionDetails.NormalizedTargetPath), + readFunctionOrProcedureTargetPath(d, allFunctionDetails.functionDetails.NormalizedTargetPath), setOptionalFromStringPtr(d, "function_definition", allFunctionDetails.functionDetails.Body), d.Set("function_language", allFunctionDetails.functionDetails.Language), diff --git a/pkg/resources/procedure_commons.go b/pkg/resources/procedure_commons.go index 
163a33da0f..759f44f878 100644 --- a/pkg/resources/procedure_commons.go +++ b/pkg/resources/procedure_commons.go @@ -2,12 +2,15 @@ package resources import ( "context" + "errors" "fmt" + "log" "slices" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/internal/provider" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/schemas" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk/datatypes" "github.com/hashicorp/terraform-plugin-sdk/v2/diag" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" ) @@ -23,6 +26,7 @@ func init() { type procedureSchemaDef struct { additionalArguments []string procedureDefinitionDescription string + procedureDefinitionRequired bool returnTypeLinkName string returnTypeLinkUrl string runtimeVersionDescription string @@ -41,6 +45,11 @@ func setUpProcedureSchema(definition procedureSchemaDef) map[string]*schema.Sche } if v, ok := currentSchema["procedure_definition"]; ok && v != nil { v.Description = diffSuppressStatementFieldDescription(definition.procedureDefinitionDescription) + if definition.procedureDefinitionRequired { + v.Required = true + } else { + v.Optional = true + } } if v, ok := currentSchema["return_type"]; ok && v != nil { v.Description = procedureReturnsTemplate(definition.returnTypeLinkName, definition.returnTypeLinkUrl) @@ -109,6 +118,7 @@ var ( returnTypeLinkName: "SQL and JavaScript data type mapping", returnTypeLinkUrl: "https://docs.snowflake.com/en/developer-guide/stored-procedure/stored-procedures-javascript.html#label-stored-procedure-data-type-mapping", procedureDefinitionDescription: procedureDefinitionTemplate("JavaScript", "JavaScript", "https://docs.snowflake.com/en/developer-guide/stored-procedure/stored-procedures-javascript"), + procedureDefinitionRequired: true, } pythonProcedureSchemaDefinition = procedureSchemaDef{ additionalArguments: []string{ @@ -149,6 +159,7 @@ var ( sqlProcedureSchemaDefinition = 
procedureSchemaDef{ additionalArguments: []string{}, procedureDefinitionDescription: procedureDefinitionTemplate("SQL", "Snowflake Scripting", "https://docs.snowflake.com/en/developer-guide/snowflake-scripting/index"), + procedureDefinitionRequired: true, returnTypeLinkName: "SQL data type", returnTypeLinkUrl: "https://docs.snowflake.com/en/sql-reference-data-types", } @@ -212,13 +223,17 @@ func procedureBaseSchema() map[string]schema.Schema { DiffSuppressFunc: DiffSuppressDataTypes, Description: "The argument type.", }, + "arg_default_value": { + Type: schema.TypeString, + Optional: true, + Description: externalChangesNotDetectedFieldDescription("Optional default value for the argument. For text values use single quotes. Numeric values can be unquoted."), + }, }, }, Optional: true, ForceNew: true, Description: "List of the arguments for the procedure. Consult the [docs](https://docs.snowflake.com/en/sql-reference/sql/create-procedure#all-languages) for more details.", }, - // TODO [SNOW-1348103]: for now, the proposal is to leave return type as string, add TABLE to data types, and here always parse (easier handling and diff suppression) "return_type": { Type: schema.TypeString, Required: true, @@ -231,7 +246,7 @@ func procedureBaseSchema() map[string]schema.Schema { Optional: true, ForceNew: true, ValidateDiagFunc: sdkValidation(sdk.ToNullInputBehavior), - DiffSuppressFunc: SuppressIfAny(NormalizeAndCompare(sdk.ToNullInputBehavior), IgnoreChangeToCurrentSnowflakeValueInShow("null_input_behavior")), + DiffSuppressFunc: SuppressIfAny(NormalizeAndCompare(sdk.ToNullInputBehavior)), // IgnoreChangeToCurrentSnowflakeValueInShow("null_input_behavior")), Description: fmt.Sprintf("Specifies the behavior of the procedure when called with null inputs. 
Valid values are (case-insensitive): %s.", possibleValuesListed(sdk.AllAllowedNullInputBehaviors)), }, // "return_behavior" removed because it is deprecated in the docs: https://docs.snowflake.com/en/sql-reference/sql/create-procedure#id1 @@ -249,9 +264,22 @@ func procedureBaseSchema() map[string]schema.Schema { }, "imports": { Type: schema.TypeSet, - Elem: &schema.Schema{Type: schema.TypeString}, Optional: true, ForceNew: true, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "stage_location": { + Type: schema.TypeString, + Required: true, + Description: "Stage location without leading `@`. To use your user's stage set this to `~`, otherwise pass fully qualified name of the stage (with every part contained in double quotes or use `snowflake_stage..fully_qualified_name` if you manage this stage through terraform).", + }, + "path_on_stage": { + Type: schema.TypeString, + Required: true, + Description: "Path for import on stage, without the leading `/`.", + }, + }, + }, }, "snowpark_package": { Type: schema.TypeString, @@ -303,9 +331,24 @@ func procedureBaseSchema() map[string]schema.Schema { Description: "Assigns the names of [secrets](https://docs.snowflake.com/en/sql-reference/sql/create-secret) to variables so that you can use the variables to reference the secrets when retrieving information from secrets in handler code. Secrets you specify here must be allowed by the [external access integration](https://docs.snowflake.com/en/sql-reference/sql/create-external-access-integration) specified as a value of this CREATE FUNCTION command’s EXTERNAL_ACCESS_INTEGRATIONS parameter.", }, "target_path": { - Type: schema.TypeString, + Type: schema.TypeSet, + MaxItems: 1, Optional: true, ForceNew: true, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "stage_location": { + Type: schema.TypeString, + Required: true, + Description: "Stage location without leading `@`. 
To use your user's stage set this to `~`, otherwise pass fully qualified name of the stage (with every part contained in double quotes or use `snowflake_stage..fully_qualified_name` if you manage this stage through terraform).", + }, + "path_on_stage": { + Type: schema.TypeString, + Required: true, + Description: "Path for import on stage, without the leading `/`.", + }, + }, + }, }, "execute_as": { Type: schema.TypeString, @@ -316,7 +359,6 @@ func procedureBaseSchema() map[string]schema.Schema { }, "procedure_definition": { Type: schema.TypeString, - Required: true, ForceNew: true, DiffSuppressFunc: DiffSuppressStatement, }, @@ -360,3 +402,138 @@ func DeleteProcedure(ctx context.Context, d *schema.ResourceData, meta any) diag d.SetId("") return nil } + +func queryAllProcedureDetailsCommon(ctx context.Context, d *schema.ResourceData, client *sdk.Client, id sdk.SchemaObjectIdentifierWithArguments) (*allProcedureDetailsCommon, diag.Diagnostics) { + procedureDetails, err := client.Procedures.DescribeDetails(ctx, id) + if err != nil { + if errors.Is(err, sdk.ErrObjectNotExistOrAuthorized) { + log.Printf("[DEBUG] procedure (%s) not found or we are not authorized. Err: %s", d.Id(), err) + d.SetId("") + return nil, diag.Diagnostics{ + diag.Diagnostic{ + Severity: diag.Warning, + Summary: "Failed to query procedure. Marking the resource as removed.", + Detail: fmt.Sprintf("Procedure: %s, Err: %s", id.FullyQualifiedName(), err), + }, + } + } + return nil, diag.FromErr(err) + } + procedure, err := client.Procedures.ShowByID(ctx, id) + if err != nil { + if errors.Is(err, sdk.ErrObjectNotFound) { + d.SetId("") + return nil, diag.Diagnostics{ + diag.Diagnostic{ + Severity: diag.Warning, + Summary: "Failed to query procedure. 
Marking the resource as removed.", + Detail: fmt.Sprintf("Procedure: %s, Err: %s", id.FullyQualifiedName(), err), + }, + } + } + return nil, diag.FromErr(err) + } + procedureParameters, err := client.Procedures.ShowParameters(ctx, id) + if err != nil { + return nil, diag.FromErr(err) + } + return &allProcedureDetailsCommon{ + procedure: procedure, + procedureDetails: procedureDetails, + procedureParameters: procedureParameters, + }, nil +} + +type allProcedureDetailsCommon struct { + procedure *sdk.Procedure + procedureDetails *sdk.ProcedureDetails + procedureParameters []*sdk.Parameter +} + +// TODO [SNOW-1850370]: Make the rest of the functions in this file generic (for reuse with functions) +// These were copy-pasted for now. +func parseProcedureArgumentsCommon(d *schema.ResourceData) ([]sdk.ProcedureArgumentRequest, error) { + args := make([]sdk.ProcedureArgumentRequest, 0) + if v, ok := d.GetOk("arguments"); ok { + for _, arg := range v.([]any) { + argName := arg.(map[string]any)["arg_name"].(string) + argDataType := arg.(map[string]any)["arg_data_type"].(string) + dataType, err := datatypes.ParseDataType(argDataType) + if err != nil { + return nil, err + } + request := sdk.NewProcedureArgumentRequest(argName, dataType) + + if argDefaultValue, defaultValuePresent := arg.(map[string]any)["arg_default_value"]; defaultValuePresent && argDefaultValue.(string) != "" { + request.WithDefaultValue(argDefaultValue.(string)) + } + + args = append(args, *request) + } + } + return args, nil +} + +func parseProcedureImportsCommon(d *schema.ResourceData) ([]sdk.ProcedureImportRequest, error) { + imports := make([]sdk.ProcedureImportRequest, 0) + if v, ok := d.GetOk("imports"); ok { + for _, imp := range v.(*schema.Set).List() { + stageLocation := imp.(map[string]any)["stage_location"].(string) + pathOnStage := imp.(map[string]any)["path_on_stage"].(string) + imports = append(imports, *sdk.NewProcedureImportRequest(fmt.Sprintf("@%s/%s", stageLocation, pathOnStage))) + } + } 
+ return imports, nil +} + +func parseProcedureTargetPathCommon(d *schema.ResourceData) (string, error) { + var tp string + if v, ok := d.GetOk("target_path"); ok { + for _, p := range v.(*schema.Set).List() { + stageLocation := p.(map[string]any)["stage_location"].(string) + pathOnStage := p.(map[string]any)["path_on_stage"].(string) + tp = fmt.Sprintf("@%s/%s", stageLocation, pathOnStage) + } + } + return tp, nil +} + +func parseProcedureReturnsCommon(d *schema.ResourceData) (*sdk.ProcedureReturnsRequest, error) { + returnTypeRaw := d.Get("return_type").(string) + dataType, err := datatypes.ParseDataType(returnTypeRaw) + if err != nil { + return nil, err + } + returns := sdk.NewProcedureReturnsRequest() + switch v := dataType.(type) { + case *datatypes.TableDataType: + var cr []sdk.ProcedureColumnRequest + for _, c := range v.Columns() { + cr = append(cr, *sdk.NewProcedureColumnRequest(c.ColumnName(), c.ColumnType())) + } + returns.WithTable(*sdk.NewProcedureReturnsTableRequest().WithColumns(cr)) + default: + returns.WithResultDataType(*sdk.NewProcedureReturnsResultDataTypeRequest(dataType)) + } + return returns, nil +} + +func setProcedureImportsInBuilder[T any](d *schema.ResourceData, setImports func([]sdk.ProcedureImportRequest) T) error { + imports, err := parseProcedureImportsCommon(d) + if err != nil { + return err + } + setImports(imports) + return nil +} + +func setProcedureTargetPathInBuilder[T any](d *schema.ResourceData, setTargetPath func(string) T) error { + tp, err := parseProcedureTargetPathCommon(d) + if err != nil { + return err + } + if tp != "" { + setTargetPath(tp) + } + return nil +} diff --git a/pkg/resources/procedure_java.go b/pkg/resources/procedure_java.go index 1804780de9..bc4f417144 100644 --- a/pkg/resources/procedure_java.go +++ b/pkg/resources/procedure_java.go @@ -2,11 +2,18 @@ package resources import ( "context" + "errors" + "fmt" + "reflect" "strings" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/helpers" 
"github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/internal/collections" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/internal/provider" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/provider/resources" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/schemas" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk/datatypes" "github.com/hashicorp/terraform-plugin-sdk/v2/diag" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/customdiff" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" @@ -25,7 +32,11 @@ func ProcedureJava() *schema.Resource { ComputedIfAnyAttributeChanged(javaProcedureSchema, FullyQualifiedNameAttributeName, "name"), ComputedIfAnyAttributeChanged(procedureParametersSchema, ParametersAttributeName, collections.Map(sdk.AsStringList(sdk.AllProcedureParameters), strings.ToLower)...), procedureParametersCustomDiff, - // TODO[SNOW-1348103]: recreate when type changed externally + // The language check is more for the future. + // Currently, almost all attributes are marked as forceNew. + // When language changes, these attributes also change, causing the object to recreate either way. + // The only option is java staged <-> scala staged (however scala need runtime_version which may interfere). 
+ RecreateWhenResourceStringFieldChangedExternally("procedure_language", "JAVA"), )), Schema: collections.MergeMaps(javaProcedureSchema, procedureParametersSchema), @@ -36,13 +47,155 @@ func ProcedureJava() *schema.Resource { } func CreateContextProcedureJava(ctx context.Context, d *schema.ResourceData, meta any) diag.Diagnostics { - return nil + client := meta.(*provider.Context).Client + database := d.Get("database").(string) + sc := d.Get("schema").(string) + name := d.Get("name").(string) + + argumentRequests, err := parseProcedureArgumentsCommon(d) + if err != nil { + return diag.FromErr(err) + } + returns, err := parseProcedureReturnsCommon(d) + if err != nil { + return diag.FromErr(err) + } + handler := d.Get("handler").(string) + runtimeVersion := d.Get("runtime_version").(string) + // TODO [this PR]: handle real packages + packages := []sdk.ProcedurePackageRequest{*sdk.NewProcedurePackageRequest("com.snowflake:snowpark:1.14.0")} + + argumentDataTypes := collections.Map(argumentRequests, func(r sdk.ProcedureArgumentRequest) datatypes.DataType { return r.ArgDataType }) + id := sdk.NewSchemaObjectIdentifierWithArgumentsNormalized(database, sc, name, argumentDataTypes...) + request := sdk.NewCreateForJavaProcedureRequest(id.SchemaObjectId(), *returns, runtimeVersion, packages, handler). 
+ WithArguments(argumentRequests) + + errs := errors.Join( + booleanStringAttributeCreateBuilder(d, "is_secure", request.WithSecure), + attributeMappedValueCreateBuilder[string](d, "null_input_behavior", request.WithNullInputBehavior, sdk.ToNullInputBehavior), + // TODO [SNOW-1348103]: handle the rest of the attributes + // comment + setProcedureImportsInBuilder(d, request.WithImports), + // packages + // external_access_integrations + // secrets + setProcedureTargetPathInBuilder(d, request.WithTargetPath), + stringAttributeCreateBuilder(d, "procedure_definition", request.WithProcedureDefinitionWrapped), + ) + if errs != nil { + return diag.FromErr(errs) + } + + if err := client.Procedures.CreateForJava(ctx, request); err != nil { + return diag.FromErr(err) + } + d.SetId(helpers.EncodeResourceIdentifier(id)) + + // parameters do not work in create procedure (query does not fail but parameters stay unchanged) + setRequest := sdk.NewProcedureSetRequest() + if parametersCreateDiags := handleProcedureParametersCreate(d, setRequest); len(parametersCreateDiags) > 0 { + return parametersCreateDiags + } + if !reflect.DeepEqual(*setRequest, *sdk.NewProcedureSetRequest()) { + err := client.Procedures.Alter(ctx, sdk.NewAlterProcedureRequest(id).WithSet(*setRequest)) + if err != nil { + return diag.FromErr(err) + } + } + + return ReadContextProcedureJava(ctx, d, meta) } func ReadContextProcedureJava(ctx context.Context, d *schema.ResourceData, meta any) diag.Diagnostics { + client := meta.(*provider.Context).Client + id, err := sdk.ParseSchemaObjectIdentifierWithArguments(d.Id()) + if err != nil { + return diag.FromErr(err) + } + + allProcedureDetails, diags := queryAllProcedureDetailsCommon(ctx, d, client, id) + if diags != nil { + return diags + } + + // TODO [SNOW-1348103]: handle external changes marking + // TODO [SNOW-1348103]: handle setting state to value from config + + errs := errors.Join( + // TODO [SNOW-1348103]: set the rest of the fields + // not reading 
is_secure on purpose (handled as external change to show output) + readFunctionOrProcedureArguments(d, allProcedureDetails.procedureDetails.NormalizedArguments), + d.Set("return_type", allProcedureDetails.procedureDetails.ReturnDataType.ToSql()), + // not reading null_input_behavior on purpose (handled as external change to show output) + setRequiredFromStringPtr(d, "runtime_version", allProcedureDetails.procedureDetails.RuntimeVersion), + // comment + readFunctionOrProcedureImports(d, allProcedureDetails.procedureDetails.NormalizedImports), + // packages + setRequiredFromStringPtr(d, "handler", allProcedureDetails.procedureDetails.Handler), + // external_access_integrations + // secrets + readFunctionOrProcedureTargetPath(d, allProcedureDetails.procedureDetails.NormalizedTargetPath), + setOptionalFromStringPtr(d, "procedure_definition", allProcedureDetails.procedureDetails.Body), + d.Set("procedure_language", allProcedureDetails.procedureDetails.Language), + + handleProcedureParameterRead(d, allProcedureDetails.procedureParameters), + d.Set(FullyQualifiedNameAttributeName, id.FullyQualifiedName()), + d.Set(ShowOutputAttributeName, []map[string]any{schemas.ProcedureToSchema(allProcedureDetails.procedure)}), + d.Set(ParametersAttributeName, []map[string]any{schemas.ProcedureParametersToSchema(allProcedureDetails.procedureParameters)}), + ) + if errs != nil { + return diag.FromErr(errs) + } + + return nil } func UpdateContextProcedureJava(ctx context.Context, d *schema.ResourceData, meta any) diag.Diagnostics { - return nil + client := meta.(*provider.Context).Client + id, err := sdk.ParseSchemaObjectIdentifierWithArguments(d.Id()) + if err != nil { + return diag.FromErr(err) + } + + if d.HasChange("name") { + newId := sdk.NewSchemaObjectIdentifierWithArgumentsInSchema(id.SchemaId(), d.Get("name").(string), id.ArgumentDataTypes()...)
+ + err := client.Procedures.Alter(ctx, sdk.NewAlterProcedureRequest(id).WithRenameTo(newId.SchemaObjectId())) + if err != nil { + return diag.FromErr(fmt.Errorf("error renaming procedure %v err = %w", d.Id(), err)) + } + + d.SetId(helpers.EncodeResourceIdentifier(newId)) + id = newId + } + + // Batch SET operations and UNSET operations + setRequest := sdk.NewProcedureSetRequest() + unsetRequest := sdk.NewProcedureUnsetRequest() + + // TODO [SNOW-1348103]: handle all updates + // secure + // external access integration + // secrets + // comment + + if updateParamDiags := handleProcedureParametersUpdate(d, setRequest, unsetRequest); len(updateParamDiags) > 0 { + return updateParamDiags + } + + // Apply SET and UNSET changes + if !reflect.DeepEqual(*setRequest, *sdk.NewProcedureSetRequest()) { + err := client.Procedures.Alter(ctx, sdk.NewAlterProcedureRequest(id).WithSet(*setRequest)) + if err != nil { + return diag.FromErr(err) + } + } + if !reflect.DeepEqual(*unsetRequest, *sdk.NewProcedureUnsetRequest()) { + err := client.Procedures.Alter(ctx, sdk.NewAlterProcedureRequest(id).WithUnset(*unsetRequest)) + if err != nil { + return diag.FromErr(err) + } + } + + return ReadContextProcedureJava(ctx, d, meta) } diff --git a/pkg/resources/procedure_java_acceptance_test.go b/pkg/resources/procedure_java_acceptance_test.go new file mode 100644 index 0000000000..35bdd401ec --- /dev/null +++ b/pkg/resources/procedure_java_acceptance_test.go @@ -0,0 +1,429 @@ +package resources_test + +import ( + "fmt" + "testing" + "time" + + acc "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance" + r "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/resources" + + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/bettertestspoc/assert" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/bettertestspoc/assert/objectparametersassert" + 
"github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/bettertestspoc/assert/resourceassert" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/bettertestspoc/assert/resourceparametersassert" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/bettertestspoc/assert/resourceshowoutputassert" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/bettertestspoc/config" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/bettertestspoc/config/model" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/helpers/random" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/testdatatypes" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/testenvs" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/helpers" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/provider/resources" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk" + "github.com/hashicorp/terraform-plugin-testing/helper/resource" + "github.com/hashicorp/terraform-plugin-testing/plancheck" + "github.com/hashicorp/terraform-plugin-testing/tfversion" +) + +// TODO [SNOW-1348103]: test import +// TODO [SNOW-1348103]: test external changes +// TODO [SNOW-1348103]: test changes of attributes separately + +func TestAcc_ProcedureJava_InlineBasic(t *testing.T) { + className := "TestFunc" + funcName := "echoVarchar" + argName := "x" + dataType := testdatatypes.DataTypeVarchar_100 + + id := acc.TestClient().Ids.RandomSchemaObjectIdentifierWithArgumentsNewDataTypes(dataType) + idWithChangedNameButTheSameDataType := acc.TestClient().Ids.RandomSchemaObjectIdentifierWithArgumentsNewDataTypes(dataType) + + handler := fmt.Sprintf("%s.%s", className, funcName) + definition := acc.TestClient().Procedure.SampleJavaDefinition(t, className, funcName, argName) + + procedureModel := model.ProcedureJavaBasicInline("w", id, dataType, handler, 
definition). + WithArgument(argName, dataType) + procedureModelRenamed := model.ProcedureJavaBasicInline("w", idWithChangedNameButTheSameDataType, dataType, handler, definition). + WithArgument(argName, dataType) + + resource.Test(t, resource.TestCase{ + ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, + TerraformVersionChecks: []tfversion.TerraformVersionCheck{ + tfversion.RequireAbove(tfversion.Version1_5_0), + }, + PreCheck: func() { acc.TestAccPreCheck(t) }, + CheckDestroy: acc.CheckDestroy(t, resources.ProcedureJava), + Steps: []resource.TestStep{ + // CREATE BASIC + { + Config: config.FromModels(t, procedureModel), + Check: assert.AssertThat(t, + resourceassert.ProcedureJavaResource(t, procedureModel.ResourceReference()). + HasNameString(id.Name()). + HasIsSecureString(r.BooleanDefault). + HasCommentString(sdk.DefaultProcedureComment). + HasImportsLength(0). + HasTargetPathEmpty(). + HasRuntimeVersionString("11"). + HasProcedureDefinitionString(definition). + HasProcedureLanguageString("JAVA"). + HasFullyQualifiedNameString(id.FullyQualifiedName()), + resourceshowoutputassert.ProcedureShowOutput(t, procedureModel.ResourceReference()). + HasIsSecure(false), + assert.Check(resource.TestCheckResourceAttr(procedureModel.ResourceReference(), "arguments.0.arg_name", argName)), + assert.Check(resource.TestCheckResourceAttr(procedureModel.ResourceReference(), "arguments.0.arg_data_type", dataType.ToSql())), + assert.Check(resource.TestCheckResourceAttr(procedureModel.ResourceReference(), "arguments.0.arg_default_value", "")), + ), + }, + // RENAME + { + Config: config.FromModels(t, procedureModelRenamed), + Check: assert.AssertThat(t, + resourceassert.ProcedureJavaResource(t, procedureModelRenamed.ResourceReference()). + HasNameString(idWithChangedNameButTheSameDataType.Name()). 
+ HasFullyQualifiedNameString(idWithChangedNameButTheSameDataType.FullyQualifiedName()), + ), + }, + }, + }) +} + +func TestAcc_ProcedureJava_InlineEmptyArgs(t *testing.T) { + className := "TestFunc" + funcName := "echoVarchar" + returnDataType := testdatatypes.DataTypeVarchar_100 + + id := acc.TestClient().Ids.RandomSchemaObjectIdentifierWithArgumentsNewDataTypes() + + handler := fmt.Sprintf("%s.%s", className, funcName) + definition := acc.TestClient().Procedure.SampleJavaDefinitionNoArgs(t, className, funcName) + + procedureModel := model.ProcedureJavaBasicInline("w", id, returnDataType, handler, definition) + + resource.Test(t, resource.TestCase{ + ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, + TerraformVersionChecks: []tfversion.TerraformVersionCheck{ + tfversion.RequireAbove(tfversion.Version1_5_0), + }, + PreCheck: func() { acc.TestAccPreCheck(t) }, + CheckDestroy: acc.CheckDestroy(t, resources.ProcedureJava), + Steps: []resource.TestStep{ + // CREATE BASIC + { + Config: config.FromModels(t, procedureModel), + Check: assert.AssertThat(t, + resourceassert.ProcedureJavaResource(t, procedureModel.ResourceReference()). + HasNameString(id.Name()). + HasProcedureDefinitionString(definition). + HasProcedureLanguageString("JAVA"). + HasFullyQualifiedNameString(id.FullyQualifiedName()), + ), + }, + }, + }) +} + +func TestAcc_ProcedureJava_InlineBasicDefaultArg(t *testing.T) { + className := "TestFunc" + funcName := "echoVarchar" + argName := "x" + dataType := testdatatypes.DataTypeVarchar_100 + defaultValue := "'hello'" + + id := acc.TestClient().Ids.RandomSchemaObjectIdentifierWithArgumentsNewDataTypes(dataType) + + handler := fmt.Sprintf("%s.%s", className, funcName) + definition := acc.TestClient().Procedure.SampleJavaDefinition(t, className, funcName, argName) + + procedureModel := model.ProcedureJavaBasicInline("w", id, dataType, handler, definition). 
+ WithArgumentWithDefaultValue(argName, dataType, defaultValue) + + resource.Test(t, resource.TestCase{ + ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, + TerraformVersionChecks: []tfversion.TerraformVersionCheck{ + tfversion.RequireAbove(tfversion.Version1_5_0), + }, + PreCheck: func() { acc.TestAccPreCheck(t) }, + CheckDestroy: acc.CheckDestroy(t, resources.ProcedureJava), + Steps: []resource.TestStep{ + // CREATE BASIC + { + Config: config.FromModels(t, procedureModel), + Check: assert.AssertThat(t, + resourceassert.ProcedureJavaResource(t, procedureModel.ResourceReference()). + HasNameString(id.Name()). + HasProcedureDefinitionString(definition). + HasProcedureLanguageString("JAVA"). + HasFullyQualifiedNameString(id.FullyQualifiedName()), + assert.Check(resource.TestCheckResourceAttr(procedureModel.ResourceReference(), "arguments.0.arg_name", argName)), + assert.Check(resource.TestCheckResourceAttr(procedureModel.ResourceReference(), "arguments.0.arg_data_type", dataType.ToSql())), + assert.Check(resource.TestCheckResourceAttr(procedureModel.ResourceReference(), "arguments.0.arg_default_value", defaultValue)), + ), + }, + }, + }) +} + +func TestAcc_ProcedureJava_InlineFull(t *testing.T) { + _ = testenvs.GetOrSkipTest(t, testenvs.EnableAcceptance) + acc.TestAccPreCheck(t) + + stage, stageCleanup := acc.TestClient().Stage.CreateStage(t) + t.Cleanup(stageCleanup) + + className := "TestFunc" + funcName := "echoVarchar" + argName := "x" + dataType := testdatatypes.DataTypeVarchar_100 + + id := acc.TestClient().Ids.RandomSchemaObjectIdentifierWithArgumentsNewDataTypes(dataType) + + handler := fmt.Sprintf("%s.%s", className, funcName) + definition := acc.TestClient().Procedure.SampleJavaDefinition(t, className, funcName, argName) + // TODO [SNOW-1850370]: extract to helper + jarName := fmt.Sprintf("tf-%d-%s.jar", time.Now().Unix(), random.AlphaN(5)) + + procedureModel := model.ProcedureJavaBasicInline("w", id, dataType, handler, definition). 
+ WithArgument(argName, dataType). + WithTargetPathParts(stage.ID().FullyQualifiedName(), jarName). + WithRuntimeVersion("11") + + resource.Test(t, resource.TestCase{ + ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, + TerraformVersionChecks: []tfversion.TerraformVersionCheck{ + tfversion.RequireAbove(tfversion.Version1_5_0), + }, + PreCheck: func() { acc.TestAccPreCheck(t) }, + CheckDestroy: acc.CheckDestroy(t, resources.ProcedureJava), + Steps: []resource.TestStep{ + // CREATE BASIC + { + Config: config.FromModels(t, procedureModel), + Check: assert.AssertThat(t, + resourceassert.ProcedureJavaResource(t, procedureModel.ResourceReference()). + HasNameString(id.Name()). + HasIsSecureString(r.BooleanDefault). + HasCommentString(sdk.DefaultProcedureComment). + HasImportsLength(0). + HasRuntimeVersionString("11"). + HasProcedureDefinitionString(definition). + HasProcedureLanguageString("JAVA"). + HasFullyQualifiedNameString(id.FullyQualifiedName()), + assert.Check(resource.TestCheckResourceAttr(procedureModel.ResourceReference(), "target_path.0.stage_location", stage.ID().FullyQualifiedName())), + assert.Check(resource.TestCheckResourceAttr(procedureModel.ResourceReference(), "target_path.0.path_on_stage", jarName)), + resourceshowoutputassert.ProcedureShowOutput(t, procedureModel.ResourceReference()). 
+ HasIsSecure(false), + ), + }, + }, + }) +} + +func TestAcc_ProcedureJava_StagedBasic(t *testing.T) { + _ = testenvs.GetOrSkipTest(t, testenvs.EnableAcceptance) + acc.TestAccPreCheck(t) + + stage, stageCleanup := acc.TestClient().Stage.CreateStage(t) + t.Cleanup(stageCleanup) + + tmpJavaProcedure := acc.TestClient().CreateSampleJavaProcedureAndJarOnStage(t, stage) + + dataType := tmpJavaProcedure.ArgType + id := acc.TestClient().Ids.RandomSchemaObjectIdentifierWithArgumentsNewDataTypes(dataType) + + argName := "x" + handler := tmpJavaProcedure.JavaHandler() + + procedureModel := model.ProcedureJavaBasicStaged("w", id, dataType, handler, stage.ID().FullyQualifiedName(), tmpJavaProcedure.JarName). + WithArgument(argName, dataType) + + resource.Test(t, resource.TestCase{ + ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, + TerraformVersionChecks: []tfversion.TerraformVersionCheck{ + tfversion.RequireAbove(tfversion.Version1_5_0), + }, + PreCheck: func() { acc.TestAccPreCheck(t) }, + CheckDestroy: acc.CheckDestroy(t, resources.ProcedureJava), + Steps: []resource.TestStep{ + // CREATE BASIC + { + Config: config.FromModels(t, procedureModel), + Check: assert.AssertThat(t, + resourceassert.ProcedureJavaResource(t, procedureModel.ResourceReference()). + HasNameString(id.Name()). + HasIsSecureString(r.BooleanDefault). + HasCommentString(sdk.DefaultProcedureComment). + HasImportsLength(1). + HasNoProcedureDefinition(). + HasProcedureLanguageString("JAVA"). + HasFullyQualifiedNameString(id.FullyQualifiedName()), + assert.Check(resource.TestCheckResourceAttr(procedureModel.ResourceReference(), "imports.0.stage_location", stage.ID().FullyQualifiedName())), + assert.Check(resource.TestCheckResourceAttr(procedureModel.ResourceReference(), "imports.0.path_on_stage", tmpJavaProcedure.JarName)), + resourceshowoutputassert.ProcedureShowOutput(t, procedureModel.ResourceReference()). 
+ HasIsSecure(false), + ), + }, + }, + }) +} + +func TestAcc_ProcedureJava_AllParameters(t *testing.T) { + className := "TestFunc" + funcName := "echoVarchar" + argName := "x" + dataType := testdatatypes.DataTypeVarchar_100 + id := acc.TestClient().Ids.RandomSchemaObjectIdentifierWithArgumentsNewDataTypes(dataType) + + handler := fmt.Sprintf("%s.%s", className, funcName) + definition := acc.TestClient().Procedure.SampleJavaDefinition(t, className, funcName, argName) + + procedureModel := model.ProcedureJavaBasicInline("w", id, dataType, handler, definition). + WithArgument(argName, dataType) + procedureModelWithAllParametersSet := model.ProcedureJavaBasicInline("w", id, dataType, handler, definition). + WithArgument(argName, dataType). + WithEnableConsoleOutput(true). + WithLogLevel(string(sdk.LogLevelWarn)). + WithMetricLevel(string(sdk.MetricLevelAll)). + WithTraceLevel(string(sdk.TraceLevelAlways)) + + resource.Test(t, resource.TestCase{ + ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, + TerraformVersionChecks: []tfversion.TerraformVersionCheck{ + tfversion.RequireAbove(tfversion.Version1_5_0), + }, + PreCheck: func() { acc.TestAccPreCheck(t) }, + CheckDestroy: acc.CheckDestroy(t, resources.ProcedureJava), + Steps: []resource.TestStep{ + // create with default values for all the parameters + { + Config: config.FromModels(t, procedureModel), + Check: assert.AssertThat(t, + objectparametersassert.ProcedureParameters(t, id). + HasAllDefaults(). + HasAllDefaultsExplicit(), + resourceparametersassert.ProcedureResourceParameters(t, procedureModel.ResourceReference()). + HasAllDefaults(), + ), + }, + // import when no parameter set + { + ResourceName: procedureModel.ResourceReference(), + ImportState: true, + ImportStateCheck: assert.AssertThatImport(t, + resourceparametersassert.ImportedProcedureResourceParameters(t, helpers.EncodeResourceIdentifier(id)). 
+ HasAllDefaults(), + ), + }, + // set all parameters + { + Config: config.FromModels(t, procedureModelWithAllParametersSet), + Check: assert.AssertThat(t, + objectparametersassert.ProcedureParameters(t, id). + HasEnableConsoleOutput(true). + HasLogLevel(sdk.LogLevelWarn). + HasMetricLevel(sdk.MetricLevelAll). + HasTraceLevel(sdk.TraceLevelAlways), + resourceparametersassert.ProcedureResourceParameters(t, procedureModelWithAllParametersSet.ResourceReference()). + HasEnableConsoleOutput(true). + HasLogLevel(sdk.LogLevelWarn). + HasMetricLevel(sdk.MetricLevelAll). + HasTraceLevel(sdk.TraceLevelAlways), + ), + }, + // import when all parameters set + { + ResourceName: procedureModelWithAllParametersSet.ResourceReference(), + ImportState: true, + ImportStateCheck: assert.AssertThatImport(t, + resourceparametersassert.ImportedProcedureResourceParameters(t, helpers.EncodeResourceIdentifier(id)). + HasEnableConsoleOutput(true). + HasLogLevel(sdk.LogLevelWarn). + HasMetricLevel(sdk.MetricLevelAll). + HasTraceLevel(sdk.TraceLevelAlways), + ), + }, + // unset all the parameters + { + Config: config.FromModels(t, procedureModel), + Check: assert.AssertThat(t, + objectparametersassert.ProcedureParameters(t, id). + HasAllDefaults(). + HasAllDefaultsExplicit(), + resourceparametersassert.ProcedureResourceParameters(t, procedureModel.ResourceReference()). + HasAllDefaults(), + ), + }, + // destroy + { + Config: config.FromModels(t, procedureModel), + Destroy: true, + }, + // create with all parameters set + { + Config: config.FromModels(t, procedureModelWithAllParametersSet), + Check: assert.AssertThat(t, + objectparametersassert.ProcedureParameters(t, id). + HasEnableConsoleOutput(true). + HasLogLevel(sdk.LogLevelWarn). + HasMetricLevel(sdk.MetricLevelAll). + HasTraceLevel(sdk.TraceLevelAlways), + resourceparametersassert.ProcedureResourceParameters(t, procedureModelWithAllParametersSet.ResourceReference()). + HasEnableConsoleOutput(true). + HasLogLevel(sdk.LogLevelWarn). 
+ HasMetricLevel(sdk.MetricLevelAll). + HasTraceLevel(sdk.TraceLevelAlways), + ), + }, + }, + }) +} + +func TestAcc_ProcedureJava_handleExternalLanguageChange(t *testing.T) { + _ = testenvs.GetOrSkipTest(t, testenvs.EnableAcceptance) + acc.TestAccPreCheck(t) + + tmpJavaProcedure := acc.TestClient().CreateSampleJavaProcedureAndJarOnUserStage(t) + + dataType := tmpJavaProcedure.ArgType + id := acc.TestClient().Ids.RandomSchemaObjectIdentifierWithArgumentsNewDataTypes(dataType) + + argName := "x" + handler := tmpJavaProcedure.JavaHandler() + + procedureModel := model.ProcedureJavaBasicStaged("w", id, dataType, handler, "~", tmpJavaProcedure.JarName). + WithArgument(argName, dataType) + + resource.Test(t, resource.TestCase{ + ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, + TerraformVersionChecks: []tfversion.TerraformVersionCheck{ + tfversion.RequireAbove(tfversion.Version1_5_0), + }, + PreCheck: func() { acc.TestAccPreCheck(t) }, + CheckDestroy: acc.CheckDestroy(t, resources.ProcedureJava), + Steps: []resource.TestStep{ + { + Config: config.FromModels(t, procedureModel), + Check: assert.AssertThat(t, + resourceassert.ProcedureJavaResource(t, procedureModel.ResourceReference()).HasNameString(id.Name()).HasProcedureLanguageString("JAVA"), + ), + }, + // change type externally by creating a new procedure with the exact same id but using different language + { + PreConfig: func() { + acc.TestClient().Procedure.DropProcedureFunc(t, id)() + acc.TestClient().Procedure.CreateScalaStaged(t, id, dataType, tmpJavaProcedure.JarLocation(), handler) + }, + Config: config.FromModels(t, procedureModel), + ConfigPlanChecks: resource.ConfigPlanChecks{ + PreApply: []plancheck.PlanCheck{ + plancheck.ExpectResourceAction(procedureModel.ResourceReference(), plancheck.ResourceActionDestroyBeforeCreate), + }, + }, + Check: assert.AssertThat(t, + resourceassert.ProcedureJavaResource(t, 
procedureModel.ResourceReference()).HasNameString(id.Name()).HasProcedureLanguageString("JAVA"), + ), + }, + }, + }) +} diff --git a/pkg/resources/procedure_parameters.go b/pkg/resources/procedure_parameters.go index eba2a378b2..4bb719776e 100644 --- a/pkg/resources/procedure_parameters.go +++ b/pkg/resources/procedure_parameters.go @@ -80,16 +80,16 @@ func handleProcedureParameterRead(d *schema.ResourceData, procedureParameters [] } // They do not work in create, that's why are set in alter -func handleProcedureParametersCreate(d *schema.ResourceData, alterOpts *sdk.ProcedureSet) diag.Diagnostics { +func handleProcedureParametersCreate(d *schema.ResourceData, set *sdk.ProcedureSetRequest) diag.Diagnostics { return JoinDiags( - handleParameterCreate(d, sdk.ProcedureParameterEnableConsoleOutput, &alterOpts.EnableConsoleOutput), - handleParameterCreateWithMapping(d, sdk.ProcedureParameterLogLevel, &alterOpts.LogLevel, stringToStringEnumProvider(sdk.ToLogLevel)), - handleParameterCreateWithMapping(d, sdk.ProcedureParameterMetricLevel, &alterOpts.MetricLevel, stringToStringEnumProvider(sdk.ToMetricLevel)), - handleParameterCreateWithMapping(d, sdk.ProcedureParameterTraceLevel, &alterOpts.TraceLevel, stringToStringEnumProvider(sdk.ToTraceLevel)), + handleParameterCreate(d, sdk.ProcedureParameterEnableConsoleOutput, &set.EnableConsoleOutput), + handleParameterCreateWithMapping(d, sdk.ProcedureParameterLogLevel, &set.LogLevel, stringToStringEnumProvider(sdk.ToLogLevel)), + handleParameterCreateWithMapping(d, sdk.ProcedureParameterMetricLevel, &set.MetricLevel, stringToStringEnumProvider(sdk.ToMetricLevel)), + handleParameterCreateWithMapping(d, sdk.ProcedureParameterTraceLevel, &set.TraceLevel, stringToStringEnumProvider(sdk.ToTraceLevel)), ) } -func handleProcedureParametersUpdate(d *schema.ResourceData, set *sdk.ProcedureSet, unset *sdk.ProcedureUnset) diag.Diagnostics { +func handleProcedureParametersUpdate(d *schema.ResourceData, set *sdk.ProcedureSetRequest, unset 
*sdk.ProcedureUnsetRequest) diag.Diagnostics { return JoinDiags( handleParameterUpdate(d, sdk.ProcedureParameterEnableConsoleOutput, &set.EnableConsoleOutput, &unset.EnableConsoleOutput), handleParameterUpdateWithMapping(d, sdk.ProcedureParameterLogLevel, &set.LogLevel, &unset.LogLevel, stringToStringEnumProvider(sdk.ToLogLevel)), diff --git a/pkg/sdk/functions_and_procedures_commons.go b/pkg/sdk/functions_and_procedures_commons.go new file mode 100644 index 0000000000..df64aba187 --- /dev/null +++ b/pkg/sdk/functions_and_procedures_commons.go @@ -0,0 +1,118 @@ +package sdk + +import ( + "fmt" + "log" + "strings" + + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk/datatypes" +) + +type NormalizedPath struct { + // StageLocation is a normalized (fully-quoted id or `~`) stage location + StageLocation string + // PathOnStage is path to the file on stage without opening `/` + PathOnStage string +} + +// NormalizedArgument does not contain default value because it is not returned in the Signature (or any other field). +type NormalizedArgument struct { + Name string + DataType datatypes.DataType +} + +// TODO [SNOW-1850370]: use ParseCommaSeparatedStringArray + collections.MapErr combo here and in other methods? 
+func parseFunctionOrProcedureImports(importsRaw *string) ([]NormalizedPath, error) { + normalizedImports := make([]NormalizedPath, 0) + if importsRaw == nil || *importsRaw == "" || *importsRaw == "[]" { + return normalizedImports, nil + } + if !strings.HasPrefix(*importsRaw, "[") || !strings.HasSuffix(*importsRaw, "]") { + return normalizedImports, fmt.Errorf("could not parse imports from Snowflake: %s, wrapping brackets not found", *importsRaw) + } + raw := (*importsRaw)[1 : len(*importsRaw)-1] + imports := strings.Split(raw, ",") + for _, imp := range imports { + p, err := parseFunctionOrProcedureStageLocationPath(imp) + if err != nil { + return nil, fmt.Errorf("could not parse imports from Snowflake: %s, err: %w", *importsRaw, err) + } + normalizedImports = append(normalizedImports, *p) + } + return normalizedImports, nil +} + +func parseFunctionOrProcedureStageLocationPath(location string) (*NormalizedPath, error) { + log.Printf("[DEBUG] parsing stage location path part: %s", location) + idx := strings.Index(location, "/") + if idx < 0 { + return nil, fmt.Errorf("part %s cannot be split into stage and path", location) + } + stageRaw := strings.TrimPrefix(strings.TrimSpace(location[:idx]), "@") + if stageRaw != "~" { + stageId, err := ParseSchemaObjectIdentifier(stageRaw) + if err != nil { + return nil, fmt.Errorf("part %s contains incorrect stage location: %w", location, err) + } + stageRaw = stageId.FullyQualifiedName() + } + pathRaw := strings.TrimPrefix(strings.TrimSpace(location[idx:]), "/") + if pathRaw == "" { + return nil, fmt.Errorf("part %s contains empty path", location) + } + return &NormalizedPath{stageRaw, pathRaw}, nil +} + +func parseFunctionOrProcedureReturns(returns string) (datatypes.DataType, bool, error) { + var returnNotNull bool + trimmed := strings.TrimSpace(returns) + if strings.HasSuffix(trimmed, " NOT NULL") { + returnNotNull = true + trimmed = strings.TrimSuffix(trimmed, " NOT NULL") + } + dt, err := datatypes.ParseDataType(trimmed) 
+ return dt, returnNotNull, err +} + +// Format in Snowflake DB is: (argName argType, argName argType, ...). +func parseFunctionOrProcedureSignature(signature string) ([]NormalizedArgument, error) { + normalizedArguments := make([]NormalizedArgument, 0) + trimmed := strings.TrimSpace(signature) + if trimmed == "" { + return normalizedArguments, fmt.Errorf("could not parse signature from Snowflake: %s, can't be empty", signature) + } + if trimmed == "()" { + return normalizedArguments, nil + } + if !strings.HasPrefix(trimmed, "(") || !strings.HasSuffix(trimmed, ")") { + return normalizedArguments, fmt.Errorf("could not parse signature from Snowflake: %s, wrapping parentheses not found", trimmed) + } + raw := (trimmed)[1 : len(trimmed)-1] + args := strings.Split(raw, ",") + + for _, arg := range args { + a, err := parseFunctionOrProcedureArgument(arg) + if err != nil { + return nil, fmt.Errorf("could not parse signature from Snowflake: %s, err: %w", trimmed, err) + } + normalizedArguments = append(normalizedArguments, *a) + } + return normalizedArguments, nil +} + +// TODO [SNOW-1850370]: test with strange arg names (first integration test) +func parseFunctionOrProcedureArgument(arg string) (*NormalizedArgument, error) { + log.Printf("[DEBUG] parsing argument: %s", arg) + trimmed := strings.TrimSpace(arg) + idx := strings.Index(trimmed, " ") + if idx < 0 { + return nil, fmt.Errorf("arg %s cannot be split into arg name, data type, and default", arg) + } + argName := trimmed[:idx] + rest := strings.TrimSpace(trimmed[idx:]) + dt, err := datatypes.ParseDataType(rest) + if err != nil { + return nil, fmt.Errorf("arg type %s cannot be parsed, err: %w", rest, err) + } + return &NormalizedArgument{argName, dt}, nil +} diff --git a/pkg/sdk/functions_ext.go b/pkg/sdk/functions_ext.go index 2a87c2a458..facd9ede1d 100644 --- a/pkg/sdk/functions_ext.go +++ b/pkg/sdk/functions_ext.go @@ -4,9 +4,7 @@ import ( "context" "errors" "fmt" - "log" "strconv" - "strings" 
"github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk/datatypes" ) @@ -42,19 +40,6 @@ type FunctionDetails struct { NormalizedArguments []NormalizedArgument } -type NormalizedPath struct { - // StageLocation is a normalized (fully-quoted id or `~`) stage location - StageLocation string - // PathOnStage is path to the file on stage without opening `/` - PathOnStage string -} - -// NormalizedArgument does not contain default value because it is not returned in the Signature (or any other field). -type NormalizedArgument struct { - Name string - DataType datatypes.DataType -} - func functionDetailsFromRows(rows []FunctionDetail) (*FunctionDetails, error) { v := &FunctionDetails{} var errs []error @@ -96,14 +81,14 @@ func functionDetailsFromRows(rows []FunctionDetail) (*FunctionDetails, error) { return nil, e } - if functionDetailsImports, err := parseFunctionDetailsImport(*v); err != nil { + if normalizedImports, err := parseFunctionOrProcedureImports(v.Imports); err != nil { errs = append(errs, err) } else { - v.NormalizedImports = functionDetailsImports + v.NormalizedImports = normalizedImports } if v.TargetPath != nil { - if p, err := parseStageLocationPath(*v.TargetPath); err != nil { + if p, err := parseFunctionOrProcedureStageLocationPath(*v.TargetPath); err != nil { errs = append(errs, err) } else { v.NormalizedTargetPath = p @@ -126,102 +111,6 @@ func functionDetailsFromRows(rows []FunctionDetail) (*FunctionDetails, error) { return v, errors.Join(errs...) } -// TODO [SNOW-1850370]: use ParseCommaSeparatedStringArray + collections.MapErr combo here and in other methods? 
-func parseFunctionDetailsImport(details FunctionDetails) ([]NormalizedPath, error) { - functionDetailsImports := make([]NormalizedPath, 0) - if details.Imports == nil || *details.Imports == "" || *details.Imports == "[]" { - return functionDetailsImports, nil - } - if !strings.HasPrefix(*details.Imports, "[") || !strings.HasSuffix(*details.Imports, "]") { - return functionDetailsImports, fmt.Errorf("could not parse imports from Snowflake: %s, wrapping brackets not found", *details.Imports) - } - raw := (*details.Imports)[1 : len(*details.Imports)-1] - imports := strings.Split(raw, ",") - for _, imp := range imports { - p, err := parseStageLocationPath(imp) - if err != nil { - return nil, fmt.Errorf("could not parse imports from Snowflake: %s, err: %w", *details.Imports, err) - } - functionDetailsImports = append(functionDetailsImports, *p) - } - return functionDetailsImports, nil -} - -func parseStageLocationPath(location string) (*NormalizedPath, error) { - log.Printf("[DEBUG] parsing stage location path part: %s", location) - idx := strings.Index(location, "/") - if idx < 0 { - return nil, fmt.Errorf("part %s cannot be split into stage and path", location) - } - stageRaw := strings.TrimPrefix(strings.TrimSpace(location[:idx]), "@") - if stageRaw != "~" { - stageId, err := ParseSchemaObjectIdentifier(stageRaw) - if err != nil { - return nil, fmt.Errorf("part %s contains incorrect stage location: %w", location, err) - } - stageRaw = stageId.FullyQualifiedName() - } - pathRaw := strings.TrimPrefix(strings.TrimSpace(location[idx:]), "/") - if pathRaw == "" { - return nil, fmt.Errorf("part %s contains empty path", location) - } - return &NormalizedPath{stageRaw, pathRaw}, nil -} - -func parseFunctionOrProcedureReturns(returns string) (datatypes.DataType, bool, error) { - var returnNotNull bool - trimmed := strings.TrimSpace(returns) - if strings.HasSuffix(trimmed, " NOT NULL") { - returnNotNull = true - trimmed = strings.TrimSuffix(trimmed, " NOT NULL") - } - dt, err 
:= datatypes.ParseDataType(trimmed) - return dt, returnNotNull, err -} - -// Format in Snowflake DB is: (argName argType, argName argType, ...). -func parseFunctionOrProcedureSignature(signature string) ([]NormalizedArgument, error) { - normalizedArguments := make([]NormalizedArgument, 0) - trimmed := strings.TrimSpace(signature) - if trimmed == "" { - return normalizedArguments, fmt.Errorf("could not parse signature from Snowflake: %s, can't be empty", signature) - } - if trimmed == "()" { - return normalizedArguments, nil - } - if !strings.HasPrefix(trimmed, "(") || !strings.HasSuffix(trimmed, ")") { - return normalizedArguments, fmt.Errorf("could not parse signature from Snowflake: %s, wrapping parentheses not found", trimmed) - } - raw := (trimmed)[1 : len(trimmed)-1] - args := strings.Split(raw, ",") - - for _, arg := range args { - a, err := parseFunctionOrProcedureArgument(arg) - if err != nil { - return nil, fmt.Errorf("could not parse signature from Snowflake: %s, err: %w", trimmed, err) - } - normalizedArguments = append(normalizedArguments, *a) - } - return normalizedArguments, nil -} - -// TODO [SNOW-1850370]: test with strange arg names (first integration test) -func parseFunctionOrProcedureArgument(arg string) (*NormalizedArgument, error) { - log.Printf("[DEBUG] parsing argument: %s", arg) - trimmed := strings.TrimSpace(arg) - idx := strings.Index(trimmed, " ") - if idx < 0 { - return nil, fmt.Errorf("arg %s cannot be split into arg name, data type, and default", arg) - } - argName := trimmed[:idx] - rest := strings.TrimSpace(trimmed[idx:]) - dt, err := datatypes.ParseDataType(rest) - if err != nil { - return nil, fmt.Errorf("arg type %s cannot be parsed, err: %w", rest, err) - } - return &NormalizedArgument{argName, dt}, nil -} - func (v *functions) DescribeDetails(ctx context.Context, id SchemaObjectIdentifierWithArguments) (*FunctionDetails, error) { rows, err := v.Describe(ctx, id) if err != nil { diff --git a/pkg/sdk/functions_ext_test.go 
b/pkg/sdk/functions_ext_test.go index a4f77431d0..9fea4d4319 100644 --- a/pkg/sdk/functions_ext_test.go +++ b/pkg/sdk/functions_ext_test.go @@ -9,7 +9,7 @@ import ( ) // TODO [SNOW-1850370]: test parsing single -func Test_parseFunctionDetailsImport(t *testing.T) { +func Test_parseFunctionOrProcedureImports(t *testing.T) { inputs := []struct { rawInput string expected []NormalizedPath @@ -43,9 +43,7 @@ func Test_parseFunctionDetailsImport(t *testing.T) { for _, tc := range inputs { tc := tc t.Run(fmt.Sprintf("Snowflake raw imports: %s", tc.rawInput), func(t *testing.T) { - details := FunctionDetails{Imports: &tc.rawInput} - - results, err := parseFunctionDetailsImport(details) + results, err := parseFunctionOrProcedureImports(&tc.rawInput) require.NoError(t, err) require.Equal(t, tc.expected, results) }) @@ -54,9 +52,7 @@ func Test_parseFunctionDetailsImport(t *testing.T) { for _, tc := range badInputs { tc := tc t.Run(fmt.Sprintf("incorrect Snowflake input: %s, expecting error with: %s", tc.rawInput, tc.expectedErrorPart), func(t *testing.T) { - details := FunctionDetails{Imports: &tc.rawInput} - - _, err := parseFunctionDetailsImport(details) + _, err := parseFunctionOrProcedureImports(&tc.rawInput) require.Error(t, err) require.ErrorContains(t, err, "could not parse imports from Snowflake") require.ErrorContains(t, err, tc.expectedErrorPart) @@ -64,9 +60,7 @@ func Test_parseFunctionDetailsImport(t *testing.T) { } t.Run("Snowflake raw imports nil", func(t *testing.T) { - details := FunctionDetails{Imports: nil} - - results, err := parseFunctionDetailsImport(details) + results, err := parseFunctionOrProcedureImports(nil) require.NoError(t, err) require.Equal(t, []NormalizedPath{}, results) }) diff --git a/pkg/sdk/procedures_ext.go b/pkg/sdk/procedures_ext.go index a8ee2844bf..de40fd8732 100644 --- a/pkg/sdk/procedures_ext.go +++ b/pkg/sdk/procedures_ext.go @@ -29,9 +29,15 @@ type ProcedureDetails struct { Handler *string // present for python, java, and scala 
(hidden when SECURE) RuntimeVersion *string // present for python, java, and scala (hidden when SECURE) Packages *string // list // present for python, java, and scala (hidden when SECURE) - TargetPath *string // list present for scala and java (hidden when SECURE) + TargetPath *string // present for scala and java (hidden when SECURE) InstalledPackages *string // list present for python (hidden when SECURE) ExecuteAs string // present for all procedure types + + NormalizedImports []NormalizedPath + NormalizedTargetPath *NormalizedPath + ReturnDataType datatypes.DataType + ReturnNotNull bool + NormalizedArguments []NormalizedArgument } func procedureDetailsFromRows(rows []ProcedureDetail) (*ProcedureDetails, error) { @@ -71,6 +77,37 @@ func procedureDetailsFromRows(rows []ProcedureDetail) (*ProcedureDetails, error) v.TargetPath = row.Value } } + if e := errors.Join(errs...); e != nil { + return nil, e + } + + if normalizedImports, err := parseFunctionOrProcedureImports(v.Imports); err != nil { + errs = append(errs, err) + } else { + v.NormalizedImports = normalizedImports + } + + if v.TargetPath != nil { + if p, err := parseFunctionOrProcedureStageLocationPath(*v.TargetPath); err != nil { + errs = append(errs, err) + } else { + v.NormalizedTargetPath = p + } + } + + if dt, returnNotNull, err := parseFunctionOrProcedureReturns(v.Returns); err != nil { + errs = append(errs, err) + } else { + v.ReturnDataType = dt + v.ReturnNotNull = returnNotNull + } + + if args, err := parseFunctionOrProcedureSignature(v.Signature); err != nil { + errs = append(errs, err) + } else { + v.NormalizedArguments = args + } + return v, errors.Join(errs...) 
} diff --git a/pkg/sdk/testint/procedures_integration_test.go b/pkg/sdk/testint/procedures_integration_test.go index 6e0298308e..c5434d6308 100644 --- a/pkg/sdk/testint/procedures_integration_test.go +++ b/pkg/sdk/testint/procedures_integration_test.go @@ -100,6 +100,8 @@ func TestInt_Procedures(t *testing.T) { assertions.AssertThatObject(t, objectassert.ProcedureDetails(t, procedure.ID()). HasSignature(fmt.Sprintf(`(%s %s)`, argName, dataType.ToLegacyDataTypeSql())). HasReturns(dataType.ToSql()). + HasReturnDataType(dataType). + HasReturnNotNull(false). HasLanguage("JAVA"). HasBody(definition). HasNullHandling(string(sdk.NullInputBehaviorCalledOnNullInput)). @@ -107,10 +109,12 @@ func TestInt_Procedures(t *testing.T) { HasExternalAccessIntegrationsNil(). HasSecretsNil(). HasImports(`[]`). + HasExactlyImportsNormalizedInAnyOrder(). HasHandler(handler). HasRuntimeVersion("11"). HasPackages(`[com.snowflake:snowpark:1.14.0]`). HasTargetPathNil(). + HasNormalizedTargetPathNil(). HasInstalledPackagesNil(). HasExecuteAs("OWNER"), ) @@ -186,6 +190,8 @@ func TestInt_Procedures(t *testing.T) { assertions.AssertThatObject(t, objectassert.ProcedureDetails(t, function.ID()). HasSignature(fmt.Sprintf(`(%s %s)`, argName, dataType.ToLegacyDataTypeSql())). HasReturns(fmt.Sprintf(`%s NOT NULL`, dataType.ToSql())). + HasReturnDataType(dataType). + HasReturnNotNull(true). HasLanguage("JAVA"). HasBody(definition). HasNullHandling(string(sdk.NullInputBehaviorReturnsNullInput)). @@ -193,10 +199,14 @@ func TestInt_Procedures(t *testing.T) { HasExactlyExternalAccessIntegrations(externalAccessIntegration). HasExactlySecrets(map[string]sdk.SchemaObjectIdentifier{"abc": secretId}). HasImports(fmt.Sprintf(`[%s]`, tmpJavaProcedure.JarLocation())). + HasExactlyImportsNormalizedInAnyOrder(sdk.NormalizedPath{ + StageLocation: "~", PathOnStage: tmpJavaProcedure.JarName, + }). HasHandler(handler). HasRuntimeVersion("11"). HasPackages(`[com.snowflake:snowpark:1.14.0,com.snowflake:telemetry:0.1.0]`). 
HasTargetPath(targetPath). + HasNormalizedTargetPath("~", jarName). HasInstalledPackagesNil(). HasExecuteAs("OWNER"), ) @@ -256,6 +266,8 @@ func TestInt_Procedures(t *testing.T) { assertions.AssertThatObject(t, objectassert.ProcedureDetails(t, function.ID()). HasSignature(fmt.Sprintf(`(%s %s)`, argName, dataType.ToLegacyDataTypeSql())). HasReturns(dataType.ToSql()). + HasReturnDataType(dataType). + HasReturnNotNull(false). HasLanguage("JAVA"). HasBodyNil(). HasNullHandling(string(sdk.NullInputBehaviorCalledOnNullInput)). @@ -263,10 +275,14 @@ func TestInt_Procedures(t *testing.T) { HasExternalAccessIntegrationsNil(). HasSecretsNil(). HasImports(fmt.Sprintf(`[%s]`, importPath)). + HasExactlyImportsNormalizedInAnyOrder(sdk.NormalizedPath{ + StageLocation: "~", PathOnStage: tmpJavaProcedure.JarName, + }). HasHandler(handler). HasRuntimeVersion("11"). HasPackages(`[com.snowflake:snowpark:1.14.0,com.snowflake:telemetry:0.1.0]`). HasTargetPathNil(). + HasNormalizedTargetPathNil(). HasInstalledPackagesNil(). HasExecuteAs("OWNER"), ) @@ -333,6 +349,8 @@ func TestInt_Procedures(t *testing.T) { assertions.AssertThatObject(t, objectassert.ProcedureDetails(t, function.ID()). HasSignature(fmt.Sprintf(`(%s %s)`, argName, dataType.ToLegacyDataTypeSql())). HasReturns(fmt.Sprintf(`%s NOT NULL`, dataType.ToSql())). + HasReturnDataType(dataType). + HasReturnNotNull(true). HasLanguage("JAVA"). HasBodyNil(). HasNullHandling(string(sdk.NullInputBehaviorReturnsNullInput)). @@ -340,10 +358,14 @@ func TestInt_Procedures(t *testing.T) { HasExactlyExternalAccessIntegrations(externalAccessIntegration). HasExactlySecrets(map[string]sdk.SchemaObjectIdentifier{"abc": secretId}). HasImports(fmt.Sprintf(`[%s]`, tmpJavaProcedure.JarLocation())). + HasExactlyImportsNormalizedInAnyOrder(sdk.NormalizedPath{ + StageLocation: "~", PathOnStage: tmpJavaProcedure.JarName, + }). HasHandler(handler). HasRuntimeVersion("11"). HasPackages(`[com.snowflake:snowpark:1.14.0,com.snowflake:telemetry:0.1.0]`). 
HasTargetPathNil(). + HasNormalizedTargetPathNil(). HasInstalledPackagesNil(). HasExecuteAs("OWNER"), ) @@ -386,8 +408,47 @@ func TestInt_Procedures(t *testing.T) { assertions.AssertThatObject(t, objectassert.ProcedureDetails(t, function.ID()). HasImports(fmt.Sprintf(`[@"%s"."%s".%s/%s]`, stage.ID().DatabaseName(), stage.ID().SchemaName(), stage.ID().Name(), tmpJavaProcedureDifferentStage.JarName)). + HasExactlyImportsNormalizedInAnyOrder(sdk.NormalizedPath{ + StageLocation: stage.ID().FullyQualifiedName(), PathOnStage: tmpJavaProcedureDifferentStage.JarName, + }). HasHandler(handler). - HasTargetPathNil(), + HasTargetPathNil(). + HasNormalizedTargetPathNil(), + ) + }) + + // proves that we don't get default argument values from SHOW and DESCRIBE + t.Run("create procedure for Java - default argument value", func(t *testing.T) { + className := "TestFunc" + funcName := "echoVarchar" + argName := "x" + dataType := testdatatypes.DataTypeVarchar_100 + + id := testClientHelper().Ids.RandomSchemaObjectIdentifierWithArguments(sdk.LegacyDataTypeFrom(dataType)) + argument := sdk.NewProcedureArgumentRequest(argName, dataType).WithDefaultValue(`'abc'`) + dt := sdk.NewProcedureReturnsResultDataTypeRequest(dataType) + returns := sdk.NewProcedureReturnsRequest().WithResultDataType(*dt) + handler := fmt.Sprintf("%s.%s", className, funcName) + definition := testClientHelper().Procedure.SampleJavaDefinition(t, className, funcName, argName) + packages := []sdk.ProcedurePackageRequest{*sdk.NewProcedurePackageRequest("com.snowflake:snowpark:1.14.0")} + + request := sdk.NewCreateForJavaProcedureRequest(id.SchemaObjectId(), *returns, "11", packages, handler). + WithArguments([]sdk.ProcedureArgumentRequest{*argument}). 
+ WithProcedureDefinitionWrapped(definition) + + err := client.Procedures.CreateForJava(ctx, request) + require.NoError(t, err) + t.Cleanup(testClientHelper().Procedure.DropProcedureFunc(t, id)) + + function, err := client.Procedures.ShowByID(ctx, id) + require.NoError(t, err) + + assertions.AssertThatObject(t, objectassert.ProcedureFromObject(t, function). + HasArgumentsRaw(fmt.Sprintf(`%[1]s(DEFAULT %[2]s) RETURN %[2]s`, function.ID().Name(), dataType.ToLegacyDataTypeSql())), + ) + + assertions.AssertThatObject(t, objectassert.ProcedureDetails(t, function.ID()). + HasSignature(fmt.Sprintf(`(%s %s)`, argName, dataType.ToLegacyDataTypeSql())), ) }) @@ -432,6 +493,8 @@ func TestInt_Procedures(t *testing.T) { assertions.AssertThatObject(t, objectassert.ProcedureDetails(t, function.ID()). HasSignature(fmt.Sprintf(`(%s %s)`, argName, dataType.ToLegacyDataTypeSql())). HasReturns(dataType.ToSql()). + HasReturnDataType(dataType). + HasReturnNotNull(false). HasLanguage("JAVASCRIPT"). HasBody(definition). HasNullHandling(string(sdk.NullInputBehaviorCalledOnNullInput)). @@ -439,10 +502,12 @@ func TestInt_Procedures(t *testing.T) { HasExternalAccessIntegrationsNil(). HasSecretsNil(). HasImportsNil(). + HasExactlyImportsNormalizedInAnyOrder(). HasHandlerNil(). HasRuntimeVersionNil(). HasPackagesNil(). HasTargetPathNil(). + HasNormalizedTargetPathNil(). HasInstalledPackagesNil(). HasExecuteAs("OWNER"), ) @@ -500,6 +565,8 @@ func TestInt_Procedures(t *testing.T) { assertions.AssertThatObject(t, objectassert.ProcedureDetails(t, function.ID()). HasSignature(fmt.Sprintf(`(%s %s)`, argName, dataType.ToLegacyDataTypeSql())). HasReturns(fmt.Sprintf(`%s NOT NULL`, dataType.ToSql())). + HasReturnDataType(dataType). + HasReturnNotNull(true). HasLanguage("JAVASCRIPT"). HasBody(definition). HasNullHandling(string(sdk.NullInputBehaviorReturnsNullInput)). @@ -507,10 +574,12 @@ func TestInt_Procedures(t *testing.T) { HasExternalAccessIntegrationsNil(). HasSecretsNil(). HasImportsNil(). 
+ HasExactlyImportsNormalizedInAnyOrder(). HasHandlerNil(). HasRuntimeVersionNil(). HasPackagesNil(). HasTargetPathNil(). + HasNormalizedTargetPathNil(). HasInstalledPackagesNil(). HasExecuteAs("CALLER"), ) @@ -568,6 +637,8 @@ func TestInt_Procedures(t *testing.T) { assertions.AssertThatObject(t, objectassert.ProcedureDetails(t, function.ID()). HasSignature(fmt.Sprintf(`(%s %s)`, argName, dataType.ToLegacyDataTypeSql())). HasReturns(strings.ReplaceAll(dataType.ToSql(), " ", "")). + HasReturnDataType(dataType). + HasReturnNotNull(false). HasLanguage("PYTHON"). HasBody(definition). HasNullHandling(string(sdk.NullInputBehaviorCalledOnNullInput)). @@ -575,10 +646,12 @@ func TestInt_Procedures(t *testing.T) { HasExternalAccessIntegrationsNil(). HasSecretsNil(). HasImports(`[]`). + HasExactlyImportsNormalizedInAnyOrder(). HasHandler(funcName). HasRuntimeVersion("3.8"). HasPackages(`['snowflake-snowpark-python==1.14.0']`). HasTargetPathNil(). + HasNormalizedTargetPathNil(). HasInstalledPackagesNotEmpty(). HasExecuteAs("OWNER"), ) @@ -648,6 +721,8 @@ func TestInt_Procedures(t *testing.T) { assertions.AssertThatObject(t, objectassert.ProcedureDetails(t, function.ID()). HasSignature(fmt.Sprintf(`(%s %s)`, argName, dataType.ToLegacyDataTypeSql())). HasReturns(strings.ReplaceAll(dataType.ToSql(), " ", "")+" NOT NULL"). + HasReturnDataType(dataType). + HasReturnNotNull(true). HasLanguage("PYTHON"). HasBody(definition). HasNullHandling(string(sdk.NullInputBehaviorReturnsNullInput)). @@ -655,10 +730,14 @@ func TestInt_Procedures(t *testing.T) { HasExactlyExternalAccessIntegrations(externalAccessIntegration). HasExactlySecrets(map[string]sdk.SchemaObjectIdentifier{"abc": secretId}). HasImports(fmt.Sprintf(`[%s]`, tmpPythonFunction.PythonModuleLocation())). + HasExactlyImportsNormalizedInAnyOrder(sdk.NormalizedPath{ + StageLocation: "~", PathOnStage: tmpPythonFunction.PythonFileName(), + }). HasHandler(funcName). HasRuntimeVersion("3.8"). 
HasPackages(`['snowflake-snowpark-python==1.14.0','absl-py==0.10.0']`). HasTargetPathNil(). + HasNormalizedTargetPathNil(). HasInstalledPackagesNotEmpty(). HasExecuteAs("CALLER"), ) @@ -714,6 +793,8 @@ func TestInt_Procedures(t *testing.T) { assertions.AssertThatObject(t, objectassert.ProcedureDetails(t, function.ID()). HasSignature(fmt.Sprintf(`(%s %s)`, argName, dataType.ToLegacyDataTypeSql())). HasReturns(strings.ReplaceAll(dataType.ToSql(), " ", "")). + HasReturnDataType(dataType). + HasReturnNotNull(false). HasLanguage("PYTHON"). HasBodyNil(). HasNullHandling(string(sdk.NullInputBehaviorCalledOnNullInput)). @@ -721,10 +802,14 @@ func TestInt_Procedures(t *testing.T) { HasExternalAccessIntegrationsNil(). HasSecretsNil(). HasImports(fmt.Sprintf(`[%s]`, tmpPythonFunction.PythonModuleLocation())). + HasExactlyImportsNormalizedInAnyOrder(sdk.NormalizedPath{ + StageLocation: "~", PathOnStage: tmpPythonFunction.PythonFileName(), + }). HasHandler(tmpPythonFunction.PythonHandler()). HasRuntimeVersion("3.8"). HasPackages(`['snowflake-snowpark-python==1.14.0']`). HasTargetPathNil(). + HasNormalizedTargetPathNil(). HasInstalledPackagesNotEmpty(). HasExecuteAs("OWNER"), ) @@ -791,6 +876,8 @@ func TestInt_Procedures(t *testing.T) { assertions.AssertThatObject(t, objectassert.ProcedureDetails(t, function.ID()). HasSignature(fmt.Sprintf(`(%s %s)`, argName, dataType.ToLegacyDataTypeSql())). HasReturns(strings.ReplaceAll(dataType.ToSql(), " ", "")+" NOT NULL"). + HasReturnDataType(dataType). + HasReturnNotNull(true). HasLanguage("PYTHON"). HasBodyNil(). HasNullHandling(string(sdk.NullInputBehaviorReturnsNullInput)). @@ -798,10 +885,14 @@ func TestInt_Procedures(t *testing.T) { HasExactlyExternalAccessIntegrations(externalAccessIntegration). HasExactlySecrets(map[string]sdk.SchemaObjectIdentifier{"abc": secretId}). HasImports(fmt.Sprintf(`[%s]`, tmpPythonFunction.PythonModuleLocation())). 
+ HasExactlyImportsNormalizedInAnyOrder(sdk.NormalizedPath{ + StageLocation: "~", PathOnStage: tmpPythonFunction.PythonFileName(), + }). HasHandler(tmpPythonFunction.PythonHandler()). HasRuntimeVersion("3.8"). HasPackages(`['snowflake-snowpark-python==1.14.0','absl-py==0.10.0']`). HasTargetPathNil(). + HasNormalizedTargetPathNil(). HasInstalledPackagesNotEmpty(). HasExecuteAs("CALLER"), ) @@ -861,6 +952,8 @@ func TestInt_Procedures(t *testing.T) { assertions.AssertThatObject(t, objectassert.ProcedureDetails(t, function.ID()). HasSignature(fmt.Sprintf(`(%s %s)`, argName, dataType.ToLegacyDataTypeSql())). HasReturns(dataType.ToSql()). + HasReturnDataType(dataType). + HasReturnNotNull(false). HasLanguage("SCALA"). HasBody(definition). HasNullHandling(string(sdk.NullInputBehaviorCalledOnNullInput)). @@ -868,10 +961,12 @@ func TestInt_Procedures(t *testing.T) { HasExternalAccessIntegrationsNil(). HasSecretsNil(). HasImports(`[]`). + HasExactlyImportsNormalizedInAnyOrder(). HasHandler(handler). HasRuntimeVersion("2.12"). HasPackages(`[com.snowflake:snowpark:1.14.0]`). HasTargetPathNil(). + HasNormalizedTargetPathNil(). HasInstalledPackagesNil(). HasExecuteAs("OWNER"), ) @@ -948,6 +1043,8 @@ func TestInt_Procedures(t *testing.T) { assertions.AssertThatObject(t, objectassert.ProcedureDetails(t, function.ID()). HasSignature(fmt.Sprintf(`(%s %s)`, argName, dataType.ToLegacyDataTypeSql())). HasReturns(fmt.Sprintf(`%s NOT NULL`, dataType.ToSql())). + HasReturnDataType(dataType). + HasReturnNotNull(true). HasLanguage("SCALA"). HasBody(definition). HasNullHandling(string(sdk.NullInputBehaviorReturnsNullInput)). @@ -955,10 +1052,14 @@ func TestInt_Procedures(t *testing.T) { HasExactlyExternalAccessIntegrations(externalAccessIntegration). HasExactlySecrets(map[string]sdk.SchemaObjectIdentifier{"abc": secretId}). HasImports(fmt.Sprintf(`[%s]`, tmpJavaProcedure.JarLocation())). 
+ HasExactlyImportsNormalizedInAnyOrder(sdk.NormalizedPath{ + StageLocation: "~", PathOnStage: tmpJavaProcedure.JarName, + }). HasHandler(handler). HasRuntimeVersion("2.12"). HasPackages(`[com.snowflake:snowpark:1.14.0,com.snowflake:telemetry:0.1.0]`). HasTargetPath(targetPath). + HasNormalizedTargetPath("~", jarName). HasInstalledPackagesNil(). HasExecuteAs("CALLER"), ) @@ -1015,6 +1116,8 @@ func TestInt_Procedures(t *testing.T) { assertions.AssertThatObject(t, objectassert.ProcedureDetails(t, function.ID()). HasSignature(fmt.Sprintf(`(%s %s)`, argName, dataType.ToLegacyDataTypeSql())). HasReturns(dataType.ToSql()). + HasReturnDataType(dataType). + HasReturnNotNull(false). HasLanguage("SCALA"). HasBodyNil(). HasNullHandling(string(sdk.NullInputBehaviorCalledOnNullInput)). @@ -1022,10 +1125,14 @@ func TestInt_Procedures(t *testing.T) { HasExternalAccessIntegrationsNil(). HasSecretsNil(). HasImports(fmt.Sprintf(`[%s]`, importPath)). + HasExactlyImportsNormalizedInAnyOrder(sdk.NormalizedPath{ + StageLocation: "~", PathOnStage: tmpJavaProcedure.JarName, + }). HasHandler(handler). HasRuntimeVersion("2.12"). HasPackages(`[com.snowflake:snowpark:1.14.0]`). HasTargetPathNil(). + HasNormalizedTargetPathNil(). HasInstalledPackagesNil(). HasExecuteAs("OWNER"), ) @@ -1094,6 +1201,8 @@ func TestInt_Procedures(t *testing.T) { assertions.AssertThatObject(t, objectassert.ProcedureDetails(t, function.ID()). HasSignature(fmt.Sprintf(`(%s %s)`, argName, dataType.ToLegacyDataTypeSql())). HasReturns(fmt.Sprintf(`%s NOT NULL`, dataType.ToSql())). + HasReturnDataType(dataType). + HasReturnNotNull(true). HasLanguage("SCALA"). HasBodyNil(). HasNullHandling(string(sdk.NullInputBehaviorReturnsNullInput)). @@ -1101,10 +1210,14 @@ func TestInt_Procedures(t *testing.T) { HasExactlyExternalAccessIntegrations(externalAccessIntegration). HasExactlySecrets(map[string]sdk.SchemaObjectIdentifier{"abc": secretId}). HasImports(fmt.Sprintf(`[%s]`, tmpJavaProcedure.JarLocation())). 
+ HasExactlyImportsNormalizedInAnyOrder(sdk.NormalizedPath{ + StageLocation: "~", PathOnStage: tmpJavaProcedure.JarName, + }). HasHandler(handler). HasRuntimeVersion("2.12"). HasPackages(`[com.snowflake:snowpark:1.14.0,com.snowflake:telemetry:0.1.0]`). HasTargetPathNil(). + HasNormalizedTargetPathNil(). HasInstalledPackagesNil(). HasExecuteAs("CALLER"), ) @@ -1157,6 +1270,8 @@ func TestInt_Procedures(t *testing.T) { assertions.AssertThatObject(t, objectassert.ProcedureDetails(t, function.ID()). HasSignature(fmt.Sprintf(`(%s %s)`, argName, dataType.ToLegacyDataTypeSql())). HasReturns(dataType.ToSql()). + HasReturnDataType(dataType). + HasReturnNotNull(false). HasLanguage("SQL"). HasBody(definition). HasNullHandlingNil(). @@ -1164,10 +1279,12 @@ func TestInt_Procedures(t *testing.T) { HasExternalAccessIntegrationsNil(). HasSecretsNil(). HasImportsNil(). + HasExactlyImportsNormalizedInAnyOrder(). HasHandlerNil(). HasRuntimeVersionNil(). HasPackagesNil(). HasTargetPathNil(). + HasNormalizedTargetPathNil(). HasInstalledPackagesNil(). HasExecuteAs("OWNER"), ) @@ -1178,6 +1295,35 @@ func TestInt_Procedures(t *testing.T) { ) }) + // proves that we don't get default argument values from SHOW and DESCRIBE + t.Run("create procedure for SQL - default argument value", func(t *testing.T) { + argName := "x" + dataType := testdatatypes.DataTypeFloat + id := testClientHelper().Ids.RandomSchemaObjectIdentifierWithArguments(sdk.LegacyDataTypeFrom(dataType)) + + definition := testClientHelper().Procedure.SampleSqlDefinition(t) + dt := sdk.NewProcedureReturnsResultDataTypeRequest(dataType) + returns := sdk.NewProcedureSQLReturnsRequest().WithResultDataType(*dt) + argument := sdk.NewProcedureArgumentRequest(argName, dataType).WithDefaultValue("3.123") + request := sdk.NewCreateForSQLProcedureRequestDefinitionWrapped(id.SchemaObjectId(), *returns, definition). 
+ WithArguments([]sdk.ProcedureArgumentRequest{*argument}) + + err := client.Procedures.CreateForSQL(ctx, request) + require.NoError(t, err) + t.Cleanup(testClientHelper().Procedure.DropProcedureFunc(t, id)) + + function, err := client.Procedures.ShowByID(ctx, id) + require.NoError(t, err) + + assertions.AssertThatObject(t, objectassert.ProcedureFromObject(t, function). + HasArgumentsRaw(fmt.Sprintf(`%[1]s(DEFAULT %[2]s) RETURN %[2]s`, function.ID().Name(), dataType.ToLegacyDataTypeSql())), + ) + + assertions.AssertThatObject(t, objectassert.ProcedureDetails(t, function.ID()). + HasSignature(fmt.Sprintf(`(%s %s)`, argName, dataType.ToLegacyDataTypeSql())), + ) + }) + t.Run("create procedure for SQL - inline full", func(t *testing.T) { argName := "x" dataType := testdatatypes.DataTypeFloat @@ -1227,6 +1373,8 @@ func TestInt_Procedures(t *testing.T) { assertions.AssertThatObject(t, objectassert.ProcedureDetails(t, function.ID()). HasSignature(fmt.Sprintf(`(%s %s)`, argName, dataType.ToLegacyDataTypeSql())). HasReturns(fmt.Sprintf(`%s NOT NULL`, dataType.ToSql())). + HasReturnDataType(dataType). + HasReturnNotNull(true). HasLanguage("SQL"). HasBody(definition). // TODO [SNOW-1348103]: null handling and volatility are not returned and is present in create syntax @@ -1236,10 +1384,12 @@ func TestInt_Procedures(t *testing.T) { HasExternalAccessIntegrationsNil(). HasSecretsNil(). HasImportsNil(). + HasExactlyImportsNormalizedInAnyOrder(). HasHandlerNil(). HasRuntimeVersionNil(). HasPackagesNil(). HasTargetPathNil(). + HasNormalizedTargetPathNil(). HasInstalledPackagesNil(). 
HasExecuteAs("CALLER"), ) @@ -1250,6 +1400,70 @@ func TestInt_Procedures(t *testing.T) { ) }) + t.Run("create procedure for SQL - no arguments", func(t *testing.T) { + dataType := testdatatypes.DataTypeFloat + id := testClientHelper().Ids.RandomSchemaObjectIdentifierWithArguments() + + definition := testClientHelper().Procedure.SampleSqlDefinition(t) + dt := sdk.NewProcedureReturnsResultDataTypeRequest(dataType) + returns := sdk.NewProcedureSQLReturnsRequest().WithResultDataType(*dt) + request := sdk.NewCreateForSQLProcedureRequestDefinitionWrapped(id.SchemaObjectId(), *returns, definition) + + err := client.Procedures.CreateForSQL(ctx, request) + require.NoError(t, err) + t.Cleanup(testClientHelper().Procedure.DropProcedureFunc(t, id)) + + function, err := client.Procedures.ShowByID(ctx, id) + require.NoError(t, err) + + assertions.AssertThatObject(t, objectassert.ProcedureFromObject(t, function). + HasCreatedOnNotEmpty(). + HasName(id.Name()). + HasSchemaName(id.SchemaName()). + HasIsBuiltin(false). + HasIsAggregate(false). + HasIsAnsi(false). + HasMinNumArguments(0). + HasMaxNumArguments(0). + HasArgumentsOld([]sdk.DataType{}). + HasArgumentsRaw(fmt.Sprintf(`%[1]s() RETURN %[2]s`, function.ID().Name(), dataType.ToLegacyDataTypeSql())). + HasDescription(sdk.DefaultProcedureComment). + HasCatalogName(id.DatabaseName()). + HasIsTableFunction(false). + HasValidForClustering(false). + HasIsSecure(false). + HasExternalAccessIntegrationsNil(). + HasSecretsNil(), + ) + + assertions.AssertThatObject(t, objectassert.ProcedureDetails(t, function.ID()). + HasSignature("()"). + HasReturns(dataType.ToSql()). + HasReturnDataType(dataType). + HasReturnNotNull(false). + HasLanguage("SQL"). + HasBody(definition). + HasNullHandlingNil(). + HasVolatilityNil(). + HasExternalAccessIntegrationsNil(). + HasSecretsNil(). + HasImportsNil(). + HasExactlyImportsNormalizedInAnyOrder(). + HasHandlerNil(). + HasRuntimeVersionNil(). + HasPackagesNil(). + HasTargetPathNil(). 
+ HasNormalizedTargetPathNil(). + HasInstalledPackagesNil(). + HasExecuteAs("OWNER"), + ) + + assertions.AssertThatObject(t, objectparametersassert.ProcedureParameters(t, id). + HasAllDefaults(). + HasAllDefaultsExplicit(), + ) + }) + // TODO [SNOW-1348103]: adjust or remove t.Run("create procedure for Java: returns table", func(t *testing.T) { t.Skipf("Skipped for now; left as inspiration for resource rework as part of SNOW-1348103") From 7197b57c5dd75be34fc77eb82aabbd091074b809 Mon Sep 17 00:00:00 2001 From: Jakub Michalak Date: Thu, 12 Dec 2024 14:15:06 +0100 Subject: [PATCH 03/20] fix: Make blocked_roles_field optional in OAuth security integrations (#3267) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit - make the `blocked_roles_field` optional in OAuth security integrations - when tried to use `NormalizeAndCompareIdentifiersInSet`, the diff was suppressed in step 2 of the new tests, resulting in the role not being removed - this will be fixed next ## Test Plan * [x] acceptance tests * [ ] … ## References #3171 ## TODO - use `NormalizeAndCompareIdentifiersInSet` in this field; also in external oauth resource --- MIGRATION_GUIDE.md | 15 +- docs/resources/external_oauth_integration.md | 2 +- .../oauth_integration_for_custom_clients.md | 24 +- ...th_integration_for_partner_applications.md | 24 +- .../resource.tf | 1 - .../resource.tf | 1 - .../resourceassert/gen/resource_schema_def.go | 8 + ...gration_for_custom_clients_resource_gen.go | 197 ++++++++++++++++ ...n_for_partner_applications_resource_gen.go | 147 ++++++++++++ ...ntegration_for_custom_clients_model_ext.go | 15 ++ ...ntegration_for_custom_clients_model_gen.go | 222 ++++++++++++++++++ ...tion_for_partner_applications_model_ext.go | 15 ++ ...tion_for_partner_applications_model_gen.go | 163 +++++++++++++ pkg/resources/external_oauth_integration.go | 14 +- .../oauth_integration_for_custom_clients.go | 58 +++-- ...tion_for_custom_clients_acceptance_test.go | 76 
+++++- ...th_integration_for_partner_applications.go | 61 +++-- ...or_partner_applications_acceptance_test.go | 78 +++++- pkg/resources/show_and_describe_handlers.go | 2 +- .../basic/test.tf | 1 - .../basic/variables.tf | 3 - .../basic/test.tf | 1 - .../basic/variables.tf | 3 - .../oauth_integration_for_custom_clients.go | 14 ++ ...ty_integration_for_partner_applications.go | 14 ++ 25 files changed, 1096 insertions(+), 63 deletions(-) create mode 100644 pkg/acceptance/bettertestspoc/assert/resourceassert/oauth_integration_for_custom_clients_resource_gen.go create mode 100644 pkg/acceptance/bettertestspoc/assert/resourceassert/oauth_integration_for_partner_applications_resource_gen.go create mode 100644 pkg/acceptance/bettertestspoc/config/model/oauth_integration_for_custom_clients_model_ext.go create mode 100644 pkg/acceptance/bettertestspoc/config/model/oauth_integration_for_custom_clients_model_gen.go create mode 100644 pkg/acceptance/bettertestspoc/config/model/oauth_integration_for_partner_applications_model_ext.go create mode 100644 pkg/acceptance/bettertestspoc/config/model/oauth_integration_for_partner_applications_model_gen.go diff --git a/MIGRATION_GUIDE.md b/MIGRATION_GUIDE.md index fde9268c6c..d9bc0f050d 100644 --- a/MIGRATION_GUIDE.md +++ b/MIGRATION_GUIDE.md @@ -9,6 +9,19 @@ across different versions. ## v0.99.0 ➞ v0.100.0 +### snowflake_oauth_integration_for_partner_applications and snowflake_oauth_integration_for_custom_clients resource changes +#### *(behavior change)* `blocked_roles_list` field is no longer required + +Previously, `blocked_roles_list` field was required to handle default account roles like `ACCOUNTADMIN`, `ORGADMIN`, and `SECURITYADMIN`. + +Now, it is optional, because of using the value of `OAUTH_ADD_PRIVILEGED_ROLES_TO_BLOCKED_LIST` parameter (read more below). + +No changes in the configuration are necessary. 
+ +#### *(behavior change)* new field `related_parameters` + +To handle `blocked_roles_list` field properly in both of the resources, we introduce `related_parameters` field. This field is a list of parameters related to OAuth integrations. It is a computed-only field containing value of `OAUTH_ADD_PRIVILEGED_ROLES_TO_BLOCKED_LIST` account parameter (see [docs](https://docs.snowflake.com/en/sql-reference/parameters#oauth-add-privileged-roles-to-blocked-list)). + ### snowflake_account resource changes Changes: @@ -16,7 +29,7 @@ Changes: - `grace_period_in_days` is now required. The field should be explicitly set in the following versions. - Account renaming is now supported. - `is_org_admin` is a settable field (previously it was read-only field). Changing its value is also supported. -- `must_change_password` and `is_org_admin` type was changed from `bool` to bool-string (more on that [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/v1-preparations/CHANGES_BEFORE_V1.md#empty-values)). No action required during the migration. +- `must_change_password` and `is_org_admin` type was changed from `bool` to bool-string (more on that [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/v1-preparations/CHANGES_BEFORE_V1.md#empty-values)). No action required during the migration. - The underlying resource identifier was changed from `` to `.`. Migration will be done automatically. Notice this introduces changes in how `snowflake_account` resource is imported. - New `show_output` field was added (see [raw Snowflake output](./v1-preparations/CHANGES_BEFORE_V1.md#raw-snowflake-output)). 
diff --git a/docs/resources/external_oauth_integration.md b/docs/resources/external_oauth_integration.md index 37550af92e..2bcb6b2dc7 100644 --- a/docs/resources/external_oauth_integration.md +++ b/docs/resources/external_oauth_integration.md @@ -92,7 +92,7 @@ resource "snowflake_external_oauth_integration" "test" { - `describe_output` (List of Object) Outputs the result of `DESCRIBE SECURITY INTEGRATIONS` for the given security integration. (see [below for nested schema](#nestedatt--describe_output)) - `fully_qualified_name` (String) Fully qualified name of the resource. For more information, see [object name resolution](https://docs.snowflake.com/en/sql-reference/name-resolution). - `id` (String) The ID of this resource. -- `related_parameters` (List of Object) Paramteres related to this security integration. (see [below for nested schema](#nestedatt--related_parameters)) +- `related_parameters` (List of Object) Parameters related to this security integration. (see [below for nested schema](#nestedatt--related_parameters)) - `show_output` (List of Object) Outputs the result of `SHOW SECURITY INTEGRATIONS` for the given security integration. 
(see [below for nested schema](#nestedatt--show_output)) diff --git a/docs/resources/oauth_integration_for_custom_clients.md b/docs/resources/oauth_integration_for_custom_clients.md index 8a5182a45d..6765f52861 100644 --- a/docs/resources/oauth_integration_for_custom_clients.md +++ b/docs/resources/oauth_integration_for_custom_clients.md @@ -23,7 +23,6 @@ resource "snowflake_oauth_integration_for_custom_clients" "basic" { name = "integration" oauth_client_type = "CONFIDENTIAL" oauth_redirect_uri = "https://example.com" - blocked_roles_list = ["ACCOUNTADMIN", "SECURITYADMIN"] } # resource with all fields set @@ -53,13 +52,13 @@ resource "snowflake_oauth_integration_for_custom_clients" "complete" { ### Required -- `blocked_roles_list` (Set of String) A set of Snowflake roles that a user cannot explicitly consent to using after authenticating. For more information about this resource, see [docs](./account_role). - `name` (String) Specifies the name of the OAuth integration. This name follows the rules for Object Identifiers. The name should be unique among security integrations in your account. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"`. - `oauth_client_type` (String) Specifies the type of client being registered. Snowflake supports both confidential and public clients. Valid options are: `PUBLIC` | `CONFIDENTIAL`. - `oauth_redirect_uri` (String) Specifies the client URI. After a user is authenticated, the web browser is redirected to this URI. ### Optional +- `blocked_roles_list` (Set of String) A set of Snowflake roles that a user cannot explicitly consent to using after authenticating. By default, this list includes the ACCOUNTADMIN, ORGADMIN and SECURITYADMIN roles. 
To remove these privileged roles from the list, use the ALTER ACCOUNT command to set the OAUTH_ADD_PRIVILEGED_ROLES_TO_BLOCKED_LIST account parameter to FALSE. For more information about this resource, see [docs](./account_role). - `comment` (String) Specifies a comment for the OAuth integration. - `enabled` (String) Specifies whether this OAuth integration is enabled or disabled. Available options are: "true" or "false". When the value is not set in the configuration the provider will put "default" there which means to use the Snowflake default for this value. - `network_policy` (String) Specifies an existing network policy. This network policy controls network traffic that is attempting to exchange an authorization code for an access or refresh token or to use a refresh token to obtain a new access token. For more information about this resource, see [docs](./network_policy). @@ -77,6 +76,7 @@ resource "snowflake_oauth_integration_for_custom_clients" "complete" { - `describe_output` (List of Object) Outputs the result of `DESCRIBE SECURITY INTEGRATION` for the given integration. (see [below for nested schema](#nestedatt--describe_output)) - `fully_qualified_name` (String) Fully qualified name of the resource. For more information, see [object name resolution](https://docs.snowflake.com/en/sql-reference/name-resolution). - `id` (String) The ID of this resource. +- `related_parameters` (List of Object) Parameters related to this security integration. (see [below for nested schema](#nestedatt--related_parameters)) - `show_output` (List of Object) Outputs the result of `SHOW SECURITY INTEGRATION` for the given integration. 
(see [below for nested schema](#nestedatt--show_output)) @@ -314,6 +314,26 @@ Read-Only: + +### Nested Schema for `related_parameters` + +Read-Only: + +- `oauth_add_privileged_roles_to_blocked_list` (List of Object) (see [below for nested schema](#nestedobjatt--related_parameters--oauth_add_privileged_roles_to_blocked_list)) + + +### Nested Schema for `related_parameters.oauth_add_privileged_roles_to_blocked_list` + +Read-Only: + +- `default` (String) +- `description` (String) +- `key` (String) +- `level` (String) +- `value` (String) + + + ### Nested Schema for `show_output` diff --git a/docs/resources/oauth_integration_for_partner_applications.md b/docs/resources/oauth_integration_for_partner_applications.md index 48311dbca2..0188afb102 100644 --- a/docs/resources/oauth_integration_for_partner_applications.md +++ b/docs/resources/oauth_integration_for_partner_applications.md @@ -21,7 +21,6 @@ resource "snowflake_oauth_integration_for_partner_applications" "test" { name = "example" oauth_client = "LOOKER" oauth_redirect_uri = "http://example.com" - blocked_roles_list = ["ACCOUNTADMIN", "SECURITYADMIN"] } # resource with all fields set @@ -44,12 +43,12 @@ resource "snowflake_oauth_integration_for_partner_applications" "test" { ### Required -- `blocked_roles_list` (Set of String) A set of Snowflake roles that a user cannot explicitly consent to using after authenticating. For more information about this resource, see [docs](./account_role). - `name` (String) Specifies the name of the OAuth integration. This name follows the rules for Object Identifiers. The name should be unique among security integrations in your account. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"`. 
- `oauth_client` (String) Creates an OAuth interface between Snowflake and a partner application. Valid options are: `LOOKER` | `TABLEAU_DESKTOP` | `TABLEAU_SERVER`. ### Optional +- `blocked_roles_list` (Set of String) A set of Snowflake roles that a user cannot explicitly consent to using after authenticating. By default, this list includes the ACCOUNTADMIN, ORGADMIN and SECURITYADMIN roles. To remove these privileged roles from the list, use the ALTER ACCOUNT command to set the OAUTH_ADD_PRIVILEGED_ROLES_TO_BLOCKED_LIST account parameter to FALSE. For more information about this resource, see [docs](./account_role). - `comment` (String) Specifies a comment for the OAuth integration. - `enabled` (String) Specifies whether this OAuth integration is enabled or disabled. Available options are: "true" or "false". When the value is not set in the configuration the provider will put "default" there which means to use the Snowflake default for this value. - `oauth_issue_refresh_tokens` (String) Specifies whether to allow the client to exchange a refresh token for an access token when the current access token has expired. Available options are: "true" or "false". When the value is not set in the configuration the provider will put "default" there which means to use the Snowflake default for this value. @@ -62,6 +61,7 @@ resource "snowflake_oauth_integration_for_partner_applications" "test" { - `describe_output` (List of Object) Outputs the result of `DESCRIBE SECURITY INTEGRATION` for the given integration. (see [below for nested schema](#nestedatt--describe_output)) - `fully_qualified_name` (String) Fully qualified name of the resource. For more information, see [object name resolution](https://docs.snowflake.com/en/sql-reference/name-resolution). - `id` (String) The ID of this resource. +- `related_parameters` (List of Object) Parameters related to this security integration. 
(see [below for nested schema](#nestedatt--related_parameters)) - `show_output` (List of Object) Outputs the result of `SHOW SECURITY INTEGRATION` for the given integration. (see [below for nested schema](#nestedatt--show_output)) @@ -299,6 +299,26 @@ Read-Only: + +### Nested Schema for `related_parameters` + +Read-Only: + +- `oauth_add_privileged_roles_to_blocked_list` (List of Object) (see [below for nested schema](#nestedobjatt--related_parameters--oauth_add_privileged_roles_to_blocked_list)) + + +### Nested Schema for `related_parameters.oauth_add_privileged_roles_to_blocked_list` + +Read-Only: + +- `default` (String) +- `description` (String) +- `key` (String) +- `level` (String) +- `value` (String) + + + ### Nested Schema for `show_output` diff --git a/examples/resources/snowflake_oauth_integration_for_custom_clients/resource.tf b/examples/resources/snowflake_oauth_integration_for_custom_clients/resource.tf index c48c536a33..16e54b5f4d 100644 --- a/examples/resources/snowflake_oauth_integration_for_custom_clients/resource.tf +++ b/examples/resources/snowflake_oauth_integration_for_custom_clients/resource.tf @@ -3,7 +3,6 @@ resource "snowflake_oauth_integration_for_custom_clients" "basic" { name = "integration" oauth_client_type = "CONFIDENTIAL" oauth_redirect_uri = "https://example.com" - blocked_roles_list = ["ACCOUNTADMIN", "SECURITYADMIN"] } # resource with all fields set diff --git a/examples/resources/snowflake_oauth_integration_for_partner_applications/resource.tf b/examples/resources/snowflake_oauth_integration_for_partner_applications/resource.tf index 1c8a7830c2..f961c2a766 100644 --- a/examples/resources/snowflake_oauth_integration_for_partner_applications/resource.tf +++ b/examples/resources/snowflake_oauth_integration_for_partner_applications/resource.tf @@ -3,7 +3,6 @@ resource "snowflake_oauth_integration_for_partner_applications" "test" { name = "example" oauth_client = "LOOKER" oauth_redirect_uri = "http://example.com" - blocked_roles_list = 
["ACCOUNTADMIN", "SECURITYADMIN"] } # resource with all fields set diff --git a/pkg/acceptance/bettertestspoc/assert/resourceassert/gen/resource_schema_def.go b/pkg/acceptance/bettertestspoc/assert/resourceassert/gen/resource_schema_def.go index 23d5e9d5d9..e39d6f0533 100644 --- a/pkg/acceptance/bettertestspoc/assert/resourceassert/gen/resource_schema_def.go +++ b/pkg/acceptance/bettertestspoc/assert/resourceassert/gen/resource_schema_def.go @@ -121,6 +121,14 @@ var allResourceSchemaDefs = []ResourceSchemaDef{ name: "Account", schema: resources.Account().Schema, }, + { + name: "OauthIntegrationForCustomClients", + schema: resources.OauthIntegrationForCustomClients().Schema, + }, + { + name: "OauthIntegrationForPartnerApplications", + schema: resources.OauthIntegrationForPartnerApplications().Schema, + }, { name: "FunctionJava", schema: resources.FunctionJava().Schema, diff --git a/pkg/acceptance/bettertestspoc/assert/resourceassert/oauth_integration_for_custom_clients_resource_gen.go b/pkg/acceptance/bettertestspoc/assert/resourceassert/oauth_integration_for_custom_clients_resource_gen.go new file mode 100644 index 0000000000..d1ef7b4754 --- /dev/null +++ b/pkg/acceptance/bettertestspoc/assert/resourceassert/oauth_integration_for_custom_clients_resource_gen.go @@ -0,0 +1,197 @@ +// Code generated by assertions generator; DO NOT EDIT. 
+ +package resourceassert + +import ( + "testing" + + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/bettertestspoc/assert" +) + +type OauthIntegrationForCustomClientsResourceAssert struct { + *assert.ResourceAssert +} + +func OauthIntegrationForCustomClientsResource(t *testing.T, name string) *OauthIntegrationForCustomClientsResourceAssert { + t.Helper() + + return &OauthIntegrationForCustomClientsResourceAssert{ + ResourceAssert: assert.NewResourceAssert(name, "resource"), + } +} + +func ImportedOauthIntegrationForCustomClientsResource(t *testing.T, id string) *OauthIntegrationForCustomClientsResourceAssert { + t.Helper() + + return &OauthIntegrationForCustomClientsResourceAssert{ + ResourceAssert: assert.NewImportedResourceAssert(id, "imported resource"), + } +} + +/////////////////////////////////// +// Attribute value string checks // +/////////////////////////////////// + +func (o *OauthIntegrationForCustomClientsResourceAssert) HasBlockedRolesListString(expected string) *OauthIntegrationForCustomClientsResourceAssert { + o.AddAssertion(assert.ValueSet("blocked_roles_list", expected)) + return o +} + +func (o *OauthIntegrationForCustomClientsResourceAssert) HasCommentString(expected string) *OauthIntegrationForCustomClientsResourceAssert { + o.AddAssertion(assert.ValueSet("comment", expected)) + return o +} + +func (o *OauthIntegrationForCustomClientsResourceAssert) HasEnabledString(expected string) *OauthIntegrationForCustomClientsResourceAssert { + o.AddAssertion(assert.ValueSet("enabled", expected)) + return o +} + +func (o *OauthIntegrationForCustomClientsResourceAssert) HasFullyQualifiedNameString(expected string) *OauthIntegrationForCustomClientsResourceAssert { + o.AddAssertion(assert.ValueSet("fully_qualified_name", expected)) + return o +} + +func (o *OauthIntegrationForCustomClientsResourceAssert) HasNameString(expected string) *OauthIntegrationForCustomClientsResourceAssert { + o.AddAssertion(assert.ValueSet("name", 
expected)) + return o +} + +func (o *OauthIntegrationForCustomClientsResourceAssert) HasNetworkPolicyString(expected string) *OauthIntegrationForCustomClientsResourceAssert { + o.AddAssertion(assert.ValueSet("network_policy", expected)) + return o +} + +func (o *OauthIntegrationForCustomClientsResourceAssert) HasOauthAllowNonTlsRedirectUriString(expected string) *OauthIntegrationForCustomClientsResourceAssert { + o.AddAssertion(assert.ValueSet("oauth_allow_non_tls_redirect_uri", expected)) + return o +} + +func (o *OauthIntegrationForCustomClientsResourceAssert) HasOauthClientRsaPublicKeyString(expected string) *OauthIntegrationForCustomClientsResourceAssert { + o.AddAssertion(assert.ValueSet("oauth_client_rsa_public_key", expected)) + return o +} + +func (o *OauthIntegrationForCustomClientsResourceAssert) HasOauthClientRsaPublicKey2String(expected string) *OauthIntegrationForCustomClientsResourceAssert { + o.AddAssertion(assert.ValueSet("oauth_client_rsa_public_key_2", expected)) + return o +} + +func (o *OauthIntegrationForCustomClientsResourceAssert) HasOauthClientTypeString(expected string) *OauthIntegrationForCustomClientsResourceAssert { + o.AddAssertion(assert.ValueSet("oauth_client_type", expected)) + return o +} + +func (o *OauthIntegrationForCustomClientsResourceAssert) HasOauthEnforcePkceString(expected string) *OauthIntegrationForCustomClientsResourceAssert { + o.AddAssertion(assert.ValueSet("oauth_enforce_pkce", expected)) + return o +} + +func (o *OauthIntegrationForCustomClientsResourceAssert) HasOauthIssueRefreshTokensString(expected string) *OauthIntegrationForCustomClientsResourceAssert { + o.AddAssertion(assert.ValueSet("oauth_issue_refresh_tokens", expected)) + return o +} + +func (o *OauthIntegrationForCustomClientsResourceAssert) HasOauthRedirectUriString(expected string) *OauthIntegrationForCustomClientsResourceAssert { + o.AddAssertion(assert.ValueSet("oauth_redirect_uri", expected)) + return o +} + +func (o 
*OauthIntegrationForCustomClientsResourceAssert) HasOauthRefreshTokenValidityString(expected string) *OauthIntegrationForCustomClientsResourceAssert { + o.AddAssertion(assert.ValueSet("oauth_refresh_token_validity", expected)) + return o +} + +func (o *OauthIntegrationForCustomClientsResourceAssert) HasOauthUseSecondaryRolesString(expected string) *OauthIntegrationForCustomClientsResourceAssert { + o.AddAssertion(assert.ValueSet("oauth_use_secondary_roles", expected)) + return o +} + +func (o *OauthIntegrationForCustomClientsResourceAssert) HasPreAuthorizedRolesListString(expected string) *OauthIntegrationForCustomClientsResourceAssert { + o.AddAssertion(assert.ValueSet("pre_authorized_roles_list", expected)) + return o +} + +//////////////////////////// +// Attribute empty checks // +//////////////////////////// + +func (o *OauthIntegrationForCustomClientsResourceAssert) HasNoBlockedRolesList() *OauthIntegrationForCustomClientsResourceAssert { + o.AddAssertion(assert.ValueNotSet("blocked_roles_list")) + return o +} + +func (o *OauthIntegrationForCustomClientsResourceAssert) HasNoComment() *OauthIntegrationForCustomClientsResourceAssert { + o.AddAssertion(assert.ValueNotSet("comment")) + return o +} + +func (o *OauthIntegrationForCustomClientsResourceAssert) HasNoEnabled() *OauthIntegrationForCustomClientsResourceAssert { + o.AddAssertion(assert.ValueNotSet("enabled")) + return o +} + +func (o *OauthIntegrationForCustomClientsResourceAssert) HasNoFullyQualifiedName() *OauthIntegrationForCustomClientsResourceAssert { + o.AddAssertion(assert.ValueNotSet("fully_qualified_name")) + return o +} + +func (o *OauthIntegrationForCustomClientsResourceAssert) HasNoName() *OauthIntegrationForCustomClientsResourceAssert { + o.AddAssertion(assert.ValueNotSet("name")) + return o +} + +func (o *OauthIntegrationForCustomClientsResourceAssert) HasNoNetworkPolicy() *OauthIntegrationForCustomClientsResourceAssert { + o.AddAssertion(assert.ValueNotSet("network_policy")) + return o +} + 
+func (o *OauthIntegrationForCustomClientsResourceAssert) HasNoOauthAllowNonTlsRedirectUri() *OauthIntegrationForCustomClientsResourceAssert { + o.AddAssertion(assert.ValueNotSet("oauth_allow_non_tls_redirect_uri")) + return o +} + +func (o *OauthIntegrationForCustomClientsResourceAssert) HasNoOauthClientRsaPublicKey() *OauthIntegrationForCustomClientsResourceAssert { + o.AddAssertion(assert.ValueNotSet("oauth_client_rsa_public_key")) + return o +} + +func (o *OauthIntegrationForCustomClientsResourceAssert) HasNoOauthClientRsaPublicKey2() *OauthIntegrationForCustomClientsResourceAssert { + o.AddAssertion(assert.ValueNotSet("oauth_client_rsa_public_key_2")) + return o +} + +func (o *OauthIntegrationForCustomClientsResourceAssert) HasNoOauthClientType() *OauthIntegrationForCustomClientsResourceAssert { + o.AddAssertion(assert.ValueNotSet("oauth_client_type")) + return o +} + +func (o *OauthIntegrationForCustomClientsResourceAssert) HasNoOauthEnforcePkce() *OauthIntegrationForCustomClientsResourceAssert { + o.AddAssertion(assert.ValueNotSet("oauth_enforce_pkce")) + return o +} + +func (o *OauthIntegrationForCustomClientsResourceAssert) HasNoOauthIssueRefreshTokens() *OauthIntegrationForCustomClientsResourceAssert { + o.AddAssertion(assert.ValueNotSet("oauth_issue_refresh_tokens")) + return o +} + +func (o *OauthIntegrationForCustomClientsResourceAssert) HasNoOauthRedirectUri() *OauthIntegrationForCustomClientsResourceAssert { + o.AddAssertion(assert.ValueNotSet("oauth_redirect_uri")) + return o +} + +func (o *OauthIntegrationForCustomClientsResourceAssert) HasNoOauthRefreshTokenValidity() *OauthIntegrationForCustomClientsResourceAssert { + o.AddAssertion(assert.ValueNotSet("oauth_refresh_token_validity")) + return o +} + +func (o *OauthIntegrationForCustomClientsResourceAssert) HasNoOauthUseSecondaryRoles() *OauthIntegrationForCustomClientsResourceAssert { + o.AddAssertion(assert.ValueNotSet("oauth_use_secondary_roles")) + return o +} + +func (o 
*OauthIntegrationForCustomClientsResourceAssert) HasNoPreAuthorizedRolesList() *OauthIntegrationForCustomClientsResourceAssert { + o.AddAssertion(assert.ValueNotSet("pre_authorized_roles_list")) + return o +} diff --git a/pkg/acceptance/bettertestspoc/assert/resourceassert/oauth_integration_for_partner_applications_resource_gen.go b/pkg/acceptance/bettertestspoc/assert/resourceassert/oauth_integration_for_partner_applications_resource_gen.go new file mode 100644 index 0000000000..3d1be060d2 --- /dev/null +++ b/pkg/acceptance/bettertestspoc/assert/resourceassert/oauth_integration_for_partner_applications_resource_gen.go @@ -0,0 +1,147 @@ +// Code generated by assertions generator; DO NOT EDIT. + +package resourceassert + +import ( + "testing" + + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/bettertestspoc/assert" +) + +type OauthIntegrationForPartnerApplicationsResourceAssert struct { + *assert.ResourceAssert +} + +func OauthIntegrationForPartnerApplicationsResource(t *testing.T, name string) *OauthIntegrationForPartnerApplicationsResourceAssert { + t.Helper() + + return &OauthIntegrationForPartnerApplicationsResourceAssert{ + ResourceAssert: assert.NewResourceAssert(name, "resource"), + } +} + +func ImportedOauthIntegrationForPartnerApplicationsResource(t *testing.T, id string) *OauthIntegrationForPartnerApplicationsResourceAssert { + t.Helper() + + return &OauthIntegrationForPartnerApplicationsResourceAssert{ + ResourceAssert: assert.NewImportedResourceAssert(id, "imported resource"), + } +} + +/////////////////////////////////// +// Attribute value string checks // +/////////////////////////////////// + +func (o *OauthIntegrationForPartnerApplicationsResourceAssert) HasBlockedRolesListString(expected string) *OauthIntegrationForPartnerApplicationsResourceAssert { + o.AddAssertion(assert.ValueSet("blocked_roles_list", expected)) + return o +} + +func (o *OauthIntegrationForPartnerApplicationsResourceAssert) HasCommentString(expected 
string) *OauthIntegrationForPartnerApplicationsResourceAssert { + o.AddAssertion(assert.ValueSet("comment", expected)) + return o +} + +func (o *OauthIntegrationForPartnerApplicationsResourceAssert) HasEnabledString(expected string) *OauthIntegrationForPartnerApplicationsResourceAssert { + o.AddAssertion(assert.ValueSet("enabled", expected)) + return o +} + +func (o *OauthIntegrationForPartnerApplicationsResourceAssert) HasFullyQualifiedNameString(expected string) *OauthIntegrationForPartnerApplicationsResourceAssert { + o.AddAssertion(assert.ValueSet("fully_qualified_name", expected)) + return o +} + +func (o *OauthIntegrationForPartnerApplicationsResourceAssert) HasNameString(expected string) *OauthIntegrationForPartnerApplicationsResourceAssert { + o.AddAssertion(assert.ValueSet("name", expected)) + return o +} + +func (o *OauthIntegrationForPartnerApplicationsResourceAssert) HasOauthClientString(expected string) *OauthIntegrationForPartnerApplicationsResourceAssert { + o.AddAssertion(assert.ValueSet("oauth_client", expected)) + return o +} + +func (o *OauthIntegrationForPartnerApplicationsResourceAssert) HasOauthIssueRefreshTokensString(expected string) *OauthIntegrationForPartnerApplicationsResourceAssert { + o.AddAssertion(assert.ValueSet("oauth_issue_refresh_tokens", expected)) + return o +} + +func (o *OauthIntegrationForPartnerApplicationsResourceAssert) HasOauthRedirectUriString(expected string) *OauthIntegrationForPartnerApplicationsResourceAssert { + o.AddAssertion(assert.ValueSet("oauth_redirect_uri", expected)) + return o +} + +func (o *OauthIntegrationForPartnerApplicationsResourceAssert) HasOauthRefreshTokenValidityString(expected string) *OauthIntegrationForPartnerApplicationsResourceAssert { + o.AddAssertion(assert.ValueSet("oauth_refresh_token_validity", expected)) + return o +} + +func (o *OauthIntegrationForPartnerApplicationsResourceAssert) HasOauthUseSecondaryRolesString(expected string) *OauthIntegrationForPartnerApplicationsResourceAssert { 
+ o.AddAssertion(assert.ValueSet("oauth_use_secondary_roles", expected)) + return o +} + +func (o *OauthIntegrationForPartnerApplicationsResourceAssert) HasRelatedParametersString(expected string) *OauthIntegrationForPartnerApplicationsResourceAssert { + o.AddAssertion(assert.ValueSet("related_parameters", expected)) + return o +} + +//////////////////////////// +// Attribute empty checks // +//////////////////////////// + +func (o *OauthIntegrationForPartnerApplicationsResourceAssert) HasNoBlockedRolesList() *OauthIntegrationForPartnerApplicationsResourceAssert { + o.AddAssertion(assert.ValueNotSet("blocked_roles_list")) + return o +} + +func (o *OauthIntegrationForPartnerApplicationsResourceAssert) HasNoComment() *OauthIntegrationForPartnerApplicationsResourceAssert { + o.AddAssertion(assert.ValueNotSet("comment")) + return o +} + +func (o *OauthIntegrationForPartnerApplicationsResourceAssert) HasNoEnabled() *OauthIntegrationForPartnerApplicationsResourceAssert { + o.AddAssertion(assert.ValueNotSet("enabled")) + return o +} + +func (o *OauthIntegrationForPartnerApplicationsResourceAssert) HasNoFullyQualifiedName() *OauthIntegrationForPartnerApplicationsResourceAssert { + o.AddAssertion(assert.ValueNotSet("fully_qualified_name")) + return o +} + +func (o *OauthIntegrationForPartnerApplicationsResourceAssert) HasNoName() *OauthIntegrationForPartnerApplicationsResourceAssert { + o.AddAssertion(assert.ValueNotSet("name")) + return o +} + +func (o *OauthIntegrationForPartnerApplicationsResourceAssert) HasNoOauthClient() *OauthIntegrationForPartnerApplicationsResourceAssert { + o.AddAssertion(assert.ValueNotSet("oauth_client")) + return o +} + +func (o *OauthIntegrationForPartnerApplicationsResourceAssert) HasNoOauthIssueRefreshTokens() *OauthIntegrationForPartnerApplicationsResourceAssert { + o.AddAssertion(assert.ValueNotSet("oauth_issue_refresh_tokens")) + return o +} + +func (o *OauthIntegrationForPartnerApplicationsResourceAssert) HasNoOauthRedirectUri() 
*OauthIntegrationForPartnerApplicationsResourceAssert { + o.AddAssertion(assert.ValueNotSet("oauth_redirect_uri")) + return o +} + +func (o *OauthIntegrationForPartnerApplicationsResourceAssert) HasNoOauthRefreshTokenValidity() *OauthIntegrationForPartnerApplicationsResourceAssert { + o.AddAssertion(assert.ValueNotSet("oauth_refresh_token_validity")) + return o +} + +func (o *OauthIntegrationForPartnerApplicationsResourceAssert) HasNoOauthUseSecondaryRoles() *OauthIntegrationForPartnerApplicationsResourceAssert { + o.AddAssertion(assert.ValueNotSet("oauth_use_secondary_roles")) + return o +} + +func (o *OauthIntegrationForPartnerApplicationsResourceAssert) HasNoRelatedParameters() *OauthIntegrationForPartnerApplicationsResourceAssert { + o.AddAssertion(assert.ValueNotSet("related_parameters")) + return o +} diff --git a/pkg/acceptance/bettertestspoc/config/model/oauth_integration_for_custom_clients_model_ext.go b/pkg/acceptance/bettertestspoc/config/model/oauth_integration_for_custom_clients_model_ext.go new file mode 100644 index 0000000000..3d3605ca78 --- /dev/null +++ b/pkg/acceptance/bettertestspoc/config/model/oauth_integration_for_custom_clients_model_ext.go @@ -0,0 +1,15 @@ +package model + +import ( + tfconfig "github.com/hashicorp/terraform-plugin-testing/config" +) + +func (t *OauthIntegrationForCustomClientsModel) WithBlockedRolesList(blockedRoles ...string) *OauthIntegrationForCustomClientsModel { + blockedRolesListStringVariables := make([]tfconfig.Variable, len(blockedRoles)) + for i, v := range blockedRoles { + blockedRolesListStringVariables[i] = tfconfig.StringVariable(v) + } + + t.BlockedRolesList = tfconfig.SetVariable(blockedRolesListStringVariables...) 
+ return t +} diff --git a/pkg/acceptance/bettertestspoc/config/model/oauth_integration_for_custom_clients_model_gen.go b/pkg/acceptance/bettertestspoc/config/model/oauth_integration_for_custom_clients_model_gen.go new file mode 100644 index 0000000000..db5cf93efc --- /dev/null +++ b/pkg/acceptance/bettertestspoc/config/model/oauth_integration_for_custom_clients_model_gen.go @@ -0,0 +1,222 @@ +// Code generated by config model builder generator; DO NOT EDIT. + +package model + +import ( + tfconfig "github.com/hashicorp/terraform-plugin-testing/config" + + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/bettertestspoc/config" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/provider/resources" +) + +type OauthIntegrationForCustomClientsModel struct { + BlockedRolesList tfconfig.Variable `json:"blocked_roles_list,omitempty"` + Comment tfconfig.Variable `json:"comment,omitempty"` + Enabled tfconfig.Variable `json:"enabled,omitempty"` + FullyQualifiedName tfconfig.Variable `json:"fully_qualified_name,omitempty"` + Name tfconfig.Variable `json:"name,omitempty"` + NetworkPolicy tfconfig.Variable `json:"network_policy,omitempty"` + OauthAllowNonTlsRedirectUri tfconfig.Variable `json:"oauth_allow_non_tls_redirect_uri,omitempty"` + OauthClientRsaPublicKey tfconfig.Variable `json:"oauth_client_rsa_public_key,omitempty"` + OauthClientRsaPublicKey2 tfconfig.Variable `json:"oauth_client_rsa_public_key_2,omitempty"` + OauthClientType tfconfig.Variable `json:"oauth_client_type,omitempty"` + OauthEnforcePkce tfconfig.Variable `json:"oauth_enforce_pkce,omitempty"` + OauthIssueRefreshTokens tfconfig.Variable `json:"oauth_issue_refresh_tokens,omitempty"` + OauthRedirectUri tfconfig.Variable `json:"oauth_redirect_uri,omitempty"` + OauthRefreshTokenValidity tfconfig.Variable `json:"oauth_refresh_token_validity,omitempty"` + OauthUseSecondaryRoles tfconfig.Variable `json:"oauth_use_secondary_roles,omitempty"` + PreAuthorizedRolesList tfconfig.Variable 
`json:"pre_authorized_roles_list,omitempty"` + + *config.ResourceModelMeta +} + +///////////////////////////////////////////////// +// Basic builders (resource name and required) // +///////////////////////////////////////////////// + +func OauthIntegrationForCustomClients( + resourceName string, + name string, + oauthClientType string, + oauthRedirectUri string, +) *OauthIntegrationForCustomClientsModel { + o := &OauthIntegrationForCustomClientsModel{ResourceModelMeta: config.Meta(resourceName, resources.OauthIntegrationForCustomClients)} + o.WithName(name) + o.WithOauthClientType(oauthClientType) + o.WithOauthRedirectUri(oauthRedirectUri) + return o +} + +func OauthIntegrationForCustomClientsWithDefaultMeta( + name string, + oauthClientType string, + oauthRedirectUri string, +) *OauthIntegrationForCustomClientsModel { + o := &OauthIntegrationForCustomClientsModel{ResourceModelMeta: config.DefaultMeta(resources.OauthIntegrationForCustomClients)} + o.WithName(name) + o.WithOauthClientType(oauthClientType) + o.WithOauthRedirectUri(oauthRedirectUri) + return o +} + +///////////////////////////////// +// below all the proper values // +///////////////////////////////// + +// blocked_roles_list attribute type is not yet supported, so WithBlockedRolesList can't be generated + +func (o *OauthIntegrationForCustomClientsModel) WithComment(comment string) *OauthIntegrationForCustomClientsModel { + o.Comment = tfconfig.StringVariable(comment) + return o +} + +func (o *OauthIntegrationForCustomClientsModel) WithEnabled(enabled string) *OauthIntegrationForCustomClientsModel { + o.Enabled = tfconfig.StringVariable(enabled) + return o +} + +func (o *OauthIntegrationForCustomClientsModel) WithFullyQualifiedName(fullyQualifiedName string) *OauthIntegrationForCustomClientsModel { + o.FullyQualifiedName = tfconfig.StringVariable(fullyQualifiedName) + return o +} + +func (o *OauthIntegrationForCustomClientsModel) WithName(name string) *OauthIntegrationForCustomClientsModel { + o.Name 
= tfconfig.StringVariable(name) + return o +} + +func (o *OauthIntegrationForCustomClientsModel) WithNetworkPolicy(networkPolicy string) *OauthIntegrationForCustomClientsModel { + o.NetworkPolicy = tfconfig.StringVariable(networkPolicy) + return o +} + +func (o *OauthIntegrationForCustomClientsModel) WithOauthAllowNonTlsRedirectUri(oauthAllowNonTlsRedirectUri string) *OauthIntegrationForCustomClientsModel { + o.OauthAllowNonTlsRedirectUri = tfconfig.StringVariable(oauthAllowNonTlsRedirectUri) + return o +} + +func (o *OauthIntegrationForCustomClientsModel) WithOauthClientRsaPublicKey(oauthClientRsaPublicKey string) *OauthIntegrationForCustomClientsModel { + o.OauthClientRsaPublicKey = tfconfig.StringVariable(oauthClientRsaPublicKey) + return o +} + +func (o *OauthIntegrationForCustomClientsModel) WithOauthClientRsaPublicKey2(oauthClientRsaPublicKey2 string) *OauthIntegrationForCustomClientsModel { + o.OauthClientRsaPublicKey2 = tfconfig.StringVariable(oauthClientRsaPublicKey2) + return o +} + +func (o *OauthIntegrationForCustomClientsModel) WithOauthClientType(oauthClientType string) *OauthIntegrationForCustomClientsModel { + o.OauthClientType = tfconfig.StringVariable(oauthClientType) + return o +} + +func (o *OauthIntegrationForCustomClientsModel) WithOauthEnforcePkce(oauthEnforcePkce string) *OauthIntegrationForCustomClientsModel { + o.OauthEnforcePkce = tfconfig.StringVariable(oauthEnforcePkce) + return o +} + +func (o *OauthIntegrationForCustomClientsModel) WithOauthIssueRefreshTokens(oauthIssueRefreshTokens string) *OauthIntegrationForCustomClientsModel { + o.OauthIssueRefreshTokens = tfconfig.StringVariable(oauthIssueRefreshTokens) + return o +} + +func (o *OauthIntegrationForCustomClientsModel) WithOauthRedirectUri(oauthRedirectUri string) *OauthIntegrationForCustomClientsModel { + o.OauthRedirectUri = tfconfig.StringVariable(oauthRedirectUri) + return o +} + +func (o *OauthIntegrationForCustomClientsModel) 
WithOauthRefreshTokenValidity(oauthRefreshTokenValidity int) *OauthIntegrationForCustomClientsModel { + o.OauthRefreshTokenValidity = tfconfig.IntegerVariable(oauthRefreshTokenValidity) + return o +} + +func (o *OauthIntegrationForCustomClientsModel) WithOauthUseSecondaryRoles(oauthUseSecondaryRoles string) *OauthIntegrationForCustomClientsModel { + o.OauthUseSecondaryRoles = tfconfig.StringVariable(oauthUseSecondaryRoles) + return o +} + +// pre_authorized_roles_list attribute type is not yet supported, so WithPreAuthorizedRolesList can't be generated + +////////////////////////////////////////// +// below it's possible to set any value // +////////////////////////////////////////// + +func (o *OauthIntegrationForCustomClientsModel) WithBlockedRolesListValue(value tfconfig.Variable) *OauthIntegrationForCustomClientsModel { + o.BlockedRolesList = value + return o +} + +func (o *OauthIntegrationForCustomClientsModel) WithCommentValue(value tfconfig.Variable) *OauthIntegrationForCustomClientsModel { + o.Comment = value + return o +} + +func (o *OauthIntegrationForCustomClientsModel) WithEnabledValue(value tfconfig.Variable) *OauthIntegrationForCustomClientsModel { + o.Enabled = value + return o +} + +func (o *OauthIntegrationForCustomClientsModel) WithFullyQualifiedNameValue(value tfconfig.Variable) *OauthIntegrationForCustomClientsModel { + o.FullyQualifiedName = value + return o +} + +func (o *OauthIntegrationForCustomClientsModel) WithNameValue(value tfconfig.Variable) *OauthIntegrationForCustomClientsModel { + o.Name = value + return o +} + +func (o *OauthIntegrationForCustomClientsModel) WithNetworkPolicyValue(value tfconfig.Variable) *OauthIntegrationForCustomClientsModel { + o.NetworkPolicy = value + return o +} + +func (o *OauthIntegrationForCustomClientsModel) WithOauthAllowNonTlsRedirectUriValue(value tfconfig.Variable) *OauthIntegrationForCustomClientsModel { + o.OauthAllowNonTlsRedirectUri = value + return o +} + +func (o 
*OauthIntegrationForCustomClientsModel) WithOauthClientRsaPublicKeyValue(value tfconfig.Variable) *OauthIntegrationForCustomClientsModel { + o.OauthClientRsaPublicKey = value + return o +} + +func (o *OauthIntegrationForCustomClientsModel) WithOauthClientRsaPublicKey2Value(value tfconfig.Variable) *OauthIntegrationForCustomClientsModel { + o.OauthClientRsaPublicKey2 = value + return o +} + +func (o *OauthIntegrationForCustomClientsModel) WithOauthClientTypeValue(value tfconfig.Variable) *OauthIntegrationForCustomClientsModel { + o.OauthClientType = value + return o +} + +func (o *OauthIntegrationForCustomClientsModel) WithOauthEnforcePkceValue(value tfconfig.Variable) *OauthIntegrationForCustomClientsModel { + o.OauthEnforcePkce = value + return o +} + +func (o *OauthIntegrationForCustomClientsModel) WithOauthIssueRefreshTokensValue(value tfconfig.Variable) *OauthIntegrationForCustomClientsModel { + o.OauthIssueRefreshTokens = value + return o +} + +func (o *OauthIntegrationForCustomClientsModel) WithOauthRedirectUriValue(value tfconfig.Variable) *OauthIntegrationForCustomClientsModel { + o.OauthRedirectUri = value + return o +} + +func (o *OauthIntegrationForCustomClientsModel) WithOauthRefreshTokenValidityValue(value tfconfig.Variable) *OauthIntegrationForCustomClientsModel { + o.OauthRefreshTokenValidity = value + return o +} + +func (o *OauthIntegrationForCustomClientsModel) WithOauthUseSecondaryRolesValue(value tfconfig.Variable) *OauthIntegrationForCustomClientsModel { + o.OauthUseSecondaryRoles = value + return o +} + +func (o *OauthIntegrationForCustomClientsModel) WithPreAuthorizedRolesListValue(value tfconfig.Variable) *OauthIntegrationForCustomClientsModel { + o.PreAuthorizedRolesList = value + return o +} diff --git a/pkg/acceptance/bettertestspoc/config/model/oauth_integration_for_partner_applications_model_ext.go b/pkg/acceptance/bettertestspoc/config/model/oauth_integration_for_partner_applications_model_ext.go new file mode 100644 index 
0000000000..a19b31b0ad --- /dev/null +++ b/pkg/acceptance/bettertestspoc/config/model/oauth_integration_for_partner_applications_model_ext.go @@ -0,0 +1,15 @@ +package model + +import ( + tfconfig "github.com/hashicorp/terraform-plugin-testing/config" +) + +func (t *OauthIntegrationForPartnerApplicationsModel) WithBlockedRolesList(blockedRoles ...string) *OauthIntegrationForPartnerApplicationsModel { + blockedRolesListStringVariables := make([]tfconfig.Variable, len(blockedRoles)) + for i, v := range blockedRoles { + blockedRolesListStringVariables[i] = tfconfig.StringVariable(v) + } + + t.BlockedRolesList = tfconfig.SetVariable(blockedRolesListStringVariables...) + return t +} diff --git a/pkg/acceptance/bettertestspoc/config/model/oauth_integration_for_partner_applications_model_gen.go b/pkg/acceptance/bettertestspoc/config/model/oauth_integration_for_partner_applications_model_gen.go new file mode 100644 index 0000000000..b433aaee4c --- /dev/null +++ b/pkg/acceptance/bettertestspoc/config/model/oauth_integration_for_partner_applications_model_gen.go @@ -0,0 +1,163 @@ +// Code generated by config model builder generator; DO NOT EDIT. 
+ +package model + +import ( + tfconfig "github.com/hashicorp/terraform-plugin-testing/config" + + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/bettertestspoc/config" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/provider/resources" +) + +type OauthIntegrationForPartnerApplicationsModel struct { + BlockedRolesList tfconfig.Variable `json:"blocked_roles_list,omitempty"` + Comment tfconfig.Variable `json:"comment,omitempty"` + Enabled tfconfig.Variable `json:"enabled,omitempty"` + FullyQualifiedName tfconfig.Variable `json:"fully_qualified_name,omitempty"` + Name tfconfig.Variable `json:"name,omitempty"` + OauthClient tfconfig.Variable `json:"oauth_client,omitempty"` + OauthIssueRefreshTokens tfconfig.Variable `json:"oauth_issue_refresh_tokens,omitempty"` + OauthRedirectUri tfconfig.Variable `json:"oauth_redirect_uri,omitempty"` + OauthRefreshTokenValidity tfconfig.Variable `json:"oauth_refresh_token_validity,omitempty"` + OauthUseSecondaryRoles tfconfig.Variable `json:"oauth_use_secondary_roles,omitempty"` + RelatedParameters tfconfig.Variable `json:"related_parameters,omitempty"` + + *config.ResourceModelMeta +} + +///////////////////////////////////////////////// +// Basic builders (resource name and required) // +///////////////////////////////////////////////// + +func OauthIntegrationForPartnerApplications( + resourceName string, + name string, + oauthClient string, +) *OauthIntegrationForPartnerApplicationsModel { + o := &OauthIntegrationForPartnerApplicationsModel{ResourceModelMeta: config.Meta(resourceName, resources.OauthIntegrationForPartnerApplications)} + o.WithName(name) + o.WithOauthClient(oauthClient) + return o +} + +func OauthIntegrationForPartnerApplicationsWithDefaultMeta( + name string, + oauthClient string, +) *OauthIntegrationForPartnerApplicationsModel { + o := &OauthIntegrationForPartnerApplicationsModel{ResourceModelMeta: config.DefaultMeta(resources.OauthIntegrationForPartnerApplications)} + 
o.WithName(name) + o.WithOauthClient(oauthClient) + return o +} + +///////////////////////////////// +// below all the proper values // +///////////////////////////////// + +// blocked_roles_list attribute type is not yet supported, so WithBlockedRolesList can't be generated + +func (o *OauthIntegrationForPartnerApplicationsModel) WithComment(comment string) *OauthIntegrationForPartnerApplicationsModel { + o.Comment = tfconfig.StringVariable(comment) + return o +} + +func (o *OauthIntegrationForPartnerApplicationsModel) WithEnabled(enabled string) *OauthIntegrationForPartnerApplicationsModel { + o.Enabled = tfconfig.StringVariable(enabled) + return o +} + +func (o *OauthIntegrationForPartnerApplicationsModel) WithFullyQualifiedName(fullyQualifiedName string) *OauthIntegrationForPartnerApplicationsModel { + o.FullyQualifiedName = tfconfig.StringVariable(fullyQualifiedName) + return o +} + +func (o *OauthIntegrationForPartnerApplicationsModel) WithName(name string) *OauthIntegrationForPartnerApplicationsModel { + o.Name = tfconfig.StringVariable(name) + return o +} + +func (o *OauthIntegrationForPartnerApplicationsModel) WithOauthClient(oauthClient string) *OauthIntegrationForPartnerApplicationsModel { + o.OauthClient = tfconfig.StringVariable(oauthClient) + return o +} + +func (o *OauthIntegrationForPartnerApplicationsModel) WithOauthIssueRefreshTokens(oauthIssueRefreshTokens string) *OauthIntegrationForPartnerApplicationsModel { + o.OauthIssueRefreshTokens = tfconfig.StringVariable(oauthIssueRefreshTokens) + return o +} + +func (o *OauthIntegrationForPartnerApplicationsModel) WithOauthRedirectUri(oauthRedirectUri string) *OauthIntegrationForPartnerApplicationsModel { + o.OauthRedirectUri = tfconfig.StringVariable(oauthRedirectUri) + return o +} + +func (o *OauthIntegrationForPartnerApplicationsModel) WithOauthRefreshTokenValidity(oauthRefreshTokenValidity int) *OauthIntegrationForPartnerApplicationsModel { + o.OauthRefreshTokenValidity = 
tfconfig.IntegerVariable(oauthRefreshTokenValidity) + return o +} + +func (o *OauthIntegrationForPartnerApplicationsModel) WithOauthUseSecondaryRoles(oauthUseSecondaryRoles string) *OauthIntegrationForPartnerApplicationsModel { + o.OauthUseSecondaryRoles = tfconfig.StringVariable(oauthUseSecondaryRoles) + return o +} + +// related_parameters attribute type is not yet supported, so WithRelatedParameters can't be generated + +////////////////////////////////////////// +// below it's possible to set any value // +////////////////////////////////////////// + +func (o *OauthIntegrationForPartnerApplicationsModel) WithBlockedRolesListValue(value tfconfig.Variable) *OauthIntegrationForPartnerApplicationsModel { + o.BlockedRolesList = value + return o +} + +func (o *OauthIntegrationForPartnerApplicationsModel) WithCommentValue(value tfconfig.Variable) *OauthIntegrationForPartnerApplicationsModel { + o.Comment = value + return o +} + +func (o *OauthIntegrationForPartnerApplicationsModel) WithEnabledValue(value tfconfig.Variable) *OauthIntegrationForPartnerApplicationsModel { + o.Enabled = value + return o +} + +func (o *OauthIntegrationForPartnerApplicationsModel) WithFullyQualifiedNameValue(value tfconfig.Variable) *OauthIntegrationForPartnerApplicationsModel { + o.FullyQualifiedName = value + return o +} + +func (o *OauthIntegrationForPartnerApplicationsModel) WithNameValue(value tfconfig.Variable) *OauthIntegrationForPartnerApplicationsModel { + o.Name = value + return o +} + +func (o *OauthIntegrationForPartnerApplicationsModel) WithOauthClientValue(value tfconfig.Variable) *OauthIntegrationForPartnerApplicationsModel { + o.OauthClient = value + return o +} + +func (o *OauthIntegrationForPartnerApplicationsModel) WithOauthIssueRefreshTokensValue(value tfconfig.Variable) *OauthIntegrationForPartnerApplicationsModel { + o.OauthIssueRefreshTokens = value + return o +} + +func (o *OauthIntegrationForPartnerApplicationsModel) WithOauthRedirectUriValue(value 
tfconfig.Variable) *OauthIntegrationForPartnerApplicationsModel { + o.OauthRedirectUri = value + return o +} + +func (o *OauthIntegrationForPartnerApplicationsModel) WithOauthRefreshTokenValidityValue(value tfconfig.Variable) *OauthIntegrationForPartnerApplicationsModel { + o.OauthRefreshTokenValidity = value + return o +} + +func (o *OauthIntegrationForPartnerApplicationsModel) WithOauthUseSecondaryRolesValue(value tfconfig.Variable) *OauthIntegrationForPartnerApplicationsModel { + o.OauthUseSecondaryRoles = value + return o +} + +func (o *OauthIntegrationForPartnerApplicationsModel) WithRelatedParametersValue(value tfconfig.Variable) *OauthIntegrationForPartnerApplicationsModel { + o.RelatedParameters = value + return o +} diff --git a/pkg/resources/external_oauth_integration.go b/pkg/resources/external_oauth_integration.go index 785efca961..5f1caccd13 100644 --- a/pkg/resources/external_oauth_integration.go +++ b/pkg/resources/external_oauth_integration.go @@ -93,10 +93,14 @@ var externalOauthIntegrationSchema = map[string]*schema.Schema{ ConflictsWith: []string{"external_oauth_allowed_roles_list"}, }, "external_oauth_allowed_roles_list": { - Type: schema.TypeSet, - Elem: &schema.Schema{Type: schema.TypeString}, - Optional: true, - Description: relatedResourceDescription("Specifies the list of roles that the client can set as the primary role.", resources.AccountRole), + Type: schema.TypeSet, + Elem: &schema.Schema{Type: schema.TypeString}, + Optional: true, + Description: relatedResourceDescription("Specifies the list of roles that the client can set as the primary role.", resources.AccountRole), + DiffSuppressFunc: SuppressIfAny( + // TODO(SNOW-1517937): uncomment + // NormalizeAndCompareIdentifiersInSet("external_oauth_allowed_roles_list"), + ), ConflictsWith: []string{"external_oauth_blocked_roles_list"}, }, "external_oauth_audience_list": { @@ -146,7 +150,7 @@ var externalOauthIntegrationSchema = map[string]*schema.Schema{ RelatedParametersAttributeName: { 
Type: schema.TypeList, Computed: true, - Description: "Paramteres related to this security integration.", + Description: "Parameters related to this security integration.", Elem: &schema.Resource{ Schema: schemas.ShowExternalOauthParametersSchema, }, diff --git a/pkg/resources/oauth_integration_for_custom_clients.go b/pkg/resources/oauth_integration_for_custom_clients.go index 0d3b6e6040..1c9a1d8d07 100644 --- a/pkg/resources/oauth_integration_for_custom_clients.go +++ b/pkg/resources/oauth_integration_for_custom_clients.go @@ -89,9 +89,14 @@ var oauthIntegrationForCustomClientsSchema = map[string]*schema.Schema{ Type: schema.TypeString, ValidateDiagFunc: IsValidIdentifier[sdk.AccountObjectIdentifier](), }, - // TODO(SNOW-1517937): Check if can make optional - Required: true, - Description: relatedResourceDescription("A set of Snowflake roles that a user cannot explicitly consent to using after authenticating.", resources.AccountRole), + Optional: true, + Description: relatedResourceDescription(withPrivilegedRolesDescription("A set of Snowflake roles that a user cannot explicitly consent to using after authenticating.", string(sdk.AccountParameterOAuthAddPrivilegedRolesToBlockedList)), resources.AccountRole), + DiffSuppressFunc: SuppressIfAny( + IgnoreChangeToCurrentSnowflakeListValueInDescribe("blocked_roles_list"), + IgnoreValuesFromSetIfParamSet("blocked_roles_list", string(sdk.AccountParameterOAuthAddPrivilegedRolesToBlockedList), privilegedRoles), + // TODO(SNOW-1517937): uncomment + // NormalizeAndCompareIdentifiersInSet("blocked_roles_list"), + ), }, "oauth_issue_refresh_tokens": { Type: schema.TypeString, @@ -148,6 +153,14 @@ var oauthIntegrationForCustomClientsSchema = map[string]*schema.Schema{ Schema: schemas.DescribeOauthIntegrationForCustomClients, }, }, + RelatedParametersAttributeName: { + Type: schema.TypeList, + Computed: true, + Description: "Parameters related to this security integration.", + Elem: &schema.Resource{ + Schema: 
schemas.ShowOauthForCustomClientsParametersSchema, + }, + }, FullyQualifiedNameAttributeName: schemas.FullyQualifiedNameSchema, } @@ -256,6 +269,15 @@ func ImportOauthForCustomClientsIntegration(ctx context.Context, d *schema.Resou } } + if prop, err := collections.FindFirst(integrationProperties, func(property sdk.SecurityIntegrationProperty) bool { + return property.Name == "BLOCKED_ROLES_LIST" + }); err == nil { + roles := sdk.ParseCommaSeparatedStringArray(prop.Value, false) + if err = d.Set("blocked_roles_list", roles); err != nil { + return nil, err + } + } + return []*schema.ResourceData{d}, nil } @@ -432,20 +454,6 @@ func ReadContextOauthIntegrationForCustomClients(withExternalChangesMarking bool return diag.FromErr(err) } - blockedRolesList, err := collections.FindFirst(integrationProperties, func(property sdk.SecurityIntegrationProperty) bool { - return property.Name == "BLOCKED_ROLES_LIST" - }) - if err != nil { - return diag.FromErr(fmt.Errorf("failed to find pre authorized roles list, err = %w", err)) - } - var blockedRoles []string - if len(blockedRolesList.Value) > 0 { - blockedRoles = strings.Split(blockedRolesList.Value, ",") - } - if err := d.Set("blocked_roles_list", blockedRoles); err != nil { - return diag.FromErr(err) - } - networkPolicy, err := collections.FindFirst(integrationProperties, func(property sdk.SecurityIntegrationProperty) bool { return property.Name == "NETWORK_POLICY" }) @@ -498,12 +506,20 @@ func ReadContextOauthIntegrationForCustomClients(withExternalChangesMarking bool return diag.FromErr(err) } + blockedRolesList, err := collections.FindFirst(integrationProperties, func(property sdk.SecurityIntegrationProperty) bool { + return property.Name == "BLOCKED_ROLES_LIST" + }) + if err != nil { + return diag.FromErr(err) + } + if err = handleExternalChangesToObjectInDescribe(d, describeMapping{"oauth_allow_non_tls_redirect_uri", "oauth_allow_non_tls_redirect_uri", oauthAllowNonTlsRedirectUri.Value, oauthAllowNonTlsRedirectUri.Value, 
nil}, describeMapping{"oauth_enforce_pkce", "oauth_enforce_pkce", oauthEnforcePkce.Value, oauthEnforcePkce.Value, nil}, describeMapping{"oauth_use_secondary_roles", "oauth_use_secondary_roles", oauthUseSecondaryRoles.Value, oauthUseSecondaryRoles.Value, nil}, describeMapping{"oauth_issue_refresh_tokens", "oauth_issue_refresh_tokens", oauthIssueRefreshTokens.Value, oauthIssueRefreshTokens.Value, nil}, describeMapping{"oauth_refresh_token_validity", "oauth_refresh_token_validity", oauthRefreshTokenValidity.Value, oauthRefreshTokenValidity.Value, nil}, + describeMapping{"blocked_roles_list", "blocked_roles_list", blockedRolesList.Value, sdk.ParseCommaSeparatedStringArray(blockedRolesList.Value, false), nil}, ); err != nil { return diag.FromErr(err) } @@ -516,6 +532,7 @@ func ReadContextOauthIntegrationForCustomClients(withExternalChangesMarking bool "oauth_use_secondary_roles", "oauth_issue_refresh_tokens", "oauth_refresh_token_validity", + "blocked_roles_list", }); err != nil { return diag.FromErr(err) } @@ -527,6 +544,13 @@ func ReadContextOauthIntegrationForCustomClients(withExternalChangesMarking bool if err = d.Set(DescribeOutputAttributeName, []map[string]any{schemas.DescribeOauthIntegrationForCustomClientsToSchema(integrationProperties)}); err != nil { return diag.FromErr(err) } + param, err := client.Parameters.ShowAccountParameter(ctx, sdk.AccountParameterOAuthAddPrivilegedRolesToBlockedList) + if err != nil { + return diag.FromErr(err) + } + if err = d.Set(RelatedParametersAttributeName, []map[string]any{schemas.OauthForCustomClientsParametersToSchema([]*sdk.Parameter{param})}); err != nil { + return diag.FromErr(err) + } return nil } diff --git a/pkg/resources/oauth_integration_for_custom_clients_acceptance_test.go b/pkg/resources/oauth_integration_for_custom_clients_acceptance_test.go index be88ceab7c..a6ffa8205b 100644 --- a/pkg/resources/oauth_integration_for_custom_clients_acceptance_test.go +++ 
b/pkg/resources/oauth_integration_for_custom_clients_acceptance_test.go @@ -3,12 +3,16 @@ package resources_test import ( "fmt" "regexp" + "strings" "testing" resourcehelpers "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/helpers" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/internal/snowflakeroles" tfjson "github.com/hashicorp/terraform-json" + accconfig "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/bettertestspoc/config" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/bettertestspoc/config/model" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/importchecks" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/planchecks" resourcenames "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/provider/resources" @@ -43,7 +47,6 @@ func TestAcc_OauthIntegrationForCustomClients_Basic(t *testing.T) { "name": config.StringVariable(id.Name()), "oauth_client_type": config.StringVariable(string(sdk.OauthSecurityIntegrationClientTypeConfidential)), "oauth_redirect_uri": config.StringVariable(validUrl), - "blocked_roles_list": config.SetVariable(config.StringVariable("ACCOUNTADMIN"), config.StringVariable("SECURITYADMIN")), } if complete { c["blocked_roles_list"] = config.SetVariable(config.StringVariable("ACCOUNTADMIN"), config.StringVariable("SECURITYADMIN"), config.StringVariable(blockedRole.ID().Name())) @@ -85,7 +88,6 @@ func TestAcc_OauthIntegrationForCustomClients_Basic(t *testing.T) { resource.TestCheckResourceAttr("snowflake_oauth_integration_for_custom_clients.test", "oauth_enforce_pkce", resources.BooleanDefault), resource.TestCheckNoResourceAttr("snowflake_oauth_integration_for_custom_clients.test", "oauth_use_secondary_roles"), resource.TestCheckResourceAttr("snowflake_oauth_integration_for_custom_clients.test", "pre_authorized_roles_list.#", "0"), - resource.TestCheckResourceAttr("snowflake_oauth_integration_for_custom_clients.test", 
"blocked_roles_list.#", "2"), resource.TestCheckResourceAttr("snowflake_oauth_integration_for_custom_clients.test", "oauth_issue_refresh_tokens", resources.BooleanDefault), resource.TestCheckResourceAttr("snowflake_oauth_integration_for_custom_clients.test", "oauth_refresh_token_validity", "-1"), resource.TestCheckResourceAttr("snowflake_oauth_integration_for_custom_clients.test", "network_policy", ""), @@ -763,3 +765,73 @@ resource "snowflake_oauth_integration_for_custom_clients" "test" { } `, name) } + +func TestAcc_OauthIntegrationForCustomClients_WithPrivilegedRolesBlockedList(t *testing.T) { + id := acc.TestClient().Ids.RandomAccountObjectIdentifier() + // Use an identifier with this prefix to have this role in the end. + roleId := acc.TestClient().Ids.RandomAccountObjectIdentifierWithPrefix("Z") + role, roleCleanup := acc.TestClient().Role.CreateRoleWithIdentifier(t, roleId) + t.Cleanup(roleCleanup) + allRoles := []string{snowflakeroles.Accountadmin.Name(), snowflakeroles.SecurityAdmin.Name(), role.ID().Name()} + onlyPrivilegedRoles := []string{snowflakeroles.Accountadmin.Name(), snowflakeroles.SecurityAdmin.Name()} + customRoles := []string{role.ID().Name()} + + paramCleanup := acc.TestClient().Parameter.UpdateAccountParameterTemporarily(t, sdk.AccountParameterOAuthAddPrivilegedRolesToBlockedList, "true") + t.Cleanup(paramCleanup) + + modelWithoutBlockedRole := model.OauthIntegrationForCustomClients("test", id.Name(), string(sdk.OauthSecurityIntegrationClientTypePublic), "https://example.com") + modelWithBlockedRole := model.OauthIntegrationForCustomClients("test", id.Name(), string(sdk.OauthSecurityIntegrationClientTypePublic), "https://example.com"). 
+ WithBlockedRolesList(role.ID().Name()) + + resource.Test(t, resource.TestCase{ + ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, + PreCheck: func() { acc.TestAccPreCheck(t) }, + TerraformVersionChecks: []tfversion.TerraformVersionCheck{ + tfversion.RequireAbove(tfversion.Version1_5_0), + }, + Steps: []resource.TestStep{ + { + Config: accconfig.FromModel(t, modelWithBlockedRole), + Check: resource.ComposeAggregateTestCheckFunc( + resource.TestCheckResourceAttr(modelWithBlockedRole.ResourceReference(), "blocked_roles_list.#", "1"), + resource.TestCheckTypeSetElemAttr(modelWithBlockedRole.ResourceReference(), "blocked_roles_list.*", role.ID().Name()), + resource.TestCheckResourceAttr(modelWithBlockedRole.ResourceReference(), "name", id.Name()), + + resource.TestCheckResourceAttr(modelWithBlockedRole.ResourceReference(), "describe_output.0.blocked_roles_list.0.value", strings.Join(allRoles, ",")), + ), + }, + { + Config: accconfig.FromModel(t, modelWithoutBlockedRole), + Check: resource.ComposeAggregateTestCheckFunc( + resource.TestCheckResourceAttr(modelWithoutBlockedRole.ResourceReference(), "blocked_roles_list.#", "0"), + resource.TestCheckResourceAttr(modelWithoutBlockedRole.ResourceReference(), "name", id.Name()), + + resource.TestCheckResourceAttr(modelWithoutBlockedRole.ResourceReference(), "describe_output.0.blocked_roles_list.0.value", strings.Join(onlyPrivilegedRoles, ",")), + ), + }, + { + PreConfig: func() { + // Do not revert, because the revert is setup above. 
+ acc.TestClient().Parameter.UpdateAccountParameterTemporarily(t, sdk.AccountParameterOAuthAddPrivilegedRolesToBlockedList, "false") + }, + Config: accconfig.FromModel(t, modelWithBlockedRole), + Check: resource.ComposeAggregateTestCheckFunc( + resource.TestCheckResourceAttr(modelWithBlockedRole.ResourceReference(), "blocked_roles_list.#", "1"), + resource.TestCheckTypeSetElemAttr(modelWithBlockedRole.ResourceReference(), "blocked_roles_list.*", role.ID().Name()), + resource.TestCheckResourceAttr(modelWithBlockedRole.ResourceReference(), "name", id.Name()), + + resource.TestCheckResourceAttr(modelWithBlockedRole.ResourceReference(), "describe_output.0.blocked_roles_list.0.value", strings.Join(customRoles, ",")), + ), + }, + { + Config: accconfig.FromModel(t, modelWithoutBlockedRole), + Check: resource.ComposeAggregateTestCheckFunc( + resource.TestCheckResourceAttr(modelWithoutBlockedRole.ResourceReference(), "blocked_roles_list.#", "0"), + resource.TestCheckResourceAttr(modelWithoutBlockedRole.ResourceReference(), "name", id.Name()), + + resource.TestCheckResourceAttr(modelWithoutBlockedRole.ResourceReference(), "describe_output.0.blocked_roles_list.0.value", ""), + ), + }, + }, + }) +} diff --git a/pkg/resources/oauth_integration_for_partner_applications.go b/pkg/resources/oauth_integration_for_partner_applications.go index 7781c1973f..0074b3a3f5 100644 --- a/pkg/resources/oauth_integration_for_partner_applications.go +++ b/pkg/resources/oauth_integration_for_partner_applications.go @@ -6,7 +6,6 @@ import ( "fmt" "reflect" "strconv" - "strings" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/provider/resources" @@ -81,10 +80,14 @@ var oauthIntegrationForPartnerApplicationsSchema = map[string]*schema.Schema{ Type: schema.TypeString, ValidateDiagFunc: IsValidIdentifier[sdk.AccountObjectIdentifier](), }, - // TODO(SNOW-1517937): Check if can make optional - Required: true, - Description: relatedResourceDescription("A set of Snowflake roles that a user 
cannot explicitly consent to using after authenticating.", resources.AccountRole), - DiffSuppressFunc: IgnoreChangeToCurrentSnowflakeListValueInDescribe("blocked_roles_list"), + Optional: true, + Description: relatedResourceDescription(withPrivilegedRolesDescription("A set of Snowflake roles that a user cannot explicitly consent to using after authenticating.", string(sdk.AccountParameterOAuthAddPrivilegedRolesToBlockedList)), resources.AccountRole), + DiffSuppressFunc: SuppressIfAny( + IgnoreChangeToCurrentSnowflakeListValueInDescribe("blocked_roles_list"), + IgnoreValuesFromSetIfParamSet("blocked_roles_list", string(sdk.AccountParameterOAuthAddPrivilegedRolesToBlockedList), privilegedRoles), + // TODO(SNOW-1517937): uncomment + // NormalizeAndCompareIdentifiersInSet("blocked_roles_list"), + ), }, "comment": { Type: schema.TypeString, @@ -108,6 +111,14 @@ var oauthIntegrationForPartnerApplicationsSchema = map[string]*schema.Schema{ Schema: schemas.DescribeOauthIntegrationForPartnerApplications, }, }, + RelatedParametersAttributeName: { + Type: schema.TypeList, + Computed: true, + Description: "Parameters related to this security integration.", + Elem: &schema.Resource{ + Schema: schemas.ShowOauthForPartnerApplicationsParametersSchema, + }, + }, FullyQualifiedNameAttributeName: schemas.FullyQualifiedNameSchema, } @@ -205,6 +216,15 @@ func ImportOauthForPartnerApplicationIntegration(ctx context.Context, d *schema. 
} } + if prop, err := collections.FindFirst(integrationProperties, func(property sdk.SecurityIntegrationProperty) bool { + return property.Name == "BLOCKED_ROLES_LIST" + }); err == nil { + roles := sdk.ParseCommaSeparatedStringArray(prop.Value, false) + if err = d.Set("blocked_roles_list", roles); err != nil { + return nil, err + } + } + return []*schema.ResourceData{d}, nil } @@ -322,20 +342,6 @@ func ReadContextOauthIntegrationForPartnerApplications(withExternalChangesMarkin return diag.FromErr(err) } - blockedRolesList, err := collections.FindFirst(integrationProperties, func(property sdk.SecurityIntegrationProperty) bool { - return property.Name == "BLOCKED_ROLES_LIST" - }) - if err != nil { - return diag.FromErr(fmt.Errorf("failed to find pre authorized roles list, err = %w", err)) - } - var blockedRoles []string - if len(blockedRolesList.Value) > 0 { - blockedRoles = strings.Split(blockedRolesList.Value, ",") - } - if err := d.Set("blocked_roles_list", blockedRoles); err != nil { - return diag.FromErr(err) - } - if withExternalChangesMarking { if err = handleExternalChangesToObjectInShow(d, outputMapping{"enabled", "enabled", integration.Enabled, booleanStringFromBool(integration.Enabled), nil}, @@ -368,10 +374,18 @@ func ReadContextOauthIntegrationForPartnerApplications(withExternalChangesMarkin return diag.FromErr(err) } + blockedRolesList, err := collections.FindFirst(integrationProperties, func(property sdk.SecurityIntegrationProperty) bool { + return property.Name == "BLOCKED_ROLES_LIST" + }) + if err != nil { + return diag.FromErr(err) + } + if err = handleExternalChangesToObjectInDescribe(d, describeMapping{"oauth_issue_refresh_tokens", "oauth_issue_refresh_tokens", oauthIssueRefreshTokens.Value, oauthIssueRefreshTokens.Value, nil}, describeMapping{"oauth_refresh_token_validity", "oauth_refresh_token_validity", oauthRefreshTokenValidity.Value, oauthRefreshTokenValidityValue, nil}, describeMapping{"oauth_use_secondary_roles", 
"oauth_use_secondary_roles", oauthUseSecondaryRoles.Value, oauthUseSecondaryRoles.Value, nil}, + describeMapping{"blocked_roles_list", "blocked_roles_list", blockedRolesList.Value, sdk.ParseCommaSeparatedStringArray(blockedRolesList.Value, false), nil}, ); err != nil { return diag.FromErr(err) } @@ -382,6 +396,7 @@ func ReadContextOauthIntegrationForPartnerApplications(withExternalChangesMarkin "oauth_issue_refresh_tokens", "oauth_refresh_token_validity", "oauth_use_secondary_roles", + "blocked_roles_list", }); err != nil { return diag.FromErr(err) } @@ -393,7 +408,13 @@ func ReadContextOauthIntegrationForPartnerApplications(withExternalChangesMarkin if err = d.Set(DescribeOutputAttributeName, []map[string]any{schemas.DescribeOauthIntegrationForPartnerApplicationsToSchema(integrationProperties)}); err != nil { return diag.FromErr(err) } - + param, err := client.Parameters.ShowAccountParameter(ctx, sdk.AccountParameterOAuthAddPrivilegedRolesToBlockedList) + if err != nil { + return diag.FromErr(err) + } + if err = d.Set(RelatedParametersAttributeName, []map[string]any{schemas.OauthForPartnerApplicationsParametersToSchema([]*sdk.Parameter{param})}); err != nil { + return diag.FromErr(err) + } return nil } } diff --git a/pkg/resources/oauth_integration_for_partner_applications_acceptance_test.go b/pkg/resources/oauth_integration_for_partner_applications_acceptance_test.go index 0eccde347c..975b47b7b7 100644 --- a/pkg/resources/oauth_integration_for_partner_applications_acceptance_test.go +++ b/pkg/resources/oauth_integration_for_partner_applications_acceptance_test.go @@ -3,10 +3,14 @@ package resources_test import ( "fmt" "regexp" + "strings" "testing" resourcehelpers "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/helpers" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/internal/snowflakeroles" + accconfig "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/bettertestspoc/config" + 
"github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/bettertestspoc/config/model" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/planchecks" tfjson "github.com/hashicorp/terraform-json" @@ -32,11 +36,11 @@ func TestAcc_OauthIntegrationForPartnerApplications_Basic(t *testing.T) { values := config.Variables{ "name": config.StringVariable(id.Name()), "oauth_client": config.StringVariable(string(sdk.OauthSecurityIntegrationClientLooker)), - "blocked_roles_list": config.SetVariable(config.StringVariable("ACCOUNTADMIN"), config.StringVariable("SECURITYADMIN")), "oauth_redirect_uri": config.StringVariable(validUrl), } if complete { values["enabled"] = config.BoolVariable(true) + values["blocked_roles_list"] = config.SetVariable(config.StringVariable("ACCOUNTADMIN"), config.StringVariable("SECURITYADMIN")) values["oauth_issue_refresh_tokens"] = config.BoolVariable(false) values["oauth_refresh_token_validity"] = config.IntegerVariable(86400) values["oauth_use_secondary_roles"] = config.StringVariable(string(sdk.OauthSecurityIntegrationUseSecondaryRolesImplicit)) @@ -65,7 +69,7 @@ func TestAcc_OauthIntegrationForPartnerApplications_Basic(t *testing.T) { resource.TestCheckResourceAttr("snowflake_oauth_integration_for_partner_applications.test", "oauth_issue_refresh_tokens", "default"), resource.TestCheckResourceAttr("snowflake_oauth_integration_for_partner_applications.test", "oauth_refresh_token_validity", "-1"), resource.TestCheckNoResourceAttr("snowflake_oauth_integration_for_partner_applications.test", "oauth_use_secondary_roles"), - resource.TestCheckResourceAttr("snowflake_oauth_integration_for_partner_applications.test", "blocked_roles_list.#", "2"), + resource.TestCheckNoResourceAttr("snowflake_oauth_integration_for_partner_applications.test", "blocked_roles_list"), resource.TestCheckResourceAttr("snowflake_oauth_integration_for_partner_applications.test", "comment", ""), 
resource.TestCheckResourceAttr("snowflake_oauth_integration_for_partner_applications.test", "show_output.#", "1"), @@ -768,3 +772,73 @@ resource "snowflake_oauth_integration_for_partner_applications" "test" { } `, name) } + +func TestAcc_OauthIntegrationForPartnerApplications_WithPrivilegedRolesBlockedList(t *testing.T) { + id := acc.TestClient().Ids.RandomAccountObjectIdentifier() + // Use an identifier with this prefix to have this role in the end. + roleId := acc.TestClient().Ids.RandomAccountObjectIdentifierWithPrefix("Z") + role, roleCleanup := acc.TestClient().Role.CreateRoleWithIdentifier(t, roleId) + t.Cleanup(roleCleanup) + allRoles := []string{snowflakeroles.Accountadmin.Name(), snowflakeroles.SecurityAdmin.Name(), role.ID().Name()} + onlyPrivilegedRoles := []string{snowflakeroles.Accountadmin.Name(), snowflakeroles.SecurityAdmin.Name()} + customRoles := []string{role.ID().Name()} + + paramCleanup := acc.TestClient().Parameter.UpdateAccountParameterTemporarily(t, sdk.AccountParameterOAuthAddPrivilegedRolesToBlockedList, "true") + t.Cleanup(paramCleanup) + + modelWithoutBlockedRole := model.OauthIntegrationForPartnerApplications("test", id.Name(), string(sdk.OauthSecurityIntegrationClientTableauDesktop)) + modelWithBlockedRole := model.OauthIntegrationForPartnerApplications("test", id.Name(), string(sdk.OauthSecurityIntegrationClientTableauDesktop)). 
+ WithBlockedRolesList(role.ID().Name()) + + resource.Test(t, resource.TestCase{ + ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, + PreCheck: func() { acc.TestAccPreCheck(t) }, + TerraformVersionChecks: []tfversion.TerraformVersionCheck{ + tfversion.RequireAbove(tfversion.Version1_5_0), + }, + Steps: []resource.TestStep{ + { + Config: accconfig.FromModel(t, modelWithBlockedRole), + Check: resource.ComposeAggregateTestCheckFunc( + resource.TestCheckResourceAttr(modelWithBlockedRole.ResourceReference(), "blocked_roles_list.#", "1"), + resource.TestCheckTypeSetElemAttr(modelWithBlockedRole.ResourceReference(), "blocked_roles_list.*", role.ID().Name()), + resource.TestCheckResourceAttr(modelWithBlockedRole.ResourceReference(), "name", id.Name()), + + resource.TestCheckResourceAttr(modelWithBlockedRole.ResourceReference(), "describe_output.0.blocked_roles_list.0.value", strings.Join(allRoles, ",")), + ), + }, + { + Config: accconfig.FromModel(t, modelWithoutBlockedRole), + Check: resource.ComposeAggregateTestCheckFunc( + resource.TestCheckResourceAttr(modelWithoutBlockedRole.ResourceReference(), "blocked_roles_list.#", "0"), + resource.TestCheckResourceAttr(modelWithoutBlockedRole.ResourceReference(), "name", id.Name()), + + resource.TestCheckResourceAttr(modelWithoutBlockedRole.ResourceReference(), "describe_output.0.blocked_roles_list.0.value", strings.Join(onlyPrivilegedRoles, ",")), + ), + }, + { + PreConfig: func() { + // Do not revert, because the revert is setup above. 
+ acc.TestClient().Parameter.UpdateAccountParameterTemporarily(t, sdk.AccountParameterOAuthAddPrivilegedRolesToBlockedList, "false") + }, + Config: accconfig.FromModel(t, modelWithBlockedRole), + Check: resource.ComposeAggregateTestCheckFunc( + resource.TestCheckResourceAttr(modelWithBlockedRole.ResourceReference(), "blocked_roles_list.#", "1"), + resource.TestCheckTypeSetElemAttr(modelWithBlockedRole.ResourceReference(), "blocked_roles_list.*", role.ID().Name()), + resource.TestCheckResourceAttr(modelWithBlockedRole.ResourceReference(), "name", id.Name()), + + resource.TestCheckResourceAttr(modelWithBlockedRole.ResourceReference(), "describe_output.0.blocked_roles_list.0.value", strings.Join(customRoles, ",")), + ), + }, + { + Config: accconfig.FromModel(t, modelWithoutBlockedRole), + Check: resource.ComposeAggregateTestCheckFunc( + resource.TestCheckResourceAttr(modelWithoutBlockedRole.ResourceReference(), "blocked_roles_list.#", "0"), + resource.TestCheckResourceAttr(modelWithoutBlockedRole.ResourceReference(), "name", id.Name()), + + resource.TestCheckResourceAttr(modelWithoutBlockedRole.ResourceReference(), "describe_output.0.blocked_roles_list.0.value", ""), + ), + }, + }, + }) +} diff --git a/pkg/resources/show_and_describe_handlers.go b/pkg/resources/show_and_describe_handlers.go index fbbf8e49ca..0fe72372b6 100644 --- a/pkg/resources/show_and_describe_handlers.go +++ b/pkg/resources/show_and_describe_handlers.go @@ -54,7 +54,7 @@ type outputMapping struct { normalizeFunc func(any) any } -// handleExternalChangesToObjectInDescribe assumes that show output is kept in DescribeOutputAttributeName attribute +// handleExternalChangesToObjectInDescribe assumes that describe output is kept in DescribeOutputAttributeName attribute func handleExternalChangesToObjectInDescribe(d *schema.ResourceData, mappings ...describeMapping) error { if describeOutput, ok := d.GetOk(DescribeOutputAttributeName); ok { describeOutputList := describeOutput.([]any) diff --git 
a/pkg/resources/testdata/TestAcc_OauthIntegrationForCustomClients/basic/test.tf b/pkg/resources/testdata/TestAcc_OauthIntegrationForCustomClients/basic/test.tf index 83e0ae30b7..9f6b0f3d0d 100644 --- a/pkg/resources/testdata/TestAcc_OauthIntegrationForCustomClients/basic/test.tf +++ b/pkg/resources/testdata/TestAcc_OauthIntegrationForCustomClients/basic/test.tf @@ -2,5 +2,4 @@ resource "snowflake_oauth_integration_for_custom_clients" "test" { name = var.name oauth_client_type = var.oauth_client_type oauth_redirect_uri = var.oauth_redirect_uri - blocked_roles_list = var.blocked_roles_list } diff --git a/pkg/resources/testdata/TestAcc_OauthIntegrationForCustomClients/basic/variables.tf b/pkg/resources/testdata/TestAcc_OauthIntegrationForCustomClients/basic/variables.tf index eacd431fd3..202efb149e 100644 --- a/pkg/resources/testdata/TestAcc_OauthIntegrationForCustomClients/basic/variables.tf +++ b/pkg/resources/testdata/TestAcc_OauthIntegrationForCustomClients/basic/variables.tf @@ -7,6 +7,3 @@ variable "oauth_client_type" { variable "oauth_redirect_uri" { type = string } -variable "blocked_roles_list" { - type = set(string) -} diff --git a/pkg/resources/testdata/TestAcc_OauthIntegrationForPartnerApplications/basic/test.tf b/pkg/resources/testdata/TestAcc_OauthIntegrationForPartnerApplications/basic/test.tf index 5bef8bad73..15fe8b9e9b 100644 --- a/pkg/resources/testdata/TestAcc_OauthIntegrationForPartnerApplications/basic/test.tf +++ b/pkg/resources/testdata/TestAcc_OauthIntegrationForPartnerApplications/basic/test.tf @@ -2,5 +2,4 @@ resource "snowflake_oauth_integration_for_partner_applications" "test" { name = var.name oauth_client = var.oauth_client oauth_redirect_uri = var.oauth_redirect_uri - blocked_roles_list = var.blocked_roles_list } diff --git a/pkg/resources/testdata/TestAcc_OauthIntegrationForPartnerApplications/basic/variables.tf b/pkg/resources/testdata/TestAcc_OauthIntegrationForPartnerApplications/basic/variables.tf index e475d52432..7da5c54ce2 
100644 --- a/pkg/resources/testdata/TestAcc_OauthIntegrationForPartnerApplications/basic/variables.tf +++ b/pkg/resources/testdata/TestAcc_OauthIntegrationForPartnerApplications/basic/variables.tf @@ -7,6 +7,3 @@ variable "oauth_client" { variable "oauth_redirect_uri" { type = string } -variable "blocked_roles_list" { - type = set(string) -} diff --git a/pkg/schemas/oauth_integration_for_custom_clients.go b/pkg/schemas/oauth_integration_for_custom_clients.go index c517f9b03b..890ad6104a 100644 --- a/pkg/schemas/oauth_integration_for_custom_clients.go +++ b/pkg/schemas/oauth_integration_for_custom_clients.go @@ -65,3 +65,17 @@ func DescribeOauthIntegrationForCustomClientsToSchema(integrationProperties []sd } return propsSchema } + +var ShowOauthForCustomClientsParametersSchema = map[string]*schema.Schema{ + strings.ToLower(string(sdk.AccountParameterOAuthAddPrivilegedRolesToBlockedList)): ParameterListSchema, +} + +func OauthForCustomClientsParametersToSchema(parameters []*sdk.Parameter) map[string]any { + schemaMap := make(map[string]any) + for _, param := range parameters { + if slices.Contains([]sdk.AccountParameter{sdk.AccountParameterOAuthAddPrivilegedRolesToBlockedList}, sdk.AccountParameter(param.Key)) { + schemaMap[strings.ToLower(param.Key)] = []map[string]any{ParameterToSchema(param)} + } + } + return schemaMap +} diff --git a/pkg/schemas/oauth_security_integration_for_partner_applications.go b/pkg/schemas/oauth_security_integration_for_partner_applications.go index 61bb3d2efc..7721e24ec0 100644 --- a/pkg/schemas/oauth_security_integration_for_partner_applications.go +++ b/pkg/schemas/oauth_security_integration_for_partner_applications.go @@ -65,3 +65,17 @@ func DescribeOauthIntegrationForPartnerApplicationsToSchema(integrationPropertie } return securityIntegrationProperties } + +var ShowOauthForPartnerApplicationsParametersSchema = map[string]*schema.Schema{ + strings.ToLower(string(sdk.AccountParameterOAuthAddPrivilegedRolesToBlockedList)): 
ParameterListSchema, +} + +func OauthForPartnerApplicationsParametersToSchema(parameters []*sdk.Parameter) map[string]any { + schemaMap := make(map[string]any) + for _, param := range parameters { + if slices.Contains([]sdk.AccountParameter{sdk.AccountParameterOAuthAddPrivilegedRolesToBlockedList}, sdk.AccountParameter(param.Key)) { + schemaMap[strings.ToLower(param.Key)] = []map[string]any{ParameterToSchema(param)} + } + } + return schemaMap +} From 53e7a0aea3350e9e03a804d67e7df796f15bff3a Mon Sep 17 00:00:00 2001 From: Artur Sawicki Date: Thu, 12 Dec 2024 14:51:32 +0100 Subject: [PATCH 04/20] feat: Handle missing fields in function and procedure (#3273) Handle secrets, external access integrations, comments, packages, and snowpark package. --- .../function_describe_snowflake_ext.go | 56 +++++++++++- .../procedure_describe_snowflake_ext.go | 67 +++++++++++++- .../config/model/function_java_model_ext.go | 53 ++++++++++- .../config/model/procedure_java_model_ext.go | 51 +++++++++++ .../function_and_procedure_commons.go | 65 ++++++++++++- pkg/resources/function_commons.go | 20 +++- pkg/resources/function_java.go | 72 ++++++++++++--- .../function_java_acceptance_test.go | 88 +++++++++++++++++- pkg/resources/procedure_commons.go | 16 +++- pkg/resources/procedure_java.go | 57 ++++++++---- .../procedure_java_acceptance_test.go | 89 +++++++++++++++++- pkg/sdk/functions_and_procedures_commons.go | 33 +++++++ pkg/sdk/functions_ext.go | 43 ++++++++- pkg/sdk/procedures_ext.go | 91 +++++++++++++++++-- pkg/sdk/testint/functions_integration_test.go | 48 +++++++++- .../testint/procedures_integration_test.go | 53 ++++++++++- 16 files changed, 839 insertions(+), 63 deletions(-) diff --git a/pkg/acceptance/bettertestspoc/assert/objectassert/function_describe_snowflake_ext.go b/pkg/acceptance/bettertestspoc/assert/objectassert/function_describe_snowflake_ext.go index a4c256b172..b00dad3d3c 100644 --- 
a/pkg/acceptance/bettertestspoc/assert/objectassert/function_describe_snowflake_ext.go +++ b/pkg/acceptance/bettertestspoc/assert/objectassert/function_describe_snowflake_ext.go @@ -415,7 +415,7 @@ func (f *FunctionDetailsAssert) HasExactlyImportsNormalizedInAnyOrder(imports .. return fmt.Errorf("expected imports to have value; got: nil") } if !assert2.ElementsMatch(t, imports, o.NormalizedImports) { - return fmt.Errorf("expected %v imports in task relations, got %v", imports, o.NormalizedImports) + return fmt.Errorf("expected %v imports, got %v", imports, o.NormalizedImports) } return nil }) @@ -474,3 +474,57 @@ func (f *FunctionDetailsAssert) HasReturnNotNull(expected bool) *FunctionDetails }) return f } + +func (f *FunctionDetailsAssert) HasExactlyExternalAccessIntegrationsNormalizedInAnyOrder(integrations ...sdk.AccountObjectIdentifier) *FunctionDetailsAssert { + f.AddAssertion(func(t *testing.T, o *sdk.FunctionDetails) error { + t.Helper() + if o.NormalizedExternalAccessIntegrations == nil { + return fmt.Errorf("expected normalized external access integrations to have value; got: nil") + } + fullyQualifiedNamesExpected := collections.Map(integrations, func(id sdk.AccountObjectIdentifier) string { return id.FullyQualifiedName() }) + fullyQualifiedNamesGot := collections.Map(o.NormalizedExternalAccessIntegrations, func(id sdk.AccountObjectIdentifier) string { return id.FullyQualifiedName() }) + if !assert2.ElementsMatch(t, fullyQualifiedNamesExpected, fullyQualifiedNamesGot) { + return fmt.Errorf("expected %v normalized external access integrations, got %v", integrations, o.NormalizedExternalAccessIntegrations) + } + return nil + }) + return f +} + +func (f *FunctionDetailsAssert) ContainsExactlySecrets(secrets map[string]sdk.SchemaObjectIdentifier) *FunctionDetailsAssert { + f.AddAssertion(func(t *testing.T, o *sdk.FunctionDetails) error { + t.Helper() + if o.NormalizedSecrets == nil { + return fmt.Errorf("expected normalized secrets to have value; got: nil") + 
} + for k, v := range secrets { + if s, ok := o.NormalizedSecrets[k]; !ok { + return fmt.Errorf("expected normalized secrets to have a secret associated with key %s", k) + } else if s.FullyQualifiedName() != v.FullyQualifiedName() { + return fmt.Errorf("expected secret with key %s to have id %s, got %s", k, v.FullyQualifiedName(), s.FullyQualifiedName()) + } + } + for k := range o.NormalizedSecrets { + if _, ok := secrets[k]; !ok { + return fmt.Errorf("normalized secrets have unexpected key: %s", k) + } + } + + return nil + }) + return f +} + +func (f *FunctionDetailsAssert) HasExactlyPackagesInAnyOrder(packages ...string) *FunctionDetailsAssert { + f.AddAssertion(func(t *testing.T, o *sdk.FunctionDetails) error { + t.Helper() + if o.NormalizedPackages == nil { + return fmt.Errorf("expected packages to have value; got: nil") + } + if !assert2.ElementsMatch(t, packages, o.NormalizedPackages) { + return fmt.Errorf("expected %v packages, got %v", packages, o.NormalizedPackages) + } + return nil + }) + return f +} diff --git a/pkg/acceptance/bettertestspoc/assert/objectassert/procedure_describe_snowflake_ext.go b/pkg/acceptance/bettertestspoc/assert/objectassert/procedure_describe_snowflake_ext.go index 2319b30f7a..962d34f2d6 100644 --- a/pkg/acceptance/bettertestspoc/assert/objectassert/procedure_describe_snowflake_ext.go +++ b/pkg/acceptance/bettertestspoc/assert/objectassert/procedure_describe_snowflake_ext.go @@ -401,7 +401,7 @@ func (f *ProcedureDetailsAssert) HasExactlyImportsNormalizedInAnyOrder(imports . 
return fmt.Errorf("expected imports to have value; got: nil") } if !assert2.ElementsMatch(t, imports, o.NormalizedImports) { - return fmt.Errorf("expected %v imports in task relations, got %v", imports, o.NormalizedImports) + return fmt.Errorf("expected %v imports, got %v", imports, o.NormalizedImports) } return nil }) @@ -460,3 +460,68 @@ func (f *ProcedureDetailsAssert) HasReturnNotNull(expected bool) *ProcedureDetai }) return f } + +func (f *ProcedureDetailsAssert) HasExactlyExternalAccessIntegrationsNormalizedInAnyOrder(integrations ...sdk.AccountObjectIdentifier) *ProcedureDetailsAssert { + f.AddAssertion(func(t *testing.T, o *sdk.ProcedureDetails) error { + t.Helper() + if o.NormalizedExternalAccessIntegrations == nil { + return fmt.Errorf("expected normalized external access integrations to have value; got: nil") + } + fullyQualifiedNamesExpected := collections.Map(integrations, func(id sdk.AccountObjectIdentifier) string { return id.FullyQualifiedName() }) + fullyQualifiedNamesGot := collections.Map(o.NormalizedExternalAccessIntegrations, func(id sdk.AccountObjectIdentifier) string { return id.FullyQualifiedName() }) + if !assert2.ElementsMatch(t, fullyQualifiedNamesExpected, fullyQualifiedNamesGot) { + return fmt.Errorf("expected %v normalized external access integrations, got %v", integrations, o.NormalizedExternalAccessIntegrations) + } + return nil + }) + return f +} + +func (f *ProcedureDetailsAssert) ContainsExactlySecrets(secrets map[string]sdk.SchemaObjectIdentifier) *ProcedureDetailsAssert { + f.AddAssertion(func(t *testing.T, o *sdk.ProcedureDetails) error { + t.Helper() + if o.NormalizedSecrets == nil { + return fmt.Errorf("expected normalized secrets to have value; got: nil") + } + for k, v := range secrets { + if s, ok := o.NormalizedSecrets[k]; !ok { + return fmt.Errorf("expected normalized secrets to have a secret associated with key %s", k) + } else if s.FullyQualifiedName() != v.FullyQualifiedName() { + return fmt.Errorf("expected secret 
with key %s to have id %s, got %s", k, v.FullyQualifiedName(), s.FullyQualifiedName()) + } + } + for k := range o.NormalizedSecrets { + if _, ok := secrets[k]; !ok { + return fmt.Errorf("normalized secrets have unexpected key: %s", k) + } + } + + return nil + }) + return f +} + +func (f *ProcedureDetailsAssert) HasExactlyPackagesInAnyOrder(packages ...string) *ProcedureDetailsAssert { + f.AddAssertion(func(t *testing.T, o *sdk.ProcedureDetails) error { + t.Helper() + if o.NormalizedPackages == nil { + return fmt.Errorf("expected packages to have value; got: nil") + } + if !assert2.ElementsMatch(t, packages, o.NormalizedPackages) { + return fmt.Errorf("expected %v packages, got %v", packages, o.NormalizedPackages) + } + return nil + }) + return f +} + +func (f *ProcedureDetailsAssert) HasSnowparkVersion(expected string) *ProcedureDetailsAssert { + f.AddAssertion(func(t *testing.T, o *sdk.ProcedureDetails) error { + t.Helper() + if o.SnowparkVersion != expected { + return fmt.Errorf("expected snowpark version %s; got: %s", expected, o.SnowparkVersion) + } + return nil + }) + return f +} diff --git a/pkg/acceptance/bettertestspoc/config/model/function_java_model_ext.go b/pkg/acceptance/bettertestspoc/config/model/function_java_model_ext.go index 8579ea981a..0174c4dc10 100644 --- a/pkg/acceptance/bettertestspoc/config/model/function_java_model_ext.go +++ b/pkg/acceptance/bettertestspoc/config/model/function_java_model_ext.go @@ -2,9 +2,11 @@ package model import ( "encoding/json" + "strings" tfconfig "github.com/hashicorp/terraform-plugin-testing/config" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/internal/collections" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk/datatypes" ) @@ -69,13 +71,62 @@ func (f *FunctionJavaModel) WithImport(stageLocation string, pathOnStage string) return f.WithImportsValue( tfconfig.ObjectVariable( map[string]tfconfig.Variable{ - 
"stage_location": tfconfig.StringVariable(stageLocation), + "stage_location": tfconfig.StringVariable(strings.TrimPrefix(stageLocation, "@")), "path_on_stage": tfconfig.StringVariable(pathOnStage), }, ), ) } +func (f *FunctionJavaModel) WithImports(imports ...sdk.NormalizedPath) *FunctionJavaModel { + return f.WithImportsValue( + tfconfig.SetVariable( + collections.Map(imports, func(imp sdk.NormalizedPath) tfconfig.Variable { + return tfconfig.ObjectVariable( + map[string]tfconfig.Variable{ + "stage_location": tfconfig.StringVariable(imp.StageLocation), + "path_on_stage": tfconfig.StringVariable(imp.PathOnStage), + }, + ) + })..., + ), + ) +} + +func (f *FunctionJavaModel) WithPackages(pkgs ...string) *FunctionJavaModel { + return f.WithPackagesValue( + tfconfig.SetVariable( + collections.Map(pkgs, func(pkg string) tfconfig.Variable { return tfconfig.StringVariable(pkg) })..., + ), + ) +} + +func (f *FunctionJavaModel) WithExternalAccessIntegrations(ids ...sdk.AccountObjectIdentifier) *FunctionJavaModel { + return f.WithExternalAccessIntegrationsValue( + tfconfig.SetVariable( + collections.Map(ids, func(id sdk.AccountObjectIdentifier) tfconfig.Variable { return tfconfig.StringVariable(id.Name()) })..., + ), + ) +} + +func (f *FunctionJavaModel) WithSecrets(secrets map[string]sdk.SchemaObjectIdentifier) *FunctionJavaModel { + objects := make([]tfconfig.Variable, 0) + for k, v := range secrets { + objects = append(objects, tfconfig.ObjectVariable( + map[string]tfconfig.Variable{ + "secret_variable_name": tfconfig.StringVariable(k), + "secret_id": tfconfig.StringVariable(v.FullyQualifiedName()), + }, + )) + } + + return f.WithSecretsValue( + tfconfig.SetVariable( + objects..., + ), + ) +} + func (f *FunctionJavaModel) WithTargetPathParts(stageLocation string, pathOnStage string) *FunctionJavaModel { return f.WithTargetPathValue( tfconfig.ObjectVariable( diff --git a/pkg/acceptance/bettertestspoc/config/model/procedure_java_model_ext.go 
b/pkg/acceptance/bettertestspoc/config/model/procedure_java_model_ext.go index cb6779784c..b8a1602f79 100644 --- a/pkg/acceptance/bettertestspoc/config/model/procedure_java_model_ext.go +++ b/pkg/acceptance/bettertestspoc/config/model/procedure_java_model_ext.go @@ -3,6 +3,8 @@ package model import ( "encoding/json" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/internal/collections" + tfconfig "github.com/hashicorp/terraform-plugin-testing/config" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk" @@ -77,6 +79,55 @@ func (f *ProcedureJavaModel) WithImport(stageLocation string, pathOnStage string ) } +func (f *ProcedureJavaModel) WithImports(imports ...sdk.NormalizedPath) *ProcedureJavaModel { + return f.WithImportsValue( + tfconfig.SetVariable( + collections.Map(imports, func(imp sdk.NormalizedPath) tfconfig.Variable { + return tfconfig.ObjectVariable( + map[string]tfconfig.Variable{ + "stage_location": tfconfig.StringVariable(imp.StageLocation), + "path_on_stage": tfconfig.StringVariable(imp.PathOnStage), + }, + ) + })..., + ), + ) +} + +func (f *ProcedureJavaModel) WithPackages(pkgs ...string) *ProcedureJavaModel { + return f.WithPackagesValue( + tfconfig.SetVariable( + collections.Map(pkgs, func(pkg string) tfconfig.Variable { return tfconfig.StringVariable(pkg) })..., + ), + ) +} + +func (f *ProcedureJavaModel) WithExternalAccessIntegrations(ids ...sdk.AccountObjectIdentifier) *ProcedureJavaModel { + return f.WithExternalAccessIntegrationsValue( + tfconfig.SetVariable( + collections.Map(ids, func(id sdk.AccountObjectIdentifier) tfconfig.Variable { return tfconfig.StringVariable(id.Name()) })..., + ), + ) +} + +func (f *ProcedureJavaModel) WithSecrets(secrets map[string]sdk.SchemaObjectIdentifier) *ProcedureJavaModel { + objects := make([]tfconfig.Variable, 0) + for k, v := range secrets { + objects = append(objects, tfconfig.ObjectVariable( + map[string]tfconfig.Variable{ + "secret_variable_name": tfconfig.StringVariable(k), + 
"secret_id": tfconfig.StringVariable(v.FullyQualifiedName()), + }, + )) + } + + return f.WithSecretsValue( + tfconfig.SetVariable( + objects..., + ), + ) +} + func (f *ProcedureJavaModel) WithTargetPathParts(stageLocation string, pathOnStage string) *ProcedureJavaModel { return f.WithTargetPathValue( tfconfig.ObjectVariable( diff --git a/pkg/resources/function_and_procedure_commons.go b/pkg/resources/function_and_procedure_commons.go index e21801813c..213217e968 100644 --- a/pkg/resources/function_and_procedure_commons.go +++ b/pkg/resources/function_and_procedure_commons.go @@ -10,7 +10,7 @@ import ( func readFunctionOrProcedureArguments(d *schema.ResourceData, args []sdk.NormalizedArgument) error { if len(args) == 0 { - // TODO [SNOW-1348103]: handle empty list + // TODO [before V1]: handle empty list return nil } // We do it the unusual way because the default values are not returned by SF. @@ -40,6 +40,21 @@ func readFunctionOrProcedureImports(d *schema.ResourceData, imports []sdk.Normal return d.Set("imports", imps) } +func readFunctionOrProcedureExternalAccessIntegrations(d *schema.ResourceData, externalAccessIntegrations []sdk.AccountObjectIdentifier) error { + return d.Set("external_access_integrations", collections.Map(externalAccessIntegrations, func(id sdk.AccountObjectIdentifier) string { return id.Name() })) +} + +func readFunctionOrProcedureSecrets(d *schema.ResourceData, secrets map[string]sdk.SchemaObjectIdentifier) error { + all := make([]map[string]any, 0) + for k, v := range secrets { + all = append(all, map[string]any{ + "secret_variable_name": k, + "secret_id": v.FullyQualifiedName(), + }) + } + return d.Set("secrets", all) +} + func readFunctionOrProcedureTargetPath(d *schema.ResourceData, normalizedPath *sdk.NormalizedPath) error { if normalizedPath == nil { // don't do anything if imports not present @@ -52,3 +67,51 @@ func readFunctionOrProcedureTargetPath(d *schema.ResourceData, normalizedPath *s } return d.Set("target_path", tp) } + +func 
setExternalAccessIntegrationsInBuilder[T any](d *schema.ResourceData, setIntegrations func([]sdk.AccountObjectIdentifier) T) error { + integrations, err := parseExternalAccessIntegrationsCommon(d) + if err != nil { + return err + } + setIntegrations(integrations) + return nil +} + +func setSecretsInBuilder[T any](d *schema.ResourceData, setSecrets func([]sdk.SecretReference) T) error { + secrets, err := parseSecretsCommon(d) + if err != nil { + return err + } + setSecrets(secrets) + return nil +} + +func parseExternalAccessIntegrationsCommon(d *schema.ResourceData) ([]sdk.AccountObjectIdentifier, error) { + integrations := make([]sdk.AccountObjectIdentifier, 0) + if v, ok := d.GetOk("external_access_integrations"); ok { + for _, i := range v.(*schema.Set).List() { + id, err := sdk.ParseAccountObjectIdentifier(i.(string)) + if err != nil { + return nil, err + } + integrations = append(integrations, id) + } + } + return integrations, nil +} + +func parseSecretsCommon(d *schema.ResourceData) ([]sdk.SecretReference, error) { + secretReferences := make([]sdk.SecretReference, 0) + if v, ok := d.GetOk("secrets"); ok { + for _, s := range v.(*schema.Set).List() { + name := s.(map[string]any)["secret_variable_name"].(string) + idRaw := s.(map[string]any)["secret_id"].(string) + id, err := sdk.ParseSchemaObjectIdentifier(idRaw) + if err != nil { + return nil, err + } + secretReferences = append(secretReferences, sdk.SecretReference{VariableName: name, Name: id}) + } + } + return secretReferences, nil +} diff --git a/pkg/resources/function_commons.go b/pkg/resources/function_commons.go index fe5a097a45..7dddd097e7 100644 --- a/pkg/resources/function_commons.go +++ b/pkg/resources/function_commons.go @@ -316,7 +316,6 @@ func functionBaseSchema() map[string]schema.Schema { ValidateDiagFunc: IsValidIdentifier[sdk.AccountObjectIdentifier](), }, Optional: true, - ForceNew: true, Description: "The names of [external access 
integrations](https://docs.snowflake.com/en/sql-reference/sql/create-external-access-integration) needed in order for this function’s handler code to access external networks. An external access integration specifies [network rules](https://docs.snowflake.com/en/sql-reference/sql/create-network-rule) and [secrets](https://docs.snowflake.com/en/sql-reference/sql/create-secret) that specify external locations and credentials (if any) allowed for use by handler code when making requests of an external network, such as an external REST API.", }, "secrets": { @@ -441,6 +440,16 @@ func parseFunctionImportsCommon(d *schema.ResourceData) ([]sdk.FunctionImportReq return imports, nil } +func parseFunctionPackagesCommon(d *schema.ResourceData) ([]sdk.FunctionPackageRequest, error) { + packages := make([]sdk.FunctionPackageRequest, 0) + if v, ok := d.GetOk("packages"); ok { + for _, pkg := range v.(*schema.Set).List() { + packages = append(packages, *sdk.NewFunctionPackageRequest().WithPackage(pkg.(string))) + } + } + return packages, nil +} + func parseFunctionTargetPathCommon(d *schema.ResourceData) (string, error) { var tp string if v, ok := d.GetOk("target_path"); ok { @@ -482,6 +491,15 @@ func setFunctionImportsInBuilder[T any](d *schema.ResourceData, setImports func( return nil } +func setFunctionPackagesInBuilder[T any](d *schema.ResourceData, setPackages func([]sdk.FunctionPackageRequest) T) error { + packages, err := parseFunctionPackagesCommon(d) + if err != nil { + return err + } + setPackages(packages) + return nil +} + func setFunctionTargetPathInBuilder[T any](d *schema.ResourceData, setTargetPath func(string) T) error { tp, err := parseFunctionTargetPathCommon(d) if err != nil { diff --git a/pkg/resources/function_java.go b/pkg/resources/function_java.go index c8fca3c13f..e085fc0c97 100644 --- a/pkg/resources/function_java.go +++ b/pkg/resources/function_java.go @@ -72,12 +72,11 @@ func CreateContextFunctionJava(ctx context.Context, d *schema.ResourceData, meta 
attributeMappedValueCreateBuilder[string](d, "null_input_behavior", request.WithNullInputBehavior, sdk.ToNullInputBehavior), attributeMappedValueCreateBuilder[string](d, "return_results_behavior", request.WithReturnResultsBehavior, sdk.ToReturnResultsBehavior), stringAttributeCreateBuilder(d, "runtime_version", request.WithRuntimeVersion), - // TODO [SNOW-1348103]: handle the rest of the attributes - // comment + stringAttributeCreateBuilder(d, "comment", request.WithComment), setFunctionImportsInBuilder(d, request.WithImports), - // packages - // external_access_integrations - // secrets + setFunctionPackagesInBuilder(d, request.WithPackages), + setExternalAccessIntegrationsInBuilder(d, request.WithExternalAccessIntegrations), + setSecretsInBuilder(d, request.WithSecrets), setFunctionTargetPathInBuilder(d, request.WithTargetPath), stringAttributeCreateBuilder(d, "function_definition", request.WithFunctionDefinitionWrapped), ) @@ -121,19 +120,18 @@ func ReadContextFunctionJava(ctx context.Context, d *schema.ResourceData, meta a // TODO [SNOW-1348103]: handle setting state to value from config errs := errors.Join( - // TODO [SNOW-1348103]: set the rest of the fields // not reading is_secure on purpose (handled as external change to show output) readFunctionOrProcedureArguments(d, allFunctionDetails.functionDetails.NormalizedArguments), d.Set("return_type", allFunctionDetails.functionDetails.ReturnDataType.ToSql()), // not reading null_input_behavior on purpose (handled as external change to show output) // not reading return_results_behavior on purpose (handled as external change to show output) setOptionalFromStringPtr(d, "runtime_version", allFunctionDetails.functionDetails.RuntimeVersion), - // comment + d.Set("comment", allFunctionDetails.function.Description), readFunctionOrProcedureImports(d, allFunctionDetails.functionDetails.NormalizedImports), - // packages + d.Set("packages", allFunctionDetails.functionDetails.NormalizedPackages), 
setRequiredFromStringPtr(d, "handler", allFunctionDetails.functionDetails.Handler), - // external_access_integrations - // secrets + readFunctionOrProcedureExternalAccessIntegrations(d, allFunctionDetails.functionDetails.NormalizedExternalAccessIntegrations), + readFunctionOrProcedureSecrets(d, allFunctionDetails.functionDetails.NormalizedSecrets), readFunctionOrProcedureTargetPath(d, allFunctionDetails.functionDetails.NormalizedTargetPath), setOptionalFromStringPtr(d, "function_definition", allFunctionDetails.functionDetails.Body), d.Set("function_language", allFunctionDetails.functionDetails.Language), @@ -173,11 +171,32 @@ func UpdateContextFunctionJava(ctx context.Context, d *schema.ResourceData, meta setRequest := sdk.NewFunctionSetRequest() unsetRequest := sdk.NewFunctionUnsetRequest() - // TODO [SNOW-1348103]: handle all updates - // secure - // external access integration - // secrets - // comment + err = errors.Join( + stringAttributeUpdate(d, "comment", &setRequest.Comment, &unsetRequest.Comment), + func() error { + if d.HasChange("secrets") { + return setSecretsInBuilder(d, func(references []sdk.SecretReference) *sdk.FunctionSetRequest { + return setRequest.WithSecretsList(sdk.SecretsListRequest{SecretsList: references}) + }) + } + return nil + }(), + func() error { + if d.HasChange("external_access_integrations") { + return setExternalAccessIntegrationsInBuilder(d, func(references []sdk.AccountObjectIdentifier) any { + if len(references) == 0 { + return unsetRequest.WithExternalAccessIntegrations(true) + } else { + return setRequest.WithExternalAccessIntegrations(references) + } + }) + } + return nil + }(), + ) + if err != nil { + return diag.FromErr(err) + } if updateParamDiags := handleFunctionParametersUpdate(d, setRequest, unsetRequest); len(updateParamDiags) > 0 { return updateParamDiags @@ -187,15 +206,38 @@ func UpdateContextFunctionJava(ctx context.Context, d *schema.ResourceData, meta if !reflect.DeepEqual(*setRequest, 
*sdk.NewFunctionSetRequest()) { err := client.Functions.Alter(ctx, sdk.NewAlterFunctionRequest(id).WithSet(*setRequest)) if err != nil { + d.Partial(true) return diag.FromErr(err) } } if !reflect.DeepEqual(*unsetRequest, *sdk.NewFunctionUnsetRequest()) { err := client.Functions.Alter(ctx, sdk.NewAlterFunctionRequest(id).WithUnset(*unsetRequest)) if err != nil { + d.Partial(true) return diag.FromErr(err) } } + // has to be handled separately + if d.HasChange("is_secure") { + if v := d.Get("is_secure").(string); v != BooleanDefault { + parsed, err := booleanStringToBool(v) + if err != nil { + return diag.FromErr(err) + } + err = client.Functions.Alter(ctx, sdk.NewAlterFunctionRequest(id).WithSetSecure(parsed)) + if err != nil { + d.Partial(true) + return diag.FromErr(err) + } + } else { + err := client.Functions.Alter(ctx, sdk.NewAlterFunctionRequest(id).WithUnsetSecure(true)) + if err != nil { + d.Partial(true) + return diag.FromErr(err) + } + } + } + return ReadContextFunctionJava(ctx, d, meta) } diff --git a/pkg/resources/function_java_acceptance_test.go b/pkg/resources/function_java_acceptance_test.go index b805187b69..9b8f032779 100644 --- a/pkg/resources/function_java_acceptance_test.go +++ b/pkg/resources/function_java_acceptance_test.go @@ -173,6 +173,27 @@ func TestAcc_FunctionJava_InlineFull(t *testing.T) { stage, stageCleanup := acc.TestClient().Stage.CreateStage(t) t.Cleanup(stageCleanup) + secretId := acc.TestClient().Ids.RandomSchemaObjectIdentifier() + secretId2 := acc.TestClient().Ids.RandomSchemaObjectIdentifier() + + networkRule, networkRuleCleanup := acc.TestClient().NetworkRule.Create(t) + t.Cleanup(networkRuleCleanup) + + secret, secretCleanup := acc.TestClient().Secret.CreateWithGenericString(t, secretId, "test_secret_string") + t.Cleanup(secretCleanup) + + secret2, secret2Cleanup := acc.TestClient().Secret.CreateWithGenericString(t, secretId2, "test_secret_string_2") + t.Cleanup(secret2Cleanup) + + externalAccessIntegration, 
externalAccessIntegrationCleanup := acc.TestClient().ExternalAccessIntegration.CreateExternalAccessIntegrationWithNetworkRuleAndSecret(t, networkRule.ID(), secret.ID()) + t.Cleanup(externalAccessIntegrationCleanup) + + externalAccessIntegration2, externalAccessIntegration2Cleanup := acc.TestClient().ExternalAccessIntegration.CreateExternalAccessIntegrationWithNetworkRuleAndSecret(t, networkRule.ID(), secret2.ID()) + t.Cleanup(externalAccessIntegration2Cleanup) + + tmpJavaFunction := acc.TestClient().CreateSampleJavaFunctionAndJarOnUserStage(t) + tmpJavaFunction2 := acc.TestClient().CreateSampleJavaFunctionAndJarOnUserStage(t) + className := "TestFunc" funcName := "echoVarchar" argName := "x" @@ -187,8 +208,34 @@ func TestAcc_FunctionJava_InlineFull(t *testing.T) { functionModel := model.FunctionJavaBasicInline("w", id, dataType, handler, definition). WithArgument(argName, dataType). + WithImports( + sdk.NormalizedPath{StageLocation: "~", PathOnStage: tmpJavaFunction.JarName}, + sdk.NormalizedPath{StageLocation: "~", PathOnStage: tmpJavaFunction2.JarName}, + ). + WithPackages("com.snowflake:snowpark:1.14.0", "com.snowflake:telemetry:0.1.0"). + WithExternalAccessIntegrations(externalAccessIntegration, externalAccessIntegration2). + WithSecrets(map[string]sdk.SchemaObjectIdentifier{ + "abc": secretId, + "def": secretId2, + }). WithTargetPathParts(stage.ID().FullyQualifiedName(), jarName). - WithRuntimeVersion("11") + WithRuntimeVersion("11"). + WithComment("some comment") + + functionModelUpdateWithoutRecreation := model.FunctionJavaBasicInline("w", id, dataType, handler, definition). + WithArgument(argName, dataType). + WithImports( + sdk.NormalizedPath{StageLocation: "~", PathOnStage: tmpJavaFunction.JarName}, + sdk.NormalizedPath{StageLocation: "~", PathOnStage: tmpJavaFunction2.JarName}, + ). + WithPackages("com.snowflake:snowpark:1.14.0", "com.snowflake:telemetry:0.1.0"). + WithExternalAccessIntegrations(externalAccessIntegration). 
+ WithSecrets(map[string]sdk.SchemaObjectIdentifier{ + "def": secretId2, + }). + WithTargetPathParts(stage.ID().FullyQualifiedName(), jarName). + WithRuntimeVersion("11"). + WithComment("some other comment") resource.Test(t, resource.TestCase{ ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, @@ -198,25 +245,58 @@ func TestAcc_FunctionJava_InlineFull(t *testing.T) { PreCheck: func() { acc.TestAccPreCheck(t) }, CheckDestroy: acc.CheckDestroy(t, resources.FunctionJava), Steps: []resource.TestStep{ - // CREATE BASIC + // CREATE WITH ALL { Config: config.FromModels(t, functionModel), Check: assert.AssertThat(t, resourceassert.FunctionJavaResource(t, functionModel.ResourceReference()). HasNameString(id.Name()). HasIsSecureString(r.BooleanDefault). - HasCommentString(sdk.DefaultFunctionComment). - HasImportsLength(0). + HasImportsLength(2). HasRuntimeVersionString("11"). HasFunctionDefinitionString(definition). + HasCommentString("some comment"). HasFunctionLanguageString("JAVA"). HasFullyQualifiedNameString(id.FullyQualifiedName()), assert.Check(resource.TestCheckResourceAttr(functionModel.ResourceReference(), "target_path.0.stage_location", stage.ID().FullyQualifiedName())), assert.Check(resource.TestCheckResourceAttr(functionModel.ResourceReference(), "target_path.0.path_on_stage", jarName)), + assert.Check(resource.TestCheckResourceAttr(functionModel.ResourceReference(), "secrets.#", "2")), + assert.Check(resource.TestCheckResourceAttr(functionModel.ResourceReference(), "external_access_integrations.#", "2")), + assert.Check(resource.TestCheckResourceAttr(functionModel.ResourceReference(), "packages.#", "2")), resourceshowoutputassert.FunctionShowOutput(t, functionModel.ResourceReference()). 
HasIsSecure(false), ), }, + // UPDATE WITHOUT RECREATION + { + ConfigPlanChecks: resource.ConfigPlanChecks{ + PreApply: []plancheck.PlanCheck{ + plancheck.ExpectResourceAction(functionModelUpdateWithoutRecreation.ResourceReference(), plancheck.ResourceActionUpdate), + }, + }, + Config: config.FromModels(t, functionModelUpdateWithoutRecreation), + Check: assert.AssertThat(t, + resourceassert.FunctionJavaResource(t, functionModelUpdateWithoutRecreation.ResourceReference()). + HasNameString(id.Name()). + HasIsSecureString(r.BooleanDefault). + HasImportsLength(2). + HasRuntimeVersionString("11"). + HasFunctionDefinitionString(definition). + HasCommentString("some other comment"). + HasFunctionLanguageString("JAVA"). + HasFullyQualifiedNameString(id.FullyQualifiedName()), + assert.Check(resource.TestCheckResourceAttr(functionModelUpdateWithoutRecreation.ResourceReference(), "target_path.0.stage_location", stage.ID().FullyQualifiedName())), + assert.Check(resource.TestCheckResourceAttr(functionModelUpdateWithoutRecreation.ResourceReference(), "target_path.0.path_on_stage", jarName)), + assert.Check(resource.TestCheckResourceAttr(functionModelUpdateWithoutRecreation.ResourceReference(), "secrets.#", "1")), + assert.Check(resource.TestCheckResourceAttr(functionModelUpdateWithoutRecreation.ResourceReference(), "secrets.0.secret_variable_name", "def")), + assert.Check(resource.TestCheckResourceAttr(functionModelUpdateWithoutRecreation.ResourceReference(), "secrets.0.secret_id", secretId2.FullyQualifiedName())), + assert.Check(resource.TestCheckResourceAttr(functionModelUpdateWithoutRecreation.ResourceReference(), "external_access_integrations.#", "1")), + assert.Check(resource.TestCheckResourceAttr(functionModelUpdateWithoutRecreation.ResourceReference(), "external_access_integrations.0", externalAccessIntegration.Name())), + assert.Check(resource.TestCheckResourceAttr(functionModelUpdateWithoutRecreation.ResourceReference(), "packages.#", "2")), + 
resourceshowoutputassert.FunctionShowOutput(t, functionModelUpdateWithoutRecreation.ResourceReference()). + HasIsSecure(false), + ), + }, }, }) } diff --git a/pkg/resources/procedure_commons.go b/pkg/resources/procedure_commons.go index 759f44f878..addb4f3c30 100644 --- a/pkg/resources/procedure_commons.go +++ b/pkg/resources/procedure_commons.go @@ -256,9 +256,8 @@ func procedureBaseSchema() map[string]schema.Schema { ForceNew: true, }, "comment": { - Type: schema.TypeString, - Optional: true, - // TODO [SNOW-1348103]: handle dynamic comment - this is a workaround for now + Type: schema.TypeString, + Optional: true, Default: "user-defined procedure", Description: "Specifies a comment for the procedure.", }, @@ -307,7 +306,6 @@ func procedureBaseSchema() map[string]schema.Schema { ValidateDiagFunc: IsValidIdentifier[sdk.AccountObjectIdentifier](), }, Optional: true, - ForceNew: true, Description: "The names of [external access integrations](https://docs.snowflake.com/en/sql-reference/sql/create-external-access-integration) needed in order for this procedure’s handler code to access external networks. 
An external access integration specifies [network rules](https://docs.snowflake.com/en/sql-reference/sql/create-network-rule) and [secrets](https://docs.snowflake.com/en/sql-reference/sql/create-secret) that specify external locations and credentials (if any) allowed for use by handler code when making requests of an external network, such as an external REST API.", }, "secrets": { @@ -486,6 +484,16 @@ func parseProcedureImportsCommon(d *schema.ResourceData) ([]sdk.ProcedureImportR return imports, nil } +func parseProceduresPackagesCommon(d *schema.ResourceData) ([]sdk.ProcedurePackageRequest, error) { + packages := make([]sdk.ProcedurePackageRequest, 0) + if v, ok := d.GetOk("packages"); ok { + for _, pkg := range v.(*schema.Set).List() { + packages = append(packages, *sdk.NewProcedurePackageRequest(pkg.(string))) + } + } + return packages, nil +} + func parseProcedureTargetPathCommon(d *schema.ResourceData) (string, error) { var tp string if v, ok := d.GetOk("target_path"); ok { diff --git a/pkg/resources/procedure_java.go b/pkg/resources/procedure_java.go index bc4f417144..04fcb0cf1a 100644 --- a/pkg/resources/procedure_java.go +++ b/pkg/resources/procedure_java.go @@ -62,8 +62,12 @@ func CreateContextProcedureJava(ctx context.Context, d *schema.ResourceData, met } handler := d.Get("handler").(string) runtimeVersion := d.Get("runtime_version").(string) - // TODO [this PR]: handle real packages - packages := []sdk.ProcedurePackageRequest{*sdk.NewProcedurePackageRequest("com.snowflake:snowpark:1.14.0")} + + packages, err := parseProceduresPackagesCommon(d) + if err != nil { + return diag.FromErr(err) + } + packages = append(packages, *sdk.NewProcedurePackageRequest(fmt.Sprintf(`%s%s`, sdk.JavaSnowparkPackageString, d.Get("snowpark_package").(string)))) argumentDataTypes := collections.Map(argumentRequests, func(r sdk.ProcedureArgumentRequest) datatypes.DataType { return r.ArgDataType }) id := sdk.NewSchemaObjectIdentifierWithArgumentsNormalized(database, sc, name, 
argumentDataTypes...) @@ -73,12 +77,10 @@ func CreateContextProcedureJava(ctx context.Context, d *schema.ResourceData, met errs := errors.Join( booleanStringAttributeCreateBuilder(d, "is_secure", request.WithSecure), attributeMappedValueCreateBuilder[string](d, "null_input_behavior", request.WithNullInputBehavior, sdk.ToNullInputBehavior), - // TODO [SNOW-1348103]: handle the rest of the attributes - // comment + stringAttributeCreateBuilder(d, "comment", request.WithComment), setProcedureImportsInBuilder(d, request.WithImports), - // packages - // external_access_integrations - // secrets + setExternalAccessIntegrationsInBuilder(d, request.WithExternalAccessIntegrations), + setSecretsInBuilder(d, request.WithSecrets), setProcedureTargetPathInBuilder(d, request.WithTargetPath), stringAttributeCreateBuilder(d, "procedure_definition", request.WithProcedureDefinitionWrapped), ) @@ -122,18 +124,18 @@ func ReadContextProcedureJava(ctx context.Context, d *schema.ResourceData, meta // TODO [SNOW-1348103]: handle setting state to value from config errs := errors.Join( - // TODO [SNOW-1348103]: set the rest of the fields // not reading is_secure on purpose (handled as external change to show output) readFunctionOrProcedureArguments(d, allProcedureDetails.procedureDetails.NormalizedArguments), d.Set("return_type", allProcedureDetails.procedureDetails.ReturnDataType.ToSql()), // not reading null_input_behavior on purpose (handled as external change to show output) setRequiredFromStringPtr(d, "runtime_version", allProcedureDetails.procedureDetails.RuntimeVersion), - // comment + d.Set("comment", allProcedureDetails.procedure.Description), readFunctionOrProcedureImports(d, allProcedureDetails.procedureDetails.NormalizedImports), - // packages + d.Set("packages", allProcedureDetails.procedureDetails.NormalizedPackages), + d.Set("snowpark_package", allProcedureDetails.procedureDetails.SnowparkVersion), setRequiredFromStringPtr(d, "handler", 
allProcedureDetails.procedureDetails.Handler), - // external_access_integrations - // secrets + readFunctionOrProcedureExternalAccessIntegrations(d, allProcedureDetails.procedureDetails.NormalizedExternalAccessIntegrations), + readFunctionOrProcedureSecrets(d, allProcedureDetails.procedureDetails.NormalizedSecrets), readFunctionOrProcedureTargetPath(d, allProcedureDetails.procedureDetails.NormalizedTargetPath), setOptionalFromStringPtr(d, "procedure_definition", allProcedureDetails.procedureDetails.Body), d.Set("procedure_language", allProcedureDetails.procedureDetails.Language), @@ -173,11 +175,32 @@ func UpdateContextProcedureJava(ctx context.Context, d *schema.ResourceData, met setRequest := sdk.NewProcedureSetRequest() unsetRequest := sdk.NewProcedureUnsetRequest() - // TODO [SNOW-1348103]: handle all updates - // secure - // external access integration - // secrets - // comment + err = errors.Join( + stringAttributeUpdate(d, "comment", &setRequest.Comment, &unsetRequest.Comment), + func() error { + if d.HasChange("secrets") { + return setSecretsInBuilder(d, func(references []sdk.SecretReference) *sdk.ProcedureSetRequest { + return setRequest.WithSecretsList(sdk.SecretsListRequest{SecretsList: references}) + }) + } + return nil + }(), + func() error { + if d.HasChange("external_access_integrations") { + return setExternalAccessIntegrationsInBuilder(d, func(references []sdk.AccountObjectIdentifier) any { + if len(references) == 0 { + return unsetRequest.WithExternalAccessIntegrations(true) + } else { + return setRequest.WithExternalAccessIntegrations(references) + } + }) + } + return nil + }(), + ) + if err != nil { + return diag.FromErr(err) + } if updateParamDiags := handleProcedureParametersUpdate(d, setRequest, unsetRequest); len(updateParamDiags) > 0 { return updateParamDiags diff --git a/pkg/resources/procedure_java_acceptance_test.go b/pkg/resources/procedure_java_acceptance_test.go index 35bdd401ec..c6d0aba743 100644 --- 
a/pkg/resources/procedure_java_acceptance_test.go +++ b/pkg/resources/procedure_java_acceptance_test.go @@ -172,6 +172,27 @@ func TestAcc_ProcedureJava_InlineFull(t *testing.T) { stage, stageCleanup := acc.TestClient().Stage.CreateStage(t) t.Cleanup(stageCleanup) + secretId := acc.TestClient().Ids.RandomSchemaObjectIdentifier() + secretId2 := acc.TestClient().Ids.RandomSchemaObjectIdentifier() + + networkRule, networkRuleCleanup := acc.TestClient().NetworkRule.Create(t) + t.Cleanup(networkRuleCleanup) + + secret, secretCleanup := acc.TestClient().Secret.CreateWithGenericString(t, secretId, "test_secret_string") + t.Cleanup(secretCleanup) + + secret2, secret2Cleanup := acc.TestClient().Secret.CreateWithGenericString(t, secretId2, "test_secret_string_2") + t.Cleanup(secret2Cleanup) + + externalAccessIntegration, externalAccessIntegrationCleanup := acc.TestClient().ExternalAccessIntegration.CreateExternalAccessIntegrationWithNetworkRuleAndSecret(t, networkRule.ID(), secret.ID()) + t.Cleanup(externalAccessIntegrationCleanup) + + externalAccessIntegration2, externalAccessIntegration2Cleanup := acc.TestClient().ExternalAccessIntegration.CreateExternalAccessIntegrationWithNetworkRuleAndSecret(t, networkRule.ID(), secret2.ID()) + t.Cleanup(externalAccessIntegration2Cleanup) + + tmpJavaProcedure := acc.TestClient().CreateSampleJavaProcedureAndJarOnUserStage(t) + tmpJavaProcedure2 := acc.TestClient().CreateSampleJavaProcedureAndJarOnUserStage(t) + className := "TestFunc" funcName := "echoVarchar" argName := "x" @@ -186,8 +207,36 @@ func TestAcc_ProcedureJava_InlineFull(t *testing.T) { procedureModel := model.ProcedureJavaBasicInline("w", id, dataType, handler, definition). WithArgument(argName, dataType). + WithImports( + sdk.NormalizedPath{StageLocation: "~", PathOnStage: tmpJavaProcedure.JarName}, + sdk.NormalizedPath{StageLocation: "~", PathOnStage: tmpJavaProcedure2.JarName}, + ). + WithSnowparkPackage("1.14.0"). + WithPackages("com.snowflake:telemetry:0.1.0"). 
+ WithExternalAccessIntegrations(externalAccessIntegration, externalAccessIntegration2). + WithSecrets(map[string]sdk.SchemaObjectIdentifier{ + "abc": secretId, + "def": secretId2, + }). + WithTargetPathParts(stage.ID().FullyQualifiedName(), jarName). + WithRuntimeVersion("11"). + WithComment("some comment") + + procedureModelUpdateWithoutRecreation := model.ProcedureJavaBasicInline("w", id, dataType, handler, definition). + WithArgument(argName, dataType). + WithImports( + sdk.NormalizedPath{StageLocation: "~", PathOnStage: tmpJavaProcedure.JarName}, + sdk.NormalizedPath{StageLocation: "~", PathOnStage: tmpJavaProcedure2.JarName}, + ). + WithSnowparkPackage("1.14.0"). + WithPackages("com.snowflake:telemetry:0.1.0"). + WithExternalAccessIntegrations(externalAccessIntegration). + WithSecrets(map[string]sdk.SchemaObjectIdentifier{ + "def": secretId2, + }). WithTargetPathParts(stage.ID().FullyQualifiedName(), jarName). - WithRuntimeVersion("11") + WithRuntimeVersion("11"). + WithComment("some other comment") resource.Test(t, resource.TestCase{ ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, @@ -204,18 +253,52 @@ func TestAcc_ProcedureJava_InlineFull(t *testing.T) { resourceassert.ProcedureJavaResource(t, procedureModel.ResourceReference()). HasNameString(id.Name()). HasIsSecureString(r.BooleanDefault). - HasCommentString(sdk.DefaultProcedureComment). - HasImportsLength(0). + HasImportsLength(2). HasRuntimeVersionString("11"). HasProcedureDefinitionString(definition). + HasCommentString("some comment"). HasProcedureLanguageString("JAVA"). 
HasFullyQualifiedNameString(id.FullyQualifiedName()), assert.Check(resource.TestCheckResourceAttr(procedureModel.ResourceReference(), "target_path.0.stage_location", stage.ID().FullyQualifiedName())), assert.Check(resource.TestCheckResourceAttr(procedureModel.ResourceReference(), "target_path.0.path_on_stage", jarName)), + assert.Check(resource.TestCheckResourceAttr(procedureModel.ResourceReference(), "secrets.#", "2")), + assert.Check(resource.TestCheckResourceAttr(procedureModel.ResourceReference(), "external_access_integrations.#", "2")), + assert.Check(resource.TestCheckResourceAttr(procedureModel.ResourceReference(), "packages.#", "1")), + assert.Check(resource.TestCheckResourceAttr(procedureModel.ResourceReference(), "packages.0", "com.snowflake:telemetry:0.1.0")), resourceshowoutputassert.ProcedureShowOutput(t, procedureModel.ResourceReference()). HasIsSecure(false), ), }, + // UPDATE WITHOUT RECREATION + { + ConfigPlanChecks: resource.ConfigPlanChecks{ + PreApply: []plancheck.PlanCheck{ + plancheck.ExpectResourceAction(procedureModelUpdateWithoutRecreation.ResourceReference(), plancheck.ResourceActionUpdate), + }, + }, + Config: config.FromModels(t, procedureModelUpdateWithoutRecreation), + Check: assert.AssertThat(t, + resourceassert.ProcedureJavaResource(t, procedureModelUpdateWithoutRecreation.ResourceReference()). + HasNameString(id.Name()). + HasIsSecureString(r.BooleanDefault). + HasImportsLength(2). + HasRuntimeVersionString("11"). + HasProcedureDefinitionString(definition). + HasCommentString("some other comment"). + HasProcedureLanguageString("JAVA"). 
+ HasFullyQualifiedNameString(id.FullyQualifiedName()), + assert.Check(resource.TestCheckResourceAttr(procedureModelUpdateWithoutRecreation.ResourceReference(), "target_path.0.stage_location", stage.ID().FullyQualifiedName())), + assert.Check(resource.TestCheckResourceAttr(procedureModelUpdateWithoutRecreation.ResourceReference(), "target_path.0.path_on_stage", jarName)), + assert.Check(resource.TestCheckResourceAttr(procedureModelUpdateWithoutRecreation.ResourceReference(), "secrets.#", "1")), + assert.Check(resource.TestCheckResourceAttr(procedureModelUpdateWithoutRecreation.ResourceReference(), "secrets.0.secret_variable_name", "def")), + assert.Check(resource.TestCheckResourceAttr(procedureModelUpdateWithoutRecreation.ResourceReference(), "secrets.0.secret_id", secretId2.FullyQualifiedName())), + assert.Check(resource.TestCheckResourceAttr(procedureModelUpdateWithoutRecreation.ResourceReference(), "external_access_integrations.#", "1")), + assert.Check(resource.TestCheckResourceAttr(procedureModelUpdateWithoutRecreation.ResourceReference(), "external_access_integrations.0", externalAccessIntegration.Name())), + assert.Check(resource.TestCheckResourceAttr(procedureModelUpdateWithoutRecreation.ResourceReference(), "packages.#", "1")), + resourceshowoutputassert.ProcedureShowOutput(t, procedureModelUpdateWithoutRecreation.ResourceReference()). 
+ HasIsSecure(false), + ), + }, }, }) } diff --git a/pkg/sdk/functions_and_procedures_commons.go b/pkg/sdk/functions_and_procedures_commons.go index df64aba187..35f4ab3dbb 100644 --- a/pkg/sdk/functions_and_procedures_commons.go +++ b/pkg/sdk/functions_and_procedures_commons.go @@ -1,10 +1,12 @@ package sdk import ( + "encoding/json" "fmt" "log" "strings" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/internal/collections" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk/datatypes" ) @@ -116,3 +118,34 @@ func parseFunctionOrProcedureArgument(arg string) (*NormalizedArgument, error) { } return &NormalizedArgument{argName, dt}, nil } + +// TODO [SNOW-1850370]: is this combo enough? - e.g. whitespace looks to be not trimmed +func parseFunctionOrProcedureExternalAccessIntegrations(raw string) ([]AccountObjectIdentifier, error) { + log.Printf("[DEBUG] external access integrations: %s", raw) + return collections.MapErr(ParseCommaSeparatedStringArray(raw, false), ParseAccountObjectIdentifier) +} + +// TODO [before V1]: test +func parseFunctionOrProcedurePackages(raw string) ([]string, error) { + log.Printf("[DEBUG] packages: %s", raw) + return collections.Map(ParseCommaSeparatedStringArray(raw, true), strings.TrimSpace), nil +} + +// TODO [before V1]: unit test +func parseFunctionOrProcedureSecrets(raw string) (map[string]SchemaObjectIdentifier, error) { + log.Printf("[DEBUG] parsing secrets: %s", raw) + secrets := make(map[string]string) + err := json.Unmarshal([]byte(raw), &secrets) + if err != nil { + return nil, fmt.Errorf("could not parse secrets from Snowflake: %s, err: %w", raw, err) + } + normalizedSecrets := make(map[string]SchemaObjectIdentifier) + for k, v := range secrets { + id, err := ParseSchemaObjectIdentifier(v) + if err != nil { + return nil, fmt.Errorf("could not parse secrets from Snowflake: %s, err: %w", raw, err) + } + normalizedSecrets[k] = id + } + return normalizedSecrets, nil +} diff --git 
a/pkg/sdk/functions_ext.go b/pkg/sdk/functions_ext.go index facd9ede1d..93ae4ffa8e 100644 --- a/pkg/sdk/functions_ext.go +++ b/pkg/sdk/functions_ext.go @@ -33,11 +33,14 @@ type FunctionDetails struct { InstalledPackages *string // list present for python (hidden when SECURE) IsAggregate *bool // present for python - NormalizedImports []NormalizedPath - NormalizedTargetPath *NormalizedPath - ReturnDataType datatypes.DataType - ReturnNotNull bool - NormalizedArguments []NormalizedArgument + NormalizedImports []NormalizedPath + NormalizedTargetPath *NormalizedPath + ReturnDataType datatypes.DataType + ReturnNotNull bool + NormalizedArguments []NormalizedArgument + NormalizedExternalAccessIntegrations []AccountObjectIdentifier + NormalizedSecrets map[string]SchemaObjectIdentifier + NormalizedPackages []string } func functionDetailsFromRows(rows []FunctionDetail) (*FunctionDetails, error) { @@ -108,6 +111,36 @@ func functionDetailsFromRows(rows []FunctionDetail) (*FunctionDetails, error) { v.NormalizedArguments = args } + if v.ExternalAccessIntegrations != nil { + if p, err := parseFunctionOrProcedureExternalAccessIntegrations(*v.ExternalAccessIntegrations); err != nil { + errs = append(errs, err) + } else { + v.NormalizedExternalAccessIntegrations = p + } + } else { + v.NormalizedExternalAccessIntegrations = []AccountObjectIdentifier{} + } + + if v.Secrets != nil { + if p, err := parseFunctionOrProcedureSecrets(*v.Secrets); err != nil { + errs = append(errs, err) + } else { + v.NormalizedSecrets = p + } + } else { + v.NormalizedSecrets = map[string]SchemaObjectIdentifier{} + } + + if v.Packages != nil { + if p, err := parseFunctionOrProcedurePackages(*v.Packages); err != nil { + errs = append(errs, err) + } else { + v.NormalizedPackages = p + } + } else { + v.NormalizedPackages = []string{} + } + return v, errors.Join(errs...) 
} diff --git a/pkg/sdk/procedures_ext.go b/pkg/sdk/procedures_ext.go index de40fd8732..4a308dc97c 100644 --- a/pkg/sdk/procedures_ext.go +++ b/pkg/sdk/procedures_ext.go @@ -5,11 +5,16 @@ import ( "errors" "fmt" "strconv" + "strings" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk/datatypes" ) -const DefaultProcedureComment = "user-defined procedure" +const ( + DefaultProcedureComment = "user-defined procedure" + JavaSnowparkPackageString = "com.snowflake:snowpark:" + PythonSnowparkPackageString = "snowflake-snowpark-python==" +) func (v *Procedure) ID() SchemaObjectIdentifierWithArguments { return NewSchemaObjectIdentifierWithArguments(v.CatalogName, v.SchemaName, v.Name, v.ArgumentsOld...) @@ -33,11 +38,16 @@ type ProcedureDetails struct { InstalledPackages *string // list present for python (hidden when SECURE) ExecuteAs string // present for all procedure types - NormalizedImports []NormalizedPath - NormalizedTargetPath *NormalizedPath - ReturnDataType datatypes.DataType - ReturnNotNull bool - NormalizedArguments []NormalizedArgument + NormalizedImports []NormalizedPath + NormalizedTargetPath *NormalizedPath + ReturnDataType datatypes.DataType + ReturnNotNull bool + NormalizedArguments []NormalizedArgument + NormalizedExternalAccessIntegrations []AccountObjectIdentifier + NormalizedSecrets map[string]SchemaObjectIdentifier + // NormalizedPackages does not contain a snowpark package - it is extracted only as a version in SnowparkVersion below + NormalizedPackages []string + SnowparkVersion string } func procedureDetailsFromRows(rows []ProcedureDetail) (*ProcedureDetails, error) { @@ -108,6 +118,75 @@ func procedureDetailsFromRows(rows []ProcedureDetail) (*ProcedureDetails, error) v.NormalizedArguments = args } + if v.ExternalAccessIntegrations != nil { + if p, err := parseFunctionOrProcedureExternalAccessIntegrations(*v.ExternalAccessIntegrations); err != nil { + errs = append(errs, err) + } else { + v.NormalizedExternalAccessIntegrations = p + } 
+ } else { + v.NormalizedExternalAccessIntegrations = []AccountObjectIdentifier{} + } + + if v.Secrets != nil { + if p, err := parseFunctionOrProcedureSecrets(*v.Secrets); err != nil { + errs = append(errs, err) + } else { + v.NormalizedSecrets = p + } + } else { + v.NormalizedSecrets = map[string]SchemaObjectIdentifier{} + } + + if v.Packages != nil { + if p, err := parseFunctionOrProcedurePackages(*v.Packages); err != nil { + errs = append(errs, err) + } else { + // TODO [SNOW-1850370]: merge these and unit test + switch strings.ToUpper(v.Language) { + case "JAVA", "SCALA": + filtered := make([]string, 0) + var found bool + for _, o := range p { + o := strings.TrimSpace(o) + if strings.HasPrefix(o, JavaSnowparkPackageString) { + v.SnowparkVersion = strings.TrimPrefix(o, JavaSnowparkPackageString) + found = true + } else { + filtered = append(filtered, o) + } + } + v.NormalizedPackages = filtered + if !found { + errs = append(errs, fmt.Errorf("could not parse package from Snowflake, expected at least snowpark package, got %v", filtered)) + } + case "PYTHON": + filtered := make([]string, 0) + var found bool + for _, o := range p { + o := strings.TrimSpace(o) + if strings.HasPrefix(o, PythonSnowparkPackageString) { + v.SnowparkVersion = strings.TrimPrefix(o, PythonSnowparkPackageString) + found = true + } else { + filtered = append(filtered, o) + } + } + v.NormalizedPackages = filtered + if !found { + errs = append(errs, fmt.Errorf("could not parse package from Snowflake, expected at least snowpark package, got %v", filtered)) + } + } + } + } else { + switch strings.ToUpper(v.Language) { + case "JAVA", "SCALA", "PYTHON": + errs = append(errs, fmt.Errorf("could not parse package from Snowflake, expected at least snowpark package, got nil")) + default: + v.NormalizedPackages = []string{} + } + } + return v, errors.Join(errs...) 
} diff --git a/pkg/sdk/testint/functions_integration_test.go b/pkg/sdk/testint/functions_integration_test.go index 022ba7592a..8aa1b217fb 100644 --- a/pkg/sdk/testint/functions_integration_test.go +++ b/pkg/sdk/testint/functions_integration_test.go @@ -34,6 +34,7 @@ import ( // TODO [SNOW-1348103]: test secure // TODO [SNOW-1348103]: python aggregate func (100357 (P0000): Could not find accumulate method in function CVVEMHIT_06547800_08D6_DBCA_1AC7_5E422AFF8B39 with handler dump) // TODO [SNOW-1348103]: add test with multiple imports +// TODO [this PR]: test with multiple external access integrations and secrets func TestInt_Functions(t *testing.T) { client := testClient(t) ctx := context.Background() @@ -119,12 +120,14 @@ func TestInt_Functions(t *testing.T) { HasNullHandling(string(sdk.NullInputBehaviorCalledOnNullInput)). HasVolatility(string(sdk.ReturnResultsBehaviorVolatile)). HasExternalAccessIntegrationsNil(). + HasExactlyExternalAccessIntegrationsNormalizedInAnyOrder(). HasSecretsNil(). HasImports(`[]`). HasExactlyImportsNormalizedInAnyOrder(). HasHandler(handler). HasRuntimeVersionNil(). HasPackages(`[]`). + HasExactlyPackagesInAnyOrder(). HasTargetPathNil(). HasNormalizedTargetPathNil(). HasInstalledPackagesNil(). @@ -217,9 +220,8 @@ func TestInt_Functions(t *testing.T) { HasNullHandling(string(sdk.NullInputBehaviorReturnsNullInput)). HasVolatility(string(sdk.ReturnResultsBehaviorImmutable)). HasExactlyExternalAccessIntegrations(externalAccessIntegration). - // TODO [SNOW-1348103]: parse to identifier list - // TODO [SNOW-1348103]: check multiple secrets (to know how to parse) HasExactlySecrets(map[string]sdk.SchemaObjectIdentifier{"abc": secretId}). + ContainsExactlySecrets(map[string]sdk.SchemaObjectIdentifier{"abc": secretId}). HasImports(fmt.Sprintf(`[%s]`, tmpJavaFunction.JarLocation())). 
HasExactlyImportsNormalizedInAnyOrder(sdk.NormalizedPath{ StageLocation: "~", PathOnStage: tmpJavaFunction.JarName, @@ -227,6 +229,7 @@ func TestInt_Functions(t *testing.T) { HasHandler(handler). HasRuntimeVersion("11"). HasPackages(`[com.snowflake:snowpark:1.14.0,com.snowflake:telemetry:0.1.0]`). + HasExactlyPackagesInAnyOrder("com.snowflake:snowpark:1.14.0", "com.snowflake:telemetry:0.1.0"). HasTargetPath(targetPath). HasNormalizedTargetPath("~", jarName). HasInstalledPackagesNil(). @@ -295,6 +298,7 @@ func TestInt_Functions(t *testing.T) { HasNullHandling(string(sdk.NullInputBehaviorCalledOnNullInput)). HasVolatility(string(sdk.ReturnResultsBehaviorVolatile)). HasExternalAccessIntegrationsNil(). + HasExactlyExternalAccessIntegrationsNormalizedInAnyOrder(). HasSecretsNil(). HasImports(fmt.Sprintf(`[%s]`, importPath)). HasExactlyImportsNormalizedInAnyOrder(sdk.NormalizedPath{ @@ -303,6 +307,7 @@ func TestInt_Functions(t *testing.T) { HasHandler(handler). HasRuntimeVersionNil(). HasPackages(`[]`). + HasExactlyPackagesInAnyOrder(). HasTargetPathNil(). HasNormalizedTargetPathNil(). HasInstalledPackagesNil(). @@ -383,7 +388,9 @@ func TestInt_Functions(t *testing.T) { HasNullHandling(string(sdk.NullInputBehaviorReturnsNullInput)). HasVolatility(string(sdk.ReturnResultsBehaviorImmutable)). HasExactlyExternalAccessIntegrations(externalAccessIntegration). + HasExactlyExternalAccessIntegrationsNormalizedInAnyOrder(externalAccessIntegration). HasExactlySecrets(map[string]sdk.SchemaObjectIdentifier{"abc": secretId}). + ContainsExactlySecrets(map[string]sdk.SchemaObjectIdentifier{"abc": secretId}). HasImports(fmt.Sprintf(`[%s]`, tmpJavaFunction.JarLocation())). HasExactlyImportsNormalizedInAnyOrder(sdk.NormalizedPath{ StageLocation: "~", PathOnStage: tmpJavaFunction.JarName, @@ -391,6 +398,7 @@ func TestInt_Functions(t *testing.T) { HasHandler(handler). HasRuntimeVersion("11"). HasPackages(`[com.snowflake:snowpark:1.14.0,com.snowflake:telemetry:0.1.0]`). 
+ HasExactlyPackagesInAnyOrder("com.snowflake:snowpark:1.14.0", "com.snowflake:telemetry:0.1.0"). HasTargetPathNil(). HasNormalizedTargetPathNil(). HasInstalledPackagesNil(). @@ -529,12 +537,14 @@ func TestInt_Functions(t *testing.T) { HasNullHandling(string(sdk.NullInputBehaviorCalledOnNullInput)). HasVolatility(string(sdk.ReturnResultsBehaviorVolatile)). HasExternalAccessIntegrationsNil(). + HasExactlyExternalAccessIntegrationsNormalizedInAnyOrder(). HasSecretsNil(). HasImportsNil(). HasExactlyImportsNormalizedInAnyOrder(). HasHandlerNil(). HasRuntimeVersionNil(). HasPackagesNil(). + HasExactlyPackagesInAnyOrder(). HasTargetPathNil(). HasNormalizedTargetPathNil(). HasInstalledPackagesNil(). @@ -606,12 +616,14 @@ func TestInt_Functions(t *testing.T) { HasNullHandling(string(sdk.NullInputBehaviorReturnsNullInput)). HasVolatility(string(sdk.ReturnResultsBehaviorImmutable)). HasExternalAccessIntegrationsNil(). + HasExactlyExternalAccessIntegrationsNormalizedInAnyOrder(). HasSecretsNil(). HasImportsNil(). HasExactlyImportsNormalizedInAnyOrder(). HasHandlerNil(). HasRuntimeVersionNil(). HasPackagesNil(). + HasExactlyPackagesInAnyOrder(). HasTargetPathNil(). HasNormalizedTargetPathNil(). HasInstalledPackagesNil(). @@ -679,12 +691,14 @@ func TestInt_Functions(t *testing.T) { HasNullHandling(string(sdk.NullInputBehaviorCalledOnNullInput)). HasVolatility(string(sdk.ReturnResultsBehaviorVolatile)). HasExternalAccessIntegrationsNil(). + HasExactlyExternalAccessIntegrationsNormalizedInAnyOrder(). HasSecretsNil(). HasImports(`[]`). HasExactlyImportsNormalizedInAnyOrder(). HasHandler(funcName). HasRuntimeVersion("3.8"). HasPackages(`[]`). + HasExactlyPackagesInAnyOrder(). HasTargetPathNil(). HasNormalizedTargetPathNil(). HasInstalledPackagesNotEmpty(). @@ -765,7 +779,9 @@ func TestInt_Functions(t *testing.T) { HasNullHandling(string(sdk.NullInputBehaviorReturnsNullInput)). HasVolatility(string(sdk.ReturnResultsBehaviorImmutable)). 
HasExactlyExternalAccessIntegrations(externalAccessIntegration). + HasExactlyExternalAccessIntegrationsNormalizedInAnyOrder(externalAccessIntegration). HasExactlySecrets(map[string]sdk.SchemaObjectIdentifier{"abc": secretId}). + ContainsExactlySecrets(map[string]sdk.SchemaObjectIdentifier{"abc": secretId}). HasImports(fmt.Sprintf(`[%s]`, tmpPythonFunction.PythonModuleLocation())). HasExactlyImportsNormalizedInAnyOrder(sdk.NormalizedPath{ StageLocation: "~", PathOnStage: tmpPythonFunction.PythonFileName(), @@ -773,6 +789,7 @@ func TestInt_Functions(t *testing.T) { HasHandler(funcName). HasRuntimeVersion("3.8"). HasPackages(`['absl-py==0.10.0','about-time==4.2.1']`). + HasExactlyPackagesInAnyOrder("absl-py==0.10.0", "about-time==4.2.1"). HasTargetPathNil(). HasNormalizedTargetPathNil(). HasInstalledPackagesNotEmpty(). @@ -838,6 +855,7 @@ func TestInt_Functions(t *testing.T) { HasNullHandling(string(sdk.NullInputBehaviorCalledOnNullInput)). HasVolatility(string(sdk.ReturnResultsBehaviorVolatile)). HasExternalAccessIntegrationsNil(). + HasExactlyExternalAccessIntegrationsNormalizedInAnyOrder(). HasSecretsNil(). HasImports(fmt.Sprintf(`[%s]`, tmpPythonFunction.PythonModuleLocation())). HasExactlyImportsNormalizedInAnyOrder(sdk.NormalizedPath{ @@ -846,6 +864,7 @@ func TestInt_Functions(t *testing.T) { HasHandler(tmpPythonFunction.PythonHandler()). HasRuntimeVersion("3.8"). HasPackages(`[]`). + HasExactlyPackagesInAnyOrder(). HasTargetPathNil(). HasNormalizedTargetPathNil(). HasInstalledPackagesNotEmpty(). @@ -923,7 +942,9 @@ func TestInt_Functions(t *testing.T) { HasNullHandling(string(sdk.NullInputBehaviorReturnsNullInput)). HasVolatility(string(sdk.ReturnResultsBehaviorImmutable)). HasExactlyExternalAccessIntegrations(externalAccessIntegration). + HasExactlyExternalAccessIntegrationsNormalizedInAnyOrder(externalAccessIntegration). HasExactlySecrets(map[string]sdk.SchemaObjectIdentifier{"abc": secretId}). 
+ ContainsExactlySecrets(map[string]sdk.SchemaObjectIdentifier{"abc": secretId}). HasImports(fmt.Sprintf(`[%s]`, tmpPythonFunction.PythonModuleLocation())). HasExactlyImportsNormalizedInAnyOrder(sdk.NormalizedPath{ StageLocation: "~", PathOnStage: tmpPythonFunction.PythonFileName(), @@ -931,6 +952,7 @@ func TestInt_Functions(t *testing.T) { HasHandler(tmpPythonFunction.PythonHandler()). HasRuntimeVersion("3.8"). HasPackages(`['absl-py==0.10.0','about-time==4.2.1']`). + HasExactlyPackagesInAnyOrder("about-time==4.2.1", "absl-py==0.10.0"). HasTargetPathNil(). HasNormalizedTargetPathNil(). HasInstalledPackagesNotEmpty(). @@ -999,12 +1021,14 @@ func TestInt_Functions(t *testing.T) { HasNullHandling(string(sdk.NullInputBehaviorCalledOnNullInput)). HasVolatility(string(sdk.ReturnResultsBehaviorVolatile)). HasExternalAccessIntegrationsNil(). + HasExactlyExternalAccessIntegrationsNormalizedInAnyOrder(). HasSecretsNil(). HasImports(`[]`). HasExactlyImportsNormalizedInAnyOrder(). HasHandler(handler). HasRuntimeVersion("2.12"). HasPackages(`[]`). + HasExactlyPackagesInAnyOrder(). HasTargetPathNil(). HasNormalizedTargetPathNil(). HasInstalledPackagesNil(). @@ -1094,7 +1118,9 @@ func TestInt_Functions(t *testing.T) { HasNullHandling(string(sdk.NullInputBehaviorReturnsNullInput)). HasVolatility(string(sdk.ReturnResultsBehaviorImmutable)). HasExactlyExternalAccessIntegrations(externalAccessIntegration). + HasExactlyExternalAccessIntegrationsNormalizedInAnyOrder(externalAccessIntegration). HasExactlySecrets(map[string]sdk.SchemaObjectIdentifier{"abc": secretId}). + ContainsExactlySecrets(map[string]sdk.SchemaObjectIdentifier{"abc": secretId}). HasImports(fmt.Sprintf(`[%s]`, tmpJavaFunction.JarLocation())). HasExactlyImportsNormalizedInAnyOrder(sdk.NormalizedPath{ StageLocation: "~", PathOnStage: tmpJavaFunction.JarName, @@ -1102,6 +1128,7 @@ func TestInt_Functions(t *testing.T) { HasHandler(handler). HasRuntimeVersion("2.12"). 
HasPackages(`[com.snowflake:snowpark:1.14.0,com.snowflake:telemetry:0.1.0]`). + HasExactlyPackagesInAnyOrder("com.snowflake:snowpark:1.14.0", "com.snowflake:telemetry:0.1.0"). HasTargetPath(targetPath). HasNormalizedTargetPath("~", jarName). HasInstalledPackagesNil(). @@ -1168,6 +1195,7 @@ func TestInt_Functions(t *testing.T) { HasNullHandling(string(sdk.NullInputBehaviorCalledOnNullInput)). HasVolatility(string(sdk.ReturnResultsBehaviorVolatile)). HasExternalAccessIntegrationsNil(). + HasExactlyExternalAccessIntegrationsNormalizedInAnyOrder(). HasSecretsNil(). HasImports(fmt.Sprintf(`[%s]`, importPath)). HasExactlyImportsNormalizedInAnyOrder(sdk.NormalizedPath{ @@ -1176,6 +1204,7 @@ func TestInt_Functions(t *testing.T) { HasHandler(handler). HasRuntimeVersion("2.12"). HasPackages(`[]`). + HasExactlyPackagesInAnyOrder(). HasTargetPathNil(). HasNormalizedTargetPathNil(). HasInstalledPackagesNil(). @@ -1253,7 +1282,9 @@ func TestInt_Functions(t *testing.T) { HasNullHandling(string(sdk.NullInputBehaviorReturnsNullInput)). HasVolatility(string(sdk.ReturnResultsBehaviorImmutable)). HasExactlyExternalAccessIntegrations(externalAccessIntegration). + HasExactlyExternalAccessIntegrationsNormalizedInAnyOrder(externalAccessIntegration). HasExactlySecrets(map[string]sdk.SchemaObjectIdentifier{"abc": secretId}). + ContainsExactlySecrets(map[string]sdk.SchemaObjectIdentifier{"abc": secretId}). HasImports(fmt.Sprintf(`[%s]`, tmpJavaFunction.JarLocation())). HasExactlyImportsNormalizedInAnyOrder(sdk.NormalizedPath{ StageLocation: "~", PathOnStage: tmpJavaFunction.JarName, @@ -1261,6 +1292,7 @@ func TestInt_Functions(t *testing.T) { HasHandler(handler). HasRuntimeVersion("2.12"). HasPackages(`[com.snowflake:snowpark:1.14.0,com.snowflake:telemetry:0.1.0]`). + HasExactlyPackagesInAnyOrder("com.snowflake:snowpark:1.14.0", "com.snowflake:telemetry:0.1.0"). HasTargetPathNil(). HasNormalizedTargetPathNil(). HasInstalledPackagesNil(). 
@@ -1326,12 +1358,14 @@ func TestInt_Functions(t *testing.T) { HasNullHandlingNil(). HasVolatilityNil(). HasExternalAccessIntegrationsNil(). + HasExactlyExternalAccessIntegrationsNormalizedInAnyOrder(). HasSecretsNil(). HasImportsNil(). HasExactlyImportsNormalizedInAnyOrder(). HasHandlerNil(). HasRuntimeVersionNil(). HasPackagesNil(). + HasExactlyPackagesInAnyOrder(). HasTargetPathNil(). HasNormalizedTargetPathNil(). HasInstalledPackagesNil(). @@ -1434,12 +1468,14 @@ func TestInt_Functions(t *testing.T) { // HasVolatility(string(sdk.ReturnResultsBehaviorImmutable)). HasVolatilityNil(). HasExternalAccessIntegrationsNil(). + HasExactlyExternalAccessIntegrationsNormalizedInAnyOrder(). HasSecretsNil(). HasImportsNil(). HasExactlyImportsNormalizedInAnyOrder(). HasHandlerNil(). HasRuntimeVersionNil(). HasPackagesNil(). + HasExactlyPackagesInAnyOrder(). HasTargetPathNil(). HasNormalizedTargetPathNil(). HasInstalledPackagesNil(). @@ -1502,12 +1538,14 @@ func TestInt_Functions(t *testing.T) { HasNullHandlingNil(). HasVolatilityNil(). HasExternalAccessIntegrationsNil(). + HasExactlyExternalAccessIntegrationsNormalizedInAnyOrder(). HasSecretsNil(). HasImportsNil(). HasExactlyImportsNormalizedInAnyOrder(). HasHandlerNil(). HasRuntimeVersionNil(). HasPackagesNil(). + HasExactlyPackagesInAnyOrder(). HasTargetPathNil(). HasNormalizedTargetPathNil(). HasInstalledPackagesNil(). @@ -1581,6 +1619,7 @@ func TestInt_Functions(t *testing.T) { assertions.AssertThatObject(t, objectassert.FunctionDetails(t, id). HasExternalAccessIntegrationsNil(). + HasExactlyExternalAccessIntegrationsNormalizedInAnyOrder(). HasSecretsNil(), ) @@ -1609,7 +1648,9 @@ func TestInt_Functions(t *testing.T) { assertions.AssertThatObject(t, objectassert.FunctionDetails(t, id). HasExactlyExternalAccessIntegrations(externalAccessIntegration). - HasExactlySecrets(map[string]sdk.SchemaObjectIdentifier{"abc": secretId}), + HasExactlyExternalAccessIntegrationsNormalizedInAnyOrder(externalAccessIntegration). 
+ HasExactlySecrets(map[string]sdk.SchemaObjectIdentifier{"abc": secretId}). + ContainsExactlySecrets(map[string]sdk.SchemaObjectIdentifier{"abc": secretId}), ) assertParametersSet(t, objectparametersassert.FunctionParameters(t, id)) @@ -1636,6 +1677,7 @@ func TestInt_Functions(t *testing.T) { assertions.AssertThatObject(t, objectassert.FunctionDetails(t, id). HasExternalAccessIntegrationsNil(). + HasExactlyExternalAccessIntegrationsNormalizedInAnyOrder(). // TODO [SNOW-1850370]: apparently UNSET external access integrations cleans out secrets in the describe but leaves it in SHOW HasSecretsNil(), ) diff --git a/pkg/sdk/testint/procedures_integration_test.go b/pkg/sdk/testint/procedures_integration_test.go index c5434d6308..e8b54a9a4d 100644 --- a/pkg/sdk/testint/procedures_integration_test.go +++ b/pkg/sdk/testint/procedures_integration_test.go @@ -107,12 +107,15 @@ func TestInt_Procedures(t *testing.T) { HasNullHandling(string(sdk.NullInputBehaviorCalledOnNullInput)). HasVolatility(string(sdk.ReturnResultsBehaviorVolatile)). HasExternalAccessIntegrationsNil(). + HasExactlyExternalAccessIntegrationsNormalizedInAnyOrder(). HasSecretsNil(). HasImports(`[]`). HasExactlyImportsNormalizedInAnyOrder(). HasHandler(handler). HasRuntimeVersion("11"). HasPackages(`[com.snowflake:snowpark:1.14.0]`). + HasExactlyPackagesInAnyOrder(). + HasSnowparkVersion("1.14.0"). HasTargetPathNil(). HasNormalizedTargetPathNil(). HasInstalledPackagesNil(). @@ -197,7 +200,9 @@ func TestInt_Procedures(t *testing.T) { HasNullHandling(string(sdk.NullInputBehaviorReturnsNullInput)). HasVolatility(string(sdk.ReturnResultsBehaviorImmutable)). HasExactlyExternalAccessIntegrations(externalAccessIntegration). + HasExactlyExternalAccessIntegrationsNormalizedInAnyOrder(externalAccessIntegration). HasExactlySecrets(map[string]sdk.SchemaObjectIdentifier{"abc": secretId}). + ContainsExactlySecrets(map[string]sdk.SchemaObjectIdentifier{"abc": secretId}). 
HasImports(fmt.Sprintf(`[%s]`, tmpJavaProcedure.JarLocation())). HasExactlyImportsNormalizedInAnyOrder(sdk.NormalizedPath{ StageLocation: "~", PathOnStage: tmpJavaProcedure.JarName, @@ -205,6 +210,8 @@ func TestInt_Procedures(t *testing.T) { HasHandler(handler). HasRuntimeVersion("11"). HasPackages(`[com.snowflake:snowpark:1.14.0,com.snowflake:telemetry:0.1.0]`). + HasExactlyPackagesInAnyOrder("com.snowflake:telemetry:0.1.0"). + HasSnowparkVersion("1.14.0"). HasTargetPath(targetPath). HasNormalizedTargetPath("~", jarName). HasInstalledPackagesNil(). @@ -273,6 +280,7 @@ func TestInt_Procedures(t *testing.T) { HasNullHandling(string(sdk.NullInputBehaviorCalledOnNullInput)). HasVolatility(string(sdk.ReturnResultsBehaviorVolatile)). HasExternalAccessIntegrationsNil(). + HasExactlyExternalAccessIntegrationsNormalizedInAnyOrder(). HasSecretsNil(). HasImports(fmt.Sprintf(`[%s]`, importPath)). HasExactlyImportsNormalizedInAnyOrder(sdk.NormalizedPath{ @@ -281,6 +289,8 @@ func TestInt_Procedures(t *testing.T) { HasHandler(handler). HasRuntimeVersion("11"). HasPackages(`[com.snowflake:snowpark:1.14.0,com.snowflake:telemetry:0.1.0]`). + HasExactlyPackagesInAnyOrder("com.snowflake:telemetry:0.1.0"). + HasSnowparkVersion("1.14.0"). HasTargetPathNil(). HasNormalizedTargetPathNil(). HasInstalledPackagesNil(). @@ -356,7 +366,9 @@ func TestInt_Procedures(t *testing.T) { HasNullHandling(string(sdk.NullInputBehaviorReturnsNullInput)). HasVolatility(string(sdk.ReturnResultsBehaviorImmutable)). HasExactlyExternalAccessIntegrations(externalAccessIntegration). + HasExactlyExternalAccessIntegrationsNormalizedInAnyOrder(externalAccessIntegration). HasExactlySecrets(map[string]sdk.SchemaObjectIdentifier{"abc": secretId}). + ContainsExactlySecrets(map[string]sdk.SchemaObjectIdentifier{"abc": secretId}). HasImports(fmt.Sprintf(`[%s]`, tmpJavaProcedure.JarLocation())). 
HasExactlyImportsNormalizedInAnyOrder(sdk.NormalizedPath{ StageLocation: "~", PathOnStage: tmpJavaProcedure.JarName, @@ -364,6 +376,8 @@ func TestInt_Procedures(t *testing.T) { HasHandler(handler). HasRuntimeVersion("11"). HasPackages(`[com.snowflake:snowpark:1.14.0,com.snowflake:telemetry:0.1.0]`). + HasExactlyPackagesInAnyOrder("com.snowflake:telemetry:0.1.0"). + HasSnowparkVersion("1.14.0"). HasTargetPathNil(). HasNormalizedTargetPathNil(). HasInstalledPackagesNil(). @@ -500,6 +514,7 @@ func TestInt_Procedures(t *testing.T) { HasNullHandling(string(sdk.NullInputBehaviorCalledOnNullInput)). HasVolatility(string(sdk.ReturnResultsBehaviorVolatile)). HasExternalAccessIntegrationsNil(). + HasExactlyExternalAccessIntegrationsNormalizedInAnyOrder(). HasSecretsNil(). HasImportsNil(). HasExactlyImportsNormalizedInAnyOrder(). @@ -572,6 +587,7 @@ func TestInt_Procedures(t *testing.T) { HasNullHandling(string(sdk.NullInputBehaviorReturnsNullInput)). HasVolatility(string(sdk.ReturnResultsBehaviorImmutable)). HasExternalAccessIntegrationsNil(). + HasExactlyExternalAccessIntegrationsNormalizedInAnyOrder(). HasSecretsNil(). HasImportsNil(). HasExactlyImportsNormalizedInAnyOrder(). @@ -644,12 +660,15 @@ func TestInt_Procedures(t *testing.T) { HasNullHandling(string(sdk.NullInputBehaviorCalledOnNullInput)). HasVolatility(string(sdk.ReturnResultsBehaviorVolatile)). HasExternalAccessIntegrationsNil(). + HasExactlyExternalAccessIntegrationsNormalizedInAnyOrder(). HasSecretsNil(). HasImports(`[]`). HasExactlyImportsNormalizedInAnyOrder(). HasHandler(funcName). HasRuntimeVersion("3.8"). HasPackages(`['snowflake-snowpark-python==1.14.0']`). + HasExactlyPackagesInAnyOrder(). + HasSnowparkVersion("1.14.0"). HasTargetPathNil(). HasNormalizedTargetPathNil(). HasInstalledPackagesNotEmpty(). @@ -728,7 +747,9 @@ func TestInt_Procedures(t *testing.T) { HasNullHandling(string(sdk.NullInputBehaviorReturnsNullInput)). HasVolatility(string(sdk.ReturnResultsBehaviorImmutable)). 
HasExactlyExternalAccessIntegrations(externalAccessIntegration). + HasExactlyExternalAccessIntegrationsNormalizedInAnyOrder(externalAccessIntegration). HasExactlySecrets(map[string]sdk.SchemaObjectIdentifier{"abc": secretId}). + ContainsExactlySecrets(map[string]sdk.SchemaObjectIdentifier{"abc": secretId}). HasImports(fmt.Sprintf(`[%s]`, tmpPythonFunction.PythonModuleLocation())). HasExactlyImportsNormalizedInAnyOrder(sdk.NormalizedPath{ StageLocation: "~", PathOnStage: tmpPythonFunction.PythonFileName(), @@ -736,6 +757,8 @@ func TestInt_Procedures(t *testing.T) { HasHandler(funcName). HasRuntimeVersion("3.8"). HasPackages(`['snowflake-snowpark-python==1.14.0','absl-py==0.10.0']`). + HasExactlyPackagesInAnyOrder("absl-py==0.10.0"). + HasSnowparkVersion("1.14.0"). HasTargetPathNil(). HasNormalizedTargetPathNil(). HasInstalledPackagesNotEmpty(). @@ -800,6 +823,7 @@ func TestInt_Procedures(t *testing.T) { HasNullHandling(string(sdk.NullInputBehaviorCalledOnNullInput)). HasVolatility(string(sdk.ReturnResultsBehaviorVolatile)). HasExternalAccessIntegrationsNil(). + HasExactlyExternalAccessIntegrationsNormalizedInAnyOrder(). HasSecretsNil(). HasImports(fmt.Sprintf(`[%s]`, tmpPythonFunction.PythonModuleLocation())). HasExactlyImportsNormalizedInAnyOrder(sdk.NormalizedPath{ @@ -808,6 +832,8 @@ func TestInt_Procedures(t *testing.T) { HasHandler(tmpPythonFunction.PythonHandler()). HasRuntimeVersion("3.8"). HasPackages(`['snowflake-snowpark-python==1.14.0']`). + HasExactlyPackagesInAnyOrder(). + HasSnowparkVersion("1.14.0"). HasTargetPathNil(). HasNormalizedTargetPathNil(). HasInstalledPackagesNotEmpty(). @@ -883,7 +909,9 @@ func TestInt_Procedures(t *testing.T) { HasNullHandling(string(sdk.NullInputBehaviorReturnsNullInput)). HasVolatility(string(sdk.ReturnResultsBehaviorImmutable)). HasExactlyExternalAccessIntegrations(externalAccessIntegration). + HasExactlyExternalAccessIntegrationsNormalizedInAnyOrder(externalAccessIntegration). 
HasExactlySecrets(map[string]sdk.SchemaObjectIdentifier{"abc": secretId}). + ContainsExactlySecrets(map[string]sdk.SchemaObjectIdentifier{"abc": secretId}). HasImports(fmt.Sprintf(`[%s]`, tmpPythonFunction.PythonModuleLocation())). HasExactlyImportsNormalizedInAnyOrder(sdk.NormalizedPath{ StageLocation: "~", PathOnStage: tmpPythonFunction.PythonFileName(), @@ -891,6 +919,8 @@ func TestInt_Procedures(t *testing.T) { HasHandler(tmpPythonFunction.PythonHandler()). HasRuntimeVersion("3.8"). HasPackages(`['snowflake-snowpark-python==1.14.0','absl-py==0.10.0']`). + HasExactlyPackagesInAnyOrder("absl-py==0.10.0"). + HasSnowparkVersion("1.14.0"). HasTargetPathNil(). HasNormalizedTargetPathNil(). HasInstalledPackagesNotEmpty(). @@ -959,12 +989,15 @@ func TestInt_Procedures(t *testing.T) { HasNullHandling(string(sdk.NullInputBehaviorCalledOnNullInput)). HasVolatility(string(sdk.ReturnResultsBehaviorVolatile)). HasExternalAccessIntegrationsNil(). + HasExactlyExternalAccessIntegrationsNormalizedInAnyOrder(). HasSecretsNil(). HasImports(`[]`). HasExactlyImportsNormalizedInAnyOrder(). HasHandler(handler). HasRuntimeVersion("2.12"). HasPackages(`[com.snowflake:snowpark:1.14.0]`). + HasExactlyPackagesInAnyOrder(). + HasSnowparkVersion("1.14.0"). HasTargetPathNil(). HasNormalizedTargetPathNil(). HasInstalledPackagesNil(). @@ -1050,7 +1083,9 @@ func TestInt_Procedures(t *testing.T) { HasNullHandling(string(sdk.NullInputBehaviorReturnsNullInput)). HasVolatility(string(sdk.ReturnResultsBehaviorImmutable)). HasExactlyExternalAccessIntegrations(externalAccessIntegration). + HasExactlyExternalAccessIntegrationsNormalizedInAnyOrder(externalAccessIntegration). HasExactlySecrets(map[string]sdk.SchemaObjectIdentifier{"abc": secretId}). + ContainsExactlySecrets(map[string]sdk.SchemaObjectIdentifier{"abc": secretId}). HasImports(fmt.Sprintf(`[%s]`, tmpJavaProcedure.JarLocation())). 
HasExactlyImportsNormalizedInAnyOrder(sdk.NormalizedPath{ StageLocation: "~", PathOnStage: tmpJavaProcedure.JarName, @@ -1058,6 +1093,8 @@ func TestInt_Procedures(t *testing.T) { HasHandler(handler). HasRuntimeVersion("2.12"). HasPackages(`[com.snowflake:snowpark:1.14.0,com.snowflake:telemetry:0.1.0]`). + HasExactlyPackagesInAnyOrder("com.snowflake:telemetry:0.1.0"). + HasSnowparkVersion("1.14.0"). HasTargetPath(targetPath). HasNormalizedTargetPath("~", jarName). HasInstalledPackagesNil(). @@ -1123,6 +1160,7 @@ func TestInt_Procedures(t *testing.T) { HasNullHandling(string(sdk.NullInputBehaviorCalledOnNullInput)). HasVolatility(string(sdk.ReturnResultsBehaviorVolatile)). HasExternalAccessIntegrationsNil(). + HasExactlyExternalAccessIntegrationsNormalizedInAnyOrder(). HasSecretsNil(). HasImports(fmt.Sprintf(`[%s]`, importPath)). HasExactlyImportsNormalizedInAnyOrder(sdk.NormalizedPath{ @@ -1131,6 +1169,8 @@ func TestInt_Procedures(t *testing.T) { HasHandler(handler). HasRuntimeVersion("2.12"). HasPackages(`[com.snowflake:snowpark:1.14.0]`). + HasExactlyPackagesInAnyOrder(). + HasSnowparkVersion("1.14.0"). HasTargetPathNil(). HasNormalizedTargetPathNil(). HasInstalledPackagesNil(). @@ -1208,7 +1248,9 @@ func TestInt_Procedures(t *testing.T) { HasNullHandling(string(sdk.NullInputBehaviorReturnsNullInput)). HasVolatility(string(sdk.ReturnResultsBehaviorImmutable)). HasExactlyExternalAccessIntegrations(externalAccessIntegration). + HasExactlyExternalAccessIntegrationsNormalizedInAnyOrder(externalAccessIntegration). HasExactlySecrets(map[string]sdk.SchemaObjectIdentifier{"abc": secretId}). + ContainsExactlySecrets(map[string]sdk.SchemaObjectIdentifier{"abc": secretId}). HasImports(fmt.Sprintf(`[%s]`, tmpJavaProcedure.JarLocation())). HasExactlyImportsNormalizedInAnyOrder(sdk.NormalizedPath{ StageLocation: "~", PathOnStage: tmpJavaProcedure.JarName, @@ -1216,6 +1258,8 @@ func TestInt_Procedures(t *testing.T) { HasHandler(handler). HasRuntimeVersion("2.12"). 
HasPackages(`[com.snowflake:snowpark:1.14.0,com.snowflake:telemetry:0.1.0]`). + HasExactlyPackagesInAnyOrder("com.snowflake:telemetry:0.1.0"). + HasSnowparkVersion("1.14.0"). HasTargetPathNil(). HasNormalizedTargetPathNil(). HasInstalledPackagesNil(). @@ -1277,6 +1321,7 @@ func TestInt_Procedures(t *testing.T) { HasNullHandlingNil(). HasVolatilityNil(). HasExternalAccessIntegrationsNil(). + HasExactlyExternalAccessIntegrationsNormalizedInAnyOrder(). HasSecretsNil(). HasImportsNil(). HasExactlyImportsNormalizedInAnyOrder(). @@ -1382,6 +1427,7 @@ func TestInt_Procedures(t *testing.T) { HasVolatilityNil(). HasVolatilityNil(). HasExternalAccessIntegrationsNil(). + HasExactlyExternalAccessIntegrationsNormalizedInAnyOrder(). HasSecretsNil(). HasImportsNil(). HasExactlyImportsNormalizedInAnyOrder(). @@ -1446,6 +1492,7 @@ func TestInt_Procedures(t *testing.T) { HasNullHandlingNil(). HasVolatilityNil(). HasExternalAccessIntegrationsNil(). + HasExactlyExternalAccessIntegrationsNormalizedInAnyOrder(). HasSecretsNil(). HasImportsNil(). HasExactlyImportsNormalizedInAnyOrder(). @@ -1828,6 +1875,7 @@ def filter_by_role(session, table_name, role): assertions.AssertThatObject(t, objectassert.ProcedureDetails(t, id). HasExternalAccessIntegrationsNil(). + HasExactlyExternalAccessIntegrationsNormalizedInAnyOrder(). HasSecretsNil(), ) @@ -1858,7 +1906,9 @@ def filter_by_role(session, table_name, role): assertions.AssertThatObject(t, objectassert.ProcedureDetails(t, id). HasExactlyExternalAccessIntegrations(externalAccessIntegration). - HasExactlySecrets(map[string]sdk.SchemaObjectIdentifier{"abc": secretId}), + HasExactlyExternalAccessIntegrationsNormalizedInAnyOrder(externalAccessIntegration). + HasExactlySecrets(map[string]sdk.SchemaObjectIdentifier{"abc": secretId}). 
+ ContainsExactlySecrets(map[string]sdk.SchemaObjectIdentifier{"abc": secretId}), ) assertParametersSet(t, objectparametersassert.ProcedureParameters(t, id)) @@ -1886,6 +1936,7 @@ def filter_by_role(session, table_name, role): assertions.AssertThatObject(t, objectassert.ProcedureDetails(t, id). HasExternalAccessIntegrationsNil(). + HasExactlyExternalAccessIntegrationsNormalizedInAnyOrder(). // TODO [SNOW-1850370]: apparently UNSET external access integrations cleans out secrets in the describe but leaves it in SHOW HasSecretsNil(), ) From 7a6f68df2fb0a0a4696a5442569344039a839c27 Mon Sep 17 00:00:00 2001 From: Artur Sawicki Date: Thu, 12 Dec 2024 15:21:07 +0100 Subject: [PATCH 05/20] feat: Add all other functions and procedures implementations (#3275) Mimic the implementation in all other functions and procedures Next PRs: - docs + migration guide - import - acceptance tests - missing aggregate, return not null - validations --- docs/resources/function_sql.md | 1 - pkg/resources/function_commons.go | 110 +++++++++++++++++++++++- pkg/resources/function_java.go | 97 +-------------------- pkg/resources/function_javascript.go | 101 ++++++++++++++++++++-- pkg/resources/function_python.go | 112 ++++++++++++++++++++++-- pkg/resources/function_scala.go | 115 +++++++++++++++++++++++-- pkg/resources/function_sql.go | 100 ++++++++++++++++++++-- pkg/resources/procedure_commons.go | 99 ++++++++++++++++++++++ pkg/resources/procedure_java.go | 73 +--------------- pkg/resources/procedure_javascript.go | 97 +++++++++++++++++++-- pkg/resources/procedure_python.go | 115 +++++++++++++++++++++++-- pkg/resources/procedure_scala.go | 117 ++++++++++++++++++++++++-- pkg/resources/procedure_sql.go | 96 +++++++++++++++++++-- 13 files changed, 1002 insertions(+), 231 deletions(-) diff --git a/docs/resources/function_sql.md b/docs/resources/function_sql.md index 4a48191740..bb4e772742 100644 --- a/docs/resources/function_sql.md +++ b/docs/resources/function_sql.md @@ -30,7 +30,6 @@ Resource 
used to manage sql function objects. For more information, check [funct - `is_secure` (String) Specifies that the function is secure. By design, the Snowflake's `SHOW FUNCTIONS` command does not provide information about secure functions (consult [function docs](https://docs.snowflake.com/en/sql-reference/sql/create-function#id1) and [Protecting Sensitive Information with Secure UDFs and Stored Procedures](https://docs.snowflake.com/en/developer-guide/secure-udf-procedure)) which is essential to manage/import function with Terraform. Use the role owning the function while managing secure functions. Available options are: "true" or "false". When the value is not set in the configuration the provider will put "default" there which means to use the Snowflake default for this value. - `log_level` (String) LOG_LEVEL to use when filtering events For more information, check [LOG_LEVEL docs](https://docs.snowflake.com/en/sql-reference/parameters#log-level). - `metric_level` (String) METRIC_LEVEL value to control whether to emit metrics to Event Table For more information, check [METRIC_LEVEL docs](https://docs.snowflake.com/en/sql-reference/parameters#metric-level). -- `null_input_behavior` (String) Specifies the behavior of the function when called with null inputs. Valid values are (case-insensitive): `CALLED ON NULL INPUT` | `RETURNS NULL ON NULL INPUT`. - `return_results_behavior` (String) Specifies the behavior of the function when returning results. Valid values are (case-insensitive): `VOLATILE` | `IMMUTABLE`. - `trace_level` (String) Trace level value to use when generating/filtering trace events For more information, check [TRACE_LEVEL docs](https://docs.snowflake.com/en/sql-reference/parameters#trace-level). 
diff --git a/pkg/resources/function_commons.go b/pkg/resources/function_commons.go index 7dddd097e7..12ea55bd73 100644 --- a/pkg/resources/function_commons.go +++ b/pkg/resources/function_commons.go @@ -5,8 +5,10 @@ import ( "errors" "fmt" "log" + "reflect" "slices" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/helpers" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/internal/provider" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/schemas" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk" @@ -86,7 +88,6 @@ var ( "is_secure", "arguments", "return_type", - "null_input_behavior", "return_results_behavior", "comment", "function_definition", @@ -98,6 +99,7 @@ var ( javaFunctionSchemaDefinition = functionSchemaDef{ additionalArguments: []string{ "runtime_version", + "null_input_behavior", "imports", "packages", "handler", @@ -115,7 +117,9 @@ var ( targetPathDescription: "The TARGET_PATH clause specifies the location to which Snowflake should write the compiled code (JAR file) after compiling the source code specified in the `function_definition`. If this clause is included, the user should manually remove the JAR file when it is no longer needed (typically when the Java UDF is dropped). If this clause is omitted, Snowflake re-compiles the source code each time the code is needed. The JAR file is not stored permanently, and the user does not need to clean up the JAR file. 
Snowflake returns an error if the TARGET_PATH matches an existing file; you cannot use TARGET_PATH to overwrite an existing file.", } javascriptFunctionSchemaDefinition = functionSchemaDef{ - additionalArguments: []string{}, + additionalArguments: []string{ + "null_input_behavior", + }, functionDefinitionDescription: functionDefinitionTemplate("JavaScript", "https://docs.snowflake.com/en/developer-guide/udf/javascript/udf-javascript-introduction"), functionDefinitionRequired: true, } @@ -123,6 +127,7 @@ var ( additionalArguments: []string{ "is_aggregate", "runtime_version", + "null_input_behavior", "imports", "packages", "handler", @@ -139,6 +144,7 @@ var ( scalaFunctionSchemaDefinition = functionSchemaDef{ additionalArguments: []string{ "runtime_version", + "null_input_behavior", "imports", "packages", "handler", @@ -405,6 +411,106 @@ func DeleteFunction(ctx context.Context, d *schema.ResourceData, meta any) diag. return nil } +func UpdateFunction(language string, readFunc func(ctx context.Context, d *schema.ResourceData, meta any) diag.Diagnostics) func(ctx context.Context, d *schema.ResourceData, meta any) diag.Diagnostics { + return func(ctx context.Context, d *schema.ResourceData, meta any) diag.Diagnostics { + client := meta.(*provider.Context).Client + id, err := sdk.ParseSchemaObjectIdentifierWithArguments(d.Id()) + if err != nil { + return diag.FromErr(err) + } + + if d.HasChange("name") { + newId := sdk.NewSchemaObjectIdentifierWithArgumentsInSchema(id.SchemaId(), d.Get("name").(string), id.ArgumentDataTypes()...) 
+ + err := client.Functions.Alter(ctx, sdk.NewAlterFunctionRequest(id).WithRenameTo(newId.SchemaObjectId())) + if err != nil { + return diag.FromErr(fmt.Errorf("error renaming function %v err = %w", d.Id(), err)) + } + + d.SetId(helpers.EncodeResourceIdentifier(newId)) + id = newId + } + + // Batch SET operations and UNSET operations + setRequest := sdk.NewFunctionSetRequest() + unsetRequest := sdk.NewFunctionUnsetRequest() + + _ = stringAttributeUpdate(d, "comment", &setRequest.Comment, &unsetRequest.Comment) + + switch language { + case "JAVA", "SCALA", "PYTHON": + err = errors.Join( + func() error { + if d.HasChange("secrets") { + return setSecretsInBuilder(d, func(references []sdk.SecretReference) *sdk.FunctionSetRequest { + return setRequest.WithSecretsList(sdk.SecretsListRequest{SecretsList: references}) + }) + } + return nil + }(), + func() error { + if d.HasChange("external_access_integrations") { + return setExternalAccessIntegrationsInBuilder(d, func(references []sdk.AccountObjectIdentifier) any { + if len(references) == 0 { + return unsetRequest.WithExternalAccessIntegrations(true) + } else { + return setRequest.WithExternalAccessIntegrations(references) + } + }) + } + return nil + }(), + ) + if err != nil { + return diag.FromErr(err) + } + } + + if updateParamDiags := handleFunctionParametersUpdate(d, setRequest, unsetRequest); len(updateParamDiags) > 0 { + return updateParamDiags + } + + // Apply SET and UNSET changes + if !reflect.DeepEqual(*setRequest, *sdk.NewFunctionSetRequest()) { + err := client.Functions.Alter(ctx, sdk.NewAlterFunctionRequest(id).WithSet(*setRequest)) + if err != nil { + d.Partial(true) + return diag.FromErr(err) + } + } + if !reflect.DeepEqual(*unsetRequest, *sdk.NewFunctionUnsetRequest()) { + err := client.Functions.Alter(ctx, sdk.NewAlterFunctionRequest(id).WithUnset(*unsetRequest)) + if err != nil { + d.Partial(true) + return diag.FromErr(err) + } + } + + // has to be handled separately + if d.HasChange("is_secure") { + if v 
:= d.Get("is_secure").(string); v != BooleanDefault { + parsed, err := booleanStringToBool(v) + if err != nil { + return diag.FromErr(err) + } + err = client.Functions.Alter(ctx, sdk.NewAlterFunctionRequest(id).WithSetSecure(parsed)) + if err != nil { + d.Partial(true) + return diag.FromErr(err) + } + } else { + err := client.Functions.Alter(ctx, sdk.NewAlterFunctionRequest(id).WithUnsetSecure(true)) + if err != nil { + d.Partial(true) + return diag.FromErr(err) + } + } + } + + return readFunc(ctx, d, meta) + } +} + // TODO [SNOW-1850370]: Make the rest of the functions in this file generic (for reuse with procedures) func parseFunctionArgumentsCommon(d *schema.ResourceData) ([]sdk.FunctionArgumentRequest, error) { args := make([]sdk.FunctionArgumentRequest, 0) diff --git a/pkg/resources/function_java.go b/pkg/resources/function_java.go index e085fc0c97..4dae73f94c 100644 --- a/pkg/resources/function_java.go +++ b/pkg/resources/function_java.go @@ -3,7 +3,6 @@ package resources import ( "context" "errors" - "fmt" "reflect" "strings" @@ -23,7 +22,7 @@ func FunctionJava() *schema.Resource { return &schema.Resource{ CreateContext: TrackingCreateWrapper(resources.FunctionJava, CreateContextFunctionJava), ReadContext: TrackingReadWrapper(resources.FunctionJava, ReadContextFunctionJava), - UpdateContext: TrackingUpdateWrapper(resources.FunctionJava, UpdateContextFunctionJava), + UpdateContext: TrackingUpdateWrapper(resources.FunctionJava, UpdateFunction("JAVA", ReadContextFunctionJava)), DeleteContext: TrackingDeleteWrapper(resources.FunctionJava, DeleteFunction), Description: "Resource used to manage java function objects. 
For more information, check [function documentation](https://docs.snowflake.com/en/sql-reference/sql/create-function).", @@ -147,97 +146,3 @@ func ReadContextFunctionJava(ctx context.Context, d *schema.ResourceData, meta a return nil } - -func UpdateContextFunctionJava(ctx context.Context, d *schema.ResourceData, meta any) diag.Diagnostics { - client := meta.(*provider.Context).Client - id, err := sdk.ParseSchemaObjectIdentifierWithArguments(d.Id()) - if err != nil { - return diag.FromErr(err) - } - - if d.HasChange("name") { - newId := sdk.NewSchemaObjectIdentifierWithArgumentsInSchema(id.SchemaId(), d.Get("name").(string), id.ArgumentDataTypes()...) - - err := client.Functions.Alter(ctx, sdk.NewAlterFunctionRequest(id).WithRenameTo(newId.SchemaObjectId())) - if err != nil { - return diag.FromErr(fmt.Errorf("error renaming function %v err = %w", d.Id(), err)) - } - - d.SetId(helpers.EncodeResourceIdentifier(newId)) - id = newId - } - - // Batch SET operations and UNSET operations - setRequest := sdk.NewFunctionSetRequest() - unsetRequest := sdk.NewFunctionUnsetRequest() - - err = errors.Join( - stringAttributeUpdate(d, "comment", &setRequest.Comment, &unsetRequest.Comment), - func() error { - if d.HasChange("secrets") { - return setSecretsInBuilder(d, func(references []sdk.SecretReference) *sdk.FunctionSetRequest { - return setRequest.WithSecretsList(sdk.SecretsListRequest{SecretsList: references}) - }) - } - return nil - }(), - func() error { - if d.HasChange("external_access_integrations") { - return setExternalAccessIntegrationsInBuilder(d, func(references []sdk.AccountObjectIdentifier) any { - if len(references) == 0 { - return unsetRequest.WithExternalAccessIntegrations(true) - } else { - return setRequest.WithExternalAccessIntegrations(references) - } - }) - } - return nil - }(), - ) - if err != nil { - return diag.FromErr(err) - } - - if updateParamDiags := handleFunctionParametersUpdate(d, setRequest, unsetRequest); len(updateParamDiags) > 0 { - return 
updateParamDiags - } - - // Apply SET and UNSET changes - if !reflect.DeepEqual(*setRequest, *sdk.NewFunctionSetRequest()) { - err := client.Functions.Alter(ctx, sdk.NewAlterFunctionRequest(id).WithSet(*setRequest)) - if err != nil { - d.Partial(true) - return diag.FromErr(err) - } - } - if !reflect.DeepEqual(*unsetRequest, *sdk.NewFunctionUnsetRequest()) { - err := client.Functions.Alter(ctx, sdk.NewAlterFunctionRequest(id).WithUnset(*unsetRequest)) - if err != nil { - d.Partial(true) - return diag.FromErr(err) - } - } - - // has to be handled separately - if d.HasChange("is_secure") { - if v := d.Get("is_secure").(string); v != BooleanDefault { - parsed, err := booleanStringToBool(v) - if err != nil { - return diag.FromErr(err) - } - err = client.Functions.Alter(ctx, sdk.NewAlterFunctionRequest(id).WithSetSecure(parsed)) - if err != nil { - d.Partial(true) - return diag.FromErr(err) - } - } else { - err := client.Functions.Alter(ctx, sdk.NewAlterFunctionRequest(id).WithUnsetSecure(true)) - if err != nil { - d.Partial(true) - return diag.FromErr(err) - } - } - } - - return ReadContextFunctionJava(ctx, d, meta) -} diff --git a/pkg/resources/function_javascript.go b/pkg/resources/function_javascript.go index 0ba7e955b7..fe0884dd67 100644 --- a/pkg/resources/function_javascript.go +++ b/pkg/resources/function_javascript.go @@ -2,11 +2,17 @@ package resources import ( "context" + "errors" + "reflect" "strings" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/helpers" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/internal/collections" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/internal/provider" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/provider/resources" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/schemas" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk/datatypes" 
"github.com/hashicorp/terraform-plugin-sdk/v2/diag" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/customdiff" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" @@ -16,7 +22,7 @@ func FunctionJavascript() *schema.Resource { return &schema.Resource{ CreateContext: TrackingCreateWrapper(resources.FunctionJavascript, CreateContextFunctionJavascript), ReadContext: TrackingReadWrapper(resources.FunctionJavascript, ReadContextFunctionJavascript), - UpdateContext: TrackingUpdateWrapper(resources.FunctionJavascript, UpdateContextFunctionJavascript), + UpdateContext: TrackingUpdateWrapper(resources.FunctionJavascript, UpdateFunction("JAVASCRIPT", ReadContextFunctionJavascript)), DeleteContext: TrackingDeleteWrapper(resources.FunctionJavascript, DeleteFunction), Description: "Resource used to manage javascript function objects. For more information, check [function documentation](https://docs.snowflake.com/en/sql-reference/sql/create-function).", @@ -25,7 +31,11 @@ func FunctionJavascript() *schema.Resource { ComputedIfAnyAttributeChanged(javascriptFunctionSchema, FullyQualifiedNameAttributeName, "name"), ComputedIfAnyAttributeChanged(functionParametersSchema, ParametersAttributeName, collections.Map(sdk.AsStringList(sdk.AllFunctionParameters), strings.ToLower)...), functionParametersCustomDiff, - // TODO[SNOW-1348103]: recreate when type changed externally + // The language check is more for the future. + // Currently, almost all attributes are marked as forceNew. + // When language changes, these attributes also change, causing the object to recreate either way. + // The only potential option is java staged <-> scala staged (however scala need runtime_version which may interfere). 
+ RecreateWhenResourceStringFieldChangedExternally("function_language", "JAVASCRIPT"), )), Schema: collections.MergeMaps(javascriptFunctionSchema, functionParametersSchema), @@ -36,17 +46,90 @@ func FunctionJavascript() *schema.Resource { } func CreateContextFunctionJavascript(ctx context.Context, d *schema.ResourceData, meta any) diag.Diagnostics { - return nil + client := meta.(*provider.Context).Client + database := d.Get("database").(string) + sc := d.Get("schema").(string) + name := d.Get("name").(string) + + argumentRequests, err := parseFunctionArgumentsCommon(d) + if err != nil { + return diag.FromErr(err) + } + returns, err := parseFunctionReturnsCommon(d) + if err != nil { + return diag.FromErr(err) + } + functionDefinition := d.Get("function_definition").(string) + + argumentDataTypes := collections.Map(argumentRequests, func(r sdk.FunctionArgumentRequest) datatypes.DataType { return r.ArgDataType }) + id := sdk.NewSchemaObjectIdentifierWithArgumentsNormalized(database, sc, name, argumentDataTypes...) + request := sdk.NewCreateForJavascriptFunctionRequestDefinitionWrapped(id.SchemaObjectId(), *returns, functionDefinition). 
+ WithArguments(argumentRequests) + + errs := errors.Join( + booleanStringAttributeCreateBuilder(d, "is_secure", request.WithSecure), + attributeMappedValueCreateBuilder[string](d, "null_input_behavior", request.WithNullInputBehavior, sdk.ToNullInputBehavior), + attributeMappedValueCreateBuilder[string](d, "return_results_behavior", request.WithReturnResultsBehavior, sdk.ToReturnResultsBehavior), + stringAttributeCreateBuilder(d, "comment", request.WithComment), + ) + if errs != nil { + return diag.FromErr(errs) + } + + if err := client.Functions.CreateForJavascript(ctx, request); err != nil { + return diag.FromErr(err) + } + d.SetId(helpers.EncodeResourceIdentifier(id)) + + // parameters do not work in create function (query does not fail but parameters stay unchanged) + setRequest := sdk.NewFunctionSetRequest() + if parametersCreateDiags := handleFunctionParametersCreate(d, setRequest); len(parametersCreateDiags) > 0 { + return parametersCreateDiags + } + if !reflect.DeepEqual(*setRequest, *sdk.NewFunctionSetRequest()) { + err := client.Functions.Alter(ctx, sdk.NewAlterFunctionRequest(id).WithSet(*setRequest)) + if err != nil { + return diag.FromErr(err) + } + } + + return ReadContextFunctionJavascript(ctx, d, meta) } func ReadContextFunctionJavascript(ctx context.Context, d *schema.ResourceData, meta any) diag.Diagnostics { - return nil -} + client := meta.(*provider.Context).Client + id, err := sdk.ParseSchemaObjectIdentifierWithArguments(d.Id()) + if err != nil { + return diag.FromErr(err) + } -func UpdateContextFunctionJavascript(ctx context.Context, d *schema.ResourceData, meta any) diag.Diagnostics { - return nil -} + allFunctionDetails, diags := queryAllFunctionDetailsCommon(ctx, d, client, id) + if diags != nil { + return diags + } + + // TODO [SNOW-1348103]: handle external changes marking + // TODO [SNOW-1348103]: handle setting state to value from config + + errs := errors.Join( + // not reading is_secure on purpose (handled as external change to show 
output) + readFunctionOrProcedureArguments(d, allFunctionDetails.functionDetails.NormalizedArguments), + d.Set("return_type", allFunctionDetails.functionDetails.ReturnDataType.ToSql()), + // not reading null_input_behavior on purpose (handled as external change to show output) + // not reading return_results_behavior on purpose (handled as external change to show output) + d.Set("comment", allFunctionDetails.function.Description), + setRequiredFromStringPtr(d, "handler", allFunctionDetails.functionDetails.Handler), + setOptionalFromStringPtr(d, "function_definition", allFunctionDetails.functionDetails.Body), + d.Set("function_language", allFunctionDetails.functionDetails.Language), + + handleFunctionParameterRead(d, allFunctionDetails.functionParameters), + d.Set(FullyQualifiedNameAttributeName, id.FullyQualifiedName()), + d.Set(ShowOutputAttributeName, []map[string]any{schemas.FunctionToSchema(allFunctionDetails.function)}), + d.Set(ParametersAttributeName, []map[string]any{schemas.FunctionParametersToSchema(allFunctionDetails.functionParameters)}), + ) + if errs != nil { + return diag.FromErr(errs) + } -func DeleteContextFunctionJavascript(ctx context.Context, d *schema.ResourceData, meta any) diag.Diagnostics { return nil } diff --git a/pkg/resources/function_python.go b/pkg/resources/function_python.go index cc6c137aff..ebc3dd7259 100644 --- a/pkg/resources/function_python.go +++ b/pkg/resources/function_python.go @@ -2,11 +2,17 @@ package resources import ( "context" + "errors" + "reflect" "strings" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/helpers" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/internal/collections" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/internal/provider" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/provider/resources" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/schemas" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk" +
"github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk/datatypes" "github.com/hashicorp/terraform-plugin-sdk/v2/diag" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/customdiff" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" @@ -16,7 +22,7 @@ func FunctionPython() *schema.Resource { return &schema.Resource{ CreateContext: TrackingCreateWrapper(resources.FunctionPython, CreateContextFunctionPython), ReadContext: TrackingReadWrapper(resources.FunctionPython, ReadContextFunctionPython), - UpdateContext: TrackingUpdateWrapper(resources.FunctionPython, UpdateContextFunctionPython), + UpdateContext: TrackingUpdateWrapper(resources.FunctionPython, UpdateFunction("PYTHON", ReadContextFunctionPython)), DeleteContext: TrackingDeleteWrapper(resources.FunctionPython, DeleteFunction), Description: "Resource used to manage python function objects. For more information, check [function documentation](https://docs.snowflake.com/en/sql-reference/sql/create-function).", @@ -25,7 +31,11 @@ func FunctionPython() *schema.Resource { ComputedIfAnyAttributeChanged(pythonFunctionSchema, FullyQualifiedNameAttributeName, "name"), ComputedIfAnyAttributeChanged(functionParametersSchema, ParametersAttributeName, collections.Map(sdk.AsStringList(sdk.AllFunctionParameters), strings.ToLower)...), functionParametersCustomDiff, - // TODO[SNOW-1348103]: recreate when type changed externally + // The language check is more for the future. + // Currently, almost all attributes are marked as forceNew. + // When language changes, these attributes also change, causing the object to recreate either way. + // The only potential option is java staged <-> scala staged (however scala need runtime_version which may interfere). 
+ RecreateWhenResourceStringFieldChangedExternally("function_language", "PYTHON"), )), Schema: collections.MergeMaps(pythonFunctionSchema, functionParametersSchema), @@ -36,17 +46,101 @@ func FunctionPython() *schema.Resource { } func CreateContextFunctionPython(ctx context.Context, d *schema.ResourceData, meta any) diag.Diagnostics { - return nil + client := meta.(*provider.Context).Client + database := d.Get("database").(string) + sc := d.Get("schema").(string) + name := d.Get("name").(string) + + argumentRequests, err := parseFunctionArgumentsCommon(d) + if err != nil { + return diag.FromErr(err) + } + returns, err := parseFunctionReturnsCommon(d) + if err != nil { + return diag.FromErr(err) + } + handler := d.Get("handler").(string) + runtimeVersion := d.Get("runtime_version").(string) + + argumentDataTypes := collections.Map(argumentRequests, func(r sdk.FunctionArgumentRequest) datatypes.DataType { return r.ArgDataType }) + id := sdk.NewSchemaObjectIdentifierWithArgumentsNormalized(database, sc, name, argumentDataTypes...) + request := sdk.NewCreateForPythonFunctionRequest(id.SchemaObjectId(), *returns, runtimeVersion, handler). 
+ WithArguments(argumentRequests) + + errs := errors.Join( + booleanStringAttributeCreateBuilder(d, "is_secure", request.WithSecure), + attributeMappedValueCreateBuilder[string](d, "null_input_behavior", request.WithNullInputBehavior, sdk.ToNullInputBehavior), + attributeMappedValueCreateBuilder[string](d, "return_results_behavior", request.WithReturnResultsBehavior, sdk.ToReturnResultsBehavior), + stringAttributeCreateBuilder(d, "comment", request.WithComment), + setFunctionImportsInBuilder(d, request.WithImports), + setFunctionPackagesInBuilder(d, request.WithPackages), + setExternalAccessIntegrationsInBuilder(d, request.WithExternalAccessIntegrations), + setSecretsInBuilder(d, request.WithSecrets), + stringAttributeCreateBuilder(d, "function_definition", request.WithFunctionDefinitionWrapped), + ) + if errs != nil { + return diag.FromErr(errs) + } + + if err := client.Functions.CreateForPython(ctx, request); err != nil { + return diag.FromErr(err) + } + d.SetId(helpers.EncodeResourceIdentifier(id)) + + // parameters do not work in create function (query does not fail but parameters stay unchanged) + setRequest := sdk.NewFunctionSetRequest() + if parametersCreateDiags := handleFunctionParametersCreate(d, setRequest); len(parametersCreateDiags) > 0 { + return parametersCreateDiags + } + if !reflect.DeepEqual(*setRequest, *sdk.NewFunctionSetRequest()) { + err := client.Functions.Alter(ctx, sdk.NewAlterFunctionRequest(id).WithSet(*setRequest)) + if err != nil { + return diag.FromErr(err) + } + } + + return ReadContextFunctionPython(ctx, d, meta) } func ReadContextFunctionPython(ctx context.Context, d *schema.ResourceData, meta any) diag.Diagnostics { - return nil -} + client := meta.(*provider.Context).Client + id, err := sdk.ParseSchemaObjectIdentifierWithArguments(d.Id()) + if err != nil { + return diag.FromErr(err) + } -func UpdateContextFunctionPython(ctx context.Context, d *schema.ResourceData, meta any) diag.Diagnostics { - return nil -} + allFunctionDetails, 
diags := queryAllFunctionDetailsCommon(ctx, d, client, id) + if diags != nil { + return diags + } + + // TODO [SNOW-1348103]: handle external changes marking + // TODO [SNOW-1348103]: handle setting state to value from config + + errs := errors.Join( + // not reading is_secure on purpose (handled as external change to show output) + readFunctionOrProcedureArguments(d, allFunctionDetails.functionDetails.NormalizedArguments), + d.Set("return_type", allFunctionDetails.functionDetails.ReturnDataType.ToSql()), + // not reading null_input_behavior on purpose (handled as external change to show output) + // not reading return_results_behavior on purpose (handled as external change to show output) + setOptionalFromStringPtr(d, "runtime_version", allFunctionDetails.functionDetails.RuntimeVersion), + d.Set("comment", allFunctionDetails.function.Description), + readFunctionOrProcedureImports(d, allFunctionDetails.functionDetails.NormalizedImports), + d.Set("packages", allFunctionDetails.functionDetails.NormalizedPackages), + setRequiredFromStringPtr(d, "handler", allFunctionDetails.functionDetails.Handler), + readFunctionOrProcedureExternalAccessIntegrations(d, allFunctionDetails.functionDetails.NormalizedExternalAccessIntegrations), + readFunctionOrProcedureSecrets(d, allFunctionDetails.functionDetails.NormalizedSecrets), + setOptionalFromStringPtr(d, "function_definition", allFunctionDetails.functionDetails.Body), + d.Set("function_language", allFunctionDetails.functionDetails.Language), + + handleFunctionParameterRead(d, allFunctionDetails.functionParameters), + d.Set(FullyQualifiedNameAttributeName, id.FullyQualifiedName()), + d.Set(ShowOutputAttributeName, []map[string]any{schemas.FunctionToSchema(allFunctionDetails.function)}), + d.Set(ParametersAttributeName, []map[string]any{schemas.FunctionParametersToSchema(allFunctionDetails.functionParameters)}), + ) + if errs != nil { + return diag.FromErr(errs) + } -func DeleteContextFunctionPython(ctx context.Context, d
*schema.ResourceData, meta any) diag.Diagnostics { return nil } diff --git a/pkg/resources/function_scala.go b/pkg/resources/function_scala.go index ff2bded481..491af794b4 100644 --- a/pkg/resources/function_scala.go +++ b/pkg/resources/function_scala.go @@ -2,11 +2,17 @@ package resources import ( "context" + "errors" + "reflect" "strings" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/helpers" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/internal/collections" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/internal/provider" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/provider/resources" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/schemas" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk/datatypes" "github.com/hashicorp/terraform-plugin-sdk/v2/diag" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/customdiff" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" @@ -16,7 +22,7 @@ func FunctionScala() *schema.Resource { return &schema.Resource{ CreateContext: TrackingCreateWrapper(resources.FunctionScala, CreateContextFunctionScala), ReadContext: TrackingReadWrapper(resources.FunctionScala, ReadContextFunctionScala), - UpdateContext: TrackingUpdateWrapper(resources.FunctionScala, UpdateContextFunctionScala), + UpdateContext: TrackingUpdateWrapper(resources.FunctionScala, UpdateFunction("SCALA", ReadContextFunctionScala)), DeleteContext: TrackingDeleteWrapper(resources.FunctionScala, DeleteFunction), Description: "Resource used to manage scala function objects. 
For more information, check [function documentation](https://docs.snowflake.com/en/sql-reference/sql/create-function).", @@ -25,7 +31,11 @@ func FunctionScala() *schema.Resource { ComputedIfAnyAttributeChanged(scalaFunctionSchema, FullyQualifiedNameAttributeName, "name"), ComputedIfAnyAttributeChanged(functionParametersSchema, ParametersAttributeName, collections.Map(sdk.AsStringList(sdk.AllFunctionParameters), strings.ToLower)...), functionParametersCustomDiff, - // TODO[SNOW-1348103]: recreate when type changed externally + // The language check is more for the future. + // Currently, almost all attributes are marked as forceNew. + // When language changes, these attributes also change, causing the object to recreate either way. + // The only potential option is java staged <-> scala staged (however scala need runtime_version which may interfere). + RecreateWhenResourceStringFieldChangedExternally("function_language", "SCALA"), )), Schema: collections.MergeMaps(scalaFunctionSchema, functionParametersSchema), @@ -36,17 +46,104 @@ func FunctionScala() *schema.Resource { } func CreateContextFunctionScala(ctx context.Context, d *schema.ResourceData, meta any) diag.Diagnostics { - return nil + client := meta.(*provider.Context).Client + database := d.Get("database").(string) + sc := d.Get("schema").(string) + name := d.Get("name").(string) + + argumentRequests, err := parseFunctionArgumentsCommon(d) + if err != nil { + return diag.FromErr(err) + } + returnTypeRaw := d.Get("return_type").(string) + returnDataType, err := datatypes.ParseDataType(returnTypeRaw) + if err != nil { + return diag.FromErr(err) + } + handler := d.Get("handler").(string) + runtimeVersion := d.Get("runtime_version").(string) + + argumentDataTypes := collections.Map(argumentRequests, func(r sdk.FunctionArgumentRequest) datatypes.DataType { return r.ArgDataType }) + id := sdk.NewSchemaObjectIdentifierWithArgumentsNormalized(database, sc, name, argumentDataTypes...) 
+ request := sdk.NewCreateForScalaFunctionRequest(id.SchemaObjectId(), returnDataType, runtimeVersion, handler). + WithArguments(argumentRequests) + + errs := errors.Join( + booleanStringAttributeCreateBuilder(d, "is_secure", request.WithSecure), + attributeMappedValueCreateBuilder[string](d, "null_input_behavior", request.WithNullInputBehavior, sdk.ToNullInputBehavior), + attributeMappedValueCreateBuilder[string](d, "return_results_behavior", request.WithReturnResultsBehavior, sdk.ToReturnResultsBehavior), + stringAttributeCreateBuilder(d, "comment", request.WithComment), + setFunctionImportsInBuilder(d, request.WithImports), + setFunctionPackagesInBuilder(d, request.WithPackages), + setExternalAccessIntegrationsInBuilder(d, request.WithExternalAccessIntegrations), + setSecretsInBuilder(d, request.WithSecrets), + setFunctionTargetPathInBuilder(d, request.WithTargetPath), + stringAttributeCreateBuilder(d, "function_definition", request.WithFunctionDefinitionWrapped), + ) + if errs != nil { + return diag.FromErr(errs) + } + + if err := client.Functions.CreateForScala(ctx, request); err != nil { + return diag.FromErr(err) + } + d.SetId(helpers.EncodeResourceIdentifier(id)) + + // parameters do not work in create function (query does not fail but parameters stay unchanged) + setRequest := sdk.NewFunctionSetRequest() + if parametersCreateDiags := handleFunctionParametersCreate(d, setRequest); len(parametersCreateDiags) > 0 { + return parametersCreateDiags + } + if !reflect.DeepEqual(*setRequest, *sdk.NewFunctionSetRequest()) { + err := client.Functions.Alter(ctx, sdk.NewAlterFunctionRequest(id).WithSet(*setRequest)) + if err != nil { + return diag.FromErr(err) + } + } + + return ReadContextFunctionScala(ctx, d, meta) } func ReadContextFunctionScala(ctx context.Context, d *schema.ResourceData, meta any) diag.Diagnostics { - return nil -} + client := meta.(*provider.Context).Client + id, err := sdk.ParseSchemaObjectIdentifierWithArguments(d.Id()) + if err != nil { + 
return diag.FromErr(err) + } -func UpdateContextFunctionScala(ctx context.Context, d *schema.ResourceData, meta any) diag.Diagnostics { - return nil -} + allFunctionDetails, diags := queryAllFunctionDetailsCommon(ctx, d, client, id) + if diags != nil { + return diags + } + + // TODO [SNOW-1348103]: handle external changes marking + // TODO [SNOW-1348103]: handle setting state to value from config + + errs := errors.Join( + // not reading is_secure on purpose (handled as external change to show output) + readFunctionOrProcedureArguments(d, allFunctionDetails.functionDetails.NormalizedArguments), + d.Set("return_type", allFunctionDetails.functionDetails.ReturnDataType.ToSql()), + // not reading null_input_behavior on purpose (handled as external change to show output) + // not reading return_results_behavior on purpose (handled as external change to show output) + setOptionalFromStringPtr(d, "runtime_version", allFunctionDetails.functionDetails.RuntimeVersion), + d.Set("comment", allFunctionDetails.function.Description), + readFunctionOrProcedureImports(d, allFunctionDetails.functionDetails.NormalizedImports), + d.Set("packages", allFunctionDetails.functionDetails.NormalizedPackages), + setRequiredFromStringPtr(d, "handler", allFunctionDetails.functionDetails.Handler), + readFunctionOrProcedureExternalAccessIntegrations(d, allFunctionDetails.functionDetails.NormalizedExternalAccessIntegrations), + readFunctionOrProcedureSecrets(d, allFunctionDetails.functionDetails.NormalizedSecrets), + readFunctionOrProcedureTargetPath(d, allFunctionDetails.functionDetails.NormalizedTargetPath), + setOptionalFromStringPtr(d, "function_definition", allFunctionDetails.functionDetails.Body), + d.Set("function_language", allFunctionDetails.functionDetails.Language), + + handleFunctionParameterRead(d, allFunctionDetails.functionParameters), + d.Set(FullyQualifiedNameAttributeName, id.FullyQualifiedName()), + d.Set(ShowOutputAttributeName, 
[]map[string]any{schemas.FunctionToSchema(allFunctionDetails.function)}), + d.Set(ParametersAttributeName, []map[string]any{schemas.FunctionParametersToSchema(allFunctionDetails.functionParameters)}), + ) + if errs != nil { + return diag.FromErr(errs) + } -func DeleteContextFunctionScala(ctx context.Context, d *schema.ResourceData, meta any) diag.Diagnostics { return nil } diff --git a/pkg/resources/function_sql.go b/pkg/resources/function_sql.go index cd8cb31dc8..53694da3a1 100644 --- a/pkg/resources/function_sql.go +++ b/pkg/resources/function_sql.go @@ -2,11 +2,17 @@ package resources import ( "context" + "errors" + "reflect" "strings" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/helpers" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/internal/collections" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/internal/provider" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/provider/resources" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/schemas" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk/datatypes" "github.com/hashicorp/terraform-plugin-sdk/v2/diag" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/customdiff" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" @@ -16,7 +22,7 @@ func FunctionSql() *schema.Resource { return &schema.Resource{ CreateContext: TrackingCreateWrapper(resources.FunctionSql, CreateContextFunctionSql), ReadContext: TrackingReadWrapper(resources.FunctionSql, ReadContextFunctionSql), - UpdateContext: TrackingUpdateWrapper(resources.FunctionSql, UpdateContextFunctionSql), + UpdateContext: TrackingUpdateWrapper(resources.FunctionSql, UpdateFunction("SQL", ReadContextFunctionSql)), DeleteContext: TrackingDeleteWrapper(resources.FunctionSql, DeleteFunction), Description: "Resource used to manage sql function objects.
For more information, check [function documentation](https://docs.snowflake.com/en/sql-reference/sql/create-function).", @@ -25,7 +31,11 @@ func FunctionSql() *schema.Resource { ComputedIfAnyAttributeChanged(sqlFunctionSchema, FullyQualifiedNameAttributeName, "name"), ComputedIfAnyAttributeChanged(functionParametersSchema, ParametersAttributeName, collections.Map(sdk.AsStringList(sdk.AllFunctionParameters), strings.ToLower)...), functionParametersCustomDiff, - // TODO[SNOW-1348103]: recreate when type changed externally + // The language check is more for the future. + // Currently, almost all attributes are marked as forceNew. + // When language changes, these attributes also change, causing the object to recreate either way. + // The only potential option is java staged <-> scala staged (however scala need runtime_version which may interfere). + RecreateWhenResourceStringFieldChangedExternally("function_language", "SQL"), )), Schema: collections.MergeMaps(sqlFunctionSchema, functionParametersSchema), @@ -36,17 +46,89 @@ func FunctionSql() *schema.Resource { } func CreateContextFunctionSql(ctx context.Context, d *schema.ResourceData, meta any) diag.Diagnostics { - return nil + client := meta.(*provider.Context).Client + database := d.Get("database").(string) + sc := d.Get("schema").(string) + name := d.Get("name").(string) + + argumentRequests, err := parseFunctionArgumentsCommon(d) + if err != nil { + return diag.FromErr(err) + } + returns, err := parseFunctionReturnsCommon(d) + if err != nil { + return diag.FromErr(err) + } + functionDefinition := d.Get("function_definition").(string) + + argumentDataTypes := collections.Map(argumentRequests, func(r sdk.FunctionArgumentRequest) datatypes.DataType { return r.ArgDataType }) + id := sdk.NewSchemaObjectIdentifierWithArgumentsNormalized(database, sc, name, argumentDataTypes...) + request := sdk.NewCreateForSQLFunctionRequestDefinitionWrapped(id.SchemaObjectId(), *returns, functionDefinition). 
+ WithArguments(argumentRequests) + + errs := errors.Join( + booleanStringAttributeCreateBuilder(d, "is_secure", request.WithSecure), + attributeMappedValueCreateBuilder[string](d, "return_results_behavior", request.WithReturnResultsBehavior, sdk.ToReturnResultsBehavior), + stringAttributeCreateBuilder(d, "comment", request.WithComment), + ) + if errs != nil { + return diag.FromErr(errs) + } + + if err := client.Functions.CreateForSQL(ctx, request); err != nil { + return diag.FromErr(err) + } + d.SetId(helpers.EncodeResourceIdentifier(id)) + + // parameters do not work in create function (query does not fail but parameters stay unchanged) + setRequest := sdk.NewFunctionSetRequest() + if parametersCreateDiags := handleFunctionParametersCreate(d, setRequest); len(parametersCreateDiags) > 0 { + return parametersCreateDiags + } + if !reflect.DeepEqual(*setRequest, *sdk.NewFunctionSetRequest()) { + err := client.Functions.Alter(ctx, sdk.NewAlterFunctionRequest(id).WithSet(*setRequest)) + if err != nil { + return diag.FromErr(err) + } + } + + return ReadContextFunctionSql(ctx, d, meta) } func ReadContextFunctionSql(ctx context.Context, d *schema.ResourceData, meta any) diag.Diagnostics { - return nil -} + client := meta.(*provider.Context).Client + id, err := sdk.ParseSchemaObjectIdentifierWithArguments(d.Id()) + if err != nil { + return diag.FromErr(err) + } -func UpdateContextFunctionSql(ctx context.Context, d *schema.ResourceData, meta any) diag.Diagnostics { - return nil -} + allFunctionDetails, diags := queryAllFunctionDetailsCommon(ctx, d, client, id) + if diags != nil { + return diags + } + + // TODO [SNOW-1348103]: handle external changes marking + // TODO [SNOW-1348103]: handle setting state to value from config + + errs := errors.Join( + // not reading is_secure on purpose (handled as external change to show output) + readFunctionOrProcedureArguments(d, allFunctionDetails.functionDetails.NormalizedArguments), + d.Set("return_type", 
allFunctionDetails.functionDetails.ReturnDataType.ToSql()), + // not reading null_input_behavior on purpose (handled as external change to show output) + // not reading return_results_behavior on purpose (handled as external change to show output) + d.Set("comment", allFunctionDetails.function.Description), + setRequiredFromStringPtr(d, "handler", allFunctionDetails.functionDetails.Handler), + setOptionalFromStringPtr(d, "function_definition", allFunctionDetails.functionDetails.Body), + d.Set("function_language", allFunctionDetails.functionDetails.Language), + + handleFunctionParameterRead(d, allFunctionDetails.functionParameters), + d.Set(FullyQualifiedNameAttributeName, id.FullyQualifiedName()), + d.Set(ShowOutputAttributeName, []map[string]any{schemas.FunctionToSchema(allFunctionDetails.function)}), + d.Set(ParametersAttributeName, []map[string]any{schemas.FunctionParametersToSchema(allFunctionDetails.functionParameters)}), + ) + if errs != nil { + return diag.FromErr(errs) + } -func DeleteContextFunctionSql(ctx context.Context, d *schema.ResourceData, meta any) diag.Diagnostics { return nil } diff --git a/pkg/resources/procedure_commons.go b/pkg/resources/procedure_commons.go index addb4f3c30..12d3645388 100644 --- a/pkg/resources/procedure_commons.go +++ b/pkg/resources/procedure_commons.go @@ -5,8 +5,10 @@ import ( "errors" "fmt" "log" + "reflect" "slices" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/helpers" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/internal/provider" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/schemas" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk" @@ -401,6 +403,83 @@ func DeleteProcedure(ctx context.Context, d *schema.ResourceData, meta any) diag return nil } +func UpdateProcedure(language string, readFunc func(ctx context.Context, d *schema.ResourceData, meta any) diag.Diagnostics) func(ctx context.Context, d *schema.ResourceData, meta any) diag.Diagnostics { + return
func(ctx context.Context, d *schema.ResourceData, meta any) diag.Diagnostics { + client := meta.(*provider.Context).Client + id, err := sdk.ParseSchemaObjectIdentifierWithArguments(d.Id()) + if err != nil { + return diag.FromErr(err) + } + + if d.HasChange("name") { + newId := sdk.NewSchemaObjectIdentifierWithArgumentsInSchema(id.SchemaId(), d.Get("name").(string), id.ArgumentDataTypes()...) + + err := client.Procedures.Alter(ctx, sdk.NewAlterProcedureRequest(id).WithRenameTo(newId.SchemaObjectId())) + if err != nil { + return diag.FromErr(fmt.Errorf("error renaming procedure %v err = %w", d.Id(), err)) + } + + d.SetId(helpers.EncodeResourceIdentifier(newId)) + id = newId + } + + // Batch SET operations and UNSET operations + setRequest := sdk.NewProcedureSetRequest() + unsetRequest := sdk.NewProcedureUnsetRequest() + + _ = stringAttributeUpdate(d, "comment", &setRequest.Comment, &unsetRequest.Comment) + + switch language { + case "JAVA", "SCALA", "PYTHON": + err = errors.Join( + func() error { + if d.HasChange("secrets") { + return setSecretsInBuilder(d, func(references []sdk.SecretReference) *sdk.ProcedureSetRequest { + return setRequest.WithSecretsList(sdk.SecretsListRequest{SecretsList: references}) + }) + } + return nil + }(), + func() error { + if d.HasChange("external_access_integrations") { + return setExternalAccessIntegrationsInBuilder(d, func(references []sdk.AccountObjectIdentifier) any { + if len(references) == 0 { + return unsetRequest.WithExternalAccessIntegrations(true) + } else { + return setRequest.WithExternalAccessIntegrations(references) + } + }) + } + return nil + }(), + ) + if err != nil { + return diag.FromErr(err) + } + } + + if updateParamDiags := handleProcedureParametersUpdate(d, setRequest, unsetRequest); len(updateParamDiags) > 0 { + return updateParamDiags + } + + // Apply SET and UNSET changes + if !reflect.DeepEqual(*setRequest, *sdk.NewProcedureSetRequest()) { + err := client.Procedures.Alter(ctx, 
sdk.NewAlterProcedureRequest(id).WithSet(*setRequest)) + if err != nil { + return diag.FromErr(err) + } + } + if !reflect.DeepEqual(*unsetRequest, *sdk.NewProcedureUnsetRequest()) { + err := client.Procedures.Alter(ctx, sdk.NewAlterProcedureRequest(id).WithUnset(*unsetRequest)) + if err != nil { + return diag.FromErr(err) + } + } + + return readFunc(ctx, d, meta) + } +} + func queryAllProcedureDetailsCommon(ctx context.Context, d *schema.ResourceData, client *sdk.Client, id sdk.SchemaObjectIdentifierWithArguments) (*allProcedureDetailsCommon, diag.Diagnostics) { procedureDetails, err := client.Procedures.DescribeDetails(ctx, id) if err != nil { @@ -526,6 +605,26 @@ func parseProcedureReturnsCommon(d *schema.ResourceData) (*sdk.ProcedureReturnsR return returns, nil } +func parseProcedureSqlReturns(d *schema.ResourceData) (*sdk.ProcedureSQLReturnsRequest, error) { + returnTypeRaw := d.Get("return_type").(string) + dataType, err := datatypes.ParseDataType(returnTypeRaw) + if err != nil { + return nil, err + } + returns := sdk.NewProcedureSQLReturnsRequest() + switch v := dataType.(type) { + case *datatypes.TableDataType: + var cr []sdk.ProcedureColumnRequest + for _, c := range v.Columns() { + cr = append(cr, *sdk.NewProcedureColumnRequest(c.ColumnName(), c.ColumnType())) + } + returns.WithTable(*sdk.NewProcedureReturnsTableRequest().WithColumns(cr)) + default: + returns.WithResultDataType(*sdk.NewProcedureReturnsResultDataTypeRequest(dataType)) + } + return returns, nil +} + func setProcedureImportsInBuilder[T any](d *schema.ResourceData, setImports func([]sdk.ProcedureImportRequest) T) error { imports, err := parseProcedureImportsCommon(d) if err != nil { diff --git a/pkg/resources/procedure_java.go b/pkg/resources/procedure_java.go index 04fcb0cf1a..1d98f7cf2a 100644 --- a/pkg/resources/procedure_java.go +++ b/pkg/resources/procedure_java.go @@ -23,7 +23,7 @@ func ProcedureJava() *schema.Resource { return &schema.Resource{ CreateContext: 
TrackingCreateWrapper(resources.ProcedureJava, CreateContextProcedureJava), ReadContext: TrackingReadWrapper(resources.ProcedureJava, ReadContextProcedureJava), - UpdateContext: TrackingUpdateWrapper(resources.ProcedureJava, UpdateContextProcedureJava), + UpdateContext: TrackingUpdateWrapper(resources.ProcedureJava, UpdateProcedure("JAVA", ReadContextProcedureJava)), DeleteContext: TrackingDeleteWrapper(resources.ProcedureJava, DeleteProcedure), Description: "Resource used to manage java procedure objects. For more information, check [procedure documentation](https://docs.snowflake.com/en/sql-reference/sql/create-procedure).", @@ -151,74 +151,3 @@ func ReadContextProcedureJava(ctx context.Context, d *schema.ResourceData, meta return nil } - -func UpdateContextProcedureJava(ctx context.Context, d *schema.ResourceData, meta any) diag.Diagnostics { - client := meta.(*provider.Context).Client - id, err := sdk.ParseSchemaObjectIdentifierWithArguments(d.Id()) - if err != nil { - return diag.FromErr(err) - } - - if d.HasChange("name") { - newId := sdk.NewSchemaObjectIdentifierWithArgumentsInSchema(id.SchemaId(), d.Get("name").(string), id.ArgumentDataTypes()...) 
- - err := client.Procedures.Alter(ctx, sdk.NewAlterProcedureRequest(id).WithRenameTo(newId.SchemaObjectId())) - if err != nil { - return diag.FromErr(fmt.Errorf("error renaming procedure %v err = %w", d.Id(), err)) - } - - d.SetId(helpers.EncodeResourceIdentifier(newId)) - id = newId - } - - // Batch SET operations and UNSET operations - setRequest := sdk.NewProcedureSetRequest() - unsetRequest := sdk.NewProcedureUnsetRequest() - - err = errors.Join( - stringAttributeUpdate(d, "comment", &setRequest.Comment, &unsetRequest.Comment), - func() error { - if d.HasChange("secrets") { - return setSecretsInBuilder(d, func(references []sdk.SecretReference) *sdk.ProcedureSetRequest { - return setRequest.WithSecretsList(sdk.SecretsListRequest{SecretsList: references}) - }) - } - return nil - }(), - func() error { - if d.HasChange("external_access_integrations") { - return setExternalAccessIntegrationsInBuilder(d, func(references []sdk.AccountObjectIdentifier) any { - if len(references) == 0 { - return unsetRequest.WithExternalAccessIntegrations(true) - } else { - return setRequest.WithExternalAccessIntegrations(references) - } - }) - } - return nil - }(), - ) - if err != nil { - return diag.FromErr(err) - } - - if updateParamDiags := handleProcedureParametersUpdate(d, setRequest, unsetRequest); len(updateParamDiags) > 0 { - return updateParamDiags - } - - // Apply SET and UNSET changes - if !reflect.DeepEqual(*setRequest, *sdk.NewProcedureSetRequest()) { - err := client.Procedures.Alter(ctx, sdk.NewAlterProcedureRequest(id).WithSet(*setRequest)) - if err != nil { - return diag.FromErr(err) - } - } - if !reflect.DeepEqual(*unsetRequest, *sdk.NewProcedureUnsetRequest()) { - err := client.Procedures.Alter(ctx, sdk.NewAlterProcedureRequest(id).WithUnset(*unsetRequest)) - if err != nil { - return diag.FromErr(err) - } - } - - return ReadContextProcedureJava(ctx, d, meta) -} diff --git a/pkg/resources/procedure_javascript.go b/pkg/resources/procedure_javascript.go index 
5088b492f7..4a273e28f5 100644 --- a/pkg/resources/procedure_javascript.go +++ b/pkg/resources/procedure_javascript.go @@ -2,11 +2,17 @@ package resources import ( "context" + "errors" + "reflect" "strings" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/helpers" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/internal/collections" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/internal/provider" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/provider/resources" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/schemas" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk/datatypes" "github.com/hashicorp/terraform-plugin-sdk/v2/diag" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/customdiff" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" @@ -16,7 +22,7 @@ func ProcedureJavascript() *schema.Resource { return &schema.Resource{ CreateContext: TrackingCreateWrapper(resources.ProcedureJavascript, CreateContextProcedureJavascript), ReadContext: TrackingReadWrapper(resources.ProcedureJavascript, ReadContextProcedureJavascript), - UpdateContext: TrackingUpdateWrapper(resources.ProcedureJavascript, UpdateContextProcedureJavascript), + UpdateContext: TrackingUpdateWrapper(resources.ProcedureJavascript, UpdateProcedure("JAVASCRIPT", ReadContextProcedureJavascript)), DeleteContext: TrackingDeleteWrapper(resources.ProcedureJavascript, DeleteProcedure), Description: "Resource used to manage javascript procedure objects. 
For more information, check [procedure documentation](https://docs.snowflake.com/en/sql-reference/sql/create-procedure).", @@ -25,7 +31,11 @@ func ProcedureJavascript() *schema.Resource { ComputedIfAnyAttributeChanged(javascriptProcedureSchema, FullyQualifiedNameAttributeName, "name"), ComputedIfAnyAttributeChanged(procedureParametersSchema, ParametersAttributeName, collections.Map(sdk.AsStringList(sdk.AllProcedureParameters), strings.ToLower)...), procedureParametersCustomDiff, - // TODO[SNOW-1348103]: recreate when type changed externally + // The language check is more for the future. + // Currently, almost all attributes are marked as forceNew. + // When language changes, these attributes also change, causing the object to recreate either way. + // The only option is java staged <-> scala staged (however scala need runtime_version which may interfere). + RecreateWhenResourceStringFieldChangedExternally("procedure_language", "JAVASCRIPT"), )), Schema: collections.MergeMaps(javascriptProcedureSchema, procedureParametersSchema), @@ -36,13 +46,88 @@ func ProcedureJavascript() *schema.Resource { } func CreateContextProcedureJavascript(ctx context.Context, d *schema.ResourceData, meta any) diag.Diagnostics { - return nil + client := meta.(*provider.Context).Client + database := d.Get("database").(string) + sc := d.Get("schema").(string) + name := d.Get("name").(string) + + argumentRequests, err := parseProcedureArgumentsCommon(d) + if err != nil { + return diag.FromErr(err) + } + returnTypeRaw := d.Get("return_type").(string) + returnDataType, err := datatypes.ParseDataType(returnTypeRaw) + if err != nil { + return diag.FromErr(err) + } + procedureDefinition := d.Get("procedure_definition").(string) + + argumentDataTypes := collections.Map(argumentRequests, func(r sdk.ProcedureArgumentRequest) datatypes.DataType { return r.ArgDataType }) + id := sdk.NewSchemaObjectIdentifierWithArgumentsNormalized(database, sc, name, argumentDataTypes...) 
+ request := sdk.NewCreateForJavaScriptProcedureRequestDefinitionWrapped(id.SchemaObjectId(), returnDataType, procedureDefinition). + WithArguments(argumentRequests) + + errs := errors.Join( + booleanStringAttributeCreateBuilder(d, "is_secure", request.WithSecure), + attributeMappedValueCreateBuilder[string](d, "null_input_behavior", request.WithNullInputBehavior, sdk.ToNullInputBehavior), + stringAttributeCreateBuilder(d, "comment", request.WithComment), + ) + if errs != nil { + return diag.FromErr(errs) + } + + if err := client.Procedures.CreateForJavaScript(ctx, request); err != nil { + return diag.FromErr(err) + } + d.SetId(helpers.EncodeResourceIdentifier(id)) + + // parameters do not work in create procedure (query does not fail but parameters stay unchanged) + setRequest := sdk.NewProcedureSetRequest() + if parametersCreateDiags := handleProcedureParametersCreate(d, setRequest); len(parametersCreateDiags) > 0 { + return parametersCreateDiags + } + if !reflect.DeepEqual(*setRequest, *sdk.NewProcedureSetRequest()) { + err := client.Procedures.Alter(ctx, sdk.NewAlterProcedureRequest(id).WithSet(*setRequest)) + if err != nil { + return diag.FromErr(err) + } + } + + return ReadContextProcedureJavascript(ctx, d, meta) } func ReadContextProcedureJavascript(ctx context.Context, d *schema.ResourceData, meta any) diag.Diagnostics { - return nil -} + client := meta.(*provider.Context).Client + id, err := sdk.ParseSchemaObjectIdentifierWithArguments(d.Id()) + if err != nil { + return diag.FromErr(err) + } + + allProcedureDetails, diags := queryAllProcedureDetailsCommon(ctx, d, client, id) + if diags != nil { + return diags + } + + // TODO [SNOW-1348103]: handle external changes marking + // TODO [SNOW-1348103]: handle setting state to value from config + + errs := errors.Join( + // not reading is_secure on purpose (handled as external change to show output) + readFunctionOrProcedureArguments(d, allProcedureDetails.procedureDetails.NormalizedArguments), + 
d.Set("return_type", allProcedureDetails.procedureDetails.ReturnDataType.ToSql()), + // not reading null_input_behavior on purpose (handled as external change to show output) + d.Set("comment", allProcedureDetails.procedure.Description), + setOptionalFromStringPtr(d, "procedure_definition", allProcedureDetails.procedureDetails.Body), + d.Set("procedure_language", allProcedureDetails.procedureDetails.Language), + + handleProcedureParameterRead(d, allProcedureDetails.procedureParameters), + d.Set(FullyQualifiedNameAttributeName, id.FullyQualifiedName()), + d.Set(ShowOutputAttributeName, []map[string]any{schemas.ProcedureToSchema(allProcedureDetails.procedure)}), + d.Set(ParametersAttributeName, []map[string]any{schemas.ProcedureParametersToSchema(allProcedureDetails.procedureParameters)}), + ) + if errs != nil { + return diag.FromErr(err) + } -func UpdateContextProcedureJavascript(ctx context.Context, d *schema.ResourceData, meta any) diag.Diagnostics { return nil } diff --git a/pkg/resources/procedure_python.go b/pkg/resources/procedure_python.go index 717cee32fe..0432fbb966 100644 --- a/pkg/resources/procedure_python.go +++ b/pkg/resources/procedure_python.go @@ -2,11 +2,18 @@ package resources import ( "context" + "errors" + "fmt" + "reflect" "strings" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/helpers" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/internal/collections" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/internal/provider" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/provider/resources" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/schemas" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk/datatypes" "github.com/hashicorp/terraform-plugin-sdk/v2/diag" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/customdiff" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" @@ -16,7 +23,7 @@ 
func ProcedurePython() *schema.Resource { return &schema.Resource{ CreateContext: TrackingCreateWrapper(resources.ProcedurePython, CreateContextProcedurePython), ReadContext: TrackingReadWrapper(resources.ProcedurePython, ReadContextProcedurePython), - UpdateContext: TrackingUpdateWrapper(resources.ProcedurePython, UpdateContextProcedurePython), + UpdateContext: TrackingUpdateWrapper(resources.ProcedurePython, UpdateProcedure("PYTHON", ReadContextProcedurePython)), DeleteContext: TrackingDeleteWrapper(resources.ProcedurePython, DeleteProcedure), Description: "Resource used to manage python procedure objects. For more information, check [procedure documentation](https://docs.snowflake.com/en/sql-reference/sql/create-procedure).", @@ -25,7 +32,11 @@ func ProcedurePython() *schema.Resource { ComputedIfAnyAttributeChanged(pythonProcedureSchema, FullyQualifiedNameAttributeName, "name"), ComputedIfAnyAttributeChanged(procedureParametersSchema, ParametersAttributeName, collections.Map(sdk.AsStringList(sdk.AllProcedureParameters), strings.ToLower)...), procedureParametersCustomDiff, - // TODO[SNOW-1348103]: recreate when type changed externally + // The language check is more for the future. + // Currently, almost all attributes are marked as forceNew. + // When language changes, these attributes also change, causing the object to recreate either way. + // The only option is java staged <-> scala staged (however scala need runtime_version which may interfere). 
+ RecreateWhenResourceStringFieldChangedExternally("procedure_language", "PYTHON"), )), Schema: collections.MergeMaps(pythonProcedureSchema, procedureParametersSchema), @@ -36,13 +47,105 @@ func ProcedurePython() *schema.Resource { } func CreateContextProcedurePython(ctx context.Context, d *schema.ResourceData, meta any) diag.Diagnostics { - return nil + client := meta.(*provider.Context).Client + database := d.Get("database").(string) + sc := d.Get("schema").(string) + name := d.Get("name").(string) + + argumentRequests, err := parseProcedureArgumentsCommon(d) + if err != nil { + return diag.FromErr(err) + } + returns, err := parseProcedureReturnsCommon(d) + if err != nil { + return diag.FromErr(err) + } + handler := d.Get("handler").(string) + runtimeVersion := d.Get("runtime_version").(string) + + packages, err := parseProceduresPackagesCommon(d) + if err != nil { + return diag.FromErr(err) + } + packages = append(packages, *sdk.NewProcedurePackageRequest(fmt.Sprintf(`%s%s`, sdk.PythonSnowparkPackageString, d.Get("snowpark_package").(string)))) + + argumentDataTypes := collections.Map(argumentRequests, func(r sdk.ProcedureArgumentRequest) datatypes.DataType { return r.ArgDataType }) + id := sdk.NewSchemaObjectIdentifierWithArgumentsNormalized(database, sc, name, argumentDataTypes...) + request := sdk.NewCreateForPythonProcedureRequest(id.SchemaObjectId(), *returns, runtimeVersion, packages, handler). 
+ WithArguments(argumentRequests) + + errs := errors.Join( + booleanStringAttributeCreateBuilder(d, "is_secure", request.WithSecure), + attributeMappedValueCreateBuilder[string](d, "null_input_behavior", request.WithNullInputBehavior, sdk.ToNullInputBehavior), + stringAttributeCreateBuilder(d, "comment", request.WithComment), + setProcedureImportsInBuilder(d, request.WithImports), + setExternalAccessIntegrationsInBuilder(d, request.WithExternalAccessIntegrations), + setSecretsInBuilder(d, request.WithSecrets), + stringAttributeCreateBuilder(d, "procedure_definition", request.WithProcedureDefinitionWrapped), + ) + if errs != nil { + return diag.FromErr(errs) + } + + if err := client.Procedures.CreateForPython(ctx, request); err != nil { + return diag.FromErr(err) + } + d.SetId(helpers.EncodeResourceIdentifier(id)) + + // parameters do not work in create procedure (query does not fail but parameters stay unchanged) + setRequest := sdk.NewProcedureSetRequest() + if parametersCreateDiags := handleProcedureParametersCreate(d, setRequest); len(parametersCreateDiags) > 0 { + return parametersCreateDiags + } + if !reflect.DeepEqual(*setRequest, *sdk.NewProcedureSetRequest()) { + err := client.Procedures.Alter(ctx, sdk.NewAlterProcedureRequest(id).WithSet(*setRequest)) + if err != nil { + return diag.FromErr(err) + } + } + + return ReadContextProcedurePython(ctx, d, meta) } func ReadContextProcedurePython(ctx context.Context, d *schema.ResourceData, meta any) diag.Diagnostics { - return nil -} + client := meta.(*provider.Context).Client + id, err := sdk.ParseSchemaObjectIdentifierWithArguments(d.Id()) + if err != nil { + return diag.FromErr(err) + } + + allProcedureDetails, diags := queryAllProcedureDetailsCommon(ctx, d, client, id) + if diags != nil { + return diags + } + + // TODO [SNOW-1348103]: handle external changes marking + // TODO [SNOW-1348103]: handle setting state to value from config + + errs := errors.Join( + // not reading is_secure on purpose (handled as 
external change to show output) + readFunctionOrProcedureArguments(d, allProcedureDetails.procedureDetails.NormalizedArguments), + d.Set("return_type", allProcedureDetails.procedureDetails.ReturnDataType.ToSql()), + // not reading null_input_behavior on purpose (handled as external change to show output) + setRequiredFromStringPtr(d, "runtime_version", allProcedureDetails.procedureDetails.RuntimeVersion), + d.Set("comment", allProcedureDetails.procedure.Description), + readFunctionOrProcedureImports(d, allProcedureDetails.procedureDetails.NormalizedImports), + d.Set("packages", allProcedureDetails.procedureDetails.NormalizedPackages), + d.Set("snowpark_package", allProcedureDetails.procedureDetails.SnowparkVersion), + setRequiredFromStringPtr(d, "handler", allProcedureDetails.procedureDetails.Handler), + readFunctionOrProcedureExternalAccessIntegrations(d, allProcedureDetails.procedureDetails.NormalizedExternalAccessIntegrations), + readFunctionOrProcedureSecrets(d, allProcedureDetails.procedureDetails.NormalizedSecrets), + setOptionalFromStringPtr(d, "procedure_definition", allProcedureDetails.procedureDetails.Body), + d.Set("procedure_language", allProcedureDetails.procedureDetails.Language), + + handleProcedureParameterRead(d, allProcedureDetails.procedureParameters), + d.Set(FullyQualifiedNameAttributeName, id.FullyQualifiedName()), + d.Set(ShowOutputAttributeName, []map[string]any{schemas.ProcedureToSchema(allProcedureDetails.procedure)}), + d.Set(ParametersAttributeName, []map[string]any{schemas.ProcedureParametersToSchema(allProcedureDetails.procedureParameters)}), + ) + if errs != nil { + return diag.FromErr(err) + } -func UpdateContextProcedurePython(ctx context.Context, d *schema.ResourceData, meta any) diag.Diagnostics { return nil } diff --git a/pkg/resources/procedure_scala.go b/pkg/resources/procedure_scala.go index 793663d0e1..0a5dc691d0 100644 --- a/pkg/resources/procedure_scala.go +++ b/pkg/resources/procedure_scala.go @@ -2,11 +2,18 @@ package 
resources import ( "context" + "errors" + "fmt" + "reflect" "strings" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/helpers" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/internal/collections" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/internal/provider" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/provider/resources" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/schemas" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk/datatypes" "github.com/hashicorp/terraform-plugin-sdk/v2/diag" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/customdiff" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" @@ -16,7 +23,7 @@ func ProcedureScala() *schema.Resource { return &schema.Resource{ CreateContext: TrackingCreateWrapper(resources.ProcedureScala, CreateContextProcedureScala), ReadContext: TrackingReadWrapper(resources.ProcedureScala, ReadContextProcedureScala), - UpdateContext: TrackingUpdateWrapper(resources.ProcedureScala, UpdateContextProcedureScala), + UpdateContext: TrackingUpdateWrapper(resources.ProcedureScala, UpdateProcedure("SQL", ReadContextProcedureScala)), DeleteContext: TrackingDeleteWrapper(resources.ProcedureScala, DeleteProcedure), Description: "Resource used to manage scala procedure objects. For more information, check [procedure documentation](https://docs.snowflake.com/en/sql-reference/sql/create-procedure).", @@ -25,7 +32,11 @@ func ProcedureScala() *schema.Resource { ComputedIfAnyAttributeChanged(scalaProcedureSchema, FullyQualifiedNameAttributeName, "name"), ComputedIfAnyAttributeChanged(procedureParametersSchema, ParametersAttributeName, collections.Map(sdk.AsStringList(sdk.AllProcedureParameters), strings.ToLower)...), procedureParametersCustomDiff, - // TODO[SNOW-1348103]: recreate when type changed externally + // The language check is more for the future. 
+ // Currently, almost all attributes are marked as forceNew. + // When language changes, these attributes also change, causing the object to recreate either way. + // The only option is java staged <-> scala staged (however scala need runtime_version which may interfere). + RecreateWhenResourceStringFieldChangedExternally("procedure_language", "SCALA"), )), Schema: collections.MergeMaps(scalaProcedureSchema, procedureParametersSchema), @@ -36,13 +47,107 @@ func ProcedureScala() *schema.Resource { } func CreateContextProcedureScala(ctx context.Context, d *schema.ResourceData, meta any) diag.Diagnostics { - return nil + client := meta.(*provider.Context).Client + database := d.Get("database").(string) + sc := d.Get("schema").(string) + name := d.Get("name").(string) + + argumentRequests, err := parseProcedureArgumentsCommon(d) + if err != nil { + return diag.FromErr(err) + } + returns, err := parseProcedureReturnsCommon(d) + if err != nil { + return diag.FromErr(err) + } + handler := d.Get("handler").(string) + runtimeVersion := d.Get("runtime_version").(string) + + packages, err := parseProceduresPackagesCommon(d) + if err != nil { + return diag.FromErr(err) + } + packages = append(packages, *sdk.NewProcedurePackageRequest(fmt.Sprintf(`%s%s`, sdk.JavaSnowparkPackageString, d.Get("snowpark_package").(string)))) + + argumentDataTypes := collections.Map(argumentRequests, func(r sdk.ProcedureArgumentRequest) datatypes.DataType { return r.ArgDataType }) + id := sdk.NewSchemaObjectIdentifierWithArgumentsNormalized(database, sc, name, argumentDataTypes...) + request := sdk.NewCreateForScalaProcedureRequest(id.SchemaObjectId(), *returns, runtimeVersion, packages, handler). 
+ WithArguments(argumentRequests) + + errs := errors.Join( + booleanStringAttributeCreateBuilder(d, "is_secure", request.WithSecure), + attributeMappedValueCreateBuilder[string](d, "null_input_behavior", request.WithNullInputBehavior, sdk.ToNullInputBehavior), + stringAttributeCreateBuilder(d, "comment", request.WithComment), + setProcedureImportsInBuilder(d, request.WithImports), + setExternalAccessIntegrationsInBuilder(d, request.WithExternalAccessIntegrations), + setSecretsInBuilder(d, request.WithSecrets), + setProcedureTargetPathInBuilder(d, request.WithTargetPath), + stringAttributeCreateBuilder(d, "procedure_definition", request.WithProcedureDefinitionWrapped), + ) + if errs != nil { + return diag.FromErr(errs) + } + + if err := client.Procedures.CreateForScala(ctx, request); err != nil { + return diag.FromErr(err) + } + d.SetId(helpers.EncodeResourceIdentifier(id)) + + // parameters do not work in create procedure (query does not fail but parameters stay unchanged) + setRequest := sdk.NewProcedureSetRequest() + if parametersCreateDiags := handleProcedureParametersCreate(d, setRequest); len(parametersCreateDiags) > 0 { + return parametersCreateDiags + } + if !reflect.DeepEqual(*setRequest, *sdk.NewProcedureSetRequest()) { + err := client.Procedures.Alter(ctx, sdk.NewAlterProcedureRequest(id).WithSet(*setRequest)) + if err != nil { + return diag.FromErr(err) + } + } + + return ReadContextProcedureScala(ctx, d, meta) } func ReadContextProcedureScala(ctx context.Context, d *schema.ResourceData, meta any) diag.Diagnostics { - return nil -} + client := meta.(*provider.Context).Client + id, err := sdk.ParseSchemaObjectIdentifierWithArguments(d.Id()) + if err != nil { + return diag.FromErr(err) + } + + allProcedureDetails, diags := queryAllProcedureDetailsCommon(ctx, d, client, id) + if diags != nil { + return diags + } + + // TODO [SNOW-1348103]: handle external changes marking + // TODO [SNOW-1348103]: handle setting state to value from config + + errs := 
errors.Join( + // not reading is_secure on purpose (handled as external change to show output) + readFunctionOrProcedureArguments(d, allProcedureDetails.procedureDetails.NormalizedArguments), + d.Set("return_type", allProcedureDetails.procedureDetails.ReturnDataType.ToSql()), + // not reading null_input_behavior on purpose (handled as external change to show output) + setRequiredFromStringPtr(d, "runtime_version", allProcedureDetails.procedureDetails.RuntimeVersion), + d.Set("comment", allProcedureDetails.procedure.Description), + readFunctionOrProcedureImports(d, allProcedureDetails.procedureDetails.NormalizedImports), + d.Set("packages", allProcedureDetails.procedureDetails.NormalizedPackages), + d.Set("snowpark_package", allProcedureDetails.procedureDetails.SnowparkVersion), + setRequiredFromStringPtr(d, "handler", allProcedureDetails.procedureDetails.Handler), + readFunctionOrProcedureExternalAccessIntegrations(d, allProcedureDetails.procedureDetails.NormalizedExternalAccessIntegrations), + readFunctionOrProcedureSecrets(d, allProcedureDetails.procedureDetails.NormalizedSecrets), + readFunctionOrProcedureTargetPath(d, allProcedureDetails.procedureDetails.NormalizedTargetPath), + setOptionalFromStringPtr(d, "procedure_definition", allProcedureDetails.procedureDetails.Body), + d.Set("procedure_language", allProcedureDetails.procedureDetails.Language), + + handleProcedureParameterRead(d, allProcedureDetails.procedureParameters), + d.Set(FullyQualifiedNameAttributeName, id.FullyQualifiedName()), + d.Set(ShowOutputAttributeName, []map[string]any{schemas.ProcedureToSchema(allProcedureDetails.procedure)}), + d.Set(ParametersAttributeName, []map[string]any{schemas.ProcedureParametersToSchema(allProcedureDetails.procedureParameters)}), + ) + if errs != nil { + return diag.FromErr(err) + } -func UpdateContextProcedureScala(ctx context.Context, d *schema.ResourceData, meta any) diag.Diagnostics { return nil } diff --git a/pkg/resources/procedure_sql.go 
b/pkg/resources/procedure_sql.go index 11fcd69413..64ddfde270 100644 --- a/pkg/resources/procedure_sql.go +++ b/pkg/resources/procedure_sql.go @@ -2,11 +2,17 @@ package resources import ( "context" + "errors" + "reflect" "strings" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/helpers" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/internal/collections" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/internal/provider" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/provider/resources" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/schemas" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk/datatypes" "github.com/hashicorp/terraform-plugin-sdk/v2/diag" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/customdiff" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" @@ -16,7 +22,7 @@ func ProcedureSql() *schema.Resource { return &schema.Resource{ CreateContext: TrackingCreateWrapper(resources.ProcedureSql, CreateContextProcedureSql), ReadContext: TrackingReadWrapper(resources.ProcedureSql, ReadContextProcedureSql), - UpdateContext: TrackingUpdateWrapper(resources.ProcedureSql, UpdateContextProcedureSql), + UpdateContext: TrackingUpdateWrapper(resources.ProcedureSql, UpdateProcedure("SQL", ReadContextProcedureSql)), DeleteContext: TrackingDeleteWrapper(resources.ProcedureSql, DeleteProcedure), Description: "Resource used to manage sql procedure objects. 
For more information, check [procedure documentation](https://docs.snowflake.com/en/sql-reference/sql/create-procedure).", @@ -25,7 +31,11 @@ func ProcedureSql() *schema.Resource { ComputedIfAnyAttributeChanged(sqlProcedureSchema, FullyQualifiedNameAttributeName, "name"), ComputedIfAnyAttributeChanged(procedureParametersSchema, ParametersAttributeName, collections.Map(sdk.AsStringList(sdk.AllProcedureParameters), strings.ToLower)...), procedureParametersCustomDiff, - // TODO[SNOW-1348103]: recreate when type changed externally + // The language check is more for the future. + // Currently, almost all attributes are marked as forceNew. + // When language changes, these attributes also change, causing the object to recreate either way. + // The only option is java staged <-> scala staged (however scala need runtime_version which may interfere). + RecreateWhenResourceStringFieldChangedExternally("procedure_language", "SQL"), )), Schema: collections.MergeMaps(sqlProcedureSchema, procedureParametersSchema), @@ -36,13 +46,87 @@ func ProcedureSql() *schema.Resource { } func CreateContextProcedureSql(ctx context.Context, d *schema.ResourceData, meta any) diag.Diagnostics { - return nil + client := meta.(*provider.Context).Client + database := d.Get("database").(string) + sc := d.Get("schema").(string) + name := d.Get("name").(string) + + argumentRequests, err := parseProcedureArgumentsCommon(d) + if err != nil { + return diag.FromErr(err) + } + returns, err := parseProcedureSqlReturns(d) + if err != nil { + return diag.FromErr(err) + } + procedureDefinition := d.Get("procedure_definition").(string) + + argumentDataTypes := collections.Map(argumentRequests, func(r sdk.ProcedureArgumentRequest) datatypes.DataType { return r.ArgDataType }) + id := sdk.NewSchemaObjectIdentifierWithArgumentsNormalized(database, sc, name, argumentDataTypes...) + request := sdk.NewCreateForSQLProcedureRequestDefinitionWrapped(id.SchemaObjectId(), *returns, procedureDefinition). 
+ WithArguments(argumentRequests) + + errs := errors.Join( + booleanStringAttributeCreateBuilder(d, "is_secure", request.WithSecure), + attributeMappedValueCreateBuilder[string](d, "null_input_behavior", request.WithNullInputBehavior, sdk.ToNullInputBehavior), + stringAttributeCreateBuilder(d, "comment", request.WithComment), + ) + if errs != nil { + return diag.FromErr(errs) + } + + if err := client.Procedures.CreateForSQL(ctx, request); err != nil { + return diag.FromErr(err) + } + d.SetId(helpers.EncodeResourceIdentifier(id)) + + // parameters do not work in create procedure (query does not fail but parameters stay unchanged) + setRequest := sdk.NewProcedureSetRequest() + if parametersCreateDiags := handleProcedureParametersCreate(d, setRequest); len(parametersCreateDiags) > 0 { + return parametersCreateDiags + } + if !reflect.DeepEqual(*setRequest, *sdk.NewProcedureSetRequest()) { + err := client.Procedures.Alter(ctx, sdk.NewAlterProcedureRequest(id).WithSet(*setRequest)) + if err != nil { + return diag.FromErr(err) + } + } + + return ReadContextProcedureSql(ctx, d, meta) } func ReadContextProcedureSql(ctx context.Context, d *schema.ResourceData, meta any) diag.Diagnostics { - return nil -} + client := meta.(*provider.Context).Client + id, err := sdk.ParseSchemaObjectIdentifierWithArguments(d.Id()) + if err != nil { + return diag.FromErr(err) + } + + allProcedureDetails, diags := queryAllProcedureDetailsCommon(ctx, d, client, id) + if diags != nil { + return diags + } + + // TODO [SNOW-1348103]: handle external changes marking + // TODO [SNOW-1348103]: handle setting state to value from config + + errs := errors.Join( + // not reading is_secure on purpose (handled as external change to show output) + readFunctionOrProcedureArguments(d, allProcedureDetails.procedureDetails.NormalizedArguments), + d.Set("return_type", allProcedureDetails.procedureDetails.ReturnDataType.ToSql()), + // not reading null_input_behavior on purpose (handled as external change to show 
output) + d.Set("comment", allProcedureDetails.procedure.Description), + setOptionalFromStringPtr(d, "procedure_definition", allProcedureDetails.procedureDetails.Body), + d.Set("procedure_language", allProcedureDetails.procedureDetails.Language), + + handleProcedureParameterRead(d, allProcedureDetails.procedureParameters), + d.Set(FullyQualifiedNameAttributeName, id.FullyQualifiedName()), + d.Set(ShowOutputAttributeName, []map[string]any{schemas.ProcedureToSchema(allProcedureDetails.procedure)}), + d.Set(ParametersAttributeName, []map[string]any{schemas.ProcedureParametersToSchema(allProcedureDetails.procedureParameters)}), + ) + if errs != nil { + return diag.FromErr(err) + } -func UpdateContextProcedureSql(ctx context.Context, d *schema.ResourceData, meta any) diag.Diagnostics { return nil } From 15aa9c2c94d80ae1d299a333b8035e38de6a6dfc Mon Sep 17 00:00:00 2001 From: Jakub Michalak Date: Thu, 12 Dec 2024 15:39:05 +0100 Subject: [PATCH 06/20] feat: Rework account parameter resource (#3264) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit - rework account parameter resource - use UNSET in resource deletion - add missing docs - add missing functions in the sdk - ## Test Plan * [x] acceptance tests * [ ] … ## References https://docs.snowflake.com/en/sql-reference/parameters ## TODO - support all of the account parameters - add tests for all of the supported account parameters - check unsetting with a proper checkDestroy --- MIGRATION_GUIDE.md | 8 + docs/resources/account_parameter.md | 15 +- .../snowflake_account_parameter/import.sh | 2 +- .../database_parameters_snowflake_gen.go | 2 +- .../gen/object_parameters_def.go | 14 +- .../task_parameters_snowflake_gen.go | 6 +- .../user_parameters_snowflake_gen.go | 6 +- .../account_parameter_resource_gen.go | 57 +++ .../resourceassert/gen/resource_schema_def.go | 4 + .../model/account_parameter_model_gen.go | 70 ++++ pkg/acceptance/check_destroy.go | 16 + 
pkg/resources/account_parameter.go | 62 ++-- .../account_parameter_acceptance_test.go | 80 +++-- pkg/sdk/parameters.go | 327 +++++++++++++++--- pkg/sdk/parameters_impl.go | 8 +- pkg/sdk/parameters_test.go | 116 +++++++ templates/resources/account_parameter.md.tmpl | 37 ++ 17 files changed, 697 insertions(+), 133 deletions(-) create mode 100644 pkg/acceptance/bettertestspoc/assert/resourceassert/account_parameter_resource_gen.go create mode 100644 pkg/acceptance/bettertestspoc/config/model/account_parameter_model_gen.go create mode 100644 templates/resources/account_parameter.md.tmpl diff --git a/MIGRATION_GUIDE.md b/MIGRATION_GUIDE.md index d9bc0f050d..2fe0dd5647 100644 --- a/MIGRATION_GUIDE.md +++ b/MIGRATION_GUIDE.md @@ -9,6 +9,14 @@ across different versions. ## v0.99.0 ➞ v0.100.0 +### snowflake_account_parameter resource changes + +#### *(behavior change)* resource deletion +During resource deleting, provider now uses `UNSET` instead of `SET` with the default value. + +#### *(behavior change)* changes in `key` field +The value of `key` field is now case-insensitive and is validated. The list of supported values is available in the resource documentation. + ### snowflake_oauth_integration_for_partner_applications and snowflake_oauth_integration_for_custom_clients resource changes #### *(behavior change)* `blocked_roles_list` field is no longer required diff --git a/docs/resources/account_parameter.md b/docs/resources/account_parameter.md index 4607f52742..07b49e485f 100644 --- a/docs/resources/account_parameter.md +++ b/docs/resources/account_parameter.md @@ -2,12 +2,16 @@ page_title: "snowflake_account_parameter Resource - terraform-provider-snowflake" subcategory: "" description: |- - + Resource used to manage current account parameters. For more information, check parameters documentation https://docs.snowflake.com/en/sql-reference/parameters. 
--- -# snowflake_account_parameter (Resource) +!> **V1 release candidate** This resource was reworked and is a release candidate for the V1. We do not expect significant changes in it before the V1. We will welcome any feedback and adjust the resource if needed. Any errors reported will be resolved with a higher priority. We encourage checking this resource out before the V1 release. Please follow the [migration guide](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/MIGRATION_GUIDE.md#v0990--v01000) to use it. + +-> **Note** This resource does not support all account parameters. The supported ones are listed below. This feature gap will be addressed in future releases. +# snowflake_account_parameter (Resource) +Resource used to manage current account parameters. For more information, check [parameters documentation](https://docs.snowflake.com/en/sql-reference/parameters). ## Example Usage @@ -22,7 +26,6 @@ resource "snowflake_account_parameter" "p2" { value = "256" } ``` - -> **Note** Instead of using fully_qualified_name, you can reference objects managed outside Terraform by constructing a correct ID, consult [identifiers guide](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/latest/docs/guides/identifiers#new-computed-fully-qualified-name-field-in-resources). @@ -31,8 +34,8 @@ resource "snowflake_account_parameter" "p2" { ### Required -- `key` (String) Name of account parameter. Valid values are those in [account parameters](https://docs.snowflake.com/en/sql-reference/parameters.html#account-parameters). -- `value` (String) Value of account parameter, as a string. Constraints are the same as those for the parameters in Snowflake documentation. +- `key` (String) Name of account parameter. 
Valid values are (case-insensitive): `ALLOW_CLIENT_MFA_CACHING` | `ALLOW_ID_TOKEN` | `CLIENT_ENCRYPTION_KEY_SIZE` | `ENABLE_IDENTIFIER_FIRST_LOGIN` | `ENABLE_INTERNAL_STAGES_PRIVATELINK` | `ENABLE_TRI_SECRET_AND_REKEY_OPT_OUT_FOR_IMAGE_REPOSITORY` | `ENABLE_TRI_SECRET_AND_REKEY_OPT_OUT_FOR_SPCS_BLOCK_STORAGE` | `EVENT_TABLE` | `EXTERNAL_OAUTH_ADD_PRIVILEGED_ROLES_TO_BLOCKED_LIST` | `INITIAL_REPLICATION_SIZE_LIMIT_IN_TB` | `MIN_DATA_RETENTION_TIME_IN_DAYS` | `NETWORK_POLICY` | `OAUTH_ADD_PRIVILEGED_ROLES_TO_BLOCKED_LIST` | `PERIODIC_DATA_REKEYING` | `PREVENT_LOAD_FROM_INLINE_URL` | `PREVENT_UNLOAD_TO_INLINE_URL` | `PREVENT_UNLOAD_TO_INTERNAL_STAGES` | `REQUIRE_STORAGE_INTEGRATION_FOR_STAGE_CREATION` | `REQUIRE_STORAGE_INTEGRATION_FOR_STAGE_OPERATION` | `SSO_LOGIN_PAGE` | `ABORT_DETACHED_QUERY` | `AUTOCOMMIT` | `BINARY_INPUT_FORMAT` | `BINARY_OUTPUT_FORMAT` | `CLIENT_MEMORY_LIMIT` | `CLIENT_METADATA_REQUEST_USE_CONNECTION_CTX` | `CLIENT_METADATA_USE_SESSION_DATABASE` | `CLIENT_PREFETCH_THREADS` | `CLIENT_RESULT_CHUNK_SIZE` | `CLIENT_SESSION_KEEP_ALIVE` | `CLIENT_SESSION_KEEP_ALIVE_HEARTBEAT_FREQUENCY` | `CLIENT_TIMESTAMP_TYPE_MAPPING` | `ENABLE_UNLOAD_PHYSICAL_TYPE_OPTIMIZATION` | `CLIENT_RESULT_COLUMN_CASE_INSENSITIVE` | `DATE_INPUT_FORMAT` | `DATE_OUTPUT_FORMAT` | `ERROR_ON_NONDETERMINISTIC_MERGE` | `ERROR_ON_NONDETERMINISTIC_UPDATE` | `GEOGRAPHY_OUTPUT_FORMAT` | `GEOMETRY_OUTPUT_FORMAT` | `JDBC_TREAT_DECIMAL_AS_INT` | `JDBC_TREAT_TIMESTAMP_NTZ_AS_UTC` | `JDBC_USE_SESSION_TIMEZONE` | `JSON_INDENT` | `LOCK_TIMEOUT` | `MULTI_STATEMENT_COUNT` | `NOORDER_SEQUENCE_AS_DEFAULT` | `ODBC_TREAT_DECIMAL_AS_INT` | `QUERY_TAG` | `QUOTED_IDENTIFIERS_IGNORE_CASE` | `ROWS_PER_RESULTSET` | `S3_STAGE_VPCE_DNS_NAME` | `SEARCH_PATH` | `SIMULATED_DATA_SHARING_CONSUMER` | `STATEMENT_TIMEOUT_IN_SECONDS` | `STRICT_JSON_OUTPUT` | `TIME_INPUT_FORMAT` | `TIME_OUTPUT_FORMAT` | `TIMESTAMP_DAY_IS_ALWAYS_24H` | `TIMESTAMP_INPUT_FORMAT` | `TIMESTAMP_LTZ_OUTPUT_FORMAT` | 
`TIMESTAMP_NTZ_OUTPUT_FORMAT` | `TIMESTAMP_OUTPUT_FORMAT` | `TIMESTAMP_TYPE_MAPPING` | `TIMESTAMP_TZ_OUTPUT_FORMAT` | `TIMEZONE` | `TRANSACTION_ABORT_ON_ERROR` | `TRANSACTION_DEFAULT_ISOLATION_LEVEL` | `TWO_DIGIT_CENTURY_START` | `UNSUPPORTED_DDL_ACTION` | `USE_CACHED_RESULT` | `WEEK_OF_YEAR_POLICY` | `WEEK_START` | `CATALOG` | `DATA_RETENTION_TIME_IN_DAYS` | `DEFAULT_DDL_COLLATION` | `EXTERNAL_VOLUME` | `LOG_LEVEL` | `MAX_CONCURRENCY_LEVEL` | `MAX_DATA_EXTENSION_TIME_IN_DAYS` | `PIPE_EXECUTION_PAUSED` | `PREVENT_UNLOAD_TO_INTERNAL_STAGES` | `REPLACE_INVALID_CHARACTERS` | `STATEMENT_QUEUED_TIMEOUT_IN_SECONDS` | `STORAGE_SERIALIZATION_POLICY` | `SHARE_RESTRICTIONS` | `SUSPEND_TASK_AFTER_NUM_FAILURES` | `TRACE_LEVEL` | `USER_TASK_MANAGED_INITIAL_WAREHOUSE_SIZE` | `USER_TASK_TIMEOUT_MS` | `TASK_AUTO_RETRY_ATTEMPTS` | `USER_TASK_MINIMUM_TRIGGER_INTERVAL_IN_SECONDS` | `METRIC_LEVEL` | `ENABLE_CONSOLE_OUTPUT` | `ENABLE_UNREDACTED_QUERY_SYNTAX_ERROR`. +- `value` (String) Value of account parameter, as a string. Constraints are the same as those for the parameters in Snowflake documentation. The parameter values are validated in Snowflake. 
### Read-Only @@ -43,5 +46,5 @@ resource "snowflake_account_parameter" "p2" { Import is supported using the following syntax: ```shell -terraform import snowflake_account_parameter.p +terraform import snowflake_account_parameter.p '' ``` diff --git a/examples/resources/snowflake_account_parameter/import.sh b/examples/resources/snowflake_account_parameter/import.sh index c1dd2640d1..c5a0051fd8 100644 --- a/examples/resources/snowflake_account_parameter/import.sh +++ b/examples/resources/snowflake_account_parameter/import.sh @@ -1 +1 @@ -terraform import snowflake_account_parameter.p +terraform import snowflake_account_parameter.p '' diff --git a/pkg/acceptance/bettertestspoc/assert/objectparametersassert/database_parameters_snowflake_gen.go b/pkg/acceptance/bettertestspoc/assert/objectparametersassert/database_parameters_snowflake_gen.go index 6c33814187..8937660447 100644 --- a/pkg/acceptance/bettertestspoc/assert/objectparametersassert/database_parameters_snowflake_gen.go +++ b/pkg/acceptance/bettertestspoc/assert/objectparametersassert/database_parameters_snowflake_gen.go @@ -81,7 +81,7 @@ func (d *DatabaseParametersAssert) HasAllDefaults() *DatabaseParametersAssert { HasDefaultParameterValueOnLevel(sdk.DatabaseParameterUserTaskManagedInitialWarehouseSize, sdk.ParameterTypeSnowflakeDefault). HasDefaultParameterValueOnLevel(sdk.DatabaseParameterUserTaskTimeoutMs, sdk.ParameterTypeSnowflakeDefault). HasDefaultParameterValueOnLevel(sdk.DatabaseParameterUserTaskMinimumTriggerIntervalInSeconds, sdk.ParameterTypeSnowflakeDefault). - HasDefaultParameterValueOnLevel(sdk.DatabaseParameterQuotedIdentifiersIgnoreCase, sdk.ParameterTypeAccount). + HasDefaultParameterValueOnLevel(sdk.DatabaseParameterQuotedIdentifiersIgnoreCase, sdk.ParameterTypeSnowflakeDefault). 
HasDefaultParameterValueOnLevel(sdk.DatabaseParameterEnableConsoleOutput, sdk.ParameterTypeSnowflakeDefault) } diff --git a/pkg/acceptance/bettertestspoc/assert/objectparametersassert/gen/object_parameters_def.go b/pkg/acceptance/bettertestspoc/assert/objectparametersassert/gen/object_parameters_def.go index fd716a8993..9f9c3c6733 100644 --- a/pkg/acceptance/bettertestspoc/assert/objectparametersassert/gen/object_parameters_def.go +++ b/pkg/acceptance/bettertestspoc/assert/objectparametersassert/gen/object_parameters_def.go @@ -36,7 +36,7 @@ var allObjectsParameters = []SnowflakeObjectParameters{ {ParameterName: string(sdk.UserParameterNetworkPolicy), ParameterType: "string", DefaultLevel: "sdk.ParameterTypeSnowflakeDefault"}, {ParameterName: string(sdk.UserParameterPreventUnloadToInternalStages), ParameterType: "bool", DefaultValue: "false", DefaultLevel: "sdk.ParameterTypeSnowflakeDefault"}, {ParameterName: string(sdk.UserParameterAbortDetachedQuery), ParameterType: "bool", DefaultValue: "false", DefaultLevel: "sdk.ParameterTypeSnowflakeDefault"}, - {ParameterName: string(sdk.UserParameterAutocommit), ParameterType: "bool", DefaultValue: "true", DefaultLevel: "sdk.ParameterTypeAccount"}, + {ParameterName: string(sdk.UserParameterAutocommit), ParameterType: "bool", DefaultValue: "true", DefaultLevel: "sdk.ParameterTypeSnowflakeDefault"}, {ParameterName: string(sdk.UserParameterBinaryInputFormat), ParameterType: "sdk.BinaryInputFormat", DefaultValue: "sdk.BinaryInputFormatHex", DefaultLevel: "sdk.ParameterTypeSnowflakeDefault"}, {ParameterName: string(sdk.UserParameterBinaryOutputFormat), ParameterType: "sdk.BinaryOutputFormat", DefaultValue: "sdk.BinaryOutputFormatHex", DefaultLevel: "sdk.ParameterTypeSnowflakeDefault"}, {ParameterName: string(sdk.UserParameterClientMemoryLimit), ParameterType: "int", DefaultValue: "1536", DefaultLevel: "sdk.ParameterTypeSnowflakeDefault"}, @@ -64,7 +64,7 @@ var allObjectsParameters = []SnowflakeObjectParameters{ {ParameterName: 
string(sdk.UserParameterNoorderSequenceAsDefault), ParameterType: "bool", DefaultValue: "true", DefaultLevel: "sdk.ParameterTypeSnowflakeDefault"}, {ParameterName: string(sdk.UserParameterOdbcTreatDecimalAsInt), ParameterType: "bool", DefaultValue: "false", DefaultLevel: "sdk.ParameterTypeSnowflakeDefault"}, {ParameterName: string(sdk.UserParameterQueryTag), ParameterType: "string", DefaultLevel: "sdk.ParameterTypeSnowflakeDefault"}, - {ParameterName: string(sdk.UserParameterQuotedIdentifiersIgnoreCase), ParameterType: "bool", DefaultValue: "false", DefaultLevel: "sdk.ParameterTypeAccount"}, + {ParameterName: string(sdk.UserParameterQuotedIdentifiersIgnoreCase), ParameterType: "bool", DefaultValue: "false", DefaultLevel: "sdk.ParameterTypeSnowflakeDefault"}, {ParameterName: string(sdk.UserParameterRowsPerResultset), ParameterType: "int", DefaultValue: "0", DefaultLevel: "sdk.ParameterTypeSnowflakeDefault"}, {ParameterName: string(sdk.UserParameterS3StageVpceDnsName), ParameterType: "string", DefaultLevel: "sdk.ParameterTypeSnowflakeDefault"}, {ParameterName: string(sdk.UserParameterSearchPath), ParameterType: "string", DefaultValue: "$current, $public", DefaultLevel: "sdk.ParameterTypeSnowflakeDefault"}, @@ -83,7 +83,7 @@ var allObjectsParameters = []SnowflakeObjectParameters{ {ParameterName: string(sdk.UserParameterTimeInputFormat), ParameterType: "string", DefaultValue: "AUTO", DefaultLevel: "sdk.ParameterTypeSnowflakeDefault"}, {ParameterName: string(sdk.UserParameterTimeOutputFormat), ParameterType: "string", DefaultValue: "HH24:MI:SS", DefaultLevel: "sdk.ParameterTypeSnowflakeDefault"}, {ParameterName: string(sdk.UserParameterTraceLevel), ParameterType: "sdk.TraceLevel", DefaultValue: "sdk.TraceLevelOff", DefaultLevel: "sdk.ParameterTypeSnowflakeDefault"}, - {ParameterName: string(sdk.UserParameterTransactionAbortOnError), ParameterType: "bool", DefaultValue: "false", DefaultLevel: "sdk.ParameterTypeAccount"}, + {ParameterName: 
string(sdk.UserParameterTransactionAbortOnError), ParameterType: "bool", DefaultValue: "false", DefaultLevel: "sdk.ParameterTypeSnowflakeDefault"}, {ParameterName: string(sdk.UserParameterTransactionDefaultIsolationLevel), ParameterType: "sdk.TransactionDefaultIsolationLevel", DefaultValue: "sdk.TransactionDefaultIsolationLevelReadCommitted", DefaultLevel: "sdk.ParameterTypeSnowflakeDefault"}, {ParameterName: string(sdk.UserParameterTwoDigitCenturyStart), ParameterType: "int", DefaultValue: "1970", DefaultLevel: "sdk.ParameterTypeSnowflakeDefault"}, // TODO [SNOW-1501905]: quick workaround for now: lowercase for ignore in snowflake by default but uppercase for FAIL @@ -124,7 +124,7 @@ var allObjectsParameters = []SnowflakeObjectParameters{ {ParameterName: string(sdk.DatabaseParameterUserTaskManagedInitialWarehouseSize), ParameterType: "sdk.WarehouseSize", DefaultValue: "sdk.WarehouseSizeMedium", DefaultLevel: "sdk.ParameterTypeSnowflakeDefault"}, {ParameterName: string(sdk.DatabaseParameterUserTaskTimeoutMs), ParameterType: "int", DefaultValue: "3600000", DefaultLevel: "sdk.ParameterTypeSnowflakeDefault"}, {ParameterName: string(sdk.DatabaseParameterUserTaskMinimumTriggerIntervalInSeconds), ParameterType: "int", DefaultValue: "30", DefaultLevel: "sdk.ParameterTypeSnowflakeDefault"}, - {ParameterName: string(sdk.DatabaseParameterQuotedIdentifiersIgnoreCase), ParameterType: "bool", DefaultValue: "false", DefaultLevel: "sdk.ParameterTypeAccount"}, + {ParameterName: string(sdk.DatabaseParameterQuotedIdentifiersIgnoreCase), ParameterType: "bool", DefaultValue: "false", DefaultLevel: "sdk.ParameterTypeSnowflakeDefault"}, {ParameterName: string(sdk.DatabaseParameterEnableConsoleOutput), ParameterType: "bool", DefaultValue: "false", DefaultLevel: "sdk.ParameterTypeSnowflakeDefault"}, }, }, @@ -139,7 +139,7 @@ var allObjectsParameters = []SnowflakeObjectParameters{ {ParameterName: string(sdk.TaskParameterUserTaskMinimumTriggerIntervalInSeconds), ParameterType: "int", 
DefaultValue: "30", DefaultLevel: "sdk.ParameterTypeSnowflakeDefault"}, {ParameterName: string(sdk.TaskParameterUserTaskTimeoutMs), ParameterType: "int", DefaultValue: "3600000", DefaultLevel: "sdk.ParameterTypeSnowflakeDefault"}, {ParameterName: string(sdk.TaskParameterAbortDetachedQuery), ParameterType: "bool", DefaultValue: "false", DefaultLevel: "sdk.ParameterTypeSnowflakeDefault"}, - {ParameterName: string(sdk.TaskParameterAutocommit), ParameterType: "bool", DefaultValue: "true", DefaultLevel: "sdk.ParameterTypeAccount"}, + {ParameterName: string(sdk.TaskParameterAutocommit), ParameterType: "bool", DefaultValue: "true", DefaultLevel: "sdk.ParameterTypeSnowflakeDefault"}, {ParameterName: string(sdk.TaskParameterBinaryInputFormat), ParameterType: "sdk.BinaryInputFormat", DefaultValue: "sdk.BinaryInputFormatHex", DefaultLevel: "sdk.ParameterTypeSnowflakeDefault"}, {ParameterName: string(sdk.TaskParameterBinaryOutputFormat), ParameterType: "sdk.BinaryOutputFormat", DefaultValue: "sdk.BinaryOutputFormatHex", DefaultLevel: "sdk.ParameterTypeSnowflakeDefault"}, {ParameterName: string(sdk.TaskParameterClientMemoryLimit), ParameterType: "int", DefaultValue: "1536", DefaultLevel: "sdk.ParameterTypeSnowflakeDefault"}, @@ -166,7 +166,7 @@ var allObjectsParameters = []SnowflakeObjectParameters{ {ParameterName: string(sdk.TaskParameterNoorderSequenceAsDefault), ParameterType: "bool", DefaultValue: "true", DefaultLevel: "sdk.ParameterTypeSnowflakeDefault"}, {ParameterName: string(sdk.TaskParameterOdbcTreatDecimalAsInt), ParameterType: "bool", DefaultValue: "false", DefaultLevel: "sdk.ParameterTypeSnowflakeDefault"}, {ParameterName: string(sdk.TaskParameterQueryTag), ParameterType: "string", DefaultLevel: "sdk.ParameterTypeSnowflakeDefault"}, - {ParameterName: string(sdk.TaskParameterQuotedIdentifiersIgnoreCase), ParameterType: "bool", DefaultValue: "false", DefaultLevel: "sdk.ParameterTypeAccount"}, + {ParameterName: string(sdk.TaskParameterQuotedIdentifiersIgnoreCase), 
ParameterType: "bool", DefaultValue: "false", DefaultLevel: "sdk.ParameterTypeSnowflakeDefault"}, {ParameterName: string(sdk.TaskParameterRowsPerResultset), ParameterType: "int", DefaultValue: "0", DefaultLevel: "sdk.ParameterTypeSnowflakeDefault"}, {ParameterName: string(sdk.TaskParameterS3StageVpceDnsName), ParameterType: "string", DefaultLevel: "sdk.ParameterTypeSnowflakeDefault"}, {ParameterName: string(sdk.TaskParameterSearchPath), ParameterType: "string", DefaultValue: "$current, $public", DefaultLevel: "sdk.ParameterTypeSnowflakeDefault"}, @@ -184,7 +184,7 @@ var allObjectsParameters = []SnowflakeObjectParameters{ {ParameterName: string(sdk.TaskParameterTimeInputFormat), ParameterType: "string", DefaultValue: "AUTO", DefaultLevel: "sdk.ParameterTypeSnowflakeDefault"}, {ParameterName: string(sdk.TaskParameterTimeOutputFormat), ParameterType: "string", DefaultValue: "HH24:MI:SS", DefaultLevel: "sdk.ParameterTypeSnowflakeDefault"}, {ParameterName: string(sdk.TaskParameterTraceLevel), ParameterType: "sdk.TraceLevel", DefaultValue: "sdk.TraceLevelOff", DefaultLevel: "sdk.ParameterTypeSnowflakeDefault"}, - {ParameterName: string(sdk.TaskParameterTransactionAbortOnError), ParameterType: "bool", DefaultValue: "false", DefaultLevel: "sdk.ParameterTypeAccount"}, + {ParameterName: string(sdk.TaskParameterTransactionAbortOnError), ParameterType: "bool", DefaultValue: "false", DefaultLevel: "sdk.ParameterTypeSnowflakeDefault"}, {ParameterName: string(sdk.TaskParameterTransactionDefaultIsolationLevel), ParameterType: "sdk.TransactionDefaultIsolationLevel", DefaultValue: "sdk.TransactionDefaultIsolationLevelReadCommitted", DefaultLevel: "sdk.ParameterTypeSnowflakeDefault"}, {ParameterName: string(sdk.TaskParameterTwoDigitCenturyStart), ParameterType: "int", DefaultValue: "1970", DefaultLevel: "sdk.ParameterTypeSnowflakeDefault"}, // TODO [SNOW-1501905]: quick workaround for now: lowercase for ignore in snowflake by default but uppercase for FAIL diff --git 
a/pkg/acceptance/bettertestspoc/assert/objectparametersassert/task_parameters_snowflake_gen.go b/pkg/acceptance/bettertestspoc/assert/objectparametersassert/task_parameters_snowflake_gen.go index b5c571149d..85ef79ebd6 100644 --- a/pkg/acceptance/bettertestspoc/assert/objectparametersassert/task_parameters_snowflake_gen.go +++ b/pkg/acceptance/bettertestspoc/assert/objectparametersassert/task_parameters_snowflake_gen.go @@ -74,7 +74,7 @@ func (t *TaskParametersAssert) HasAllDefaults() *TaskParametersAssert { HasDefaultParameterValueOnLevel(sdk.TaskParameterUserTaskMinimumTriggerIntervalInSeconds, sdk.ParameterTypeSnowflakeDefault). HasDefaultParameterValueOnLevel(sdk.TaskParameterUserTaskTimeoutMs, sdk.ParameterTypeSnowflakeDefault). HasDefaultParameterValueOnLevel(sdk.TaskParameterAbortDetachedQuery, sdk.ParameterTypeSnowflakeDefault). - HasDefaultParameterValueOnLevel(sdk.TaskParameterAutocommit, sdk.ParameterTypeAccount). + HasDefaultParameterValueOnLevel(sdk.TaskParameterAutocommit, sdk.ParameterTypeSnowflakeDefault). HasDefaultParameterValueOnLevel(sdk.TaskParameterBinaryInputFormat, sdk.ParameterTypeSnowflakeDefault). HasDefaultParameterValueOnLevel(sdk.TaskParameterBinaryOutputFormat, sdk.ParameterTypeSnowflakeDefault). HasDefaultParameterValueOnLevel(sdk.TaskParameterClientMemoryLimit, sdk.ParameterTypeSnowflakeDefault). @@ -101,7 +101,7 @@ func (t *TaskParametersAssert) HasAllDefaults() *TaskParametersAssert { HasDefaultParameterValueOnLevel(sdk.TaskParameterNoorderSequenceAsDefault, sdk.ParameterTypeSnowflakeDefault). HasDefaultParameterValueOnLevel(sdk.TaskParameterOdbcTreatDecimalAsInt, sdk.ParameterTypeSnowflakeDefault). HasDefaultParameterValueOnLevel(sdk.TaskParameterQueryTag, sdk.ParameterTypeSnowflakeDefault). - HasDefaultParameterValueOnLevel(sdk.TaskParameterQuotedIdentifiersIgnoreCase, sdk.ParameterTypeAccount). + HasDefaultParameterValueOnLevel(sdk.TaskParameterQuotedIdentifiersIgnoreCase, sdk.ParameterTypeSnowflakeDefault). 
HasDefaultParameterValueOnLevel(sdk.TaskParameterRowsPerResultset, sdk.ParameterTypeSnowflakeDefault). HasDefaultParameterValueOnLevel(sdk.TaskParameterS3StageVpceDnsName, sdk.ParameterTypeSnowflakeDefault). HasDefaultParameterValueOnLevel(sdk.TaskParameterSearchPath, sdk.ParameterTypeSnowflakeDefault). @@ -119,7 +119,7 @@ func (t *TaskParametersAssert) HasAllDefaults() *TaskParametersAssert { HasDefaultParameterValueOnLevel(sdk.TaskParameterTimeInputFormat, sdk.ParameterTypeSnowflakeDefault). HasDefaultParameterValueOnLevel(sdk.TaskParameterTimeOutputFormat, sdk.ParameterTypeSnowflakeDefault). HasDefaultParameterValueOnLevel(sdk.TaskParameterTraceLevel, sdk.ParameterTypeSnowflakeDefault). - HasDefaultParameterValueOnLevel(sdk.TaskParameterTransactionAbortOnError, sdk.ParameterTypeAccount). + HasDefaultParameterValueOnLevel(sdk.TaskParameterTransactionAbortOnError, sdk.ParameterTypeSnowflakeDefault). HasDefaultParameterValueOnLevel(sdk.TaskParameterTransactionDefaultIsolationLevel, sdk.ParameterTypeSnowflakeDefault). HasDefaultParameterValueOnLevel(sdk.TaskParameterTwoDigitCenturyStart, sdk.ParameterTypeSnowflakeDefault). HasDefaultParameterValueOnLevel(sdk.TaskParameterUnsupportedDdlAction, sdk.ParameterTypeSnowflakeDefault). diff --git a/pkg/acceptance/bettertestspoc/assert/objectparametersassert/user_parameters_snowflake_gen.go b/pkg/acceptance/bettertestspoc/assert/objectparametersassert/user_parameters_snowflake_gen.go index 4f860eb38d..433fc0fa70 100644 --- a/pkg/acceptance/bettertestspoc/assert/objectparametersassert/user_parameters_snowflake_gen.go +++ b/pkg/acceptance/bettertestspoc/assert/objectparametersassert/user_parameters_snowflake_gen.go @@ -72,7 +72,7 @@ func (u *UserParametersAssert) HasAllDefaults() *UserParametersAssert { HasDefaultParameterValueOnLevel(sdk.UserParameterNetworkPolicy, sdk.ParameterTypeSnowflakeDefault). HasDefaultParameterValueOnLevel(sdk.UserParameterPreventUnloadToInternalStages, sdk.ParameterTypeSnowflakeDefault). 
HasDefaultParameterValueOnLevel(sdk.UserParameterAbortDetachedQuery, sdk.ParameterTypeSnowflakeDefault). - HasDefaultParameterValueOnLevel(sdk.UserParameterAutocommit, sdk.ParameterTypeAccount). + HasDefaultParameterValueOnLevel(sdk.UserParameterAutocommit, sdk.ParameterTypeSnowflakeDefault). HasDefaultParameterValueOnLevel(sdk.UserParameterBinaryInputFormat, sdk.ParameterTypeSnowflakeDefault). HasDefaultParameterValueOnLevel(sdk.UserParameterBinaryOutputFormat, sdk.ParameterTypeSnowflakeDefault). HasDefaultParameterValueOnLevel(sdk.UserParameterClientMemoryLimit, sdk.ParameterTypeSnowflakeDefault). @@ -100,7 +100,7 @@ func (u *UserParametersAssert) HasAllDefaults() *UserParametersAssert { HasDefaultParameterValueOnLevel(sdk.UserParameterNoorderSequenceAsDefault, sdk.ParameterTypeSnowflakeDefault). HasDefaultParameterValueOnLevel(sdk.UserParameterOdbcTreatDecimalAsInt, sdk.ParameterTypeSnowflakeDefault). HasDefaultParameterValueOnLevel(sdk.UserParameterQueryTag, sdk.ParameterTypeSnowflakeDefault). - HasDefaultParameterValueOnLevel(sdk.UserParameterQuotedIdentifiersIgnoreCase, sdk.ParameterTypeAccount). + HasDefaultParameterValueOnLevel(sdk.UserParameterQuotedIdentifiersIgnoreCase, sdk.ParameterTypeSnowflakeDefault). HasDefaultParameterValueOnLevel(sdk.UserParameterRowsPerResultset, sdk.ParameterTypeSnowflakeDefault). HasDefaultParameterValueOnLevel(sdk.UserParameterS3StageVpceDnsName, sdk.ParameterTypeSnowflakeDefault). HasDefaultParameterValueOnLevel(sdk.UserParameterSearchPath, sdk.ParameterTypeSnowflakeDefault). @@ -119,7 +119,7 @@ func (u *UserParametersAssert) HasAllDefaults() *UserParametersAssert { HasDefaultParameterValueOnLevel(sdk.UserParameterTimeInputFormat, sdk.ParameterTypeSnowflakeDefault). HasDefaultParameterValueOnLevel(sdk.UserParameterTimeOutputFormat, sdk.ParameterTypeSnowflakeDefault). HasDefaultParameterValueOnLevel(sdk.UserParameterTraceLevel, sdk.ParameterTypeSnowflakeDefault). 
- HasDefaultParameterValueOnLevel(sdk.UserParameterTransactionAbortOnError, sdk.ParameterTypeAccount). + HasDefaultParameterValueOnLevel(sdk.UserParameterTransactionAbortOnError, sdk.ParameterTypeSnowflakeDefault). HasDefaultParameterValueOnLevel(sdk.UserParameterTransactionDefaultIsolationLevel, sdk.ParameterTypeSnowflakeDefault). HasDefaultParameterValueOnLevel(sdk.UserParameterTwoDigitCenturyStart, sdk.ParameterTypeSnowflakeDefault). HasDefaultParameterValueOnLevel(sdk.UserParameterUnsupportedDdlAction, sdk.ParameterTypeSnowflakeDefault). diff --git a/pkg/acceptance/bettertestspoc/assert/resourceassert/account_parameter_resource_gen.go b/pkg/acceptance/bettertestspoc/assert/resourceassert/account_parameter_resource_gen.go new file mode 100644 index 0000000000..a021e54a07 --- /dev/null +++ b/pkg/acceptance/bettertestspoc/assert/resourceassert/account_parameter_resource_gen.go @@ -0,0 +1,57 @@ +// Code generated by assertions generator; DO NOT EDIT. + +package resourceassert + +import ( + "testing" + + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/bettertestspoc/assert" +) + +type AccountParameterResourceAssert struct { + *assert.ResourceAssert +} + +func AccountParameterResource(t *testing.T, name string) *AccountParameterResourceAssert { + t.Helper() + + return &AccountParameterResourceAssert{ + ResourceAssert: assert.NewResourceAssert(name, "resource"), + } +} + +func ImportedAccountParameterResource(t *testing.T, id string) *AccountParameterResourceAssert { + t.Helper() + + return &AccountParameterResourceAssert{ + ResourceAssert: assert.NewImportedResourceAssert(id, "imported resource"), + } +} + +/////////////////////////////////// +// Attribute value string checks // +/////////////////////////////////// + +func (a *AccountParameterResourceAssert) HasKeyString(expected string) *AccountParameterResourceAssert { + a.AddAssertion(assert.ValueSet("key", expected)) + return a +} + +func (a *AccountParameterResourceAssert) 
HasValueString(expected string) *AccountParameterResourceAssert { + a.AddAssertion(assert.ValueSet("value", expected)) + return a +} + +//////////////////////////// +// Attribute empty checks // +//////////////////////////// + +func (a *AccountParameterResourceAssert) HasNoKey() *AccountParameterResourceAssert { + a.AddAssertion(assert.ValueNotSet("key")) + return a +} + +func (a *AccountParameterResourceAssert) HasNoValue() *AccountParameterResourceAssert { + a.AddAssertion(assert.ValueNotSet("value")) + return a +} diff --git a/pkg/acceptance/bettertestspoc/assert/resourceassert/gen/resource_schema_def.go b/pkg/acceptance/bettertestspoc/assert/resourceassert/gen/resource_schema_def.go index e39d6f0533..e757ffc0a6 100644 --- a/pkg/acceptance/bettertestspoc/assert/resourceassert/gen/resource_schema_def.go +++ b/pkg/acceptance/bettertestspoc/assert/resourceassert/gen/resource_schema_def.go @@ -121,6 +121,10 @@ var allResourceSchemaDefs = []ResourceSchemaDef{ name: "Account", schema: resources.Account().Schema, }, + { + name: "AccountParameter", + schema: resources.AccountParameter().Schema, + }, { name: "OauthIntegrationForCustomClients", schema: resources.OauthIntegrationForCustomClients().Schema, diff --git a/pkg/acceptance/bettertestspoc/config/model/account_parameter_model_gen.go b/pkg/acceptance/bettertestspoc/config/model/account_parameter_model_gen.go new file mode 100644 index 0000000000..9fc1cff42a --- /dev/null +++ b/pkg/acceptance/bettertestspoc/config/model/account_parameter_model_gen.go @@ -0,0 +1,70 @@ +// Code generated by config model builder generator; DO NOT EDIT. 
+ +package model + +import ( + tfconfig "github.com/hashicorp/terraform-plugin-testing/config" + + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/bettertestspoc/config" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/provider/resources" +) + +type AccountParameterModel struct { + Key tfconfig.Variable `json:"key,omitempty"` + Value tfconfig.Variable `json:"value,omitempty"` + + *config.ResourceModelMeta +} + +///////////////////////////////////////////////// +// Basic builders (resource name and required) // +///////////////////////////////////////////////// + +func AccountParameter( + resourceName string, + key string, + value string, +) *AccountParameterModel { + a := &AccountParameterModel{ResourceModelMeta: config.Meta(resourceName, resources.AccountParameter)} + a.WithKey(key) + a.WithValue(value) + return a +} + +func AccountParameterWithDefaultMeta( + key string, + value string, +) *AccountParameterModel { + a := &AccountParameterModel{ResourceModelMeta: config.DefaultMeta(resources.AccountParameter)} + a.WithKey(key) + a.WithValue(value) + return a +} + +///////////////////////////////// +// below all the proper values // +///////////////////////////////// + +func (a *AccountParameterModel) WithKey(key string) *AccountParameterModel { + a.Key = tfconfig.StringVariable(key) + return a +} + +func (a *AccountParameterModel) WithValue(value string) *AccountParameterModel { + a.Value = tfconfig.StringVariable(value) + return a +} + +////////////////////////////////////////// +// below it's possible to set any value // +////////////////////////////////////////// + +func (a *AccountParameterModel) WithKeyValue(value tfconfig.Variable) *AccountParameterModel { + a.Key = value + return a +} + +func (a *AccountParameterModel) WithValueValue(value tfconfig.Variable) *AccountParameterModel { + a.Value = value + return a +} diff --git a/pkg/acceptance/check_destroy.go b/pkg/acceptance/check_destroy.go index 5959720df0..6d056a613b 
100644 --- a/pkg/acceptance/check_destroy.go +++ b/pkg/acceptance/check_destroy.go @@ -630,3 +630,19 @@ func TestAccCheckGrantApplicationRoleDestroy(s *terraform.State) error { } return nil } + +func CheckAccountParameterUnset(t *testing.T, paramName sdk.AccountParameter) func(*terraform.State) error { + t.Helper() + return func(s *terraform.State) error { + for _, rs := range s.RootModule().Resources { + if rs.Type != "snowflake_account_parameter" { + continue + } + parameter := TestClient().Parameter.ShowAccountParameter(t, paramName) + if parameter.Level != sdk.ParameterTypeSnowflakeDefault { + return fmt.Errorf("expected parameter level empty, got %v", parameter.Level) + } + } + return nil + } +} diff --git a/pkg/resources/account_parameter.go b/pkg/resources/account_parameter.go index 95cea28579..2f8021c080 100644 --- a/pkg/resources/account_parameter.go +++ b/pkg/resources/account_parameter.go @@ -2,8 +2,10 @@ package resources import ( "context" + "errors" "fmt" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/helpers" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/provider/resources" "github.com/hashicorp/terraform-plugin-sdk/v2/diag" @@ -15,15 +17,17 @@ import ( var accountParameterSchema = map[string]*schema.Schema{ "key": { - Type: schema.TypeString, - Required: true, - ForceNew: true, - Description: "Name of account parameter. Valid values are those in [account parameters](https://docs.snowflake.com/en/sql-reference/parameters.html#account-parameters).", + Type: schema.TypeString, + Required: true, + ForceNew: true, + ValidateDiagFunc: sdkValidation(sdk.ToAccountParameter), + DiffSuppressFunc: NormalizeAndCompare(sdk.ToAccountParameter), + Description: fmt.Sprintf("Name of account parameter. Valid values are (case-insensitive): %s.", possibleValuesListed(sdk.AsStringList(sdk.AllAccountParameters))), }, "value": { Type: schema.TypeString, Required: true, - Description: "Value of account parameter, as a string. 
Constraints are the same as those for the parameters in Snowflake documentation.", + Description: "Value of account parameter, as a string. Constraints are the same as those for the parameters in Snowflake documentation. The parameter values are validated in Snowflake.", }, } @@ -34,6 +38,8 @@ func AccountParameter() *schema.Resource { UpdateContext: TrackingUpdateWrapper(resources.AccountParameter, UpdateAccountParameter), DeleteContext: TrackingDeleteWrapper(resources.AccountParameter, DeleteAccountParameter), + Description: "Resource used to manage current account parameters. For more information, check [parameters documentation](https://docs.snowflake.com/en/sql-reference/parameters).", + Schema: accountParameterSchema, Importer: &schema.ResourceImporter{ StateContext: schema.ImportStatePassthroughContext, @@ -42,56 +48,58 @@ func AccountParameter() *schema.Resource { } // CreateAccountParameter implements schema.CreateFunc. -func CreateAccountParameter(ctx context.Context, d *schema.ResourceData, meta interface{}) diag.Diagnostics { +func CreateAccountParameter(ctx context.Context, d *schema.ResourceData, meta any) diag.Diagnostics { client := meta.(*provider.Context).Client key := d.Get("key").(string) value := d.Get("value").(string) - parameter := sdk.AccountParameter(key) - err := client.Parameters.SetAccountParameter(ctx, parameter, value) + parameter, err := sdk.ToAccountParameter(key) + if err != nil { + return diag.FromErr(err) + } + err = client.Parameters.SetAccountParameter(ctx, parameter, value) if err != nil { return diag.FromErr(err) } - d.SetId(key) + d.SetId(helpers.EncodeResourceIdentifier(string(parameter))) return ReadAccountParameter(ctx, d, meta) } // ReadAccountParameter implements schema.ReadFunc. 
-func ReadAccountParameter(ctx context.Context, d *schema.ResourceData, meta interface{}) diag.Diagnostics { +func ReadAccountParameter(ctx context.Context, d *schema.ResourceData, meta any) diag.Diagnostics { client := meta.(*provider.Context).Client - parameterName := d.Id() - parameter, err := client.Parameters.ShowAccountParameter(ctx, sdk.AccountParameter(parameterName)) + parameterNameRaw := d.Id() + parameterName, err := sdk.ToAccountParameter(parameterNameRaw) if err != nil { - return diag.FromErr(fmt.Errorf("error reading account parameter err = %w", err)) + return diag.FromErr(err) } - err = d.Set("value", parameter.Value) + parameter, err := client.Parameters.ShowAccountParameter(ctx, parameterName) if err != nil { - return diag.FromErr(fmt.Errorf("error setting account parameter err = %w", err)) + return diag.FromErr(fmt.Errorf("reading account parameter: %w", err)) } - err = d.Set("key", parameter.Key) - if err != nil { - return diag.FromErr(fmt.Errorf("error setting account parameter err = %w", err)) + errs := errors.Join( + d.Set("value", parameter.Value), + d.Set("key", parameter.Key), + ) + if errs != nil { + return diag.FromErr(errs) } return nil } // UpdateAccountParameter implements schema.UpdateFunc. -func UpdateAccountParameter(ctx context.Context, d *schema.ResourceData, meta interface{}) diag.Diagnostics { +func UpdateAccountParameter(ctx context.Context, d *schema.ResourceData, meta any) diag.Diagnostics { return CreateAccountParameter(ctx, d, meta) } // DeleteAccountParameter implements schema.DeleteFunc. 
-func DeleteAccountParameter(ctx context.Context, d *schema.ResourceData, meta interface{}) diag.Diagnostics { +func DeleteAccountParameter(ctx context.Context, d *schema.ResourceData, meta any) diag.Diagnostics { client := meta.(*provider.Context).Client key := d.Get("key").(string) parameter := sdk.AccountParameter(key) - defaultParameter, err := client.Parameters.ShowAccountParameter(ctx, sdk.AccountParameter(key)) - if err != nil { - return diag.FromErr(err) - } - defaultValue := defaultParameter.Default - err = client.Parameters.SetAccountParameter(ctx, parameter, defaultValue) + + err := client.Parameters.UnsetAccountParameter(ctx, parameter) if err != nil { - return diag.FromErr(fmt.Errorf("error resetting account parameter err = %w", err)) + return diag.FromErr(fmt.Errorf("unsetting account parameter: %w", err)) } d.SetId("") diff --git a/pkg/resources/account_parameter_acceptance_test.go b/pkg/resources/account_parameter_acceptance_test.go index 8b269e8983..ab20439532 100644 --- a/pkg/resources/account_parameter_acceptance_test.go +++ b/pkg/resources/account_parameter_acceptance_test.go @@ -1,59 +1,55 @@ package resources_test import ( - "fmt" "testing" acc "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/bettertestspoc/assert" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/bettertestspoc/assert/resourceassert" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/bettertestspoc/config" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/bettertestspoc/config/model" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk" "github.com/hashicorp/terraform-plugin-testing/helper/resource" "github.com/hashicorp/terraform-plugin-testing/tfversion" ) func TestAcc_AccountParameter(t *testing.T) { + model := model.AccountParameter("test", string(sdk.AccountParameterAllowIDToken), "true") 
resource.Test(t, resource.TestCase{ ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, PreCheck: func() { acc.TestAccPreCheck(t) }, TerraformVersionChecks: []tfversion.TerraformVersionCheck{ tfversion.RequireAbove(tfversion.Version1_5_0), }, - CheckDestroy: nil, + CheckDestroy: acc.CheckAccountParameterUnset(t, sdk.AccountParameterAllowIDToken), Steps: []resource.TestStep{ { - Config: accountParameterBasic("ALLOW_ID_TOKEN", "true"), - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr("snowflake_account_parameter.p", "key", "ALLOW_ID_TOKEN"), - resource.TestCheckResourceAttr("snowflake_account_parameter.p", "value", "true"), + Config: config.FromModel(t, model), + Check: assert.AssertThat(t, resourceassert.AccountParameterResource(t, model.ResourceReference()). + HasKeyString(string(sdk.AccountParameterAllowIDToken)). + HasValueString("true"), ), }, }, }) } -func accountParameterBasic(key, value string) string { - s := ` -resource "snowflake_account_parameter" "p" { - key = "%s" - value = "%s" -} -` - return fmt.Sprintf(s, key, value) -} - func TestAcc_AccountParameter_PREVENT_LOAD_FROM_INLINE_URL(t *testing.T) { + model := model.AccountParameter("test", string(sdk.AccountParameterPreventLoadFromInlineURL), "true") resource.Test(t, resource.TestCase{ ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, PreCheck: func() { acc.TestAccPreCheck(t) }, TerraformVersionChecks: []tfversion.TerraformVersionCheck{ tfversion.RequireAbove(tfversion.Version1_5_0), }, - CheckDestroy: nil, + CheckDestroy: acc.CheckAccountParameterUnset(t, sdk.AccountParameterPreventLoadFromInlineURL), Steps: []resource.TestStep{ { - Config: accountParameterBasic("PREVENT_LOAD_FROM_INLINE_URL", "true"), - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr("snowflake_account_parameter.p", "key", "PREVENT_LOAD_FROM_INLINE_URL"), - resource.TestCheckResourceAttr("snowflake_account_parameter.p", "value", "true"), + Config: 
config.FromModel(t, model), + Check: assert.AssertThat(t, resourceassert.AccountParameterResource(t, model.ResourceReference()). + HasKeyString(string(sdk.AccountParameterPreventLoadFromInlineURL)). + HasValueString("true"), ), }, }, @@ -61,45 +57,45 @@ func TestAcc_AccountParameter_PREVENT_LOAD_FROM_INLINE_URL(t *testing.T) { } func TestAcc_AccountParameter_REQUIRE_STORAGE_INTEGRATION_FOR_STAGE_CREATION(t *testing.T) { + model := model.AccountParameter("test", string(sdk.AccountParameterRequireStorageIntegrationForStageCreation), "true") resource.Test(t, resource.TestCase{ ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, PreCheck: func() { acc.TestAccPreCheck(t) }, TerraformVersionChecks: []tfversion.TerraformVersionCheck{ tfversion.RequireAbove(tfversion.Version1_5_0), }, - CheckDestroy: nil, + CheckDestroy: acc.CheckAccountParameterUnset(t, sdk.AccountParameterRequireStorageIntegrationForStageCreation), Steps: []resource.TestStep{ { - Config: accountParameterBasic("REQUIRE_STORAGE_INTEGRATION_FOR_STAGE_CREATION", "true"), - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr("snowflake_account_parameter.p", "key", "REQUIRE_STORAGE_INTEGRATION_FOR_STAGE_CREATION"), - resource.TestCheckResourceAttr("snowflake_account_parameter.p", "value", "true"), + Config: config.FromModel(t, model), + Check: assert.AssertThat(t, resourceassert.AccountParameterResource(t, model.ResourceReference()). + HasKeyString(string(sdk.AccountParameterRequireStorageIntegrationForStageCreation)). + HasValueString("true"), ), }, }, }) } -// TODO [SNOW-1528546]: unskip func TestAcc_AccountParameter_Issue2573(t *testing.T) { - t.Skipf("The cleanup for parameter is currently incorrect and this test messes with other ones. 
Skipping until SNOW-1528546 is resolved.") + model := model.AccountParameter("test", string(sdk.AccountParameterTimezone), "Etc/UTC") resource.Test(t, resource.TestCase{ ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, PreCheck: func() { acc.TestAccPreCheck(t) }, TerraformVersionChecks: []tfversion.TerraformVersionCheck{ tfversion.RequireAbove(tfversion.Version1_5_0), }, - CheckDestroy: nil, + CheckDestroy: acc.CheckAccountParameterUnset(t, sdk.AccountParameterTimezone), Steps: []resource.TestStep{ { - Config: accountParameterBasic("TIMEZONE", "Etc/UTC"), - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr("snowflake_account_parameter.p", "key", "TIMEZONE"), - resource.TestCheckResourceAttr("snowflake_account_parameter.p", "value", "Etc/UTC"), + Config: config.FromModel(t, model), + Check: assert.AssertThat(t, resourceassert.AccountParameterResource(t, model.ResourceReference()). + HasKeyString(string(sdk.AccountParameterTimezone)). + HasValueString("Etc/UTC"), ), }, { - ResourceName: "snowflake_account_parameter.p", + ResourceName: "snowflake_account_parameter.test", ImportState: true, ImportStateVerify: true, ImportStateVerifyIgnore: []string{}, @@ -109,24 +105,24 @@ func TestAcc_AccountParameter_Issue2573(t *testing.T) { } func TestAcc_AccountParameter_Issue3025(t *testing.T) { - t.Skipf("The cleanup for parameter is currently incorrect and this test messes with other ones. 
Skipping until SNOW-1528546 is resolved.") + model := model.AccountParameter("test", string(sdk.AccountParameterOAuthAddPrivilegedRolesToBlockedList), "true") resource.Test(t, resource.TestCase{ ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, PreCheck: func() { acc.TestAccPreCheck(t) }, TerraformVersionChecks: []tfversion.TerraformVersionCheck{ tfversion.RequireAbove(tfversion.Version1_5_0), }, - CheckDestroy: nil, + CheckDestroy: acc.CheckAccountParameterUnset(t, sdk.AccountParameterOAuthAddPrivilegedRolesToBlockedList), Steps: []resource.TestStep{ { - Config: accountParameterBasic("OAUTH_ADD_PRIVILEGED_ROLES_TO_BLOCKED_LIST", "true"), - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr("snowflake_account_parameter.p", "key", "OAUTH_ADD_PRIVILEGED_ROLES_TO_BLOCKED_LIST"), - resource.TestCheckResourceAttr("snowflake_account_parameter.p", "value", "true"), + Config: config.FromModel(t, model), + Check: assert.AssertThat(t, resourceassert.AccountParameterResource(t, model.ResourceReference()). + HasKeyString(string(sdk.AccountParameterOAuthAddPrivilegedRolesToBlockedList)). 
+ HasValueString("true"), ), }, { - ResourceName: "snowflake_account_parameter.p", + ResourceName: "snowflake_account_parameter.test", ImportState: true, ImportStateVerify: true, ImportStateVerifyIgnore: []string{}, @@ -134,3 +130,5 @@ func TestAcc_AccountParameter_Issue3025(t *testing.T) { }, }) } + +// TODO(next pr): add more acc tests for the remaining parameters diff --git a/pkg/sdk/parameters.go b/pkg/sdk/parameters.go index 44f73e9e55..377d46a5fd 100644 --- a/pkg/sdk/parameters.go +++ b/pkg/sdk/parameters.go @@ -5,6 +5,7 @@ import ( "database/sql" "errors" "fmt" + "slices" "strconv" "strings" ) @@ -22,6 +23,7 @@ var _ Parameters = (*parameters)(nil) type Parameters interface { SetAccountParameter(ctx context.Context, parameter AccountParameter, value string) error + UnsetAccountParameter(ctx context.Context, parameter AccountParameter) error SetSessionParameterOnAccount(ctx context.Context, parameter SessionParameter, value string) error SetSessionParameterOnUser(ctx context.Context, userID AccountObjectIdentifier, parameter SessionParameter, value string) error SetObjectParameterOnAccount(ctx context.Context, parameter ObjectParameter, value string) error @@ -174,6 +176,64 @@ func (parameters *parameters) SetAccountParameter(ctx context.Context, parameter return nil } +// TODO(next pr): add integration tests +func (parameters *parameters) UnsetAccountParameter(ctx context.Context, parameter AccountParameter) error { + opts := AlterAccountOptions{ + Unset: &AccountUnset{ + Parameters: &AccountLevelParametersUnset{ + AccountParameters: &AccountParametersUnset{}, + }, + }, + } + switch parameter { + case AccountParameterAllowClientMFACaching: + opts.Unset.Parameters.AccountParameters.AllowClientMFACaching = Pointer(true) + case AccountParameterAllowIDToken: + opts.Unset.Parameters.AccountParameters.AllowIDToken = Pointer(true) + case AccountParameterClientEncryptionKeySize: + opts.Unset.Parameters.AccountParameters.ClientEncryptionKeySize = Pointer(true) + case 
AccountParameterEnableIdentifierFirstLogin: + opts.Unset.Parameters.AccountParameters.EnableIdentifierFirstLogin = Pointer(true) + case AccountParameterEnableInternalStagesPrivatelink: + opts.Unset.Parameters.AccountParameters.EnableInternalStagesPrivatelink = Pointer(true) + case AccountParameterEnableTriSecretAndRekeyOptOutForImageRepository: + opts.Unset.Parameters.AccountParameters.EnableTriSecretAndRekeyOptOutForImageRepository = Pointer(true) + case AccountParameterEnableTriSecretAndRekeyOptOutForSpcsBlockStorage: + opts.Unset.Parameters.AccountParameters.EnableTriSecretAndRekeyOptOutForSpcsBlockStorage = Pointer(true) + case AccountParameterEnableUnredactedQuerySyntaxError: + opts.Unset.Parameters.AccountParameters.EnableUnredactedQuerySyntaxError = Pointer(true) + case AccountParameterEventTable: + opts.Unset.Parameters.AccountParameters.EventTable = Pointer(true) + case AccountParameterExternalOAuthAddPrivilegedRolesToBlockedList: + opts.Unset.Parameters.AccountParameters.ExternalOAuthAddPrivilegedRolesToBlockedList = Pointer(true) + case AccountParameterInitialReplicationSizeLimitInTB: + opts.Unset.Parameters.AccountParameters.InitialReplicationSizeLimitInTB = Pointer(true) + case AccountParameterMinDataRetentionTimeInDays: + opts.Unset.Parameters.AccountParameters.MinDataRetentionTimeInDays = Pointer(true) + case AccountParameterNetworkPolicy: + opts.Unset.Parameters.AccountParameters.NetworkPolicy = Pointer(true) + case AccountParameterOAuthAddPrivilegedRolesToBlockedList: + opts.Unset.Parameters.AccountParameters.OAuthAddPrivilegedRolesToBlockedList = Pointer(true) + case AccountParameterPeriodicDataRekeying: + opts.Unset.Parameters.AccountParameters.PeriodicDataRekeying = Pointer(true) + case AccountParameterPreventLoadFromInlineURL: + opts.Unset.Parameters.AccountParameters.PreventLoadFromInlineURL = Pointer(true) + case AccountParameterPreventUnloadToInlineURL: + opts.Unset.Parameters.AccountParameters.PreventUnloadToInlineURL = Pointer(true) + case 
AccountParameterPreventUnloadToInternalStages: + opts.Unset.Parameters.AccountParameters.PreventUnloadToInternalStages = Pointer(true) + case AccountParameterRequireStorageIntegrationForStageCreation: + opts.Unset.Parameters.AccountParameters.RequireStorageIntegrationForStageCreation = Pointer(true) + case AccountParameterRequireStorageIntegrationForStageOperation: + opts.Unset.Parameters.AccountParameters.RequireStorageIntegrationForStageOperation = Pointer(true) + case AccountParameterSSOLoginPage: + opts.Unset.Parameters.AccountParameters.SSOLoginPage = Pointer(true) + default: + return parameters.UnsetSessionParameterOnAccount(ctx, SessionParameter(parameter)) + } + return parameters.client.Accounts.Alter(ctx, &opts) +} + func (parameters *parameters) SetSessionParameterOnAccount(ctx context.Context, parameter SessionParameter, value string) error { sp := &SessionParameters{} err := sp.setParam(parameter, value) @@ -192,6 +252,24 @@ func (parameters *parameters) SetSessionParameterOnAccount(ctx context.Context, } } +func (parameters *parameters) UnsetSessionParameterOnAccount(ctx context.Context, parameter SessionParameter) error { + sp := &SessionParametersUnset{} + err := sp.setParam(parameter) + if err == nil { + opts := AlterAccountOptions{Unset: &AccountUnset{Parameters: &AccountLevelParametersUnset{SessionParameters: sp}}} + err = parameters.client.Accounts.Alter(ctx, &opts) + if err != nil { + return err + } + return nil + } else { + if strings.Contains(err.Error(), "session parameter is not supported") { + return parameters.UnsetObjectParameterOnAccount(ctx, ObjectParameter(parameter)) + } + return err + } +} + func (parameters *parameters) SetSessionParameterOnUser(ctx context.Context, userId AccountObjectIdentifier, parameter SessionParameter, value string) error { sp := &SessionParameters{} err := sp.setParam(parameter, value) @@ -297,6 +375,49 @@ func (parameters *parameters) SetObjectParameterOnAccount(ctx context.Context, p return nil } +func 
(parameters *parameters) UnsetObjectParameterOnAccount(ctx context.Context, parameter ObjectParameter) error { + opts := AlterAccountOptions{Unset: &AccountUnset{Parameters: &AccountLevelParametersUnset{ObjectParameters: &ObjectParametersUnset{}}}} + switch parameter { + case ObjectParameterCatalog: + opts.Unset.Parameters.ObjectParameters.Catalog = Pointer(true) + case ObjectParameterDataRetentionTimeInDays: + opts.Unset.Parameters.ObjectParameters.DataRetentionTimeInDays = Pointer(true) + case ObjectParameterDefaultDDLCollation: + opts.Unset.Parameters.ObjectParameters.DefaultDDLCollation = Pointer(true) + case ObjectParameterLogLevel: + opts.Unset.Parameters.ObjectParameters.LogLevel = Pointer(true) + case ObjectParameterMaxConcurrencyLevel: + opts.Unset.Parameters.ObjectParameters.MaxConcurrencyLevel = Pointer(true) + case ObjectParameterMaxDataExtensionTimeInDays: + opts.Unset.Parameters.ObjectParameters.MaxDataExtensionTimeInDays = Pointer(true) + case ObjectParameterPipeExecutionPaused: + opts.Unset.Parameters.ObjectParameters.PipeExecutionPaused = Pointer(true) + case ObjectParameterPreventUnloadToInternalStages: + opts.Unset.Parameters.ObjectParameters.PreventUnloadToInternalStages = Pointer(true) + case ObjectParameterStatementQueuedTimeoutInSeconds: + opts.Unset.Parameters.ObjectParameters.StatementQueuedTimeoutInSeconds = Pointer(true) + case ObjectParameterStatementTimeoutInSeconds: + opts.Unset.Parameters.ObjectParameters.StatementTimeoutInSeconds = Pointer(true) + case ObjectParameterNetworkPolicy: + opts.Unset.Parameters.ObjectParameters.NetworkPolicy = Pointer(true) + case ObjectParameterShareRestrictions: + opts.Unset.Parameters.ObjectParameters.ShareRestrictions = Pointer(true) + case ObjectParameterSuspendTaskAfterNumFailures: + opts.Unset.Parameters.ObjectParameters.SuspendTaskAfterNumFailures = Pointer(true) + case ObjectParameterTraceLevel: + opts.Unset.Parameters.ObjectParameters.TraceLevel = Pointer(true) + case 
ObjectParameterUserTaskManagedInitialWarehouseSize: + opts.Unset.Parameters.ObjectParameters.UserTaskManagedInitialWarehouseSize = Pointer(true) + case ObjectParameterUserTaskTimeoutMs: + opts.Unset.Parameters.ObjectParameters.UserTaskTimeoutMs = Pointer(true) + case ObjectParameterEnableUnredactedQuerySyntaxError: + opts.Unset.Parameters.ObjectParameters.EnableUnredactedQuerySyntaxError = Pointer(true) + default: + return fmt.Errorf("invalid object parameter: %v", string(parameter)) + } + return parameters.client.Accounts.Alter(ctx, &opts) +} + type setParameterOnObject struct { alter bool `ddl:"static" sql:"ALTER"` objectType ObjectType `ddl:"keyword"` @@ -344,6 +465,7 @@ type AccountParameter string // https://docs.snowflake.com/en/sql-reference/parameters#parameter-hierarchy-and-types // Account Parameters include Session Parameters, Object Parameters and User Parameters const ( + // TODO(next pr): add remaining parameters; also in parameters_impl.go // Account Parameters AccountParameterAllowClientMFACaching AccountParameter = "ALLOW_CLIENT_MFA_CACHING" AccountParameterAllowIDToken AccountParameter = "ALLOW_ID_TOKEN" // #nosec G101 @@ -367,45 +489,59 @@ const ( AccountParameterSSOLoginPage AccountParameter = "SSO_LOGIN_PAGE" // Session Parameters (inherited) - AccountParameterAbortDetachedQuery AccountParameter = "ABORT_DETACHED_QUERY" - AccountParameterAutocommit AccountParameter = "AUTOCOMMIT" - AccountParameterBinaryInputFormat AccountParameter = "BINARY_INPUT_FORMAT" - AccountParameterBinaryOutputFormat AccountParameter = "BINARY_OUTPUT_FORMAT" - AccountParameterClientMetadataRequestUseConnectionCtx AccountParameter = "CLIENT_METADATA_REQUEST_USE_CONNECTION_CTX" - AccountParameterClientMetadataUseSessionDatabase AccountParameter = "CLIENT_METADATA_USE_SESSION_DATABASE" - AccountParameterClientResultColumnCaseInsensitive AccountParameter = "CLIENT_RESULT_COLUMN_CASE_INSENSITIVE" - AccountParameterDateInputFormat AccountParameter = "DATE_INPUT_FORMAT" - 
AccountParameterGeographyOutputFormat AccountParameter = "GEOGRAPHY_OUTPUT_FORMAT" - AccountParameterDateOutputFormat AccountParameter = "DATE_OUTPUT_FORMAT" - AccountParameterErrorOnNondeterministicMerge AccountParameter = "ERROR_ON_NONDETERMINISTIC_MERGE" - AccountParameterErrorOnNondeterministicUpdate AccountParameter = "ERROR_ON_NONDETERMINISTIC_UPDATE" - AccountParameterJSONIndent AccountParameter = "JSON_INDENT" - AccountParameterLockTimeout AccountParameter = "LOCK_TIMEOUT" - AccountParameterMultiStatementCount AccountParameter = "MULTI_STATEMENT_COUNT" - AccountParameterQueryTag AccountParameter = "QUERY_TAG" - AccountParameterQuotedIdentifiersIgnoreCase AccountParameter = "QUOTED_IDENTIFIERS_IGNORE_CASE" - AccountParameterRowsPerResultset AccountParameter = "ROWS_PER_RESULTSET" - AccountParameterS3StageVpceDnsName AccountParameter = "S3_STAGE_VPCE_DNS_NAME" - AccountParameterSimulatedDataSharingConsumer AccountParameter = "SIMULATED_DATA_SHARING_CONSUMER" - AccountParameterStatementTimeoutInSeconds AccountParameter = "STATEMENT_TIMEOUT_IN_SECONDS" - AccountParameterStrictJSONOutput AccountParameter = "STRICT_JSON_OUTPUT" - AccountParameterTimeInputFormat AccountParameter = "TIME_INPUT_FORMAT" - AccountParameterTimeOutputFormat AccountParameter = "TIME_OUTPUT_FORMAT" - AccountParameterTimestampDayIsAlways24h AccountParameter = "TIMESTAMP_DAY_IS_ALWAYS_24H" - AccountParameterTimestampInputFormat AccountParameter = "TIMESTAMP_INPUT_FORMAT" - AccountParameterTimestampLtzOutputFormat AccountParameter = "TIMESTAMP_LTZ_OUTPUT_FORMAT" - AccountParameterTimestampNtzOutputFormat AccountParameter = "TIMESTAMP_NTZ_OUTPUT_FORMAT" - AccountParameterTimestampOutputFormat AccountParameter = "TIMESTAMP_OUTPUT_FORMAT" - AccountParameterTimestampTypeMapping AccountParameter = "TIMESTAMP_TYPE_MAPPING" - AccountParameterTimestampTzOutputFormat AccountParameter = "TIMESTAMP_TZ_OUTPUT_FORMAT" - AccountParameterTimezone AccountParameter = "TIMEZONE" - 
AccountParameterTransactionAbortOnError AccountParameter = "TRANSACTION_ABORT_ON_ERROR" - AccountParameterTransactionDefaultIsolationLevel AccountParameter = "TRANSACTION_DEFAULT_ISOLATION_LEVEL" - AccountParameterTwoDigitCenturyStart AccountParameter = "TWO_DIGIT_CENTURY_START" - AccountParameterUnsupportedDdlAction AccountParameter = "UNSUPPORTED_DDL_ACTION" - AccountParameterUseCachedResult AccountParameter = "USE_CACHED_RESULT" - AccountParameterWeekOfYearPolicy AccountParameter = "WEEK_OF_YEAR_POLICY" - AccountParameterWeekStart AccountParameter = "WEEK_START" + AccountParameterAbortDetachedQuery AccountParameter = "ABORT_DETACHED_QUERY" + AccountParameterAutocommit AccountParameter = "AUTOCOMMIT" + AccountParameterBinaryInputFormat AccountParameter = "BINARY_INPUT_FORMAT" + AccountParameterBinaryOutputFormat AccountParameter = "BINARY_OUTPUT_FORMAT" + AccountParameterClientMemoryLimit AccountParameter = "CLIENT_MEMORY_LIMIT" + AccountParameterClientMetadataRequestUseConnectionCtx AccountParameter = "CLIENT_METADATA_REQUEST_USE_CONNECTION_CTX" + AccountParameterClientMetadataUseSessionDatabase AccountParameter = "CLIENT_METADATA_USE_SESSION_DATABASE" + AccountParameterClientPrefetchThreads AccountParameter = "CLIENT_PREFETCH_THREADS" + AccountParameterClientResultChunkSize AccountParameter = "CLIENT_RESULT_CHUNK_SIZE" + AccountParameterClientResultColumnCaseInsensitive AccountParameter = "CLIENT_RESULT_COLUMN_CASE_INSENSITIVE" + AccountParameterClientSessionKeepAlive AccountParameter = "CLIENT_SESSION_KEEP_ALIVE" + AccountParameterClientSessionKeepAliveHeartbeatFrequency AccountParameter = "CLIENT_SESSION_KEEP_ALIVE_HEARTBEAT_FREQUENCY" + AccountParameterClientTimestampTypeMapping AccountParameter = "CLIENT_TIMESTAMP_TYPE_MAPPING" + AccountParameterDateInputFormat AccountParameter = "DATE_INPUT_FORMAT" + AccountParameterDateOutputFormat AccountParameter = "DATE_OUTPUT_FORMAT" + AccountParameterEnableUnloadPhysicalTypeOptimization AccountParameter = 
"ENABLE_UNLOAD_PHYSICAL_TYPE_OPTIMIZATION" + AccountParameterErrorOnNondeterministicMerge AccountParameter = "ERROR_ON_NONDETERMINISTIC_MERGE" + AccountParameterErrorOnNondeterministicUpdate AccountParameter = "ERROR_ON_NONDETERMINISTIC_UPDATE" + AccountParameterGeographyOutputFormat AccountParameter = "GEOGRAPHY_OUTPUT_FORMAT" + AccountParameterGeometryOutputFormat AccountParameter = "GEOMETRY_OUTPUT_FORMAT" + AccountParameterJdbcTreatDecimalAsInt AccountParameter = "JDBC_TREAT_DECIMAL_AS_INT" + AccountParameterJdbcTreatTimestampNtzAsUtc AccountParameter = "JDBC_TREAT_TIMESTAMP_NTZ_AS_UTC" + AccountParameterJdbcUseSessionTimezone AccountParameter = "JDBC_USE_SESSION_TIMEZONE" + AccountParameterJSONIndent AccountParameter = "JSON_INDENT" + AccountParameterLockTimeout AccountParameter = "LOCK_TIMEOUT" + AccountParameterMultiStatementCount AccountParameter = "MULTI_STATEMENT_COUNT" + AccountParameterNoorderSequenceAsDefault AccountParameter = "NOORDER_SEQUENCE_AS_DEFAULT" + AccountParameterOdbcTreatDecimalAsInt AccountParameter = "ODBC_TREAT_DECIMAL_AS_INT" + AccountParameterQueryTag AccountParameter = "QUERY_TAG" + AccountParameterQuotedIdentifiersIgnoreCase AccountParameter = "QUOTED_IDENTIFIERS_IGNORE_CASE" + AccountParameterRowsPerResultset AccountParameter = "ROWS_PER_RESULTSET" + AccountParameterS3StageVpceDnsName AccountParameter = "S3_STAGE_VPCE_DNS_NAME" + AccountParameterSearchPath AccountParameter = "SEARCH_PATH" + AccountParameterSimulatedDataSharingConsumer AccountParameter = "SIMULATED_DATA_SHARING_CONSUMER" + AccountParameterStatementTimeoutInSeconds AccountParameter = "STATEMENT_TIMEOUT_IN_SECONDS" + AccountParameterStrictJSONOutput AccountParameter = "STRICT_JSON_OUTPUT" + AccountParameterTimeInputFormat AccountParameter = "TIME_INPUT_FORMAT" + AccountParameterTimeOutputFormat AccountParameter = "TIME_OUTPUT_FORMAT" + AccountParameterTimestampDayIsAlways24h AccountParameter = "TIMESTAMP_DAY_IS_ALWAYS_24H" + AccountParameterTimestampInputFormat 
AccountParameter = "TIMESTAMP_INPUT_FORMAT" + AccountParameterTimestampLtzOutputFormat AccountParameter = "TIMESTAMP_LTZ_OUTPUT_FORMAT" + AccountParameterTimestampNtzOutputFormat AccountParameter = "TIMESTAMP_NTZ_OUTPUT_FORMAT" + AccountParameterTimestampOutputFormat AccountParameter = "TIMESTAMP_OUTPUT_FORMAT" + AccountParameterTimestampTypeMapping AccountParameter = "TIMESTAMP_TYPE_MAPPING" + AccountParameterTimestampTzOutputFormat AccountParameter = "TIMESTAMP_TZ_OUTPUT_FORMAT" + AccountParameterTimezone AccountParameter = "TIMEZONE" + AccountParameterTransactionAbortOnError AccountParameter = "TRANSACTION_ABORT_ON_ERROR" + AccountParameterTransactionDefaultIsolationLevel AccountParameter = "TRANSACTION_DEFAULT_ISOLATION_LEVEL" + AccountParameterTwoDigitCenturyStart AccountParameter = "TWO_DIGIT_CENTURY_START" + AccountParameterUnsupportedDdlAction AccountParameter = "UNSUPPORTED_DDL_ACTION" + AccountParameterUseCachedResult AccountParameter = "USE_CACHED_RESULT" + AccountParameterWeekOfYearPolicy AccountParameter = "WEEK_OF_YEAR_POLICY" + AccountParameterWeekStart AccountParameter = "WEEK_START" // Object Parameters (inherited) AccountParameterCatalog AccountParameter = "CATALOG" @@ -433,6 +569,112 @@ const ( AccountParameterEnableUnredactedQuerySyntaxError AccountParameter = "ENABLE_UNREDACTED_QUERY_SYNTAX_ERROR" ) +var AllAccountParameters = []AccountParameter{ + AccountParameterAllowClientMFACaching, + AccountParameterAllowIDToken, + AccountParameterClientEncryptionKeySize, + AccountParameterEnableIdentifierFirstLogin, + AccountParameterEnableInternalStagesPrivatelink, + AccountParameterEnableTriSecretAndRekeyOptOutForImageRepository, + AccountParameterEnableTriSecretAndRekeyOptOutForSpcsBlockStorage, + AccountParameterEventTable, + AccountParameterExternalOAuthAddPrivilegedRolesToBlockedList, + AccountParameterInitialReplicationSizeLimitInTB, + AccountParameterMinDataRetentionTimeInDays, + AccountParameterNetworkPolicy, + 
AccountParameterOAuthAddPrivilegedRolesToBlockedList, + AccountParameterPeriodicDataRekeying, + AccountParameterPreventLoadFromInlineURL, + AccountParameterPreventUnloadToInlineURL, + AccountParameterPreventUnloadToInternalStages, + AccountParameterRequireStorageIntegrationForStageCreation, + AccountParameterRequireStorageIntegrationForStageOperation, + AccountParameterSSOLoginPage, + AccountParameterAbortDetachedQuery, + AccountParameterAutocommit, + AccountParameterBinaryInputFormat, + AccountParameterBinaryOutputFormat, + AccountParameterClientMemoryLimit, + AccountParameterClientMetadataRequestUseConnectionCtx, + AccountParameterClientMetadataUseSessionDatabase, + AccountParameterClientPrefetchThreads, + AccountParameterClientResultChunkSize, + AccountParameterClientSessionKeepAlive, + AccountParameterClientSessionKeepAliveHeartbeatFrequency, + AccountParameterClientTimestampTypeMapping, + AccountParameterEnableUnloadPhysicalTypeOptimization, + AccountParameterClientResultColumnCaseInsensitive, + AccountParameterDateInputFormat, + AccountParameterDateOutputFormat, + AccountParameterErrorOnNondeterministicMerge, + AccountParameterErrorOnNondeterministicUpdate, + AccountParameterGeographyOutputFormat, + AccountParameterGeometryOutputFormat, + AccountParameterJdbcTreatDecimalAsInt, + AccountParameterJdbcTreatTimestampNtzAsUtc, + AccountParameterJdbcUseSessionTimezone, + AccountParameterJSONIndent, + AccountParameterLockTimeout, + AccountParameterMultiStatementCount, + AccountParameterNoorderSequenceAsDefault, + AccountParameterOdbcTreatDecimalAsInt, + AccountParameterQueryTag, + AccountParameterQuotedIdentifiersIgnoreCase, + AccountParameterRowsPerResultset, + AccountParameterS3StageVpceDnsName, + AccountParameterSearchPath, + AccountParameterSimulatedDataSharingConsumer, + AccountParameterStatementTimeoutInSeconds, + AccountParameterStrictJSONOutput, + AccountParameterTimeInputFormat, + AccountParameterTimeOutputFormat, + AccountParameterTimestampDayIsAlways24h, 
+ AccountParameterTimestampInputFormat, + AccountParameterTimestampLtzOutputFormat, + AccountParameterTimestampNtzOutputFormat, + AccountParameterTimestampOutputFormat, + AccountParameterTimestampTypeMapping, + AccountParameterTimestampTzOutputFormat, + AccountParameterTimezone, + AccountParameterTransactionAbortOnError, + AccountParameterTransactionDefaultIsolationLevel, + AccountParameterTwoDigitCenturyStart, + AccountParameterUnsupportedDdlAction, + AccountParameterUseCachedResult, + AccountParameterWeekOfYearPolicy, + AccountParameterWeekStart, + AccountParameterCatalog, + AccountParameterDataRetentionTimeInDays, + AccountParameterDefaultDDLCollation, + AccountParameterExternalVolume, + AccountParameterLogLevel, + AccountParameterMaxConcurrencyLevel, + AccountParameterMaxDataExtensionTimeInDays, + AccountParameterPipeExecutionPaused, + AccountParameterPreventUnloadToInternalStages, + AccountParameterReplaceInvalidCharacters, + AccountParameterStatementQueuedTimeoutInSeconds, + AccountParameterStorageSerializationPolicy, + AccountParameterShareRestrictions, + AccountParameterSuspendTaskAfterNumFailures, + AccountParameterTraceLevel, + AccountParameterUserTaskManagedInitialWarehouseSize, + AccountParameterUserTaskTimeoutMs, + AccountParameterTaskAutoRetryAttempts, + AccountParameterUserTaskMinimumTriggerIntervalInSeconds, + AccountParameterMetricLevel, + AccountParameterEnableConsoleOutput, + AccountParameterEnableUnredactedQuerySyntaxError, +} + +func ToAccountParameter(s string) (AccountParameter, error) { + s = strings.ToUpper(s) + if !slices.Contains(AllAccountParameters, AccountParameter(s)) { + return "", fmt.Errorf("invalid account parameter: %s", s) + } + return AccountParameter(s), nil +} + type SessionParameter string const ( @@ -922,11 +1164,14 @@ type AccountParametersUnset struct { EnableTriSecretAndRekeyOptOutForImageRepository *bool `ddl:"keyword" sql:"ENABLE_TRI_SECRET_AND_REKEY_OPT_OUT_FOR_IMAGE_REPOSITORY"` 
EnableTriSecretAndRekeyOptOutForSpcsBlockStorage *bool `ddl:"keyword" sql:"ENABLE_TRI_SECRET_AND_REKEY_OPT_OUT_FOR_SPCS_BLOCK_STORAGE"` EventTable *bool `ddl:"keyword" sql:"EVENT_TABLE"` + EnableUnredactedQuerySyntaxError *bool `ddl:"keyword" sql:"ENABLE_UNREDACTED_QUERY_SYNTAX_ERROR"` ExternalOAuthAddPrivilegedRolesToBlockedList *bool `ddl:"keyword" sql:"EXTERNAL_OAUTH_ADD_PRIVILEGED_ROLES_TO_BLOCKED_LIST"` InitialReplicationSizeLimitInTB *bool `ddl:"keyword" sql:"INITIAL_REPLICATION_SIZE_LIMIT_IN_TB"` MinDataRetentionTimeInDays *bool `ddl:"keyword" sql:"MIN_DATA_RETENTION_TIME_IN_DAYS"` NetworkPolicy *bool `ddl:"keyword" sql:"NETWORK_POLICY"` + OAuthAddPrivilegedRolesToBlockedList *bool `ddl:"keyword" sql:"OAUTH_ADD_PRIVILEGED_ROLES_TO_BLOCKED_LIST"` PeriodicDataRekeying *bool `ddl:"keyword" sql:"PERIODIC_DATA_REKEYING"` + PreventLoadFromInlineURL *bool `ddl:"keyword" sql:"PREVENT_LOAD_FROM_INLINE_URL"` PreventUnloadToInlineURL *bool `ddl:"keyword" sql:"PREVENT_UNLOAD_TO_INLINE_URL"` PreventUnloadToInternalStages *bool `ddl:"keyword" sql:"PREVENT_UNLOAD_TO_INTERNAL_STAGES"` RequireStorageIntegrationForStageCreation *bool `ddl:"keyword" sql:"REQUIRE_STORAGE_INTEGRATION_FOR_STAGE_CREATION"` @@ -1267,6 +1512,7 @@ func (v *SessionParametersUnset) validate() error { // ObjectParameters is based on https://docs.snowflake.com/en/sql-reference/parameters#object-parameters. 
type ObjectParameters struct { + Catalog *string `ddl:"parameter" sql:"CATALOG"` DataRetentionTimeInDays *int `ddl:"parameter" sql:"DATA_RETENTION_TIME_IN_DAYS"` DefaultDDLCollation *string `ddl:"parameter,single_quotes" sql:"DEFAULT_DDL_COLLATION"` EnableUnredactedQuerySyntaxError *bool `ddl:"parameter" sql:"ENABLE_UNREDACTED_QUERY_SYNTAX_ERROR"` @@ -1283,7 +1529,6 @@ type ObjectParameters struct { TraceLevel *TraceLevel `ddl:"parameter" sql:"TRACE_LEVEL"` UserTaskManagedInitialWarehouseSize *WarehouseSize `ddl:"parameter" sql:"USER_TASK_MANAGED_INITIAL_WAREHOUSE_SIZE"` UserTaskTimeoutMs *int `ddl:"parameter" sql:"USER_TASK_TIMEOUT_MS"` - Catalog *string `ddl:"parameter" sql:"CATALOG"` } func (v *ObjectParameters) validate() error { @@ -1327,8 +1572,10 @@ func (v *ObjectParameters) validate() error { } type ObjectParametersUnset struct { + Catalog *bool `ddl:"keyword" sql:"CATALOG"` DataRetentionTimeInDays *bool `ddl:"keyword" sql:"DATA_RETENTION_TIME_IN_DAYS"` DefaultDDLCollation *bool `ddl:"keyword" sql:"DEFAULT_DDL_COLLATION"` + EnableUnredactedQuerySyntaxError *bool `ddl:"keyword" sql:"ENABLE_UNREDACTED_QUERY_SYNTAX_ERROR"` LogLevel *bool `ddl:"keyword" sql:"LOG_LEVEL"` MaxConcurrencyLevel *bool `ddl:"keyword" sql:"MAX_CONCURRENCY_LEVEL"` MaxDataExtensionTimeInDays *bool `ddl:"keyword" sql:"MAX_DATA_EXTENSION_TIME_IN_DAYS"` diff --git a/pkg/sdk/parameters_impl.go b/pkg/sdk/parameters_impl.go index 098e9e1d03..d01af6d161 100644 --- a/pkg/sdk/parameters_impl.go +++ b/pkg/sdk/parameters_impl.go @@ -187,14 +187,14 @@ func (sessionParametersUnset *SessionParametersUnset) setParam(parameter Session unsetField = &sessionParametersUnset.ClientMemoryLimit case SessionParameterClientMetadataRequestUseConnectionCtx: unsetField = &sessionParametersUnset.ClientMetadataRequestUseConnectionCtx + case SessionParameterClientMetadataUseSessionDatabase: + unsetField = &sessionParametersUnset.ClientMetadataUseSessionDatabase + case 
SessionParameterClientResultColumnCaseInsensitive: + unsetField = &sessionParametersUnset.ClientResultColumnCaseInsensitive case SessionParameterClientPrefetchThreads: unsetField = &sessionParametersUnset.ClientPrefetchThreads case SessionParameterClientResultChunkSize: unsetField = &sessionParametersUnset.ClientResultChunkSize - case SessionParameterClientResultColumnCaseInsensitive: - unsetField = &sessionParametersUnset.ClientResultColumnCaseInsensitive - case SessionParameterClientMetadataUseSessionDatabase: - unsetField = &sessionParametersUnset.ClientMetadataUseSessionDatabase case SessionParameterClientSessionKeepAlive: unsetField = &sessionParametersUnset.ClientSessionKeepAlive case SessionParameterClientSessionKeepAliveHeartbeatFrequency: diff --git a/pkg/sdk/parameters_test.go b/pkg/sdk/parameters_test.go index f7f1a88233..a288f9eee5 100644 --- a/pkg/sdk/parameters_test.go +++ b/pkg/sdk/parameters_test.go @@ -2,6 +2,8 @@ package sdk import ( "testing" + + "github.com/stretchr/testify/require" ) // TODO: add more tests @@ -51,3 +53,117 @@ func TestUnSetObjectParameterNetworkPolicyOnUser(t *testing.T) { assertOptsValidAndSQLEquals(t, opts, `ALTER USER "TEST_USER" UNSET NETWORK_POLICY`) }) } + +func TestToAccountParameter(t *testing.T) { + type test struct { + input string + want AccountParameter + } + + valid := []test{ + // Case insensitive. + {input: "allow_client_mfa_caching", want: AccountParameterAllowClientMFACaching}, + + // Supported Values. 
+ {input: "ALLOW_CLIENT_MFA_CACHING", want: AccountParameterAllowClientMFACaching}, + {input: "ALLOW_ID_TOKEN", want: AccountParameterAllowIDToken}, + {input: "CLIENT_ENCRYPTION_KEY_SIZE", want: AccountParameterClientEncryptionKeySize}, + {input: "ENABLE_IDENTIFIER_FIRST_LOGIN", want: AccountParameterEnableIdentifierFirstLogin}, + {input: "ENABLE_INTERNAL_STAGES_PRIVATELINK", want: AccountParameterEnableInternalStagesPrivatelink}, + {input: "ENABLE_TRI_SECRET_AND_REKEY_OPT_OUT_FOR_IMAGE_REPOSITORY", want: AccountParameterEnableTriSecretAndRekeyOptOutForImageRepository}, + {input: "ENABLE_TRI_SECRET_AND_REKEY_OPT_OUT_FOR_SPCS_BLOCK_STORAGE", want: AccountParameterEnableTriSecretAndRekeyOptOutForSpcsBlockStorage}, + {input: "EVENT_TABLE", want: AccountParameterEventTable}, + {input: "EXTERNAL_OAUTH_ADD_PRIVILEGED_ROLES_TO_BLOCKED_LIST", want: AccountParameterExternalOAuthAddPrivilegedRolesToBlockedList}, + {input: "INITIAL_REPLICATION_SIZE_LIMIT_IN_TB", want: AccountParameterInitialReplicationSizeLimitInTB}, + {input: "MIN_DATA_RETENTION_TIME_IN_DAYS", want: AccountParameterMinDataRetentionTimeInDays}, + {input: "NETWORK_POLICY", want: AccountParameterNetworkPolicy}, + {input: "OAUTH_ADD_PRIVILEGED_ROLES_TO_BLOCKED_LIST", want: AccountParameterOAuthAddPrivilegedRolesToBlockedList}, + {input: "PERIODIC_DATA_REKEYING", want: AccountParameterPeriodicDataRekeying}, + {input: "PREVENT_LOAD_FROM_INLINE_URL", want: AccountParameterPreventLoadFromInlineURL}, + {input: "PREVENT_UNLOAD_TO_INLINE_URL", want: AccountParameterPreventUnloadToInlineURL}, + {input: "PREVENT_UNLOAD_TO_INTERNAL_STAGES", want: AccountParameterPreventUnloadToInternalStages}, + {input: "REQUIRE_STORAGE_INTEGRATION_FOR_STAGE_CREATION", want: AccountParameterRequireStorageIntegrationForStageCreation}, + {input: "REQUIRE_STORAGE_INTEGRATION_FOR_STAGE_OPERATION", want: AccountParameterRequireStorageIntegrationForStageOperation}, + {input: "SSO_LOGIN_PAGE", want: AccountParameterSSOLoginPage}, + {input: 
"ABORT_DETACHED_QUERY", want: AccountParameterAbortDetachedQuery}, + {input: "AUTOCOMMIT", want: AccountParameterAutocommit}, + {input: "BINARY_INPUT_FORMAT", want: AccountParameterBinaryInputFormat}, + {input: "BINARY_OUTPUT_FORMAT", want: AccountParameterBinaryOutputFormat}, + {input: "CLIENT_METADATA_REQUEST_USE_CONNECTION_CTX", want: AccountParameterClientMetadataRequestUseConnectionCtx}, + {input: "CLIENT_METADATA_USE_SESSION_DATABASE", want: AccountParameterClientMetadataUseSessionDatabase}, + {input: "CLIENT_RESULT_COLUMN_CASE_INSENSITIVE", want: AccountParameterClientResultColumnCaseInsensitive}, + {input: "DATE_INPUT_FORMAT", want: AccountParameterDateInputFormat}, + {input: "DATE_OUTPUT_FORMAT", want: AccountParameterDateOutputFormat}, + {input: "ERROR_ON_NONDETERMINISTIC_MERGE", want: AccountParameterErrorOnNondeterministicMerge}, + {input: "ERROR_ON_NONDETERMINISTIC_UPDATE", want: AccountParameterErrorOnNondeterministicUpdate}, + {input: "GEOGRAPHY_OUTPUT_FORMAT", want: AccountParameterGeographyOutputFormat}, + {input: "JSON_INDENT", want: AccountParameterJSONIndent}, + {input: "LOCK_TIMEOUT", want: AccountParameterLockTimeout}, + {input: "MULTI_STATEMENT_COUNT", want: AccountParameterMultiStatementCount}, + {input: "QUERY_TAG", want: AccountParameterQueryTag}, + {input: "QUOTED_IDENTIFIERS_IGNORE_CASE", want: AccountParameterQuotedIdentifiersIgnoreCase}, + {input: "ROWS_PER_RESULTSET", want: AccountParameterRowsPerResultset}, + {input: "S3_STAGE_VPCE_DNS_NAME", want: AccountParameterS3StageVpceDnsName}, + {input: "SIMULATED_DATA_SHARING_CONSUMER", want: AccountParameterSimulatedDataSharingConsumer}, + {input: "STATEMENT_TIMEOUT_IN_SECONDS", want: AccountParameterStatementTimeoutInSeconds}, + {input: "STRICT_JSON_OUTPUT", want: AccountParameterStrictJSONOutput}, + {input: "TIME_INPUT_FORMAT", want: AccountParameterTimeInputFormat}, + {input: "TIME_OUTPUT_FORMAT", want: AccountParameterTimeOutputFormat}, + {input: "TIMESTAMP_DAY_IS_ALWAYS_24H", want: 
AccountParameterTimestampDayIsAlways24h}, + {input: "TIMESTAMP_INPUT_FORMAT", want: AccountParameterTimestampInputFormat}, + {input: "TIMESTAMP_LTZ_OUTPUT_FORMAT", want: AccountParameterTimestampLtzOutputFormat}, + {input: "TIMESTAMP_NTZ_OUTPUT_FORMAT", want: AccountParameterTimestampNtzOutputFormat}, + {input: "TIMESTAMP_OUTPUT_FORMAT", want: AccountParameterTimestampOutputFormat}, + {input: "TIMESTAMP_TYPE_MAPPING", want: AccountParameterTimestampTypeMapping}, + {input: "TIMESTAMP_TZ_OUTPUT_FORMAT", want: AccountParameterTimestampTzOutputFormat}, + {input: "TIMEZONE", want: AccountParameterTimezone}, + {input: "TRANSACTION_ABORT_ON_ERROR", want: AccountParameterTransactionAbortOnError}, + {input: "TRANSACTION_DEFAULT_ISOLATION_LEVEL", want: AccountParameterTransactionDefaultIsolationLevel}, + {input: "TWO_DIGIT_CENTURY_START", want: AccountParameterTwoDigitCenturyStart}, + {input: "UNSUPPORTED_DDL_ACTION", want: AccountParameterUnsupportedDdlAction}, + {input: "USE_CACHED_RESULT", want: AccountParameterUseCachedResult}, + {input: "WEEK_OF_YEAR_POLICY", want: AccountParameterWeekOfYearPolicy}, + {input: "WEEK_START", want: AccountParameterWeekStart}, + {input: "CATALOG", want: AccountParameterCatalog}, + {input: "DATA_RETENTION_TIME_IN_DAYS", want: AccountParameterDataRetentionTimeInDays}, + {input: "DEFAULT_DDL_COLLATION", want: AccountParameterDefaultDDLCollation}, + {input: "EXTERNAL_VOLUME", want: AccountParameterExternalVolume}, + {input: "LOG_LEVEL", want: AccountParameterLogLevel}, + {input: "MAX_CONCURRENCY_LEVEL", want: AccountParameterMaxConcurrencyLevel}, + {input: "MAX_DATA_EXTENSION_TIME_IN_DAYS", want: AccountParameterMaxDataExtensionTimeInDays}, + {input: "PIPE_EXECUTION_PAUSED", want: AccountParameterPipeExecutionPaused}, + {input: "REPLACE_INVALID_CHARACTERS", want: AccountParameterReplaceInvalidCharacters}, + {input: "STATEMENT_QUEUED_TIMEOUT_IN_SECONDS", want: AccountParameterStatementQueuedTimeoutInSeconds}, + {input: 
"STORAGE_SERIALIZATION_POLICY", want: AccountParameterStorageSerializationPolicy}, + {input: "SHARE_RESTRICTIONS", want: AccountParameterShareRestrictions}, + {input: "SUSPEND_TASK_AFTER_NUM_FAILURES", want: AccountParameterSuspendTaskAfterNumFailures}, + {input: "TRACE_LEVEL", want: AccountParameterTraceLevel}, + {input: "USER_TASK_MANAGED_INITIAL_WAREHOUSE_SIZE", want: AccountParameterUserTaskManagedInitialWarehouseSize}, + {input: "USER_TASK_TIMEOUT_MS", want: AccountParameterUserTaskTimeoutMs}, + {input: "TASK_AUTO_RETRY_ATTEMPTS", want: AccountParameterTaskAutoRetryAttempts}, + {input: "USER_TASK_MINIMUM_TRIGGER_INTERVAL_IN_SECONDS", want: AccountParameterUserTaskMinimumTriggerIntervalInSeconds}, + {input: "METRIC_LEVEL", want: AccountParameterMetricLevel}, + {input: "ENABLE_CONSOLE_OUTPUT", want: AccountParameterEnableConsoleOutput}, + {input: "ENABLE_UNREDACTED_QUERY_SYNTAX_ERROR", want: AccountParameterEnableUnredactedQuerySyntaxError}, + } + + invalid := []test{ + {input: ""}, + {input: "foo"}, + } + + for _, tc := range valid { + t.Run(tc.input, func(t *testing.T) { + got, err := ToAccountParameter(tc.input) + require.NoError(t, err) + require.Equal(t, tc.want, got) + }) + } + + for _, tc := range invalid { + t.Run(tc.input, func(t *testing.T) { + _, err := ToAccountParameter(tc.input) + require.Error(t, err) + }) + } +} diff --git a/templates/resources/account_parameter.md.tmpl b/templates/resources/account_parameter.md.tmpl new file mode 100644 index 0000000000..15c1ee96c9 --- /dev/null +++ b/templates/resources/account_parameter.md.tmpl @@ -0,0 +1,37 @@ +--- +page_title: "{{.Name}} {{.Type}} - {{.ProviderName}}" +subcategory: "" +description: |- +{{ if gt (len (split .Description "")) 1 -}} +{{ index (split .Description "") 1 | plainmarkdown | trimspace | prefixlines " " }} +{{- else -}} +{{ .Description | plainmarkdown | trimspace | prefixlines " " }} +{{- end }} +--- + +!> **V1 release candidate** This resource was reworked and is a release candidate 
for the V1. We do not expect significant changes in it before the V1. We will welcome any feedback and adjust the resource if needed. Any errors reported will be resolved with a higher priority. We encourage checking this resource out before the V1 release. Please follow the [migration guide](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/MIGRATION_GUIDE.md#v0990--v01000) to use it. + +-> **Note** This resource does not support all account parameters. The supported ones are listed below. This feature gap will be addressed in future releases. + +# {{.Name}} ({{.Type}}) + +{{ .Description | trimspace }} + +{{ if .HasExample -}} +## Example Usage + +{{ tffile (printf "examples/resources/%s/resource.tf" .Name)}} +-> **Note** Instead of using fully_qualified_name, you can reference objects managed outside Terraform by constructing a correct ID, consult [identifiers guide](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/latest/docs/guides/identifiers#new-computed-fully-qualified-name-field-in-resources). 
+ + +{{- end }} + +{{ .SchemaMarkdown | trimspace }} +{{- if .HasImport }} + +## Import + +Import is supported using the following syntax: + +{{ codefile "shell" (printf "examples/resources/%s/import.sh" .Name)}} +{{- end }} From 13401d5fff320eedcf40eed7c0831154cc6cc13a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jan=20Cie=C5=9Blak?= Date: Thu, 12 Dec 2024 15:58:22 +0100 Subject: [PATCH 07/20] feat: support table data type (#3274) ### Changes - support table data types - add unit tests on parsing and comparing - add integration tests in function and procedure tests --- .../objectassert/function_snowflake_ext.go | 11 ++ .../objectassert/procedure_snowflake_ext.go | 11 ++ pkg/sdk/datatypes/data_types.go | 5 + pkg/sdk/datatypes/data_types_test.go | 101 ++++++++++++++++++ pkg/sdk/datatypes/legacy.go | 1 - pkg/sdk/datatypes/table.go | 81 +++++++++++++- pkg/sdk/testint/functions_integration_test.go | 34 ++++++ .../testint/procedures_integration_test.go | 30 +++--- 8 files changed, 256 insertions(+), 18 deletions(-) diff --git a/pkg/acceptance/bettertestspoc/assert/objectassert/function_snowflake_ext.go b/pkg/acceptance/bettertestspoc/assert/objectassert/function_snowflake_ext.go index aa8d17a022..8836ff49d5 100644 --- a/pkg/acceptance/bettertestspoc/assert/objectassert/function_snowflake_ext.go +++ b/pkg/acceptance/bettertestspoc/assert/objectassert/function_snowflake_ext.go @@ -65,3 +65,14 @@ func (f *FunctionAssert) HasExactlySecrets(expectedSecrets map[string]sdk.Schema }) return f } + +func (f *FunctionAssert) HasArgumentsRawContains(substring string) *FunctionAssert { + f.AddAssertion(func(t *testing.T, o *sdk.Function) error { + t.Helper() + if !strings.Contains(o.ArgumentsRaw, substring) { + return fmt.Errorf("expected arguments raw contain: %v, to contain: %v", o.ArgumentsRaw, substring) + } + return nil + }) + return f +} diff --git a/pkg/acceptance/bettertestspoc/assert/objectassert/procedure_snowflake_ext.go 
b/pkg/acceptance/bettertestspoc/assert/objectassert/procedure_snowflake_ext.go index 12d5a384cf..4ce244f856 100644 --- a/pkg/acceptance/bettertestspoc/assert/objectassert/procedure_snowflake_ext.go +++ b/pkg/acceptance/bettertestspoc/assert/objectassert/procedure_snowflake_ext.go @@ -57,3 +57,14 @@ func (f *ProcedureAssert) HasExactlyExternalAccessIntegrations(integrations ...s }) return f } + +func (p *ProcedureAssert) HasArgumentsRawContains(substring string) *ProcedureAssert { + p.AddAssertion(func(t *testing.T, o *sdk.Procedure) error { + t.Helper() + if !strings.Contains(o.ArgumentsRaw, substring) { + return fmt.Errorf("expected arguments raw contain: %v, to contain: %v", o.ArgumentsRaw, substring) + } + return nil + }) + return p +} diff --git a/pkg/sdk/datatypes/data_types.go b/pkg/sdk/datatypes/data_types.go index be58f978f2..2371770a94 100644 --- a/pkg/sdk/datatypes/data_types.go +++ b/pkg/sdk/datatypes/data_types.go @@ -80,6 +80,9 @@ func ParseDataType(raw string) (DataType, error) { if idx := slices.IndexFunc(VectorDataTypeSynonyms, func(s string) bool { return strings.HasPrefix(dataTypeRaw, s) }); idx >= 0 { return parseVectorDataTypeRaw(sanitizedDataTypeRaw{dataTypeRaw, VectorDataTypeSynonyms[idx]}) } + if idx := slices.IndexFunc(TableDataTypeSynonyms, func(s string) bool { return strings.HasPrefix(dataTypeRaw, s) }); idx >= 0 { + return parseTableDataTypeRaw(sanitizedDataTypeRaw{strings.TrimSpace(raw), TableDataTypeSynonyms[idx]}) + } return nil, fmt.Errorf("invalid data type: %s", raw) } @@ -118,6 +121,8 @@ func AreTheSame(a DataType, b DataType) bool { return castSuccessfully(v, b, areNumberDataTypesTheSame) case *ObjectDataType: return castSuccessfully(v, b, noArgsDataTypesAreTheSame) + case *TableDataType: + return castSuccessfully(v, b, areTableDataTypesTheSame) case *TextDataType: return castSuccessfully(v, b, areTextDataTypesTheSame) case *TimeDataType: diff --git a/pkg/sdk/datatypes/data_types_test.go b/pkg/sdk/datatypes/data_types_test.go 
index cfb3845ef1..7e6382e63f 100644 --- a/pkg/sdk/datatypes/data_types_test.go +++ b/pkg/sdk/datatypes/data_types_test.go @@ -6,6 +6,8 @@ import ( "strings" "testing" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/internal/collections" + "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) @@ -1095,6 +1097,98 @@ func Test_ParseDataType_Vector(t *testing.T) { } } +func Test_ParseDataType_Table(t *testing.T) { + type column struct { + Name string + Type string + } + type test struct { + input string + expectedColumns []column + } + + positiveTestCases := []test{ + {input: "TABLE()", expectedColumns: []column{}}, + {input: "TABLE ()", expectedColumns: []column{}}, + {input: "TABLE ( )", expectedColumns: []column{}}, + {input: "TABLE(arg_name NUMBER)", expectedColumns: []column{{"arg_name", "NUMBER"}}}, + {input: "TABLE(arg_name double precision, arg_name_2 NUMBER)", expectedColumns: []column{{"arg_name", "double precision"}, {"arg_name_2", "NUMBER"}}}, + {input: "TABLE(arg_name NUMBER(38))", expectedColumns: []column{{"arg_name", "NUMBER(38)"}}}, + {input: "TABLE(arg_name NUMBER(38), arg_name_2 VARCHAR)", expectedColumns: []column{{"arg_name", "NUMBER(38)"}, {"arg_name_2", "VARCHAR"}}}, + {input: "TABLE(arg_name number, second float, third GEOGRAPHY)", expectedColumns: []column{{"arg_name", "number"}, {"second", "float"}, {"third", "GEOGRAPHY"}}}, + {input: "TABLE ( arg_name varchar, second date, third TIME )", expectedColumns: []column{{"arg_name", "varchar"}, {"second", "date"}, {"third", "time"}}}, + // TODO: Support types with parameters (for now, only legacy types are supported because Snowflake returns only with this output), e.g. TABLE(ARG NUMBER(38, 0)) + // TODO: Support nested tables, e.g. 
TABLE(ARG NUMBER, NESTED TABLE(A VARCHAR, B GEOMETRY)) + // TODO: Support complex argument names (with quotes / spaces / special characters / etc) + } + + negativeTestCases := []test{ + {input: "TABLE())"}, + {input: "TABLE(1, 2)"}, + {input: "TABLE(INT, INT)"}, + {input: "TABLE(a b)"}, + {input: "TABLE(1)"}, + {input: "TABLE(2, INT)"}, + {input: "TABLE"}, + {input: "TABLE(INT, 2, 3)"}, + {input: "TABLE(INT)"}, + {input: "TABLE(x, 2)"}, + {input: "TABLE("}, + {input: "TABLE)"}, + {input: "TA BLE"}, + } + + for _, tc := range positiveTestCases { + tc := tc + t.Run(tc.input, func(t *testing.T) { + parsed, err := ParseDataType(tc.input) + + require.NoError(t, err) + require.IsType(t, &TableDataType{}, parsed) + + assert.Equal(t, "TABLE", parsed.(*TableDataType).underlyingType) + assert.Equal(t, len(tc.expectedColumns), len(parsed.(*TableDataType).columns)) + for i, column := range tc.expectedColumns { + assert.Equal(t, column.Name, parsed.(*TableDataType).columns[i].name) + parsedType, err := ParseDataType(column.Type) + require.NoError(t, err) + assert.Equal(t, parsedType.ToLegacyDataTypeSql(), parsed.(*TableDataType).columns[i].dataType.ToLegacyDataTypeSql()) + } + + legacyColumns := strings.Join(collections.Map(tc.expectedColumns, func(col column) string { + parsedType, err := ParseDataType(col.Type) + require.NoError(t, err) + return fmt.Sprintf("%s %s", col.Name, parsedType.ToLegacyDataTypeSql()) + }), ", ") + assert.Equal(t, fmt.Sprintf("TABLE(%s)", legacyColumns), parsed.ToLegacyDataTypeSql()) + + canonicalColumns := strings.Join(collections.Map(tc.expectedColumns, func(col column) string { + parsedType, err := ParseDataType(col.Type) + require.NoError(t, err) + return fmt.Sprintf("%s %s", col.Name, parsedType.Canonical()) + }), ", ") + assert.Equal(t, fmt.Sprintf("TABLE(%s)", canonicalColumns), parsed.Canonical()) + + columns := strings.Join(collections.Map(tc.expectedColumns, func(col column) string { + parsedType, err := ParseDataType(col.Type) + 
require.NoError(t, err) + return fmt.Sprintf("%s %s", col.Name, parsedType.ToSql()) + }), ", ") + assert.Equal(t, fmt.Sprintf("TABLE(%s)", columns), parsed.ToSql()) + }) + } + + for _, tc := range negativeTestCases { + tc := tc + t.Run("negative: "+tc.input, func(t *testing.T) { + parsed, err := ParseDataType(tc.input) + + require.Error(t, err) + require.Nil(t, parsed) + }) + } +} + func Test_AreTheSame(t *testing.T) { // empty d1/d2 means nil DataType input type test struct { @@ -1145,6 +1239,13 @@ func Test_AreTheSame(t *testing.T) { {d1: "TIME", d2: "TIME", expectedOutcome: true}, {d1: "TIME", d2: "TIME(5)", expectedOutcome: false}, {d1: "TIME", d2: fmt.Sprintf("TIME(%d)", DefaultTimePrecision), expectedOutcome: true}, + {d1: "TABLE()", d2: "TABLE()", expectedOutcome: true}, + {d1: "TABLE(A NUMBER)", d2: "TABLE(B NUMBER)", expectedOutcome: false}, + {d1: "TABLE(A NUMBER)", d2: "TABLE(a NUMBER)", expectedOutcome: false}, + {d1: "TABLE(A NUMBER)", d2: "TABLE(A VARCHAR)", expectedOutcome: false}, + {d1: "TABLE(A NUMBER, B VARCHAR)", d2: "TABLE(A NUMBER, B VARCHAR)", expectedOutcome: true}, + {d1: "TABLE(A NUMBER, B NUMBER)", d2: "TABLE(A NUMBER, B VARCHAR)", expectedOutcome: false}, + {d1: "TABLE()", d2: "TABLE(A NUMBER)", expectedOutcome: false}, } for _, tc := range testCases { diff --git a/pkg/sdk/datatypes/legacy.go b/pkg/sdk/datatypes/legacy.go index c77f286f9c..63f523779e 100644 --- a/pkg/sdk/datatypes/legacy.go +++ b/pkg/sdk/datatypes/legacy.go @@ -16,7 +16,6 @@ const ( TimestampNtzLegacyDataType = "TIMESTAMP_NTZ" TimestampTzLegacyDataType = "TIMESTAMP_TZ" VariantLegacyDataType = "VARIANT" - // TableLegacyDataType was not a value of legacy data type in the old implementation. Left for now for an easier implementation. 
TableLegacyDataType = "TABLE" ) diff --git a/pkg/sdk/datatypes/table.go b/pkg/sdk/datatypes/table.go index e7c398ec6d..a05298c992 100644 --- a/pkg/sdk/datatypes/table.go +++ b/pkg/sdk/datatypes/table.go @@ -1,9 +1,16 @@ package datatypes -// TableDataType is based on TODO [SNOW-1348103] +import ( + "fmt" + "strings" + + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/internal/collections" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/internal/logging" +) + +// TableDataType is based on https://docs.snowflake.com/en/developer-guide/stored-procedure/stored-procedures-java#returning-tabular-data. // It does not have synonyms. // It consists of a list of column name + column type; may be empty. -// TODO [SNOW-1348103]: test and improve type TableDataType struct { columns []TableDataTypeColumn underlyingType string @@ -14,6 +21,8 @@ type TableDataTypeColumn struct { dataType DataType } +var TableDataTypeSynonyms = []string{"TABLE"} + func (c *TableDataTypeColumn) ColumnName() string { return c.name } @@ -23,17 +32,79 @@ func (c *TableDataTypeColumn) ColumnType() DataType { } func (t *TableDataType) ToSql() string { - return t.underlyingType + columns := strings.Join(collections.Map(t.columns, func(col TableDataTypeColumn) string { + return fmt.Sprintf("%s %s", col.name, col.dataType.ToSql()) + }), ", ") + return fmt.Sprintf("%s(%s)", t.underlyingType, columns) } func (t *TableDataType) ToLegacyDataTypeSql() string { - return TableLegacyDataType + columns := strings.Join(collections.Map(t.columns, func(col TableDataTypeColumn) string { + return fmt.Sprintf("%s %s", col.name, col.dataType.ToLegacyDataTypeSql()) + }), ", ") + return fmt.Sprintf("%s(%s)", TableLegacyDataType, columns) } func (t *TableDataType) Canonical() string { - return TableLegacyDataType + columns := strings.Join(collections.Map(t.columns, func(col TableDataTypeColumn) string { + return fmt.Sprintf("%s %s", col.name, col.dataType.Canonical()) + }), ", ") + return 
fmt.Sprintf("%s(%s)", TableLegacyDataType, columns) } func (t *TableDataType) Columns() []TableDataTypeColumn { return t.columns } + +func parseTableDataTypeRaw(raw sanitizedDataTypeRaw) (*TableDataType, error) { + r := strings.TrimSpace(strings.TrimPrefix(raw.raw, raw.matchedByType)) + if r == "" || (!strings.HasPrefix(r, "(") || !strings.HasSuffix(r, ")")) { + logging.DebugLogger.Printf(`table %s could not be parsed, use "%s(argName argType, ...)" format`, raw.raw, raw.matchedByType) + return nil, fmt.Errorf(`table %s could not be parsed, use "%s(argName argType, ...)" format`, raw.raw, raw.matchedByType) + } + onlyArgs := strings.TrimSpace(r[1 : len(r)-1]) + if onlyArgs == "" { + return &TableDataType{ + columns: make([]TableDataTypeColumn, 0), + underlyingType: raw.matchedByType, + }, nil + } + columns, err := collections.MapErr(strings.Split(onlyArgs, ","), func(arg string) (TableDataTypeColumn, error) { + argParts := strings.SplitN(strings.TrimSpace(arg), " ", 2) + if len(argParts) != 2 { + return TableDataTypeColumn{}, fmt.Errorf("could not parse table column: %s, it should contain the following format ` `; parser failure may be connected to the complex argument names", arg) + } + argDataType, err := ParseDataType(argParts[1]) + if err != nil { + return TableDataTypeColumn{}, err + } + return TableDataTypeColumn{ + name: argParts[0], + dataType: argDataType, + }, nil + }) + if err != nil { + return nil, err + } + return &TableDataType{ + columns: columns, + underlyingType: raw.matchedByType, + }, nil +} + +func areTableDataTypesTheSame(a, b *TableDataType) bool { + if len(a.columns) != len(b.columns) { + return false + } + + for i := range a.columns { + aColumn := a.columns[i] + bColumn := b.columns[i] + + if aColumn.name != bColumn.name || !AreTheSame(aColumn.dataType, bColumn.dataType) { + return false + } + } + + return true +} diff --git a/pkg/sdk/testint/functions_integration_test.go b/pkg/sdk/testint/functions_integration_test.go index 
8aa1b217fb..b5e9a35fb6 100644 --- a/pkg/sdk/testint/functions_integration_test.go +++ b/pkg/sdk/testint/functions_integration_test.go @@ -2049,4 +2049,38 @@ func TestInt_Functions(t *testing.T) { assert.Equal(t, dataType.Canonical(), pairs["returns"]) }) } + + t.Run("create function for SQL - return table data type", func(t *testing.T) { + argName := "x" + + returnDataType, err := datatypes.ParseDataType(fmt.Sprintf("TABLE(ID %s, PRICE %s, THIRD %s)", datatypes.NumberLegacyDataType, datatypes.FloatLegacyDataType, datatypes.VarcharLegacyDataType)) + require.NoError(t, err) + + id := testClientHelper().Ids.RandomSchemaObjectIdentifierWithArguments(datatypes.VarcharLegacyDataType) + + definition := ` SELECT 1, 2.2::float, 'abc';` + dt := sdk.NewFunctionReturnsResultDataTypeRequest(returnDataType) + returns := sdk.NewFunctionReturnsRequest().WithResultDataType(*dt) + argument := sdk.NewFunctionArgumentRequest(argName, nil).WithArgDataTypeOld(datatypes.VarcharLegacyDataType) + request := sdk.NewCreateForSQLFunctionRequestDefinitionWrapped(id.SchemaObjectId(), *returns, definition). + WithArguments([]sdk.FunctionArgumentRequest{*argument}) + + err = client.Functions.CreateForSQL(ctx, request) + require.NoError(t, err) + t.Cleanup(testClientHelper().Function.DropFunctionFunc(t, id)) + + function, err := client.Functions.ShowByID(ctx, id) + require.NoError(t, err) + + assertions.AssertThatObject(t, objectassert.FunctionFromObject(t, function). + HasCreatedOnNotEmpty(). + HasName(id.Name()). + HasSchemaName(id.SchemaName()). + HasArgumentsRawContains(returnDataType.ToLegacyDataTypeSql()), + ) + + assertions.AssertThatObject(t, objectassert.FunctionDetails(t, id). 
+ HasReturnDataType(returnDataType), + ) + }) } diff --git a/pkg/sdk/testint/procedures_integration_test.go b/pkg/sdk/testint/procedures_integration_test.go index e8b54a9a4d..56f1a20248 100644 --- a/pkg/sdk/testint/procedures_integration_test.go +++ b/pkg/sdk/testint/procedures_integration_test.go @@ -1781,22 +1781,21 @@ def filter_by_role(session, table_name, role): require.GreaterOrEqual(t, len(procedures), 1) }) - // TODO [SNOW-1348103]: adjust or remove t.Run("create procedure for SQL: returns table", func(t *testing.T) { - t.Skipf("Skipped for now; left as inspiration for resource rework as part of SNOW-1348103") - - name := "find_invoice_by_id" - id := testClientHelper().Ids.NewSchemaObjectIdentifierWithArguments(name, sdk.DataTypeVARCHAR) + id := testClientHelper().Ids.RandomSchemaObjectIdentifierWithArguments(sdk.DataTypeVARCHAR) + column1 := sdk.NewProcedureColumnRequest("id", nil).WithColumnDataTypeOld("INTEGER") + column2 := sdk.NewProcedureColumnRequest("price", nil).WithColumnDataTypeOld("double") + column3 := sdk.NewProcedureColumnRequest("third", nil).WithColumnDataTypeOld("Geometry") + returnsTable := sdk.NewProcedureReturnsTableRequest().WithColumns([]sdk.ProcedureColumnRequest{*column1, *column2, *column3}) + expectedReturnDataType, err := datatypes.ParseDataType(fmt.Sprintf("TABLE(id %s, price %s, third %s)", datatypes.NumberLegacyDataType, datatypes.FloatLegacyDataType, datatypes.GeometryLegacyDataType)) + require.NoError(t, err) definition := ` DECLARE res RESULTSET DEFAULT (SELECT * FROM invoices WHERE id = :id); BEGIN RETURN TABLE(res); END;` - column1 := sdk.NewProcedureColumnRequest("id", nil).WithColumnDataTypeOld("INTEGER") - column2 := sdk.NewProcedureColumnRequest("price", nil).WithColumnDataTypeOld("NUMBER(12,2)") - returnsTable := sdk.NewProcedureReturnsTableRequest().WithColumns([]sdk.ProcedureColumnRequest{*column1, *column2}) returns := sdk.NewProcedureSQLReturnsRequest().WithTable(*returnsTable) argument := 
sdk.NewProcedureArgumentRequest("id", nil).WithArgDataTypeOld(sdk.DataTypeVARCHAR) request := sdk.NewCreateForSQLProcedureRequestDefinitionWrapped(id.SchemaObjectId(), *returns, definition). @@ -1804,13 +1803,20 @@ def filter_by_role(session, table_name, role): // SNOW-1051627 todo: uncomment once null input behavior working again // WithNullInputBehavior(sdk.NullInputBehaviorPointer(sdk.NullInputBehaviorReturnsNullInput)). WithArguments([]sdk.ProcedureArgumentRequest{*argument}) - err := client.Procedures.CreateForSQL(ctx, request) + err = client.Procedures.CreateForSQL(ctx, request) require.NoError(t, err) t.Cleanup(testClientHelper().Procedure.DropProcedureFunc(t, id)) - procedures, err := client.Procedures.Show(ctx, sdk.NewShowProcedureRequest()) - require.NoError(t, err) - require.GreaterOrEqual(t, len(procedures), 1) + assertions.AssertThatObject(t, objectassert.Procedure(t, id). + HasCreatedOnNotEmpty(). + HasName(id.Name()). + HasSchemaName(id.SchemaName()). + HasArgumentsRawContains(expectedReturnDataType.ToLegacyDataTypeSql()), + ) + + assertions.AssertThatObject(t, objectassert.ProcedureDetails(t, id). 
+ HasReturnDataType(expectedReturnDataType), + ) }) t.Run("show parameters", func(t *testing.T) { From c4f1e8fd55150e40d8a556580016ff83fe65bdaf Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jan=20Cie=C5=9Blak?= Date: Thu, 12 Dec 2024 16:11:34 +0100 Subject: [PATCH 08/20] feat: Unsafe execute v1 readiness (#3266) ### Changes - Deprecated unsafe execute resource - Add new snowflake_execute resource - Adjust tests to use the new resource - Add more tests for other corner cases - Make sure the new resource is importable --- .github/ISSUE_TEMPLATE/01-bug.yml | 1 + .github/ISSUE_TEMPLATE/02-general-usage.yml | 1 + .github/ISSUE_TEMPLATE/03-documentation.yml | 1 + .github/ISSUE_TEMPLATE/04-feature-request.yml | 1 + MIGRATION_GUIDE.md | 10 + docs/index.md | 1 + docs/resources/execute.md | 128 ++++++++++ docs/resources/unsafe_execute.md | 6 +- examples/additional/deprecated_resources.MD | 1 + .../resources/snowflake_execute/import.sh | 1 + .../resources/snowflake_execute/resource.tf | 79 ++++++ pkg/provider/provider.go | 1 + pkg/provider/provider_acceptance_test.go | 10 +- pkg/provider/resources/resources.go | 1 + pkg/resources/execute.go | 172 +++++++++++++ ...nce_test.go => execute_acceptance_test.go} | 229 +++++++++++++----- pkg/resources/helpers.go | 9 + .../test.tf | 2 +- .../variables.tf | 0 .../test.tf | 2 +- .../variables.tf | 0 .../test.tf | 2 +- .../variables.tf | 0 pkg/resources/unsafe_execute.go | 160 +----------- pkg/scripts/issues/labels.go | 1 + templates/resources/execute.md.tmpl | 40 +++ templates/resources/unsafe_execute.md.tmpl | 4 +- 27 files changed, 632 insertions(+), 231 deletions(-) create mode 100644 docs/resources/execute.md create mode 100644 examples/resources/snowflake_execute/import.sh create mode 100644 examples/resources/snowflake_execute/resource.tf create mode 100644 pkg/resources/execute.go rename pkg/resources/{unsafe_execute_acceptance_test.go => execute_acceptance_test.go} (78%) rename 
pkg/resources/testdata/{TestAcc_UnsafeExecute_commonSetup => TestAcc_Execute_commonSetup}/test.tf (52%) rename pkg/resources/testdata/{TestAcc_UnsafeExecute_commonSetup => TestAcc_Execute_commonSetup}/variables.tf (100%) rename pkg/resources/testdata/{TestAcc_UnsafeExecute_grantsComplex => TestAcc_Execute_grantsComplex}/test.tf (88%) rename pkg/resources/testdata/{TestAcc_UnsafeExecute_grantsComplex => TestAcc_Execute_grantsComplex}/variables.tf (100%) rename pkg/resources/testdata/{TestAcc_UnsafeExecute_withRead => TestAcc_Execute_withRead}/test.tf (61%) rename pkg/resources/testdata/{TestAcc_UnsafeExecute_withRead => TestAcc_Execute_withRead}/variables.tf (100%) create mode 100644 templates/resources/execute.md.tmpl diff --git a/.github/ISSUE_TEMPLATE/01-bug.yml b/.github/ISSUE_TEMPLATE/01-bug.yml index cff3e79c65..3fe9c7b893 100644 --- a/.github/ISSUE_TEMPLATE/01-bug.yml +++ b/.github/ISSUE_TEMPLATE/01-bug.yml @@ -108,6 +108,7 @@ body: - resource:database_role - resource:dynamic_table - resource:email_notification_integration + - resource:execute - resource:external_function - resource:external_oauth_integration - resource:external_table diff --git a/.github/ISSUE_TEMPLATE/02-general-usage.yml b/.github/ISSUE_TEMPLATE/02-general-usage.yml index 1ddd1d306f..9591da908c 100644 --- a/.github/ISSUE_TEMPLATE/02-general-usage.yml +++ b/.github/ISSUE_TEMPLATE/02-general-usage.yml @@ -106,6 +106,7 @@ body: - resource:database_role - resource:dynamic_table - resource:email_notification_integration + - resource:execute - resource:external_function - resource:external_oauth_integration - resource:external_table diff --git a/.github/ISSUE_TEMPLATE/03-documentation.yml b/.github/ISSUE_TEMPLATE/03-documentation.yml index d73349e095..05fd7110e6 100644 --- a/.github/ISSUE_TEMPLATE/03-documentation.yml +++ b/.github/ISSUE_TEMPLATE/03-documentation.yml @@ -40,6 +40,7 @@ body: - resource:database_role - resource:dynamic_table - resource:email_notification_integration + - 
resource:execute - resource:external_function - resource:external_oauth_integration - resource:external_table diff --git a/.github/ISSUE_TEMPLATE/04-feature-request.yml b/.github/ISSUE_TEMPLATE/04-feature-request.yml index b653f251ae..c66556b86f 100644 --- a/.github/ISSUE_TEMPLATE/04-feature-request.yml +++ b/.github/ISSUE_TEMPLATE/04-feature-request.yml @@ -76,6 +76,7 @@ body: - resource:database_role - resource:dynamic_table - resource:email_notification_integration + - resource:execute - resource:external_function - resource:external_oauth_integration - resource:external_table diff --git a/MIGRATION_GUIDE.md b/MIGRATION_GUIDE.md index 2fe0dd5647..9904e232e5 100644 --- a/MIGRATION_GUIDE.md +++ b/MIGRATION_GUIDE.md @@ -17,6 +17,16 @@ During resource deleting, provider now uses `UNSET` instead of `SET` with the de #### *(behavior change)* changes in `key` field The value of `key` field is now case-insensitive and is validated. The list of supported values is available in the resource documentation. +### unsafe_execute resource deprecation / new execute resource + +The `snowflake_unsafe_execute` gets deprecated in favor of the new resource `snowflake_execute`. +The `snowflake_execute` was built on top of `snowflake_unsafe_execute` with a few improvements. +The unsafe version will be removed with the v1 release, so please migrate to the `snowflake_execute` resource. + +For no downtime migration, follow our [guide](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/resource_migration.md). +When importing, remember that the given resource id has to be unique (using UUIDs is recommended). +Also, because of the nature of the resource, first apply after importing is necessary to "copy" values from the configuration to the state.
+ ### snowflake_oauth_integration_for_partner_applications and snowflake_oauth_integration_for_custom_clients resource changes #### *(behavior change)* `blocked_roles_list` field is no longer required diff --git a/docs/index.md b/docs/index.md index 9a69fadbe9..f04b4eb2b5 100644 --- a/docs/index.md +++ b/docs/index.md @@ -367,6 +367,7 @@ provider "snowflake" { - [snowflake_saml_integration](./docs/resources/saml_integration) - use [snowflake_saml2_integration](./docs/resources/saml2_integration) instead - [snowflake_stream](./docs/resources/stream) - [snowflake_tag_masking_policy_association](./docs/resources/tag_masking_policy_association) +- [snowflake_unsafe_execute](./docs/resources/unsafe_execute) - use [snowflake_execute](./docs/resources/execute) instead ## Currently deprecated datasources diff --git a/docs/resources/execute.md b/docs/resources/execute.md new file mode 100644 index 0000000000..6fbfbb993f --- /dev/null +++ b/docs/resources/execute.md @@ -0,0 +1,128 @@ +--- +# generated by https://github.com/hashicorp/terraform-plugin-docs +page_title: "snowflake_execute Resource - terraform-provider-snowflake" +subcategory: "" +description: |- + Resource allowing execution of ANY SQL statement. +--- + +# snowflake_execute (Resource) + +!> **Warning** This is a dangerous resource that allows executing **ANY** SQL statement. It may destroy resources if used incorrectly. It may behave incorrectly combined with other resources. Use at your own risk. + +~> **Note** It can be theoretically used to manage resources that are not supported by the provider. This is risky and may break other resources if used incorrectly. + +~> **Note** Use `query` parameter with caution. It will fetch **ALL** the results returned by the query provided. Try to limit the number of results by writing query with filters. Query failure does not stop resource creation; it simply results in `query_results` being empty. + +Resource allowing execution of ANY SQL statement.
+ +## Example Usage + +```terraform +################################## +### simple use cases +################################## + +# create and destroy resource +resource "snowflake_execute" "test" { + execute = "CREATE DATABASE ABC" + revert = "DROP DATABASE ABC" +} + +# create and destroy resource using qualified name +resource "snowflake_execute" "test" { + execute = "CREATE DATABASE \"abc\"" + revert = "DROP DATABASE \"abc\"" +} + +# with query +resource "snowflake_execute" "test" { + execute = "CREATE DATABASE ABC" + revert = "DROP DATABASE ABC" + query = "SHOW DATABASES LIKE '%ABC%'" +} + +################################## +### grants example +################################## + +# grant and revoke privilege USAGE to ROLE on database +resource "snowflake_execute" "test" { + execute = "GRANT USAGE ON DATABASE ABC TO ROLE XYZ" + revert = "REVOKE USAGE ON DATABASE ABC FROM ROLE XYZ" +} + +# grant and revoke with for_each +variable "database_grants" { + type = list(object({ + database_name = string + role_id = string + privileges = list(string) + })) +} + +resource "snowflake_execute" "test" { + for_each = { for index, db_grant in var.database_grants : index => db_grant } + execute = "GRANT ${join(",", each.value.privileges)} ON DATABASE ${each.value.database_name} TO ROLE ${each.value.role_id}" + revert = "REVOKE ${join(",", each.value.privileges)} ON DATABASE ${each.value.database_name} FROM ROLE ${each.value.role_id}" +} + +################################## +### fixing bad configuration +################################## + +# bad revert +# 1 - resource created with a bad revert; it is constructed, revert is not validated before destroy happens +resource "snowflake_execute" "test" { + execute = "CREATE DATABASE ABC" + revert = "SELECT 1" +} + +# 2 - fix the revert first; resource won't be recreated +resource "snowflake_execute" "test" { + execute = "CREATE DATABASE ABC" + revert = "DROP DATABASE ABC" +} + +# bad query +# 1 - resource will be created; 
query_results will be empty +resource "snowflake_execute" "test" { + execute = "CREATE DATABASE ABC" + revert = "DROP DATABASE ABC" + query = "bad query" +} + +# 2 - fix the query; query_results will be calculated; resource won't be recreated +resource "snowflake_execute" "test" { + execute = "CREATE DATABASE ABC" + revert = "DROP DATABASE ABC" + query = "SHOW DATABASES LIKE '%ABC%'" +} +``` +-> **Note** Instead of using fully_qualified_name, you can reference objects managed outside Terraform by constructing a correct ID, consult [identifiers guide](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/latest/docs/guides/identifiers#new-computed-fully-qualified-name-field-in-resources). + + + +## Schema + +### Required + +- `execute` (String) SQL statement to execute. Forces recreation of resource on change. +- `revert` (String) SQL statement to revert the execute statement. Invoked when resource is being destroyed. + +### Optional + +- `query` (String) Optional SQL statement to do a read. Invoked on every resource refresh and every time it is changed. + +### Read-Only + +- `id` (String) The ID of this resource. +- `query_results` (List of Map of String) List of key-value maps (text to text) retrieved after executing read query. Will be empty if the query results in an error. + +## Import + +Import is supported using the following syntax: + +```shell +terraform import snowflake_execute.example '' +``` diff --git a/docs/resources/unsafe_execute.md b/docs/resources/unsafe_execute.md index 358daaa521..3ac9e7c7b6 100644 --- a/docs/resources/unsafe_execute.md +++ b/docs/resources/unsafe_execute.md @@ -10,12 +10,12 @@ description: |- !> **Warning** This is a dangerous resource that allows executing **ANY** SQL statement. It may destroy resources if used incorrectly. It may behave incorrectly combined with other resources. Use at your own risk. 
-~> **Note** This resource will be included in the V1 (check [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/v1-preparations/ESSENTIAL_GA_OBJECTS.MD)) but may be slightly modified before. Design decisions and changes will be listed in the [migration guide](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/MIGRATION_GUIDE.md#migration-guide). - ~> **Note** It can be theoretically used to manage resource that are not supported by the provider. This is risky and may brake other resources if used incorrectly. ~> **Note** Use `query` parameter with caution. It will fetch **ALL** the results returned by the query provided. Try to limit the number of results by writing query with filters. Query failure does not stop resource creation; it simply results in `query_results` being empty. +~> **Deprecation** This resource is deprecated and will be removed in a future major version release. Please use [snowflake_execute](./execute) instead. + Experimental resource allowing execution of ANY SQL statement. It may destroy resources if used incorrectly. It may behave incorrectly combined with other resources. Use at your own risk. ## Example Usage @@ -139,7 +139,7 @@ resource "snowflake_unsafe_execute" "test" { ### Optional -- `query` (String) Optional SQL statement to do a read. Invoked after creation and every time it is changed. +- `query` (String) Optional SQL statement to do a read. Invoked on every resource refresh and every time it is changed. 
### Read-Only diff --git a/examples/additional/deprecated_resources.MD b/examples/additional/deprecated_resources.MD index 557f4af44a..b0a8941854 100644 --- a/examples/additional/deprecated_resources.MD +++ b/examples/additional/deprecated_resources.MD @@ -6,3 +6,4 @@ - [snowflake_saml_integration](./docs/resources/saml_integration) - use [snowflake_saml2_integration](./docs/resources/saml2_integration) instead - [snowflake_stream](./docs/resources/stream) - [snowflake_tag_masking_policy_association](./docs/resources/tag_masking_policy_association) +- [snowflake_unsafe_execute](./docs/resources/unsafe_execute) - use [snowflake_execute](./docs/resources/execute) instead diff --git a/examples/resources/snowflake_execute/import.sh b/examples/resources/snowflake_execute/import.sh new file mode 100644 index 0000000000..65c1257965 --- /dev/null +++ b/examples/resources/snowflake_execute/import.sh @@ -0,0 +1 @@ +terraform import snowflake_execute.example '' diff --git a/examples/resources/snowflake_execute/resource.tf b/examples/resources/snowflake_execute/resource.tf new file mode 100644 index 0000000000..bb6cde5616 --- /dev/null +++ b/examples/resources/snowflake_execute/resource.tf @@ -0,0 +1,79 @@ +################################## +### simple use cases +################################## + +# create and destroy resource +resource "snowflake_execute" "test" { + execute = "CREATE DATABASE ABC" + revert = "DROP DATABASE ABC" +} + +# create and destroy resource using qualified name +resource "snowflake_execute" "test" { + execute = "CREATE DATABASE \"abc\"" + revert = "DROP DATABASE \"abc\"" +} + +# with query +resource "snowflake_execute" "test" { + execute = "CREATE DATABASE ABC" + revert = "DROP DATABASE ABC" + query = "SHOW DATABASES LIKE '%ABC%'" +} + +################################## +### grants example +################################## + +# grant and revoke privilege USAGE to ROLE on database +resource "snowflake_execute" "test" { + execute = "GRANT USAGE ON 
DATABASE ABC TO ROLE XYZ" + revert = "REVOKE USAGE ON DATABASE ABC FROM ROLE XYZ" +} + +# grant and revoke with for_each +variable "database_grants" { + type = list(object({ + database_name = string + role_id = string + privileges = list(string) + })) +} + +resource "snowflake_execute" "test" { + for_each = { for index, db_grant in var.database_grants : index => db_grant } + execute = "GRANT ${join(",", each.value.privileges)} ON DATABASE ${each.value.database_name} TO ROLE ${each.value.role_id}" + revert = "REVOKE ${join(",", each.value.privileges)} ON DATABASE ${each.value.database_name} FROM ROLE ${each.value.role_id}" +} + +################################## +### fixing bad configuration +################################## + +# bad revert +# 1 - resource created with a bad revert; it is constructed, revert is not validated before destroy happens +resource "snowflake_execute" "test" { + execute = "CREATE DATABASE ABC" + revert = "SELECT 1" +} + +# 2 - fix the revert first; resource won't be recreated +resource "snowflake_execute" "test" { + execute = "CREATE DATABASE ABC" + revert = "DROP DATABASE ABC" +} + +# bad query +# 1 - resource will be created; query_results will be empty +resource "snowflake_execute" "test" { + execute = "CREATE DATABASE ABC" + revert = "DROP DATABASE ABC" + query = "bad query" +} + +# 2 - fix the query; query_results will be calculated; resource won't be recreated +resource "snowflake_execute" "test" { + execute = "CREATE DATABASE ABC" + revert = "DROP DATABASE ABC" + query = "SHOW DATABASES LIKE '%ABC%'" +} diff --git a/pkg/provider/provider.go b/pkg/provider/provider.go index 37e5316e39..327c4ceafb 100644 --- a/pkg/provider/provider.go +++ b/pkg/provider/provider.go @@ -487,6 +487,7 @@ func getResources() map[string]*schema.Resource { "snowflake_database_role": resources.DatabaseRole(), "snowflake_dynamic_table": resources.DynamicTable(), "snowflake_email_notification_integration": resources.EmailNotificationIntegration(), + 
"snowflake_execute": resources.Execute(), "snowflake_external_function": resources.ExternalFunction(), "snowflake_external_oauth_integration": resources.ExternalOauthIntegration(), "snowflake_external_table": resources.ExternalTable(), diff --git a/pkg/provider/provider_acceptance_test.go b/pkg/provider/provider_acceptance_test.go index 8d9d5b0666..6321957a17 100644 --- a/pkg/provider/provider_acceptance_test.go +++ b/pkg/provider/provider_acceptance_test.go @@ -623,10 +623,10 @@ func TestAcc_Provider_sessionParameters(t *testing.T) { "statement_timeout_in_seconds": tfconfig.IntegerVariable(31337), }, ), - )) + unsafeExecuteShowSessionParameter(), + )) + executeShowSessionParameter(), Check: resource.ComposeAggregateTestCheckFunc( - resource.TestCheckResourceAttr("snowflake_unsafe_execute.t", "query_results.#", "1"), - resource.TestCheckResourceAttr("snowflake_unsafe_execute.t", "query_results.0.value", "31337"), + resource.TestCheckResourceAttr("snowflake_execute.t", "query_results.#", "1"), + resource.TestCheckResourceAttr("snowflake_execute.t", "query_results.0.value", "31337"), ), }, }, @@ -803,9 +803,9 @@ func datasourceModel() config.DatasourceModel { return datasourcemodel.Database("t", acc.TestDatabaseName) } -func unsafeExecuteShowSessionParameter() string { +func executeShowSessionParameter() string { return ` -resource snowflake_unsafe_execute "t" { +resource snowflake_execute "t" { execute = "SELECT 1" query = "SHOW PARAMETERS LIKE 'STATEMENT_TIMEOUT_IN_SECONDS' IN SESSION" revert = "SELECT 1" diff --git a/pkg/provider/resources/resources.go b/pkg/provider/resources/resources.go index 8b43f012e7..4a576e79ed 100644 --- a/pkg/provider/resources/resources.go +++ b/pkg/provider/resources/resources.go @@ -20,6 +20,7 @@ const ( DatabaseRole resource = "snowflake_database_role" DynamicTable resource = "snowflake_dynamic_table" EmailNotificationIntegration resource = "snowflake_email_notification_integration" + Execute resource = "snowflake_execute" 
ExternalFunction resource = "snowflake_external_function" ExternalTable resource = "snowflake_external_table" ExternalOauthSecurityIntegration resource = "snowflake_external_oauth_security_integration" diff --git a/pkg/resources/execute.go b/pkg/resources/execute.go new file mode 100644 index 0000000000..6ad9b189cd --- /dev/null +++ b/pkg/resources/execute.go @@ -0,0 +1,172 @@ +package resources + +import ( + "context" + "fmt" + "log" + + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/customdiff" + + "github.com/hashicorp/terraform-plugin-sdk/v2/diag" + + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/provider/resources" + + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/internal/provider" + + "github.com/hashicorp/go-uuid" + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" +) + +var executeSchema = map[string]*schema.Schema{ + "execute": { + Type: schema.TypeString, + Required: true, + Description: "SQL statement to execute. Forces recreation of resource on change.", + }, + "revert": { + Type: schema.TypeString, + Required: true, + Description: "SQL statement to revert the execute statement. Invoked when resource is being destroyed.", + }, + "query": { + Type: schema.TypeString, + Optional: true, + Description: "Optional SQL statement to do a read. Invoked on every resource refresh and every time it is changed.", + }, + "query_results": { + Type: schema.TypeList, + Computed: true, + Description: "List of key-value maps (text to text) retrieved after executing read query. 
Will be empty if the query results in an error.", + Elem: &schema.Schema{ + Type: schema.TypeMap, + Elem: &schema.Schema{ + Type: schema.TypeString, + Optional: true, + }, + }, + }, +} + +func Execute() *schema.Resource { + return &schema.Resource{ + CreateContext: TrackingCreateWrapper(resources.Execute, CreateExecute), + ReadContext: TrackingReadWrapper(resources.Execute, ReadExecute), + UpdateContext: TrackingUpdateWrapper(resources.Execute, UpdateExecute), + DeleteContext: TrackingDeleteWrapper(resources.Execute, DeleteExecute), + + Schema: executeSchema, + Importer: &schema.ResourceImporter{ + StateContext: schema.ImportStatePassthroughContext, + }, + + Description: "Resource allowing execution of ANY SQL statement.", + + CustomizeDiff: TrackingCustomDiffWrapper(resources.UnsafeExecute, customdiff.All( + customdiff.ForceNewIfChange("execute", func(ctx context.Context, oldValue, newValue, meta any) bool { + return oldValue != "" + }), + func(_ context.Context, diff *schema.ResourceDiff, _ any) error { + if diff.HasChange("query") { + err := diff.SetNewComputed("query_results") + if err != nil { + return err + } + } + return nil + }), + ), + } +} + +func CreateExecute(ctx context.Context, d *schema.ResourceData, meta any) diag.Diagnostics { + client := meta.(*provider.Context).Client + + id, err := uuid.GenerateUUID() + if err != nil { + return diag.FromErr(err) + } + + executeStatement := d.Get("execute").(string) + _, err = client.ExecUnsafe(ctx, executeStatement) + if err != nil { + return diag.FromErr(err) + } + + d.SetId(id) + log.Printf(`[INFO] SQL "%s" applied successfully\n`, executeStatement) + + return ReadExecute(ctx, d, meta) +} + +func UpdateExecute(ctx context.Context, d *schema.ResourceData, meta any) diag.Diagnostics { + if d.HasChange("query") { + return ReadExecute(ctx, d, meta) + } + return nil +} + +func ReadExecute(ctx context.Context, d *schema.ResourceData, meta any) diag.Diagnostics { + client := meta.(*provider.Context).Client + + 
readStatement := d.Get("query").(string) + + setNilResults := func() diag.Diagnostics { + log.Printf(`[DEBUG] Clearing query_results`) + err := d.Set("query_results", nil) + if err != nil { + return diag.FromErr(err) + } + return nil + } + + if readStatement == "" { + return setNilResults() + } else { + rows, err := client.QueryUnsafe(ctx, readStatement) + if err != nil { + log.Printf(`[WARN] SQL query "%s" failed with err %v`, readStatement, err) + return setNilResults() + } + log.Printf(`[INFO] SQL query "%s" executed successfully, returned rows count: %d`, readStatement, len(rows)) + rowsTransformed := make([]map[string]any, len(rows)) + for i, row := range rows { + t := make(map[string]any) + for k, v := range row { + if *v == nil { + t[k] = nil + } else { + switch (*v).(type) { + case fmt.Stringer: + t[k] = fmt.Sprintf("%v", *v) + case string: + t[k] = *v + default: + return diag.FromErr(fmt.Errorf("currently only objects convertible to String are supported by query; got %v", *v)) + } + } + } + rowsTransformed[i] = t + } + err = d.Set("query_results", rowsTransformed) + if err != nil { + return diag.FromErr(err) + } + } + + return nil +} + +func DeleteExecute(ctx context.Context, d *schema.ResourceData, meta any) diag.Diagnostics { + client := meta.(*provider.Context).Client + + revertStatement := d.Get("revert").(string) + _, err := client.ExecUnsafe(ctx, revertStatement) + if err != nil { + return diag.FromErr(err) + } + + d.SetId("") + log.Printf(`[INFO] SQL "%s" applied successfully\n`, revertStatement) + + return nil +} diff --git a/pkg/resources/unsafe_execute_acceptance_test.go b/pkg/resources/execute_acceptance_test.go similarity index 78% rename from pkg/resources/unsafe_execute_acceptance_test.go rename to pkg/resources/execute_acceptance_test.go index 33a78be8b0..d97722701f 100644 --- a/pkg/resources/unsafe_execute_acceptance_test.go +++ b/pkg/resources/execute_acceptance_test.go @@ -1,12 +1,15 @@ package resources_test import ( + "context" "errors" 
"fmt" "regexp" "strings" "testing" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/resources" + acc "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/testenvs" @@ -20,17 +23,17 @@ import ( "github.com/stretchr/testify/require" ) -func TestAcc_UnsafeExecute_basic(t *testing.T) { - id := acc.TestClient().Ids.RandomAccountObjectIdentifierWithPrefix("UNSAFE_EXECUTE_TEST_DATABASE_") +func TestAcc_Execute_basic(t *testing.T) { + id := acc.TestClient().Ids.RandomAccountObjectIdentifierWithPrefix("EXECUTE_TEST_DATABASE_") name := id.Name() - secondId := acc.TestClient().Ids.RandomAccountObjectIdentifierWithPrefix("UNSAFE_EXECUTE_TEST_DATABASE_") + secondId := acc.TestClient().Ids.RandomAccountObjectIdentifierWithPrefix("EXECUTE_TEST_DATABASE_") nameLowerCase := strings.ToLower(secondId.Name()) secondIdLowerCased := sdk.NewAccountObjectIdentifier(nameLowerCase) nameLowerCaseEscaped := fmt.Sprintf(`"%s"`, nameLowerCase) createDatabaseStatement := func(id string) string { return fmt.Sprintf("create database %s", id) } dropDatabaseStatement := func(id string) string { return fmt.Sprintf("drop database %s", id) } - resourceName := "snowflake_unsafe_execute.test" + resourceName := "snowflake_execute.test" createConfigVariables := func(id string) map[string]config.Variable { return map[string]config.Variable{ "execute": config.StringVariable(createDatabaseStatement(id)), @@ -47,7 +50,7 @@ func TestAcc_UnsafeExecute_basic(t *testing.T) { CheckDestroy: testAccCheckDatabaseExistence(t, id, false), Steps: []resource.TestStep{ { - ConfigDirectory: acc.ConfigurationDirectory("TestAcc_UnsafeExecute_commonSetup"), + ConfigDirectory: acc.ConfigurationDirectory("TestAcc_Execute_commonSetup"), ConfigVariables: createConfigVariables(name), ConfigPlanChecks: resource.ConfigPlanChecks{ PreApply: []plancheck.PlanCheck{plancheck.ExpectNonEmptyPlan()}, @@ -73,7 +76,7 @@ func 
TestAcc_UnsafeExecute_basic(t *testing.T) { CheckDestroy: testAccCheckDatabaseExistence(t, secondIdLowerCased, false), Steps: []resource.TestStep{ { - ConfigDirectory: acc.ConfigurationDirectory("TestAcc_UnsafeExecute_commonSetup"), + ConfigDirectory: acc.ConfigurationDirectory("TestAcc_Execute_commonSetup"), ConfigVariables: createConfigVariables(nameLowerCaseEscaped), ConfigPlanChecks: resource.ConfigPlanChecks{ PreApply: []plancheck.PlanCheck{plancheck.ExpectNonEmptyPlan()}, @@ -91,14 +94,14 @@ func TestAcc_UnsafeExecute_basic(t *testing.T) { }) } -func TestAcc_UnsafeExecute_withRead(t *testing.T) { - id := acc.TestClient().Ids.RandomAccountObjectIdentifierWithPrefix("UNSAFE_EXECUTE_TEST_DATABASE_") +func TestAcc_Execute_withRead(t *testing.T) { + id := acc.TestClient().Ids.RandomAccountObjectIdentifierWithPrefix("EXECUTE_TEST_DATABASE_") name := id.Name() createDatabaseStatement := func(id string) string { return fmt.Sprintf("create database %s", id) } dropDatabaseStatement := func(id string) string { return fmt.Sprintf("drop database %s", id) } showDatabaseStatement := func(id string) string { return fmt.Sprintf("show databases like '%%%s%%'", id) } - resourceName := "snowflake_unsafe_execute.test" + resourceName := "snowflake_execute.test" createConfigVariables := func(id string) map[string]config.Variable { return map[string]config.Variable{ "execute": config.StringVariable(createDatabaseStatement(id)), @@ -116,7 +119,7 @@ func TestAcc_UnsafeExecute_withRead(t *testing.T) { CheckDestroy: testAccCheckDatabaseExistence(t, id, false), Steps: []resource.TestStep{ { - ConfigDirectory: acc.ConfigurationDirectory("TestAcc_UnsafeExecute_withRead"), + ConfigDirectory: acc.ConfigurationDirectory("TestAcc_Execute_withRead"), ConfigVariables: createConfigVariables(name), ConfigPlanChecks: resource.ConfigPlanChecks{ PreApply: []plancheck.PlanCheck{plancheck.ExpectNonEmptyPlan()}, @@ -138,13 +141,13 @@ func TestAcc_UnsafeExecute_withRead(t *testing.T) { }) } -func 
TestAcc_UnsafeExecute_readRemoved(t *testing.T) { - id := acc.TestClient().Ids.RandomAccountObjectIdentifierWithPrefix("UNSAFE_EXECUTE_TEST_DATABASE_") +func TestAcc_Execute_readRemoved(t *testing.T) { + id := acc.TestClient().Ids.RandomAccountObjectIdentifierWithPrefix("EXECUTE_TEST_DATABASE_") name := id.Name() createDatabaseStatement := func(id string) string { return fmt.Sprintf("create database %s", id) } dropDatabaseStatement := func(id string) string { return fmt.Sprintf("drop database %s", id) } showDatabaseStatement := func(id string) string { return fmt.Sprintf("show databases like '%%%s%%'", id) } - resourceName := "snowflake_unsafe_execute.test" + resourceName := "snowflake_execute.test" resource.Test(t, resource.TestCase{ ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, @@ -155,7 +158,7 @@ func TestAcc_UnsafeExecute_readRemoved(t *testing.T) { CheckDestroy: testAccCheckDatabaseExistence(t, id, false), Steps: []resource.TestStep{ { - ConfigDirectory: acc.ConfigurationDirectory("TestAcc_UnsafeExecute_withRead"), + ConfigDirectory: acc.ConfigurationDirectory("TestAcc_Execute_withRead"), ConfigVariables: map[string]config.Variable{ "execute": config.StringVariable(createDatabaseStatement(name)), "revert": config.StringVariable(dropDatabaseStatement(name)), @@ -170,7 +173,7 @@ func TestAcc_UnsafeExecute_readRemoved(t *testing.T) { ), }, { - ConfigDirectory: acc.ConfigurationDirectory("TestAcc_UnsafeExecute_withRead"), + ConfigDirectory: acc.ConfigurationDirectory("TestAcc_Execute_withRead"), ConfigVariables: map[string]config.Variable{ "execute": config.StringVariable(createDatabaseStatement(name)), "revert": config.StringVariable(dropDatabaseStatement(name)), @@ -188,13 +191,13 @@ func TestAcc_UnsafeExecute_readRemoved(t *testing.T) { }) } -func TestAcc_UnsafeExecute_badQuery(t *testing.T) { - id := acc.TestClient().Ids.RandomAccountObjectIdentifierWithPrefix("UNSAFE_EXECUTE_TEST_DATABASE_") +func TestAcc_Execute_badQuery(t *testing.T) { + id 
:= acc.TestClient().Ids.RandomAccountObjectIdentifierWithPrefix("EXECUTE_TEST_DATABASE_") name := id.Name() createDatabaseStatement := func(id string) string { return fmt.Sprintf("create database %s", id) } dropDatabaseStatement := func(id string) string { return fmt.Sprintf("drop database %s", id) } showDatabaseStatement := func(id string) string { return fmt.Sprintf("show databases like '%%%s%%'", id) } - resourceName := "snowflake_unsafe_execute.test" + resourceName := "snowflake_execute.test" resource.Test(t, resource.TestCase{ ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, @@ -205,7 +208,7 @@ func TestAcc_UnsafeExecute_badQuery(t *testing.T) { CheckDestroy: testAccCheckDatabaseExistence(t, id, false), Steps: []resource.TestStep{ { - ConfigDirectory: acc.ConfigurationDirectory("TestAcc_UnsafeExecute_withRead"), + ConfigDirectory: acc.ConfigurationDirectory("TestAcc_Execute_withRead"), ConfigVariables: map[string]config.Variable{ "execute": config.StringVariable(createDatabaseStatement(name)), "revert": config.StringVariable(dropDatabaseStatement(name)), @@ -223,7 +226,7 @@ func TestAcc_UnsafeExecute_badQuery(t *testing.T) { ), }, { - ConfigDirectory: acc.ConfigurationDirectory("TestAcc_UnsafeExecute_withRead"), + ConfigDirectory: acc.ConfigurationDirectory("TestAcc_Execute_withRead"), ConfigVariables: map[string]config.Variable{ "execute": config.StringVariable(createDatabaseStatement(name)), "revert": config.StringVariable(dropDatabaseStatement(name)), @@ -243,7 +246,7 @@ func TestAcc_UnsafeExecute_badQuery(t *testing.T) { }) } -func TestAcc_UnsafeExecute_invalidExecuteStatement(t *testing.T) { +func TestAcc_Execute_invalidExecuteStatement(t *testing.T) { invalidCreateStatement := "create database" invalidDropStatement := "drop database" @@ -262,7 +265,7 @@ func TestAcc_UnsafeExecute_invalidExecuteStatement(t *testing.T) { }, Steps: []resource.TestStep{ { - ConfigDirectory: acc.ConfigurationDirectory("TestAcc_UnsafeExecute_commonSetup"), + 
ConfigDirectory: acc.ConfigurationDirectory("TestAcc_Execute_commonSetup"), ConfigVariables: createConfigVariables(), ConfigPlanChecks: resource.ConfigPlanChecks{ PreApply: []plancheck.PlanCheck{plancheck.ExpectNonEmptyPlan()}, @@ -273,16 +276,16 @@ func TestAcc_UnsafeExecute_invalidExecuteStatement(t *testing.T) { }) } -func TestAcc_UnsafeExecute_invalidRevertStatement(t *testing.T) { - id := acc.TestClient().Ids.RandomAccountObjectIdentifierWithPrefix("UNSAFE_EXECUTE_TEST_DATABASE_") +func TestAcc_Execute_invalidRevertStatement(t *testing.T) { + id := acc.TestClient().Ids.RandomAccountObjectIdentifierWithPrefix("EXECUTE_TEST_DATABASE_") name := id.Name() - updatedId := acc.TestClient().Ids.RandomAccountObjectIdentifierWithPrefix("UNSAFE_EXECUTE_TEST_DATABASE_") + updatedId := acc.TestClient().Ids.RandomAccountObjectIdentifierWithPrefix("EXECUTE_TEST_DATABASE_") updatedName := updatedId.Name() createDatabaseStatement := func(id string) string { return fmt.Sprintf("create database %s", id) } dropDatabaseStatement := func(id string) string { return fmt.Sprintf("drop database %s", id) } invalidDropStatement := "drop database" - resourceName := "snowflake_unsafe_execute.test" + resourceName := "snowflake_execute.test" resource.Test(t, resource.TestCase{ ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, @@ -303,7 +306,7 @@ func TestAcc_UnsafeExecute_invalidRevertStatement(t *testing.T) { }, Steps: []resource.TestStep{ { - ConfigDirectory: acc.ConfigurationDirectory("TestAcc_UnsafeExecute_commonSetup"), + ConfigDirectory: acc.ConfigurationDirectory("TestAcc_Execute_commonSetup"), ConfigVariables: map[string]config.Variable{ "execute": config.StringVariable(createDatabaseStatement(name)), "revert": config.StringVariable(invalidDropStatement), @@ -319,7 +322,7 @@ func TestAcc_UnsafeExecute_invalidRevertStatement(t *testing.T) { ), }, { - ConfigDirectory: acc.ConfigurationDirectory("TestAcc_UnsafeExecute_commonSetup"), + ConfigDirectory: 
acc.ConfigurationDirectory("TestAcc_Execute_commonSetup"), ConfigVariables: map[string]config.Variable{ "execute": config.StringVariable(createDatabaseStatement(updatedName)), "revert": config.StringVariable(invalidDropStatement), @@ -330,7 +333,7 @@ func TestAcc_UnsafeExecute_invalidRevertStatement(t *testing.T) { ExpectError: regexp.MustCompile("SQL compilation error"), }, { - ConfigDirectory: acc.ConfigurationDirectory("TestAcc_UnsafeExecute_commonSetup"), + ConfigDirectory: acc.ConfigurationDirectory("TestAcc_Execute_commonSetup"), ConfigVariables: map[string]config.Variable{ "execute": config.StringVariable(createDatabaseStatement(name)), "revert": config.StringVariable(dropDatabaseStatement(name)), @@ -347,7 +350,7 @@ func TestAcc_UnsafeExecute_invalidRevertStatement(t *testing.T) { ), }, { - ConfigDirectory: acc.ConfigurationDirectory("TestAcc_UnsafeExecute_commonSetup"), + ConfigDirectory: acc.ConfigurationDirectory("TestAcc_Execute_commonSetup"), ConfigVariables: map[string]config.Variable{ "execute": config.StringVariable(createDatabaseStatement(updatedName)), "revert": config.StringVariable(dropDatabaseStatement(updatedName)), @@ -367,15 +370,15 @@ func TestAcc_UnsafeExecute_invalidRevertStatement(t *testing.T) { }) } -func TestAcc_UnsafeExecute_revertUpdated(t *testing.T) { - id := acc.TestClient().Ids.RandomAccountObjectIdentifierWithPrefix("UNSAFE_EXECUTE_TEST_DATABASE_") +func TestAcc_Execute_revertUpdated(t *testing.T) { + id := acc.TestClient().Ids.RandomAccountObjectIdentifierWithPrefix("EXECUTE_TEST_DATABASE_") name := id.Name() execute := fmt.Sprintf("create database %s", name) revert := fmt.Sprintf("drop database %s", name) notMatchingRevert := "select 1" var savedId string - resourceName := "snowflake_unsafe_execute.test" + resourceName := "snowflake_execute.test" createConfigVariables := func(execute string, revert string) map[string]config.Variable { return map[string]config.Variable{ "execute": config.StringVariable(execute), @@ -392,7 
+395,7 @@ func TestAcc_UnsafeExecute_revertUpdated(t *testing.T) { CheckDestroy: testAccCheckDatabaseExistence(t, id, false), Steps: []resource.TestStep{ { - ConfigDirectory: acc.ConfigurationDirectory("TestAcc_UnsafeExecute_commonSetup"), + ConfigDirectory: acc.ConfigurationDirectory("TestAcc_Execute_commonSetup"), ConfigVariables: createConfigVariables(execute, notMatchingRevert), ConfigPlanChecks: resource.ConfigPlanChecks{ PreApply: []plancheck.PlanCheck{plancheck.ExpectNonEmptyPlan()}, @@ -409,7 +412,7 @@ func TestAcc_UnsafeExecute_revertUpdated(t *testing.T) { ), }, { - ConfigDirectory: acc.ConfigurationDirectory("TestAcc_UnsafeExecute_commonSetup"), + ConfigDirectory: acc.ConfigurationDirectory("TestAcc_Execute_commonSetup"), ConfigVariables: createConfigVariables(execute, revert), ConfigPlanChecks: resource.ConfigPlanChecks{ PreApply: []plancheck.PlanCheck{plancheck.ExpectNonEmptyPlan()}, @@ -431,20 +434,20 @@ func TestAcc_UnsafeExecute_revertUpdated(t *testing.T) { }) } -func TestAcc_UnsafeExecute_executeUpdated(t *testing.T) { - id := acc.TestClient().Ids.RandomAccountObjectIdentifierWithPrefix("UNSAFE_EXECUTE_TEST_DATABASE_") +func TestAcc_Execute_executeUpdated(t *testing.T) { + id := acc.TestClient().Ids.RandomAccountObjectIdentifierWithPrefix("EXECUTE_TEST_DATABASE_") name := id.Name() execute := fmt.Sprintf("create database %s", name) revert := fmt.Sprintf("drop database %s", name) - newId := acc.TestClient().Ids.RandomAccountObjectIdentifierWithPrefix("UNSAFE_EXECUTE_TEST_DATABASE_") + newId := acc.TestClient().Ids.RandomAccountObjectIdentifierWithPrefix("EXECUTE_TEST_DATABASE_") newName := newId.Name() newExecute := fmt.Sprintf("create database %s", newName) newRevert := fmt.Sprintf("drop database %s", newName) var savedId string - resourceName := "snowflake_unsafe_execute.test" + resourceName := "snowflake_execute.test" createConfigVariables := func(execute string, revert string) map[string]config.Variable { return map[string]config.Variable{ 
"execute": config.StringVariable(execute), @@ -471,7 +474,7 @@ func TestAcc_UnsafeExecute_executeUpdated(t *testing.T) { }, Steps: []resource.TestStep{ { - ConfigDirectory: acc.ConfigurationDirectory("TestAcc_UnsafeExecute_commonSetup"), + ConfigDirectory: acc.ConfigurationDirectory("TestAcc_Execute_commonSetup"), ConfigVariables: createConfigVariables(execute, revert), ConfigPlanChecks: resource.ConfigPlanChecks{ PreApply: []plancheck.PlanCheck{plancheck.ExpectNonEmptyPlan()}, @@ -488,7 +491,7 @@ func TestAcc_UnsafeExecute_executeUpdated(t *testing.T) { ), }, { - ConfigDirectory: acc.ConfigurationDirectory("TestAcc_UnsafeExecute_commonSetup"), + ConfigDirectory: acc.ConfigurationDirectory("TestAcc_Execute_commonSetup"), ConfigVariables: createConfigVariables(newExecute, newRevert), ConfigPlanChecks: resource.ConfigPlanChecks{ PreApply: []plancheck.PlanCheck{plancheck.ExpectNonEmptyPlan()}, @@ -511,7 +514,7 @@ func TestAcc_UnsafeExecute_executeUpdated(t *testing.T) { }) } -func TestAcc_UnsafeExecute_grants(t *testing.T) { +func TestAcc_Execute_grants(t *testing.T) { _ = testenvs.GetOrSkipTest(t, testenvs.EnableAcceptance) acc.TestAccPreCheck(t) @@ -527,7 +530,7 @@ func TestAcc_UnsafeExecute_grants(t *testing.T) { execute := fmt.Sprintf("GRANT %s ON DATABASE %s TO ROLE %s", privilege, database.ID().FullyQualifiedName(), role.ID().FullyQualifiedName()) revert := fmt.Sprintf("REVOKE %s ON DATABASE %s FROM ROLE %s", privilege, database.ID().FullyQualifiedName(), role.ID().FullyQualifiedName()) - resourceName := "snowflake_unsafe_execute.test" + resourceName := "snowflake_execute.test" createConfigVariables := func(execute string, revert string) map[string]config.Variable { return map[string]config.Variable{ "execute": config.StringVariable(execute), @@ -547,7 +550,7 @@ func TestAcc_UnsafeExecute_grants(t *testing.T) { }, Steps: []resource.TestStep{ { - ConfigDirectory: acc.ConfigurationDirectory("TestAcc_UnsafeExecute_commonSetup"), + ConfigDirectory: 
acc.ConfigurationDirectory("TestAcc_Execute_commonSetup"), ConfigVariables: createConfigVariables(execute, revert), ConfigPlanChecks: resource.ConfigPlanChecks{ PreApply: []plancheck.PlanCheck{plancheck.ExpectNonEmptyPlan()}, @@ -563,19 +566,19 @@ func TestAcc_UnsafeExecute_grants(t *testing.T) { }) } -// TestAcc_UnsafeExecute_grantsComplex test fails with: +// TestAcc_Execute_grantsComplex test fails with: // -// testing_new_config.go:156: unexpected index type (string) for "snowflake_unsafe_execute.test[\"0\"]", for_each is not supported -// testing_new.go:68: unexpected index type (string) for "snowflake_unsafe_execute.test[\"0\"]", for_each is not supported +// testing_new_config.go:156: unexpected index type (string) for "snowflake_execute.test[\"0\"]", for_each is not supported +// testing_new.go:68: unexpected index type (string) for "snowflake_execute.test[\"0\"]", for_each is not supported // // Quick search unveiled this issue: https://github.com/hashicorp/terraform-plugin-sdk/issues/536. 
// // It also seems that it is working correctly underneath; with TF_LOG set to DEBUG we have: // -// 2023/11/26 17:16:03 [DEBUG] SQL "GRANT CREATE SCHEMA,MODIFY ON DATABASE UNSAFE_EXECUTE_TEST_DATABASE_4397 TO ROLE UNSAFE_EXECUTE_TEST_ROLE_1145" applied successfully -// 2023/11/26 17:16:03 [DEBUG] SQL "GRANT MODIFY,USAGE ON DATABASE UNSAFE_EXECUTE_TEST_DATABASE_3740 TO ROLE UNSAFE_EXECUTE_TEST_ROLE_3008" applied successfully -func TestAcc_UnsafeExecute_grantsComplex(t *testing.T) { - t.Skip("Skipping TestAcc_UnsafeExecute_grantsComplex because of https://github.com/hashicorp/terraform-plugin-sdk/issues/536 issue") +// 2023/11/26 17:16:03 [DEBUG] SQL "GRANT CREATE SCHEMA,MODIFY ON DATABASE EXECUTE_TEST_DATABASE_4397 TO ROLE EXECUTE_TEST_ROLE_1145" applied successfully +// 2023/11/26 17:16:03 [DEBUG] SQL "GRANT MODIFY,USAGE ON DATABASE EXECUTE_TEST_DATABASE_3740 TO ROLE EXECUTE_TEST_ROLE_3008" applied successfully +func TestAcc_Execute_grantsComplex(t *testing.T) { + t.Skip("Skipping TestAcc_Execute_grantsComplex because of https://github.com/hashicorp/terraform-plugin-sdk/issues/536 issue") client := acc.TestClient() @@ -599,8 +602,8 @@ func TestAcc_UnsafeExecute_grantsComplex(t *testing.T) { privilege2 := sdk.AccountObjectPrivilegeModify privilege3 := sdk.AccountObjectPrivilegeUsage - // resourceName1 := "snowflake_unsafe_execute.test.0" - // resourceName2 := "snowflake_unsafe_execute.test.1" + // resourceName1 := "snowflake_execute.test.0" + // resourceName2 := "snowflake_execute.test.1" createConfigVariables := func() map[string]config.Variable { return map[string]config.Variable{ "database_grants": config.ListVariable(config.ObjectVariable(map[string]config.Variable{ @@ -671,8 +674,8 @@ func TestAcc_UnsafeExecute_grantsComplex(t *testing.T) { } // proves https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2491 -func TestAcc_UnsafeExecute_queryResultsBug(t *testing.T) { - resourceName := "snowflake_unsafe_execute.test" +func 
TestAcc_Execute_queryResultsBug(t *testing.T) { + resourceName := "snowflake_execute.test" resource.Test(t, resource.TestCase{ ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, @@ -682,7 +685,7 @@ func TestAcc_UnsafeExecute_queryResultsBug(t *testing.T) { }, Steps: []resource.TestStep{ { - Config: unsafeExecuteConfig(108), + Config: executeConfig(108), Check: resource.ComposeTestCheckFunc( resource.TestCheckResourceAttr(resourceName, "query", "SELECT 108"), resource.TestCheckResourceAttrSet(resourceName, "query_results.#"), @@ -690,7 +693,7 @@ func TestAcc_UnsafeExecute_queryResultsBug(t *testing.T) { ), }, { - Config: unsafeExecuteConfig(96), + Config: executeConfig(96), Check: resource.ComposeTestCheckFunc( resource.TestCheckResourceAttr(resourceName, "query", "SELECT 96"), resource.TestCheckResourceAttrSet(resourceName, "query_results.#"), @@ -704,20 +707,68 @@ func TestAcc_UnsafeExecute_queryResultsBug(t *testing.T) { }) } -func unsafeExecuteConfig(queryNumber int) string { +func executeConfig(queryNumber int) string { return fmt.Sprintf(` -resource "snowflake_unsafe_execute" "test" { +resource "snowflake_execute" "test" { execute = "SELECT 18" revert = "SELECT 36" query = "SELECT %d" } -output "unsafe" { - value = snowflake_unsafe_execute.test.query_results +output "query_results_output" { + value = snowflake_execute.test.query_results } `, queryNumber) } +func TestAcc_Execute_QueryResultsRecomputedWithoutQueryChanges(t *testing.T) { + resourceName := "snowflake_execute.test" + id := acc.TestClient().Ids.RandomAccountObjectIdentifier() + + resource.Test(t, resource.TestCase{ + ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, + PreCheck: func() { acc.TestAccPreCheck(t) }, + TerraformVersionChecks: []tfversion.TerraformVersionCheck{ + tfversion.RequireAbove(tfversion.Version1_5_0), + }, + Steps: []resource.TestStep{ + { + Config: executeConfigCreateDatabase(id), + Check: resource.ComposeTestCheckFunc( + 
resource.TestCheckResourceAttr(resourceName, "query_results.#", "1"), + resource.TestCheckResourceAttr(resourceName, "query_results.0.name", id.Name()), + resource.TestCheckResourceAttr(resourceName, "query_results.0.comment", ""), + ), + }, + { + PreConfig: func() { + acc.TestClient().Database.Alter(t, id, &sdk.AlterDatabaseOptions{ + Set: &sdk.DatabaseSet{ + Comment: sdk.String("some comment"), + }, + }) + }, + Config: executeConfigCreateDatabase(id), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr(resourceName, "query_results.#", "1"), + resource.TestCheckResourceAttr(resourceName, "query_results.0.name", id.Name()), + resource.TestCheckResourceAttr(resourceName, "query_results.0.comment", "some comment"), + ), + }, + }, + }) +} + +func executeConfigCreateDatabase(id sdk.AccountObjectIdentifier) string { + return fmt.Sprintf(` +resource "snowflake_execute" "test" { + execute = "CREATE DATABASE \"%[1]s\"" + revert = "DROP DATABASE \"%[1]s\"" + query = "SHOW DATABASES LIKE '%[1]s'" +} +`, id.Name()) +} + func verifyGrantExists(t *testing.T, roleId sdk.AccountObjectIdentifier, privilege sdk.AccountObjectPrivilege, shouldExist bool) func(state *terraform.State) error { t.Helper() return func(state *terraform.State) error { @@ -740,3 +791,59 @@ func verifyGrantExists(t *testing.T, roleId sdk.AccountObjectIdentifier, privile return nil } } + +func TestAcc_Execute_ImportWithRandomId(t *testing.T) { + id := acc.TestClient().Ids.RandomAccountObjectIdentifier() + newId := acc.TestClient().Ids.RandomAccountObjectIdentifier() + + resource.Test(t, resource.TestCase{ + ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, + PreCheck: func() { acc.TestAccPreCheck(t) }, + TerraformVersionChecks: []tfversion.TerraformVersionCheck{ + tfversion.RequireAbove(tfversion.Version1_5_0), + }, + Steps: []resource.TestStep{ + { + PreConfig: func() { + _, databaseCleanup := acc.TestClient().Database.CreateDatabaseWithIdentifier(t, id) + 
t.Cleanup(databaseCleanup) + }, + Config: executeConfigCreateDatabase(id), + ResourceName: "snowflake_execute.test", + ImportState: true, + ImportStatePersist: true, + ImportStateId: "random_id", + ImportStateVerifyIgnore: []string{"query_results"}, + }, + // filling the empty state fields (execute changed from empty) + { + ConfigPlanChecks: resource.ConfigPlanChecks{ + PreApply: []plancheck.PlanCheck{ + plancheck.ExpectResourceAction("snowflake_execute.test", plancheck.ResourceActionUpdate), + }, + }, + Config: executeConfigCreateDatabase(id), + }, + // change the id in every query to see if: + // 1. execute will trigger force new behavior + // 2. an old database is used in delete (it is) + { + ConfigPlanChecks: resource.ConfigPlanChecks{ + PreApply: []plancheck.PlanCheck{ + plancheck.ExpectResourceAction("snowflake_execute.test", plancheck.ResourceActionDestroyBeforeCreate), + }, + PostApplyPostRefresh: []plancheck.PlanCheck{ + resources.PlanCheckFunc(func(ctx context.Context, req plancheck.CheckPlanRequest, resp *plancheck.CheckPlanResponse) { + _, err := acc.TestClient().Database.Show(t, id) + if err == nil { + resp.Error = fmt.Errorf("database %s still exist", id.FullyQualifiedName()) + t.Cleanup(acc.TestClient().Database.DropDatabaseFunc(t, id)) + } + }), + }, + }, + Config: executeConfigCreateDatabase(newId), + }, + }, + }) +} diff --git a/pkg/resources/helpers.go b/pkg/resources/helpers.go index 6752840e1e..d7638e6093 100644 --- a/pkg/resources/helpers.go +++ b/pkg/resources/helpers.go @@ -1,9 +1,12 @@ package resources import ( + "context" "fmt" "slices" + "github.com/hashicorp/terraform-plugin-testing/plancheck" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/snowflake" "github.com/hashicorp/terraform-plugin-sdk/v2/diag" @@ -292,3 +295,9 @@ func parseSchemaObjectIdentifierSet(v any) ([]sdk.SchemaObjectIdentifier, error) } return ids, nil } + +type PlanCheckFunc func(ctx 
context.Context, req plancheck.CheckPlanRequest, resp *plancheck.CheckPlanResponse) + +func (fn PlanCheckFunc) CheckPlan(ctx context.Context, req plancheck.CheckPlanRequest, resp *plancheck.CheckPlanResponse) { + fn(ctx, req, resp) +} diff --git a/pkg/resources/testdata/TestAcc_UnsafeExecute_commonSetup/test.tf b/pkg/resources/testdata/TestAcc_Execute_commonSetup/test.tf similarity index 52% rename from pkg/resources/testdata/TestAcc_UnsafeExecute_commonSetup/test.tf rename to pkg/resources/testdata/TestAcc_Execute_commonSetup/test.tf index a71f75afd3..f5458de821 100644 --- a/pkg/resources/testdata/TestAcc_UnsafeExecute_commonSetup/test.tf +++ b/pkg/resources/testdata/TestAcc_Execute_commonSetup/test.tf @@ -1,4 +1,4 @@ -resource "snowflake_unsafe_execute" "test" { +resource "snowflake_execute" "test" { execute = var.execute revert = var.revert } diff --git a/pkg/resources/testdata/TestAcc_UnsafeExecute_commonSetup/variables.tf b/pkg/resources/testdata/TestAcc_Execute_commonSetup/variables.tf similarity index 100% rename from pkg/resources/testdata/TestAcc_UnsafeExecute_commonSetup/variables.tf rename to pkg/resources/testdata/TestAcc_Execute_commonSetup/variables.tf diff --git a/pkg/resources/testdata/TestAcc_UnsafeExecute_grantsComplex/test.tf b/pkg/resources/testdata/TestAcc_Execute_grantsComplex/test.tf similarity index 88% rename from pkg/resources/testdata/TestAcc_UnsafeExecute_grantsComplex/test.tf rename to pkg/resources/testdata/TestAcc_Execute_grantsComplex/test.tf index 96c70bae50..e553905be9 100644 --- a/pkg/resources/testdata/TestAcc_UnsafeExecute_grantsComplex/test.tf +++ b/pkg/resources/testdata/TestAcc_Execute_grantsComplex/test.tf @@ -1,4 +1,4 @@ -resource "snowflake_unsafe_execute" "test" { +resource "snowflake_execute" "test" { for_each = { for index, db_grant in var.database_grants : index => db_grant } execute = "GRANT ${join(",", each.value.privileges)} ON DATABASE ${each.value.database_name} TO ROLE ${each.value.role_id}" revert = "REVOKE 
${join(",", each.value.privileges)} ON DATABASE ${each.value.database_name} FROM ROLE ${each.value.role_id}" diff --git a/pkg/resources/testdata/TestAcc_UnsafeExecute_grantsComplex/variables.tf b/pkg/resources/testdata/TestAcc_Execute_grantsComplex/variables.tf similarity index 100% rename from pkg/resources/testdata/TestAcc_UnsafeExecute_grantsComplex/variables.tf rename to pkg/resources/testdata/TestAcc_Execute_grantsComplex/variables.tf diff --git a/pkg/resources/testdata/TestAcc_UnsafeExecute_withRead/test.tf b/pkg/resources/testdata/TestAcc_Execute_withRead/test.tf similarity index 61% rename from pkg/resources/testdata/TestAcc_UnsafeExecute_withRead/test.tf rename to pkg/resources/testdata/TestAcc_Execute_withRead/test.tf index ff23b05f4e..eb08263587 100644 --- a/pkg/resources/testdata/TestAcc_UnsafeExecute_withRead/test.tf +++ b/pkg/resources/testdata/TestAcc_Execute_withRead/test.tf @@ -1,4 +1,4 @@ -resource "snowflake_unsafe_execute" "test" { +resource "snowflake_execute" "test" { execute = var.execute revert = var.revert query = var.query diff --git a/pkg/resources/testdata/TestAcc_UnsafeExecute_withRead/variables.tf b/pkg/resources/testdata/TestAcc_Execute_withRead/variables.tf similarity index 100% rename from pkg/resources/testdata/TestAcc_UnsafeExecute_withRead/variables.tf rename to pkg/resources/testdata/TestAcc_Execute_withRead/variables.tf diff --git a/pkg/resources/unsafe_execute.go b/pkg/resources/unsafe_execute.go index 8d56630ea6..822900ce02 100644 --- a/pkg/resources/unsafe_execute.go +++ b/pkg/resources/unsafe_execute.go @@ -1,164 +1,12 @@ package resources import ( - "context" - "fmt" - "log" - - "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/provider/resources" - - "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/internal/provider" - - "github.com/hashicorp/go-uuid" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" ) -var unsafeExecuteSchema = map[string]*schema.Schema{ - "execute": { - Type: 
schema.TypeString, - Required: true, - ForceNew: true, - Description: "SQL statement to execute. Forces recreation of resource on change.", - }, - "revert": { - Type: schema.TypeString, - Required: true, - Description: "SQL statement to revert the execute statement. Invoked when resource is being destroyed.", - }, - "query": { - Type: schema.TypeString, - Optional: true, - Description: "Optional SQL statement to do a read. Invoked after creation and every time it is changed.", - }, - "query_results": { - Type: schema.TypeList, - Computed: true, - Description: "List of key-value maps (text to text) retrieved after executing read query. Will be empty if the query results in an error.", - Elem: &schema.Schema{ - Type: schema.TypeMap, - Elem: &schema.Schema{ - Type: schema.TypeString, - Optional: true, - }, - }, - }, -} - func UnsafeExecute() *schema.Resource { - return &schema.Resource{ - Create: CreateUnsafeExecute, - Read: ReadUnsafeExecute, - Delete: DeleteUnsafeExecute, - Update: UpdateUnsafeExecute, - - Schema: unsafeExecuteSchema, - - Description: "Experimental resource allowing execution of ANY SQL statement. It may destroy resources if used incorrectly. It may behave incorrectly combined with other resources. 
Use at your own risk.", - - CustomizeDiff: TrackingCustomDiffWrapper(resources.UnsafeExecute, func(_ context.Context, diff *schema.ResourceDiff, _ interface{}) error { - if diff.HasChange("query") { - err := diff.SetNewComputed("query_results") - if err != nil { - return err - } - } - return nil - }), - } -} - -func ReadUnsafeExecute(d *schema.ResourceData, meta interface{}) error { - client := meta.(*provider.Context).Client - ctx := context.Background() - - readStatement := d.Get("query").(string) - - setNilResults := func() error { - log.Printf(`[DEBUG] Clearing query_results`) - err := d.Set("query_results", nil) - if err != nil { - return err - } - return nil - } - - if readStatement == "" { - return setNilResults() - } else { - rows, err := client.QueryUnsafe(ctx, readStatement) - if err != nil { - log.Printf(`[WARN] SQL query "%s" failed with err %v`, readStatement, err) - return setNilResults() - } - log.Printf(`[INFO] SQL query "%s" executed successfully, returned rows count: %d`, readStatement, len(rows)) - rowsTransformed := make([]map[string]any, len(rows)) - for i, row := range rows { - t := make(map[string]any) - for k, v := range row { - if *v == nil { - t[k] = nil - } else { - switch (*v).(type) { - case fmt.Stringer: - t[k] = fmt.Sprintf("%v", *v) - case string: - t[k] = *v - default: - return fmt.Errorf("currently only objects convertible to String are supported by query; got %v", *v) - } - } - } - rowsTransformed[i] = t - } - err = d.Set("query_results", rowsTransformed) - if err != nil { - return err - } - } - - return nil -} - -func CreateUnsafeExecute(d *schema.ResourceData, meta interface{}) error { - client := meta.(*provider.Context).Client - ctx := context.Background() - - id, err := uuid.GenerateUUID() - if err != nil { - return err - } - - executeStatement := d.Get("execute").(string) - _, err = client.ExecUnsafe(ctx, executeStatement) - if err != nil { - return err - } - - d.SetId(id) - log.Printf(`[INFO] SQL "%s" applied 
successfully\n`, executeStatement) - - return ReadUnsafeExecute(d, meta) -} - -func DeleteUnsafeExecute(d *schema.ResourceData, meta interface{}) error { - client := meta.(*provider.Context).Client - ctx := context.Background() - - revertStatement := d.Get("revert").(string) - _, err := client.ExecUnsafe(ctx, revertStatement) - if err != nil { - return err - } - - d.SetId("") - log.Printf(`[INFO] SQL "%s" applied successfully\n`, revertStatement) - - return nil -} - -func UpdateUnsafeExecute(d *schema.ResourceData, meta interface{}) error { - if d.HasChange("query") { - return ReadUnsafeExecute(d, meta) - } - return nil + unsafeExecute := Execute() + unsafeExecute.Description = "Experimental resource allowing execution of ANY SQL statement. It may destroy resources if used incorrectly. It may behave incorrectly combined with other resources. Use at your own risk." + unsafeExecute.DeprecationMessage = "This resource is deprecated and will be removed in a future major version release. Please use snowflake_execute instead." 
+ return unsafeExecute } diff --git a/pkg/scripts/issues/labels.go b/pkg/scripts/issues/labels.go index cb5a130d3e..0d1e0b29c6 100644 --- a/pkg/scripts/issues/labels.go +++ b/pkg/scripts/issues/labels.go @@ -18,6 +18,7 @@ var RepositoryLabels = []string{ "resource:database_role", "resource:dynamic_table", "resource:email_notification_integration", + "resource:execute", "resource:external_function", "resource:external_oauth_integration", "resource:external_table", diff --git a/templates/resources/execute.md.tmpl b/templates/resources/execute.md.tmpl new file mode 100644 index 0000000000..80794cba89 --- /dev/null +++ b/templates/resources/execute.md.tmpl @@ -0,0 +1,40 @@ +--- +# generated by https://github.com/hashicorp/terraform-plugin-docs +page_title: "{{.Name}} {{.Type}} - {{.ProviderName}}" +subcategory: "" +description: |- +{{ if gt (len (split .Description "")) 1 -}} +{{ index (split .Description "") 1 | plainmarkdown | trimspace | prefixlines " " }} +{{- else -}} +{{ .Description | plainmarkdown | trimspace | prefixlines " " }} +{{- end }} +--- + +# {{.Name}} ({{.Type}}) + +!> **Warning** This is a dangerous resource that allows executing **ANY** SQL statement. It may destroy resources if used incorrectly. It may behave incorrectly combined with other resources. Use at your own risk. + +~> **Note** It can be theoretically used to manage resource that are not supported by the provider. This is risky and may brake other resources if used incorrectly. + +~> **Note** Use `query` parameter with caution. It will fetch **ALL** the results returned by the query provided. Try to limit the number of results by writing query with filters. Query failure does not stop resource creation; it simply results in `query_results` being empty. 
+ +{{ .Description | trimspace }} + +{{ if .HasExample -}} +## Example Usage + +{{ tffile (printf "examples/resources/%s/resource.tf" .Name)}} +-> **Note** Instead of using fully_qualified_name, you can reference objects managed outside Terraform by constructing a correct ID, consult [identifiers guide](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/latest/docs/guides/identifiers#new-computed-fully-qualified-name-field-in-resources). + + +{{- end }} + +{{ .SchemaMarkdown | trimspace }} +{{- if .HasImport }} + +## Import + +Import is supported using the following syntax: + +{{ codefile "shell" (printf "examples/resources/%s/import.sh" .Name)}} +{{- end }} diff --git a/templates/resources/unsafe_execute.md.tmpl b/templates/resources/unsafe_execute.md.tmpl index da7c2970a9..80794cba89 100644 --- a/templates/resources/unsafe_execute.md.tmpl +++ b/templates/resources/unsafe_execute.md.tmpl @@ -14,8 +14,6 @@ description: |- !> **Warning** This is a dangerous resource that allows executing **ANY** SQL statement. It may destroy resources if used incorrectly. It may behave incorrectly combined with other resources. Use at your own risk. -~> **Note** This resource will be included in the V1 (check [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/v1-preparations/ESSENTIAL_GA_OBJECTS.MD)) but may be slightly modified before. Design decisions and changes will be listed in the [migration guide](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/MIGRATION_GUIDE.md#migration-guide). - ~> **Note** It can be theoretically used to manage resource that are not supported by the provider. This is risky and may brake other resources if used incorrectly. ~> **Note** Use `query` parameter with caution. It will fetch **ALL** the results returned by the query provided. Try to limit the number of results by writing query with filters. 
Query failure does not stop resource creation; it simply results in `query_results` being empty. @@ -38,5 +36,5 @@ description: |- Import is supported using the following syntax: -{{ printf "{{codefile \"shell\" %q}}" .ImportFile }} +{{ codefile "shell" (printf "examples/resources/%s/import.sh" .Name)}} {{- end }} From b3d6b9e5b4f327b186161f50dc9ac732d199fb19 Mon Sep 17 00:00:00 2001 From: Jakub Michalak Date: Thu, 12 Dec 2024 16:17:12 +0100 Subject: [PATCH 09/20] feat: Add a new account roles data source (#3257) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit - add a new data source for account rolers to be consistent with resources - prove that database role with the same prefix is not in the output of SHOW ROLES - deprecate the `roles` data source (it will be removed in v1-ready branch) ## Test Plan * [x] acceptance tests * [ ] … ## References https://docs.snowflake.com/en/sql-reference/sql/show-roles --- MIGRATION_GUIDE.md | 20 +++ docs/data-sources/account_roles.md | 99 +++++++++++++++ docs/data-sources/role.md | 2 +- docs/data-sources/roles.md | 2 + docs/index.md | 3 +- docs/resources/database.md | 3 +- docs/resources/secondary_database.md | 3 +- docs/resources/shared_database.md | 3 +- examples/additional/deprecated_datasources.MD | 3 +- .../snowflake_account_roles/data-source.tf | 48 ++++++++ pkg/acceptance/bettertestspoc/README.md | 1 + pkg/datasources/account_roles.go | 94 ++++++++++++++ .../account_roles_acceptance_test.go | 116 ++++++++++++++++++ pkg/datasources/role.go | 2 +- pkg/datasources/roles.go | 7 +- pkg/datasources/roles_acceptance_test.go | 12 +- .../TestAcc_AccountRoles_Complete/1/test.tf | 23 ++++ .../1/variables.tf | 19 +++ .../TestAcc_AccountRoles_Complete/2/test.tf | 3 + pkg/provider/datasources/datasources.go | 1 + pkg/provider/provider.go | 1 + pkg/resources/diff_suppressions.go | 1 + templates/resources/database.md.tmpl | 3 +- .../resources/secondary_database.md.tmpl | 3 +- 
templates/resources/shared_database.md.tmpl | 3 +- 25 files changed, 450 insertions(+), 25 deletions(-) create mode 100644 docs/data-sources/account_roles.md create mode 100644 examples/data-sources/snowflake_account_roles/data-source.tf create mode 100644 pkg/datasources/account_roles.go create mode 100644 pkg/datasources/account_roles_acceptance_test.go create mode 100644 pkg/datasources/testdata/TestAcc_AccountRoles_Complete/1/test.tf create mode 100644 pkg/datasources/testdata/TestAcc_AccountRoles_Complete/1/variables.tf create mode 100644 pkg/datasources/testdata/TestAcc_AccountRoles_Complete/2/test.tf diff --git a/MIGRATION_GUIDE.md b/MIGRATION_GUIDE.md index 9904e232e5..8bd54b0a87 100644 --- a/MIGRATION_GUIDE.md +++ b/MIGRATION_GUIDE.md @@ -9,6 +9,26 @@ across different versions. ## v0.99.0 ➞ v0.100.0 +### *(new feature)* Account role data source +Added a new `snowflake_account_roles` data source for account roles. It's based on the `snowflake_roles` data source. +The `account_roles` field organizes the output of SHOW under the `show_output` field. + +Before: +```terraform +output "simple_output" { + value = data.snowflake_roles.test.roles[0].show_output[0].name +} +``` +After: +```terraform +output "simple_output" { + value = data.snowflake_account_roles.test.account_roles[0].show_output[0].name +} +``` + +### snowflake_roles data source deprecation +`snowflake_roles` is now deprecated in favor of `snowflake_account_roles` with a similar schema and behavior. It will be removed with the v1 release. Please adjust your configuration files.
+ ### snowflake_account_parameter resource changes #### *(behavior change)* resource deletion diff --git a/docs/data-sources/account_roles.md b/docs/data-sources/account_roles.md new file mode 100644 index 0000000000..f26c03bec5 --- /dev/null +++ b/docs/data-sources/account_roles.md @@ -0,0 +1,99 @@ +--- +page_title: "snowflake_account_roles Data Source - terraform-provider-snowflake" +subcategory: "" +description: |- + Data source used to get details of filtered account roles. Filtering is aligned with the current possibilities for SHOW ROLES https://docs.snowflake.com/en/sql-reference/sql/show-roles query (like and in_class are all supported). The results of SHOW are encapsulated in one output collection. +--- + +# snowflake_account_roles (Data Source) + +Data source used to get details of filtered account roles. Filtering is aligned with the current possibilities for [SHOW ROLES](https://docs.snowflake.com/en/sql-reference/sql/show-roles) query (`like` and `in_class` are all supported). The results of SHOW are encapsulated in one output collection. 
+ +## Example Usage + +```terraform +# Simple usage +data "snowflake_account_roles" "simple" { +} + +output "simple_output" { + value = data.snowflake_account_roles.simple.account_roles +} + +# Filtering (like) +data "snowflake_account_roles" "like" { + like = "role-name" +} + +output "like_output" { + value = data.snowflake_account_roles.like.account_roles +} + +# Filtering (in class) +data "snowflake_account_roles" "in_class" { + in_class = "SNOWFLAKE.CORE.BUDGET" +} + +output "in_class_output" { + value = data.snowflake_account_roles.in_class.account_roles +} + +# Ensure the number of roles is equal to at least one element (with the use of postcondition) +data "snowflake_account_roles" "assert_with_postcondition" { + like = "role-name-%" + lifecycle { + postcondition { + condition = length(self.account_roles) > 0 + error_message = "there should be at least one role" + } + } +} + +# Ensure the number of roles is equal to exactly one element (with the use of check block) +check "role_check" { + data "snowflake_account_roles" "assert_with_check_block" { + like = "role-name" + } + + assert { + condition = length(data.snowflake_account_roles.assert_with_check_block.account_roles) == 1 + error_message = "Roles filtered by '${data.snowflake_account_roles.assert_with_check_block.like}' returned ${length(data.snowflake_account_roles.assert_with_check_block.account_roles)} roles where one was expected" + } +} +``` + + +## Schema + +### Optional + +- `in_class` (String) Filters the SHOW ROLES output by class name. +- `like` (String) Filters the output with **case-insensitive** pattern, with support for SQL wildcard characters (`%` and `_`). + +### Read-Only + +- `account_roles` (List of Object) Holds the aggregated output of all account role details queries. (see [below for nested schema](#nestedatt--account_roles)) +- `id` (String) The ID of this resource.
+ + +### Nested Schema for `account_roles` + +Read-Only: + +- `show_output` (List of Object) (see [below for nested schema](#nestedobjatt--account_roles--show_output)) + + +### Nested Schema for `account_roles.show_output` + +Read-Only: + +- `assigned_to_users` (Number) +- `comment` (String) +- `created_on` (String) +- `granted_roles` (Number) +- `granted_to_roles` (Number) +- `is_current` (Boolean) +- `is_default` (Boolean) +- `is_inherited` (Boolean) +- `name` (String) +- `owner` (String) diff --git a/docs/data-sources/role.md b/docs/data-sources/role.md index ddb3bcc22e..d2a16369ec 100644 --- a/docs/data-sources/role.md +++ b/docs/data-sources/role.md @@ -7,7 +7,7 @@ description: |- # snowflake_role (Data Source) -~> **Deprecation** This resource is deprecated and will be removed in a future major version release. Please use [snowflake_roles](./roles) instead. +~> **Deprecation** This resource is deprecated and will be removed in a future major version release. Please use [snowflake_account_roles](./account_roles) instead. ## Example Usage diff --git a/docs/data-sources/roles.md b/docs/data-sources/roles.md index 0e5db6a28f..ff55a1326f 100644 --- a/docs/data-sources/roles.md +++ b/docs/data-sources/roles.md @@ -12,6 +12,8 @@ description: |- # snowflake_roles (Data Source) +~> **Deprecation** This resource is deprecated and will be removed in a future major version release. Please use [snowflake_account_roles](./account_roles) instead. + Data source used to get details of filtered roles. Filtering is aligned with the current possibilities for [SHOW ROLES](https://docs.snowflake.com/en/sql-reference/sql/show-roles) query (`like` and `in_class` are all supported). The results of SHOW are encapsulated in one output collection. 
## Example Usage diff --git a/docs/index.md b/docs/index.md index f04b4eb2b5..c36999718b 100644 --- a/docs/index.md +++ b/docs/index.md @@ -371,4 +371,5 @@ provider "snowflake" { ## Currently deprecated datasources -- [snowflake_role](./docs/data-sources/role) - use [snowflake_roles](./docs/data-sources/roles) instead +- [snowflake_role](./docs/data-sources/role) - use [snowflake_account_roles](./docs/data-sources/account_roles) instead +- [snowflake_roles](./docs/data-sources/roles) - use [snowflake_account_roles](./docs/data-sources/account_roles) instead diff --git a/docs/resources/database.md b/docs/resources/database.md index e5d16f93e4..95df361ef3 100644 --- a/docs/resources/database.md +++ b/docs/resources/database.md @@ -9,8 +9,7 @@ description: |- !> **Note** The provider does not detect external changes on database type. In this case, remove the database of wrong type manually with `terraform destroy` and recreate the resource. It will be addressed in the future. -!> **Note** A database cannot be dropped successfully if it contains network rule-network policy associations. The error looks like `098507 (2BP01): Cannot drop database DATABASE as it includes network rule - policy associations. -`. Currently, the provider does not unassign such objects automatically. Before dropping the resource, first unassign the network rule from the relevant objects. See [guide](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/latest/docs/guides/unassigning_policies) for more details. +!> **Note** A database cannot be dropped successfully if it contains network rule-network policy associations. The error looks like `098507 (2BP01): Cannot drop database DATABASE as it includes network rule - policy associations.`. Currently, the provider does not unassign such objects automatically. Before dropping the resource, first unassign the network rule from the relevant objects. 
See [guide](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/latest/docs/guides/unassigning_policies) for more details. # snowflake_database (Resource) diff --git a/docs/resources/secondary_database.md b/docs/resources/secondary_database.md index 652f875871..c6e4e6a2e5 100644 --- a/docs/resources/secondary_database.md +++ b/docs/resources/secondary_database.md @@ -10,8 +10,7 @@ description: |- !> **Note** The provider does not detect external changes on database type. In this case, remove the database of wrong type manually with `terraform destroy` and recreate the resource. It will be addressed in the future. -!> **Note** A database cannot be dropped successfully if it contains network rule-network policy associations. The error looks like `098507 (2BP01): Cannot drop database DATABASE as it includes network rule - policy associations. -`. Currently, the provider does not unassign such objects automatically. Before dropping the resource, first unassign the network rule from the relevant objects. See [guide](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/latest/docs/guides/unassigning_policies) for more details. +!> **Note** A database cannot be dropped successfully if it contains network rule-network policy associations. The error looks like `098507 (2BP01): Cannot drop database DATABASE as it includes network rule - policy associations.`. Currently, the provider does not unassign such objects automatically. Before dropping the resource, first unassign the network rule from the relevant objects. See [guide](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/latest/docs/guides/unassigning_policies) for more details. 
# snowflake_secondary_database (Resource) diff --git a/docs/resources/shared_database.md b/docs/resources/shared_database.md index daf52b60bd..af44483644 100644 --- a/docs/resources/shared_database.md +++ b/docs/resources/shared_database.md @@ -9,8 +9,7 @@ description: |- !> **Note** The provider does not detect external changes on database type. In this case, remove the database of wrong type manually with `terraform destroy` and recreate the resource. It will be addressed in the future. -!> **Note** A database cannot be dropped successfully if it contains network rule-network policy associations. The error looks like `098507 (2BP01): Cannot drop database DATABASE as it includes network rule - policy associations. -`. Currently, the provider does not unassign such objects automatically. Before dropping the resource, first unassign the network rule from the relevant objects. See [guide](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/latest/docs/guides/unassigning_policies) for more details. +!> **Note** A database cannot be dropped successfully if it contains network rule-network policy associations. The error looks like `098507 (2BP01): Cannot drop database DATABASE as it includes network rule - policy associations.`. Currently, the provider does not unassign such objects automatically. Before dropping the resource, first unassign the network rule from the relevant objects. See [guide](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/latest/docs/guides/unassigning_policies) for more details. 
# snowflake_shared_database (Resource) diff --git a/examples/additional/deprecated_datasources.MD b/examples/additional/deprecated_datasources.MD index 935ebcfd54..9846a374a1 100644 --- a/examples/additional/deprecated_datasources.MD +++ b/examples/additional/deprecated_datasources.MD @@ -1,3 +1,4 @@ ## Currently deprecated datasources -- [snowflake_role](./docs/data-sources/role) - use [snowflake_roles](./docs/data-sources/roles) instead +- [snowflake_role](./docs/data-sources/role) - use [snowflake_account_roles](./docs/data-sources/account_roles) instead +- [snowflake_roles](./docs/data-sources/roles) - use [snowflake_account_roles](./docs/data-sources/account_roles) instead diff --git a/examples/data-sources/snowflake_account_roles/data-source.tf b/examples/data-sources/snowflake_account_roles/data-source.tf new file mode 100644 index 0000000000..2f48b3067c --- /dev/null +++ b/examples/data-sources/snowflake_account_roles/data-source.tf @@ -0,0 +1,48 @@ +# Simple usage +data "snowflake_account_roles" "simple" { +} + +output "simple_output" { + value = data.snowflake_account_roles.simple.account_roles +} + +# Filtering (like) +data "snowflake_account_roles" "like" { + like = "role-name" +} + +output "like_output" { + value = data.snowflake_account_roles.like.account_roles +} + +# Filtering (in class) +data "snowflake_account_roles" "in_class" { + in_class = "SNOWFLAKE.CORE.BUDGET" +} + +output "in_class_output" { + value = data.snowflake_account_roles.in_class.account_roles +} + +# Ensure the number of roles is equal to at least one element (with the use of postcondition) +data "snowflake_account_roles" "assert_with_postcondition" { + like = "role-name-%" + lifecycle { + postcondition { + condition = length(self.account_roles) > 0 + error_message = "there should be at least one role" + } + } +} + +# Ensure the number of roles is equal to exactly one element (with the use of check block) +check "role_check" { + data "snowflake_account_roles" "assert_with_check_block" { + like = "role-name" + 
} + + assert { + condition = length(data.snowflake_account_roles.assert_with_check_block.account_roles) == 1 + error_message = "Roles filtered by '${data.snowflake_account_roles.assert_with_check_block.like}' returned ${length(data.snowflake_account_roles.assert_with_check_block.account_roles)} roles where one was expected" + } +} diff --git a/pkg/acceptance/bettertestspoc/README.md b/pkg/acceptance/bettertestspoc/README.md index 1cae4a6760..ee03bf37be 100644 --- a/pkg/acceptance/bettertestspoc/README.md +++ b/pkg/acceptance/bettertestspoc/README.md @@ -393,6 +393,7 @@ func (w *WarehouseDatasourceShowOutputAssert) IsEmpty() { - generate assertions checking that time is not empty - we often do not compare time fields by value, but check if they are set - utilize `ContainsExactlyInAnyOrder` function in `pkg/acceptance/bettertestspoc/assert/commons.go` to create asserts on collections that are order independent - Additional asserts for sets and lists that wouldn't rely on the order of items saved to the state (SNOW-1706544) + - this should also support nested sets and lists (see `accountRolesDataSourceContainsRole` for example) - support generating provider config and use generated configs in `pkg/provider/provider_acceptance_test.go` - add config builders for other block types (Variable, Output, Locals, Module, Terraform) - add provider to resource/datasource models (use in the grant_ownership_acceptance_test) diff --git a/pkg/datasources/account_roles.go b/pkg/datasources/account_roles.go new file mode 100644 index 0000000000..2602462df2 --- /dev/null +++ b/pkg/datasources/account_roles.go @@ -0,0 +1,94 @@ +package datasources + +import ( + "context" + "fmt" + + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/provider/datasources" + + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/resources" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/schemas" + + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/internal/provider" + + 
"github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk" + "github.com/hashicorp/terraform-plugin-sdk/v2/diag" + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" +) + +var accountRolesSchema = map[string]*schema.Schema{ + "like": likeSchema, + "in_class": { + Type: schema.TypeString, + Optional: true, + ValidateDiagFunc: resources.IsValidIdentifier[sdk.SchemaObjectIdentifier](), + Description: "Filters the SHOW ROLES output by class name.", + }, + "account_roles": { + Type: schema.TypeList, + Computed: true, + Description: "Holds the aggregated output of all account role details queries.", + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + resources.ShowOutputAttributeName: { + Type: schema.TypeList, + Computed: true, + Description: "Holds the output of SHOW ROLES.", + Elem: &schema.Resource{ + Schema: schemas.ShowRoleSchema, + }, + }, + }, + }, + }, +} + +func AccountRoles() *schema.Resource { + return &schema.Resource{ + ReadContext: TrackingReadWrapper(datasources.AccountRoles, ReadAccountRoles), + Schema: accountRolesSchema, + Description: "Data source used to get details of filtered account roles. Filtering is aligned with the current possibilities for [SHOW ROLES](https://docs.snowflake.com/en/sql-reference/sql/show-roles) query (`like` and `in_class` are all supported). 
The results of SHOW are encapsulated in one output collection.", + } +} + +func ReadAccountRoles(ctx context.Context, d *schema.ResourceData, meta any) diag.Diagnostics { + client := meta.(*provider.Context).Client + + req := sdk.NewShowRoleRequest() + + handleLike(d, &req.Like) + + if className, ok := d.GetOk("in_class"); ok { + req.WithInClass(sdk.RolesInClass{ + Class: sdk.NewSchemaObjectIdentifierFromFullyQualifiedName(className.(string)), + }) + } + + roles, err := client.Roles.Show(ctx, req) + if err != nil { + return diag.Diagnostics{ + diag.Diagnostic{ + Severity: diag.Error, + Summary: "Failed to show account roles", + Detail: fmt.Sprintf("Error: %s", err), + }, + } + } + + d.SetId("account_roles_read") + + flattenedAccountRoles := make([]map[string]any, len(roles)) + for i, role := range roles { + role := role + flattenedAccountRoles[i] = map[string]any{ + resources.ShowOutputAttributeName: []map[string]any{schemas.RoleToSchema(&role)}, + } + } + + err = d.Set("account_roles", flattenedAccountRoles) + if err != nil { + return diag.FromErr(err) + } + + return nil +} diff --git a/pkg/datasources/account_roles_acceptance_test.go b/pkg/datasources/account_roles_acceptance_test.go new file mode 100644 index 0000000000..66dde1f377 --- /dev/null +++ b/pkg/datasources/account_roles_acceptance_test.go @@ -0,0 +1,116 @@ +package datasources_test + +import ( + "fmt" + "strconv" + "testing" + + acc "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance" + + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/helpers/random" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/testenvs" + "github.com/hashicorp/terraform-plugin-testing/config" + "github.com/hashicorp/terraform-plugin-testing/helper/resource" + "github.com/hashicorp/terraform-plugin-testing/terraform" + "github.com/hashicorp/terraform-plugin-testing/tfversion" +) + +func TestAcc_AccountRoles_Complete(t *testing.T) { + _ = testenvs.GetOrSkipTest(t, 
testenvs.EnableAcceptance) + acc.TestAccPreCheck(t) + + accountRoleNamePrefix := random.AlphaN(10) + accountRoleName1 := acc.TestClient().Ids.AlphaWithPrefix(accountRoleNamePrefix + "1") + accountRoleName2 := acc.TestClient().Ids.AlphaWithPrefix(accountRoleNamePrefix + "2") + accountRoleName3 := acc.TestClient().Ids.Alpha() + dbRoleName := acc.TestClient().Ids.AlphaWithPrefix(accountRoleNamePrefix + "db") + comment := random.Comment() + + // Proof that database role with the same prefix is not in the output of SHOW ROLES. + dbRole, dbRoleCleanup := acc.TestClient().DatabaseRole.CreateDatabaseRoleWithName(t, dbRoleName) + t.Cleanup(dbRoleCleanup) + + likeVariables := config.Variables{ + "account_role_name_1": config.StringVariable(accountRoleName1), + "account_role_name_2": config.StringVariable(accountRoleName2), + "account_role_name_3": config.StringVariable(accountRoleName3), + "comment": config.StringVariable(comment), + "like": config.StringVariable(accountRoleNamePrefix + "%"), + } + + resource.Test(t, resource.TestCase{ + ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, + TerraformVersionChecks: []tfversion.TerraformVersionCheck{ + tfversion.RequireAbove(tfversion.Version1_5_0), + }, + Steps: []resource.TestStep{ + { + ConfigDirectory: config.TestStepDirectory(), + ConfigVariables: likeVariables, + Check: resource.ComposeAggregateTestCheckFunc( + resource.TestCheckResourceAttr("data.snowflake_account_roles.test", "account_roles.#", "2"), + accountRolesDataSourceContainsRole(accountRoleName1, comment), + accountRolesDataSourceContainsRole(accountRoleName2, comment), + accountRolesDataSourceDoesNotContainRole(accountRoleName3, comment), + accountRolesDataSourceDoesNotContainRole(dbRole.ID().FullyQualifiedName(), comment), + ), + }, + { + ConfigDirectory: config.TestStepDirectory(), + ConfigVariables: config.Variables{}, + Check: resource.ComposeAggregateTestCheckFunc( + resource.TestCheckResourceAttrWith("data.snowflake_account_roles.test", 
"account_roles.#", func(value string) error { + numberOfRoles, err := strconv.ParseInt(value, 10, 8) + if err != nil { + return err + } + + if numberOfRoles == 0 { + return fmt.Errorf("expected roles to be non-empty") + } + + return nil + }), + ), + }, + }, + }) +} + +func accountRolesDataSourceDoesNotContainRole(name string, comment string) func(s *terraform.State) error { + return func(state *terraform.State) error { + err := accountRolesDataSourceContainsRole(name, comment)(state) + if err != nil && err.Error() == fmt.Sprintf("role %s not found", name) { + return nil + } + return fmt.Errorf("expected %s not to be present", name) + } +} + +func accountRolesDataSourceContainsRole(name string, comment string) func(s *terraform.State) error { + return func(s *terraform.State) error { + for _, rs := range s.RootModule().Resources { + if rs.Type != "snowflake_account_roles" { + continue + } + + iter, err := strconv.ParseInt(rs.Primary.Attributes["account_roles.#"], 10, 32) + if err != nil { + return err + } + + for i := 0; i < int(iter); i++ { + if rs.Primary.Attributes[fmt.Sprintf("account_roles.%d.show_output.0.name", i)] == name { + actualComment := rs.Primary.Attributes[fmt.Sprintf("account_roles.%d.show_output.0.comment", i)] + if actualComment != comment { + return fmt.Errorf("expected comment: %s, but got: %s", comment, actualComment) + } + + return nil + } + } + } + + return fmt.Errorf("role %s not found", name) + } +} diff --git a/pkg/datasources/role.go b/pkg/datasources/role.go index 4f2ccb5898..bb168351d7 100644 --- a/pkg/datasources/role.go +++ b/pkg/datasources/role.go @@ -33,7 +33,7 @@ func Role() *schema.Resource { return &schema.Resource{ ReadContext: TrackingReadWrapper(datasources.Role, ReadRole), Schema: roleSchema, - DeprecationMessage: "This resource is deprecated and will be removed in a future major version release. 
Please use snowflake_roles instead.", + DeprecationMessage: "This resource is deprecated and will be removed in a future major version release. Please use snowflake_account_roles instead.", Importer: &schema.ResourceImporter{ StateContext: schema.ImportStatePassthroughContext, }, diff --git a/pkg/datasources/roles.go b/pkg/datasources/roles.go index bdce14c43e..597702a637 100644 --- a/pkg/datasources/roles.go +++ b/pkg/datasources/roles.go @@ -49,9 +49,10 @@ var rolesSchema = map[string]*schema.Schema{ func Roles() *schema.Resource { return &schema.Resource{ - ReadContext: TrackingReadWrapper(datasources.Roles, ReadRoles), - Schema: rolesSchema, - Description: "Data source used to get details of filtered roles. Filtering is aligned with the current possibilities for [SHOW ROLES](https://docs.snowflake.com/en/sql-reference/sql/show-roles) query (`like` and `in_class` are all supported). The results of SHOW are encapsulated in one output collection.", + ReadContext: TrackingReadWrapper(datasources.Roles, ReadRoles), + Schema: rolesSchema, + DeprecationMessage: "This resource is deprecated and will be removed in a future major version release. Please use snowflake_account_roles instead.", + Description: "Data source used to get details of filtered roles. Filtering is aligned with the current possibilities for [SHOW ROLES](https://docs.snowflake.com/en/sql-reference/sql/show-roles) query (`like` and `in_class` are all supported). 
The results of SHOW are encapsulated in one output collection.", } } diff --git a/pkg/datasources/roles_acceptance_test.go b/pkg/datasources/roles_acceptance_test.go index af29e8fd11..35623e2ec0 100644 --- a/pkg/datasources/roles_acceptance_test.go +++ b/pkg/datasources/roles_acceptance_test.go @@ -41,9 +41,9 @@ func TestAcc_Roles_Complete(t *testing.T) { ConfigVariables: likeVariables, Check: resource.ComposeTestCheckFunc( resource.TestCheckResourceAttr("data.snowflake_roles.test", "roles.#", "2"), - containsAccountRole(accountRoleName1, comment), - containsAccountRole(accountRoleName2, comment), - doesntContainAccountRole(accountRoleName3, comment), + // containsRole(accountRoleName1, comment), + // containsRole(accountRoleName2, comment), + doesntContainRole(accountRoleName3, comment), ), }, { @@ -68,9 +68,9 @@ func TestAcc_Roles_Complete(t *testing.T) { }) } -func doesntContainAccountRole(name string, comment string) func(s *terraform.State) error { +func doesntContainRole(name string, comment string) func(s *terraform.State) error { return func(state *terraform.State) error { - err := containsAccountRole(name, comment)(state) + err := containsRole(name, comment)(state) if err != nil && err.Error() == fmt.Sprintf("role %s not found", name) { return nil } @@ -78,7 +78,7 @@ func doesntContainAccountRole(name string, comment string) func(s *terraform.Sta } } -func containsAccountRole(name string, comment string) func(s *terraform.State) error { +func containsRole(name string, comment string) func(s *terraform.State) error { return func(s *terraform.State) error { for _, rs := range s.RootModule().Resources { if rs.Type != "snowflake_roles" { diff --git a/pkg/datasources/testdata/TestAcc_AccountRoles_Complete/1/test.tf b/pkg/datasources/testdata/TestAcc_AccountRoles_Complete/1/test.tf new file mode 100644 index 0000000000..68afff3b58 --- /dev/null +++ b/pkg/datasources/testdata/TestAcc_AccountRoles_Complete/1/test.tf @@ -0,0 +1,23 @@ +resource 
"snowflake_account_role" "test1" { + name = var.account_role_name_1 + comment = var.comment +} + +resource "snowflake_account_role" "test2" { + name = var.account_role_name_2 + comment = var.comment +} + +resource "snowflake_account_role" "test3" { + name = var.account_role_name_3 + comment = var.comment +} + +data "snowflake_account_roles" "test" { + depends_on = [ + snowflake_account_role.test1, + snowflake_account_role.test2, + snowflake_account_role.test3, + ] + like = var.like +} diff --git a/pkg/datasources/testdata/TestAcc_AccountRoles_Complete/1/variables.tf b/pkg/datasources/testdata/TestAcc_AccountRoles_Complete/1/variables.tf new file mode 100644 index 0000000000..fcd75c445f --- /dev/null +++ b/pkg/datasources/testdata/TestAcc_AccountRoles_Complete/1/variables.tf @@ -0,0 +1,19 @@ +variable "account_role_name_1" { + type = string +} + +variable "account_role_name_2" { + type = string +} + +variable "account_role_name_3" { + type = string +} + +variable "comment" { + type = string +} + +variable "like" { + type = string +} diff --git a/pkg/datasources/testdata/TestAcc_AccountRoles_Complete/2/test.tf b/pkg/datasources/testdata/TestAcc_AccountRoles_Complete/2/test.tf new file mode 100644 index 0000000000..8a45ee2f6e --- /dev/null +++ b/pkg/datasources/testdata/TestAcc_AccountRoles_Complete/2/test.tf @@ -0,0 +1,3 @@ +data "snowflake_account_roles" "test" { + in_class = "SNOWFLAKE.CORE.BUDGET" +} diff --git a/pkg/provider/datasources/datasources.go b/pkg/provider/datasources/datasources.go index 56ea68b1c3..c31b954a22 100644 --- a/pkg/provider/datasources/datasources.go +++ b/pkg/provider/datasources/datasources.go @@ -4,6 +4,7 @@ type datasource string const ( Accounts datasource = "snowflake_accounts" + AccountRoles datasource = "snowflake_account_roles" Alerts datasource = "snowflake_alerts" Connections datasource = "snowflake_connections" CortexSearchServices datasource = "snowflake_cortex_search_services" diff --git a/pkg/provider/provider.go 
b/pkg/provider/provider.go index 327c4ceafb..8fdbb085db 100644 --- a/pkg/provider/provider.go +++ b/pkg/provider/provider.go @@ -580,6 +580,7 @@ func getResources() map[string]*schema.Resource { func getDataSources() map[string]*schema.Resource { return map[string]*schema.Resource{ "snowflake_accounts": datasources.Accounts(), + "snowflake_account_roles": datasources.AccountRoles(), "snowflake_alerts": datasources.Alerts(), "snowflake_connections": datasources.Connections(), "snowflake_cortex_search_services": datasources.CortexSearchServices(), diff --git a/pkg/resources/diff_suppressions.go b/pkg/resources/diff_suppressions.go index 4e80bbf0e4..8d10cb1758 100644 --- a/pkg/resources/diff_suppressions.go +++ b/pkg/resources/diff_suppressions.go @@ -267,6 +267,7 @@ func IgnoreNewEmptyListOrSubfields(ignoredSubfields ...string) schema.SchemaDiff // IgnoreMatchingColumnNameAndMaskingPolicyUsingFirstElem ignores when the first element of USING is matching the column name. // see USING section in https://docs.snowflake.com/en/sql-reference/sql/create-view#optional-parameters +// TODO(SNOW-1852423): improve docs and add more tests func IgnoreMatchingColumnNameAndMaskingPolicyUsingFirstElem() schema.SchemaDiffSuppressFunc { return func(k, old, new string, d *schema.ResourceData) bool { // suppress diff when the name of the column matches the name of using diff --git a/templates/resources/database.md.tmpl b/templates/resources/database.md.tmpl index 719a06f49e..65bc7d0dc9 100644 --- a/templates/resources/database.md.tmpl +++ b/templates/resources/database.md.tmpl @@ -13,8 +13,7 @@ description: |- !> **Note** The provider does not detect external changes on database type. In this case, remove the database of wrong type manually with `terraform destroy` and recreate the resource. It will be addressed in the future. -!> **Note** A database cannot be dropped successfully if it contains network rule-network policy associations. 
The error looks like `098507 (2BP01): Cannot drop database DATABASE as it includes network rule - policy associations. -`. Currently, the provider does not unassign such objects automatically. Before dropping the resource, first unassign the network rule from the relevant objects. See [guide](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/latest/docs/guides/unassigning_policies) for more details. +!> **Note** A database cannot be dropped successfully if it contains network rule-network policy associations. The error looks like `098507 (2BP01): Cannot drop database DATABASE as it includes network rule - policy associations.`. Currently, the provider does not unassign such objects automatically. Before dropping the resource, first unassign the network rule from the relevant objects. See [guide](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/latest/docs/guides/unassigning_policies) for more details. # {{.Name}} ({{.Type}}) diff --git a/templates/resources/secondary_database.md.tmpl b/templates/resources/secondary_database.md.tmpl index acb3bfb61c..5abd07b360 100644 --- a/templates/resources/secondary_database.md.tmpl +++ b/templates/resources/secondary_database.md.tmpl @@ -14,8 +14,7 @@ description: |- !> **Note** The provider does not detect external changes on database type. In this case, remove the database of wrong type manually with `terraform destroy` and recreate the resource. It will be addressed in the future. -!> **Note** A database cannot be dropped successfully if it contains network rule-network policy associations. The error looks like `098507 (2BP01): Cannot drop database DATABASE as it includes network rule - policy associations. -`. Currently, the provider does not unassign such objects automatically. Before dropping the resource, first unassign the network rule from the relevant objects. 
See [guide](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/latest/docs/guides/unassigning_policies) for more details. +!> **Note** A database cannot be dropped successfully if it contains network rule-network policy associations. The error looks like `098507 (2BP01): Cannot drop database DATABASE as it includes network rule - policy associations.`. Currently, the provider does not unassign such objects automatically. Before dropping the resource, first unassign the network rule from the relevant objects. See [guide](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/latest/docs/guides/unassigning_policies) for more details. # {{.Name}} ({{.Type}}) diff --git a/templates/resources/shared_database.md.tmpl b/templates/resources/shared_database.md.tmpl index 719a06f49e..65bc7d0dc9 100644 --- a/templates/resources/shared_database.md.tmpl +++ b/templates/resources/shared_database.md.tmpl @@ -13,8 +13,7 @@ description: |- !> **Note** The provider does not detect external changes on database type. In this case, remove the database of wrong type manually with `terraform destroy` and recreate the resource. It will be addressed in the future. -!> **Note** A database cannot be dropped successfully if it contains network rule-network policy associations. The error looks like `098507 (2BP01): Cannot drop database DATABASE as it includes network rule - policy associations. -`. Currently, the provider does not unassign such objects automatically. Before dropping the resource, first unassign the network rule from the relevant objects. See [guide](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/latest/docs/guides/unassigning_policies) for more details. +!> **Note** A database cannot be dropped successfully if it contains network rule-network policy associations. The error looks like `098507 (2BP01): Cannot drop database DATABASE as it includes network rule - policy associations.`. 
Currently, the provider does not unassign such objects automatically. Before dropping the resource, first unassign the network rule from the relevant objects. See [guide](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/latest/docs/guides/unassigning_policies) for more details. # {{.Name}} ({{.Type}}) From a7121952892847f61e24e7a7a4fe78c38a450985 Mon Sep 17 00:00:00 2001 From: Artur Sawicki Date: Thu, 12 Dec 2024 16:35:03 +0100 Subject: [PATCH 10/20] feat: Test imports and small fixes (#3276) Improve and test imports for functions and procedures --- .../function_and_procedure_commons.go | 11 +++++ pkg/resources/function_commons.go | 36 +++++++++++++++++ pkg/resources/function_java.go | 2 +- .../function_java_acceptance_test.go | 40 +++++++++++++++++-- pkg/resources/function_javascript.go | 2 +- pkg/resources/function_python.go | 2 +- pkg/resources/function_scala.go | 2 +- pkg/resources/function_sql.go | 3 +- pkg/resources/procedure_commons.go | 35 ++++++++++++++++ pkg/resources/procedure_java.go | 2 +- .../procedure_java_acceptance_test.go | 38 ++++++++++++++++-- pkg/resources/procedure_javascript.go | 2 +- pkg/resources/procedure_python.go | 2 +- pkg/resources/procedure_scala.go | 2 +- pkg/resources/procedure_sql.go | 2 +- 15 files changed, 164 insertions(+), 17 deletions(-) diff --git a/pkg/resources/function_and_procedure_commons.go b/pkg/resources/function_and_procedure_commons.go index 213217e968..daf0709af7 100644 --- a/pkg/resources/function_and_procedure_commons.go +++ b/pkg/resources/function_and_procedure_commons.go @@ -26,6 +26,17 @@ func readFunctionOrProcedureArguments(d *schema.ResourceData, args []sdk.Normali } } +func importFunctionOrProcedureArguments(d *schema.ResourceData, args []sdk.NormalizedArgument) error { + currentArgs := make([]map[string]any, len(args)) + for i, arg := range args { + currentArgs[i] = map[string]any{ + "arg_name": arg.Name, + "arg_data_type": arg.DataType.ToSql(), + } + } + return d.Set("arguments", 
currentArgs) +} + func readFunctionOrProcedureImports(d *schema.ResourceData, imports []sdk.NormalizedPath) error { if len(imports) == 0 { // don't do anything if imports not present diff --git a/pkg/resources/function_commons.go b/pkg/resources/function_commons.go index 12ea55bd73..c9c8bf05e6 100644 --- a/pkg/resources/function_commons.go +++ b/pkg/resources/function_commons.go @@ -9,6 +9,7 @@ import ( "slices" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/helpers" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/internal/logging" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/internal/provider" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/schemas" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk" @@ -511,6 +512,41 @@ func UpdateFunction(language string, readFunc func(ctx context.Context, d *schem } } +func ImportFunction(ctx context.Context, d *schema.ResourceData, meta any) ([]*schema.ResourceData, error) { + logging.DebugLogger.Printf("[DEBUG] Starting function import") + client := meta.(*provider.Context).Client + id, err := sdk.ParseSchemaObjectIdentifierWithArguments(d.Id()) + if err != nil { + return nil, err + } + + functionDetails, err := client.Functions.DescribeDetails(ctx, id) + if err != nil { + return nil, err + } + + function, err := client.Functions.ShowByID(ctx, id) + if err != nil { + return nil, err + } + + err = errors.Join( + d.Set("database", id.DatabaseName()), + d.Set("schema", id.SchemaName()), + d.Set("name", id.Name()), + d.Set("is_secure", booleanStringFromBool(function.IsSecure)), + setOptionalFromStringPtr(d, "null_input_behavior", functionDetails.NullHandling), + setOptionalFromStringPtr(d, "return_results_behavior", functionDetails.Volatility), + importFunctionOrProcedureArguments(d, functionDetails.NormalizedArguments), + // all others are set in read + ) + if err != nil { + return nil, err + } + + return []*schema.ResourceData{d}, nil +} + // TODO 
[SNOW-1850370]: Make the rest of the functions in this file generic (for reuse with procedures) func parseFunctionArgumentsCommon(d *schema.ResourceData) ([]sdk.FunctionArgumentRequest, error) { args := make([]sdk.FunctionArgumentRequest, 0) diff --git a/pkg/resources/function_java.go b/pkg/resources/function_java.go index 4dae73f94c..14053affed 100644 --- a/pkg/resources/function_java.go +++ b/pkg/resources/function_java.go @@ -40,7 +40,7 @@ func FunctionJava() *schema.Resource { Schema: collections.MergeMaps(javaFunctionSchema, functionParametersSchema), Importer: &schema.ResourceImporter{ - StateContext: schema.ImportStatePassthroughContext, + StateContext: TrackingImportWrapper(resources.FunctionJava, ImportFunction), }, } } diff --git a/pkg/resources/function_java_acceptance_test.go b/pkg/resources/function_java_acceptance_test.go index 9b8f032779..8e30bf28e4 100644 --- a/pkg/resources/function_java_acceptance_test.go +++ b/pkg/resources/function_java_acceptance_test.go @@ -17,6 +17,7 @@ import ( "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/bettertestspoc/config" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/bettertestspoc/config/model" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/helpers/random" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/importchecks" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/testdatatypes" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/testenvs" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/helpers" @@ -27,7 +28,6 @@ import ( "github.com/hashicorp/terraform-plugin-testing/tfversion" ) -// TODO [SNOW-1348103]: test import // TODO [SNOW-1348103]: test external changes // TODO [SNOW-1348103]: test changes of attributes separately @@ -77,6 +77,20 @@ func TestAcc_FunctionJava_InlineBasic(t *testing.T) { 
assert.Check(resource.TestCheckResourceAttr(functionModel.ResourceReference(), "arguments.0.arg_default_value", "")), ), }, + // IMPORT + { + ResourceName: functionModel.ResourceReference(), + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"is_secure", "arguments.0.arg_data_type", "null_input_behavior", "return_results_behavior"}, + ImportStateCheck: assert.AssertThatImport(t, + resourceassert.ImportedFunctionJavaResource(t, id.FullyQualifiedName()). + HasFullyQualifiedNameString(id.FullyQualifiedName()), + assert.CheckImport(importchecks.TestCheckResourceAttrInstanceState(helpers.EncodeResourceIdentifier(id), "arguments.0.arg_name", argName)), + assert.CheckImport(importchecks.TestCheckResourceAttrInstanceState(helpers.EncodeResourceIdentifier(id), "arguments.0.arg_data_type", "VARCHAR(16777216)")), + assert.CheckImport(importchecks.TestCheckResourceAttrInstanceState(helpers.EncodeResourceIdentifier(id), "arguments.0.arg_default_value", "")), + ), + }, // RENAME { Config: config.FromModels(t, functionModelRenamed), @@ -220,6 +234,9 @@ func TestAcc_FunctionJava_InlineFull(t *testing.T) { }). WithTargetPathParts(stage.ID().FullyQualifiedName(), jarName). WithRuntimeVersion("11"). + WithIsSecure("false"). + WithNullInputBehavior(string(sdk.NullInputBehaviorCalledOnNullInput)). + WithReturnResultsBehavior(string(sdk.ReturnResultsBehaviorVolatile)). WithComment("some comment") functionModelUpdateWithoutRecreation := model.FunctionJavaBasicInline("w", id, dataType, handler, definition). @@ -235,6 +252,9 @@ func TestAcc_FunctionJava_InlineFull(t *testing.T) { }). WithTargetPathParts(stage.ID().FullyQualifiedName(), jarName). WithRuntimeVersion("11"). + WithIsSecure(r.BooleanFalse). + WithNullInputBehavior(string(sdk.NullInputBehaviorCalledOnNullInput)). + WithReturnResultsBehavior(string(sdk.ReturnResultsBehaviorVolatile)). 
WithComment("some other comment") resource.Test(t, resource.TestCase{ @@ -251,7 +271,7 @@ func TestAcc_FunctionJava_InlineFull(t *testing.T) { Check: assert.AssertThat(t, resourceassert.FunctionJavaResource(t, functionModel.ResourceReference()). HasNameString(id.Name()). - HasIsSecureString(r.BooleanDefault). + HasIsSecureString(r.BooleanFalse). HasImportsLength(2). HasRuntimeVersionString("11"). HasFunctionDefinitionString(definition). @@ -267,6 +287,20 @@ func TestAcc_FunctionJava_InlineFull(t *testing.T) { HasIsSecure(false), ), }, + // IMPORT + { + ResourceName: functionModel.ResourceReference(), + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"arguments.0.arg_data_type"}, + ImportStateCheck: assert.AssertThatImport(t, + resourceassert.ImportedFunctionJavaResource(t, id.FullyQualifiedName()). + HasFullyQualifiedNameString(id.FullyQualifiedName()), + assert.CheckImport(importchecks.TestCheckResourceAttrInstanceState(helpers.EncodeResourceIdentifier(id), "arguments.0.arg_name", argName)), + assert.CheckImport(importchecks.TestCheckResourceAttrInstanceState(helpers.EncodeResourceIdentifier(id), "arguments.0.arg_data_type", "VARCHAR(16777216)")), + assert.CheckImport(importchecks.TestCheckResourceAttrInstanceState(helpers.EncodeResourceIdentifier(id), "arguments.0.arg_default_value", "")), + ), + }, // UPDATE WITHOUT RECREATION { ConfigPlanChecks: resource.ConfigPlanChecks{ @@ -278,7 +312,7 @@ func TestAcc_FunctionJava_InlineFull(t *testing.T) { Check: assert.AssertThat(t, resourceassert.FunctionJavaResource(t, functionModelUpdateWithoutRecreation.ResourceReference()). HasNameString(id.Name()). - HasIsSecureString(r.BooleanDefault). + HasIsSecureString(r.BooleanFalse). HasImportsLength(2). HasRuntimeVersionString("11"). HasFunctionDefinitionString(definition). 
diff --git a/pkg/resources/function_javascript.go b/pkg/resources/function_javascript.go index fe0884dd67..ff98838485 100644 --- a/pkg/resources/function_javascript.go +++ b/pkg/resources/function_javascript.go @@ -40,7 +40,7 @@ func FunctionJavascript() *schema.Resource { Schema: collections.MergeMaps(javascriptFunctionSchema, functionParametersSchema), Importer: &schema.ResourceImporter{ - StateContext: schema.ImportStatePassthroughContext, + StateContext: TrackingImportWrapper(resources.FunctionJavascript, ImportFunction), }, } } diff --git a/pkg/resources/function_python.go b/pkg/resources/function_python.go index ebc3dd7259..1a6f9e6788 100644 --- a/pkg/resources/function_python.go +++ b/pkg/resources/function_python.go @@ -40,7 +40,7 @@ func FunctionPython() *schema.Resource { Schema: collections.MergeMaps(pythonFunctionSchema, functionParametersSchema), Importer: &schema.ResourceImporter{ - StateContext: schema.ImportStatePassthroughContext, + StateContext: TrackingImportWrapper(resources.FunctionPython, ImportFunction), }, } } diff --git a/pkg/resources/function_scala.go b/pkg/resources/function_scala.go index 491af794b4..c323a54a27 100644 --- a/pkg/resources/function_scala.go +++ b/pkg/resources/function_scala.go @@ -40,7 +40,7 @@ func FunctionScala() *schema.Resource { Schema: collections.MergeMaps(scalaFunctionSchema, functionParametersSchema), Importer: &schema.ResourceImporter{ - StateContext: schema.ImportStatePassthroughContext, + StateContext: TrackingImportWrapper(resources.FunctionScala, ImportFunction), }, } } diff --git a/pkg/resources/function_sql.go b/pkg/resources/function_sql.go index 53694da3a1..4a30186e53 100644 --- a/pkg/resources/function_sql.go +++ b/pkg/resources/function_sql.go @@ -40,7 +40,7 @@ func FunctionSql() *schema.Resource { Schema: collections.MergeMaps(sqlFunctionSchema, functionParametersSchema), Importer: &schema.ResourceImporter{ - StateContext: schema.ImportStatePassthroughContext, + StateContext: 
TrackingImportWrapper(resources.FunctionSql, ImportFunction), }, } } @@ -114,7 +114,6 @@ func ReadContextFunctionSql(ctx context.Context, d *schema.ResourceData, meta an // not reading is_secure on purpose (handled as external change to show output) readFunctionOrProcedureArguments(d, allFunctionDetails.functionDetails.NormalizedArguments), d.Set("return_type", allFunctionDetails.functionDetails.ReturnDataType.ToSql()), - // not reading null_input_behavior on purpose (handled as external change to show output) // not reading return_results_behavior on purpose (handled as external change to show output) d.Set("comment", allFunctionDetails.function.Description), setRequiredFromStringPtr(d, "handler", allFunctionDetails.functionDetails.Handler), diff --git a/pkg/resources/procedure_commons.go b/pkg/resources/procedure_commons.go index 12d3645388..83d79963a5 100644 --- a/pkg/resources/procedure_commons.go +++ b/pkg/resources/procedure_commons.go @@ -9,6 +9,7 @@ import ( "slices" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/helpers" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/internal/logging" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/internal/provider" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/schemas" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk" @@ -480,6 +481,40 @@ func UpdateProcedure(language string, readFunc func(ctx context.Context, d *sche } } +func ImportProcedure(ctx context.Context, d *schema.ResourceData, meta any) ([]*schema.ResourceData, error) { + logging.DebugLogger.Printf("[DEBUG] Starting procedure import") + client := meta.(*provider.Context).Client + id, err := sdk.ParseSchemaObjectIdentifierWithArguments(d.Id()) + if err != nil { + return nil, err + } + + procedureDetails, err := client.Procedures.DescribeDetails(ctx, id) + if err != nil { + return nil, err + } + + procedure, err := client.Procedures.ShowByID(ctx, id) + if err != nil { + return nil, err + } + + 
err = errors.Join( + d.Set("database", id.DatabaseName()), + d.Set("schema", id.SchemaName()), + d.Set("name", id.Name()), + d.Set("is_secure", booleanStringFromBool(procedure.IsSecure)), + setOptionalFromStringPtr(d, "null_input_behavior", procedureDetails.NullHandling), + importFunctionOrProcedureArguments(d, procedureDetails.NormalizedArguments), + // all others are set in read + ) + if err != nil { + return nil, err + } + + return []*schema.ResourceData{d}, nil +} + func queryAllProcedureDetailsCommon(ctx context.Context, d *schema.ResourceData, client *sdk.Client, id sdk.SchemaObjectIdentifierWithArguments) (*allProcedureDetailsCommon, diag.Diagnostics) { procedureDetails, err := client.Procedures.DescribeDetails(ctx, id) if err != nil { diff --git a/pkg/resources/procedure_java.go b/pkg/resources/procedure_java.go index 1d98f7cf2a..584c4da787 100644 --- a/pkg/resources/procedure_java.go +++ b/pkg/resources/procedure_java.go @@ -41,7 +41,7 @@ func ProcedureJava() *schema.Resource { Schema: collections.MergeMaps(javaProcedureSchema, procedureParametersSchema), Importer: &schema.ResourceImporter{ - StateContext: schema.ImportStatePassthroughContext, + StateContext: TrackingImportWrapper(resources.ProcedureJava, ImportProcedure), }, } } diff --git a/pkg/resources/procedure_java_acceptance_test.go b/pkg/resources/procedure_java_acceptance_test.go index c6d0aba743..b7a81a0b40 100644 --- a/pkg/resources/procedure_java_acceptance_test.go +++ b/pkg/resources/procedure_java_acceptance_test.go @@ -16,6 +16,7 @@ import ( "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/bettertestspoc/config" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/bettertestspoc/config/model" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/helpers/random" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/importchecks" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/testdatatypes" 
"github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/testenvs" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/helpers" @@ -26,7 +27,6 @@ import ( "github.com/hashicorp/terraform-plugin-testing/tfversion" ) -// TODO [SNOW-1348103]: test import // TODO [SNOW-1348103]: test external changes // TODO [SNOW-1348103]: test changes of attributes separately @@ -76,6 +76,20 @@ func TestAcc_ProcedureJava_InlineBasic(t *testing.T) { assert.Check(resource.TestCheckResourceAttr(procedureModel.ResourceReference(), "arguments.0.arg_default_value", "")), ), }, + // IMPORT + { + ResourceName: procedureModel.ResourceReference(), + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"is_secure", "arguments.0.arg_data_type", "null_input_behavior"}, + ImportStateCheck: assert.AssertThatImport(t, + resourceassert.ImportedProcedureJavaResource(t, id.FullyQualifiedName()). + HasFullyQualifiedNameString(id.FullyQualifiedName()), + assert.CheckImport(importchecks.TestCheckResourceAttrInstanceState(helpers.EncodeResourceIdentifier(id), "arguments.0.arg_name", argName)), + assert.CheckImport(importchecks.TestCheckResourceAttrInstanceState(helpers.EncodeResourceIdentifier(id), "arguments.0.arg_data_type", "VARCHAR(16777216)")), + assert.CheckImport(importchecks.TestCheckResourceAttrInstanceState(helpers.EncodeResourceIdentifier(id), "arguments.0.arg_default_value", "")), + ), + }, // RENAME { Config: config.FromModels(t, procedureModelRenamed), @@ -220,6 +234,8 @@ func TestAcc_ProcedureJava_InlineFull(t *testing.T) { }). WithTargetPathParts(stage.ID().FullyQualifiedName(), jarName). WithRuntimeVersion("11"). + WithIsSecure("false"). + WithNullInputBehavior(string(sdk.NullInputBehaviorCalledOnNullInput)). WithComment("some comment") procedureModelUpdateWithoutRecreation := model.ProcedureJavaBasicInline("w", id, dataType, handler, definition). @@ -236,6 +252,8 @@ func TestAcc_ProcedureJava_InlineFull(t *testing.T) { }). 
WithTargetPathParts(stage.ID().FullyQualifiedName(), jarName). WithRuntimeVersion("11"). + WithIsSecure("false"). + WithNullInputBehavior(string(sdk.NullInputBehaviorCalledOnNullInput)). WithComment("some other comment") resource.Test(t, resource.TestCase{ @@ -252,7 +270,7 @@ func TestAcc_ProcedureJava_InlineFull(t *testing.T) { Check: assert.AssertThat(t, resourceassert.ProcedureJavaResource(t, procedureModel.ResourceReference()). HasNameString(id.Name()). - HasIsSecureString(r.BooleanDefault). + HasIsSecureString(r.BooleanFalse). HasImportsLength(2). HasRuntimeVersionString("11"). HasProcedureDefinitionString(definition). @@ -269,6 +287,20 @@ func TestAcc_ProcedureJava_InlineFull(t *testing.T) { HasIsSecure(false), ), }, + // IMPORT + { + ResourceName: procedureModel.ResourceReference(), + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"arguments.0.arg_data_type"}, + ImportStateCheck: assert.AssertThatImport(t, + resourceassert.ImportedProcedureJavaResource(t, id.FullyQualifiedName()). + HasFullyQualifiedNameString(id.FullyQualifiedName()), + assert.CheckImport(importchecks.TestCheckResourceAttrInstanceState(helpers.EncodeResourceIdentifier(id), "arguments.0.arg_name", argName)), + assert.CheckImport(importchecks.TestCheckResourceAttrInstanceState(helpers.EncodeResourceIdentifier(id), "arguments.0.arg_data_type", "VARCHAR(16777216)")), + assert.CheckImport(importchecks.TestCheckResourceAttrInstanceState(helpers.EncodeResourceIdentifier(id), "arguments.0.arg_default_value", "")), + ), + }, // UPDATE WITHOUT RECREATION { ConfigPlanChecks: resource.ConfigPlanChecks{ @@ -280,7 +312,7 @@ func TestAcc_ProcedureJava_InlineFull(t *testing.T) { Check: assert.AssertThat(t, resourceassert.ProcedureJavaResource(t, procedureModelUpdateWithoutRecreation.ResourceReference()). HasNameString(id.Name()). - HasIsSecureString(r.BooleanDefault). + HasIsSecureString(r.BooleanFalse). HasImportsLength(2). HasRuntimeVersionString("11"). 
HasProcedureDefinitionString(definition). diff --git a/pkg/resources/procedure_javascript.go b/pkg/resources/procedure_javascript.go index 4a273e28f5..97336f2839 100644 --- a/pkg/resources/procedure_javascript.go +++ b/pkg/resources/procedure_javascript.go @@ -40,7 +40,7 @@ func ProcedureJavascript() *schema.Resource { Schema: collections.MergeMaps(javascriptProcedureSchema, procedureParametersSchema), Importer: &schema.ResourceImporter{ - StateContext: schema.ImportStatePassthroughContext, + StateContext: TrackingImportWrapper(resources.ProcedureJavascript, ImportProcedure), }, } } diff --git a/pkg/resources/procedure_python.go b/pkg/resources/procedure_python.go index 0432fbb966..deb95c9871 100644 --- a/pkg/resources/procedure_python.go +++ b/pkg/resources/procedure_python.go @@ -41,7 +41,7 @@ func ProcedurePython() *schema.Resource { Schema: collections.MergeMaps(pythonProcedureSchema, procedureParametersSchema), Importer: &schema.ResourceImporter{ - StateContext: schema.ImportStatePassthroughContext, + StateContext: TrackingImportWrapper(resources.ProcedurePython, ImportProcedure), }, } } diff --git a/pkg/resources/procedure_scala.go b/pkg/resources/procedure_scala.go index 0a5dc691d0..c860c6282d 100644 --- a/pkg/resources/procedure_scala.go +++ b/pkg/resources/procedure_scala.go @@ -41,7 +41,7 @@ func ProcedureScala() *schema.Resource { Schema: collections.MergeMaps(scalaProcedureSchema, procedureParametersSchema), Importer: &schema.ResourceImporter{ - StateContext: schema.ImportStatePassthroughContext, + StateContext: TrackingImportWrapper(resources.ProcedureScala, ImportProcedure), }, } } diff --git a/pkg/resources/procedure_sql.go b/pkg/resources/procedure_sql.go index 64ddfde270..d38717904a 100644 --- a/pkg/resources/procedure_sql.go +++ b/pkg/resources/procedure_sql.go @@ -40,7 +40,7 @@ func ProcedureSql() *schema.Resource { Schema: collections.MergeMaps(sqlProcedureSchema, procedureParametersSchema), Importer: &schema.ResourceImporter{ - StateContext: 
schema.ImportStatePassthroughContext, + StateContext: TrackingImportWrapper(resources.ProcedureSql, ImportProcedure), }, } } From 5af6efb08c479edbaea54f87f79672c802edcc86 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jan=20Cie=C5=9Blak?= Date: Thu, 12 Dec 2024 20:12:52 +0100 Subject: [PATCH 11/20] chore: function tests (#3279) function tests --- .../function_javascript_resource_gen.go | 2 +- .../function_sql_resource_gen.go | 2 +- .../model/function_javascript_model_ext.go | 27 ++ .../config/model/function_python_model_ext.go | 69 +++++ .../config/model/function_scala_model_ext.go | 99 +++++++ .../config/model/function_sql_model_ext.go | 27 ++ pkg/acceptance/helpers/function_client.go | 13 + .../function_java_acceptance_test.go | 2 +- .../function_javascript_acceptance_test.go | 194 ++++++++++++++ .../function_python_acceptance_test.go | 229 ++++++++++++++++ pkg/resources/function_scala.go | 2 +- .../function_scala_acceptance_test.go | 248 ++++++++++++++++++ pkg/resources/function_sql_acceptance_test.go | 164 ++++++++++++ pkg/sdk/testint/functions_integration_test.go | 7 +- 14 files changed, 1078 insertions(+), 7 deletions(-) create mode 100644 pkg/resources/function_javascript_acceptance_test.go create mode 100644 pkg/resources/function_python_acceptance_test.go create mode 100644 pkg/resources/function_scala_acceptance_test.go create mode 100644 pkg/resources/function_sql_acceptance_test.go diff --git a/pkg/acceptance/bettertestspoc/assert/resourceassert/function_javascript_resource_gen.go b/pkg/acceptance/bettertestspoc/assert/resourceassert/function_javascript_resource_gen.go index e633c26e0c..86fa5f5a52 100644 --- a/pkg/acceptance/bettertestspoc/assert/resourceassert/function_javascript_resource_gen.go +++ b/pkg/acceptance/bettertestspoc/assert/resourceassert/function_javascript_resource_gen.go @@ -93,7 +93,7 @@ func (f *FunctionJavascriptResourceAssert) HasNullInputBehaviorString(expected s } func (f *FunctionJavascriptResourceAssert) 
HasReturnBehaviorString(expected string) *FunctionJavascriptResourceAssert { - f.AddAssertion(assert.ValueSet("return_behavior", expected)) + f.AddAssertion(assert.ValueSet("return_results_behavior", expected)) return f } diff --git a/pkg/acceptance/bettertestspoc/assert/resourceassert/function_sql_resource_gen.go b/pkg/acceptance/bettertestspoc/assert/resourceassert/function_sql_resource_gen.go index 142de640a5..7a4188adeb 100644 --- a/pkg/acceptance/bettertestspoc/assert/resourceassert/function_sql_resource_gen.go +++ b/pkg/acceptance/bettertestspoc/assert/resourceassert/function_sql_resource_gen.go @@ -93,7 +93,7 @@ func (f *FunctionSqlResourceAssert) HasNullInputBehaviorString(expected string) } func (f *FunctionSqlResourceAssert) HasReturnBehaviorString(expected string) *FunctionSqlResourceAssert { - f.AddAssertion(assert.ValueSet("return_behavior", expected)) + f.AddAssertion(assert.ValueSet("return_results_behavior", expected)) return f } diff --git a/pkg/acceptance/bettertestspoc/config/model/function_javascript_model_ext.go b/pkg/acceptance/bettertestspoc/config/model/function_javascript_model_ext.go index 3fa63b5701..c1429191a6 100644 --- a/pkg/acceptance/bettertestspoc/config/model/function_javascript_model_ext.go +++ b/pkg/acceptance/bettertestspoc/config/model/function_javascript_model_ext.go @@ -2,6 +2,12 @@ package model import ( "encoding/json" + + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/bettertestspoc/config" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/provider/resources" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk/datatypes" + tfconfig "github.com/hashicorp/terraform-plugin-testing/config" ) func (f *FunctionJavascriptModel) MarshalJSON() ([]byte, error) { @@ -14,3 +20,24 @@ func (f *FunctionJavascriptModel) MarshalJSON() ([]byte, error) { DependsOn: f.DependsOn(), }) } + +func 
FunctionJavascriptInline(resourceName string, id sdk.SchemaObjectIdentifierWithArguments, functionDefinition string, returnType string) *FunctionJavascriptModel { + f := &FunctionJavascriptModel{ResourceModelMeta: config.Meta(resourceName, resources.FunctionJavascript)} + f.WithDatabase(id.DatabaseName()) + f.WithFunctionDefinition(functionDefinition) + f.WithName(id.Name()) + f.WithReturnType(returnType) + f.WithSchema(id.SchemaName()) + return f +} + +func (f *FunctionJavascriptModel) WithArgument(argName string, argDataType datatypes.DataType) *FunctionJavascriptModel { + return f.WithArgumentsValue( + tfconfig.ObjectVariable( + map[string]tfconfig.Variable{ + "arg_name": tfconfig.StringVariable(argName), + "arg_data_type": tfconfig.StringVariable(argDataType.ToSql()), + }, + ), + ) +} diff --git a/pkg/acceptance/bettertestspoc/config/model/function_python_model_ext.go b/pkg/acceptance/bettertestspoc/config/model/function_python_model_ext.go index 8d7475e389..f68dec8871 100644 --- a/pkg/acceptance/bettertestspoc/config/model/function_python_model_ext.go +++ b/pkg/acceptance/bettertestspoc/config/model/function_python_model_ext.go @@ -2,6 +2,11 @@ package model import ( "encoding/json" + + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/internal/collections" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk/datatypes" + tfconfig "github.com/hashicorp/terraform-plugin-testing/config" ) func (f *FunctionPythonModel) MarshalJSON() ([]byte, error) { @@ -14,3 +19,67 @@ func (f *FunctionPythonModel) MarshalJSON() ([]byte, error) { DependsOn: f.DependsOn(), }) } + +func FunctionPythonBasicInline(resourceName string, id sdk.SchemaObjectIdentifierWithArguments, runtimeVersion string, returnType datatypes.DataType, handler string, functionDefinition string) *FunctionPythonModel { + return FunctionPython(resourceName, id.DatabaseName(), handler, id.Name(), returnType.ToSql(), 
runtimeVersion, id.SchemaName()).WithFunctionDefinition(functionDefinition) +} + +func (f *FunctionPythonModel) WithArgument(argName string, argDataType datatypes.DataType) *FunctionPythonModel { + return f.WithArgumentsValue( + tfconfig.ObjectVariable( + map[string]tfconfig.Variable{ + "arg_name": tfconfig.StringVariable(argName), + "arg_data_type": tfconfig.StringVariable(argDataType.ToSql()), + }, + ), + ) +} + +func (f *FunctionPythonModel) WithImports(imports ...sdk.NormalizedPath) *FunctionPythonModel { + return f.WithImportsValue( + tfconfig.SetVariable( + collections.Map(imports, func(imp sdk.NormalizedPath) tfconfig.Variable { + return tfconfig.ObjectVariable( + map[string]tfconfig.Variable{ + "stage_location": tfconfig.StringVariable(imp.StageLocation), + "path_on_stage": tfconfig.StringVariable(imp.PathOnStage), + }, + ) + })..., + ), + ) +} + +func (f *FunctionPythonModel) WithExternalAccessIntegrations(ids ...sdk.AccountObjectIdentifier) *FunctionPythonModel { + return f.WithExternalAccessIntegrationsValue( + tfconfig.SetVariable( + collections.Map(ids, func(id sdk.AccountObjectIdentifier) tfconfig.Variable { return tfconfig.StringVariable(id.Name()) })..., + ), + ) +} + +func (f *FunctionPythonModel) WithSecrets(secrets map[string]sdk.SchemaObjectIdentifier) *FunctionPythonModel { + objects := make([]tfconfig.Variable, 0) + for k, v := range secrets { + objects = append(objects, tfconfig.ObjectVariable( + map[string]tfconfig.Variable{ + "secret_variable_name": tfconfig.StringVariable(k), + "secret_id": tfconfig.StringVariable(v.FullyQualifiedName()), + }, + )) + } + + return f.WithSecretsValue( + tfconfig.SetVariable( + objects..., + ), + ) +} + +func (f *FunctionPythonModel) WithPackages(pkgs ...string) *FunctionPythonModel { + return f.WithPackagesValue( + tfconfig.SetVariable( + collections.Map(pkgs, func(pkg string) tfconfig.Variable { return tfconfig.StringVariable(pkg) })..., + ), + ) +} diff --git 
a/pkg/acceptance/bettertestspoc/config/model/function_scala_model_ext.go b/pkg/acceptance/bettertestspoc/config/model/function_scala_model_ext.go index a5e43e53ca..bb7fcf2847 100644 --- a/pkg/acceptance/bettertestspoc/config/model/function_scala_model_ext.go +++ b/pkg/acceptance/bettertestspoc/config/model/function_scala_model_ext.go @@ -2,6 +2,12 @@ package model import ( "encoding/json" + "strings" + + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/internal/collections" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk/datatypes" + tfconfig "github.com/hashicorp/terraform-plugin-testing/config" ) func (f *FunctionScalaModel) MarshalJSON() ([]byte, error) { @@ -14,3 +20,96 @@ func (f *FunctionScalaModel) MarshalJSON() ([]byte, error) { DependsOn: f.DependsOn(), }) } + +func FunctionScalaBasicInline( + resourceName string, + id sdk.SchemaObjectIdentifierWithArguments, + runtimeVersion string, + returnType datatypes.DataType, + handler string, + functionDefinition string, +) *FunctionScalaModel { + return FunctionScala(resourceName, id.DatabaseName(), handler, id.Name(), returnType.ToSql(), runtimeVersion, id.SchemaName()).WithFunctionDefinition(functionDefinition) +} + +func (f *FunctionScalaModel) WithArgument(argName string, argDataType datatypes.DataType) *FunctionScalaModel { + return f.WithArgumentsValue( + tfconfig.ObjectVariable( + map[string]tfconfig.Variable{ + "arg_name": tfconfig.StringVariable(argName), + "arg_data_type": tfconfig.StringVariable(argDataType.ToSql()), + }, + ), + ) +} + +func (f *FunctionScalaModel) WithImport(stageLocation string, pathOnStage string) *FunctionScalaModel { + return f.WithImportsValue( + tfconfig.ObjectVariable( + map[string]tfconfig.Variable{ + "stage_location": tfconfig.StringVariable(strings.TrimPrefix(stageLocation, "@")), + "path_on_stage": tfconfig.StringVariable(pathOnStage), + }, + ), + ) +} + +func (f 
*FunctionScalaModel) WithImports(imports ...sdk.NormalizedPath) *FunctionScalaModel { + return f.WithImportsValue( + tfconfig.SetVariable( + collections.Map(imports, func(imp sdk.NormalizedPath) tfconfig.Variable { + return tfconfig.ObjectVariable( + map[string]tfconfig.Variable{ + "stage_location": tfconfig.StringVariable(imp.StageLocation), + "path_on_stage": tfconfig.StringVariable(imp.PathOnStage), + }, + ) + })..., + ), + ) +} + +func (f *FunctionScalaModel) WithPackages(pkgs ...string) *FunctionScalaModel { + return f.WithPackagesValue( + tfconfig.SetVariable( + collections.Map(pkgs, func(pkg string) tfconfig.Variable { return tfconfig.StringVariable(pkg) })..., + ), + ) +} + +func (f *FunctionScalaModel) WithExternalAccessIntegrations(ids ...sdk.AccountObjectIdentifier) *FunctionScalaModel { + return f.WithExternalAccessIntegrationsValue( + tfconfig.SetVariable( + collections.Map(ids, func(id sdk.AccountObjectIdentifier) tfconfig.Variable { return tfconfig.StringVariable(id.Name()) })..., + ), + ) +} + +func (f *FunctionScalaModel) WithSecrets(secrets map[string]sdk.SchemaObjectIdentifier) *FunctionScalaModel { + objects := make([]tfconfig.Variable, 0) + for k, v := range secrets { + objects = append(objects, tfconfig.ObjectVariable( + map[string]tfconfig.Variable{ + "secret_variable_name": tfconfig.StringVariable(k), + "secret_id": tfconfig.StringVariable(v.FullyQualifiedName()), + }, + )) + } + + return f.WithSecretsValue( + tfconfig.SetVariable( + objects..., + ), + ) +} + +func (f *FunctionScalaModel) WithTargetPathParts(stageLocation string, pathOnStage string) *FunctionScalaModel { + return f.WithTargetPathValue( + tfconfig.ObjectVariable( + map[string]tfconfig.Variable{ + "stage_location": tfconfig.StringVariable(stageLocation), + "path_on_stage": tfconfig.StringVariable(pathOnStage), + }, + ), + ) +} diff --git a/pkg/acceptance/bettertestspoc/config/model/function_sql_model_ext.go b/pkg/acceptance/bettertestspoc/config/model/function_sql_model_ext.go 
index d4f775628d..f01b2e8ffe 100644 --- a/pkg/acceptance/bettertestspoc/config/model/function_sql_model_ext.go +++ b/pkg/acceptance/bettertestspoc/config/model/function_sql_model_ext.go @@ -2,6 +2,12 @@ package model import ( "encoding/json" + + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/bettertestspoc/config" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/provider/resources" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk/datatypes" + tfconfig "github.com/hashicorp/terraform-plugin-testing/config" ) func (f *FunctionSqlModel) MarshalJSON() ([]byte, error) { @@ -14,3 +20,24 @@ func (f *FunctionSqlModel) MarshalJSON() ([]byte, error) { DependsOn: f.DependsOn(), }) } + +func FunctionSqlBasicInline(resourceName string, id sdk.SchemaObjectIdentifierWithArguments, functionDefinition string, returnType string) *FunctionSqlModel { + f := &FunctionSqlModel{ResourceModelMeta: config.Meta(resourceName, resources.FunctionSql)} + f.WithDatabase(id.DatabaseName()) + f.WithFunctionDefinition(functionDefinition) + f.WithName(id.Name()) + f.WithReturnType(returnType) + f.WithSchema(id.SchemaName()) + return f +} + +func (f *FunctionSqlModel) WithArgument(argName string, argDataType datatypes.DataType) *FunctionSqlModel { + return f.WithArgumentsValue( + tfconfig.ObjectVariable( + map[string]tfconfig.Variable{ + "arg_name": tfconfig.StringVariable(argName), + "arg_data_type": tfconfig.StringVariable(argDataType.ToSql()), + }, + ), + ) +} diff --git a/pkg/acceptance/helpers/function_client.go b/pkg/acceptance/helpers/function_client.go index 36c5ffcfb4..600027fe2c 100644 --- a/pkg/acceptance/helpers/function_client.go +++ b/pkg/acceptance/helpers/function_client.go @@ -240,6 +240,11 @@ func (c *FunctionClient) SampleJavascriptDefinition(t *testing.T, argName string `, argName) } +func (c *FunctionClient) SampleJavascriptDefinitionNoArgs(t *testing.T) string 
{ + t.Helper() + return `return 1;` +} + func (c *FunctionClient) SamplePythonDefinition(t *testing.T, funcName string, argName string) string { t.Helper() @@ -271,6 +276,14 @@ func (c *FunctionClient) SampleSqlDefinition(t *testing.T) string { return "3.141592654::FLOAT" } +func (c *FunctionClient) SampleSqlDefinitionWithArgument(t *testing.T, argName string) string { + t.Helper() + + return fmt.Sprintf(` +%s +`, argName) +} + func (c *FunctionClient) PythonIdentityDefinition(t *testing.T, funcName string, argName string) string { t.Helper() diff --git a/pkg/resources/function_java_acceptance_test.go b/pkg/resources/function_java_acceptance_test.go index 8e30bf28e4..e210974888 100644 --- a/pkg/resources/function_java_acceptance_test.go +++ b/pkg/resources/function_java_acceptance_test.go @@ -234,7 +234,7 @@ func TestAcc_FunctionJava_InlineFull(t *testing.T) { }). WithTargetPathParts(stage.ID().FullyQualifiedName(), jarName). WithRuntimeVersion("11"). - WithIsSecure("false"). + WithIsSecure(r.BooleanFalse). WithNullInputBehavior(string(sdk.NullInputBehaviorCalledOnNullInput)). WithReturnResultsBehavior(string(sdk.ReturnResultsBehaviorVolatile)). 
WithComment("some comment") diff --git a/pkg/resources/function_javascript_acceptance_test.go b/pkg/resources/function_javascript_acceptance_test.go new file mode 100644 index 0000000000..2ef1c7d7f7 --- /dev/null +++ b/pkg/resources/function_javascript_acceptance_test.go @@ -0,0 +1,194 @@ +package resources_test + +import ( + "testing" + + acc "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/importchecks" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/helpers" + r "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/resources" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk/datatypes" + + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/bettertestspoc/assert" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/bettertestspoc/assert/resourceassert" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/bettertestspoc/assert/resourceshowoutputassert" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/bettertestspoc/config" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/bettertestspoc/config/model" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/testdatatypes" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/testenvs" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/provider/resources" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk" + "github.com/hashicorp/terraform-plugin-testing/helper/resource" + "github.com/hashicorp/terraform-plugin-testing/plancheck" + "github.com/hashicorp/terraform-plugin-testing/tfversion" +) + +func TestAcc_FunctionJavascript_InlineBasic(t *testing.T) { + argName := "x" + dataType := testdatatypes.DataTypeVariant + + id := acc.TestClient().Ids.RandomSchemaObjectIdentifierWithArgumentsNewDataTypes(dataType) + 
idWithChangedNameButTheSameDataType := acc.TestClient().Ids.RandomSchemaObjectIdentifierWithArgumentsNewDataTypes(dataType) + + definition := acc.TestClient().Function.SampleJavascriptDefinition(t, argName) + + functionModel := model.FunctionJavascriptInline("test", id, definition, datatypes.VariantLegacyDataType). + WithArgument(argName, dataType) + functionModelRenamed := model.FunctionJavascriptInline("test", idWithChangedNameButTheSameDataType, definition, datatypes.VariantLegacyDataType). + WithArgument(argName, dataType) + + resource.Test(t, resource.TestCase{ + ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, + TerraformVersionChecks: []tfversion.TerraformVersionCheck{ + tfversion.RequireAbove(tfversion.Version1_5_0), + }, + PreCheck: func() { acc.TestAccPreCheck(t) }, + CheckDestroy: acc.CheckDestroy(t, resources.FunctionJavascript), + Steps: []resource.TestStep{ + // CREATE BASIC + { + Config: config.FromModels(t, functionModel), + Check: assert.AssertThat(t, + resourceassert.FunctionJavascriptResource(t, functionModel.ResourceReference()). + HasNameString(id.Name()). + HasReturnTypeString(datatypes.VariantLegacyDataType). + HasIsSecureString(r.BooleanDefault). + HasCommentString(sdk.DefaultFunctionComment). + HasFunctionDefinitionString(definition). + HasFunctionLanguageString("JAVASCRIPT"). + HasFullyQualifiedNameString(id.FullyQualifiedName()), + resourceshowoutputassert.FunctionShowOutput(t, functionModel.ResourceReference()). 
+ HasIsSecure(false), + assert.Check(resource.TestCheckResourceAttr(functionModel.ResourceReference(), "arguments.0.arg_name", argName)), + assert.Check(resource.TestCheckResourceAttr(functionModel.ResourceReference(), "arguments.0.arg_data_type", dataType.ToSql())), + assert.Check(resource.TestCheckResourceAttr(functionModel.ResourceReference(), "arguments.0.arg_default_value", "")), + ), + }, + // RENAME + { + Config: config.FromModels(t, functionModelRenamed), + Check: assert.AssertThat(t, + resourceassert.FunctionJavaResource(t, functionModelRenamed.ResourceReference()). + HasNameString(idWithChangedNameButTheSameDataType.Name()). + HasFullyQualifiedNameString(idWithChangedNameButTheSameDataType.FullyQualifiedName()), + ), + }, + }, + }) +} + +func TestAcc_FunctionJavascript_InlineEmptyArgs(t *testing.T) { + id := acc.TestClient().Ids.RandomSchemaObjectIdentifierWithArgumentsNewDataTypes() + definition := acc.TestClient().Function.SampleJavascriptDefinitionNoArgs(t) + functionModel := model.FunctionJavascriptInline("test", id, definition, datatypes.VariantLegacyDataType) + + resource.Test(t, resource.TestCase{ + ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, + TerraformVersionChecks: []tfversion.TerraformVersionCheck{ + tfversion.RequireAbove(tfversion.Version1_5_0), + }, + PreCheck: func() { acc.TestAccPreCheck(t) }, + CheckDestroy: acc.CheckDestroy(t, resources.FunctionJavascript), + Steps: []resource.TestStep{ + // CREATE BASIC + { + Config: config.FromModels(t, functionModel), + Check: assert.AssertThat(t, + resourceassert.FunctionJavaResource(t, functionModel.ResourceReference()). + HasNameString(id.Name()). + HasFunctionDefinitionString(definition). + HasFunctionLanguageString("JAVASCRIPT"). 
+ HasFullyQualifiedNameString(id.FullyQualifiedName()), + ), + }, + }, + }) +} + +func TestAcc_FunctionJavascript_InlineFull(t *testing.T) { + _ = testenvs.GetOrSkipTest(t, testenvs.EnableAcceptance) + acc.TestAccPreCheck(t) + + argName := "x" + dataType := testdatatypes.DataTypeVariant + + id := acc.TestClient().Ids.RandomSchemaObjectIdentifierWithArgumentsNewDataTypes(dataType) + definition := acc.TestClient().Function.SampleJavascriptDefinition(t, argName) + functionModel := model.FunctionJavascriptInline("test", id, definition, datatypes.VariantLegacyDataType). + WithIsSecure(r.BooleanFalse). + WithArgument(argName, dataType). + WithNullInputBehavior(string(sdk.NullInputBehaviorReturnsNullInput)). + WithReturnResultsBehavior(string(sdk.ReturnResultsBehaviorVolatile)). + WithComment("some comment") + + functionModelUpdateWithoutRecreation := model.FunctionJavascriptInline("test", id, definition, datatypes.VariantLegacyDataType). + WithArgument(argName, dataType). + WithIsSecure(r.BooleanFalse). + WithNullInputBehavior(string(sdk.NullInputBehaviorReturnsNullInput)). + WithReturnResultsBehavior(string(sdk.ReturnResultsBehaviorVolatile)). + WithComment("some other comment") + + resource.Test(t, resource.TestCase{ + ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, + TerraformVersionChecks: []tfversion.TerraformVersionCheck{ + tfversion.RequireAbove(tfversion.Version1_5_0), + }, + PreCheck: func() { acc.TestAccPreCheck(t) }, + CheckDestroy: acc.CheckDestroy(t, resources.FunctionJavascript), + Steps: []resource.TestStep{ + // CREATE WITH ALL + { + Config: config.FromModels(t, functionModel), + Check: assert.AssertThat(t, + resourceassert.FunctionJavascriptResource(t, functionModel.ResourceReference()). + HasNameString(id.Name()). + HasIsSecureString(r.BooleanFalse). + HasFunctionDefinitionString(definition). + HasCommentString("some comment"). + HasFunctionLanguageString("JAVASCRIPT"). 
+ HasNullInputBehaviorString(string(sdk.NullInputBehaviorReturnsNullInput)). + HasReturnBehaviorString(string(sdk.ReturnResultsBehaviorVolatile)). + HasFullyQualifiedNameString(id.FullyQualifiedName()), + resourceshowoutputassert.FunctionShowOutput(t, functionModel.ResourceReference()). + HasIsSecure(false), + ), + }, + // IMPORT + { + ResourceName: functionModel.ResourceReference(), + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"arguments.0.arg_data_type"}, + ImportStateCheck: assert.AssertThatImport(t, + resourceassert.ImportedFunctionJavaResource(t, id.FullyQualifiedName()). + HasFullyQualifiedNameString(id.FullyQualifiedName()), + assert.CheckImport(importchecks.TestCheckResourceAttrInstanceState(helpers.EncodeResourceIdentifier(id), "arguments.0.arg_name", argName)), + assert.CheckImport(importchecks.TestCheckResourceAttrInstanceState(helpers.EncodeResourceIdentifier(id), "arguments.0.arg_data_type", "VARIANT")), + assert.CheckImport(importchecks.TestCheckResourceAttrInstanceState(helpers.EncodeResourceIdentifier(id), "arguments.0.arg_default_value", "")), + ), + }, + // UPDATE WITHOUT RECREATION + { + ConfigPlanChecks: resource.ConfigPlanChecks{ + PreApply: []plancheck.PlanCheck{ + plancheck.ExpectResourceAction(functionModelUpdateWithoutRecreation.ResourceReference(), plancheck.ResourceActionUpdate), + }, + }, + Config: config.FromModels(t, functionModelUpdateWithoutRecreation), + Check: assert.AssertThat(t, + resourceassert.FunctionJavascriptResource(t, functionModelUpdateWithoutRecreation.ResourceReference()). + HasNameString(id.Name()). + HasIsSecureString(r.BooleanFalse). + HasFunctionDefinitionString(definition). + HasCommentString("some other comment"). + HasFunctionLanguageString("JAVASCRIPT"). + HasNullInputBehaviorString(string(sdk.NullInputBehaviorReturnsNullInput)). + HasReturnBehaviorString(string(sdk.ReturnResultsBehaviorVolatile)). 
+ HasFullyQualifiedNameString(id.FullyQualifiedName()), + resourceshowoutputassert.FunctionShowOutput(t, functionModelUpdateWithoutRecreation.ResourceReference()). + HasIsSecure(false), + ), + }, + }, + }) +} diff --git a/pkg/resources/function_python_acceptance_test.go b/pkg/resources/function_python_acceptance_test.go new file mode 100644 index 0000000000..04c82d91e5 --- /dev/null +++ b/pkg/resources/function_python_acceptance_test.go @@ -0,0 +1,229 @@ +package resources_test + +import ( + "testing" + + acc "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance" + r "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/resources" + "github.com/hashicorp/terraform-plugin-testing/plancheck" + + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/bettertestspoc/assert" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/bettertestspoc/assert/resourceassert" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/bettertestspoc/assert/resourceshowoutputassert" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/bettertestspoc/config" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/bettertestspoc/config/model" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/importchecks" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/testdatatypes" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/testenvs" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/helpers" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/provider/resources" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk" + "github.com/hashicorp/terraform-plugin-testing/helper/resource" + "github.com/hashicorp/terraform-plugin-testing/tfversion" +) + +func TestAcc_FunctionPython_InlineBasic(t *testing.T) { + funcName := "some_function" + argName := "x" + dataType := 
testdatatypes.DataTypeNumber_36_2 + + id := acc.TestClient().Ids.RandomSchemaObjectIdentifierWithArgumentsNewDataTypes(dataType) + idWithChangedNameButTheSameDataType := acc.TestClient().Ids.RandomSchemaObjectIdentifierWithArgumentsNewDataTypes(dataType) + + definition := acc.TestClient().Function.SamplePythonDefinition(t, funcName, argName) + + functionModel := model.FunctionPythonBasicInline("test", id, "3.8", dataType, funcName, definition). + WithArgument(argName, dataType) + functionModelRenamed := model.FunctionPythonBasicInline("test", idWithChangedNameButTheSameDataType, "3.8", dataType, funcName, definition). + WithArgument(argName, dataType) + + resource.Test(t, resource.TestCase{ + ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, + TerraformVersionChecks: []tfversion.TerraformVersionCheck{ + tfversion.RequireAbove(tfversion.Version1_5_0), + }, + PreCheck: func() { acc.TestAccPreCheck(t) }, + CheckDestroy: acc.CheckDestroy(t, resources.FunctionPython), + Steps: []resource.TestStep{ + // CREATE BASIC + { + Config: config.FromModels(t, functionModel), + Check: assert.AssertThat(t, + resourceassert.FunctionPythonResource(t, functionModel.ResourceReference()). + HasNameString(id.Name()). + HasIsSecureString(r.BooleanDefault). + HasCommentString(sdk.DefaultFunctionComment). + HasRuntimeVersionString("3.8"). + HasFunctionDefinitionString(definition). + HasFunctionLanguageString("PYTHON"). + HasFullyQualifiedNameString(id.FullyQualifiedName()), + resourceshowoutputassert.FunctionShowOutput(t, functionModel.ResourceReference()). 
+ HasIsSecure(false), + assert.Check(resource.TestCheckResourceAttr(functionModel.ResourceReference(), "arguments.0.arg_name", argName)), + assert.Check(resource.TestCheckResourceAttr(functionModel.ResourceReference(), "arguments.0.arg_data_type", dataType.ToSql())), + assert.Check(resource.TestCheckResourceAttr(functionModel.ResourceReference(), "arguments.0.arg_default_value", "")), + ), + }, + // IMPORT + { + ResourceName: functionModel.ResourceReference(), + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"arguments.0.arg_data_type", "is_aggregate", "is_secure", "null_input_behavior", "return_results_behavior"}, + ImportStateCheck: assert.AssertThatImport(t, + resourceassert.ImportedFunctionPythonResource(t, id.FullyQualifiedName()). + HasFullyQualifiedNameString(id.FullyQualifiedName()), + assert.CheckImport(importchecks.TestCheckResourceAttrInstanceState(helpers.EncodeResourceIdentifier(id), "arguments.0.arg_name", argName)), + assert.CheckImport(importchecks.TestCheckResourceAttrInstanceState(helpers.EncodeResourceIdentifier(id), "arguments.0.arg_data_type", "NUMBER(38, 0)")), + assert.CheckImport(importchecks.TestCheckResourceAttrInstanceState(helpers.EncodeResourceIdentifier(id), "arguments.0.arg_default_value", "")), + ), + }, + // RENAME + { + Config: config.FromModels(t, functionModelRenamed), + Check: assert.AssertThat(t, + resourceassert.FunctionPythonResource(t, functionModelRenamed.ResourceReference()). + HasNameString(idWithChangedNameButTheSameDataType.Name()). 
+ HasFullyQualifiedNameString(idWithChangedNameButTheSameDataType.FullyQualifiedName()), + ), + }, + }, + }) +} + +func TestAcc_FunctionPython_InlineFull(t *testing.T) { + _ = testenvs.GetOrSkipTest(t, testenvs.EnableAcceptance) + acc.TestAccPreCheck(t) + + secretId := acc.TestClient().Ids.RandomSchemaObjectIdentifier() + secretId2 := acc.TestClient().Ids.RandomSchemaObjectIdentifier() + + networkRule, networkRuleCleanup := acc.TestClient().NetworkRule.Create(t) + t.Cleanup(networkRuleCleanup) + + secret, secretCleanup := acc.TestClient().Secret.CreateWithGenericString(t, secretId, "test_secret_string") + t.Cleanup(secretCleanup) + + secret2, secret2Cleanup := acc.TestClient().Secret.CreateWithGenericString(t, secretId2, "test_secret_string_2") + t.Cleanup(secret2Cleanup) + + externalAccessIntegration, externalAccessIntegrationCleanup := acc.TestClient().ExternalAccessIntegration.CreateExternalAccessIntegrationWithNetworkRuleAndSecret(t, networkRule.ID(), secret.ID()) + t.Cleanup(externalAccessIntegrationCleanup) + + externalAccessIntegration2, externalAccessIntegration2Cleanup := acc.TestClient().ExternalAccessIntegration.CreateExternalAccessIntegrationWithNetworkRuleAndSecret(t, networkRule.ID(), secret2.ID()) + t.Cleanup(externalAccessIntegration2Cleanup) + + tmpPythonFunction := acc.TestClient().CreateSamplePythonFunctionAndModule(t) + tmpPythonFunction2 := acc.TestClient().CreateSamplePythonFunctionAndModule(t) + + funcName := "some_function" + argName := "x" + dataType := testdatatypes.DataTypeNumber_36_2 + + id := acc.TestClient().Ids.RandomSchemaObjectIdentifierWithArgumentsNewDataTypes(dataType) + definition := acc.TestClient().Function.SamplePythonDefinition(t, funcName, argName) + + functionModel := model.FunctionPythonBasicInline("test", id, "3.8", dataType, funcName, definition). + WithIsSecure(r.BooleanFalse). + WithArgument(argName, dataType). + WithNullInputBehavior(string(sdk.NullInputBehaviorCalledOnNullInput)). 
+ WithReturnResultsBehavior(string(sdk.ReturnResultsBehaviorVolatile)). + WithComment("some comment"). + WithImports( + sdk.NormalizedPath{StageLocation: "~", PathOnStage: tmpPythonFunction.ModuleName + ".py"}, + sdk.NormalizedPath{StageLocation: "~", PathOnStage: tmpPythonFunction2.ModuleName + ".py"}, + ). + WithPackages("numpy", "pandas"). + WithExternalAccessIntegrations(externalAccessIntegration, externalAccessIntegration2). + WithSecrets(map[string]sdk.SchemaObjectIdentifier{ + "abc": secretId, + "def": secretId2, + }) + + functionModelUpdateWithoutRecreation := model.FunctionPythonBasicInline("test", id, "3.8", dataType, funcName, definition). + WithIsSecure(r.BooleanFalse). + WithArgument(argName, dataType). + WithNullInputBehavior(string(sdk.NullInputBehaviorCalledOnNullInput)). + WithReturnResultsBehavior(string(sdk.ReturnResultsBehaviorVolatile)). + WithComment("some other comment"). + WithImports( + sdk.NormalizedPath{StageLocation: "~", PathOnStage: tmpPythonFunction.ModuleName + ".py"}, + sdk.NormalizedPath{StageLocation: "~", PathOnStage: tmpPythonFunction2.ModuleName + ".py"}, + ). + WithPackages("numpy", "pandas"). + WithExternalAccessIntegrations(externalAccessIntegration). + WithSecrets(map[string]sdk.SchemaObjectIdentifier{ + "def": secretId2, + }) + + resource.Test(t, resource.TestCase{ + ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, + TerraformVersionChecks: []tfversion.TerraformVersionCheck{ + tfversion.RequireAbove(tfversion.Version1_5_0), + }, + PreCheck: func() { acc.TestAccPreCheck(t) }, + CheckDestroy: acc.CheckDestroy(t, resources.FunctionPython), + Steps: []resource.TestStep{ + // CREATE WITH ALL + { + Config: config.FromModels(t, functionModel), + Check: assert.AssertThat(t, + resourceassert.FunctionPythonResource(t, functionModel.ResourceReference()). + HasNameString(id.Name()). + HasIsSecureString(r.BooleanFalse). + HasRuntimeVersionString("3.8"). + HasFunctionDefinitionString(definition). 
+ HasCommentString("some comment"). + HasFunctionLanguageString("PYTHON"). + HasFullyQualifiedNameString(id.FullyQualifiedName()), + assert.Check(resource.TestCheckResourceAttr(functionModel.ResourceReference(), "secrets.#", "2")), + assert.Check(resource.TestCheckResourceAttr(functionModel.ResourceReference(), "external_access_integrations.#", "2")), + assert.Check(resource.TestCheckResourceAttr(functionModel.ResourceReference(), "packages.#", "2")), + resourceshowoutputassert.FunctionShowOutput(t, functionModel.ResourceReference()). + HasIsSecure(false), + ), + }, + // IMPORT + { + ResourceName: functionModel.ResourceReference(), + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"is_aggregate", "arguments.0.arg_data_type"}, + ImportStateCheck: assert.AssertThatImport(t, + resourceassert.ImportedFunctionPythonResource(t, id.FullyQualifiedName()). + HasFullyQualifiedNameString(id.FullyQualifiedName()), + assert.CheckImport(importchecks.TestCheckResourceAttrInstanceState(helpers.EncodeResourceIdentifier(id), "arguments.0.arg_name", argName)), + assert.CheckImport(importchecks.TestCheckResourceAttrInstanceState(helpers.EncodeResourceIdentifier(id), "arguments.0.arg_data_type", "NUMBER(38, 0)")), + assert.CheckImport(importchecks.TestCheckResourceAttrInstanceState(helpers.EncodeResourceIdentifier(id), "arguments.0.arg_default_value", "")), + ), + }, + // UPDATE WITHOUT RECREATION + { + ConfigPlanChecks: resource.ConfigPlanChecks{ + PreApply: []plancheck.PlanCheck{ + plancheck.ExpectResourceAction(functionModelUpdateWithoutRecreation.ResourceReference(), plancheck.ResourceActionUpdate), + }, + }, + Config: config.FromModels(t, functionModelUpdateWithoutRecreation), + Check: assert.AssertThat(t, + resourceassert.FunctionPythonResource(t, functionModelUpdateWithoutRecreation.ResourceReference()). + HasNameString(id.Name()). + HasIsSecureString(r.BooleanFalse). + HasRuntimeVersionString("3.8"). + HasFunctionDefinitionString(definition). 
+ HasCommentString("some other comment"). + HasFunctionLanguageString("PYTHON"). + HasFullyQualifiedNameString(id.FullyQualifiedName()), + assert.Check(resource.TestCheckResourceAttr(functionModelUpdateWithoutRecreation.ResourceReference(), "secrets.#", "1")), + assert.Check(resource.TestCheckResourceAttr(functionModelUpdateWithoutRecreation.ResourceReference(), "secrets.0.secret_variable_name", "def")), + assert.Check(resource.TestCheckResourceAttr(functionModelUpdateWithoutRecreation.ResourceReference(), "secrets.0.secret_id", secretId2.FullyQualifiedName())), + assert.Check(resource.TestCheckResourceAttr(functionModelUpdateWithoutRecreation.ResourceReference(), "external_access_integrations.#", "1")), + assert.Check(resource.TestCheckResourceAttr(functionModelUpdateWithoutRecreation.ResourceReference(), "external_access_integrations.0", externalAccessIntegration.Name())), + assert.Check(resource.TestCheckResourceAttr(functionModelUpdateWithoutRecreation.ResourceReference(), "packages.#", "2")), + resourceshowoutputassert.FunctionShowOutput(t, functionModelUpdateWithoutRecreation.ResourceReference()). + HasIsSecure(false), + ), + }, + }, + }) +} diff --git a/pkg/resources/function_scala.go b/pkg/resources/function_scala.go index c323a54a27..8331b63c16 100644 --- a/pkg/resources/function_scala.go +++ b/pkg/resources/function_scala.go @@ -65,7 +65,7 @@ func CreateContextFunctionScala(ctx context.Context, d *schema.ResourceData, met argumentDataTypes := collections.Map(argumentRequests, func(r sdk.FunctionArgumentRequest) datatypes.DataType { return r.ArgDataType }) id := sdk.NewSchemaObjectIdentifierWithArgumentsNormalized(database, sc, name, argumentDataTypes...) - request := sdk.NewCreateForScalaFunctionRequest(id.SchemaObjectId(), returnDataType, runtimeVersion, handler). + request := sdk.NewCreateForScalaFunctionRequest(id.SchemaObjectId(), returnDataType, handler, runtimeVersion). 
WithArguments(argumentRequests) errs := errors.Join( diff --git a/pkg/resources/function_scala_acceptance_test.go b/pkg/resources/function_scala_acceptance_test.go new file mode 100644 index 0000000000..2dbc466a4a --- /dev/null +++ b/pkg/resources/function_scala_acceptance_test.go @@ -0,0 +1,248 @@ +package resources_test + +import ( + "fmt" + "testing" + "time" + + acc "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance" + r "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/resources" + + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/bettertestspoc/assert" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/bettertestspoc/assert/resourceassert" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/bettertestspoc/assert/resourceshowoutputassert" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/bettertestspoc/config" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/bettertestspoc/config/model" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/helpers/random" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/importchecks" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/testdatatypes" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/testenvs" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/helpers" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/provider/resources" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk" + "github.com/hashicorp/terraform-plugin-testing/helper/resource" + "github.com/hashicorp/terraform-plugin-testing/plancheck" + "github.com/hashicorp/terraform-plugin-testing/tfversion" +) + +func TestAcc_FunctionScala_InlineBasic(t *testing.T) { + className := "TestFunc" + funcName := "echoVarchar" + argName := "x" + dataType := testdatatypes.DataTypeVarchar_100 + + id := 
acc.TestClient().Ids.RandomSchemaObjectIdentifierWithArgumentsNewDataTypes(dataType) + idWithChangedNameButTheSameDataType := acc.TestClient().Ids.RandomSchemaObjectIdentifierWithArgumentsNewDataTypes(dataType) + + handler := fmt.Sprintf("%s.%s", className, funcName) + definition := acc.TestClient().Function.SampleScalaDefinition(t, className, funcName, argName) + + functionModel := model.FunctionScalaBasicInline("test", id, "2.12", dataType, handler, definition). + WithArgument(argName, dataType) + functionModelRenamed := model.FunctionScalaBasicInline("test", idWithChangedNameButTheSameDataType, "2.12", dataType, handler, definition). + WithArgument(argName, dataType) + + resource.Test(t, resource.TestCase{ + ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, + TerraformVersionChecks: []tfversion.TerraformVersionCheck{ + tfversion.RequireAbove(tfversion.Version1_5_0), + }, + PreCheck: func() { acc.TestAccPreCheck(t) }, + CheckDestroy: acc.CheckDestroy(t, resources.FunctionScala), + Steps: []resource.TestStep{ + // CREATE BASIC + { + Config: config.FromModels(t, functionModel), + Check: assert.AssertThat(t, + resourceassert.FunctionScalaResource(t, functionModel.ResourceReference()). + HasNameString(id.Name()). + HasIsSecureString(r.BooleanDefault). + HasCommentString(sdk.DefaultFunctionComment). + HasRuntimeVersionString("2.12"). + HasFunctionDefinitionString(definition). + HasFunctionLanguageString("SCALA"). + HasFullyQualifiedNameString(id.FullyQualifiedName()), + resourceshowoutputassert.FunctionShowOutput(t, functionModel.ResourceReference()). 
+ HasIsSecure(false), + assert.Check(resource.TestCheckResourceAttr(functionModel.ResourceReference(), "arguments.0.arg_name", argName)), + assert.Check(resource.TestCheckResourceAttr(functionModel.ResourceReference(), "arguments.0.arg_data_type", dataType.ToSql())), + assert.Check(resource.TestCheckResourceAttr(functionModel.ResourceReference(), "arguments.0.arg_default_value", "")), + ), + }, + // IMPORT + { + ResourceName: functionModel.ResourceReference(), + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"arguments.0.arg_data_type", "is_secure", "null_input_behavior", "return_results_behavior"}, + ImportStateCheck: assert.AssertThatImport(t, + resourceassert.ImportedFunctionScalaResource(t, id.FullyQualifiedName()). + HasFullyQualifiedNameString(id.FullyQualifiedName()), + assert.CheckImport(importchecks.TestCheckResourceAttrInstanceState(helpers.EncodeResourceIdentifier(id), "arguments.0.arg_name", argName)), + assert.CheckImport(importchecks.TestCheckResourceAttrInstanceState(helpers.EncodeResourceIdentifier(id), "arguments.0.arg_data_type", "VARCHAR(16777216)")), + assert.CheckImport(importchecks.TestCheckResourceAttrInstanceState(helpers.EncodeResourceIdentifier(id), "arguments.0.arg_default_value", "")), + ), + }, + // RENAME + { + Config: config.FromModels(t, functionModelRenamed), + Check: assert.AssertThat(t, + resourceassert.FunctionJavaResource(t, functionModelRenamed.ResourceReference()). + HasNameString(idWithChangedNameButTheSameDataType.Name()). 
+ HasFullyQualifiedNameString(idWithChangedNameButTheSameDataType.FullyQualifiedName()), + ), + }, + }, + }) +} + +func TestAcc_FunctionScala_InlineFull(t *testing.T) { + _ = testenvs.GetOrSkipTest(t, testenvs.EnableAcceptance) + acc.TestAccPreCheck(t) + + stage, stageCleanup := acc.TestClient().Stage.CreateStage(t) + t.Cleanup(stageCleanup) + + secretId := acc.TestClient().Ids.RandomSchemaObjectIdentifier() + secretId2 := acc.TestClient().Ids.RandomSchemaObjectIdentifier() + + networkRule, networkRuleCleanup := acc.TestClient().NetworkRule.Create(t) + t.Cleanup(networkRuleCleanup) + + secret, secretCleanup := acc.TestClient().Secret.CreateWithGenericString(t, secretId, "test_secret_string") + t.Cleanup(secretCleanup) + + secret2, secret2Cleanup := acc.TestClient().Secret.CreateWithGenericString(t, secretId2, "test_secret_string_2") + t.Cleanup(secret2Cleanup) + + externalAccessIntegration, externalAccessIntegrationCleanup := acc.TestClient().ExternalAccessIntegration.CreateExternalAccessIntegrationWithNetworkRuleAndSecret(t, networkRule.ID(), secret.ID()) + t.Cleanup(externalAccessIntegrationCleanup) + + externalAccessIntegration2, externalAccessIntegration2Cleanup := acc.TestClient().ExternalAccessIntegration.CreateExternalAccessIntegrationWithNetworkRuleAndSecret(t, networkRule.ID(), secret2.ID()) + t.Cleanup(externalAccessIntegration2Cleanup) + + tmpJavaFunction := acc.TestClient().CreateSampleJavaFunctionAndJarOnUserStage(t) + tmpJavaFunction2 := acc.TestClient().CreateSampleJavaFunctionAndJarOnUserStage(t) + + className := "TestFunc" + funcName := "echoVarchar" + argName := "x" + dataType := testdatatypes.DataTypeVarchar_100 + + id := acc.TestClient().Ids.RandomSchemaObjectIdentifierWithArgumentsNewDataTypes(dataType) + + handler := fmt.Sprintf("%s.%s", className, funcName) + definition := acc.TestClient().Function.SampleScalaDefinition(t, className, funcName, argName) + // TODO [SNOW-1850370]: extract to helper + jarName := fmt.Sprintf("tf-%d-%s.jar", 
time.Now().Unix(), random.AlphaN(5)) + + functionModel := model.FunctionScalaBasicInline("test", id, "2.12", dataType, handler, definition). + WithArgument(argName, dataType). + WithImports( + sdk.NormalizedPath{StageLocation: "~", PathOnStage: tmpJavaFunction.JarName}, + sdk.NormalizedPath{StageLocation: "~", PathOnStage: tmpJavaFunction2.JarName}, + ). + WithPackages("com.snowflake:snowpark:1.14.0", "com.snowflake:telemetry:0.1.0"). + WithExternalAccessIntegrations(externalAccessIntegration, externalAccessIntegration2). + WithSecrets(map[string]sdk.SchemaObjectIdentifier{ + "abc": secretId, + "def": secretId2, + }). + WithTargetPathParts(stage.ID().FullyQualifiedName(), jarName). + WithIsSecure(r.BooleanFalse). + WithNullInputBehavior(string(sdk.NullInputBehaviorCalledOnNullInput)). + WithReturnResultsBehavior(string(sdk.ReturnResultsBehaviorVolatile)). + WithComment("some comment") + + functionModelUpdateWithoutRecreation := model.FunctionScalaBasicInline("test", id, "2.12", dataType, handler, definition). + WithArgument(argName, dataType). + WithImports( + sdk.NormalizedPath{StageLocation: "~", PathOnStage: tmpJavaFunction.JarName}, + sdk.NormalizedPath{StageLocation: "~", PathOnStage: tmpJavaFunction2.JarName}, + ). + WithPackages("com.snowflake:snowpark:1.14.0", "com.snowflake:telemetry:0.1.0"). + WithExternalAccessIntegrations(externalAccessIntegration). + WithSecrets(map[string]sdk.SchemaObjectIdentifier{ + "def": secretId2, + }). + WithTargetPathParts(stage.ID().FullyQualifiedName(), jarName). + WithIsSecure(r.BooleanFalse). + WithNullInputBehavior(string(sdk.NullInputBehaviorCalledOnNullInput)). + WithReturnResultsBehavior(string(sdk.ReturnResultsBehaviorVolatile)). 
+ WithComment("some other comment") + + resource.Test(t, resource.TestCase{ + ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, + TerraformVersionChecks: []tfversion.TerraformVersionCheck{ + tfversion.RequireAbove(tfversion.Version1_5_0), + }, + PreCheck: func() { acc.TestAccPreCheck(t) }, + CheckDestroy: acc.CheckDestroy(t, resources.FunctionScala), + Steps: []resource.TestStep{ + // CREATE WITH ALL + { + Config: config.FromModels(t, functionModel), + Check: assert.AssertThat(t, + resourceassert.FunctionScalaResource(t, functionModel.ResourceReference()). + HasNameString(id.Name()). + HasIsSecureString(r.BooleanFalse). + HasRuntimeVersionString("2.12"). + HasFunctionDefinitionString(definition). + HasCommentString("some comment"). + HasFunctionLanguageString("SCALA"). + HasFullyQualifiedNameString(id.FullyQualifiedName()), + assert.Check(resource.TestCheckResourceAttr(functionModel.ResourceReference(), "target_path.0.stage_location", stage.ID().FullyQualifiedName())), + assert.Check(resource.TestCheckResourceAttr(functionModel.ResourceReference(), "target_path.0.path_on_stage", jarName)), + assert.Check(resource.TestCheckResourceAttr(functionModel.ResourceReference(), "secrets.#", "2")), + assert.Check(resource.TestCheckResourceAttr(functionModel.ResourceReference(), "external_access_integrations.#", "2")), + assert.Check(resource.TestCheckResourceAttr(functionModel.ResourceReference(), "packages.#", "2")), + resourceshowoutputassert.FunctionShowOutput(t, functionModel.ResourceReference()). + HasIsSecure(false), + ), + }, + // IMPORT + { + ResourceName: functionModel.ResourceReference(), + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"arguments.0.arg_data_type"}, + ImportStateCheck: assert.AssertThatImport(t, + resourceassert.ImportedFunctionScalaResource(t, id.FullyQualifiedName()). 
+ HasFullyQualifiedNameString(id.FullyQualifiedName()), + assert.CheckImport(importchecks.TestCheckResourceAttrInstanceState(helpers.EncodeResourceIdentifier(id), "arguments.0.arg_name", argName)), + assert.CheckImport(importchecks.TestCheckResourceAttrInstanceState(helpers.EncodeResourceIdentifier(id), "arguments.0.arg_data_type", "VARCHAR(16777216)")), + assert.CheckImport(importchecks.TestCheckResourceAttrInstanceState(helpers.EncodeResourceIdentifier(id), "arguments.0.arg_default_value", "")), + ), + }, + // UPDATE WITHOUT RECREATION + { + ConfigPlanChecks: resource.ConfigPlanChecks{ + PreApply: []plancheck.PlanCheck{ + plancheck.ExpectResourceAction(functionModelUpdateWithoutRecreation.ResourceReference(), plancheck.ResourceActionUpdate), + }, + }, + Config: config.FromModels(t, functionModelUpdateWithoutRecreation), + Check: assert.AssertThat(t, + resourceassert.FunctionScalaResource(t, functionModelUpdateWithoutRecreation.ResourceReference()). + HasNameString(id.Name()). + HasIsSecureString(r.BooleanFalse). + HasRuntimeVersionString("2.12"). + HasFunctionDefinitionString(definition). + HasCommentString("some other comment"). + HasFunctionLanguageString("SCALA"). 
+ HasFullyQualifiedNameString(id.FullyQualifiedName()), + assert.Check(resource.TestCheckResourceAttr(functionModelUpdateWithoutRecreation.ResourceReference(), "target_path.0.stage_location", stage.ID().FullyQualifiedName())), + assert.Check(resource.TestCheckResourceAttr(functionModelUpdateWithoutRecreation.ResourceReference(), "target_path.0.path_on_stage", jarName)), + assert.Check(resource.TestCheckResourceAttr(functionModelUpdateWithoutRecreation.ResourceReference(), "secrets.#", "1")), + assert.Check(resource.TestCheckResourceAttr(functionModelUpdateWithoutRecreation.ResourceReference(), "secrets.0.secret_variable_name", "def")), + assert.Check(resource.TestCheckResourceAttr(functionModelUpdateWithoutRecreation.ResourceReference(), "secrets.0.secret_id", secretId2.FullyQualifiedName())), + assert.Check(resource.TestCheckResourceAttr(functionModelUpdateWithoutRecreation.ResourceReference(), "external_access_integrations.#", "1")), + assert.Check(resource.TestCheckResourceAttr(functionModelUpdateWithoutRecreation.ResourceReference(), "external_access_integrations.0", externalAccessIntegration.Name())), + assert.Check(resource.TestCheckResourceAttr(functionModelUpdateWithoutRecreation.ResourceReference(), "packages.#", "2")), + resourceshowoutputassert.FunctionShowOutput(t, functionModelUpdateWithoutRecreation.ResourceReference()). 
+ HasIsSecure(false), + ), + }, + }, + }) +} diff --git a/pkg/resources/function_sql_acceptance_test.go b/pkg/resources/function_sql_acceptance_test.go new file mode 100644 index 0000000000..c8df1348ed --- /dev/null +++ b/pkg/resources/function_sql_acceptance_test.go @@ -0,0 +1,164 @@ +package resources_test + +import ( + "testing" + + acc "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/helpers/random" + r "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/resources" + + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/bettertestspoc/assert" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/bettertestspoc/assert/resourceassert" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/bettertestspoc/assert/resourceshowoutputassert" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/bettertestspoc/config" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/bettertestspoc/config/model" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/importchecks" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/testdatatypes" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/helpers" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/provider/resources" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk" + "github.com/hashicorp/terraform-plugin-testing/helper/resource" + "github.com/hashicorp/terraform-plugin-testing/tfversion" +) + +func TestAcc_FunctionSql_InlineBasic(t *testing.T) { + argName := "abc" + dataType := testdatatypes.DataTypeFloat + id := acc.TestClient().Ids.RandomSchemaObjectIdentifierWithArgumentsNewDataTypes(dataType) + idWithChangedNameButTheSameDataType := acc.TestClient().Ids.RandomSchemaObjectIdentifierWithArgumentsNewDataTypes() + + definition := 
acc.TestClient().Function.SampleSqlDefinitionWithArgument(t, argName) + + functionModel := model.FunctionSqlBasicInline("test", id, definition, dataType.ToLegacyDataTypeSql()). + WithArgument(argName, dataType) + functionModelRenamed := model.FunctionSqlBasicInline("test", idWithChangedNameButTheSameDataType, definition, dataType.ToLegacyDataTypeSql()). + WithArgument(argName, dataType) + + resource.Test(t, resource.TestCase{ + ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, + TerraformVersionChecks: []tfversion.TerraformVersionCheck{ + tfversion.RequireAbove(tfversion.Version1_5_0), + }, + PreCheck: func() { acc.TestAccPreCheck(t) }, + CheckDestroy: acc.CheckDestroy(t, resources.FunctionSql), + Steps: []resource.TestStep{ + // CREATE BASIC + { + Config: config.FromModels(t, functionModel), + Check: assert.AssertThat(t, + resourceassert.FunctionSqlResource(t, functionModel.ResourceReference()). + HasNameString(id.Name()). + HasIsSecureString(r.BooleanDefault). + HasCommentString(sdk.DefaultFunctionComment). + HasFunctionDefinitionString(definition). + HasFunctionLanguageString("SQL"). + HasFullyQualifiedNameString(id.FullyQualifiedName()), + resourceshowoutputassert.FunctionShowOutput(t, functionModel.ResourceReference()). + HasIsSecure(false), + assert.Check(resource.TestCheckResourceAttr(functionModel.ResourceReference(), "arguments.0.arg_name", argName)), + assert.Check(resource.TestCheckResourceAttr(functionModel.ResourceReference(), "arguments.0.arg_data_type", dataType.ToSql())), + assert.Check(resource.TestCheckResourceAttr(functionModel.ResourceReference(), "arguments.0.arg_default_value", "")), + ), + }, + // IMPORT + { + ResourceName: functionModel.ResourceReference(), + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{}, + ImportStateCheck: assert.AssertThatImport(t, + resourceassert.ImportedFunctionSqlResource(t, id.FullyQualifiedName()). 
+ HasFullyQualifiedNameString(id.FullyQualifiedName()), + assert.CheckImport(importchecks.TestCheckResourceAttrInstanceState(helpers.EncodeResourceIdentifier(id), "arguments.0.arg_name", argName)), + assert.CheckImport(importchecks.TestCheckResourceAttrInstanceState(helpers.EncodeResourceIdentifier(id), "arguments.0.arg_data_type", "FLOAT")), + assert.CheckImport(importchecks.TestCheckResourceAttrInstanceState(helpers.EncodeResourceIdentifier(id), "arguments.0.arg_default_value", "")), + ), + }, + // RENAME + { + Config: config.FromModels(t, functionModelRenamed), + Check: assert.AssertThat(t, + resourceassert.FunctionSqlResource(t, functionModelRenamed.ResourceReference()). + HasNameString(idWithChangedNameButTheSameDataType.Name()). + HasFullyQualifiedNameString(idWithChangedNameButTheSameDataType.FullyQualifiedName()), + ), + }, + }, + }) +} + +func TestAcc_FunctionSql_InlineFull(t *testing.T) { + argName := "abc" + comment := random.Comment() + otherComment := random.Comment() + dataType := testdatatypes.DataTypeFloat + id := acc.TestClient().Ids.RandomSchemaObjectIdentifierWithArgumentsNewDataTypes(dataType) + idWithChangedNameButTheSameDataType := acc.TestClient().Ids.RandomSchemaObjectIdentifierWithArgumentsNewDataTypes(dataType) + + definition := acc.TestClient().Function.SampleSqlDefinitionWithArgument(t, argName) + + functionModel := model.FunctionSqlBasicInline("test", id, definition, dataType.ToLegacyDataTypeSql()). + WithIsSecure(r.BooleanFalse). + WithArgument(argName, dataType). + WithReturnResultsBehavior(string(sdk.ReturnResultsBehaviorVolatile)). + WithComment(comment) + functionModelRenamed := model.FunctionSqlBasicInline("test", idWithChangedNameButTheSameDataType, definition, dataType.ToLegacyDataTypeSql()). + WithIsSecure(r.BooleanFalse). + WithArgument(argName, dataType). + WithReturnResultsBehavior(string(sdk.ReturnResultsBehaviorVolatile)). 
+ WithComment(otherComment) + + resource.Test(t, resource.TestCase{ + ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, + TerraformVersionChecks: []tfversion.TerraformVersionCheck{ + tfversion.RequireAbove(tfversion.Version1_5_0), + }, + PreCheck: func() { acc.TestAccPreCheck(t) }, + CheckDestroy: acc.CheckDestroy(t, resources.FunctionSql), + Steps: []resource.TestStep{ + // CREATE BASIC + { + Config: config.FromModels(t, functionModel), + Check: assert.AssertThat(t, + resourceassert.FunctionSqlResource(t, functionModel.ResourceReference()). + HasNameString(id.Name()). + HasIsSecureString(r.BooleanFalse). + HasCommentString(comment). + HasReturnBehaviorString(string(sdk.ReturnResultsBehaviorVolatile)). + HasFunctionDefinitionString(definition). + HasFunctionLanguageString("SQL"). + HasFullyQualifiedNameString(id.FullyQualifiedName()), + resourceshowoutputassert.FunctionShowOutput(t, functionModel.ResourceReference()). + HasIsSecure(false), + assert.Check(resource.TestCheckResourceAttr(functionModel.ResourceReference(), "arguments.0.arg_name", argName)), + assert.Check(resource.TestCheckResourceAttr(functionModel.ResourceReference(), "arguments.0.arg_data_type", dataType.ToSql())), + assert.Check(resource.TestCheckResourceAttr(functionModel.ResourceReference(), "arguments.0.arg_default_value", "")), + ), + }, + // IMPORT + { + ResourceName: functionModel.ResourceReference(), + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"return_results_behavior"}, + ImportStateCheck: assert.AssertThatImport(t, + resourceassert.ImportedFunctionSqlResource(t, id.FullyQualifiedName()). 
+ HasFullyQualifiedNameString(id.FullyQualifiedName()), + assert.CheckImport(importchecks.TestCheckResourceAttrInstanceState(helpers.EncodeResourceIdentifier(id), "arguments.0.arg_name", argName)), + assert.CheckImport(importchecks.TestCheckResourceAttrInstanceState(helpers.EncodeResourceIdentifier(id), "arguments.0.arg_data_type", "FLOAT")), + assert.CheckImport(importchecks.TestCheckResourceAttrInstanceState(helpers.EncodeResourceIdentifier(id), "arguments.0.arg_default_value", "")), + ), + }, + // RENAME + { + Config: config.FromModels(t, functionModelRenamed), + Check: assert.AssertThat(t, + resourceassert.FunctionSqlResource(t, functionModelRenamed.ResourceReference()). + HasNameString(idWithChangedNameButTheSameDataType.Name()). + HasFullyQualifiedNameString(idWithChangedNameButTheSameDataType.FullyQualifiedName()). + HasCommentString(otherComment), + ), + }, + }, + }) +} diff --git a/pkg/sdk/testint/functions_integration_test.go b/pkg/sdk/testint/functions_integration_test.go index b5e9a35fb6..1036429d78 100644 --- a/pkg/sdk/testint/functions_integration_test.go +++ b/pkg/sdk/testint/functions_integration_test.go @@ -2053,12 +2053,13 @@ func TestInt_Functions(t *testing.T) { t.Run("create function for SQL - return table data type", func(t *testing.T) { argName := "x" - returnDataType, err := datatypes.ParseDataType(fmt.Sprintf("TABLE(ID %s, PRICE %s, THIRD %s)", datatypes.NumberLegacyDataType, datatypes.FloatLegacyDataType, datatypes.VarcharLegacyDataType)) + returnDataType, err := datatypes.ParseDataType(fmt.Sprintf("TABLE(PRICE %s, THIRD %s)", datatypes.FloatLegacyDataType, datatypes.VarcharLegacyDataType)) require.NoError(t, err) id := testClientHelper().Ids.RandomSchemaObjectIdentifierWithArguments(datatypes.VarcharLegacyDataType) - definition := ` SELECT 1, 2.2::float, 'abc';` + definition := ` +SELECT 2.2::float, 'abc');` // the ending parenthesis has to be there (otherwise SQL compilation error is thrown) dt := 
sdk.NewFunctionReturnsResultDataTypeRequest(returnDataType) returns := sdk.NewFunctionReturnsRequest().WithResultDataType(*dt) argument := sdk.NewFunctionArgumentRequest(argName, nil).WithArgDataTypeOld(datatypes.VarcharLegacyDataType) @@ -2076,7 +2077,7 @@ func TestInt_Functions(t *testing.T) { HasCreatedOnNotEmpty(). HasName(id.Name()). HasSchemaName(id.SchemaName()). - HasArgumentsRawContains(returnDataType.ToLegacyDataTypeSql()), + HasArgumentsRawContains(strings.ReplaceAll(returnDataType.ToLegacyDataTypeSql(), "TABLE(", "TABLE (")), ) assertions.AssertThatObject(t, objectassert.FunctionDetails(t, id). From 10517f337c6b22d5f7f2a4f6c747b6fd2d2f47e9 Mon Sep 17 00:00:00 2001 From: Artur Sawicki Date: Thu, 12 Dec 2024 20:32:49 +0100 Subject: [PATCH 12/20] feat: Docs, test, and missing parameter (#3280) - Update docs - Update migration guide - Add execute as - change execute as SDK - Add tests to procedures - Add examples and import instructions - Describe limitations - Deprecate old function and procedure --- MIGRATION_GUIDE.md | 34 +++ docs/index.md | 2 + docs/resources/function.md | 2 +- docs/resources/function_java.md | 46 ++- docs/resources/function_javascript.md | 48 +++- docs/resources/function_python.md | 53 +++- docs/resources/function_scala.md | 99 ++++++- docs/resources/function_sql.md | 66 ++++- docs/resources/procedure.md | 2 +- docs/resources/procedure_java.md | 96 ++++++- docs/resources/procedure_javascript.md | 48 +++- docs/resources/procedure_python.md | 52 +++- docs/resources/procedure_scala.md | 48 +++- docs/resources/procedure_sql.md | 45 ++- examples/additional/deprecated_resources.MD | 2 + .../snowflake_function_java/import.sh | 1 + .../snowflake_function_java/resource.tf | 12 + .../snowflake_function_javascript/import.sh | 1 + .../snowflake_function_javascript/resource.tf | 14 + .../snowflake_function_python/import.sh | 2 + .../snowflake_function_python/resource.tf | 17 ++ .../snowflake_function_scala/import.sh | 1 + 
.../snowflake_function_scala/resource.tf | 67 +++++ .../snowflake_function_sql/import.sh | 1 + .../snowflake_function_sql/resource.tf | 32 +++ .../snowflake_procedure_java/import.sh | 1 + .../snowflake_procedure_java/resource.tf | 60 ++++ .../snowflake_procedure_javascript/import.sh | 1 + .../resource.tf | 12 + .../snowflake_procedure_python/import.sh | 1 + .../snowflake_procedure_python/resource.tf | 14 + .../snowflake_procedure_scala/import.sh | 1 + .../snowflake_procedure_scala/resource.tf | 14 + .../snowflake_procedure_sql/import.sh | 1 + .../snowflake_procedure_sql/resource.tf | 11 + .../procedure_python_resource_ext.go | 12 + .../procedure_scala_resource_ext.go | 17 ++ .../model/procedure_javascript_model_ext.go | 25 ++ .../model/procedure_python_model_ext.go | 77 +++++ .../config/model/procedure_scala_model_ext.go | 123 ++++++++ .../config/model/procedure_sql_model_ext.go | 25 ++ pkg/acceptance/helpers/procedure_client.go | 10 + pkg/resources/function.go | 2 + pkg/resources/procedure.go | 2 + pkg/resources/procedure_commons.go | 22 ++ pkg/resources/procedure_java.go | 2 + .../procedure_java_acceptance_test.go | 6 +- pkg/resources/procedure_javascript.go | 2 + .../procedure_javascript_acceptance_test.go | 178 ++++++++++++ pkg/resources/procedure_python.go | 2 + .../procedure_python_acceptance_test.go | 240 ++++++++++++++++ pkg/resources/procedure_scala.go | 4 +- .../procedure_scala_acceptance_test.go | 262 ++++++++++++++++++ pkg/resources/procedure_sql.go | 2 + .../procedure_sql_acceptance_test.go | 178 ++++++++++++ pkg/sdk/common_types.go | 4 +- pkg/sdk/common_types_test.go | 2 +- pkg/sdk/functions_and_procedures_commons.go | 2 +- pkg/sdk/procedures_gen.go | 12 +- pkg/sdk/testint/functions_integration_test.go | 2 +- .../testint/procedures_integration_test.go | 2 +- templates/resources/function_java.md.tmpl | 52 ++++ .../resources/function_javascript.md.tmpl | 52 ++++ templates/resources/function_python.md.tmpl | 54 ++++ 
templates/resources/function_scala.md.tmpl | 50 ++++ templates/resources/function_sql.md.tmpl | 52 ++++ templates/resources/procedure_java.md.tmpl | 52 ++++ .../resources/procedure_javascript.md.tmpl | 52 ++++ templates/resources/procedure_python.md.tmpl | 54 ++++ templates/resources/procedure_scala.md.tmpl | 50 ++++ templates/resources/procedure_sql.md.tmpl | 50 ++++ 71 files changed, 2609 insertions(+), 31 deletions(-) create mode 100644 examples/resources/snowflake_function_java/import.sh create mode 100644 examples/resources/snowflake_function_java/resource.tf create mode 100644 examples/resources/snowflake_function_javascript/import.sh create mode 100644 examples/resources/snowflake_function_javascript/resource.tf create mode 100644 examples/resources/snowflake_function_python/import.sh create mode 100644 examples/resources/snowflake_function_python/resource.tf create mode 100644 examples/resources/snowflake_function_scala/import.sh create mode 100644 examples/resources/snowflake_function_scala/resource.tf create mode 100644 examples/resources/snowflake_function_sql/import.sh create mode 100644 examples/resources/snowflake_function_sql/resource.tf create mode 100644 examples/resources/snowflake_procedure_java/import.sh create mode 100644 examples/resources/snowflake_procedure_java/resource.tf create mode 100644 examples/resources/snowflake_procedure_javascript/import.sh create mode 100644 examples/resources/snowflake_procedure_javascript/resource.tf create mode 100644 examples/resources/snowflake_procedure_python/import.sh create mode 100644 examples/resources/snowflake_procedure_python/resource.tf create mode 100644 examples/resources/snowflake_procedure_scala/import.sh create mode 100644 examples/resources/snowflake_procedure_scala/resource.tf create mode 100644 examples/resources/snowflake_procedure_sql/import.sh create mode 100644 examples/resources/snowflake_procedure_sql/resource.tf create mode 100644 
pkg/acceptance/bettertestspoc/assert/resourceassert/procedure_python_resource_ext.go create mode 100644 pkg/acceptance/bettertestspoc/assert/resourceassert/procedure_scala_resource_ext.go create mode 100644 pkg/resources/procedure_javascript_acceptance_test.go create mode 100644 pkg/resources/procedure_python_acceptance_test.go create mode 100644 pkg/resources/procedure_scala_acceptance_test.go create mode 100644 pkg/resources/procedure_sql_acceptance_test.go create mode 100644 templates/resources/function_java.md.tmpl create mode 100644 templates/resources/function_javascript.md.tmpl create mode 100644 templates/resources/function_python.md.tmpl create mode 100644 templates/resources/function_scala.md.tmpl create mode 100644 templates/resources/function_sql.md.tmpl create mode 100644 templates/resources/procedure_java.md.tmpl create mode 100644 templates/resources/procedure_javascript.md.tmpl create mode 100644 templates/resources/procedure_python.md.tmpl create mode 100644 templates/resources/procedure_scala.md.tmpl create mode 100644 templates/resources/procedure_sql.md.tmpl diff --git a/MIGRATION_GUIDE.md b/MIGRATION_GUIDE.md index 8bd54b0a87..7dea8fd480 100644 --- a/MIGRATION_GUIDE.md +++ b/MIGRATION_GUIDE.md @@ -9,6 +9,40 @@ across different versions. ## v0.99.0 ➞ v0.100.0 +### *(preview feature/deprecation)* Function and procedure resources + +`snowflake_function` is now deprecated in favor of 5 new preview resources: + +- `snowflake_function_java` +- `snowflake_function_javascript` +- `snowflake_function_python` +- `snowflake_function_scala` +- `snowflake_function_sql` + +It will be removed with the v1 release. Please check the docs for the new resources and adjust your configuration files. +For no downtime migration, follow our [guide](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/resource_migration.md). 
+ +The new resources are more aligned with current features like: +- external access integrations support +- secrets support +- argument default values + +`snowflake_procedure` is now deprecated in favor of 5 new preview resources: + +- `snowflake_procedure_java` +- `snowflake_procedure_javascript` +- `snowflake_procedure_python` +- `snowflake_procedure_scala` +- `snowflake_procedure_sql` + +It will be removed with the v1 release. Please check the docs for the new resources and adjust your configuration files. +For no downtime migration, follow our [guide](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/resource_migration.md). + +The new resources are more aligned with current features like: +- external access integrations support +- secrets support +- argument default values + ### *(new feature)* Account role data source Added a new `snowflake_account_roles` data source for account roles. It's based on the `snowflake_roles` data source. `account_roles` field now organizes output of show under `show_output` field.
diff --git a/docs/index.md b/docs/index.md index c36999718b..9055832dba 100644 --- a/docs/index.md +++ b/docs/index.md @@ -362,7 +362,9 @@ provider "snowflake" { ## Currently deprecated resources - [snowflake_database_old](./docs/resources/database_old) +- [snowflake_function](./docs/resources/function) - [snowflake_oauth_integration](./docs/resources/oauth_integration) +- [snowflake_procedure](./docs/resources/procedure) - [snowflake_role](./docs/resources/role) - use [snowflake_account_role](./docs/resources/account_role) instead - [snowflake_saml_integration](./docs/resources/saml_integration) - use [snowflake_saml2_integration](./docs/resources/saml2_integration) instead - [snowflake_stream](./docs/resources/stream) diff --git a/docs/resources/function.md b/docs/resources/function.md index f3a3290542..43b28436fc 100644 --- a/docs/resources/function.md +++ b/docs/resources/function.md @@ -7,7 +7,7 @@ description: |- # snowflake_function (Resource) - +~> **Deprecation** This resource is deprecated and will be removed in a future major version release. Please use snowflake_function_java, snowflake_function_javascript, snowflake_function_python, snowflake_function_scala, and snowflake_function_sql instead. ## Example Usage diff --git a/docs/resources/function_java.md b/docs/resources/function_java.md index 820bb4e63d..e1c86fdda8 100644 --- a/docs/resources/function_java.md +++ b/docs/resources/function_java.md @@ -5,11 +5,44 @@ description: |- Resource used to manage java function objects. For more information, check function documentation https://docs.snowflake.com/en/sql-reference/sql/create-function. --- +-> **Note** External changes to `is_secure`, `return_results_behavior`, and `null_input_behavior` are not currently supported. They will be handled in the following versions of the provider which may still affect this resource. + +-> **Note** `COPY GRANTS` and `OR REPLACE` are not currently supported. + +-> **Note** `RETURN... 
[[ NOT ] NULL]` is not currently supported. It will be improved in the following versions of the provider which may still affect this resource. + +-> **Note** Use of return type `TABLE` is currently limited. It will be improved in the following versions of the provider which may still affect this resource. + +-> **Note** Snowflake is not returning full data type information for arguments which may lead to unexpected plan outputs. Diff suppression for such cases will be improved. + +-> **Note** Snowflake is not returning the default values for arguments so argument's `arg_default_value` external changes cannot be tracked. + +-> **Note** Limit the use of special characters (`.`, `'`, `/`, `"`, `(`, `)`, `[`, `]`, `{`, `}`, ` `) in argument names, stage ids, and secret ids. It's best to limit to only alphanumeric and underscores. There is a lot of parsing of SHOW/DESCRIBE outputs involved and using special characters may limit the possibility to achieve the correct results. + +~> **Required warehouse** This resource may require active warehouse. Please, make sure you have either set a DEFAULT_WAREHOUSE for the user, or specified a warehouse in the provider configuration. + # snowflake_function_java (Resource) Resource used to manage java function objects. For more information, check [function documentation](https://docs.snowflake.com/en/sql-reference/sql/create-function). 
- +## Example Usage + +```terraform +resource "snowflake_function_java" "w" { + database = "Database" + schema = "Schema" + name = "Name" + arguments { + arg_data_type = "VARCHAR(100)" + arg_name = "x" + } + return_type = "VARCHAR(100)" + handler = "TestFunc.echoVarchar" + function_definition = "\n\tclass TestFunc {\n\t\tpublic static String echoVarchar(String x) {\n\t\t\treturn x;\n\t\t}\n\t}\n" +} +``` +-> **Note** Instead of using fully_qualified_name, you can reference objects managed outside Terraform by constructing a correct ID, consult [identifiers guide](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/latest/docs/guides/identifiers#new-computed-fully-qualified-name-field-in-resources). + ## Schema @@ -173,3 +206,14 @@ Read-Only: - `schema_name` (String) - `secrets` (String) - `valid_for_clustering` (Boolean) + +## Import + +Import is supported using the following syntax: + +```shell +terraform import snowflake_function_java.example '""."".""(varchar, varchar, varchar)' +``` + +Note: Snowflake is not returning all information needed to populate the state correctly after import (e.g. data types with attributes like NUMBER(32, 10) are returned as NUMBER, default values for arguments are not returned at all). +Also, `ALTER` for functions is very limited so most of the attributes on this resource are marked as force new. Because of that, in multiple situations plan won't be empty after importing and manual state operations may be required. diff --git a/docs/resources/function_javascript.md b/docs/resources/function_javascript.md index 1619ab3a06..c81f21a080 100644 --- a/docs/resources/function_javascript.md +++ b/docs/resources/function_javascript.md @@ -5,11 +5,46 @@ description: |- Resource used to manage javascript function objects. For more information, check function documentation https://docs.snowflake.com/en/sql-reference/sql/create-function. 
--- +-> **Note** External changes to `is_secure`, `return_results_behavior`, and `null_input_behavior` are not currently supported. They will be handled in the following versions of the provider which may still affect this resource. + +-> **Note** `COPY GRANTS` and `OR REPLACE` are not currently supported. + +-> **Note** `RETURN... [[ NOT ] NULL]` is not currently supported. It will be improved in the following versions of the provider which may still affect this resource. + +-> **Note** Use of return type `TABLE` is currently limited. It will be improved in the following versions of the provider which may still affect this resource. + +-> **Note** Snowflake is not returning full data type information for arguments which may lead to unexpected plan outputs. Diff suppression for such cases will be improved. + +-> **Note** Snowflake is not returning the default values for arguments so argument's `arg_default_value` external changes cannot be tracked. + +-> **Note** Limit the use of special characters (`.`, `'`, `/`, `"`, `(`, `)`, `[`, `]`, `{`, `}`, ` `) in argument names, stage ids, and secret ids. It's best to limit to only alphanumeric and underscores. There is a lot of parsing of SHOW/DESCRIBE outputs involved and using special characters may limit the possibility to achieve the correct results. + +~> **Required warehouse** This resource may require active warehouse. Please, make sure you have either set a DEFAULT_WAREHOUSE for the user, or specified a warehouse in the provider configuration. + # snowflake_function_javascript (Resource) Resource used to manage javascript function objects. For more information, check [function documentation](https://docs.snowflake.com/en/sql-reference/sql/create-function). 
- +## Example Usage + +```terraform +# Minimal +resource "snowflake_function_javascript" "minimal" { + database = snowflake_database.test.name + schema = snowflake_schema.test.name + name = "my_javascript_function" + arguments { + arg_data_type = "VARIANT" + arg_name = "x" + } + function_definition = < **Note** Instead of using fully_qualified_name, you can reference objects managed outside Terraform by constructing a correct ID, consult [identifiers guide](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/latest/docs/guides/identifiers#new-computed-fully-qualified-name-field-in-resources). + ## Schema @@ -139,3 +174,14 @@ Read-Only: - `schema_name` (String) - `secrets` (String) - `valid_for_clustering` (Boolean) + +## Import + +Import is supported using the following syntax: + +```shell +terraform import snowflake_function_javascript.example '""."".""(varchar, varchar, varchar)' +``` + +Note: Snowflake is not returning all information needed to populate the state correctly after import (e.g. data types with attributes like NUMBER(32, 10) are returned as NUMBER, default values for arguments are not returned at all). +Also, `ALTER` for functions is very limited so most of the attributes on this resource are marked as force new. Because of that, in multiple situations plan won't be empty after importing and manual state operations may be required. diff --git a/docs/resources/function_python.md b/docs/resources/function_python.md index 21e4244789..66fee1c02f 100644 --- a/docs/resources/function_python.md +++ b/docs/resources/function_python.md @@ -5,11 +5,51 @@ description: |- Resource used to manage python function objects. For more information, check function documentation https://docs.snowflake.com/en/sql-reference/sql/create-function. --- +-> **Note** External changes to `is_secure`, `return_results_behavior`, and `null_input_behavior` are not currently supported. 
They will be handled in the following versions of the provider which may still affect this resource. + +-> **Note** `COPY GRANTS` and `OR REPLACE` are not currently supported. + +-> **Note** `RETURN... [[ NOT ] NULL]` is not currently supported. It will be improved in the following versions of the provider which may still affect this resource. + +-> **Note** `is_aggregate` is not currently supported. It will be improved in the following versions of the provider which may still affect this resource. + +-> **Note** Use of return type `TABLE` is currently limited. It will be improved in the following versions of the provider which may still affect this resource. + +-> **Note** Snowflake is not returning full data type information for arguments which may lead to unexpected plan outputs. Diff suppression for such cases will be improved. + +-> **Note** Snowflake is not returning the default values for arguments so argument's `arg_default_value` external changes cannot be tracked. + +-> **Note** Limit the use of special characters (`.`, `'`, `/`, `"`, `(`, `)`, `[`, `]`, `{`, `}`, ` `) in argument names, stage ids, and secret ids. It's best to limit to only alphanumeric and underscores. There is a lot of parsing of SHOW/DESCRIBE outputs involved and using special characters may limit the possibility to achieve the correct results. + +~> **Required warehouse** This resource may require active warehouse. Please, make sure you have either set a DEFAULT_WAREHOUSE for the user, or specified a warehouse in the provider configuration. + # snowflake_function_python (Resource) Resource used to manage python function objects. For more information, check [function documentation](https://docs.snowflake.com/en/sql-reference/sql/create-function). 
- +## Example Usage + +```terraform +# Minimal +resource "snowflake_function_python" "minimal" { + database = snowflake_database.test.name + schema = snowflake_schema.test.name + name = "my_function_function" + runtime_version = "3.8" + arguments { + arg_data_type = "NUMBER(36, 2)" + arg_name = "x" + } + function_definition = < **Note** Instead of using fully_qualified_name, you can reference objects managed outside Terraform by constructing a correct ID, consult [identifiers guide](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/latest/docs/guides/identifiers#new-computed-fully-qualified-name-field-in-resources). + ## Schema @@ -164,3 +204,14 @@ Read-Only: - `schema_name` (String) - `secrets` (String) - `valid_for_clustering` (Boolean) + +## Import + +Import is supported using the following syntax: + +```shell +terraform import snowflake_function_python.example '""."".""(varchar, varchar, varchar)' +``` + +Note: Snowflake is not returning all information needed to populate the state correctly after import (e.g. data types with attributes like NUMBER(32, 10) are returned as NUMBER, default values for arguments are not returned at all). +Also, `ALTER` for functions is very limited so most of the attributes on this resource are marked as force new. Because of that, in multiple situations plan won't be empty after importing and manual state operations may be required. diff --git a/docs/resources/function_scala.md b/docs/resources/function_scala.md index 01226e5512..b9da549e0f 100644 --- a/docs/resources/function_scala.md +++ b/docs/resources/function_scala.md @@ -5,11 +5,97 @@ description: |- Resource used to manage scala function objects. For more information, check function documentation https://docs.snowflake.com/en/sql-reference/sql/create-function. --- +-> **Note** External changes to `is_secure`, `return_results_behavior`, and `null_input_behavior` are not currently supported. 
They will be handled in the following versions of the provider which may still affect this resource. + +-> **Note** `COPY GRANTS` and `OR REPLACE` are not currently supported. + +-> **Note** `RETURN... [[ NOT ] NULL]` is not currently supported. It will be improved in the following versions of the provider which may still affect this resource. + +-> **Note** Snowflake is not returning full data type information for arguments which may lead to unexpected plan outputs. Diff suppression for such cases will be improved. + +-> **Note** Snowflake is not returning the default values for arguments so argument's `arg_default_value` external changes cannot be tracked. + +-> **Note** Limit the use of special characters (`.`, `'`, `/`, `"`, `(`, `)`, `[`, `]`, `{`, `}`, ` `) in argument names, stage ids, and secret ids. It's best to limit to only alphanumeric and underscores. There is a lot of parsing of SHOW/DESCRIBE outputs involved and using special characters may limit the possibility to achieve the correct results. + +~> **Required warehouse** This resource may require active warehouse. Please, make sure you have either set a DEFAULT_WAREHOUSE for the user, or specified a warehouse in the provider configuration. + # snowflake_function_scala (Resource) Resource used to manage scala function objects. For more information, check [function documentation](https://docs.snowflake.com/en/sql-reference/sql/create-function). 
- +## Example Usage + +```terraform +# Minimal +resource "snowflake_function_scala" "minimal" { + database = snowflake_database.test.name + schema = snowflake_schema.test.name + name = "my_scala_function" + arguments { + arg_data_type = "VARCHAR(100)" + arg_name = "x" + } + function_definition = < **Note** Instead of using fully_qualified_name, you can reference objects managed outside Terraform by constructing a correct ID, consult [identifiers guide](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/latest/docs/guides/identifiers#new-computed-fully-qualified-name-field-in-resources). + ## Schema @@ -173,3 +259,14 @@ Read-Only: - `schema_name` (String) - `secrets` (String) - `valid_for_clustering` (Boolean) + +## Import + +Import is supported using the following syntax: + +```shell +terraform import snowflake_function_scala.example '""."".""(varchar, varchar, varchar)' +``` + +Note: Snowflake is not returning all information needed to populate the state correctly after import (e.g. data types with attributes like NUMBER(32, 10) are returned as NUMBER, default values for arguments are not returned at all). +Also, `ALTER` for functions is very limited so most of the attributes on this resource are marked as force new. Because of that, in multiple situations plan won't be empty after importing and manual state operations may be required. diff --git a/docs/resources/function_sql.md b/docs/resources/function_sql.md index bb4e772742..82efdc328a 100644 --- a/docs/resources/function_sql.md +++ b/docs/resources/function_sql.md @@ -5,11 +5,64 @@ description: |- Resource used to manage sql function objects. For more information, check function documentation https://docs.snowflake.com/en/sql-reference/sql/create-function. --- +-> **Note** External changes to `is_secure` and `return_results_behavior` are not currently supported. They will be handled in the following versions of the provider which may still affect this resource. 
+ +-> **Note** `COPY GRANTS` and `OR REPLACE` are not currently supported. + +-> **Note** `MEMOIZABLE` is not currently supported. It will be improved in the following versions of the provider which may still affect this resource. + +-> **Note** Use of return type `TABLE` is currently limited. It will be improved in the following versions of the provider which may still affect this resource. + +-> **Note** Snowflake is not returning full data type information for arguments which may lead to unexpected plan outputs. Diff suppression for such cases will be improved. + +-> **Note** Snowflake is not returning the default values for arguments so argument's `arg_default_value` external changes cannot be tracked. + +-> **Note** Limit the use of special characters (`.`, `'`, `/`, `"`, `(`, `)`, `[`, `]`, `{`, `}`, ` `) in argument names, stage ids, and secret ids. It's best to limit to only alphanumeric and underscores. There is a lot of parsing of SHOW/DESCRIBE outputs involved and using special characters may limit the possibility to achieve the correct results. + +~> **Required warehouse** This resource may require active warehouse. Please, make sure you have either set a DEFAULT_WAREHOUSE for the user, or specified a warehouse in the provider configuration. + # snowflake_function_sql (Resource) Resource used to manage sql function objects. For more information, check [function documentation](https://docs.snowflake.com/en/sql-reference/sql/create-function). 
- +## Example Usage + +```terraform +# Minimal +resource "snowflake_function_sql" "minimal" { + database = snowflake_database.test.name + schema = snowflake_schema.test.name + name = "my_sql_function" + arguments { + arg_data_type = "FLOAT" + arg_name = "arg_name" + } + function_definition = < **Note** Instead of using fully_qualified_name, you can reference objects managed outside Terraform by constructing a correct ID, consult [identifiers guide](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/latest/docs/guides/identifiers#new-computed-fully-qualified-name-field-in-resources). + ## Schema @@ -138,3 +191,14 @@ Read-Only: - `schema_name` (String) - `secrets` (String) - `valid_for_clustering` (Boolean) + +## Import + +Import is supported using the following syntax: + +```shell +terraform import snowflake_function_sql.example '""."".""(varchar, varchar, varchar)' +``` + +Note: Snowflake is not returning all information needed to populate the state correctly after import (e.g. data types with attributes like NUMBER(32, 10) are returned as NUMBER, default values for arguments are not returned at all). +Also, `ALTER` for functions is very limited so most of the attributes on this resource are marked as force new. Because of that, in multiple situations plan won't be empty after importing and manual state operations may be required. diff --git a/docs/resources/procedure.md b/docs/resources/procedure.md index d8135b31b0..5985cb4dab 100644 --- a/docs/resources/procedure.md +++ b/docs/resources/procedure.md @@ -7,7 +7,7 @@ description: |- # snowflake_procedure (Resource) - +~> **Deprecation** This resource is deprecated and will be removed in a future major version release. Please use snowflake_procedure_java, snowflake_procedure_javascript, snowflake_procedure_python, snowflake_procedure_scala, and snowflake_procedure_sql instead. 
## Example Usage diff --git a/docs/resources/procedure_java.md b/docs/resources/procedure_java.md index edc8047672..b7fae95a96 100644 --- a/docs/resources/procedure_java.md +++ b/docs/resources/procedure_java.md @@ -5,11 +5,92 @@ description: |- Resource used to manage java procedure objects. For more information, check procedure documentation https://docs.snowflake.com/en/sql-reference/sql/create-procedure. --- +-> **Note** External changes to `is_secure` and `null_input_behavior` are not currently supported. They will be handled in the following versions of the provider which may still affect this resource. + +-> **Note** `COPY GRANTS` and `OR REPLACE` are not currently supported. + +-> **Note** `RETURN... [[ NOT ] NULL]` is not currently supported. It will be improved in the following versions of the provider which may still affect this resource. + +-> **Note** Use of return type `TABLE` is currently limited. It will be improved in the following versions of the provider which may still affect this resource. + +-> **Note** Snowflake is not returning full data type information for arguments which may lead to unexpected plan outputs. Diff suppression for such cases will be improved. + +-> **Note** Snowflake is not returning the default values for arguments so argument's `arg_default_value` external changes cannot be tracked. + +-> **Note** Limit the use of special characters (`.`, `'`, `/`, `"`, `(`, `)`, `[`, `]`, `{`, `}`, ` `) in argument names, stage ids, and secret ids. It's best to limit to only alphanumeric and underscores. There is a lot of parsing of SHOW/DESCRIBE outputs involved and using special characters may limit the possibility to achieve the correct results. + +~> **Required warehouse** This resource may require active warehouse. Please, make sure you have either set a DEFAULT_WAREHOUSE for the user, or specified a warehouse in the provider configuration. + # snowflake_procedure_java (Resource) Resource used to manage java procedure objects. 
For more information, check [procedure documentation](https://docs.snowflake.com/en/sql-reference/sql/create-procedure). - +## Example Usage + +```terraform +# basic example +resource "snowflake_procedure_java" "basic" { + database = "Database" + schema = "Schema" + name = "ProcedureName" + arguments { + arg_data_type = "VARCHAR(100)" + arg_name = "x" + } + return_type = "VARCHAR(100)" + handler = "TestFunc.echoVarchar" + procedure_definition = "\n\timport com.snowflake.snowpark_java.*;\n\tclass TestFunc {\n\t\tpublic static String echoVarchar(Session session, String x) {\n\t\t\treturn x;\n\t\t}\n\t}\n" + runtime_version = "11" + snowpark_package = "1.14.0" +} + +# full example +resource "snowflake_procedure_java" "full" { + database = "Database" + schema = "Schema" + name = "ProcedureName" + arguments { + arg_data_type = "VARCHAR(100)" + arg_name = "x" + } + return_type = "VARCHAR(100)" + handler = "TestFunc.echoVarchar" + procedure_definition = "\n\timport com.snowflake.snowpark_java.*;\n\tclass TestFunc {\n\t\tpublic static String echoVarchar(Session session, String x) {\n\t\t\treturn x;\n\t\t}\n\t}\n" + runtime_version = "11" + snowpark_package = "1.14.0" + + comment = "some comment" + execute_as = "CALLER" + target_path { + path_on_stage = "tf-1734028493-OkoTf.jar" + stage_location = snowflake_stage.example.fully_qualified_name + } + packages = ["com.snowflake:telemetry:0.1.0"] + imports { + path_on_stage = "tf-1734028486-OLJpF.jar" + stage_location = "~" + } + imports { + path_on_stage = "tf-1734028491-EMoDC.jar" + stage_location = "~" + } + is_secure = "false" + null_input_behavior = "CALLED ON NULL INPUT" + external_access_integrations = [ + "INTEGRATION_1", "INTEGRATION_2" + ] + secrets { + secret_id = snowflake_secret_with_generic_string.example1.fully_qualified_name + secret_variable_name = "abc" + } + secrets { + secret_id = snowflake_secret_with_generic_string.example2.fully_qualified_name + secret_variable_name = "def" + } +} +``` +-> **Note** Instead 
of using fully_qualified_name, you can reference objects managed outside Terraform by constructing a correct ID, consult [identifiers guide](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/latest/docs/guides/identifiers#new-computed-fully-qualified-name-field-in-resources). + ## Schema @@ -29,7 +110,7 @@ Resource used to manage java procedure objects. For more information, check [pro - `arguments` (Block List) List of the arguments for the procedure. Consult the [docs](https://docs.snowflake.com/en/sql-reference/sql/create-procedure#all-languages) for more details. (see [below for nested schema](#nestedblock--arguments)) - `comment` (String) Specifies a comment for the procedure. - `enable_console_output` (Boolean) Enable stdout/stderr fast path logging for anonyous stored procs. This is a public parameter (similar to LOG_LEVEL). For more information, check [ENABLE_CONSOLE_OUTPUT docs](https://docs.snowflake.com/en/sql-reference/parameters#enable-console-output). -- `execute_as` (String) Specifies whether the stored procedure executes with the privileges of the owner (an “owner’s rights” stored procedure) or with the privileges of the caller (a “caller’s rights” stored procedure). If you execute the statement CREATE PROCEDURE … EXECUTE AS CALLER, then in the future the procedure will execute as a caller’s rights procedure. If you execute CREATE PROCEDURE … EXECUTE AS OWNER, then the procedure will execute as an owner’s rights procedure. For more information, see [Understanding caller’s rights and owner’s rights stored procedures](https://docs.snowflake.com/en/developer-guide/stored-procedure/stored-procedures-rights). Valid values are (case-insensitive): `EXECUTE AS CALLER` | `EXECUTE AS OWNER`. +- `execute_as` (String) Specifies whether the stored procedure executes with the privileges of the owner (an “owner’s rights” stored procedure) or with the privileges of the caller (a “caller’s rights” stored procedure). 
If you execute the statement CREATE PROCEDURE … EXECUTE AS CALLER, then in the future the procedure will execute as a caller’s rights procedure. If you execute CREATE PROCEDURE … EXECUTE AS OWNER, then the procedure will execute as an owner’s rights procedure. For more information, see [Understanding caller’s rights and owner’s rights stored procedures](https://docs.snowflake.com/en/developer-guide/stored-procedure/stored-procedures-rights). Valid values are (case-insensitive): `CALLER` | `OWNER`. - `external_access_integrations` (Set of String) The names of [external access integrations](https://docs.snowflake.com/en/sql-reference/sql/create-external-access-integration) needed in order for this procedure’s handler code to access external networks. An external access integration specifies [network rules](https://docs.snowflake.com/en/sql-reference/sql/create-network-rule) and [secrets](https://docs.snowflake.com/en/sql-reference/sql/create-secret) that specify external locations and credentials (if any) allowed for use by handler code when making requests of an external network, such as an external REST API. - `imports` (Block Set) The location (stage), path, and name of the file(s) to import. You must set the IMPORTS clause to include any files that your stored procedure depends on. If you are writing an in-line stored procedure, you can omit this clause, unless your code depends on classes defined outside the stored procedure or resource files. If you are writing a stored procedure with a staged handler, you must also include a path to the JAR file containing the stored procedure’s handler code. The IMPORTS definition cannot reference variables from arguments that are passed into the stored procedure. Each file in the IMPORTS clause must have a unique name, even if the files are in different subdirectories or different stages. (see [below for nested schema](#nestedblock--imports)) - `is_secure` (String) Specifies that the procedure is secure. 
For more information about secure procedures, see [Protecting Sensitive Information with Secure UDFs and Stored Procedures](https://docs.snowflake.com/en/developer-guide/secure-udf-procedure). Available options are: "true" or "false". When the value is not set in the configuration the provider will put "default" there which means to use the Snowflake default for this value. @@ -170,3 +251,14 @@ Read-Only: - `schema_name` (String) - `secrets` (String) - `valid_for_clustering` (Boolean) + +## Import + +Import is supported using the following syntax: + +```shell +terraform import snowflake_procedure_java.example '""."".""(varchar, varchar, varchar)' +``` + +Note: Snowflake is not returning all information needed to populate the state correctly after import (e.g. data types with attributes like NUMBER(32, 10) are returned as NUMBER, default values for arguments are not returned at all). +Also, `ALTER` for procedures is very limited so most of the attributes on this resource are marked as force new. Because of that, in multiple situations plan won't be empty after importing and manual state operations may be required. diff --git a/docs/resources/procedure_javascript.md b/docs/resources/procedure_javascript.md index a8d0ee9db2..cecdf0a6f2 100644 --- a/docs/resources/procedure_javascript.md +++ b/docs/resources/procedure_javascript.md @@ -5,11 +5,44 @@ description: |- Resource used to manage javascript procedure objects. For more information, check procedure documentation https://docs.snowflake.com/en/sql-reference/sql/create-procedure. --- +-> **Note** External changes to `is_secure` and `null_input_behavior` are not currently supported. They will be handled in the following versions of the provider which may still affect this resource. + +-> **Note** `COPY GRANTS` and `OR REPLACE` are not currently supported. + +-> **Note** `RETURN... [[ NOT ] NULL]` is not currently supported. 
It will be improved in the following versions of the provider which may still affect this resource. + +-> **Note** Use of return type `TABLE` is currently limited. It will be improved in the following versions of the provider which may still affect this resource. + +-> **Note** Snowflake is not returning full data type information for arguments which may lead to unexpected plan outputs. Diff suppression for such cases will be improved. + +-> **Note** Snowflake is not returning the default values for arguments so argument's `arg_default_value` external changes cannot be tracked. + +-> **Note** Limit the use of special characters (`.`, `'`, `/`, `"`, `(`, `)`, `[`, `]`, `{`, `}`, ` `) in argument names, stage ids, and secret ids. It's best to limit to only alphanumeric and underscores. There is a lot of parsing of SHOW/DESCRIBE outputs involved and using special characters may limit the possibility to achieve the correct results. + +~> **Required warehouse** This resource may require active warehouse. Please, make sure you have either set a DEFAULT_WAREHOUSE for the user, or specified a warehouse in the provider configuration. + # snowflake_procedure_javascript (Resource) Resource used to manage javascript procedure objects. For more information, check [procedure documentation](https://docs.snowflake.com/en/sql-reference/sql/create-procedure). 
- +## Example Usage + +```terraform +# basic +resource "snowflake_procedure_javascript" "basic" { + database = "Database" + schema = "Schema" + name = "Name" + arguments { + arg_data_type = "VARCHAR(100)" + arg_name = "x" + } + return_type = "VARCHAR(100)" + procedure_definition = "\n\tif (x \u003c= 0) {\n\t\treturn 1;\n\t} else {\n\t\tvar result = 1;\n\t\tfor (var i = 2; i \u003c= x; i++) {\n\t\t\tresult = result * i;\n\t\t}\n\t\treturn result;\n\t}\n" +} +``` +-> **Note** Instead of using fully_qualified_name, you can reference objects managed outside Terraform by constructing a correct ID, consult [identifiers guide](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/latest/docs/guides/identifiers#new-computed-fully-qualified-name-field-in-resources). + ## Schema @@ -27,7 +60,7 @@ Resource used to manage javascript procedure objects. For more information, chec - `arguments` (Block List) List of the arguments for the procedure. Consult the [docs](https://docs.snowflake.com/en/sql-reference/sql/create-procedure#all-languages) for more details. (see [below for nested schema](#nestedblock--arguments)) - `comment` (String) Specifies a comment for the procedure. - `enable_console_output` (Boolean) Enable stdout/stderr fast path logging for anonyous stored procs. This is a public parameter (similar to LOG_LEVEL). For more information, check [ENABLE_CONSOLE_OUTPUT docs](https://docs.snowflake.com/en/sql-reference/parameters#enable-console-output). -- `execute_as` (String) Specifies whether the stored procedure executes with the privileges of the owner (an “owner’s rights” stored procedure) or with the privileges of the caller (a “caller’s rights” stored procedure). If you execute the statement CREATE PROCEDURE … EXECUTE AS CALLER, then in the future the procedure will execute as a caller’s rights procedure. If you execute CREATE PROCEDURE … EXECUTE AS OWNER, then the procedure will execute as an owner’s rights procedure. 
For more information, see [Understanding caller’s rights and owner’s rights stored procedures](https://docs.snowflake.com/en/developer-guide/stored-procedure/stored-procedures-rights). Valid values are (case-insensitive): `EXECUTE AS CALLER` | `EXECUTE AS OWNER`. +- `execute_as` (String) Specifies whether the stored procedure executes with the privileges of the owner (an “owner’s rights” stored procedure) or with the privileges of the caller (a “caller’s rights” stored procedure). If you execute the statement CREATE PROCEDURE … EXECUTE AS CALLER, then in the future the procedure will execute as a caller’s rights procedure. If you execute CREATE PROCEDURE … EXECUTE AS OWNER, then the procedure will execute as an owner’s rights procedure. For more information, see [Understanding caller’s rights and owner’s rights stored procedures](https://docs.snowflake.com/en/developer-guide/stored-procedure/stored-procedures-rights). Valid values are (case-insensitive): `CALLER` | `OWNER`. - `is_secure` (String) Specifies that the procedure is secure. For more information about secure procedures, see [Protecting Sensitive Information with Secure UDFs and Stored Procedures](https://docs.snowflake.com/en/developer-guide/secure-udf-procedure). Available options are: "true" or "false". When the value is not set in the configuration the provider will put "default" there which means to use the Snowflake default for this value. - `log_level` (String) LOG_LEVEL to use when filtering events For more information, check [LOG_LEVEL docs](https://docs.snowflake.com/en/sql-reference/parameters#log-level). - `metric_level` (String) METRIC_LEVEL value to control whether to emit metrics to Event Table For more information, check [METRIC_LEVEL docs](https://docs.snowflake.com/en/sql-reference/parameters#metric-level). 
@@ -135,3 +168,14 @@ Read-Only: - `schema_name` (String) - `secrets` (String) - `valid_for_clustering` (Boolean) + +## Import + +Import is supported using the following syntax: + +```shell +terraform import snowflake_procedure_javascript.example '""."".""(varchar, varchar, varchar)' +``` + +Note: Snowflake is not returning all information needed to populate the state correctly after import (e.g. data types with attributes like NUMBER(32, 10) are returned as NUMBER, default values for arguments are not returned at all). +Also, `ALTER` for procedures is very limited so most of the attributes on this resource are marked as force new. Because of that, in multiple situations plan won't be empty after importing and manual state operations may be required. diff --git a/docs/resources/procedure_python.md b/docs/resources/procedure_python.md index 8761764754..9a857e15be 100644 --- a/docs/resources/procedure_python.md +++ b/docs/resources/procedure_python.md @@ -5,11 +5,48 @@ description: |- Resource used to manage python procedure objects. For more information, check procedure documentation https://docs.snowflake.com/en/sql-reference/sql/create-procedure. --- +-> **Note** External changes to `is_secure` and `null_input_behavior` are not currently supported. They will be handled in the following versions of the provider which may still affect this resource. + +-> **Note** `COPY GRANTS` and `OR REPLACE` are not currently supported. + +-> **Note** `RETURN... [[ NOT ] NULL]` is not currently supported. It will be improved in the following versions of the provider which may still affect this resource. + +-> **Note** `is_aggregate` is not currently supported. It will be improved in the following versions of the provider which may still affect this resource. + +-> **Note** Use of return type `TABLE` is currently limited. It will be improved in the following versions of the provider which may still affect this resource. 
+ +-> **Note** Snowflake is not returning full data type information for arguments which may lead to unexpected plan outputs. Diff suppression for such cases will be improved. + +-> **Note** Snowflake is not returning the default values for arguments so argument's `arg_default_value` external changes cannot be tracked. + +-> **Note** Limit the use of special characters (`.`, `'`, `/`, `"`, `(`, `)`, `[`, `]`, `{`, `}`, ` `) in argument names, stage ids, and secret ids. It's best to limit to only alphanumeric and underscores. There is a lot of parsing of SHOW/DESCRIBE outputs involved and using special characters may limit the possibility to achieve the correct results. + +~> **Required warehouse** This resource may require active warehouse. Please, make sure you have either set a DEFAULT_WAREHOUSE for the user, or specified a warehouse in the provider configuration. + # snowflake_procedure_python (Resource) Resource used to manage python procedure objects. For more information, check [procedure documentation](https://docs.snowflake.com/en/sql-reference/sql/create-procedure). - +## Example Usage + +```terraform +resource "snowflake_procedure_python" "w" { + database = "Database" + schema = "Schema" + name = "Name" + arguments { + arg_data_type = "VARCHAR(100)" + arg_name = "x" + } + return_type = "VARCHAR(100)" + handler = "echoVarchar" + procedure_definition = "\ndef echoVarchar(x):\n\tresult = \"\"\n\tfor a in range(5):\n\t\tresult += x\n\treturn result\n" + runtime_version = "3.8" + snowpark_package = "1.14.0" +} +``` +-> **Note** Instead of using fully_qualified_name, you can reference objects managed outside Terraform by constructing a correct ID, consult [identifiers guide](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/latest/docs/guides/identifiers#new-computed-fully-qualified-name-field-in-resources). + ## Schema @@ -29,7 +66,7 @@ Resource used to manage python procedure objects. 
For more information, check [p - `arguments` (Block List) List of the arguments for the procedure. Consult the [docs](https://docs.snowflake.com/en/sql-reference/sql/create-procedure#all-languages) for more details. (see [below for nested schema](#nestedblock--arguments)) - `comment` (String) Specifies a comment for the procedure. - `enable_console_output` (Boolean) Enable stdout/stderr fast path logging for anonyous stored procs. This is a public parameter (similar to LOG_LEVEL). For more information, check [ENABLE_CONSOLE_OUTPUT docs](https://docs.snowflake.com/en/sql-reference/parameters#enable-console-output). -- `execute_as` (String) Specifies whether the stored procedure executes with the privileges of the owner (an “owner’s rights” stored procedure) or with the privileges of the caller (a “caller’s rights” stored procedure). If you execute the statement CREATE PROCEDURE … EXECUTE AS CALLER, then in the future the procedure will execute as a caller’s rights procedure. If you execute CREATE PROCEDURE … EXECUTE AS OWNER, then the procedure will execute as an owner’s rights procedure. For more information, see [Understanding caller’s rights and owner’s rights stored procedures](https://docs.snowflake.com/en/developer-guide/stored-procedure/stored-procedures-rights). Valid values are (case-insensitive): `EXECUTE AS CALLER` | `EXECUTE AS OWNER`. +- `execute_as` (String) Specifies whether the stored procedure executes with the privileges of the owner (an “owner’s rights” stored procedure) or with the privileges of the caller (a “caller’s rights” stored procedure). If you execute the statement CREATE PROCEDURE … EXECUTE AS CALLER, then in the future the procedure will execute as a caller’s rights procedure. If you execute CREATE PROCEDURE … EXECUTE AS OWNER, then the procedure will execute as an owner’s rights procedure. 
For more information, see [Understanding caller’s rights and owner’s rights stored procedures](https://docs.snowflake.com/en/developer-guide/stored-procedure/stored-procedures-rights). Valid values are (case-insensitive): `CALLER` | `OWNER`. - `external_access_integrations` (Set of String) The names of [external access integrations](https://docs.snowflake.com/en/sql-reference/sql/create-external-access-integration) needed in order for this procedure’s handler code to access external networks. An external access integration specifies [network rules](https://docs.snowflake.com/en/sql-reference/sql/create-network-rule) and [secrets](https://docs.snowflake.com/en/sql-reference/sql/create-secret) that specify external locations and credentials (if any) allowed for use by handler code when making requests of an external network, such as an external REST API. - `imports` (Block Set) The location (stage), path, and name of the file(s) to import. You must set the IMPORTS clause to include any files that your stored procedure depends on. If you are writing an in-line stored procedure, you can omit this clause, unless your code depends on classes defined outside the stored procedure or resource files. If your stored procedure’s code will be on a stage, you must also include a path to the module file your code is in. The IMPORTS definition cannot reference variables from arguments that are passed into the stored procedure. Each file in the IMPORTS clause must have a unique name, even if the files are in different subdirectories or different stages. (see [below for nested schema](#nestedblock--imports)) - `is_secure` (String) Specifies that the procedure is secure. For more information about secure procedures, see [Protecting Sensitive Information with Secure UDFs and Stored Procedures](https://docs.snowflake.com/en/developer-guide/secure-udf-procedure). Available options are: "true" or "false". 
When the value is not set in the configuration the provider will put "default" there which means to use the Snowflake default for this value. @@ -160,3 +197,14 @@ Read-Only: - `schema_name` (String) - `secrets` (String) - `valid_for_clustering` (Boolean) + +## Import + +Import is supported using the following syntax: + +```shell +terraform import snowflake_procedure_python.example '""."".""(varchar, varchar, varchar)' +``` + +Note: Snowflake is not returning all information needed to populate the state correctly after import (e.g. data types with attributes like NUMBER(32, 10) are returned as NUMBER, default values for arguments are not returned at all). +Also, `ALTER` for functions is very limited so most of the attributes on this resource are marked as force new. Because of that, in multiple situations plan won't be empty after importing and manual state operations may be required. diff --git a/docs/resources/procedure_scala.md b/docs/resources/procedure_scala.md index ef76be8b1d..51d0382cbf 100644 --- a/docs/resources/procedure_scala.md +++ b/docs/resources/procedure_scala.md @@ -5,11 +5,44 @@ description: |- Resource used to manage scala procedure objects. For more information, check procedure documentation https://docs.snowflake.com/en/sql-reference/sql/create-procedure. --- +-> **Note** External changes to `is_secure` and `null_input_behavior` are not currently supported. They will be handled in the following versions of the provider which may still affect this resource. + +-> **Note** `COPY GRANTS` and `OR REPLACE` are not currently supported. + +-> **Note** `RETURN... [[ NOT ] NULL]` is not currently supported. It will be improved in the following versions of the provider which may still affect this resource. + +-> **Note** Snowflake is not returning full data type information for arguments which may lead to unexpected plan outputs. Diff suppression for such cases will be improved. 
+ +-> **Note** Snowflake is not returning the default values for arguments so argument's `arg_default_value` external changes cannot be tracked. + +-> **Note** Limit the use of special characters (`.`, `'`, `/`, `"`, `(`, `)`, `[`, `]`, `{`, `}`, ` `) in argument names, stage ids, and secret ids. It's best to limit to only alphanumeric and underscores. There is a lot of parsing of SHOW/DESCRIBE outputs involved and using special characters may limit the possibility to achieve the correct results. + +~> **Required warehouse** This resource may require active warehouse. Please, make sure you have either set a DEFAULT_WAREHOUSE for the user, or specified a warehouse in the provider configuration. + # snowflake_procedure_scala (Resource) Resource used to manage scala procedure objects. For more information, check [procedure documentation](https://docs.snowflake.com/en/sql-reference/sql/create-procedure). - +## Example Usage + +```terraform +resource "snowflake_procedure_scala" "w" { + database = "Database" + schema = "Schema" + name = "Name" + arguments { + arg_data_type = "VARCHAR(100)" + arg_name = "x" + } + return_type = "VARCHAR(100)" + handler = "TestFunc.echoVarchar" + procedure_definition = "\n\timport com.snowflake.snowpark_java.Session\n\n\tclass TestFunc {\n\t\tdef echoVarchar(session : Session, x : String): String = {\n\t\t\treturn x\n\t\t}\n\t}\n" + runtime_version = "2.12" + snowpark_package = "1.14.0" +} +``` +-> **Note** Instead of using fully_qualified_name, you can reference objects managed outside Terraform by constructing a correct ID, consult [identifiers guide](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/latest/docs/guides/identifiers#new-computed-fully-qualified-name-field-in-resources). + ## Schema @@ -29,7 +62,7 @@ Resource used to manage scala procedure objects. For more information, check [pr - `arguments` (Block List) List of the arguments for the procedure. 
Consult the [docs](https://docs.snowflake.com/en/sql-reference/sql/create-procedure#all-languages) for more details. (see [below for nested schema](#nestedblock--arguments)) - `comment` (String) Specifies a comment for the procedure. - `enable_console_output` (Boolean) Enable stdout/stderr fast path logging for anonyous stored procs. This is a public parameter (similar to LOG_LEVEL). For more information, check [ENABLE_CONSOLE_OUTPUT docs](https://docs.snowflake.com/en/sql-reference/parameters#enable-console-output). -- `execute_as` (String) Specifies whether the stored procedure executes with the privileges of the owner (an “owner’s rights” stored procedure) or with the privileges of the caller (a “caller’s rights” stored procedure). If you execute the statement CREATE PROCEDURE … EXECUTE AS CALLER, then in the future the procedure will execute as a caller’s rights procedure. If you execute CREATE PROCEDURE … EXECUTE AS OWNER, then the procedure will execute as an owner’s rights procedure. For more information, see [Understanding caller’s rights and owner’s rights stored procedures](https://docs.snowflake.com/en/developer-guide/stored-procedure/stored-procedures-rights). Valid values are (case-insensitive): `EXECUTE AS CALLER` | `EXECUTE AS OWNER`. +- `execute_as` (String) Specifies whether the stored procedure executes with the privileges of the owner (an “owner’s rights” stored procedure) or with the privileges of the caller (a “caller’s rights” stored procedure). If you execute the statement CREATE PROCEDURE … EXECUTE AS CALLER, then in the future the procedure will execute as a caller’s rights procedure. If you execute CREATE PROCEDURE … EXECUTE AS OWNER, then the procedure will execute as an owner’s rights procedure. For more information, see [Understanding caller’s rights and owner’s rights stored procedures](https://docs.snowflake.com/en/developer-guide/stored-procedure/stored-procedures-rights). Valid values are (case-insensitive): `CALLER` | `OWNER`. 
- `external_access_integrations` (Set of String) The names of [external access integrations](https://docs.snowflake.com/en/sql-reference/sql/create-external-access-integration) needed in order for this procedure’s handler code to access external networks. An external access integration specifies [network rules](https://docs.snowflake.com/en/sql-reference/sql/create-network-rule) and [secrets](https://docs.snowflake.com/en/sql-reference/sql/create-secret) that specify external locations and credentials (if any) allowed for use by handler code when making requests of an external network, such as an external REST API. - `imports` (Block Set) The location (stage), path, and name of the file(s) to import. You must set the IMPORTS clause to include any files that your stored procedure depends on. If you are writing an in-line stored procedure, you can omit this clause, unless your code depends on classes defined outside the stored procedure or resource files. If you are writing a stored procedure with a staged handler, you must also include a path to the JAR file containing the stored procedure’s handler code. The IMPORTS definition cannot reference variables from arguments that are passed into the stored procedure. Each file in the IMPORTS clause must have a unique name, even if the files are in different subdirectories or different stages. (see [below for nested schema](#nestedblock--imports)) - `is_secure` (String) Specifies that the procedure is secure. For more information about secure procedures, see [Protecting Sensitive Information with Secure UDFs and Stored Procedures](https://docs.snowflake.com/en/developer-guide/secure-udf-procedure). Available options are: "true" or "false". When the value is not set in the configuration the provider will put "default" there which means to use the Snowflake default for this value. 
@@ -170,3 +203,14 @@ Read-Only: - `schema_name` (String) - `secrets` (String) - `valid_for_clustering` (Boolean) + +## Import + +Import is supported using the following syntax: + +```shell +terraform import snowflake_procedure_scala.example '""."".""(varchar, varchar, varchar)' +``` + +Note: Snowflake is not returning all information needed to populate the state correctly after import (e.g. data types with attributes like NUMBER(32, 10) are returned as NUMBER, default values for arguments are not returned at all). +Also, `ALTER` for functions is very limited so most of the attributes on this resource are marked as force new. Because of that, in multiple situations plan won't be empty after importing and manual state operations may be required. diff --git a/docs/resources/procedure_sql.md b/docs/resources/procedure_sql.md index 3dcc0fefb7..3814df934f 100644 --- a/docs/resources/procedure_sql.md +++ b/docs/resources/procedure_sql.md @@ -5,11 +5,41 @@ description: |- Resource used to manage sql procedure objects. For more information, check procedure documentation https://docs.snowflake.com/en/sql-reference/sql/create-procedure. --- +-> **Note** External changes to `is_secure` are not currently supported. They will be handled in the following versions of the provider which may still affect this resource. + +-> **Note** `COPY GRANTS` and `OR REPLACE` are not currently supported. + +-> **Note** Use of return type `TABLE` is currently limited. It will be improved in the following versions of the provider which may still affect this resource. + +-> **Note** Snowflake is not returning full data type information for arguments which may lead to unexpected plan outputs. Diff suppression for such cases will be improved. + +-> **Note** Snowflake is not returning the default values for arguments so argument's `arg_default_value` external changes cannot be tracked. 
+ +-> **Note** Limit the use of special characters (`.`, `'`, `/`, `"`, `(`, `)`, `[`, `]`, `{`, `}`, ` `) in argument names, stage ids, and secret ids. It's best to limit to only alphanumeric and underscores. There is a lot of parsing of SHOW/DESCRIBE outputs involved and using special characters may limit the possibility to achieve the correct results. + +~> **Required warehouse** This resource may require active warehouse. Please, make sure you have either set a DEFAULT_WAREHOUSE for the user, or specified a warehouse in the provider configuration. + # snowflake_procedure_sql (Resource) Resource used to manage sql procedure objects. For more information, check [procedure documentation](https://docs.snowflake.com/en/sql-reference/sql/create-procedure). - +## Example Usage + +```terraform +resource "snowflake_procedure_sql" "w" { + database = "Database" + schema = "Schema" + name = "Name" + arguments { + arg_data_type = "VARCHAR(100)" + arg_name = "x" + } + return_type = "VARCHAR(100)" + procedure_definition = "\nBEGIN\n RETURN message;\nEND;\n" +} +``` +-> **Note** Instead of using fully_qualified_name, you can reference objects managed outside Terraform by constructing a correct ID, consult [identifiers guide](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/latest/docs/guides/identifiers#new-computed-fully-qualified-name-field-in-resources). + ## Schema @@ -27,7 +57,7 @@ Resource used to manage sql procedure objects. For more information, check [proc - `arguments` (Block List) List of the arguments for the procedure. Consult the [docs](https://docs.snowflake.com/en/sql-reference/sql/create-procedure#all-languages) for more details. (see [below for nested schema](#nestedblock--arguments)) - `comment` (String) Specifies a comment for the procedure. - `enable_console_output` (Boolean) Enable stdout/stderr fast path logging for anonyous stored procs. This is a public parameter (similar to LOG_LEVEL). 
For more information, check [ENABLE_CONSOLE_OUTPUT docs](https://docs.snowflake.com/en/sql-reference/parameters#enable-console-output). -- `execute_as` (String) Specifies whether the stored procedure executes with the privileges of the owner (an “owner’s rights” stored procedure) or with the privileges of the caller (a “caller’s rights” stored procedure). If you execute the statement CREATE PROCEDURE … EXECUTE AS CALLER, then in the future the procedure will execute as a caller’s rights procedure. If you execute CREATE PROCEDURE … EXECUTE AS OWNER, then the procedure will execute as an owner’s rights procedure. For more information, see [Understanding caller’s rights and owner’s rights stored procedures](https://docs.snowflake.com/en/developer-guide/stored-procedure/stored-procedures-rights). Valid values are (case-insensitive): `EXECUTE AS CALLER` | `EXECUTE AS OWNER`. +- `execute_as` (String) Specifies whether the stored procedure executes with the privileges of the owner (an “owner’s rights” stored procedure) or with the privileges of the caller (a “caller’s rights” stored procedure). If you execute the statement CREATE PROCEDURE … EXECUTE AS CALLER, then in the future the procedure will execute as a caller’s rights procedure. If you execute CREATE PROCEDURE … EXECUTE AS OWNER, then the procedure will execute as an owner’s rights procedure. For more information, see [Understanding caller’s rights and owner’s rights stored procedures](https://docs.snowflake.com/en/developer-guide/stored-procedure/stored-procedures-rights). Valid values are (case-insensitive): `CALLER` | `OWNER`. - `is_secure` (String) Specifies that the procedure is secure. For more information about secure procedures, see [Protecting Sensitive Information with Secure UDFs and Stored Procedures](https://docs.snowflake.com/en/developer-guide/secure-udf-procedure). Available options are: "true" or "false". 
When the value is not set in the configuration the provider will put "default" there which means to use the Snowflake default for this value. - `log_level` (String) LOG_LEVEL to use when filtering events For more information, check [LOG_LEVEL docs](https://docs.snowflake.com/en/sql-reference/parameters#log-level). - `metric_level` (String) METRIC_LEVEL value to control whether to emit metrics to Event Table For more information, check [METRIC_LEVEL docs](https://docs.snowflake.com/en/sql-reference/parameters#metric-level). @@ -135,3 +165,14 @@ Read-Only: - `schema_name` (String) - `secrets` (String) - `valid_for_clustering` (Boolean) + +## Import + +Import is supported using the following syntax: + +```shell +terraform import snowflake_procedure_sql.example '""."".""(varchar, varchar, varchar)' +``` + +Note: Snowflake is not returning all information needed to populate the state correctly after import (e.g. data types with attributes like NUMBER(32, 10) are returned as NUMBER, default values for arguments are not returned at all). +Also, `ALTER` for functions is very limited so most of the attributes on this resource are marked as force new. Because of that, in multiple situations plan won't be empty after importing and manual state operations may be required. 
diff --git a/examples/additional/deprecated_resources.MD b/examples/additional/deprecated_resources.MD index b0a8941854..0f6a49a421 100644 --- a/examples/additional/deprecated_resources.MD +++ b/examples/additional/deprecated_resources.MD @@ -1,7 +1,9 @@ ## Currently deprecated resources - [snowflake_database_old](./docs/resources/database_old) +- [snowflake_function](./docs/resources/function) - [snowflake_oauth_integration](./docs/resources/oauth_integration) +- [snowflake_procedure](./docs/resources/procedure) - [snowflake_role](./docs/resources/role) - use [snowflake_account_role](./docs/resources/account_role) instead - [snowflake_saml_integration](./docs/resources/saml_integration) - use [snowflake_saml2_integration](./docs/resources/saml2_integration) instead - [snowflake_stream](./docs/resources/stream) diff --git a/examples/resources/snowflake_function_java/import.sh b/examples/resources/snowflake_function_java/import.sh new file mode 100644 index 0000000000..0b92dc8ebb --- /dev/null +++ b/examples/resources/snowflake_function_java/import.sh @@ -0,0 +1 @@ +terraform import snowflake_function_java.example '""."".""(varchar, varchar, varchar)' diff --git a/examples/resources/snowflake_function_java/resource.tf b/examples/resources/snowflake_function_java/resource.tf new file mode 100644 index 0000000000..a34d6af477 --- /dev/null +++ b/examples/resources/snowflake_function_java/resource.tf @@ -0,0 +1,12 @@ +resource "snowflake_function_java" "w" { + database = "Database" + schema = "Schema" + name = "Name" + arguments { + arg_data_type = "VARCHAR(100)" + arg_name = "x" + } + return_type = "VARCHAR(100)" + handler = "TestFunc.echoVarchar" + function_definition = "\n\tclass TestFunc {\n\t\tpublic static String echoVarchar(String x) {\n\t\t\treturn x;\n\t\t}\n\t}\n" +} diff --git a/examples/resources/snowflake_function_javascript/import.sh b/examples/resources/snowflake_function_javascript/import.sh new file mode 100644 index 0000000000..12b84c4fe0 --- /dev/null 
+++ b/examples/resources/snowflake_function_javascript/import.sh @@ -0,0 +1 @@ +terraform import snowflake_function_javascript.example '""."".""(varchar, varchar, varchar)' diff --git a/examples/resources/snowflake_function_javascript/resource.tf b/examples/resources/snowflake_function_javascript/resource.tf new file mode 100644 index 0000000000..9cff28c6e0 --- /dev/null +++ b/examples/resources/snowflake_function_javascript/resource.tf @@ -0,0 +1,14 @@ +# Minimal +resource "snowflake_function_javascript" "minimal" { + database = snowflake_database.test.name + schema = snowflake_schema.test.name + name = "my_javascript_function" + arguments { + arg_data_type = "VARIANT" + arg_name = "x" + } + function_definition = <"."".""(varchar, varchar, varchar)' + diff --git a/examples/resources/snowflake_function_python/resource.tf b/examples/resources/snowflake_function_python/resource.tf new file mode 100644 index 0000000000..50a450a239 --- /dev/null +++ b/examples/resources/snowflake_function_python/resource.tf @@ -0,0 +1,17 @@ +# Minimal +resource "snowflake_function_python" "minimal" { + database = snowflake_database.test.name + schema = snowflake_schema.test.name + name = "my_function_function" + runtime_version = "3.8" + arguments { + arg_data_type = "NUMBER(36, 2)" + arg_name = "x" + } + function_definition = <"."".""(varchar, varchar, varchar)' diff --git a/examples/resources/snowflake_function_scala/resource.tf b/examples/resources/snowflake_function_scala/resource.tf new file mode 100644 index 0000000000..0fa07b79ff --- /dev/null +++ b/examples/resources/snowflake_function_scala/resource.tf @@ -0,0 +1,67 @@ +# Minimal +resource "snowflake_function_scala" "minimal" { + database = snowflake_database.test.name + schema = snowflake_schema.test.name + name = "my_scala_function" + arguments { + arg_data_type = "VARCHAR(100)" + arg_name = "x" + } + function_definition = <"."".""(varchar, varchar, varchar)' diff --git a/examples/resources/snowflake_function_sql/resource.tf 
b/examples/resources/snowflake_function_sql/resource.tf new file mode 100644 index 0000000000..16a91889dd --- /dev/null +++ b/examples/resources/snowflake_function_sql/resource.tf @@ -0,0 +1,32 @@ +# Minimal +resource "snowflake_function_sql" "minimal" { + database = snowflake_database.test.name + schema = snowflake_schema.test.name + name = "my_sql_function" + arguments { + arg_data_type = "FLOAT" + arg_name = "arg_name" + } + function_definition = <"."".""(varchar, varchar, varchar)' diff --git a/examples/resources/snowflake_procedure_java/resource.tf b/examples/resources/snowflake_procedure_java/resource.tf new file mode 100644 index 0000000000..6f3bc051b9 --- /dev/null +++ b/examples/resources/snowflake_procedure_java/resource.tf @@ -0,0 +1,60 @@ +# basic example +resource "snowflake_procedure_java" "basic" { + database = "Database" + schema = "Schema" + name = "ProcedureName" + arguments { + arg_data_type = "VARCHAR(100)" + arg_name = "x" + } + return_type = "VARCHAR(100)" + handler = "TestFunc.echoVarchar" + procedure_definition = "\n\timport com.snowflake.snowpark_java.*;\n\tclass TestFunc {\n\t\tpublic static String echoVarchar(Session session, String x) {\n\t\t\treturn x;\n\t\t}\n\t}\n" + runtime_version = "11" + snowpark_package = "1.14.0" +} + +# full example +resource "snowflake_procedure_java" "full" { + database = "Database" + schema = "Schema" + name = "ProcedureName" + arguments { + arg_data_type = "VARCHAR(100)" + arg_name = "x" + } + return_type = "VARCHAR(100)" + handler = "TestFunc.echoVarchar" + procedure_definition = "\n\timport com.snowflake.snowpark_java.*;\n\tclass TestFunc {\n\t\tpublic static String echoVarchar(Session session, String x) {\n\t\t\treturn x;\n\t\t}\n\t}\n" + runtime_version = "11" + snowpark_package = "1.14.0" + + comment = "some comment" + execute_as = "CALLER" + target_path { + path_on_stage = "tf-1734028493-OkoTf.jar" + stage_location = snowflake_stage.example.fully_qualified_name + } + packages = 
["com.snowflake:telemetry:0.1.0"] + imports { + path_on_stage = "tf-1734028486-OLJpF.jar" + stage_location = "~" + } + imports { + path_on_stage = "tf-1734028491-EMoDC.jar" + stage_location = "~" + } + is_secure = "false" + null_input_behavior = "CALLED ON NULL INPUT" + external_access_integrations = [ + "INTEGRATION_1", "INTEGRATION_2" + ] + secrets { + secret_id = snowflake_secret_with_generic_string.example1.fully_qualified_name + secret_variable_name = "abc" + } + secrets { + secret_id = snowflake_secret_with_generic_string.example2.fully_qualified_name + secret_variable_name = "def" + } +} diff --git a/examples/resources/snowflake_procedure_javascript/import.sh b/examples/resources/snowflake_procedure_javascript/import.sh new file mode 100644 index 0000000000..15833413b7 --- /dev/null +++ b/examples/resources/snowflake_procedure_javascript/import.sh @@ -0,0 +1 @@ +terraform import snowflake_procedure_javascript.example '""."".""(varchar, varchar, varchar)' diff --git a/examples/resources/snowflake_procedure_javascript/resource.tf b/examples/resources/snowflake_procedure_javascript/resource.tf new file mode 100644 index 0000000000..5ad3f42b62 --- /dev/null +++ b/examples/resources/snowflake_procedure_javascript/resource.tf @@ -0,0 +1,12 @@ +# basic +resource "snowflake_procedure_javascript" "basic" { + database = "Database" + schema = "Schema" + name = "Name" + arguments { + arg_data_type = "VARCHAR(100)" + arg_name = "x" + } + return_type = "VARCHAR(100)" + procedure_definition = "\n\tif (x \u003c= 0) {\n\t\treturn 1;\n\t} else {\n\t\tvar result = 1;\n\t\tfor (var i = 2; i \u003c= x; i++) {\n\t\t\tresult = result * i;\n\t\t}\n\t\treturn result;\n\t}\n" +} diff --git a/examples/resources/snowflake_procedure_python/import.sh b/examples/resources/snowflake_procedure_python/import.sh new file mode 100644 index 0000000000..ccc897486f --- /dev/null +++ b/examples/resources/snowflake_procedure_python/import.sh @@ -0,0 +1 @@ +terraform import 
snowflake_procedure_python.example '""."".""(varchar, varchar, varchar)' diff --git a/examples/resources/snowflake_procedure_python/resource.tf b/examples/resources/snowflake_procedure_python/resource.tf new file mode 100644 index 0000000000..3f4987d36b --- /dev/null +++ b/examples/resources/snowflake_procedure_python/resource.tf @@ -0,0 +1,14 @@ +resource "snowflake_procedure_python" "w" { + database = "Database" + schema = "Schema" + name = "Name" + arguments { + arg_data_type = "VARCHAR(100)" + arg_name = "x" + } + return_type = "VARCHAR(100)" + handler = "echoVarchar" + procedure_definition = "\ndef echoVarchar(x):\n\tresult = \"\"\n\tfor a in range(5):\n\t\tresult += x\n\treturn result\n" + runtime_version = "3.8" + snowpark_package = "1.14.0" +} diff --git a/examples/resources/snowflake_procedure_scala/import.sh b/examples/resources/snowflake_procedure_scala/import.sh new file mode 100644 index 0000000000..6efc556919 --- /dev/null +++ b/examples/resources/snowflake_procedure_scala/import.sh @@ -0,0 +1 @@ +terraform import snowflake_procedure_scala.example '""."".""(varchar, varchar, varchar)' diff --git a/examples/resources/snowflake_procedure_scala/resource.tf b/examples/resources/snowflake_procedure_scala/resource.tf new file mode 100644 index 0000000000..889473e122 --- /dev/null +++ b/examples/resources/snowflake_procedure_scala/resource.tf @@ -0,0 +1,14 @@ +resource "snowflake_procedure_scala" "w" { + database = "Database" + schema = "Schema" + name = "Name" + arguments { + arg_data_type = "VARCHAR(100)" + arg_name = "x" + } + return_type = "VARCHAR(100)" + handler = "TestFunc.echoVarchar" + procedure_definition = "\n\timport com.snowflake.snowpark_java.Session\n\n\tclass TestFunc {\n\t\tdef echoVarchar(session : Session, x : String): String = {\n\t\t\treturn x\n\t\t}\n\t}\n" + runtime_version = "2.12" + snowpark_package = "1.14.0" +} diff --git a/examples/resources/snowflake_procedure_sql/import.sh b/examples/resources/snowflake_procedure_sql/import.sh 
new file mode 100644 index 0000000000..f976bd6fed --- /dev/null +++ b/examples/resources/snowflake_procedure_sql/import.sh @@ -0,0 +1 @@ +terraform import snowflake_procedure_sql.example '""."".""(varchar, varchar, varchar)' diff --git a/examples/resources/snowflake_procedure_sql/resource.tf b/examples/resources/snowflake_procedure_sql/resource.tf new file mode 100644 index 0000000000..77258c73d9 --- /dev/null +++ b/examples/resources/snowflake_procedure_sql/resource.tf @@ -0,0 +1,11 @@ +resource "snowflake_procedure_sql" "w" { + database = "Database" + schema = "Schema" + name = "Name" + arguments { + arg_data_type = "VARCHAR(100)" + arg_name = "x" + } + return_type = "VARCHAR(100)" + procedure_definition = "\nBEGIN\n RETURN message;\nEND;\n" +} diff --git a/pkg/acceptance/bettertestspoc/assert/resourceassert/procedure_python_resource_ext.go b/pkg/acceptance/bettertestspoc/assert/resourceassert/procedure_python_resource_ext.go new file mode 100644 index 0000000000..5890820342 --- /dev/null +++ b/pkg/acceptance/bettertestspoc/assert/resourceassert/procedure_python_resource_ext.go @@ -0,0 +1,12 @@ +package resourceassert + +import ( + "strconv" + + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/bettertestspoc/assert" +) + +func (f *ProcedurePythonResourceAssert) HasImportsLength(len int) *ProcedurePythonResourceAssert { + f.AddAssertion(assert.ValueSet("imports.#", strconv.FormatInt(int64(len), 10))) + return f +} diff --git a/pkg/acceptance/bettertestspoc/assert/resourceassert/procedure_scala_resource_ext.go b/pkg/acceptance/bettertestspoc/assert/resourceassert/procedure_scala_resource_ext.go new file mode 100644 index 0000000000..86473eaad2 --- /dev/null +++ b/pkg/acceptance/bettertestspoc/assert/resourceassert/procedure_scala_resource_ext.go @@ -0,0 +1,17 @@ +package resourceassert + +import ( + "strconv" + + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/bettertestspoc/assert" +) + +func (f 
*ProcedureScalaResourceAssert) HasImportsLength(len int) *ProcedureScalaResourceAssert { + f.AddAssertion(assert.ValueSet("imports.#", strconv.FormatInt(int64(len), 10))) + return f +} + +func (f *ProcedureScalaResourceAssert) HasTargetPathEmpty() *ProcedureScalaResourceAssert { + f.AddAssertion(assert.ValueSet("target_path.#", "0")) + return f +} diff --git a/pkg/acceptance/bettertestspoc/config/model/procedure_javascript_model_ext.go b/pkg/acceptance/bettertestspoc/config/model/procedure_javascript_model_ext.go index 548259aa97..4952c06d38 100644 --- a/pkg/acceptance/bettertestspoc/config/model/procedure_javascript_model_ext.go +++ b/pkg/acceptance/bettertestspoc/config/model/procedure_javascript_model_ext.go @@ -2,6 +2,11 @@ package model import ( "encoding/json" + + tfconfig "github.com/hashicorp/terraform-plugin-testing/config" + + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk/datatypes" ) func (f *ProcedureJavascriptModel) MarshalJSON() ([]byte, error) { @@ -14,3 +19,23 @@ func (f *ProcedureJavascriptModel) MarshalJSON() ([]byte, error) { DependsOn: f.DependsOn(), }) } + +func ProcedureJavascriptBasicInline( + resourceName string, + id sdk.SchemaObjectIdentifierWithArguments, + returnType datatypes.DataType, + procedureDefinition string, +) *ProcedureJavascriptModel { + return ProcedureJavascript(resourceName, id.DatabaseName(), id.Name(), procedureDefinition, returnType.ToSql(), id.SchemaName()) +} + +func (f *ProcedureJavascriptModel) WithArgument(argName string, argDataType datatypes.DataType) *ProcedureJavascriptModel { + return f.WithArgumentsValue( + tfconfig.ObjectVariable( + map[string]tfconfig.Variable{ + "arg_name": tfconfig.StringVariable(argName), + "arg_data_type": tfconfig.StringVariable(argDataType.ToSql()), + }, + ), + ) +} diff --git a/pkg/acceptance/bettertestspoc/config/model/procedure_python_model_ext.go 
b/pkg/acceptance/bettertestspoc/config/model/procedure_python_model_ext.go index 1bff75bcc2..3ebd87eca7 100644 --- a/pkg/acceptance/bettertestspoc/config/model/procedure_python_model_ext.go +++ b/pkg/acceptance/bettertestspoc/config/model/procedure_python_model_ext.go @@ -2,6 +2,12 @@ package model import ( "encoding/json" + + tfconfig "github.com/hashicorp/terraform-plugin-testing/config" + + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/internal/collections" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk/datatypes" ) func (f *ProcedurePythonModel) MarshalJSON() ([]byte, error) { @@ -14,3 +20,74 @@ func (f *ProcedurePythonModel) MarshalJSON() ([]byte, error) { DependsOn: f.DependsOn(), }) } + +func ProcedurePythonBasicInline( + resourceName string, + id sdk.SchemaObjectIdentifierWithArguments, + returnType datatypes.DataType, + handler string, + procedureDefinition string, +) *ProcedurePythonModel { + return ProcedurePython(resourceName, id.DatabaseName(), handler, id.Name(), returnType.ToSql(), "3.8", id.SchemaName(), "1.14.0"). 
+ WithProcedureDefinition(procedureDefinition) +} + +func (f *ProcedurePythonModel) WithArgument(argName string, argDataType datatypes.DataType) *ProcedurePythonModel { + return f.WithArgumentsValue( + tfconfig.ObjectVariable( + map[string]tfconfig.Variable{ + "arg_name": tfconfig.StringVariable(argName), + "arg_data_type": tfconfig.StringVariable(argDataType.ToSql()), + }, + ), + ) +} + +func (f *ProcedurePythonModel) WithImports(imports ...sdk.NormalizedPath) *ProcedurePythonModel { + return f.WithImportsValue( + tfconfig.SetVariable( + collections.Map(imports, func(imp sdk.NormalizedPath) tfconfig.Variable { + return tfconfig.ObjectVariable( + map[string]tfconfig.Variable{ + "stage_location": tfconfig.StringVariable(imp.StageLocation), + "path_on_stage": tfconfig.StringVariable(imp.PathOnStage), + }, + ) + })..., + ), + ) +} + +func (f *ProcedurePythonModel) WithPackages(pkgs ...string) *ProcedurePythonModel { + return f.WithPackagesValue( + tfconfig.SetVariable( + collections.Map(pkgs, func(pkg string) tfconfig.Variable { return tfconfig.StringVariable(pkg) })..., + ), + ) +} + +func (f *ProcedurePythonModel) WithExternalAccessIntegrations(ids ...sdk.AccountObjectIdentifier) *ProcedurePythonModel { + return f.WithExternalAccessIntegrationsValue( + tfconfig.SetVariable( + collections.Map(ids, func(id sdk.AccountObjectIdentifier) tfconfig.Variable { return tfconfig.StringVariable(id.Name()) })..., + ), + ) +} + +func (f *ProcedurePythonModel) WithSecrets(secrets map[string]sdk.SchemaObjectIdentifier) *ProcedurePythonModel { + objects := make([]tfconfig.Variable, 0) + for k, v := range secrets { + objects = append(objects, tfconfig.ObjectVariable( + map[string]tfconfig.Variable{ + "secret_variable_name": tfconfig.StringVariable(k), + "secret_id": tfconfig.StringVariable(v.FullyQualifiedName()), + }, + )) + } + + return f.WithSecretsValue( + tfconfig.SetVariable( + objects..., + ), + ) +} diff --git 
a/pkg/acceptance/bettertestspoc/config/model/procedure_scala_model_ext.go b/pkg/acceptance/bettertestspoc/config/model/procedure_scala_model_ext.go index b7434a4250..c78de9dbcc 100644 --- a/pkg/acceptance/bettertestspoc/config/model/procedure_scala_model_ext.go +++ b/pkg/acceptance/bettertestspoc/config/model/procedure_scala_model_ext.go @@ -2,6 +2,12 @@ package model import ( "encoding/json" + + tfconfig "github.com/hashicorp/terraform-plugin-testing/config" + + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/internal/collections" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk/datatypes" ) func (f *ProcedureScalaModel) MarshalJSON() ([]byte, error) { @@ -14,3 +20,120 @@ func (f *ProcedureScalaModel) MarshalJSON() ([]byte, error) { DependsOn: f.DependsOn(), }) } + +func ProcedureScalaBasicInline( + resourceName string, + id sdk.SchemaObjectIdentifierWithArguments, + returnType datatypes.DataType, + handler string, + procedureDefinition string, +) *ProcedureScalaModel { + return ProcedureScala(resourceName, id.DatabaseName(), handler, id.Name(), returnType.ToSql(), "2.12", id.SchemaName(), "1.14.0"). + WithProcedureDefinition(procedureDefinition) +} + +func ProcedureScalaBasicStaged( + resourceName string, + id sdk.SchemaObjectIdentifierWithArguments, + returnType datatypes.DataType, + handler string, + stageLocation string, + pathOnStage string, +) *ProcedureScalaModel { + return ProcedureScala(resourceName, id.DatabaseName(), handler, id.Name(), returnType.ToSql(), "2.12", id.SchemaName(), "1.14.0"). 
+ WithImport(stageLocation, pathOnStage) +} + +func (f *ProcedureScalaModel) WithArgument(argName string, argDataType datatypes.DataType) *ProcedureScalaModel { + return f.WithArgumentsValue( + tfconfig.ObjectVariable( + map[string]tfconfig.Variable{ + "arg_name": tfconfig.StringVariable(argName), + "arg_data_type": tfconfig.StringVariable(argDataType.ToSql()), + }, + ), + ) +} + +func (f *ProcedureScalaModel) WithArgumentWithDefaultValue(argName string, argDataType datatypes.DataType, value string) *ProcedureScalaModel { + return f.WithArgumentsValue( + tfconfig.ObjectVariable( + map[string]tfconfig.Variable{ + "arg_name": tfconfig.StringVariable(argName), + "arg_data_type": tfconfig.StringVariable(argDataType.ToSql()), + "arg_default_value": tfconfig.StringVariable(value), + }, + ), + ) +} + +func (f *ProcedureScalaModel) WithImport(stageLocation string, pathOnStage string) *ProcedureScalaModel { + return f.WithImportsValue( + tfconfig.ObjectVariable( + map[string]tfconfig.Variable{ + "stage_location": tfconfig.StringVariable(stageLocation), + "path_on_stage": tfconfig.StringVariable(pathOnStage), + }, + ), + ) +} + +func (f *ProcedureScalaModel) WithImports(imports ...sdk.NormalizedPath) *ProcedureScalaModel { + return f.WithImportsValue( + tfconfig.SetVariable( + collections.Map(imports, func(imp sdk.NormalizedPath) tfconfig.Variable { + return tfconfig.ObjectVariable( + map[string]tfconfig.Variable{ + "stage_location": tfconfig.StringVariable(imp.StageLocation), + "path_on_stage": tfconfig.StringVariable(imp.PathOnStage), + }, + ) + })..., + ), + ) +} + +func (f *ProcedureScalaModel) WithPackages(pkgs ...string) *ProcedureScalaModel { + return f.WithPackagesValue( + tfconfig.SetVariable( + collections.Map(pkgs, func(pkg string) tfconfig.Variable { return tfconfig.StringVariable(pkg) })..., + ), + ) +} + +func (f *ProcedureScalaModel) WithExternalAccessIntegrations(ids ...sdk.AccountObjectIdentifier) *ProcedureScalaModel { + return 
f.WithExternalAccessIntegrationsValue( + tfconfig.SetVariable( + collections.Map(ids, func(id sdk.AccountObjectIdentifier) tfconfig.Variable { return tfconfig.StringVariable(id.Name()) })..., + ), + ) +} + +func (f *ProcedureScalaModel) WithSecrets(secrets map[string]sdk.SchemaObjectIdentifier) *ProcedureScalaModel { + objects := make([]tfconfig.Variable, 0) + for k, v := range secrets { + objects = append(objects, tfconfig.ObjectVariable( + map[string]tfconfig.Variable{ + "secret_variable_name": tfconfig.StringVariable(k), + "secret_id": tfconfig.StringVariable(v.FullyQualifiedName()), + }, + )) + } + + return f.WithSecretsValue( + tfconfig.SetVariable( + objects..., + ), + ) +} + +func (f *ProcedureScalaModel) WithTargetPathParts(stageLocation string, pathOnStage string) *ProcedureScalaModel { + return f.WithTargetPathValue( + tfconfig.ObjectVariable( + map[string]tfconfig.Variable{ + "stage_location": tfconfig.StringVariable(stageLocation), + "path_on_stage": tfconfig.StringVariable(pathOnStage), + }, + ), + ) +} diff --git a/pkg/acceptance/bettertestspoc/config/model/procedure_sql_model_ext.go b/pkg/acceptance/bettertestspoc/config/model/procedure_sql_model_ext.go index 8b5dc3afbf..0cb2a2bf8c 100644 --- a/pkg/acceptance/bettertestspoc/config/model/procedure_sql_model_ext.go +++ b/pkg/acceptance/bettertestspoc/config/model/procedure_sql_model_ext.go @@ -2,6 +2,11 @@ package model import ( "encoding/json" + + tfconfig "github.com/hashicorp/terraform-plugin-testing/config" + + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk/datatypes" ) func (f *ProcedureSqlModel) MarshalJSON() ([]byte, error) { @@ -14,3 +19,23 @@ func (f *ProcedureSqlModel) MarshalJSON() ([]byte, error) { DependsOn: f.DependsOn(), }) } + +func ProcedureSqlBasicInline( + resourceName string, + id sdk.SchemaObjectIdentifierWithArguments, + returnType datatypes.DataType, + procedureDefinition string, +) 
*ProcedureSqlModel { + return ProcedureSql(resourceName, id.DatabaseName(), id.Name(), procedureDefinition, returnType.ToSql(), id.SchemaName()) +} + +func (f *ProcedureSqlModel) WithArgument(argName string, argDataType datatypes.DataType) *ProcedureSqlModel { + return f.WithArgumentsValue( + tfconfig.ObjectVariable( + map[string]tfconfig.Variable{ + "arg_name": tfconfig.StringVariable(argName), + "arg_data_type": tfconfig.StringVariable(argDataType.ToSql()), + }, + ), + ) +} diff --git a/pkg/acceptance/helpers/procedure_client.go b/pkg/acceptance/helpers/procedure_client.go index 7e77e37782..b35f9239df 100644 --- a/pkg/acceptance/helpers/procedure_client.go +++ b/pkg/acceptance/helpers/procedure_client.go @@ -245,3 +245,13 @@ BEGIN END; ` } + +func (c *ProcedureClient) SampleSqlDefinitionWithArgument(t *testing.T) string { + t.Helper() + + return ` +BEGIN + RETURN message; +END; +` +} diff --git a/pkg/resources/function.go b/pkg/resources/function.go index 69e972f7d5..663dcf5693 100644 --- a/pkg/resources/function.go +++ b/pkg/resources/function.go @@ -173,6 +173,8 @@ func Function() *schema.Resource { UpdateContext: TrackingUpdateWrapper(resources.Function, UpdateContextFunction), DeleteContext: TrackingDeleteWrapper(resources.Function, DeleteFunction), + DeprecationMessage: "This resource is deprecated and will be removed in a future major version release. Please use snowflake_function_java, snowflake_function_javascript, snowflake_function_python, snowflake_function_scala, and snowflake_function_sql instead.", + CustomizeDiff: TrackingCustomDiffWrapper(resources.Function, customdiff.All( // TODO(SNOW-1348103): add `arguments` to ComputedIfAnyAttributeChanged. This can't be done now because this function compares values without diff suppress. 
ComputedIfAnyAttributeChanged(functionSchema, FullyQualifiedNameAttributeName, "name"), diff --git a/pkg/resources/procedure.go b/pkg/resources/procedure.go index fa986ae8f5..2c567902f8 100644 --- a/pkg/resources/procedure.go +++ b/pkg/resources/procedure.go @@ -188,6 +188,8 @@ func Procedure() *schema.Resource { UpdateContext: TrackingUpdateWrapper(resources.Procedure, UpdateContextProcedure), DeleteContext: TrackingDeleteWrapper(resources.Procedure, DeleteProcedure), + DeprecationMessage: "This resource is deprecated and will be removed in a future major version release. Please use snowflake_procedure_java, snowflake_procedure_javascript, snowflake_procedure_python, snowflake_procedure_scala, and snowflake_procedure_sql instead.", + // TODO(SNOW-1348106): add `arguments` to ComputedIfAnyAttributeChanged for FullyQualifiedNameAttributeName. // This can't be done now because this function compares values without diff suppress. CustomizeDiff: TrackingCustomDiffWrapper(resources.Procedure, customdiff.All( diff --git a/pkg/resources/procedure_commons.go b/pkg/resources/procedure_commons.go index 83d79963a5..1cd819647b 100644 --- a/pkg/resources/procedure_commons.go +++ b/pkg/resources/procedure_commons.go @@ -467,12 +467,33 @@ func UpdateProcedure(language string, readFunc func(ctx context.Context, d *sche if !reflect.DeepEqual(*setRequest, *sdk.NewProcedureSetRequest()) { err := client.Procedures.Alter(ctx, sdk.NewAlterProcedureRequest(id).WithSet(*setRequest)) if err != nil { + d.Partial(true) return diag.FromErr(err) } } if !reflect.DeepEqual(*unsetRequest, *sdk.NewProcedureUnsetRequest()) { err := client.Procedures.Alter(ctx, sdk.NewAlterProcedureRequest(id).WithUnset(*unsetRequest)) if err != nil { + d.Partial(true) + return diag.FromErr(err) + } + } + + // has to be handled separately + if d.HasChange("execute_as") { + var value sdk.ExecuteAs + if v, ok := d.GetOk("execute_as"); ok { + value, err = sdk.ToExecuteAs(v.(string)) + if err != nil { + return 
diag.FromErr(err) + } + } else { + // there is no UNSET, so we need to set it manually + value = sdk.ExecuteAsOwner + } + err = client.Procedures.Alter(ctx, sdk.NewAlterProcedureRequest(id).WithExecuteAs(value)) + if err != nil { + d.Partial(true) return diag.FromErr(err) } } @@ -505,6 +526,7 @@ func ImportProcedure(ctx context.Context, d *schema.ResourceData, meta any) ([]* d.Set("name", id.Name()), d.Set("is_secure", booleanStringFromBool(procedure.IsSecure)), setOptionalFromStringPtr(d, "null_input_behavior", procedureDetails.NullHandling), + d.Set("execute_as", procedureDetails.ExecuteAs), importFunctionOrProcedureArguments(d, procedureDetails.NormalizedArguments), // all others are set in read ) diff --git a/pkg/resources/procedure_java.go b/pkg/resources/procedure_java.go index 584c4da787..b9901eb77e 100644 --- a/pkg/resources/procedure_java.go +++ b/pkg/resources/procedure_java.go @@ -77,6 +77,7 @@ func CreateContextProcedureJava(ctx context.Context, d *schema.ResourceData, met errs := errors.Join( booleanStringAttributeCreateBuilder(d, "is_secure", request.WithSecure), attributeMappedValueCreateBuilder[string](d, "null_input_behavior", request.WithNullInputBehavior, sdk.ToNullInputBehavior), + attributeMappedValueCreateBuilder[string](d, "execute_as", request.WithExecuteAs, sdk.ToExecuteAs), stringAttributeCreateBuilder(d, "comment", request.WithComment), setProcedureImportsInBuilder(d, request.WithImports), setExternalAccessIntegrationsInBuilder(d, request.WithExternalAccessIntegrations), @@ -128,6 +129,7 @@ func ReadContextProcedureJava(ctx context.Context, d *schema.ResourceData, meta readFunctionOrProcedureArguments(d, allProcedureDetails.procedureDetails.NormalizedArguments), d.Set("return_type", allProcedureDetails.procedureDetails.ReturnDataType.ToSql()), // not reading null_input_behavior on purpose (handled as external change to show output) + // not reading execute_as on purpose (handled as external change to show output) 
setRequiredFromStringPtr(d, "runtime_version", allProcedureDetails.procedureDetails.RuntimeVersion), d.Set("comment", allProcedureDetails.procedure.Description), readFunctionOrProcedureImports(d, allProcedureDetails.procedureDetails.NormalizedImports), diff --git a/pkg/resources/procedure_java_acceptance_test.go b/pkg/resources/procedure_java_acceptance_test.go index b7a81a0b40..651e1e35c5 100644 --- a/pkg/resources/procedure_java_acceptance_test.go +++ b/pkg/resources/procedure_java_acceptance_test.go @@ -81,7 +81,7 @@ func TestAcc_ProcedureJava_InlineBasic(t *testing.T) { ResourceName: procedureModel.ResourceReference(), ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"is_secure", "arguments.0.arg_data_type", "null_input_behavior"}, + ImportStateVerifyIgnore: []string{"is_secure", "arguments.0.arg_data_type", "null_input_behavior", "execute_as"}, ImportStateCheck: assert.AssertThatImport(t, resourceassert.ImportedProcedureJavaResource(t, id.FullyQualifiedName()). HasFullyQualifiedNameString(id.FullyQualifiedName()), @@ -236,6 +236,7 @@ func TestAcc_ProcedureJava_InlineFull(t *testing.T) { WithRuntimeVersion("11"). WithIsSecure("false"). WithNullInputBehavior(string(sdk.NullInputBehaviorCalledOnNullInput)). + WithExecuteAs(string(sdk.ExecuteAsCaller)). WithComment("some comment") procedureModelUpdateWithoutRecreation := model.ProcedureJavaBasicInline("w", id, dataType, handler, definition). @@ -254,6 +255,7 @@ func TestAcc_ProcedureJava_InlineFull(t *testing.T) { WithRuntimeVersion("11"). WithIsSecure("false"). WithNullInputBehavior(string(sdk.NullInputBehaviorCalledOnNullInput)). + WithExecuteAs(string(sdk.ExecuteAsOwner)). WithComment("some other comment") resource.Test(t, resource.TestCase{ @@ -276,6 +278,7 @@ func TestAcc_ProcedureJava_InlineFull(t *testing.T) { HasProcedureDefinitionString(definition). HasCommentString("some comment"). HasProcedureLanguageString("JAVA"). + HasExecuteAsString(string(sdk.ExecuteAsCaller)). 
HasFullyQualifiedNameString(id.FullyQualifiedName()), assert.Check(resource.TestCheckResourceAttr(procedureModel.ResourceReference(), "target_path.0.stage_location", stage.ID().FullyQualifiedName())), assert.Check(resource.TestCheckResourceAttr(procedureModel.ResourceReference(), "target_path.0.path_on_stage", jarName)), @@ -318,6 +321,7 @@ func TestAcc_ProcedureJava_InlineFull(t *testing.T) { HasProcedureDefinitionString(definition). HasCommentString("some other comment"). HasProcedureLanguageString("JAVA"). + HasExecuteAsString(string(sdk.ExecuteAsOwner)). HasFullyQualifiedNameString(id.FullyQualifiedName()), assert.Check(resource.TestCheckResourceAttr(procedureModelUpdateWithoutRecreation.ResourceReference(), "target_path.0.stage_location", stage.ID().FullyQualifiedName())), assert.Check(resource.TestCheckResourceAttr(procedureModelUpdateWithoutRecreation.ResourceReference(), "target_path.0.path_on_stage", jarName)), diff --git a/pkg/resources/procedure_javascript.go b/pkg/resources/procedure_javascript.go index 97336f2839..d2b8fa9392 100644 --- a/pkg/resources/procedure_javascript.go +++ b/pkg/resources/procedure_javascript.go @@ -70,6 +70,7 @@ func CreateContextProcedureJavascript(ctx context.Context, d *schema.ResourceDat errs := errors.Join( booleanStringAttributeCreateBuilder(d, "is_secure", request.WithSecure), attributeMappedValueCreateBuilder[string](d, "null_input_behavior", request.WithNullInputBehavior, sdk.ToNullInputBehavior), + attributeMappedValueCreateBuilder[string](d, "execute_as", request.WithExecuteAs, sdk.ToExecuteAs), stringAttributeCreateBuilder(d, "comment", request.WithComment), ) if errs != nil { @@ -116,6 +117,7 @@ func ReadContextProcedureJavascript(ctx context.Context, d *schema.ResourceData, readFunctionOrProcedureArguments(d, allProcedureDetails.procedureDetails.NormalizedArguments), d.Set("return_type", allProcedureDetails.procedureDetails.ReturnDataType.ToSql()), // not reading null_input_behavior on purpose (handled as external 
change to show output) + // not reading execute_as on purpose (handled as external change to show output) d.Set("comment", allProcedureDetails.procedure.Description), setOptionalFromStringPtr(d, "procedure_definition", allProcedureDetails.procedureDetails.Body), d.Set("procedure_language", allProcedureDetails.procedureDetails.Language), diff --git a/pkg/resources/procedure_javascript_acceptance_test.go b/pkg/resources/procedure_javascript_acceptance_test.go new file mode 100644 index 0000000000..3a6bf5efd8 --- /dev/null +++ b/pkg/resources/procedure_javascript_acceptance_test.go @@ -0,0 +1,178 @@ +package resources_test + +import ( + "testing" + + acc "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance" + r "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/resources" + + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/bettertestspoc/assert" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/bettertestspoc/assert/resourceassert" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/bettertestspoc/assert/resourceshowoutputassert" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/bettertestspoc/config" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/bettertestspoc/config/model" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/importchecks" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/testdatatypes" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/testenvs" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/helpers" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/provider/resources" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk" + "github.com/hashicorp/terraform-plugin-testing/helper/resource" + "github.com/hashicorp/terraform-plugin-testing/plancheck" + "github.com/hashicorp/terraform-plugin-testing/tfversion" 
+) + +func TestAcc_ProcedureJavascript_InlineBasic(t *testing.T) { + argName := "x" + dataType := testdatatypes.DataTypeVarchar_100 + + id := acc.TestClient().Ids.RandomSchemaObjectIdentifierWithArgumentsNewDataTypes(dataType) + idWithChangedNameButTheSameDataType := acc.TestClient().Ids.RandomSchemaObjectIdentifierWithArgumentsNewDataTypes(dataType) + + definition := acc.TestClient().Procedure.SampleJavascriptDefinition(t, argName) + + procedureModel := model.ProcedureJavascriptBasicInline("w", id, dataType, definition). + WithArgument(argName, dataType) + procedureModelRenamed := model.ProcedureJavascriptBasicInline("w", idWithChangedNameButTheSameDataType, dataType, definition). + WithArgument(argName, dataType) + + resource.Test(t, resource.TestCase{ + ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, + TerraformVersionChecks: []tfversion.TerraformVersionCheck{ + tfversion.RequireAbove(tfversion.Version1_5_0), + }, + PreCheck: func() { acc.TestAccPreCheck(t) }, + CheckDestroy: acc.CheckDestroy(t, resources.ProcedureJavascript), + Steps: []resource.TestStep{ + // CREATE BASIC + { + Config: config.FromModels(t, procedureModel), + Check: assert.AssertThat(t, + resourceassert.ProcedureJavascriptResource(t, procedureModel.ResourceReference()). + HasNameString(id.Name()). + HasIsSecureString(r.BooleanDefault). + HasCommentString(sdk.DefaultProcedureComment). + HasProcedureDefinitionString(definition). + HasProcedureLanguageString("JAVASCRIPT"). + HasFullyQualifiedNameString(id.FullyQualifiedName()), + resourceshowoutputassert.ProcedureShowOutput(t, procedureModel.ResourceReference()). 
+ HasIsSecure(false), + assert.Check(resource.TestCheckResourceAttr(procedureModel.ResourceReference(), "arguments.0.arg_name", argName)), + assert.Check(resource.TestCheckResourceAttr(procedureModel.ResourceReference(), "arguments.0.arg_data_type", dataType.ToSql())), + assert.Check(resource.TestCheckResourceAttr(procedureModel.ResourceReference(), "arguments.0.arg_default_value", "")), + ), + }, + // IMPORT + { + ResourceName: procedureModel.ResourceReference(), + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"is_secure", "arguments.0.arg_data_type", "null_input_behavior", "execute_as"}, + ImportStateCheck: assert.AssertThatImport(t, + resourceassert.ImportedProcedureJavascriptResource(t, id.FullyQualifiedName()). + HasFullyQualifiedNameString(id.FullyQualifiedName()), + assert.CheckImport(importchecks.TestCheckResourceAttrInstanceState(helpers.EncodeResourceIdentifier(id), "arguments.0.arg_name", argName)), + assert.CheckImport(importchecks.TestCheckResourceAttrInstanceState(helpers.EncodeResourceIdentifier(id), "arguments.0.arg_data_type", "VARCHAR(16777216)")), + assert.CheckImport(importchecks.TestCheckResourceAttrInstanceState(helpers.EncodeResourceIdentifier(id), "arguments.0.arg_default_value", "")), + ), + }, + // RENAME + { + Config: config.FromModels(t, procedureModelRenamed), + Check: assert.AssertThat(t, + resourceassert.ProcedureJavascriptResource(t, procedureModelRenamed.ResourceReference()). + HasNameString(idWithChangedNameButTheSameDataType.Name()). 
+ HasFullyQualifiedNameString(idWithChangedNameButTheSameDataType.FullyQualifiedName()), + ), + }, + }, + }) +} + +func TestAcc_ProcedureJavascript_InlineFull(t *testing.T) { + _ = testenvs.GetOrSkipTest(t, testenvs.EnableAcceptance) + acc.TestAccPreCheck(t) + + argName := "x" + dataType := testdatatypes.DataTypeVarchar_100 + + id := acc.TestClient().Ids.RandomSchemaObjectIdentifierWithArgumentsNewDataTypes(dataType) + + definition := acc.TestClient().Procedure.SampleJavascriptDefinition(t, argName) + + procedureModel := model.ProcedureJavascriptBasicInline("w", id, dataType, definition). + WithArgument(argName, dataType). + WithIsSecure("false"). + WithNullInputBehavior(string(sdk.NullInputBehaviorCalledOnNullInput)). + WithExecuteAs(string(sdk.ExecuteAsCaller)). + WithComment("some comment") + + procedureModelUpdateWithoutRecreation := model.ProcedureJavascriptBasicInline("w", id, dataType, definition). + WithArgument(argName, dataType). + WithIsSecure("false"). + WithNullInputBehavior(string(sdk.NullInputBehaviorCalledOnNullInput)). + WithExecuteAs(string(sdk.ExecuteAsOwner)). + WithComment("some other comment") + + resource.Test(t, resource.TestCase{ + ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, + TerraformVersionChecks: []tfversion.TerraformVersionCheck{ + tfversion.RequireAbove(tfversion.Version1_5_0), + }, + PreCheck: func() { acc.TestAccPreCheck(t) }, + CheckDestroy: acc.CheckDestroy(t, resources.ProcedureJavascript), + Steps: []resource.TestStep{ + // CREATE BASIC + { + Config: config.FromModels(t, procedureModel), + Check: assert.AssertThat(t, + resourceassert.ProcedureJavascriptResource(t, procedureModel.ResourceReference()). + HasNameString(id.Name()). + HasIsSecureString(r.BooleanFalse). + HasProcedureDefinitionString(definition). + HasCommentString("some comment"). + HasProcedureLanguageString("JAVASCRIPT"). + HasExecuteAsString(string(sdk.ExecuteAsCaller)). 
+ HasFullyQualifiedNameString(id.FullyQualifiedName()), + resourceshowoutputassert.ProcedureShowOutput(t, procedureModel.ResourceReference()). + HasIsSecure(false), + ), + }, + // IMPORT + { + ResourceName: procedureModel.ResourceReference(), + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"arguments.0.arg_data_type"}, + ImportStateCheck: assert.AssertThatImport(t, + resourceassert.ImportedProcedureJavascriptResource(t, id.FullyQualifiedName()). + HasFullyQualifiedNameString(id.FullyQualifiedName()), + assert.CheckImport(importchecks.TestCheckResourceAttrInstanceState(helpers.EncodeResourceIdentifier(id), "arguments.0.arg_name", argName)), + assert.CheckImport(importchecks.TestCheckResourceAttrInstanceState(helpers.EncodeResourceIdentifier(id), "arguments.0.arg_data_type", "VARCHAR(16777216)")), + assert.CheckImport(importchecks.TestCheckResourceAttrInstanceState(helpers.EncodeResourceIdentifier(id), "arguments.0.arg_default_value", "")), + ), + }, + // UPDATE WITHOUT RECREATION + { + ConfigPlanChecks: resource.ConfigPlanChecks{ + PreApply: []plancheck.PlanCheck{ + plancheck.ExpectResourceAction(procedureModelUpdateWithoutRecreation.ResourceReference(), plancheck.ResourceActionUpdate), + }, + }, + Config: config.FromModels(t, procedureModelUpdateWithoutRecreation), + Check: assert.AssertThat(t, + resourceassert.ProcedureJavascriptResource(t, procedureModelUpdateWithoutRecreation.ResourceReference()). + HasNameString(id.Name()). + HasIsSecureString(r.BooleanFalse). + HasProcedureDefinitionString(definition). + HasCommentString("some other comment"). + HasProcedureLanguageString("JAVASCRIPT"). + HasExecuteAsString(string(sdk.ExecuteAsOwner)). + HasFullyQualifiedNameString(id.FullyQualifiedName()), + resourceshowoutputassert.ProcedureShowOutput(t, procedureModelUpdateWithoutRecreation.ResourceReference()). 
+ HasIsSecure(false), + ), + }, + }, + }) +} diff --git a/pkg/resources/procedure_python.go b/pkg/resources/procedure_python.go index deb95c9871..eb62cdac7b 100644 --- a/pkg/resources/procedure_python.go +++ b/pkg/resources/procedure_python.go @@ -77,6 +77,7 @@ func CreateContextProcedurePython(ctx context.Context, d *schema.ResourceData, m errs := errors.Join( booleanStringAttributeCreateBuilder(d, "is_secure", request.WithSecure), attributeMappedValueCreateBuilder[string](d, "null_input_behavior", request.WithNullInputBehavior, sdk.ToNullInputBehavior), + attributeMappedValueCreateBuilder[string](d, "execute_as", request.WithExecuteAs, sdk.ToExecuteAs), stringAttributeCreateBuilder(d, "comment", request.WithComment), setProcedureImportsInBuilder(d, request.WithImports), setExternalAccessIntegrationsInBuilder(d, request.WithExternalAccessIntegrations), @@ -127,6 +128,7 @@ func ReadContextProcedurePython(ctx context.Context, d *schema.ResourceData, met readFunctionOrProcedureArguments(d, allProcedureDetails.procedureDetails.NormalizedArguments), d.Set("return_type", allProcedureDetails.procedureDetails.ReturnDataType.ToSql()), // not reading null_input_behavior on purpose (handled as external change to show output) + // not reading execute_as on purpose (handled as external change to show output) setRequiredFromStringPtr(d, "runtime_version", allProcedureDetails.procedureDetails.RuntimeVersion), d.Set("comment", allProcedureDetails.procedure.Description), readFunctionOrProcedureImports(d, allProcedureDetails.procedureDetails.NormalizedImports), diff --git a/pkg/resources/procedure_python_acceptance_test.go b/pkg/resources/procedure_python_acceptance_test.go new file mode 100644 index 0000000000..7932a83381 --- /dev/null +++ b/pkg/resources/procedure_python_acceptance_test.go @@ -0,0 +1,240 @@ +package resources_test + +import ( + "testing" + + acc "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance" + r 
"github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/resources" + + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/bettertestspoc/assert" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/bettertestspoc/assert/resourceassert" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/bettertestspoc/assert/resourceshowoutputassert" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/bettertestspoc/config" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/bettertestspoc/config/model" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/importchecks" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/testdatatypes" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/testenvs" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/helpers" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/provider/resources" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk" + "github.com/hashicorp/terraform-plugin-testing/helper/resource" + "github.com/hashicorp/terraform-plugin-testing/plancheck" + "github.com/hashicorp/terraform-plugin-testing/tfversion" +) + +func TestAcc_ProcedurePython_InlineBasic(t *testing.T) { + funcName := "echoVarchar" + argName := "x" + dataType := testdatatypes.DataTypeVarchar_100 + + id := acc.TestClient().Ids.RandomSchemaObjectIdentifierWithArgumentsNewDataTypes(dataType) + idWithChangedNameButTheSameDataType := acc.TestClient().Ids.RandomSchemaObjectIdentifierWithArgumentsNewDataTypes(dataType) + + definition := acc.TestClient().Procedure.SamplePythonDefinition(t, funcName, argName) + + procedureModel := model.ProcedurePythonBasicInline("w", id, dataType, funcName, definition). + WithArgument(argName, dataType) + procedureModelRenamed := model.ProcedurePythonBasicInline("w", idWithChangedNameButTheSameDataType, dataType, funcName, definition). 
+ WithArgument(argName, dataType) + + resource.Test(t, resource.TestCase{ + ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, + TerraformVersionChecks: []tfversion.TerraformVersionCheck{ + tfversion.RequireAbove(tfversion.Version1_5_0), + }, + PreCheck: func() { acc.TestAccPreCheck(t) }, + CheckDestroy: acc.CheckDestroy(t, resources.ProcedurePython), + Steps: []resource.TestStep{ + // CREATE BASIC + { + Config: config.FromModels(t, procedureModel), + Check: assert.AssertThat(t, + resourceassert.ProcedurePythonResource(t, procedureModel.ResourceReference()). + HasNameString(id.Name()). + HasIsSecureString(r.BooleanDefault). + HasCommentString(sdk.DefaultProcedureComment). + HasImportsLength(0). + HasRuntimeVersionString("3.8"). + HasProcedureDefinitionString(definition). + HasProcedureLanguageString("PYTHON"). + HasFullyQualifiedNameString(id.FullyQualifiedName()), + resourceshowoutputassert.ProcedureShowOutput(t, procedureModel.ResourceReference()). + HasIsSecure(false), + assert.Check(resource.TestCheckResourceAttr(procedureModel.ResourceReference(), "arguments.0.arg_name", argName)), + assert.Check(resource.TestCheckResourceAttr(procedureModel.ResourceReference(), "arguments.0.arg_data_type", dataType.ToSql())), + assert.Check(resource.TestCheckResourceAttr(procedureModel.ResourceReference(), "arguments.0.arg_default_value", "")), + ), + }, + // IMPORT + { + ResourceName: procedureModel.ResourceReference(), + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"is_secure", "arguments.0.arg_data_type", "null_input_behavior", "execute_as"}, + ImportStateCheck: assert.AssertThatImport(t, + resourceassert.ImportedProcedurePythonResource(t, id.FullyQualifiedName()). 
+ HasFullyQualifiedNameString(id.FullyQualifiedName()), + assert.CheckImport(importchecks.TestCheckResourceAttrInstanceState(helpers.EncodeResourceIdentifier(id), "arguments.0.arg_name", argName)), + assert.CheckImport(importchecks.TestCheckResourceAttrInstanceState(helpers.EncodeResourceIdentifier(id), "arguments.0.arg_data_type", "VARCHAR(16777216)")), + assert.CheckImport(importchecks.TestCheckResourceAttrInstanceState(helpers.EncodeResourceIdentifier(id), "arguments.0.arg_default_value", "")), + ), + }, + // RENAME + { + Config: config.FromModels(t, procedureModelRenamed), + Check: assert.AssertThat(t, + resourceassert.ProcedurePythonResource(t, procedureModelRenamed.ResourceReference()). + HasNameString(idWithChangedNameButTheSameDataType.Name()). + HasFullyQualifiedNameString(idWithChangedNameButTheSameDataType.FullyQualifiedName()), + ), + }, + }, + }) +} + +func TestAcc_ProcedurePython_InlineFull(t *testing.T) { + _ = testenvs.GetOrSkipTest(t, testenvs.EnableAcceptance) + acc.TestAccPreCheck(t) + + secretId := acc.TestClient().Ids.RandomSchemaObjectIdentifier() + secretId2 := acc.TestClient().Ids.RandomSchemaObjectIdentifier() + + networkRule, networkRuleCleanup := acc.TestClient().NetworkRule.Create(t) + t.Cleanup(networkRuleCleanup) + + secret, secretCleanup := acc.TestClient().Secret.CreateWithGenericString(t, secretId, "test_secret_string") + t.Cleanup(secretCleanup) + + secret2, secret2Cleanup := acc.TestClient().Secret.CreateWithGenericString(t, secretId2, "test_secret_string_2") + t.Cleanup(secret2Cleanup) + + externalAccessIntegration, externalAccessIntegrationCleanup := acc.TestClient().ExternalAccessIntegration.CreateExternalAccessIntegrationWithNetworkRuleAndSecret(t, networkRule.ID(), secret.ID()) + t.Cleanup(externalAccessIntegrationCleanup) + + externalAccessIntegration2, externalAccessIntegration2Cleanup := acc.TestClient().ExternalAccessIntegration.CreateExternalAccessIntegrationWithNetworkRuleAndSecret(t, networkRule.ID(), secret2.ID()) + 
t.Cleanup(externalAccessIntegration2Cleanup) + + tmpPythonFunction := acc.TestClient().CreateSamplePythonFunctionAndModule(t) + tmpPythonFunction2 := acc.TestClient().CreateSamplePythonFunctionAndModule(t) + + funcName := "echoVarchar" + argName := "x" + dataType := testdatatypes.DataTypeVarchar_100 + + id := acc.TestClient().Ids.RandomSchemaObjectIdentifierWithArgumentsNewDataTypes(dataType) + + definition := acc.TestClient().Procedure.SamplePythonDefinition(t, funcName, argName) + + procedureModel := model.ProcedurePythonBasicInline("w", id, dataType, funcName, definition). + WithArgument(argName, dataType). + WithImports( + sdk.NormalizedPath{StageLocation: "~", PathOnStage: tmpPythonFunction.PythonFileName()}, + sdk.NormalizedPath{StageLocation: "~", PathOnStage: tmpPythonFunction2.PythonFileName()}, + ). + WithSnowparkPackage("1.14.0"). + WithPackages("absl-py==0.10.0"). + WithExternalAccessIntegrations(externalAccessIntegration, externalAccessIntegration2). + WithSecrets(map[string]sdk.SchemaObjectIdentifier{ + "abc": secretId, + "def": secretId2, + }). + WithRuntimeVersion("3.8"). + WithIsSecure("false"). + WithNullInputBehavior(string(sdk.NullInputBehaviorCalledOnNullInput)). + WithExecuteAs(string(sdk.ExecuteAsCaller)). + WithComment("some comment") + + procedureModelUpdateWithoutRecreation := model.ProcedurePythonBasicInline("w", id, dataType, funcName, definition). + WithArgument(argName, dataType). + WithImports( + sdk.NormalizedPath{StageLocation: "~", PathOnStage: tmpPythonFunction.PythonFileName()}, + sdk.NormalizedPath{StageLocation: "~", PathOnStage: tmpPythonFunction2.PythonFileName()}, + ). + WithSnowparkPackage("1.14.0"). + WithPackages("absl-py==0.10.0"). + WithExternalAccessIntegrations(externalAccessIntegration). + WithSecrets(map[string]sdk.SchemaObjectIdentifier{ + "def": secretId2, + }). + WithRuntimeVersion("3.8"). + WithIsSecure("false"). + WithNullInputBehavior(string(sdk.NullInputBehaviorCalledOnNullInput)). 
+ WithExecuteAs(string(sdk.ExecuteAsOwner)). + WithComment("some other comment") + + resource.Test(t, resource.TestCase{ + ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, + TerraformVersionChecks: []tfversion.TerraformVersionCheck{ + tfversion.RequireAbove(tfversion.Version1_5_0), + }, + PreCheck: func() { acc.TestAccPreCheck(t) }, + CheckDestroy: acc.CheckDestroy(t, resources.ProcedurePython), + Steps: []resource.TestStep{ + // CREATE BASIC + { + Config: config.FromModels(t, procedureModel), + Check: assert.AssertThat(t, + resourceassert.ProcedurePythonResource(t, procedureModel.ResourceReference()). + HasNameString(id.Name()). + HasIsSecureString(r.BooleanFalse). + HasImportsLength(2). + HasRuntimeVersionString("3.8"). + HasProcedureDefinitionString(definition). + HasCommentString("some comment"). + HasProcedureLanguageString("PYTHON"). + HasExecuteAsString(string(sdk.ExecuteAsCaller)). + HasFullyQualifiedNameString(id.FullyQualifiedName()), + assert.Check(resource.TestCheckResourceAttr(procedureModel.ResourceReference(), "secrets.#", "2")), + assert.Check(resource.TestCheckResourceAttr(procedureModel.ResourceReference(), "external_access_integrations.#", "2")), + assert.Check(resource.TestCheckResourceAttr(procedureModel.ResourceReference(), "packages.#", "1")), + assert.Check(resource.TestCheckResourceAttr(procedureModel.ResourceReference(), "packages.0", "absl-py==0.10.0")), + resourceshowoutputassert.ProcedureShowOutput(t, procedureModel.ResourceReference()). + HasIsSecure(false), + ), + }, + // IMPORT + { + ResourceName: procedureModel.ResourceReference(), + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"arguments.0.arg_data_type"}, + ImportStateCheck: assert.AssertThatImport(t, + resourceassert.ImportedProcedurePythonResource(t, id.FullyQualifiedName()). 
+ HasFullyQualifiedNameString(id.FullyQualifiedName()), + assert.CheckImport(importchecks.TestCheckResourceAttrInstanceState(helpers.EncodeResourceIdentifier(id), "arguments.0.arg_name", argName)), + assert.CheckImport(importchecks.TestCheckResourceAttrInstanceState(helpers.EncodeResourceIdentifier(id), "arguments.0.arg_data_type", "VARCHAR(16777216)")), + assert.CheckImport(importchecks.TestCheckResourceAttrInstanceState(helpers.EncodeResourceIdentifier(id), "arguments.0.arg_default_value", "")), + ), + }, + // UPDATE WITHOUT RECREATION + { + ConfigPlanChecks: resource.ConfigPlanChecks{ + PreApply: []plancheck.PlanCheck{ + plancheck.ExpectResourceAction(procedureModelUpdateWithoutRecreation.ResourceReference(), plancheck.ResourceActionUpdate), + }, + }, + Config: config.FromModels(t, procedureModelUpdateWithoutRecreation), + Check: assert.AssertThat(t, + resourceassert.ProcedurePythonResource(t, procedureModelUpdateWithoutRecreation.ResourceReference()). + HasNameString(id.Name()). + HasIsSecureString(r.BooleanFalse). + HasImportsLength(2). + HasRuntimeVersionString("3.8"). + HasProcedureDefinitionString(definition). + HasCommentString("some other comment"). + HasProcedureLanguageString("PYTHON"). + HasExecuteAsString(string(sdk.ExecuteAsOwner)). 
+ HasFullyQualifiedNameString(id.FullyQualifiedName()), + assert.Check(resource.TestCheckResourceAttr(procedureModelUpdateWithoutRecreation.ResourceReference(), "secrets.#", "1")), + assert.Check(resource.TestCheckResourceAttr(procedureModelUpdateWithoutRecreation.ResourceReference(), "secrets.0.secret_variable_name", "def")), + assert.Check(resource.TestCheckResourceAttr(procedureModelUpdateWithoutRecreation.ResourceReference(), "secrets.0.secret_id", secretId2.FullyQualifiedName())), + assert.Check(resource.TestCheckResourceAttr(procedureModelUpdateWithoutRecreation.ResourceReference(), "external_access_integrations.#", "1")), + assert.Check(resource.TestCheckResourceAttr(procedureModelUpdateWithoutRecreation.ResourceReference(), "external_access_integrations.0", externalAccessIntegration.Name())), + assert.Check(resource.TestCheckResourceAttr(procedureModelUpdateWithoutRecreation.ResourceReference(), "packages.#", "1")), + resourceshowoutputassert.ProcedureShowOutput(t, procedureModelUpdateWithoutRecreation.ResourceReference()). + HasIsSecure(false), + ), + }, + }, + }) +} diff --git a/pkg/resources/procedure_scala.go b/pkg/resources/procedure_scala.go index c860c6282d..5b21c728ae 100644 --- a/pkg/resources/procedure_scala.go +++ b/pkg/resources/procedure_scala.go @@ -23,7 +23,7 @@ func ProcedureScala() *schema.Resource { return &schema.Resource{ CreateContext: TrackingCreateWrapper(resources.ProcedureScala, CreateContextProcedureScala), ReadContext: TrackingReadWrapper(resources.ProcedureScala, ReadContextProcedureScala), - UpdateContext: TrackingUpdateWrapper(resources.ProcedureScala, UpdateProcedure("SQL", ReadContextProcedureScala)), + UpdateContext: TrackingUpdateWrapper(resources.ProcedureScala, UpdateProcedure("SCALA", ReadContextProcedureScala)), DeleteContext: TrackingDeleteWrapper(resources.ProcedureScala, DeleteProcedure), Description: "Resource used to manage scala procedure objects. 
For more information, check [procedure documentation](https://docs.snowflake.com/en/sql-reference/sql/create-procedure).", @@ -77,6 +77,7 @@ func CreateContextProcedureScala(ctx context.Context, d *schema.ResourceData, me errs := errors.Join( booleanStringAttributeCreateBuilder(d, "is_secure", request.WithSecure), attributeMappedValueCreateBuilder[string](d, "null_input_behavior", request.WithNullInputBehavior, sdk.ToNullInputBehavior), + attributeMappedValueCreateBuilder[string](d, "execute_as", request.WithExecuteAs, sdk.ToExecuteAs), stringAttributeCreateBuilder(d, "comment", request.WithComment), setProcedureImportsInBuilder(d, request.WithImports), setExternalAccessIntegrationsInBuilder(d, request.WithExternalAccessIntegrations), @@ -128,6 +129,7 @@ func ReadContextProcedureScala(ctx context.Context, d *schema.ResourceData, meta readFunctionOrProcedureArguments(d, allProcedureDetails.procedureDetails.NormalizedArguments), d.Set("return_type", allProcedureDetails.procedureDetails.ReturnDataType.ToSql()), // not reading null_input_behavior on purpose (handled as external change to show output) + // not reading execute_as on purpose (handled as external change to show output) setRequiredFromStringPtr(d, "runtime_version", allProcedureDetails.procedureDetails.RuntimeVersion), d.Set("comment", allProcedureDetails.procedure.Description), readFunctionOrProcedureImports(d, allProcedureDetails.procedureDetails.NormalizedImports), diff --git a/pkg/resources/procedure_scala_acceptance_test.go b/pkg/resources/procedure_scala_acceptance_test.go new file mode 100644 index 0000000000..a45a717b63 --- /dev/null +++ b/pkg/resources/procedure_scala_acceptance_test.go @@ -0,0 +1,262 @@ +package resources_test + +import ( + "fmt" + "testing" + "time" + + acc "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance" + r "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/resources" + + 
"github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/bettertestspoc/assert" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/bettertestspoc/assert/resourceassert" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/bettertestspoc/assert/resourceshowoutputassert" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/bettertestspoc/config" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/bettertestspoc/config/model" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/helpers/random" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/importchecks" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/testdatatypes" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/testenvs" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/helpers" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/provider/resources" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk" + "github.com/hashicorp/terraform-plugin-testing/helper/resource" + "github.com/hashicorp/terraform-plugin-testing/plancheck" + "github.com/hashicorp/terraform-plugin-testing/tfversion" +) + +// TODO [SNOW-1348103]: test external changes +// TODO [SNOW-1348103]: test changes of attributes separately + +func TestAcc_ProcedureScala_InlineBasic(t *testing.T) { + className := "TestFunc" + funcName := "echoVarchar" + argName := "x" + dataType := testdatatypes.DataTypeVarchar_100 + + id := acc.TestClient().Ids.RandomSchemaObjectIdentifierWithArgumentsNewDataTypes(dataType) + idWithChangedNameButTheSameDataType := acc.TestClient().Ids.RandomSchemaObjectIdentifierWithArgumentsNewDataTypes(dataType) + + handler := fmt.Sprintf("%s.%s", className, funcName) + definition := acc.TestClient().Procedure.SampleScalaDefinition(t, className, funcName, argName) + + procedureModel := 
model.ProcedureScalaBasicInline("w", id, dataType, handler, definition). + WithArgument(argName, dataType) + procedureModelRenamed := model.ProcedureScalaBasicInline("w", idWithChangedNameButTheSameDataType, dataType, handler, definition). + WithArgument(argName, dataType) + + resource.Test(t, resource.TestCase{ + ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, + TerraformVersionChecks: []tfversion.TerraformVersionCheck{ + tfversion.RequireAbove(tfversion.Version1_5_0), + }, + PreCheck: func() { acc.TestAccPreCheck(t) }, + CheckDestroy: acc.CheckDestroy(t, resources.ProcedureScala), + Steps: []resource.TestStep{ + // CREATE BASIC + { + Config: config.FromModels(t, procedureModel), + Check: assert.AssertThat(t, + resourceassert.ProcedureScalaResource(t, procedureModel.ResourceReference()). + HasNameString(id.Name()). + HasIsSecureString(r.BooleanDefault). + HasCommentString(sdk.DefaultProcedureComment). + HasImportsLength(0). + HasTargetPathEmpty(). + HasRuntimeVersionString("2.12"). + HasProcedureDefinitionString(definition). + HasProcedureLanguageString("SCALA"). + HasFullyQualifiedNameString(id.FullyQualifiedName()), + resourceshowoutputassert.ProcedureShowOutput(t, procedureModel.ResourceReference()). + HasIsSecure(false), + assert.Check(resource.TestCheckResourceAttr(procedureModel.ResourceReference(), "arguments.0.arg_name", argName)), + assert.Check(resource.TestCheckResourceAttr(procedureModel.ResourceReference(), "arguments.0.arg_data_type", dataType.ToSql())), + assert.Check(resource.TestCheckResourceAttr(procedureModel.ResourceReference(), "arguments.0.arg_default_value", "")), + ), + }, + // IMPORT + { + ResourceName: procedureModel.ResourceReference(), + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"is_secure", "arguments.0.arg_data_type", "null_input_behavior", "execute_as"}, + ImportStateCheck: assert.AssertThatImport(t, + resourceassert.ImportedProcedureScalaResource(t, id.FullyQualifiedName()). 
+ HasFullyQualifiedNameString(id.FullyQualifiedName()), + assert.CheckImport(importchecks.TestCheckResourceAttrInstanceState(helpers.EncodeResourceIdentifier(id), "arguments.0.arg_name", argName)), + assert.CheckImport(importchecks.TestCheckResourceAttrInstanceState(helpers.EncodeResourceIdentifier(id), "arguments.0.arg_data_type", "VARCHAR(16777216)")), + assert.CheckImport(importchecks.TestCheckResourceAttrInstanceState(helpers.EncodeResourceIdentifier(id), "arguments.0.arg_default_value", "")), + ), + }, + // RENAME + { + Config: config.FromModels(t, procedureModelRenamed), + Check: assert.AssertThat(t, + resourceassert.ProcedureScalaResource(t, procedureModelRenamed.ResourceReference()). + HasNameString(idWithChangedNameButTheSameDataType.Name()). + HasFullyQualifiedNameString(idWithChangedNameButTheSameDataType.FullyQualifiedName()), + ), + }, + }, + }) +} + +func TestAcc_ProcedureScala_InlineFull(t *testing.T) { + _ = testenvs.GetOrSkipTest(t, testenvs.EnableAcceptance) + acc.TestAccPreCheck(t) + + stage, stageCleanup := acc.TestClient().Stage.CreateStage(t) + t.Cleanup(stageCleanup) + + secretId := acc.TestClient().Ids.RandomSchemaObjectIdentifier() + secretId2 := acc.TestClient().Ids.RandomSchemaObjectIdentifier() + + networkRule, networkRuleCleanup := acc.TestClient().NetworkRule.Create(t) + t.Cleanup(networkRuleCleanup) + + secret, secretCleanup := acc.TestClient().Secret.CreateWithGenericString(t, secretId, "test_secret_string") + t.Cleanup(secretCleanup) + + secret2, secret2Cleanup := acc.TestClient().Secret.CreateWithGenericString(t, secretId2, "test_secret_string_2") + t.Cleanup(secret2Cleanup) + + externalAccessIntegration, externalAccessIntegrationCleanup := acc.TestClient().ExternalAccessIntegration.CreateExternalAccessIntegrationWithNetworkRuleAndSecret(t, networkRule.ID(), secret.ID()) + t.Cleanup(externalAccessIntegrationCleanup) + + externalAccessIntegration2, externalAccessIntegration2Cleanup := 
acc.TestClient().ExternalAccessIntegration.CreateExternalAccessIntegrationWithNetworkRuleAndSecret(t, networkRule.ID(), secret2.ID()) + t.Cleanup(externalAccessIntegration2Cleanup) + + tmpJavaProcedure := acc.TestClient().CreateSampleJavaProcedureAndJarOnUserStage(t) + tmpJavaProcedure2 := acc.TestClient().CreateSampleJavaProcedureAndJarOnUserStage(t) + + className := "TestFunc" + funcName := "echoVarchar" + argName := "x" + dataType := testdatatypes.DataTypeVarchar_100 + + id := acc.TestClient().Ids.RandomSchemaObjectIdentifierWithArgumentsNewDataTypes(dataType) + + handler := fmt.Sprintf("%s.%s", className, funcName) + definition := acc.TestClient().Procedure.SampleScalaDefinition(t, className, funcName, argName) + // TODO [SNOW-1850370]: extract to helper + jarName := fmt.Sprintf("tf-%d-%s.jar", time.Now().Unix(), random.AlphaN(5)) + + procedureModel := model.ProcedureScalaBasicInline("w", id, dataType, handler, definition). + WithArgument(argName, dataType). + WithImports( + sdk.NormalizedPath{StageLocation: "~", PathOnStage: tmpJavaProcedure.JarName}, + sdk.NormalizedPath{StageLocation: "~", PathOnStage: tmpJavaProcedure2.JarName}, + ). + WithSnowparkPackage("1.14.0"). + WithPackages("com.snowflake:telemetry:0.1.0"). + WithExternalAccessIntegrations(externalAccessIntegration, externalAccessIntegration2). + WithSecrets(map[string]sdk.SchemaObjectIdentifier{ + "abc": secretId, + "def": secretId2, + }). + WithTargetPathParts(stage.ID().FullyQualifiedName(), jarName). + WithRuntimeVersion("2.12"). + WithIsSecure("false"). + WithNullInputBehavior(string(sdk.NullInputBehaviorCalledOnNullInput)). + WithExecuteAs(string(sdk.ExecuteAsCaller)). + WithComment("some comment") + + procedureModelUpdateWithoutRecreation := model.ProcedureScalaBasicInline("w", id, dataType, handler, definition). + WithArgument(argName, dataType). 
+ WithImports( + sdk.NormalizedPath{StageLocation: "~", PathOnStage: tmpJavaProcedure.JarName}, + sdk.NormalizedPath{StageLocation: "~", PathOnStage: tmpJavaProcedure2.JarName}, + ). + WithSnowparkPackage("1.14.0"). + WithPackages("com.snowflake:telemetry:0.1.0"). + WithExternalAccessIntegrations(externalAccessIntegration). + WithSecrets(map[string]sdk.SchemaObjectIdentifier{ + "def": secretId2, + }). + WithTargetPathParts(stage.ID().FullyQualifiedName(), jarName). + WithRuntimeVersion("2.12"). + WithIsSecure("false"). + WithNullInputBehavior(string(sdk.NullInputBehaviorCalledOnNullInput)). + WithExecuteAs(string(sdk.ExecuteAsOwner)). + WithComment("some other comment") + + resource.Test(t, resource.TestCase{ + ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, + TerraformVersionChecks: []tfversion.TerraformVersionCheck{ + tfversion.RequireAbove(tfversion.Version1_5_0), + }, + PreCheck: func() { acc.TestAccPreCheck(t) }, + CheckDestroy: acc.CheckDestroy(t, resources.ProcedureScala), + Steps: []resource.TestStep{ + // CREATE BASIC + { + Config: config.FromModels(t, procedureModel), + Check: assert.AssertThat(t, + resourceassert.ProcedureScalaResource(t, procedureModel.ResourceReference()). + HasNameString(id.Name()). + HasIsSecureString(r.BooleanFalse). + HasImportsLength(2). + HasRuntimeVersionString("2.12"). + HasProcedureDefinitionString(definition). + HasCommentString("some comment"). + HasProcedureLanguageString("SCALA"). + HasExecuteAsString(string(sdk.ExecuteAsCaller)). 
+ HasFullyQualifiedNameString(id.FullyQualifiedName()), + assert.Check(resource.TestCheckResourceAttr(procedureModel.ResourceReference(), "target_path.0.stage_location", stage.ID().FullyQualifiedName())), + assert.Check(resource.TestCheckResourceAttr(procedureModel.ResourceReference(), "target_path.0.path_on_stage", jarName)), + assert.Check(resource.TestCheckResourceAttr(procedureModel.ResourceReference(), "secrets.#", "2")), + assert.Check(resource.TestCheckResourceAttr(procedureModel.ResourceReference(), "external_access_integrations.#", "2")), + assert.Check(resource.TestCheckResourceAttr(procedureModel.ResourceReference(), "packages.#", "1")), + assert.Check(resource.TestCheckResourceAttr(procedureModel.ResourceReference(), "packages.0", "com.snowflake:telemetry:0.1.0")), + resourceshowoutputassert.ProcedureShowOutput(t, procedureModel.ResourceReference()). + HasIsSecure(false), + ), + }, + // IMPORT + { + ResourceName: procedureModel.ResourceReference(), + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"arguments.0.arg_data_type"}, + ImportStateCheck: assert.AssertThatImport(t, + resourceassert.ImportedProcedureScalaResource(t, id.FullyQualifiedName()). 
+ HasFullyQualifiedNameString(id.FullyQualifiedName()), + assert.CheckImport(importchecks.TestCheckResourceAttrInstanceState(helpers.EncodeResourceIdentifier(id), "arguments.0.arg_name", argName)), + assert.CheckImport(importchecks.TestCheckResourceAttrInstanceState(helpers.EncodeResourceIdentifier(id), "arguments.0.arg_data_type", "VARCHAR(16777216)")), + assert.CheckImport(importchecks.TestCheckResourceAttrInstanceState(helpers.EncodeResourceIdentifier(id), "arguments.0.arg_default_value", "")), + ), + }, + // UPDATE WITHOUT RECREATION + { + ConfigPlanChecks: resource.ConfigPlanChecks{ + PreApply: []plancheck.PlanCheck{ + plancheck.ExpectResourceAction(procedureModelUpdateWithoutRecreation.ResourceReference(), plancheck.ResourceActionUpdate), + }, + }, + Config: config.FromModels(t, procedureModelUpdateWithoutRecreation), + Check: assert.AssertThat(t, + resourceassert.ProcedureScalaResource(t, procedureModelUpdateWithoutRecreation.ResourceReference()). + HasNameString(id.Name()). + HasIsSecureString(r.BooleanFalse). + HasImportsLength(2). + HasRuntimeVersionString("2.12"). + HasProcedureDefinitionString(definition). + HasCommentString("some other comment"). + HasProcedureLanguageString("SCALA"). + HasExecuteAsString(string(sdk.ExecuteAsOwner)). 
+ HasFullyQualifiedNameString(id.FullyQualifiedName()), + assert.Check(resource.TestCheckResourceAttr(procedureModelUpdateWithoutRecreation.ResourceReference(), "target_path.0.stage_location", stage.ID().FullyQualifiedName())), + assert.Check(resource.TestCheckResourceAttr(procedureModelUpdateWithoutRecreation.ResourceReference(), "target_path.0.path_on_stage", jarName)), + assert.Check(resource.TestCheckResourceAttr(procedureModelUpdateWithoutRecreation.ResourceReference(), "secrets.#", "1")), + assert.Check(resource.TestCheckResourceAttr(procedureModelUpdateWithoutRecreation.ResourceReference(), "secrets.0.secret_variable_name", "def")), + assert.Check(resource.TestCheckResourceAttr(procedureModelUpdateWithoutRecreation.ResourceReference(), "secrets.0.secret_id", secretId2.FullyQualifiedName())), + assert.Check(resource.TestCheckResourceAttr(procedureModelUpdateWithoutRecreation.ResourceReference(), "external_access_integrations.#", "1")), + assert.Check(resource.TestCheckResourceAttr(procedureModelUpdateWithoutRecreation.ResourceReference(), "external_access_integrations.0", externalAccessIntegration.Name())), + assert.Check(resource.TestCheckResourceAttr(procedureModelUpdateWithoutRecreation.ResourceReference(), "packages.#", "1")), + resourceshowoutputassert.ProcedureShowOutput(t, procedureModelUpdateWithoutRecreation.ResourceReference()). 
+ HasIsSecure(false), + ), + }, + }, + }) +} diff --git a/pkg/resources/procedure_sql.go b/pkg/resources/procedure_sql.go index d38717904a..290d20f279 100644 --- a/pkg/resources/procedure_sql.go +++ b/pkg/resources/procedure_sql.go @@ -69,6 +69,7 @@ func CreateContextProcedureSql(ctx context.Context, d *schema.ResourceData, meta errs := errors.Join( booleanStringAttributeCreateBuilder(d, "is_secure", request.WithSecure), attributeMappedValueCreateBuilder[string](d, "null_input_behavior", request.WithNullInputBehavior, sdk.ToNullInputBehavior), + attributeMappedValueCreateBuilder[string](d, "execute_as", request.WithExecuteAs, sdk.ToExecuteAs), stringAttributeCreateBuilder(d, "comment", request.WithComment), ) if errs != nil { @@ -115,6 +116,7 @@ func ReadContextProcedureSql(ctx context.Context, d *schema.ResourceData, meta a readFunctionOrProcedureArguments(d, allProcedureDetails.procedureDetails.NormalizedArguments), d.Set("return_type", allProcedureDetails.procedureDetails.ReturnDataType.ToSql()), // not reading null_input_behavior on purpose (handled as external change to show output) + // not reading execute_as on purpose (handled as external change to show output) d.Set("comment", allProcedureDetails.procedure.Description), setOptionalFromStringPtr(d, "procedure_definition", allProcedureDetails.procedureDetails.Body), d.Set("procedure_language", allProcedureDetails.procedureDetails.Language), diff --git a/pkg/resources/procedure_sql_acceptance_test.go b/pkg/resources/procedure_sql_acceptance_test.go new file mode 100644 index 0000000000..f00eb1003f --- /dev/null +++ b/pkg/resources/procedure_sql_acceptance_test.go @@ -0,0 +1,178 @@ +package resources_test + +import ( + "testing" + + acc "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance" + r "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/resources" + + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/bettertestspoc/assert" + 
"github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/bettertestspoc/assert/resourceassert" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/bettertestspoc/assert/resourceshowoutputassert" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/bettertestspoc/config" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/bettertestspoc/config/model" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/importchecks" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/testdatatypes" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/testenvs" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/helpers" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/provider/resources" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk" + "github.com/hashicorp/terraform-plugin-testing/helper/resource" + "github.com/hashicorp/terraform-plugin-testing/plancheck" + "github.com/hashicorp/terraform-plugin-testing/tfversion" +) + +func TestAcc_ProcedureSql_InlineBasic(t *testing.T) { + argName := "x" + dataType := testdatatypes.DataTypeVarchar_100 + + id := acc.TestClient().Ids.RandomSchemaObjectIdentifierWithArgumentsNewDataTypes(dataType) + idWithChangedNameButTheSameDataType := acc.TestClient().Ids.RandomSchemaObjectIdentifierWithArgumentsNewDataTypes(dataType) + + definition := acc.TestClient().Procedure.SampleSqlDefinitionWithArgument(t) + + procedureModel := model.ProcedureSqlBasicInline("w", id, dataType, definition). + WithArgument(argName, dataType) + procedureModelRenamed := model.ProcedureSqlBasicInline("w", idWithChangedNameButTheSameDataType, dataType, definition). 
+ WithArgument(argName, dataType) + + resource.Test(t, resource.TestCase{ + ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, + TerraformVersionChecks: []tfversion.TerraformVersionCheck{ + tfversion.RequireAbove(tfversion.Version1_5_0), + }, + PreCheck: func() { acc.TestAccPreCheck(t) }, + CheckDestroy: acc.CheckDestroy(t, resources.ProcedureSql), + Steps: []resource.TestStep{ + // CREATE BASIC + { + Config: config.FromModels(t, procedureModel), + Check: assert.AssertThat(t, + resourceassert.ProcedureSqlResource(t, procedureModel.ResourceReference()). + HasNameString(id.Name()). + HasIsSecureString(r.BooleanDefault). + HasCommentString(sdk.DefaultProcedureComment). + HasProcedureDefinitionString(definition). + HasProcedureLanguageString("SQL"). + HasFullyQualifiedNameString(id.FullyQualifiedName()), + resourceshowoutputassert.ProcedureShowOutput(t, procedureModel.ResourceReference()). + HasIsSecure(false), + assert.Check(resource.TestCheckResourceAttr(procedureModel.ResourceReference(), "arguments.0.arg_name", argName)), + assert.Check(resource.TestCheckResourceAttr(procedureModel.ResourceReference(), "arguments.0.arg_data_type", dataType.ToSql())), + assert.Check(resource.TestCheckResourceAttr(procedureModel.ResourceReference(), "arguments.0.arg_default_value", "")), + ), + }, + // IMPORT + { + ResourceName: procedureModel.ResourceReference(), + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"is_secure", "arguments.0.arg_data_type", "null_input_behavior", "execute_as"}, + ImportStateCheck: assert.AssertThatImport(t, + resourceassert.ImportedProcedureSqlResource(t, id.FullyQualifiedName()). 
+ HasFullyQualifiedNameString(id.FullyQualifiedName()), + assert.CheckImport(importchecks.TestCheckResourceAttrInstanceState(helpers.EncodeResourceIdentifier(id), "arguments.0.arg_name", argName)), + assert.CheckImport(importchecks.TestCheckResourceAttrInstanceState(helpers.EncodeResourceIdentifier(id), "arguments.0.arg_data_type", "VARCHAR(16777216)")), + assert.CheckImport(importchecks.TestCheckResourceAttrInstanceState(helpers.EncodeResourceIdentifier(id), "arguments.0.arg_default_value", "")), + ), + }, + // RENAME + { + Config: config.FromModels(t, procedureModelRenamed), + Check: assert.AssertThat(t, + resourceassert.ProcedureSqlResource(t, procedureModelRenamed.ResourceReference()). + HasNameString(idWithChangedNameButTheSameDataType.Name()). + HasFullyQualifiedNameString(idWithChangedNameButTheSameDataType.FullyQualifiedName()), + ), + }, + }, + }) +} + +func TestAcc_ProcedureSql_InlineFull(t *testing.T) { + _ = testenvs.GetOrSkipTest(t, testenvs.EnableAcceptance) + acc.TestAccPreCheck(t) + + argName := "x" + dataType := testdatatypes.DataTypeVarchar_100 + + id := acc.TestClient().Ids.RandomSchemaObjectIdentifierWithArgumentsNewDataTypes(dataType) + + definition := acc.TestClient().Procedure.SampleSqlDefinitionWithArgument(t) + + procedureModel := model.ProcedureSqlBasicInline("w", id, dataType, definition). + WithArgument(argName, dataType). + WithIsSecure("false"). + WithNullInputBehavior(string(sdk.NullInputBehaviorCalledOnNullInput)). + WithExecuteAs(string(sdk.ExecuteAsCaller)). + WithComment("some comment") + + procedureModelUpdateWithoutRecreation := model.ProcedureSqlBasicInline("w", id, dataType, definition). + WithArgument(argName, dataType). + WithIsSecure("false"). + WithNullInputBehavior(string(sdk.NullInputBehaviorCalledOnNullInput)). + WithExecuteAs(string(sdk.ExecuteAsOwner)). 
+ WithComment("some other comment") + + resource.Test(t, resource.TestCase{ + ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, + TerraformVersionChecks: []tfversion.TerraformVersionCheck{ + tfversion.RequireAbove(tfversion.Version1_5_0), + }, + PreCheck: func() { acc.TestAccPreCheck(t) }, + CheckDestroy: acc.CheckDestroy(t, resources.ProcedureSql), + Steps: []resource.TestStep{ + // CREATE BASIC + { + Config: config.FromModels(t, procedureModel), + Check: assert.AssertThat(t, + resourceassert.ProcedureSqlResource(t, procedureModel.ResourceReference()). + HasNameString(id.Name()). + HasIsSecureString(r.BooleanFalse). + HasProcedureDefinitionString(definition). + HasCommentString("some comment"). + HasProcedureLanguageString("SQL"). + HasExecuteAsString(string(sdk.ExecuteAsCaller)). + HasFullyQualifiedNameString(id.FullyQualifiedName()), + resourceshowoutputassert.ProcedureShowOutput(t, procedureModel.ResourceReference()). + HasIsSecure(false), + ), + }, + // IMPORT + { + ResourceName: procedureModel.ResourceReference(), + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"arguments.0.arg_data_type", "null_input_behavior"}, + ImportStateCheck: assert.AssertThatImport(t, + resourceassert.ImportedProcedureSqlResource(t, id.FullyQualifiedName()). 
+ HasFullyQualifiedNameString(id.FullyQualifiedName()), + assert.CheckImport(importchecks.TestCheckResourceAttrInstanceState(helpers.EncodeResourceIdentifier(id), "arguments.0.arg_name", argName)), + assert.CheckImport(importchecks.TestCheckResourceAttrInstanceState(helpers.EncodeResourceIdentifier(id), "arguments.0.arg_data_type", "VARCHAR(16777216)")), + assert.CheckImport(importchecks.TestCheckResourceAttrInstanceState(helpers.EncodeResourceIdentifier(id), "arguments.0.arg_default_value", "")), + ), + }, + // UPDATE WITHOUT RECREATION + { + ConfigPlanChecks: resource.ConfigPlanChecks{ + PreApply: []plancheck.PlanCheck{ + plancheck.ExpectResourceAction(procedureModelUpdateWithoutRecreation.ResourceReference(), plancheck.ResourceActionUpdate), + }, + }, + Config: config.FromModels(t, procedureModelUpdateWithoutRecreation), + Check: assert.AssertThat(t, + resourceassert.ProcedureSqlResource(t, procedureModelUpdateWithoutRecreation.ResourceReference()). + HasNameString(id.Name()). + HasIsSecureString(r.BooleanFalse). + HasProcedureDefinitionString(definition). + HasCommentString("some other comment"). + HasProcedureLanguageString("SQL"). + HasExecuteAsString(string(sdk.ExecuteAsOwner)). + HasFullyQualifiedNameString(id.FullyQualifiedName()), + resourceshowoutputassert.ProcedureShowOutput(t, procedureModelUpdateWithoutRecreation.ResourceReference()). 
+ HasIsSecure(false), + ), + }, + }, + }) +} diff --git a/pkg/sdk/common_types.go b/pkg/sdk/common_types.go index 0621d9d995..57215541ef 100644 --- a/pkg/sdk/common_types.go +++ b/pkg/sdk/common_types.go @@ -222,8 +222,8 @@ func ExecuteAsPointer(v ExecuteAs) *ExecuteAs { // TODO [SNOW-1348103]: fix SDK - constants should have only CALLER and OWNER (not the EXECUTE AS part) const ( - ExecuteAsCaller ExecuteAs = "EXECUTE AS CALLER" - ExecuteAsOwner ExecuteAs = "EXECUTE AS OWNER" + ExecuteAsCaller ExecuteAs = "CALLER" + ExecuteAsOwner ExecuteAs = "OWNER" ) func ToExecuteAs(value string) (ExecuteAs, error) { diff --git a/pkg/sdk/common_types_test.go b/pkg/sdk/common_types_test.go index 7df8be779e..ac935b09e1 100644 --- a/pkg/sdk/common_types_test.go +++ b/pkg/sdk/common_types_test.go @@ -273,7 +273,7 @@ func Test_ToExecuteAs(t *testing.T) { {Input: string(ExecuteAsOwner), Expected: ExecuteAsOwner}, {Name: "validation: incorrect execute as", Input: "incorrect", Error: "unknown execute as: incorrect"}, {Name: "validation: empty input", Input: "", Error: "unknown execute as: "}, - {Name: "validation: lower case input", Input: "execute as caller", Expected: ExecuteAsCaller}, + {Name: "validation: lower case input", Input: "caller", Expected: ExecuteAsCaller}, } for _, testCase := range testCases { diff --git a/pkg/sdk/functions_and_procedures_commons.go b/pkg/sdk/functions_and_procedures_commons.go index 35f4ab3dbb..574166dab5 100644 --- a/pkg/sdk/functions_and_procedures_commons.go +++ b/pkg/sdk/functions_and_procedures_commons.go @@ -127,7 +127,7 @@ func parseFunctionOrProcedureExternalAccessIntegrations(raw string) ([]AccountOb // TODO [before V1]: test func parseFunctionOrProcedurePackages(raw string) ([]string, error) { - log.Printf("[DEBUG] external access integrations: %s", raw) + log.Printf("[DEBUG] packages: %s", raw) return collections.Map(ParseCommaSeparatedStringArray(raw, true), strings.TrimSpace), nil } diff --git a/pkg/sdk/procedures_gen.go 
b/pkg/sdk/procedures_gen.go index 0dbbf8f5a1..88777d0e8f 100644 --- a/pkg/sdk/procedures_gen.go +++ b/pkg/sdk/procedures_gen.go @@ -52,7 +52,7 @@ type CreateForJavaProcedureOptions struct { Secrets []SecretReference `ddl:"parameter,parentheses" sql:"SECRETS"` TargetPath *string `ddl:"parameter,single_quotes" sql:"TARGET_PATH"` Comment *string `ddl:"parameter,single_quotes" sql:"COMMENT"` - ExecuteAs *ExecuteAs `ddl:"keyword"` + ExecuteAs *ExecuteAs `ddl:"parameter,no_quotes,no_equals" sql:"EXECUTE AS"` ProcedureDefinition *string `ddl:"parameter,no_equals" sql:"AS"` } @@ -110,7 +110,7 @@ type CreateForJavaScriptProcedureOptions struct { NullInputBehavior *NullInputBehavior `ddl:"keyword"` ReturnResultsBehavior *ReturnResultsBehavior `ddl:"keyword"` Comment *string `ddl:"parameter,single_quotes" sql:"COMMENT"` - ExecuteAs *ExecuteAs `ddl:"keyword"` + ExecuteAs *ExecuteAs `ddl:"parameter,no_quotes,no_equals" sql:"EXECUTE AS"` ProcedureDefinition string `ddl:"parameter,no_equals" sql:"AS"` } @@ -134,7 +134,7 @@ type CreateForPythonProcedureOptions struct { ExternalAccessIntegrations []AccountObjectIdentifier `ddl:"parameter,parentheses" sql:"EXTERNAL_ACCESS_INTEGRATIONS"` Secrets []SecretReference `ddl:"parameter,parentheses" sql:"SECRETS"` Comment *string `ddl:"parameter,single_quotes" sql:"COMMENT"` - ExecuteAs *ExecuteAs `ddl:"keyword"` + ExecuteAs *ExecuteAs `ddl:"parameter,no_quotes,no_equals" sql:"EXECUTE AS"` ProcedureDefinition *string `ddl:"parameter,no_equals" sql:"AS"` } @@ -159,7 +159,7 @@ type CreateForScalaProcedureOptions struct { Secrets []SecretReference `ddl:"parameter,parentheses" sql:"SECRETS"` TargetPath *string `ddl:"parameter,single_quotes" sql:"TARGET_PATH"` Comment *string `ddl:"parameter,single_quotes" sql:"COMMENT"` - ExecuteAs *ExecuteAs `ddl:"keyword"` + ExecuteAs *ExecuteAs `ddl:"parameter,no_quotes,no_equals" sql:"EXECUTE AS"` ProcedureDefinition *string `ddl:"parameter,no_equals" sql:"AS"` } @@ -177,7 +177,7 @@ type 
CreateForSQLProcedureOptions struct { NullInputBehavior *NullInputBehavior `ddl:"keyword"` ReturnResultsBehavior *ReturnResultsBehavior `ddl:"keyword"` Comment *string `ddl:"parameter,single_quotes" sql:"COMMENT"` - ExecuteAs *ExecuteAs `ddl:"keyword"` + ExecuteAs *ExecuteAs `ddl:"parameter,no_quotes,no_equals" sql:"EXECUTE AS"` ProcedureDefinition string `ddl:"parameter,no_equals" sql:"AS"` } @@ -198,7 +198,7 @@ type AlterProcedureOptions struct { Unset *ProcedureUnset `ddl:"list" sql:"UNSET"` SetTags []TagAssociation `ddl:"keyword" sql:"SET TAG"` UnsetTags []ObjectIdentifier `ddl:"keyword" sql:"UNSET TAG"` - ExecuteAs *ExecuteAs `ddl:"keyword"` + ExecuteAs *ExecuteAs `ddl:"parameter,no_quotes,no_equals" sql:"EXECUTE AS"` } type ProcedureSet struct { diff --git a/pkg/sdk/testint/functions_integration_test.go b/pkg/sdk/testint/functions_integration_test.go index 1036429d78..b28d3ef9e2 100644 --- a/pkg/sdk/testint/functions_integration_test.go +++ b/pkg/sdk/testint/functions_integration_test.go @@ -34,7 +34,7 @@ import ( // TODO [SNOW-1348103]: test secure // TODO [SNOW-1348103]: python aggregate func (100357 (P0000): Could not find accumulate method in function CVVEMHIT_06547800_08D6_DBCA_1AC7_5E422AFF8B39 with handler dump) // TODO [SNOW-1348103]: add test with multiple imports -// TODO [this PR]: test with multiple external access integrations and secrets +// TODO [SNOW-1348103]: test with multiple external access integrations and secrets func TestInt_Functions(t *testing.T) { client := testClient(t) ctx := context.Background() diff --git a/pkg/sdk/testint/procedures_integration_test.go b/pkg/sdk/testint/procedures_integration_test.go index 56f1a20248..b0a935f070 100644 --- a/pkg/sdk/testint/procedures_integration_test.go +++ b/pkg/sdk/testint/procedures_integration_test.go @@ -1811,7 +1811,7 @@ def filter_by_role(session, table_name, role): HasCreatedOnNotEmpty(). HasName(id.Name()). HasSchemaName(id.SchemaName()). 
- HasArgumentsRawContains(expectedReturnDataType.ToLegacyDataTypeSql()), + HasArgumentsRawContains(strings.ReplaceAll(expectedReturnDataType.ToLegacyDataTypeSql(), "TABLE(", "TABLE (")), ) assertions.AssertThatObject(t, objectassert.ProcedureDetails(t, id). diff --git a/templates/resources/function_java.md.tmpl b/templates/resources/function_java.md.tmpl new file mode 100644 index 0000000000..4092173dd0 --- /dev/null +++ b/templates/resources/function_java.md.tmpl @@ -0,0 +1,52 @@ +--- +page_title: "{{.Name}} {{.Type}} - {{.ProviderName}}" +subcategory: "" +description: |- +{{ if gt (len (split .Description "")) 1 -}} +{{ index (split .Description "") 1 | plainmarkdown | trimspace | prefixlines " " }} +{{- else -}} +{{ .Description | plainmarkdown | trimspace | prefixlines " " }} +{{- end }} +--- + +-> **Note** External changes to `is_secure`, `return_results_behavior`, and `null_input_behavior` are not currently supported. They will be handled in the following versions of the provider which may still affect this resource. + +-> **Note** `COPY GRANTS` and `OR REPLACE` are not currently supported. + +-> **Note** `RETURN... [[ NOT ] NULL]` is not currently supported. It will be improved in the following versions of the provider which may still affect this resource. + +-> **Note** Use of return type `TABLE` is currently limited. It will be improved in the following versions of the provider which may still affect this resource. + +-> **Note** Snowflake is not returning full data type information for arguments which may lead to unexpected plan outputs. Diff suppression for such cases will be improved. + +-> **Note** Snowflake is not returning the default values for arguments so argument's `arg_default_value` external changes cannot be tracked. + +-> **Note** Limit the use of special characters (`.`, `'`, `/`, `"`, `(`, `)`, `[`, `]`, `{`, `}`, ` `) in argument names, stage ids, and secret ids. It's best to limit to only alphanumeric and underscores. 
There is a lot of parsing of SHOW/DESCRIBE outputs involved and using special characters may limit the possibility to achieve the correct results. + +~> **Required warehouse** This resource may require active warehouse. Please, make sure you have either set a DEFAULT_WAREHOUSE for the user, or specified a warehouse in the provider configuration. + +# {{.Name}} ({{.Type}}) + +{{ .Description | trimspace }} + +{{ if .HasExample -}} +## Example Usage + +{{ tffile (printf "examples/resources/%s/resource.tf" .Name)}} +-> **Note** Instead of using fully_qualified_name, you can reference objects managed outside Terraform by constructing a correct ID, consult [identifiers guide](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/latest/docs/guides/identifiers#new-computed-fully-qualified-name-field-in-resources). + + +{{- end }} + +{{ .SchemaMarkdown | trimspace }} +{{- if .HasImport }} + +## Import + +Import is supported using the following syntax: + +{{ codefile "shell" (printf "examples/resources/%s/import.sh" .Name)}} +{{- end }} + +Note: Snowflake is not returning all information needed to populate the state correctly after import (e.g. data types with attributes like NUMBER(32, 10) are returned as NUMBER, default values for arguments are not returned at all). +Also, `ALTER` for functions is very limited so most of the attributes on this resource are marked as force new. Because of that, in multiple situations plan won't be empty after importing and manual state operations may be required. 
diff --git a/templates/resources/function_javascript.md.tmpl b/templates/resources/function_javascript.md.tmpl new file mode 100644 index 0000000000..4092173dd0 --- /dev/null +++ b/templates/resources/function_javascript.md.tmpl @@ -0,0 +1,52 @@ +--- +page_title: "{{.Name}} {{.Type}} - {{.ProviderName}}" +subcategory: "" +description: |- +{{ if gt (len (split .Description "")) 1 -}} +{{ index (split .Description "") 1 | plainmarkdown | trimspace | prefixlines " " }} +{{- else -}} +{{ .Description | plainmarkdown | trimspace | prefixlines " " }} +{{- end }} +--- + +-> **Note** External changes to `is_secure`, `return_results_behavior`, and `null_input_behavior` are not currently supported. They will be handled in the following versions of the provider which may still affect this resource. + +-> **Note** `COPY GRANTS` and `OR REPLACE` are not currently supported. + +-> **Note** `RETURN... [[ NOT ] NULL]` is not currently supported. It will be improved in the following versions of the provider which may still affect this resource. + +-> **Note** Use of return type `TABLE` is currently limited. It will be improved in the following versions of the provider which may still affect this resource. + +-> **Note** Snowflake is not returning full data type information for arguments which may lead to unexpected plan outputs. Diff suppression for such cases will be improved. + +-> **Note** Snowflake is not returning the default values for arguments so argument's `arg_default_value` external changes cannot be tracked. + +-> **Note** Limit the use of special characters (`.`, `'`, `/`, `"`, `(`, `)`, `[`, `]`, `{`, `}`, ` `) in argument names, stage ids, and secret ids. It's best to limit to only alphanumeric and underscores. There is a lot of parsing of SHOW/DESCRIBE outputs involved and using special characters may limit the possibility to achieve the correct results. + +~> **Required warehouse** This resource may require active warehouse. 
Please, make sure you have either set a DEFAULT_WAREHOUSE for the user, or specified a warehouse in the provider configuration. + +# {{.Name}} ({{.Type}}) + +{{ .Description | trimspace }} + +{{ if .HasExample -}} +## Example Usage + +{{ tffile (printf "examples/resources/%s/resource.tf" .Name)}} +-> **Note** Instead of using fully_qualified_name, you can reference objects managed outside Terraform by constructing a correct ID, consult [identifiers guide](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/latest/docs/guides/identifiers#new-computed-fully-qualified-name-field-in-resources). + + +{{- end }} + +{{ .SchemaMarkdown | trimspace }} +{{- if .HasImport }} + +## Import + +Import is supported using the following syntax: + +{{ codefile "shell" (printf "examples/resources/%s/import.sh" .Name)}} +{{- end }} + +Note: Snowflake is not returning all information needed to populate the state correctly after import (e.g. data types with attributes like NUMBER(32, 10) are returned as NUMBER, default values for arguments are not returned at all). +Also, `ALTER` for functions is very limited so most of the attributes on this resource are marked as force new. Because of that, in multiple situations plan won't be empty after importing and manual state operations may be required. diff --git a/templates/resources/function_python.md.tmpl b/templates/resources/function_python.md.tmpl new file mode 100644 index 0000000000..32911bea98 --- /dev/null +++ b/templates/resources/function_python.md.tmpl @@ -0,0 +1,54 @@ +--- +page_title: "{{.Name}} {{.Type}} - {{.ProviderName}}" +subcategory: "" +description: |- +{{ if gt (len (split .Description "")) 1 -}} +{{ index (split .Description "") 1 | plainmarkdown | trimspace | prefixlines " " }} +{{- else -}} +{{ .Description | plainmarkdown | trimspace | prefixlines " " }} +{{- end }} +--- + +-> **Note** External changes to `is_secure`, `return_results_behavior`, and `null_input_behavior` are not currently supported. 
They will be handled in the following versions of the provider which may still affect this resource. + +-> **Note** `COPY GRANTS` and `OR REPLACE` are not currently supported. + +-> **Note** `RETURN... [[ NOT ] NULL]` is not currently supported. It will be improved in the following versions of the provider which may still affect this resource. + +-> **Note** `is_aggregate` is not currently supported. It will be improved in the following versions of the provider which may still affect this resource. + +-> **Note** Use of return type `TABLE` is currently limited. It will be improved in the following versions of the provider which may still affect this resource. + +-> **Note** Snowflake is not returning full data type information for arguments which may lead to unexpected plan outputs. Diff suppression for such cases will be improved. + +-> **Note** Snowflake is not returning the default values for arguments so argument's `arg_default_value` external changes cannot be tracked. + +-> **Note** Limit the use of special characters (`.`, `'`, `/`, `"`, `(`, `)`, `[`, `]`, `{`, `}`, ` `) in argument names, stage ids, and secret ids. It's best to limit to only alphanumeric and underscores. There is a lot of parsing of SHOW/DESCRIBE outputs involved and using special characters may limit the possibility to achieve the correct results. + +~> **Required warehouse** This resource may require active warehouse. Please, make sure you have either set a DEFAULT_WAREHOUSE for the user, or specified a warehouse in the provider configuration. 
+ +# {{.Name}} ({{.Type}}) + +{{ .Description | trimspace }} + +{{ if .HasExample -}} +## Example Usage + +{{ tffile (printf "examples/resources/%s/resource.tf" .Name)}} +-> **Note** Instead of using fully_qualified_name, you can reference objects managed outside Terraform by constructing a correct ID, consult [identifiers guide](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/latest/docs/guides/identifiers#new-computed-fully-qualified-name-field-in-resources). + + +{{- end }} + +{{ .SchemaMarkdown | trimspace }} +{{- if .HasImport }} + +## Import + +Import is supported using the following syntax: + +{{ codefile "shell" (printf "examples/resources/%s/import.sh" .Name)}} +{{- end }} + +Note: Snowflake is not returning all information needed to populate the state correctly after import (e.g. data types with attributes like NUMBER(32, 10) are returned as NUMBER, default values for arguments are not returned at all). +Also, `ALTER` for functions is very limited so most of the attributes on this resource are marked as force new. Because of that, in multiple situations plan won't be empty after importing and manual state operations may be required. diff --git a/templates/resources/function_scala.md.tmpl b/templates/resources/function_scala.md.tmpl new file mode 100644 index 0000000000..f452e9136e --- /dev/null +++ b/templates/resources/function_scala.md.tmpl @@ -0,0 +1,50 @@ +--- +page_title: "{{.Name}} {{.Type}} - {{.ProviderName}}" +subcategory: "" +description: |- +{{ if gt (len (split .Description "")) 1 -}} +{{ index (split .Description "") 1 | plainmarkdown | trimspace | prefixlines " " }} +{{- else -}} +{{ .Description | plainmarkdown | trimspace | prefixlines " " }} +{{- end }} +--- + +-> **Note** External changes to `is_secure`, `return_results_behavior`, and `null_input_behavior` are not currently supported. They will be handled in the following versions of the provider which may still affect this resource. 
+ +-> **Note** `COPY GRANTS` and `OR REPLACE` are not currently supported. + +-> **Note** `RETURN... [[ NOT ] NULL]` is not currently supported. It will be improved in the following versions of the provider which may still affect this resource. + +-> **Note** Snowflake is not returning full data type information for arguments which may lead to unexpected plan outputs. Diff suppression for such cases will be improved. + +-> **Note** Snowflake is not returning the default values for arguments so argument's `arg_default_value` external changes cannot be tracked. + +-> **Note** Limit the use of special characters (`.`, `'`, `/`, `"`, `(`, `)`, `[`, `]`, `{`, `}`, ` `) in argument names, stage ids, and secret ids. It's best to limit to only alphanumeric and underscores. There is a lot of parsing of SHOW/DESCRIBE outputs involved and using special characters may limit the possibility to achieve the correct results. + +~> **Required warehouse** This resource may require active warehouse. Please, make sure you have either set a DEFAULT_WAREHOUSE for the user, or specified a warehouse in the provider configuration. + +# {{.Name}} ({{.Type}}) + +{{ .Description | trimspace }} + +{{ if .HasExample -}} +## Example Usage + +{{ tffile (printf "examples/resources/%s/resource.tf" .Name)}} +-> **Note** Instead of using fully_qualified_name, you can reference objects managed outside Terraform by constructing a correct ID, consult [identifiers guide](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/latest/docs/guides/identifiers#new-computed-fully-qualified-name-field-in-resources). + + +{{- end }} + +{{ .SchemaMarkdown | trimspace }} +{{- if .HasImport }} + +## Import + +Import is supported using the following syntax: + +{{ codefile "shell" (printf "examples/resources/%s/import.sh" .Name)}} +{{- end }} + +Note: Snowflake is not returning all information needed to populate the state correctly after import (e.g. 
data types with attributes like NUMBER(32, 10) are returned as NUMBER, default values for arguments are not returned at all). +Also, `ALTER` for functions is very limited so most of the attributes on this resource are marked as force new. Because of that, in multiple situations plan won't be empty after importing and manual state operations may be required. diff --git a/templates/resources/function_sql.md.tmpl b/templates/resources/function_sql.md.tmpl new file mode 100644 index 0000000000..63cede4080 --- /dev/null +++ b/templates/resources/function_sql.md.tmpl @@ -0,0 +1,52 @@ +--- +page_title: "{{.Name}} {{.Type}} - {{.ProviderName}}" +subcategory: "" +description: |- +{{ if gt (len (split .Description "")) 1 -}} +{{ index (split .Description "") 1 | plainmarkdown | trimspace | prefixlines " " }} +{{- else -}} +{{ .Description | plainmarkdown | trimspace | prefixlines " " }} +{{- end }} +--- + +-> **Note** External changes to `is_secure` and `return_results_behavior` are not currently supported. They will be handled in the following versions of the provider which may still affect this resource. + +-> **Note** `COPY GRANTS` and `OR REPLACE` are not currently supported. + +-> **Note** `MEMOIZABLE` is not currently supported. It will be improved in the following versions of the provider which may still affect this resource. + +-> **Note** Use of return type `TABLE` is currently limited. It will be improved in the following versions of the provider which may still affect this resource. + +-> **Note** Snowflake is not returning full data type information for arguments which may lead to unexpected plan outputs. Diff suppression for such cases will be improved. + +-> **Note** Snowflake is not returning the default values for arguments so argument's `arg_default_value` external changes cannot be tracked. + +-> **Note** Limit the use of special characters (`.`, `'`, `/`, `"`, `(`, `)`, `[`, `]`, `{`, `}`, ` `) in argument names, stage ids, and secret ids. 
It's best to limit to only alphanumeric and underscores. There is a lot of parsing of SHOW/DESCRIBE outputs involved and using special characters may limit the possibility to achieve the correct results. + +~> **Required warehouse** This resource may require active warehouse. Please, make sure you have either set a DEFAULT_WAREHOUSE for the user, or specified a warehouse in the provider configuration. + +# {{.Name}} ({{.Type}}) + +{{ .Description | trimspace }} + +{{ if .HasExample -}} +## Example Usage + +{{ tffile (printf "examples/resources/%s/resource.tf" .Name)}} +-> **Note** Instead of using fully_qualified_name, you can reference objects managed outside Terraform by constructing a correct ID, consult [identifiers guide](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/latest/docs/guides/identifiers#new-computed-fully-qualified-name-field-in-resources). + + +{{- end }} + +{{ .SchemaMarkdown | trimspace }} +{{- if .HasImport }} + +## Import + +Import is supported using the following syntax: + +{{ codefile "shell" (printf "examples/resources/%s/import.sh" .Name)}} +{{- end }} + +Note: Snowflake is not returning all information needed to populate the state correctly after import (e.g. data types with attributes like NUMBER(32, 10) are returned as NUMBER, default values for arguments are not returned at all). +Also, `ALTER` for functions is very limited so most of the attributes on this resource are marked as force new. Because of that, in multiple situations plan won't be empty after importing and manual state operations may be required. 
diff --git a/templates/resources/procedure_java.md.tmpl b/templates/resources/procedure_java.md.tmpl new file mode 100644 index 0000000000..e67c6205b5 --- /dev/null +++ b/templates/resources/procedure_java.md.tmpl @@ -0,0 +1,52 @@ +--- +page_title: "{{.Name}} {{.Type}} - {{.ProviderName}}" +subcategory: "" +description: |- +{{ if gt (len (split .Description "")) 1 -}} +{{ index (split .Description "") 1 | plainmarkdown | trimspace | prefixlines " " }} +{{- else -}} +{{ .Description | plainmarkdown | trimspace | prefixlines " " }} +{{- end }} +--- + +-> **Note** External changes to `is_secure` and `null_input_behavior` are not currently supported. They will be handled in the following versions of the provider which may still affect this resource. + +-> **Note** `COPY GRANTS` and `OR REPLACE` are not currently supported. + +-> **Note** `RETURN... [[ NOT ] NULL]` is not currently supported. It will be improved in the following versions of the provider which may still affect this resource. + +-> **Note** Use of return type `TABLE` is currently limited. It will be improved in the following versions of the provider which may still affect this resource. + +-> **Note** Snowflake is not returning full data type information for arguments which may lead to unexpected plan outputs. Diff suppression for such cases will be improved. + +-> **Note** Snowflake is not returning the default values for arguments so argument's `arg_default_value` external changes cannot be tracked. + +-> **Note** Limit the use of special characters (`.`, `'`, `/`, `"`, `(`, `)`, `[`, `]`, `{`, `}`, ` `) in argument names, stage ids, and secret ids. It's best to limit to only alphanumeric and underscores. There is a lot of parsing of SHOW/DESCRIBE outputs involved and using special characters may limit the possibility to achieve the correct results. + +~> **Required warehouse** This resource may require active warehouse. 
Please, make sure you have either set a DEFAULT_WAREHOUSE for the user, or specified a warehouse in the provider configuration. + +# {{.Name}} ({{.Type}}) + +{{ .Description | trimspace }} + +{{ if .HasExample -}} +## Example Usage + +{{ tffile (printf "examples/resources/%s/resource.tf" .Name)}} +-> **Note** Instead of using fully_qualified_name, you can reference objects managed outside Terraform by constructing a correct ID, consult [identifiers guide](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/latest/docs/guides/identifiers#new-computed-fully-qualified-name-field-in-resources). + + +{{- end }} + +{{ .SchemaMarkdown | trimspace }} +{{- if .HasImport }} + +## Import + +Import is supported using the following syntax: + +{{ codefile "shell" (printf "examples/resources/%s/import.sh" .Name)}} +{{- end }} + +Note: Snowflake is not returning all information needed to populate the state correctly after import (e.g. data types with attributes like NUMBER(32, 10) are returned as NUMBER, default values for arguments are not returned at all). +Also, `ALTER` for functions is very limited so most of the attributes on this resource are marked as force new. Because of that, in multiple situations plan won't be empty after importing and manual state operations may be required. diff --git a/templates/resources/procedure_javascript.md.tmpl b/templates/resources/procedure_javascript.md.tmpl new file mode 100644 index 0000000000..e67c6205b5 --- /dev/null +++ b/templates/resources/procedure_javascript.md.tmpl @@ -0,0 +1,52 @@ +--- +page_title: "{{.Name}} {{.Type}} - {{.ProviderName}}" +subcategory: "" +description: |- +{{ if gt (len (split .Description "")) 1 -}} +{{ index (split .Description "") 1 | plainmarkdown | trimspace | prefixlines " " }} +{{- else -}} +{{ .Description | plainmarkdown | trimspace | prefixlines " " }} +{{- end }} +--- + +-> **Note** External changes to `is_secure` and `null_input_behavior` are not currently supported. 
They will be handled in the following versions of the provider which may still affect this resource. + +-> **Note** `COPY GRANTS` and `OR REPLACE` are not currently supported. + +-> **Note** `RETURN... [[ NOT ] NULL]` is not currently supported. It will be improved in the following versions of the provider which may still affect this resource. + +-> **Note** Use of return type `TABLE` is currently limited. It will be improved in the following versions of the provider which may still affect this resource. + +-> **Note** Snowflake is not returning full data type information for arguments which may lead to unexpected plan outputs. Diff suppression for such cases will be improved. + +-> **Note** Snowflake is not returning the default values for arguments so argument's `arg_default_value` external changes cannot be tracked. + +-> **Note** Limit the use of special characters (`.`, `'`, `/`, `"`, `(`, `)`, `[`, `]`, `{`, `}`, ` `) in argument names, stage ids, and secret ids. It's best to limit to only alphanumeric and underscores. There is a lot of parsing of SHOW/DESCRIBE outputs involved and using special characters may limit the possibility to achieve the correct results. + +~> **Required warehouse** This resource may require active warehouse. Please, make sure you have either set a DEFAULT_WAREHOUSE for the user, or specified a warehouse in the provider configuration. + +# {{.Name}} ({{.Type}}) + +{{ .Description | trimspace }} + +{{ if .HasExample -}} +## Example Usage + +{{ tffile (printf "examples/resources/%s/resource.tf" .Name)}} +-> **Note** Instead of using fully_qualified_name, you can reference objects managed outside Terraform by constructing a correct ID, consult [identifiers guide](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/latest/docs/guides/identifiers#new-computed-fully-qualified-name-field-in-resources). 
+ + +{{- end }} + +{{ .SchemaMarkdown | trimspace }} +{{- if .HasImport }} + +## Import + +Import is supported using the following syntax: + +{{ codefile "shell" (printf "examples/resources/%s/import.sh" .Name)}} +{{- end }} + +Note: Snowflake is not returning all information needed to populate the state correctly after import (e.g. data types with attributes like NUMBER(32, 10) are returned as NUMBER, default values for arguments are not returned at all). +Also, `ALTER` for functions is very limited so most of the attributes on this resource are marked as force new. Because of that, in multiple situations plan won't be empty after importing and manual state operations may be required. diff --git a/templates/resources/procedure_python.md.tmpl b/templates/resources/procedure_python.md.tmpl new file mode 100644 index 0000000000..c0d68e135d --- /dev/null +++ b/templates/resources/procedure_python.md.tmpl @@ -0,0 +1,54 @@ +--- +page_title: "{{.Name}} {{.Type}} - {{.ProviderName}}" +subcategory: "" +description: |- +{{ if gt (len (split .Description "")) 1 -}} +{{ index (split .Description "") 1 | plainmarkdown | trimspace | prefixlines " " }} +{{- else -}} +{{ .Description | plainmarkdown | trimspace | prefixlines " " }} +{{- end }} +--- + +-> **Note** External changes to `is_secure` and `null_input_behavior` are not currently supported. They will be handled in the following versions of the provider which may still affect this resource. + +-> **Note** `COPY GRANTS` and `OR REPLACE` are not currently supported. + +-> **Note** `RETURN... [[ NOT ] NULL]` is not currently supported. It will be improved in the following versions of the provider which may still affect this resource. + +-> **Note** `is_aggregate` is not currently supported. It will be improved in the following versions of the provider which may still affect this resource. + +-> **Note** Use of return type `TABLE` is currently limited. 
It will be improved in the following versions of the provider which may still affect this resource. + +-> **Note** Snowflake is not returning full data type information for arguments which may lead to unexpected plan outputs. Diff suppression for such cases will be improved. + +-> **Note** Snowflake is not returning the default values for arguments so argument's `arg_default_value` external changes cannot be tracked. + +-> **Note** Limit the use of special characters (`.`, `'`, `/`, `"`, `(`, `)`, `[`, `]`, `{`, `}`, ` `) in argument names, stage ids, and secret ids. It's best to limit to only alphanumeric and underscores. There is a lot of parsing of SHOW/DESCRIBE outputs involved and using special characters may limit the possibility to achieve the correct results. + +~> **Required warehouse** This resource may require active warehouse. Please, make sure you have either set a DEFAULT_WAREHOUSE for the user, or specified a warehouse in the provider configuration. + +# {{.Name}} ({{.Type}}) + +{{ .Description | trimspace }} + +{{ if .HasExample -}} +## Example Usage + +{{ tffile (printf "examples/resources/%s/resource.tf" .Name)}} +-> **Note** Instead of using fully_qualified_name, you can reference objects managed outside Terraform by constructing a correct ID, consult [identifiers guide](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/latest/docs/guides/identifiers#new-computed-fully-qualified-name-field-in-resources). + + +{{- end }} + +{{ .SchemaMarkdown | trimspace }} +{{- if .HasImport }} + +## Import + +Import is supported using the following syntax: + +{{ codefile "shell" (printf "examples/resources/%s/import.sh" .Name)}} +{{- end }} + +Note: Snowflake is not returning all information needed to populate the state correctly after import (e.g. data types with attributes like NUMBER(32, 10) are returned as NUMBER, default values for arguments are not returned at all). 
+Also, `ALTER` for procedures is very limited so most of the attributes on this resource are marked as force new. Because of that, in multiple situations plan won't be empty after importing and manual state operations may be required. diff --git a/templates/resources/procedure_scala.md.tmpl b/templates/resources/procedure_scala.md.tmpl new file mode 100644 index 0000000000..cc0445d06e --- /dev/null +++ b/templates/resources/procedure_scala.md.tmpl @@ -0,0 +1,50 @@ +--- +page_title: "{{.Name}} {{.Type}} - {{.ProviderName}}" +subcategory: "" +description: |- +{{ if gt (len (split .Description "")) 1 -}} +{{ index (split .Description "") 1 | plainmarkdown | trimspace | prefixlines " " }} +{{- else -}} +{{ .Description | plainmarkdown | trimspace | prefixlines " " }} +{{- end }} +--- + +-> **Note** External changes to `is_secure` and `null_input_behavior` are not currently supported. They will be handled in the following versions of the provider which may still affect this resource. + +-> **Note** `COPY GRANTS` and `OR REPLACE` are not currently supported. + +-> **Note** `RETURN... [[ NOT ] NULL]` is not currently supported. It will be improved in the following versions of the provider which may still affect this resource. + +-> **Note** Snowflake is not returning full data type information for arguments which may lead to unexpected plan outputs. Diff suppression for such cases will be improved. + +-> **Note** Snowflake is not returning the default values for arguments so argument's `arg_default_value` external changes cannot be tracked. + +-> **Note** Limit the use of special characters (`.`, `'`, `/`, `"`, `(`, `)`, `[`, `]`, `{`, `}`, ` `) in argument names, stage ids, and secret ids. It's best to limit to only alphanumeric and underscores. There is a lot of parsing of SHOW/DESCRIBE outputs involved and using special characters may limit the possibility to achieve the correct results. + +~> **Required warehouse** This resource may require active warehouse.
Please, make sure you have either set a DEFAULT_WAREHOUSE for the user, or specified a warehouse in the provider configuration. + +# {{.Name}} ({{.Type}}) + +{{ .Description | trimspace }} + +{{ if .HasExample -}} +## Example Usage + +{{ tffile (printf "examples/resources/%s/resource.tf" .Name)}} +-> **Note** Instead of using fully_qualified_name, you can reference objects managed outside Terraform by constructing a correct ID, consult [identifiers guide](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/latest/docs/guides/identifiers#new-computed-fully-qualified-name-field-in-resources). + + +{{- end }} + +{{ .SchemaMarkdown | trimspace }} +{{- if .HasImport }} + +## Import + +Import is supported using the following syntax: + +{{ codefile "shell" (printf "examples/resources/%s/import.sh" .Name)}} +{{- end }} + +Note: Snowflake is not returning all information needed to populate the state correctly after import (e.g. data types with attributes like NUMBER(32, 10) are returned as NUMBER, default values for arguments are not returned at all). +Also, `ALTER` for functions is very limited so most of the attributes on this resource are marked as force new. Because of that, in multiple situations plan won't be empty after importing and manual state operations may be required. diff --git a/templates/resources/procedure_sql.md.tmpl b/templates/resources/procedure_sql.md.tmpl new file mode 100644 index 0000000000..a72c04a1c2 --- /dev/null +++ b/templates/resources/procedure_sql.md.tmpl @@ -0,0 +1,50 @@ +--- +page_title: "{{.Name}} {{.Type}} - {{.ProviderName}}" +subcategory: "" +description: |- +{{ if gt (len (split .Description "")) 1 -}} +{{ index (split .Description "") 1 | plainmarkdown | trimspace | prefixlines " " }} +{{- else -}} +{{ .Description | plainmarkdown | trimspace | prefixlines " " }} +{{- end }} +--- + +-> **Note** External changes to `is_secure` are not currently supported. 
They will be handled in the following versions of the provider which may still affect this resource. + +-> **Note** `COPY GRANTS` and `OR REPLACE` are not currently supported. + +-> **Note** Use of return type `TABLE` is currently limited. It will be improved in the following versions of the provider which may still affect this resource. + +-> **Note** Snowflake is not returning full data type information for arguments which may lead to unexpected plan outputs. Diff suppression for such cases will be improved. + +-> **Note** Snowflake is not returning the default values for arguments so argument's `arg_default_value` external changes cannot be tracked. + +-> **Note** Limit the use of special characters (`.`, `'`, `/`, `"`, `(`, `)`, `[`, `]`, `{`, `}`, ` `) in argument names, stage ids, and secret ids. It's best to limit to only alphanumeric and underscores. There is a lot of parsing of SHOW/DESCRIBE outputs involved and using special characters may limit the possibility to achieve the correct results. + +~> **Required warehouse** This resource may require active warehouse. Please, make sure you have either set a DEFAULT_WAREHOUSE for the user, or specified a warehouse in the provider configuration. + +# {{.Name}} ({{.Type}}) + +{{ .Description | trimspace }} + +{{ if .HasExample -}} +## Example Usage + +{{ tffile (printf "examples/resources/%s/resource.tf" .Name)}} +-> **Note** Instead of using fully_qualified_name, you can reference objects managed outside Terraform by constructing a correct ID, consult [identifiers guide](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/latest/docs/guides/identifiers#new-computed-fully-qualified-name-field-in-resources). 
+ + +{{- end }} + +{{ .SchemaMarkdown | trimspace }} +{{- if .HasImport }} + +## Import + +Import is supported using the following syntax: + +{{ codefile "shell" (printf "examples/resources/%s/import.sh" .Name)}} +{{- end }} + +Note: Snowflake is not returning all information needed to populate the state correctly after import (e.g. data types with attributes like NUMBER(32, 10) are returned as NUMBER, default values for arguments are not returned at all). +Also, `ALTER` for functions is very limited so most of the attributes on this resource are marked as force new. Because of that, in multiple situations plan won't be empty after importing and manual state operations may be required. From 5139b1a175ffbef63a561577d2cd1e5a1a04e597 Mon Sep 17 00:00:00 2001 From: "snowflake-release-please[bot]" <105954990+snowflake-release-please[bot]@users.noreply.github.com> Date: Thu, 12 Dec 2024 20:42:17 +0100 Subject: [PATCH 13/20] chore(main): release 0.100.0 (#3237) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit :robot: I have created a release *beep* *boop* --- ## [0.100.0](https://github.com/Snowflake-Labs/terraform-provider-snowflake/compare/v0.99.0...v0.100.0) (2024-12-12) ### 🎉 **What's new:** * Account v1 readiness ([#3236](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/3236)) ([5df33a8](https://github.com/Snowflake-Labs/terraform-provider-snowflake/commit/5df33a8c1abe38c29124bac1e03727202c556347)) * Account v1 readiness generated files ([#3242](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/3242)) ([3df59dd](https://github.com/Snowflake-Labs/terraform-provider-snowflake/commit/3df59dd51b53acae9155b732811cfda56d7f20b8)) * Account v1 readiness resource ([#3252](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/3252)) ([8f5698d](https://github.com/Snowflake-Labs/terraform-provider-snowflake/commit/8f5698dbce3325461d572c4029ef2dbc364e819b)) * Add a new account roles 
data source ([#3257](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/3257)) ([b3d6b9e](https://github.com/Snowflake-Labs/terraform-provider-snowflake/commit/b3d6b9e5b4f327b186161f50dc9ac732d199fb19)) * Add account data source ([#3261](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/3261)) ([6087fc9](https://github.com/Snowflake-Labs/terraform-provider-snowflake/commit/6087fc9fdb2467e022ec7489137e7f5a5fe1ff25)) * Add all other functions and procedures implementations ([#3275](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/3275)) ([7a6f68d](https://github.com/Snowflake-Labs/terraform-provider-snowflake/commit/7a6f68df2fb0a0a4696a5442569344039a839c27)) * Basic functions implementation ([#3269](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/3269)) ([6d4a103](https://github.com/Snowflake-Labs/terraform-provider-snowflake/commit/6d4a10364276e92fa791eaa022c3bd7bce16228d)) * Basic procedures implementation ([#3271](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/3271)) ([933335f](https://github.com/Snowflake-Labs/terraform-provider-snowflake/commit/933335f56d1e53bf3e95d1f552672f35425b4878)) * Docs, test, and missing parameter ([#3280](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/3280)) ([10517f3](https://github.com/Snowflake-Labs/terraform-provider-snowflake/commit/10517f337c6b22d5f7f2a4f6c747b6fd2d2f47e9)) * Functions and procedures schemas and generated sources ([#3262](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/3262)) ([9b70f87](https://github.com/Snowflake-Labs/terraform-provider-snowflake/commit/9b70f872ca799126bc2051b4ed70160f868ac267)) * Functions sdk update ([#3254](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/3254)) ([fc1eace](https://github.com/Snowflake-Labs/terraform-provider-snowflake/commit/fc1eace306e8d919c3349d56480fa3386ca664af)) * Handle missing fields in function and 
procedure ([#3273](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/3273)) ([53e7a0a](https://github.com/Snowflake-Labs/terraform-provider-snowflake/commit/53e7a0aea3350e9e03a804d67e7df796f15bff3a)) * Procedures schemas and generated sources ([#3263](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/3263)) ([211ad46](https://github.com/Snowflake-Labs/terraform-provider-snowflake/commit/211ad46223f1bdf03b20cc7a06110bfce18a967e)) * Procedures sdk update ([#3255](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/3255)) ([682606a](https://github.com/Snowflake-Labs/terraform-provider-snowflake/commit/682606adea5e40befa7e599ced5aa7dc8570f80a)) * Rework account parameter resource ([#3264](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/3264)) ([15aa9c2](https://github.com/Snowflake-Labs/terraform-provider-snowflake/commit/15aa9c2c94d80ae1d299a333b8035e38de6a6dfc)) * Rework data types ([#3244](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/3244)) ([05ada91](https://github.com/Snowflake-Labs/terraform-provider-snowflake/commit/05ada917414ea7c574be3974c7de4f09535961fd)) * support table data type ([#3274](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/3274)) ([13401d5](https://github.com/Snowflake-Labs/terraform-provider-snowflake/commit/13401d5fff320eedcf40eed7c0831154cc6cc13a)) * Tag association v1 readiness ([#3210](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/3210)) ([04f6d54](https://github.com/Snowflake-Labs/terraform-provider-snowflake/commit/04f6d54a83cf4e9ea4b292087eefa056114eb5b5)) * Test imports and small fixes ([#3276](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/3276)) ([a712195](https://github.com/Snowflake-Labs/terraform-provider-snowflake/commit/a7121952892847f61e24e7a7a4fe78c38a450985)) * Unsafe execute v1 readiness 
([#3266](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/3266)) ([c4f1e8f](https://github.com/Snowflake-Labs/terraform-provider-snowflake/commit/c4f1e8fd55150e40d8a556580016ff83fe65bdaf)) * Use new data types in sql builder for functions and procedures ([#3247](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/3247)) ([69f677a](https://github.com/Snowflake-Labs/terraform-provider-snowflake/commit/69f677a6f86faa79cdece4d422eb61284c1599a6)) ### 🔧 **Misc** * Add ShowByID filtering generation ([#3227](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/3227)) ([548ec42](https://github.com/Snowflake-Labs/terraform-provider-snowflake/commit/548ec42ae7bcb8daa038de4cb2f81ced9c028f2d)) * Address small task-related todos ([#3243](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/3243)) ([40de9ae](https://github.com/Snowflake-Labs/terraform-provider-snowflake/commit/40de9ae93796afbc3091aa2fbb2c5dfba71f911c)) * Apply masking ([#3234](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/3234)) ([c209a8a](https://github.com/Snowflake-Labs/terraform-provider-snowflake/commit/c209a8ae6c15fa9515e933d18add962070b60257)) * fix missing references in toOpts and changes with newlines ([#3240](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/3240)) ([246547f](https://github.com/Snowflake-Labs/terraform-provider-snowflake/commit/246547f8eb13118a325881630f33433b3f5d8f0a)) * function tests ([#3279](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/3279)) ([5af6efb](https://github.com/Snowflake-Labs/terraform-provider-snowflake/commit/5af6efb08c479edbaea54f87f79672c802edcc86)) * Improve config builders ([#3207](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/3207)) ([425787c](https://github.com/Snowflake-Labs/terraform-provider-snowflake/commit/425787c5938e88895af1157f505889611bdef398)) * Revert to proper env
([#3238](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/3238)) ([5d4ed3b](https://github.com/Snowflake-Labs/terraform-provider-snowflake/commit/5d4ed3bc233a77196f01351d9c972bb56730298e)) * Use service user for ci ([#3228](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/3228)) ([2fb50d7](https://github.com/Snowflake-Labs/terraform-provider-snowflake/commit/2fb50d7b5beb1f361d3c761b344bab3216f6ea59)) ### 🐛 **Bug fixes:** * Make blocked_roles_field optional in OAuth security integrations ([#3267](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/3267)) ([7197b57](https://github.com/Snowflake-Labs/terraform-provider-snowflake/commit/7197b57c5dd75be34fc77eb82aabbd091074b809)) * Minor fixes ([#3231](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/3231)) ([1863bf6](https://github.com/Snowflake-Labs/terraform-provider-snowflake/commit/1863bf697f05177f27c351c0687c4bee24fe2c1b)) * Minor fixes 2 ([#3230](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/3230)) ([73b7e74](https://github.com/Snowflake-Labs/terraform-provider-snowflake/commit/73b7e74bf44b1ae6ddc78cac752f2b7febb836cd)) --- This PR was generated with [Release Please](https://github.com/googleapis/release-please). See [documentation](https://github.com/googleapis/release-please#release-please). 
Co-authored-by: snowflake-release-please[bot] <105954990+snowflake-release-please[bot]@users.noreply.github.com> --- CHANGELOG.md | 46 ++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 46 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 233351f618..ae66222e55 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,51 @@ # Changelog +## [0.100.0](https://github.com/Snowflake-Labs/terraform-provider-snowflake/compare/v0.99.0...v0.100.0) (2024-12-12) + + +### 🎉 **What's new:** + +* Account v1 readiness ([#3236](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/3236)) ([5df33a8](https://github.com/Snowflake-Labs/terraform-provider-snowflake/commit/5df33a8c1abe38c29124bac1e03727202c556347)) +* Account v1 readiness generated files ([#3242](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/3242)) ([3df59dd](https://github.com/Snowflake-Labs/terraform-provider-snowflake/commit/3df59dd51b53acae9155b732811cfda56d7f20b8)) +* Account v1 readiness resource ([#3252](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/3252)) ([8f5698d](https://github.com/Snowflake-Labs/terraform-provider-snowflake/commit/8f5698dbce3325461d572c4029ef2dbc364e819b)) +* Add a new account roles data source ([#3257](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/3257)) ([b3d6b9e](https://github.com/Snowflake-Labs/terraform-provider-snowflake/commit/b3d6b9e5b4f327b186161f50dc9ac732d199fb19)) +* Add account data source ([#3261](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/3261)) ([6087fc9](https://github.com/Snowflake-Labs/terraform-provider-snowflake/commit/6087fc9fdb2467e022ec7489137e7f5a5fe1ff25)) +* Add all other functions and procedures implementations ([#3275](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/3275)) ([7a6f68d](https://github.com/Snowflake-Labs/terraform-provider-snowflake/commit/7a6f68df2fb0a0a4696a5442569344039a839c27)) +* Basic 
functions implementation ([#3269](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/3269)) ([6d4a103](https://github.com/Snowflake-Labs/terraform-provider-snowflake/commit/6d4a10364276e92fa791eaa022c3bd7bce16228d)) +* Basic procedures implementation ([#3271](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/3271)) ([933335f](https://github.com/Snowflake-Labs/terraform-provider-snowflake/commit/933335f56d1e53bf3e95d1f552672f35425b4878)) +* Docs, test, and missing parameter ([#3280](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/3280)) ([10517f3](https://github.com/Snowflake-Labs/terraform-provider-snowflake/commit/10517f337c6b22d5f7f2a4f6c747b6fd2d2f47e9)) +* Functions and procedures schemas and generated sources ([#3262](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/3262)) ([9b70f87](https://github.com/Snowflake-Labs/terraform-provider-snowflake/commit/9b70f872ca799126bc2051b4ed70160f868ac267)) +* Functions sdk update ([#3254](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/3254)) ([fc1eace](https://github.com/Snowflake-Labs/terraform-provider-snowflake/commit/fc1eace306e8d919c3349d56480fa3386ca664af)) +* Handle missing fields in function and procedure ([#3273](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/3273)) ([53e7a0a](https://github.com/Snowflake-Labs/terraform-provider-snowflake/commit/53e7a0aea3350e9e03a804d67e7df796f15bff3a)) +* Procedures schemas and generated sources ([#3263](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/3263)) ([211ad46](https://github.com/Snowflake-Labs/terraform-provider-snowflake/commit/211ad46223f1bdf03b20cc7a06110bfce18a967e)) +* Procedures sdk update ([#3255](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/3255)) ([682606a](https://github.com/Snowflake-Labs/terraform-provider-snowflake/commit/682606adea5e40befa7e599ced5aa7dc8570f80a)) +* Rework account 
parameter resource ([#3264](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/3264)) ([15aa9c2](https://github.com/Snowflake-Labs/terraform-provider-snowflake/commit/15aa9c2c94d80ae1d299a333b8035e38de6a6dfc)) +* Rework data types ([#3244](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/3244)) ([05ada91](https://github.com/Snowflake-Labs/terraform-provider-snowflake/commit/05ada917414ea7c574be3974c7de4f09535961fd)) +* support table data type ([#3274](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/3274)) ([13401d5](https://github.com/Snowflake-Labs/terraform-provider-snowflake/commit/13401d5fff320eedcf40eed7c0831154cc6cc13a)) +* Tag association v1 readiness ([#3210](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/3210)) ([04f6d54](https://github.com/Snowflake-Labs/terraform-provider-snowflake/commit/04f6d54a83cf4e9ea4b292087eefa056114eb5b5)) +* Test imports and small fixes ([#3276](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/3276)) ([a712195](https://github.com/Snowflake-Labs/terraform-provider-snowflake/commit/a7121952892847f61e24e7a7a4fe78c38a450985)) +* Unsafe execute v1 readiness ([#3266](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/3266)) ([c4f1e8f](https://github.com/Snowflake-Labs/terraform-provider-snowflake/commit/c4f1e8fd55150e40d8a556580016ff83fe65bdaf)) +* Use new data types in sql builder for functions and procedures ([#3247](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/3247)) ([69f677a](https://github.com/Snowflake-Labs/terraform-provider-snowflake/commit/69f677a6f86faa79cdece4d422eb61284c1599a6)) + + +### 🔧 **Misc** + +* Add ShowByID filtering generation ([#3227](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/3227)) ([548ec42](https://github.com/Snowflake-Labs/terraform-provider-snowflake/commit/548ec42ae7bcb8daa038de4cb2f81ced9c028f2d)) +* Address small task-related
todos ([#3243](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/3243)) ([40de9ae](https://github.com/Snowflake-Labs/terraform-provider-snowflake/commit/40de9ae93796afbc3091aa2fbb2c5dfba71f911c)) +* Apply masking ([#3234](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/3234)) ([c209a8a](https://github.com/Snowflake-Labs/terraform-provider-snowflake/commit/c209a8ae6c15fa9515e933d18add962070b60257)) +* fix missing references in toOpts and changes with newlines ([#3240](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/3240)) ([246547f](https://github.com/Snowflake-Labs/terraform-provider-snowflake/commit/246547f8eb13118a325881630f33433b3f5d8f0a)) +* function tests ([#3279](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/3279)) ([5af6efb](https://github.com/Snowflake-Labs/terraform-provider-snowflake/commit/5af6efb08c479edbaea54f87f79672c802edcc86)) +* Improve config builders ([#3207](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/3207)) ([425787c](https://github.com/Snowflake-Labs/terraform-provider-snowflake/commit/425787c5938e88895af1157f505889611bdef398)) +* Revert to proper env ([#3238](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/3238)) ([5d4ed3b](https://github.com/Snowflake-Labs/terraform-provider-snowflake/commit/5d4ed3bc233a77196f01351d9c972bb56730298e)) +* Use service user for ci ([#3228](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/3228)) ([2fb50d7](https://github.com/Snowflake-Labs/terraform-provider-snowflake/commit/2fb50d7b5beb1f361d3c761b344bab3216f6ea59)) + + +### 🐛 **Bug fixes:** + +* Make blocked_roles_field optional in OAuth security integrations ([#3267](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/3267)) ([7197b57](https://github.com/Snowflake-Labs/terraform-provider-snowflake/commit/7197b57c5dd75be34fc77eb82aabbd091074b809)) +* Minor fixes 
([#3231](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/3231)) ([1863bf6](https://github.com/Snowflake-Labs/terraform-provider-snowflake/commit/1863bf697f05177f27c351c0687c4bee24fe2c1b)) +* Minor fixes 2 ([#3230](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/3230)) ([73b7e74](https://github.com/Snowflake-Labs/terraform-provider-snowflake/commit/73b7e74bf44b1ae6ddc78cac752f2b7febb836cd)) + ## [0.99.0](https://github.com/Snowflake-Labs/terraform-provider-snowflake/compare/v0.98.0...v0.99.0) (2024-11-26) From 82f240eeed9ec24d6afc82cf5d0106544bec5838 Mon Sep 17 00:00:00 2001 From: Jakub Michalak Date: Thu, 12 Dec 2024 21:24:11 +0100 Subject: [PATCH 14/20] chore!: Release v1 (#3281) BREAKING CHANGE: Release v1 - Mark preview features - Remove deprecated features - Adjust feature disclaimers - Update the docs with new examples and references - Update the provider version in the tracking package --------- Co-authored-by: snowflake-release-please[bot] <105954990+snowflake-release-please[bot]@users.noreply.github.com> --- CREATING_ISSUES.md | 6 +- MIGRATION_GUIDE.md | 154 +++- Makefile | 4 +- docs/data-sources/alerts.md | 2 + docs/data-sources/connections.md | 2 - docs/data-sources/cortex_search_services.md | 2 +- docs/data-sources/current_account.md | 2 + docs/data-sources/current_role.md | 2 + docs/data-sources/database.md | 2 + docs/data-sources/database_role.md | 2 + docs/data-sources/database_roles.md | 2 - docs/data-sources/databases.md | 2 - docs/data-sources/dynamic_tables.md | 2 + docs/data-sources/external_functions.md | 2 + docs/data-sources/external_tables.md | 2 + docs/data-sources/failover_groups.md | 2 + docs/data-sources/file_formats.md | 2 + docs/data-sources/functions.md | 2 + docs/data-sources/grants.md | 2 - docs/data-sources/masking_policies.md | 2 - docs/data-sources/materialized_views.md | 2 + docs/data-sources/network_policies.md | 2 - docs/data-sources/parameters.md | 2 + docs/data-sources/pipes.md | 2 
+ docs/data-sources/procedures.md | 2 + docs/data-sources/resource_monitors.md | 2 - docs/data-sources/role.md | 30 - docs/data-sources/roles.md | 106 --- docs/data-sources/row_access_policies.md | 2 - docs/data-sources/schemas.md | 2 - docs/data-sources/secrets.md | 2 - docs/data-sources/security_integrations.md | 2 - docs/data-sources/sequences.md | 2 + docs/data-sources/shares.md | 2 + docs/data-sources/stages.md | 2 + docs/data-sources/storage_integrations.md | 2 + docs/data-sources/streamlits.md | 2 - docs/data-sources/streams.md | 2 - .../system_generate_scim_access_token.md | 2 + .../system_get_aws_sns_iam_policy.md | 2 + .../system_get_privatelink_config.md | 2 + .../system_get_snowflake_platform_info.md | 2 + docs/data-sources/tables.md | 2 + docs/data-sources/tags.md | 2 - docs/data-sources/tasks.md | 2 - docs/data-sources/users.md | 2 - docs/data-sources/views.md | 2 - docs/data-sources/warehouses.md | 2 - docs/guides/identifiers.md | 2 +- docs/index.md | 38 +- docs/resources/account.md | 2 - ...ccount_authentication_policy_attachment.md | 2 + .../account_password_policy_attachment.md | 2 + docs/resources/account_role.md | 2 - docs/resources/alert.md | 2 + ...tegration_with_authorization_code_grant.md | 2 - ...ion_integration_with_client_credentials.md | 2 - ...hentication_integration_with_jwt_bearer.md | 2 - docs/resources/api_integration.md | 2 + docs/resources/authentication_policy.md | 2 + docs/resources/cortex_search_service.md | 2 +- docs/resources/database.md | 2 - docs/resources/database_old.md | 88 -- docs/resources/database_role.md | 2 - docs/resources/dynamic_table.md | 2 + .../email_notification_integration.md | 2 + docs/resources/external_function.md | 2 + docs/resources/external_oauth_integration.md | 2 - docs/resources/external_table.md | 2 + docs/resources/external_volume.md | 2 + docs/resources/failover_group.md | 2 + docs/resources/file_format.md | 2 + docs/resources/function.md | 138 --- docs/resources/function_java.md | 2 + 
docs/resources/function_javascript.md | 2 + docs/resources/function_python.md | 2 + docs/resources/function_scala.md | 2 + docs/resources/function_sql.md | 2 + docs/resources/grant_account_role.md | 2 - docs/resources/grant_application_role.md | 2 - docs/resources/grant_database_role.md | 2 - docs/resources/grant_ownership.md | 2 - .../grant_privileges_to_account_role.md | 2 - .../grant_privileges_to_database_role.md | 2 - docs/resources/grant_privileges_to_share.md | 2 - docs/resources/legacy_service_user.md | 2 - docs/resources/managed_account.md | 2 + docs/resources/masking_policy.md | 2 - docs/resources/materialized_view.md | 2 + docs/resources/network_policy.md | 2 - docs/resources/network_policy_attachment.md | 2 + docs/resources/network_rule.md | 2 + docs/resources/notification_integration.md | 2 + docs/resources/oauth_integration.md | 58 -- .../oauth_integration_for_custom_clients.md | 2 - ...th_integration_for_partner_applications.md | 2 - docs/resources/object_parameter.md | 2 + docs/resources/password_policy.md | 2 + docs/resources/pipe.md | 2 + docs/resources/primary_connection.md | 2 - docs/resources/procedure.md | 99 --- docs/resources/procedure_java.md | 2 + docs/resources/procedure_javascript.md | 2 + docs/resources/procedure_python.md | 2 + docs/resources/procedure_scala.md | 2 + docs/resources/procedure_sql.md | 2 + docs/resources/resource_monitor.md | 2 - docs/resources/role.md | 71 -- docs/resources/row_access_policy.md | 2 - docs/resources/saml2_integration.md | 2 - docs/resources/saml_integration.md | 66 -- docs/resources/schema.md | 2 - docs/resources/scim_integration.md | 2 - docs/resources/secondary_connection.md | 2 - docs/resources/secondary_database.md | 2 - .../secret_with_authorization_code_grant.md | 2 - .../secret_with_basic_authentication.md | 2 - .../secret_with_client_credentials.md | 2 - docs/resources/secret_with_generic_string.md | 2 - docs/resources/sequence.md | 2 + docs/resources/service_user.md | 2 - 
docs/resources/session_parameter.md | 54 -- docs/resources/share.md | 2 + docs/resources/shared_database.md | 2 - docs/resources/stage.md | 2 + docs/resources/storage_integration.md | 2 + docs/resources/stream.md | 76 -- docs/resources/stream_on_directory_table.md | 2 - docs/resources/stream_on_external_table.md | 2 - docs/resources/stream_on_table.md | 2 - docs/resources/stream_on_view.md | 2 - docs/resources/streamlit.md | 2 - docs/resources/table.md | 2 + docs/resources/table_constraint.md | 2 + docs/resources/tag.md | 2 - docs/resources/tag_association.md | 3 - .../tag_masking_policy_association.md | 85 -- docs/resources/task.md | 2 - docs/resources/unsafe_execute.md | 147 ---- docs/resources/user.md | 2 - .../user_authentication_policy_attachment.md | 2 + .../user_password_policy_attachment.md | 2 + docs/resources/user_public_keys.md | 2 + docs/resources/view.md | 2 - docs/resources/warehouse.md | 2 - .../identifiers_rework_design_decisions.md | 44 +- examples/additional/deprecated_datasources.MD | 5 +- examples/additional/deprecated_resources.MD | 12 +- .../snowflake_role/data-source.tf | 3 - .../snowflake_roles/data-source.tf | 48 -- .../snowflake_database_old/import.sh | 1 - .../snowflake_database_old/resource.tf | 30 - .../resources/snowflake_function/import.sh | 2 - .../resources/snowflake_function/resource.tf | 73 -- .../snowflake_oauth_integration/import.sh | 1 - .../snowflake_oauth_integration/resource.tf | 8 - .../resources/snowflake_procedure/import.sh | 2 - .../resources/snowflake_procedure/resource.tf | 34 - examples/resources/snowflake_role/import.sh | 1 - examples/resources/snowflake_role/resource.tf | 10 - .../snowflake_saml_integration/import.sh | 1 - .../snowflake_saml_integration/resource.tf | 8 - .../snowflake_session_parameter/import.sh | 1 - .../snowflake_session_parameter/resource.tf | 11 - examples/resources/snowflake_stream/import.sh | 2 - .../resources/snowflake_stream/resource.tf | 24 - .../import.sh | 2 - .../resource.tf | 44 - 
.../snowflake_unsafe_execute/resource.tf | 104 --- framework/provider/provider.go | 42 +- framework/provider/provider_helpers.go | 28 +- go.mod | 1 - go.sum | 2 - .../providermodel/snowflake_model_ext.go | 9 + .../providermodel/snowflake_model_gen.go | 137 +-- pkg/acceptance/check_destroy.go | 21 +- .../testenvs/testing_environment_variables.go | 1 + pkg/datasources/alerts.go | 3 +- pkg/datasources/common.go | 16 + pkg/datasources/cortex_search_services.go | 3 +- pkg/datasources/current_account.go | 3 +- .../current_account_acceptance_test.go | 1 - pkg/datasources/current_role.go | 3 +- pkg/datasources/database.go | 3 +- pkg/datasources/database_role.go | 3 +- pkg/datasources/dynamic_tables.go | 3 +- pkg/datasources/external_functions.go | 3 +- pkg/datasources/external_tables.go | 3 +- pkg/datasources/failover_groups.go | 3 +- pkg/datasources/file_formats.go | 3 +- pkg/datasources/functions.go | 3 +- pkg/datasources/materialized_views.go | 3 +- pkg/datasources/parameters.go | 3 +- pkg/datasources/parameters_acceptance_test.go | 34 - pkg/datasources/pipes.go | 3 +- pkg/datasources/procedures.go | 3 +- pkg/datasources/role.go | 67 -- pkg/datasources/role_acceptance_test.go | 48 -- pkg/datasources/roles.go | 101 --- pkg/datasources/roles_acceptance_test.go | 107 --- pkg/datasources/sequences.go | 3 +- pkg/datasources/shares.go | 3 +- pkg/datasources/stages.go | 3 +- pkg/datasources/storage_integrations.go | 3 +- .../system_generate_scim_access_token.go | 3 +- .../system_get_aws_sns_iam_policy.go | 3 +- .../system_get_privatelink_config.go | 3 +- .../system_get_snowflake_platform_info.go | 3 +- pkg/datasources/tables.go | 3 +- .../TestAcc_Procedures/complete/test.tf | 8 +- .../testdata/TestAcc_Roles_Complete/1/test.tf | 23 - .../TestAcc_Roles_Complete/1/variables.tf | 19 - .../testdata/TestAcc_Roles_Complete/2/test.tf | 3 - pkg/internal/provider/provider_context.go | 3 +- .../snowflake_environment_variables.go | 2 - pkg/internal/tools/doc-gen-helper/main.go | 24 +- 
.../tools/doc-gen-helper/templates.go | 6 +- pkg/internal/tracking/context.go | 2 +- pkg/provider/datasources/datasources.go | 2 - .../previewfeatures/preview_features.go | 157 ++++ .../previewfeatures/preview_features_test.go | 91 ++ pkg/provider/provider.go | 191 +---- pkg/provider/provider_acceptance_test.go | 91 +- pkg/provider/provider_helpers.go | 32 +- pkg/provider/resources/resources.go | 6 - ...ccount_authentication_policy_attachment.go | 7 +- .../account_password_policy_attachment.go | 7 +- pkg/resources/alert.go | 9 +- pkg/resources/api_integration.go | 9 +- pkg/resources/authentication_policy.go | 9 +- pkg/resources/common.go | 50 ++ pkg/resources/cortex_search_service.go | 12 +- pkg/resources/database_old.go | 368 -------- pkg/resources/database_old_acceptance_test.go | 448 ---------- pkg/resources/database_state_upgraders.go | 6 +- pkg/resources/deprecated_helpers_test.go | 31 - pkg/resources/dynamic_table.go | 9 +- .../email_notification_integration.go | 9 +- pkg/resources/execute.go | 2 +- pkg/resources/execute_acceptance_test.go | 18 + pkg/resources/external_function.go | 9 +- pkg/resources/external_table.go | 9 +- pkg/resources/external_volume.go | 12 +- pkg/resources/failover_group.go | 9 +- pkg/resources/file_format.go | 9 +- pkg/resources/function.go | 790 ----------------- pkg/resources/function_acceptance_test.go | 451 ---------- pkg/resources/function_java.go | 9 +- pkg/resources/function_javascript.go | 9 +- pkg/resources/function_python.go | 9 +- pkg/resources/function_scala.go | 9 +- pkg/resources/function_sql.go | 9 +- pkg/resources/function_state_upgraders.go | 63 -- pkg/resources/managed_account.go | 7 +- .../upgrade_cloned_database/step_2.tf | 8 +- .../upgrade_secondary_database/step_2.tf | 14 +- .../upgrade_shared_database/step_2.tf | 20 +- pkg/resources/materialized_view.go | 9 +- pkg/resources/network_policy_attachment.go | 9 +- pkg/resources/network_rule.go | 9 +- pkg/resources/notification_integration.go | 9 +- 
pkg/resources/oauth_integration.go | 347 -------- .../oauth_integration_acceptance_test.go | 113 --- pkg/resources/oauth_integration_test.go | 91 -- pkg/resources/object_parameter.go | 9 +- pkg/resources/password_policy.go | 9 +- pkg/resources/pipe.go | 9 +- pkg/resources/procedure.go | 804 ------------------ pkg/resources/procedure_acceptance_test.go | 583 ------------- pkg/resources/procedure_java.go | 9 +- pkg/resources/procedure_javascript.go | 9 +- pkg/resources/procedure_python.go | 9 +- pkg/resources/procedure_scala.go | 9 +- pkg/resources/procedure_sql.go | 9 +- pkg/resources/procedure_state_upgraders.go | 63 -- pkg/resources/role.go | 9 - pkg/resources/saml_integration.go | 484 ----------- .../saml_integration_acceptance_test.go | 60 -- pkg/resources/saml_integration_test.go | 109 --- pkg/resources/sequence.go | 9 +- pkg/resources/session_parameter.go | 155 ---- .../session_parameter_acceptance_test.go | 82 -- pkg/resources/share.go | 9 +- pkg/resources/stage.go | 9 +- pkg/resources/storage_integration.go | 14 +- pkg/resources/stream.go | 328 ------- pkg/resources/stream_acceptance_test.go | 510 ----------- pkg/resources/table.go | 9 +- ...table_column_masking_policy_application.go | 7 +- pkg/resources/table_constraint.go | 9 +- pkg/resources/tag_association.go | 7 - .../tag_masking_policy_association.go | 190 ----- ...king_policy_association_acceptance_test.go | 121 --- .../test.tf | 4 - .../variables.tf | 3 - .../WithDataRetentionSet/test.tf | 4 - .../WithDataRetentionSet/variables.tf | 7 - .../WithoutDataRetentionSet/test.tf | 3 - .../WithoutDataRetentionSet/variables.tf | 3 - .../testdata/TestAcc_Function/complex/test.tf | 25 - .../TestAcc_Function/complex/variables.tf | 15 - .../testdata/TestAcc_Function/java/test.tf | 22 - .../TestAcc_Function/java/variables.tf | 15 - .../TestAcc_Function/javascript/test.tf | 25 - .../TestAcc_Function/javascript/variables.tf | 15 - .../testdata/TestAcc_Function/python/test.tf | 20 - 
.../TestAcc_Function/python/variables.tf | 15 - .../testdata/TestAcc_Function/scala/test.tf | 23 - .../TestAcc_Function/scala/variables.tf | 15 - .../testdata/TestAcc_Function/sql/test.tf | 17 - .../TestAcc_Function/sql/variables.tf | 15 - .../OnObject_Procedure_ToAccountRole/test.tf | 5 +- .../OnObject_Procedure_ToDatabaseRole/test.tf | 7 +- .../TestAcc_Procedure/complex/test.tf | 22 - .../TestAcc_Procedure/complex/variables.tf | 19 - .../testdata/TestAcc_Procedure/java/test.tf | 30 - .../TestAcc_Procedure/java/variables.tf | 19 - .../TestAcc_Procedure/javascript/test.tf | 12 - .../TestAcc_Procedure/javascript/variables.tf | 19 - .../testdata/TestAcc_Procedure/python/test.tf | 26 - .../TestAcc_Procedure/python/variables.tf | 19 - .../testdata/TestAcc_Procedure/scala/test.tf | 31 - .../TestAcc_Procedure/scala/variables.tf | 19 - .../testdata/TestAcc_Procedure/sql/test.tf | 19 - .../TestAcc_Procedure/sql/variables.tf | 19 - .../basic/test.tf | 27 - .../basic/variables.tf | 15 - pkg/resources/unsafe_execute.go | 12 - pkg/resources/user_acceptance_test.go | 2 + .../user_authentication_policy_attachment.go | 10 +- .../user_password_policy_attachment.go | 10 +- pkg/resources/user_public_keys.go | 9 +- pkg/resources/validators.go | 2 +- pkg/sdk/config.go | 6 +- pkg/sdk/config_test.go | 43 +- pkg/sdk/helper_test.go | 9 + pkg/snowflake/external_oauth_integration.go | 165 ---- .../external_oauth_integration_test.go | 124 --- pkg/snowflake/masking_policy.go | 52 -- pkg/snowflake/oauth_integration.go | 69 -- pkg/snowflake/oauth_integration_test.go | 29 - pkg/snowflake/saml_integration.go | 41 - pkg/snowflake/saml_integration_test.go | 43 - pkg/snowflake/scim_integration.go | 40 - pkg/snowflake/scim_integration_test.go | 38 - templates/data-sources.md.tmpl | 2 + templates/data-sources/account_roles.md.tmpl | 22 + templates/data-sources/accounts.md.tmpl | 22 + templates/data-sources/connections.md.tmpl | 2 - .../cortex_search_services.md.tmpl | 2 +- 
templates/data-sources/database_roles.md.tmpl | 2 - templates/data-sources/databases.md.tmpl | 2 - templates/data-sources/grants.md.tmpl | 2 - .../data-sources/masking_policies.md.tmpl | 2 - .../data-sources/network_policies.md.tmpl | 2 - .../data-sources/resource_monitors.md.tmpl | 2 - templates/data-sources/roles.md.tmpl | 27 - .../data-sources/row_access_policies.md.tmpl | 2 - templates/data-sources/schemas.md.tmpl | 2 - templates/data-sources/secrets.md.tmpl | 2 - .../security_integrations.md.tmpl | 2 - templates/data-sources/streamlits.md.tmpl | 2 - templates/data-sources/streams.md.tmpl | 2 - .../system_get_aws_sns_iam_policy.md.tmpl | 2 + templates/data-sources/tags.md.tmpl | 2 - templates/data-sources/tasks.md.tmpl | 2 - templates/data-sources/users.md.tmpl | 2 - templates/data-sources/views.md.tmpl | 2 - templates/data-sources/warehouses.md.tmpl | 2 - templates/guides/identifiers.md.tmpl | 2 +- templates/index.md.tmpl | 2 +- templates/resources.md.tmpl | 2 + templates/resources/account.md.tmpl | 2 - templates/resources/account_role.md.tmpl | 2 - ...tion_with_authorization_code_grant.md.tmpl | 2 - ...ntegration_with_client_credentials.md.tmpl | 2 - ...cation_integration_with_jwt_bearer.md.tmpl | 2 - .../resources/authentication_policy.md.tmpl | 2 + .../resources/cortex_search_service.md.tmpl | 2 +- templates/resources/database.md.tmpl | 2 - templates/resources/database_role.md.tmpl | 2 - .../external_oauth_integration.md.tmpl | 2 - templates/resources/function_java.md.tmpl | 2 + .../resources/function_javascript.md.tmpl | 2 + templates/resources/function_python.md.tmpl | 2 + templates/resources/function_scala.md.tmpl | 2 + templates/resources/function_sql.md.tmpl | 2 + .../resources/grant_account_role.md.tmpl | 2 - .../resources/grant_application_role.md.tmpl | 2 - .../resources/grant_database_role.md.tmpl | 2 - templates/resources/grant_ownership.md.tmpl | 2 - .../grant_privileges_to_account_role.md.tmpl | 2 - .../grant_privileges_to_database_role.md.tmpl 
| 2 - .../grant_privileges_to_share.md.tmpl | 2 - .../resources/legacy_service_user.md.tmpl | 2 - templates/resources/masking_policy.md.tmpl | 2 - templates/resources/network_policy.md.tmpl | 2 - templates/resources/network_rule.md.tmpl | 2 + ...uth_integration_for_custom_clients.md.tmpl | 2 - ...tegration_for_partner_applications.md.tmpl | 2 - templates/resources/password_policy.md.tmpl | 2 + templates/resources/pipe.md.tmpl | 2 + .../resources/primary_connection.md.tmpl | 2 - templates/resources/procedure_java.md.tmpl | 2 + .../resources/procedure_javascript.md.tmpl | 2 + templates/resources/procedure_python.md.tmpl | 2 + templates/resources/procedure_scala.md.tmpl | 2 + templates/resources/procedure_sql.md.tmpl | 2 + templates/resources/resource_monitor.md.tmpl | 2 - templates/resources/row_access_policy.md.tmpl | 2 - templates/resources/saml2_integration.md.tmpl | 2 - templates/resources/schema.md.tmpl | 2 - templates/resources/scim_integration.md.tmpl | 2 - .../resources/secondary_connection.md.tmpl | 2 - .../resources/secondary_database.md.tmpl | 2 - ...cret_with_authorization_code_grant.md.tmpl | 2 - .../secret_with_basic_authentication.md.tmpl | 2 - .../secret_with_client_credentials.md.tmpl | 2 - .../secret_with_generic_string.md.tmpl | 2 - templates/resources/service_user.md.tmpl | 2 - templates/resources/shared_database.md.tmpl | 2 - .../stream_on_directory_table.md.tmpl | 2 - .../stream_on_external_table.md.tmpl | 2 - templates/resources/stream_on_table.md.tmpl | 2 - templates/resources/stream_on_view.md.tmpl | 2 - templates/resources/streamlit.md.tmpl | 2 - templates/resources/tag.md.tmpl | 2 - templates/resources/tag_association.md.tmpl | 2 - templates/resources/task.md.tmpl | 2 - templates/resources/unsafe_execute.md.tmpl | 40 - templates/resources/user.md.tmpl | 2 - templates/resources/user_public_keys.md.tmpl | 2 + templates/resources/view.md.tmpl | 2 - templates/resources/warehouse.md.tmpl | 2 - v1-preparations/ESSENTIAL_GA_OBJECTS.MD | 4 +- 
v1-preparations/GENERAL_TOPICS.md | 2 +- .../LIST_OF_PREVIEW_FEATURES_FOR_V1.md | 122 +-- .../LIST_OF_REMOVED_RESOURCES_FOR_V1.md | 22 +- .../LIST_OF_STABLE_RESOURCES_FOR_V1.md | 154 ++-- v1-preparations/REMAINING_GA_OBJECTS.MD | 3 +- 440 files changed, 1398 insertions(+), 10418 deletions(-) delete mode 100644 docs/data-sources/role.md delete mode 100644 docs/data-sources/roles.md delete mode 100644 docs/resources/database_old.md delete mode 100644 docs/resources/function.md delete mode 100644 docs/resources/oauth_integration.md delete mode 100644 docs/resources/procedure.md delete mode 100644 docs/resources/role.md delete mode 100644 docs/resources/saml_integration.md delete mode 100644 docs/resources/session_parameter.md delete mode 100644 docs/resources/stream.md delete mode 100644 docs/resources/tag_masking_policy_association.md delete mode 100644 docs/resources/unsafe_execute.md delete mode 100644 examples/data-sources/snowflake_role/data-source.tf delete mode 100644 examples/data-sources/snowflake_roles/data-source.tf delete mode 100644 examples/resources/snowflake_database_old/import.sh delete mode 100644 examples/resources/snowflake_database_old/resource.tf delete mode 100644 examples/resources/snowflake_function/import.sh delete mode 100644 examples/resources/snowflake_function/resource.tf delete mode 100644 examples/resources/snowflake_oauth_integration/import.sh delete mode 100644 examples/resources/snowflake_oauth_integration/resource.tf delete mode 100644 examples/resources/snowflake_procedure/import.sh delete mode 100644 examples/resources/snowflake_procedure/resource.tf delete mode 100644 examples/resources/snowflake_role/import.sh delete mode 100644 examples/resources/snowflake_role/resource.tf delete mode 100644 examples/resources/snowflake_saml_integration/import.sh delete mode 100644 examples/resources/snowflake_saml_integration/resource.tf delete mode 100644 examples/resources/snowflake_session_parameter/import.sh delete mode 100644 
examples/resources/snowflake_session_parameter/resource.tf delete mode 100644 examples/resources/snowflake_stream/import.sh delete mode 100644 examples/resources/snowflake_stream/resource.tf delete mode 100644 examples/resources/snowflake_tag_masking_policy_association/import.sh delete mode 100644 examples/resources/snowflake_tag_masking_policy_association/resource.tf delete mode 100644 examples/resources/snowflake_unsafe_execute/resource.tf delete mode 100644 pkg/datasources/role.go delete mode 100644 pkg/datasources/role_acceptance_test.go delete mode 100644 pkg/datasources/roles.go delete mode 100644 pkg/datasources/roles_acceptance_test.go delete mode 100644 pkg/datasources/testdata/TestAcc_Roles_Complete/1/test.tf delete mode 100644 pkg/datasources/testdata/TestAcc_Roles_Complete/1/variables.tf delete mode 100644 pkg/datasources/testdata/TestAcc_Roles_Complete/2/test.tf create mode 100644 pkg/provider/previewfeatures/preview_features.go create mode 100644 pkg/provider/previewfeatures/preview_features_test.go delete mode 100644 pkg/resources/database_old.go delete mode 100644 pkg/resources/database_old_acceptance_test.go delete mode 100644 pkg/resources/deprecated_helpers_test.go delete mode 100644 pkg/resources/function.go delete mode 100644 pkg/resources/function_acceptance_test.go delete mode 100644 pkg/resources/function_state_upgraders.go delete mode 100644 pkg/resources/oauth_integration.go delete mode 100644 pkg/resources/oauth_integration_acceptance_test.go delete mode 100644 pkg/resources/oauth_integration_test.go delete mode 100644 pkg/resources/procedure.go delete mode 100644 pkg/resources/procedure_acceptance_test.go delete mode 100644 pkg/resources/procedure_state_upgraders.go delete mode 100644 pkg/resources/role.go delete mode 100644 pkg/resources/saml_integration.go delete mode 100644 pkg/resources/saml_integration_acceptance_test.go delete mode 100644 pkg/resources/saml_integration_test.go delete mode 100644 pkg/resources/session_parameter.go 
delete mode 100644 pkg/resources/session_parameter_acceptance_test.go delete mode 100644 pkg/resources/stream.go delete mode 100644 pkg/resources/stream_acceptance_test.go delete mode 100644 pkg/resources/tag_masking_policy_association.go delete mode 100644 pkg/resources/tag_masking_policy_association_acceptance_test.go delete mode 100644 pkg/resources/testdata/TestAcc_DatabaseRemovedOutsideOfTerraform/test.tf delete mode 100644 pkg/resources/testdata/TestAcc_DatabaseRemovedOutsideOfTerraform/variables.tf delete mode 100644 pkg/resources/testdata/TestAcc_Database_DefaultDataRetentionTime/WithDataRetentionSet/test.tf delete mode 100644 pkg/resources/testdata/TestAcc_Database_DefaultDataRetentionTime/WithDataRetentionSet/variables.tf delete mode 100644 pkg/resources/testdata/TestAcc_Database_DefaultDataRetentionTime/WithoutDataRetentionSet/test.tf delete mode 100644 pkg/resources/testdata/TestAcc_Database_DefaultDataRetentionTime/WithoutDataRetentionSet/variables.tf delete mode 100644 pkg/resources/testdata/TestAcc_Function/complex/test.tf delete mode 100644 pkg/resources/testdata/TestAcc_Function/complex/variables.tf delete mode 100644 pkg/resources/testdata/TestAcc_Function/java/test.tf delete mode 100644 pkg/resources/testdata/TestAcc_Function/java/variables.tf delete mode 100644 pkg/resources/testdata/TestAcc_Function/javascript/test.tf delete mode 100644 pkg/resources/testdata/TestAcc_Function/javascript/variables.tf delete mode 100644 pkg/resources/testdata/TestAcc_Function/python/test.tf delete mode 100644 pkg/resources/testdata/TestAcc_Function/python/variables.tf delete mode 100644 pkg/resources/testdata/TestAcc_Function/scala/test.tf delete mode 100644 pkg/resources/testdata/TestAcc_Function/scala/variables.tf delete mode 100644 pkg/resources/testdata/TestAcc_Function/sql/test.tf delete mode 100644 pkg/resources/testdata/TestAcc_Function/sql/variables.tf delete mode 100644 pkg/resources/testdata/TestAcc_Procedure/complex/test.tf delete mode 100644 
pkg/resources/testdata/TestAcc_Procedure/complex/variables.tf delete mode 100644 pkg/resources/testdata/TestAcc_Procedure/java/test.tf delete mode 100644 pkg/resources/testdata/TestAcc_Procedure/java/variables.tf delete mode 100644 pkg/resources/testdata/TestAcc_Procedure/javascript/test.tf delete mode 100644 pkg/resources/testdata/TestAcc_Procedure/javascript/variables.tf delete mode 100644 pkg/resources/testdata/TestAcc_Procedure/python/test.tf delete mode 100644 pkg/resources/testdata/TestAcc_Procedure/python/variables.tf delete mode 100644 pkg/resources/testdata/TestAcc_Procedure/scala/test.tf delete mode 100644 pkg/resources/testdata/TestAcc_Procedure/scala/variables.tf delete mode 100644 pkg/resources/testdata/TestAcc_Procedure/sql/test.tf delete mode 100644 pkg/resources/testdata/TestAcc_Procedure/sql/variables.tf delete mode 100644 pkg/resources/testdata/TestAcc_TagMaskingPolicyAssociation/basic/test.tf delete mode 100644 pkg/resources/testdata/TestAcc_TagMaskingPolicyAssociation/basic/variables.tf delete mode 100644 pkg/resources/unsafe_execute.go delete mode 100644 pkg/snowflake/external_oauth_integration.go delete mode 100644 pkg/snowflake/external_oauth_integration_test.go delete mode 100644 pkg/snowflake/masking_policy.go delete mode 100644 pkg/snowflake/oauth_integration.go delete mode 100644 pkg/snowflake/oauth_integration_test.go delete mode 100644 pkg/snowflake/saml_integration.go delete mode 100644 pkg/snowflake/saml_integration_test.go delete mode 100644 pkg/snowflake/scim_integration.go delete mode 100644 pkg/snowflake/scim_integration_test.go create mode 100644 templates/data-sources/account_roles.md.tmpl create mode 100644 templates/data-sources/accounts.md.tmpl delete mode 100644 templates/data-sources/roles.md.tmpl delete mode 100644 templates/resources/unsafe_execute.md.tmpl diff --git a/CREATING_ISSUES.md b/CREATING_ISSUES.md index 87d00a1a92..274d61662c 100644 --- a/CREATING_ISSUES.md +++ b/CREATING_ISSUES.md @@ -181,7 +181,7 @@ resource 
"snowflake_grant_privileges_to_account_role" "grant_on_procedure" { account_role_name = snowflake_account_role.name on_schema_object { object_type = "PROCEDURE" - object_name = "\"${snowflake_database.database.name}\".\"${snowflake_schema.schema.name}\".\"${snowflake_procedure.procedure.name}\"" + object_name = "\"${snowflake_database.database.name}\".\"${snowflake_schema.schema.name}\".\"${snowflake_procedure_sql.procedure.name}\"" } } ``` @@ -202,7 +202,7 @@ resource "snowflake_grant_privileges_to_account_role" "grant_on_procedure" { account_role_name = snowflake_account_role.name on_schema_object { object_type = "PROCEDURE" - object_name = "\"${snowflake_database.database.name}\".\"${snowflake_schema.schema.name}\".\"${snowflake_procedure.procedure.name}\"(NUMBER, VARCHAR)" + object_name = "\"${snowflake_database.database.name}\".\"${snowflake_schema.schema.name}\".\"${snowflake_procedure_sql.procedure.name}\"(NUMBER, VARCHAR)" } } ``` @@ -215,7 +215,7 @@ resource "snowflake_grant_privileges_to_account_role" "grant_on_procedure" { account_role_name = snowflake_account_role.name on_schema_object { object_type = "PROCEDURE" - object_name = snowflake_procedure.procedure_name.fully_qualified_name + object_name = snowflake_procedure_sql.procedure_name.fully_qualified_name } } ``` diff --git a/MIGRATION_GUIDE.md b/MIGRATION_GUIDE.md index 7dea8fd480..3ecf522a66 100644 --- a/MIGRATION_GUIDE.md +++ b/MIGRATION_GUIDE.md @@ -7,6 +7,104 @@ across different versions. > [!TIP] > We highly recommend upgrading the versions one by one instead of bulk upgrades. +## v0.100.0 ➞ v1.0.0 + +### Preview features flag +All of the preview features objects are now disabled by default. 
This includes: +- Resources + - `snowflake_account_password_policy_attachment` + - `snowflake_alert` + - `snowflake_api_integration` + - `snowflake_cortex_search_service` + - `snowflake_dynamic_table` + - `snowflake_external_function` + - `snowflake_external_table` + - `snowflake_external_volume` + - `snowflake_failover_group` + - `snowflake_file_format` + - `snowflake_managed_account` + - `snowflake_materialized_view` + - `snowflake_network_policy_attachment` + - `snowflake_network_rule` + - `snowflake_email_notification_integration` + - `snowflake_notification_integration` + - `snowflake_object_parameter` + - `snowflake_password_policy` + - `snowflake_pipe` + - `snowflake_sequence` + - `snowflake_share` + - `snowflake_stage` + - `snowflake_storage_integration` + - `snowflake_table_column_masking_policy_application` + - `snowflake_table_constraint` + - `snowflake_user_public_keys` + - `snowflake_user_password_policy_attachment` +- Data sources + - `snowflake_current_account` + - `snowflake_alerts` + - `snowflake_cortex_search_services` + - `snowflake_database` + - `snowflake_database_role` + - `snowflake_dynamic_tables` + - `snowflake_external_functions` + - `snowflake_external_tables` + - `snowflake_failover_groups` + - `snowflake_file_formats` + - `snowflake_materialized_views` + - `snowflake_pipes` + - `snowflake_current_role` + - `snowflake_sequences` + - `snowflake_shares` + - `snowflake_parameters` + - `snowflake_stages` + - `snowflake_storage_integrations` + - `snowflake_system_generate_scim_access_token` + - `snowflake_system_get_aws_sns_iam_policy` + - `snowflake_system_get_privatelink_config` + - `snowflake_system_get_snowflake_platform_info` + +If you want to have them enabled, add the feature name to the provider configuration (with `_datasource` or `_resource` suffix), like this: +```terraform +provider "snowflake" { + preview_features_enabled = ["snowflake_current_account_datasource", "snowflake_alert_resource"] +} +``` + +### Removed deprecated 
objects +All of the deprecated objects are removed from v1 release. This includes: +- Resources + - `snowflake_database_old` - see [migration guide](#new-feature-new-database-resources) + - `snowflake_role` - see [migration guide](#new-feature-new-snowflake_account_role-resource) + - `snowflake_oauth_integration` - see [migration guide](#new-feature-snowflake_oauth_integration_for_custom_clients-and-snowflake_oauth_integration_for_partner_applications-resources) + - `snowflake_saml_integration` - see [migration guide](#new-feature-snowflake_saml2_integration-resource) + - `snowflake_session_parameter` + - `snowflake_stream` - see [migration guide](#new-feature-snowflake_stream_on_directory_table-and-snowflake_stream_on_view-resource) + - `snowflake_tag_masking_policy_association` - see [migration guide](#snowflake_tag_masking_policy_association-deprecation) + - `snowflake_function` + - `snowflake_procedure` + - `snowflake_unsafe_execute` - see [migration guide](#unsafe_execute-resource-deprecation--new-execute-resource) +- Data sources + - `snowflake_role` - see [migration guide](#snowflake_role-data-source-deprecation) + - `snowflake_roles` - see [migration guide](#new-feature-account-role-data-source) +- Fields in the provider configuration: + - `account` - see [migration guide](#behavior-change-deprecated-fields) + - OAuth related fields - see [migration guide](#structural-change-oauth-api): + - `oauth_access_token` + - `oauth_client_id` + - `oauth_client_secret` + - `oauth_endpoint` + - `oauth_redirect_url` + - `oauth_refresh_token` + - `browser_auth` + - `private_key_path` - see [migration guide](#private_key_path-deprecation) + - `region` - see [migration guide](#remove-redundant-information-region) + - `session_params` - see [migration guide](#rename-session_params--params) + - `username` - see [migration guide](#rename-username--user) +- Fields in `tag` resource: + - `object_name` + +Additionally, `JWT` value is no longer available for `authenticator` field 
in the provider configuration. + ## v0.99.0 ➞ v0.100.0 ### *(preview feature/deprecation)* Function and procedure resources @@ -462,7 +560,8 @@ We have added new fields to match the ones in [the driver](https://pkg.go.dev/gi To be more consistent with other configuration options, we have decided to add `driver_tracing` to the configuration schema. This value can also be configured by `SNOWFLAKE_DRIVER_TRACING` environmental variable and by `drivertracing` field in the TOML file. The previous `SF_TF_GOSNOWFLAKE_LOG_LEVEL` environmental variable is not supported now, and was removed from the provider. #### *(behavior change)* deprecated fields -Because of new fields `account_name` and `organization_name`, `account` is now deprecated. It will be removed with the v1 release. Please adjust your configurations from +Because of new fields `account_name` and `organization_name`, `account` is now deprecated. It will be removed with the v1 release. +If you use a Terraform configuration file, adjust it from ```terraform provider "snowflake" { account = "ORGANIZATION-ACCOUNT" @@ -477,6 +576,31 @@ provider "snowflake" { } ``` +If you use a TOML configuration file, adjust it from +```toml +[default] + account = "ORGANIZATION-ACCOUNT" + +``` + +to +```toml +[default] + organizationname = "ORGANIZATION" + accountname = "ACCOUNT" + +``` + +If you use environmental variables, adjust them from +```bash +SNOWFLAKE_ACCOUNT = "ORGANIZATION-ACCOUNT" +``` + +to +```bash +SNOWFLAKE_ORGANIZATION_NAME = "ORGANIZATION" +SNOWFLAKE_ACCOUNT_NAME = "ACCOUNT" +``` + This change may cause the connection host URL to change. If you get errors like ``` Error: open snowflake connection: Post "https://ORGANIZATION-ACCOUNT.snowflakecomputing.com:443/session/v1/login-request?requestId=[guid]&request_guid=[guid]&roleName=myrole": EOF @@ -1270,21 +1394,6 @@ Changes: - `pattern` was renamed to `like` - output of SHOW is enclosed in `show_output`, so before, e.g.
`roles.0.comment` is now `roles.0.show_output.0.comment` -### *(new feature)* new snowflake_account_role resource - -Already existing `snowflake_role` was deprecated in favor of the new `snowflake_account_role`. The old resource got upgraded to -have the same features as the new one. The only difference is the deprecation message on the old resource. - -New fields: -- added `show_output` field that holds the response from SHOW ROLES. Remember that the field will be only recomputed if one of the fields (`name` or `comment`) are changed. - -### *(breaking change)* refactored snowflake_roles data source - -Changes: -- New `in_class` filtering option to filter out roles by class name, e.g. `in_class = "SNOWFLAKE.CORE.BUDGET"` -- `pattern` was renamed to `like` -- output of SHOW is enclosed in `show_output`, so before, e.g. `roles.0.comment` is now `roles.0.show_output.0.comment` - ### *(new feature)* snowflake_streamlit resource Added a new resource for managing streamlits. See reference [docs](https://docs.snowflake.com/en/sql-reference/sql/create-streamlit). In this resource, we decided to split `ROOT_LOCATION` in Snowflake to two fields: `stage` representing stage fully qualified name and `directory_location` containing a path within this stage to root location. @@ -1640,6 +1749,11 @@ The `ForceNew` field was removed in favor of in-place Update for `name` paramete So from now, these objects won't be re-created when the `name` changes, but instead only the name will be updated with `ALTER .. RENAME TO` statements. ## v0.87.0 ➞ v0.88.0 + +### snowflake_role data source deprecation + +Already existing `snowflake_role` was deprecated in favor of the new `snowflake_roles`. You can have a similar behavior like before by specifying `pattern` field. Please adjust your Terraform configurations. 
+ ### snowflake_procedure resource changes #### *(behavior change)* Execute as validation added From now on, the `snowflake_procedure`'s `execute_as` parameter allows only two values: OWNER and CALLER (case-insensitive). Setting other values earlier resulted in falling back to the Snowflake default (currently OWNER) and creating a permadiff. @@ -1842,7 +1956,7 @@ were already available in Golang Snowflake driver. This lead to several attribut We will focus on the deprecated ones and show you how to adapt your current configuration to the new changes. #### *(rename)* username ➞ user - +Provider field `username` was renamed to `user`. Adjust your provider configuration like below: ```terraform provider "snowflake" { # before @@ -1854,6 +1968,7 @@ provider "snowflake" { ``` #### *(structural change)* OAuth API +Provider fields regarding OAuth were renamed and nested. Adjust your provider configuration like below: ```terraform provider "snowflake" { @@ -1895,7 +2010,8 @@ provider "snowflake" { } ``` -#### *(todo)* private key path +#### private_key_path deprecation +Provider field `private_key_path` is now deprecated in favor of `private_key` and the `file` Terraform function (see [docs](https://developer.hashicorp.com/terraform/language/functions/file)). Adjust your provider configuration like below: ```terraform provider "snowflake" { @@ -1908,7 +2024,7 @@ provider "snowflake" { ``` #### *(rename)* session_params ➞ params - +Provider field `session_params` was renamed to `params`. Adjust your provider configuration like below: ```terraform provider "snowflake" { # before diff --git a/Makefile b/Makefile index e8bd91a003..e796f0218d 100644 --- a/Makefile +++ b/Makefile @@ -65,7 +65,7 @@ test: test-client ## run unit and integration tests go test -v -cover -timeout=45m ./... test-acceptance: ## run acceptance tests - TF_ACC=1 SF_TF_ACC_TEST_CONFIGURE_CLIENT_ONCE=true TEST_SF_TF_REQUIRE_TEST_OBJECT_SUFFIX=1 go test -run "^TestAcc_" -v -cover -timeout=120m ./...
+ TF_ACC=1 SF_TF_ACC_TEST_CONFIGURE_CLIENT_ONCE=true TEST_SF_TF_REQUIRE_TEST_OBJECT_SUFFIX=1 SF_TF_ACC_TEST_ENABLE_ALL_PREVIEW_FEATURES=true go test -run "^TestAcc_" -v -cover -timeout=120m ./... test-integration: ## run SDK integration tests TEST_SF_TF_REQUIRE_TEST_OBJECT_SUFFIX=1 go test -run "^TestInt_" -v -cover -timeout=60m ./... @@ -80,7 +80,7 @@ test-object-renaming: ## runs tests in object_renaming_acceptance_test.go TEST_SF_TF_ENABLE_OBJECT_RENAMING=1 go test ./pkg/resources/object_renaming_acceptace_test.go -v test-acceptance-%: ## run acceptance tests for the given resource only, e.g. test-acceptance-Warehouse - TF_ACC=1 TF_LOG=DEBUG SF_TF_ACC_TEST_CONFIGURE_CLIENT_ONCE=true go test -run ^TestAcc_$*_ -v -timeout=20m ./pkg/resources + TF_ACC=1 TF_LOG=DEBUG SF_TF_ACC_TEST_CONFIGURE_CLIENT_ONCE=true SF_TF_ACC_TEST_ENABLE_ALL_PREVIEW_FEATURES=true go test -run ^TestAcc_$*_ -v -timeout=20m ./pkg/resources build-local: ## build the binary locally go build -o $(BASE_BINARY_NAME) . diff --git a/docs/data-sources/alerts.md b/docs/data-sources/alerts.md index de34d28c78..b84f7eb4b8 100644 --- a/docs/data-sources/alerts.md +++ b/docs/data-sources/alerts.md @@ -5,6 +5,8 @@ description: |- --- +!> **Caution: Preview Feature** This feature is considered a preview feature in the provider, regardless of the state of the resource in Snowflake. We do not guarantee its stability. It will be reworked and marked as a stable feature in future releases. Breaking changes are expected, even without bumping the major version. To use this feature, add the relevant feature name to `preview_features_enabled field` in the [provider configuration](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/latest/docs#schema). Please always refer to the [Getting Help](https://github.com/Snowflake-Labs/terraform-provider-snowflake?tab=readme-ov-file#getting-help) section in our Github repo to best determine how to get help for your questions. 
+ # snowflake_alerts (Data Source) diff --git a/docs/data-sources/connections.md b/docs/data-sources/connections.md index e794d01f9f..345d6cb562 100644 --- a/docs/data-sources/connections.md +++ b/docs/data-sources/connections.md @@ -5,8 +5,6 @@ description: |- Data source used to get details of filtered connections. Filtering is aligned with the current possibilities for SHOW CONNECTIONS https://docs.snowflake.com/en/sql-reference/sql/show-connections query. The results of SHOW is encapsulated in one output collection connections. --- -!> **V1 release candidate** This data source is a release candidate for the V1. We do not expect significant changes in it before the V1. We will welcome any feedback and adjust the data source if needed. Any errors reported will be resolved with a higher priority. We encourage checking this data source out before the V1 release. Please follow the [migration guide](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/MIGRATION_GUIDE.md#v0970--v0980) to use it. - # snowflake_connections (Data Source) Data source used to get details of filtered connections. Filtering is aligned with the current possibilities for [SHOW CONNECTIONS](https://docs.snowflake.com/en/sql-reference/sql/show-connections) query. The results of SHOW is encapsulated in one output collection `connections`. diff --git a/docs/data-sources/cortex_search_services.md b/docs/data-sources/cortex_search_services.md index 1015a4ee2d..6f87f5a453 100644 --- a/docs/data-sources/cortex_search_services.md +++ b/docs/data-sources/cortex_search_services.md @@ -5,7 +5,7 @@ description: |- --- -!> **Disclaimer for Cortex Search service** Note that Cortex Search is a Private Preview feature as such, should be used only with non-production data even when using Snowflake's Terraform Provider. Also, note that the Terraform Provider is not covered by Snowflake's support team; the Product and Engineering teams are available for any questions. 
However, please contact the Cortex Search team for any issues with this object. +!> **Caution: Preview Feature** This feature is considered a preview feature in the provider, regardless of the state of the resource in Snowflake. We do not guarantee its stability. It will be reworked and marked as a stable feature in future releases. Breaking changes are expected, even without bumping the major version. To use this feature, add the relevant feature name to `preview_features_enabled field` in the [provider configuration](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/latest/docs#schema). Please always refer to the [Getting Help](https://github.com/Snowflake-Labs/terraform-provider-snowflake?tab=readme-ov-file#getting-help) section in our Github repo to best determine how to get help for your questions. # snowflake_cortex_search_services (Data Source) diff --git a/docs/data-sources/current_account.md b/docs/data-sources/current_account.md index 7280779adb..d0aead9256 100644 --- a/docs/data-sources/current_account.md +++ b/docs/data-sources/current_account.md @@ -5,6 +5,8 @@ description: |- --- +!> **Caution: Preview Feature** This feature is considered a preview feature in the provider, regardless of the state of the resource in Snowflake. We do not guarantee its stability. It will be reworked and marked as a stable feature in future releases. Breaking changes are expected, even without bumping the major version. To use this feature, add the relevant feature name to `preview_features_enabled field` in the [provider configuration](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/latest/docs#schema). Please always refer to the [Getting Help](https://github.com/Snowflake-Labs/terraform-provider-snowflake?tab=readme-ov-file#getting-help) section in our Github repo to best determine how to get help for your questions. 
+ # snowflake_current_account (Data Source) diff --git a/docs/data-sources/current_role.md b/docs/data-sources/current_role.md index edc1daed09..3014c157ee 100644 --- a/docs/data-sources/current_role.md +++ b/docs/data-sources/current_role.md @@ -5,6 +5,8 @@ description: |- --- +!> **Caution: Preview Feature** This feature is considered a preview feature in the provider, regardless of the state of the resource in Snowflake. We do not guarantee its stability. It will be reworked and marked as a stable feature in future releases. Breaking changes are expected, even without bumping the major version. To use this feature, add the relevant feature name to `preview_features_enabled field` in the [provider configuration](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/latest/docs#schema). Please always refer to the [Getting Help](https://github.com/Snowflake-Labs/terraform-provider-snowflake?tab=readme-ov-file#getting-help) section in our Github repo to best determine how to get help for your questions. + # snowflake_current_role (Data Source) diff --git a/docs/data-sources/database.md b/docs/data-sources/database.md index 514cb0ca14..387bbe06e4 100644 --- a/docs/data-sources/database.md +++ b/docs/data-sources/database.md @@ -5,6 +5,8 @@ description: |- --- +!> **Caution: Preview Feature** This feature is considered a preview feature in the provider, regardless of the state of the resource in Snowflake. We do not guarantee its stability. It will be reworked and marked as a stable feature in future releases. Breaking changes are expected, even without bumping the major version. To use this feature, add the relevant feature name to `preview_features_enabled field` in the [provider configuration](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/latest/docs#schema). 
Please always refer to the [Getting Help](https://github.com/Snowflake-Labs/terraform-provider-snowflake?tab=readme-ov-file#getting-help) section in our Github repo to best determine how to get help for your questions. + # snowflake_database (Data Source) diff --git a/docs/data-sources/database_role.md b/docs/data-sources/database_role.md index 13d03ddb42..e1c025dbe7 100644 --- a/docs/data-sources/database_role.md +++ b/docs/data-sources/database_role.md @@ -5,6 +5,8 @@ description: |- --- +!> **Caution: Preview Feature** This feature is considered a preview feature in the provider, regardless of the state of the resource in Snowflake. We do not guarantee its stability. It will be reworked and marked as a stable feature in future releases. Breaking changes are expected, even without bumping the major version. To use this feature, add the relevant feature name to `preview_features_enabled field` in the [provider configuration](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/latest/docs#schema). Please always refer to the [Getting Help](https://github.com/Snowflake-Labs/terraform-provider-snowflake?tab=readme-ov-file#getting-help) section in our Github repo to best determine how to get help for your questions. + # snowflake_database_role (Data Source) diff --git a/docs/data-sources/database_roles.md b/docs/data-sources/database_roles.md index ccdfd274b7..12f55291d1 100644 --- a/docs/data-sources/database_roles.md +++ b/docs/data-sources/database_roles.md @@ -5,8 +5,6 @@ description: |- Data source used to get details of filtered database roles. Filtering is aligned with the current possibilities for SHOW DATABASE ROLES https://docs.snowflake.com/en/sql-reference/sql/show-database-roles query (like and limit are supported). The results of SHOW is encapsulated in show_output collection. --- -!> **V1 release candidate** This data source was reworked and is a release candidate for the V1. We do not expect significant changes in it before the V1. 
We will welcome any feedback and adjust the data source if needed. Any errors reported will be resolved with a higher priority. We encourage checking this data source out before the V1 release. Please follow the [migration guide](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/MIGRATION_GUIDE.md#v0920--v0930) to use it. - # snowflake_database_roles (Data Source) Data source used to get details of filtered database roles. Filtering is aligned with the current possibilities for [SHOW DATABASE ROLES](https://docs.snowflake.com/en/sql-reference/sql/show-database-roles) query (`like` and `limit` are supported). The results of SHOW is encapsulated in show_output collection. diff --git a/docs/data-sources/databases.md b/docs/data-sources/databases.md index 691ded55b1..e9376b650e 100644 --- a/docs/data-sources/databases.md +++ b/docs/data-sources/databases.md @@ -5,8 +5,6 @@ description: |- Data source used to get details of filtered databases. Filtering is aligned with the current possibilities for SHOW DATABASES https://docs.snowflake.com/en/sql-reference/sql/show-databases query (like, starts_with, and limit are all supported). The results of SHOW, DESCRIBE, and SHOW PARAMETERS IN are encapsulated in one output collection. --- -!> **V1 release candidate** This data source was reworked and is a release candidate for the V1. We do not expect significant changes in it before the V1. We will welcome any feedback and adjust the data source if needed. Any errors reported will be resolved with a higher priority. We encourage checking this data source out before the V1 release. Please follow the [migration guide](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/MIGRATION_GUIDE.md#v0920--v0930) to use it. - # snowflake_databases (Data Source) Data source used to get details of filtered databases. 
Filtering is aligned with the current possibilities for [SHOW DATABASES](https://docs.snowflake.com/en/sql-reference/sql/show-databases) query (`like`, `starts_with`, and `limit` are all supported). The results of SHOW, DESCRIBE, and SHOW PARAMETERS IN are encapsulated in one output collection. diff --git a/docs/data-sources/dynamic_tables.md b/docs/data-sources/dynamic_tables.md index ec100591e4..9ea093c0d4 100644 --- a/docs/data-sources/dynamic_tables.md +++ b/docs/data-sources/dynamic_tables.md @@ -5,6 +5,8 @@ description: |- --- +!> **Caution: Preview Feature** This feature is considered a preview feature in the provider, regardless of the state of the resource in Snowflake. We do not guarantee its stability. It will be reworked and marked as a stable feature in future releases. Breaking changes are expected, even without bumping the major version. To use this feature, add the relevant feature name to `preview_features_enabled field` in the [provider configuration](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/latest/docs#schema). Please always refer to the [Getting Help](https://github.com/Snowflake-Labs/terraform-provider-snowflake?tab=readme-ov-file#getting-help) section in our Github repo to best determine how to get help for your questions. + # snowflake_dynamic_tables (Data Source) diff --git a/docs/data-sources/external_functions.md b/docs/data-sources/external_functions.md index ffe7062229..e268d9826f 100644 --- a/docs/data-sources/external_functions.md +++ b/docs/data-sources/external_functions.md @@ -5,6 +5,8 @@ description: |- --- +!> **Caution: Preview Feature** This feature is considered a preview feature in the provider, regardless of the state of the resource in Snowflake. We do not guarantee its stability. It will be reworked and marked as a stable feature in future releases. Breaking changes are expected, even without bumping the major version. 
To use this feature, add the relevant feature name to `preview_features_enabled` field in the [provider configuration](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/latest/docs#schema). Please always refer to the [Getting Help](https://github.com/Snowflake-Labs/terraform-provider-snowflake?tab=readme-ov-file#getting-help) section in our GitHub repo to best determine how to get help for your questions. + # snowflake_external_functions (Data Source) diff --git a/docs/data-sources/external_tables.md b/docs/data-sources/external_tables.md index 06616ca25b..5a9f9ac3fb 100644 --- a/docs/data-sources/external_tables.md +++ b/docs/data-sources/external_tables.md @@ -5,6 +5,8 @@ description: |- --- +!> **Caution: Preview Feature** This feature is considered a preview feature in the provider, regardless of the state of the resource in Snowflake. We do not guarantee its stability. It will be reworked and marked as a stable feature in future releases. Breaking changes are expected, even without bumping the major version. To use this feature, add the relevant feature name to `preview_features_enabled` field in the [provider configuration](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/latest/docs#schema). Please always refer to the [Getting Help](https://github.com/Snowflake-Labs/terraform-provider-snowflake?tab=readme-ov-file#getting-help) section in our GitHub repo to best determine how to get help for your questions. + # snowflake_external_tables (Data Source) diff --git a/docs/data-sources/failover_groups.md b/docs/data-sources/failover_groups.md index f7dca6dcbd..f027e982b8 100644 --- a/docs/data-sources/failover_groups.md +++ b/docs/data-sources/failover_groups.md @@ -5,6 +5,8 @@ description: |- --- +!> **Caution: Preview Feature** This feature is considered a preview feature in the provider, regardless of the state of the resource in Snowflake. We do not guarantee its stability. 
It will be reworked and marked as a stable feature in future releases. Breaking changes are expected, even without bumping the major version. To use this feature, add the relevant feature name to `preview_features_enabled` field in the [provider configuration](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/latest/docs#schema). Please always refer to the [Getting Help](https://github.com/Snowflake-Labs/terraform-provider-snowflake?tab=readme-ov-file#getting-help) section in our GitHub repo to best determine how to get help for your questions. + # snowflake_failover_groups (Data Source) diff --git a/docs/data-sources/file_formats.md b/docs/data-sources/file_formats.md index 87a9dae60d..68ac079c1a 100644 --- a/docs/data-sources/file_formats.md +++ b/docs/data-sources/file_formats.md @@ -5,6 +5,8 @@ description: |- --- +!> **Caution: Preview Feature** This feature is considered a preview feature in the provider, regardless of the state of the resource in Snowflake. We do not guarantee its stability. It will be reworked and marked as a stable feature in future releases. Breaking changes are expected, even without bumping the major version. To use this feature, add the relevant feature name to `preview_features_enabled` field in the [provider configuration](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/latest/docs#schema). Please always refer to the [Getting Help](https://github.com/Snowflake-Labs/terraform-provider-snowflake?tab=readme-ov-file#getting-help) section in our GitHub repo to best determine how to get help for your questions. 
+ # snowflake_file_formats (Data Source) diff --git a/docs/data-sources/functions.md b/docs/data-sources/functions.md index 8848a92506..269f9c2e67 100644 --- a/docs/data-sources/functions.md +++ b/docs/data-sources/functions.md @@ -5,6 +5,8 @@ description: |- --- +!> **Caution: Preview Feature** This feature is considered a preview feature in the provider, regardless of the state of the resource in Snowflake. We do not guarantee its stability. It will be reworked and marked as a stable feature in future releases. Breaking changes are expected, even without bumping the major version. To use this feature, add the relevant feature name to `preview_features_enabled field` in the [provider configuration](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/latest/docs#schema). Please always refer to the [Getting Help](https://github.com/Snowflake-Labs/terraform-provider-snowflake?tab=readme-ov-file#getting-help) section in our Github repo to best determine how to get help for your questions. + # snowflake_functions (Data Source) diff --git a/docs/data-sources/grants.md b/docs/data-sources/grants.md index 6ffd89b195..c7ccbfafce 100644 --- a/docs/data-sources/grants.md +++ b/docs/data-sources/grants.md @@ -5,8 +5,6 @@ description: |- --- -!> **V1 release candidate** This datasource was reworked and is a release candidate for the V1. We do not expect significant changes in it before the V1. We will welcome any feedback and adjust the resource if needed. Any errors reported will be resolved with a higher priority. We encourage checking this resource out before the V1 release. To migrate from older grant resources please follow the [migration guide](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/MIGRATION_GUIDE.md#migration-from-old-grant-resources-to-new-ones). 
- # snowflake_grants (Data Source) diff --git a/docs/data-sources/masking_policies.md b/docs/data-sources/masking_policies.md index 1facb000b9..35851f83b7 100644 --- a/docs/data-sources/masking_policies.md +++ b/docs/data-sources/masking_policies.md @@ -5,8 +5,6 @@ description: |- Data source used to get details of filtered masking policies. Filtering is aligned with the current possibilities for SHOW MASKING POLICIES https://docs.snowflake.com/en/sql-reference/sql/show-masking-policies query. The results of SHOW and DESCRIBE are encapsulated in one output collection masking_policies. --- -!> **V1 release candidate** This data source was reworked and is a release candidate for the V1. We do not expect significant changes in it before the V1. We will welcome any feedback and adjust the data source if needed. Any errors reported will be resolved with a higher priority. We encourage checking this data source out before the V1 release. Please follow the [migration guide](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/MIGRATION_GUIDE.md#v0950--v0960) to use it. - # snowflake_masking_policies (Data Source) Data source used to get details of filtered masking policies. Filtering is aligned with the current possibilities for [SHOW MASKING POLICIES](https://docs.snowflake.com/en/sql-reference/sql/show-masking-policies) query. The results of SHOW and DESCRIBE are encapsulated in one output collection `masking_policies`. diff --git a/docs/data-sources/materialized_views.md b/docs/data-sources/materialized_views.md index 5ee9317163..a926f2cc39 100644 --- a/docs/data-sources/materialized_views.md +++ b/docs/data-sources/materialized_views.md @@ -5,6 +5,8 @@ description: |- --- +!> **Caution: Preview Feature** This feature is considered a preview feature in the provider, regardless of the state of the resource in Snowflake. We do not guarantee its stability. It will be reworked and marked as a stable feature in future releases. 
Breaking changes are expected, even without bumping the major version. To use this feature, add the relevant feature name to `preview_features_enabled field` in the [provider configuration](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/latest/docs#schema). Please always refer to the [Getting Help](https://github.com/Snowflake-Labs/terraform-provider-snowflake?tab=readme-ov-file#getting-help) section in our Github repo to best determine how to get help for your questions. + # snowflake_materialized_views (Data Source) diff --git a/docs/data-sources/network_policies.md b/docs/data-sources/network_policies.md index b46596fce8..a6867f688b 100644 --- a/docs/data-sources/network_policies.md +++ b/docs/data-sources/network_policies.md @@ -5,8 +5,6 @@ description: |- Data source used to get details of filtered network policies. Filtering is aligned with the current possibilities for SHOW NETWORK POLICIES https://docs.snowflake.com/en/sql-reference/sql/show-network-policies query (like is supported). The results of SHOW and DESCRIBE are encapsulated in one output collection. --- -!> **V1 release candidate** This data source was reworked and is a release candidate for the V1. We do not expect significant changes in it before the V1. We will welcome any feedback and adjust the data source if needed. Any errors reported will be resolved with a higher priority. We encourage checking this data source out before the V1 release. Please follow the [migration guide](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/MIGRATION_GUIDE.md#v0920--v0930) to use it. - # snowflake_network_policies (Data Source) Data source used to get details of filtered network policies. Filtering is aligned with the current possibilities for [SHOW NETWORK POLICIES](https://docs.snowflake.com/en/sql-reference/sql/show-network-policies) query (`like` is supported). The results of SHOW and DESCRIBE are encapsulated in one output collection. 
diff --git a/docs/data-sources/parameters.md b/docs/data-sources/parameters.md index 4c1fd45572..fb29f58d0c 100644 --- a/docs/data-sources/parameters.md +++ b/docs/data-sources/parameters.md @@ -5,6 +5,8 @@ description: |- --- +!> **Caution: Preview Feature** This feature is considered a preview feature in the provider, regardless of the state of the resource in Snowflake. We do not guarantee its stability. It will be reworked and marked as a stable feature in future releases. Breaking changes are expected, even without bumping the major version. To use this feature, add the relevant feature name to `preview_features_enabled field` in the [provider configuration](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/latest/docs#schema). Please always refer to the [Getting Help](https://github.com/Snowflake-Labs/terraform-provider-snowflake?tab=readme-ov-file#getting-help) section in our Github repo to best determine how to get help for your questions. + # snowflake_parameters (Data Source) diff --git a/docs/data-sources/pipes.md b/docs/data-sources/pipes.md index 91e6924194..e157763ef0 100644 --- a/docs/data-sources/pipes.md +++ b/docs/data-sources/pipes.md @@ -5,6 +5,8 @@ description: |- --- +!> **Caution: Preview Feature** This feature is considered a preview feature in the provider, regardless of the state of the resource in Snowflake. We do not guarantee its stability. It will be reworked and marked as a stable feature in future releases. Breaking changes are expected, even without bumping the major version. To use this feature, add the relevant feature name to `preview_features_enabled field` in the [provider configuration](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/latest/docs#schema). Please always refer to the [Getting Help](https://github.com/Snowflake-Labs/terraform-provider-snowflake?tab=readme-ov-file#getting-help) section in our Github repo to best determine how to get help for your questions. 
+ # snowflake_pipes (Data Source) diff --git a/docs/data-sources/procedures.md b/docs/data-sources/procedures.md index 5d8dbc1962..3e1908556b 100644 --- a/docs/data-sources/procedures.md +++ b/docs/data-sources/procedures.md @@ -5,6 +5,8 @@ description: |- --- +!> **Caution: Preview Feature** This feature is considered a preview feature in the provider, regardless of the state of the resource in Snowflake. We do not guarantee its stability. It will be reworked and marked as a stable feature in future releases. Breaking changes are expected, even without bumping the major version. To use this feature, add the relevant feature name to `preview_features_enabled field` in the [provider configuration](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/latest/docs#schema). Please always refer to the [Getting Help](https://github.com/Snowflake-Labs/terraform-provider-snowflake?tab=readme-ov-file#getting-help) section in our Github repo to best determine how to get help for your questions. + # snowflake_procedures (Data Source) diff --git a/docs/data-sources/resource_monitors.md b/docs/data-sources/resource_monitors.md index 6e44d1a023..b3c1820856 100644 --- a/docs/data-sources/resource_monitors.md +++ b/docs/data-sources/resource_monitors.md @@ -5,8 +5,6 @@ description: |- Data source used to get details of filtered resource monitors. Filtering is aligned with the current possibilities for SHOW RESOURCE MONITORS https://docs.snowflake.com/en/sql-reference/sql/show-resource-monitors query (like is supported). The results of SHOW is encapsulated in show_output collection. --- -!> **V1 release candidate** This data source was reworked and is a release candidate for the V1. We do not expect significant changes in it before the V1. We will welcome any feedback and adjust the data source if needed. Any errors reported will be resolved with a higher priority. We encourage checking this data source out before the V1 release. 
Please follow the [migration guide](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/MIGRATION_GUIDE.md#v0950--v0960) to use it. - # snowflake_resource_monitors (Data Source) Data source used to get details of filtered resource monitors. Filtering is aligned with the current possibilities for [SHOW RESOURCE MONITORS](https://docs.snowflake.com/en/sql-reference/sql/show-resource-monitors) query (`like` is supported). The results of SHOW is encapsulated in show_output collection. diff --git a/docs/data-sources/role.md b/docs/data-sources/role.md deleted file mode 100644 index d2a16369ec..0000000000 --- a/docs/data-sources/role.md +++ /dev/null @@ -1,30 +0,0 @@ ---- -page_title: "snowflake_role Data Source - terraform-provider-snowflake" -subcategory: "" -description: |- - ---- - -# snowflake_role (Data Source) - -~> **Deprecation** This resource is deprecated and will be removed in a future major version release. Please use [snowflake_account_roles](./account_roles) instead. - -## Example Usage - -```terraform -data "snowflake_role" "this" { - name = "role1" -} -``` - - -## Schema - -### Required - -- `name` (String) The role for which to return metadata. - -### Read-Only - -- `comment` (String) The comment on the role -- `id` (String) The ID of this resource. diff --git a/docs/data-sources/roles.md b/docs/data-sources/roles.md deleted file mode 100644 index ff55a1326f..0000000000 --- a/docs/data-sources/roles.md +++ /dev/null @@ -1,106 +0,0 @@ ---- -page_title: "snowflake_roles Data Source - terraform-provider-snowflake" -subcategory: "" -description: |- - Data source used to get details of filtered roles. Filtering is aligned with the current possibilities for SHOW ROLES https://docs.snowflake.com/en/sql-reference/sql/show-roles query (like and in_class are all supported). The results of SHOW are encapsulated in one output collection. ---- - -!> **V1 release candidate** This datasource was reworked and is a release candidate for the V1. 
We do not expect significant changes in it before the V1. We will welcome any feedback and adjust the data source if needed. Any errors reported will be resolved with a higher priority. We encourage checking this data source out before the V1 release. Please follow the [migration guide](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/MIGRATION_GUIDE.md#v0920--v0930) to use it. - - --> **Note** Fields `STARTS WITH` and `LIMIT` are currently missing. They will be added in the future. - -# snowflake_roles (Data Source) - -~> **Deprecation** This resource is deprecated and will be removed in a future major version release. Please use [snowflake_account_roles](./account_roles) instead. - -Data source used to get details of filtered roles. Filtering is aligned with the current possibilities for [SHOW ROLES](https://docs.snowflake.com/en/sql-reference/sql/show-roles) query (`like` and `in_class` are all supported). The results of SHOW are encapsulated in one output collection. 
- -## Example Usage - -```terraform -# Simple usage -data "snowflake_roles" "simple" { -} - -output "simple_output" { - value = data.snowflake_roles.simple.roles -} - -# Filtering (like) -data "snowflake_roles" "like" { - like = "role-name" -} - -output "like_output" { - value = data.snowflake_roles.like.roles -} - -# Filtering (in class) -data "snowflake_roles" "in_class" { - in_class = "SNOWFLAKE.CORE.BUDGET" -} - -output "in_class_output" { - value = data.snowflake_roles.in_class.roles -} - -# Ensure the number of roles is equal to at least one element (with the use of postcondition) -data "snowflake_roles" "assert_with_postcondition" { - like = "role-name-%" - lifecycle { - postcondition { - condition = length(self.roles) > 0 - error_message = "there should be at least one role" - } - } -} - -# Ensure the number of roles is equal to at exactly one element (with the use of check block) -check "role_check" { - data "snowflake_roles" "assert_with_check_block" { - like = "role-name" - } - - assert { - condition = length(data.snowflake_roles.assert_with_check_block.roles) == 1 - error_message = "Roles filtered by '${data.snowflake_roles.assert_with_check_block.like}' returned ${length(data.snowflake_roles.assert_with_check_block.roles)} roles where one was expected" - } -} -``` - - -## Schema - -### Optional - -- `in_class` (String) Filters the SHOW GRANTS output by class name. -- `like` (String) Filters the output with **case-insensitive** pattern, with support for SQL wildcard characters (`%` and `_`). - -### Read-Only - -- `id` (String) The ID of this resource. -- `roles` (List of Object) Holds the aggregated output of all role details queries. 
(see [below for nested schema](#nestedatt--roles)) - - -### Nested Schema for `roles` - -Read-Only: - -- `show_output` (List of Object) (see [below for nested schema](#nestedobjatt--roles--show_output)) - - -### Nested Schema for `roles.show_output` - -Read-Only: - -- `assigned_to_users` (Number) -- `comment` (String) -- `created_on` (String) -- `granted_roles` (Number) -- `granted_to_roles` (Number) -- `is_current` (Boolean) -- `is_default` (Boolean) -- `is_inherited` (Boolean) -- `name` (String) -- `owner` (String) diff --git a/docs/data-sources/row_access_policies.md b/docs/data-sources/row_access_policies.md index 1c7c7b6d28..09d37a3824 100644 --- a/docs/data-sources/row_access_policies.md +++ b/docs/data-sources/row_access_policies.md @@ -5,8 +5,6 @@ description: |- Data source used to get details of filtered row access policies. Filtering is aligned with the current possibilities for SHOW ROW ACCESS POLICIES https://docs.snowflake.com/en/sql-reference/sql/show-row-access-policies query. The results of SHOW and DESCRIBE are encapsulated in one output collection row_access_policies. --- -!> **V1 release candidate** This data source was reworked and is a release candidate for the V1. We do not expect significant changes in it before the V1. We will welcome any feedback and adjust the data source if needed. Any errors reported will be resolved with a higher priority. We encourage checking this data source out before the V1 release. Please follow the [migration guide](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/MIGRATION_GUIDE.md#v0950--v0960) to use it. - # snowflake_row_access_policies (Data Source) Data source used to get details of filtered row access policies. Filtering is aligned with the current possibilities for [SHOW ROW ACCESS POLICIES](https://docs.snowflake.com/en/sql-reference/sql/show-row-access-policies) query. The results of SHOW and DESCRIBE are encapsulated in one output collection `row_access_policies`. 
diff --git a/docs/data-sources/schemas.md b/docs/data-sources/schemas.md index 5787b9bd7b..a3599c9327 100644 --- a/docs/data-sources/schemas.md +++ b/docs/data-sources/schemas.md @@ -5,8 +5,6 @@ description: |- Data source used to get details of filtered schemas. Filtering is aligned with the current possibilities for SHOW SCHEMAS https://docs.snowflake.com/en/sql-reference/sql/show-schemas query. The results of SHOW, DESCRIBE, and SHOW PARAMETERS IN are encapsulated in one output collection. --- -!> **V1 release candidate** This data source was reworked and is a release candidate for the V1. We do not expect significant changes in it before the V1. We will welcome any feedback and adjust the data source if needed. Any errors reported will be resolved with a higher priority. We encourage checking this data source out before the V1 release. Please follow the [migration guide](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/MIGRATION_GUIDE.md#v0930--v0940) to use it. - -> **Note** Field `WITH PRIVILEGES` is currently missing. It will be added in the future. diff --git a/docs/data-sources/secrets.md b/docs/data-sources/secrets.md index 397cfa9e3e..3743232c84 100644 --- a/docs/data-sources/secrets.md +++ b/docs/data-sources/secrets.md @@ -5,8 +5,6 @@ description: |- Data source used to get details of filtered secrets. Filtering is aligned with the current possibilities for SHOW SECRETS https://docs.snowflake.com/en/sql-reference/sql/show-secrets query. The results of SHOW and DESCRIBE are encapsulated in one output collection secrets. --- -!> **V1 release candidate** This data source is a release candidate for the V1. We do not expect significant changes in it before the V1. We will welcome any feedback and adjust the data source if needed. Any errors reported will be resolved with a higher priority. We encourage checking this data source out before the V1 release. 
Please follow the [migration guide](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/MIGRATION_GUIDE.md#v0970--v0980) to use it. - # snowflake_secrets (Data Source) Data source used to get details of filtered secrets. Filtering is aligned with the current possibilities for [SHOW SECRETS](https://docs.snowflake.com/en/sql-reference/sql/show-secrets) query. The results of SHOW and DESCRIBE are encapsulated in one output collection `secrets`. diff --git a/docs/data-sources/security_integrations.md b/docs/data-sources/security_integrations.md index 833eb70663..b28451c17a 100644 --- a/docs/data-sources/security_integrations.md +++ b/docs/data-sources/security_integrations.md @@ -5,8 +5,6 @@ description: |- Data source used to get details of filtered security integrations. Filtering is aligned with the current possibilities for SHOW SECURITY INTEGRATIONS https://docs.snowflake.com/en/sql-reference/sql/show-integrations query (only like is supported). The results of SHOW and DESCRIBE are encapsulated in one output collection security_integrations. --- -!> **V1 release candidate** This data source was reworked and is a release candidate for the V1. We do not expect significant changes in it before the V1. We will welcome any feedback and adjust the data source if needed. Any errors reported will be resolved with a higher priority. We encourage checking this data source out before the V1 release. Please follow the [migration guide](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/MIGRATION_GUIDE.md#v0920--v0930) to use it. - # snowflake_security_integrations (Data Source) Data source used to get details of filtered security integrations. Filtering is aligned with the current possibilities for [SHOW SECURITY INTEGRATIONS](https://docs.snowflake.com/en/sql-reference/sql/show-integrations) query (only `like` is supported). The results of SHOW and DESCRIBE are encapsulated in one output collection `security_integrations`. 
diff --git a/docs/data-sources/sequences.md b/docs/data-sources/sequences.md index 2e0fd71c02..d353caf455 100644 --- a/docs/data-sources/sequences.md +++ b/docs/data-sources/sequences.md @@ -5,6 +5,8 @@ description: |- --- +!> **Caution: Preview Feature** This feature is considered a preview feature in the provider, regardless of the state of the resource in Snowflake. We do not guarantee its stability. It will be reworked and marked as a stable feature in future releases. Breaking changes are expected, even without bumping the major version. To use this feature, add the relevant feature name to `preview_features_enabled field` in the [provider configuration](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/latest/docs#schema). Please always refer to the [Getting Help](https://github.com/Snowflake-Labs/terraform-provider-snowflake?tab=readme-ov-file#getting-help) section in our Github repo to best determine how to get help for your questions. + # snowflake_sequences (Data Source) diff --git a/docs/data-sources/shares.md b/docs/data-sources/shares.md index 7fbf97b590..82cd6b3795 100644 --- a/docs/data-sources/shares.md +++ b/docs/data-sources/shares.md @@ -5,6 +5,8 @@ description: |- --- +!> **Caution: Preview Feature** This feature is considered a preview feature in the provider, regardless of the state of the resource in Snowflake. We do not guarantee its stability. It will be reworked and marked as a stable feature in future releases. Breaking changes are expected, even without bumping the major version. To use this feature, add the relevant feature name to `preview_features_enabled field` in the [provider configuration](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/latest/docs#schema). Please always refer to the [Getting Help](https://github.com/Snowflake-Labs/terraform-provider-snowflake?tab=readme-ov-file#getting-help) section in our Github repo to best determine how to get help for your questions. 
+ # snowflake_shares (Data Source) diff --git a/docs/data-sources/stages.md b/docs/data-sources/stages.md index 63b38cab9e..c72ccec99b 100644 --- a/docs/data-sources/stages.md +++ b/docs/data-sources/stages.md @@ -5,6 +5,8 @@ description: |- --- +!> **Caution: Preview Feature** This feature is considered a preview feature in the provider, regardless of the state of the resource in Snowflake. We do not guarantee its stability. It will be reworked and marked as a stable feature in future releases. Breaking changes are expected, even without bumping the major version. To use this feature, add the relevant feature name to `preview_features_enabled field` in the [provider configuration](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/latest/docs#schema). Please always refer to the [Getting Help](https://github.com/Snowflake-Labs/terraform-provider-snowflake?tab=readme-ov-file#getting-help) section in our Github repo to best determine how to get help for your questions. + # snowflake_stages (Data Source) diff --git a/docs/data-sources/storage_integrations.md b/docs/data-sources/storage_integrations.md index 5225987d90..7598c669b5 100644 --- a/docs/data-sources/storage_integrations.md +++ b/docs/data-sources/storage_integrations.md @@ -5,6 +5,8 @@ description: |- --- +!> **Caution: Preview Feature** This feature is considered a preview feature in the provider, regardless of the state of the resource in Snowflake. We do not guarantee its stability. It will be reworked and marked as a stable feature in future releases. Breaking changes are expected, even without bumping the major version. To use this feature, add the relevant feature name to `preview_features_enabled field` in the [provider configuration](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/latest/docs#schema). 
Please always refer to the [Getting Help](https://github.com/Snowflake-Labs/terraform-provider-snowflake?tab=readme-ov-file#getting-help) section in our Github repo to best determine how to get help for your questions. + # snowflake_storage_integrations (Data Source) diff --git a/docs/data-sources/streamlits.md b/docs/data-sources/streamlits.md index 3bb9c19549..979cdc12cb 100644 --- a/docs/data-sources/streamlits.md +++ b/docs/data-sources/streamlits.md @@ -5,8 +5,6 @@ description: |- Data source used to get details of filtered streamlits. Filtering is aligned with the current possibilities for SHOW STREAMLITS https://docs.snowflake.com/en/sql-reference/sql/show-streamlits query (only like is supported). The results of SHOW and DESCRIBE are encapsulated in one output collection streamlits. --- -!> **V1 release candidate** This data source was reworked and is a release candidate for the V1. We do not expect significant changes in it before the V1. We will welcome any feedback and adjust the data source if needed. Any errors reported will be resolved with a higher priority. We encourage checking this data source out before the V1 release. Please follow the [migration guide](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/MIGRATION_GUIDE.md#v0930--v0940) to use it. - # snowflake_streamlits (Data Source) Data source used to get details of filtered streamlits. Filtering is aligned with the current possibilities for [SHOW STREAMLITS](https://docs.snowflake.com/en/sql-reference/sql/show-streamlits) query (only `like` is supported). The results of SHOW and DESCRIBE are encapsulated in one output collection `streamlits`. diff --git a/docs/data-sources/streams.md b/docs/data-sources/streams.md index 23acd3a192..cf74885737 100644 --- a/docs/data-sources/streams.md +++ b/docs/data-sources/streams.md @@ -5,8 +5,6 @@ description: |- Data source used to get details of filtered streams. 
Filtering is aligned with the current possibilities for SHOW STREAMS https://docs.snowflake.com/en/sql-reference/sql/show-streams query. The results of SHOW and DESCRIBE are encapsulated in one output collection streams. --- -!> **V1 release candidate** This data source was reworked and is a release candidate for the V1. We do not expect significant changes in it before the V1. We will welcome any feedback and adjust the data source if needed. Any errors reported will be resolved with a higher priority. We encourage checking this data source out before the V1 release. Please follow the [migration guide](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/MIGRATION_GUIDE.md#v0970--v0980) to use it. - # snowflake_streams (Data Source) Data source used to get details of filtered streams. Filtering is aligned with the current possibilities for [SHOW STREAMS](https://docs.snowflake.com/en/sql-reference/sql/show-streams) query. The results of SHOW and DESCRIBE are encapsulated in one output collection `streams`. diff --git a/docs/data-sources/system_generate_scim_access_token.md b/docs/data-sources/system_generate_scim_access_token.md index 12bc4a0312..667fac7fa4 100644 --- a/docs/data-sources/system_generate_scim_access_token.md +++ b/docs/data-sources/system_generate_scim_access_token.md @@ -5,6 +5,8 @@ description: |- --- +!> **Caution: Preview Feature** This feature is considered a preview feature in the provider, regardless of the state of the resource in Snowflake. We do not guarantee its stability. It will be reworked and marked as a stable feature in future releases. Breaking changes are expected, even without bumping the major version. To use this feature, add the relevant feature name to `preview_features_enabled` field in the [provider configuration](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/latest/docs#schema).
Please always refer to the [Getting Help](https://github.com/Snowflake-Labs/terraform-provider-snowflake?tab=readme-ov-file#getting-help) section in our Github repo to best determine how to get help for your questions. + # snowflake_system_generate_scim_access_token (Data Source) diff --git a/docs/data-sources/system_get_aws_sns_iam_policy.md b/docs/data-sources/system_get_aws_sns_iam_policy.md index cbbe270f60..abee189abe 100644 --- a/docs/data-sources/system_get_aws_sns_iam_policy.md +++ b/docs/data-sources/system_get_aws_sns_iam_policy.md @@ -5,6 +5,8 @@ description: |- --- +!> **Caution: Preview Feature** This feature is considered a preview feature in the provider, regardless of the state of the resource in Snowflake. We do not guarantee its stability. It will be reworked and marked as a stable feature in future releases. Breaking changes are expected, even without bumping the major version. To use this feature, add the relevant feature name to `preview_features_enabled` field in the [provider configuration](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/latest/docs#schema). Please always refer to the [Getting Help](https://github.com/Snowflake-Labs/terraform-provider-snowflake?tab=readme-ov-file#getting-help) section in our Github repo to best determine how to get help for your questions. + # snowflake_system_get_aws_sns_iam_policy (Data Source) diff --git a/docs/data-sources/system_get_privatelink_config.md b/docs/data-sources/system_get_privatelink_config.md index 24bf2a2e11..6eab72d1a2 100644 --- a/docs/data-sources/system_get_privatelink_config.md +++ b/docs/data-sources/system_get_privatelink_config.md @@ -5,6 +5,8 @@ description: |- --- +!> **Caution: Preview Feature** This feature is considered a preview feature in the provider, regardless of the state of the resource in Snowflake. We do not guarantee its stability. It will be reworked and marked as a stable feature in future releases.
Breaking changes are expected, even without bumping the major version. To use this feature, add the relevant feature name to `preview_features_enabled` field in the [provider configuration](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/latest/docs#schema). Please always refer to the [Getting Help](https://github.com/Snowflake-Labs/terraform-provider-snowflake?tab=readme-ov-file#getting-help) section in our Github repo to best determine how to get help for your questions. + # snowflake_system_get_privatelink_config (Data Source) diff --git a/docs/data-sources/system_get_snowflake_platform_info.md b/docs/data-sources/system_get_snowflake_platform_info.md index 5f48527e65..f8d48922ac 100644 --- a/docs/data-sources/system_get_snowflake_platform_info.md +++ b/docs/data-sources/system_get_snowflake_platform_info.md @@ -5,6 +5,8 @@ description: |- --- +!> **Caution: Preview Feature** This feature is considered a preview feature in the provider, regardless of the state of the resource in Snowflake. We do not guarantee its stability. It will be reworked and marked as a stable feature in future releases. Breaking changes are expected, even without bumping the major version. To use this feature, add the relevant feature name to `preview_features_enabled` field in the [provider configuration](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/latest/docs#schema). Please always refer to the [Getting Help](https://github.com/Snowflake-Labs/terraform-provider-snowflake?tab=readme-ov-file#getting-help) section in our Github repo to best determine how to get help for your questions.
+ # snowflake_system_get_snowflake_platform_info (Data Source) diff --git a/docs/data-sources/tables.md b/docs/data-sources/tables.md index dcaa2b815a..abae7ca08b 100644 --- a/docs/data-sources/tables.md +++ b/docs/data-sources/tables.md @@ -5,6 +5,8 @@ description: |- --- +!> **Caution: Preview Feature** This feature is considered a preview feature in the provider, regardless of the state of the resource in Snowflake. We do not guarantee its stability. It will be reworked and marked as a stable feature in future releases. Breaking changes are expected, even without bumping the major version. To use this feature, add the relevant feature name to `preview_features_enabled` field in the [provider configuration](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/latest/docs#schema). Please always refer to the [Getting Help](https://github.com/Snowflake-Labs/terraform-provider-snowflake?tab=readme-ov-file#getting-help) section in our Github repo to best determine how to get help for your questions. + # snowflake_tables (Data Source) diff --git a/docs/data-sources/tags.md b/docs/data-sources/tags.md index cde76cf652..a66090220e 100644 --- a/docs/data-sources/tags.md +++ b/docs/data-sources/tags.md @@ -5,8 +5,6 @@ description: |- Data source used to get details of filtered tags. Filtering is aligned with the current possibilities for SHOW TAGS https://docs.snowflake.com/en/sql-reference/sql/show-tags query. The results of SHOW are encapsulated in one output collection tags. --- -!> **V1 release candidate** This data source is a release candidate for the V1. We do not expect significant changes in it before the V1. We will welcome any feedback and adjust the data source if needed. Any errors reported will be resolved with a higher priority. We encourage checking this data source out before the V1 release. Please follow the [migration guide](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/MIGRATION_GUIDE.md#v0980--v0990) to use it.
- # snowflake_tags (Data Source) Data source used to get details of filtered tags. Filtering is aligned with the current possibilities for [SHOW TAGS](https://docs.snowflake.com/en/sql-reference/sql/show-tags) query. The results of SHOW are encapsulated in one output collection `tags`. diff --git a/docs/data-sources/tasks.md b/docs/data-sources/tasks.md index f035be55cd..50bc3205dd 100644 --- a/docs/data-sources/tasks.md +++ b/docs/data-sources/tasks.md @@ -5,8 +5,6 @@ description: |- Data source used to get details of filtered tasks. Filtering is aligned with the current possibilities for SHOW TASKS https://docs.snowflake.com/en/sql-reference/sql/show-tasks query. The results of SHOW and SHOW PARAMETERS IN are encapsulated in one output collection tasks. --- -!> **V1 release candidate** This data source was reworked and is a release candidate for the V1. We do not expect significant changes in it before the V1. We will welcome any feedback and adjust the data source if needed. Any errors reported will be resolved with a higher priority. We encourage checking this data source out before the V1 release. Please follow the [migration guide](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/MIGRATION_GUIDE.md#v0980--v0990) to use it. - # snowflake_tasks (Data Source) Data source used to get details of filtered tasks. Filtering is aligned with the current possibilities for [SHOW TASKS](https://docs.snowflake.com/en/sql-reference/sql/show-tasks) query. The results of SHOW and SHOW PARAMETERS IN are encapsulated in one output collection `tasks`. diff --git a/docs/data-sources/users.md b/docs/data-sources/users.md index 4a068375c9..bf5df0c281 100644 --- a/docs/data-sources/users.md +++ b/docs/data-sources/users.md @@ -5,8 +5,6 @@ description: |- Data source used to get details of filtered users. Filtering is aligned with the current possibilities for SHOW USERS https://docs.snowflake.com/en/sql-reference/sql/show-users query. 
The results of SHOW, DESCRIBE, and SHOW PARAMETERS IN are encapsulated in one output collection. Important note is that when querying users you don't have permissions to, the querying options are limited. You won't get almost any field in show_output (only empty or default values), the DESCRIBE command cannot be called, so you have to set with_describe = false. Only parameters output is not affected by the lack of privileges. --- -!> **V1 release candidate** This data source was reworked and is a release candidate for the V1. We do not expect significant changes in it before the V1. We will welcome any feedback and adjust the data source if needed. Any errors reported will be resolved with a higher priority. We encourage checking this data source out before the V1 release. Please follow the [migration guide](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/MIGRATION_GUIDE.md#v0920--v0930) to use it. - # snowflake_users (Data Source) Data source used to get details of filtered users. Filtering is aligned with the current possibilities for [SHOW USERS](https://docs.snowflake.com/en/sql-reference/sql/show-users) query. The results of SHOW, DESCRIBE, and SHOW PARAMETERS IN are encapsulated in one output collection. Important note is that when querying users you don't have permissions to, the querying options are limited. You won't get almost any field in `show_output` (only empty or default values), the DESCRIBE command cannot be called, so you have to set `with_describe = false`. Only `parameters` output is not affected by the lack of privileges. diff --git a/docs/data-sources/views.md b/docs/data-sources/views.md index 9425ccbcde..a7640970a4 100644 --- a/docs/data-sources/views.md +++ b/docs/data-sources/views.md @@ -5,8 +5,6 @@ description: |- Data source used to get details of filtered views. 
Filtering is aligned with the current possibilities for SHOW VIEWS https://docs.snowflake.com/en/sql-reference/sql/show-views query (only like is supported). The results of SHOW and DESCRIBE are encapsulated in one output collection views. --- -!> **V1 release candidate** This data source was reworked and is a release candidate for the V1. We do not expect significant changes in it before the V1. We will welcome any feedback and adjust the data source if needed. Any errors reported will be resolved with a higher priority. We encourage checking this data source out before the V1 release. Please follow the [migration guide](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/MIGRATION_GUIDE.md#v094x--v0950) to use it. - # snowflake_views (Data Source) Data source used to get details of filtered views. Filtering is aligned with the current possibilities for [SHOW VIEWS](https://docs.snowflake.com/en/sql-reference/sql/show-views) query (only `like` is supported). The results of SHOW and DESCRIBE are encapsulated in one output collection `views`. diff --git a/docs/data-sources/warehouses.md b/docs/data-sources/warehouses.md index 2afcc6f502..9d7358c43f 100644 --- a/docs/data-sources/warehouses.md +++ b/docs/data-sources/warehouses.md @@ -5,8 +5,6 @@ description: |- Data source used to get details of filtered warehouses. Filtering is aligned with the current possibilities for SHOW WAREHOUSES https://docs.snowflake.com/en/sql-reference/sql/show-warehouses query (only like is supported). The results of SHOW, DESCRIBE, and SHOW PARAMETERS IN are encapsulated in one output collection. --- -!> **V1 release candidate** This data source was reworked and is a release candidate for the V1. We do not expect significant changes in it before the V1. We will welcome any feedback and adjust the data source if needed. Any errors reported will be resolved with a higher priority. We encourage checking this data source out before the V1 release. 
Please follow the [migration guide](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/MIGRATION_GUIDE.md#v0920--v0930) to use it. - # snowflake_warehouses (Data Source) Data source used to get details of filtered warehouses. Filtering is aligned with the current possibilities for [SHOW WAREHOUSES](https://docs.snowflake.com/en/sql-reference/sql/show-warehouses) query (only `like` is supported). The results of SHOW, DESCRIBE, and SHOW PARAMETERS IN are encapsulated in one output collection. diff --git a/docs/guides/identifiers.md b/docs/guides/identifiers.md index 0af6936acf..e8ddac899a 100644 --- a/docs/guides/identifiers.md +++ b/docs/guides/identifiers.md @@ -15,7 +15,7 @@ For example, instead of writing ``` object_name = “\”${snowflake_table.database}\”.\”${snowflake_table.schema}\”.\”${snowflake_table.name}\”” # for procedures -object_name = “\”${snowflake_procedure.database}\”.\”${snowflake_procedure.schema}\”.\”${snowflake_procedure.name}\"(NUMBER, VARCHAR)” +object_name = “\”${snowflake_procedure_sql.database}\”.\”${snowflake_procedure_sql.schema}\”.\”${snowflake_procedure_sql.name}\"(NUMBER, VARCHAR)” ``` now we can write diff --git a/docs/index.md b/docs/index.md index 9055832dba..b5d8c76acb 100644 --- a/docs/index.md +++ b/docs/index.md @@ -5,7 +5,7 @@ description: Manage SnowflakeDB with Terraform. # Snowflake Provider -~> **Disclaimer** the project is still in the 0.x.x version, which means it’s still in the experimental phase (check [Go module versioning](https://go.dev/doc/modules/version-numbers#v0-number) for more details). It can be used in production but makes no stability or backward compatibility guarantees. We do not provide backward bug fixes and, therefore, always suggest using the newest version. We are providing only limited support for the provider; priorities will be assigned on a case-by-case basis. 
Our main current goals are stabilization, addressing existing issues, and providing the missing features (prioritizing the GA features; supporting PrPr and PuPr features are not high priorities now). With all that in mind, we aim to reach V1 with a stable, reliable, and functional provider. V1 will be free of all the above limitations. +~> **Disclaimer** The project is in v1 version, but some features are in preview. Such resources and data sources are considered a preview feature in the provider, regardless of the state of the resource in Snowflake. We do not guarantee their stability. They will be reworked and marked as a stable feature in future releases. Breaking changes in these features are expected, even without bumping the major version. They are disabled by default. To use them, add the relevant feature name to `preview_features_enabled` field in the [provider configuration](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/latest/docs#schema). The list of preview features is available below. Please always refer to the [Getting Help](https://github.com/Snowflake-Labs/terraform-provider-snowflake?tab=readme-ov-file#getting-help) section in our Github repo to best determine how to get help for your questions. ~> **Note** Please check the [migration guide](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/MIGRATION_GUIDE.md) when changing the version of the provider. @@ -70,10 +70,8 @@ provider "snowflake" { ### Optional -- `account` (String, Deprecated) Use `account_name` and `organization_name` instead. Specifies your Snowflake account identifier assigned, by Snowflake. The [account locator](https://docs.snowflake.com/en/user-guide/admin-account-identifier#format-2-account-locator-in-a-region) format is not supported. For information about account identifiers, see the [Snowflake documentation](https://docs.snowflake.com/en/user-guide/admin-account-identifier.html). Required unless using `profile`. 
Can also be sourced from the `SNOWFLAKE_ACCOUNT` environment variable. - `account_name` (String) Specifies your Snowflake account name assigned by Snowflake. For information about account identifiers, see the [Snowflake documentation](https://docs.snowflake.com/en/user-guide/admin-account-identifier#account-name). Required unless using `profile`. Can also be sourced from the `SNOWFLAKE_ACCOUNT_NAME` environment variable. -- `authenticator` (String) Specifies the [authentication type](https://pkg.go.dev/github.com/snowflakedb/gosnowflake#AuthType) to use when connecting to Snowflake. Valid options are: `SNOWFLAKE` | `OAUTH` | `EXTERNALBROWSER` | `OKTA` | `JWT` | `SNOWFLAKE_JWT` | `TOKENACCESSOR` | `USERNAMEPASSWORDMFA`. Value `JWT` is deprecated and will be removed in future releases. Can also be sourced from the `SNOWFLAKE_AUTHENTICATOR` environment variable. -- `browser_auth` (Boolean, Deprecated) Required when `oauth_refresh_token` is used. Can also be sourced from `SNOWFLAKE_USE_BROWSER_AUTH` environment variable. +- `authenticator` (String) Specifies the [authentication type](https://pkg.go.dev/github.com/snowflakedb/gosnowflake#AuthType) to use when connecting to Snowflake. Valid options are: `SNOWFLAKE` | `OAUTH` | `EXTERNALBROWSER` | `OKTA` | `SNOWFLAKE_JWT` | `TOKENACCESSOR` | `USERNAMEPASSWORDMFA`. Can also be sourced from the `SNOWFLAKE_AUTHENTICATOR` environment variable. - `client_ip` (String) IP address for network checks. Can also be sourced from the `SNOWFLAKE_CLIENT_IP` environment variable. - `client_request_mfa_token` (String) When true the MFA token is cached in the credential manager. True by default in Windows/OSX. False for Linux. Can also be sourced from the `SNOWFLAKE_CLIENT_REQUEST_MFA_TOKEN` environment variable. - `client_store_temporary_credential` (String) When true the ID token is cached in the credential manager. True by default in Windows/OSX. False for Linux. 
Can also be sourced from the `SNOWFLAKE_CLIENT_STORE_TEMPORARY_CREDENTIAL` environment variable. @@ -91,34 +89,25 @@ provider "snowflake" { - `keep_session_alive` (Boolean) Enables the session to persist even after the connection is closed. Can also be sourced from the `SNOWFLAKE_KEEP_SESSION_ALIVE` environment variable. - `login_timeout` (Number) Login retry timeout in seconds EXCLUDING network roundtrip and read out http response. Can also be sourced from the `SNOWFLAKE_LOGIN_TIMEOUT` environment variable. - `max_retry_count` (Number) Specifies how many times non-periodic HTTP request can be retried by the driver. Can also be sourced from the `SNOWFLAKE_MAX_RETRY_COUNT` environment variable. -- `oauth_access_token` (String, Sensitive, Deprecated) Token for use with OAuth. Generating the token is left to other tools. Cannot be used with `browser_auth`, `private_key_path`, `oauth_refresh_token` or `password`. Can also be sourced from `SNOWFLAKE_OAUTH_ACCESS_TOKEN` environment variable. -- `oauth_client_id` (String, Sensitive, Deprecated) Required when `oauth_refresh_token` is used. Can also be sourced from `SNOWFLAKE_OAUTH_CLIENT_ID` environment variable. -- `oauth_client_secret` (String, Sensitive, Deprecated) Required when `oauth_refresh_token` is used. Can also be sourced from `SNOWFLAKE_OAUTH_CLIENT_SECRET` environment variable. -- `oauth_endpoint` (String, Sensitive, Deprecated) Required when `oauth_refresh_token` is used. Can also be sourced from `SNOWFLAKE_OAUTH_ENDPOINT` environment variable. -- `oauth_redirect_url` (String, Sensitive, Deprecated) Required when `oauth_refresh_token` is used. Can also be sourced from `SNOWFLAKE_OAUTH_REDIRECT_URL` environment variable. -- `oauth_refresh_token` (String, Sensitive, Deprecated) Token for use with OAuth. Setup and generation of the token is left to other tools. Should be used in conjunction with `oauth_client_id`, `oauth_client_secret`, `oauth_endpoint`, `oauth_redirect_url`. 
Cannot be used with `browser_auth`, `private_key_path`, `oauth_access_token` or `password`. Can also be sourced from `SNOWFLAKE_OAUTH_REFRESH_TOKEN` environment variable. - `ocsp_fail_open` (String) True represents OCSP fail open mode. False represents OCSP fail closed mode. Fail open true by default. Can also be sourced from the `SNOWFLAKE_OCSP_FAIL_OPEN` environment variable. - `okta_url` (String) The URL of the Okta server. e.g. https://example.okta.com. Okta URL host needs to have a suffix `okta.com`. Read more in Snowflake [docs](https://docs.snowflake.com/en/user-guide/oauth-okta). Can also be sourced from the `SNOWFLAKE_OKTA_URL` environment variable. - `organization_name` (String) Specifies your Snowflake organization name assigned by Snowflake. For information about account identifiers, see the [Snowflake documentation](https://docs.snowflake.com/en/user-guide/admin-account-identifier#organization-name). Required unless using `profile`. Can also be sourced from the `SNOWFLAKE_ORGANIZATION_NAME` environment variable. - `params` (Map of String) Sets other connection (i.e. session) parameters. [Parameters](https://docs.snowflake.com/en/sql-reference/parameters). This field can not be set with environmental variables. - `passcode` (String) Specifies the passcode provided by Duo when using multi-factor authentication (MFA) for login. Can also be sourced from the `SNOWFLAKE_PASSCODE` environment variable. - `passcode_in_password` (Boolean) False by default. Set to true if the MFA passcode is embedded to the configured password. Can also be sourced from the `SNOWFLAKE_PASSCODE_IN_PASSWORD` environment variable. -- `password` (String, Sensitive) Password for user + password auth. Cannot be used with `browser_auth` or `private_key_path`. Can also be sourced from the `SNOWFLAKE_PASSWORD` environment variable. +- `password` (String, Sensitive) Password for user + password auth. Cannot be used with `private_key` and `private_key_passphrase`.
Can also be sourced from the `SNOWFLAKE_PASSWORD` environment variable. - `port` (Number) Specifies a custom port value used by the driver for privatelink connections. Can also be sourced from the `SNOWFLAKE_PORT` environment variable. -- `private_key` (String, Sensitive) Private Key for username+private-key auth. Cannot be used with `browser_auth` or `password`. Can also be sourced from the `SNOWFLAKE_PRIVATE_KEY` environment variable. +- `preview_features_enabled` (Set of String) A list of preview features that are handled by the provider. See [preview features list](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/v1-preparations/LIST_OF_PREVIEW_FEATURES_FOR_V1.md). Preview features may have breaking changes in future releases, even without raising the major version. This field can not be set with environmental variables. Valid options are: `snowflake_current_account_datasource` | `snowflake_account_authentication_policy_attachment_resource` | `snowflake_account_password_policy_attachment_resource` | `snowflake_alert_resource` | `snowflake_alerts_datasource` | `snowflake_api_integration_resource` | `snowflake_authentication_policy_resource` | `snowflake_cortex_search_service_resource` | `snowflake_cortex_search_services_datasource` | `snowflake_database_datasource` | `snowflake_database_role_datasource` | `snowflake_dynamic_table_resource` | `snowflake_dynamic_tables_datasource` | `snowflake_external_function_resource` | `snowflake_external_functions_datasource` | `snowflake_external_table_resource` | `snowflake_external_tables_datasource` | `snowflake_external_volume_resource` | `snowflake_failover_group_resource` | `snowflake_failover_groups_datasource` | `snowflake_file_format_resource` | `snowflake_file_formats_datasource` | `snowflake_managed_account_resource` | `snowflake_materialized_view_resource` | `snowflake_materialized_views_datasource` | `snowflake_network_policy_attachment_resource` | `snowflake_network_rule_resource` | 
`snowflake_email_notification_integration_resource` | `snowflake_notification_integration_resource` | `snowflake_object_parameter_resource` | `snowflake_password_policy_resource` | `snowflake_pipe_resource` | `snowflake_pipes_datasource` | `snowflake_current_role_datasource` | `snowflake_sequence_resource` | `snowflake_sequences_datasource` | `snowflake_share_resource` | `snowflake_shares_datasource` | `snowflake_parameters_datasource` | `snowflake_stage_resource` | `snowflake_stages_datasource` | `snowflake_storage_integration_resource` | `snowflake_storage_integrations_datasource` | `snowflake_system_generate_scim_access_token_datasource` | `snowflake_system_get_aws_sns_iam_policy_datasource` | `snowflake_system_get_privatelink_config_datasource` | `snowflake_system_get_snowflake_platform_info_datasource` | `snowflake_table_column_masking_policy_application_resource` | `snowflake_table_constraint_resource` | `snowflake_user_authentication_policy_attachment_resource` | `snowflake_user_public_keys_resource` | `snowflake_user_password_policy_attachment_resource`. +- `private_key` (String, Sensitive) Private Key for username+private-key auth. Cannot be used with `password`. Can also be sourced from the `SNOWFLAKE_PRIVATE_KEY` environment variable. - `private_key_passphrase` (String, Sensitive) Supports the encryption ciphers aes-128-cbc, aes-128-gcm, aes-192-cbc, aes-192-gcm, aes-256-cbc, aes-256-gcm, and des-ede3-cbc. Can also be sourced from the `SNOWFLAKE_PRIVATE_KEY_PASSPHRASE` environment variable. -- `private_key_path` (String, Sensitive, Deprecated) Path to a private key for using keypair authentication. Cannot be used with `browser_auth`, `oauth_access_token` or `password`. Can also be sourced from `SNOWFLAKE_PRIVATE_KEY_PATH` environment variable. - `profile` (String) Sets the profile to read from ~/.snowflake/config file. Can also be sourced from the `SNOWFLAKE_PROFILE` environment variable. - `protocol` (String) A protocol used in the connection. 
Valid options are: `http` | `https`. Can also be sourced from the `SNOWFLAKE_PROTOCOL` environment variable. -- `region` (String, Deprecated) Snowflake region, such as "eu-central-1", with this parameter. However, since this parameter is deprecated, it is best to specify the region as part of the account parameter. For details, see the description of the account parameter. [Snowflake region](https://docs.snowflake.com/en/user-guide/intro-regions.html) to use. Required if using the [legacy format for the `account` identifier](https://docs.snowflake.com/en/user-guide/admin-account-identifier.html#format-2-legacy-account-locator-in-a-region) in the form of `.`. Can also be sourced from the `SNOWFLAKE_REGION` environment variable. - `request_timeout` (Number) request retry timeout in seconds EXCLUDING network roundtrip and read out http response. Can also be sourced from the `SNOWFLAKE_REQUEST_TIMEOUT` environment variable. - `role` (String) Specifies the role to use by default for accessing Snowflake objects in the client session. Can also be sourced from the `SNOWFLAKE_ROLE` environment variable. -- `session_params` (Map of String, Deprecated) Sets session parameters. [Parameters](https://docs.snowflake.com/en/sql-reference/parameters) - `tmp_directory_path` (String) Sets temporary directory used by the driver for operations like encrypting, compressing etc. Can also be sourced from the `SNOWFLAKE_TMP_DIRECTORY_PATH` environment variable. - `token` (String, Sensitive) Token to use for OAuth and other forms of token based auth. Can also be sourced from the `SNOWFLAKE_TOKEN` environment variable. - `token_accessor` (Block List, Max: 1) (see [below for nested schema](#nestedblock--token_accessor)) - `user` (String) Username. Required unless using `profile`. Can also be sourced from the `SNOWFLAKE_USER` environment variable. -- `username` (String, Deprecated) Username for user + password authentication. Required unless using `profile`. 
Can also be sourced from the `SNOWFLAKE_USERNAME` environment variable. - `validate_default_parameters` (String) True by default. If false, disables the validation checks for Database, Schema, Warehouse and Role at the time a connection is established. Can also be sourced from the `SNOWFLAKE_VALIDATE_DEFAULT_PARAMETERS` environment variable. - `warehouse` (String) Specifies the virtual warehouse to use by default for queries, loading, etc. in the client session. Can also be sourced from the `SNOWFLAKE_WAREHOUSE` environment variable. @@ -359,19 +348,6 @@ provider "snowflake" { } ``` -## Currently deprecated resources + -- [snowflake_database_old](./docs/resources/database_old) -- [snowflake_function](./docs/resources/function) -- [snowflake_oauth_integration](./docs/resources/oauth_integration) -- [snowflake_procedure](./docs/resources/procedure) -- [snowflake_role](./docs/resources/role) - use [snowflake_account_role](./docs/resources/account_role) instead -- [snowflake_saml_integration](./docs/resources/saml_integration) - use [snowflake_saml2_integration](./docs/resources/saml2_integration) instead -- [snowflake_stream](./docs/resources/stream) -- [snowflake_tag_masking_policy_association](./docs/resources/tag_masking_policy_association) -- [snowflake_unsafe_execute](./docs/resources/unsafe_execute) - use [snowflake_execute](./docs/resources/execute) instead - -## Currently deprecated datasources - -- [snowflake_role](./docs/data-sources/role) - use [snowflake_account_roles](./docs/data-sources/account_roles) instead -- [snowflake_roles](./docs/data-sources/roles) - use [snowflake_account_roles](./docs/data-sources/account_roles) instead + diff --git a/docs/resources/account.md b/docs/resources/account.md index 6597e1e855..fa653eb524 100644 --- a/docs/resources/account.md +++ b/docs/resources/account.md @@ -5,8 +5,6 @@ description: |- The account resource allows you to create and manage Snowflake accounts. 
--- -!> **V1 release candidate** This resource was reworked and is a release candidate for the V1. We do not expect significant changes in it before the V1. We will welcome any feedback and adjust the resource if needed. Any errors reported will be resolved with a higher priority. We encourage checking this resource out before the V1 release. Please follow the [migration guide](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/MIGRATION_GUIDE.md#v0990--v01000) to use it. - # snowflake_account (Resource) The account resource allows you to create and manage Snowflake accounts. diff --git a/docs/resources/account_authentication_policy_attachment.md b/docs/resources/account_authentication_policy_attachment.md index 2d393e9beb..4889ce6edf 100644 --- a/docs/resources/account_authentication_policy_attachment.md +++ b/docs/resources/account_authentication_policy_attachment.md @@ -5,6 +5,8 @@ description: |- Specifies the authentication policy to use for the current account. To set the authentication policy of a different account, use a provider alias. --- +!> **Caution: Preview Feature** This feature is considered a preview feature in the provider, regardless of the state of the resource in Snowflake. We do not guarantee its stability. It will be reworked and marked as a stable feature in future releases. Breaking changes are expected, even without bumping the major version. To use this feature, add the relevant feature name to `preview_features_enabled` field in the [provider configuration](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/latest/docs#schema). Please always refer to the [Getting Help](https://github.com/Snowflake-Labs/terraform-provider-snowflake?tab=readme-ov-file#getting-help) section in our Github repo to best determine how to get help for your questions. + # snowflake_account_authentication_policy_attachment (Resource) Specifies the authentication policy to use for the current account.
To set the authentication policy of a different account, use a provider alias. diff --git a/docs/resources/account_password_policy_attachment.md b/docs/resources/account_password_policy_attachment.md index f6c7487e90..548f629ab3 100644 --- a/docs/resources/account_password_policy_attachment.md +++ b/docs/resources/account_password_policy_attachment.md @@ -5,6 +5,8 @@ description: |- Specifies the password policy to use for the current account. To set the password policy of a different account, use a provider alias. --- +!> **Caution: Preview Feature** This feature is considered a preview feature in the provider, regardless of the state of the resource in Snowflake. We do not guarantee its stability. It will be reworked and marked as a stable feature in future releases. Breaking changes are expected, even without bumping the major version. To use this feature, add the relevant feature name to the `preview_features_enabled` field in the [provider configuration](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/latest/docs#schema). Please always refer to the [Getting Help](https://github.com/Snowflake-Labs/terraform-provider-snowflake?tab=readme-ov-file#getting-help) section in our GitHub repo to best determine how to get help for your questions. + # snowflake_account_password_policy_attachment (Resource) Specifies the password policy to use for the current account. To set the password policy of a different account, use a provider alias. diff --git a/docs/resources/account_role.md b/docs/resources/account_role.md index bc409840f6..a812d7e109 100644 --- a/docs/resources/account_role.md +++ b/docs/resources/account_role.md @@ -5,8 +5,6 @@ description: |- The resource is used for role management, where roles can be assigned privileges and, in turn, granted to users and other roles. When granted to roles they can create hierarchies of privilege structures.
For more details, refer to the official documentation https://docs.snowflake.com/en/user-guide/security-access-control-overview. --- -!> **V1 release candidate** This resource was reworked and is a release candidate for the V1. We do not expect significant changes in it before the V1. We will welcome any feedback and adjust the resource if needed. Any errors reported will be resolved with a higher priority. We encourage checking this resource out before the V1 release. Please follow the [migration guide](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/MIGRATION_GUIDE.md#v0920--v0930) to use it. - # snowflake_account_role (Resource) The resource is used for role management, where roles can be assigned privileges and, in turn, granted to users and other roles. When granted to roles they can create hierarchies of privilege structures. For more details, refer to the [official documentation](https://docs.snowflake.com/en/user-guide/security-access-control-overview). diff --git a/docs/resources/alert.md b/docs/resources/alert.md index 371627ce5c..5bfd84b22a 100644 --- a/docs/resources/alert.md +++ b/docs/resources/alert.md @@ -5,6 +5,8 @@ description: |- --- +!> **Caution: Preview Feature** This feature is considered a preview feature in the provider, regardless of the state of the resource in Snowflake. We do not guarantee its stability. It will be reworked and marked as a stable feature in future releases. Breaking changes are expected, even without bumping the major version. To use this feature, add the relevant feature name to `preview_features_enabled field` in the [provider configuration](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/latest/docs#schema). Please always refer to the [Getting Help](https://github.com/Snowflake-Labs/terraform-provider-snowflake?tab=readme-ov-file#getting-help) section in our Github repo to best determine how to get help for your questions. 
+ # snowflake_alert (Resource) diff --git a/docs/resources/api_authentication_integration_with_authorization_code_grant.md b/docs/resources/api_authentication_integration_with_authorization_code_grant.md index 683d691d23..ab7c1268b3 100644 --- a/docs/resources/api_authentication_integration_with_authorization_code_grant.md +++ b/docs/resources/api_authentication_integration_with_authorization_code_grant.md @@ -5,8 +5,6 @@ description: |- Resource used to manage api authentication security integration objects with authorization code grant. For more information, check security integrations documentation https://docs.snowflake.com/en/sql-reference/sql/create-security-integration-api-auth. --- -!> **V1 release candidate** This resource was reworked and is a release candidate for the V1. We do not expect significant changes in it before the V1. We will welcome any feedback and adjust the resource if needed. Any errors reported will be resolved with a higher priority. We encourage checking this resource out before the V1 release. Please follow the [migration guide](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/MIGRATION_GUIDE.md#v0920--v0930) to use it. - !> **Note** The provider does not detect external changes on security integration type. In this case, remove the integration of wrong type manually with `terraform destroy` and recreate the resource. It will be addressed in the future. # snowflake_api_authentication_integration_with_authorization_code_grant (Resource) diff --git a/docs/resources/api_authentication_integration_with_client_credentials.md b/docs/resources/api_authentication_integration_with_client_credentials.md index 539e6b51cb..9bda83adff 100644 --- a/docs/resources/api_authentication_integration_with_client_credentials.md +++ b/docs/resources/api_authentication_integration_with_client_credentials.md @@ -5,8 +5,6 @@ description: |- Resource used to manage api authentication security integration objects with client credentials. 
For more information, check security integrations documentation https://docs.snowflake.com/en/sql-reference/sql/create-security-integration-api-auth. --- -!> **V1 release candidate** This resource was reworked and is a release candidate for the V1. We do not expect significant changes in it before the V1. We will welcome any feedback and adjust the resource if needed. Any errors reported will be resolved with a higher priority. We encourage checking this resource out before the V1 release. Please follow the [migration guide](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/MIGRATION_GUIDE.md#v0920--v0930) to use it. - !> **Note** The provider does not detect external changes on security integration type. In this case, remove the integration of wrong type manually with `terraform destroy` and recreate the resource. It will be addressed in the future. # snowflake_api_authentication_integration_with_client_credentials (Resource) diff --git a/docs/resources/api_authentication_integration_with_jwt_bearer.md b/docs/resources/api_authentication_integration_with_jwt_bearer.md index 623a15d70e..47a0a03bcb 100644 --- a/docs/resources/api_authentication_integration_with_jwt_bearer.md +++ b/docs/resources/api_authentication_integration_with_jwt_bearer.md @@ -5,8 +5,6 @@ description: |- Resource used to manage api authentication security integration objects with jwt bearer. For more information, check security integrations documentation https://docs.snowflake.com/en/sql-reference/sql/create-security-integration-api-auth. --- -!> **V1 release candidate** This resource was reworked and is a release candidate for the V1. We do not expect significant changes in it before the V1. We will welcome any feedback and adjust the resource if needed. Any errors reported will be resolved with a higher priority. We encourage checking this resource out before the V1 release. 
Please follow the [migration guide](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/MIGRATION_GUIDE.md#v0920--v0930) to use it. - !> **Note** The provider does not detect external changes on security integration type. In this case, remove the integration of wrong type manually with `terraform destroy` and recreate the resource. It will be addressed in the future. # snowflake_api_authentication_integration_with_jwt_bearer (Resource) diff --git a/docs/resources/api_integration.md b/docs/resources/api_integration.md index 8af2950c86..0432d680e2 100644 --- a/docs/resources/api_integration.md +++ b/docs/resources/api_integration.md @@ -5,6 +5,8 @@ description: |- --- +!> **Caution: Preview Feature** This feature is considered a preview feature in the provider, regardless of the state of the resource in Snowflake. We do not guarantee its stability. It will be reworked and marked as a stable feature in future releases. Breaking changes are expected, even without bumping the major version. To use this feature, add the relevant feature name to `preview_features_enabled field` in the [provider configuration](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/latest/docs#schema). Please always refer to the [Getting Help](https://github.com/Snowflake-Labs/terraform-provider-snowflake?tab=readme-ov-file#getting-help) section in our Github repo to best determine how to get help for your questions. + # snowflake_api_integration (Resource) diff --git a/docs/resources/authentication_policy.md b/docs/resources/authentication_policy.md index bd78a8eef8..42493edf0f 100644 --- a/docs/resources/authentication_policy.md +++ b/docs/resources/authentication_policy.md @@ -5,6 +5,8 @@ description: |- Resource used to manage authentication policy objects. For more information, check authentication policy documentation https://docs.snowflake.com/en/sql-reference/sql/create-authentication-policy. 
--- +!> **Caution: Preview Feature** This feature is considered a preview feature in the provider, regardless of the state of the resource in Snowflake. We do not guarantee its stability. It will be reworked and marked as a stable feature in future releases. Breaking changes are expected, even without bumping the major version. To use this feature, add the relevant feature name to `preview_features_enabled field` in the [provider configuration](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/latest/docs#schema). Please always refer to the [Getting Help](https://github.com/Snowflake-Labs/terraform-provider-snowflake?tab=readme-ov-file#getting-help) section in our Github repo to best determine how to get help for your questions. + !> **Note** According to Snowflake [docs](https://docs.snowflake.com/en/sql-reference/sql/drop-authentication-policy#usage-notes), an authentication policy cannot be dropped successfully if it is currently assigned to another object. Currently, the provider does not unassign such objects automatically. Before dropping the resource, first unassign the policy from the relevant objects. See [guide](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/latest/docs/guides/unassigning_policies) for more details. # snowflake_authentication_policy (Resource) diff --git a/docs/resources/cortex_search_service.md b/docs/resources/cortex_search_service.md index 0d93abbd52..d88516fe29 100644 --- a/docs/resources/cortex_search_service.md +++ b/docs/resources/cortex_search_service.md @@ -5,7 +5,7 @@ description: |- --- -!> **Disclaimer for Cortex Search service** Note that Cortex Search is a Private Preview feature as such, should be used only with non-production data even when using Snowflake's Terraform Provider. Also, note that the Terraform Provider is not covered by Snowflake's support team; the Product and Engineering teams are available for any questions. 
However, please contact the Cortex Search team for any issues with this object. +!> **Caution: Preview Feature** This feature is considered a preview feature in the provider, regardless of the state of the resource in Snowflake. We do not guarantee its stability. It will be reworked and marked as a stable feature in future releases. Breaking changes are expected, even without bumping the major version. To use this feature, add the relevant feature name to `preview_features_enabled field` in the [provider configuration](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/latest/docs#schema). Please always refer to the [Getting Help](https://github.com/Snowflake-Labs/terraform-provider-snowflake?tab=readme-ov-file#getting-help) section in our Github repo to best determine how to get help for your questions. # snowflake_cortex_search_service (Resource) diff --git a/docs/resources/database.md b/docs/resources/database.md index 95df361ef3..4c9b67b239 100644 --- a/docs/resources/database.md +++ b/docs/resources/database.md @@ -5,8 +5,6 @@ description: |- Represents a standard database. If replication configuration is specified, the database is promoted to serve as a primary database for replication. --- -!> **V1 release candidate** This resource was reworked and is a release candidate for the V1. We do not expect significant changes in it before the V1. We will welcome any feedback and adjust the resource if needed. Any errors reported will be resolved with a higher priority. We encourage checking this resource out before the V1 release. Please follow the [migration guide](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/MIGRATION_GUIDE.md#v0920--v0930) to use it. - !> **Note** The provider does not detect external changes on database type. In this case, remove the database of wrong type manually with `terraform destroy` and recreate the resource. It will be addressed in the future. 
!> **Note** A database cannot be dropped successfully if it contains network rule-network policy associations. The error looks like `098507 (2BP01): Cannot drop database DATABASE as it includes network rule - policy associations.`. Currently, the provider does not unassign such objects automatically. Before dropping the resource, first unassign the network rule from the relevant objects. See [guide](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/latest/docs/guides/unassigning_policies) for more details. diff --git a/docs/resources/database_old.md b/docs/resources/database_old.md deleted file mode 100644 index a0ee5bb8c1..0000000000 --- a/docs/resources/database_old.md +++ /dev/null @@ -1,88 +0,0 @@ ---- -page_title: "snowflake_database_old Resource - terraform-provider-snowflake" -subcategory: "" -description: |- - ---- - -# snowflake_database_old (Resource) - -~> **Deprecation** This resource is deprecated and will be removed in a future major version release. Please use snowflake_database or snowflake_shared_database or snowflake_secondary_database instead. 
- -## Example Usage - -```terraform -resource "snowflake_database_old" "simple" { - name = "testing" - comment = "test comment" - data_retention_time_in_days = 3 -} - -resource "snowflake_database_old" "with_replication" { - name = "testing_2" - comment = "test comment 2" - replication_configuration { - accounts = ["test_account1", "test_account_2"] - ignore_edition_check = true - } -} - -resource "snowflake_database_old" "from_replica" { - name = "testing_3" - comment = "test comment" - data_retention_time_in_days = 3 - from_replica = "\"org1\".\"account1\".\"primary_db_name\"" -} - -resource "snowflake_database_old" "from_share" { - name = "testing_4" - comment = "test comment" - from_share = { - provider = "account1_locator" - share = "share1" - } -} -``` - --> **Note** Instead of using fully_qualified_name, you can reference objects managed outside Terraform by constructing a correct ID, consult [identifiers guide](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/latest/docs/guides/identifiers#new-computed-fully-qualified-name-field-in-resources). - - - -## Schema - -### Required - -- `name` (String) Specifies the identifier for the database; must be unique for your account. - -### Optional - -- `comment` (String) Specifies a comment for the database. -- `data_retention_time_in_days` (Number) Number of days for which Snowflake retains historical data for performing Time Travel actions (SELECT, CLONE, UNDROP) on the object. A value of 0 effectively disables Time Travel for the specified database. Default value for this field is set to -1, which is a fallback to use Snowflake default. For more information, see [Understanding & Using Time Travel](https://docs.snowflake.com/en/user-guide/data-time-travel). -- `from_database` (String) Specify a database to create a clone from. -- `from_replica` (String) Specify a fully-qualified path to a database to create a replica from. A fully qualified path follows the format of `""."".""`. 
An example would be: `"myorg1"."account1"."db1"` -- `from_share` (Map of String) Specify a provider and a share in this map to create a database from a share. As of version 0.87.0, the provider field is the account locator. -- `is_transient` (Boolean) Specifies a database as transient. Transient databases do not have a Fail-safe period so they do not incur additional storage costs once they leave Time Travel; however, this means they are also not protected by Fail-safe in the event of a data loss. -- `replication_configuration` (Block List, Max: 1) When set, specifies the configurations for database replication. (see [below for nested schema](#nestedblock--replication_configuration)) - -### Read-Only - -- `id` (String) The ID of this resource. - - -### Nested Schema for `replication_configuration` - -Required: - -- `accounts` (List of String) - -Optional: - -- `ignore_edition_check` (Boolean) - -## Import - -Import is supported using the following syntax: - -```shell -terraform import snowflake_database_old.example 'database_name' -``` diff --git a/docs/resources/database_role.md b/docs/resources/database_role.md index 706ceba760..747e2f6874 100644 --- a/docs/resources/database_role.md +++ b/docs/resources/database_role.md @@ -5,8 +5,6 @@ description: |- Resource used to manage database roles. For more information, check database roles documentation https://docs.snowflake.com/en/sql-reference/sql/create-database-role. --- -!> **V1 release candidate** This resource was reworked and is a release candidate for the V1. We do not expect significant changes in it before the V1. We will welcome any feedback and adjust the resource if needed. Any errors reported will be resolved with a higher priority. We encourage checking this resource out before the V1 release. - # snowflake_database_role (Resource) Resource used to manage database roles. For more information, check [database roles documentation](https://docs.snowflake.com/en/sql-reference/sql/create-database-role). 
diff --git a/docs/resources/dynamic_table.md b/docs/resources/dynamic_table.md index 76d7071078..ac511e1ffe 100644 --- a/docs/resources/dynamic_table.md +++ b/docs/resources/dynamic_table.md @@ -5,6 +5,8 @@ description: |- --- +!> **Caution: Preview Feature** This feature is considered a preview feature in the provider, regardless of the state of the resource in Snowflake. We do not guarantee its stability. It will be reworked and marked as a stable feature in future releases. Breaking changes are expected, even without bumping the major version. To use this feature, add the relevant feature name to `preview_features_enabled field` in the [provider configuration](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/latest/docs#schema). Please always refer to the [Getting Help](https://github.com/Snowflake-Labs/terraform-provider-snowflake?tab=readme-ov-file#getting-help) section in our Github repo to best determine how to get help for your questions. + # snowflake_dynamic_table (Resource) diff --git a/docs/resources/email_notification_integration.md b/docs/resources/email_notification_integration.md index edc8483e44..33a9071279 100644 --- a/docs/resources/email_notification_integration.md +++ b/docs/resources/email_notification_integration.md @@ -5,6 +5,8 @@ description: |- --- +!> **Caution: Preview Feature** This feature is considered a preview feature in the provider, regardless of the state of the resource in Snowflake. We do not guarantee its stability. It will be reworked and marked as a stable feature in future releases. Breaking changes are expected, even without bumping the major version. To use this feature, add the relevant feature name to `preview_features_enabled field` in the [provider configuration](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/latest/docs#schema). 
Please always refer to the [Getting Help](https://github.com/Snowflake-Labs/terraform-provider-snowflake?tab=readme-ov-file#getting-help) section in our GitHub repo to best determine how to get help for your questions. + # snowflake_email_notification_integration (Resource) diff --git a/docs/resources/external_function.md b/docs/resources/external_function.md index cbc68ef06c..fb426800a3 100644 --- a/docs/resources/external_function.md +++ b/docs/resources/external_function.md @@ -5,6 +5,8 @@ description: |- --- +!> **Caution: Preview Feature** This feature is considered a preview feature in the provider, regardless of the state of the resource in Snowflake. We do not guarantee its stability. It will be reworked and marked as a stable feature in future releases. Breaking changes are expected, even without bumping the major version. To use this feature, add the relevant feature name to the `preview_features_enabled` field in the [provider configuration](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/latest/docs#schema). Please always refer to the [Getting Help](https://github.com/Snowflake-Labs/terraform-provider-snowflake?tab=readme-ov-file#getting-help) section in our GitHub repo to best determine how to get help for your questions. + # snowflake_external_function (Resource) diff --git a/docs/resources/external_oauth_integration.md b/docs/resources/external_oauth_integration.md index 2bcb6b2dc7..0b723cdc20 100644 --- a/docs/resources/external_oauth_integration.md +++ b/docs/resources/external_oauth_integration.md @@ -5,8 +5,6 @@ description: |- Resource used to manage external oauth security integration objects. For more information, check security integrations documentation https://docs.snowflake.com/en/sql-reference/sql/create-security-integration-oauth-external. --- -!> **V1 release candidate** This resource was reworked and is a release candidate for the V1.
We will welcome any feedback and adjust the resource if needed. Any errors reported will be resolved with a higher priority. We encourage checking this resource out before the V1 release. Please follow the [migration guide](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/MIGRATION_GUIDE.md#v0920--v0930) to use it. - !> **Note** The provider does not detect external changes on security integration type. In this case, remove the integration of wrong type manually with `terraform destroy` and recreate the resource. It will be addressed in the future. # snowflake_external_oauth_integration (Resource) diff --git a/docs/resources/external_table.md b/docs/resources/external_table.md index 6a8125635b..f9cd0a5456 100644 --- a/docs/resources/external_table.md +++ b/docs/resources/external_table.md @@ -5,6 +5,8 @@ description: |- --- +!> **Caution: Preview Feature** This feature is considered a preview feature in the provider, regardless of the state of the resource in Snowflake. We do not guarantee its stability. It will be reworked and marked as a stable feature in future releases. Breaking changes are expected, even without bumping the major version. To use this feature, add the relevant feature name to `preview_features_enabled field` in the [provider configuration](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/latest/docs#schema). Please always refer to the [Getting Help](https://github.com/Snowflake-Labs/terraform-provider-snowflake?tab=readme-ov-file#getting-help) section in our Github repo to best determine how to get help for your questions. + # snowflake_external_table (Resource) diff --git a/docs/resources/external_volume.md b/docs/resources/external_volume.md index 943f610e93..8bc0d9f3c8 100644 --- a/docs/resources/external_volume.md +++ b/docs/resources/external_volume.md @@ -5,6 +5,8 @@ description: |- Resource used to manage external volume objects. 
For more information, check external volume documentation https://docs.snowflake.com/en/sql-reference/commands-data-loading#external-volume. --- +!> **Caution: Preview Feature** This feature is considered a preview feature in the provider, regardless of the state of the resource in Snowflake. We do not guarantee its stability. It will be reworked and marked as a stable feature in future releases. Breaking changes are expected, even without bumping the major version. To use this feature, add the relevant feature name to `preview_features_enabled field` in the [provider configuration](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/latest/docs#schema). Please always refer to the [Getting Help](https://github.com/Snowflake-Labs/terraform-provider-snowflake?tab=readme-ov-file#getting-help) section in our Github repo to best determine how to get help for your questions. + # snowflake_external_volume (Resource) Resource used to manage external volume objects. For more information, check [external volume documentation](https://docs.snowflake.com/en/sql-reference/commands-data-loading#external-volume). diff --git a/docs/resources/failover_group.md b/docs/resources/failover_group.md index 99ec5c08c2..db28eee2f8 100644 --- a/docs/resources/failover_group.md +++ b/docs/resources/failover_group.md @@ -5,6 +5,8 @@ description: |- --- +!> **Caution: Preview Feature** This feature is considered a preview feature in the provider, regardless of the state of the resource in Snowflake. We do not guarantee its stability. It will be reworked and marked as a stable feature in future releases. Breaking changes are expected, even without bumping the major version. To use this feature, add the relevant feature name to `preview_features_enabled field` in the [provider configuration](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/latest/docs#schema). 
Please always refer to the [Getting Help](https://github.com/Snowflake-Labs/terraform-provider-snowflake?tab=readme-ov-file#getting-help) section in our Github repo to best determine how to get help for your questions. + # snowflake_failover_group (Resource) diff --git a/docs/resources/file_format.md b/docs/resources/file_format.md index 31b301f477..894d9de520 100644 --- a/docs/resources/file_format.md +++ b/docs/resources/file_format.md @@ -5,6 +5,8 @@ description: |- --- +!> **Caution: Preview Feature** This feature is considered a preview feature in the provider, regardless of the state of the resource in Snowflake. We do not guarantee its stability. It will be reworked and marked as a stable feature in future releases. Breaking changes are expected, even without bumping the major version. To use this feature, add the relevant feature name to `preview_features_enabled field` in the [provider configuration](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/latest/docs#schema). Please always refer to the [Getting Help](https://github.com/Snowflake-Labs/terraform-provider-snowflake?tab=readme-ov-file#getting-help) section in our Github repo to best determine how to get help for your questions. + # snowflake_file_format (Resource) diff --git a/docs/resources/function.md b/docs/resources/function.md deleted file mode 100644 index 43b28436fc..0000000000 --- a/docs/resources/function.md +++ /dev/null @@ -1,138 +0,0 @@ ---- -page_title: "snowflake_function Resource - terraform-provider-snowflake" -subcategory: "" -description: |- - ---- - -# snowflake_function (Resource) - -~> **Deprecation** This resource is deprecated and will be removed in a future major version release. Please use snowflake_function_java, snowflake_function_javascript, snowflake_function_python, snowflake_function_scala, and snowflake_function_sql instead. 
- -## Example Usage - -```terraform -// Provider configuration -provider "snowflake" { - region = "REGION" // Default is "us-west-2" - username = "USERNAME" - account = "ACCOUNT" - password = "PASSWORD" - role = "MY_ROLE" - warehouse = "MY_WH" // Optional attribute, some resources (e.g. Python UDFs)' require a warehouse to create and can also be set optionally from the `SNOWFLAKE_WAREHOUSE` environment variable -} - -// Create database -resource "snowflake_database" "db" { - name = "MY_DB" - data_retention_days = 1 -} - -// Create schema -resource "snowflake_schema" "schema" { - database = snowflake_database.db.name - name = "MY_SCHEMA" - data_retention_days = 1 -} - -// Example for Java language -resource "snowflake_function" "test_funct_java" { - name = "my_java_func" - database = "MY_DB" - schema = "MY_SCHEMA" - arguments { - name = "arg1" - type = "number" - } - comment = "Example for java language" - return_type = "varchar" - language = "java" - handler = "CoolFunc.test" - statement = "class CoolFunc {public static String test(int n) {return \"hello!\";}}" -} - -// Example for Python language -resource "snowflake_function" "python_test" { - name = "MY_PYTHON_FUNC" - database = "MY_DB" - schema = "MY_SCHEMA" - arguments { - name = "arg1" - type = "number" - } - comment = "Example for Python language" - return_type = "NUMBER(38,0)" - null_input_behavior = "CALLED ON NULL INPUT" - return_behavior = "VOLATILE" - language = "python" - runtime_version = "3.8" - handler = "add_py" - statement = "def add_py(i): return i+1" -} - -// Example SQL language -resource "snowflake_function" "sql_test" { - name = "MY_SQL_FUNC" - database = "MY_DB" - schema = "MY_SCHEMA" - arguments { - name = "arg1" - type = "number" - } - comment = "Example for SQL language" - return_type = "NUMBER(38,0)" - null_input_behavior = "CALLED ON NULL INPUT" - return_behavior = "VOLATILE" - statement = "select arg1 + 1" -} -``` - --> **Note** Instead of using fully_qualified_name, you can reference 
objects managed outside Terraform by constructing a correct ID, consult [identifiers guide](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/latest/docs/guides/identifiers#new-computed-fully-qualified-name-field-in-resources). - - - -## Schema - -### Required - -- `database` (String) The database in which to create the function. Don't use the | character. -- `name` (String) Specifies the identifier for the function; does not have to be unique for the schema in which the function is created. Don't use the | character. -- `return_type` (String) The return type of the function -- `schema` (String) The schema in which to create the function. Don't use the | character. -- `statement` (String) Specifies the javascript / java / scala / sql / python code used to create the function. - -### Optional - -- `arguments` (Block List) List of the arguments for the function (see [below for nested schema](#nestedblock--arguments)) -- `comment` (String) Specifies a comment for the function. -- `handler` (String) The handler method for Java / Python function. -- `imports` (List of String) Imports for Java / Python functions. For Java this a list of jar files, for Python this is a list of Python files. -- `is_secure` (Boolean) Specifies that the function is secure. -- `language` (String) Specifies the language of the stored function code. -- `null_input_behavior` (String) Specifies the behavior of the function when called with null inputs. -- `packages` (List of String) List of package imports to use for Java / Python functions. For Java, package imports should be of the form: package_name:version_number, where package_name is snowflake_domain:package. For Python use it should be: ('numpy','pandas','xgboost==1.5.0'). -- `return_behavior` (String) Specifies the behavior of the function when returning results -- `runtime_version` (String) Required for Python functions. Specifies Python runtime version. 
-- `target_path` (String) The target path for the Java / Python functions. For Java, it is the path of compiled jar files and for the Python it is the path of the Python files. - -### Read-Only - -- `fully_qualified_name` (String) Fully qualified name of the resource. For more information, see [object name resolution](https://docs.snowflake.com/en/sql-reference/name-resolution). -- `id` (String) The ID of this resource. - - -### Nested Schema for `arguments` - -Required: - -- `name` (String) The argument name -- `type` (String) The argument type - -## Import - -Import is supported using the following syntax: - -```shell -# format is ..() -terraform import snowflake_function.example 'dbName.schemaName.functionName(varchar, varchar, varchar)' -``` diff --git a/docs/resources/function_java.md b/docs/resources/function_java.md index e1c86fdda8..5570e2575e 100644 --- a/docs/resources/function_java.md +++ b/docs/resources/function_java.md @@ -5,6 +5,8 @@ description: |- Resource used to manage java function objects. For more information, check function documentation https://docs.snowflake.com/en/sql-reference/sql/create-function. --- +!> **Caution: Preview Feature** This feature is considered a preview feature in the provider, regardless of the state of the resource in Snowflake. We do not guarantee its stability. It will be reworked and marked as a stable feature in future releases. Breaking changes are expected, even without bumping the major version. To use this feature, add the relevant feature name to `preview_features_enabled field` in the [provider configuration](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/latest/docs#schema). Please always refer to the [Getting Help](https://github.com/Snowflake-Labs/terraform-provider-snowflake?tab=readme-ov-file#getting-help) section in our Github repo to best determine how to get help for your questions. 
+ -> **Note** External changes to `is_secure`, `return_results_behavior`, and `null_input_behavior` are not currently supported. They will be handled in the following versions of the provider which may still affect this resource. -> **Note** `COPY GRANTS` and `OR REPLACE` are not currently supported. diff --git a/docs/resources/function_javascript.md b/docs/resources/function_javascript.md index c81f21a080..9333693081 100644 --- a/docs/resources/function_javascript.md +++ b/docs/resources/function_javascript.md @@ -5,6 +5,8 @@ description: |- Resource used to manage javascript function objects. For more information, check function documentation https://docs.snowflake.com/en/sql-reference/sql/create-function. --- +!> **Caution: Preview Feature** This feature is considered a preview feature in the provider, regardless of the state of the resource in Snowflake. We do not guarantee its stability. It will be reworked and marked as a stable feature in future releases. Breaking changes are expected, even without bumping the major version. To use this feature, add the relevant feature name to `preview_features_enabled field` in the [provider configuration](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/latest/docs#schema). Please always refer to the [Getting Help](https://github.com/Snowflake-Labs/terraform-provider-snowflake?tab=readme-ov-file#getting-help) section in our Github repo to best determine how to get help for your questions. + -> **Note** External changes to `is_secure`, `return_results_behavior`, and `null_input_behavior` are not currently supported. They will be handled in the following versions of the provider which may still affect this resource. -> **Note** `COPY GRANTS` and `OR REPLACE` are not currently supported. 
diff --git a/docs/resources/function_python.md b/docs/resources/function_python.md index 66fee1c02f..a2cf487db5 100644 --- a/docs/resources/function_python.md +++ b/docs/resources/function_python.md @@ -5,6 +5,8 @@ description: |- Resource used to manage python function objects. For more information, check function documentation https://docs.snowflake.com/en/sql-reference/sql/create-function. --- +!> **Caution: Preview Feature** This feature is considered a preview feature in the provider, regardless of the state of the resource in Snowflake. We do not guarantee its stability. It will be reworked and marked as a stable feature in future releases. Breaking changes are expected, even without bumping the major version. To use this feature, add the relevant feature name to `preview_features_enabled field` in the [provider configuration](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/latest/docs#schema). Please always refer to the [Getting Help](https://github.com/Snowflake-Labs/terraform-provider-snowflake?tab=readme-ov-file#getting-help) section in our Github repo to best determine how to get help for your questions. + -> **Note** External changes to `is_secure`, `return_results_behavior`, and `null_input_behavior` are not currently supported. They will be handled in the following versions of the provider which may still affect this resource. -> **Note** `COPY GRANTS` and `OR REPLACE` are not currently supported. diff --git a/docs/resources/function_scala.md b/docs/resources/function_scala.md index b9da549e0f..9a03c90548 100644 --- a/docs/resources/function_scala.md +++ b/docs/resources/function_scala.md @@ -5,6 +5,8 @@ description: |- Resource used to manage scala function objects. For more information, check function documentation https://docs.snowflake.com/en/sql-reference/sql/create-function. --- +!> **Caution: Preview Feature** This feature is considered a preview feature in the provider, regardless of the state of the resource in Snowflake. 
We do not guarantee its stability. It will be reworked and marked as a stable feature in future releases. Breaking changes are expected, even without bumping the major version. To use this feature, add the relevant feature name to `preview_features_enabled field` in the [provider configuration](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/latest/docs#schema). Please always refer to the [Getting Help](https://github.com/Snowflake-Labs/terraform-provider-snowflake?tab=readme-ov-file#getting-help) section in our Github repo to best determine how to get help for your questions. + -> **Note** External changes to `is_secure`, `return_results_behavior`, and `null_input_behavior` are not currently supported. They will be handled in the following versions of the provider which may still affect this resource. -> **Note** `COPY GRANTS` and `OR REPLACE` are not currently supported. diff --git a/docs/resources/function_sql.md b/docs/resources/function_sql.md index 82efdc328a..66124dc89f 100644 --- a/docs/resources/function_sql.md +++ b/docs/resources/function_sql.md @@ -5,6 +5,8 @@ description: |- Resource used to manage sql function objects. For more information, check function documentation https://docs.snowflake.com/en/sql-reference/sql/create-function. --- +!> **Caution: Preview Feature** This feature is considered a preview feature in the provider, regardless of the state of the resource in Snowflake. We do not guarantee its stability. It will be reworked and marked as a stable feature in future releases. Breaking changes are expected, even without bumping the major version. To use this feature, add the relevant feature name to `preview_features_enabled field` in the [provider configuration](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/latest/docs#schema). 
Please always refer to the [Getting Help](https://github.com/Snowflake-Labs/terraform-provider-snowflake?tab=readme-ov-file#getting-help) section in our Github repo to best determine how to get help for your questions. + -> **Note** External changes to `is_secure` and `return_results_behavior` are not currently supported. They will be handled in the following versions of the provider which may still affect this resource. -> **Note** `COPY GRANTS` and `OR REPLACE` are not currently supported. diff --git a/docs/resources/grant_account_role.md b/docs/resources/grant_account_role.md index c98a6b08f7..365ba28c08 100644 --- a/docs/resources/grant_account_role.md +++ b/docs/resources/grant_account_role.md @@ -5,8 +5,6 @@ description: |- --- -!> **V1 release candidate** This resource was reworked and is a release candidate for the V1. We do not expect significant changes in it before the V1. We will welcome any feedback and adjust the resource if needed. Any errors reported will be resolved with a higher priority. We encourage checking this resource out before the V1 release. To migrate from older grant resources please follow the [migration guide](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/MIGRATION_GUIDE.md#migration-from-old-grant-resources-to-new-ones). - # snowflake_grant_account_role (Resource) diff --git a/docs/resources/grant_application_role.md b/docs/resources/grant_application_role.md index 989c27e7c2..5f4fafaefd 100644 --- a/docs/resources/grant_application_role.md +++ b/docs/resources/grant_application_role.md @@ -5,8 +5,6 @@ description: |- --- -!> **V1 release candidate** This resource was reworked and is a release candidate for the V1. We do not expect significant changes in it before the V1. We will welcome any feedback and adjust the resource if needed. Any errors reported will be resolved with a higher priority. We encourage checking this resource out before the V1 release. 
To migrate from older grant resources please follow the [migration guide](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/MIGRATION_GUIDE.md#migration-from-old-grant-resources-to-new-ones). - # snowflake_grant_application_role (Resource) diff --git a/docs/resources/grant_database_role.md b/docs/resources/grant_database_role.md index 9ac1dcf848..65f7a9abd4 100644 --- a/docs/resources/grant_database_role.md +++ b/docs/resources/grant_database_role.md @@ -5,8 +5,6 @@ description: |- --- -!> **V1 release candidate** This resource was reworked and is a release candidate for the V1. We do not expect significant changes in it before the V1. We will welcome any feedback and adjust the resource if needed. Any errors reported will be resolved with a higher priority. We encourage checking this resource out before the V1 release. To migrate from older grant resources please follow the [migration guide](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/MIGRATION_GUIDE.md#migration-from-old-grant-resources-to-new-ones). - # snowflake_grant_database_role (Resource) diff --git a/docs/resources/grant_ownership.md b/docs/resources/grant_ownership.md index 9536f727b4..0e31df7122 100644 --- a/docs/resources/grant_ownership.md +++ b/docs/resources/grant_ownership.md @@ -6,8 +6,6 @@ description: |- --- -!> **V1 release candidate** This resource was reworked and is a release candidate for the V1. We do not expect significant changes in it before the V1. We will welcome any feedback and adjust the resource if needed. Any errors reported will be resolved with a higher priority. We encourage checking this resource out before the V1 release. To migrate from older grant resources please follow the [migration guide](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/MIGRATION_GUIDE.md#migration-from-old-grant-resources-to-new-ones). 
- ~> **Note** For more details about granting ownership, please visit [`GRANT OWNERSHIP` Snowflake documentation page](https://docs.snowflake.com/en/sql-reference/sql/grant-ownership). !> **Warning** Grant ownership resource still has some limitations. Delete operation is not implemented for on_future grants (you have to remove the config and then revoke ownership grant on future X manually). diff --git a/docs/resources/grant_privileges_to_account_role.md b/docs/resources/grant_privileges_to_account_role.md index dfc4dee7f0..1f649c4a95 100644 --- a/docs/resources/grant_privileges_to_account_role.md +++ b/docs/resources/grant_privileges_to_account_role.md @@ -6,8 +6,6 @@ description: |- --- -!> **V1 release candidate** This resource was reworked and is a release candidate for the V1. We do not expect significant changes in it before the V1. We will welcome any feedback and adjust the resource if needed. Any errors reported will be resolved with a higher priority. We encourage checking this resource out before the V1 release. To migrate from older grant resources please follow the [migration guide](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/MIGRATION_GUIDE.md#migration-from-old-grant-resources-to-new-ones). - !> **Warning** Be careful when using `always_apply` field. It will always produce a plan (even when no changes were made) and can be harmful in some setups. For more details why we decided to introduce it to go our document explaining those design decisions (coming soon). diff --git a/docs/resources/grant_privileges_to_database_role.md b/docs/resources/grant_privileges_to_database_role.md index 9ad169db1b..aef4fff7e8 100644 --- a/docs/resources/grant_privileges_to_database_role.md +++ b/docs/resources/grant_privileges_to_database_role.md @@ -6,8 +6,6 @@ description: |- --- -!> **V1 release candidate** This resource was reworked and is a release candidate for the V1. We do not expect significant changes in it before the V1. 
We will welcome any feedback and adjust the resource if needed. Any errors reported will be resolved with a higher priority. We encourage checking this resource out before the V1 release. To migrate from older grant resources please follow the [migration guide](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/MIGRATION_GUIDE.md#migration-from-old-grant-resources-to-new-ones). - !> **Warning** Be careful when using `always_apply` field. It will always produce a plan (even when no changes were made) and can be harmful in some setups. For more details why we decided to introduce it to go our document explaining those design decisions (coming soon). diff --git a/docs/resources/grant_privileges_to_share.md b/docs/resources/grant_privileges_to_share.md index f22c2cb496..35d14f1195 100644 --- a/docs/resources/grant_privileges_to_share.md +++ b/docs/resources/grant_privileges_to_share.md @@ -6,8 +6,6 @@ description: |- --- -!> **V1 release candidate** This resource was reworked and is a release candidate for the V1. We do not expect significant changes in it before the V1. We will welcome any feedback and adjust the resource if needed. Any errors reported will be resolved with a higher priority. We encourage checking this resource out before the V1 release. To migrate from older grant resources please follow the [migration guide](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/MIGRATION_GUIDE.md#migration-from-old-grant-resources-to-new-ones). - # snowflake_grant_privileges_to_share (Resource) diff --git a/docs/resources/legacy_service_user.md b/docs/resources/legacy_service_user.md index 35847df2a0..a47cf4fa42 100644 --- a/docs/resources/legacy_service_user.md +++ b/docs/resources/legacy_service_user.md @@ -5,8 +5,6 @@ description: |- Resource used to manage legacy service user objects. For more information, check user documentation https://docs.snowflake.com/en/sql-reference/commands-user-role#user-management. 
--- -!> **V1 release candidate** This resource was reworked and is a release candidate for the V1. We do not expect significant changes in it before the V1. We will welcome any feedback and adjust the resource if needed. Any errors reported will be resolved with a higher priority. We encourage checking this resource out before the V1 release. Please follow the [migration guide](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/MIGRATION_GUIDE.md#v0960--v0970) to use it. - -> **Note** `snowflake_user_password_policy_attachment` will be reworked in the following versions of the provider which may still affect this resource. -> **Note** Attaching user policies will be handled in the following versions of the provider which may still affect this resource. diff --git a/docs/resources/managed_account.md b/docs/resources/managed_account.md index 48a8cd5010..f20ada56ea 100644 --- a/docs/resources/managed_account.md +++ b/docs/resources/managed_account.md @@ -5,6 +5,8 @@ description: |- --- +!> **Caution: Preview Feature** This feature is considered a preview feature in the provider, regardless of the state of the resource in Snowflake. We do not guarantee its stability. It will be reworked and marked as a stable feature in future releases. Breaking changes are expected, even without bumping the major version. To use this feature, add the relevant feature name to `preview_features_enabled field` in the [provider configuration](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/latest/docs#schema). Please always refer to the [Getting Help](https://github.com/Snowflake-Labs/terraform-provider-snowflake?tab=readme-ov-file#getting-help) section in our Github repo to best determine how to get help for your questions. 
+ # snowflake_managed_account (Resource) diff --git a/docs/resources/masking_policy.md b/docs/resources/masking_policy.md index 4adb212f90..27f4fd1816 100644 --- a/docs/resources/masking_policy.md +++ b/docs/resources/masking_policy.md @@ -5,8 +5,6 @@ description: |- Resource used to manage masking policies. For more information, check masking policies documentation https://docs.snowflake.com/en/sql-reference/sql/create-masking-policy. --- -!> **V1 release candidate** This resource was reworked and is a release candidate for the V1. We do not expect significant changes in it before the V1. We will welcome any feedback and adjust the resource if needed. Any errors reported will be resolved with a higher priority. We encourage checking this resource out before the V1 release. Please follow the [migration guide](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/MIGRATION_GUIDE.md#v0950--v0960) to use it. - !> **Note** According to Snowflake [docs](https://docs.snowflake.com/en/sql-reference/sql/drop-masking-policy#usage-notes), a masking policy cannot be dropped successfully if it is currently assigned to another object. Currently, the provider does not unassign such objects automatically. Before dropping the resource, first unassign the policy from the relevant objects. See [guide](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/latest/docs/guides/unassigning_policies) for more details. # snowflake_masking_policy (Resource) diff --git a/docs/resources/materialized_view.md b/docs/resources/materialized_view.md index 4ca0ec89af..8db7497e37 100644 --- a/docs/resources/materialized_view.md +++ b/docs/resources/materialized_view.md @@ -5,6 +5,8 @@ description: |- --- +!> **Caution: Preview Feature** This feature is considered a preview feature in the provider, regardless of the state of the resource in Snowflake. We do not guarantee its stability. It will be reworked and marked as a stable feature in future releases. 
Breaking changes are expected, even without bumping the major version. To use this feature, add the relevant feature name to `preview_features_enabled field` in the [provider configuration](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/latest/docs#schema). Please always refer to the [Getting Help](https://github.com/Snowflake-Labs/terraform-provider-snowflake?tab=readme-ov-file#getting-help) section in our Github repo to best determine how to get help for your questions. + # snowflake_materialized_view (Resource) diff --git a/docs/resources/network_policy.md b/docs/resources/network_policy.md index b77d2776f1..98c7dd1ef0 100644 --- a/docs/resources/network_policy.md +++ b/docs/resources/network_policy.md @@ -5,8 +5,6 @@ description: |- Resource used to control network traffic. For more information, check an official guide https://docs.snowflake.com/en/user-guide/network-policies on controlling network traffic with network policies. --- -!> **V1 release candidate** This resource was reworked and is a release candidate for the V1. We do not expect significant changes in it before the V1. We will welcome any feedback and adjust the resource if needed. Any errors reported will be resolved with a higher priority. We encourage checking this resource out before the V1 release. Please follow the [migration guide](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/MIGRATION_GUIDE.md#v0920--v0930) to use it. - !> **Note** According to Snowflake [docs](https://docs.snowflake.com/en/sql-reference/sql/drop-network-policy#usage-notes), a network policy cannot be dropped successfully if it is currently assigned to another object. Currently, the provider does not unassign such objects automatically. Before dropping the resource, first unassign the policy from the relevant objects. See [guide](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/latest/docs/guides/unassigning_policies) for more details. 
!> **Note** Due to technical limitations in Terraform SDK, changes in `allowed_network_rule_list` and `blocked_network_rule_list` do not cause diff for `show_output` and `describe_output`. diff --git a/docs/resources/network_policy_attachment.md b/docs/resources/network_policy_attachment.md index 72e3294276..ba9ec27a02 100644 --- a/docs/resources/network_policy_attachment.md +++ b/docs/resources/network_policy_attachment.md @@ -5,6 +5,8 @@ description: |- --- +!> **Caution: Preview Feature** This feature is considered a preview feature in the provider, regardless of the state of the resource in Snowflake. We do not guarantee its stability. It will be reworked and marked as a stable feature in future releases. Breaking changes are expected, even without bumping the major version. To use this feature, add the relevant feature name to `preview_features_enabled field` in the [provider configuration](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/latest/docs#schema). Please always refer to the [Getting Help](https://github.com/Snowflake-Labs/terraform-provider-snowflake?tab=readme-ov-file#getting-help) section in our Github repo to best determine how to get help for your questions. + # snowflake_network_policy_attachment (Resource) diff --git a/docs/resources/network_rule.md b/docs/resources/network_rule.md index 64052fbcfb..cc7581e78d 100644 --- a/docs/resources/network_rule.md +++ b/docs/resources/network_rule.md @@ -5,6 +5,8 @@ description: |- --- +!> **Caution: Preview Feature** This feature is considered a preview feature in the provider, regardless of the state of the resource in Snowflake. We do not guarantee its stability. It will be reworked and marked as a stable feature in future releases. Breaking changes are expected, even without bumping the major version. 
To use this feature, add the relevant feature name to `preview_features_enabled field` in the [provider configuration](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/latest/docs#schema). Please always refer to the [Getting Help](https://github.com/Snowflake-Labs/terraform-provider-snowflake?tab=readme-ov-file#getting-help) section in our Github repo to best determine how to get help for your questions. + !> **Note** A network rule cannot be dropped successfully if it is currently assigned to a network policy. Currently, the provider does not unassign such objects automatically. Before dropping the resource, first unassign the network rule from the relevant objects. See [guide](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/latest/docs/guides/unassigning_policies) for more details. # snowflake_network_rule (Resource) diff --git a/docs/resources/notification_integration.md b/docs/resources/notification_integration.md index 101e23faff..fcb56f2bb5 100644 --- a/docs/resources/notification_integration.md +++ b/docs/resources/notification_integration.md @@ -5,6 +5,8 @@ description: |- --- +!> **Caution: Preview Feature** This feature is considered a preview feature in the provider, regardless of the state of the resource in Snowflake. We do not guarantee its stability. It will be reworked and marked as a stable feature in future releases. Breaking changes are expected, even without bumping the major version. To use this feature, add the relevant feature name to `preview_features_enabled field` in the [provider configuration](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/latest/docs#schema). Please always refer to the [Getting Help](https://github.com/Snowflake-Labs/terraform-provider-snowflake?tab=readme-ov-file#getting-help) section in our Github repo to best determine how to get help for your questions. 
+ # snowflake_notification_integration (Resource) diff --git a/docs/resources/oauth_integration.md b/docs/resources/oauth_integration.md deleted file mode 100644 index 2038424c66..0000000000 --- a/docs/resources/oauth_integration.md +++ /dev/null @@ -1,58 +0,0 @@ ---- -page_title: "snowflake_oauth_integration Resource - terraform-provider-snowflake" -subcategory: "" -description: |- - ---- - -# snowflake_oauth_integration (Resource) - -~> **Deprecation** This resource is deprecated and will be removed in a future major version release. Please use snowflake_oauth_integration_for_custom_clients or snowflake_oauth_integration_for_partner_applications instead. - -## Example Usage - -```terraform -resource "snowflake_oauth_integration" "tableau_desktop" { - name = "TABLEAU_DESKTOP" - oauth_client = "TABLEAU_DESKTOP" - enabled = true - oauth_issue_refresh_tokens = true - oauth_refresh_token_validity = 3600 - blocked_roles_list = ["SYSADMIN"] -} -``` - --> **Note** Instead of using fully_qualified_name, you can reference objects managed outside Terraform by constructing a correct ID, consult [identifiers guide](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/latest/docs/guides/identifiers#new-computed-fully-qualified-name-field-in-resources). - - - -## Schema - -### Required - -- `name` (String) Specifies the name of the OAuth integration. This name follows the rules for Object Identifiers. The name should be unique among security integrations in your account. -- `oauth_client` (String) Specifies the OAuth client type. - -### Optional - -- `blocked_roles_list` (Set of String) List of roles that a user cannot explicitly consent to using after authenticating. Do not include ACCOUNTADMIN, ORGADMIN or SECURITYADMIN as they are already implicitly enforced and will cause in-place updates. -- `comment` (String) Specifies a comment for the OAuth integration. -- `enabled` (Boolean) Specifies whether this OAuth integration is enabled or disabled. 
-- `oauth_client_type` (String) Specifies the type of client being registered. Snowflake supports both confidential and public clients. -- `oauth_issue_refresh_tokens` (Boolean) Specifies whether to allow the client to exchange a refresh token for an access token when the current access token has expired. -- `oauth_redirect_uri` (String) Specifies the client URI. After a user is authenticated, the web browser is redirected to this URI. -- `oauth_refresh_token_validity` (Number) Specifies how long refresh tokens should be valid (in seconds). OAUTH_ISSUE_REFRESH_TOKENS must be set to TRUE. -- `oauth_use_secondary_roles` (String) Specifies whether default secondary roles set in the user properties are activated by default in the session being opened. - -### Read-Only - -- `created_on` (String) Date and time when the OAuth integration was created. -- `id` (String) The ID of this resource. - -## Import - -Import is supported using the following syntax: - -```shell -terraform import snowflake_oauth_integration.example name -``` diff --git a/docs/resources/oauth_integration_for_custom_clients.md b/docs/resources/oauth_integration_for_custom_clients.md index 6765f52861..d7966f4fdd 100644 --- a/docs/resources/oauth_integration_for_custom_clients.md +++ b/docs/resources/oauth_integration_for_custom_clients.md @@ -5,8 +5,6 @@ description: |- Resource used to manage oauth security integration for custom clients objects. For more information, check security integrations documentation https://docs.snowflake.com/en/sql-reference/sql/create-security-integration-oauth-snowflake. --- -!> **V1 release candidate** This resource was reworked and is a release candidate for the V1. We do not expect significant changes in it before the V1. We will welcome any feedback and adjust the resource if needed. Any errors reported will be resolved with a higher priority. We encourage checking this resource out before the V1 release. 
Please follow the [migration guide](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/MIGRATION_GUIDE.md#v0920--v0930) to use it. - !> **Note** Setting a network policy with lowercase letters does not work correctly in Snowflake (see [issue](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/3229)). As a workaround, set the network policy with uppercase letters only, or use unsafe_execute with network policy ID wrapped in `'`. !> **Note** The provider does not detect external changes on security integration type. In this case, remove the integration of wrong type manually with `terraform destroy` and recreate the resource. It will be addressed in the future. diff --git a/docs/resources/oauth_integration_for_partner_applications.md b/docs/resources/oauth_integration_for_partner_applications.md index 0188afb102..f9297109ee 100644 --- a/docs/resources/oauth_integration_for_partner_applications.md +++ b/docs/resources/oauth_integration_for_partner_applications.md @@ -5,8 +5,6 @@ description: |- Resource used to manage oauth security integration for partner applications objects. For more information, check security integrations documentation https://docs.snowflake.com/en/sql-reference/sql/create-security-integration-oauth-snowflake. --- -!> **V1 release candidate** This resource was reworked and is a release candidate for the V1. We do not expect significant changes in it before the V1. We will welcome any feedback and adjust the resource if needed. Any errors reported will be resolved with a higher priority. We encourage checking this resource out before the V1 release. Please follow the [migration guide](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/MIGRATION_GUIDE.md#v0920--v0930) to use it. - !> **Note** The provider does not detect external changes on security integration type. In this case, remove the integration of wrong type manually with `terraform destroy` and recreate the resource. 
It will be addressed in the future. # snowflake_oauth_integration_for_partner_applications (Resource) diff --git a/docs/resources/object_parameter.md b/docs/resources/object_parameter.md index 00d49016bd..0d5ff13efd 100644 --- a/docs/resources/object_parameter.md +++ b/docs/resources/object_parameter.md @@ -5,6 +5,8 @@ description: |- --- +!> **Caution: Preview Feature** This feature is considered a preview feature in the provider, regardless of the state of the resource in Snowflake. We do not guarantee its stability. It will be reworked and marked as a stable feature in future releases. Breaking changes are expected, even without bumping the major version. To use this feature, add the relevant feature name to `preview_features_enabled field` in the [provider configuration](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/latest/docs#schema). Please always refer to the [Getting Help](https://github.com/Snowflake-Labs/terraform-provider-snowflake?tab=readme-ov-file#getting-help) section in our Github repo to best determine how to get help for your questions. + # snowflake_object_parameter (Resource) diff --git a/docs/resources/password_policy.md b/docs/resources/password_policy.md index e3131cb1a3..fed75411f0 100644 --- a/docs/resources/password_policy.md +++ b/docs/resources/password_policy.md @@ -5,6 +5,8 @@ description: |- A password policy specifies the requirements that must be met to create and reset a password to authenticate to Snowflake. --- +!> **Caution: Preview Feature** This feature is considered a preview feature in the provider, regardless of the state of the resource in Snowflake. We do not guarantee its stability. It will be reworked and marked as a stable feature in future releases. Breaking changes are expected, even without bumping the major version. 
To use this feature, add the relevant feature name to `preview_features_enabled` field in the [provider configuration](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/latest/docs#schema). Please always refer to the [Getting Help](https://github.com/Snowflake-Labs/terraform-provider-snowflake?tab=readme-ov-file#getting-help) section in our GitHub repo to best determine how to get help for your questions. + !> **Note** According to Snowflake [docs](https://docs.snowflake.com/en/sql-reference/sql/drop-password-policy#usage-notes), a password policy cannot be dropped successfully if it is currently assigned to another object. Currently, the provider does not unassign such objects automatically. Before dropping the resource, first unassign the policy from the relevant objects. See [guide](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/latest/docs/guides/unassigning_policies) for more details. # snowflake_password_policy (Resource) diff --git a/docs/resources/pipe.md b/docs/resources/pipe.md index 3f99772140..68291ae000 100644 --- a/docs/resources/pipe.md +++ b/docs/resources/pipe.md @@ -6,6 +6,8 @@ description: |- --- +!> **Caution: Preview Feature** This feature is considered a preview feature in the provider, regardless of the state of the resource in Snowflake. We do not guarantee its stability. It will be reworked and marked as a stable feature in future releases. Breaking changes are expected, even without bumping the major version. To use this feature, add the relevant feature name to `preview_features_enabled` field in the [provider configuration](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/latest/docs#schema). Please always refer to the [Getting Help](https://github.com/Snowflake-Labs/terraform-provider-snowflake?tab=readme-ov-file#getting-help) section in our GitHub repo to best determine how to get help for your questions.
+ # snowflake_pipe (Resource) ~> **Note** Right now, changes for the `integration` field are not detected. This will be resolved in the [upcoming refactoring](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/ROADMAP.md#preparing-essential-ga-objects-for-the-provider-v1). For now, please try to use the [replace_triggered_by](https://developer.hashicorp.com/terraform/language/meta-arguments/lifecycle#replace_triggered_by) HCL meta-argument. diff --git a/docs/resources/primary_connection.md b/docs/resources/primary_connection.md index 20009789d2..7c9c812a3d 100644 --- a/docs/resources/primary_connection.md +++ b/docs/resources/primary_connection.md @@ -5,8 +5,6 @@ description: |- Resource used to manage primary connections. For managing replicated connection check resource snowflakesecondaryconnection ./secondary_connection. For more information, check connection documentation https://docs.snowflake.com/en/sql-reference/sql/create-connection.html. --- -!> **V1 release candidate** This resource is a release candidate for the V1. It is on the list of remaining GA objects for V1. We do not expect significant changes in it before the V1. We will welcome any feedback and adjust the resource if needed. Any errors reported will be resolved with a higher priority. We encourage checking this resource out before the V1 release. Please follow the [migration guide](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/MIGRATION_GUIDE.md#v0970--v0980) to use it. - # snowflake_primary_connection (Resource) Resource used to manage primary connections. For managing replicated connection check resource [snowflake_secondary_connection](./secondary_connection). For more information, check [connection documentation](https://docs.snowflake.com/en/sql-reference/sql/create-connection.html). 
diff --git a/docs/resources/procedure.md b/docs/resources/procedure.md deleted file mode 100644 index 5985cb4dab..0000000000 --- a/docs/resources/procedure.md +++ /dev/null @@ -1,99 +0,0 @@ ---- -page_title: "snowflake_procedure Resource - terraform-provider-snowflake" -subcategory: "" -description: |- - ---- - -# snowflake_procedure (Resource) - -~> **Deprecation** This resource is deprecated and will be removed in a future major version release. Please use snowflake_procedure_java, snowflake_procedure_javascript, snowflake_procedure_python, snowflake_procedure_scala, and snowflake_procedure_sql instead. - -## Example Usage - -```terraform -resource "snowflake_database" "db" { - name = "MYDB" - data_retention_days = 1 -} - -resource "snowflake_schema" "schema" { - database = snowflake_database.db.name - name = "MYSCHEMA" - data_retention_days = 1 -} - -resource "snowflake_procedure" "proc" { - name = "SAMPLEPROC" - database = snowflake_database.db.name - schema = snowflake_schema.schema.name - language = "JAVASCRIPT" - arguments { - name = "arg1" - type = "varchar" - } - arguments { - name = "arg2" - type = "DATE" - } - comment = "Procedure with 2 arguments" - return_type = "VARCHAR" - execute_as = "CALLER" - return_behavior = "IMMUTABLE" - null_input_behavior = "RETURNS NULL ON NULL INPUT" - statement = < **Note** Instead of using fully_qualified_name, you can reference objects managed outside Terraform by constructing a correct ID, consult [identifiers guide](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/latest/docs/guides/identifiers#new-computed-fully-qualified-name-field-in-resources). - - - -## Schema - -### Required - -- `database` (String) The database in which to create the procedure. Don't use the | character. -- `name` (String) Specifies the identifier for the procedure; does not have to be unique for the schema in which the procedure is created. Don't use the | character. 
-- `return_type` (String) The return type of the procedure -- `schema` (String) The schema in which to create the procedure. Don't use the | character. -- `statement` (String) Specifies the code used to create the procedure. - -### Optional - -- `arguments` (Block List) List of the arguments for the procedure (see [below for nested schema](#nestedblock--arguments)) -- `comment` (String) Specifies a comment for the procedure. -- `execute_as` (String) Sets execution context. Allowed values are CALLER and OWNER (consult a proper section in the [docs](https://docs.snowflake.com/en/sql-reference/sql/create-procedure#id1)). For more information see [caller's rights and owner's rights](https://docs.snowflake.com/en/developer-guide/stored-procedure/stored-procedures-rights). -- `handler` (String) The handler method for Java / Python procedures. -- `imports` (List of String) Imports for Java / Python procedures. For Java this a list of jar files, for Python this is a list of Python files. -- `language` (String) Specifies the language of the stored procedure code. -- `null_input_behavior` (String) Specifies the behavior of the procedure when called with null inputs. -- `packages` (List of String) List of package imports to use for Java / Python procedures. For Java, package imports should be of the form: package_name:version_number, where package_name is snowflake_domain:package. For Python use it should be: ('numpy','pandas','xgboost==1.5.0'). -- `return_behavior` (String, Deprecated) Specifies the behavior of the function when returning results -- `runtime_version` (String) Required for Python procedures. Specifies Python runtime version. -- `secure` (Boolean) Specifies that the procedure is secure. For more information about secure procedures, see Protecting Sensitive Information with Secure UDFs and Stored Procedures. - -### Read-Only - -- `fully_qualified_name` (String) Fully qualified name of the resource. 
For more information, see [object name resolution](https://docs.snowflake.com/en/sql-reference/name-resolution). -- `id` (String) The ID of this resource. - - ### Nested Schema for `arguments` - -Required: - -- `name` (String) The argument name -- `type` (String) The argument type - -## Import - -Import is supported using the following syntax: - -```shell -# format is ..() -terraform import snowflake_procedure.example 'dbName.schemaName.procedureName(varchar, varchar, varchar)' -``` diff --git a/docs/resources/procedure_java.md b/docs/resources/procedure_java.md index b7fae95a96..3dd3858f89 100644 --- a/docs/resources/procedure_java.md +++ b/docs/resources/procedure_java.md @@ -5,6 +5,8 @@ description: |- Resource used to manage java procedure objects. For more information, check procedure documentation https://docs.snowflake.com/en/sql-reference/sql/create-procedure. --- +!> **Caution: Preview Feature** This feature is considered a preview feature in the provider, regardless of the state of the resource in Snowflake. We do not guarantee its stability. It will be reworked and marked as a stable feature in future releases. Breaking changes are expected, even without bumping the major version. To use this feature, add the relevant feature name to `preview_features_enabled` field in the [provider configuration](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/latest/docs#schema). Please always refer to the [Getting Help](https://github.com/Snowflake-Labs/terraform-provider-snowflake?tab=readme-ov-file#getting-help) section in our GitHub repo to best determine how to get help for your questions. + -> **Note** External changes to `is_secure` and `null_input_behavior` are not currently supported. They will be handled in the following versions of the provider which may still affect this resource. -> **Note** `COPY GRANTS` and `OR REPLACE` are not currently supported.
diff --git a/docs/resources/procedure_javascript.md b/docs/resources/procedure_javascript.md index cecdf0a6f2..fd50fab116 100644 --- a/docs/resources/procedure_javascript.md +++ b/docs/resources/procedure_javascript.md @@ -5,6 +5,8 @@ description: |- Resource used to manage javascript procedure objects. For more information, check procedure documentation https://docs.snowflake.com/en/sql-reference/sql/create-procedure. --- +!> **Caution: Preview Feature** This feature is considered a preview feature in the provider, regardless of the state of the resource in Snowflake. We do not guarantee its stability. It will be reworked and marked as a stable feature in future releases. Breaking changes are expected, even without bumping the major version. To use this feature, add the relevant feature name to `preview_features_enabled` field in the [provider configuration](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/latest/docs#schema). Please always refer to the [Getting Help](https://github.com/Snowflake-Labs/terraform-provider-snowflake?tab=readme-ov-file#getting-help) section in our GitHub repo to best determine how to get help for your questions. + -> **Note** External changes to `is_secure` and `null_input_behavior` are not currently supported. They will be handled in the following versions of the provider which may still affect this resource. -> **Note** `COPY GRANTS` and `OR REPLACE` are not currently supported. diff --git a/docs/resources/procedure_python.md b/docs/resources/procedure_python.md index 9a857e15be..76d9495fd4 100644 --- a/docs/resources/procedure_python.md +++ b/docs/resources/procedure_python.md @@ -5,6 +5,8 @@ description: |- Resource used to manage python procedure objects. For more information, check procedure documentation https://docs.snowflake.com/en/sql-reference/sql/create-procedure.
--- +!> **Caution: Preview Feature** This feature is considered a preview feature in the provider, regardless of the state of the resource in Snowflake. We do not guarantee its stability. It will be reworked and marked as a stable feature in future releases. Breaking changes are expected, even without bumping the major version. To use this feature, add the relevant feature name to `preview_features_enabled` field in the [provider configuration](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/latest/docs#schema). Please always refer to the [Getting Help](https://github.com/Snowflake-Labs/terraform-provider-snowflake?tab=readme-ov-file#getting-help) section in our GitHub repo to best determine how to get help for your questions. + -> **Note** External changes to `is_secure` and `null_input_behavior` are not currently supported. They will be handled in the following versions of the provider which may still affect this resource. -> **Note** `COPY GRANTS` and `OR REPLACE` are not currently supported. diff --git a/docs/resources/procedure_scala.md b/docs/resources/procedure_scala.md index 51d0382cbf..0b6cf82659 100644 --- a/docs/resources/procedure_scala.md +++ b/docs/resources/procedure_scala.md @@ -5,6 +5,8 @@ description: |- Resource used to manage scala procedure objects. For more information, check procedure documentation https://docs.snowflake.com/en/sql-reference/sql/create-procedure. --- +!> **Caution: Preview Feature** This feature is considered a preview feature in the provider, regardless of the state of the resource in Snowflake. We do not guarantee its stability. It will be reworked and marked as a stable feature in future releases. Breaking changes are expected, even without bumping the major version. To use this feature, add the relevant feature name to `preview_features_enabled` field in the [provider configuration](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/latest/docs#schema).
Please always refer to the [Getting Help](https://github.com/Snowflake-Labs/terraform-provider-snowflake?tab=readme-ov-file#getting-help) section in our GitHub repo to best determine how to get help for your questions. + -> **Note** External changes to `is_secure` and `null_input_behavior` are not currently supported. They will be handled in the following versions of the provider which may still affect this resource. -> **Note** `COPY GRANTS` and `OR REPLACE` are not currently supported. diff --git a/docs/resources/procedure_sql.md b/docs/resources/procedure_sql.md index 3814df934f..6aefb8e115 100644 --- a/docs/resources/procedure_sql.md +++ b/docs/resources/procedure_sql.md @@ -5,6 +5,8 @@ description: |- Resource used to manage sql procedure objects. For more information, check procedure documentation https://docs.snowflake.com/en/sql-reference/sql/create-procedure. --- +!> **Caution: Preview Feature** This feature is considered a preview feature in the provider, regardless of the state of the resource in Snowflake. We do not guarantee its stability. It will be reworked and marked as a stable feature in future releases. Breaking changes are expected, even without bumping the major version. To use this feature, add the relevant feature name to `preview_features_enabled` field in the [provider configuration](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/latest/docs#schema). Please always refer to the [Getting Help](https://github.com/Snowflake-Labs/terraform-provider-snowflake?tab=readme-ov-file#getting-help) section in our GitHub repo to best determine how to get help for your questions. + -> **Note** External changes to `is_secure` are not currently supported. They will be handled in the following versions of the provider which may still affect this resource. -> **Note** `COPY GRANTS` and `OR REPLACE` are not currently supported.
diff --git a/docs/resources/resource_monitor.md b/docs/resources/resource_monitor.md index a674411c2f..9f5041daea 100644 --- a/docs/resources/resource_monitor.md +++ b/docs/resources/resource_monitor.md @@ -5,8 +5,6 @@ description: |- Resource used to manage resource monitor objects. For more information, check resource monitor documentation https://docs.snowflake.com/en/user-guide/resource-monitors. --- -!> **V1 release candidate** This resource was reworked and is a release candidate for the V1. We do not expect significant changes in it before the V1. We will welcome any feedback and adjust the resource if needed. Any errors reported will be resolved with a higher priority. We encourage checking this resource out before the V1 release. Please follow the [migration guide](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/MIGRATION_GUIDE.md#v0950--v0960) to use it. - ~> **Note** For more details about resource monitor usage, please visit [this guide on Snowflake documentation page](https://docs.snowflake.com/en/user-guide/resource-monitors). **! Warning !** Due to Snowflake limitations, the following actions are not supported: diff --git a/docs/resources/role.md b/docs/resources/role.md deleted file mode 100644 index 6dba422fba..0000000000 --- a/docs/resources/role.md +++ /dev/null @@ -1,71 +0,0 @@ ---- -page_title: "snowflake_role Resource - terraform-provider-snowflake" -subcategory: "" -description: |- - The resource is used for role management, where roles can be assigned privileges and, in turn, granted to users and other roles. When granted to roles they can create hierarchies of privilege structures. For more details, refer to the official documentation https://docs.snowflake.com/en/user-guide/security-access-control-overview. ---- - -# snowflake_role (Resource) - -~> **Deprecation** This resource is deprecated and will be removed in a future major version release. Please use [snowflake_account_role](./account_role) instead. 
- -The resource is used for role management, where roles can be assigned privileges and, in turn, granted to users and other roles. When granted to roles they can create hierarchies of privilege structures. For more details, refer to the [official documentation](https://docs.snowflake.com/en/user-guide/security-access-control-overview). - -## Example Usage - -```terraform -## Minimal -resource "snowflake_role" "minimal" { - name = "role_name" -} - -## Complete (with every optional set) -resource "snowflake_role" "complete" { - name = "role_name" - comment = "my account role" -} -``` - --> **Note** Instead of using fully_qualified_name, you can reference objects managed outside Terraform by constructing a correct ID, consult [identifiers guide](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/latest/docs/guides/identifiers#new-computed-fully-qualified-name-field-in-resources). - - - -## Schema - -### Required - -- `name` (String) Identifier for the role; must be unique for your account. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"`. - -### Optional - -- `comment` (String) - -### Read-Only - -- `fully_qualified_name` (String) Fully qualified name of the resource. For more information, see [object name resolution](https://docs.snowflake.com/en/sql-reference/name-resolution). -- `id` (String) The ID of this resource. -- `show_output` (List of Object) Outputs the result of `SHOW ROLES` for the given role. 
(see [below for nested schema](#nestedatt--show_output)) - - -### Nested Schema for `show_output` - -Read-Only: - -- `assigned_to_users` (Number) -- `comment` (String) -- `created_on` (String) -- `granted_roles` (Number) -- `granted_to_roles` (Number) -- `is_current` (Boolean) -- `is_default` (Boolean) -- `is_inherited` (Boolean) -- `name` (String) -- `owner` (String) - -## Import - -Import is supported using the following syntax: - -```shell -terraform import snowflake_role.example "name" -``` diff --git a/docs/resources/row_access_policy.md b/docs/resources/row_access_policy.md index 5f243e1b1c..edadcf0ea0 100644 --- a/docs/resources/row_access_policy.md +++ b/docs/resources/row_access_policy.md @@ -5,8 +5,6 @@ description: |- Resource used to manage row access policy objects. For more information, check row access policy documentation https://docs.snowflake.com/en/sql-reference/sql/create-row-access-policy. --- -!> **V1 release candidate** This resource was reworked and is a release candidate for the V1. We do not expect significant changes in it before the V1. We will welcome any feedback and adjust the resource if needed. Any errors reported will be resolved with a higher priority. We encourage checking this resource out before the V1 release. Please follow the [migration guide](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/MIGRATION_GUIDE.md#v0950--v0960) to use it. - !> **Note** According to Snowflake [docs](https://docs.snowflake.com/en/sql-reference/sql/drop-row-access-policy#usage-notes), a row access policy cannot be dropped successfully if it is currently assigned to another object. Currently, the provider does not unassign such objects automatically. Before dropping the resource, first unassign the policy from the relevant objects. See [guide](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/latest/docs/guides/unassigning_policies) for more details. 
# snowflake_row_access_policy (Resource) diff --git a/docs/resources/saml2_integration.md b/docs/resources/saml2_integration.md index fa55be5e87..cfa507832d 100644 --- a/docs/resources/saml2_integration.md +++ b/docs/resources/saml2_integration.md @@ -5,8 +5,6 @@ description: |- Resource used to manage SAML2 security integration objects. For more information, check security integrations documentation https://docs.snowflake.com/en/sql-reference/sql/create-security-integration-saml2. --- -!> **V1 release candidate** This resource was reworked and is a release candidate for the V1. We do not expect significant changes in it before the V1. We will welcome any feedback and adjust the resource if needed. Any errors reported will be resolved with a higher priority. We encourage checking this resource out before the V1 release. Please follow the [migration guide](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/MIGRATION_GUIDE.md#v0920--v0930) to use it. - !> **Note** The provider does not detect external changes on security integration type. In this case, remove the integration of wrong type manually with `terraform destroy` and recreate the resource. It will be addressed in the future. # snowflake_saml2_integration (Resource) diff --git a/docs/resources/saml_integration.md b/docs/resources/saml_integration.md deleted file mode 100644 index 9ec4415251..0000000000 --- a/docs/resources/saml_integration.md +++ /dev/null @@ -1,66 +0,0 @@ ---- -page_title: "snowflake_saml_integration Resource - terraform-provider-snowflake" -subcategory: "" -description: |- - ---- - -# snowflake_saml_integration (Resource) - -~> **Deprecation** This resource is deprecated and will be removed in a future major version release. Please use [snowflake_saml2_integration](./saml2_integration) instead. 
- -## Example Usage - -```terraform -resource "snowflake_saml_integration" "saml_integration" { - name = "saml_integration" - saml2_provider = "CUSTOM" - saml2_issuer = "test_issuer" - saml2_sso_url = "https://testsamlissuer.com" - saml2_x509_cert = "MIICYzCCAcygAwIBAgIBADANBgkqhkiG9w0BAQUFADAuMQswCQYDVQQGEwJVUzEMMAoGA1UEChMDSUJNMREwDwYDVQQLEwhMb2NhbCBDQTAeFw05OTEyMjIwNTAwMDBaFw0wMDEyMjMwNDU5NTlaMC4xCzAJBgNVBAYTAlVTMQwwCgYDVQQKEwNJQk0xETAPBgNVBAsTCExvY2FsIENBMIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQD2bZEo7xGaX2/0GHkrNFZvlxBou9v1Jmt/PDiTMPve8r9FeJAQ0QdvFST/0JPQYD20rH0bimdDLgNdNynmyRoS2S/IInfpmf69iyc2G0TPyRvmHIiOZbdCd+YBHQi1adkj17NDcWj6S14tVurFX73zx0sNoMS79q3tuXKrDsxeuwIDAQABo4GQMIGNMEsGCVUdDwGG+EIBDQQ+EzxHZW5lcmF0ZWQgYnkgdGhlIFNlY3VyZVdheSBTZWN1cml0eSBTZXJ2ZXIgZm9yIE9TLzM5MCAoUkFDRikwDgYDVR0PAQH/BAQDAgAGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFJ3+ocRyCTJw067dLSwr/nalx6YMMA0GCSqGSIb3DQEBBQUAA4GBAMaQzt+zaj1GU77yzlr8iiMBXgdQrwsZZWJo5exnAucJAEYQZmOfyLiMD6oYq+ZnfvM0n8G/Y79q8nhwvuxpYOnRSAXFp6xSkrIOeZtJMY1h00LKp/JX3Ng1svZ2agE126JHsQ0bhzN5TKsYfbwfTwfjdWAGy6Vf1nYi/rO+ryMO" - enabled = true -} -``` - --> **Note** Instead of using fully_qualified_name, you can reference objects managed outside Terraform by constructing a correct ID, consult [identifiers guide](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/latest/docs/guides/identifiers#new-computed-fully-qualified-name-field-in-resources). - - - -## Schema - -### Required - -- `name` (String) Specifies the name of the SAML2 integration. This name follows the rules for Object Identifiers. The name should be unique among security integrations in your account. -- `saml2_issuer` (String) The string containing the IdP EntityID / Issuer. -- `saml2_provider` (String) The string describing the IdP. One of the following: OKTA, ADFS, Custom. -- `saml2_sso_url` (String) The string containing the IdP SSO URL, where the user should be redirected by Snowflake (the Service Provider) with a SAML AuthnRequest message. 
-- `saml2_x509_cert` (String) The Base64 encoded IdP signing certificate on a single line without the leading -----BEGIN CERTIFICATE----- and ending -----END CERTIFICATE----- markers. - -### Optional - -- `enabled` (Boolean) Specifies whether this security integration is enabled or disabled. -- `saml2_enable_sp_initiated` (Boolean) The Boolean indicating if the Log In With button will be shown on the login page. TRUE: displays the Log in WIth button on the login page. FALSE: does not display the Log in With button on the login page. -- `saml2_force_authn` (Boolean) The Boolean indicating whether users, during the initial authentication flow, are forced to authenticate again to access Snowflake. When set to TRUE, Snowflake sets the ForceAuthn SAML parameter to TRUE in the outgoing request from Snowflake to the identity provider. TRUE: forces users to authenticate again to access Snowflake, even if a valid session with the identity provider exists. FALSE: does not force users to authenticate again to access Snowflake. -- `saml2_post_logout_redirect_url` (String) The endpoint to which Snowflake redirects users after clicking the Log Out button in the classic Snowflake web interface. Snowflake terminates the Snowflake session upon redirecting to the specified endpoint. -- `saml2_requested_nameid_format` (String) The SAML NameID format allows Snowflake to set an expectation of the identifying attribute of the user (i.e. SAML Subject) in the SAML assertion from the IdP to ensure a valid authentication to Snowflake. If a value is not specified, Snowflake sends the urn:oasis:names:tc:SAML:1.1:nameid-format:emailAddress value in the authentication request to the IdP. 
NameID must be one of the following values: urn:oasis:names:tc:SAML:1.1:nameid-format:unspecified, urn:oasis:names:tc:SAML:1.1:nameid-format:emailAddress, urn:oasis:names:tc:SAML:1.1:nameid-format:X509SubjectName, urn:oasis:names:tc:SAML:1.1:nameid-format:WindowsDomainQualifiedName, urn:oasis:names:tc:SAML:2.0:nameid-format:kerberos, urn:oasis:names:tc:SAML:2.0:nameid-format:persistent, urn:oasis:names:tc:SAML:2.0:nameid-format:transient . -- `saml2_sign_request` (Boolean) The Boolean indicating whether SAML requests are signed. TRUE: allows SAML requests to be signed. FALSE: does not allow SAML requests to be signed. -- `saml2_snowflake_acs_url` (String) The string containing the Snowflake Assertion Consumer Service URL to which the IdP will send its SAML authentication response back to Snowflake. This property will be set in the SAML authentication request generated by Snowflake when initiating a SAML SSO operation with the IdP. If an incorrect value is specified, Snowflake returns an error message indicating the acceptable values to use. Default: https://..snowflakecomputing.com/fed/login -- `saml2_snowflake_issuer_url` (String) The string containing the EntityID / Issuer for the Snowflake service provider. If an incorrect value is specified, Snowflake returns an error message indicating the acceptable values to use. -- `saml2_snowflake_x509_cert` (String) The Base64 encoded self-signed certificate generated by Snowflake for use with Encrypting SAML Assertions and Signed SAML Requests. You must have at least one of these features (encrypted SAML assertions or signed SAML responses) enabled in your Snowflake account to access the certificate value. -- `saml2_sp_initiated_login_page_label` (String) The string containing the label to display after the Log In With button on the login page. - -### Read-Only - -- `created_on` (String) Date and time when the SAML integration was created. -- `id` (String) The ID of this resource. 
-- `saml2_digest_methods_used` (String) -- `saml2_signature_methods_used` (String) -- `saml2_snowflake_metadata` (String) Metadata created by Snowflake to provide to SAML2 provider. - -## Import - -Import is supported using the following syntax: - -```shell -terraform import snowflake_saml_integration.example name -``` diff --git a/docs/resources/schema.md b/docs/resources/schema.md index 3c919e9d74..2e6ded33f2 100644 --- a/docs/resources/schema.md +++ b/docs/resources/schema.md @@ -5,8 +5,6 @@ description: |- Resource used to manage schema objects. For more information, check schema documentation https://docs.snowflake.com/en/sql-reference/sql/create-schema. --- -!> **V1 release candidate** This resource was reworked and is a release candidate for the V1. We do not expect significant changes in it before the V1. We will welcome any feedback and adjust the resource if needed. Any errors reported will be resolved with a higher priority. We encourage checking this resource out before the V1 release. Please follow the [migration guide](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/MIGRATION_GUIDE.md#v0930--v0940) to use it. - -> **Note** Field `CLASSIFICATION_ROLE` is currently missing. It will be added in the future. diff --git a/docs/resources/scim_integration.md b/docs/resources/scim_integration.md index ae782bcbe0..a7200c6b85 100644 --- a/docs/resources/scim_integration.md +++ b/docs/resources/scim_integration.md @@ -5,8 +5,6 @@ description: |- Resource used to manage scim security integration objects. For more information, check security integrations documentation https://docs.snowflake.com/en/sql-reference/sql/create-security-integration-scim. --- -!> **V1 release candidate** This resource was reworked and is a release candidate for the V1. We do not expect significant changes in it before the V1. We will welcome any feedback and adjust the resource if needed. Any errors reported will be resolved with a higher priority. 
We encourage checking this resource out before the V1 release. Please follow the [migration guide](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/MIGRATION_GUIDE.md#v0920--v0930) to use it. - !> **Note** The provider does not detect external changes on security integration type. In this case, remove the integration of wrong type manually with `terraform destroy` and recreate the resource. It will be addressed in the future. # snowflake_scim_integration (Resource) diff --git a/docs/resources/secondary_connection.md b/docs/resources/secondary_connection.md index 566c4e507b..5ecc62c387 100644 --- a/docs/resources/secondary_connection.md +++ b/docs/resources/secondary_connection.md @@ -5,8 +5,6 @@ description: |- Resource used to manage secondary (replicated) connections. To manage primary connection check resource snowflakeprimaryconnection ./primary_connection. For more information, check connection documentation https://docs.snowflake.com/en/sql-reference/sql/create-connection.html. --- -!> **V1 release candidate** This resource is a release candidate for the V1. It is on the list of remaining GA objects for V1. We do not expect significant changes in it before the V1. We will welcome any feedback and adjust the resource if needed. Any errors reported will be resolved with a higher priority. We encourage checking this resource out before the V1 release. Please follow the [migration guide](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/MIGRATION_GUIDE.md#v0970--v0980) to use it. - # snowflake_secondary_connection (Resource) Resource used to manage secondary (replicated) connections. To manage primary connection check resource [snowflake_primary_connection](./primary_connection). For more information, check [connection documentation](https://docs.snowflake.com/en/sql-reference/sql/create-connection.html). 
diff --git a/docs/resources/secondary_database.md b/docs/resources/secondary_database.md index c6e4e6a2e5..139447783c 100644 --- a/docs/resources/secondary_database.md +++ b/docs/resources/secondary_database.md @@ -6,8 +6,6 @@ description: |- A secondary database creates a replica of an existing primary database (i.e. a secondary database). For more information about database replication, see Introduction to database replication across multiple accounts https://docs.snowflake.com/en/user-guide/db-replication-intro. --- -!> **V1 release candidate** This resource was reworked and is a release candidate for the V1. We do not expect significant changes in it before the V1. We will welcome any feedback and adjust the resource if needed. Any errors reported will be resolved with a higher priority. We encourage checking this resource out before the V1 release. Please follow the [migration guide](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/MIGRATION_GUIDE.md#v0920--v0930) to use it. - !> **Note** The provider does not detect external changes on database type. In this case, remove the database of wrong type manually with `terraform destroy` and recreate the resource. It will be addressed in the future. !> **Note** A database cannot be dropped successfully if it contains network rule-network policy associations. The error looks like `098507 (2BP01): Cannot drop database DATABASE as it includes network rule - policy associations.`. Currently, the provider does not unassign such objects automatically. Before dropping the resource, first unassign the network rule from the relevant objects. See [guide](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/latest/docs/guides/unassigning_policies) for more details. 
diff --git a/docs/resources/secret_with_authorization_code_grant.md b/docs/resources/secret_with_authorization_code_grant.md index 5f4ea693fd..eb025451a2 100644 --- a/docs/resources/secret_with_authorization_code_grant.md +++ b/docs/resources/secret_with_authorization_code_grant.md @@ -5,8 +5,6 @@ description: |- Resource used to manage secret objects with OAuth Authorization Code Grant. For more information, check secret documentation https://docs.snowflake.com/en/sql-reference/sql/create-secret. --- -!> **V1 release candidate** This resource is a release candidate for the V1. It is on the list of remaining GA objects for V1. We do not expect significant changes in it before the V1. We will welcome any feedback and adjust the resource if needed. Any errors reported will be resolved with a higher priority. We encourage checking this resource out before the V1 release. Please follow the [migration guide](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/MIGRATION_GUIDE.md#v0970--v0980) to use it. - # snowflake_secret_with_authorization_code_grant (Resource) Resource used to manage secret objects with OAuth Authorization Code Grant. For more information, check [secret documentation](https://docs.snowflake.com/en/sql-reference/sql/create-secret). diff --git a/docs/resources/secret_with_basic_authentication.md b/docs/resources/secret_with_basic_authentication.md index e6b0d88c1b..07aa9c5a84 100644 --- a/docs/resources/secret_with_basic_authentication.md +++ b/docs/resources/secret_with_basic_authentication.md @@ -5,8 +5,6 @@ description: |- Resource used to manage secret objects with Basic Authentication. For more information, check secret documentation https://docs.snowflake.com/en/sql-reference/sql/create-secret. --- -!> **V1 release candidate** This resource is a release candidate for the V1. It is on the list of remaining GA objects for V1. We do not expect significant changes in it before the V1. 
We will welcome any feedback and adjust the resource if needed. Any errors reported will be resolved with a higher priority. We encourage checking this resource out before the V1 release. Please follow the [migration guide](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/MIGRATION_GUIDE.md#v0970--v0980) to use it. - # snowflake_secret_with_basic_authentication (Resource) Resource used to manage secret objects with Basic Authentication. For more information, check [secret documentation](https://docs.snowflake.com/en/sql-reference/sql/create-secret). diff --git a/docs/resources/secret_with_client_credentials.md b/docs/resources/secret_with_client_credentials.md index 6dd8757a59..7bb80293b4 100644 --- a/docs/resources/secret_with_client_credentials.md +++ b/docs/resources/secret_with_client_credentials.md @@ -5,8 +5,6 @@ description: |- Resource used to manage secret objects with OAuth Client Credentials. For more information, check secret documentation https://docs.snowflake.com/en/sql-reference/sql/create-secret. --- -!> **V1 release candidate** This resource is a release candidate for the V1. It is on the list of remaining GA objects for V1. We do not expect significant changes in it before the V1. We will welcome any feedback and adjust the resource if needed. Any errors reported will be resolved with a higher priority. We encourage checking this resource out before the V1 release. Please follow the [migration guide](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/MIGRATION_GUIDE.md#v0970--v0980) to use it. - # snowflake_secret_with_client_credentials (Resource) Resource used to manage secret objects with OAuth Client Credentials. For more information, check [secret documentation](https://docs.snowflake.com/en/sql-reference/sql/create-secret). 
diff --git a/docs/resources/secret_with_generic_string.md b/docs/resources/secret_with_generic_string.md index 4c0b426ab9..33a1b3612a 100644 --- a/docs/resources/secret_with_generic_string.md +++ b/docs/resources/secret_with_generic_string.md @@ -5,8 +5,6 @@ description: |- Resource used to manage secret objects with Generic String. For more information, check secret documentation https://docs.snowflake.com/en/sql-reference/sql/create-secret. --- -!> **V1 release candidate** This resource is a release candidate for the V1. It is on the list of remaining GA objects for V1. We do not expect significant changes in it before the V1. We will welcome any feedback and adjust the resource if needed. Any errors reported will be resolved with a higher priority. We encourage checking this resource out before the V1 release. Please follow the [migration guide](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/MIGRATION_GUIDE.md#v0970--v0980) to use it. - # snowflake_secret_with_generic_string (Resource) Resource used to manage secret objects with Generic String. For more information, check [secret documentation](https://docs.snowflake.com/en/sql-reference/sql/create-secret). diff --git a/docs/resources/sequence.md b/docs/resources/sequence.md index 2bd83f402a..f52afd05d0 100644 --- a/docs/resources/sequence.md +++ b/docs/resources/sequence.md @@ -5,6 +5,8 @@ description: |- --- +!> **Caution: Preview Feature** This feature is considered a preview feature in the provider, regardless of the state of the resource in Snowflake. We do not guarantee its stability. It will be reworked and marked as a stable feature in future releases. Breaking changes are expected, even without bumping the major version. To use this feature, add the relevant feature name to `preview_features_enabled` field in the [provider configuration](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/latest/docs#schema).
Please always refer to the [Getting Help](https://github.com/Snowflake-Labs/terraform-provider-snowflake?tab=readme-ov-file#getting-help) section in our Github repo to best determine how to get help for your questions. + # snowflake_sequence (Resource) diff --git a/docs/resources/service_user.md b/docs/resources/service_user.md index 7daa93d14b..2edab60d7a 100644 --- a/docs/resources/service_user.md +++ b/docs/resources/service_user.md @@ -5,8 +5,6 @@ description: |- Resource used to manage service user objects. For more information, check user documentation https://docs.snowflake.com/en/sql-reference/commands-user-role#user-management. --- -!> **V1 release candidate** This resource was reworked and is a release candidate for the V1. We do not expect significant changes in it before the V1. We will welcome any feedback and adjust the resource if needed. Any errors reported will be resolved with a higher priority. We encourage checking this resource out before the V1 release. Please follow the [migration guide](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/MIGRATION_GUIDE.md#v0960--v0970) to use it. - -> **Note** `snowflake_user_password_policy_attachment` will be reworked in the following versions of the provider which may still affect this resource. -> **Note** Attaching user policies will be handled in the following versions of the provider which may still affect this resource. 
diff --git a/docs/resources/session_parameter.md b/docs/resources/session_parameter.md deleted file mode 100644 index 7f41515856..0000000000 --- a/docs/resources/session_parameter.md +++ /dev/null @@ -1,54 +0,0 @@ ---- -page_title: "snowflake_session_parameter Resource - terraform-provider-snowflake" -subcategory: "" -description: |- - ---- - -# snowflake_session_parameter (Resource) - - - -## Example Usage - -```terraform -resource "snowflake_session_parameter" "s" { - key = "AUTOCOMMIT" - value = "false" - user = "TEST_USER" -} - -resource "snowflake_session_parameter" "s2" { - key = "BINARY_OUTPUT_FORMAT" - value = "BASE64" - on_account = true -} -``` - --> **Note** Instead of using fully_qualified_name, you can reference objects managed outside Terraform by constructing a correct ID, consult [identifiers guide](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/latest/docs/guides/identifiers#new-computed-fully-qualified-name-field-in-resources). - - - -## Schema - -### Required - -- `key` (String) Name of session parameter. Valid values are those in [session parameters](https://docs.snowflake.com/en/sql-reference/parameters.html#session-parameters). -- `value` (String) Value of session parameter, as a string. Constraints are the same as those for the parameters in Snowflake documentation. - -### Optional - -- `on_account` (Boolean) If true, the session parameter will be set on the account level. -- `user` (String) The user to set the session parameter for. Required if on_account is false - -### Read-Only - -- `id` (String) The ID of this resource. 
- -## Import - -Import is supported using the following syntax: - -```shell -terraform import snowflake_session_parameter.s -``` diff --git a/docs/resources/share.md b/docs/resources/share.md index d02b3d767f..eae395775e 100644 --- a/docs/resources/share.md +++ b/docs/resources/share.md @@ -5,6 +5,8 @@ description: |- --- +!> **Caution: Preview Feature** This feature is considered a preview feature in the provider, regardless of the state of the resource in Snowflake. We do not guarantee its stability. It will be reworked and marked as a stable feature in future releases. Breaking changes are expected, even without bumping the major version. To use this feature, add the relevant feature name to `preview_features_enabled` field in the [provider configuration](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/latest/docs#schema). Please always refer to the [Getting Help](https://github.com/Snowflake-Labs/terraform-provider-snowflake?tab=readme-ov-file#getting-help) section in our Github repo to best determine how to get help for your questions. + # snowflake_share (Resource) diff --git a/docs/resources/shared_database.md b/docs/resources/shared_database.md index af44483644..4b6185148a 100644 --- a/docs/resources/shared_database.md +++ b/docs/resources/shared_database.md @@ -5,8 +5,6 @@ description: |- A shared database creates a database from a share provided by another Snowflake account. For more information about shares, see Introduction to Secure Data Sharing https://docs.snowflake.com/en/user-guide/data-sharing-intro. --- -!> **V1 release candidate** This resource was reworked and is a release candidate for the V1. We do not expect significant changes in it before the V1. We will welcome any feedback and adjust the resource if needed. Any errors reported will be resolved with a higher priority. We encourage checking this resource out before the V1 release.
Please follow the [migration guide](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/MIGRATION_GUIDE.md#v0920--v0930) to use it. - !> **Note** The provider does not detect external changes on database type. In this case, remove the database of wrong type manually with `terraform destroy` and recreate the resource. It will be addressed in the future. !> **Note** A database cannot be dropped successfully if it contains network rule-network policy associations. The error looks like `098507 (2BP01): Cannot drop database DATABASE as it includes network rule - policy associations.`. Currently, the provider does not unassign such objects automatically. Before dropping the resource, first unassign the network rule from the relevant objects. See [guide](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/latest/docs/guides/unassigning_policies) for more details. diff --git a/docs/resources/stage.md b/docs/resources/stage.md index bd33afba4e..6e8cf7841e 100644 --- a/docs/resources/stage.md +++ b/docs/resources/stage.md @@ -5,6 +5,8 @@ description: |- --- +!> **Caution: Preview Feature** This feature is considered a preview feature in the provider, regardless of the state of the resource in Snowflake. We do not guarantee its stability. It will be reworked and marked as a stable feature in future releases. Breaking changes are expected, even without bumping the major version. To use this feature, add the relevant feature name to `preview_features_enabled` field in the [provider configuration](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/latest/docs#schema). Please always refer to the [Getting Help](https://github.com/Snowflake-Labs/terraform-provider-snowflake?tab=readme-ov-file#getting-help) section in our Github repo to best determine how to get help for your questions.
+ # snowflake_stage (Resource) diff --git a/docs/resources/storage_integration.md b/docs/resources/storage_integration.md index 23e69ec9b6..eadc058ebc 100644 --- a/docs/resources/storage_integration.md +++ b/docs/resources/storage_integration.md @@ -5,6 +5,8 @@ description: |- --- +!> **Caution: Preview Feature** This feature is considered a preview feature in the provider, regardless of the state of the resource in Snowflake. We do not guarantee its stability. It will be reworked and marked as a stable feature in future releases. Breaking changes are expected, even without bumping the major version. To use this feature, add the relevant feature name to `preview_features_enabled` field in the [provider configuration](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/latest/docs#schema). Please always refer to the [Getting Help](https://github.com/Snowflake-Labs/terraform-provider-snowflake?tab=readme-ov-file#getting-help) section in our Github repo to best determine how to get help for your questions. + # snowflake_storage_integration (Resource) diff --git a/docs/resources/stream.md b/docs/resources/stream.md deleted file mode 100644 index 2cf478e3ba..0000000000 --- a/docs/resources/stream.md +++ /dev/null @@ -1,76 +0,0 @@ ---- -page_title: "snowflake_stream Resource - terraform-provider-snowflake" -subcategory: "" -description: |- - ---- - -# snowflake_stream (Resource) - -~> **Deprecation** This resource is deprecated and will be removed in a future major version release. Please use one of the new resources instead: `snowflake_stream_on_directory_table` | `snowflake_stream_on_external_table` | `snowflake_stream_on_table` | `snowflake_stream_on_view`. - -## Example Usage - -```terraform -resource "snowflake_table" "table" { - database = "database" - schema = "schema" - name = "name" - - column { - type = "NUMBER(38,0)" - name = "id" - } -} - -resource "snowflake_stream" "stream" { - comment = "A stream."
- - database = "database" - schema = "schema" - name = "stream" - - on_table = snowflake_table.table.fully_qualified_name - append_only = false - insert_only = false - - owner = "role1" -} -``` - --> **Note** Instead of using fully_qualified_name, you can reference objects managed outside Terraform by constructing a correct ID, consult [identifiers guide](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/latest/docs/guides/identifiers#new-computed-fully-qualified-name-field-in-resources). - - - -## Schema - -### Required - -- `database` (String) The database in which to create the stream. -- `name` (String) Specifies the identifier for the stream; must be unique for the database and schema in which the stream is created. -- `schema` (String) The schema in which to create the stream. - -### Optional - -- `append_only` (Boolean) Type of the stream that will be created. -- `comment` (String) Specifies a comment for the stream. -- `insert_only` (Boolean) Create an insert only stream type. -- `on_stage` (String) Specifies an identifier for the stage the stream will monitor. -- `on_table` (String) Specifies an identifier for the table the stream will monitor. -- `on_view` (String) Specifies an identifier for the view the stream will monitor. -- `show_initial_rows` (Boolean) Specifies whether to return all existing rows in the source table as row inserts the first time the stream is consumed. - -### Read-Only - -- `fully_qualified_name` (String) Fully qualified name of the resource. For more information, see [object name resolution](https://docs.snowflake.com/en/sql-reference/name-resolution). -- `id` (String) The ID of this resource. -- `owner` (String) Name of the role that owns the stream. 
- -## Import - -Import is supported using the following syntax: - -```shell -# format is database name | schema name | stream name -terraform import snowflake_stream.example 'dbName|schemaName|streamName' -``` diff --git a/docs/resources/stream_on_directory_table.md b/docs/resources/stream_on_directory_table.md index 4f1ebf9772..83c90ac525 100644 --- a/docs/resources/stream_on_directory_table.md +++ b/docs/resources/stream_on_directory_table.md @@ -5,8 +5,6 @@ description: |- Resource used to manage streams on directory tables. For more information, check stream documentation https://docs.snowflake.com/en/sql-reference/sql/create-stream. --- -!> **V1 release candidate** This resource was reworked and is a release candidate for the V1. We do not expect significant changes in it before the V1. We will welcome any feedback and adjust the resource if needed. Any errors reported will be resolved with a higher priority. We encourage checking this resource out before the V1 release. Please follow the [migration guide](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/MIGRATION_GUIDE.md#v0970--v0980) to use it. - ~> **Note about copy_grants** Fields like `stage`, and `stale` can not be ALTERed on Snowflake side (check [docs](https://docs.snowflake.com/en/sql-reference/sql/alter-stream)), and a change on these fields means recreation of the resource. ForceNew can not be used because it does not preserve grants from `copy_grants`. Beware that even though a change is marked as update, the resource is recreated. # snowflake_stream_on_directory_table (Resource) diff --git a/docs/resources/stream_on_external_table.md b/docs/resources/stream_on_external_table.md index 48ca9d2e26..8a7ec5630e 100644 --- a/docs/resources/stream_on_external_table.md +++ b/docs/resources/stream_on_external_table.md @@ -5,8 +5,6 @@ description: |- Resource used to manage streams on external tables. 
For more information, check stream documentation https://docs.snowflake.com/en/sql-reference/sql/create-stream. --- -!> **V1 release candidate** This resource was reworked and is a release candidate for the V1. We do not expect significant changes in it before the V1. We will welcome any feedback and adjust the resource if needed. Any errors reported will be resolved with a higher priority. We encourage checking this resource out before the V1 release. Please follow the [migration guide](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/MIGRATION_GUIDE.md#v0960--v0970) to use it. - ~> **Note about copy_grants** Fields like `external_table`, `insert_only`, `at`, `before` and `stale` can not be ALTERed on Snowflake side (check [docs](https://docs.snowflake.com/en/sql-reference/sql/alter-stream)), and a change on these fields means recreation of the resource. ForceNew can not be used because it does not preserve grants from `copy_grants`. Beware that even though a change is marked as update, the resource is recreated. # snowflake_stream_on_external_table (Resource) diff --git a/docs/resources/stream_on_table.md b/docs/resources/stream_on_table.md index 67361e3aaa..d82a372cc0 100644 --- a/docs/resources/stream_on_table.md +++ b/docs/resources/stream_on_table.md @@ -5,8 +5,6 @@ description: |- Resource used to manage streams on tables. For more information, check stream documentation https://docs.snowflake.com/en/sql-reference/sql/create-stream. --- -!> **V1 release candidate** This resource was reworked and is a release candidate for the V1. We do not expect significant changes in it before the V1. We will welcome any feedback and adjust the resource if needed. Any errors reported will be resolved with a higher priority. We encourage checking this resource out before the V1 release. Please follow the [migration guide](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/MIGRATION_GUIDE.md#v0960--v0970) to use it. 
- ~> **Note about copy_grants** Fields like `table`, `append_only`, `at`, `before`, `show_initial_rows` and `stale` can not be ALTERed on Snowflake side (check [docs](https://docs.snowflake.com/en/sql-reference/sql/alter-stream)), and a change on these fields means recreation of the resource. ForceNew can not be used because it does not preserve grants from `copy_grants`. Beware that even though a change is marked as update, the resource is recreated. # snowflake_stream_on_table (Resource) diff --git a/docs/resources/stream_on_view.md b/docs/resources/stream_on_view.md index 4a9ae5607b..8fbfda41d5 100644 --- a/docs/resources/stream_on_view.md +++ b/docs/resources/stream_on_view.md @@ -5,8 +5,6 @@ description: |- Resource used to manage streams on views. For more information, check stream documentation https://docs.snowflake.com/en/sql-reference/sql/create-stream. --- -!> **V1 release candidate** This resource was reworked and is a release candidate for the V1. We do not expect significant changes in it before the V1. We will welcome any feedback and adjust the resource if needed. Any errors reported will be resolved with a higher priority. We encourage checking this resource out before the V1 release. Please follow the [migration guide](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/MIGRATION_GUIDE.md#v0970--v0980) to use it. - ~> **Note about copy_grants** Fields like `view`, `append_only`, `at`, `before`, `show_initial_rows` and `stale` can not be ALTERed on Snowflake side (check [docs](https://docs.snowflake.com/en/sql-reference/sql/alter-stream)), and a change on these fields means recreation of the resource. ForceNew can not be used because it does not preserve grants from `copy_grants`. Beware that even though a change is marked as update, the resource is recreated. 
# snowflake_stream_on_view (Resource) diff --git a/docs/resources/streamlit.md b/docs/resources/streamlit.md index 2087870c40..f7e29aa7ea 100644 --- a/docs/resources/streamlit.md +++ b/docs/resources/streamlit.md @@ -5,8 +5,6 @@ description: |- Resource used to manage streamlits objects. For more information, check streamlit documentation https://docs.snowflake.com/en/sql-reference/commands-streamlit. --- -!> **V1 release candidate** This resource was reworked and is a release candidate for the V1. We do not expect significant changes in it before the V1. We will welcome any feedback and adjust the resource if needed. Any errors reported will be resolved with a higher priority. We encourage checking this resource out before the V1 release. Please follow the [migration guide](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/MIGRATION_GUIDE.md#v0930--v0940) to use it. - !> **Note** Setting a query warehouse with lowercase letters does not work correctly in Snowflake. As a workaround, set the query warehouse with uppercase letters only, or use unsafe_execute with query warehouse ID wrapped in `'`. diff --git a/docs/resources/table.md b/docs/resources/table.md index 59cd1ddd68..d38db9cb82 100644 --- a/docs/resources/table.md +++ b/docs/resources/table.md @@ -5,6 +5,8 @@ description: |- --- +!> **Caution: Preview Feature** This feature is considered a preview feature in the provider, regardless of the state of the resource in Snowflake. We do not guarantee its stability. It will be reworked and marked as a stable feature in future releases. Breaking changes are expected, even without bumping the major version. To use this feature, add the relevant feature name to `preview_features_enabled` field in the [provider configuration](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/latest/docs#schema).
Please always refer to the [Getting Help](https://github.com/Snowflake-Labs/terraform-provider-snowflake?tab=readme-ov-file#getting-help) section in our Github repo to best determine how to get help for your questions. + # snowflake_table (Resource) diff --git a/docs/resources/table_constraint.md b/docs/resources/table_constraint.md index c6334476f8..1d218eb746 100644 --- a/docs/resources/table_constraint.md +++ b/docs/resources/table_constraint.md @@ -5,6 +5,8 @@ description: |- --- +!> **Caution: Preview Feature** This feature is considered a preview feature in the provider, regardless of the state of the resource in Snowflake. We do not guarantee its stability. It will be reworked and marked as a stable feature in future releases. Breaking changes are expected, even without bumping the major version. To use this feature, add the relevant feature name to `preview_features_enabled` field in the [provider configuration](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/latest/docs#schema). Please always refer to the [Getting Help](https://github.com/Snowflake-Labs/terraform-provider-snowflake?tab=readme-ov-file#getting-help) section in our Github repo to best determine how to get help for your questions. + # snowflake_table_constraint (Resource) diff --git a/docs/resources/tag.md b/docs/resources/tag.md index 92a4c51caf..a3ea0d486a 100644 --- a/docs/resources/tag.md +++ b/docs/resources/tag.md @@ -5,8 +5,6 @@ description: |- Resource used to manage tags. For more information, check tag documentation https://docs.snowflake.com/en/sql-reference/sql/create-tag. For asssigning tags to Snowflake objects, see tag_association resource https://registry.terraform.io/providers/Snowflake-Labs/snowflake/latest/docs/resources/tag_association. --- -!> **V1 release candidate** This resource was reworked and is a release candidate for the V1. We do not expect significant changes in it before the V1. We will welcome any feedback and adjust the resource if needed.
Any errors reported will be resolved with a higher priority. We encourage checking this resource out before the V1 release. Please follow the [migration guide](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/MIGRATION_GUIDE.md#v0980--v0990) to use it. - ~> **Required warehouse** For this resource, the provider now uses [tag references](https://docs.snowflake.com/en/sql-reference/functions/tag_references) to get information about masking policies attached to tags. This function requires a warehouse in the connection. Please, make sure you have either set a `DEFAULT_WAREHOUSE` for the user, or specified a warehouse in the provider configuration. # snowflake_tag (Resource) diff --git a/docs/resources/tag_association.md b/docs/resources/tag_association.md index fef230c0c4..05a3b9b045 100644 --- a/docs/resources/tag_association.md +++ b/docs/resources/tag_association.md @@ -5,8 +5,6 @@ description: |- Resource used to manage tag associations. For more information, check object tagging documentation https://docs.snowflake.com/en/user-guide/object-tagging. --- -!> **V1 release candidate** This resource was reworked and is a release candidate for the V1. We do not expect significant changes in it before the V1. We will welcome any feedback and adjust the resource if needed. Any errors reported will be resolved with a higher priority. We encourage checking this resource out before the V1 release. Please follow the [migration guide](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/MIGRATION_GUIDE.md#v0980--v0990) to use it. - -> **Note** For `ACCOUNT` object type, only identifiers with organization name are supported. See [account identifier docs](https://docs.snowflake.com/en/user-guide/admin-account-identifier#format-1-preferred-account-name-in-your-organization) for more details. -> **Note** Tag association resource ID has the following format: `"TAG_DATABASE"."TAG_SCHEMA"."TAG_NAME"|TAG_VALUE|OBJECT_TYPE`. 
This means that a tuple of tag ID, tag value and object type should be unique across the resources. If you want to specify this combination for more than one object, you should use only one `tag_association` resource with specified `object_identifiers` set. @@ -94,7 +92,6 @@ resource "snowflake_tag_association" "account_association" { ### Optional -- `object_name` (String, Deprecated) Specifies the object identifier for the tag association. - `skip_validation` (Boolean) If true, skips validation of the tag association. - `timeouts` (Block, Optional) (see [below for nested schema](#nestedblock--timeouts)) diff --git a/docs/resources/tag_masking_policy_association.md b/docs/resources/tag_masking_policy_association.md deleted file mode 100644 index ed23cfb3dc..0000000000 --- a/docs/resources/tag_masking_policy_association.md +++ /dev/null @@ -1,85 +0,0 @@ ---- -page_title: "snowflake_tag_masking_policy_association Resource - terraform-provider-snowflake" -subcategory: "" -description: |- - Attach a masking policy to a tag. Requires a current warehouse to be set. Either with SNOWFLAKE_WAREHOUSE env variable or in current session. If no warehouse is provided, a temporary warehouse will be created. ---- - -# snowflake_tag_masking_policy_association (Resource) - -~> **Deprecation** This resource is deprecated and will be removed in a future major version release. Please use one of the new resources instead: `snowflake_tag`. - -Attach a masking policy to a tag. Requires a current warehouse to be set. Either with SNOWFLAKE_WAREHOUSE env variable or in current session. If no warehouse is provided, a temporary warehouse will be created. 
- -## Example Usage - -```terraform -# Note: Currently this feature is only available to accounts that are Enterprise Edition (or higher) - -resource "snowflake_database" "test" { - name = "TEST_DB1" - data_retention_time_in_days = 1 -} - -resource "snowflake_database" "test2" { - name = "TEST_DB2" - data_retention_time_in_days = 1 -} - - -resource "snowflake_schema" "test2" { - database = snowflake_database.test2.name - name = "FOOBAR2" - data_retention_days = snowflake_database.test2.data_retention_time_in_days -} - -resource "snowflake_schema" "test" { - database = snowflake_database.test.name - name = "FOOBAR" - data_retention_days = snowflake_database.test.data_retention_time_in_days -} - -resource "snowflake_tag" "this" { - name = upper("test_tag") - database = snowflake_database.test2.name - schema = snowflake_schema.test2.name -} - -resource "snowflake_masking_policy" "example_masking_policy" { - name = "EXAMPLE_MASKING_POLICY" - database = snowflake_database.test.name - schema = snowflake_schema.test.name - value_data_type = "string" - masking_expression = "case when current_role() in ('ACCOUNTADMIN') then val else sha2(val, 512) end" - return_data_type = "string" -} - -resource "snowflake_tag_masking_policy_association" "name" { - tag_id = snowflake_tag.this.fully_qualified_name - masking_policy_id = snowflake_masking_policy.example_masking_policy.fully_qualified_name -} -``` - --> **Note** Instead of using fully_qualified_name, you can reference objects managed outside Terraform by constructing a correct ID, consult [identifiers guide](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/latest/docs/guides/identifiers#new-computed-fully-qualified-name-field-in-resources). - - - -## Schema - -### Required - -- `masking_policy_id` (String) The resource id of the masking policy -- `tag_id` (String) Specifies the identifier for the tag. 
Note: format must follow: "databaseName"."schemaName"."tagName" or "databaseName.schemaName.tagName" or "databaseName|schemaName.tagName" (snowflake_tag.tag.id) - -### Read-Only - -- `id` (String) The ID of this resource. - -## Import - -Import is supported using the following syntax: - -```shell -# format is tag database name | tag schema name | tag name | masking policy database | masking policy schema | masking policy name -terraform import snowflake_tag_masking_policy_association.example 'tag_db|tag_schema|tag_name|mp_db|mp_schema|mp_name' -``` diff --git a/docs/resources/task.md b/docs/resources/task.md index 8f9a2c6034..f7965aed10 100644 --- a/docs/resources/task.md +++ b/docs/resources/task.md @@ -5,8 +5,6 @@ description: |- Resource used to manage task objects. For more information, check task documentation https://docs.snowflake.com/en/user-guide/tasks-intro. --- -!> **V1 release candidate** This resource was reworked and is a release candidate for the V1. We do not expect significant changes in it before the V1. We will welcome any feedback and adjust the resource if needed. Any errors reported will be resolved with a higher priority. We encourage checking this resource out before the V1 release. Please follow the [migration guide](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/MIGRATION_GUIDE.md#v0980--v0990) to use it. - # snowflake_task (Resource) Resource used to manage task objects. For more information, check [task documentation](https://docs.snowflake.com/en/user-guide/tasks-intro). 
diff --git a/docs/resources/unsafe_execute.md b/docs/resources/unsafe_execute.md deleted file mode 100644 index 3ac9e7c7b6..0000000000 --- a/docs/resources/unsafe_execute.md +++ /dev/null @@ -1,147 +0,0 @@ ---- -# generated by https://github.com/hashicorp/terraform-plugin-docs -page_title: "snowflake_unsafe_execute Resource - terraform-provider-snowflake" -subcategory: "" -description: |- - Experimental resource allowing execution of ANY SQL statement. It may destroy resources if used incorrectly. It may behave incorrectly combined with other resources. Use at your own risk. ---- - -# snowflake_unsafe_execute (Resource) - -!> **Warning** This is a dangerous resource that allows executing **ANY** SQL statement. It may destroy resources if used incorrectly. It may behave incorrectly combined with other resources. Use at your own risk. - -~> **Note** It can be theoretically used to manage resource that are not supported by the provider. This is risky and may brake other resources if used incorrectly. - -~> **Note** Use `query` parameter with caution. It will fetch **ALL** the results returned by the query provided. Try to limit the number of results by writing query with filters. Query failure does not stop resource creation; it simply results in `query_results` being empty. - -~> **Deprecation** This resource is deprecated and will be removed in a future major version release. Please use [snowflake_execute](./execute) instead. - -Experimental resource allowing execution of ANY SQL statement. It may destroy resources if used incorrectly. It may behave incorrectly combined with other resources. Use at your own risk. 
- -## Example Usage - -```terraform -################################## -### simple use cases -################################## - -# create and destroy resource -resource "snowflake_unsafe_execute" "test" { - execute = "CREATE DATABASE ABC" - revert = "DROP DATABASE ABC" -} - -# create and destroy resource using qualified name -resource "snowflake_unsafe_execute" "test" { - execute = "CREATE DATABASE \"abc\"" - revert = "DROP DATABASE \"abc\"" -} - -# with query -resource "snowflake_unsafe_execute" "test" { - execute = "CREATE DATABASE ABC" - revert = "DROP DATABASE ABC" - query = "SHOW DATABASES LIKE '%ABC%'" -} - -################################## -### grants example -################################## - -# grant and revoke privilege USAGE to ROLE on database -resource "snowflake_unsafe_execute" "test" { - execute = "GRANT USAGE ON DATABASE ABC TO ROLE XYZ" - revert = "REVOKE USAGE ON DATABASE ABC FROM ROLE XYZ" -} - -# grant and revoke with for_each -variable "database_grants" { - type = list(object({ - database_name = string - role_id = string - privileges = list(string) - })) -} - -resource "snowflake_unsafe_execute" "test" { - for_each = { for index, db_grant in var.database_grants : index => db_grant } - execute = "GRANT ${join(",", each.value.privileges)} ON DATABASE ${each.value.database_name} TO ROLE ${each.value.role_id}" - revert = "REVOKE ${join(",", each.value.privileges)} ON DATABASE ${each.value.database_name} FROM ROLE ${each.value.role_id}" -} - -################################## -### fixing bad configuration -################################## - -# bad revert - simple -# 1 - resource created with a bad revert; it is constructed, revert is not validated before destroy happens -resource "snowflake_unsafe_execute" "test" { - execute = "CREATE DATABASE ABC" - revert = "SELECT 1" -} - -# 2 - fix the revert first; resource won't be recreated -resource "snowflake_unsafe_execute" "test" { - execute = "CREATE DATABASE ABC" - revert = "DROP DATABASE 
ABC" -} - -# bad revert - complex (we assume that the problem is spotted after trying to change the execute) -# 1 - resource created with a bad revert; it is constructed, revert is not validated before destroy happens -resource "snowflake_unsafe_execute" "test" { - execute = "CREATE DATABASE ABC" - revert = "SELECT 1" -} - -# 2 - try to create different database; it will fail on bad destroy -resource "snowflake_unsafe_execute" "test" { - execute = "CREATE DATABASE XYZ" - revert = "SELECT 1" -} - -# 3 - fix the revert first -resource "snowflake_unsafe_execute" "test" { - execute = "CREATE DATABASE ABC" - revert = "DROP DATABASE ABC" -} - -# 4 - create different database updating revert also -resource "snowflake_unsafe_execute" "test" { - execute = "CREATE DATABASE XYZ" - revert = "DROP DATABASE XYZ" -} - -# bad query -# 1 - resource will be created; query_results will be empty -resource "snowflake_unsafe_execute" "test" { - execute = "CREATE DATABASE ABC" - revert = "DROP DATABASE ABC" - query = "bad query" -} - -# 2 - fix the query; query_results will be calculated; resource won't be recreated -resource "snowflake_unsafe_execute" "test" { - execute = "CREATE DATABASE ABC" - revert = "DROP DATABASE ABC" - query = "SHOW DATABASES LIKE '%ABC%'" -} -``` --> **Note** Instead of using fully_qualified_name, you can reference objects managed outside Terraform by constructing a correct ID, consult [identifiers guide](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/latest/docs/guides/identifiers#new-computed-fully-qualified-name-field-in-resources). - - - -## Schema - -### Required - -- `execute` (String) SQL statement to execute. Forces recreation of resource on change. -- `revert` (String) SQL statement to revert the execute statement. Invoked when resource is being destroyed. - -### Optional - -- `query` (String) Optional SQL statement to do a read. Invoked on every resource refresh and every time it is changed. 
- -### Read-Only - -- `id` (String) The ID of this resource. -- `query_results` (List of Map of String) List of key-value maps (text to text) retrieved after executing read query. Will be empty if the query results in an error. diff --git a/docs/resources/user.md b/docs/resources/user.md index b1bb599467..9c0b2a012e 100644 --- a/docs/resources/user.md +++ b/docs/resources/user.md @@ -5,8 +5,6 @@ description: |- Resource used to manage user objects. For more information, check user documentation https://docs.snowflake.com/en/sql-reference/commands-user-role#user-management. --- -!> **V1 release candidate** This resource was reworked and is a release candidate for the V1. We do not expect significant changes in it before the V1. We will welcome any feedback and adjust the resource if needed. Any errors reported will be resolved with a higher priority. We encourage checking this resource out before the V1 release. Please follow the [migration guide](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/MIGRATION_GUIDE.md#v094x--v0950) to use it. - -> **Note** `snowflake_user_password_policy_attachment` will be reworked in the following versions of the provider which may still affect this resource. -> **Note** Attaching user policies will be handled in the following versions of the provider which may still affect this resource. diff --git a/docs/resources/user_authentication_policy_attachment.md b/docs/resources/user_authentication_policy_attachment.md index 4d3ba86057..9cd4af4a98 100644 --- a/docs/resources/user_authentication_policy_attachment.md +++ b/docs/resources/user_authentication_policy_attachment.md @@ -5,6 +5,8 @@ description: |- Specifies the authentication policy to use for a certain user. --- +!> **Caution: Preview Feature** This feature is considered a preview feature in the provider, regardless of the state of the resource in Snowflake. We do not guarantee its stability. 
It will be reworked and marked as a stable feature in future releases. Breaking changes are expected, even without bumping the major version. To use this feature, add the relevant feature name to `preview_features_enabled` field in the [provider configuration](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/latest/docs#schema). Please always refer to the [Getting Help](https://github.com/Snowflake-Labs/terraform-provider-snowflake?tab=readme-ov-file#getting-help) section in our Github repo to best determine how to get help for your questions. + # snowflake_user_authentication_policy_attachment (Resource) Specifies the authentication policy to use for a certain user. diff --git a/docs/resources/user_password_policy_attachment.md b/docs/resources/user_password_policy_attachment.md index a0c59d0386..da7e90f51c 100644 --- a/docs/resources/user_password_policy_attachment.md +++ b/docs/resources/user_password_policy_attachment.md @@ -5,6 +5,8 @@ description: |- Specifies the password policy to use for a certain user. --- +!> **Caution: Preview Feature** This feature is considered a preview feature in the provider, regardless of the state of the resource in Snowflake. We do not guarantee its stability. It will be reworked and marked as a stable feature in future releases. Breaking changes are expected, even without bumping the major version. To use this feature, add the relevant feature name to `preview_features_enabled` field in the [provider configuration](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/latest/docs#schema). Please always refer to the [Getting Help](https://github.com/Snowflake-Labs/terraform-provider-snowflake?tab=readme-ov-file#getting-help) section in our Github repo to best determine how to get help for your questions. + # snowflake_user_password_policy_attachment (Resource) Specifies the password policy to use for a certain user.
diff --git a/docs/resources/user_public_keys.md b/docs/resources/user_public_keys.md index 54a0067069..06288a39b6 100644 --- a/docs/resources/user_public_keys.md +++ b/docs/resources/user_public_keys.md @@ -5,6 +5,8 @@ description: |- --- +!> **Caution: Preview Feature** This feature is considered a preview feature in the provider, regardless of the state of the resource in Snowflake. We do not guarantee its stability. It will be reworked and marked as a stable feature in future releases. Breaking changes are expected, even without bumping the major version. To use this feature, add the relevant feature name to `preview_features_enabled` field in the [provider configuration](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/latest/docs#schema). Please always refer to the [Getting Help](https://github.com/Snowflake-Labs/terraform-provider-snowflake?tab=readme-ov-file#getting-help) section in our Github repo to best determine how to get help for your questions. + !> **Important** Starting from v0.95.0, it is advised to use this resource **only** if users are not managed through terraform. Check more in the [migration guide](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/MIGRATION_GUIDE.md#v094x--v0950). # snowflake_user_public_keys (Resource) diff --git a/docs/resources/view.md b/docs/resources/view.md index 9a4efafe6d..876f999a75 100644 --- a/docs/resources/view.md +++ b/docs/resources/view.md @@ -5,8 +5,6 @@ description: |- Resource used to manage view objects. For more information, check view documentation https://docs.snowflake.com/en/sql-reference/sql/create-view. --- -!> **V1 release candidate** This resource was reworked and is a release candidate for the V1. We do not expect significant changes in it before the V1. We will welcome any feedback and adjust the resource if needed. Any errors reported will be resolved with a higher priority. We encourage checking this resource out before the V1 release.
Please follow the [migration guide](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/MIGRATION_GUIDE.md#v094x--v0950) to use it. - !> Due to Snowflake limitations, to properly compute diff on `statement` field, the provider parses a `text` field which contains the whole CREATE query used to create the resource. We recommend not using special characters, especially `(`, `,`, `)` in any of the fields, if possible. ~> **Note about copy_grants** Fields like `is_recursive`, `is_temporary`, `copy_grants` and `statement` can not be ALTERed on Snowflake side (check [docs](https://docs.snowflake.com/en/sql-reference/sql/alter-view)), and a change on these fields means recreation of the resource. ForceNew can not be used because it does not preserve grants from `copy_grants`. Beware that even though a change is marked as update, the resource is recreated. diff --git a/docs/resources/warehouse.md b/docs/resources/warehouse.md index 44f1ca5e65..819a74fd06 100644 --- a/docs/resources/warehouse.md +++ b/docs/resources/warehouse.md @@ -5,8 +5,6 @@ description: |- Resource used to manage warehouse objects. For more information, check warehouse documentation https://docs.snowflake.com/en/sql-reference/commands-warehouse. --- -!> **V1 release candidate** This resource was reworked and is a release candidate for the V1. We do not expect significant changes in it before the V1. We will welcome any feedback and adjust the resource if needed. Any errors reported will be resolved with a higher priority. We encourage checking this resource out before the V1 release. Please follow the [migration guide](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/MIGRATION_GUIDE.md#v0920--v0930) to use it. - -> **Note** Field `RESOURCE_CONSTRAINT` is currently missing. It will be added in the future. 
diff --git a/docs/technical-documentation/identifiers_rework_design_decisions.md b/docs/technical-documentation/identifiers_rework_design_decisions.md index b60aad2183..b0edcc776a 100644 --- a/docs/technical-documentation/identifiers_rework_design_decisions.md +++ b/docs/technical-documentation/identifiers_rework_design_decisions.md @@ -18,7 +18,7 @@ * [Conclusions](#conclusions) -This document summarises work done in the [identifiers rework](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/ROADMAP.md#identifiers-rework) and future plans for further identifier improvements. +This document summarises work done in the [identifiers rework](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/ROADMAP.md#identifiers-rework) and future plans for further identifier improvements. But before we dive into results and design decisions, here’s the list of reasons why we decided to rework the identifiers in the first place: - Common issues with identifiers with arguments (identifiers for functions, procedures, and external functions). - Meaningless error messages whenever an invalid identifier is specified. @@ -31,31 +31,31 @@ Now, knowing the issues we wanted to solve, we would like to present the changes ## Topics ### New identifier parser -To resolve many of our underlying problems with parsing identifiers, we decided to go with the new one that will be able to correctly parse fully qualified names of objects. -In addition to a better parsing function, we made sure it will return user-friendly error messages that will be able to find the root cause of a problem when specifying invalid identifiers. +To resolve many of our underlying problems with parsing identifiers, we decided to go with the new one that will be able to correctly parse fully qualified names of objects. 
+In addition to a better parsing function, we made sure it will return user-friendly error messages that will be able to find the root cause of a problem when specifying invalid identifiers. Previously, the error looked like [this](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2091). ### Using the recommended format for account identifiers -Previously, the use of account identifiers was mixed across the resources, in many cases causing confusion ([commonly known issues reference](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/CREATING_ISSUES.md#incorrect-account-identifier-snowflake_databasefrom_share)). -Some of them required an account locator format (that was not fully supported and is currently deprecated), and some of the new recommended ones. -We decided to unify them and use the new account identifier format everywhere. +Previously, the use of account identifiers was mixed across the resources, in many cases causing confusion ([commonly known issues reference](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/CREATING_ISSUES.md#incorrect-account-identifier-snowflake_databasefrom_share)). +Some of them required an account locator format (that was not fully supported), and some of the new recommended ones. +We decided to unify them and use the new account identifier format everywhere. The account locator format is not supported in v1. ### Better handling for identifiers with arguments Previously, the handling of identifiers with arguments was not done fully correctly, causing many issues and confusion on how to use them ([commonly known issues reference](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/CREATING_ISSUES.md#granting-on-functions-or-procedures)). -The main pain point was using them with privilege-granting resources. To address this we had to make two steps. 
-The first one was adding a dedicated representation of an identifier containing arguments and using it in our SDK. -The second one was additional parsing for the output of SHOW GRANTS in our SDK which was only necessary for functions, +The main pain point was using them with privilege-granting resources. To address this we had to make two steps. +The first one was adding a dedicated representation of an identifier containing arguments and using it in our SDK. +The second one was additional parsing for the output of SHOW GRANTS in our SDK which was only necessary for functions, procedures, and external functions that returned non-valid identifier formats. ### Quoting differences -There are many reported issues on identifier quoting and how it is inconsistent across resources and causes plan diffs to enforce certain format (e.g. [#2982](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2982), [#2236](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2236)). -To address that, we decided to add diff suppress on identifier fields that ignore changes related to differences in quotes. -The main root cause of such differences was that Snowflake has specific rules when a given identifier (or part of an identifier) is quoted and when it’s not. +There are many reported issues on identifier quoting and how it is inconsistent across resources and causes plan diffs to enforce certain format (e.g. [#2982](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2982), [#2236](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2236)). +To address that, we decided to add diff suppress on identifier fields that ignore changes related to differences in quotes. +The main root cause of such differences was that Snowflake has specific rules when a given identifier (or part of an identifier) is quoted and when it’s not. 
The diff suppression should make those rules irrelevant whenever identifiers in your Terraform configuration contain quotes or not. ### New computed fully qualified name field in resources -With the combination of quotes, old parsing methods, and other factors, it was a struggle to specify the fully qualified name of an object needed (e.g. [#2164](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2164), [#2754](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2754)). -Now, with v0.95.0, every resource that represents an object in Snowflake (e.g. user, role), and not an association (e.g. grants) will have a new computed field named `fully_qualified_name`. +With the combination of quotes, old parsing methods, and other factors, it was a struggle to specify the fully qualified name of an object needed (e.g. [#2164](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2164), [#2754](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2754)). +Now, with v0.95.0, every resource that represents an object in Snowflake (e.g. user, role), and not an association (e.g. grants) will have a new computed field named `fully_qualified_name`. With the new computed field, it will be much easier to use resources requiring fully qualified names, for examples of usage head over to the [documentation for granting privileges to account role](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/latest/docs/resources/grant_privileges_to_account_role). ### New resource identifier format @@ -70,11 +70,11 @@ The main limitations around identifiers are strictly connected to what character - Avoid parentheses ‘(’ and ‘)’ when specifying identifiers for functions, procedures, external functions. Parentheses as part of their identifiers could potentially make our parser split the identifier into wrong parts causing issues. 
- Do not use double quotes as part of identifiers (in Snowflake you can have double quotes inside identifiers by escaping them with the second double quote, e.g. `create database “test””identifier”` will create a database with name `test"identifier`). -As a general recommendation, please lean toward simple names without any special characters, and if word separation is needed, use underscores. -This also applies to other “identifiers” like column names in tables or argument names in functions. +As a general recommendation, please lean toward simple names without any special characters, and if word separation is needed, use underscores. +This also applies to other “identifiers” like column names in tables or argument names in functions. If you are currently using complex identifiers, we recommend considering migration to simpler identifiers for a more straightforward and less error-prone experience. Also, we want to make it clear that every field specifying an identifier (or its part, e.g. `name`, `database`, `schema`) is always case-sensitive. By specifying -an identifier with lowercase characters in Terraform, you also have to refer to them with lowercase names in quotes in Snowflake. +an identifier with lowercase characters in Terraform, you also have to refer to them with lowercase names in quotes in Snowflake. For example, by specifying an account role with `name = "test"` to check privileges granted to the role in Snowflake, you have to call: ```sql show grants to role "test"; @@ -82,18 +82,18 @@ show grants to role test; -- this won't work, because unquoted identifiers are c ``` ### New identifier conventions -Although, we are closing the identifiers rework, some resources won’t have the mentioned improvements. -They were mostly applied to the objects that were already prepared for v1 ([essential objects](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/v1-preparations/ESSENTIAL_GA_OBJECTS.MD)). 
-The remaining resources (and newly created ones) will receive these improvements [during v1 preparation](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/ROADMAP.md#preparing-essential-ga-objects-for-the-provider-v1) following our internal guidelines that contain those new rules regarding identifiers. +Although we are closing the identifiers rework, some resources won’t have the mentioned improvements. +They were mostly applied to the objects that were already prepared for v1 ([essential objects](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/v1-preparations/ESSENTIAL_GA_OBJECTS.MD)). +The remaining resources (and newly created ones) will receive these improvements [during v1 preparation](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/ROADMAP.md#preparing-essential-ga-objects-for-the-provider-v1) following our internal guidelines that contain those new rules regarding identifiers. No matter if the resource has been refactored or not, the same recommendations mentioned above apply. ## Next steps -While we have completed the identifiers rework for now, we plan to revisit these topics in the future to ensure continued improvements. +While we have completed the identifiers rework for now, we plan to revisit these topics in the future to ensure continued improvements. In the upcoming phases, we will focus on addressing the following key areas: - Implementing better validations for identifiers. - Providing support for new identifier formats in our resources (e.g. [instance roles](https://docs.snowflake.com/en/sql-reference/snowflake-db-classes#instance-roles)). ## Conclusions -We have concluded the identifiers rework, implementing significant improvements to address common issues and inconsistencies in identifier handling. +We have concluded the identifiers rework, implementing significant improvements to address common issues and inconsistencies in identifier handling.
Moving forward, we aim to continue enhancing our identifier functionalities to provide a smoother experience. We value your feedback on the recent changes made to the identifiers. Please share your thoughts and suggestions to help us refine our identifier management further. diff --git a/examples/additional/deprecated_datasources.MD b/examples/additional/deprecated_datasources.MD index 9846a374a1..393ab71209 100644 --- a/examples/additional/deprecated_datasources.MD +++ b/examples/additional/deprecated_datasources.MD @@ -1,4 +1,3 @@ -## Currently deprecated datasources + + -- [snowflake_role](./docs/data-sources/role) - use [snowflake_account_roles](./docs/data-sources/account_roles) instead -- [snowflake_roles](./docs/data-sources/roles) - use [snowflake_account_roles](./docs/data-sources/account_roles) instead diff --git a/examples/additional/deprecated_resources.MD b/examples/additional/deprecated_resources.MD index 0f6a49a421..3b52465691 100644 --- a/examples/additional/deprecated_resources.MD +++ b/examples/additional/deprecated_resources.MD @@ -1,11 +1,3 @@ -## Currently deprecated resources + + -- [snowflake_database_old](./docs/resources/database_old) -- [snowflake_function](./docs/resources/function) -- [snowflake_oauth_integration](./docs/resources/oauth_integration) -- [snowflake_procedure](./docs/resources/procedure) -- [snowflake_role](./docs/resources/role) - use [snowflake_account_role](./docs/resources/account_role) instead -- [snowflake_saml_integration](./docs/resources/saml_integration) - use [snowflake_saml2_integration](./docs/resources/saml2_integration) instead -- [snowflake_stream](./docs/resources/stream) -- [snowflake_tag_masking_policy_association](./docs/resources/tag_masking_policy_association) -- [snowflake_unsafe_execute](./docs/resources/unsafe_execute) - use [snowflake_execute](./docs/resources/execute) instead diff --git a/examples/data-sources/snowflake_role/data-source.tf b/examples/data-sources/snowflake_role/data-source.tf 
deleted file mode 100644 index 49dea38bc1..0000000000 --- a/examples/data-sources/snowflake_role/data-source.tf +++ /dev/null @@ -1,3 +0,0 @@ -data "snowflake_role" "this" { - name = "role1" -} diff --git a/examples/data-sources/snowflake_roles/data-source.tf b/examples/data-sources/snowflake_roles/data-source.tf deleted file mode 100644 index 24ed09708b..0000000000 --- a/examples/data-sources/snowflake_roles/data-source.tf +++ /dev/null @@ -1,48 +0,0 @@ -# Simple usage -data "snowflake_roles" "simple" { -} - -output "simple_output" { - value = data.snowflake_roles.simple.roles -} - -# Filtering (like) -data "snowflake_roles" "like" { - like = "role-name" -} - -output "like_output" { - value = data.snowflake_roles.like.roles -} - -# Filtering (in class) -data "snowflake_roles" "in_class" { - in_class = "SNOWFLAKE.CORE.BUDGET" -} - -output "in_class_output" { - value = data.snowflake_roles.in_class.roles -} - -# Ensure the number of roles is equal to at least one element (with the use of postcondition) -data "snowflake_roles" "assert_with_postcondition" { - like = "role-name-%" - lifecycle { - postcondition { - condition = length(self.roles) > 0 - error_message = "there should be at least one role" - } - } -} - -# Ensure the number of roles is equal to at exactly one element (with the use of check block) -check "role_check" { - data "snowflake_roles" "assert_with_check_block" { - like = "role-name" - } - - assert { - condition = length(data.snowflake_roles.assert_with_check_block.roles) == 1 - error_message = "Roles filtered by '${data.snowflake_roles.assert_with_check_block.like}' returned ${length(data.snowflake_roles.assert_with_check_block.roles)} roles where one was expected" - } -} diff --git a/examples/resources/snowflake_database_old/import.sh b/examples/resources/snowflake_database_old/import.sh deleted file mode 100644 index 3ea61a2c21..0000000000 --- a/examples/resources/snowflake_database_old/import.sh +++ /dev/null @@ -1 +0,0 @@ -terraform import 
snowflake_database_old.example 'database_name' diff --git a/examples/resources/snowflake_database_old/resource.tf b/examples/resources/snowflake_database_old/resource.tf deleted file mode 100644 index 2219295495..0000000000 --- a/examples/resources/snowflake_database_old/resource.tf +++ /dev/null @@ -1,30 +0,0 @@ -resource "snowflake_database_old" "simple" { - name = "testing" - comment = "test comment" - data_retention_time_in_days = 3 -} - -resource "snowflake_database_old" "with_replication" { - name = "testing_2" - comment = "test comment 2" - replication_configuration { - accounts = ["test_account1", "test_account_2"] - ignore_edition_check = true - } -} - -resource "snowflake_database_old" "from_replica" { - name = "testing_3" - comment = "test comment" - data_retention_time_in_days = 3 - from_replica = "\"org1\".\"account1\".\"primary_db_name\"" -} - -resource "snowflake_database_old" "from_share" { - name = "testing_4" - comment = "test comment" - from_share = { - provider = "account1_locator" - share = "share1" - } -} diff --git a/examples/resources/snowflake_function/import.sh b/examples/resources/snowflake_function/import.sh deleted file mode 100644 index 62c2ca5681..0000000000 --- a/examples/resources/snowflake_function/import.sh +++ /dev/null @@ -1,2 +0,0 @@ -# format is ..() -terraform import snowflake_function.example 'dbName.schemaName.functionName(varchar, varchar, varchar)' diff --git a/examples/resources/snowflake_function/resource.tf b/examples/resources/snowflake_function/resource.tf deleted file mode 100644 index 2e5c03a00f..0000000000 --- a/examples/resources/snowflake_function/resource.tf +++ /dev/null @@ -1,73 +0,0 @@ -// Provider configuration -provider "snowflake" { - region = "REGION" // Default is "us-west-2" - username = "USERNAME" - account = "ACCOUNT" - password = "PASSWORD" - role = "MY_ROLE" - warehouse = "MY_WH" // Optional attribute, some resources (e.g. 
Python UDFs)' require a warehouse to create and can also be set optionally from the `SNOWFLAKE_WAREHOUSE` environment variable -} - -// Create database -resource "snowflake_database" "db" { - name = "MY_DB" - data_retention_days = 1 -} - -// Create schema -resource "snowflake_schema" "schema" { - database = snowflake_database.db.name - name = "MY_SCHEMA" - data_retention_days = 1 -} - -// Example for Java language -resource "snowflake_function" "test_funct_java" { - name = "my_java_func" - database = "MY_DB" - schema = "MY_SCHEMA" - arguments { - name = "arg1" - type = "number" - } - comment = "Example for java language" - return_type = "varchar" - language = "java" - handler = "CoolFunc.test" - statement = "class CoolFunc {public static String test(int n) {return \"hello!\";}}" -} - -// Example for Python language -resource "snowflake_function" "python_test" { - name = "MY_PYTHON_FUNC" - database = "MY_DB" - schema = "MY_SCHEMA" - arguments { - name = "arg1" - type = "number" - } - comment = "Example for Python language" - return_type = "NUMBER(38,0)" - null_input_behavior = "CALLED ON NULL INPUT" - return_behavior = "VOLATILE" - language = "python" - runtime_version = "3.8" - handler = "add_py" - statement = "def add_py(i): return i+1" -} - -// Example SQL language -resource "snowflake_function" "sql_test" { - name = "MY_SQL_FUNC" - database = "MY_DB" - schema = "MY_SCHEMA" - arguments { - name = "arg1" - type = "number" - } - comment = "Example for SQL language" - return_type = "NUMBER(38,0)" - null_input_behavior = "CALLED ON NULL INPUT" - return_behavior = "VOLATILE" - statement = "select arg1 + 1" -} diff --git a/examples/resources/snowflake_oauth_integration/import.sh b/examples/resources/snowflake_oauth_integration/import.sh deleted file mode 100644 index cbbb03d1ea..0000000000 --- a/examples/resources/snowflake_oauth_integration/import.sh +++ /dev/null @@ -1 +0,0 @@ -terraform import snowflake_oauth_integration.example name diff --git 
a/examples/resources/snowflake_oauth_integration/resource.tf b/examples/resources/snowflake_oauth_integration/resource.tf deleted file mode 100644 index d28900d9ce..0000000000 --- a/examples/resources/snowflake_oauth_integration/resource.tf +++ /dev/null @@ -1,8 +0,0 @@ -resource "snowflake_oauth_integration" "tableau_desktop" { - name = "TABLEAU_DESKTOP" - oauth_client = "TABLEAU_DESKTOP" - enabled = true - oauth_issue_refresh_tokens = true - oauth_refresh_token_validity = 3600 - blocked_roles_list = ["SYSADMIN"] -} diff --git a/examples/resources/snowflake_procedure/import.sh b/examples/resources/snowflake_procedure/import.sh deleted file mode 100644 index 9ba6623768..0000000000 --- a/examples/resources/snowflake_procedure/import.sh +++ /dev/null @@ -1,2 +0,0 @@ -# format is ..() -terraform import snowflake_procedure.example 'dbName.schemaName.procedureName(varchar, varchar, varchar)' diff --git a/examples/resources/snowflake_procedure/resource.tf b/examples/resources/snowflake_procedure/resource.tf deleted file mode 100644 index 210a395897..0000000000 --- a/examples/resources/snowflake_procedure/resource.tf +++ /dev/null @@ -1,34 +0,0 @@ -resource "snowflake_database" "db" { - name = "MYDB" - data_retention_days = 1 -} - -resource "snowflake_schema" "schema" { - database = snowflake_database.db.name - name = "MYSCHEMA" - data_retention_days = 1 -} - -resource "snowflake_procedure" "proc" { - name = "SAMPLEPROC" - database = snowflake_database.db.name - schema = snowflake_schema.schema.name - language = "JAVASCRIPT" - arguments { - name = "arg1" - type = "varchar" - } - arguments { - name = "arg2" - type = "DATE" - } - comment = "Procedure with 2 arguments" - return_type = "VARCHAR" - execute_as = "CALLER" - return_behavior = "IMMUTABLE" - null_input_behavior = "RETURNS NULL ON NULL INPUT" - statement = < diff --git a/examples/resources/snowflake_session_parameter/resource.tf b/examples/resources/snowflake_session_parameter/resource.tf deleted file mode 100644 
index c7c0dc2b30..0000000000 --- a/examples/resources/snowflake_session_parameter/resource.tf +++ /dev/null @@ -1,11 +0,0 @@ -resource "snowflake_session_parameter" "s" { - key = "AUTOCOMMIT" - value = "false" - user = "TEST_USER" -} - -resource "snowflake_session_parameter" "s2" { - key = "BINARY_OUTPUT_FORMAT" - value = "BASE64" - on_account = true -} diff --git a/examples/resources/snowflake_stream/import.sh b/examples/resources/snowflake_stream/import.sh deleted file mode 100644 index e8086527ab..0000000000 --- a/examples/resources/snowflake_stream/import.sh +++ /dev/null @@ -1,2 +0,0 @@ -# format is database name | schema name | stream name -terraform import snowflake_stream.example 'dbName|schemaName|streamName' diff --git a/examples/resources/snowflake_stream/resource.tf b/examples/resources/snowflake_stream/resource.tf deleted file mode 100644 index aba5459816..0000000000 --- a/examples/resources/snowflake_stream/resource.tf +++ /dev/null @@ -1,24 +0,0 @@ -resource "snowflake_table" "table" { - database = "database" - schema = "schema" - name = "name" - - column { - type = "NUMBER(38,0)" - name = "id" - } -} - -resource "snowflake_stream" "stream" { - comment = "A stream." 
- - database = "database" - schema = "schema" - name = "stream" - - on_table = snowflake_table.table.fully_qualified_name - append_only = false - insert_only = false - - owner = "role1" -} diff --git a/examples/resources/snowflake_tag_masking_policy_association/import.sh b/examples/resources/snowflake_tag_masking_policy_association/import.sh deleted file mode 100644 index 69a2971cdf..0000000000 --- a/examples/resources/snowflake_tag_masking_policy_association/import.sh +++ /dev/null @@ -1,2 +0,0 @@ -# format is tag database name | tag schema name | tag name | masking policy database | masking policy schema | masking policy name -terraform import snowflake_tag_masking_policy_association.example 'tag_db|tag_schema|tag_name|mp_db|mp_schema|mp_name' \ No newline at end of file diff --git a/examples/resources/snowflake_tag_masking_policy_association/resource.tf b/examples/resources/snowflake_tag_masking_policy_association/resource.tf deleted file mode 100644 index 94f080aa35..0000000000 --- a/examples/resources/snowflake_tag_masking_policy_association/resource.tf +++ /dev/null @@ -1,44 +0,0 @@ -# Note: Currently this feature is only available to accounts that are Enterprise Edition (or higher) - -resource "snowflake_database" "test" { - name = "TEST_DB1" - data_retention_time_in_days = 1 -} - -resource "snowflake_database" "test2" { - name = "TEST_DB2" - data_retention_time_in_days = 1 -} - - -resource "snowflake_schema" "test2" { - database = snowflake_database.test2.name - name = "FOOBAR2" - data_retention_days = snowflake_database.test2.data_retention_time_in_days -} - -resource "snowflake_schema" "test" { - database = snowflake_database.test.name - name = "FOOBAR" - data_retention_days = snowflake_database.test.data_retention_time_in_days -} - -resource "snowflake_tag" "this" { - name = upper("test_tag") - database = snowflake_database.test2.name - schema = snowflake_schema.test2.name -} - -resource "snowflake_masking_policy" "example_masking_policy" { - name = 
"EXAMPLE_MASKING_POLICY" - database = snowflake_database.test.name - schema = snowflake_schema.test.name - value_data_type = "string" - masking_expression = "case when current_role() in ('ACCOUNTADMIN') then val else sha2(val, 512) end" - return_data_type = "string" -} - -resource "snowflake_tag_masking_policy_association" "name" { - tag_id = snowflake_tag.this.fully_qualified_name - masking_policy_id = snowflake_masking_policy.example_masking_policy.fully_qualified_name -} diff --git a/examples/resources/snowflake_unsafe_execute/resource.tf b/examples/resources/snowflake_unsafe_execute/resource.tf deleted file mode 100644 index efca62f160..0000000000 --- a/examples/resources/snowflake_unsafe_execute/resource.tf +++ /dev/null @@ -1,104 +0,0 @@ -################################## -### simple use cases -################################## - -# create and destroy resource -resource "snowflake_unsafe_execute" "test" { - execute = "CREATE DATABASE ABC" - revert = "DROP DATABASE ABC" -} - -# create and destroy resource using qualified name -resource "snowflake_unsafe_execute" "test" { - execute = "CREATE DATABASE \"abc\"" - revert = "DROP DATABASE \"abc\"" -} - -# with query -resource "snowflake_unsafe_execute" "test" { - execute = "CREATE DATABASE ABC" - revert = "DROP DATABASE ABC" - query = "SHOW DATABASES LIKE '%ABC%'" -} - -################################## -### grants example -################################## - -# grant and revoke privilege USAGE to ROLE on database -resource "snowflake_unsafe_execute" "test" { - execute = "GRANT USAGE ON DATABASE ABC TO ROLE XYZ" - revert = "REVOKE USAGE ON DATABASE ABC FROM ROLE XYZ" -} - -# grant and revoke with for_each -variable "database_grants" { - type = list(object({ - database_name = string - role_id = string - privileges = list(string) - })) -} - -resource "snowflake_unsafe_execute" "test" { - for_each = { for index, db_grant in var.database_grants : index => db_grant } - execute = "GRANT ${join(",", 
each.value.privileges)} ON DATABASE ${each.value.database_name} TO ROLE ${each.value.role_id}" - revert = "REVOKE ${join(",", each.value.privileges)} ON DATABASE ${each.value.database_name} FROM ROLE ${each.value.role_id}" -} - -################################## -### fixing bad configuration -################################## - -# bad revert - simple -# 1 - resource created with a bad revert; it is constructed, revert is not validated before destroy happens -resource "snowflake_unsafe_execute" "test" { - execute = "CREATE DATABASE ABC" - revert = "SELECT 1" -} - -# 2 - fix the revert first; resource won't be recreated -resource "snowflake_unsafe_execute" "test" { - execute = "CREATE DATABASE ABC" - revert = "DROP DATABASE ABC" -} - -# bad revert - complex (we assume that the problem is spotted after trying to change the execute) -# 1 - resource created with a bad revert; it is constructed, revert is not validated before destroy happens -resource "snowflake_unsafe_execute" "test" { - execute = "CREATE DATABASE ABC" - revert = "SELECT 1" -} - -# 2 - try to create different database; it will fail on bad destroy -resource "snowflake_unsafe_execute" "test" { - execute = "CREATE DATABASE XYZ" - revert = "SELECT 1" -} - -# 3 - fix the revert first -resource "snowflake_unsafe_execute" "test" { - execute = "CREATE DATABASE ABC" - revert = "DROP DATABASE ABC" -} - -# 4 - create different database updating revert also -resource "snowflake_unsafe_execute" "test" { - execute = "CREATE DATABASE XYZ" - revert = "DROP DATABASE XYZ" -} - -# bad query -# 1 - resource will be created; query_results will be empty -resource "snowflake_unsafe_execute" "test" { - execute = "CREATE DATABASE ABC" - revert = "DROP DATABASE ABC" - query = "bad query" -} - -# 2 - fix the query; query_results will be calculated; resource won't be recreated -resource "snowflake_unsafe_execute" "test" { - execute = "CREATE DATABASE ABC" - revert = "DROP DATABASE ABC" - query = "SHOW DATABASES LIKE '%ABC%'" -} 
diff --git a/framework/provider/provider.go b/framework/provider/provider.go index 235a6f3dd1..2ad2a26cd9 100644 --- a/framework/provider/provider.go +++ b/framework/provider/provider.go @@ -77,7 +77,6 @@ type snowflakeProviderModelV0 struct { OauthEndpoint types.String `tfsdk:"oauth_endpoint"` OauthRedirectURL types.String `tfsdk:"oauth_redirect_url"` BrowserAuth types.Bool `tfsdk:"browser_auth"` - PrivateKeyPath types.String `tfsdk:"private_key_path"` SessionParams types.Map `tfsdk:"session_params"` } @@ -111,11 +110,11 @@ func (p *SnowflakeProvider) Schema(ctx context.Context, req provider.SchemaReque DeprecationMessage: "Use `user` instead", }, "password": schema.StringAttribute{ - Description: "Password for username+password auth. Cannot be used with `browser_auth` or `private_key_path`. Can also be sourced from the `SNOWFLAKE_PASSWORD` environment variable.", + Description: "Password for username+password auth. Cannot be used with `browser_auth`. Can also be sourced from the `SNOWFLAKE_PASSWORD` environment variable.", Optional: true, Sensitive: true, Validators: []validator.String{ - stringvalidator.ConflictsWith(path.MatchRoot("browser_auth"), path.MatchRoot("private_key_path"), path.MatchRoot("private_key"), path.MatchRoot("private_key_passphrase"), path.MatchRoot("oauth_access_token"), path.MatchRoot("oauth_refresh_token")), + stringvalidator.ConflictsWith(path.MatchRoot("browser_auth"), path.MatchRoot("private_key"), path.MatchRoot("private_key_passphrase"), path.MatchRoot("oauth_access_token"), path.MatchRoot("oauth_refresh_token")), }, }, "warehouse": schema.StringAttribute{ @@ -225,7 +224,7 @@ func (p *SnowflakeProvider) Schema(ctx context.Context, req provider.SchemaReque Optional: true, Sensitive: true, Validators: []validator.String{ - stringvalidator.ConflictsWith(path.MatchRoot("browser_auth"), path.MatchRoot("password"), path.MatchRoot("private_key_path"), path.MatchRoot("oauth_access_token"), path.MatchRoot("oauth_refresh_token")), + 
stringvalidator.ConflictsWith(path.MatchRoot("browser_auth"), path.MatchRoot("password"), path.MatchRoot("oauth_access_token"), path.MatchRoot("oauth_refresh_token")), }, }, "private_key_passphrase": schema.StringAttribute{ @@ -233,7 +232,7 @@ func (p *SnowflakeProvider) Schema(ctx context.Context, req provider.SchemaReque Optional: true, Sensitive: true, Validators: []validator.String{ - stringvalidator.ConflictsWith(path.MatchRoot("browser_auth"), path.MatchRoot("password"), path.MatchRoot("private_key_path"), path.MatchRoot("oauth_access_token"), path.MatchRoot("oauth_refresh_token")), + stringvalidator.ConflictsWith(path.MatchRoot("browser_auth"), path.MatchRoot("password"), path.MatchRoot("oauth_access_token"), path.MatchRoot("oauth_refresh_token")), }, }, "disable_telemetry": schema.BoolAttribute{ @@ -277,20 +276,20 @@ func (p *SnowflakeProvider) Schema(ctx context.Context, req provider.SchemaReque DeprecationMessage: "Use `params` instead", }, "oauth_access_token": schema.StringAttribute{ - Description: "Token for use with OAuth. Generating the token is left to other tools. Cannot be used with `browser_auth`, `private_key_path`, `oauth_refresh_token` or `password`. Can also be sourced from `SNOWFLAKE_OAUTH_ACCESS_TOKEN` environment variable.", + Description: "Token for use with OAuth. Generating the token is left to other tools. Cannot be used with `browser_auth`, `oauth_refresh_token` or `password`. 
Can also be sourced from `SNOWFLAKE_OAUTH_ACCESS_TOKEN` environment variable.", Optional: true, Sensitive: true, Validators: []validator.String{ - stringvalidator.ConflictsWith(path.MatchRoot("browser_auth"), path.MatchRoot("private_key_path"), path.MatchRoot("private_key"), path.MatchRoot("private_key_passphrase"), path.MatchRoot("password"), path.MatchRoot("oauth_refresh_token")), + stringvalidator.ConflictsWith(path.MatchRoot("browser_auth"), path.MatchRoot("private_key"), path.MatchRoot("private_key_passphrase"), path.MatchRoot("password"), path.MatchRoot("oauth_refresh_token")), }, DeprecationMessage: "Use `token` instead", }, "oauth_refresh_token": schema.StringAttribute{ - Description: "Token for use with OAuth. Setup and generation of the token is left to other tools. Should be used in conjunction with `oauth_client_id`, `oauth_client_secret`, `oauth_endpoint`, `oauth_redirect_url`. Cannot be used with `browser_auth`, `private_key_path`, `oauth_access_token` or `password`. Can also be sourced from `SNOWFLAKE_OAUTH_REFRESH_TOKEN` environment variable.", + Description: "Token for use with OAuth. Setup and generation of the token is left to other tools. Should be used in conjunction with `oauth_client_id`, `oauth_client_secret`, `oauth_endpoint`, `oauth_redirect_url`. Cannot be used with `browser_auth`, `oauth_access_token` or `password`. 
Can also be sourced from `SNOWFLAKE_OAUTH_REFRESH_TOKEN` environment variable.", Optional: true, Sensitive: true, Validators: []validator.String{ - stringvalidator.ConflictsWith(path.MatchRoot("browser_auth"), path.MatchRoot("private_key_path"), path.MatchRoot("private_key"), path.MatchRoot("private_key_passphrase"), path.MatchRoot("password"), path.MatchRoot("oauth_access_token")), + stringvalidator.ConflictsWith(path.MatchRoot("browser_auth"), path.MatchRoot("private_key"), path.MatchRoot("private_key_passphrase"), path.MatchRoot("password"), path.MatchRoot("oauth_access_token")), stringvalidator.AlsoRequires(path.MatchRoot("oauth_client_id"), path.MatchRoot("oauth_client_secret"), path.MatchRoot("oauth_endpoint"), path.MatchRoot("oauth_redirect_url")), }, DeprecationMessage: "Use `token_accessor.0.refresh_token` instead", @@ -300,7 +299,7 @@ func (p *SnowflakeProvider) Schema(ctx context.Context, req provider.SchemaReque Optional: true, Sensitive: true, Validators: []validator.String{ - stringvalidator.ConflictsWith(path.MatchRoot("browser_auth"), path.MatchRoot("private_key_path"), path.MatchRoot("private_key"), path.MatchRoot("private_key_passphrase"), path.MatchRoot("password"), path.MatchRoot("oauth_access_token")), + stringvalidator.ConflictsWith(path.MatchRoot("browser_auth"), path.MatchRoot("private_key"), path.MatchRoot("private_key_passphrase"), path.MatchRoot("password"), path.MatchRoot("oauth_access_token")), stringvalidator.AlsoRequires(path.MatchRoot("oauth_refresh_token"), path.MatchRoot("oauth_client_secret"), path.MatchRoot("oauth_endpoint"), path.MatchRoot("oauth_redirect_url")), }, DeprecationMessage: "Use `token_accessor.0.client_id` instead", @@ -310,7 +309,7 @@ func (p *SnowflakeProvider) Schema(ctx context.Context, req provider.SchemaReque Optional: true, Sensitive: true, Validators: []validator.String{ - stringvalidator.ConflictsWith(path.MatchRoot("browser_auth"), path.MatchRoot("private_key_path"), path.MatchRoot("private_key"), 
path.MatchRoot("private_key_passphrase"), path.MatchRoot("password"), path.MatchRoot("oauth_access_token")), + stringvalidator.ConflictsWith(path.MatchRoot("browser_auth"), path.MatchRoot("private_key"), path.MatchRoot("private_key_passphrase"), path.MatchRoot("password"), path.MatchRoot("oauth_access_token")), stringvalidator.AlsoRequires(path.MatchRoot("oauth_refresh_token"), path.MatchRoot("oauth_client_id"), path.MatchRoot("oauth_endpoint"), path.MatchRoot("oauth_redirect_url")), }, DeprecationMessage: "Use `token_accessor.0.client_secret` instead", @@ -320,7 +319,7 @@ func (p *SnowflakeProvider) Schema(ctx context.Context, req provider.SchemaReque Optional: true, Sensitive: true, Validators: []validator.String{ - stringvalidator.ConflictsWith(path.MatchRoot("browser_auth"), path.MatchRoot("private_key_path"), path.MatchRoot("private_key"), path.MatchRoot("private_key_passphrase"), path.MatchRoot("password"), path.MatchRoot("oauth_access_token")), + stringvalidator.ConflictsWith(path.MatchRoot("browser_auth"), path.MatchRoot("private_key"), path.MatchRoot("private_key_passphrase"), path.MatchRoot("password"), path.MatchRoot("oauth_access_token")), stringvalidator.AlsoRequires(path.MatchRoot("oauth_refresh_token"), path.MatchRoot("oauth_client_id"), path.MatchRoot("oauth_client_secret"), path.MatchRoot("oauth_redirect_url")), }, DeprecationMessage: "Use `token_accessor.0.token_endpoint` instead", @@ -330,7 +329,7 @@ func (p *SnowflakeProvider) Schema(ctx context.Context, req provider.SchemaReque Optional: true, Sensitive: true, Validators: []validator.String{ - stringvalidator.ConflictsWith(path.MatchRoot("browser_auth"), path.MatchRoot("private_key_path"), path.MatchRoot("private_key"), path.MatchRoot("private_key_passphrase"), path.MatchRoot("password"), path.MatchRoot("oauth_access_token")), + stringvalidator.ConflictsWith(path.MatchRoot("browser_auth"), path.MatchRoot("private_key"), path.MatchRoot("private_key_passphrase"), path.MatchRoot("password"), 
path.MatchRoot("oauth_access_token")), stringvalidator.AlsoRequires(path.MatchRoot("oauth_refresh_token"), path.MatchRoot("oauth_client_id"), path.MatchRoot("oauth_client_secret"), path.MatchRoot("oauth_endpoint")), }, DeprecationMessage: "Use `token_accessor.0.redirect_uri` instead", @@ -341,15 +340,6 @@ func (p *SnowflakeProvider) Schema(ctx context.Context, req provider.SchemaReque Sensitive: false, DeprecationMessage: "Use `authenticator` instead", }, - "private_key_path": schema.StringAttribute{ - Description: "Path to a private key for using keypair authentication. Cannot be used with `browser_auth`, `oauth_access_token` or `password`. Can also be sourced from `SNOWFLAKE_PRIVATE_KEY_PATH` environment variable.", - Optional: true, - Sensitive: true, - Validators: []validator.String{ - stringvalidator.ConflictsWith(path.MatchRoot("browser_auth"), path.MatchRoot("oauth_access_token"), path.MatchRoot("password")), - }, - DeprecationMessage: "use the [file Function](https://developer.hashicorp.com/terraform/language/functions/file) instead", - }, }, Blocks: map[string]schema.Block{ "token_accessor": schema.ListNestedBlock{ @@ -612,16 +602,12 @@ func (p *SnowflakeProvider) Configure(ctx context.Context, req provider.Configur if data.PrivateKey.ValueString() != "" { privateKey = data.PrivateKey.ValueString() } - privateKeyPath := os.Getenv("SNOWFLAKE_PRIVATE_KEY_PATH") - if data.PrivateKeyPath.ValueString() != "" { - privateKeyPath = data.PrivateKeyPath.ValueString() - } privateKeyPassphrase := os.Getenv("SNOWFLAKE_PRIVATE_KEY_PASSPHRASE") if data.PrivateKeyPassphrase.ValueString() != "" { privateKeyPassphrase = data.PrivateKeyPassphrase.ValueString() } - if privateKey != "" || privateKeyPath != "" { - if v, err := getPrivateKey(privateKeyPath, privateKey, privateKeyPassphrase); err != nil && v != nil { + if privateKey != "" { + if v, err := getPrivateKey(privateKey, privateKeyPassphrase); err != nil && v != nil { config.PrivateKey = v } } diff --git 
a/framework/provider/provider_helpers.go b/framework/provider/provider_helpers.go index 76dac3c27c..424a57c435 100644 --- a/framework/provider/provider_helpers.go +++ b/framework/provider/provider_helpers.go @@ -13,21 +13,13 @@ import ( "strconv" "strings" - "github.com/mitchellh/go-homedir" "github.com/snowflakedb/gosnowflake" "github.com/youmark/pkcs8" "golang.org/x/crypto/ssh" ) -func getPrivateKey(privateKeyPath, privateKeyString, privateKeyPassphrase string) (*rsa.PrivateKey, error) { +func getPrivateKey(privateKeyString, privateKeyPassphrase string) (*rsa.PrivateKey, error) { privateKeyBytes := []byte(privateKeyString) - var err error - if len(privateKeyBytes) == 0 && privateKeyPath != "" { - privateKeyBytes, err = readFile(privateKeyPath) - if err != nil { - return nil, fmt.Errorf("private Key file could not be read err = %w", err) - } - } return parsePrivateKey(privateKeyBytes, []byte(privateKeyPassphrase)) } @@ -79,24 +71,6 @@ func getBoolEnv(key string, defaultValue bool) bool { } } -func readFile(privateKeyPath string) ([]byte, error) { - expandedPrivateKeyPath, err := homedir.Expand(privateKeyPath) - if err != nil { - return nil, fmt.Errorf("invalid Path to private key err = %w", err) - } - - privateKeyBytes, err := os.ReadFile(expandedPrivateKeyPath) - if err != nil { - return nil, fmt.Errorf("could not read private key err = %w", err) - } - - if len(privateKeyBytes) == 0 { - return nil, errors.New("private key is empty") - } - - return privateKeyBytes, nil -} - func parsePrivateKey(privateKeyBytes []byte, passhrase []byte) (*rsa.PrivateKey, error) { privateKeyBlock, _ := pem.Decode(privateKeyBytes) if privateKeyBlock == nil { diff --git a/go.mod b/go.mod index d599d2ef2c..fe8cc98a54 100644 --- a/go.mod +++ b/go.mod @@ -20,7 +20,6 @@ require ( github.com/hashicorp/terraform-plugin-testing v1.6.0 github.com/jmoiron/sqlx v1.3.5 github.com/luna-duclos/instrumentedsql v1.1.3 - github.com/mitchellh/go-homedir v1.1.0 github.com/pelletier/go-toml/v2 v2.1.1 
github.com/snowflakedb/gosnowflake v1.10.0 github.com/stretchr/testify v1.8.4 diff --git a/go.sum b/go.sum index 7693a11e05..8539a238d4 100644 --- a/go.sum +++ b/go.sum @@ -231,8 +231,6 @@ github.com/mattn/go-sqlite3 v1.14.6 h1:dNPt6NO46WmLVt2DLNpwczCmdV5boIZ6g/tlDrlRU github.com/mattn/go-sqlite3 v1.14.6/go.mod h1:NyWgC/yNuGj7Q9rpYnZvas74GogHl5/Z4A/KQRfk6bU= github.com/mitchellh/copystructure v1.2.0 h1:vpKXTN4ewci03Vljg/q9QvCGUDttBOGBIa15WveJJGw= github.com/mitchellh/copystructure v1.2.0/go.mod h1:qLl+cE2AmVv+CoeAwDPye/v+N2HKCj9FbZEVFJRxO9s= -github.com/mitchellh/go-homedir v1.1.0 h1:lukF9ziXFxDFPkA1vsr5zpc1XuPDn/wFntq5mG+4E0Y= -github.com/mitchellh/go-homedir v1.1.0/go.mod h1:SfyaCUpYCn1Vlf4IUYiD9fPX4A5wJrkLzIz1N1q0pr0= github.com/mitchellh/go-testing-interface v1.14.1 h1:jrgshOhYAUVNMAJiKbEu7EqAwgJJ2JqpQmpLJOu07cU= github.com/mitchellh/go-testing-interface v1.14.1/go.mod h1:gfgS7OtZj6MA4U1UrDRp04twqAjfvlZyCfX3sDjEym8= github.com/mitchellh/go-wordwrap v1.0.1 h1:TLuKupo69TCn6TQSyGxwI1EblZZEsQ0vMlAFQflz0v0= diff --git a/pkg/acceptance/bettertestspoc/config/providermodel/snowflake_model_ext.go b/pkg/acceptance/bettertestspoc/config/providermodel/snowflake_model_ext.go index a3947d4a11..e66051c0ae 100644 --- a/pkg/acceptance/bettertestspoc/config/providermodel/snowflake_model_ext.go +++ b/pkg/acceptance/bettertestspoc/config/providermodel/snowflake_model_ext.go @@ -48,6 +48,15 @@ func (m *SnowflakeModel) WithClientStoreTemporaryCredentialBool(clientStoreTempo return m } +func (m *SnowflakeModel) WithPreviewFeaturesEnabled(previewFeaturesEnabled ...string) *SnowflakeModel { + previewFeaturesEnabledStringVariables := make([]tfconfig.Variable, len(previewFeaturesEnabled)) + for i, v := range previewFeaturesEnabled { + previewFeaturesEnabledStringVariables[i] = tfconfig.StringVariable(v) + } + m.PreviewFeaturesEnabled = tfconfig.SetVariable(previewFeaturesEnabledStringVariables...) 
+ return m +} + func (m *SnowflakeModel) AllFields(tmpConfig *helpers.TmpTomlConfig, tmpUser *helpers.TmpServiceUser) *SnowflakeModel { return SnowflakeProvider(). WithProfile(tmpConfig.Profile). diff --git a/pkg/acceptance/bettertestspoc/config/providermodel/snowflake_model_gen.go b/pkg/acceptance/bettertestspoc/config/providermodel/snowflake_model_gen.go index 28ee44bd6b..c5a2b7eb00 100644 --- a/pkg/acceptance/bettertestspoc/config/providermodel/snowflake_model_gen.go +++ b/pkg/acceptance/bettertestspoc/config/providermodel/snowflake_model_gen.go @@ -9,10 +9,8 @@ import ( ) type SnowflakeModel struct { - Account tfconfig.Variable `json:"account,omitempty"` AccountName tfconfig.Variable `json:"account_name,omitempty"` Authenticator tfconfig.Variable `json:"authenticator,omitempty"` - BrowserAuth tfconfig.Variable `json:"browser_auth,omitempty"` ClientIp tfconfig.Variable `json:"client_ip,omitempty"` ClientRequestMfaToken tfconfig.Variable `json:"client_request_mfa_token,omitempty"` ClientStoreTemporaryCredential tfconfig.Variable `json:"client_store_temporary_credential,omitempty"` @@ -30,12 +28,6 @@ type SnowflakeModel struct { KeepSessionAlive tfconfig.Variable `json:"keep_session_alive,omitempty"` LoginTimeout tfconfig.Variable `json:"login_timeout,omitempty"` MaxRetryCount tfconfig.Variable `json:"max_retry_count,omitempty"` - OauthAccessToken tfconfig.Variable `json:"oauth_access_token,omitempty"` - OauthClientId tfconfig.Variable `json:"oauth_client_id,omitempty"` - OauthClientSecret tfconfig.Variable `json:"oauth_client_secret,omitempty"` - OauthEndpoint tfconfig.Variable `json:"oauth_endpoint,omitempty"` - OauthRedirectUrl tfconfig.Variable `json:"oauth_redirect_url,omitempty"` - OauthRefreshToken tfconfig.Variable `json:"oauth_refresh_token,omitempty"` OcspFailOpen tfconfig.Variable `json:"ocsp_fail_open,omitempty"` OktaUrl tfconfig.Variable `json:"okta_url,omitempty"` OrganizationName tfconfig.Variable `json:"organization_name,omitempty"` @@ -44,20 
+36,17 @@ type SnowflakeModel struct { PasscodeInPassword tfconfig.Variable `json:"passcode_in_password,omitempty"` Password tfconfig.Variable `json:"password,omitempty"` Port tfconfig.Variable `json:"port,omitempty"` + PreviewFeaturesEnabled tfconfig.Variable `json:"preview_features_enabled,omitempty"` PrivateKey tfconfig.Variable `json:"private_key,omitempty"` PrivateKeyPassphrase tfconfig.Variable `json:"private_key_passphrase,omitempty"` - PrivateKeyPath tfconfig.Variable `json:"private_key_path,omitempty"` Profile tfconfig.Variable `json:"profile,omitempty"` Protocol tfconfig.Variable `json:"protocol,omitempty"` - Region tfconfig.Variable `json:"region,omitempty"` RequestTimeout tfconfig.Variable `json:"request_timeout,omitempty"` Role tfconfig.Variable `json:"role,omitempty"` - SessionParams tfconfig.Variable `json:"session_params,omitempty"` TmpDirectoryPath tfconfig.Variable `json:"tmp_directory_path,omitempty"` Token tfconfig.Variable `json:"token,omitempty"` TokenAccessor tfconfig.Variable `json:"token_accessor,omitempty"` User tfconfig.Variable `json:"user,omitempty"` - Username tfconfig.Variable `json:"username,omitempty"` ValidateDefaultParameters tfconfig.Variable `json:"validate_default_parameters,omitempty"` Warehouse tfconfig.Variable `json:"warehouse,omitempty"` @@ -84,11 +73,6 @@ func SnowflakeProviderAlias( // below all the proper values // ///////////////////////////////// -func (s *SnowflakeModel) WithAccount(account string) *SnowflakeModel { - s.Account = tfconfig.StringVariable(account) - return s -} - func (s *SnowflakeModel) WithAccountName(accountName string) *SnowflakeModel { s.AccountName = tfconfig.StringVariable(accountName) return s @@ -99,11 +83,6 @@ func (s *SnowflakeModel) WithAuthenticator(authenticator string) *SnowflakeModel return s } -func (s *SnowflakeModel) WithBrowserAuth(browserAuth bool) *SnowflakeModel { - s.BrowserAuth = tfconfig.BoolVariable(browserAuth) - return s -} - func (s *SnowflakeModel) WithClientIp(clientIp 
string) *SnowflakeModel { s.ClientIp = tfconfig.StringVariable(clientIp) return s @@ -189,36 +168,6 @@ func (s *SnowflakeModel) WithMaxRetryCount(maxRetryCount int) *SnowflakeModel { return s } -func (s *SnowflakeModel) WithOauthAccessToken(oauthAccessToken string) *SnowflakeModel { - s.OauthAccessToken = tfconfig.StringVariable(oauthAccessToken) - return s -} - -func (s *SnowflakeModel) WithOauthClientId(oauthClientId string) *SnowflakeModel { - s.OauthClientId = tfconfig.StringVariable(oauthClientId) - return s -} - -func (s *SnowflakeModel) WithOauthClientSecret(oauthClientSecret string) *SnowflakeModel { - s.OauthClientSecret = tfconfig.StringVariable(oauthClientSecret) - return s -} - -func (s *SnowflakeModel) WithOauthEndpoint(oauthEndpoint string) *SnowflakeModel { - s.OauthEndpoint = tfconfig.StringVariable(oauthEndpoint) - return s -} - -func (s *SnowflakeModel) WithOauthRedirectUrl(oauthRedirectUrl string) *SnowflakeModel { - s.OauthRedirectUrl = tfconfig.StringVariable(oauthRedirectUrl) - return s -} - -func (s *SnowflakeModel) WithOauthRefreshToken(oauthRefreshToken string) *SnowflakeModel { - s.OauthRefreshToken = tfconfig.StringVariable(oauthRefreshToken) - return s -} - func (s *SnowflakeModel) WithOcspFailOpen(ocspFailOpen string) *SnowflakeModel { s.OcspFailOpen = tfconfig.StringVariable(ocspFailOpen) return s @@ -256,6 +205,8 @@ func (s *SnowflakeModel) WithPort(port int) *SnowflakeModel { return s } +// preview_features_enabled attribute type is not yet supported, so WithPreviewFeaturesEnabled can't be generated + func (s *SnowflakeModel) WithPrivateKey(privateKey string) *SnowflakeModel { s.PrivateKey = tfconfig.StringVariable(privateKey) return s @@ -266,11 +217,6 @@ func (s *SnowflakeModel) WithPrivateKeyPassphrase(privateKeyPassphrase string) * return s } -func (s *SnowflakeModel) WithPrivateKeyPath(privateKeyPath string) *SnowflakeModel { - s.PrivateKeyPath = tfconfig.StringVariable(privateKeyPath) - return s -} - func (s *SnowflakeModel) 
WithProfile(profile string) *SnowflakeModel { s.Profile = tfconfig.StringVariable(profile) return s @@ -281,11 +227,6 @@ func (s *SnowflakeModel) WithProtocol(protocol string) *SnowflakeModel { return s } -func (s *SnowflakeModel) WithRegion(region string) *SnowflakeModel { - s.Region = tfconfig.StringVariable(region) - return s -} - func (s *SnowflakeModel) WithRequestTimeout(requestTimeout int) *SnowflakeModel { s.RequestTimeout = tfconfig.IntegerVariable(requestTimeout) return s @@ -296,8 +237,6 @@ func (s *SnowflakeModel) WithRole(role string) *SnowflakeModel { return s } -// session_params attribute type is not yet supported, so WithSessionParams can't be generated - func (s *SnowflakeModel) WithTmpDirectoryPath(tmpDirectoryPath string) *SnowflakeModel { s.TmpDirectoryPath = tfconfig.StringVariable(tmpDirectoryPath) return s @@ -315,11 +254,6 @@ func (s *SnowflakeModel) WithUser(user string) *SnowflakeModel { return s } -func (s *SnowflakeModel) WithUsername(username string) *SnowflakeModel { - s.Username = tfconfig.StringVariable(username) - return s -} - func (s *SnowflakeModel) WithValidateDefaultParameters(validateDefaultParameters string) *SnowflakeModel { s.ValidateDefaultParameters = tfconfig.StringVariable(validateDefaultParameters) return s @@ -334,11 +268,6 @@ func (s *SnowflakeModel) WithWarehouse(warehouse string) *SnowflakeModel { // below it's possible to set any value // ////////////////////////////////////////// -func (s *SnowflakeModel) WithAccountValue(value tfconfig.Variable) *SnowflakeModel { - s.Account = value - return s -} - func (s *SnowflakeModel) WithAccountNameValue(value tfconfig.Variable) *SnowflakeModel { s.AccountName = value return s @@ -349,11 +278,6 @@ func (s *SnowflakeModel) WithAuthenticatorValue(value tfconfig.Variable) *Snowfl return s } -func (s *SnowflakeModel) WithBrowserAuthValue(value tfconfig.Variable) *SnowflakeModel { - s.BrowserAuth = value - return s -} - func (s *SnowflakeModel) WithClientIpValue(value 
tfconfig.Variable) *SnowflakeModel { s.ClientIp = value return s @@ -439,36 +363,6 @@ func (s *SnowflakeModel) WithMaxRetryCountValue(value tfconfig.Variable) *Snowfl return s } -func (s *SnowflakeModel) WithOauthAccessTokenValue(value tfconfig.Variable) *SnowflakeModel { - s.OauthAccessToken = value - return s -} - -func (s *SnowflakeModel) WithOauthClientIdValue(value tfconfig.Variable) *SnowflakeModel { - s.OauthClientId = value - return s -} - -func (s *SnowflakeModel) WithOauthClientSecretValue(value tfconfig.Variable) *SnowflakeModel { - s.OauthClientSecret = value - return s -} - -func (s *SnowflakeModel) WithOauthEndpointValue(value tfconfig.Variable) *SnowflakeModel { - s.OauthEndpoint = value - return s -} - -func (s *SnowflakeModel) WithOauthRedirectUrlValue(value tfconfig.Variable) *SnowflakeModel { - s.OauthRedirectUrl = value - return s -} - -func (s *SnowflakeModel) WithOauthRefreshTokenValue(value tfconfig.Variable) *SnowflakeModel { - s.OauthRefreshToken = value - return s -} - func (s *SnowflakeModel) WithOcspFailOpenValue(value tfconfig.Variable) *SnowflakeModel { s.OcspFailOpen = value return s @@ -509,6 +403,11 @@ func (s *SnowflakeModel) WithPortValue(value tfconfig.Variable) *SnowflakeModel return s } +func (s *SnowflakeModel) WithPreviewFeaturesEnabledValue(value tfconfig.Variable) *SnowflakeModel { + s.PreviewFeaturesEnabled = value + return s +} + func (s *SnowflakeModel) WithPrivateKeyValue(value tfconfig.Variable) *SnowflakeModel { s.PrivateKey = value return s @@ -519,11 +418,6 @@ func (s *SnowflakeModel) WithPrivateKeyPassphraseValue(value tfconfig.Variable) return s } -func (s *SnowflakeModel) WithPrivateKeyPathValue(value tfconfig.Variable) *SnowflakeModel { - s.PrivateKeyPath = value - return s -} - func (s *SnowflakeModel) WithProfileValue(value tfconfig.Variable) *SnowflakeModel { s.Profile = value return s @@ -534,11 +428,6 @@ func (s *SnowflakeModel) WithProtocolValue(value tfconfig.Variable) *SnowflakeMo return s } -func (s 
*SnowflakeModel) WithRegionValue(value tfconfig.Variable) *SnowflakeModel { - s.Region = value - return s -} - func (s *SnowflakeModel) WithRequestTimeoutValue(value tfconfig.Variable) *SnowflakeModel { s.RequestTimeout = value return s @@ -549,11 +438,6 @@ func (s *SnowflakeModel) WithRoleValue(value tfconfig.Variable) *SnowflakeModel return s } -func (s *SnowflakeModel) WithSessionParamsValue(value tfconfig.Variable) *SnowflakeModel { - s.SessionParams = value - return s -} - func (s *SnowflakeModel) WithTmpDirectoryPathValue(value tfconfig.Variable) *SnowflakeModel { s.TmpDirectoryPath = value return s @@ -574,11 +458,6 @@ func (s *SnowflakeModel) WithUserValue(value tfconfig.Variable) *SnowflakeModel return s } -func (s *SnowflakeModel) WithUsernameValue(value tfconfig.Variable) *SnowflakeModel { - s.Username = value - return s -} - func (s *SnowflakeModel) WithValidateDefaultParametersValue(value tfconfig.Variable) *SnowflakeModel { s.ValidateDefaultParameters = value return s diff --git a/pkg/acceptance/check_destroy.go b/pkg/acceptance/check_destroy.go index 6d056a613b..a6b918224d 100644 --- a/pkg/acceptance/check_destroy.go +++ b/pkg/acceptance/check_destroy.go @@ -67,15 +67,13 @@ func decodeSnowflakeId(rs *terraform.ResourceState, resource resources.Resource) switch resource { case resources.ExternalFunction: return sdk.NewSchemaObjectIdentifierFromFullyQualifiedName(rs.Primary.ID), nil - case resources.Function, - resources.FunctionJava, + case resources.FunctionJava, resources.FunctionJavascript, resources.FunctionPython, resources.FunctionScala, resources.FunctionSql: return sdk.ParseSchemaObjectIdentifierWithArguments(rs.Primary.ID) - case resources.Procedure, - resources.ProcedureJava, + case resources.ProcedureJava, resources.ProcedureJavascript, resources.ProcedurePython, resources.ProcedureScala, @@ -122,9 +120,6 @@ var showByIdFunctions = map[resources.Resource]showByIdFunc{ resources.Database: func(ctx context.Context, client *sdk.Client, id 
sdk.ObjectIdentifier) error { return runShowById(ctx, id, client.Databases.ShowByID) }, - resources.DatabaseOld: func(ctx context.Context, client *sdk.Client, id sdk.ObjectIdentifier) error { - return runShowById(ctx, id, client.Databases.ShowByID) - }, resources.DatabaseRole: func(ctx context.Context, client *sdk.Client, id sdk.ObjectIdentifier) error { return runShowById(ctx, id, client.DatabaseRoles.ShowByID) }, @@ -152,9 +147,6 @@ var showByIdFunctions = map[resources.Resource]showByIdFunc{ resources.FileFormat: func(ctx context.Context, client *sdk.Client, id sdk.ObjectIdentifier) error { return runShowById(ctx, id, client.FileFormats.ShowByID) }, - resources.Function: func(ctx context.Context, client *sdk.Client, id sdk.ObjectIdentifier) error { - return runShowById(ctx, id, client.Functions.ShowByID) - }, resources.FunctionJava: func(ctx context.Context, client *sdk.Client, id sdk.ObjectIdentifier) error { return runShowById(ctx, id, client.Functions.ShowByID) }, @@ -203,9 +195,6 @@ var showByIdFunctions = map[resources.Resource]showByIdFunc{ resources.Pipe: func(ctx context.Context, client *sdk.Client, id sdk.ObjectIdentifier) error { return runShowById(ctx, id, client.Pipes.ShowByID) }, - resources.Procedure: func(ctx context.Context, client *sdk.Client, id sdk.ObjectIdentifier) error { - return runShowById(ctx, id, client.Procedures.ShowByID) - }, resources.ProcedureJava: func(ctx context.Context, client *sdk.Client, id sdk.ObjectIdentifier) error { return runShowById(ctx, id, client.Procedures.ShowByID) }, @@ -224,9 +213,6 @@ var showByIdFunctions = map[resources.Resource]showByIdFunc{ resources.ResourceMonitor: func(ctx context.Context, client *sdk.Client, id sdk.ObjectIdentifier) error { return runShowById(ctx, id, client.ResourceMonitors.ShowByID) }, - resources.Role: func(ctx context.Context, client *sdk.Client, id sdk.ObjectIdentifier) error { - return runShowById(ctx, id, client.Roles.ShowByID) - }, resources.RowAccessPolicy: func(ctx 
context.Context, client *sdk.Client, id sdk.ObjectIdentifier) error { return runShowById(ctx, id, client.RowAccessPolicies.ShowByID) }, @@ -275,9 +261,6 @@ var showByIdFunctions = map[resources.Resource]showByIdFunc{ resources.StorageIntegration: func(ctx context.Context, client *sdk.Client, id sdk.ObjectIdentifier) error { return runShowById(ctx, id, client.StorageIntegrations.ShowByID) }, - resources.Stream: func(ctx context.Context, client *sdk.Client, id sdk.ObjectIdentifier) error { - return runShowById(ctx, id, client.Streams.ShowByID) - }, resources.StreamOnDirectoryTable: func(ctx context.Context, client *sdk.Client, id sdk.ObjectIdentifier) error { return runShowById(ctx, id, client.Streams.ShowByID) }, diff --git a/pkg/acceptance/testenvs/testing_environment_variables.go b/pkg/acceptance/testenvs/testing_environment_variables.go index 22ffdeb072..f6320d1d87 100644 --- a/pkg/acceptance/testenvs/testing_environment_variables.go +++ b/pkg/acceptance/testenvs/testing_environment_variables.go @@ -33,6 +33,7 @@ const ( EnableSweep env = "TEST_SF_TF_ENABLE_SWEEP" EnableManual env = "TEST_SF_TF_ENABLE_MANUAL_TESTS" ConfigureClientOnce env = "SF_TF_ACC_TEST_CONFIGURE_CLIENT_ONCE" + EnableAllPreviewFeatures env = "SF_TF_ACC_TEST_ENABLE_ALL_PREVIEW_FEATURES" TestObjectsSuffix env = "TEST_SF_TF_TEST_OBJECT_SUFFIX" RequireTestObjectsSuffix env = "TEST_SF_TF_REQUIRE_TEST_OBJECT_SUFFIX" diff --git a/pkg/datasources/alerts.go b/pkg/datasources/alerts.go index ac0d643528..f06c3c6ae3 100644 --- a/pkg/datasources/alerts.go +++ b/pkg/datasources/alerts.go @@ -5,6 +5,7 @@ import ( "log" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/provider/datasources" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/provider/previewfeatures" "github.com/hashicorp/terraform-plugin-sdk/v2/diag" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/internal/provider" @@ -79,7 +80,7 @@ var alertsSchema = map[string]*schema.Schema{ // Alerts Snowflake Roles 
resource. func Alerts() *schema.Resource { return &schema.Resource{ - ReadContext: TrackingReadWrapper(datasources.Alerts, ReadAlerts), + ReadContext: PreviewFeatureReadWrapper(string(previewfeatures.AlertsDatasource), TrackingReadWrapper(datasources.Alerts, ReadAlerts)), Schema: alertsSchema, } } diff --git a/pkg/datasources/common.go b/pkg/datasources/common.go index 9b4f354dda..d5a2937c59 100644 --- a/pkg/datasources/common.go +++ b/pkg/datasources/common.go @@ -4,8 +4,10 @@ import ( "context" "fmt" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/internal/provider" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/internal/tracking" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/provider/datasources" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/provider/previewfeatures" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/resources" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk" @@ -217,3 +219,17 @@ func TrackingReadWrapper(datasourceName datasources.Datasource, readImplementati return readImplementation(ctx, d, meta) } } + +func PreviewFeatureReadWrapper(featureRaw string, readFunc schema.ReadContextFunc) schema.ReadContextFunc { + return func(ctx context.Context, d *schema.ResourceData, meta any) diag.Diagnostics { + enabled := meta.(*provider.Context).EnabledFeatures + feature, err := previewfeatures.StringToFeature(featureRaw) + if err != nil { + return diag.FromErr(err) + } + if err := previewfeatures.EnsurePreviewFeatureEnabled(feature, enabled); err != nil { + return diag.FromErr(err) + } + return readFunc(ctx, d, meta) + } +} diff --git a/pkg/datasources/cortex_search_services.go b/pkg/datasources/cortex_search_services.go index b657d9f156..7fad174a71 100644 --- a/pkg/datasources/cortex_search_services.go +++ b/pkg/datasources/cortex_search_services.go @@ -5,6 +5,7 @@ import ( "log" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/provider/datasources" + 
"github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/provider/previewfeatures" "github.com/hashicorp/terraform-plugin-sdk/v2/diag" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/internal/provider" @@ -111,7 +112,7 @@ var cortexSearchServicesSchema = map[string]*schema.Schema{ // CortexSearchServices Snowflake Cortex search services resource. func CortexSearchServices() *schema.Resource { return &schema.Resource{ - ReadContext: TrackingReadWrapper(datasources.CortexSearchServices, ReadCortexSearchServices), + ReadContext: PreviewFeatureReadWrapper(string(previewfeatures.CortexSearchServicesDatasource), TrackingReadWrapper(datasources.CortexSearchServices, ReadCortexSearchServices)), Schema: cortexSearchServicesSchema, } } diff --git a/pkg/datasources/current_account.go b/pkg/datasources/current_account.go index c3ba23e1fa..89aeaf121d 100644 --- a/pkg/datasources/current_account.go +++ b/pkg/datasources/current_account.go @@ -9,6 +9,7 @@ import ( "github.com/hashicorp/terraform-plugin-sdk/v2/diag" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/internal/provider" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/provider/previewfeatures" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" ) @@ -36,7 +37,7 @@ var currentAccountSchema = map[string]*schema.Schema{ // CurrentAccount the Snowflake current account resource. 
func CurrentAccount() *schema.Resource { return &schema.Resource{ - ReadContext: TrackingReadWrapper(datasources.CurrentAccount, ReadCurrentAccount), + ReadContext: PreviewFeatureReadWrapper(string(previewfeatures.CurrentAccountDatasource), TrackingReadWrapper(datasources.CurrentAccount, ReadCurrentAccount)), Schema: currentAccountSchema, } } diff --git a/pkg/datasources/current_account_acceptance_test.go b/pkg/datasources/current_account_acceptance_test.go index df58a2001b..3e10923ef6 100644 --- a/pkg/datasources/current_account_acceptance_test.go +++ b/pkg/datasources/current_account_acceptance_test.go @@ -16,7 +16,6 @@ func TestAcc_CurrentAccount(t *testing.T) { TerraformVersionChecks: []tfversion.TerraformVersionCheck{ tfversion.RequireAbove(tfversion.Version1_5_0), }, - CheckDestroy: nil, Steps: []resource.TestStep{ { Config: currentAccount(), diff --git a/pkg/datasources/current_role.go b/pkg/datasources/current_role.go index e078773590..57d458c38e 100644 --- a/pkg/datasources/current_role.go +++ b/pkg/datasources/current_role.go @@ -5,6 +5,7 @@ import ( "log" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/provider/datasources" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/provider/previewfeatures" "github.com/hashicorp/terraform-plugin-sdk/v2/diag" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/helpers" @@ -22,7 +23,7 @@ var currentRoleSchema = map[string]*schema.Schema{ func CurrentRole() *schema.Resource { return &schema.Resource{ - ReadContext: TrackingReadWrapper(datasources.CurrentRole, ReadCurrentRole), + ReadContext: PreviewFeatureReadWrapper(string(previewfeatures.CurrentRoleDatasource), TrackingReadWrapper(datasources.CurrentRole, ReadCurrentRole)), Schema: currentRoleSchema, } } diff --git a/pkg/datasources/database.go b/pkg/datasources/database.go index d04595cad8..ec6774436e 100644 --- a/pkg/datasources/database.go +++ b/pkg/datasources/database.go @@ -4,6 +4,7 @@ import ( "context" 
"github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/provider/datasources" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/provider/previewfeatures" "github.com/hashicorp/terraform-plugin-sdk/v2/diag" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/helpers" @@ -57,7 +58,7 @@ var databaseSchema = map[string]*schema.Schema{ // Database the Snowflake Database resource. func Database() *schema.Resource { return &schema.Resource{ - ReadContext: TrackingReadWrapper(datasources.Database, ReadDatabase), + ReadContext: PreviewFeatureReadWrapper(string(previewfeatures.DatabaseDatasource), TrackingReadWrapper(datasources.Database, ReadDatabase)), Schema: databaseSchema, } } diff --git a/pkg/datasources/database_role.go b/pkg/datasources/database_role.go index dc33ac89fa..e72b862133 100644 --- a/pkg/datasources/database_role.go +++ b/pkg/datasources/database_role.go @@ -5,6 +5,7 @@ import ( "log" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/provider/datasources" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/provider/previewfeatures" "github.com/hashicorp/terraform-plugin-sdk/v2/diag" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/internal/provider" @@ -39,7 +40,7 @@ var databaseRoleSchema = map[string]*schema.Schema{ // DatabaseRole Snowflake Database Role resource. 
func DatabaseRole() *schema.Resource { return &schema.Resource{ - ReadContext: TrackingReadWrapper(datasources.DatabaseRole, ReadDatabaseRole), + ReadContext: PreviewFeatureReadWrapper(string(previewfeatures.DatabaseRoleDatasource), TrackingReadWrapper(datasources.DatabaseRole, ReadDatabaseRole)), Schema: databaseRoleSchema, } } diff --git a/pkg/datasources/dynamic_tables.go b/pkg/datasources/dynamic_tables.go index 8b5468ef00..8d235c2c8d 100644 --- a/pkg/datasources/dynamic_tables.go +++ b/pkg/datasources/dynamic_tables.go @@ -5,6 +5,7 @@ import ( "log" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/provider/datasources" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/provider/previewfeatures" "github.com/hashicorp/terraform-plugin-sdk/v2/diag" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/internal/provider" @@ -197,7 +198,7 @@ var dynamicTablesSchema = map[string]*schema.Schema{ // DynamicTables Snowflake Dynamic Tables resource. func DynamicTables() *schema.Resource { return &schema.Resource{ - ReadContext: TrackingReadWrapper(datasources.DynamicTables, ReadDynamicTables), + ReadContext: PreviewFeatureReadWrapper(string(previewfeatures.DynamicTablesDatasource), TrackingReadWrapper(datasources.DynamicTables, ReadDynamicTables)), Schema: dynamicTablesSchema, } } diff --git a/pkg/datasources/external_functions.go b/pkg/datasources/external_functions.go index 9a85bd08c3..db45c1a82d 100644 --- a/pkg/datasources/external_functions.go +++ b/pkg/datasources/external_functions.go @@ -6,6 +6,7 @@ import ( "strings" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/provider/datasources" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/provider/previewfeatures" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/internal/provider" @@ -61,7 +62,7 @@ var externalFunctionsSchema = map[string]*schema.Schema{ func ExternalFunctions() *schema.Resource { return &schema.Resource{ - ReadContext: 
TrackingReadWrapper(datasources.ExternalFunctions, ReadContextExternalFunctions), + ReadContext: PreviewFeatureReadWrapper(string(previewfeatures.ExternalFunctionsDatasource), TrackingReadWrapper(datasources.ExternalFunctions, ReadContextExternalFunctions)), Schema: externalFunctionsSchema, } } diff --git a/pkg/datasources/external_tables.go b/pkg/datasources/external_tables.go index 7c8c40ff52..45bb94651d 100644 --- a/pkg/datasources/external_tables.go +++ b/pkg/datasources/external_tables.go @@ -5,6 +5,7 @@ import ( "log" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/provider/datasources" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/provider/previewfeatures" "github.com/hashicorp/terraform-plugin-sdk/v2/diag" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/internal/provider" @@ -57,7 +58,7 @@ var externalTablesSchema = map[string]*schema.Schema{ func ExternalTables() *schema.Resource { return &schema.Resource{ - ReadContext: TrackingReadWrapper(datasources.ExternalTables, ReadExternalTables), + ReadContext: PreviewFeatureReadWrapper(string(previewfeatures.ExternalTablesDatasource), TrackingReadWrapper(datasources.ExternalTables, ReadExternalTables)), Schema: externalTablesSchema, } } diff --git a/pkg/datasources/failover_groups.go b/pkg/datasources/failover_groups.go index 85c50fd2c0..8d4cd0a905 100644 --- a/pkg/datasources/failover_groups.go +++ b/pkg/datasources/failover_groups.go @@ -4,6 +4,7 @@ import ( "context" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/provider/datasources" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/provider/previewfeatures" "github.com/hashicorp/terraform-plugin-sdk/v2/diag" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/internal/provider" @@ -120,7 +121,7 @@ var failoverGroupsSchema = map[string]*schema.Schema{ // FailoverGroups Snowflake FailoverGroups resource. 
func FailoverGroups() *schema.Resource { return &schema.Resource{ - ReadContext: TrackingReadWrapper(datasources.FailoverGroups, ReadFailoverGroups), + ReadContext: PreviewFeatureReadWrapper(string(previewfeatures.FailoverGroupsDatasource), TrackingReadWrapper(datasources.FailoverGroups, ReadFailoverGroups)), Schema: failoverGroupsSchema, } } diff --git a/pkg/datasources/file_formats.go b/pkg/datasources/file_formats.go index 465ea03c4a..3864bda9ee 100644 --- a/pkg/datasources/file_formats.go +++ b/pkg/datasources/file_formats.go @@ -5,6 +5,7 @@ import ( "fmt" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/provider/datasources" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/provider/previewfeatures" "github.com/hashicorp/terraform-plugin-sdk/v2/diag" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/internal/provider" @@ -59,7 +60,7 @@ var fileFormatsSchema = map[string]*schema.Schema{ func FileFormats() *schema.Resource { return &schema.Resource{ - ReadContext: TrackingReadWrapper(datasources.FileFormats, ReadFileFormats), + ReadContext: PreviewFeatureReadWrapper(string(previewfeatures.FileFormatsDatasource), TrackingReadWrapper(datasources.FileFormats, ReadFileFormats)), Schema: fileFormatsSchema, } } diff --git a/pkg/datasources/functions.go b/pkg/datasources/functions.go index 8dc158b406..fcf325c9c9 100644 --- a/pkg/datasources/functions.go +++ b/pkg/datasources/functions.go @@ -5,6 +5,7 @@ import ( "fmt" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/provider/datasources" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/provider/previewfeatures" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/internal/provider" @@ -66,7 +67,7 @@ var functionsSchema = map[string]*schema.Schema{ func Functions() *schema.Resource { return &schema.Resource{ - ReadContext: TrackingReadWrapper(datasources.Functions, ReadContextFunctions), + ReadContext: 
PreviewFeatureReadWrapper(string(previewfeatures.FunctionsDatasource), TrackingReadWrapper(datasources.Functions, ReadContextFunctions)), Schema: functionsSchema, } } diff --git a/pkg/datasources/materialized_views.go b/pkg/datasources/materialized_views.go index 18f3b796e7..9724e9a560 100644 --- a/pkg/datasources/materialized_views.go +++ b/pkg/datasources/materialized_views.go @@ -5,6 +5,7 @@ import ( "log" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/provider/datasources" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/provider/previewfeatures" "github.com/hashicorp/terraform-plugin-sdk/v2/diag" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/internal/provider" @@ -55,7 +56,7 @@ var materializedViewsSchema = map[string]*schema.Schema{ func MaterializedViews() *schema.Resource { return &schema.Resource{ - ReadContext: TrackingReadWrapper(datasources.MaterializedViews, ReadMaterializedViews), + ReadContext: PreviewFeatureReadWrapper(string(previewfeatures.MaterializedViewsDatasource), TrackingReadWrapper(datasources.MaterializedViews, ReadMaterializedViews)), Schema: materializedViewsSchema, } } diff --git a/pkg/datasources/parameters.go b/pkg/datasources/parameters.go index 8c88eb1222..d01738ab87 100644 --- a/pkg/datasources/parameters.go +++ b/pkg/datasources/parameters.go @@ -6,6 +6,7 @@ import ( "strings" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/provider/datasources" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/provider/previewfeatures" "github.com/hashicorp/terraform-plugin-sdk/v2/diag" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/internal/provider" @@ -82,7 +83,7 @@ var parametersSchema = map[string]*schema.Schema{ func Parameters() *schema.Resource { return &schema.Resource{ - ReadContext: TrackingReadWrapper(datasources.Parameters, ReadParameters), + ReadContext: PreviewFeatureReadWrapper(string(previewfeatures.ParametersDatasource), 
TrackingReadWrapper(datasources.Parameters, ReadParameters)), Schema: parametersSchema, } } diff --git a/pkg/datasources/parameters_acceptance_test.go b/pkg/datasources/parameters_acceptance_test.go index 29c9dfba7a..9fbef94d26 100644 --- a/pkg/datasources/parameters_acceptance_test.go +++ b/pkg/datasources/parameters_acceptance_test.go @@ -6,8 +6,6 @@ import ( acc "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance" - "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/testenvs" - "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk" "github.com/hashicorp/terraform-plugin-testing/helper/resource" "github.com/hashicorp/terraform-plugin-testing/tfversion" ) @@ -99,38 +97,6 @@ func TestAcc_Parameters_TransactionAbortOnErrorCanBeSet(t *testing.T) { }) } -// proves https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2353 is fixed -// done on user, to not interfere with other parallel tests on the same account -func TestAcc_Parameters_QuotedIdentifiersIgnoreCaseCanBeSet(t *testing.T) { - _ = testenvs.GetOrSkipTest(t, testenvs.EnableAcceptance) - - user, userCleanup := acc.TestClient().User.CreateUser(t) - t.Cleanup(userCleanup) - - resource.Test(t, resource.TestCase{ - ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, - PreCheck: func() { acc.TestAccPreCheck(t) }, - TerraformVersionChecks: []tfversion.TerraformVersionCheck{ - tfversion.RequireAbove(tfversion.Version1_5_0), - }, - Steps: []resource.TestStep{ - { - Config: sessionParameterOnUser(user.ID()), - }, - }, - }) -} - -func sessionParameterOnUser(userId sdk.AccountObjectIdentifier) string { - return fmt.Sprintf( - ` - resource "snowflake_session_parameter" "test" { - key = "QUOTED_IDENTIFIERS_IGNORE_CASE" - value = "true" - user = %[1]s - }`, userId.FullyQualifiedName()) -} - func parametersConfigOnAccount() string { return `data "snowflake_parameters" "p" { parameter_type = "ACCOUNT" diff --git a/pkg/datasources/pipes.go 
b/pkg/datasources/pipes.go index 9cca5a20b2..8ac99aeee8 100644 --- a/pkg/datasources/pipes.go +++ b/pkg/datasources/pipes.go @@ -6,6 +6,7 @@ import ( "log" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/provider/datasources" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/provider/previewfeatures" "github.com/hashicorp/terraform-plugin-sdk/v2/diag" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/internal/provider" @@ -60,7 +61,7 @@ var pipesSchema = map[string]*schema.Schema{ func Pipes() *schema.Resource { return &schema.Resource{ - ReadContext: TrackingReadWrapper(datasources.Pipes, ReadPipes), + ReadContext: PreviewFeatureReadWrapper(string(previewfeatures.PipesDatasource), TrackingReadWrapper(datasources.Pipes, ReadPipes)), Schema: pipesSchema, } } diff --git a/pkg/datasources/procedures.go b/pkg/datasources/procedures.go index c0dd714ff1..5cb4f7dc6c 100644 --- a/pkg/datasources/procedures.go +++ b/pkg/datasources/procedures.go @@ -7,6 +7,7 @@ import ( "strings" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/provider/datasources" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/provider/previewfeatures" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/internal/provider" @@ -67,7 +68,7 @@ var proceduresSchema = map[string]*schema.Schema{ func Procedures() *schema.Resource { return &schema.Resource{ - ReadContext: TrackingReadWrapper(datasources.Procedures, ReadContextProcedures), + ReadContext: PreviewFeatureReadWrapper(string(previewfeatures.ProceduresDatasource), TrackingReadWrapper(datasources.Procedures, ReadContextProcedures)), Schema: proceduresSchema, } } diff --git a/pkg/datasources/role.go b/pkg/datasources/role.go deleted file mode 100644 index bb168351d7..0000000000 --- a/pkg/datasources/role.go +++ /dev/null @@ -1,67 +0,0 @@ -package datasources - -import ( - "context" - "log" - - "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/provider/datasources" - 
"github.com/hashicorp/terraform-plugin-sdk/v2/diag" - - "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/helpers" - - "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/internal/provider" - - "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk" - "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" -) - -var roleSchema = map[string]*schema.Schema{ - "name": { - Type: schema.TypeString, - Required: true, - Description: "The role for which to return metadata.", - }, - "comment": { - Type: schema.TypeString, - Computed: true, - Description: "The comment on the role", - }, -} - -// Role Snowflake Role resource. -func Role() *schema.Resource { - return &schema.Resource{ - ReadContext: TrackingReadWrapper(datasources.Role, ReadRole), - Schema: roleSchema, - DeprecationMessage: "This resource is deprecated and will be removed in a future major version release. Please use snowflake_account_roles instead.", - Importer: &schema.ResourceImporter{ - StateContext: schema.ImportStatePassthroughContext, - }, - } -} - -// ReadRole Reads the database metadata information. 
-func ReadRole(ctx context.Context, d *schema.ResourceData, meta any) diag.Diagnostics { - client := meta.(*provider.Context).Client - - roleId, err := sdk.ParseAccountObjectIdentifier(d.Get("name").(string)) - if err != nil { - return diag.FromErr(err) - } - - role, err := client.Roles.ShowByID(ctx, roleId) - if err != nil { - log.Printf("[DEBUG] role (%s) not found", roleId.Name()) - d.SetId("") - return nil - } - - d.SetId(helpers.EncodeResourceIdentifier(role.ID())) - if err := d.Set("name", role.Name); err != nil { - return diag.FromErr(err) - } - if err := d.Set("comment", role.Comment); err != nil { - return diag.FromErr(err) - } - return nil -} diff --git a/pkg/datasources/role_acceptance_test.go b/pkg/datasources/role_acceptance_test.go deleted file mode 100644 index b79c0d162d..0000000000 --- a/pkg/datasources/role_acceptance_test.go +++ /dev/null @@ -1,48 +0,0 @@ -package datasources_test - -import ( - "fmt" - "testing" - - acc "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance" - - "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/helpers/random" - "github.com/hashicorp/terraform-plugin-testing/helper/resource" - "github.com/hashicorp/terraform-plugin-testing/tfversion" -) - -func TestAcc_Role(t *testing.T) { - roleName := acc.TestClient().Ids.Alpha() - comment := random.Comment() - - resource.Test(t, resource.TestCase{ - ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, - PreCheck: func() { acc.TestAccPreCheck(t) }, - TerraformVersionChecks: []tfversion.TerraformVersionCheck{ - tfversion.RequireAbove(tfversion.Version1_5_0), - }, - CheckDestroy: nil, - Steps: []resource.TestStep{ - { - Config: role(roleName, comment), - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr("data.snowflake_role.t", "name", roleName), - resource.TestCheckResourceAttr("data.snowflake_role.t", "comment", comment), - ), - }, - }, - }) -} - -func role(roleName, comment string) string { - return 
fmt.Sprintf(` - resource snowflake_role "test_role" { - name = "%v" - comment = "%v" - } - data snowflake_role "t" { - depends_on = [snowflake_role.test_role] - name = "%v" - } - `, roleName, comment, roleName) -} diff --git a/pkg/datasources/roles.go b/pkg/datasources/roles.go deleted file mode 100644 index 597702a637..0000000000 --- a/pkg/datasources/roles.go +++ /dev/null @@ -1,101 +0,0 @@ -package datasources - -import ( - "context" - "fmt" - - "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/provider/datasources" - - "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/resources" - "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/schemas" - - "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/internal/provider" - - "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk" - "github.com/hashicorp/terraform-plugin-sdk/v2/diag" - "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" -) - -var rolesSchema = map[string]*schema.Schema{ - "like": { - Type: schema.TypeString, - Optional: true, - Description: "Filters the output with **case-insensitive** pattern, with support for SQL wildcard characters (`%` and `_`).", - }, - "in_class": { - Type: schema.TypeString, - Optional: true, - ValidateDiagFunc: resources.IsValidIdentifier[sdk.SchemaObjectIdentifier](), - Description: "Filters the SHOW GRANTS output by class name.", - }, - "roles": { - Type: schema.TypeList, - Computed: true, - Description: "Holds the aggregated output of all role details queries.", - Elem: &schema.Resource{ - Schema: map[string]*schema.Schema{ - resources.ShowOutputAttributeName: { - Type: schema.TypeList, - Computed: true, - Description: "Holds the output of SHOW ROLES.", - Elem: &schema.Resource{ - Schema: schemas.ShowRoleSchema, - }, - }, - }, - }, - }, -} - -func Roles() *schema.Resource { - return &schema.Resource{ - ReadContext: TrackingReadWrapper(datasources.Roles, ReadRoles), - Schema: rolesSchema, - DeprecationMessage: "This 
resource is deprecated and will be removed in a future major version release. Please use snowflake_account_roles instead.", - Description: "Data source used to get details of filtered roles. Filtering is aligned with the current possibilities for [SHOW ROLES](https://docs.snowflake.com/en/sql-reference/sql/show-roles) query (`like` and `in_class` are all supported). The results of SHOW are encapsulated in one output collection.", - } -} - -func ReadRoles(ctx context.Context, d *schema.ResourceData, meta any) diag.Diagnostics { - client := meta.(*provider.Context).Client - - req := sdk.NewShowRoleRequest() - - if likePattern, ok := d.GetOk("like"); ok { - req.WithLike(sdk.NewLikeRequest(likePattern.(string))) - } - - if className, ok := d.GetOk("in_class"); ok { - req.WithInClass(sdk.RolesInClass{ - Class: sdk.NewSchemaObjectIdentifierFromFullyQualifiedName(className.(string)), - }) - } - - roles, err := client.Roles.Show(ctx, req) - if err != nil { - return diag.Diagnostics{ - diag.Diagnostic{ - Severity: diag.Error, - Summary: "Failed to show roles", - Detail: fmt.Sprintf("Error: %s", err), - }, - } - } - - d.SetId("roles_read") - - flattenedRoles := make([]map[string]any, len(roles)) - for i, role := range roles { - role := role - flattenedRoles[i] = map[string]any{ - resources.ShowOutputAttributeName: []map[string]any{schemas.RoleToSchema(&role)}, - } - } - - err = d.Set("roles", flattenedRoles) - if err != nil { - return diag.FromErr(err) - } - - return nil -} diff --git a/pkg/datasources/roles_acceptance_test.go b/pkg/datasources/roles_acceptance_test.go deleted file mode 100644 index 35623e2ec0..0000000000 --- a/pkg/datasources/roles_acceptance_test.go +++ /dev/null @@ -1,107 +0,0 @@ -package datasources_test - -import ( - "fmt" - "strconv" - "testing" - - acc "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance" - - "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/helpers/random" - 
"github.com/hashicorp/terraform-plugin-testing/config" - "github.com/hashicorp/terraform-plugin-testing/helper/resource" - "github.com/hashicorp/terraform-plugin-testing/terraform" - "github.com/hashicorp/terraform-plugin-testing/tfversion" -) - -func TestAcc_Roles_Complete(t *testing.T) { - accountRoleNamePrefix := random.AlphaN(10) - accountRoleName1 := acc.TestClient().Ids.AlphaWithPrefix(accountRoleNamePrefix + "1") - accountRoleName2 := acc.TestClient().Ids.AlphaWithPrefix(accountRoleNamePrefix + "2") - accountRoleName3 := acc.TestClient().Ids.Alpha() - comment := random.Comment() - - likeVariables := config.Variables{ - "account_role_name_1": config.StringVariable(accountRoleName1), - "account_role_name_2": config.StringVariable(accountRoleName2), - "account_role_name_3": config.StringVariable(accountRoleName3), - "comment": config.StringVariable(comment), - "like": config.StringVariable(accountRoleNamePrefix + "%"), - } - - resource.Test(t, resource.TestCase{ - ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, - PreCheck: func() { acc.TestAccPreCheck(t) }, - TerraformVersionChecks: []tfversion.TerraformVersionCheck{ - tfversion.RequireAbove(tfversion.Version1_5_0), - }, - Steps: []resource.TestStep{ - { - ConfigDirectory: config.TestStepDirectory(), - ConfigVariables: likeVariables, - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr("data.snowflake_roles.test", "roles.#", "2"), - // containsRole(accountRoleName1, comment), - // containsRole(accountRoleName2, comment), - doesntContainRole(accountRoleName3, comment), - ), - }, - { - ConfigDirectory: config.TestStepDirectory(), - ConfigVariables: config.Variables{}, - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttrWith("data.snowflake_roles.test", "roles.#", func(value string) error { - numberOfRoles, err := strconv.ParseInt(value, 10, 8) - if err != nil { - return err - } - - if numberOfRoles == 0 { - return fmt.Errorf("expected roles to be non-empty") 
- } - - return nil - }), - ), - }, - }, - }) -} - -func doesntContainRole(name string, comment string) func(s *terraform.State) error { - return func(state *terraform.State) error { - err := containsRole(name, comment)(state) - if err != nil && err.Error() == fmt.Sprintf("role %s not found", name) { - return nil - } - return fmt.Errorf("expected %s not to be present", name) - } -} - -func containsRole(name string, comment string) func(s *terraform.State) error { - return func(s *terraform.State) error { - for _, rs := range s.RootModule().Resources { - if rs.Type != "snowflake_roles" { - continue - } - - iter, err := strconv.ParseInt(rs.Primary.Attributes["roles.#"], 10, 32) - if err != nil { - return err - } - - for i := 0; i < int(iter); i++ { - if rs.Primary.Attributes[fmt.Sprintf("roles.%d.show_output.0.name", i)] == name { - actualComment := rs.Primary.Attributes[fmt.Sprintf("roles.%d.show_output.0.comment", i)] - if actualComment != comment { - return fmt.Errorf("expected comment: %s, but got: %s", comment, actualComment) - } - - return nil - } - } - } - - return fmt.Errorf("role %s not found", name) - } -} diff --git a/pkg/datasources/sequences.go b/pkg/datasources/sequences.go index e9c6b7b53e..e0d9b989e2 100644 --- a/pkg/datasources/sequences.go +++ b/pkg/datasources/sequences.go @@ -5,6 +5,7 @@ import ( "fmt" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/provider/datasources" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/provider/previewfeatures" "github.com/hashicorp/terraform-plugin-sdk/v2/diag" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/internal/provider" @@ -54,7 +55,7 @@ var sequencesSchema = map[string]*schema.Schema{ func Sequences() *schema.Resource { return &schema.Resource{ - ReadContext: TrackingReadWrapper(datasources.Sequences, ReadSequences), + ReadContext: PreviewFeatureReadWrapper(string(previewfeatures.SequencesDatasource), TrackingReadWrapper(datasources.Sequences, ReadSequences)), Schema: 
sequencesSchema, } } diff --git a/pkg/datasources/shares.go b/pkg/datasources/shares.go index 721debb9ed..7096b14400 100644 --- a/pkg/datasources/shares.go +++ b/pkg/datasources/shares.go @@ -4,6 +4,7 @@ import ( "context" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/provider/datasources" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/provider/previewfeatures" "github.com/hashicorp/terraform-plugin-sdk/v2/diag" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/internal/provider" @@ -58,7 +59,7 @@ var sharesSchema = map[string]*schema.Schema{ // Shares Snowflake Shares resource. func Shares() *schema.Resource { return &schema.Resource{ - ReadContext: TrackingReadWrapper(datasources.Shares, ReadShares), + ReadContext: PreviewFeatureReadWrapper(string(previewfeatures.SharesDatasource), TrackingReadWrapper(datasources.Shares, ReadShares)), Schema: sharesSchema, } } diff --git a/pkg/datasources/stages.go b/pkg/datasources/stages.go index 695e785776..bfbaa0997a 100644 --- a/pkg/datasources/stages.go +++ b/pkg/datasources/stages.go @@ -5,6 +5,7 @@ import ( "fmt" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/provider/datasources" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/provider/previewfeatures" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/internal/provider" @@ -60,7 +61,7 @@ var stagesSchema = map[string]*schema.Schema{ func Stages() *schema.Resource { return &schema.Resource{ - ReadContext: TrackingReadWrapper(datasources.Stages, ReadStages), + ReadContext: PreviewFeatureReadWrapper(string(previewfeatures.StagesDatasource), TrackingReadWrapper(datasources.Stages, ReadStages)), Schema: stagesSchema, } } diff --git a/pkg/datasources/storage_integrations.go b/pkg/datasources/storage_integrations.go index 2e17b29701..e35348c16a 100644 --- a/pkg/datasources/storage_integrations.go +++ b/pkg/datasources/storage_integrations.go @@ -5,6 +5,7 @@ import ( "fmt" 
"github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/provider/datasources" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/provider/previewfeatures" "github.com/hashicorp/terraform-plugin-sdk/v2/diag" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/internal/provider" @@ -45,7 +46,7 @@ var storageIntegrationsSchema = map[string]*schema.Schema{ func StorageIntegrations() *schema.Resource { return &schema.Resource{ - ReadContext: TrackingReadWrapper(datasources.StorageIntegrations, ReadStorageIntegrations), + ReadContext: PreviewFeatureReadWrapper(string(previewfeatures.StorageIntegrationsDatasource), TrackingReadWrapper(datasources.StorageIntegrations, ReadStorageIntegrations)), Schema: storageIntegrationsSchema, } } diff --git a/pkg/datasources/system_generate_scim_access_token.go b/pkg/datasources/system_generate_scim_access_token.go index f40eb00306..9c008fd422 100644 --- a/pkg/datasources/system_generate_scim_access_token.go +++ b/pkg/datasources/system_generate_scim_access_token.go @@ -7,6 +7,7 @@ import ( "log" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/provider/datasources" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/provider/previewfeatures" "github.com/hashicorp/terraform-plugin-sdk/v2/diag" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk" @@ -32,7 +33,7 @@ var systemGenerateSCIMAccesstokenSchema = map[string]*schema.Schema{ func SystemGenerateSCIMAccessToken() *schema.Resource { return &schema.Resource{ - ReadContext: TrackingReadWrapper(datasources.SystemGenerateScimAccessToken, ReadSystemGenerateSCIMAccessToken), + ReadContext: PreviewFeatureReadWrapper(string(previewfeatures.SystemGenerateSCIMAccessTokenDatasource), TrackingReadWrapper(datasources.SystemGenerateScimAccessToken, ReadSystemGenerateSCIMAccessToken)), Schema: systemGenerateSCIMAccesstokenSchema, } } diff --git a/pkg/datasources/system_get_aws_sns_iam_policy.go 
b/pkg/datasources/system_get_aws_sns_iam_policy.go index a772d370ad..721d18195e 100644 --- a/pkg/datasources/system_get_aws_sns_iam_policy.go +++ b/pkg/datasources/system_get_aws_sns_iam_policy.go @@ -7,6 +7,7 @@ import ( "log" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/provider/datasources" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/provider/previewfeatures" "github.com/hashicorp/terraform-plugin-sdk/v2/diag" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/internal/provider" @@ -30,7 +31,7 @@ var systemGetAWSSNSIAMPolicySchema = map[string]*schema.Schema{ func SystemGetAWSSNSIAMPolicy() *schema.Resource { return &schema.Resource{ - ReadContext: TrackingReadWrapper(datasources.SystemGetAwsSnsIamPolicy, ReadSystemGetAWSSNSIAMPolicy), + ReadContext: PreviewFeatureReadWrapper(string(previewfeatures.SystemGetAWSSNSIAMPolicyDatasource), TrackingReadWrapper(datasources.SystemGetAwsSnsIamPolicy, ReadSystemGetAWSSNSIAMPolicy)), Schema: systemGetAWSSNSIAMPolicySchema, } } diff --git a/pkg/datasources/system_get_privatelink_config.go b/pkg/datasources/system_get_privatelink_config.go index ae13316ad7..654fbe674e 100644 --- a/pkg/datasources/system_get_privatelink_config.go +++ b/pkg/datasources/system_get_privatelink_config.go @@ -7,6 +7,7 @@ import ( "log" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/provider/datasources" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/provider/previewfeatures" "github.com/hashicorp/terraform-plugin-sdk/v2/diag" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/internal/provider" @@ -73,7 +74,7 @@ var systemGetPrivateLinkConfigSchema = map[string]*schema.Schema{ func SystemGetPrivateLinkConfig() *schema.Resource { return &schema.Resource{ - ReadContext: TrackingReadWrapper(datasources.SystemGetPrivateLinkConfig, ReadSystemGetPrivateLinkConfig), + ReadContext: PreviewFeatureReadWrapper(string(previewfeatures.SystemGetPrivateLinkConfigDatasource), 
TrackingReadWrapper(datasources.SystemGetPrivateLinkConfig, ReadSystemGetPrivateLinkConfig)), Schema: systemGetPrivateLinkConfigSchema, } } diff --git a/pkg/datasources/system_get_snowflake_platform_info.go b/pkg/datasources/system_get_snowflake_platform_info.go index e9f54ac4ce..f449be022c 100644 --- a/pkg/datasources/system_get_snowflake_platform_info.go +++ b/pkg/datasources/system_get_snowflake_platform_info.go @@ -8,6 +8,7 @@ import ( "log" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/provider/datasources" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/provider/previewfeatures" "github.com/hashicorp/terraform-plugin-sdk/v2/diag" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/internal/provider" @@ -33,7 +34,7 @@ var systemGetSnowflakePlatformInfoSchema = map[string]*schema.Schema{ func SystemGetSnowflakePlatformInfo() *schema.Resource { return &schema.Resource{ - ReadContext: TrackingReadWrapper(datasources.SystemGetSnowflakePlatformInfo, ReadSystemGetSnowflakePlatformInfo), + ReadContext: PreviewFeatureReadWrapper(string(previewfeatures.SystemGetSnowflakePlatformInfoDatasource), TrackingReadWrapper(datasources.SystemGetSnowflakePlatformInfo, ReadSystemGetSnowflakePlatformInfo)), Schema: systemGetSnowflakePlatformInfoSchema, } } diff --git a/pkg/datasources/tables.go b/pkg/datasources/tables.go index fac59da574..c814a1fcb3 100644 --- a/pkg/datasources/tables.go +++ b/pkg/datasources/tables.go @@ -5,6 +5,7 @@ import ( "log" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/provider/datasources" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/provider/previewfeatures" "github.com/hashicorp/terraform-plugin-sdk/v2/diag" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/internal/provider" @@ -55,7 +56,7 @@ var tablesSchema = map[string]*schema.Schema{ func Tables() *schema.Resource { return &schema.Resource{ - ReadContext: TrackingReadWrapper(datasources.Tables, ReadTables), + ReadContext: 
PreviewFeatureReadWrapper(string(previewfeatures.TablesDatasource), TrackingReadWrapper(datasources.Tables, ReadTables)), Schema: tablesSchema, } } diff --git a/pkg/datasources/testdata/TestAcc_Procedures/complete/test.tf b/pkg/datasources/testdata/TestAcc_Procedures/complete/test.tf index 33ffb62f1d..b91ba271a9 100644 --- a/pkg/datasources/testdata/TestAcc_Procedures/complete/test.tf +++ b/pkg/datasources/testdata/TestAcc_Procedures/complete/test.tf @@ -14,18 +14,17 @@ variable "schema" { type = string } -resource "snowflake_procedure" "test_proc_one" { +resource "snowflake_procedure_javascript" "test_proc_one" { name = var.proc_name_one database = var.database schema = var.schema return_type = "VARCHAR" - language = "JAVASCRIPT" statement = <<-EOF return "Hi" EOF } -resource "snowflake_procedure" "test_proc_two" { +resource "snowflake_procedure_javascript" "test_proc_two" { name = var.proc_name_two database = var.database schema = var.schema @@ -35,7 +34,6 @@ resource "snowflake_procedure" "test_proc_two" { } comment = "Terraform acceptance test" return_type = "varchar" - language = "JAVASCRIPT" statement = <<-EOF var X=1 return X @@ -45,5 +43,5 @@ resource "snowflake_procedure" "test_proc_two" { data "snowflake_procedures" "procedures" { database = var.database schema = var.schema - depends_on = [snowflake_procedure.test_proc_one, snowflake_procedure.test_proc_two] + depends_on = [snowflake_procedure_javascript.test_proc_one, snowflake_procedure_javascript.test_proc_two] } diff --git a/pkg/datasources/testdata/TestAcc_Roles_Complete/1/test.tf b/pkg/datasources/testdata/TestAcc_Roles_Complete/1/test.tf deleted file mode 100644 index ecef265a1c..0000000000 --- a/pkg/datasources/testdata/TestAcc_Roles_Complete/1/test.tf +++ /dev/null @@ -1,23 +0,0 @@ -resource "snowflake_account_role" "test1" { - name = var.account_role_name_1 - comment = var.comment -} - -resource "snowflake_account_role" "test2" { - name = var.account_role_name_2 - comment = var.comment -} - 
-resource "snowflake_account_role" "test3" { - name = var.account_role_name_3 - comment = var.comment -} - -data "snowflake_roles" "test" { - depends_on = [ - snowflake_account_role.test1, - snowflake_account_role.test2, - snowflake_account_role.test3, - ] - like = var.like -} diff --git a/pkg/datasources/testdata/TestAcc_Roles_Complete/1/variables.tf b/pkg/datasources/testdata/TestAcc_Roles_Complete/1/variables.tf deleted file mode 100644 index fcd75c445f..0000000000 --- a/pkg/datasources/testdata/TestAcc_Roles_Complete/1/variables.tf +++ /dev/null @@ -1,19 +0,0 @@ -variable "account_role_name_1" { - type = string -} - -variable "account_role_name_2" { - type = string -} - -variable "account_role_name_3" { - type = string -} - -variable "comment" { - type = string -} - -variable "like" { - type = string -} diff --git a/pkg/datasources/testdata/TestAcc_Roles_Complete/2/test.tf b/pkg/datasources/testdata/TestAcc_Roles_Complete/2/test.tf deleted file mode 100644 index 371634b3d1..0000000000 --- a/pkg/datasources/testdata/TestAcc_Roles_Complete/2/test.tf +++ /dev/null @@ -1,3 +0,0 @@ -data "snowflake_roles" "test" { - in_class = "SNOWFLAKE.CORE.BUDGET" -} diff --git a/pkg/internal/provider/provider_context.go b/pkg/internal/provider/provider_context.go index 0fbdef8228..64c469fb2f 100644 --- a/pkg/internal/provider/provider_context.go +++ b/pkg/internal/provider/provider_context.go @@ -3,5 +3,6 @@ package provider import "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk" type Context struct { - Client *sdk.Client + Client *sdk.Client + EnabledFeatures []string } diff --git a/pkg/internal/snowflakeenvs/snowflake_environment_variables.go b/pkg/internal/snowflakeenvs/snowflake_environment_variables.go index 91f817c655..8440f0092c 100644 --- a/pkg/internal/snowflakeenvs/snowflake_environment_variables.go +++ b/pkg/internal/snowflakeenvs/snowflake_environment_variables.go @@ -1,11 +1,9 @@ package snowflakeenvs const ( - Account = "SNOWFLAKE_ACCOUNT" 
AccountName = "SNOWFLAKE_ACCOUNT_NAME" OrganizationName = "SNOWFLAKE_ORGANIZATION_NAME" User = "SNOWFLAKE_USER" - Username = "SNOWFLAKE_USERNAME" Password = "SNOWFLAKE_PASSWORD" Warehouse = "SNOWFLAKE_WAREHOUSE" Role = "SNOWFLAKE_ROLE" diff --git a/pkg/internal/tools/doc-gen-helper/main.go b/pkg/internal/tools/doc-gen-helper/main.go index d476136dea..887dafbc56 100644 --- a/pkg/internal/tools/doc-gen-helper/main.go +++ b/pkg/internal/tools/doc-gen-helper/main.go @@ -2,7 +2,6 @@ package main import ( "bytes" - "io" "log" "os" "path/filepath" @@ -72,25 +71,22 @@ func main() { } } - var deprecatedResourcesBuffer bytes.Buffer - printTo(&deprecatedResourcesBuffer, DeprecatedResourcesTemplate, DeprecatedResourcesContext{deprecatedResources}) - - var deprecatedDatasourcesBuffer bytes.Buffer - printTo(&deprecatedDatasourcesBuffer, DeprecatedDatasourcesTemplate, DeprecatedDatasourcesContext{deprecatedDatasources}) - - err := os.WriteFile(filepath.Join(additionalExamplesPath, deprecatedResourcesFilename), deprecatedResourcesBuffer.Bytes(), 0o600) + err := printTo(DeprecatedResourcesTemplate, DeprecatedResourcesContext{deprecatedResources}, filepath.Join(additionalExamplesPath, deprecatedResourcesFilename)) if err != nil { - log.Panicln(err) + log.Fatal(err) } - err = os.WriteFile(filepath.Join(additionalExamplesPath, deprecatedDatasourcesFilename), deprecatedDatasourcesBuffer.Bytes(), 0o600) + + err = printTo(DeprecatedDatasourcesTemplate, DeprecatedDatasourcesContext{deprecatedDatasources}, filepath.Join(additionalExamplesPath, deprecatedDatasourcesFilename)) if err != nil { - log.Panicln(err) + log.Fatal(err) } } -func printTo(writer io.Writer, template *template.Template, model any) { - err := template.Execute(writer, model) +func printTo(template *template.Template, model any, filepath string) error { + var writer bytes.Buffer + err := template.Execute(&writer, model) if err != nil { - log.Panicln(err) + return err } + return os.WriteFile(filepath, writer.Bytes(), 0o600) 
} diff --git a/pkg/internal/tools/doc-gen-helper/templates.go b/pkg/internal/tools/doc-gen-helper/templates.go index d4dbc16f86..7121145a68 100644 --- a/pkg/internal/tools/doc-gen-helper/templates.go +++ b/pkg/internal/tools/doc-gen-helper/templates.go @@ -3,7 +3,8 @@ package main import "text/template" var DeprecatedResourcesTemplate, _ = template.New("deprecatedResourcesTemplate").Parse( - `## Currently deprecated resources + ` +{{if gt (len .Resources) 0}} ## Currently deprecated resources {{end}} {{ range .Resources -}} - {{ .NameRelativeLink }}{{ if .ReplacementRelativeLink }} - use {{ .ReplacementRelativeLink }} instead{{ end }} @@ -11,7 +12,8 @@ var DeprecatedResourcesTemplate, _ = template.New("deprecatedResourcesTemplate") ) var DeprecatedDatasourcesTemplate, _ = template.New("deprecatedDatasourcesTemplate").Parse( - `## Currently deprecated datasources + ` +{{if gt (len .Datasources) 0}} ## Currently deprecated data sources {{end}} {{ range .Datasources -}} - {{ .NameRelativeLink }}{{ if .ReplacementRelativeLink }} - use {{ .ReplacementRelativeLink }} instead{{ end }} diff --git a/pkg/internal/tracking/context.go b/pkg/internal/tracking/context.go index f228fa4773..412ff9eb77 100644 --- a/pkg/internal/tracking/context.go +++ b/pkg/internal/tracking/context.go @@ -10,7 +10,7 @@ import ( const ( CurrentSchemaVersion string = "1" - ProviderVersion string = "v0.100.0" // TODO(SNOW-1814934): Currently hardcoded, make it computed + ProviderVersion string = "v1.0.0" // TODO(SNOW-1814934): Currently hardcoded, make it computed MetadataPrefix string = "terraform_provider_usage_tracking" ) diff --git a/pkg/provider/datasources/datasources.go b/pkg/provider/datasources/datasources.go index c31b954a22..5edf2f8ca8 100644 --- a/pkg/provider/datasources/datasources.go +++ b/pkg/provider/datasources/datasources.go @@ -28,8 +28,6 @@ const ( Pipes datasource = "snowflake_pipes" Procedures datasource = "snowflake_procedures" ResourceMonitors datasource = 
"snowflake_resource_monitors" - Role datasource = "snowflake_role" - Roles datasource = "snowflake_roles" RowAccessPolicies datasource = "snowflake_row_access_policies" Schemas datasource = "snowflake_schemas" Secrets datasource = "snowflake_secrets" diff --git a/pkg/provider/previewfeatures/preview_features.go b/pkg/provider/previewfeatures/preview_features.go new file mode 100644 index 0000000000..e14600fe2c --- /dev/null +++ b/pkg/provider/previewfeatures/preview_features.go @@ -0,0 +1,157 @@ +package previewfeatures + +import ( + "fmt" + "slices" + "strings" +) + +type feature string + +const ( + CurrentAccountDatasource feature = "snowflake_current_account_datasource" + AccountAuthenticationPolicyAttachmentResource feature = "snowflake_account_authentication_policy_attachment_resource" + AccountPasswordPolicyAttachmentResource feature = "snowflake_account_password_policy_attachment_resource" + AlertResource feature = "snowflake_alert_resource" + AlertsDatasource feature = "snowflake_alerts_datasource" + ApiIntegrationResource feature = "snowflake_api_integration_resource" + AuthenticationPolicyResource feature = "snowflake_authentication_policy_resource" + CortexSearchServiceResource feature = "snowflake_cortex_search_service_resource" + CortexSearchServicesDatasource feature = "snowflake_cortex_search_services_datasource" + DatabaseDatasource feature = "snowflake_database_datasource" + DatabaseRoleDatasource feature = "snowflake_database_role_datasource" + DynamicTableResource feature = "snowflake_dynamic_table_resource" + DynamicTablesDatasource feature = "snowflake_dynamic_tables_datasource" + ExternalFunctionResource feature = "snowflake_external_function_resource" + ExternalFunctionsDatasource feature = "snowflake_external_functions_datasource" + ExternalTableResource feature = "snowflake_external_table_resource" + ExternalTablesDatasource feature = "snowflake_external_tables_datasource" + ExternalVolumeResource feature = 
"snowflake_external_volume_resource" + FailoverGroupResource feature = "snowflake_failover_group_resource" + FailoverGroupsDatasource feature = "snowflake_failover_groups_datasource" + FileFormatResource feature = "snowflake_file_format_resource" + FileFormatsDatasource feature = "snowflake_file_formats_datasource" + FunctionJavaResource feature = "snowflake_function_java_resource" + FunctionJavascriptResource feature = "snowflake_function_javascript_resource" + FunctionPythonResource feature = "snowflake_function_python_resource" + FunctionScalaResource feature = "snowflake_function_scala_resource" + FunctionSqlResource feature = "snowflake_function_sql_resource" + FunctionsDatasource feature = "snowflake_functions_datasource" + ManagedAccountResource feature = "snowflake_managed_account_resource" + MaterializedViewResource feature = "snowflake_materialized_view_resource" + MaterializedViewsDatasource feature = "snowflake_materialized_views_datasource" + NetworkPolicyAttachmentResource feature = "snowflake_network_policy_attachment_resource" + NetworkRuleResource feature = "snowflake_network_rule_resource" + EmailNotificationIntegrationResource feature = "snowflake_email_notification_integration_resource" + NotificationIntegrationResource feature = "snowflake_notification_integration_resource" + ObjectParameterResource feature = "snowflake_object_parameter_resource" + PasswordPolicyResource feature = "snowflake_password_policy_resource" + PipeResource feature = "snowflake_pipe_resource" + PipesDatasource feature = "snowflake_pipes_datasource" + ProcedureJavaResource feature = "snowflake_procecure_java_resource" + ProcedureJavascriptResource feature = "snowflake_procecure_javascript_resource" + ProcedurePythonResource feature = "snowflake_procecure_python_resource" + ProcedureScalaResource feature = "snowflake_procecure_scala_resource" + ProcedureSqlResource feature = "snowflake_procecure_sql_resource" + ProceduresDatasource feature = 
"snowflake_procedures_datasource" + CurrentRoleDatasource feature = "snowflake_current_role_datasource" + SequenceResource feature = "snowflake_sequence_resource" + SequencesDatasource feature = "snowflake_sequences_datasource" + ShareResource feature = "snowflake_share_resource" + SharesDatasource feature = "snowflake_shares_datasource" + ParametersDatasource feature = "snowflake_parameters_datasource" + StageResource feature = "snowflake_stage_resource" + StagesDatasource feature = "snowflake_stages_datasource" + StorageIntegrationResource feature = "snowflake_storage_integration_resource" + StorageIntegrationsDatasource feature = "snowflake_storage_integrations_datasource" + SystemGenerateSCIMAccessTokenDatasource feature = "snowflake_system_generate_scim_access_token_datasource" + SystemGetAWSSNSIAMPolicyDatasource feature = "snowflake_system_get_aws_sns_iam_policy_datasource" + SystemGetPrivateLinkConfigDatasource feature = "snowflake_system_get_privatelink_config_datasource" + SystemGetSnowflakePlatformInfoDatasource feature = "snowflake_system_get_snowflake_platform_info_datasource" + TableResource feature = "snowflake_table_resource" + TablesDatasource feature = "snowflake_tables_datasource" + TableColumnMaskingPolicyApplicationResource feature = "snowflake_table_column_masking_policy_application_resource" + TableConstraintResource feature = "snowflake_table_constraint_resource" + UserAuthenticationPolicyAttachmentResource feature = "snowflake_user_authentication_policy_attachment_resource" + UserPublicKeysResource feature = "snowflake_user_public_keys_resource" + UserPasswordPolicyAttachmentResource feature = "snowflake_user_password_policy_attachment_resource" +) + +var allPreviewFeatures = []feature{ + CurrentAccountDatasource, + AccountAuthenticationPolicyAttachmentResource, + AccountPasswordPolicyAttachmentResource, + AlertResource, + AlertsDatasource, + ApiIntegrationResource, + AuthenticationPolicyResource, + CortexSearchServiceResource, + 
CortexSearchServicesDatasource, + DatabaseDatasource, + DatabaseRoleDatasource, + DynamicTableResource, + DynamicTablesDatasource, + ExternalFunctionResource, + ExternalFunctionsDatasource, + ExternalTableResource, + ExternalTablesDatasource, + ExternalVolumeResource, + FailoverGroupResource, + FailoverGroupsDatasource, + FileFormatResource, + FileFormatsDatasource, + ManagedAccountResource, + MaterializedViewResource, + MaterializedViewsDatasource, + NetworkPolicyAttachmentResource, + NetworkRuleResource, + EmailNotificationIntegrationResource, + NotificationIntegrationResource, + ObjectParameterResource, + PasswordPolicyResource, + PipeResource, + PipesDatasource, + CurrentRoleDatasource, + SequenceResource, + SequencesDatasource, + ShareResource, + SharesDatasource, + ParametersDatasource, + StageResource, + StagesDatasource, + StorageIntegrationResource, + StorageIntegrationsDatasource, + SystemGenerateSCIMAccessTokenDatasource, + SystemGetAWSSNSIAMPolicyDatasource, + SystemGetPrivateLinkConfigDatasource, + SystemGetSnowflakePlatformInfoDatasource, + TableColumnMaskingPolicyApplicationResource, + TableConstraintResource, + UserAuthenticationPolicyAttachmentResource, + UserPublicKeysResource, + UserPasswordPolicyAttachmentResource, +} +var AllPreviewFeatures = make([]string, len(allPreviewFeatures)) + +func init() { + for i, v := range allPreviewFeatures { + AllPreviewFeatures[i] = string(v) + } +} + +func EnsurePreviewFeatureEnabled(feat feature, enabledFeatures []string) error { + if !slices.ContainsFunc(enabledFeatures, func(s string) bool { + return s == string(feat) + }) { + return fmt.Errorf("%[1]s is currently a preview feature, and must be enabled by adding %[1]s to `preview_features_enabled` in Terraform configuration.", feat) + } + return nil +} + +func StringToFeature(featRaw string) (feature, error) { + feat := feature(strings.ToLower(featRaw)) + if !slices.Contains(allPreviewFeatures, feat) { + return "", fmt.Errorf("invalid feature: %s", featRaw) + 
} + return feat, nil +} diff --git a/pkg/provider/previewfeatures/preview_features_test.go b/pkg/provider/previewfeatures/preview_features_test.go new file mode 100644 index 0000000000..f3fb50eece --- /dev/null +++ b/pkg/provider/previewfeatures/preview_features_test.go @@ -0,0 +1,91 @@ +package previewfeatures + +import ( + "testing" + + "github.com/stretchr/testify/require" +) + +func Test_StringToFeature(t *testing.T) { + type test struct { + input string + want feature + } + + valid := []test{ + // Case insensitive. + {input: "SNOWFLAKE_CURRENT_ACCOUNT_DATASOURCE", want: CurrentAccountDatasource}, + + // Supported Values. + {input: "snowflake_current_account_datasource", want: CurrentAccountDatasource}, + {input: "snowflake_account_password_policy_attachment_resource", want: AccountPasswordPolicyAttachmentResource}, + {input: "snowflake_alert_resource", want: AlertResource}, + {input: "snowflake_alerts_datasource", want: AlertsDatasource}, + {input: "snowflake_api_integration_resource", want: ApiIntegrationResource}, + {input: "snowflake_cortex_search_service_resource", want: CortexSearchServiceResource}, + {input: "snowflake_cortex_search_services_datasource", want: CortexSearchServicesDatasource}, + {input: "snowflake_database_datasource", want: DatabaseDatasource}, + {input: "snowflake_database_role_datasource", want: DatabaseRoleDatasource}, + {input: "snowflake_dynamic_table_resource", want: DynamicTableResource}, + {input: "snowflake_dynamic_tables_datasource", want: DynamicTablesDatasource}, + {input: "snowflake_external_function_resource", want: ExternalFunctionResource}, + {input: "snowflake_external_functions_datasource", want: ExternalFunctionsDatasource}, + {input: "snowflake_external_table_resource", want: ExternalTableResource}, + {input: "snowflake_external_tables_datasource", want: ExternalTablesDatasource}, + {input: "snowflake_external_volume_resource", want: ExternalVolumeResource}, + {input: "snowflake_failover_group_resource", want: 
FailoverGroupResource}, + {input: "snowflake_failover_groups_datasource", want: FailoverGroupsDatasource}, + {input: "snowflake_file_format_resource", want: FileFormatResource}, + {input: "snowflake_file_formats_datasource", want: FileFormatsDatasource}, + {input: "snowflake_managed_account_resource", want: ManagedAccountResource}, + {input: "snowflake_materialized_view_resource", want: MaterializedViewResource}, + {input: "snowflake_materialized_views_datasource", want: MaterializedViewsDatasource}, + {input: "snowflake_network_policy_attachment_resource", want: NetworkPolicyAttachmentResource}, + {input: "snowflake_network_rule_resource", want: NetworkRuleResource}, + {input: "snowflake_email_notification_integration_resource", want: EmailNotificationIntegrationResource}, + {input: "snowflake_notification_integration_resource", want: NotificationIntegrationResource}, + {input: "snowflake_object_parameter_resource", want: ObjectParameterResource}, + {input: "snowflake_password_policy_resource", want: PasswordPolicyResource}, + {input: "snowflake_pipe_resource", want: PipeResource}, + {input: "snowflake_pipes_datasource", want: PipesDatasource}, + {input: "snowflake_current_role_datasource", want: CurrentRoleDatasource}, + {input: "snowflake_sequence_resource", want: SequenceResource}, + {input: "snowflake_sequences_datasource", want: SequencesDatasource}, + {input: "snowflake_share_resource", want: ShareResource}, + {input: "snowflake_shares_datasource", want: SharesDatasource}, + {input: "snowflake_parameters_datasource", want: ParametersDatasource}, + {input: "snowflake_stage_resource", want: StageResource}, + {input: "snowflake_stages_datasource", want: StagesDatasource}, + {input: "snowflake_storage_integration_resource", want: StorageIntegrationResource}, + {input: "snowflake_storage_integrations_datasource", want: StorageIntegrationsDatasource}, + {input: "snowflake_system_generate_scim_access_token_datasource", want: 
SystemGenerateSCIMAccessTokenDatasource}, + {input: "snowflake_system_get_aws_sns_iam_policy_datasource", want: SystemGetAWSSNSIAMPolicyDatasource}, + {input: "snowflake_system_get_privatelink_config_datasource", want: SystemGetPrivateLinkConfigDatasource}, + {input: "snowflake_system_get_snowflake_platform_info_datasource", want: SystemGetSnowflakePlatformInfoDatasource}, + {input: "snowflake_table_column_masking_policy_application_resource", want: TableColumnMaskingPolicyApplicationResource}, + {input: "snowflake_table_constraint_resource", want: TableConstraintResource}, + {input: "snowflake_user_authentication_policy_attachment_resource", want: UserAuthenticationPolicyAttachmentResource}, + {input: "snowflake_user_public_keys_resource", want: UserPublicKeysResource}, + {input: "snowflake_user_password_policy_attachment_resource", want: UserPasswordPolicyAttachmentResource}, + } + + invalid := []test{ + {input: ""}, + {input: "foo"}, + } + + for _, tc := range valid { + t.Run(tc.input, func(t *testing.T) { + got, err := StringToFeature(tc.input) + require.NoError(t, err) + require.Equal(t, tc.want, got) + }) + } + + for _, tc := range invalid { + t.Run(tc.input, func(t *testing.T) { + _, err := StringToFeature(tc.input) + require.ErrorContains(t, err, "invalid feature") + }) + } +} diff --git a/pkg/provider/provider.go b/pkg/provider/provider.go index 8fdbb085db..1664c48f52 100644 --- a/pkg/provider/provider.go +++ b/pkg/provider/provider.go @@ -15,6 +15,7 @@ import ( "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/internal/provider/docs" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/internal/provider/validators" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/internal/snowflakeenvs" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/provider/previewfeatures" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/resources" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk" 
"github.com/hashicorp/terraform-plugin-sdk/v2/diag" @@ -71,11 +72,11 @@ func Provider() *schema.Provider { }, "password": { Type: schema.TypeString, - Description: envNameFieldDescription("Password for user + password auth. Cannot be used with `browser_auth` or `private_key_path`.", snowflakeenvs.Password), + Description: envNameFieldDescription("Password for user + password auth. Cannot be used with `private_key` and `private_key_passphrase`.", snowflakeenvs.Password), Optional: true, Sensitive: true, DefaultFunc: schema.EnvDefaultFunc(snowflakeenvs.Password, nil), - ConflictsWith: []string{"browser_auth", "private_key_path", "private_key", "private_key_passphrase", "oauth_access_token", "oauth_refresh_token"}, + ConflictsWith: []string{"private_key", "private_key_passphrase"}, }, "warehouse": { Type: schema.TypeString, @@ -133,7 +134,7 @@ func Provider() *schema.Provider { }, "authenticator": { Type: schema.TypeString, - Description: envNameFieldDescription(fmt.Sprintf("Specifies the [authentication type](https://pkg.go.dev/github.com/snowflakedb/gosnowflake#AuthType) to use when connecting to Snowflake. Valid options are: %v. Value `JWT` is deprecated and will be removed in future releases.", docs.PossibleValuesListed(sdk.AllAuthenticationTypes)), snowflakeenvs.Authenticator), + Description: envNameFieldDescription(fmt.Sprintf("Specifies the [authentication type](https://pkg.go.dev/github.com/snowflakedb/gosnowflake#AuthType) to use when connecting to Snowflake. Valid options are: %v.", docs.PossibleValuesListed(sdk.AllAuthenticationTypes)), snowflakeenvs.Authenticator), Optional: true, DefaultFunc: schema.EnvDefaultFunc(snowflakeenvs.Authenticator, string(sdk.AuthenticationTypeEmpty)), ValidateDiagFunc: validators.NormalizeValidation(sdk.ToExtendedAuthenticatorType), @@ -274,11 +275,11 @@ func Provider() *schema.Provider { }, "private_key": { Type: schema.TypeString, - Description: envNameFieldDescription("Private Key for username+private-key auth. 
Cannot be used with `browser_auth` or `password`.", snowflakeenvs.PrivateKey), + Description: envNameFieldDescription("Private Key for username+private-key auth. Cannot be used with `password`.", snowflakeenvs.PrivateKey), Optional: true, Sensitive: true, DefaultFunc: schema.EnvDefaultFunc(snowflakeenvs.PrivateKey, nil), - ConflictsWith: []string{"browser_auth", "password", "oauth_access_token", "private_key_path", "oauth_refresh_token"}, + ConflictsWith: []string{"password"}, }, "private_key_passphrase": { Type: schema.TypeString, @@ -286,7 +287,7 @@ func Provider() *schema.Provider { Optional: true, Sensitive: true, DefaultFunc: schema.EnvDefaultFunc(snowflakeenvs.PrivateKeyPassphrase, nil), - ConflictsWith: []string{"browser_auth", "password", "oauth_access_token", "oauth_refresh_token"}, + ConflictsWith: []string{"password"}, }, "disable_telemetry": { Type: schema.TypeBool, @@ -356,109 +357,14 @@ func Provider() *schema.Provider { Optional: true, DefaultFunc: schema.EnvDefaultFunc(snowflakeenvs.Profile, "default"), }, - // Deprecated attributes - "account": { - Type: schema.TypeString, - Description: envNameFieldDescription("Use `account_name` and `organization_name` instead. Specifies your Snowflake account identifier assigned, by Snowflake. The [account locator](https://docs.snowflake.com/en/user-guide/admin-account-identifier#format-2-account-locator-in-a-region) format is not supported. For information about account identifiers, see the [Snowflake documentation](https://docs.snowflake.com/en/user-guide/admin-account-identifier.html). Required unless using `profile`.", snowflakeenvs.Account), - Optional: true, - DefaultFunc: schema.EnvDefaultFunc(snowflakeenvs.Account, nil), - Deprecated: "Use `account_name` and `organization_name` instead of `account`", - }, - "username": { - Type: schema.TypeString, - Description: envNameFieldDescription("Username for user + password authentication. 
Required unless using `profile`.", snowflakeenvs.Username), - Optional: true, - DefaultFunc: schema.EnvDefaultFunc(snowflakeenvs.Username, nil), - Deprecated: "Use `user` instead of `username`", - }, - "region": { - Type: schema.TypeString, - Description: "Snowflake region, such as \"eu-central-1\", with this parameter. However, since this parameter is deprecated, it is best to specify the region as part of the account parameter. For details, see the description of the account parameter. [Snowflake region](https://docs.snowflake.com/en/user-guide/intro-regions.html) to use. Required if using the [legacy format for the `account` identifier](https://docs.snowflake.com/en/user-guide/admin-account-identifier.html#format-2-legacy-account-locator-in-a-region) in the form of `.`. Can also be sourced from the `SNOWFLAKE_REGION` environment variable. ", - Optional: true, - DefaultFunc: schema.EnvDefaultFunc("SNOWFLAKE_REGION", nil), - Deprecated: "Specify the region as part of the account parameter", - }, - "session_params": { - Type: schema.TypeMap, - Description: "Sets session parameters. [Parameters](https://docs.snowflake.com/en/sql-reference/parameters)", - Optional: true, - Deprecated: "Use `params` instead", - }, - "oauth_access_token": { - Type: schema.TypeString, - Description: "Token for use with OAuth. Generating the token is left to other tools. Cannot be used with `browser_auth`, `private_key_path`, `oauth_refresh_token` or `password`. Can also be sourced from `SNOWFLAKE_OAUTH_ACCESS_TOKEN` environment variable.", - Optional: true, - Sensitive: true, - DefaultFunc: schema.EnvDefaultFunc("SNOWFLAKE_OAUTH_ACCESS_TOKEN", nil), - ConflictsWith: []string{"browser_auth", "private_key_path", "private_key", "private_key_passphrase", "password", "oauth_refresh_token"}, - Deprecated: "Use `token` instead", - }, - "oauth_refresh_token": { - Type: schema.TypeString, - Description: "Token for use with OAuth. Setup and generation of the token is left to other tools. 
Should be used in conjunction with `oauth_client_id`, `oauth_client_secret`, `oauth_endpoint`, `oauth_redirect_url`. Cannot be used with `browser_auth`, `private_key_path`, `oauth_access_token` or `password`. Can also be sourced from `SNOWFLAKE_OAUTH_REFRESH_TOKEN` environment variable.", - Optional: true, - Sensitive: true, - DefaultFunc: schema.EnvDefaultFunc("SNOWFLAKE_OAUTH_REFRESH_TOKEN", nil), - ConflictsWith: []string{"browser_auth", "private_key_path", "private_key", "private_key_passphrase", "password", "oauth_access_token"}, - RequiredWith: []string{"oauth_client_id", "oauth_client_secret", "oauth_endpoint", "oauth_redirect_url"}, - Deprecated: "Use `token_accessor.0.refresh_token` instead", - }, - "oauth_client_id": { - Type: schema.TypeString, - Description: "Required when `oauth_refresh_token` is used. Can also be sourced from `SNOWFLAKE_OAUTH_CLIENT_ID` environment variable.", - Optional: true, - Sensitive: true, - DefaultFunc: schema.EnvDefaultFunc("SNOWFLAKE_OAUTH_CLIENT_ID", nil), - ConflictsWith: []string{"browser_auth", "private_key_path", "private_key", "private_key_passphrase", "password", "oauth_access_token"}, - RequiredWith: []string{"oauth_refresh_token", "oauth_client_secret", "oauth_endpoint", "oauth_redirect_url"}, - Deprecated: "Use `token_accessor.0.client_id` instead", - }, - "oauth_client_secret": { - Type: schema.TypeString, - Description: "Required when `oauth_refresh_token` is used. 
Can also be sourced from `SNOWFLAKE_OAUTH_CLIENT_SECRET` environment variable.", - Optional: true, - Sensitive: true, - DefaultFunc: schema.EnvDefaultFunc("SNOWFLAKE_OAUTH_CLIENT_SECRET", nil), - ConflictsWith: []string{"browser_auth", "private_key_path", "private_key", "private_key_passphrase", "password", "oauth_access_token"}, - RequiredWith: []string{"oauth_client_id", "oauth_refresh_token", "oauth_endpoint", "oauth_redirect_url"}, - Deprecated: "Use `token_accessor.0.client_secret` instead", - }, - "oauth_endpoint": { - Type: schema.TypeString, - Description: "Required when `oauth_refresh_token` is used. Can also be sourced from `SNOWFLAKE_OAUTH_ENDPOINT` environment variable.", - Optional: true, - Sensitive: true, - DefaultFunc: schema.EnvDefaultFunc("SNOWFLAKE_OAUTH_ENDPOINT", nil), - ConflictsWith: []string{"browser_auth", "private_key_path", "private_key", "private_key_passphrase", "password", "oauth_access_token"}, - RequiredWith: []string{"oauth_client_id", "oauth_client_secret", "oauth_refresh_token", "oauth_redirect_url"}, - Deprecated: "Use `token_accessor.0.token_endpoint` instead", - }, - "oauth_redirect_url": { - Type: schema.TypeString, - Description: "Required when `oauth_refresh_token` is used. Can also be sourced from `SNOWFLAKE_OAUTH_REDIRECT_URL` environment variable.", - Optional: true, - Sensitive: true, - DefaultFunc: schema.EnvDefaultFunc("SNOWFLAKE_OAUTH_REDIRECT_URL", nil), - ConflictsWith: []string{"browser_auth", "private_key_path", "private_key", "private_key_passphrase", "password", "oauth_access_token"}, - RequiredWith: []string{"oauth_client_id", "oauth_client_secret", "oauth_endpoint", "oauth_refresh_token"}, - Deprecated: "Use `token_accessor.0.redirect_uri` instead", - }, - "browser_auth": { - Type: schema.TypeBool, - Description: "Required when `oauth_refresh_token` is used. 
Can also be sourced from `SNOWFLAKE_USE_BROWSER_AUTH` environment variable.", - Optional: true, - Sensitive: false, - DefaultFunc: schema.EnvDefaultFunc("SNOWFLAKE_USE_BROWSER_AUTH", nil), - Deprecated: "Use `authenticator` instead", - }, - "private_key_path": { - Type: schema.TypeString, - Description: "Path to a private key for using keypair authentication. Cannot be used with `browser_auth`, `oauth_access_token` or `password`. Can also be sourced from `SNOWFLAKE_PRIVATE_KEY_PATH` environment variable.", - Optional: true, - Sensitive: true, - DefaultFunc: schema.EnvDefaultFunc("SNOWFLAKE_PRIVATE_KEY_PATH", nil), - ConflictsWith: []string{"browser_auth", "password", "oauth_access_token", "private_key"}, - Deprecated: "use the [file Function](https://developer.hashicorp.com/terraform/language/functions/file) instead", + "preview_features_enabled": { + Type: schema.TypeSet, + Optional: true, + Elem: &schema.Schema{ + Type: schema.TypeString, + ValidateDiagFunc: validators.StringInSlice(previewfeatures.AllPreviewFeatures, true), + }, + Description: fmt.Sprintf("A list of preview features that are handled by the provider. See [preview features list](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/v1-preparations/LIST_OF_PREVIEW_FEATURES_FOR_V1.md). Preview features may have breaking changes in future releases, even without raising the major version. This field can not be set with environmental variables. 
Valid options are: %v.", docs.PossibleValuesListed(previewfeatures.AllPreviewFeatures)), }, }, ResourcesMap: getResources(), @@ -482,7 +388,6 @@ func getResources() map[string]*schema.Resource { "snowflake_api_integration": resources.APIIntegration(), "snowflake_authentication_policy": resources.AuthenticationPolicy(), "snowflake_cortex_search_service": resources.CortexSearchService(), - "snowflake_database_old": resources.DatabaseOld(), "snowflake_database": resources.Database(), "snowflake_database_role": resources.DatabaseRole(), "snowflake_dynamic_table": resources.DynamicTable(), @@ -494,7 +399,6 @@ func getResources() map[string]*schema.Resource { "snowflake_external_volume": resources.ExternalVolume(), "snowflake_failover_group": resources.FailoverGroup(), "snowflake_file_format": resources.FileFormat(), - "snowflake_function": resources.Function(), "snowflake_function_java": resources.FunctionJava(), "snowflake_function_javascript": resources.FunctionJavascript(), "snowflake_function_python": resources.FunctionPython(), @@ -515,23 +419,19 @@ func getResources() map[string]*schema.Resource { "snowflake_network_policy_attachment": resources.NetworkPolicyAttachment(), "snowflake_network_rule": resources.NetworkRule(), "snowflake_notification_integration": resources.NotificationIntegration(), - "snowflake_oauth_integration": resources.OAuthIntegration(), "snowflake_oauth_integration_for_partner_applications": resources.OauthIntegrationForPartnerApplications(), "snowflake_oauth_integration_for_custom_clients": resources.OauthIntegrationForCustomClients(), "snowflake_object_parameter": resources.ObjectParameter(), "snowflake_password_policy": resources.PasswordPolicy(), "snowflake_pipe": resources.Pipe(), "snowflake_primary_connection": resources.PrimaryConnection(), - "snowflake_procedure": resources.Procedure(), "snowflake_procedure_java": resources.ProcedureJava(), "snowflake_procedure_javascript": resources.ProcedureJavascript(), "snowflake_procedure_python": 
resources.ProcedurePython(), "snowflake_procedure_scala": resources.ProcedureScala(), "snowflake_procedure_sql": resources.ProcedureSql(), "snowflake_resource_monitor": resources.ResourceMonitor(), - "snowflake_role": resources.Role(), "snowflake_row_access_policy": resources.RowAccessPolicy(), - "snowflake_saml_integration": resources.SAMLIntegration(), "snowflake_saml2_integration": resources.SAML2Integration(), "snowflake_schema": resources.Schema(), "snowflake_scim_integration": resources.SCIMIntegration(), @@ -543,12 +443,10 @@ func getResources() map[string]*schema.Resource { "snowflake_secret_with_generic_string": resources.SecretWithGenericString(), "snowflake_sequence": resources.Sequence(), "snowflake_service_user": resources.ServiceUser(), - "snowflake_session_parameter": resources.SessionParameter(), "snowflake_share": resources.Share(), "snowflake_shared_database": resources.SharedDatabase(), "snowflake_stage": resources.Stage(), "snowflake_storage_integration": resources.StorageIntegration(), - "snowflake_stream": resources.Stream(), "snowflake_stream_on_directory_table": resources.StreamOnDirectoryTable(), "snowflake_stream_on_external_table": resources.StreamOnExternalTable(), "snowflake_stream_on_table": resources.StreamOnTable(), @@ -559,9 +457,7 @@ func getResources() map[string]*schema.Resource { "snowflake_table_constraint": resources.TableConstraint(), "snowflake_tag": resources.Tag(), "snowflake_tag_association": resources.TagAssociation(), - "snowflake_tag_masking_policy_association": resources.TagMaskingPolicyAssociation(), "snowflake_task": resources.Task(), - "snowflake_unsafe_execute": resources.UnsafeExecute(), "snowflake_user": resources.User(), "snowflake_user_authentication_policy_attachment": resources.UserAuthenticationPolicyAttachment(), "snowflake_user_password_policy_attachment": resources.UserPasswordPolicyAttachment(), @@ -604,8 +500,6 @@ func getDataSources() map[string]*schema.Resource { "snowflake_pipes": 
datasources.Pipes(), "snowflake_procedures": datasources.Procedures(), "snowflake_resource_monitors": datasources.ResourceMonitors(), - "snowflake_role": datasources.Role(), - "snowflake_roles": datasources.Roles(), "snowflake_row_access_policies": datasources.RowAccessPolicies(), "snowflake_schemas": datasources.Schemas(), "snowflake_secrets": datasources.Secrets(), @@ -630,15 +524,15 @@ func getDataSources() map[string]*schema.Resource { } var ( - configuredClient *sdk.Client configureClientError error //nolint:errname + configureProviderCtx *provider.Context ) func ConfigureProvider(ctx context.Context, s *schema.ResourceData) (any, diag.Diagnostics) { // hacky way to speed up our acceptance tests if os.Getenv("TF_ACC") != "" && os.Getenv("SF_TF_ACC_TEST_CONFIGURE_CLIENT_ONCE") == "true" { - if configuredClient != nil { - return &provider.Context{Client: configuredClient}, nil + if configureProviderCtx != nil { + return configureProviderCtx, nil } if configureClientError != nil { return nil, diag.FromErr(configureClientError) @@ -660,12 +554,22 @@ func ConfigureProvider(ctx context.Context, s *schema.ResourceData) (any, diag.D client, clientErr := sdk.NewClient(config) + providerCtx := &provider.Context{Client: client} + + if v, ok := s.GetOk("preview_features_enabled"); ok { + providerCtx.EnabledFeatures = expandStringList(v.(*schema.Set).List()) + } + + if os.Getenv("TF_ACC") != "" && os.Getenv("SF_TF_ACC_TEST_ENABLE_ALL_PREVIEW_FEATURES") == "true" { + providerCtx.EnabledFeatures = previewfeatures.AllPreviewFeatures + } + // needed for tests verifying different provider setups if os.Getenv(resource.EnvTfAcc) != "" && os.Getenv(string(testenvs.ConfigureClientOnce)) == "true" { - configuredClient = client + configureProviderCtx = providerCtx configureClientError = clientErr } else { - configuredClient = nil + configureProviderCtx = nil configureClientError = nil } @@ -673,7 +577,19 @@ func ConfigureProvider(ctx context.Context, s *schema.ResourceData) (any, 
diag.D return nil, diag.FromErr(clientErr) } - return &provider.Context{Client: client}, nil + return providerCtx, nil +} + +// TODO: reuse with the function from resources package +func expandStringList(configured []interface{}) []string { + vs := make([]string, 0, len(configured)) + for _, v := range configured { + val, ok := v.(string) + if ok && val != "" { + vs = append(vs, val) + } + } + return vs } func getDriverConfigFromTOML(profile string) (*gosnowflake.Config, error) { @@ -804,14 +720,6 @@ func getDriverConfigFromTerraform(s *schema.ResourceData) (*gosnowflake.Config, handleBooleanStringAttribute(s, "disable_console_login", &config.DisableConsoleLogin), // profile is handled in the calling function // TODO(SNOW-1761318): handle DisableSamlURLCheck after upgrading the driver to at least 1.10.1 - - // deprecated - handleStringField(s, "account", &config.Account), - handleStringField(s, "username", &config.User), - handleStringField(s, "region", &config.Region), - // session params are handled below - // browser auth is handled below - // private key path is handled below ) if err != nil { return nil, err @@ -829,11 +737,6 @@ func getDriverConfigFromTerraform(s *schema.ResourceData) (*gosnowflake.Config, m = v.(map[string]interface{}) } - // backwards compatibility until we can remove this - if v, ok := s.GetOk("session_params"); ok { - m = v.(map[string]interface{}) - } - params := make(map[string]*string) for key, value := range m { strValue := value.(string) @@ -841,11 +744,6 @@ func getDriverConfigFromTerraform(s *schema.ResourceData) (*gosnowflake.Config, } config.Params = params - // backwards compatibility until we can remove this - if v, ok := s.GetOk("browser_auth"); ok && v.(bool) { - config.Authenticator = gosnowflake.AuthTypeExternalBrowser - } - if v, ok := s.GetOk("token_accessor"); ok { if len(v.([]any)) > 0 { tokenAccessor := v.([]any)[0].(map[string]any) @@ -863,10 +761,9 @@ func getDriverConfigFromTerraform(s *schema.ResourceData) 
(*gosnowflake.Config, } } - privateKeyPath := s.Get("private_key_path").(string) privateKey := s.Get("private_key").(string) privateKeyPassphrase := s.Get("private_key_passphrase").(string) - v, err := getPrivateKey(privateKeyPath, privateKey, privateKeyPassphrase) + v, err := getPrivateKey(privateKey, privateKeyPassphrase) if err != nil { return nil, fmt.Errorf("could not retrieve private key: %w", err) } diff --git a/pkg/provider/provider_acceptance_test.go b/pkg/provider/provider_acceptance_test.go index 6321957a17..e8fb41a1ca 100644 --- a/pkg/provider/provider_acceptance_test.go +++ b/pkg/provider/provider_acceptance_test.go @@ -6,11 +6,13 @@ import ( "net/url" "os" "regexp" + "strings" "testing" "time" acc "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance" internalprovider "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/internal/provider" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/provider/previewfeatures" tfconfig "github.com/hashicorp/terraform-plugin-testing/config" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/bettertestspoc/config" @@ -172,6 +174,13 @@ func TestAcc_Provider_configHierarchy(t *testing.T) { }) } +func configAccountId(t *testing.T, cfg *gosnowflake.Config) sdk.AccountIdentifier { + t.Helper() + accountIdRaw := cfg.Account + parts := strings.SplitN(accountIdRaw, "-", 2) + return sdk.NewAccountIdentifier(parts[0], parts[1]) +} + func TestAcc_Provider_configureClientOnceSwitching(t *testing.T) { _ = testenvs.GetOrSkipTest(t, testenvs.EnableAcceptance) acc.TestAccPreCheck(t) @@ -308,7 +317,6 @@ func TestAcc_Provider_envConfig(t *testing.T) { PreCheck: func() { testenvs.AssertEnvNotSet(t, snowflakeenvs.User) testenvs.AssertEnvNotSet(t, snowflakeenvs.Password) - testenvs.AssertEnvNotSet(t, snowflakeenvs.Account) testenvs.AssertEnvNotSet(t, snowflakeenvs.ConfigPath) t.Setenv(snowflakeenvs.ConfigPath, tmpServiceUserConfig.Path) @@ -670,11 +678,6 @@ func 
TestAcc_Provider_JwtAuth(t *testing.T) { }, Config: config.FromModels(t, providermodel.SnowflakeProvider().WithProfile(tmpServiceUserConfig.Profile).WithAuthenticatorType(sdk.AuthenticationTypeJwt), datasourceModel()), }, - // authenticate with unencrypted private key with a legacy authenticator value - // solves https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2983 - { - Config: config.FromModels(t, providermodel.SnowflakeProvider().WithProfile(tmpServiceUserConfig.Profile).WithAuthenticatorType(sdk.AuthenticationTypeJwtLegacy), datasourceModel()), - }, // check encrypted private key with incorrect password { PreConfig: func() { @@ -795,10 +798,86 @@ func TestAcc_Provider_invalidConfigurations(t *testing.T) { Config: config.FromModels(t, providermodel.SnowflakeProvider().WithProfile("non-existing"), datasourceModel()), ExpectError: regexp.MustCompile(fmt.Sprintf(`profile "non-existing" not found in file %s`, tmpServiceUserConfig.Path)), }, + { + Config: providerConfigWithDatasourcePreviewFeatureEnabled(testprofiles.Default, "snowflake_invalid_feature"), + ExpectError: regexp.MustCompile(`expected .* preview_features_enabled.* to be one of((.|\n)*), got snowflake_invalid_feature`), + }, + }, + }) +} + +func TestAcc_Provider_PreviewFeaturesEnabled(t *testing.T) { + t.Setenv(string(testenvs.ConfigureClientOnce), "") + t.Setenv(string(testenvs.EnableAllPreviewFeatures), "") + acc.TestAccPreCheck(t) + + tmpServiceUser := acc.TestClient().SetUpTemporaryServiceUser(t) + tmpServiceUserConfig := acc.TestClient().TempTomlConfigForServiceUser(t, tmpServiceUser) + + resource.Test(t, resource.TestCase{ + ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, + TerraformVersionChecks: []tfversion.TerraformVersionCheck{ + tfversion.RequireAbove(tfversion.Version1_5_0), + }, + Steps: []resource.TestStep{ + { + PreConfig: func() { + t.Setenv(snowflakeenvs.ConfigPath, tmpServiceUserConfig.Path) + }, + Config: config.FromModels(t, 
providermodel.SnowflakeProvider().WithProfile(tmpServiceUserConfig.Profile).WithPreviewFeaturesEnabled(string(previewfeatures.DatabaseDatasource)), datasourceModel()), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttrSet(datasourceModel().DatasourceReference(), "name"), + ), + }, }, }) } +func TestAcc_Provider_PreviewFeaturesDisabled(t *testing.T) { + t.Setenv(string(testenvs.ConfigureClientOnce), "") + t.Setenv(string(testenvs.EnableAllPreviewFeatures), "") + acc.TestAccPreCheck(t) + + tmpServiceUser := acc.TestClient().SetUpTemporaryServiceUser(t) + tmpServiceUserConfig := acc.TestClient().TempTomlConfigForServiceUser(t, tmpServiceUser) + + resource.Test(t, resource.TestCase{ + ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, + PreCheck: func() { acc.TestAccPreCheck(t) }, + TerraformVersionChecks: []tfversion.TerraformVersionCheck{ + tfversion.RequireAbove(tfversion.Version1_5_0), + }, + Steps: []resource.TestStep{ + { + PreConfig: func() { + t.Setenv(snowflakeenvs.ConfigPath, tmpServiceUserConfig.Path) + }, + Config: config.FromModels(t, providermodel.SnowflakeProvider().WithProfile(tmpServiceUserConfig.Profile), datasourceModel()), + ExpectError: regexp.MustCompile("snowflake_database_datasource is currently a preview feature, and must be enabled by adding snowflake_database_datasource to `preview_features_enabled` in Terraform configuration"), + }, + }, + }) +} + +func providerConfigWithDatasourcePreviewFeatureEnabled(profile, feature string) string { + return fmt.Sprintf(` +provider "snowflake" { + profile = "%[1]s" + preview_features_enabled = ["%[2]s_datasource"] +} +data %[2]s t {} +`, profile, feature) +} + +func providerConfigWithDatasourcePreviewFeature(profile, feature string) string { + return fmt.Sprintf(` +provider "snowflake" { + profile = "%[1]s" +} +data %[2]s t {} +`, profile, feature) +} + func datasourceModel() config.DatasourceModel { return datasourcemodel.Database("t", acc.TestDatabaseName) } diff --git 
a/pkg/provider/provider_helpers.go b/pkg/provider/provider_helpers.go index dd6aa9ab92..0a412e1d62 100644 --- a/pkg/provider/provider_helpers.go +++ b/pkg/provider/provider_helpers.go @@ -3,12 +3,10 @@ package provider import ( "crypto/rsa" "encoding/json" - "errors" "fmt" "io" "net/http" "net/url" - "os" "strconv" "strings" "time" @@ -16,7 +14,6 @@ import ( "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/internal/provider" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" - "github.com/mitchellh/go-homedir" "github.com/snowflakedb/gosnowflake" ) @@ -44,39 +41,14 @@ func toProtocol(s string) (protocol, error) { } } -func getPrivateKey(privateKeyPath, privateKeyString, privateKeyPassphrase string) (*rsa.PrivateKey, error) { - if privateKeyPath == "" && privateKeyString == "" { +func getPrivateKey(privateKeyString, privateKeyPassphrase string) (*rsa.PrivateKey, error) { + if privateKeyString == "" { return nil, nil } privateKeyBytes := []byte(privateKeyString) - var err error - if len(privateKeyBytes) == 0 && privateKeyPath != "" { - privateKeyBytes, err = readFile(privateKeyPath) - if err != nil { - return nil, fmt.Errorf("private Key file could not be read err = %w", err) - } - } return sdk.ParsePrivateKey(privateKeyBytes, []byte(privateKeyPassphrase)) } -func readFile(privateKeyPath string) ([]byte, error) { - expandedPrivateKeyPath, err := homedir.Expand(privateKeyPath) - if err != nil { - return nil, fmt.Errorf("invalid Path to private key err = %w", err) - } - - privateKeyBytes, err := os.ReadFile(expandedPrivateKeyPath) - if err != nil { - return nil, fmt.Errorf("could not read private key err = %w", err) - } - - if len(privateKeyBytes) == 0 { - return nil, errors.New("private key is empty") - } - - return privateKeyBytes, nil -} - type GetRefreshTokenResponseBody struct { AccessToken string `json:"access_token"` TokenType string `json:"token_type"` diff --git 
a/pkg/provider/resources/resources.go b/pkg/provider/resources/resources.go index 4a576e79ed..b6739b9604 100644 --- a/pkg/provider/resources/resources.go +++ b/pkg/provider/resources/resources.go @@ -15,7 +15,6 @@ const ( ApiIntegration resource = "snowflake_api_integration" AuthenticationPolicy resource = "snowflake_authentication_policy" CortexSearchService resource = "snowflake_cortex_search_service" - DatabaseOld resource = "snowflake_database_old" Database resource = "snowflake_database" DatabaseRole resource = "snowflake_database_role" DynamicTable resource = "snowflake_dynamic_table" @@ -34,7 +33,6 @@ const ( GrantPrivilegesToAccountRole resource = "snowflake_grant_privileges_to_account_role" GrantPrivilegesToDatabaseRole resource = "snowflake_grant_privileges_to_database_role" GrantPrivilegesToShare resource = "snowflake_grant_privileges_to_share" - Function resource = "snowflake_function" FunctionJava resource = "snowflake_function_java" FunctionJavascript resource = "snowflake_function_javascript" FunctionPython resource = "snowflake_function_python" @@ -55,14 +53,12 @@ const ( PasswordPolicy resource = "snowflake_password_policy" Pipe resource = "snowflake_pipe" PrimaryConnection resource = "snowflake_primary_connection" - Procedure resource = "snowflake_procedure" ProcedureJava resource = "snowflake_procedure_java" ProcedureJavascript resource = "snowflake_procedure_javascript" ProcedurePython resource = "snowflake_procedure_python" ProcedureScala resource = "snowflake_procedure_scala" ProcedureSql resource = "snowflake_procedure_sql" ResourceMonitor resource = "snowflake_resource_monitor" - Role resource = "snowflake_role" RowAccessPolicy resource = "snowflake_row_access_policy" SamlSecurityIntegration resource = "snowflake_saml_integration" Saml2SecurityIntegration resource = "snowflake_saml2_integration" @@ -81,7 +77,6 @@ const ( SharedDatabase resource = "snowflake_shared_database" Stage resource = "snowflake_stage" StorageIntegration resource = 
"snowflake_storage_integration" - Stream resource = "snowflake_stream" StreamOnDirectoryTable resource = "snowflake_stream_on_directory_table" StreamOnExternalTable resource = "snowflake_stream_on_external_table" StreamOnTable resource = "snowflake_stream_on_table" @@ -94,7 +89,6 @@ const ( TagAssociation resource = "snowflake_tag_association" TagMaskingPolicyAssociation resource = "snowflake_tag_masking_policy_association" Task resource = "snowflake_task" - UnsafeExecute resource = "snowflake_unsafe_execute" User resource = "snowflake_user" UserAuthenticationPolicyAttachment resource = "snowflake_user_authentication_policy_attachment" UserPasswordPolicyAttachment resource = "snowflake_user_password_policy_attachment" diff --git a/pkg/resources/account_authentication_policy_attachment.go b/pkg/resources/account_authentication_policy_attachment.go index bb61f3d215..a71a02be00 100644 --- a/pkg/resources/account_authentication_policy_attachment.go +++ b/pkg/resources/account_authentication_policy_attachment.go @@ -4,6 +4,7 @@ import ( "context" "fmt" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/provider/previewfeatures" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/provider/resources" "github.com/hashicorp/terraform-plugin-sdk/v2/diag" @@ -29,9 +30,9 @@ func AccountAuthenticationPolicyAttachment() *schema.Resource { return &schema.Resource{ Description: "Specifies the authentication policy to use for the current account. 
To set the authentication policy of a different account, use a provider alias.", - CreateContext: TrackingCreateWrapper(resources.AccountAuthenticationPolicyAttachment, CreateAccountAuthenticationPolicyAttachment), - ReadContext: TrackingReadWrapper(resources.AccountAuthenticationPolicyAttachment, ReadAccountAuthenticationPolicyAttachment), - DeleteContext: TrackingDeleteWrapper(resources.AccountAuthenticationPolicyAttachment, DeleteAccountAuthenticationPolicyAttachment), + CreateContext: PreviewFeatureCreateContextWrapper(string(previewfeatures.AccountAuthenticationPolicyAttachmentResource), TrackingCreateWrapper(resources.AccountAuthenticationPolicyAttachment, CreateAccountAuthenticationPolicyAttachment)), + ReadContext: PreviewFeatureReadContextWrapper(string(previewfeatures.AccountAuthenticationPolicyAttachmentResource), TrackingReadWrapper(resources.AccountAuthenticationPolicyAttachment, ReadAccountAuthenticationPolicyAttachment)), + DeleteContext: PreviewFeatureDeleteContextWrapper(string(previewfeatures.AccountAuthenticationPolicyAttachmentResource), TrackingDeleteWrapper(resources.AccountAuthenticationPolicyAttachment, DeleteAccountAuthenticationPolicyAttachment)), Schema: accountAuthenticationPolicyAttachmentSchema, Importer: &schema.ResourceImporter{ diff --git a/pkg/resources/account_password_policy_attachment.go b/pkg/resources/account_password_policy_attachment.go index 03375b0c75..12e47afea4 100644 --- a/pkg/resources/account_password_policy_attachment.go +++ b/pkg/resources/account_password_policy_attachment.go @@ -4,6 +4,7 @@ import ( "context" "fmt" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/provider/previewfeatures" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/provider/resources" "github.com/hashicorp/terraform-plugin-sdk/v2/diag" @@ -29,9 +30,9 @@ func AccountPasswordPolicyAttachment() *schema.Resource { return &schema.Resource{ Description: "Specifies the password policy to use for the current account. 
To set the password policy of a different account, use a provider alias.", - CreateContext: TrackingCreateWrapper(resources.AccountPasswordPolicyAttachment, CreateAccountPasswordPolicyAttachment), - ReadContext: TrackingReadWrapper(resources.AccountPasswordPolicyAttachment, ReadAccountPasswordPolicyAttachment), - DeleteContext: TrackingDeleteWrapper(resources.AccountPasswordPolicyAttachment, DeleteAccountPasswordPolicyAttachment), + CreateContext: PreviewFeatureCreateContextWrapper(string(previewfeatures.AccountPasswordPolicyAttachmentResource), TrackingCreateWrapper(resources.AccountPasswordPolicyAttachment, CreateAccountPasswordPolicyAttachment)), + ReadContext: PreviewFeatureReadContextWrapper(string(previewfeatures.AccountPasswordPolicyAttachmentResource), TrackingReadWrapper(resources.AccountPasswordPolicyAttachment, ReadAccountPasswordPolicyAttachment)), + DeleteContext: PreviewFeatureDeleteContextWrapper(string(previewfeatures.AccountPasswordPolicyAttachmentResource), TrackingDeleteWrapper(resources.AccountPasswordPolicyAttachment, DeleteAccountPasswordPolicyAttachment)), Schema: accountPasswordPolicyAttachmentSchema, Importer: &schema.ResourceImporter{ diff --git a/pkg/resources/alert.go b/pkg/resources/alert.go index 6ca90697db..6d7dc22bd1 100644 --- a/pkg/resources/alert.go +++ b/pkg/resources/alert.go @@ -8,6 +8,7 @@ import ( "strings" "time" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/provider/previewfeatures" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/provider/resources" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/helpers" @@ -112,10 +113,10 @@ var alertSchema = map[string]*schema.Schema{ // Alert returns a pointer to the resource representing an alert. 
func Alert() *schema.Resource { return &schema.Resource{ - CreateContext: TrackingCreateWrapper(resources.Alert, CreateAlert), - ReadContext: TrackingReadWrapper(resources.Alert, ReadAlert), - UpdateContext: TrackingUpdateWrapper(resources.Alert, UpdateAlert), - DeleteContext: TrackingDeleteWrapper(resources.Alert, DeleteAlert), + CreateContext: PreviewFeatureCreateContextWrapper(string(previewfeatures.AlertResource), TrackingCreateWrapper(resources.Alert, CreateAlert)), + ReadContext: PreviewFeatureReadContextWrapper(string(previewfeatures.AlertResource), TrackingReadWrapper(resources.Alert, ReadAlert)), + UpdateContext: PreviewFeatureUpdateContextWrapper(string(previewfeatures.AlertResource), TrackingUpdateWrapper(resources.Alert, UpdateAlert)), + DeleteContext: PreviewFeatureDeleteContextWrapper(string(previewfeatures.AlertResource), TrackingDeleteWrapper(resources.Alert, DeleteAlert)), Schema: alertSchema, Importer: &schema.ResourceImporter{ diff --git a/pkg/resources/api_integration.go b/pkg/resources/api_integration.go index fee0a6312c..9ec63b7632 100644 --- a/pkg/resources/api_integration.go +++ b/pkg/resources/api_integration.go @@ -6,6 +6,7 @@ import ( "log" "strings" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/provider/previewfeatures" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/provider/resources" "github.com/hashicorp/terraform-plugin-sdk/v2/diag" @@ -124,10 +125,10 @@ var apiIntegrationSchema = map[string]*schema.Schema{ // APIIntegration returns a pointer to the resource representing an api integration. 
func APIIntegration() *schema.Resource { return &schema.Resource{ - CreateContext: TrackingCreateWrapper(resources.ApiIntegration, CreateAPIIntegration), - ReadContext: TrackingReadWrapper(resources.ApiIntegration, ReadAPIIntegration), - UpdateContext: TrackingUpdateWrapper(resources.ApiIntegration, UpdateAPIIntegration), - DeleteContext: TrackingDeleteWrapper(resources.ApiIntegration, DeleteAPIIntegration), + CreateContext: PreviewFeatureCreateContextWrapper(string(previewfeatures.ApiIntegrationResource), TrackingCreateWrapper(resources.ApiIntegration, CreateAPIIntegration)), + ReadContext: PreviewFeatureReadContextWrapper(string(previewfeatures.ApiIntegrationResource), TrackingReadWrapper(resources.ApiIntegration, ReadAPIIntegration)), + UpdateContext: PreviewFeatureUpdateContextWrapper(string(previewfeatures.ApiIntegrationResource), TrackingUpdateWrapper(resources.ApiIntegration, UpdateAPIIntegration)), + DeleteContext: PreviewFeatureDeleteContextWrapper(string(previewfeatures.ApiIntegrationResource), TrackingDeleteWrapper(resources.ApiIntegration, DeleteAPIIntegration)), Schema: apiIntegrationSchema, Importer: &schema.ResourceImporter{ diff --git a/pkg/resources/authentication_policy.go b/pkg/resources/authentication_policy.go index 851cbe67d9..0ea6648178 100644 --- a/pkg/resources/authentication_policy.go +++ b/pkg/resources/authentication_policy.go @@ -6,6 +6,7 @@ import ( "fmt" "reflect" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/provider/previewfeatures" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/provider/resources" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/helpers" @@ -110,10 +111,10 @@ var authenticationPolicySchema = map[string]*schema.Schema{ // AuthenticationPolicy returns a pointer to the resource representing an authentication policy. 
func AuthenticationPolicy() *schema.Resource { return &schema.Resource{ - CreateContext: TrackingCreateWrapper(resources.AuthenticationPolicy, CreateContextAuthenticationPolicy), - ReadContext: TrackingReadWrapper(resources.AuthenticationPolicy, ReadContextAuthenticationPolicy), - UpdateContext: TrackingUpdateWrapper(resources.AuthenticationPolicy, UpdateContextAuthenticationPolicy), - DeleteContext: TrackingDeleteWrapper(resources.AuthenticationPolicy, DeleteContextAuthenticationPolicy), + CreateContext: PreviewFeatureCreateContextWrapper(string(previewfeatures.AuthenticationPolicyResource), TrackingCreateWrapper(resources.AuthenticationPolicy, CreateContextAuthenticationPolicy)), + ReadContext: PreviewFeatureReadContextWrapper(string(previewfeatures.AuthenticationPolicyResource), TrackingReadWrapper(resources.AuthenticationPolicy, ReadContextAuthenticationPolicy)), + UpdateContext: PreviewFeatureUpdateContextWrapper(string(previewfeatures.AuthenticationPolicyResource), TrackingUpdateWrapper(resources.AuthenticationPolicy, UpdateContextAuthenticationPolicy)), + DeleteContext: PreviewFeatureDeleteContextWrapper(string(previewfeatures.AuthenticationPolicyResource), TrackingDeleteWrapper(resources.AuthenticationPolicy, DeleteContextAuthenticationPolicy)), Description: "Resource used to manage authentication policy objects. 
For more information, check [authentication policy documentation](https://docs.snowflake.com/en/sql-reference/sql/create-authentication-policy).", Schema: authenticationPolicySchema, diff --git a/pkg/resources/common.go b/pkg/resources/common.go index 4c84ac1c4c..0a86a7bd63 100644 --- a/pkg/resources/common.go +++ b/pkg/resources/common.go @@ -5,7 +5,9 @@ import ( "regexp" "strings" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/internal/provider" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/internal/tracking" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/provider/previewfeatures" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/provider/resources" "github.com/hashicorp/terraform-plugin-sdk/v2/diag" @@ -156,3 +158,51 @@ func TrackingCustomDiffWrapper(resourceName resources.Resource, customdiffImplem return customdiffImplementation(ctx, diff, meta) } } + +func ensureResourceIsEnabled(featureRaw string, meta any) error { + enabled := meta.(*provider.Context).EnabledFeatures + feature, err := previewfeatures.StringToFeature(featureRaw) + if err != nil { + return err + } + if err := previewfeatures.EnsurePreviewFeatureEnabled(feature, enabled); err != nil { + return err + } + return nil +} + +func PreviewFeatureCreateContextWrapper(featureRaw string, createFunc schema.CreateContextFunc) schema.CreateContextFunc { + return func(ctx context.Context, d *schema.ResourceData, meta any) diag.Diagnostics { + if err := ensureResourceIsEnabled(featureRaw, meta); err != nil { + return diag.FromErr(err) + } + return createFunc(ctx, d, meta) + } +} + +func PreviewFeatureReadContextWrapper(featureRaw string, readFunc schema.ReadContextFunc) schema.ReadContextFunc { + return func(ctx context.Context, d *schema.ResourceData, meta any) diag.Diagnostics { + if err := ensureResourceIsEnabled(featureRaw, meta); err != nil { + return diag.FromErr(err) + } + return readFunc(ctx, d, meta) + } +} + +func 
PreviewFeatureUpdateContextWrapper(featureRaw string, updateFunc schema.UpdateContextFunc) schema.UpdateContextFunc { + return func(ctx context.Context, d *schema.ResourceData, meta any) diag.Diagnostics { + if err := ensureResourceIsEnabled(featureRaw, meta); err != nil { + return diag.FromErr(err) + } + return updateFunc(ctx, d, meta) + } +} + +func PreviewFeatureDeleteContextWrapper(featureRaw string, deleteFunc schema.DeleteContextFunc) schema.DeleteContextFunc { + return func(ctx context.Context, d *schema.ResourceData, meta any) diag.Diagnostics { + if err := ensureResourceIsEnabled(featureRaw, meta); err != nil { + return diag.FromErr(err) + } + return deleteFunc(ctx, d, meta) + } +} diff --git a/pkg/resources/cortex_search_service.go b/pkg/resources/cortex_search_service.go index f9ad0a933f..09ccb42328 100644 --- a/pkg/resources/cortex_search_service.go +++ b/pkg/resources/cortex_search_service.go @@ -7,10 +7,10 @@ import ( "log" "time" - "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/provider/resources" - "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/helpers" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/internal/provider" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/provider/previewfeatures" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/provider/resources" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/schemas" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk" "github.com/hashicorp/terraform-plugin-sdk/v2/diag" @@ -83,10 +83,10 @@ var cortexSearchServiceSchema = map[string]*schema.Schema{ // CortexSearchService returns a pointer to the resource representing a Cortex search service. 
func CortexSearchService() *schema.Resource { return &schema.Resource{ - CreateContext: TrackingCreateWrapper(resources.CortexSearchService, CreateCortexSearchService), - ReadContext: TrackingReadWrapper(resources.CortexSearchService, ReadCortexSearchService), - UpdateContext: TrackingUpdateWrapper(resources.CortexSearchService, UpdateCortexSearchService), - DeleteContext: TrackingDeleteWrapper(resources.CortexSearchService, DeleteCortexSearchService), + CreateContext: PreviewFeatureCreateContextWrapper(string(previewfeatures.CortexSearchServiceResource), TrackingCreateWrapper(resources.CortexSearchService, CreateCortexSearchService)), + ReadContext: PreviewFeatureReadContextWrapper(string(previewfeatures.CortexSearchServiceResource), TrackingReadWrapper(resources.CortexSearchService, ReadCortexSearchService)), + UpdateContext: PreviewFeatureUpdateContextWrapper(string(previewfeatures.CortexSearchServiceResource), TrackingUpdateWrapper(resources.CortexSearchService, UpdateCortexSearchService)), + DeleteContext: PreviewFeatureDeleteContextWrapper(string(previewfeatures.CortexSearchServiceResource), TrackingDeleteWrapper(resources.CortexSearchService, DeleteCortexSearchService)), Schema: cortexSearchServiceSchema, Importer: &schema.ResourceImporter{ diff --git a/pkg/resources/database_old.go b/pkg/resources/database_old.go deleted file mode 100644 index 6c6b6c3641..0000000000 --- a/pkg/resources/database_old.go +++ /dev/null @@ -1,368 +0,0 @@ -package resources - -import ( - "context" - "fmt" - "log" - "slices" - "strconv" - - "github.com/hashicorp/terraform-plugin-sdk/v2/diag" - - "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/provider/resources" - - "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/helpers" - "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/internal/provider" - "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk" - "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" - 
"github.com/hashicorp/terraform-plugin-sdk/v2/helper/validation" -) - -var databaseOldSchema = map[string]*schema.Schema{ - "name": { - Type: schema.TypeString, - Required: true, - Description: "Specifies the identifier for the database; must be unique for your account.", - }, - "comment": { - Type: schema.TypeString, - Optional: true, - Default: "", - Description: "Specifies a comment for the database.", - }, - "is_transient": { - Type: schema.TypeBool, - Optional: true, - Default: false, - Description: "Specifies a database as transient. Transient databases do not have a Fail-safe period so they do not incur additional storage costs once they leave Time Travel; however, this means they are also not protected by Fail-safe in the event of a data loss.", - ForceNew: true, - }, - "data_retention_time_in_days": { - Type: schema.TypeInt, - Optional: true, - Default: IntDefault, - Description: "Number of days for which Snowflake retains historical data for performing Time Travel actions (SELECT, CLONE, UNDROP) on the object. A value of 0 effectively disables Time Travel for the specified database. Default value for this field is set to -1, which is a fallback to use Snowflake default. For more information, see [Understanding & Using Time Travel](https://docs.snowflake.com/en/user-guide/data-time-travel).", - ValidateFunc: validation.IntBetween(-1, 90), - }, - "from_share": { - Type: schema.TypeMap, - Elem: &schema.Schema{Type: schema.TypeString}, - Description: "Specify a provider and a share in this map to create a database from a share. 
As of version 0.87.0, the provider field is the account locator.", - Optional: true, - ForceNew: true, - ConflictsWith: []string{"from_database", "from_replica"}, - }, - "from_database": { - Type: schema.TypeString, - Description: "Specify a database to create a clone from.", - Optional: true, - ForceNew: true, - ConflictsWith: []string{"from_share", "from_replica"}, - }, - "from_replica": { - Type: schema.TypeString, - Description: "Specify a fully-qualified path to a database to create a replica from. A fully qualified path follows the format of `\"\".\"\".\"\"`. An example would be: `\"myorg1\".\"account1\".\"db1\"`", - Optional: true, - ForceNew: true, - ConflictsWith: []string{"from_share", "from_database"}, - }, - "replication_configuration": { - Type: schema.TypeList, - Description: "When set, specifies the configurations for database replication.", - Optional: true, - MaxItems: 1, - Elem: &schema.Resource{ - Schema: map[string]*schema.Schema{ - "accounts": { - Type: schema.TypeList, - Required: true, - MinItems: 1, - Elem: &schema.Schema{Type: schema.TypeString}, - }, - "ignore_edition_check": { - Type: schema.TypeBool, - Default: true, - Optional: true, - }, - }, - }, - }, -} - -// Database returns a pointer to the resource representing a database. -func DatabaseOld() *schema.Resource { - return &schema.Resource{ - CreateContext: TrackingCreateWrapper(resources.DatabaseOld, CreateDatabaseOld), - ReadContext: TrackingReadWrapper(resources.DatabaseOld, ReadDatabaseOld), - DeleteContext: TrackingDeleteWrapper(resources.DatabaseOld, DeleteDatabaseOld), - UpdateContext: TrackingUpdateWrapper(resources.DatabaseOld, UpdateDatabaseOld), - DeprecationMessage: "This resource is deprecated and will be removed in a future major version release. 
Please use snowflake_database or snowflake_shared_database or snowflake_secondary_database instead.", - - Schema: databaseOldSchema, - Importer: &schema.ResourceImporter{ - StateContext: TrackingImportWrapper(resources.DatabaseOld, ImportName[sdk.AccountObjectIdentifier]), - }, - } -} - -// CreateDatabase implements schema.CreateFunc. -func CreateDatabaseOld(ctx context.Context, d *schema.ResourceData, meta interface{}) diag.Diagnostics { - client := meta.(*provider.Context).Client - name := d.Get("name").(string) - id := sdk.NewAccountObjectIdentifier(name) - - // Is it a Shared Database? - if fromShare, ok := d.GetOk("from_share"); ok { - account := fromShare.(map[string]interface{})["provider"].(string) - share := fromShare.(map[string]interface{})["share"].(string) - shareID := sdk.NewExternalObjectIdentifier(sdk.NewAccountIdentifierFromAccountLocator(account), sdk.NewAccountObjectIdentifier(share)) - opts := &sdk.CreateSharedDatabaseOptions{} - if v, ok := d.GetOk("comment"); ok { - opts.Comment = sdk.String(v.(string)) - } - err := client.Databases.CreateShared(ctx, id, shareID, opts) - if err != nil { - return diag.FromErr(fmt.Errorf("error creating database %v: %w", name, err)) - } - d.SetId(name) - return ReadDatabaseOld(ctx, d, meta) - } - // Is it a Secondary Database? 
- if primaryName, ok := d.GetOk("from_replica"); ok { - primaryID := sdk.NewExternalObjectIdentifierFromFullyQualifiedName(primaryName.(string)) - opts := &sdk.CreateSecondaryDatabaseOptions{} - if v := d.Get("data_retention_time_in_days"); v.(int) != IntDefault { - opts.DataRetentionTimeInDays = sdk.Int(v.(int)) - } - err := client.Databases.CreateSecondary(ctx, id, primaryID, opts) - if err != nil { - return diag.FromErr(fmt.Errorf("error creating database %v: %w", name, err)) - } - d.SetId(name) - // todo: add failover_configuration block - return ReadDatabaseOld(ctx, d, meta) - } - - // Otherwise it is a Standard Database - opts := sdk.CreateDatabaseOptions{} - if v, ok := d.GetOk("comment"); ok { - opts.Comment = sdk.String(v.(string)) - } - - if v, ok := d.GetOk("is_transient"); ok && v.(bool) { - opts.Transient = sdk.Bool(v.(bool)) - } - - if v, ok := d.GetOk("from_database"); ok { - opts.Clone = &sdk.Clone{ - SourceObject: sdk.NewAccountObjectIdentifier(v.(string)), - } - } - - if v := d.Get("data_retention_time_in_days"); v.(int) != IntDefault { - opts.DataRetentionTimeInDays = sdk.Int(v.(int)) - } - - err := client.Databases.Create(ctx, id, &opts) - if err != nil { - return diag.FromErr(fmt.Errorf("error creating database %v: %w", name, err)) - } - d.SetId(name) - - if v, ok := d.GetOk("replication_configuration"); ok { - replicationConfiguration := v.([]interface{})[0].(map[string]interface{}) - accounts := replicationConfiguration["accounts"].([]interface{}) - accountIDs := make([]sdk.AccountIdentifier, len(accounts)) - for i, account := range accounts { - accountIDs[i] = sdk.NewAccountIdentifierFromAccountLocator(account.(string)) - } - opts := &sdk.AlterDatabaseReplicationOptions{ - EnableReplication: &sdk.EnableReplication{ - ToAccounts: accountIDs, - }, - } - if ignoreEditionCheck, ok := replicationConfiguration["ignore_edition_check"]; ok { - opts.EnableReplication.IgnoreEditionCheck = sdk.Bool(ignoreEditionCheck.(bool)) - } - err := 
client.Databases.AlterReplication(ctx, id, opts) - if err != nil { - return diag.FromErr(fmt.Errorf("error enabling replication for database %v: %w", name, err)) - } - } - - return ReadDatabaseOld(ctx, d, meta) -} - -func ReadDatabaseOld(ctx context.Context, d *schema.ResourceData, meta interface{}) diag.Diagnostics { - client := meta.(*provider.Context).Client - id := helpers.DecodeSnowflakeID(d.Id()).(sdk.AccountObjectIdentifier) - - database, err := client.Databases.ShowByID(ctx, id) - if err != nil { - d.SetId("") - log.Printf("Database %s not found, err = %s", id.Name(), err) - return nil - } - - if err := d.Set("comment", database.Comment); err != nil { - return diag.FromErr(err) - } - - dataRetention, err := client.Parameters.ShowAccountParameter(ctx, sdk.AccountParameterDataRetentionTimeInDays) - if err != nil { - return diag.FromErr(err) - } - paramDataRetention, err := strconv.Atoi(dataRetention.Value) - if err != nil { - return diag.FromErr(err) - } - - if dataRetentionDays := d.Get("data_retention_time_in_days"); dataRetentionDays.(int) != IntDefault || database.RetentionTime != paramDataRetention { - if err := d.Set("data_retention_time_in_days", database.RetentionTime); err != nil { - return diag.FromErr(err) - } - } - - if err := d.Set("is_transient", database.Transient); err != nil { - return diag.FromErr(err) - } - - return nil -} - -func UpdateDatabaseOld(ctx context.Context, d *schema.ResourceData, meta interface{}) diag.Diagnostics { - id := helpers.DecodeSnowflakeID(d.Id()).(sdk.AccountObjectIdentifier) - client := meta.(*provider.Context).Client - - if d.HasChange("name") { - newName := d.Get("name").(string) - newId := sdk.NewAccountObjectIdentifier(newName) - opts := &sdk.AlterDatabaseOptions{ - NewName: &newId, - } - err := client.Databases.Alter(ctx, id, opts) - if err != nil { - return diag.FromErr(fmt.Errorf("error updating database name on %v err = %w", d.Id(), err)) - } - d.SetId(helpers.EncodeSnowflakeID(newId)) - id = newId - } - - 
if d.HasChange("comment") { - comment := "" - if c := d.Get("comment"); c != nil { - comment = c.(string) - } - opts := &sdk.AlterDatabaseOptions{ - Set: &sdk.DatabaseSet{ - Comment: sdk.String(comment), - }, - } - err := client.Databases.Alter(ctx, id, opts) - if err != nil { - return diag.FromErr(fmt.Errorf("error updating database comment on %v err = %w", d.Id(), err)) - } - } - - if d.HasChange("data_retention_time_in_days") { - if days := d.Get("data_retention_time_in_days"); days.(int) != IntDefault { - err := client.Databases.Alter(ctx, id, &sdk.AlterDatabaseOptions{ - Set: &sdk.DatabaseSet{ - DataRetentionTimeInDays: sdk.Int(days.(int)), - }, - }) - if err != nil { - return diag.FromErr(fmt.Errorf("error when setting database data retention time on %v err = %w", d.Id(), err)) - } - } else { - err := client.Databases.Alter(ctx, id, &sdk.AlterDatabaseOptions{ - Unset: &sdk.DatabaseUnset{ - DataRetentionTimeInDays: sdk.Bool(true), - }, - }) - if err != nil { - return diag.FromErr(fmt.Errorf("error when usetting database data retention time on %v err = %w", d.Id(), err)) - } - } - } - - // If replication configuration changes, need to update accounts that have permission to replicate database - if d.HasChange("replication_configuration") { - oldConfig, newConfig := d.GetChange("replication_configuration") - - newAccountIDs := make([]sdk.AccountIdentifier, 0) - ignoreEditionCheck := false - if len(newConfig.([]interface{})) != 0 { - newAccounts := newConfig.([]interface{})[0].(map[string]interface{})["accounts"].([]interface{}) - for _, account := range newAccounts { - newAccountIDs = append(newAccountIDs, sdk.NewAccountIdentifierFromAccountLocator(account.(string))) - } - ignoreEditionCheck = newConfig.([]interface{})[0].(map[string]interface{})["ignore_edition_check"].(bool) - } - - oldAccountIDs := make([]sdk.AccountIdentifier, 0) - if len(oldConfig.([]interface{})) != 0 { - oldAccounts := 
oldConfig.([]interface{})[0].(map[string]interface{})["accounts"].([]interface{}) - for _, account := range oldAccounts { - oldAccountIDs = append(oldAccountIDs, sdk.NewAccountIdentifierFromAccountLocator(account.(string))) - } - } - - accountsToRemove := make([]sdk.AccountIdentifier, 0) - accountsToAdd := make([]sdk.AccountIdentifier, 0) - // Find accounts to remove - for _, oldAccountID := range oldAccountIDs { - if !slices.Contains(newAccountIDs, oldAccountID) { - accountsToRemove = append(accountsToRemove, oldAccountID) - } - } - - // Find accounts to add - for _, newAccountID := range newAccountIDs { - if !slices.Contains(oldAccountIDs, newAccountID) { - accountsToAdd = append(accountsToAdd, newAccountID) - } - } - if len(accountsToAdd) > 0 { - opts := &sdk.AlterDatabaseReplicationOptions{ - EnableReplication: &sdk.EnableReplication{ - ToAccounts: accountsToAdd, - }, - } - if ignoreEditionCheck { - opts.EnableReplication.IgnoreEditionCheck = sdk.Bool(ignoreEditionCheck) - } - err := client.Databases.AlterReplication(ctx, id, opts) - if err != nil { - return diag.FromErr(fmt.Errorf("error enabling replication configuration on %v err = %w", d.Id(), err)) - } - } - - if len(accountsToRemove) > 0 { - opts := &sdk.AlterDatabaseReplicationOptions{ - DisableReplication: &sdk.DisableReplication{ - ToAccounts: accountsToRemove, - }, - } - err := client.Databases.AlterReplication(ctx, id, opts) - if err != nil { - return diag.FromErr(fmt.Errorf("error disabling replication configuration on %v err = %w", d.Id(), err)) - } - } - } - - return ReadDatabaseOld(ctx, d, meta) -} - -func DeleteDatabaseOld(ctx context.Context, d *schema.ResourceData, meta interface{}) diag.Diagnostics { - client := meta.(*provider.Context).Client - id := helpers.DecodeSnowflakeID(d.Id()).(sdk.AccountObjectIdentifier) - err := client.Databases.Drop(ctx, id, &sdk.DropDatabaseOptions{ - IfExists: sdk.Bool(true), - }) - if err != nil { - return diag.FromErr(err) - } - d.SetId("") - return nil -} 
diff --git a/pkg/resources/database_old_acceptance_test.go b/pkg/resources/database_old_acceptance_test.go deleted file mode 100644 index 31d5e46702..0000000000 --- a/pkg/resources/database_old_acceptance_test.go +++ /dev/null @@ -1,448 +0,0 @@ -package resources_test - -import ( - "context" - "fmt" - "strconv" - "testing" - - acc "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance" - r "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/resources" - - "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/internal/provider" - "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/provider/resources" - "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk" - "github.com/hashicorp/terraform-plugin-testing/config" - "github.com/hashicorp/terraform-plugin-testing/helper/resource" - "github.com/hashicorp/terraform-plugin-testing/plancheck" - "github.com/hashicorp/terraform-plugin-testing/terraform" - "github.com/hashicorp/terraform-plugin-testing/tfversion" -) - -func TestAcc_DatabaseWithUnderscore(t *testing.T) { - prefix := acc.TestClient().Ids.AlphaWithPrefix("_") - - resource.Test(t, resource.TestCase{ - ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, - PreCheck: func() { acc.TestAccPreCheck(t) }, - TerraformVersionChecks: []tfversion.TerraformVersionCheck{ - tfversion.RequireAbove(tfversion.Version1_5_0), - }, - CheckDestroy: acc.CheckDestroy(t, resources.DatabaseOld), - Steps: []resource.TestStep{ - { - Config: dbConfig(prefix), - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr("snowflake_database_old.db", "name", prefix), - resource.TestCheckResourceAttr("snowflake_database_old.db", "comment", "test comment"), - resource.TestCheckResourceAttrSet("snowflake_database_old.db", "data_retention_time_in_days"), - ), - }, - }, - }) -} - -func TestAcc_Database(t *testing.T) { - prefix := acc.TestClient().Ids.Alpha() - prefix2 := acc.TestClient().Ids.Alpha() - - secondaryAccountName := 
acc.SecondaryTestClient().Context.CurrentAccount(t) - - resource.Test(t, resource.TestCase{ - ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, - PreCheck: func() { acc.TestAccPreCheck(t) }, - TerraformVersionChecks: []tfversion.TerraformVersionCheck{ - tfversion.RequireAbove(tfversion.Version1_5_0), - }, - CheckDestroy: acc.CheckDestroy(t, resources.DatabaseOld), - Steps: []resource.TestStep{ - { - Config: dbConfig(prefix), - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr("snowflake_database_old.db", "name", prefix), - resource.TestCheckResourceAttr("snowflake_database_old.db", "comment", "test comment"), - resource.TestCheckResourceAttrSet("snowflake_database_old.db", "data_retention_time_in_days"), - ), - }, - // RENAME - { - Config: dbConfig(prefix2), - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr("snowflake_database_old.db", "name", prefix2), - resource.TestCheckResourceAttr("snowflake_database_old.db", "comment", "test comment"), - resource.TestCheckResourceAttrSet("snowflake_database_old.db", "data_retention_time_in_days"), - ), - }, - // CHANGE PROPERTIES - { - Config: dbConfig2(prefix2), - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr("snowflake_database_old.db", "name", prefix2), - resource.TestCheckResourceAttr("snowflake_database_old.db", "comment", "test comment 2"), - resource.TestCheckResourceAttr("snowflake_database_old.db", "data_retention_time_in_days", "3"), - ), - }, - // ADD REPLICATION - // proves https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2369 error - { - Config: dbConfigWithReplication(prefix2, secondaryAccountName), - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr("snowflake_database_old.db", "name", prefix2), - resource.TestCheckResourceAttr("snowflake_database_old.db", "comment", "test comment 2"), - resource.TestCheckResourceAttr("snowflake_database_old.db", "data_retention_time_in_days", "3"), - 
resource.TestCheckResourceAttr("snowflake_database_old.db", "replication_configuration.#", "1"), - resource.TestCheckResourceAttr("snowflake_database_old.db", "replication_configuration.0.accounts.#", "1"), - resource.TestCheckResourceAttr("snowflake_database_old.db", "replication_configuration.0.accounts.0", secondaryAccountName), - ), - }, - // IMPORT - { - ResourceName: "snowflake_database_old.db", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"replication_configuration"}, - }, - }, - }) -} - -func TestAcc_DatabaseRemovedOutsideOfTerraform(t *testing.T) { - id := acc.TestClient().Ids.RandomAccountObjectIdentifier() - name := id.Name() - - resource.Test(t, resource.TestCase{ - ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, - PreCheck: func() { acc.TestAccPreCheck(t) }, - TerraformVersionChecks: []tfversion.TerraformVersionCheck{ - tfversion.RequireAbove(tfversion.Version1_5_0), - }, - CheckDestroy: acc.CheckDestroy(t, resources.DatabaseOld), - Steps: []resource.TestStep{ - { - ConfigDirectory: config.TestNameDirectory(), - ConfigVariables: map[string]config.Variable{ - "db": config.StringVariable(name), - }, - ConfigPlanChecks: resource.ConfigPlanChecks{ - PreApply: []plancheck.PlanCheck{plancheck.ExpectNonEmptyPlan()}, - }, - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr("snowflake_database_old.db", "name", name), - resource.TestCheckResourceAttr("snowflake_database_old.db", "comment", "test comment"), - testAccCheckDatabaseExistence(t, id, true), - ), - }, - { - PreConfig: func() { acc.TestClient().Database.DropDatabaseFunc(t, id)() }, - ConfigDirectory: config.TestNameDirectory(), - ConfigVariables: map[string]config.Variable{ - "db": config.StringVariable(name), - }, - ConfigPlanChecks: resource.ConfigPlanChecks{ - PreApply: []plancheck.PlanCheck{plancheck.ExpectNonEmptyPlan()}, - }, - Check: resource.ComposeTestCheckFunc( - 
resource.TestCheckResourceAttr("snowflake_database_old.db", "name", name), - resource.TestCheckResourceAttr("snowflake_database_old.db", "comment", "test comment"), - testAccCheckDatabaseExistence(t, id, true), - ), - }, - }, - }) -} - -// proves https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2021 -func TestAcc_Database_issue2021(t *testing.T) { - name := acc.TestClient().Ids.Alpha() - - secondaryAccountName := acc.SecondaryTestClient().Context.CurrentAccount(t) - - resource.Test(t, resource.TestCase{ - ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, - PreCheck: func() { acc.TestAccPreCheck(t) }, - TerraformVersionChecks: []tfversion.TerraformVersionCheck{ - tfversion.RequireAbove(tfversion.Version1_5_0), - }, - CheckDestroy: acc.CheckDestroy(t, resources.DatabaseOld), - Steps: []resource.TestStep{ - { - Config: dbConfigWithReplication(name, secondaryAccountName), - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr("snowflake_database_old.db", "name", name), - resource.TestCheckResourceAttr("snowflake_database_old.db", "replication_configuration.#", "1"), - resource.TestCheckResourceAttr("snowflake_database_old.db", "replication_configuration.0.accounts.#", "1"), - resource.TestCheckResourceAttr("snowflake_database_old.db", "replication_configuration.0.accounts.0", secondaryAccountName), - testAccCheckIfDatabaseIsReplicated(t, name), - ), - }, - }, - }) -} - -// proves https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2356 issue is fixed. 
-func TestAcc_Database_DefaultDataRetentionTime(t *testing.T) { - id := acc.TestClient().Ids.RandomAccountObjectIdentifier() - - configVariablesWithoutDatabaseDataRetentionTime := func() config.Variables { - return config.Variables{ - "database": config.StringVariable(id.Name()), - } - } - - configVariablesWithDatabaseDataRetentionTime := func(databaseDataRetentionTime int) config.Variables { - vars := configVariablesWithoutDatabaseDataRetentionTime() - vars["database_data_retention_time"] = config.IntegerVariable(databaseDataRetentionTime) - return vars - } - - resource.Test(t, resource.TestCase{ - ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, - PreCheck: func() { acc.TestAccPreCheck(t) }, - TerraformVersionChecks: []tfversion.TerraformVersionCheck{ - tfversion.RequireAbove(tfversion.Version1_5_0), - }, - CheckDestroy: acc.CheckDestroy(t, resources.DatabaseOld), - Steps: []resource.TestStep{ - { - PreConfig: func() { - revertParameter := acc.TestClient().Parameter.UpdateAccountParameterTemporarily(t, sdk.AccountParameterDataRetentionTimeInDays, "5") - t.Cleanup(revertParameter) - }, - ConfigDirectory: acc.ConfigurationDirectory("TestAcc_Database_DefaultDataRetentionTime/WithoutDataRetentionSet"), - ConfigVariables: configVariablesWithoutDatabaseDataRetentionTime(), - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr("snowflake_database_old.test", "data_retention_time_in_days", r.IntDefaultString), - checkAccountAndDatabaseDataRetentionTime(t, id, 5, 5), - ), - }, - { - PreConfig: func() { - _ = acc.TestClient().Parameter.UpdateAccountParameterTemporarily(t, sdk.AccountParameterDataRetentionTimeInDays, "10") - }, - ConfigDirectory: acc.ConfigurationDirectory("TestAcc_Database_DefaultDataRetentionTime/WithoutDataRetentionSet"), - ConfigVariables: configVariablesWithoutDatabaseDataRetentionTime(), - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr("snowflake_database_old.test", "data_retention_time_in_days", 
r.IntDefaultString), - checkAccountAndDatabaseDataRetentionTime(t, id, 10, 10), - ), - }, - { - ConfigDirectory: acc.ConfigurationDirectory("TestAcc_Database_DefaultDataRetentionTime/WithDataRetentionSet"), - ConfigVariables: configVariablesWithDatabaseDataRetentionTime(5), - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr("snowflake_database_old.test", "data_retention_time_in_days", "5"), - checkAccountAndDatabaseDataRetentionTime(t, id, 10, 5), - ), - }, - { - ConfigDirectory: acc.ConfigurationDirectory("TestAcc_Database_DefaultDataRetentionTime/WithDataRetentionSet"), - ConfigVariables: configVariablesWithDatabaseDataRetentionTime(15), - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr("snowflake_database_old.test", "data_retention_time_in_days", "15"), - checkAccountAndDatabaseDataRetentionTime(t, id, 10, 15), - ), - }, - { - ConfigDirectory: acc.ConfigurationDirectory("TestAcc_Database_DefaultDataRetentionTime/WithoutDataRetentionSet"), - ConfigVariables: configVariablesWithoutDatabaseDataRetentionTime(), - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr("snowflake_database_old.test", "data_retention_time_in_days", r.IntDefaultString), - checkAccountAndDatabaseDataRetentionTime(t, id, 10, 10), - ), - }, - { - ConfigDirectory: acc.ConfigurationDirectory("TestAcc_Database_DefaultDataRetentionTime/WithDataRetentionSet"), - ConfigVariables: configVariablesWithDatabaseDataRetentionTime(0), - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr("snowflake_database_old.test", "data_retention_time_in_days", "0"), - checkAccountAndDatabaseDataRetentionTime(t, id, 10, 0), - ), - }, - { - ConfigDirectory: acc.ConfigurationDirectory("TestAcc_Database_DefaultDataRetentionTime/WithDataRetentionSet"), - ConfigVariables: configVariablesWithDatabaseDataRetentionTime(3), - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr("snowflake_database_old.test", 
"data_retention_time_in_days", "3"), - checkAccountAndDatabaseDataRetentionTime(t, id, 10, 3), - ), - }, - }, - }) -} - -// proves https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2356 issue is fixed. -func TestAcc_Database_DefaultDataRetentionTime_SetOutsideOfTerraform(t *testing.T) { - id := acc.TestClient().Ids.RandomAccountObjectIdentifier() - - configVariablesWithoutDatabaseDataRetentionTime := func() config.Variables { - return config.Variables{ - "database": config.StringVariable(id.Name()), - } - } - - configVariablesWithDatabaseDataRetentionTime := func(databaseDataRetentionTime int) config.Variables { - vars := configVariablesWithoutDatabaseDataRetentionTime() - vars["database_data_retention_time"] = config.IntegerVariable(databaseDataRetentionTime) - return vars - } - - resource.Test(t, resource.TestCase{ - ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, - PreCheck: func() { acc.TestAccPreCheck(t) }, - TerraformVersionChecks: []tfversion.TerraformVersionCheck{ - tfversion.RequireAbove(tfversion.Version1_5_0), - }, - CheckDestroy: acc.CheckDestroy(t, resources.DatabaseOld), - Steps: []resource.TestStep{ - { - PreConfig: func() { - revertParameter := acc.TestClient().Parameter.UpdateAccountParameterTemporarily(t, sdk.AccountParameterDataRetentionTimeInDays, "5") - t.Cleanup(revertParameter) - }, - ConfigDirectory: acc.ConfigurationDirectory("TestAcc_Database_DefaultDataRetentionTime/WithoutDataRetentionSet"), - ConfigVariables: configVariablesWithoutDatabaseDataRetentionTime(), - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr("snowflake_database_old.test", "data_retention_time_in_days", r.IntDefaultString), - checkAccountAndDatabaseDataRetentionTime(t, id, 5, 5), - ), - }, - { - PreConfig: func() { acc.TestClient().Database.UpdateDataRetentionTime(t, id, 20) }, - ConfigDirectory: acc.ConfigurationDirectory("TestAcc_Database_DefaultDataRetentionTime/WithoutDataRetentionSet"), - ConfigVariables: 
configVariablesWithoutDatabaseDataRetentionTime(), - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr("snowflake_database_old.test", "data_retention_time_in_days", r.IntDefaultString), - checkAccountAndDatabaseDataRetentionTime(t, id, 5, 5), - ), - }, - { - PreConfig: func() { - _ = acc.TestClient().Parameter.UpdateAccountParameterTemporarily(t, sdk.AccountParameterDataRetentionTimeInDays, "10") - }, - ConfigDirectory: acc.ConfigurationDirectory("TestAcc_Database_DefaultDataRetentionTime/WithDataRetentionSet"), - ConfigVariables: configVariablesWithDatabaseDataRetentionTime(3), - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr("snowflake_database_old.test", "data_retention_time_in_days", "3"), - checkAccountAndDatabaseDataRetentionTime(t, id, 10, 3), - ), - ConfigPlanChecks: resource.ConfigPlanChecks{ - PostApplyPostRefresh: []plancheck.PlanCheck{ - plancheck.ExpectEmptyPlan(), - }, - }, - }, - }, - }) -} - -func dbConfig(prefix string) string { - s := ` -resource "snowflake_database_old" "db" { - name = "%s" - comment = "test comment" -} -` - return fmt.Sprintf(s, prefix) -} - -func dbConfig2(prefix string) string { - s := ` -resource "snowflake_database_old" "db" { - name = "%s" - comment = "test comment 2" - data_retention_time_in_days = 3 -} -` - return fmt.Sprintf(s, prefix) -} - -func dbConfigWithReplication(prefix string, secondaryAccountName string) string { - s := ` -resource "snowflake_database_old" "db" { - name = "%s" - comment = "test comment 2" - data_retention_time_in_days = 3 - replication_configuration { - accounts = [ - "%s" - ] - } -} -` - return fmt.Sprintf(s, prefix, secondaryAccountName) -} - -// TODO [SNOW-936093]: this is used mostly as check for unsafe execute, not as normal check destroy in other resources. Handle with the helpers cleanup. 
-func testAccCheckDatabaseExistence(t *testing.T, id sdk.AccountObjectIdentifier, shouldExist bool) func(state *terraform.State) error { - t.Helper() - return func(state *terraform.State) error { - _, err := acc.TestClient().Database.Show(t, id) - if shouldExist { - if err != nil { - return fmt.Errorf("error while retrieving database %s, err = %w", id, err) - } - } else { - if err == nil { - return fmt.Errorf("database %v still exists", id) - } - } - return nil - } -} - -func testAccCheckIfDatabaseIsReplicated(t *testing.T, id string) func(state *terraform.State) error { - t.Helper() - return func(state *terraform.State) error { - replicationDatabases, err := acc.TestClient().Database.ShowAllReplicationDatabases(t) - if err != nil { - return err - } - - var exists bool - for _, o := range replicationDatabases { - if o.Name == id { - exists = true - break - } - } - - if !exists { - return fmt.Errorf("database %s should be replicated", id) - } - - return nil - } -} - -func checkAccountAndDatabaseDataRetentionTime(t *testing.T, id sdk.AccountObjectIdentifier, expectedAccountRetentionDays int, expectedDatabaseRetentionsDays int) func(state *terraform.State) error { - t.Helper() - return func(state *terraform.State) error { - providerContext := acc.TestAccProvider.Meta().(*provider.Context) - client := providerContext.Client - ctx := context.Background() - - database, err := acc.TestClient().Database.Show(t, id) - if err != nil { - return err - } - - if database.RetentionTime != expectedDatabaseRetentionsDays { - return fmt.Errorf("invalid database retention time, expected: %d, got: %d", expectedDatabaseRetentionsDays, database.RetentionTime) - } - - param, err := client.Parameters.ShowAccountParameter(ctx, sdk.AccountParameterDataRetentionTimeInDays) - if err != nil { - return err - } - accountRetentionDays, err := strconv.Atoi(param.Value) - if err != nil { - return err - } - - if accountRetentionDays != expectedAccountRetentionDays { - return fmt.Errorf("invalid 
account retention time, expected: %d, got: %d", expectedAccountRetentionDays, accountRetentionDays) - } - - return nil - } -} diff --git a/pkg/resources/database_state_upgraders.go b/pkg/resources/database_state_upgraders.go index 06ff004771..91bcd24c1a 100644 --- a/pkg/resources/database_state_upgraders.go +++ b/pkg/resources/database_state_upgraders.go @@ -17,15 +17,15 @@ func v092DatabaseStateUpgrader(ctx context.Context, rawState map[string]any, met } if v, ok := rawState["from_share"]; ok && v != nil && len(v.(map[string]any)) > 0 { - return nil, fmt.Errorf("failed to upgrade the state with database created from share, please use snowflake_shared_database or deprecated snowflake_database_old instead") + return nil, fmt.Errorf("failed to upgrade the state with database created from share, please use snowflake_shared_database instead") } if v, ok := rawState["from_replica"]; ok && v != nil && len(v.(string)) > 0 { - return nil, fmt.Errorf("failed to upgrade the state with database created from replica, please use snowflake_secondary_database or deprecated snowflake_database_old instead") + return nil, fmt.Errorf("failed to upgrade the state with database created from replica, please use snowflake_secondary_database instead") } if v, ok := rawState["from_database"]; ok && v != nil && len(v.(string)) > 0 { - return nil, fmt.Errorf("failed to upgrade the state with database created from database, please use snowflake_database or deprecated snowflake_database_old instead. Dislaimer: Right now, database cloning is not supported. They can be imported into mentioned resources, but any differetnce in behavior from standard database won't be handled (and can result in errors)") + return nil, fmt.Errorf("failed to upgrade the state with database created from database, please use snowflake_database instead. Disclaimer: Right now, database cloning is not supported. 
They can be imported into the mentioned resource, but any difference in behavior from standard database won't be handled (and can result in errors)") } if replicationConfigurations, ok := rawState["replication_configuration"]; ok && len(replicationConfigurations.([]any)) == 1 { diff --git a/pkg/resources/deprecated_helpers_test.go b/pkg/resources/deprecated_helpers_test.go deleted file mode 100644 index 34601967d1..0000000000 --- a/pkg/resources/deprecated_helpers_test.go +++ /dev/null @@ -1,31 +0,0 @@ -package resources_test - -import ( - "testing" - - "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/resources" - "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" - "github.com/stretchr/testify/require" -) - -/** - * Will be removed while adding security integrations to the SDK. - */ - -func samlIntegration(t *testing.T, id string, params map[string]interface{}) *schema.ResourceData { - t.Helper() - r := require.New(t) - d := schema.TestResourceDataRaw(t, resources.SAMLIntegration().Schema, params) - r.NotNil(d) - d.SetId(id) - return d -} - -func oauthIntegration(t *testing.T, id string, params map[string]interface{}) *schema.ResourceData { - t.Helper() - r := require.New(t) - d := schema.TestResourceDataRaw(t, resources.OAuthIntegration().Schema, params) - r.NotNil(d) - d.SetId(id) - return d -} diff --git a/pkg/resources/dynamic_table.go b/pkg/resources/dynamic_table.go index 72446d5b91..409939b62d 100644 --- a/pkg/resources/dynamic_table.go +++ b/pkg/resources/dynamic_table.go @@ -7,6 +7,7 @@ import ( "strings" "time" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/provider/previewfeatures" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/provider/resources" "github.com/hashicorp/terraform-plugin-sdk/v2/diag" @@ -166,10 +167,10 @@ var dynamicTableSchema = map[string]*schema.Schema{ // DynamicTable returns a pointer to the resource representing a dynamic table. 
func DynamicTable() *schema.Resource { return &schema.Resource{ - CreateContext: TrackingCreateWrapper(resources.DynamicTable, CreateDynamicTable), - ReadContext: TrackingReadWrapper(resources.DynamicTable, ReadDynamicTable), - UpdateContext: TrackingUpdateWrapper(resources.DynamicTable, UpdateDynamicTable), - DeleteContext: TrackingDeleteWrapper(resources.DynamicTable, DeleteDynamicTable), + CreateContext: PreviewFeatureCreateContextWrapper(string(previewfeatures.DynamicTableResource), TrackingCreateWrapper(resources.DynamicTable, CreateDynamicTable)), + ReadContext: PreviewFeatureReadContextWrapper(string(previewfeatures.DynamicTableResource), TrackingReadWrapper(resources.DynamicTable, ReadDynamicTable)), + UpdateContext: PreviewFeatureUpdateContextWrapper(string(previewfeatures.DynamicTableResource), TrackingUpdateWrapper(resources.DynamicTable, UpdateDynamicTable)), + DeleteContext: PreviewFeatureDeleteContextWrapper(string(previewfeatures.DynamicTableResource), TrackingDeleteWrapper(resources.DynamicTable, DeleteDynamicTable)), Schema: dynamicTableSchema, Importer: &schema.ResourceImporter{ diff --git a/pkg/resources/email_notification_integration.go b/pkg/resources/email_notification_integration.go index bd0a38c64e..b5a794a5c0 100644 --- a/pkg/resources/email_notification_integration.go +++ b/pkg/resources/email_notification_integration.go @@ -6,6 +6,7 @@ import ( "log" "strings" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/provider/previewfeatures" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/provider/resources" "github.com/hashicorp/terraform-plugin-sdk/v2/diag" @@ -45,10 +46,10 @@ var emailNotificationIntegrationSchema = map[string]*schema.Schema{ // EmailNotificationIntegration returns a pointer to the resource representing a notification integration. 
func EmailNotificationIntegration() *schema.Resource { return &schema.Resource{ - CreateContext: TrackingCreateWrapper(resources.EmailNotificationIntegration, CreateEmailNotificationIntegration), - ReadContext: TrackingReadWrapper(resources.EmailNotificationIntegration, ReadEmailNotificationIntegration), - UpdateContext: TrackingUpdateWrapper(resources.EmailNotificationIntegration, UpdateEmailNotificationIntegration), - DeleteContext: TrackingDeleteWrapper(resources.EmailNotificationIntegration, DeleteEmailNotificationIntegration), + CreateContext: PreviewFeatureCreateContextWrapper(string(previewfeatures.EmailNotificationIntegrationResource), TrackingCreateWrapper(resources.EmailNotificationIntegration, CreateEmailNotificationIntegration)), + ReadContext: PreviewFeatureReadContextWrapper(string(previewfeatures.EmailNotificationIntegrationResource), TrackingReadWrapper(resources.EmailNotificationIntegration, ReadEmailNotificationIntegration)), + UpdateContext: PreviewFeatureUpdateContextWrapper(string(previewfeatures.EmailNotificationIntegrationResource), TrackingUpdateWrapper(resources.EmailNotificationIntegration, UpdateEmailNotificationIntegration)), + DeleteContext: PreviewFeatureDeleteContextWrapper(string(previewfeatures.EmailNotificationIntegrationResource), TrackingDeleteWrapper(resources.EmailNotificationIntegration, DeleteEmailNotificationIntegration)), Schema: emailNotificationIntegrationSchema, Importer: &schema.ResourceImporter{ diff --git a/pkg/resources/execute.go b/pkg/resources/execute.go index 6ad9b189cd..acd5c26926 100644 --- a/pkg/resources/execute.go +++ b/pkg/resources/execute.go @@ -61,7 +61,7 @@ func Execute() *schema.Resource { Description: "Resource allowing execution of ANY SQL statement.", - CustomizeDiff: TrackingCustomDiffWrapper(resources.UnsafeExecute, customdiff.All( + CustomizeDiff: TrackingCustomDiffWrapper(resources.Execute, customdiff.All( customdiff.ForceNewIfChange("execute", func(ctx context.Context, oldValue, newValue, meta 
any) bool { return oldValue != "" }), diff --git a/pkg/resources/execute_acceptance_test.go b/pkg/resources/execute_acceptance_test.go index d97722701f..e11b6370a3 100644 --- a/pkg/resources/execute_acceptance_test.go +++ b/pkg/resources/execute_acceptance_test.go @@ -847,3 +847,21 @@ func TestAcc_Execute_ImportWithRandomId(t *testing.T) { }, }) } + +// TODO [SNOW-1348121]: Move this to the file with check_destroy functions. +func testAccCheckDatabaseExistence(t *testing.T, id sdk.AccountObjectIdentifier, shouldExist bool) func(state *terraform.State) error { + t.Helper() + return func(state *terraform.State) error { + _, err := acc.TestClient().Database.Show(t, id) + if shouldExist { + if err != nil { + return fmt.Errorf("error while retrieving database %s, err = %w", id, err) + } + } else { + if err == nil { + return fmt.Errorf("database %v still exists", id) + } + } + return nil + } +} diff --git a/pkg/resources/external_function.go b/pkg/resources/external_function.go index 5330787394..ebd37b5c81 100644 --- a/pkg/resources/external_function.go +++ b/pkg/resources/external_function.go @@ -9,6 +9,7 @@ import ( "strings" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/internal/provider" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/provider/previewfeatures" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/provider/resources" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/schemas" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk" @@ -187,10 +188,10 @@ func ExternalFunction() *schema.Resource { return &schema.Resource{ SchemaVersion: 2, - CreateContext: TrackingCreateWrapper(resources.ExternalFunction, CreateContextExternalFunction), - ReadContext: TrackingReadWrapper(resources.ExternalFunction, ReadContextExternalFunction), - UpdateContext: TrackingUpdateWrapper(resources.ExternalFunction, UpdateContextExternalFunction), - DeleteContext: TrackingDeleteWrapper(resources.ExternalFunction, 
DeleteContextExternalFunction), + CreateContext: PreviewFeatureCreateContextWrapper(string(previewfeatures.ExternalFunctionResource), TrackingCreateWrapper(resources.ExternalFunction, CreateContextExternalFunction)), + ReadContext: PreviewFeatureReadContextWrapper(string(previewfeatures.ExternalFunctionResource), TrackingReadWrapper(resources.ExternalFunction, ReadContextExternalFunction)), + UpdateContext: PreviewFeatureUpdateContextWrapper(string(previewfeatures.ExternalFunctionResource), TrackingUpdateWrapper(resources.ExternalFunction, UpdateContextExternalFunction)), + DeleteContext: PreviewFeatureDeleteContextWrapper(string(previewfeatures.ExternalFunctionResource), TrackingDeleteWrapper(resources.ExternalFunction, DeleteContextExternalFunction)), Schema: externalFunctionSchema, Importer: &schema.ResourceImporter{ diff --git a/pkg/resources/external_table.go b/pkg/resources/external_table.go index 56404cf703..bd03b75bf6 100644 --- a/pkg/resources/external_table.go +++ b/pkg/resources/external_table.go @@ -7,6 +7,7 @@ import ( "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/helpers" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/internal/provider" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/provider/previewfeatures" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/provider/resources" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/schemas" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk" @@ -140,10 +141,10 @@ var externalTableSchema = map[string]*schema.Schema{ func ExternalTable() *schema.Resource { return &schema.Resource{ - CreateContext: TrackingCreateWrapper(resources.ExternalTable, CreateExternalTable), - ReadContext: TrackingReadWrapper(resources.ExternalTable, ReadExternalTable), - UpdateContext: TrackingUpdateWrapper(resources.ExternalTable, UpdateExternalTable), - DeleteContext: TrackingDeleteWrapper(resources.ExternalTable, DeleteExternalTable), + CreateContext: 
PreviewFeatureCreateContextWrapper(string(previewfeatures.ExternalTableResource), TrackingCreateWrapper(resources.ExternalTable, CreateExternalTable)), + ReadContext: PreviewFeatureReadContextWrapper(string(previewfeatures.ExternalTableResource), TrackingReadWrapper(resources.ExternalTable, ReadExternalTable)), + UpdateContext: PreviewFeatureUpdateContextWrapper(string(previewfeatures.ExternalTableResource), TrackingUpdateWrapper(resources.ExternalTable, UpdateExternalTable)), + DeleteContext: PreviewFeatureDeleteContextWrapper(string(previewfeatures.ExternalTableResource), TrackingDeleteWrapper(resources.ExternalTable, DeleteExternalTable)), Schema: externalTableSchema, Importer: &schema.ResourceImporter{ diff --git a/pkg/resources/external_volume.go b/pkg/resources/external_volume.go index 7486f779b1..e12e737cb3 100644 --- a/pkg/resources/external_volume.go +++ b/pkg/resources/external_volume.go @@ -5,6 +5,7 @@ import ( "errors" "fmt" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/provider/previewfeatures" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/provider/resources" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/helpers" @@ -118,11 +119,12 @@ var externalVolumeSchema = map[string]*schema.Schema{ // ExternalVolume returns a pointer to the resource representing an external volume. func ExternalVolume() *schema.Resource { return &schema.Resource{ - CreateContext: TrackingCreateWrapper(resources.ExternalVolume, CreateContextExternalVolume), - UpdateContext: TrackingUpdateWrapper(resources.ExternalVolume, UpdateContextExternalVolume), - ReadContext: TrackingReadWrapper(resources.ExternalVolume, ReadContextExternalVolume(true)), - DeleteContext: TrackingDeleteWrapper(resources.ExternalVolume, DeleteContextExternalVolume), - Description: "Resource used to manage external volume objects. 
For more information, check [external volume documentation](https://docs.snowflake.com/en/sql-reference/commands-data-loading#external-volume).", + CreateContext: PreviewFeatureCreateContextWrapper(string(previewfeatures.ExternalVolumeResource), TrackingCreateWrapper(resources.ExternalVolume, CreateContextExternalVolume)), + ReadContext: PreviewFeatureReadContextWrapper(string(previewfeatures.ExternalVolumeResource), TrackingReadWrapper(resources.ExternalVolume, ReadContextExternalVolume(true))), + UpdateContext: PreviewFeatureUpdateContextWrapper(string(previewfeatures.ExternalVolumeResource), TrackingUpdateWrapper(resources.ExternalVolume, UpdateContextExternalVolume)), + DeleteContext: PreviewFeatureDeleteContextWrapper(string(previewfeatures.ExternalVolumeResource), TrackingDeleteWrapper(resources.ExternalVolume, DeleteContextExternalVolume)), + + Description: "Resource used to manage external volume objects. For more information, check [external volume documentation](https://docs.snowflake.com/en/sql-reference/commands-data-loading#external-volume).", Schema: externalVolumeSchema, Importer: &schema.ResourceImporter{ diff --git a/pkg/resources/failover_group.go b/pkg/resources/failover_group.go index 4a8ab0e3fe..6b77bf8f85 100644 --- a/pkg/resources/failover_group.go +++ b/pkg/resources/failover_group.go @@ -8,6 +8,7 @@ import ( "strconv" "strings" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/provider/previewfeatures" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/provider/resources" "github.com/hashicorp/terraform-plugin-sdk/v2/diag" @@ -141,10 +142,10 @@ var failoverGroupSchema = map[string]*schema.Schema{ // FailoverGroup returns a pointer to the resource representing a failover group. 
func FailoverGroup() *schema.Resource { return &schema.Resource{ - CreateContext: TrackingCreateWrapper(resources.FailoverGroup, CreateFailoverGroup), - ReadContext: TrackingReadWrapper(resources.FailoverGroup, ReadFailoverGroup), - UpdateContext: TrackingUpdateWrapper(resources.FailoverGroup, UpdateFailoverGroup), - DeleteContext: TrackingDeleteWrapper(resources.FailoverGroup, DeleteFailoverGroup), + CreateContext: PreviewFeatureCreateContextWrapper(string(previewfeatures.FailoverGroupResource), TrackingCreateWrapper(resources.FailoverGroup, CreateFailoverGroup)), + ReadContext: PreviewFeatureReadContextWrapper(string(previewfeatures.FailoverGroupResource), TrackingReadWrapper(resources.FailoverGroup, ReadFailoverGroup)), + UpdateContext: PreviewFeatureUpdateContextWrapper(string(previewfeatures.FailoverGroupResource), TrackingUpdateWrapper(resources.FailoverGroup, UpdateFailoverGroup)), + DeleteContext: PreviewFeatureDeleteContextWrapper(string(previewfeatures.FailoverGroupResource), TrackingDeleteWrapper(resources.FailoverGroup, DeleteFailoverGroup)), Schema: failoverGroupSchema, Importer: &schema.ResourceImporter{ diff --git a/pkg/resources/file_format.go b/pkg/resources/file_format.go index 8ff7cec53f..f4ab8992d6 100644 --- a/pkg/resources/file_format.go +++ b/pkg/resources/file_format.go @@ -9,6 +9,7 @@ import ( "github.com/hashicorp/terraform-plugin-sdk/v2/diag" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/provider/previewfeatures" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/provider/resources" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/helpers" @@ -317,10 +318,10 @@ func (ffi *fileFormatID) String() (string, error) { // FileFormat returns a pointer to the resource representing a file format. 
func FileFormat() *schema.Resource { return &schema.Resource{ - CreateContext: TrackingCreateWrapper(resources.FileFormat, CreateFileFormat), - ReadContext: TrackingReadWrapper(resources.FileFormat, ReadFileFormat), - UpdateContext: TrackingUpdateWrapper(resources.FileFormat, UpdateFileFormat), - DeleteContext: TrackingDeleteWrapper(resources.FileFormat, DeleteFileFormat), + CreateContext: PreviewFeatureCreateContextWrapper(string(previewfeatures.FileFormatResource), TrackingCreateWrapper(resources.FileFormat, CreateFileFormat)), + ReadContext: PreviewFeatureReadContextWrapper(string(previewfeatures.FileFormatResource), TrackingReadWrapper(resources.FileFormat, ReadFileFormat)), + UpdateContext: PreviewFeatureUpdateContextWrapper(string(previewfeatures.FileFormatResource), TrackingUpdateWrapper(resources.FileFormat, UpdateFileFormat)), + DeleteContext: PreviewFeatureDeleteContextWrapper(string(previewfeatures.FileFormatResource), TrackingDeleteWrapper(resources.FileFormat, DeleteFileFormat)), CustomizeDiff: TrackingCustomDiffWrapper(resources.FileFormat, customdiff.All( ComputedIfAnyAttributeChanged(fileFormatSchema, FullyQualifiedNameAttributeName, "name"), diff --git a/pkg/resources/function.go b/pkg/resources/function.go deleted file mode 100644 index 663dcf5693..0000000000 --- a/pkg/resources/function.go +++ /dev/null @@ -1,790 +0,0 @@ -package resources - -import ( - "context" - "fmt" - "log" - "regexp" - "strings" - - "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/internal/provider" - "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/provider/resources" - "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/schemas" - "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk" - "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk/datatypes" - "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/snowflake" - "github.com/hashicorp/go-cty/cty" - "github.com/hashicorp/terraform-plugin-sdk/v2/diag" - 
"github.com/hashicorp/terraform-plugin-sdk/v2/helper/customdiff" - "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" - "github.com/hashicorp/terraform-plugin-sdk/v2/helper/validation" -) - -var languages = []string{"javascript", "scala", "java", "sql", "python"} - -var functionSchema = map[string]*schema.Schema{ - "name": { - Type: schema.TypeString, - Required: true, - Description: "Specifies the identifier for the function; does not have to be unique for the schema in which the function is created. Don't use the | character.", - }, - "database": { - Type: schema.TypeString, - Required: true, - Description: "The database in which to create the function. Don't use the | character.", - ForceNew: true, - }, - "schema": { - Type: schema.TypeString, - Required: true, - Description: "The schema in which to create the function. Don't use the | character.", - ForceNew: true, - }, - "arguments": { - Type: schema.TypeList, - Elem: &schema.Resource{ - Schema: map[string]*schema.Schema{ - "name": { - Type: schema.TypeString, - Required: true, - // Suppress the diff shown if the values are equal when both compared in lower case. - DiffSuppressFunc: func(k, old, new string, d *schema.ResourceData) bool { - return strings.EqualFold(old, new) - }, - Description: "The argument name", - }, - // TODO(SNOW-1596962): Fully support VECTOR data type sdk.ParseFunctionArgumentsFromString could be a base for another function that takes argument names into consideration. - "type": { - Type: schema.TypeString, - Required: true, - // Suppress the diff shown if the values are equal when both compared in lower case. 
- DiffSuppressFunc: func(k, old, new string, d *schema.ResourceData) bool { - return strings.EqualFold(old, new) - }, - Description: "The argument type", - }, - }, - }, - Optional: true, - Description: "List of the arguments for the function", - ForceNew: true, - }, - "return_type": { - Type: schema.TypeString, - Description: "The return type of the function", - // Suppress the diff shown if the values are equal when both compared in lower case. - DiffSuppressFunc: func(k, old, new string, d *schema.ResourceData) bool { - return strings.EqualFold(old, new) - }, - Required: true, - ForceNew: true, - }, - "statement": { - Type: schema.TypeString, - Required: true, - Description: "Specifies the javascript / java / scala / sql / python code used to create the function.", - ForceNew: true, - DiffSuppressFunc: DiffSuppressStatement, - }, - "language": { - Type: schema.TypeString, - Optional: true, - Default: "SQL", - DiffSuppressFunc: func(k, old, new string, d *schema.ResourceData) bool { - return strings.EqualFold(old, new) - }, - ValidateFunc: validation.StringInSlice(languages, true), - Description: "Specifies the language of the stored function code.", - }, - "null_input_behavior": { - Type: schema.TypeString, - Optional: true, - Default: "CALLED ON NULL INPUT", - ForceNew: true, - // We do not use STRICT, because Snowflake then in the Read phase returns RETURNS NULL ON NULL INPUT - ValidateFunc: validation.StringInSlice([]string{"CALLED ON NULL INPUT", "RETURNS NULL ON NULL INPUT"}, false), - Description: "Specifies the behavior of the function when called with null inputs.", - }, - "return_behavior": { - Type: schema.TypeString, - Optional: true, - Default: "VOLATILE", - ForceNew: true, - ValidateFunc: validation.StringInSlice([]string{"VOLATILE", "IMMUTABLE"}, false), - Description: "Specifies the behavior of the function when returning results", - }, - "is_secure": { - Type: schema.TypeBool, - Optional: true, - Default: false, - Description: "Specifies that the 
function is secure.", - }, - "comment": { - Type: schema.TypeString, - Optional: true, - Default: "user-defined function", - Description: "Specifies a comment for the function.", - }, - "runtime_version": { - Type: schema.TypeString, - Optional: true, - ForceNew: true, - Description: "Required for Python functions. Specifies Python runtime version.", - }, - "packages": { - Type: schema.TypeList, - Elem: &schema.Schema{ - Type: schema.TypeString, - }, - Optional: true, - ForceNew: true, - Description: "List of package imports to use for Java / Python functions. For Java, package imports should be of the form: package_name:version_number, where package_name is snowflake_domain:package. For Python use it should be: ('numpy','pandas','xgboost==1.5.0').", - }, - "imports": { - Type: schema.TypeList, - Elem: &schema.Schema{ - Type: schema.TypeString, - }, - Optional: true, - ForceNew: true, - Description: "Imports for Java / Python functions. For Java this a list of jar files, for Python this is a list of Python files.", - }, - "handler": { - Type: schema.TypeString, - Optional: true, - ForceNew: true, - Description: "The handler method for Java / Python function.", - }, - "target_path": { - Type: schema.TypeString, - Optional: true, - ForceNew: true, - Description: "The target path for the Java / Python functions. 
For Java, it is the path of compiled jar files and for the Python it is the path of the Python files.", - }, - FullyQualifiedNameAttributeName: schemas.FullyQualifiedNameSchema, -} - -func Function() *schema.Resource { - return &schema.Resource{ - SchemaVersion: 2, - - CreateContext: TrackingCreateWrapper(resources.Function, CreateContextFunction), - ReadContext: TrackingReadWrapper(resources.Function, ReadContextFunction), - UpdateContext: TrackingUpdateWrapper(resources.Function, UpdateContextFunction), - DeleteContext: TrackingDeleteWrapper(resources.Function, DeleteFunction), - - DeprecationMessage: "This resource is deprecated and will be removed in a future major version release. Please use snowflake_function_java, snowflake_function_javascript, snowflake_function_python, snowflake_function_scala, and snowflake_function_sql instead.", - - CustomizeDiff: TrackingCustomDiffWrapper(resources.Function, customdiff.All( - // TODO(SNOW-1348103): add `arguments` to ComputedIfAnyAttributeChanged. This can't be done now because this function compares values without diff suppress. 
- ComputedIfAnyAttributeChanged(functionSchema, FullyQualifiedNameAttributeName, "name"), - )), - - Schema: functionSchema, - Importer: &schema.ResourceImporter{ - StateContext: schema.ImportStatePassthroughContext, - }, - - StateUpgraders: []schema.StateUpgrader{ - { - Version: 0, - // setting type to cty.EmptyObject is a bit hacky here but following https://developer.hashicorp.com/terraform/plugin/framework/migrating/resources/state-upgrade#sdkv2-1 would require lots of repetitive code; this should work with cty.EmptyObject - Type: cty.EmptyObject, - Upgrade: v085FunctionIdStateUpgrader, - }, - { - Version: 1, - // setting type to cty.EmptyObject is a bit hacky here but following https://developer.hashicorp.com/terraform/plugin/framework/migrating/resources/state-upgrade#sdkv2-1 would require lots of repetitive code; this should work with cty.EmptyObject - Type: cty.EmptyObject, - Upgrade: v0941ResourceIdentifierWithArguments, - }, - }, - } -} - -func CreateContextFunction(ctx context.Context, d *schema.ResourceData, meta interface{}) diag.Diagnostics { - lang := strings.ToUpper(d.Get("language").(string)) - switch lang { - case "JAVA": - return createJavaFunction(ctx, d, meta) - case "JAVASCRIPT": - return createJavascriptFunction(ctx, d, meta) - case "PYTHON": - return createPythonFunction(ctx, d, meta) - case "SCALA": - return createScalaFunction(ctx, d, meta) - case "", "SQL": // SQL if language is not set - return createSQLFunction(ctx, d, meta) - default: - return diag.Diagnostics{ - diag.Diagnostic{ - Severity: diag.Error, - Summary: "Invalid language", - Detail: fmt.Sprintf("Language %s is not supported", lang), - }, - } - } -} - -func createJavaFunction(ctx context.Context, d *schema.ResourceData, meta interface{}) diag.Diagnostics { - client := meta.(*provider.Context).Client - name := d.Get("name").(string) - sc := d.Get("schema").(string) - database := d.Get("database").(string) - id := sdk.NewSchemaObjectIdentifier(database, sc, name) - - // Set 
required - returns, diags := parseFunctionReturnsRequest(d.Get("return_type").(string)) - if diags != nil { - return diags - } - handler := d.Get("handler").(string) - // create request with required - request := sdk.NewCreateForJavaFunctionRequest(id, *returns, handler) - functionDefinition := d.Get("statement").(string) - request.WithFunctionDefinitionWrapped(functionDefinition) - - // Set optionals - if v, ok := d.GetOk("is_secure"); ok { - request.WithSecure(v.(bool)) - } - arguments, diags := parseFunctionArguments(d) - if diags != nil { - return diags - } - if len(arguments) > 0 { - request.WithArguments(arguments) - } - if v, ok := d.GetOk("null_input_behavior"); ok { - request.WithNullInputBehavior(sdk.NullInputBehavior(v.(string))) - } - if v, ok := d.GetOk("return_behavior"); ok { - request.WithReturnResultsBehavior(sdk.ReturnResultsBehavior(v.(string))) - } - if v, ok := d.GetOk("runtime_version"); ok { - request.WithRuntimeVersion(v.(string)) - } - if v, ok := d.GetOk("comment"); ok { - request.WithComment(v.(string)) - } - if _, ok := d.GetOk("imports"); ok { - var imports []sdk.FunctionImportRequest - for _, item := range d.Get("imports").([]interface{}) { - imports = append(imports, *sdk.NewFunctionImportRequest().WithImport(item.(string))) - } - request.WithImports(imports) - } - if _, ok := d.GetOk("packages"); ok { - var packages []sdk.FunctionPackageRequest - for _, item := range d.Get("packages").([]interface{}) { - packages = append(packages, *sdk.NewFunctionPackageRequest().WithPackage(item.(string))) - } - request.WithPackages(packages) - } - if v, ok := d.GetOk("target_path"); ok { - request.WithTargetPath(v.(string)) - } - - if err := client.Functions.CreateForJava(ctx, request); err != nil { - return diag.FromErr(err) - } - argumentTypes := make([]sdk.DataType, 0, len(arguments)) - for _, item := range arguments { - argumentTypes = append(argumentTypes, item.ArgDataTypeOld) - } - nid := sdk.NewSchemaObjectIdentifierWithArguments(database, 
sc, name, argumentTypes...) - d.SetId(nid.FullyQualifiedName()) - return ReadContextFunction(ctx, d, meta) -} - -func createScalaFunction(ctx context.Context, d *schema.ResourceData, meta interface{}) diag.Diagnostics { - client := meta.(*provider.Context).Client - name := d.Get("name").(string) - sc := d.Get("schema").(string) - database := d.Get("database").(string) - id := sdk.NewSchemaObjectIdentifier(database, sc, name) - - // Set required - returnType := d.Get("return_type").(string) - returnDataType, diags := convertFunctionDataType(returnType) - if diags != nil { - return diags - } - functionDefinition := d.Get("statement").(string) - handler := d.Get("handler").(string) - var runtimeVersion string - if v, ok := d.GetOk("runtime_version"); ok { - runtimeVersion = v.(string) - } else { - return diag.Errorf("Runtime version is required for Scala function") - } - - // create request with required - request := sdk.NewCreateForScalaFunctionRequest(id, nil, handler, runtimeVersion).WithResultDataTypeOld(sdk.LegacyDataTypeFrom(returnDataType)) - request.WithFunctionDefinitionWrapped(functionDefinition) - - // Set optionals - if v, ok := d.GetOk("is_secure"); ok { - request.WithSecure(v.(bool)) - } - arguments, diags := parseFunctionArguments(d) - if diags != nil { - return diags - } - if len(arguments) > 0 { - request.WithArguments(arguments) - } - if v, ok := d.GetOk("null_input_behavior"); ok { - request.WithNullInputBehavior(sdk.NullInputBehavior(v.(string))) - } - if v, ok := d.GetOk("return_behavior"); ok { - request.WithReturnResultsBehavior(sdk.ReturnResultsBehavior(v.(string))) - } - if v, ok := d.GetOk("comment"); ok { - request.WithComment(v.(string)) - } - if _, ok := d.GetOk("imports"); ok { - var imports []sdk.FunctionImportRequest - for _, item := range d.Get("imports").([]interface{}) { - imports = append(imports, *sdk.NewFunctionImportRequest().WithImport(item.(string))) - } - request.WithImports(imports) - } - if _, ok := d.GetOk("packages"); ok { 
- var packages []sdk.FunctionPackageRequest - for _, item := range d.Get("packages").([]interface{}) { - packages = append(packages, *sdk.NewFunctionPackageRequest().WithPackage(item.(string))) - } - request.WithPackages(packages) - } - if v, ok := d.GetOk("target_path"); ok { - request.WithTargetPath(v.(string)) - } - - if err := client.Functions.CreateForScala(ctx, request); err != nil { - return diag.FromErr(err) - } - argumentTypes := make([]sdk.DataType, 0, len(arguments)) - for _, item := range arguments { - argumentTypes = append(argumentTypes, item.ArgDataTypeOld) - } - nid := sdk.NewSchemaObjectIdentifierWithArguments(database, sc, name, argumentTypes...) - d.SetId(nid.FullyQualifiedName()) - return ReadContextFunction(ctx, d, meta) -} - -func createSQLFunction(ctx context.Context, d *schema.ResourceData, meta interface{}) diag.Diagnostics { - client := meta.(*provider.Context).Client - name := d.Get("name").(string) - sc := d.Get("schema").(string) - database := d.Get("database").(string) - id := sdk.NewSchemaObjectIdentifier(database, sc, name) - - // Set required - returns, diags := parseFunctionReturnsRequest(d.Get("return_type").(string)) - if diags != nil { - return diags - } - functionDefinition := d.Get("statement").(string) - // create request with required - request := sdk.NewCreateForSQLFunctionRequestDefinitionWrapped(id, *returns, functionDefinition) - - // Set optionals - if v, ok := d.GetOk("is_secure"); ok { - request.WithSecure(v.(bool)) - } - arguments, diags := parseFunctionArguments(d) - if diags != nil { - return diags - } - if len(arguments) > 0 { - request.WithArguments(arguments) - } - if v, ok := d.GetOk("return_behavior"); ok { - request.WithReturnResultsBehavior(sdk.ReturnResultsBehavior(v.(string))) - } - if v, ok := d.GetOk("comment"); ok { - request.WithComment(v.(string)) - } - - if err := client.Functions.CreateForSQL(ctx, request); err != nil { - return diag.FromErr(err) - } - argumentTypes := make([]sdk.DataType, 0, 
len(arguments)) - for _, item := range arguments { - argumentTypes = append(argumentTypes, item.ArgDataTypeOld) - } - nid := sdk.NewSchemaObjectIdentifierWithArguments(database, sc, name, argumentTypes...) - d.SetId(nid.FullyQualifiedName()) - return ReadContextFunction(ctx, d, meta) -} - -func createPythonFunction(ctx context.Context, d *schema.ResourceData, meta interface{}) diag.Diagnostics { - client := meta.(*provider.Context).Client - name := d.Get("name").(string) - sc := d.Get("schema").(string) - database := d.Get("database").(string) - id := sdk.NewSchemaObjectIdentifier(database, sc, name) - - // Set required - returns, diags := parseFunctionReturnsRequest(d.Get("return_type").(string)) - if diags != nil { - return diags - } - functionDefinition := d.Get("statement").(string) - version := d.Get("runtime_version").(string) - handler := d.Get("handler").(string) - // create request with required - request := sdk.NewCreateForPythonFunctionRequest(id, *returns, version, handler) - request.WithFunctionDefinitionWrapped(functionDefinition) - - // Set optionals - if v, ok := d.GetOk("is_secure"); ok { - request.WithSecure(v.(bool)) - } - arguments, diags := parseFunctionArguments(d) - if diags != nil { - return diags - } - if len(arguments) > 0 { - request.WithArguments(arguments) - } - if v, ok := d.GetOk("null_input_behavior"); ok { - request.WithNullInputBehavior(sdk.NullInputBehavior(v.(string))) - } - if v, ok := d.GetOk("return_behavior"); ok { - request.WithReturnResultsBehavior(sdk.ReturnResultsBehavior(v.(string))) - } - - if v, ok := d.GetOk("comment"); ok { - request.WithComment(v.(string)) - } - if _, ok := d.GetOk("imports"); ok { - var imports []sdk.FunctionImportRequest - for _, item := range d.Get("imports").([]interface{}) { - imports = append(imports, *sdk.NewFunctionImportRequest().WithImport(item.(string))) - } - request.WithImports(imports) - } - if _, ok := d.GetOk("packages"); ok { - var packages []sdk.FunctionPackageRequest - for _, item 
:= range d.Get("packages").([]interface{}) { - packages = append(packages, *sdk.NewFunctionPackageRequest().WithPackage(item.(string))) - } - request.WithPackages(packages) - } - - if err := client.Functions.CreateForPython(ctx, request); err != nil { - return diag.FromErr(err) - } - argumentTypes := make([]sdk.DataType, 0, len(arguments)) - for _, item := range arguments { - argumentTypes = append(argumentTypes, item.ArgDataTypeOld) - } - nid := sdk.NewSchemaObjectIdentifierWithArguments(database, sc, name, argumentTypes...) - d.SetId(nid.FullyQualifiedName()) - return ReadContextFunction(ctx, d, meta) -} - -func createJavascriptFunction(ctx context.Context, d *schema.ResourceData, meta interface{}) diag.Diagnostics { - client := meta.(*provider.Context).Client - name := d.Get("name").(string) - sc := d.Get("schema").(string) - database := d.Get("database").(string) - id := sdk.NewSchemaObjectIdentifier(database, sc, name) - - // Set required - returns, diags := parseFunctionReturnsRequest(d.Get("return_type").(string)) - if diags != nil { - return diags - } - functionDefinition := d.Get("statement").(string) - // create request with required - request := sdk.NewCreateForJavascriptFunctionRequestDefinitionWrapped(id, *returns, functionDefinition) - - // Set optionals - if v, ok := d.GetOk("is_secure"); ok { - request.WithSecure(v.(bool)) - } - arguments, diags := parseFunctionArguments(d) - if diags != nil { - return diags - } - if len(arguments) > 0 { - request.WithArguments(arguments) - } - if v, ok := d.GetOk("null_input_behavior"); ok { - request.WithNullInputBehavior(sdk.NullInputBehavior(v.(string))) - } - if v, ok := d.GetOk("return_behavior"); ok { - request.WithReturnResultsBehavior(sdk.ReturnResultsBehavior(v.(string))) - } - if v, ok := d.GetOk("comment"); ok { - request.WithComment(v.(string)) - } - - if err := client.Functions.CreateForJavascript(ctx, request); err != nil { - return diag.FromErr(err) - } - argumentTypes := make([]sdk.DataType, 0, 
len(arguments)) - for _, item := range arguments { - argumentTypes = append(argumentTypes, item.ArgDataTypeOld) - } - nid := sdk.NewSchemaObjectIdentifierWithArguments(database, sc, name, argumentTypes...) - d.SetId(nid.FullyQualifiedName()) - return ReadContextFunction(ctx, d, meta) -} - -func ReadContextFunction(ctx context.Context, d *schema.ResourceData, meta interface{}) diag.Diagnostics { - diags := diag.Diagnostics{} - client := meta.(*provider.Context).Client - - id, err := sdk.ParseSchemaObjectIdentifierWithArguments(d.Id()) - if err != nil { - return diag.FromErr(err) - } - if err := d.Set(FullyQualifiedNameAttributeName, id.FullyQualifiedName()); err != nil { - return diag.FromErr(err) - } - if err := d.Set("name", id.Name()); err != nil { - return diag.FromErr(err) - } - if err := d.Set("database", id.DatabaseName()); err != nil { - return diag.FromErr(err) - } - if err := d.Set("schema", id.SchemaName()); err != nil { - return diag.FromErr(err) - } - - arguments := d.Get("arguments").([]interface{}) - argumentTypes := make([]string, len(arguments)) - for i, arg := range arguments { - argumentTypes[i] = arg.(map[string]interface{})["type"].(string) - } - functionDetails, err := client.Functions.Describe(ctx, id) - if err != nil { - // if function is not found then mark resource to be removed from state file during apply or refresh - d.SetId("") - return diag.Diagnostics{ - diag.Diagnostic{ - Severity: diag.Warning, - Summary: "Describe function failed.", - Detail: "See our document on design decisions for functions: ", - }, - } - } - for _, desc := range functionDetails { - if desc.Value == nil { - continue - } - switch desc.Property { - case "signature": - // Format in Snowflake DB is: (argName argType, argName argType, ...) 
- value := strings.ReplaceAll(strings.ReplaceAll(*desc.Value, "(", ""), ")", "") - if value != "" { // Do nothing for functions without arguments - pairs := strings.Split(value, ", ") - - var arguments []interface{} - for _, pair := range pairs { - item := strings.Split(pair, " ") - argument := map[string]interface{}{} - argument["name"] = item[0] - argument["type"] = item[1] - arguments = append(arguments, argument) - } - if err := d.Set("arguments", arguments); err != nil { - diag.FromErr(err) - } - } - case "null handling": - if err := d.Set("null_input_behavior", *desc.Value); err != nil { - diag.FromErr(err) - } - case "volatility": - if err := d.Set("return_behavior", *desc.Value); err != nil { - diag.FromErr(err) - } - case "body": - if err := d.Set("statement", *desc.Value); err != nil { - diag.FromErr(err) - } - case "returns": - // Format in Snowflake DB is returnType() - re := regexp.MustCompile(`^(.*)\([0-9]*\)$`) - rt := *desc.Value - match := re.FindStringSubmatch(rt) - if match != nil { - rt = match[1] - } - if err := d.Set("return_type", rt); err != nil { - diag.FromErr(err) - } - case "language": - if snowflake.Contains(languages, strings.ToLower(*desc.Value)) { - if err := d.Set("language", *desc.Value); err != nil { - diag.FromErr(err) - } - } else { - log.Printf("[INFO] Unexpected language for function %v returned from Snowflake", *desc.Value) - } - case "packages": - value := strings.ReplaceAll(strings.ReplaceAll(strings.ReplaceAll(*desc.Value, "[", ""), "]", ""), "'", "") - if value != "" { // Do nothing for Java / Python functions without packages - packages := strings.Split(value, ",") - if err := d.Set("packages", packages); err != nil { - diag.FromErr(err) - } - } - case "imports": - value := strings.ReplaceAll(strings.ReplaceAll(strings.ReplaceAll(*desc.Value, "[", ""), "]", ""), "'", "") - if value != "" { // Do nothing for Java functions without imports - imports := strings.Split(value, ",") - if err := d.Set("imports", imports); err != 
nil { - diag.FromErr(err) - } - } - case "handler": - if err := d.Set("handler", *desc.Value); err != nil { - diag.FromErr(err) - } - case "target_path": - if err := d.Set("target_path", *desc.Value); err != nil { - diag.FromErr(err) - } - case "runtime_version": - if err := d.Set("runtime_version", *desc.Value); err != nil { - diag.FromErr(err) - } - default: - log.Printf("[INFO] Unexpected function property %v returned from Snowflake with value %v", desc.Property, *desc.Value) - } - } - - function, err := client.Functions.ShowByID(ctx, id) - if err != nil { - return diag.FromErr(err) - } - - if err := d.Set("is_secure", function.IsSecure); err != nil { - return diag.FromErr(err) - } - - if err := d.Set("comment", function.Description); err != nil { - return diag.FromErr(err) - } - - return diags -} - -func UpdateContextFunction(ctx context.Context, d *schema.ResourceData, meta interface{}) diag.Diagnostics { - client := meta.(*provider.Context).Client - - id, err := sdk.ParseSchemaObjectIdentifierWithArguments(d.Id()) - if err != nil { - return diag.FromErr(err) - } - if d.HasChange("name") { - name := d.Get("name").(string) - newId := sdk.NewSchemaObjectIdentifierWithArguments(id.DatabaseName(), id.SchemaName(), name, id.ArgumentDataTypes()...) 
- - if err := client.Functions.Alter(ctx, sdk.NewAlterFunctionRequest(id).WithRenameTo(newId.SchemaObjectId())); err != nil { - return diag.FromErr(err) - } - - d.SetId(newId.FullyQualifiedName()) - id = newId - } - - if d.HasChange("is_secure") { - secure := d.Get("is_secure") - if secure.(bool) { - if err := client.Functions.Alter(ctx, sdk.NewAlterFunctionRequest(id).WithSetSecure(true)); err != nil { - return diag.FromErr(err) - } - } else { - if err := client.Functions.Alter(ctx, sdk.NewAlterFunctionRequest(id).WithUnsetSecure(true)); err != nil { - return diag.FromErr(err) - } - } - } - - if d.HasChange("comment") { - comment := d.Get("comment") - if comment != "" { - if err := client.Functions.Alter(ctx, sdk.NewAlterFunctionRequest(id).WithSet(*sdk.NewFunctionSetRequest().WithComment(comment.(string)))); err != nil { - return diag.FromErr(err) - } - } else { - if err := client.Functions.Alter(ctx, sdk.NewAlterFunctionRequest(id).WithUnset(*sdk.NewFunctionUnsetRequest().WithComment(true))); err != nil { - return diag.FromErr(err) - } - } - } - - return ReadContextFunction(ctx, d, meta) -} - -func parseFunctionArguments(d *schema.ResourceData) ([]sdk.FunctionArgumentRequest, diag.Diagnostics) { - args := make([]sdk.FunctionArgumentRequest, 0) - if v, ok := d.GetOk("arguments"); ok { - for _, arg := range v.([]interface{}) { - argName := arg.(map[string]interface{})["name"].(string) - argType := arg.(map[string]interface{})["type"].(string) - argDataType, diags := convertFunctionDataType(argType) - if diags != nil { - return nil, diags - } - args = append(args, sdk.FunctionArgumentRequest{ArgName: argName, ArgDataTypeOld: sdk.LegacyDataTypeFrom(argDataType)}) - } - } - return args, nil -} - -func convertFunctionDataType(s string) (datatypes.DataType, diag.Diagnostics) { - dataType, err := datatypes.ParseDataType(s) - if err != nil { - return nil, diag.FromErr(err) - } - return dataType, nil -} - -func convertFunctionColumns(s string) ([]sdk.FunctionColumn, 
diag.Diagnostics) { - pattern := regexp.MustCompile(`(\w+)\s+(\w+)`) - matches := pattern.FindAllStringSubmatch(s, -1) - var columns []sdk.FunctionColumn - for _, match := range matches { - if len(match) == 3 { - dataType, err := datatypes.ParseDataType(match[2]) - if err != nil { - return nil, diag.FromErr(err) - } - columns = append(columns, sdk.FunctionColumn{ - ColumnName: match[1], - ColumnDataTypeOld: sdk.LegacyDataTypeFrom(dataType), - }) - } - } - return columns, nil -} - -func parseFunctionReturnsRequest(s string) (*sdk.FunctionReturnsRequest, diag.Diagnostics) { - returns := sdk.NewFunctionReturnsRequest() - if strings.HasPrefix(strings.ToLower(s), "table") { - columns, diags := convertFunctionColumns(s) - if diags != nil { - return nil, diags - } - var cr []sdk.FunctionColumnRequest - for _, item := range columns { - cr = append(cr, *sdk.NewFunctionColumnRequest(item.ColumnName, nil).WithColumnDataTypeOld(item.ColumnDataTypeOld)) - } - returns.WithTable(*sdk.NewFunctionReturnsTableRequest().WithColumns(cr)) - } else { - returnDataType, diags := convertFunctionDataType(s) - if diags != nil { - return nil, diags - } - returns.WithResultDataType(*sdk.NewFunctionReturnsResultDataTypeRequest(nil).WithResultDataTypeOld(sdk.LegacyDataTypeFrom(returnDataType))) - } - return returns, nil -} diff --git a/pkg/resources/function_acceptance_test.go b/pkg/resources/function_acceptance_test.go deleted file mode 100644 index df8bb28014..0000000000 --- a/pkg/resources/function_acceptance_test.go +++ /dev/null @@ -1,451 +0,0 @@ -package resources_test - -import ( - "fmt" - "strings" - "testing" - - acc "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance" - - "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/helpers/random" - "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/provider/resources" - "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk" - "github.com/hashicorp/terraform-plugin-testing/config" - 
"github.com/hashicorp/terraform-plugin-testing/helper/resource" - "github.com/hashicorp/terraform-plugin-testing/plancheck" - "github.com/hashicorp/terraform-plugin-testing/tfversion" -) - -func testAccFunction(t *testing.T, configDirectory string) { - t.Helper() - - name := acc.TestClient().Ids.Alpha() - resourceName := "snowflake_function.f" - m := func() map[string]config.Variable { - return map[string]config.Variable{ - "name": config.StringVariable(name), - "database": config.StringVariable(acc.TestDatabaseName), - "schema": config.StringVariable(acc.TestSchemaName), - "comment": config.StringVariable("Terraform acceptance test"), - } - } - variableSet2 := m() - variableSet2["comment"] = config.StringVariable("Terraform acceptance test - updated") - - ignoreDuringImport := []string{"null_input_behavior"} - if strings.Contains(configDirectory, "/sql") { - ignoreDuringImport = append(ignoreDuringImport, "return_behavior") - } - - resource.Test(t, resource.TestCase{ - ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, - PreCheck: func() { acc.TestAccPreCheck(t) }, - TerraformVersionChecks: []tfversion.TerraformVersionCheck{ - tfversion.RequireAbove(tfversion.Version1_5_0), - }, - CheckDestroy: acc.CheckDestroy(t, resources.Function), - Steps: []resource.TestStep{ - { - ConfigDirectory: acc.ConfigurationDirectory(configDirectory), - ConfigVariables: m(), - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr(resourceName, "name", name), - resource.TestCheckResourceAttr(resourceName, "database", acc.TestDatabaseName), - resource.TestCheckResourceAttr(resourceName, "schema", acc.TestSchemaName), - resource.TestCheckResourceAttr(resourceName, "comment", "Terraform acceptance test"), - - // computed attributes - resource.TestCheckResourceAttrSet(resourceName, "return_type"), - resource.TestCheckResourceAttrSet(resourceName, "statement"), - resource.TestCheckResourceAttrSet(resourceName, "is_secure"), - ), - }, - - // test - change comment 
- { - ConfigDirectory: acc.ConfigurationDirectory(configDirectory), - ConfigVariables: variableSet2, - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr(resourceName, "name", name), - resource.TestCheckResourceAttr(resourceName, "database", acc.TestDatabaseName), - resource.TestCheckResourceAttr(resourceName, "schema", acc.TestSchemaName), - resource.TestCheckResourceAttr(resourceName, "comment", "Terraform acceptance test - updated"), - ), - }, - - // test - import - { - ConfigDirectory: acc.ConfigurationDirectory(configDirectory), - ConfigVariables: variableSet2, - ResourceName: resourceName, - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: ignoreDuringImport, - }, - }, - }) -} - -func TestAcc_Function_Javascript(t *testing.T) { - testAccFunction(t, "TestAcc_Function/javascript") -} - -func TestAcc_Function_SQL(t *testing.T) { - testAccFunction(t, "TestAcc_Function/sql") -} - -func TestAcc_Function_Java(t *testing.T) { - testAccFunction(t, "TestAcc_Function/java") -} - -func TestAcc_Function_Scala(t *testing.T) { - testAccFunction(t, "TestAcc_Function/scala") -} - -/* - Error: 391528 (42601): SQL compilation error: An active warehouse is required for creating Python UDFs. 
-func TestAcc_Function_Python(t *testing.T) { - testAccFunction(t, "TestAcc_Function/python") -} -*/ - -func TestAcc_Function_complex(t *testing.T) { - name := acc.TestClient().Ids.Alpha() - resourceName := "snowflake_function.f" - m := func() map[string]config.Variable { - return map[string]config.Variable{ - "name": config.StringVariable(name), - "database": config.StringVariable(acc.TestDatabaseName), - "schema": config.StringVariable(acc.TestSchemaName), - "comment": config.StringVariable("Terraform acceptance test"), - } - } - variableSet2 := m() - variableSet2["comment"] = config.StringVariable("Terraform acceptance test - updated") - - statement := "\t\tif (D <= 0) {\n\t\t\treturn 1;\n\t\t} else {\n\t\t\tvar result = 1;\n\t\t\tfor (var i = 2; i <= D; i++) {\n\t\t\t\tresult = result * i;\n\t\t\t}\n\t\t\treturn result;\n\t\t}\n" - resource.Test(t, resource.TestCase{ - ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, - PreCheck: func() { acc.TestAccPreCheck(t) }, - TerraformVersionChecks: []tfversion.TerraformVersionCheck{ - tfversion.RequireAbove(tfversion.Version1_5_0), - }, - CheckDestroy: acc.CheckDestroy(t, resources.Function), - Steps: []resource.TestStep{ - { - ConfigDirectory: acc.ConfigurationDirectory("TestAcc_Function/complex"), - ConfigVariables: m(), - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr(resourceName, "name", name), - resource.TestCheckResourceAttr(resourceName, "database", acc.TestDatabaseName), - resource.TestCheckResourceAttr(resourceName, "schema", acc.TestSchemaName), - resource.TestCheckResourceAttr(resourceName, "comment", "Terraform acceptance test"), - resource.TestCheckResourceAttr(resourceName, "statement", statement), - resource.TestCheckResourceAttr(resourceName, "arguments.#", "1"), - resource.TestCheckResourceAttr(resourceName, "arguments.0.name", "d"), - resource.TestCheckResourceAttr(resourceName, "arguments.0.type", "FLOAT"), - resource.TestCheckResourceAttr(resourceName, 
"return_behavior", "VOLATILE"), - resource.TestCheckResourceAttr(resourceName, "return_type", "FLOAT"), - resource.TestCheckResourceAttr(resourceName, "language", "JAVASCRIPT"), - resource.TestCheckResourceAttr(resourceName, "null_input_behavior", "CALLED ON NULL INPUT"), - - // computed attributes - resource.TestCheckResourceAttrSet(resourceName, "return_type"), - resource.TestCheckResourceAttrSet(resourceName, "statement"), - resource.TestCheckResourceAttrSet(resourceName, "is_secure"), - ), - }, - - // test - change comment - { - ConfigDirectory: acc.ConfigurationDirectory("TestAcc_Function/complex"), - ConfigVariables: variableSet2, - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr(resourceName, "name", name), - resource.TestCheckResourceAttr(resourceName, "database", acc.TestDatabaseName), - resource.TestCheckResourceAttr(resourceName, "schema", acc.TestSchemaName), - resource.TestCheckResourceAttr(resourceName, "comment", "Terraform acceptance test - updated"), - ), - }, - - // test - import - { - ConfigDirectory: acc.ConfigurationDirectory("TestAcc_Function/complex"), - ConfigVariables: variableSet2, - ResourceName: resourceName, - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{ - "language", - }, - }, - }, - }) -} - -// proves issue https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2490 -func TestAcc_Function_migrateFromVersion085(t *testing.T) { - id := acc.TestClient().Ids.RandomSchemaObjectIdentifierWithArguments(sdk.DataTypeVARCHAR) - name := id.Name() - comment := random.Comment() - resourceName := "snowflake_function.f" - - resource.Test(t, resource.TestCase{ - PreCheck: func() { acc.TestAccPreCheck(t) }, - TerraformVersionChecks: []tfversion.TerraformVersionCheck{ - tfversion.RequireAbove(tfversion.Version1_5_0), - }, - CheckDestroy: acc.CheckDestroy(t, resources.Function), - - // Using the string config because of the validation in teststep_validate.go: - // 
teststep.Config.HasConfigurationFiles() returns true both for ConfigFile and ConfigDirectory. - // It returns false for Config. I don't understand why they have such a validation, but we will work around it later. - // Added as subtask SNOW-1057066 to SNOW-926148. - Steps: []resource.TestStep{ - { - PreConfig: func() { acc.SetV097CompatibleConfigPathEnv(t) }, - ExternalProviders: map[string]resource.ExternalProvider{ - "snowflake": { - VersionConstraint: "=0.85.0", - Source: "Snowflake-Labs/snowflake", - }, - }, - Config: functionConfig(acc.TestDatabaseName, acc.TestSchemaName, name, comment), - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr(resourceName, "id", fmt.Sprintf("%s|%s|%s|VARCHAR", acc.TestDatabaseName, acc.TestSchemaName, name)), - resource.TestCheckResourceAttr(resourceName, "name", name), - resource.TestCheckResourceAttr(resourceName, "database", acc.TestDatabaseName), - resource.TestCheckResourceAttr(resourceName, "schema", acc.TestSchemaName), - ), - }, - { - ExternalProviders: map[string]resource.ExternalProvider{ - "snowflake": { - VersionConstraint: "=0.94.1", - Source: "Snowflake-Labs/snowflake", - }, - }, - Config: functionConfig(acc.TestDatabaseName, acc.TestSchemaName, name, comment), - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr(resourceName, "id", id.FullyQualifiedName()), - resource.TestCheckResourceAttr(resourceName, "name", name), - resource.TestCheckResourceAttr(resourceName, "database", acc.TestDatabaseName), - resource.TestCheckResourceAttr(resourceName, "schema", acc.TestSchemaName), - ), - }, - }, - }) -} - -func TestAcc_Function_Rename(t *testing.T) { - oldId := acc.TestClient().Ids.RandomSchemaObjectIdentifierWithArguments(sdk.DataTypeVARCHAR) - newId := acc.TestClient().Ids.RandomSchemaObjectIdentifierWithArguments(sdk.DataTypeVARCHAR) - comment := random.Comment() - newComment := random.Comment() - resourceName := "snowflake_function.f" - - resource.Test(t, resource.TestCase{ - 
ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, - PreCheck: func() { acc.TestAccPreCheck(t) }, - TerraformVersionChecks: []tfversion.TerraformVersionCheck{ - tfversion.RequireAbove(tfversion.Version1_5_0), - }, - CheckDestroy: acc.CheckDestroy(t, resources.Function), - Steps: []resource.TestStep{ - { - Config: functionConfig(acc.TestDatabaseName, acc.TestSchemaName, oldId.Name(), comment), - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr(resourceName, "name", oldId.Name()), - resource.TestCheckResourceAttr(resourceName, "fully_qualified_name", oldId.FullyQualifiedName()), - resource.TestCheckResourceAttr(resourceName, "comment", comment), - ), - }, - { - Config: functionConfig(acc.TestDatabaseName, acc.TestSchemaName, newId.Name(), newComment), - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr(resourceName, "name", newId.Name()), - resource.TestCheckResourceAttr(resourceName, "fully_qualified_name", newId.FullyQualifiedName()), - resource.TestCheckResourceAttr(resourceName, "comment", newComment), - ), - ConfigPlanChecks: resource.ConfigPlanChecks{ - PreApply: []plancheck.PlanCheck{ - plancheck.ExpectResourceAction(resourceName, plancheck.ResourceActionUpdate), - }, - PostApplyPostRefresh: []plancheck.PlanCheck{ - plancheck.ExpectEmptyPlan(), - }, - }, - }, - }, - }) -} - -func functionConfig(database string, schema string, name string, comment string) string { - return fmt.Sprintf(` -resource "snowflake_function" "f" { - database = "%[1]s" - schema = "%[2]s" - name = "%[3]s" - comment = "%[4]s" - return_type = "VARCHAR" - return_behavior = "IMMUTABLE" - statement = "SELECT PARAM" - - arguments { - name = "PARAM" - type = "VARCHAR" - } -} -`, database, schema, name, comment) -} - -// TODO [SNOW-1348103]: do not trim the data type (e.g. 
NUMBER(10, 2) -> NUMBER loses the information as shown in this test); finish the test -// proves https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2735 -func TestAcc_Function_gh2735(t *testing.T) { - t.Skipf("Will be fixed with functions redesign in SNOW-1348103") - name := acc.TestClient().Ids.Alpha() - resourceName := "snowflake_function.f" - - resource.Test(t, resource.TestCase{ - ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, - PreCheck: func() { acc.TestAccPreCheck(t) }, - TerraformVersionChecks: []tfversion.TerraformVersionCheck{ - tfversion.RequireAbove(tfversion.Version1_5_0), - }, - CheckDestroy: acc.CheckDestroy(t, resources.Function), - Steps: []resource.TestStep{ - { - Config: functionConfigGh2735(acc.TestDatabaseName, acc.TestSchemaName, name), - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr(resourceName, "name", name), - ), - }, - }, - }) -} - -func functionConfigGh2735(database string, schema string, name string) string { - return fmt.Sprintf(` -resource "snowflake_function" "f" { - database = "%[1]s" - schema = "%[2]s" - name = "%[3]s" - return_type = "TABLE (NUM1 NUMBER, NUM2 NUMBER(10,2))" - - statement = <"] # TODO: Replace @@ -30,7 +30,7 @@ resource "snowflake_database" "primary" { } resource "snowflake_database" "secondary" { - name = "test" - data_retention_time_in_days = 0 # to avoid in-place update to -1 - from_replica = ".\"${snowflake_database.primary.name}\"" # TODO: Replace + name = "test" + data_retention_time_in_days = 0 # to avoid in-place update to -1 + from_replica = ".\"${snowflake_database.primary.name}\"" # TODO: Replace } diff --git a/pkg/resources/manual_tests/upgrade_shared_database/step_2.tf b/pkg/resources/manual_tests/upgrade_shared_database/step_2.tf index ed8bce3132..0ab2f37c8c 100644 --- a/pkg/resources/manual_tests/upgrade_shared_database/step_2.tf +++ b/pkg/resources/manual_tests/upgrade_shared_database/step_2.tf @@ -1,6 +1,6 @@ # Commands to run # - 
terraform init - upgrade -# - terraform plan (should observe upgrader errors similar to: failed to upgrade the state with database created from share, please use snowflake_shared_database or deprecated snowflake_database_old instead) +# - terraform plan (should observe upgrader errors similar to: failed to upgrade the state with database created from share, please use snowflake_shared_database instead) # - terraform state rm snowflake_database.from_share (remove shared database from the state) terraform { @@ -16,32 +16,32 @@ provider "snowflake" {} provider "snowflake" { profile = "secondary_test_account" - alias = second_account + alias = second_account } resource "snowflake_share" "test" { provider = snowflake.second_account - name = "test_share" + name = "test_share" accounts = ["."] # TODO: Replace } resource "snowflake_database" "test" { provider = snowflake.second_account - name = "test_database" + name = "test_database" } resource "snowflake_grant_privileges_to_share" "test" { - provider = snowflake.second_account - privileges = ["USAGE"] + provider = snowflake.second_account + privileges = ["USAGE"] on_database = snowflake_database.test.name - to_share = snowflake_share.test.name + to_share = snowflake_share.test.name } resource "snowflake_database" "from_share" { - depends_on = [ snowflake_grant_privileges_to_share.test ] - name = snowflake_database.test.name + depends_on = [snowflake_grant_privileges_to_share.test] + name = snowflake_database.test.name from_share = { provider = "" # TODO: Replace - share = snowflake_share.test.name + share = snowflake_share.test.name } } diff --git a/pkg/resources/materialized_view.go b/pkg/resources/materialized_view.go index 2dacd668e6..4cb416b2c4 100644 --- a/pkg/resources/materialized_view.go +++ b/pkg/resources/materialized_view.go @@ -5,6 +5,7 @@ import ( "fmt" "log" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/provider/previewfeatures" 
"github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/provider/resources" "github.com/hashicorp/terraform-plugin-sdk/v2/diag" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/customdiff" @@ -73,10 +74,10 @@ var materializedViewSchema = map[string]*schema.Schema{ // MaterializedView returns a pointer to the resource representing a view. func MaterializedView() *schema.Resource { return &schema.Resource{ - CreateContext: TrackingCreateWrapper(resources.MaterializedView, CreateMaterializedView), - ReadContext: TrackingReadWrapper(resources.MaterializedView, ReadMaterializedView), - UpdateContext: TrackingUpdateWrapper(resources.MaterializedView, UpdateMaterializedView), - DeleteContext: TrackingDeleteWrapper(resources.MaterializedView, DeleteMaterializedView), + CreateContext: PreviewFeatureCreateContextWrapper(string(previewfeatures.MaterializedViewResource), TrackingCreateWrapper(resources.MaterializedView, CreateMaterializedView)), + ReadContext: PreviewFeatureReadContextWrapper(string(previewfeatures.MaterializedViewResource), TrackingReadWrapper(resources.MaterializedView, ReadMaterializedView)), + UpdateContext: PreviewFeatureUpdateContextWrapper(string(previewfeatures.MaterializedViewResource), TrackingUpdateWrapper(resources.MaterializedView, UpdateMaterializedView)), + DeleteContext: PreviewFeatureDeleteContextWrapper(string(previewfeatures.MaterializedViewResource), TrackingDeleteWrapper(resources.MaterializedView, DeleteMaterializedView)), CustomizeDiff: TrackingCustomDiffWrapper(resources.MaterializedView, customdiff.All( ComputedIfAnyAttributeChanged(materializedViewSchema, FullyQualifiedNameAttributeName, "name"), diff --git a/pkg/resources/network_policy_attachment.go b/pkg/resources/network_policy_attachment.go index c68ab02f85..ebdee5825a 100644 --- a/pkg/resources/network_policy_attachment.go +++ b/pkg/resources/network_policy_attachment.go @@ -6,6 +6,7 @@ import ( "log" "strings" + 
"github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/provider/previewfeatures" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/provider/resources" "github.com/hashicorp/terraform-plugin-sdk/v2/diag" @@ -39,10 +40,10 @@ var networkPolicyAttachmentSchema = map[string]*schema.Schema{ // NetworkPolicyAttachment returns a pointer to the resource representing a network policy attachment. func NetworkPolicyAttachment() *schema.Resource { return &schema.Resource{ - CreateContext: TrackingCreateWrapper(resources.NetworkPolicyAttachment, CreateNetworkPolicyAttachment), - ReadContext: TrackingReadWrapper(resources.NetworkPolicyAttachment, ReadNetworkPolicyAttachment), - UpdateContext: TrackingUpdateWrapper(resources.NetworkPolicyAttachment, UpdateNetworkPolicyAttachment), - DeleteContext: TrackingDeleteWrapper(resources.NetworkPolicyAttachment, DeleteNetworkPolicyAttachment), + CreateContext: PreviewFeatureCreateContextWrapper(string(previewfeatures.NetworkPolicyAttachmentResource), TrackingCreateWrapper(resources.NetworkPolicyAttachment, CreateNetworkPolicyAttachment)), + ReadContext: PreviewFeatureReadContextWrapper(string(previewfeatures.NetworkPolicyAttachmentResource), TrackingReadWrapper(resources.NetworkPolicyAttachment, ReadNetworkPolicyAttachment)), + UpdateContext: PreviewFeatureUpdateContextWrapper(string(previewfeatures.NetworkPolicyAttachmentResource), TrackingUpdateWrapper(resources.NetworkPolicyAttachment, UpdateNetworkPolicyAttachment)), + DeleteContext: PreviewFeatureDeleteContextWrapper(string(previewfeatures.NetworkPolicyAttachmentResource), TrackingDeleteWrapper(resources.NetworkPolicyAttachment, DeleteNetworkPolicyAttachment)), Schema: networkPolicyAttachmentSchema, Importer: &schema.ResourceImporter{ diff --git a/pkg/resources/network_rule.go b/pkg/resources/network_rule.go index 5ba1a49c55..402df375e1 100644 --- a/pkg/resources/network_rule.go +++ b/pkg/resources/network_rule.go @@ -5,6 +5,7 @@ import ( "errors" "fmt" + 
"github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/provider/previewfeatures" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/provider/resources" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/helpers" @@ -67,10 +68,10 @@ var networkRuleSchema = map[string]*schema.Schema{ // NetworkRule returns a pointer to the resource representing a network rule. func NetworkRule() *schema.Resource { return &schema.Resource{ - CreateContext: TrackingCreateWrapper(resources.NetworkRule, CreateContextNetworkRule), - ReadContext: TrackingReadWrapper(resources.NetworkRule, ReadContextNetworkRule), - UpdateContext: TrackingUpdateWrapper(resources.NetworkRule, UpdateContextNetworkRule), - DeleteContext: TrackingDeleteWrapper(resources.NetworkRule, DeleteContextNetworkRule), + CreateContext: PreviewFeatureCreateContextWrapper(string(previewfeatures.NetworkRuleResource), TrackingCreateWrapper(resources.NetworkRule, CreateContextNetworkRule)), + ReadContext: PreviewFeatureReadContextWrapper(string(previewfeatures.NetworkRuleResource), TrackingReadWrapper(resources.NetworkRule, ReadContextNetworkRule)), + UpdateContext: PreviewFeatureUpdateContextWrapper(string(previewfeatures.NetworkRuleResource), TrackingUpdateWrapper(resources.NetworkRule, UpdateContextNetworkRule)), + DeleteContext: PreviewFeatureDeleteContextWrapper(string(previewfeatures.NetworkRuleResource), TrackingDeleteWrapper(resources.NetworkRule, DeleteContextNetworkRule)), Schema: networkRuleSchema, Importer: &schema.ResourceImporter{ diff --git a/pkg/resources/notification_integration.go b/pkg/resources/notification_integration.go index 109a46e0ba..0f72ec5c54 100644 --- a/pkg/resources/notification_integration.go +++ b/pkg/resources/notification_integration.go @@ -6,6 +6,7 @@ import ( "log" "strings" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/provider/previewfeatures" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/provider/resources" 
"github.com/hashicorp/terraform-plugin-sdk/v2/diag" @@ -154,10 +155,10 @@ var notificationIntegrationSchema = map[string]*schema.Schema{ // NotificationIntegration returns a pointer to the resource representing a notification integration. func NotificationIntegration() *schema.Resource { return &schema.Resource{ - CreateContext: TrackingCreateWrapper(resources.NotificationIntegration, CreateNotificationIntegration), - ReadContext: TrackingReadWrapper(resources.NotificationIntegration, ReadNotificationIntegration), - UpdateContext: TrackingUpdateWrapper(resources.NotificationIntegration, UpdateNotificationIntegration), - DeleteContext: TrackingDeleteWrapper(resources.NotificationIntegration, DeleteNotificationIntegration), + CreateContext: PreviewFeatureCreateContextWrapper(string(previewfeatures.NotificationIntegrationResource), TrackingCreateWrapper(resources.NotificationIntegration, CreateNotificationIntegration)), + ReadContext: PreviewFeatureReadContextWrapper(string(previewfeatures.NotificationIntegrationResource), TrackingReadWrapper(resources.NotificationIntegration, ReadNotificationIntegration)), + UpdateContext: PreviewFeatureUpdateContextWrapper(string(previewfeatures.NotificationIntegrationResource), TrackingUpdateWrapper(resources.NotificationIntegration, UpdateNotificationIntegration)), + DeleteContext: PreviewFeatureDeleteContextWrapper(string(previewfeatures.NotificationIntegrationResource), TrackingDeleteWrapper(resources.NotificationIntegration, DeleteNotificationIntegration)), Schema: notificationIntegrationSchema, Importer: &schema.ResourceImporter{ diff --git a/pkg/resources/oauth_integration.go b/pkg/resources/oauth_integration.go deleted file mode 100644 index 22b812ed91..0000000000 --- a/pkg/resources/oauth_integration.go +++ /dev/null @@ -1,347 +0,0 @@ -package resources - -import ( - "context" - "fmt" - "log" - "strconv" - "strings" - - "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/provider/resources" - 
"github.com/hashicorp/terraform-plugin-sdk/v2/diag" - - "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/internal/provider" - - "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/snowflake" - "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" - "github.com/hashicorp/terraform-plugin-sdk/v2/helper/validation" -) - -var oauthIntegrationSchema = map[string]*schema.Schema{ - "name": { - Type: schema.TypeString, - Required: true, - ForceNew: true, - Description: "Specifies the name of the OAuth integration. This name follows the rules for Object Identifiers. The name should be unique among security integrations in your account.", - }, - "oauth_client": { - Type: schema.TypeString, - Required: true, - Description: "Specifies the OAuth client type.", - ValidateFunc: validation.StringInSlice([]string{ - "TABLEAU_DESKTOP", "TABLEAU_SERVER", "LOOKER", "CUSTOM", - }, false), - }, - "oauth_redirect_uri": { - Type: schema.TypeString, - Optional: true, - Description: "Specifies the client URI. After a user is authenticated, the web browser is redirected to this URI.", - }, - "oauth_client_type": { - Type: schema.TypeString, - Optional: true, - Description: "Specifies the type of client being registered. Snowflake supports both confidential and public clients.", - ValidateFunc: validation.StringInSlice([]string{ - "CONFIDENTIAL", "PUBLIC", - }, false), - }, - "oauth_issue_refresh_tokens": { - Type: schema.TypeBool, - Optional: true, - Description: "Specifies whether to allow the client to exchange a refresh token for an access token when the current access token has expired.", - }, - "oauth_refresh_token_validity": { - Type: schema.TypeInt, - Optional: true, - Description: "Specifies how long refresh tokens should be valid (in seconds). 
OAUTH_ISSUE_REFRESH_TOKENS must be set to TRUE.", - }, - "oauth_use_secondary_roles": { - Type: schema.TypeString, - Optional: true, - Default: "NONE", - Description: "Specifies whether default secondary roles set in the user properties are activated by default in the session being opened.", - ValidateFunc: validation.StringInSlice([]string{ - "IMPLICIT", "NONE", - }, false), - }, - "blocked_roles_list": { - Type: schema.TypeSet, - Elem: &schema.Schema{Type: schema.TypeString}, - Optional: true, - Description: "List of roles that a user cannot explicitly consent to using after authenticating. Do not include ACCOUNTADMIN, ORGADMIN or SECURITYADMIN as they are already implicitly enforced and will cause in-place updates.", - }, - "comment": { - Type: schema.TypeString, - Optional: true, - Description: "Specifies a comment for the OAuth integration.", - }, - "enabled": { - Type: schema.TypeBool, - Optional: true, - Description: "Specifies whether this OAuth integration is enabled or disabled.", - }, - "created_on": { - Type: schema.TypeString, - Computed: true, - Description: "Date and time when the OAuth integration was created.", - }, -} - -// OAuthIntegration returns a pointer to the resource representing an OAuth integration. -func OAuthIntegration() *schema.Resource { - return &schema.Resource{ - CreateContext: TrackingCreateWrapper(resources.OauthIntegration, CreateOAuthIntegration), - ReadContext: TrackingReadWrapper(resources.OauthIntegration, ReadOAuthIntegration), - UpdateContext: TrackingUpdateWrapper(resources.OauthIntegration, UpdateOAuthIntegration), - DeleteContext: TrackingDeleteWrapper(resources.OauthIntegration, DeleteOAuthIntegration), - DeprecationMessage: "This resource is deprecated and will be removed in a future major version release. 
Please use snowflake_oauth_integration_for_custom_clients or snowflake_oauth_integration_for_partner_applications instead.", - - Schema: oauthIntegrationSchema, - Importer: &schema.ResourceImporter{ - StateContext: schema.ImportStatePassthroughContext, - }, - } -} - -// CreateOAuthIntegration implements schema.CreateFunc. -func CreateOAuthIntegration(ctx context.Context, d *schema.ResourceData, meta any) diag.Diagnostics { - client := meta.(*provider.Context).Client - db := client.GetConn().DB - name := d.Get("name").(string) - - stmt := snowflake.NewOAuthIntegrationBuilder(name).Create() - - // Set required fields - stmt.SetRaw(`TYPE=OAUTH`) - stmt.SetString(`OAUTH_CLIENT`, d.Get("oauth_client").(string)) - // Set optional fields - if _, ok := d.GetOk("oauth_redirect_uri"); ok { - stmt.SetString(`OAUTH_REDIRECT_URI`, d.Get("oauth_redirect_uri").(string)) - } - if _, ok := d.GetOk("oauth_client_type"); ok { - stmt.SetString(`OAUTH_CLIENT_TYPE`, d.Get("oauth_client_type").(string)) - } - if _, ok := d.GetOk("oauth_issue_refresh_tokens"); ok { - stmt.SetBool(`OAUTH_ISSUE_REFRESH_TOKENS`, d.Get("oauth_issue_refresh_tokens").(bool)) - } - if _, ok := d.GetOk("oauth_refresh_token_validity"); ok { - stmt.SetInt(`OAUTH_REFRESH_TOKEN_VALIDITY`, d.Get("oauth_refresh_token_validity").(int)) - } - if _, ok := d.GetOk("oauth_use_secondary_roles"); ok { - stmt.SetString(`OAUTH_USE_SECONDARY_ROLES`, d.Get("oauth_use_secondary_roles").(string)) - } - if _, ok := d.GetOk("blocked_roles_list"); ok { - stmt.SetStringList(`BLOCKED_ROLES_LIST`, expandStringList(d.Get("blocked_roles_list").(*schema.Set).List())) - } - if _, ok := d.GetOk("enabled"); ok { - stmt.SetBool(`ENABLED`, d.Get("enabled").(bool)) - } - if _, ok := d.GetOk("comment"); ok { - stmt.SetString(`COMMENT`, d.Get("comment").(string)) - } - - if err := snowflake.Exec(db, stmt.Statement()); err != nil { - return diag.FromErr(fmt.Errorf("error creating security integration err = %w", err)) - } - - d.SetId(name) - - return 
ReadOAuthIntegration(ctx, d, meta) -} - -// ReadOAuthIntegration implements schema.ReadFunc. -func ReadOAuthIntegration(ctx context.Context, d *schema.ResourceData, meta any) diag.Diagnostics { - client := meta.(*provider.Context).Client - db := client.GetConn().DB - id := d.Id() - - stmt := snowflake.NewOAuthIntegrationBuilder(id).Show() - row := snowflake.QueryRow(db, stmt) - - // Some properties can come from the SHOW INTEGRATION call - - s, err := snowflake.ScanOAuthIntegration(row) - if err != nil { - return diag.FromErr(fmt.Errorf("could not show security integration err = %w", err)) - } - - // Note: category must be Security or something is broken - if c := s.Category.String; c != "SECURITY" { - return diag.FromErr(fmt.Errorf("expected %v to be an Security integration, got %v err = %w", id, c, err)) - } - - if err := d.Set("oauth_client", strings.TrimPrefix(s.IntegrationType.String, "OAUTH - ")); err != nil { - return diag.FromErr(err) - } - - if err := d.Set("name", s.Name.String); err != nil { - return diag.FromErr(err) - } - - if err := d.Set("enabled", s.Enabled.Bool); err != nil { - return diag.FromErr(err) - } - - if err := d.Set("comment", s.Comment.String); err != nil { - return diag.FromErr(err) - } - - if err := d.Set("created_on", s.CreatedOn.String); err != nil { - return diag.FromErr(err) - } - - // Some properties come from the DESCRIBE INTEGRATION call - // We need to grab them in a loop - var k, pType string - var v, unused interface{} - stmt = snowflake.NewOAuthIntegrationBuilder(id).Describe() - rows, err := db.Query(stmt) - if err != nil { - return diag.FromErr(fmt.Errorf("could not describe security integration err = %w", err)) - } - defer rows.Close() - for rows.Next() { - if err := rows.Scan(&k, &pType, &v, &unused); err != nil { - return diag.FromErr(fmt.Errorf("unable to parse security integration rows err = %w", err)) - } - switch k { - case "ENABLED": - // We set this using the SHOW INTEGRATION call so let's ignore it here - case 
"COMMENT": - // We set this using the SHOW INTEGRATION call so let's ignore it here - case "OAUTH_ISSUE_REFRESH_TOKENS": - b, err := strconv.ParseBool(v.(string)) - if err != nil { - return diag.FromErr(fmt.Errorf("returned OAuth issue refresh tokens that is not boolean err = %w", err)) - } - if err := d.Set("oauth_issue_refresh_tokens", b); err != nil { - return diag.FromErr(fmt.Errorf("unable to set OAuth issue refresh tokens for security integration err = %w", err)) - } - case "OAUTH_REFRESH_TOKEN_VALIDITY": - i, err := strconv.Atoi(v.(string)) - if err != nil { - return diag.FromErr(fmt.Errorf("returned OAuth refresh token validity that is not integer err = %w", err)) - } - if err := d.Set("oauth_refresh_token_validity", i); err != nil { - return diag.FromErr(fmt.Errorf("unable to set OAuth refresh token validity for security integration err = %w", err)) - } - case "OAUTH_USE_SECONDARY_ROLES": - if err := d.Set("oauth_use_secondary_roles", v.(string)); err != nil { - return diag.FromErr(fmt.Errorf("unable to set OAuth use secondary roles for security integration err = %w", err)) - } - case "BLOCKED_ROLES_LIST": - blockedRolesAll := strings.Split(v.(string), ",") - - // Only roles other than ACCOUNTADMIN, ORGADMIN and SECURITYADMIN can be specified custom, - // those three are enforced with no option to remove them - blockedRolesCustom := []string{} - for _, role := range blockedRolesAll { - if role != "ACCOUNTADMIN" && role != "ORGADMIN" && role != "SECURITYADMIN" { - blockedRolesCustom = append(blockedRolesCustom, role) - } - } - - if err := d.Set("blocked_roles_list", blockedRolesCustom); err != nil { - return diag.FromErr(fmt.Errorf("unable to set blocked roles list for security integration err = %w", err)) - } - case "OAUTH_REDIRECT_URI": - if err := d.Set("oauth_redirect_uri", v.(string)); err != nil { - return diag.FromErr(fmt.Errorf("unable to set OAuth redirect URI for security integration err = %w", err)) - } - case "OAUTH_CLIENT_TYPE": - isTableau := 
strings.HasSuffix(s.IntegrationType.String, "TABLEAU_DESKTOP") || - strings.HasSuffix(s.IntegrationType.String, "TABLEAU_SERVER") - if !isTableau { - if err = d.Set("oauth_client_type", v.(string)); err != nil { - return diag.FromErr(fmt.Errorf("unable to set OAuth client type for security integration err = %w", err)) - } - } - case "OAUTH_ENFORCE_PKCE": - // Only used for custom OAuth clients (not supported yet) - case "OAUTH_AUTHORIZATION_ENDPOINT": - // Only used for custom OAuth clients (not supported yet) - case "OAUTH_TOKEN_ENDPOINT": - // Only used for custom OAuth clients (not supported yet) - case "OAUTH_ALLOWED_AUTHORIZATION_ENDPOINTS": - // Only used for custom OAuth clients (not supported yet) - case "OAUTH_ALLOWED_TOKEN_ENDPOINTS": - // Only used for custom OAuth clients (not supported yet) - case "PRE_AUTHORIZED_ROLES_LIST": - // Only used for custom OAuth clients (not supported yet) - - default: - log.Printf("[WARN] unexpected security integration property %v returned from Snowflake", k) - } - } - - return diag.FromErr(err) -} - -// UpdateOAuthIntegration implements schema.UpdateFunc. 
-func UpdateOAuthIntegration(ctx context.Context, d *schema.ResourceData, meta any) diag.Diagnostics { - client := meta.(*provider.Context).Client - db := client.GetConn().DB - id := d.Id() - - stmt := snowflake.NewOAuthIntegrationBuilder(id).Alter() - - var runSetStatement bool - - if d.HasChange("oauth_client") { - runSetStatement = true - stmt.SetString(`OAUTH_CLIENT`, d.Get("oauth_client").(string)) - } - - if d.HasChange("oauth_redirect_uri") { - runSetStatement = true - stmt.SetString(`OAUTH_REDIRECT_URI`, d.Get("oauth_redirect_uri").(string)) - } - - if d.HasChange("oauth_client_type") { - runSetStatement = true - stmt.SetString(`OAUTH_CLIENT_TYPE`, d.Get("oauth_client_type").(string)) - } - - if d.HasChange("oauth_issue_refresh_tokens") { - runSetStatement = true - stmt.SetBool(`OAUTH_ISSUE_REFRESH_TOKENS`, d.Get("oauth_issue_refresh_tokens").(bool)) - } - - if d.HasChange("oauth_refresh_token_validity") { - runSetStatement = true - stmt.SetInt(`OAUTH_REFRESH_TOKEN_VALIDITY`, d.Get("oauth_refresh_token_validity").(int)) - } - - if d.HasChange("oauth_use_secondary_roles") { - runSetStatement = true - stmt.SetString(`OAUTH_USE_SECONDARY_ROLES`, d.Get("oauth_use_secondary_roles").(string)) - } - - if d.HasChange("blocked_roles_list") { - runSetStatement = true - stmt.SetStringList(`BLOCKED_ROLES_LIST`, expandStringList(d.Get("blocked_roles_list").(*schema.Set).List())) - } - - if d.HasChange("enabled") { - runSetStatement = true - stmt.SetBool(`ENABLED`, d.Get("enabled").(bool)) - } - - if d.HasChange("comment") { - runSetStatement = true - stmt.SetString(`COMMENT`, d.Get("comment").(string)) - } - - if runSetStatement { - if err := snowflake.Exec(db, stmt.Statement()); err != nil { - return diag.FromErr(fmt.Errorf("error updating security integration err = %w", err)) - } - } - - return ReadOAuthIntegration(ctx, d, meta) -} - -// DeleteOAuthIntegration implements schema.DeleteFunc. 
-func DeleteOAuthIntegration(ctx context.Context, d *schema.ResourceData, meta any) diag.Diagnostics { - return diag.FromErr(DeleteResource("", snowflake.NewOAuthIntegrationBuilder)(d, meta)) -} diff --git a/pkg/resources/oauth_integration_acceptance_test.go b/pkg/resources/oauth_integration_acceptance_test.go deleted file mode 100644 index 553913c7ba..0000000000 --- a/pkg/resources/oauth_integration_acceptance_test.go +++ /dev/null @@ -1,113 +0,0 @@ -package resources_test - -import ( - "fmt" - "testing" - - acc "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance" - - "github.com/hashicorp/terraform-plugin-testing/helper/resource" - "github.com/hashicorp/terraform-plugin-testing/tfversion" -) - -func TestAcc_OAuthIntegration(t *testing.T) { - name := acc.TestClient().Ids.Alpha() - oauthClient := "CUSTOM" - clientType := "PUBLIC" - - resource.Test(t, resource.TestCase{ - ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, - TerraformVersionChecks: []tfversion.TerraformVersionCheck{ - tfversion.RequireAbove(tfversion.Version1_5_0), - }, - PreCheck: func() { acc.TestAccPreCheck(t) }, - CheckDestroy: nil, - Steps: []resource.TestStep{ - { - Config: oauthIntegrationConfig(name, oauthClient, clientType, "SYSADMIN"), - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr("snowflake_oauth_integration.test", "name", name), - resource.TestCheckResourceAttr("snowflake_oauth_integration.test", "oauth_client", oauthClient), - resource.TestCheckResourceAttr("snowflake_oauth_integration.test", "oauth_client_type", clientType), - resource.TestCheckResourceAttr("snowflake_oauth_integration.test", "oauth_issue_refresh_tokens", "true"), - resource.TestCheckResourceAttr("snowflake_oauth_integration.test", "oauth_refresh_token_validity", "3600"), - resource.TestCheckResourceAttr("snowflake_oauth_integration.test", "blocked_roles_list.#", "1"), - resource.TestCheckResourceAttr("snowflake_oauth_integration.test", "blocked_roles_list.0", 
"SYSADMIN"), - ), - }, - { - // role change proves https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2358 issue - Config: oauthIntegrationConfig(name, oauthClient, clientType, "USERADMIN"), - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr("snowflake_oauth_integration.test", "name", name), - resource.TestCheckResourceAttr("snowflake_oauth_integration.test", "blocked_roles_list.#", "1"), - resource.TestCheckResourceAttr("snowflake_oauth_integration.test", "blocked_roles_list.0", "USERADMIN"), - ), - }, - { - ResourceName: "snowflake_oauth_integration.test", - ImportState: true, - ImportStateVerify: true, - }, - }, - }) -} - -func oauthIntegrationConfig(name, oauthClient, clientType string, blockedRole string) string { - return fmt.Sprintf(` - resource "snowflake_oauth_integration" "test" { - name = "%s" - oauth_client = "%s" - oauth_client_type = "%s" - oauth_redirect_uri = "https://www.example.com/oauth2/callback" - enabled = true - oauth_issue_refresh_tokens = true - oauth_refresh_token_validity = 3600 - blocked_roles_list = ["%s"] - } - `, name, oauthClient, clientType, blockedRole) -} - -func TestAcc_OAuthIntegrationTableau(t *testing.T) { - name := acc.TestClient().Ids.Alpha() - oauthClient := "TABLEAU_DESKTOP" - clientType := "PUBLIC" // not used, but left to fail the test - - resource.Test(t, resource.TestCase{ - ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, - TerraformVersionChecks: []tfversion.TerraformVersionCheck{ - tfversion.RequireAbove(tfversion.Version1_5_0), - }, - PreCheck: func() { acc.TestAccPreCheck(t) }, - CheckDestroy: nil, - Steps: []resource.TestStep{ - { - Config: oauthIntegrationConfigTableau(name, oauthClient, clientType), - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr("snowflake_oauth_integration.test", "name", name), - resource.TestCheckResourceAttr("snowflake_oauth_integration.test", "oauth_client", oauthClient), - // 
resource.TestCheckResourceAttr("snowflake_oauth_integration.test", "oauth_client_type", clientType), - ), - }, - { - ResourceName: "snowflake_oauth_integration.test", - ImportState: true, - ImportStateVerify: true, - }, - }, - }) -} - -func oauthIntegrationConfigTableau(name, oauthClient, clientType string) string { - return fmt.Sprintf(` - resource "snowflake_oauth_integration" "test" { - name = "%s" - oauth_client = "%s" - # oauth_client_type = "%s" # this cannot be set for TABLEAU - enabled = true - oauth_refresh_token_validity = 36000 - oauth_issue_refresh_tokens = true - blocked_roles_list = ["SYSADMIN"] - } - `, name, oauthClient, clientType) -} diff --git a/pkg/resources/oauth_integration_test.go b/pkg/resources/oauth_integration_test.go deleted file mode 100644 index 7a353dd8f2..0000000000 --- a/pkg/resources/oauth_integration_test.go +++ /dev/null @@ -1,91 +0,0 @@ -package resources_test - -import ( - "context" - "database/sql" - "testing" - - internalprovider "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/internal/provider" - "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk" - - sqlmock "github.com/DATA-DOG/go-sqlmock" - "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/provider" - "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/resources" - . 
"github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/testhelpers/mock" - "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" - "github.com/stretchr/testify/require" -) - -func TestOAuthIntegration(t *testing.T) { - r := require.New(t) - err := resources.OAuthIntegration().InternalValidate(provider.Provider().Schema, true) - r.NoError(err) -} - -func TestOAuthIntegrationCreate(t *testing.T) { - r := require.New(t) - - in := map[string]interface{}{ - "name": "test_oauth_integration", - "oauth_client": "TABLEAU_DESKTOP", - } - d := schema.TestResourceDataRaw(t, resources.OAuthIntegration().Schema, in) - r.NotNil(d) - - WithMockDb(t, func(db *sql.DB, mock sqlmock.Sqlmock) { - mock.ExpectExec( - `^CREATE SECURITY INTEGRATION "test_oauth_integration" TYPE=OAUTH OAUTH_CLIENT='TABLEAU_DESKTOP' OAUTH_USE_SECONDARY_ROLES='NONE'$`, - ).WillReturnResult(sqlmock.NewResult(1, 1)) - expectReadOAuthIntegration(mock) - - diags := resources.CreateOAuthIntegration(context.Background(), d, &internalprovider.Context{ - Client: sdk.NewClientFromDB(db), - }) - r.Empty(diags) - }) -} - -func TestOAuthIntegrationRead(t *testing.T) { - r := require.New(t) - - d := oauthIntegration(t, "test_oauth_integration", map[string]interface{}{"name": "test_oauth_integration"}) - - WithMockDb(t, func(db *sql.DB, mock sqlmock.Sqlmock) { - expectReadOAuthIntegration(mock) - - diags := resources.ReadOAuthIntegration(context.Background(), d, &internalprovider.Context{ - Client: sdk.NewClientFromDB(db), - }) - r.Empty(diags) - }) -} - -func TestOAuthIntegrationDelete(t *testing.T) { - r := require.New(t) - - d := oauthIntegration(t, "drop_it", map[string]interface{}{"name": "drop_it"}) - - WithMockDb(t, func(db *sql.DB, mock sqlmock.Sqlmock) { - mock.ExpectExec(`DROP SECURITY INTEGRATION "drop_it"`).WillReturnResult(sqlmock.NewResult(1, 1)) - diags := resources.DeleteOAuthIntegration(context.Background(), d, &internalprovider.Context{ - Client: sdk.NewClientFromDB(db), - }) - r.Empty(diags) 
- }) -} - -func expectReadOAuthIntegration(mock sqlmock.Sqlmock) { - showRows := sqlmock.NewRows([]string{ - "name", "type", "category", "enabled", "comment", "created_on", - }, - ).AddRow("test_oauth_integration", "OAUTH - TABLEAU_DESKTOP", "SECURITY", true, nil, "now") - mock.ExpectQuery(`^SHOW SECURITY INTEGRATIONS LIKE 'test_oauth_integration'$`).WillReturnRows(showRows) - - descRows := sqlmock.NewRows([]string{ - "property", "property_type", "property_value", "property_default", - }).AddRow("OAUTH_ISSUE_REFRESH_TOKENS", "Boolean", "true", "true"). - AddRow("OAUTH_REFRESH_TOKEN_VALIDITY", "Integer", "86400", "7776000"). - AddRow("BLOCKED_ROLES_LIST", "List", "ACCOUNTADMIN,SECURITYADMIN", nil) - - mock.ExpectQuery(`DESCRIBE SECURITY INTEGRATION "test_oauth_integration"$`).WillReturnRows(descRows) -} diff --git a/pkg/resources/object_parameter.go b/pkg/resources/object_parameter.go index 7d5b01ae08..dac4dad11d 100644 --- a/pkg/resources/object_parameter.go +++ b/pkg/resources/object_parameter.go @@ -5,6 +5,7 @@ import ( "fmt" "strings" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/provider/previewfeatures" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/provider/resources" "github.com/hashicorp/terraform-plugin-sdk/v2/diag" @@ -72,10 +73,10 @@ var objectParameterSchema = map[string]*schema.Schema{ func ObjectParameter() *schema.Resource { return &schema.Resource{ - CreateContext: TrackingCreateWrapper(resources.ObjectParameter, CreateObjectParameter), - ReadContext: TrackingReadWrapper(resources.ObjectParameter, ReadObjectParameter), - UpdateContext: TrackingUpdateWrapper(resources.ObjectParameter, UpdateObjectParameter), - DeleteContext: TrackingDeleteWrapper(resources.ObjectParameter, DeleteObjectParameter), + CreateContext: PreviewFeatureCreateContextWrapper(string(previewfeatures.ObjectParameterResource), TrackingCreateWrapper(resources.ObjectParameter, CreateObjectParameter)), + ReadContext: 
PreviewFeatureReadContextWrapper(string(previewfeatures.ObjectParameterResource), TrackingReadWrapper(resources.ObjectParameter, ReadObjectParameter)), + UpdateContext: PreviewFeatureUpdateContextWrapper(string(previewfeatures.ObjectParameterResource), TrackingUpdateWrapper(resources.ObjectParameter, UpdateObjectParameter)), + DeleteContext: PreviewFeatureDeleteContextWrapper(string(previewfeatures.ObjectParameterResource), TrackingDeleteWrapper(resources.ObjectParameter, DeleteObjectParameter)), Schema: objectParameterSchema, Importer: &schema.ResourceImporter{ diff --git a/pkg/resources/password_policy.go b/pkg/resources/password_policy.go index f450ef6104..a06d3f1575 100644 --- a/pkg/resources/password_policy.go +++ b/pkg/resources/password_policy.go @@ -3,6 +3,7 @@ package resources import ( "context" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/provider/previewfeatures" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/provider/resources" "github.com/hashicorp/terraform-plugin-sdk/v2/diag" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/customdiff" @@ -142,10 +143,10 @@ var passwordPolicySchema = map[string]*schema.Schema{ func PasswordPolicy() *schema.Resource { return &schema.Resource{ Description: "A password policy specifies the requirements that must be met to create and reset a password to authenticate to Snowflake.", - CreateContext: TrackingCreateWrapper(resources.PasswordPolicy, CreatePasswordPolicy), - ReadContext: TrackingReadWrapper(resources.PasswordPolicy, ReadPasswordPolicy), - UpdateContext: TrackingUpdateWrapper(resources.PasswordPolicy, UpdatePasswordPolicy), - DeleteContext: TrackingDeleteWrapper(resources.PasswordPolicy, DeletePasswordPolicy), + CreateContext: PreviewFeatureCreateContextWrapper(string(previewfeatures.PasswordPolicyResource), TrackingCreateWrapper(resources.PasswordPolicy, CreatePasswordPolicy)), + ReadContext: PreviewFeatureReadContextWrapper(string(previewfeatures.PasswordPolicyResource), 
TrackingReadWrapper(resources.PasswordPolicy, ReadPasswordPolicy)), + UpdateContext: PreviewFeatureUpdateContextWrapper(string(previewfeatures.PasswordPolicyResource), TrackingUpdateWrapper(resources.PasswordPolicy, UpdatePasswordPolicy)), + DeleteContext: PreviewFeatureDeleteContextWrapper(string(previewfeatures.PasswordPolicyResource), TrackingDeleteWrapper(resources.PasswordPolicy, DeletePasswordPolicy)), CustomizeDiff: TrackingCustomDiffWrapper(resources.PasswordPolicy, customdiff.All( ComputedIfAnyAttributeChanged(passwordPolicySchema, FullyQualifiedNameAttributeName, "name"), diff --git a/pkg/resources/pipe.go b/pkg/resources/pipe.go index c15140621f..99aa291f46 100644 --- a/pkg/resources/pipe.go +++ b/pkg/resources/pipe.go @@ -6,6 +6,7 @@ import ( "log" "strings" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/provider/previewfeatures" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/provider/resources" "github.com/hashicorp/terraform-plugin-sdk/v2/diag" @@ -86,10 +87,10 @@ var pipeSchema = map[string]*schema.Schema{ func Pipe() *schema.Resource { return &schema.Resource{ - CreateContext: TrackingCreateWrapper(resources.Pipe, CreatePipe), - ReadContext: TrackingReadWrapper(resources.Pipe, ReadPipe), - UpdateContext: TrackingUpdateWrapper(resources.Pipe, UpdatePipe), - DeleteContext: TrackingDeleteWrapper(resources.Pipe, DeletePipe), + CreateContext: PreviewFeatureCreateContextWrapper(string(previewfeatures.PipeResource), TrackingCreateWrapper(resources.Pipe, CreatePipe)), + ReadContext: PreviewFeatureReadContextWrapper(string(previewfeatures.PipeResource), TrackingReadWrapper(resources.Pipe, ReadPipe)), + UpdateContext: PreviewFeatureUpdateContextWrapper(string(previewfeatures.PipeResource), TrackingUpdateWrapper(resources.Pipe, UpdatePipe)), + DeleteContext: PreviewFeatureDeleteContextWrapper(string(previewfeatures.PipeResource), TrackingDeleteWrapper(resources.Pipe, DeletePipe)), Schema: pipeSchema, Importer: 
&schema.ResourceImporter{ diff --git a/pkg/resources/procedure.go b/pkg/resources/procedure.go deleted file mode 100644 index 2c567902f8..0000000000 --- a/pkg/resources/procedure.go +++ /dev/null @@ -1,804 +0,0 @@ -package resources - -import ( - "context" - "fmt" - "log" - "regexp" - "slices" - "strings" - - "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/internal/provider" - "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/provider/resources" - "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/schemas" - "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk" - "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk/datatypes" - "github.com/hashicorp/go-cty/cty" - "github.com/hashicorp/terraform-plugin-sdk/v2/diag" - "github.com/hashicorp/terraform-plugin-sdk/v2/helper/customdiff" - "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" - "github.com/hashicorp/terraform-plugin-sdk/v2/helper/validation" -) - -var procedureSchema = map[string]*schema.Schema{ - "name": { - Type: schema.TypeString, - Required: true, - Description: "Specifies the identifier for the procedure; does not have to be unique for the schema in which the procedure is created. Don't use the | character.", - }, - "database": { - Type: schema.TypeString, - Required: true, - Description: "The database in which to create the procedure. Don't use the | character.", - ForceNew: true, - }, - "schema": { - Type: schema.TypeString, - Required: true, - Description: "The schema in which to create the procedure. Don't use the | character.", - ForceNew: true, - }, - "secure": { - Type: schema.TypeBool, - Optional: true, - Description: "Specifies that the procedure is secure. 
For more information about secure procedures, see Protecting Sensitive Information with Secure UDFs and Stored Procedures.", - Default: false, - }, - "arguments": { - Type: schema.TypeList, - Elem: &schema.Resource{ - Schema: map[string]*schema.Schema{ - "name": { - Type: schema.TypeString, - Required: true, - // Suppress the diff shown if the values are equal when both compared in lower case. - DiffSuppressFunc: func(k, old, new string, d *schema.ResourceData) bool { - return strings.EqualFold(old, new) - }, - Description: "The argument name", - }, - "type": { - Type: schema.TypeString, - Required: true, - Description: "The argument type", - ValidateDiagFunc: IsDataTypeValid, - DiffSuppressFunc: DiffSuppressDataTypes, - }, - }, - }, - Optional: true, - Description: "List of the arguments for the procedure", - ForceNew: true, - }, - "return_type": { - Type: schema.TypeString, - Description: "The return type of the procedure", - // Suppress the diff shown if the values are equal when both compared in lower case. 
- DiffSuppressFunc: func(k, old, new string, d *schema.ResourceData) bool { - if strings.EqualFold(old, new) { - return true - } - - varcharType := []string{"VARCHAR(16777216)", "VARCHAR", "text", "string", "NVARCHAR", "NVARCHAR2", "CHAR VARYING", "NCHAR VARYING"} - if slices.Contains(varcharType, strings.ToUpper(old)) && slices.Contains(varcharType, strings.ToUpper(new)) { - return true - } - - // all these types are equivalent https://docs.snowflake.com/en/sql-reference/data-types-numeric.html#int-integer-bigint-smallint-tinyint-byteint - integerTypes := []string{"INT", "INTEGER", "BIGINT", "SMALLINT", "TINYINT", "BYTEINT", "NUMBER(38,0)"} - if slices.Contains(integerTypes, strings.ToUpper(old)) && slices.Contains(integerTypes, strings.ToUpper(new)) { - return true - } - return false - }, - Required: true, - ForceNew: true, - }, - "statement": { - Type: schema.TypeString, - Required: true, - Description: "Specifies the code used to create the procedure.", - ForceNew: true, - DiffSuppressFunc: DiffSuppressStatement, - }, - "language": { - Type: schema.TypeString, - Optional: true, - Default: "SQL", - DiffSuppressFunc: func(k, old, new string, d *schema.ResourceData) bool { - return strings.EqualFold(old, new) - }, - ValidateFunc: validation.StringInSlice([]string{"javascript", "java", "scala", "SQL", "python"}, true), - Description: "Specifies the language of the stored procedure code.", - }, - "execute_as": { - Type: schema.TypeString, - Optional: true, - Default: "OWNER", - DiffSuppressFunc: func(k, old, new string, d *schema.ResourceData) bool { - return strings.EqualFold(old, new) - }, - ValidateFunc: validation.StringInSlice([]string{"CALLER", "OWNER"}, true), - Description: "Sets execution context. Allowed values are CALLER and OWNER (consult a proper section in the [docs](https://docs.snowflake.com/en/sql-reference/sql/create-procedure#id1)). 
For more information see [caller's rights and owner's rights](https://docs.snowflake.com/en/developer-guide/stored-procedure/stored-procedures-rights).", - }, - "null_input_behavior": { - Type: schema.TypeString, - Optional: true, - Default: "CALLED ON NULL INPUT", - ForceNew: true, - // We do not use STRICT, because Snowflake then in the Read phase returns RETURNS NULL ON NULL INPUT - ValidateFunc: validation.StringInSlice([]string{"CALLED ON NULL INPUT", "RETURNS NULL ON NULL INPUT"}, false), - Description: "Specifies the behavior of the procedure when called with null inputs.", - }, - "return_behavior": { - Type: schema.TypeString, - Optional: true, - Default: "VOLATILE", - ForceNew: true, - ValidateFunc: validation.StringInSlice([]string{"VOLATILE", "IMMUTABLE"}, false), - Description: "Specifies the behavior of the function when returning results", - Deprecated: "These keywords are deprecated for stored procedures. These keywords are not intended to apply to stored procedures. In a future release, these keywords will be removed from the documentation.", - }, - "comment": { - Type: schema.TypeString, - Optional: true, - Default: "user-defined procedure", - Description: "Specifies a comment for the procedure.", - }, - "runtime_version": { - Type: schema.TypeString, - Optional: true, - ForceNew: true, - Description: "Required for Python procedures. Specifies Python runtime version.", - }, - "packages": { - Type: schema.TypeList, - Elem: &schema.Schema{ - Type: schema.TypeString, - }, - Optional: true, - ForceNew: true, - Description: "List of package imports to use for Java / Python procedures. For Java, package imports should be of the form: package_name:version_number, where package_name is snowflake_domain:package. 
For Python use it should be: ('numpy','pandas','xgboost==1.5.0').", - }, - "imports": { - Type: schema.TypeList, - Elem: &schema.Schema{ - Type: schema.TypeString, - }, - Optional: true, - ForceNew: true, - Description: "Imports for Java / Python procedures. For Java this a list of jar files, for Python this is a list of Python files.", - }, - "handler": { - Type: schema.TypeString, - Optional: true, - ForceNew: true, - Description: "The handler method for Java / Python procedures.", - }, - FullyQualifiedNameAttributeName: schemas.FullyQualifiedNameSchema, -} - -// Procedure returns a pointer to the resource representing a stored procedure. -func Procedure() *schema.Resource { - return &schema.Resource{ - SchemaVersion: 2, - - CreateContext: TrackingCreateWrapper(resources.Procedure, CreateContextProcedure), - ReadContext: TrackingReadWrapper(resources.Procedure, ReadContextProcedure), - UpdateContext: TrackingUpdateWrapper(resources.Procedure, UpdateContextProcedure), - DeleteContext: TrackingDeleteWrapper(resources.Procedure, DeleteProcedure), - - DeprecationMessage: "This resource is deprecated and will be removed in a future major version release. Please use snowflake_procedure_java, snowflake_procedure_javascript, snowflake_procedure_python, snowflake_procedure_scala, and snowflake_procedure_sql instead.", - - // TODO(SNOW-1348106): add `arguments` to ComputedIfAnyAttributeChanged for FullyQualifiedNameAttributeName. - // This can't be done now because this function compares values without diff suppress. 
- CustomizeDiff: TrackingCustomDiffWrapper(resources.Procedure, customdiff.All( - ComputedIfAnyAttributeChanged(procedureSchema, FullyQualifiedNameAttributeName, "name"), - )), - - Schema: procedureSchema, - Importer: &schema.ResourceImporter{ - StateContext: schema.ImportStatePassthroughContext, - }, - - StateUpgraders: []schema.StateUpgrader{ - { - Version: 0, - // setting type to cty.EmptyObject is a bit hacky here but following https://developer.hashicorp.com/terraform/plugin/framework/migrating/resources/state-upgrade#sdkv2-1 would require lots of repetitive code; this should work with cty.EmptyObject - Type: cty.EmptyObject, - Upgrade: v085ProcedureStateUpgrader, - }, - { - Version: 1, - // setting type to cty.EmptyObject is a bit hacky here but following https://developer.hashicorp.com/terraform/plugin/framework/migrating/resources/state-upgrade#sdkv2-1 would require lots of repetitive code; this should work with cty.EmptyObject - Type: cty.EmptyObject, - Upgrade: v0941ResourceIdentifierWithArguments, - }, - }, - } -} - -func CreateContextProcedure(ctx context.Context, d *schema.ResourceData, meta interface{}) diag.Diagnostics { - lang := strings.ToUpper(d.Get("language").(string)) - switch lang { - case "JAVA": - return createJavaProcedure(ctx, d, meta) - case "JAVASCRIPT": - return createJavaScriptProcedure(ctx, d, meta) - case "PYTHON": - return createPythonProcedure(ctx, d, meta) - case "SCALA": - return createScalaProcedure(ctx, d, meta) - case "SQL": - return createSQLProcedure(ctx, d, meta) - default: - return diag.Diagnostics{ - diag.Diagnostic{ - Severity: diag.Error, - Summary: "Invalid language", - Detail: fmt.Sprintf("Language %s is not supported", lang), - }, - } - } -} - -func createJavaProcedure(ctx context.Context, d *schema.ResourceData, meta interface{}) diag.Diagnostics { - client := meta.(*provider.Context).Client - name := d.Get("name").(string) - sc := d.Get("schema").(string) - database := d.Get("database").(string) - args, diags := 
getProcedureArguments(d) - if diags != nil { - return diags - } - argDataTypes := make([]sdk.DataType, len(args)) - for i, arg := range args { - argDataTypes[i] = arg.ArgDataTypeOld - } - id := sdk.NewSchemaObjectIdentifierWithArguments(database, sc, name, argDataTypes...) - - returns, diags := parseProcedureReturnsRequest(d.Get("return_type").(string)) - if diags != nil { - return diags - } - procedureDefinition := d.Get("statement").(string) - runtimeVersion := d.Get("runtime_version").(string) - packages := make([]sdk.ProcedurePackageRequest, 0) - for _, item := range d.Get("packages").([]interface{}) { - packages = append(packages, *sdk.NewProcedurePackageRequest(item.(string))) - } - handler := d.Get("handler").(string) - req := sdk.NewCreateForJavaProcedureRequest(id.SchemaObjectId(), *returns, runtimeVersion, packages, handler) - req.WithProcedureDefinitionWrapped(procedureDefinition) - if len(args) > 0 { - req.WithArguments(args) - } - - // read optional params - if v, ok := d.GetOk("execute_as"); ok { - if strings.ToUpper(v.(string)) == "OWNER" { - req.WithExecuteAs(sdk.ExecuteAsOwner) - } else if strings.ToUpper(v.(string)) == "CALLER" { - req.WithExecuteAs(sdk.ExecuteAsCaller) - } - } - if v, ok := d.GetOk("comment"); ok { - req.WithComment(v.(string)) - } - if v, ok := d.GetOk("secure"); ok { - req.WithSecure(v.(bool)) - } - if _, ok := d.GetOk("imports"); ok { - var imports []sdk.ProcedureImportRequest - for _, item := range d.Get("imports").([]interface{}) { - imports = append(imports, *sdk.NewProcedureImportRequest(item.(string))) - } - req.WithImports(imports) - } - - if err := client.Procedures.CreateForJava(ctx, req); err != nil { - return diag.FromErr(err) - } - d.SetId(id.FullyQualifiedName()) - return ReadContextProcedure(ctx, d, meta) -} - -func createJavaScriptProcedure(ctx context.Context, d *schema.ResourceData, meta interface{}) diag.Diagnostics { - client := meta.(*provider.Context).Client - name := d.Get("name").(string) - sc := 
d.Get("schema").(string) - database := d.Get("database").(string) - args, diags := getProcedureArguments(d) - if diags != nil { - return diags - } - argDataTypes := make([]sdk.DataType, len(args)) - for i, arg := range args { - argDataTypes[i] = arg.ArgDataTypeOld - } - id := sdk.NewSchemaObjectIdentifierWithArguments(database, sc, name, argDataTypes...) - - returnType := d.Get("return_type").(string) - returnDataType, diags := convertProcedureDataType(returnType) - if diags != nil { - return diags - } - procedureDefinition := d.Get("statement").(string) - req := sdk.NewCreateForJavaScriptProcedureRequestDefinitionWrapped(id.SchemaObjectId(), nil, procedureDefinition).WithResultDataTypeOld(sdk.LegacyDataTypeFrom(returnDataType)) - if len(args) > 0 { - req.WithArguments(args) - } - - // read optional params - if v, ok := d.GetOk("execute_as"); ok { - if strings.ToUpper(v.(string)) == "OWNER" { - req.WithExecuteAs(sdk.ExecuteAsOwner) - } else if strings.ToUpper(v.(string)) == "CALLER" { - req.WithExecuteAs(sdk.ExecuteAsCaller) - } - } - if v, ok := d.GetOk("null_input_behavior"); ok { - req.WithNullInputBehavior(sdk.NullInputBehavior(v.(string))) - } - if v, ok := d.GetOk("comment"); ok { - req.WithComment(v.(string)) - } - if v, ok := d.GetOk("secure"); ok { - req.WithSecure(v.(bool)) - } - - if err := client.Procedures.CreateForJavaScript(ctx, req); err != nil { - return diag.FromErr(err) - } - d.SetId(id.FullyQualifiedName()) - return ReadContextProcedure(ctx, d, meta) -} - -func createScalaProcedure(ctx context.Context, d *schema.ResourceData, meta interface{}) diag.Diagnostics { - client := meta.(*provider.Context).Client - name := d.Get("name").(string) - sc := d.Get("schema").(string) - database := d.Get("database").(string) - args, diags := getProcedureArguments(d) - if diags != nil { - return diags - } - argDataTypes := make([]sdk.DataType, len(args)) - for i, arg := range args { - argDataTypes[i] = arg.ArgDataTypeOld - } - id := 
sdk.NewSchemaObjectIdentifierWithArguments(database, sc, name, argDataTypes...) - - returns, diags := parseProcedureReturnsRequest(d.Get("return_type").(string)) - if diags != nil { - return diags - } - procedureDefinition := d.Get("statement").(string) - runtimeVersion := d.Get("runtime_version").(string) - packages := make([]sdk.ProcedurePackageRequest, 0) - for _, item := range d.Get("packages").([]interface{}) { - packages = append(packages, *sdk.NewProcedurePackageRequest(item.(string))) - } - handler := d.Get("handler").(string) - req := sdk.NewCreateForScalaProcedureRequest(id.SchemaObjectId(), *returns, runtimeVersion, packages, handler) - req.WithProcedureDefinitionWrapped(procedureDefinition) - if len(args) > 0 { - req.WithArguments(args) - } - - // read optional params - if v, ok := d.GetOk("execute_as"); ok { - if strings.ToUpper(v.(string)) == "OWNER" { - req.WithExecuteAs(sdk.ExecuteAsOwner) - } else if strings.ToUpper(v.(string)) == "CALLER" { - req.WithExecuteAs(sdk.ExecuteAsCaller) - } - } - if v, ok := d.GetOk("comment"); ok { - req.WithComment(v.(string)) - } - if v, ok := d.GetOk("secure"); ok { - req.WithSecure(v.(bool)) - } - if _, ok := d.GetOk("imports"); ok { - var imports []sdk.ProcedureImportRequest - for _, item := range d.Get("imports").([]interface{}) { - imports = append(imports, *sdk.NewProcedureImportRequest(item.(string))) - } - req.WithImports(imports) - } - - if err := client.Procedures.CreateForScala(ctx, req); err != nil { - return diag.FromErr(err) - } - d.SetId(id.FullyQualifiedName()) - return ReadContextProcedure(ctx, d, meta) -} - -func createSQLProcedure(ctx context.Context, d *schema.ResourceData, meta interface{}) diag.Diagnostics { - client := meta.(*provider.Context).Client - name := d.Get("name").(string) - sc := d.Get("schema").(string) - database := d.Get("database").(string) - args, diags := getProcedureArguments(d) - if diags != nil { - return diags - } - argDataTypes := make([]sdk.DataType, len(args)) - for i, 
arg := range args { - argDataTypes[i] = arg.ArgDataTypeOld - } - id := sdk.NewSchemaObjectIdentifierWithArguments(database, sc, name, argDataTypes...) - - returns, diags := parseProcedureSQLReturnsRequest(d.Get("return_type").(string)) - if diags != nil { - return diags - } - procedureDefinition := d.Get("statement").(string) - req := sdk.NewCreateForSQLProcedureRequestDefinitionWrapped(id.SchemaObjectId(), *returns, procedureDefinition) - if len(args) > 0 { - req.WithArguments(args) - } - - // read optional params - if v, ok := d.GetOk("execute_as"); ok { - if strings.ToUpper(v.(string)) == "OWNER" { - req.WithExecuteAs(sdk.ExecuteAsOwner) - } else if strings.ToUpper(v.(string)) == "CALLER" { - req.WithExecuteAs(sdk.ExecuteAsCaller) - } - } - if v, ok := d.GetOk("null_input_behavior"); ok { - req.WithNullInputBehavior(sdk.NullInputBehavior(v.(string))) - } - if v, ok := d.GetOk("comment"); ok { - req.WithComment(v.(string)) - } - if v, ok := d.GetOk("secure"); ok { - req.WithSecure(v.(bool)) - } - - if err := client.Procedures.CreateForSQL(ctx, req); err != nil { - return diag.FromErr(err) - } - d.SetId(id.FullyQualifiedName()) - return ReadContextProcedure(ctx, d, meta) -} - -func createPythonProcedure(ctx context.Context, d *schema.ResourceData, meta interface{}) diag.Diagnostics { - client := meta.(*provider.Context).Client - name := d.Get("name").(string) - sc := d.Get("schema").(string) - database := d.Get("database").(string) - args, diags := getProcedureArguments(d) - if diags != nil { - return diags - } - argDataTypes := make([]sdk.DataType, len(args)) - for i, arg := range args { - argDataTypes[i] = arg.ArgDataTypeOld - } - id := sdk.NewSchemaObjectIdentifierWithArguments(database, sc, name, argDataTypes...) 
- - returns, diags := parseProcedureReturnsRequest(d.Get("return_type").(string)) - if diags != nil { - return diags - } - procedureDefinition := d.Get("statement").(string) - runtimeVersion := d.Get("runtime_version").(string) - packages := make([]sdk.ProcedurePackageRequest, 0) - for _, item := range d.Get("packages").([]interface{}) { - packages = append(packages, *sdk.NewProcedurePackageRequest(item.(string))) - } - handler := d.Get("handler").(string) - req := sdk.NewCreateForPythonProcedureRequest(id.SchemaObjectId(), *returns, runtimeVersion, packages, handler) - req.WithProcedureDefinitionWrapped(procedureDefinition) - if len(args) > 0 { - req.WithArguments(args) - } - - // read optional params - if v, ok := d.GetOk("execute_as"); ok { - if strings.ToUpper(v.(string)) == "OWNER" { - req.WithExecuteAs(sdk.ExecuteAsOwner) - } else if strings.ToUpper(v.(string)) == "CALLER" { - req.WithExecuteAs(sdk.ExecuteAsCaller) - } - } - - // [ { CALLED ON NULL INPUT | { RETURNS NULL ON NULL INPUT | STRICT } } ] does not work for java, scala or python - // posted in docs-discuss channel, either docs need to be updated to reflect reality or this feature needs to be added - // https://snowflake.slack.com/archives/C6380540P/p1707511734666249 - // if v, ok := d.GetOk("null_input_behavior"); ok { - // req.WithNullInputBehavior(sdk.Pointer(sdk.NullInputBehavior(v.(string)))) - // } - - if v, ok := d.GetOk("comment"); ok { - req.WithComment(v.(string)) - } - if v, ok := d.GetOk("secure"); ok { - req.WithSecure(v.(bool)) - } - if _, ok := d.GetOk("imports"); ok { - var imports []sdk.ProcedureImportRequest - for _, item := range d.Get("imports").([]interface{}) { - imports = append(imports, *sdk.NewProcedureImportRequest(item.(string))) - } - req.WithImports(imports) - } - - if err := client.Procedures.CreateForPython(ctx, req); err != nil { - return diag.FromErr(err) - } - d.SetId(id.FullyQualifiedName()) - return ReadContextProcedure(ctx, d, meta) -} - -func 
ReadContextProcedure(ctx context.Context, d *schema.ResourceData, meta interface{}) diag.Diagnostics { - diags := diag.Diagnostics{} - client := meta.(*provider.Context).Client - - id, err := sdk.ParseSchemaObjectIdentifierWithArguments(d.Id()) - if err != nil { - return diag.FromErr(err) - } - if err := d.Set(FullyQualifiedNameAttributeName, id.FullyQualifiedName()); err != nil { - return diag.FromErr(err) - } - if err := d.Set("name", id.Name()); err != nil { - return diag.FromErr(err) - } - if err := d.Set("database", id.DatabaseName()); err != nil { - return diag.FromErr(err) - } - if err := d.Set("schema", id.SchemaName()); err != nil { - return diag.FromErr(err) - } - args := d.Get("arguments").([]interface{}) - argTypes := make([]string, len(args)) - for i, arg := range args { - argTypes[i] = arg.(map[string]interface{})["type"].(string) - } - procedureDetails, err := client.Procedures.Describe(ctx, id) - if err != nil { - // if procedure is not found then mark resource to be removed from state file during apply or refresh - d.SetId("") - return diag.Diagnostics{ - diag.Diagnostic{ - Severity: diag.Warning, - Summary: "Describe procedure failed.", - Detail: fmt.Sprintf("Describe procedure failed: %v", err), - }, - } - } - for _, desc := range procedureDetails { - if desc.Value == nil { - continue - } - switch desc.Property { - case "signature": - // Format in Snowflake DB is: (argName argType, argName argType, ...) 
- args := strings.ReplaceAll(strings.ReplaceAll(*desc.Value, "(", ""), ")", "") - - if args != "" { // Do nothing for functions without arguments - argPairs := strings.Split(args, ", ") - var args []any - - for _, argPair := range argPairs { - argItem := strings.Split(argPair, " ") - - arg := map[string]interface{}{} - arg["name"] = argItem[0] - arg["type"] = argItem[1] - args = append(args, arg) - } - - if err := d.Set("arguments", args); err != nil { - return diag.FromErr(err) - } - } - case "null handling": - if err := d.Set("null_input_behavior", *desc.Value); err != nil { - return diag.FromErr(err) - } - case "body": - if err := d.Set("statement", *desc.Value); err != nil { - return diag.FromErr(err) - } - case "execute as": - if err := d.Set("execute_as", *desc.Value); err != nil { - return diag.FromErr(err) - } - case "returns": - if err := d.Set("return_type", *desc.Value); err != nil { - return diag.FromErr(err) - } - case "language": - if err := d.Set("language", *desc.Value); err != nil { - return diag.FromErr(err) - } - case "runtime_version": - if err := d.Set("runtime_version", *desc.Value); err != nil { - return diag.FromErr(err) - } - case "packages": - packagesString := strings.ReplaceAll(strings.ReplaceAll(strings.ReplaceAll(*desc.Value, "[", ""), "]", ""), "'", "") - if packagesString != "" { // Do nothing for Java / Python functions without packages - packages := strings.Split(packagesString, ",") - if err := d.Set("packages", packages); err != nil { - return diag.FromErr(err) - } - } - case "imports": - importsString := strings.ReplaceAll(strings.ReplaceAll(strings.ReplaceAll(strings.ReplaceAll(*desc.Value, "[", ""), "]", ""), "'", ""), " ", "") - if importsString != "" { // Do nothing for Java functions without imports - imports := strings.Split(importsString, ",") - if err := d.Set("imports", imports); err != nil { - return diag.FromErr(err) - } - } - case "handler": - if err := d.Set("handler", *desc.Value); err != nil { - return 
diag.FromErr(err) - } - case "volatility": - if err := d.Set("return_behavior", *desc.Value); err != nil { - return diag.FromErr(err) - } - default: - log.Printf("[INFO] Unexpected procedure property %v returned from Snowflake with value %v", desc.Property, *desc.Value) - } - } - - procedure, err := client.Procedures.ShowByID(ctx, id) - if err != nil { - return diag.FromErr(err) - } - - if err := d.Set("secure", procedure.IsSecure); err != nil { - return diag.FromErr(err) - } - - if err := d.Set("comment", procedure.Description); err != nil { - return diag.FromErr(err) - } - - return diags -} - -func UpdateContextProcedure(ctx context.Context, d *schema.ResourceData, meta interface{}) diag.Diagnostics { - client := meta.(*provider.Context).Client - - id, err := sdk.ParseSchemaObjectIdentifierWithArguments(d.Id()) - if err != nil { - return diag.FromErr(err) - } - - if d.HasChange("name") { - newId := sdk.NewSchemaObjectIdentifier(id.DatabaseName(), id.SchemaName(), d.Get("name").(string)) - newIdWithArguments := sdk.NewSchemaObjectIdentifierWithArguments(id.DatabaseName(), id.SchemaName(), d.Get("name").(string), id.ArgumentDataTypes()...) 
- - err := client.Procedures.Alter(ctx, sdk.NewAlterProcedureRequest(id).WithRenameTo(newId.WithoutArguments())) - if err != nil { - return diag.FromErr(err) - } - - d.SetId(newIdWithArguments.FullyQualifiedName()) - id = newIdWithArguments - } - - if d.HasChange("comment") { - comment := d.Get("comment") - if comment != "" { - if err := client.Procedures.Alter(ctx, sdk.NewAlterProcedureRequest(id).WithSet(*sdk.NewProcedureSetRequest().WithComment(comment.(string)))); err != nil { - return diag.FromErr(err) - } - } else { - if err := client.Procedures.Alter(ctx, sdk.NewAlterProcedureRequest(id).WithUnset(*sdk.NewProcedureUnsetRequest().WithComment(true))); err != nil { - return diag.FromErr(err) - } - } - } - - if d.HasChange("execute_as") { - req := sdk.NewAlterProcedureRequest(id) - executeAs := d.Get("execute_as").(string) - if strings.ToUpper(executeAs) == "OWNER" { - req.WithExecuteAs(sdk.ExecuteAsOwner) - } else if strings.ToUpper(executeAs) == "CALLER" { - req.WithExecuteAs(sdk.ExecuteAsCaller) - } - if err := client.Procedures.Alter(ctx, req); err != nil { - return diag.FromErr(err) - } - } - - return ReadContextProcedure(ctx, d, meta) -} - -func getProcedureArguments(d *schema.ResourceData) ([]sdk.ProcedureArgumentRequest, diag.Diagnostics) { - args := make([]sdk.ProcedureArgumentRequest, 0) - if v, ok := d.GetOk("arguments"); ok { - for _, arg := range v.([]interface{}) { - argName := arg.(map[string]interface{})["name"].(string) - argType := arg.(map[string]interface{})["type"].(string) - argDataType, diags := convertProcedureDataType(argType) - if diags != nil { - return nil, diags - } - args = append(args, sdk.ProcedureArgumentRequest{ArgName: argName, ArgDataTypeOld: sdk.LegacyDataTypeFrom(argDataType)}) - } - } - return args, nil -} - -func convertProcedureDataType(s string) (datatypes.DataType, diag.Diagnostics) { - dataType, err := datatypes.ParseDataType(s) - if err != nil { - return nil, diag.FromErr(err) - } - return dataType, nil -} - -func 
convertProcedureColumns(s string) ([]sdk.ProcedureColumn, diag.Diagnostics) { - pattern := regexp.MustCompile(`(\w+)\s+(\w+)`) - matches := pattern.FindAllStringSubmatch(s, -1) - var columns []sdk.ProcedureColumn - for _, match := range matches { - if len(match) == 3 { - dataType, err := datatypes.ParseDataType(match[2]) - if err != nil { - return nil, diag.FromErr(err) - } - columns = append(columns, sdk.ProcedureColumn{ - ColumnName: match[1], - ColumnDataTypeOld: sdk.LegacyDataTypeFrom(dataType), - }) - } - } - return columns, nil -} - -func parseProcedureReturnsRequest(s string) (*sdk.ProcedureReturnsRequest, diag.Diagnostics) { - returns := sdk.NewProcedureReturnsRequest() - if strings.HasPrefix(strings.ToLower(s), "table") { - columns, diags := convertProcedureColumns(s) - if diags != nil { - return nil, diags - } - var cr []sdk.ProcedureColumnRequest - for _, item := range columns { - cr = append(cr, *sdk.NewProcedureColumnRequest(item.ColumnName, nil).WithColumnDataTypeOld(item.ColumnDataTypeOld)) - } - returns.WithTable(*sdk.NewProcedureReturnsTableRequest().WithColumns(cr)) - } else { - returnDataType, diags := convertProcedureDataType(s) - if diags != nil { - return nil, diags - } - returns.WithResultDataType(*sdk.NewProcedureReturnsResultDataTypeRequest(nil).WithResultDataTypeOld(sdk.LegacyDataTypeFrom(returnDataType))) - } - return returns, nil -} - -func parseProcedureSQLReturnsRequest(s string) (*sdk.ProcedureSQLReturnsRequest, diag.Diagnostics) { - returns := sdk.NewProcedureSQLReturnsRequest() - if strings.HasPrefix(strings.ToLower(s), "table") { - columns, diags := convertProcedureColumns(s) - if diags != nil { - return nil, diags - } - var cr []sdk.ProcedureColumnRequest - for _, item := range columns { - cr = append(cr, *sdk.NewProcedureColumnRequest(item.ColumnName, nil).WithColumnDataTypeOld(item.ColumnDataTypeOld)) - } - returns.WithTable(*sdk.NewProcedureReturnsTableRequest().WithColumns(cr)) - } else { - returnDataType, diags := 
convertProcedureDataType(s) - if diags != nil { - return nil, diags - } - returns.WithResultDataType(*sdk.NewProcedureReturnsResultDataTypeRequest(nil).WithResultDataTypeOld(sdk.LegacyDataTypeFrom(returnDataType))) - } - return returns, nil -} diff --git a/pkg/resources/procedure_acceptance_test.go b/pkg/resources/procedure_acceptance_test.go deleted file mode 100644 index 1039ebc459..0000000000 --- a/pkg/resources/procedure_acceptance_test.go +++ /dev/null @@ -1,583 +0,0 @@ -package resources_test - -import ( - "fmt" - "strings" - "testing" - - acc "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance" - "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/provider/resources" - "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk" - "github.com/hashicorp/terraform-plugin-testing/config" - "github.com/hashicorp/terraform-plugin-testing/helper/resource" - "github.com/hashicorp/terraform-plugin-testing/plancheck" - "github.com/hashicorp/terraform-plugin-testing/tfversion" -) - -func testAccProcedure(t *testing.T, configDirectory string, args ...sdk.DataType) { - t.Helper() - - oldId := acc.TestClient().Ids.RandomSchemaObjectIdentifierWithArguments(args...) - newId := acc.TestClient().Ids.RandomSchemaObjectIdentifierWithArguments(args...) 
- - resourceName := "snowflake_procedure.p" - m := func() map[string]config.Variable { - return map[string]config.Variable{ - "name": config.StringVariable(oldId.Name()), - "database": config.StringVariable(acc.TestDatabaseName), - "schema": config.StringVariable(acc.TestSchemaName), - "comment": config.StringVariable("Terraform acceptance test"), - "execute_as": config.StringVariable("CALLER"), - } - } - variableSet2 := m() - variableSet2["name"] = config.StringVariable(newId.Name()) - variableSet2["comment"] = config.StringVariable("Terraform acceptance test - updated") - variableSet2["execute_as"] = config.StringVariable("OWNER") - - ignoreDuringImport := []string{"null_input_behavior"} - if strings.Contains(configDirectory, "/sql") { - ignoreDuringImport = append(ignoreDuringImport, "return_behavior") - } - - resource.Test(t, resource.TestCase{ - ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, - PreCheck: func() { acc.TestAccPreCheck(t) }, - TerraformVersionChecks: []tfversion.TerraformVersionCheck{ - tfversion.RequireAbove(tfversion.Version1_5_0), - }, - CheckDestroy: acc.CheckDestroy(t, resources.Procedure), - Steps: []resource.TestStep{ - { - ConfigDirectory: acc.ConfigurationDirectory(configDirectory), - ConfigVariables: m(), - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr(resourceName, "name", oldId.Name()), - resource.TestCheckResourceAttr(resourceName, "fully_qualified_name", oldId.FullyQualifiedName()), - resource.TestCheckResourceAttr(resourceName, "database", acc.TestDatabaseName), - resource.TestCheckResourceAttr(resourceName, "schema", acc.TestSchemaName), - resource.TestCheckResourceAttr(resourceName, "comment", "Terraform acceptance test"), - resource.TestCheckResourceAttr(resourceName, "return_behavior", "VOLATILE"), - resource.TestCheckResourceAttr(resourceName, "execute_as", "CALLER"), - - // computed attributes - resource.TestCheckResourceAttrSet(resourceName, "return_type"), - 
resource.TestCheckResourceAttrSet(resourceName, "statement"), - resource.TestCheckResourceAttrSet(resourceName, "secure"), - ), - }, - - // test - rename + change comment and caller (proves https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2642) - { - ConfigDirectory: acc.ConfigurationDirectory(configDirectory), - ConfigVariables: variableSet2, - ConfigPlanChecks: resource.ConfigPlanChecks{ - PreApply: []plancheck.PlanCheck{ - plancheck.ExpectResourceAction(resourceName, plancheck.ResourceActionUpdate), - }, - }, - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr(resourceName, "name", newId.Name()), - resource.TestCheckResourceAttr(resourceName, "fully_qualified_name", newId.FullyQualifiedName()), - resource.TestCheckResourceAttr(resourceName, "database", acc.TestDatabaseName), - resource.TestCheckResourceAttr(resourceName, "schema", acc.TestSchemaName), - resource.TestCheckResourceAttr(resourceName, "comment", "Terraform acceptance test - updated"), - resource.TestCheckResourceAttr(resourceName, "execute_as", "OWNER"), - ), - }, - - // test - import - { - ConfigDirectory: acc.ConfigurationDirectory(configDirectory), - ConfigVariables: variableSet2, - ResourceName: resourceName, - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: ignoreDuringImport, - }, - }, - }) -} - -func TestAcc_Procedure_SQL(t *testing.T) { - testAccProcedure(t, "TestAcc_Procedure/sql", sdk.DataTypeVARCHAR) -} - -/* -Error: 391531 (42601): SQL compilation error: An active warehouse is required for creating Python stored procedures. 
-func TestAcc_Procedure_Python(t *testing.T) { - testAccProcedure(t, "TestAcc_Procedure/python") -} -*/ - -func TestAcc_Procedure_Javascript(t *testing.T) { - testAccProcedure(t, "TestAcc_Procedure/javascript") -} - -func TestAcc_Procedure_Java(t *testing.T) { - testAccProcedure(t, "TestAcc_Procedure/java", sdk.DataTypeVARCHAR, sdk.DataTypeVARCHAR) -} - -func TestAcc_Procedure_Scala(t *testing.T) { - testAccProcedure(t, "TestAcc_Procedure/scala", sdk.DataTypeVARCHAR, sdk.DataTypeVARCHAR) -} - -func TestAcc_Procedure_complex(t *testing.T) { - name := acc.TestClient().Ids.Alpha() - resourceName := "snowflake_procedure.p" - m := func() map[string]config.Variable { - return map[string]config.Variable{ - "name": config.StringVariable(name), - "database": config.StringVariable(acc.TestDatabaseName), - "schema": config.StringVariable(acc.TestSchemaName), - "comment": config.StringVariable("Terraform acceptance test"), - "execute_as": config.StringVariable("CALLER"), - } - } - variableSet2 := m() - variableSet2["comment"] = config.StringVariable("Terraform acceptance test - updated") - - statement := "var x = 1\nreturn x\n" - resource.Test(t, resource.TestCase{ - ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, - PreCheck: func() { acc.TestAccPreCheck(t) }, - TerraformVersionChecks: []tfversion.TerraformVersionCheck{ - tfversion.RequireAbove(tfversion.Version1_5_0), - }, - CheckDestroy: acc.CheckDestroy(t, resources.Procedure), - Steps: []resource.TestStep{ - { - ConfigDirectory: acc.ConfigurationDirectory("TestAcc_Procedure/complex"), - ConfigVariables: m(), - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr(resourceName, "name", name), - resource.TestCheckResourceAttr(resourceName, "database", acc.TestDatabaseName), - resource.TestCheckResourceAttr(resourceName, "schema", acc.TestSchemaName), - resource.TestCheckResourceAttr(resourceName, "comment", "Terraform acceptance test"), - resource.TestCheckResourceAttr(resourceName, 
"statement", statement), - resource.TestCheckResourceAttr(resourceName, "execute_as", "CALLER"), - resource.TestCheckResourceAttr(resourceName, "arguments.#", "2"), - resource.TestCheckResourceAttr(resourceName, "arguments.0.name", "arg1"), - resource.TestCheckResourceAttr(resourceName, "arguments.0.type", "VARCHAR"), - resource.TestCheckResourceAttr(resourceName, "arguments.1.name", "arg2"), - resource.TestCheckResourceAttr(resourceName, "arguments.1.type", "DATE"), - resource.TestCheckResourceAttr(resourceName, "null_input_behavior", "RETURNS NULL ON NULL INPUT"), - - // computed attributes - resource.TestCheckResourceAttrSet(resourceName, "return_type"), - resource.TestCheckResourceAttrSet(resourceName, "statement"), - resource.TestCheckResourceAttrSet(resourceName, "execute_as"), - resource.TestCheckResourceAttrSet(resourceName, "secure"), - ), - }, - - // test - change comment - { - ConfigDirectory: acc.ConfigurationDirectory("TestAcc_Procedure/complex"), - ConfigVariables: variableSet2, - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr(resourceName, "name", name), - resource.TestCheckResourceAttr(resourceName, "database", acc.TestDatabaseName), - resource.TestCheckResourceAttr(resourceName, "schema", acc.TestSchemaName), - resource.TestCheckResourceAttr(resourceName, "comment", "Terraform acceptance test - updated"), - ), - }, - - // test - import - { - ConfigDirectory: acc.ConfigurationDirectory("TestAcc_Procedure/complex"), - ConfigVariables: variableSet2, - ResourceName: resourceName, - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{ - "return_behavior", - }, - }, - }, - }) -} - -func TestAcc_Procedure_migrateFromVersion085(t *testing.T) { - id := acc.TestClient().Ids.RandomSchemaObjectIdentifier() - name := id.Name() - resourceName := "snowflake_procedure.p" - - resource.Test(t, resource.TestCase{ - PreCheck: func() { acc.TestAccPreCheck(t) }, - TerraformVersionChecks: 
[]tfversion.TerraformVersionCheck{ - tfversion.RequireAbove(tfversion.Version1_5_0), - }, - CheckDestroy: acc.CheckDestroy(t, resources.Procedure), - - Steps: []resource.TestStep{ - { - PreConfig: func() { acc.SetV097CompatibleConfigPathEnv(t) }, - ExternalProviders: map[string]resource.ExternalProvider{ - "snowflake": { - VersionConstraint: "=0.85.0", - Source: "Snowflake-Labs/snowflake", - }, - }, - Config: procedureConfig(acc.TestDatabaseName, acc.TestSchemaName, name), - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr(resourceName, "id", fmt.Sprintf("%s|%s|%s|", acc.TestDatabaseName, acc.TestSchemaName, name)), - resource.TestCheckResourceAttr(resourceName, "name", name), - resource.TestCheckResourceAttr(resourceName, "database", acc.TestDatabaseName), - resource.TestCheckResourceAttr(resourceName, "schema", acc.TestSchemaName), - ), - }, - { - ExternalProviders: map[string]resource.ExternalProvider{ - "snowflake": { - VersionConstraint: "=0.94.1", - Source: "Snowflake-Labs/snowflake", - }, - }, - Config: procedureConfig(acc.TestDatabaseName, acc.TestSchemaName, name), - ConfigPlanChecks: resource.ConfigPlanChecks{ - PreApply: []plancheck.PlanCheck{plancheck.ExpectEmptyPlan()}, - }, - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr(resourceName, "id", id.FullyQualifiedName()), - resource.TestCheckResourceAttr(resourceName, "name", name), - resource.TestCheckResourceAttr(resourceName, "database", acc.TestDatabaseName), - resource.TestCheckResourceAttr(resourceName, "schema", acc.TestSchemaName), - ), - }, - }, - }) -} - -func procedureConfig(database string, schema string, name string) string { - return fmt.Sprintf(` -resource "snowflake_procedure" "p" { - database = "%[1]s" - schema = "%[2]s" - name = "%[3]s" - language = "JAVASCRIPT" - return_type = "VARCHAR" - statement = <' command (SAML2_SNOWFLAKE_METADATA) - "saml2_snowflake_x509_cert": { - Type: schema.TypeString, - Optional: true, - Computed: true, - 
Description: "The Base64 encoded self-signed certificate generated by Snowflake for use with Encrypting SAML Assertions and Signed SAML Requests. You must have at least one of these features (encrypted SAML assertions or signed SAML responses) enabled in your Snowflake account to access the certificate value.", - }, - "saml2_sign_request": { - Type: schema.TypeBool, - Optional: true, - Description: "The Boolean indicating whether SAML requests are signed. TRUE: allows SAML requests to be signed. FALSE: does not allow SAML requests to be signed.", - }, - "saml2_requested_nameid_format": { - Type: schema.TypeString, - Optional: true, - Description: "The SAML NameID format allows Snowflake to set an expectation of the identifying attribute of the user (i.e. SAML Subject) in the SAML assertion from the IdP to ensure a valid authentication to Snowflake. If a value is not specified, Snowflake sends the urn:oasis:names:tc:SAML:1.1:nameid-format:emailAddress value in the authentication request to the IdP. 
NameID must be one of the following values: urn:oasis:names:tc:SAML:1.1:nameid-format:unspecified, urn:oasis:names:tc:SAML:1.1:nameid-format:emailAddress, urn:oasis:names:tc:SAML:1.1:nameid-format:X509SubjectName, urn:oasis:names:tc:SAML:1.1:nameid-format:WindowsDomainQualifiedName, urn:oasis:names:tc:SAML:2.0:nameid-format:kerberos, urn:oasis:names:tc:SAML:2.0:nameid-format:persistent, urn:oasis:names:tc:SAML:2.0:nameid-format:transient .", - ValidateFunc: validation.StringInSlice([]string{ - "urn:oasis:names:tc:SAML:1.1:nameid-format:unspecified", - "urn:oasis:names:tc:SAML:1.1:nameid-format:emailAddress", - "urn:oasis:names:tc:SAML:1.1:nameid-format:X509SubjectName", - "urn:oasis:names:tc:SAML:1.1:nameid-format:WindowsDomainQualifiedName", - "urn:oasis:names:tc:SAML:2.0:nameid-format:kerberos", - "urn:oasis:names:tc:SAML:2.0:nameid-format:persistent", - "urn:oasis:names:tc:SAML:2.0:nameid-format:transient", - }, true), - }, - "saml2_post_logout_redirect_url": { - Type: schema.TypeString, - Optional: true, - Description: "The endpoint to which Snowflake redirects users after clicking the Log Out button in the classic Snowflake web interface. Snowflake terminates the Snowflake session upon redirecting to the specified endpoint.", - }, - "saml2_force_authn": { - Type: schema.TypeBool, - Optional: true, - Description: "The Boolean indicating whether users, during the initial authentication flow, are forced to authenticate again to access Snowflake. When set to TRUE, Snowflake sets the ForceAuthn SAML parameter to TRUE in the outgoing request from Snowflake to the identity provider. TRUE: forces users to authenticate again to access Snowflake, even if a valid session with the identity provider exists. FALSE: does not force users to authenticate again to access Snowflake.", - }, - // Computed and Optionally Settable. 
Info you get by issuing a 'DESCRIBE INTEGRATION ' command (SAML2_SNOWFLAKE_METADATA) - "saml2_snowflake_issuer_url": { - Type: schema.TypeString, - Optional: true, - Computed: true, - Description: "The string containing the EntityID / Issuer for the Snowflake service provider. If an incorrect value is specified, Snowflake returns an error message indicating the acceptable values to use.", - }, - // Computed and Optionally Settable. Info you get by issuing a 'DESCRIBE INTEGRATION ' command (SAML2_SNOWFLAKE_METADATA) - "saml2_snowflake_acs_url": { - Type: schema.TypeString, - Optional: true, - Computed: true, - Description: "The string containing the Snowflake Assertion Consumer Service URL to which the IdP will send its SAML authentication response back to Snowflake. This property will be set in the SAML authentication request generated by Snowflake when initiating a SAML SSO operation with the IdP. If an incorrect value is specified, Snowflake returns an error message indicating the acceptable values to use. Default: https://..snowflakecomputing.com/fed/login", - }, - // Computed. Info you get by issuing a 'DESCRIBE INTEGRATION ' command (SAML2_SNOWFLAKE_METADATA) - "saml2_snowflake_metadata": { - Type: schema.TypeString, - Computed: true, - Description: "Metadata created by Snowflake to provide to SAML2 provider.", - }, - // Computed. Info you get by issuing a 'DESCRIBE INTEGRATION ' command (SAML2_DIGEST_METHODS_USED) - "saml2_digest_methods_used": { - Type: schema.TypeString, - Computed: true, - }, - // Computed. Info you get by issuing a 'DESCRIBE INTEGRATION ' command (SAML2_SIGNATURE_METHODS_USED) - "saml2_signature_methods_used": { - Type: schema.TypeString, - Computed: true, - }, - "created_on": { - Type: schema.TypeString, - Computed: true, - Description: "Date and time when the SAML integration was created.", - }, -} - -// SAMLIntegration returns a pointer to the resource representing a SAML2 security integration. 
-func SAMLIntegration() *schema.Resource { - return &schema.Resource{ - CreateContext: TrackingCreateWrapper(resources.SamlSecurityIntegration, CreateSAMLIntegration), - ReadContext: TrackingReadWrapper(resources.SamlSecurityIntegration, ReadSAMLIntegration), - UpdateContext: TrackingUpdateWrapper(resources.SamlSecurityIntegration, UpdateSAMLIntegration), - DeleteContext: TrackingDeleteWrapper(resources.SamlSecurityIntegration, DeleteSAMLIntegration), - DeprecationMessage: "This resource is deprecated and will be removed in a future major version release. Please use snowflake_saml2_integration instead.", - - Schema: samlIntegrationSchema, - Importer: &schema.ResourceImporter{ - StateContext: schema.ImportStatePassthroughContext, - }, - } -} - -// CreateSAMLIntegration implements schema.CreateFunc. -func CreateSAMLIntegration(ctx context.Context, d *schema.ResourceData, meta any) diag.Diagnostics { - client := meta.(*provider.Context).Client - db := client.GetConn().DB - name := d.Get("name").(string) - - stmt := snowflake.NewSamlIntegrationBuilder(name).Create() - - // Set required fields - stmt.SetRaw(`TYPE=SAML2`) - stmt.SetBool(`ENABLED`, d.Get("enabled").(bool)) - stmt.SetString(`SAML2_ISSUER`, d.Get("saml2_issuer").(string)) - stmt.SetString(`SAML2_SSO_URL`, d.Get("saml2_sso_url").(string)) - stmt.SetString(`SAML2_PROVIDER`, d.Get("saml2_provider").(string)) - - // Set optional fields - if _, ok := d.GetOk("saml2_x509_cert"); ok { - stmt.SetString(`SAML2_X509_CERT`, d.Get("saml2_x509_cert").(string)) - } - - if _, ok := d.GetOk("saml2_sp_initiated_login_page_label"); ok { - stmt.SetString(`SAML2_SP_INITIATED_LOGIN_PAGE_LABEL`, d.Get("saml2_sp_initiated_login_page_label").(string)) - } - - if _, ok := d.GetOk("saml2_enable_sp_initiated"); ok { - stmt.SetBool(`SAML2_ENABLE_SP_INITIATED`, d.Get("saml2_enable_sp_initiated").(bool)) - } - - if _, ok := d.GetOk("saml2_snowflake_x509_cert"); ok { - stmt.SetString(`SAML2_SNOWFLAKE_X509_CERT`, 
d.Get("saml2_snowflake_x509_cert").(string)) - } - - if _, ok := d.GetOk("saml2_sign_request"); ok { - stmt.SetBool(`SAML2_SIGN_REQUEST`, d.Get("saml2_sign_request").(bool)) - } - - if _, ok := d.GetOk("saml2_requested_nameid_format"); ok { - stmt.SetString(`SAML2_REQUESTED_NAMEID_FORMAT`, d.Get("saml2_requested_nameid_format").(string)) - } - - if _, ok := d.GetOk("saml2_post_logout_redirect_url"); ok { - stmt.SetString(`SAML2_POST_LOGOUT_REDIRECT_URL`, d.Get("saml2_post_logout_redirect_url").(string)) - } - - if _, ok := d.GetOk("saml2_force_authn"); ok { - stmt.SetBool(`SAML2_FORCE_AUTHN`, d.Get("saml2_force_authn").(bool)) - } - - if _, ok := d.GetOk("saml2_snowflake_issuer_url"); ok { - stmt.SetString(`SAML2_SNOWFLAKE_ISSUER_URL`, d.Get("saml2_snowflake_issuer_url").(string)) - } - - if _, ok := d.GetOk("saml2_snowflake_acs_url"); ok { - stmt.SetString(`SAML2_SNOWFLAKE_ACS_URL`, d.Get("saml2_snowflake_acs_url").(string)) - } - - err := snowflake.Exec(db, stmt.Statement()) - if err != nil { - return diag.FromErr(fmt.Errorf("error creating security integration err = %w", err)) - } - - d.SetId(name) - - return ReadSAMLIntegration(ctx, d, meta) -} - -// ReadSAMLIntegration implements schema.ReadFunc. 
-func ReadSAMLIntegration(ctx context.Context, d *schema.ResourceData, meta any) diag.Diagnostics { - client := meta.(*provider.Context).Client - db := client.GetConn().DB - id := d.Id() - - stmt := snowflake.NewSamlIntegrationBuilder(id).Show() - row := snowflake.QueryRow(db, stmt) - - // Some properties can come from the SHOW INTEGRATION call - - s, err := snowflake.ScanSamlIntegration(row) - if err != nil { - return diag.FromErr(fmt.Errorf("could not show security integration err = %w", err)) - } - - // Note: category must be Security or something is broken - if c := s.Category.String; c != "SECURITY" { - return diag.FromErr(fmt.Errorf("expected %v to be an Security integration, got %v", id, c)) - } - - // Note: type must be SAML2 or something is broken - if c := s.IntegrationType.String; c != "SAML2" { - return diag.FromErr(fmt.Errorf("expected %v to be a SAML2 integration type, got %v", id, c)) - } - - if err := d.Set("name", s.Name.String); err != nil { - return diag.FromErr(err) - } - - if err := d.Set("created_on", s.CreatedOn.String); err != nil { - return diag.FromErr(err) - } - - if err := d.Set("enabled", s.Enabled.Bool); err != nil { - return diag.FromErr(err) - } - - // Some properties come from the DESCRIBE INTEGRATION call - // We need to grab them in a loop - var k, pType string - var v, unused interface{} - stmt = snowflake.NewSamlIntegrationBuilder(id).Describe() - rows, err := db.Query(stmt) - if err != nil { - return diag.FromErr(fmt.Errorf("could not describe security integration err = %w", err)) - } - defer rows.Close() - for rows.Next() { - if err := rows.Scan(&k, &pType, &v, &unused); err != nil { - return diag.FromErr(fmt.Errorf("unable to parse security integration rows err = %w", err)) - } - switch k { - case "ENABLED": - // set using the SHOW INTEGRATION, ignoring here - case "SAML2_ISSUER": - if err := d.Set("saml2_issuer", v.(string)); err != nil { - return diag.FromErr(fmt.Errorf("unable to set saml2_issuer for security integration 
err = %w", err)) - } - case "SAML2_SSO_URL": - if err := d.Set("saml2_sso_url", v.(string)); err != nil { - return diag.FromErr(fmt.Errorf("unable to set saml2_sso_url for security integration err = %w", err)) - } - case "SAML2_PROVIDER": - if err := d.Set("saml2_provider", v.(string)); err != nil { - return diag.FromErr(fmt.Errorf("unable to set saml2_provider for security integration err = %w", err)) - } - case "SAML2_X509_CERT": - if err := d.Set("saml2_x509_cert", v.(string)); err != nil { - return diag.FromErr(fmt.Errorf("unable to set saml2_x509_cert for security integration err = %w", err)) - } - case "SAML2_SP_INITIATED_LOGIN_PAGE_LABEL": - if err := d.Set("saml2_sp_initiated_login_page_label", v.(string)); err != nil { - return diag.FromErr(fmt.Errorf("unable to set saml2_sp_initiated_login_page_label for security integration")) - } - case "SAML2_ENABLE_SP_INITIATED": - var b bool - switch v2 := v.(type) { - case bool: - b = v2 - case string: - b, err = strconv.ParseBool(v.(string)) - if err != nil { - return diag.FromErr(fmt.Errorf("returned saml2_force_authn that is not boolean err = %w", err)) - } - default: - return diag.FromErr(fmt.Errorf("returned saml2_force_authn that is not boolean")) - } - if err := d.Set("saml2_enable_sp_initiated", b); err != nil { - return diag.FromErr(fmt.Errorf("unable to set saml2_enable_sp_initiated for security integration err = %w", err)) - } - case "SAML2_SNOWFLAKE_X509_CERT": - if err := d.Set("saml2_snowflake_x509_cert", v.(string)); err != nil { - return diag.FromErr(fmt.Errorf("unable to set saml2_snowflake_x509_cert for security integration err = %w", err)) - } - case "SAML2_SIGN_REQUEST": - var b bool - switch v2 := v.(type) { - case bool: - b = v2 - case string: - b, err = strconv.ParseBool(v.(string)) - if err != nil { - return diag.FromErr(fmt.Errorf("returned saml2_force_authn that is not boolean err = %w", err)) - } - default: - return diag.FromErr(fmt.Errorf("returned saml2_force_authn that is not boolean 
err = %w", err)) - } - if err := d.Set("saml2_sign_request", b); err != nil { - return diag.FromErr(fmt.Errorf("unable to set saml2_sign_request for security integration err = %w", err)) - } - case "SAML2_REQUESTED_NAMEID_FORMAT": - if err := d.Set("saml2_requested_nameid_format", v.(string)); err != nil { - return diag.FromErr(fmt.Errorf("unable to set saml2_requested_nameid_format for security integration err = %w", err)) - } - case "SAML2_POST_LOGOUT_REDIRECT_URL": - if err := d.Set("saml2_post_logout_redirect_url", v.(string)); err != nil { - return diag.FromErr(fmt.Errorf("unable to set saml2_post_logout_redirect_url for security integration err = %w", err)) - } - case "SAML2_FORCE_AUTHN": - var b bool - switch v2 := v.(type) { - case bool: - b = v2 - case string: - b, err = strconv.ParseBool(v.(string)) - if err != nil { - return diag.FromErr(fmt.Errorf("returned saml2_force_authn that is not boolean err = %w", err)) - } - default: - return diag.FromErr(fmt.Errorf("returned saml2_force_authn that is not boolean err = %w", err)) - } - if err := d.Set("saml2_force_authn", b); err != nil { - return diag.FromErr(fmt.Errorf("unable to set saml2_force_authn for security integration err = %w", err)) - } - case "SAML2_SNOWFLAKE_ISSUER_URL": - if err := d.Set("saml2_snowflake_issuer_url", v.(string)); err != nil { - return diag.FromErr(fmt.Errorf("unable to set saml2_snowflake_issuer_url for security integration err = %w", err)) - } - case "SAML2_SNOWFLAKE_ACS_URL": - if err := d.Set("saml2_snowflake_acs_url", v.(string)); err != nil { - return diag.FromErr(fmt.Errorf("unable to set saml2_snowflake_acs_url for security integration err = %w", err)) - } - case "SAML2_SNOWFLAKE_METADATA": - if err := d.Set("saml2_snowflake_metadata", v.(string)); err != nil { - return diag.FromErr(fmt.Errorf("unable to set saml2_snowflake_metadata for security integration err = %w", err)) - } - case "SAML2_DIGEST_METHODS_USED": - if err := d.Set("saml2_digest_methods_used", v.(string)); 
err != nil { - return diag.FromErr(fmt.Errorf("unable to set saml2_digest_methods_used for security integration err = %w", err)) - } - case "SAML2_SIGNATURE_METHODS_USED": - if err := d.Set("saml2_signature_methods_used", v.(string)); err != nil { - return diag.FromErr(fmt.Errorf("unable to set saml2_signature_methods_used for security integration err = %w", err)) - } - case "COMMENT": - // COMMENT cannot be set according to snowflake docs, so ignoring - default: - log.Printf("[WARN] unexpected security integration property %v returned from Snowflake", k) - } - } - - return diag.FromErr(err) -} - -// UpdateSAMLIntegration implements schema.UpdateFunc. -func UpdateSAMLIntegration(ctx context.Context, d *schema.ResourceData, meta any) diag.Diagnostics { - client := meta.(*provider.Context).Client - db := client.GetConn().DB - id := d.Id() - - stmt := snowflake.NewSamlIntegrationBuilder(id).Alter() - - var runSetStatement bool - - if d.HasChange("enabled") { - runSetStatement = true - stmt.SetBool(`ENABLED`, d.Get("enabled").(bool)) - } - - if d.HasChange("saml2_issuer") { - runSetStatement = true - stmt.SetString(`SAML2_ISSUER`, d.Get("saml2_issuer").(string)) - } - - if d.HasChange("saml2_sso_url") { - runSetStatement = true - stmt.SetString(`saml2_sso_url`, d.Get("saml2_sso_url").(string)) - } - - if d.HasChange("saml2_provider") { - runSetStatement = true - stmt.SetString(`saml2_provider`, d.Get("saml2_provider").(string)) - } - - if d.HasChange("saml2_x509_cert") { - runSetStatement = true - stmt.SetString(`saml2_x509_cert`, d.Get("saml2_x509_cert").(string)) - } - - if d.HasChange("saml2_sp_initiated_login_page_label") { - runSetStatement = true - stmt.SetString(`saml2_sp_initiated_login_page_label`, d.Get("saml2_sp_initiated_login_page_label").(string)) - } - - if d.HasChange("saml2_enable_sp_initiated") { - runSetStatement = true - stmt.SetBool(`saml2_enable_sp_initiated`, d.Get("saml2_enable_sp_initiated").(bool)) - } - - if 
d.HasChange("saml2_snowflake_x509_cert") { - runSetStatement = true - stmt.SetString(`saml2_snowflake_x509_cert`, d.Get("saml2_snowflake_x509_cert").(string)) - } - - if d.HasChange("saml2_sign_request") { - runSetStatement = true - stmt.SetBool(`saml2_sign_request`, d.Get("saml2_sign_request").(bool)) - } - - if d.HasChange("saml2_requested_nameid_format") { - runSetStatement = true - stmt.SetString(`saml2_requested_nameid_format`, d.Get("saml2_requested_nameid_format").(string)) - } - - if d.HasChange("saml2_post_logout_redirect_url") { - runSetStatement = true - stmt.SetString(`saml2_post_logout_redirect_url`, d.Get("saml2_post_logout_redirect_url").(string)) - } - - if d.HasChange("saml2_force_authn") { - runSetStatement = true - stmt.SetBool(`saml2_force_authn`, d.Get("saml2_force_authn").(bool)) - } - - if d.HasChange("saml2_snowflake_issuer_url") { - runSetStatement = true - stmt.SetString(`saml2_snowflake_issuer_url`, d.Get("saml2_snowflake_issuer_url").(string)) - } - - if d.HasChange("saml2_snowflake_acs_url") { - runSetStatement = true - stmt.SetString(`saml2_snowflake_acs_url`, d.Get("saml2_snowflake_acs_url").(string)) - } - - if runSetStatement { - if err := snowflake.Exec(db, stmt.Statement()); err != nil { - return diag.FromErr(fmt.Errorf("error updating security integration err = %w", err)) - } - } - - return ReadSAMLIntegration(ctx, d, meta) -} - -// DeleteSAMLIntegration implements schema.DeleteFunc. 
-func DeleteSAMLIntegration(ctx context.Context, d *schema.ResourceData, meta any) diag.Diagnostics { - return diag.FromErr(DeleteResource("", snowflake.NewSamlIntegrationBuilder)(d, meta)) -} diff --git a/pkg/resources/saml_integration_acceptance_test.go b/pkg/resources/saml_integration_acceptance_test.go deleted file mode 100644 index 8d01727ba8..0000000000 --- a/pkg/resources/saml_integration_acceptance_test.go +++ /dev/null @@ -1,60 +0,0 @@ -package resources_test - -import ( - "fmt" - "testing" - - acc "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance" - - "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/testenvs" - "github.com/hashicorp/terraform-plugin-testing/helper/resource" - "github.com/hashicorp/terraform-plugin-testing/tfversion" -) - -func TestAcc_SamlIntegration(t *testing.T) { - // TODO [SNOW-926148]: unskip - testenvs.SkipTestIfSet(t, testenvs.SkipSamlIntegrationTest, "because was skipped earlier") - - samlIntName := acc.TestClient().Ids.Alpha() - - resource.Test(t, resource.TestCase{ - ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, - TerraformVersionChecks: []tfversion.TerraformVersionCheck{ - tfversion.RequireAbove(tfversion.Version1_5_0), - }, - PreCheck: func() { acc.TestAccPreCheck(t) }, - CheckDestroy: nil, - Steps: []resource.TestStep{ - { - Config: samlIntegrationConfig(samlIntName), - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr("snowflake_saml_integration.test_saml_int", "name", samlIntName), - resource.TestCheckResourceAttr("snowflake_saml_integration.test_saml_int", "saml2_issuer", "test_issuer"), - resource.TestCheckResourceAttr("snowflake_saml_integration.test_saml_int", "saml2_sso_url", "https://samltest.id/saml/sp"), - resource.TestCheckResourceAttr("snowflake_saml_integration.test_saml_int", "saml2_provider", "CUSTOM"), - resource.TestCheckResourceAttr("snowflake_saml_integration.test_saml_int", "saml2_x509_cert", 
"MIIERTCCAq2gAwIBAgIJAKmtzjCD1+tqMA0GCSqGSIb3DQEBCwUAMDUxMzAxBgNVBAMTKmlwLTE3Mi0zMS0yOC02NC51cy13ZXN0LTIuY29tcHV0ZS5pbnRlcm5hbDAeFw0xODA4MTgyMzI0MjNaFw0yODA4MTUyMzI0MjNaMDUxMzAxBgNVBAMTKmlwLTE3Mi0zMS0yOC02NC51cy13ZXN0LTIuY29tcHV0ZS5pbnRlcm5hbDCCAaIwDQYJKoZIhvcNAQEBBQADggGPADCCAYoCggGBALhUlY3SkIOze+l8y6dBzM6p7B8OykJWlwizszU16Lih8D7KLhNJfahoVxbPxB3YFM/81PJLOeK2krvJ5zY6CJyQY3sPQAkZKI7I8qq9lmZ2g4QPqybNstXS6YUXJNUt/ixbbK/N97+LKTiSutbD1J7AoFnouMuLjlhN5VRZ43jez4xLSHVZaYuUFKn01Y9oLKbj46LQnZnJCAGpTgPqEQJr6GpVGw43bKyUpGoaPrdDRgRgtPMUWgFDkgcI3QiV1lsKfBs1t1E2UA7ACFnlJZpEuBtwgivzo3VeitiSaF3Jxh25EY5/vABpcgQQRz3RH2l8MMKdRsxb8VT3yh2S+CX55s+cN67LiCPr6f2u+KS1iKfB9mWN6o2S4lcmo82HIBbsuXJV0oA1HrGMyyc4Y9nng/I8iuAp8or1JrWRHQ+8NzO85DWK0rtvtLPxkvw0HK32glyuOP/9F05Z7+tiVIgn67buC0EdoUm1RSpibqmB1ST2PikslOlVbJuy4Ah93wIDAQABo1gwVjA1BgNVHREELjAsgippcC0xNzItMzEtMjgtNjQudXMtd2VzdC0yLmNvbXB1dGUuaW50ZXJuYWwwHQYDVR0OBBYEFAdsTxYfulJ5yunYtgYJHC9IcevzMA0GCSqGSIb3DQEBCwUAA4IBgQB3J6i7KreiHL8NPMglfWLHk1PZOgvIEEpKL+GRebvcbyqgcuc3VVPylq70VvGqhJxp1q/mzLfraUiypzfWFGm9zfwIg0H5TqRZYEPTvgIhIICjaDWRwZBDJG8D5G/KoV60DlUG0crPBlIuCCr/SRa5ZoDQqvucTfr3Rx4Ha6koXFSjoSXllR+jn4GnInhm/WH137a+v35PUcffNxfuehoGn6i4YeXF3cwJK4e35cOFW+dLbnaLk+Ty7HOGvpw86h979C6mJ9qEHYgq9rQyzlSPbLZGZSgVcIezunOaOsWm81BsXRNNJjzHGCqKf8RMhd8oZP55+2/SVRBwnkGyUNCuDPrJcymC95ZT2NW/KeWkz28HF2i31xQmecT2r3lQRSM8acvOXQsNEDCDvJvCzJT9c2AnsnO24r6arPXs/UWAxOI+MjclXPLkLD6uTHV+Oo8XZ7bOjegD5hL6/bKUWnNMurQNGrmi/jvqsCFLDKftl7ajuxKjtodnSuwhoY7NQy8="), - resource.TestCheckResourceAttrSet("snowflake_saml_integration.test_saml_int", "created_on"), - resource.TestCheckResourceAttrSet("snowflake_saml_integration.test_saml_int", "saml2_snowflake_x509_cert"), - resource.TestCheckResourceAttrSet("snowflake_saml_integration.test_saml_int", "saml2_snowflake_acs_url"), - resource.TestCheckResourceAttrSet("snowflake_saml_integration.test_saml_int", "saml2_snowflake_issuer_url"), - resource.TestCheckResourceAttrSet("snowflake_saml_integration.test_saml_int", 
"saml2_snowflake_metadata"), - resource.TestCheckResourceAttrSet("snowflake_saml_integration.test_saml_int", "saml2_digest_methods_used"), - resource.TestCheckResourceAttrSet("snowflake_saml_integration.test_saml_int", "saml2_signature_methods_used"), - ), - }, - }, - }) -} - -func samlIntegrationConfig(name string) string { - return fmt.Sprintf(` - resource "snowflake_saml_integration" "test_saml_int" { - name = "%s" - saml2_issuer = "test_issuer" - saml2_sso_url = "https://samltest.id/saml/sp" - saml2_provider = "CUSTOM" - saml2_x509_cert = "MIIERTCCAq2gAwIBAgIJAKmtzjCD1+tqMA0GCSqGSIb3DQEBCwUAMDUxMzAxBgNVBAMTKmlwLTE3Mi0zMS0yOC02NC51cy13ZXN0LTIuY29tcHV0ZS5pbnRlcm5hbDAeFw0xODA4MTgyMzI0MjNaFw0yODA4MTUyMzI0MjNaMDUxMzAxBgNVBAMTKmlwLTE3Mi0zMS0yOC02NC51cy13ZXN0LTIuY29tcHV0ZS5pbnRlcm5hbDCCAaIwDQYJKoZIhvcNAQEBBQADggGPADCCAYoCggGBALhUlY3SkIOze+l8y6dBzM6p7B8OykJWlwizszU16Lih8D7KLhNJfahoVxbPxB3YFM/81PJLOeK2krvJ5zY6CJyQY3sPQAkZKI7I8qq9lmZ2g4QPqybNstXS6YUXJNUt/ixbbK/N97+LKTiSutbD1J7AoFnouMuLjlhN5VRZ43jez4xLSHVZaYuUFKn01Y9oLKbj46LQnZnJCAGpTgPqEQJr6GpVGw43bKyUpGoaPrdDRgRgtPMUWgFDkgcI3QiV1lsKfBs1t1E2UA7ACFnlJZpEuBtwgivzo3VeitiSaF3Jxh25EY5/vABpcgQQRz3RH2l8MMKdRsxb8VT3yh2S+CX55s+cN67LiCPr6f2u+KS1iKfB9mWN6o2S4lcmo82HIBbsuXJV0oA1HrGMyyc4Y9nng/I8iuAp8or1JrWRHQ+8NzO85DWK0rtvtLPxkvw0HK32glyuOP/9F05Z7+tiVIgn67buC0EdoUm1RSpibqmB1ST2PikslOlVbJuy4Ah93wIDAQABo1gwVjA1BgNVHREELjAsgippcC0xNzItMzEtMjgtNjQudXMtd2VzdC0yLmNvbXB1dGUuaW50ZXJuYWwwHQYDVR0OBBYEFAdsTxYfulJ5yunYtgYJHC9IcevzMA0GCSqGSIb3DQEBCwUAA4IBgQB3J6i7KreiHL8NPMglfWLHk1PZOgvIEEpKL+GRebvcbyqgcuc3VVPylq70VvGqhJxp1q/mzLfraUiypzfWFGm9zfwIg0H5TqRZYEPTvgIhIICjaDWRwZBDJG8D5G/KoV60DlUG0crPBlIuCCr/SRa5ZoDQqvucTfr3Rx4Ha6koXFSjoSXllR+jn4GnInhm/WH137a+v35PUcffNxfuehoGn6i4YeXF3cwJK4e35cOFW+dLbnaLk+Ty7HOGvpw86h979C6mJ9qEHYgq9rQyzlSPbLZGZSgVcIezunOaOsWm81BsXRNNJjzHGCqKf8RMhd8oZP55+2/SVRBwnkGyUNCuDPrJcymC95ZT2NW/KeWkz28HF2i31xQmecT2r3lQRSM8acvOXQsNEDCDvJvCzJT9c2AnsnO24r6arPXs/UWAxOI+MjclXPLkLD6uTHV+Oo8XZ7bOjegD5hL6/bKUWnNMurQNGrmi/jvqsCFLDKftl7ajuxKjto
dnSuwhoY7NQy8=" - enabled = false - } - `, name) -} diff --git a/pkg/resources/saml_integration_test.go b/pkg/resources/saml_integration_test.go deleted file mode 100644 index 36327ec029..0000000000 --- a/pkg/resources/saml_integration_test.go +++ /dev/null @@ -1,109 +0,0 @@ -package resources_test - -import ( - "context" - "database/sql" - "testing" - - internalprovider "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/internal/provider" - "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk" - - sqlmock "github.com/DATA-DOG/go-sqlmock" - "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/provider" - "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/resources" - . "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/testhelpers/mock" - "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" - "github.com/stretchr/testify/require" -) - -func TestSAMLIntegration(t *testing.T) { - r := require.New(t) - err := resources.SAMLIntegration().InternalValidate(provider.Provider().Schema, true) - r.NoError(err) -} - -func TestSAMLIntegrationCreate(t *testing.T) { - r := require.New(t) - - in := map[string]interface{}{ - "name": "test_saml_integration", - "enabled": true, - "saml2_issuer": "test_issuer", - "saml2_sso_url": "https://testsamlissuer.com", - "saml2_provider": "CUSTOM", - "saml2_x509_cert": "MIICdummybase64certificate", - } - d := schema.TestResourceDataRaw(t, resources.SAMLIntegration().Schema, in) - r.NotNil(d) - - WithMockDb(t, func(db *sql.DB, mock sqlmock.Sqlmock) { - mock.ExpectExec( - `^CREATE SECURITY INTEGRATION "test_saml_integration" TYPE=SAML2 SAML2_ISSUER='test_issuer' SAML2_PROVIDER='CUSTOM' SAML2_SSO_URL='https://testsamlissuer.com' SAML2_X509_CERT='MIICdummybase64certificate' ENABLED=true$`, - ).WillReturnResult(sqlmock.NewResult(1, 1)) - expectReadSAMLIntegration(mock) - - diags := resources.CreateSAMLIntegration(context.Background(), d, &internalprovider.Context{ - Client: 
sdk.NewClientFromDB(db), - }) - r.Empty(diags) - }) -} - -func TestSAMLIntegrationRead(t *testing.T) { - r := require.New(t) - - d := samlIntegration(t, "test_saml_integration", map[string]interface{}{"name": "test_saml_integration"}) - - WithMockDb(t, func(db *sql.DB, mock sqlmock.Sqlmock) { - expectReadSAMLIntegration(mock) - - diags := resources.ReadSAMLIntegration(context.Background(), d, &internalprovider.Context{ - Client: sdk.NewClientFromDB(db), - }) - r.Empty(diags) - }) -} - -func TestSAMLIntegrationDelete(t *testing.T) { - r := require.New(t) - - d := samlIntegration(t, "drop_it", map[string]interface{}{"name": "drop_it"}) - - WithMockDb(t, func(db *sql.DB, mock sqlmock.Sqlmock) { - mock.ExpectExec(`DROP SECURITY INTEGRATION "drop_it"`).WillReturnResult(sqlmock.NewResult(1, 1)) - diags := resources.DeleteSAMLIntegration(context.Background(), d, &internalprovider.Context{ - Client: sdk.NewClientFromDB(db), - }) - r.Empty(diags) - }) -} - -func expectReadSAMLIntegration(mock sqlmock.Sqlmock) { - showRows := sqlmock.NewRows([]string{ - "name", "type", "category", "enabled", "created_on", - }, - ).AddRow("test_saml_integration", "SAML2", "SECURITY", true, "now") - mock.ExpectQuery(`^SHOW SECURITY INTEGRATIONS LIKE 'test_saml_integration'$`).WillReturnRows(showRows) - - descRows := sqlmock.NewRows([]string{ - "property", "property_type", "property_value", "property_default", - }).AddRow("SAML2_X509_CERT", "String", "MIICdummybase64certificate", nil). - AddRow("SAML2_PROVIDER", "String", "CUSTOM", nil). - AddRow("SAML2_ENABLE_SP_INITIATED", "Boolean", false, false). - AddRow("SAML2_SP_INITIATED_LOGIN_PAGE_LABEL", "String", "MyLabel", nil). - AddRow("SAML2_SSO_URL", "String", "https://testsamlissuer.com", nil). - AddRow("SAML2_ISSUER", "String", "test_issuer", nil). - AddRow("SAML2_SNOWFLAKE_X509_CERT", "String", "MIICdummybase64certificate", nil). 
- AddRow("SAML2_REQUESTED_NAMEID_FORMAT", "String", "urn:oasis:names:tc:SAML:1.1:nameid-format:emailAddress", "urn:oasis:names:tc:SAML:1.1:nameid-format:emailAddress"). - AddRow("SAML2_FORCE_AUTHN", "Boolean", false, false). - AddRow("SAML2_POST_LOGOUT_REDIRECT_URL", "String", "https://myredirecturl.com", nil). - AddRow("SAML2_SIGN_REQUEST", "Boolean", false, false). - AddRow("SAML2_SNOWFLAKE_ACS_URL", "String", "https://myinstance.my-region-1.snowflakecomputing.com/fed/login", nil). - AddRow("SAML2_SNOWFLAKE_ISSUER_URL", "String", "https://myinstance.my-region-1.snowflakecomputing.com", nil). - AddRow("SAML2_SNOWFLAKE_METADATA", "String", "", nil). - AddRow("SAML2_DIGEST_METHODS_USED", "http://www.w3.org/2001/04/xmlenc#sha256", "CUSTOM", nil). - AddRow("SAML2_SIGNATURE_METHODS_USED", "http://www.w3.org/2001/04/xmldsig-more#rsa-sha256", "CUSTOM", nil). - AddRow("COMMENT", "String", "Some Comment", nil) - - mock.ExpectQuery(`DESCRIBE SECURITY INTEGRATION "test_saml_integration"$`).WillReturnRows(descRows) -} diff --git a/pkg/resources/sequence.go b/pkg/resources/sequence.go index 449dc56694..a8dadf56c0 100644 --- a/pkg/resources/sequence.go +++ b/pkg/resources/sequence.go @@ -3,6 +3,7 @@ package resources import ( "context" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/provider/previewfeatures" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/provider/resources" "github.com/hashicorp/terraform-plugin-sdk/v2/diag" @@ -67,10 +68,10 @@ var sequenceSchema = map[string]*schema.Schema{ func Sequence() *schema.Resource { return &schema.Resource{ - CreateContext: TrackingCreateWrapper(resources.Sequence, CreateSequence), - ReadContext: TrackingReadWrapper(resources.Sequence, ReadSequence), - DeleteContext: TrackingDeleteWrapper(resources.Sequence, DeleteSequence), - UpdateContext: TrackingUpdateWrapper(resources.Sequence, UpdateSequence), + CreateContext: PreviewFeatureCreateContextWrapper(string(previewfeatures.SequenceResource), 
TrackingCreateWrapper(resources.Sequence, CreateSequence)), + ReadContext: PreviewFeatureReadContextWrapper(string(previewfeatures.SequenceResource), TrackingReadWrapper(resources.Sequence, ReadSequence)), + UpdateContext: PreviewFeatureUpdateContextWrapper(string(previewfeatures.SequenceResource), TrackingUpdateWrapper(resources.Sequence, UpdateSequence)), + DeleteContext: PreviewFeatureDeleteContextWrapper(string(previewfeatures.SequenceResource), TrackingDeleteWrapper(resources.Sequence, DeleteSequence)), Schema: sequenceSchema, Importer: &schema.ResourceImporter{ diff --git a/pkg/resources/session_parameter.go b/pkg/resources/session_parameter.go deleted file mode 100644 index 1efbc5a40e..0000000000 --- a/pkg/resources/session_parameter.go +++ /dev/null @@ -1,155 +0,0 @@ -package resources - -import ( - "context" - "fmt" - - "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/provider/resources" - "github.com/hashicorp/terraform-plugin-sdk/v2/diag" - - "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/internal/provider" - - "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk" - "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" -) - -var sessionParameterSchema = map[string]*schema.Schema{ - "key": { - Type: schema.TypeString, - Required: true, - ForceNew: true, - Description: "Name of session parameter. Valid values are those in [session parameters](https://docs.snowflake.com/en/sql-reference/parameters.html#session-parameters).", - }, - "value": { - Type: schema.TypeString, - Required: true, - Description: "Value of session parameter, as a string. Constraints are the same as those for the parameters in Snowflake documentation.", - }, - "on_account": { - Type: schema.TypeBool, - Optional: true, - Default: false, - Description: "If true, the session parameter will be set on the account level.", - }, - "user": { - Type: schema.TypeString, - Optional: true, - Description: "The user to set the session parameter for. 
Required if on_account is false", - }, -} - -func SessionParameter() *schema.Resource { - return &schema.Resource{ - CreateContext: TrackingCreateWrapper(resources.SessionParameter, CreateSessionParameter), - ReadContext: TrackingReadWrapper(resources.SessionParameter, ReadSessionParameter), - UpdateContext: TrackingUpdateWrapper(resources.SessionParameter, UpdateSessionParameter), - DeleteContext: TrackingDeleteWrapper(resources.SessionParameter, DeleteSessionParameter), - - Schema: sessionParameterSchema, - Importer: &schema.ResourceImporter{ - StateContext: schema.ImportStatePassthroughContext, - }, - } -} - -// CreateSessionParameter implements schema.CreateFunc. -func CreateSessionParameter(ctx context.Context, d *schema.ResourceData, meta any) diag.Diagnostics { - client := meta.(*provider.Context).Client - key := d.Get("key").(string) - value := d.Get("value").(string) - - onAccount := d.Get("on_account").(bool) - user := d.Get("user").(string) - parameter := sdk.SessionParameter(key) - - var err error - if onAccount { - err := client.Parameters.SetSessionParameterOnAccount(ctx, parameter, value) - if err != nil { - return diag.FromErr(err) - } - } else { - if user == "" { - return diag.FromErr(fmt.Errorf("user is required if on_account is false")) - } - userId := sdk.NewAccountObjectIdentifier(user) - err = client.Parameters.SetSessionParameterOnUser(ctx, userId, parameter, value) - if err != nil { - return diag.FromErr(fmt.Errorf("error creating session parameter err = %w", err)) - } - } - - d.SetId(key) - - return ReadSessionParameter(ctx, d, meta) -} - -// ReadSessionParameter implements schema.ReadFunc. 
-func ReadSessionParameter(ctx context.Context, d *schema.ResourceData, meta any) diag.Diagnostics { - client := meta.(*provider.Context).Client - - parameter := d.Id() - - onAccount := d.Get("on_account").(bool) - var err error - var p *sdk.Parameter - if onAccount { - p, err = client.Parameters.ShowAccountParameter(ctx, sdk.AccountParameter(parameter)) - } else { - user := d.Get("user").(string) - userId := sdk.NewAccountObjectIdentifier(user) - p, err = client.Parameters.ShowUserParameter(ctx, sdk.UserParameter(parameter), userId) - } - if err != nil { - return diag.FromErr(fmt.Errorf("error reading session parameter err = %w", err)) - } - err = d.Set("value", p.Value) - if err != nil { - return diag.FromErr(fmt.Errorf("error setting session parameter err = %w", err)) - } - return nil -} - -// UpdateSessionParameter implements schema.UpdateFunc. -func UpdateSessionParameter(ctx context.Context, d *schema.ResourceData, meta any) diag.Diagnostics { - return CreateSessionParameter(ctx, d, meta) -} - -// DeleteSessionParameter implements schema.DeleteFunc. 
-func DeleteSessionParameter(ctx context.Context, d *schema.ResourceData, meta any) diag.Diagnostics { - client := meta.(*provider.Context).Client - key := d.Get("key").(string) - - onAccount := d.Get("on_account").(bool) - parameter := sdk.SessionParameter(key) - - if onAccount { - defaultParameter, err := client.Parameters.ShowAccountParameter(ctx, sdk.AccountParameter(key)) - if err != nil { - return diag.FromErr(err) - } - defaultValue := defaultParameter.Default - err = client.Parameters.SetSessionParameterOnAccount(ctx, parameter, defaultValue) - if err != nil { - return diag.FromErr(fmt.Errorf("error creating session parameter err = %w", err)) - } - } else { - user := d.Get("user").(string) - if user == "" { - return diag.FromErr(fmt.Errorf("user is required if on_account is false")) - } - userId := sdk.NewAccountObjectIdentifier(user) - defaultParameter, err := client.Parameters.ShowSessionParameter(ctx, sdk.SessionParameter(key)) - if err != nil { - return diag.FromErr(err) - } - defaultValue := defaultParameter.Default - err = client.Parameters.SetSessionParameterOnUser(ctx, userId, parameter, defaultValue) - if err != nil { - return diag.FromErr(fmt.Errorf("error deleting session parameter err = %w", err)) - } - } - - d.SetId(key) - return nil -} diff --git a/pkg/resources/session_parameter_acceptance_test.go b/pkg/resources/session_parameter_acceptance_test.go deleted file mode 100644 index 884fe6a95f..0000000000 --- a/pkg/resources/session_parameter_acceptance_test.go +++ /dev/null @@ -1,82 +0,0 @@ -package resources_test - -import ( - "fmt" - "testing" - - acc "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance" - - "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/testenvs" - "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk" - "github.com/hashicorp/terraform-plugin-testing/helper/resource" - "github.com/hashicorp/terraform-plugin-testing/tfversion" -) - -func TestAcc_SessionParameterWithUser(t 
*testing.T) { - _ = testenvs.GetOrSkipTest(t, testenvs.EnableAcceptance) - - user, userCleanup := acc.TestClient().User.CreateUser(t) - t.Cleanup(userCleanup) - - resource.Test(t, resource.TestCase{ - ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, - TerraformVersionChecks: []tfversion.TerraformVersionCheck{ - tfversion.RequireAbove(tfversion.Version1_5_0), - }, - PreCheck: func() { acc.TestAccPreCheck(t) }, - CheckDestroy: nil, - Steps: []resource.TestStep{ - { - Config: sessionParameterWithUser(user.ID(), "BINARY_OUTPUT_FORMAT", "BASE64"), - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr("snowflake_session_parameter.p", "key", "BINARY_OUTPUT_FORMAT"), - resource.TestCheckResourceAttr("snowflake_session_parameter.p", "value", "BASE64"), - resource.TestCheckResourceAttr("snowflake_session_parameter.p", "user", user.ID().Name()), - resource.TestCheckResourceAttr("snowflake_session_parameter.p", "on_account", "false"), - ), - }, - }, - }) -} - -func TestAcc_SessionParameterOnAccount(t *testing.T) { - resource.Test(t, resource.TestCase{ - ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, - TerraformVersionChecks: []tfversion.TerraformVersionCheck{ - tfversion.RequireAbove(tfversion.Version1_5_0), - }, - PreCheck: func() { acc.TestAccPreCheck(t) }, - CheckDestroy: nil, - Steps: []resource.TestStep{ - { - Config: sessionParameterOnAccount("AUTOCOMMIT", "false"), - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr("snowflake_session_parameter.p", "key", "AUTOCOMMIT"), - resource.TestCheckResourceAttr("snowflake_session_parameter.p", "value", "false"), - resource.TestCheckResourceAttr("snowflake_session_parameter.p", "on_account", "true"), - ), - }, - }, - }) -} - -func sessionParameterWithUser(userId sdk.AccountObjectIdentifier, key, value string) string { - return fmt.Sprintf(` -resource "snowflake_session_parameter" "p" { - key = "%[2]s" - value = "%[3]s" - user = %[1]s -} -`, 
userId.FullyQualifiedName(), key, value) -} - -func sessionParameterOnAccount(key, value string) string { - s := ` -resource "snowflake_session_parameter" "p" { - key = "%s" - value = "%s" - on_account = true -} -` - return fmt.Sprintf(s, key, value) -} diff --git a/pkg/resources/share.go b/pkg/resources/share.go index 334a931f30..385c15b93b 100644 --- a/pkg/resources/share.go +++ b/pkg/resources/share.go @@ -7,6 +7,7 @@ import ( "strings" "time" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/provider/previewfeatures" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/provider/resources" "github.com/hashicorp/terraform-plugin-sdk/v2/diag" @@ -48,10 +49,10 @@ var shareSchema = map[string]*schema.Schema{ // Share returns a pointer to the resource representing a share. func Share() *schema.Resource { return &schema.Resource{ - CreateContext: TrackingCreateWrapper(resources.Share, CreateShare), - ReadContext: TrackingReadWrapper(resources.Share, ReadShare), - UpdateContext: TrackingUpdateWrapper(resources.Share, UpdateShare), - DeleteContext: TrackingDeleteWrapper(resources.Share, DeleteShare), + CreateContext: PreviewFeatureCreateContextWrapper(string(previewfeatures.ShareResource), TrackingCreateWrapper(resources.Share, CreateShare)), + ReadContext: PreviewFeatureReadContextWrapper(string(previewfeatures.ShareResource), TrackingReadWrapper(resources.Share, ReadShare)), + UpdateContext: PreviewFeatureUpdateContextWrapper(string(previewfeatures.ShareResource), TrackingUpdateWrapper(resources.Share, UpdateShare)), + DeleteContext: PreviewFeatureDeleteContextWrapper(string(previewfeatures.ShareResource), TrackingDeleteWrapper(resources.Share, DeleteShare)), Schema: shareSchema, Importer: &schema.ResourceImporter{ diff --git a/pkg/resources/stage.go b/pkg/resources/stage.go index 314454716c..e2ac46ed96 100644 --- a/pkg/resources/stage.go +++ b/pkg/resources/stage.go @@ -6,6 +6,7 @@ import ( "fmt" "strings" + 
"github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/provider/previewfeatures" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/provider/resources" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/internal/provider" @@ -102,10 +103,10 @@ var stageSchema = map[string]*schema.Schema{ // TODO (SNOW-1019005): Remove snowflake package that is used in Create and Update operations func Stage() *schema.Resource { return &schema.Resource{ - CreateContext: TrackingCreateWrapper(resources.Stage, CreateStage), - ReadContext: TrackingReadWrapper(resources.Stage, ReadStage), - UpdateContext: TrackingUpdateWrapper(resources.Stage, UpdateStage), - DeleteContext: TrackingDeleteWrapper(resources.Stage, DeleteStage), + CreateContext: PreviewFeatureCreateContextWrapper(string(previewfeatures.StageResource), TrackingCreateWrapper(resources.Stage, CreateStage)), + ReadContext: PreviewFeatureReadContextWrapper(string(previewfeatures.StageResource), TrackingReadWrapper(resources.Stage, ReadStage)), + UpdateContext: PreviewFeatureUpdateContextWrapper(string(previewfeatures.StageResource), TrackingUpdateWrapper(resources.Stage, UpdateStage)), + DeleteContext: PreviewFeatureDeleteContextWrapper(string(previewfeatures.StageResource), TrackingDeleteWrapper(resources.Stage, DeleteStage)), Schema: stageSchema, Importer: &schema.ResourceImporter{ diff --git a/pkg/resources/storage_integration.go b/pkg/resources/storage_integration.go index cd15e0cc6f..3723347bc5 100644 --- a/pkg/resources/storage_integration.go +++ b/pkg/resources/storage_integration.go @@ -7,15 +7,15 @@ import ( "slices" "strings" - "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/provider/resources" - "github.com/hashicorp/terraform-plugin-sdk/v2/diag" - "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/internal/provider" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/provider/previewfeatures" + 
"github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/provider/resources" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/schemas" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/helpers" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk" + "github.com/hashicorp/terraform-plugin-sdk/v2/diag" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/validation" ) @@ -115,10 +115,10 @@ var storageIntegrationSchema = map[string]*schema.Schema{ // StorageIntegration returns a pointer to the resource representing a storage integration. func StorageIntegration() *schema.Resource { return &schema.Resource{ - CreateContext: TrackingCreateWrapper(resources.StorageIntegration, CreateStorageIntegration), - ReadContext: TrackingReadWrapper(resources.StorageIntegration, ReadStorageIntegration), - UpdateContext: TrackingUpdateWrapper(resources.StorageIntegration, UpdateStorageIntegration), - DeleteContext: TrackingDeleteWrapper(resources.StorageIntegration, DeleteStorageIntegration), + CreateContext: PreviewFeatureCreateContextWrapper(string(previewfeatures.StorageIntegrationResource), TrackingCreateWrapper(resources.StorageIntegration, CreateStorageIntegration)), + ReadContext: PreviewFeatureReadContextWrapper(string(previewfeatures.StorageIntegrationResource), TrackingReadWrapper(resources.StorageIntegration, ReadStorageIntegration)), + UpdateContext: PreviewFeatureUpdateContextWrapper(string(previewfeatures.StorageIntegrationResource), TrackingUpdateWrapper(resources.StorageIntegration, UpdateStorageIntegration)), + DeleteContext: PreviewFeatureDeleteContextWrapper(string(previewfeatures.StorageIntegrationResource), TrackingDeleteWrapper(resources.StorageIntegration, DeleteStorageIntegration)), Schema: storageIntegrationSchema, Importer: &schema.ResourceImporter{ diff --git a/pkg/resources/stream.go b/pkg/resources/stream.go deleted file mode 100644 index 219161a107..0000000000 
--- a/pkg/resources/stream.go +++ /dev/null @@ -1,328 +0,0 @@ -package resources - -import ( - "context" - "fmt" - "log" - "strings" - - providerresources "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/provider/resources" - "github.com/hashicorp/terraform-plugin-sdk/v2/diag" - - "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/helpers" - "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/internal/provider" - "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/schemas" - "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk" - "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" -) - -var streamSchema = map[string]*schema.Schema{ - "name": { - Type: schema.TypeString, - Required: true, - ForceNew: true, - Description: "Specifies the identifier for the stream; must be unique for the database and schema in which the stream is created.", - }, - "schema": { - Type: schema.TypeString, - Required: true, - ForceNew: true, - Description: "The schema in which to create the stream.", - }, - "database": { - Type: schema.TypeString, - Required: true, - ForceNew: true, - Description: "The database in which to create the stream.", - }, - "comment": { - Type: schema.TypeString, - Optional: true, - Description: "Specifies a comment for the stream.", - }, - "on_table": { - Type: schema.TypeString, - Optional: true, - ForceNew: true, - Description: "Specifies an identifier for the table the stream will monitor.", - ExactlyOneOf: []string{"on_table", "on_view", "on_stage"}, - DiffSuppressFunc: suppressIdentifierQuoting, - ValidateDiagFunc: IsValidIdentifier[sdk.SchemaObjectIdentifier](), - }, - "on_view": { - Type: schema.TypeString, - Optional: true, - ForceNew: true, - Description: "Specifies an identifier for the view the stream will monitor.", - ExactlyOneOf: []string{"on_table", "on_view", "on_stage"}, - DiffSuppressFunc: suppressIdentifierQuoting, - ValidateDiagFunc: IsValidIdentifier[sdk.SchemaObjectIdentifier](), - }, 
- "on_stage": { - Type: schema.TypeString, - Optional: true, - ForceNew: true, - Description: "Specifies an identifier for the stage the stream will monitor.", - ExactlyOneOf: []string{"on_table", "on_view", "on_stage"}, - DiffSuppressFunc: func(k, old, new string, d *schema.ResourceData) bool { - // Suppress diff if the stage name is the same, even if database and schema are not specified - return strings.Trim(strings.Split(old, ".")[len(strings.Split(old, "."))-1], "\"") == strings.Trim(strings.Split(new, ".")[len(strings.Split(new, "."))-1], "\"") - }, - ValidateDiagFunc: IsValidIdentifier[sdk.SchemaObjectIdentifier](), - }, - "append_only": { - Type: schema.TypeBool, - Optional: true, - ForceNew: true, - Default: false, - Description: "Type of the stream that will be created.", - }, - "insert_only": { - Type: schema.TypeBool, - Optional: true, - ForceNew: true, - Default: false, - Description: "Create an insert only stream type.", - }, - "show_initial_rows": { - Type: schema.TypeBool, - Optional: true, - ForceNew: true, - Default: false, - Description: "Specifies whether to return all existing rows in the source table as row inserts the first time the stream is consumed.", - }, - "owner": { - Type: schema.TypeString, - Computed: true, - Description: "Name of the role that owns the stream.", - }, - FullyQualifiedNameAttributeName: schemas.FullyQualifiedNameSchema, -} - -func Stream() *schema.Resource { - return &schema.Resource{ - CreateContext: TrackingCreateWrapper(providerresources.Stream, CreateStream), - ReadContext: TrackingReadWrapper(providerresources.Stream, ReadStream), - UpdateContext: TrackingUpdateWrapper(providerresources.Stream, UpdateStream), - DeleteContext: TrackingDeleteWrapper(providerresources.Stream, DeleteStream), - DeprecationMessage: deprecatedResourceDescription( - string(providerresources.StreamOnDirectoryTable), - string(providerresources.StreamOnExternalTable), - string(providerresources.StreamOnTable), - 
string(providerresources.StreamOnView), - ), - - Schema: streamSchema, - Importer: &schema.ResourceImporter{ - StateContext: schema.ImportStatePassthroughContext, - }, - } -} - -// CreateStream implements schema.CreateFunc. -func CreateStream(ctx context.Context, d *schema.ResourceData, meta any) diag.Diagnostics { - client := meta.(*provider.Context).Client - databaseName := d.Get("database").(string) - schemaName := d.Get("schema").(string) - name := d.Get("name").(string) - appendOnly := d.Get("append_only").(bool) - insertOnly := d.Get("insert_only").(bool) - showInitialRows := d.Get("show_initial_rows").(bool) - id := sdk.NewSchemaObjectIdentifier(databaseName, schemaName, name) - - onTable, onTableSet := d.GetOk("on_table") - onView, onViewSet := d.GetOk("on_view") - onStage, onStageSet := d.GetOk("on_stage") - - switch { - case onTableSet: - tableObjectIdentifier, err := helpers.DecodeSnowflakeParameterID(onTable.(string)) - if err != nil { - return diag.FromErr(err) - } - tableId := tableObjectIdentifier.(sdk.SchemaObjectIdentifier) - - table, err := client.Tables.ShowByID(ctx, tableId) - if err != nil { - return diag.FromErr(err) - } - - if table.IsExternal { - req := sdk.NewCreateOnExternalTableStreamRequest(id, tableId) - if insertOnly { - req.WithInsertOnly(true) - } - if v, ok := d.GetOk("comment"); ok { - req.WithComment(v.(string)) - } - err := client.Streams.CreateOnExternalTable(ctx, req) - if err != nil { - return diag.FromErr(fmt.Errorf("error creating stream %v err = %w", name, err)) - } - } else { - req := sdk.NewCreateOnTableStreamRequest(id, tableId) - if appendOnly { - req.WithAppendOnly(true) - } - if showInitialRows { - req.WithShowInitialRows(true) - } - if v, ok := d.GetOk("comment"); ok { - req.WithComment(v.(string)) - } - err := client.Streams.CreateOnTable(ctx, req) - if err != nil { - return diag.FromErr(fmt.Errorf("error creating stream %v err = %w", name, err)) - } - } - case onViewSet: - viewObjectIdentifier, err := 
helpers.DecodeSnowflakeParameterID(onView.(string)) - viewId := viewObjectIdentifier.(sdk.SchemaObjectIdentifier) - if err != nil { - return diag.FromErr(err) - } - - _, err = client.Views.ShowByID(ctx, viewId) - if err != nil { - return diag.FromErr(err) - } - - req := sdk.NewCreateOnViewStreamRequest(id, viewId) - if appendOnly { - req.WithAppendOnly(true) - } - if showInitialRows { - req.WithShowInitialRows(true) - } - if v, ok := d.GetOk("comment"); ok { - req.WithComment(v.(string)) - } - err = client.Streams.CreateOnView(ctx, req) - if err != nil { - return diag.FromErr(fmt.Errorf("error creating stream %v err = %w", name, err)) - } - case onStageSet: - stageObjectIdentifier, err := helpers.DecodeSnowflakeParameterID(onStage.(string)) - stageId := stageObjectIdentifier.(sdk.SchemaObjectIdentifier) - if err != nil { - return diag.FromErr(err) - } - stageProperties, err := client.Stages.Describe(ctx, stageId) - if err != nil { - return diag.FromErr(err) - } - if findStagePropertyValueByName(stageProperties, "ENABLE") != "true" { - return diag.FromErr(fmt.Errorf("directory must be enabled on stage")) - } - req := sdk.NewCreateOnDirectoryTableStreamRequest(id, stageId) - if v, ok := d.GetOk("comment"); ok { - req.WithComment(v.(string)) - } - err = client.Streams.CreateOnDirectoryTable(ctx, req) - if err != nil { - return diag.FromErr(fmt.Errorf("error creating stream %v err = %w", name, err)) - } - } - - d.SetId(helpers.EncodeSnowflakeID(id)) - - return ReadStream(ctx, d, meta) -} - -// ReadStream implements schema.ReadFunc. 
-func ReadStream(ctx context.Context, d *schema.ResourceData, meta any) diag.Diagnostics { - client := meta.(*provider.Context).Client - - id := helpers.DecodeSnowflakeID(d.Id()).(sdk.SchemaObjectIdentifier) - stream, err := client.Streams.ShowByID(ctx, id) - if err != nil { - log.Printf("[DEBUG] stream (%s) not found", d.Id()) - d.SetId("") - return nil - } - if err := d.Set(FullyQualifiedNameAttributeName, id.FullyQualifiedName()); err != nil { - return diag.FromErr(err) - } - if err := d.Set("name", stream.Name); err != nil { - return diag.FromErr(err) - } - if err := d.Set("database", stream.DatabaseName); err != nil { - return diag.FromErr(err) - } - if err := d.Set("schema", stream.SchemaName); err != nil { - return diag.FromErr(err) - } - switch *stream.SourceType { - case sdk.StreamSourceTypeStage: - if err := d.Set("on_stage", *stream.TableName); err != nil { - return diag.FromErr(err) - } - case sdk.StreamSourceTypeView: - if err := d.Set("on_view", *stream.TableName); err != nil { - return diag.FromErr(err) - } - default: - if err := d.Set("on_table", *stream.TableName); err != nil { - return diag.FromErr(err) - } - } - if err := d.Set("append_only", *stream.Mode == "APPEND_ONLY"); err != nil { - return diag.FromErr(err) - } - if err := d.Set("insert_only", *stream.Mode == "INSERT_ONLY"); err != nil { - return diag.FromErr(err) - } - // TODO: SHOW STREAMS doesn't return that value right now (I'm not sure if it ever did), but probably we can assume - // the customers got 'false' every time and hardcode it (it's only on create thing, so it's not necessary - // to track its value after creation). - if err := d.Set("show_initial_rows", false); err != nil { - return diag.FromErr(err) - } - if err := d.Set("comment", *stream.Comment); err != nil { - return diag.FromErr(err) - } - if err := d.Set("owner", *stream.Owner); err != nil { - return diag.FromErr(err) - } - return nil -} - -// UpdateStream implements schema.UpdateFunc. 
-func UpdateStream(ctx context.Context, d *schema.ResourceData, meta any) diag.Diagnostics { - client := meta.(*provider.Context).Client - - id := helpers.DecodeSnowflakeID(d.Id()).(sdk.SchemaObjectIdentifier) - - if d.HasChange("comment") { - comment := d.Get("comment").(string) - if comment == "" { - err := client.Streams.Alter(ctx, sdk.NewAlterStreamRequest(id).WithUnsetComment(true)) - if err != nil { - return diag.FromErr(fmt.Errorf("error unsetting stream comment on %v", d.Id())) - } - } else { - err := client.Streams.Alter(ctx, sdk.NewAlterStreamRequest(id).WithSetComment(comment)) - if err != nil { - return diag.FromErr(fmt.Errorf("error setting stream comment on %v", d.Id())) - } - } - } - - return ReadStream(ctx, d, meta) -} - -// DeleteStream implements schema.DeleteFunc. -func DeleteStream(ctx context.Context, d *schema.ResourceData, meta any) diag.Diagnostics { - client := meta.(*provider.Context).Client - - streamId := helpers.DecodeSnowflakeID(d.Id()).(sdk.SchemaObjectIdentifier) - - err := client.Streams.Drop(ctx, sdk.NewDropStreamRequest(streamId)) - if err != nil { - return diag.FromErr(fmt.Errorf("error deleting stream %v err = %w", d.Id(), err)) - } - - d.SetId("") - - return nil -} diff --git a/pkg/resources/stream_acceptance_test.go b/pkg/resources/stream_acceptance_test.go deleted file mode 100644 index 54e825c3e2..0000000000 --- a/pkg/resources/stream_acceptance_test.go +++ /dev/null @@ -1,510 +0,0 @@ -package resources_test - -import ( - "fmt" - "regexp" - "testing" - - acc "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance" - - "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/testenvs" - "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/provider/resources" - "github.com/hashicorp/terraform-plugin-testing/helper/resource" - "github.com/hashicorp/terraform-plugin-testing/plancheck" - "github.com/hashicorp/terraform-plugin-testing/tfversion" -) - -func 
TestAcc_StreamCreateOnStageWithoutDirectoryEnabled(t *testing.T) { - accName := acc.TestClient().Ids.Alpha() - - resource.Test(t, resource.TestCase{ - ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, - TerraformVersionChecks: []tfversion.TerraformVersionCheck{ - tfversion.RequireAbove(tfversion.Version1_5_0), - }, - PreCheck: func() { acc.TestAccPreCheck(t) }, - CheckDestroy: acc.CheckDestroy(t, resources.Stream), - Steps: []resource.TestStep{ - { - Config: stageStreamConfig(accName, false), - ExpectError: regexp.MustCompile("directory must be enabled on stage"), - }, - }, - }) -} - -func TestAcc_StreamCreateOnStage(t *testing.T) { - accName := acc.TestClient().Ids.Alpha() - resource.Test(t, resource.TestCase{ - ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, - TerraformVersionChecks: []tfversion.TerraformVersionCheck{ - tfversion.RequireAbove(tfversion.Version1_5_0), - }, - PreCheck: func() { acc.TestAccPreCheck(t) }, - CheckDestroy: acc.CheckDestroy(t, resources.Stream), - Steps: []resource.TestStep{ - { - Config: stageStreamConfig(accName, true), - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr("snowflake_stream.test_stream", "name", accName), - resource.TestCheckResourceAttr("snowflake_stream.test_stream", "database", accName), - resource.TestCheckResourceAttr("snowflake_stream.test_stream", "schema", accName), - resource.TestCheckResourceAttr("snowflake_stream.test_stream", "comment", "Terraform acceptance test"), - resource.TestCheckResourceAttr("snowflake_stream.test_stream", "append_only", "false"), - resource.TestCheckResourceAttr("snowflake_stream.test_stream", "insert_only", "false"), - resource.TestCheckResourceAttr("snowflake_stream.test_stream", "show_initial_rows", "false"), - ), - }, - }, - }) -} - -// proves issue https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2672 -func TestAcc_Stream_OnTable(t *testing.T) { - tableName := acc.TestClient().Ids.Alpha() - tableName2 := 
acc.TestClient().Ids.Alpha() - id := acc.TestClient().Ids.RandomSchemaObjectIdentifier() - - resource.Test(t, resource.TestCase{ - ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, - TerraformVersionChecks: []tfversion.TerraformVersionCheck{ - tfversion.RequireAbove(tfversion.Version1_5_0), - }, - PreCheck: func() { acc.TestAccPreCheck(t) }, - CheckDestroy: acc.CheckDestroy(t, resources.Stream), - Steps: []resource.TestStep{ - { - Config: streamConfigOnTable(acc.TestDatabaseName, acc.TestSchemaName, tableName, id.Name()), - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr("snowflake_stream.test_stream", "name", id.Name()), - resource.TestCheckResourceAttr("snowflake_stream.test_stream", "fully_qualified_name", id.FullyQualifiedName()), - resource.TestCheckResourceAttr("snowflake_stream.test_stream", "database", acc.TestDatabaseName), - resource.TestCheckResourceAttr("snowflake_stream.test_stream", "schema", acc.TestSchemaName), - resource.TestCheckResourceAttr("snowflake_stream.test_stream", "on_table", fmt.Sprintf("\"%s\".\"%s\".%s", acc.TestDatabaseName, acc.TestSchemaName, tableName)), - resource.TestCheckResourceAttr("snowflake_stream.test_stream", "comment", "Terraform acceptance test"), - ), - ConfigPlanChecks: resource.ConfigPlanChecks{ - PostApplyPreRefresh: []plancheck.PlanCheck{plancheck.ExpectEmptyPlan()}, - }, - }, - { - Config: streamConfigOnTable(acc.TestDatabaseName, acc.TestSchemaName, tableName2, id.Name()), - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr("snowflake_stream.test_stream", "name", id.Name()), - resource.TestCheckResourceAttr("snowflake_stream.test_stream", "fully_qualified_name", id.FullyQualifiedName()), - resource.TestCheckResourceAttr("snowflake_stream.test_stream", "database", acc.TestDatabaseName), - resource.TestCheckResourceAttr("snowflake_stream.test_stream", "schema", acc.TestSchemaName), - resource.TestCheckResourceAttr("snowflake_stream.test_stream", "on_table", 
fmt.Sprintf("\"%s\".\"%s\".%s", acc.TestDatabaseName, acc.TestSchemaName, tableName2)), - resource.TestCheckResourceAttr("snowflake_stream.test_stream", "comment", "Terraform acceptance test"), - ), - ConfigPlanChecks: resource.ConfigPlanChecks{ - PostApplyPreRefresh: []plancheck.PlanCheck{plancheck.ExpectEmptyPlan()}, - }, - }, - }, - }) -} - -// proves issue https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2672 -func TestAcc_Stream_OnView(t *testing.T) { - // TODO(SNOW-1423486): Fix using warehouse in all tests and remove unsetting testenvs.ConfigureClientOnce - t.Setenv(string(testenvs.ConfigureClientOnce), "") - - tableName := acc.TestClient().Ids.Alpha() - viewName := acc.TestClient().Ids.Alpha() - name := acc.TestClient().Ids.Alpha() - - resource.Test(t, resource.TestCase{ - ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, - TerraformVersionChecks: []tfversion.TerraformVersionCheck{ - tfversion.RequireAbove(tfversion.Version1_5_0), - }, - PreCheck: func() { acc.TestAccPreCheck(t) }, - CheckDestroy: acc.CheckDestroy(t, resources.Stream), - Steps: []resource.TestStep{ - { - Config: streamConfigOnView(acc.TestDatabaseName, acc.TestSchemaName, tableName, viewName, name), - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr("snowflake_stream.test_stream", "name", name), - resource.TestCheckResourceAttr("snowflake_stream.test_stream", "database", acc.TestDatabaseName), - resource.TestCheckResourceAttr("snowflake_stream.test_stream", "schema", acc.TestSchemaName), - resource.TestCheckResourceAttr("snowflake_stream.test_stream", "on_view", fmt.Sprintf("\"%s\".\"%s\".%s", acc.TestDatabaseName, acc.TestSchemaName, viewName)), - resource.TestCheckResourceAttr("snowflake_stream.test_stream", "comment", "Terraform acceptance test"), - ), - ConfigPlanChecks: resource.ConfigPlanChecks{ - PostApplyPreRefresh: []plancheck.PlanCheck{plancheck.ExpectEmptyPlan()}, - }, - }, - }, - }) -} - -func TestAcc_Stream(t *testing.T) { 
- // Current error is User: is not authorized to perform: sts:AssumeRole on resource: duration 1.162414333s args {}] () - t.Skip("Skipping TestAcc_Stream") - - accName := acc.TestClient().Ids.Alpha() - accNameExternalTable := acc.TestClient().Ids.Alpha() - bucketURL := testenvs.GetOrSkipTest(t, testenvs.AwsExternalBucketUrl) - roleName := testenvs.GetOrSkipTest(t, testenvs.AwsExternalRoleArn) - - resource.Test(t, resource.TestCase{ - ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, - PreCheck: func() { acc.TestAccPreCheck(t) }, - TerraformVersionChecks: []tfversion.TerraformVersionCheck{ - tfversion.RequireAbove(tfversion.Version1_5_0), - }, - CheckDestroy: acc.CheckDestroy(t, resources.Stream), - Steps: []resource.TestStep{ - { - Config: streamConfig(accName, false), - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr("snowflake_stream.test_stream", "name", accName), - resource.TestCheckResourceAttr("snowflake_stream.test_stream", "database", accName), - resource.TestCheckResourceAttr("snowflake_stream.test_stream", "schema", accName), - resource.TestCheckResourceAttr("snowflake_stream.test_stream", "on_table", fmt.Sprintf("%s.%s.%s", accName, accName, "STREAM_ON_TABLE")), - resource.TestCheckResourceAttr("snowflake_stream.test_stream", "comment", "Terraform acceptance test"), - resource.TestCheckResourceAttr("snowflake_stream.test_stream", "append_only", "false"), - resource.TestCheckResourceAttr("snowflake_stream.test_stream", "insert_only", "false"), - resource.TestCheckResourceAttr("snowflake_stream.test_stream", "show_initial_rows", "false"), - ), - }, - { - Config: streamConfig(accName, true), - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr("snowflake_stream.test_stream", "name", accName), - resource.TestCheckResourceAttr("snowflake_stream.test_stream", "database", accName), - resource.TestCheckResourceAttr("snowflake_stream.test_stream", "schema", accName), - 
resource.TestCheckResourceAttr("snowflake_stream.test_stream", "on_table", fmt.Sprintf("%s.%s.%s", accName, accName, "STREAM_ON_TABLE")), - resource.TestCheckResourceAttr("snowflake_stream.test_stream", "comment", "Terraform acceptance test"), - resource.TestCheckResourceAttr("snowflake_stream.test_stream", "append_only", "true"), - resource.TestCheckResourceAttr("snowflake_stream.test_stream", "insert_only", "false"), - resource.TestCheckResourceAttr("snowflake_stream.test_stream", "show_initial_rows", "false"), - ), - }, - { - Config: externalTableStreamConfig(accNameExternalTable, false, bucketURL, roleName), - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr("snowflake_stream.test_stream", "name", accNameExternalTable), - resource.TestCheckResourceAttr("snowflake_stream.test_stream", "database", accNameExternalTable), - resource.TestCheckResourceAttr("snowflake_stream.test_stream", "schema", accNameExternalTable), - resource.TestCheckResourceAttr("snowflake_stream.test_stream", "on_table", fmt.Sprintf("%s.%s.%s", accNameExternalTable, accNameExternalTable, "STREAM_ON_EXTERNAL_TABLE")), - resource.TestCheckResourceAttr("snowflake_stream.test_stream", "comment", "Terraform acceptance test"), - resource.TestCheckResourceAttr("snowflake_stream.test_stream", "append_only", "false"), - resource.TestCheckResourceAttr("snowflake_stream.test_stream", "insert_only", "false"), - resource.TestCheckResourceAttr("snowflake_stream.test_stream", "show_initial_rows", "false"), - ), - }, - { - Config: externalTableStreamConfig(accNameExternalTable, true, bucketURL, roleName), - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr("snowflake_stream.test_stream", "name", accNameExternalTable), - resource.TestCheckResourceAttr("snowflake_stream.test_stream", "database", accNameExternalTable), - resource.TestCheckResourceAttr("snowflake_stream.test_stream", "schema", accNameExternalTable), - 
resource.TestCheckResourceAttr("snowflake_stream.test_stream", "on_table", fmt.Sprintf("%s.%s.%s", accNameExternalTable, accNameExternalTable, "STREAM_ON_EXTERNAL_TABLE")), - resource.TestCheckResourceAttr("snowflake_stream.test_stream", "comment", "Terraform acceptance test"), - resource.TestCheckResourceAttr("snowflake_stream.test_stream", "append_only", "false"), - resource.TestCheckResourceAttr("snowflake_stream.test_stream", "insert_only", "true"), - resource.TestCheckResourceAttr("snowflake_stream.test_stream", "show_initial_rows", "false"), - ), - }, - { - Config: viewStreamConfig(accName, false), - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr("snowflake_stream.test_stream", "name", accName), - resource.TestCheckResourceAttr("snowflake_stream.test_stream", "database", accName), - resource.TestCheckResourceAttr("snowflake_stream.test_stream", "schema", accName), - resource.TestCheckResourceAttr("snowflake_stream.test_stream", "on_view", fmt.Sprintf("%s.%s.%s", accName, accName, "STREAM_ON_VIEW")), - resource.TestCheckResourceAttr("snowflake_stream.test_stream", "comment", "Terraform acceptance test"), - resource.TestCheckResourceAttr("snowflake_stream.test_stream", "append_only", "false"), - resource.TestCheckResourceAttr("snowflake_stream.test_stream", "insert_only", "false"), - resource.TestCheckResourceAttr("snowflake_stream.test_stream", "show_initial_rows", "false"), - ), - }, - { - Config: stageStreamConfig(accName, true), - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr("snowflake_stream.test_stream", "name", accName), - resource.TestCheckResourceAttr("snowflake_stream.test_stream", "database", accName), - resource.TestCheckResourceAttr("snowflake_stream.test_stream", "schema", accName), - resource.TestCheckResourceAttr("snowflake_stream.test_stream", "on_stage", fmt.Sprintf("%s.%s.%s", accName, accName, "STREAM_ON_STAGE")), - resource.TestCheckResourceAttr("snowflake_stream.test_stream", "comment", 
"Terraform acceptance test"), - ), - }, - { - ResourceName: "snowflake_stream.test_stream", - ImportState: true, - ImportStateVerify: true, - }, - }, - }) -} - -func streamConfigOnTable(databaseName string, schemaName string, tableName string, name string) string { - return fmt.Sprintf(` -resource "snowflake_table" "test_stream_on_table" { - database = "%[1]s" - schema = "%[2]s" - name = "%[3]s" - comment = "Terraform acceptance test" - change_tracking = true - - column { - name = "column1" - type = "VARIANT" - } - column { - name = "column2" - type = "VARCHAR" - } -} - -resource "snowflake_stream" "test_stream" { - database = "%[1]s" - schema = "%[2]s" - name = "%[4]s" - comment = "Terraform acceptance test" - on_table = "\"%[1]s\".\"%[2]s\".\"${snowflake_table.test_stream_on_table.name}\"" -} -`, databaseName, schemaName, tableName, name) -} - -func streamConfigOnView(databaseName string, schemaName string, tableName string, viewName string, name string) string { - return fmt.Sprintf(` -resource "snowflake_table" "test" { - database = "%[1]s" - schema = "%[2]s" - name = "%[3]s" - comment = "Terraform acceptance test" - change_tracking = true - - column { - name = "column1" - type = "VARIANT" - } - column { - name = "column2" - type = "VARCHAR" - } -} - -resource "snowflake_view" "test" { - database = "%[1]s" - schema = "%[2]s" - name = "%[4]s" - change_tracking = true - - statement = "select * from \"${snowflake_table.test.name}\"" - column { - column_name = "column1" - } - column { - column_name = "column2" - } -} - -resource "snowflake_stream" "test_stream" { - database = "%[1]s" - schema = "%[2]s" - name = "%[5]s" - comment = "Terraform acceptance test" - on_view = "\"%[1]s\".\"%[2]s\".\"${snowflake_view.test.name}\"" -} -`, databaseName, schemaName, tableName, viewName, name) -} - -func streamConfig(name string, appendOnly bool) string { - appendOnlyConfig := "" - if appendOnly { - appendOnlyConfig = "append_only = true" - } - - s := ` -resource 
"snowflake_database" "test_database" { - name = "%s" - comment = "Terraform acceptance test" -} - -resource "snowflake_schema" "test_schema" { - name = "%s" - database = snowflake_database.test_database.name - comment = "Terraform acceptance test" -} - -resource "snowflake_table" "test_stream_on_table" { - database = snowflake_database.test_database.name - schema = snowflake_schema.test_schema.name - name = "STREAM_ON_TABLE" - comment = "Terraform acceptance test" - change_tracking = true - - column { - name = "column1" - type = "VARIANT" - } - column { - name = "column2" - type = "VARCHAR" - } -} - -resource "snowflake_stream" "test_stream" { - database = snowflake_database.test_database.name - schema = snowflake_schema.test_schema.name - name = "%s" - comment = "Terraform acceptance test" - on_table = "${snowflake_database.test_database.name}.${snowflake_schema.test_schema.name}.${snowflake_table.test_stream_on_table.name}" - %s -} -` - return fmt.Sprintf(s, name, name, name, appendOnlyConfig) -} - -func externalTableStreamConfig(name string, insertOnly bool, bucketURL string, roleName string) string { - // Refer to external_table_acceptance_test.go for the original source on - // external table resources and dependents (modified slightly here). 
- insertOnlyConfig := "" - if insertOnly { - insertOnlyConfig = "insert_only = true" - } - - s := ` -resource "snowflake_database" "test" { - name = "%v" - comment = "Terraform acceptance test" -} -resource "snowflake_schema" "test" { - name = "%v" - database = snowflake_database.test.name - comment = "Terraform acceptance test" -} -resource "snowflake_stage" "test" { - name = "%v" - url = "%s" - database = snowflake_database.test.name - schema = snowflake_schema.test.name - comment = "Terraform acceptance test" - storage_integration = snowflake_storage_integration.external_table_stream_integration.name -} -resource "snowflake_storage_integration" "external_table_stream_integration" { - name = "%v" - storage_allowed_locations = ["%s"] - storage_provider = "S3" - storage_aws_role_arn = "%s" -} -resource "snowflake_external_table" "test_external_stream_table" { - database = snowflake_database.test.name - schema = snowflake_schema.test.name - name = "STREAM_ON_EXTERNAL_TABLE" - comment = "Terraform acceptance test" - column { - name = "column1" - type = "STRING" - as = "TO_VARCHAR(TO_TIMESTAMP_NTZ(value:unix_timestamp_property::NUMBER, 3), 'yyyy-mm-dd-hh')" - } - column { - name = "column2" - type = "TIMESTAMP_NTZ(9)" - as = "($1:\"CreatedDate\"::timestamp)" - } - file_format = "TYPE = CSV" - location = "@${snowflake_database.test.name}.${snowflake_schema.test.name}.${snowflake_stage.test.name}" -} -resource "snowflake_stream" "test_external_table_stream" { - database = snowflake_database.test.name - schema = snowflake_schema.test.name - name = "%s" - comment = "Terraform acceptance test" - on_table = "${snowflake_database.test.name}.${snowflake_schema.test.name}.${snowflake_external_table.test_external_stream_table.name}" - %s -} -` - - return fmt.Sprintf(s, name, name, name, bucketURL, name, bucketURL, roleName, name, insertOnlyConfig) -} - -func viewStreamConfig(name string, appendOnly bool) string { - appendOnlyConfig := "" - if appendOnly { - appendOnlyConfig = 
"append_only = true" - } - - s := ` -resource "snowflake_database" "test_database" { - name = "%s" - comment = "Terraform acceptance test" -} - -resource "snowflake_schema" "test_schema" { - name = "%s" - database = snowflake_database.test_database.name - comment = "Terraform acceptance test" -} - -resource "snowflake_table" "test_stream_on_view" { - database = snowflake_database.test_database.name - schema = snowflake_schema.test_schema.name - name = "STREAM_ON_VIEW_TABLE" - comment = "Terraform acceptance test" - change_tracking = true - - column { - name = "column1" - type = "VARIANT" - } - column { - name = "column2" - type = "VARCHAR(16777216)" - } -} - -resource "snowflake_view" "test_stream_on_view" { - database = snowflake_database.test_database.name - schema = snowflake_schema.test_schema.name - name = "STREAM_ON_VIEW" - - statement = "select * from ${snowflake_table.test_stream_on_view.name}" -} - -resource "snowflake_stream" "test_stream" { - database = snowflake_database.test_database.name - schema = snowflake_schema.test_schema.name - name = "%s" - comment = "Terraform acceptance test" - on_view = "${snowflake_database.test_database.name}.${snowflake_schema.test_schema.name}.${snowflake_view.test_stream_on_view.name}" - %s -} -` - return fmt.Sprintf(s, name, name, name, appendOnlyConfig) -} - -func stageStreamConfig(name string, directory bool) string { - s := ` -resource "snowflake_database" "test_database" { - name = "%s" - comment = "Terraform acceptance test" -} - -resource "snowflake_schema" "test_schema" { - name = "%s" - database = snowflake_database.test_database.name - comment = "Terraform acceptance test" -} - -resource "snowflake_stage" "test_stage" { - name = "%s" - database = snowflake_database.test_database.name - schema = snowflake_schema.test_schema.name - directory = "ENABLE = %t" -} - -resource "snowflake_stream" "test_stream" { - database = snowflake_database.test_database.name - schema = snowflake_schema.test_schema.name - name = 
"%s" - comment = "Terraform acceptance test" - on_stage = "${snowflake_database.test_database.name}.${snowflake_schema.test_schema.name}.${snowflake_stage.test_stage.name}" -} -` - return fmt.Sprintf(s, name, name, name, directory, name) -} diff --git a/pkg/resources/table.go b/pkg/resources/table.go index f0d4d77ea4..3eb8bbe823 100644 --- a/pkg/resources/table.go +++ b/pkg/resources/table.go @@ -9,6 +9,7 @@ import ( "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/helpers" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/internal/provider" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/provider/previewfeatures" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/provider/resources" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/schemas" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk" @@ -208,10 +209,10 @@ var tableSchema = map[string]*schema.Schema{ func Table() *schema.Resource { return &schema.Resource{ - CreateContext: TrackingCreateWrapper(resources.Table, CreateTable), - ReadContext: TrackingReadWrapper(resources.Table, ReadTable), - UpdateContext: TrackingUpdateWrapper(resources.Table, UpdateTable), - DeleteContext: TrackingDeleteWrapper(resources.Table, DeleteTable), + CreateContext: PreviewFeatureCreateContextWrapper(string(previewfeatures.TableResource), TrackingCreateWrapper(resources.Table, CreateTable)), + ReadContext: PreviewFeatureReadContextWrapper(string(previewfeatures.TableResource), TrackingReadWrapper(resources.Table, ReadTable)), + UpdateContext: PreviewFeatureUpdateContextWrapper(string(previewfeatures.TableResource), TrackingUpdateWrapper(resources.Table, UpdateTable)), + DeleteContext: PreviewFeatureDeleteContextWrapper(string(previewfeatures.TableResource), TrackingDeleteWrapper(resources.Table, DeleteTable)), CustomizeDiff: TrackingCustomDiffWrapper(resources.Table, customdiff.All( ComputedIfAnyAttributeChanged(tableSchema, FullyQualifiedNameAttributeName, "name"), 
diff --git a/pkg/resources/table_column_masking_policy_application.go b/pkg/resources/table_column_masking_policy_application.go index e48d4447ad..87a15a2ca6 100644 --- a/pkg/resources/table_column_masking_policy_application.go +++ b/pkg/resources/table_column_masking_policy_application.go @@ -4,6 +4,7 @@ import ( "context" "fmt" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/provider/previewfeatures" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/provider/resources" "github.com/hashicorp/terraform-plugin-sdk/v2/diag" @@ -37,9 +38,9 @@ var tableColumnMaskingPolicyApplicationSchema = map[string]*schema.Schema{ func TableColumnMaskingPolicyApplication() *schema.Resource { return &schema.Resource{ Description: "Applies a masking policy to a table column.", - CreateContext: TrackingCreateWrapper(resources.TableColumnMaskingPolicyApplication, CreateTableColumnMaskingPolicyApplication), - ReadContext: TrackingReadWrapper(resources.TableColumnMaskingPolicyApplication, ReadTableColumnMaskingPolicyApplication), - DeleteContext: TrackingDeleteWrapper(resources.TableColumnMaskingPolicyApplication, DeleteTableColumnMaskingPolicyApplication), + CreateContext: PreviewFeatureCreateContextWrapper(string(previewfeatures.TableColumnMaskingPolicyApplicationResource), TrackingCreateWrapper(resources.TableColumnMaskingPolicyApplication, CreateTableColumnMaskingPolicyApplication)), + ReadContext: PreviewFeatureReadContextWrapper(string(previewfeatures.TableColumnMaskingPolicyApplicationResource), TrackingReadWrapper(resources.TableColumnMaskingPolicyApplication, ReadTableColumnMaskingPolicyApplication)), + DeleteContext: PreviewFeatureDeleteContextWrapper(string(previewfeatures.TableColumnMaskingPolicyApplicationResource), TrackingDeleteWrapper(resources.TableColumnMaskingPolicyApplication, DeleteTableColumnMaskingPolicyApplication)), Schema: tableColumnMaskingPolicyApplicationSchema, Importer: &schema.ResourceImporter{ diff --git 
a/pkg/resources/table_constraint.go b/pkg/resources/table_constraint.go index 92e2f6a3f7..8969da1201 100644 --- a/pkg/resources/table_constraint.go +++ b/pkg/resources/table_constraint.go @@ -5,6 +5,7 @@ import ( "fmt" "strings" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/provider/previewfeatures" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/provider/resources" "github.com/hashicorp/terraform-plugin-sdk/v2/diag" @@ -182,10 +183,10 @@ var tableConstraintSchema = map[string]*schema.Schema{ func TableConstraint() *schema.Resource { return &schema.Resource{ - CreateContext: TrackingCreateWrapper(resources.TableConstraint, CreateTableConstraint), - ReadContext: TrackingReadWrapper(resources.TableConstraint, ReadTableConstraint), - UpdateContext: TrackingUpdateWrapper(resources.TableConstraint, UpdateTableConstraint), - DeleteContext: TrackingDeleteWrapper(resources.TableConstraint, DeleteTableConstraint), + CreateContext: PreviewFeatureCreateContextWrapper(string(previewfeatures.TableConstraintResource), TrackingCreateWrapper(resources.TableConstraint, CreateTableConstraint)), + ReadContext: PreviewFeatureReadContextWrapper(string(previewfeatures.TableConstraintResource), TrackingReadWrapper(resources.TableConstraint, ReadTableConstraint)), + UpdateContext: PreviewFeatureUpdateContextWrapper(string(previewfeatures.TableConstraintResource), TrackingUpdateWrapper(resources.TableConstraint, UpdateTableConstraint)), + DeleteContext: PreviewFeatureDeleteContextWrapper(string(previewfeatures.TableConstraintResource), TrackingDeleteWrapper(resources.TableConstraint, DeleteTableConstraint)), Schema: tableConstraintSchema, Importer: &schema.ResourceImporter{ diff --git a/pkg/resources/tag_association.go b/pkg/resources/tag_association.go index e76e5fc69f..f4f55e38dc 100644 --- a/pkg/resources/tag_association.go +++ b/pkg/resources/tag_association.go @@ -21,13 +21,6 @@ import ( ) var tagAssociationSchema = map[string]*schema.Schema{ - 
"object_name": { - Type: schema.TypeString, - Optional: true, - Description: "Specifies the object identifier for the tag association.", - ForceNew: true, - Deprecated: "Use `object_identifiers` instead", - }, "object_identifiers": { Type: schema.TypeSet, MinItems: 1, diff --git a/pkg/resources/tag_masking_policy_association.go b/pkg/resources/tag_masking_policy_association.go deleted file mode 100644 index 84c4ed21bf..0000000000 --- a/pkg/resources/tag_masking_policy_association.go +++ /dev/null @@ -1,190 +0,0 @@ -package resources - -import ( - "context" - "database/sql" - "errors" - "fmt" - "log" - "strings" - - "github.com/hashicorp/terraform-plugin-sdk/v2/diag" - "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" - - "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/helpers" - "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/internal/provider" - "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/provider/resources" - "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk" - "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/snowflake" -) - -const ( - tagAttachmentPolicyIDDelimiter = "|" -) - -var mpAttachmentPolicySchema = map[string]*schema.Schema{ - "tag_id": { - Type: schema.TypeString, - Required: true, - Description: "Specifies the identifier for the tag. 
Note: format must follow: \"databaseName\".\"schemaName\".\"tagName\" or \"databaseName.schemaName.tagName\" or \"databaseName|schemaName.tagName\" (snowflake_tag.tag.id)", - ForceNew: true, - ValidateDiagFunc: IsValidIdentifier[sdk.SchemaObjectIdentifier](), - }, - "masking_policy_id": { - Type: schema.TypeString, - Required: true, - ForceNew: true, - Description: "The resource id of the masking policy", - ValidateDiagFunc: IsValidIdentifier[sdk.SchemaObjectIdentifier](), - }, -} - -type attachmentID struct { - TagDatabaseName string - TagSchemaName string - TagName string - MaskingPolicyDatabaseName string - MaskingPolicySchemaName string - MaskingPolicyName string -} - -func (v *attachmentID) String() string { - return strings.Join([]string{ - v.TagDatabaseName, - v.TagSchemaName, - v.TagName, - v.MaskingPolicyDatabaseName, - v.MaskingPolicySchemaName, - v.MaskingPolicyName, - }, tagAttachmentPolicyIDDelimiter) -} - -func parseAttachmentID(id string) (*attachmentID, error) { - parts := strings.Split(id, tagAttachmentPolicyIDDelimiter) - if len(parts) != 6 { - return nil, fmt.Errorf("6 fields allowed") - } - return &attachmentID{ - TagDatabaseName: parts[0], - TagSchemaName: parts[1], - TagName: parts[2], - MaskingPolicyDatabaseName: parts[3], - MaskingPolicySchemaName: parts[4], - MaskingPolicyName: parts[5], - }, nil -} - -// Schema returns a pointer to the resource representing a schema. 
-func TagMaskingPolicyAssociation() *schema.Resource { - return &schema.Resource{ - CreateContext: TrackingCreateWrapper(resources.TagMaskingPolicyAssociation, CreateContextTagMaskingPolicyAssociation), - ReadContext: TrackingReadWrapper(resources.TagMaskingPolicyAssociation, ReadContextTagMaskingPolicyAssociation), - DeleteContext: TrackingDeleteWrapper(resources.TagMaskingPolicyAssociation, DeleteContextTagMaskingPolicyAssociation), - - Schema: mpAttachmentPolicySchema, - Importer: &schema.ResourceImporter{ - StateContext: schema.ImportStatePassthroughContext, - }, - Description: "Attach a masking policy to a tag. Requires a current warehouse to be set. Either with SNOWFLAKE_WAREHOUSE env variable or in current session. If no warehouse is provided, a temporary warehouse will be created.", - DeprecationMessage: deprecatedResourceDescription(string(resources.Tag)), - } -} - -func CreateContextTagMaskingPolicyAssociation(ctx context.Context, d *schema.ResourceData, meta interface{}) diag.Diagnostics { - client := meta.(*provider.Context).Client - - value := d.Get("tag_id").(string) - tagObjectIdentifier, err := helpers.DecodeSnowflakeParameterID(value) - if err != nil { - return diag.FromErr(err) - } - tagId := tagObjectIdentifier.(sdk.SchemaObjectIdentifier) - - value = d.Get("masking_policy_id").(string) - maskingPolicyObjectIdentifier, err := helpers.DecodeSnowflakeParameterID(value) - if err != nil { - return diag.FromErr(err) - } - maskingPolicyId := maskingPolicyObjectIdentifier.(sdk.SchemaObjectIdentifier) - - set := sdk.NewTagSetRequest().WithMaskingPolicies([]sdk.SchemaObjectIdentifier{maskingPolicyId}) - if err := client.Tags.Alter(ctx, sdk.NewAlterTagRequest(tagId).WithSet(set)); err != nil { - return diag.FromErr(err) - } - aid := attachmentID{ - TagDatabaseName: tagId.DatabaseName(), - TagSchemaName: tagId.SchemaName(), - TagName: tagId.Name(), - MaskingPolicyDatabaseName: maskingPolicyId.DatabaseName(), - MaskingPolicySchemaName: 
maskingPolicyId.SchemaName(), - MaskingPolicyName: maskingPolicyId.Name(), - } - fmt.Printf("attachment id: %s\n", aid.String()) - d.SetId(aid.String()) - return ReadContextTagMaskingPolicyAssociation(ctx, d, meta) -} - -func ReadContextTagMaskingPolicyAssociation(ctx context.Context, d *schema.ResourceData, meta interface{}) diag.Diagnostics { - diags := diag.Diagnostics{} - client := meta.(*provider.Context).Client - db := client.GetConn().DB - aid, err := parseAttachmentID(d.Id()) - if err != nil { - return diag.FromErr(err) - } - - // create temp warehouse to query the tag, and make sure to clean it up - warehouse, err := client.ContextFunctions.CurrentWarehouse(ctx) - if err != nil { - return diag.FromErr(err) - } - if warehouse == "" { - log.Printf("[DEBUG] no current warehouse set, creating a temporary warehouse") - randomWarehouseName := fmt.Sprintf("terraform-provider-snowflake-%v", helpers.RandomString()) - wid := sdk.NewAccountObjectIdentifier(randomWarehouseName) - if err := client.Warehouses.Create(ctx, wid, nil); err != nil { - return diag.FromErr(err) - } - defer func() { - if err := client.Warehouses.Drop(ctx, wid, nil); err != nil { - log.Printf("[WARN] error cleaning up temp warehouse %v", err) - } - }() - if err := client.Sessions.UseWarehouse(ctx, wid); err != nil { - return diag.FromErr(err) - } - } - // show attached masking policy - tid := sdk.NewSchemaObjectIdentifier(aid.TagDatabaseName, aid.TagSchemaName, aid.TagName) - mid := sdk.NewSchemaObjectIdentifier(aid.MaskingPolicyDatabaseName, aid.MaskingPolicySchemaName, aid.MaskingPolicyName) - builder := snowflake.NewTagBuilder(tid).WithMaskingPolicy(mid) - row := snowflake.QueryRow(db, builder.ShowAttachedPolicy()) - _, err = snowflake.ScanTagPolicy(row) - if errors.Is(err, sql.ErrNoRows) { - // If not found, mark resource to be removed from state file during apply or refresh - log.Printf("[DEBUG] attached policy (%s) not found", d.Id()) - d.SetId("") - return nil - } - if err != nil { - 
return diag.FromErr(err) - } - - return diags -} - -func DeleteContextTagMaskingPolicyAssociation(ctx context.Context, d *schema.ResourceData, meta interface{}) diag.Diagnostics { - client := meta.(*provider.Context).Client - aid, err := parseAttachmentID(d.Id()) - if err != nil { - return diag.FromErr(err) - } - tid := sdk.NewSchemaObjectIdentifier(aid.TagDatabaseName, aid.TagSchemaName, aid.TagName) - mid := sdk.NewSchemaObjectIdentifier(aid.MaskingPolicyDatabaseName, aid.MaskingPolicySchemaName, aid.MaskingPolicyName) - unset := sdk.NewTagUnsetRequest().WithMaskingPolicies([]sdk.SchemaObjectIdentifier{mid}) - if err := client.Tags.Alter(ctx, sdk.NewAlterTagRequest(tid).WithUnset(unset)); err != nil { - return diag.FromErr(err) - } - d.SetId("") - return nil -} diff --git a/pkg/resources/tag_masking_policy_association_acceptance_test.go b/pkg/resources/tag_masking_policy_association_acceptance_test.go deleted file mode 100644 index 2493681dd1..0000000000 --- a/pkg/resources/tag_masking_policy_association_acceptance_test.go +++ /dev/null @@ -1,121 +0,0 @@ -package resources_test - -import ( - "fmt" - "testing" - - acc "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance" - "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/testenvs" - "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk" - "github.com/hashicorp/terraform-plugin-testing/config" - "github.com/hashicorp/terraform-plugin-testing/helper/resource" - "github.com/hashicorp/terraform-plugin-testing/tfversion" -) - -func TestAcc_TagMaskingPolicyAssociationBasic(t *testing.T) { - _ = testenvs.GetOrSkipTest(t, testenvs.EnableAcceptance) - acc.TestAccPreCheck(t) - tag, tagCleanup := acc.TestClient().Tag.CreateTag(t) - t.Cleanup(tagCleanup) - accName := acc.TestClient().Ids.Alpha() - - resource.Test(t, resource.TestCase{ - ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, - TerraformVersionChecks: []tfversion.TerraformVersionCheck{ - 
tfversion.RequireAbove(tfversion.Version1_5_0), - }, - PreCheck: func() { acc.TestAccPreCheck(t) }, - CheckDestroy: nil, - Steps: []resource.TestStep{ - { - Config: tagAttachmentConfig(accName, acc.TestDatabaseName, acc.TestSchemaName, tag.ID()), - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr("snowflake_tag_masking_policy_association.test", "masking_policy_id", fmt.Sprintf("%s.%s.%s", acc.TestDatabaseName, acc.TestSchemaName, accName)), - resource.TestCheckResourceAttr("snowflake_tag_masking_policy_association.test", "tag_id", tag.ID().FullyQualifiedName()), - ), - }, - }, - }) -} - -func TestAcc_TagMaskingPolicyAssociationsystem_functions_integration_testComplete(t *testing.T) { - name := acc.TestClient().Ids.Alpha() - resourceName := "snowflake_tag.test" - m := func() map[string]config.Variable { - return map[string]config.Variable{ - "name": config.StringVariable(name), - "database": config.StringVariable(acc.TestDatabaseName), - "schema": config.StringVariable(acc.TestSchemaName), - "comment": config.StringVariable("Terraform acceptance test"), - } - } - variableSet2 := m() - variableSet2["comment"] = config.StringVariable("Terraform acceptance test - updated") - resource.Test(t, resource.TestCase{ - ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, - PreCheck: func() { acc.TestAccPreCheck(t) }, - TerraformVersionChecks: []tfversion.TerraformVersionCheck{ - tfversion.RequireAbove(tfversion.Version1_5_0), - }, - CheckDestroy: nil, - Steps: []resource.TestStep{ - { - ConfigDirectory: acc.ConfigurationDirectory("TestAcc_TagMaskingPolicyAssociation/basic"), - ConfigVariables: m(), - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr(resourceName, "name", name), - resource.TestCheckResourceAttr(resourceName, "database", acc.TestDatabaseName), - resource.TestCheckResourceAttr(resourceName, "schema", acc.TestSchemaName), - resource.TestCheckResourceAttr(resourceName, "allowed_values.#", "2"), - 
resource.TestCheckResourceAttr(resourceName, "allowed_values.0", "alv1"), - resource.TestCheckResourceAttr(resourceName, "allowed_values.1", "alv2"), - resource.TestCheckResourceAttr(resourceName, "comment", "Terraform acceptance test"), - - resource.TestCheckResourceAttr("snowflake_tag_masking_policy_association.test", "masking_policy_id", fmt.Sprintf("%s.%s.%s", acc.TestDatabaseName, acc.TestSchemaName, name)), - resource.TestCheckResourceAttr("snowflake_tag_masking_policy_association.test", "tag_id", fmt.Sprintf("%s.%s.%s", acc.TestDatabaseName, acc.TestSchemaName, name)), - ), - }, - // test - change comment - { - ConfigDirectory: acc.ConfigurationDirectory("TestAcc_TagMaskingPolicyAssociation/basic"), - ConfigVariables: variableSet2, - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr(resourceName, "name", name), - resource.TestCheckResourceAttr(resourceName, "database", acc.TestDatabaseName), - resource.TestCheckResourceAttr(resourceName, "schema", acc.TestSchemaName), - resource.TestCheckResourceAttr(resourceName, "comment", "Terraform acceptance test - updated"), - ), - }, - // test - import - { - ConfigDirectory: acc.ConfigurationDirectory("TestAcc_TagMaskingPolicyAssociation/basic"), - ConfigVariables: variableSet2, - ResourceName: resourceName, - ImportState: true, - ImportStateVerify: true, - }, - }, - }) -} - -func tagAttachmentConfig(n string, databaseName string, schemaName string, tagId sdk.SchemaObjectIdentifier) string { - return fmt.Sprintf(` -resource "snowflake_masking_policy" "test" { - name = "%[1]v" - database = "%[2]s" - schema = "%[3]s" - argument { - name = "val" - type = "VARCHAR" - } - body = "case when current_role() in ('ANALYST') then val else sha2(val, 512) end" - return_data_type = "VARCHAR(16777216)" - comment = "Terraform acceptance test" -} - -resource "snowflake_tag_masking_policy_association" "test" { - tag_id = "\"%s\".\"%s\".\"%s\"" - masking_policy_id = 
"${snowflake_masking_policy.test.database}.${snowflake_masking_policy.test.schema}.${snowflake_masking_policy.test.name}" -} -`, n, databaseName, schemaName, tagId.DatabaseName(), tagId.SchemaName(), tagId.Name()) -} diff --git a/pkg/resources/testdata/TestAcc_DatabaseRemovedOutsideOfTerraform/test.tf b/pkg/resources/testdata/TestAcc_DatabaseRemovedOutsideOfTerraform/test.tf deleted file mode 100644 index 180a0c22bd..0000000000 --- a/pkg/resources/testdata/TestAcc_DatabaseRemovedOutsideOfTerraform/test.tf +++ /dev/null @@ -1,4 +0,0 @@ -resource "snowflake_database_old" "db" { - name = var.db - comment = "test comment" -} diff --git a/pkg/resources/testdata/TestAcc_DatabaseRemovedOutsideOfTerraform/variables.tf b/pkg/resources/testdata/TestAcc_DatabaseRemovedOutsideOfTerraform/variables.tf deleted file mode 100644 index 5ed7b249f5..0000000000 --- a/pkg/resources/testdata/TestAcc_DatabaseRemovedOutsideOfTerraform/variables.tf +++ /dev/null @@ -1,3 +0,0 @@ -variable "db" { - type = string -} diff --git a/pkg/resources/testdata/TestAcc_Database_DefaultDataRetentionTime/WithDataRetentionSet/test.tf b/pkg/resources/testdata/TestAcc_Database_DefaultDataRetentionTime/WithDataRetentionSet/test.tf deleted file mode 100644 index 2f9535a0f1..0000000000 --- a/pkg/resources/testdata/TestAcc_Database_DefaultDataRetentionTime/WithDataRetentionSet/test.tf +++ /dev/null @@ -1,4 +0,0 @@ -resource "snowflake_database_old" "test" { - name = var.database - data_retention_time_in_days = var.database_data_retention_time -} diff --git a/pkg/resources/testdata/TestAcc_Database_DefaultDataRetentionTime/WithDataRetentionSet/variables.tf b/pkg/resources/testdata/TestAcc_Database_DefaultDataRetentionTime/WithDataRetentionSet/variables.tf deleted file mode 100644 index 32f9fb7140..0000000000 --- a/pkg/resources/testdata/TestAcc_Database_DefaultDataRetentionTime/WithDataRetentionSet/variables.tf +++ /dev/null @@ -1,7 +0,0 @@ -variable "database" { - type = string -} - -variable 
"database_data_retention_time" { - type = number -} diff --git a/pkg/resources/testdata/TestAcc_Database_DefaultDataRetentionTime/WithoutDataRetentionSet/test.tf b/pkg/resources/testdata/TestAcc_Database_DefaultDataRetentionTime/WithoutDataRetentionSet/test.tf deleted file mode 100644 index c3386f300a..0000000000 --- a/pkg/resources/testdata/TestAcc_Database_DefaultDataRetentionTime/WithoutDataRetentionSet/test.tf +++ /dev/null @@ -1,3 +0,0 @@ -resource "snowflake_database_old" "test" { - name = var.database -} diff --git a/pkg/resources/testdata/TestAcc_Database_DefaultDataRetentionTime/WithoutDataRetentionSet/variables.tf b/pkg/resources/testdata/TestAcc_Database_DefaultDataRetentionTime/WithoutDataRetentionSet/variables.tf deleted file mode 100644 index bfdd9eeb3c..0000000000 --- a/pkg/resources/testdata/TestAcc_Database_DefaultDataRetentionTime/WithoutDataRetentionSet/variables.tf +++ /dev/null @@ -1,3 +0,0 @@ -variable "database" { - type = string -} diff --git a/pkg/resources/testdata/TestAcc_Function/complex/test.tf b/pkg/resources/testdata/TestAcc_Function/complex/test.tf deleted file mode 100644 index d8fac5a7b8..0000000000 --- a/pkg/resources/testdata/TestAcc_Function/complex/test.tf +++ /dev/null @@ -1,25 +0,0 @@ -resource "snowflake_function" "f" { - database = var.database - schema = var.schema - name = var.name - arguments { - name = "d" - type = "FLOAT" - } - language = "javascript" - return_type = "FLOAT" - return_behavior = "VOLATILE" - null_input_behavior = "CALLED ON NULL INPUT" - comment = var.comment - statement = < **Caution: Preview Feature** This feature is considered a preview feature in the provider, regardless of the state of the resource in Snowflake. We do not guarantee its stability. It will be reworked and marked as a stable feature in future releases. Breaking changes are expected, even without bumping the major version. 
To use this feature, add the relevant feature name to `preview_features_enabled field` in the [provider configuration](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/latest/docs#schema). Please always refer to the [Getting Help](https://github.com/Snowflake-Labs/terraform-provider-snowflake?tab=readme-ov-file#getting-help) section in our Github repo to best determine how to get help for your questions. + # {{.Name}} ({{.Type}}) {{ .Description | trimspace }} diff --git a/templates/data-sources/account_roles.md.tmpl b/templates/data-sources/account_roles.md.tmpl new file mode 100644 index 0000000000..46d1225570 --- /dev/null +++ b/templates/data-sources/account_roles.md.tmpl @@ -0,0 +1,22 @@ +--- +page_title: "{{.Name}} {{.Type}} - {{.ProviderName}}" +subcategory: "" +description: |- +{{ if gt (len (split .Description "")) 1 -}} +{{ index (split .Description "") 1 | plainmarkdown | trimspace | prefixlines " " }} +{{- else -}} +{{ .Description | plainmarkdown | trimspace | prefixlines " " }} +{{- end }} +--- + +# {{.Name}} ({{.Type}}) + +{{ .Description | trimspace }} + +{{ if .HasExample -}} +## Example Usage + +{{ tffile (printf "examples/data-sources/%s/data-source.tf" .Name)}} +{{- end }} + +{{ .SchemaMarkdown | trimspace }} diff --git a/templates/data-sources/accounts.md.tmpl b/templates/data-sources/accounts.md.tmpl new file mode 100644 index 0000000000..46d1225570 --- /dev/null +++ b/templates/data-sources/accounts.md.tmpl @@ -0,0 +1,22 @@ +--- +page_title: "{{.Name}} {{.Type}} - {{.ProviderName}}" +subcategory: "" +description: |- +{{ if gt (len (split .Description "")) 1 -}} +{{ index (split .Description "") 1 | plainmarkdown | trimspace | prefixlines " " }} +{{- else -}} +{{ .Description | plainmarkdown | trimspace | prefixlines " " }} +{{- end }} +--- + +# {{.Name}} ({{.Type}}) + +{{ .Description | trimspace }} + +{{ if .HasExample -}} +## Example Usage + +{{ tffile (printf "examples/data-sources/%s/data-source.tf" .Name)}} +{{- end }} + 
+{{ .SchemaMarkdown | trimspace }} diff --git a/templates/data-sources/connections.md.tmpl b/templates/data-sources/connections.md.tmpl index e974f2a33b..46d1225570 100644 --- a/templates/data-sources/connections.md.tmpl +++ b/templates/data-sources/connections.md.tmpl @@ -9,8 +9,6 @@ description: |- {{- end }} --- -!> **V1 release candidate** This data source is a release candidate for the V1. We do not expect significant changes in it before the V1. We will welcome any feedback and adjust the data source if needed. Any errors reported will be resolved with a higher priority. We encourage checking this data source out before the V1 release. Please follow the [migration guide](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/MIGRATION_GUIDE.md#v0970--v0980) to use it. - # {{.Name}} ({{.Type}}) {{ .Description | trimspace }} diff --git a/templates/data-sources/cortex_search_services.md.tmpl b/templates/data-sources/cortex_search_services.md.tmpl index 1e81e2dce4..71f3d05df6 100644 --- a/templates/data-sources/cortex_search_services.md.tmpl +++ b/templates/data-sources/cortex_search_services.md.tmpl @@ -9,7 +9,7 @@ description: |- {{- end }} --- -!> **Disclaimer for Cortex Search service** Note that Cortex Search is a Private Preview feature as such, should be used only with non-production data even when using Snowflake's Terraform Provider. Also, note that the Terraform Provider is not covered by Snowflake's support team; the Product and Engineering teams are available for any questions. However, please contact the Cortex Search team for any issues with this object. +!> **Caution: Preview Feature** This feature is considered a preview feature in the provider, regardless of the state of the resource in Snowflake. We do not guarantee its stability. It will be reworked and marked as a stable feature in future releases. Breaking changes are expected, even without bumping the major version. 
To use this feature, add the relevant feature name to `preview_features_enabled field` in the [provider configuration](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/latest/docs#schema). Please always refer to the [Getting Help](https://github.com/Snowflake-Labs/terraform-provider-snowflake?tab=readme-ov-file#getting-help) section in our Github repo to best determine how to get help for your questions. # {{.Name}} ({{.Type}}) diff --git a/templates/data-sources/database_roles.md.tmpl b/templates/data-sources/database_roles.md.tmpl index d3ff8d9c6c..46d1225570 100644 --- a/templates/data-sources/database_roles.md.tmpl +++ b/templates/data-sources/database_roles.md.tmpl @@ -9,8 +9,6 @@ description: |- {{- end }} --- -!> **V1 release candidate** This data source was reworked and is a release candidate for the V1. We do not expect significant changes in it before the V1. We will welcome any feedback and adjust the data source if needed. Any errors reported will be resolved with a higher priority. We encourage checking this data source out before the V1 release. Please follow the [migration guide](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/MIGRATION_GUIDE.md#v0920--v0930) to use it. - # {{.Name}} ({{.Type}}) {{ .Description | trimspace }} diff --git a/templates/data-sources/databases.md.tmpl b/templates/data-sources/databases.md.tmpl index d3ff8d9c6c..46d1225570 100644 --- a/templates/data-sources/databases.md.tmpl +++ b/templates/data-sources/databases.md.tmpl @@ -9,8 +9,6 @@ description: |- {{- end }} --- -!> **V1 release candidate** This data source was reworked and is a release candidate for the V1. We do not expect significant changes in it before the V1. We will welcome any feedback and adjust the data source if needed. Any errors reported will be resolved with a higher priority. We encourage checking this data source out before the V1 release. 
Please follow the [migration guide](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/MIGRATION_GUIDE.md#v0920--v0930) to use it. - # {{.Name}} ({{.Type}}) {{ .Description | trimspace }} diff --git a/templates/data-sources/grants.md.tmpl b/templates/data-sources/grants.md.tmpl index c49c97d64d..6450599660 100644 --- a/templates/data-sources/grants.md.tmpl +++ b/templates/data-sources/grants.md.tmpl @@ -9,8 +9,6 @@ description: |- {{- end }} --- -!> **V1 release candidate** This datasource was reworked and is a release candidate for the V1. We do not expect significant changes in it before the V1. We will welcome any feedback and adjust the resource if needed. Any errors reported will be resolved with a higher priority. We encourage checking this resource out before the V1 release. To migrate from older grant resources please follow the [migration guide](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/MIGRATION_GUIDE.md#migration-from-old-grant-resources-to-new-ones). - # {{.Name}} ({{.Type}}) {{ .Description | trimspace }} diff --git a/templates/data-sources/masking_policies.md.tmpl b/templates/data-sources/masking_policies.md.tmpl index abd91a8e36..46d1225570 100644 --- a/templates/data-sources/masking_policies.md.tmpl +++ b/templates/data-sources/masking_policies.md.tmpl @@ -9,8 +9,6 @@ description: |- {{- end }} --- -!> **V1 release candidate** This data source was reworked and is a release candidate for the V1. We do not expect significant changes in it before the V1. We will welcome any feedback and adjust the data source if needed. Any errors reported will be resolved with a higher priority. We encourage checking this data source out before the V1 release. Please follow the [migration guide](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/MIGRATION_GUIDE.md#v0950--v0960) to use it. 
- # {{.Name}} ({{.Type}}) {{ .Description | trimspace }} diff --git a/templates/data-sources/network_policies.md.tmpl b/templates/data-sources/network_policies.md.tmpl index d3ff8d9c6c..46d1225570 100644 --- a/templates/data-sources/network_policies.md.tmpl +++ b/templates/data-sources/network_policies.md.tmpl @@ -9,8 +9,6 @@ description: |- {{- end }} --- -!> **V1 release candidate** This data source was reworked and is a release candidate for the V1. We do not expect significant changes in it before the V1. We will welcome any feedback and adjust the data source if needed. Any errors reported will be resolved with a higher priority. We encourage checking this data source out before the V1 release. Please follow the [migration guide](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/MIGRATION_GUIDE.md#v0920--v0930) to use it. - # {{.Name}} ({{.Type}}) {{ .Description | trimspace }} diff --git a/templates/data-sources/resource_monitors.md.tmpl b/templates/data-sources/resource_monitors.md.tmpl index abd91a8e36..46d1225570 100644 --- a/templates/data-sources/resource_monitors.md.tmpl +++ b/templates/data-sources/resource_monitors.md.tmpl @@ -9,8 +9,6 @@ description: |- {{- end }} --- -!> **V1 release candidate** This data source was reworked and is a release candidate for the V1. We do not expect significant changes in it before the V1. We will welcome any feedback and adjust the data source if needed. Any errors reported will be resolved with a higher priority. We encourage checking this data source out before the V1 release. Please follow the [migration guide](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/MIGRATION_GUIDE.md#v0950--v0960) to use it. 
- # {{.Name}} ({{.Type}}) {{ .Description | trimspace }} diff --git a/templates/data-sources/roles.md.tmpl b/templates/data-sources/roles.md.tmpl deleted file mode 100644 index da95cdd5c0..0000000000 --- a/templates/data-sources/roles.md.tmpl +++ /dev/null @@ -1,27 +0,0 @@ ---- -page_title: "{{.Name}} {{.Type}} - {{.ProviderName}}" -subcategory: "" -description: |- -{{ if gt (len (split .Description "")) 1 -}} -{{ index (split .Description "") 1 | plainmarkdown | trimspace | prefixlines " " }} -{{- else -}} -{{ .Description | plainmarkdown | trimspace | prefixlines " " }} -{{- end }} ---- - -!> **V1 release candidate** This datasource was reworked and is a release candidate for the V1. We do not expect significant changes in it before the V1. We will welcome any feedback and adjust the data source if needed. Any errors reported will be resolved with a higher priority. We encourage checking this data source out before the V1 release. Please follow the [migration guide](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/MIGRATION_GUIDE.md#v0920--v0930) to use it. - - --> **Note** Fields `STARTS WITH` and `LIMIT` are currently missing. They will be added in the future. - -# {{.Name}} ({{.Type}}) - -{{ .Description | trimspace }} - -{{ if .HasExample -}} -## Example Usage - -{{ tffile (printf "examples/data-sources/%s/data-source.tf" .Name)}} -{{- end }} - -{{ .SchemaMarkdown | trimspace }} diff --git a/templates/data-sources/row_access_policies.md.tmpl b/templates/data-sources/row_access_policies.md.tmpl index abd91a8e36..46d1225570 100644 --- a/templates/data-sources/row_access_policies.md.tmpl +++ b/templates/data-sources/row_access_policies.md.tmpl @@ -9,8 +9,6 @@ description: |- {{- end }} --- -!> **V1 release candidate** This data source was reworked and is a release candidate for the V1. We do not expect significant changes in it before the V1. We will welcome any feedback and adjust the data source if needed. 
Any errors reported will be resolved with a higher priority. We encourage checking this data source out before the V1 release. Please follow the [migration guide](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/MIGRATION_GUIDE.md#v0950--v0960) to use it. - # {{.Name}} ({{.Type}}) {{ .Description | trimspace }} diff --git a/templates/data-sources/schemas.md.tmpl b/templates/data-sources/schemas.md.tmpl index 67da95dca4..8b8f8060bd 100644 --- a/templates/data-sources/schemas.md.tmpl +++ b/templates/data-sources/schemas.md.tmpl @@ -9,8 +9,6 @@ description: |- {{- end }} --- -!> **V1 release candidate** This data source was reworked and is a release candidate for the V1. We do not expect significant changes in it before the V1. We will welcome any feedback and adjust the data source if needed. Any errors reported will be resolved with a higher priority. We encourage checking this data source out before the V1 release. Please follow the [migration guide](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/MIGRATION_GUIDE.md#v0930--v0940) to use it. - -> **Note** Field `WITH PRIVILEGES` is currently missing. It will be added in the future. diff --git a/templates/data-sources/secrets.md.tmpl b/templates/data-sources/secrets.md.tmpl index e974f2a33b..46d1225570 100644 --- a/templates/data-sources/secrets.md.tmpl +++ b/templates/data-sources/secrets.md.tmpl @@ -9,8 +9,6 @@ description: |- {{- end }} --- -!> **V1 release candidate** This data source is a release candidate for the V1. We do not expect significant changes in it before the V1. We will welcome any feedback and adjust the data source if needed. Any errors reported will be resolved with a higher priority. We encourage checking this data source out before the V1 release. Please follow the [migration guide](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/MIGRATION_GUIDE.md#v0970--v0980) to use it. 
- # {{.Name}} ({{.Type}}) {{ .Description | trimspace }} diff --git a/templates/data-sources/security_integrations.md.tmpl b/templates/data-sources/security_integrations.md.tmpl index d3ff8d9c6c..46d1225570 100644 --- a/templates/data-sources/security_integrations.md.tmpl +++ b/templates/data-sources/security_integrations.md.tmpl @@ -9,8 +9,6 @@ description: |- {{- end }} --- -!> **V1 release candidate** This data source was reworked and is a release candidate for the V1. We do not expect significant changes in it before the V1. We will welcome any feedback and adjust the data source if needed. Any errors reported will be resolved with a higher priority. We encourage checking this data source out before the V1 release. Please follow the [migration guide](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/MIGRATION_GUIDE.md#v0920--v0930) to use it. - # {{.Name}} ({{.Type}}) {{ .Description | trimspace }} diff --git a/templates/data-sources/streamlits.md.tmpl b/templates/data-sources/streamlits.md.tmpl index 0b004f8501..46d1225570 100644 --- a/templates/data-sources/streamlits.md.tmpl +++ b/templates/data-sources/streamlits.md.tmpl @@ -9,8 +9,6 @@ description: |- {{- end }} --- -!> **V1 release candidate** This data source was reworked and is a release candidate for the V1. We do not expect significant changes in it before the V1. We will welcome any feedback and adjust the data source if needed. Any errors reported will be resolved with a higher priority. We encourage checking this data source out before the V1 release. Please follow the [migration guide](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/MIGRATION_GUIDE.md#v0930--v0940) to use it. 
- # {{.Name}} ({{.Type}}) {{ .Description | trimspace }} diff --git a/templates/data-sources/streams.md.tmpl b/templates/data-sources/streams.md.tmpl index daa12f5a7e..46d1225570 100644 --- a/templates/data-sources/streams.md.tmpl +++ b/templates/data-sources/streams.md.tmpl @@ -9,8 +9,6 @@ description: |- {{- end }} --- -!> **V1 release candidate** This data source was reworked and is a release candidate for the V1. We do not expect significant changes in it before the V1. We will welcome any feedback and adjust the data source if needed. Any errors reported will be resolved with a higher priority. We encourage checking this data source out before the V1 release. Please follow the [migration guide](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/MIGRATION_GUIDE.md#v0970--v0980) to use it. - # {{.Name}} ({{.Type}}) {{ .Description | trimspace }} diff --git a/templates/data-sources/system_get_aws_sns_iam_policy.md.tmpl b/templates/data-sources/system_get_aws_sns_iam_policy.md.tmpl index 99dae46eb4..eb59148f75 100644 --- a/templates/data-sources/system_get_aws_sns_iam_policy.md.tmpl +++ b/templates/data-sources/system_get_aws_sns_iam_policy.md.tmpl @@ -9,6 +9,8 @@ description: |- {{- end }} --- +!> **Caution: Preview Feature** This feature is considered a preview feature in the provider, regardless of the state of the resource in Snowflake. We do not guarantee its stability. It will be reworked and marked as a stable feature in future releases. Breaking changes are expected, even without bumping the major version. To use this feature, add the relevant feature name to `preview_features_enabled field` in the [provider configuration](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/latest/docs#schema). Please always refer to the [Getting Help](https://github.com/Snowflake-Labs/terraform-provider-snowflake?tab=readme-ov-file#getting-help) section in our Github repo to best determine how to get help for your questions. 
+ # {{.Name}} ({{.Type}}) {{ .Description | trimspace }} diff --git a/templates/data-sources/tags.md.tmpl b/templates/data-sources/tags.md.tmpl index 2aef1476da..46d1225570 100644 --- a/templates/data-sources/tags.md.tmpl +++ b/templates/data-sources/tags.md.tmpl @@ -9,8 +9,6 @@ description: |- {{- end }} --- -!> **V1 release candidate** This data source is a release candidate for the V1. We do not expect significant changes in it before the V1. We will welcome any feedback and adjust the data source if needed. Any errors reported will be resolved with a higher priority. We encourage checking this data source out before the V1 release. Please follow the [migration guide](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/MIGRATION_GUIDE.md#v0980--v0990) to use it. - # {{.Name}} ({{.Type}}) {{ .Description | trimspace }} diff --git a/templates/data-sources/tasks.md.tmpl b/templates/data-sources/tasks.md.tmpl index 9173876ceb..46d1225570 100644 --- a/templates/data-sources/tasks.md.tmpl +++ b/templates/data-sources/tasks.md.tmpl @@ -9,8 +9,6 @@ description: |- {{- end }} --- -!> **V1 release candidate** This data source was reworked and is a release candidate for the V1. We do not expect significant changes in it before the V1. We will welcome any feedback and adjust the data source if needed. Any errors reported will be resolved with a higher priority. We encourage checking this data source out before the V1 release. Please follow the [migration guide](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/MIGRATION_GUIDE.md#v0980--v0990) to use it. 
- # {{.Name}} ({{.Type}}) {{ .Description | trimspace }} diff --git a/templates/data-sources/users.md.tmpl b/templates/data-sources/users.md.tmpl index d3ff8d9c6c..46d1225570 100644 --- a/templates/data-sources/users.md.tmpl +++ b/templates/data-sources/users.md.tmpl @@ -9,8 +9,6 @@ description: |- {{- end }} --- -!> **V1 release candidate** This data source was reworked and is a release candidate for the V1. We do not expect significant changes in it before the V1. We will welcome any feedback and adjust the data source if needed. Any errors reported will be resolved with a higher priority. We encourage checking this data source out before the V1 release. Please follow the [migration guide](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/MIGRATION_GUIDE.md#v0920--v0930) to use it. - # {{.Name}} ({{.Type}}) {{ .Description | trimspace }} diff --git a/templates/data-sources/views.md.tmpl b/templates/data-sources/views.md.tmpl index e34e4a1d07..46d1225570 100644 --- a/templates/data-sources/views.md.tmpl +++ b/templates/data-sources/views.md.tmpl @@ -9,8 +9,6 @@ description: |- {{- end }} --- -!> **V1 release candidate** This data source was reworked and is a release candidate for the V1. We do not expect significant changes in it before the V1. We will welcome any feedback and adjust the data source if needed. Any errors reported will be resolved with a higher priority. We encourage checking this data source out before the V1 release. Please follow the [migration guide](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/MIGRATION_GUIDE.md#v094x--v0950) to use it. 
- # {{.Name}} ({{.Type}}) {{ .Description | trimspace }} diff --git a/templates/data-sources/warehouses.md.tmpl b/templates/data-sources/warehouses.md.tmpl index d3ff8d9c6c..46d1225570 100644 --- a/templates/data-sources/warehouses.md.tmpl +++ b/templates/data-sources/warehouses.md.tmpl @@ -9,8 +9,6 @@ description: |- {{- end }} --- -!> **V1 release candidate** This data source was reworked and is a release candidate for the V1. We do not expect significant changes in it before the V1. We will welcome any feedback and adjust the data source if needed. Any errors reported will be resolved with a higher priority. We encourage checking this data source out before the V1 release. Please follow the [migration guide](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/MIGRATION_GUIDE.md#v0920--v0930) to use it. - # {{.Name}} ({{.Type}}) {{ .Description | trimspace }} diff --git a/templates/guides/identifiers.md.tmpl b/templates/guides/identifiers.md.tmpl index 0af6936acf..e8ddac899a 100644 --- a/templates/guides/identifiers.md.tmpl +++ b/templates/guides/identifiers.md.tmpl @@ -15,7 +15,7 @@ For example, instead of writing ``` object_name = “\”${snowflake_table.database}\”.\”${snowflake_table.schema}\”.\”${snowflake_table.name}\”” # for procedures -object_name = “\”${snowflake_procedure.database}\”.\”${snowflake_procedure.schema}\”.\”${snowflake_procedure.name}\"(NUMBER, VARCHAR)” +object_name = “\”${snowflake_procedure_sql.database}\”.\”${snowflake_procedure_sql.schema}\”.\”${snowflake_procedure_sql.name}\"(NUMBER, VARCHAR)” ``` now we can write diff --git a/templates/index.md.tmpl b/templates/index.md.tmpl index 99bf6b0369..f545b41b60 100644 --- a/templates/index.md.tmpl +++ b/templates/index.md.tmpl @@ -5,7 +5,7 @@ description: Manage SnowflakeDB with Terraform. 
# Snowflake Provider -~> **Disclaimer** the project is still in the 0.x.x version, which means it’s still in the experimental phase (check [Go module versioning](https://go.dev/doc/modules/version-numbers#v0-number) for more details). It can be used in production but makes no stability or backward compatibility guarantees. We do not provide backward bug fixes and, therefore, always suggest using the newest version. We are providing only limited support for the provider; priorities will be assigned on a case-by-case basis. Our main current goals are stabilization, addressing existing issues, and providing the missing features (prioritizing the GA features; supporting PrPr and PuPr features are not high priorities now). With all that in mind, we aim to reach V1 with a stable, reliable, and functional provider. V1 will be free of all the above limitations. +~> **Disclaimer** The project is in v1 version, but some features are in preview. Such resources and data sources are considered a preview feature in the provider, regardless of the state of the resource in Snowflake. We do not guarantee their stability. They will be reworked and marked as a stable feature in future releases. Breaking changes in these features are expected, even without bumping the major version. They are disabled by default. To use them, add the relevant feature name to `preview_features_enabled` field in the [provider configuration](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/latest/docs#schema). The list of preview features is available below. Please always refer to the [Getting Help](https://github.com/Snowflake-Labs/terraform-provider-snowflake?tab=readme-ov-file#getting-help) section in our Github repo to best determine how to get help for your questions. ~> **Note** Please check the [migration guide](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/MIGRATION_GUIDE.md) when changing the version of the provider. 
diff --git a/templates/resources.md.tmpl b/templates/resources.md.tmpl index 624644cf17..e91b295b0b 100644 --- a/templates/resources.md.tmpl +++ b/templates/resources.md.tmpl @@ -9,6 +9,8 @@ description: |- {{- end }} --- +!> **Caution: Preview Feature** This feature is considered a preview feature in the provider, regardless of the state of the resource in Snowflake. We do not guarantee its stability. It will be reworked and marked as a stable feature in future releases. Breaking changes are expected, even without bumping the major version. To use this feature, add the relevant feature name to `preview_features_enabled` field in the [provider configuration](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/latest/docs#schema). Please always refer to the [Getting Help](https://github.com/Snowflake-Labs/terraform-provider-snowflake?tab=readme-ov-file#getting-help) section in our GitHub repo to best determine how to get help for your questions. + # {{.Name}} ({{.Type}}) {{ .Description | trimspace }} diff --git a/templates/resources/account.md.tmpl b/templates/resources/account.md.tmpl index c05e6ff4bc..7eef117525 100644 --- a/templates/resources/account.md.tmpl +++ b/templates/resources/account.md.tmpl @@ -9,8 +9,6 @@ description: |- {{- end }} --- -!> **V1 release candidate** This resource was reworked and is a release candidate for the V1. We do not expect significant changes in it before the V1. We will welcome any feedback and adjust the resource if needed. Any errors reported will be resolved with a higher priority. We encourage checking this resource out before the V1 release. Please follow the [migration guide](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/MIGRATION_GUIDE.md#v0990--v01000) to use it.
- # {{.Name}} ({{.Type}}) {{ .Description | trimspace }} diff --git a/templates/resources/account_role.md.tmpl b/templates/resources/account_role.md.tmpl index 28e2af568d..91ac3aec6c 100644 --- a/templates/resources/account_role.md.tmpl +++ b/templates/resources/account_role.md.tmpl @@ -9,8 +9,6 @@ description: |- {{- end }} --- -!> **V1 release candidate** This resource was reworked and is a release candidate for the V1. We do not expect significant changes in it before the V1. We will welcome any feedback and adjust the resource if needed. Any errors reported will be resolved with a higher priority. We encourage checking this resource out before the V1 release. Please follow the [migration guide](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/MIGRATION_GUIDE.md#v0920--v0930) to use it. - # {{.Name}} ({{.Type}}) {{ .Description | trimspace }} diff --git a/templates/resources/api_authentication_integration_with_authorization_code_grant.md.tmpl b/templates/resources/api_authentication_integration_with_authorization_code_grant.md.tmpl index fe7454c7f2..51bef84595 100644 --- a/templates/resources/api_authentication_integration_with_authorization_code_grant.md.tmpl +++ b/templates/resources/api_authentication_integration_with_authorization_code_grant.md.tmpl @@ -9,8 +9,6 @@ description: |- {{- end }} --- -!> **V1 release candidate** This resource was reworked and is a release candidate for the V1. We do not expect significant changes in it before the V1. We will welcome any feedback and adjust the resource if needed. Any errors reported will be resolved with a higher priority. We encourage checking this resource out before the V1 release. Please follow the [migration guide](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/MIGRATION_GUIDE.md#v0920--v0930) to use it. - !> **Note** The provider does not detect external changes on security integration type. 
In this case, remove the integration of wrong type manually with `terraform destroy` and recreate the resource. It will be addressed in the future. # {{.Name}} ({{.Type}}) diff --git a/templates/resources/api_authentication_integration_with_client_credentials.md.tmpl b/templates/resources/api_authentication_integration_with_client_credentials.md.tmpl index fe7454c7f2..51bef84595 100644 --- a/templates/resources/api_authentication_integration_with_client_credentials.md.tmpl +++ b/templates/resources/api_authentication_integration_with_client_credentials.md.tmpl @@ -9,8 +9,6 @@ description: |- {{- end }} --- -!> **V1 release candidate** This resource was reworked and is a release candidate for the V1. We do not expect significant changes in it before the V1. We will welcome any feedback and adjust the resource if needed. Any errors reported will be resolved with a higher priority. We encourage checking this resource out before the V1 release. Please follow the [migration guide](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/MIGRATION_GUIDE.md#v0920--v0930) to use it. - !> **Note** The provider does not detect external changes on security integration type. In this case, remove the integration of wrong type manually with `terraform destroy` and recreate the resource. It will be addressed in the future. # {{.Name}} ({{.Type}}) diff --git a/templates/resources/api_authentication_integration_with_jwt_bearer.md.tmpl b/templates/resources/api_authentication_integration_with_jwt_bearer.md.tmpl index fe7454c7f2..51bef84595 100644 --- a/templates/resources/api_authentication_integration_with_jwt_bearer.md.tmpl +++ b/templates/resources/api_authentication_integration_with_jwt_bearer.md.tmpl @@ -9,8 +9,6 @@ description: |- {{- end }} --- -!> **V1 release candidate** This resource was reworked and is a release candidate for the V1. We do not expect significant changes in it before the V1. We will welcome any feedback and adjust the resource if needed. 
Any errors reported will be resolved with a higher priority. We encourage checking this resource out before the V1 release. Please follow the [migration guide](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/MIGRATION_GUIDE.md#v0920--v0930) to use it. - !> **Note** The provider does not detect external changes on security integration type. In this case, remove the integration of wrong type manually with `terraform destroy` and recreate the resource. It will be addressed in the future. # {{.Name}} ({{.Type}}) diff --git a/templates/resources/authentication_policy.md.tmpl b/templates/resources/authentication_policy.md.tmpl index ca835354ca..14272ae5db 100644 --- a/templates/resources/authentication_policy.md.tmpl +++ b/templates/resources/authentication_policy.md.tmpl @@ -9,6 +9,8 @@ description: |- {{- end }} --- +!> **Caution: Preview Feature** This feature is considered a preview feature in the provider, regardless of the state of the resource in Snowflake. We do not guarantee its stability. It will be reworked and marked as a stable feature in future releases. Breaking changes are expected, even without bumping the major version. To use this feature, add the relevant feature name to `preview_features_enabled` field in the [provider configuration](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/latest/docs#schema). Please always refer to the [Getting Help](https://github.com/Snowflake-Labs/terraform-provider-snowflake?tab=readme-ov-file#getting-help) section in our GitHub repo to best determine how to get help for your questions. + !> **Note** According to Snowflake [docs](https://docs.snowflake.com/en/sql-reference/sql/drop-authentication-policy#usage-notes), an authentication policy cannot be dropped successfully if it is currently assigned to another object. Currently, the provider does not unassign such objects automatically. Before dropping the resource, first unassign the policy from the relevant objects.
See [guide](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/latest/docs/guides/unassigning_policies) for more details. # {{.Name}} ({{.Type}}) diff --git a/templates/resources/cortex_search_service.md.tmpl b/templates/resources/cortex_search_service.md.tmpl index 1affd212b3..769570eae4 100644 --- a/templates/resources/cortex_search_service.md.tmpl +++ b/templates/resources/cortex_search_service.md.tmpl @@ -9,7 +9,7 @@ description: |- {{- end }} --- -!> **Disclaimer for Cortex Search service** Note that Cortex Search is a Private Preview feature as such, should be used only with non-production data even when using Snowflake's Terraform Provider. Also, note that the Terraform Provider is not covered by Snowflake's support team; the Product and Engineering teams are available for any questions. However, please contact the Cortex Search team for any issues with this object. +!> **Caution: Preview Feature** This feature is considered a preview feature in the provider, regardless of the state of the resource in Snowflake. We do not guarantee its stability. It will be reworked and marked as a stable feature in future releases. Breaking changes are expected, even without bumping the major version. To use this feature, add the relevant feature name to `preview_features_enabled` field in the [provider configuration](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/latest/docs#schema). Please always refer to the [Getting Help](https://github.com/Snowflake-Labs/terraform-provider-snowflake?tab=readme-ov-file#getting-help) section in our GitHub repo to best determine how to get help for your questions.
# {{.Name}} ({{.Type}}) diff --git a/templates/resources/database.md.tmpl b/templates/resources/database.md.tmpl index 65bc7d0dc9..c861689366 100644 --- a/templates/resources/database.md.tmpl +++ b/templates/resources/database.md.tmpl @@ -9,8 +9,6 @@ description: |- {{- end }} --- -!> **V1 release candidate** This resource was reworked and is a release candidate for the V1. We do not expect significant changes in it before the V1. We will welcome any feedback and adjust the resource if needed. Any errors reported will be resolved with a higher priority. We encourage checking this resource out before the V1 release. Please follow the [migration guide](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/MIGRATION_GUIDE.md#v0920--v0930) to use it. - !> **Note** The provider does not detect external changes on database type. In this case, remove the database of wrong type manually with `terraform destroy` and recreate the resource. It will be addressed in the future. !> **Note** A database cannot be dropped successfully if it contains network rule-network policy associations. The error looks like `098507 (2BP01): Cannot drop database DATABASE as it includes network rule - policy associations.`. Currently, the provider does not unassign such objects automatically. Before dropping the resource, first unassign the network rule from the relevant objects. See [guide](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/latest/docs/guides/unassigning_policies) for more details. diff --git a/templates/resources/database_role.md.tmpl b/templates/resources/database_role.md.tmpl index fb6aff121b..91ac3aec6c 100644 --- a/templates/resources/database_role.md.tmpl +++ b/templates/resources/database_role.md.tmpl @@ -9,8 +9,6 @@ description: |- {{- end }} --- -!> **V1 release candidate** This resource was reworked and is a release candidate for the V1. We do not expect significant changes in it before the V1. 
We will welcome any feedback and adjust the resource if needed. Any errors reported will be resolved with a higher priority. We encourage checking this resource out before the V1 release. - # {{.Name}} ({{.Type}}) {{ .Description | trimspace }} diff --git a/templates/resources/external_oauth_integration.md.tmpl b/templates/resources/external_oauth_integration.md.tmpl index fe7454c7f2..51bef84595 100644 --- a/templates/resources/external_oauth_integration.md.tmpl +++ b/templates/resources/external_oauth_integration.md.tmpl @@ -9,8 +9,6 @@ description: |- {{- end }} --- -!> **V1 release candidate** This resource was reworked and is a release candidate for the V1. We do not expect significant changes in it before the V1. We will welcome any feedback and adjust the resource if needed. Any errors reported will be resolved with a higher priority. We encourage checking this resource out before the V1 release. Please follow the [migration guide](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/MIGRATION_GUIDE.md#v0920--v0930) to use it. - !> **Note** The provider does not detect external changes on security integration type. In this case, remove the integration of wrong type manually with `terraform destroy` and recreate the resource. It will be addressed in the future. # {{.Name}} ({{.Type}}) diff --git a/templates/resources/function_java.md.tmpl b/templates/resources/function_java.md.tmpl index 4092173dd0..70d8c90119 100644 --- a/templates/resources/function_java.md.tmpl +++ b/templates/resources/function_java.md.tmpl @@ -9,6 +9,8 @@ description: |- {{- end }} --- +!> **Caution: Preview Feature** This feature is considered a preview feature in the provider, regardless of the state of the resource in Snowflake. We do not guarantee its stability. It will be reworked and marked as a stable feature in future releases. Breaking changes are expected, even without bumping the major version. 
To use this feature, add the relevant feature name to `preview_features_enabled` field in the [provider configuration](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/latest/docs#schema). Please always refer to the [Getting Help](https://github.com/Snowflake-Labs/terraform-provider-snowflake?tab=readme-ov-file#getting-help) section in our GitHub repo to best determine how to get help for your questions. + -> **Note** External changes to `is_secure`, `return_results_behavior`, and `null_input_behavior` are not currently supported. They will be handled in the following versions of the provider which may still affect this resource. -> **Note** `COPY GRANTS` and `OR REPLACE` are not currently supported. diff --git a/templates/resources/function_javascript.md.tmpl b/templates/resources/function_javascript.md.tmpl index 4092173dd0..70d8c90119 100644 --- a/templates/resources/function_javascript.md.tmpl +++ b/templates/resources/function_javascript.md.tmpl @@ -9,6 +9,8 @@ description: |- {{- end }} --- +!> **Caution: Preview Feature** This feature is considered a preview feature in the provider, regardless of the state of the resource in Snowflake. We do not guarantee its stability. It will be reworked and marked as a stable feature in future releases. Breaking changes are expected, even without bumping the major version. To use this feature, add the relevant feature name to `preview_features_enabled` field in the [provider configuration](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/latest/docs#schema). Please always refer to the [Getting Help](https://github.com/Snowflake-Labs/terraform-provider-snowflake?tab=readme-ov-file#getting-help) section in our GitHub repo to best determine how to get help for your questions. + -> **Note** External changes to `is_secure`, `return_results_behavior`, and `null_input_behavior` are not currently supported.
They will be handled in the following versions of the provider which may still affect this resource. -> **Note** `COPY GRANTS` and `OR REPLACE` are not currently supported. diff --git a/templates/resources/function_python.md.tmpl b/templates/resources/function_python.md.tmpl index 32911bea98..8739210377 100644 --- a/templates/resources/function_python.md.tmpl +++ b/templates/resources/function_python.md.tmpl @@ -9,6 +9,8 @@ description: |- {{- end }} --- +!> **Caution: Preview Feature** This feature is considered a preview feature in the provider, regardless of the state of the resource in Snowflake. We do not guarantee its stability. It will be reworked and marked as a stable feature in future releases. Breaking changes are expected, even without bumping the major version. To use this feature, add the relevant feature name to `preview_features_enabled` field in the [provider configuration](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/latest/docs#schema). Please always refer to the [Getting Help](https://github.com/Snowflake-Labs/terraform-provider-snowflake?tab=readme-ov-file#getting-help) section in our GitHub repo to best determine how to get help for your questions. + -> **Note** External changes to `is_secure`, `return_results_behavior`, and `null_input_behavior` are not currently supported. They will be handled in the following versions of the provider which may still affect this resource. -> **Note** `COPY GRANTS` and `OR REPLACE` are not currently supported. diff --git a/templates/resources/function_scala.md.tmpl b/templates/resources/function_scala.md.tmpl index f452e9136e..ff73515a0c 100644 --- a/templates/resources/function_scala.md.tmpl +++ b/templates/resources/function_scala.md.tmpl @@ -9,6 +9,8 @@ description: |- {{- end }} --- +!> **Caution: Preview Feature** This feature is considered a preview feature in the provider, regardless of the state of the resource in Snowflake. We do not guarantee its stability.
It will be reworked and marked as a stable feature in future releases. Breaking changes are expected, even without bumping the major version. To use this feature, add the relevant feature name to `preview_features_enabled` field in the [provider configuration](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/latest/docs#schema). Please always refer to the [Getting Help](https://github.com/Snowflake-Labs/terraform-provider-snowflake?tab=readme-ov-file#getting-help) section in our GitHub repo to best determine how to get help for your questions. + -> **Note** External changes to `is_secure`, `return_results_behavior`, and `null_input_behavior` are not currently supported. They will be handled in the following versions of the provider which may still affect this resource. -> **Note** `COPY GRANTS` and `OR REPLACE` are not currently supported. diff --git a/templates/resources/function_sql.md.tmpl b/templates/resources/function_sql.md.tmpl index 63cede4080..7a34e9af24 100644 --- a/templates/resources/function_sql.md.tmpl +++ b/templates/resources/function_sql.md.tmpl @@ -9,6 +9,8 @@ description: |- {{- end }} --- +!> **Caution: Preview Feature** This feature is considered a preview feature in the provider, regardless of the state of the resource in Snowflake. We do not guarantee its stability. It will be reworked and marked as a stable feature in future releases. Breaking changes are expected, even without bumping the major version. To use this feature, add the relevant feature name to `preview_features_enabled` field in the [provider configuration](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/latest/docs#schema). Please always refer to the [Getting Help](https://github.com/Snowflake-Labs/terraform-provider-snowflake?tab=readme-ov-file#getting-help) section in our GitHub repo to best determine how to get help for your questions. + -> **Note** External changes to `is_secure` and `return_results_behavior` are not currently supported.
They will be handled in the following versions of the provider which may still affect this resource. -> **Note** `COPY GRANTS` and `OR REPLACE` are not currently supported. diff --git a/templates/resources/grant_account_role.md.tmpl b/templates/resources/grant_account_role.md.tmpl index d4fc5ab54a..624644cf17 100644 --- a/templates/resources/grant_account_role.md.tmpl +++ b/templates/resources/grant_account_role.md.tmpl @@ -9,8 +9,6 @@ description: |- {{- end }} --- -!> **V1 release candidate** This resource was reworked and is a release candidate for the V1. We do not expect significant changes in it before the V1. We will welcome any feedback and adjust the resource if needed. Any errors reported will be resolved with a higher priority. We encourage checking this resource out before the V1 release. To migrate from older grant resources please follow the [migration guide](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/MIGRATION_GUIDE.md#migration-from-old-grant-resources-to-new-ones). - # {{.Name}} ({{.Type}}) {{ .Description | trimspace }} diff --git a/templates/resources/grant_application_role.md.tmpl b/templates/resources/grant_application_role.md.tmpl index d4fc5ab54a..624644cf17 100644 --- a/templates/resources/grant_application_role.md.tmpl +++ b/templates/resources/grant_application_role.md.tmpl @@ -9,8 +9,6 @@ description: |- {{- end }} --- -!> **V1 release candidate** This resource was reworked and is a release candidate for the V1. We do not expect significant changes in it before the V1. We will welcome any feedback and adjust the resource if needed. Any errors reported will be resolved with a higher priority. We encourage checking this resource out before the V1 release. To migrate from older grant resources please follow the [migration guide](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/MIGRATION_GUIDE.md#migration-from-old-grant-resources-to-new-ones). 
- # {{.Name}} ({{.Type}}) {{ .Description | trimspace }} diff --git a/templates/resources/grant_database_role.md.tmpl b/templates/resources/grant_database_role.md.tmpl index d4fc5ab54a..624644cf17 100644 --- a/templates/resources/grant_database_role.md.tmpl +++ b/templates/resources/grant_database_role.md.tmpl @@ -9,8 +9,6 @@ description: |- {{- end }} --- -!> **V1 release candidate** This resource was reworked and is a release candidate for the V1. We do not expect significant changes in it before the V1. We will welcome any feedback and adjust the resource if needed. Any errors reported will be resolved with a higher priority. We encourage checking this resource out before the V1 release. To migrate from older grant resources please follow the [migration guide](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/MIGRATION_GUIDE.md#migration-from-old-grant-resources-to-new-ones). - # {{.Name}} ({{.Type}}) {{ .Description | trimspace }} diff --git a/templates/resources/grant_ownership.md.tmpl b/templates/resources/grant_ownership.md.tmpl index 59569cf852..43fc23c09a 100644 --- a/templates/resources/grant_ownership.md.tmpl +++ b/templates/resources/grant_ownership.md.tmpl @@ -10,8 +10,6 @@ description: |- {{- end }} --- -!> **V1 release candidate** This resource was reworked and is a release candidate for the V1. We do not expect significant changes in it before the V1. We will welcome any feedback and adjust the resource if needed. Any errors reported will be resolved with a higher priority. We encourage checking this resource out before the V1 release. To migrate from older grant resources please follow the [migration guide](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/MIGRATION_GUIDE.md#migration-from-old-grant-resources-to-new-ones). 
- ~> **Note** For more details about granting ownership, please visit [`GRANT OWNERSHIP` Snowflake documentation page](https://docs.snowflake.com/en/sql-reference/sql/grant-ownership). !> **Warning** Grant ownership resource still has some limitations. Delete operation is not implemented for on_future grants (you have to remove the config and then revoke ownership grant on future X manually). diff --git a/templates/resources/grant_privileges_to_account_role.md.tmpl b/templates/resources/grant_privileges_to_account_role.md.tmpl index f58bc379e2..aaabe7727c 100644 --- a/templates/resources/grant_privileges_to_account_role.md.tmpl +++ b/templates/resources/grant_privileges_to_account_role.md.tmpl @@ -10,8 +10,6 @@ description: |- {{- end }} --- -!> **V1 release candidate** This resource was reworked and is a release candidate for the V1. We do not expect significant changes in it before the V1. We will welcome any feedback and adjust the resource if needed. Any errors reported will be resolved with a higher priority. We encourage checking this resource out before the V1 release. To migrate from older grant resources please follow the [migration guide](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/MIGRATION_GUIDE.md#migration-from-old-grant-resources-to-new-ones). - {{/* SNOW-990811 */}} !> **Warning** Be careful when using `always_apply` field. It will always produce a plan (even when no changes were made) and can be harmful in some setups. For more details why we decided to introduce it to go our document explaining those design decisions (coming soon). 
diff --git a/templates/resources/grant_privileges_to_database_role.md.tmpl b/templates/resources/grant_privileges_to_database_role.md.tmpl index 3b3a2fcb70..e0532ba6ed 100644 --- a/templates/resources/grant_privileges_to_database_role.md.tmpl +++ b/templates/resources/grant_privileges_to_database_role.md.tmpl @@ -10,8 +10,6 @@ description: |- {{- end }} --- -!> **V1 release candidate** This resource was reworked and is a release candidate for the V1. We do not expect significant changes in it before the V1. We will welcome any feedback and adjust the resource if needed. Any errors reported will be resolved with a higher priority. We encourage checking this resource out before the V1 release. To migrate from older grant resources please follow the [migration guide](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/MIGRATION_GUIDE.md#migration-from-old-grant-resources-to-new-ones). - {{/* SNOW-990811 */}} !> **Warning** Be careful when using `always_apply` field. It will always produce a plan (even when no changes were made) and can be harmful in some setups. For more details why we decided to introduce it to go our document explaining those design decisions (coming soon). diff --git a/templates/resources/grant_privileges_to_share.md.tmpl b/templates/resources/grant_privileges_to_share.md.tmpl index c9c79fb2d3..19e52d362f 100644 --- a/templates/resources/grant_privileges_to_share.md.tmpl +++ b/templates/resources/grant_privileges_to_share.md.tmpl @@ -10,8 +10,6 @@ description: |- {{- end }} --- -!> **V1 release candidate** This resource was reworked and is a release candidate for the V1. We do not expect significant changes in it before the V1. We will welcome any feedback and adjust the resource if needed. Any errors reported will be resolved with a higher priority. We encourage checking this resource out before the V1 release. 
To migrate from older grant resources please follow the [migration guide](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/MIGRATION_GUIDE.md#migration-from-old-grant-resources-to-new-ones). - # {{.Name}} ({{.Type}}) {{ .Description | trimspace }} diff --git a/templates/resources/legacy_service_user.md.tmpl b/templates/resources/legacy_service_user.md.tmpl index e9e1ac462f..601b0a7c0a 100644 --- a/templates/resources/legacy_service_user.md.tmpl +++ b/templates/resources/legacy_service_user.md.tmpl @@ -9,8 +9,6 @@ description: |- {{- end }} --- -!> **V1 release candidate** This resource was reworked and is a release candidate for the V1. We do not expect significant changes in it before the V1. We will welcome any feedback and adjust the resource if needed. Any errors reported will be resolved with a higher priority. We encourage checking this resource out before the V1 release. Please follow the [migration guide](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/MIGRATION_GUIDE.md#v0960--v0970) to use it. - -> **Note** `snowflake_user_password_policy_attachment` will be reworked in the following versions of the provider which may still affect this resource. -> **Note** Attaching user policies will be handled in the following versions of the provider which may still affect this resource. diff --git a/templates/resources/masking_policy.md.tmpl b/templates/resources/masking_policy.md.tmpl index c516e8e1b1..773f857b55 100644 --- a/templates/resources/masking_policy.md.tmpl +++ b/templates/resources/masking_policy.md.tmpl @@ -9,8 +9,6 @@ description: |- {{- end }} --- -!> **V1 release candidate** This resource was reworked and is a release candidate for the V1. We do not expect significant changes in it before the V1. We will welcome any feedback and adjust the resource if needed. Any errors reported will be resolved with a higher priority. We encourage checking this resource out before the V1 release. 
Please follow the [migration guide](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/MIGRATION_GUIDE.md#v0950--v0960) to use it. - !> **Note** According to Snowflake [docs](https://docs.snowflake.com/en/sql-reference/sql/drop-masking-policy#usage-notes), a masking policy cannot be dropped successfully if it is currently assigned to another object. Currently, the provider does not unassign such objects automatically. Before dropping the resource, first unassign the policy from the relevant objects. See [guide](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/latest/docs/guides/unassigning_policies) for more details. # {{.Name}} ({{.Type}}) diff --git a/templates/resources/network_policy.md.tmpl b/templates/resources/network_policy.md.tmpl index 1432fcbee1..2efb61d0e8 100644 --- a/templates/resources/network_policy.md.tmpl +++ b/templates/resources/network_policy.md.tmpl @@ -9,8 +9,6 @@ description: |- {{- end }} --- -!> **V1 release candidate** This resource was reworked and is a release candidate for the V1. We do not expect significant changes in it before the V1. We will welcome any feedback and adjust the resource if needed. Any errors reported will be resolved with a higher priority. We encourage checking this resource out before the V1 release. Please follow the [migration guide](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/MIGRATION_GUIDE.md#v0920--v0930) to use it. - !> **Note** According to Snowflake [docs](https://docs.snowflake.com/en/sql-reference/sql/drop-network-policy#usage-notes), a network policy cannot be dropped successfully if it is currently assigned to another object. Currently, the provider does not unassign such objects automatically. Before dropping the resource, first unassign the policy from the relevant objects. See [guide](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/latest/docs/guides/unassigning_policies) for more details. 
!> **Note** Due to technical limitations in Terraform SDK, changes in `allowed_network_rule_list` and `blocked_network_rule_list` do not cause diff for `show_output` and `describe_output`. diff --git a/templates/resources/network_rule.md.tmpl b/templates/resources/network_rule.md.tmpl index c96f3e8a41..3bf06e1919 100644 --- a/templates/resources/network_rule.md.tmpl +++ b/templates/resources/network_rule.md.tmpl @@ -9,6 +9,8 @@ description: |- {{- end }} --- +!> **Caution: Preview Feature** This feature is considered a preview feature in the provider, regardless of the state of the resource in Snowflake. We do not guarantee its stability. It will be reworked and marked as a stable feature in future releases. Breaking changes are expected, even without bumping the major version. To use this feature, add the relevant feature name to `preview_features_enabled` field in the [provider configuration](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/latest/docs#schema). Please always refer to the [Getting Help](https://github.com/Snowflake-Labs/terraform-provider-snowflake?tab=readme-ov-file#getting-help) section in our GitHub repo to best determine how to get help for your questions. + !> **Note** A network rule cannot be dropped successfully if it is currently assigned to a network policy. Currently, the provider does not unassign such objects automatically. Before dropping the resource, first unassign the network rule from the relevant objects. See [guide](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/latest/docs/guides/unassigning_policies) for more details.
# {{.Name}} ({{.Type}}) diff --git a/templates/resources/oauth_integration_for_custom_clients.md.tmpl b/templates/resources/oauth_integration_for_custom_clients.md.tmpl index dc107a14ad..d66d38ca09 100644 --- a/templates/resources/oauth_integration_for_custom_clients.md.tmpl +++ b/templates/resources/oauth_integration_for_custom_clients.md.tmpl @@ -9,8 +9,6 @@ description: |- {{- end }} --- -!> **V1 release candidate** This resource was reworked and is a release candidate for the V1. We do not expect significant changes in it before the V1. We will welcome any feedback and adjust the resource if needed. Any errors reported will be resolved with a higher priority. We encourage checking this resource out before the V1 release. Please follow the [migration guide](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/MIGRATION_GUIDE.md#v0920--v0930) to use it. - !> **Note** Setting a network policy with lowercase letters does not work correctly in Snowflake (see [issue](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/3229)). As a workaround, set the network policy with uppercase letters only, or use unsafe_execute with network policy ID wrapped in `'`. !> **Note** The provider does not detect external changes on security integration type. In this case, remove the integration of wrong type manually with `terraform destroy` and recreate the resource. It will be addressed in the future. diff --git a/templates/resources/oauth_integration_for_partner_applications.md.tmpl b/templates/resources/oauth_integration_for_partner_applications.md.tmpl index fe7454c7f2..51bef84595 100644 --- a/templates/resources/oauth_integration_for_partner_applications.md.tmpl +++ b/templates/resources/oauth_integration_for_partner_applications.md.tmpl @@ -9,8 +9,6 @@ description: |- {{- end }} --- -!> **V1 release candidate** This resource was reworked and is a release candidate for the V1. We do not expect significant changes in it before the V1. 
We will welcome any feedback and adjust the resource if needed. Any errors reported will be resolved with a higher priority. We encourage checking this resource out before the V1 release. Please follow the [migration guide](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/MIGRATION_GUIDE.md#v0920--v0930) to use it. - !> **Note** The provider does not detect external changes on security integration type. In this case, remove the integration of wrong type manually with `terraform destroy` and recreate the resource. It will be addressed in the future. # {{.Name}} ({{.Type}}) diff --git a/templates/resources/password_policy.md.tmpl b/templates/resources/password_policy.md.tmpl index 2dbed59233..73a3e8b2f2 100644 --- a/templates/resources/password_policy.md.tmpl +++ b/templates/resources/password_policy.md.tmpl @@ -9,6 +9,8 @@ description: |- {{- end }} --- +!> **Caution: Preview Feature** This feature is considered a preview feature in the provider, regardless of the state of the resource in Snowflake. We do not guarantee its stability. It will be reworked and marked as a stable feature in future releases. Breaking changes are expected, even without bumping the major version. To use this feature, add the relevant feature name to the `preview_features_enabled` field in the [provider configuration](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/latest/docs#schema). Please always refer to the [Getting Help](https://github.com/Snowflake-Labs/terraform-provider-snowflake?tab=readme-ov-file#getting-help) section in our GitHub repo to best determine how to get help for your questions. + !> **Note** According to Snowflake [docs](https://docs.snowflake.com/en/sql-reference/sql/drop-password-policy#usage-notes), a password policy cannot be dropped successfully if it is currently assigned to another object. Currently, the provider does not unassign such objects automatically.
Before dropping the resource, first unassign the policy from the relevant objects. See [guide](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/latest/docs/guides/unassigning_policies) for more details. # {{.Name}} ({{.Type}}) diff --git a/templates/resources/pipe.md.tmpl b/templates/resources/pipe.md.tmpl index 5e9497b9e9..0bee000b29 100644 --- a/templates/resources/pipe.md.tmpl +++ b/templates/resources/pipe.md.tmpl @@ -10,6 +10,8 @@ description: |- {{- end }} --- +!> **Caution: Preview Feature** This feature is considered a preview feature in the provider, regardless of the state of the resource in Snowflake. We do not guarantee its stability. It will be reworked and marked as a stable feature in future releases. Breaking changes are expected, even without bumping the major version. To use this feature, add the relevant feature name to the `preview_features_enabled` field in the [provider configuration](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/latest/docs#schema). Please always refer to the [Getting Help](https://github.com/Snowflake-Labs/terraform-provider-snowflake?tab=readme-ov-file#getting-help) section in our GitHub repo to best determine how to get help for your questions. + # {{.Name}} ({{.Type}}) ~> **Note** Right now, changes for the `integration` field are not detected. This will be resolved in the [upcoming refactoring](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/ROADMAP.md#preparing-essential-ga-objects-for-the-provider-v1). For now, please try to use the [replace_triggered_by](https://developer.hashicorp.com/terraform/language/meta-arguments/lifecycle#replace_triggered_by) HCL meta-argument.
diff --git a/templates/resources/primary_connection.md.tmpl b/templates/resources/primary_connection.md.tmpl index 76cb507a90..b4b946512d 100644 --- a/templates/resources/primary_connection.md.tmpl +++ b/templates/resources/primary_connection.md.tmpl @@ -9,8 +9,6 @@ description: |- {{- end }} --- -!> **V1 release candidate** This resource is a release candidate for the V1. It is on the list of remaining GA objects for V1. We do not expect significant changes in it before the V1. We will welcome any feedback and adjust the resource if needed. Any errors reported will be resolved with a higher priority. We encourage checking this resource out before the V1 release. Please follow the [migration guide](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/MIGRATION_GUIDE.md#v0970--v0980) to use it. - # {{.Name}} ({{.Type}}) {{ .Description | trimspace }} diff --git a/templates/resources/procedure_java.md.tmpl b/templates/resources/procedure_java.md.tmpl index e67c6205b5..61ec281fff 100644 --- a/templates/resources/procedure_java.md.tmpl +++ b/templates/resources/procedure_java.md.tmpl @@ -9,6 +9,8 @@ description: |- {{- end }} --- +!> **Caution: Preview Feature** This feature is considered a preview feature in the provider, regardless of the state of the resource in Snowflake. We do not guarantee its stability. It will be reworked and marked as a stable feature in future releases. Breaking changes are expected, even without bumping the major version. To use this feature, add the relevant feature name to the `preview_features_enabled` field in the [provider configuration](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/latest/docs#schema). Please always refer to the [Getting Help](https://github.com/Snowflake-Labs/terraform-provider-snowflake?tab=readme-ov-file#getting-help) section in our GitHub repo to best determine how to get help for your questions.
+ -> **Note** External changes to `is_secure` and `null_input_behavior` are not currently supported. They will be handled in the following versions of the provider which may still affect this resource. -> **Note** `COPY GRANTS` and `OR REPLACE` are not currently supported. diff --git a/templates/resources/procedure_javascript.md.tmpl b/templates/resources/procedure_javascript.md.tmpl index e67c6205b5..61ec281fff 100644 --- a/templates/resources/procedure_javascript.md.tmpl +++ b/templates/resources/procedure_javascript.md.tmpl @@ -9,6 +9,8 @@ description: |- {{- end }} --- +!> **Caution: Preview Feature** This feature is considered a preview feature in the provider, regardless of the state of the resource in Snowflake. We do not guarantee its stability. It will be reworked and marked as a stable feature in future releases. Breaking changes are expected, even without bumping the major version. To use this feature, add the relevant feature name to the `preview_features_enabled` field in the [provider configuration](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/latest/docs#schema). Please always refer to the [Getting Help](https://github.com/Snowflake-Labs/terraform-provider-snowflake?tab=readme-ov-file#getting-help) section in our GitHub repo to best determine how to get help for your questions. + -> **Note** External changes to `is_secure` and `null_input_behavior` are not currently supported. They will be handled in the following versions of the provider which may still affect this resource. -> **Note** `COPY GRANTS` and `OR REPLACE` are not currently supported.
diff --git a/templates/resources/procedure_python.md.tmpl b/templates/resources/procedure_python.md.tmpl index c0d68e135d..2636c371fa 100644 --- a/templates/resources/procedure_python.md.tmpl +++ b/templates/resources/procedure_python.md.tmpl @@ -9,6 +9,8 @@ description: |- {{- end }} --- +!> **Caution: Preview Feature** This feature is considered a preview feature in the provider, regardless of the state of the resource in Snowflake. We do not guarantee its stability. It will be reworked and marked as a stable feature in future releases. Breaking changes are expected, even without bumping the major version. To use this feature, add the relevant feature name to the `preview_features_enabled` field in the [provider configuration](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/latest/docs#schema). Please always refer to the [Getting Help](https://github.com/Snowflake-Labs/terraform-provider-snowflake?tab=readme-ov-file#getting-help) section in our GitHub repo to best determine how to get help for your questions. + -> **Note** External changes to `is_secure` and `null_input_behavior` are not currently supported. They will be handled in the following versions of the provider which may still affect this resource. -> **Note** `COPY GRANTS` and `OR REPLACE` are not currently supported. diff --git a/templates/resources/procedure_scala.md.tmpl b/templates/resources/procedure_scala.md.tmpl index cc0445d06e..e8ea3af6eb 100644 --- a/templates/resources/procedure_scala.md.tmpl +++ b/templates/resources/procedure_scala.md.tmpl @@ -9,6 +9,8 @@ description: |- {{- end }} --- +!> **Caution: Preview Feature** This feature is considered a preview feature in the provider, regardless of the state of the resource in Snowflake. We do not guarantee its stability. It will be reworked and marked as a stable feature in future releases. Breaking changes are expected, even without bumping the major version.
To use this feature, add the relevant feature name to the `preview_features_enabled` field in the [provider configuration](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/latest/docs#schema). Please always refer to the [Getting Help](https://github.com/Snowflake-Labs/terraform-provider-snowflake?tab=readme-ov-file#getting-help) section in our GitHub repo to best determine how to get help for your questions. + -> **Note** External changes to `is_secure` and `null_input_behavior` are not currently supported. They will be handled in the following versions of the provider which may still affect this resource. -> **Note** `COPY GRANTS` and `OR REPLACE` are not currently supported. diff --git a/templates/resources/procedure_sql.md.tmpl b/templates/resources/procedure_sql.md.tmpl index a72c04a1c2..70ea3952a5 100644 --- a/templates/resources/procedure_sql.md.tmpl +++ b/templates/resources/procedure_sql.md.tmpl @@ -9,6 +9,8 @@ description: |- {{- end }} --- +!> **Caution: Preview Feature** This feature is considered a preview feature in the provider, regardless of the state of the resource in Snowflake. We do not guarantee its stability. It will be reworked and marked as a stable feature in future releases. Breaking changes are expected, even without bumping the major version. To use this feature, add the relevant feature name to the `preview_features_enabled` field in the [provider configuration](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/latest/docs#schema). Please always refer to the [Getting Help](https://github.com/Snowflake-Labs/terraform-provider-snowflake?tab=readme-ov-file#getting-help) section in our GitHub repo to best determine how to get help for your questions. + -> **Note** External changes to `is_secure` are not currently supported. They will be handled in the following versions of the provider which may still affect this resource. -> **Note** `COPY GRANTS` and `OR REPLACE` are not currently supported.
diff --git a/templates/resources/resource_monitor.md.tmpl b/templates/resources/resource_monitor.md.tmpl index 0e6a0993bb..993dea4067 100644 --- a/templates/resources/resource_monitor.md.tmpl +++ b/templates/resources/resource_monitor.md.tmpl @@ -9,8 +9,6 @@ description: |- {{- end }} --- -!> **V1 release candidate** This resource was reworked and is a release candidate for the V1. We do not expect significant changes in it before the V1. We will welcome any feedback and adjust the resource if needed. Any errors reported will be resolved with a higher priority. We encourage checking this resource out before the V1 release. Please follow the [migration guide](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/MIGRATION_GUIDE.md#v0950--v0960) to use it. - ~> **Note** For more details about resource monitor usage, please visit [this guide on Snowflake documentation page](https://docs.snowflake.com/en/user-guide/resource-monitors). **! Warning !** Due to Snowflake limitations, the following actions are not supported: diff --git a/templates/resources/row_access_policy.md.tmpl b/templates/resources/row_access_policy.md.tmpl index eeff47ba51..a150c75952 100644 --- a/templates/resources/row_access_policy.md.tmpl +++ b/templates/resources/row_access_policy.md.tmpl @@ -9,8 +9,6 @@ description: |- {{- end }} --- -!> **V1 release candidate** This resource was reworked and is a release candidate for the V1. We do not expect significant changes in it before the V1. We will welcome any feedback and adjust the resource if needed. Any errors reported will be resolved with a higher priority. We encourage checking this resource out before the V1 release. Please follow the [migration guide](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/MIGRATION_GUIDE.md#v0950--v0960) to use it. 
- !> **Note** According to Snowflake [docs](https://docs.snowflake.com/en/sql-reference/sql/drop-row-access-policy#usage-notes), a row access policy cannot be dropped successfully if it is currently assigned to another object. Currently, the provider does not unassign such objects automatically. Before dropping the resource, first unassign the policy from the relevant objects. See [guide](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/latest/docs/guides/unassigning_policies) for more details. # {{.Name}} ({{.Type}}) diff --git a/templates/resources/saml2_integration.md.tmpl b/templates/resources/saml2_integration.md.tmpl index fe7454c7f2..51bef84595 100644 --- a/templates/resources/saml2_integration.md.tmpl +++ b/templates/resources/saml2_integration.md.tmpl @@ -9,8 +9,6 @@ description: |- {{- end }} --- -!> **V1 release candidate** This resource was reworked and is a release candidate for the V1. We do not expect significant changes in it before the V1. We will welcome any feedback and adjust the resource if needed. Any errors reported will be resolved with a higher priority. We encourage checking this resource out before the V1 release. Please follow the [migration guide](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/MIGRATION_GUIDE.md#v0920--v0930) to use it. - !> **Note** The provider does not detect external changes on security integration type. In this case, remove the integration of wrong type manually with `terraform destroy` and recreate the resource. It will be addressed in the future. # {{.Name}} ({{.Type}}) diff --git a/templates/resources/schema.md.tmpl b/templates/resources/schema.md.tmpl index dea0e19916..4a3d988520 100644 --- a/templates/resources/schema.md.tmpl +++ b/templates/resources/schema.md.tmpl @@ -9,8 +9,6 @@ description: |- {{- end }} --- -!> **V1 release candidate** This resource was reworked and is a release candidate for the V1. We do not expect significant changes in it before the V1. 
We will welcome any feedback and adjust the resource if needed. Any errors reported will be resolved with a higher priority. We encourage checking this resource out before the V1 release. Please follow the [migration guide](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/MIGRATION_GUIDE.md#v0930--v0940) to use it. - -> **Note** Field `CLASSIFICATION_ROLE` is currently missing. It will be added in the future. diff --git a/templates/resources/scim_integration.md.tmpl b/templates/resources/scim_integration.md.tmpl index fe7454c7f2..51bef84595 100644 --- a/templates/resources/scim_integration.md.tmpl +++ b/templates/resources/scim_integration.md.tmpl @@ -9,8 +9,6 @@ description: |- {{- end }} --- -!> **V1 release candidate** This resource was reworked and is a release candidate for the V1. We do not expect significant changes in it before the V1. We will welcome any feedback and adjust the resource if needed. Any errors reported will be resolved with a higher priority. We encourage checking this resource out before the V1 release. Please follow the [migration guide](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/MIGRATION_GUIDE.md#v0920--v0930) to use it. - !> **Note** The provider does not detect external changes on security integration type. In this case, remove the integration of wrong type manually with `terraform destroy` and recreate the resource. It will be addressed in the future. # {{.Name}} ({{.Type}}) diff --git a/templates/resources/secondary_connection.md.tmpl b/templates/resources/secondary_connection.md.tmpl index d54554f7d1..0b24291745 100644 --- a/templates/resources/secondary_connection.md.tmpl +++ b/templates/resources/secondary_connection.md.tmpl @@ -9,8 +9,6 @@ description: |- {{- end }} --- -!> **V1 release candidate** This resource is a release candidate for the V1. It is on the list of remaining GA objects for V1. We do not expect significant changes in it before the V1. 
We will welcome any feedback and adjust the resource if needed. Any errors reported will be resolved with a higher priority. We encourage checking this resource out before the V1 release. Please follow the [migration guide](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/MIGRATION_GUIDE.md#v0970--v0980) to use it. - # {{.Name}} ({{.Type}}) {{ .Description | trimspace }} diff --git a/templates/resources/secondary_database.md.tmpl b/templates/resources/secondary_database.md.tmpl index 5abd07b360..08b8e95a47 100644 --- a/templates/resources/secondary_database.md.tmpl +++ b/templates/resources/secondary_database.md.tmpl @@ -10,8 +10,6 @@ description: |- {{- end }} --- -!> **V1 release candidate** This resource was reworked and is a release candidate for the V1. We do not expect significant changes in it before the V1. We will welcome any feedback and adjust the resource if needed. Any errors reported will be resolved with a higher priority. We encourage checking this resource out before the V1 release. Please follow the [migration guide](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/MIGRATION_GUIDE.md#v0920--v0930) to use it. - !> **Note** The provider does not detect external changes on database type. In this case, remove the database of wrong type manually with `terraform destroy` and recreate the resource. It will be addressed in the future. !> **Note** A database cannot be dropped successfully if it contains network rule-network policy associations. The error looks like `098507 (2BP01): Cannot drop database DATABASE as it includes network rule - policy associations.`. Currently, the provider does not unassign such objects automatically. Before dropping the resource, first unassign the network rule from the relevant objects. See [guide](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/latest/docs/guides/unassigning_policies) for more details. 
diff --git a/templates/resources/secret_with_authorization_code_grant.md.tmpl b/templates/resources/secret_with_authorization_code_grant.md.tmpl index bbc5e20afb..91ac3aec6c 100644 --- a/templates/resources/secret_with_authorization_code_grant.md.tmpl +++ b/templates/resources/secret_with_authorization_code_grant.md.tmpl @@ -9,8 +9,6 @@ description: |- {{- end }} --- -!> **V1 release candidate** This resource is a release candidate for the V1. It is on the list of remaining GA objects for V1. We do not expect significant changes in it before the V1. We will welcome any feedback and adjust the resource if needed. Any errors reported will be resolved with a higher priority. We encourage checking this resource out before the V1 release. Please follow the [migration guide](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/MIGRATION_GUIDE.md#v0970--v0980) to use it. - # {{.Name}} ({{.Type}}) {{ .Description | trimspace }} diff --git a/templates/resources/secret_with_basic_authentication.md.tmpl b/templates/resources/secret_with_basic_authentication.md.tmpl index bbc5e20afb..91ac3aec6c 100644 --- a/templates/resources/secret_with_basic_authentication.md.tmpl +++ b/templates/resources/secret_with_basic_authentication.md.tmpl @@ -9,8 +9,6 @@ description: |- {{- end }} --- -!> **V1 release candidate** This resource is a release candidate for the V1. It is on the list of remaining GA objects for V1. We do not expect significant changes in it before the V1. We will welcome any feedback and adjust the resource if needed. Any errors reported will be resolved with a higher priority. We encourage checking this resource out before the V1 release. Please follow the [migration guide](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/MIGRATION_GUIDE.md#v0970--v0980) to use it. 
- # {{.Name}} ({{.Type}}) {{ .Description | trimspace }} diff --git a/templates/resources/secret_with_client_credentials.md.tmpl b/templates/resources/secret_with_client_credentials.md.tmpl index bbc5e20afb..91ac3aec6c 100644 --- a/templates/resources/secret_with_client_credentials.md.tmpl +++ b/templates/resources/secret_with_client_credentials.md.tmpl @@ -9,8 +9,6 @@ description: |- {{- end }} --- -!> **V1 release candidate** This resource is a release candidate for the V1. It is on the list of remaining GA objects for V1. We do not expect significant changes in it before the V1. We will welcome any feedback and adjust the resource if needed. Any errors reported will be resolved with a higher priority. We encourage checking this resource out before the V1 release. Please follow the [migration guide](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/MIGRATION_GUIDE.md#v0970--v0980) to use it. - # {{.Name}} ({{.Type}}) {{ .Description | trimspace }} diff --git a/templates/resources/secret_with_generic_string.md.tmpl b/templates/resources/secret_with_generic_string.md.tmpl index bbc5e20afb..91ac3aec6c 100644 --- a/templates/resources/secret_with_generic_string.md.tmpl +++ b/templates/resources/secret_with_generic_string.md.tmpl @@ -9,8 +9,6 @@ description: |- {{- end }} --- -!> **V1 release candidate** This resource is a release candidate for the V1. It is on the list of remaining GA objects for V1. We do not expect significant changes in it before the V1. We will welcome any feedback and adjust the resource if needed. Any errors reported will be resolved with a higher priority. We encourage checking this resource out before the V1 release. Please follow the [migration guide](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/MIGRATION_GUIDE.md#v0970--v0980) to use it. 
- # {{.Name}} ({{.Type}}) {{ .Description | trimspace }} diff --git a/templates/resources/service_user.md.tmpl b/templates/resources/service_user.md.tmpl index 0072836bfc..051d96a9a4 100644 --- a/templates/resources/service_user.md.tmpl +++ b/templates/resources/service_user.md.tmpl @@ -9,8 +9,6 @@ description: |- {{- end }} --- -!> **V1 release candidate** This resource was reworked and is a release candidate for the V1. We do not expect significant changes in it before the V1. We will welcome any feedback and adjust the resource if needed. Any errors reported will be resolved with a higher priority. We encourage checking this resource out before the V1 release. Please follow the [migration guide](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/MIGRATION_GUIDE.md#v0960--v0970) to use it. - -> **Note** `snowflake_user_password_policy_attachment` will be reworked in the following versions of the provider which may still affect this resource. -> **Note** Attaching user policies will be handled in the following versions of the provider which may still affect this resource. diff --git a/templates/resources/shared_database.md.tmpl b/templates/resources/shared_database.md.tmpl index 65bc7d0dc9..c861689366 100644 --- a/templates/resources/shared_database.md.tmpl +++ b/templates/resources/shared_database.md.tmpl @@ -9,8 +9,6 @@ description: |- {{- end }} --- -!> **V1 release candidate** This resource was reworked and is a release candidate for the V1. We do not expect significant changes in it before the V1. We will welcome any feedback and adjust the resource if needed. Any errors reported will be resolved with a higher priority. We encourage checking this resource out before the V1 release. Please follow the [migration guide](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/MIGRATION_GUIDE.md#v0920--v0930) to use it. - !> **Note** The provider does not detect external changes on database type. 
In this case, remove the database of wrong type manually with `terraform destroy` and recreate the resource. It will be addressed in the future. !> **Note** A database cannot be dropped successfully if it contains network rule-network policy associations. The error looks like `098507 (2BP01): Cannot drop database DATABASE as it includes network rule - policy associations.`. Currently, the provider does not unassign such objects automatically. Before dropping the resource, first unassign the network rule from the relevant objects. See [guide](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/latest/docs/guides/unassigning_policies) for more details. diff --git a/templates/resources/stream_on_directory_table.md.tmpl b/templates/resources/stream_on_directory_table.md.tmpl index 6a7aa75378..1215c5f668 100644 --- a/templates/resources/stream_on_directory_table.md.tmpl +++ b/templates/resources/stream_on_directory_table.md.tmpl @@ -9,8 +9,6 @@ description: |- {{- end }} --- -!> **V1 release candidate** This resource was reworked and is a release candidate for the V1. We do not expect significant changes in it before the V1. We will welcome any feedback and adjust the resource if needed. Any errors reported will be resolved with a higher priority. We encourage checking this resource out before the V1 release. Please follow the [migration guide](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/MIGRATION_GUIDE.md#v0970--v0980) to use it. - ~> **Note about copy_grants** Fields like `stage`, and `stale` can not be ALTERed on Snowflake side (check [docs](https://docs.snowflake.com/en/sql-reference/sql/alter-stream)), and a change on these fields means recreation of the resource. ForceNew can not be used because it does not preserve grants from `copy_grants`. Beware that even though a change is marked as update, the resource is recreated. 
# {{.Name}} ({{.Type}}) diff --git a/templates/resources/stream_on_external_table.md.tmpl b/templates/resources/stream_on_external_table.md.tmpl index 8a062a52e7..7dc93a76bf 100644 --- a/templates/resources/stream_on_external_table.md.tmpl +++ b/templates/resources/stream_on_external_table.md.tmpl @@ -9,8 +9,6 @@ description: |- {{- end }} --- -!> **V1 release candidate** This resource was reworked and is a release candidate for the V1. We do not expect significant changes in it before the V1. We will welcome any feedback and adjust the resource if needed. Any errors reported will be resolved with a higher priority. We encourage checking this resource out before the V1 release. Please follow the [migration guide](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/MIGRATION_GUIDE.md#v0960--v0970) to use it. - ~> **Note about copy_grants** Fields like `external_table`, `insert_only`, `at`, `before` and `stale` can not be ALTERed on Snowflake side (check [docs](https://docs.snowflake.com/en/sql-reference/sql/alter-stream)), and a change on these fields means recreation of the resource. ForceNew can not be used because it does not preserve grants from `copy_grants`. Beware that even though a change is marked as update, the resource is recreated. # {{.Name}} ({{.Type}}) diff --git a/templates/resources/stream_on_table.md.tmpl b/templates/resources/stream_on_table.md.tmpl index 270789e8c5..3868f5543d 100644 --- a/templates/resources/stream_on_table.md.tmpl +++ b/templates/resources/stream_on_table.md.tmpl @@ -9,8 +9,6 @@ description: |- {{- end }} --- -!> **V1 release candidate** This resource was reworked and is a release candidate for the V1. We do not expect significant changes in it before the V1. We will welcome any feedback and adjust the resource if needed. Any errors reported will be resolved with a higher priority. We encourage checking this resource out before the V1 release. 
Please follow the [migration guide](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/MIGRATION_GUIDE.md#v0960--v0970) to use it. - ~> **Note about copy_grants** Fields like `table`, `append_only`, `at`, `before`, `show_initial_rows` and `stale` can not be ALTERed on Snowflake side (check [docs](https://docs.snowflake.com/en/sql-reference/sql/alter-stream)), and a change on these fields means recreation of the resource. ForceNew can not be used because it does not preserve grants from `copy_grants`. Beware that even though a change is marked as update, the resource is recreated. # {{.Name}} ({{.Type}}) diff --git a/templates/resources/stream_on_view.md.tmpl b/templates/resources/stream_on_view.md.tmpl index 31e3a88a68..4f20fbbf08 100644 --- a/templates/resources/stream_on_view.md.tmpl +++ b/templates/resources/stream_on_view.md.tmpl @@ -9,8 +9,6 @@ description: |- {{- end }} --- -!> **V1 release candidate** This resource was reworked and is a release candidate for the V1. We do not expect significant changes in it before the V1. We will welcome any feedback and adjust the resource if needed. Any errors reported will be resolved with a higher priority. We encourage checking this resource out before the V1 release. Please follow the [migration guide](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/MIGRATION_GUIDE.md#v0970--v0980) to use it. - ~> **Note about copy_grants** Fields like `view`, `append_only`, `at`, `before`, `show_initial_rows` and `stale` can not be ALTERed on Snowflake side (check [docs](https://docs.snowflake.com/en/sql-reference/sql/alter-stream)), and a change on these fields means recreation of the resource. ForceNew can not be used because it does not preserve grants from `copy_grants`. Beware that even though a change is marked as update, the resource is recreated. 
# {{.Name}} ({{.Type}}) diff --git a/templates/resources/streamlit.md.tmpl b/templates/resources/streamlit.md.tmpl index dfcd9aef7f..f0d9d50d47 100644 --- a/templates/resources/streamlit.md.tmpl +++ b/templates/resources/streamlit.md.tmpl @@ -9,8 +9,6 @@ description: |- {{- end }} --- -!> **V1 release candidate** This resource was reworked and is a release candidate for the V1. We do not expect significant changes in it before the V1. We will welcome any feedback and adjust the resource if needed. Any errors reported will be resolved with a higher priority. We encourage checking this resource out before the V1 release. Please follow the [migration guide](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/MIGRATION_GUIDE.md#v0930--v0940) to use it. - !> **Note** Setting a query warehouse with lowercase letters does not work correctly in Snowflake. As a workaround, set the query warehouse with uppercase letters only, or use unsafe_execute with query warehouse ID wrapped in `'`. diff --git a/templates/resources/tag.md.tmpl b/templates/resources/tag.md.tmpl index d040e9d5b0..36c9a2b54a 100644 --- a/templates/resources/tag.md.tmpl +++ b/templates/resources/tag.md.tmpl @@ -9,8 +9,6 @@ description: |- {{- end }} --- -!> **V1 release candidate** This resource was reworked and is a release candidate for the V1. We do not expect significant changes in it before the V1. We will welcome any feedback and adjust the resource if needed. Any errors reported will be resolved with a higher priority. We encourage checking this resource out before the V1 release. Please follow the [migration guide](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/MIGRATION_GUIDE.md#v0980--v0990) to use it. - ~> **Required warehouse** For this resource, the provider now uses [tag references](https://docs.snowflake.com/en/sql-reference/functions/tag_references) to get information about masking policies attached to tags. 
This function requires a warehouse in the connection. Please, make sure you have either set a `DEFAULT_WAREHOUSE` for the user, or specified a warehouse in the provider configuration. # {{.Name}} ({{.Type}}) diff --git a/templates/resources/tag_association.md.tmpl b/templates/resources/tag_association.md.tmpl index a514fdf39d..933cd213d8 100644 --- a/templates/resources/tag_association.md.tmpl +++ b/templates/resources/tag_association.md.tmpl @@ -9,8 +9,6 @@ description: |- {{- end }} --- -!> **V1 release candidate** This resource was reworked and is a release candidate for the V1. We do not expect significant changes in it before the V1. We will welcome any feedback and adjust the resource if needed. Any errors reported will be resolved with a higher priority. We encourage checking this resource out before the V1 release. Please follow the [migration guide](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/MIGRATION_GUIDE.md#v0980--v0990) to use it. - -> **Note** For `ACCOUNT` object type, only identifiers with organization name are supported. See [account identifier docs](https://docs.snowflake.com/en/user-guide/admin-account-identifier#format-1-preferred-account-name-in-your-organization) for more details. -> **Note** Tag association resource ID has the following format: `"TAG_DATABASE"."TAG_SCHEMA"."TAG_NAME"|TAG_VALUE|OBJECT_TYPE`. This means that a tuple of tag ID, tag value and object type should be unique across the resources. If you want to specify this combination for more than one object, you should use only one `tag_association` resource with specified `object_identifiers` set. diff --git a/templates/resources/task.md.tmpl b/templates/resources/task.md.tmpl index 7a876a0017..91ac3aec6c 100644 --- a/templates/resources/task.md.tmpl +++ b/templates/resources/task.md.tmpl @@ -9,8 +9,6 @@ description: |- {{- end }} --- -!> **V1 release candidate** This resource was reworked and is a release candidate for the V1. 
We do not expect significant changes in it before the V1. We will welcome any feedback and adjust the resource if needed. Any errors reported will be resolved with a higher priority. We encourage checking this resource out before the V1 release. Please follow the [migration guide](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/MIGRATION_GUIDE.md#v0980--v0990) to use it. - # {{.Name}} ({{.Type}}) {{ .Description | trimspace }} diff --git a/templates/resources/unsafe_execute.md.tmpl b/templates/resources/unsafe_execute.md.tmpl deleted file mode 100644 index 80794cba89..0000000000 --- a/templates/resources/unsafe_execute.md.tmpl +++ /dev/null @@ -1,40 +0,0 @@ ---- -# generated by https://github.com/hashicorp/terraform-plugin-docs -page_title: "{{.Name}} {{.Type}} - {{.ProviderName}}" -subcategory: "" -description: |- -{{ if gt (len (split .Description "")) 1 -}} -{{ index (split .Description "") 1 | plainmarkdown | trimspace | prefixlines " " }} -{{- else -}} -{{ .Description | plainmarkdown | trimspace | prefixlines " " }} -{{- end }} ---- - -# {{.Name}} ({{.Type}}) - -!> **Warning** This is a dangerous resource that allows executing **ANY** SQL statement. It may destroy resources if used incorrectly. It may behave incorrectly combined with other resources. Use at your own risk. - -~> **Note** It can be theoretically used to manage resource that are not supported by the provider. This is risky and may brake other resources if used incorrectly. - -~> **Note** Use `query` parameter with caution. It will fetch **ALL** the results returned by the query provided. Try to limit the number of results by writing query with filters. Query failure does not stop resource creation; it simply results in `query_results` being empty. 
- -{{ .Description | trimspace }} - -{{ if .HasExample -}} -## Example Usage - -{{ tffile (printf "examples/resources/%s/resource.tf" .Name)}} --> **Note** Instead of using fully_qualified_name, you can reference objects managed outside Terraform by constructing a correct ID, consult [identifiers guide](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/latest/docs/guides/identifiers#new-computed-fully-qualified-name-field-in-resources). - - -{{- end }} - -{{ .SchemaMarkdown | trimspace }} -{{- if .HasImport }} - -## Import - -Import is supported using the following syntax: - -{{ codefile "shell" (printf "examples/resources/%s/import.sh" .Name)}} -{{- end }} diff --git a/templates/resources/user.md.tmpl b/templates/resources/user.md.tmpl index eeaa0b36ff..326af4f2ff 100644 --- a/templates/resources/user.md.tmpl +++ b/templates/resources/user.md.tmpl @@ -9,8 +9,6 @@ description: |- {{- end }} --- -!> **V1 release candidate** This resource was reworked and is a release candidate for the V1. We do not expect significant changes in it before the V1. We will welcome any feedback and adjust the resource if needed. Any errors reported will be resolved with a higher priority. We encourage checking this resource out before the V1 release. Please follow the [migration guide](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/MIGRATION_GUIDE.md#v094x--v0950) to use it. - -> **Note** `snowflake_user_password_policy_attachment` will be reworked in the following versions of the provider which may still affect this resource. -> **Note** Attaching user policies will be handled in the following versions of the provider which may still affect this resource. 
diff --git a/templates/resources/user_public_keys.md.tmpl b/templates/resources/user_public_keys.md.tmpl index 525a4eeb2d..c514190018 100644 --- a/templates/resources/user_public_keys.md.tmpl +++ b/templates/resources/user_public_keys.md.tmpl @@ -9,6 +9,8 @@ description: |- {{- end }} --- +!> **Caution: Preview Feature** This feature is considered a preview feature in the provider, regardless of the state of the resource in Snowflake. We do not guarantee its stability. It will be reworked and marked as a stable feature in future releases. Breaking changes are expected, even without bumping the major version. To use this feature, add the relevant feature name to `preview_features_enabled field` in the [provider configuration](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/latest/docs#schema). Please always refer to the [Getting Help](https://github.com/Snowflake-Labs/terraform-provider-snowflake?tab=readme-ov-file#getting-help) section in our Github repo to best determine how to get help for your questions. + !> **Important** Starting from v0.95.0, it is advised to use this resource **only** if users are not managed through terraform. Check more in the [migration guide](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/MIGRATION_GUIDE.md#v094x--v0950). # {{.Name}} ({{.Type}}) diff --git a/templates/resources/view.md.tmpl b/templates/resources/view.md.tmpl index f9d69cd5de..8f8c9ffde7 100644 --- a/templates/resources/view.md.tmpl +++ b/templates/resources/view.md.tmpl @@ -9,8 +9,6 @@ description: |- {{- end }} --- -!> **V1 release candidate** This resource was reworked and is a release candidate for the V1. We do not expect significant changes in it before the V1. We will welcome any feedback and adjust the resource if needed. Any errors reported will be resolved with a higher priority. We encourage checking this resource out before the V1 release. 
Please follow the [migration guide](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/MIGRATION_GUIDE.md#v094x--v0950) to use it. - !> Due to Snowflake limitations, to properly compute diff on `statement` field, the provider parses a `text` field which contains the whole CREATE query used to create the resource. We recommend not using special characters, especially `(`, `,`, `)` in any of the fields, if possible. ~> **Note about copy_grants** Fields like `is_recursive`, `is_temporary`, `copy_grants` and `statement` can not be ALTERed on Snowflake side (check [docs](https://docs.snowflake.com/en/sql-reference/sql/alter-view)), and a change on these fields means recreation of the resource. ForceNew can not be used because it does not preserve grants from `copy_grants`. Beware that even though a change is marked as update, the resource is recreated. diff --git a/templates/resources/warehouse.md.tmpl b/templates/resources/warehouse.md.tmpl index 21df1ad83c..f8141ea5bb 100644 --- a/templates/resources/warehouse.md.tmpl +++ b/templates/resources/warehouse.md.tmpl @@ -9,8 +9,6 @@ description: |- {{- end }} --- -!> **V1 release candidate** This resource was reworked and is a release candidate for the V1. We do not expect significant changes in it before the V1. We will welcome any feedback and adjust the resource if needed. Any errors reported will be resolved with a higher priority. We encourage checking this resource out before the V1 release. Please follow the [migration guide](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/MIGRATION_GUIDE.md#v0920--v0930) to use it. - -> **Note** Field `RESOURCE_CONSTRAINT` is currently missing. It will be added in the future. diff --git a/v1-preparations/ESSENTIAL_GA_OBJECTS.MD b/v1-preparations/ESSENTIAL_GA_OBJECTS.MD index c020bce7b3..0e2050fc31 100644 --- a/v1-preparations/ESSENTIAL_GA_OBJECTS.MD +++ b/v1-preparations/ESSENTIAL_GA_OBJECTS.MD @@ -31,8 +31,8 @@ newer provider versions. 
We will address these while working on the given object | STAGE | ❌ | [#2995](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2995), [#2818](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2818), [#2679](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2679), [#2505](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2505), [#1911](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1911), [#1903](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1903), [#1795](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1795), [#1705](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1705), [#1544](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1544), [#1491](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1491), [#1087](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1087), [#265](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/265) | | STREAM | 🚀 | [#2975](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2975), [#2413](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2413), [#2201](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2201), [#1150](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1150) | | STREAMLIT | 🚀 | - | -| TABLE | 👨‍💻 | [#2997](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2997), [#2844](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2844), [#2839](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2839), [#2735](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2735), [#2733](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2733), [#2683](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2683), 
[#2676](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2676), [#2674](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2674), [#2629](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2629), [#2418](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2418), [#2415](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2415), [#2406](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2406), [#2236](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2236), [#2035](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2035), [#1823](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1823), [#1799](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1799), [#1764](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1764), [#1600](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1600), [#1387](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1387), [#1272](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1272), [#1271](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1271), [#1248](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1248), [#1241](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1241), [#1146](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1146), [#1032](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1032), [#420](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/420) | -| TAG | 👨‍💻 | [#2943](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2943), [#2598](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2598), [#1910](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1910), 
[#1909](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1909), [#1862](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1862), [#1806](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1806), [#1657](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1657), [#1496](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1496), [#1443](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1443), [#1394](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1394), [#1372](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1372), [#1074](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1074) | +| TABLE | 👨‍💻 | [#3221](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/3221), [#2997](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2997), [#2844](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2844), [#2839](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2839), [#2735](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2735), [#2733](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2733), [#2683](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2683), [#2676](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2676), [#2674](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2674), [#2629](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2629), [#2418](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2418), [#2415](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2415), [#2406](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2406), [#2236](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2236), 
[#2035](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2035), [#1823](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1823), [#1799](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1799), [#1764](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1764), [#1600](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1600), [#1387](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1387), [#1272](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1272), [#1271](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1271), [#1248](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1248), [#1241](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1241), [#1146](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1146), [#1032](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1032), [#420](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/420) | +| TAG | 🚀 | [#2943](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2943), [#2598](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2598), [#1910](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1910), [#1909](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1909), [#1862](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1862), [#1806](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1806), [#1657](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1657), [#1496](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1496), [#1443](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1443), [#1394](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1394),
[#1372](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1372), [#1074](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1074) | | TASK | 🚀 | [#3136](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/3136), [#1419](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1419), [#1250](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1250), [#1194](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1194), [#1088](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1088) | | VIEW | 🚀 | issues in the older versions: [resources](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues?q=label%3Aresource%3Aview+) and [datasources](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues?q=label%3Adata_source%3Aviews+) | | snowflake_unsafe_execute | 👨‍💻 | [#2934](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2934) | diff --git a/v1-preparations/GENERAL_TOPICS.md b/v1-preparations/GENERAL_TOPICS.md index 6370c637ee..1e8a3ed6d6 100644 --- a/v1-preparations/GENERAL_TOPICS.md +++ b/v1-preparations/GENERAL_TOPICS.md @@ -2,4 +2,4 @@ This is a (unfinished) list of additional topics, not being a part of the partic - object cloning: [#2211](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2211) - error handling improvements: [#2188](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2188), [#2091](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2091), [#1823](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1823) - generic function datasource/resource: [#2630](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2630), [#1630](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1630) -- open tofu: [#2939](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2939) \ 
No newline at end of file +- OpenTofu support: [#2939](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2939) diff --git a/v1-preparations/LIST_OF_PREVIEW_FEATURES_FOR_V1.md b/v1-preparations/LIST_OF_PREVIEW_FEATURES_FOR_V1.md index 8eb3d34669..93164a2c18 100644 --- a/v1-preparations/LIST_OF_PREVIEW_FEATURES_FOR_V1.md +++ b/v1-preparations/LIST_OF_PREVIEW_FEATURES_FOR_V1.md @@ -1,53 +1,71 @@ -[Preview features](../ROADMAP.md#preview-resourcesdatasources) for the V1: +# List of preview resources and data sources that are available in V1 -* [snowflake_current_account](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.97.0/docs/data-sources/current_account) (datasource) -* [snowflake_account_password_policy_attachment](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.97.0/docs/resources/account_password_policy_attachment) -* [snowflake_alert](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.97.0/docs/resources/alert) -* [snowflake_alerts](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.97.0/docs/data-sources/alerts) (datasource) -* [snowflake_api_integration](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.97.0/docs/resources/api_integration) -* [snowflake_cortex_search_service](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.97.0/docs/resources/cortex_search_service) -* [snowflake_cortex_search_services](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.97.0/docs/data-sources/cortex_search_services) (datasource) -* [snowflake_current_role](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.97.0/docs/data-sources/current_role) (datasource) -* [snowflake_database](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.97.0/docs/data-sources/database) (datasource) -* [snowflake_database_role](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.97.0/docs/data-sources/database_role) 
(datasource) -* [snowflake_dynamic_table](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.97.0/docs/resources/dynamic_table) -* [snowflake_dynamic_tables](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.97.0/docs/data-sources/dynamic_tables) (datasource) -* [snowflake_external_function](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.97.0/docs/resources/external_function) -* [snowflake_external_functions](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.97.0/docs/data-sources/external_functions) (datasource) -* [snowflake_external_table](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.97.0/docs/resources/external_table) -* [snowflake_external_tables](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.97.0/docs/data-sources/external_tables) (datasource) -* [snowflake_external_volume](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.98.0/docs/resources/external_volume) -* [snowflake_failover_group](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.97.0/docs/resources/failover_group) -* [snowflake_failover_groups](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.97.0/docs/data-sources/failover_groups) (datasource) -* [snowflake_file_format](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.97.0/docs/resources/file_format) -* [snowflake_file_formats](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.97.0/docs/data-sources/file_formats) (datasource) -* [snowflake_managed_account](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.97.0/docs/resources/managed_account) -* [snowflake_materialized_view](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.97.0/docs/resources/materialized_view) -* [snowflake_materialized_views](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.97.0/docs/data-sources/materialized_views) (datasource) -* 
[snowflake_network_policy_attachment](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.97.0/docs/resources/network_policy_attachment) -* [snowflake_network_rule](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.97.0/docs/resources/network_rule) -* [snowflake_email_notification_integration](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.97.0/docs/resources/email_notification_integration) -* [snowflake_notification_integration](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.97.0/docs/resources/notification_integration) -* [snowflake_password_policy](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.97.0/docs/resources/password_policy) -* [snowflake_pipe](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.97.0/docs/resources/pipe) -* [snowflake_pipes](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.97.0/docs/data-sources/pipes) (datasource) -* [snowflake_sequence](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.97.0/docs/resources/sequence) -* [snowflake_sequences](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.97.0/docs/data-sources/sequences) (datasource) -* [snowflake_share](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.97.0/docs/resources/share) -* [snowflake_shares](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.97.0/docs/data-sources/shares) (datasource) -* [snowflake_object_parameter](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.97.0/docs/resources/object_parameter) -* [snowflake_parameters](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.97.0/docs/data-sources/parameters) (datasource) -* [snowflake_stage](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.97.0/docs/resources/stage) -* [snowflake_stages](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.97.0/docs/data-sources/stages) 
(datasource) -* [snowflake_storage_integration](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.97.0/docs/resources/storage_integration) -* [snowflake_storage_integrations](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.97.0/docs/data-sources/storage_integrations) (datasource) -* [snowflake_system_generate_scim_access_token](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.97.0/docs/data-sources/system_generate_scim_access_token) (datasource) -* [snowflake_system_get_aws_sns_iam_policy](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.97.0/docs/data-sources/system_get_aws_sns_iam_policy) (datasource) -* [snowflake_system_get_privatelink_config](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.97.0/docs/data-sources/system_get_privatelink_config) (datasource) -* [snowflake_system_get_snowflake_platform_info](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.97.0/docs/data-sources/system_get_snowflake_platform_info) (datasource) -* [snowflake_table](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.98.0/docs/resources/table) -* [snowflake_tables](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.98.0/docs/data-sources/tables) (datasource) -* [snowflake_table_column_masking_policy_application](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.97.0/docs/resources/table_column_masking_policy_application) -* [snowflake_table_constraint](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.97.0/docs/resources/table_constraint) (undecided - may be deleted instead) -* [snowflake_user_public_keys](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.97.0/docs/resources/user_public_keys) -* [snowflake_user_password_policy_attachment](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.97.0/docs/resources/user_password_policy_attachment) +[Preview 
features](../ROADMAP.md#preview-resourcesdatasources) for the V1: + +* [snowflake_current_account](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/1.0.0/docs/data-sources/current_account) (data source) +* [snowflake_account_password_policy_attachment](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/1.0.0/docs/resources/account_password_policy_attachment) +* [snowflake_account_authentication_policy_attachment](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/1.0.0/docs/resources/account_authentication_policy_attachment) +* [snowflake_alert](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/1.0.0/docs/resources/alert) +* [snowflake_alerts](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/1.0.0/docs/data-sources/alerts) (data source) +* [snowflake_api_integration](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/1.0.0/docs/resources/api_integration) +* [snowflake_authentication_policy](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/1.0.0/docs/resources/authentication_policy) +* [snowflake_cortex_search_service](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/1.0.0/docs/resources/cortex_search_service) +* [snowflake_cortex_search_services](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/1.0.0/docs/data-sources/cortex_search_services) (data source) +* [snowflake_current_role](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/1.0.0/docs/data-sources/current_role) (data source) +* [snowflake_database](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/1.0.0/docs/data-sources/database) (data source) +* [snowflake_database_role](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/1.0.0/docs/data-sources/database_role) (data source) +* [snowflake_dynamic_table](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/1.0.0/docs/resources/dynamic_table) +*
[snowflake_dynamic_tables](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/1.0.0/docs/data-sources/dynamic_tables) (data source) +* [snowflake_external_function](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/1.0.0/docs/resources/external_function) +* [snowflake_external_functions](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/1.0.0/docs/data-sources/external_functions) (data source) +* [snowflake_external_table](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/1.0.0/docs/resources/external_table) +* [snowflake_external_tables](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/1.0.0/docs/data-sources/external_tables) (data source) +* [snowflake_external_volume](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/1.0.0/docs/resources/external_volume) +* [snowflake_failover_group](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/1.0.0/docs/resources/failover_group) +* [snowflake_failover_groups](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/1.0.0/docs/data-sources/failover_groups) (data source) +* [snowflake_file_format](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/1.0.0/docs/resources/file_format) +* [snowflake_file_formats](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/1.0.0/docs/data-sources/file_formats) (data source) +* [snowflake_function_java](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/1.0.0/docs/resources/function_java) +* [snowflake_function_javascript](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/1.0.0/docs/resources/function_javascript) +* [snowflake_function_python](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/1.0.0/docs/resources/function_python) +* [snowflake_function_scala](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/1.0.0/docs/resources/function_scala) +* 
[snowflake_function_sql](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/1.0.0/docs/resources/function_sql) +* [snowflake_functions](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/1.0.0/docs/data-sources/functions) (data source) +* [snowflake_managed_account](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/1.0.0/docs/resources/managed_account) +* [snowflake_materialized_view](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/1.0.0/docs/resources/materialized_view) +* [snowflake_materialized_views](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/1.0.0/docs/data-sources/materialized_views) (data source) +* [snowflake_network_policy_attachment](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/1.0.0/docs/resources/network_policy_attachment) +* [snowflake_network_rule](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/1.0.0/docs/resources/network_rule) +* [snowflake_email_notification_integration](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/1.0.0/docs/resources/email_notification_integration) +* [snowflake_notification_integration](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/1.0.0/docs/resources/notification_integration) +* [snowflake_object_parameter](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/1.0.0/docs/resources/object_parameter) +* [snowflake_password_policy](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/1.0.0/docs/resources/password_policy) +* [snowflake_pipe](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/1.0.0/docs/resources/pipe) +* [snowflake_pipes](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/1.0.0/docs/data-sources/pipes) (data source) +* [snowflake_procedure_java](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/1.0.0/docs/resources/procedure_java) +* 
[snowflake_procedure_javascript](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/1.0.0/docs/resources/procedure_javascript) +* [snowflake_procedure_python](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/1.0.0/docs/resources/procedure_python) +* [snowflake_procedure_scala](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/1.0.0/docs/resources/procedure_scala) +* [snowflake_procedure_sql](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/1.0.0/docs/resources/procedure_sql) +* [snowflake_procedures](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/1.0.0/docs/data-sources/procedures) (data source) +* [snowflake_sequence](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/1.0.0/docs/resources/sequence) +* [snowflake_sequences](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/1.0.0/docs/data-sources/sequences) (data source) +* [snowflake_share](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/1.0.0/docs/resources/share) +* [snowflake_shares](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/1.0.0/docs/data-sources/shares) (data source) +* [snowflake_object_parameter](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/1.0.0/docs/resources/object_parameter) +* [snowflake_parameters](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/1.0.0/docs/data-sources/parameters) (data source) +* [snowflake_stage](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/1.0.0/docs/resources/stage) +* [snowflake_stages](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/1.0.0/docs/data-sources/stages) (data source) +* [snowflake_storage_integration](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/1.0.0/docs/resources/storage_integration) +* 
[snowflake_storage_integrations](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/1.0.0/docs/data-sources/storage_integrations) (data source) +* [snowflake_system_generate_scim_access_token](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/1.0.0/docs/data-sources/system_generate_scim_access_token) (data source) +* [snowflake_system_get_aws_sns_iam_policy](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/1.0.0/docs/data-sources/system_get_aws_sns_iam_policy) (data source) +* [snowflake_system_get_privatelink_config](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/1.0.0/docs/data-sources/system_get_privatelink_config) (data source) +* [snowflake_system_get_snowflake_platform_info](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/1.0.0/docs/data-sources/system_get_snowflake_platform_info) (data source) +* [snowflake_table](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/1.0.0/docs/resources/table) +* [snowflake_tables](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/1.0.0/docs/data-sources/tables) (data source) +* [snowflake_table_column_masking_policy_application](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/1.0.0/docs/resources/table_column_masking_policy_application) +* [snowflake_table_constraint](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/1.0.0/docs/resources/table_constraint) (undecided - may be deleted instead) +* [snowflake_user_public_keys](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/1.0.0/docs/resources/user_public_keys) +* [snowflake_user_password_policy_attachment](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/1.0.0/docs/resources/user_password_policy_attachment) +* [snowflake_user_authentication_policy_attachment](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/1.0.0/docs/resources/user_authentication_policy_attachment) diff --git 
a/v1-preparations/LIST_OF_REMOVED_RESOURCES_FOR_V1.md b/v1-preparations/LIST_OF_REMOVED_RESOURCES_FOR_V1.md index d34dc99216..77adbf8ce5 100644 --- a/v1-preparations/LIST_OF_REMOVED_RESOURCES_FOR_V1.md +++ b/v1-preparations/LIST_OF_REMOVED_RESOURCES_FOR_V1.md @@ -1,14 +1,14 @@ -Deprecated resources that will be removed with the V1: +# List of deprecated resources and data sources that were removed with the V1 -* [snowflake_database_old](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.97.0/docs/resources/database_old) -* [snowflake_tag_masking_policy_association](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.97.0/docs/resources/tag_masking_policy_association) +* [snowflake_database_old](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.98.0/docs/resources/database_old) +* [snowflake_role](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.98.0/docs/resources/role) +* [snowflake_role](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.98.0/docs/data-sources/role) (data source) +* [snowflake_roles](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.98.0/docs/data-sources/roles) (data source) +* [snowflake_oauth_integration](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.98.0/docs/resources/oauth_integration) +* [snowflake_saml_integration](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.98.0/docs/resources/saml_integration) +* [snowflake_stream](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.98.0/docs/resources/stream) +* [snowflake_tag_masking_policy_association](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.98.0/docs/resources/tag_masking_policy_association) +* [snowflake_session_parameter](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.98.0/docs/resources/session_parameter) * 
[snowflake_procedure](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.97.0/docs/resources/procedure) * [snowflake_function](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.97.0/docs/resources/function) -* [snowflake_role](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.97.0/docs/resources/role) -* [snowflake_role](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.97.0/docs/data-sources/role) (datasource) -* [snowflake_oauth_integration](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.97.0/docs/resources/oauth_integration) -* [snowflake_saml_integration](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.97.0/docs/resources/saml_integration) -* [snowflake_session_parameter](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.97.0/docs/resources/session_parameter) -* [snowflake_unsafe_execute](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.97.0/docs/resources/unsafe_execute) - will be renamed -* [snowflake_stream](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.97.0/docs/resources/stream) -* [snowflake_tag_masking_policy_association](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.97.0/docs/resources/tag_masking_policy_association) +* [snowflake_unsafe_execute](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.97.0/docs/resources/unsafe_execute) diff --git a/v1-preparations/LIST_OF_STABLE_RESOURCES_FOR_V1.md b/v1-preparations/LIST_OF_STABLE_RESOURCES_FOR_V1.md index bb7a9d5426..ab1cfa6461 100644 --- a/v1-preparations/LIST_OF_STABLE_RESOURCES_FOR_V1.md +++ b/v1-preparations/LIST_OF_STABLE_RESOURCES_FOR_V1.md @@ -1,105 +1,91 @@ -We estimate the given list to be accurate, but it may be subject to small changes: +# List of stable resources and data sources available in V1 * Account - * 
[snowflake_account](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.98.0/docs/resources/account) - * [snowflake_accounts](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.98.0/docs/data-sources/accounts) (datasource) + * [snowflake_account](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/1.0.0/docs/resources/account) + * [snowflake_accounts](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/1.0.0/docs/data-sources/accounts) (data source) * Connection - * [snowflake_primary_connection](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.98.0/docs/resources/primary_connection) - * [snowflake_secondary_connection](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.98.0/docs/resources/secondary_connection) - * [snowflake_connections](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.98.0/docs/data-sources/connections) (datasource) + * [snowflake_primary_connection](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/1.0.0/docs/resources/primary_connection) + * [snowflake_secondary_connection](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/1.0.0/docs/resources/secondary_connection) + * [snowflake_connections](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/1.0.0/docs/data-sources/connections) (data source) * Database - * [snowflake_database](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.98.0/docs/resources/database) - * [snowflake_secondary_database](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.98.0/docs/resources/secondary_database) - * [snowflake_shared_database](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.98.0/docs/resources/shared_database) - * [snowflake_databases](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.98.0/docs/data-sources/databases) (datasource) + * 
[snowflake_database](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/1.0.0/docs/resources/database) + * [snowflake_secondary_database](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/1.0.0/docs/resources/secondary_database) + * [snowflake_shared_database](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/1.0.0/docs/resources/shared_database) + * [snowflake_databases](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/1.0.0/docs/data-sources/databases) (data source) * Database Role - * [snowflake_database_role](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.98.0/docs/resources/database_role) - * [snowflake_database_roles](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.98.0/docs/data-sources/database_roles) (datasource) -* Function (in progress) - * snowflake_function_java - * snowflake_function_javascript - * snowflake_function_python - * snowflake_function_scala - * snowflake_function_sql - * [snowflake_functions](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.98.0/docs/data-sources/functions) (datasource) + * [snowflake_database_role](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/1.0.0/docs/resources/database_role) + * [snowflake_database_roles](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/1.0.0/docs/data-sources/database_roles) (data source) * Grants - * [snowflake_grant_account_role](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.98.0/docs/resources/grant_account_role) - * [snowflake_grant_application_role](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.98.0/docs/resources/grant_application_role) - * [snowflake_grant_database_role](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.98.0/docs/resources/grant_database_role) - * 
[snowflake_grant_ownership](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.98.0/docs/resources/grant_ownership) - * [snowflake_grant_privileges_to_account_role](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.98.0/docs/resources/grant_privileges_to_account_role) - * [snowflake_grant_privileges_to_database_role](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.98.0/docs/resources/grant_privileges_to_database_role) - * [snowflake_grant_privileges_to_share](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.98.0/docs/resources/grant_privileges_to_share) - * [snowflake_grants](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.98.0/docs/data-sources/grants) (datasource) + * [snowflake_grant_account_role](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/1.0.0/docs/resources/grant_account_role) + * [snowflake_grant_application_role](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/1.0.0/docs/resources/grant_application_role) + * [snowflake_grant_database_role](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/1.0.0/docs/resources/grant_database_role) + * [snowflake_grant_ownership](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/1.0.0/docs/resources/grant_ownership) + * [snowflake_grant_privileges_to_account_role](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/1.0.0/docs/resources/grant_privileges_to_account_role) + * [snowflake_grant_privileges_to_database_role](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/1.0.0/docs/resources/grant_privileges_to_database_role) + * [snowflake_grant_privileges_to_share](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/1.0.0/docs/resources/grant_privileges_to_share) + * [snowflake_grants](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/1.0.0/docs/data-sources/grants) (data source) * Masking Policy - * 
[snowflake_masking_policy](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.98.0/docs/resources/masking_policy) - * [snowflake_masking_policies](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.98.0/docs/data-sources/masking_policies) (datasource) + * [snowflake_masking_policy](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/1.0.0/docs/resources/masking_policy) + * [snowflake_masking_policies](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/1.0.0/docs/data-sources/masking_policies) (data source) * Network Policy - * [snowflake_network_policy](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.98.0/docs/resources/network_policy) - * [snowflake_network_policies](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.98.0/docs/data-sources/network_policies) (datasource) -* Procedure (in progress) - * snowflake_procedure_java - * snowflake_procedure_javascript - * snowflake_procedure_python - * snowflake_procedure_scala - * snowflake_procedure_sql - * [snowflake_procedures](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.98.0/docs/data-sources/procedures) (datasource) + * [snowflake_network_policy](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/1.0.0/docs/resources/network_policy) + * [snowflake_network_policies](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/1.0.0/docs/data-sources/network_policies) (data source) * Resource Monitor - * [snowflake_resource_monitor](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.98.0/docs/resources/resource_monitor) - * [snowflake_resource_monitors](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.98.0/docs/data-sources/resource_monitors) (datasource) + * [snowflake_resource_monitor](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/1.0.0/docs/resources/resource_monitor) + * 
[snowflake_resource_monitors](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/1.0.0/docs/data-sources/resource_monitors) (data source) * Role - * [snowflake_account_role](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.98.0/docs/resources/account_role) - * [snowflake_roles](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.98.0/docs/data-sources/roles) (datasource) + * [snowflake_account_role](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/1.0.0/docs/resources/account_role) + * [snowflake_account_roles](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/1.0.0/docs/data-sources/account_roles) (data source) * Row Access Policy - * [snowflake_row_access_policy](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.98.0/docs/resources/row_access_policy) - * [snowflake_row_access_policies](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.98.0/docs/data-sources/row_access_policies) (datasource) + * [snowflake_row_access_policy](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/1.0.0/docs/resources/row_access_policy) + * [snowflake_row_access_policies](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/1.0.0/docs/data-sources/row_access_policies) (data source) * Schema - * [snowflake_schema](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.98.0/docs/resources/schema) - * [snowflake_schemas](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.98.0/docs/data-sources/schemas) (datasource) + * [snowflake_schema](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/1.0.0/docs/resources/schema) + * [snowflake_schemas](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/1.0.0/docs/data-sources/schemas) (data source) * Secret - * 
[snowflake_secret_with_client_credentials](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.98.0/docs/resources/secret_with_client_credentials) - * [snowflake_secret_with_authorization_code_grant](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.98.0/docs/resources/secret_with_authorization_code_grant) - * [snowflake_secret_with_basic_authentication](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.98.0/docs/resources/secret_with_basic_authentication) - * [snowflake_secret_with_generic_string](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.98.0/docs/resources/secret_with_generic_string) - * [snowflake_secrets](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.98.0/docs/data-sources/secrets) (datasource) + * [snowflake_secret_with_client_credentials](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/1.0.0/docs/resources/secret_with_client_credentials) + * [snowflake_secret_with_authorization_code_grant](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/1.0.0/docs/resources/secret_with_authorization_code_grant) + * [snowflake_secret_with_basic_authentication](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/1.0.0/docs/resources/secret_with_basic_authentication) + * [snowflake_secret_with_generic_string](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/1.0.0/docs/resources/secret_with_generic_string) + * [snowflake_secrets](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/1.0.0/docs/data-sources/secrets) (data source) * Security Integration - * [snowflake_api_authentication_integration_with_authorization_code_grant](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.98.0/docs/resources/api_authentication_integration_with_authorization_code_grant) - * 
[snowflake_api_authentication_integration_with_client_credentials](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.98.0/docs/resources/api_authentication_integration_with_client_credentials) - * [snowflake_api_authentication_integration_with_jwt_bearer](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.98.0/docs/resources/api_authentication_integration_with_jwt_bearer) - * [snowflake_external_oauth_integration](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.98.0/docs/resources/external_oauth_integration) - * [snowflake_oauth_integration_for_custom_clients](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.98.0/docs/resources/oauth_integration_for_custom_clients) - * [snowflake_oauth_integration_for_partner_applications](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.98.0/docs/resources/oauth_integration_for_partner_applications) - * [snowflake_saml2_integration](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.98.0/docs/resources/saml2_integration) - * [snowflake_scim_integration](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.98.0/docs/resources/scim_integration) - * [snowflake_security_integrations](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.98.0/docs/data-sources/security_integrations) (datasource) -* Snowflake Parameters (in progress) - * [snowflake_account_parameter](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.98.0/docs/resources/account_parameter) -* SQL Execute (in progress) - * \ (no name yet) + * [snowflake_api_authentication_integration_with_authorization_code_grant](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/1.0.0/docs/resources/api_authentication_integration_with_authorization_code_grant) + * 
[snowflake_api_authentication_integration_with_client_credentials](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/1.0.0/docs/resources/api_authentication_integration_with_client_credentials) + * [snowflake_api_authentication_integration_with_jwt_bearer](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/1.0.0/docs/resources/api_authentication_integration_with_jwt_bearer) + * [snowflake_external_oauth_integration](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/1.0.0/docs/resources/external_oauth_integration) + * [snowflake_oauth_integration_for_custom_clients](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/1.0.0/docs/resources/oauth_integration_for_custom_clients) + * [snowflake_oauth_integration_for_partner_applications](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/1.0.0/docs/resources/oauth_integration_for_partner_applications) + * [snowflake_saml2_integration](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/1.0.0/docs/resources/saml2_integration) + * [snowflake_scim_integration](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/1.0.0/docs/resources/scim_integration) + * [snowflake_security_integrations](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/1.0.0/docs/data-sources/security_integrations) (data source) +* Snowflake Parameters + * [snowflake_account_parameter](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/1.0.0/docs/resources/account_parameter) +* SQL Execute + * [snowflake_execute](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/1.0.0/docs/resources/execute) * Stream - * [snowflake_stream_on_table](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.98.0/docs/resources/stream_on_table) - * [snowflake_stream_on_external_table](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.98.0/docs/resources/stream_on_external_table) - * 
[snowflake_stream_on_directory_table](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.98.0/docs/resources/stream_on_directory_table) - * [snowflake_stream_on_view](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.98.0/docs/resources/stream_on_view) - * [snowflake_streams](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.98.0/docs/data-sources/streams) (datasource) + * [snowflake_stream_on_table](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/1.0.0/docs/resources/stream_on_table) + * [snowflake_stream_on_external_table](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/1.0.0/docs/resources/stream_on_external_table) + * [snowflake_stream_on_directory_table](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/1.0.0/docs/resources/stream_on_directory_table) + * [snowflake_stream_on_view](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/1.0.0/docs/resources/stream_on_view) + * [snowflake_streams](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/1.0.0/docs/data-sources/streams) (data source) * Streamlit - * [snowflake_streamlit](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.98.0/docs/resources/streamlit) - * [snowflake_streamlits](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.98.0/docs/data-sources/streamlits) (datasource) + * [snowflake_streamlit](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/1.0.0/docs/resources/streamlit) + * [snowflake_streamlits](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/1.0.0/docs/data-sources/streamlits) (data source) * Tag - * [snowflake_tag](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.98.0/docs/resources/tag) - * [snowflake_tag_association](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.98.0/docs/resources/tag_association) - * 
[snowflake_tags](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.98.0/docs/data-sources/tags) (datasource) + * [snowflake_tag](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/1.0.0/docs/resources/tag) + * [snowflake_tag_association](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/1.0.0/docs/resources/tag_association) + * [snowflake_tags](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/1.0.0/docs/data-sources/tags) (data source) * Task - * [snowflake_task](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.98.0/docs/resources/task) - * [snowflake_tasks](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.98.0/docs/data-sources/tasks) (datasource) + * [snowflake_task](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/1.0.0/docs/resources/task) + * [snowflake_tasks](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/1.0.0/docs/data-sources/tasks) (data source) * User - * [snowflake_user](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.98.0/docs/resources/user) - * [snowflake_service_user](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.98.0/docs/resources/service_user) - * [snowflake_legacy_service_user](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.98.0/docs/resources/legacy_service_user) - * [snowflake_users](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.98.0/docs/data-sources/users) (datasource) + * [snowflake_user](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/1.0.0/docs/resources/user) + * [snowflake_service_user](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/1.0.0/docs/resources/service_user) + * [snowflake_legacy_service_user](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/1.0.0/docs/resources/legacy_service_user) + * 
[snowflake_users](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/1.0.0/docs/data-sources/users) (data source) * View - * [snowflake_view](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.98.0/docs/resources/view) - * [snowflake_views](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.98.0/docs/data-sources/views) (datasource) + * [snowflake_view](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/1.0.0/docs/resources/view) + * [snowflake_views](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/1.0.0/docs/data-sources/views) (data source) * Warehouse - * [snowflake_warehouse](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.98.0/docs/resources/warehouse) - * [snowflake_warehouse](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.98.0/docs/data-sources/warehouse) (datasource) + * [snowflake_warehouse](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/1.0.0/docs/resources/warehouse) + * [snowflake_warehouse](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/1.0.0/docs/data-sources/warehouse) (data source) diff --git a/v1-preparations/REMAINING_GA_OBJECTS.MD b/v1-preparations/REMAINING_GA_OBJECTS.MD index 97f8f7bade..a0327351a4 100644 --- a/v1-preparations/REMAINING_GA_OBJECTS.MD +++ b/v1-preparations/REMAINING_GA_OBJECTS.MD @@ -6,13 +6,14 @@ Status is one of: - ✅ - done
- ❌ - not started
- 👨‍💻 - in progress
+- 🗑 - removed
Known issues lists open issues touching the given object. Note that some of these issues may be already fixed in the newer provider versions. We will address these while working on the given object. | Object Type | Status | Known issues | |-----------------------------|:------:|---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| | snowflake_object_parameter | 👨‍💻 | [#2446](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2446), [#1848](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1848), [#1561](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1561), [#1457](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1457) | -| snowflake_session_parameter | 👨‍💻 | [#1814](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1814), [#1783](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1783), [#1036](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1036) | +| snowflake_session_parameter | 🗑‍ | [#1814](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1814), [#1783](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1783), 
[#1036](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1036) | | snowflake_account_parameter | 👨‍💻 | [#1679](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1679) | | API INTEGRATION | ❌ | [#2772](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2772), [#1445](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1445) | | APPLICATION | ❌ | - | From 112c85244191a65bf552ffe27285299d6cc3831f Mon Sep 17 00:00:00 2001 From: Jakub Michalak Date: Thu, 12 Dec 2024 21:34:06 +0100 Subject: [PATCH 15/20] feat!: True v1 release (#3283) Try forcing release-please to create a pr with v1 release (previous commit had a wrong name). --- docs/index.md | 2 +- templates/index.md.tmpl | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/index.md b/docs/index.md index b5d8c76acb..a9ef3a3972 100644 --- a/docs/index.md +++ b/docs/index.md @@ -5,7 +5,7 @@ description: Manage SnowflakeDB with Terraform. # Snowflake Provider -~> **Disclaimer** The project is in v1 version, but some features are in preview. Such resources and data sources are considered a preview feature in the provider, regardless of the state of the resource in Snowflake. We do not guarantee their stability. They will be reworked and marked as a stable feature in future releases. Breaking changes in these features are expected, even without bumping the major version. They are disabled by default. To use them, add the relevant feature name to `preview_features_enabled` field in the [provider configuration](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/latest/docs#schema). The list of preview features is available below. Please always refer to the [Getting Help](https://github.com/Snowflake-Labs/terraform-provider-snowflake?tab=readme-ov-file#getting-help) section in our Github repo to best determine how to get help for your questions. 
+~> **Disclaimer** The project is in v1 version, but some features are in preview. Such resources and data sources are considered preview features in the provider, regardless of their state in Snowflake. We do not guarantee their stability. They will be reworked and marked as a stable feature in future releases. Breaking changes in these features are expected, even without bumping the major version. They are disabled by default. To use them, add the relevant feature name to `preview_features_enabled` field in the [provider configuration](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/latest/docs#schema). The list of preview features is available below. Please always refer to the [Getting Help](https://github.com/Snowflake-Labs/terraform-provider-snowflake?tab=readme-ov-file#getting-help) section in our Github repo to best determine how to get help for your questions. ~> **Note** Please check the [migration guide](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/MIGRATION_GUIDE.md) when changing the version of the provider. diff --git a/templates/index.md.tmpl b/templates/index.md.tmpl index f545b41b60..67bbd531c3 100644 --- a/templates/index.md.tmpl +++ b/templates/index.md.tmpl @@ -5,7 +5,7 @@ description: Manage SnowflakeDB with Terraform. # Snowflake Provider -~> **Disclaimer** The project is in v1 version, but some features are in preview. Such resources and data sources are considered a preview feature in the provider, regardless of the state of the resource in Snowflake. We do not guarantee their stability. They will be reworked and marked as a stable feature in future releases. Breaking changes in these features are expected, even without bumping the major version. They are disabled by default. To use them, add the relevant feature name to `preview_features_enabled` field in the [provider configuration](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/latest/docs#schema). 
The list of preview features is available below. Please always refer to the [Getting Help](https://github.com/Snowflake-Labs/terraform-provider-snowflake?tab=readme-ov-file#getting-help) section in our Github repo to best determine how to get help for your questions. +~> **Disclaimer** The project is in v1 version, but some features are in preview. Such resources and data sources are considered preview features in the provider, regardless of their state in Snowflake. We do not guarantee their stability. They will be reworked and marked as a stable feature in future releases. Breaking changes in these features are expected, even without bumping the major version. They are disabled by default. To use them, add the relevant feature name to `preview_features_enabled` field in the [provider configuration](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/latest/docs#schema). The list of preview features is available below. Please always refer to the [Getting Help](https://github.com/Snowflake-Labs/terraform-provider-snowflake?tab=readme-ov-file#getting-help) section in our Github repo to best determine how to get help for your questions. ~> **Note** Please check the [migration guide](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/MIGRATION_GUIDE.md) when changing the version of the provider. 
From b8ddbf91bafdc8eef21a89da1ea86e6a455e2b96 Mon Sep 17 00:00:00 2001 From: Jakub Michalak Date: Thu, 12 Dec 2024 21:36:54 +0100 Subject: [PATCH 16/20] chore: release 1.0.0 Release-As: 1.0.0 From 3aac502beb404449674c54d153ca5aab6ef3cbaa Mon Sep 17 00:00:00 2001 From: Jakub Michalak Date: Fri, 13 Dec 2024 00:13:30 +0100 Subject: [PATCH 17/20] fix: Add missing preview features (#3284) Co-authored-by: Artur Sawicki --- docs/index.md | 2 +- .../previewfeatures/preview_features.go | 21 ++++++++++++++----- 2 files changed, 17 insertions(+), 6 deletions(-) diff --git a/docs/index.md b/docs/index.md index a9ef3a3972..31fc8211d8 100644 --- a/docs/index.md +++ b/docs/index.md @@ -97,7 +97,7 @@ provider "snowflake" { - `passcode_in_password` (Boolean) False by default. Set to true if the MFA passcode is embedded to the configured password. Can also be sourced from the `SNOWFLAKE_PASSCODE_IN_PASSWORD` environment variable. - `password` (String, Sensitive) Password for user + password auth. Cannot be used with `private_key` and `private_key_passphrase`. Can also be sourced from the `SNOWFLAKE_PASSWORD` environment variable. - `port` (Number) Specifies a custom port value used by the driver for privatelink connections. Can also be sourced from the `SNOWFLAKE_PORT` environment variable. -- `preview_features_enabled` (Set of String) A list of preview features that are handled by the provider. See [preview features list](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/v1-preparations/LIST_OF_PREVIEW_FEATURES_FOR_V1.md). Preview features may have breaking changes in future releases, even without raising the major version. This field can not be set with environmental variables. 
Valid options are: `snowflake_current_account_datasource` | `snowflake_account_authentication_policy_attachment_resource` | `snowflake_account_password_policy_attachment_resource` | `snowflake_alert_resource` | `snowflake_alerts_datasource` | `snowflake_api_integration_resource` | `snowflake_authentication_policy_resource` | `snowflake_cortex_search_service_resource` | `snowflake_cortex_search_services_datasource` | `snowflake_database_datasource` | `snowflake_database_role_datasource` | `snowflake_dynamic_table_resource` | `snowflake_dynamic_tables_datasource` | `snowflake_external_function_resource` | `snowflake_external_functions_datasource` | `snowflake_external_table_resource` | `snowflake_external_tables_datasource` | `snowflake_external_volume_resource` | `snowflake_failover_group_resource` | `snowflake_failover_groups_datasource` | `snowflake_file_format_resource` | `snowflake_file_formats_datasource` | `snowflake_managed_account_resource` | `snowflake_materialized_view_resource` | `snowflake_materialized_views_datasource` | `snowflake_network_policy_attachment_resource` | `snowflake_network_rule_resource` | `snowflake_email_notification_integration_resource` | `snowflake_notification_integration_resource` | `snowflake_object_parameter_resource` | `snowflake_password_policy_resource` | `snowflake_pipe_resource` | `snowflake_pipes_datasource` | `snowflake_current_role_datasource` | `snowflake_sequence_resource` | `snowflake_sequences_datasource` | `snowflake_share_resource` | `snowflake_shares_datasource` | `snowflake_parameters_datasource` | `snowflake_stage_resource` | `snowflake_stages_datasource` | `snowflake_storage_integration_resource` | `snowflake_storage_integrations_datasource` | `snowflake_system_generate_scim_access_token_datasource` | `snowflake_system_get_aws_sns_iam_policy_datasource` | `snowflake_system_get_privatelink_config_datasource` | `snowflake_system_get_snowflake_platform_info_datasource` | 
`snowflake_table_column_masking_policy_application_resource` | `snowflake_table_constraint_resource` | `snowflake_user_authentication_policy_attachment_resource` | `snowflake_user_public_keys_resource` | `snowflake_user_password_policy_attachment_resource`. +- `preview_features_enabled` (Set of String) A list of preview features that are handled by the provider. See [preview features list](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/v1-preparations/LIST_OF_PREVIEW_FEATURES_FOR_V1.md). Preview features may have breaking changes in future releases, even without raising the major version. This field can not be set with environmental variables. Valid options are: `snowflake_current_account_datasource` | `snowflake_account_authentication_policy_attachment_resource` | `snowflake_account_password_policy_attachment_resource` | `snowflake_alert_resource` | `snowflake_alerts_datasource` | `snowflake_api_integration_resource` | `snowflake_authentication_policy_resource` | `snowflake_cortex_search_service_resource` | `snowflake_cortex_search_services_datasource` | `snowflake_database_datasource` | `snowflake_database_role_datasource` | `snowflake_dynamic_table_resource` | `snowflake_dynamic_tables_datasource` | `snowflake_external_function_resource` | `snowflake_external_functions_datasource` | `snowflake_external_table_resource` | `snowflake_external_tables_datasource` | `snowflake_external_volume_resource` | `snowflake_failover_group_resource` | `snowflake_failover_groups_datasource` | `snowflake_file_format_resource` | `snowflake_file_formats_datasource` | `snowflake_function_java_resource` | `snowflake_function_javascript_resource` | `snowflake_function_python_resource` | `snowflake_function_scala_resource` | `snowflake_function_sql_resource` | `snowflake_managed_account_resource` | `snowflake_materialized_view_resource` | `snowflake_materialized_views_datasource` | `snowflake_network_policy_attachment_resource` | 
`snowflake_network_rule_resource` | `snowflake_email_notification_integration_resource` | `snowflake_notification_integration_resource` | `snowflake_object_parameter_resource` | `snowflake_password_policy_resource` | `snowflake_pipe_resource` | `snowflake_pipes_datasource` | `snowflake_current_role_datasource` | `snowflake_sequence_resource` | `snowflake_sequences_datasource` | `snowflake_share_resource` | `snowflake_shares_datasource` | `snowflake_parameters_datasource` | `snowflake_procedure_java_resource` | `snowflake_procedure_javascript_resource` | `snowflake_procedure_python_resource` | `snowflake_procedure_scala_resource` | `snowflake_procedure_sql_resource` | `snowflake_stage_resource` | `snowflake_stages_datasource` | `snowflake_storage_integration_resource` | `snowflake_storage_integrations_datasource` | `snowflake_system_generate_scim_access_token_datasource` | `snowflake_system_get_aws_sns_iam_policy_datasource` | `snowflake_system_get_privatelink_config_datasource` | `snowflake_system_get_snowflake_platform_info_datasource` | `snowflake_table_column_masking_policy_application_resource` | `snowflake_table_constraint_resource` | `snowflake_table_resource` | `snowflake_user_authentication_policy_attachment_resource` | `snowflake_user_public_keys_resource` | `snowflake_user_password_policy_attachment_resource`. - `private_key` (String, Sensitive) Private Key for username+private-key auth. Cannot be used with `password`. Can also be sourced from the `SNOWFLAKE_PRIVATE_KEY` environment variable. - `private_key_passphrase` (String, Sensitive) Supports the encryption ciphers aes-128-cbc, aes-128-gcm, aes-192-cbc, aes-192-gcm, aes-256-cbc, aes-256-gcm, and des-ede3-cbc. Can also be sourced from the `SNOWFLAKE_PRIVATE_KEY_PASSPHRASE` environment variable. - `profile` (String) Sets the profile to read from ~/.snowflake/config file. Can also be sourced from the `SNOWFLAKE_PROFILE` environment variable. 
diff --git a/pkg/provider/previewfeatures/preview_features.go b/pkg/provider/previewfeatures/preview_features.go index e14600fe2c..112ac705e6 100644 --- a/pkg/provider/previewfeatures/preview_features.go +++ b/pkg/provider/previewfeatures/preview_features.go @@ -48,11 +48,11 @@ const ( PasswordPolicyResource feature = "snowflake_password_policy_resource" PipeResource feature = "snowflake_pipe_resource" PipesDatasource feature = "snowflake_pipes_datasource" - ProcedureJavaResource feature = "snowflake_procecure_java_resource" - ProcedureJavascriptResource feature = "snowflake_procecure_javascript_resource" - ProcedurePythonResource feature = "snowflake_procecure_python_resource" - ProcedureScalaResource feature = "snowflake_procecure_scala_resource" - ProcedureSqlResource feature = "snowflake_procecure_sql_resource" + ProcedureJavaResource feature = "snowflake_procedure_java_resource" + ProcedureJavascriptResource feature = "snowflake_procedure_javascript_resource" + ProcedurePythonResource feature = "snowflake_procedure_python_resource" + ProcedureScalaResource feature = "snowflake_procedure_scala_resource" + ProcedureSqlResource feature = "snowflake_procedure_sql_resource" ProceduresDatasource feature = "snowflake_procedures_datasource" CurrentRoleDatasource feature = "snowflake_current_role_datasource" SequenceResource feature = "snowflake_sequence_resource" @@ -100,6 +100,11 @@ var allPreviewFeatures = []feature{ FailoverGroupsDatasource, FileFormatResource, FileFormatsDatasource, + FunctionJavaResource, + FunctionJavascriptResource, + FunctionPythonResource, + FunctionScalaResource, + FunctionSqlResource, ManagedAccountResource, MaterializedViewResource, MaterializedViewsDatasource, @@ -117,6 +122,11 @@ var allPreviewFeatures = []feature{ ShareResource, SharesDatasource, ParametersDatasource, + ProcedureJavaResource, + ProcedureJavascriptResource, + ProcedurePythonResource, + ProcedureScalaResource, + ProcedureSqlResource, StageResource, StagesDatasource, 
StorageIntegrationResource, @@ -127,6 +137,7 @@ var allPreviewFeatures = []feature{ SystemGetSnowflakePlatformInfoDatasource, TableColumnMaskingPolicyApplicationResource, TableConstraintResource, + TableResource, UserAuthenticationPolicyAttachmentResource, UserPublicKeysResource, UserPasswordPolicyAttachmentResource, From ab015e8cf6f4db762b4043e8bfce2a010b623602 Mon Sep 17 00:00:00 2001 From: "snowflake-release-please[bot]" <105954990+snowflake-release-please[bot]@users.noreply.github.com> Date: Fri, 13 Dec 2024 00:17:56 +0100 Subject: [PATCH 18/20] chore(main): release 1.0.0 (#3282) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit :robot: I have created a release *beep* *boop* --- ## [1.0.0](https://github.com/Snowflake-Labs/terraform-provider-snowflake/compare/v0.100.0...v1.0.0) (2024-12-12) ### ⚠ BREAKING CHANGES * True v1 release ([#3283](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/3283)) * Release v1 ### 🎉 **What's new:** * True v1 release ([#3283](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/3283)) ([112c852](https://github.com/Snowflake-Labs/terraform-provider-snowflake/commit/112c85244191a65bf552ffe27285299d6cc3831f)) ### 🔧 **Misc** * release 1.0.0 ([b8ddbf9](https://github.com/Snowflake-Labs/terraform-provider-snowflake/commit/b8ddbf91bafdc8eef21a89da1ea86e6a455e2b96)) * Release v1 ([#3281](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/3281)) ([82f240e](https://github.com/Snowflake-Labs/terraform-provider-snowflake/commit/82f240eeed9ec24d6afc82cf5d0106544bec5838)) ### 🐛 **Bug fixes:** * Add missing preview features ([#3284](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/3284)) ([3aac502](https://github.com/Snowflake-Labs/terraform-provider-snowflake/commit/3aac502beb404449674c54d153ca5aab6ef3cbaa)) --- This PR was generated with [Release Please](https://github.com/googleapis/release-please). 
See [documentation](https://github.com/googleapis/release-please#release-please). Co-authored-by: snowflake-release-please[bot] <105954990+snowflake-release-please[bot]@users.noreply.github.com> --- CHANGELOG.md | 23 +++++++++++++++++++++++ 1 file changed, 23 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index ae66222e55..8bb79cd65a 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,28 @@ # Changelog +## [1.0.0](https://github.com/Snowflake-Labs/terraform-provider-snowflake/compare/v0.100.0...v1.0.0) (2024-12-12) + + +### ⚠ BREAKING CHANGES + +* True v1 release ([#3283](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/3283)) +* Release v1 + +### 🎉 **What's new:** + +* True v1 release ([#3283](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/3283)) ([112c852](https://github.com/Snowflake-Labs/terraform-provider-snowflake/commit/112c85244191a65bf552ffe27285299d6cc3831f)) + + +### 🔧 **Misc** + +* release 1.0.0 ([b8ddbf9](https://github.com/Snowflake-Labs/terraform-provider-snowflake/commit/b8ddbf91bafdc8eef21a89da1ea86e6a455e2b96)) +* Release v1 ([#3281](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/3281)) ([82f240e](https://github.com/Snowflake-Labs/terraform-provider-snowflake/commit/82f240eeed9ec24d6afc82cf5d0106544bec5838)) + + +### 🐛 **Bug fixes:** + +* Add missing preview features ([#3284](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/3284)) ([3aac502](https://github.com/Snowflake-Labs/terraform-provider-snowflake/commit/3aac502beb404449674c54d153ca5aab6ef3cbaa)) + ## [0.100.0](https://github.com/Snowflake-Labs/terraform-provider-snowflake/compare/v0.99.0...v0.100.0) (2024-12-12) From fc10672318af90ab6d879dfa63b8efaf93589e62 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jan=20Cie=C5=9Blak?= Date: Fri, 13 Dec 2024 13:52:18 +0100 Subject: [PATCH 19/20] chore: Adjust GitHub templates to the new development branch (#3286) --- .github/workflows/docs-and-format.yml | 1 + 
.github/workflows/reviewdog-golanglint-ci.yml | 1 + 2 files changed, 2 insertions(+) diff --git a/.github/workflows/docs-and-format.yml b/.github/workflows/docs-and-format.yml index 1709cefcd9..9576f21114 100644 --- a/.github/workflows/docs-and-format.yml +++ b/.github/workflows/docs-and-format.yml @@ -3,6 +3,7 @@ on: push: branches: - main + - dev pull_request: permissions: diff --git a/.github/workflows/reviewdog-golanglint-ci.yml b/.github/workflows/reviewdog-golanglint-ci.yml index 770c6db545..589917f7f4 100644 --- a/.github/workflows/reviewdog-golanglint-ci.yml +++ b/.github/workflows/reviewdog-golanglint-ci.yml @@ -3,6 +3,7 @@ on: push: branches: - main + - dev pull_request: permissions: From 39a0cb76a1e64f2c4b5ee5923223d6e0df0ddb46 Mon Sep 17 00:00:00 2001 From: Artur Sawicki Date: Fri, 13 Dec 2024 16:33:52 +0100 Subject: [PATCH 20/20] chore: Add holidays note, change the disclaimer, and fix an example (#3288) Add holidays note, change the disclaimer, and fix an example --- README.md | 21 +++++++++++---------- ROADMAP.md | 12 ++++++++++++ 2 files changed, 23 insertions(+), 10 deletions(-) diff --git a/README.md b/README.md index 9136352ea3..a3c5975c67 100644 --- a/README.md +++ b/README.md @@ -1,12 +1,12 @@ # Snowflake Terraform Provider +> ⚠️ **Incoming holidays**: The whole team will be absent from the 21st of December to the 7th of January. Our involvement in GitHub issues during this time will be limited. We will look out for the critical issues, though. Merry Christmas and a Happy New Year! + > ⚠️ **Please note**: If you believe you have found a security issue, _please responsibly disclose_ by contacting us at [triage-terraformprovider-dl@snowflake.com](mailto:triage-terraformprovider-dl@snowflake.com). -> ⚠️ **Disclaimer**: the project is still in the 0.x.x version, which means it’s still in the experimental phase (check [Go module versioning](https://go.dev/doc/modules/version-numbers#v0-number) for more details). 
It can be used in production but makes no stability or backward compatibility guarantees. We do not provide backward bug fixes and, therefore, always suggest using the newest version. We are providing only limited support for the provider; priorities will be assigned on a case-by-case basis. -> -> Our main current goals are stabilization, addressing existing issues, and providing the missing features (prioritizing the GA features; supporting PrPr and PuPr features are not high priorities now). -> -> With all that in mind, we aim to reach V1 with a stable, reliable, and functional provider. V1 will be free of all the above limitations. +> ⚠️ **Disclaimer**: The project is in v1 version, but some features are in preview. Such resources and data sources are considered preview features in the provider, regardless of their state in Snowflake. We do not guarantee their stability. They will be reworked and marked as a stable feature in future releases. Breaking changes in these features are expected, even without bumping the major version. They are disabled by default. To use them, add the relevant feature name to `preview_features_enabled` field in the [provider configuration](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/latest/docs#schema). The list of preview features is available below. Please always refer to the [Getting Help](https://github.com/Snowflake-Labs/terraform-provider-snowflake?tab=readme-ov-file#getting-help) section in our Github repo to best determine how to get help for your questions. +> +> Keep in mind that V1 does not mean we have an official Snowflake support. Please follow [creating issues guidelines](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/CREATING_ISSUES.md). 
---- @@ -39,16 +39,17 @@ terraform { required_providers { snowflake = { source = "Snowflake-Labs/snowflake" - version = "~> 0.61" + version = ">= 1.0.0" } } } provider "snowflake" { - account = "abc12345" # the Snowflake account identifier - user = "johndoe" - password = "v3ry$3cr3t" - role = "ACCOUNTADMIN" + organization_name = "organization_name" + account_name = "account_name" + user = "johndoe" + password = "v3ry$3cr3t" + role = "ACCOUNTADMIN" } ``` diff --git a/ROADMAP.md b/ROADMAP.md index d71a3aa2c2..539f14ecb3 100644 --- a/ROADMAP.md +++ b/ROADMAP.md @@ -1,5 +1,17 @@ # Our roadmap +## (13.12.2024) V1 release update + +We have released a long-awaited [v1.0.0](https://github.com/Snowflake-Labs/terraform-provider-snowflake/releases/tag/v1.0.0). A few things to know now: +- Together with v1.0.0 we have also released "the last" 0.x.x version - 0.100.0. v1.0.0 is built on top of that; it removed the [deprecated resources](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/ab015e8cf6f4db762b4043e8bfce2a010b623602/v1-preparations/LIST_OF_REMOVED_RESOURCES_FOR_V1.md) and attributes mostly, so if you are using one of the latest 0.x versions, you should be really close to v1. +- Check the migration guides for [v1.0.0](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/MIGRATION_GUIDE.md#v01000--v100) and [v0.100.0](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/MIGRATION_GUIDE.md#v0990--v01000). +- The provider entered a stable version from the engineering point of view. It will prohibit us from introducing breaking changes in stable resources without bumping the major version. 
+- Resources and data sources in our provider now have two states, [stable](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/ab015e8cf6f4db762b4043e8bfce2a010b623602/v1-preparations/LIST_OF_STABLE_RESOURCES_FOR_V1.md) and [preview](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/ab015e8cf6f4db762b4043e8bfce2a010b623602/v1-preparations/LIST_OF_PREVIEW_FEATURES_FOR_V1.md). To enable a given preview feature, you have to explicitly set it in [the provider config](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/latest/docs#preview_features_enabled-1). Please familiarize yourselves with the limitations of the preview feature before enabling it (most notably, preview features may require migrations between minor versions). +- Our current main goal is to help with migration and address all the incoming v1 issues. +- Keep in mind that V1 does not mean we have official Snowflake support (check our new disclaimer in [README](https://github.com/Snowflake-Labs/terraform-provider-snowflake?tab=readme-ov-file#snowflake-terraform-provider)). +- Our next milestone is reaching GA, which requires mostly procedural steps. Before that, no big changes are planned for the provider. +- Besides the GA, we want to focus mostly on stabilizing the preview resources. We will share their current prioritization in January. The main ones for now are functions, procedures, and tables. + ## (25.10.2024) Project state overview ### Goals