From 6d4a10364276e92fa791eaa022c3bd7bce16228d Mon Sep 17 00:00:00 2001 From: Artur Sawicki Date: Thu, 12 Dec 2024 12:06:47 +0100 Subject: [PATCH 1/2] feat: Basic functions implementation (#3269) Prepare most of the java resource implementation: - add common drop method (also to procedures) - fix parameters schema and add mapping (also for procedures) - fix parameter handling - handle parameters and test - handle arguments, return type, runtime version, imports, target path, language - add default values to arguments - improve function details with mapping logic (for easier use in resource) - add a bunch of common functions to handle this family of resources - handle basic rename - add TABLE data type (needs logic and tests) - handle external language change - test no arguments - regenerate model builders and the docs - rename a few attributes - change requirements for some fields Next PRs: - TABLE function improvements and tests - handle secrets, external access integrations, packages, return not null, and comments - Add a similar PR for java procedure (reuse what we can) - Add PR with all other function types - datasources --- docs/resources/function_java.md | 86 +++- docs/resources/function_javascript.md | 62 ++- docs/resources/function_python.md | 75 ++- docs/resources/function_scala.md | 86 +++- docs/resources/function_sql.md | 62 ++- docs/resources/procedure_java.md | 56 ++- docs/resources/procedure_javascript.md | 56 ++- docs/resources/procedure_python.md | 56 ++- docs/resources/procedure_scala.md | 56 ++- docs/resources/procedure_sql.md | 56 ++- .../function_describe_snowflake_ext.go | 69 +++ .../function_java_resource_ext.go | 17 + .../function_resource_parameters_ext.go | 13 + .../config/model/function_java_model_ext.go | 72 +++ .../config/model/function_java_model_gen.go | 19 +- .../model/function_javascript_model_gen.go | 40 +- .../config/model/function_python_model_gen.go | 14 +- .../config/model/function_scala_model_gen.go | 19 +- 
.../config/model/function_sql_model_gen.go | 40 +- pkg/acceptance/check_destroy.go | 44 +- pkg/acceptance/helpers/function_client.go | 32 ++ .../helpers/function_setup_helpers.go | 92 ++-- pkg/acceptance/helpers/ids_generator.go | 12 + pkg/acceptance/helpers/stage_client.go | 15 + pkg/resources/custom_diffs.go | 12 + pkg/resources/doc_helpers.go | 3 +- pkg/resources/function.go | 16 +- pkg/resources/function_commons.go | 277 ++++++++++- pkg/resources/function_java.go | 165 ++++++- .../function_java_acceptance_test.go | 435 ++++++++++++++++++ pkg/resources/function_javascript.go | 2 +- pkg/resources/function_parameters.go | 12 +- pkg/resources/function_python.go | 2 +- pkg/resources/function_scala.go | 2 +- pkg/resources/function_sql.go | 2 +- pkg/resources/procedure.go | 16 +- pkg/resources/procedure_commons.go | 21 +- pkg/resources/procedure_java.go | 6 +- pkg/resources/procedure_javascript.go | 6 +- pkg/resources/procedure_python.go | 6 +- pkg/resources/procedure_scala.go | 6 +- pkg/resources/procedure_sql.go | 6 +- pkg/resources/resource_helpers_create.go | 29 ++ pkg/resources/resource_helpers_read.go | 19 + pkg/resources/user.go | 1 - pkg/schemas/function_parameters.go | 35 ++ pkg/schemas/procedure_parameters.go | 35 ++ pkg/sdk/data_types_deprecated.go | 4 + pkg/sdk/datatypes/legacy.go | 3 + pkg/sdk/datatypes/table.go | 39 ++ pkg/sdk/functions_ext.go | 152 +++++- pkg/sdk/functions_ext_test.go | 179 +++++++ pkg/sdk/identifier_helpers.go | 12 +- pkg/sdk/random_test.go | 12 +- pkg/sdk/testint/functions_integration_test.go | 209 ++++++++- .../testint/procedures_integration_test.go | 39 +- 56 files changed, 2638 insertions(+), 274 deletions(-) create mode 100644 pkg/acceptance/bettertestspoc/assert/resourceassert/function_java_resource_ext.go create mode 100644 pkg/acceptance/bettertestspoc/assert/resourceparametersassert/function_resource_parameters_ext.go create mode 100644 pkg/resources/function_java_acceptance_test.go create mode 100644 
pkg/schemas/function_parameters.go create mode 100644 pkg/schemas/procedure_parameters.go create mode 100644 pkg/sdk/datatypes/table.go create mode 100644 pkg/sdk/functions_ext_test.go diff --git a/docs/resources/function_java.md b/docs/resources/function_java.md index 23ab3b5dc2..820bb4e63d 100644 --- a/docs/resources/function_java.md +++ b/docs/resources/function_java.md @@ -17,7 +17,6 @@ Resource used to manage java function objects. For more information, check [func ### Required - `database` (String) The database in which to create the function. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"`. -- `function_definition` (String) Defines the handler code executed when the UDF is called. Wrapping `$$` signs are added by the provider automatically; do not include them. The `function_definition` value must be Java source code. For more information, see [Introduction to Java UDFs](https://docs.snowflake.com/en/developer-guide/udf/java/udf-java-introduction). To mitigate permadiff on this field, the provider replaces blank characters with a space. This can lead to false positives in cases where a change in case or run of whitespace is semantically significant. - `handler` (String) The name of the handler method or class. If the handler is for a scalar UDF, returning a non-tabular value, the HANDLER value should be a method name, as in the following form: `MyClass.myMethod`. If the handler is for a tabular UDF, the HANDLER value should be the name of a handler class. - `name` (String) The name of the function; the identifier does not need to be unique for the schema in which the function is created because UDFs are identified and resolved by the combination of the name and argument types. 
Check the [docs](https://docs.snowflake.com/en/sql-reference/sql/create-function#all-languages). Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"`. - `return_type` (String) Specifies the results returned by the UDF, which determines the UDF type. Use `` to create a scalar UDF that returns a single value with the specified data type. Use `TABLE (col_name col_data_type, ...)` to creates a table UDF that returns tabular results with the specified table column(s) and column type(s). For the details, consult the [docs](https://docs.snowflake.com/en/sql-reference/sql/create-function#all-languages). @@ -29,16 +28,17 @@ Resource used to manage java function objects. For more information, check [func - `comment` (String) Specifies a comment for the function. - `enable_console_output` (Boolean) Enable stdout/stderr fast path logging for anonyous stored procs. This is a public parameter (similar to LOG_LEVEL). For more information, check [ENABLE_CONSOLE_OUTPUT docs](https://docs.snowflake.com/en/sql-reference/parameters#enable-console-output). - `external_access_integrations` (Set of String) The names of [external access integrations](https://docs.snowflake.com/en/sql-reference/sql/create-external-access-integration) needed in order for this function’s handler code to access external networks. An external access integration specifies [network rules](https://docs.snowflake.com/en/sql-reference/sql/create-network-rule) and [secrets](https://docs.snowflake.com/en/sql-reference/sql/create-secret) that specify external locations and credentials (if any) allowed for use by handler code when making requests of an external network, such as an external REST API. 
-- `imports` (Set of String) The location (stage), path, and name of the file(s) to import. A file can be a JAR file or another type of file. If the file is a JAR file, it can contain one or more .class files and zero or more resource files. JNI (Java Native Interface) is not supported. Snowflake prohibits loading libraries that contain native code (as opposed to Java bytecode). Java UDFs can also read non-JAR files. For an example, see [Reading a file specified statically in IMPORTS](https://docs.snowflake.com/en/developer-guide/udf/java/udf-java-cookbook.html#label-reading-file-from-java-udf-imports). Consult the [docs](https://docs.snowflake.com/en/sql-reference/sql/create-function#java). +- `function_definition` (String) Defines the handler code executed when the UDF is called. Wrapping `$$` signs are added by the provider automatically; do not include them. The `function_definition` value must be Java source code. For more information, see [Introduction to Java UDFs](https://docs.snowflake.com/en/developer-guide/udf/java/udf-java-introduction). To mitigate permadiff on this field, the provider replaces blank characters with a space. This can lead to false positives in cases where a change in case or run of whitespace is semantically significant. +- `imports` (Block Set) The location (stage), path, and name of the file(s) to import. A file can be a JAR file or another type of file. If the file is a JAR file, it can contain one or more .class files and zero or more resource files. JNI (Java Native Interface) is not supported. Snowflake prohibits loading libraries that contain native code (as opposed to Java bytecode). Java UDFs can also read non-JAR files. For an example, see [Reading a file specified statically in IMPORTS](https://docs.snowflake.com/en/developer-guide/udf/java/udf-java-cookbook.html#label-reading-file-from-java-udf-imports). Consult the [docs](https://docs.snowflake.com/en/sql-reference/sql/create-function#java). 
(see [below for nested schema](#nestedblock--imports)) - `is_secure` (String) Specifies that the function is secure. By design, the Snowflake's `SHOW FUNCTIONS` command does not provide information about secure functions (consult [function docs](https://docs.snowflake.com/en/sql-reference/sql/create-function#id1) and [Protecting Sensitive Information with Secure UDFs and Stored Procedures](https://docs.snowflake.com/en/developer-guide/secure-udf-procedure)) which is essential to manage/import function with Terraform. Use the role owning the function while managing secure functions. Available options are: "true" or "false". When the value is not set in the configuration the provider will put "default" there which means to use the Snowflake default for this value. - `log_level` (String) LOG_LEVEL to use when filtering events For more information, check [LOG_LEVEL docs](https://docs.snowflake.com/en/sql-reference/parameters#log-level). - `metric_level` (String) METRIC_LEVEL value to control whether to emit metrics to Event Table For more information, check [METRIC_LEVEL docs](https://docs.snowflake.com/en/sql-reference/parameters#metric-level). - `null_input_behavior` (String) Specifies the behavior of the function when called with null inputs. Valid values are (case-insensitive): `CALLED ON NULL INPUT` | `RETURNS NULL ON NULL INPUT`. - `packages` (Set of String) The name and version number of Snowflake system packages required as dependencies. The value should be of the form `package_name:version_number`, where `package_name` is `snowflake_domain:package`. -- `return_behavior` (String) Specifies the behavior of the function when returning results. Valid values are (case-insensitive): `VOLATILE` | `IMMUTABLE`. +- `return_results_behavior` (String) Specifies the behavior of the function when returning results. Valid values are (case-insensitive): `VOLATILE` | `IMMUTABLE`. - `runtime_version` (String) Specifies the Java JDK runtime version to use. 
The supported versions of Java are 11.x and 17.x. If RUNTIME_VERSION is not set, Java JDK 11 is used. - `secrets` (Block Set) Assigns the names of [secrets](https://docs.snowflake.com/en/sql-reference/sql/create-secret) to variables so that you can use the variables to reference the secrets when retrieving information from secrets in handler code. Secrets you specify here must be allowed by the [external access integration](https://docs.snowflake.com/en/sql-reference/sql/create-external-access-integration) specified as a value of this CREATE FUNCTION command’s EXTERNAL_ACCESS_INTEGRATIONS parameter. (see [below for nested schema](#nestedblock--secrets)) -- `target_path` (String) The name of the handler method or class. If the handler is for a scalar UDF, returning a non-tabular value, the HANDLER value should be a method name, as in the following form: `MyClass.myMethod`. If the handler is for a tabular UDF, the HANDLER value should be the name of a handler class. +- `target_path` (Block Set, Max: 1) The stage location and path to which Snowflake should write the compiled code (JAR file) after compiling the in-line handler source code. Consult the [docs](https://docs.snowflake.com/en/sql-reference/sql/create-function#java). (see [below for nested schema](#nestedblock--target_path)) - `trace_level` (String) Trace level value to use when generating/filtering trace events For more information, check [TRACE_LEVEL docs](https://docs.snowflake.com/en/sql-reference/parameters#trace-level). ### Read-Only @@ -57,6 +57,19 @@ Required: - `arg_data_type` (String) The argument type. - `arg_name` (String) The argument name. +Optional: + +- `arg_default_value` (String) Optional default value for the argument. For text values use single quotes. Numeric values can be unquoted. External changes for this field won't be detected.
In case you want to apply external changes, you can re-create the resource manually using "terraform taint". + + + +### Nested Schema for `imports` + +Required: + +- `path_on_stage` (String) Path for import on stage, without the leading `/`. +- `stage_location` (String) Stage location without leading `@`. To use your user's stage set this to `~`, otherwise pass fully qualified name of the stage (with every part contained in double quotes or use `snowflake_stage..fully_qualified_name` if you manage this stage through terraform). + ### Nested Schema for `secrets` @@ -67,15 +80,72 @@ Required: - `secret_variable_name` (String) The variable that will be used in handler code when retrieving information from the secret. + +### Nested Schema for `target_path` + +Required: + +- `path_on_stage` (String) Path for import on stage, without the leading `/`. +- `stage_location` (String) Stage location without leading `@`. To use your user's stage set this to `~`, otherwise pass fully qualified name of the stage (with every part contained in double quotes or use `snowflake_stage..fully_qualified_name` if you manage this stage through terraform). 
+ + ### Nested Schema for `parameters` Read-Only: -- `enable_console_output` (Boolean) -- `log_level` (String) -- `metric_level` (String) -- `trace_level` (String) +- `enable_console_output` (List of Object) (see [below for nested schema](#nestedobjatt--parameters--enable_console_output)) +- `log_level` (List of Object) (see [below for nested schema](#nestedobjatt--parameters--log_level)) +- `metric_level` (List of Object) (see [below for nested schema](#nestedobjatt--parameters--metric_level)) +- `trace_level` (List of Object) (see [below for nested schema](#nestedobjatt--parameters--trace_level)) + + +### Nested Schema for `parameters.enable_console_output` + +Read-Only: + +- `default` (String) +- `description` (String) +- `key` (String) +- `level` (String) +- `value` (String) + + + +### Nested Schema for `parameters.log_level` + +Read-Only: + +- `default` (String) +- `description` (String) +- `key` (String) +- `level` (String) +- `value` (String) + + + +### Nested Schema for `parameters.metric_level` + +Read-Only: + +- `default` (String) +- `description` (String) +- `key` (String) +- `level` (String) +- `value` (String) + + + +### Nested Schema for `parameters.trace_level` + +Read-Only: + +- `default` (String) +- `description` (String) +- `key` (String) +- `level` (String) +- `value` (String) + diff --git a/docs/resources/function_javascript.md b/docs/resources/function_javascript.md index 2680ff6653..1619ab3a06 100644 --- a/docs/resources/function_javascript.md +++ b/docs/resources/function_javascript.md @@ -31,7 +31,7 @@ Resource used to manage javascript function objects. For more information, check - `log_level` (String) LOG_LEVEL to use when filtering events For more information, check [LOG_LEVEL docs](https://docs.snowflake.com/en/sql-reference/parameters#log-level). 
- `metric_level` (String) METRIC_LEVEL value to control whether to emit metrics to Event Table For more information, check [METRIC_LEVEL docs](https://docs.snowflake.com/en/sql-reference/parameters#metric-level). - `null_input_behavior` (String) Specifies the behavior of the function when called with null inputs. Valid values are (case-insensitive): `CALLED ON NULL INPUT` | `RETURNS NULL ON NULL INPUT`. -- `return_behavior` (String) Specifies the behavior of the function when returning results. Valid values are (case-insensitive): `VOLATILE` | `IMMUTABLE`. +- `return_results_behavior` (String) Specifies the behavior of the function when returning results. Valid values are (case-insensitive): `VOLATILE` | `IMMUTABLE`. - `trace_level` (String) Trace level value to use when generating/filtering trace events For more information, check [TRACE_LEVEL docs](https://docs.snowflake.com/en/sql-reference/parameters#trace-level). ### Read-Only @@ -50,16 +50,68 @@ Required: - `arg_data_type` (String) The argument type. - `arg_name` (String) The argument name. +Optional: + +- `arg_default_value` (String) Optional default value for the argument. For text values use single quotes. Numeric values can be unquoted. External changes for this field won't be detected. In case you want to apply external changes, you can re-create the resource manually using "terraform taint". 
+ ### Nested Schema for `parameters` Read-Only: -- `enable_console_output` (Boolean) -- `log_level` (String) -- `metric_level` (String) -- `trace_level` (String) +- `enable_console_output` (List of Object) (see [below for nested schema](#nestedobjatt--parameters--enable_console_output)) +- `log_level` (List of Object) (see [below for nested schema](#nestedobjatt--parameters--log_level)) +- `metric_level` (List of Object) (see [below for nested schema](#nestedobjatt--parameters--metric_level)) +- `trace_level` (List of Object) (see [below for nested schema](#nestedobjatt--parameters--trace_level)) + + +### Nested Schema for `parameters.enable_console_output` + +Read-Only: + +- `default` (String) +- `description` (String) +- `key` (String) +- `level` (String) +- `value` (String) + + + +### Nested Schema for `parameters.log_level` + +Read-Only: + +- `default` (String) +- `description` (String) +- `key` (String) +- `level` (String) +- `value` (String) + + + +### Nested Schema for `parameters.metric_level` + +Read-Only: + +- `default` (String) +- `description` (String) +- `key` (String) +- `level` (String) +- `value` (String) + + + +### Nested Schema for `parameters.trace_level` + +Read-Only: + +- `default` (String) +- `description` (String) +- `key` (String) +- `level` (String) +- `value` (String) + diff --git a/docs/resources/function_python.md b/docs/resources/function_python.md index 5f68cfb014..21e4244789 100644 --- a/docs/resources/function_python.md +++ b/docs/resources/function_python.md @@ -17,7 +17,6 @@ Resource used to manage python function objects. For more information, check [fu ### Required - `database` (String) The database in which to create the function. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"`. 
-- `function_definition` (String) Defines the handler code executed when the UDF is called. Wrapping `$$` signs are added by the provider automatically; do not include them. The `function_definition` value must be Python source code. For more information, see [Introduction to Python UDFs](https://docs.snowflake.com/en/developer-guide/udf/python/udf-python-introduction). To mitigate permadiff on this field, the provider replaces blank characters with a space. This can lead to false positives in cases where a change in case or run of whitespace is semantically significant. - `handler` (String) The name of the handler function or class. If the handler is for a scalar UDF, returning a non-tabular value, the HANDLER value should be a function name. If the handler code is in-line with the CREATE FUNCTION statement, you can use the function name alone. When the handler code is referenced at a stage, this value should be qualified with the module name, as in the following form: `my_module.my_function`. If the handler is for a tabular UDF, the HANDLER value should be the name of a handler class. - `name` (String) The name of the function; the identifier does not need to be unique for the schema in which the function is created because UDFs are identified and resolved by the combination of the name and argument types. Check the [docs](https://docs.snowflake.com/en/sql-reference/sql/create-function#all-languages). Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"`. - `return_type` (String) Specifies the results returned by the UDF, which determines the UDF type. Use `` to create a scalar UDF that returns a single value with the specified data type. 
Use `TABLE (col_name col_data_type, ...)` to creates a table UDF that returns tabular results with the specified table column(s) and column type(s). For the details, consult the [docs](https://docs.snowflake.com/en/sql-reference/sql/create-function#all-languages). @@ -30,14 +29,15 @@ Resource used to manage python function objects. For more information, check [fu - `comment` (String) Specifies a comment for the function. - `enable_console_output` (Boolean) Enable stdout/stderr fast path logging for anonyous stored procs. This is a public parameter (similar to LOG_LEVEL). For more information, check [ENABLE_CONSOLE_OUTPUT docs](https://docs.snowflake.com/en/sql-reference/parameters#enable-console-output). - `external_access_integrations` (Set of String) The names of [external access integrations](https://docs.snowflake.com/en/sql-reference/sql/create-external-access-integration) needed in order for this function’s handler code to access external networks. An external access integration specifies [network rules](https://docs.snowflake.com/en/sql-reference/sql/create-network-rule) and [secrets](https://docs.snowflake.com/en/sql-reference/sql/create-secret) that specify external locations and credentials (if any) allowed for use by handler code when making requests of an external network, such as an external REST API. -- `imports` (Set of String) The location (stage), path, and name of the file(s) to import. A file can be a `.py` file or another type of file. Python UDFs can also read non-Python files, such as text files. For an example, see [Reading a file](https://docs.snowflake.com/en/developer-guide/udf/python/udf-python-examples.html#label-udf-python-read-files). Consult the [docs](https://docs.snowflake.com/en/sql-reference/sql/create-function#python). +- `function_definition` (String) Defines the handler code executed when the UDF is called. Wrapping `$$` signs are added by the provider automatically; do not include them. 
The `function_definition` value must be Python source code. For more information, see [Introduction to Python UDFs](https://docs.snowflake.com/en/developer-guide/udf/python/udf-python-introduction). To mitigate permadiff on this field, the provider replaces blank characters with a space. This can lead to false positives in cases where a change in case or run of whitespace is semantically significant. +- `imports` (Block Set) The location (stage), path, and name of the file(s) to import. A file can be a `.py` file or another type of file. Python UDFs can also read non-Python files, such as text files. For an example, see [Reading a file](https://docs.snowflake.com/en/developer-guide/udf/python/udf-python-examples.html#label-udf-python-read-files). Consult the [docs](https://docs.snowflake.com/en/sql-reference/sql/create-function#python). (see [below for nested schema](#nestedblock--imports)) - `is_aggregate` (String) Specifies that the function is an aggregate function. For more information about user-defined aggregate functions, see [Python user-defined aggregate functions](https://docs.snowflake.com/en/developer-guide/udf/python/udf-python-aggregate-functions). Available options are: "true" or "false". When the value is not set in the configuration the provider will put "default" there which means to use the Snowflake default for this value. - `is_secure` (String) Specifies that the function is secure. By design, the Snowflake's `SHOW FUNCTIONS` command does not provide information about secure functions (consult [function docs](https://docs.snowflake.com/en/sql-reference/sql/create-function#id1) and [Protecting Sensitive Information with Secure UDFs and Stored Procedures](https://docs.snowflake.com/en/developer-guide/secure-udf-procedure)) which is essential to manage/import function with Terraform. Use the role owning the function while managing secure functions. Available options are: "true" or "false". 
When the value is not set in the configuration the provider will put "default" there which means to use the Snowflake default for this value. - `log_level` (String) LOG_LEVEL to use when filtering events For more information, check [LOG_LEVEL docs](https://docs.snowflake.com/en/sql-reference/parameters#log-level). - `metric_level` (String) METRIC_LEVEL value to control whether to emit metrics to Event Table For more information, check [METRIC_LEVEL docs](https://docs.snowflake.com/en/sql-reference/parameters#metric-level). - `null_input_behavior` (String) Specifies the behavior of the function when called with null inputs. Valid values are (case-insensitive): `CALLED ON NULL INPUT` | `RETURNS NULL ON NULL INPUT`. - `packages` (Set of String) The name and version number of packages required as dependencies. The value should be of the form `package_name==version_number`. -- `return_behavior` (String) Specifies the behavior of the function when returning results. Valid values are (case-insensitive): `VOLATILE` | `IMMUTABLE`. +- `return_results_behavior` (String) Specifies the behavior of the function when returning results. Valid values are (case-insensitive): `VOLATILE` | `IMMUTABLE`. - `secrets` (Block Set) Assigns the names of [secrets](https://docs.snowflake.com/en/sql-reference/sql/create-secret) to variables so that you can use the variables to reference the secrets when retrieving information from secrets in handler code. Secrets you specify here must be allowed by the [external access integration](https://docs.snowflake.com/en/sql-reference/sql/create-external-access-integration) specified as a value of this CREATE FUNCTION command’s EXTERNAL_ACCESS_INTEGRATIONS parameter. (see [below for nested schema](#nestedblock--secrets)) - `trace_level` (String) Trace level value to use when generating/filtering trace events For more information, check [TRACE_LEVEL docs](https://docs.snowflake.com/en/sql-reference/parameters#trace-level). 
@@ -57,6 +57,19 @@ Required: - `arg_data_type` (String) The argument type. - `arg_name` (String) The argument name. +Optional: + +- `arg_default_value` (String) Optional default value for the argument. For text values use single quotes. Numeric values can be unquoted. External changes for this field won't be detected. In case you want to apply external changes, you can re-create the resource manually using "terraform taint". + + + +### Nested Schema for `imports` + +Required: + +- `path_on_stage` (String) Path for import on stage, without the leading `/`. +- `stage_location` (String) Stage location without leading `@`. To use your user's stage set this to `~`, otherwise pass fully qualified name of the stage (with every part contained in double quotes or use `snowflake_stage..fully_qualified_name` if you manage this stage through terraform). + ### Nested Schema for `secrets` @@ -72,10 +85,58 @@ Required: Read-Only: -- `enable_console_output` (Boolean) -- `log_level` (String) -- `metric_level` (String) -- `trace_level` (String) +- `enable_console_output` (List of Object) (see [below for nested schema](#nestedobjatt--parameters--enable_console_output)) +- `log_level` (List of Object) (see [below for nested schema](#nestedobjatt--parameters--log_level)) +- `metric_level` (List of Object) (see [below for nested schema](#nestedobjatt--parameters--metric_level)) +- `trace_level` (List of Object) (see [below for nested schema](#nestedobjatt--parameters--trace_level)) + + +### Nested Schema for `parameters.enable_console_output` + +Read-Only: + +- `default` (String) +- `description` (String) +- `key` (String) +- `level` (String) +- `value` (String) + + + +### Nested Schema for `parameters.log_level` + +Read-Only: + +- `default` (String) +- `description` (String) +- `key` (String) +- `level` (String) +- `value` (String) + + + +### Nested Schema for `parameters.metric_level` + +Read-Only: + +- `default` (String) +- `description` (String) +- `key` (String) +- `level` (String) 
+- `value` (String) + + + +### Nested Schema for `parameters.trace_level` + +Read-Only: + +- `default` (String) +- `description` (String) +- `key` (String) +- `level` (String) +- `value` (String) + diff --git a/docs/resources/function_scala.md b/docs/resources/function_scala.md index 9ec48d3866..01226e5512 100644 --- a/docs/resources/function_scala.md +++ b/docs/resources/function_scala.md @@ -17,7 +17,6 @@ Resource used to manage scala function objects. For more information, check [fun ### Required - `database` (String) The database in which to create the function. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"`. -- `function_definition` (String) Defines the handler code executed when the UDF is called. Wrapping `$$` signs are added by the provider automatically; do not include them. The `function_definition` value must be Scala source code. For more information, see [Introduction to Scala UDFs](https://docs.snowflake.com/en/developer-guide/udf/scala/udf-scala-introduction). To mitigate permadiff on this field, the provider replaces blank characters with a space. This can lead to false positives in cases where a change in case or run of whitespace is semantically significant. - `handler` (String) The name of the handler method or class. If the handler is for a scalar UDF, returning a non-tabular value, the HANDLER value should be a method name, as in the following form: `MyClass.myMethod`. - `name` (String) The name of the function; the identifier does not need to be unique for the schema in which the function is created because UDFs are identified and resolved by the combination of the name and argument types. Check the [docs](https://docs.snowflake.com/en/sql-reference/sql/create-function#all-languages). 
Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"`. - `return_type` (String) Specifies the results returned by the UDF, which determines the UDF type. Use `` to create a scalar UDF that returns a single value with the specified data type. Use `TABLE (col_name col_data_type, ...)` to creates a table UDF that returns tabular results with the specified table column(s) and column type(s). For the details, consult the [docs](https://docs.snowflake.com/en/sql-reference/sql/create-function#all-languages). @@ -30,15 +29,16 @@ Resource used to manage scala function objects. For more information, check [fun - `comment` (String) Specifies a comment for the function. - `enable_console_output` (Boolean) Enable stdout/stderr fast path logging for anonyous stored procs. This is a public parameter (similar to LOG_LEVEL). For more information, check [ENABLE_CONSOLE_OUTPUT docs](https://docs.snowflake.com/en/sql-reference/parameters#enable-console-output). - `external_access_integrations` (Set of String) The names of [external access integrations](https://docs.snowflake.com/en/sql-reference/sql/create-external-access-integration) needed in order for this function’s handler code to access external networks. An external access integration specifies [network rules](https://docs.snowflake.com/en/sql-reference/sql/create-network-rule) and [secrets](https://docs.snowflake.com/en/sql-reference/sql/create-secret) that specify external locations and credentials (if any) allowed for use by handler code when making requests of an external network, such as an external REST API. -- `imports` (Set of String) The location (stage), path, and name of the file(s) to import, such as a JAR or other kind of file. 
The JAR file might contain handler dependency libraries. It can contain one or more .class files and zero or more resource files. JNI (Java Native Interface) is not supported. Snowflake prohibits loading libraries that contain native code (as opposed to Java bytecode). A non-JAR file might a file read by handler code. For an example, see [Reading a file specified statically in IMPORTS](https://docs.snowflake.com/en/developer-guide/udf/java/udf-java-cookbook.html#label-reading-file-from-java-udf-imports). Consult the [docs](https://docs.snowflake.com/en/sql-reference/sql/create-function#scala). +- `function_definition` (String) Defines the handler code executed when the UDF is called. Wrapping `$$` signs are added by the provider automatically; do not include them. The `function_definition` value must be Scala source code. For more information, see [Introduction to Scala UDFs](https://docs.snowflake.com/en/developer-guide/udf/scala/udf-scala-introduction). To mitigate permadiff on this field, the provider replaces blank characters with a space. This can lead to false positives in cases where a change in case or run of whitespace is semantically significant. +- `imports` (Block Set) The location (stage), path, and name of the file(s) to import, such as a JAR or other kind of file. The JAR file might contain handler dependency libraries. It can contain one or more .class files and zero or more resource files. JNI (Java Native Interface) is not supported. Snowflake prohibits loading libraries that contain native code (as opposed to Java bytecode). A non-JAR file might be a file read by handler code. For an example, see [Reading a file specified statically in IMPORTS](https://docs.snowflake.com/en/developer-guide/udf/java/udf-java-cookbook.html#label-reading-file-from-java-udf-imports). Consult the [docs](https://docs.snowflake.com/en/sql-reference/sql/create-function#scala).
(see [below for nested schema](#nestedblock--imports)) - `is_secure` (String) Specifies that the function is secure. By design, the Snowflake's `SHOW FUNCTIONS` command does not provide information about secure functions (consult [function docs](https://docs.snowflake.com/en/sql-reference/sql/create-function#id1) and [Protecting Sensitive Information with Secure UDFs and Stored Procedures](https://docs.snowflake.com/en/developer-guide/secure-udf-procedure)) which is essential to manage/import function with Terraform. Use the role owning the function while managing secure functions. Available options are: "true" or "false". When the value is not set in the configuration the provider will put "default" there which means to use the Snowflake default for this value. - `log_level` (String) LOG_LEVEL to use when filtering events For more information, check [LOG_LEVEL docs](https://docs.snowflake.com/en/sql-reference/parameters#log-level). - `metric_level` (String) METRIC_LEVEL value to control whether to emit metrics to Event Table For more information, check [METRIC_LEVEL docs](https://docs.snowflake.com/en/sql-reference/parameters#metric-level). - `null_input_behavior` (String) Specifies the behavior of the function when called with null inputs. Valid values are (case-insensitive): `CALLED ON NULL INPUT` | `RETURNS NULL ON NULL INPUT`. - `packages` (Set of String) The name and version number of Snowflake system packages required as dependencies. The value should be of the form `package_name:version_number`, where `package_name` is `snowflake_domain:package`. -- `return_behavior` (String) Specifies the behavior of the function when returning results. Valid values are (case-insensitive): `VOLATILE` | `IMMUTABLE`. +- `return_results_behavior` (String) Specifies the behavior of the function when returning results. Valid values are (case-insensitive): `VOLATILE` | `IMMUTABLE`. 
- `secrets` (Block Set) Assigns the names of [secrets](https://docs.snowflake.com/en/sql-reference/sql/create-secret) to variables so that you can use the variables to reference the secrets when retrieving information from secrets in handler code. Secrets you specify here must be allowed by the [external access integration](https://docs.snowflake.com/en/sql-reference/sql/create-external-access-integration) specified as a value of this CREATE FUNCTION command’s EXTERNAL_ACCESS_INTEGRATIONS parameter. (see [below for nested schema](#nestedblock--secrets)) -- `target_path` (String) The name of the handler method or class. If the handler is for a scalar UDF, returning a non-tabular value, the HANDLER value should be a method name, as in the following form: `MyClass.myMethod`. +- `target_path` (Block Set, Max: 1) The target path (stage location and file name) for the compiled code (JAR file) that Snowflake writes after compiling the source code specified in `function_definition`. (see [below for nested schema](#nestedblock--target_path)) - `trace_level` (String) Trace level value to use when generating/filtering trace events For more information, check [TRACE_LEVEL docs](https://docs.snowflake.com/en/sql-reference/parameters#trace-level). ### Read-Only @@ -57,6 +57,19 @@ Required: - `arg_data_type` (String) The argument type. - `arg_name` (String) The argument name. +Optional: + +- `arg_default_value` (String) Optional default value for the argument. For text values use single quotes. Numeric values can be unquoted. External changes for this field won't be detected. In case you want to apply external changes, you can re-create the resource manually using "terraform taint". + + + +### Nested Schema for `imports` + +Required: + +- `path_on_stage` (String) Path for import on stage, without the leading `/`. +- `stage_location` (String) Stage location without leading `@`.
To use your user's stage set this to `~`, otherwise pass fully qualified name of the stage (with every part contained in double quotes or use `snowflake_stage..fully_qualified_name` if you manage this stage through terraform). + ### Nested Schema for `secrets` @@ -67,15 +80,72 @@ Required: - `secret_variable_name` (String) The variable that will be used in handler code when retrieving information from the secret. + +### Nested Schema for `target_path` + +Required: + +- `path_on_stage` (String) Path for import on stage, without the leading `/`. +- `stage_location` (String) Stage location without leading `@`. To use your user's stage set this to `~`, otherwise pass fully qualified name of the stage (with every part contained in double quotes or use `snowflake_stage..fully_qualified_name` if you manage this stage through terraform). + + ### Nested Schema for `parameters` Read-Only: -- `enable_console_output` (Boolean) -- `log_level` (String) -- `metric_level` (String) -- `trace_level` (String) +- `enable_console_output` (List of Object) (see [below for nested schema](#nestedobjatt--parameters--enable_console_output)) +- `log_level` (List of Object) (see [below for nested schema](#nestedobjatt--parameters--log_level)) +- `metric_level` (List of Object) (see [below for nested schema](#nestedobjatt--parameters--metric_level)) +- `trace_level` (List of Object) (see [below for nested schema](#nestedobjatt--parameters--trace_level)) + + +### Nested Schema for `parameters.enable_console_output` + +Read-Only: + +- `default` (String) +- `description` (String) +- `key` (String) +- `level` (String) +- `value` (String) + + + +### Nested Schema for `parameters.log_level` + +Read-Only: + +- `default` (String) +- `description` (String) +- `key` (String) +- `level` (String) +- `value` (String) + + + +### Nested Schema for `parameters.metric_level` + +Read-Only: + +- `default` (String) +- `description` (String) +- `key` (String) +- `level` (String) +- `value` (String) + + + +### 
Nested Schema for `parameters.trace_level` + +Read-Only: + +- `default` (String) +- `description` (String) +- `key` (String) +- `level` (String) +- `value` (String) + diff --git a/docs/resources/function_sql.md b/docs/resources/function_sql.md index 80d83727fb..4a48191740 100644 --- a/docs/resources/function_sql.md +++ b/docs/resources/function_sql.md @@ -31,7 +31,7 @@ Resource used to manage sql function objects. For more information, check [funct - `log_level` (String) LOG_LEVEL to use when filtering events For more information, check [LOG_LEVEL docs](https://docs.snowflake.com/en/sql-reference/parameters#log-level). - `metric_level` (String) METRIC_LEVEL value to control whether to emit metrics to Event Table For more information, check [METRIC_LEVEL docs](https://docs.snowflake.com/en/sql-reference/parameters#metric-level). - `null_input_behavior` (String) Specifies the behavior of the function when called with null inputs. Valid values are (case-insensitive): `CALLED ON NULL INPUT` | `RETURNS NULL ON NULL INPUT`. -- `return_behavior` (String) Specifies the behavior of the function when returning results. Valid values are (case-insensitive): `VOLATILE` | `IMMUTABLE`. +- `return_results_behavior` (String) Specifies the behavior of the function when returning results. Valid values are (case-insensitive): `VOLATILE` | `IMMUTABLE`. - `trace_level` (String) Trace level value to use when generating/filtering trace events For more information, check [TRACE_LEVEL docs](https://docs.snowflake.com/en/sql-reference/parameters#trace-level). ### Read-Only @@ -50,16 +50,68 @@ Required: - `arg_data_type` (String) The argument type. - `arg_name` (String) The argument name. +Optional: + +- `arg_default_value` (String) Optional default value for the argument. For text values use single quotes. Numeric values can be unquoted. External changes for this field won't be detected. 
In case you want to apply external changes, you can re-create the resource manually using "terraform taint". + ### Nested Schema for `parameters` Read-Only: -- `enable_console_output` (Boolean) -- `log_level` (String) -- `metric_level` (String) -- `trace_level` (String) +- `enable_console_output` (List of Object) (see [below for nested schema](#nestedobjatt--parameters--enable_console_output)) +- `log_level` (List of Object) (see [below for nested schema](#nestedobjatt--parameters--log_level)) +- `metric_level` (List of Object) (see [below for nested schema](#nestedobjatt--parameters--metric_level)) +- `trace_level` (List of Object) (see [below for nested schema](#nestedobjatt--parameters--trace_level)) + + +### Nested Schema for `parameters.enable_console_output` + +Read-Only: + +- `default` (String) +- `description` (String) +- `key` (String) +- `level` (String) +- `value` (String) + + + +### Nested Schema for `parameters.log_level` + +Read-Only: + +- `default` (String) +- `description` (String) +- `key` (String) +- `level` (String) +- `value` (String) + + + +### Nested Schema for `parameters.metric_level` + +Read-Only: + +- `default` (String) +- `description` (String) +- `key` (String) +- `level` (String) +- `value` (String) + + + +### Nested Schema for `parameters.trace_level` + +Read-Only: + +- `default` (String) +- `description` (String) +- `key` (String) +- `level` (String) +- `value` (String) + diff --git a/docs/resources/procedure_java.md b/docs/resources/procedure_java.md index 94490ed21e..dbb5f2eba3 100644 --- a/docs/resources/procedure_java.md +++ b/docs/resources/procedure_java.md @@ -73,10 +73,58 @@ Required: Read-Only: -- `enable_console_output` (Boolean) -- `log_level` (String) -- `metric_level` (String) -- `trace_level` (String) +- `enable_console_output` (List of Object) (see [below for nested schema](#nestedobjatt--parameters--enable_console_output)) +- `log_level` (List of Object) (see [below for nested 
schema](#nestedobjatt--parameters--log_level)) +- `metric_level` (List of Object) (see [below for nested schema](#nestedobjatt--parameters--metric_level)) +- `trace_level` (List of Object) (see [below for nested schema](#nestedobjatt--parameters--trace_level)) + + +### Nested Schema for `parameters.enable_console_output` + +Read-Only: + +- `default` (String) +- `description` (String) +- `key` (String) +- `level` (String) +- `value` (String) + + + +### Nested Schema for `parameters.log_level` + +Read-Only: + +- `default` (String) +- `description` (String) +- `key` (String) +- `level` (String) +- `value` (String) + + + +### Nested Schema for `parameters.metric_level` + +Read-Only: + +- `default` (String) +- `description` (String) +- `key` (String) +- `level` (String) +- `value` (String) + + + +### Nested Schema for `parameters.trace_level` + +Read-Only: + +- `default` (String) +- `description` (String) +- `key` (String) +- `level` (String) +- `value` (String) + diff --git a/docs/resources/procedure_javascript.md b/docs/resources/procedure_javascript.md index a562ad589d..a9364db4cf 100644 --- a/docs/resources/procedure_javascript.md +++ b/docs/resources/procedure_javascript.md @@ -56,10 +56,58 @@ Required: Read-Only: -- `enable_console_output` (Boolean) -- `log_level` (String) -- `metric_level` (String) -- `trace_level` (String) +- `enable_console_output` (List of Object) (see [below for nested schema](#nestedobjatt--parameters--enable_console_output)) +- `log_level` (List of Object) (see [below for nested schema](#nestedobjatt--parameters--log_level)) +- `metric_level` (List of Object) (see [below for nested schema](#nestedobjatt--parameters--metric_level)) +- `trace_level` (List of Object) (see [below for nested schema](#nestedobjatt--parameters--trace_level)) + + +### Nested Schema for `parameters.enable_console_output` + +Read-Only: + +- `default` (String) +- `description` (String) +- `key` (String) +- `level` (String) +- `value` (String) + + + +### Nested Schema 
for `parameters.log_level` + +Read-Only: + +- `default` (String) +- `description` (String) +- `key` (String) +- `level` (String) +- `value` (String) + + + +### Nested Schema for `parameters.metric_level` + +Read-Only: + +- `default` (String) +- `description` (String) +- `key` (String) +- `level` (String) +- `value` (String) + + + +### Nested Schema for `parameters.trace_level` + +Read-Only: + +- `default` (String) +- `description` (String) +- `key` (String) +- `level` (String) +- `value` (String) + diff --git a/docs/resources/procedure_python.md b/docs/resources/procedure_python.md index 7b6759ef75..a28cf0d0b5 100644 --- a/docs/resources/procedure_python.md +++ b/docs/resources/procedure_python.md @@ -72,10 +72,58 @@ Required: Read-Only: -- `enable_console_output` (Boolean) -- `log_level` (String) -- `metric_level` (String) -- `trace_level` (String) +- `enable_console_output` (List of Object) (see [below for nested schema](#nestedobjatt--parameters--enable_console_output)) +- `log_level` (List of Object) (see [below for nested schema](#nestedobjatt--parameters--log_level)) +- `metric_level` (List of Object) (see [below for nested schema](#nestedobjatt--parameters--metric_level)) +- `trace_level` (List of Object) (see [below for nested schema](#nestedobjatt--parameters--trace_level)) + + +### Nested Schema for `parameters.enable_console_output` + +Read-Only: + +- `default` (String) +- `description` (String) +- `key` (String) +- `level` (String) +- `value` (String) + + + +### Nested Schema for `parameters.log_level` + +Read-Only: + +- `default` (String) +- `description` (String) +- `key` (String) +- `level` (String) +- `value` (String) + + + +### Nested Schema for `parameters.metric_level` + +Read-Only: + +- `default` (String) +- `description` (String) +- `key` (String) +- `level` (String) +- `value` (String) + + + +### Nested Schema for `parameters.trace_level` + +Read-Only: + +- `default` (String) +- `description` (String) +- `key` (String) +- `level` (String) +- 
`value` (String) + diff --git a/docs/resources/procedure_scala.md b/docs/resources/procedure_scala.md index 1347bfb5cf..692fb569b1 100644 --- a/docs/resources/procedure_scala.md +++ b/docs/resources/procedure_scala.md @@ -73,10 +73,58 @@ Required: Read-Only: -- `enable_console_output` (Boolean) -- `log_level` (String) -- `metric_level` (String) -- `trace_level` (String) +- `enable_console_output` (List of Object) (see [below for nested schema](#nestedobjatt--parameters--enable_console_output)) +- `log_level` (List of Object) (see [below for nested schema](#nestedobjatt--parameters--log_level)) +- `metric_level` (List of Object) (see [below for nested schema](#nestedobjatt--parameters--metric_level)) +- `trace_level` (List of Object) (see [below for nested schema](#nestedobjatt--parameters--trace_level)) + + +### Nested Schema for `parameters.enable_console_output` + +Read-Only: + +- `default` (String) +- `description` (String) +- `key` (String) +- `level` (String) +- `value` (String) + + + +### Nested Schema for `parameters.log_level` + +Read-Only: + +- `default` (String) +- `description` (String) +- `key` (String) +- `level` (String) +- `value` (String) + + + +### Nested Schema for `parameters.metric_level` + +Read-Only: + +- `default` (String) +- `description` (String) +- `key` (String) +- `level` (String) +- `value` (String) + + + +### Nested Schema for `parameters.trace_level` + +Read-Only: + +- `default` (String) +- `description` (String) +- `key` (String) +- `level` (String) +- `value` (String) + diff --git a/docs/resources/procedure_sql.md b/docs/resources/procedure_sql.md index 3b078e3977..2533380779 100644 --- a/docs/resources/procedure_sql.md +++ b/docs/resources/procedure_sql.md @@ -56,10 +56,58 @@ Required: Read-Only: -- `enable_console_output` (Boolean) -- `log_level` (String) -- `metric_level` (String) -- `trace_level` (String) +- `enable_console_output` (List of Object) (see [below for nested schema](#nestedobjatt--parameters--enable_console_output)) 
+- `log_level` (List of Object) (see [below for nested schema](#nestedobjatt--parameters--log_level)) +- `metric_level` (List of Object) (see [below for nested schema](#nestedobjatt--parameters--metric_level)) +- `trace_level` (List of Object) (see [below for nested schema](#nestedobjatt--parameters--trace_level)) + + +### Nested Schema for `parameters.enable_console_output` + +Read-Only: + +- `default` (String) +- `description` (String) +- `key` (String) +- `level` (String) +- `value` (String) + + + +### Nested Schema for `parameters.log_level` + +Read-Only: + +- `default` (String) +- `description` (String) +- `key` (String) +- `level` (String) +- `value` (String) + + + +### Nested Schema for `parameters.metric_level` + +Read-Only: + +- `default` (String) +- `description` (String) +- `key` (String) +- `level` (String) +- `value` (String) + + + +### Nested Schema for `parameters.trace_level` + +Read-Only: + +- `default` (String) +- `description` (String) +- `key` (String) +- `level` (String) +- `value` (String) + diff --git a/pkg/acceptance/bettertestspoc/assert/objectassert/function_describe_snowflake_ext.go b/pkg/acceptance/bettertestspoc/assert/objectassert/function_describe_snowflake_ext.go index f540d487bd..a4c256b172 100644 --- a/pkg/acceptance/bettertestspoc/assert/objectassert/function_describe_snowflake_ext.go +++ b/pkg/acceptance/bettertestspoc/assert/objectassert/function_describe_snowflake_ext.go @@ -6,10 +6,12 @@ import ( "testing" acc "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance" + assert2 "github.com/stretchr/testify/assert" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/bettertestspoc/assert" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/internal/collections" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk/datatypes" ) // TODO [SNOW-1501905]: this file should be fully regenerated when adding and option to 
assert the results of describe @@ -405,3 +407,70 @@ func (f *FunctionDetailsAssert) HasExactlySecrets(expectedSecrets map[string]sdk }) return f } + +func (f *FunctionDetailsAssert) HasExactlyImportsNormalizedInAnyOrder(imports ...sdk.NormalizedPath) *FunctionDetailsAssert { + f.AddAssertion(func(t *testing.T, o *sdk.FunctionDetails) error { + t.Helper() + if o.NormalizedImports == nil { + return fmt.Errorf("expected imports to have value; got: nil") + } + if !assert2.ElementsMatch(t, imports, o.NormalizedImports) { + return fmt.Errorf("expected %v imports in task relations, got %v", imports, o.NormalizedImports) + } + return nil + }) + return f +} + +func (f *FunctionDetailsAssert) HasNormalizedTargetPath(expectedStageLocation string, expectedPathOnStage string) *FunctionDetailsAssert { + f.AddAssertion(func(t *testing.T, o *sdk.FunctionDetails) error { + t.Helper() + if o.NormalizedTargetPath == nil { + return fmt.Errorf("expected normalized target path to have value; got: nil") + } + if o.NormalizedTargetPath.StageLocation != expectedStageLocation { + return fmt.Errorf("expected %s stage location for target path, got %v", expectedStageLocation, o.NormalizedTargetPath.StageLocation) + } + if o.NormalizedTargetPath.PathOnStage != expectedPathOnStage { + return fmt.Errorf("expected %s path on stage for target path, got %v", expectedPathOnStage, o.NormalizedTargetPath.PathOnStage) + } + return nil + }) + return f +} + +func (f *FunctionDetailsAssert) HasNormalizedTargetPathNil() *FunctionDetailsAssert { + f.AddAssertion(func(t *testing.T, o *sdk.FunctionDetails) error { + t.Helper() + if o.NormalizedTargetPath != nil { + return fmt.Errorf("expected normalized target path to be nil, got: %s", *o.NormalizedTargetPath) + } + return nil + }) + return f +} + +func (f *FunctionDetailsAssert) HasReturnDataType(expectedDataType datatypes.DataType) *FunctionDetailsAssert { + f.AddAssertion(func(t *testing.T, o *sdk.FunctionDetails) error { + t.Helper() + if o.ReturnDataType 
== nil { + return fmt.Errorf("expected return data type to have value; got: nil") + } + if !datatypes.AreTheSame(o.ReturnDataType, expectedDataType) { + return fmt.Errorf("expected %s return data type, got %v", expectedDataType, o.ReturnDataType.ToSql()) + } + return nil + }) + return f +} + +func (f *FunctionDetailsAssert) HasReturnNotNull(expected bool) *FunctionDetailsAssert { + f.AddAssertion(func(t *testing.T, o *sdk.FunctionDetails) error { + t.Helper() + if o.ReturnNotNull != expected { + return fmt.Errorf("expected return not null %t; got: %t", expected, o.ReturnNotNull) + } + return nil + }) + return f +} diff --git a/pkg/acceptance/bettertestspoc/assert/resourceassert/function_java_resource_ext.go b/pkg/acceptance/bettertestspoc/assert/resourceassert/function_java_resource_ext.go new file mode 100644 index 0000000000..9a3bb1fa15 --- /dev/null +++ b/pkg/acceptance/bettertestspoc/assert/resourceassert/function_java_resource_ext.go @@ -0,0 +1,17 @@ +package resourceassert + +import ( + "strconv" + + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/bettertestspoc/assert" +) + +func (f *FunctionJavaResourceAssert) HasImportsLength(len int) *FunctionJavaResourceAssert { + f.AddAssertion(assert.ValueSet("imports.#", strconv.FormatInt(int64(len), 10))) + return f +} + +func (f *FunctionJavaResourceAssert) HasTargetPathEmpty() *FunctionJavaResourceAssert { + f.AddAssertion(assert.ValueSet("target_path.#", "0")) + return f +} diff --git a/pkg/acceptance/bettertestspoc/assert/resourceparametersassert/function_resource_parameters_ext.go b/pkg/acceptance/bettertestspoc/assert/resourceparametersassert/function_resource_parameters_ext.go new file mode 100644 index 0000000000..2bc66908df --- /dev/null +++ b/pkg/acceptance/bettertestspoc/assert/resourceparametersassert/function_resource_parameters_ext.go @@ -0,0 +1,13 @@ +package resourceparametersassert + +import ( + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk" +) + +func (f 
*FunctionResourceParametersAssert) HasAllDefaults() *FunctionResourceParametersAssert { + return f. + HasEnableConsoleOutput(false). + HasLogLevel(sdk.LogLevelOff). + HasMetricLevel(sdk.MetricLevelNone). + HasTraceLevel(sdk.TraceLevelOff) +} diff --git a/pkg/acceptance/bettertestspoc/config/model/function_java_model_ext.go b/pkg/acceptance/bettertestspoc/config/model/function_java_model_ext.go index 4bac27ada5..8579ea981a 100644 --- a/pkg/acceptance/bettertestspoc/config/model/function_java_model_ext.go +++ b/pkg/acceptance/bettertestspoc/config/model/function_java_model_ext.go @@ -2,6 +2,11 @@ package model import ( "encoding/json" + + tfconfig "github.com/hashicorp/terraform-plugin-testing/config" + + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk/datatypes" ) func (f *FunctionJavaModel) MarshalJSON() ([]byte, error) { @@ -14,3 +19,70 @@ func (f *FunctionJavaModel) MarshalJSON() ([]byte, error) { DependsOn: f.DependsOn(), }) } + +func FunctionJavaBasicInline( + resourceName string, + id sdk.SchemaObjectIdentifierWithArguments, + returnType datatypes.DataType, + handler string, + functionDefinition string, +) *FunctionJavaModel { + return FunctionJava(resourceName, id.DatabaseName(), handler, id.Name(), returnType.ToSql(), id.SchemaName()).WithFunctionDefinition(functionDefinition) +} + +func FunctionJavaBasicStaged( + resourceName string, + id sdk.SchemaObjectIdentifierWithArguments, + returnType datatypes.DataType, + handler string, + stageLocation string, + pathOnStage string, +) *FunctionJavaModel { + return FunctionJava(resourceName, id.DatabaseName(), handler, id.Name(), returnType.ToSql(), id.SchemaName()). 
+ WithImport(stageLocation, pathOnStage) +} + +func (f *FunctionJavaModel) WithArgument(argName string, argDataType datatypes.DataType) *FunctionJavaModel { + return f.WithArgumentsValue( + tfconfig.ObjectVariable( + map[string]tfconfig.Variable{ + "arg_name": tfconfig.StringVariable(argName), + "arg_data_type": tfconfig.StringVariable(argDataType.ToSql()), + }, + ), + ) +} + +func (f *FunctionJavaModel) WithArgumentWithDefaultValue(argName string, argDataType datatypes.DataType, value string) *FunctionJavaModel { + return f.WithArgumentsValue( + tfconfig.ObjectVariable( + map[string]tfconfig.Variable{ + "arg_name": tfconfig.StringVariable(argName), + "arg_data_type": tfconfig.StringVariable(argDataType.ToSql()), + "arg_default_value": tfconfig.StringVariable(value), + }, + ), + ) +} + +func (f *FunctionJavaModel) WithImport(stageLocation string, pathOnStage string) *FunctionJavaModel { + return f.WithImportsValue( + tfconfig.ObjectVariable( + map[string]tfconfig.Variable{ + "stage_location": tfconfig.StringVariable(stageLocation), + "path_on_stage": tfconfig.StringVariable(pathOnStage), + }, + ), + ) +} + +func (f *FunctionJavaModel) WithTargetPathParts(stageLocation string, pathOnStage string) *FunctionJavaModel { + return f.WithTargetPathValue( + tfconfig.ObjectVariable( + map[string]tfconfig.Variable{ + "stage_location": tfconfig.StringVariable(stageLocation), + "path_on_stage": tfconfig.StringVariable(pathOnStage), + }, + ), + ) +} diff --git a/pkg/acceptance/bettertestspoc/config/model/function_java_model_gen.go b/pkg/acceptance/bettertestspoc/config/model/function_java_model_gen.go index 704f6b2bcf..309a53b0a9 100644 --- a/pkg/acceptance/bettertestspoc/config/model/function_java_model_gen.go +++ b/pkg/acceptance/bettertestspoc/config/model/function_java_model_gen.go @@ -26,7 +26,7 @@ type FunctionJavaModel struct { Name tfconfig.Variable `json:"name,omitempty"` NullInputBehavior tfconfig.Variable `json:"null_input_behavior,omitempty"` Packages 
tfconfig.Variable `json:"packages,omitempty"` - ReturnBehavior tfconfig.Variable `json:"return_behavior,omitempty"` + ReturnResultsBehavior tfconfig.Variable `json:"return_results_behavior,omitempty"` ReturnType tfconfig.Variable `json:"return_type,omitempty"` RuntimeVersion tfconfig.Variable `json:"runtime_version,omitempty"` Schema tfconfig.Variable `json:"schema,omitempty"` @@ -44,7 +44,6 @@ type FunctionJavaModel struct { func FunctionJava( resourceName string, database string, - functionDefinition string, handler string, name string, returnType string, @@ -52,7 +51,6 @@ func FunctionJava( ) *FunctionJavaModel { f := &FunctionJavaModel{ResourceModelMeta: config.Meta(resourceName, resources.FunctionJava)} f.WithDatabase(database) - f.WithFunctionDefinition(functionDefinition) f.WithHandler(handler) f.WithName(name) f.WithReturnType(returnType) @@ -62,7 +60,6 @@ func FunctionJava( func FunctionJavaWithDefaultMeta( database string, - functionDefinition string, handler string, name string, returnType string, @@ -70,7 +67,6 @@ func FunctionJavaWithDefaultMeta( ) *FunctionJavaModel { f := &FunctionJavaModel{ResourceModelMeta: config.DefaultMeta(resources.FunctionJava)} f.WithDatabase(database) - f.WithFunctionDefinition(functionDefinition) f.WithHandler(handler) f.WithName(name) f.WithReturnType(returnType) @@ -150,8 +146,8 @@ func (f *FunctionJavaModel) WithNullInputBehavior(nullInputBehavior string) *Fun // packages attribute type is not yet supported, so WithPackages can't be generated -func (f *FunctionJavaModel) WithReturnBehavior(returnBehavior string) *FunctionJavaModel { - f.ReturnBehavior = tfconfig.StringVariable(returnBehavior) +func (f *FunctionJavaModel) WithReturnResultsBehavior(returnResultsBehavior string) *FunctionJavaModel { + f.ReturnResultsBehavior = tfconfig.StringVariable(returnResultsBehavior) return f } @@ -172,10 +168,7 @@ func (f *FunctionJavaModel) WithSchema(schema string) *FunctionJavaModel { // secrets attribute type is not yet 
supported, so WithSecrets can't be generated -func (f *FunctionJavaModel) WithTargetPath(targetPath string) *FunctionJavaModel { - f.TargetPath = tfconfig.StringVariable(targetPath) - return f -} +// target_path attribute type is not yet supported, so WithTargetPath can't be generated func (f *FunctionJavaModel) WithTraceLevel(traceLevel string) *FunctionJavaModel { f.TraceLevel = tfconfig.StringVariable(traceLevel) @@ -266,8 +259,8 @@ func (f *FunctionJavaModel) WithPackagesValue(value tfconfig.Variable) *Function return f } -func (f *FunctionJavaModel) WithReturnBehaviorValue(value tfconfig.Variable) *FunctionJavaModel { - f.ReturnBehavior = value +func (f *FunctionJavaModel) WithReturnResultsBehaviorValue(value tfconfig.Variable) *FunctionJavaModel { + f.ReturnResultsBehavior = value return f } diff --git a/pkg/acceptance/bettertestspoc/config/model/function_javascript_model_gen.go b/pkg/acceptance/bettertestspoc/config/model/function_javascript_model_gen.go index 5d8ad68aec..742dee099b 100644 --- a/pkg/acceptance/bettertestspoc/config/model/function_javascript_model_gen.go +++ b/pkg/acceptance/bettertestspoc/config/model/function_javascript_model_gen.go @@ -10,22 +10,22 @@ import ( ) type FunctionJavascriptModel struct { - Arguments tfconfig.Variable `json:"arguments,omitempty"` - Comment tfconfig.Variable `json:"comment,omitempty"` - Database tfconfig.Variable `json:"database,omitempty"` - EnableConsoleOutput tfconfig.Variable `json:"enable_console_output,omitempty"` - FullyQualifiedName tfconfig.Variable `json:"fully_qualified_name,omitempty"` - FunctionDefinition tfconfig.Variable `json:"function_definition,omitempty"` - FunctionLanguage tfconfig.Variable `json:"function_language,omitempty"` - IsSecure tfconfig.Variable `json:"is_secure,omitempty"` - LogLevel tfconfig.Variable `json:"log_level,omitempty"` - MetricLevel tfconfig.Variable `json:"metric_level,omitempty"` - Name tfconfig.Variable `json:"name,omitempty"` - NullInputBehavior tfconfig.Variable 
`json:"null_input_behavior,omitempty"` - ReturnBehavior tfconfig.Variable `json:"return_behavior,omitempty"` - ReturnType tfconfig.Variable `json:"return_type,omitempty"` - Schema tfconfig.Variable `json:"schema,omitempty"` - TraceLevel tfconfig.Variable `json:"trace_level,omitempty"` + Arguments tfconfig.Variable `json:"arguments,omitempty"` + Comment tfconfig.Variable `json:"comment,omitempty"` + Database tfconfig.Variable `json:"database,omitempty"` + EnableConsoleOutput tfconfig.Variable `json:"enable_console_output,omitempty"` + FullyQualifiedName tfconfig.Variable `json:"fully_qualified_name,omitempty"` + FunctionDefinition tfconfig.Variable `json:"function_definition,omitempty"` + FunctionLanguage tfconfig.Variable `json:"function_language,omitempty"` + IsSecure tfconfig.Variable `json:"is_secure,omitempty"` + LogLevel tfconfig.Variable `json:"log_level,omitempty"` + MetricLevel tfconfig.Variable `json:"metric_level,omitempty"` + Name tfconfig.Variable `json:"name,omitempty"` + NullInputBehavior tfconfig.Variable `json:"null_input_behavior,omitempty"` + ReturnResultsBehavior tfconfig.Variable `json:"return_results_behavior,omitempty"` + ReturnType tfconfig.Variable `json:"return_type,omitempty"` + Schema tfconfig.Variable `json:"schema,omitempty"` + TraceLevel tfconfig.Variable `json:"trace_level,omitempty"` *config.ResourceModelMeta } @@ -128,8 +128,8 @@ func (f *FunctionJavascriptModel) WithNullInputBehavior(nullInputBehavior string return f } -func (f *FunctionJavascriptModel) WithReturnBehavior(returnBehavior string) *FunctionJavascriptModel { - f.ReturnBehavior = tfconfig.StringVariable(returnBehavior) +func (f *FunctionJavascriptModel) WithReturnResultsBehavior(returnResultsBehavior string) *FunctionJavascriptModel { + f.ReturnResultsBehavior = tfconfig.StringVariable(returnResultsBehavior) return f } @@ -212,8 +212,8 @@ func (f *FunctionJavascriptModel) WithNullInputBehaviorValue(value tfconfig.Vari return f } -func (f *FunctionJavascriptModel) 
WithReturnBehaviorValue(value tfconfig.Variable) *FunctionJavascriptModel { - f.ReturnBehavior = value +func (f *FunctionJavascriptModel) WithReturnResultsBehaviorValue(value tfconfig.Variable) *FunctionJavascriptModel { + f.ReturnResultsBehavior = value return f } diff --git a/pkg/acceptance/bettertestspoc/config/model/function_python_model_gen.go b/pkg/acceptance/bettertestspoc/config/model/function_python_model_gen.go index 9d0ffbd348..17ae5eccaf 100644 --- a/pkg/acceptance/bettertestspoc/config/model/function_python_model_gen.go +++ b/pkg/acceptance/bettertestspoc/config/model/function_python_model_gen.go @@ -27,7 +27,7 @@ type FunctionPythonModel struct { Name tfconfig.Variable `json:"name,omitempty"` NullInputBehavior tfconfig.Variable `json:"null_input_behavior,omitempty"` Packages tfconfig.Variable `json:"packages,omitempty"` - ReturnBehavior tfconfig.Variable `json:"return_behavior,omitempty"` + ReturnResultsBehavior tfconfig.Variable `json:"return_results_behavior,omitempty"` ReturnType tfconfig.Variable `json:"return_type,omitempty"` RuntimeVersion tfconfig.Variable `json:"runtime_version,omitempty"` Schema tfconfig.Variable `json:"schema,omitempty"` @@ -44,7 +44,6 @@ type FunctionPythonModel struct { func FunctionPython( resourceName string, database string, - functionDefinition string, handler string, name string, returnType string, @@ -53,7 +52,6 @@ func FunctionPython( ) *FunctionPythonModel { f := &FunctionPythonModel{ResourceModelMeta: config.Meta(resourceName, resources.FunctionPython)} f.WithDatabase(database) - f.WithFunctionDefinition(functionDefinition) f.WithHandler(handler) f.WithName(name) f.WithReturnType(returnType) @@ -64,7 +62,6 @@ func FunctionPython( func FunctionPythonWithDefaultMeta( database string, - functionDefinition string, handler string, name string, returnType string, @@ -73,7 +70,6 @@ func FunctionPythonWithDefaultMeta( ) *FunctionPythonModel { f := &FunctionPythonModel{ResourceModelMeta: 
config.DefaultMeta(resources.FunctionPython)} f.WithDatabase(database) - f.WithFunctionDefinition(functionDefinition) f.WithHandler(handler) f.WithName(name) f.WithReturnType(returnType) @@ -159,8 +155,8 @@ func (f *FunctionPythonModel) WithNullInputBehavior(nullInputBehavior string) *F // packages attribute type is not yet supported, so WithPackages can't be generated -func (f *FunctionPythonModel) WithReturnBehavior(returnBehavior string) *FunctionPythonModel { - f.ReturnBehavior = tfconfig.StringVariable(returnBehavior) +func (f *FunctionPythonModel) WithReturnResultsBehavior(returnResultsBehavior string) *FunctionPythonModel { + f.ReturnResultsBehavior = tfconfig.StringVariable(returnResultsBehavior) return f } @@ -275,8 +271,8 @@ func (f *FunctionPythonModel) WithPackagesValue(value tfconfig.Variable) *Functi return f } -func (f *FunctionPythonModel) WithReturnBehaviorValue(value tfconfig.Variable) *FunctionPythonModel { - f.ReturnBehavior = value +func (f *FunctionPythonModel) WithReturnResultsBehaviorValue(value tfconfig.Variable) *FunctionPythonModel { + f.ReturnResultsBehavior = value return f } diff --git a/pkg/acceptance/bettertestspoc/config/model/function_scala_model_gen.go b/pkg/acceptance/bettertestspoc/config/model/function_scala_model_gen.go index 017c397af3..070933fd4e 100644 --- a/pkg/acceptance/bettertestspoc/config/model/function_scala_model_gen.go +++ b/pkg/acceptance/bettertestspoc/config/model/function_scala_model_gen.go @@ -26,7 +26,7 @@ type FunctionScalaModel struct { Name tfconfig.Variable `json:"name,omitempty"` NullInputBehavior tfconfig.Variable `json:"null_input_behavior,omitempty"` Packages tfconfig.Variable `json:"packages,omitempty"` - ReturnBehavior tfconfig.Variable `json:"return_behavior,omitempty"` + ReturnResultsBehavior tfconfig.Variable `json:"return_results_behavior,omitempty"` ReturnType tfconfig.Variable `json:"return_type,omitempty"` RuntimeVersion tfconfig.Variable `json:"runtime_version,omitempty"` Schema 
tfconfig.Variable `json:"schema,omitempty"` @@ -44,7 +44,6 @@ type FunctionScalaModel struct { func FunctionScala( resourceName string, database string, - functionDefinition string, handler string, name string, returnType string, @@ -53,7 +52,6 @@ func FunctionScala( ) *FunctionScalaModel { f := &FunctionScalaModel{ResourceModelMeta: config.Meta(resourceName, resources.FunctionScala)} f.WithDatabase(database) - f.WithFunctionDefinition(functionDefinition) f.WithHandler(handler) f.WithName(name) f.WithReturnType(returnType) @@ -64,7 +62,6 @@ func FunctionScala( func FunctionScalaWithDefaultMeta( database string, - functionDefinition string, handler string, name string, returnType string, @@ -73,7 +70,6 @@ func FunctionScalaWithDefaultMeta( ) *FunctionScalaModel { f := &FunctionScalaModel{ResourceModelMeta: config.DefaultMeta(resources.FunctionScala)} f.WithDatabase(database) - f.WithFunctionDefinition(functionDefinition) f.WithHandler(handler) f.WithName(name) f.WithReturnType(returnType) @@ -154,8 +150,8 @@ func (f *FunctionScalaModel) WithNullInputBehavior(nullInputBehavior string) *Fu // packages attribute type is not yet supported, so WithPackages can't be generated -func (f *FunctionScalaModel) WithReturnBehavior(returnBehavior string) *FunctionScalaModel { - f.ReturnBehavior = tfconfig.StringVariable(returnBehavior) +func (f *FunctionScalaModel) WithReturnResultsBehavior(returnResultsBehavior string) *FunctionScalaModel { + f.ReturnResultsBehavior = tfconfig.StringVariable(returnResultsBehavior) return f } @@ -176,10 +172,7 @@ func (f *FunctionScalaModel) WithSchema(schema string) *FunctionScalaModel { // secrets attribute type is not yet supported, so WithSecrets can't be generated -func (f *FunctionScalaModel) WithTargetPath(targetPath string) *FunctionScalaModel { - f.TargetPath = tfconfig.StringVariable(targetPath) - return f -} +// target_path attribute type is not yet supported, so WithTargetPath can't be generated func (f *FunctionScalaModel) 
WithTraceLevel(traceLevel string) *FunctionScalaModel { f.TraceLevel = tfconfig.StringVariable(traceLevel) @@ -270,8 +263,8 @@ func (f *FunctionScalaModel) WithPackagesValue(value tfconfig.Variable) *Functio return f } -func (f *FunctionScalaModel) WithReturnBehaviorValue(value tfconfig.Variable) *FunctionScalaModel { - f.ReturnBehavior = value +func (f *FunctionScalaModel) WithReturnResultsBehaviorValue(value tfconfig.Variable) *FunctionScalaModel { + f.ReturnResultsBehavior = value return f } diff --git a/pkg/acceptance/bettertestspoc/config/model/function_sql_model_gen.go b/pkg/acceptance/bettertestspoc/config/model/function_sql_model_gen.go index 14cbbe9136..0733c2add4 100644 --- a/pkg/acceptance/bettertestspoc/config/model/function_sql_model_gen.go +++ b/pkg/acceptance/bettertestspoc/config/model/function_sql_model_gen.go @@ -10,22 +10,22 @@ import ( ) type FunctionSqlModel struct { - Arguments tfconfig.Variable `json:"arguments,omitempty"` - Comment tfconfig.Variable `json:"comment,omitempty"` - Database tfconfig.Variable `json:"database,omitempty"` - EnableConsoleOutput tfconfig.Variable `json:"enable_console_output,omitempty"` - FullyQualifiedName tfconfig.Variable `json:"fully_qualified_name,omitempty"` - FunctionDefinition tfconfig.Variable `json:"function_definition,omitempty"` - FunctionLanguage tfconfig.Variable `json:"function_language,omitempty"` - IsSecure tfconfig.Variable `json:"is_secure,omitempty"` - LogLevel tfconfig.Variable `json:"log_level,omitempty"` - MetricLevel tfconfig.Variable `json:"metric_level,omitempty"` - Name tfconfig.Variable `json:"name,omitempty"` - NullInputBehavior tfconfig.Variable `json:"null_input_behavior,omitempty"` - ReturnBehavior tfconfig.Variable `json:"return_behavior,omitempty"` - ReturnType tfconfig.Variable `json:"return_type,omitempty"` - Schema tfconfig.Variable `json:"schema,omitempty"` - TraceLevel tfconfig.Variable `json:"trace_level,omitempty"` + Arguments tfconfig.Variable `json:"arguments,omitempty"` + 
Comment tfconfig.Variable `json:"comment,omitempty"` + Database tfconfig.Variable `json:"database,omitempty"` + EnableConsoleOutput tfconfig.Variable `json:"enable_console_output,omitempty"` + FullyQualifiedName tfconfig.Variable `json:"fully_qualified_name,omitempty"` + FunctionDefinition tfconfig.Variable `json:"function_definition,omitempty"` + FunctionLanguage tfconfig.Variable `json:"function_language,omitempty"` + IsSecure tfconfig.Variable `json:"is_secure,omitempty"` + LogLevel tfconfig.Variable `json:"log_level,omitempty"` + MetricLevel tfconfig.Variable `json:"metric_level,omitempty"` + Name tfconfig.Variable `json:"name,omitempty"` + NullInputBehavior tfconfig.Variable `json:"null_input_behavior,omitempty"` + ReturnResultsBehavior tfconfig.Variable `json:"return_results_behavior,omitempty"` + ReturnType tfconfig.Variable `json:"return_type,omitempty"` + Schema tfconfig.Variable `json:"schema,omitempty"` + TraceLevel tfconfig.Variable `json:"trace_level,omitempty"` *config.ResourceModelMeta } @@ -128,8 +128,8 @@ func (f *FunctionSqlModel) WithNullInputBehavior(nullInputBehavior string) *Func return f } -func (f *FunctionSqlModel) WithReturnBehavior(returnBehavior string) *FunctionSqlModel { - f.ReturnBehavior = tfconfig.StringVariable(returnBehavior) +func (f *FunctionSqlModel) WithReturnResultsBehavior(returnResultsBehavior string) *FunctionSqlModel { + f.ReturnResultsBehavior = tfconfig.StringVariable(returnResultsBehavior) return f } @@ -212,8 +212,8 @@ func (f *FunctionSqlModel) WithNullInputBehaviorValue(value tfconfig.Variable) * return f } -func (f *FunctionSqlModel) WithReturnBehaviorValue(value tfconfig.Variable) *FunctionSqlModel { - f.ReturnBehavior = value +func (f *FunctionSqlModel) WithReturnResultsBehaviorValue(value tfconfig.Variable) *FunctionSqlModel { + f.ReturnResultsBehavior = value return f } diff --git a/pkg/acceptance/check_destroy.go b/pkg/acceptance/check_destroy.go index 404ad98917..5959720df0 100644 --- 
a/pkg/acceptance/check_destroy.go +++ b/pkg/acceptance/check_destroy.go @@ -67,9 +67,19 @@ func decodeSnowflakeId(rs *terraform.ResourceState, resource resources.Resource) switch resource { case resources.ExternalFunction: return sdk.NewSchemaObjectIdentifierFromFullyQualifiedName(rs.Primary.ID), nil - case resources.Function: + case resources.Function, + resources.FunctionJava, + resources.FunctionJavascript, + resources.FunctionPython, + resources.FunctionScala, + resources.FunctionSql: return sdk.ParseSchemaObjectIdentifierWithArguments(rs.Primary.ID) - case resources.Procedure: + case resources.Procedure, + resources.ProcedureJava, + resources.ProcedureJavascript, + resources.ProcedurePython, + resources.ProcedureScala, + resources.ProcedureSql: return sdk.NewSchemaObjectIdentifierFromFullyQualifiedName(rs.Primary.ID), nil default: return helpers.DecodeSnowflakeID(rs.Primary.ID), nil @@ -145,6 +155,21 @@ var showByIdFunctions = map[resources.Resource]showByIdFunc{ resources.Function: func(ctx context.Context, client *sdk.Client, id sdk.ObjectIdentifier) error { return runShowById(ctx, id, client.Functions.ShowByID) }, + resources.FunctionJava: func(ctx context.Context, client *sdk.Client, id sdk.ObjectIdentifier) error { + return runShowById(ctx, id, client.Functions.ShowByID) + }, + resources.FunctionJavascript: func(ctx context.Context, client *sdk.Client, id sdk.ObjectIdentifier) error { + return runShowById(ctx, id, client.Functions.ShowByID) + }, + resources.FunctionPython: func(ctx context.Context, client *sdk.Client, id sdk.ObjectIdentifier) error { + return runShowById(ctx, id, client.Functions.ShowByID) + }, + resources.FunctionScala: func(ctx context.Context, client *sdk.Client, id sdk.ObjectIdentifier) error { + return runShowById(ctx, id, client.Functions.ShowByID) + }, + resources.FunctionSql: func(ctx context.Context, client *sdk.Client, id sdk.ObjectIdentifier) error { + return runShowById(ctx, id, client.Functions.ShowByID) + }, 
resources.LegacyServiceUser: func(ctx context.Context, client *sdk.Client, id sdk.ObjectIdentifier) error { return runShowById(ctx, id, client.Users.ShowByID) }, @@ -181,6 +206,21 @@ var showByIdFunctions = map[resources.Resource]showByIdFunc{ resources.Procedure: func(ctx context.Context, client *sdk.Client, id sdk.ObjectIdentifier) error { return runShowById(ctx, id, client.Procedures.ShowByID) }, + resources.ProcedureJava: func(ctx context.Context, client *sdk.Client, id sdk.ObjectIdentifier) error { + return runShowById(ctx, id, client.Procedures.ShowByID) + }, + resources.ProcedureJavascript: func(ctx context.Context, client *sdk.Client, id sdk.ObjectIdentifier) error { + return runShowById(ctx, id, client.Procedures.ShowByID) + }, + resources.ProcedurePython: func(ctx context.Context, client *sdk.Client, id sdk.ObjectIdentifier) error { + return runShowById(ctx, id, client.Procedures.ShowByID) + }, + resources.ProcedureScala: func(ctx context.Context, client *sdk.Client, id sdk.ObjectIdentifier) error { + return runShowById(ctx, id, client.Procedures.ShowByID) + }, + resources.ProcedureSql: func(ctx context.Context, client *sdk.Client, id sdk.ObjectIdentifier) error { + return runShowById(ctx, id, client.Procedures.ShowByID) + }, resources.ResourceMonitor: func(ctx context.Context, client *sdk.Client, id sdk.ObjectIdentifier) error { return runShowById(ctx, id, client.ResourceMonitors.ShowByID) }, diff --git a/pkg/acceptance/helpers/function_client.go b/pkg/acceptance/helpers/function_client.go index 4d9bf35aaa..36c5ffcfb4 100644 --- a/pkg/acceptance/helpers/function_client.go +++ b/pkg/acceptance/helpers/function_client.go @@ -138,6 +138,26 @@ func (c *FunctionClient) CreateJava(t *testing.T) (*sdk.Function, func()) { return function, c.DropFunctionFunc(t, id) } +func (c *FunctionClient) CreateScalaStaged(t *testing.T, id sdk.SchemaObjectIdentifierWithArguments, dataType datatypes.DataType, importPath string, handler string) (*sdk.Function, func()) { + 
t.Helper() + ctx := context.Background() + + argName := "x" + argument := sdk.NewFunctionArgumentRequest(argName, dataType) + + request := sdk.NewCreateForScalaFunctionRequest(id.SchemaObjectId(), dataType, handler, "2.12"). + WithArguments([]sdk.FunctionArgumentRequest{*argument}). + WithImports([]sdk.FunctionImportRequest{*sdk.NewFunctionImportRequest().WithImport(importPath)}) + + err := c.client().CreateForScala(ctx, request) + require.NoError(t, err) + + function, err := c.client().ShowByID(ctx, id) + require.NoError(t, err) + + return function, c.DropFunctionFunc(t, id) +} + func (c *FunctionClient) CreateWithRequest(t *testing.T, id sdk.SchemaObjectIdentifierWithArguments, req *sdk.CreateForSQLFunctionRequest) *sdk.Function { t.Helper() ctx := context.Background() @@ -192,6 +212,18 @@ func (c *FunctionClient) SampleJavaDefinition(t *testing.T, className string, fu `, className, funcName, argName) } +func (c *FunctionClient) SampleJavaDefinitionNoArgs(t *testing.T, className string, funcName string) string { + t.Helper() + + return fmt.Sprintf(` + class %[1]s { + public static String %[2]s() { + return "hello"; + } + } +`, className, funcName) +} + func (c *FunctionClient) SampleJavascriptDefinition(t *testing.T, argName string) string { t.Helper() diff --git a/pkg/acceptance/helpers/function_setup_helpers.go b/pkg/acceptance/helpers/function_setup_helpers.go index 8f0447e443..d0f34d6768 100644 --- a/pkg/acceptance/helpers/function_setup_helpers.go +++ b/pkg/acceptance/helpers/function_setup_helpers.go @@ -15,8 +15,20 @@ import ( "github.com/stretchr/testify/require" ) +func (c *TestClient) CreateSampleJavaFunctionAndJarOnUserStage(t *testing.T) *TmpFunction { + t.Helper() + + return c.CreateSampleJavaFunctionAndJarInLocation(t, "@~") +} + +func (c *TestClient) CreateSampleJavaFunctionAndJarOnStage(t *testing.T, stage *sdk.Stage) *TmpFunction { + t.Helper() + + return c.CreateSampleJavaFunctionAndJarInLocation(t, stage.Location()) +} + // TODO [SNOW-1827324]: 
add TestClient ref to each specific client, so that we enhance specific client and not the base one -func (c *TestClient) CreateSampleJavaFunctionAndJar(t *testing.T) *TmpFunction { +func (c *TestClient) CreateSampleJavaFunctionAndJarInLocation(t *testing.T, stageLocation string) *TmpFunction { t.Helper() ctx := context.Background() @@ -32,7 +44,7 @@ func (c *TestClient) CreateSampleJavaFunctionAndJar(t *testing.T) *TmpFunction { handler := fmt.Sprintf("%s.%s", className, funcName) definition := c.Function.SampleJavaDefinition(t, className, funcName, argName) jarName := fmt.Sprintf("tf-%d-%s.jar", time.Now().Unix(), random.AlphaN(5)) - targetPath := fmt.Sprintf("@~/%s", jarName) + targetPath := fmt.Sprintf("%s/%s", stageLocation, jarName) request := sdk.NewCreateForJavaFunctionRequest(id.SchemaObjectId(), *returns, handler). WithArguments([]sdk.FunctionArgumentRequest{*argument}). @@ -42,19 +54,32 @@ func (c *TestClient) CreateSampleJavaFunctionAndJar(t *testing.T) *TmpFunction { err := c.context.client.Functions.CreateForJava(ctx, request) require.NoError(t, err) t.Cleanup(c.Function.DropFunctionFunc(t, id)) - t.Cleanup(c.Stage.RemoveFromUserStageFunc(t, jarName)) + t.Cleanup(c.Stage.RemoveFromStageFunc(t, stageLocation, jarName)) return &TmpFunction{ - FunctionId: id, - ClassName: className, - FuncName: funcName, - ArgName: argName, - ArgType: dataType, - JarName: jarName, + FunctionId: id, + ClassName: className, + FuncName: funcName, + ArgName: argName, + ArgType: dataType, + JarName: jarName, + StageLocation: stageLocation, } } -func (c *TestClient) CreateSampleJavaProcedureAndJar(t *testing.T) *TmpFunction { +func (c *TestClient) CreateSampleJavaProcedureAndJarOnUserStage(t *testing.T) *TmpFunction { + t.Helper() + + return c.CreateSampleJavaProcedureAndJarInLocation(t, "@~") +} + +func (c *TestClient) CreateSampleJavaProcedureAndJarOnStage(t *testing.T, stage *sdk.Stage) *TmpFunction { + t.Helper() + + return c.CreateSampleJavaProcedureAndJarInLocation(t, 
stage.Location()) +} + +func (c *TestClient) CreateSampleJavaProcedureAndJarInLocation(t *testing.T, stageLocation string) *TmpFunction { t.Helper() ctx := context.Background() @@ -70,7 +95,7 @@ func (c *TestClient) CreateSampleJavaProcedureAndJar(t *testing.T) *TmpFunction handler := fmt.Sprintf("%s.%s", className, funcName) definition := c.Procedure.SampleJavaDefinition(t, className, funcName, argName) jarName := fmt.Sprintf("tf-%d-%s.jar", time.Now().Unix(), random.AlphaN(5)) - targetPath := fmt.Sprintf("@~/%s", jarName) + targetPath := fmt.Sprintf("%s/%s", stageLocation, jarName) packages := []sdk.ProcedurePackageRequest{*sdk.NewProcedurePackageRequest("com.snowflake:snowpark:1.14.0")} request := sdk.NewCreateForJavaProcedureRequest(id.SchemaObjectId(), *returns, "11", packages, handler). @@ -81,15 +106,16 @@ func (c *TestClient) CreateSampleJavaProcedureAndJar(t *testing.T) *TmpFunction err := c.context.client.Procedures.CreateForJava(ctx, request) require.NoError(t, err) t.Cleanup(c.Procedure.DropProcedureFunc(t, id)) - t.Cleanup(c.Stage.RemoveFromUserStageFunc(t, jarName)) + t.Cleanup(c.Stage.RemoveFromStageFunc(t, stageLocation, jarName)) return &TmpFunction{ - FunctionId: id, - ClassName: className, - FuncName: funcName, - ArgName: argName, - ArgType: dataType, - JarName: jarName, + FunctionId: id, + ClassName: className, + FuncName: funcName, + ArgName: argName, + ArgType: dataType, + JarName: jarName, + StageLocation: stageLocation, } } @@ -121,30 +147,32 @@ func (c *TestClient) CreateSamplePythonFunctionAndModule(t *testing.T) *TmpFunct moduleFileName := filepath.Base(modulePath) return &TmpFunction{ - FunctionId: id, - ModuleName: strings.TrimSuffix(moduleFileName, ".py"), - FuncName: funcName, - ArgName: argName, - ArgType: dataType, + FunctionId: id, + ModuleName: strings.TrimSuffix(moduleFileName, ".py"), + FuncName: funcName, + ArgName: argName, + ArgType: dataType, + StageLocation: "@~", } } type TmpFunction struct { - FunctionId 
sdk.SchemaObjectIdentifierWithArguments - ClassName string - ModuleName string - FuncName string - ArgName string - ArgType datatypes.DataType - JarName string + FunctionId sdk.SchemaObjectIdentifierWithArguments + ClassName string + ModuleName string + FuncName string + ArgName string + ArgType datatypes.DataType + JarName string + StageLocation string } func (f *TmpFunction) JarLocation() string { - return fmt.Sprintf("@~/%s", f.JarName) + return fmt.Sprintf("%s/%s", f.StageLocation, f.JarName) } func (f *TmpFunction) PythonModuleLocation() string { - return fmt.Sprintf("@~/%s", f.PythonFileName()) + return fmt.Sprintf("%s/%s", f.StageLocation, f.PythonFileName()) } func (f *TmpFunction) PythonFileName() string { diff --git a/pkg/acceptance/helpers/ids_generator.go b/pkg/acceptance/helpers/ids_generator.go index ade93d46bc..46b0e85d80 100644 --- a/pkg/acceptance/helpers/ids_generator.go +++ b/pkg/acceptance/helpers/ids_generator.go @@ -4,7 +4,9 @@ import ( "strings" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/helpers/random" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/internal/collections" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk/datatypes" ) type IdsGenerator struct { @@ -97,6 +99,11 @@ func (c *IdsGenerator) NewSchemaObjectIdentifierWithArguments(name string, argum return sdk.NewSchemaObjectIdentifierWithArguments(c.SchemaId().DatabaseName(), c.SchemaId().Name(), name, arguments...) } +func (c *IdsGenerator) NewSchemaObjectIdentifierWithArgumentsNewDataTypes(name string, arguments ...datatypes.DataType) sdk.SchemaObjectIdentifierWithArguments { + legacyDataTypes := collections.Map(arguments, sdk.LegacyDataTypeFrom) + return sdk.NewSchemaObjectIdentifierWithArguments(c.SchemaId().DatabaseName(), c.SchemaId().Name(), name, legacyDataTypes...) 
+} + func (c *IdsGenerator) NewSchemaObjectIdentifierWithArgumentsInSchema(name string, schemaId sdk.DatabaseObjectIdentifier, argumentDataTypes ...sdk.DataType) sdk.SchemaObjectIdentifierWithArguments { return sdk.NewSchemaObjectIdentifierWithArgumentsInSchema(schemaId, name, argumentDataTypes...) } @@ -105,6 +112,11 @@ func (c *IdsGenerator) RandomSchemaObjectIdentifierWithArguments(arguments ...sd return sdk.NewSchemaObjectIdentifierWithArguments(c.SchemaId().DatabaseName(), c.SchemaId().Name(), c.Alpha(), arguments...) } +func (c *IdsGenerator) RandomSchemaObjectIdentifierWithArgumentsNewDataTypes(arguments ...datatypes.DataType) sdk.SchemaObjectIdentifierWithArguments { + legacyDataTypes := collections.Map(arguments, sdk.LegacyDataTypeFrom) + return sdk.NewSchemaObjectIdentifierWithArguments(c.SchemaId().DatabaseName(), c.SchemaId().Name(), c.Alpha(), legacyDataTypes...) +} + func (c *IdsGenerator) Alpha() string { return c.AlphaN(6) } diff --git a/pkg/acceptance/helpers/stage_client.go b/pkg/acceptance/helpers/stage_client.go index 60bac47c90..5a1176b314 100644 --- a/pkg/acceptance/helpers/stage_client.go +++ b/pkg/acceptance/helpers/stage_client.go @@ -126,6 +126,21 @@ func (c *StageClient) RemoveFromUserStageFunc(t *testing.T, pathOnStage string) } } +func (c *StageClient) RemoveFromStage(t *testing.T, stageLocation string, pathOnStage string) { + t.Helper() + ctx := context.Background() + + _, err := c.context.client.ExecForTests(ctx, fmt.Sprintf(`REMOVE %s/%s`, stageLocation, pathOnStage)) + require.NoError(t, err) +} + +func (c *StageClient) RemoveFromStageFunc(t *testing.T, stageLocation string, pathOnStage string) func() { + t.Helper() + return func() { + c.RemoveFromStage(t, stageLocation, pathOnStage) + } +} + func (c *StageClient) PutOnStageWithContent(t *testing.T, id sdk.SchemaObjectIdentifier, filename string, content string) { t.Helper() ctx := context.Background() diff --git a/pkg/resources/custom_diffs.go b/pkg/resources/custom_diffs.go index 
eb33b246b7..1ea9025ac5 100644 --- a/pkg/resources/custom_diffs.go +++ b/pkg/resources/custom_diffs.go @@ -284,3 +284,15 @@ func RecreateWhenResourceBoolFieldChangedExternally(boolField string, wantValue return nil } } + +// RecreateWhenResourceStringFieldChangedExternally recreates a resource when wantValue is different from value in field. +// TODO [SNOW-1850370]: merge with above? test. +func RecreateWhenResourceStringFieldChangedExternally(field string, wantValue string) schema.CustomizeDiffFunc { + return func(_ context.Context, diff *schema.ResourceDiff, _ any) error { + if o, n := diff.GetChange(field); n != nil && o != nil && o != "" && n.(string) != wantValue { + log.Printf("[DEBUG] new external value for %s: %s (want: %s), recreating the resource...\n", field, n.(string), wantValue) + return errors.Join(diff.SetNew(field, wantValue), diff.ForceNew(field)) + } + return nil + } +} diff --git a/pkg/resources/doc_helpers.go b/pkg/resources/doc_helpers.go index eb437015f9..51142971c6 100644 --- a/pkg/resources/doc_helpers.go +++ b/pkg/resources/doc_helpers.go @@ -4,8 +4,9 @@ import ( "fmt" "strings" - "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/internal/provider/docs" providerresources "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/provider/resources" + + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/internal/provider/docs" ) func possibleValuesListed[T ~string | ~int](values []T) string { diff --git a/pkg/resources/function.go b/pkg/resources/function.go index 38c6619a37..69e972f7d5 100644 --- a/pkg/resources/function.go +++ b/pkg/resources/function.go @@ -171,7 +171,7 @@ func Function() *schema.Resource { CreateContext: TrackingCreateWrapper(resources.Function, CreateContextFunction), ReadContext: TrackingReadWrapper(resources.Function, ReadContextFunction), UpdateContext: TrackingUpdateWrapper(resources.Function, UpdateContextFunction), - DeleteContext: TrackingDeleteWrapper(resources.Function, 
DeleteContextFunction), + DeleteContext: TrackingDeleteWrapper(resources.Function, DeleteFunction), CustomizeDiff: TrackingCustomDiffWrapper(resources.Function, customdiff.All( // TODO(SNOW-1348103): add `arguments` to ComputedIfAnyAttributeChanged. This can't be done now because this function compares values without diff suppress. @@ -722,20 +722,6 @@ func UpdateContextFunction(ctx context.Context, d *schema.ResourceData, meta int return ReadContextFunction(ctx, d, meta) } -func DeleteContextFunction(ctx context.Context, d *schema.ResourceData, meta interface{}) diag.Diagnostics { - client := meta.(*provider.Context).Client - - id, err := sdk.ParseSchemaObjectIdentifierWithArguments(d.Id()) - if err != nil { - return diag.FromErr(err) - } - if err := client.Functions.Drop(ctx, sdk.NewDropFunctionRequest(id).WithIfExists(true)); err != nil { - return diag.FromErr(err) - } - d.SetId("") - return nil -} - func parseFunctionArguments(d *schema.ResourceData) ([]sdk.FunctionArgumentRequest, diag.Diagnostics) { args := make([]sdk.FunctionArgumentRequest, 0) if v, ok := d.GetOk("arguments"); ok { diff --git a/pkg/resources/function_commons.go b/pkg/resources/function_commons.go index fd4d57913e..ea005da2c2 100644 --- a/pkg/resources/function_commons.go +++ b/pkg/resources/function_commons.go @@ -1,11 +1,18 @@ package resources import ( + "context" + "errors" "fmt" + "log" "slices" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/internal/collections" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/internal/provider" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/schemas" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk/datatypes" + "github.com/hashicorp/terraform-plugin-sdk/v2/diag" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" ) @@ -20,6 +27,7 @@ func init() { type functionSchemaDef struct { additionalArguments []string 
functionDefinitionDescription string + functionDefinitionRequired bool runtimeVersionRequired bool runtimeVersionDescription string importsDescription string @@ -38,6 +46,11 @@ func setUpFunctionSchema(definition functionSchemaDef) map[string]*schema.Schema } if v, ok := currentSchema["function_definition"]; ok && v != nil { v.Description = diffSuppressStatementFieldDescription(definition.functionDefinitionDescription) + if definition.functionDefinitionRequired { + v.Required = true + } else { + v.Optional = true + } } if v, ok := currentSchema["runtime_version"]; ok && v != nil { if definition.runtimeVersionRequired { @@ -75,7 +88,7 @@ var ( "arguments", "return_type", "null_input_behavior", - "return_behavior", + "return_results_behavior", "comment", "function_definition", "function_language", @@ -94,16 +107,18 @@ var ( "target_path", }, functionDefinitionDescription: functionDefinitionTemplate("Java", "https://docs.snowflake.com/en/developer-guide/udf/java/udf-java-introduction"), - runtimeVersionRequired: false, - runtimeVersionDescription: "Specifies the Java JDK runtime version to use. The supported versions of Java are 11.x and 17.x. If RUNTIME_VERSION is not set, Java JDK 11 is used.", - importsDescription: "The location (stage), path, and name of the file(s) to import. A file can be a JAR file or another type of file. If the file is a JAR file, it can contain one or more .class files and zero or more resource files. JNI (Java Native Interface) is not supported. Snowflake prohibits loading libraries that contain native code (as opposed to Java bytecode). Java UDFs can also read non-JAR files. For an example, see [Reading a file specified statically in IMPORTS](https://docs.snowflake.com/en/developer-guide/udf/java/udf-java-cookbook.html#label-reading-file-from-java-udf-imports). 
Consult the [docs](https://docs.snowflake.com/en/sql-reference/sql/create-function#java).", - packagesDescription: "The name and version number of Snowflake system packages required as dependencies. The value should be of the form `package_name:version_number`, where `package_name` is `snowflake_domain:package`.", - handlerDescription: "The name of the handler method or class. If the handler is for a scalar UDF, returning a non-tabular value, the HANDLER value should be a method name, as in the following form: `MyClass.myMethod`. If the handler is for a tabular UDF, the HANDLER value should be the name of a handler class.", - targetPathDescription: "The TARGET_PATH clause specifies the location to which Snowflake should write the compiled code (JAR file) after compiling the source code specified in the `function_definition`. If this clause is included, the user should manually remove the JAR file when it is no longer needed (typically when the Java UDF is dropped). If this clause is omitted, Snowflake re-compiles the source code each time the code is needed. The JAR file is not stored permanently, and the user does not need to clean up the JAR file. Snowflake returns an error if the TARGET_PATH matches an existing file; you cannot use TARGET_PATH to overwrite an existing file.", + // May be optional for java because if it is not set, describe return empty version. + runtimeVersionRequired: false, + runtimeVersionDescription: "Specifies the Java JDK runtime version to use. The supported versions of Java are 11.x and 17.x. If RUNTIME_VERSION is not set, Java JDK 11 is used.", + importsDescription: "The location (stage), path, and name of the file(s) to import. A file can be a JAR file or another type of file. If the file is a JAR file, it can contain one or more .class files and zero or more resource files. JNI (Java Native Interface) is not supported. Snowflake prohibits loading libraries that contain native code (as opposed to Java bytecode). 
Java UDFs can also read non-JAR files. For an example, see [Reading a file specified statically in IMPORTS](https://docs.snowflake.com/en/developer-guide/udf/java/udf-java-cookbook.html#label-reading-file-from-java-udf-imports). Consult the [docs](https://docs.snowflake.com/en/sql-reference/sql/create-function#java).", + packagesDescription: "The name and version number of Snowflake system packages required as dependencies. The value should be of the form `package_name:version_number`, where `package_name` is `snowflake_domain:package`.", + handlerDescription: "The name of the handler method or class. If the handler is for a scalar UDF, returning a non-tabular value, the HANDLER value should be a method name, as in the following form: `MyClass.myMethod`. If the handler is for a tabular UDF, the HANDLER value should be the name of a handler class.", + targetPathDescription: "The TARGET_PATH clause specifies the location to which Snowflake should write the compiled code (JAR file) after compiling the source code specified in the `function_definition`. If this clause is included, the user should manually remove the JAR file when it is no longer needed (typically when the Java UDF is dropped). If this clause is omitted, Snowflake re-compiles the source code each time the code is needed. The JAR file is not stored permanently, and the user does not need to clean up the JAR file. 
Snowflake returns an error if the TARGET_PATH matches an existing file; you cannot use TARGET_PATH to overwrite an existing file.", } javascriptFunctionSchemaDefinition = functionSchemaDef{ additionalArguments: []string{}, functionDefinitionDescription: functionDefinitionTemplate("JavaScript", "https://docs.snowflake.com/en/developer-guide/udf/javascript/udf-javascript-introduction"), + functionDefinitionRequired: true, } pythonFunctionSchemaDefinition = functionSchemaDef{ additionalArguments: []string{ @@ -143,6 +158,7 @@ var ( sqlFunctionSchemaDefinition = functionSchemaDef{ additionalArguments: []string{}, functionDefinitionDescription: functionDefinitionTemplate("SQL", "https://docs.snowflake.com/en/developer-guide/udf/sql/udf-sql-introduction"), + functionDefinitionRequired: true, } ) @@ -214,6 +230,11 @@ func functionBaseSchema() map[string]schema.Schema { DiffSuppressFunc: DiffSuppressDataTypes, Description: "The argument type.", }, + "arg_default_value": { + Type: schema.TypeString, + Optional: true, + Description: externalChangesNotDetectedFieldDescription("Optional default value for the argument. For text values use single quotes. Numeric values can be unquoted."), + }, }, }, Optional: true, @@ -228,28 +249,26 @@ func functionBaseSchema() map[string]schema.Schema { ValidateDiagFunc: IsDataTypeValid, DiffSuppressFunc: DiffSuppressDataTypes, Description: "Specifies the results returned by the UDF, which determines the UDF type. Use `` to create a scalar UDF that returns a single value with the specified data type. Use `TABLE (col_name col_data_type, ...)` to creates a table UDF that returns tabular results with the specified table column(s) and column type(s). 
For the details, consult the [docs](https://docs.snowflake.com/en/sql-reference/sql/create-function#all-languages).", - // TODO [SNOW-1348103]: adjust DiffSuppressFunc }, "null_input_behavior": { Type: schema.TypeString, Optional: true, ForceNew: true, ValidateDiagFunc: sdkValidation(sdk.ToNullInputBehavior), - DiffSuppressFunc: SuppressIfAny(NormalizeAndCompare(sdk.ToNullInputBehavior), IgnoreChangeToCurrentSnowflakeValueInShow("null_input_behavior")), + DiffSuppressFunc: SuppressIfAny(NormalizeAndCompare(sdk.ToNullInputBehavior)), // TODO [SNOW-1348103]: IgnoreChangeToCurrentSnowflakeValueInShow("null_input_behavior") but not in show Description: fmt.Sprintf("Specifies the behavior of the function when called with null inputs. Valid values are (case-insensitive): %s.", possibleValuesListed(sdk.AllAllowedNullInputBehaviors)), }, - "return_behavior": { + "return_results_behavior": { Type: schema.TypeString, Optional: true, ForceNew: true, ValidateDiagFunc: sdkValidation(sdk.ToReturnResultsBehavior), - DiffSuppressFunc: SuppressIfAny(NormalizeAndCompare(sdk.ToReturnResultsBehavior), IgnoreChangeToCurrentSnowflakeValueInShow("return_behavior")), + DiffSuppressFunc: SuppressIfAny(NormalizeAndCompare(sdk.ToReturnResultsBehavior)), // TODO [SNOW-1348103]: IgnoreChangeToCurrentSnowflakeValueInShow("return_results_behavior") but not in show Description: fmt.Sprintf("Specifies the behavior of the function when returning results. Valid values are (case-insensitive): %s.", possibleValuesListed(sdk.AllAllowedReturnResultsBehaviors)), }, "runtime_version": { Type: schema.TypeString, ForceNew: true, - // TODO [SNOW-1348103]: may be optional for java without consequence because if it is not set, the describe is not returning any version. 
}, "comment": { Type: schema.TypeString, @@ -258,12 +277,26 @@ func functionBaseSchema() map[string]schema.Schema { Default: "user-defined function", Description: "Specifies a comment for the function.", }, - // TODO [SNOW-1348103]: because of https://docs.snowflake.com/en/sql-reference/sql/create-function#id6, maybe it will be better to split into stage_name + target_path + // split into two because of https://docs.snowflake.com/en/sql-reference/sql/create-function#id6 + // TODO [SNOW-1348103]: add validations preventing setting improper stage and path "imports": { Type: schema.TypeSet, - Elem: &schema.Schema{Type: schema.TypeString}, Optional: true, ForceNew: true, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "stage_location": { + Type: schema.TypeString, + Required: true, + Description: "Stage location without leading `@`. To use your user's stage set this to `~`, otherwise pass fully qualified name of the stage (with every part contained in double quotes or use `snowflake_stage..fully_qualified_name` if you manage this stage through terraform).", + }, + "path_on_stage": { + Type: schema.TypeString, + Required: true, + Description: "Path for import on stage, without the leading `/`.", + }, + }, + }, }, // TODO [SNOW-1348103]: what do we do with the version "latest". "packages": { @@ -308,15 +341,28 @@ func functionBaseSchema() map[string]schema.Schema { }, Description: "Assigns the names of [secrets](https://docs.snowflake.com/en/sql-reference/sql/create-secret) to variables so that you can use the variables to reference the secrets when retrieving information from secrets in handler code. 
Secrets you specify here must be allowed by the [external access integration](https://docs.snowflake.com/en/sql-reference/sql/create-external-access-integration) specified as a value of this CREATE FUNCTION command’s EXTERNAL_ACCESS_INTEGRATIONS parameter.", }, - // TODO [SNOW-1348103]: because of https://docs.snowflake.com/en/sql-reference/sql/create-function#id6, maybe it will be better to split into stage + path "target_path": { - Type: schema.TypeString, + Type: schema.TypeSet, + MaxItems: 1, Optional: true, ForceNew: true, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "stage_location": { + Type: schema.TypeString, + Required: true, + Description: "Stage location without leading `@`. To use your user's stage set this to `~`, otherwise pass fully qualified name of the stage (with every part contained in double quotes or use `snowflake_stage..fully_qualified_name` if you manage this stage through terraform).", + }, + "path_on_stage": { + Type: schema.TypeString, + Required: true, + Description: "Path for import on stage, without the leading `/`.", + }, + }, + }, }, "function_definition": { Type: schema.TypeString, - Required: true, ForceNew: true, DiffSuppressFunc: DiffSuppressStatement, }, @@ -338,9 +384,204 @@ func functionBaseSchema() map[string]schema.Schema { Computed: true, Description: "Outputs the result of `SHOW PARAMETERS IN FUNCTION` for the given function.", Elem: &schema.Resource{ - Schema: functionParametersSchema, + Schema: schemas.ShowFunctionParametersSchema, }, }, FullyQualifiedNameAttributeName: *schemas.FullyQualifiedNameSchema, } } + +func DeleteFunction(ctx context.Context, d *schema.ResourceData, meta any) diag.Diagnostics { + client := meta.(*provider.Context).Client + + id, err := sdk.ParseSchemaObjectIdentifierWithArguments(d.Id()) + if err != nil { + return diag.FromErr(err) + } + + err = client.Functions.Drop(ctx, sdk.NewDropFunctionRequest(id).WithIfExists(true)) + if err != nil { + return diag.FromErr(err) + } + + 
d.SetId("") + return nil +} + +func parseFunctionArgumentsCommon(d *schema.ResourceData) ([]sdk.FunctionArgumentRequest, error) { + args := make([]sdk.FunctionArgumentRequest, 0) + if v, ok := d.GetOk("arguments"); ok { + for _, arg := range v.([]any) { + argName := arg.(map[string]any)["arg_name"].(string) + argDataType := arg.(map[string]any)["arg_data_type"].(string) + dataType, err := datatypes.ParseDataType(argDataType) + if err != nil { + return nil, err + } + request := sdk.NewFunctionArgumentRequest(argName, dataType) + + if argDefaultValue, defaultValuePresent := arg.(map[string]any)["arg_default_value"]; defaultValuePresent && argDefaultValue.(string) != "" { + request.WithDefaultValue(argDefaultValue.(string)) + } + + args = append(args, *request) + } + } + return args, nil +} + +func parseFunctionImportsCommon(d *schema.ResourceData) ([]sdk.FunctionImportRequest, error) { + imports := make([]sdk.FunctionImportRequest, 0) + if v, ok := d.GetOk("imports"); ok { + for _, imp := range v.(*schema.Set).List() { + stageLocation := imp.(map[string]any)["stage_location"].(string) + pathOnStage := imp.(map[string]any)["path_on_stage"].(string) + imports = append(imports, *sdk.NewFunctionImportRequest().WithImport(fmt.Sprintf("@%s/%s", stageLocation, pathOnStage))) + } + } + return imports, nil +} + +func parseFunctionTargetPathCommon(d *schema.ResourceData) (string, error) { + var tp string + if v, ok := d.GetOk("target_path"); ok { + for _, p := range v.(*schema.Set).List() { + stageLocation := p.(map[string]any)["stage_location"].(string) + pathOnStage := p.(map[string]any)["path_on_stage"].(string) + tp = fmt.Sprintf("@%s/%s", stageLocation, pathOnStage) + } + } + return tp, nil +} + +func parseFunctionReturnsCommon(d *schema.ResourceData) (*sdk.FunctionReturnsRequest, error) { + returnTypeRaw := d.Get("return_type").(string) + dataType, err := datatypes.ParseDataType(returnTypeRaw) + if err != nil { + return nil, err + } + returns := 
sdk.NewFunctionReturnsRequest() + switch v := dataType.(type) { + case *datatypes.TableDataType: + var cr []sdk.FunctionColumnRequest + for _, c := range v.Columns() { + cr = append(cr, *sdk.NewFunctionColumnRequest(c.ColumnName(), c.ColumnType())) + } + returns.WithTable(*sdk.NewFunctionReturnsTableRequest().WithColumns(cr)) + default: + returns.WithResultDataType(*sdk.NewFunctionReturnsResultDataTypeRequest(dataType)) + } + return returns, nil +} + +func setFunctionImportsInBuilder[T any](d *schema.ResourceData, setImports func([]sdk.FunctionImportRequest) T) error { + imports, err := parseFunctionImportsCommon(d) + if err != nil { + return err + } + setImports(imports) + return nil +} + +func setFunctionTargetPathInBuilder[T any](d *schema.ResourceData, setTargetPath func(string) T) error { + tp, err := parseFunctionTargetPathCommon(d) + if err != nil { + return err + } + if tp != "" { + setTargetPath(tp) + } + return nil +} + +func queryAllFunctionsDetailsCommon(ctx context.Context, d *schema.ResourceData, client *sdk.Client, id sdk.SchemaObjectIdentifierWithArguments) (*allFunctionDetailsCommon, diag.Diagnostics) { + functionDetails, err := client.Functions.DescribeDetails(ctx, id) + if err != nil { + if errors.Is(err, sdk.ErrObjectNotExistOrAuthorized) { + log.Printf("[DEBUG] function (%s) not found or we are not authorized. Err: %s", d.Id(), err) + d.SetId("") + return nil, diag.Diagnostics{ + diag.Diagnostic{ + Severity: diag.Warning, + Summary: "Failed to query function. Marking the resource as removed.", + Detail: fmt.Sprintf("Function: %s, Err: %s", id.FullyQualifiedName(), err), + }, + } + } + return nil, diag.FromErr(err) + } + function, err := client.Functions.ShowByID(ctx, id) + if err != nil { + if errors.Is(err, sdk.ErrObjectNotFound) { + d.SetId("") + return nil, diag.Diagnostics{ + diag.Diagnostic{ + Severity: diag.Warning, + Summary: "Failed to query function. 
Marking the resource as removed.", + Detail: fmt.Sprintf("Function: %s, Err: %s", id.FullyQualifiedName(), err), + }, + } + } + return nil, diag.FromErr(err) + } + functionParameters, err := client.Functions.ShowParameters(ctx, id) + if err != nil { + return nil, diag.FromErr(err) + } + return &allFunctionDetailsCommon{ + function: function, + functionDetails: functionDetails, + functionParameters: functionParameters, + }, nil +} + +type allFunctionDetailsCommon struct { + function *sdk.Function + functionDetails *sdk.FunctionDetails + functionParameters []*sdk.Parameter +} + +func readFunctionArgumentsCommon(d *schema.ResourceData, args []sdk.NormalizedArgument) error { + if len(args) == 0 { + // TODO [SNOW-1348103]: handle empty list + return nil + } + // We do it the unusual way because the default values are not returned by SF. + // We update what we have - leaving the defaults unchanged. + if currentArgs, ok := d.Get("arguments").([]map[string]any); !ok { + return fmt.Errorf("arguments must be a list") + } else { + for i, arg := range args { + currentArgs[i]["arg_name"] = arg.Name + currentArgs[i]["arg_data_type"] = arg.DataType.ToSql() + } + return d.Set("arguments", currentArgs) + } +} + +func readFunctionImportsCommon(d *schema.ResourceData, imports []sdk.NormalizedPath) error { + if len(imports) == 0 { + // don't do anything if imports not present + return nil + } + imps := collections.Map(imports, func(imp sdk.NormalizedPath) map[string]any { + return map[string]any{ + "stage_location": imp.StageLocation, + "path_on_stage": imp.PathOnStage, + } + }) + return d.Set("imports", imps) +} + +func readFunctionTargetPathCommon(d *schema.ResourceData, normalizedPath *sdk.NormalizedPath) error { + if normalizedPath == nil { + // don't do anything if imports not present + return nil + } + tp := make([]map[string]any, 1) + tp[0] = map[string]any{ + "stage_location": normalizedPath.StageLocation, + "path_on_stage": normalizedPath.PathOnStage, + } + return 
d.Set("target_path", tp) +} diff --git a/pkg/resources/function_java.go b/pkg/resources/function_java.go index 5e05d3007f..b1e60da7cf 100644 --- a/pkg/resources/function_java.go +++ b/pkg/resources/function_java.go @@ -2,11 +2,18 @@ package resources import ( "context" + "errors" + "fmt" + "reflect" "strings" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/helpers" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/internal/collections" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/internal/provider" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/provider/resources" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/schemas" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk/datatypes" "github.com/hashicorp/terraform-plugin-sdk/v2/diag" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/customdiff" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" @@ -17,15 +24,19 @@ func FunctionJava() *schema.Resource { CreateContext: TrackingCreateWrapper(resources.FunctionJava, CreateContextFunctionJava), ReadContext: TrackingReadWrapper(resources.FunctionJava, ReadContextFunctionJava), UpdateContext: TrackingUpdateWrapper(resources.FunctionJava, UpdateContextFunctionJava), - DeleteContext: TrackingDeleteWrapper(resources.FunctionJava, DeleteContextFunctionJava), + DeleteContext: TrackingDeleteWrapper(resources.FunctionJava, DeleteFunction), Description: "Resource used to manage java function objects. 
For more information, check [function documentation](https://docs.snowflake.com/en/sql-reference/sql/create-function).", CustomizeDiff: TrackingCustomDiffWrapper(resources.FunctionJava, customdiff.All( - // TODO[SNOW-1348103]: ComputedIfAnyAttributeChanged(javaFunctionSchema, ShowOutputAttributeName, ...), + // TODO [SNOW-1348103]: ComputedIfAnyAttributeChanged(javaFunctionSchema, ShowOutputAttributeName, ...), ComputedIfAnyAttributeChanged(javaFunctionSchema, FullyQualifiedNameAttributeName, "name"), ComputedIfAnyAttributeChanged(functionParametersSchema, ParametersAttributeName, collections.Map(sdk.AsStringList(sdk.AllFunctionParameters), strings.ToLower)...), functionParametersCustomDiff, - // TODO[SNOW-1348103]: recreate when type changed externally + // The language check is more for the future. + // Currently, almost all attributes are marked as forceNew. + // When language changes, these attributes also change, causing the object to recreate either way. + // The only potential option is java staged -> scala staged (however scala need runtime_version which may interfere). 
+ RecreateWhenResourceStringFieldChangedExternally("function_language", "JAVA"), )), Schema: collections.MergeMaps(javaFunctionSchema, functionParametersSchema), @@ -36,17 +47,155 @@ func FunctionJava() *schema.Resource { } func CreateContextFunctionJava(ctx context.Context, d *schema.ResourceData, meta any) diag.Diagnostics { - return nil + client := meta.(*provider.Context).Client + database := d.Get("database").(string) + sc := d.Get("schema").(string) + name := d.Get("name").(string) + + argumentRequests, err := parseFunctionArgumentsCommon(d) + if err != nil { + return diag.FromErr(err) + } + returns, err := parseFunctionReturnsCommon(d) + if err != nil { + return diag.FromErr(err) + } + handler := d.Get("handler").(string) + + argumentDataTypes := collections.Map(argumentRequests, func(r sdk.FunctionArgumentRequest) datatypes.DataType { return r.ArgDataType }) + id := sdk.NewSchemaObjectIdentifierWithArgumentsNormalized(database, sc, name, argumentDataTypes...) + request := sdk.NewCreateForJavaFunctionRequest(id.SchemaObjectId(), *returns, handler). 
+ WithArguments(argumentRequests) + + errs := errors.Join( + booleanStringAttributeCreateBuilder(d, "is_secure", request.WithSecure), + attributeMappedValueCreateBuilder[string](d, "null_input_behavior", request.WithNullInputBehavior, sdk.ToNullInputBehavior), + attributeMappedValueCreateBuilder[string](d, "return_results_behavior", request.WithReturnResultsBehavior, sdk.ToReturnResultsBehavior), + stringAttributeCreateBuilder(d, "runtime_version", request.WithRuntimeVersion), + // TODO [SNOW-1348103]: handle the rest of the attributes + // comment + setFunctionImportsInBuilder(d, request.WithImports), + // packages + // external_access_integrations + // secrets + setFunctionTargetPathInBuilder(d, request.WithTargetPath), + stringAttributeCreateBuilder(d, "function_definition", request.WithFunctionDefinitionWrapped), + ) + if errs != nil { + return diag.FromErr(errs) + } + + if err := client.Functions.CreateForJava(ctx, request); err != nil { + return diag.FromErr(err) + } + d.SetId(helpers.EncodeResourceIdentifier(id)) + + // parameters do not work in create function (query does not fail but parameters stay unchanged) + setRequest := sdk.NewFunctionSetRequest() + if parametersCreateDiags := handleFunctionParametersCreate(d, setRequest); len(parametersCreateDiags) > 0 { + return parametersCreateDiags + } + if !reflect.DeepEqual(*setRequest, *sdk.NewFunctionSetRequest()) { + err := client.Functions.Alter(ctx, sdk.NewAlterFunctionRequest(id).WithSet(*setRequest)) + if err != nil { + return diag.FromErr(err) + } + } + + return ReadContextFunctionJava(ctx, d, meta) } func ReadContextFunctionJava(ctx context.Context, d *schema.ResourceData, meta any) diag.Diagnostics { + client := meta.(*provider.Context).Client + id, err := sdk.ParseSchemaObjectIdentifierWithArguments(d.Id()) + if err != nil { + return diag.FromErr(err) + } + + allFunctionDetails, diags := queryAllFunctionsDetailsCommon(ctx, d, client, id) + if diags != nil { + return diags + } + + // TODO 
[SNOW-1348103]: handle external changes marking + // TODO [SNOW-1348103]: handle setting state to value from config + + errs := errors.Join( + // TODO [SNOW-1348103]: set the rest of the fields + // not reading is_secure on purpose (handled as external change to show output) + readFunctionArgumentsCommon(d, allFunctionDetails.functionDetails.NormalizedArguments), + d.Set("return_type", allFunctionDetails.functionDetails.ReturnDataType.ToSql()), + // not reading null_input_behavior on purpose (handled as external change to show output) + // not reading return_results_behavior on purpose (handled as external change to show output) + setOptionalFromStringPtr(d, "runtime_version", allFunctionDetails.functionDetails.RuntimeVersion), + // comment + readFunctionImportsCommon(d, allFunctionDetails.functionDetails.NormalizedImports), + // packages + setRequiredFromStringPtr(d, "handler", allFunctionDetails.functionDetails.Handler), + // external_access_integrations + // secrets + readFunctionTargetPathCommon(d, allFunctionDetails.functionDetails.NormalizedTargetPath), + setOptionalFromStringPtr(d, "function_definition", allFunctionDetails.functionDetails.Body), + d.Set("function_language", allFunctionDetails.functionDetails.Language), + + handleFunctionParameterRead(d, allFunctionDetails.functionParameters), + d.Set(FullyQualifiedNameAttributeName, id.FullyQualifiedName()), + d.Set(ShowOutputAttributeName, []map[string]any{schemas.FunctionToSchema(allFunctionDetails.function)}), + d.Set(ParametersAttributeName, []map[string]any{schemas.FunctionParametersToSchema(allFunctionDetails.functionParameters)}), + ) + if errs != nil { + return diag.FromErr(errs) + } + + return nil } func UpdateContextFunctionJava(ctx context.Context, d *schema.ResourceData, meta any) diag.Diagnostics { - return nil -} + client := meta.(*provider.Context).Client + id, err := sdk.ParseSchemaObjectIdentifierWithArguments(d.Id()) + if err != nil { + return diag.FromErr(err) + } -func 
DeleteContextFunctionJava(ctx context.Context, d *schema.ResourceData, meta any) diag.Diagnostics { - return nil + if d.HasChange("name") { + newId := sdk.NewSchemaObjectIdentifierWithArgumentsInSchema(id.SchemaId(), d.Get("name").(string), id.ArgumentDataTypes()...) + + err := client.Functions.Alter(ctx, sdk.NewAlterFunctionRequest(id).WithRenameTo(newId.SchemaObjectId())) + if err != nil { + return diag.FromErr(fmt.Errorf("error renaming function %v err = %w", d.Id(), err)) + } + + d.SetId(helpers.EncodeResourceIdentifier(newId)) + id = newId + } + + // Batch SET operations and UNSET operations + setRequest := sdk.NewFunctionSetRequest() + unsetRequest := sdk.NewFunctionUnsetRequest() + + // TODO [SNOW-1348103]: handle all updates + // secure + // external access integration + // secrets + // comment + + if updateParamDiags := handleFunctionParametersUpdate(d, setRequest, unsetRequest); len(updateParamDiags) > 0 { + return updateParamDiags + } + + // Apply SET and UNSET changes + if !reflect.DeepEqual(*setRequest, *sdk.NewFunctionSetRequest()) { + err := client.Functions.Alter(ctx, sdk.NewAlterFunctionRequest(id).WithSet(*setRequest)) + if err != nil { + return diag.FromErr(err) + } + } + if !reflect.DeepEqual(*unsetRequest, *sdk.NewFunctionUnsetRequest()) { + err := client.Functions.Alter(ctx, sdk.NewAlterFunctionRequest(id).WithUnset(*unsetRequest)) + if err != nil { + return diag.FromErr(err) + } + } + + return ReadContextFunctionJava(ctx, d, meta) } diff --git a/pkg/resources/function_java_acceptance_test.go b/pkg/resources/function_java_acceptance_test.go new file mode 100644 index 0000000000..b805187b69 --- /dev/null +++ b/pkg/resources/function_java_acceptance_test.go @@ -0,0 +1,435 @@ +package resources_test + +import ( + "fmt" + "testing" + "time" + + acc "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance" + r "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/resources" + + 
"github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/bettertestspoc/assert" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/bettertestspoc/assert/objectassert" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/bettertestspoc/assert/objectparametersassert" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/bettertestspoc/assert/resourceassert" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/bettertestspoc/assert/resourceparametersassert" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/bettertestspoc/assert/resourceshowoutputassert" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/bettertestspoc/config" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/bettertestspoc/config/model" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/helpers/random" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/testdatatypes" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/testenvs" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/helpers" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/provider/resources" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk" + "github.com/hashicorp/terraform-plugin-testing/helper/resource" + "github.com/hashicorp/terraform-plugin-testing/plancheck" + "github.com/hashicorp/terraform-plugin-testing/tfversion" +) + +// TODO [SNOW-1348103]: test import +// TODO [SNOW-1348103]: test external changes +// TODO [SNOW-1348103]: test changes of attributes separately + +func TestAcc_FunctionJava_InlineBasic(t *testing.T) { + className := "TestFunc" + funcName := "echoVarchar" + argName := "x" + dataType := testdatatypes.DataTypeVarchar_100 + + id := acc.TestClient().Ids.RandomSchemaObjectIdentifierWithArgumentsNewDataTypes(dataType) + idWithChangedNameButTheSameDataType 
:= acc.TestClient().Ids.RandomSchemaObjectIdentifierWithArgumentsNewDataTypes(dataType) + + handler := fmt.Sprintf("%s.%s", className, funcName) + definition := acc.TestClient().Function.SampleJavaDefinition(t, className, funcName, argName) + + functionModel := model.FunctionJavaBasicInline("w", id, dataType, handler, definition). + WithArgument(argName, dataType) + functionModelRenamed := model.FunctionJavaBasicInline("w", idWithChangedNameButTheSameDataType, dataType, handler, definition). + WithArgument(argName, dataType) + + resource.Test(t, resource.TestCase{ + ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, + TerraformVersionChecks: []tfversion.TerraformVersionCheck{ + tfversion.RequireAbove(tfversion.Version1_5_0), + }, + PreCheck: func() { acc.TestAccPreCheck(t) }, + CheckDestroy: acc.CheckDestroy(t, resources.FunctionJava), + Steps: []resource.TestStep{ + // CREATE BASIC + { + Config: config.FromModels(t, functionModel), + Check: assert.AssertThat(t, + resourceassert.FunctionJavaResource(t, functionModel.ResourceReference()). + HasNameString(id.Name()). + HasIsSecureString(r.BooleanDefault). + HasCommentString(sdk.DefaultFunctionComment). + HasImportsLength(0). + HasTargetPathEmpty(). + HasNoRuntimeVersion(). + HasFunctionDefinitionString(definition). + HasFunctionLanguageString("JAVA"). + HasFullyQualifiedNameString(id.FullyQualifiedName()), + resourceshowoutputassert.FunctionShowOutput(t, functionModel.ResourceReference()). 
+ HasIsSecure(false), + assert.Check(resource.TestCheckResourceAttr(functionModel.ResourceReference(), "arguments.0.arg_name", argName)), + assert.Check(resource.TestCheckResourceAttr(functionModel.ResourceReference(), "arguments.0.arg_data_type", dataType.ToSql())), + assert.Check(resource.TestCheckResourceAttr(functionModel.ResourceReference(), "arguments.0.arg_default_value", "")), + ), + }, + // RENAME + { + Config: config.FromModels(t, functionModelRenamed), + Check: assert.AssertThat(t, + resourceassert.FunctionJavaResource(t, functionModelRenamed.ResourceReference()). + HasNameString(idWithChangedNameButTheSameDataType.Name()). + HasFullyQualifiedNameString(idWithChangedNameButTheSameDataType.FullyQualifiedName()), + ), + }, + }, + }) +} + +func TestAcc_FunctionJava_InlineEmptyArgs(t *testing.T) { + className := "TestFunc" + funcName := "echoVarchar" + returnDataType := testdatatypes.DataTypeVarchar_100 + + id := acc.TestClient().Ids.RandomSchemaObjectIdentifierWithArgumentsNewDataTypes() + + handler := fmt.Sprintf("%s.%s", className, funcName) + definition := acc.TestClient().Function.SampleJavaDefinitionNoArgs(t, className, funcName) + + functionModel := model.FunctionJavaBasicInline("w", id, returnDataType, handler, definition) + + resource.Test(t, resource.TestCase{ + ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, + TerraformVersionChecks: []tfversion.TerraformVersionCheck{ + tfversion.RequireAbove(tfversion.Version1_5_0), + }, + PreCheck: func() { acc.TestAccPreCheck(t) }, + CheckDestroy: acc.CheckDestroy(t, resources.FunctionJava), + Steps: []resource.TestStep{ + // CREATE BASIC + { + Config: config.FromModels(t, functionModel), + Check: assert.AssertThat(t, + resourceassert.FunctionJavaResource(t, functionModel.ResourceReference()). + HasNameString(id.Name()). + HasFunctionDefinitionString(definition). + HasFunctionLanguageString("JAVA"). 
+ HasFullyQualifiedNameString(id.FullyQualifiedName()), + ), + }, + }, + }) +} + +func TestAcc_FunctionJava_InlineBasicDefaultArg(t *testing.T) { + className := "TestFunc" + funcName := "echoVarchar" + argName := "x" + dataType := testdatatypes.DataTypeVarchar_100 + defaultValue := "'hello'" + + id := acc.TestClient().Ids.RandomSchemaObjectIdentifierWithArgumentsNewDataTypes(dataType) + + handler := fmt.Sprintf("%s.%s", className, funcName) + definition := acc.TestClient().Function.SampleJavaDefinition(t, className, funcName, argName) + + functionModel := model.FunctionJavaBasicInline("w", id, dataType, handler, definition). + WithArgumentWithDefaultValue(argName, dataType, defaultValue) + + resource.Test(t, resource.TestCase{ + ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, + TerraformVersionChecks: []tfversion.TerraformVersionCheck{ + tfversion.RequireAbove(tfversion.Version1_5_0), + }, + PreCheck: func() { acc.TestAccPreCheck(t) }, + CheckDestroy: acc.CheckDestroy(t, resources.FunctionJava), + Steps: []resource.TestStep{ + // CREATE BASIC + { + Config: config.FromModels(t, functionModel), + Check: assert.AssertThat(t, + resourceassert.FunctionJavaResource(t, functionModel.ResourceReference()). + HasNameString(id.Name()). + HasFunctionDefinitionString(definition). + HasFunctionLanguageString("JAVA"). 
+ HasFullyQualifiedNameString(id.FullyQualifiedName()), + assert.Check(resource.TestCheckResourceAttr(functionModel.ResourceReference(), "arguments.0.arg_name", argName)), + assert.Check(resource.TestCheckResourceAttr(functionModel.ResourceReference(), "arguments.0.arg_data_type", dataType.ToSql())), + assert.Check(resource.TestCheckResourceAttr(functionModel.ResourceReference(), "arguments.0.arg_default_value", defaultValue)), + ), + }, + }, + }) +} + +func TestAcc_FunctionJava_InlineFull(t *testing.T) { + _ = testenvs.GetOrSkipTest(t, testenvs.EnableAcceptance) + acc.TestAccPreCheck(t) + + stage, stageCleanup := acc.TestClient().Stage.CreateStage(t) + t.Cleanup(stageCleanup) + + className := "TestFunc" + funcName := "echoVarchar" + argName := "x" + dataType := testdatatypes.DataTypeVarchar_100 + + id := acc.TestClient().Ids.RandomSchemaObjectIdentifierWithArgumentsNewDataTypes(dataType) + + handler := fmt.Sprintf("%s.%s", className, funcName) + definition := acc.TestClient().Function.SampleJavaDefinition(t, className, funcName, argName) + // TODO [SNOW-1850370]: extract to helper + jarName := fmt.Sprintf("tf-%d-%s.jar", time.Now().Unix(), random.AlphaN(5)) + + functionModel := model.FunctionJavaBasicInline("w", id, dataType, handler, definition). + WithArgument(argName, dataType). + WithTargetPathParts(stage.ID().FullyQualifiedName(), jarName). + WithRuntimeVersion("11") + + resource.Test(t, resource.TestCase{ + ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, + TerraformVersionChecks: []tfversion.TerraformVersionCheck{ + tfversion.RequireAbove(tfversion.Version1_5_0), + }, + PreCheck: func() { acc.TestAccPreCheck(t) }, + CheckDestroy: acc.CheckDestroy(t, resources.FunctionJava), + Steps: []resource.TestStep{ + // CREATE BASIC + { + Config: config.FromModels(t, functionModel), + Check: assert.AssertThat(t, + resourceassert.FunctionJavaResource(t, functionModel.ResourceReference()). + HasNameString(id.Name()). + HasIsSecureString(r.BooleanDefault). 
+ HasCommentString(sdk.DefaultFunctionComment). + HasImportsLength(0). + HasRuntimeVersionString("11"). + HasFunctionDefinitionString(definition). + HasFunctionLanguageString("JAVA"). + HasFullyQualifiedNameString(id.FullyQualifiedName()), + assert.Check(resource.TestCheckResourceAttr(functionModel.ResourceReference(), "target_path.0.stage_location", stage.ID().FullyQualifiedName())), + assert.Check(resource.TestCheckResourceAttr(functionModel.ResourceReference(), "target_path.0.path_on_stage", jarName)), + resourceshowoutputassert.FunctionShowOutput(t, functionModel.ResourceReference()). + HasIsSecure(false), + ), + }, + }, + }) +} + +func TestAcc_FunctionJava_StagedBasic(t *testing.T) { + _ = testenvs.GetOrSkipTest(t, testenvs.EnableAcceptance) + acc.TestAccPreCheck(t) + + stage, stageCleanup := acc.TestClient().Stage.CreateStage(t) + t.Cleanup(stageCleanup) + + tmpJavaFunction := acc.TestClient().CreateSampleJavaFunctionAndJarOnStage(t, stage) + + dataType := tmpJavaFunction.ArgType + id := acc.TestClient().Ids.RandomSchemaObjectIdentifierWithArgumentsNewDataTypes(dataType) + + argName := "x" + handler := tmpJavaFunction.JavaHandler() + + functionModel := model.FunctionJavaBasicStaged("w", id, dataType, handler, stage.ID().FullyQualifiedName(), tmpJavaFunction.JarName). + WithArgument(argName, dataType) + + resource.Test(t, resource.TestCase{ + ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, + TerraformVersionChecks: []tfversion.TerraformVersionCheck{ + tfversion.RequireAbove(tfversion.Version1_5_0), + }, + PreCheck: func() { acc.TestAccPreCheck(t) }, + CheckDestroy: acc.CheckDestroy(t, resources.FunctionJava), + Steps: []resource.TestStep{ + // CREATE BASIC + { + Config: config.FromModels(t, functionModel), + Check: assert.AssertThat(t, + resourceassert.FunctionJavaResource(t, functionModel.ResourceReference()). + HasNameString(id.Name()). + HasIsSecureString(r.BooleanDefault). + HasCommentString(sdk.DefaultFunctionComment). 
+ HasImportsLength(1). + HasNoFunctionDefinition(). + HasFunctionLanguageString("JAVA"). + HasFullyQualifiedNameString(id.FullyQualifiedName()), + assert.Check(resource.TestCheckResourceAttr(functionModel.ResourceReference(), "imports.0.stage_location", stage.ID().FullyQualifiedName())), + assert.Check(resource.TestCheckResourceAttr(functionModel.ResourceReference(), "imports.0.path_on_stage", tmpJavaFunction.JarName)), + resourceshowoutputassert.FunctionShowOutput(t, functionModel.ResourceReference()). + HasIsSecure(false), + ), + }, + }, + }) +} + +func TestAcc_FunctionJava_AllParameters(t *testing.T) { + className := "TestFunc" + funcName := "echoVarchar" + argName := "x" + dataType := testdatatypes.DataTypeVarchar_100 + id := acc.TestClient().Ids.RandomSchemaObjectIdentifierWithArgumentsNewDataTypes(dataType) + + handler := fmt.Sprintf("%s.%s", className, funcName) + definition := acc.TestClient().Function.SampleJavaDefinition(t, className, funcName, argName) + + functionModel := model.FunctionJavaBasicInline("w", id, dataType, handler, definition). + WithArgument(argName, dataType) + functionModelWithAllParametersSet := model.FunctionJavaBasicInline("w", id, dataType, handler, definition). + WithArgument(argName, dataType). + WithEnableConsoleOutput(true). + WithLogLevel(string(sdk.LogLevelWarn)). + WithMetricLevel(string(sdk.MetricLevelAll)). + WithTraceLevel(string(sdk.TraceLevelAlways)) + + resource.Test(t, resource.TestCase{ + ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, + TerraformVersionChecks: []tfversion.TerraformVersionCheck{ + tfversion.RequireAbove(tfversion.Version1_5_0), + }, + PreCheck: func() { acc.TestAccPreCheck(t) }, + CheckDestroy: acc.CheckDestroy(t, resources.FunctionJava), + Steps: []resource.TestStep{ + // create with default values for all the parameters + { + Config: config.FromModels(t, functionModel), + Check: assert.AssertThat(t, + objectparametersassert.FunctionParameters(t, id). + HasAllDefaults(). 
+ HasAllDefaultsExplicit(), + resourceparametersassert.FunctionResourceParameters(t, functionModel.ResourceReference()). + HasAllDefaults(), + ), + }, + // import when no parameter set + { + ResourceName: functionModel.ResourceReference(), + ImportState: true, + ImportStateCheck: assert.AssertThatImport(t, + resourceparametersassert.ImportedFunctionResourceParameters(t, helpers.EncodeResourceIdentifier(id)). + HasAllDefaults(), + ), + }, + // set all parameters + { + Config: config.FromModels(t, functionModelWithAllParametersSet), + Check: assert.AssertThat(t, + objectparametersassert.FunctionParameters(t, id). + HasEnableConsoleOutput(true). + HasLogLevel(sdk.LogLevelWarn). + HasMetricLevel(sdk.MetricLevelAll). + HasTraceLevel(sdk.TraceLevelAlways), + resourceparametersassert.FunctionResourceParameters(t, functionModelWithAllParametersSet.ResourceReference()). + HasEnableConsoleOutput(true). + HasLogLevel(sdk.LogLevelWarn). + HasMetricLevel(sdk.MetricLevelAll). + HasTraceLevel(sdk.TraceLevelAlways), + ), + }, + // import when all parameters set + { + ResourceName: functionModelWithAllParametersSet.ResourceReference(), + ImportState: true, + ImportStateCheck: assert.AssertThatImport(t, + resourceparametersassert.ImportedFunctionResourceParameters(t, helpers.EncodeResourceIdentifier(id)). + HasEnableConsoleOutput(true). + HasLogLevel(sdk.LogLevelWarn). + HasMetricLevel(sdk.MetricLevelAll). + HasTraceLevel(sdk.TraceLevelAlways), + ), + }, + // unset all the parameters + { + Config: config.FromModels(t, functionModel), + Check: assert.AssertThat(t, + objectparametersassert.FunctionParameters(t, id). + HasAllDefaults(). + HasAllDefaultsExplicit(), + resourceparametersassert.FunctionResourceParameters(t, functionModel.ResourceReference()). 
+ HasAllDefaults(), + ), + }, + // destroy + { + Config: config.FromModels(t, functionModel), + Destroy: true, + }, + // create with all parameters set + { + Config: config.FromModels(t, functionModelWithAllParametersSet), + Check: assert.AssertThat(t, + objectparametersassert.FunctionParameters(t, id). + HasEnableConsoleOutput(true). + HasLogLevel(sdk.LogLevelWarn). + HasMetricLevel(sdk.MetricLevelAll). + HasTraceLevel(sdk.TraceLevelAlways), + resourceparametersassert.FunctionResourceParameters(t, functionModelWithAllParametersSet.ResourceReference()). + HasEnableConsoleOutput(true). + HasLogLevel(sdk.LogLevelWarn). + HasMetricLevel(sdk.MetricLevelAll). + HasTraceLevel(sdk.TraceLevelAlways), + ), + }, + }, + }) +} + +func TestAcc_FunctionJava_handleExternalLanguageChange(t *testing.T) { + _ = testenvs.GetOrSkipTest(t, testenvs.EnableAcceptance) + acc.TestAccPreCheck(t) + + tmpJavaFunction := acc.TestClient().CreateSampleJavaFunctionAndJarOnUserStage(t) + + dataType := tmpJavaFunction.ArgType + id := acc.TestClient().Ids.RandomSchemaObjectIdentifierWithArgumentsNewDataTypes(dataType) + + argName := "x" + handler := tmpJavaFunction.JavaHandler() + + functionModel := model.FunctionJavaBasicStaged("w", id, dataType, handler, "~", tmpJavaFunction.JarName). 
+ WithArgument(argName, dataType) + + resource.Test(t, resource.TestCase{ + ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, + TerraformVersionChecks: []tfversion.TerraformVersionCheck{ + tfversion.RequireAbove(tfversion.Version1_5_0), + }, + PreCheck: func() { acc.TestAccPreCheck(t) }, + CheckDestroy: acc.CheckDestroy(t, resources.FunctionJava), + Steps: []resource.TestStep{ + { + Config: config.FromModels(t, functionModel), + Check: assert.AssertThat(t, + objectassert.Function(t, id).HasLanguage("JAVA"), + resourceassert.FunctionJavaResource(t, functionModel.ResourceReference()).HasNameString(id.Name()).HasFunctionLanguageString("JAVA"), + resourceshowoutputassert.FunctionShowOutput(t, functionModel.ResourceReference()).HasLanguage("JAVA"), + ), + }, + // change type externally by creating a new function with the exact same id but using different language + { + PreConfig: func() { + acc.TestClient().Function.DropFunctionFunc(t, id)() + acc.TestClient().Function.CreateScalaStaged(t, id, dataType, tmpJavaFunction.JarLocation(), handler) + objectassert.Function(t, id).HasLanguage("SCALA") + }, + Config: config.FromModels(t, functionModel), + ConfigPlanChecks: resource.ConfigPlanChecks{ + PreApply: []plancheck.PlanCheck{ + plancheck.ExpectResourceAction(functionModel.ResourceReference(), plancheck.ResourceActionDestroyBeforeCreate), + }, + }, + Check: assert.AssertThat(t, + objectassert.Function(t, id).HasLanguage("JAVA"), + resourceassert.FunctionJavaResource(t, functionModel.ResourceReference()).HasNameString(id.Name()).HasFunctionLanguageString("JAVA"), + resourceshowoutputassert.FunctionShowOutput(t, functionModel.ResourceReference()).HasLanguage("JAVA"), + ), + }, + }, + }) +} diff --git a/pkg/resources/function_javascript.go b/pkg/resources/function_javascript.go index f1b3e17e2a..0ba7e955b7 100644 --- a/pkg/resources/function_javascript.go +++ b/pkg/resources/function_javascript.go @@ -17,7 +17,7 @@ func FunctionJavascript() *schema.Resource { 
CreateContext: TrackingCreateWrapper(resources.FunctionJavascript, CreateContextFunctionJavascript), ReadContext: TrackingReadWrapper(resources.FunctionJavascript, ReadContextFunctionJavascript), UpdateContext: TrackingUpdateWrapper(resources.FunctionJavascript, UpdateContextFunctionJavascript), - DeleteContext: TrackingDeleteWrapper(resources.FunctionJavascript, DeleteContextFunctionJavascript), + DeleteContext: TrackingDeleteWrapper(resources.FunctionJavascript, DeleteFunction), Description: "Resource used to manage javascript function objects. For more information, check [function documentation](https://docs.snowflake.com/en/sql-reference/sql/create-function).", CustomizeDiff: TrackingCustomDiffWrapper(resources.FunctionJavascript, customdiff.All( diff --git a/pkg/resources/function_parameters.go b/pkg/resources/function_parameters.go index bccbe0666a..3ff28095b5 100644 --- a/pkg/resources/function_parameters.go +++ b/pkg/resources/function_parameters.go @@ -80,16 +80,16 @@ func handleFunctionParameterRead(d *schema.ResourceData, functionParameters []*s } // They do not work in create, that's why are set in alter -func handleFunctionParametersCreate(d *schema.ResourceData, alterOpts *sdk.FunctionSet) diag.Diagnostics { +func handleFunctionParametersCreate(d *schema.ResourceData, set *sdk.FunctionSetRequest) diag.Diagnostics { return JoinDiags( - handleParameterCreate(d, sdk.FunctionParameterEnableConsoleOutput, &alterOpts.EnableConsoleOutput), - handleParameterCreateWithMapping(d, sdk.FunctionParameterLogLevel, &alterOpts.LogLevel, stringToStringEnumProvider(sdk.ToLogLevel)), - handleParameterCreateWithMapping(d, sdk.FunctionParameterMetricLevel, &alterOpts.MetricLevel, stringToStringEnumProvider(sdk.ToMetricLevel)), - handleParameterCreateWithMapping(d, sdk.FunctionParameterTraceLevel, &alterOpts.TraceLevel, stringToStringEnumProvider(sdk.ToTraceLevel)), + handleParameterCreate(d, sdk.FunctionParameterEnableConsoleOutput, &set.EnableConsoleOutput), + 
handleParameterCreateWithMapping(d, sdk.FunctionParameterLogLevel, &set.LogLevel, stringToStringEnumProvider(sdk.ToLogLevel)), + handleParameterCreateWithMapping(d, sdk.FunctionParameterMetricLevel, &set.MetricLevel, stringToStringEnumProvider(sdk.ToMetricLevel)), + handleParameterCreateWithMapping(d, sdk.FunctionParameterTraceLevel, &set.TraceLevel, stringToStringEnumProvider(sdk.ToTraceLevel)), ) } -func handleFunctionParametersUpdate(d *schema.ResourceData, set *sdk.FunctionSet, unset *sdk.FunctionUnset) diag.Diagnostics { +func handleFunctionParametersUpdate(d *schema.ResourceData, set *sdk.FunctionSetRequest, unset *sdk.FunctionUnsetRequest) diag.Diagnostics { return JoinDiags( handleParameterUpdate(d, sdk.FunctionParameterEnableConsoleOutput, &set.EnableConsoleOutput, &unset.EnableConsoleOutput), handleParameterUpdateWithMapping(d, sdk.FunctionParameterLogLevel, &set.LogLevel, &unset.LogLevel, stringToStringEnumProvider(sdk.ToLogLevel)), diff --git a/pkg/resources/function_python.go b/pkg/resources/function_python.go index e270f80ef6..cc6c137aff 100644 --- a/pkg/resources/function_python.go +++ b/pkg/resources/function_python.go @@ -17,7 +17,7 @@ func FunctionPython() *schema.Resource { CreateContext: TrackingCreateWrapper(resources.FunctionPython, CreateContextFunctionPython), ReadContext: TrackingReadWrapper(resources.FunctionPython, ReadContextFunctionPython), UpdateContext: TrackingUpdateWrapper(resources.FunctionPython, UpdateContextFunctionPython), - DeleteContext: TrackingDeleteWrapper(resources.FunctionPython, DeleteContextFunctionPython), + DeleteContext: TrackingDeleteWrapper(resources.FunctionPython, DeleteFunction), Description: "Resource used to manage python function objects. 
For more information, check [function documentation](https://docs.snowflake.com/en/sql-reference/sql/create-function).", CustomizeDiff: TrackingCustomDiffWrapper(resources.FunctionPython, customdiff.All( diff --git a/pkg/resources/function_scala.go b/pkg/resources/function_scala.go index 2c3adf0bc3..ff2bded481 100644 --- a/pkg/resources/function_scala.go +++ b/pkg/resources/function_scala.go @@ -17,7 +17,7 @@ func FunctionScala() *schema.Resource { CreateContext: TrackingCreateWrapper(resources.FunctionScala, CreateContextFunctionScala), ReadContext: TrackingReadWrapper(resources.FunctionScala, ReadContextFunctionScala), UpdateContext: TrackingUpdateWrapper(resources.FunctionScala, UpdateContextFunctionScala), - DeleteContext: TrackingDeleteWrapper(resources.FunctionScala, DeleteContextFunctionScala), + DeleteContext: TrackingDeleteWrapper(resources.FunctionScala, DeleteFunction), Description: "Resource used to manage scala function objects. For more information, check [function documentation](https://docs.snowflake.com/en/sql-reference/sql/create-function).", CustomizeDiff: TrackingCustomDiffWrapper(resources.FunctionScala, customdiff.All( diff --git a/pkg/resources/function_sql.go b/pkg/resources/function_sql.go index 48ea385f71..cd8cb31dc8 100644 --- a/pkg/resources/function_sql.go +++ b/pkg/resources/function_sql.go @@ -17,7 +17,7 @@ func FunctionSql() *schema.Resource { CreateContext: TrackingCreateWrapper(resources.FunctionSql, CreateContextFunctionSql), ReadContext: TrackingReadWrapper(resources.FunctionSql, ReadContextFunctionSql), UpdateContext: TrackingUpdateWrapper(resources.FunctionSql, UpdateContextFunctionSql), - DeleteContext: TrackingDeleteWrapper(resources.FunctionSql, DeleteContextFunctionSql), + DeleteContext: TrackingDeleteWrapper(resources.FunctionSql, DeleteFunction), Description: "Resource used to manage sql function objects. 
For more information, check [function documentation](https://docs.snowflake.com/en/sql-reference/sql/create-function).", CustomizeDiff: TrackingCustomDiffWrapper(resources.FunctionSql, customdiff.All( diff --git a/pkg/resources/procedure.go b/pkg/resources/procedure.go index 8665f71d09..fa986ae8f5 100644 --- a/pkg/resources/procedure.go +++ b/pkg/resources/procedure.go @@ -186,7 +186,7 @@ func Procedure() *schema.Resource { CreateContext: TrackingCreateWrapper(resources.Procedure, CreateContextProcedure), ReadContext: TrackingReadWrapper(resources.Procedure, ReadContextProcedure), UpdateContext: TrackingUpdateWrapper(resources.Procedure, UpdateContextProcedure), - DeleteContext: TrackingDeleteWrapper(resources.Procedure, DeleteContextProcedure), + DeleteContext: TrackingDeleteWrapper(resources.Procedure, DeleteProcedure), // TODO(SNOW-1348106): add `arguments` to ComputedIfAnyAttributeChanged for FullyQualifiedNameAttributeName. // This can't be done now because this function compares values without diff suppress. 
@@ -714,20 +714,6 @@ func UpdateContextProcedure(ctx context.Context, d *schema.ResourceData, meta in return ReadContextProcedure(ctx, d, meta) } -func DeleteContextProcedure(ctx context.Context, d *schema.ResourceData, meta interface{}) diag.Diagnostics { - client := meta.(*provider.Context).Client - - id, err := sdk.ParseSchemaObjectIdentifierWithArguments(d.Id()) - if err != nil { - return diag.FromErr(err) - } - if err := client.Procedures.Drop(ctx, sdk.NewDropProcedureRequest(id).WithIfExists(true)); err != nil { - return diag.FromErr(err) - } - d.SetId("") - return nil -} - func getProcedureArguments(d *schema.ResourceData) ([]sdk.ProcedureArgumentRequest, diag.Diagnostics) { args := make([]sdk.ProcedureArgumentRequest, 0) if v, ok := d.GetOk("arguments"); ok { diff --git a/pkg/resources/procedure_commons.go b/pkg/resources/procedure_commons.go index 88e815978b..163a33da0f 100644 --- a/pkg/resources/procedure_commons.go +++ b/pkg/resources/procedure_commons.go @@ -1,11 +1,14 @@ package resources import ( + "context" "fmt" "slices" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/internal/provider" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/schemas" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk" + "github.com/hashicorp/terraform-plugin-sdk/v2/diag" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" ) @@ -335,9 +338,25 @@ func procedureBaseSchema() map[string]schema.Schema { Computed: true, Description: "Outputs the result of `SHOW PARAMETERS IN PROCEDURE` for the given procedure.", Elem: &schema.Resource{ - Schema: procedureParametersSchema, + Schema: schemas.ShowProcedureParametersSchema, }, }, FullyQualifiedNameAttributeName: *schemas.FullyQualifiedNameSchema, } } + +func DeleteProcedure(ctx context.Context, d *schema.ResourceData, meta any) diag.Diagnostics { + client := meta.(*provider.Context).Client + + id, err := sdk.ParseSchemaObjectIdentifierWithArguments(d.Id()) + if err != nil { + return 
diag.FromErr(err) + } + + if err := client.Procedures.Drop(ctx, sdk.NewDropProcedureRequest(id).WithIfExists(true)); err != nil { + return diag.FromErr(err) + } + + d.SetId("") + return nil +} diff --git a/pkg/resources/procedure_java.go b/pkg/resources/procedure_java.go index 8019e72689..1804780de9 100644 --- a/pkg/resources/procedure_java.go +++ b/pkg/resources/procedure_java.go @@ -17,7 +17,7 @@ func ProcedureJava() *schema.Resource { CreateContext: TrackingCreateWrapper(resources.ProcedureJava, CreateContextProcedureJava), ReadContext: TrackingReadWrapper(resources.ProcedureJava, ReadContextProcedureJava), UpdateContext: TrackingUpdateWrapper(resources.ProcedureJava, UpdateContextProcedureJava), - DeleteContext: TrackingDeleteWrapper(resources.ProcedureJava, DeleteContextProcedureJava), + DeleteContext: TrackingDeleteWrapper(resources.ProcedureJava, DeleteProcedure), Description: "Resource used to manage java procedure objects. For more information, check [procedure documentation](https://docs.snowflake.com/en/sql-reference/sql/create-procedure).", CustomizeDiff: TrackingCustomDiffWrapper(resources.ProcedureJava, customdiff.All( @@ -46,7 +46,3 @@ func ReadContextProcedureJava(ctx context.Context, d *schema.ResourceData, meta func UpdateContextProcedureJava(ctx context.Context, d *schema.ResourceData, meta any) diag.Diagnostics { return nil } - -func DeleteContextProcedureJava(ctx context.Context, d *schema.ResourceData, meta any) diag.Diagnostics { - return nil -} diff --git a/pkg/resources/procedure_javascript.go b/pkg/resources/procedure_javascript.go index 8c3958b99e..5088b492f7 100644 --- a/pkg/resources/procedure_javascript.go +++ b/pkg/resources/procedure_javascript.go @@ -17,7 +17,7 @@ func ProcedureJavascript() *schema.Resource { CreateContext: TrackingCreateWrapper(resources.ProcedureJavascript, CreateContextProcedureJavascript), ReadContext: TrackingReadWrapper(resources.ProcedureJavascript, ReadContextProcedureJavascript), UpdateContext: 
TrackingUpdateWrapper(resources.ProcedureJavascript, UpdateContextProcedureJavascript), - DeleteContext: TrackingDeleteWrapper(resources.ProcedureJavascript, DeleteContextProcedureJavascript), + DeleteContext: TrackingDeleteWrapper(resources.ProcedureJavascript, DeleteProcedure), Description: "Resource used to manage javascript procedure objects. For more information, check [procedure documentation](https://docs.snowflake.com/en/sql-reference/sql/create-procedure).", CustomizeDiff: TrackingCustomDiffWrapper(resources.ProcedureJavascript, customdiff.All( @@ -46,7 +46,3 @@ func ReadContextProcedureJavascript(ctx context.Context, d *schema.ResourceData, func UpdateContextProcedureJavascript(ctx context.Context, d *schema.ResourceData, meta any) diag.Diagnostics { return nil } - -func DeleteContextProcedureJavascript(ctx context.Context, d *schema.ResourceData, meta any) diag.Diagnostics { - return nil -} diff --git a/pkg/resources/procedure_python.go b/pkg/resources/procedure_python.go index 48d70329e7..717cee32fe 100644 --- a/pkg/resources/procedure_python.go +++ b/pkg/resources/procedure_python.go @@ -17,7 +17,7 @@ func ProcedurePython() *schema.Resource { CreateContext: TrackingCreateWrapper(resources.ProcedurePython, CreateContextProcedurePython), ReadContext: TrackingReadWrapper(resources.ProcedurePython, ReadContextProcedurePython), UpdateContext: TrackingUpdateWrapper(resources.ProcedurePython, UpdateContextProcedurePython), - DeleteContext: TrackingDeleteWrapper(resources.ProcedurePython, DeleteContextProcedurePython), + DeleteContext: TrackingDeleteWrapper(resources.ProcedurePython, DeleteProcedure), Description: "Resource used to manage python procedure objects. 
For more information, check [procedure documentation](https://docs.snowflake.com/en/sql-reference/sql/create-procedure).", CustomizeDiff: TrackingCustomDiffWrapper(resources.ProcedurePython, customdiff.All( @@ -46,7 +46,3 @@ func ReadContextProcedurePython(ctx context.Context, d *schema.ResourceData, met func UpdateContextProcedurePython(ctx context.Context, d *schema.ResourceData, meta any) diag.Diagnostics { return nil } - -func DeleteContextProcedurePython(ctx context.Context, d *schema.ResourceData, meta any) diag.Diagnostics { - return nil -} diff --git a/pkg/resources/procedure_scala.go b/pkg/resources/procedure_scala.go index 3a7816b7d0..793663d0e1 100644 --- a/pkg/resources/procedure_scala.go +++ b/pkg/resources/procedure_scala.go @@ -17,7 +17,7 @@ func ProcedureScala() *schema.Resource { CreateContext: TrackingCreateWrapper(resources.ProcedureScala, CreateContextProcedureScala), ReadContext: TrackingReadWrapper(resources.ProcedureScala, ReadContextProcedureScala), UpdateContext: TrackingUpdateWrapper(resources.ProcedureScala, UpdateContextProcedureScala), - DeleteContext: TrackingDeleteWrapper(resources.ProcedureScala, DeleteContextProcedureScala), + DeleteContext: TrackingDeleteWrapper(resources.ProcedureScala, DeleteProcedure), Description: "Resource used to manage scala procedure objects. 
For more information, check [procedure documentation](https://docs.snowflake.com/en/sql-reference/sql/create-procedure).", CustomizeDiff: TrackingCustomDiffWrapper(resources.ProcedureScala, customdiff.All( @@ -46,7 +46,3 @@ func ReadContextProcedureScala(ctx context.Context, d *schema.ResourceData, meta func UpdateContextProcedureScala(ctx context.Context, d *schema.ResourceData, meta any) diag.Diagnostics { return nil } - -func DeleteContextProcedureScala(ctx context.Context, d *schema.ResourceData, meta any) diag.Diagnostics { - return nil -} diff --git a/pkg/resources/procedure_sql.go b/pkg/resources/procedure_sql.go index 0488941f03..11fcd69413 100644 --- a/pkg/resources/procedure_sql.go +++ b/pkg/resources/procedure_sql.go @@ -17,7 +17,7 @@ func ProcedureSql() *schema.Resource { CreateContext: TrackingCreateWrapper(resources.ProcedureSql, CreateContextProcedureSql), ReadContext: TrackingReadWrapper(resources.ProcedureSql, ReadContextProcedureSql), UpdateContext: TrackingUpdateWrapper(resources.ProcedureSql, UpdateContextProcedureSql), - DeleteContext: TrackingDeleteWrapper(resources.ProcedureSql, DeleteContextProcedureSql), + DeleteContext: TrackingDeleteWrapper(resources.ProcedureSql, DeleteProcedure), Description: "Resource used to manage sql procedure objects. 
For more information, check [procedure documentation](https://docs.snowflake.com/en/sql-reference/sql/create-procedure).", CustomizeDiff: TrackingCustomDiffWrapper(resources.ProcedureSql, customdiff.All( @@ -46,7 +46,3 @@ func ReadContextProcedureSql(ctx context.Context, d *schema.ResourceData, meta a func UpdateContextProcedureSql(ctx context.Context, d *schema.ResourceData, meta any) diag.Diagnostics { return nil } - -func DeleteContextProcedureSql(ctx context.Context, d *schema.ResourceData, meta any) diag.Diagnostics { - return nil -} diff --git a/pkg/resources/resource_helpers_create.go b/pkg/resources/resource_helpers_create.go index 837fada163..42c34ca0c7 100644 --- a/pkg/resources/resource_helpers_create.go +++ b/pkg/resources/resource_helpers_create.go @@ -12,6 +12,13 @@ func stringAttributeCreate(d *schema.ResourceData, key string, createField **str return nil } +func stringAttributeCreateBuilder[T any](d *schema.ResourceData, key string, setValue func(string) T) error { + if v, ok := d.GetOk(key); ok { + setValue(v.(string)) + } + return nil +} + func intAttributeCreate(d *schema.ResourceData, key string, createField **int) error { if v, ok := d.GetOk(key); ok { *createField = sdk.Int(v.(int)) @@ -37,6 +44,17 @@ func booleanStringAttributeCreate(d *schema.ResourceData, key string, createFiel return nil } +func booleanStringAttributeCreateBuilder[T any](d *schema.ResourceData, key string, setValue func(bool) T) error { + if v := d.Get(key).(string); v != BooleanDefault { + parsed, err := booleanStringToBool(v) + if err != nil { + return err + } + setValue(parsed) + } + return nil +} + func accountObjectIdentifierAttributeCreate(d *schema.ResourceData, key string, createField **sdk.AccountObjectIdentifier) error { if v, ok := d.GetOk(key); ok { *createField = sdk.Pointer(sdk.NewAccountObjectIdentifier(v.(string))) @@ -73,6 +91,17 @@ func attributeMappedValueCreate[T any](d *schema.ResourceData, key string, creat return nil } +func 
attributeMappedValueCreateBuilder[InputType any, MappedType any, RequestBuilder any](d *schema.ResourceData, key string, setValue func(MappedType) RequestBuilder, mapper func(value InputType) (MappedType, error)) error { + if v, ok := d.GetOk(key); ok { + value, err := mapper(v.(InputType)) + if err != nil { + return err + } + setValue(value) + } + return nil +} + func copyGrantsAttributeCreate(d *schema.ResourceData, isOrReplace bool, orReplaceField, copyGrantsField **bool) error { if isOrReplace { *orReplaceField = sdk.Bool(true) diff --git a/pkg/resources/resource_helpers_read.go b/pkg/resources/resource_helpers_read.go index b3dcfcebf1..20d1e69fc6 100644 --- a/pkg/resources/resource_helpers_read.go +++ b/pkg/resources/resource_helpers_read.go @@ -63,3 +63,22 @@ func attributeMappedValueReadOrDefault[T, R any](d *schema.ResourceData, key str } return d.Set(key, nil) } + +func setOptionalFromStringPtr(d *schema.ResourceData, key string, ptr *string) error { + if ptr != nil { + if err := d.Set(key, *ptr); err != nil { + return err + } + } + return nil +} + +// TODO [SNOW-1348103]: return error if nil +func setRequiredFromStringPtr(d *schema.ResourceData, key string, ptr *string) error { + if ptr != nil { + if err := d.Set(key, *ptr); err != nil { + return err + } + } + return nil +} diff --git a/pkg/resources/user.go b/pkg/resources/user.go index 1fb6f15127..c84040e642 100644 --- a/pkg/resources/user.go +++ b/pkg/resources/user.go @@ -199,7 +199,6 @@ func User() *schema.Resource { }, CustomizeDiff: TrackingCustomDiffWrapper(resources.User, customdiff.All( - // TODO [SNOW-1629468 - next pr]: test "default_role", "default_secondary_roles" ComputedIfAnyAttributeChanged(userSchema, ShowOutputAttributeName, userExternalChangesAttributes...), ComputedIfAnyAttributeChanged(userParametersSchema, ParametersAttributeName, collections.Map(sdk.AsStringList(sdk.AllUserParameters), strings.ToLower)...), ComputedIfAnyAttributeChanged(userSchema, FullyQualifiedNameAttributeName, 
"name"), diff --git a/pkg/schemas/function_parameters.go b/pkg/schemas/function_parameters.go new file mode 100644 index 0000000000..af7752c394 --- /dev/null +++ b/pkg/schemas/function_parameters.go @@ -0,0 +1,35 @@ +package schemas + +import ( + "slices" + "strings" + + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk" + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" +) + +var ( + ShowFunctionParametersSchema = make(map[string]*schema.Schema) + functionParameters = []sdk.FunctionParameter{ + sdk.FunctionParameterEnableConsoleOutput, + sdk.FunctionParameterLogLevel, + sdk.FunctionParameterMetricLevel, + sdk.FunctionParameterTraceLevel, + } +) + +func init() { + for _, param := range functionParameters { + ShowFunctionParametersSchema[strings.ToLower(string(param))] = ParameterListSchema + } +} + +func FunctionParametersToSchema(parameters []*sdk.Parameter) map[string]any { + functionParametersValue := make(map[string]any) + for _, param := range parameters { + if slices.Contains(functionParameters, sdk.FunctionParameter(param.Key)) { + functionParametersValue[strings.ToLower(param.Key)] = []map[string]any{ParameterToSchema(param)} + } + } + return functionParametersValue +} diff --git a/pkg/schemas/procedure_parameters.go b/pkg/schemas/procedure_parameters.go new file mode 100644 index 0000000000..7e9c5c1638 --- /dev/null +++ b/pkg/schemas/procedure_parameters.go @@ -0,0 +1,35 @@ +package schemas + +import ( + "slices" + "strings" + + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk" + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" +) + +var ( + ShowProcedureParametersSchema = make(map[string]*schema.Schema) + ProcedureParameters = []sdk.ProcedureParameter{ + sdk.ProcedureParameterEnableConsoleOutput, + sdk.ProcedureParameterLogLevel, + sdk.ProcedureParameterMetricLevel, + sdk.ProcedureParameterTraceLevel, + } +) + +func init() { + for _, param := range ProcedureParameters { + 
ShowProcedureParametersSchema[strings.ToLower(string(param))] = ParameterListSchema + } +} + +func ProcedureParametersToSchema(parameters []*sdk.Parameter) map[string]any { + ProcedureParametersValue := make(map[string]any) + for _, param := range parameters { + if slices.Contains(ProcedureParameters, sdk.ProcedureParameter(param.Key)) { + ProcedureParametersValue[strings.ToLower(param.Key)] = []map[string]any{ParameterToSchema(param)} + } + } + return ProcedureParametersValue +} diff --git a/pkg/sdk/data_types_deprecated.go b/pkg/sdk/data_types_deprecated.go index 0d0315ad5e..24149f8d9f 100644 --- a/pkg/sdk/data_types_deprecated.go +++ b/pkg/sdk/data_types_deprecated.go @@ -47,5 +47,9 @@ func IsStringType(_type string) bool { } func LegacyDataTypeFrom(newDataType datatypes.DataType) DataType { + // TODO [SNOW-1850370]: remove this check? + if newDataType == nil { + return "" + } return DataType(newDataType.ToLegacyDataTypeSql()) } diff --git a/pkg/sdk/datatypes/legacy.go b/pkg/sdk/datatypes/legacy.go index 5a0e249cd7..c77f286f9c 100644 --- a/pkg/sdk/datatypes/legacy.go +++ b/pkg/sdk/datatypes/legacy.go @@ -16,4 +16,7 @@ const ( TimestampNtzLegacyDataType = "TIMESTAMP_NTZ" TimestampTzLegacyDataType = "TIMESTAMP_TZ" VariantLegacyDataType = "VARIANT" + + // TableLegacyDataType was not a value of legacy data type in the old implementation. Left for now for an easier implementation. + TableLegacyDataType = "TABLE" ) diff --git a/pkg/sdk/datatypes/table.go b/pkg/sdk/datatypes/table.go new file mode 100644 index 0000000000..e7c398ec6d --- /dev/null +++ b/pkg/sdk/datatypes/table.go @@ -0,0 +1,39 @@ +package datatypes + +// TableDataType is based on TODO [SNOW-1348103] +// It does not have synonyms. +// It consists of a list of column name + column type; may be empty. 
+// TODO [SNOW-1348103]: test and improve +type TableDataType struct { + columns []TableDataTypeColumn + underlyingType string +} + +type TableDataTypeColumn struct { + name string + dataType DataType +} + +func (c *TableDataTypeColumn) ColumnName() string { + return c.name +} + +func (c *TableDataTypeColumn) ColumnType() DataType { + return c.dataType +} + +func (t *TableDataType) ToSql() string { + return t.underlyingType +} + +func (t *TableDataType) ToLegacyDataTypeSql() string { + return TableLegacyDataType +} + +func (t *TableDataType) Canonical() string { + return TableLegacyDataType +} + +func (t *TableDataType) Columns() []TableDataTypeColumn { + return t.columns +} diff --git a/pkg/sdk/functions_ext.go b/pkg/sdk/functions_ext.go index 531ddfd9fa..2a87c2a458 100644 --- a/pkg/sdk/functions_ext.go +++ b/pkg/sdk/functions_ext.go @@ -4,7 +4,11 @@ import ( "context" "errors" "fmt" + "log" "strconv" + "strings" + + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk/datatypes" ) const DefaultFunctionComment = "user-defined function" @@ -27,9 +31,28 @@ type FunctionDetails struct { Handler *string // present for python, java, and scala (hidden when SECURE) RuntimeVersion *string // present for python, java, and scala (hidden when SECURE) Packages *string // list // present for python, java, and scala - TargetPath *string // list present for scala and java (hidden when SECURE) + TargetPath *string // present for scala and java (hidden when SECURE) InstalledPackages *string // list present for python (hidden when SECURE) IsAggregate *bool // present for python + + NormalizedImports []NormalizedPath + NormalizedTargetPath *NormalizedPath + ReturnDataType datatypes.DataType + ReturnNotNull bool + NormalizedArguments []NormalizedArgument +} + +type NormalizedPath struct { + // StageLocation is a normalized (fully-quoted id or `~`) stage location + StageLocation string + // PathOnStage is path to the file on stage without opening `/` + PathOnStage string +} 
+ +// NormalizedArgument does not contain default value because it is not returned in the Signature (or any other field). +type NormalizedArgument struct { + Name string + DataType datatypes.DataType } func functionDetailsFromRows(rows []FunctionDetail) (*FunctionDetails, error) { @@ -69,9 +92,136 @@ func functionDetailsFromRows(rows []FunctionDetail) (*FunctionDetails, error) { v.TargetPath = row.Value } } + if e := errors.Join(errs...); e != nil { + return nil, e + } + + if functionDetailsImports, err := parseFunctionDetailsImport(*v); err != nil { + errs = append(errs, err) + } else { + v.NormalizedImports = functionDetailsImports + } + + if v.TargetPath != nil { + if p, err := parseStageLocationPath(*v.TargetPath); err != nil { + errs = append(errs, err) + } else { + v.NormalizedTargetPath = p + } + } + + if dt, returnNotNull, err := parseFunctionOrProcedureReturns(v.Returns); err != nil { + errs = append(errs, err) + } else { + v.ReturnDataType = dt + v.ReturnNotNull = returnNotNull + } + + if args, err := parseFunctionOrProcedureSignature(v.Signature); err != nil { + errs = append(errs, err) + } else { + v.NormalizedArguments = args + } + return v, errors.Join(errs...) } +// TODO [SNOW-1850370]: use ParseCommaSeparatedStringArray + collections.MapErr combo here and in other methods? 
+func parseFunctionDetailsImport(details FunctionDetails) ([]NormalizedPath, error) { + functionDetailsImports := make([]NormalizedPath, 0) + if details.Imports == nil || *details.Imports == "" || *details.Imports == "[]" { + return functionDetailsImports, nil + } + if !strings.HasPrefix(*details.Imports, "[") || !strings.HasSuffix(*details.Imports, "]") { + return functionDetailsImports, fmt.Errorf("could not parse imports from Snowflake: %s, wrapping brackets not found", *details.Imports) + } + raw := (*details.Imports)[1 : len(*details.Imports)-1] + imports := strings.Split(raw, ",") + for _, imp := range imports { + p, err := parseStageLocationPath(imp) + if err != nil { + return nil, fmt.Errorf("could not parse imports from Snowflake: %s, err: %w", *details.Imports, err) + } + functionDetailsImports = append(functionDetailsImports, *p) + } + return functionDetailsImports, nil +} + +func parseStageLocationPath(location string) (*NormalizedPath, error) { + log.Printf("[DEBUG] parsing stage location path part: %s", location) + idx := strings.Index(location, "/") + if idx < 0 { + return nil, fmt.Errorf("part %s cannot be split into stage and path", location) + } + stageRaw := strings.TrimPrefix(strings.TrimSpace(location[:idx]), "@") + if stageRaw != "~" { + stageId, err := ParseSchemaObjectIdentifier(stageRaw) + if err != nil { + return nil, fmt.Errorf("part %s contains incorrect stage location: %w", location, err) + } + stageRaw = stageId.FullyQualifiedName() + } + pathRaw := strings.TrimPrefix(strings.TrimSpace(location[idx:]), "/") + if pathRaw == "" { + return nil, fmt.Errorf("part %s contains empty path", location) + } + return &NormalizedPath{stageRaw, pathRaw}, nil +} + +func parseFunctionOrProcedureReturns(returns string) (datatypes.DataType, bool, error) { + var returnNotNull bool + trimmed := strings.TrimSpace(returns) + if strings.HasSuffix(trimmed, " NOT NULL") { + returnNotNull = true + trimmed = strings.TrimSuffix(trimmed, " NOT NULL") + } + dt, err 
:= datatypes.ParseDataType(trimmed) + return dt, returnNotNull, err +} + +// Format in Snowflake DB is: (argName argType, argName argType, ...). +func parseFunctionOrProcedureSignature(signature string) ([]NormalizedArgument, error) { + normalizedArguments := make([]NormalizedArgument, 0) + trimmed := strings.TrimSpace(signature) + if trimmed == "" { + return normalizedArguments, fmt.Errorf("could not parse signature from Snowflake: %s, can't be empty", signature) + } + if trimmed == "()" { + return normalizedArguments, nil + } + if !strings.HasPrefix(trimmed, "(") || !strings.HasSuffix(trimmed, ")") { + return normalizedArguments, fmt.Errorf("could not parse signature from Snowflake: %s, wrapping parentheses not found", trimmed) + } + raw := (trimmed)[1 : len(trimmed)-1] + args := strings.Split(raw, ",") + + for _, arg := range args { + a, err := parseFunctionOrProcedureArgument(arg) + if err != nil { + return nil, fmt.Errorf("could not parse signature from Snowflake: %s, err: %w", trimmed, err) + } + normalizedArguments = append(normalizedArguments, *a) + } + return normalizedArguments, nil +} + +// TODO [SNOW-1850370]: test with strange arg names (first integration test) +func parseFunctionOrProcedureArgument(arg string) (*NormalizedArgument, error) { + log.Printf("[DEBUG] parsing argument: %s", arg) + trimmed := strings.TrimSpace(arg) + idx := strings.Index(trimmed, " ") + if idx < 0 { + return nil, fmt.Errorf("arg %s cannot be split into arg name, data type, and default", arg) + } + argName := trimmed[:idx] + rest := strings.TrimSpace(trimmed[idx:]) + dt, err := datatypes.ParseDataType(rest) + if err != nil { + return nil, fmt.Errorf("arg type %s cannot be parsed, err: %w", rest, err) + } + return &NormalizedArgument{argName, dt}, nil +} + func (v *functions) DescribeDetails(ctx context.Context, id SchemaObjectIdentifierWithArguments) (*FunctionDetails, error) { rows, err := v.Describe(ctx, id) if err != nil { diff --git a/pkg/sdk/functions_ext_test.go 
b/pkg/sdk/functions_ext_test.go new file mode 100644 index 0000000000..a4f77431d0 --- /dev/null +++ b/pkg/sdk/functions_ext_test.go @@ -0,0 +1,179 @@ +package sdk + +import ( + "fmt" + "testing" + + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk/datatypes" + "github.com/stretchr/testify/require" +) + +// TODO [SNOW-1850370]: test parsing single +func Test_parseFunctionDetailsImport(t *testing.T) { + inputs := []struct { + rawInput string + expected []NormalizedPath + }{ + {"", []NormalizedPath{}}, + {`[]`, []NormalizedPath{}}, + {`[@~/abc]`, []NormalizedPath{{"~", "abc"}}}, + {`[@~/abc/def]`, []NormalizedPath{{"~", "abc/def"}}}, + {`[@"db"."sc"."st"/abc/def]`, []NormalizedPath{{`"db"."sc"."st"`, "abc/def"}}}, + {`[@db.sc.st/abc/def]`, []NormalizedPath{{`"db"."sc"."st"`, "abc/def"}}}, + {`[db.sc.st/abc/def]`, []NormalizedPath{{`"db"."sc"."st"`, "abc/def"}}}, + {`[@"db"."sc".st/abc/def]`, []NormalizedPath{{`"db"."sc"."st"`, "abc/def"}}}, + {`[@"db"."sc".st/abc/def, db."sc".st/abc]`, []NormalizedPath{{`"db"."sc"."st"`, "abc/def"}, {`"db"."sc"."st"`, "abc"}}}, + } + + badInputs := []struct { + rawInput string + expectedErrorPart string + }{ + {"[", "wrapping brackets not found"}, + {"]", "wrapping brackets not found"}, + {`[@~/]`, "contains empty path"}, + {`[@~]`, "cannot be split into stage and path"}, + {`[@"db"."sc"/abc]`, "contains incorrect stage location"}, + {`[@"db"/abc]`, "contains incorrect stage location"}, + {`[@"db"."sc"."st"."smth"/abc]`, "contains incorrect stage location"}, + {`[@"db/a"."sc"."st"/abc]`, "contains incorrect stage location"}, + {`[@"db"."sc"."st"/abc], @"db"."sc"/abc]`, "contains incorrect stage location"}, + } + + for _, tc := range inputs { + tc := tc + t.Run(fmt.Sprintf("Snowflake raw imports: %s", tc.rawInput), func(t *testing.T) { + details := FunctionDetails{Imports: &tc.rawInput} + + results, err := parseFunctionDetailsImport(details) + require.NoError(t, err) + require.Equal(t, tc.expected, results) + }) + } + + 
for _, tc := range badInputs { + tc := tc + t.Run(fmt.Sprintf("incorrect Snowflake input: %s, expecting error with: %s", tc.rawInput, tc.expectedErrorPart), func(t *testing.T) { + details := FunctionDetails{Imports: &tc.rawInput} + + _, err := parseFunctionDetailsImport(details) + require.Error(t, err) + require.ErrorContains(t, err, "could not parse imports from Snowflake") + require.ErrorContains(t, err, tc.expectedErrorPart) + }) + } + + t.Run("Snowflake raw imports nil", func(t *testing.T) { + details := FunctionDetails{Imports: nil} + + results, err := parseFunctionDetailsImport(details) + require.NoError(t, err) + require.Equal(t, []NormalizedPath{}, results) + }) +} + +func Test_parseFunctionOrProcedureReturns(t *testing.T) { + inputs := []struct { + rawInput string + expectedRawDataType string + expectedReturnNotNull bool + }{ + {"CHAR", "CHAR(1)", false}, + {"CHAR(1)", "CHAR(1)", false}, + {"NUMBER(30, 2)", "NUMBER(30, 2)", false}, + {"NUMBER(30,2)", "NUMBER(30, 2)", false}, + {"NUMBER(30,2) NOT NULL", "NUMBER(30, 2)", true}, + {"CHAR NOT NULL", "CHAR(1)", true}, + {" CHAR NOT NULL ", "CHAR(1)", true}, + {"OBJECT", "OBJECT", false}, + {"OBJECT NOT NULL", "OBJECT", true}, + } + + badInputs := []struct { + rawInput string + expectedErrorPart string + }{ + {"", "invalid data type"}, + {"NOT NULL", "invalid data type"}, + {"CHA NOT NULL", "invalid data type"}, + {"CHA NOT NULLS", "invalid data type"}, + } + + for _, tc := range inputs { + tc := tc + t.Run(fmt.Sprintf("return data type raw: %s", tc.rawInput), func(t *testing.T) { + dt, returnNotNull, err := parseFunctionOrProcedureReturns(tc.rawInput) + require.NoError(t, err) + require.Equal(t, tc.expectedRawDataType, dt.ToSql()) + require.Equal(t, tc.expectedReturnNotNull, returnNotNull) + }) + } + + for _, tc := range badInputs { + tc := tc + t.Run(fmt.Sprintf("incorrect return data type raw: %s, expecting error with: %s", tc.rawInput, tc.expectedErrorPart), func(t *testing.T) { + _, _, err := 
parseFunctionOrProcedureReturns(tc.rawInput) + require.Error(t, err) + require.ErrorContains(t, err, tc.expectedErrorPart) + }) + } +} + +func Test_parseFunctionOrProcedureSignature(t *testing.T) { + inputs := []struct { + rawInput string + expectedArgs []NormalizedArgument + }{ + {"()", []NormalizedArgument{}}, + {"(abc CHAR)", []NormalizedArgument{{"abc", dataTypeChar}}}, + {"(abc CHAR(1))", []NormalizedArgument{{"abc", dataTypeChar}}}, + {"(abc CHAR(100))", []NormalizedArgument{{"abc", dataTypeChar_100}}}, + {" ( abc CHAR(100 ) )", []NormalizedArgument{{"abc", dataTypeChar_100}}}, + {"( abc CHAR )", []NormalizedArgument{{"abc", dataTypeChar}}}, + {"(abc DOUBLE PRECISION)", []NormalizedArgument{{"abc", dataTypeDoublePrecision}}}, + {"(abc double precision)", []NormalizedArgument{{"abc", dataTypeDoublePrecision}}}, + {"(abc TIMESTAMP WITHOUT TIME ZONE(5))", []NormalizedArgument{{"abc", dataTypeTimestampWithoutTimeZone_5}}}, + } + + badInputs := []struct { + rawInput string + expectedErrorPart string + }{ + {"", "can't be empty"}, + {"(abc CHAR", "wrapping parentheses not found"}, + {"abc CHAR)", "wrapping parentheses not found"}, + {"(abc)", "cannot be split into arg name, data type, and default"}, + {"(CHAR)", "cannot be split into arg name, data type, and default"}, + {"(abc CHA)", "invalid data type"}, + {"(abc CHA(123))", "invalid data type"}, + {"(abc CHAR(1) DEFAULT)", "cannot be parsed"}, + {"(abc CHAR(1) DEFAULT 'a')", "cannot be parsed"}, + // TODO [SNOW-1850370]: Snowflake currently does not return concrete data types so we can fail on them currently but it should be improved in the future + {"(abc NUMBER(30,2))", "cannot be parsed"}, + {"(abc NUMBER(30, 2))", "cannot be parsed"}, + } + + for _, tc := range inputs { + tc := tc + t.Run(fmt.Sprintf("return data type raw: %s", tc.rawInput), func(t *testing.T) { + args, err := parseFunctionOrProcedureSignature(tc.rawInput) + + require.NoError(t, err) + require.Len(t, args, len(tc.expectedArgs)) + for i, arg 
:= range args { + require.Equal(t, tc.expectedArgs[i].Name, arg.Name) + require.True(t, datatypes.AreTheSame(tc.expectedArgs[i].DataType, arg.DataType)) + } + }) + } + + for _, tc := range badInputs { + tc := tc + t.Run(fmt.Sprintf("incorrect signature raw: %s, expecting error with: %s", tc.rawInput, tc.expectedErrorPart), func(t *testing.T) { + _, err := parseFunctionOrProcedureSignature(tc.rawInput) + require.Error(t, err) + require.ErrorContains(t, err, "could not parse signature from Snowflake") + require.ErrorContains(t, err, tc.expectedErrorPart) + }) + } +} diff --git a/pkg/sdk/identifier_helpers.go b/pkg/sdk/identifier_helpers.go index 1609593d71..308535c4f8 100644 --- a/pkg/sdk/identifier_helpers.go +++ b/pkg/sdk/identifier_helpers.go @@ -5,6 +5,7 @@ import ( "log" "strings" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/internal/collections" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk/datatypes" ) @@ -211,7 +212,7 @@ type SchemaObjectIdentifier struct { databaseName string schemaName string name string - // TODO(next prs): left right now for backward compatibility for procedures and externalFunctions + // TODO [SNOW-1850370]: left right now for backward compatibility for procedures and externalFunctions arguments []DataType } @@ -343,6 +344,15 @@ func NewSchemaObjectIdentifierWithArguments(databaseName, schemaName, name strin } } +func NewSchemaObjectIdentifierWithArgumentsNormalized(databaseName, schemaName, name string, argumentDataTypes ...datatypes.DataType) SchemaObjectIdentifierWithArguments { + return SchemaObjectIdentifierWithArguments{ + databaseName: strings.Trim(databaseName, `"`), + schemaName: strings.Trim(schemaName, `"`), + name: strings.Trim(name, `"`), + argumentDataTypes: collections.Map(argumentDataTypes, LegacyDataTypeFrom), + } +} + func NewSchemaObjectIdentifierWithArgumentsInSchema(schemaId DatabaseObjectIdentifier, name string, argumentDataTypes ...DataType) SchemaObjectIdentifierWithArguments { 
return NewSchemaObjectIdentifierWithArguments(schemaId.DatabaseName(), schemaId.Name(), name, argumentDataTypes...) } diff --git a/pkg/sdk/random_test.go b/pkg/sdk/random_test.go index 83880167df..cedf8c3985 100644 --- a/pkg/sdk/random_test.go +++ b/pkg/sdk/random_test.go @@ -17,10 +17,14 @@ var ( emptySchemaObjectIdentifierWithArguments = NewSchemaObjectIdentifierWithArguments("", "", "") // TODO [SNOW-1843440]: create using constructors (when we add them)? - dataTypeNumber, _ = datatypes.ParseDataType("NUMBER(36, 2)") - dataTypeVarchar, _ = datatypes.ParseDataType("VARCHAR(100)") - dataTypeFloat, _ = datatypes.ParseDataType("FLOAT") - dataTypeVariant, _ = datatypes.ParseDataType("VARIANT") + dataTypeNumber, _ = datatypes.ParseDataType("NUMBER(36, 2)") + dataTypeVarchar, _ = datatypes.ParseDataType("VARCHAR(100)") + dataTypeFloat, _ = datatypes.ParseDataType("FLOAT") + dataTypeVariant, _ = datatypes.ParseDataType("VARIANT") + dataTypeChar, _ = datatypes.ParseDataType("CHAR") + dataTypeChar_100, _ = datatypes.ParseDataType("CHAR(100)") + dataTypeDoublePrecision, _ = datatypes.ParseDataType("DOUBLE PRECISION") + dataTypeTimestampWithoutTimeZone_5, _ = datatypes.ParseDataType("TIMESTAMP WITHOUT TIME ZONE(5)") ) func randomSchemaObjectIdentifierWithArguments(argumentDataTypes ...DataType) SchemaObjectIdentifierWithArguments { diff --git a/pkg/sdk/testint/functions_integration_test.go b/pkg/sdk/testint/functions_integration_test.go index bb292cd627..022ba7592a 100644 --- a/pkg/sdk/testint/functions_integration_test.go +++ b/pkg/sdk/testint/functions_integration_test.go @@ -20,20 +20,20 @@ import ( "github.com/stretchr/testify/require" ) -// TODO [SNOW-1348103]: schemaName and catalog name are quoted (because we use lowercase) // TODO [SNOW-1850370]: HasArgumentsRawFrom(functionId, arguments, return) // TODO [SNOW-1850370]: extract show assertions with commons fields // TODO [SNOW-1850370]: test confirming that runtime version is required for Scala function -// TODO 
[SNOW-1348103 or SNOW-1850370]: test create or replace with name change, args change -// TODO [SNOW-1348103]: test rename more (arg stays, can't change arg, rename to different schema) -// TODO [SNOW-1348103]: test weird names for arg name - lower/upper if used with double quotes, to upper without quotes, dots, spaces, and both quotes not permitted +// TODO [SNOW-1850370]: test create or replace with name change, args change +// TODO [SNOW-1850370]: test rename more (arg stays, can't change arg, rename to different schema) // TODO [SNOW-1850370]: add test documenting that UNSET SECRETS does not work // TODO [SNOW-1850370]: add test documenting [JAVA]: 391516 (42601): SQL compilation error: Cannot specify TARGET_PATH without a function BODY. -// TODO [SNOW-1348103 or SNOW-1850370]: test secure -// TODO [SNOW-1348103]: python aggregate func (100357 (P0000): Could not find accumulate method in function CVVEMHIT_06547800_08D6_DBCA_1AC7_5E422AFF8B39 with handler dump) -// TODO [SNOW-1348103]: add a test documenting that we can't set parameters in create (and revert adding these parameters directly in object...) +// TODO [SNOW-1850370]: add a test documenting that we can't set parameters in create (and revert adding these parameters directly in object...) 
// TODO [SNOW-1850370]: active warehouse vs validations -// TODO [SNOW-1348103]: add a test documenting STRICT behavior +// TODO [SNOW-1850370]: add a test documenting STRICT behavior +// TODO [SNOW-1348103]: test weird names for arg name - lower/upper if used with double quotes, to upper without quotes, dots, spaces, and both quotes not permitted +// TODO [SNOW-1348103]: test secure +// TODO [SNOW-1348103]: python aggregate func (100357 (P0000): Could not find accumulate method in function CVVEMHIT_06547800_08D6_DBCA_1AC7_5E422AFF8B39 with handler dump) +// TODO [SNOW-1348103]: add test with multiple imports func TestInt_Functions(t *testing.T) { client := testClient(t) ctx := context.Background() @@ -48,7 +48,7 @@ func TestInt_Functions(t *testing.T) { externalAccessIntegration, externalAccessIntegrationCleanup := testClientHelper().ExternalAccessIntegration.CreateExternalAccessIntegrationWithNetworkRuleAndSecret(t, networkRule.ID(), secret.ID()) t.Cleanup(externalAccessIntegrationCleanup) - tmpJavaFunction := testClientHelper().CreateSampleJavaFunctionAndJar(t) + tmpJavaFunction := testClientHelper().CreateSampleJavaFunctionAndJarOnUserStage(t) tmpPythonFunction := testClientHelper().CreateSamplePythonFunctionAndModule(t) assertParametersSet := func(t *testing.T, functionParametersAssert *objectparametersassert.FunctionParametersAssert) { @@ -112,6 +112,8 @@ func TestInt_Functions(t *testing.T) { assertions.AssertThatObject(t, objectassert.FunctionDetails(t, function.ID()). HasSignature(fmt.Sprintf(`(%s %s)`, argName, dataType.ToLegacyDataTypeSql())). HasReturns(dataType.ToSql()). + HasReturnDataType(dataType). + HasReturnNotNull(false). HasLanguage("JAVA"). HasBody(definition). HasNullHandling(string(sdk.NullInputBehaviorCalledOnNullInput)). @@ -119,10 +121,12 @@ func TestInt_Functions(t *testing.T) { HasExternalAccessIntegrationsNil(). HasSecretsNil(). HasImports(`[]`). + HasExactlyImportsNormalizedInAnyOrder(). HasHandler(handler). HasRuntimeVersionNil(). 
HasPackages(`[]`). HasTargetPathNil(). + HasNormalizedTargetPathNil(). HasInstalledPackagesNil(). HasIsAggregateNil(), ) @@ -206,6 +210,8 @@ func TestInt_Functions(t *testing.T) { assertions.AssertThatObject(t, objectassert.FunctionDetails(t, function.ID()). HasSignature(fmt.Sprintf(`(%s %s)`, argName, dataType.ToLegacyDataTypeSql())). HasReturns(fmt.Sprintf(`%s NOT NULL`, dataType.ToSql())). + HasReturnDataType(dataType). + HasReturnNotNull(true). HasLanguage("JAVA"). HasBody(definition). HasNullHandling(string(sdk.NullInputBehaviorReturnsNullInput)). @@ -215,10 +221,14 @@ func TestInt_Functions(t *testing.T) { // TODO [SNOW-1348103]: check multiple secrets (to know how to parse) HasExactlySecrets(map[string]sdk.SchemaObjectIdentifier{"abc": secretId}). HasImports(fmt.Sprintf(`[%s]`, tmpJavaFunction.JarLocation())). + HasExactlyImportsNormalizedInAnyOrder(sdk.NormalizedPath{ + StageLocation: "~", PathOnStage: tmpJavaFunction.JarName, + }). HasHandler(handler). HasRuntimeVersion("11"). HasPackages(`[com.snowflake:snowpark:1.14.0,com.snowflake:telemetry:0.1.0]`). HasTargetPath(targetPath). + HasNormalizedTargetPath("~", jarName). HasInstalledPackagesNil(). HasIsAggregateNil(), ) @@ -278,6 +288,8 @@ func TestInt_Functions(t *testing.T) { assertions.AssertThatObject(t, objectassert.FunctionDetails(t, function.ID()). HasSignature(fmt.Sprintf(`(%s %s)`, argName, dataType.ToLegacyDataTypeSql())). HasReturns(dataType.ToSql()). + HasReturnDataType(dataType). + HasReturnNotNull(false). HasLanguage("JAVA"). HasBodyNil(). HasNullHandling(string(sdk.NullInputBehaviorCalledOnNullInput)). @@ -285,10 +297,14 @@ func TestInt_Functions(t *testing.T) { HasExternalAccessIntegrationsNil(). HasSecretsNil(). HasImports(fmt.Sprintf(`[%s]`, importPath)). + HasExactlyImportsNormalizedInAnyOrder(sdk.NormalizedPath{ + StageLocation: "~", PathOnStage: tmpJavaFunction.JarName, + }). HasHandler(handler). HasRuntimeVersionNil(). HasPackages(`[]`). HasTargetPathNil(). 
+ HasNormalizedTargetPathNil(). HasInstalledPackagesNil(). HasIsAggregateNil(), ) @@ -360,6 +376,8 @@ func TestInt_Functions(t *testing.T) { assertions.AssertThatObject(t, objectassert.FunctionDetails(t, function.ID()). HasSignature(fmt.Sprintf(`(%s %s)`, argName, dataType.ToLegacyDataTypeSql())). HasReturns(fmt.Sprintf(`%s NOT NULL`, dataType.ToSql())). + HasReturnDataType(dataType). + HasReturnNotNull(true). HasLanguage("JAVA"). HasBodyNil(). HasNullHandling(string(sdk.NullInputBehaviorReturnsNullInput)). @@ -367,10 +385,14 @@ func TestInt_Functions(t *testing.T) { HasExactlyExternalAccessIntegrations(externalAccessIntegration). HasExactlySecrets(map[string]sdk.SchemaObjectIdentifier{"abc": secretId}). HasImports(fmt.Sprintf(`[%s]`, tmpJavaFunction.JarLocation())). + HasExactlyImportsNormalizedInAnyOrder(sdk.NormalizedPath{ + StageLocation: "~", PathOnStage: tmpJavaFunction.JarName, + }). HasHandler(handler). HasRuntimeVersion("11"). HasPackages(`[com.snowflake:snowpark:1.14.0,com.snowflake:telemetry:0.1.0]`). HasTargetPathNil(). + HasNormalizedTargetPathNil(). HasInstalledPackagesNil(). 
HasIsAggregateNil(), ) @@ -381,6 +403,78 @@ func TestInt_Functions(t *testing.T) { ) }) + t.Run("create function for Java - different stage", func(t *testing.T) { + stage, stageCleanup := testClientHelper().Stage.CreateStage(t) + t.Cleanup(stageCleanup) + + tmpJavaFunctionDifferentStage := testClientHelper().CreateSampleJavaFunctionAndJarOnStage(t, stage) + + dataType := tmpJavaFunctionDifferentStage.ArgType + id := testClientHelper().Ids.RandomSchemaObjectIdentifierWithArgumentsNewDataTypes(dataType) + + argName := "x" + argument := sdk.NewFunctionArgumentRequest(argName, dataType) + dt := sdk.NewFunctionReturnsResultDataTypeRequest(dataType) + returns := sdk.NewFunctionReturnsRequest().WithResultDataType(*dt) + handler := tmpJavaFunctionDifferentStage.JavaHandler() + importPath := tmpJavaFunctionDifferentStage.JarLocation() + + requestStaged := sdk.NewCreateForJavaFunctionRequest(id.SchemaObjectId(), *returns, handler). + WithArguments([]sdk.FunctionArgumentRequest{*argument}). + WithImports([]sdk.FunctionImportRequest{*sdk.NewFunctionImportRequest().WithImport(importPath)}) + + err := client.Functions.CreateForJava(ctx, requestStaged) + require.NoError(t, err) + t.Cleanup(testClientHelper().Function.DropFunctionFunc(t, id)) + + function, err := client.Functions.ShowByID(ctx, id) + require.NoError(t, err) + + assertions.AssertThatObject(t, objectassert.FunctionDetails(t, function.ID()). + HasImports(fmt.Sprintf(`[@"%s"."%s".%s/%s]`, stage.ID().DatabaseName(), stage.ID().SchemaName(), stage.ID().Name(), tmpJavaFunctionDifferentStage.JarName)). + HasExactlyImportsNormalizedInAnyOrder(sdk.NormalizedPath{ + StageLocation: stage.ID().FullyQualifiedName(), PathOnStage: tmpJavaFunctionDifferentStage.JarName, + }). + HasHandler(handler). + HasTargetPathNil(). 
+ HasNormalizedTargetPathNil(), + ) + }) + + // proves that we don't get default argument values from SHOW and DESCRIBE + t.Run("create function for Java - default argument value", func(t *testing.T) { + className := "TestFunc" + funcName := "echoVarchar" + argName := "x" + dataType := testdatatypes.DataTypeVarchar_100 + + id := testClientHelper().Ids.RandomSchemaObjectIdentifierWithArguments(sdk.LegacyDataTypeFrom(dataType)) + argument := sdk.NewFunctionArgumentRequest(argName, dataType).WithDefaultValue(`'abc'`) + dt := sdk.NewFunctionReturnsResultDataTypeRequest(dataType) + returns := sdk.NewFunctionReturnsRequest().WithResultDataType(*dt) + handler := fmt.Sprintf("%s.%s", className, funcName) + definition := testClientHelper().Function.SampleJavaDefinition(t, className, funcName, argName) + + request := sdk.NewCreateForJavaFunctionRequest(id.SchemaObjectId(), *returns, handler). + WithArguments([]sdk.FunctionArgumentRequest{*argument}). + WithFunctionDefinitionWrapped(definition) + + err := client.Functions.CreateForJava(ctx, request) + require.NoError(t, err) + t.Cleanup(testClientHelper().Function.DropFunctionFunc(t, id)) + + function, err := client.Functions.ShowByID(ctx, id) + require.NoError(t, err) + + assertions.AssertThatObject(t, objectassert.FunctionFromObject(t, function). + HasArgumentsRaw(fmt.Sprintf(`%[1]s(DEFAULT %[2]s) RETURN %[2]s`, function.ID().Name(), dataType.ToLegacyDataTypeSql())), + ) + + assertions.AssertThatObject(t, objectassert.FunctionDetails(t, function.ID()). + HasSignature(fmt.Sprintf(`(%s %s)`, argName, dataType.ToLegacyDataTypeSql())), + ) + }) + t.Run("create function for Javascript - inline minimal", func(t *testing.T) { dataType := testdatatypes.DataTypeFloat id := testClientHelper().Ids.RandomSchemaObjectIdentifierWithArguments(sdk.LegacyDataTypeFrom(dataType)) @@ -428,6 +522,8 @@ func TestInt_Functions(t *testing.T) { assertions.AssertThatObject(t, objectassert.FunctionDetails(t, function.ID()). 
HasSignature(fmt.Sprintf(`(%s %s)`, argName, dataType.ToLegacyDataTypeSql())). HasReturns(dataType.ToSql()). + HasReturnDataType(dataType). + HasReturnNotNull(false). HasLanguage("JAVASCRIPT"). HasBody(definition). HasNullHandling(string(sdk.NullInputBehaviorCalledOnNullInput)). @@ -435,10 +531,12 @@ func TestInt_Functions(t *testing.T) { HasExternalAccessIntegrationsNil(). HasSecretsNil(). HasImportsNil(). + HasExactlyImportsNormalizedInAnyOrder(). HasHandlerNil(). HasRuntimeVersionNil(). HasPackagesNil(). HasTargetPathNil(). + HasNormalizedTargetPathNil(). HasInstalledPackagesNil(). HasIsAggregateNil(), ) @@ -501,6 +599,8 @@ func TestInt_Functions(t *testing.T) { assertions.AssertThatObject(t, objectassert.FunctionDetails(t, function.ID()). HasSignature(fmt.Sprintf(`(%s %s)`, argName, dataType.ToLegacyDataTypeSql())). HasReturns(fmt.Sprintf(`%s NOT NULL`, dataType.ToSql())). + HasReturnDataType(dataType). + HasReturnNotNull(true). HasLanguage("JAVASCRIPT"). HasBody(definition). HasNullHandling(string(sdk.NullInputBehaviorReturnsNullInput)). @@ -508,10 +608,12 @@ func TestInt_Functions(t *testing.T) { HasExternalAccessIntegrationsNil(). HasSecretsNil(). HasImportsNil(). + HasExactlyImportsNormalizedInAnyOrder(). HasHandlerNil(). HasRuntimeVersionNil(). HasPackagesNil(). HasTargetPathNil(). + HasNormalizedTargetPathNil(). HasInstalledPackagesNil(). HasIsAggregateNil(), ) @@ -569,7 +671,9 @@ func TestInt_Functions(t *testing.T) { assertions.AssertThatObject(t, objectassert.FunctionDetails(t, function.ID()). HasSignature(fmt.Sprintf(`(%s %s)`, argName, dataType.ToLegacyDataTypeSql())). - HasReturns(strings.ReplaceAll(dataType.ToSql(), " ", "")). // TODO [SNOW-1348103]: do we care about this whitespace? + HasReturns(strings.ReplaceAll(dataType.ToSql(), " ", "")). + HasReturnDataType(dataType). + HasReturnNotNull(false). HasLanguage("PYTHON"). HasBody(definition). HasNullHandling(string(sdk.NullInputBehaviorCalledOnNullInput)). 
@@ -577,10 +681,12 @@ func TestInt_Functions(t *testing.T) { HasExternalAccessIntegrationsNil(). HasSecretsNil(). HasImports(`[]`). + HasExactlyImportsNormalizedInAnyOrder(). HasHandler(funcName). HasRuntimeVersion("3.8"). HasPackages(`[]`). HasTargetPathNil(). + HasNormalizedTargetPathNil(). HasInstalledPackagesNotEmpty(). HasIsAggregate(false), ) @@ -651,7 +757,9 @@ func TestInt_Functions(t *testing.T) { assertions.AssertThatObject(t, objectassert.FunctionDetails(t, function.ID()). HasSignature(fmt.Sprintf(`(%s %s)`, argName, dataType.ToLegacyDataTypeSql())). - HasReturns(strings.ReplaceAll(dataType.ToSql(), " ", "")+" NOT NULL"). // TODO [SNOW-1348103]: do we care about this whitespace? + HasReturns(strings.ReplaceAll(dataType.ToSql(), " ", "")+" NOT NULL"). + HasReturnDataType(dataType). + HasReturnNotNull(true). HasLanguage("PYTHON"). HasBody(definition). HasNullHandling(string(sdk.NullInputBehaviorReturnsNullInput)). @@ -659,10 +767,14 @@ func TestInt_Functions(t *testing.T) { HasExactlyExternalAccessIntegrations(externalAccessIntegration). HasExactlySecrets(map[string]sdk.SchemaObjectIdentifier{"abc": secretId}). HasImports(fmt.Sprintf(`[%s]`, tmpPythonFunction.PythonModuleLocation())). + HasExactlyImportsNormalizedInAnyOrder(sdk.NormalizedPath{ + StageLocation: "~", PathOnStage: tmpPythonFunction.PythonFileName(), + }). HasHandler(funcName). HasRuntimeVersion("3.8"). HasPackages(`['absl-py==0.10.0','about-time==4.2.1']`). HasTargetPathNil(). + HasNormalizedTargetPathNil(). HasInstalledPackagesNotEmpty(). HasIsAggregate(false), ) @@ -719,6 +831,8 @@ func TestInt_Functions(t *testing.T) { assertions.AssertThatObject(t, objectassert.FunctionDetails(t, function.ID()). HasSignature(fmt.Sprintf(`(%s %s)`, argName, dataType.ToLegacyDataTypeSql())). HasReturns(strings.ReplaceAll(dataType.ToSql(), " ", "")). + HasReturnDataType(dataType). + HasReturnNotNull(false). HasLanguage("PYTHON"). HasBodyNil(). HasNullHandling(string(sdk.NullInputBehaviorCalledOnNullInput)). 
@@ -726,10 +840,14 @@ func TestInt_Functions(t *testing.T) { HasExternalAccessIntegrationsNil(). HasSecretsNil(). HasImports(fmt.Sprintf(`[%s]`, tmpPythonFunction.PythonModuleLocation())). + HasExactlyImportsNormalizedInAnyOrder(sdk.NormalizedPath{ + StageLocation: "~", PathOnStage: tmpPythonFunction.PythonFileName(), + }). HasHandler(tmpPythonFunction.PythonHandler()). HasRuntimeVersion("3.8"). HasPackages(`[]`). HasTargetPathNil(). + HasNormalizedTargetPathNil(). HasInstalledPackagesNotEmpty(). HasIsAggregate(false), ) @@ -798,6 +916,8 @@ func TestInt_Functions(t *testing.T) { assertions.AssertThatObject(t, objectassert.FunctionDetails(t, function.ID()). HasSignature(fmt.Sprintf(`(%s %s)`, argName, dataType.ToLegacyDataTypeSql())). HasReturns(strings.ReplaceAll(dataType.ToSql(), " ", "")+" NOT NULL"). + HasReturnDataType(dataType). + HasReturnNotNull(true). HasLanguage("PYTHON"). HasBodyNil(). HasNullHandling(string(sdk.NullInputBehaviorReturnsNullInput)). @@ -805,10 +925,14 @@ func TestInt_Functions(t *testing.T) { HasExactlyExternalAccessIntegrations(externalAccessIntegration). HasExactlySecrets(map[string]sdk.SchemaObjectIdentifier{"abc": secretId}). HasImports(fmt.Sprintf(`[%s]`, tmpPythonFunction.PythonModuleLocation())). + HasExactlyImportsNormalizedInAnyOrder(sdk.NormalizedPath{ + StageLocation: "~", PathOnStage: tmpPythonFunction.PythonFileName(), + }). HasHandler(tmpPythonFunction.PythonHandler()). HasRuntimeVersion("3.8"). HasPackages(`['absl-py==0.10.0','about-time==4.2.1']`). HasTargetPathNil(). + HasNormalizedTargetPathNil(). HasInstalledPackagesNotEmpty(). HasIsAggregate(false), ) @@ -868,6 +992,8 @@ func TestInt_Functions(t *testing.T) { assertions.AssertThatObject(t, objectassert.FunctionDetails(t, function.ID()). HasSignature(fmt.Sprintf(`(%s %s)`, argName, dataType.ToLegacyDataTypeSql())). HasReturns(dataType.ToSql()). + HasReturnDataType(dataType). + HasReturnNotNull(false). HasLanguage("SCALA"). HasBody(definition). 
HasNullHandling(string(sdk.NullInputBehaviorCalledOnNullInput)). @@ -875,10 +1001,12 @@ func TestInt_Functions(t *testing.T) { HasExternalAccessIntegrationsNil(). HasSecretsNil(). HasImports(`[]`). + HasExactlyImportsNormalizedInAnyOrder(). HasHandler(handler). HasRuntimeVersion("2.12"). HasPackages(`[]`). HasTargetPathNil(). + HasNormalizedTargetPathNil(). HasInstalledPackagesNil(). HasIsAggregateNil(), ) @@ -959,6 +1087,8 @@ func TestInt_Functions(t *testing.T) { assertions.AssertThatObject(t, objectassert.FunctionDetails(t, function.ID()). HasSignature(fmt.Sprintf(`(%s %s)`, argName, dataType.ToLegacyDataTypeSql())). HasReturns(fmt.Sprintf(`%s NOT NULL`, dataType.ToSql())). + HasReturnDataType(dataType). + HasReturnNotNull(true). HasLanguage("SCALA"). HasBody(definition). HasNullHandling(string(sdk.NullInputBehaviorReturnsNullInput)). @@ -966,10 +1096,14 @@ func TestInt_Functions(t *testing.T) { HasExactlyExternalAccessIntegrations(externalAccessIntegration). HasExactlySecrets(map[string]sdk.SchemaObjectIdentifier{"abc": secretId}). HasImports(fmt.Sprintf(`[%s]`, tmpJavaFunction.JarLocation())). + HasExactlyImportsNormalizedInAnyOrder(sdk.NormalizedPath{ + StageLocation: "~", PathOnStage: tmpJavaFunction.JarName, + }). HasHandler(handler). HasRuntimeVersion("2.12"). HasPackages(`[com.snowflake:snowpark:1.14.0,com.snowflake:telemetry:0.1.0]`). HasTargetPath(targetPath). + HasNormalizedTargetPath("~", jarName). HasInstalledPackagesNil(). HasIsAggregateNil(), ) @@ -1027,6 +1161,8 @@ func TestInt_Functions(t *testing.T) { assertions.AssertThatObject(t, objectassert.FunctionDetails(t, function.ID()). HasSignature(fmt.Sprintf(`(%s %s)`, argName, dataType.ToLegacyDataTypeSql())). HasReturns(dataType.ToSql()). + HasReturnDataType(dataType). + HasReturnNotNull(false). HasLanguage("SCALA"). HasBodyNil(). HasNullHandling(string(sdk.NullInputBehaviorCalledOnNullInput)). @@ -1034,10 +1170,14 @@ func TestInt_Functions(t *testing.T) { HasExternalAccessIntegrationsNil(). 
HasSecretsNil(). HasImports(fmt.Sprintf(`[%s]`, importPath)). + HasExactlyImportsNormalizedInAnyOrder(sdk.NormalizedPath{ + StageLocation: "~", PathOnStage: tmpJavaFunction.JarName, + }). HasHandler(handler). HasRuntimeVersion("2.12"). HasPackages(`[]`). HasTargetPathNil(). + HasNormalizedTargetPathNil(). HasInstalledPackagesNil(). HasIsAggregateNil(), ) @@ -1106,6 +1246,8 @@ func TestInt_Functions(t *testing.T) { assertions.AssertThatObject(t, objectassert.FunctionDetails(t, function.ID()). HasSignature(fmt.Sprintf(`(%s %s)`, argName, dataType.ToLegacyDataTypeSql())). HasReturns(fmt.Sprintf(`%s NOT NULL`, dataType.ToSql())). + HasReturnDataType(dataType). + HasReturnNotNull(true). HasLanguage("SCALA"). HasBodyNil(). HasNullHandling(string(sdk.NullInputBehaviorReturnsNullInput)). @@ -1113,10 +1255,14 @@ func TestInt_Functions(t *testing.T) { HasExactlyExternalAccessIntegrations(externalAccessIntegration). HasExactlySecrets(map[string]sdk.SchemaObjectIdentifier{"abc": secretId}). HasImports(fmt.Sprintf(`[%s]`, tmpJavaFunction.JarLocation())). + HasExactlyImportsNormalizedInAnyOrder(sdk.NormalizedPath{ + StageLocation: "~", PathOnStage: tmpJavaFunction.JarName, + }). HasHandler(handler). HasRuntimeVersion("2.12"). HasPackages(`[com.snowflake:snowpark:1.14.0,com.snowflake:telemetry:0.1.0]`). HasTargetPathNil(). + HasNormalizedTargetPathNil(). HasInstalledPackagesNil(). HasIsAggregateNil(), ) @@ -1173,6 +1319,8 @@ func TestInt_Functions(t *testing.T) { assertions.AssertThatObject(t, objectassert.FunctionDetails(t, function.ID()). HasSignature(fmt.Sprintf(`(%s %s)`, argName, dataType.ToLegacyDataTypeSql())). HasReturns(dataType.ToSql()). + HasReturnDataType(dataType). + HasReturnNotNull(false). HasLanguage("SQL"). HasBody(definition). HasNullHandlingNil(). @@ -1180,10 +1328,12 @@ func TestInt_Functions(t *testing.T) { HasExternalAccessIntegrationsNil(). HasSecretsNil(). HasImportsNil(). + HasExactlyImportsNormalizedInAnyOrder(). HasHandlerNil(). HasRuntimeVersionNil(). 
HasPackagesNil(). HasTargetPathNil(). + HasNormalizedTargetPathNil(). HasInstalledPackagesNil(). HasIsAggregateNil(), ) @@ -1194,6 +1344,35 @@ func TestInt_Functions(t *testing.T) { ) }) + // proves that we don't get default argument values from SHOW and DESCRIBE + t.Run("create function for SQL - default argument value", func(t *testing.T) { + argName := "x" + dataType := testdatatypes.DataTypeFloat + id := testClientHelper().Ids.RandomSchemaObjectIdentifierWithArguments(sdk.LegacyDataTypeFrom(dataType)) + + definition := testClientHelper().Function.SampleSqlDefinition(t) + dt := sdk.NewFunctionReturnsResultDataTypeRequest(dataType) + returns := sdk.NewFunctionReturnsRequest().WithResultDataType(*dt) + argument := sdk.NewFunctionArgumentRequest(argName, dataType).WithDefaultValue("3.123") + request := sdk.NewCreateForSQLFunctionRequestDefinitionWrapped(id.SchemaObjectId(), *returns, definition). + WithArguments([]sdk.FunctionArgumentRequest{*argument}) + + err := client.Functions.CreateForSQL(ctx, request) + require.NoError(t, err) + t.Cleanup(testClientHelper().Function.DropFunctionFunc(t, id)) + + function, err := client.Functions.ShowByID(ctx, id) + require.NoError(t, err) + + assertions.AssertThatObject(t, objectassert.FunctionFromObject(t, function). + HasArgumentsRaw(fmt.Sprintf(`%[1]s(DEFAULT %[2]s) RETURN %[2]s`, function.ID().Name(), dataType.ToLegacyDataTypeSql())), + ) + + assertions.AssertThatObject(t, objectassert.FunctionDetails(t, function.ID()). + HasSignature(fmt.Sprintf(`(%s %s)`, argName, dataType.ToLegacyDataTypeSql())), + ) + }) + t.Run("create function for SQL - inline full", func(t *testing.T) { argName := "x" dataType := testdatatypes.DataTypeFloat @@ -1246,6 +1425,8 @@ func TestInt_Functions(t *testing.T) { assertions.AssertThatObject(t, objectassert.FunctionDetails(t, function.ID()). HasSignature(fmt.Sprintf(`(%s %s)`, argName, dataType.ToLegacyDataTypeSql())). HasReturns(fmt.Sprintf(`%s NOT NULL`, dataType.ToSql())). 
+ HasReturnDataType(dataType). + HasReturnNotNull(true). HasLanguage("SQL"). HasBody(definition). HasNullHandlingNil(). @@ -1255,10 +1436,12 @@ func TestInt_Functions(t *testing.T) { HasExternalAccessIntegrationsNil(). HasSecretsNil(). HasImportsNil(). + HasExactlyImportsNormalizedInAnyOrder(). HasHandlerNil(). HasRuntimeVersionNil(). HasPackagesNil(). HasTargetPathNil(). + HasNormalizedTargetPathNil(). HasInstalledPackagesNil(). HasIsAggregateNil(), ) @@ -1312,6 +1495,8 @@ func TestInt_Functions(t *testing.T) { assertions.AssertThatObject(t, objectassert.FunctionDetails(t, function.ID()). HasSignature("()"). HasReturns(dataType.ToSql()). + HasReturnDataType(dataType). + HasReturnNotNull(false). HasLanguage("SQL"). HasBody(definition). HasNullHandlingNil(). @@ -1319,10 +1504,12 @@ func TestInt_Functions(t *testing.T) { HasExternalAccessIntegrationsNil(). HasSecretsNil(). HasImportsNil(). + HasExactlyImportsNormalizedInAnyOrder(). HasHandlerNil(). HasRuntimeVersionNil(). HasPackagesNil(). HasTargetPathNil(). + HasNormalizedTargetPathNil(). HasInstalledPackagesNil(). 
HasIsAggregateNil(), ) diff --git a/pkg/sdk/testint/procedures_integration_test.go b/pkg/sdk/testint/procedures_integration_test.go index 2a69ef42c2..6e0298308e 100644 --- a/pkg/sdk/testint/procedures_integration_test.go +++ b/pkg/sdk/testint/procedures_integration_test.go @@ -37,7 +37,7 @@ func TestInt_Procedures(t *testing.T) { externalAccessIntegration, externalAccessIntegrationCleanup := testClientHelper().ExternalAccessIntegration.CreateExternalAccessIntegrationWithNetworkRuleAndSecret(t, networkRule.ID(), secret.ID()) t.Cleanup(externalAccessIntegrationCleanup) - tmpJavaProcedure := testClientHelper().CreateSampleJavaProcedureAndJar(t) + tmpJavaProcedure := testClientHelper().CreateSampleJavaProcedureAndJarOnUserStage(t) tmpPythonFunction := testClientHelper().CreateSamplePythonFunctionAndModule(t) assertParametersSet := func(t *testing.T, procedureParametersAssert *objectparametersassert.ProcedureParametersAssert) { @@ -354,6 +354,43 @@ func TestInt_Procedures(t *testing.T) { ) }) + t.Run("create procedure for Java - different stage", func(t *testing.T) { + stage, stageCleanup := testClientHelper().Stage.CreateStage(t) + t.Cleanup(stageCleanup) + + tmpJavaProcedureDifferentStage := testClientHelper().CreateSampleJavaProcedureAndJarOnStage(t, stage) + + dataType := tmpJavaProcedureDifferentStage.ArgType + id := testClientHelper().Ids.RandomSchemaObjectIdentifierWithArgumentsNewDataTypes(dataType) + + argName := "x" + argument := sdk.NewProcedureArgumentRequest(argName, dataType) + dt := sdk.NewProcedureReturnsResultDataTypeRequest(dataType) + returns := sdk.NewProcedureReturnsRequest().WithResultDataType(*dt) + handler := tmpJavaProcedureDifferentStage.JavaHandler() + importPath := tmpJavaProcedureDifferentStage.JarLocation() + packages := []sdk.ProcedurePackageRequest{ + *sdk.NewProcedurePackageRequest("com.snowflake:snowpark:1.14.0"), + } + + requestStaged := sdk.NewCreateForJavaProcedureRequest(id.SchemaObjectId(), *returns, "11", packages, handler). 
+ WithArguments([]sdk.ProcedureArgumentRequest{*argument}). + WithImports([]sdk.ProcedureImportRequest{*sdk.NewProcedureImportRequest(importPath)}) + + err := client.Procedures.CreateForJava(ctx, requestStaged) + require.NoError(t, err) + t.Cleanup(testClientHelper().Procedure.DropProcedureFunc(t, id)) + + function, err := client.Procedures.ShowByID(ctx, id) + require.NoError(t, err) + + assertions.AssertThatObject(t, objectassert.ProcedureDetails(t, function.ID()). + HasImports(fmt.Sprintf(`[@"%s"."%s".%s/%s]`, stage.ID().DatabaseName(), stage.ID().SchemaName(), stage.ID().Name(), tmpJavaProcedureDifferentStage.JarName)). + HasHandler(handler). + HasTargetPathNil(), + ) + }) + t.Run("create procedure for Javascript - inline minimal", func(t *testing.T) { dataType := testdatatypes.DataTypeFloat id := testClientHelper().Ids.RandomSchemaObjectIdentifierWithArguments(sdk.LegacyDataTypeFrom(dataType)) From 933335f56d1e53bf3e95d1f552672f35425b4878 Mon Sep 17 00:00:00 2001 From: Artur Sawicki Date: Thu, 12 Dec 2024 12:15:24 +0100 Subject: [PATCH 2/2] feat: Basic procedures implementation (#3271) Prepare most of the java procedure resource implementation (based on #3269, check it for details); additionally: - extracted more common functions to reuse between functions and procedures - left TODO for some that we duplicate for now Next PRs: - handle secrets, external access integrations, packages, return not null, and comments - TABLE function improvements and tests - Add PR with all other function types - datasources --- docs/resources/procedure_java.md | 28 +- docs/resources/procedure_javascript.md | 4 + docs/resources/procedure_python.md | 17 +- docs/resources/procedure_scala.md | 28 +- docs/resources/procedure_sql.md | 4 + .../procedure_describe_snowflake_ext.go | 69 +++ .../procedure_java_resource_ext.go | 17 + .../procedure_resource_parameters_ext.go | 13 + .../config/model/procedure_java_model_ext.go | 73 +++ .../config/model/procedure_java_model_gen.go | 9 +- 
.../model/procedure_python_model_gen.go | 4 - .../config/model/procedure_scala_model_gen.go | 9 +- pkg/acceptance/helpers/procedure_client.go | 36 ++ .../function_and_procedure_commons.go | 54 +++ pkg/resources/function_commons.go | 50 +- pkg/resources/function_java.go | 10 +- pkg/resources/procedure_commons.go | 187 +++++++- pkg/resources/procedure_java.go | 159 ++++++- .../procedure_java_acceptance_test.go | 429 ++++++++++++++++++ pkg/resources/procedure_parameters.go | 12 +- pkg/sdk/functions_and_procedures_commons.go | 118 +++++ pkg/sdk/functions_ext.go | 117 +---- pkg/sdk/functions_ext_test.go | 14 +- pkg/sdk/procedures_ext.go | 39 +- .../testint/procedures_integration_test.go | 216 ++++++++- 25 files changed, 1495 insertions(+), 221 deletions(-) create mode 100644 pkg/acceptance/bettertestspoc/assert/resourceassert/procedure_java_resource_ext.go create mode 100644 pkg/acceptance/bettertestspoc/assert/resourceparametersassert/procedure_resource_parameters_ext.go create mode 100644 pkg/resources/function_and_procedure_commons.go create mode 100644 pkg/resources/procedure_java_acceptance_test.go create mode 100644 pkg/sdk/functions_and_procedures_commons.go diff --git a/docs/resources/procedure_java.md b/docs/resources/procedure_java.md index dbb5f2eba3..edc8047672 100644 --- a/docs/resources/procedure_java.md +++ b/docs/resources/procedure_java.md @@ -19,7 +19,6 @@ Resource used to manage java procedure objects. For more information, check [pro - `database` (String) The database in which to create the procedure. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"`. - `handler` (String) Use the fully qualified name of the method or function for the stored procedure. 
This is typically in the following form `com.my_company.my_package.MyClass.myMethod` where `com.my_company.my_package` corresponds to the package containing the object or class: `package com.my_company.my_package;`. - `name` (String) The name of the procedure; the identifier does not need to be unique for the schema in which the procedure is created because stored procedures are [identified and resolved by the combination of the name and argument types](https://docs.snowflake.com/en/developer-guide/udf-stored-procedure-naming-conventions.html#label-procedure-function-name-overloading). Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"`. -- `procedure_definition` (String) Defines the code executed by the stored procedure. The definition can consist of any valid code. Wrapping `$$` signs are added by the provider automatically; do not include them. The `procedure_definition` value must be Java source code. For more information, see [Java (using Snowpark)](https://docs.snowflake.com/en/developer-guide/stored-procedure/stored-procedures-java). To mitigate permadiff on this field, the provider replaces blank characters with a space. This can lead to false positives in cases where a change in case or run of whitespace is semantically significant. - `return_type` (String) Specifies the type of the result returned by the stored procedure. For ``, use the Snowflake data type that corresponds to the type of the language that you are using (see [SQL-Java Data Type Mappings](https://docs.snowflake.com/en/developer-guide/udf-stored-procedure-data-type-mapping.html#label-sql-java-data-type-mappings)). For `RETURNS TABLE ( [ col_name col_data_type [ , ... 
] ] )`, if you know the Snowflake data types of the columns in the returned table, specify the column names and types. Otherwise (e.g. if you are determining the column types during run time), you can omit the column names and types (i.e. `TABLE ()`). - `runtime_version` (String) The language runtime version to use. Currently, the supported versions are: 11. - `schema` (String) The schema in which to create the procedure. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"`. @@ -32,14 +31,15 @@ Resource used to manage java procedure objects. For more information, check [pro - `enable_console_output` (Boolean) Enable stdout/stderr fast path logging for anonyous stored procs. This is a public parameter (similar to LOG_LEVEL). For more information, check [ENABLE_CONSOLE_OUTPUT docs](https://docs.snowflake.com/en/sql-reference/parameters#enable-console-output). - `execute_as` (String) Specifies whether the stored procedure executes with the privileges of the owner (an “owner’s rights” stored procedure) or with the privileges of the caller (a “caller’s rights” stored procedure). If you execute the statement CREATE PROCEDURE … EXECUTE AS CALLER, then in the future the procedure will execute as a caller’s rights procedure. If you execute CREATE PROCEDURE … EXECUTE AS OWNER, then the procedure will execute as an owner’s rights procedure. For more information, see [Understanding caller’s rights and owner’s rights stored procedures](https://docs.snowflake.com/en/developer-guide/stored-procedure/stored-procedures-rights). Valid values are (case-insensitive): `EXECUTE AS CALLER` | `EXECUTE AS OWNER`. 
- `external_access_integrations` (Set of String) The names of [external access integrations](https://docs.snowflake.com/en/sql-reference/sql/create-external-access-integration) needed in order for this procedure’s handler code to access external networks. An external access integration specifies [network rules](https://docs.snowflake.com/en/sql-reference/sql/create-network-rule) and [secrets](https://docs.snowflake.com/en/sql-reference/sql/create-secret) that specify external locations and credentials (if any) allowed for use by handler code when making requests of an external network, such as an external REST API. -- `imports` (Set of String) The location (stage), path, and name of the file(s) to import. You must set the IMPORTS clause to include any files that your stored procedure depends on. If you are writing an in-line stored procedure, you can omit this clause, unless your code depends on classes defined outside the stored procedure or resource files. If you are writing a stored procedure with a staged handler, you must also include a path to the JAR file containing the stored procedure’s handler code. The IMPORTS definition cannot reference variables from arguments that are passed into the stored procedure. Each file in the IMPORTS clause must have a unique name, even if the files are in different subdirectories or different stages. +- `imports` (Block Set) The location (stage), path, and name of the file(s) to import. You must set the IMPORTS clause to include any files that your stored procedure depends on. If you are writing an in-line stored procedure, you can omit this clause, unless your code depends on classes defined outside the stored procedure or resource files. If you are writing a stored procedure with a staged handler, you must also include a path to the JAR file containing the stored procedure’s handler code. The IMPORTS definition cannot reference variables from arguments that are passed into the stored procedure. 
Each file in the IMPORTS clause must have a unique name, even if the files are in different subdirectories or different stages. (see [below for nested schema](#nestedblock--imports)) - `is_secure` (String) Specifies that the procedure is secure. For more information about secure procedures, see [Protecting Sensitive Information with Secure UDFs and Stored Procedures](https://docs.snowflake.com/en/developer-guide/secure-udf-procedure). Available options are: "true" or "false". When the value is not set in the configuration the provider will put "default" there which means to use the Snowflake default for this value. - `log_level` (String) LOG_LEVEL to use when filtering events For more information, check [LOG_LEVEL docs](https://docs.snowflake.com/en/sql-reference/parameters#log-level). - `metric_level` (String) METRIC_LEVEL value to control whether to emit metrics to Event Table For more information, check [METRIC_LEVEL docs](https://docs.snowflake.com/en/sql-reference/parameters#metric-level). - `null_input_behavior` (String) Specifies the behavior of the procedure when called with null inputs. Valid values are (case-insensitive): `CALLED ON NULL INPUT` | `RETURNS NULL ON NULL INPUT`. - `packages` (Set of String) List of the names of packages deployed in Snowflake that should be included in the handler code’s execution environment. The Snowpark package is required for stored procedures, but is specified in the `snowpark_package` attribute. For more information about Snowpark, see [Snowpark API](https://docs.snowflake.com/en/developer-guide/snowpark/index). +- `procedure_definition` (String) Defines the code executed by the stored procedure. The definition can consist of any valid code. Wrapping `$$` signs are added by the provider automatically; do not include them. The `procedure_definition` value must be Java source code. For more information, see [Java (using Snowpark)](https://docs.snowflake.com/en/developer-guide/stored-procedure/stored-procedures-java). 
To mitigate permadiff on this field, the provider replaces blank characters with a space. This can lead to false positives in cases where a change in case or run of whitespace is semantically significant. - `secrets` (Block Set) Assigns the names of [secrets](https://docs.snowflake.com/en/sql-reference/sql/create-secret) to variables so that you can use the variables to reference the secrets when retrieving information from secrets in handler code. Secrets you specify here must be allowed by the [external access integration](https://docs.snowflake.com/en/sql-reference/sql/create-external-access-integration) specified as a value of this CREATE FUNCTION command’s EXTERNAL_ACCESS_INTEGRATIONS parameter. (see [below for nested schema](#nestedblock--secrets)) -- `target_path` (String) Use the fully qualified name of the method or function for the stored procedure. This is typically in the following form `com.my_company.my_package.MyClass.myMethod` where `com.my_company.my_package` corresponds to the package containing the object or class: `package com.my_company.my_package;`. +- `target_path` (Block Set, Max: 1) Use the fully qualified name of the method or function for the stored procedure. This is typically in the following form `com.my_company.my_package.MyClass.myMethod` where `com.my_company.my_package` corresponds to the package containing the object or class: `package com.my_company.my_package;`. (see [below for nested schema](#nestedblock--target_path)) - `trace_level` (String) Trace level value to use when generating/filtering trace events For more information, check [TRACE_LEVEL docs](https://docs.snowflake.com/en/sql-reference/parameters#trace-level). ### Read-Only @@ -58,6 +58,19 @@ Required: - `arg_data_type` (String) The argument type. - `arg_name` (String) The argument name. +Optional: + +- `arg_default_value` (String) Optional default value for the argument. For text values use single quotes. Numeric values can be unquoted. 
External changes for this field won't be detected. In case you want to apply external changes, you can re-create the resource manually using "terraform taint". + + + +### Nested Schema for `imports` + +Required: + +- `path_on_stage` (String) Path for import on stage, without the leading `/`. +- `stage_location` (String) Stage location without leading `@`. To use your user's stage set this to `~`, otherwise pass fully qualified name of the stage (with every part contained in double quotes or use `snowflake_stage..fully_qualified_name` if you manage this stage through terraform). + ### Nested Schema for `secrets` @@ -68,6 +81,15 @@ Required: - `secret_variable_name` (String) The variable that will be used in handler code when retrieving information from the secret. + +### Nested Schema for `target_path` + +Required: + +- `path_on_stage` (String) Path for import on stage, without the leading `/`. +- `stage_location` (String) Stage location without leading `@`. To use your user's stage set this to `~`, otherwise pass fully qualified name of the stage (with every part contained in double quotes or use `snowflake_stage..fully_qualified_name` if you manage this stage through terraform). + + ### Nested Schema for `parameters` diff --git a/docs/resources/procedure_javascript.md b/docs/resources/procedure_javascript.md index a9364db4cf..a8d0ee9db2 100644 --- a/docs/resources/procedure_javascript.md +++ b/docs/resources/procedure_javascript.md @@ -50,6 +50,10 @@ Required: - `arg_data_type` (String) The argument type. - `arg_name` (String) The argument name. +Optional: + +- `arg_default_value` (String) Optional default value for the argument. For text values use single quotes. Numeric values can be unquoted. External changes for this field won't be detected. In case you want to apply external changes, you can re-create the resource manually using "terraform taint". 
+ ### Nested Schema for `parameters` diff --git a/docs/resources/procedure_python.md b/docs/resources/procedure_python.md index a28cf0d0b5..8761764754 100644 --- a/docs/resources/procedure_python.md +++ b/docs/resources/procedure_python.md @@ -19,7 +19,6 @@ Resource used to manage python procedure objects. For more information, check [p - `database` (String) The database in which to create the procedure. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"`. - `handler` (String) Use the name of the stored procedure’s function or method. This can differ depending on whether the code is in-line or referenced at a stage. When the code is in-line, you can specify just the function name. When the code is imported from a stage, specify the fully-qualified handler function name as `.`. - `name` (String) The name of the procedure; the identifier does not need to be unique for the schema in which the procedure is created because stored procedures are [identified and resolved by the combination of the name and argument types](https://docs.snowflake.com/en/developer-guide/udf-stored-procedure-naming-conventions.html#label-procedure-function-name-overloading). Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"`. -- `procedure_definition` (String) Defines the code executed by the stored procedure. The definition can consist of any valid code. Wrapping `$$` signs are added by the provider automatically; do not include them. The `procedure_definition` value must be Python source code. 
For more information, see [Python (using Snowpark)](https://docs.snowflake.com/en/developer-guide/stored-procedure/python/procedure-python-overview). To mitigate permadiff on this field, the provider replaces blank characters with a space. This can lead to false positives in cases where a change in case or run of whitespace is semantically significant. - `return_type` (String) Specifies the type of the result returned by the stored procedure. For ``, use the Snowflake data type that corresponds to the type of the language that you are using (see [SQL-Python Data Type Mappings](https://docs.snowflake.com/en/developer-guide/udf-stored-procedure-data-type-mapping.html#label-sql-python-data-type-mappings)). For `RETURNS TABLE ( [ col_name col_data_type [ , ... ] ] )`, if you know the Snowflake data types of the columns in the returned table, specify the column names and types. Otherwise (e.g. if you are determining the column types during run time), you can omit the column names and types (i.e. `TABLE ()`). - `runtime_version` (String) The language runtime version to use. Currently, the supported versions are: 3.9, 3.10, and 3.11. - `schema` (String) The schema in which to create the procedure. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"`. @@ -32,12 +31,13 @@ Resource used to manage python procedure objects. For more information, check [p - `enable_console_output` (Boolean) Enable stdout/stderr fast path logging for anonyous stored procs. This is a public parameter (similar to LOG_LEVEL). For more information, check [ENABLE_CONSOLE_OUTPUT docs](https://docs.snowflake.com/en/sql-reference/parameters#enable-console-output). 
- `execute_as` (String) Specifies whether the stored procedure executes with the privileges of the owner (an “owner’s rights” stored procedure) or with the privileges of the caller (a “caller’s rights” stored procedure). If you execute the statement CREATE PROCEDURE … EXECUTE AS CALLER, then in the future the procedure will execute as a caller’s rights procedure. If you execute CREATE PROCEDURE … EXECUTE AS OWNER, then the procedure will execute as an owner’s rights procedure. For more information, see [Understanding caller’s rights and owner’s rights stored procedures](https://docs.snowflake.com/en/developer-guide/stored-procedure/stored-procedures-rights). Valid values are (case-insensitive): `EXECUTE AS CALLER` | `EXECUTE AS OWNER`. - `external_access_integrations` (Set of String) The names of [external access integrations](https://docs.snowflake.com/en/sql-reference/sql/create-external-access-integration) needed in order for this procedure’s handler code to access external networks. An external access integration specifies [network rules](https://docs.snowflake.com/en/sql-reference/sql/create-network-rule) and [secrets](https://docs.snowflake.com/en/sql-reference/sql/create-secret) that specify external locations and credentials (if any) allowed for use by handler code when making requests of an external network, such as an external REST API. -- `imports` (Set of String) The location (stage), path, and name of the file(s) to import. You must set the IMPORTS clause to include any files that your stored procedure depends on. If you are writing an in-line stored procedure, you can omit this clause, unless your code depends on classes defined outside the stored procedure or resource files. If your stored procedure’s code will be on a stage, you must also include a path to the module file your code is in. The IMPORTS definition cannot reference variables from arguments that are passed into the stored procedure. 
Each file in the IMPORTS clause must have a unique name, even if the files are in different subdirectories or different stages. +- `imports` (Block Set) The location (stage), path, and name of the file(s) to import. You must set the IMPORTS clause to include any files that your stored procedure depends on. If you are writing an in-line stored procedure, you can omit this clause, unless your code depends on classes defined outside the stored procedure or resource files. If your stored procedure’s code will be on a stage, you must also include a path to the module file your code is in. The IMPORTS definition cannot reference variables from arguments that are passed into the stored procedure. Each file in the IMPORTS clause must have a unique name, even if the files are in different subdirectories or different stages. (see [below for nested schema](#nestedblock--imports)) - `is_secure` (String) Specifies that the procedure is secure. For more information about secure procedures, see [Protecting Sensitive Information with Secure UDFs and Stored Procedures](https://docs.snowflake.com/en/developer-guide/secure-udf-procedure). Available options are: "true" or "false". When the value is not set in the configuration the provider will put "default" there which means to use the Snowflake default for this value. - `log_level` (String) LOG_LEVEL to use when filtering events For more information, check [LOG_LEVEL docs](https://docs.snowflake.com/en/sql-reference/parameters#log-level). - `metric_level` (String) METRIC_LEVEL value to control whether to emit metrics to Event Table For more information, check [METRIC_LEVEL docs](https://docs.snowflake.com/en/sql-reference/parameters#metric-level). - `null_input_behavior` (String) Specifies the behavior of the procedure when called with null inputs. Valid values are (case-insensitive): `CALLED ON NULL INPUT` | `RETURNS NULL ON NULL INPUT`. 
- `packages` (Set of String) List of the names of packages deployed in Snowflake that should be included in the handler code’s execution environment. The Snowpark package is required for stored procedures, but is specified in the `snowpark_package` attribute. For more information about Snowpark, see [Snowpark API](https://docs.snowflake.com/en/developer-guide/snowpark/index). +- `procedure_definition` (String) Defines the code executed by the stored procedure. The definition can consist of any valid code. Wrapping `$$` signs are added by the provider automatically; do not include them. The `procedure_definition` value must be Python source code. For more information, see [Python (using Snowpark)](https://docs.snowflake.com/en/developer-guide/stored-procedure/python/procedure-python-overview). To mitigate permadiff on this field, the provider replaces blank characters with a space. This can lead to false positives in cases where a change in case or run of whitespace is semantically significant. - `secrets` (Block Set) Assigns the names of [secrets](https://docs.snowflake.com/en/sql-reference/sql/create-secret) to variables so that you can use the variables to reference the secrets when retrieving information from secrets in handler code. Secrets you specify here must be allowed by the [external access integration](https://docs.snowflake.com/en/sql-reference/sql/create-external-access-integration) specified as a value of this CREATE FUNCTION command’s EXTERNAL_ACCESS_INTEGRATIONS parameter. (see [below for nested schema](#nestedblock--secrets)) - `trace_level` (String) Trace level value to use when generating/filtering trace events For more information, check [TRACE_LEVEL docs](https://docs.snowflake.com/en/sql-reference/parameters#trace-level). @@ -57,6 +57,19 @@ Required: - `arg_data_type` (String) The argument type. - `arg_name` (String) The argument name. +Optional: + +- `arg_default_value` (String) Optional default value for the argument. 
For text values use single quotes. Numeric values can be unquoted. External changes for this field won't be detected. In case you want to apply external changes, you can re-create the resource manually using "terraform taint". + + + +### Nested Schema for `imports` + +Required: + +- `path_on_stage` (String) Path for import on stage, without the leading `/`. +- `stage_location` (String) Stage location without leading `@`. To use your user's stage set this to `~`, otherwise pass fully qualified name of the stage (with every part contained in double quotes or use `snowflake_stage..fully_qualified_name` if you manage this stage through terraform). + ### Nested Schema for `secrets` diff --git a/docs/resources/procedure_scala.md b/docs/resources/procedure_scala.md index 692fb569b1..ef76be8b1d 100644 --- a/docs/resources/procedure_scala.md +++ b/docs/resources/procedure_scala.md @@ -19,7 +19,6 @@ Resource used to manage scala procedure objects. For more information, check [pr - `database` (String) The database in which to create the procedure. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"`. - `handler` (String) Use the fully qualified name of the method or function for the stored procedure. This is typically in the following form: `com.my_company.my_package.MyClass.myMethod` where `com.my_company.my_package` corresponds to the package containing the object or class: `package com.my_company.my_package;`. 
- `name` (String) The name of the procedure; the identifier does not need to be unique for the schema in which the procedure is created because stored procedures are [identified and resolved by the combination of the name and argument types](https://docs.snowflake.com/en/developer-guide/udf-stored-procedure-naming-conventions.html#label-procedure-function-name-overloading). Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"`. -- `procedure_definition` (String) Defines the code executed by the stored procedure. The definition can consist of any valid code. Wrapping `$$` signs are added by the provider automatically; do not include them. The `procedure_definition` value must be Scala source code. For more information, see [Scala (using Snowpark)](https://docs.snowflake.com/en/developer-guide/stored-procedure/stored-procedures-scala). To mitigate permadiff on this field, the provider replaces blank characters with a space. This can lead to false positives in cases where a change in case or run of whitespace is semantically significant. - `return_type` (String) Specifies the type of the result returned by the stored procedure. For ``, use the Snowflake data type that corresponds to the type of the language that you are using (see [SQL-Scala Data Type Mappings](https://docs.snowflake.com/en/developer-guide/udf-stored-procedure-data-type-mapping.html#label-sql-types-to-scala-types)). For `RETURNS TABLE ( [ col_name col_data_type [ , ... ] ] )`, if you know the Snowflake data types of the columns in the returned table, specify the column names and types. Otherwise (e.g. if you are determining the column types during run time), you can omit the column names and types (i.e. `TABLE ()`). 
- `runtime_version` (String) The language runtime version to use. Currently, the supported versions are: 2.12. - `schema` (String) The schema in which to create the procedure. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"`. @@ -32,14 +31,15 @@ Resource used to manage scala procedure objects. For more information, check [pr - `enable_console_output` (Boolean) Enable stdout/stderr fast path logging for anonyous stored procs. This is a public parameter (similar to LOG_LEVEL). For more information, check [ENABLE_CONSOLE_OUTPUT docs](https://docs.snowflake.com/en/sql-reference/parameters#enable-console-output). - `execute_as` (String) Specifies whether the stored procedure executes with the privileges of the owner (an “owner’s rights” stored procedure) or with the privileges of the caller (a “caller’s rights” stored procedure). If you execute the statement CREATE PROCEDURE … EXECUTE AS CALLER, then in the future the procedure will execute as a caller’s rights procedure. If you execute CREATE PROCEDURE … EXECUTE AS OWNER, then the procedure will execute as an owner’s rights procedure. For more information, see [Understanding caller’s rights and owner’s rights stored procedures](https://docs.snowflake.com/en/developer-guide/stored-procedure/stored-procedures-rights). Valid values are (case-insensitive): `EXECUTE AS CALLER` | `EXECUTE AS OWNER`. - `external_access_integrations` (Set of String) The names of [external access integrations](https://docs.snowflake.com/en/sql-reference/sql/create-external-access-integration) needed in order for this procedure’s handler code to access external networks. 
An external access integration specifies [network rules](https://docs.snowflake.com/en/sql-reference/sql/create-network-rule) and [secrets](https://docs.snowflake.com/en/sql-reference/sql/create-secret) that specify external locations and credentials (if any) allowed for use by handler code when making requests of an external network, such as an external REST API. -- `imports` (Set of String) The location (stage), path, and name of the file(s) to import. You must set the IMPORTS clause to include any files that your stored procedure depends on. If you are writing an in-line stored procedure, you can omit this clause, unless your code depends on classes defined outside the stored procedure or resource files. If you are writing a stored procedure with a staged handler, you must also include a path to the JAR file containing the stored procedure’s handler code. The IMPORTS definition cannot reference variables from arguments that are passed into the stored procedure. Each file in the IMPORTS clause must have a unique name, even if the files are in different subdirectories or different stages. +- `imports` (Block Set) The location (stage), path, and name of the file(s) to import. You must set the IMPORTS clause to include any files that your stored procedure depends on. If you are writing an in-line stored procedure, you can omit this clause, unless your code depends on classes defined outside the stored procedure or resource files. If you are writing a stored procedure with a staged handler, you must also include a path to the JAR file containing the stored procedure’s handler code. The IMPORTS definition cannot reference variables from arguments that are passed into the stored procedure. Each file in the IMPORTS clause must have a unique name, even if the files are in different subdirectories or different stages. (see [below for nested schema](#nestedblock--imports)) - `is_secure` (String) Specifies that the procedure is secure. 
For more information about secure procedures, see [Protecting Sensitive Information with Secure UDFs and Stored Procedures](https://docs.snowflake.com/en/developer-guide/secure-udf-procedure). Available options are: "true" or "false". When the value is not set in the configuration the provider will put "default" there which means to use the Snowflake default for this value. - `log_level` (String) LOG_LEVEL to use when filtering events For more information, check [LOG_LEVEL docs](https://docs.snowflake.com/en/sql-reference/parameters#log-level). - `metric_level` (String) METRIC_LEVEL value to control whether to emit metrics to Event Table For more information, check [METRIC_LEVEL docs](https://docs.snowflake.com/en/sql-reference/parameters#metric-level). - `null_input_behavior` (String) Specifies the behavior of the procedure when called with null inputs. Valid values are (case-insensitive): `CALLED ON NULL INPUT` | `RETURNS NULL ON NULL INPUT`. - `packages` (Set of String) List of the names of packages deployed in Snowflake that should be included in the handler code’s execution environment. The Snowpark package is required for stored procedures, but is specified in the `snowpark_package` attribute. For more information about Snowpark, see [Snowpark API](https://docs.snowflake.com/en/developer-guide/snowpark/index). +- `procedure_definition` (String) Defines the code executed by the stored procedure. The definition can consist of any valid code. Wrapping `$$` signs are added by the provider automatically; do not include them. The `procedure_definition` value must be Scala source code. For more information, see [Scala (using Snowpark)](https://docs.snowflake.com/en/developer-guide/stored-procedure/stored-procedures-scala). To mitigate permadiff on this field, the provider replaces blank characters with a space. This can lead to false positives in cases where a change in case or run of whitespace is semantically significant. 
- `secrets` (Block Set) Assigns the names of [secrets](https://docs.snowflake.com/en/sql-reference/sql/create-secret) to variables so that you can use the variables to reference the secrets when retrieving information from secrets in handler code. Secrets you specify here must be allowed by the [external access integration](https://docs.snowflake.com/en/sql-reference/sql/create-external-access-integration) specified as a value of this CREATE FUNCTION command’s EXTERNAL_ACCESS_INTEGRATIONS parameter. (see [below for nested schema](#nestedblock--secrets)) -- `target_path` (String) Use the fully qualified name of the method or function for the stored procedure. This is typically in the following form: `com.my_company.my_package.MyClass.myMethod` where `com.my_company.my_package` corresponds to the package containing the object or class: `package com.my_company.my_package;`. +- `target_path` (Block Set, Max: 1) Use the fully qualified name of the method or function for the stored procedure. This is typically in the following form: `com.my_company.my_package.MyClass.myMethod` where `com.my_company.my_package` corresponds to the package containing the object or class: `package com.my_company.my_package;`. (see [below for nested schema](#nestedblock--target_path)) - `trace_level` (String) Trace level value to use when generating/filtering trace events For more information, check [TRACE_LEVEL docs](https://docs.snowflake.com/en/sql-reference/parameters#trace-level). ### Read-Only @@ -58,6 +58,19 @@ Required: - `arg_data_type` (String) The argument type. - `arg_name` (String) The argument name. +Optional: + +- `arg_default_value` (String) Optional default value for the argument. For text values use single quotes. Numeric values can be unquoted. External changes for this field won't be detected. In case you want to apply external changes, you can re-create the resource manually using "terraform taint". 
+ + + +### Nested Schema for `imports` + +Required: + +- `path_on_stage` (String) Path for import on stage, without the leading `/`. +- `stage_location` (String) Stage location without leading `@`. To use your user's stage set this to `~`, otherwise pass fully qualified name of the stage (with every part contained in double quotes or use `snowflake_stage..fully_qualified_name` if you manage this stage through terraform). + ### Nested Schema for `secrets` @@ -68,6 +81,15 @@ Required: - `secret_variable_name` (String) The variable that will be used in handler code when retrieving information from the secret. + +### Nested Schema for `target_path` + +Required: + +- `path_on_stage` (String) Path for import on stage, without the leading `/`. +- `stage_location` (String) Stage location without leading `@`. To use your user's stage set this to `~`, otherwise pass fully qualified name of the stage (with every part contained in double quotes or use `snowflake_stage..fully_qualified_name` if you manage this stage through terraform). + + ### Nested Schema for `parameters` diff --git a/docs/resources/procedure_sql.md b/docs/resources/procedure_sql.md index 2533380779..3dcc0fefb7 100644 --- a/docs/resources/procedure_sql.md +++ b/docs/resources/procedure_sql.md @@ -50,6 +50,10 @@ Required: - `arg_data_type` (String) The argument type. - `arg_name` (String) The argument name. +Optional: + +- `arg_default_value` (String) Optional default value for the argument. For text values use single quotes. Numeric values can be unquoted. External changes for this field won't be detected. In case you want to apply external changes, you can re-create the resource manually using "terraform taint". 
+ ### Nested Schema for `parameters` diff --git a/pkg/acceptance/bettertestspoc/assert/objectassert/procedure_describe_snowflake_ext.go b/pkg/acceptance/bettertestspoc/assert/objectassert/procedure_describe_snowflake_ext.go index 64011d14f9..2319b30f7a 100644 --- a/pkg/acceptance/bettertestspoc/assert/objectassert/procedure_describe_snowflake_ext.go +++ b/pkg/acceptance/bettertestspoc/assert/objectassert/procedure_describe_snowflake_ext.go @@ -6,10 +6,12 @@ import ( "testing" acc "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance" + assert2 "github.com/stretchr/testify/assert" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/bettertestspoc/assert" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/internal/collections" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk/datatypes" ) // TODO [SNOW-1501905]: this file should be fully regenerated when adding and option to assert the results of describe @@ -391,3 +393,70 @@ func (f *ProcedureDetailsAssert) HasExactlySecrets(expectedSecrets map[string]sd }) return f } + +func (f *ProcedureDetailsAssert) HasExactlyImportsNormalizedInAnyOrder(imports ...sdk.NormalizedPath) *ProcedureDetailsAssert { + f.AddAssertion(func(t *testing.T, o *sdk.ProcedureDetails) error { + t.Helper() + if o.NormalizedImports == nil { + return fmt.Errorf("expected imports to have value; got: nil") + } + if !assert2.ElementsMatch(t, imports, o.NormalizedImports) { + return fmt.Errorf("expected %v imports in task relations, got %v", imports, o.NormalizedImports) + } + return nil + }) + return f +} + +func (f *ProcedureDetailsAssert) HasNormalizedTargetPath(expectedStageLocation string, expectedPathOnStage string) *ProcedureDetailsAssert { + f.AddAssertion(func(t *testing.T, o *sdk.ProcedureDetails) error { + t.Helper() + if o.NormalizedTargetPath == nil { + return fmt.Errorf("expected normalized target path to have 
value; got: nil") + } + if o.NormalizedTargetPath.StageLocation != expectedStageLocation { + return fmt.Errorf("expected %s stage location for target path, got %v", expectedStageLocation, o.NormalizedTargetPath.StageLocation) + } + if o.NormalizedTargetPath.PathOnStage != expectedPathOnStage { + return fmt.Errorf("expected %s path on stage for target path, got %v", expectedPathOnStage, o.NormalizedTargetPath.PathOnStage) + } + return nil + }) + return f +} + +func (f *ProcedureDetailsAssert) HasNormalizedTargetPathNil() *ProcedureDetailsAssert { + f.AddAssertion(func(t *testing.T, o *sdk.ProcedureDetails) error { + t.Helper() + if o.NormalizedTargetPath != nil { + return fmt.Errorf("expected normalized target path to be nil, got: %s", *o.NormalizedTargetPath) + } + return nil + }) + return f +} + +func (f *ProcedureDetailsAssert) HasReturnDataType(expectedDataType datatypes.DataType) *ProcedureDetailsAssert { + f.AddAssertion(func(t *testing.T, o *sdk.ProcedureDetails) error { + t.Helper() + if o.ReturnDataType == nil { + return fmt.Errorf("expected return data type to have value; got: nil") + } + if !datatypes.AreTheSame(o.ReturnDataType, expectedDataType) { + return fmt.Errorf("expected %s return data type, got %v", expectedDataType, o.ReturnDataType.ToSql()) + } + return nil + }) + return f +} + +func (f *ProcedureDetailsAssert) HasReturnNotNull(expected bool) *ProcedureDetailsAssert { + f.AddAssertion(func(t *testing.T, o *sdk.ProcedureDetails) error { + t.Helper() + if o.ReturnNotNull != expected { + return fmt.Errorf("expected return not null %t; got: %t", expected, o.ReturnNotNull) + } + return nil + }) + return f +} diff --git a/pkg/acceptance/bettertestspoc/assert/resourceassert/procedure_java_resource_ext.go b/pkg/acceptance/bettertestspoc/assert/resourceassert/procedure_java_resource_ext.go new file mode 100644 index 0000000000..85de853dbe --- /dev/null +++ b/pkg/acceptance/bettertestspoc/assert/resourceassert/procedure_java_resource_ext.go @@ -0,0 +1,17 
@@ +package resourceassert + +import ( + "strconv" + + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/bettertestspoc/assert" +) + +func (f *ProcedureJavaResourceAssert) HasImportsLength(len int) *ProcedureJavaResourceAssert { + f.AddAssertion(assert.ValueSet("imports.#", strconv.FormatInt(int64(len), 10))) + return f +} + +func (f *ProcedureJavaResourceAssert) HasTargetPathEmpty() *ProcedureJavaResourceAssert { + f.AddAssertion(assert.ValueSet("target_path.#", "0")) + return f +} diff --git a/pkg/acceptance/bettertestspoc/assert/resourceparametersassert/procedure_resource_parameters_ext.go b/pkg/acceptance/bettertestspoc/assert/resourceparametersassert/procedure_resource_parameters_ext.go new file mode 100644 index 0000000000..e7090a0661 --- /dev/null +++ b/pkg/acceptance/bettertestspoc/assert/resourceparametersassert/procedure_resource_parameters_ext.go @@ -0,0 +1,13 @@ +package resourceparametersassert + +import ( + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk" +) + +func (f *ProcedureResourceParametersAssert) HasAllDefaults() *ProcedureResourceParametersAssert { + return f. + HasEnableConsoleOutput(false). + HasLogLevel(sdk.LogLevelOff). + HasMetricLevel(sdk.MetricLevelNone). 
+ HasTraceLevel(sdk.TraceLevelOff) +} diff --git a/pkg/acceptance/bettertestspoc/config/model/procedure_java_model_ext.go b/pkg/acceptance/bettertestspoc/config/model/procedure_java_model_ext.go index 1fa425aa28..cb6779784c 100644 --- a/pkg/acceptance/bettertestspoc/config/model/procedure_java_model_ext.go +++ b/pkg/acceptance/bettertestspoc/config/model/procedure_java_model_ext.go @@ -2,6 +2,11 @@ package model import ( "encoding/json" + + tfconfig "github.com/hashicorp/terraform-plugin-testing/config" + + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk/datatypes" ) func (f *ProcedureJavaModel) MarshalJSON() ([]byte, error) { @@ -14,3 +19,71 @@ func (f *ProcedureJavaModel) MarshalJSON() ([]byte, error) { DependsOn: f.DependsOn(), }) } + +func ProcedureJavaBasicInline( + resourceName string, + id sdk.SchemaObjectIdentifierWithArguments, + returnType datatypes.DataType, + handler string, + procedureDefinition string, +) *ProcedureJavaModel { + return ProcedureJava(resourceName, id.DatabaseName(), handler, id.Name(), returnType.ToSql(), "11", id.SchemaName(), "1.14.0"). + WithProcedureDefinition(procedureDefinition) +} + +func ProcedureJavaBasicStaged( + resourceName string, + id sdk.SchemaObjectIdentifierWithArguments, + returnType datatypes.DataType, + handler string, + stageLocation string, + pathOnStage string, +) *ProcedureJavaModel { + return ProcedureJava(resourceName, id.DatabaseName(), handler, id.Name(), returnType.ToSql(), "11", id.SchemaName(), "1.14.0"). 
+ WithImport(stageLocation, pathOnStage) +} + +func (f *ProcedureJavaModel) WithArgument(argName string, argDataType datatypes.DataType) *ProcedureJavaModel { + return f.WithArgumentsValue( + tfconfig.ObjectVariable( + map[string]tfconfig.Variable{ + "arg_name": tfconfig.StringVariable(argName), + "arg_data_type": tfconfig.StringVariable(argDataType.ToSql()), + }, + ), + ) +} + +func (f *ProcedureJavaModel) WithArgumentWithDefaultValue(argName string, argDataType datatypes.DataType, value string) *ProcedureJavaModel { + return f.WithArgumentsValue( + tfconfig.ObjectVariable( + map[string]tfconfig.Variable{ + "arg_name": tfconfig.StringVariable(argName), + "arg_data_type": tfconfig.StringVariable(argDataType.ToSql()), + "arg_default_value": tfconfig.StringVariable(value), + }, + ), + ) +} + +func (f *ProcedureJavaModel) WithImport(stageLocation string, pathOnStage string) *ProcedureJavaModel { + return f.WithImportsValue( + tfconfig.ObjectVariable( + map[string]tfconfig.Variable{ + "stage_location": tfconfig.StringVariable(stageLocation), + "path_on_stage": tfconfig.StringVariable(pathOnStage), + }, + ), + ) +} + +func (f *ProcedureJavaModel) WithTargetPathParts(stageLocation string, pathOnStage string) *ProcedureJavaModel { + return f.WithTargetPathValue( + tfconfig.ObjectVariable( + map[string]tfconfig.Variable{ + "stage_location": tfconfig.StringVariable(stageLocation), + "path_on_stage": tfconfig.StringVariable(pathOnStage), + }, + ), + ) +} diff --git a/pkg/acceptance/bettertestspoc/config/model/procedure_java_model_gen.go b/pkg/acceptance/bettertestspoc/config/model/procedure_java_model_gen.go index 5be880ae22..ed2be8286e 100644 --- a/pkg/acceptance/bettertestspoc/config/model/procedure_java_model_gen.go +++ b/pkg/acceptance/bettertestspoc/config/model/procedure_java_model_gen.go @@ -47,7 +47,6 @@ func ProcedureJava( database string, handler string, name string, - procedureDefinition string, returnType string, runtimeVersion string, schema string, @@ -57,7 
+56,6 @@ func ProcedureJava( p.WithDatabase(database) p.WithHandler(handler) p.WithName(name) - p.WithProcedureDefinition(procedureDefinition) p.WithReturnType(returnType) p.WithRuntimeVersion(runtimeVersion) p.WithSchema(schema) @@ -69,7 +67,6 @@ func ProcedureJavaWithDefaultMeta( database string, handler string, name string, - procedureDefinition string, returnType string, runtimeVersion string, schema string, @@ -79,7 +76,6 @@ func ProcedureJavaWithDefaultMeta( p.WithDatabase(database) p.WithHandler(handler) p.WithName(name) - p.WithProcedureDefinition(procedureDefinition) p.WithReturnType(returnType) p.WithRuntimeVersion(runtimeVersion) p.WithSchema(schema) @@ -186,10 +182,7 @@ func (p *ProcedureJavaModel) WithSnowparkPackage(snowparkPackage string) *Proced return p } -func (p *ProcedureJavaModel) WithTargetPath(targetPath string) *ProcedureJavaModel { - p.TargetPath = tfconfig.StringVariable(targetPath) - return p -} +// target_path attribute type is not yet supported, so WithTargetPath can't be generated func (p *ProcedureJavaModel) WithTraceLevel(traceLevel string) *ProcedureJavaModel { p.TraceLevel = tfconfig.StringVariable(traceLevel) diff --git a/pkg/acceptance/bettertestspoc/config/model/procedure_python_model_gen.go b/pkg/acceptance/bettertestspoc/config/model/procedure_python_model_gen.go index dfe2801f00..c69dcab167 100644 --- a/pkg/acceptance/bettertestspoc/config/model/procedure_python_model_gen.go +++ b/pkg/acceptance/bettertestspoc/config/model/procedure_python_model_gen.go @@ -46,7 +46,6 @@ func ProcedurePython( database string, handler string, name string, - procedureDefinition string, returnType string, runtimeVersion string, schema string, @@ -56,7 +55,6 @@ func ProcedurePython( p.WithDatabase(database) p.WithHandler(handler) p.WithName(name) - p.WithProcedureDefinition(procedureDefinition) p.WithReturnType(returnType) p.WithRuntimeVersion(runtimeVersion) p.WithSchema(schema) @@ -68,7 +66,6 @@ func ProcedurePythonWithDefaultMeta( database 
string, handler string, name string, - procedureDefinition string, returnType string, runtimeVersion string, schema string, @@ -78,7 +75,6 @@ func ProcedurePythonWithDefaultMeta( p.WithDatabase(database) p.WithHandler(handler) p.WithName(name) - p.WithProcedureDefinition(procedureDefinition) p.WithReturnType(returnType) p.WithRuntimeVersion(runtimeVersion) p.WithSchema(schema) diff --git a/pkg/acceptance/bettertestspoc/config/model/procedure_scala_model_gen.go b/pkg/acceptance/bettertestspoc/config/model/procedure_scala_model_gen.go index 01ff2f1107..9df0441308 100644 --- a/pkg/acceptance/bettertestspoc/config/model/procedure_scala_model_gen.go +++ b/pkg/acceptance/bettertestspoc/config/model/procedure_scala_model_gen.go @@ -47,7 +47,6 @@ func ProcedureScala( database string, handler string, name string, - procedureDefinition string, returnType string, runtimeVersion string, schema string, @@ -57,7 +56,6 @@ func ProcedureScala( p.WithDatabase(database) p.WithHandler(handler) p.WithName(name) - p.WithProcedureDefinition(procedureDefinition) p.WithReturnType(returnType) p.WithRuntimeVersion(runtimeVersion) p.WithSchema(schema) @@ -69,7 +67,6 @@ func ProcedureScalaWithDefaultMeta( database string, handler string, name string, - procedureDefinition string, returnType string, runtimeVersion string, schema string, @@ -79,7 +76,6 @@ func ProcedureScalaWithDefaultMeta( p.WithDatabase(database) p.WithHandler(handler) p.WithName(name) - p.WithProcedureDefinition(procedureDefinition) p.WithReturnType(returnType) p.WithRuntimeVersion(runtimeVersion) p.WithSchema(schema) @@ -186,10 +182,7 @@ func (p *ProcedureScalaModel) WithSnowparkPackage(snowparkPackage string) *Proce return p } -func (p *ProcedureScalaModel) WithTargetPath(targetPath string) *ProcedureScalaModel { - p.TargetPath = tfconfig.StringVariable(targetPath) - return p -} +// target_path attribute type is not yet supported, so WithTargetPath can't be generated func (p *ProcedureScalaModel) WithTraceLevel(traceLevel 
string) *ProcedureScalaModel { p.TraceLevel = tfconfig.StringVariable(traceLevel) diff --git a/pkg/acceptance/helpers/procedure_client.go b/pkg/acceptance/helpers/procedure_client.go index 019d5f9299..7e77e37782 100644 --- a/pkg/acceptance/helpers/procedure_client.go +++ b/pkg/acceptance/helpers/procedure_client.go @@ -87,6 +87,29 @@ func (c *ProcedureClient) CreateJava(t *testing.T) (*sdk.Procedure, func()) { return function, c.DropProcedureFunc(t, id) } +func (c *ProcedureClient) CreateScalaStaged(t *testing.T, id sdk.SchemaObjectIdentifierWithArguments, dataType datatypes.DataType, importPath string, handler string) (*sdk.Procedure, func()) { + t.Helper() + ctx := context.Background() + + argName := "x" + argument := sdk.NewProcedureArgumentRequest(argName, dataType) + dt := sdk.NewProcedureReturnsResultDataTypeRequest(dataType) + returns := sdk.NewProcedureReturnsRequest().WithResultDataType(*dt) + packages := []sdk.ProcedurePackageRequest{*sdk.NewProcedurePackageRequest("com.snowflake:snowpark:1.14.0")} + + request := sdk.NewCreateForScalaProcedureRequest(id.SchemaObjectId(), *returns, "2.12", packages, handler). + WithArguments([]sdk.ProcedureArgumentRequest{*argument}). 
+ WithImports([]sdk.ProcedureImportRequest{*sdk.NewProcedureImportRequest(importPath)}) + + err := c.client().CreateForScala(ctx, request) + require.NoError(t, err) + + function, err := c.client().ShowByID(ctx, id) + require.NoError(t, err) + + return function, c.DropProcedureFunc(t, id) +} + func (c *ProcedureClient) Create(t *testing.T, arguments ...sdk.DataType) *sdk.Procedure { t.Helper() return c.CreateWithIdentifier(t, c.ids.RandomSchemaObjectIdentifierWithArguments(arguments...)) @@ -156,6 +179,19 @@ func (c *ProcedureClient) SampleJavaDefinition(t *testing.T, className string, f `, className, funcName, argName) } +func (c *ProcedureClient) SampleJavaDefinitionNoArgs(t *testing.T, className string, funcName string) string { + t.Helper() + + return fmt.Sprintf(` + import com.snowflake.snowpark_java.*; + class %[1]s { + public static String %[2]s(Session session) { + return "hello"; + } + } +`, className, funcName) +} + // For more references: https://docs.snowflake.com/en/developer-guide/stored-procedure/stored-procedures-javascript func (c *ProcedureClient) SampleJavascriptDefinition(t *testing.T, argName string) string { t.Helper() diff --git a/pkg/resources/function_and_procedure_commons.go b/pkg/resources/function_and_procedure_commons.go new file mode 100644 index 0000000000..e21801813c --- /dev/null +++ b/pkg/resources/function_and_procedure_commons.go @@ -0,0 +1,54 @@ +package resources + +import ( + "fmt" + + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/internal/collections" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk" + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" +) + +func readFunctionOrProcedureArguments(d *schema.ResourceData, args []sdk.NormalizedArgument) error { + if len(args) == 0 { + // TODO [SNOW-1348103]: handle empty list + return nil + } + // We do it the unusual way because the default values are not returned by SF. + // We update what we have - leaving the defaults unchanged. 
+	if currentArgs, ok := d.Get("arguments").([]map[string]any); !ok { + return fmt.Errorf("arguments must be a list") + } else { + for i, arg := range args { + currentArgs[i]["arg_name"] = arg.Name + currentArgs[i]["arg_data_type"] = arg.DataType.ToSql() + } + return d.Set("arguments", currentArgs) + } +} + +func readFunctionOrProcedureImports(d *schema.ResourceData, imports []sdk.NormalizedPath) error { + if len(imports) == 0 { + // don't do anything if imports not present + return nil + } + imps := collections.Map(imports, func(imp sdk.NormalizedPath) map[string]any { + return map[string]any{ + "stage_location": imp.StageLocation, + "path_on_stage": imp.PathOnStage, + } + }) + return d.Set("imports", imps) +} + +func readFunctionOrProcedureTargetPath(d *schema.ResourceData, normalizedPath *sdk.NormalizedPath) error { + if normalizedPath == nil { + // don't do anything if target path not present + return nil + } + tp := make([]map[string]any, 1) + tp[0] = map[string]any{ + "stage_location": normalizedPath.StageLocation, + "path_on_stage": normalizedPath.PathOnStage, + } + return d.Set("target_path", tp) +} diff --git a/pkg/resources/function_commons.go b/pkg/resources/function_commons.go index ea005da2c2..fe5a097a45 100644 --- a/pkg/resources/function_commons.go +++ b/pkg/resources/function_commons.go @@ -7,7 +7,6 @@ import ( "log" "slices" - "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/internal/collections" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/internal/provider" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/schemas" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk" @@ -241,7 +240,6 @@ func functionBaseSchema() map[string]schema.Schema { ForceNew: true, Description: "List of the arguments for the function. 
Consult the [docs](https://docs.snowflake.com/en/sql-reference/sql/create-function#all-languages) for more details.", }, - // TODO [SNOW-1348103]: for now, the proposal is to leave return type as string, add TABLE to data types, and here always parse (easier handling and diff suppression) "return_type": { Type: schema.TypeString, Required: true, @@ -408,6 +406,7 @@ func DeleteFunction(ctx context.Context, d *schema.ResourceData, meta any) diag. return nil } +// TODO [SNOW-1850370]: Make the rest of the functions in this file generic (for reuse with procedures) func parseFunctionArgumentsCommon(d *schema.ResourceData) ([]sdk.FunctionArgumentRequest, error) { args := make([]sdk.FunctionArgumentRequest, 0) if v, ok := d.GetOk("arguments"); ok { @@ -494,7 +493,7 @@ func setFunctionTargetPathInBuilder[T any](d *schema.ResourceData, setTargetPath return nil } -func queryAllFunctionsDetailsCommon(ctx context.Context, d *schema.ResourceData, client *sdk.Client, id sdk.SchemaObjectIdentifierWithArguments) (*allFunctionDetailsCommon, diag.Diagnostics) { +func queryAllFunctionDetailsCommon(ctx context.Context, d *schema.ResourceData, client *sdk.Client, id sdk.SchemaObjectIdentifierWithArguments) (*allFunctionDetailsCommon, diag.Diagnostics) { functionDetails, err := client.Functions.DescribeDetails(ctx, id) if err != nil { if errors.Is(err, sdk.ErrObjectNotExistOrAuthorized) { @@ -540,48 +539,3 @@ type allFunctionDetailsCommon struct { functionDetails *sdk.FunctionDetails functionParameters []*sdk.Parameter } - -func readFunctionArgumentsCommon(d *schema.ResourceData, args []sdk.NormalizedArgument) error { - if len(args) == 0 { - // TODO [SNOW-1348103]: handle empty list - return nil - } - // We do it the unusual way because the default values are not returned by SF. - // We update what we have - leaving the defaults unchanged. 
- if currentArgs, ok := d.Get("arguments").([]map[string]any); !ok { - return fmt.Errorf("arguments must be a list") - } else { - for i, arg := range args { - currentArgs[i]["arg_name"] = arg.Name - currentArgs[i]["arg_data_type"] = arg.DataType.ToSql() - } - return d.Set("arguments", currentArgs) - } -} - -func readFunctionImportsCommon(d *schema.ResourceData, imports []sdk.NormalizedPath) error { - if len(imports) == 0 { - // don't do anything if imports not present - return nil - } - imps := collections.Map(imports, func(imp sdk.NormalizedPath) map[string]any { - return map[string]any{ - "stage_location": imp.StageLocation, - "path_on_stage": imp.PathOnStage, - } - }) - return d.Set("imports", imps) -} - -func readFunctionTargetPathCommon(d *schema.ResourceData, normalizedPath *sdk.NormalizedPath) error { - if normalizedPath == nil { - // don't do anything if imports not present - return nil - } - tp := make([]map[string]any, 1) - tp[0] = map[string]any{ - "stage_location": normalizedPath.StageLocation, - "path_on_stage": normalizedPath.PathOnStage, - } - return d.Set("target_path", tp) -} diff --git a/pkg/resources/function_java.go b/pkg/resources/function_java.go index b1e60da7cf..c8fca3c13f 100644 --- a/pkg/resources/function_java.go +++ b/pkg/resources/function_java.go @@ -35,7 +35,7 @@ func FunctionJava() *schema.Resource { // The language check is more for the future. // Currently, almost all attributes are marked as forceNew. // When language changes, these attributes also change, causing the object to recreate either way. - // The only potential option is java staged -> scala staged (however scala need runtime_version which may interfere). + // The only potential option is java staged <-> scala staged (however scala need runtime_version which may interfere). 
RecreateWhenResourceStringFieldChangedExternally("function_language", "JAVA"), )), @@ -112,7 +112,7 @@ func ReadContextFunctionJava(ctx context.Context, d *schema.ResourceData, meta a return diag.FromErr(err) } - allFunctionDetails, diags := queryAllFunctionsDetailsCommon(ctx, d, client, id) + allFunctionDetails, diags := queryAllFunctionDetailsCommon(ctx, d, client, id) if diags != nil { return diags } @@ -123,18 +123,18 @@ func ReadContextFunctionJava(ctx context.Context, d *schema.ResourceData, meta a errs := errors.Join( // TODO [SNOW-1348103]: set the rest of the fields // not reading is_secure on purpose (handled as external change to show output) - readFunctionArgumentsCommon(d, allFunctionDetails.functionDetails.NormalizedArguments), + readFunctionOrProcedureArguments(d, allFunctionDetails.functionDetails.NormalizedArguments), d.Set("return_type", allFunctionDetails.functionDetails.ReturnDataType.ToSql()), // not reading null_input_behavior on purpose (handled as external change to show output) // not reading return_results_behavior on purpose (handled as external change to show output) setOptionalFromStringPtr(d, "runtime_version", allFunctionDetails.functionDetails.RuntimeVersion), // comment - readFunctionImportsCommon(d, allFunctionDetails.functionDetails.NormalizedImports), + readFunctionOrProcedureImports(d, allFunctionDetails.functionDetails.NormalizedImports), // packages setRequiredFromStringPtr(d, "handler", allFunctionDetails.functionDetails.Handler), // external_access_integrations // secrets - readFunctionTargetPathCommon(d, allFunctionDetails.functionDetails.NormalizedTargetPath), + readFunctionOrProcedureTargetPath(d, allFunctionDetails.functionDetails.NormalizedTargetPath), setOptionalFromStringPtr(d, "function_definition", allFunctionDetails.functionDetails.Body), d.Set("function_language", allFunctionDetails.functionDetails.Language), diff --git a/pkg/resources/procedure_commons.go b/pkg/resources/procedure_commons.go index 
163a33da0f..759f44f878 100644 --- a/pkg/resources/procedure_commons.go +++ b/pkg/resources/procedure_commons.go @@ -2,12 +2,15 @@ package resources import ( "context" + "errors" "fmt" + "log" "slices" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/internal/provider" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/schemas" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk/datatypes" "github.com/hashicorp/terraform-plugin-sdk/v2/diag" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" ) @@ -23,6 +26,7 @@ func init() { type procedureSchemaDef struct { additionalArguments []string procedureDefinitionDescription string + procedureDefinitionRequired bool returnTypeLinkName string returnTypeLinkUrl string runtimeVersionDescription string @@ -41,6 +45,11 @@ func setUpProcedureSchema(definition procedureSchemaDef) map[string]*schema.Sche } if v, ok := currentSchema["procedure_definition"]; ok && v != nil { v.Description = diffSuppressStatementFieldDescription(definition.procedureDefinitionDescription) + if definition.procedureDefinitionRequired { + v.Required = true + } else { + v.Optional = true + } } if v, ok := currentSchema["return_type"]; ok && v != nil { v.Description = procedureReturnsTemplate(definition.returnTypeLinkName, definition.returnTypeLinkUrl) @@ -109,6 +118,7 @@ var ( returnTypeLinkName: "SQL and JavaScript data type mapping", returnTypeLinkUrl: "https://docs.snowflake.com/en/developer-guide/stored-procedure/stored-procedures-javascript.html#label-stored-procedure-data-type-mapping", procedureDefinitionDescription: procedureDefinitionTemplate("JavaScript", "JavaScript", "https://docs.snowflake.com/en/developer-guide/stored-procedure/stored-procedures-javascript"), + procedureDefinitionRequired: true, } pythonProcedureSchemaDefinition = procedureSchemaDef{ additionalArguments: []string{ @@ -149,6 +159,7 @@ var ( sqlProcedureSchemaDefinition = 
procedureSchemaDef{ additionalArguments: []string{}, procedureDefinitionDescription: procedureDefinitionTemplate("SQL", "Snowflake Scripting", "https://docs.snowflake.com/en/developer-guide/snowflake-scripting/index"), + procedureDefinitionRequired: true, returnTypeLinkName: "SQL data type", returnTypeLinkUrl: "https://docs.snowflake.com/en/sql-reference-data-types", } @@ -212,13 +223,17 @@ func procedureBaseSchema() map[string]schema.Schema { DiffSuppressFunc: DiffSuppressDataTypes, Description: "The argument type.", }, + "arg_default_value": { + Type: schema.TypeString, + Optional: true, + Description: externalChangesNotDetectedFieldDescription("Optional default value for the argument. For text values use single quotes. Numeric values can be unquoted."), + }, }, }, Optional: true, ForceNew: true, Description: "List of the arguments for the procedure. Consult the [docs](https://docs.snowflake.com/en/sql-reference/sql/create-procedure#all-languages) for more details.", }, - // TODO [SNOW-1348103]: for now, the proposal is to leave return type as string, add TABLE to data types, and here always parse (easier handling and diff suppression) "return_type": { Type: schema.TypeString, Required: true, @@ -231,7 +246,7 @@ func procedureBaseSchema() map[string]schema.Schema { Optional: true, ForceNew: true, ValidateDiagFunc: sdkValidation(sdk.ToNullInputBehavior), - DiffSuppressFunc: SuppressIfAny(NormalizeAndCompare(sdk.ToNullInputBehavior), IgnoreChangeToCurrentSnowflakeValueInShow("null_input_behavior")), + DiffSuppressFunc: SuppressIfAny(NormalizeAndCompare(sdk.ToNullInputBehavior)), // IgnoreChangeToCurrentSnowflakeValueInShow("null_input_behavior")), Description: fmt.Sprintf("Specifies the behavior of the procedure when called with null inputs. 
Valid values are (case-insensitive): %s.", possibleValuesListed(sdk.AllAllowedNullInputBehaviors)), }, // "return_behavior" removed because it is deprecated in the docs: https://docs.snowflake.com/en/sql-reference/sql/create-procedure#id1 @@ -249,9 +264,22 @@ func procedureBaseSchema() map[string]schema.Schema { }, "imports": { Type: schema.TypeSet, - Elem: &schema.Schema{Type: schema.TypeString}, Optional: true, ForceNew: true, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "stage_location": { + Type: schema.TypeString, + Required: true, + Description: "Stage location without leading `@`. To use your user's stage set this to `~`, otherwise pass fully qualified name of the stage (with every part contained in double quotes or use `snowflake_stage..fully_qualified_name` if you manage this stage through terraform).", + }, + "path_on_stage": { + Type: schema.TypeString, + Required: true, + Description: "Path for import on stage, without the leading `/`.", + }, + }, + }, }, "snowpark_package": { Type: schema.TypeString, @@ -303,9 +331,24 @@ func procedureBaseSchema() map[string]schema.Schema { Description: "Assigns the names of [secrets](https://docs.snowflake.com/en/sql-reference/sql/create-secret) to variables so that you can use the variables to reference the secrets when retrieving information from secrets in handler code. Secrets you specify here must be allowed by the [external access integration](https://docs.snowflake.com/en/sql-reference/sql/create-external-access-integration) specified as a value of this CREATE FUNCTION command’s EXTERNAL_ACCESS_INTEGRATIONS parameter.", }, "target_path": { - Type: schema.TypeString, + Type: schema.TypeSet, + MaxItems: 1, Optional: true, ForceNew: true, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "stage_location": { + Type: schema.TypeString, + Required: true, + Description: "Stage location without leading `@`. 
To use your user's stage set this to `~`, otherwise pass fully qualified name of the stage (with every part contained in double quotes or use `snowflake_stage..fully_qualified_name` if you manage this stage through terraform).", + }, + "path_on_stage": { + Type: schema.TypeString, + Required: true, + Description: "Path for import on stage, without the leading `/`.", + }, + }, + }, }, "execute_as": { Type: schema.TypeString, @@ -316,7 +359,6 @@ func procedureBaseSchema() map[string]schema.Schema { }, "procedure_definition": { Type: schema.TypeString, - Required: true, ForceNew: true, DiffSuppressFunc: DiffSuppressStatement, }, @@ -360,3 +402,138 @@ func DeleteProcedure(ctx context.Context, d *schema.ResourceData, meta any) diag d.SetId("") return nil } + +func queryAllProcedureDetailsCommon(ctx context.Context, d *schema.ResourceData, client *sdk.Client, id sdk.SchemaObjectIdentifierWithArguments) (*allProcedureDetailsCommon, diag.Diagnostics) { + procedureDetails, err := client.Procedures.DescribeDetails(ctx, id) + if err != nil { + if errors.Is(err, sdk.ErrObjectNotExistOrAuthorized) { + log.Printf("[DEBUG] procedure (%s) not found or we are not authorized. Err: %s", d.Id(), err) + d.SetId("") + return nil, diag.Diagnostics{ + diag.Diagnostic{ + Severity: diag.Warning, + Summary: "Failed to query procedure. Marking the resource as removed.", + Detail: fmt.Sprintf("Procedure: %s, Err: %s", id.FullyQualifiedName(), err), + }, + } + } + return nil, diag.FromErr(err) + } + procedure, err := client.Procedures.ShowByID(ctx, id) + if err != nil { + if errors.Is(err, sdk.ErrObjectNotFound) { + d.SetId("") + return nil, diag.Diagnostics{ + diag.Diagnostic{ + Severity: diag.Warning, + Summary: "Failed to query procedure. 
Marking the resource as removed.", + Detail: fmt.Sprintf("Procedure: %s, Err: %s", id.FullyQualifiedName(), err), + }, + } + } + return nil, diag.FromErr(err) + } + procedureParameters, err := client.Procedures.ShowParameters(ctx, id) + if err != nil { + return nil, diag.FromErr(err) + } + return &allProcedureDetailsCommon{ + procedure: procedure, + procedureDetails: procedureDetails, + procedureParameters: procedureParameters, + }, nil +} + +type allProcedureDetailsCommon struct { + procedure *sdk.Procedure + procedureDetails *sdk.ProcedureDetails + procedureParameters []*sdk.Parameter +} + +// TODO [SNOW-1850370]: Make the rest of the functions in this file generic (for reuse with functions) +// These were copy-pasted for now. +func parseProcedureArgumentsCommon(d *schema.ResourceData) ([]sdk.ProcedureArgumentRequest, error) { + args := make([]sdk.ProcedureArgumentRequest, 0) + if v, ok := d.GetOk("arguments"); ok { + for _, arg := range v.([]any) { + argName := arg.(map[string]any)["arg_name"].(string) + argDataType := arg.(map[string]any)["arg_data_type"].(string) + dataType, err := datatypes.ParseDataType(argDataType) + if err != nil { + return nil, err + } + request := sdk.NewProcedureArgumentRequest(argName, dataType) + + if argDefaultValue, defaultValuePresent := arg.(map[string]any)["arg_default_value"]; defaultValuePresent && argDefaultValue.(string) != "" { + request.WithDefaultValue(argDefaultValue.(string)) + } + + args = append(args, *request) + } + } + return args, nil +} + +func parseProcedureImportsCommon(d *schema.ResourceData) ([]sdk.ProcedureImportRequest, error) { + imports := make([]sdk.ProcedureImportRequest, 0) + if v, ok := d.GetOk("imports"); ok { + for _, imp := range v.(*schema.Set).List() { + stageLocation := imp.(map[string]any)["stage_location"].(string) + pathOnStage := imp.(map[string]any)["path_on_stage"].(string) + imports = append(imports, *sdk.NewProcedureImportRequest(fmt.Sprintf("@%s/%s", stageLocation, pathOnStage))) + } + } 
+ return imports, nil +} + +func parseProcedureTargetPathCommon(d *schema.ResourceData) (string, error) { + var tp string + if v, ok := d.GetOk("target_path"); ok { + for _, p := range v.(*schema.Set).List() { + stageLocation := p.(map[string]any)["stage_location"].(string) + pathOnStage := p.(map[string]any)["path_on_stage"].(string) + tp = fmt.Sprintf("@%s/%s", stageLocation, pathOnStage) + } + } + return tp, nil +} + +func parseProcedureReturnsCommon(d *schema.ResourceData) (*sdk.ProcedureReturnsRequest, error) { + returnTypeRaw := d.Get("return_type").(string) + dataType, err := datatypes.ParseDataType(returnTypeRaw) + if err != nil { + return nil, err + } + returns := sdk.NewProcedureReturnsRequest() + switch v := dataType.(type) { + case *datatypes.TableDataType: + var cr []sdk.ProcedureColumnRequest + for _, c := range v.Columns() { + cr = append(cr, *sdk.NewProcedureColumnRequest(c.ColumnName(), c.ColumnType())) + } + returns.WithTable(*sdk.NewProcedureReturnsTableRequest().WithColumns(cr)) + default: + returns.WithResultDataType(*sdk.NewProcedureReturnsResultDataTypeRequest(dataType)) + } + return returns, nil +} + +func setProcedureImportsInBuilder[T any](d *schema.ResourceData, setImports func([]sdk.ProcedureImportRequest) T) error { + imports, err := parseProcedureImportsCommon(d) + if err != nil { + return err + } + setImports(imports) + return nil +} + +func setProcedureTargetPathInBuilder[T any](d *schema.ResourceData, setTargetPath func(string) T) error { + tp, err := parseProcedureTargetPathCommon(d) + if err != nil { + return err + } + if tp != "" { + setTargetPath(tp) + } + return nil +} diff --git a/pkg/resources/procedure_java.go b/pkg/resources/procedure_java.go index 1804780de9..bc4f417144 100644 --- a/pkg/resources/procedure_java.go +++ b/pkg/resources/procedure_java.go @@ -2,11 +2,18 @@ package resources import ( "context" + "errors" + "fmt" + "reflect" "strings" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/helpers" 
"github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/internal/collections" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/internal/provider" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/provider/resources" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/schemas" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk/datatypes" "github.com/hashicorp/terraform-plugin-sdk/v2/diag" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/customdiff" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" @@ -25,7 +32,11 @@ func ProcedureJava() *schema.Resource { ComputedIfAnyAttributeChanged(javaProcedureSchema, FullyQualifiedNameAttributeName, "name"), ComputedIfAnyAttributeChanged(procedureParametersSchema, ParametersAttributeName, collections.Map(sdk.AsStringList(sdk.AllProcedureParameters), strings.ToLower)...), procedureParametersCustomDiff, - // TODO[SNOW-1348103]: recreate when type changed externally + // The language check is more for the future. + // Currently, almost all attributes are marked as forceNew. + // When language changes, these attributes also change, causing the object to recreate either way. + // The only option is java staged <-> scala staged (however scala need runtime_version which may interfere). 
+ RecreateWhenResourceStringFieldChangedExternally("procedure_language", "JAVA"), )), Schema: collections.MergeMaps(javaProcedureSchema, procedureParametersSchema), @@ -36,13 +47,155 @@ func ProcedureJava() *schema.Resource { } func CreateContextProcedureJava(ctx context.Context, d *schema.ResourceData, meta any) diag.Diagnostics { - return nil + client := meta.(*provider.Context).Client + database := d.Get("database").(string) + sc := d.Get("schema").(string) + name := d.Get("name").(string) + + argumentRequests, err := parseProcedureArgumentsCommon(d) + if err != nil { + return diag.FromErr(err) + } + returns, err := parseProcedureReturnsCommon(d) + if err != nil { + return diag.FromErr(err) + } + handler := d.Get("handler").(string) + runtimeVersion := d.Get("runtime_version").(string) + // TODO [this PR]: handle real packages + packages := []sdk.ProcedurePackageRequest{*sdk.NewProcedurePackageRequest("com.snowflake:snowpark:1.14.0")} + + argumentDataTypes := collections.Map(argumentRequests, func(r sdk.ProcedureArgumentRequest) datatypes.DataType { return r.ArgDataType }) + id := sdk.NewSchemaObjectIdentifierWithArgumentsNormalized(database, sc, name, argumentDataTypes...) + request := sdk.NewCreateForJavaProcedureRequest(id.SchemaObjectId(), *returns, runtimeVersion, packages, handler). 
+ WithArguments(argumentRequests) + + errs := errors.Join( + booleanStringAttributeCreateBuilder(d, "is_secure", request.WithSecure), + attributeMappedValueCreateBuilder[string](d, "null_input_behavior", request.WithNullInputBehavior, sdk.ToNullInputBehavior), + // TODO [SNOW-1348103]: handle the rest of the attributes + // comment + setProcedureImportsInBuilder(d, request.WithImports), + // packages + // external_access_integrations + // secrets + setProcedureTargetPathInBuilder(d, request.WithTargetPath), + stringAttributeCreateBuilder(d, "procedure_definition", request.WithProcedureDefinitionWrapped), + ) + if errs != nil { + return diag.FromErr(errs) + } + + if err := client.Procedures.CreateForJava(ctx, request); err != nil { + return diag.FromErr(err) + } + d.SetId(helpers.EncodeResourceIdentifier(id)) + + // parameters do not work in create procedure (query does not fail but parameters stay unchanged) + setRequest := sdk.NewProcedureSetRequest() + if parametersCreateDiags := handleProcedureParametersCreate(d, setRequest); len(parametersCreateDiags) > 0 { + return parametersCreateDiags + } + if !reflect.DeepEqual(*setRequest, *sdk.NewProcedureSetRequest()) { + err := client.Procedures.Alter(ctx, sdk.NewAlterProcedureRequest(id).WithSet(*setRequest)) + if err != nil { + return diag.FromErr(err) + } + } + + return ReadContextProcedureJava(ctx, d, meta) } func ReadContextProcedureJava(ctx context.Context, d *schema.ResourceData, meta any) diag.Diagnostics { + client := meta.(*provider.Context).Client + id, err := sdk.ParseSchemaObjectIdentifierWithArguments(d.Id()) + if err != nil { + return diag.FromErr(err) + } + + allProcedureDetails, diags := queryAllProcedureDetailsCommon(ctx, d, client, id) + if diags != nil { + return diags + } + + // TODO [SNOW-1348103]: handle external changes marking + // TODO [SNOW-1348103]: handle setting state to value from config + + errs := errors.Join( + // TODO [SNOW-1348103]: set the rest of the fields + // not reading 
is_secure on purpose (handled as external change to show output) + readFunctionOrProcedureArguments(d, allProcedureDetails.procedureDetails.NormalizedArguments), + d.Set("return_type", allProcedureDetails.procedureDetails.ReturnDataType.ToSql()), + // not reading null_input_behavior on purpose (handled as external change to show output) + setRequiredFromStringPtr(d, "runtime_version", allProcedureDetails.procedureDetails.RuntimeVersion), + // comment + readFunctionOrProcedureImports(d, allProcedureDetails.procedureDetails.NormalizedImports), + // packages + setRequiredFromStringPtr(d, "handler", allProcedureDetails.procedureDetails.Handler), + // external_access_integrations + // secrets + readFunctionOrProcedureTargetPath(d, allProcedureDetails.procedureDetails.NormalizedTargetPath), + setOptionalFromStringPtr(d, "procedure_definition", allProcedureDetails.procedureDetails.Body), + d.Set("procedure_language", allProcedureDetails.procedureDetails.Language), + + handleProcedureParameterRead(d, allProcedureDetails.procedureParameters), + d.Set(FullyQualifiedNameAttributeName, id.FullyQualifiedName()), + d.Set(ShowOutputAttributeName, []map[string]any{schemas.ProcedureToSchema(allProcedureDetails.procedure)}), + d.Set(ParametersAttributeName, []map[string]any{schemas.ProcedureParametersToSchema(allProcedureDetails.procedureParameters)}), + ) + if errs != nil { + return diag.FromErr(errs) + } + + return nil } func UpdateContextProcedureJava(ctx context.Context, d *schema.ResourceData, meta any) diag.Diagnostics { - return nil + client := meta.(*provider.Context).Client + id, err := sdk.ParseSchemaObjectIdentifierWithArguments(d.Id()) + if err != nil { + return diag.FromErr(err) + } + + if d.HasChange("name") { + newId := sdk.NewSchemaObjectIdentifierWithArgumentsInSchema(id.SchemaId(), d.Get("name").(string), id.ArgumentDataTypes()...)
+ + err := client.Procedures.Alter(ctx, sdk.NewAlterProcedureRequest(id).WithRenameTo(newId.SchemaObjectId())) + if err != nil { + return diag.FromErr(fmt.Errorf("error renaming procedure %v err = %w", d.Id(), err)) + } + + d.SetId(helpers.EncodeResourceIdentifier(newId)) + id = newId + } + + // Batch SET operations and UNSET operations + setRequest := sdk.NewProcedureSetRequest() + unsetRequest := sdk.NewProcedureUnsetRequest() + + // TODO [SNOW-1348103]: handle all updates + // secure + // external access integration + // secrets + // comment + + if updateParamDiags := handleProcedureParametersUpdate(d, setRequest, unsetRequest); len(updateParamDiags) > 0 { + return updateParamDiags + } + + // Apply SET and UNSET changes + if !reflect.DeepEqual(*setRequest, *sdk.NewProcedureSetRequest()) { + err := client.Procedures.Alter(ctx, sdk.NewAlterProcedureRequest(id).WithSet(*setRequest)) + if err != nil { + return diag.FromErr(err) + } + } + if !reflect.DeepEqual(*unsetRequest, *sdk.NewProcedureUnsetRequest()) { + err := client.Procedures.Alter(ctx, sdk.NewAlterProcedureRequest(id).WithUnset(*unsetRequest)) + if err != nil { + return diag.FromErr(err) + } + } + + return ReadContextProcedureJava(ctx, d, meta) } diff --git a/pkg/resources/procedure_java_acceptance_test.go b/pkg/resources/procedure_java_acceptance_test.go new file mode 100644 index 0000000000..35bdd401ec --- /dev/null +++ b/pkg/resources/procedure_java_acceptance_test.go @@ -0,0 +1,429 @@ +package resources_test + +import ( + "fmt" + "testing" + "time" + + acc "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance" + r "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/resources" + + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/bettertestspoc/assert" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/bettertestspoc/assert/objectparametersassert" + 
"github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/bettertestspoc/assert/resourceassert" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/bettertestspoc/assert/resourceparametersassert" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/bettertestspoc/assert/resourceshowoutputassert" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/bettertestspoc/config" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/bettertestspoc/config/model" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/helpers/random" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/testdatatypes" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/testenvs" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/helpers" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/provider/resources" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk" + "github.com/hashicorp/terraform-plugin-testing/helper/resource" + "github.com/hashicorp/terraform-plugin-testing/plancheck" + "github.com/hashicorp/terraform-plugin-testing/tfversion" +) + +// TODO [SNOW-1348103]: test import +// TODO [SNOW-1348103]: test external changes +// TODO [SNOW-1348103]: test changes of attributes separately + +func TestAcc_ProcedureJava_InlineBasic(t *testing.T) { + className := "TestFunc" + funcName := "echoVarchar" + argName := "x" + dataType := testdatatypes.DataTypeVarchar_100 + + id := acc.TestClient().Ids.RandomSchemaObjectIdentifierWithArgumentsNewDataTypes(dataType) + idWithChangedNameButTheSameDataType := acc.TestClient().Ids.RandomSchemaObjectIdentifierWithArgumentsNewDataTypes(dataType) + + handler := fmt.Sprintf("%s.%s", className, funcName) + definition := acc.TestClient().Procedure.SampleJavaDefinition(t, className, funcName, argName) + + procedureModel := model.ProcedureJavaBasicInline("w", id, dataType, handler, 
definition). + WithArgument(argName, dataType) + procedureModelRenamed := model.ProcedureJavaBasicInline("w", idWithChangedNameButTheSameDataType, dataType, handler, definition). + WithArgument(argName, dataType) + + resource.Test(t, resource.TestCase{ + ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, + TerraformVersionChecks: []tfversion.TerraformVersionCheck{ + tfversion.RequireAbove(tfversion.Version1_5_0), + }, + PreCheck: func() { acc.TestAccPreCheck(t) }, + CheckDestroy: acc.CheckDestroy(t, resources.ProcedureJava), + Steps: []resource.TestStep{ + // CREATE BASIC + { + Config: config.FromModels(t, procedureModel), + Check: assert.AssertThat(t, + resourceassert.ProcedureJavaResource(t, procedureModel.ResourceReference()). + HasNameString(id.Name()). + HasIsSecureString(r.BooleanDefault). + HasCommentString(sdk.DefaultProcedureComment). + HasImportsLength(0). + HasTargetPathEmpty(). + HasRuntimeVersionString("11"). + HasProcedureDefinitionString(definition). + HasProcedureLanguageString("JAVA"). + HasFullyQualifiedNameString(id.FullyQualifiedName()), + resourceshowoutputassert.ProcedureShowOutput(t, procedureModel.ResourceReference()). + HasIsSecure(false), + assert.Check(resource.TestCheckResourceAttr(procedureModel.ResourceReference(), "arguments.0.arg_name", argName)), + assert.Check(resource.TestCheckResourceAttr(procedureModel.ResourceReference(), "arguments.0.arg_data_type", dataType.ToSql())), + assert.Check(resource.TestCheckResourceAttr(procedureModel.ResourceReference(), "arguments.0.arg_default_value", "")), + ), + }, + // RENAME + { + Config: config.FromModels(t, procedureModelRenamed), + Check: assert.AssertThat(t, + resourceassert.ProcedureJavaResource(t, procedureModelRenamed.ResourceReference()). + HasNameString(idWithChangedNameButTheSameDataType.Name()). 
+ HasFullyQualifiedNameString(idWithChangedNameButTheSameDataType.FullyQualifiedName()), + ), + }, + }, + }) +} + +func TestAcc_ProcedureJava_InlineEmptyArgs(t *testing.T) { + className := "TestFunc" + funcName := "echoVarchar" + returnDataType := testdatatypes.DataTypeVarchar_100 + + id := acc.TestClient().Ids.RandomSchemaObjectIdentifierWithArgumentsNewDataTypes() + + handler := fmt.Sprintf("%s.%s", className, funcName) + definition := acc.TestClient().Procedure.SampleJavaDefinitionNoArgs(t, className, funcName) + + procedureModel := model.ProcedureJavaBasicInline("w", id, returnDataType, handler, definition) + + resource.Test(t, resource.TestCase{ + ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, + TerraformVersionChecks: []tfversion.TerraformVersionCheck{ + tfversion.RequireAbove(tfversion.Version1_5_0), + }, + PreCheck: func() { acc.TestAccPreCheck(t) }, + CheckDestroy: acc.CheckDestroy(t, resources.ProcedureJava), + Steps: []resource.TestStep{ + // CREATE BASIC + { + Config: config.FromModels(t, procedureModel), + Check: assert.AssertThat(t, + resourceassert.ProcedureJavaResource(t, procedureModel.ResourceReference()). + HasNameString(id.Name()). + HasProcedureDefinitionString(definition). + HasProcedureLanguageString("JAVA"). + HasFullyQualifiedNameString(id.FullyQualifiedName()), + ), + }, + }, + }) +} + +func TestAcc_ProcedureJava_InlineBasicDefaultArg(t *testing.T) { + className := "TestFunc" + funcName := "echoVarchar" + argName := "x" + dataType := testdatatypes.DataTypeVarchar_100 + defaultValue := "'hello'" + + id := acc.TestClient().Ids.RandomSchemaObjectIdentifierWithArgumentsNewDataTypes(dataType) + + handler := fmt.Sprintf("%s.%s", className, funcName) + definition := acc.TestClient().Procedure.SampleJavaDefinition(t, className, funcName, argName) + + procedureModel := model.ProcedureJavaBasicInline("w", id, dataType, handler, definition). 
+ WithArgumentWithDefaultValue(argName, dataType, defaultValue) + + resource.Test(t, resource.TestCase{ + ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, + TerraformVersionChecks: []tfversion.TerraformVersionCheck{ + tfversion.RequireAbove(tfversion.Version1_5_0), + }, + PreCheck: func() { acc.TestAccPreCheck(t) }, + CheckDestroy: acc.CheckDestroy(t, resources.ProcedureJava), + Steps: []resource.TestStep{ + // CREATE BASIC + { + Config: config.FromModels(t, procedureModel), + Check: assert.AssertThat(t, + resourceassert.ProcedureJavaResource(t, procedureModel.ResourceReference()). + HasNameString(id.Name()). + HasProcedureDefinitionString(definition). + HasProcedureLanguageString("JAVA"). + HasFullyQualifiedNameString(id.FullyQualifiedName()), + assert.Check(resource.TestCheckResourceAttr(procedureModel.ResourceReference(), "arguments.0.arg_name", argName)), + assert.Check(resource.TestCheckResourceAttr(procedureModel.ResourceReference(), "arguments.0.arg_data_type", dataType.ToSql())), + assert.Check(resource.TestCheckResourceAttr(procedureModel.ResourceReference(), "arguments.0.arg_default_value", defaultValue)), + ), + }, + }, + }) +} + +func TestAcc_ProcedureJava_InlineFull(t *testing.T) { + _ = testenvs.GetOrSkipTest(t, testenvs.EnableAcceptance) + acc.TestAccPreCheck(t) + + stage, stageCleanup := acc.TestClient().Stage.CreateStage(t) + t.Cleanup(stageCleanup) + + className := "TestFunc" + funcName := "echoVarchar" + argName := "x" + dataType := testdatatypes.DataTypeVarchar_100 + + id := acc.TestClient().Ids.RandomSchemaObjectIdentifierWithArgumentsNewDataTypes(dataType) + + handler := fmt.Sprintf("%s.%s", className, funcName) + definition := acc.TestClient().Procedure.SampleJavaDefinition(t, className, funcName, argName) + // TODO [SNOW-1850370]: extract to helper + jarName := fmt.Sprintf("tf-%d-%s.jar", time.Now().Unix(), random.AlphaN(5)) + + procedureModel := model.ProcedureJavaBasicInline("w", id, dataType, handler, definition). 
+ WithArgument(argName, dataType). + WithTargetPathParts(stage.ID().FullyQualifiedName(), jarName). + WithRuntimeVersion("11") + + resource.Test(t, resource.TestCase{ + ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, + TerraformVersionChecks: []tfversion.TerraformVersionCheck{ + tfversion.RequireAbove(tfversion.Version1_5_0), + }, + PreCheck: func() { acc.TestAccPreCheck(t) }, + CheckDestroy: acc.CheckDestroy(t, resources.ProcedureJava), + Steps: []resource.TestStep{ + // CREATE BASIC + { + Config: config.FromModels(t, procedureModel), + Check: assert.AssertThat(t, + resourceassert.ProcedureJavaResource(t, procedureModel.ResourceReference()). + HasNameString(id.Name()). + HasIsSecureString(r.BooleanDefault). + HasCommentString(sdk.DefaultProcedureComment). + HasImportsLength(0). + HasRuntimeVersionString("11"). + HasProcedureDefinitionString(definition). + HasProcedureLanguageString("JAVA"). + HasFullyQualifiedNameString(id.FullyQualifiedName()), + assert.Check(resource.TestCheckResourceAttr(procedureModel.ResourceReference(), "target_path.0.stage_location", stage.ID().FullyQualifiedName())), + assert.Check(resource.TestCheckResourceAttr(procedureModel.ResourceReference(), "target_path.0.path_on_stage", jarName)), + resourceshowoutputassert.ProcedureShowOutput(t, procedureModel.ResourceReference()). 
+ HasIsSecure(false), + ), + }, + }, + }) +} + +func TestAcc_ProcedureJava_StagedBasic(t *testing.T) { + _ = testenvs.GetOrSkipTest(t, testenvs.EnableAcceptance) + acc.TestAccPreCheck(t) + + stage, stageCleanup := acc.TestClient().Stage.CreateStage(t) + t.Cleanup(stageCleanup) + + tmpJavaProcedure := acc.TestClient().CreateSampleJavaProcedureAndJarOnStage(t, stage) + + dataType := tmpJavaProcedure.ArgType + id := acc.TestClient().Ids.RandomSchemaObjectIdentifierWithArgumentsNewDataTypes(dataType) + + argName := "x" + handler := tmpJavaProcedure.JavaHandler() + + procedureModel := model.ProcedureJavaBasicStaged("w", id, dataType, handler, stage.ID().FullyQualifiedName(), tmpJavaProcedure.JarName). + WithArgument(argName, dataType) + + resource.Test(t, resource.TestCase{ + ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, + TerraformVersionChecks: []tfversion.TerraformVersionCheck{ + tfversion.RequireAbove(tfversion.Version1_5_0), + }, + PreCheck: func() { acc.TestAccPreCheck(t) }, + CheckDestroy: acc.CheckDestroy(t, resources.ProcedureJava), + Steps: []resource.TestStep{ + // CREATE BASIC + { + Config: config.FromModels(t, procedureModel), + Check: assert.AssertThat(t, + resourceassert.ProcedureJavaResource(t, procedureModel.ResourceReference()). + HasNameString(id.Name()). + HasIsSecureString(r.BooleanDefault). + HasCommentString(sdk.DefaultProcedureComment). + HasImportsLength(1). + HasNoProcedureDefinition(). + HasProcedureLanguageString("JAVA"). + HasFullyQualifiedNameString(id.FullyQualifiedName()), + assert.Check(resource.TestCheckResourceAttr(procedureModel.ResourceReference(), "imports.0.stage_location", stage.ID().FullyQualifiedName())), + assert.Check(resource.TestCheckResourceAttr(procedureModel.ResourceReference(), "imports.0.path_on_stage", tmpJavaProcedure.JarName)), + resourceshowoutputassert.ProcedureShowOutput(t, procedureModel.ResourceReference()). 
+ HasIsSecure(false), + ), + }, + }, + }) +} + +func TestAcc_ProcedureJava_AllParameters(t *testing.T) { + className := "TestFunc" + funcName := "echoVarchar" + argName := "x" + dataType := testdatatypes.DataTypeVarchar_100 + id := acc.TestClient().Ids.RandomSchemaObjectIdentifierWithArgumentsNewDataTypes(dataType) + + handler := fmt.Sprintf("%s.%s", className, funcName) + definition := acc.TestClient().Procedure.SampleJavaDefinition(t, className, funcName, argName) + + procedureModel := model.ProcedureJavaBasicInline("w", id, dataType, handler, definition). + WithArgument(argName, dataType) + procedureModelWithAllParametersSet := model.ProcedureJavaBasicInline("w", id, dataType, handler, definition). + WithArgument(argName, dataType). + WithEnableConsoleOutput(true). + WithLogLevel(string(sdk.LogLevelWarn)). + WithMetricLevel(string(sdk.MetricLevelAll)). + WithTraceLevel(string(sdk.TraceLevelAlways)) + + resource.Test(t, resource.TestCase{ + ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, + TerraformVersionChecks: []tfversion.TerraformVersionCheck{ + tfversion.RequireAbove(tfversion.Version1_5_0), + }, + PreCheck: func() { acc.TestAccPreCheck(t) }, + CheckDestroy: acc.CheckDestroy(t, resources.ProcedureJava), + Steps: []resource.TestStep{ + // create with default values for all the parameters + { + Config: config.FromModels(t, procedureModel), + Check: assert.AssertThat(t, + objectparametersassert.ProcedureParameters(t, id). + HasAllDefaults(). + HasAllDefaultsExplicit(), + resourceparametersassert.ProcedureResourceParameters(t, procedureModel.ResourceReference()). + HasAllDefaults(), + ), + }, + // import when no parameter set + { + ResourceName: procedureModel.ResourceReference(), + ImportState: true, + ImportStateCheck: assert.AssertThatImport(t, + resourceparametersassert.ImportedProcedureResourceParameters(t, helpers.EncodeResourceIdentifier(id)). 
+ HasAllDefaults(), + ), + }, + // set all parameters + { + Config: config.FromModels(t, procedureModelWithAllParametersSet), + Check: assert.AssertThat(t, + objectparametersassert.ProcedureParameters(t, id). + HasEnableConsoleOutput(true). + HasLogLevel(sdk.LogLevelWarn). + HasMetricLevel(sdk.MetricLevelAll). + HasTraceLevel(sdk.TraceLevelAlways), + resourceparametersassert.ProcedureResourceParameters(t, procedureModelWithAllParametersSet.ResourceReference()). + HasEnableConsoleOutput(true). + HasLogLevel(sdk.LogLevelWarn). + HasMetricLevel(sdk.MetricLevelAll). + HasTraceLevel(sdk.TraceLevelAlways), + ), + }, + // import when all parameters set + { + ResourceName: procedureModelWithAllParametersSet.ResourceReference(), + ImportState: true, + ImportStateCheck: assert.AssertThatImport(t, + resourceparametersassert.ImportedProcedureResourceParameters(t, helpers.EncodeResourceIdentifier(id)). + HasEnableConsoleOutput(true). + HasLogLevel(sdk.LogLevelWarn). + HasMetricLevel(sdk.MetricLevelAll). + HasTraceLevel(sdk.TraceLevelAlways), + ), + }, + // unset all the parameters + { + Config: config.FromModels(t, procedureModel), + Check: assert.AssertThat(t, + objectparametersassert.ProcedureParameters(t, id). + HasAllDefaults(). + HasAllDefaultsExplicit(), + resourceparametersassert.ProcedureResourceParameters(t, procedureModel.ResourceReference()). + HasAllDefaults(), + ), + }, + // destroy + { + Config: config.FromModels(t, procedureModel), + Destroy: true, + }, + // create with all parameters set + { + Config: config.FromModels(t, procedureModelWithAllParametersSet), + Check: assert.AssertThat(t, + objectparametersassert.ProcedureParameters(t, id). + HasEnableConsoleOutput(true). + HasLogLevel(sdk.LogLevelWarn). + HasMetricLevel(sdk.MetricLevelAll). + HasTraceLevel(sdk.TraceLevelAlways), + resourceparametersassert.ProcedureResourceParameters(t, procedureModelWithAllParametersSet.ResourceReference()). + HasEnableConsoleOutput(true). + HasLogLevel(sdk.LogLevelWarn). 
+ HasMetricLevel(sdk.MetricLevelAll). + HasTraceLevel(sdk.TraceLevelAlways), + ), + }, + }, + }) +} + +func TestAcc_ProcedureJava_handleExternalLanguageChange(t *testing.T) { + _ = testenvs.GetOrSkipTest(t, testenvs.EnableAcceptance) + acc.TestAccPreCheck(t) + + tmpJavaProcedure := acc.TestClient().CreateSampleJavaProcedureAndJarOnUserStage(t) + + dataType := tmpJavaProcedure.ArgType + id := acc.TestClient().Ids.RandomSchemaObjectIdentifierWithArgumentsNewDataTypes(dataType) + + argName := "x" + handler := tmpJavaProcedure.JavaHandler() + + procedureModel := model.ProcedureJavaBasicStaged("w", id, dataType, handler, "~", tmpJavaProcedure.JarName). + WithArgument(argName, dataType) + + resource.Test(t, resource.TestCase{ + ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, + TerraformVersionChecks: []tfversion.TerraformVersionCheck{ + tfversion.RequireAbove(tfversion.Version1_5_0), + }, + PreCheck: func() { acc.TestAccPreCheck(t) }, + CheckDestroy: acc.CheckDestroy(t, resources.ProcedureJava), + Steps: []resource.TestStep{ + { + Config: config.FromModels(t, procedureModel), + Check: assert.AssertThat(t, + resourceassert.ProcedureJavaResource(t, procedureModel.ResourceReference()).HasNameString(id.Name()).HasProcedureLanguageString("JAVA"), + ), + }, + // change type externally by creating a new procedure with the exact same id but using different language + { + PreConfig: func() { + acc.TestClient().Procedure.DropProcedureFunc(t, id)() + acc.TestClient().Procedure.CreateScalaStaged(t, id, dataType, tmpJavaProcedure.JarLocation(), handler) + }, + Config: config.FromModels(t, procedureModel), + ConfigPlanChecks: resource.ConfigPlanChecks{ + PreApply: []plancheck.PlanCheck{ + plancheck.ExpectResourceAction(procedureModel.ResourceReference(), plancheck.ResourceActionDestroyBeforeCreate), + }, + }, + Check: assert.AssertThat(t, + resourceassert.ProcedureJavaResource(t, 
procedureModel.ResourceReference()).HasNameString(id.Name()).HasProcedureLanguageString("JAVA"), + ), + }, + }, + }) +} diff --git a/pkg/resources/procedure_parameters.go b/pkg/resources/procedure_parameters.go index eba2a378b2..4bb719776e 100644 --- a/pkg/resources/procedure_parameters.go +++ b/pkg/resources/procedure_parameters.go @@ -80,16 +80,16 @@ func handleProcedureParameterRead(d *schema.ResourceData, procedureParameters [] } // They do not work in create, that's why are set in alter -func handleProcedureParametersCreate(d *schema.ResourceData, alterOpts *sdk.ProcedureSet) diag.Diagnostics { +func handleProcedureParametersCreate(d *schema.ResourceData, set *sdk.ProcedureSetRequest) diag.Diagnostics { return JoinDiags( - handleParameterCreate(d, sdk.ProcedureParameterEnableConsoleOutput, &alterOpts.EnableConsoleOutput), - handleParameterCreateWithMapping(d, sdk.ProcedureParameterLogLevel, &alterOpts.LogLevel, stringToStringEnumProvider(sdk.ToLogLevel)), - handleParameterCreateWithMapping(d, sdk.ProcedureParameterMetricLevel, &alterOpts.MetricLevel, stringToStringEnumProvider(sdk.ToMetricLevel)), - handleParameterCreateWithMapping(d, sdk.ProcedureParameterTraceLevel, &alterOpts.TraceLevel, stringToStringEnumProvider(sdk.ToTraceLevel)), + handleParameterCreate(d, sdk.ProcedureParameterEnableConsoleOutput, &set.EnableConsoleOutput), + handleParameterCreateWithMapping(d, sdk.ProcedureParameterLogLevel, &set.LogLevel, stringToStringEnumProvider(sdk.ToLogLevel)), + handleParameterCreateWithMapping(d, sdk.ProcedureParameterMetricLevel, &set.MetricLevel, stringToStringEnumProvider(sdk.ToMetricLevel)), + handleParameterCreateWithMapping(d, sdk.ProcedureParameterTraceLevel, &set.TraceLevel, stringToStringEnumProvider(sdk.ToTraceLevel)), ) } -func handleProcedureParametersUpdate(d *schema.ResourceData, set *sdk.ProcedureSet, unset *sdk.ProcedureUnset) diag.Diagnostics { +func handleProcedureParametersUpdate(d *schema.ResourceData, set *sdk.ProcedureSetRequest, unset 
*sdk.ProcedureUnsetRequest) diag.Diagnostics { return JoinDiags( handleParameterUpdate(d, sdk.ProcedureParameterEnableConsoleOutput, &set.EnableConsoleOutput, &unset.EnableConsoleOutput), handleParameterUpdateWithMapping(d, sdk.ProcedureParameterLogLevel, &set.LogLevel, &unset.LogLevel, stringToStringEnumProvider(sdk.ToLogLevel)), diff --git a/pkg/sdk/functions_and_procedures_commons.go b/pkg/sdk/functions_and_procedures_commons.go new file mode 100644 index 0000000000..df64aba187 --- /dev/null +++ b/pkg/sdk/functions_and_procedures_commons.go @@ -0,0 +1,118 @@ +package sdk + +import ( + "fmt" + "log" + "strings" + + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk/datatypes" +) + +type NormalizedPath struct { + // StageLocation is a normalized (fully-quoted id or `~`) stage location + StageLocation string + // PathOnStage is path to the file on stage without opening `/` + PathOnStage string +} + +// NormalizedArgument does not contain default value because it is not returned in the Signature (or any other field). +type NormalizedArgument struct { + Name string + DataType datatypes.DataType +} + +// TODO [SNOW-1850370]: use ParseCommaSeparatedStringArray + collections.MapErr combo here and in other methods? 
+func parseFunctionOrProcedureImports(importsRaw *string) ([]NormalizedPath, error) { + normalizedImports := make([]NormalizedPath, 0) + if importsRaw == nil || *importsRaw == "" || *importsRaw == "[]" { + return normalizedImports, nil + } + if !strings.HasPrefix(*importsRaw, "[") || !strings.HasSuffix(*importsRaw, "]") { + return normalizedImports, fmt.Errorf("could not parse imports from Snowflake: %s, wrapping brackets not found", *importsRaw) + } + raw := (*importsRaw)[1 : len(*importsRaw)-1] + imports := strings.Split(raw, ",") + for _, imp := range imports { + p, err := parseFunctionOrProcedureStageLocationPath(imp) + if err != nil { + return nil, fmt.Errorf("could not parse imports from Snowflake: %s, err: %w", *importsRaw, err) + } + normalizedImports = append(normalizedImports, *p) + } + return normalizedImports, nil +} + +func parseFunctionOrProcedureStageLocationPath(location string) (*NormalizedPath, error) { + log.Printf("[DEBUG] parsing stage location path part: %s", location) + idx := strings.Index(location, "/") + if idx < 0 { + return nil, fmt.Errorf("part %s cannot be split into stage and path", location) + } + stageRaw := strings.TrimPrefix(strings.TrimSpace(location[:idx]), "@") + if stageRaw != "~" { + stageId, err := ParseSchemaObjectIdentifier(stageRaw) + if err != nil { + return nil, fmt.Errorf("part %s contains incorrect stage location: %w", location, err) + } + stageRaw = stageId.FullyQualifiedName() + } + pathRaw := strings.TrimPrefix(strings.TrimSpace(location[idx:]), "/") + if pathRaw == "" { + return nil, fmt.Errorf("part %s contains empty path", location) + } + return &NormalizedPath{stageRaw, pathRaw}, nil +} + +func parseFunctionOrProcedureReturns(returns string) (datatypes.DataType, bool, error) { + var returnNotNull bool + trimmed := strings.TrimSpace(returns) + if strings.HasSuffix(trimmed, " NOT NULL") { + returnNotNull = true + trimmed = strings.TrimSuffix(trimmed, " NOT NULL") + } + dt, err := datatypes.ParseDataType(trimmed) 
+ return dt, returnNotNull, err +} + +// Format in Snowflake DB is: (argName argType, argName argType, ...). +func parseFunctionOrProcedureSignature(signature string) ([]NormalizedArgument, error) { + normalizedArguments := make([]NormalizedArgument, 0) + trimmed := strings.TrimSpace(signature) + if trimmed == "" { + return normalizedArguments, fmt.Errorf("could not parse signature from Snowflake: %s, can't be empty", signature) + } + if trimmed == "()" { + return normalizedArguments, nil + } + if !strings.HasPrefix(trimmed, "(") || !strings.HasSuffix(trimmed, ")") { + return normalizedArguments, fmt.Errorf("could not parse signature from Snowflake: %s, wrapping parentheses not found", trimmed) + } + raw := (trimmed)[1 : len(trimmed)-1] + args := strings.Split(raw, ",") + + for _, arg := range args { + a, err := parseFunctionOrProcedureArgument(arg) + if err != nil { + return nil, fmt.Errorf("could not parse signature from Snowflake: %s, err: %w", trimmed, err) + } + normalizedArguments = append(normalizedArguments, *a) + } + return normalizedArguments, nil +} + +// TODO [SNOW-1850370]: test with strange arg names (first integration test) +func parseFunctionOrProcedureArgument(arg string) (*NormalizedArgument, error) { + log.Printf("[DEBUG] parsing argument: %s", arg) + trimmed := strings.TrimSpace(arg) + idx := strings.Index(trimmed, " ") + if idx < 0 { + return nil, fmt.Errorf("arg %s cannot be split into arg name, data type, and default", arg) + } + argName := trimmed[:idx] + rest := strings.TrimSpace(trimmed[idx:]) + dt, err := datatypes.ParseDataType(rest) + if err != nil { + return nil, fmt.Errorf("arg type %s cannot be parsed, err: %w", rest, err) + } + return &NormalizedArgument{argName, dt}, nil +} diff --git a/pkg/sdk/functions_ext.go b/pkg/sdk/functions_ext.go index 2a87c2a458..facd9ede1d 100644 --- a/pkg/sdk/functions_ext.go +++ b/pkg/sdk/functions_ext.go @@ -4,9 +4,7 @@ import ( "context" "errors" "fmt" - "log" "strconv" - "strings" 
"github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk/datatypes" ) @@ -42,19 +40,6 @@ type FunctionDetails struct { NormalizedArguments []NormalizedArgument } -type NormalizedPath struct { - // StageLocation is a normalized (fully-quoted id or `~`) stage location - StageLocation string - // PathOnStage is path to the file on stage without opening `/` - PathOnStage string -} - -// NormalizedArgument does not contain default value because it is not returned in the Signature (or any other field). -type NormalizedArgument struct { - Name string - DataType datatypes.DataType -} - func functionDetailsFromRows(rows []FunctionDetail) (*FunctionDetails, error) { v := &FunctionDetails{} var errs []error @@ -96,14 +81,14 @@ func functionDetailsFromRows(rows []FunctionDetail) (*FunctionDetails, error) { return nil, e } - if functionDetailsImports, err := parseFunctionDetailsImport(*v); err != nil { + if normalizedImports, err := parseFunctionOrProcedureImports(v.Imports); err != nil { errs = append(errs, err) } else { - v.NormalizedImports = functionDetailsImports + v.NormalizedImports = normalizedImports } if v.TargetPath != nil { - if p, err := parseStageLocationPath(*v.TargetPath); err != nil { + if p, err := parseFunctionOrProcedureStageLocationPath(*v.TargetPath); err != nil { errs = append(errs, err) } else { v.NormalizedTargetPath = p @@ -126,102 +111,6 @@ func functionDetailsFromRows(rows []FunctionDetail) (*FunctionDetails, error) { return v, errors.Join(errs...) } -// TODO [SNOW-1850370]: use ParseCommaSeparatedStringArray + collections.MapErr combo here and in other methods? 
-func parseFunctionDetailsImport(details FunctionDetails) ([]NormalizedPath, error) { - functionDetailsImports := make([]NormalizedPath, 0) - if details.Imports == nil || *details.Imports == "" || *details.Imports == "[]" { - return functionDetailsImports, nil - } - if !strings.HasPrefix(*details.Imports, "[") || !strings.HasSuffix(*details.Imports, "]") { - return functionDetailsImports, fmt.Errorf("could not parse imports from Snowflake: %s, wrapping brackets not found", *details.Imports) - } - raw := (*details.Imports)[1 : len(*details.Imports)-1] - imports := strings.Split(raw, ",") - for _, imp := range imports { - p, err := parseStageLocationPath(imp) - if err != nil { - return nil, fmt.Errorf("could not parse imports from Snowflake: %s, err: %w", *details.Imports, err) - } - functionDetailsImports = append(functionDetailsImports, *p) - } - return functionDetailsImports, nil -} - -func parseStageLocationPath(location string) (*NormalizedPath, error) { - log.Printf("[DEBUG] parsing stage location path part: %s", location) - idx := strings.Index(location, "/") - if idx < 0 { - return nil, fmt.Errorf("part %s cannot be split into stage and path", location) - } - stageRaw := strings.TrimPrefix(strings.TrimSpace(location[:idx]), "@") - if stageRaw != "~" { - stageId, err := ParseSchemaObjectIdentifier(stageRaw) - if err != nil { - return nil, fmt.Errorf("part %s contains incorrect stage location: %w", location, err) - } - stageRaw = stageId.FullyQualifiedName() - } - pathRaw := strings.TrimPrefix(strings.TrimSpace(location[idx:]), "/") - if pathRaw == "" { - return nil, fmt.Errorf("part %s contains empty path", location) - } - return &NormalizedPath{stageRaw, pathRaw}, nil -} - -func parseFunctionOrProcedureReturns(returns string) (datatypes.DataType, bool, error) { - var returnNotNull bool - trimmed := strings.TrimSpace(returns) - if strings.HasSuffix(trimmed, " NOT NULL") { - returnNotNull = true - trimmed = strings.TrimSuffix(trimmed, " NOT NULL") - } - dt, err 
:= datatypes.ParseDataType(trimmed) - return dt, returnNotNull, err -} - -// Format in Snowflake DB is: (argName argType, argName argType, ...). -func parseFunctionOrProcedureSignature(signature string) ([]NormalizedArgument, error) { - normalizedArguments := make([]NormalizedArgument, 0) - trimmed := strings.TrimSpace(signature) - if trimmed == "" { - return normalizedArguments, fmt.Errorf("could not parse signature from Snowflake: %s, can't be empty", signature) - } - if trimmed == "()" { - return normalizedArguments, nil - } - if !strings.HasPrefix(trimmed, "(") || !strings.HasSuffix(trimmed, ")") { - return normalizedArguments, fmt.Errorf("could not parse signature from Snowflake: %s, wrapping parentheses not found", trimmed) - } - raw := (trimmed)[1 : len(trimmed)-1] - args := strings.Split(raw, ",") - - for _, arg := range args { - a, err := parseFunctionOrProcedureArgument(arg) - if err != nil { - return nil, fmt.Errorf("could not parse signature from Snowflake: %s, err: %w", trimmed, err) - } - normalizedArguments = append(normalizedArguments, *a) - } - return normalizedArguments, nil -} - -// TODO [SNOW-1850370]: test with strange arg names (first integration test) -func parseFunctionOrProcedureArgument(arg string) (*NormalizedArgument, error) { - log.Printf("[DEBUG] parsing argument: %s", arg) - trimmed := strings.TrimSpace(arg) - idx := strings.Index(trimmed, " ") - if idx < 0 { - return nil, fmt.Errorf("arg %s cannot be split into arg name, data type, and default", arg) - } - argName := trimmed[:idx] - rest := strings.TrimSpace(trimmed[idx:]) - dt, err := datatypes.ParseDataType(rest) - if err != nil { - return nil, fmt.Errorf("arg type %s cannot be parsed, err: %w", rest, err) - } - return &NormalizedArgument{argName, dt}, nil -} - func (v *functions) DescribeDetails(ctx context.Context, id SchemaObjectIdentifierWithArguments) (*FunctionDetails, error) { rows, err := v.Describe(ctx, id) if err != nil { diff --git a/pkg/sdk/functions_ext_test.go 
b/pkg/sdk/functions_ext_test.go index a4f77431d0..9fea4d4319 100644 --- a/pkg/sdk/functions_ext_test.go +++ b/pkg/sdk/functions_ext_test.go @@ -9,7 +9,7 @@ import ( ) // TODO [SNOW-1850370]: test parsing single -func Test_parseFunctionDetailsImport(t *testing.T) { +func Test_parseFunctionOrProcedureImports(t *testing.T) { inputs := []struct { rawInput string expected []NormalizedPath @@ -43,9 +43,7 @@ func Test_parseFunctionDetailsImport(t *testing.T) { for _, tc := range inputs { tc := tc t.Run(fmt.Sprintf("Snowflake raw imports: %s", tc.rawInput), func(t *testing.T) { - details := FunctionDetails{Imports: &tc.rawInput} - - results, err := parseFunctionDetailsImport(details) + results, err := parseFunctionOrProcedureImports(&tc.rawInput) require.NoError(t, err) require.Equal(t, tc.expected, results) }) @@ -54,9 +52,7 @@ func Test_parseFunctionDetailsImport(t *testing.T) { for _, tc := range badInputs { tc := tc t.Run(fmt.Sprintf("incorrect Snowflake input: %s, expecting error with: %s", tc.rawInput, tc.expectedErrorPart), func(t *testing.T) { - details := FunctionDetails{Imports: &tc.rawInput} - - _, err := parseFunctionDetailsImport(details) + _, err := parseFunctionOrProcedureImports(&tc.rawInput) require.Error(t, err) require.ErrorContains(t, err, "could not parse imports from Snowflake") require.ErrorContains(t, err, tc.expectedErrorPart) @@ -64,9 +60,7 @@ func Test_parseFunctionDetailsImport(t *testing.T) { } t.Run("Snowflake raw imports nil", func(t *testing.T) { - details := FunctionDetails{Imports: nil} - - results, err := parseFunctionDetailsImport(details) + results, err := parseFunctionOrProcedureImports(nil) require.NoError(t, err) require.Equal(t, []NormalizedPath{}, results) }) diff --git a/pkg/sdk/procedures_ext.go b/pkg/sdk/procedures_ext.go index a8ee2844bf..de40fd8732 100644 --- a/pkg/sdk/procedures_ext.go +++ b/pkg/sdk/procedures_ext.go @@ -29,9 +29,15 @@ type ProcedureDetails struct { Handler *string // present for python, java, and scala 
(hidden when SECURE) RuntimeVersion *string // present for python, java, and scala (hidden when SECURE) Packages *string // list // present for python, java, and scala (hidden when SECURE) - TargetPath *string // list present for scala and java (hidden when SECURE) + TargetPath *string // present for scala and java (hidden when SECURE) InstalledPackages *string // list present for python (hidden when SECURE) ExecuteAs string // present for all procedure types + + NormalizedImports []NormalizedPath + NormalizedTargetPath *NormalizedPath + ReturnDataType datatypes.DataType + ReturnNotNull bool + NormalizedArguments []NormalizedArgument } func procedureDetailsFromRows(rows []ProcedureDetail) (*ProcedureDetails, error) { @@ -71,6 +77,37 @@ func procedureDetailsFromRows(rows []ProcedureDetail) (*ProcedureDetails, error) v.TargetPath = row.Value } } + if e := errors.Join(errs...); e != nil { + return nil, e + } + + if normalizedImports, err := parseFunctionOrProcedureImports(v.Imports); err != nil { + errs = append(errs, err) + } else { + v.NormalizedImports = normalizedImports + } + + if v.TargetPath != nil { + if p, err := parseFunctionOrProcedureStageLocationPath(*v.TargetPath); err != nil { + errs = append(errs, err) + } else { + v.NormalizedTargetPath = p + } + } + + if dt, returnNotNull, err := parseFunctionOrProcedureReturns(v.Returns); err != nil { + errs = append(errs, err) + } else { + v.ReturnDataType = dt + v.ReturnNotNull = returnNotNull + } + + if args, err := parseFunctionOrProcedureSignature(v.Signature); err != nil { + errs = append(errs, err) + } else { + v.NormalizedArguments = args + } + return v, errors.Join(errs...) 
} diff --git a/pkg/sdk/testint/procedures_integration_test.go b/pkg/sdk/testint/procedures_integration_test.go index 6e0298308e..c5434d6308 100644 --- a/pkg/sdk/testint/procedures_integration_test.go +++ b/pkg/sdk/testint/procedures_integration_test.go @@ -100,6 +100,8 @@ func TestInt_Procedures(t *testing.T) { assertions.AssertThatObject(t, objectassert.ProcedureDetails(t, procedure.ID()). HasSignature(fmt.Sprintf(`(%s %s)`, argName, dataType.ToLegacyDataTypeSql())). HasReturns(dataType.ToSql()). + HasReturnDataType(dataType). + HasReturnNotNull(false). HasLanguage("JAVA"). HasBody(definition). HasNullHandling(string(sdk.NullInputBehaviorCalledOnNullInput)). @@ -107,10 +109,12 @@ func TestInt_Procedures(t *testing.T) { HasExternalAccessIntegrationsNil(). HasSecretsNil(). HasImports(`[]`). + HasExactlyImportsNormalizedInAnyOrder(). HasHandler(handler). HasRuntimeVersion("11"). HasPackages(`[com.snowflake:snowpark:1.14.0]`). HasTargetPathNil(). + HasNormalizedTargetPathNil(). HasInstalledPackagesNil(). HasExecuteAs("OWNER"), ) @@ -186,6 +190,8 @@ func TestInt_Procedures(t *testing.T) { assertions.AssertThatObject(t, objectassert.ProcedureDetails(t, function.ID()). HasSignature(fmt.Sprintf(`(%s %s)`, argName, dataType.ToLegacyDataTypeSql())). HasReturns(fmt.Sprintf(`%s NOT NULL`, dataType.ToSql())). + HasReturnDataType(dataType). + HasReturnNotNull(true). HasLanguage("JAVA"). HasBody(definition). HasNullHandling(string(sdk.NullInputBehaviorReturnsNullInput)). @@ -193,10 +199,14 @@ func TestInt_Procedures(t *testing.T) { HasExactlyExternalAccessIntegrations(externalAccessIntegration). HasExactlySecrets(map[string]sdk.SchemaObjectIdentifier{"abc": secretId}). HasImports(fmt.Sprintf(`[%s]`, tmpJavaProcedure.JarLocation())). + HasExactlyImportsNormalizedInAnyOrder(sdk.NormalizedPath{ + StageLocation: "~", PathOnStage: tmpJavaProcedure.JarName, + }). HasHandler(handler). HasRuntimeVersion("11"). HasPackages(`[com.snowflake:snowpark:1.14.0,com.snowflake:telemetry:0.1.0]`). 
HasTargetPath(targetPath). + HasNormalizedTargetPath("~", jarName). HasInstalledPackagesNil(). HasExecuteAs("OWNER"), ) @@ -256,6 +266,8 @@ func TestInt_Procedures(t *testing.T) { assertions.AssertThatObject(t, objectassert.ProcedureDetails(t, function.ID()). HasSignature(fmt.Sprintf(`(%s %s)`, argName, dataType.ToLegacyDataTypeSql())). HasReturns(dataType.ToSql()). + HasReturnDataType(dataType). + HasReturnNotNull(false). HasLanguage("JAVA"). HasBodyNil(). HasNullHandling(string(sdk.NullInputBehaviorCalledOnNullInput)). @@ -263,10 +275,14 @@ func TestInt_Procedures(t *testing.T) { HasExternalAccessIntegrationsNil(). HasSecretsNil(). HasImports(fmt.Sprintf(`[%s]`, importPath)). + HasExactlyImportsNormalizedInAnyOrder(sdk.NormalizedPath{ + StageLocation: "~", PathOnStage: tmpJavaProcedure.JarName, + }). HasHandler(handler). HasRuntimeVersion("11"). HasPackages(`[com.snowflake:snowpark:1.14.0,com.snowflake:telemetry:0.1.0]`). HasTargetPathNil(). + HasNormalizedTargetPathNil(). HasInstalledPackagesNil(). HasExecuteAs("OWNER"), ) @@ -333,6 +349,8 @@ func TestInt_Procedures(t *testing.T) { assertions.AssertThatObject(t, objectassert.ProcedureDetails(t, function.ID()). HasSignature(fmt.Sprintf(`(%s %s)`, argName, dataType.ToLegacyDataTypeSql())). HasReturns(fmt.Sprintf(`%s NOT NULL`, dataType.ToSql())). + HasReturnDataType(dataType). + HasReturnNotNull(true). HasLanguage("JAVA"). HasBodyNil(). HasNullHandling(string(sdk.NullInputBehaviorReturnsNullInput)). @@ -340,10 +358,14 @@ func TestInt_Procedures(t *testing.T) { HasExactlyExternalAccessIntegrations(externalAccessIntegration). HasExactlySecrets(map[string]sdk.SchemaObjectIdentifier{"abc": secretId}). HasImports(fmt.Sprintf(`[%s]`, tmpJavaProcedure.JarLocation())). + HasExactlyImportsNormalizedInAnyOrder(sdk.NormalizedPath{ + StageLocation: "~", PathOnStage: tmpJavaProcedure.JarName, + }). HasHandler(handler). HasRuntimeVersion("11"). HasPackages(`[com.snowflake:snowpark:1.14.0,com.snowflake:telemetry:0.1.0]`). 
HasTargetPathNil(). + HasNormalizedTargetPathNil(). HasInstalledPackagesNil(). HasExecuteAs("OWNER"), ) @@ -386,8 +408,47 @@ func TestInt_Procedures(t *testing.T) { assertions.AssertThatObject(t, objectassert.ProcedureDetails(t, function.ID()). HasImports(fmt.Sprintf(`[@"%s"."%s".%s/%s]`, stage.ID().DatabaseName(), stage.ID().SchemaName(), stage.ID().Name(), tmpJavaProcedureDifferentStage.JarName)). + HasExactlyImportsNormalizedInAnyOrder(sdk.NormalizedPath{ + StageLocation: stage.ID().FullyQualifiedName(), PathOnStage: tmpJavaProcedureDifferentStage.JarName, + }). HasHandler(handler). - HasTargetPathNil(), + HasTargetPathNil(). + HasNormalizedTargetPathNil(), + ) + }) + + // proves that we don't get default argument values from SHOW and DESCRIBE + t.Run("create procedure for Java - default argument value", func(t *testing.T) { + className := "TestFunc" + funcName := "echoVarchar" + argName := "x" + dataType := testdatatypes.DataTypeVarchar_100 + + id := testClientHelper().Ids.RandomSchemaObjectIdentifierWithArguments(sdk.LegacyDataTypeFrom(dataType)) + argument := sdk.NewProcedureArgumentRequest(argName, dataType).WithDefaultValue(`'abc'`) + dt := sdk.NewProcedureReturnsResultDataTypeRequest(dataType) + returns := sdk.NewProcedureReturnsRequest().WithResultDataType(*dt) + handler := fmt.Sprintf("%s.%s", className, funcName) + definition := testClientHelper().Procedure.SampleJavaDefinition(t, className, funcName, argName) + packages := []sdk.ProcedurePackageRequest{*sdk.NewProcedurePackageRequest("com.snowflake:snowpark:1.14.0")} + + request := sdk.NewCreateForJavaProcedureRequest(id.SchemaObjectId(), *returns, "11", packages, handler). + WithArguments([]sdk.ProcedureArgumentRequest{*argument}). 
+ WithProcedureDefinitionWrapped(definition) + + err := client.Procedures.CreateForJava(ctx, request) + require.NoError(t, err) + t.Cleanup(testClientHelper().Procedure.DropProcedureFunc(t, id)) + + function, err := client.Procedures.ShowByID(ctx, id) + require.NoError(t, err) + + assertions.AssertThatObject(t, objectassert.ProcedureFromObject(t, function). + HasArgumentsRaw(fmt.Sprintf(`%[1]s(DEFAULT %[2]s) RETURN %[2]s`, function.ID().Name(), dataType.ToLegacyDataTypeSql())), + ) + + assertions.AssertThatObject(t, objectassert.ProcedureDetails(t, function.ID()). + HasSignature(fmt.Sprintf(`(%s %s)`, argName, dataType.ToLegacyDataTypeSql())), ) }) @@ -432,6 +493,8 @@ func TestInt_Procedures(t *testing.T) { assertions.AssertThatObject(t, objectassert.ProcedureDetails(t, function.ID()). HasSignature(fmt.Sprintf(`(%s %s)`, argName, dataType.ToLegacyDataTypeSql())). HasReturns(dataType.ToSql()). + HasReturnDataType(dataType). + HasReturnNotNull(false). HasLanguage("JAVASCRIPT"). HasBody(definition). HasNullHandling(string(sdk.NullInputBehaviorCalledOnNullInput)). @@ -439,10 +502,12 @@ func TestInt_Procedures(t *testing.T) { HasExternalAccessIntegrationsNil(). HasSecretsNil(). HasImportsNil(). + HasExactlyImportsNormalizedInAnyOrder(). HasHandlerNil(). HasRuntimeVersionNil(). HasPackagesNil(). HasTargetPathNil(). + HasNormalizedTargetPathNil(). HasInstalledPackagesNil(). HasExecuteAs("OWNER"), ) @@ -500,6 +565,8 @@ func TestInt_Procedures(t *testing.T) { assertions.AssertThatObject(t, objectassert.ProcedureDetails(t, function.ID()). HasSignature(fmt.Sprintf(`(%s %s)`, argName, dataType.ToLegacyDataTypeSql())). HasReturns(fmt.Sprintf(`%s NOT NULL`, dataType.ToSql())). + HasReturnDataType(dataType). + HasReturnNotNull(true). HasLanguage("JAVASCRIPT"). HasBody(definition). HasNullHandling(string(sdk.NullInputBehaviorReturnsNullInput)). @@ -507,10 +574,12 @@ func TestInt_Procedures(t *testing.T) { HasExternalAccessIntegrationsNil(). HasSecretsNil(). HasImportsNil(). 
+ HasExactlyImportsNormalizedInAnyOrder(). HasHandlerNil(). HasRuntimeVersionNil(). HasPackagesNil(). HasTargetPathNil(). + HasNormalizedTargetPathNil(). HasInstalledPackagesNil(). HasExecuteAs("CALLER"), ) @@ -568,6 +637,8 @@ func TestInt_Procedures(t *testing.T) { assertions.AssertThatObject(t, objectassert.ProcedureDetails(t, function.ID()). HasSignature(fmt.Sprintf(`(%s %s)`, argName, dataType.ToLegacyDataTypeSql())). HasReturns(strings.ReplaceAll(dataType.ToSql(), " ", "")). + HasReturnDataType(dataType). + HasReturnNotNull(false). HasLanguage("PYTHON"). HasBody(definition). HasNullHandling(string(sdk.NullInputBehaviorCalledOnNullInput)). @@ -575,10 +646,12 @@ func TestInt_Procedures(t *testing.T) { HasExternalAccessIntegrationsNil(). HasSecretsNil(). HasImports(`[]`). + HasExactlyImportsNormalizedInAnyOrder(). HasHandler(funcName). HasRuntimeVersion("3.8"). HasPackages(`['snowflake-snowpark-python==1.14.0']`). HasTargetPathNil(). + HasNormalizedTargetPathNil(). HasInstalledPackagesNotEmpty(). HasExecuteAs("OWNER"), ) @@ -648,6 +721,8 @@ func TestInt_Procedures(t *testing.T) { assertions.AssertThatObject(t, objectassert.ProcedureDetails(t, function.ID()). HasSignature(fmt.Sprintf(`(%s %s)`, argName, dataType.ToLegacyDataTypeSql())). HasReturns(strings.ReplaceAll(dataType.ToSql(), " ", "")+" NOT NULL"). + HasReturnDataType(dataType). + HasReturnNotNull(true). HasLanguage("PYTHON"). HasBody(definition). HasNullHandling(string(sdk.NullInputBehaviorReturnsNullInput)). @@ -655,10 +730,14 @@ func TestInt_Procedures(t *testing.T) { HasExactlyExternalAccessIntegrations(externalAccessIntegration). HasExactlySecrets(map[string]sdk.SchemaObjectIdentifier{"abc": secretId}). HasImports(fmt.Sprintf(`[%s]`, tmpPythonFunction.PythonModuleLocation())). + HasExactlyImportsNormalizedInAnyOrder(sdk.NormalizedPath{ + StageLocation: "~", PathOnStage: tmpPythonFunction.PythonFileName(), + }). HasHandler(funcName). HasRuntimeVersion("3.8"). 
HasPackages(`['snowflake-snowpark-python==1.14.0','absl-py==0.10.0']`). HasTargetPathNil(). + HasNormalizedTargetPathNil(). HasInstalledPackagesNotEmpty(). HasExecuteAs("CALLER"), ) @@ -714,6 +793,8 @@ func TestInt_Procedures(t *testing.T) { assertions.AssertThatObject(t, objectassert.ProcedureDetails(t, function.ID()). HasSignature(fmt.Sprintf(`(%s %s)`, argName, dataType.ToLegacyDataTypeSql())). HasReturns(strings.ReplaceAll(dataType.ToSql(), " ", "")). + HasReturnDataType(dataType). + HasReturnNotNull(false). HasLanguage("PYTHON"). HasBodyNil(). HasNullHandling(string(sdk.NullInputBehaviorCalledOnNullInput)). @@ -721,10 +802,14 @@ func TestInt_Procedures(t *testing.T) { HasExternalAccessIntegrationsNil(). HasSecretsNil(). HasImports(fmt.Sprintf(`[%s]`, tmpPythonFunction.PythonModuleLocation())). + HasExactlyImportsNormalizedInAnyOrder(sdk.NormalizedPath{ + StageLocation: "~", PathOnStage: tmpPythonFunction.PythonFileName(), + }). HasHandler(tmpPythonFunction.PythonHandler()). HasRuntimeVersion("3.8"). HasPackages(`['snowflake-snowpark-python==1.14.0']`). HasTargetPathNil(). + HasNormalizedTargetPathNil(). HasInstalledPackagesNotEmpty(). HasExecuteAs("OWNER"), ) @@ -791,6 +876,8 @@ func TestInt_Procedures(t *testing.T) { assertions.AssertThatObject(t, objectassert.ProcedureDetails(t, function.ID()). HasSignature(fmt.Sprintf(`(%s %s)`, argName, dataType.ToLegacyDataTypeSql())). HasReturns(strings.ReplaceAll(dataType.ToSql(), " ", "")+" NOT NULL"). + HasReturnDataType(dataType). + HasReturnNotNull(true). HasLanguage("PYTHON"). HasBodyNil(). HasNullHandling(string(sdk.NullInputBehaviorReturnsNullInput)). @@ -798,10 +885,14 @@ func TestInt_Procedures(t *testing.T) { HasExactlyExternalAccessIntegrations(externalAccessIntegration). HasExactlySecrets(map[string]sdk.SchemaObjectIdentifier{"abc": secretId}). HasImports(fmt.Sprintf(`[%s]`, tmpPythonFunction.PythonModuleLocation())). 
+ HasExactlyImportsNormalizedInAnyOrder(sdk.NormalizedPath{ + StageLocation: "~", PathOnStage: tmpPythonFunction.PythonFileName(), + }). HasHandler(tmpPythonFunction.PythonHandler()). HasRuntimeVersion("3.8"). HasPackages(`['snowflake-snowpark-python==1.14.0','absl-py==0.10.0']`). HasTargetPathNil(). + HasNormalizedTargetPathNil(). HasInstalledPackagesNotEmpty(). HasExecuteAs("CALLER"), ) @@ -861,6 +952,8 @@ func TestInt_Procedures(t *testing.T) { assertions.AssertThatObject(t, objectassert.ProcedureDetails(t, function.ID()). HasSignature(fmt.Sprintf(`(%s %s)`, argName, dataType.ToLegacyDataTypeSql())). HasReturns(dataType.ToSql()). + HasReturnDataType(dataType). + HasReturnNotNull(false). HasLanguage("SCALA"). HasBody(definition). HasNullHandling(string(sdk.NullInputBehaviorCalledOnNullInput)). @@ -868,10 +961,12 @@ func TestInt_Procedures(t *testing.T) { HasExternalAccessIntegrationsNil(). HasSecretsNil(). HasImports(`[]`). + HasExactlyImportsNormalizedInAnyOrder(). HasHandler(handler). HasRuntimeVersion("2.12"). HasPackages(`[com.snowflake:snowpark:1.14.0]`). HasTargetPathNil(). + HasNormalizedTargetPathNil(). HasInstalledPackagesNil(). HasExecuteAs("OWNER"), ) @@ -948,6 +1043,8 @@ func TestInt_Procedures(t *testing.T) { assertions.AssertThatObject(t, objectassert.ProcedureDetails(t, function.ID()). HasSignature(fmt.Sprintf(`(%s %s)`, argName, dataType.ToLegacyDataTypeSql())). HasReturns(fmt.Sprintf(`%s NOT NULL`, dataType.ToSql())). + HasReturnDataType(dataType). + HasReturnNotNull(true). HasLanguage("SCALA"). HasBody(definition). HasNullHandling(string(sdk.NullInputBehaviorReturnsNullInput)). @@ -955,10 +1052,14 @@ func TestInt_Procedures(t *testing.T) { HasExactlyExternalAccessIntegrations(externalAccessIntegration). HasExactlySecrets(map[string]sdk.SchemaObjectIdentifier{"abc": secretId}). HasImports(fmt.Sprintf(`[%s]`, tmpJavaProcedure.JarLocation())). 
+ HasExactlyImportsNormalizedInAnyOrder(sdk.NormalizedPath{ + StageLocation: "~", PathOnStage: tmpJavaProcedure.JarName, + }). HasHandler(handler). HasRuntimeVersion("2.12"). HasPackages(`[com.snowflake:snowpark:1.14.0,com.snowflake:telemetry:0.1.0]`). HasTargetPath(targetPath). + HasNormalizedTargetPath("~", jarName). HasInstalledPackagesNil(). HasExecuteAs("CALLER"), ) @@ -1015,6 +1116,8 @@ func TestInt_Procedures(t *testing.T) { assertions.AssertThatObject(t, objectassert.ProcedureDetails(t, function.ID()). HasSignature(fmt.Sprintf(`(%s %s)`, argName, dataType.ToLegacyDataTypeSql())). HasReturns(dataType.ToSql()). + HasReturnDataType(dataType). + HasReturnNotNull(false). HasLanguage("SCALA"). HasBodyNil(). HasNullHandling(string(sdk.NullInputBehaviorCalledOnNullInput)). @@ -1022,10 +1125,14 @@ func TestInt_Procedures(t *testing.T) { HasExternalAccessIntegrationsNil(). HasSecretsNil(). HasImports(fmt.Sprintf(`[%s]`, importPath)). + HasExactlyImportsNormalizedInAnyOrder(sdk.NormalizedPath{ + StageLocation: "~", PathOnStage: tmpJavaProcedure.JarName, + }). HasHandler(handler). HasRuntimeVersion("2.12"). HasPackages(`[com.snowflake:snowpark:1.14.0]`). HasTargetPathNil(). + HasNormalizedTargetPathNil(). HasInstalledPackagesNil(). HasExecuteAs("OWNER"), ) @@ -1094,6 +1201,8 @@ func TestInt_Procedures(t *testing.T) { assertions.AssertThatObject(t, objectassert.ProcedureDetails(t, function.ID()). HasSignature(fmt.Sprintf(`(%s %s)`, argName, dataType.ToLegacyDataTypeSql())). HasReturns(fmt.Sprintf(`%s NOT NULL`, dataType.ToSql())). + HasReturnDataType(dataType). + HasReturnNotNull(true). HasLanguage("SCALA"). HasBodyNil(). HasNullHandling(string(sdk.NullInputBehaviorReturnsNullInput)). @@ -1101,10 +1210,14 @@ func TestInt_Procedures(t *testing.T) { HasExactlyExternalAccessIntegrations(externalAccessIntegration). HasExactlySecrets(map[string]sdk.SchemaObjectIdentifier{"abc": secretId}). HasImports(fmt.Sprintf(`[%s]`, tmpJavaProcedure.JarLocation())). 
+ HasExactlyImportsNormalizedInAnyOrder(sdk.NormalizedPath{ + StageLocation: "~", PathOnStage: tmpJavaProcedure.JarName, + }). HasHandler(handler). HasRuntimeVersion("2.12"). HasPackages(`[com.snowflake:snowpark:1.14.0,com.snowflake:telemetry:0.1.0]`). HasTargetPathNil(). + HasNormalizedTargetPathNil(). HasInstalledPackagesNil(). HasExecuteAs("CALLER"), ) @@ -1157,6 +1270,8 @@ func TestInt_Procedures(t *testing.T) { assertions.AssertThatObject(t, objectassert.ProcedureDetails(t, function.ID()). HasSignature(fmt.Sprintf(`(%s %s)`, argName, dataType.ToLegacyDataTypeSql())). HasReturns(dataType.ToSql()). + HasReturnDataType(dataType). + HasReturnNotNull(false). HasLanguage("SQL"). HasBody(definition). HasNullHandlingNil(). @@ -1164,10 +1279,12 @@ func TestInt_Procedures(t *testing.T) { HasExternalAccessIntegrationsNil(). HasSecretsNil(). HasImportsNil(). + HasExactlyImportsNormalizedInAnyOrder(). HasHandlerNil(). HasRuntimeVersionNil(). HasPackagesNil(). HasTargetPathNil(). + HasNormalizedTargetPathNil(). HasInstalledPackagesNil(). HasExecuteAs("OWNER"), ) @@ -1178,6 +1295,35 @@ func TestInt_Procedures(t *testing.T) { ) }) + // proves that we don't get default argument values from SHOW and DESCRIBE + t.Run("create procedure for SQL - default argument value", func(t *testing.T) { + argName := "x" + dataType := testdatatypes.DataTypeFloat + id := testClientHelper().Ids.RandomSchemaObjectIdentifierWithArguments(sdk.LegacyDataTypeFrom(dataType)) + + definition := testClientHelper().Procedure.SampleSqlDefinition(t) + dt := sdk.NewProcedureReturnsResultDataTypeRequest(dataType) + returns := sdk.NewProcedureSQLReturnsRequest().WithResultDataType(*dt) + argument := sdk.NewProcedureArgumentRequest(argName, dataType).WithDefaultValue("3.123") + request := sdk.NewCreateForSQLProcedureRequestDefinitionWrapped(id.SchemaObjectId(), *returns, definition). 
+ WithArguments([]sdk.ProcedureArgumentRequest{*argument}) + + err := client.Procedures.CreateForSQL(ctx, request) + require.NoError(t, err) + t.Cleanup(testClientHelper().Procedure.DropProcedureFunc(t, id)) + + function, err := client.Procedures.ShowByID(ctx, id) + require.NoError(t, err) + + assertions.AssertThatObject(t, objectassert.ProcedureFromObject(t, function). + HasArgumentsRaw(fmt.Sprintf(`%[1]s(DEFAULT %[2]s) RETURN %[2]s`, function.ID().Name(), dataType.ToLegacyDataTypeSql())), + ) + + assertions.AssertThatObject(t, objectassert.ProcedureDetails(t, function.ID()). + HasSignature(fmt.Sprintf(`(%s %s)`, argName, dataType.ToLegacyDataTypeSql())), + ) + }) + t.Run("create procedure for SQL - inline full", func(t *testing.T) { argName := "x" dataType := testdatatypes.DataTypeFloat @@ -1227,6 +1373,8 @@ func TestInt_Procedures(t *testing.T) { assertions.AssertThatObject(t, objectassert.ProcedureDetails(t, function.ID()). HasSignature(fmt.Sprintf(`(%s %s)`, argName, dataType.ToLegacyDataTypeSql())). HasReturns(fmt.Sprintf(`%s NOT NULL`, dataType.ToSql())). + HasReturnDataType(dataType). + HasReturnNotNull(true). HasLanguage("SQL"). HasBody(definition). // TODO [SNOW-1348103]: null handling and volatility are not returned and is present in create syntax @@ -1236,10 +1384,12 @@ func TestInt_Procedures(t *testing.T) { HasExternalAccessIntegrationsNil(). HasSecretsNil(). HasImportsNil(). + HasExactlyImportsNormalizedInAnyOrder(). HasHandlerNil(). HasRuntimeVersionNil(). HasPackagesNil(). HasTargetPathNil(). + HasNormalizedTargetPathNil(). HasInstalledPackagesNil(). 
HasExecuteAs("CALLER"), ) @@ -1250,6 +1400,70 @@ func TestInt_Procedures(t *testing.T) { ) }) + t.Run("create procedure for SQL - no arguments", func(t *testing.T) { + dataType := testdatatypes.DataTypeFloat + id := testClientHelper().Ids.RandomSchemaObjectIdentifierWithArguments() + + definition := testClientHelper().Procedure.SampleSqlDefinition(t) + dt := sdk.NewProcedureReturnsResultDataTypeRequest(dataType) + returns := sdk.NewProcedureSQLReturnsRequest().WithResultDataType(*dt) + request := sdk.NewCreateForSQLProcedureRequestDefinitionWrapped(id.SchemaObjectId(), *returns, definition) + + err := client.Procedures.CreateForSQL(ctx, request) + require.NoError(t, err) + t.Cleanup(testClientHelper().Procedure.DropProcedureFunc(t, id)) + + function, err := client.Procedures.ShowByID(ctx, id) + require.NoError(t, err) + + assertions.AssertThatObject(t, objectassert.ProcedureFromObject(t, function). + HasCreatedOnNotEmpty(). + HasName(id.Name()). + HasSchemaName(id.SchemaName()). + HasIsBuiltin(false). + HasIsAggregate(false). + HasIsAnsi(false). + HasMinNumArguments(0). + HasMaxNumArguments(0). + HasArgumentsOld([]sdk.DataType{}). + HasArgumentsRaw(fmt.Sprintf(`%[1]s() RETURN %[2]s`, function.ID().Name(), dataType.ToLegacyDataTypeSql())). + HasDescription(sdk.DefaultProcedureComment). + HasCatalogName(id.DatabaseName()). + HasIsTableFunction(false). + HasValidForClustering(false). + HasIsSecure(false). + HasExternalAccessIntegrationsNil(). + HasSecretsNil(), + ) + + assertions.AssertThatObject(t, objectassert.ProcedureDetails(t, function.ID()). + HasSignature("()"). + HasReturns(dataType.ToSql()). + HasReturnDataType(dataType). + HasReturnNotNull(false). + HasLanguage("SQL"). + HasBody(definition). + HasNullHandlingNil(). + HasVolatilityNil(). + HasExternalAccessIntegrationsNil(). + HasSecretsNil(). + HasImportsNil(). + HasExactlyImportsNormalizedInAnyOrder(). + HasHandlerNil(). + HasRuntimeVersionNil(). + HasPackagesNil(). + HasTargetPathNil(). 
+ HasNormalizedTargetPathNil(). + HasInstalledPackagesNil(). + HasExecuteAs("OWNER"), + ) + + assertions.AssertThatObject(t, objectparametersassert.ProcedureParameters(t, id). + HasAllDefaults(). + HasAllDefaultsExplicit(), + ) + }) + // TODO [SNOW-1348103]: adjust or remove t.Run("create procedure for Java: returns table", func(t *testing.T) { t.Skipf("Skipped for now; left as inspiration for resource rework as part of SNOW-1348103")