diff --git a/MIGRATION_GUIDE.md b/MIGRATION_GUIDE.md index 085302fc64..56a1af3788 100644 --- a/MIGRATION_GUIDE.md +++ b/MIGRATION_GUIDE.md @@ -6,6 +6,66 @@ across different versions. > [!TIP] > We highly recommend upgrading the versions one by one instead of bulk upgrades. + +## v0.98.0 ➞ v0.99.0 + +### snowflake_task resource changes + +New fields: +- `config` + +### snowflake_tasks data source changes + +New filtering options: +- `with_parameters` +- `like` +- `in` +- `starts_with` +- `root_only` +- `limit` + +New output fields: +- `show_output` +- `parameters` + +Breaking changes: +- `database` and `schema` are now nested under the `in` field + +Before: +```terraform +data "snowflake_tasks" "old_tasks" { +  database = "" +  schema = "" +} +``` +After: +```terraform +data "snowflake_tasks" "new_tasks" { +  in { +    # for IN SCHEMA specify: +    schema = "." + +    # for IN DATABASE specify: +    database = "" +  } +} +``` +- The `tasks` field now organizes the output of SHOW TASKS under the `show_output` field and the output of SHOW PARAMETERS under the `parameters` field. + +Before: +```terraform +output "simple_output" { +  value = data.snowflake_tasks.test.tasks[0].name +} +``` +After: +```terraform +output "simple_output" { +  value = data.snowflake_tasks.test.tasks[0].show_output[0].name +} +``` + +Please adjust your Terraform configuration files. ## v0.98.0 ➞ v0.99.0 diff --git a/docs/data-sources/tasks.md b/docs/data-sources/tasks.md index ac557968e0..a9b8e7efc0 100644 --- a/docs/data-sources/tasks.md +++ b/docs/data-sources/tasks.md @@ -2,42 +2,983 @@ page_title: "snowflake_tasks Data Source - terraform-provider-snowflake" subcategory: "" description: |- -   + Data source used to get details of filtered tasks. Filtering is aligned with the current possibilities for SHOW TASKS https://docs.snowflake.com/en/sql-reference/sql/show-tasks query. The results of SHOW and SHOW PARAMETERS IN are encapsulated in one output collection tasks. --- -# snowflake_tasks (Data Source) +!> **V1 release candidate** This data source was reworked and is a release candidate for the V1. We do not expect significant changes in it before the V1. We will welcome any feedback and adjust the data source if needed. Any errors reported will be resolved with a higher priority. We encourage checking this data source out before the V1 release. Please follow the [migration guide](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/MIGRATION_GUIDE.md#v0980--v0990) to use it. +# snowflake_tasks (Data Source) +Data source used to get details of filtered tasks. Filtering is aligned with the current possibilities for [SHOW TASKS](https://docs.snowflake.com/en/sql-reference/sql/show-tasks) query. The results of SHOW and SHOW PARAMETERS IN are encapsulated in one output collection `tasks`. ## Example Usage ```terraform -data "snowflake_tasks" "current" { -  database = "MYDB" -  schema   = "MYSCHEMA" +# Simple usage +data "snowflake_tasks" "simple" { +} + +output "simple_output" { +  value = data.snowflake_tasks.simple.tasks +} + +# Filtering (like) +data "snowflake_tasks" "like" { +  like = "task-name" +} + +output "like_output" { +  value = data.snowflake_tasks.like.tasks +} + +# Filtering (in - account - database - schema - application - application package) +data "snowflake_tasks" "in_account" { +  in { +    account = true +  } +} + +data "snowflake_tasks" "in_database" { +  in { +    database = "" +  } +} + +data "snowflake_tasks" "in_schema" { +  in { +    schema = "." 
+  } +} + +data "snowflake_tasks" "in_application" { +  in { +    application = "" +  } +} + +data "snowflake_tasks" "in_application_package" { +  in { +    application_package = "" +  } +} + +output "in_output" { +  value = { +    "account" : data.snowflake_tasks.in_account.tasks, +    "database" : data.snowflake_tasks.in_database.tasks, +    "schema" : data.snowflake_tasks.in_schema.tasks, +    "application" : data.snowflake_tasks.in_application.tasks, +    "application_package" : data.snowflake_tasks.in_application_package.tasks, +  } +} + +# Filtering (root only tasks) +data "snowflake_tasks" "root_only" { +  root_only = true +} + +output "root_only_output" { +  value = data.snowflake_tasks.root_only.tasks +} + +# Filtering (starts_with) +data "snowflake_tasks" "starts_with" { +  starts_with = "task-" +} + +output "starts_with_output" { +  value = data.snowflake_tasks.starts_with.tasks +} + +# Filtering (limit) +data "snowflake_tasks" "limit" { +  limit { +    rows = 10 +    from = "task-" +  } +} + +output "limit_output" { +  value = data.snowflake_tasks.limit.tasks +} + +# Without additional data (to limit the number of calls made for every found task) +data "snowflake_tasks" "only_show" { +  # with_parameters is turned on by default and it calls SHOW PARAMETERS FOR TASK for every task found and attaches its output to tasks.*.parameters field +  with_parameters = false +} + +output "only_show_output" { +  value = data.snowflake_tasks.only_show.tasks +} + +# Ensure the number of tasks is at least one (with the use of postcondition) +data "snowflake_tasks" "assert_with_postcondition" { +  starts_with = "task-name" +  lifecycle { +    postcondition { +      condition = length(self.tasks) > 0 +      error_message = "there should be at least one task" +    } +  } +} + +# Ensure the number of tasks is exactly one (with the use of check block) +check "task_check" { +  data "snowflake_tasks" "assert_with_check_block" { +    like = "task-name" +  } + +  assert { +    condition = length(data.snowflake_tasks.assert_with_check_block.tasks) == 1 +    error_message = "tasks filtered by '${data.snowflake_tasks.assert_with_check_block.like}' returned ${length(data.snowflake_tasks.assert_with_check_block.tasks)} tasks where one was expected" +  } } ``` ## Schema -### Required +### Optional -- `database` (String) The database from which to return the schemas from. -- `schema` (String) The schema from which to return the tasks from. +- `in` (Block List, Max: 1) IN clause to filter the list of objects (see [below for nested schema](#nestedblock--in)) +- `like` (String) Filters the output with **case-insensitive** pattern, with support for SQL wildcard characters (`%` and `_`). +- `limit` (Block List, Max: 1) Limits the number of rows returned. If the `limit.from` is set, then the limit will start from the first element matched by the expression. The expression is only used to match with the first element, later on the elements are not matched by the prefix, but you can enforce a certain pattern with `starts_with` or `like`. (see [below for nested schema](#nestedblock--limit)) +- `root_only` (Boolean) Filters the command output to return only root tasks (tasks with no predecessors). +- `starts_with` (String) Filters the output with **case-sensitive** characters indicating the beginning of the object name. +- `with_parameters` (Boolean) Runs SHOW PARAMETERS FOR TASK for each task returned by SHOW TASKS. The output of SHOW PARAMETERS is saved to the parameters field as a map. By default this value is set to true. 
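+
+A minimal sketch of the `limit` guidance above (the `task_` prefix and the data source name are illustrative only): because `limit.from` just anchors where counting starts, pair `limit` with `starts_with` when every returned row should match the prefix.
+
+```terraform
+# Return at most 10 tasks, all of which start with the assumed "task_" prefix.
+data "snowflake_tasks" "first_ten_with_prefix" {
+  starts_with = "task_"
+
+  limit {
+    rows = 10
+  }
+}
+```
+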
### Read-Only - `id` (String) The ID of this resource. -- `tasks` (List of Object) The tasks in the schema (see [below for nested schema](#nestedatt--tasks)) +- `tasks` (List of Object) Holds the aggregated output of all task details queries. (see [below for nested schema](#nestedatt--tasks)) + + +### Nested Schema for `in` + +Optional: + +- `account` (Boolean) Returns records for the entire account. +- `application` (String) Returns records for the specified application. +- `application_package` (String) Returns records for the specified application package. +- `database` (String) Returns records for the current database in use or for a specified database. +- `schema` (String) Returns records for the current schema in use or a specified schema. Use fully qualified name. + + + +### Nested Schema for `limit` + +Required: + +- `rows` (Number) The maximum number of rows to return. + +Optional: + +- `from` (String) Specifies a **case-sensitive** pattern that is used to match object name. After the first match, the limit on the number of rows will be applied. + ### Nested Schema for `tasks` Read-Only: +- `parameters` (List of Object) (see [below for nested schema](#nestedobjatt--tasks--parameters)) +- `show_output` (List of Object) (see [below for nested schema](#nestedobjatt--tasks--show_output)) + + +### Nested Schema for `tasks.parameters` + +Read-Only: + +- `abort_detached_query` (List of Object) (see [below for nested schema](#nestedobjatt--tasks--parameters--abort_detached_query)) +- `autocommit` (List of Object) (see [below for nested schema](#nestedobjatt--tasks--parameters--autocommit)) +- `binary_input_format` (List of Object) (see [below for nested schema](#nestedobjatt--tasks--parameters--binary_input_format)) +- `binary_output_format` (List of Object) (see [below for nested schema](#nestedobjatt--tasks--parameters--binary_output_format)) +- `client_memory_limit` (List of Object) (see [below for nested schema](#nestedobjatt--tasks--parameters--client_memory_limit)) +- `client_metadata_request_use_connection_ctx` (List of Object) (see [below for nested schema](#nestedobjatt--tasks--parameters--client_metadata_request_use_connection_ctx)) +- `client_prefetch_threads` (List of Object) (see [below for nested schema](#nestedobjatt--tasks--parameters--client_prefetch_threads)) +- `client_result_chunk_size` (List of Object) (see [below for nested schema](#nestedobjatt--tasks--parameters--client_result_chunk_size)) +- `client_result_column_case_insensitive` (List of Object) (see [below for nested schema](#nestedobjatt--tasks--parameters--client_result_column_case_insensitive)) +- `client_session_keep_alive` (List of Object) (see [below for nested schema](#nestedobjatt--tasks--parameters--client_session_keep_alive)) +- `client_session_keep_alive_heartbeat_frequency` (List of Object) (see [below for nested schema](#nestedobjatt--tasks--parameters--client_session_keep_alive_heartbeat_frequency)) +- `client_timestamp_type_mapping` (List of Object) (see [below for nested schema](#nestedobjatt--tasks--parameters--client_timestamp_type_mapping)) +- `date_input_format` (List of Object) (see [below for nested schema](#nestedobjatt--tasks--parameters--date_input_format)) +- `date_output_format` (List of Object) (see [below for nested schema](#nestedobjatt--tasks--parameters--date_output_format)) +- `enable_unload_physical_type_optimization` (List of Object) (see [below for nested schema](#nestedobjatt--tasks--parameters--enable_unload_physical_type_optimization)) +- `error_on_nondeterministic_merge` 
(List of Object) (see [below for nested schema](#nestedobjatt--tasks--parameters--error_on_nondeterministic_merge)) +- `error_on_nondeterministic_update` (List of Object) (see [below for nested schema](#nestedobjatt--tasks--parameters--error_on_nondeterministic_update)) +- `geography_output_format` (List of Object) (see [below for nested schema](#nestedobjatt--tasks--parameters--geography_output_format)) +- `geometry_output_format` (List of Object) (see [below for nested schema](#nestedobjatt--tasks--parameters--geometry_output_format)) +- `jdbc_treat_timestamp_ntz_as_utc` (List of Object) (see [below for nested schema](#nestedobjatt--tasks--parameters--jdbc_treat_timestamp_ntz_as_utc)) +- `jdbc_use_session_timezone` (List of Object) (see [below for nested schema](#nestedobjatt--tasks--parameters--jdbc_use_session_timezone)) +- `json_indent` (List of Object) (see [below for nested schema](#nestedobjatt--tasks--parameters--json_indent)) +- `lock_timeout` (List of Object) (see [below for nested schema](#nestedobjatt--tasks--parameters--lock_timeout)) +- `log_level` (List of Object) (see [below for nested schema](#nestedobjatt--tasks--parameters--log_level)) +- `multi_statement_count` (List of Object) (see [below for nested schema](#nestedobjatt--tasks--parameters--multi_statement_count)) +- `noorder_sequence_as_default` (List of Object) (see [below for nested schema](#nestedobjatt--tasks--parameters--noorder_sequence_as_default)) +- `odbc_treat_decimal_as_int` (List of Object) (see [below for nested schema](#nestedobjatt--tasks--parameters--odbc_treat_decimal_as_int)) +- `query_tag` (List of Object) (see [below for nested schema](#nestedobjatt--tasks--parameters--query_tag)) +- `quoted_identifiers_ignore_case` (List of Object) (see [below for nested schema](#nestedobjatt--tasks--parameters--quoted_identifiers_ignore_case)) +- `rows_per_resultset` (List of Object) (see [below for nested schema](#nestedobjatt--tasks--parameters--rows_per_resultset)) +- `s3_stage_vpce_dns_name` (List of Object) (see [below for nested schema](#nestedobjatt--tasks--parameters--s3_stage_vpce_dns_name)) +- `search_path` (List of Object) (see [below for nested schema](#nestedobjatt--tasks--parameters--search_path)) +- `statement_queued_timeout_in_seconds` (List of Object) (see [below for nested schema](#nestedobjatt--tasks--parameters--statement_queued_timeout_in_seconds)) +- `statement_timeout_in_seconds` (List of Object) (see [below for nested schema](#nestedobjatt--tasks--parameters--statement_timeout_in_seconds)) +- `strict_json_output` (List of Object) (see [below for nested schema](#nestedobjatt--tasks--parameters--strict_json_output)) +- `suspend_task_after_num_failures` (List of Object) (see [below for nested schema](#nestedobjatt--tasks--parameters--suspend_task_after_num_failures)) +- `task_auto_retry_attempts` (List of Object) (see [below for nested schema](#nestedobjatt--tasks--parameters--task_auto_retry_attempts)) +- `time_input_format` (List of Object) (see [below for nested schema](#nestedobjatt--tasks--parameters--time_input_format)) +- `time_output_format` (List of Object) (see [below for nested schema](#nestedobjatt--tasks--parameters--time_output_format)) +- `timestamp_day_is_always_24h` (List of Object) (see [below for nested schema](#nestedobjatt--tasks--parameters--timestamp_day_is_always_24h)) +- `timestamp_input_format` (List of Object) (see [below for nested schema](#nestedobjatt--tasks--parameters--timestamp_input_format)) +- `timestamp_ltz_output_format` (List of Object) (see [below for 
nested schema](#nestedobjatt--tasks--parameters--timestamp_ltz_output_format)) +- `timestamp_ntz_output_format` (List of Object) (see [below for nested schema](#nestedobjatt--tasks--parameters--timestamp_ntz_output_format)) +- `timestamp_output_format` (List of Object) (see [below for nested schema](#nestedobjatt--tasks--parameters--timestamp_output_format)) +- `timestamp_type_mapping` (List of Object) (see [below for nested schema](#nestedobjatt--tasks--parameters--timestamp_type_mapping)) +- `timestamp_tz_output_format` (List of Object) (see [below for nested schema](#nestedobjatt--tasks--parameters--timestamp_tz_output_format)) +- `timezone` (List of Object) (see [below for nested schema](#nestedobjatt--tasks--parameters--timezone)) +- `trace_level` (List of Object) (see [below for nested schema](#nestedobjatt--tasks--parameters--trace_level)) +- `transaction_abort_on_error` (List of Object) (see [below for nested schema](#nestedobjatt--tasks--parameters--transaction_abort_on_error)) +- `transaction_default_isolation_level` (List of Object) (see [below for nested schema](#nestedobjatt--tasks--parameters--transaction_default_isolation_level)) +- `two_digit_century_start` (List of Object) (see [below for nested schema](#nestedobjatt--tasks--parameters--two_digit_century_start)) +- `unsupported_ddl_action` (List of Object) (see [below for nested schema](#nestedobjatt--tasks--parameters--unsupported_ddl_action)) +- `use_cached_result` (List of Object) (see [below for nested schema](#nestedobjatt--tasks--parameters--use_cached_result)) +- `user_task_managed_initial_warehouse_size` (List of Object) (see [below for nested schema](#nestedobjatt--tasks--parameters--user_task_managed_initial_warehouse_size)) +- `user_task_minimum_trigger_interval_in_seconds` (List of Object) (see [below for nested schema](#nestedobjatt--tasks--parameters--user_task_minimum_trigger_interval_in_seconds)) +- `user_task_timeout_ms` (List of Object) (see [below for nested schema](#nestedobjatt--tasks--parameters--user_task_timeout_ms)) +- `week_of_year_policy` (List of Object) (see [below for nested schema](#nestedobjatt--tasks--parameters--week_of_year_policy)) +- `week_start` (List of Object) (see [below for nested schema](#nestedobjatt--tasks--parameters--week_start)) + + +### Nested Schema for `tasks.parameters.abort_detached_query` + +Read-Only: + +- `default` (String) +- `description` (String) +- `key` (String) +- `level` (String) +- `value` (String) + + + +### Nested Schema for `tasks.parameters.autocommit` + +Read-Only: + +- `default` (String) +- `description` (String) +- `key` (String) +- `level` (String) +- `value` (String) + + + +### Nested Schema for `tasks.parameters.binary_input_format` + +Read-Only: + +- `default` (String) +- `description` (String) +- `key` (String) +- `level` (String) +- `value` (String) + + + +### Nested Schema for `tasks.parameters.binary_output_format` + +Read-Only: + +- `default` (String) +- `description` (String) +- `key` (String) +- `level` (String) +- `value` (String) + + + +### Nested Schema for `tasks.parameters.client_memory_limit` + +Read-Only: + +- `default` (String) +- `description` (String) +- `key` (String) +- `level` (String) +- `value` (String) + + + +### Nested Schema for `tasks.parameters.client_metadata_request_use_connection_ctx` + +Read-Only: + +- `default` (String) +- `description` (String) +- `key` (String) +- `level` (String) +- `value` (String) + + + +### Nested Schema for `tasks.parameters.client_prefetch_threads` + +Read-Only: + +- `default` (String) +- 
`description` (String) +- `key` (String) +- `level` (String) +- `value` (String) + + + +### Nested Schema for `tasks.parameters.client_result_chunk_size` + +Read-Only: + +- `default` (String) +- `description` (String) +- `key` (String) +- `level` (String) +- `value` (String) + + + +### Nested Schema for `tasks.parameters.client_result_column_case_insensitive` + +Read-Only: + +- `default` (String) +- `description` (String) +- `key` (String) +- `level` (String) +- `value` (String) + + + +### Nested Schema for `tasks.parameters.client_session_keep_alive` + +Read-Only: + +- `default` (String) +- `description` (String) +- `key` (String) +- `level` (String) +- `value` (String) + + + +### Nested Schema for `tasks.parameters.client_session_keep_alive_heartbeat_frequency` + +Read-Only: + +- `default` (String) +- `description` (String) +- `key` (String) +- `level` (String) +- `value` (String) + + + +### Nested Schema for `tasks.parameters.client_timestamp_type_mapping` + +Read-Only: + +- `default` (String) +- `description` (String) +- `key` (String) +- `level` (String) +- `value` (String) + + + +### Nested Schema for `tasks.parameters.date_input_format` + +Read-Only: + +- `default` (String) +- `description` (String) +- `key` (String) +- `level` (String) +- `value` (String) + + + +### Nested Schema for `tasks.parameters.date_output_format` + +Read-Only: + +- `default` (String) +- `description` (String) +- `key` (String) +- `level` (String) +- `value` (String) + + + +### Nested Schema for `tasks.parameters.enable_unload_physical_type_optimization` + +Read-Only: + +- `default` (String) +- `description` (String) +- `key` (String) +- `level` (String) +- `value` (String) + + + +### Nested Schema for `tasks.parameters.error_on_nondeterministic_merge` + +Read-Only: + +- `default` (String) +- `description` (String) +- `key` (String) +- `level` (String) +- `value` (String) + + + +### Nested Schema for `tasks.parameters.error_on_nondeterministic_update` + +Read-Only: + +- `default` (String) +- `description` (String) +- `key` (String) +- `level` (String) +- `value` (String) + + + +### Nested Schema for `tasks.parameters.geography_output_format` + +Read-Only: + +- `default` (String) +- `description` (String) +- `key` (String) +- `level` (String) +- `value` (String) + + + +### Nested Schema for `tasks.parameters.geometry_output_format` + +Read-Only: + +- `default` (String) +- `description` (String) +- `key` (String) +- `level` (String) +- `value` (String) + + + +### Nested Schema for `tasks.parameters.jdbc_treat_timestamp_ntz_as_utc` + +Read-Only: + +- `default` (String) +- `description` (String) +- `key` (String) +- `level` (String) +- `value` (String) + + + +### Nested Schema for `tasks.parameters.jdbc_use_session_timezone` + +Read-Only: + +- `default` (String) +- `description` (String) +- `key` (String) +- `level` (String) +- `value` (String) + + + +### Nested Schema for `tasks.parameters.json_indent` + +Read-Only: + +- `default` (String) +- `description` (String) +- `key` (String) +- `level` (String) +- `value` (String) + + + +### Nested Schema for `tasks.parameters.lock_timeout` + +Read-Only: + +- `default` (String) +- `description` (String) +- `key` (String) +- `level` (String) +- `value` (String) + + + +### Nested Schema for `tasks.parameters.log_level` + +Read-Only: + +- `default` (String) +- `description` (String) +- `key` (String) +- `level` (String) +- `value` (String) + + + +### Nested Schema for `tasks.parameters.multi_statement_count` + +Read-Only: + +- `default` (String) +- `description` (String) 
+- `key` (String) +- `level` (String) +- `value` (String) + + + +### Nested Schema for `tasks.parameters.noorder_sequence_as_default` + +Read-Only: + +- `default` (String) +- `description` (String) +- `key` (String) +- `level` (String) +- `value` (String) + + + +### Nested Schema for `tasks.parameters.odbc_treat_decimal_as_int` + +Read-Only: + +- `default` (String) +- `description` (String) +- `key` (String) +- `level` (String) +- `value` (String) + + + +### Nested Schema for `tasks.parameters.query_tag` + +Read-Only: + +- `default` (String) +- `description` (String) +- `key` (String) +- `level` (String) +- `value` (String) + + + +### Nested Schema for `tasks.parameters.quoted_identifiers_ignore_case` + +Read-Only: + +- `default` (String) +- `description` (String) +- `key` (String) +- `level` (String) +- `value` (String) + + + +### Nested Schema for `tasks.parameters.rows_per_resultset` + +Read-Only: + +- `default` (String) +- `description` (String) +- `key` (String) +- `level` (String) +- `value` (String) + + + +### Nested Schema for `tasks.parameters.s3_stage_vpce_dns_name` + +Read-Only: + +- `default` (String) +- `description` (String) +- `key` (String) +- `level` (String) +- `value` (String) + + + +### Nested Schema for `tasks.parameters.search_path` + +Read-Only: + +- `default` (String) +- `description` (String) +- `key` (String) +- `level` (String) +- `value` (String) + + + +### Nested Schema for `tasks.parameters.statement_queued_timeout_in_seconds` + +Read-Only: + +- `default` (String) +- `description` (String) +- `key` (String) +- `level` (String) +- `value` (String) + + + +### Nested Schema for `tasks.parameters.statement_timeout_in_seconds` + +Read-Only: + +- `default` (String) +- `description` (String) +- `key` (String) +- `level` (String) +- `value` (String) + + + +### Nested Schema for `tasks.parameters.strict_json_output` + +Read-Only: + +- `default` (String) +- `description` (String) +- `key` (String) +- `level` (String) +- `value` (String) + + + +### Nested Schema for `tasks.parameters.suspend_task_after_num_failures` + +Read-Only: + +- `default` (String) +- `description` (String) +- `key` (String) +- `level` (String) +- `value` (String) + + + +### Nested Schema for `tasks.parameters.task_auto_retry_attempts` + +Read-Only: + +- `default` (String) +- `description` (String) +- `key` (String) +- `level` (String) +- `value` (String) + + + +### Nested Schema for `tasks.parameters.time_input_format` + +Read-Only: + +- `default` (String) +- `description` (String) +- `key` (String) +- `level` (String) +- `value` (String) + + + +### Nested Schema for `tasks.parameters.time_output_format` + +Read-Only: + +- `default` (String) +- `description` (String) +- `key` (String) +- `level` (String) +- `value` (String) + + + +### Nested Schema for `tasks.parameters.timestamp_day_is_always_24h` + +Read-Only: + +- `default` (String) +- `description` (String) +- `key` (String) +- `level` (String) +- `value` (String) + + + +### Nested Schema for `tasks.parameters.timestamp_input_format` + +Read-Only: + +- `default` (String) +- `description` (String) +- `key` (String) +- `level` (String) +- `value` (String) + + + +### Nested Schema for `tasks.parameters.timestamp_ltz_output_format` + +Read-Only: + +- `default` (String) +- `description` (String) +- `key` (String) +- `level` (String) +- `value` (String) + + + +### Nested Schema for `tasks.parameters.timestamp_ntz_output_format` + +Read-Only: + +- `default` (String) +- `description` (String) +- `key` (String) +- `level` (String) +- `value` (String) + 
+ + +### Nested Schema for `tasks.parameters.timestamp_output_format` + +Read-Only: + +- `default` (String) +- `description` (String) +- `key` (String) +- `level` (String) +- `value` (String) + + + +### Nested Schema for `tasks.parameters.timestamp_type_mapping` + +Read-Only: + +- `default` (String) +- `description` (String) +- `key` (String) +- `level` (String) +- `value` (String) + + + +### Nested Schema for `tasks.parameters.timestamp_tz_output_format` + +Read-Only: + +- `default` (String) +- `description` (String) +- `key` (String) +- `level` (String) +- `value` (String) + + + +### Nested Schema for `tasks.parameters.timezone` + +Read-Only: + +- `default` (String) +- `description` (String) +- `key` (String) +- `level` (String) +- `value` (String) + + + +### Nested Schema for `tasks.parameters.trace_level` + +Read-Only: + +- `default` (String) +- `description` (String) +- `key` (String) +- `level` (String) +- `value` (String) + + + +### Nested Schema for `tasks.parameters.transaction_abort_on_error` + +Read-Only: + +- `default` (String) +- `description` (String) +- `key` (String) +- `level` (String) +- `value` (String) + + + +### Nested Schema for `tasks.parameters.transaction_default_isolation_level` + +Read-Only: + +- `default` (String) +- `description` (String) +- `key` (String) +- `level` (String) +- `value` (String) + + + +### Nested Schema for `tasks.parameters.two_digit_century_start` + +Read-Only: + +- `default` (String) +- `description` (String) +- `key` (String) +- `level` (String) +- `value` (String) + + + +### Nested Schema for `tasks.parameters.unsupported_ddl_action` + +Read-Only: + +- `default` (String) +- `description` (String) +- `key` (String) +- `level` (String) +- `value` (String) + + + +### Nested Schema for `tasks.parameters.use_cached_result` + +Read-Only: + +- `default` (String) +- `description` (String) +- `key` (String) +- `level` (String) +- `value` (String) + + + +### Nested Schema for `tasks.parameters.user_task_managed_initial_warehouse_size` + +Read-Only: + +- `default` (String) +- `description` (String) +- `key` (String) +- `level` (String) +- `value` (String) + + + +### Nested Schema for `tasks.parameters.user_task_minimum_trigger_interval_in_seconds` + +Read-Only: + +- `default` (String) +- `description` (String) +- `key` (String) +- `level` (String) +- `value` (String) + + + +### Nested Schema for `tasks.parameters.user_task_timeout_ms` + +Read-Only: + +- `default` (String) +- `description` (String) +- `key` (String) +- `level` (String) +- `value` (String) + + + +### Nested Schema for `tasks.parameters.week_of_year_policy` + +Read-Only: + +- `default` (String) +- `description` (String) +- `key` (String) +- `level` (String) +- `value` (String) + + + +### Nested Schema for `tasks.parameters.week_start` + +Read-Only: + +- `default` (String) +- `description` (String) +- `key` (String) +- `level` (String) +- `value` (String) + + + + +### Nested Schema for `tasks.show_output` + +Read-Only: + +- `allow_overlapping_execution` (Boolean) +- `budget` (String) - `comment` (String) -- `database` (String) +- `condition` (String) +- `config` (String) +- `created_on` (String) +- `database_name` (String) +- `definition` (String) +- `error_integration` (String) +- `id` (String) +- `last_committed_on` (String) +- `last_suspended_on` (String) +- `last_suspended_reason` (String) - `name` (String) -- `schema` (String) +- `owner` (String) +- `owner_role_type` (String) +- `predecessors` (Set of String) +- `schedule` (String) +- `schema_name` (String) +- `state` (String) +- 
`task_relations` (List of Object) (see [below for nested schema](#nestedobjatt--tasks--show_output--task_relations)) - `warehouse` (String) + + +### Nested Schema for `tasks.show_output.task_relations` + +Read-Only: + +- `finalized_root_task` (String) +- `finalizer` (String) +- `predecessors` (List of String) diff --git a/docs/resources/task.md b/docs/resources/task.md index 4405b10da3..549a73f9d9 100644 --- a/docs/resources/task.md +++ b/docs/resources/task.md @@ -5,6 +5,8 @@ description: |- Resource used to manage task objects. For more information, check task documentation https://docs.snowflake.com/en/user-guide/tasks-intro. --- +!> **V1 release candidate** This resource was reworked and is a release candidate for the V1. We do not expect significant changes in it before the V1. We will welcome any feedback and adjust the resource if needed. Any errors reported will be resolved with a higher priority. We encourage checking this resource out before the V1 release. Please follow the [migration guide](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/MIGRATION_GUIDE.md#v0980--v0990) to use it. + # snowflake_task (Resource) Resource used to manage task objects. For more information, check [task documentation](https://docs.snowflake.com/en/user-guide/tasks-intro). @@ -67,7 +69,6 @@ resource "snowflake_task" "test_task" { enabled = true } ``` - -> **Note** Instead of using fully_qualified_name, you can reference objects managed outside Terraform by constructing a correct ID, consult [identifiers guide](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/latest/docs/guides/identifiers#new-computed-fully-qualified-name-field-in-resources). diff --git a/examples/data-sources/snowflake_tasks/data-source.tf b/examples/data-sources/snowflake_tasks/data-source.tf index dba5b39d70..c6dea29f1e 100644 --- a/examples/data-sources/snowflake_tasks/data-source.tf +++ b/examples/data-sources/snowflake_tasks/data-source.tf @@ -1,4 +1,120 @@ -data "snowflake_tasks" "current" { - database = "MYDB" - schema = "MYSCHEMA" -} \ No newline at end of file +# Simple usage +data "snowflake_tasks" "simple" { +} + +output "simple_output" { + value = data.snowflake_tasks.simple.tasks +} + +# Filtering (like) +data "snowflake_tasks" "like" { + like = "task-name" +} + +output "like_output" { + value = data.snowflake_tasks.like.tasks +} + +# Filtering (in - account - database - schema - application - application package) +data "snowflake_tasks" "in_account" { + in { + account = true + } +} + +data "snowflake_tasks" "in_database" { + in { + database = "" + } +} + +data "snowflake_tasks" "in_schema" { + in { + schema = "." 
+  } +} + +data "snowflake_tasks" "in_application" { +  in { +    application = "" +  } +} + +data "snowflake_tasks" "in_application_package" { +  in { +    application_package = "" +  } +} + +output "in_output" { +  value = { +    "account" : data.snowflake_tasks.in_account.tasks, +    "database" : data.snowflake_tasks.in_database.tasks, +    "schema" : data.snowflake_tasks.in_schema.tasks, +    "application" : data.snowflake_tasks.in_application.tasks, +    "application_package" : data.snowflake_tasks.in_application_package.tasks, +  } +} + +# Filtering (root only tasks) +data "snowflake_tasks" "root_only" { +  root_only = true +} + +output "root_only_output" { +  value = data.snowflake_tasks.root_only.tasks +} + +# Filtering (starts_with) +data "snowflake_tasks" "starts_with" { +  starts_with = "task-" +} + +output "starts_with_output" { +  value = data.snowflake_tasks.starts_with.tasks +} + +# Filtering (limit) +data "snowflake_tasks" "limit" { +  limit { +    rows = 10 +    from = "task-" +  } +} + +output "limit_output" { +  value = data.snowflake_tasks.limit.tasks +} + +# Without additional data (to limit the number of calls made for every found task) +data "snowflake_tasks" "only_show" { +  # with_parameters is turned on by default and it calls SHOW PARAMETERS FOR TASK for every task found and attaches its output to tasks.*.parameters field +  with_parameters = false +} + +output "only_show_output" { +  value = data.snowflake_tasks.only_show.tasks +} + +# Ensure the number of tasks is at least one (with the use of postcondition) +data "snowflake_tasks" "assert_with_postcondition" { +  starts_with = "task-name" +  lifecycle { +    postcondition { +      condition = length(self.tasks) > 0 +      error_message = "there should be at least one task" +    } +  } +} + +# Ensure the number of tasks is exactly one (with the use of check block) +check "task_check" { +  data "snowflake_tasks" "assert_with_check_block" { +    like = "task-name" +  } + +  assert { +    condition = length(data.snowflake_tasks.assert_with_check_block.tasks) == 1 +    error_message = "tasks filtered by '${data.snowflake_tasks.assert_with_check_block.like}' returned ${length(data.snowflake_tasks.assert_with_check_block.tasks)} tasks where one was expected" +  } +} diff --git a/pkg/acceptance/bettertestspoc/assert/resourceparametersassert/task_resource_parameters_ext.go b/pkg/acceptance/bettertestspoc/assert/resourceparametersassert/task_resource_parameters_ext.go index 45a2def403..f6bca65b34 100644 --- a/pkg/acceptance/bettertestspoc/assert/resourceparametersassert/task_resource_parameters_ext.go +++ b/pkg/acceptance/bettertestspoc/assert/resourceparametersassert/task_resource_parameters_ext.go @@ -2,10 +2,23 @@ package resourceparametersassert import ( "strings" + "testing" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/bettertestspoc/assert" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk" ) +// TaskDatasourceParameters is a temporary workaround to have better parameter assertions in data source acceptance tests. +func TaskDatasourceParameters(t *testing.T, name string) *TaskResourceParametersAssert { + t.Helper() + + taskAssert := TaskResourceParametersAssert{ + ResourceAssert: assert.NewDatasourceAssert("data."+name, "parameters", "tasks.0."), + } + taskAssert.AddAssertion(assert.ValueSet("parameters.#", "1")) + return &taskAssert +} + func (u *TaskResourceParametersAssert) HasAllDefaults() *TaskResourceParametersAssert { return u. HasSuspendTaskAfterNumFailures(10). 
diff --git a/pkg/acceptance/bettertestspoc/assert/resourceshowoutputassert/task_show_output_ext.go b/pkg/acceptance/bettertestspoc/assert/resourceshowoutputassert/task_show_output_ext.go index 3bfea52bd6..2107ddd500 100644 --- a/pkg/acceptance/bettertestspoc/assert/resourceshowoutputassert/task_show_output_ext.go +++ b/pkg/acceptance/bettertestspoc/assert/resourceshowoutputassert/task_show_output_ext.go @@ -3,11 +3,28 @@ package resourceshowoutputassert import ( "fmt" "strconv" + "testing" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/bettertestspoc/assert" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk" ) +// TaskDatasourceShowOutput is a temporary workaround to have better show output assertions in data source acceptance tests. +func TaskDatasourceShowOutput(t *testing.T, name string) *TaskShowOutputAssert { + t.Helper() + + taskAssert := TaskShowOutputAssert{ + ResourceAssert: assert.NewDatasourceAssert("data."+name, "show_output", "tasks.0."), + } + taskAssert.AddAssertion(assert.ValueSet("show_output.#", "1")) + return &taskAssert +} + +func (t *TaskShowOutputAssert) HasErrorIntegrationEmpty() *TaskShowOutputAssert { + t.AddAssertion(assert.ResourceShowOutputStringUnderlyingValueSet("error_integration", "")) + return t +} + func (t *TaskShowOutputAssert) HasCreatedOnNotEmpty() *TaskShowOutputAssert { t.AddAssertion(assert.ResourceShowOutputValuePresent("created_on")) return t @@ -18,6 +35,11 @@ func (t *TaskShowOutputAssert) HasIdNotEmpty() *TaskShowOutputAssert { return t } +func (t *TaskShowOutputAssert) HasOwnerNotEmpty() *TaskShowOutputAssert { + t.AddAssertion(assert.ResourceShowOutputValuePresent("owner")) + return t +} + func (t *TaskShowOutputAssert) HasLastCommittedOnNotEmpty() *TaskShowOutputAssert { t.AddAssertion(assert.ResourceShowOutputValuePresent("last_committed_on")) return t diff --git a/pkg/acceptance/helpers/ids_generator.go b/pkg/acceptance/helpers/ids_generator.go index 42e247e6d5..7cb9c767ec 100644 --- a/pkg/acceptance/helpers/ids_generator.go +++ b/pkg/acceptance/helpers/ids_generator.go @@ -81,6 +81,10 @@ func (c *IdsGenerator) RandomSchemaObjectIdentifierInSchema(schemaId sdk.Databas return sdk.NewSchemaObjectIdentifierInSchema(schemaId, c.Alpha()) } +func (c *IdsGenerator) RandomSchemaObjectIdentifierInSchemaWithPrefix(prefix string, schemaId sdk.DatabaseObjectIdentifier) sdk.SchemaObjectIdentifier { + return sdk.NewSchemaObjectIdentifierInSchema(schemaId, c.AlphaWithPrefix(prefix)) +} + func (c *IdsGenerator) RandomSchemaObjectIdentifierWithArgumentsOld(arguments ...sdk.DataType) sdk.SchemaObjectIdentifier { return sdk.NewSchemaObjectIdentifierWithArgumentsOld(c.SchemaId().DatabaseName(), c.SchemaId().Name(), c.Alpha(), arguments) } diff --git a/pkg/datasources/common.go b/pkg/datasources/common.go index 6f8ab20169..2a7ae9ed10 100644 --- a/pkg/datasources/common.go +++ b/pkg/datasources/common.go @@ -12,6 +12,37 @@ var likeSchema = &schema.Schema{ Description: "Filters the output with **case-insensitive** pattern, with support for SQL wildcard characters (`%` and `_`).", } +var inSchema = &schema.Schema{ + Type: schema.TypeList, + Optional: true, + Description: "IN clause to filter the list of objects", + MaxItems: 1, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "account": { + Type: schema.TypeBool, + Optional: true, + Description: "Returns records for the entire account.", + ExactlyOneOf: []string{"in.0.account", "in.0.database", "in.0.schema"}, + }, + "database": { + Type: schema.TypeString, + 
Optional: true, + Description: "Returns records for the current database in use or for a specified database.", + ExactlyOneOf: []string{"in.0.account", "in.0.database", "in.0.schema"}, + ValidateDiagFunc: resources.IsValidIdentifier[sdk.AccountObjectIdentifier](), + }, + "schema": { + Type: schema.TypeString, + Optional: true, + Description: "Returns records for the current schema in use or a specified schema. Use fully qualified name.", + ExactlyOneOf: []string{"in.0.account", "in.0.database", "in.0.schema"}, + ValidateDiagFunc: resources.IsValidIdentifier[sdk.DatabaseObjectIdentifier](), + }, + }, + }, +} + var extendedInSchema = &schema.Schema{ Type: schema.TypeList, Optional: true, @@ -114,6 +145,29 @@ func handleLimitFrom(d *schema.ResourceData, setField **sdk.LimitFrom) { } } +func handleIn(d *schema.ResourceData, setField **sdk.In) error { + if v, ok := d.GetOk("in"); ok { + in := v.([]any)[0].(map[string]any) + accountValue, okAccount := in["account"] + databaseValue, okDatabase := in["database"] + schemaValue, okSchema := in["schema"] + + switch { + case okAccount && accountValue.(bool): + *setField = &sdk.In{Account: sdk.Bool(true)} + case okDatabase && databaseValue.(string) != "": + *setField = &sdk.In{Database: sdk.NewAccountObjectIdentifier(databaseValue.(string))} + case okSchema && schemaValue.(string) != "": + schemaId, err := sdk.ParseDatabaseObjectIdentifier(schemaValue.(string)) + if err != nil { + return err + } + *setField = &sdk.In{Schema: schemaId} + } + } + return nil +} + func handleExtendedIn(d *schema.ResourceData, setField **sdk.ExtendedIn) error { if v, ok := d.GetOk("in"); ok { in := v.([]any)[0].(map[string]any) diff --git a/pkg/datasources/tasks.go b/pkg/datasources/tasks.go index ff62bc5e96..b840c547e3 100644 --- a/pkg/datasources/tasks.go +++ b/pkg/datasources/tasks.go @@ -2,53 +2,53 @@ package datasources import ( "context" - "fmt" - "log" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/internal/provider" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/resources" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/schemas" + "github.com/hashicorp/terraform-plugin-sdk/v2/diag" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" ) var tasksSchema = map[string]*schema.Schema{ - "database": { - Type: schema.TypeString, - Required: true, - Description: "The database from which to return the schemas from.", + "with_parameters": { + Type: schema.TypeBool, + Optional: true, + Default: true, + Description: "Runs SHOW PARAMETERS FOR TASK for each task returned by SHOW TASKS. The output of SHOW PARAMETERS is saved to the parameters field as a map. 
By default this value is set to true.", }, - "schema": { - Type: schema.TypeString, - Required: true, - Description: "The schema from which to return the tasks from.", + "like": likeSchema, + "in": extendedInSchema, + "starts_with": startsWithSchema, + "root_only": { + Type: schema.TypeBool, + Optional: true, + Description: "Filters the command output to return only root tasks (tasks with no predecessors).", }, + "limit": limitFromSchema, "tasks": { Type: schema.TypeList, Computed: true, - Description: "The tasks in the schema", + Description: "Holds the aggregated output of all task details queries.", Elem: &schema.Resource{ Schema: map[string]*schema.Schema{ - "name": { - Type: schema.TypeString, - Computed: true, + resources.ShowOutputAttributeName: { + Type: schema.TypeList, + Computed: true, + Description: "Holds the output of SHOW TASKS.", + Elem: &schema.Resource{ + Schema: schemas.ShowTaskSchema, + }, }, - "database": { - Type: schema.TypeString, - Computed: true, - }, - "schema": { - Type: schema.TypeString, - Computed: true, - }, - "comment": { - Type: schema.TypeString, - Optional: true, - Computed: true, - }, - "warehouse": { - Type: schema.TypeString, - Optional: true, - Computed: true, + resources.ParametersAttributeName: { + Type: schema.TypeList, + Computed: true, + Description: "Holds the output of SHOW PARAMETERS FOR TASK.", + Elem: &schema.Resource{ + Schema: schemas.ShowTaskParametersSchema, + }, }, }, }, @@ -57,39 +57,54 @@ var tasksSchema = map[string]*schema.Schema{ func Tasks() *schema.Resource { return &schema.Resource{ - Read: ReadTasks, - Schema: tasksSchema, + ReadContext: ReadTasks, + Schema: tasksSchema, + Description: "Data source used to get details of filtered tasks. Filtering is aligned with the current possibilities for [SHOW TASKS](https://docs.snowflake.com/en/sql-reference/sql/show-tasks) query. 
The results of SHOW and SHOW PARAMETERS IN are encapsulated in one output collection `tasks`.", } } -func ReadTasks(d *schema.ResourceData, meta interface{}) error { +func ReadTasks(ctx context.Context, d *schema.ResourceData, meta any) diag.Diagnostics { client := meta.(*provider.Context).Client - ctx := context.Background() + req := sdk.NewShowTaskRequest() - databaseName := d.Get("database").(string) - schemaName := d.Get("schema").(string) + handleLike(d, &req.Like) + if err := handleExtendedIn(d, &req.In); err != nil { + return diag.FromErr(err) + } + handleStartsWith(d, &req.StartsWith) + if v, ok := d.GetOk("root_only"); ok && v.(bool) { + req.WithRootOnly(true) + } + handleLimitFrom(d, &req.Limit) - extractedTasks, err := client.Tasks.Show(ctx, sdk.NewShowTaskRequest().WithIn(sdk.In{Schema: sdk.NewDatabaseObjectIdentifier(databaseName, schemaName)})) + tasks, err := client.Tasks.Show(ctx, req) if err != nil { - // If not found, mark resource to be removed from state file during apply or refresh - log.Printf("[DEBUG] tasks in schema (%s) not found", d.Id()) - d.SetId("") - return nil + return diag.FromErr(err) } + d.SetId("tasks_read") + + flattenedTasks := make([]map[string]any, len(tasks)) + for i, task := range tasks { + task := task - tasks := make([]map[string]any, 0, len(extractedTasks)) - for _, task := range extractedTasks { - taskMap := map[string]any{} + var taskParameters []map[string]any + if d.Get("with_parameters").(bool) { + parameters, err := client.Tasks.ShowParameters(ctx, task.ID()) + if err != nil { + return diag.FromErr(err) + } + taskParameters = []map[string]any{schemas.TaskParametersToSchema(parameters)} + } - taskMap["name"] = task.Name - taskMap["database"] = task.DatabaseName - taskMap["schema"] = task.SchemaName - taskMap["comment"] = task.Comment - taskMap["warehouse"] = task.Warehouse.Name() + flattenedTasks[i] = map[string]any{ + resources.ShowOutputAttributeName: []map[string]any{schemas.TaskToSchema(&task)}, + resources.ParametersAttributeName: taskParameters, + } + } - tasks = append(tasks, taskMap) + if err := d.Set("tasks", flattenedTasks); err != nil { + return diag.FromErr(err) } - d.SetId(fmt.Sprintf(`%v|%v`, databaseName, schemaName)) - return d.Set("tasks", tasks) + return nil } diff --git a/pkg/datasources/tasks_acceptance_test.go b/pkg/datasources/tasks_acceptance_test.go index de1a55755d..7358268599 100644 --- a/pkg/datasources/tasks_acceptance_test.go +++ b/pkg/datasources/tasks_acceptance_test.go @@ -1,74 +1,270 @@ package datasources_test import ( + "bytes" "fmt" + "strconv" "testing" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/bettertestspoc/assert" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/bettertestspoc/assert/resourceparametersassert" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/bettertestspoc/assert/resourceshowoutputassert" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/testenvs" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk" + acc "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance" "github.com/hashicorp/terraform-plugin-testing/helper/resource" "github.com/hashicorp/terraform-plugin-testing/tfversion" ) -func TestAcc_Tasks(t *testing.T) { - databaseName := acc.TestClient().Ids.Alpha() - schemaName := acc.TestClient().Ids.Alpha() - taskName := acc.TestClient().Ids.Alpha() +func TestAcc_Tasks_Like_RootTask(t *testing.T) { + _ = testenvs.GetOrSkipTest(t, testenvs.EnableAcceptance) 
+ acc.TestAccPreCheck(t) + + // Created to show LIKE is working + _, standaloneTaskCleanup := acc.TestClient().Task.Create(t) + t.Cleanup(standaloneTaskCleanup) + + createRootReq := sdk.NewCreateTaskRequest(acc.TestClient().Ids.RandomSchemaObjectIdentifier(), "SELECT 1"). + WithSchedule("1 MINUTE"). + WithComment("some comment"). + WithAllowOverlappingExecution(true). + WithWarehouse(*sdk.NewCreateTaskWarehouseRequest().WithWarehouse(acc.TestClient().Ids.WarehouseId())) + rootTask, rootTaskCleanup := acc.TestClient().Task.CreateWithRequest(t, createRootReq) + t.Cleanup(rootTaskCleanup) + + childTask, childTaskCleanup := acc.TestClient().Task.CreateWithAfter(t, rootTask.ID()) + t.Cleanup(childTaskCleanup) + resource.Test(t, resource.TestCase{ ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, PreCheck: func() { acc.TestAccPreCheck(t) }, TerraformVersionChecks: []tfversion.TerraformVersionCheck{ tfversion.RequireAbove(tfversion.Version1_5_0), }, - CheckDestroy: nil, Steps: []resource.TestStep{ { - Config: tasks(databaseName, schemaName, taskName), + Config: taskDatasourceLikeRootOnly(rootTask.ID().Name(), true), + Check: assert.AssertThat(t, + assert.Check(resource.TestCheckResourceAttr("data.snowflake_tasks.test", "tasks.#", "1")), + resourceshowoutputassert.TaskDatasourceShowOutput(t, "snowflake_tasks.test"). + HasName(rootTask.Name). + HasSchemaName(rootTask.SchemaName). + HasDatabaseName(rootTask.DatabaseName). + HasCreatedOnNotEmpty(). + HasIdNotEmpty(). + HasOwnerNotEmpty(). + HasComment("some comment"). + HasWarehouse(acc.TestClient().Ids.WarehouseId()). + HasSchedule("1 MINUTE"). + HasPredecessors(). + HasDefinition("SELECT 1"). + HasCondition(""). + HasAllowOverlappingExecution(true). + HasErrorIntegrationEmpty(). + HasLastCommittedOn(""). + HasLastSuspendedOn(""). + HasOwnerRoleType("ROLE"). + HasConfig(""). + HasBudget(""). + HasTaskRelations(sdk.TaskRelations{}). + HasLastSuspendedReason(""), + resourceparametersassert.TaskDatasourceParameters(t, "snowflake_tasks.test"). 
+ HasAllDefaults(), + ), + }, + { + Config: taskDatasourceLikeRootOnly(childTask.ID().Name(), true), Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr("data.snowflake_tasks.t", "database", databaseName), - resource.TestCheckResourceAttr("data.snowflake_tasks.t", "schema", schemaName), - resource.TestCheckResourceAttrSet("data.snowflake_tasks.t", "tasks.#"), - resource.TestCheckResourceAttr("data.snowflake_tasks.t", "tasks.#", "1"), - resource.TestCheckResourceAttr("data.snowflake_tasks.t", "tasks.0.name", taskName), + resource.TestCheckResourceAttr("data.snowflake_tasks.test", "tasks.#", "0"), ), }, }, }) } -func tasks(databaseName string, schemaName string, taskName string) string { - return fmt.Sprintf(` - resource snowflake_database "test" { - name = "%v" - } +func TestAcc_Tasks_In_StartsWith(t *testing.T) { + _ = testenvs.GetOrSkipTest(t, testenvs.EnableAcceptance) + acc.TestAccPreCheck(t) + + prefix := acc.TestClient().Ids.AlphaN(4) + + _, standaloneTaskCleanup := acc.TestClient().Task.CreateWithRequest(t, sdk.NewCreateTaskRequest(acc.TestClient().Ids.RandomSchemaObjectIdentifierWithPrefix(prefix), "SELECT 1")) + t.Cleanup(standaloneTaskCleanup) + + schema, schemaCleanup := acc.TestClient().Schema.CreateSchema(t) + t.Cleanup(schemaCleanup) + + standaloneTask2, standaloneTask2Cleanup := acc.TestClient().Task.CreateWithRequest(t, sdk.NewCreateTaskRequest(acc.TestClient().Ids.RandomSchemaObjectIdentifierInSchemaWithPrefix(prefix, schema.ID()), "SELECT 1")) + t.Cleanup(standaloneTask2Cleanup) + + _, standaloneTask3Cleanup := acc.TestClient().Task.CreateWithRequest(t, sdk.NewCreateTaskRequest(acc.TestClient().Ids.RandomSchemaObjectIdentifierInSchema(schema.ID()), "SELECT 1")) + t.Cleanup(standaloneTask3Cleanup) + + resource.Test(t, resource.TestCase{ + ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, + PreCheck: func() { acc.TestAccPreCheck(t) }, + TerraformVersionChecks: []tfversion.TerraformVersionCheck{ + tfversion.RequireAbove(tfversion.Version1_5_0), + }, + Steps: []resource.TestStep{ + // On account with prefix + { + Config: taskDatasourceOnAccountStartsWith(prefix), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr("data.snowflake_tasks.test", "tasks.#", "2"), + ), + }, + // On database with prefix + { + Config: taskDatasourceInDatabaseStartsWith(acc.TestClient().Ids.DatabaseId(), prefix), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr("data.snowflake_tasks.test", "tasks.#", "2"), + ), + }, + // On schema with prefix + { + Config: taskDatasourceInSchemaStartsWith(schema.ID(), prefix), + Check: assert.AssertThat(t, + assert.Check(resource.TestCheckResourceAttr("data.snowflake_tasks.test", "tasks.#", "1")), + resourceshowoutputassert.TaskDatasourceShowOutput(t, "snowflake_tasks.test"). + HasName(standaloneTask2.Name). + HasSchemaName(standaloneTask2.SchemaName). 
+ HasDatabaseName(standaloneTask2.DatabaseName), + ), + }, + // On schema + { + Config: taskDatasourceInSchema(schema.ID()), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr("data.snowflake_tasks.test", "tasks.#", "2"), + ), + }, + }, + }) +} + +func TestAcc_Tasks_Limit(t *testing.T) { + _ = testenvs.GetOrSkipTest(t, testenvs.EnableAcceptance) + acc.TestAccPreCheck(t) + + prefix := acc.TestClient().Ids.AlphaN(4) - resource snowflake_schema "test"{ - name = "%v" - database = snowflake_database.test.name + _, standaloneTaskCleanup := acc.TestClient().Task.CreateWithRequest(t, sdk.NewCreateTaskRequest(acc.TestClient().Ids.RandomSchemaObjectIdentifierWithPrefix(prefix), "SELECT 1")) + t.Cleanup(standaloneTaskCleanup) + + _, standaloneTask2Cleanup := acc.TestClient().Task.CreateWithRequest(t, sdk.NewCreateTaskRequest(acc.TestClient().Ids.RandomSchemaObjectIdentifierWithPrefix(prefix), "SELECT 1")) + t.Cleanup(standaloneTask2Cleanup) + + resource.Test(t, resource.TestCase{ + ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, + PreCheck: func() { acc.TestAccPreCheck(t) }, + TerraformVersionChecks: []tfversion.TerraformVersionCheck{ + tfversion.RequireAbove(tfversion.Version1_5_0), + }, + Steps: []resource.TestStep{ + // Limit with prefix + { + Config: taskDatasourceLimitWithPrefix(2, prefix), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr("data.snowflake_tasks.test", "tasks.#", "2"), + ), + }, + // Only limit + { + Config: taskDatasourceLimit(1), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr("data.snowflake_tasks.test", "tasks.#", "1"), + ), + }, + }, + }) +} + +func taskDatasourceLikeRootOnly(like string, rootOnly bool) string { + return taskDatasourceConfig(like, false, sdk.AccountObjectIdentifier{}, sdk.DatabaseObjectIdentifier{}, "", rootOnly, nil) +} + +func taskDatasourceOnAccountStartsWith(startsWith string) string { + return taskDatasourceConfig("", true, sdk.AccountObjectIdentifier{}, sdk.DatabaseObjectIdentifier{}, startsWith, false, nil) +} + +func taskDatasourceInDatabaseStartsWith(databaseId sdk.AccountObjectIdentifier, startsWith string) string { + return taskDatasourceConfig("", false, databaseId, sdk.DatabaseObjectIdentifier{}, startsWith, false, nil) +} + +func taskDatasourceInSchemaStartsWith(schemaId sdk.DatabaseObjectIdentifier, startsWith string) string { + return taskDatasourceConfig("", false, sdk.AccountObjectIdentifier{}, schemaId, startsWith, false, nil) +} + +func taskDatasourceInSchema(schemaId sdk.DatabaseObjectIdentifier) string { + return taskDatasourceConfig("", false, sdk.AccountObjectIdentifier{}, schemaId, "", false, nil) +} + +func taskDatasourceLimit(limit int) string { + return taskDatasourceConfig("", false, sdk.AccountObjectIdentifier{}, sdk.DatabaseObjectIdentifier{}, "", false, &sdk.LimitFrom{ + Rows: sdk.Int(limit), + }) +} + +func taskDatasourceLimitWithPrefix(limit int, prefix string) string { + return taskDatasourceConfig("", false, sdk.AccountObjectIdentifier{}, sdk.DatabaseObjectIdentifier{}, "", false, &sdk.LimitFrom{ + Rows: sdk.Int(limit), + From: sdk.String(prefix), + }) +} + +func taskDatasourceConfig(like string, onAccount bool, onDatabase sdk.AccountObjectIdentifier, onSchema sdk.DatabaseObjectIdentifier, startsWith string, rootOnly bool, limitFrom *sdk.LimitFrom) string { + var likeString string + if len(like) > 0 { + likeString = fmt.Sprintf("like = \"%s\"", like) } - resource snowflake_warehouse "test" { - name = snowflake_database.test.name - 
max_concurrency_level = 8 - statement_timeout_in_seconds = 172800 + var startsWithString string + if len(startsWith) > 0 { + startsWithString = fmt.Sprintf("starts_with = \"%s\"", startsWith) } - resource snowflake_task "test" { - name = "%v" - database = snowflake_database.test.name - schema = snowflake_schema.test.name - warehouse = snowflake_warehouse.test.name - sql_statement = "SHOW FUNCTIONS" - started = true - schedule { - minutes = 15 + var inString string + if onAccount || (onDatabase != sdk.AccountObjectIdentifier{}) || (onSchema != sdk.DatabaseObjectIdentifier{}) { + inStringBuffer := new(bytes.Buffer) + inStringBuffer.WriteString("in {\n") + switch { + case onAccount: + inStringBuffer.WriteString("account = true\n") + case onDatabase != sdk.AccountObjectIdentifier{}: + inStringBuffer.WriteString(fmt.Sprintf("database = %s\n", strconv.Quote(onDatabase.FullyQualifiedName()))) + case onSchema != sdk.DatabaseObjectIdentifier{}: + inStringBuffer.WriteString(fmt.Sprintf("schema = %s\n", strconv.Quote(onSchema.FullyQualifiedName()))) } - } + inStringBuffer.WriteString("}\n") + inString = inStringBuffer.String() + } - data snowflake_tasks "t" { - database = snowflake_task.test.database - schema = snowflake_task.test.schema - depends_on = [snowflake_task.test] + var rootOnlyString string + if rootOnly { + rootOnlyString = fmt.Sprintf("root_only = %t", rootOnly) } - `, databaseName, schemaName, taskName) + + var limitFromString string + if limitFrom != nil { + inStringBuffer := new(bytes.Buffer) + inStringBuffer.WriteString("limit {\n") + inStringBuffer.WriteString(fmt.Sprintf("rows = %d\n", *limitFrom.Rows)) + if limitFrom.From != nil { + inStringBuffer.WriteString(fmt.Sprintf("from = \"%s\"\n", *limitFrom.From)) + } + inStringBuffer.WriteString("}\n") + limitFromString = inStringBuffer.String() + } + + return fmt.Sprintf(` + data "snowflake_tasks" "test" { + %[1]s + %[2]s + %[3]s + %[4]s + %[5]s + }`, likeString, inString, startsWithString, rootOnlyString, limitFromString) } diff --git a/pkg/resources/task_acceptance_test.go b/pkg/resources/task_acceptance_test.go index 5484eefb73..d38e94bb22 100644 --- a/pkg/resources/task_acceptance_test.go +++ b/pkg/resources/task_acceptance_test.go @@ -4,7 +4,6 @@ import ( "bytes" "fmt" "regexp" - "strings" "testing" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/bettertestspoc/assert/objectparametersassert" @@ -132,10 +131,7 @@ func TestAcc_Task_Complete(t *testing.T) { id := acc.TestClient().Ids.RandomSchemaObjectIdentifier() statement := "SELECT 1" - taskConfig := `$${"output_dir": "/temp/test_directory/", "learning_rate": 0.1}$$` - // We have to do three $ at the beginning because Terraform will remove one $. - // It's because `${` is a special pattern, and it's escaped by `$${`. - expectedTaskConfig := strings.ReplaceAll(taskConfig, "$", "") + taskConfig := `{"output_dir": "/temp/test_directory/", "learning_rate": 0.1}` comment := random.Comment() condition := `SYSTEM$STREAM_HAS_DATA('MYSTREAM')` configModel := model.TaskWithId("test", id, true, statement). @@ -167,7 +163,7 @@ func TestAcc_Task_Complete(t *testing.T) { HasStartedString(r.BooleanTrue). HasWarehouseString(acc.TestClient().Ids.WarehouseId().Name()). HasScheduleMinutes(10). - HasConfigString(expectedTaskConfig). + HasConfigString(taskConfig). HasAllowOverlappingExecutionString(r.BooleanTrue). HasErrorIntegrationString(errorNotificationIntegration.ID().Name()). HasCommentString(comment). 
@@ -194,7 +190,7 @@ func TestAcc_Task_Complete(t *testing.T) { HasLastCommittedOnNotEmpty(). HasLastSuspendedOn(""). HasOwnerRoleType("ROLE"). - HasConfig(expectedTaskConfig). + HasConfig(taskConfig). HasBudget(""). HasTaskRelations(sdk.TaskRelations{}), resourceparametersassert.TaskResourceParameters(t, configModel.ResourceReference()). @@ -215,7 +211,7 @@ func TestAcc_Task_Complete(t *testing.T) { HasStartedString(r.BooleanTrue). HasWarehouseString(acc.TestClient().Ids.WarehouseId().Name()). HasScheduleMinutes(10). - HasConfigString(expectedTaskConfig). + HasConfigString(taskConfig). HasAllowOverlappingExecutionString(r.BooleanTrue). HasErrorIntegrationString(errorNotificationIntegration.ID().Name()). HasCommentString(comment). @@ -246,10 +242,7 @@ func TestAcc_Task_Updates(t *testing.T) { errorNotificationIntegration, errorNotificationIntegrationCleanup := acc.TestClient().NotificationIntegration.Create(t) t.Cleanup(errorNotificationIntegrationCleanup) - taskConfig := `$${"output_dir": "/temp/test_directory/", "learning_rate": 0.1}$$` - // We have to do three $ at the beginning because Terraform will remove one $. - // It's because `${` is a special pattern, and it's escaped by `$${`. - expectedTaskConfig := strings.ReplaceAll(taskConfig, "$", "") + taskConfig := `{"output_dir": "/temp/test_directory/", "learning_rate": 0.1}` comment := random.Comment() condition := `SYSTEM$STREAM_HAS_DATA('MYSTREAM')` completeConfigModel := model.TaskWithId("test", id, true, statement). @@ -325,7 +318,7 @@ func TestAcc_Task_Updates(t *testing.T) { HasStartedString(r.BooleanTrue). HasWarehouseString(warehouse.ID().Name()). HasScheduleMinutes(5). - HasConfigString(expectedTaskConfig). + HasConfigString(taskConfig). HasAllowOverlappingExecutionString(r.BooleanTrue). HasErrorIntegrationString(errorNotificationIntegration.ID().Name()). HasCommentString(comment). @@ -352,7 +345,7 @@ func TestAcc_Task_Updates(t *testing.T) { HasLastCommittedOnNotEmpty(). HasLastSuspendedOn(""). HasOwnerRoleType("ROLE"). - HasConfig(expectedTaskConfig). + HasConfig(taskConfig). HasBudget(""). HasTaskRelations(sdk.TaskRelations{}), ), diff --git a/pkg/sdk/grants_impl.go b/pkg/sdk/grants_impl.go index 3cb14ae05c..082ec61020 100644 --- a/pkg/sdk/grants_impl.go +++ b/pkg/sdk/grants_impl.go @@ -519,7 +519,7 @@ func (v *grants) runOnAllTasks(ctx context.Context, inDatabase *AccountObjectIde } } - tasks, err := v.client.Tasks.Show(ctx, NewShowTaskRequest().WithIn(in)) + tasks, err := v.client.Tasks.Show(ctx, NewShowTaskRequest().WithIn(ExtendedIn{In: in})) if err != nil { return err } diff --git a/pkg/sdk/tasks_def.go b/pkg/sdk/tasks_def.go index 19b9db8df3..a05400bcb0 100644 --- a/pkg/sdk/tasks_def.go +++ b/pkg/sdk/tasks_def.go @@ -286,7 +286,7 @@ var TasksDef = g.NewInterface( Terse(). SQL("TASKS"). OptionalLike(). - OptionalIn(). + OptionalExtendedIn(). OptionalStartsWith(). OptionalSQL("ROOT ONLY"). 
OptionalLimit(), diff --git a/pkg/sdk/tasks_dto_builders_gen.go b/pkg/sdk/tasks_dto_builders_gen.go index 6f56cf41b6..7e3f7b90f7 100644 --- a/pkg/sdk/tasks_dto_builders_gen.go +++ b/pkg/sdk/tasks_dto_builders_gen.go @@ -441,7 +441,7 @@ func (s *ShowTaskRequest) WithLike(Like Like) *ShowTaskRequest { return s } -func (s *ShowTaskRequest) WithIn(In In) *ShowTaskRequest { +func (s *ShowTaskRequest) WithIn(In ExtendedIn) *ShowTaskRequest { s.In = &In return s } diff --git a/pkg/sdk/tasks_dto_gen.go b/pkg/sdk/tasks_dto_gen.go index a6986c9ea2..e8dcc23d20 100644 --- a/pkg/sdk/tasks_dto_gen.go +++ b/pkg/sdk/tasks_dto_gen.go @@ -133,7 +133,7 @@ type DropTaskRequest struct { type ShowTaskRequest struct { Terse *bool Like *Like - In *In + In *ExtendedIn StartsWith *string RootOnly *bool Limit *LimitFrom diff --git a/pkg/sdk/tasks_gen.go b/pkg/sdk/tasks_gen.go index f2fd244d75..09da9fa9f5 100644 --- a/pkg/sdk/tasks_gen.go +++ b/pkg/sdk/tasks_gen.go @@ -145,14 +145,14 @@ type DropTaskOptions struct { // ShowTaskOptions is based on https://docs.snowflake.com/en/sql-reference/sql/show-tasks. type ShowTaskOptions struct { - show bool `ddl:"static" sql:"SHOW"` - Terse *bool `ddl:"keyword" sql:"TERSE"` - tasks bool `ddl:"static" sql:"TASKS"` - Like *Like `ddl:"keyword" sql:"LIKE"` - In *In `ddl:"keyword" sql:"IN"` - StartsWith *string `ddl:"parameter,single_quotes,no_equals" sql:"STARTS WITH"` - RootOnly *bool `ddl:"keyword" sql:"ROOT ONLY"` - Limit *LimitFrom `ddl:"keyword" sql:"LIMIT"` + show bool `ddl:"static" sql:"SHOW"` + Terse *bool `ddl:"keyword" sql:"TERSE"` + tasks bool `ddl:"static" sql:"TASKS"` + Like *Like `ddl:"keyword" sql:"LIKE"` + In *ExtendedIn `ddl:"keyword" sql:"IN"` + StartsWith *string `ddl:"parameter,single_quotes,no_equals" sql:"STARTS WITH"` + RootOnly *bool `ddl:"keyword" sql:"ROOT ONLY"` + Limit *LimitFrom `ddl:"keyword" sql:"LIMIT"` } type taskDBRow struct { diff --git a/pkg/sdk/tasks_gen_test.go b/pkg/sdk/tasks_gen_test.go index fa3e9edbbc..35ac195b37 100644 --- a/pkg/sdk/tasks_gen_test.go +++ b/pkg/sdk/tasks_gen_test.go @@ -457,14 +457,34 @@ func TestTasks_Show(t *testing.T) { assertOptsValidAndSQLEquals(t, opts, "SHOW TASKS") }) + t.Run("in application", func(t *testing.T) { + opts := defaultOpts() + id := randomAccountObjectIdentifier() + opts.In = &ExtendedIn{ + Application: id, + } + assertOptsValidAndSQLEquals(t, opts, "SHOW TASKS IN APPLICATION %s", id.FullyQualifiedName()) + }) + + t.Run("in application package", func(t *testing.T) { + opts := defaultOpts() + id := randomAccountObjectIdentifier() + opts.In = &ExtendedIn{ + ApplicationPackage: id, + } + assertOptsValidAndSQLEquals(t, opts, "SHOW TASKS IN APPLICATION PACKAGE %s", id.FullyQualifiedName()) + }) + t.Run("all options", func(t *testing.T) { opts := defaultOpts() opts.Terse = Bool(true) opts.Like = &Like{ Pattern: String("myaccount"), } - opts.In = &In{ - Account: Bool(true), + opts.In = &ExtendedIn{ + In: In{ + Account: Bool(true), + }, } opts.StartsWith = String("abc") opts.RootOnly = Bool(true) diff --git a/pkg/sdk/tasks_impl_gen.go b/pkg/sdk/tasks_impl_gen.go index ac390a67c9..3d4b530194 100644 --- a/pkg/sdk/tasks_impl_gen.go +++ b/pkg/sdk/tasks_impl_gen.go @@ -4,6 +4,7 @@ import ( "context" "encoding/json" "errors" + "fmt" "log" "slices" "strings" @@ -53,8 +54,10 @@ func (v *tasks) Show(ctx context.Context, request *ShowTaskRequest) ([]Task, err } func (v *tasks) ShowByID(ctx context.Context, id SchemaObjectIdentifier) (*Task, error) { - tasks, err := v.Show(ctx, NewShowTaskRequest().WithIn(In{ - Schema: 
id.SchemaId(), + tasks, err := v.Show(ctx, NewShowTaskRequest().WithIn(ExtendedIn{ + In: In{ + Schema: id.SchemaId(), + }, }).WithLike(Like{ Pattern: String(id.Name()), })) @@ -179,7 +182,6 @@ func (r *CreateTaskRequest) toOpts() *CreateTaskOptions { IfNotExists: r.IfNotExists, name: r.name, Schedule: r.Schedule, - Config: r.Config, AllowOverlappingExecution: r.AllowOverlappingExecution, SessionParameters: r.SessionParameters, UserTaskTimeoutMs: r.UserTaskTimeoutMs, @@ -200,6 +202,9 @@ func (r *CreateTaskRequest) toOpts() *CreateTaskOptions { UserTaskManagedInitialWarehouseSize: r.Warehouse.UserTaskManagedInitialWarehouseSize, } } + if r.Config != nil { + opts.Config = String(fmt.Sprintf("$$%s$$", *r.Config)) + } return opts } @@ -207,7 +212,6 @@ func (r *CreateOrAlterTaskRequest) toOpts() *CreateOrAlterTaskOptions { opts := &CreateOrAlterTaskOptions{ name: r.name, Schedule: r.Schedule, - Config: r.Config, AllowOverlappingExecution: r.AllowOverlappingExecution, UserTaskTimeoutMs: r.UserTaskTimeoutMs, SessionParameters: r.SessionParameters, @@ -226,6 +230,9 @@ func (r *CreateOrAlterTaskRequest) toOpts() *CreateOrAlterTaskOptions { UserTaskManagedInitialWarehouseSize: r.Warehouse.UserTaskManagedInitialWarehouseSize, } } + if r.Config != nil { + opts.Config = String(fmt.Sprintf("$$%s$$", *r.Config)) + } return opts } @@ -261,7 +268,6 @@ func (r *AlterTaskRequest) toOpts() *AlterTaskOptions { Warehouse: r.Set.Warehouse, UserTaskManagedInitialWarehouseSize: r.Set.UserTaskManagedInitialWarehouseSize, Schedule: r.Set.Schedule, - Config: r.Set.Config, AllowOverlappingExecution: r.Set.AllowOverlappingExecution, UserTaskTimeoutMs: r.Set.UserTaskTimeoutMs, SuspendTaskAfterNumFailures: r.Set.SuspendTaskAfterNumFailures, @@ -271,6 +277,9 @@ func (r *AlterTaskRequest) toOpts() *AlterTaskOptions { TaskAutoRetryAttempts: r.Set.TaskAutoRetryAttempts, UserTaskMinimumTriggerIntervalInSeconds: r.Set.UserTaskMinimumTriggerIntervalInSeconds, } + if r.Set.Config != nil { + opts.Set.Config = String(fmt.Sprintf("$$%s$$", *r.Set.Config)) + } } if r.Unset != nil { opts.Unset = &TaskUnset{ diff --git a/pkg/sdk/testint/tasks_gen_integration_test.go b/pkg/sdk/testint/tasks_gen_integration_test.go index 34edff7495..5fee5e5bcf 100644 --- a/pkg/sdk/testint/tasks_gen_integration_test.go +++ b/pkg/sdk/testint/tasks_gen_integration_test.go @@ -272,7 +272,7 @@ func TestInt_Tasks(t *testing.T) { WithWarehouse(*sdk.NewCreateTaskWarehouseRequest().WithWarehouse(testClientHelper().Ids.WarehouseId())). WithErrorIntegration(errorIntegration.ID()). WithSchedule("10 MINUTE"). - WithConfig(`$${"output_dir": "/temp/test_directory/", "learning_rate": 0.1}$$`). + WithConfig(`{"output_dir": "/temp/test_directory/", "learning_rate": 0.1}`). WithAllowOverlappingExecution(true). 
WithSessionParameters(sdk.SessionParameters{
 			JSONIndent: sdk.Int(4),
@@ -863,7 +863,7 @@ func TestInt_Tasks(t *testing.T) {
 		task2, task2Cleanup := testClientHelper().Task.Create(t)
 		t.Cleanup(task2Cleanup)
 
-		returnedTasks, err := client.Tasks.Show(ctx, sdk.NewShowTaskRequest().WithIn(sdk.In{Schema: testClientHelper().Ids.SchemaId()}))
+		returnedTasks, err := client.Tasks.Show(ctx, sdk.NewShowTaskRequest().WithIn(sdk.ExtendedIn{In: sdk.In{Schema: testClientHelper().Ids.SchemaId()}}))
 		require.NoError(t, err)
 		require.Len(t, returnedTasks, 2)
@@ -876,7 +876,7 @@ func TestInt_Tasks(t *testing.T) {
 		task, taskCleanup := testClientHelper().Task.CreateWithRequest(t, sdk.NewCreateTaskRequest(id, sql).WithSchedule("10 MINUTE"))
 		t.Cleanup(taskCleanup)
 
-		returnedTasks, err := client.Tasks.Show(ctx, sdk.NewShowTaskRequest().WithIn(sdk.In{Schema: testClientHelper().Ids.SchemaId()}).WithTerse(true))
+		returnedTasks, err := client.Tasks.Show(ctx, sdk.NewShowTaskRequest().WithIn(sdk.ExtendedIn{In: sdk.In{Schema: testClientHelper().Ids.SchemaId()}}).WithTerse(true))
 		require.NoError(t, err)
 		require.Len(t, returnedTasks, 1)
@@ -892,7 +892,7 @@ func TestInt_Tasks(t *testing.T) {
 		returnedTasks, err := client.Tasks.Show(ctx, sdk.NewShowTaskRequest().
 			WithLike(sdk.Like{Pattern: &task1.Name}).
-			WithIn(sdk.In{Schema: testClientHelper().Ids.SchemaId()}).
+			WithIn(sdk.ExtendedIn{In: sdk.In{Schema: testClientHelper().Ids.SchemaId()}}).
 			WithLimit(sdk.LimitFrom{Rows: sdk.Int(5)}))
 		require.NoError(t, err)
diff --git a/templates/data-sources/tasks.md.tmpl b/templates/data-sources/tasks.md.tmpl
new file mode 100644
index 0000000000..9173876ceb
--- /dev/null
+++ b/templates/data-sources/tasks.md.tmpl
@@ -0,0 +1,24 @@
+---
+page_title: "{{.Name}} {{.Type}} - {{.ProviderName}}"
+subcategory: ""
+description: |-
+{{ if gt (len (split .Description "<deprecation>")) 1 -}}
+{{ index (split .Description "<deprecation>") 1 | plainmarkdown | trimspace | prefixlines "  " }}
+{{- else -}}
+{{ .Description | plainmarkdown | trimspace | prefixlines "  " }}
+{{- end }}
+---
+
+!> **V1 release candidate** This data source was reworked and is a release candidate for the V1. We do not expect significant changes in it before the V1. We will welcome any feedback and adjust the data source if needed. Any errors reported will be resolved with a higher priority. We encourage checking this data source out before the V1 release. Please follow the [migration guide](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/MIGRATION_GUIDE.md#v0980--v0990) to use it.
+
+# {{.Name}} ({{.Type}})
+
+{{ .Description | trimspace }}
+
+{{ if .HasExample -}}
+## Example Usage
+
+{{ tffile (printf "examples/data-sources/%s/data-source.tf" .Name)}}
+{{- end }}
+
+{{ .SchemaMarkdown | trimspace }}
diff --git a/templates/resources/task.md.tmpl b/templates/resources/task.md.tmpl
new file mode 100644
index 0000000000..7a876a0017
--- /dev/null
+++ b/templates/resources/task.md.tmpl
@@ -0,0 +1,35 @@
+---
+page_title: "{{.Name}} {{.Type}} - {{.ProviderName}}"
+subcategory: ""
+description: |-
+{{ if gt (len (split .Description "<deprecation>")) 1 -}}
+{{ index (split .Description "<deprecation>") 1 | plainmarkdown | trimspace | prefixlines "  " }}
+{{- else -}}
+{{ .Description | plainmarkdown | trimspace | prefixlines "  " }}
+{{- end }}
+---
+
+!> **V1 release candidate** This resource was reworked and is a release candidate for the V1. We do not expect significant changes in it before the V1. We will welcome any feedback and adjust the resource if needed.
Any errors reported will be resolved with a higher priority. We encourage checking this resource out before the V1 release. Please follow the [migration guide](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/MIGRATION_GUIDE.md#v0980--v0990) to use it.
+
+# {{.Name}} ({{.Type}})
+
+{{ .Description | trimspace }}
+
+{{ if .HasExample -}}
+## Example Usage
+
+{{ tffile (printf "examples/resources/%s/resource.tf" .Name)}}
+-> **Note** Instead of using fully_qualified_name, you can reference objects managed outside Terraform by constructing a correct ID; consult the [identifiers guide](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/latest/docs/guides/identifiers#new-computed-fully-qualified-name-field-in-resources).
+
+
+{{- end }}
+
+{{ .SchemaMarkdown | trimspace }}
+{{- if .HasImport }}
+
+## Import
+
+Import is supported using the following syntax:
+
+{{ codefile "shell" (printf "examples/resources/%s/import.sh" .Name)}}
+{{- end }}
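
The SDK changes above widen the SHOW TASKS filter from `sdk.In` to `sdk.ExtendedIn`, which keeps the existing account/database/schema scopes (via the embedded `In`, which is how `grants_impl.go` now wraps its value as `ExtendedIn{In: in}`) and adds `Application` and `ApplicationPackage`. A minimal sketch of how a caller of `client.Tasks.Show` might migrate; the function name and the `schemaId`/`applicationId` parameters are placeholders, not part of the patch:

```go
package example

import (
	"context"

	"github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk"
)

// showTasks lists tasks in a schema and in an application using the widened filter.
// Before this change the schema case was built as NewShowTaskRequest().WithIn(sdk.In{Schema: schemaId}).
func showTasks(ctx context.Context, client *sdk.Client, schemaId sdk.DatabaseObjectIdentifier, applicationId sdk.AccountObjectIdentifier) ([]sdk.Task, error) {
	inSchema, err := client.Tasks.Show(ctx, sdk.NewShowTaskRequest().
		WithIn(sdk.ExtendedIn{In: sdk.In{Schema: schemaId}}))
	if err != nil {
		return nil, err
	}
	inApplication, err := client.Tasks.Show(ctx, sdk.NewShowTaskRequest().
		WithIn(sdk.ExtendedIn{Application: applicationId}))
	if err != nil {
		return nil, err
	}
	return append(inSchema, inApplication...), nil
}
```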
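
Similarly, the `toOpts()` changes move the `$$...$$` quoting of a task's `config` into the SDK (`fmt.Sprintf("$$%s$$", ...)`), so request builders and the tests now pass plain JSON. A sketch under the same assumptions (placeholder function and identifier names; the `client.Tasks.Create` call follows the SDK's usual request pattern and is not shown in this patch):

```go
package example

import (
	"context"

	"github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk"
)

// createTaskWithConfig creates a scheduled task whose config is supplied as raw JSON;
// the SDK wraps the value in $$...$$ when it renders the CREATE TASK statement.
func createTaskWithConfig(ctx context.Context, client *sdk.Client, id sdk.SchemaObjectIdentifier) error {
	request := sdk.NewCreateTaskRequest(id, "SELECT 1").
		WithSchedule("10 MINUTE").
		WithConfig(`{"output_dir": "/temp/test_directory/", "learning_rate": 0.1}`)
	return client.Tasks.Create(ctx, request)
}
```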