diff --git a/MIGRATION_GUIDE.md b/MIGRATION_GUIDE.md index 4eb3892494..c0846c1420 100644 --- a/MIGRATION_GUIDE.md +++ b/MIGRATION_GUIDE.md @@ -15,12 +15,12 @@ From now on, please migrate and use the new database resources for their unique The split was done (and will be done for several objects during the refactor) to simplify the resource on maintainability and usage level. Its purpose was also to divide the resources by their specific purpose rather than cramping every use case of an object into one resource. -### *(behavior change)* snowflake_databases +### *(behavior change)* snowflake_databases datasource - `terse` and `history` fields were removed. - `replication_configuration` field was removed from `databases`. - `pattern` was replaced by `like` field. - Additional filtering options added (`limit`). -- Added missing fields returned by SHOW DATABASES> +- Added missing fields returned by SHOW DATABASES. - Added outputs from DESC DATABASE and SHOW PARAMETERS IN DATABASE (they can be turned off by declaring `with_describe = false` and `with_parameters = false`, **they're turned on by default**). 
## v0.89.0 ➞ v0.90.0 diff --git a/docs/data-sources/databases.md b/docs/data-sources/databases.md index cb0a2762b6..c05a35f15b 100644 --- a/docs/data-sources/databases.md +++ b/docs/data-sources/databases.md @@ -12,16 +12,60 @@ description: |- ## Example Usage ```terraform -data "snowflake_databases" "test" { - with_describe = false - with_parameters = false - like = "database-name" - starts_with = "database-name" +# Simple usage +data "snowflake_databases" "simple" { +} + +output "simple_output" { + value = data.snowflake_databases.simple.databases +} + +# Filtering (like) +data "snowflake_databases" "like" { + like = "database-name" +} + +output "like_output" { + value = data.snowflake_databases.like.databases +} + +# Filtering (starts_with) +data "snowflake_databases" "starts_with" { + starts_with = "database-" +} + +output "starts_with_output" { + value = data.snowflake_databases.starts_with.databases +} + +# Filtering (limit) +data "snowflake_databases" "limit" { limit { - rows = 20 - from = "database-name" + rows = 10 + from = "database-" } +} + +output "limit_output" { + value = data.snowflake_databases.limit.databases +} + +# Without additional data (to limit the number of calls made for every found database) +data "snowflake_databases" "only_show" { + # with_describe is turned on by default and it calls DESCRIBE DATABASE for every database found and attaches its output to databases.*.description field + with_describe = false + + # with_parameters is turned on by default and it calls SHOW PARAMETERS FOR DATABASE for every database found and attaches its output to databases.*.parameters field + with_parameters = false +} + +output "only_show_output" { + value = data.snowflake_databases.only_show.databases +} +# Ensure the number of databases is equal to at least one element (with the use of postcondition) +data "snowflake_databases" "assert_with_postcondition" { + starts_with = "database-name" lifecycle { postcondition { condition = length(self.databases) 
> 0 @@ -30,8 +74,9 @@ data "snowflake_databases" "test" { } } +# Ensure the number of databases is equal to exactly one element (with the use of check block) check "database_check" { - data "snowflake_databases" "test" { + data "snowflake_databases" "assert_with_check_block" { like = "database-name" } @@ -48,7 +93,7 @@ check "database_check" { ### Optional - `like` (String) Filters the output with **case-insensitive** pattern, with support for SQL wildcard characters (`%` and `_`). -- `limit` (Block List, Max: 1) Limits the number of rows returned, while also enabling "pagination" or the results. (see [below for nested schema](#nestedblock--limit)) +- `limit` (Block List, Max: 1) Limits the number of rows returned. The limit may start from the first element matched by from which is optional. (see [below for nested schema](#nestedblock--limit)) - `starts_with` (String) Filters the output with **case-sensitive** characters indicating the beginning of the object name. - `with_describe` (Boolean) Runs DESC DATABASE for each database returned by SHOW DATABASES. The output of describe is saved to the description field. By default this value is set to true. - `with_parameters` (Boolean) Runs SHOW PARAMETERS FOR DATABASE for each database returned by SHOW DATABASES. The output of describe is saved to the parameters field as a map. By default this value is set to true. 
diff --git a/docs/resources/secondary_database.md b/docs/resources/secondary_database.md index 5e085f79ee..6961617d10 100644 --- a/docs/resources/secondary_database.md +++ b/docs/resources/secondary_database.md @@ -29,25 +29,26 @@ resource "snowflake_standard_database" "primary" { resource "snowflake_secondary_database" "test" { provider = secondary_account name = snowflake_standard_database.primary.name # It's recommended to give a secondary database the same name as its primary database - as_replica_of = "..${snowflake_standard_database.primary.name}" is_transient = false - - data_retention_time_in_days { - value = 10 - } - - max_data_extension_time_in_days { - value = 20 - } - - external_volume = "external_volume_name" - catalog = "catalog_name" - replace_invalid_characters = false - default_ddl_collation = "en_US" - storage_serialization_policy = "OPTIMIZED" - log_level = "OFF" - trace_level = "OFF" - comment = "A secondary database" + as_replica_of = "..${snowflake_standard_database.primary.name}" + comment = "A secondary database" + + data_retention_time_in_days = 10 + max_data_extension_time_in_days = 20 + external_volume = "" + catalog = "" + replace_invalid_characters = false + default_ddl_collation = "en_US" + storage_serialization_policy = "COMPATIBLE" + log_level = "INFO" + trace_level = "ALWAYS" + suspend_task_after_num_failures = 10 + task_auto_retry_attempts = 10 + user_task_managed_initial_warehouse_size = "LARGE" + user_task_timeout_ms = 3600000 + user_task_minimum_trigger_interval_in_seconds = 120 + quoted_identifiers_ignore_case = false + enable_console_output = false } ``` @@ -63,35 +64,27 @@ resource "snowflake_secondary_database" "test" { - `catalog` (String) The database parameter that specifies the default catalog to use for Iceberg tables. - `comment` (String) Specifies a comment for the database. 
-- `data_retention_time_in_days` (Block List, Max: 1) Specifies the number of days for which Time Travel actions (CLONE and UNDROP) can be performed on the database, as well as specifying the default Time Travel retention time for all schemas created in the database. For more details, see [Understanding & Using Time Travel](https://docs.snowflake.com/en/user-guide/data-time-travel). (see [below for nested schema](#nestedblock--data_retention_time_in_days)) +- `data_retention_time_in_days` (Number) Specifies the number of days for which Time Travel actions (CLONE and UNDROP) can be performed on the database, as well as specifying the default Time Travel retention time for all schemas created in the database. For more details, see [Understanding & Using Time Travel](https://docs.snowflake.com/en/user-guide/data-time-travel). - `default_ddl_collation` (String) Specifies a default collation specification for all schemas and tables added to the database. It can be overridden on schema or table level. For more information, see [collation specification](https://docs.snowflake.com/en/sql-reference/collation#label-collation-specification). +- `enable_console_output` (Boolean) If true, enables stdout/stderr fast path logging for anonymous stored procedures. - `external_volume` (String) The database parameter that specifies the default external volume to use for Iceberg tables. - `is_transient` (Boolean) Specifies the database as transient. Transient databases do not have a Fail-safe period so they do not incur additional storage costs once they leave Time Travel; however, this means they are also not protected by Fail-safe in the event of a data loss. - `log_level` (String) Specifies the severity level of messages that should be ingested and made available in the active event table. Valid options are: [TRACE DEBUG INFO WARN ERROR FATAL OFF]. Messages at the specified level (and at more severe levels) are ingested. 
For more information, see [LOG_LEVEL](https://docs.snowflake.com/en/sql-reference/parameters.html#label-log-level). -- `max_data_extension_time_in_days` (Block List, Max: 1) Object parameter that specifies the maximum number of days for which Snowflake can extend the data retention period for tables in the database to prevent streams on the tables from becoming stale. For a detailed description of this parameter, see [MAX_DATA_EXTENSION_TIME_IN_DAYS](https://docs.snowflake.com/en/sql-reference/parameters.html#label-max-data-extension-time-in-days). (see [below for nested schema](#nestedblock--max_data_extension_time_in_days)) +- `max_data_extension_time_in_days` (Number) Object parameter that specifies the maximum number of days for which Snowflake can extend the data retention period for tables in the database to prevent streams on the tables from becoming stale. For a detailed description of this parameter, see [MAX_DATA_EXTENSION_TIME_IN_DAYS](https://docs.snowflake.com/en/sql-reference/parameters.html#label-max-data-extension-time-in-days). +- `quoted_identifiers_ignore_case` (Boolean) If true, the case of quoted identifiers is ignored. - `replace_invalid_characters` (Boolean) Specifies whether to replace invalid UTF-8 characters with the Unicode replacement character (�) in query results for an Iceberg table. You can only set this parameter for tables that use an external Iceberg catalog. -- `storage_serialization_policy` (String) Specifies the storage serialization policy for Iceberg tables that use Snowflake as the catalog. Valid options are: [COMPATIBLE OPTIMIZED]. COMPATIBLE: Snowflake performs encoding and compression of data files that ensures interoperability with third-party compute engines. OPTIMIZED: Snowflake performs encoding and compression of data files that ensures the best table performance within Snowflake. +- `storage_serialization_policy` (String) The storage serialization policy for Iceberg tables that use Snowflake as the catalog. 
Valid options are: [COMPATIBLE OPTIMIZED]. COMPATIBLE: Snowflake performs encoding and compression of data files that ensures interoperability with third-party compute engines. OPTIMIZED: Snowflake performs encoding and compression of data files that ensures the best table performance within Snowflake. +- `suspend_task_after_num_failures` (Number) How many times a task must fail in a row before it is automatically suspended. 0 disables auto-suspending. +- `task_auto_retry_attempts` (Number) Maximum automatic retries allowed for a user task. - `trace_level` (String) Controls how trace events are ingested into the event table. Valid options are: [ALWAYS ON_EVENT OFF]. For information about levels, see [TRACE_LEVEL](https://docs.snowflake.com/en/sql-reference/parameters.html#label-trace-level). +- `user_task_managed_initial_warehouse_size` (String) The initial size of warehouse to use for managed warehouses in the absence of history. +- `user_task_minimum_trigger_interval_in_seconds` (Number) Minimum amount of time between Triggered Task executions in seconds. +- `user_task_timeout_ms` (Number) User task execution timeout in milliseconds. ### Read-Only - `id` (String) The ID of this resource. - -### Nested Schema for `data_retention_time_in_days` - -Required: - -- `value` (Number) - - - -### Nested Schema for `max_data_extension_time_in_days` - -Required: - -- `value` (Number) - ## Import Import is supported using the following syntax: diff --git a/docs/resources/shared_database.md b/docs/resources/shared_database.md index 74f34308a5..f5d68a7915 100644 --- a/docs/resources/shared_database.md +++ b/docs/resources/shared_database.md @@ -33,19 +33,29 @@ resource "snowflake_grant_privileges_to_share" "test" { # 2. 
Creating shared database resource "snowflake_shared_database" "test" { - provider = secondary_account - depends_on = [snowflake_grant_privileges_to_share.test] - name = snowflake_standard_database.test.name # shared database should have the same as the "imported" one - from_share = "..${snowflake_share.test.name}" - is_transient = false - external_volume = "external_volume_name" - catalog = "catalog_name" - replace_invalid_characters = false - default_ddl_collation = "en_US" - storage_serialization_policy = "OPTIMIZED" - log_level = "OFF" - trace_level = "OFF" - comment = "A shared database" + provider = secondary_account + depends_on = [snowflake_grant_privileges_to_share.test] + name = snowflake_standard_database.test.name # shared database should have the same as the "imported" one + is_transient = false + from_share = "..${snowflake_share.test.name}" + comment = "A shared database" + + data_retention_time_in_days = 10 + max_data_extension_time_in_days = 20 + external_volume = "" + catalog = "" + replace_invalid_characters = false + default_ddl_collation = "en_US" + storage_serialization_policy = "COMPATIBLE" + log_level = "INFO" + trace_level = "ALWAYS" + suspend_task_after_num_failures = 10 + task_auto_retry_attempts = 10 + user_task_managed_initial_warehouse_size = "LARGE" + user_task_timeout_ms = 3600000 + user_task_minimum_trigger_interval_in_seconds = 120 + quoted_identifiers_ignore_case = false + enable_console_output = false } ``` @@ -62,11 +72,18 @@ resource "snowflake_shared_database" "test" { - `catalog` (String) The database parameter that specifies the default catalog to use for Iceberg tables. - `comment` (String) Specifies a comment for the database. - `default_ddl_collation` (String) Specifies a default collation specification for all schemas and tables added to the database. It can be overridden on schema or table level. 
For more information, see [collation specification](https://docs.snowflake.com/en/sql-reference/collation#label-collation-specification). +- `enable_console_output` (Boolean) If true, enables stdout/stderr fast path logging for anonymous stored procedures. - `external_volume` (String) The database parameter that specifies the default external volume to use for Iceberg tables. - `log_level` (String) Specifies the severity level of messages that should be ingested and made available in the active event table. Valid options are: [TRACE DEBUG INFO WARN ERROR FATAL OFF]. Messages at the specified level (and at more severe levels) are ingested. For more information, see [LOG_LEVEL](https://docs.snowflake.com/en/sql-reference/parameters.html#label-log-level). +- `quoted_identifiers_ignore_case` (Boolean) If true, the case of quoted identifiers is ignored. - `replace_invalid_characters` (Boolean) Specifies whether to replace invalid UTF-8 characters with the Unicode replacement character (�) in query results for an Iceberg table. You can only set this parameter for tables that use an external Iceberg catalog. -- `storage_serialization_policy` (String) Specifies the storage serialization policy for Iceberg tables that use Snowflake as the catalog. Valid options are: [COMPATIBLE OPTIMIZED]. COMPATIBLE: Snowflake performs encoding and compression of data files that ensures interoperability with third-party compute engines. OPTIMIZED: Snowflake performs encoding and compression of data files that ensures the best table performance within Snowflake. +- `storage_serialization_policy` (String) The storage serialization policy for Iceberg tables that use Snowflake as the catalog. Valid options are: [COMPATIBLE OPTIMIZED]. COMPATIBLE: Snowflake performs encoding and compression of data files that ensures interoperability with third-party compute engines. OPTIMIZED: Snowflake performs encoding and compression of data files that ensures the best table performance within Snowflake. 
+- `suspend_task_after_num_failures` (Number) How many times a task must fail in a row before it is automatically suspended. 0 disables auto-suspending. +- `task_auto_retry_attempts` (Number) Maximum automatic retries allowed for a user task. - `trace_level` (String) Controls how trace events are ingested into the event table. Valid options are: [ALWAYS ON_EVENT OFF]. For information about levels, see [TRACE_LEVEL](https://docs.snowflake.com/en/sql-reference/parameters.html#label-trace-level). +- `user_task_managed_initial_warehouse_size` (String) The initial size of warehouse to use for managed warehouses in the absence of history. +- `user_task_minimum_trigger_interval_in_seconds` (Number) Minimum amount of time between Triggered Task executions in seconds. +- `user_task_timeout_ms` (Number) User task execution timeout in milliseconds. ### Read-Only diff --git a/docs/resources/standard_database.md b/docs/resources/standard_database.md index d307c63119..eb7e255a65 100644 --- a/docs/resources/standard_database.md +++ b/docs/resources/standard_database.md @@ -17,33 +17,22 @@ resource "snowflake_standard_database" "primary" { is_transient = false comment = "my standard database" - data_retention_time_in_days { - value = 10 - } - max_data_extension_time_in_days { - value = 20 - } - external_volume { - value = "" - } - catalog { - value = "" - } - replace_invalid_characters { - value = false - } - default_ddl_collation { - value = "en_US" - } - storage_serialization_policy { - value = "COMPATIBLE" - } - log_level { - value = "INFO" - } - trace_level { - value = "ALWAYS" - } + data_retention_time_in_days = 10 + max_data_extension_time_in_days = 20 + external_volume = "" + catalog = "" + replace_invalid_characters = false + default_ddl_collation = "en_US" + storage_serialization_policy = "COMPATIBLE" + log_level = "INFO" + trace_level = "ALWAYS" + suspend_task_after_num_failures = 10 + task_auto_retry_attempts = 10 + user_task_managed_initial_warehouse_size = "LARGE" + 
user_task_timeout_ms = 3600000 + user_task_minimum_trigger_interval_in_seconds = 120 + quoted_identifiers_ignore_case = false + enable_console_output = false replication { enable_for_account { @@ -64,79 +53,30 @@ resource "snowflake_standard_database" "primary" { ### Optional -- `catalog` (Block List, Max: 1) The database parameter that specifies the default catalog to use for Iceberg tables. (see [below for nested schema](#nestedblock--catalog)) +- `catalog` (String) The database parameter that specifies the default catalog to use for Iceberg tables. - `comment` (String) Specifies a comment for the database. -- `data_retention_time_in_days` (Block List, Max: 1) Specifies the number of days for which Time Travel actions (CLONE and UNDROP) can be performed on the database, as well as specifying the default Time Travel retention time for all schemas created in the database. For more details, see [Understanding & Using Time Travel](https://docs.snowflake.com/en/user-guide/data-time-travel). (see [below for nested schema](#nestedblock--data_retention_time_in_days)) -- `default_ddl_collation` (Block List, Max: 1) Specifies a default collation specification for all schemas and tables added to the database. It can be overridden on schema or table level. For more information, see [collation specification](https://docs.snowflake.com/en/sql-reference/collation#label-collation-specification). (see [below for nested schema](#nestedblock--default_ddl_collation)) -- `external_volume` (Block List, Max: 1) The database parameter that specifies the default external volume to use for Iceberg tables. (see [below for nested schema](#nestedblock--external_volume)) +- `data_retention_time_in_days` (Number) Specifies the number of days for which Time Travel actions (CLONE and UNDROP) can be performed on the database, as well as specifying the default Time Travel retention time for all schemas created in the database. 
For more details, see [Understanding & Using Time Travel](https://docs.snowflake.com/en/user-guide/data-time-travel). +- `default_ddl_collation` (String) Specifies a default collation specification for all schemas and tables added to the database. It can be overridden on schema or table level. For more information, see [collation specification](https://docs.snowflake.com/en/sql-reference/collation#label-collation-specification). +- `enable_console_output` (Boolean) If true, enables stdout/stderr fast path logging for anonymous stored procedures. +- `external_volume` (String) The database parameter that specifies the default external volume to use for Iceberg tables. - `is_transient` (Boolean) Specifies the database as transient. Transient databases do not have a Fail-safe period so they do not incur additional storage costs once they leave Time Travel; however, this means they are also not protected by Fail-safe in the event of a data loss. -- `log_level` (Block List, Max: 1) Specifies the severity level of messages that should be ingested and made available in the active event table. Valid options are: [TRACE DEBUG INFO WARN ERROR FATAL OFF]. Messages at the specified level (and at more severe levels) are ingested. For more information, see [LOG_LEVEL](https://docs.snowflake.com/en/sql-reference/parameters.html#label-log-level). (see [below for nested schema](#nestedblock--log_level)) -- `max_data_extension_time_in_days` (Block List, Max: 1) Object parameter that specifies the maximum number of days for which Snowflake can extend the data retention period for tables in the database to prevent streams on the tables from becoming stale. For a detailed description of this parameter, see [MAX_DATA_EXTENSION_TIME_IN_DAYS](https://docs.snowflake.com/en/sql-reference/parameters.html#label-max-data-extension-time-in-days). 
(see [below for nested schema](#nestedblock--max_data_extension_time_in_days)) -- `replace_invalid_characters` (Block List, Max: 1) Specifies whether to replace invalid UTF-8 characters with the Unicode replacement character (�) in query results for an Iceberg table. You can only set this parameter for tables that use an external Iceberg catalog. (see [below for nested schema](#nestedblock--replace_invalid_characters)) +- `log_level` (String) Specifies the severity level of messages that should be ingested and made available in the active event table. Valid options are: [TRACE DEBUG INFO WARN ERROR FATAL OFF]. Messages at the specified level (and at more severe levels) are ingested. For more information, see [LOG_LEVEL](https://docs.snowflake.com/en/sql-reference/parameters.html#label-log-level). +- `max_data_extension_time_in_days` (Number) Object parameter that specifies the maximum number of days for which Snowflake can extend the data retention period for tables in the database to prevent streams on the tables from becoming stale. For a detailed description of this parameter, see [MAX_DATA_EXTENSION_TIME_IN_DAYS](https://docs.snowflake.com/en/sql-reference/parameters.html#label-max-data-extension-time-in-days). +- `quoted_identifiers_ignore_case` (Boolean) If true, the case of quoted identifiers is ignored. +- `replace_invalid_characters` (Boolean) Specifies whether to replace invalid UTF-8 characters with the Unicode replacement character (�) in query results for an Iceberg table. You can only set this parameter for tables that use an external Iceberg catalog. - `replication` (Block List, Max: 1) Configures replication for a given database. When specified, this database will be promoted to serve as a primary database for replication. A primary database can be replicated in one or more accounts, allowing users in those accounts to query objects in each secondary (i.e. replica) database. 
(see [below for nested schema](#nestedblock--replication)) -- `storage_serialization_policy` (Block List, Max: 1) Specifies the storage serialization policy for Iceberg tables that use Snowflake as the catalog. Valid options are: [COMPATIBLE OPTIMIZED]. COMPATIBLE: Snowflake performs encoding and compression of data files that ensures interoperability with third-party compute engines. OPTIMIZED: Snowflake performs encoding and compression of data files that ensures the best table performance within Snowflake. (see [below for nested schema](#nestedblock--storage_serialization_policy)) -- `trace_level` (Block List, Max: 1) Controls how trace events are ingested into the event table. Valid options are: [ALWAYS ON_EVENT OFF]. For information about levels, see [TRACE_LEVEL](https://docs.snowflake.com/en/sql-reference/parameters.html#label-trace-level). (see [below for nested schema](#nestedblock--trace_level)) +- `storage_serialization_policy` (String) The storage serialization policy for Iceberg tables that use Snowflake as the catalog. Valid options are: [COMPATIBLE OPTIMIZED]. COMPATIBLE: Snowflake performs encoding and compression of data files that ensures interoperability with third-party compute engines. OPTIMIZED: Snowflake performs encoding and compression of data files that ensures the best table performance within Snowflake. +- `suspend_task_after_num_failures` (Number) How many times a task must fail in a row before it is automatically suspended. 0 disables auto-suspending. +- `task_auto_retry_attempts` (Number) Maximum automatic retries allowed for a user task. +- `trace_level` (String) Controls how trace events are ingested into the event table. Valid options are: [ALWAYS ON_EVENT OFF]. For information about levels, see [TRACE_LEVEL](https://docs.snowflake.com/en/sql-reference/parameters.html#label-trace-level). +- `user_task_managed_initial_warehouse_size` (String) The initial size of warehouse to use for managed warehouses in the absence of history. 
+- `user_task_minimum_trigger_interval_in_seconds` (Number) Minimum amount of time between Triggered Task executions in seconds. +- `user_task_timeout_ms` (Number) User task execution timeout in milliseconds. ### Read-Only - `id` (String) The ID of this resource. - -### Nested Schema for `catalog` - -Required: - -- `value` (String) - - - -### Nested Schema for `data_retention_time_in_days` - -Required: - -- `value` (Number) - - - -### Nested Schema for `default_ddl_collation` - -Required: - -- `value` (String) - - - -### Nested Schema for `external_volume` - -Required: - -- `value` (String) - - - -### Nested Schema for `log_level` - -Required: - -- `value` (String) - - - -### Nested Schema for `max_data_extension_time_in_days` - -Required: - -- `value` (Number) - - - -### Nested Schema for `replace_invalid_characters` - -Required: - -- `value` (Boolean) - - ### Nested Schema for `replication` @@ -159,23 +99,6 @@ Optional: - `with_failover` (Boolean) Specifies if failover should be enabled for the specified account identifier - - - -### Nested Schema for `storage_serialization_policy` - -Required: - -- `value` (String) - - - -### Nested Schema for `trace_level` - -Required: - -- `value` (String) - ## Import Import is supported using the following syntax: diff --git a/examples/data-sources/snowflake_databases/data-source.tf b/examples/data-sources/snowflake_databases/data-source.tf index 7e1e9dae55..d716994976 100644 --- a/examples/data-sources/snowflake_databases/data-source.tf +++ b/examples/data-sources/snowflake_databases/data-source.tf @@ -1,13 +1,57 @@ -data "snowflake_databases" "test" { - with_describe = false - with_parameters = false - like = "database-name" - starts_with = "database-name" +# Simple usage +data "snowflake_databases" "simple" { +} + +output "simple_output" { + value = data.snowflake_databases.simple.databases +} + +# Filtering (like) +data "snowflake_databases" "like" { + like = "database-name" +} + +output "like_output" { + value = 
data.snowflake_databases.like.databases +} + +# Filtering (starts_with) +data "snowflake_databases" "starts_with" { + starts_with = "database-" +} + +output "starts_with_output" { + value = data.snowflake_databases.starts_with.databases +} + +# Filtering (limit) +data "snowflake_databases" "limit" { limit { - rows = 20 - from = "database-name" + rows = 10 + from = "database-" } +} + +output "limit_output" { + value = data.snowflake_databases.limit.databases +} + +# Without additional data (to limit the number of calls made for every found database) +data "snowflake_databases" "only_show" { + # with_describe is turned on by default and it calls DESCRIBE DATABASE for every database found and attaches its output to databases.*.description field + with_describe = false + + # with_parameters is turned on by default and it calls SHOW PARAMETERS FOR DATABASE for every database found and attaches its output to databases.*.parameters field + with_parameters = false +} + +output "only_show_output" { + value = data.snowflake_databases.only_show.databases +} +# Ensure the number of databases is equal to at least one element (with the use of postcondition) +data "snowflake_databases" "assert_with_postcondition" { + starts_with = "database-name" lifecycle { postcondition { condition = length(self.databases) > 0 @@ -16,8 +60,9 @@ data "snowflake_databases" "test" { } } +# Ensure the number of databases is equal to exactly one element (with the use of check block) check "database_check" { - data "snowflake_databases" "test" { + data "snowflake_databases" "assert_with_check_block" { like = "database-name" } diff --git a/examples/resources/snowflake_secondary_database/resource.tf b/examples/resources/snowflake_secondary_database/resource.tf index 3159fa4c81..b2f7fbaa4f 100644 --- a/examples/resources/snowflake_secondary_database/resource.tf +++ b/examples/resources/snowflake_secondary_database/resource.tf @@ -15,23 +15,24 @@ resource "snowflake_standard_database" "primary" { 
resource "snowflake_secondary_database" "test" { provider = secondary_account name = snowflake_standard_database.primary.name # It's recommended to give a secondary database the same name as its primary database - as_replica_of = "..${snowflake_standard_database.primary.name}" is_transient = false + as_replica_of = "..${snowflake_standard_database.primary.name}" + comment = "A secondary database" - data_retention_time_in_days { - value = 10 - } - - max_data_extension_time_in_days { - value = 20 - } - - external_volume = "external_volume_name" - catalog = "catalog_name" - replace_invalid_characters = false - default_ddl_collation = "en_US" - storage_serialization_policy = "OPTIMIZED" - log_level = "OFF" - trace_level = "OFF" - comment = "A secondary database" + data_retention_time_in_days = 10 + max_data_extension_time_in_days = 20 + external_volume = "" + catalog = "" + replace_invalid_characters = false + default_ddl_collation = "en_US" + storage_serialization_policy = "COMPATIBLE" + log_level = "INFO" + trace_level = "ALWAYS" + suspend_task_after_num_failures = 10 + task_auto_retry_attempts = 10 + user_task_managed_initial_warehouse_size = "LARGE" + user_task_timeout_ms = 3600000 + user_task_minimum_trigger_interval_in_seconds = 120 + quoted_identifiers_ignore_case = false + enable_console_output = false } diff --git a/examples/resources/snowflake_shared_database/resource.tf b/examples/resources/snowflake_shared_database/resource.tf index 5b62ae98d8..3ab7129d44 100644 --- a/examples/resources/snowflake_shared_database/resource.tf +++ b/examples/resources/snowflake_shared_database/resource.tf @@ -19,17 +19,27 @@ resource "snowflake_grant_privileges_to_share" "test" { # 2. 
Creating shared database resource "snowflake_shared_database" "test" { - provider = secondary_account - depends_on = [snowflake_grant_privileges_to_share.test] - name = snowflake_standard_database.test.name # shared database should have the same as the "imported" one - from_share = "..${snowflake_share.test.name}" - is_transient = false - external_volume = "external_volume_name" - catalog = "catalog_name" - replace_invalid_characters = false - default_ddl_collation = "en_US" - storage_serialization_policy = "OPTIMIZED" - log_level = "OFF" - trace_level = "OFF" - comment = "A shared database" + provider = secondary_account + depends_on = [snowflake_grant_privileges_to_share.test] + name = snowflake_standard_database.test.name # shared database should have the same as the "imported" one + is_transient = false + from_share = "..${snowflake_share.test.name}" + comment = "A shared database" + + data_retention_time_in_days = 10 + max_data_extension_time_in_days = 20 + external_volume = "" + catalog = "" + replace_invalid_characters = false + default_ddl_collation = "en_US" + storage_serialization_policy = "COMPATIBLE" + log_level = "INFO" + trace_level = "ALWAYS" + suspend_task_after_num_failures = 10 + task_auto_retry_attempts = 10 + user_task_managed_initial_warehouse_size = "LARGE" + user_task_timeout_ms = 3600000 + user_task_minimum_trigger_interval_in_seconds = 120 + quoted_identifiers_ignore_case = false + enable_console_output = false } diff --git a/examples/resources/snowflake_standard_database/resource.tf b/examples/resources/snowflake_standard_database/resource.tf index 192206ac6e..5e3c1d31d6 100644 --- a/examples/resources/snowflake_standard_database/resource.tf +++ b/examples/resources/snowflake_standard_database/resource.tf @@ -3,33 +3,22 @@ resource "snowflake_standard_database" "primary" { is_transient = false comment = "my standard database" - data_retention_time_in_days { - value = 10 - } - max_data_extension_time_in_days { - value = 20 - } - 
external_volume { - value = "" - } - catalog { - value = "" - } - replace_invalid_characters { - value = false - } - default_ddl_collation { - value = "en_US" - } - storage_serialization_policy { - value = "COMPATIBLE" - } - log_level { - value = "INFO" - } - trace_level { - value = "ALWAYS" - } + data_retention_time_in_days = 10 + max_data_extension_time_in_days = 20 + external_volume = "" + catalog = "" + replace_invalid_characters = false + default_ddl_collation = "en_US" + storage_serialization_policy = "COMPATIBLE" + log_level = "INFO" + trace_level = "ALWAYS" + suspend_task_after_num_failures = 10 + task_auto_retry_attempts = 10 + user_task_managed_initial_warehouse_size = "LARGE" + user_task_timeout_ms = 3600000 + user_task_minimum_trigger_interval_in_seconds = 120 + quoted_identifiers_ignore_case = false + enable_console_output = false replication { enable_for_account { diff --git a/pkg/acceptance/asserts.go b/pkg/acceptance/asserts.go index ec185e3d39..6912a44d57 100644 --- a/pkg/acceptance/asserts.go +++ b/pkg/acceptance/asserts.go @@ -11,11 +11,11 @@ func IsGreaterOrEqualTo(greaterOrEqualValue int) resource.CheckResourceAttrWithF return func(value string) error { intValue, err := strconv.Atoi(value) if err != nil { - return err + return fmt.Errorf("unable to parse value %s as integer, err = %w", value, err) } if intValue < greaterOrEqualValue { - return fmt.Errorf("expected value greater or equal to %d, got %d", greaterOrEqualValue, intValue) + return fmt.Errorf("expected value %d greater or equal to %d", intValue, greaterOrEqualValue) } return nil diff --git a/pkg/acceptance/asserts_test.go b/pkg/acceptance/asserts_test.go index 3369778d5b..b414f13f0e 100644 --- a/pkg/acceptance/asserts_test.go +++ b/pkg/acceptance/asserts_test.go @@ -23,7 +23,7 @@ func TestIsGreaterOrEqualTo(t *testing.T) { Name: "validation: not int value", GreaterOrEqualTo: 20, Actual: "not_int", - Error: "strconv.Atoi: parsing \"not_int\": invalid syntax", + Error: "unable to parse 
value not_int as integer, err = strconv.Atoi: parsing \"not_int\": invalid syntax", }, { Name: "validation: equal value", @@ -43,7 +43,7 @@ func TestIsGreaterOrEqualTo(t *testing.T) { if testCase.Error != "" { assert.ErrorContains(t, err, testCase.Error) } else { - assert.Nil(t, err) + assert.NoError(t, err) } }) } diff --git a/pkg/acceptance/helpers/parameter_client.go b/pkg/acceptance/helpers/parameter_client.go index c0e62d79ed..f41dc52cdb 100644 --- a/pkg/acceptance/helpers/parameter_client.go +++ b/pkg/acceptance/helpers/parameter_client.go @@ -2,6 +2,8 @@ package helpers import ( "context" + "fmt" + "slices" "testing" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk" @@ -22,6 +24,35 @@ func (c *ParameterClient) client() sdk.Parameters { return c.context.client.Parameters } +func (c *ParameterClient) ShowAccountParameters(t *testing.T) []*sdk.Parameter { + t.Helper() + params, err := c.client().ShowParameters(context.Background(), &sdk.ShowParametersOptions{ + In: &sdk.ParametersIn{ + Account: sdk.Bool(true), + }, + }) + require.NoError(t, err) + return params +} + +func (c *ParameterClient) ShowDatabaseParameters(t *testing.T, id sdk.AccountObjectIdentifier) []*sdk.Parameter { + t.Helper() + params, err := c.client().ShowParameters(context.Background(), &sdk.ShowParametersOptions{ + In: &sdk.ParametersIn{ + Database: id, + }, + }) + require.NoError(t, err) + return params +} + +func (c *ParameterClient) GetAccountParameter(t *testing.T, parameter sdk.AccountParameter) *sdk.Parameter { + t.Helper() + param, err := c.client().ShowAccountParameter(context.Background(), parameter) + require.NoError(t, err) + return param +} + func (c *ParameterClient) UpdateAccountParameterTemporarily(t *testing.T, parameter sdk.AccountParameter, newValue string) func() { t.Helper() ctx := context.Background() @@ -38,3 +69,12 @@ func (c *ParameterClient) UpdateAccountParameterTemporarily(t *testing.T, parame require.NoError(t, err) } } + +func FindParameter(t 
*testing.T, parameters []*sdk.Parameter, parameter sdk.AccountParameter) *sdk.Parameter { + t.Helper() + idx := slices.IndexFunc(parameters, func(p *sdk.Parameter) bool { + return p.Key == string(parameter) + }) + require.NotEqual(t, -1, idx, fmt.Sprintf("parameter %s not found", string(parameter))) + return parameters[idx] +} diff --git a/pkg/datasources/databases.go b/pkg/datasources/databases.go index a2b6bf5f30..ef84f90efa 100644 --- a/pkg/datasources/databases.go +++ b/pkg/datasources/databases.go @@ -36,7 +36,7 @@ var databasesSchema = map[string]*schema.Schema{ "limit": { Type: schema.TypeList, Optional: true, - Description: `Limits the number of rows returned, while also enabling "pagination" or the results.`, + Description: `Limits the number of rows returned. The limit may start from the first element matched by from which is optional.`, MaxItems: 1, Elem: &schema.Resource{ Schema: map[string]*schema.Schema{ diff --git a/pkg/datasources/databases_acceptance_test.go b/pkg/datasources/databases_acceptance_test.go index 9484826939..d250eb891a 100644 --- a/pkg/datasources/databases_acceptance_test.go +++ b/pkg/datasources/databases_acceptance_test.go @@ -1,6 +1,8 @@ package datasources_test import ( + "maps" + "regexp" "strconv" "testing" @@ -32,7 +34,7 @@ func TestAcc_Databases_Complete(t *testing.T) { CheckDestroy: acc.CheckDestroy(t, resources.StandardDatabase), Steps: []resource.TestStep{ { - ConfigDirectory: acc.ConfigurationDirectory("TestAcc_Databases"), + ConfigDirectory: acc.ConfigurationDirectory("TestAcc_Databases/optionals_set"), ConfigVariables: configVariables, Check: resource.ComposeTestCheckFunc( resource.TestCheckResourceAttr("data.snowflake_databases.test", "databases.#", "1"), @@ -63,6 +65,104 @@ resource.TestCheckResourceAttrSet("data.snowflake_databases.test", "databases.0.parameters.0.description"), ), }, + { + ConfigDirectory:
acc.ConfigurationDirectory("TestAcc_Databases/optionals_unset"), + ConfigVariables: configVariables, + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr("data.snowflake_databases.test", "databases.#", "1"), + resource.TestCheckResourceAttrSet("data.snowflake_databases.test", "databases.0.created_on"), + resource.TestCheckResourceAttr("data.snowflake_databases.test", "databases.0.name", databaseName), + resource.TestCheckResourceAttr("data.snowflake_databases.test", "databases.0.kind", "STANDARD"), + resource.TestCheckResourceAttr("data.snowflake_databases.test", "databases.0.is_transient", "false"), + resource.TestCheckResourceAttr("data.snowflake_databases.test", "databases.0.is_default", "false"), + resource.TestCheckResourceAttr("data.snowflake_databases.test", "databases.0.is_current", "false"), + resource.TestCheckResourceAttr("data.snowflake_databases.test", "databases.0.origin", ""), + resource.TestCheckResourceAttrSet("data.snowflake_databases.test", "databases.0.owner"), + resource.TestCheckResourceAttr("data.snowflake_databases.test", "databases.0.comment", comment), + resource.TestCheckResourceAttr("data.snowflake_databases.test", "databases.0.options", ""), + resource.TestCheckResourceAttrSet("data.snowflake_databases.test", "databases.0.retention_time"), + resource.TestCheckResourceAttr("data.snowflake_databases.test", "databases.0.resource_group", ""), + resource.TestCheckResourceAttrSet("data.snowflake_databases.test", "databases.0.owner_role_type"), + + resource.TestCheckResourceAttr("data.snowflake_databases.test", "databases.0.description.#", "0"), + resource.TestCheckResourceAttr("data.snowflake_databases.test", "databases.0.parameters.#", "0"), + ), + }, + }, + }) +} + +func TestAcc_Databases_DifferentFiltering(t *testing.T) { + prefix := acc.TestClient().Ids.Alpha() + idOne := acc.TestClient().Ids.RandomAccountObjectIdentifierWithPrefix(prefix) + idTwo := acc.TestClient().Ids.RandomAccountObjectIdentifierWithPrefix(prefix)
+ idThree := acc.TestClient().Ids.RandomAccountObjectIdentifier() + + commonVariables := config.Variables{ + "name_1": config.StringVariable(idOne.Name()), + "name_2": config.StringVariable(idTwo.Name()), + "name_3": config.StringVariable(idThree.Name()), + } + + likeConfig := config.Variables{ + "like": config.StringVariable(idOne.Name()), + } + maps.Copy(likeConfig, commonVariables) + + startsWithConfig := config.Variables{ + "starts_with": config.StringVariable(prefix), + } + maps.Copy(startsWithConfig, commonVariables) + + limitConfig := config.Variables{ + "rows": config.IntegerVariable(1), + "from": config.StringVariable(prefix), + } + maps.Copy(limitConfig, commonVariables) + + resource.Test(t, resource.TestCase{ + ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, + TerraformVersionChecks: []tfversion.TerraformVersionCheck{ + tfversion.RequireAbove(tfversion.Version1_5_0), + }, + CheckDestroy: acc.CheckDestroy(t, resources.StandardDatabase), + Steps: []resource.TestStep{ + { + ConfigDirectory: acc.ConfigurationDirectory("TestAcc_Databases/like"), + ConfigVariables: likeConfig, + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr("data.snowflake_databases.test", "databases.#", "1"), + ), + }, + { + ConfigDirectory: acc.ConfigurationDirectory("TestAcc_Databases/starts_with"), + ConfigVariables: startsWithConfig, + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr("data.snowflake_databases.test", "databases.#", "2"), + ), + }, + { + ConfigDirectory: acc.ConfigurationDirectory("TestAcc_Databases/limit"), + ConfigVariables: limitConfig, + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr("data.snowflake_databases.test", "databases.#", "1"), + ), + }, + }, + }) +} + +func TestAcc_Databases_DatabaseNotFound_WithPostConditions(t *testing.T) { + resource.Test(t, resource.TestCase{ + ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, + TerraformVersionChecks: 
[]tfversion.TerraformVersionCheck{ + tfversion.RequireAbove(tfversion.Version1_5_0), + }, + Steps: []resource.TestStep{ + { + ConfigDirectory: acc.ConfigurationDirectory("TestAcc_Databases/without_database"), + ExpectError: regexp.MustCompile("there should be at least one database"), + }, }, }) } diff --git a/pkg/datasources/testdata/TestAcc_Databases/like/test.tf b/pkg/datasources/testdata/TestAcc_Databases/like/test.tf new file mode 100644 index 0000000000..5610719c92 --- /dev/null +++ b/pkg/datasources/testdata/TestAcc_Databases/like/test.tf @@ -0,0 +1,16 @@ +resource "snowflake_standard_database" "test_1" { + name = var.name_1 +} + +resource "snowflake_standard_database" "test_2" { + name = var.name_2 +} + +resource "snowflake_standard_database" "test_3" { + name = var.name_3 +} + +data "snowflake_databases" "test" { + depends_on = [snowflake_standard_database.test_1, snowflake_standard_database.test_2, snowflake_standard_database.test_3] + like = var.like +} diff --git a/pkg/datasources/testdata/TestAcc_Databases/like/variables.tf b/pkg/datasources/testdata/TestAcc_Databases/like/variables.tf new file mode 100644 index 0000000000..6bd0278080 --- /dev/null +++ b/pkg/datasources/testdata/TestAcc_Databases/like/variables.tf @@ -0,0 +1,15 @@ +variable "name_1" { + type = string +} + +variable "name_2" { + type = string +} + +variable "name_3" { + type = string +} + +variable "like" { + type = string +} diff --git a/pkg/datasources/testdata/TestAcc_Databases/limit/test.tf b/pkg/datasources/testdata/TestAcc_Databases/limit/test.tf new file mode 100644 index 0000000000..574ccb8b18 --- /dev/null +++ b/pkg/datasources/testdata/TestAcc_Databases/limit/test.tf @@ -0,0 +1,19 @@ +resource "snowflake_standard_database" "test_1" { + name = var.name_1 +} + +resource "snowflake_standard_database" "test_2" { + name = var.name_2 +} + +resource "snowflake_standard_database" "test_3" { + name = var.name_3 +} + +data "snowflake_databases" "test" { + depends_on =
[snowflake_standard_database.test_1, snowflake_standard_database.test_2, snowflake_standard_database.test_3] + limit { + rows = var.rows + from = var.from + } +} diff --git a/pkg/datasources/testdata/TestAcc_Databases/limit/variables.tf b/pkg/datasources/testdata/TestAcc_Databases/limit/variables.tf new file mode 100644 index 0000000000..989508a9ce --- /dev/null +++ b/pkg/datasources/testdata/TestAcc_Databases/limit/variables.tf @@ -0,0 +1,19 @@ +variable "name_1" { + type = string +} + +variable "name_2" { + type = string +} + +variable "name_3" { + type = string +} + +variable "rows" { + type = number +} + +variable "from" { + type = string +} diff --git a/pkg/datasources/testdata/TestAcc_Databases/test.tf b/pkg/datasources/testdata/TestAcc_Databases/optionals_set/test.tf similarity index 100% rename from pkg/datasources/testdata/TestAcc_Databases/test.tf rename to pkg/datasources/testdata/TestAcc_Databases/optionals_set/test.tf diff --git a/pkg/datasources/testdata/TestAcc_Databases/variables.tf b/pkg/datasources/testdata/TestAcc_Databases/optionals_set/variables.tf similarity index 100% rename from pkg/datasources/testdata/TestAcc_Databases/variables.tf rename to pkg/datasources/testdata/TestAcc_Databases/optionals_set/variables.tf diff --git a/pkg/datasources/testdata/TestAcc_Databases/optionals_unset/test.tf b/pkg/datasources/testdata/TestAcc_Databases/optionals_unset/test.tf new file mode 100644 index 0000000000..1394847199 --- /dev/null +++ b/pkg/datasources/testdata/TestAcc_Databases/optionals_unset/test.tf @@ -0,0 +1,22 @@ +resource "snowflake_standard_database" "test" { + name = var.name + comment = var.comment + replication { + enable_for_account { + account_identifier = var.account_identifier + with_failover = true + } + ignore_edition_check = true + } +} + +data "snowflake_databases" "test" { + with_describe = false + with_parameters = false + depends_on = [snowflake_standard_database.test] + like = var.name + starts_with = var.name + limit { + rows = 
1 + } +} diff --git a/pkg/datasources/testdata/TestAcc_Databases/optionals_unset/variables.tf b/pkg/datasources/testdata/TestAcc_Databases/optionals_unset/variables.tf new file mode 100644 index 0000000000..ea75b95f23 --- /dev/null +++ b/pkg/datasources/testdata/TestAcc_Databases/optionals_unset/variables.tf @@ -0,0 +1,11 @@ +variable "name" { + type = string +} + +variable "account_identifier" { + type = string +} + +variable "comment" { + type = string +} diff --git a/pkg/datasources/testdata/TestAcc_Databases/starts_with/test.tf b/pkg/datasources/testdata/TestAcc_Databases/starts_with/test.tf new file mode 100644 index 0000000000..047d9f6c86 --- /dev/null +++ b/pkg/datasources/testdata/TestAcc_Databases/starts_with/test.tf @@ -0,0 +1,16 @@ +resource "snowflake_standard_database" "test_1" { + name = var.name_1 +} + +resource "snowflake_standard_database" "test_2" { + name = var.name_2 +} + +resource "snowflake_standard_database" "test_3" { + name = var.name_3 +} + +data "snowflake_databases" "test" { + depends_on = [snowflake_standard_database.test_1, snowflake_standard_database.test_2, snowflake_standard_database.test_3] + starts_with = var.starts_with +} diff --git a/pkg/datasources/testdata/TestAcc_Databases/starts_with/variables.tf b/pkg/datasources/testdata/TestAcc_Databases/starts_with/variables.tf new file mode 100644 index 0000000000..a4044d2176 --- /dev/null +++ b/pkg/datasources/testdata/TestAcc_Databases/starts_with/variables.tf @@ -0,0 +1,15 @@ +variable "name_1" { + type = string +} + +variable "name_2" { + type = string +} + +variable "name_3" { + type = string +} + +variable "starts_with" { + type = string +} diff --git a/pkg/datasources/testdata/TestAcc_Databases/without_database/test.tf b/pkg/datasources/testdata/TestAcc_Databases/without_database/test.tf new file mode 100644 index 0000000000..5fe341159d --- /dev/null +++ b/pkg/datasources/testdata/TestAcc_Databases/without_database/test.tf @@ -0,0 +1,10 @@ +data "snowflake_databases" "test" { + 
like = "non-existing-database" + + lifecycle { + postcondition { + condition = length(self.databases) > 0 + error_message = "there should be at least one database" + } + } +} diff --git a/pkg/helpers/helpers.go b/pkg/helpers/helpers.go index d5a04ac4ba..6af1a74fb1 100644 --- a/pkg/helpers/helpers.go +++ b/pkg/helpers/helpers.go @@ -6,6 +6,7 @@ import ( "log" "reflect" "regexp" + "slices" "strconv" "strings" "time" @@ -167,3 +168,24 @@ func Retry(attempts int, sleepDuration time.Duration, f func() (error, bool)) er } return fmt.Errorf("giving up after %v attempts", attempts) } + +// ListDiff Compares two lists (before and after), then compares and returns two lists that include +// added and removed items between those lists. +func ListDiff[T comparable](beforeList []T, afterList []T) (added []T, removed []T) { + added = make([]T, 0) + removed = make([]T, 0) + + for _, privilegeBeforeChange := range beforeList { + if !slices.Contains(afterList, privilegeBeforeChange) { + removed = append(removed, privilegeBeforeChange) + } + } + + for _, privilegeAfterChange := range afterList { + if !slices.Contains(beforeList, privilegeAfterChange) { + added = append(added, privilegeAfterChange) + } + } + + return added, removed +} diff --git a/pkg/helpers/helpers_test.go b/pkg/helpers/helpers_test.go index 7122c9962e..03c5ae465f 100644 --- a/pkg/helpers/helpers_test.go +++ b/pkg/helpers/helpers_test.go @@ -2,6 +2,7 @@ package helpers import ( "fmt" + "github.com/stretchr/testify/assert" "testing" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk" @@ -185,3 +186,64 @@ func (i unsupportedObjectIdentifier) Name() string { func (i unsupportedObjectIdentifier) FullyQualifiedName() string { return "fully qualified name" } + +func TestListDiff(t *testing.T) { + testCases := []struct { + Name string + Before []any + After []any + Added []any + Removed []any + }{ + { + Name: "only removed", + Before: []any{1, 2, 3, 4}, + After: []any{}, + Removed: []any{1, 2, 3, 4}, + Added: 
[]any{}, + }, + { + Name: "only added", + Before: []any{}, + After: []any{1, 2, 3, 4}, + Removed: []any{}, + Added: []any{1, 2, 3, 4}, + }, + { + Name: "added repeated items", + Before: []any{2}, + After: []any{1, 2, 1}, + Removed: []any{}, + Added: []any{1, 1}, + }, + { + Name: "removed repeated items", + Before: []any{1, 2, 1}, + After: []any{2}, + Removed: []any{1, 1}, + Added: []any{}, + }, + { + Name: "simple diff: ints", + Before: []any{1, 2, 3, 4, 5, 6, 7, 8, 9}, + After: []any{1, 3, 5, 7, 9, 12, 13, 14}, + Removed: []any{2, 4, 6, 8}, + Added: []any{12, 13, 14}, + }, + { + Name: "simple diff: strings", + Before: []any{"one", "two", "three", "four"}, + After: []any{"five", "two", "four", "six"}, + Removed: []any{"one", "three"}, + Added: []any{"five", "six"}, + }, + } + + for _, tc := range testCases { + t.Run(tc.Name, func(t *testing.T) { + added, removed := ListDiff(tc.Before, tc.After) + assert.Equal(t, tc.Added, added) + assert.Equal(t, tc.Removed, removed) + }) + } +} diff --git a/pkg/internal/provider/docs/doc_helpers.go b/pkg/internal/provider/docs/doc_helpers.go index 7016300e36..82877a8b8b 100644 --- a/pkg/internal/provider/docs/doc_helpers.go +++ b/pkg/internal/provider/docs/doc_helpers.go @@ -8,6 +8,7 @@ import ( // deprecationMessageRegex is the message that should be used in resource/datasource DeprecationMessage to get a nice link in the documentation to the replacing resource. 
var deprecationMessageRegex = regexp.MustCompile(`Please use (snowflake_(\w+)) instead.`) +// TODO(SNOW-1465227): Should detect more than one replacement // GetDeprecatedResourceReplacement allows us to get resource replacement based on the regex deprecationMessageRegex func GetDeprecatedResourceReplacement(deprecationMessage string) (replacement string, replacementPage string, ok bool) { resourceReplacement := deprecationMessageRegex.FindStringSubmatch(deprecationMessage) diff --git a/pkg/resources/custom_diffs.go b/pkg/resources/custom_diffs.go index 02bc6eff33..99060cf0e4 100644 --- a/pkg/resources/custom_diffs.go +++ b/pkg/resources/custom_diffs.go @@ -5,76 +5,10 @@ import ( "context" "log" "strconv" - "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/internal/provider" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk" - "github.com/hashicorp/terraform-plugin-sdk/v2/helper/customdiff" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" ) -// NestedIntValueAccountObjectComputedIf is NestedValueComputedIf, -// but dedicated for account level objects with integer properties. -func NestedIntValueAccountObjectComputedIf(key string, parameter sdk.AccountParameter) schema.CustomizeDiffFunc { - return NestedValueComputedIf( - key, - func(client *sdk.Client) (*sdk.Parameter, error) { - return client.Parameters.ShowAccountParameter(context.Background(), parameter) - }, - func(v any) string { return strconv.Itoa(v.(int)) }, - ) -} - -// NestedStringValueAccountObjectComputedIf is NestedValueComputedIf, -// but dedicated for account level objects with string properties.
-func NestedStringValueAccountObjectComputedIf(key string, parameter sdk.AccountParameter) schema.CustomizeDiffFunc { - return NestedValueComputedIf( - key, - func(client *sdk.Client) (*sdk.Parameter, error) { - return client.Parameters.ShowAccountParameter(context.Background(), parameter) - }, - func(v any) string { return v.(string) }, - ) -} - -// NestedBoolValueAccountObjectComputedIf is NestedValueComputedIf, -// but dedicated for account level objects with bool properties. -func NestedBoolValueAccountObjectComputedIf(key string, parameter sdk.AccountParameter) schema.CustomizeDiffFunc { - return NestedValueComputedIf( - key, - func(client *sdk.Client) (*sdk.Parameter, error) { - return client.Parameters.ShowAccountParameter(context.Background(), parameter) - }, - func(v any) string { - return strconv.FormatBool(v.(bool)) - }, - ) -} - -// NestedValueComputedIf internally calls schema.ResourceDiff.SetNewComputed whenever the inner function returns true. -// It's main purpose was to use it with hierarchical values that are marked with Computed and Optional. Such values should -// be recomputed whenever the value is not in the configuration and the remote value is not equal to the value in state. 
-func NestedValueComputedIf(key string, showParam func(client *sdk.Client) (*sdk.Parameter, error), valueToString func(v any) string) schema.CustomizeDiffFunc { - return customdiff.ComputedIf(key, func(ctx context.Context, d *schema.ResourceDiff, meta interface{}) bool { - configValue, ok := d.GetRawConfig().AsValueMap()[key] - if ok && len(configValue.AsValueSlice()) == 1 { - return false - } - - client := meta.(*provider.Context).Client - - param, err := showParam(client) - if err != nil { - return false - } - - stateValue := d.Get(key).([]any) - if len(stateValue) != 1 { - return false - } - - return param.Value != valueToString(stateValue[0].(map[string]any)["value"]) - }) -} - func AccountObjectStringValueComputedIf(key string, params []*sdk.Parameter, parameter sdk.AccountParameter) schema.CustomizeDiffFunc { return ValueComputedIf( key, @@ -138,8 +72,13 @@ func ValueComputedIf[T any](key string, parameters []*sdk.Parameter, accountPara return func(ctx context.Context, d *schema.ResourceDiff, meta any) error { if condition(ctx, d, meta) { - return d.SetNew(key, valueFromString(*parameterValue)) + if *parameterValue == "" { + return d.SetNew(key, "") + } else { + return d.SetNew(key, valueFromString(*parameterValue)) + } } + return nil } } diff --git a/pkg/resources/custom_diffs_test.go b/pkg/resources/custom_diffs_test.go index f8ea5cc9d4..fc1b717007 100644 --- a/pkg/resources/custom_diffs_test.go +++ b/pkg/resources/custom_diffs_test.go @@ -2,191 +2,74 @@ package resources_test import ( "context" - "strconv" "testing" - acc "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance" - "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/internal/provider" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/resources" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk" + "github.com/stretchr/testify/assert" + + acc "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance" + 
"github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/internal/provider" "github.com/hashicorp/go-cty/cty" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" "github.com/hashicorp/terraform-plugin-sdk/v2/terraform" - "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) func TestNestedValueComputedIf(t *testing.T) { - customDiff := resources.NestedValueComputedIf( - "nested_value", - func(client *sdk.Client) (*sdk.Parameter, error) { - return &sdk.Parameter{ - Key: "Parameter", - Value: "snow-value", - }, nil + customDiff := resources.ValueComputedIf[string]( + "value", + []*sdk.Parameter{ + { + Key: string(sdk.AccountParameterLogLevel), + Value: string(sdk.LogLevelInfo), + }, }, + sdk.AccountParameterLogLevel, func(v any) string { return v.(string) }, + func(v string) string { return v }, ) - providerConfig := createProviderWithNestedValueAndCustomDiff(t, schema.TypeString, customDiff) + providerConfig := createProviderWithValuePropertyAndCustomDiff(t, schema.TypeString, customDiff) t.Run("value set in the configuration and state", func(t *testing.T) { diff := calculateDiff(t, providerConfig, cty.MapVal(map[string]cty.Value{ - "nested_value": cty.ListVal([]cty.Value{ - cty.MapVal(map[string]cty.Value{ - "value": cty.NumberIntVal(123), - }), - }), + "value": cty.StringVal(string(sdk.LogLevelInfo)), }), map[string]any{ - "nested_value": []any{ - map[string]any{ - "value": 123, - }, - }, + "value": string(sdk.LogLevelInfo), }) - assert.False(t, diff.Attributes["nested_value.#"].NewComputed) + assert.False(t, diff.Attributes["value"].NewComputed) }) t.Run("value set only in the configuration", func(t *testing.T) { diff := calculateDiff(t, providerConfig, cty.MapVal(map[string]cty.Value{ - "nested_value": cty.ListVal([]cty.Value{ - cty.MapVal(map[string]cty.Value{ - "value": cty.NumberIntVal(123), - }), - }), + "value": cty.StringVal(string(sdk.LogLevelInfo)), }), map[string]any{}) - assert.True(t, 
diff.Attributes["nested_value.#"].NewComputed) + assert.True(t, diff.Attributes["value"].NewComputed) }) t.Run("value set in the state and not equals with parameter", func(t *testing.T) { diff := calculateDiff(t, providerConfig, cty.MapValEmpty(cty.Type{}), map[string]any{ - "nested_value": []any{ - map[string]any{ - "value": "value-to-change", - }, - }, + "value": string(sdk.LogLevelDebug), }) - assert.True(t, diff.Attributes["nested_value.#"].NewComputed) + assert.Equal(t, string(sdk.LogLevelInfo), diff.Attributes["value"].New) }) t.Run("value set in the state and equals with parameter", func(t *testing.T) { diff := calculateDiff(t, providerConfig, cty.MapValEmpty(cty.Type{}), map[string]any{ - "nested_value": []any{ - map[string]any{ - "value": "snow-value", - }, - }, - }) - assert.False(t, diff.Attributes["nested_value.#"].NewComputed) - }) -} - -func TestNestedIntValueAccountObjectComputedIf(t *testing.T) { - providerConfig := createProviderWithNestedValueAndCustomDiff(t, schema.TypeInt, resources.NestedIntValueAccountObjectComputedIf("nested_value", sdk.AccountParameterDataRetentionTimeInDays)) - - t.Run("different value than on the Snowflake side", func(t *testing.T) { - diff := calculateDiff(t, providerConfig, cty.MapValEmpty(cty.Type{}), map[string]any{ - "nested_value": []any{ - map[string]any{ - "value": 999, // value outside of valid range - }, - }, - }) - assert.True(t, diff.Attributes["nested_value.#"].NewComputed) - }) - - t.Run("same value as in Snowflake", func(t *testing.T) { - dataRetentionTimeInDays, err := acc.Client(t).Parameters.ShowAccountParameter(context.Background(), sdk.AccountParameterDataRetentionTimeInDays) - require.NoError(t, err) - - diff := calculateDiff(t, providerConfig, cty.MapValEmpty(cty.Type{}), map[string]any{ - "nested_value": []any{ - map[string]any{ - "value": dataRetentionTimeInDays.Value, - }, - }, - }) - assert.False(t, diff.Attributes["nested_value.#"].NewComputed) - }) -} - -func 
TestNestedStringValueAccountObjectComputedIf(t *testing.T) { - providerConfig := createProviderWithNestedValueAndCustomDiff(t, schema.TypeString, resources.NestedStringValueAccountObjectComputedIf("nested_value", sdk.AccountParameterTraceLevel)) - - t.Run("different value than on the Snowflake side", func(t *testing.T) { - diff := calculateDiff(t, providerConfig, cty.MapValEmpty(cty.Type{}), map[string]any{ - "nested_value": []any{ - map[string]any{ - "value": "not_a_valid_value", - }, - }, - }) - assert.True(t, diff.Attributes["nested_value.#"].NewComputed) - }) - - t.Run("same value as in Snowflake", func(t *testing.T) { - traceLevel, err := acc.Client(t).Parameters.ShowAccountParameter(context.Background(), sdk.AccountParameterTraceLevel) - require.NoError(t, err) - - diff := calculateDiff(t, providerConfig, cty.MapValEmpty(cty.Type{}), map[string]any{ - "nested_value": []any{ - map[string]any{ - "value": traceLevel.Value, - }, - }, - }) - assert.False(t, diff.Attributes["nested_value.#"].NewComputed) - }) -} - -func TestNestedBoolValueAccountObjectComputedIf(t *testing.T) { - providerConfig := createProviderWithNestedValueAndCustomDiff(t, schema.TypeBool, resources.NestedBoolValueAccountObjectComputedIf("nested_value", sdk.AccountParameterReplaceInvalidCharacters)) - - replaceInvalidCharacters, err := acc.Client(t).Parameters.ShowAccountParameter(context.Background(), sdk.AccountParameterReplaceInvalidCharacters) - require.NoError(t, err) - - replaceInvalidCharactersBoolValue, err := strconv.ParseBool(replaceInvalidCharacters.Value) - require.NoError(t, err) - - t.Run("different value than on the Snowflake side", func(t *testing.T) { - diff := calculateDiff(t, providerConfig, cty.MapValEmpty(cty.Type{}), map[string]any{ - "nested_value": []any{ - map[string]any{ - "value": !replaceInvalidCharactersBoolValue, - }, - }, - }) - assert.True(t, diff.Attributes["nested_value.#"].NewComputed) - }) - - t.Run("same value as in Snowflake", func(t *testing.T) { - diff := 
calculateDiff(t, providerConfig, cty.MapValEmpty(cty.Type{}), map[string]any{ - "nested_value": []any{ - map[string]any{ - "value": replaceInvalidCharactersBoolValue, - }, - }, + "value": string(sdk.LogLevelInfo), }) - assert.False(t, diff.Attributes["nested_value.#"].NewComputed) + assert.False(t, diff.Attributes["value"].NewComputed) }) } -func createProviderWithNestedValueAndCustomDiff(t *testing.T, valueType schema.ValueType, customDiffFunc schema.CustomizeDiffFunc) *schema.Provider { +func createProviderWithValuePropertyAndCustomDiff(t *testing.T, valueType schema.ValueType, customDiffFunc schema.CustomizeDiffFunc) *schema.Provider { t.Helper() return &schema.Provider{ ResourcesMap: map[string]*schema.Resource{ "test": { Schema: map[string]*schema.Schema{ - "nested_value": { - Type: schema.TypeList, - MaxItems: 1, - Elem: &schema.Resource{ - Schema: map[string]*schema.Schema{ - "value": { - Type: valueType, - Required: true, - }, - }, - }, + "value": { + Type: valueType, Computed: true, Optional: true, }, diff --git a/pkg/resources/database_commons.go b/pkg/resources/database_commons.go index ca822e57d0..f5bff58a21 100644 --- a/pkg/resources/database_commons.go +++ b/pkg/resources/database_commons.go @@ -3,14 +3,15 @@ package resources import ( "context" "fmt" + "slices" + "strconv" + "strings" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/internal/provider" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk" "github.com/hashicorp/terraform-plugin-sdk/v2/diag" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/customdiff" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" - "slices" - "strconv" - "strings" ) var ( @@ -36,34 +37,53 @@ var ( } return customdiff.All( AccountObjectIntValueComputedIf("data_retention_time_in_days", params, sdk.AccountParameterDataRetentionTimeInDays), - //AccountObjectIntValueComputedIf("max_data_extension_time_in_days", params, sdk.AccountParameterMaxDataExtensionTimeInDays), - 
//AccountObjectStringValueComputedIf("external_volume", params, sdk.AccountParameterExternalVolume), - //AccountObjectStringValueComputedIf("catalog", params, sdk.AccountParameterCatalog), - //AccountObjectBoolValueComputedIf("replace_invalid_characters", params, sdk.AccountParameterReplaceInvalidCharacters), - //AccountObjectStringValueComputedIf("default_ddl_collation", params, sdk.AccountParameterDefaultDDLCollation), - //AccountObjectStringValueComputedIf("storage_serialization_policy", params, sdk.AccountParameterStorageSerializationPolicy), - //AccountObjectStringValueComputedIf("log_level", params, sdk.AccountParameterLogLevel), - //AccountObjectStringValueComputedIf("trace_level", params, sdk.AccountParameterTraceLevel), - //AccountObjectIntValueComputedIf("suspend_task_after_num_failures", params, sdk.AccountParameterSuspendTaskAfterNumFailures), - //AccountObjectIntValueComputedIf("task_auto_retry_attempts", params, sdk.AccountParameterTaskAutoRetryAttempts), - //AccountObjectStringValueComputedIf("user_task_managed_initial_warehouse_size", params, sdk.AccountParameterUserTaskManagedInitialWarehouseSize), - //AccountObjectIntValueComputedIf("user_task_timeout_ms", params, sdk.AccountParameterUserTaskTimeoutMs), - //AccountObjectIntValueComputedIf("user_task_minimum_trigger_interval_in_seconds", params, sdk.AccountParameterUserTaskMinimumTriggerIntervalInSeconds), - //AccountObjectBoolValueComputedIf("quoted_identifiers_ignore_case", params, sdk.AccountParameterQuotedIdentifiersIgnoreCase), - //AccountObjectBoolValueComputedIf("enable_console_output", params, sdk.AccountParameterEnableConsoleOutput), + AccountObjectIntValueComputedIf("max_data_extension_time_in_days", params, sdk.AccountParameterMaxDataExtensionTimeInDays), + AccountObjectStringValueComputedIf("external_volume", params, sdk.AccountParameterExternalVolume), + AccountObjectStringValueComputedIf("catalog", params, sdk.AccountParameterCatalog), + 
AccountObjectBoolValueComputedIf("replace_invalid_characters", params, sdk.AccountParameterReplaceInvalidCharacters), + AccountObjectStringValueComputedIf("default_ddl_collation", params, sdk.AccountParameterDefaultDDLCollation), + AccountObjectStringValueComputedIf("storage_serialization_policy", params, sdk.AccountParameterStorageSerializationPolicy), + AccountObjectStringValueComputedIf("log_level", params, sdk.AccountParameterLogLevel), + AccountObjectStringValueComputedIf("trace_level", params, sdk.AccountParameterTraceLevel), + AccountObjectIntValueComputedIf("suspend_task_after_num_failures", params, sdk.AccountParameterSuspendTaskAfterNumFailures), + AccountObjectIntValueComputedIf("task_auto_retry_attempts", params, sdk.AccountParameterTaskAutoRetryAttempts), + AccountObjectStringValueComputedIf("user_task_managed_initial_warehouse_size", params, sdk.AccountParameterUserTaskManagedInitialWarehouseSize), + AccountObjectIntValueComputedIf("user_task_timeout_ms", params, sdk.AccountParameterUserTaskTimeoutMs), + AccountObjectIntValueComputedIf("user_task_minimum_trigger_interval_in_seconds", params, sdk.AccountParameterUserTaskMinimumTriggerIntervalInSeconds), + AccountObjectBoolValueComputedIf("quoted_identifiers_ignore_case", params, sdk.AccountParameterQuotedIdentifiersIgnoreCase), + AccountObjectBoolValueComputedIf("enable_console_output", params, sdk.AccountParameterEnableConsoleOutput), )(ctx, d, meta) } ) -type DatabaseParameterField struct { - Name sdk.ObjectParameter - Type schema.ValueType - Description string - SchemaModifier func(inner *schema.Schema) +// handleValuePropertyChange calls internally handleValuePropertyChangeWithMapping with identity mapping +func handleValuePropertyChange[T any](d *schema.ResourceData, key string, setField **T, unsetField **bool) diag.Diagnostics { + return handleValuePropertyChangeWithMapping[T, T](d, key, setField, unsetField, func(value T) T { return value }) +} + +// handleValuePropertyChangeWithMapping checks 
schema.ResourceData for change in key's value. If there's a change detected, +// it checks if the value is set in the configuration. If the value is set, setField (representing setter for a value) is +// set to the new planned value applying mapping in such cases as enum values, identifiers, etc. have to be set. +// Otherwise, unset is called for a given field. +func handleValuePropertyChangeWithMapping[T, R any](d *schema.ResourceData, key string, setField **R, unsetField **bool, mapping func(value T) R) diag.Diagnostics { + if d.HasChange(key) { + if !d.GetRawConfig().AsValueMap()[key].IsNull() { + *setField = sdk.Pointer(mapping(d.Get(key).(T))) + } else { + *unsetField = sdk.Bool(true) + } + } + return nil } func init() { - databaseParameterFields := []DatabaseParameterField{ + databaseParameterFields := []struct { + Name sdk.ObjectParameter + Type schema.ValueType + Description string + DiffSuppress schema.SchemaDiffSuppressFunc + ValidateDiag schema.SchemaValidateDiagFunc + }{ { Name: sdk.ObjectParameterDataRetentionTimeInDays, Type: schema.TypeInt, @@ -75,39 +95,33 @@ func init() { Description: "Specifies a default collation specification for all schemas and tables added to the database. It can be overridden on schema or table level. 
For more information, see [collation specification](https://docs.snowflake.com/en/sql-reference/collation#label-collation-specification).", }, { - Name: sdk.ObjectParameterCatalog, - Type: schema.TypeString, - Description: "The database parameter that specifies the default catalog to use for Iceberg tables.", - SchemaModifier: func(inner *schema.Schema) { - inner.ValidateDiagFunc = IsValidIdentifier[sdk.AccountObjectIdentifier]() - }, + Name: sdk.ObjectParameterCatalog, + Type: schema.TypeString, + Description: "The database parameter that specifies the default catalog to use for Iceberg tables.", + ValidateDiag: IsValidIdentifier[sdk.AccountObjectIdentifier](), }, { - Name: sdk.ObjectParameterExternalVolume, - Type: schema.TypeString, - Description: "The database parameter that specifies the default external volume to use for Iceberg tables.", - SchemaModifier: func(inner *schema.Schema) { - inner.ValidateDiagFunc = IsValidIdentifier[sdk.AccountObjectIdentifier]() - }, + Name: sdk.ObjectParameterExternalVolume, + Type: schema.TypeString, + Description: "The database parameter that specifies the default external volume to use for Iceberg tables.", + ValidateDiag: IsValidIdentifier[sdk.AccountObjectIdentifier](), }, { - Name: sdk.ObjectParameterLogLevel, - Type: schema.TypeString, - Description: fmt.Sprintf("Specifies the severity level of messages that should be ingested and made available in the active event table. Valid options are: %v. Messages at the specified level (and at more severe levels) are ingested. 
For more information, see [LOG_LEVEL](https://docs.snowflake.com/en/sql-reference/parameters.html#label-log-level).", sdk.AsStringList(sdk.AllLogLevels)), - SchemaModifier: func(inner *schema.Schema) { - inner.DiffSuppressFunc = func(k, oldValue, newValue string, d *schema.ResourceData) bool { - return strings.EqualFold(oldValue, newValue) && d.Get(k).(string) == string(sdk.LogLevelOff) && newValue == "" - } + Name: sdk.ObjectParameterLogLevel, + Type: schema.TypeString, + Description: fmt.Sprintf("Specifies the severity level of messages that should be ingested and made available in the active event table. Valid options are: %v. Messages at the specified level (and at more severe levels) are ingested. For more information, see [LOG_LEVEL](https://docs.snowflake.com/en/sql-reference/parameters.html#label-log-level).", sdk.AsStringList(sdk.AllLogLevels)), + ValidateDiag: StringInSlice(sdk.AsStringList(sdk.AllLogLevels), true), + DiffSuppress: func(k, oldValue, newValue string, d *schema.ResourceData) bool { + return strings.EqualFold(oldValue, newValue) && d.Get(k).(string) == string(sdk.LogLevelOff) && newValue == "" }, }, { - Name: sdk.ObjectParameterTraceLevel, - Type: schema.TypeString, - Description: fmt.Sprintf("Controls how trace events are ingested into the event table. Valid options are: %v. For information about levels, see [TRACE_LEVEL](https://docs.snowflake.com/en/sql-reference/parameters.html#label-trace-level).", sdk.AsStringList(sdk.AllTraceLevels)), - SchemaModifier: func(inner *schema.Schema) { - inner.DiffSuppressFunc = func(k, oldValue, newValue string, d *schema.ResourceData) bool { - return strings.EqualFold(oldValue, newValue) && d.Get(k).(string) == string(sdk.TraceLevelOff) && newValue == "" - } + Name: sdk.ObjectParameterTraceLevel, + Type: schema.TypeString, + Description: fmt.Sprintf("Controls how trace events are ingested into the event table. Valid options are: %v. 
For information about levels, see [TRACE_LEVEL](https://docs.snowflake.com/en/sql-reference/parameters.html#label-trace-level).", sdk.AsStringList(sdk.AllTraceLevels)), + ValidateDiag: StringInSlice(sdk.AsStringList(sdk.AllTraceLevels), true), + DiffSuppress: func(k, oldValue, newValue string, d *schema.ResourceData) bool { + return strings.EqualFold(oldValue, newValue) && d.Get(k).(string) == string(sdk.TraceLevelOff) && newValue == "" }, }, { @@ -121,9 +135,13 @@ func init() { Description: "Specifies whether to replace invalid UTF-8 characters with the Unicode replacement character (�) in query results for an Iceberg table. You can only set this parameter for tables that use an external Iceberg catalog.", }, { - Name: sdk.ObjectParameterStorageSerializationPolicy, - Type: schema.TypeString, - Description: fmt.Sprintf("The storage serialization policy for Iceberg tables that use Snowflake as the catalog. Valid options are: %v. COMPATIBLE: Snowflake performs encoding and compression of data files that ensures interoperability with third-party compute engines. OPTIMIZED: Snowflake performs encoding and compression of data files that ensures the best table performance within Snowflake.", sdk.AsStringList(sdk.AllStorageSerializationPolicies)), + Name: sdk.ObjectParameterStorageSerializationPolicy, + Type: schema.TypeString, + Description: fmt.Sprintf("The storage serialization policy for Iceberg tables that use Snowflake as the catalog. Valid options are: %v. COMPATIBLE: Snowflake performs encoding and compression of data files that ensures interoperability with third-party compute engines. 
OPTIMIZED: Snowflake performs encoding and compression of data files that ensures the best table performance within Snowflake.", sdk.AsStringList(sdk.AllStorageSerializationPolicies)), + ValidateDiag: StringInSlice(sdk.AsStringList(sdk.AllStorageSerializationPolicies), true), + DiffSuppress: func(k, oldValue, newValue string, d *schema.ResourceData) bool { + return strings.EqualFold(oldValue, newValue) && d.Get(k).(string) == string(sdk.StorageSerializationPolicyOptimized) && newValue == "" + }, }, { Name: sdk.ObjectParameterSuspendTaskAfterNumFailures, @@ -136,10 +154,20 @@ func init() { Description: "Maximum automatic retries allowed for a user task.", }, { - Name: sdk.ObjectParameterUserTaskManagedInitialWarehouseSize, - Type: schema.TypeString, - Description: "The initial size of warehouse to use for managed warehouses in the absence of history.", - SchemaModifier: nil, // TODO: Validate correct warehouse size + Name: sdk.ObjectParameterUserTaskManagedInitialWarehouseSize, + Type: schema.TypeString, + Description: "The initial size of warehouse to use for managed warehouses in the absence of history.", + DiffSuppress: func(k, old, new string, d *schema.ResourceData) bool { + oldSize, err := sdk.ToWarehouseSize(old) + if err != nil { + return false + } + newSize, err := sdk.ToWarehouseSize(new) + if err != nil { + return false + } + return oldSize == newSize + }, }, { Name: sdk.ObjectParameterUserTaskTimeoutMs, @@ -156,13 +184,6 @@ func init() { Type: schema.TypeBool, Description: "If true, the case of quoted identifiers is ignored.", }, - // TODO: Preview feature - //{ - // Name: sdk.ObjectParameterMetricLevel, - // Type: schema.TypeString, - // Description: "Controls whether to emit metrics to Event Table.", - // InnerModifier: nil, // TODO: Validate one of metric levels - //}, { Name: sdk.ObjectParameterEnableConsoleOutput, Type: schema.TypeBool, @@ -174,25 +195,24 @@ func init() { fieldName := strings.ToLower(string(field.Name)) 
DatabaseParametersSchema[fieldName] = &schema.Schema{ - Type: field.Type, - Description: field.Description, - Computed: true, - Optional: true, - } - if field.SchemaModifier != nil { - field.SchemaModifier(DatabaseParametersSchema[fieldName]) + Type: field.Type, + Description: field.Description, + Computed: true, + Optional: true, + ValidateDiagFunc: field.ValidateDiag, + DiffSuppressFunc: field.DiffSuppress, } if !slices.Contains(sharedDatabaseNotApplicableParameters, field.Name) { - forceNewSchemaField := &schema.Schema{ - Type: field.Type, - Description: field.Description, - ForceNew: true, - } - if field.SchemaModifier != nil { - field.SchemaModifier(forceNewSchemaField) + SharedDatabaseParametersSchema[fieldName] = &schema.Schema{ + Type: field.Type, + Description: field.Description, + ForceNew: true, + Optional: true, + Computed: true, + ValidateDiagFunc: field.ValidateDiag, + DiffSuppressFunc: field.DiffSuppress, } - SharedDatabaseParametersSchema[fieldName] = forceNewSchemaField } } } @@ -251,7 +271,7 @@ func HandleDatabaseParameterChanges(d *schema.ResourceData, set *sdk.DatabaseSet handleValuePropertyChange[int](d, "data_retention_time_in_days", &set.DataRetentionTimeInDays, &unset.DataRetentionTimeInDays), handleValuePropertyChange[int](d, "max_data_extension_time_in_days", &set.MaxDataExtensionTimeInDays, &unset.MaxDataExtensionTimeInDays), handleValuePropertyChangeWithMapping[string](d, "external_volume", &set.ExternalVolume, &unset.ExternalVolume, sdk.NewAccountObjectIdentifier), - handleValuePropertyChangeWithMapping[string](d, "catalog", &set.ExternalVolume, &unset.ExternalVolume, sdk.NewAccountObjectIdentifier), + handleValuePropertyChangeWithMapping[string](d, "catalog", &set.Catalog, &unset.Catalog, sdk.NewAccountObjectIdentifier), handleValuePropertyChange[bool](d, "replace_invalid_characters", &set.ReplaceInvalidCharacters, &unset.ReplaceInvalidCharacters), handleValuePropertyChange[string](d, "default_ddl_collation", &set.DefaultDDLCollation, 
&unset.DefaultDDLCollation), handleValuePropertyChangeWithMapping[string](d, "storage_serialization_policy", &set.StorageSerializationPolicy, &unset.StorageSerializationPolicy, func(value string) sdk.StorageSerializationPolicy { return sdk.StorageSerializationPolicy(value) }), @@ -267,32 +287,16 @@ func HandleDatabaseParameterChanges(d *schema.ResourceData, set *sdk.DatabaseSet ) } -// TODO: Move to common + test + describe (e.g. why it's **T - because setting pointers is hard) (others too) -func handleValuePropertyChange[T any](d *schema.ResourceData, key string, setField **T, unsetField **bool) diag.Diagnostics { - return handleValuePropertyChangeWithMapping[T, T](d, key, setField, unsetField, func(value T) T { return value }) -} - -func handleValuePropertyChangeWithMapping[T, R any](d *schema.ResourceData, key string, setField **R, unsetField **bool, mapping func(value T) R) diag.Diagnostics { - if d.HasChange(key) { - if !d.GetRawConfig().AsValueMap()[key].IsNull() { - *setField = sdk.Pointer(mapping(d.Get(key).(T))) - } else { - *unsetField = sdk.Bool(true) - } - } - return nil -} - func HandleDatabaseParameterRead(d *schema.ResourceData, databaseParameters []*sdk.Parameter) diag.Diagnostics { for _, parameter := range databaseParameters { switch parameter.Key { case - "DATA_RETENTION_TIME_IN_DAYS", - "MAX_DATA_EXTENSION_TIME_IN_DAYS", - "SUSPEND_TASK_AFTER_NUM_FAILURES", - "TASK_AUTO_RETRY_ATTEMPTS", - "USER_TASK_TIMEOUT_MS", - "USER_TASK_MINIMUM_TRIGGER_INTERVAL_IN_SECONDS": + string(sdk.ObjectParameterDataRetentionTimeInDays), + string(sdk.ObjectParameterMaxDataExtensionTimeInDays), + string(sdk.ObjectParameterSuspendTaskAfterNumFailures), + string(sdk.ObjectParameterTaskAutoRetryAttempts), + string(sdk.ObjectParameterUserTaskTimeoutMs), + string(sdk.ObjectParameterUserTaskMinimumTriggerIntervalInSeconds): value, err := strconv.Atoi(parameter.Value) if err != nil { return diag.FromErr(err) @@ -301,20 +305,20 @@ func HandleDatabaseParameterRead(d 
*schema.ResourceData, databaseParameters []*s return diag.FromErr(err) } case - "EXTERNAL_VOLUME", - "CATALOG", - "DEFAULT_DDL_COLLATION", - "STORAGE_SERIALIZATION_POLICY", - "LOG_LEVEL", - "TRACE_LEVEL", - "USER_TASK_MANAGED_INITIAL_WAREHOUSE_SIZE": + string(sdk.ObjectParameterExternalVolume), + string(sdk.ObjectParameterCatalog), + string(sdk.ObjectParameterDefaultDDLCollation), + string(sdk.ObjectParameterStorageSerializationPolicy), + string(sdk.ObjectParameterLogLevel), + string(sdk.ObjectParameterTraceLevel), + string(sdk.ObjectParameterUserTaskManagedInitialWarehouseSize): if err := d.Set(strings.ToLower(parameter.Key), parameter.Value); err != nil { return diag.FromErr(err) } case - "REPLACE_INVALID_CHARACTERS", - "QUOTED_IDENTIFIERS_IGNORE_CASE", - "ENABLE_CONSOLE_OUTPUT": + string(sdk.ObjectParameterReplaceInvalidCharacters), + string(sdk.ObjectParameterQuotedIdentifiersIgnoreCase), + string(sdk.ObjectParameterEnableConsoleOutput): value, err := strconv.ParseBool(parameter.Value) if err != nil { return diag.FromErr(err) diff --git a/pkg/resources/helpers.go b/pkg/resources/helpers.go index cec5aff402..4d0ea1752f 100644 --- a/pkg/resources/helpers.go +++ b/pkg/resources/helpers.go @@ -2,9 +2,10 @@ package resources import ( "fmt" - "github.com/hashicorp/terraform-plugin-sdk/v2/diag" "strings" + "github.com/hashicorp/terraform-plugin-sdk/v2/diag" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/snowflake" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" diff --git a/pkg/resources/secondary_database.go b/pkg/resources/secondary_database.go index 30dd2dc1d8..1e4a358387 100644 --- a/pkg/resources/secondary_database.go +++ b/pkg/resources/secondary_database.go @@ -4,6 +4,7 @@ import ( "context" "errors" "fmt" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/helpers" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/internal/provider" 
"github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk" @@ -122,7 +123,10 @@ func UpdateSecondaryDatabase(ctx context.Context, d *schema.ResourceData, meta a databaseSetRequest := new(sdk.DatabaseSet) databaseUnsetRequest := new(sdk.DatabaseUnset) - updateParamDiags := HandleDatabaseParameterChanges(d, databaseSetRequest, databaseUnsetRequest) + + if updateParamDiags := HandleDatabaseParameterChanges(d, databaseSetRequest, databaseUnsetRequest); len(updateParamDiags) > 0 { + return updateParamDiags + } if d.HasChange("comment") { comment := d.Get("comment").(string) @@ -151,7 +155,7 @@ func UpdateSecondaryDatabase(ctx context.Context, d *schema.ResourceData, meta a } } - return append(updateParamDiags, ReadSecondaryDatabase(ctx, d, meta)...) + return ReadSecondaryDatabase(ctx, d, meta) } func ReadSecondaryDatabase(ctx context.Context, d *schema.ResourceData, meta any) diag.Diagnostics { diff --git a/pkg/resources/secondary_database_acceptance_test.go b/pkg/resources/secondary_database_acceptance_test.go index 8ce202239a..058b166522 100644 --- a/pkg/resources/secondary_database_acceptance_test.go +++ b/pkg/resources/secondary_database_acceptance_test.go @@ -15,7 +15,7 @@ import ( "github.com/stretchr/testify/require" ) -func TestAcc_CreateSecondaryDatabase_minimal(t *testing.T) { +func TestAcc_CreateSecondaryDatabase_Basic(t *testing.T) { id := acc.TestClient().Ids.RandomAccountObjectIdentifier() comment := random.Comment() @@ -27,11 +27,37 @@ func TestAcc_CreateSecondaryDatabase_minimal(t *testing.T) { newId := acc.TestClient().Ids.RandomAccountObjectIdentifier() newComment := random.Comment() - accountDataRetentionTimeInDays, err := acc.Client(t).Parameters.ShowAccountParameter(context.Background(), sdk.AccountParameterDataRetentionTimeInDays) + params, err := acc.Client(t).Parameters.ShowParameters(context.Background(), &sdk.ShowParametersOptions{ + In: &sdk.ParametersIn{ + Account: sdk.Bool(true), + }, + }) require.NoError(t, err) - 
accountMaxDataExtensionTimeInDays, err := acc.Client(t).Parameters.ShowAccountParameter(context.Background(), sdk.AccountParameterMaxDataExtensionTimeInDays) - require.NoError(t, err) + findParamValue := func(searchedParameter sdk.AccountParameter) string { + idx := slices.IndexFunc(params, func(parameter *sdk.Parameter) bool { + return parameter.Key == string(searchedParameter) + }) + require.NotEqual(t, -1, idx, string(searchedParameter)) + return params[idx].Value + } + + accountDataRetentionTimeInDays := findParamValue(sdk.AccountParameterDataRetentionTimeInDays) + accountMaxDataExtensionTimeInDays := findParamValue(sdk.AccountParameterMaxDataExtensionTimeInDays) + accountExternalVolume := findParamValue(sdk.AccountParameterExternalVolume) + accountCatalog := findParamValue(sdk.AccountParameterCatalog) + accountReplaceInvalidCharacters := findParamValue(sdk.AccountParameterReplaceInvalidCharacters) + accountDefaultDdlCollation := findParamValue(sdk.AccountParameterDefaultDDLCollation) + accountStorageSerializationPolicy := findParamValue(sdk.AccountParameterStorageSerializationPolicy) + accountLogLevel := findParamValue(sdk.AccountParameterLogLevel) + accountTraceLevel := findParamValue(sdk.AccountParameterTraceLevel) + accountSuspendTaskAfterNumFailures := findParamValue(sdk.AccountParameterSuspendTaskAfterNumFailures) + accountTaskAutoRetryAttempts := findParamValue(sdk.AccountParameterTaskAutoRetryAttempts) + accountUserTaskMangedInitialWarehouseSize := findParamValue(sdk.AccountParameterUserTaskManagedInitialWarehouseSize) + accountUserTaskTimeoutMs := findParamValue(sdk.AccountParameterUserTaskTimeoutMs) + accountUserTaskMinimumTriggerIntervalInSeconds := findParamValue(sdk.AccountParameterUserTaskMinimumTriggerIntervalInSeconds) + accountQuotedIdentifiersIgnoreCase := findParamValue(sdk.AccountParameterQuotedIdentifiersIgnoreCase) + accountEnableConsoleOutput := findParamValue(sdk.AccountParameterEnableConsoleOutput) configVariables := func(id 
sdk.AccountObjectIdentifier, primaryDatabaseName sdk.ExternalObjectIdentifier, comment string) config.Variables { return config.Variables{ @@ -57,22 +83,22 @@ func TestAcc_CreateSecondaryDatabase_minimal(t *testing.T) { resource.TestCheckResourceAttr("snowflake_secondary_database.test", "as_replica_of", externalPrimaryId.FullyQualifiedName()), resource.TestCheckResourceAttr("snowflake_secondary_database.test", "comment", comment), - resource.TestCheckResourceAttr("snowflake_secondary_database.test", "data_retention_time_in_days", accountDataRetentionTimeInDays.Value), - resource.TestCheckResourceAttr("snowflake_secondary_database.test", "max_data_extension_time_in_days", accountMaxDataExtensionTimeInDays.Value), - resource.TestCheckResourceAttr("snowflake_secondary_database.test", "external_volume", ""), - resource.TestCheckResourceAttr("snowflake_secondary_database.test", "catalog", ""), - resource.TestCheckResourceAttr("snowflake_secondary_database.test", "replace_invalid_characters", "false"), - resource.TestCheckResourceAttr("snowflake_secondary_database.test", "default_ddl_collation", ""), - resource.TestCheckResourceAttr("snowflake_secondary_database.test", "storage_serialization_policy", "OPTIMIZED"), - resource.TestCheckResourceAttr("snowflake_secondary_database.test", "log_level", "OFF"), - resource.TestCheckResourceAttr("snowflake_secondary_database.test", "trace_level", "OFF"), - resource.TestCheckResourceAttr("snowflake_secondary_database.test", "suspend_task_after_num_failures", "10"), - resource.TestCheckResourceAttr("snowflake_secondary_database.test", "task_auto_retry_attempts", "0"), - resource.TestCheckResourceAttr("snowflake_secondary_database.test", "user_task_managed_initial_warehouse_size", "Medium"), - resource.TestCheckResourceAttr("snowflake_secondary_database.test", "user_task_timeout_ms", "3600000"), - resource.TestCheckResourceAttr("snowflake_secondary_database.test", "user_task_minimum_trigger_interval_in_seconds", "30"), - 
resource.TestCheckResourceAttr("snowflake_secondary_database.test", "quoted_identifiers_ignore_case", "false"), - resource.TestCheckResourceAttr("snowflake_secondary_database.test", "enable_console_output", "false"), + resource.TestCheckResourceAttr("snowflake_secondary_database.test", "data_retention_time_in_days", accountDataRetentionTimeInDays), + resource.TestCheckResourceAttr("snowflake_secondary_database.test", "max_data_extension_time_in_days", accountMaxDataExtensionTimeInDays), + resource.TestCheckResourceAttr("snowflake_secondary_database.test", "external_volume", accountExternalVolume), + resource.TestCheckResourceAttr("snowflake_secondary_database.test", "catalog", accountCatalog), + resource.TestCheckResourceAttr("snowflake_secondary_database.test", "replace_invalid_characters", accountReplaceInvalidCharacters), + resource.TestCheckResourceAttr("snowflake_secondary_database.test", "default_ddl_collation", accountDefaultDdlCollation), + resource.TestCheckResourceAttr("snowflake_secondary_database.test", "storage_serialization_policy", accountStorageSerializationPolicy), + resource.TestCheckResourceAttr("snowflake_secondary_database.test", "log_level", accountLogLevel), + resource.TestCheckResourceAttr("snowflake_secondary_database.test", "trace_level", accountTraceLevel), + resource.TestCheckResourceAttr("snowflake_secondary_database.test", "suspend_task_after_num_failures", accountSuspendTaskAfterNumFailures), + resource.TestCheckResourceAttr("snowflake_secondary_database.test", "task_auto_retry_attempts", accountTaskAutoRetryAttempts), + resource.TestCheckResourceAttr("snowflake_secondary_database.test", "user_task_managed_initial_warehouse_size", accountUserTaskMangedInitialWarehouseSize), + resource.TestCheckResourceAttr("snowflake_secondary_database.test", "user_task_timeout_ms", accountUserTaskTimeoutMs), + resource.TestCheckResourceAttr("snowflake_secondary_database.test", "user_task_minimum_trigger_interval_in_seconds", 
accountUserTaskMinimumTriggerIntervalInSeconds), + resource.TestCheckResourceAttr("snowflake_secondary_database.test", "quoted_identifiers_ignore_case", accountQuotedIdentifiersIgnoreCase), + resource.TestCheckResourceAttr("snowflake_secondary_database.test", "enable_console_output", accountEnableConsoleOutput), ), }, // Rename + comment update @@ -84,22 +110,22 @@ func TestAcc_CreateSecondaryDatabase_minimal(t *testing.T) { resource.TestCheckResourceAttr("snowflake_secondary_database.test", "as_replica_of", externalPrimaryId.FullyQualifiedName()), resource.TestCheckResourceAttr("snowflake_secondary_database.test", "comment", newComment), - resource.TestCheckResourceAttr("snowflake_secondary_database.test", "data_retention_time_in_days", accountDataRetentionTimeInDays.Value), - resource.TestCheckResourceAttr("snowflake_secondary_database.test", "max_data_extension_time_in_days", accountMaxDataExtensionTimeInDays.Value), - resource.TestCheckResourceAttr("snowflake_secondary_database.test", "external_volume", ""), - resource.TestCheckResourceAttr("snowflake_secondary_database.test", "catalog", ""), - resource.TestCheckResourceAttr("snowflake_secondary_database.test", "replace_invalid_characters", "false"), - resource.TestCheckResourceAttr("snowflake_secondary_database.test", "default_ddl_collation", ""), - resource.TestCheckResourceAttr("snowflake_secondary_database.test", "storage_serialization_policy", "OPTIMIZED"), - resource.TestCheckResourceAttr("snowflake_secondary_database.test", "log_level", "OFF"), - resource.TestCheckResourceAttr("snowflake_secondary_database.test", "trace_level", "OFF"), - resource.TestCheckResourceAttr("snowflake_secondary_database.test", "suspend_task_after_num_failures", "10"), - resource.TestCheckResourceAttr("snowflake_secondary_database.test", "task_auto_retry_attempts", "0"), - resource.TestCheckResourceAttr("snowflake_secondary_database.test", "user_task_managed_initial_warehouse_size", "Medium"), - 
resource.TestCheckResourceAttr("snowflake_secondary_database.test", "user_task_timeout_ms", "3600000"), - resource.TestCheckResourceAttr("snowflake_secondary_database.test", "user_task_minimum_trigger_interval_in_seconds", "30"), - resource.TestCheckResourceAttr("snowflake_secondary_database.test", "quoted_identifiers_ignore_case", "false"), - resource.TestCheckResourceAttr("snowflake_secondary_database.test", "enable_console_output", "false"), + resource.TestCheckResourceAttr("snowflake_secondary_database.test", "data_retention_time_in_days", accountDataRetentionTimeInDays), + resource.TestCheckResourceAttr("snowflake_secondary_database.test", "max_data_extension_time_in_days", accountMaxDataExtensionTimeInDays), + resource.TestCheckResourceAttr("snowflake_secondary_database.test", "external_volume", accountExternalVolume), + resource.TestCheckResourceAttr("snowflake_secondary_database.test", "catalog", accountCatalog), + resource.TestCheckResourceAttr("snowflake_secondary_database.test", "replace_invalid_characters", accountReplaceInvalidCharacters), + resource.TestCheckResourceAttr("snowflake_secondary_database.test", "default_ddl_collation", accountDefaultDdlCollation), + resource.TestCheckResourceAttr("snowflake_secondary_database.test", "storage_serialization_policy", accountStorageSerializationPolicy), + resource.TestCheckResourceAttr("snowflake_secondary_database.test", "log_level", accountLogLevel), + resource.TestCheckResourceAttr("snowflake_secondary_database.test", "trace_level", accountTraceLevel), + resource.TestCheckResourceAttr("snowflake_secondary_database.test", "suspend_task_after_num_failures", accountSuspendTaskAfterNumFailures), + resource.TestCheckResourceAttr("snowflake_secondary_database.test", "task_auto_retry_attempts", accountTaskAutoRetryAttempts), + resource.TestCheckResourceAttr("snowflake_secondary_database.test", "user_task_managed_initial_warehouse_size", accountUserTaskMangedInitialWarehouseSize), + 
resource.TestCheckResourceAttr("snowflake_secondary_database.test", "user_task_timeout_ms", accountUserTaskTimeoutMs), + resource.TestCheckResourceAttr("snowflake_secondary_database.test", "user_task_minimum_trigger_interval_in_seconds", accountUserTaskMinimumTriggerIntervalInSeconds), + resource.TestCheckResourceAttr("snowflake_secondary_database.test", "quoted_identifiers_ignore_case", accountQuotedIdentifiersIgnoreCase), + resource.TestCheckResourceAttr("snowflake_secondary_database.test", "enable_console_output", accountEnableConsoleOutput), ), }, // Import all values diff --git a/pkg/resources/shared_database.go b/pkg/resources/shared_database.go index a8e47e2426..632d01cf99 100644 --- a/pkg/resources/shared_database.go +++ b/pkg/resources/shared_database.go @@ -4,8 +4,6 @@ import ( "context" "errors" "fmt" - "strconv" - "strings" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/helpers" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/internal/provider" @@ -26,6 +24,11 @@ var sharedDatabaseSchema = map[string]*schema.Schema{ ForceNew: true, Description: "A fully qualified path to a share from which the database will be created. A fully qualified path follows the format of `\"\".\"\".\"\"`.", }, + "comment": { + Type: schema.TypeString, + Optional: true, + Description: "Specifies a comment for the database.", + }, // TODO(SNOW-1325381): Add it as an item to discuss and either remove or uncomment (and implement) it // "is_transient": { // Type: schema.TypeBool, @@ -33,67 +36,6 @@ var sharedDatabaseSchema = map[string]*schema.Schema{ // ForceNew: true, // Description: "Specifies the database as transient. 
Transient databases do not have a Fail-safe period so they do not incur additional storage costs once they leave Time Travel; however, this means they are also not protected by Fail-safe in the event of a data loss.", // }, - "external_volume": { - Type: schema.TypeString, - Optional: true, - ForceNew: true, - ValidateDiagFunc: IsValidIdentifier[sdk.AccountObjectIdentifier](), - Description: "The database parameter that specifies the default external volume to use for Iceberg tables.", - }, - "catalog": { - Type: schema.TypeString, - Optional: true, - ForceNew: true, - ValidateDiagFunc: IsValidIdentifier[sdk.AccountObjectIdentifier](), - Description: "The database parameter that specifies the default catalog to use for Iceberg tables.", - }, - "replace_invalid_characters": { - Type: schema.TypeBool, - Optional: true, - ForceNew: true, - Description: "Specifies whether to replace invalid UTF-8 characters with the Unicode replacement character (�) in query results for an Iceberg table. You can only set this parameter for tables that use an external Iceberg catalog.", - }, - "default_ddl_collation": { - Type: schema.TypeString, - Optional: true, - ForceNew: true, - Description: "Specifies a default collation specification for all schemas and tables added to the database. It can be overridden on schema or table level. For more information, see [collation specification](https://docs.snowflake.com/en/sql-reference/collation#label-collation-specification).", - }, - "storage_serialization_policy": { - Type: schema.TypeString, - Optional: true, - ForceNew: true, - ValidateDiagFunc: StringInSlice(sdk.AsStringList(sdk.AllStorageSerializationPolicies), true), - Description: fmt.Sprintf("Specifies the storage serialization policy for Iceberg tables that use Snowflake as the catalog. Valid options are: %v. COMPATIBLE: Snowflake performs encoding and compression of data files that ensures interoperability with third-party compute engines. 
OPTIMIZED: Snowflake performs encoding and compression of data files that ensures the best table performance within Snowflake.", sdk.AsStringList(sdk.AllStorageSerializationPolicies)), - DiffSuppressFunc: func(k, oldValue, newValue string, d *schema.ResourceData) bool { - return strings.EqualFold(oldValue, newValue) || (d.Get(k).(string) == string(sdk.StorageSerializationPolicyOptimized) && newValue == "") - }, - }, - "log_level": { - Type: schema.TypeString, - Optional: true, - ForceNew: true, - ValidateDiagFunc: StringInSlice(sdk.AsStringList(sdk.AllLogLevels), true), - DiffSuppressFunc: func(k, oldValue, newValue string, d *schema.ResourceData) bool { - return strings.EqualFold(oldValue, newValue) || (d.Get(k).(string) == string(sdk.LogLevelOff) && newValue == "") - }, - Description: fmt.Sprintf("Specifies the severity level of messages that should be ingested and made available in the active event table. Valid options are: %v. Messages at the specified level (and at more severe levels) are ingested. For more information, see [LOG_LEVEL](https://docs.snowflake.com/en/sql-reference/parameters.html#label-log-level).", sdk.AsStringList(sdk.AllLogLevels)), - }, - "trace_level": { - Type: schema.TypeString, - Optional: true, - ForceNew: true, - ValidateDiagFunc: StringInSlice(sdk.AsStringList(sdk.AllTraceLevels), true), - DiffSuppressFunc: func(k, oldValue, newValue string, d *schema.ResourceData) bool { - return strings.EqualFold(oldValue, newValue) || (d.Get(k).(string) == string(sdk.TraceLevelOff) && newValue == "") - }, - Description: fmt.Sprintf("Controls how trace events are ingested into the event table. Valid options are: %v. 
For information about levels, see [TRACE_LEVEL](https://docs.snowflake.com/en/sql-reference/parameters.html#label-trace-level).", sdk.AsStringList(sdk.AllTraceLevels)), - }, - "comment": { - Type: schema.TypeString, - Optional: true, - Description: "Specifies a comment for the database.", - }, } func SharedDatabase() *schema.Resource { @@ -104,7 +46,7 @@ func SharedDatabase() *schema.Resource { DeleteContext: DeleteSharedDatabase, Description: "A shared database creates a database from a share provided by another Snowflake account. For more information about shares, see [Introduction to Secure Data Sharing](https://docs.snowflake.com/en/user-guide/data-sharing-intro).", - Schema: sharedDatabaseSchema, + Schema: MergeMaps(sharedDatabaseSchema, SharedDatabaseParametersSchema), Importer: &schema.ResourceImporter{ StateContext: schema.ImportStatePassthroughContext, }, @@ -117,42 +59,39 @@ func CreateSharedDatabase(ctx context.Context, d *schema.ResourceData, meta any) id := sdk.NewAccountObjectIdentifier(d.Get("name").(string)) externalShareId := sdk.NewExternalObjectIdentifierFromFullyQualifiedName(d.Get("from_share").(string)) - var externalVolume *sdk.AccountObjectIdentifier - if v, ok := d.GetOk("external_volume"); ok { - externalVolume = sdk.Pointer(sdk.NewAccountObjectIdentifier(v.(string))) - } - - var catalog *sdk.AccountObjectIdentifier - if v, ok := d.GetOk("catalog"); ok { - catalog = sdk.Pointer(sdk.NewAccountObjectIdentifier(v.(string))) - } - - var storageSerializationPolicy *sdk.StorageSerializationPolicy - if v, ok := d.GetOk("storage_serialization_policy"); ok { - storageSerializationPolicy = sdk.Pointer(sdk.StorageSerializationPolicy(v.(string))) - } - - var logLevel *sdk.LogLevel - if v, ok := d.GetOk("log_level"); ok { - logLevel = sdk.Pointer(sdk.LogLevel(v.(string))) - } - - var traceLevel *sdk.TraceLevel - if v, ok := d.GetOk("trace_level"); ok { - traceLevel = sdk.Pointer(sdk.TraceLevel(v.(string))) - } + _, _, externalVolume, + catalog, + 
replaceInvalidCharacters, + defaultDDLCollation, + storageSerializationPolicy, + logLevel, + traceLevel, + suspendTaskAfterNumFailures, + taskAutoRetryAttempts, + userTaskManagedInitialWarehouseSize, + userTaskTimeoutMs, + userTaskMinimumTriggerIntervalInSeconds, + quotedIdentifiersIgnoreCase, + enableConsoleOutput := GetAllDatabaseParameters(d) err := client.Databases.CreateShared(ctx, id, externalShareId, &sdk.CreateSharedDatabaseOptions{ // TODO(SNOW-1325381) // Transient: GetPropertyAsPointer[bool](d, "is_transient"), - ExternalVolume: externalVolume, - Catalog: catalog, - ReplaceInvalidCharacters: GetPropertyAsPointer[bool](d, "replace_invalid_characters"), - DefaultDDLCollation: GetPropertyAsPointer[string](d, "default_ddl_collation"), - StorageSerializationPolicy: storageSerializationPolicy, - LogLevel: logLevel, - TraceLevel: traceLevel, - Comment: GetPropertyAsPointer[string](d, "comment"), + ExternalVolume: externalVolume, + Catalog: catalog, + ReplaceInvalidCharacters: replaceInvalidCharacters, + DefaultDDLCollation: defaultDDLCollation, + StorageSerializationPolicy: storageSerializationPolicy, + LogLevel: logLevel, + TraceLevel: traceLevel, + SuspendTaskAfterNumFailures: suspendTaskAfterNumFailures, + TaskAutoRetryAttempts: taskAutoRetryAttempts, + UserTaskManagedInitialWarehouseSize: userTaskManagedInitialWarehouseSize, + UserTaskTimeoutMs: userTaskTimeoutMs, + UserTaskMinimumTriggerIntervalInSeconds: userTaskMinimumTriggerIntervalInSeconds, + QuotedIdentifiersIgnoreCase: quotedIdentifiersIgnoreCase, + EnableConsoleOutput: enableConsoleOutput, + Comment: GetPropertyAsPointer[string](d, "comment"), }) if err != nil { return diag.FromErr(err) @@ -224,15 +163,6 @@ func ReadSharedDatabase(ctx context.Context, d *schema.ResourceData, meta any) d return diag.FromErr(err) } - parameters, err := client.Parameters.ShowParameters(ctx, &sdk.ShowParametersOptions{ - In: &sdk.ParametersIn{ - Database: id, - }, - }) - if err != nil { - return diag.FromErr(err) - } - 
if err := d.Set("name", database.Name); err != nil { return diag.FromErr(err) } @@ -250,41 +180,17 @@ func ReadSharedDatabase(ctx context.Context, d *schema.ResourceData, meta any) d return diag.FromErr(err) } - for _, parameter := range parameters { - switch parameter.Key { - case "EXTERNAL_VOLUME": - if err := d.Set("external_volume", parameter.Value); err != nil { - return diag.FromErr(err) - } - case "CATALOG": - if err := d.Set("catalog", parameter.Value); err != nil { - return diag.FromErr(err) - } - case "DEFAULT_DDL_COLLATION": - if err := d.Set("default_ddl_collation", parameter.Value); err != nil { - return diag.FromErr(err) - } - case "LOG_LEVEL": - if err := d.Set("log_level", parameter.Value); err != nil { - return diag.FromErr(err) - } - case "TRACE_LEVEL": - if err := d.Set("trace_level", parameter.Value); err != nil { - return diag.FromErr(err) - } - case "REPLACE_INVALID_CHARACTERS": - boolValue, err := strconv.ParseBool(parameter.Value) - if err != nil { - return diag.FromErr(err) - } - if err := d.Set("replace_invalid_characters", boolValue); err != nil { - return diag.FromErr(err) - } - case "STORAGE_SERIALIZATION_POLICY": - if err := d.Set("storage_serialization_policy", parameter.Value); err != nil { - return diag.FromErr(err) - } - } + databaseParameters, err := client.Parameters.ShowParameters(ctx, &sdk.ShowParametersOptions{ + In: &sdk.ParametersIn{ + Database: id, + }, + }) + if err != nil { + return diag.FromErr(err) + } + + if diags := HandleDatabaseParameterRead(d, databaseParameters); diags != nil { + return diags } return nil diff --git a/pkg/resources/shared_database_acceptance_test.go b/pkg/resources/shared_database_acceptance_test.go index 6c90d5560c..c5efa5ea65 100644 --- a/pkg/resources/shared_database_acceptance_test.go +++ b/pkg/resources/shared_database_acceptance_test.go @@ -5,6 +5,9 @@ import ( "regexp" "testing" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/helpers" + 
"github.com/hashicorp/terraform-plugin-testing/plancheck" + acc "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/helpers/random" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/provider/resources" @@ -15,13 +18,30 @@ import ( "github.com/stretchr/testify/require" ) -func TestAcc_CreateSharedDatabase_minimal(t *testing.T) { +func TestAcc_CreateSharedDatabase_Basic(t *testing.T) { id := acc.TestClient().Ids.RandomAccountObjectIdentifier() comment := random.Comment() newId := acc.TestClient().Ids.RandomAccountObjectIdentifier() newComment := random.Comment() + var ( + accountExternalVolume = new(string) + accountCatalog = new(string) + accountReplaceInvalidCharacters = new(string) + accountDefaultDdlCollation = new(string) + accountStorageSerializationPolicy = new(string) + accountLogLevel = new(string) + accountTraceLevel = new(string) + accountSuspendTaskAfterNumFailures = new(string) + accountTaskAutoRetryAttempts = new(string) + accountUserTaskMangedInitialWarehouseSize = new(string) + accountUserTaskTimeoutMs = new(string) + accountUserTaskMinimumTriggerIntervalInSeconds = new(string) + accountQuotedIdentifiersIgnoreCase = new(string) + accountEnableConsoleOutput = new(string) + ) + configVariables := func(id sdk.AccountObjectIdentifier, shareName sdk.ExternalObjectIdentifier, comment string) config.Variables { return config.Variables{ "name": config.StringVariable(id.Name()), @@ -41,35 +61,73 @@ func TestAcc_CreateSharedDatabase_minimal(t *testing.T) { CheckDestroy: acc.CheckDestroy(t, resources.SharedDatabase), Steps: []resource.TestStep{ { + PreConfig: func() { + params := acc.TestClient().Parameter.ShowAccountParameters(t) + *accountExternalVolume = helpers.FindParameter(t, params, sdk.AccountParameterExternalVolume).Value + *accountCatalog = helpers.FindParameter(t, params, sdk.AccountParameterCatalog).Value + *accountReplaceInvalidCharacters = 
helpers.FindParameter(t, params, sdk.AccountParameterReplaceInvalidCharacters).Value + *accountDefaultDdlCollation = helpers.FindParameter(t, params, sdk.AccountParameterDefaultDDLCollation).Value + *accountStorageSerializationPolicy = helpers.FindParameter(t, params, sdk.AccountParameterStorageSerializationPolicy).Value + *accountLogLevel = helpers.FindParameter(t, params, sdk.AccountParameterLogLevel).Value + *accountTraceLevel = helpers.FindParameter(t, params, sdk.AccountParameterTraceLevel).Value + *accountSuspendTaskAfterNumFailures = helpers.FindParameter(t, params, sdk.AccountParameterSuspendTaskAfterNumFailures).Value + *accountTaskAutoRetryAttempts = helpers.FindParameter(t, params, sdk.AccountParameterTaskAutoRetryAttempts).Value + *accountUserTaskMangedInitialWarehouseSize = helpers.FindParameter(t, params, sdk.AccountParameterUserTaskManagedInitialWarehouseSize).Value + *accountUserTaskTimeoutMs = helpers.FindParameter(t, params, sdk.AccountParameterUserTaskTimeoutMs).Value + *accountUserTaskMinimumTriggerIntervalInSeconds = helpers.FindParameter(t, params, sdk.AccountParameterUserTaskMinimumTriggerIntervalInSeconds).Value + *accountQuotedIdentifiersIgnoreCase = helpers.FindParameter(t, params, sdk.AccountParameterQuotedIdentifiersIgnoreCase).Value + *accountEnableConsoleOutput = helpers.FindParameter(t, params, sdk.AccountParameterEnableConsoleOutput).Value + }, ConfigVariables: configVariables(id, shareExternalId, comment), ConfigDirectory: acc.ConfigurationDirectory("TestAcc_SharedDatabase/basic"), Check: resource.ComposeTestCheckFunc( resource.TestCheckResourceAttr("snowflake_shared_database.test", "name", id.Name()), resource.TestCheckResourceAttr("snowflake_shared_database.test", "from_share", shareExternalId.FullyQualifiedName()), - resource.TestCheckResourceAttr("snowflake_shared_database.test", "external_volume", ""), - resource.TestCheckResourceAttr("snowflake_shared_database.test", "catalog", ""), - 
resource.TestCheckResourceAttr("snowflake_shared_database.test", "replace_invalid_characters", "false"), - resource.TestCheckResourceAttr("snowflake_shared_database.test", "default_ddl_collation", ""), - resource.TestCheckResourceAttr("snowflake_shared_database.test", "storage_serialization_policy", "OPTIMIZED"), - resource.TestCheckResourceAttr("snowflake_shared_database.test", "log_level", "OFF"), - resource.TestCheckResourceAttr("snowflake_shared_database.test", "trace_level", "OFF"), resource.TestCheckResourceAttr("snowflake_shared_database.test", "comment", comment), + + resource.TestCheckResourceAttrPtr("snowflake_shared_database.test", "external_volume", accountExternalVolume), + resource.TestCheckResourceAttrPtr("snowflake_shared_database.test", "catalog", accountCatalog), + resource.TestCheckResourceAttrPtr("snowflake_shared_database.test", "replace_invalid_characters", accountReplaceInvalidCharacters), + resource.TestCheckResourceAttrPtr("snowflake_shared_database.test", "default_ddl_collation", accountDefaultDdlCollation), + resource.TestCheckResourceAttrPtr("snowflake_shared_database.test", "storage_serialization_policy", accountStorageSerializationPolicy), + resource.TestCheckResourceAttrPtr("snowflake_shared_database.test", "log_level", accountLogLevel), + resource.TestCheckResourceAttrPtr("snowflake_shared_database.test", "trace_level", accountTraceLevel), + resource.TestCheckResourceAttrPtr("snowflake_shared_database.test", "suspend_task_after_num_failures", accountSuspendTaskAfterNumFailures), + resource.TestCheckResourceAttrPtr("snowflake_shared_database.test", "task_auto_retry_attempts", accountTaskAutoRetryAttempts), + resource.TestCheckResourceAttrPtr("snowflake_shared_database.test", "user_task_managed_initial_warehouse_size", accountUserTaskMangedInitialWarehouseSize), + resource.TestCheckResourceAttrPtr("snowflake_shared_database.test", "user_task_timeout_ms", accountUserTaskTimeoutMs), + 
resource.TestCheckResourceAttrPtr("snowflake_shared_database.test", "user_task_minimum_trigger_interval_in_seconds", accountUserTaskMinimumTriggerIntervalInSeconds), + resource.TestCheckResourceAttrPtr("snowflake_shared_database.test", "quoted_identifiers_ignore_case", accountQuotedIdentifiersIgnoreCase), + resource.TestCheckResourceAttrPtr("snowflake_shared_database.test", "enable_console_output", accountEnableConsoleOutput), ), }, { ConfigVariables: configVariables(newId, shareExternalId, newComment), ConfigDirectory: acc.ConfigurationDirectory("TestAcc_SharedDatabase/basic"), + ConfigPlanChecks: resource.ConfigPlanChecks{ + PreApply: []plancheck.PlanCheck{ + plancheck.ExpectResourceAction("snowflake_shared_database.test", plancheck.ResourceActionUpdate), + }, + }, Check: resource.ComposeTestCheckFunc( resource.TestCheckResourceAttr("snowflake_shared_database.test", "name", newId.Name()), resource.TestCheckResourceAttr("snowflake_shared_database.test", "from_share", shareExternalId.FullyQualifiedName()), - resource.TestCheckResourceAttr("snowflake_shared_database.test", "external_volume", ""), - resource.TestCheckResourceAttr("snowflake_shared_database.test", "catalog", ""), - resource.TestCheckResourceAttr("snowflake_shared_database.test", "replace_invalid_characters", "false"), - resource.TestCheckResourceAttr("snowflake_shared_database.test", "default_ddl_collation", ""), - resource.TestCheckResourceAttr("snowflake_shared_database.test", "storage_serialization_policy", "OPTIMIZED"), - resource.TestCheckResourceAttr("snowflake_shared_database.test", "log_level", "OFF"), - resource.TestCheckResourceAttr("snowflake_shared_database.test", "trace_level", "OFF"), resource.TestCheckResourceAttr("snowflake_shared_database.test", "comment", newComment), + + resource.TestCheckResourceAttrPtr("snowflake_shared_database.test", "external_volume", accountExternalVolume), + resource.TestCheckResourceAttrPtr("snowflake_shared_database.test", "catalog", accountCatalog), + 
resource.TestCheckResourceAttrPtr("snowflake_shared_database.test", "replace_invalid_characters", accountReplaceInvalidCharacters), + resource.TestCheckResourceAttrPtr("snowflake_shared_database.test", "default_ddl_collation", accountDefaultDdlCollation), + resource.TestCheckResourceAttrPtr("snowflake_shared_database.test", "storage_serialization_policy", accountStorageSerializationPolicy), + resource.TestCheckResourceAttrPtr("snowflake_shared_database.test", "log_level", accountLogLevel), + resource.TestCheckResourceAttrPtr("snowflake_shared_database.test", "trace_level", accountTraceLevel), + resource.TestCheckResourceAttrPtr("snowflake_shared_database.test", "suspend_task_after_num_failures", accountSuspendTaskAfterNumFailures), + resource.TestCheckResourceAttrPtr("snowflake_shared_database.test", "task_auto_retry_attempts", accountTaskAutoRetryAttempts), + resource.TestCheckResourceAttrPtr("snowflake_shared_database.test", "user_task_managed_initial_warehouse_size", accountUserTaskMangedInitialWarehouseSize), + resource.TestCheckResourceAttrPtr("snowflake_shared_database.test", "user_task_timeout_ms", accountUserTaskTimeoutMs), + resource.TestCheckResourceAttrPtr("snowflake_shared_database.test", "user_task_minimum_trigger_interval_in_seconds", accountUserTaskMinimumTriggerIntervalInSeconds), + resource.TestCheckResourceAttrPtr("snowflake_shared_database.test", "quoted_identifiers_ignore_case", accountQuotedIdentifiersIgnoreCase), + resource.TestCheckResourceAttrPtr("snowflake_shared_database.test", "enable_console_output", accountEnableConsoleOutput), ), }, // Import all values @@ -84,6 +142,8 @@ func TestAcc_CreateSharedDatabase_minimal(t *testing.T) { }) } +// TODO: Tests + func TestAcc_CreateSharedDatabase_complete(t *testing.T) { id := acc.TestClient().Ids.RandomAccountObjectIdentifier() comment := random.Comment() @@ -95,30 +155,25 @@ func TestAcc_CreateSharedDatabase_complete(t *testing.T) { catalogId, catalogCleanup := 
acc.TestClient().CatalogIntegration.Create(t) t.Cleanup(catalogCleanup) - configVariables := func( - id sdk.AccountObjectIdentifier, - shareName sdk.ExternalObjectIdentifier, - externalVolume sdk.AccountObjectIdentifier, - catalog sdk.AccountObjectIdentifier, - replaceInvalidCharacters bool, - defaultDdlCollation string, - storageSerializationPolicy sdk.StorageSerializationPolicy, - logLevel sdk.LogLevel, - traceLevel sdk.TraceLevel, - comment string, - ) config.Variables { - return config.Variables{ - "name": config.StringVariable(id.Name()), - "from_share": config.StringVariable(shareName.FullyQualifiedName()), - "external_volume": config.StringVariable(externalVolume.Name()), - "catalog": config.StringVariable(catalog.Name()), - "replace_invalid_characters": config.BoolVariable(replaceInvalidCharacters), - "default_ddl_collation": config.StringVariable(defaultDdlCollation), - "storage_serialization_policy": config.StringVariable(string(storageSerializationPolicy)), - "log_level": config.StringVariable(string(logLevel)), - "trace_level": config.StringVariable(string(traceLevel)), - "comment": config.StringVariable(comment), - } + configVariables := config.Variables{ + "name": config.StringVariable(id.Name()), + "from_share": config.StringVariable(externalShareId.FullyQualifiedName()), + "comment": config.StringVariable(comment), + + "external_volume": config.StringVariable(externalVolumeId.Name()), + "catalog": config.StringVariable(catalogId.Name()), + "replace_invalid_characters": config.BoolVariable(true), + "default_ddl_collation": config.StringVariable("en_US"), + "storage_serialization_policy": config.StringVariable(string(sdk.StorageSerializationPolicyOptimized)), + "log_level": config.StringVariable(string(sdk.LogLevelInfo)), + "trace_level": config.StringVariable(string(sdk.TraceLevelOnEvent)), + "suspend_task_after_num_failures": config.IntegerVariable(20), + "task_auto_retry_attempts": config.IntegerVariable(20), + 
"user_task_managed_initial_warehouse_size": config.StringVariable(string(sdk.WarehouseSizeXLarge)), + "user_task_timeout_ms": config.IntegerVariable(1200000), + "user_task_minimum_trigger_interval_in_seconds": config.IntegerVariable(120), + "quoted_identifiers_ignore_case": config.BoolVariable(true), + "enable_console_output": config.BoolVariable(true), } resource.Test(t, resource.TestCase{ @@ -130,21 +185,13 @@ func TestAcc_CreateSharedDatabase_complete(t *testing.T) { CheckDestroy: acc.CheckDestroy(t, resources.SharedDatabase), Steps: []resource.TestStep{ { - ConfigVariables: configVariables( - id, - externalShareId, - externalVolumeId, - catalogId, - true, - "en_US", - sdk.StorageSerializationPolicyOptimized, - sdk.LogLevelInfo, - sdk.TraceLevelOnEvent, - comment, - ), + ConfigVariables: configVariables, ConfigDirectory: acc.ConfigurationDirectory("TestAcc_SharedDatabase/complete"), Check: resource.ComposeTestCheckFunc( resource.TestCheckResourceAttr("snowflake_shared_database.test", "name", id.Name()), + resource.TestCheckResourceAttr("snowflake_shared_database.test", "from_share", externalShareId.FullyQualifiedName()), + resource.TestCheckResourceAttr("snowflake_shared_database.test", "comment", comment), + resource.TestCheckResourceAttr("snowflake_shared_database.test", "external_volume", externalVolumeId.Name()), resource.TestCheckResourceAttr("snowflake_shared_database.test", "catalog", catalogId.Name()), resource.TestCheckResourceAttr("snowflake_shared_database.test", "replace_invalid_characters", "true"), @@ -152,23 +199,18 @@ func TestAcc_CreateSharedDatabase_complete(t *testing.T) { resource.TestCheckResourceAttr("snowflake_shared_database.test", "storage_serialization_policy", string(sdk.StorageSerializationPolicyOptimized)), resource.TestCheckResourceAttr("snowflake_shared_database.test", "log_level", string(sdk.LogLevelInfo)), resource.TestCheckResourceAttr("snowflake_shared_database.test", "trace_level", string(sdk.TraceLevelOnEvent)), - 
resource.TestCheckResourceAttr("snowflake_shared_database.test", "comment", comment), + resource.TestCheckResourceAttr("snowflake_shared_database.test", "suspend_task_after_num_failures", "20"), + resource.TestCheckResourceAttr("snowflake_shared_database.test", "task_auto_retry_attempts", "20"), + resource.TestCheckResourceAttr("snowflake_shared_database.test", "user_task_managed_initial_warehouse_size", string(sdk.WarehouseSizeXLarge)), + resource.TestCheckResourceAttr("snowflake_shared_database.test", "user_task_timeout_ms", "1200000"), + resource.TestCheckResourceAttr("snowflake_shared_database.test", "user_task_minimum_trigger_interval_in_seconds", "120"), + resource.TestCheckResourceAttr("snowflake_shared_database.test", "quoted_identifiers_ignore_case", "true"), + resource.TestCheckResourceAttr("snowflake_shared_database.test", "enable_console_output", "true"), ), }, // Import all values { - ConfigVariables: configVariables( - id, - externalShareId, - externalVolumeId, - catalogId, - true, - "en_US", - sdk.StorageSerializationPolicyOptimized, - sdk.LogLevelInfo, - sdk.TraceLevelOnEvent, - comment, - ), + ConfigVariables: configVariables, ConfigDirectory: acc.ConfigurationDirectory("TestAcc_SharedDatabase/complete"), ResourceName: "snowflake_shared_database.test", ImportState: true, @@ -181,26 +223,25 @@ func TestAcc_CreateSharedDatabase_complete(t *testing.T) { func TestAcc_CreateSharedDatabase_InvalidValues(t *testing.T) { comment := random.Comment() - configVariables := func( - replaceInvalidCharacters bool, - defaultDdlCollation string, - storageSerializationPolicy string, - logLevel string, - traceLevel string, - comment string, - ) config.Variables { - return config.Variables{ - "name": config.StringVariable(""), - "from_share": config.StringVariable(""), - "external_volume": config.StringVariable(""), - "catalog": config.StringVariable(""), - "replace_invalid_characters": config.BoolVariable(replaceInvalidCharacters), - "default_ddl_collation": 
config.StringVariable(defaultDdlCollation), - "storage_serialization_policy": config.StringVariable(storageSerializationPolicy), - "log_level": config.StringVariable(logLevel), - "trace_level": config.StringVariable(traceLevel), - "comment": config.StringVariable(comment), - } + configVariables := config.Variables{ + "name": config.StringVariable("name"), + "from_share": config.StringVariable("org.acc.name"), + "comment": config.StringVariable(comment), + + "external_volume": config.StringVariable(""), + "catalog": config.StringVariable(""), + "replace_invalid_characters": config.BoolVariable(false), + "default_ddl_collation": config.StringVariable(""), + "storage_serialization_policy": config.StringVariable("invalid_value"), + "log_level": config.StringVariable("invalid_value"), + "trace_level": config.StringVariable("invalid_value"), + "suspend_task_after_num_failures": config.IntegerVariable(0), + "task_auto_retry_attempts": config.IntegerVariable(0), + "user_task_managed_initial_warehouse_size": config.StringVariable(""), + "user_task_timeout_ms": config.IntegerVariable(0), + "user_task_minimum_trigger_interval_in_seconds": config.IntegerVariable(0), + "quoted_identifiers_ignore_case": config.BoolVariable(false), + "enable_console_output": config.BoolVariable(false), } resource.Test(t, resource.TestCase{ @@ -212,14 +253,7 @@ func TestAcc_CreateSharedDatabase_InvalidValues(t *testing.T) { CheckDestroy: acc.CheckDestroy(t, resources.SharedDatabase), Steps: []resource.TestStep{ { - ConfigVariables: configVariables( - true, - "en_US", - "invalid_value", - "invalid_value", - "invalid_value", - comment, - ), + ConfigVariables: configVariables, ConfigDirectory: acc.ConfigurationDirectory("TestAcc_SharedDatabase/complete"), ExpectError: regexp.MustCompile(`(expected \[{{} log_level}\] to be one of \[\"TRACE\" \"DEBUG\" \"INFO\" \"WARN\" \"ERROR\" \"FATAL\" \"OFF\"\], got invalid_value)|` + `(expected \[{{} trace_level}\] to be one of \[\"ALWAYS\" \"ON_EVENT\" 
\"OFF\"\], got invalid_value)|` + diff --git a/pkg/resources/standard_database.go b/pkg/resources/standard_database.go index 684553bc30..43b7a0e09b 100644 --- a/pkg/resources/standard_database.go +++ b/pkg/resources/standard_database.go @@ -5,14 +5,12 @@ import ( "errors" "fmt" "slices" - "strconv" "strings" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/helpers" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/internal/provider" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk" "github.com/hashicorp/terraform-plugin-sdk/v2/diag" - "github.com/hashicorp/terraform-plugin-sdk/v2/helper/customdiff" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" ) @@ -28,72 +26,6 @@ var standardDatabaseSchema = map[string]*schema.Schema{ ForceNew: true, Description: "Specifies the database as transient. Transient databases do not have a Fail-safe period so they do not incur additional storage costs once they leave Time Travel; however, this means they are also not protected by Fail-safe in the event of a data loss.", }, - "data_retention_time_in_days": nestedProperty( - schema.TypeInt, - "Specifies the number of days for which Time Travel actions (CLONE and UNDROP) can be performed on the database, as well as specifying the default Time Travel retention time for all schemas created in the database. For more details, see [Understanding & Using Time Travel](https://docs.snowflake.com/en/user-guide/data-time-travel).", - ), - // TODO: Remove - "data_retention_time_in_days_2": { - Type: schema.TypeInt, - Optional: true, - Computed: true, - }, - "max_data_extension_time_in_days": nestedProperty( - schema.TypeInt, - "Object parameter that specifies the maximum number of days for which Snowflake can extend the data retention period for tables in the database to prevent streams on the tables from becoming stale. 
For a detailed description of this parameter, see [MAX_DATA_EXTENSION_TIME_IN_DAYS](https://docs.snowflake.com/en/sql-reference/parameters.html#label-max-data-extension-time-in-days).", - ), - "external_volume": nestedPropertyWithInnerModifier( - schema.TypeString, - "The database parameter that specifies the default external volume to use for Iceberg tables.", - func(inner *schema.Schema) { - inner.ValidateDiagFunc = IsValidIdentifier[sdk.AccountObjectIdentifier]() - }, - ), - "catalog": nestedPropertyWithInnerModifier( - schema.TypeString, - "The database parameter that specifies the default catalog to use for Iceberg tables.", - func(inner *schema.Schema) { - inner.ValidateDiagFunc = IsValidIdentifier[sdk.AccountObjectIdentifier]() - }, - ), - "replace_invalid_characters": nestedProperty( - schema.TypeBool, - "Specifies whether to replace invalid UTF-8 characters with the Unicode replacement character (�) in query results for an Iceberg table. You can only set this parameter for tables that use an external Iceberg catalog.", - ), - "default_ddl_collation": nestedProperty( - schema.TypeString, - "Specifies a default collation specification for all schemas and tables added to the database. It can be overridden on schema or table level. For more information, see [collation specification](https://docs.snowflake.com/en/sql-reference/collation#label-collation-specification).", - ), - "storage_serialization_policy": nestedPropertyWithInnerModifier( - schema.TypeString, - fmt.Sprintf("Specifies the storage serialization policy for Iceberg tables that use Snowflake as the catalog. Valid options are: %v. COMPATIBLE: Snowflake performs encoding and compression of data files that ensures interoperability with third-party compute engines. 
OPTIMIZED: Snowflake performs encoding and compression of data files that ensures the best table performance within Snowflake.", sdk.AsStringList(sdk.AllStorageSerializationPolicies)), - func(inner *schema.Schema) { - inner.ValidateDiagFunc = StringInSlice(sdk.AsStringList(sdk.AllStorageSerializationPolicies), true) - inner.DiffSuppressFunc = func(k, oldValue, newValue string, d *schema.ResourceData) bool { - return strings.EqualFold(oldValue, newValue) || (d.Get(k).(string) == string(sdk.StorageSerializationPolicyOptimized) && newValue == "") - } - }, - ), - "log_level": nestedPropertyWithInnerModifier( - schema.TypeString, - fmt.Sprintf("Specifies the severity level of messages that should be ingested and made available in the active event table. Valid options are: %v. Messages at the specified level (and at more severe levels) are ingested. For more information, see [LOG_LEVEL](https://docs.snowflake.com/en/sql-reference/parameters.html#label-log-level).", sdk.AsStringList(sdk.AllLogLevels)), - func(inner *schema.Schema) { - inner.ValidateDiagFunc = StringInSlice(sdk.AsStringList(sdk.AllLogLevels), true) - inner.DiffSuppressFunc = func(k, oldValue, newValue string, d *schema.ResourceData) bool { - return strings.EqualFold(oldValue, newValue) || (d.Get(k).(string) == string(sdk.LogLevelOff) && newValue == "") - } - }, - ), - "trace_level": nestedPropertyWithInnerModifier( - schema.TypeString, - fmt.Sprintf("Controls how trace events are ingested into the event table. Valid options are: %v. 
For information about levels, see [TRACE_LEVEL](https://docs.snowflake.com/en/sql-reference/parameters.html#label-trace-level).", sdk.AsStringList(sdk.AllTraceLevels)), - func(inner *schema.Schema) { - inner.ValidateDiagFunc = StringInSlice(sdk.AsStringList(sdk.AllTraceLevels), true) - inner.DiffSuppressFunc = func(k, oldValue, newValue string, d *schema.ResourceData) bool { - return strings.EqualFold(oldValue, newValue) || (d.Get(k).(string) == string(sdk.TraceLevelOff) && newValue == "") - } - }, - ), "replication": { Type: schema.TypeList, Optional: true, @@ -101,7 +33,7 @@ var standardDatabaseSchema = map[string]*schema.Schema{ MaxItems: 1, Elem: &schema.Resource{ Schema: map[string]*schema.Schema{ - "enable_for_account": { + "enable_to_account": { Type: schema.TypeList, Required: true, Description: "Entry to enable replication and optionally failover for a given account identifier.", @@ -146,25 +78,10 @@ func StandardDatabase() *schema.Resource { ReadContext: ReadStandardDatabase, DeleteContext: DeleteStandardDatabase, UpdateContext: UpdateStandardDatabase, + Description: "Represents a standard database. 
If replication configuration is specified, the database is promoted to serve as a primary database for replication.", - CustomizeDiff: customdiff.All( - // Value - //AccountObjectIntValueComputedIf("data_retention_time_in_days_2", sdk.AccountParameterDataRetentionTimeInDays), - - // Nested - NestedIntValueAccountObjectComputedIf("data_retention_time_in_days", sdk.AccountParameterDataRetentionTimeInDays), - NestedIntValueAccountObjectComputedIf("max_data_extension_time_in_days", sdk.AccountParameterMaxDataExtensionTimeInDays), - NestedStringValueAccountObjectComputedIf("external_volume", sdk.AccountParameterExternalVolume), - NestedStringValueAccountObjectComputedIf("catalog", sdk.AccountParameterCatalog), - NestedBoolValueAccountObjectComputedIf("replace_invalid_characters", sdk.AccountParameterReplaceInvalidCharacters), - NestedStringValueAccountObjectComputedIf("default_ddl_collation", sdk.AccountParameterDefaultDDLCollation), - NestedStringValueAccountObjectComputedIf("storage_serialization_policy", sdk.AccountParameterStorageSerializationPolicy), - NestedStringValueAccountObjectComputedIf("log_level", sdk.AccountParameterLogLevel), - NestedStringValueAccountObjectComputedIf("trace_level", sdk.AccountParameterTraceLevel), - ), - - Description: "Represents a standard database. 
If replication configuration is specified, the database is promoted to serve as a primary database for replication.", - Schema: standardDatabaseSchema, + CustomizeDiff: DatabaseParametersCustomDiff, + Schema: MergeMaps(standardDatabaseSchema, DatabaseParametersSchema), Importer: &schema.ResourceImporter{ StateContext: schema.ImportStatePassthroughContext, }, @@ -193,11 +110,6 @@ func CreateStandardDatabase(ctx context.Context, d *schema.ResourceData, meta an quotedIdentifiersIgnoreCase, enableConsoleOutput := GetAllDatabaseParameters(d) - dataRetentionTimeInDays = nil - if v, ok := d.GetOk("data_retention_time_in_days_2"); ok { - dataRetentionTimeInDays = sdk.Int(v.(int)) - } - err := client.Databases.Create(ctx, id, &sdk.CreateDatabaseOptions{ Transient: GetPropertyAsPointer[bool](d, "is_transient"), DataRetentionTimeInDays: dataRetentionTimeInDays, @@ -234,27 +146,27 @@ func CreateStandardDatabase(ctx context.Context, d *schema.ResourceData, meta an ignoreEditionCheck = sdk.Pointer(v.(bool)) } - if enableForAccounts, ok := replicationConfiguration["enable_for_account"]; ok { - enableForAccountList := enableForAccounts.([]any) + if enableToAccounts, ok := replicationConfiguration["enable_to_account"]; ok { + enableToAccountList := enableToAccounts.([]any) - if len(enableForAccountList) > 0 { - replicationForAccounts := make([]sdk.AccountIdentifier, 0) - failoverForAccounts := make([]sdk.AccountIdentifier, 0) + if len(enableToAccountList) > 0 { + replicationToAccounts := make([]sdk.AccountIdentifier, 0) + failoverToAccounts := make([]sdk.AccountIdentifier, 0) - for _, enableForAccount := range enableForAccountList { - accountConfig := enableForAccount.(map[string]any) + for _, enableToAccount := range enableToAccountList { + accountConfig := enableToAccount.(map[string]any) accountIdentifier := sdk.NewAccountIdentifierFromFullyQualifiedName(accountConfig["account_identifier"].(string)) - replicationForAccounts = append(replicationForAccounts, accountIdentifier) + 
replicationToAccounts = append(replicationToAccounts, accountIdentifier) if v, ok := accountConfig["with_failover"]; ok && v.(bool) { - failoverForAccounts = append(failoverForAccounts, accountIdentifier) + failoverToAccounts = append(failoverToAccounts, accountIdentifier) } } - if len(replicationForAccounts) > 0 { + if len(replicationToAccounts) > 0 { err := client.Databases.AlterReplication(ctx, id, &sdk.AlterDatabaseReplicationOptions{ EnableReplication: &sdk.EnableReplication{ - ToAccounts: replicationForAccounts, + ToAccounts: replicationToAccounts, IgnoreEditionCheck: ignoreEditionCheck, }, }) @@ -266,10 +178,10 @@ func CreateStandardDatabase(ctx context.Context, d *schema.ResourceData, meta an } } - if len(failoverForAccounts) > 0 { + if len(failoverToAccounts) > 0 { err = client.Databases.AlterFailover(ctx, id, &sdk.AlterDatabaseFailoverOptions{ EnableFailover: &sdk.EnableFailover{ - ToAccounts: failoverForAccounts, + ToAccounts: failoverToAccounts, }, }) if err != nil { @@ -302,191 +214,47 @@ func UpdateStandardDatabase(ctx context.Context, d *schema.ResourceData, meta an id = newId } - var databaseSetRequest sdk.DatabaseSet - var databaseUnsetRequest sdk.DatabaseUnset - - if d.HasChange("data_retention_time_in_days") { - dataRetentionObject, ok := d.GetOk("data_retention_time_in_days") - if ok && len(dataRetentionObject.([]any)) > 0 { - dataRetentionTimeInDays, err := GetPropertyOfFirstNestedObjectByValueKey[int](d, "data_retention_time_in_days") - if err != nil { - return diag.FromErr(err) - } - databaseSetRequest.DataRetentionTimeInDays = dataRetentionTimeInDays - } else { - databaseUnsetRequest.DataRetentionTimeInDays = sdk.Bool(true) - } - } - - if d.HasChange("data_retention_time_in_days_2") { - if !d.GetRawConfig().AsValueMap()["data_retention_time_in_days_2"].IsNull() { - databaseSetRequest.DataRetentionTimeInDays = sdk.Pointer(d.Get("data_retention_time_in_days_2").(int)) - } else { - databaseUnsetRequest.DataRetentionTimeInDays = sdk.Bool(true) - 
} - } - - if d.HasChange("max_data_extension_time_in_days") { - maxDataExtensionTimeInDaysObject, ok := d.GetOk("max_data_extension_time_in_days") - if ok && len(maxDataExtensionTimeInDaysObject.([]any)) > 0 { - maxDataExtensionTimeInDays, err := GetPropertyOfFirstNestedObjectByValueKey[int](d, "max_data_extension_time_in_days") - if err != nil { - return diag.FromErr(err) - } - databaseSetRequest.MaxDataExtensionTimeInDays = maxDataExtensionTimeInDays - } else { - databaseUnsetRequest.MaxDataExtensionTimeInDays = sdk.Bool(true) - } - } - - if d.HasChange("external_volume") { - externalVolumeObject, ok := d.GetOk("external_volume") - if ok && len(externalVolumeObject.([]any)) > 0 { - externalVolume, err := GetPropertyOfFirstNestedObjectByValueKey[string](d, "external_volume") - if err != nil { - return diag.FromErr(err) - } - databaseSetRequest.ExternalVolume = sdk.Pointer(sdk.NewAccountObjectIdentifier(*externalVolume)) - } else { - databaseUnsetRequest.ExternalVolume = sdk.Bool(true) - } - } - - if d.HasChange("catalog") { - catalogObject, ok := d.GetOk("catalog") - if ok && len(catalogObject.([]any)) > 0 { - catalog, err := GetPropertyOfFirstNestedObjectByValueKey[string](d, "catalog") - if err != nil { - return diag.FromErr(err) - } - databaseSetRequest.Catalog = sdk.Pointer(sdk.NewAccountObjectIdentifier(*catalog)) - } else { - databaseUnsetRequest.Catalog = sdk.Bool(true) - } - } - - if d.HasChange("replace_invalid_characters") { - replaceInvalidCharactersObject, ok := d.GetOk("replace_invalid_characters") - if ok && len(replaceInvalidCharactersObject.([]any)) > 0 { - replaceInvalidCharacters, err := GetPropertyOfFirstNestedObjectByValueKey[bool](d, "replace_invalid_characters") - if err != nil { - return diag.FromErr(err) - } - databaseSetRequest.ReplaceInvalidCharacters = sdk.Bool(*replaceInvalidCharacters) - } else { - databaseUnsetRequest.ReplaceInvalidCharacters = sdk.Bool(true) - } - } - - if d.HasChange("default_ddl_collation") { - 
defaultDdlCollationObject, ok := d.GetOk("default_ddl_collation") - if ok && len(defaultDdlCollationObject.([]any)) > 0 { - defaultDdlCollation, err := GetPropertyOfFirstNestedObjectByValueKey[string](d, "default_ddl_collation") - if err != nil { - return diag.FromErr(err) - } - databaseSetRequest.DefaultDDLCollation = defaultDdlCollation - } else { - databaseUnsetRequest.DefaultDDLCollation = sdk.Bool(true) - } - } - - if d.HasChange("storage_serialization_policy") { - storageSerializationPolicyObject, ok := d.GetOk("storage_serialization_policy") - if ok && len(storageSerializationPolicyObject.([]any)) > 0 { - storageSerializationPolicy, err := GetPropertyOfFirstNestedObjectByValueKey[string](d, "storage_serialization_policy") - if err != nil { - return diag.FromErr(err) - } - databaseSetRequest.StorageSerializationPolicy = sdk.Pointer(sdk.StorageSerializationPolicy(*storageSerializationPolicy)) - } else { - databaseUnsetRequest.StorageSerializationPolicy = sdk.Bool(true) - } - } - - if d.HasChange("log_level") { - logLevelObject, ok := d.GetOk("log_level") - if ok && len(logLevelObject.([]any)) > 0 { - logLevel, err := GetPropertyOfFirstNestedObjectByValueKey[string](d, "log_level") - if err != nil { - return diag.FromErr(err) - } - databaseSetRequest.LogLevel = sdk.Pointer(sdk.LogLevel(*logLevel)) - } else { - databaseUnsetRequest.LogLevel = sdk.Bool(true) - } - } + databaseSetRequest := new(sdk.DatabaseSet) + databaseUnsetRequest := new(sdk.DatabaseUnset) - if d.HasChange("trace_level") { - traceLevelObject, ok := d.GetOk("trace_level") - if ok && len(traceLevelObject.([]any)) > 0 { - traceLevel, err := GetPropertyOfFirstNestedObjectByValueKey[string](d, "trace_level") - if err != nil { - return diag.FromErr(err) - } - databaseSetRequest.TraceLevel = sdk.Pointer(sdk.TraceLevel(*traceLevel)) - } else { - databaseUnsetRequest.TraceLevel = sdk.Bool(true) - } + if updateParamDiags := HandleDatabaseParameterChanges(d, databaseSetRequest, databaseUnsetRequest); 
len(updateParamDiags) > 0 { + return updateParamDiags } if d.HasChange("replication") { before, after := d.GetChange("replication") - var ( - accountsToEnableReplication []sdk.AccountIdentifier - accountsToDisableReplication []sdk.AccountIdentifier - accountsToEnableFailover []sdk.AccountIdentifier - accountsToDisableFailover []sdk.AccountIdentifier - - // maps represent replication configuration by having sdk.AccountIdentifier - // as a key (implicitly enabling replication), and failover as an option value - beforeReplicationFailoverConfigurationMap = make(map[sdk.AccountIdentifier]bool) - afterReplicationFailoverConfigurationMap = make(map[sdk.AccountIdentifier]bool) - ) + getReplicationConfiguration := func(replicationConfigs []any) (replicationEnabledToAccounts []sdk.AccountIdentifier, failoverEnabledToAccounts []sdk.AccountIdentifier) { + replicationEnabledToAccounts = make([]sdk.AccountIdentifier, 0) + failoverEnabledToAccounts = make([]sdk.AccountIdentifier, 0) - fillReplicationMap := func(replicationConfigs []any, replicationFailoverMap map[sdk.AccountIdentifier]bool) { for _, replicationConfigurationMap := range replicationConfigs { replicationConfiguration := replicationConfigurationMap.(map[string]any) - for _, enableForAccountMap := range replicationConfiguration["enable_for_account"].([]any) { - enableForAccount := enableForAccountMap.(map[string]any) - accountIdentifier := sdk.NewAccountIdentifierFromFullyQualifiedName(enableForAccount["account_identifier"].(string)) - replicationFailoverMap[accountIdentifier] = enableForAccount["with_failover"].(bool) - } - } - } - fillReplicationMap(before.([]any), beforeReplicationFailoverConfigurationMap) - fillReplicationMap(after.([]any), afterReplicationFailoverConfigurationMap) - - for accountIdentifier := range beforeReplicationFailoverConfigurationMap { - if _, ok := afterReplicationFailoverConfigurationMap[accountIdentifier]; !ok { - // Entry removed -> only replication needs to be disabled, because 
failover will be disabled implicitly - // (in Snowflake you cannot have failover enabled when replication is disabled). - accountsToDisableReplication = append(accountsToDisableReplication, accountIdentifier) - } - } + for _, enableToAccountMap := range replicationConfiguration["enable_to_account"].([]any) { + enableToAccount := enableToAccountMap.(map[string]any) + accountIdentifier := sdk.NewAccountIdentifierFromFullyQualifiedName(enableToAccount["account_identifier"].(string)) - for accountIdentifier, withFailover := range afterReplicationFailoverConfigurationMap { - if beforeWithFailover, ok := beforeReplicationFailoverConfigurationMap[accountIdentifier]; !ok { - // New entry, enable replication and failover if set to true - accountsToEnableReplication = append(accountsToEnableReplication, accountIdentifier) - if withFailover { - accountsToEnableFailover = append(accountsToEnableFailover, accountIdentifier) - } - // Existing entry (check for possible failover modifications) - } else if beforeWithFailover != withFailover { - if withFailover { - accountsToEnableFailover = append(accountsToEnableFailover, accountIdentifier) - } else { - accountsToDisableFailover = append(accountsToDisableFailover, accountIdentifier) + replicationEnabledToAccounts = append(replicationEnabledToAccounts, accountIdentifier) + if enableToAccount["with_failover"].(bool) { + failoverEnabledToAccounts = append(failoverEnabledToAccounts, accountIdentifier) + } } } + + return replicationEnabledToAccounts, failoverEnabledToAccounts } + beforeReplicationEnabledToAccounts, beforeFailoverEnabledToAccounts := getReplicationConfiguration(before.([]any)) + afterReplicationEnabledToAccounts, afterFailoverEnabledToAccounts := getReplicationConfiguration(after.([]any)) - if len(accountsToEnableReplication) > 0 { + addedFailovers, removedFailovers := helpers.ListDiff(beforeFailoverEnabledToAccounts, afterFailoverEnabledToAccounts) + addedReplications, removedReplications := 
helpers.ListDiff(beforeReplicationEnabledToAccounts, afterReplicationEnabledToAccounts) + // Failovers will be disabled implicitly by disabled replications + removedFailovers = slices.DeleteFunc(removedFailovers, func(identifier sdk.AccountIdentifier) bool { return slices.Contains(removedReplications, identifier) }) + + if len(addedReplications) > 0 { err := client.Databases.AlterReplication(ctx, id, &sdk.AlterDatabaseReplicationOptions{ EnableReplication: &sdk.EnableReplication{ - ToAccounts: accountsToEnableReplication, + ToAccounts: addedReplications, IgnoreEditionCheck: sdk.Bool(d.Get("replication.0.ignore_edition_check").(bool)), }, }) @@ -495,10 +263,10 @@ func UpdateStandardDatabase(ctx context.Context, d *schema.ResourceData, meta an } } - if len(accountsToEnableFailover) > 0 { + if len(addedFailovers) > 0 { err := client.Databases.AlterFailover(ctx, id, &sdk.AlterDatabaseFailoverOptions{ EnableFailover: &sdk.EnableFailover{ - ToAccounts: accountsToEnableFailover, + ToAccounts: addedFailovers, }, }) if err != nil { @@ -506,10 +274,10 @@ func UpdateStandardDatabase(ctx context.Context, d *schema.ResourceData, meta an } } - if len(accountsToDisableReplication) > 0 { + if len(removedReplications) > 0 { err := client.Databases.AlterReplication(ctx, id, &sdk.AlterDatabaseReplicationOptions{ DisableReplication: &sdk.DisableReplication{ - ToAccounts: accountsToDisableReplication, + ToAccounts: removedReplications, }, }) if err != nil { @@ -517,10 +285,10 @@ func UpdateStandardDatabase(ctx context.Context, d *schema.ResourceData, meta an } } - if len(accountsToDisableFailover) > 0 { + if len(removedFailovers) > 0 { err := client.Databases.AlterFailover(ctx, id, &sdk.AlterDatabaseFailoverOptions{ DisableFailover: &sdk.DisableFailover{ - ToAccounts: accountsToDisableFailover, + ToAccounts: removedFailovers, }, }) if err != nil { @@ -538,18 +306,18 @@ func UpdateStandardDatabase(ctx context.Context, d *schema.ResourceData, meta an } } - if (databaseSetRequest != 
sdk.DatabaseSet{}) { + if (*databaseSetRequest != sdk.DatabaseSet{}) { err := client.Databases.Alter(ctx, id, &sdk.AlterDatabaseOptions{ - Set: &databaseSetRequest, + Set: databaseSetRequest, }) if err != nil { return diag.FromErr(err) } } - if (databaseUnsetRequest != sdk.DatabaseUnset{}) { + if (*databaseUnsetRequest != sdk.DatabaseUnset{}) { err := client.Databases.Alter(ctx, id, &sdk.AlterDatabaseOptions{ - Unset: &databaseUnsetRequest, + Unset: databaseUnsetRequest, }) if err != nil { return diag.FromErr(err) @@ -586,27 +354,10 @@ func ReadStandardDatabase(ctx context.Context, d *schema.ResourceData, meta any) return diag.FromErr(err) } - if err := SetPropertyOfFirstNestedObjectByKey(d, "data_retention_time_in_days", "value", database.RetentionTime); err != nil { - return diag.FromErr(err) - } - - if err := d.Set("data_retention_time_in_days_2", database.RetentionTime); err != nil { - return diag.FromErr(err) - } - if err := d.Set("comment", database.Comment); err != nil { return diag.FromErr(err) } - parameters, err := client.Parameters.ShowParameters(ctx, &sdk.ShowParametersOptions{ - In: &sdk.ParametersIn{ - Database: id, - }, - }) - if err != nil { - return diag.FromErr(err) - } - sessionDetails, err := client.ContextFunctions.CurrentSessionDetails(ctx) if err != nil { return diag.FromErr(err) @@ -640,20 +391,20 @@ func ReadStandardDatabase(ctx context.Context, d *schema.ResourceData, meta any) failoverAllowedToAccounts = append(failoverAllowedToAccounts, allowedAccountIdentifier) } - enableForAccount := make([]map[string]any, 0) + enableToAccount := make([]map[string]any, 0) for _, allowedAccount := range replicationAllowedToAccounts { - enableForAccount = append(enableForAccount, map[string]any{ + enableToAccount = append(enableToAccount, map[string]any{ "account_identifier": allowedAccount.FullyQualifiedName(), "with_failover": slices.Contains(failoverAllowedToAccounts, allowedAccount), }) } - var ignoreEditionCheck *bool + var ignoreEditionCheck bool 
if v, ok := d.GetOk("replication.0.ignore_edition_check"); ok { - ignoreEditionCheck = sdk.Bool(v.(bool)) + ignoreEditionCheck = v.(bool) } - if len(enableForAccount) == 0 && ignoreEditionCheck == nil { + if len(enableToAccount) == 0 { err := d.Set("replication", []any{}) if err != nil { return diag.FromErr(err) @@ -661,7 +412,7 @@ func ReadStandardDatabase(ctx context.Context, d *schema.ResourceData, meta any) } else { err := d.Set("replication", []any{ map[string]any{ - "enable_for_account": enableForAccount, + "enable_to_account": enableToAccount, "ignore_edition_check": ignoreEditionCheck, }, }) @@ -671,49 +422,17 @@ func ReadStandardDatabase(ctx context.Context, d *schema.ResourceData, meta any) } } - for _, parameter := range parameters { - switch parameter.Key { - case "MAX_DATA_EXTENSION_TIME_IN_DAYS": - maxDataExtensionTimeInDays, err := strconv.Atoi(parameter.Value) - if err != nil { - return diag.FromErr(err) - } - if err := SetPropertyOfFirstNestedObjectByValueKey(d, "max_data_extension_time_in_days", maxDataExtensionTimeInDays); err != nil { - return diag.FromErr(err) - } - case "EXTERNAL_VOLUME": - if err := SetPropertyOfFirstNestedObjectByValueKey(d, "external_volume", parameter.Value); err != nil { - return diag.FromErr(err) - } - case "CATALOG": - if err := SetPropertyOfFirstNestedObjectByValueKey(d, "catalog", parameter.Value); err != nil { - return diag.FromErr(err) - } - case "DEFAULT_DDL_COLLATION": - if err := SetPropertyOfFirstNestedObjectByValueKey(d, "default_ddl_collation", parameter.Value); err != nil { - return diag.FromErr(err) - } - case "LOG_LEVEL": - if err := SetPropertyOfFirstNestedObjectByValueKey(d, "log_level", parameter.Value); err != nil { - return diag.FromErr(err) - } - case "TRACE_LEVEL": - if err := SetPropertyOfFirstNestedObjectByValueKey(d, "trace_level", parameter.Value); err != nil { - return diag.FromErr(err) - } - case "REPLACE_INVALID_CHARACTERS": - boolValue, err := strconv.ParseBool(parameter.Value) - if err != 
nil { - return diag.FromErr(err) - } - if err := SetPropertyOfFirstNestedObjectByValueKey(d, "replace_invalid_characters", boolValue); err != nil { - return diag.FromErr(err) - } - case "STORAGE_SERIALIZATION_POLICY": - if err := SetPropertyOfFirstNestedObjectByValueKey(d, "storage_serialization_policy", parameter.Value); err != nil { - return diag.FromErr(err) - } - } + databaseParameters, err := client.Parameters.ShowParameters(ctx, &sdk.ShowParametersOptions{ + In: &sdk.ParametersIn{ + Database: id, + }, + }) + if err != nil { + return diag.FromErr(err) + } + + if diags := HandleDatabaseParameterRead(d, databaseParameters); diags != nil { + return diags } return nil diff --git a/pkg/resources/standard_database_acceptance_test.go b/pkg/resources/standard_database_acceptance_test.go index 57c20e62e9..2dd08132ae 100644 --- a/pkg/resources/standard_database_acceptance_test.go +++ b/pkg/resources/standard_database_acceptance_test.go @@ -1,28 +1,44 @@ package resources_test import ( - "context" - "slices" - "strconv" "testing" acc "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/helpers" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/helpers/random" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/provider/resources" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk" "github.com/hashicorp/terraform-plugin-testing/config" "github.com/hashicorp/terraform-plugin-testing/helper/resource" "github.com/hashicorp/terraform-plugin-testing/tfversion" - "github.com/stretchr/testify/require" ) -func TestAcc_StandardDatabase_Minimal(t *testing.T) { +func TestAcc_StandardDatabase_Basic(t *testing.T) { id := acc.TestClient().Ids.RandomAccountObjectIdentifier() comment := random.Comment() newId := acc.TestClient().Ids.RandomAccountObjectIdentifier() newComment := random.Comment() + var ( + accountDataRetentionTimeInDays = new(string) + 
accountMaxDataExtensionTimeInDays = new(string) + accountExternalVolume = new(string) + accountCatalog = new(string) + accountReplaceInvalidCharacters = new(string) + accountDefaultDdlCollation = new(string) + accountStorageSerializationPolicy = new(string) + accountLogLevel = new(string) + accountTraceLevel = new(string) + accountSuspendTaskAfterNumFailures = new(string) + accountTaskAutoRetryAttempts = new(string) + accountUserTaskMangedInitialWarehouseSize = new(string) + accountUserTaskTimeoutMs = new(string) + accountUserTaskMinimumTriggerIntervalInSeconds = new(string) + accountQuotedIdentifiersIgnoreCase = new(string) + accountEnableConsoleOutput = new(string) + ) + configVariables := func(id sdk.AccountObjectIdentifier, comment string) config.Variables { return config.Variables{ "name": config.StringVariable(id.Name()), @@ -39,22 +55,49 @@ func TestAcc_StandardDatabase_Minimal(t *testing.T) { CheckDestroy: acc.CheckDestroy(t, resources.StandardDatabase), Steps: []resource.TestStep{ { + PreConfig: func() { + params := acc.TestClient().Parameter.ShowAccountParameters(t) + *accountDataRetentionTimeInDays = helpers.FindParameter(t, params, sdk.AccountParameterDataRetentionTimeInDays).Value + *accountMaxDataExtensionTimeInDays = helpers.FindParameter(t, params, sdk.AccountParameterMaxDataExtensionTimeInDays).Value + *accountExternalVolume = helpers.FindParameter(t, params, sdk.AccountParameterExternalVolume).Value + *accountCatalog = helpers.FindParameter(t, params, sdk.AccountParameterCatalog).Value + *accountReplaceInvalidCharacters = helpers.FindParameter(t, params, sdk.AccountParameterReplaceInvalidCharacters).Value + *accountDefaultDdlCollation = helpers.FindParameter(t, params, sdk.AccountParameterDefaultDDLCollation).Value + *accountStorageSerializationPolicy = helpers.FindParameter(t, params, sdk.AccountParameterStorageSerializationPolicy).Value + *accountLogLevel = helpers.FindParameter(t, params, sdk.AccountParameterLogLevel).Value + *accountTraceLevel 
= helpers.FindParameter(t, params, sdk.AccountParameterTraceLevel).Value + *accountSuspendTaskAfterNumFailures = helpers.FindParameter(t, params, sdk.AccountParameterSuspendTaskAfterNumFailures).Value + *accountTaskAutoRetryAttempts = helpers.FindParameter(t, params, sdk.AccountParameterTaskAutoRetryAttempts).Value + *accountUserTaskMangedInitialWarehouseSize = helpers.FindParameter(t, params, sdk.AccountParameterUserTaskManagedInitialWarehouseSize).Value + *accountUserTaskTimeoutMs = helpers.FindParameter(t, params, sdk.AccountParameterUserTaskTimeoutMs).Value + *accountUserTaskMinimumTriggerIntervalInSeconds = helpers.FindParameter(t, params, sdk.AccountParameterUserTaskMinimumTriggerIntervalInSeconds).Value + *accountQuotedIdentifiersIgnoreCase = helpers.FindParameter(t, params, sdk.AccountParameterQuotedIdentifiersIgnoreCase).Value + *accountEnableConsoleOutput = helpers.FindParameter(t, params, sdk.AccountParameterEnableConsoleOutput).Value + }, ConfigDirectory: acc.ConfigurationDirectory("TestAcc_StandardDatabase/basic"), ConfigVariables: configVariables(id, comment), Check: resource.ComposeTestCheckFunc( resource.TestCheckResourceAttr("snowflake_standard_database.test", "name", id.Name()), resource.TestCheckResourceAttr("snowflake_standard_database.test", "is_transient", "false"), resource.TestCheckResourceAttr("snowflake_standard_database.test", "comment", comment), - resource.TestCheckResourceAttr("snowflake_standard_database.test", "data_retention_time_in_days.#", "1"), - resource.TestCheckResourceAttr("snowflake_standard_database.test", "max_data_extension_time_in_days.#", "1"), - resource.TestCheckResourceAttr("snowflake_standard_database.test", "external_volume.#", "1"), - resource.TestCheckResourceAttr("snowflake_standard_database.test", "catalog.#", "1"), - resource.TestCheckResourceAttr("snowflake_standard_database.test", "replace_invalid_characters.#", "1"), - resource.TestCheckResourceAttr("snowflake_standard_database.test", 
"default_ddl_collation.#", "1"), - resource.TestCheckResourceAttr("snowflake_standard_database.test", "storage_serialization_policy.#", "1"), - resource.TestCheckResourceAttr("snowflake_standard_database.test", "log_level.#", "1"), - resource.TestCheckResourceAttr("snowflake_standard_database.test", "trace_level.#", "1"), resource.TestCheckResourceAttr("snowflake_standard_database.test", "replication.#", "0"), + + resource.TestCheckResourceAttrPtr("snowflake_standard_database.test", "data_retention_time_in_days", accountDataRetentionTimeInDays), + resource.TestCheckResourceAttrPtr("snowflake_standard_database.test", "max_data_extension_time_in_days", accountMaxDataExtensionTimeInDays), + resource.TestCheckResourceAttrPtr("snowflake_standard_database.test", "external_volume", accountExternalVolume), + resource.TestCheckResourceAttrPtr("snowflake_standard_database.test", "catalog", accountCatalog), + resource.TestCheckResourceAttrPtr("snowflake_standard_database.test", "replace_invalid_characters", accountReplaceInvalidCharacters), + resource.TestCheckResourceAttrPtr("snowflake_standard_database.test", "default_ddl_collation", accountDefaultDdlCollation), + resource.TestCheckResourceAttrPtr("snowflake_standard_database.test", "storage_serialization_policy", accountStorageSerializationPolicy), + resource.TestCheckResourceAttrPtr("snowflake_standard_database.test", "log_level", accountLogLevel), + resource.TestCheckResourceAttrPtr("snowflake_standard_database.test", "trace_level", accountTraceLevel), + resource.TestCheckResourceAttrPtr("snowflake_standard_database.test", "suspend_task_after_num_failures", accountSuspendTaskAfterNumFailures), + resource.TestCheckResourceAttrPtr("snowflake_standard_database.test", "task_auto_retry_attempts", accountTaskAutoRetryAttempts), + resource.TestCheckResourceAttrPtr("snowflake_standard_database.test", "user_task_managed_initial_warehouse_size", accountUserTaskMangedInitialWarehouseSize), + 
resource.TestCheckResourceAttrPtr("snowflake_standard_database.test", "user_task_timeout_ms", accountUserTaskTimeoutMs), + resource.TestCheckResourceAttrPtr("snowflake_standard_database.test", "user_task_minimum_trigger_interval_in_seconds", accountUserTaskMinimumTriggerIntervalInSeconds), + resource.TestCheckResourceAttrPtr("snowflake_standard_database.test", "quoted_identifiers_ignore_case", accountQuotedIdentifiersIgnoreCase), + resource.TestCheckResourceAttrPtr("snowflake_standard_database.test", "enable_console_output", accountEnableConsoleOutput), ), }, { @@ -64,16 +107,23 @@ func TestAcc_StandardDatabase_Minimal(t *testing.T) { resource.TestCheckResourceAttr("snowflake_standard_database.test", "name", newId.Name()), resource.TestCheckResourceAttr("snowflake_standard_database.test", "is_transient", "false"), resource.TestCheckResourceAttr("snowflake_standard_database.test", "comment", newComment), - resource.TestCheckResourceAttr("snowflake_standard_database.test", "data_retention_time_in_days.#", "1"), - resource.TestCheckResourceAttr("snowflake_standard_database.test", "max_data_extension_time_in_days.#", "1"), - resource.TestCheckResourceAttr("snowflake_standard_database.test", "external_volume.#", "1"), - resource.TestCheckResourceAttr("snowflake_standard_database.test", "catalog.#", "1"), - resource.TestCheckResourceAttr("snowflake_standard_database.test", "replace_invalid_characters.#", "1"), - resource.TestCheckResourceAttr("snowflake_standard_database.test", "default_ddl_collation.#", "1"), - resource.TestCheckResourceAttr("snowflake_standard_database.test", "storage_serialization_policy.#", "1"), - resource.TestCheckResourceAttr("snowflake_standard_database.test", "log_level.#", "1"), - resource.TestCheckResourceAttr("snowflake_standard_database.test", "trace_level.#", "1"), - resource.TestCheckResourceAttr("snowflake_standard_database.test", "replication.#", "0"), + + resource.TestCheckResourceAttrPtr("snowflake_standard_database.test", 
"data_retention_time_in_days", accountDataRetentionTimeInDays), + resource.TestCheckResourceAttrPtr("snowflake_standard_database.test", "max_data_extension_time_in_days", accountMaxDataExtensionTimeInDays), + resource.TestCheckResourceAttrPtr("snowflake_standard_database.test", "external_volume", accountExternalVolume), + resource.TestCheckResourceAttrPtr("snowflake_standard_database.test", "catalog", accountCatalog), + resource.TestCheckResourceAttrPtr("snowflake_standard_database.test", "replace_invalid_characters", accountReplaceInvalidCharacters), + resource.TestCheckResourceAttrPtr("snowflake_standard_database.test", "default_ddl_collation", accountDefaultDdlCollation), + resource.TestCheckResourceAttrPtr("snowflake_standard_database.test", "storage_serialization_policy", accountStorageSerializationPolicy), + resource.TestCheckResourceAttrPtr("snowflake_standard_database.test", "log_level", accountLogLevel), + resource.TestCheckResourceAttrPtr("snowflake_standard_database.test", "trace_level", accountTraceLevel), + resource.TestCheckResourceAttrPtr("snowflake_standard_database.test", "suspend_task_after_num_failures", accountSuspendTaskAfterNumFailures), + resource.TestCheckResourceAttrPtr("snowflake_standard_database.test", "task_auto_retry_attempts", accountTaskAutoRetryAttempts), + resource.TestCheckResourceAttrPtr("snowflake_standard_database.test", "user_task_managed_initial_warehouse_size", accountUserTaskMangedInitialWarehouseSize), + resource.TestCheckResourceAttrPtr("snowflake_standard_database.test", "user_task_timeout_ms", accountUserTaskTimeoutMs), + resource.TestCheckResourceAttrPtr("snowflake_standard_database.test", "user_task_minimum_trigger_interval_in_seconds", accountUserTaskMinimumTriggerIntervalInSeconds), + resource.TestCheckResourceAttrPtr("snowflake_standard_database.test", "quoted_identifiers_ignore_case", accountQuotedIdentifiersIgnoreCase), + resource.TestCheckResourceAttrPtr("snowflake_standard_database.test", 
"enable_console_output", accountEnableConsoleOutput), ), }, { @@ -106,50 +156,50 @@ func TestAcc_StandardDatabase_ComputedValues(t *testing.T) { catalogId, catalogCleanup := acc.TestClient().CatalogIntegration.Create(t) t.Cleanup(catalogCleanup) - secondaryAccountIdentifier := acc.SecondaryTestClient().Account.GetAccountIdentifier(t).FullyQualifiedName() - - completeConfigVariables := func( - id sdk.AccountObjectIdentifier, - comment string, - dataRetention int, - maxDataExtension int, - replaceInvalidCharacters bool, - defaultDdlCollation string, - storageSerializationPolicy sdk.StorageSerializationPolicy, - logLevel sdk.LogLevel, - traceLevel sdk.TraceLevel, - ) config.Variables { - return config.Variables{ - "name": config.StringVariable(id.Name()), - "comment": config.StringVariable(comment), - "transient": config.BoolVariable(false), - "data_retention_time_in_days": config.IntegerVariable(dataRetention), - "max_data_extension_time_in_days": config.IntegerVariable(maxDataExtension), - "external_volume": config.StringVariable(externalVolumeId.Name()), - "catalog": config.StringVariable(catalogId.Name()), - "replace_invalid_characters": config.BoolVariable(replaceInvalidCharacters), - "default_ddl_collation": config.StringVariable(defaultDdlCollation), - "storage_serialization_policy": config.StringVariable(string(storageSerializationPolicy)), - "log_level": config.StringVariable(string(logLevel)), - "trace_level": config.StringVariable(string(traceLevel)), - "account_identifier": config.StringVariable(secondaryAccountIdentifier), - "with_failover": config.BoolVariable(true), - "ignore_edition_check": config.BoolVariable(true), - } - } - var ( - dataRetentionTimeInDays = new(string) - maxDataExtensionTimeInDays = new(string) - externalVolume = new(string) - catalog = new(string) - replaceInvalidCharacters = new(string) - defaultDdlCollation = new(string) - storageSerializationPolicy = new(string) - logLevel = new(string) - traceLevel = new(string) + 
accountDataRetentionTimeInDays = new(string) + accountMaxDataExtensionTimeInDays = new(string) + accountExternalVolume = new(string) + accountCatalog = new(string) + accountReplaceInvalidCharacters = new(string) + accountDefaultDdlCollation = new(string) + accountStorageSerializationPolicy = new(string) + accountLogLevel = new(string) + accountTraceLevel = new(string) + accountSuspendTaskAfterNumFailures = new(string) + accountTaskAutoRetryAttempts = new(string) + accountUserTaskMangedInitialWarehouseSize = new(string) + accountUserTaskTimeoutMs = new(string) + accountUserTaskMinimumTriggerIntervalInSeconds = new(string) + accountQuotedIdentifiersIgnoreCase = new(string) + accountEnableConsoleOutput = new(string) ) + completeConfigVariables := config.Variables{ + "name": config.StringVariable(id.Name()), + "comment": config.StringVariable(comment), + "transient": config.BoolVariable(false), + "account_identifier": config.StringVariable(secondaryAccountIdentifier), + "with_failover": config.BoolVariable(true), + "ignore_edition_check": config.BoolVariable(true), + "data_retention_time_in_days": config.IntegerVariable(20), + "max_data_extension_time_in_days": config.IntegerVariable(30), + "external_volume": config.StringVariable(externalVolumeId.Name()), + "catalog": config.StringVariable(catalogId.Name()), + "replace_invalid_characters": config.BoolVariable(true), + "default_ddl_collation": config.StringVariable("en_US"), + "storage_serialization_policy": config.StringVariable(string(sdk.StorageSerializationPolicyCompatible)), + "log_level": config.StringVariable(string(sdk.LogLevelInfo)), + "trace_level": config.StringVariable(string(sdk.TraceLevelOnEvent)), + "suspend_task_after_num_failures": config.IntegerVariable(20), + "task_auto_retry_attempts": config.IntegerVariable(20), + "user_task_managed_initial_warehouse_size": config.StringVariable(string(sdk.WarehouseSizeXLarge)), + "user_task_timeout_ms": config.IntegerVariable(1200000), + 
"user_task_minimum_trigger_interval_in_seconds": config.IntegerVariable(120), + "quoted_identifiers_ignore_case": config.BoolVariable(true), + "enable_console_output": config.BoolVariable(true), + } + resource.Test(t, resource.TestCase{ ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, PreCheck: func() { acc.TestAccPreCheck(t) }, @@ -158,34 +208,25 @@ func TestAcc_StandardDatabase_ComputedValues(t *testing.T) { }, CheckDestroy: acc.CheckDestroy(t, resources.StandardDatabase), Steps: []resource.TestStep{ - { - ConfigDirectory: acc.ConfigurationDirectory("TestAcc_StandardDatabase/basic"), - ConfigVariables: configVariables(id, comment), - }, { PreConfig: func() { - params, err := acc.Client(t).Parameters.ShowParameters(context.Background(), &sdk.ShowParametersOptions{ - In: &sdk.ParametersIn{ - Database: id, - }, - }) - require.NoError(t, err) - - findParam := func(key string) string { - idx := slices.IndexFunc(params, func(parameter *sdk.Parameter) bool { return parameter.Key == key }) - require.NotEqual(t, -1, idx) - return params[idx].Value - } - - *dataRetentionTimeInDays = findParam("DATA_RETENTION_TIME_IN_DAYS") - *maxDataExtensionTimeInDays = findParam("MAX_DATA_EXTENSION_TIME_IN_DAYS") - *externalVolume = findParam("EXTERNAL_VOLUME") - *catalog = findParam("CATALOG") - *replaceInvalidCharacters = findParam("REPLACE_INVALID_CHARACTERS") - *defaultDdlCollation = findParam("DEFAULT_DDL_COLLATION") - *storageSerializationPolicy = findParam("STORAGE_SERIALIZATION_POLICY") - *logLevel = findParam("LOG_LEVEL") - *traceLevel = findParam("TRACE_LEVEL") + params := acc.TestClient().Parameter.ShowAccountParameters(t) + *accountDataRetentionTimeInDays = helpers.FindParameter(t, params, sdk.AccountParameterDataRetentionTimeInDays).Value + *accountMaxDataExtensionTimeInDays = helpers.FindParameter(t, params, sdk.AccountParameterMaxDataExtensionTimeInDays).Value + *accountExternalVolume = helpers.FindParameter(t, params, 
sdk.AccountParameterExternalVolume).Value + *accountCatalog = helpers.FindParameter(t, params, sdk.AccountParameterCatalog).Value + *accountReplaceInvalidCharacters = helpers.FindParameter(t, params, sdk.AccountParameterReplaceInvalidCharacters).Value + *accountDefaultDdlCollation = helpers.FindParameter(t, params, sdk.AccountParameterDefaultDDLCollation).Value + *accountStorageSerializationPolicy = helpers.FindParameter(t, params, sdk.AccountParameterStorageSerializationPolicy).Value + *accountLogLevel = helpers.FindParameter(t, params, sdk.AccountParameterLogLevel).Value + *accountTraceLevel = helpers.FindParameter(t, params, sdk.AccountParameterTraceLevel).Value + *accountSuspendTaskAfterNumFailures = helpers.FindParameter(t, params, sdk.AccountParameterSuspendTaskAfterNumFailures).Value + *accountTaskAutoRetryAttempts = helpers.FindParameter(t, params, sdk.AccountParameterTaskAutoRetryAttempts).Value + *accountUserTaskMangedInitialWarehouseSize = helpers.FindParameter(t, params, sdk.AccountParameterUserTaskManagedInitialWarehouseSize).Value + *accountUserTaskTimeoutMs = helpers.FindParameter(t, params, sdk.AccountParameterUserTaskTimeoutMs).Value + *accountUserTaskMinimumTriggerIntervalInSeconds = helpers.FindParameter(t, params, sdk.AccountParameterUserTaskMinimumTriggerIntervalInSeconds).Value + *accountQuotedIdentifiersIgnoreCase = helpers.FindParameter(t, params, sdk.AccountParameterQuotedIdentifiersIgnoreCase).Value + *accountEnableConsoleOutput = helpers.FindParameter(t, params, sdk.AccountParameterEnableConsoleOutput).Value }, ConfigDirectory: acc.ConfigurationDirectory("TestAcc_StandardDatabase/basic"), ConfigVariables: configVariables(id, comment), @@ -193,35 +234,49 @@ func TestAcc_StandardDatabase_ComputedValues(t *testing.T) { resource.TestCheckResourceAttr("snowflake_standard_database.test", "name", id.Name()), resource.TestCheckResourceAttr("snowflake_standard_database.test", "is_transient", "false"), 
resource.TestCheckResourceAttr("snowflake_standard_database.test", "comment", comment), - resource.TestCheckResourceAttrPtr("snowflake_standard_database.test", "data_retention_time_in_days.0.value", dataRetentionTimeInDays), - resource.TestCheckResourceAttrPtr("snowflake_standard_database.test", "data_retention_time_in_days_2", dataRetentionTimeInDays), - resource.TestCheckResourceAttrPtr("snowflake_standard_database.test", "max_data_extension_time_in_days.0.value", maxDataExtensionTimeInDays), - resource.TestCheckResourceAttrPtr("snowflake_standard_database.test", "external_volume.0.value", externalVolume), - resource.TestCheckResourceAttrPtr("snowflake_standard_database.test", "catalog.0.value", catalog), - resource.TestCheckResourceAttrPtr("snowflake_standard_database.test", "replace_invalid_characters.0.value", replaceInvalidCharacters), - resource.TestCheckResourceAttrPtr("snowflake_standard_database.test", "default_ddl_collation.0.value", defaultDdlCollation), - resource.TestCheckResourceAttrPtr("snowflake_standard_database.test", "storage_serialization_policy.0.value", storageSerializationPolicy), - resource.TestCheckResourceAttrPtr("snowflake_standard_database.test", "log_level.0.value", logLevel), - resource.TestCheckResourceAttrPtr("snowflake_standard_database.test", "trace_level.0.value", traceLevel), + + resource.TestCheckResourceAttrPtr("snowflake_standard_database.test", "data_retention_time_in_days", accountDataRetentionTimeInDays), + resource.TestCheckResourceAttrPtr("snowflake_standard_database.test", "max_data_extension_time_in_days", accountMaxDataExtensionTimeInDays), + resource.TestCheckResourceAttrPtr("snowflake_standard_database.test", "external_volume", accountExternalVolume), + resource.TestCheckResourceAttrPtr("snowflake_standard_database.test", "catalog", accountCatalog), + resource.TestCheckResourceAttrPtr("snowflake_standard_database.test", "replace_invalid_characters", accountReplaceInvalidCharacters), + 
resource.TestCheckResourceAttrPtr("snowflake_standard_database.test", "default_ddl_collation", accountDefaultDdlCollation), + resource.TestCheckResourceAttrPtr("snowflake_standard_database.test", "storage_serialization_policy", accountStorageSerializationPolicy), + resource.TestCheckResourceAttrPtr("snowflake_standard_database.test", "log_level", accountLogLevel), + resource.TestCheckResourceAttrPtr("snowflake_standard_database.test", "trace_level", accountTraceLevel), + resource.TestCheckResourceAttrPtr("snowflake_standard_database.test", "suspend_task_after_num_failures", accountSuspendTaskAfterNumFailures), + resource.TestCheckResourceAttrPtr("snowflake_standard_database.test", "task_auto_retry_attempts", accountTaskAutoRetryAttempts), + resource.TestCheckResourceAttrPtr("snowflake_standard_database.test", "user_task_managed_initial_warehouse_size", accountUserTaskMangedInitialWarehouseSize), + resource.TestCheckResourceAttrPtr("snowflake_standard_database.test", "user_task_timeout_ms", accountUserTaskTimeoutMs), + resource.TestCheckResourceAttrPtr("snowflake_standard_database.test", "user_task_minimum_trigger_interval_in_seconds", accountUserTaskMinimumTriggerIntervalInSeconds), + resource.TestCheckResourceAttrPtr("snowflake_standard_database.test", "quoted_identifiers_ignore_case", accountQuotedIdentifiersIgnoreCase), + resource.TestCheckResourceAttrPtr("snowflake_standard_database.test", "enable_console_output", accountEnableConsoleOutput), ), }, { ConfigDirectory: acc.ConfigurationDirectory("TestAcc_StandardDatabase/complete-optionals-set"), - ConfigVariables: completeConfigVariables(id, comment, 20, 30, true, "en_US", sdk.StorageSerializationPolicyCompatible, sdk.LogLevelInfo, sdk.TraceLevelOnEvent), + ConfigVariables: completeConfigVariables, Check: resource.ComposeTestCheckFunc( resource.TestCheckResourceAttr("snowflake_standard_database.test", "name", id.Name()), resource.TestCheckResourceAttr("snowflake_standard_database.test", "is_transient", "false"), 
resource.TestCheckResourceAttr("snowflake_standard_database.test", "comment", comment), - resource.TestCheckResourceAttr("snowflake_standard_database.test", "data_retention_time_in_days.0.value", "20"), - resource.TestCheckResourceAttr("snowflake_standard_database.test", "data_retention_time_in_days_2", "20"), - resource.TestCheckResourceAttr("snowflake_standard_database.test", "max_data_extension_time_in_days.0.value", "30"), - resource.TestCheckResourceAttr("snowflake_standard_database.test", "external_volume.0.value", externalVolumeId.Name()), - resource.TestCheckResourceAttr("snowflake_standard_database.test", "catalog.0.value", catalogId.Name()), - resource.TestCheckResourceAttr("snowflake_standard_database.test", "replace_invalid_characters.0.value", "true"), - resource.TestCheckResourceAttr("snowflake_standard_database.test", "default_ddl_collation.0.value", "en_US"), - resource.TestCheckResourceAttr("snowflake_standard_database.test", "storage_serialization_policy.0.value", string(sdk.StorageSerializationPolicyCompatible)), - resource.TestCheckResourceAttr("snowflake_standard_database.test", "log_level.0.value", string(sdk.LogLevelInfo)), - resource.TestCheckResourceAttr("snowflake_standard_database.test", "trace_level.0.value", string(sdk.TraceLevelOnEvent)), + + resource.TestCheckResourceAttr("snowflake_standard_database.test", "data_retention_time_in_days", "20"), + resource.TestCheckResourceAttr("snowflake_standard_database.test", "max_data_extension_time_in_days", "30"), + resource.TestCheckResourceAttr("snowflake_standard_database.test", "external_volume", externalVolumeId.Name()), + resource.TestCheckResourceAttr("snowflake_standard_database.test", "catalog", catalogId.Name()), + resource.TestCheckResourceAttr("snowflake_standard_database.test", "replace_invalid_characters", "true"), + resource.TestCheckResourceAttr("snowflake_standard_database.test", "default_ddl_collation", "en_US"), + 
resource.TestCheckResourceAttr("snowflake_standard_database.test", "storage_serialization_policy", string(sdk.StorageSerializationPolicyCompatible)), + resource.TestCheckResourceAttr("snowflake_standard_database.test", "log_level", string(sdk.LogLevelInfo)), + resource.TestCheckResourceAttr("snowflake_standard_database.test", "trace_level", string(sdk.TraceLevelOnEvent)), + resource.TestCheckResourceAttr("snowflake_standard_database.test", "suspend_task_after_num_failures", "20"), + resource.TestCheckResourceAttr("snowflake_standard_database.test", "task_auto_retry_attempts", "20"), + resource.TestCheckResourceAttr("snowflake_standard_database.test", "user_task_managed_initial_warehouse_size", string(sdk.WarehouseSizeXLarge)), + resource.TestCheckResourceAttr("snowflake_standard_database.test", "user_task_timeout_ms", "1200000"), + resource.TestCheckResourceAttr("snowflake_standard_database.test", "user_task_minimum_trigger_interval_in_seconds", "120"), + resource.TestCheckResourceAttr("snowflake_standard_database.test", "quoted_identifiers_ignore_case", "true"), + resource.TestCheckResourceAttr("snowflake_standard_database.test", "enable_console_output", "true"), ), }, { @@ -231,16 +286,23 @@ func TestAcc_StandardDatabase_ComputedValues(t *testing.T) { resource.TestCheckResourceAttr("snowflake_standard_database.test", "name", id.Name()), resource.TestCheckResourceAttr("snowflake_standard_database.test", "is_transient", "false"), resource.TestCheckResourceAttr("snowflake_standard_database.test", "comment", comment), - resource.TestCheckResourceAttrPtr("snowflake_standard_database.test", "data_retention_time_in_days.0.value", dataRetentionTimeInDays), - resource.TestCheckResourceAttrPtr("snowflake_standard_database.test", "data_retention_time_in_days_2", dataRetentionTimeInDays), - resource.TestCheckResourceAttrPtr("snowflake_standard_database.test", "max_data_extension_time_in_days.0.value", maxDataExtensionTimeInDays), - 
resource.TestCheckResourceAttrPtr("snowflake_standard_database.test", "external_volume.0.value", externalVolume), - resource.TestCheckResourceAttrPtr("snowflake_standard_database.test", "catalog.0.value", catalog), - resource.TestCheckResourceAttrPtr("snowflake_standard_database.test", "replace_invalid_characters.0.value", replaceInvalidCharacters), - resource.TestCheckResourceAttrPtr("snowflake_standard_database.test", "default_ddl_collation.0.value", defaultDdlCollation), - resource.TestCheckResourceAttrPtr("snowflake_standard_database.test", "storage_serialization_policy.0.value", storageSerializationPolicy), - resource.TestCheckResourceAttrPtr("snowflake_standard_database.test", "log_level.0.value", logLevel), - resource.TestCheckResourceAttrPtr("snowflake_standard_database.test", "trace_level.0.value", traceLevel), + + resource.TestCheckResourceAttrPtr("snowflake_standard_database.test", "data_retention_time_in_days", accountDataRetentionTimeInDays), + resource.TestCheckResourceAttrPtr("snowflake_standard_database.test", "max_data_extension_time_in_days", accountMaxDataExtensionTimeInDays), + resource.TestCheckResourceAttrPtr("snowflake_standard_database.test", "external_volume", accountExternalVolume), + resource.TestCheckResourceAttrPtr("snowflake_standard_database.test", "catalog", accountCatalog), + resource.TestCheckResourceAttrPtr("snowflake_standard_database.test", "replace_invalid_characters", accountReplaceInvalidCharacters), + resource.TestCheckResourceAttrPtr("snowflake_standard_database.test", "default_ddl_collation", accountDefaultDdlCollation), + resource.TestCheckResourceAttrPtr("snowflake_standard_database.test", "storage_serialization_policy", accountStorageSerializationPolicy), + resource.TestCheckResourceAttrPtr("snowflake_standard_database.test", "log_level", accountLogLevel), + resource.TestCheckResourceAttrPtr("snowflake_standard_database.test", "trace_level", accountTraceLevel), + 
resource.TestCheckResourceAttrPtr("snowflake_standard_database.test", "suspend_task_after_num_failures", accountSuspendTaskAfterNumFailures), + resource.TestCheckResourceAttrPtr("snowflake_standard_database.test", "task_auto_retry_attempts", accountTaskAutoRetryAttempts), + resource.TestCheckResourceAttrPtr("snowflake_standard_database.test", "user_task_managed_initial_warehouse_size", accountUserTaskMangedInitialWarehouseSize), + resource.TestCheckResourceAttrPtr("snowflake_standard_database.test", "user_task_timeout_ms", accountUserTaskTimeoutMs), + resource.TestCheckResourceAttrPtr("snowflake_standard_database.test", "user_task_minimum_trigger_interval_in_seconds", accountUserTaskMinimumTriggerIntervalInSeconds), + resource.TestCheckResourceAttrPtr("snowflake_standard_database.test", "quoted_identifiers_ignore_case", accountQuotedIdentifiersIgnoreCase), + resource.TestCheckResourceAttrPtr("snowflake_standard_database.test", "enable_console_output", accountEnableConsoleOutput), ), }, }, @@ -258,36 +320,30 @@ func TestAcc_StandardDatabase_Complete(t *testing.T) { catalogId, catalogCleanup := acc.TestClient().CatalogIntegration.Create(t) t.Cleanup(catalogCleanup) - configVariables := func( - id sdk.AccountObjectIdentifier, - comment string, - dataRetention int, - maxDataExtension int, - replaceInvalidCharacters bool, - defaultDdlCollation string, - storageSerializationPolicy sdk.StorageSerializationPolicy, - logLevel sdk.LogLevel, - traceLevel sdk.TraceLevel, - withFailover bool, - ignoreEditionCheck bool, - ) config.Variables { - return config.Variables{ - "name": config.StringVariable(id.Name()), - "comment": config.StringVariable(comment), - "transient": config.BoolVariable(false), - "data_retention_time_in_days": config.IntegerVariable(dataRetention), - "max_data_extension_time_in_days": config.IntegerVariable(maxDataExtension), - "external_volume": config.StringVariable(externalVolumeId.Name()), - "catalog": config.StringVariable(catalogId.Name()), - 
"replace_invalid_characters": config.BoolVariable(replaceInvalidCharacters), - "default_ddl_collation": config.StringVariable(defaultDdlCollation), - "storage_serialization_policy": config.StringVariable(string(storageSerializationPolicy)), - "log_level": config.StringVariable(string(logLevel)), - "trace_level": config.StringVariable(string(traceLevel)), - "account_identifier": config.StringVariable(secondaryAccountIdentifier), - "with_failover": config.BoolVariable(withFailover), - "ignore_edition_check": config.BoolVariable(ignoreEditionCheck), - } + completeConfigVariables := config.Variables{ + "name": config.StringVariable(id.Name()), + "comment": config.StringVariable(comment), + "transient": config.BoolVariable(false), + "account_identifier": config.StringVariable(secondaryAccountIdentifier), + "with_failover": config.BoolVariable(true), + "ignore_edition_check": config.BoolVariable(true), + + "data_retention_time_in_days": config.IntegerVariable(20), + "max_data_extension_time_in_days": config.IntegerVariable(30), + "external_volume": config.StringVariable(externalVolumeId.Name()), + "catalog": config.StringVariable(catalogId.Name()), + "replace_invalid_characters": config.BoolVariable(true), + "default_ddl_collation": config.StringVariable("en_US"), + "storage_serialization_policy": config.StringVariable(string(sdk.StorageSerializationPolicyCompatible)), + "log_level": config.StringVariable(string(sdk.LogLevelInfo)), + "trace_level": config.StringVariable(string(sdk.TraceLevelOnEvent)), + "suspend_task_after_num_failures": config.IntegerVariable(20), + "task_auto_retry_attempts": config.IntegerVariable(20), + "user_task_managed_initial_warehouse_size": config.StringVariable(string(sdk.WarehouseSizeXLarge)), + "user_task_timeout_ms": config.IntegerVariable(1200000), + "user_task_minimum_trigger_interval_in_seconds": config.IntegerVariable(120), + "quoted_identifiers_ignore_case": config.BoolVariable(true), + "enable_console_output": 
config.BoolVariable(true), } resource.Test(t, resource.TestCase{ @@ -300,30 +356,39 @@ func TestAcc_StandardDatabase_Complete(t *testing.T) { Steps: []resource.TestStep{ { ConfigDirectory: acc.ConfigurationDirectory("TestAcc_StandardDatabase/complete-optionals-set"), - ConfigVariables: configVariables(id, comment, 20, 30, true, "en_US", sdk.StorageSerializationPolicyCompatible, sdk.LogLevelInfo, sdk.TraceLevelOnEvent, true, true), + ConfigVariables: completeConfigVariables, Check: resource.ComposeTestCheckFunc( resource.TestCheckResourceAttr("snowflake_standard_database.test", "name", id.Name()), resource.TestCheckResourceAttr("snowflake_standard_database.test", "is_transient", "false"), resource.TestCheckResourceAttr("snowflake_standard_database.test", "comment", comment), - resource.TestCheckResourceAttr("snowflake_standard_database.test", "data_retention_time_in_days.0.value", "20"), - resource.TestCheckResourceAttr("snowflake_standard_database.test", "max_data_extension_time_in_days.0.value", "30"), - resource.TestCheckResourceAttr("snowflake_standard_database.test", "external_volume.0.value", externalVolumeId.Name()), - resource.TestCheckResourceAttr("snowflake_standard_database.test", "catalog.0.value", catalogId.Name()), - resource.TestCheckResourceAttr("snowflake_standard_database.test", "replace_invalid_characters.0.value", "true"), - resource.TestCheckResourceAttr("snowflake_standard_database.test", "default_ddl_collation.0.value", "en_US"), - resource.TestCheckResourceAttr("snowflake_standard_database.test", "storage_serialization_policy.0.value", string(sdk.StorageSerializationPolicyCompatible)), - resource.TestCheckResourceAttr("snowflake_standard_database.test", "log_level.0.value", string(sdk.LogLevelInfo)), - resource.TestCheckResourceAttr("snowflake_standard_database.test", "trace_level.0.value", string(sdk.TraceLevelOnEvent)), - resource.TestCheckResourceAttr("snowflake_standard_database.test", "replication.#", "1"), + + 
resource.TestCheckResourceAttr("snowflake_standard_database.test", "data_retention_time_in_days", "20"), + resource.TestCheckResourceAttr("snowflake_standard_database.test", "max_data_extension_time_in_days", "30"), + resource.TestCheckResourceAttr("snowflake_standard_database.test", "external_volume", externalVolumeId.Name()), + resource.TestCheckResourceAttr("snowflake_standard_database.test", "catalog", catalogId.Name()), + resource.TestCheckResourceAttr("snowflake_standard_database.test", "replace_invalid_characters", "true"), + resource.TestCheckResourceAttr("snowflake_standard_database.test", "default_ddl_collation", "en_US"), + resource.TestCheckResourceAttr("snowflake_standard_database.test", "storage_serialization_policy", string(sdk.StorageSerializationPolicyCompatible)), + resource.TestCheckResourceAttr("snowflake_standard_database.test", "log_level", string(sdk.LogLevelInfo)), + resource.TestCheckResourceAttr("snowflake_standard_database.test", "trace_level", string(sdk.TraceLevelOnEvent)), + resource.TestCheckResourceAttr("snowflake_standard_database.test", "suspend_task_after_num_failures", "20"), + resource.TestCheckResourceAttr("snowflake_standard_database.test", "task_auto_retry_attempts", "20"), + resource.TestCheckResourceAttr("snowflake_standard_database.test", "user_task_managed_initial_warehouse_size", string(sdk.WarehouseSizeXLarge)), + resource.TestCheckResourceAttr("snowflake_standard_database.test", "user_task_timeout_ms", "1200000"), + resource.TestCheckResourceAttr("snowflake_standard_database.test", "user_task_minimum_trigger_interval_in_seconds", "120"), + resource.TestCheckResourceAttr("snowflake_standard_database.test", "quoted_identifiers_ignore_case", "true"), + resource.TestCheckResourceAttr("snowflake_standard_database.test", "enable_console_output", "true"), + + resource.TestCheckResourceAttr("snowflake_standard_database.test", "replication.0.#", "1"), resource.TestCheckResourceAttr("snowflake_standard_database.test", 
"replication.0.ignore_edition_check", "true"), - resource.TestCheckResourceAttr("snowflake_standard_database.test", "replication.0.enable_for_account.#", "1"), - resource.TestCheckResourceAttr("snowflake_standard_database.test", "replication.0.enable_for_account.0.account_identifier", secondaryAccountIdentifier), - resource.TestCheckResourceAttr("snowflake_standard_database.test", "replication.0.enable_for_account.0.with_failover", "true"), + resource.TestCheckResourceAttr("snowflake_standard_database.test", "replication.0.enable_to_account.#", "1"), + resource.TestCheckResourceAttr("snowflake_standard_database.test", "replication.0.enable_to_account.0.account_identifier", secondaryAccountIdentifier), + resource.TestCheckResourceAttr("snowflake_standard_database.test", "replication.0.enable_to_account.0.with_failover", "true"), ), }, { ConfigDirectory: acc.ConfigurationDirectory("TestAcc_StandardDatabase/complete-optionals-set"), - ConfigVariables: configVariables(id, comment, 20, 30, true, "en_US", sdk.StorageSerializationPolicyCompatible, sdk.LogLevelInfo, sdk.TraceLevelOnEvent, true, true), + ConfigVariables: completeConfigVariables, ResourceName: "snowflake_standard_database.test", ImportState: true, ImportStateVerify: true, @@ -355,36 +420,29 @@ func TestAcc_StandardDatabase_Update(t *testing.T) { } } - fullConfigVariables := func( - id sdk.AccountObjectIdentifier, - comment string, - dataRetention int, - maxDataExtension int, - replaceInvalidCharacters bool, - defaultDdlCollation string, - storageSerializationPolicy sdk.StorageSerializationPolicy, - logLevel sdk.LogLevel, - traceLevel sdk.TraceLevel, - withFailover bool, - ignoreEditionCheck bool, - ) config.Variables { - return config.Variables{ - "name": config.StringVariable(id.Name()), - "comment": config.StringVariable(comment), - "transient": config.BoolVariable(false), - "data_retention_time_in_days": config.IntegerVariable(dataRetention), - "max_data_extension_time_in_days": 
config.IntegerVariable(maxDataExtension), - "external_volume": config.StringVariable(externalVolumeId.Name()), - "catalog": config.StringVariable(catalogId.Name()), - "replace_invalid_characters": config.BoolVariable(replaceInvalidCharacters), - "default_ddl_collation": config.StringVariable(defaultDdlCollation), - "storage_serialization_policy": config.StringVariable(string(storageSerializationPolicy)), - "log_level": config.StringVariable(string(logLevel)), - "trace_level": config.StringVariable(string(traceLevel)), - "account_identifier": config.StringVariable(secondaryAccountIdentifier), - "with_failover": config.BoolVariable(withFailover), - "ignore_edition_check": config.BoolVariable(ignoreEditionCheck), - } + completeConfigVariables := config.Variables{ + "name": config.StringVariable(newId.Name()), + "comment": config.StringVariable(newComment), + "transient": config.BoolVariable(false), + "account_identifier": config.StringVariable(secondaryAccountIdentifier), + "with_failover": config.BoolVariable(true), + "ignore_edition_check": config.BoolVariable(true), + "data_retention_time_in_days": config.IntegerVariable(20), + "max_data_extension_time_in_days": config.IntegerVariable(30), + "external_volume": config.StringVariable(externalVolumeId.Name()), + "catalog": config.StringVariable(catalogId.Name()), + "replace_invalid_characters": config.BoolVariable(true), + "default_ddl_collation": config.StringVariable("en_US"), + "storage_serialization_policy": config.StringVariable(string(sdk.StorageSerializationPolicyCompatible)), + "log_level": config.StringVariable(string(sdk.LogLevelInfo)), + "trace_level": config.StringVariable(string(sdk.TraceLevelOnEvent)), + "suspend_task_after_num_failures": config.IntegerVariable(20), + "task_auto_retry_attempts": config.IntegerVariable(20), + "user_task_managed_initial_warehouse_size": config.StringVariable(string(sdk.WarehouseSizeXLarge)), + "user_task_timeout_ms": config.IntegerVariable(1200000), + 
"user_task_minimum_trigger_interval_in_seconds": config.IntegerVariable(120), + "quoted_identifiers_ignore_case": config.BoolVariable(true), + "enable_console_output": config.BoolVariable(true), } resource.Test(t, resource.TestCase{ @@ -401,25 +459,28 @@ func TestAcc_StandardDatabase_Update(t *testing.T) { }, { ConfigDirectory: acc.ConfigurationDirectory("TestAcc_StandardDatabase/complete-optionals-set"), - ConfigVariables: fullConfigVariables(newId, newComment, 20, 30, true, "en_US", sdk.StorageSerializationPolicyCompatible, sdk.LogLevelInfo, sdk.TraceLevelOnEvent, true, true), + ConfigVariables: completeConfigVariables, Check: resource.ComposeTestCheckFunc( resource.TestCheckResourceAttr("snowflake_standard_database.test", "name", newId.Name()), resource.TestCheckResourceAttr("snowflake_standard_database.test", "is_transient", "false"), resource.TestCheckResourceAttr("snowflake_standard_database.test", "comment", newComment), - resource.TestCheckResourceAttr("snowflake_standard_database.test", "data_retention_time_in_days.0.value", "20"), - resource.TestCheckResourceAttr("snowflake_standard_database.test", "max_data_extension_time_in_days.0.value", "30"), - resource.TestCheckResourceAttr("snowflake_standard_database.test", "external_volume.0.value", externalVolumeId.Name()), - resource.TestCheckResourceAttr("snowflake_standard_database.test", "catalog.0.value", catalogId.Name()), - resource.TestCheckResourceAttr("snowflake_standard_database.test", "replace_invalid_characters.0.value", "true"), - resource.TestCheckResourceAttr("snowflake_standard_database.test", "default_ddl_collation.0.value", "en_US"), - resource.TestCheckResourceAttr("snowflake_standard_database.test", "storage_serialization_policy.0.value", string(sdk.StorageSerializationPolicyCompatible)), - resource.TestCheckResourceAttr("snowflake_standard_database.test", "log_level.0.value", string(sdk.LogLevelInfo)), - resource.TestCheckResourceAttr("snowflake_standard_database.test", 
"trace_level.0.value", string(sdk.TraceLevelOnEvent)), - resource.TestCheckResourceAttr("snowflake_standard_database.test", "replication.#", "1"), - resource.TestCheckResourceAttr("snowflake_standard_database.test", "replication.0.ignore_edition_check", "true"), - resource.TestCheckResourceAttr("snowflake_standard_database.test", "replication.0.enable_for_account.#", "1"), - resource.TestCheckResourceAttr("snowflake_standard_database.test", "replication.0.enable_for_account.0.account_identifier", secondaryAccountIdentifier), - resource.TestCheckResourceAttr("snowflake_standard_database.test", "replication.0.enable_for_account.0.with_failover", "true"), + + resource.TestCheckResourceAttr("snowflake_standard_database.test", "data_retention_time_in_days", "20"), + resource.TestCheckResourceAttr("snowflake_standard_database.test", "max_data_extension_time_in_days", "30"), + resource.TestCheckResourceAttr("snowflake_standard_database.test", "external_volume", externalVolumeId.Name()), + resource.TestCheckResourceAttr("snowflake_standard_database.test", "catalog", catalogId.Name()), + resource.TestCheckResourceAttr("snowflake_standard_database.test", "replace_invalid_characters", "true"), + resource.TestCheckResourceAttr("snowflake_standard_database.test", "default_ddl_collation", "en_US"), + resource.TestCheckResourceAttr("snowflake_standard_database.test", "storage_serialization_policy", string(sdk.StorageSerializationPolicyCompatible)), + resource.TestCheckResourceAttr("snowflake_standard_database.test", "log_level", string(sdk.LogLevelInfo)), + resource.TestCheckResourceAttr("snowflake_standard_database.test", "trace_level", string(sdk.TraceLevelOnEvent)), + resource.TestCheckResourceAttr("snowflake_standard_database.test", "suspend_task_after_num_failures", "20"), + resource.TestCheckResourceAttr("snowflake_standard_database.test", "task_auto_retry_attempts", "20"), + resource.TestCheckResourceAttr("snowflake_standard_database.test", 
"user_task_managed_initial_warehouse_size", string(sdk.WarehouseSizeXLarge)), + resource.TestCheckResourceAttr("snowflake_standard_database.test", "user_task_timeout_ms", "1200000"), + resource.TestCheckResourceAttr("snowflake_standard_database.test", "user_task_minimum_trigger_interval_in_seconds", "120"), + resource.TestCheckResourceAttr("snowflake_standard_database.test", "quoted_identifiers_ignore_case", "true"), + resource.TestCheckResourceAttr("snowflake_standard_database.test", "enable_console_output", "true"), ), }, { @@ -441,9 +502,7 @@ func TestAcc_StandardDatabase_HierarchicalValues(t *testing.T) { } } - param, err := acc.Client(t).Parameters.ShowAccountParameter(context.Background(), sdk.AccountParameterMaxDataExtensionTimeInDays) - require.NoError(t, err) - + paramDefault := new(string) var revertAccountParameterToDefault func() resource.Test(t, resource.TestCase{ @@ -455,21 +514,24 @@ func TestAcc_StandardDatabase_HierarchicalValues(t *testing.T) { CheckDestroy: acc.CheckDestroy(t, resources.StandardDatabase), Steps: []resource.TestStep{ { + PreConfig: func() { + *paramDefault = acc.TestClient().Parameter.GetAccountParameter(t, sdk.AccountParameterMaxDataExtensionTimeInDays).Default + }, ConfigDirectory: acc.ConfigurationDirectory("TestAcc_StandardDatabase/basic"), ConfigVariables: configVariables(id, comment), Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr("snowflake_standard_database.test", "max_data_extension_time_in_days.0.value", param.Default), + resource.TestCheckResourceAttrPtr("snowflake_standard_database.test", "max_data_extension_time_in_days", paramDefault), ), }, { PreConfig: func() { - revertAccountParameterToDefault = acc.TestClient().Parameter.UpdateAccountParameterTemporarily(t, sdk.AccountParameterMaxDataExtensionTimeInDays, strconv.Itoa(50)) + revertAccountParameterToDefault = acc.TestClient().Parameter.UpdateAccountParameterTemporarily(t, sdk.AccountParameterMaxDataExtensionTimeInDays, "50") 
t.Cleanup(revertAccountParameterToDefault) }, ConfigDirectory: acc.ConfigurationDirectory("TestAcc_StandardDatabase/basic"), ConfigVariables: configVariables(id, comment), Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr("snowflake_standard_database.test", "max_data_extension_time_in_days.0.value", "50"), + resource.TestCheckResourceAttr("snowflake_standard_database.test", "max_data_extension_time_in_days", "50"), ), }, { @@ -479,7 +541,7 @@ func TestAcc_StandardDatabase_HierarchicalValues(t *testing.T) { ConfigDirectory: acc.ConfigurationDirectory("TestAcc_StandardDatabase/basic"), ConfigVariables: configVariables(id, comment), Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr("snowflake_standard_database.test", "max_data_extension_time_in_days.0.value", param.Default), + resource.TestCheckResourceAttrPtr("snowflake_standard_database.test", "max_data_extension_time_in_days", paramDefault), ), }, }, @@ -528,9 +590,9 @@ func TestAcc_StandardDatabase_Replication(t *testing.T) { resource.TestCheckResourceAttr("snowflake_standard_database.test", "name", id.Name()), resource.TestCheckResourceAttr("snowflake_standard_database.test", "replication.#", "1"), resource.TestCheckResourceAttr("snowflake_standard_database.test", "replication.0.ignore_edition_check", "true"), - resource.TestCheckResourceAttr("snowflake_standard_database.test", "replication.0.enable_for_account.#", "1"), - resource.TestCheckResourceAttr("snowflake_standard_database.test", "replication.0.enable_for_account.0.account_identifier", secondaryAccountIdentifier), - resource.TestCheckResourceAttr("snowflake_standard_database.test", "replication.0.enable_for_account.0.with_failover", "true"), + resource.TestCheckResourceAttr("snowflake_standard_database.test", "replication.0.enable_to_account.#", "1"), + resource.TestCheckResourceAttr("snowflake_standard_database.test", "replication.0.enable_to_account.0.account_identifier", secondaryAccountIdentifier), + 
resource.TestCheckResourceAttr("snowflake_standard_database.test", "replication.0.enable_to_account.0.with_failover", "true"), ), }, { @@ -540,9 +602,9 @@ func TestAcc_StandardDatabase_Replication(t *testing.T) { resource.TestCheckResourceAttr("snowflake_standard_database.test", "name", id.Name()), resource.TestCheckResourceAttr("snowflake_standard_database.test", "replication.#", "1"), resource.TestCheckResourceAttr("snowflake_standard_database.test", "replication.0.ignore_edition_check", "true"), - resource.TestCheckResourceAttr("snowflake_standard_database.test", "replication.0.enable_for_account.#", "1"), - resource.TestCheckResourceAttr("snowflake_standard_database.test", "replication.0.enable_for_account.0.account_identifier", secondaryAccountIdentifier), - resource.TestCheckResourceAttr("snowflake_standard_database.test", "replication.0.enable_for_account.0.with_failover", "false"), + resource.TestCheckResourceAttr("snowflake_standard_database.test", "replication.0.enable_to_account.#", "1"), + resource.TestCheckResourceAttr("snowflake_standard_database.test", "replication.0.enable_to_account.0.account_identifier", secondaryAccountIdentifier), + resource.TestCheckResourceAttr("snowflake_standard_database.test", "replication.0.enable_to_account.0.with_failover", "false"), ), }, { @@ -560,9 +622,9 @@ func TestAcc_StandardDatabase_Replication(t *testing.T) { resource.TestCheckResourceAttr("snowflake_standard_database.test", "name", id.Name()), resource.TestCheckResourceAttr("snowflake_standard_database.test", "replication.#", "1"), resource.TestCheckResourceAttr("snowflake_standard_database.test", "replication.0.ignore_edition_check", "true"), - resource.TestCheckResourceAttr("snowflake_standard_database.test", "replication.0.enable_for_account.#", "1"), - resource.TestCheckResourceAttr("snowflake_standard_database.test", "replication.0.enable_for_account.0.account_identifier", secondaryAccountIdentifier), - 
resource.TestCheckResourceAttr("snowflake_standard_database.test", "replication.0.enable_for_account.0.with_failover", "true"), + resource.TestCheckResourceAttr("snowflake_standard_database.test", "replication.0.enable_to_account.#", "1"), + resource.TestCheckResourceAttr("snowflake_standard_database.test", "replication.0.enable_to_account.0.account_identifier", secondaryAccountIdentifier), + resource.TestCheckResourceAttr("snowflake_standard_database.test", "replication.0.enable_to_account.0.with_failover", "true"), ), }, { diff --git a/pkg/resources/testdata/TestAcc_SecondaryDatabase/complete-optionals-set/variables.tf b/pkg/resources/testdata/TestAcc_SecondaryDatabase/complete-optionals-set/variables.tf index 553368861b..534c20a278 100644 --- a/pkg/resources/testdata/TestAcc_SecondaryDatabase/complete-optionals-set/variables.tf +++ b/pkg/resources/testdata/TestAcc_SecondaryDatabase/complete-optionals-set/variables.tf @@ -6,6 +6,10 @@ variable "as_replica_of" { type = string } +variable "comment" { + type = string +} + variable "data_retention_time_in_days" { type = string } @@ -42,10 +46,6 @@ variable "trace_level" { type = string } -variable "comment" { - type = string -} - variable "suspend_task_after_num_failures" { type = number } diff --git a/pkg/resources/testdata/TestAcc_SecondaryDatabase/complete-optionals-unset/test.tf b/pkg/resources/testdata/TestAcc_SecondaryDatabase/complete-optionals-unset/test.tf index 46ab0af809..770f36fc00 100644 --- a/pkg/resources/testdata/TestAcc_SecondaryDatabase/complete-optionals-unset/test.tf +++ b/pkg/resources/testdata/TestAcc_SecondaryDatabase/complete-optionals-unset/test.tf @@ -1,4 +1,4 @@ resource "snowflake_secondary_database" "test" { - name = var.name - as_replica_of = var.as_replica_of + name = var.name + as_replica_of = var.as_replica_of } diff --git a/pkg/resources/testdata/TestAcc_SharedDatabase/complete/test.tf b/pkg/resources/testdata/TestAcc_SharedDatabase/complete/test.tf index 5c2f7493b6..c51c870e38 
100644 --- a/pkg/resources/testdata/TestAcc_SharedDatabase/complete/test.tf +++ b/pkg/resources/testdata/TestAcc_SharedDatabase/complete/test.tf @@ -1,12 +1,20 @@ resource "snowflake_shared_database" "test" { name = var.name from_share = var.from_share - external_volume = var.external_volume - catalog = var.catalog - replace_invalid_characters = var.replace_invalid_characters - default_ddl_collation = var.default_ddl_collation - storage_serialization_policy = var.storage_serialization_policy - log_level = var.log_level - trace_level = var.trace_level comment = var.comment + + external_volume = var.external_volume + catalog = var.catalog + replace_invalid_characters = var.replace_invalid_characters + default_ddl_collation = var.default_ddl_collation + storage_serialization_policy = var.storage_serialization_policy + log_level = var.log_level + trace_level = var.trace_level + suspend_task_after_num_failures = var.suspend_task_after_num_failures + task_auto_retry_attempts = var.task_auto_retry_attempts + user_task_managed_initial_warehouse_size = var.user_task_managed_initial_warehouse_size + user_task_timeout_ms = var.user_task_timeout_ms + user_task_minimum_trigger_interval_in_seconds = var.user_task_minimum_trigger_interval_in_seconds + quoted_identifiers_ignore_case = var.quoted_identifiers_ignore_case + enable_console_output = var.enable_console_output } diff --git a/pkg/resources/testdata/TestAcc_SharedDatabase/complete/variables.tf b/pkg/resources/testdata/TestAcc_SharedDatabase/complete/variables.tf index b704eb8dfe..03f5793ff2 100644 --- a/pkg/resources/testdata/TestAcc_SharedDatabase/complete/variables.tf +++ b/pkg/resources/testdata/TestAcc_SharedDatabase/complete/variables.tf @@ -6,6 +6,10 @@ variable "from_share" { type = string } +variable "comment" { + type = string +} + variable "external_volume" { type = string } @@ -15,7 +19,7 @@ variable "catalog" { } variable "replace_invalid_characters" { - type = bool + type = string } variable 
"default_ddl_collation" { @@ -34,6 +38,30 @@ variable "trace_level" { type = string } -variable "comment" { +variable "suspend_task_after_num_failures" { + type = number +} + +variable "task_auto_retry_attempts" { + type = number +} + +variable "user_task_managed_initial_warehouse_size" { type = string } + +variable "user_task_timeout_ms" { + type = number +} + +variable "user_task_minimum_trigger_interval_in_seconds" { + type = number +} + +variable "quoted_identifiers_ignore_case" { + type = bool +} + +variable "enable_console_output" { + type = bool +} diff --git a/pkg/resources/testdata/TestAcc_StandardDatabase/complete-optionals-set/test.tf b/pkg/resources/testdata/TestAcc_StandardDatabase/complete-optionals-set/test.tf index e4cb80fa75..b8957d25f4 100644 --- a/pkg/resources/testdata/TestAcc_StandardDatabase/complete-optionals-set/test.tf +++ b/pkg/resources/testdata/TestAcc_StandardDatabase/complete-optionals-set/test.tf @@ -3,39 +3,25 @@ resource "snowflake_standard_database" "test" { comment = var.comment is_transient = var.transient - data_retention_time_in_days { - value = var.data_retention_time_in_days - } - - data_retention_time_in_days_2 = var.data_retention_time_in_days - - max_data_extension_time_in_days { - value = var.max_data_extension_time_in_days - } - external_volume { - value = var.external_volume - } - catalog { - value = var.catalog - } - replace_invalid_characters { - value = var.replace_invalid_characters - } - default_ddl_collation { - value = var.default_ddl_collation - } - storage_serialization_policy { - value = var.storage_serialization_policy - } - log_level { - value = var.log_level - } - trace_level { - value = var.trace_level - } + data_retention_time_in_days = var.data_retention_time_in_days + max_data_extension_time_in_days = var.max_data_extension_time_in_days + external_volume = var.external_volume + catalog = var.catalog + replace_invalid_characters = var.replace_invalid_characters + default_ddl_collation = 
var.default_ddl_collation + storage_serialization_policy = var.storage_serialization_policy + log_level = var.log_level + trace_level = var.trace_level + suspend_task_after_num_failures = var.suspend_task_after_num_failures + task_auto_retry_attempts = var.task_auto_retry_attempts + user_task_managed_initial_warehouse_size = var.user_task_managed_initial_warehouse_size + user_task_timeout_ms = var.user_task_timeout_ms + user_task_minimum_trigger_interval_in_seconds = var.user_task_minimum_trigger_interval_in_seconds + quoted_identifiers_ignore_case = var.quoted_identifiers_ignore_case + enable_console_output = var.enable_console_output replication { - enable_for_account { + enable_to_account { account_identifier = var.account_identifier with_failover = var.with_failover } diff --git a/pkg/resources/testdata/TestAcc_StandardDatabase/complete-optionals-set/variables.tf b/pkg/resources/testdata/TestAcc_StandardDatabase/complete-optionals-set/variables.tf index fa7cd2ab9d..d450c98e35 100644 --- a/pkg/resources/testdata/TestAcc_StandardDatabase/complete-optionals-set/variables.tf +++ b/pkg/resources/testdata/TestAcc_StandardDatabase/complete-optionals-set/variables.tf @@ -6,6 +6,22 @@ variable "transient" { type = bool } +variable "comment" { + type = string +} + +variable "account_identifier" { + type = string +} + +variable "with_failover" { + type = bool +} + +variable "ignore_edition_check" { + type = bool +} + variable "data_retention_time_in_days" { type = string } @@ -42,18 +58,30 @@ variable "trace_level" { type = string } -variable "comment" { - type = string +variable "suspend_task_after_num_failures" { + type = number } -variable "account_identifier" { +variable "task_auto_retry_attempts" { + type = number +} + +variable "user_task_managed_initial_warehouse_size" { type = string } -variable "with_failover" { +variable "user_task_timeout_ms" { + type = number +} + +variable "user_task_minimum_trigger_interval_in_seconds" { + type = number +} + +variable 
"quoted_identifiers_ignore_case" { type = bool } -variable "ignore_edition_check" { +variable "enable_console_output" { type = bool } diff --git a/pkg/resources/testdata/TestAcc_StandardDatabase/replication/test.tf b/pkg/resources/testdata/TestAcc_StandardDatabase/replication/test.tf index 91586873cd..e1043debda 100644 --- a/pkg/resources/testdata/TestAcc_StandardDatabase/replication/test.tf +++ b/pkg/resources/testdata/TestAcc_StandardDatabase/replication/test.tf @@ -2,7 +2,7 @@ resource "snowflake_standard_database" "test" { name = var.name replication { - enable_for_account { + enable_to_account { account_identifier = var.account_identifier with_failover = var.with_failover } diff --git a/pkg/sdk/databases.go b/pkg/sdk/databases.go index bdaf321625..e676148167 100644 --- a/pkg/sdk/databases.go +++ b/pkg/sdk/databases.go @@ -176,8 +176,6 @@ type CreateDatabaseOptions struct { UserTaskMinimumTriggerIntervalInSeconds *int `ddl:"parameter" sql:"USER_TASK_MINIMUM_TRIGGER_INTERVAL_IN_SECONDS"` QuotedIdentifiersIgnoreCase *bool `ddl:"parameter" sql:"QUOTED_IDENTIFIERS_IGNORE_CASE"` EnableConsoleOutput *bool `ddl:"parameter" sql:"ENABLE_CONSOLE_OUTPUT"` - // TODO: Preview feature (document in pr desc) - //MetricLevel *MetricLevel `ddl:"parameter" sql:"METRIC_LEVEL"` Comment *string `ddl:"parameter,single_quotes" sql:"COMMENT"` Tag []TagAssociation `ddl:"keyword,parentheses" sql:"TAG"` @@ -323,8 +321,6 @@ type CreateSecondaryDatabaseOptions struct { UserTaskMinimumTriggerIntervalInSeconds *int `ddl:"parameter" sql:"USER_TASK_MINIMUM_TRIGGER_INTERVAL_IN_SECONDS"` QuotedIdentifiersIgnoreCase *bool `ddl:"parameter" sql:"QUOTED_IDENTIFIERS_IGNORE_CASE"` EnableConsoleOutput *bool `ddl:"parameter" sql:"ENABLE_CONSOLE_OUTPUT"` - // TODO: Preview feature (document in pr desc) - //MetricLevel *MetricLevel `ddl:"parameter" sql:"METRIC_LEVEL"` Comment *string `ddl:"parameter,single_quotes" sql:"COMMENT"` } diff --git a/pkg/sdk/testint/databases_integration_test.go 
b/pkg/sdk/testint/databases_integration_test.go index 2246fe31ba..9fbe65e62f 100644 --- a/pkg/sdk/testint/databases_integration_test.go +++ b/pkg/sdk/testint/databases_integration_test.go @@ -4,6 +4,8 @@ import ( "fmt" "testing" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/helpers" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk/internal/collections" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/helpers/random" @@ -112,40 +114,28 @@ func TestInt_DatabasesCreate(t *testing.T) { assert.Equal(t, databaseId.Name(), database.Name) assert.Equal(t, comment, database.Comment) - params, err := client.Parameters.ShowParameters(ctx, &sdk.ShowParametersOptions{ - In: &sdk.ParametersIn{ - Database: databaseId, - }, - }) - - assertParameterEquals := func(t *testing.T, parameterName sdk.ObjectParameter, expected string) { + params := testClientHelper().Parameter.ShowDatabaseParameters(t, databaseId) + assertParameterEquals := func(t *testing.T, parameterName sdk.AccountParameter, expected string) { t.Helper() - param, err := collections.FindOne(params, func(param *sdk.Parameter) bool { return param.Key == string(parameterName) }) - assert.NoError(t, err) - assert.NotNil(t, param) - if param != nil { - param := *param - assert.Equal(t, "DATABASE", string(param.Level)) - assert.Equal(t, expected, param.Value) - } + assert.Equal(t, expected, helpers.FindParameter(t, params, parameterName).Value) } - assertParameterEquals(t, sdk.ObjectParameterDataRetentionTimeInDays, "0") - assertParameterEquals(t, sdk.ObjectParameterMaxDataExtensionTimeInDays, "10") - assertParameterEquals(t, sdk.ObjectParameterDefaultDDLCollation, "en_US") - assertParameterEquals(t, sdk.ObjectParameterExternalVolume, externalVolume.Name()) - assertParameterEquals(t, sdk.ObjectParameterCatalog, catalog.Name()) - assertParameterEquals(t, sdk.ObjectParameterLogLevel, string(sdk.LogLevelInfo)) - assertParameterEquals(t, 
sdk.ObjectParameterTraceLevel, string(sdk.TraceLevelOnEvent)) - assertParameterEquals(t, sdk.ObjectParameterReplaceInvalidCharacters, "true") - assertParameterEquals(t, sdk.ObjectParameterStorageSerializationPolicy, string(sdk.StorageSerializationPolicyCompatible)) - assertParameterEquals(t, sdk.ObjectParameterSuspendTaskAfterNumFailures, "10") - assertParameterEquals(t, sdk.ObjectParameterTaskAutoRetryAttempts, "10") - assertParameterEquals(t, sdk.ObjectParameterUserTaskManagedInitialWarehouseSize, string(sdk.WarehouseSizeMedium)) - assertParameterEquals(t, sdk.ObjectParameterUserTaskTimeoutMs, "12000") - assertParameterEquals(t, sdk.ObjectParameterUserTaskMinimumTriggerIntervalInSeconds, "30") - assertParameterEquals(t, sdk.ObjectParameterQuotedIdentifiersIgnoreCase, "true") - assertParameterEquals(t, sdk.ObjectParameterEnableConsoleOutput, "true") + assertParameterEquals(t, sdk.AccountParameterDataRetentionTimeInDays, "0") + assertParameterEquals(t, sdk.AccountParameterMaxDataExtensionTimeInDays, "10") + assertParameterEquals(t, sdk.AccountParameterDefaultDDLCollation, "en_US") + assertParameterEquals(t, sdk.AccountParameterExternalVolume, externalVolume.Name()) + assertParameterEquals(t, sdk.AccountParameterCatalog, catalog.Name()) + assertParameterEquals(t, sdk.AccountParameterLogLevel, string(sdk.LogLevelInfo)) + assertParameterEquals(t, sdk.AccountParameterTraceLevel, string(sdk.TraceLevelOnEvent)) + assertParameterEquals(t, sdk.AccountParameterReplaceInvalidCharacters, "true") + assertParameterEquals(t, sdk.AccountParameterStorageSerializationPolicy, string(sdk.StorageSerializationPolicyCompatible)) + assertParameterEquals(t, sdk.AccountParameterSuspendTaskAfterNumFailures, "10") + assertParameterEquals(t, sdk.AccountParameterTaskAutoRetryAttempts, "10") + assertParameterEquals(t, sdk.AccountParameterUserTaskManagedInitialWarehouseSize, string(sdk.WarehouseSizeMedium)) + assertParameterEquals(t, sdk.AccountParameterUserTaskTimeoutMs, "12000") + 
assertParameterEquals(t, sdk.AccountParameterUserTaskMinimumTriggerIntervalInSeconds, "30") + assertParameterEquals(t, sdk.AccountParameterQuotedIdentifiersIgnoreCase, "true") + assertParameterEquals(t, sdk.AccountParameterEnableConsoleOutput, "true") tag1Value, err := client.SystemFunctions.GetTag(ctx, tagTest.ID(), database.ID(), sdk.ObjectTypeDatabase) require.NoError(t, err) @@ -236,38 +226,26 @@ func TestInt_DatabasesCreateShared(t *testing.T) { assert.Equal(t, databaseId.Name(), database.Name) assert.Equal(t, comment, database.Comment) - params, err := client.Parameters.ShowParameters(ctx, &sdk.ShowParametersOptions{ - In: &sdk.ParametersIn{ - Database: databaseId, - }, - }) - - assertParameterEquals := func(t *testing.T, parameterName sdk.ObjectParameter, expected string) { + params := testClientHelper().Parameter.ShowDatabaseParameters(t, databaseId) + assertParameterEquals := func(t *testing.T, parameterName sdk.AccountParameter, expected string) { t.Helper() - param, err := collections.FindOne(params, func(param *sdk.Parameter) bool { return param.Key == string(parameterName) }) - assert.NoError(t, err) - assert.NotNil(t, param) - if param != nil { - param := *param - assert.Equal(t, "DATABASE", string(param.Level)) - assert.Equal(t, expected, param.Value) - } + assert.Equal(t, expected, helpers.FindParameter(t, params, parameterName).Value) } - assertParameterEquals(t, sdk.ObjectParameterDefaultDDLCollation, "en_US") - assertParameterEquals(t, sdk.ObjectParameterExternalVolume, externalVolume.Name()) - assertParameterEquals(t, sdk.ObjectParameterCatalog, catalog.Name()) - assertParameterEquals(t, sdk.ObjectParameterLogLevel, string(sdk.LogLevelDebug)) - assertParameterEquals(t, sdk.ObjectParameterTraceLevel, string(sdk.TraceLevelAlways)) - assertParameterEquals(t, sdk.ObjectParameterReplaceInvalidCharacters, "true") - assertParameterEquals(t, sdk.ObjectParameterStorageSerializationPolicy, string(sdk.StorageSerializationPolicyOptimized)) - 
assertParameterEquals(t, sdk.ObjectParameterSuspendTaskAfterNumFailures, "10") - assertParameterEquals(t, sdk.ObjectParameterTaskAutoRetryAttempts, "10") - assertParameterEquals(t, sdk.ObjectParameterUserTaskManagedInitialWarehouseSize, string(sdk.WarehouseSizeMedium)) - assertParameterEquals(t, sdk.ObjectParameterUserTaskTimeoutMs, "12000") - assertParameterEquals(t, sdk.ObjectParameterUserTaskMinimumTriggerIntervalInSeconds, "30") - assertParameterEquals(t, sdk.ObjectParameterQuotedIdentifiersIgnoreCase, "true") - assertParameterEquals(t, sdk.ObjectParameterEnableConsoleOutput, "true") + assertParameterEquals(t, sdk.AccountParameterDefaultDDLCollation, "en_US") + assertParameterEquals(t, sdk.AccountParameterExternalVolume, externalVolume.Name()) + assertParameterEquals(t, sdk.AccountParameterCatalog, catalog.Name()) + assertParameterEquals(t, sdk.AccountParameterLogLevel, string(sdk.LogLevelDebug)) + assertParameterEquals(t, sdk.AccountParameterTraceLevel, string(sdk.TraceLevelAlways)) + assertParameterEquals(t, sdk.AccountParameterReplaceInvalidCharacters, "true") + assertParameterEquals(t, sdk.AccountParameterStorageSerializationPolicy, string(sdk.StorageSerializationPolicyOptimized)) + assertParameterEquals(t, sdk.AccountParameterSuspendTaskAfterNumFailures, "10") + assertParameterEquals(t, sdk.AccountParameterTaskAutoRetryAttempts, "10") + assertParameterEquals(t, sdk.AccountParameterUserTaskManagedInitialWarehouseSize, string(sdk.WarehouseSizeMedium)) + assertParameterEquals(t, sdk.AccountParameterUserTaskTimeoutMs, "12000") + assertParameterEquals(t, sdk.AccountParameterUserTaskMinimumTriggerIntervalInSeconds, "30") + assertParameterEquals(t, sdk.AccountParameterQuotedIdentifiersIgnoreCase, "true") + assertParameterEquals(t, sdk.AccountParameterEnableConsoleOutput, "true") tag1Value, err := client.SystemFunctions.GetTag(ctx, testTag.ID(), database.ID(), sdk.ObjectTypeDatabase) require.NoError(t, err) @@ -332,40 +310,28 @@ func 
TestInt_DatabasesCreateSecondary(t *testing.T) { assert.Equal(t, databaseId.Name(), database.Name) assert.Equal(t, comment, database.Comment) - params, err := client.Parameters.ShowParameters(ctx, &sdk.ShowParametersOptions{ - In: &sdk.ParametersIn{ - Database: databaseId, - }, - }) - - assertParameterEquals := func(t *testing.T, parameterName sdk.ObjectParameter, expected string) { + params := testClientHelper().Parameter.ShowDatabaseParameters(t, databaseId) + assertParameterEquals := func(t *testing.T, parameterName sdk.AccountParameter, expected string) { t.Helper() - param, err := collections.FindOne(params, func(param *sdk.Parameter) bool { return param.Key == string(parameterName) }) - assert.NoError(t, err) - assert.NotNil(t, param) - if param != nil { - param := *param - assert.Equal(t, "DATABASE", string(param.Level)) - assert.Equal(t, expected, param.Value) - } + assert.Equal(t, expected, helpers.FindParameter(t, params, parameterName).Value) } - assertParameterEquals(t, sdk.ObjectParameterDataRetentionTimeInDays, "10") - assertParameterEquals(t, sdk.ObjectParameterMaxDataExtensionTimeInDays, "10") - assertParameterEquals(t, sdk.ObjectParameterDefaultDDLCollation, "en_US") - assertParameterEquals(t, sdk.ObjectParameterExternalVolume, externalVolume.Name()) - assertParameterEquals(t, sdk.ObjectParameterCatalog, catalog.Name()) - assertParameterEquals(t, sdk.ObjectParameterLogLevel, string(sdk.LogLevelDebug)) - assertParameterEquals(t, sdk.ObjectParameterTraceLevel, string(sdk.TraceLevelAlways)) - assertParameterEquals(t, sdk.ObjectParameterReplaceInvalidCharacters, "true") - assertParameterEquals(t, sdk.ObjectParameterStorageSerializationPolicy, string(sdk.StorageSerializationPolicyOptimized)) - assertParameterEquals(t, sdk.ObjectParameterSuspendTaskAfterNumFailures, "10") - assertParameterEquals(t, sdk.ObjectParameterTaskAutoRetryAttempts, "10") - assertParameterEquals(t, sdk.ObjectParameterUserTaskManagedInitialWarehouseSize, 
string(sdk.WarehouseSizeMedium)) - assertParameterEquals(t, sdk.ObjectParameterUserTaskTimeoutMs, "12000") - assertParameterEquals(t, sdk.ObjectParameterUserTaskMinimumTriggerIntervalInSeconds, "30") - assertParameterEquals(t, sdk.ObjectParameterQuotedIdentifiersIgnoreCase, "true") - assertParameterEquals(t, sdk.ObjectParameterEnableConsoleOutput, "true") + assertParameterEquals(t, sdk.AccountParameterDataRetentionTimeInDays, "10") + assertParameterEquals(t, sdk.AccountParameterMaxDataExtensionTimeInDays, "10") + assertParameterEquals(t, sdk.AccountParameterDefaultDDLCollation, "en_US") + assertParameterEquals(t, sdk.AccountParameterExternalVolume, externalVolume.Name()) + assertParameterEquals(t, sdk.AccountParameterCatalog, catalog.Name()) + assertParameterEquals(t, sdk.AccountParameterLogLevel, string(sdk.LogLevelDebug)) + assertParameterEquals(t, sdk.AccountParameterTraceLevel, string(sdk.TraceLevelAlways)) + assertParameterEquals(t, sdk.AccountParameterReplaceInvalidCharacters, "true") + assertParameterEquals(t, sdk.AccountParameterStorageSerializationPolicy, string(sdk.StorageSerializationPolicyOptimized)) + assertParameterEquals(t, sdk.AccountParameterSuspendTaskAfterNumFailures, "10") + assertParameterEquals(t, sdk.AccountParameterTaskAutoRetryAttempts, "10") + assertParameterEquals(t, sdk.AccountParameterUserTaskManagedInitialWarehouseSize, string(sdk.WarehouseSizeMedium)) + assertParameterEquals(t, sdk.AccountParameterUserTaskTimeoutMs, "12000") + assertParameterEquals(t, sdk.AccountParameterUserTaskMinimumTriggerIntervalInSeconds, "30") + assertParameterEquals(t, sdk.AccountParameterQuotedIdentifiersIgnoreCase, "true") + assertParameterEquals(t, sdk.AccountParameterEnableConsoleOutput, "true") } func TestInt_DatabasesAlter(t *testing.T) { @@ -373,27 +339,9 @@ func TestInt_DatabasesAlter(t *testing.T) { secondaryClient := testSecondaryClient(t) ctx := testContext(t) - queryParametersForDatabase := func(t *testing.T, id sdk.AccountObjectIdentifier) 
[]*sdk.Parameter { - t.Helper() - params, err := client.Parameters.ShowParameters(ctx, &sdk.ShowParametersOptions{ - In: &sdk.ParametersIn{ - Database: id, - }, - }) - require.NoError(t, err) - return params - } - - assertDatabaseParameterEquals := func(t *testing.T, params []*sdk.Parameter, parameterName sdk.ObjectParameter, expected string) { + assertDatabaseParameterEquals := func(t *testing.T, params []*sdk.Parameter, parameterName sdk.AccountParameter, expected string) { t.Helper() - param, err := collections.FindOne(params, func(param *sdk.Parameter) bool { return param.Key == string(parameterName) }) - assert.NoError(t, err) - assert.NotNil(t, param) - if param != nil { - param := *param - assert.Equal(t, "DATABASE", string(param.Level)) - assert.Equal(t, expected, param.Value) - } + assert.Equal(t, expected, helpers.FindParameter(t, params, parameterName).Value) } assertDatabaseParameterEqualsToDefaultValue := func(t *testing.T, params []*sdk.Parameter, parameterName sdk.ObjectParameter) { @@ -544,24 +492,23 @@ func TestInt_DatabasesAlter(t *testing.T) { }) require.NoError(t, err) - params := queryParametersForDatabase(t, databaseTest.ID()) - - assertDatabaseParameterEquals(t, params, sdk.ObjectParameterDataRetentionTimeInDays, "42") - assertDatabaseParameterEquals(t, params, sdk.ObjectParameterMaxDataExtensionTimeInDays, "42") - assertDatabaseParameterEquals(t, params, sdk.ObjectParameterExternalVolume, externalVolumeTest.Name()) - assertDatabaseParameterEquals(t, params, sdk.ObjectParameterCatalog, catalogIntegrationTest.Name()) - assertDatabaseParameterEquals(t, params, sdk.ObjectParameterReplaceInvalidCharacters, "true") - assertDatabaseParameterEquals(t, params, sdk.ObjectParameterDefaultDDLCollation, "en_US") - assertDatabaseParameterEquals(t, params, sdk.ObjectParameterStorageSerializationPolicy, string(sdk.StorageSerializationPolicyCompatible)) - assertDatabaseParameterEquals(t, params, sdk.ObjectParameterLogLevel, string(sdk.LogLevelInfo)) - 
assertDatabaseParameterEquals(t, params, sdk.ObjectParameterTraceLevel, string(sdk.TraceLevelOnEvent)) - assertDatabaseParameterEquals(t, params, sdk.ObjectParameterSuspendTaskAfterNumFailures, "10") - assertDatabaseParameterEquals(t, params, sdk.ObjectParameterTaskAutoRetryAttempts, "10") - assertDatabaseParameterEquals(t, params, sdk.ObjectParameterUserTaskManagedInitialWarehouseSize, string(sdk.WarehouseSizeMedium)) - assertDatabaseParameterEquals(t, params, sdk.ObjectParameterUserTaskTimeoutMs, "12000") - assertDatabaseParameterEquals(t, params, sdk.ObjectParameterUserTaskMinimumTriggerIntervalInSeconds, "30") - assertDatabaseParameterEquals(t, params, sdk.ObjectParameterQuotedIdentifiersIgnoreCase, "true") - assertDatabaseParameterEquals(t, params, sdk.ObjectParameterEnableConsoleOutput, "true") + params := testClientHelper().Parameter.ShowDatabaseParameters(t, databaseTest.ID()) + assertDatabaseParameterEquals(t, params, sdk.AccountParameterDataRetentionTimeInDays, "42") + assertDatabaseParameterEquals(t, params, sdk.AccountParameterMaxDataExtensionTimeInDays, "42") + assertDatabaseParameterEquals(t, params, sdk.AccountParameterExternalVolume, externalVolumeTest.Name()) + assertDatabaseParameterEquals(t, params, sdk.AccountParameterCatalog, catalogIntegrationTest.Name()) + assertDatabaseParameterEquals(t, params, sdk.AccountParameterReplaceInvalidCharacters, "true") + assertDatabaseParameterEquals(t, params, sdk.AccountParameterDefaultDDLCollation, "en_US") + assertDatabaseParameterEquals(t, params, sdk.AccountParameterStorageSerializationPolicy, string(sdk.StorageSerializationPolicyCompatible)) + assertDatabaseParameterEquals(t, params, sdk.AccountParameterLogLevel, string(sdk.LogLevelInfo)) + assertDatabaseParameterEquals(t, params, sdk.AccountParameterTraceLevel, string(sdk.TraceLevelOnEvent)) + assertDatabaseParameterEquals(t, params, sdk.AccountParameterSuspendTaskAfterNumFailures, "10") + assertDatabaseParameterEquals(t, params, 
sdk.AccountParameterTaskAutoRetryAttempts, "10") + assertDatabaseParameterEquals(t, params, sdk.AccountParameterUserTaskManagedInitialWarehouseSize, string(sdk.WarehouseSizeMedium)) + assertDatabaseParameterEquals(t, params, sdk.AccountParameterUserTaskTimeoutMs, "12000") + assertDatabaseParameterEquals(t, params, sdk.AccountParameterUserTaskMinimumTriggerIntervalInSeconds, "30") + assertDatabaseParameterEquals(t, params, sdk.AccountParameterQuotedIdentifiersIgnoreCase, "true") + assertDatabaseParameterEquals(t, params, sdk.AccountParameterEnableConsoleOutput, "true") err = client.Databases.Alter(ctx, databaseTest.ID(), &sdk.AlterDatabaseOptions{ Unset: &sdk.DatabaseUnset{ @@ -585,7 +532,7 @@ func TestInt_DatabasesAlter(t *testing.T) { }) require.NoError(t, err) - params = queryParametersForDatabase(t, databaseTest.ID()) + params = testClientHelper().Parameter.ShowDatabaseParameters(t, databaseTest.ID()) assertDatabaseParameterEqualsToDefaultValue(t, params, sdk.ObjectParameterDataRetentionTimeInDays) assertDatabaseParameterEqualsToDefaultValue(t, params, sdk.ObjectParameterMaxDataExtensionTimeInDays) assertDatabaseParameterEqualsToDefaultValue(t, params, sdk.ObjectParameterExternalVolume) diff --git a/v1-preparations/ESSENTIAL_GA_OBJECTS.MD b/v1-preparations/ESSENTIAL_GA_OBJECTS.MD index e518d0c7f3..d175228f57 100644 --- a/v1-preparations/ESSENTIAL_GA_OBJECTS.MD +++ b/v1-preparations/ESSENTIAL_GA_OBJECTS.MD @@ -21,7 +21,7 @@ newer provider versions. 
We will address these while working on the given object | SECURITY INTEGRATION | 👨‍💻 | [#2719](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2719), [#2568](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2568), [#2177](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2177), [#1851](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1851), [#1773](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1773), [#1741](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1741), [#1637](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1637), [#1503](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1503), [#1498](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1498), [#1421](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1421), [#1224](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1224) | | USER | ❌ | [#2817](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2817), [#2662](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2662), [#1572](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1572), [#1535](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1535), [#1155](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1155) | | WAREHOUSE | 👨‍💻 | [#1844](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1844), [#1104](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1104) | -| FUNCTION | ❌ | [#2735](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2735), [#2426](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2426), [#1479](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1479), 
[#1393](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1393), [#1208](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1208), [#1079](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1079) | +| FUNCTION | ❌ | [2859](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2859), [#2735](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2735), [#2426](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2426), [#1479](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1479), [#1393](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1393), [#1208](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1208), [#1079](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1079) | | MASKING POLICY | ❌ | [#2236](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2236), [#2035](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2035), [#1799](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1799), [#1764](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1764), [#1656](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1656), [#1444](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1444), [#1422](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1422), [#1097](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1097) | | PROCEDURE | ❌ | [#2735](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2735), [#2623](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2623), [#2257](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2257), [#2146](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2146), 
[#1855](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1855), [#1695](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1695), [#1640](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1640), [#1195](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1195), [#1189](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1189), [#1178](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1178), [#1050](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1050) | | ROW ACCESS POLICY | ❌ | [#2053](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2053), [#1600](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1600), [#1151](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1151) |