diff --git a/MIGRATION_GUIDE.md b/MIGRATION_GUIDE.md index 8ed26012f5..cc1ec4044b 100644 --- a/MIGRATION_GUIDE.md +++ b/MIGRATION_GUIDE.md @@ -5,11 +5,53 @@ describe deprecations or breaking changes and help you to change your configurat across different versions. ## v0.93.0 ➞ v0.94.0 - ### *(breaking change)* changes in snowflake_scim_integration In order to fix issues in v0.93.0, when a resource has Azure scim client, `sync_password` field is now set to `default` value in the state. State will be migrated automatically. + +### *(breaking change)* refactored snowflake_schema resource + +Renamed fields: +- renamed `is_managed` to `with_managed_access` +- renamed `data_retention_days` to `data_retention_time_in_days` + +Please rename these fields in your configuration files. State will be migrated automatically. + +Removed fields: +- `tag` +The value of this field will be removed from the state automatically. Please, use [tag_association](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/latest/docs/resources/tag_association) instead. + +New fields: +- the following set of [parameters](https://docs.snowflake.com/en/sql-reference/parameters) was added: + - `max_data_extension_time_in_days` + - `external_volume` + - `catalog` + - `replace_invalid_characters` + - `default_ddl_collation` + - `storage_serialization_policy` + - `log_level` + - `trace_level` + - `suspend_task_after_num_failures` + - `task_auto_retry_attempts` + - `user_task_managed_initial_warehouse_size` + - `user_task_timeout_ms` + - `user_task_minimum_trigger_interval_in_seconds` + - `quoted_identifiers_ignore_case` + - `enable_console_output` + - `pipe_execution_paused` +- added `show_output` field that holds the response from SHOW SCHEMAS. +- added `describe_output` field that holds the response from DESCRIBE SCHEMA. Note that one needs to grant sufficient privileges e.g. 
with [grant_ownership](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/latest/docs/resources/grant_ownership) on all objects in the schema. Otherwise, this field is not filled. +- added `parameters` field that holds the response from SHOW PARAMETERS IN SCHEMA. + +We allow creating and managing `PUBLIC` schemas now. When the name of the schema is `PUBLIC`, it's created with `OR_REPLACE`. We've decided this based on [#2826](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2826). + +#### *(behavior change)* Boolean type changes +To easily handle three-value logic (true, false, unknown) in provider's configs, type of `is_transient` and `with_managed_access` was changed from boolean to string. This should not require updating existing configs (boolean value should be accepted and state will be migrated to string automatically), however we recommend changing config values to strings. + +Terraform should recreate resources for configs lacking `is_transient` (`DROP` and then `CREATE` will be run underneath). To prevent this behavior, please set `is_transient` field. + +Terraform should perform an action for configs lacking `with_managed_access` (`ALTER SCHEMA DISABLE MANAGED ACCESS` will be run underneath which should not affect the Snowflake object, because `MANAGED ACCESS` is not set by default) ### *(breaking change)* refactored snowflake_schemas datasource Changes: - `database` is removed and can be specified inside `in` field. diff --git a/docs/resources/database.md b/docs/resources/database.md index abd0644f13..8a8ca5f778 100644 --- a/docs/resources/database.md +++ b/docs/resources/database.md @@ -86,25 +86,25 @@ resource "snowflake_database" "primary" { ### Optional -- `catalog` (String) The database parameter that specifies the default catalog to use for Iceberg tables. +- `catalog` (String) The database parameter that specifies the default catalog to use for Iceberg tables. 
For more information, see [CATALOG](https://docs.snowflake.com/en/sql-reference/parameters#catalog). - `comment` (String) Specifies a comment for the database. - `data_retention_time_in_days` (Number) Specifies the number of days for which Time Travel actions (CLONE and UNDROP) can be performed on the database, as well as specifying the default Time Travel retention time for all schemas created in the database. For more details, see [Understanding & Using Time Travel](https://docs.snowflake.com/en/user-guide/data-time-travel). - `default_ddl_collation` (String) Specifies a default collation specification for all schemas and tables added to the database. It can be overridden on schema or table level. For more information, see [collation specification](https://docs.snowflake.com/en/sql-reference/collation#label-collation-specification). - `enable_console_output` (Boolean) If true, enables stdout/stderr fast path logging for anonymous stored procedures. -- `external_volume` (String) The database parameter that specifies the default external volume to use for Iceberg tables. +- `external_volume` (String) The database parameter that specifies the default external volume to use for Iceberg tables. For more information, see [EXTERNAL_VOLUME](https://docs.snowflake.com/en/sql-reference/parameters#external-volume). - `is_transient` (Boolean) Specifies the database as transient. Transient databases do not have a Fail-safe period so they do not incur additional storage costs once they leave Time Travel; however, this means they are also not protected by Fail-safe in the event of a data loss. - `log_level` (String) Specifies the severity level of messages that should be ingested and made available in the active event table. Valid options are: [TRACE DEBUG INFO WARN ERROR FATAL OFF]. Messages at the specified level (and at more severe levels) are ingested. For more information, see [LOG_LEVEL](https://docs.snowflake.com/en/sql-reference/parameters.html#label-log-level). 
- `max_data_extension_time_in_days` (Number) Object parameter that specifies the maximum number of days for which Snowflake can extend the data retention period for tables in the database to prevent streams on the tables from becoming stale. For a detailed description of this parameter, see [MAX_DATA_EXTENSION_TIME_IN_DAYS](https://docs.snowflake.com/en/sql-reference/parameters.html#label-max-data-extension-time-in-days). -- `quoted_identifiers_ignore_case` (Boolean) If true, the case of quoted identifiers is ignored. -- `replace_invalid_characters` (Boolean) Specifies whether to replace invalid UTF-8 characters with the Unicode replacement character (�) in query results for an Iceberg table. You can only set this parameter for tables that use an external Iceberg catalog. +- `quoted_identifiers_ignore_case` (Boolean) If true, the case of quoted identifiers is ignored. For more information, see [QUOTED_IDENTIFIERS_IGNORE_CASE](https://docs.snowflake.com/en/sql-reference/parameters#quoted-identifiers-ignore-case). +- `replace_invalid_characters` (Boolean) Specifies whether to replace invalid UTF-8 characters with the Unicode replacement character (�) in query results for an Iceberg table. You can only set this parameter for tables that use an external Iceberg catalog. For more information, see [REPLACE_INVALID_CHARACTERS](https://docs.snowflake.com/en/sql-reference/parameters#replace-invalid-characters). - `replication` (Block List, Max: 1) Configures replication for a given database. When specified, this database will be promoted to serve as a primary database for replication. A primary database can be replicated in one or more accounts, allowing users in those accounts to query objects in each secondary (i.e. replica) database. (see [below for nested schema](#nestedblock--replication)) -- `storage_serialization_policy` (String) The storage serialization policy for Iceberg tables that use Snowflake as the catalog. Valid options are: [COMPATIBLE OPTIMIZED]. 
COMPATIBLE: Snowflake performs encoding and compression of data files that ensures interoperability with third-party compute engines. OPTIMIZED: Snowflake performs encoding and compression of data files that ensures the best table performance within Snowflake. -- `suspend_task_after_num_failures` (Number) How many times a task must fail in a row before it is automatically suspended. 0 disables auto-suspending. -- `task_auto_retry_attempts` (Number) Maximum automatic retries allowed for a user task. +- `storage_serialization_policy` (String) The storage serialization policy for Iceberg tables that use Snowflake as the catalog. Valid options are: [COMPATIBLE OPTIMIZED]. COMPATIBLE: Snowflake performs encoding and compression of data files that ensures interoperability with third-party compute engines. OPTIMIZED: Snowflake performs encoding and compression of data files that ensures the best table performance within Snowflake. For more information, see [STORAGE_SERIALIZATION_POLICY](https://docs.snowflake.com/en/sql-reference/parameters#storage-serialization-policy). +- `suspend_task_after_num_failures` (Number) How many times a task must fail in a row before it is automatically suspended. 0 disables auto-suspending. For more information, see [SUSPEND_TASK_AFTER_NUM_FAILURES](https://docs.snowflake.com/en/sql-reference/parameters#suspend-task-after-num-failures). +- `task_auto_retry_attempts` (Number) Maximum automatic retries allowed for a user task. For more information, see [TASK_AUTO_RETRY_ATTEMPTS](https://docs.snowflake.com/en/sql-reference/parameters#task-auto-retry-attempts). - `trace_level` (String) Controls how trace events are ingested into the event table. Valid options are: [ALWAYS ON_EVENT OFF]. For information about levels, see [TRACE_LEVEL](https://docs.snowflake.com/en/sql-reference/parameters.html#label-trace-level). 
-- `user_task_managed_initial_warehouse_size` (String) The initial size of warehouse to use for managed warehouses in the absence of history. +- `user_task_managed_initial_warehouse_size` (String) The initial size of warehouse to use for managed warehouses in the absence of history. For more information, see [USER_TASK_MANAGED_INITIAL_WAREHOUSE_SIZE](https://docs.snowflake.com/en/sql-reference/parameters#user-task-managed-initial-warehouse-size). - `user_task_minimum_trigger_interval_in_seconds` (Number) Minimum amount of time between Triggered Task executions in seconds. -- `user_task_timeout_ms` (Number) User task execution timeout in milliseconds. +- `user_task_timeout_ms` (Number) User task execution timeout in milliseconds. For more information, see [USER_TASK_TIMEOUT_MS](https://docs.snowflake.com/en/sql-reference/parameters#user-task-timeout-ms). ### Read-Only diff --git a/docs/resources/schema.md b/docs/resources/schema.md index 591f0f665d..66431343bd 100644 --- a/docs/resources/schema.md +++ b/docs/resources/schema.md @@ -2,24 +2,48 @@ page_title: "snowflake_schema Resource - terraform-provider-snowflake" subcategory: "" description: |- - + Resource used to manage schema objects. For more information, check schema documentation https://docs.snowflake.com/en/sql-reference/sql/create-schema. --- # snowflake_schema (Resource) - +Resource used to manage schema objects. For more information, check [schema documentation](https://docs.snowflake.com/en/sql-reference/sql/create-schema). ## Example Usage ```terraform +# basic resource resource "snowflake_schema" "schema" { - database = "database" - name = "schema" - comment = "A schema." 
+ name = "schema_name" + database = "database_name" +} + +# resource with all fields set +resource "snowflake_schema" "schema" { + name = "schema_name" + database = "database_name" + with_managed_access = true + is_transient = true + comment = "my schema" + + data_retention_time_in_days = 1 + max_data_extension_time_in_days = 20 + external_volume = "" + catalog = "" + replace_invalid_characters = false + default_ddl_collation = "en_US" + storage_serialization_policy = "COMPATIBLE" + log_level = "INFO" + trace_level = "ALWAYS" + suspend_task_after_num_failures = 10 + task_auto_retry_attempts = 10 + user_task_managed_initial_warehouse_size = "LARGE" + user_task_timeout_ms = 3600000 + user_task_minimum_trigger_interval_in_seconds = 120 + quoted_identifiers_ignore_case = false + enable_console_output = false + pipe_execution_paused = false - is_transient = false - is_managed = false - data_retention_days = 1 } ``` @@ -33,28 +57,288 @@ resource "snowflake_schema" "schema" { ### Optional +- `catalog` (String) The database parameter that specifies the default catalog to use for Iceberg tables. For more information, see [CATALOG](https://docs.snowflake.com/en/sql-reference/parameters#catalog). - `comment` (String) Specifies a comment for the schema. -- `data_retention_days` (Number) Specifies the number of days for which Time Travel actions (CLONE and UNDROP) can be performed on the schema, as well as specifying the default Time Travel retention time for all tables created in the schema. Default value for this field is set to -1, which is a fallback to use Snowflake default. -- `is_managed` (Boolean) Specifies a managed schema. Managed access schemas centralize privilege management with the schema owner. -- `is_transient` (Boolean) Specifies a schema as transient. 
Transient schemas do not have a Fail-safe period so they do not incur additional storage costs once they leave Time Travel; however, this means they are also not protected by Fail-safe in the event of a data loss. -- `tag` (Block List, Deprecated) Definitions of a tag to associate with the resource. (see [below for nested schema](#nestedblock--tag)) +- `data_retention_time_in_days` (Number) Specifies the number of days for which Time Travel actions (CLONE and UNDROP) can be performed on the database, as well as specifying the default Time Travel retention time for all schemas created in the database. For more details, see [Understanding & Using Time Travel](https://docs.snowflake.com/en/user-guide/data-time-travel). +- `default_ddl_collation` (String) Specifies a default collation specification for all schemas and tables added to the database. It can be overridden on schema or table level. For more information, see [collation specification](https://docs.snowflake.com/en/sql-reference/collation#label-collation-specification). +- `enable_console_output` (Boolean) If true, enables stdout/stderr fast path logging for anonymous stored procedures. +- `external_volume` (String) The database parameter that specifies the default external volume to use for Iceberg tables. For more information, see [EXTERNAL_VOLUME](https://docs.snowflake.com/en/sql-reference/parameters#external-volume). +- `is_transient` (String) Specifies the schema as transient. Transient schemas do not have a Fail-safe period so they do not incur additional storage costs once they leave Time Travel; however, this means they are also not protected by Fail-safe in the event of a data loss. Available options are: "true" or "false". When the value is not set in the configuration the provider will put "default" there which means to use the Snowflake default for this value. +- `log_level` (String) Specifies the severity level of messages that should be ingested and made available in the active event table. 
Valid options are: [TRACE DEBUG INFO WARN ERROR FATAL OFF]. Messages at the specified level (and at more severe levels) are ingested. For more information, see [LOG_LEVEL](https://docs.snowflake.com/en/sql-reference/parameters.html#label-log-level). +- `max_data_extension_time_in_days` (Number) Object parameter that specifies the maximum number of days for which Snowflake can extend the data retention period for tables in the database to prevent streams on the tables from becoming stale. For a detailed description of this parameter, see [MAX_DATA_EXTENSION_TIME_IN_DAYS](https://docs.snowflake.com/en/sql-reference/parameters.html#label-max-data-extension-time-in-days). +- `pipe_execution_paused` (Boolean) Specifies whether to pause a running pipe, primarily in preparation for transferring ownership of the pipe to a different role. For more information, see [PIPE_EXECUTION_PAUSED](https://docs.snowflake.com/en/sql-reference/parameters#pipe-execution-paused). +- `quoted_identifiers_ignore_case` (Boolean) If true, the case of quoted identifiers is ignored. For more information, see [QUOTED_IDENTIFIERS_IGNORE_CASE](https://docs.snowflake.com/en/sql-reference/parameters#quoted-identifiers-ignore-case). +- `replace_invalid_characters` (Boolean) Specifies whether to replace invalid UTF-8 characters with the Unicode replacement character (�) in query results for an Iceberg table. You can only set this parameter for tables that use an external Iceberg catalog. For more information, see [REPLACE_INVALID_CHARACTERS](https://docs.snowflake.com/en/sql-reference/parameters#replace-invalid-characters). +- `storage_serialization_policy` (String) The storage serialization policy for Iceberg tables that use Snowflake as the catalog. Valid options are: [COMPATIBLE OPTIMIZED]. COMPATIBLE: Snowflake performs encoding and compression of data files that ensures interoperability with third-party compute engines. 
OPTIMIZED: Snowflake performs encoding and compression of data files that ensures the best table performance within Snowflake. For more information, see [STORAGE_SERIALIZATION_POLICY](https://docs.snowflake.com/en/sql-reference/parameters#storage-serialization-policy). +- `suspend_task_after_num_failures` (Number) How many times a task must fail in a row before it is automatically suspended. 0 disables auto-suspending. For more information, see [SUSPEND_TASK_AFTER_NUM_FAILURES](https://docs.snowflake.com/en/sql-reference/parameters#suspend-task-after-num-failures). +- `task_auto_retry_attempts` (Number) Maximum automatic retries allowed for a user task. For more information, see [TASK_AUTO_RETRY_ATTEMPTS](https://docs.snowflake.com/en/sql-reference/parameters#task-auto-retry-attempts). +- `trace_level` (String) Controls how trace events are ingested into the event table. Valid options are: [ALWAYS ON_EVENT OFF]. For information about levels, see [TRACE_LEVEL](https://docs.snowflake.com/en/sql-reference/parameters.html#label-trace-level). +- `user_task_managed_initial_warehouse_size` (String) The initial size of warehouse to use for managed warehouses in the absence of history. For more information, see [USER_TASK_MANAGED_INITIAL_WAREHOUSE_SIZE](https://docs.snowflake.com/en/sql-reference/parameters#user-task-managed-initial-warehouse-size). +- `user_task_minimum_trigger_interval_in_seconds` (Number) Minimum amount of time between Triggered Task executions in seconds. +- `user_task_timeout_ms` (Number) User task execution timeout in milliseconds. For more information, see [USER_TASK_TIMEOUT_MS](https://docs.snowflake.com/en/sql-reference/parameters#user-task-timeout-ms). +- `with_managed_access` (String) Specifies a managed schema. Managed access schemas centralize privilege management with the schema owner. Available options are: "true" or "false". 
When the value is not set in the configuration the provider will put "default" there which means to use the Snowflake default for this value. ### Read-Only +- `describe_output` (List of Object) Outputs the result of `DESCRIBE SCHEMA` for the given object. In order to handle this output, one must grant sufficient privileges, e.g. [grant_ownership](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/latest/docs/resources/grant_ownership) on all objects in the schema. (see [below for nested schema](#nestedatt--describe_output)) - `id` (String) The ID of this resource. +- `parameters` (List of Object) Outputs the result of `SHOW PARAMETERS IN SCHEMA` for the given object. (see [below for nested schema](#nestedatt--parameters)) +- `show_output` (List of Object) Outputs the result of `SHOW SCHEMAS` for the given object. (see [below for nested schema](#nestedatt--show_output)) + + +### Nested Schema for `describe_output` + +Read-Only: + +- `created_on` (String) +- `kind` (String) +- `name` (String) + + + +### Nested Schema for `parameters` + +Read-Only: + +- `catalog` (List of Object) (see [below for nested schema](#nestedobjatt--parameters--catalog)) +- `data_retention_time_in_days` (List of Object) (see [below for nested schema](#nestedobjatt--parameters--data_retention_time_in_days)) +- `default_ddl_collation` (List of Object) (see [below for nested schema](#nestedobjatt--parameters--default_ddl_collation)) +- `enable_console_output` (List of Object) (see [below for nested schema](#nestedobjatt--parameters--enable_console_output)) +- `external_volume` (List of Object) (see [below for nested schema](#nestedobjatt--parameters--external_volume)) +- `log_level` (List of Object) (see [below for nested schema](#nestedobjatt--parameters--log_level)) +- `max_data_extension_time_in_days` (List of Object) (see [below for nested schema](#nestedobjatt--parameters--max_data_extension_time_in_days)) +- `pipe_execution_paused` (List of Object) (see [below for nested 
schema](#nestedobjatt--parameters--pipe_execution_paused)) +- `quoted_identifiers_ignore_case` (List of Object) (see [below for nested schema](#nestedobjatt--parameters--quoted_identifiers_ignore_case)) +- `replace_invalid_characters` (List of Object) (see [below for nested schema](#nestedobjatt--parameters--replace_invalid_characters)) +- `storage_serialization_policy` (List of Object) (see [below for nested schema](#nestedobjatt--parameters--storage_serialization_policy)) +- `suspend_task_after_num_failures` (List of Object) (see [below for nested schema](#nestedobjatt--parameters--suspend_task_after_num_failures)) +- `task_auto_retry_attempts` (List of Object) (see [below for nested schema](#nestedobjatt--parameters--task_auto_retry_attempts)) +- `trace_level` (List of Object) (see [below for nested schema](#nestedobjatt--parameters--trace_level)) +- `user_task_managed_initial_warehouse_size` (List of Object) (see [below for nested schema](#nestedobjatt--parameters--user_task_managed_initial_warehouse_size)) +- `user_task_minimum_trigger_interval_in_seconds` (List of Object) (see [below for nested schema](#nestedobjatt--parameters--user_task_minimum_trigger_interval_in_seconds)) +- `user_task_timeout_ms` (List of Object) (see [below for nested schema](#nestedobjatt--parameters--user_task_timeout_ms)) + + +### Nested Schema for `parameters.catalog` + +Read-Only: + +- `default` (String) +- `description` (String) +- `key` (String) +- `level` (String) +- `value` (String) + + + +### Nested Schema for `parameters.data_retention_time_in_days` + +Read-Only: + +- `default` (String) +- `description` (String) +- `key` (String) +- `level` (String) +- `value` (String) + + + +### Nested Schema for `parameters.default_ddl_collation` + +Read-Only: + +- `default` (String) +- `description` (String) +- `key` (String) +- `level` (String) +- `value` (String) + + + +### Nested Schema for `parameters.enable_console_output` + +Read-Only: + +- `default` (String) +- `description` 
(String) +- `key` (String) +- `level` (String) +- `value` (String) + + + +### Nested Schema for `parameters.external_volume` + +Read-Only: + +- `default` (String) +- `description` (String) +- `key` (String) +- `level` (String) +- `value` (String) + + + +### Nested Schema for `parameters.log_level` + +Read-Only: + +- `default` (String) +- `description` (String) +- `key` (String) +- `level` (String) +- `value` (String) + + + +### Nested Schema for `parameters.max_data_extension_time_in_days` + +Read-Only: + +- `default` (String) +- `description` (String) +- `key` (String) +- `level` (String) +- `value` (String) + + + +### Nested Schema for `parameters.pipe_execution_paused` + +Read-Only: + +- `default` (String) +- `description` (String) +- `key` (String) +- `level` (String) +- `value` (String) + + + +### Nested Schema for `parameters.quoted_identifiers_ignore_case` + +Read-Only: + +- `default` (String) +- `description` (String) +- `key` (String) +- `level` (String) +- `value` (String) + + + +### Nested Schema for `parameters.replace_invalid_characters` + +Read-Only: + +- `default` (String) +- `description` (String) +- `key` (String) +- `level` (String) +- `value` (String) + + + +### Nested Schema for `parameters.storage_serialization_policy` + +Read-Only: + +- `default` (String) +- `description` (String) +- `key` (String) +- `level` (String) +- `value` (String) + + + +### Nested Schema for `parameters.suspend_task_after_num_failures` + +Read-Only: + +- `default` (String) +- `description` (String) +- `key` (String) +- `level` (String) +- `value` (String) + + + +### Nested Schema for `parameters.task_auto_retry_attempts` + +Read-Only: + +- `default` (String) +- `description` (String) +- `key` (String) +- `level` (String) +- `value` (String) + + + +### Nested Schema for `parameters.trace_level` + +Read-Only: + +- `default` (String) +- `description` (String) +- `key` (String) +- `level` (String) +- `value` (String) + + + +### Nested Schema for 
`parameters.user_task_managed_initial_warehouse_size` + +Read-Only: + +- `default` (String) +- `description` (String) +- `key` (String) +- `level` (String) +- `value` (String) + + + +### Nested Schema for `parameters.user_task_minimum_trigger_interval_in_seconds` + +Read-Only: + +- `default` (String) +- `description` (String) +- `key` (String) +- `level` (String) +- `value` (String) + + + +### Nested Schema for `parameters.user_task_timeout_ms` + +Read-Only: + +- `default` (String) +- `description` (String) +- `key` (String) +- `level` (String) +- `value` (String) - -### Nested Schema for `tag` -Required: -- `name` (String) Tag name, e.g. department. -- `value` (String) Tag value, e.g. marketing_info. + +### Nested Schema for `show_output` -Optional: +Read-Only: -- `database` (String) Name of the database that the tag was created in. -- `schema` (String) Name of the schema that the tag was created in. +- `comment` (String) +- `created_on` (String) +- `database_name` (String) +- `dropped_on` (String) +- `is_current` (Boolean) +- `is_default` (Boolean) +- `name` (String) +- `options` (String) +- `owner` (String) +- `owner_role_type` (String) +- `retention_time` (String) ## Import diff --git a/docs/resources/secondary_database.md b/docs/resources/secondary_database.md index 4a163b2cab..22f3f3ab11 100644 --- a/docs/resources/secondary_database.md +++ b/docs/resources/secondary_database.md @@ -95,24 +95,24 @@ resource "snowflake_task" "refresh_secondary_database" { ### Optional -- `catalog` (String) The database parameter that specifies the default catalog to use for Iceberg tables. +- `catalog` (String) The database parameter that specifies the default catalog to use for Iceberg tables. For more information, see [CATALOG](https://docs.snowflake.com/en/sql-reference/parameters#catalog). - `comment` (String) Specifies a comment for the database. 
- `data_retention_time_in_days` (Number) Specifies the number of days for which Time Travel actions (CLONE and UNDROP) can be performed on the database, as well as specifying the default Time Travel retention time for all schemas created in the database. For more details, see [Understanding & Using Time Travel](https://docs.snowflake.com/en/user-guide/data-time-travel). - `default_ddl_collation` (String) Specifies a default collation specification for all schemas and tables added to the database. It can be overridden on schema or table level. For more information, see [collation specification](https://docs.snowflake.com/en/sql-reference/collation#label-collation-specification). - `enable_console_output` (Boolean) If true, enables stdout/stderr fast path logging for anonymous stored procedures. -- `external_volume` (String) The database parameter that specifies the default external volume to use for Iceberg tables. +- `external_volume` (String) The database parameter that specifies the default external volume to use for Iceberg tables. For more information, see [EXTERNAL_VOLUME](https://docs.snowflake.com/en/sql-reference/parameters#external-volume). - `is_transient` (Boolean) Specifies the database as transient. Transient databases do not have a Fail-safe period so they do not incur additional storage costs once they leave Time Travel; however, this means they are also not protected by Fail-safe in the event of a data loss. - `log_level` (String) Specifies the severity level of messages that should be ingested and made available in the active event table. Valid options are: [TRACE DEBUG INFO WARN ERROR FATAL OFF]. Messages at the specified level (and at more severe levels) are ingested. For more information, see [LOG_LEVEL](https://docs.snowflake.com/en/sql-reference/parameters.html#label-log-level). 
- `max_data_extension_time_in_days` (Number) Object parameter that specifies the maximum number of days for which Snowflake can extend the data retention period for tables in the database to prevent streams on the tables from becoming stale. For a detailed description of this parameter, see [MAX_DATA_EXTENSION_TIME_IN_DAYS](https://docs.snowflake.com/en/sql-reference/parameters.html#label-max-data-extension-time-in-days). -- `quoted_identifiers_ignore_case` (Boolean) If true, the case of quoted identifiers is ignored. -- `replace_invalid_characters` (Boolean) Specifies whether to replace invalid UTF-8 characters with the Unicode replacement character (�) in query results for an Iceberg table. You can only set this parameter for tables that use an external Iceberg catalog. -- `storage_serialization_policy` (String) The storage serialization policy for Iceberg tables that use Snowflake as the catalog. Valid options are: [COMPATIBLE OPTIMIZED]. COMPATIBLE: Snowflake performs encoding and compression of data files that ensures interoperability with third-party compute engines. OPTIMIZED: Snowflake performs encoding and compression of data files that ensures the best table performance within Snowflake. -- `suspend_task_after_num_failures` (Number) How many times a task must fail in a row before it is automatically suspended. 0 disables auto-suspending. -- `task_auto_retry_attempts` (Number) Maximum automatic retries allowed for a user task. +- `quoted_identifiers_ignore_case` (Boolean) If true, the case of quoted identifiers is ignored. For more information, see [QUOTED_IDENTIFIERS_IGNORE_CASE](https://docs.snowflake.com/en/sql-reference/parameters#quoted-identifiers-ignore-case). +- `replace_invalid_characters` (Boolean) Specifies whether to replace invalid UTF-8 characters with the Unicode replacement character (�) in query results for an Iceberg table. You can only set this parameter for tables that use an external Iceberg catalog. 
For more information, see [REPLACE_INVALID_CHARACTERS](https://docs.snowflake.com/en/sql-reference/parameters#replace-invalid-characters). +- `storage_serialization_policy` (String) The storage serialization policy for Iceberg tables that use Snowflake as the catalog. Valid options are: [COMPATIBLE OPTIMIZED]. COMPATIBLE: Snowflake performs encoding and compression of data files that ensures interoperability with third-party compute engines. OPTIMIZED: Snowflake performs encoding and compression of data files that ensures the best table performance within Snowflake. For more information, see [STORAGE_SERIALIZATION_POLICY](https://docs.snowflake.com/en/sql-reference/parameters#storage-serialization-policy). +- `suspend_task_after_num_failures` (Number) How many times a task must fail in a row before it is automatically suspended. 0 disables auto-suspending. For more information, see [SUSPEND_TASK_AFTER_NUM_FAILURES](https://docs.snowflake.com/en/sql-reference/parameters#suspend-task-after-num-failures). +- `task_auto_retry_attempts` (Number) Maximum automatic retries allowed for a user task. For more information, see [TASK_AUTO_RETRY_ATTEMPTS](https://docs.snowflake.com/en/sql-reference/parameters#task-auto-retry-attempts). - `trace_level` (String) Controls how trace events are ingested into the event table. Valid options are: [ALWAYS ON_EVENT OFF]. For information about levels, see [TRACE_LEVEL](https://docs.snowflake.com/en/sql-reference/parameters.html#label-trace-level). -- `user_task_managed_initial_warehouse_size` (String) The initial size of warehouse to use for managed warehouses in the absence of history. +- `user_task_managed_initial_warehouse_size` (String) The initial size of warehouse to use for managed warehouses in the absence of history. For more information, see [USER_TASK_MANAGED_INITIAL_WAREHOUSE_SIZE](https://docs.snowflake.com/en/sql-reference/parameters#user-task-managed-initial-warehouse-size). 
- `user_task_minimum_trigger_interval_in_seconds` (Number) Minimum amount of time between Triggered Task executions in seconds. -- `user_task_timeout_ms` (Number) User task execution timeout in milliseconds. +- `user_task_timeout_ms` (Number) User task execution timeout in milliseconds. For more information, see [USER_TASK_TIMEOUT_MS](https://docs.snowflake.com/en/sql-reference/parameters#user-task-timeout-ms). ### Read-Only diff --git a/docs/resources/shared_database.md b/docs/resources/shared_database.md index 6563fb2ccd..d63ca217fc 100644 --- a/docs/resources/shared_database.md +++ b/docs/resources/shared_database.md @@ -80,21 +80,21 @@ resource "snowflake_shared_database" "test" { ### Optional -- `catalog` (String) The database parameter that specifies the default catalog to use for Iceberg tables. +- `catalog` (String) The database parameter that specifies the default catalog to use for Iceberg tables. For more information, see [CATALOG](https://docs.snowflake.com/en/sql-reference/parameters#catalog). - `comment` (String) Specifies a comment for the database. - `default_ddl_collation` (String) Specifies a default collation specification for all schemas and tables added to the database. It can be overridden on schema or table level. For more information, see [collation specification](https://docs.snowflake.com/en/sql-reference/collation#label-collation-specification). - `enable_console_output` (Boolean) If true, enables stdout/stderr fast path logging for anonymous stored procedures. -- `external_volume` (String) The database parameter that specifies the default external volume to use for Iceberg tables. +- `external_volume` (String) The database parameter that specifies the default external volume to use for Iceberg tables. For more information, see [EXTERNAL_VOLUME](https://docs.snowflake.com/en/sql-reference/parameters#external-volume). 
- `log_level` (String) Specifies the severity level of messages that should be ingested and made available in the active event table. Valid options are: [TRACE DEBUG INFO WARN ERROR FATAL OFF]. Messages at the specified level (and at more severe levels) are ingested. For more information, see [LOG_LEVEL](https://docs.snowflake.com/en/sql-reference/parameters.html#label-log-level). -- `quoted_identifiers_ignore_case` (Boolean) If true, the case of quoted identifiers is ignored. -- `replace_invalid_characters` (Boolean) Specifies whether to replace invalid UTF-8 characters with the Unicode replacement character (�) in query results for an Iceberg table. You can only set this parameter for tables that use an external Iceberg catalog. -- `storage_serialization_policy` (String) The storage serialization policy for Iceberg tables that use Snowflake as the catalog. Valid options are: [COMPATIBLE OPTIMIZED]. COMPATIBLE: Snowflake performs encoding and compression of data files that ensures interoperability with third-party compute engines. OPTIMIZED: Snowflake performs encoding and compression of data files that ensures the best table performance within Snowflake. -- `suspend_task_after_num_failures` (Number) How many times a task must fail in a row before it is automatically suspended. 0 disables auto-suspending. -- `task_auto_retry_attempts` (Number) Maximum automatic retries allowed for a user task. +- `quoted_identifiers_ignore_case` (Boolean) If true, the case of quoted identifiers is ignored. For more information, see [QUOTED_IDENTIFIERS_IGNORE_CASE](https://docs.snowflake.com/en/sql-reference/parameters#quoted-identifiers-ignore-case). +- `replace_invalid_characters` (Boolean) Specifies whether to replace invalid UTF-8 characters with the Unicode replacement character (�) in query results for an Iceberg table. You can only set this parameter for tables that use an external Iceberg catalog. 
For more information, see [REPLACE_INVALID_CHARACTERS](https://docs.snowflake.com/en/sql-reference/parameters#replace-invalid-characters). +- `storage_serialization_policy` (String) The storage serialization policy for Iceberg tables that use Snowflake as the catalog. Valid options are: [COMPATIBLE OPTIMIZED]. COMPATIBLE: Snowflake performs encoding and compression of data files that ensures interoperability with third-party compute engines. OPTIMIZED: Snowflake performs encoding and compression of data files that ensures the best table performance within Snowflake. For more information, see [STORAGE_SERIALIZATION_POLICY](https://docs.snowflake.com/en/sql-reference/parameters#storage-serialization-policy). +- `suspend_task_after_num_failures` (Number) How many times a task must fail in a row before it is automatically suspended. 0 disables auto-suspending. For more information, see [SUSPEND_TASK_AFTER_NUM_FAILURES](https://docs.snowflake.com/en/sql-reference/parameters#suspend-task-after-num-failures). +- `task_auto_retry_attempts` (Number) Maximum automatic retries allowed for a user task. For more information, see [TASK_AUTO_RETRY_ATTEMPTS](https://docs.snowflake.com/en/sql-reference/parameters#task-auto-retry-attempts). - `trace_level` (String) Controls how trace events are ingested into the event table. Valid options are: [ALWAYS ON_EVENT OFF]. For information about levels, see [TRACE_LEVEL](https://docs.snowflake.com/en/sql-reference/parameters.html#label-trace-level). -- `user_task_managed_initial_warehouse_size` (String) The initial size of warehouse to use for managed warehouses in the absence of history. +- `user_task_managed_initial_warehouse_size` (String) The initial size of warehouse to use for managed warehouses in the absence of history. For more information, see [USER_TASK_MANAGED_INITIAL_WAREHOUSE_SIZE](https://docs.snowflake.com/en/sql-reference/parameters#user-task-managed-initial-warehouse-size). 
- `user_task_minimum_trigger_interval_in_seconds` (Number) Minimum amount of time between Triggered Task executions in seconds. -- `user_task_timeout_ms` (Number) User task execution timeout in milliseconds. +- `user_task_timeout_ms` (Number) User task execution timeout in milliseconds. For more information, see [USER_TASK_TIMEOUT_MS](https://docs.snowflake.com/en/sql-reference/parameters#user-task-timeout-ms). ### Read-Only diff --git a/examples/resources/snowflake_schema/resource.tf b/examples/resources/snowflake_schema/resource.tf index d2832260e4..d5d1d08fbd 100644 --- a/examples/resources/snowflake_schema/resource.tf +++ b/examples/resources/snowflake_schema/resource.tf @@ -1,9 +1,33 @@ +# basic resource resource "snowflake_schema" "schema" { - database = "database" - name = "schema" - comment = "A schema." + name = "schema_name" + database = "database_name" +} + +# resource with all fields set +resource "snowflake_schema" "schema" { + name = "schema_name" + database = "database_name" + with_managed_access = true + is_transient = true + comment = "my schema" + + data_retention_time_in_days = 1 + max_data_extension_time_in_days = 20 + external_volume = "" + catalog = "" + replace_invalid_characters = false + default_ddl_collation = "en_US" + storage_serialization_policy = "COMPATIBLE" + log_level = "INFO" + trace_level = "ALWAYS" + suspend_task_after_num_failures = 10 + task_auto_retry_attempts = 10 + user_task_managed_initial_warehouse_size = "LARGE" + user_task_timeout_ms = 3600000 + user_task_minimum_trigger_interval_in_seconds = 120 + quoted_identifiers_ignore_case = false + enable_console_output = false + pipe_execution_paused = false - is_transient = false - is_managed = false - data_retention_days = 1 } diff --git a/pkg/datasources/schemas.go b/pkg/datasources/schemas.go index a697ba19dd..371e42adf4 100644 --- a/pkg/datasources/schemas.go +++ b/pkg/datasources/schemas.go @@ -161,7 +161,7 @@ func ReadSchemas(ctx context.Context, d *schema.ResourceData, 
meta any) diag.Dia } if v, ok := d.GetOk("in"); ok { - in := v.([]interface{})[0].(map[string]interface{}) + in := v.([]any)[0].(map[string]any) if v, ok := in["account"]; ok { if account := v.(bool); account { opts.In = &sdk.SchemaIn{Account: sdk.Bool(account)} diff --git a/pkg/datasources/testdata/TestAcc_Schemas/optionals_set/test.tf b/pkg/datasources/testdata/TestAcc_Schemas/optionals_set/test.tf index 98eb66450b..0b457b1229 100644 --- a/pkg/datasources/testdata/TestAcc_Schemas/optionals_set/test.tf +++ b/pkg/datasources/testdata/TestAcc_Schemas/optionals_set/test.tf @@ -1,9 +1,9 @@ resource "snowflake_schema" "test" { - name = var.name - database = var.database - comment = var.comment - is_transient = true - is_managed = true + name = var.name + database = var.database + comment = var.comment + is_transient = true + with_managed_access = true } resource "snowflake_table" "test" { diff --git a/pkg/datasources/testdata/TestAcc_Schemas/optionals_unset/test.tf b/pkg/datasources/testdata/TestAcc_Schemas/optionals_unset/test.tf index 0bbc4a0850..d06825d7e6 100644 --- a/pkg/datasources/testdata/TestAcc_Schemas/optionals_unset/test.tf +++ b/pkg/datasources/testdata/TestAcc_Schemas/optionals_unset/test.tf @@ -1,9 +1,9 @@ resource "snowflake_schema" "test" { - name = var.name - database = var.database - comment = var.comment - is_transient = true - is_managed = true + name = var.name + database = var.database + comment = var.comment + is_transient = true + with_managed_access = true } data "snowflake_schemas" "test" { diff --git a/pkg/resources/custom_diffs.go b/pkg/resources/custom_diffs.go index 191f1f1081..6bcfa55c8a 100644 --- a/pkg/resources/custom_diffs.go +++ b/pkg/resources/custom_diffs.go @@ -89,7 +89,8 @@ func ComputedIfAnyAttributeChanged(key string, changedAttributeKeys ...string) s var result bool for _, changedKey := range changedAttributeKeys { if diff.HasChange(changedKey) { - log.Printf("[DEBUG] ComputedIfAnyAttributeChanged: changed key: %s\n", 
changedKey) + old, new := diff.GetChange(changedKey) + log.Printf("[DEBUG] ComputedIfAnyAttributeChanged: changed key: %s old: %s new: %s\n", changedKey, old, new) } result = result || diff.HasChange(changedKey) } diff --git a/pkg/resources/database_commons.go b/pkg/resources/database_commons.go index 17e3c46431..61e0d9638d 100644 --- a/pkg/resources/database_commons.go +++ b/pkg/resources/database_commons.go @@ -80,13 +80,13 @@ func init() { { Name: sdk.ObjectParameterCatalog, Type: schema.TypeString, - Description: "The database parameter that specifies the default catalog to use for Iceberg tables.", + Description: "The database parameter that specifies the default catalog to use for Iceberg tables. For more information, see [CATALOG](https://docs.snowflake.com/en/sql-reference/parameters#catalog).", ValidateDiag: IsValidIdentifier[sdk.AccountObjectIdentifier](), }, { Name: sdk.ObjectParameterExternalVolume, Type: schema.TypeString, - Description: "The database parameter that specifies the default external volume to use for Iceberg tables.", + Description: "The database parameter that specifies the default external volume to use for Iceberg tables. For more information, see [EXTERNAL_VOLUME](https://docs.snowflake.com/en/sql-reference/parameters#external-volume).", ValidateDiag: IsValidIdentifier[sdk.AccountObjectIdentifier](), }, { @@ -116,12 +116,12 @@ func init() { { Name: sdk.ObjectParameterReplaceInvalidCharacters, Type: schema.TypeBool, - Description: "Specifies whether to replace invalid UTF-8 characters with the Unicode replacement character (�) in query results for an Iceberg table. You can only set this parameter for tables that use an external Iceberg catalog.", + Description: "Specifies whether to replace invalid UTF-8 characters with the Unicode replacement character (�) in query results for an Iceberg table. You can only set this parameter for tables that use an external Iceberg catalog. 
For more information, see [REPLACE_INVALID_CHARACTERS](https://docs.snowflake.com/en/sql-reference/parameters#replace-invalid-characters).", }, { Name: sdk.ObjectParameterStorageSerializationPolicy, Type: schema.TypeString, - Description: fmt.Sprintf("The storage serialization policy for Iceberg tables that use Snowflake as the catalog. Valid options are: %v. COMPATIBLE: Snowflake performs encoding and compression of data files that ensures interoperability with third-party compute engines. OPTIMIZED: Snowflake performs encoding and compression of data files that ensures the best table performance within Snowflake.", sdk.AsStringList(sdk.AllStorageSerializationPolicies)), + Description: fmt.Sprintf("The storage serialization policy for Iceberg tables that use Snowflake as the catalog. Valid options are: %v. COMPATIBLE: Snowflake performs encoding and compression of data files that ensures interoperability with third-party compute engines. OPTIMIZED: Snowflake performs encoding and compression of data files that ensures the best table performance within Snowflake. For more information, see [STORAGE_SERIALIZATION_POLICY](https://docs.snowflake.com/en/sql-reference/parameters#storage-serialization-policy).", sdk.AsStringList(sdk.AllStorageSerializationPolicies)), ValidateDiag: StringInSlice(sdk.AsStringList(sdk.AllStorageSerializationPolicies), true), DiffSuppress: func(k, oldValue, newValue string, d *schema.ResourceData) bool { return strings.EqualFold(oldValue, newValue) @@ -130,26 +130,26 @@ func init() { { Name: sdk.ObjectParameterSuspendTaskAfterNumFailures, Type: schema.TypeInt, - Description: "How many times a task must fail in a row before it is automatically suspended. 0 disables auto-suspending.", + Description: "How many times a task must fail in a row before it is automatically suspended. 0 disables auto-suspending. 
For more information, see [SUSPEND_TASK_AFTER_NUM_FAILURES](https://docs.snowflake.com/en/sql-reference/parameters#suspend-task-after-num-failures).", ValidateDiag: validation.ToDiagFunc(validation.IntAtLeast(0)), }, { Name: sdk.ObjectParameterTaskAutoRetryAttempts, Type: schema.TypeInt, - Description: "Maximum automatic retries allowed for a user task.", + Description: "Maximum automatic retries allowed for a user task. For more information, see [TASK_AUTO_RETRY_ATTEMPTS](https://docs.snowflake.com/en/sql-reference/parameters#task-auto-retry-attempts).", ValidateDiag: validation.ToDiagFunc(validation.IntAtLeast(0)), }, { Name: sdk.ObjectParameterUserTaskManagedInitialWarehouseSize, Type: schema.TypeString, - Description: "The initial size of warehouse to use for managed warehouses in the absence of history.", + Description: "The initial size of warehouse to use for managed warehouses in the absence of history. For more information, see [USER_TASK_MANAGED_INITIAL_WAREHOUSE_SIZE](https://docs.snowflake.com/en/sql-reference/parameters#user-task-managed-initial-warehouse-size).", ValidateDiag: sdkValidation(sdk.ToWarehouseSize), DiffSuppress: NormalizeAndCompare(sdk.ToWarehouseSize), }, { Name: sdk.ObjectParameterUserTaskTimeoutMs, Type: schema.TypeInt, - Description: "User task execution timeout in milliseconds.", + Description: "User task execution timeout in milliseconds. For more information, see [USER_TASK_TIMEOUT_MS](https://docs.snowflake.com/en/sql-reference/parameters#user-task-timeout-ms).", ValidateDiag: validation.ToDiagFunc(validation.IntBetween(0, 86400000)), }, { @@ -161,7 +161,7 @@ func init() { { Name: sdk.ObjectParameterQuotedIdentifiersIgnoreCase, Type: schema.TypeBool, - Description: "If true, the case of quoted identifiers is ignored.", + Description: "If true, the case of quoted identifiers is ignored. 
For more information, see [QUOTED_IDENTIFIERS_IGNORE_CASE](https://docs.snowflake.com/en/sql-reference/parameters#quoted-identifiers-ignore-case).", }, { Name: sdk.ObjectParameterEnableConsoleOutput, diff --git a/pkg/resources/diff_suppressions.go b/pkg/resources/diff_suppressions.go index 1f645fc950..0106768e62 100644 --- a/pkg/resources/diff_suppressions.go +++ b/pkg/resources/diff_suppressions.go @@ -24,12 +24,48 @@ func NormalizeAndCompare[T comparable](normalize func(string) (T, error)) schema } } +// NormalizeAndCompareIdentifiersInSet is a diff suppression function that should be used at top-level TypeSet fields that +// hold identifiers to avoid diffs like: +// - "DATABASE"."SCHEMA"."OBJECT" +// + DATABASE.SCHEMA.OBJECT +// where both identifiers are pointing to the same object, but have different structure. When a diff occurs in the +// list or set, we have to handle two suppressions (one that prevents adding and one that prevents the removal). +// It's handled by the two statements with the help of helpers.ContainsIdentifierIgnoringQuotes and by getting +// the current state of ids to compare against. The diff suppressions for lists and sets are running for each element one by one, +// and the first diff is usually .# referring to the collection length (we skip those). +func NormalizeAndCompareIdentifiersInSet(key string) schema.SchemaDiffSuppressFunc { + return func(k, oldValue, newValue string, d *schema.ResourceData) bool { + if strings.HasSuffix(k, ".#") { + return false + } + + if oldValue == "" && !d.GetRawState().IsNull() { + if helpers.ContainsIdentifierIgnoringQuotes(ctyValToSliceString(d.GetRawState().AsValueMap()[key].AsValueSet().Values()), newValue) { + return true + } + } + + if newValue == "" { + if helpers.ContainsIdentifierIgnoringQuotes(expandStringList(d.Get(key).(*schema.Set).List()), oldValue) { + return true + } + } + + return false + } +} + // IgnoreAfterCreation should be used to ignore changes to the given attribute post creation. 
func IgnoreAfterCreation(_, _, _ string, d *schema.ResourceData) bool { // For new resources always show the diff and in every other case we do not want to use this attribute return d.Id() != "" } +// IgnoreChangeToCurrentSnowflakeValueInShowWithMapping should be used to ignore changes to the given attribute when its value, after applying the mapping, is equal to value in show_output. +func IgnoreChangeToCurrentSnowflakeValueInShowWithMapping(keyInOutput string, mapping func(any) any) schema.SchemaDiffSuppressFunc { + return IgnoreChangeToCurrentSnowflakePlainValueInOutputWithMapping(ShowOutputAttributeName, keyInOutput, mapping) +} + // IgnoreChangeToCurrentSnowflakeValueInShow should be used to ignore changes to the given attribute when its value is equal to value in show_output. func IgnoreChangeToCurrentSnowflakeValueInShow(keyInOutput string) schema.SchemaDiffSuppressFunc { return IgnoreChangeToCurrentSnowflakePlainValueInOutput(ShowOutputAttributeName, keyInOutput) @@ -61,6 +97,27 @@ func IgnoreChangeToCurrentSnowflakePlainValueInOutput(attrName, keyInOutput stri } } +// IgnoreChangeToCurrentSnowflakePlainValueInOutputWithMapping should be used to ignore changes to the given attribute when its value, after applying the mapping, is equal to value in provided `attrName`. 
+func IgnoreChangeToCurrentSnowflakePlainValueInOutputWithMapping(attrName, keyInOutput string, mapping func(any) any) schema.SchemaDiffSuppressFunc { + return func(_, _, new string, d *schema.ResourceData) bool { + if d.Id() == "" { + return false + } + + if queryOutput, ok := d.GetOk(attrName); ok { + queryOutputList := queryOutput.([]any) + if len(queryOutputList) == 1 { + result := mapping(queryOutputList[0].(map[string]any)[keyInOutput]) + log.Printf("[DEBUG] IgnoreChangeToCurrentSnowflakePlainValueInOutputWithMapping: value for key %s is %v, new value is %s, comparison result is: %t", keyInOutput, result, new, new == fmt.Sprintf("%v", result)) + if new == fmt.Sprintf("%v", result) { + return true + } + } + } + return false + } +} + // IgnoreChangeToCurrentSnowflakeListValueInDescribe works similarly to IgnoreChangeToCurrentSnowflakeValueInDescribe, but assumes that in `describe_output` the value is saved in nested `value` field. func IgnoreChangeToCurrentSnowflakeListValueInDescribe(keyInDescribeOutput string) schema.SchemaDiffSuppressFunc { return func(_, _, new string, d *schema.ResourceData) bool { @@ -128,34 +185,3 @@ func IgnoreValuesFromSetIfParamSet(key, param string, values []string) schema.Sc return slices.Contains(values, old) } } - -// NormalizeAndCompareIdentifiersInSet is a diff suppression function that should be used at top-level TypeSet fields that -// hold identifiers to avoid diffs like: -// - "DATABASE"."SCHEMA"."OBJECT" -// + DATABASE.SCHEMA.OBJECT -// where both identifiers are pointing to the same object, but have different structure. When a diff occurs in the -// list or set, we have to handle two suppressions (one that prevents adding and one that prevents the removal). -// It's handled by the two statements with the help of helpers.ContainsIdentifierIgnoringQuotes and by getting -// the current state of ids to compare against. 
The diff suppressions for lists and sets are running for each element one by one, -// and the first diff is usually .# referring to the collection length (we skip those). -func NormalizeAndCompareIdentifiersInSet(key string) schema.SchemaDiffSuppressFunc { - return func(k, oldValue, newValue string, d *schema.ResourceData) bool { - if strings.HasSuffix(k, ".#") { - return false - } - - if oldValue == "" && !d.GetRawState().IsNull() { - if helpers.ContainsIdentifierIgnoringQuotes(ctyValToSliceString(d.GetRawState().AsValueMap()[key].AsValueSet().Values()), newValue) { - return true - } - } - - if newValue == "" { - if helpers.ContainsIdentifierIgnoringQuotes(expandStringList(d.Get(key).(*schema.Set).List()), oldValue) { - return true - } - } - - return false - } -} diff --git a/pkg/resources/schema.go b/pkg/resources/schema.go index fc184b889a..b26ad5c6ee 100644 --- a/pkg/resources/schema.go +++ b/pkg/resources/schema.go @@ -2,295 +2,521 @@ package resources import ( "context" + "errors" "fmt" "log" + "slices" "strconv" "strings" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/internal/collections" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/internal/provider" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/schemas" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/helpers" + "github.com/hashicorp/go-cty/cty" + "github.com/hashicorp/terraform-plugin-sdk/v2/diag" + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/customdiff" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" - "github.com/hashicorp/terraform-plugin-sdk/v2/helper/validation" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk" ) var schemaSchema = map[string]*schema.Schema{ "name": { - Type: schema.TypeString, - Required: true, - Description: "Specifies the identifier for the schema; must be unique for the database in which the schema is created.", + Type: schema.TypeString, + Required: true, + Description: "Specifies the 
identifier for the schema; must be unique for the database in which the schema is created.", + DiffSuppressFunc: suppressIdentifierQuoting, }, "database": { - Type: schema.TypeString, - Required: true, - Description: "The database in which to create the schema.", - ForceNew: true, + Type: schema.TypeString, + Required: true, + Description: "The database in which to create the schema.", + ForceNew: true, + DiffSuppressFunc: suppressIdentifierQuoting, }, - "comment": { - Type: schema.TypeString, - Optional: true, - Description: "Specifies a comment for the schema.", + "with_managed_access": { + Type: schema.TypeString, + Optional: true, + Description: booleanStringFieldDescription("Specifies a managed schema. Managed access schemas centralize privilege management with the schema owner."), + ValidateDiagFunc: validateBooleanString, + Default: BooleanDefault, + DiffSuppressFunc: IgnoreChangeToCurrentSnowflakeValueInShowWithMapping("options", func(x any) any { + return slices.Contains(sdk.ParseCommaSeparatedStringArray(x.(string), false), "MANAGED ACCESS") + }), }, "is_transient": { + Type: schema.TypeString, + Optional: true, + ForceNew: true, + Description: booleanStringFieldDescription("Specifies the schema as transient. Transient schemas do not have a Fail-safe period so they do not incur additional storage costs once they leave Time Travel; however, this means they are also not protected by Fail-safe in the event of a data loss."), + ValidateDiagFunc: validateBooleanString, + Default: BooleanDefault, + DiffSuppressFunc: IgnoreChangeToCurrentSnowflakeValueInShowWithMapping("options", func(x any) any { + return slices.Contains(sdk.ParseCommaSeparatedStringArray(x.(string), false), "TRANSIENT") + }), + }, + strings.ToLower(string(sdk.ObjectParameterPipeExecutionPaused)): { Type: schema.TypeBool, Optional: true, - Default: false, - Description: "Specifies a schema as transient. 
Transient schemas do not have a Fail-safe period so they do not incur additional storage costs once they leave Time Travel; however, this means they are also not protected by Fail-safe in the event of a data loss.", - ForceNew: true, + Computed: true, + Description: "Specifies whether to pause a running pipe, primarily in preparation for transferring ownership of the pipe to a different role. For more information, see [PIPE_EXECUTION_PAUSED](https://docs.snowflake.com/en/sql-reference/parameters#pipe-execution-paused).", }, - "is_managed": { - Type: schema.TypeBool, + "comment": { + Type: schema.TypeString, Optional: true, - Default: false, - Description: "Specifies a managed schema. Managed access schemas centralize privilege management with the schema owner.", + Description: "Specifies a comment for the schema.", + }, + ShowOutputAttributeName: { + Type: schema.TypeList, + Computed: true, + Description: "Outputs the result of `SHOW SCHEMA` for the given object.", + Elem: &schema.Resource{ + Schema: schemas.ShowSchemaSchema, + }, + }, + DescribeOutputAttributeName: { + Type: schema.TypeList, + Computed: true, + Description: "Outputs the result of `DESCRIBE SCHEMA` for the given object. In order to handle this output, one must grant sufficient privileges, e.g. [grant_ownership](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/latest/docs/resources/grant_ownership) on all objects in the schema.", + Elem: &schema.Resource{ + Schema: schemas.SchemaDescribeSchema, + }, }, - "data_retention_days": { - Type: schema.TypeInt, - Optional: true, - Default: IntDefault, - Description: "Specifies the number of days for which Time Travel actions (CLONE and UNDROP) can be performed on the schema, as well as specifying the default Time Travel retention time for all tables created in the schema. 
Default value for this field is set to -1, which is a fallback to use Snowflake default.", - ValidateFunc: validation.IntBetween(-1, 90), + ParametersAttributeName: { + Type: schema.TypeList, + Computed: true, + Description: "Outputs the result of `SHOW PARAMETERS IN SCHEMA` for the given object.", + Elem: &schema.Resource{ + Schema: schemas.ShowSchemaParametersSchema, + }, }, - "tag": tagReferenceSchema, } // Schema returns a pointer to the resource representing a schema. func Schema() *schema.Resource { return &schema.Resource{ - Create: CreateSchema, - Read: ReadSchema, - Update: UpdateSchema, - Delete: DeleteSchema, - Schema: schemaSchema, + SchemaVersion: 1, + + CreateContext: CreateContextSchema, + ReadContext: ReadContextSchema(true), + UpdateContext: UpdateContextSchema, + DeleteContext: DeleteContextSchema, + Description: "Resource used to manage schema objects. For more information, check [schema documentation](https://docs.snowflake.com/en/sql-reference/sql/create-schema).", + + CustomizeDiff: customdiff.All( + ComputedIfAnyAttributeChanged(ShowOutputAttributeName, "name", "comment", "with_managed_access", "is_transient"), + ComputedIfAnyAttributeChanged(DescribeOutputAttributeName, "name"), + ComputedIfAnyAttributeChanged(ParametersAttributeName, + strings.ToLower(string(sdk.ObjectParameterDataRetentionTimeInDays)), + strings.ToLower(string(sdk.ObjectParameterMaxDataExtensionTimeInDays)), + strings.ToLower(string(sdk.ObjectParameterExternalVolume)), + strings.ToLower(string(sdk.ObjectParameterCatalog)), + strings.ToLower(string(sdk.ObjectParameterReplaceInvalidCharacters)), + strings.ToLower(string(sdk.ObjectParameterDefaultDDLCollation)), + strings.ToLower(string(sdk.ObjectParameterStorageSerializationPolicy)), + strings.ToLower(string(sdk.ObjectParameterLogLevel)), + strings.ToLower(string(sdk.ObjectParameterTraceLevel)), + strings.ToLower(string(sdk.ObjectParameterSuspendTaskAfterNumFailures)), + 
strings.ToLower(string(sdk.ObjectParameterTaskAutoRetryAttempts)), + strings.ToLower(string(sdk.ObjectParameterUserTaskManagedInitialWarehouseSize)), + strings.ToLower(string(sdk.ObjectParameterUserTaskTimeoutMs)), + strings.ToLower(string(sdk.ObjectParameterUserTaskMinimumTriggerIntervalInSeconds)), + strings.ToLower(string(sdk.ObjectParameterQuotedIdentifiersIgnoreCase)), + strings.ToLower(string(sdk.ObjectParameterEnableConsoleOutput)), + strings.ToLower(string(sdk.ObjectParameterPipeExecutionPaused)), + ), + ParametersCustomDiff( + schemaParametersProvider, + parameter{sdk.AccountParameterDataRetentionTimeInDays, valueTypeInt, sdk.ParameterTypeSchema}, + parameter{sdk.AccountParameterMaxDataExtensionTimeInDays, valueTypeInt, sdk.ParameterTypeSchema}, + parameter{sdk.AccountParameterExternalVolume, valueTypeString, sdk.ParameterTypeSchema}, + parameter{sdk.AccountParameterCatalog, valueTypeString, sdk.ParameterTypeSchema}, + parameter{sdk.AccountParameterReplaceInvalidCharacters, valueTypeBool, sdk.ParameterTypeSchema}, + parameter{sdk.AccountParameterDefaultDDLCollation, valueTypeString, sdk.ParameterTypeSchema}, + parameter{sdk.AccountParameterStorageSerializationPolicy, valueTypeString, sdk.ParameterTypeSchema}, + parameter{sdk.AccountParameterLogLevel, valueTypeString, sdk.ParameterTypeSchema}, + parameter{sdk.AccountParameterTraceLevel, valueTypeString, sdk.ParameterTypeSchema}, + parameter{sdk.AccountParameterSuspendTaskAfterNumFailures, valueTypeInt, sdk.ParameterTypeSchema}, + parameter{sdk.AccountParameterTaskAutoRetryAttempts, valueTypeInt, sdk.ParameterTypeSchema}, + parameter{sdk.AccountParameterUserTaskManagedInitialWarehouseSize, valueTypeString, sdk.ParameterTypeSchema}, + parameter{sdk.AccountParameterUserTaskTimeoutMs, valueTypeInt, sdk.ParameterTypeSchema}, + parameter{sdk.AccountParameterUserTaskMinimumTriggerIntervalInSeconds, valueTypeInt, sdk.ParameterTypeSchema}, + parameter{sdk.AccountParameterQuotedIdentifiersIgnoreCase, valueTypeBool, 
sdk.ParameterTypeSchema}, + parameter{sdk.AccountParameterEnableConsoleOutput, valueTypeBool, sdk.ParameterTypeSchema}, + parameter{sdk.AccountParameterPipeExecutionPaused, valueTypeBool, sdk.ParameterTypeSchema}, + ), + ), + + Schema: helpers.MergeMaps(schemaSchema, DatabaseParametersSchema), Importer: &schema.ResourceImporter{ - StateContext: schema.ImportStatePassthroughContext, + StateContext: ImportSchema, + }, + + StateUpgraders: []schema.StateUpgrader{ + { + Version: 0, + // setting type to cty.EmptyObject is a bit hacky here but following https://developer.hashicorp.com/terraform/plugin/framework/migrating/resources/state-upgrade#sdkv2-1 would require lots of repetitive code; this should work with cty.EmptyObject + Type: cty.EmptyObject, + Upgrade: v093SchemaStateUpgrader, + }, }, } } -// CreateSchema implements schema.CreateFunc. -func CreateSchema(d *schema.ResourceData, meta interface{}) error { +func ImportSchema(ctx context.Context, d *schema.ResourceData, meta any) ([]*schema.ResourceData, error) { + log.Printf("[DEBUG] Starting schema import") client := meta.(*provider.Context).Client - name := d.Get("name").(string) - database := d.Get("database").(string) - - ctx := context.Background() + id := helpers.DecodeSnowflakeID(d.Id()).(sdk.DatabaseObjectIdentifier) - createReq := &sdk.CreateSchemaOptions{ - Transient: GetPropertyAsPointer[bool](d, "is_transient"), - WithManagedAccess: GetPropertyAsPointer[bool](d, "is_managed"), - Tag: getPropertyTags(d, "tag"), - Comment: GetPropertyAsPointer[string](d, "comment"), + s, err := client.Schemas.ShowByID(ctx, id) + if err != nil { + return nil, err + } + if err := d.Set("name", s.Name); err != nil { + return nil, err } - dataRetentionTimeInDays := GetPropertyAsPointer[int](d, "data_retention_days") - if dataRetentionTimeInDays != nil && *dataRetentionTimeInDays != IntDefault { - createReq.DataRetentionTimeInDays = dataRetentionTimeInDays + if err := d.Set("database", s.DatabaseName); err != nil { + return 
nil, err } - err := client.Schemas.Create(ctx, sdk.NewDatabaseObjectIdentifier(database, name), createReq) - if err != nil { - return fmt.Errorf("error creating schema %v err = %w", name, err) + if err := d.Set("comment", s.Comment); err != nil { + return nil, err } - d.SetId(helpers.EncodeSnowflakeID(database, name)) + if err := d.Set("is_transient", booleanStringFromBool(s.IsTransient())); err != nil { + return nil, err + } - return ReadSchema(d, meta) + if err := d.Set("with_managed_access", booleanStringFromBool(s.IsManagedAccess())); err != nil { + return nil, err + } + return []*schema.ResourceData{d}, nil } -// ReadSchema implements schema.ReadFunc. -func ReadSchema(d *schema.ResourceData, meta interface{}) error { +func schemaParametersProvider(ctx context.Context, d ResourceIdProvider, meta any) ([]*sdk.Parameter, error) { client := meta.(*provider.Context).Client - ctx := context.Background() id := helpers.DecodeSnowflakeID(d.Id()).(sdk.DatabaseObjectIdentifier) + return client.Parameters.ShowParameters(ctx, &sdk.ShowParametersOptions{ + In: &sdk.ParametersIn{ + Schema: id, + }, + }) +} - database, err := client.Databases.ShowByID(ctx, id.DatabaseId()) +func CreateContextSchema(ctx context.Context, d *schema.ResourceData, meta any) diag.Diagnostics { + client := meta.(*provider.Context).Client + name := d.Get("name").(string) + database := d.Get("database").(string) + id := sdk.NewDatabaseObjectIdentifier(database, name) + + dataRetentionTimeInDays, + maxDataExtensionTimeInDays, + externalVolume, + catalog, + replaceInvalidCharacters, + defaultDDLCollation, + storageSerializationPolicy, + logLevel, + traceLevel, + suspendTaskAfterNumFailures, + taskAutoRetryAttempts, + userTaskManagedInitialWarehouseSize, + userTaskTimeoutMs, + userTaskMinimumTriggerIntervalInSeconds, + quotedIdentifiersIgnoreCase, + enableConsoleOutput, + err := GetAllDatabaseParameters(d) if err != nil { - d.SetId("") + return diag.FromErr(err) } - s, err := client.Schemas.ShowByID(ctx, 
id) - if err != nil { - log.Printf("[DEBUG] schema (%s) not found", d.Id()) - d.SetId("") - return nil + opts := &sdk.CreateSchemaOptions{ + DataRetentionTimeInDays: dataRetentionTimeInDays, + MaxDataExtensionTimeInDays: maxDataExtensionTimeInDays, + ExternalVolume: externalVolume, + Catalog: catalog, + ReplaceInvalidCharacters: replaceInvalidCharacters, + DefaultDDLCollation: defaultDDLCollation, + StorageSerializationPolicy: storageSerializationPolicy, + LogLevel: logLevel, + TraceLevel: traceLevel, + SuspendTaskAfterNumFailures: suspendTaskAfterNumFailures, + TaskAutoRetryAttempts: taskAutoRetryAttempts, + UserTaskManagedInitialWarehouseSize: userTaskManagedInitialWarehouseSize, + UserTaskTimeoutMs: userTaskTimeoutMs, + UserTaskMinimumTriggerIntervalInSeconds: userTaskMinimumTriggerIntervalInSeconds, + QuotedIdentifiersIgnoreCase: quotedIdentifiersIgnoreCase, + EnableConsoleOutput: enableConsoleOutput, + PipeExecutionPaused: GetConfigPropertyAsPointerAllowingZeroValue[bool](d, "pipe_execution_paused"), + Comment: GetConfigPropertyAsPointerAllowingZeroValue[string](d, "comment"), } - - var retentionTime int64 - // "retention_time" may sometimes be empty string instead of an integer - { - rt := s.RetentionTime - if rt == "" { - rt = "0" + if v := d.Get("is_transient").(string); v != BooleanDefault { + parsed, err := booleanStringToBool(v) + if err != nil { + return diag.FromErr(err) } - - retentionTime, err = strconv.ParseInt(rt, 10, 64) + opts.Transient = sdk.Bool(parsed) + } + if v := d.Get("with_managed_access").(string); v != BooleanDefault { + parsed, err := booleanStringToBool(v) if err != nil { - return err + return diag.FromErr(err) } + opts.WithManagedAccess = sdk.Bool(parsed) } - - if dataRetentionDays := d.Get("data_retention_days"); dataRetentionDays.(int) != IntDefault || int64(database.RetentionTime) != retentionTime { - if err := d.Set("data_retention_days", retentionTime); err != nil { - return err + if strings.EqualFold(strings.TrimSpace(name), 
"PUBLIC") { + opts.OrReplace = sdk.Pointer(true) + } + if err := client.Schemas.Create(ctx, id, opts); err != nil { + return diag.Diagnostics{ + diag.Diagnostic{ + Severity: diag.Error, + Summary: "Failed to create schema.", + Detail: fmt.Sprintf("schema name: %s, err: %s", id.FullyQualifiedName(), err), + }, } } + d.SetId(helpers.EncodeSnowflakeID(database, name)) - values := map[string]any{ - "name": s.Name, - "database": s.DatabaseName, - "comment": s.Comment, - // reset the options before reading back from the DB - "is_transient": false, - "is_managed": false, - } + return ReadContextSchema(false)(ctx, d, meta) +} - for k, v := range values { - if err := d.Set(k, v); err != nil { - return err +func ReadContextSchema(withExternalChangesMarking bool) schema.ReadContextFunc { + return func(ctx context.Context, d *schema.ResourceData, meta any) diag.Diagnostics { + client := meta.(*provider.Context).Client + id := helpers.DecodeSnowflakeID(d.Id()).(sdk.DatabaseObjectIdentifier) + + _, err := client.Databases.ShowByID(ctx, id.DatabaseId()) + if err != nil { + log.Printf("[DEBUG] database %s for schema %s not found", id.DatabaseId().Name(), id.Name()) + d.SetId("") + return diag.Diagnostics{ + diag.Diagnostic{ + Severity: diag.Warning, + Summary: "Failed to query database. Marking the resource as removed.", + Detail: fmt.Sprintf("database name: %s, Err: %s", id.DatabaseId(), err), + }, + } } - } - if opts := s.Options; opts != nil && *opts != "" { - for _, opt := range strings.Split(*opts, ", ") { - switch opt { - case "TRANSIENT": - if err := d.Set("is_transient", true); err != nil { - return err - } - case "MANAGED ACCESS": - if err := d.Set("is_managed", true); err != nil { - return err + schema, err := client.Schemas.ShowByID(ctx, id) + if err != nil { + if errors.Is(err, sdk.ErrObjectNotFound) { + d.SetId("") + return diag.Diagnostics{ + diag.Diagnostic{ + Severity: diag.Warning, + Summary: "Failed to query schema. 
Marking the resource as removed.", + Detail: fmt.Sprintf("schema name: %s, Err: %s", id.FullyQualifiedName(), err), + }, } } + return diag.FromErr(err) + } + if err := d.Set("name", schema.Name); err != nil { + return diag.FromErr(err) } - } - return nil + if err := d.Set("database", schema.DatabaseName); err != nil { + return diag.FromErr(err) + } + + if err := d.Set("comment", schema.Comment); err != nil { + return diag.FromErr(err) + } + + schemaParameters, err := client.Parameters.ShowParameters(ctx, &sdk.ShowParametersOptions{ + In: &sdk.ParametersIn{ + Schema: id, + }, + }) + if err != nil { + return diag.FromErr(err) + } + + if diags := HandleDatabaseParameterRead(d, schemaParameters); diags != nil { + return diags + } + pipeExecutionPaused, err := collections.FindOne(schemaParameters, func(property *sdk.Parameter) bool { + return property.Key == "PIPE_EXECUTION_PAUSED" + }) + if err != nil { + return diag.FromErr(fmt.Errorf("failed to find schema PIPE_EXECUTION_PAUSED parameter, err = %w", err)) + } + value, err := strconv.ParseBool((*pipeExecutionPaused).Value) + if err != nil { + return diag.FromErr(err) + } + if err := d.Set(strings.ToLower(string(sdk.ObjectParameterPipeExecutionPaused)), value); err != nil { + return diag.FromErr(err) + } + + if withExternalChangesMarking { + if err = handleExternalChangesToObjectInShow(d, + showMapping{"options", "is_transient", schema.IsTransient(), booleanStringFromBool(schema.IsTransient()), func(x any) any { + return slices.Contains(sdk.ParseCommaSeparatedStringArray(x.(string), false), "TRANSIENT") + }}, + showMapping{"options", "with_managed_access", schema.IsManagedAccess(), booleanStringFromBool(schema.IsManagedAccess()), func(x any) any { + return slices.Contains(sdk.ParseCommaSeparatedStringArray(x.(string), false), "MANAGED ACCESS") + }}, + ); err != nil { + return diag.FromErr(err) + } + } + + if err = setStateToValuesFromConfig(d, schemaSchema, []string{ + "is_transient", + "with_managed_access", + }); err 
!= nil { + return diag.FromErr(err) + } + + describeResult, err := client.Schemas.Describe(ctx, schema.ID()) + if err != nil { + log.Printf("[DEBUG] describing schema: %s, err: %s", id.FullyQualifiedName(), err) + } else { + if err = d.Set(DescribeOutputAttributeName, schemas.SchemaDescriptionToSchema(describeResult)); err != nil { + return diag.FromErr(err) + } + } + + if err = d.Set(ShowOutputAttributeName, []map[string]any{schemas.SchemaToSchema(schema)}); err != nil { + return diag.FromErr(err) + } + + if err = d.Set(ParametersAttributeName, []map[string]any{schemas.SchemaParametersToSchema(schemaParameters)}); err != nil { + return diag.FromErr(err) + } + return nil + } } -// UpdateSchema implements schema.UpdateFunc. -func UpdateSchema(d *schema.ResourceData, meta interface{}) error { +func UpdateContextSchema(ctx context.Context, d *schema.ResourceData, meta any) diag.Diagnostics { id := helpers.DecodeSnowflakeID(d.Id()).(sdk.DatabaseObjectIdentifier) client := meta.(*provider.Context).Client - ctx := context.Background() if d.HasChange("name") { - newId := sdk.NewDatabaseObjectIdentifier(id.DatabaseName(), d.Get("name").(string)) - + newId := sdk.NewDatabaseObjectIdentifier(d.Get("database").(string), d.Get("name").(string)) err := client.Schemas.Alter(ctx, id, &sdk.AlterSchemaOptions{ NewName: sdk.Pointer(newId), }) if err != nil { - return fmt.Errorf("error updating schema name on %v err = %w", d.Id(), err) + return diag.FromErr(err) } - d.SetId(helpers.EncodeSnowflakeID(newId)) id = newId } - if d.HasChange("comment") { - comment := d.Get("comment") - if comment != "" { - err := client.Schemas.Alter(ctx, id, &sdk.AlterSchemaOptions{ - Set: &sdk.SchemaSet{ - Comment: sdk.String(comment.(string)), - }, - }) + if d.HasChange("with_managed_access") { + if v := d.Get("with_managed_access").(string); v != BooleanDefault { + var err error + parsed, err := booleanStringToBool(v) if err != nil { - return fmt.Errorf("error updating schema comment on %v err = %w", 
d.Id(), err) + return diag.FromErr(err) + } + if parsed { + err = client.Schemas.Alter(ctx, id, &sdk.AlterSchemaOptions{ + EnableManagedAccess: sdk.Pointer(true), + }) + } else { + err = client.Schemas.Alter(ctx, id, &sdk.AlterSchemaOptions{ + DisableManagedAccess: sdk.Pointer(true), + }) } - } else { - err := client.Schemas.Alter(ctx, id, &sdk.AlterSchemaOptions{ - Unset: &sdk.SchemaUnset{ - Comment: sdk.Bool(true), - }, - }) if err != nil { - return fmt.Errorf("error updating schema comment on %v err = %w", d.Id(), err) + return diag.FromErr(fmt.Errorf("error handling with_managed_access on %v err = %w", d.Id(), err)) } - } - } - - if d.HasChange("is_managed") { - managed := d.Get("is_managed") - var err error - if managed.(bool) { - err = client.Schemas.Alter(ctx, id, &sdk.AlterSchemaOptions{ - EnableManagedAccess: sdk.Bool(true), - }) } else { - err = client.Schemas.Alter(ctx, id, &sdk.AlterSchemaOptions{ - DisableManagedAccess: sdk.Bool(true), - }) - } - if err != nil { - return fmt.Errorf("error changing management state on %v err = %w", d.Id(), err) + // managed access can not be UNSET to a default value + if err := client.Schemas.Alter(ctx, id, &sdk.AlterSchemaOptions{ + DisableManagedAccess: sdk.Pointer(true), + }); err != nil { + return diag.FromErr(fmt.Errorf("error handling with_managed_access on %v err = %w", d.Id(), err)) + } } } - if d.HasChange("data_retention_days") { - if days := d.Get("data_retention_days"); days.(int) != IntDefault { - err := client.Schemas.Alter(ctx, id, &sdk.AlterSchemaOptions{ - Set: &sdk.SchemaSet{ - DataRetentionTimeInDays: sdk.Int(days.(int)), - }, - }) - if err != nil { - return fmt.Errorf("error setting data retention days on %v err = %w", d.Id(), err) - } + set := new(sdk.SchemaSet) + unset := new(sdk.SchemaUnset) + + if d.HasChange("comment") { + comment := d.Get("comment").(string) + if len(comment) > 0 { + set.Comment = &comment } else { - err := client.Schemas.Alter(ctx, id, &sdk.AlterSchemaOptions{ - Unset: 
&sdk.SchemaUnset{ - DataRetentionTimeInDays: sdk.Bool(true), - }, - }) - if err != nil { - return fmt.Errorf("error unsetting data retention days on %v err = %w", d.Id(), err) - } + unset.Comment = sdk.Bool(true) } } - if d.HasChange("tag") { - unsetTags, setTags := GetTagsDiff(d, "tag") - - if len(unsetTags) > 0 { - err := client.Schemas.Alter(ctx, id, &sdk.AlterSchemaOptions{ - UnsetTag: unsetTags, - }) - if err != nil { - return fmt.Errorf("error occurred when dropping tags on %v, err = %w", d.Id(), err) - } + if updateParamDiags := HandleSchemaParametersChanges(d, set, unset); len(updateParamDiags) > 0 { + return updateParamDiags + } + if (*set != sdk.SchemaSet{}) { + err := client.Schemas.Alter(ctx, id, &sdk.AlterSchemaOptions{ + Set: set, + }) + if err != nil { + return diag.FromErr(err) } + } - if len(setTags) > 0 { - err := client.Schemas.Alter(ctx, id, &sdk.AlterSchemaOptions{ - SetTag: setTags, - }) - if err != nil { - return fmt.Errorf("error occurred when setting tags on %v, err = %w", d.Id(), err) - } + if (*unset != sdk.SchemaUnset{}) { + err := client.Schemas.Alter(ctx, id, &sdk.AlterSchemaOptions{ + Unset: unset, + }) + if err != nil { + return diag.FromErr(err) } } - return ReadSchema(d, meta) + return ReadContextSchema(false)(ctx, d, meta) } -// DeleteSchema implements schema.DeleteFunc. 
-func DeleteSchema(d *schema.ResourceData, meta interface{}) error { - client := meta.(*provider.Context).Client - ctx := context.Background() +func HandleSchemaParametersChanges(d *schema.ResourceData, set *sdk.SchemaSet, unset *sdk.SchemaUnset) diag.Diagnostics { + return JoinDiags( + handleValuePropertyChange[int](d, "data_retention_time_in_days", &set.DataRetentionTimeInDays, &unset.DataRetentionTimeInDays), + handleValuePropertyChange[int](d, "max_data_extension_time_in_days", &set.MaxDataExtensionTimeInDays, &unset.MaxDataExtensionTimeInDays), + handleValuePropertyChangeWithMapping[string](d, "external_volume", &set.ExternalVolume, &unset.ExternalVolume, func(value string) (sdk.AccountObjectIdentifier, error) { + return sdk.NewAccountObjectIdentifier(value), nil + }), + handleValuePropertyChangeWithMapping[string](d, "catalog", &set.Catalog, &unset.Catalog, func(value string) (sdk.AccountObjectIdentifier, error) { + return sdk.NewAccountObjectIdentifier(value), nil + }), + handleValuePropertyChange[bool](d, "pipe_execution_paused", &set.PipeExecutionPaused, &unset.PipeExecutionPaused), + handleValuePropertyChange[bool](d, "replace_invalid_characters", &set.ReplaceInvalidCharacters, &unset.ReplaceInvalidCharacters), + handleValuePropertyChange[string](d, "default_ddl_collation", &set.DefaultDDLCollation, &unset.DefaultDDLCollation), + handleValuePropertyChangeWithMapping[string](d, "storage_serialization_policy", &set.StorageSerializationPolicy, &unset.StorageSerializationPolicy, sdk.ToStorageSerializationPolicy), + handleValuePropertyChangeWithMapping[string](d, "log_level", &set.LogLevel, &unset.LogLevel, sdk.ToLogLevel), + handleValuePropertyChangeWithMapping[string](d, "trace_level", &set.TraceLevel, &unset.TraceLevel, sdk.ToTraceLevel), + handleValuePropertyChange[int](d, "suspend_task_after_num_failures", &set.SuspendTaskAfterNumFailures, &unset.SuspendTaskAfterNumFailures), + handleValuePropertyChange[int](d, "task_auto_retry_attempts", 
&set.TaskAutoRetryAttempts, &unset.TaskAutoRetryAttempts), + handleValuePropertyChangeWithMapping[string](d, "user_task_managed_initial_warehouse_size", &set.UserTaskManagedInitialWarehouseSize, &unset.UserTaskManagedInitialWarehouseSize, sdk.ToWarehouseSize), + handleValuePropertyChange[int](d, "user_task_timeout_ms", &set.UserTaskTimeoutMs, &unset.UserTaskTimeoutMs), + handleValuePropertyChange[int](d, "user_task_minimum_trigger_interval_in_seconds", &set.UserTaskMinimumTriggerIntervalInSeconds, &unset.UserTaskMinimumTriggerIntervalInSeconds), + handleValuePropertyChange[bool](d, "quoted_identifiers_ignore_case", &set.QuotedIdentifiersIgnoreCase, &unset.QuotedIdentifiersIgnoreCase), + handleValuePropertyChange[bool](d, "enable_console_output", &set.EnableConsoleOutput, &unset.EnableConsoleOutput), + ) +} + +func DeleteContextSchema(ctx context.Context, d *schema.ResourceData, meta any) diag.Diagnostics { id := helpers.DecodeSnowflakeID(d.Id()).(sdk.DatabaseObjectIdentifier) + client := meta.(*provider.Context).Client - err := client.Schemas.Drop(ctx, id, new(sdk.DropSchemaOptions)) + err := client.Schemas.Drop(ctx, id, &sdk.DropSchemaOptions{IfExists: sdk.Pointer(true)}) if err != nil { - return fmt.Errorf("error deleting schema %v err = %w", d.Id(), err) + return diag.Diagnostics{ + diag.Diagnostic{ + Severity: diag.Error, + Summary: "Error deleting schema", + Detail: fmt.Sprintf("id %v err = %v", id.Name(), err), + }, + } } d.SetId("") - return nil } diff --git a/pkg/resources/schema_acceptance_test.go b/pkg/resources/schema_acceptance_test.go index c5ea1d76af..4d1ebfd35b 100644 --- a/pkg/resources/schema_acceptance_test.go +++ b/pkg/resources/schema_acceptance_test.go @@ -7,10 +7,17 @@ import ( "testing" acc "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance" + acchelpers "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/helpers" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/planchecks" r 
"github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/resources" + "github.com/stretchr/testify/require" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/importchecks" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/testenvs" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/helpers" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/provider/resources" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk" + tfjson "github.com/hashicorp/terraform-json" "github.com/hashicorp/terraform-plugin-testing/config" "github.com/hashicorp/terraform-plugin-testing/helper/resource" "github.com/hashicorp/terraform-plugin-testing/plancheck" @@ -18,9 +25,76 @@ import ( "github.com/hashicorp/terraform-plugin-testing/tfversion" ) -func TestAcc_Schema(t *testing.T) { - name := acc.TestClient().Ids.Alpha() - comment := "Terraform acceptance test" +func TestAcc_Schema_basic(t *testing.T) { + id := acc.TestClient().Ids.RandomDatabaseObjectIdentifier() + databaseId := acc.TestClient().Ids.DatabaseId() + + externalVolumeId, externalVolumeCleanup := acc.TestClient().ExternalVolume.Create(t) + t.Cleanup(externalVolumeCleanup) + + catalogId, catalogCleanup := acc.TestClient().CatalogIntegration.Create(t) + t.Cleanup(catalogCleanup) + + basicConfigVariables := config.Variables{ + "name": config.StringVariable(id.Name()), + "comment": config.StringVariable("foo"), + "database": config.StringVariable(databaseId.Name()), + } + + basicConfigVariablesWithTransient := func(isTransient bool) config.Variables { + return config.Variables{ + "name": config.StringVariable(id.Name()), + "comment": config.StringVariable("foo"), + "database": config.StringVariable(databaseId.Name()), + "is_transient": config.BoolVariable(isTransient), + } + } + + completeConfigVariables := config.Variables{ + "name": config.StringVariable(id.Name()), + "comment": config.StringVariable("foo"), + "database": 
config.StringVariable(databaseId.Name()), + "with_managed_access": config.BoolVariable(true), + "is_transient": config.BoolVariable(false), + + "data_retention_time_in_days": config.IntegerVariable(1), + "max_data_extension_time_in_days": config.IntegerVariable(1), + "external_volume": config.StringVariable(externalVolumeId.Name()), + "catalog": config.StringVariable(catalogId.Name()), + "replace_invalid_characters": config.BoolVariable(true), + "default_ddl_collation": config.StringVariable("en_US"), + "storage_serialization_policy": config.StringVariable(string(sdk.StorageSerializationPolicyCompatible)), + "log_level": config.StringVariable(string(sdk.LogLevelInfo)), + "trace_level": config.StringVariable(string(sdk.TraceLevelOnEvent)), + "suspend_task_after_num_failures": config.IntegerVariable(20), + "task_auto_retry_attempts": config.IntegerVariable(20), + "user_task_managed_initial_warehouse_size": config.StringVariable(string(sdk.WarehouseSizeXLarge)), + "user_task_timeout_ms": config.IntegerVariable(1200000), + "user_task_minimum_trigger_interval_in_seconds": config.IntegerVariable(120), + "quoted_identifiers_ignore_case": config.BoolVariable(true), + "enable_console_output": config.BoolVariable(true), + "pipe_execution_paused": config.BoolVariable(true), + } + + var ( + accountDataRetentionTimeInDays = new(string) + accountMaxDataExtensionTimeInDays = new(string) + accountExternalVolume = new(string) + accountCatalog = new(string) + accountReplaceInvalidCharacters = new(string) + accountDefaultDdlCollation = new(string) + accountStorageSerializationPolicy = new(string) + accountLogLevel = new(string) + accountTraceLevel = new(string) + accountSuspendTaskAfterNumFailures = new(string) + accountTaskAutoRetryAttempts = new(string) + accountUserTaskMangedInitialWarehouseSize = new(string) + accountUserTaskTimeoutMs = new(string) + accountUserTaskMinimumTriggerIntervalInSeconds = new(string) + accountQuotedIdentifiersIgnoreCase = new(string) + 
accountEnableConsoleOutput = new(string) + accountPipeExecutionPaused = new(string) + ) resource.Test(t, resource.TestCase{ ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, @@ -31,36 +105,296 @@ func TestAcc_Schema(t *testing.T) { CheckDestroy: acc.CheckDestroy(t, resources.Schema), Steps: []resource.TestStep{ { - ConfigDirectory: config.TestNameDirectory(), - ConfigVariables: map[string]config.Variable{ - "name": config.StringVariable(name), - "database": config.StringVariable(acc.TestDatabaseName), - "comment": config.StringVariable(comment), + PreConfig: func() { + params := acc.TestClient().Parameter.ShowAccountParameters(t) + *accountDataRetentionTimeInDays = acchelpers.FindParameter(t, params, sdk.AccountParameterDataRetentionTimeInDays).Value + *accountMaxDataExtensionTimeInDays = acchelpers.FindParameter(t, params, sdk.AccountParameterMaxDataExtensionTimeInDays).Value + *accountExternalVolume = acchelpers.FindParameter(t, params, sdk.AccountParameterExternalVolume).Value + *accountCatalog = acchelpers.FindParameter(t, params, sdk.AccountParameterCatalog).Value + *accountReplaceInvalidCharacters = acchelpers.FindParameter(t, params, sdk.AccountParameterReplaceInvalidCharacters).Value + *accountDefaultDdlCollation = acchelpers.FindParameter(t, params, sdk.AccountParameterDefaultDDLCollation).Value + *accountStorageSerializationPolicy = acchelpers.FindParameter(t, params, sdk.AccountParameterStorageSerializationPolicy).Value + *accountLogLevel = acchelpers.FindParameter(t, params, sdk.AccountParameterLogLevel).Value + *accountTraceLevel = acchelpers.FindParameter(t, params, sdk.AccountParameterTraceLevel).Value + *accountSuspendTaskAfterNumFailures = acchelpers.FindParameter(t, params, sdk.AccountParameterSuspendTaskAfterNumFailures).Value + *accountTaskAutoRetryAttempts = acchelpers.FindParameter(t, params, sdk.AccountParameterTaskAutoRetryAttempts).Value + *accountUserTaskMangedInitialWarehouseSize = acchelpers.FindParameter(t, params, 
sdk.AccountParameterUserTaskManagedInitialWarehouseSize).Value + *accountUserTaskTimeoutMs = acchelpers.FindParameter(t, params, sdk.AccountParameterUserTaskTimeoutMs).Value + *accountUserTaskMinimumTriggerIntervalInSeconds = acchelpers.FindParameter(t, params, sdk.AccountParameterUserTaskMinimumTriggerIntervalInSeconds).Value + *accountQuotedIdentifiersIgnoreCase = acchelpers.FindParameter(t, params, sdk.AccountParameterQuotedIdentifiersIgnoreCase).Value + *accountEnableConsoleOutput = acchelpers.FindParameter(t, params, sdk.AccountParameterEnableConsoleOutput).Value + *accountPipeExecutionPaused = acchelpers.FindParameter(t, params, sdk.AccountParameterPipeExecutionPaused).Value }, - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr("snowflake_schema.test", "name", name), - resource.TestCheckResourceAttr("snowflake_schema.test", "database", acc.TestDatabaseName), - resource.TestCheckResourceAttr("snowflake_schema.test", "comment", comment), - checkBool("snowflake_schema.test", "is_transient", false), - checkBool("snowflake_schema.test", "is_managed", false), + ConfigDirectory: acc.ConfigurationDirectory("TestAcc_Schema/basic"), + ConfigVariables: basicConfigVariables, + Check: resource.ComposeAggregateTestCheckFunc( + resource.TestCheckResourceAttr("snowflake_schema.test", "name", id.Name()), + resource.TestCheckResourceAttr("snowflake_schema.test", "database", databaseId.Name()), + resource.TestCheckResourceAttr("snowflake_schema.test", "with_managed_access", r.BooleanDefault), + resource.TestCheckResourceAttr("snowflake_schema.test", "is_transient", r.BooleanDefault), + + resource.TestCheckResourceAttrPtr("snowflake_schema.test", "data_retention_time_in_days", accountDataRetentionTimeInDays), + resource.TestCheckResourceAttrPtr("snowflake_schema.test", "max_data_extension_time_in_days", accountMaxDataExtensionTimeInDays), + resource.TestCheckResourceAttrPtr("snowflake_schema.test", "external_volume", accountExternalVolume), + 
resource.TestCheckResourceAttrPtr("snowflake_schema.test", "catalog", accountCatalog), + resource.TestCheckResourceAttrPtr("snowflake_schema.test", "replace_invalid_characters", accountReplaceInvalidCharacters), + resource.TestCheckResourceAttrPtr("snowflake_schema.test", "default_ddl_collation", accountDefaultDdlCollation), + resource.TestCheckResourceAttrPtr("snowflake_schema.test", "storage_serialization_policy", accountStorageSerializationPolicy), + resource.TestCheckResourceAttrPtr("snowflake_schema.test", "log_level", accountLogLevel), + resource.TestCheckResourceAttrPtr("snowflake_schema.test", "trace_level", accountTraceLevel), + resource.TestCheckResourceAttrPtr("snowflake_schema.test", "suspend_task_after_num_failures", accountSuspendTaskAfterNumFailures), + resource.TestCheckResourceAttrPtr("snowflake_schema.test", "task_auto_retry_attempts", accountTaskAutoRetryAttempts), + resource.TestCheckResourceAttrPtr("snowflake_schema.test", "user_task_managed_initial_warehouse_size", accountUserTaskMangedInitialWarehouseSize), + resource.TestCheckResourceAttrPtr("snowflake_schema.test", "user_task_timeout_ms", accountUserTaskTimeoutMs), + resource.TestCheckResourceAttrPtr("snowflake_schema.test", "user_task_minimum_trigger_interval_in_seconds", accountUserTaskMinimumTriggerIntervalInSeconds), + resource.TestCheckResourceAttrPtr("snowflake_schema.test", "quoted_identifiers_ignore_case", accountQuotedIdentifiersIgnoreCase), + resource.TestCheckResourceAttrPtr("snowflake_schema.test", "enable_console_output", accountEnableConsoleOutput), + resource.TestCheckResourceAttrPtr("snowflake_schema.test", "pipe_execution_paused", accountPipeExecutionPaused), + + resource.TestCheckResourceAttrSet("snowflake_schema.test", "show_output.0.created_on"), + resource.TestCheckResourceAttr("snowflake_schema.test", "show_output.0.name", id.Name()), + resource.TestCheckResourceAttr("snowflake_schema.test", "show_output.0.is_default", "false"), + 
resource.TestCheckResourceAttrSet("snowflake_schema.test", "show_output.0.is_current"), + resource.TestCheckResourceAttr("snowflake_schema.test", "show_output.0.database_name", databaseId.Name()), + resource.TestCheckResourceAttrSet("snowflake_schema.test", "show_output.0.owner"), + resource.TestCheckResourceAttr("snowflake_schema.test", "show_output.0.comment", ""), + resource.TestCheckResourceAttr("snowflake_schema.test", "show_output.0.options", ""), ), }, - // UPDATE COMMENT (proves issue https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2606) + // import - without optionals { - ConfigDirectory: config.TestNameDirectory(), - ConfigVariables: map[string]config.Variable{ - "name": config.StringVariable(name), - "database": config.StringVariable(acc.TestDatabaseName), - "comment": config.StringVariable(""), + ConfigDirectory: acc.ConfigurationDirectory("TestAcc_Schema/basic"), + ConfigVariables: basicConfigVariables, + ResourceName: "snowflake_schema.test", + ImportState: true, + ImportStateCheck: importchecks.ComposeAggregateImportStateCheck( + importchecks.TestCheckResourceAttrInstanceState(helpers.EncodeSnowflakeID(id), "name", id.Name()), + importchecks.TestCheckResourceAttrInstanceState(helpers.EncodeSnowflakeID(id), "database", databaseId.Name()), + importchecks.TestCheckResourceAttrInstanceState(helpers.EncodeSnowflakeID(id), "with_managed_access", "false"), + importchecks.TestCheckResourceAttrInstanceState(helpers.EncodeSnowflakeID(id), "is_transient", "false"), + ), + }, + // set other fields + { + ConfigDirectory: acc.ConfigurationDirectory("TestAcc_Schema/complete"), + ConfigVariables: completeConfigVariables, + ConfigPlanChecks: resource.ConfigPlanChecks{ + PreApply: []plancheck.PlanCheck{ + plancheck.ExpectResourceAction("snowflake_schema.test", plancheck.ResourceActionUpdate), + }, }, - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr("snowflake_schema.test", "name", name), - 
resource.TestCheckResourceAttr("snowflake_schema.test", "database", acc.TestDatabaseName), - resource.TestCheckResourceAttr("snowflake_schema.test", "comment", ""), - checkBool("snowflake_schema.test", "is_transient", false), - checkBool("snowflake_schema.test", "is_managed", false), + Check: resource.ComposeAggregateTestCheckFunc( + resource.TestCheckResourceAttr("snowflake_schema.test", "name", id.Name()), + resource.TestCheckResourceAttr("snowflake_schema.test", "with_managed_access", "true"), + resource.TestCheckResourceAttr("snowflake_schema.test", "is_transient", "false"), + resource.TestCheckResourceAttr("snowflake_schema.test", "comment", "foo"), + + resource.TestCheckResourceAttr("snowflake_schema.test", "data_retention_time_in_days", "1"), + resource.TestCheckResourceAttr("snowflake_schema.test", "max_data_extension_time_in_days", "1"), + resource.TestCheckResourceAttr("snowflake_schema.test", "external_volume", externalVolumeId.Name()), + resource.TestCheckResourceAttr("snowflake_schema.test", "catalog", catalogId.Name()), + resource.TestCheckResourceAttr("snowflake_schema.test", "replace_invalid_characters", "true"), + resource.TestCheckResourceAttr("snowflake_schema.test", "default_ddl_collation", "en_US"), + resource.TestCheckResourceAttr("snowflake_schema.test", "storage_serialization_policy", string(sdk.StorageSerializationPolicyCompatible)), + resource.TestCheckResourceAttr("snowflake_schema.test", "log_level", string(sdk.LogLevelInfo)), + resource.TestCheckResourceAttr("snowflake_schema.test", "trace_level", string(sdk.TraceLevelOnEvent)), + resource.TestCheckResourceAttr("snowflake_schema.test", "suspend_task_after_num_failures", "20"), + resource.TestCheckResourceAttr("snowflake_schema.test", "task_auto_retry_attempts", "20"), + resource.TestCheckResourceAttr("snowflake_schema.test", "user_task_managed_initial_warehouse_size", string(sdk.WarehouseSizeXLarge)), + resource.TestCheckResourceAttr("snowflake_schema.test", "user_task_timeout_ms", 
"1200000"), + resource.TestCheckResourceAttr("snowflake_schema.test", "user_task_minimum_trigger_interval_in_seconds", "120"), + resource.TestCheckResourceAttr("snowflake_schema.test", "quoted_identifiers_ignore_case", "true"), + resource.TestCheckResourceAttr("snowflake_schema.test", "enable_console_output", "true"), + resource.TestCheckResourceAttr("snowflake_schema.test", "pipe_execution_paused", "true"), + + resource.TestCheckResourceAttrSet("snowflake_schema.test", "show_output.0.created_on"), + resource.TestCheckResourceAttr("snowflake_schema.test", "show_output.0.name", id.Name()), + resource.TestCheckResourceAttr("snowflake_schema.test", "show_output.0.is_default", "false"), + resource.TestCheckResourceAttrSet("snowflake_schema.test", "show_output.0.is_current"), + resource.TestCheckResourceAttr("snowflake_schema.test", "show_output.0.database_name", databaseId.Name()), + resource.TestCheckResourceAttrSet("snowflake_schema.test", "show_output.0.owner"), + resource.TestCheckResourceAttr("snowflake_schema.test", "show_output.0.comment", "foo"), + resource.TestCheckResourceAttr("snowflake_schema.test", "show_output.0.options", "MANAGED ACCESS"), + + resource.TestCheckResourceAttr("snowflake_schema.test", "parameters.0.data_retention_time_in_days.0.value", "1"), + resource.TestCheckResourceAttr("snowflake_schema.test", "parameters.0.max_data_extension_time_in_days.0.value", "1"), + resource.TestCheckResourceAttr("snowflake_schema.test", "parameters.0.external_volume.0.value", externalVolumeId.Name()), + resource.TestCheckResourceAttr("snowflake_schema.test", "parameters.0.catalog.0.value", catalogId.Name()), + resource.TestCheckResourceAttr("snowflake_schema.test", "parameters.0.replace_invalid_characters.0.value", "true"), + resource.TestCheckResourceAttr("snowflake_schema.test", "parameters.0.default_ddl_collation.0.value", "en_US"), + resource.TestCheckResourceAttr("snowflake_schema.test", "parameters.0.storage_serialization_policy.0.value", 
string(sdk.StorageSerializationPolicyCompatible)), + resource.TestCheckResourceAttr("snowflake_schema.test", "parameters.0.log_level.0.value", string(sdk.LogLevelInfo)), + resource.TestCheckResourceAttr("snowflake_schema.test", "parameters.0.trace_level.0.value", string(sdk.TraceLevelOnEvent)), + resource.TestCheckResourceAttr("snowflake_schema.test", "parameters.0.suspend_task_after_num_failures.0.value", "20"), + resource.TestCheckResourceAttr("snowflake_schema.test", "parameters.0.task_auto_retry_attempts.0.value", "20"), + resource.TestCheckResourceAttr("snowflake_schema.test", "parameters.0.user_task_managed_initial_warehouse_size.0.value", string(sdk.WarehouseSizeXLarge)), + resource.TestCheckResourceAttr("snowflake_schema.test", "parameters.0.user_task_timeout_ms.0.value", "1200000"), + resource.TestCheckResourceAttr("snowflake_schema.test", "parameters.0.user_task_minimum_trigger_interval_in_seconds.0.value", "120"), + resource.TestCheckResourceAttr("snowflake_schema.test", "parameters.0.quoted_identifiers_ignore_case.0.value", "true"), + resource.TestCheckResourceAttr("snowflake_schema.test", "parameters.0.enable_console_output.0.value", "true"), + resource.TestCheckResourceAttr("snowflake_schema.test", "parameters.0.pipe_execution_paused.0.value", "true"), ), }, + { + ConfigDirectory: acc.ConfigurationDirectory("TestAcc_Schema/complete"), + ConfigVariables: completeConfigVariables, + ResourceName: "snowflake_schema.test", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"show_output.0.is_current"}, + }, + // unset + { + ConfigDirectory: acc.ConfigurationDirectory("TestAcc_Schema/basic_with_transient"), + ConfigVariables: basicConfigVariablesWithTransient(false), + ConfigPlanChecks: resource.ConfigPlanChecks{ + PreApply: []plancheck.PlanCheck{ + plancheck.ExpectResourceAction("snowflake_schema.test", plancheck.ResourceActionUpdate), + }, + }, + Check: resource.ComposeAggregateTestCheckFunc( + 
resource.TestCheckResourceAttr("snowflake_schema.test", "name", id.Name()), + resource.TestCheckResourceAttr("snowflake_schema.test", "database", databaseId.Name()), + resource.TestCheckResourceAttr("snowflake_schema.test", "with_managed_access", r.BooleanDefault), + resource.TestCheckResourceAttr("snowflake_schema.test", "is_transient", "false"), + + resource.TestCheckResourceAttrPtr("snowflake_schema.test", "data_retention_time_in_days", accountDataRetentionTimeInDays), + resource.TestCheckResourceAttrPtr("snowflake_schema.test", "max_data_extension_time_in_days", accountMaxDataExtensionTimeInDays), + resource.TestCheckResourceAttrPtr("snowflake_schema.test", "external_volume", accountExternalVolume), + resource.TestCheckResourceAttrPtr("snowflake_schema.test", "catalog", accountCatalog), + resource.TestCheckResourceAttrPtr("snowflake_schema.test", "replace_invalid_characters", accountReplaceInvalidCharacters), + resource.TestCheckResourceAttrPtr("snowflake_schema.test", "default_ddl_collation", accountDefaultDdlCollation), + resource.TestCheckResourceAttrPtr("snowflake_schema.test", "storage_serialization_policy", accountStorageSerializationPolicy), + resource.TestCheckResourceAttrPtr("snowflake_schema.test", "log_level", accountLogLevel), + resource.TestCheckResourceAttrPtr("snowflake_schema.test", "trace_level", accountTraceLevel), + resource.TestCheckResourceAttrPtr("snowflake_schema.test", "suspend_task_after_num_failures", accountSuspendTaskAfterNumFailures), + resource.TestCheckResourceAttrPtr("snowflake_schema.test", "task_auto_retry_attempts", accountTaskAutoRetryAttempts), + resource.TestCheckResourceAttrPtr("snowflake_schema.test", "user_task_managed_initial_warehouse_size", accountUserTaskMangedInitialWarehouseSize), + resource.TestCheckResourceAttrPtr("snowflake_schema.test", "user_task_timeout_ms", accountUserTaskTimeoutMs), + resource.TestCheckResourceAttrPtr("snowflake_schema.test", "user_task_minimum_trigger_interval_in_seconds", 
accountUserTaskMinimumTriggerIntervalInSeconds), + resource.TestCheckResourceAttrPtr("snowflake_schema.test", "quoted_identifiers_ignore_case", accountQuotedIdentifiersIgnoreCase), + resource.TestCheckResourceAttrPtr("snowflake_schema.test", "enable_console_output", accountEnableConsoleOutput), + resource.TestCheckResourceAttrPtr("snowflake_schema.test", "pipe_execution_paused", accountPipeExecutionPaused), + ), + }, + // set is_transient - recreate + { + ConfigDirectory: acc.ConfigurationDirectory("TestAcc_Schema/basic_with_transient"), + ConfigVariables: basicConfigVariablesWithTransient(true), + ConfigPlanChecks: resource.ConfigPlanChecks{ + PreApply: []plancheck.PlanCheck{ + plancheck.ExpectResourceAction("snowflake_schema.test", plancheck.ResourceActionDestroyBeforeCreate), + }, + }, + Check: resource.ComposeAggregateTestCheckFunc( + resource.TestCheckResourceAttr("snowflake_schema.test", "name", id.Name()), + resource.TestCheckResourceAttr("snowflake_schema.test", "database", databaseId.Name()), + resource.TestCheckResourceAttr("snowflake_schema.test", "with_managed_access", r.BooleanDefault), + resource.TestCheckResourceAttr("snowflake_schema.test", "is_transient", "true"), + + resource.TestCheckResourceAttrPtr("snowflake_schema.test", "data_retention_time_in_days", accountDataRetentionTimeInDays), + resource.TestCheckResourceAttrPtr("snowflake_schema.test", "max_data_extension_time_in_days", accountMaxDataExtensionTimeInDays), + resource.TestCheckResourceAttrPtr("snowflake_schema.test", "external_volume", accountExternalVolume), + resource.TestCheckResourceAttrPtr("snowflake_schema.test", "catalog", accountCatalog), + resource.TestCheckResourceAttrPtr("snowflake_schema.test", "replace_invalid_characters", accountReplaceInvalidCharacters), + resource.TestCheckResourceAttrPtr("snowflake_schema.test", "default_ddl_collation", accountDefaultDdlCollation), + resource.TestCheckResourceAttrPtr("snowflake_schema.test", "storage_serialization_policy", 
accountStorageSerializationPolicy), + resource.TestCheckResourceAttrPtr("snowflake_schema.test", "log_level", accountLogLevel), + resource.TestCheckResourceAttrPtr("snowflake_schema.test", "trace_level", accountTraceLevel), + resource.TestCheckResourceAttrPtr("snowflake_schema.test", "suspend_task_after_num_failures", accountSuspendTaskAfterNumFailures), + resource.TestCheckResourceAttrPtr("snowflake_schema.test", "task_auto_retry_attempts", accountTaskAutoRetryAttempts), + resource.TestCheckResourceAttrPtr("snowflake_schema.test", "user_task_managed_initial_warehouse_size", accountUserTaskMangedInitialWarehouseSize), + resource.TestCheckResourceAttrPtr("snowflake_schema.test", "user_task_timeout_ms", accountUserTaskTimeoutMs), + resource.TestCheckResourceAttrPtr("snowflake_schema.test", "user_task_minimum_trigger_interval_in_seconds", accountUserTaskMinimumTriggerIntervalInSeconds), + resource.TestCheckResourceAttrPtr("snowflake_schema.test", "quoted_identifiers_ignore_case", accountQuotedIdentifiersIgnoreCase), + resource.TestCheckResourceAttrPtr("snowflake_schema.test", "enable_console_output", accountEnableConsoleOutput), + resource.TestCheckResourceAttrPtr("snowflake_schema.test", "pipe_execution_paused", accountPipeExecutionPaused), + ), + }, + }, + }) +} + +func TestAcc_Schema_complete(t *testing.T) { + id := acc.TestClient().Ids.RandomDatabaseObjectIdentifier() + databaseId := acc.TestClient().Ids.DatabaseId() + + externalVolumeId, externalVolumeCleanup := acc.TestClient().ExternalVolume.Create(t) + t.Cleanup(externalVolumeCleanup) + + catalogId, catalogCleanup := acc.TestClient().CatalogIntegration.Create(t) + t.Cleanup(catalogCleanup) + + completeConfigVariables := config.Variables{ + "name": config.StringVariable(id.Name()), + "comment": config.StringVariable("foo"), + "database": config.StringVariable(databaseId.Name()), + "with_managed_access": config.BoolVariable(true), + "is_transient": config.BoolVariable(true), + + "data_retention_time_in_days": 
config.IntegerVariable(1), + "max_data_extension_time_in_days": config.IntegerVariable(1), + "external_volume": config.StringVariable(externalVolumeId.Name()), + "catalog": config.StringVariable(catalogId.Name()), + "replace_invalid_characters": config.BoolVariable(true), + "default_ddl_collation": config.StringVariable("en_US"), + "storage_serialization_policy": config.StringVariable(string(sdk.StorageSerializationPolicyCompatible)), + "log_level": config.StringVariable(string(sdk.LogLevelInfo)), + "trace_level": config.StringVariable(string(sdk.TraceLevelOnEvent)), + "suspend_task_after_num_failures": config.IntegerVariable(20), + "task_auto_retry_attempts": config.IntegerVariable(20), + "user_task_managed_initial_warehouse_size": config.StringVariable(string(sdk.WarehouseSizeXLarge)), + "user_task_timeout_ms": config.IntegerVariable(1200000), + "user_task_minimum_trigger_interval_in_seconds": config.IntegerVariable(120), + "quoted_identifiers_ignore_case": config.BoolVariable(true), + "enable_console_output": config.BoolVariable(true), + "pipe_execution_paused": config.BoolVariable(true), + } + + resource.Test(t, resource.TestCase{ + ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, + PreCheck: func() { acc.TestAccPreCheck(t) }, + TerraformVersionChecks: []tfversion.TerraformVersionCheck{ + tfversion.RequireAbove(tfversion.Version1_5_0), + }, + CheckDestroy: acc.CheckDestroy(t, resources.Schema), + Steps: []resource.TestStep{ + { + ConfigDirectory: acc.ConfigurationDirectory("TestAcc_Schema/complete"), + ConfigVariables: completeConfigVariables, + Check: resource.ComposeAggregateTestCheckFunc( + resource.TestCheckResourceAttr("snowflake_schema.test", "name", id.Name()), + resource.TestCheckResourceAttr("snowflake_schema.test", "is_transient", "true"), + resource.TestCheckResourceAttr("snowflake_schema.test", "with_managed_access", "true"), + resource.TestCheckResourceAttr("snowflake_schema.test", "comment", "foo"), + + 
resource.TestCheckResourceAttr("snowflake_schema.test", "data_retention_time_in_days", "1"), + resource.TestCheckResourceAttr("snowflake_schema.test", "max_data_extension_time_in_days", "1"), + resource.TestCheckResourceAttr("snowflake_schema.test", "external_volume", externalVolumeId.Name()), + resource.TestCheckResourceAttr("snowflake_schema.test", "catalog", catalogId.Name()), + resource.TestCheckResourceAttr("snowflake_schema.test", "replace_invalid_characters", "true"), + resource.TestCheckResourceAttr("snowflake_schema.test", "default_ddl_collation", "en_US"), + resource.TestCheckResourceAttr("snowflake_schema.test", "storage_serialization_policy", string(sdk.StorageSerializationPolicyCompatible)), + resource.TestCheckResourceAttr("snowflake_schema.test", "log_level", string(sdk.LogLevelInfo)), + resource.TestCheckResourceAttr("snowflake_schema.test", "trace_level", string(sdk.TraceLevelOnEvent)), + resource.TestCheckResourceAttr("snowflake_schema.test", "suspend_task_after_num_failures", "20"), + resource.TestCheckResourceAttr("snowflake_schema.test", "task_auto_retry_attempts", "20"), + resource.TestCheckResourceAttr("snowflake_schema.test", "user_task_managed_initial_warehouse_size", string(sdk.WarehouseSizeXLarge)), + resource.TestCheckResourceAttr("snowflake_schema.test", "user_task_timeout_ms", "1200000"), + resource.TestCheckResourceAttr("snowflake_schema.test", "user_task_minimum_trigger_interval_in_seconds", "120"), + resource.TestCheckResourceAttr("snowflake_schema.test", "quoted_identifiers_ignore_case", "true"), + resource.TestCheckResourceAttr("snowflake_schema.test", "enable_console_output", "true"), + resource.TestCheckResourceAttr("snowflake_schema.test", "pipe_execution_paused", "true"), + ), + }, + { + ConfigDirectory: acc.ConfigurationDirectory("TestAcc_Schema/complete"), + ConfigVariables: completeConfigVariables, + ResourceName: "snowflake_schema.test", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: 
[]string{"show_output.0.is_current"}, + }, }, }) } @@ -89,8 +423,6 @@ func TestAcc_Schema_Rename(t *testing.T) { resource.TestCheckResourceAttr("snowflake_schema.test", "name", oldSchemaName), resource.TestCheckResourceAttr("snowflake_schema.test", "database", acc.TestDatabaseName), resource.TestCheckResourceAttr("snowflake_schema.test", "comment", comment), - checkBool("snowflake_schema.test", "is_transient", false), - checkBool("snowflake_schema.test", "is_managed", false), ), }, { @@ -109,8 +441,64 @@ func TestAcc_Schema_Rename(t *testing.T) { resource.TestCheckResourceAttr("snowflake_schema.test", "name", newSchemaName), resource.TestCheckResourceAttr("snowflake_schema.test", "database", acc.TestDatabaseName), resource.TestCheckResourceAttr("snowflake_schema.test", "comment", comment), - checkBool("snowflake_schema.test", "is_transient", false), - checkBool("snowflake_schema.test", "is_managed", false), + ), + }, + }, + }) +} + +func TestAcc_Schema_ManagePublic(t *testing.T) { + name := "PUBLIC" + + resource.Test(t, resource.TestCase{ + PreCheck: func() { acc.TestAccPreCheck(t) }, + TerraformVersionChecks: []tfversion.TerraformVersionCheck{ + tfversion.RequireAbove(tfversion.Version1_5_0), + }, + CheckDestroy: acc.CheckDestroy(t, resources.Schema), + Steps: []resource.TestStep{ + // PUBLIC can not be created in v0.93 + { + ExternalProviders: map[string]resource.ExternalProvider{ + "snowflake": { + VersionConstraint: "=0.93.0", + Source: "Snowflake-Labs/snowflake", + }, + }, + Config: schemav093(name, acc.TestDatabaseName), + ExpectError: regexp.MustCompile("Error: error creating schema PUBLIC"), + }, + { + ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, + ConfigDirectory: acc.ConfigurationDirectory("TestAcc_Schema/basic_with_pipe_execution_paused"), + ConfigVariables: map[string]config.Variable{ + "name": config.StringVariable(name), + "database": config.StringVariable(acc.TestDatabaseName), + "pipe_execution_paused": config.BoolVariable(true), + 
}, + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr("snowflake_schema.test", "name", name), + resource.TestCheckResourceAttr("snowflake_schema.test", "database", acc.TestDatabaseName), + resource.TestCheckResourceAttr("snowflake_schema.test", "pipe_execution_paused", "true"), + ), + }, + { + ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, + ConfigDirectory: acc.ConfigurationDirectory("TestAcc_Schema/basic_with_pipe_execution_paused"), + ConfigVariables: map[string]config.Variable{ + "name": config.StringVariable(name), + "database": config.StringVariable(acc.TestDatabaseName), + "pipe_execution_paused": config.BoolVariable(false), + }, + ConfigPlanChecks: resource.ConfigPlanChecks{ + PreApply: []plancheck.PlanCheck{ + plancheck.ExpectResourceAction("snowflake_schema.test", plancheck.ResourceActionUpdate), + }, + }, + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr("snowflake_schema.test", "name", name), + resource.TestCheckResourceAttr("snowflake_schema.test", "database", acc.TestDatabaseName), + resource.TestCheckResourceAttr("snowflake_schema.test", "pipe_execution_paused", "false"), ), }, }, @@ -162,20 +550,20 @@ func TestAcc_Schema_TwoSchemasWithTheSameNameOnDifferentDatabases(t *testing.T) // proves https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2356 issue is fixed. 
func TestAcc_Schema_DefaultDataRetentionTime(t *testing.T) { - databaseName := acc.TestClient().Ids.Alpha() - schemaName := acc.TestClient().Ids.Alpha() - id := sdk.NewDatabaseObjectIdentifier(databaseName, schemaName) + db, dbCleanup := acc.TestClient().Database.CreateDatabase(t) + t.Cleanup(dbCleanup) + + id := acc.TestClient().Ids.RandomDatabaseObjectIdentifierInDatabase(db.ID()) - configVariablesWithoutSchemaDataRetentionTime := func(databaseDataRetentionTime int) config.Variables { + configVariablesWithoutSchemaDataRetentionTime := func() config.Variables { return config.Variables{ - "database": config.StringVariable(databaseName), - "schema": config.StringVariable(schemaName), - "database_data_retention_time": config.IntegerVariable(databaseDataRetentionTime), + "database": config.StringVariable(db.ID().Name()), + "schema": config.StringVariable(id.Name()), } } - configVariablesWithSchemaDataRetentionTime := func(databaseDataRetentionTime int, schemaDataRetentionTime int) config.Variables { - vars := configVariablesWithoutSchemaDataRetentionTime(databaseDataRetentionTime) + configVariablesWithSchemaDataRetentionTime := func(schemaDataRetentionTime int) config.Variables { + vars := configVariablesWithoutSchemaDataRetentionTime() vars["schema_data_retention_time"] = config.IntegerVariable(schemaDataRetentionTime) return vars } @@ -190,58 +578,68 @@ func TestAcc_Schema_DefaultDataRetentionTime(t *testing.T) { Steps: []resource.TestStep{ { ConfigDirectory: acc.ConfigurationDirectory("TestAcc_Schema_DefaultDataRetentionTime/WithoutDataRetentionSet"), - ConfigVariables: configVariablesWithoutSchemaDataRetentionTime(5), - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr("snowflake_schema.test", "data_retention_days", r.IntDefaultString), - checkDatabaseAndSchemaDataRetentionTime(t, id, 5, 5), + ConfigVariables: configVariablesWithoutSchemaDataRetentionTime(), + Check: resource.ComposeAggregateTestCheckFunc( + 
resource.TestCheckResourceAttr("snowflake_schema.test", "data_retention_time_in_days", "1"), ), }, + // change param value in database { + PreConfig: func() { + acc.TestClient().Database.UpdateDataRetentionTime(t, db.ID(), 50) + }, ConfigDirectory: acc.ConfigurationDirectory("TestAcc_Schema_DefaultDataRetentionTime/WithoutDataRetentionSet"), - ConfigVariables: configVariablesWithoutSchemaDataRetentionTime(10), - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr("snowflake_schema.test", "data_retention_days", r.IntDefaultString), - checkDatabaseAndSchemaDataRetentionTime(t, id, 10, 10), + ConfigVariables: configVariablesWithoutSchemaDataRetentionTime(), + ConfigPlanChecks: resource.ConfigPlanChecks{ + PreApply: []plancheck.PlanCheck{ + planchecks.PrintPlanDetails("snowflake_schema.test", "data_retention_time_in_days"), + planchecks.ExpectDrift("snowflake_schema.test", "data_retention_time_in_days", sdk.String("1"), sdk.String("50")), + planchecks.ExpectChange("snowflake_schema.test", "data_retention_time_in_days", tfjson.ActionNoop, sdk.String("50"), sdk.String("50")), + planchecks.ExpectComputed("snowflake_schema.test", "data_retention_time_in_days", false), + }, + }, + Check: resource.ComposeAggregateTestCheckFunc( + resource.TestCheckResourceAttr("snowflake_schema.test", "data_retention_time_in_days", "50"), ), }, { ConfigDirectory: acc.ConfigurationDirectory("TestAcc_Schema_DefaultDataRetentionTime/WithDataRetentionSet"), - ConfigVariables: configVariablesWithSchemaDataRetentionTime(10, 5), - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr("snowflake_schema.test", "data_retention_days", "5"), - checkDatabaseAndSchemaDataRetentionTime(t, id, 10, 5), + ConfigVariables: configVariablesWithSchemaDataRetentionTime(5), + Check: resource.ComposeAggregateTestCheckFunc( + resource.TestCheckResourceAttr("snowflake_schema.test", "data_retention_time_in_days", "5"), + checkDatabaseAndSchemaDataRetentionTime(t, id, 50, 5), ), }, { 
ConfigDirectory: acc.ConfigurationDirectory("TestAcc_Schema_DefaultDataRetentionTime/WithDataRetentionSet"), - ConfigVariables: configVariablesWithSchemaDataRetentionTime(10, 15), - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr("snowflake_schema.test", "data_retention_days", "15"), - checkDatabaseAndSchemaDataRetentionTime(t, id, 10, 15), + ConfigVariables: configVariablesWithSchemaDataRetentionTime(15), + Check: resource.ComposeAggregateTestCheckFunc( + resource.TestCheckResourceAttr("snowflake_schema.test", "data_retention_time_in_days", "15"), + checkDatabaseAndSchemaDataRetentionTime(t, id, 50, 15), ), }, { ConfigDirectory: acc.ConfigurationDirectory("TestAcc_Schema_DefaultDataRetentionTime/WithoutDataRetentionSet"), - ConfigVariables: configVariablesWithoutSchemaDataRetentionTime(10), - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr("snowflake_schema.test", "data_retention_days", r.IntDefaultString), - checkDatabaseAndSchemaDataRetentionTime(t, id, 10, 10), + ConfigVariables: configVariablesWithoutSchemaDataRetentionTime(), + Check: resource.ComposeAggregateTestCheckFunc( + resource.TestCheckResourceAttr("snowflake_schema.test", "data_retention_time_in_days", "50"), + checkDatabaseAndSchemaDataRetentionTime(t, id, 50, 50), ), }, { ConfigDirectory: acc.ConfigurationDirectory("TestAcc_Schema_DefaultDataRetentionTime/WithDataRetentionSet"), - ConfigVariables: configVariablesWithSchemaDataRetentionTime(10, 0), - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr("snowflake_schema.test", "data_retention_days", "0"), - checkDatabaseAndSchemaDataRetentionTime(t, id, 10, 0), + ConfigVariables: configVariablesWithSchemaDataRetentionTime(0), + Check: resource.ComposeAggregateTestCheckFunc( + resource.TestCheckResourceAttr("snowflake_schema.test", "data_retention_time_in_days", "0"), + checkDatabaseAndSchemaDataRetentionTime(t, id, 50, 0), ), }, { ConfigDirectory: 
acc.ConfigurationDirectory("TestAcc_Schema_DefaultDataRetentionTime/WithDataRetentionSet"), - ConfigVariables: configVariablesWithSchemaDataRetentionTime(10, 3), - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr("snowflake_schema.test", "data_retention_days", "3"), - checkDatabaseAndSchemaDataRetentionTime(t, id, 10, 3), + ConfigVariables: configVariablesWithSchemaDataRetentionTime(3), + Check: resource.ComposeAggregateTestCheckFunc( + resource.TestCheckResourceAttr("snowflake_schema.test", "data_retention_time_in_days", "3"), + checkDatabaseAndSchemaDataRetentionTime(t, id, 50, 3), ), }, }, @@ -250,20 +648,18 @@ func TestAcc_Schema_DefaultDataRetentionTime(t *testing.T) { // proves https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2356 issue is fixed. func TestAcc_Schema_DefaultDataRetentionTime_SetOutsideOfTerraform(t *testing.T) { - databaseName := acc.TestClient().Ids.Alpha() - schemaName := acc.TestClient().Ids.Alpha() - id := sdk.NewDatabaseObjectIdentifier(databaseName, schemaName) + databaseId := acc.TestClient().Ids.DatabaseId() + id := acc.TestClient().Ids.RandomDatabaseObjectIdentifier() - configVariablesWithoutSchemaDataRetentionTime := func(databaseDataRetentionTime int) config.Variables { + configVariablesWithoutSchemaDataRetentionTime := func() config.Variables { return config.Variables{ - "database": config.StringVariable(databaseName), - "schema": config.StringVariable(schemaName), - "database_data_retention_time": config.IntegerVariable(databaseDataRetentionTime), + "database": config.StringVariable(databaseId.Name()), + "schema": config.StringVariable(id.Name()), } } - configVariablesWithSchemaDataRetentionTime := func(databaseDataRetentionTime int, schemaDataRetentionTime int) config.Variables { - vars := configVariablesWithoutSchemaDataRetentionTime(databaseDataRetentionTime) + configVariablesWithSchemaDataRetentionTime := func(schemaDataRetentionTime int) config.Variables { + vars := 
configVariablesWithoutSchemaDataRetentionTime() vars["schema_data_retention_time"] = config.IntegerVariable(schemaDataRetentionTime) return vars } @@ -278,27 +674,24 @@ func TestAcc_Schema_DefaultDataRetentionTime_SetOutsideOfTerraform(t *testing.T) Steps: []resource.TestStep{ { ConfigDirectory: acc.ConfigurationDirectory("TestAcc_Schema_DefaultDataRetentionTime/WithoutDataRetentionSet"), - ConfigVariables: configVariablesWithoutSchemaDataRetentionTime(5), + ConfigVariables: configVariablesWithoutSchemaDataRetentionTime(), Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr("snowflake_schema.test", "data_retention_days", r.IntDefaultString), - checkDatabaseAndSchemaDataRetentionTime(t, id, 5, 5), + resource.TestCheckResourceAttr("snowflake_schema.test", "data_retention_time_in_days", "1"), ), }, { PreConfig: acc.TestClient().Schema.UpdateDataRetentionTime(t, id, 20), ConfigDirectory: acc.ConfigurationDirectory("TestAcc_Schema_DefaultDataRetentionTime/WithoutDataRetentionSet"), - ConfigVariables: configVariablesWithoutSchemaDataRetentionTime(5), + ConfigVariables: configVariablesWithoutSchemaDataRetentionTime(), Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr("snowflake_schema.test", "data_retention_days", r.IntDefaultString), - checkDatabaseAndSchemaDataRetentionTime(t, id, 5, 5), + resource.TestCheckResourceAttr("snowflake_schema.test", "data_retention_time_in_days", "1"), ), }, { ConfigDirectory: acc.ConfigurationDirectory("TestAcc_Schema_DefaultDataRetentionTime/WithDataRetentionSet"), - ConfigVariables: configVariablesWithSchemaDataRetentionTime(10, 3), + ConfigVariables: configVariablesWithSchemaDataRetentionTime(3), Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr("snowflake_schema.test", "data_retention_days", "3"), - checkDatabaseAndSchemaDataRetentionTime(t, id, 10, 3), + resource.TestCheckResourceAttr("snowflake_schema.test", "data_retention_time_in_days", "3"), ), ConfigPlanChecks: 
resource.ConfigPlanChecks{ PostApplyPostRefresh: []plancheck.PlanCheck{ @@ -312,9 +705,8 @@ func TestAcc_Schema_DefaultDataRetentionTime_SetOutsideOfTerraform(t *testing.T) func TestAcc_Schema_RemoveDatabaseOutsideOfTerraform(t *testing.T) { schemaId := acc.TestClient().Ids.RandomDatabaseObjectIdentifier() - schemaName := schemaId.Name() configVariables := map[string]config.Variable{ - "schema_name": config.StringVariable(schemaName), + "schema_name": config.StringVariable(schemaId.Name()), "database_name": config.StringVariable(acc.TestDatabaseName), } @@ -380,7 +772,7 @@ func TestAcc_Schema_RemoveSchemaOutsideOfTerraform(t *testing.T) { ConfigDirectory: acc.ConfigurationDirectory("TestAcc_Schema_RemoveOutsideOfTerraform"), ConfigVariables: configVariables, // The error occurs in the Create operation, indicating the Read operation removed the resource from the state in the previous step. - ExpectError: regexp.MustCompile("error creating schema"), + ExpectError: regexp.MustCompile("Failed to create schema"), }, }, }) @@ -390,14 +782,10 @@ func checkDatabaseAndSchemaDataRetentionTime(t *testing.T, schemaId sdk.Database t.Helper() return func(state *terraform.State) error { schema, err := acc.TestClient().Schema.Show(t, schemaId) - if err != nil { - return err - } + require.NoError(t, err) database, err := acc.TestClient().Database.Show(t, schemaId.DatabaseId()) - if err != nil { - return err - } + require.NoError(t, err) // "retention_time" may sometimes be an empty string instead of an integer var schemaRetentionTime int64 @@ -408,9 +796,7 @@ func checkDatabaseAndSchemaDataRetentionTime(t *testing.T, schemaId sdk.Database } schemaRetentionTime, err = strconv.ParseInt(rt, 10, 64) - if err != nil { - return err - } + require.NoError(t, err) } if database.RetentionTime != expectedDatabaseRetentionsDays { @@ -424,3 +810,142 @@ func checkDatabaseAndSchemaDataRetentionTime(t *testing.T, schemaId sdk.Database return nil } } + +func 
TestAcc_Schema_migrateFromVersion093WithoutManagedAccess(t *testing.T) { + _ = testenvs.GetOrSkipTest(t, testenvs.EnableAcceptance) + acc.TestAccPreCheck(t) + id := acc.TestClient().Ids.RandomAccountObjectIdentifier() + databaseId := acc.TestClient().Ids.DatabaseId() + resourceName := "snowflake_schema.test" + + resource.Test(t, resource.TestCase{ + PreCheck: func() { acc.TestAccPreCheck(t) }, + TerraformVersionChecks: []tfversion.TerraformVersionCheck{ + tfversion.RequireAbove(tfversion.Version1_5_0), + }, + + Steps: []resource.TestStep{ + { + ExternalProviders: map[string]resource.ExternalProvider{ + "snowflake": { + VersionConstraint: "=0.93.0", + Source: "Snowflake-Labs/snowflake", + }, + }, + Config: schemav093(id.Name(), databaseId.Name()), + Check: resource.ComposeAggregateTestCheckFunc( + resource.TestCheckResourceAttr(resourceName, "name", id.Name()), + resource.TestCheckResourceAttr(resourceName, "is_managed", "false"), + ), + }, + { + ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, + Config: schemav094(id.Name(), databaseId.Name()), + Check: resource.ComposeAggregateTestCheckFunc( + resource.TestCheckResourceAttr(resourceName, "name", id.Name()), + resource.TestCheckResourceAttr(resourceName, "with_managed_access", r.BooleanDefault), + ), + }, + }, + }) +} + +func TestAcc_Schema_migrateFromVersion093(t *testing.T) { + _ = testenvs.GetOrSkipTest(t, testenvs.EnableAcceptance) + acc.TestAccPreCheck(t) + id := acc.TestClient().Ids.RandomAccountObjectIdentifier() + databaseId := acc.TestClient().Ids.DatabaseId() + resourceName := "snowflake_schema.test" + + tag, tagCleanup := acc.TestClient().Tag.CreateTag(t) + t.Cleanup(tagCleanup) + + resource.Test(t, resource.TestCase{ + PreCheck: func() { acc.TestAccPreCheck(t) }, + TerraformVersionChecks: []tfversion.TerraformVersionCheck{ + tfversion.RequireAbove(tfversion.Version1_5_0), + }, + + Steps: []resource.TestStep{ + { + ExternalProviders: map[string]resource.ExternalProvider{ + "snowflake": { + 
VersionConstraint: "=0.93.0", + Source: "Snowflake-Labs/snowflake", + }, + }, + Config: schemav093WithIsManagedAndDataRetentionDays(id.Name(), databaseId.Name(), tag.SchemaName, tag.Name, "foo", true, 10), + Check: resource.ComposeAggregateTestCheckFunc( + resource.TestCheckResourceAttr(resourceName, "name", id.Name()), + resource.TestCheckResourceAttr(resourceName, "is_managed", "true"), + resource.TestCheckResourceAttr(resourceName, "data_retention_days", "10"), + resource.TestCheckResourceAttr(resourceName, "tag.#", "1"), + resource.TestCheckResourceAttr(resourceName, "tag.0.name", tag.Name), + resource.TestCheckResourceAttr(resourceName, "tag.0.value", "foo"), + ), + }, + { + ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, + Config: schemav094WithManagedAccessAndDataRetentionTimeInDays(id.Name(), databaseId.Name(), true, 10), + Check: resource.ComposeAggregateTestCheckFunc( + resource.TestCheckResourceAttr(resourceName, "name", id.Name()), + resource.TestCheckNoResourceAttr(resourceName, "is_managed"), + resource.TestCheckResourceAttr(resourceName, "with_managed_access", "true"), + resource.TestCheckNoResourceAttr(resourceName, "data_retention_days"), + resource.TestCheckResourceAttr(resourceName, "data_retention_time_in_days", "10"), + resource.TestCheckNoResourceAttr(resourceName, "tag.#"), + ), + }, + }, + }) +} + +func schemav093WithIsManagedAndDataRetentionDays(name, database, tagSchema, tagName, tagValue string, isManaged bool, dataRetentionDays int) string { + s := ` +resource "snowflake_schema" "test" { + name = "%[1]s" + database = "%[2]s" + is_managed = %[6]t + data_retention_days = %[7]d + tag { + name = "%[4]s" + value = "%[5]s" + schema = "%[3]s" + database = "%[2]s" + } +} +` + return fmt.Sprintf(s, name, database, tagSchema, tagName, tagValue, isManaged, dataRetentionDays) +} + +func schemav093(name, database string) string { + s := ` +resource "snowflake_schema" "test" { + name = "%s" + database = "%s" +} +` + return 
fmt.Sprintf(s, name, database) +} + +func schemav094WithManagedAccessAndDataRetentionTimeInDays(name, database string, isManaged bool, dataRetentionDays int) string { + s := ` +resource "snowflake_schema" "test" { + name = "%s" + database = "%s" + with_managed_access = %t + data_retention_time_in_days = %d +} +` + return fmt.Sprintf(s, name, database, isManaged, dataRetentionDays) +} + +func schemav094(name, database string) string { + s := ` +resource "snowflake_schema" "test" { + name = "%s" + database = "%s" +} +` + return fmt.Sprintf(s, name, database) +} diff --git a/pkg/resources/schema_state_upgraders.go b/pkg/resources/schema_state_upgraders.go new file mode 100644 index 0000000000..adf972106f --- /dev/null +++ b/pkg/resources/schema_state_upgraders.go @@ -0,0 +1,21 @@ +package resources + +import ( + "context" +) + +func v093SchemaStateUpgrader(ctx context.Context, rawState map[string]any, meta any) (map[string]any, error) { + if rawState == nil { + return rawState, nil + } + + rawState["with_managed_access"] = rawState["is_managed"] + delete(rawState, "is_managed") + + rawState["data_retention_time_in_days"] = rawState["data_retention_days"] + delete(rawState, "data_retention_days") + + delete(rawState, "tag") + + return rawState, nil +} diff --git a/pkg/resources/testdata/TestAcc_Schema/basic/test.tf b/pkg/resources/testdata/TestAcc_Schema/basic/test.tf new file mode 100644 index 0000000000..eaadbde541 --- /dev/null +++ b/pkg/resources/testdata/TestAcc_Schema/basic/test.tf @@ -0,0 +1,4 @@ +resource "snowflake_schema" "test" { + name = var.name + database = var.database +} diff --git a/pkg/resources/testdata/TestAcc_Schema/basic/variables.tf b/pkg/resources/testdata/TestAcc_Schema/basic/variables.tf new file mode 100644 index 0000000000..2cf13b7d6e --- /dev/null +++ b/pkg/resources/testdata/TestAcc_Schema/basic/variables.tf @@ -0,0 +1,7 @@ +variable "name" { + type = string +} + +variable "database" { + type = string +} diff --git 
a/pkg/resources/testdata/TestAcc_Schema/basic_with_pipe_execution_paused/test.tf b/pkg/resources/testdata/TestAcc_Schema/basic_with_pipe_execution_paused/test.tf new file mode 100644 index 0000000000..5f342926d0 --- /dev/null +++ b/pkg/resources/testdata/TestAcc_Schema/basic_with_pipe_execution_paused/test.tf @@ -0,0 +1,5 @@ +resource "snowflake_schema" "test" { + name = var.name + database = var.database + pipe_execution_paused = var.pipe_execution_paused +} diff --git a/pkg/resources/testdata/TestAcc_Schema/basic_with_pipe_execution_paused/variables.tf b/pkg/resources/testdata/TestAcc_Schema/basic_with_pipe_execution_paused/variables.tf new file mode 100644 index 0000000000..c404f0ed86 --- /dev/null +++ b/pkg/resources/testdata/TestAcc_Schema/basic_with_pipe_execution_paused/variables.tf @@ -0,0 +1,11 @@ +variable "name" { + type = string +} + +variable "database" { + type = string +} + +variable "pipe_execution_paused" { + type = bool +} diff --git a/pkg/resources/testdata/TestAcc_Schema/basic_with_transient/test.tf b/pkg/resources/testdata/TestAcc_Schema/basic_with_transient/test.tf new file mode 100644 index 0000000000..c107873c4e --- /dev/null +++ b/pkg/resources/testdata/TestAcc_Schema/basic_with_transient/test.tf @@ -0,0 +1,5 @@ +resource "snowflake_schema" "test" { + name = var.name + database = var.database + is_transient = var.is_transient +} diff --git a/pkg/resources/testdata/TestAcc_Schema/basic_with_transient/variables.tf b/pkg/resources/testdata/TestAcc_Schema/basic_with_transient/variables.tf new file mode 100644 index 0000000000..83822c8e9c --- /dev/null +++ b/pkg/resources/testdata/TestAcc_Schema/basic_with_transient/variables.tf @@ -0,0 +1,11 @@ +variable "name" { + type = string +} + +variable "database" { + type = string +} + +variable "is_transient" { + type = string +} diff --git a/pkg/resources/testdata/TestAcc_Schema/complete/test.tf b/pkg/resources/testdata/TestAcc_Schema/complete/test.tf new file mode 100644 index 0000000000..6d8141e684 
--- /dev/null +++ b/pkg/resources/testdata/TestAcc_Schema/complete/test.tf @@ -0,0 +1,26 @@ +resource "snowflake_schema" "test" { + name = var.name + database = var.database + with_managed_access = var.with_managed_access + is_transient = var.is_transient + comment = var.comment + + data_retention_time_in_days = var.data_retention_time_in_days + max_data_extension_time_in_days = var.max_data_extension_time_in_days + external_volume = var.external_volume + catalog = var.catalog + replace_invalid_characters = var.replace_invalid_characters + default_ddl_collation = var.default_ddl_collation + storage_serialization_policy = var.storage_serialization_policy + log_level = var.log_level + trace_level = var.trace_level + suspend_task_after_num_failures = var.suspend_task_after_num_failures + task_auto_retry_attempts = var.task_auto_retry_attempts + user_task_managed_initial_warehouse_size = var.user_task_managed_initial_warehouse_size + user_task_timeout_ms = var.user_task_timeout_ms + user_task_minimum_trigger_interval_in_seconds = var.user_task_minimum_trigger_interval_in_seconds + quoted_identifiers_ignore_case = var.quoted_identifiers_ignore_case + enable_console_output = var.enable_console_output + pipe_execution_paused = var.pipe_execution_paused + +} diff --git a/pkg/resources/testdata/TestAcc_Schema/complete/variables.tf b/pkg/resources/testdata/TestAcc_Schema/complete/variables.tf new file mode 100644 index 0000000000..6f394e180b --- /dev/null +++ b/pkg/resources/testdata/TestAcc_Schema/complete/variables.tf @@ -0,0 +1,87 @@ +variable "name" { + type = string +} + +variable "database" { + type = string +} + +variable "with_managed_access" { + type = string +} + +variable "is_transient" { + type = string +} + +variable "comment" { + type = string +} + +variable "data_retention_time_in_days" { + type = string +} + +variable "max_data_extension_time_in_days" { + type = string +} + +variable "external_volume" { + type = string +} + +variable "catalog" { + type = 
string +} + +variable "replace_invalid_characters" { + type = string +} + +variable "default_ddl_collation" { + type = string +} + +variable "storage_serialization_policy" { + type = string +} + +variable "log_level" { + type = string +} + +variable "trace_level" { + type = string +} + +variable "suspend_task_after_num_failures" { + type = number +} + +variable "task_auto_retry_attempts" { + type = number +} + +variable "user_task_managed_initial_warehouse_size" { + type = string +} + +variable "user_task_timeout_ms" { + type = number +} + +variable "user_task_minimum_trigger_interval_in_seconds" { + type = number +} + +variable "quoted_identifiers_ignore_case" { + type = bool +} + +variable "enable_console_output" { + type = bool +} + +variable "pipe_execution_paused" { + type = bool +} diff --git a/pkg/resources/testdata/TestAcc_Schema_DefaultDataRetentionTime/WithDataRetentionSet/test.tf b/pkg/resources/testdata/TestAcc_Schema_DefaultDataRetentionTime/WithDataRetentionSet/test.tf index 3e134a9a21..8c6b4a946b 100644 --- a/pkg/resources/testdata/TestAcc_Schema_DefaultDataRetentionTime/WithDataRetentionSet/test.tf +++ b/pkg/resources/testdata/TestAcc_Schema_DefaultDataRetentionTime/WithDataRetentionSet/test.tf @@ -1,10 +1,5 @@ -resource "snowflake_database" "test" { - name = var.database - data_retention_time_in_days = var.database_data_retention_time -} - resource "snowflake_schema" "test" { - name = var.schema - database = snowflake_database.test.name - data_retention_days = var.schema_data_retention_time + name = var.schema + database = var.database + data_retention_time_in_days = var.schema_data_retention_time } diff --git a/pkg/resources/testdata/TestAcc_Schema_DefaultDataRetentionTime/WithDataRetentionSet/variables.tf b/pkg/resources/testdata/TestAcc_Schema_DefaultDataRetentionTime/WithDataRetentionSet/variables.tf index c249935cf3..6391b86535 100644 --- a/pkg/resources/testdata/TestAcc_Schema_DefaultDataRetentionTime/WithDataRetentionSet/variables.tf +++ 
b/pkg/resources/testdata/TestAcc_Schema_DefaultDataRetentionTime/WithDataRetentionSet/variables.tf @@ -6,10 +6,6 @@ variable "schema" { type = string } -variable "database_data_retention_time" { - type = number -} - variable "schema_data_retention_time" { type = number } diff --git a/pkg/resources/testdata/TestAcc_Schema_DefaultDataRetentionTime/WithoutDataRetentionSet/test.tf b/pkg/resources/testdata/TestAcc_Schema_DefaultDataRetentionTime/WithoutDataRetentionSet/test.tf index 0ccefaed5c..7a75568159 100644 --- a/pkg/resources/testdata/TestAcc_Schema_DefaultDataRetentionTime/WithoutDataRetentionSet/test.tf +++ b/pkg/resources/testdata/TestAcc_Schema_DefaultDataRetentionTime/WithoutDataRetentionSet/test.tf @@ -1,9 +1,4 @@ -resource "snowflake_database" "test" { - name = var.database - data_retention_time_in_days = var.database_data_retention_time -} - resource "snowflake_schema" "test" { name = var.schema - database = snowflake_database.test.name + database = var.database } diff --git a/pkg/resources/testdata/TestAcc_Schema_DefaultDataRetentionTime/WithoutDataRetentionSet/variables.tf b/pkg/resources/testdata/TestAcc_Schema_DefaultDataRetentionTime/WithoutDataRetentionSet/variables.tf index a4b73b46d9..626dbab534 100644 --- a/pkg/resources/testdata/TestAcc_Schema_DefaultDataRetentionTime/WithoutDataRetentionSet/variables.tf +++ b/pkg/resources/testdata/TestAcc_Schema_DefaultDataRetentionTime/WithoutDataRetentionSet/variables.tf @@ -5,7 +5,3 @@ variable "database" { variable "schema" { type = string } - -variable "database_data_retention_time" { - type = number -} diff --git a/pkg/resources/testdata/TestAcc_Table_DefaultDataRetentionTime/WithSchemaDataRetentionSet/test.tf b/pkg/resources/testdata/TestAcc_Table_DefaultDataRetentionTime/WithSchemaDataRetentionSet/test.tf index b8576b58af..30c3360a8f 100644 --- a/pkg/resources/testdata/TestAcc_Table_DefaultDataRetentionTime/WithSchemaDataRetentionSet/test.tf +++ 
b/pkg/resources/testdata/TestAcc_Table_DefaultDataRetentionTime/WithSchemaDataRetentionSet/test.tf @@ -4,9 +4,9 @@ resource "snowflake_database" "test" { } resource "snowflake_schema" "test" { - database = snowflake_database.test.name - name = var.schema - data_retention_days = var.schema_data_retention_time + database = snowflake_database.test.name + name = var.schema + data_retention_time_in_days = var.schema_data_retention_time } resource "snowflake_table" "test" { diff --git a/pkg/resources/testdata/TestAcc_Table_DefaultDataRetentionTime/WithTableDataRetentionSet/test.tf b/pkg/resources/testdata/TestAcc_Table_DefaultDataRetentionTime/WithTableDataRetentionSet/test.tf index f417315d00..05571c1112 100644 --- a/pkg/resources/testdata/TestAcc_Table_DefaultDataRetentionTime/WithTableDataRetentionSet/test.tf +++ b/pkg/resources/testdata/TestAcc_Table_DefaultDataRetentionTime/WithTableDataRetentionSet/test.tf @@ -4,9 +4,9 @@ resource "snowflake_database" "test" { } resource "snowflake_schema" "test" { - database = snowflake_database.test.name - name = var.schema - data_retention_days = var.schema_data_retention_time + database = snowflake_database.test.name + name = var.schema + data_retention_time_in_days = var.schema_data_retention_time } resource "snowflake_table" "test" { diff --git a/pkg/sdk/schemas.go b/pkg/sdk/schemas.go index fd7d9b0334..5c398d8918 100644 --- a/pkg/sdk/schemas.go +++ b/pkg/sdk/schemas.go @@ -6,6 +6,8 @@ import ( "errors" "slices" "time" + + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk/internal/collections" ) var ( @@ -565,12 +567,7 @@ func (v *schemas) ShowByID(ctx context.Context, id DatabaseObjectIdentifier) (*S if err != nil { return nil, err } - for _, s := range schemas { - if s.ID() == id { - return &s, nil - } - } - return nil, ErrObjectNotExistOrAuthorized + return collections.FindOne(schemas, func(r Schema) bool { return r.Name == id.Name() }) } func (v *schemas) Use(ctx context.Context, id DatabaseObjectIdentifier) 
error { diff --git a/pkg/sdk/testint/schemas_integration_test.go b/pkg/sdk/testint/schemas_integration_test.go index 40508d7769..c4d9a810a3 100644 --- a/pkg/sdk/testint/schemas_integration_test.go +++ b/pkg/sdk/testint/schemas_integration_test.go @@ -619,7 +619,7 @@ func TestInt_Schemas(t *testing.T) { require.NoError(t, err) _, err = client.Schemas.ShowByID(ctx, schema.ID()) - assert.ErrorIs(t, err, sdk.ErrObjectNotExistOrAuthorized) + assert.ErrorIs(t, err, sdk.ErrObjectNotFound) }) t.Run("undrop", func(t *testing.T) { @@ -634,7 +634,7 @@ func TestInt_Schemas(t *testing.T) { require.NoError(t, err) _, err = client.Schemas.ShowByID(ctx, schema.ID()) - assert.ErrorIs(t, err, sdk.ErrObjectNotExistOrAuthorized) + assert.ErrorIs(t, err, sdk.ErrObjectNotFound) err = client.Schemas.Undrop(ctx, schema.ID()) require.NoError(t, err)