From 6d82c9e86b9b244cc55aface6e8578389ee650e8 Mon Sep 17 00:00:00 2001 From: Jakub Michalak Date: Wed, 28 Aug 2024 13:06:25 +0200 Subject: [PATCH 01/13] Add data metric fields --- MIGRATION_GUIDE.md | 7 +- docs/resources/view.md | 64 ++- examples/resources/snowflake_view/import.sh | 3 +- examples/resources/snowflake_view/resource.tf | 11 +- .../data_metric_function_references_client.go | 27 +- pkg/acceptance/importchecks/import_checks.go | 2 +- pkg/datasources/views_acceptance_test.go | 8 +- pkg/resources/custom_diffs.go | 14 + pkg/resources/doc_helpers.go | 8 + pkg/resources/doc_helpers_test.go | 16 + pkg/resources/helpers.go | 28 ++ .../TestAcc_View/basic_update/test.tf | 7 + .../TestAcc_View/basic_update/variables.tf | 12 + .../testdata/TestAcc_View/complete/test.tf | 17 +- .../TestAcc_View/complete/variables.tf | 16 +- .../user_password_policy_attachment.go | 2 +- pkg/resources/validators.go | 18 + pkg/resources/view.go | 430 ++++++++++++++---- pkg/resources/view_acceptance_test.go | 353 ++++++++------ pkg/resources/view_state_upgraders.go | 3 +- pkg/sdk/client.go | 98 ++-- .../data_metric_function_references_def.go | 135 ++++++ ...ic_function_references_dto_builders_gen.go | 15 + ...data_metric_function_references_dto_gen.go | 10 + .../data_metric_function_references_gen.go | 84 ++++ ...unction_references_gen_integration_test.go | 11 + ...ata_metric_function_references_gen_test.go | 51 +++ ...ata_metric_function_references_impl_gen.go | 33 ++ ...ric_function_references_validations_gen.go | 27 ++ pkg/sdk/poc/main.go | 51 ++- pkg/sdk/policy_references.go | 7 +- ...unction_references_gen_integration_test.go | 41 ++ .../policy_references_integration_test.go | 4 +- pkg/sdk/testint/views_gen_integration_test.go | 26 +- pkg/sdk/views_def.go | 58 ++- pkg/sdk/views_dto_builders_gen.go | 44 +- pkg/sdk/views_dto_gen.go | 15 +- pkg/sdk/views_gen.go | 29 +- pkg/sdk/views_gen_test.go | 49 +- pkg/sdk/views_impl_gen.go | 20 +- pkg/sdk/views_validations_gen.go | 27 +- 41 files 
changed, 1377 insertions(+), 504 deletions(-) create mode 100644 pkg/sdk/data_metric_function_references_def.go create mode 100644 pkg/sdk/data_metric_function_references_dto_builders_gen.go create mode 100644 pkg/sdk/data_metric_function_references_dto_gen.go create mode 100644 pkg/sdk/data_metric_function_references_gen.go create mode 100644 pkg/sdk/data_metric_function_references_gen_integration_test.go create mode 100644 pkg/sdk/data_metric_function_references_gen_test.go create mode 100644 pkg/sdk/data_metric_function_references_impl_gen.go create mode 100644 pkg/sdk/data_metric_function_references_validations_gen.go create mode 100644 pkg/sdk/testint/data_metric_function_references_gen_integration_test.go diff --git a/MIGRATION_GUIDE.md b/MIGRATION_GUIDE.md index c0925ce848..54490d568a 100644 --- a/MIGRATION_GUIDE.md +++ b/MIGRATION_GUIDE.md @@ -13,13 +13,16 @@ New fields: - `change_tracking` - `is_recursive` - `is_temporary` + - `data_metric_schedule` + - `data_metric_functions` - added `show_output` field that holds the response from SHOW VIEWS. - added `describe_output` field that holds the response from DESCRIBE VIEW. Note that one needs to grant sufficient privileges e.g. with [grant_ownership](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/latest/docs/resources/grant_ownership) on the tables used in this view. Otherwise, this field is not filled. #### *(breaking change)* Removed fields from snowflake_view resource Removed fields: -- `tag` -The value of this field will be removed from the state automatically. Please, use [tag_association](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/latest/docs/resources/tag_association) instead. +- `or_replace` - `OR REPLACE` is added by the provider automatically when `copy_grants` is set to `"true"` +- `tag` - Please, use [tag_association](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/latest/docs/resources/tag_association) instead. 
+The values of these fields will be removed from the state automatically. #### *(breaking change)* Required warehouse For this resource, the provider now uses [policy references](https://docs.snowflake.com/en/sql-reference/functions/policy_references) which requires a warehouse in the connection. Please, make sure you have either set a DEFAULT_WAREHOUSE for the user, or specified a warehouse in the provider configuration. diff --git a/docs/resources/view.md b/docs/resources/view.md index a53ef624b5..ba1f8aaf8a 100644 --- a/docs/resources/view.md +++ b/docs/resources/view.md @@ -38,7 +38,7 @@ resource "snowflake_view" "view" { select * from foo; SQL } -# resource with attached policies +# resource with attached policies and data metric functions resource "snowflake_view" "test" { database = "database" schema = "schema" @@ -55,8 +55,15 @@ resource "snowflake_view" "test" { policy_name = "aggregation_policy" entity_key = ["id"] } + data_metric_functions { + function_name = "data_metric_function" + on = ["id"] + } + data_metric_schedule { + using_cron = "15 * * * * UTC" + } statement = <<-SQL - select id from foo; + SELECT id FROM TABLE; SQL } ``` @@ -77,12 +84,14 @@ SQL - `aggregation_policy` (Block List, Max: 1) Specifies the aggregation policy to set on a view. (see [below for nested schema](#nestedblock--aggregation_policy)) - `change_tracking` (String) Specifies to enable or disable change tracking on the table. Available options are: "true" or "false". When the value is not set in the configuration the provider will put "default" there which means to use the Snowflake default for this value. +- `column` (Block List) If you want to change the name of a column or add a comment to a column in the new view, include a column list that specifies the column names and (if needed) comments about the columns. (You do not need to specify the data types of the columns.) (see [below for nested schema](#nestedblock--column)) - `comment` (String) Specifies a comment for the view.
-- `copy_grants` (Boolean) Retains the access permissions from the original view when a new view is created using the OR REPLACE clause. OR REPLACE must be set when COPY GRANTS is set. +- `copy_grants` (Boolean) Retains the access permissions from the original view when a new view is created using the OR REPLACE clause. +- `data_metric_functions` (Block Set) Data metric functions used for the view. (see [below for nested schema](#nestedblock--data_metric_functions)) +- `data_metric_schedule` (Block List, Max: 1) Specifies the schedule to run the data metric functions periodically. (see [below for nested schema](#nestedblock--data_metric_schedule)) - `is_recursive` (String) Specifies that the view can refer to itself using recursive syntax without necessarily using a CTE (common table expression). Available options are: "true" or "false". When the value is not set in the configuration the provider will put "default" there which means to use the Snowflake default for this value. - `is_secure` (String) Specifies that the view is secure. By design, the Snowflake's `SHOW VIEWS` command does not provide information about secure views (consult [view usage notes](https://docs.snowflake.com/en/sql-reference/sql/create-view#usage-notes)) which is essential to manage/import view with Terraform. Use the role owning the view while managing secure views. Available options are: "true" or "false". When the value is not set in the configuration the provider will put "default" there which means to use the Snowflake default for this value. - `is_temporary` (String) Specifies that the view persists only for the duration of the session that you created it in. A temporary view and all its contents are dropped at the end of the session. In context of this provider, it means that it's dropped after a Terraform operation. This results in a permanent plan with object creation. Available options are: "true" or "false". 
When the value is not set in the configuration the provider will put "default" there which means to use the Snowflake default for this value. -- `or_replace` (Boolean) Overwrites the View if it exists. - `row_access_policy` (Block List, Max: 1) Specifies the row access policy to set on a view. (see [below for nested schema](#nestedblock--row_access_policy)) ### Read-Only @@ -104,6 +113,50 @@ Optional: - `entity_key` (Set of String) Defines which columns uniquely identify an entity within the view. + +### Nested Schema for `column` + +Required: + +- `column_name` (String) Specifies affected column name. + +Optional: + +- `comment` (String) Specifies a comment for the column. +- `masking_policy` (Block List) (see [below for nested schema](#nestedblock--column--masking_policy)) +- `projection_policy` (String) Specifies the projection policy to set on a column. + + +### Nested Schema for `column.masking_policy` + +Required: + +- `policy_name` (String) Specifies the masking policy to set on a column. + +Optional: + +- `using` (List of String) Specifies the arguments to pass into the conditional masking policy SQL expression. The first column in the list specifies the column for the policy conditions to mask or tokenize the data and must match the column to which the masking policy is set. The additional columns specify the columns to evaluate to determine whether to mask or tokenize the data in each row of the query result when a query is made on the first column. If the USING clause is omitted, Snowflake treats the conditional masking policy as a normal masking policy. + + + + +### Nested Schema for `data_metric_functions` + +Required: + +- `function_name` (String) Identifier of the data metric function to add to the table or view or drop from the table or view. This function identifier must be provided without arguments in parenthesis. +- `on` (Set of String) The table or view columns on which to associate the data metric function. 
The data types of the columns must match the data types of the columns specified in the data metric function definition. + + + +### Nested Schema for `data_metric_schedule` + +Optional: + +- `minutes` (Number) Specifies an interval (in minutes) of wait time inserted between runs of the data metric function. Conflicts with `using_cron`. Valid values are: `5` | `15` | `30` | `60` | `720` | `1440`. Due to Snowflake limitations, changes in this field is not managed by the provider. Please consider using [taint](https://developer.hashicorp.com/terraform/cli/commands/taint) command, `using_cron` field, or [replace_triggered_by](https://developer.hashicorp.com/terraform/language/meta-arguments/lifecycle#replace_triggered_by) metadata argument. +- `using_cron` (String) Specifies a cron expression and time zone for periodically running the data metric function. Supports a subset of standard cron utility syntax. Conflicts with `minutes`. + + ### Nested Schema for `row_access_policy` @@ -156,6 +209,5 @@ Read-Only: Import is supported using the following syntax: ```shell -# format is database name | schema name | view name -terraform import snowflake_view.example 'dbName|schemaName|viewName' +terraform import snowflake_view.example '""."".""' ``` diff --git a/examples/resources/snowflake_view/import.sh b/examples/resources/snowflake_view/import.sh index e6f3ed83d3..a5ddab454c 100644 --- a/examples/resources/snowflake_view/import.sh +++ b/examples/resources/snowflake_view/import.sh @@ -1,2 +1 @@ -# format is database name | schema name | view name -terraform import snowflake_view.example 'dbName|schemaName|viewName' +terraform import snowflake_view.example '""."".""' diff --git a/examples/resources/snowflake_view/resource.tf b/examples/resources/snowflake_view/resource.tf index d4506dfcf7..c7310ddc5e 100644 --- a/examples/resources/snowflake_view/resource.tf +++ b/examples/resources/snowflake_view/resource.tf @@ -18,7 +18,7 @@ resource "snowflake_view" "view" { select * from 
foo; SQL } -# resource with attached policies +# resource with attached policies and data metric functions resource "snowflake_view" "test" { database = "database" schema = "schema" @@ -35,7 +35,14 @@ resource "snowflake_view" "test" { policy_name = "aggregation_policy" entity_key = ["id"] } + data_metric_functions { + function_name = "data_metric_function" + on = ["id"] + } + data_metric_schedule { + using_cron = "15 * * * * UTC" + } statement = <<-SQL - select id from foo; + SELECT id FROM TABLE; SQL } diff --git a/pkg/acceptance/helpers/data_metric_function_references_client.go b/pkg/acceptance/helpers/data_metric_function_references_client.go index dcdbaacd6b..66914c4a0a 100644 --- a/pkg/acceptance/helpers/data_metric_function_references_client.go +++ b/pkg/acceptance/helpers/data_metric_function_references_client.go @@ -2,10 +2,10 @@ package helpers import ( "context" - "fmt" "testing" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk" + "github.com/stretchr/testify/require" ) type DataMetricFunctionReferencesClient struct { @@ -19,29 +19,12 @@ func NewDataMetricFunctionReferencesClient(context *TestClientContext) *DataMetr } // GetDataMetricFunctionReferences is based on https://docs.snowflake.com/en/sql-reference/functions/data_metric_function_references. 
-func (c *DataMetricFunctionReferencesClient) GetDataMetricFunctionReferences(t *testing.T, id sdk.SchemaObjectIdentifier, objectType sdk.ObjectType) ([]DataMetricFunctionReference, error) { +func (c *DataMetricFunctionReferencesClient) GetDataMetricFunctionReferences(t *testing.T, id sdk.SchemaObjectIdentifier, domain sdk.DataMetricFuncionRefEntityDomainOption) []sdk.DataMetricFunctionReference { t.Helper() ctx := context.Background() - s := []DataMetricFunctionReference{} - dmfReferencesId := sdk.NewSchemaObjectIdentifier(id.DatabaseName(), "INFORMATION_SCHEMA", "DATA_METRIC_FUNCTION_REFERENCES") - err := c.context.client.QueryForTests(ctx, &s, fmt.Sprintf(`SELECT * FROM TABLE(%s(REF_ENTITY_NAME => '%s', REF_ENTITY_DOMAIN => '%v'))`, dmfReferencesId.FullyQualifiedName(), id.FullyQualifiedName(), objectType)) + refs, err := c.context.client.DataMetricFunctionReferences.GetForEntity(ctx, sdk.NewGetForEntityDataMetricFunctionReferenceRequest(id, domain)) + require.NoError(t, err) - return s, err -} - -type DataMetricFunctionReference struct { - MetricDatabaseName string `db:"METRIC_DATABASE_NAME"` - MetricSchemaName string `db:"METRIC_SCHEMA_NAME"` - MetricName string `db:"METRIC_NAME"` - MetricSignature string `db:"METRIC_SIGNATURE"` - MetricDataType string `db:"METRIC_DATA_TYPE"` - RefEntityDatabaseName string `db:"REF_ENTITY_DATABASE_NAME"` - RefEntitySchemaName string `db:"REF_ENTITY_SCHEMA_NAME"` - RefEntityName string `db:"REF_ENTITY_NAME"` - RefEntityDomain string `db:"REF_ENTITY_DOMAIN"` - RefArguments string `db:"REF_ARGUMENTS"` - RefId string `db:"REF_ID"` - Schedule string `db:"SCHEDULE"` - ScheduleStatus string `db:"SCHEDULE_STATUS"` + return refs } diff --git a/pkg/acceptance/importchecks/import_checks.go b/pkg/acceptance/importchecks/import_checks.go index e71c6c86bf..2a1a57d27b 100644 --- a/pkg/acceptance/importchecks/import_checks.go +++ b/pkg/acceptance/importchecks/import_checks.go @@ -46,7 +46,7 @@ func TestCheckResourceAttrInstanceState(id 
string, attributeName, attributeValue if attrVal, ok := v.Attributes[attributeName]; ok { if attrVal != attributeValue { - return fmt.Errorf("expected: %s, got: %s", attributeValue, attrVal) + return fmt.Errorf("invalid value for attribute %s - expected: %s, got: %s", attributeName, attributeValue, attrVal) } return nil diff --git a/pkg/datasources/views_acceptance_test.go b/pkg/datasources/views_acceptance_test.go index ef5069cb31..7edce8f234 100644 --- a/pkg/datasources/views_acceptance_test.go +++ b/pkg/datasources/views_acceptance_test.go @@ -41,17 +41,11 @@ func TestAcc_Views(t *testing.T) { func views(viewId sdk.SchemaObjectIdentifier) string { return fmt.Sprintf(` - resource "snowflake_unsafe_execute" "use_warehouse" { - execute = "USE WAREHOUSE \"%v\"" - revert = "SELECT 1" - } - resource snowflake_view "v"{ name = "%v" schema = "%v" database = "%v" statement = "SELECT ROLE_NAME, ROLE_OWNER FROM INFORMATION_SCHEMA.APPLICABLE_ROLES where ROLE_OWNER like 'foo%%'" - depends_on = [snowflake_unsafe_execute.use_warehouse] } data snowflake_views "v" { @@ -59,5 +53,5 @@ func views(viewId sdk.SchemaObjectIdentifier) string { schema = snowflake_view.v.schema depends_on = [snowflake_view.v] } - `, acc.TestWarehouseName, viewId.Name(), viewId.SchemaName(), viewId.DatabaseName()) + `, viewId.Name(), viewId.SchemaName(), viewId.DatabaseName()) } diff --git a/pkg/resources/custom_diffs.go b/pkg/resources/custom_diffs.go index 853c4fd55e..75f64ec72d 100644 --- a/pkg/resources/custom_diffs.go +++ b/pkg/resources/custom_diffs.go @@ -98,6 +98,20 @@ func ComputedIfAnyAttributeChanged(key string, changedAttributeKeys ...string) s }) } +func asdf(key string, changedAttributeKeys ...string) schema.CustomizeDiffFunc { + return customdiff.ComputedIf(key, func(ctx context.Context, diff *schema.ResourceDiff, meta interface{}) bool { + var result bool + for _, changedKey := range changedAttributeKeys { + if diff.HasChange(changedKey) { + old, new := diff.GetChange(changedKey) + 
log.Printf("[DEBUG] ComputedIfAnyAttributeChanged: changed key: %s old: %s new: %s\n", changedKey, old, new) + } + result = result || diff.HasChange(changedKey) + } + return result + }) +} + // TODO(SNOW-1629468): Adjust the function to make it more flexible func ComputedIfAnyAttributeChangedWithSuppressDiff(key string, suppressDiffFunc schema.SchemaDiffSuppressFunc, changedAttributeKeys ...string) schema.CustomizeDiffFunc { return customdiff.ComputedIf(key, func(ctx context.Context, diff *schema.ResourceDiff, meta interface{}) bool { diff --git a/pkg/resources/doc_helpers.go b/pkg/resources/doc_helpers.go index a7e8a278f1..4e0ee8b425 100644 --- a/pkg/resources/doc_helpers.go +++ b/pkg/resources/doc_helpers.go @@ -13,6 +13,14 @@ func possibleValuesListed[T ~string](values []T) string { return strings.Join(valuesWrapped, " | ") } +func possibleValuesListedInt(values []int) string { + valuesWrapped := make([]string, len(values)) + for i, value := range values { + valuesWrapped[i] = fmt.Sprintf("`%d`", value) + } + return strings.Join(valuesWrapped, " | ") +} + func characterList(values []rune) string { valuesWrapped := make([]string, len(values)) for i, value := range values { diff --git a/pkg/resources/doc_helpers_test.go b/pkg/resources/doc_helpers_test.go index 60842d565b..2d987e6771 100644 --- a/pkg/resources/doc_helpers_test.go +++ b/pkg/resources/doc_helpers_test.go @@ -21,3 +21,19 @@ func Test_PossibleValuesListed_empty(t *testing.T) { assert.Empty(t, result) } + +func Test_PossibleValuesListedInt(t *testing.T) { + values := []int{42, 21} + + result := possibleValuesListedInt(values) + + assert.Equal(t, "`42` | `21`", result) +} + +func Test_PossibleValuesListedInt_empty(t *testing.T) { + var values []int + + result := possibleValuesListedInt(values) + + assert.Empty(t, result) +} diff --git a/pkg/resources/helpers.go b/pkg/resources/helpers.go index 7b91689d48..f18951f42f 100644 --- a/pkg/resources/helpers.go +++ b/pkg/resources/helpers.go @@ -328,3 +328,31 
@@ func ListDiff[T comparable](beforeList []T, afterList []T) (added []T, removed [ return added, removed } + +// ListDiff Compares two lists (before and after), then compares and returns two lists that include +// added and removed items between those lists. +// type X = map[string]any + +// func ListDiffMap(beforeList []any, afterList []any) (added []any, removed []any) { +// type key struct { +// name string +// columns []string +// } +// added = make([]any, 0) +// removed = make([]any, 0) + +// for _, privilegeBeforeChange := range beforeList { +// m := privilegeBeforeChange.(map[string]any) +// if !slices.Contains(afterList, privilegeBeforeChange) { +// removed = append(removed, privilegeBeforeChange) +// } +// } + +// for _, privilegeAfterChange := range afterList { +// if !slices.Contains(beforeList, privilegeAfterChange) { +// added = append(added, privilegeAfterChange) +// } +// } + +// return added, removed +// } diff --git a/pkg/resources/testdata/TestAcc_View/basic_update/test.tf b/pkg/resources/testdata/TestAcc_View/basic_update/test.tf index 2bd1ef8145..605f72139c 100644 --- a/pkg/resources/testdata/TestAcc_View/basic_update/test.tf +++ b/pkg/resources/testdata/TestAcc_View/basic_update/test.tf @@ -11,6 +11,13 @@ resource "snowflake_view" "test" { policy_name = var.aggregation_policy entity_key = var.aggregation_policy_entity_key } + data_metric_functions { + function_name = var.data_metric_function + on = var.data_metric_function_on + } + data_metric_schedule { + using_cron = var.data_metric_schedule_using_cron + } statement = var.statement comment = var.comment } diff --git a/pkg/resources/testdata/TestAcc_View/basic_update/variables.tf b/pkg/resources/testdata/TestAcc_View/basic_update/variables.tf index 42cc6286e8..e2da9f2f40 100644 --- a/pkg/resources/testdata/TestAcc_View/basic_update/variables.tf +++ b/pkg/resources/testdata/TestAcc_View/basic_update/variables.tf @@ -33,3 +33,15 @@ variable "aggregation_policy_entity_key" { variable "comment" { 
type = string } + +variable "data_metric_schedule_using_cron" { + type = string +} + +variable "data_metric_function" { + type = string +} + +variable "data_metric_function_on" { + type = list(string) +} diff --git a/pkg/resources/testdata/TestAcc_View/complete/test.tf b/pkg/resources/testdata/TestAcc_View/complete/test.tf index 45a4a42eb0..d4b9ab0e0a 100644 --- a/pkg/resources/testdata/TestAcc_View/complete/test.tf +++ b/pkg/resources/testdata/TestAcc_View/complete/test.tf @@ -4,24 +4,23 @@ resource "snowflake_view" "test" { database = var.database schema = var.schema is_secure = var.is_secure - or_replace = var.or_replace copy_grants = var.copy_grants change_tracking = var.change_tracking is_temporary = var.is_temporary + data_metric_functions { + function_name = var.data_metric_function + on = var.data_metric_function_on + } + data_metric_schedule { + using_cron = var.data_metric_schedule_using_cron + } row_access_policy { policy_name = var.row_access_policy on = var.row_access_policy_on - } aggregation_policy { policy_name = var.aggregation_policy entity_key = var.aggregation_policy_entity_key } - statement = var.statement - depends_on = [snowflake_unsafe_execute.use_warehouse] -} - -resource "snowflake_unsafe_execute" "use_warehouse" { - execute = "USE WAREHOUSE \"${var.warehouse}\"" - revert = "SELECT 1" + statement = var.statement } diff --git a/pkg/resources/testdata/TestAcc_View/complete/variables.tf b/pkg/resources/testdata/TestAcc_View/complete/variables.tf index 4423777db3..4cdf99c64b 100644 --- a/pkg/resources/testdata/TestAcc_View/complete/variables.tf +++ b/pkg/resources/testdata/TestAcc_View/complete/variables.tf @@ -22,10 +22,6 @@ variable "change_tracking" { type = string } -variable "or_replace" { - type = bool -} - variable "copy_grants" { type = bool } @@ -57,3 +53,15 @@ variable "statement" { variable "warehouse" { type = string } + +variable "data_metric_schedule_using_cron" { + type = string +} + +variable "data_metric_function" { + type = 
string +} + +variable "data_metric_function_on" { + type = list(string) +} diff --git a/pkg/resources/user_password_policy_attachment.go b/pkg/resources/user_password_policy_attachment.go index 12882103a3..5ec96deebb 100644 --- a/pkg/resources/user_password_policy_attachment.go +++ b/pkg/resources/user_password_policy_attachment.go @@ -80,7 +80,7 @@ func ReadUserPasswordPolicyAttachment(d *schema.ResourceData, meta any) error { passwordPolicyReferences := make([]sdk.PolicyReference, 0) for _, policyReference := range policyReferences { - if policyReference.PolicyKind == "PASSWORD_POLICY" { + if policyReference.PolicyKind == sdk.PolicyKindPasswordPolicy { passwordPolicyReferences = append(passwordPolicyReferences, policyReference) } } diff --git a/pkg/resources/validators.go b/pkg/resources/validators.go index e43003b90c..b4bac596fd 100644 --- a/pkg/resources/validators.go +++ b/pkg/resources/validators.go @@ -174,6 +174,24 @@ func StringInSlice(valid []string, ignoreCase bool) schema.SchemaValidateDiagFun } } +// IntInSlice has the same implementation as validation.StringInSlice, but adapted to schema.SchemaValidateDiagFunc +func IntInSlice(valid []int) schema.SchemaValidateDiagFunc { + return func(i interface{}, path cty.Path) diag.Diagnostics { + v, ok := i.(int) + if !ok { + return diag.Errorf("expected type of %v to be integer", path) + } + + for _, validInt := range valid { + if v == validInt { + return nil + } + } + + return diag.Errorf("expected %v to be one of %q, got %d", path, valid, v) + } +} + func sdkValidation[T any](normalize func(string) (T, error)) schema.SchemaValidateDiagFunc { return func(val interface{}, _ cty.Path) diag.Diagnostics { _, err := normalize(val.(string)) diff --git a/pkg/resources/view.go b/pkg/resources/view.go index c61f823b1b..aa647bc115 100644 --- a/pkg/resources/view.go +++ b/pkg/resources/view.go @@ -41,22 +41,14 @@ var viewSchema = map[string]*schema.Schema{ ForceNew: true, DiffSuppressFunc: suppressIdentifierQuoting, }, - 
"or_replace": { - Type: schema.TypeBool, - Optional: true, - Default: false, - Description: "Overwrites the View if it exists.", - }, - // TODO [SNOW-1348118: this is used only during or_replace, we would like to change the behavior before v1 "copy_grants": { Type: schema.TypeBool, Optional: true, Default: false, - Description: "Retains the access permissions from the original view when a new view is created using the OR REPLACE clause. OR REPLACE must be set when COPY GRANTS is set.", + Description: "Retains the access permissions from the original view when a new view is created using the OR REPLACE clause.", DiffSuppressFunc: func(k, oldValue, newValue string, d *schema.ResourceData) bool { return oldValue != "" && oldValue != newValue }, - RequiredWith: []string{"or_replace"}, }, "is_secure": { Type: schema.TypeString, @@ -69,7 +61,6 @@ var viewSchema = map[string]*schema.Schema{ "is_temporary": { Type: schema.TypeString, Optional: true, - ForceNew: true, Default: BooleanDefault, ValidateDiagFunc: validateBooleanString, Description: booleanStringFieldDescription("Specifies that the view persists only for the duration of the session that you created it in. A temporary view and all its contents are dropped at the end of the session. In context of this provider, it means that it's dropped after a Terraform operation. 
This results in a permanent plan with object creation."), @@ -77,7 +68,6 @@ var viewSchema = map[string]*schema.Schema{ "is_recursive": { Type: schema.TypeString, Optional: true, - ForceNew: true, Default: BooleanDefault, ValidateDiagFunc: validateBooleanString, Description: booleanStringFieldDescription("Specifies that the view can refer to itself using recursive syntax without necessarily using a CTE (common table expression)."), @@ -92,59 +82,69 @@ var viewSchema = map[string]*schema.Schema{ }), Description: booleanStringFieldDescription("Specifies to enable or disable change tracking on the table."), }, - // TODO(next pr): support remaining fields - // "data_metric_functions": { - // Type: schema.TypeSet, - // Optional: true, - // Elem: &schema.Resource{ - // Schema: map[string]*schema.Schema{ - // "metric_name": { - // Type: schema.TypeString, - // Optional: true, - // Description: "Identifier of the data metric function to add to the table or view or drop from the table or view.", - // }, - // "column_name": { - // Type: schema.TypeString, - // Optional: true, - // Description: "The table or view columns on which to associate the data metric function. The data types of the columns must match the data types of the columns specified in the data metric function definition.", - // }, - // }, - // }, - // Description: "Data metric functions used for the view.", - // }, - // "data_metric_schedule": { - // Type: schema.TypeList, - // Optional: true, - // MaxItems: 1, - // Elem: &schema.Resource{ - // Schema: map[string]*schema.Schema{ - // "minutes": { - // Type: schema.TypeInt, - // Optional: true, - // Description: "Specifies an interval (in minutes) of wait time inserted between runs of the data metric function. 
Conflicts with `using_cron` and `trigger_on_changes`.", - // // TODO: move to sdk - // ValidateFunc: validation.IntInSlice([]int{5, 15, 30, 60, 720, 1440}), - // ConflictsWith: []string{"data_metric_schedule.using_cron", "data_metric_schedule.trigger_on_changes"}, - // }, - // "using_cron": { - // Type: schema.TypeString, - // Optional: true, - // Description: "Specifies a cron expression and time zone for periodically running the data metric function. Supports a subset of standard cron utility syntax. Conflicts with `minutes` and `trigger_on_changes`.", - // // TODO: validate? - // ConflictsWith: []string{"data_metric_schedule.minutes", "data_metric_schedule.trigger_on_changes"}, - // }, - // "trigger_on_changes": { - // Type: schema.TypeString, - // Optional: true, - // Default: BooleanDefault, - // Description: booleanStringFieldDescription("Specifies that the DMF runs when a DML operation modifies the table, such as inserting a new row or deleting a row. Conflicts with `minutes` and `using_cron`."), - // ConflictsWith: []string{"data_metric_schedule.minutes", "data_metric_schedule.using_cron"}, - // }, - // }, - // }, - // Description: "Specifies the schedule to run the data metric function periodically.", - // }, - // "columns": { + "data_metric_functions": { + Type: schema.TypeSet, + Optional: true, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "function_name": { + Type: schema.TypeString, + Required: true, + Description: "Identifier of the data metric function to add to the table or view or drop from the table or view. This function identifier must be provided without arguments in parenthesis.", + DiffSuppressFunc: suppressIdentifierQuoting, + }, + "on": { + Type: schema.TypeSet, + Required: true, + Elem: &schema.Schema{ + Type: schema.TypeString, + }, + Description: "The table or view columns on which to associate the data metric function. 
The data types of the columns must match the data types of the columns specified in the data metric function definition.", + }, + // TODO (next pr) + // "schedule_status": { + // Type: schema.TypeString, + // Optional: true, + // ValidateDiagFunc: sdkValidation(sdk.ToAllowedDataMetricScheduleStatusOption), + // Description: fmt.Sprintf("The status of the metrics association. Valid values are: %v. When status of a data metric function is changed, it is being reassigned with `DROP DATA METRIC FUNCTION` and `ADD DATA METRIC FUNCTION`, and then its status is changed by `MODIFY DATA METRIC FUNCTION` ", possibleValuesListed(sdk.AllAllowedDataMetricScheduleStatusOptions)), + // DiffSuppressFunc: SuppressIfAny(NormalizeAndCompare(sdk.ToAllowedDataMetricScheduleStatusOption), func(_, oldValue, newValue string, _ *schema.ResourceData) bool { + // if newValue == "" { + // return true + // } + // return false + // }), + // }, + }, + }, + Description: "Data metric functions used for the view.", + RequiredWith: []string{"data_metric_schedule"}, + }, + "data_metric_schedule": { + Type: schema.TypeList, + Optional: true, + MaxItems: 1, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "minutes": { + Type: schema.TypeInt, + Optional: true, + Description: fmt.Sprintf("Specifies an interval (in minutes) of wait time inserted between runs of the data metric function. Conflicts with `using_cron`. Valid values are: %s. Due to Snowflake limitations, changes in this field is not managed by the provider. 
Please consider using [taint](https://developer.hashicorp.com/terraform/cli/commands/taint) command, `using_cron` field, or [replace_triggered_by](https://developer.hashicorp.com/terraform/language/meta-arguments/lifecycle#replace_triggered_by) metadata argument.", possibleValuesListedInt(sdk.AllViewDataMetricScheduleMinutes)), + ValidateDiagFunc: IntInSlice(sdk.AllViewDataMetricScheduleMinutes), + ConflictsWith: []string{"data_metric_schedule.using_cron"}, + }, + "using_cron": { + Type: schema.TypeString, + Optional: true, + Description: "Specifies a cron expression and time zone for periodically running the data metric function. Supports a subset of standard cron utility syntax. Conflicts with `minutes`.", + ConflictsWith: []string{"data_metric_schedule.minutes"}, + }, + }, + }, + Description: "Specifies the schedule to run the data metric functions periodically.", + RequiredWith: []string{"data_metric_functions"}, + }, + // TODO (next pr): add columns + // "column": { // Type: schema.TypeList, // Optional: true, // Elem: &schema.Resource{ @@ -159,7 +159,6 @@ var viewSchema = map[string]*schema.Schema{ // Optional: true, // Elem: &schema.Resource{ // Schema: map[string]*schema.Schema{ - // // TODO: change to `name`? 
in other policies as well // "policy_name": { // Type: schema.TypeString, // Required: true, @@ -182,11 +181,11 @@ var viewSchema = map[string]*schema.Schema{ // DiffSuppressFunc: DiffSuppressStatement, // Description: "Specifies the projection policy to set on a column.", // }, - // "comment": { - // Type: schema.TypeString, - // Optional: true, - // Description: "Specifies a comment for the column.", - // }, + // "comment": { + // Type: schema.TypeString, + // Optional: true, + // Description: "Specifies a comment for the column.", + // }, // }, // }, // Description: "If you want to change the name of a column or add a comment to a column in the new view, include a column list that specifies the column names and (if needed) comments about the columns. (You do not need to specify the data types of the columns.)", @@ -304,16 +303,24 @@ func View() *schema.Resource { func ImportView(ctx context.Context, d *schema.ResourceData, meta any) ([]*schema.ResourceData, error) { log.Printf("[DEBUG] Starting view import") client := meta.(*provider.Context).Client - id := helpers.DecodeSnowflakeID(d.Id()).(sdk.SchemaObjectIdentifier) + id, err := sdk.ParseSchemaObjectIdentifier(d.Id()) + if err != nil { + return nil, err + } v, err := client.Views.ShowByID(ctx, id) if err != nil { return nil, err } - if err := d.Set("name", v.Name); err != nil { + if err := d.Set("name", id.Name()); err != nil { + return nil, err + } + if err := d.Set("database", id.DatabaseName()); err != nil { + return nil, err + } + if err := d.Set("schema", id.SchemaName()); err != nil { return nil, err } - if err := d.Set("change_tracking", booleanStringFromBool(v.IsChangeTracking())); err != nil { return nil, err } @@ -340,13 +347,12 @@ func CreateView(orReplace bool) schema.CreateContextFunc { statement := d.Get("statement").(string) req := sdk.NewCreateViewRequest(id, statement) - // TODO(next pr): remove or_replace field - if v := d.Get("or_replace"); v.(bool) || orReplace { + if orReplace { 
req.WithOrReplace(true) } if v := d.Get("copy_grants"); v.(bool) { - req.WithCopyGrants(true) + req.WithCopyGrants(true).WithOrReplace(true) } if v := d.Get("is_secure").(string); v != BooleanDefault { @@ -378,11 +384,18 @@ func CreateView(orReplace bool) schema.CreateContextFunc { } if v := d.Get("row_access_policy"); len(v.([]any)) > 0 { - req.WithRowAccessPolicy(*sdk.NewViewRowAccessPolicyRequest(extractPolicyWithColumns(v, "on"))) + id, columns, err := extractPolicyWithColumns(v, "on") + if err != nil { + return diag.FromErr(err) + } + req.WithRowAccessPolicy(*sdk.NewViewRowAccessPolicyRequest(id, columns)) } if v := d.Get("aggregation_policy"); len(v.([]any)) > 0 { - id, columns := extractPolicyWithColumns(v, "entity_key") + id, columns, err := extractPolicyWithColumns(v, "entity_key") + if err != nil { + return diag.FromErr(err) + } aggregationPolicyReq := sdk.NewViewAggregationPolicyRequest(id) if len(columns) > 0 { aggregationPolicyReq.WithEntityKey(columns) @@ -395,7 +408,7 @@ func CreateView(orReplace bool) schema.CreateContextFunc { return diag.FromErr(fmt.Errorf("error creating view %v err = %w", id.Name(), err)) } - d.SetId(helpers.EncodeSnowflakeID(id)) + d.SetId(helpers.EncodeResourceIdentifier(id)) if v := d.Get("change_tracking").(string); v != BooleanDefault { parsed, err := booleanStringToBool(v) @@ -409,24 +422,92 @@ func CreateView(orReplace bool) schema.CreateContextFunc { } } + if v := d.Get("data_metric_schedule"); len(v.([]any)) > 0 { + var req *sdk.ViewSetDataMetricScheduleRequest + dmsConfig := v.([]any)[0].(map[string]any) + if v, ok := dmsConfig["minutes"]; ok && v.(int) > 0 { + req = sdk.NewViewSetDataMetricScheduleRequest(fmt.Sprintf("%d MINUTE", v.(int))) + } else if v, ok := dmsConfig["using_cron"]; ok { + req = sdk.NewViewSetDataMetricScheduleRequest(fmt.Sprintf("USING CRON %s", v.(string))) + } + err := client.Views.Alter(ctx, sdk.NewAlterViewRequest(id).WithSetDataMetricSchedule(*req)) + if err != nil { + return 
diag.FromErr(fmt.Errorf("error setting data matric schedule in view %v err = %w", id.Name(), err)) + } + } + + if v, ok := d.GetOk("data_metric_functions"); ok { + addedRaw, err := extractDataMetricFunctions(v.(*schema.Set).List()) + if err != nil { + return diag.FromErr(err) + } + added := make([]sdk.ViewDataMetricFunction, len(addedRaw)) + for i := range addedRaw { + added[i] = sdk.ViewDataMetricFunction{ + DataMetricFunction: addedRaw[i].DataMetricFunction, + On: addedRaw[i].On, + } + } + err = client.Views.Alter(ctx, sdk.NewAlterViewRequest(id).WithAddDataMetricFunction(*sdk.NewViewAddDataMetricFunctionRequest(added))) + if err != nil { + return diag.FromErr(fmt.Errorf("error adding data matric functions in view %v err = %w", id.Name(), err)) + } + changeSchedule := make([]sdk.ViewModifyDataMetricFunction, 0, len(addedRaw)) + for i := range addedRaw { + if addedRaw[i].ScheduleStatus != "" { + expectedStatus, err := sdk.ToAllowedDataMetricScheduleStatusOption(addedRaw[i].ScheduleStatus) + if err != nil { + return diag.FromErr(err) + } + var statusCmd sdk.ViewDataMetricScheduleStatusOperationOption + switch expectedStatus { + case sdk.DataMetricScheduleStatusStarted: + statusCmd = sdk.ViewDataMetricScheduleStatusOperationResume + case sdk.DataMetricScheduleStatusSuspended: + statusCmd = sdk.ViewDataMetricScheduleStatusOperationSuspend + default: + return diag.FromErr(fmt.Errorf("unexpected data metric function status: %v", expectedStatus)) + } + changeSchedule = append(changeSchedule, sdk.ViewModifyDataMetricFunction{ + DataMetricFunction: addedRaw[i].DataMetricFunction, + On: addedRaw[i].On, + ViewDataMetricScheduleStatusOperationOption: statusCmd, + }) + } + } + if len(changeSchedule) > 0 { + err = client.Views.Alter(ctx, sdk.NewAlterViewRequest(id).WithModifyDataMetricFunction(*sdk.NewViewModifyDataMetricFunctionsRequest(changeSchedule))) + if err != nil { + return diag.FromErr(fmt.Errorf("error adding data matric functions in view %v err = %w", id.Name(), 
err)) + } + } + } + return ReadView(false)(ctx, d, meta) } } -func extractPolicyWithColumns(v any, columnsKey string) (sdk.SchemaObjectIdentifier, []sdk.Column) { +func extractPolicyWithColumns(v any, columnsKey string) (sdk.SchemaObjectIdentifier, []sdk.Column, error) { policyConfig := v.([]any)[0].(map[string]any) + id, err := sdk.ParseSchemaObjectIdentifier(policyConfig["policy_name"].(string)) + if err != nil { + return sdk.SchemaObjectIdentifier{}, nil, err + } columnsRaw := expandStringList(policyConfig[columnsKey].(*schema.Set).List()) columns := make([]sdk.Column, len(columnsRaw)) for i := range columnsRaw { columns[i] = sdk.Column{Value: columnsRaw[i]} } - return sdk.NewSchemaObjectIdentifierFromFullyQualifiedName(policyConfig["policy_name"].(string)), columns + return id, columns, nil } func ReadView(withExternalChangesMarking bool) schema.ReadContextFunc { return func(ctx context.Context, d *schema.ResourceData, meta any) diag.Diagnostics { client := meta.(*provider.Context).Client - id := helpers.DecodeSnowflakeID(d.Id()).(sdk.SchemaObjectIdentifier) + id, err := sdk.ParseSchemaObjectIdentifier(d.Id()) + if err != nil { + return diag.FromErr(err) + } view, err := client.Views.ShowByID(ctx, id) if err != nil { @@ -490,6 +571,10 @@ func ReadView(withExternalChangesMarking bool) schema.ReadContextFunc { if err != nil { return diag.FromErr(err) } + err = handleDataMetricFunctions(ctx, client, id, d) + if err != nil { + return diag.FromErr(err) + } if view.Text != "" { // Want to only capture the SELECT part of the query because before that is the CREATE part of the view. 
extractor := snowflake.NewViewSelectStatementExtractor(view.Text) @@ -530,7 +615,7 @@ func handlePolicyReferences(ctx context.Context, client *sdk.Client, id sdk.Sche for _, p := range policyRefs { policyName := sdk.NewSchemaObjectIdentifier(*p.PolicyDb, *p.PolicySchema, p.PolicyName) switch p.PolicyKind { - case string(sdk.PolicyKindAggregationPolicy): + case sdk.PolicyKindAggregationPolicy: var entityKey []string if p.RefArgColumnNames != nil { entityKey = sdk.ParseCommaSeparatedStringArray(*p.RefArgColumnNames, true) @@ -539,7 +624,7 @@ func handlePolicyReferences(ctx context.Context, client *sdk.Client, id sdk.Sche "policy_name": policyName.FullyQualifiedName(), "entity_key": entityKey, }) - case string(sdk.PolicyKindRowAccessPolicy): + case sdk.PolicyKindRowAccessPolicy: var on []string if p.RefArgColumnNames != nil { on = sdk.ParseCommaSeparatedStringArray(*p.RefArgColumnNames, true) @@ -561,12 +646,100 @@ func handlePolicyReferences(ctx context.Context, client *sdk.Client, id sdk.Sche return err } +func handleDataMetricFunctions(ctx context.Context, client *sdk.Client, id sdk.SchemaObjectIdentifier, d *schema.ResourceData) error { + dataMetricFunctionReferences, err := client.DataMetricFunctionReferences.GetForEntity(ctx, sdk.NewGetForEntityDataMetricFunctionReferenceRequest(id, sdk.DataMetricFuncionRefEntityDomainView)) + if err != nil { + return err + } + if len(dataMetricFunctionReferences) == 0 { + return d.Set("data_metric_schedule", nil) + } + dataMetricFunctions := make([]map[string]any, len(dataMetricFunctionReferences)) + var schedule string + for i, dmfRef := range dataMetricFunctionReferences { + dmfName := sdk.NewSchemaObjectIdentifier(dmfRef.MetricDatabaseName, dmfRef.MetricSchemaName, dmfRef.MetricName) + var columns []string + for _, v := range dmfRef.RefArguments { + columns = append(columns, v.Name) + } + // TODO (next pr) + // var scheduleStatus sdk.DataMetricScheduleStatusOption + // status, err := 
sdk.ToDataMetricScheduleStatusOption(dmfRef.ScheduleStatus) + // if err != nil { + // return err + // } + // if slices.Contains(sdk.AllDataMetricScheduleStatusStartedOptions, status) { + // scheduleStatus = sdk.DataMetricScheduleStatusStarted + // } + // if slices.Contains(sdk.AllDataMetricScheduleStatusSuspendedOptions, status) { + // scheduleStatus = sdk.DataMetricScheduleStatusSuspended + // } + dataMetricFunctions[i] = map[string]any{ + "function_name": dmfName.FullyQualifiedName(), + "on": columns, + // "schedule_status": string(scheduleStatus), + } + schedule = dmfRef.Schedule + } + if err = d.Set("data_metric_functions", dataMetricFunctions); err != nil { + return err + } + + return d.Set("data_metric_schedule", []map[string]any{ + { + "using_cron": schedule, + }, + }) +} + +type ViewDataMetricFunctionDDL struct { + DataMetricFunction sdk.SchemaObjectIdentifier + On []sdk.Column + ScheduleStatus string +} + +func extractDataMetricFunctions(v any) (dmfs []ViewDataMetricFunctionDDL, err error) { + for _, v := range v.([]any) { + config := v.(map[string]any) + columnsRaw := expandStringList(config["on"].(*schema.Set).List()) + columns := make([]sdk.Column, len(columnsRaw)) + for i := range columnsRaw { + columns[i] = sdk.Column{Value: columnsRaw[i]} + } + id, err := sdk.ParseSchemaObjectIdentifier(config["function_name"].(string)) + if err != nil { + return nil, err + } + dmfs = append(dmfs, ViewDataMetricFunctionDDL{ + DataMetricFunction: id, + On: columns, + // TODO (next pr) + // ScheduleStatus: config["schedule_status"].(string), + }) + } + return +} + +func changedKeys(d *schema.ResourceData, keys []string) []string { + changed := make([]string, 0, len(keys)) + for _, key := range keys { + if d.HasChange(key) { + changed = append(changed, key) + } + } + return changed +} + func UpdateView(ctx context.Context, d *schema.ResourceData, meta any) diag.Diagnostics { client := meta.(*provider.Context).Client - id := 
helpers.DecodeSnowflakeID(d.Id()).(sdk.SchemaObjectIdentifier) + id, err := sdk.ParseSchemaObjectIdentifier(d.Id()) + if err != nil { + return diag.FromErr(err) + } // change on these fields can not be ForceNew because then view is dropped explicitly and copying grants does not have effect if d.HasChange("statement") || d.HasChange("is_temporary") || d.HasChange("is_recursive") || d.HasChange("copy_grant") { + log.Printf("[DEBUG] Detected change on %q, recreating...", changedKeys(d, []string{"statement", "is_temporary", "is_recursive", "copy_grant"})) return CreateView(true)(ctx, d, meta) } @@ -578,7 +751,7 @@ func UpdateView(ctx context.Context, d *schema.ResourceData, meta any) diag.Diag return diag.FromErr(fmt.Errorf("error renaming view %v err = %w", d.Id(), err)) } - d.SetId(helpers.EncodeSnowflakeID(newId)) + d.SetId(helpers.EncodeResourceIdentifier(newId)) id = newId } @@ -631,17 +804,83 @@ func UpdateView(ctx context.Context, d *schema.ResourceData, meta any) diag.Diag } } + if d.HasChange("data_metric_schedule") { + if v := d.Get("data_metric_schedule"); len(v.([]any)) > 0 { + var req *sdk.ViewSetDataMetricScheduleRequest + dmsConfig := v.([]any)[0].(map[string]any) + if v := dmsConfig["minutes"]; v.(int) > 0 { + req = sdk.NewViewSetDataMetricScheduleRequest(fmt.Sprintf("%d MINUTE", v.(int))) + } else if v, ok := dmsConfig["using_cron"]; ok { + req = sdk.NewViewSetDataMetricScheduleRequest(fmt.Sprintf("USING CRON %s", v.(string))) + } + err := client.Views.Alter(ctx, sdk.NewAlterViewRequest(id).WithSetDataMetricSchedule(*req)) + if err != nil { + return diag.FromErr(fmt.Errorf("error setting data matric schedule in view %v err = %w", id.Name(), err)) + } + } else { + err := client.Views.Alter(ctx, sdk.NewAlterViewRequest(id).WithUnsetDataMetricSchedule(*sdk.NewViewUnsetDataMetricScheduleRequest())) + if err != nil { + return diag.FromErr(fmt.Errorf("error unsetting data matric schedule in view %v err = %w", id.Name(), err)) + } + } + } + + if 
d.HasChange("data_metric_functions") { + old, new := d.GetChange("data_metric_functions") + removedRaw, addedRaw := old.(*schema.Set).List(), new.(*schema.Set).List() + added, err := extractDataMetricFunctions(addedRaw) + if err != nil { + return diag.FromErr(err) + } + removed, err := extractDataMetricFunctions(removedRaw) + if err != nil { + return diag.FromErr(err) + } + if len(removed) > 0 { + removed2 := make([]sdk.ViewDataMetricFunction, len(removed)) + for i := range removed { + removed2[i] = sdk.ViewDataMetricFunction{ + DataMetricFunction: removed[i].DataMetricFunction, + On: removed[i].On, + } + } + err := client.Views.Alter(ctx, sdk.NewAlterViewRequest(id).WithDropDataMetricFunction(*sdk.NewViewDropDataMetricFunctionRequest(removed2))) + if err != nil { + return diag.FromErr(fmt.Errorf("error adding data matric functions in view %v err = %w", id.Name(), err)) + } + } + if len(added) > 0 { + added2 := make([]sdk.ViewDataMetricFunction, len(added)) + for i := range added { + added2[i] = sdk.ViewDataMetricFunction{ + DataMetricFunction: added[i].DataMetricFunction, + On: added[i].On, + } + } + err := client.Views.Alter(ctx, sdk.NewAlterViewRequest(id).WithAddDataMetricFunction(*sdk.NewViewAddDataMetricFunctionRequest(added2))) + if err != nil { + return diag.FromErr(fmt.Errorf("error adding data matric functions in view %v err = %w", id.Name(), err)) + } + } + } + if d.HasChange("row_access_policy") { var addReq *sdk.ViewAddRowAccessPolicyRequest var dropReq *sdk.ViewDropRowAccessPolicyRequest oldRaw, newRaw := d.GetChange("row_access_policy") if len(oldRaw.([]any)) > 0 { - oldId, _ := extractPolicyWithColumns(oldRaw, "on") + oldId, _, err := extractPolicyWithColumns(oldRaw, "on") + if err != nil { + return diag.FromErr(err) + } dropReq = sdk.NewViewDropRowAccessPolicyRequest(oldId) } if len(newRaw.([]any)) > 0 { - newId, newColumns := extractPolicyWithColumns(newRaw, "on") + newId, newColumns, err := extractPolicyWithColumns(newRaw, "on") + if err != nil { 
+ return diag.FromErr(err) + } addReq = sdk.NewViewAddRowAccessPolicyRequest(newId, newColumns) } req := sdk.NewAlterViewRequest(id) @@ -659,12 +898,15 @@ func UpdateView(ctx context.Context, d *schema.ResourceData, meta any) diag.Diag } if d.HasChange("aggregation_policy") { if v, ok := d.GetOk("aggregation_policy"); ok { - newId, newColumns := extractPolicyWithColumns(v, "entity_key") + newId, newColumns, err := extractPolicyWithColumns(v, "entity_key") + if err != nil { + return diag.FromErr(err) + } aggregationPolicyReq := sdk.NewViewSetAggregationPolicyRequest(newId) if len(newColumns) > 0 { aggregationPolicyReq.WithEntityKey(newColumns) } - err := client.Views.Alter(ctx, sdk.NewAlterViewRequest(id).WithSetAggregationPolicy(*aggregationPolicyReq.WithForce(true))) + err = client.Views.Alter(ctx, sdk.NewAlterViewRequest(id).WithSetAggregationPolicy(*aggregationPolicyReq.WithForce(true))) if err != nil { return diag.FromErr(fmt.Errorf("error setting aggregation policy for view %v: %w", d.Id(), err)) } @@ -680,10 +922,14 @@ func UpdateView(ctx context.Context, d *schema.ResourceData, meta any) diag.Diag } func DeleteView(ctx context.Context, d *schema.ResourceData, meta any) diag.Diagnostics { - id := helpers.DecodeSnowflakeID(d.Id()).(sdk.SchemaObjectIdentifier) + id, err := sdk.ParseSchemaObjectIdentifier(d.Id()) + if err != nil { + return diag.FromErr(err) + } + client := meta.(*provider.Context).Client - err := client.Views.Drop(ctx, sdk.NewDropViewRequest(id).WithIfExists(true)) + err = client.Views.Drop(ctx, sdk.NewDropViewRequest(id).WithIfExists(true)) if err != nil { return diag.Diagnostics{ diag.Diagnostic{ diff --git a/pkg/resources/view_acceptance_test.go b/pkg/resources/view_acceptance_test.go index be3b810735..c753c955b3 100644 --- a/pkg/resources/view_acceptance_test.go +++ b/pkg/resources/view_acceptance_test.go @@ -25,44 +25,57 @@ import ( ) // TODO(SNOW-1423486): Fix using warehouse in all tests and remove unsetting testenvs.ConfigureClientOnce 
-// TODO(next pr): cleanup setting warehouse with unsafe_execute func TestAcc_View_basic(t *testing.T) { t.Setenv(string(testenvs.ConfigureClientOnce), "") _ = testenvs.GetOrSkipTest(t, testenvs.EnableAcceptance) acc.TestAccPreCheck(t) - rowAccessPolicy, rowAccessPolicyCleanup := acc.TestClient().RowAccessPolicy.CreateRowAccessPolicyWithDataType(t, sdk.DataTypeVARCHAR) + rowAccessPolicy, rowAccessPolicyCleanup := acc.TestClient().RowAccessPolicy.CreateRowAccessPolicyWithDataType(t, sdk.DataTypeNumber) t.Cleanup(rowAccessPolicyCleanup) aggregationPolicy, aggregationPolicyCleanup := acc.TestClient().AggregationPolicy.CreateAggregationPolicy(t) t.Cleanup(aggregationPolicyCleanup) - rowAccessPolicy2, rowAccessPolicy2Cleanup := acc.TestClient().RowAccessPolicy.CreateRowAccessPolicyWithDataType(t, sdk.DataTypeVARCHAR) + rowAccessPolicy2, rowAccessPolicy2Cleanup := acc.TestClient().RowAccessPolicy.CreateRowAccessPolicyWithDataType(t, sdk.DataTypeNumber) t.Cleanup(rowAccessPolicy2Cleanup) aggregationPolicy2, aggregationPolicy2Cleanup := acc.TestClient().AggregationPolicy.CreateAggregationPolicy(t) t.Cleanup(aggregationPolicy2Cleanup) + functionId := sdk.NewSchemaObjectIdentifier("SNOWFLAKE", "CORE", "AVG") + function2Id := sdk.NewSchemaObjectIdentifier("SNOWFLAKE", "CORE", "MAX") + + cron, cron2 := "10 * * * * UTC", "20 * * * * UTC" + id := acc.TestClient().Ids.RandomSchemaObjectIdentifier() - statement := "SELECT ROLE_NAME, ROLE_OWNER FROM INFORMATION_SCHEMA.APPLICABLE_ROLES" - otherStatement := "SELECT ROLE_NAME, ROLE_OWNER FROM INFORMATION_SCHEMA.APPLICABLE_ROLES where ROLE_OWNER like 'foo%%'" + table, tableCleanup := acc.TestClient().Table.CreateTableWithColumns(t, []sdk.TableColumnRequest{ + *sdk.NewTableColumnRequest("id", sdk.DataTypeNumber), + *sdk.NewTableColumnRequest("foo", sdk.DataTypeNumber), + }) + t.Cleanup(tableCleanup) + statement := fmt.Sprintf("SELECT id, foo FROM %s", table.ID().FullyQualifiedName()) + otherStatement := fmt.Sprintf("SELECT foo, id FROM 
%s", table.ID().FullyQualifiedName()) comment := "Terraform test resource'" viewModel := model.View("test", id.DatabaseName(), id.Name(), id.SchemaName(), statement) - viewModelWithDependency := model.View("test", id.DatabaseName(), id.Name(), id.SchemaName(), statement).WithDependsOn([]string{"snowflake_unsafe_execute.use_warehouse"}) + viewModelWithDependency := model.View("test", id.DatabaseName(), id.Name(), id.SchemaName(), statement) // generators currently don't handle lists, so use the old way - basicUpdate := func(rap, ap sdk.SchemaObjectIdentifier, statement string) config.Variables { + basicUpdate := func(rap, ap, functionId sdk.SchemaObjectIdentifier, statement, cron string, scheduleStatus sdk.DataMetricScheduleStatusOption) config.Variables { return config.Variables{ - "name": config.StringVariable(id.Name()), - "database": config.StringVariable(id.DatabaseName()), - "schema": config.StringVariable(id.SchemaName()), - "statement": config.StringVariable(statement), - "row_access_policy": config.StringVariable(rap.FullyQualifiedName()), - "row_access_policy_on": config.ListVariable(config.StringVariable("ROLE_NAME")), - "aggregation_policy": config.StringVariable(ap.FullyQualifiedName()), - "aggregation_policy_entity_key": config.ListVariable(config.StringVariable("ROLE_NAME")), - "comment": config.StringVariable(comment), + "name": config.StringVariable(id.Name()), + "database": config.StringVariable(id.DatabaseName()), + "schema": config.StringVariable(id.SchemaName()), + "statement": config.StringVariable(statement), + "row_access_policy": config.StringVariable(rap.FullyQualifiedName()), + "row_access_policy_on": config.ListVariable(config.StringVariable("ID")), + "aggregation_policy": config.StringVariable(ap.FullyQualifiedName()), + "aggregation_policy_entity_key": config.ListVariable(config.StringVariable("ID")), + "data_metric_function": config.StringVariable(functionId.FullyQualifiedName()), + "data_metric_function_on": 
config.ListVariable(config.StringVariable("ID")), + "data_metric_function_schedule_status": config.StringVariable(string(scheduleStatus)), + "data_metric_schedule_using_cron": config.StringVariable(cron), + "comment": config.StringVariable(comment), } } @@ -75,7 +88,7 @@ func TestAcc_View_basic(t *testing.T) { Steps: []resource.TestStep{ // without optionals { - Config: accconfig.FromModel(t, viewModelWithDependency) + useWarehouseConfig(acc.TestWarehouseName), + Config: accconfig.FromModel(t, viewModelWithDependency), Check: assert.AssertThat(t, resourceassert.ViewResource(t, "snowflake_view.test"). HasNameString(id.Name()). HasStatementString(statement). @@ -87,18 +100,25 @@ func TestAcc_View_basic(t *testing.T) { Config: accconfig.FromModel(t, viewModel), ResourceName: "snowflake_view.test", ImportState: true, - ImportStateCheck: assert.AssertThatImport(t, assert.CheckImport(importchecks.TestCheckResourceAttrInstanceState(helpers.EncodeSnowflakeID(id), "name", id.Name())), - resourceassert.ImportedViewResource(t, helpers.EncodeSnowflakeID(id)). + ImportStateCheck: assert.AssertThatImport(t, assert.CheckImport(importchecks.TestCheckResourceAttrInstanceState(helpers.EncodeResourceIdentifier(id), "name", id.Name())), + resourceassert.ImportedViewResource(t, helpers.EncodeResourceIdentifier(id)). HasNameString(id.Name()). HasDatabaseString(id.DatabaseName()). HasSchemaString(id.SchemaName()). 
HasStatementString(statement)), }, - // set policies externally + // set policies and dmfs externally { PreConfig: func() { - acc.TestClient().View.Alter(t, sdk.NewAlterViewRequest(id).WithAddRowAccessPolicy(*sdk.NewViewAddRowAccessPolicyRequest(rowAccessPolicy.ID(), []sdk.Column{{Value: "ROLE_NAME"}}))) + acc.TestClient().View.Alter(t, sdk.NewAlterViewRequest(id).WithAddRowAccessPolicy(*sdk.NewViewAddRowAccessPolicyRequest(rowAccessPolicy.ID(), []sdk.Column{{Value: "ID"}}))) acc.TestClient().View.Alter(t, sdk.NewAlterViewRequest(id).WithSetAggregationPolicy(*sdk.NewViewSetAggregationPolicyRequest(aggregationPolicy))) + acc.TestClient().View.Alter(t, sdk.NewAlterViewRequest(id).WithSetDataMetricSchedule(*sdk.NewViewSetDataMetricScheduleRequest(fmt.Sprintf("USING CRON %s", cron)))) + acc.TestClient().View.Alter(t, sdk.NewAlterViewRequest(id).WithAddDataMetricFunction(*sdk.NewViewAddDataMetricFunctionRequest([]sdk.ViewDataMetricFunction{ + { + DataMetricFunction: functionId, + On: []sdk.Column{{Value: "ID"}}, + }, + }))) }, Config: accconfig.FromModel(t, viewModel), Check: assert.AssertThat(t, resourceassert.ViewResource(t, "snowflake_view.test"). 
@@ -108,12 +128,14 @@ func TestAcc_View_basic(t *testing.T) { HasSchemaString(id.SchemaName()), assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "aggregation_policy.#", "0")), assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "row_access_policy.#", "0")), + assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_schedule.#", "0")), + assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_functions.#", "0")), ), }, // set other fields { ConfigDirectory: acc.ConfigurationDirectory("TestAcc_View/basic_update"), - ConfigVariables: basicUpdate(rowAccessPolicy.ID(), aggregationPolicy, statement), + ConfigVariables: basicUpdate(rowAccessPolicy.ID(), aggregationPolicy, functionId, statement, cron, sdk.DataMetricScheduleStatusStarted), ConfigPlanChecks: resource.ConfigPlanChecks{ PreApply: []plancheck.PlanCheck{ plancheck.ExpectResourceAction("snowflake_view.test", plancheck.ResourceActionUpdate), @@ -128,17 +150,52 @@ func TestAcc_View_basic(t *testing.T) { assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "aggregation_policy.#", "1")), assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "aggregation_policy.0.policy_name", aggregationPolicy.FullyQualifiedName())), assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "aggregation_policy.0.entity_key.#", "1")), - assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "aggregation_policy.0.entity_key.0", "ROLE_NAME")), + assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "aggregation_policy.0.entity_key.0", "ID")), assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "row_access_policy.#", "1")), assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "row_access_policy.0.policy_name", rowAccessPolicy.ID().FullyQualifiedName())), assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "row_access_policy.0.on.#", "1")), - 
assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "row_access_policy.0.on.0", "ROLE_NAME")), + assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "row_access_policy.0.on.0", "ID")), + assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_schedule.#", "1")), + assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_schedule.0.using_cron", cron)), + assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_schedule.0.minutes", "0")), + assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_functions.#", "1")), + assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_functions.0.function_name", functionId.FullyQualifiedName())), + assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_functions.0.on.#", "1")), + assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_functions.0.on.0", "ID")), + ), + }, + // change policies and dmfs + { + ConfigDirectory: acc.ConfigurationDirectory("TestAcc_View/basic_update"), + ConfigVariables: basicUpdate(rowAccessPolicy2.ID(), aggregationPolicy2, function2Id, statement, cron2, sdk.DataMetricScheduleStatusStarted), + Check: assert.AssertThat(t, resourceassert.ViewResource(t, "snowflake_view.test"). + HasNameString(id.Name()). + HasStatementString(statement). + HasDatabaseString(id.DatabaseName()). + HasSchemaString(id.SchemaName()). 
+ HasCommentString(comment), + assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "aggregation_policy.#", "1")), + assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "aggregation_policy.0.policy_name", aggregationPolicy2.FullyQualifiedName())), + assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "aggregation_policy.0.entity_key.#", "1")), + assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "aggregation_policy.0.entity_key.0", "ID")), + assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "row_access_policy.#", "1")), + assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "row_access_policy.0.policy_name", rowAccessPolicy2.ID().FullyQualifiedName())), + assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "row_access_policy.0.on.#", "1")), + assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "row_access_policy.0.on.0", "ID")), + assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_schedule.#", "1")), + assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_schedule.0.using_cron", cron2)), + assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_schedule.0.schedule_status", string(sdk.DataMetricScheduleStatusStarted))), + assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_schedule.0.minutes", "0")), + assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_functions.#", "1")), + assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_functions.0.function_name", function2Id.FullyQualifiedName())), + assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_functions.0.on.#", "1")), + assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_functions.0.on.0", "ID")), ), }, - // change policies + // change dmf status { ConfigDirectory: 
acc.ConfigurationDirectory("TestAcc_View/basic_update"), - ConfigVariables: basicUpdate(rowAccessPolicy2.ID(), aggregationPolicy2, statement), + ConfigVariables: basicUpdate(rowAccessPolicy2.ID(), aggregationPolicy2, function2Id, statement, cron2, sdk.DataMetricScheduleStatusSuspended), Check: assert.AssertThat(t, resourceassert.ViewResource(t, "snowflake_view.test"). HasNameString(id.Name()). HasStatementString(statement). @@ -148,17 +205,25 @@ func TestAcc_View_basic(t *testing.T) { assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "aggregation_policy.#", "1")), assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "aggregation_policy.0.policy_name", aggregationPolicy2.FullyQualifiedName())), assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "aggregation_policy.0.entity_key.#", "1")), - assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "aggregation_policy.0.entity_key.0", "ROLE_NAME")), + assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "aggregation_policy.0.entity_key.0", "ID")), assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "row_access_policy.#", "1")), assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "row_access_policy.0.policy_name", rowAccessPolicy2.ID().FullyQualifiedName())), assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "row_access_policy.0.on.#", "1")), - assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "row_access_policy.0.on.0", "ROLE_NAME")), + assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "row_access_policy.0.on.0", "ID")), + assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_schedule.#", "1")), + assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_schedule.0.using_cron", cron2)), + assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_schedule.0.schedule_status", 
string(sdk.DataMetricScheduleStatusSuspended))), + assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_schedule.0.minutes", "0")), + assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_functions.#", "1")), + assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_functions.0.function_name", function2Id.FullyQualifiedName())), + assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_functions.0.on.#", "1")), + assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_functions.0.on.0", "ID")), ), }, // change statement and policies { ConfigDirectory: acc.ConfigurationDirectory("TestAcc_View/basic_update"), - ConfigVariables: basicUpdate(rowAccessPolicy.ID(), aggregationPolicy, otherStatement), + ConfigVariables: basicUpdate(rowAccessPolicy.ID(), aggregationPolicy, functionId, otherStatement, cron, sdk.DataMetricScheduleStatusStarted), Check: assert.AssertThat(t, resourceassert.ViewResource(t, "snowflake_view.test"). HasNameString(id.Name()). HasStatementString(otherStatement). 
@@ -168,11 +233,18 @@ func TestAcc_View_basic(t *testing.T) { assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "aggregation_policy.#", "1")), assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "aggregation_policy.0.policy_name", aggregationPolicy.FullyQualifiedName())), assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "aggregation_policy.0.entity_key.#", "1")), - assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "aggregation_policy.0.entity_key.0", "ROLE_NAME")), + assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "aggregation_policy.0.entity_key.0", "ID")), assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "row_access_policy.#", "1")), assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "row_access_policy.0.policy_name", rowAccessPolicy.ID().FullyQualifiedName())), assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "row_access_policy.0.on.#", "1")), - assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "row_access_policy.0.on.0", "ROLE_NAME")), + assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "row_access_policy.0.on.0", "ID")), + assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_schedule.#", "1")), + assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_schedule.0.using_cron", cron)), + assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_schedule.0.minutes", "0")), + assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_functions.#", "1")), + assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_functions.0.function_name", functionId.FullyQualifiedName())), + assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_functions.0.on.#", "1")), + assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_functions.0.on.0", 
"ID")), ), }, // change statements externally @@ -181,7 +253,7 @@ func TestAcc_View_basic(t *testing.T) { acc.TestClient().View.RecreateView(t, id, statement) }, ConfigDirectory: acc.ConfigurationDirectory("TestAcc_View/basic_update"), - ConfigVariables: basicUpdate(rowAccessPolicy.ID(), aggregationPolicy, otherStatement), + ConfigVariables: basicUpdate(rowAccessPolicy.ID(), aggregationPolicy, functionId, otherStatement, cron, sdk.DataMetricScheduleStatusStarted), Check: assert.AssertThat(t, resourceassert.ViewResource(t, "snowflake_view.test"). HasNameString(id.Name()). HasStatementString(otherStatement). @@ -191,11 +263,18 @@ func TestAcc_View_basic(t *testing.T) { assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "aggregation_policy.#", "1")), assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "aggregation_policy.0.policy_name", aggregationPolicy.FullyQualifiedName())), assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "aggregation_policy.0.entity_key.#", "1")), - assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "aggregation_policy.0.entity_key.0", "ROLE_NAME")), + assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "aggregation_policy.0.entity_key.0", "ID")), assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "row_access_policy.#", "1")), assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "row_access_policy.0.policy_name", rowAccessPolicy.ID().FullyQualifiedName())), assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "row_access_policy.0.on.#", "1")), - assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "row_access_policy.0.on.0", "ROLE_NAME")), + assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "row_access_policy.0.on.0", "ID")), + assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_schedule.#", "1")), + assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", 
"data_metric_schedule.0.using_cron", cron)), + assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_schedule.0.minutes", "0")), + assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_functions.#", "1")), + assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_functions.0.function_name", functionId.FullyQualifiedName())), + assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_functions.0.on.#", "1")), + assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_functions.0.on.0", "ID")), ), }, // unset policies externally @@ -205,7 +284,7 @@ func TestAcc_View_basic(t *testing.T) { acc.TestClient().View.Alter(t, sdk.NewAlterViewRequest(id).WithUnsetAggregationPolicy(*sdk.NewViewUnsetAggregationPolicyRequest())) }, ConfigDirectory: acc.ConfigurationDirectory("TestAcc_View/basic_update"), - ConfigVariables: basicUpdate(rowAccessPolicy.ID(), aggregationPolicy, otherStatement), + ConfigVariables: basicUpdate(rowAccessPolicy.ID(), aggregationPolicy, functionId, otherStatement, cron, sdk.DataMetricScheduleStatusStarted), Check: assert.AssertThat(t, resourceassert.ViewResource(t, "snowflake_view.test"). HasNameString(id.Name()). HasStatementString(otherStatement). 
@@ -215,22 +294,29 @@ func TestAcc_View_basic(t *testing.T) { assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "aggregation_policy.#", "1")), assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "aggregation_policy.0.policy_name", aggregationPolicy.FullyQualifiedName())), assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "aggregation_policy.0.entity_key.#", "1")), - assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "aggregation_policy.0.entity_key.0", "ROLE_NAME")), + assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "aggregation_policy.0.entity_key.0", "ID")), assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "row_access_policy.#", "1")), assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "row_access_policy.0.policy_name", rowAccessPolicy.ID().FullyQualifiedName())), assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "row_access_policy.0.on.#", "1")), - assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "row_access_policy.0.on.0", "ROLE_NAME")), + assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "row_access_policy.0.on.0", "ID")), + assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_schedule.#", "1")), + assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_schedule.0.using_cron", cron)), + assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_schedule.0.minutes", "0")), + assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_functions.#", "1")), + assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_functions.0.function_name", functionId.FullyQualifiedName())), + assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_functions.0.on.#", "1")), + assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_functions.0.on.0", 
"ID")), ), }, // import - with optionals { ConfigDirectory: acc.ConfigurationDirectory("TestAcc_View/basic_update"), - ConfigVariables: basicUpdate(rowAccessPolicy.ID(), aggregationPolicy, otherStatement), + ConfigVariables: basicUpdate(rowAccessPolicy.ID(), aggregationPolicy, functionId, otherStatement, cron, sdk.DataMetricScheduleStatusStarted), ResourceName: "snowflake_view.test", ImportState: true, - ImportStateCheck: assert.AssertThatImport(t, assert.CheckImport(importchecks.TestCheckResourceAttrInstanceState(helpers.EncodeSnowflakeID(id), "name", id.Name())), - resourceassert.ImportedViewResource(t, helpers.EncodeSnowflakeID(id)). + ImportStateCheck: assert.AssertThatImport(t, assert.CheckImport(importchecks.TestCheckResourceAttrInstanceState(helpers.EncodeResourceIdentifier(id), "name", id.Name())), + resourceassert.ImportedViewResource(t, helpers.EncodeResourceIdentifier(id)). HasNameString(id.Name()). HasStatementString(otherStatement). HasDatabaseString(id.DatabaseName()). @@ -239,14 +325,14 @@ func TestAcc_View_basic(t *testing.T) { HasIsSecureString("false"). HasIsTemporaryString("false"). 
HasChangeTrackingString("false"), - assert.CheckImport(importchecks.TestCheckResourceAttrInstanceState(helpers.EncodeSnowflakeID(id), "aggregation_policy.#", "1")), - assert.CheckImport(importchecks.TestCheckResourceAttrInstanceState(helpers.EncodeSnowflakeID(id), "aggregation_policy.0.policy_name", aggregationPolicy.FullyQualifiedName())), - assert.CheckImport(importchecks.TestCheckResourceAttrInstanceState(helpers.EncodeSnowflakeID(id), "aggregation_policy.0.entity_key.#", "1")), - assert.CheckImport(importchecks.TestCheckResourceAttrInstanceState(helpers.EncodeSnowflakeID(id), "aggregation_policy.0.entity_key.0", "ROLE_NAME")), - assert.CheckImport(importchecks.TestCheckResourceAttrInstanceState(helpers.EncodeSnowflakeID(id), "row_access_policy.#", "1")), - assert.CheckImport(importchecks.TestCheckResourceAttrInstanceState(helpers.EncodeSnowflakeID(id), "row_access_policy.0.policy_name", rowAccessPolicy.ID().FullyQualifiedName())), - assert.CheckImport(importchecks.TestCheckResourceAttrInstanceState(helpers.EncodeSnowflakeID(id), "row_access_policy.0.on.#", "1")), - assert.CheckImport(importchecks.TestCheckResourceAttrInstanceState(helpers.EncodeSnowflakeID(id), "row_access_policy.0.on.0", "ROLE_NAME")), + assert.CheckImport(importchecks.TestCheckResourceAttrInstanceState(helpers.EncodeResourceIdentifier(id), "aggregation_policy.#", "1")), + assert.CheckImport(importchecks.TestCheckResourceAttrInstanceState(helpers.EncodeResourceIdentifier(id), "aggregation_policy.0.policy_name", aggregationPolicy.FullyQualifiedName())), + assert.CheckImport(importchecks.TestCheckResourceAttrInstanceState(helpers.EncodeResourceIdentifier(id), "aggregation_policy.0.entity_key.#", "1")), + assert.CheckImport(importchecks.TestCheckResourceAttrInstanceState(helpers.EncodeResourceIdentifier(id), "aggregation_policy.0.entity_key.0", "ID")), + assert.CheckImport(importchecks.TestCheckResourceAttrInstanceState(helpers.EncodeResourceIdentifier(id), "row_access_policy.#", "1")), + 
assert.CheckImport(importchecks.TestCheckResourceAttrInstanceState(helpers.EncodeResourceIdentifier(id), "row_access_policy.0.policy_name", rowAccessPolicy.ID().FullyQualifiedName())), + assert.CheckImport(importchecks.TestCheckResourceAttrInstanceState(helpers.EncodeResourceIdentifier(id), "row_access_policy.0.on.#", "1")), + assert.CheckImport(importchecks.TestCheckResourceAttrInstanceState(helpers.EncodeResourceIdentifier(id), "row_access_policy.0.on.0", "ID")), ), }, // unset @@ -261,6 +347,8 @@ func TestAcc_View_basic(t *testing.T) { HasCommentString(""), assert.Check(resource.TestCheckNoResourceAttr("snowflake_view.test", "aggregation_policy.#")), assert.Check(resource.TestCheckNoResourceAttr("snowflake_view.test", "row_access_policy.#")), + assert.Check(resource.TestCheckNoResourceAttr("snowflake_view.test", "data_metric_schedule.#")), + assert.Check(resource.TestCheckNoResourceAttr("snowflake_view.test", "data_metric_functions.#")), ), }, // recreate - change is_recursive @@ -277,6 +365,8 @@ func TestAcc_View_basic(t *testing.T) { HasChangeTrackingString("default"), assert.Check(resource.TestCheckNoResourceAttr("snowflake_view.test", "aggregation_policy.#")), assert.Check(resource.TestCheckNoResourceAttr("snowflake_view.test", "row_access_policy.#")), + assert.Check(resource.TestCheckNoResourceAttr("snowflake_view.test", "data_metric_schedule.#")), + assert.Check(resource.TestCheckNoResourceAttr("snowflake_view.test", "data_metric_functions.#")), ), }, }, @@ -289,7 +379,7 @@ func TestAcc_View_recursive(t *testing.T) { acc.TestAccPreCheck(t) id := acc.TestClient().Ids.RandomSchemaObjectIdentifier() statement := "SELECT ROLE_NAME, ROLE_OWNER FROM INFORMATION_SCHEMA.APPLICABLE_ROLES" - viewModel := model.View("test", id.DatabaseName(), id.Name(), id.SchemaName(), statement).WithDependsOn([]string{"snowflake_unsafe_execute.use_warehouse"}) + viewModel := model.View("test", id.DatabaseName(), id.Name(), id.SchemaName(), statement) resource.Test(t, 
resource.TestCase{ ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, @@ -299,7 +389,7 @@ func TestAcc_View_recursive(t *testing.T) { CheckDestroy: acc.CheckDestroy(t, resources.View), Steps: []resource.TestStep{ { - Config: accconfig.FromModel(t, viewModel.WithIsRecursive("true")) + useWarehouseConfig(acc.TestWarehouseName), + Config: accconfig.FromModel(t, viewModel.WithIsRecursive("true")), Check: assert.AssertThat(t, resourceassert.ViewResource(t, "snowflake_view.test"). HasNameString(id.Name()). HasStatementString(statement). @@ -308,11 +398,11 @@ func TestAcc_View_recursive(t *testing.T) { HasIsRecursiveString("true")), }, { - Config: accconfig.FromModel(t, viewModel.WithIsRecursive("true")) + useWarehouseConfig(acc.TestWarehouseName), + Config: accconfig.FromModel(t, viewModel.WithIsRecursive("true")), ResourceName: "snowflake_view.test", ImportState: true, - ImportStateCheck: assert.AssertThatImport(t, assert.CheckImport(importchecks.TestCheckResourceAttrInstanceState(helpers.EncodeSnowflakeID(id), "name", id.Name())), - resourceassert.ImportedViewResource(t, helpers.EncodeSnowflakeID(id)). + ImportStateCheck: assert.AssertThatImport(t, assert.CheckImport(importchecks.TestCheckResourceAttrInstanceState(helpers.EncodeResourceIdentifier(id), "name", id.Name())), + resourceassert.ImportedViewResource(t, helpers.EncodeResourceIdentifier(id)). HasNameString(id.Name()). HasDatabaseString(id.DatabaseName()). HasSchemaString(id.SchemaName()). 
@@ -331,7 +421,7 @@ func TestAcc_View_temporary(t *testing.T) { acc.TestAccPreCheck(t) id := acc.TestClient().Ids.RandomSchemaObjectIdentifier() statement := "SELECT ROLE_NAME, ROLE_OWNER FROM INFORMATION_SCHEMA.APPLICABLE_ROLES" - viewModel := model.View("test", id.DatabaseName(), id.Name(), id.SchemaName(), statement).WithDependsOn([]string{"snowflake_unsafe_execute.use_warehouse"}) + viewModel := model.View("test", id.DatabaseName(), id.Name(), id.SchemaName(), statement) resource.Test(t, resource.TestCase{ ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, TerraformVersionChecks: []tfversion.TerraformVersionCheck{ @@ -340,7 +430,7 @@ func TestAcc_View_temporary(t *testing.T) { CheckDestroy: acc.CheckDestroy(t, resources.View), Steps: []resource.TestStep{ { - Config: accconfig.FromModel(t, viewModel.WithIsTemporary("true")) + useWarehouseConfig(acc.TestWarehouseName), + Config: accconfig.FromModel(t, viewModel.WithIsTemporary("true")), Check: assert.AssertThat(t, resourceassert.ViewResource(t, "snowflake_view.test"). HasNameString(id.Name()). HasStatementString(statement). 
@@ -357,33 +447,49 @@ func TestAcc_View_complete(t *testing.T) { _ = testenvs.GetOrSkipTest(t, testenvs.EnableAcceptance) acc.TestAccPreCheck(t) id := acc.TestClient().Ids.RandomSchemaObjectIdentifier() - // use a simple table to test change_tracking, otherwise it fails with: Change tracking is not supported on queries with joins of type '[LEFT_OUTER_JOIN]' - table, tableCleanup := acc.TestClient().Table.CreateTable(t) + table, tableCleanup := acc.TestClient().Table.CreateTableWithColumns(t, []sdk.TableColumnRequest{ + *sdk.NewTableColumnRequest("id", sdk.DataTypeNumber), + *sdk.NewTableColumnRequest("foo", sdk.DataTypeNumber), + }) t.Cleanup(tableCleanup) - statement := fmt.Sprintf("SELECT id FROM %s", table.ID().FullyQualifiedName()) + statement := fmt.Sprintf("SELECT id, foo FROM %s", table.ID().FullyQualifiedName()) rowAccessPolicy, rowAccessPolicyCleanup := acc.TestClient().RowAccessPolicy.CreateRowAccessPolicyWithDataType(t, sdk.DataTypeNumber) t.Cleanup(rowAccessPolicyCleanup) aggregationPolicy, aggregationPolicyCleanup := acc.TestClient().AggregationPolicy.CreateAggregationPolicy(t) t.Cleanup(aggregationPolicyCleanup) + projectionPolicy, projectionPolicyCleanup := acc.TestClient().ProjectionPolicy.CreateProjectionPolicy(t) + t.Cleanup(projectionPolicyCleanup) + + maskingPolicy, maskingPolicyCleanup := acc.TestClient().MaskingPolicy.CreateMaskingPolicyIdentity(t, sdk.DataTypeNumber) + t.Cleanup(maskingPolicyCleanup) + + functionId := sdk.NewSchemaObjectIdentifier("SNOWFLAKE", "CORE", "AVG") + m := func() map[string]config.Variable { return map[string]config.Variable{ - "name": config.StringVariable(id.Name()), - "database": config.StringVariable(id.DatabaseName()), - "schema": config.StringVariable(id.SchemaName()), - "comment": config.StringVariable("Terraform test resource"), - "is_secure": config.BoolVariable(true), - "is_temporary": config.BoolVariable(false), - "or_replace": config.BoolVariable(false), - "copy_grants": config.BoolVariable(false), - 
"change_tracking": config.BoolVariable(true), - "row_access_policy": config.StringVariable(rowAccessPolicy.ID().FullyQualifiedName()), - "row_access_policy_on": config.ListVariable(config.StringVariable("ID")), - "aggregation_policy": config.StringVariable(aggregationPolicy.FullyQualifiedName()), - "aggregation_policy_entity_key": config.ListVariable(config.StringVariable("ID")), - "statement": config.StringVariable(statement), - "warehouse": config.StringVariable(acc.TestWarehouseName), + "name": config.StringVariable(id.Name()), + "database": config.StringVariable(id.DatabaseName()), + "schema": config.StringVariable(id.SchemaName()), + "comment": config.StringVariable("Terraform test resource"), + "is_secure": config.BoolVariable(true), + "is_temporary": config.BoolVariable(false), + "copy_grants": config.BoolVariable(false), + "change_tracking": config.BoolVariable(true), + "row_access_policy": config.StringVariable(rowAccessPolicy.ID().FullyQualifiedName()), + "row_access_policy_on": config.ListVariable(config.StringVariable("ID")), + "aggregation_policy": config.StringVariable(aggregationPolicy.FullyQualifiedName()), + "aggregation_policy_entity_key": config.ListVariable(config.StringVariable("ID")), + "statement": config.StringVariable(statement), + "warehouse": config.StringVariable(acc.TestWarehouseName), + "column_name": config.StringVariable("ID"), + "masking_policy": config.StringVariable(maskingPolicy.ID().FullyQualifiedName()), + "masking_policy_using": config.ListVariable(config.StringVariable("ID")), + "projection_policy": config.StringVariable(projectionPolicy.FullyQualifiedName()), + "data_metric_function": config.StringVariable(functionId.FullyQualifiedName()), + "data_metric_function_on": config.ListVariable(config.StringVariable("ID")), + "data_metric_schedule_using_cron": config.StringVariable("5 * * * * UTC"), } } resource.Test(t, resource.TestCase{ @@ -405,6 +511,13 @@ func TestAcc_View_complete(t *testing.T) { HasIsSecureString("true"). 
HasIsTemporaryString("false"). HasChangeTrackingString("true"), + assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_schedule.#", "1")), + assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_schedule.0.using_cron", "5 * * * * UTC")), + assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_schedule.0.minutes", "0")), + assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_functions.#", "1")), + assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_functions.0.function_name", functionId.FullyQualifiedName())), + assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_functions.0.on.#", "1")), + assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_functions.0.on.0", "ID")), assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "aggregation_policy.#", "1")), assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "aggregation_policy.0.policy_name", aggregationPolicy.FullyQualifiedName())), assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "aggregation_policy.0.entity_key.#", "1")), @@ -427,8 +540,8 @@ func TestAcc_View_complete(t *testing.T) { ConfigVariables: m(), ResourceName: "snowflake_view.test", ImportState: true, - ImportStateCheck: assert.AssertThatImport(t, assert.CheckImport(importchecks.TestCheckResourceAttrInstanceState(helpers.EncodeSnowflakeID(id), "name", id.Name())), - resourceassert.ImportedViewResource(t, helpers.EncodeSnowflakeID(id)). + ImportStateCheck: assert.AssertThatImport(t, assert.CheckImport(importchecks.TestCheckResourceAttrInstanceState(helpers.EncodeResourceIdentifier(id), "name", id.Name())), + resourceassert.ImportedViewResource(t, helpers.EncodeResourceIdentifier(id)). HasNameString(id.Name()). HasStatementString(statement). HasDatabaseString(id.DatabaseName()). 
@@ -436,14 +549,21 @@ func TestAcc_View_complete(t *testing.T) { HasCommentString("Terraform test resource"). HasIsSecureString("true"). HasIsTemporaryString("false").HasChangeTrackingString("true"), - assert.CheckImport(importchecks.TestCheckResourceAttrInstanceState(helpers.EncodeSnowflakeID(id), "aggregation_policy.#", "1")), - assert.CheckImport(importchecks.TestCheckResourceAttrInstanceState(helpers.EncodeSnowflakeID(id), "aggregation_policy.0.policy_name", aggregationPolicy.FullyQualifiedName())), - assert.CheckImport(importchecks.TestCheckResourceAttrInstanceState(helpers.EncodeSnowflakeID(id), "aggregation_policy.0.entity_key.#", "1")), - assert.CheckImport(importchecks.TestCheckResourceAttrInstanceState(helpers.EncodeSnowflakeID(id), "aggregation_policy.0.entity_key.0", "ID")), - assert.CheckImport(importchecks.TestCheckResourceAttrInstanceState(helpers.EncodeSnowflakeID(id), "row_access_policy.#", "1")), - assert.CheckImport(importchecks.TestCheckResourceAttrInstanceState(helpers.EncodeSnowflakeID(id), "row_access_policy.0.policy_name", rowAccessPolicy.ID().FullyQualifiedName())), - assert.CheckImport(importchecks.TestCheckResourceAttrInstanceState(helpers.EncodeSnowflakeID(id), "row_access_policy.0.on.#", "1")), - assert.CheckImport(importchecks.TestCheckResourceAttrInstanceState(helpers.EncodeSnowflakeID(id), "row_access_policy.0.on.0", "ID")), + assert.CheckImport(importchecks.TestCheckResourceAttrInstanceState(helpers.EncodeResourceIdentifier(id), "data_metric_schedule.#", "1")), + assert.CheckImport(importchecks.TestCheckResourceAttrInstanceState(helpers.EncodeResourceIdentifier(id), "data_metric_schedule.0.using_cron", "5 * * * * UTC")), + assert.CheckImport(importchecks.TestCheckResourceAttrInstanceState(helpers.EncodeResourceIdentifier(id), "data_metric_schedule.0.minutes", "0")), + assert.CheckImport(importchecks.TestCheckResourceAttrInstanceState(helpers.EncodeResourceIdentifier(id), "data_metric_functions.#", "1")), + 
assert.CheckImport(importchecks.TestCheckResourceAttrInstanceState(helpers.EncodeResourceIdentifier(id), "data_metric_functions.0.function_name", functionId.FullyQualifiedName())), + assert.CheckImport(importchecks.TestCheckResourceAttrInstanceState(helpers.EncodeResourceIdentifier(id), "data_metric_functions.0.on.#", "1")), + assert.CheckImport(importchecks.TestCheckResourceAttrInstanceState(helpers.EncodeResourceIdentifier(id), "data_metric_functions.0.on.0", "ID")), + assert.CheckImport(importchecks.TestCheckResourceAttrInstanceState(helpers.EncodeResourceIdentifier(id), "aggregation_policy.#", "1")), + assert.CheckImport(importchecks.TestCheckResourceAttrInstanceState(helpers.EncodeResourceIdentifier(id), "aggregation_policy.0.policy_name", aggregationPolicy.FullyQualifiedName())), + assert.CheckImport(importchecks.TestCheckResourceAttrInstanceState(helpers.EncodeResourceIdentifier(id), "aggregation_policy.0.entity_key.#", "1")), + assert.CheckImport(importchecks.TestCheckResourceAttrInstanceState(helpers.EncodeResourceIdentifier(id), "aggregation_policy.0.entity_key.0", "ID")), + assert.CheckImport(importchecks.TestCheckResourceAttrInstanceState(helpers.EncodeResourceIdentifier(id), "row_access_policy.#", "1")), + assert.CheckImport(importchecks.TestCheckResourceAttrInstanceState(helpers.EncodeResourceIdentifier(id), "row_access_policy.0.policy_name", rowAccessPolicy.ID().FullyQualifiedName())), + assert.CheckImport(importchecks.TestCheckResourceAttrInstanceState(helpers.EncodeResourceIdentifier(id), "row_access_policy.0.on.#", "1")), + assert.CheckImport(importchecks.TestCheckResourceAttrInstanceState(helpers.EncodeResourceIdentifier(id), "row_access_policy.0.on.0", "ID")), ), }, }, @@ -455,7 +575,7 @@ func TestAcc_View_Rename(t *testing.T) { statement := "SELECT ROLE_NAME, ROLE_OWNER FROM INFORMATION_SCHEMA.APPLICABLE_ROLES" id := acc.TestClient().Ids.RandomSchemaObjectIdentifier() newId := acc.TestClient().Ids.RandomSchemaObjectIdentifier() - viewModel := 
model.View("test", id.DatabaseName(), id.Name(), id.SchemaName(), statement).WithComment("foo").WithDependsOn([]string{"snowflake_unsafe_execute.use_warehouse"}) + viewModel := model.View("test", id.DatabaseName(), id.Name(), id.SchemaName(), statement).WithComment("foo") newViewModel := model.View("test", newId.DatabaseName(), newId.Name(), newId.SchemaName(), statement).WithComment("foo") resource.Test(t, resource.TestCase{ @@ -467,7 +587,7 @@ func TestAcc_View_Rename(t *testing.T) { CheckDestroy: acc.CheckDestroy(t, resources.View), Steps: []resource.TestStep{ { - Config: accconfig.FromModel(t, viewModel) + useWarehouseConfig(acc.TestWarehouseName), + Config: accconfig.FromModel(t, viewModel), Check: resource.ComposeAggregateTestCheckFunc( resource.TestCheckResourceAttr("snowflake_view.test", "name", id.Name()), resource.TestCheckResourceAttr("snowflake_view.test", "comment", "foo"), @@ -497,8 +617,7 @@ func TestAcc_ViewChangeCopyGrants(t *testing.T) { id := acc.TestClient().Ids.RandomSchemaObjectIdentifier() statement := "SELECT ROLE_NAME, ROLE_OWNER FROM INFORMATION_SCHEMA.APPLICABLE_ROLES" - viewModel := model.View("test", id.DatabaseName(), id.Name(), id.SchemaName(), statement).WithIsSecure("true").WithOrReplace(false).WithCopyGrants(false). 
- WithDependsOn([]string{"snowflake_unsafe_execute.use_warehouse"}) + viewModel := model.View("test", id.DatabaseName(), id.Name(), id.SchemaName(), statement).WithIsSecure("true").WithCopyGrants(false) var createdOn string @@ -511,7 +630,7 @@ func TestAcc_ViewChangeCopyGrants(t *testing.T) { CheckDestroy: acc.CheckDestroy(t, resources.View), Steps: []resource.TestStep{ { - Config: accconfig.FromModel(t, viewModel) + useWarehouseConfig(acc.TestWarehouseName), + Config: accconfig.FromModel(t, viewModel), Check: resource.ComposeAggregateTestCheckFunc( resource.TestCheckResourceAttr("snowflake_view.test", "name", id.Name()), resource.TestCheckResourceAttr("snowflake_view.test", "database", id.DatabaseName()), @@ -526,7 +645,7 @@ func TestAcc_ViewChangeCopyGrants(t *testing.T) { }, // Checks that copy_grants changes don't trigger a drop { - Config: accconfig.FromModel(t, viewModel.WithCopyGrants(true).WithOrReplace(true)) + useWarehouseConfig(acc.TestWarehouseName), + Config: accconfig.FromModel(t, viewModel.WithCopyGrants(true)), Check: resource.ComposeAggregateTestCheckFunc( resource.TestCheckResourceAttr("snowflake_view.test", "show_output.#", "1"), resource.TestCheckResourceAttrWith("snowflake_view.test", "show_output.0.created_on", func(value string) error { @@ -547,9 +666,7 @@ func TestAcc_ViewChangeCopyGrantsReversed(t *testing.T) { id := acc.TestClient().Ids.RandomSchemaObjectIdentifier() statement := "SELECT ROLE_NAME, ROLE_OWNER FROM INFORMATION_SCHEMA.APPLICABLE_ROLES" - viewModel := model.View("test", id.DatabaseName(), id.Name(), id.SchemaName(), statement).WithIsSecure("true").WithOrReplace(true).WithCopyGrants(true). 
- WithDependsOn([]string{"snowflake_unsafe_execute.use_warehouse"}) - + viewModel := model.View("test", id.DatabaseName(), id.Name(), id.SchemaName(), statement).WithIsSecure("true").WithCopyGrants(true) var createdOn string resource.Test(t, resource.TestCase{ @@ -561,7 +678,7 @@ func TestAcc_ViewChangeCopyGrantsReversed(t *testing.T) { CheckDestroy: acc.CheckDestroy(t, resources.View), Steps: []resource.TestStep{ { - Config: accconfig.FromModel(t, viewModel) + useWarehouseConfig(acc.TestWarehouseName), + Config: accconfig.FromModel(t, viewModel), Check: resource.ComposeAggregateTestCheckFunc( resource.TestCheckResourceAttr("snowflake_view.test", "copy_grants", "true"), resource.TestCheckResourceAttr("snowflake_view.test", "show_output.#", "1"), @@ -573,7 +690,7 @@ func TestAcc_ViewChangeCopyGrantsReversed(t *testing.T) { ), }, { - Config: accconfig.FromModel(t, viewModel.WithCopyGrants(false)) + useWarehouseConfig(acc.TestWarehouseName), + Config: accconfig.FromModel(t, viewModel.WithCopyGrants(false)), Check: resource.ComposeAggregateTestCheckFunc( resource.TestCheckResourceAttr("snowflake_view.test", "show_output.#", "1"), resource.TestCheckResourceAttrWith("snowflake_view.test", "show_output.0.created_on", func(value string) error { @@ -604,7 +721,7 @@ func TestAcc_ViewCopyGrantsStatementUpdate(t *testing.T) { CheckDestroy: acc.CheckDestroy(t, resources.View), Steps: []resource.TestStep{ { - Config: viewConfigWithGrants(viewId, tableId, `\"name\"`) + useWarehouseConfig(acc.TestWarehouseName), + Config: viewConfigWithGrants(viewId, tableId, `\"name\"`), Check: resource.ComposeAggregateTestCheckFunc( // there should be more than one privilege, because we applied grant all privileges and initially there's always one which is ownership resource.TestCheckResourceAttr("data.snowflake_grants.grants", "grants.#", "2"), @@ -612,7 +729,7 @@ func TestAcc_ViewCopyGrantsStatementUpdate(t *testing.T) { ), }, { - Config: viewConfigWithGrants(viewId, tableId, "*") + 
useWarehouseConfig(acc.TestWarehouseName), + Config: viewConfigWithGrants(viewId, tableId, "*"), Check: resource.ComposeAggregateTestCheckFunc( resource.TestCheckResourceAttr("data.snowflake_grants.grants", "grants.#", "2"), resource.TestCheckResourceAttr("data.snowflake_grants.grants", "grants.1.privilege", "SELECT"), @@ -626,7 +743,7 @@ func TestAcc_View_copyGrants(t *testing.T) { t.Setenv(string(testenvs.ConfigureClientOnce), "") id := acc.TestClient().Ids.RandomSchemaObjectIdentifier() statement := "SELECT ROLE_NAME, ROLE_OWNER FROM INFORMATION_SCHEMA.APPLICABLE_ROLES" - viewModel := model.View("test", id.DatabaseName(), id.Name(), id.SchemaName(), statement).WithDependsOn([]string{"snowflake_unsafe_execute.use_warehouse"}) + viewModel := model.View("test", id.DatabaseName(), id.Name(), id.SchemaName(), statement) resource.Test(t, resource.TestCase{ ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, PreCheck: func() { acc.TestAccPreCheck(t) }, @@ -636,19 +753,10 @@ func TestAcc_View_copyGrants(t *testing.T) { CheckDestroy: acc.CheckDestroy(t, resources.View), Steps: []resource.TestStep{ { - Config: accconfig.FromModel(t, viewModel.WithCopyGrants(true)) + useWarehouseConfig(acc.TestWarehouseName), - ExpectError: regexp.MustCompile("all of `copy_grants,or_replace` must be specified"), - }, - { - Config: accconfig.FromModel(t, viewModel.WithCopyGrants(true).WithOrReplace(true)) + useWarehouseConfig(acc.TestWarehouseName), - Check: resource.ComposeAggregateTestCheckFunc( - resource.TestCheckResourceAttr("snowflake_view.test", "name", id.Name()), - ), - }, - { - Config: accconfig.FromModel(t, viewModel.WithCopyGrants(false).WithOrReplace(true)) + useWarehouseConfig(acc.TestWarehouseName), + Config: accconfig.FromModel(t, viewModel.WithCopyGrants(true)), Check: resource.ComposeAggregateTestCheckFunc( resource.TestCheckResourceAttr("snowflake_view.test", "name", id.Name()), + resource.TestCheckResourceAttr("snowflake_view.test", "copy_grants", "true"), ), 
}, }, @@ -672,7 +780,7 @@ func TestAcc_View_Issue2640(t *testing.T) { CheckDestroy: acc.CheckDestroy(t, resources.View), Steps: []resource.TestStep{ { - Config: viewConfigWithMultilineUnionStatement(id, part1, part2) + useWarehouseConfig(acc.TestWarehouseName), + Config: viewConfigWithMultilineUnionStatement(id, part1, part2), Check: resource.ComposeAggregateTestCheckFunc( resource.TestCheckResourceAttr("snowflake_view.test", "name", id.Name()), resource.TestCheckResourceAttr("snowflake_view.test", "statement", statement), @@ -698,8 +806,8 @@ func TestAcc_View_Issue2640(t *testing.T) { }, ResourceName: "snowflake_view.test", ImportState: true, - ImportStateCheck: assert.AssertThatImport(t, assert.CheckImport(importchecks.TestCheckResourceAttrInstanceState(helpers.EncodeSnowflakeID(id), "name", id.Name())), - resourceassert.ImportedViewResource(t, helpers.EncodeSnowflakeID(id)). + ImportStateCheck: assert.AssertThatImport(t, assert.CheckImport(importchecks.TestCheckResourceAttrInstanceState(helpers.EncodeResourceIdentifier(id), "name", id.Name())), + resourceassert.ImportedViewResource(t, helpers.EncodeResourceIdentifier(id)). HasNameString(id.Name()). HasStatementString(statement). HasDatabaseString(id.DatabaseName()). 
@@ -717,7 +825,7 @@ func TestAcc_view_migrateFromVersion_0_94_1(t *testing.T) { id := acc.TestClient().Ids.RandomSchemaObjectIdentifier() resourceName := "snowflake_view.test" statement := "SELECT ROLE_NAME, ROLE_OWNER FROM INFORMATION_SCHEMA.APPLICABLE_ROLES" - viewModel := model.View("test", id.DatabaseName(), id.Name(), id.SchemaName(), statement).WithDependsOn([]string{"snowflake_unsafe_execute.use_warehouse"}) + viewModel := model.View("test", id.DatabaseName(), id.Name(), id.SchemaName(), statement) tag, tagCleanup := acc.TestClient().Tag.CreateTag(t) t.Cleanup(tagCleanup) @@ -741,36 +849,30 @@ func TestAcc_view_migrateFromVersion_0_94_1(t *testing.T) { resource.TestCheckResourceAttr(resourceName, "tag.#", "1"), resource.TestCheckResourceAttr(resourceName, "tag.0.name", tag.Name), resource.TestCheckResourceAttr(resourceName, "tag.0.value", "foo"), + resource.TestCheckResourceAttr(resourceName, "or_replace", "true"), ), }, { ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, - Config: accconfig.FromModel(t, viewModel) + useWarehouseConfig(acc.TestWarehouseName), + Config: accconfig.FromModel(t, viewModel), Check: resource.ComposeAggregateTestCheckFunc( resource.TestCheckResourceAttr(resourceName, "name", id.Name()), resource.TestCheckNoResourceAttr(resourceName, "tag.#"), + resource.TestCheckNoResourceAttr(resourceName, "or_replace"), ), }, }, }) } -func useWarehouseConfig(name string) string { - return fmt.Sprintf(` -resource "snowflake_unsafe_execute" "use_warehouse" { - execute = "USE WAREHOUSE \"%s\"" - revert = "SELECT 1" -} -`, name) -} - func viewv_0_94_1_WithTags(id sdk.SchemaObjectIdentifier, tagSchema, tagName, tagValue, statement string) string { s := ` resource "snowflake_view" "test" { name = "%[1]s" database = "%[2]s" - schema = "%[6]s" + schema = "%[6]s" statement = "%[7]s" + or_replace = true tag { name = "%[4]s" value = "%[5]s" @@ -801,10 +903,8 @@ resource "snowflake_view" "test" { database = "%[1]s" schema = "%[2]s" statement = 
"select %[5]s from \"%[1]s\".\"%[2]s\".\"${snowflake_table.table.name}\"" - or_replace = true copy_grants = true is_secure = true - depends_on = [snowflake_unsafe_execute.use_warehouse, snowflake_table.table] } resource "snowflake_account_role" "test" { @@ -821,7 +921,7 @@ resource "snowflake_grant_privileges_to_account_role" "grant" { } data "snowflake_grants" "grants" { - depends_on = [snowflake_grant_privileges_to_account_role.grant, snowflake_view.test, snowflake_unsafe_execute.use_warehouse] + depends_on = [snowflake_grant_privileges_to_account_role.grant, snowflake_view.test] grants_on { object_name = "\"%[1]s\".\"%[2]s\".\"${snowflake_view.test.name}\"" object_type = "VIEW" @@ -842,7 +942,6 @@ resource "snowflake_view" "test" { %[5]s SQL is_secure = true - depends_on = [snowflake_unsafe_execute.use_warehouse] } `, id.DatabaseName(), id.SchemaName(), id.Name(), part1, part2) } diff --git a/pkg/resources/view_state_upgraders.go b/pkg/resources/view_state_upgraders.go index f48b54c568..d8a2366684 100644 --- a/pkg/resources/view_state_upgraders.go +++ b/pkg/resources/view_state_upgraders.go @@ -15,6 +15,7 @@ func v0_94_1_ViewStateUpgrader(ctx context.Context, rawState map[string]any, met } delete(rawState, "tag") + delete(rawState, "or_replace") - return rawState, nil + return migratePipeSeparatedObjectIdentifierResourceIdToFullyQualifiedName(ctx, rawState, meta) } diff --git a/pkg/sdk/client.go b/pkg/sdk/client.go index fe50185a18..c541793ef5 100644 --- a/pkg/sdk/client.go +++ b/pkg/sdk/client.go @@ -39,54 +39,55 @@ type Client struct { ReplicationFunctions ReplicationFunctions // DDL Commands - Accounts Accounts - Alerts Alerts - ApiIntegrations ApiIntegrations - ApplicationPackages ApplicationPackages - ApplicationRoles ApplicationRoles - Applications Applications - Comments Comments - CortexSearchServices CortexSearchServices - DatabaseRoles DatabaseRoles - Databases Databases - DynamicTables DynamicTables - ExternalFunctions ExternalFunctions - 
ExternalTables ExternalTables - EventTables EventTables - FailoverGroups FailoverGroups - FileFormats FileFormats - Functions Functions - Grants Grants - ManagedAccounts ManagedAccounts - MaskingPolicies MaskingPolicies - MaterializedViews MaterializedViews - NetworkPolicies NetworkPolicies - NetworkRules NetworkRules - NotificationIntegrations NotificationIntegrations - Parameters Parameters - PasswordPolicies PasswordPolicies - Pipes Pipes - PolicyReferences PolicyReferences - Procedures Procedures - ResourceMonitors ResourceMonitors - Roles Roles - RowAccessPolicies RowAccessPolicies - Schemas Schemas - SecurityIntegrations SecurityIntegrations - Sequences Sequences - SessionPolicies SessionPolicies - Sessions Sessions - Shares Shares - Stages Stages - StorageIntegrations StorageIntegrations - Streamlits Streamlits - Streams Streams - Tables Tables - Tags Tags - Tasks Tasks - Users Users - Views Views - Warehouses Warehouses + Accounts Accounts + Alerts Alerts + ApiIntegrations ApiIntegrations + ApplicationPackages ApplicationPackages + ApplicationRoles ApplicationRoles + Applications Applications + Comments Comments + CortexSearchServices CortexSearchServices + DatabaseRoles DatabaseRoles + Databases Databases + DataMetricFunctionReferences DataMetricFunctionReferences + DynamicTables DynamicTables + ExternalFunctions ExternalFunctions + ExternalTables ExternalTables + EventTables EventTables + FailoverGroups FailoverGroups + FileFormats FileFormats + Functions Functions + Grants Grants + ManagedAccounts ManagedAccounts + MaskingPolicies MaskingPolicies + MaterializedViews MaterializedViews + NetworkPolicies NetworkPolicies + NetworkRules NetworkRules + NotificationIntegrations NotificationIntegrations + Parameters Parameters + PasswordPolicies PasswordPolicies + Pipes Pipes + PolicyReferences PolicyReferences + Procedures Procedures + ResourceMonitors ResourceMonitors + Roles Roles + RowAccessPolicies RowAccessPolicies + Schemas Schemas + SecurityIntegrations 
SecurityIntegrations + Sequences Sequences + SessionPolicies SessionPolicies + Sessions Sessions + Shares Shares + Stages Stages + StorageIntegrations StorageIntegrations + Streamlits Streamlits + Streams Streams + Tables Tables + Tags Tags + Tasks Tasks + Users Users + Views Views + Warehouses Warehouses } func (c *Client) GetAccountLocator() string { @@ -205,6 +206,7 @@ func (c *Client) initialize() { c.CortexSearchServices = &cortexSearchServices{client: c} c.DatabaseRoles = &databaseRoles{client: c} c.Databases = &databases{client: c} + c.DataMetricFunctionReferences = &dataMetricFunctionReferences{client: c} c.DynamicTables = &dynamicTables{client: c} c.ExternalFunctions = &externalFunctions{client: c} c.ExternalTables = &externalTables{client: c} diff --git a/pkg/sdk/data_metric_function_references_def.go b/pkg/sdk/data_metric_function_references_def.go new file mode 100644 index 0000000000..0b4ec04722 --- /dev/null +++ b/pkg/sdk/data_metric_function_references_def.go @@ -0,0 +1,135 @@ +package sdk + +import ( + "fmt" + "strings" + + g "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk/poc/generator" +) + +//go:generate go run ./poc/main.go + +type DataMetricFuncionRefEntityDomainOption string + +const ( + DataMetricFuncionRefEntityDomainView DataMetricFuncionRefEntityDomainOption = "VIEW" +) + +type DataMetricScheduleStatusOption string + +const ( + DataMetricScheduleStatusStarted DataMetricScheduleStatusOption = "STARTED" + DataMetricScheduleStatusStartedAndPendingScheduleUpdate DataMetricScheduleStatusOption = "STARTED_AND_PENDING_SCHEDULE_UPDATE" + DataMetricScheduleStatusSuspended DataMetricScheduleStatusOption = "SUSPENDED" + DataMetricScheduleStatusSuspendedTableDoesNotExistOrNotAuthorized DataMetricScheduleStatusOption = "SUSPENDED_TABLE_DOES_NOT_EXIST_OR_NOT_AUTHORIZED" + DataMetricScheduleStatusSuspendedDataMetricFunctionDoesNotExistOrNotAuthorized DataMetricScheduleStatusOption = 
"SUSPENDED_DATA_METRIC_FUNCTION_DOES_NOT_EXIST_OR_NOT_AUTHORIZED" + DataMetricScheduleStatusSuspendedTableColumnDoesNotExistOrNotAuthorized DataMetricScheduleStatusOption = "SUSPENDED_TABLE_COLUMN_DOES_NOT_EXIST_OR_NOT_AUTHORIZED" + DataMetricScheduleStatusSuspendedInsufficientPrivilegeToExecuteDataMetricFunction DataMetricScheduleStatusOption = "SUSPENDED_INSUFFICIENT_PRIVILEGE_TO_EXECUTE_DATA_METRIC_FUNCTION" + DataMetricScheduleStatusSuspendedActiveEventTableDoesNotExistOrNotAuthorized DataMetricScheduleStatusOption = "SUSPENDED_ACTIVE_EVENT_TABLE_DOES_NOT_EXIST_OR_NOT_AUTHORIZED" + DataMetricScheduleStatusSuspendedByUserAction DataMetricScheduleStatusOption = "SUSPENDED_BY_USER_ACTION" +) + +// TODO: make is a separate type? +var AllAllowedDataMetricScheduleStatusOptions = []DataMetricScheduleStatusOption{ + DataMetricScheduleStatusStarted, + DataMetricScheduleStatusSuspended, +} + +var AllDataMetricScheduleStatusStartedOptions = []DataMetricScheduleStatusOption{ + DataMetricScheduleStatusStarted, + DataMetricScheduleStatusStartedAndPendingScheduleUpdate, +} + +var AllDataMetricScheduleStatusSuspendedOptions = []DataMetricScheduleStatusOption{ + DataMetricScheduleStatusSuspended, + DataMetricScheduleStatusSuspendedTableDoesNotExistOrNotAuthorized, + DataMetricScheduleStatusSuspendedDataMetricFunctionDoesNotExistOrNotAuthorized, + DataMetricScheduleStatusSuspendedTableColumnDoesNotExistOrNotAuthorized, + DataMetricScheduleStatusSuspendedInsufficientPrivilegeToExecuteDataMetricFunction, + DataMetricScheduleStatusSuspendedActiveEventTableDoesNotExistOrNotAuthorized, +} + +func ToAllowedDataMetricScheduleStatusOption(s string) (DataMetricScheduleStatusOption, error) { + s = strings.ToUpper(s) + switch s { + case string(DataMetricScheduleStatusStarted): + return DataMetricScheduleStatusStarted, nil + case string(DataMetricScheduleStatusSuspended): + return DataMetricScheduleStatusSuspended, nil + default: + return "", fmt.Errorf("invalid 
DataMetricScheduleStatusOption: %s", s) + } +} + +func ToDataMetricScheduleStatusOption(s string) (DataMetricScheduleStatusOption, error) { + s = strings.ToUpper(s) + switch s { + case string(DataMetricScheduleStatusStarted): + return DataMetricScheduleStatusStarted, nil + case string(DataMetricScheduleStatusStartedAndPendingScheduleUpdate): + return DataMetricScheduleStatusStartedAndPendingScheduleUpdate, nil + case string(DataMetricScheduleStatusSuspended): + return DataMetricScheduleStatusSuspended, nil + case string(DataMetricScheduleStatusSuspendedTableDoesNotExistOrNotAuthorized): + return DataMetricScheduleStatusSuspendedTableDoesNotExistOrNotAuthorized, nil + case string(DataMetricScheduleStatusSuspendedDataMetricFunctionDoesNotExistOrNotAuthorized): + return DataMetricScheduleStatusSuspendedDataMetricFunctionDoesNotExistOrNotAuthorized, nil + case string(DataMetricScheduleStatusSuspendedTableColumnDoesNotExistOrNotAuthorized): + return DataMetricScheduleStatusSuspendedTableColumnDoesNotExistOrNotAuthorized, nil + case string(DataMetricScheduleStatusSuspendedInsufficientPrivilegeToExecuteDataMetricFunction): + return DataMetricScheduleStatusSuspendedInsufficientPrivilegeToExecuteDataMetricFunction, nil + case string(DataMetricScheduleStatusSuspendedActiveEventTableDoesNotExistOrNotAuthorized): + return DataMetricScheduleStatusSuspendedActiveEventTableDoesNotExistOrNotAuthorized, nil + case string(DataMetricScheduleStatusSuspendedByUserAction): + return DataMetricScheduleStatusSuspendedByUserAction, nil + default: + return "", fmt.Errorf("invalid DataMetricScheduleStatusOption: %s", s) + } +} + +var DataMetricFunctionReferenceDef = g.NewInterface( + "DataMetricFunctionReferences", + "DataMetricFunctionReference", + g.KindOfT[SchemaObjectIdentifier](), +).CustomOperation( + "GetForEntity", + "https://docs.snowflake.com/en/sql-reference/functions/data_metric_function_references", + g.NewQueryStruct("GetForEntity"). 
+ SQL("SELECT * FROM TABLE(REF_ENTITY_NAME => "). + Identifier("refEntityName", g.KindOfT[SchemaObjectIdentifier](), g.IdentifierOptions().Required()). + SQL(", "). + Assignment( + "REF_ENTITY_DOMAIN", + g.KindOfT[DataMetricFuncionRefEntityDomainOption](), + g.ParameterOptions().SingleQuotes().ArrowEquals().Required(), + ). + SQL(")"), + g.DbStruct("dataMetricFunctionReferencesRow"). + Text("metric_database_name"). + Text("metric_schema_name"). + Text("metric_name"). + Text("argument_signature"). + Text("data_type"). + Text("ref_database_name"). + Text("ref_schema_name"). + Text("ref_entity_name"). + Text("ref_entity_domain"). + Text("ref_arguments"). + Text("ref_id"). + Text("schedule"). + Text("schedule_status"), + g.PlainStruct("DataMetricFunctionReference"). + Text("MetricDatabaseName"). + Text("MetricSchemaName"). + Text("MetricName"). + Text("ArgumentSignature"). + Text("DataType"). + Text("RefDatabaseName"). + Text("RefSchemaName"). + Text("RefEntityName"). + Text("RefEntityDomain"). + Text("RefArguments"). + Text("RefId"). + Text("Schedule"). + Text("ScheduleStatus"), +) diff --git a/pkg/sdk/data_metric_function_references_dto_builders_gen.go b/pkg/sdk/data_metric_function_references_dto_builders_gen.go new file mode 100644 index 0000000000..a78dd8844a --- /dev/null +++ b/pkg/sdk/data_metric_function_references_dto_builders_gen.go @@ -0,0 +1,15 @@ +// Code generated by dto builder generator; DO NOT EDIT. 
+ +package sdk + +import () + +func NewGetForEntityDataMetricFunctionReferenceRequest( + refEntityName ObjectIdentifier, + RefEntityDomain DataMetricFuncionRefEntityDomainOption, +) *GetForEntityDataMetricFunctionReferenceRequest { + s := GetForEntityDataMetricFunctionReferenceRequest{} + s.refEntityName = refEntityName + s.RefEntityDomain = RefEntityDomain + return &s +} diff --git a/pkg/sdk/data_metric_function_references_dto_gen.go b/pkg/sdk/data_metric_function_references_dto_gen.go new file mode 100644 index 0000000000..68006a0545 --- /dev/null +++ b/pkg/sdk/data_metric_function_references_dto_gen.go @@ -0,0 +1,10 @@ +package sdk + +//go:generate go run ./dto-builder-generator/main.go + +var _ optionsProvider[GetForEntityDataMetricFunctionReferenceOptions] = new(GetForEntityDataMetricFunctionReferenceRequest) + +type GetForEntityDataMetricFunctionReferenceRequest struct { + refEntityName ObjectIdentifier // required + RefEntityDomain DataMetricFuncionRefEntityDomainOption // required +} diff --git a/pkg/sdk/data_metric_function_references_gen.go b/pkg/sdk/data_metric_function_references_gen.go new file mode 100644 index 0000000000..59f2d50756 --- /dev/null +++ b/pkg/sdk/data_metric_function_references_gen.go @@ -0,0 +1,84 @@ +package sdk + +import ( + "context" + "encoding/json" + "log" +) + +type DataMetricFunctionReferences interface { + GetForEntity(ctx context.Context, request *GetForEntityDataMetricFunctionReferenceRequest) ([]DataMetricFunctionReference, error) +} + +// GetForEntityDataMetricFunctionReferenceOptions is based on https://docs.snowflake.com/en/sql-reference/functions/data_metric_function_references. 
+type GetForEntityDataMetricFunctionReferenceOptions struct { + selectEverythingFrom bool `ddl:"static" sql:"SELECT * FROM TABLE"` + parameters *dataMetricFunctionReferenceParameters `ddl:"list,parentheses,no_comma"` +} +type dataMetricFunctionReferenceParameters struct { + functionFullyQualifiedName bool `ddl:"static" sql:"SNOWFLAKE.INFORMATION_SCHEMA.DATA_METRIC_FUNCTION_REFERENCES"` + arguments *dataMetricFunctionReferenceFunctionArguments `ddl:"list,parentheses"` +} +type dataMetricFunctionReferenceFunctionArguments struct { + refEntityName []ObjectIdentifier `ddl:"parameter,single_quotes,arrow_equals" sql:"REF_ENTITY_NAME"` + refEntityDomain *DataMetricFuncionRefEntityDomainOption `ddl:"parameter,single_quotes,arrow_equals" sql:"REF_ENTITY_DOMAIN"` +} + +type dataMetricFunctionReferencesRow struct { + MetricDatabaseName string `db:"METRIC_DATABASE_NAME"` + MetricSchemaName string `db:"METRIC_SCHEMA_NAME"` + MetricName string `db:"METRIC_NAME"` + ArgumentSignature string `db:"ARGUMENT_SIGNATURE"` + DataType string `db:"DATA_TYPE"` + RefDatabaseName string `db:"REF_DATABASE_NAME"` + RefSchemaName string `db:"REF_SCHEMA_NAME"` + RefEntityName string `db:"REF_ENTITY_NAME"` + RefEntityDomain string `db:"REF_ENTITY_DOMAIN"` + RefArguments string `db:"REF_ARGUMENTS"` + RefId string `db:"REF_ID"` + Schedule string `db:"SCHEDULE"` + ScheduleStatus string `db:"SCHEDULE_STATUS"` +} + +type DataMetricFunctionRefArgument struct { + Domain string `json:"domain"` + Id string `json:"id"` + Name string `json:"name"` +} +type DataMetricFunctionReference struct { + MetricDatabaseName string + MetricSchemaName string + MetricName string + ArgumentSignature string + DataType string + RefEntityDatabaseName string + RefEntitySchemaName string + RefEntityName string + RefEntityDomain string + RefArguments []DataMetricFunctionRefArgument + RefId string + Schedule string + ScheduleStatus string +} + +func (row dataMetricFunctionReferencesRow) convert() *DataMetricFunctionReference { + 
x := &DataMetricFunctionReference{ + MetricDatabaseName: row.MetricDatabaseName, + MetricSchemaName: row.MetricSchemaName, + MetricName: row.MetricName, + ArgumentSignature: row.ArgumentSignature, + DataType: row.DataType, + RefEntityDatabaseName: row.RefDatabaseName, + RefEntitySchemaName: row.RefSchemaName, + RefEntityName: row.RefEntityName, + RefEntityDomain: row.RefEntityDomain, + RefId: row.RefId, + Schedule: row.Schedule, + ScheduleStatus: row.ScheduleStatus, + } + err := json.Unmarshal([]byte(row.RefArguments), &x.RefArguments) + if err != nil { + log.Println(err) + } + return x +} diff --git a/pkg/sdk/data_metric_function_references_gen_integration_test.go b/pkg/sdk/data_metric_function_references_gen_integration_test.go new file mode 100644 index 0000000000..c855280322 --- /dev/null +++ b/pkg/sdk/data_metric_function_references_gen_integration_test.go @@ -0,0 +1,11 @@ +package sdk + +import "testing" + +func TestInt_DataMetricFunctionReferences(t *testing.T) { + // TODO: prepare common resources + + t.Run("GetForEntity", func(t *testing.T) { + // TODO: fill me + }) +} diff --git a/pkg/sdk/data_metric_function_references_gen_test.go b/pkg/sdk/data_metric_function_references_gen_test.go new file mode 100644 index 0000000000..d73646432f --- /dev/null +++ b/pkg/sdk/data_metric_function_references_gen_test.go @@ -0,0 +1,51 @@ +package sdk + +import "testing" + +func TestDataMetricFunctionReferences_GetForEntity(t *testing.T) { + t.Run("validation: missing parameters", func(t *testing.T) { + opts := &GetForEntityDataMetricFunctionReferenceOptions{} + assertOptsInvalidJoinedErrors(t, opts, errNotSet("GetForEntityDataMetricFunctionReferenceOptions", "parameters")) + }) + + t.Run("validation: missing arguments", func(t *testing.T) { + opts := &GetForEntityDataMetricFunctionReferenceOptions{ + parameters: &dataMetricFunctionReferenceParameters{}, + } + assertOptsInvalidJoinedErrors(t, opts, errNotSet("dataMetricFunctionReferenceParameters", "arguments")) + }) + + 
t.Run("validation: missing refEntityName", func(t *testing.T) { + opts := &GetForEntityDataMetricFunctionReferenceOptions{ + parameters: &dataMetricFunctionReferenceParameters{ + arguments: &dataMetricFunctionReferenceFunctionArguments{ + refEntityDomain: Pointer(DataMetricFuncionRefEntityDomainView), + }, + }, + } + assertOptsInvalidJoinedErrors(t, opts, errNotSet("dataMetricFunctionReferenceFunctionArguments", "refEntityName")) + }) + + t.Run("validation: missing refEntityDomain", func(t *testing.T) { + opts := &GetForEntityDataMetricFunctionReferenceOptions{ + parameters: &dataMetricFunctionReferenceParameters{ + arguments: &dataMetricFunctionReferenceFunctionArguments{ + refEntityName: []ObjectIdentifier{NewSchemaObjectIdentifier("a", "b", "c")}, + }, + }, + } + assertOptsInvalidJoinedErrors(t, opts, errNotSet("dataMetricFunctionReferenceFunctionArguments", "refEntityDomain")) + }) + + t.Run("view domain", func(t *testing.T) { + opts := &GetForEntityDataMetricFunctionReferenceOptions{ + parameters: &dataMetricFunctionReferenceParameters{ + arguments: &dataMetricFunctionReferenceFunctionArguments{ + refEntityName: []ObjectIdentifier{NewSchemaObjectIdentifier("a", "b", "c")}, + refEntityDomain: Pointer(DataMetricFuncionRefEntityDomainView), + }, + }, + } + assertOptsValidAndSQLEquals(t, opts, `SELECT * FROM TABLE (SNOWFLAKE.INFORMATION_SCHEMA.DATA_METRIC_FUNCTION_REFERENCES (REF_ENTITY_NAME => '\"a\".\"b\".\"c\"', REF_ENTITY_DOMAIN => 'VIEW'))`) + }) +} diff --git a/pkg/sdk/data_metric_function_references_impl_gen.go b/pkg/sdk/data_metric_function_references_impl_gen.go new file mode 100644 index 0000000000..c44e74eac5 --- /dev/null +++ b/pkg/sdk/data_metric_function_references_impl_gen.go @@ -0,0 +1,33 @@ +package sdk + +import ( + "context" +) + +var _ DataMetricFunctionReferences = (*dataMetricFunctionReferences)(nil) + +type dataMetricFunctionReferences struct { + client *Client +} + +func (v *dataMetricFunctionReferences) GetForEntity(ctx context.Context, 
request *GetForEntityDataMetricFunctionReferenceRequest) ([]DataMetricFunctionReference, error) { + opts := request.toOpts() + dbRows, err := validateAndQuery[dataMetricFunctionReferencesRow](v.client, ctx, opts) + if err != nil { + return nil, err + } + resultList := convertRows[dataMetricFunctionReferencesRow, DataMetricFunctionReference](dbRows) + return resultList, nil +} + +func (r *GetForEntityDataMetricFunctionReferenceRequest) toOpts() *GetForEntityDataMetricFunctionReferenceOptions { + opts := &GetForEntityDataMetricFunctionReferenceOptions{ + parameters: &dataMetricFunctionReferenceParameters{ + arguments: &dataMetricFunctionReferenceFunctionArguments{ + refEntityName: []ObjectIdentifier{r.refEntityName}, + refEntityDomain: Pointer(r.RefEntityDomain), + }, + }, + } + return opts +} diff --git a/pkg/sdk/data_metric_function_references_validations_gen.go b/pkg/sdk/data_metric_function_references_validations_gen.go new file mode 100644 index 0000000000..395a1f323c --- /dev/null +++ b/pkg/sdk/data_metric_function_references_validations_gen.go @@ -0,0 +1,27 @@ +package sdk + +import "errors" + +var _ validatable = new(GetForEntityDataMetricFunctionReferenceOptions) + +func (opts *GetForEntityDataMetricFunctionReferenceOptions) validate() error { + if opts == nil { + return ErrNilOptions + } + var errs []error + if !valueSet(opts.parameters) { + errs = append(errs, errNotSet("GetForEntityDataMetricFunctionReferenceOptions", "parameters")) + } else { + if !valueSet(opts.parameters.arguments) { + errs = append(errs, errNotSet("dataMetricFunctionReferenceParameters", "arguments")) + } else { + if opts.parameters.arguments.refEntityDomain == nil { + errs = append(errs, errNotSet("dataMetricFunctionReferenceFunctionArguments", "refEntityDomain")) + } + if opts.parameters.arguments.refEntityName == nil { + errs = append(errs, errNotSet("dataMetricFunctionReferenceFunctionArguments", "refEntityName")) + } + } + } + return errors.Join(errs...) 
+} diff --git a/pkg/sdk/poc/main.go b/pkg/sdk/poc/main.go index f8f1014bdb..e62a113123 100644 --- a/pkg/sdk/poc/main.go +++ b/pkg/sdk/poc/main.go @@ -17,31 +17,32 @@ import ( ) var definitionMapping = map[string]*generator.Interface{ - "database_role_def.go": example.DatabaseRole, - "network_policies_def.go": sdk.NetworkPoliciesDef, - "session_policies_def.go": sdk.SessionPoliciesDef, - "tasks_def.go": sdk.TasksDef, - "streams_def.go": sdk.StreamsDef, - "application_roles_def.go": sdk.ApplicationRolesDef, - "views_def.go": sdk.ViewsDef, - "stages_def.go": sdk.StagesDef, - "functions_def.go": sdk.FunctionsDef, - "procedures_def.go": sdk.ProceduresDef, - "event_tables_def.go": sdk.EventTablesDef, - "application_packages_def.go": sdk.ApplicationPackagesDef, - "storage_integration_def.go": sdk.StorageIntegrationDef, - "managed_accounts_def.go": sdk.ManagedAccountsDef, - "row_access_policies_def.go": sdk.RowAccessPoliciesDef, - "applications_def.go": sdk.ApplicationsDef, - "sequences_def.go": sdk.SequencesDef, - "materialized_views_def.go": sdk.MaterializedViewsDef, - "api_integrations_def.go": sdk.ApiIntegrationsDef, - "notification_integrations_def.go": sdk.NotificationIntegrationsDef, - "external_functions_def.go": sdk.ExternalFunctionsDef, - "streamlits_def.go": sdk.StreamlitsDef, - "network_rule_def.go": sdk.NetworkRuleDef, - "security_integrations_def.go": sdk.SecurityIntegrationsDef, - "cortex_search_services_def.go": sdk.CortexSearchServiceDef, + "database_role_def.go": example.DatabaseRole, + "network_policies_def.go": sdk.NetworkPoliciesDef, + "session_policies_def.go": sdk.SessionPoliciesDef, + "tasks_def.go": sdk.TasksDef, + "streams_def.go": sdk.StreamsDef, + "application_roles_def.go": sdk.ApplicationRolesDef, + "views_def.go": sdk.ViewsDef, + "stages_def.go": sdk.StagesDef, + "functions_def.go": sdk.FunctionsDef, + "procedures_def.go": sdk.ProceduresDef, + "event_tables_def.go": sdk.EventTablesDef, + "application_packages_def.go": 
sdk.ApplicationPackagesDef, + "storage_integration_def.go": sdk.StorageIntegrationDef, + "managed_accounts_def.go": sdk.ManagedAccountsDef, + "row_access_policies_def.go": sdk.RowAccessPoliciesDef, + "applications_def.go": sdk.ApplicationsDef, + "sequences_def.go": sdk.SequencesDef, + "materialized_views_def.go": sdk.MaterializedViewsDef, + "api_integrations_def.go": sdk.ApiIntegrationsDef, + "notification_integrations_def.go": sdk.NotificationIntegrationsDef, + "external_functions_def.go": sdk.ExternalFunctionsDef, + "streamlits_def.go": sdk.StreamlitsDef, + "network_rule_def.go": sdk.NetworkRuleDef, + "security_integrations_def.go": sdk.SecurityIntegrationsDef, + "cortex_search_services_def.go": sdk.CortexSearchServiceDef, + "data_metric_function_references_def.go": sdk.DataMetricFunctionReferenceDef, } func main() { diff --git a/pkg/sdk/policy_references.go b/pkg/sdk/policy_references.go index 9f5ee04e21..8decc63793 100644 --- a/pkg/sdk/policy_references.go +++ b/pkg/sdk/policy_references.go @@ -68,19 +68,20 @@ type policyReferenceFunctionArguments struct { refEntityDomain *PolicyEntityDomain `ddl:"parameter,single_quotes,arrow_equals" sql:"REF_ENTITY_DOMAIN"` } -// TODO: use PolicyKind in PolicyReference type PolicyKind string const ( PolicyKindAggregationPolicy PolicyKind = "AGGREGATION_POLICY" PolicyKindRowAccessPolicy PolicyKind = "ROW_ACCESS_POLICY" + PolicyKindPasswordPolicy PolicyKind = "PASSWORD_POLICY" + PolicyKindMaskingPolicy PolicyKind = "MASKING_POLICY" ) type PolicyReference struct { PolicyDb *string PolicySchema *string PolicyName string - PolicyKind string + PolicyKind PolicyKind RefDatabaseName *string RefSchemaName *string RefEntityName string @@ -113,7 +114,7 @@ type policyReferenceDBRow struct { func (row policyReferenceDBRow) convert() *PolicyReference { policyReference := PolicyReference{ PolicyName: row.PolicyName, - PolicyKind: row.PolicyKind, + PolicyKind: PolicyKind(row.PolicyKind), RefEntityName: row.RefEntityName, RefEntityDomain: 
row.RefEntityDomain, } diff --git a/pkg/sdk/testint/data_metric_function_references_gen_integration_test.go b/pkg/sdk/testint/data_metric_function_references_gen_integration_test.go new file mode 100644 index 0000000000..799df31aab --- /dev/null +++ b/pkg/sdk/testint/data_metric_function_references_gen_integration_test.go @@ -0,0 +1,41 @@ +package testint + +import ( + "testing" + + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk" + "github.com/stretchr/testify/require" +) + +func TestInt_DataMetricFunctionReferences(t *testing.T) { + client := testClient(t) + ctx := testContext(t) + + t.Run("view domain", func(t *testing.T) { + functionId := sdk.NewSchemaObjectIdentifier("SNOWFLAKE", "CORE", "AVG") + statement := "SELECT ROLE_NAME, ROLE_OWNER FROM INFORMATION_SCHEMA.APPLICABLE_ROLES" + view, viewCleanup := testClientHelper().View.CreateView(t, statement) + t.Cleanup(viewCleanup) + + err := client.Views.Alter(ctx, sdk.NewAlterViewRequest(view.ID()).WithSetDataMetricSchedule(*sdk.NewViewSetDataMetricScheduleRequest("5 MINUTE"))) + require.NoError(t, err) + err = client.Views.Alter(ctx, sdk.NewAlterViewRequest(view.ID()).WithAddDataMetricFunction(*sdk.NewViewAddDataMetricFunctionRequest([]sdk.ViewDataMetricFunction{{ + DataMetricFunction: functionId, + On: []sdk.Column{{Value: "ROLE_NAME"}}, + }}))) + require.NoError(t, err) + + dmfs, err := client.DataMetricFunctionReferences.GetForEntity(ctx, sdk.NewGetForEntityDataMetricFunctionReferenceRequest(view.ID(), sdk.DataMetricFuncionRefEntityDomainView)) + require.NoError(t, err) + require.Equal(t, 1, len(dmfs)) + dmf := dmfs[0] + require.Equal(t, string(sdk.DataMetricFuncionRefEntityDomainView), dmf.RefEntityDomain) + require.Equal(t, functionId.DatabaseName(), dmf.MetricDatabaseName) + require.Equal(t, functionId.SchemaName(), dmf.MetricSchemaName) + require.Equal(t, functionId.Name(), dmf.MetricName) + require.Equal(t, view.ID().DatabaseName(), dmf.RefEntityDatabaseName) + require.Equal(t, 
view.ID().SchemaName(), dmf.RefEntitySchemaName) + require.Equal(t, view.ID().Name(), dmf.RefEntityName) + require.Equal(t, "*/5 * * * * UTC", dmf.Schedule) + }) +} diff --git a/pkg/sdk/testint/policy_references_integration_test.go b/pkg/sdk/testint/policy_references_integration_test.go index c1d0d8bc2e..2f0c09e841 100644 --- a/pkg/sdk/testint/policy_references_integration_test.go +++ b/pkg/sdk/testint/policy_references_integration_test.go @@ -35,7 +35,7 @@ func TestInt_PolicyReferences(t *testing.T) { require.NoError(t, err) require.Equal(t, 1, len(policyReferences)) require.Equal(t, passwordPolicyId.Name(), policyReferences[0].PolicyName) - require.Equal(t, "PASSWORD_POLICY", policyReferences[0].PolicyKind) + require.Equal(t, sdk.PolicyKindPasswordPolicy, policyReferences[0].PolicyKind) }) t.Run("tag domain", func(t *testing.T) { @@ -54,7 +54,7 @@ func TestInt_PolicyReferences(t *testing.T) { require.NoError(t, err) require.Equal(t, 1, len(policyReferences)) require.Equal(t, maskingPolicy.ID().Name(), policyReferences[0].PolicyName) - require.Equal(t, "MASKING_POLICY", policyReferences[0].PolicyKind) + require.Equal(t, sdk.PolicyKindMaskingPolicy, policyReferences[0].PolicyKind) err = client.Tags.Alter(ctx, sdk.NewAlterTagRequest(tag.ID()).WithUnset( sdk.NewTagUnsetRequest().WithMaskingPolicies([]sdk.SchemaObjectIdentifier{maskingPolicy.ID()}), diff --git a/pkg/sdk/testint/views_gen_integration_test.go b/pkg/sdk/testint/views_gen_integration_test.go index 59e77f666b..b212348f2d 100644 --- a/pkg/sdk/testint/views_gen_integration_test.go +++ b/pkg/sdk/testint/views_gen_integration_test.go @@ -106,7 +106,7 @@ func TestInt_Views(t *testing.T) { } } - assertDataMetricFunctionReference := func(t *testing.T, dataMetricFunctionReference helpers.DataMetricFunctionReference, + assertDataMetricFunctionReference := func(t *testing.T, dataMetricFunctionReference sdk.DataMetricFunctionReference, viewId sdk.SchemaObjectIdentifier, schedule string, ) { @@ -405,12 +405,11 @@ func 
TestInt_Views(t *testing.T) { err := client.Views.Alter(ctx, alterRequest) require.NoError(t, err) - alteredViewDetails, err := client.Views.Describe(ctx, id) + policyReferences, err := testClientHelper().PolicyReferences.GetPolicyReferences(t, view.ID(), sdk.ObjectTypeView) require.NoError(t, err) + require.Len(t, policyReferences, 1) - assert.Equal(t, 1, len(alteredViewDetails)) - // TODO [SNOW-1348118]: make nicer during the view rework - assert.Equal(t, maskingPolicy.ID().FullyQualifiedName(), sdk.NewSchemaObjectIdentifierFromFullyQualifiedName(*alteredViewDetails[0].PolicyName).FullyQualifiedName()) + assertPolicyReference(t, policyReferences[0], maskingPolicy.ID(), "MASKING_POLICY", view.ID(), sdk.Pointer("ID")) alterRequest = sdk.NewAlterViewRequest(id).WithUnsetMaskingPolicyOnColumn( *sdk.NewViewUnsetColumnMaskingPolicyRequest("ID"), @@ -418,11 +417,8 @@ func TestInt_Views(t *testing.T) { err = client.Views.Alter(ctx, alterRequest) require.NoError(t, err) - alteredViewDetails, err = client.Views.Describe(ctx, id) - require.NoError(t, err) - - assert.Equal(t, 1, len(alteredViewDetails)) - assert.Empty(t, alteredViewDetails[0].PolicyName) + _, err = testClientHelper().PolicyReferences.GetPolicyReference(t, view.ID(), sdk.ObjectTypeView) + require.Error(t, err, "no rows in result set") }) t.Run("alter view: set and unset projection policy on column", func(t *testing.T) { @@ -562,7 +558,7 @@ func TestInt_Views(t *testing.T) { // set cron schedule cron := "5 * * * * UTC" - alterRequest := sdk.NewAlterViewRequest(id).WithSetDataMetricSchedule(*sdk.NewViewSetDataMetricScheduleRequest().WithUsingCron(sdk.ViewUsingCronRequest{Cron: cron})) + alterRequest := sdk.NewAlterViewRequest(id).WithSetDataMetricSchedule(*sdk.NewViewSetDataMetricScheduleRequest(cron)) err := client.Views.Alter(ctx, alterRequest) require.NoError(t, err) @@ -576,8 +572,7 @@ func TestInt_Views(t *testing.T) { err = client.Views.Alter(ctx, alterRequest) require.NoError(t, err) - 
dataMetricFunctionReferences, err := testClientHelper().DataMetricFunctionReferences.GetDataMetricFunctionReferences(t, view.ID(), sdk.ObjectTypeView) - require.NoError(t, err) + dataMetricFunctionReferences := testClientHelper().DataMetricFunctionReferences.GetDataMetricFunctionReferences(t, view.ID(), sdk.DataMetricFuncionRefEntityDomainView) require.Len(t, dataMetricFunctionReferences, 1) assertDataMetricFunctionReference(t, dataMetricFunctionReferences[0], view.ID(), cron) @@ -592,7 +587,7 @@ func TestInt_Views(t *testing.T) { err = client.Views.Alter(ctx, alterRequest) require.NoError(t, err) - dataMetricFunctionReferences, err = testClientHelper().DataMetricFunctionReferences.GetDataMetricFunctionReferences(t, view.ID(), sdk.ObjectTypeView) + dataMetricFunctionReferences = testClientHelper().DataMetricFunctionReferences.GetDataMetricFunctionReferences(t, view.ID(), sdk.DataMetricFuncionRefEntityDomainView) require.NoError(t, err) require.Len(t, dataMetricFunctionReferences, 0) @@ -610,8 +605,7 @@ func TestInt_Views(t *testing.T) { err = client.Views.Alter(ctx, alterRequest) require.NoError(t, err) - dataMetricFunctionReferences, err = testClientHelper().DataMetricFunctionReferences.GetDataMetricFunctionReferences(t, view.ID(), sdk.ObjectTypeView) - require.NoError(t, err) + dataMetricFunctionReferences = testClientHelper().DataMetricFunctionReferences.GetDataMetricFunctionReferences(t, view.ID(), sdk.DataMetricFuncionRefEntityDomainView) require.Len(t, dataMetricFunctionReferences, 2) assertDataMetricFunctionReference(t, dataMetricFunctionReferences[0], view.ID(), cron) diff --git a/pkg/sdk/views_def.go b/pkg/sdk/views_def.go index 680407fb5a..e981a19c70 100644 --- a/pkg/sdk/views_def.go +++ b/pkg/sdk/views_def.go @@ -1,9 +1,40 @@ package sdk -import g "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk/poc/generator" +import ( + "fmt" + "strings" + + g "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk/poc/generator" +) //go:generate 
go run ./poc/main.go +var AllViewDataMetricScheduleMinutes = []int{5, 15, 30, 60, 720, 1440} + +type ViewDataMetricScheduleStatusOperationOption string + +const ( + ViewDataMetricScheduleStatusOperationResume ViewDataMetricScheduleStatusOperationOption = "RESUME" + ViewDataMetricScheduleStatusOperationSuspend ViewDataMetricScheduleStatusOperationOption = "SUSPEND" +) + +var AllViewDataMetricScheduleStatusOperationOptions = []ViewDataMetricScheduleStatusOperationOption{ + ViewDataMetricScheduleStatusOperationResume, + ViewDataMetricScheduleStatusOperationSuspend, +} + +func ToViewDataMetricScheduleStatusOperationOption(s string) (ViewDataMetricScheduleStatusOperationOption, error) { + s = strings.ToUpper(s) + switch s { + case string(ViewDataMetricScheduleStatusOperationResume): + return ViewDataMetricScheduleStatusOperationResume, nil + case string(ViewDataMetricScheduleStatusOperationSuspend): + return ViewDataMetricScheduleStatusOperationSuspend, nil + default: + return "", fmt.Errorf("invalid ViewDataMetricScheduleStatusOperationOption: %s", s) + } +} + var viewDbRow = g.DbStruct("viewDBRow"). Text("created_on"). Text("name"). @@ -79,6 +110,16 @@ var dataMetricFunctionDef = g.NewQueryStruct("ViewDataMetricFunction"). ListAssignment("ON", "Column", g.ParameterOptions().Required().NoEquals().Parentheses()). WithValidation(g.ValidIdentifier, "DataMetricFunction") +var modifyDataMetricFunctionDef = g.NewQueryStruct("ViewModifyDataMetricFunction"). + Identifier("DataMetricFunction", g.KindOfT[SchemaObjectIdentifier](), g.IdentifierOptions().Required()). + ListAssignment("ON", "Column", g.ParameterOptions().Required().NoEquals().Parentheses()). + Assignment( + "", + g.KindOfT[ViewDataMetricScheduleStatusOperationOption](), + g.ParameterOptions().NoEquals().NoQuotes(), + ). + WithValidation(g.ValidIdentifier, "DataMetricFunction") + var viewColumn = g.NewQueryStruct("ViewColumn"). Text("Name", g.KeywordOptions().Required().DoubleQuotes()). 
OptionalQueryStructField("ProjectionPolicy", viewColumnProjectionPolicy, g.KeywordOptions()). @@ -111,12 +152,13 @@ var viewDropDataMetricFunction = g.NewQueryStruct("ViewDropDataMetricFunction"). SQL("DROP"). ListAssignment("DATA METRIC FUNCTION", "ViewDataMetricFunction", g.ParameterOptions().NoEquals().Required()) +var viewModifyDataMetricFunction = g.NewQueryStruct("ViewModifyDataMetricFunctions"). + SQL("MODIFY"). + ListAssignment("DATA METRIC FUNCTION", "ViewModifyDataMetricFunction", g.ParameterOptions().NoEquals().Required()) + var viewSetDataMetricSchedule = g.NewQueryStruct("ViewSetDataMetricSchedule"). - SQL("SET DATA_METRIC_SCHEDULE ="). - OptionalQueryStructField("Minutes", viewMinute, g.KeywordOptions()). - OptionalQueryStructField("UsingCron", viewUsingCron, g.KeywordOptions()). - OptionalSQL("TRIGGER_ON_CHANGES"). - WithValidation(g.ExactlyOneValueSet, "Minutes", "UsingCron", "TriggerOnChanges") + SQL("SET"). + TextAssignment("DATA_METRIC_SCHEDULE", g.ParameterOptions().SingleQuotes()) var viewUnsetDataMetricSchedule = g.NewQueryStruct("ViewUnsetDataMetricSchedule"). SQL("UNSET DATA_METRIC_SCHEDULE") @@ -244,6 +286,7 @@ var ViewsDef = g.NewInterface( OptionalUnsetTags(). OptionalQueryStructField("AddDataMetricFunction", viewAddDataMetricFunction, g.KeywordOptions()). OptionalQueryStructField("DropDataMetricFunction", viewDropDataMetricFunction, g.KeywordOptions()). + OptionalQueryStructField("ModifyDataMetricFunction", viewModifyDataMetricFunction, g.KeywordOptions()). OptionalQueryStructField("SetDataMetricSchedule", viewSetDataMetricSchedule, g.KeywordOptions()). OptionalQueryStructField("UnsetDataMetricSchedule", viewUnsetDataMetricSchedule, g.KeywordOptions()). OptionalQueryStructField("AddRowAccessPolicy", viewAddRowAccessPolicy, g.KeywordOptions()). @@ -260,7 +303,7 @@ var ViewsDef = g.NewInterface( OptionalQueryStructField("UnsetTagsOnColumn", viewUnsetColumnTags, g.KeywordOptions()). WithValidation(g.ValidIdentifier, "name"). 
WithValidation(g.ExactlyOneValueSet, "RenameTo", "SetComment", "UnsetComment", "SetSecure", "SetChangeTracking", - "UnsetSecure", "SetTags", "UnsetTags", "AddDataMetricFunction", "DropDataMetricFunction", "SetDataMetricSchedule", "UnsetDataMetricSchedule", + "UnsetSecure", "SetTags", "UnsetTags", "AddDataMetricFunction", "DropDataMetricFunction", "ModifyDataMetricFunction", "SetDataMetricSchedule", "UnsetDataMetricSchedule", "AddRowAccessPolicy", "DropRowAccessPolicy", "DropAndAddRowAccessPolicy", "DropAllRowAccessPolicies", "SetAggregationPolicy", "UnsetAggregationPolicy", "SetMaskingPolicyOnColumn", "UnsetMaskingPolicyOnColumn", "SetProjectionPolicyOnColumn", "UnsetProjectionPolicyOnColumn", "SetTagsOnColumn", @@ -269,6 +312,7 @@ var ViewsDef = g.NewInterface( WithValidation(g.ConflictingFields, "IfExists", "UnsetSecure"), columnDef, dataMetricFunctionDef, + modifyDataMetricFunctionDef, ). DropOperation( "https://docs.snowflake.com/en/sql-reference/sql/drop-view", diff --git a/pkg/sdk/views_dto_builders_gen.go b/pkg/sdk/views_dto_builders_gen.go index d16848ec28..7b26eef482 100644 --- a/pkg/sdk/views_dto_builders_gen.go +++ b/pkg/sdk/views_dto_builders_gen.go @@ -204,6 +204,11 @@ func (s *AlterViewRequest) WithDropDataMetricFunction(DropDataMetricFunction Vie return s } +func (s *AlterViewRequest) WithModifyDataMetricFunction(ModifyDataMetricFunction ViewModifyDataMetricFunctionsRequest) *AlterViewRequest { + s.ModifyDataMetricFunction = &ModifyDataMetricFunction + return s +} + func (s *AlterViewRequest) WithSetDataMetricSchedule(SetDataMetricSchedule ViewSetDataMetricScheduleRequest) *AlterViewRequest { s.SetDataMetricSchedule = &SetDataMetricSchedule return s @@ -290,38 +295,19 @@ func NewViewDropDataMetricFunctionRequest( return &s } -func NewViewSetDataMetricScheduleRequest() *ViewSetDataMetricScheduleRequest { - return &ViewSetDataMetricScheduleRequest{} -} - -func (s *ViewSetDataMetricScheduleRequest) WithMinutes(Minutes ViewMinuteRequest) 
*ViewSetDataMetricScheduleRequest { - s.Minutes = &Minutes - return s -} - -func (s *ViewSetDataMetricScheduleRequest) WithUsingCron(UsingCron ViewUsingCronRequest) *ViewSetDataMetricScheduleRequest { - s.UsingCron = &UsingCron - return s -} - -func (s *ViewSetDataMetricScheduleRequest) WithTriggerOnChanges(TriggerOnChanges bool) *ViewSetDataMetricScheduleRequest { - s.TriggerOnChanges = &TriggerOnChanges - return s -} - -func NewViewMinuteRequest( - Minutes int, -) *ViewMinuteRequest { - s := ViewMinuteRequest{} - s.Minutes = Minutes +func NewViewModifyDataMetricFunctionsRequest( + DataMetricFunction []ViewModifyDataMetricFunction, +) *ViewModifyDataMetricFunctionsRequest { + s := ViewModifyDataMetricFunctionsRequest{} + s.DataMetricFunction = DataMetricFunction return &s } -func NewViewUsingCronRequest( - Cron string, -) *ViewUsingCronRequest { - s := ViewUsingCronRequest{} - s.Cron = Cron +func NewViewSetDataMetricScheduleRequest( + DataMetricSchedule string, +) *ViewSetDataMetricScheduleRequest { + s := ViewSetDataMetricScheduleRequest{} + s.DataMetricSchedule = DataMetricSchedule return &s } diff --git a/pkg/sdk/views_dto_gen.go b/pkg/sdk/views_dto_gen.go index 4b089e63d3..35d5c7b4ed 100644 --- a/pkg/sdk/views_dto_gen.go +++ b/pkg/sdk/views_dto_gen.go @@ -70,6 +70,7 @@ type AlterViewRequest struct { UnsetTags []ObjectIdentifier AddDataMetricFunction *ViewAddDataMetricFunctionRequest DropDataMetricFunction *ViewDropDataMetricFunctionRequest + ModifyDataMetricFunction *ViewModifyDataMetricFunctionsRequest SetDataMetricSchedule *ViewSetDataMetricScheduleRequest UnsetDataMetricSchedule *ViewUnsetDataMetricScheduleRequest AddRowAccessPolicy *ViewAddRowAccessPolicyRequest @@ -94,18 +95,12 @@ type ViewDropDataMetricFunctionRequest struct { DataMetricFunction []ViewDataMetricFunction // required } -type ViewSetDataMetricScheduleRequest struct { - Minutes *ViewMinuteRequest - UsingCron *ViewUsingCronRequest - TriggerOnChanges *bool -} - -type ViewMinuteRequest struct { 
- Minutes int // required +type ViewModifyDataMetricFunctionsRequest struct { + DataMetricFunction []ViewModifyDataMetricFunction // required } -type ViewUsingCronRequest struct { - Cron string // required +type ViewSetDataMetricScheduleRequest struct { + DataMetricSchedule string // required } type ViewUnsetDataMetricScheduleRequest struct{} diff --git a/pkg/sdk/views_gen.go b/pkg/sdk/views_gen.go index 2c85f8b30a..49b1e3952c 100644 --- a/pkg/sdk/views_gen.go +++ b/pkg/sdk/views_gen.go @@ -73,6 +73,7 @@ type AlterViewOptions struct { UnsetTags []ObjectIdentifier `ddl:"keyword" sql:"UNSET TAG"` AddDataMetricFunction *ViewAddDataMetricFunction `ddl:"keyword"` DropDataMetricFunction *ViewDropDataMetricFunction `ddl:"keyword"` + ModifyDataMetricFunction *ViewModifyDataMetricFunctions `ddl:"keyword"` SetDataMetricSchedule *ViewSetDataMetricSchedule `ddl:"keyword"` UnsetDataMetricSchedule *ViewUnsetDataMetricSchedule `ddl:"keyword"` AddRowAccessPolicy *ViewAddRowAccessPolicy `ddl:"keyword"` @@ -95,6 +96,11 @@ type ViewDataMetricFunction struct { DataMetricFunction SchemaObjectIdentifier `ddl:"identifier"` On []Column `ddl:"parameter,parentheses,no_equals" sql:"ON"` } +type ViewModifyDataMetricFunction struct { + DataMetricFunction SchemaObjectIdentifier `ddl:"identifier"` + On []Column `ddl:"parameter,parentheses,no_equals" sql:"ON"` + ViewDataMetricScheduleStatusOperationOption `ddl:"parameter,no_quotes,no_equals"` +} type ViewAddDataMetricFunction struct { add bool `ddl:"static" sql:"ADD"` DataMetricFunction []ViewDataMetricFunction `ddl:"parameter,no_equals" sql:"DATA METRIC FUNCTION"` @@ -103,24 +109,13 @@ type ViewDropDataMetricFunction struct { drop bool `ddl:"static" sql:"DROP"` DataMetricFunction []ViewDataMetricFunction `ddl:"parameter,no_equals" sql:"DATA METRIC FUNCTION"` } +type ViewModifyDataMetricFunctions struct { + modify bool `ddl:"static" sql:"MODIFY"` + DataMetricFunction []ViewModifyDataMetricFunction `ddl:"parameter,no_equals" sql:"DATA METRIC 
FUNCTION"` +} type ViewSetDataMetricSchedule struct { - setDataMetricSchedule bool `ddl:"static" sql:"SET DATA_METRIC_SCHEDULE ="` - Minutes *ViewMinute `ddl:"keyword"` - UsingCron *ViewUsingCron `ddl:"keyword"` - TriggerOnChanges *bool `ddl:"keyword,single_quotes" sql:"TRIGGER_ON_CHANGES"` -} -type ViewMinute struct { - prefix bool `ddl:"static" sql:"'"` - Minutes int `ddl:"keyword"` - suffix bool `ddl:"static" sql:"MINUTE'"` -} -type ViewUsingCron struct { - prefix bool `ddl:"static" sql:"'USING CRON"` - Cron string `ddl:"keyword"` - suffix bool `ddl:"static" sql:"'"` -} -type ViewTriggerOnChanges struct { - triggerOnChanges bool `ddl:"static" sql:"TRIGGER_ON_CHANGES"` + set bool `ddl:"static" sql:"SET"` + DataMetricSchedule string `ddl:"parameter,single_quotes" sql:"DATA_METRIC_SCHEDULE"` } type ViewUnsetDataMetricSchedule struct { unsetDataMetricSchedule bool `ddl:"static" sql:"UNSET DATA_METRIC_SCHEDULE"` diff --git a/pkg/sdk/views_gen_test.go b/pkg/sdk/views_gen_test.go index 94ef5d195d..919ab84e89 100644 --- a/pkg/sdk/views_gen_test.go +++ b/pkg/sdk/views_gen_test.go @@ -142,29 +142,16 @@ func TestViews_Alter(t *testing.T) { assertOptsInvalidJoinedErrors(t, opts, ErrInvalidObjectIdentifier) }) - t.Run("validation: exactly one field from [opts.RenameTo opts.SetComment opts.UnsetComment opts.SetSecure opts.SetChangeTracking opts.UnsetSecure opts.SetTags opts.UnsetTags opts.AddDataMetricFunction opts.DropDataMetricFunction opts.AddRowAccessPolicy opts.DropRowAccessPolicy opts.DropAndAddRowAccessPolicy opts.DropAllRowAccessPolicies opts.SetMaskingPolicyOnColumn opts.UnsetMaskingPolicyOnColumn opts.SetTagsOnColumn opts.UnsetTagsOnColumn] should be present", func(t *testing.T) { + t.Run("validation: exactly one field from [opts.RenameTo opts.SetComment opts.UnsetComment opts.SetSecure opts.SetChangeTracking opts.UnsetSecure opts.SetTags opts.UnsetTags opts.AddDataMetricFunction opts.DropDataMetricFunction opts.ModifyDataMetricFunction opts.AddRowAccessPolicy 
opts.DropRowAccessPolicy opts.DropAndAddRowAccessPolicy opts.DropAllRowAccessPolicies opts.SetMaskingPolicyOnColumn opts.UnsetMaskingPolicyOnColumn opts.SetTagsOnColumn opts.UnsetTagsOnColumn] should be present", func(t *testing.T) { opts := defaultOpts() - assertOptsInvalidJoinedErrors(t, opts, errExactlyOneOf("AlterViewOptions", "RenameTo", "SetComment", "UnsetComment", "SetSecure", "SetChangeTracking", "UnsetSecure", "SetTags", "UnsetTags", "AddDataMetricFunction", "DropDataMetricFunction", "SetDataMetricSchedule", "UnsetDataMetricSchedule", "AddRowAccessPolicy", "DropRowAccessPolicy", "DropAndAddRowAccessPolicy", "DropAllRowAccessPolicies", "SetAggregationPolicy", "UnsetAggregationPolicy", "SetMaskingPolicyOnColumn", "UnsetMaskingPolicyOnColumn", "SetProjectionPolicyOnColumn", "UnsetProjectionPolicyOnColumn", "SetTagsOnColumn", "UnsetTagsOnColumn")) + assertOptsInvalidJoinedErrors(t, opts, errExactlyOneOf("AlterViewOptions", "RenameTo", "SetComment", "UnsetComment", "SetSecure", "SetChangeTracking", "UnsetSecure", "SetTags", "UnsetTags", "AddDataMetricFunction", "DropDataMetricFunction", "ModifyDataMetricFunction", "SetDataMetricSchedule", "UnsetDataMetricSchedule", "AddRowAccessPolicy", "DropRowAccessPolicy", "DropAndAddRowAccessPolicy", "DropAllRowAccessPolicies", "SetAggregationPolicy", "UnsetAggregationPolicy", "SetMaskingPolicyOnColumn", "UnsetMaskingPolicyOnColumn", "SetProjectionPolicyOnColumn", "UnsetProjectionPolicyOnColumn", "SetTagsOnColumn", "UnsetTagsOnColumn")) }) - t.Run("validation: exactly one field from [opts.RenameTo opts.SetComment opts.UnsetComment opts.SetSecure opts.SetChangeTracking opts.UnsetSecure opts.SetTags opts.UnsetTags opts.AddDataMetricFunction opts.DropDataMetricFunction opts.AddRowAccessPolicy opts.DropRowAccessPolicy opts.DropAndAddRowAccessPolicy opts.DropAllRowAccessPolicies opts.SetMaskingPolicyOnColumn opts.UnsetMaskingPolicyOnColumn opts.SetTagsOnColumn opts.UnsetTagsOnColumn] should be present - more present", func(t 
*testing.T) { + t.Run("validation: exactly one field from [opts.RenameTo opts.SetComment opts.UnsetComment opts.SetSecure opts.SetChangeTracking opts.UnsetSecure opts.SetTags opts.UnsetTags opts.AddDataMetricFunction opts.DropDataMetricFunction opts.ModifyDataMetricFunction opts.AddRowAccessPolicy opts.DropRowAccessPolicy opts.DropAndAddRowAccessPolicy opts.DropAllRowAccessPolicies opts.SetMaskingPolicyOnColumn opts.UnsetMaskingPolicyOnColumn opts.SetTagsOnColumn opts.UnsetTagsOnColumn] should be present - more present", func(t *testing.T) { opts := defaultOpts() opts.SetChangeTracking = Bool(true) opts.DropAllRowAccessPolicies = Bool(true) - assertOptsInvalidJoinedErrors(t, opts, errExactlyOneOf("AlterViewOptions", "RenameTo", "SetComment", "UnsetComment", "SetSecure", "SetChangeTracking", "UnsetSecure", "SetTags", "UnsetTags", "AddDataMetricFunction", "DropDataMetricFunction", "SetDataMetricSchedule", "UnsetDataMetricSchedule", "AddRowAccessPolicy", "DropRowAccessPolicy", "DropAndAddRowAccessPolicy", "DropAllRowAccessPolicies", "SetAggregationPolicy", "UnsetAggregationPolicy", "SetMaskingPolicyOnColumn", "UnsetMaskingPolicyOnColumn", "SetProjectionPolicyOnColumn", "UnsetProjectionPolicyOnColumn", "SetTagsOnColumn", "UnsetTagsOnColumn")) - }) - - t.Run("validation: exactly one field from [opts.SetDataMetricSchedule.UsingCron opts.SetDataMetricSchedule.TriggerOnChanges opts.SetDataMetricSchedule.Minutes] should be present - more present", func(t *testing.T) { - opts := defaultOpts() - opts.SetDataMetricSchedule = &ViewSetDataMetricSchedule{ - UsingCron: &ViewUsingCron{ - Cron: "5 * * * * UTC", - }, - TriggerOnChanges: Pointer(true), - } - - opts.DropAllRowAccessPolicies = Bool(true) - assertOptsInvalidJoinedErrors(t, opts, errExactlyOneOf("AlterViewOptions.SetDataMetricSchedule", "Minutes", "UsingCron", "TriggerOnChanges")) + assertOptsInvalidJoinedErrors(t, opts, errExactlyOneOf("AlterViewOptions", "RenameTo", "SetComment", "UnsetComment", "SetSecure", 
"SetChangeTracking", "UnsetSecure", "SetTags", "UnsetTags", "AddDataMetricFunction", "DropDataMetricFunction", "ModifyDataMetricFunction", "SetDataMetricSchedule", "UnsetDataMetricSchedule", "AddRowAccessPolicy", "DropRowAccessPolicy", "DropAndAddRowAccessPolicy", "DropAllRowAccessPolicies", "SetAggregationPolicy", "UnsetAggregationPolicy", "SetMaskingPolicyOnColumn", "UnsetMaskingPolicyOnColumn", "SetProjectionPolicyOnColumn", "UnsetProjectionPolicyOnColumn", "SetTagsOnColumn", "UnsetTagsOnColumn")) }) t.Run("validation: conflicting fields for [opts.IfExists opts.SetSecure]", func(t *testing.T) { @@ -310,28 +297,22 @@ func TestViews_Alter(t *testing.T) { assertOptsValidAndSQLEquals(t, opts, "ALTER VIEW %s DROP DATA METRIC FUNCTION %s ON (\"foo\")", id.FullyQualifiedName(), dmfId.FullyQualifiedName()) }) - t.Run("set data metric schedule", func(t *testing.T) { - opts := defaultOpts() - opts.SetDataMetricSchedule = &ViewSetDataMetricSchedule{ - Minutes: &ViewMinute{ - Minutes: 5, - }, - } - assertOptsValidAndSQLEquals(t, opts, "ALTER VIEW %s SET DATA_METRIC_SCHEDULE = ' 5 MINUTE'", id.FullyQualifiedName()) + t.Run("modify data metric function", func(t *testing.T) { + dmfId := randomSchemaObjectIdentifier() - opts = defaultOpts() - opts.SetDataMetricSchedule = &ViewSetDataMetricSchedule{ - UsingCron: &ViewUsingCron{ - Cron: "5 * * * * UTC", - }, + opts := defaultOpts() + opts.ModifyDataMetricFunction = &ViewModifyDataMetricFunctions{ + DataMetricFunction: []ViewModifyDataMetricFunction{{DataMetricFunction: dmfId, On: []Column{{"foo"}}, ViewDataMetricScheduleStatusOperationOption: ViewDataMetricScheduleStatusOperationSuspend}}, } - assertOptsValidAndSQLEquals(t, opts, "ALTER VIEW %s SET DATA_METRIC_SCHEDULE = 'USING CRON 5 * * * * UTC '", id.FullyQualifiedName()) + assertOptsValidAndSQLEquals(t, opts, "ALTER VIEW %s MODIFY DATA METRIC FUNCTION %s ON (\"foo\") SUSPEND", id.FullyQualifiedName(), dmfId.FullyQualifiedName()) + }) - opts = defaultOpts() + t.Run("set data 
metric schedule", func(t *testing.T) { + opts := defaultOpts() opts.SetDataMetricSchedule = &ViewSetDataMetricSchedule{ - TriggerOnChanges: Pointer(true), + DataMetricSchedule: "5 MINUTE", } - assertOptsValidAndSQLEquals(t, opts, "ALTER VIEW %s SET DATA_METRIC_SCHEDULE = 'TRIGGER_ON_CHANGES'", id.FullyQualifiedName()) + assertOptsValidAndSQLEquals(t, opts, "ALTER VIEW %s SET DATA_METRIC_SCHEDULE = '5 MINUTE'", id.FullyQualifiedName()) }) t.Run("unset data metric schedule", func(t *testing.T) { diff --git a/pkg/sdk/views_impl_gen.go b/pkg/sdk/views_impl_gen.go index 0bf564d6b8..149dfbccbd 100644 --- a/pkg/sdk/views_impl_gen.go +++ b/pkg/sdk/views_impl_gen.go @@ -141,21 +141,15 @@ func (r *AlterViewRequest) toOpts() *AlterViewOptions { } } - if r.SetDataMetricSchedule != nil { - opts.SetDataMetricSchedule = &ViewSetDataMetricSchedule{ - TriggerOnChanges: r.SetDataMetricSchedule.TriggerOnChanges, - } - - if r.SetDataMetricSchedule.Minutes != nil { - opts.SetDataMetricSchedule.Minutes = &ViewMinute{ - Minutes: r.SetDataMetricSchedule.Minutes.Minutes, - } + if r.ModifyDataMetricFunction != nil { + opts.ModifyDataMetricFunction = &ViewModifyDataMetricFunctions{ + DataMetricFunction: r.ModifyDataMetricFunction.DataMetricFunction, } + } - if r.SetDataMetricSchedule.UsingCron != nil { - opts.SetDataMetricSchedule.UsingCron = &ViewUsingCron{ - Cron: r.SetDataMetricSchedule.UsingCron.Cron, - } + if r.SetDataMetricSchedule != nil { + opts.SetDataMetricSchedule = &ViewSetDataMetricSchedule{ + DataMetricSchedule: r.SetDataMetricSchedule.DataMetricSchedule, } } diff --git a/pkg/sdk/views_validations_gen.go b/pkg/sdk/views_validations_gen.go index 7f8fd1fd55..000e54f902 100644 --- a/pkg/sdk/views_validations_gen.go +++ b/pkg/sdk/views_validations_gen.go @@ -1,7 +1,5 @@ package sdk -import "fmt" - var ( _ validatable = new(CreateViewOptions) _ validatable = new(AlterViewOptions) @@ -31,21 +29,7 @@ func (opts *CreateViewOptions) validate() error { } if 
valueSet(opts.AggregationPolicy) { if !ValidObjectIdentifier(opts.AggregationPolicy.AggregationPolicy) { - errs = append(errs, errInvalidIdentifier("CreateViewOptions", "AggregationPolicy")) - } - } - if valueSet(opts.Columns) { - for i, columnOption := range opts.Columns { - if valueSet(columnOption.MaskingPolicy) { - if !ValidObjectIdentifier(columnOption.MaskingPolicy.MaskingPolicy) { - errs = append(errs, errInvalidIdentifier(fmt.Sprintf("CreateViewOptions.Columns[%d]", i), "MaskingPolicy")) - } - } - if valueSet(columnOption.ProjectionPolicy) { - if !ValidObjectIdentifier(columnOption.ProjectionPolicy.ProjectionPolicy) { - errs = append(errs, errInvalidIdentifier(fmt.Sprintf("CreateViewOptions.Columns[%d]", i), "ProjectionPolicy")) - } - } + errs = append(errs, ErrInvalidObjectIdentifier) } } return JoinErrors(errs...) @@ -59,8 +43,8 @@ func (opts *AlterViewOptions) validate() error { if !ValidObjectIdentifier(opts.name) { errs = append(errs, ErrInvalidObjectIdentifier) } - if !exactlyOneValueSet(opts.RenameTo, opts.SetComment, opts.UnsetComment, opts.SetSecure, opts.SetChangeTracking, opts.UnsetSecure, opts.SetTags, opts.UnsetTags, opts.AddDataMetricFunction, opts.DropDataMetricFunction, opts.SetDataMetricSchedule, opts.UnsetDataMetricSchedule, opts.AddRowAccessPolicy, opts.DropRowAccessPolicy, opts.DropAndAddRowAccessPolicy, opts.DropAllRowAccessPolicies, opts.SetAggregationPolicy, opts.UnsetAggregationPolicy, opts.SetMaskingPolicyOnColumn, opts.UnsetMaskingPolicyOnColumn, opts.SetProjectionPolicyOnColumn, opts.UnsetProjectionPolicyOnColumn, opts.SetTagsOnColumn, opts.UnsetTagsOnColumn) { - errs = append(errs, errExactlyOneOf("AlterViewOptions", "RenameTo", "SetComment", "UnsetComment", "SetSecure", "SetChangeTracking", "UnsetSecure", "SetTags", "UnsetTags", "AddDataMetricFunction", "DropDataMetricFunction", "SetDataMetricSchedule", "UnsetDataMetricSchedule", "AddRowAccessPolicy", "DropRowAccessPolicy", "DropAndAddRowAccessPolicy", 
"DropAllRowAccessPolicies", "SetAggregationPolicy", "UnsetAggregationPolicy", "SetMaskingPolicyOnColumn", "UnsetMaskingPolicyOnColumn", "SetProjectionPolicyOnColumn", "UnsetProjectionPolicyOnColumn", "SetTagsOnColumn", "UnsetTagsOnColumn")) + if !exactlyOneValueSet(opts.RenameTo, opts.SetComment, opts.UnsetComment, opts.SetSecure, opts.SetChangeTracking, opts.UnsetSecure, opts.SetTags, opts.UnsetTags, opts.AddDataMetricFunction, opts.DropDataMetricFunction, opts.ModifyDataMetricFunction, opts.SetDataMetricSchedule, opts.UnsetDataMetricSchedule, opts.AddRowAccessPolicy, opts.DropRowAccessPolicy, opts.DropAndAddRowAccessPolicy, opts.DropAllRowAccessPolicies, opts.SetAggregationPolicy, opts.UnsetAggregationPolicy, opts.SetMaskingPolicyOnColumn, opts.UnsetMaskingPolicyOnColumn, opts.SetProjectionPolicyOnColumn, opts.UnsetProjectionPolicyOnColumn, opts.SetTagsOnColumn, opts.UnsetTagsOnColumn) { + errs = append(errs, errExactlyOneOf("AlterViewOptions", "RenameTo", "SetComment", "UnsetComment", "SetSecure", "SetChangeTracking", "UnsetSecure", "SetTags", "UnsetTags", "AddDataMetricFunction", "DropDataMetricFunction", "ModifyDataMetricFunction", "SetDataMetricSchedule", "UnsetDataMetricSchedule", "AddRowAccessPolicy", "DropRowAccessPolicy", "DropAndAddRowAccessPolicy", "DropAllRowAccessPolicies", "SetAggregationPolicy", "UnsetAggregationPolicy", "SetMaskingPolicyOnColumn", "UnsetMaskingPolicyOnColumn", "SetProjectionPolicyOnColumn", "UnsetProjectionPolicyOnColumn", "SetTagsOnColumn", "UnsetTagsOnColumn")) } if everyValueSet(opts.IfExists, opts.SetSecure) { errs = append(errs, errOneOf("AlterViewOptions", "IfExists", "SetSecure")) @@ -68,11 +52,6 @@ func (opts *AlterViewOptions) validate() error { if everyValueSet(opts.IfExists, opts.UnsetSecure) { errs = append(errs, errOneOf("AlterViewOptions", "IfExists", "UnsetSecure")) } - if valueSet(opts.SetDataMetricSchedule) { - if !exactlyOneValueSet(opts.SetDataMetricSchedule.Minutes, opts.SetDataMetricSchedule.UsingCron, 
opts.SetDataMetricSchedule.TriggerOnChanges) { - errs = append(errs, errExactlyOneOf("AlterViewOptions.SetDataMetricSchedule", "Minutes", "UsingCron", "TriggerOnChanges")) - } - } if valueSet(opts.AddRowAccessPolicy) { if !ValidObjectIdentifier(opts.AddRowAccessPolicy.RowAccessPolicy) { errs = append(errs, errInvalidIdentifier("AlterViewOptions.AddRowAccessPolicy", "RowAccessPolicy")) From 30d9b1104e0091eb2a4764f753e91160786d8bb1 Mon Sep 17 00:00:00 2001 From: Jakub Michalak Date: Wed, 28 Aug 2024 13:08:56 +0200 Subject: [PATCH 02/13] Fix docs --- docs/resources/view.md | 27 --------------------------- 1 file changed, 27 deletions(-) diff --git a/docs/resources/view.md b/docs/resources/view.md index ba1f8aaf8a..4c5328ae49 100644 --- a/docs/resources/view.md +++ b/docs/resources/view.md @@ -84,7 +84,6 @@ SQL - `aggregation_policy` (Block List, Max: 1) Specifies the aggregation policy to set on a view. (see [below for nested schema](#nestedblock--aggregation_policy)) - `change_tracking` (String) Specifies to enable or disable change tracking on the table. Available options are: "true" or "false". When the value is not set in the configuration the provider will put "default" there which means to use the Snowflake default for this value. -- `column` (Block List) If you want to change the name of a column or add a comment to a column in the new view, include a column list that specifies the column names and (if needed) comments about the columns. (You do not need to specify the data types of the columns.) (see [below for nested schema](#nestedblock--column)) - `comment` (String) Specifies a comment for the view. - `copy_grants` (Boolean) Retains the access permissions from the original view when a new view is created using the OR REPLACE clause. - `data_metric_functions` (Block Set) Data metric functions used for the view. 
(see [below for nested schema](#nestedblock--data_metric_functions)) @@ -113,32 +112,6 @@ Optional: - `entity_key` (Set of String) Defines which columns uniquely identify an entity within the view. - -### Nested Schema for `column` - -Required: - -- `column_name` (String) Specifies affected column name. - -Optional: - -- `comment` (String) Specifies a comment for the column. -- `masking_policy` (Block List) (see [below for nested schema](#nestedblock--column--masking_policy)) -- `projection_policy` (String) Specifies the projection policy to set on a column. - - -### Nested Schema for `column.masking_policy` - -Required: - -- `policy_name` (String) Specifies the masking policy to set on a column. - -Optional: - -- `using` (List of String) Specifies the arguments to pass into the conditional masking policy SQL expression. The first column in the list specifies the column for the policy conditions to mask or tokenize the data and must match the column to which the masking policy is set. The additional columns specify the columns to evaluate to determine whether to mask or tokenize the data in each row of the query result when a query is made on the first column. If the USING clause is omitted, Snowflake treats the conditional masking policy as a normal masking policy. 
- - - ### Nested Schema for `data_metric_functions` From bd6f987eabcf25cfc0c9a71cc11c8b26ee4dc652 Mon Sep 17 00:00:00 2001 From: Jakub Michalak Date: Wed, 28 Aug 2024 13:19:35 +0200 Subject: [PATCH 03/13] Cleanup --- pkg/resources/custom_diffs.go | 14 ----- pkg/resources/helpers.go | 28 ---------- pkg/resources/view.go | 97 ++++++++++++++++++----------------- 3 files changed, 49 insertions(+), 90 deletions(-) diff --git a/pkg/resources/custom_diffs.go b/pkg/resources/custom_diffs.go index 75f64ec72d..853c4fd55e 100644 --- a/pkg/resources/custom_diffs.go +++ b/pkg/resources/custom_diffs.go @@ -98,20 +98,6 @@ func ComputedIfAnyAttributeChanged(key string, changedAttributeKeys ...string) s }) } -func asdf(key string, changedAttributeKeys ...string) schema.CustomizeDiffFunc { - return customdiff.ComputedIf(key, func(ctx context.Context, diff *schema.ResourceDiff, meta interface{}) bool { - var result bool - for _, changedKey := range changedAttributeKeys { - if diff.HasChange(changedKey) { - old, new := diff.GetChange(changedKey) - log.Printf("[DEBUG] ComputedIfAnyAttributeChanged: changed key: %s old: %s new: %s\n", changedKey, old, new) - } - result = result || diff.HasChange(changedKey) - } - return result - }) -} - // TODO(SNOW-1629468): Adjust the function to make it more flexible func ComputedIfAnyAttributeChangedWithSuppressDiff(key string, suppressDiffFunc schema.SchemaDiffSuppressFunc, changedAttributeKeys ...string) schema.CustomizeDiffFunc { return customdiff.ComputedIf(key, func(ctx context.Context, diff *schema.ResourceDiff, meta interface{}) bool { diff --git a/pkg/resources/helpers.go b/pkg/resources/helpers.go index f18951f42f..7b91689d48 100644 --- a/pkg/resources/helpers.go +++ b/pkg/resources/helpers.go @@ -328,31 +328,3 @@ func ListDiff[T comparable](beforeList []T, afterList []T) (added []T, removed [ return added, removed } - -// ListDiff Compares two lists (before and after), then compares and returns two lists that include -// added and removed 
items between those lists. -// type X = map[string]any - -// func ListDiffMap(beforeList []any, afterList []any) (added []any, removed []any) { -// type key struct { -// name string -// columns []string -// } -// added = make([]any, 0) -// removed = make([]any, 0) - -// for _, privilegeBeforeChange := range beforeList { -// m := privilegeBeforeChange.(map[string]any) -// if !slices.Contains(afterList, privilegeBeforeChange) { -// removed = append(removed, privilegeBeforeChange) -// } -// } - -// for _, privilegeAfterChange := range afterList { -// if !slices.Contains(beforeList, privilegeAfterChange) { -// added = append(added, privilegeAfterChange) -// } -// } - -// return added, removed -// } diff --git a/pkg/resources/view.go b/pkg/resources/view.go index aa647bc115..e73716c1ce 100644 --- a/pkg/resources/view.go +++ b/pkg/resources/view.go @@ -452,35 +452,36 @@ func CreateView(orReplace bool) schema.CreateContextFunc { if err != nil { return diag.FromErr(fmt.Errorf("error adding data matric functions in view %v err = %w", id.Name(), err)) } - changeSchedule := make([]sdk.ViewModifyDataMetricFunction, 0, len(addedRaw)) - for i := range addedRaw { - if addedRaw[i].ScheduleStatus != "" { - expectedStatus, err := sdk.ToAllowedDataMetricScheduleStatusOption(addedRaw[i].ScheduleStatus) - if err != nil { - return diag.FromErr(err) - } - var statusCmd sdk.ViewDataMetricScheduleStatusOperationOption - switch expectedStatus { - case sdk.DataMetricScheduleStatusStarted: - statusCmd = sdk.ViewDataMetricScheduleStatusOperationResume - case sdk.DataMetricScheduleStatusSuspended: - statusCmd = sdk.ViewDataMetricScheduleStatusOperationSuspend - default: - return diag.FromErr(fmt.Errorf("unexpected data metric function status: %v", expectedStatus)) - } - changeSchedule = append(changeSchedule, sdk.ViewModifyDataMetricFunction{ - DataMetricFunction: addedRaw[i].DataMetricFunction, - On: addedRaw[i].On, - ViewDataMetricScheduleStatusOperationOption: statusCmd, - }) - } - } - if 
len(changeSchedule) > 0 { - err = client.Views.Alter(ctx, sdk.NewAlterViewRequest(id).WithModifyDataMetricFunction(*sdk.NewViewModifyDataMetricFunctionsRequest(changeSchedule))) - if err != nil { - return diag.FromErr(fmt.Errorf("error adding data matric functions in view %v err = %w", id.Name(), err)) - } - } + // TODO (next pr) + // changeSchedule := make([]sdk.ViewModifyDataMetricFunction, 0, len(addedRaw)) + // for i := range addedRaw { + // if addedRaw[i].ScheduleStatus != "" { + // expectedStatus, err := sdk.ToAllowedDataMetricScheduleStatusOption(addedRaw[i].ScheduleStatus) + // if err != nil { + // return diag.FromErr(err) + // } + // var statusCmd sdk.ViewDataMetricScheduleStatusOperationOption + // switch expectedStatus { + // case sdk.DataMetricScheduleStatusStarted: + // statusCmd = sdk.ViewDataMetricScheduleStatusOperationResume + // case sdk.DataMetricScheduleStatusSuspended: + // statusCmd = sdk.ViewDataMetricScheduleStatusOperationSuspend + // default: + // return diag.FromErr(fmt.Errorf("unexpected data metric function status: %v", expectedStatus)) + // } + // changeSchedule = append(changeSchedule, sdk.ViewModifyDataMetricFunction{ + // DataMetricFunction: addedRaw[i].DataMetricFunction, + // On: addedRaw[i].On, + // ViewDataMetricScheduleStatusOperationOption: statusCmd, + // }) + // } + // } + // if len(changeSchedule) > 0 { + // err = client.Views.Alter(ctx, sdk.NewAlterViewRequest(id).WithModifyDataMetricFunction(*sdk.NewViewModifyDataMetricFunctionsRequest(changeSchedule))) + // if err != nil { + // return diag.FromErr(fmt.Errorf("error adding data matric functions in view %v err = %w", id.Name(), err)) + // } + // } } return ReadView(false)(ctx, d, meta) @@ -692,13 +693,13 @@ func handleDataMetricFunctions(ctx context.Context, client *sdk.Client, id sdk.S }) } -type ViewDataMetricFunctionDDL struct { +type ViewDataMetricFunctionConfig struct { DataMetricFunction sdk.SchemaObjectIdentifier On []sdk.Column ScheduleStatus string } -func 
extractDataMetricFunctions(v any) (dmfs []ViewDataMetricFunctionDDL, err error) { +func extractDataMetricFunctions(v any) (dmfs []ViewDataMetricFunctionConfig, err error) { for _, v := range v.([]any) { config := v.(map[string]any) columnsRaw := expandStringList(config["on"].(*schema.Set).List()) @@ -710,7 +711,7 @@ func extractDataMetricFunctions(v any) (dmfs []ViewDataMetricFunctionDDL, err er if err != nil { return nil, err } - dmfs = append(dmfs, ViewDataMetricFunctionDDL{ + dmfs = append(dmfs, ViewDataMetricFunctionConfig{ DataMetricFunction: id, On: columns, // TODO (next pr) @@ -828,36 +829,36 @@ func UpdateView(ctx context.Context, d *schema.ResourceData, meta any) diag.Diag if d.HasChange("data_metric_functions") { old, new := d.GetChange("data_metric_functions") removedRaw, addedRaw := old.(*schema.Set).List(), new.(*schema.Set).List() - added, err := extractDataMetricFunctions(addedRaw) + addedConfig, err := extractDataMetricFunctions(addedRaw) if err != nil { return diag.FromErr(err) } - removed, err := extractDataMetricFunctions(removedRaw) + removedConfig, err := extractDataMetricFunctions(removedRaw) if err != nil { return diag.FromErr(err) } - if len(removed) > 0 { - removed2 := make([]sdk.ViewDataMetricFunction, len(removed)) - for i := range removed { - removed2[i] = sdk.ViewDataMetricFunction{ - DataMetricFunction: removed[i].DataMetricFunction, - On: removed[i].On, + if len(removedConfig) > 0 { + removed := make([]sdk.ViewDataMetricFunction, len(removedConfig)) + for i := range removedConfig { + removed[i] = sdk.ViewDataMetricFunction{ + DataMetricFunction: removedConfig[i].DataMetricFunction, + On: removedConfig[i].On, } } - err := client.Views.Alter(ctx, sdk.NewAlterViewRequest(id).WithDropDataMetricFunction(*sdk.NewViewDropDataMetricFunctionRequest(removed2))) + err := client.Views.Alter(ctx, sdk.NewAlterViewRequest(id).WithDropDataMetricFunction(*sdk.NewViewDropDataMetricFunctionRequest(removed))) if err != nil { return 
diag.FromErr(fmt.Errorf("error adding data matric functions in view %v err = %w", id.Name(), err)) } } - if len(added) > 0 { - added2 := make([]sdk.ViewDataMetricFunction, len(added)) - for i := range added { - added2[i] = sdk.ViewDataMetricFunction{ - DataMetricFunction: added[i].DataMetricFunction, - On: added[i].On, + if len(addedConfig) > 0 { + added := make([]sdk.ViewDataMetricFunction, len(addedConfig)) + for i := range addedConfig { + added[i] = sdk.ViewDataMetricFunction{ + DataMetricFunction: addedConfig[i].DataMetricFunction, + On: addedConfig[i].On, } } - err := client.Views.Alter(ctx, sdk.NewAlterViewRequest(id).WithAddDataMetricFunction(*sdk.NewViewAddDataMetricFunctionRequest(added2))) + err := client.Views.Alter(ctx, sdk.NewAlterViewRequest(id).WithAddDataMetricFunction(*sdk.NewViewAddDataMetricFunctionRequest(added))) if err != nil { return diag.FromErr(fmt.Errorf("error adding data matric functions in view %v err = %w", id.Name(), err)) } From 1e11a713be94b5beff2ad0ba8c3815b6cb58478f Mon Sep 17 00:00:00 2001 From: Jakub Michalak Date: Wed, 28 Aug 2024 13:22:31 +0200 Subject: [PATCH 04/13] Cleanup --- pkg/sdk/data_metric_function_references_def.go | 1 - ...metric_function_references_gen_integration_test.go | 11 ----------- 2 files changed, 12 deletions(-) delete mode 100644 pkg/sdk/data_metric_function_references_gen_integration_test.go diff --git a/pkg/sdk/data_metric_function_references_def.go b/pkg/sdk/data_metric_function_references_def.go index 0b4ec04722..ea24d761b3 100644 --- a/pkg/sdk/data_metric_function_references_def.go +++ b/pkg/sdk/data_metric_function_references_def.go @@ -29,7 +29,6 @@ const ( DataMetricScheduleStatusSuspendedByUserAction DataMetricScheduleStatusOption = "SUSPENDED_BY_USER_ACTION" ) -// TODO: make is a separate type? 
var AllAllowedDataMetricScheduleStatusOptions = []DataMetricScheduleStatusOption{ DataMetricScheduleStatusStarted, DataMetricScheduleStatusSuspended, diff --git a/pkg/sdk/data_metric_function_references_gen_integration_test.go b/pkg/sdk/data_metric_function_references_gen_integration_test.go deleted file mode 100644 index c855280322..0000000000 --- a/pkg/sdk/data_metric_function_references_gen_integration_test.go +++ /dev/null @@ -1,11 +0,0 @@ -package sdk - -import "testing" - -func TestInt_DataMetricFunctionReferences(t *testing.T) { - // TODO: prepare common resources - - t.Run("GetForEntity", func(t *testing.T) { - // TODO: fill me - }) -} From ac69c2dceee96498a9c41d6ff2f2a48582e53835 Mon Sep 17 00:00:00 2001 From: Jakub Michalak Date: Wed, 28 Aug 2024 18:47:37 +0200 Subject: [PATCH 05/13] Review suggestions --- MIGRATION_GUIDE.md | 2 +- docs/resources/view.md | 8 +- examples/resources/snowflake_view/resource.tf | 2 +- .../resourceassert/view_resource_gen.go | 8 +- .../config/model/view_model_gen.go | 4 +- pkg/resources/doc_helpers.go | 12 +- pkg/resources/doc_helpers_test.go | 20 +-- .../TestAcc_View/basic_update/test.tf | 2 +- .../testdata/TestAcc_View/complete/test.tf | 2 +- pkg/resources/view.go | 24 ++-- pkg/resources/view_acceptance_test.go | 122 +++++++++--------- ...ata_metric_function_references_gen_test.go | 5 + ...unction_references_gen_integration_test.go | 1 + pkg/sdk/views_validations_gen.go | 18 ++- 14 files changed, 119 insertions(+), 111 deletions(-) diff --git a/MIGRATION_GUIDE.md b/MIGRATION_GUIDE.md index 2d3d2b7f6e..b13db8c3fb 100644 --- a/MIGRATION_GUIDE.md +++ b/MIGRATION_GUIDE.md @@ -14,7 +14,7 @@ New fields: - `is_recursive` - `is_temporary` - `data_metric_schedule` - - `data_metric_functions` + - `data_metric_function` - added `show_output` field that holds the response from SHOW VIEWS. - added `describe_output` field that holds the response from DESCRIBE VIEW. Note that one needs to grant sufficient privileges e.g. 
with [grant_ownership](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/latest/docs/resources/grant_ownership) on the tables used in this view. Otherwise, this field is not filled. diff --git a/docs/resources/view.md b/docs/resources/view.md index 4c5328ae49..e3f4c3dc52 100644 --- a/docs/resources/view.md +++ b/docs/resources/view.md @@ -55,7 +55,7 @@ resource "snowflake_view" "test" { policy_name = "aggregation_policy" entity_key = ["id"] } - data_metric_functions { + data_metric_function { function_name = "data_metric_function" on = ["id"] } @@ -86,7 +86,7 @@ SQL - `change_tracking` (String) Specifies to enable or disable change tracking on the table. Available options are: "true" or "false". When the value is not set in the configuration the provider will put "default" there which means to use the Snowflake default for this value. - `comment` (String) Specifies a comment for the view. - `copy_grants` (Boolean) Retains the access permissions from the original view when a new view is created using the OR REPLACE clause. -- `data_metric_functions` (Block Set) Data metric functions used for the view. (see [below for nested schema](#nestedblock--data_metric_functions)) +- `data_metric_function` (Block Set) Data metric functions used for the view. (see [below for nested schema](#nestedblock--data_metric_function)) - `data_metric_schedule` (Block List, Max: 1) Specifies the schedule to run the data metric functions periodically. (see [below for nested schema](#nestedblock--data_metric_schedule)) - `is_recursive` (String) Specifies that the view can refer to itself using recursive syntax without necessarily using a CTE (common table expression). Available options are: "true" or "false". When the value is not set in the configuration the provider will put "default" there which means to use the Snowflake default for this value. - `is_secure` (String) Specifies that the view is secure. 
By design, the Snowflake's `SHOW VIEWS` command does not provide information about secure views (consult [view usage notes](https://docs.snowflake.com/en/sql-reference/sql/create-view#usage-notes)) which is essential to manage/import view with Terraform. Use the role owning the view while managing secure views. Available options are: "true" or "false". When the value is not set in the configuration the provider will put "default" there which means to use the Snowflake default for this value. @@ -112,8 +112,8 @@ Optional: - `entity_key` (Set of String) Defines which columns uniquely identify an entity within the view. - -### Nested Schema for `data_metric_functions` + +### Nested Schema for `data_metric_function` Required: diff --git a/examples/resources/snowflake_view/resource.tf b/examples/resources/snowflake_view/resource.tf index c7310ddc5e..de20fb54cb 100644 --- a/examples/resources/snowflake_view/resource.tf +++ b/examples/resources/snowflake_view/resource.tf @@ -35,7 +35,7 @@ resource "snowflake_view" "test" { policy_name = "aggregation_policy" entity_key = ["id"] } - data_metric_functions { + data_metric_function { function_name = "data_metric_function" on = ["id"] } diff --git a/pkg/acceptance/bettertestspoc/assert/resourceassert/view_resource_gen.go b/pkg/acceptance/bettertestspoc/assert/resourceassert/view_resource_gen.go index ebac0c6239..089d8fded2 100644 --- a/pkg/acceptance/bettertestspoc/assert/resourceassert/view_resource_gen.go +++ b/pkg/acceptance/bettertestspoc/assert/resourceassert/view_resource_gen.go @@ -57,8 +57,8 @@ func (v *ViewResourceAssert) HasCopyGrantsString(expected string) *ViewResourceA return v } -func (v *ViewResourceAssert) HasDataMetricFunctionsString(expected string) *ViewResourceAssert { - v.AddAssertion(assert.ValueSet("data_metric_functions", expected)) +func (v *ViewResourceAssert) HasDataMetricFunctionString(expected string) *ViewResourceAssert { + v.AddAssertion(assert.ValueSet("data_metric_function", expected)) return v 
} @@ -141,8 +141,8 @@ func (v *ViewResourceAssert) HasNoCopyGrants() *ViewResourceAssert { return v } -func (v *ViewResourceAssert) HasNoDataMetricFunctions() *ViewResourceAssert { - v.AddAssertion(assert.ValueNotSet("data_metric_functions")) +func (v *ViewResourceAssert) HasNoDataMetricFunction() *ViewResourceAssert { + v.AddAssertion(assert.ValueNotSet("data_metric_function")) return v } diff --git a/pkg/acceptance/bettertestspoc/config/model/view_model_gen.go b/pkg/acceptance/bettertestspoc/config/model/view_model_gen.go index 8c1a2e0ff6..62e2d6cadb 100644 --- a/pkg/acceptance/bettertestspoc/config/model/view_model_gen.go +++ b/pkg/acceptance/bettertestspoc/config/model/view_model_gen.go @@ -15,7 +15,7 @@ type ViewModel struct { Columns tfconfig.Variable `json:"columns,omitempty"` Comment tfconfig.Variable `json:"comment,omitempty"` CopyGrants tfconfig.Variable `json:"copy_grants,omitempty"` - DataMetricFunctions tfconfig.Variable `json:"data_metric_functions,omitempty"` + DataMetricFunctions tfconfig.Variable `json:"data_metric_function,omitempty"` DataMetricSchedule tfconfig.Variable `json:"data_metric_schedule,omitempty"` Database tfconfig.Variable `json:"database,omitempty"` IsRecursive tfconfig.Variable `json:"is_recursive,omitempty"` @@ -80,7 +80,7 @@ func (v *ViewModel) WithCopyGrants(copyGrants bool) *ViewModel { return v } -// data_metric_functions attribute type is not yet supported, so WithDataMetricFunctions can't be generated +// data_metric_function attribute type is not yet supported, so WithDataMetricFunctions can't be generated // data_metric_schedule attribute type is not yet supported, so WithDataMetricSchedule can't be generated diff --git a/pkg/resources/doc_helpers.go b/pkg/resources/doc_helpers.go index 4e0ee8b425..9fe20cf817 100644 --- a/pkg/resources/doc_helpers.go +++ b/pkg/resources/doc_helpers.go @@ -5,18 +5,10 @@ import ( "strings" ) -func possibleValuesListed[T ~string](values []T) string { +func possibleValuesListed[T ~string | 
~int](values []T) string { valuesWrapped := make([]string, len(values)) for i, value := range values { - valuesWrapped[i] = fmt.Sprintf("`%s`", value) - } - return strings.Join(valuesWrapped, " | ") -} - -func possibleValuesListedInt(values []int) string { - valuesWrapped := make([]string, len(values)) - for i, value := range values { - valuesWrapped[i] = fmt.Sprintf("`%d`", value) + valuesWrapped[i] = fmt.Sprintf("`%v`", value) } return strings.Join(valuesWrapped, " | ") } diff --git a/pkg/resources/doc_helpers_test.go b/pkg/resources/doc_helpers_test.go index 2d987e6771..81ecbccd8f 100644 --- a/pkg/resources/doc_helpers_test.go +++ b/pkg/resources/doc_helpers_test.go @@ -6,7 +6,7 @@ import ( "github.com/stretchr/testify/assert" ) -func Test_PossibleValuesListed(t *testing.T) { +func Test_PossibleValuesListedStrings(t *testing.T) { values := []string{"abc", "DEF"} result := possibleValuesListed(values) @@ -14,26 +14,18 @@ func Test_PossibleValuesListed(t *testing.T) { assert.Equal(t, "`abc` | `DEF`", result) } -func Test_PossibleValuesListed_empty(t *testing.T) { - var values []string - - result := possibleValuesListed(values) - - assert.Empty(t, result) -} - -func Test_PossibleValuesListedInt(t *testing.T) { +func Test_PossibleValuesListedInts(t *testing.T) { values := []int{42, 21} - result := possibleValuesListedInt(values) + result := possibleValuesListed(values) assert.Equal(t, "`42` | `21`", result) } -func Test_PossibleValuesListedInt_empty(t *testing.T) { - var values []int +func Test_PossibleValuesListed_empty(t *testing.T) { + var values []string - result := possibleValuesListedInt(values) + result := possibleValuesListed(values) assert.Empty(t, result) } diff --git a/pkg/resources/testdata/TestAcc_View/basic_update/test.tf b/pkg/resources/testdata/TestAcc_View/basic_update/test.tf index 605f72139c..e403c93692 100644 --- a/pkg/resources/testdata/TestAcc_View/basic_update/test.tf +++ b/pkg/resources/testdata/TestAcc_View/basic_update/test.tf @@ -11,7 
+11,7 @@ resource "snowflake_view" "test" { policy_name = var.aggregation_policy entity_key = var.aggregation_policy_entity_key } - data_metric_functions { + data_metric_function { function_name = var.data_metric_function on = var.data_metric_function_on } diff --git a/pkg/resources/testdata/TestAcc_View/complete/test.tf b/pkg/resources/testdata/TestAcc_View/complete/test.tf index d4b9ab0e0a..6e4c53c023 100644 --- a/pkg/resources/testdata/TestAcc_View/complete/test.tf +++ b/pkg/resources/testdata/TestAcc_View/complete/test.tf @@ -7,7 +7,7 @@ resource "snowflake_view" "test" { copy_grants = var.copy_grants change_tracking = var.change_tracking is_temporary = var.is_temporary - data_metric_functions { + data_metric_function { function_name = var.data_metric_function on = var.data_metric_function_on } diff --git a/pkg/resources/view.go b/pkg/resources/view.go index e73716c1ce..34d20c30a3 100644 --- a/pkg/resources/view.go +++ b/pkg/resources/view.go @@ -82,7 +82,7 @@ var viewSchema = map[string]*schema.Schema{ }), Description: booleanStringFieldDescription("Specifies to enable or disable change tracking on the table."), }, - "data_metric_functions": { + "data_metric_function": { Type: schema.TypeSet, Optional: true, Elem: &schema.Resource{ @@ -101,7 +101,7 @@ var viewSchema = map[string]*schema.Schema{ }, Description: "The table or view columns on which to associate the data metric function. The data types of the columns must match the data types of the columns specified in the data metric function definition.", }, - // TODO (next pr) + // TODO (SNOW-1348118 - next pr) // "schedule_status": { // Type: schema.TypeString, // Optional: true, @@ -128,7 +128,7 @@ var viewSchema = map[string]*schema.Schema{ "minutes": { Type: schema.TypeInt, Optional: true, - Description: fmt.Sprintf("Specifies an interval (in minutes) of wait time inserted between runs of the data metric function. Conflicts with `using_cron`. Valid values are: %s. 
Due to Snowflake limitations, changes in this field is not managed by the provider. Please consider using [taint](https://developer.hashicorp.com/terraform/cli/commands/taint) command, `using_cron` field, or [replace_triggered_by](https://developer.hashicorp.com/terraform/language/meta-arguments/lifecycle#replace_triggered_by) metadata argument.", possibleValuesListedInt(sdk.AllViewDataMetricScheduleMinutes)), + Description: fmt.Sprintf("Specifies an interval (in minutes) of wait time inserted between runs of the data metric function. Conflicts with `using_cron`. Valid values are: %s. Due to Snowflake limitations, changes in this field is not managed by the provider. Please consider using [taint](https://developer.hashicorp.com/terraform/cli/commands/taint) command, `using_cron` field, or [replace_triggered_by](https://developer.hashicorp.com/terraform/language/meta-arguments/lifecycle#replace_triggered_by) metadata argument.", possibleValuesListed(sdk.AllViewDataMetricScheduleMinutes)), ValidateDiagFunc: IntInSlice(sdk.AllViewDataMetricScheduleMinutes), ConflictsWith: []string{"data_metric_schedule.using_cron"}, }, @@ -141,9 +141,9 @@ var viewSchema = map[string]*schema.Schema{ }, }, Description: "Specifies the schedule to run the data metric functions periodically.", - RequiredWith: []string{"data_metric_functions"}, + RequiredWith: []string{"data_metric_function"}, }, - // TODO (next pr): add columns + // TODO (SNOW-1348118 - next pr): add columns // "column": { // Type: schema.TypeList, // Optional: true, @@ -436,7 +436,7 @@ func CreateView(orReplace bool) schema.CreateContextFunc { } } - if v, ok := d.GetOk("data_metric_functions"); ok { + if v, ok := d.GetOk("data_metric_function"); ok { addedRaw, err := extractDataMetricFunctions(v.(*schema.Set).List()) if err != nil { return diag.FromErr(err) @@ -452,7 +452,7 @@ func CreateView(orReplace bool) schema.CreateContextFunc { if err != nil { return diag.FromErr(fmt.Errorf("error adding data matric functions in 
view %v err = %w", id.Name(), err)) } - // TODO (next pr) + // TODO (SNOW-1348118 - next pr) // changeSchedule := make([]sdk.ViewModifyDataMetricFunction, 0, len(addedRaw)) // for i := range addedRaw { // if addedRaw[i].ScheduleStatus != "" { @@ -663,7 +663,7 @@ func handleDataMetricFunctions(ctx context.Context, client *sdk.Client, id sdk.S for _, v := range dmfRef.RefArguments { columns = append(columns, v.Name) } - // TODO (next pr) + // TODO (SNOW-1348118 - next pr) // var scheduleStatus sdk.DataMetricScheduleStatusOption // status, err := sdk.ToDataMetricScheduleStatusOption(dmfRef.ScheduleStatus) // if err != nil { @@ -682,7 +682,7 @@ func handleDataMetricFunctions(ctx context.Context, client *sdk.Client, id sdk.S } schedule = dmfRef.Schedule } - if err = d.Set("data_metric_functions", dataMetricFunctions); err != nil { + if err = d.Set("data_metric_function", dataMetricFunctions); err != nil { return err } @@ -714,7 +714,7 @@ func extractDataMetricFunctions(v any) (dmfs []ViewDataMetricFunctionConfig, err dmfs = append(dmfs, ViewDataMetricFunctionConfig{ DataMetricFunction: id, On: columns, - // TODO (next pr) + // TODO (SNOW-1348118 - next pr) // ScheduleStatus: config["schedule_status"].(string), }) } @@ -826,8 +826,8 @@ func UpdateView(ctx context.Context, d *schema.ResourceData, meta any) diag.Diag } } - if d.HasChange("data_metric_functions") { - old, new := d.GetChange("data_metric_functions") + if d.HasChange("data_metric_function") { + old, new := d.GetChange("data_metric_function") removedRaw, addedRaw := old.(*schema.Set).List(), new.(*schema.Set).List() addedConfig, err := extractDataMetricFunctions(addedRaw) if err != nil { diff --git a/pkg/resources/view_acceptance_test.go b/pkg/resources/view_acceptance_test.go index c753c955b3..8c5cd24590 100644 --- a/pkg/resources/view_acceptance_test.go +++ b/pkg/resources/view_acceptance_test.go @@ -48,6 +48,7 @@ func TestAcc_View_basic(t *testing.T) { cron, cron2 := "10 * * * * UTC", "20 * * * * UTC" id := 
acc.TestClient().Ids.RandomSchemaObjectIdentifier() + resourceId := helpers.EncodeResourceIdentifier(id) table, tableCleanup := acc.TestClient().Table.CreateTableWithColumns(t, []sdk.TableColumnRequest{ *sdk.NewTableColumnRequest("id", sdk.DataTypeNumber), *sdk.NewTableColumnRequest("foo", sdk.DataTypeNumber), @@ -100,8 +101,8 @@ func TestAcc_View_basic(t *testing.T) { Config: accconfig.FromModel(t, viewModel), ResourceName: "snowflake_view.test", ImportState: true, - ImportStateCheck: assert.AssertThatImport(t, assert.CheckImport(importchecks.TestCheckResourceAttrInstanceState(helpers.EncodeResourceIdentifier(id), "name", id.Name())), - resourceassert.ImportedViewResource(t, helpers.EncodeResourceIdentifier(id)). + ImportStateCheck: assert.AssertThatImport(t, assert.CheckImport(importchecks.TestCheckResourceAttrInstanceState(resourceId, "name", id.Name())), + resourceassert.ImportedViewResource(t, resourceId). HasNameString(id.Name()). HasDatabaseString(id.DatabaseName()). HasSchemaString(id.SchemaName()). 
@@ -129,7 +130,7 @@ func TestAcc_View_basic(t *testing.T) { assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "aggregation_policy.#", "0")), assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "row_access_policy.#", "0")), assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_schedule.#", "0")), - assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_functions.#", "0")), + assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_function.#", "0")), ), }, // set other fields @@ -158,10 +159,10 @@ func TestAcc_View_basic(t *testing.T) { assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_schedule.#", "1")), assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_schedule.0.using_cron", cron)), assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_schedule.0.minutes", "0")), - assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_functions.#", "1")), - assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_functions.0.function_name", functionId.FullyQualifiedName())), - assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_functions.0.on.#", "1")), - assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_functions.0.on.0", "ID")), + assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_function.#", "1")), + assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_function.0.function_name", functionId.FullyQualifiedName())), + assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_function.0.on.#", "1")), + assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_function.0.on.0", "ID")), ), }, // change policies and dmfs @@ -186,10 +187,10 @@ func TestAcc_View_basic(t *testing.T) { 
assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_schedule.0.using_cron", cron2)), assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_schedule.0.schedule_status", string(sdk.DataMetricScheduleStatusStarted))), assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_schedule.0.minutes", "0")), - assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_functions.#", "1")), - assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_functions.0.function_name", function2Id.FullyQualifiedName())), - assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_functions.0.on.#", "1")), - assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_functions.0.on.0", "ID")), + assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_function.#", "1")), + assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_function.0.function_name", function2Id.FullyQualifiedName())), + assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_function.0.on.#", "1")), + assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_function.0.on.0", "ID")), ), }, // change dmf status @@ -214,10 +215,10 @@ func TestAcc_View_basic(t *testing.T) { assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_schedule.0.using_cron", cron2)), assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_schedule.0.schedule_status", string(sdk.DataMetricScheduleStatusSuspended))), assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_schedule.0.minutes", "0")), - assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_functions.#", "1")), - assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_functions.0.function_name", 
function2Id.FullyQualifiedName())), - assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_functions.0.on.#", "1")), - assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_functions.0.on.0", "ID")), + assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_function.#", "1")), + assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_function.0.function_name", function2Id.FullyQualifiedName())), + assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_function.0.on.#", "1")), + assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_function.0.on.0", "ID")), ), }, // change statement and policies @@ -241,10 +242,10 @@ func TestAcc_View_basic(t *testing.T) { assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_schedule.#", "1")), assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_schedule.0.using_cron", cron)), assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_schedule.0.minutes", "0")), - assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_functions.#", "1")), - assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_functions.0.function_name", functionId.FullyQualifiedName())), - assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_functions.0.on.#", "1")), - assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_functions.0.on.0", "ID")), + assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_function.#", "1")), + assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_function.0.function_name", functionId.FullyQualifiedName())), + assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_function.0.on.#", "1")), + 
assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_function.0.on.0", "ID")), ), }, // change statements externally @@ -271,10 +272,10 @@ func TestAcc_View_basic(t *testing.T) { assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_schedule.#", "1")), assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_schedule.0.using_cron", cron)), assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_schedule.0.minutes", "0")), - assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_functions.#", "1")), - assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_functions.0.function_name", functionId.FullyQualifiedName())), - assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_functions.0.on.#", "1")), - assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_functions.0.on.0", "ID")), + assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_function.#", "1")), + assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_function.0.function_name", functionId.FullyQualifiedName())), + assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_function.0.on.#", "1")), + assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_function.0.on.0", "ID")), ), }, // unset policies externally @@ -302,10 +303,10 @@ func TestAcc_View_basic(t *testing.T) { assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_schedule.#", "1")), assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_schedule.0.using_cron", cron)), assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_schedule.0.minutes", "0")), - assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_functions.#", "1")), - 
assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_functions.0.function_name", functionId.FullyQualifiedName())), - assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_functions.0.on.#", "1")), - assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_functions.0.on.0", "ID")), + assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_function.#", "1")), + assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_function.0.function_name", functionId.FullyQualifiedName())), + assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_function.0.on.#", "1")), + assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_function.0.on.0", "ID")), ), }, @@ -315,8 +316,8 @@ func TestAcc_View_basic(t *testing.T) { ConfigVariables: basicUpdate(rowAccessPolicy.ID(), aggregationPolicy, functionId, otherStatement, cron, sdk.DataMetricScheduleStatusStarted), ResourceName: "snowflake_view.test", ImportState: true, - ImportStateCheck: assert.AssertThatImport(t, assert.CheckImport(importchecks.TestCheckResourceAttrInstanceState(helpers.EncodeResourceIdentifier(id), "name", id.Name())), - resourceassert.ImportedViewResource(t, helpers.EncodeResourceIdentifier(id)). + ImportStateCheck: assert.AssertThatImport(t, assert.CheckImport(importchecks.TestCheckResourceAttrInstanceState(resourceId, "name", id.Name())), + resourceassert.ImportedViewResource(t, resourceId). HasNameString(id.Name()). HasStatementString(otherStatement). HasDatabaseString(id.DatabaseName()). @@ -325,14 +326,14 @@ func TestAcc_View_basic(t *testing.T) { HasIsSecureString("false"). HasIsTemporaryString("false"). 
HasChangeTrackingString("false"), - assert.CheckImport(importchecks.TestCheckResourceAttrInstanceState(helpers.EncodeResourceIdentifier(id), "aggregation_policy.#", "1")), - assert.CheckImport(importchecks.TestCheckResourceAttrInstanceState(helpers.EncodeResourceIdentifier(id), "aggregation_policy.0.policy_name", aggregationPolicy.FullyQualifiedName())), - assert.CheckImport(importchecks.TestCheckResourceAttrInstanceState(helpers.EncodeResourceIdentifier(id), "aggregation_policy.0.entity_key.#", "1")), - assert.CheckImport(importchecks.TestCheckResourceAttrInstanceState(helpers.EncodeResourceIdentifier(id), "aggregation_policy.0.entity_key.0", "ID")), - assert.CheckImport(importchecks.TestCheckResourceAttrInstanceState(helpers.EncodeResourceIdentifier(id), "row_access_policy.#", "1")), - assert.CheckImport(importchecks.TestCheckResourceAttrInstanceState(helpers.EncodeResourceIdentifier(id), "row_access_policy.0.policy_name", rowAccessPolicy.ID().FullyQualifiedName())), - assert.CheckImport(importchecks.TestCheckResourceAttrInstanceState(helpers.EncodeResourceIdentifier(id), "row_access_policy.0.on.#", "1")), - assert.CheckImport(importchecks.TestCheckResourceAttrInstanceState(helpers.EncodeResourceIdentifier(id), "row_access_policy.0.on.0", "ID")), + assert.CheckImport(importchecks.TestCheckResourceAttrInstanceState(resourceId, "aggregation_policy.#", "1")), + assert.CheckImport(importchecks.TestCheckResourceAttrInstanceState(resourceId, "aggregation_policy.0.policy_name", aggregationPolicy.FullyQualifiedName())), + assert.CheckImport(importchecks.TestCheckResourceAttrInstanceState(resourceId, "aggregation_policy.0.entity_key.#", "1")), + assert.CheckImport(importchecks.TestCheckResourceAttrInstanceState(resourceId, "aggregation_policy.0.entity_key.0", "ID")), + assert.CheckImport(importchecks.TestCheckResourceAttrInstanceState(resourceId, "row_access_policy.#", "1")), + assert.CheckImport(importchecks.TestCheckResourceAttrInstanceState(resourceId, 
"row_access_policy.0.policy_name", rowAccessPolicy.ID().FullyQualifiedName())), + assert.CheckImport(importchecks.TestCheckResourceAttrInstanceState(resourceId, "row_access_policy.0.on.#", "1")), + assert.CheckImport(importchecks.TestCheckResourceAttrInstanceState(resourceId, "row_access_policy.0.on.0", "ID")), ), }, // unset @@ -348,7 +349,7 @@ func TestAcc_View_basic(t *testing.T) { assert.Check(resource.TestCheckNoResourceAttr("snowflake_view.test", "aggregation_policy.#")), assert.Check(resource.TestCheckNoResourceAttr("snowflake_view.test", "row_access_policy.#")), assert.Check(resource.TestCheckNoResourceAttr("snowflake_view.test", "data_metric_schedule.#")), - assert.Check(resource.TestCheckNoResourceAttr("snowflake_view.test", "data_metric_functions.#")), + assert.Check(resource.TestCheckNoResourceAttr("snowflake_view.test", "data_metric_function.#")), ), }, // recreate - change is_recursive @@ -366,7 +367,7 @@ func TestAcc_View_basic(t *testing.T) { assert.Check(resource.TestCheckNoResourceAttr("snowflake_view.test", "aggregation_policy.#")), assert.Check(resource.TestCheckNoResourceAttr("snowflake_view.test", "row_access_policy.#")), assert.Check(resource.TestCheckNoResourceAttr("snowflake_view.test", "data_metric_schedule.#")), - assert.Check(resource.TestCheckNoResourceAttr("snowflake_view.test", "data_metric_functions.#")), + assert.Check(resource.TestCheckNoResourceAttr("snowflake_view.test", "data_metric_function.#")), ), }, }, @@ -447,6 +448,7 @@ func TestAcc_View_complete(t *testing.T) { _ = testenvs.GetOrSkipTest(t, testenvs.EnableAcceptance) acc.TestAccPreCheck(t) id := acc.TestClient().Ids.RandomSchemaObjectIdentifier() + resourceId := helpers.EncodeResourceIdentifier(id) table, tableCleanup := acc.TestClient().Table.CreateTableWithColumns(t, []sdk.TableColumnRequest{ *sdk.NewTableColumnRequest("id", sdk.DataTypeNumber), *sdk.NewTableColumnRequest("foo", sdk.DataTypeNumber), @@ -514,10 +516,10 @@ func TestAcc_View_complete(t *testing.T) { 
assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_schedule.#", "1")), assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_schedule.0.using_cron", "5 * * * * UTC")), assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_schedule.0.minutes", "0")), - assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_functions.#", "1")), - assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_functions.0.function_name", functionId.FullyQualifiedName())), - assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_functions.0.on.#", "1")), - assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_functions.0.on.0", "ID")), + assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_function.#", "1")), + assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_function.0.function_name", functionId.FullyQualifiedName())), + assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_function.0.on.#", "1")), + assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_function.0.on.0", "ID")), assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "aggregation_policy.#", "1")), assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "aggregation_policy.0.policy_name", aggregationPolicy.FullyQualifiedName())), assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "aggregation_policy.0.entity_key.#", "1")), @@ -540,8 +542,8 @@ func TestAcc_View_complete(t *testing.T) { ConfigVariables: m(), ResourceName: "snowflake_view.test", ImportState: true, - ImportStateCheck: assert.AssertThatImport(t, assert.CheckImport(importchecks.TestCheckResourceAttrInstanceState(helpers.EncodeResourceIdentifier(id), "name", id.Name())), - resourceassert.ImportedViewResource(t, 
helpers.EncodeResourceIdentifier(id)). + ImportStateCheck: assert.AssertThatImport(t, assert.CheckImport(importchecks.TestCheckResourceAttrInstanceState(resourceId, "name", id.Name())), + resourceassert.ImportedViewResource(t, resourceId). HasNameString(id.Name()). HasStatementString(statement). HasDatabaseString(id.DatabaseName()). @@ -549,21 +551,21 @@ func TestAcc_View_complete(t *testing.T) { HasCommentString("Terraform test resource"). HasIsSecureString("true"). HasIsTemporaryString("false").HasChangeTrackingString("true"), - assert.CheckImport(importchecks.TestCheckResourceAttrInstanceState(helpers.EncodeResourceIdentifier(id), "data_metric_schedule.#", "1")), - assert.CheckImport(importchecks.TestCheckResourceAttrInstanceState(helpers.EncodeResourceIdentifier(id), "data_metric_schedule.0.using_cron", "5 * * * * UTC")), - assert.CheckImport(importchecks.TestCheckResourceAttrInstanceState(helpers.EncodeResourceIdentifier(id), "data_metric_schedule.0.minutes", "0")), - assert.CheckImport(importchecks.TestCheckResourceAttrInstanceState(helpers.EncodeResourceIdentifier(id), "data_metric_functions.#", "1")), - assert.CheckImport(importchecks.TestCheckResourceAttrInstanceState(helpers.EncodeResourceIdentifier(id), "data_metric_functions.0.function_name", functionId.FullyQualifiedName())), - assert.CheckImport(importchecks.TestCheckResourceAttrInstanceState(helpers.EncodeResourceIdentifier(id), "data_metric_functions.0.on.#", "1")), - assert.CheckImport(importchecks.TestCheckResourceAttrInstanceState(helpers.EncodeResourceIdentifier(id), "data_metric_functions.0.on.0", "ID")), - assert.CheckImport(importchecks.TestCheckResourceAttrInstanceState(helpers.EncodeResourceIdentifier(id), "aggregation_policy.#", "1")), - assert.CheckImport(importchecks.TestCheckResourceAttrInstanceState(helpers.EncodeResourceIdentifier(id), "aggregation_policy.0.policy_name", aggregationPolicy.FullyQualifiedName())), - 
assert.CheckImport(importchecks.TestCheckResourceAttrInstanceState(helpers.EncodeResourceIdentifier(id), "aggregation_policy.0.entity_key.#", "1")), - assert.CheckImport(importchecks.TestCheckResourceAttrInstanceState(helpers.EncodeResourceIdentifier(id), "aggregation_policy.0.entity_key.0", "ID")), - assert.CheckImport(importchecks.TestCheckResourceAttrInstanceState(helpers.EncodeResourceIdentifier(id), "row_access_policy.#", "1")), - assert.CheckImport(importchecks.TestCheckResourceAttrInstanceState(helpers.EncodeResourceIdentifier(id), "row_access_policy.0.policy_name", rowAccessPolicy.ID().FullyQualifiedName())), - assert.CheckImport(importchecks.TestCheckResourceAttrInstanceState(helpers.EncodeResourceIdentifier(id), "row_access_policy.0.on.#", "1")), - assert.CheckImport(importchecks.TestCheckResourceAttrInstanceState(helpers.EncodeResourceIdentifier(id), "row_access_policy.0.on.0", "ID")), + assert.CheckImport(importchecks.TestCheckResourceAttrInstanceState(resourceId, "data_metric_schedule.#", "1")), + assert.CheckImport(importchecks.TestCheckResourceAttrInstanceState(resourceId, "data_metric_schedule.0.using_cron", "5 * * * * UTC")), + assert.CheckImport(importchecks.TestCheckResourceAttrInstanceState(resourceId, "data_metric_schedule.0.minutes", "0")), + assert.CheckImport(importchecks.TestCheckResourceAttrInstanceState(resourceId, "data_metric_function.#", "1")), + assert.CheckImport(importchecks.TestCheckResourceAttrInstanceState(resourceId, "data_metric_function.0.function_name", functionId.FullyQualifiedName())), + assert.CheckImport(importchecks.TestCheckResourceAttrInstanceState(resourceId, "data_metric_function.0.on.#", "1")), + assert.CheckImport(importchecks.TestCheckResourceAttrInstanceState(resourceId, "data_metric_function.0.on.0", "ID")), + assert.CheckImport(importchecks.TestCheckResourceAttrInstanceState(resourceId, "aggregation_policy.#", "1")), + assert.CheckImport(importchecks.TestCheckResourceAttrInstanceState(resourceId, 
"aggregation_policy.0.policy_name", aggregationPolicy.FullyQualifiedName())), + assert.CheckImport(importchecks.TestCheckResourceAttrInstanceState(resourceId, "aggregation_policy.0.entity_key.#", "1")), + assert.CheckImport(importchecks.TestCheckResourceAttrInstanceState(resourceId, "aggregation_policy.0.entity_key.0", "ID")), + assert.CheckImport(importchecks.TestCheckResourceAttrInstanceState(resourceId, "row_access_policy.#", "1")), + assert.CheckImport(importchecks.TestCheckResourceAttrInstanceState(resourceId, "row_access_policy.0.policy_name", rowAccessPolicy.ID().FullyQualifiedName())), + assert.CheckImport(importchecks.TestCheckResourceAttrInstanceState(resourceId, "row_access_policy.0.on.#", "1")), + assert.CheckImport(importchecks.TestCheckResourceAttrInstanceState(resourceId, "row_access_policy.0.on.0", "ID")), ), }, }, diff --git a/pkg/sdk/data_metric_function_references_gen_test.go b/pkg/sdk/data_metric_function_references_gen_test.go index d73646432f..db7d737167 100644 --- a/pkg/sdk/data_metric_function_references_gen_test.go +++ b/pkg/sdk/data_metric_function_references_gen_test.go @@ -3,6 +3,11 @@ package sdk import "testing" func TestDataMetricFunctionReferences_GetForEntity(t *testing.T) { + t.Run("validation: nil options", func(t *testing.T) { + var opts *GetForEntityDataMetricFunctionReferenceOptions + assertOptsInvalidJoinedErrors(t, opts, ErrNilOptions) + }) + t.Run("validation: missing parameters", func(t *testing.T) { opts := &GetForEntityDataMetricFunctionReferenceOptions{} assertOptsInvalidJoinedErrors(t, opts, errNotSet("GetForEntityDataMetricFunctionReferenceOptions", "parameters")) diff --git a/pkg/sdk/testint/data_metric_function_references_gen_integration_test.go b/pkg/sdk/testint/data_metric_function_references_gen_integration_test.go index 799df31aab..25ca45c568 100644 --- a/pkg/sdk/testint/data_metric_function_references_gen_integration_test.go +++ b/pkg/sdk/testint/data_metric_function_references_gen_integration_test.go @@ -17,6 
+17,7 @@ func TestInt_DataMetricFunctionReferences(t *testing.T) { view, viewCleanup := testClientHelper().View.CreateView(t, statement) t.Cleanup(viewCleanup) + // when we specify schedule by a number of minutes, a cron is returned from Snowflake - see SNOW-1640024 err := client.Views.Alter(ctx, sdk.NewAlterViewRequest(view.ID()).WithSetDataMetricSchedule(*sdk.NewViewSetDataMetricScheduleRequest("5 MINUTE"))) require.NoError(t, err) err = client.Views.Alter(ctx, sdk.NewAlterViewRequest(view.ID()).WithAddDataMetricFunction(*sdk.NewViewAddDataMetricFunctionRequest([]sdk.ViewDataMetricFunction{{ diff --git a/pkg/sdk/views_validations_gen.go b/pkg/sdk/views_validations_gen.go index 000e54f902..376ec4cb91 100644 --- a/pkg/sdk/views_validations_gen.go +++ b/pkg/sdk/views_validations_gen.go @@ -1,5 +1,7 @@ package sdk +import "fmt" + var ( _ validatable = new(CreateViewOptions) _ validatable = new(AlterViewOptions) @@ -29,7 +31,21 @@ func (opts *CreateViewOptions) validate() error { } if valueSet(opts.AggregationPolicy) { if !ValidObjectIdentifier(opts.AggregationPolicy.AggregationPolicy) { - errs = append(errs, ErrInvalidObjectIdentifier) + errs = append(errs, errInvalidIdentifier("CreateViewOptions", "AggregationPolicy")) + } + } + if valueSet(opts.Columns) { + for i, columnOption := range opts.Columns { + if valueSet(columnOption.MaskingPolicy) { + if !ValidObjectIdentifier(columnOption.MaskingPolicy.MaskingPolicy) { + errs = append(errs, errInvalidIdentifier(fmt.Sprintf("CreateViewOptions.Columns[%d]", i), "MaskingPolicy")) + } + } + if valueSet(columnOption.ProjectionPolicy) { + if !ValidObjectIdentifier(columnOption.ProjectionPolicy.ProjectionPolicy) { + errs = append(errs, errInvalidIdentifier(fmt.Sprintf("CreateViewOptions.Columns[%d]", i), "ProjectionPolicy")) + } + } } } return JoinErrors(errs...) 
From a5e8e32e45a05a3a23a17d2570774842107c01a4 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jan=20Cie=C5=9Blak?= Date: Thu, 29 Aug 2024 11:17:36 +0200 Subject: [PATCH 06/13] Fix tests --- pkg/resources/view.go | 9 ------- .../data_metric_function_references_gen.go | 21 ++++++++------- ...unction_references_gen_integration_test.go | 26 ++++++++++++------- pkg/sdk/testint/views_gen_integration_test.go | 4 +-- 4 files changed, 30 insertions(+), 30 deletions(-) diff --git a/pkg/resources/view.go b/pkg/resources/view.go index 34d20c30a3..e9e929f2da 100644 --- a/pkg/resources/view.go +++ b/pkg/resources/view.go @@ -525,18 +525,9 @@ func ReadView(withExternalChangesMarking bool) schema.ReadContextFunc { return diag.FromErr(err) } - if err = d.Set("name", view.Name); err != nil { - return diag.FromErr(err) - } if err := d.Set(FullyQualifiedNameAttributeName, id.FullyQualifiedName()); err != nil { return diag.FromErr(err) } - if err = d.Set("database", view.DatabaseName); err != nil { - return diag.FromErr(err) - } - if err = d.Set("schema", view.SchemaName); err != nil { - return diag.FromErr(err) - } if err = d.Set("copy_grants", view.HasCopyGrants()); err != nil { return diag.FromErr(err) } diff --git a/pkg/sdk/data_metric_function_references_gen.go b/pkg/sdk/data_metric_function_references_gen.go index 59f2d50756..7170ae8f59 100644 --- a/pkg/sdk/data_metric_function_references_gen.go +++ b/pkg/sdk/data_metric_function_references_gen.go @@ -4,6 +4,7 @@ import ( "context" "encoding/json" "log" + "strings" ) type DataMetricFunctionReferences interface { @@ -28,10 +29,10 @@ type dataMetricFunctionReferencesRow struct { MetricDatabaseName string `db:"METRIC_DATABASE_NAME"` MetricSchemaName string `db:"METRIC_SCHEMA_NAME"` MetricName string `db:"METRIC_NAME"` - ArgumentSignature string `db:"ARGUMENT_SIGNATURE"` - DataType string `db:"DATA_TYPE"` - RefDatabaseName string `db:"REF_DATABASE_NAME"` - RefSchemaName string `db:"REF_SCHEMA_NAME"` + ArgumentSignature string 
`db:"METRIC_SIGNATURE"` + DataType string `db:"METRIC_DATA_TYPE"` + RefDatabaseName string `db:"REF_ENTITY_DATABASE_NAME"` + RefSchemaName string `db:"REF_ENTITY_SCHEMA_NAME"` RefEntityName string `db:"REF_ENTITY_NAME"` RefEntityDomain string `db:"REF_ENTITY_DOMAIN"` RefArguments string `db:"REF_ARGUMENTS"` @@ -63,14 +64,14 @@ type DataMetricFunctionReference struct { func (row dataMetricFunctionReferencesRow) convert() *DataMetricFunctionReference { x := &DataMetricFunctionReference{ - MetricDatabaseName: row.MetricDatabaseName, - MetricSchemaName: row.MetricSchemaName, - MetricName: row.MetricName, + MetricDatabaseName: strings.Trim(row.MetricDatabaseName, `"`), + MetricSchemaName: strings.Trim(row.MetricSchemaName, `"`), + MetricName: strings.Trim(row.MetricName, `"`), ArgumentSignature: row.ArgumentSignature, DataType: row.DataType, - RefEntityDatabaseName: row.RefDatabaseName, - RefEntitySchemaName: row.RefSchemaName, - RefEntityName: row.RefEntityName, + RefEntityDatabaseName: strings.Trim(row.RefDatabaseName, `"`), + RefEntitySchemaName: strings.Trim(row.RefSchemaName, `"`), + RefEntityName: strings.Trim(row.RefEntityName, `"`), RefEntityDomain: row.RefEntityDomain, RefId: row.RefId, Schedule: row.Schedule, diff --git a/pkg/sdk/testint/data_metric_function_references_gen_integration_test.go b/pkg/sdk/testint/data_metric_function_references_gen_integration_test.go index 25ca45c568..cf10780f10 100644 --- a/pkg/sdk/testint/data_metric_function_references_gen_integration_test.go +++ b/pkg/sdk/testint/data_metric_function_references_gen_integration_test.go @@ -1,8 +1,11 @@ package testint import ( + "strings" "testing" + "github.com/stretchr/testify/assert" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk" "github.com/stretchr/testify/require" ) @@ -12,7 +15,7 @@ func TestInt_DataMetricFunctionReferences(t *testing.T) { ctx := testContext(t) t.Run("view domain", func(t *testing.T) { - functionId := sdk.NewSchemaObjectIdentifier("SNOWFLAKE", 
"CORE", "AVG") + functionId := sdk.NewSchemaObjectIdentifier("SNOWFLAKE", "CORE", "BLANK_COUNT") statement := "SELECT ROLE_NAME, ROLE_OWNER FROM INFORMATION_SCHEMA.APPLICABLE_ROLES" view, viewCleanup := testClientHelper().View.CreateView(t, statement) t.Cleanup(viewCleanup) @@ -30,13 +33,18 @@ func TestInt_DataMetricFunctionReferences(t *testing.T) { require.NoError(t, err) require.Equal(t, 1, len(dmfs)) dmf := dmfs[0] - require.Equal(t, string(sdk.DataMetricFuncionRefEntityDomainView), dmf.RefEntityDomain) - require.Equal(t, functionId.DatabaseName(), dmf.MetricDatabaseName) - require.Equal(t, functionId.SchemaName(), dmf.MetricSchemaName) - require.Equal(t, functionId.Name(), dmf.MetricName) - require.Equal(t, view.ID().DatabaseName(), dmf.RefEntityDatabaseName) - require.Equal(t, view.ID().SchemaName(), dmf.RefEntitySchemaName) - require.Equal(t, view.ID().Name(), dmf.RefEntityName) - require.Equal(t, "*/5 * * * * UTC", dmf.Schedule) + assert.Equal(t, string(sdk.DataMetricFuncionRefEntityDomainView), strings.ToUpper(dmf.RefEntityDomain)) + assert.Equal(t, functionId.DatabaseName(), dmf.MetricDatabaseName) + assert.Equal(t, functionId.SchemaName(), dmf.MetricSchemaName) + assert.Equal(t, functionId.Name(), dmf.MetricName) + assert.Equal(t, view.ID().DatabaseName(), dmf.RefEntityDatabaseName) + assert.Equal(t, view.ID().SchemaName(), dmf.RefEntitySchemaName) + assert.Equal(t, view.ID().Name(), dmf.RefEntityName) + assert.Equal(t, "TABLE(VARCHAR)", dmf.ArgumentSignature) + assert.Equal(t, "NUMBER(38,0)", dmf.DataType) + assert.NotEmpty(t, dmf.RefArguments) + assert.NotEmpty(t, dmf.RefId) + assert.Equal(t, "*/5 * * * * UTC", dmf.Schedule) + assert.Equal(t, string(sdk.DataMetricScheduleStatusStarted), dmf.ScheduleStatus) }) } diff --git a/pkg/sdk/testint/views_gen_integration_test.go b/pkg/sdk/testint/views_gen_integration_test.go index b212348f2d..78003a45d6 100644 --- a/pkg/sdk/testint/views_gen_integration_test.go +++ 
b/pkg/sdk/testint/views_gen_integration_test.go @@ -557,8 +557,8 @@ func TestInt_Views(t *testing.T) { t.Cleanup(dataMetricFunction2Cleanup) // set cron schedule - cron := "5 * * * * UTC" - alterRequest := sdk.NewAlterViewRequest(id).WithSetDataMetricSchedule(*sdk.NewViewSetDataMetricScheduleRequest(cron)) + cron := "*/5 * * * * UTC" + alterRequest := sdk.NewAlterViewRequest(id).WithSetDataMetricSchedule(*sdk.NewViewSetDataMetricScheduleRequest("USING CRON " + cron)) err := client.Views.Alter(ctx, alterRequest) require.NoError(t, err) From 0eef529f60df7ade4736823a87a3034d9941efc2 Mon Sep 17 00:00:00 2001 From: Jakub Michalak Date: Wed, 28 Aug 2024 18:50:55 +0200 Subject: [PATCH 07/13] Add columns --- MIGRATION_GUIDE.md | 1 + examples/resources/snowflake_view/resource.tf | 21 +- .../testdata/TestAcc_View/complete/test.tf | 16 ++ .../TestAcc_View/complete/variables.tf | 23 ++ pkg/resources/view.go | 245 +++++++++++++----- pkg/resources/view_acceptance_test.go | 20 +- ...unction_references_gen_integration_test.go | 11 + pkg/sdk/policy_references.go | 1 + pkg/snowflake/parser.go | 70 ++++- pkg/snowflake/parser_test.go | 2 + 10 files changed, 328 insertions(+), 82 deletions(-) create mode 100644 pkg/sdk/data_metric_function_references_gen_integration_test.go diff --git a/MIGRATION_GUIDE.md b/MIGRATION_GUIDE.md index 67a266ffbb..f51691da54 100644 --- a/MIGRATION_GUIDE.md +++ b/MIGRATION_GUIDE.md @@ -32,6 +32,7 @@ New fields: - `is_temporary` - `data_metric_schedule` - `data_metric_function` + - `column` - added `show_output` field that holds the response from SHOW VIEWS. - added `describe_output` field that holds the response from DESCRIBE VIEW. Note that one needs to grant sufficient privileges e.g. with [grant_ownership](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/latest/docs/resources/grant_ownership) on the tables used in this view. Otherwise, this field is not filled. 
diff --git a/examples/resources/snowflake_view/resource.tf b/examples/resources/snowflake_view/resource.tf index de20fb54cb..a0c4088f75 100644 --- a/examples/resources/snowflake_view/resource.tf +++ b/examples/resources/snowflake_view/resource.tf @@ -18,7 +18,7 @@ resource "snowflake_view" "view" { select * from foo; SQL } -# resource with attached policies and data metric functions +# resource with attached policies, columns and data metric functions resource "snowflake_view" "test" { database = "database" schema = "schema" @@ -27,6 +27,23 @@ resource "snowflake_view" "test" { is_secure = "true" change_tracking = "true" is_temporary = "true" + column { + column_name = "id" + comment = "column comment" + + } + column { + column_name = "address" + projection_policy { + policy_name = "projection_policy" + } + + masking_policy { + policy_name = "masking_policy" + using = ["address"] + } + + } row_access_policy { policy_name = "row_access_policy" on = ["id"] @@ -43,6 +60,6 @@ resource "snowflake_view" "test" { using_cron = "15 * * * * UTC" } statement = <<-SQL - SELECT id FROM TABLE; + SELECT id, address FROM TABLE; SQL } diff --git a/pkg/resources/testdata/TestAcc_View/complete/test.tf b/pkg/resources/testdata/TestAcc_View/complete/test.tf index 6e4c53c023..960e96f371 100644 --- a/pkg/resources/testdata/TestAcc_View/complete/test.tf +++ b/pkg/resources/testdata/TestAcc_View/complete/test.tf @@ -7,6 +7,22 @@ resource "snowflake_view" "test" { copy_grants = var.copy_grants change_tracking = var.change_tracking is_temporary = var.is_temporary + columns { + column_name = var.column1_name + comment = var.column1_comment + + } + columns { + column_name = var.column2_name + projection_policy { + policy_name = var.column2_projection_policy + } + + masking_policy { + policy_name = var.column2_masking_policy + using = var.column2_masking_policy_using + } + } data_metric_function { function_name = var.data_metric_function on = var.data_metric_function_on diff --git 
a/pkg/resources/testdata/TestAcc_View/complete/variables.tf b/pkg/resources/testdata/TestAcc_View/complete/variables.tf index 4cdf99c64b..02d4158484 100644 --- a/pkg/resources/testdata/TestAcc_View/complete/variables.tf +++ b/pkg/resources/testdata/TestAcc_View/complete/variables.tf @@ -65,3 +65,26 @@ variable "data_metric_function" { variable "data_metric_function_on" { type = list(string) } + +variable "column1_name" { + type = string +} + +variable "column1_comment" { + type = string +} +variable "column2_name" { + type = string +} + +variable "column2_masking_policy" { + type = string +} + +variable "column2_masking_policy_using" { + type = list(string) +} + +variable "column2_projection_policy" { + type = string +} diff --git a/pkg/resources/view.go b/pkg/resources/view.go index e9e929f2da..b384a92569 100644 --- a/pkg/resources/view.go +++ b/pkg/resources/view.go @@ -8,6 +8,7 @@ import ( "strconv" "strings" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/internal/collections" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/internal/provider" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/schemas" @@ -143,53 +144,64 @@ var viewSchema = map[string]*schema.Schema{ Description: "Specifies the schedule to run the data metric functions periodically.", RequiredWith: []string{"data_metric_function"}, }, - // TODO (SNOW-1348118 - next pr): add columns - // "column": { - // Type: schema.TypeList, - // Optional: true, - // Elem: &schema.Resource{ - // Schema: map[string]*schema.Schema{ - // "column_name": { - // Type: schema.TypeString, - // Required: true, - // Description: "Specifies affected column name.", - // }, - // "masking_policy": { - // Type: schema.TypeList, - // Optional: true, - // Elem: &schema.Resource{ - // Schema: map[string]*schema.Schema{ - // "policy_name": { - // Type: schema.TypeString, - // Required: true, - // Description: "Specifies the masking policy to set on a column.", - // }, - // "using": { - // Type: 
schema.TypeList, - // Optional: true, - // Elem: &schema.Schema{ - // Type: schema.TypeString, - // }, - // Description: "Specifies the arguments to pass into the conditional masking policy SQL expression. The first column in the list specifies the column for the policy conditions to mask or tokenize the data and must match the column to which the masking policy is set. The additional columns specify the columns to evaluate to determine whether to mask or tokenize the data in each row of the query result when a query is made on the first column. If the USING clause is omitted, Snowflake treats the conditional masking policy as a normal masking policy.", - // }, - // }, - // }, - // }, - // "projection_policy": { - // Type: schema.TypeString, - // Optional: true, - // DiffSuppressFunc: DiffSuppressStatement, - // Description: "Specifies the projection policy to set on a column.", - // }, - // "comment": { - // Type: schema.TypeString, - // Optional: true, - // Description: "Specifies a comment for the column.", - // }, - // }, - // }, - // Description: "If you want to change the name of a column or add a comment to a column in the new view, include a column list that specifies the column names and (if needed) comments about the columns. (You do not need to specify the data types of the columns.)", - // }, + "column": { + Type: schema.TypeList, + Optional: true, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "column_name": { + Type: schema.TypeString, + Required: true, + Description: "Specifies affected column name.", + }, + "masking_policy": { + Type: schema.TypeList, + Optional: true, + MaxItems: 1, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "policy_name": { + Type: schema.TypeString, + Required: true, + DiffSuppressFunc: suppressIdentifierQuoting, + Description: "Specifies the masking policy to set on a column.", + }, + // TODO: check if is really only 1 arg allowed? 
+ "using": { + Type: schema.TypeList, + Optional: true, + Elem: &schema.Schema{ + Type: schema.TypeString, + }, + Description: "Specifies the arguments to pass into the conditional masking policy SQL expression. The first column in the list specifies the column for the policy conditions to mask or tokenize the data and must match the column to which the masking policy is set. The additional columns specify the columns to evaluate to determine whether to mask or tokenize the data in each row of the query result when a query is made on the first column. If the USING clause is omitted, Snowflake treats the conditional masking policy as a normal masking policy.", + }, + }, + }, + }, + "projection_policy": { + Type: schema.TypeList, + Optional: true, + MaxItems: 1, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "policy_name": { + Type: schema.TypeString, + Required: true, + DiffSuppressFunc: suppressIdentifierQuoting, + Description: "Specifies the projection policy to set on a column.", + }, + }, + }, + }, + "comment": { + Type: schema.TypeString, + Optional: true, + Description: "Specifies a comment for the column.", + }, + }, + }, + Description: "If you want to change the name of a column or add a comment to a column in the new view, include a column list that specifies the column names and (if needed) comments about the columns. 
(You do not need to specify the data types of the columns.)", + }, "comment": { Type: schema.TypeString, Optional: true, @@ -383,8 +395,16 @@ func CreateView(orReplace bool) schema.CreateContextFunc { req.WithComment(v) } + if v := d.Get("column"); len(v.([]any)) > 0 { + columns, err := extractColumns(v) + if err != nil { + return diag.FromErr(err) + } + req.WithColumns(columns) + } + if v := d.Get("row_access_policy"); len(v.([]any)) > 0 { - id, columns, err := extractPolicyWithColumns(v, "on") + id, columns, err := extractPolicyWithColumnsSet(v, "on") if err != nil { return diag.FromErr(err) } @@ -392,7 +412,7 @@ func CreateView(orReplace bool) schema.CreateContextFunc { } if v := d.Get("aggregation_policy"); len(v.([]any)) > 0 { - id, columns, err := extractPolicyWithColumns(v, "entity_key") + id, columns, err := extractPolicyWithColumnsSet(v, "entity_key") if err != nil { return diag.FromErr(err) } @@ -488,12 +508,42 @@ func CreateView(orReplace bool) schema.CreateContextFunc { } } -func extractPolicyWithColumns(v any, columnsKey string) (sdk.SchemaObjectIdentifier, []sdk.Column, error) { +func extractColumns(v any) ([]sdk.ViewColumnRequest, error) { + columns := make([]sdk.ViewColumnRequest, len(v.([]any))) + for i, columnConfigRaw := range v.([]any) { + columnConfig := columnConfigRaw.(map[string]any) + columnsReq := *sdk.NewViewColumnRequest(columnConfig["column_name"].(string)) + if len(columnConfig["projection_policy"].([]any)) > 0 { + projectionPolicyId, _, err := extractPolicyWithColumnsSet(columnConfig["projection_policy"], "") + if err != nil { + return nil, err + } + columnsReq.WithProjectionPolicy(*sdk.NewViewColumnProjectionPolicyRequest(projectionPolicyId)) + } + if len(columnConfig["masking_policy"].([]any)) > 0 { + maskingPolicyId, maskingPolicyColumns, err := extractPolicyWithColumnsList(columnConfig["masking_policy"], "using") + if err != nil { + return nil, err + } + 
columnsReq.WithMaskingPolicy(*sdk.NewViewColumnMaskingPolicyRequest(maskingPolicyId).WithUsing(maskingPolicyColumns)) + } + if commentRaw := columnConfig["comment"].(string); len(commentRaw) > 0 { + columnsReq.WithComment(commentRaw) + } + columns[i] = columnsReq + } + return columns, nil +} + +func extractPolicyWithColumnsSet(v any, columnsKey string) (sdk.SchemaObjectIdentifier, []sdk.Column, error) { policyConfig := v.([]any)[0].(map[string]any) id, err := sdk.ParseSchemaObjectIdentifier(policyConfig["policy_name"].(string)) if err != nil { return sdk.SchemaObjectIdentifier{}, nil, err } + if policyConfig[columnsKey] == nil { + return id, nil, nil + } columnsRaw := expandStringList(policyConfig[columnsKey].(*schema.Set).List()) columns := make([]sdk.Column, len(columnsRaw)) for i := range columnsRaw { @@ -502,6 +552,23 @@ func extractPolicyWithColumns(v any, columnsKey string) (sdk.SchemaObjectIdentif return id, columns, nil } +func extractPolicyWithColumnsList(v any, columnsKey string) (sdk.SchemaObjectIdentifier, []sdk.Column, error) { + policyConfig := v.([]any)[0].(map[string]any) + id, err := sdk.ParseSchemaObjectIdentifier(policyConfig["policy_name"].(string)) + if err != nil { + return sdk.SchemaObjectIdentifier{}, nil, err + } + if policyConfig[columnsKey] == nil { + return id, nil, nil + } + columnsRaw := expandStringList(policyConfig[columnsKey].([]any)) + columns := make([]sdk.Column, len(columnsRaw)) + for i := range columnsRaw { + columns[i] = sdk.Column{Value: columnsRaw[i]} + } + return id, columns, nil +} + func ReadView(withExternalChangesMarking bool) schema.ReadContextFunc { return func(ctx context.Context, d *schema.ResourceData, meta any) diag.Diagnostics { client := meta.(*provider.Context).Client @@ -558,8 +625,11 @@ func ReadView(withExternalChangesMarking bool) schema.ReadContextFunc { }); err != nil { return diag.FromErr(err) } - - err = handlePolicyReferences(ctx, client, id, d) + policyRefs, err := 
client.PolicyReferences.GetForEntity(ctx, sdk.NewGetForEntityPolicyReferenceRequest(id, sdk.PolicyEntityDomainView)) + if err != nil { + return diag.FromErr(fmt.Errorf("getting policy references for view: %w", err)) + } + err = handlePolicyReferences(policyRefs, d) if err != nil { return diag.FromErr(err) } @@ -588,6 +658,10 @@ func ReadView(withExternalChangesMarking bool) schema.ReadContextFunc { if err = d.Set(DescribeOutputAttributeName, schemas.ViewDescriptionToSchema(describeResult)); err != nil { return diag.FromErr(err) } + err = handleColumns(d, describeResult, policyRefs) + if err != nil { + return diag.FromErr(err) + } } if err = d.Set(ShowOutputAttributeName, []map[string]any{schemas.ViewToSchema(view)}); err != nil { @@ -597,11 +671,7 @@ func ReadView(withExternalChangesMarking bool) schema.ReadContextFunc { } } -func handlePolicyReferences(ctx context.Context, client *sdk.Client, id sdk.SchemaObjectIdentifier, d *schema.ResourceData) error { - policyRefs, err := client.PolicyReferences.GetForEntity(ctx, sdk.NewGetForEntityPolicyReferenceRequest(id, sdk.PolicyEntityDomainView)) - if err != nil { - return fmt.Errorf("getting policy references for view: %w", err) - } +func handlePolicyReferences(policyRefs []sdk.PolicyReference, d *schema.ResourceData) error { var aggregationPolicies []map[string]any var rowAccessPolicies []map[string]any for _, p := range policyRefs { @@ -626,16 +696,16 @@ func handlePolicyReferences(ctx context.Context, client *sdk.Client, id sdk.Sche "on": on, }) default: - log.Printf("[WARN] unexpected policy kind %v in policy references returned from Snowflake", p.PolicyKind) + log.Printf("[DEBUG] unexpected policy kind %v in policy references returned from Snowflake", p.PolicyKind) } } - if err = d.Set("aggregation_policy", aggregationPolicies); err != nil { + if err := d.Set("aggregation_policy", aggregationPolicies); err != nil { return err } - if err = d.Set("row_access_policy", rowAccessPolicies); err != nil { + if err := 
d.Set("row_access_policy", rowAccessPolicies); err != nil { return err } - return err + return nil } func handleDataMetricFunctions(ctx context.Context, client *sdk.Client, id sdk.SchemaObjectIdentifier, d *schema.ResourceData) error { @@ -684,6 +754,49 @@ func handleDataMetricFunctions(ctx context.Context, client *sdk.Client, id sdk.S }) } +func handleColumns(d *schema.ResourceData, columns []sdk.ViewDetails, policyRefs []sdk.PolicyReference) error { + if len(columns) == 0 { + return d.Set("column", nil) + } + columnsRaw := make([]map[string]any, len(columns)) + for i, column := range columns { + columnsRaw[i] = map[string]any{ + "column_name": column.Name, + "comment": column.Comment, + } + projectionPolicy, err := collections.FindOne(policyRefs, func(r sdk.PolicyReference) bool { + return r.PolicyKind == sdk.PolicyKindProjectionPolicy && r.RefColumnName != nil && *r.RefColumnName == column.Name + }) + if err == nil { + if projectionPolicy.PolicyDb != nil && projectionPolicy.PolicySchema != nil { + columnsRaw[i]["projection_policy"] = []map[string]any{ + { + "policy_name": sdk.NewSchemaObjectIdentifier(*projectionPolicy.PolicyDb, *projectionPolicy.PolicySchema, projectionPolicy.PolicyName).FullyQualifiedName(), + }, + } + } else { + log.Printf("could not store projection policy name: policy db and schema can not be empty") + } + } + maskingPolicy, err := collections.FindOne(policyRefs, func(r sdk.PolicyReference) bool { + return r.PolicyKind == sdk.PolicyKindMaskingPolicy && r.RefColumnName != nil && *r.RefColumnName == column.Name + }) + if err == nil { + if maskingPolicy.PolicyDb != nil && maskingPolicy.PolicySchema != nil { + columnsRaw[i]["masking_policy"] = []map[string]any{ + { + "policy_name": sdk.NewSchemaObjectIdentifier(*maskingPolicy.PolicyDb, *maskingPolicy.PolicySchema, maskingPolicy.PolicyName).FullyQualifiedName(), + "using": []string{*maskingPolicy.RefColumnName}, + }, + } + } else { + log.Printf("could not store masking policy name: policy db and 
schema can not be empty") + } + } + } + return d.Set("column", columnsRaw) +} + type ViewDataMetricFunctionConfig struct { DataMetricFunction sdk.SchemaObjectIdentifier On []sdk.Column @@ -730,8 +843,8 @@ func UpdateView(ctx context.Context, d *schema.ResourceData, meta any) diag.Diag } // change on these fields can not be ForceNew because then view is dropped explicitly and copying grants does not have effect - if d.HasChange("statement") || d.HasChange("is_temporary") || d.HasChange("is_recursive") || d.HasChange("copy_grant") { - log.Printf("[DEBUG] Detected change on %q, recreating...", changedKeys(d, []string{"statement", "is_temporary", "is_recursive", "copy_grant"})) + if d.HasChange("statement") || d.HasChange("is_temporary") || d.HasChange("is_recursive") || d.HasChange("copy_grant") || d.HasChange("column") { + log.Printf("[DEBUG] Detected change on %q, recreating...", changedKeys(d, []string{"statement", "is_temporary", "is_recursive", "copy_grant", "column"})) return CreateView(true)(ctx, d, meta) } @@ -862,14 +975,14 @@ func UpdateView(ctx context.Context, d *schema.ResourceData, meta any) diag.Diag oldRaw, newRaw := d.GetChange("row_access_policy") if len(oldRaw.([]any)) > 0 { - oldId, _, err := extractPolicyWithColumns(oldRaw, "on") + oldId, _, err := extractPolicyWithColumnsSet(oldRaw, "on") if err != nil { return diag.FromErr(err) } dropReq = sdk.NewViewDropRowAccessPolicyRequest(oldId) } if len(newRaw.([]any)) > 0 { - newId, newColumns, err := extractPolicyWithColumns(newRaw, "on") + newId, newColumns, err := extractPolicyWithColumnsSet(newRaw, "on") if err != nil { return diag.FromErr(err) } @@ -890,7 +1003,7 @@ func UpdateView(ctx context.Context, d *schema.ResourceData, meta any) diag.Diag } if d.HasChange("aggregation_policy") { if v, ok := d.GetOk("aggregation_policy"); ok { - newId, newColumns, err := extractPolicyWithColumns(v, "entity_key") + newId, newColumns, err := extractPolicyWithColumnsSet(v, "entity_key") if err != nil { return 
diag.FromErr(err) } diff --git a/pkg/resources/view_acceptance_test.go b/pkg/resources/view_acceptance_test.go index 8c5cd24590..2b73409f3e 100644 --- a/pkg/resources/view_acceptance_test.go +++ b/pkg/resources/view_acceptance_test.go @@ -485,10 +485,12 @@ func TestAcc_View_complete(t *testing.T) { "aggregation_policy_entity_key": config.ListVariable(config.StringVariable("ID")), "statement": config.StringVariable(statement), "warehouse": config.StringVariable(acc.TestWarehouseName), - "column_name": config.StringVariable("ID"), - "masking_policy": config.StringVariable(maskingPolicy.ID().FullyQualifiedName()), - "masking_policy_using": config.ListVariable(config.StringVariable("ID")), - "projection_policy": config.StringVariable(projectionPolicy.FullyQualifiedName()), + "column1_name": config.StringVariable("ID"), + "column1_comment": config.StringVariable("col comment"), + "column2_name": config.StringVariable("FOO"), + "column2_masking_policy": config.StringVariable(maskingPolicy.ID().FullyQualifiedName()), + "column2_masking_policy_using": config.ListVariable(config.StringVariable("FOO")), + "column2_projection_policy": config.StringVariable(projectionPolicy.FullyQualifiedName()), "data_metric_function": config.StringVariable(functionId.FullyQualifiedName()), "data_metric_function_on": config.ListVariable(config.StringVariable("ID")), "data_metric_schedule_using_cron": config.StringVariable("5 * * * * UTC"), @@ -528,6 +530,16 @@ func TestAcc_View_complete(t *testing.T) { assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "row_access_policy.0.policy_name", rowAccessPolicy.ID().FullyQualifiedName())), assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "row_access_policy.0.on.#", "1")), assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "row_access_policy.0.on.0", "ID")), + assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "column.#", "2")), + 
assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "column.0.column_name", "ID")), + assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "column.0.masking_policy.#", "0")), + assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "column.0.projection_policy.#", "0")), + assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "column.0.comment", "col comment")), + assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "column.1.column_name", "FOO")), + assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "column.1.masking_policy.#", "1")), + assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "column.1.masking_policy.0.policy_name", maskingPolicy.ID().FullyQualifiedName())), + assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "column.1.projection_policy.#", "1")), + assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "column.1.projection_policy.0.policy_name", projectionPolicy.FullyQualifiedName())), resourceshowoutputassert.ViewShowOutput(t, "snowflake_view.test"). HasName(id.Name()). HasDatabaseName(id.DatabaseName()). 
diff --git a/pkg/sdk/data_metric_function_references_gen_integration_test.go b/pkg/sdk/data_metric_function_references_gen_integration_test.go new file mode 100644 index 0000000000..c855280322 --- /dev/null +++ b/pkg/sdk/data_metric_function_references_gen_integration_test.go @@ -0,0 +1,11 @@ +package sdk + +import "testing" + +func TestInt_DataMetricFunctionReferences(t *testing.T) { + // TODO: prepare common resources + + t.Run("GetForEntity", func(t *testing.T) { + // TODO: fill me + }) +} diff --git a/pkg/sdk/policy_references.go b/pkg/sdk/policy_references.go index 8decc63793..8dbc6dfdf9 100644 --- a/pkg/sdk/policy_references.go +++ b/pkg/sdk/policy_references.go @@ -75,6 +75,7 @@ const ( PolicyKindRowAccessPolicy PolicyKind = "ROW_ACCESS_POLICY" PolicyKindPasswordPolicy PolicyKind = "PASSWORD_POLICY" PolicyKindMaskingPolicy PolicyKind = "MASKING_POLICY" + PolicyKindProjectionPolicy PolicyKind = "PROJECTION_POLICY" ) type PolicyReference struct { diff --git a/pkg/snowflake/parser.go b/pkg/snowflake/parser.go index 63c97f1ffc..3b59047018 100644 --- a/pkg/snowflake/parser.go +++ b/pkg/snowflake/parser.go @@ -2,6 +2,7 @@ package snowflake import ( "fmt" + "log" "strings" "unicode" ) @@ -42,24 +43,73 @@ func (e *ViewSelectStatementExtractor) Extract() (string, error) { e.consumeToken("if not exists") e.consumeSpace() e.consumeID() - // TODO column list + e.consumeSpace() + e.consumeColumns() e.consumeSpace() e.consumeToken("copy grants") e.consumeComment() e.consumeSpace() e.consumeComment() e.consumeSpace() - e.extractRowAccessPolicy() - e.extractAggregationPolicy() + e.consumeRowAccessPolicy() + e.consumeAggregationPolicy() e.consumeToken("as") e.consumeSpace() - fmt.Printf("[DEBUG] extracted statement %s from view query %s\n", string(e.input[e.pos:]), string(e.input)) + log.Printf("[DEBUG] extracted statement %s from view query %s\n", string(e.input[e.pos:]), string(e.input)) return string(e.input[e.pos:]), nil } -func (e *ViewSelectStatementExtractor) 
extractRowAccessPolicy() { +func (e *ViewSelectStatementExtractor) consumeColumns() { + ok := e.consumeToken("(") + if !ok { + return + } + fmt.Printf("%d: %s\n", e.pos, string(e.input[e.pos:])) + for { + isLast := e.consumeColumn() + fmt.Printf("%d: %s\n", e.pos, string(e.input[e.pos:])) + if isLast { + break + } + } +} + +func (e *ViewSelectStatementExtractor) consumeColumn() (isLast bool) { + e.consumeSpace() + e.consumeID() + if e.input[e.pos-1] == ')' { + isLast = true + } + e.consumeSpace() + ok := e.consumeToken("projection policy") + if ok { + e.consumeSpace() + e.consumeID() + if e.input[e.pos-1] == ')' { + isLast = true + } + e.consumeSpace() + } + ok = e.consumeToken("masking policy") + if ok { + e.consumeSpace() + e.consumeID() + e.consumeSpace() + e.consumeToken("using") + e.consumeSpace() + fmt.Printf("%d: %s\n", e.pos, string(e.input[e.pos:])) + e.consumeIdentifierList() + if string(e.input[e.pos-2:e.pos]) == "))" { + isLast = true + } + e.consumeSpace() + } + return +} + +func (e *ViewSelectStatementExtractor) consumeRowAccessPolicy() { ok := e.consumeToken("row access policy") if !ok { return @@ -69,11 +119,11 @@ func (e *ViewSelectStatementExtractor) extractRowAccessPolicy() { e.consumeSpace() e.consumeToken("on") e.consumeSpace() - e.extractIdentifierList() + e.consumeIdentifierList() e.consumeSpace() } -func (e *ViewSelectStatementExtractor) extractAggregationPolicy() { +func (e *ViewSelectStatementExtractor) consumeAggregationPolicy() { ok := e.consumeToken("aggregation policy") if !ok { return @@ -83,11 +133,11 @@ func (e *ViewSelectStatementExtractor) extractAggregationPolicy() { e.consumeSpace() e.consumeToken("entity key") e.consumeSpace() - e.extractIdentifierList() + e.consumeIdentifierList() e.consumeSpace() } -func (e *ViewSelectStatementExtractor) extractIdentifierList() { +func (e *ViewSelectStatementExtractor) consumeIdentifierList() { e.consumeSpace() if !e.consumeToken("(") { return @@ -95,7 +145,7 @@ func (e 
*ViewSelectStatementExtractor) extractIdentifierList() { for { e.consumeSpace() e.consumeID() - if e.input[e.pos-1] == ')' { + if e.input[e.pos-1] == ')' || strings.HasSuffix(string(e.input[e.pos-2:e.pos]), "),") { break } e.consumeSpace() diff --git a/pkg/snowflake/parser_test.go b/pkg/snowflake/parser_test.go index efa03aadb6..e4b0f1627e 100644 --- a/pkg/snowflake/parser_test.go +++ b/pkg/snowflake/parser_test.go @@ -37,6 +37,7 @@ from bar;` issue2640 := `CREATE OR REPLACE SECURE VIEW "CLASSIFICATION" comment = 'Classification View of the union of classification tables' AS select * from AB1_SUBSCRIPTION.CLASSIFICATION.CLASSIFICATION union select * from AB2_SUBSCRIPTION.CLASSIFICATION.CLASSIFICATION` withRowAccessAndAggregationPolicy := `CREATE SECURE VIEW "rgdxfmnfhh"."PUBLIC"."rgdxfmnfhh" COMMENT = 'Terraform test resource' ROW ACCESS policy rap on (title, title2) AGGREGATION POLICY rap AS SELECT ROLE_NAME, ROLE_OWNER FROM INFORMATION_SCHEMA.APPLICABLE_ROLES` withRowAccessAndAggregationPolicyWithEntityKey := `CREATE SECURE VIEW "rgdxfmnfhh"."PUBLIC"."rgdxfmnfhh" COMMENT = 'Terraform test resource' ROW ACCESS policy rap on (title, title2) AGGREGATION POLICY rap ENTITY KEY (foo, bar) AS SELECT ROLE_NAME, ROLE_OWNER FROM INFORMATION_SCHEMA.APPLICABLE_ROLES` + allFields := `CREATE OR REPLACE SECURE TEMPORARY VIEW "rgdxfmnfhh"."PUBLIC"."rgdxfmnfhh" (id MASKING POLICY mp USING ("col1", "cond1") PROJECTION POLICY pp COMMENT = 'asdf', foo MASKING POLICY mp USING ("col1", "cond1")) COMMENT = 'Terraform test resource' ROW ACCESS policy rap on (title, title2) AGGREGATION POLICY rap ENTITY KEY (foo, bar) AS SELECT ROLE_NAME, ROLE_OWNER FROM INFORMATION_SCHEMA.APPLICABLE_ROLES` type args struct { input string } @@ -64,6 +65,7 @@ from bar;` {"issue2640", args{issue2640}, "select * from AB1_SUBSCRIPTION.CLASSIFICATION.CLASSIFICATION union select * from AB2_SUBSCRIPTION.CLASSIFICATION.CLASSIFICATION", false}, {"with row access policy and aggregation policy", 
args{withRowAccessAndAggregationPolicy}, "SELECT ROLE_NAME, ROLE_OWNER FROM INFORMATION_SCHEMA.APPLICABLE_ROLES", false}, {"with row access policy and aggregation policy with entity key", args{withRowAccessAndAggregationPolicyWithEntityKey}, "SELECT ROLE_NAME, ROLE_OWNER FROM INFORMATION_SCHEMA.APPLICABLE_ROLES", false}, + {"all fields", args{allFields}, "SELECT ROLE_NAME, ROLE_OWNER FROM INFORMATION_SCHEMA.APPLICABLE_ROLES", false}, } for _, tt := range tests { tt := tt From 69878806e2edebfa8291520ae54298873326557b Mon Sep 17 00:00:00 2001 From: Jakub Michalak Date: Wed, 28 Aug 2024 18:57:49 +0200 Subject: [PATCH 08/13] Cleanup --- ...metric_function_references_gen_integration_test.go | 11 ----------- pkg/snowflake/parser.go | 3 --- 2 files changed, 14 deletions(-) delete mode 100644 pkg/sdk/data_metric_function_references_gen_integration_test.go diff --git a/pkg/sdk/data_metric_function_references_gen_integration_test.go b/pkg/sdk/data_metric_function_references_gen_integration_test.go deleted file mode 100644 index c855280322..0000000000 --- a/pkg/sdk/data_metric_function_references_gen_integration_test.go +++ /dev/null @@ -1,11 +0,0 @@ -package sdk - -import "testing" - -func TestInt_DataMetricFunctionReferences(t *testing.T) { - // TODO: prepare common resources - - t.Run("GetForEntity", func(t *testing.T) { - // TODO: fill me - }) -} diff --git a/pkg/snowflake/parser.go b/pkg/snowflake/parser.go index 3b59047018..2913e93a7b 100644 --- a/pkg/snowflake/parser.go +++ b/pkg/snowflake/parser.go @@ -66,10 +66,8 @@ func (e *ViewSelectStatementExtractor) consumeColumns() { if !ok { return } - fmt.Printf("%d: %s\n", e.pos, string(e.input[e.pos:])) for { isLast := e.consumeColumn() - fmt.Printf("%d: %s\n", e.pos, string(e.input[e.pos:])) if isLast { break } @@ -99,7 +97,6 @@ func (e *ViewSelectStatementExtractor) consumeColumn() (isLast bool) { e.consumeSpace() e.consumeToken("using") e.consumeSpace() - fmt.Printf("%d: %s\n", e.pos, string(e.input[e.pos:])) 
e.consumeIdentifierList() if string(e.input[e.pos-2:e.pos]) == "))" { isLast = true From 79bab0b3f418196493ef8afcff4206ddcee18c60 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jan=20Cie=C5=9Blak?= Date: Fri, 30 Aug 2024 09:56:41 +0200 Subject: [PATCH 09/13] wip --- docs/data-sources/views.md | 77 ++++- docs/resources/view.md | 57 +++- pkg/datasources/streamlits.go | 2 +- pkg/datasources/views.go | 201 +++++++++--- pkg/datasources/views_acceptance_test.go | 109 +++++-- .../testdata/TestAcc_View/basic/test.tf | 7 + .../testdata/TestAcc_View/basic/variables.tf | 4 + .../TestAcc_View/basic_copy_grants/test.tf | 15 + .../basic_copy_grants/variables.tf | 27 ++ .../TestAcc_View/basic_is_recursive/test.tf | 14 + .../basic_is_recursive/variables.tf | 23 ++ .../TestAcc_View/basic_update/test.tf | 14 +- .../TestAcc_View/basic_update/variables.tf | 8 + .../testdata/TestAcc_View/complete/test.tf | 11 +- pkg/resources/view.go | 189 ++++++----- pkg/resources/view_acceptance_test.go | 298 +++++++++++++----- .../data_metric_function_references_def.go | 1 + 17 files changed, 813 insertions(+), 244 deletions(-) create mode 100644 pkg/resources/testdata/TestAcc_View/basic_copy_grants/test.tf create mode 100644 pkg/resources/testdata/TestAcc_View/basic_copy_grants/variables.tf create mode 100644 pkg/resources/testdata/TestAcc_View/basic_is_recursive/test.tf create mode 100644 pkg/resources/testdata/TestAcc_View/basic_is_recursive/variables.tf diff --git a/docs/data-sources/views.md b/docs/data-sources/views.md index 95eb43bbe3..9c61f6d351 100644 --- a/docs/data-sources/views.md +++ b/docs/data-sources/views.md @@ -2,12 +2,12 @@ page_title: "snowflake_views Data Source - terraform-provider-snowflake" subcategory: "" description: |- - + Datasource used to get details of filtered views. Filtering is aligned with the current possibilities for SHOW VIEWS https://docs.snowflake.com/en/sql-reference/sql/show-views query (only like is supported). 
The results of SHOW and DESCRIBE are encapsulated in one output collection views. --- # snowflake_views (Data Source) - +Datasource used to get details of filtered views. Filtering is aligned with the current possibilities for [SHOW VIEWS](https://docs.snowflake.com/en/sql-reference/sql/show-views) query (only `like` is supported). The results of SHOW and DESCRIBE are encapsulated in one output collection `views`. ## Example Usage @@ -21,22 +21,83 @@ data "snowflake_views" "current" { ## Schema -### Required +### Optional -- `database` (String) The database from which to return the schemas from. -- `schema` (String) The schema from which to return the views from. +- `in` (Block List, Max: 1) IN clause to filter the list of views (see [below for nested schema](#nestedblock--in)) +- `like` (String) Filters the output with **case-insensitive** pattern, with support for SQL wildcard characters (`%` and `_`). +- `limit` (Block List, Max: 1) Limits the number of rows returned. If the `limit.from` is set, then the limit will start from the first element matched by the expression. The expression is only used to match with the first element, later on the elements are not matched by the prefix, but you can enforce a certain pattern with `starts_with` or `like`. (see [below for nested schema](#nestedblock--limit)) +- `starts_with` (String) Filters the output with **case-sensitive** characters indicating the beginning of the object name. +- `with_describe` (Boolean) Runs DESC VIEW for each view returned by SHOW VIEWS. The output of describe is saved to the description field. By default this value is set to true. ### Read-Only - `id` (String) The ID of this resource. -- `views` (List of Object) The views in the schema (see [below for nested schema](#nestedatt--views)) +- `views` (List of Object) Holds the aggregated output of all views details queries.
(see [below for nested schema](#nestedatt--views)) + + +### Nested Schema for `in` + +Optional: + +- `account` (Boolean) Returns records for the entire account. +- `database` (String) Returns records for the current database in use or for a specified database. +- `schema` (String) Returns records for the current schema in use or a specified schema. Use fully qualified name. + + + +### Nested Schema for `limit` + +Required: + +- `rows` (Number) The maximum number of rows to return. + +Optional: + +- `from` (String) Specifies a **case-sensitive** pattern that is used to match object name. After the first match, the limit on the number of rows will be applied. + ### Nested Schema for `views` Read-Only: +- `describe_output` (List of Object) (see [below for nested schema](#nestedobjatt--views--describe_output)) +- `show_output` (List of Object) (see [below for nested schema](#nestedobjatt--views--show_output)) + + +### Nested Schema for `views.describe_output` + +Read-Only: + +- `check` (String) +- `comment` (String) +- `default` (String) +- `expression` (String) +- `is_nullable` (Boolean) +- `is_primary` (Boolean) +- `is_unique` (Boolean) +- `kind` (String) +- `name` (String) +- `policy_name` (String) +- `privacy_domain` (String) +- `type` (String) + + + +### Nested Schema for `views.show_output` + +Read-Only: + +- `change_tracking` (String) - `comment` (String) -- `database` (String) +- `created_on` (String) +- `database_name` (String) +- `is_materialized` (Boolean) +- `is_secure` (Boolean) +- `kind` (String) - `name` (String) -- `schema` (String) +- `owner` (String) +- `owner_role_type` (String) +- `reserved` (String) +- `schema_name` (String) +- `text` (String) diff --git a/docs/resources/view.md b/docs/resources/view.md index e3f4c3dc52..21d9a37b3a 100644 --- a/docs/resources/view.md +++ b/docs/resources/view.md @@ -38,7 +38,7 @@ resource "snowflake_view" "view" { select * from foo; SQL } -# resource with attached policies and data metric functions +# resource with 
attached policies, columns and data metric functions resource "snowflake_view" "test" { database = "database" schema = "schema" @@ -47,6 +47,23 @@ resource "snowflake_view" "test" { is_secure = "true" change_tracking = "true" is_temporary = "true" + column { + column_name = "id" + comment = "column comment" + + } + column { + column_name = "address" + projection_policy { + policy_name = "projection_policy" + } + + masking_policy { + policy_name = "masking_policy" + using = ["address"] + } + + } row_access_policy { policy_name = "row_access_policy" on = ["id"] @@ -63,7 +80,7 @@ resource "snowflake_view" "test" { using_cron = "15 * * * * UTC" } statement = <<-SQL - SELECT id FROM TABLE; + SELECT id, address FROM TABLE; SQL } ``` @@ -84,6 +101,7 @@ SQL - `aggregation_policy` (Block List, Max: 1) Specifies the aggregation policy to set on a view. (see [below for nested schema](#nestedblock--aggregation_policy)) - `change_tracking` (String) Specifies to enable or disable change tracking on the table. Available options are: "true" or "false". When the value is not set in the configuration the provider will put "default" there which means to use the Snowflake default for this value. +- `column` (Block List) If you want to change the name of a column or add a comment to a column in the new view, include a column list that specifies the column names and (if needed) comments about the columns. (You do not need to specify the data types of the columns.) (see [below for nested schema](#nestedblock--column)) - `comment` (String) Specifies a comment for the view. - `copy_grants` (Boolean) Retains the access permissions from the original view when a new view is created using the OR REPLACE clause. - `data_metric_function` (Block Set) Data metric functions used for the view. (see [below for nested schema](#nestedblock--data_metric_function)) @@ -112,6 +130,40 @@ Optional: - `entity_key` (Set of String) Defines which columns uniquely identify an entity within the view. 
+ +### Nested Schema for `column` + +Required: + +- `column_name` (String) Specifies affected column name. + +Optional: + +- `comment` (String) Specifies a comment for the column. +- `masking_policy` (Block List, Max: 1) (see [below for nested schema](#nestedblock--column--masking_policy)) +- `projection_policy` (Block List, Max: 1) (see [below for nested schema](#nestedblock--column--projection_policy)) + + +### Nested Schema for `column.masking_policy` + +Required: + +- `policy_name` (String) Specifies the masking policy to set on a column. + +Optional: + +- `using` (List of String) Specifies the arguments to pass into the conditional masking policy SQL expression. The first column in the list specifies the column for the policy conditions to mask or tokenize the data and must match the column to which the masking policy is set. The additional columns specify the columns to evaluate to determine whether to mask or tokenize the data in each row of the query result when a query is made on the first column. If the USING clause is omitted, Snowflake treats the conditional masking policy as a normal masking policy. + + + +### Nested Schema for `column.projection_policy` + +Required: + +- `policy_name` (String) Specifies the projection policy to set on a column. + + + ### Nested Schema for `data_metric_function` @@ -119,6 +171,7 @@ Required: - `function_name` (String) Identifier of the data metric function to add to the table or view or drop from the table or view. This function identifier must be provided without arguments in parenthesis. - `on` (Set of String) The table or view columns on which to associate the data metric function. The data types of the columns must match the data types of the columns specified in the data metric function definition. +- `schedule_status` (String) The status of the metrics association. Valid values are: `STARTED` | `SUSPENDED`. 
When status of a data metric function is changed, it is being reassigned with `DROP DATA METRIC FUNCTION` and `ADD DATA METRIC FUNCTION`, and then its status is changed by `MODIFY DATA METRIC FUNCTION` diff --git a/pkg/datasources/streamlits.go b/pkg/datasources/streamlits.go index ef71fb8c62..4fed061263 100644 --- a/pkg/datasources/streamlits.go +++ b/pkg/datasources/streamlits.go @@ -88,7 +88,7 @@ var streamlitsSchema = map[string]*schema.Schema{ resources.DescribeOutputAttributeName: { Type: schema.TypeList, Computed: true, - Description: "Holds the output of DESCRIBE STREAMLITS.", + Description: "Holds the output of DESCRIBE STREAMLIT.", Elem: &schema.Resource{ Schema: schemas.DescribeStreamlitSchema, }, diff --git a/pkg/datasources/views.go b/pkg/datasources/views.go index 2d17aa7d59..c9b063589d 100644 --- a/pkg/datasources/views.go +++ b/pkg/datasources/views.go @@ -2,48 +2,102 @@ package datasources import ( "context" - "log" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/internal/provider" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/resources" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/schemas" + "github.com/hashicorp/terraform-plugin-sdk/v2/diag" - "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/helpers" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" ) var viewsSchema = map[string]*schema.Schema{ - "database": { + "with_describe": { + Type: schema.TypeBool, + Optional: true, + Default: true, + Description: "Runs DESC VIEW for each view returned by SHOW VIEWS. The output of describe is saved to the description field. 
By default this value is set to true.", + }, + "in": { + Type: schema.TypeList, + Optional: true, + Description: "IN clause to filter the list of views", + MaxItems: 1, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "account": { + Type: schema.TypeBool, + Optional: true, + Description: "Returns records for the entire account.", + ExactlyOneOf: []string{"in.0.account", "in.0.database", "in.0.schema"}, + }, + "database": { + Type: schema.TypeString, + Optional: true, + Description: "Returns records for the current database in use or for a specified database.", + ExactlyOneOf: []string{"in.0.account", "in.0.database", "in.0.schema"}, + }, + "schema": { + Type: schema.TypeString, + Optional: true, + Description: "Returns records for the current schema in use or a specified schema. Use fully qualified name.", + ExactlyOneOf: []string{"in.0.account", "in.0.database", "in.0.schema"}, + }, + }, + }, + }, + "like": { Type: schema.TypeString, - Required: true, - Description: "The database from which to return the schemas from.", + Optional: true, + Description: "Filters the output with **case-insensitive** pattern, with support for SQL wildcard characters (`%` and `_`).", }, - "schema": { + "starts_with": { Type: schema.TypeString, - Required: true, - Description: "The schema from which to return the views from.", + Optional: true, + Description: "Filters the output with **case-sensitive** characters indicating the beginning of the object name.", }, - "views": { + "limit": { Type: schema.TypeList, - Computed: true, - Description: "The views in the schema", + Optional: true, + Description: "Limits the number of rows returned. If the `limit.from` is set, then the limit will start from the first element matched by the expression.
The expression is only used to match with the first element, later on the elements are not matched by the prefix, but you can enforce a certain pattern with `starts_with` or `like`.", + MaxItems: 1, Elem: &schema.Resource{ Schema: map[string]*schema.Schema{ - "name": { - Type: schema.TypeString, - Computed: true, + "rows": { + Type: schema.TypeInt, + Required: true, + Description: "The maximum number of rows to return.", }, - "database": { - Type: schema.TypeString, - Computed: true, + "from": { + Type: schema.TypeString, + Optional: true, + Description: "Specifies a **case-sensitive** pattern that is used to match object name. After the first match, the limit on the number of rows will be applied.", }, - "schema": { - Type: schema.TypeString, - Computed: true, + }, + }, + }, + "views": { + Type: schema.TypeList, + Computed: true, + Description: "Holds the aggregated output of all views details queries.", + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + resources.ShowOutputAttributeName: { + Type: schema.TypeList, + Computed: true, + Description: "Holds the output of SHOW VIEWS.", + Elem: &schema.Resource{ + Schema: schemas.ShowViewSchema, + }, }, - "comment": { - Type: schema.TypeString, - Optional: true, - Computed: true, + resources.DescribeOutputAttributeName: { + Type: schema.TypeList, + Computed: true, + Description: "Holds the output of DESCRIBE VIEW.", + Elem: &schema.Resource{ + Schema: schemas.ViewDescribeSchema, + }, }, }, }, @@ -52,38 +106,91 @@ var viewsSchema = map[string]*schema.Schema{ func Views() *schema.Resource { return &schema.Resource{ - Read: ReadViews, - Schema: viewsSchema, + ReadContext: ReadViews, + Schema: viewsSchema, + Description: "Datasource used to get details of filtered views. Filtering is aligned with the current possibilities for [SHOW VIEWS](https://docs.snowflake.com/en/sql-reference/sql/show-views) query (only `like` is supported). 
The results of SHOW and DESCRIBE are encapsulated in one output collection `views`.", } } -func ReadViews(d *schema.ResourceData, meta interface{}) error { +func ReadViews(ctx context.Context, d *schema.ResourceData, meta any) diag.Diagnostics { client := meta.(*provider.Context).Client - ctx := context.Background() - databaseName := d.Get("database").(string) - schemaName := d.Get("schema").(string) - - schemaId := sdk.NewDatabaseObjectIdentifier(databaseName, schemaName) - extractedViews, err := client.Views.Show(ctx, sdk.NewShowViewRequest().WithIn( - sdk.ExtendedIn{In: sdk.In{Schema: schemaId}}, - )) + req := sdk.NewShowViewRequest() + + if v, ok := d.GetOk("in"); ok { + in := v.([]any)[0].(map[string]any) + if v, ok := in["account"]; ok && v.(bool) { + req.WithIn(sdk.ExtendedIn{In: sdk.In{Account: sdk.Bool(true)}}) + } + if v, ok := in["database"]; ok { + database := v.(string) + if database != "" { + req.WithIn(sdk.ExtendedIn{In: sdk.In{Database: sdk.NewAccountObjectIdentifier(database)}}) + } + } + if v, ok := in["schema"]; ok { + schema := v.(string) + if schema != "" { + schemaId, err := sdk.ParseDatabaseObjectIdentifier(schema) + if err != nil { + return diag.FromErr(err) + } + req.WithIn(sdk.ExtendedIn{In: sdk.In{Schema: schemaId}}) + } + } + } + + if likePattern, ok := d.GetOk("like"); ok { + req.WithLike(sdk.Like{ + Pattern: sdk.String(likePattern.(string)), + }) + } + + if v, ok := d.GetOk("starts_with"); ok { + req.WithStartsWith(v.(string)) + } + + if v, ok := d.GetOk("limit"); ok { + l := v.([]interface{})[0].(map[string]any) + limit := sdk.LimitFrom{} + if v, ok := l["rows"]; ok { + rows := v.(int) + limit.Rows = sdk.Int(rows) + } + if v, ok := l["from"]; ok { + from := v.(string) + limit.From = sdk.String(from) + } + req.WithLimit(limit) + } + + views, err := client.Views.Show(ctx, req) if err != nil { - log.Printf("[DEBUG] failed when searching views in schema (%s), err = %s", schemaId.FullyQualifiedName(), err.Error()) - d.SetId("") - return 
nil + return diag.FromErr(err) } - views := make([]map[string]any, len(extractedViews)) + d.SetId("views_read") + + flattenedViews := make([]map[string]any, len(views)) + for i, view := range views { + view := view + var viewDescriptions []map[string]any + if d.Get("with_describe").(bool) { + describeOutput, err := client.Views.Describe(ctx, view.ID()) + if err != nil { + return diag.FromErr(err) + } + viewDescriptions = schemas.ViewDescriptionToSchema(describeOutput) + } - for i, view := range extractedViews { - views[i] = map[string]any{ - "name": view.Name, - "database": view.DatabaseName, - "schema": view.SchemaName, - "comment": view.Comment, + flattenedViews[i] = map[string]any{ + resources.ShowOutputAttributeName: []map[string]any{schemas.ViewToSchema(&view)}, + resources.DescribeOutputAttributeName: viewDescriptions, } } - d.SetId(helpers.EncodeSnowflakeID(databaseName, schemaName)) - return d.Set("views", views) + if err := d.Set("views", flattenedViews); err != nil { + return diag.FromErr(err) + } + + return nil } diff --git a/pkg/datasources/views_acceptance_test.go b/pkg/datasources/views_acceptance_test.go index 7edce8f234..ed4cb48a3a 100644 --- a/pkg/datasources/views_acceptance_test.go +++ b/pkg/datasources/views_acceptance_test.go @@ -6,8 +6,6 @@ import ( acc "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/testenvs" - "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk" - "github.com/hashicorp/terraform-plugin-testing/helper/resource" "github.com/hashicorp/terraform-plugin-testing/tfversion" ) @@ -16,7 +14,12 @@ import ( func TestAcc_Views(t *testing.T) { t.Setenv(string(testenvs.ConfigureClientOnce), "") - viewId := acc.TestClient().Ids.RandomSchemaObjectIdentifier() + schemaId := acc.TestClient().Ids.RandomDatabaseObjectIdentifier() + + viewNamePrefix := acc.TestClient().Ids.Alpha() + viewName := viewNamePrefix + "1" + 
acc.TestClient().Ids.Alpha() + viewName2 := viewNamePrefix + "2" + acc.TestClient().Ids.Alpha() + resource.Test(t, resource.TestCase{ ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, PreCheck: func() { acc.TestAccPreCheck(t) }, @@ -26,32 +29,98 @@ func TestAcc_Views(t *testing.T) { CheckDestroy: nil, Steps: []resource.TestStep{ { - Config: views(viewId), - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr("data.snowflake_views.v", "database", viewId.DatabaseName()), - resource.TestCheckResourceAttr("data.snowflake_views.v", "schema", viewId.SchemaName()), - resource.TestCheckResourceAttrSet("data.snowflake_views.v", "views.#"), - resource.TestCheckResourceAttr("data.snowflake_views.v", "views.#", "1"), - resource.TestCheckResourceAttr("data.snowflake_views.v", "views.0.name", viewId.Name()), + Config: views(acc.TestDatabaseName, acc.TestSchemaName, schemaId.Name(), viewName, viewName2, viewNamePrefix+"%"), + Check: resource.ComposeAggregateTestCheckFunc( + resource.TestCheckResourceAttr("data.snowflake_views.in_schema", "views.#", "1"), + + resource.TestCheckResourceAttr("data.snowflake_views.in_schema", "views.0.show_output.0.name", viewName2), + resource.TestCheckResourceAttrSet("data.snowflake_views.in_schema", "views.0.show_output.0.created_on"), + resource.TestCheckResourceAttr("data.snowflake_views.in_schema", "views.0.show_output.0.kind", ""), + resource.TestCheckResourceAttr("data.snowflake_views.in_schema", "views.0.show_output.0.reserved", ""), + resource.TestCheckResourceAttr("data.snowflake_views.in_schema", "views.0.show_output.0.database_name", schemaId.DatabaseName()), + resource.TestCheckResourceAttr("data.snowflake_views.in_schema", "views.0.show_output.0.schema_name", schemaId.Name()), + resource.TestCheckResourceAttrSet("data.snowflake_views.in_schema", "views.0.show_output.0.owner"), + resource.TestCheckResourceAttr("data.snowflake_views.in_schema", "views.0.show_output.0.comment", ""), + 
resource.TestCheckResourceAttrSet("data.snowflake_views.in_schema", "views.0.show_output.0.text"), + resource.TestCheckResourceAttr("data.snowflake_views.in_schema", "views.0.show_output.0.is_secure", "false"), + resource.TestCheckResourceAttr("data.snowflake_views.in_schema", "views.0.show_output.0.is_materialized", "false"), + resource.TestCheckResourceAttr("data.snowflake_views.in_schema", "views.0.show_output.0.owner_role_type", "ROLE"), + resource.TestCheckResourceAttr("data.snowflake_views.in_schema", "views.0.show_output.0.change_tracking", "OFF"), + + resource.TestCheckResourceAttr("data.snowflake_views.in_schema", "views.0.describe_output.#", "2"), + resource.TestCheckResourceAttr("data.snowflake_views.in_schema", "views.0.describe_output.0.name", "ROLE_NAME"), + resource.TestCheckResourceAttrSet("data.snowflake_views.in_schema", "views.0.describe_output.0.type"), + resource.TestCheckResourceAttr("data.snowflake_views.in_schema", "views.0.describe_output.0.kind", "COLUMN"), + resource.TestCheckResourceAttr("data.snowflake_views.in_schema", "views.0.describe_output.0.is_nullable", "true"), + resource.TestCheckResourceAttr("data.snowflake_views.in_schema", "views.0.describe_output.0.default", ""), + resource.TestCheckResourceAttr("data.snowflake_views.in_schema", "views.0.describe_output.0.is_primary", "false"), + resource.TestCheckResourceAttr("data.snowflake_views.in_schema", "views.0.describe_output.0.is_unique", "false"), + resource.TestCheckResourceAttr("data.snowflake_views.in_schema", "views.0.describe_output.0.check", ""), + resource.TestCheckResourceAttr("data.snowflake_views.in_schema", "views.0.describe_output.0.expression", ""), + resource.TestCheckResourceAttr("data.snowflake_views.in_schema", "views.0.describe_output.0.comment", ""), + resource.TestCheckResourceAttr("data.snowflake_views.in_schema", "views.0.describe_output.0.policy_name", ""), + resource.TestCheckNoResourceAttr("data.snowflake_views.in_schema", 
"views.0.describe_output.0.policy_domain"), + resource.TestCheckResourceAttr("data.snowflake_views.in_schema", "views.0.describe_output.1.name", "ROLE_OWNER"), + + resource.TestCheckResourceAttr("data.snowflake_views.filtering", "views.#", "1"), + resource.TestCheckResourceAttr("data.snowflake_views.filtering", "views.0.show_output.0.name", viewName2), ), }, }, }) } -func views(viewId sdk.SchemaObjectIdentifier) string { +func views(databaseName, defaultSchemaName, schemaName, view1Name, view2Name, viewPrefix string) string { return fmt.Sprintf(` - resource snowflake_view "v"{ - name = "%v" - schema = "%v" - database = "%v" + resource snowflake_schema "test" { + database = "%[1]v" + name = "%[3]v" + } + + resource snowflake_view "v1"{ + database = "%[1]v" + schema = "%[2]v" + name = "%[4]v" + statement = "SELECT ROLE_NAME, ROLE_OWNER FROM INFORMATION_SCHEMA.APPLICABLE_ROLES where ROLE_OWNER like 'foo%%'" + column { + column_name = "ROLE_NAME" + } + column { + column_name = "ROLE_OWNER" + } + } + + resource snowflake_view "v2"{ + database = snowflake_schema.test.database + schema = snowflake_schema.test.name + name = "%[5]v" statement = "SELECT ROLE_NAME, ROLE_OWNER FROM INFORMATION_SCHEMA.APPLICABLE_ROLES where ROLE_OWNER like 'foo%%'" + column { + column_name = "ROLE_NAME" + } + column { + column_name = "ROLE_OWNER" + } + } + + data snowflake_views "in_schema" { + depends_on = [ snowflake_view.v1, snowflake_view.v2 ] + in { + schema = snowflake_schema.test.fully_qualified_name + } } - data snowflake_views "v" { - database = snowflake_view.v.database - schema = snowflake_view.v.schema - depends_on = [snowflake_view.v] + data snowflake_views "filtering" { + depends_on = [ snowflake_view.v1, snowflake_view.v2 ] + in { + database = snowflake_schema.test.database + } + like = "%[6]v" + starts_with = trimsuffix("%[6]v", "%%") + limit { + rows = 1 + from = snowflake_view.v1.name + } } - `, viewId.Name(), viewId.SchemaName(), viewId.DatabaseName()) + `, databaseName, 
defaultSchemaName, schemaName, view1Name, view2Name, viewPrefix) } diff --git a/pkg/resources/testdata/TestAcc_View/basic/test.tf b/pkg/resources/testdata/TestAcc_View/basic/test.tf index 74efa22d33..905a79328a 100644 --- a/pkg/resources/testdata/TestAcc_View/basic/test.tf +++ b/pkg/resources/testdata/TestAcc_View/basic/test.tf @@ -3,4 +3,11 @@ resource "snowflake_view" "test" { database = var.database schema = var.schema statement = var.statement + + dynamic "column" { + for_each = var.columns + content { + column_name = column.value["column_name"] + } + } } diff --git a/pkg/resources/testdata/TestAcc_View/basic/variables.tf b/pkg/resources/testdata/TestAcc_View/basic/variables.tf index 5b5810d23d..2219f130a5 100644 --- a/pkg/resources/testdata/TestAcc_View/basic/variables.tf +++ b/pkg/resources/testdata/TestAcc_View/basic/variables.tf @@ -13,3 +13,7 @@ variable "schema" { variable "statement" { type = string } + +variable "columns" { + type = set(map(string)) +} diff --git a/pkg/resources/testdata/TestAcc_View/basic_copy_grants/test.tf b/pkg/resources/testdata/TestAcc_View/basic_copy_grants/test.tf new file mode 100644 index 0000000000..cdc7295f9d --- /dev/null +++ b/pkg/resources/testdata/TestAcc_View/basic_copy_grants/test.tf @@ -0,0 +1,15 @@ +resource "snowflake_view" "test" { + name = var.name + database = var.database + schema = var.schema + statement = var.statement + copy_grants = var.copy_grants + is_secure = var.is_secure + + dynamic "column" { + for_each = var.columns + content { + column_name = column.value["column_name"] + } + } +} diff --git a/pkg/resources/testdata/TestAcc_View/basic_copy_grants/variables.tf b/pkg/resources/testdata/TestAcc_View/basic_copy_grants/variables.tf new file mode 100644 index 0000000000..86e63c4564 --- /dev/null +++ b/pkg/resources/testdata/TestAcc_View/basic_copy_grants/variables.tf @@ -0,0 +1,27 @@ +variable "name" { + type = string +} + +variable "database" { + type = string +} + +variable "schema" { + type = string +} 
+ +variable "statement" { + type = string +} + +variable "copy_grants" { + type = bool +} + +variable "is_secure" { + type = bool +} + +variable "columns" { + type = set(map(string)) +} diff --git a/pkg/resources/testdata/TestAcc_View/basic_is_recursive/test.tf b/pkg/resources/testdata/TestAcc_View/basic_is_recursive/test.tf new file mode 100644 index 0000000000..42308b4d48 --- /dev/null +++ b/pkg/resources/testdata/TestAcc_View/basic_is_recursive/test.tf @@ -0,0 +1,14 @@ +resource "snowflake_view" "test" { + name = var.name + database = var.database + schema = var.schema + statement = var.statement + is_recursive = var.is_recursive + + dynamic "column" { + for_each = var.columns + content { + column_name = column.value["column_name"] + } + } +} diff --git a/pkg/resources/testdata/TestAcc_View/basic_is_recursive/variables.tf b/pkg/resources/testdata/TestAcc_View/basic_is_recursive/variables.tf new file mode 100644 index 0000000000..b38898bbcf --- /dev/null +++ b/pkg/resources/testdata/TestAcc_View/basic_is_recursive/variables.tf @@ -0,0 +1,23 @@ +variable "name" { + type = string +} + +variable "database" { + type = string +} + +variable "schema" { + type = string +} + +variable "statement" { + type = string +} + +variable "is_recursive" { + type = bool +} + +variable "columns" { + type = set(map(string)) +} diff --git a/pkg/resources/testdata/TestAcc_View/basic_update/test.tf b/pkg/resources/testdata/TestAcc_View/basic_update/test.tf index e403c93692..2df97a8f62 100644 --- a/pkg/resources/testdata/TestAcc_View/basic_update/test.tf +++ b/pkg/resources/testdata/TestAcc_View/basic_update/test.tf @@ -2,18 +2,26 @@ resource "snowflake_view" "test" { name = var.name database = var.database schema = var.schema + + dynamic "column" { + for_each = var.columns + content { + column_name = column.value["column_name"] + } + } + row_access_policy { policy_name = var.row_access_policy on = var.row_access_policy_on - } aggregation_policy { policy_name = var.aggregation_policy 
entity_key = var.aggregation_policy_entity_key } data_metric_function { - function_name = var.data_metric_function - on = var.data_metric_function_on + function_name = var.data_metric_function + on = var.data_metric_function_on + schedule_status = var.schedule_status } data_metric_schedule { using_cron = var.data_metric_schedule_using_cron diff --git a/pkg/resources/testdata/TestAcc_View/basic_update/variables.tf b/pkg/resources/testdata/TestAcc_View/basic_update/variables.tf index e2da9f2f40..4b814dd06d 100644 --- a/pkg/resources/testdata/TestAcc_View/basic_update/variables.tf +++ b/pkg/resources/testdata/TestAcc_View/basic_update/variables.tf @@ -45,3 +45,11 @@ variable "data_metric_function" { variable "data_metric_function_on" { type = list(string) } + +variable "schedule_status" { + type = string +} + +variable "columns" { + type = set(map(string)) +} diff --git a/pkg/resources/testdata/TestAcc_View/complete/test.tf b/pkg/resources/testdata/TestAcc_View/complete/test.tf index 960e96f371..33c05fdb69 100644 --- a/pkg/resources/testdata/TestAcc_View/complete/test.tf +++ b/pkg/resources/testdata/TestAcc_View/complete/test.tf @@ -7,25 +7,24 @@ resource "snowflake_view" "test" { copy_grants = var.copy_grants change_tracking = var.change_tracking is_temporary = var.is_temporary - columns { + column { column_name = var.column1_name comment = var.column1_comment - } - columns { + column { column_name = var.column2_name projection_policy { policy_name = var.column2_projection_policy } - masking_policy { policy_name = var.column2_masking_policy using = var.column2_masking_policy_using } } data_metric_function { - function_name = var.data_metric_function - on = var.data_metric_function_on + function_name = var.data_metric_function + on = var.data_metric_function_on + schedule_status = "STARTED" } data_metric_schedule { using_cron = var.data_metric_schedule_using_cron diff --git a/pkg/resources/view.go b/pkg/resources/view.go index b384a92569..bf195e6d1a 100644 --- 
a/pkg/resources/view.go +++ b/pkg/resources/view.go @@ -5,6 +5,7 @@ import ( "errors" "fmt" "log" + "slices" "strconv" "strings" @@ -102,19 +103,13 @@ var viewSchema = map[string]*schema.Schema{ }, Description: "The table or view columns on which to associate the data metric function. The data types of the columns must match the data types of the columns specified in the data metric function definition.", }, - // TODO (SNOW-1348118 - next pr) - // "schedule_status": { - // Type: schema.TypeString, - // Optional: true, - // ValidateDiagFunc: sdkValidation(sdk.ToAllowedDataMetricScheduleStatusOption), - // Description: fmt.Sprintf("The status of the metrics association. Valid values are: %v. When status of a data metric function is changed, it is being reassigned with `DROP DATA METRIC FUNCTION` and `ADD DATA METRIC FUNCTION`, and then its status is changed by `MODIFY DATA METRIC FUNCTION` ", possibleValuesListed(sdk.AllAllowedDataMetricScheduleStatusOptions)), - // DiffSuppressFunc: SuppressIfAny(NormalizeAndCompare(sdk.ToAllowedDataMetricScheduleStatusOption), func(_, oldValue, newValue string, _ *schema.ResourceData) bool { - // if newValue == "" { - // return true - // } - // return false - // }), - // }, + "schedule_status": { + Type: schema.TypeString, + Required: true, + ValidateDiagFunc: sdkValidation(sdk.ToAllowedDataMetricScheduleStatusOption), + Description: fmt.Sprintf("The status of the metrics association. Valid values are: %v. 
When status of a data metric function is changed, it is being reassigned with `DROP DATA METRIC FUNCTION` and `ADD DATA METRIC FUNCTION`, and then its status is changed by `MODIFY DATA METRIC FUNCTION` ", possibleValuesListed(sdk.AllAllowedDataMetricScheduleStatusOptions)), + DiffSuppressFunc: SuppressIfAny(NormalizeAndCompare(sdk.ToAllowedDataMetricScheduleStatusOption)), + }, }, }, Description: "Data metric functions used for the view.", @@ -166,7 +161,6 @@ var viewSchema = map[string]*schema.Schema{ DiffSuppressFunc: suppressIdentifierQuoting, Description: "Specifies the masking policy to set on a column.", }, - // TODO: check if is really only 1 arg allowed? "using": { Type: schema.TypeList, Optional: true, @@ -473,35 +467,35 @@ func CreateView(orReplace bool) schema.CreateContextFunc { return diag.FromErr(fmt.Errorf("error adding data matric functions in view %v err = %w", id.Name(), err)) } // TODO (SNOW-1348118 - next pr) - // changeSchedule := make([]sdk.ViewModifyDataMetricFunction, 0, len(addedRaw)) - // for i := range addedRaw { - // if addedRaw[i].ScheduleStatus != "" { - // expectedStatus, err := sdk.ToAllowedDataMetricScheduleStatusOption(addedRaw[i].ScheduleStatus) - // if err != nil { - // return diag.FromErr(err) - // } - // var statusCmd sdk.ViewDataMetricScheduleStatusOperationOption - // switch expectedStatus { - // case sdk.DataMetricScheduleStatusStarted: - // statusCmd = sdk.ViewDataMetricScheduleStatusOperationResume - // case sdk.DataMetricScheduleStatusSuspended: - // statusCmd = sdk.ViewDataMetricScheduleStatusOperationSuspend - // default: - // return diag.FromErr(fmt.Errorf("unexpected data metric function status: %v", expectedStatus)) - // } - // changeSchedule = append(changeSchedule, sdk.ViewModifyDataMetricFunction{ - // DataMetricFunction: addedRaw[i].DataMetricFunction, - // On: addedRaw[i].On, - // ViewDataMetricScheduleStatusOperationOption: statusCmd, - // }) - // } - // } - // if len(changeSchedule) > 0 { - // err = 
client.Views.Alter(ctx, sdk.NewAlterViewRequest(id).WithModifyDataMetricFunction(*sdk.NewViewModifyDataMetricFunctionsRequest(changeSchedule))) - // if err != nil { - // return diag.FromErr(fmt.Errorf("error adding data matric functions in view %v err = %w", id.Name(), err)) - // } - // } + changeSchedule := make([]sdk.ViewModifyDataMetricFunction, 0, len(addedRaw)) + for i := range addedRaw { + if addedRaw[i].ScheduleStatus != "" { + expectedStatus, err := sdk.ToAllowedDataMetricScheduleStatusOption(addedRaw[i].ScheduleStatus) + if err != nil { + return diag.FromErr(err) + } + var statusCmd sdk.ViewDataMetricScheduleStatusOperationOption + switch expectedStatus { + case sdk.DataMetricScheduleStatusStarted: + statusCmd = sdk.ViewDataMetricScheduleStatusOperationResume + case sdk.DataMetricScheduleStatusSuspended: + statusCmd = sdk.ViewDataMetricScheduleStatusOperationSuspend + default: + return diag.FromErr(fmt.Errorf("unexpected data metric function status: %v", expectedStatus)) + } + changeSchedule = append(changeSchedule, sdk.ViewModifyDataMetricFunction{ + DataMetricFunction: addedRaw[i].DataMetricFunction, + On: addedRaw[i].On, + ViewDataMetricScheduleStatusOperationOption: statusCmd, + }) + } + } + if len(changeSchedule) > 0 { + err = client.Views.Alter(ctx, sdk.NewAlterViewRequest(id).WithModifyDataMetricFunction(*sdk.NewViewModifyDataMetricFunctionsRequest(changeSchedule))) + if err != nil { + return diag.FromErr(fmt.Errorf("error adding data matric functions in view %v err = %w", id.Name(), err)) + } + } } return ReadView(false)(ctx, d, meta) @@ -725,21 +719,21 @@ func handleDataMetricFunctions(ctx context.Context, client *sdk.Client, id sdk.S columns = append(columns, v.Name) } // TODO (SNOW-1348118 - next pr) - // var scheduleStatus sdk.DataMetricScheduleStatusOption - // status, err := sdk.ToDataMetricScheduleStatusOption(dmfRef.ScheduleStatus) - // if err != nil { - // return err - // } - // if 
slices.Contains(sdk.AllDataMetricScheduleStatusStartedOptions, status) { - // scheduleStatus = sdk.DataMetricScheduleStatusStarted - // } - // if slices.Contains(sdk.AllDataMetricScheduleStatusSuspendedOptions, status) { - // scheduleStatus = sdk.DataMetricScheduleStatusSuspended - // } + var scheduleStatus sdk.DataMetricScheduleStatusOption + status, err := sdk.ToDataMetricScheduleStatusOption(dmfRef.ScheduleStatus) + if err != nil { + return err + } + if slices.Contains(sdk.AllDataMetricScheduleStatusStartedOptions, status) { + scheduleStatus = sdk.DataMetricScheduleStatusStarted + } + if slices.Contains(sdk.AllDataMetricScheduleStatusSuspendedOptions, status) { + scheduleStatus = sdk.DataMetricScheduleStatusSuspended + } dataMetricFunctions[i] = map[string]any{ - "function_name": dmfName.FullyQualifiedName(), - "on": columns, - // "schedule_status": string(scheduleStatus), + "function_name": dmfName.FullyQualifiedName(), + "on": columns, + "schedule_status": string(scheduleStatus), } schedule = dmfRef.Schedule } @@ -783,10 +777,14 @@ func handleColumns(d *schema.ResourceData, columns []sdk.ViewDetails, policyRefs }) if err == nil { if maskingPolicy.PolicyDb != nil && maskingPolicy.PolicySchema != nil { + var usingArgs []string + if maskingPolicy.RefArgColumnNames != nil { + usingArgs = sdk.ParseCommaSeparatedStringArray(*maskingPolicy.RefArgColumnNames, true) + } columnsRaw[i]["masking_policy"] = []map[string]any{ { "policy_name": sdk.NewSchemaObjectIdentifier(*maskingPolicy.PolicyDb, *maskingPolicy.PolicySchema, maskingPolicy.PolicyName).FullyQualifiedName(), - "using": []string{*maskingPolicy.RefColumnName}, + "using": append([]string{*maskingPolicy.RefColumnName}, usingArgs...), }, } } else { @@ -818,8 +816,7 @@ func extractDataMetricFunctions(v any) (dmfs []ViewDataMetricFunctionConfig, err dmfs = append(dmfs, ViewDataMetricFunctionConfig{ DataMetricFunction: id, On: columns, - // TODO (SNOW-1348118 - next pr) - // ScheduleStatus: 
config["schedule_status"].(string), + ScheduleStatus: config["schedule_status"].(string), }) } return @@ -933,20 +930,37 @@ func UpdateView(ctx context.Context, d *schema.ResourceData, meta any) diag.Diag if d.HasChange("data_metric_function") { old, new := d.GetChange("data_metric_function") removedRaw, addedRaw := old.(*schema.Set).List(), new.(*schema.Set).List() - addedConfig, err := extractDataMetricFunctions(addedRaw) + addedConfigs, err := extractDataMetricFunctions(addedRaw) if err != nil { return diag.FromErr(err) } - removedConfig, err := extractDataMetricFunctions(removedRaw) + removedConfigs, err := extractDataMetricFunctions(removedRaw) if err != nil { return diag.FromErr(err) } - if len(removedConfig) > 0 { - removed := make([]sdk.ViewDataMetricFunction, len(removedConfig)) - for i := range removedConfig { + + addedConfigsCopy := slices.Clone(addedConfigs) + statusChangeConfig := make([]ViewDataMetricFunctionConfig, 0) + + for i, addedConfig := range addedConfigsCopy { + removedConfigDeleteIndex := slices.IndexFunc(removedConfigs, func(removedConfig ViewDataMetricFunctionConfig) bool { + return slices.Equal(addedConfig.On, removedConfig.On) && + addedConfig.DataMetricFunction.FullyQualifiedName() == removedConfig.DataMetricFunction.FullyQualifiedName() && + addedConfig.ScheduleStatus != removedConfig.ScheduleStatus + }) + if removedConfigDeleteIndex != -1 { + addedConfigs = append(addedConfigs[:i], addedConfigs[i+1:]...) + removedConfigs = append(removedConfigs[:removedConfigDeleteIndex], removedConfigs[removedConfigDeleteIndex+1:]...) 
+ statusChangeConfig = append(statusChangeConfig, addedConfigsCopy[i]) + } + } + + if len(removedConfigs) > 0 { + removed := make([]sdk.ViewDataMetricFunction, len(removedConfigs)) + for i := range removedConfigs { removed[i] = sdk.ViewDataMetricFunction{ - DataMetricFunction: removedConfig[i].DataMetricFunction, - On: removedConfig[i].On, + DataMetricFunction: removedConfigs[i].DataMetricFunction, + On: removedConfigs[i].On, } } err := client.Views.Alter(ctx, sdk.NewAlterViewRequest(id).WithDropDataMetricFunction(*sdk.NewViewDropDataMetricFunctionRequest(removed))) @@ -954,12 +968,13 @@ func UpdateView(ctx context.Context, d *schema.ResourceData, meta any) diag.Diag return diag.FromErr(fmt.Errorf("error adding data matric functions in view %v err = %w", id.Name(), err)) } } - if len(addedConfig) > 0 { - added := make([]sdk.ViewDataMetricFunction, len(addedConfig)) - for i := range addedConfig { + + if len(addedConfigs) > 0 { + added := make([]sdk.ViewDataMetricFunction, len(addedConfigs)) + for i := range addedConfigs { added[i] = sdk.ViewDataMetricFunction{ - DataMetricFunction: addedConfig[i].DataMetricFunction, - On: addedConfig[i].On, + DataMetricFunction: addedConfigs[i].DataMetricFunction, + On: addedConfigs[i].On, } } err := client.Views.Alter(ctx, sdk.NewAlterViewRequest(id).WithAddDataMetricFunction(*sdk.NewViewAddDataMetricFunctionRequest(added))) @@ -967,6 +982,36 @@ func UpdateView(ctx context.Context, d *schema.ResourceData, meta any) diag.Diag return diag.FromErr(fmt.Errorf("error adding data matric functions in view %v err = %w", id.Name(), err)) } } + + if len(statusChangeConfig) > 0 { + changeSchedule := make([]sdk.ViewModifyDataMetricFunction, 0, len(statusChangeConfig)) + for i := range statusChangeConfig { + if statusChangeConfig[i].ScheduleStatus != "" { + expectedStatus, err := sdk.ToAllowedDataMetricScheduleStatusOption(statusChangeConfig[i].ScheduleStatus) + if err != nil { + return diag.FromErr(err) + } + var statusCmd 
sdk.ViewDataMetricScheduleStatusOperationOption + switch expectedStatus { + case sdk.DataMetricScheduleStatusStarted: + statusCmd = sdk.ViewDataMetricScheduleStatusOperationResume + case sdk.DataMetricScheduleStatusSuspended: + statusCmd = sdk.ViewDataMetricScheduleStatusOperationSuspend + default: + return diag.FromErr(fmt.Errorf("unexpected data metric function status: %v", expectedStatus)) + } + changeSchedule = append(changeSchedule, sdk.ViewModifyDataMetricFunction{ + DataMetricFunction: statusChangeConfig[i].DataMetricFunction, + On: statusChangeConfig[i].On, + ViewDataMetricScheduleStatusOperationOption: statusCmd, + }) + } + } + err = client.Views.Alter(ctx, sdk.NewAlterViewRequest(id).WithModifyDataMetricFunction(*sdk.NewViewModifyDataMetricFunctionsRequest(changeSchedule))) + if err != nil { + return diag.FromErr(fmt.Errorf("error adding data matric functions in view %v err = %w", id.Name(), err)) + } + } } if d.HasChange("row_access_policy") { diff --git a/pkg/resources/view_acceptance_test.go b/pkg/resources/view_acceptance_test.go index 2b73409f3e..5532e2899b 100644 --- a/pkg/resources/view_acceptance_test.go +++ b/pkg/resources/view_acceptance_test.go @@ -5,6 +5,8 @@ import ( "regexp" "testing" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/helpers/random" + acc "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance" accconfig "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/bettertestspoc/config" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/testenvs" @@ -45,7 +47,8 @@ func TestAcc_View_basic(t *testing.T) { functionId := sdk.NewSchemaObjectIdentifier("SNOWFLAKE", "CORE", "AVG") function2Id := sdk.NewSchemaObjectIdentifier("SNOWFLAKE", "CORE", "MAX") - cron, cron2 := "10 * * * * UTC", "20 * * * * UTC" + cron := "10 * * * * UTC" + cron2 := "20 * * * * UTC" id := acc.TestClient().Ids.RandomSchemaObjectIdentifier() resourceId := helpers.EncodeResourceIdentifier(id) 
@@ -56,27 +59,52 @@ func TestAcc_View_basic(t *testing.T) { t.Cleanup(tableCleanup) statement := fmt.Sprintf("SELECT id, foo FROM %s", table.ID().FullyQualifiedName()) otherStatement := fmt.Sprintf("SELECT foo, id FROM %s", table.ID().FullyQualifiedName()) - comment := "Terraform test resource'" + comment := random.Comment() - viewModel := model.View("test", id.DatabaseName(), id.Name(), id.SchemaName(), statement) - viewModelWithDependency := model.View("test", id.DatabaseName(), id.Name(), id.SchemaName(), statement) + // generators currently don't handle lists, so use the old way + basicView := func(configStatement string) config.Variables { + return config.Variables{ + "name": config.StringVariable(id.Name()), + "database": config.StringVariable(id.DatabaseName()), + "schema": config.StringVariable(id.SchemaName()), + "statement": config.StringVariable(configStatement), + "columns": config.SetVariable( + config.MapVariable(map[string]config.Variable{ + "column_name": config.StringVariable("ID"), + }), + config.MapVariable(map[string]config.Variable{ + "column_name": config.StringVariable("FOO"), + }), + ), + } + } + basicViewWithIsRecursive := basicView(otherStatement) + basicViewWithIsRecursive["is_recursive"] = config.BoolVariable(true) // generators currently don't handle lists, so use the old way basicUpdate := func(rap, ap, functionId sdk.SchemaObjectIdentifier, statement, cron string, scheduleStatus sdk.DataMetricScheduleStatusOption) config.Variables { return config.Variables{ - "name": config.StringVariable(id.Name()), - "database": config.StringVariable(id.DatabaseName()), - "schema": config.StringVariable(id.SchemaName()), - "statement": config.StringVariable(statement), - "row_access_policy": config.StringVariable(rap.FullyQualifiedName()), - "row_access_policy_on": config.ListVariable(config.StringVariable("ID")), - "aggregation_policy": config.StringVariable(ap.FullyQualifiedName()), - "aggregation_policy_entity_key": 
config.ListVariable(config.StringVariable("ID")), - "data_metric_function": config.StringVariable(functionId.FullyQualifiedName()), - "data_metric_function_on": config.ListVariable(config.StringVariable("ID")), - "data_metric_function_schedule_status": config.StringVariable(string(scheduleStatus)), - "data_metric_schedule_using_cron": config.StringVariable(cron), - "comment": config.StringVariable(comment), + "name": config.StringVariable(id.Name()), + "database": config.StringVariable(id.DatabaseName()), + "schema": config.StringVariable(id.SchemaName()), + "statement": config.StringVariable(statement), + "row_access_policy": config.StringVariable(rap.FullyQualifiedName()), + "row_access_policy_on": config.ListVariable(config.StringVariable("ID")), + "aggregation_policy": config.StringVariable(ap.FullyQualifiedName()), + "aggregation_policy_entity_key": config.ListVariable(config.StringVariable("ID")), + "data_metric_function": config.StringVariable(functionId.FullyQualifiedName()), + "data_metric_function_on": config.ListVariable(config.StringVariable("ID")), + "data_metric_schedule_using_cron": config.StringVariable(cron), + "comment": config.StringVariable(comment), + "schedule_status": config.StringVariable(string(scheduleStatus)), + "columns": config.SetVariable( + config.MapVariable(map[string]config.Variable{ + "column_name": config.StringVariable("ID"), + }), + config.MapVariable(map[string]config.Variable{ + "column_name": config.StringVariable("FOO"), + }), + ), } } @@ -89,24 +117,32 @@ func TestAcc_View_basic(t *testing.T) { Steps: []resource.TestStep{ // without optionals { - Config: accconfig.FromModel(t, viewModelWithDependency), - Check: assert.AssertThat(t, resourceassert.ViewResource(t, "snowflake_view.test"). - HasNameString(id.Name()). - HasStatementString(statement). - HasDatabaseString(id.DatabaseName()). 
- HasSchemaString(id.SchemaName())), + ConfigDirectory: acc.ConfigurationDirectory("TestAcc_View/basic"), + ConfigVariables: basicView(statement), + Check: assert.AssertThat(t, + resourceassert.ViewResource(t, "snowflake_view.test"). + HasNameString(id.Name()). + HasStatementString(statement). + HasDatabaseString(id.DatabaseName()). + HasSchemaString(id.SchemaName()), + assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "column.#", "2")), + ), }, // import - without optionals { - Config: accconfig.FromModel(t, viewModel), - ResourceName: "snowflake_view.test", - ImportState: true, - ImportStateCheck: assert.AssertThatImport(t, assert.CheckImport(importchecks.TestCheckResourceAttrInstanceState(resourceId, "name", id.Name())), + ConfigDirectory: acc.ConfigurationDirectory("TestAcc_View/basic"), + ConfigVariables: basicView(statement), + ResourceName: "snowflake_view.test", + ImportState: true, + ImportStateCheck: assert.AssertThatImport(t, + assert.CheckImport(importchecks.TestCheckResourceAttrInstanceState(resourceId, "name", id.Name())), + assert.CheckImport(importchecks.TestCheckResourceAttrInstanceState(resourceId, "column.#", "2")), resourceassert.ImportedViewResource(t, resourceId). HasNameString(id.Name()). HasDatabaseString(id.DatabaseName()). HasSchemaString(id.SchemaName()). - HasStatementString(statement)), + HasStatementString(statement), + ), }, // set policies and dmfs externally { @@ -121,12 +157,14 @@ func TestAcc_View_basic(t *testing.T) { }, }))) }, - Config: accconfig.FromModel(t, viewModel), - Check: assert.AssertThat(t, resourceassert.ViewResource(t, "snowflake_view.test"). - HasNameString(id.Name()). - HasStatementString(statement). - HasDatabaseString(id.DatabaseName()). - HasSchemaString(id.SchemaName()), + ConfigDirectory: acc.ConfigurationDirectory("TestAcc_View/basic"), + ConfigVariables: basicView(statement), + Check: assert.AssertThat(t, + resourceassert.ViewResource(t, "snowflake_view.test"). + HasNameString(id.Name()). 
+ HasStatementString(statement). + HasDatabaseString(id.DatabaseName()). + HasSchemaString(id.SchemaName()), assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "aggregation_policy.#", "0")), assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "row_access_policy.#", "0")), assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_schedule.#", "0")), @@ -185,9 +223,9 @@ func TestAcc_View_basic(t *testing.T) { assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "row_access_policy.0.on.0", "ID")), assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_schedule.#", "1")), assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_schedule.0.using_cron", cron2)), - assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_schedule.0.schedule_status", string(sdk.DataMetricScheduleStatusStarted))), assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_schedule.0.minutes", "0")), assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_function.#", "1")), + assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_function.0.schedule_status", string(sdk.DataMetricScheduleStatusStarted))), assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_function.0.function_name", function2Id.FullyQualifiedName())), assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_function.0.on.#", "1")), assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_function.0.on.0", "ID")), @@ -213,9 +251,9 @@ func TestAcc_View_basic(t *testing.T) { assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "row_access_policy.0.on.0", "ID")), assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_schedule.#", "1")), assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", 
"data_metric_schedule.0.using_cron", cron2)), - assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_schedule.0.schedule_status", string(sdk.DataMetricScheduleStatusSuspended))), assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_schedule.0.minutes", "0")), assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_function.#", "1")), + assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_function.0.schedule_status", string(sdk.DataMetricScheduleStatusSuspended))), assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_function.0.function_name", function2Id.FullyQualifiedName())), assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_function.0.on.#", "1")), assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_function.0.on.0", "ID")), @@ -309,7 +347,6 @@ func TestAcc_View_basic(t *testing.T) { assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_function.0.on.0", "ID")), ), }, - // import - with optionals { ConfigDirectory: acc.ConfigurationDirectory("TestAcc_View/basic_update"), @@ -338,8 +375,9 @@ func TestAcc_View_basic(t *testing.T) { }, // unset { - Config: accconfig.FromModel(t, viewModel.WithStatement(otherStatement)), - ResourceName: "snowflake_view.test", + ConfigDirectory: acc.ConfigurationDirectory("TestAcc_View/basic"), + ConfigVariables: basicView(otherStatement), + ResourceName: "snowflake_view.test", Check: assert.AssertThat(t, resourceassert.ViewResource(t, "snowflake_view.test"). HasNameString(id.Name()). HasStatementString(otherStatement). 
@@ -354,7 +392,8 @@ func TestAcc_View_basic(t *testing.T) { }, // recreate - change is_recursive { - Config: accconfig.FromModel(t, viewModel.WithIsRecursive("true")), + ConfigDirectory: acc.ConfigurationDirectory("TestAcc_View/basic_is_recursive"), + ConfigVariables: basicViewWithIsRecursive, Check: assert.AssertThat(t, resourceassert.ViewResource(t, "snowflake_view.test"). HasNameString(id.Name()). HasStatementString(otherStatement). @@ -380,7 +419,21 @@ func TestAcc_View_recursive(t *testing.T) { acc.TestAccPreCheck(t) id := acc.TestClient().Ids.RandomSchemaObjectIdentifier() statement := "SELECT ROLE_NAME, ROLE_OWNER FROM INFORMATION_SCHEMA.APPLICABLE_ROLES" - viewModel := model.View("test", id.DatabaseName(), id.Name(), id.SchemaName(), statement) + basicView := config.Variables{ + "name": config.StringVariable(id.Name()), + "database": config.StringVariable(id.DatabaseName()), + "schema": config.StringVariable(id.SchemaName()), + "statement": config.StringVariable(statement), + "is_recursive": config.BoolVariable(true), + "columns": config.SetVariable( + config.MapVariable(map[string]config.Variable{ + "column_name": config.StringVariable("ROLE_NAME"), + }), + config.MapVariable(map[string]config.Variable{ + "column_name": config.StringVariable("ROLE_OWNER"), + }), + ), + } resource.Test(t, resource.TestCase{ ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, @@ -390,7 +443,8 @@ func TestAcc_View_recursive(t *testing.T) { CheckDestroy: acc.CheckDestroy(t, resources.View), Steps: []resource.TestStep{ { - Config: accconfig.FromModel(t, viewModel.WithIsRecursive("true")), + ConfigDirectory: acc.ConfigurationDirectory("TestAcc_View/basic_is_recursive"), + ConfigVariables: basicView, Check: assert.AssertThat(t, resourceassert.ViewResource(t, "snowflake_view.test"). HasNameString(id.Name()). HasStatementString(statement). 
@@ -399,9 +453,10 @@ func TestAcc_View_recursive(t *testing.T) { HasIsRecursiveString("true")), }, { - Config: accconfig.FromModel(t, viewModel.WithIsRecursive("true")), - ResourceName: "snowflake_view.test", - ImportState: true, + ConfigDirectory: acc.ConfigurationDirectory("TestAcc_View/basic_is_recursive"), + ConfigVariables: basicView, + ResourceName: "snowflake_view.test", + ImportState: true, ImportStateCheck: assert.AssertThatImport(t, assert.CheckImport(importchecks.TestCheckResourceAttrInstanceState(helpers.EncodeResourceIdentifier(id), "name", id.Name())), resourceassert.ImportedViewResource(t, helpers.EncodeResourceIdentifier(id)). HasNameString(id.Name()). @@ -464,7 +519,27 @@ func TestAcc_View_complete(t *testing.T) { projectionPolicy, projectionPolicyCleanup := acc.TestClient().ProjectionPolicy.CreateProjectionPolicy(t) t.Cleanup(projectionPolicyCleanup) - maskingPolicy, maskingPolicyCleanup := acc.TestClient().MaskingPolicy.CreateMaskingPolicyIdentity(t, sdk.DataTypeNumber) + maskingPolicy, maskingPolicyCleanup := acc.TestClient().MaskingPolicy.CreateMaskingPolicyWithOptions(t, + acc.TestClient().Ids.SchemaId(), + []sdk.TableColumnSignature{ + { + Name: "One", + Type: sdk.DataTypeNumber, + }, + { + Name: "Two", + Type: sdk.DataTypeNumber, + }, + }, + sdk.DataTypeNumber, + ` +case + when One > 0 then One + else Two +end;; +`, + new(sdk.CreateMaskingPolicyOptions), + ) t.Cleanup(maskingPolicyCleanup) functionId := sdk.NewSchemaObjectIdentifier("SNOWFLAKE", "CORE", "AVG") @@ -489,7 +564,7 @@ func TestAcc_View_complete(t *testing.T) { "column1_comment": config.StringVariable("col comment"), "column2_name": config.StringVariable("FOO"), "column2_masking_policy": config.StringVariable(maskingPolicy.ID().FullyQualifiedName()), - "column2_masking_policy_using": config.ListVariable(config.StringVariable("FOO")), + "column2_masking_policy_using": config.ListVariable(config.StringVariable("FOO"), config.StringVariable("ID")), "column2_projection_policy": 
config.StringVariable(projectionPolicy.FullyQualifiedName()), "data_metric_function": config.StringVariable(functionId.FullyQualifiedName()), "data_metric_function_on": config.ListVariable(config.StringVariable("ID")), @@ -589,8 +664,22 @@ func TestAcc_View_Rename(t *testing.T) { statement := "SELECT ROLE_NAME, ROLE_OWNER FROM INFORMATION_SCHEMA.APPLICABLE_ROLES" id := acc.TestClient().Ids.RandomSchemaObjectIdentifier() newId := acc.TestClient().Ids.RandomSchemaObjectIdentifier() - viewModel := model.View("test", id.DatabaseName(), id.Name(), id.SchemaName(), statement).WithComment("foo") - newViewModel := model.View("test", newId.DatabaseName(), newId.Name(), newId.SchemaName(), statement).WithComment("foo") + viewConfig := func(identifier sdk.SchemaObjectIdentifier) config.Variables { + return config.Variables{ + "name": config.StringVariable(identifier.Name()), + "database": config.StringVariable(identifier.DatabaseName()), + "schema": config.StringVariable(identifier.SchemaName()), + "statement": config.StringVariable(statement), + "columns": config.SetVariable( + config.MapVariable(map[string]config.Variable{ + "column_name": config.StringVariable("ROLE_NAME"), + }), + config.MapVariable(map[string]config.Variable{ + "column_name": config.StringVariable("ROLE_OWNER"), + }), + ), + } + } resource.Test(t, resource.TestCase{ ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, @@ -601,16 +690,17 @@ func TestAcc_View_Rename(t *testing.T) { CheckDestroy: acc.CheckDestroy(t, resources.View), Steps: []resource.TestStep{ { - Config: accconfig.FromModel(t, viewModel), + ConfigDirectory: acc.ConfigurationDirectory("TestAcc_View/basic"), + ConfigVariables: viewConfig(id), Check: resource.ComposeAggregateTestCheckFunc( resource.TestCheckResourceAttr("snowflake_view.test", "name", id.Name()), - resource.TestCheckResourceAttr("snowflake_view.test", "comment", "foo"), resource.TestCheckResourceAttr("snowflake_view.test", "fully_qualified_name", 
id.FullyQualifiedName()), ), }, // rename with one param changed { - Config: accconfig.FromModel(t, newViewModel), + ConfigDirectory: acc.ConfigurationDirectory("TestAcc_View/basic"), + ConfigVariables: viewConfig(newId), ConfigPlanChecks: resource.ConfigPlanChecks{ PreApply: []plancheck.PlanCheck{ plancheck.ExpectResourceAction("snowflake_view.test", plancheck.ResourceActionUpdate), @@ -618,7 +708,6 @@ func TestAcc_View_Rename(t *testing.T) { }, Check: resource.ComposeAggregateTestCheckFunc( resource.TestCheckResourceAttr("snowflake_view.test", "name", newId.Name()), - resource.TestCheckResourceAttr("snowflake_view.test", "comment", "foo"), resource.TestCheckResourceAttr("snowflake_view.test", "fully_qualified_name", newId.FullyQualifiedName()), ), }, @@ -631,7 +720,24 @@ func TestAcc_ViewChangeCopyGrants(t *testing.T) { id := acc.TestClient().Ids.RandomSchemaObjectIdentifier() statement := "SELECT ROLE_NAME, ROLE_OWNER FROM INFORMATION_SCHEMA.APPLICABLE_ROLES" - viewModel := model.View("test", id.DatabaseName(), id.Name(), id.SchemaName(), statement).WithIsSecure("true").WithCopyGrants(false) + viewConfig := func(copyGrants bool) config.Variables { + return config.Variables{ + "name": config.StringVariable(id.Name()), + "database": config.StringVariable(id.DatabaseName()), + "schema": config.StringVariable(id.SchemaName()), + "statement": config.StringVariable(statement), + "copy_grants": config.BoolVariable(copyGrants), + "is_secure": config.BoolVariable(true), + "columns": config.SetVariable( + config.MapVariable(map[string]config.Variable{ + "column_name": config.StringVariable("ID"), + }), + config.MapVariable(map[string]config.Variable{ + "column_name": config.StringVariable("FOO"), + }), + ), + } + } var createdOn string @@ -644,7 +750,8 @@ func TestAcc_ViewChangeCopyGrants(t *testing.T) { CheckDestroy: acc.CheckDestroy(t, resources.View), Steps: []resource.TestStep{ { - Config: accconfig.FromModel(t, viewModel), + ConfigDirectory: 
acc.ConfigurationDirectory("TestAcc_View/basic_copy_grants"), + ConfigVariables: viewConfig(false), Check: resource.ComposeAggregateTestCheckFunc( resource.TestCheckResourceAttr("snowflake_view.test", "name", id.Name()), resource.TestCheckResourceAttr("snowflake_view.test", "database", id.DatabaseName()), @@ -659,7 +766,8 @@ func TestAcc_ViewChangeCopyGrants(t *testing.T) { }, // Checks that copy_grants changes don't trigger a drop { - Config: accconfig.FromModel(t, viewModel.WithCopyGrants(true)), + ConfigDirectory: acc.ConfigurationDirectory("TestAcc_View/basic_copy_grants"), + ConfigVariables: viewConfig(true), Check: resource.ComposeAggregateTestCheckFunc( resource.TestCheckResourceAttr("snowflake_view.test", "show_output.#", "1"), resource.TestCheckResourceAttrWith("snowflake_view.test", "show_output.0.created_on", func(value string) error { @@ -680,7 +788,25 @@ func TestAcc_ViewChangeCopyGrantsReversed(t *testing.T) { id := acc.TestClient().Ids.RandomSchemaObjectIdentifier() statement := "SELECT ROLE_NAME, ROLE_OWNER FROM INFORMATION_SCHEMA.APPLICABLE_ROLES" - viewModel := model.View("test", id.DatabaseName(), id.Name(), id.SchemaName(), statement).WithIsSecure("true").WithCopyGrants(true) + viewConfig := func(copyGrants bool) config.Variables { + return config.Variables{ + "name": config.StringVariable(id.Name()), + "database": config.StringVariable(id.DatabaseName()), + "schema": config.StringVariable(id.SchemaName()), + "statement": config.StringVariable(statement), + "copy_grants": config.BoolVariable(copyGrants), + "is_secure": config.BoolVariable(true), + "columns": config.SetVariable( + config.MapVariable(map[string]config.Variable{ + "column_name": config.StringVariable("ID"), + }), + config.MapVariable(map[string]config.Variable{ + "column_name": config.StringVariable("FOO"), + }), + ), + } + } + var createdOn string resource.Test(t, resource.TestCase{ @@ -692,7 +818,8 @@ func TestAcc_ViewChangeCopyGrantsReversed(t *testing.T) { CheckDestroy: 
acc.CheckDestroy(t, resources.View), Steps: []resource.TestStep{ { - Config: accconfig.FromModel(t, viewModel), + ConfigDirectory: acc.ConfigurationDirectory("TestAcc_View/basic_copy_grants"), + ConfigVariables: viewConfig(true), Check: resource.ComposeAggregateTestCheckFunc( resource.TestCheckResourceAttr("snowflake_view.test", "copy_grants", "true"), resource.TestCheckResourceAttr("snowflake_view.test", "show_output.#", "1"), @@ -704,7 +831,8 @@ func TestAcc_ViewChangeCopyGrantsReversed(t *testing.T) { ), }, { - Config: accconfig.FromModel(t, viewModel.WithCopyGrants(false)), + ConfigDirectory: acc.ConfigurationDirectory("TestAcc_View/basic_copy_grants"), + ConfigVariables: viewConfig(false), Check: resource.ComposeAggregateTestCheckFunc( resource.TestCheckResourceAttr("snowflake_view.test", "show_output.#", "1"), resource.TestCheckResourceAttrWith("snowflake_view.test", "show_output.0.created_on", func(value string) error { @@ -753,30 +881,6 @@ func TestAcc_ViewCopyGrantsStatementUpdate(t *testing.T) { }) } -func TestAcc_View_copyGrants(t *testing.T) { - t.Setenv(string(testenvs.ConfigureClientOnce), "") - id := acc.TestClient().Ids.RandomSchemaObjectIdentifier() - statement := "SELECT ROLE_NAME, ROLE_OWNER FROM INFORMATION_SCHEMA.APPLICABLE_ROLES" - viewModel := model.View("test", id.DatabaseName(), id.Name(), id.SchemaName(), statement) - resource.Test(t, resource.TestCase{ - ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, - PreCheck: func() { acc.TestAccPreCheck(t) }, - TerraformVersionChecks: []tfversion.TerraformVersionCheck{ - tfversion.RequireAbove(tfversion.Version1_5_0), - }, - CheckDestroy: acc.CheckDestroy(t, resources.View), - Steps: []resource.TestStep{ - { - Config: accconfig.FromModel(t, viewModel.WithCopyGrants(true)), - Check: resource.ComposeAggregateTestCheckFunc( - resource.TestCheckResourceAttr("snowflake_view.test", "name", id.Name()), - resource.TestCheckResourceAttr("snowflake_view.test", "copy_grants", "true"), - ), - }, - 
}, - }) -} - func TestAcc_View_Issue2640(t *testing.T) { t.Setenv(string(testenvs.ConfigureClientOnce), "") id := acc.TestClient().Ids.RandomSchemaObjectIdentifier() @@ -839,7 +943,20 @@ func TestAcc_view_migrateFromVersion_0_94_1(t *testing.T) { id := acc.TestClient().Ids.RandomSchemaObjectIdentifier() resourceName := "snowflake_view.test" statement := "SELECT ROLE_NAME, ROLE_OWNER FROM INFORMATION_SCHEMA.APPLICABLE_ROLES" - viewModel := model.View("test", id.DatabaseName(), id.Name(), id.SchemaName(), statement) + viewConfig := config.Variables{ + "name": config.StringVariable(id.Name()), + "database": config.StringVariable(id.DatabaseName()), + "schema": config.StringVariable(id.SchemaName()), + "statement": config.StringVariable(statement), + "columns": config.SetVariable( + config.MapVariable(map[string]config.Variable{ + "column_name": config.StringVariable("ROLE_NAME"), + }), + config.MapVariable(map[string]config.Variable{ + "column_name": config.StringVariable("ROLE_OWNER"), + }), + ), + } tag, tagCleanup := acc.TestClient().Tag.CreateTag(t) t.Cleanup(tagCleanup) @@ -868,7 +985,8 @@ func TestAcc_view_migrateFromVersion_0_94_1(t *testing.T) { }, { ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, - Config: accconfig.FromModel(t, viewModel), + ConfigDirectory: acc.ConfigurationDirectory("TestAcc_View/basic"), + ConfigVariables: viewConfig, Check: resource.ComposeAggregateTestCheckFunc( resource.TestCheckResourceAttr(resourceName, "name", id.Name()), resource.TestCheckNoResourceAttr(resourceName, "tag.#"), @@ -919,6 +1037,10 @@ resource "snowflake_view" "test" { statement = "select %[5]s from \"%[1]s\".\"%[2]s\".\"${snowflake_table.table.name}\"" copy_grants = true is_secure = true + + column { + column_name = "%[5]s" + } } resource "snowflake_account_role" "test" { @@ -956,6 +1078,12 @@ resource "snowflake_view" "test" { %[5]s SQL is_secure = true + column { + column_name = "ROLE_OWNER" + } + column { + column_name = "ROLE_NAME" + } } `, 
id.DatabaseName(), id.SchemaName(), id.Name(), part1, part2) } diff --git a/pkg/sdk/data_metric_function_references_def.go b/pkg/sdk/data_metric_function_references_def.go index ea24d761b3..1de2263ac6 100644 --- a/pkg/sdk/data_metric_function_references_def.go +++ b/pkg/sdk/data_metric_function_references_def.go @@ -46,6 +46,7 @@ var AllDataMetricScheduleStatusSuspendedOptions = []DataMetricScheduleStatusOpti DataMetricScheduleStatusSuspendedTableColumnDoesNotExistOrNotAuthorized, DataMetricScheduleStatusSuspendedInsufficientPrivilegeToExecuteDataMetricFunction, DataMetricScheduleStatusSuspendedActiveEventTableDoesNotExistOrNotAuthorized, + DataMetricScheduleStatusSuspendedByUserAction, } func ToAllowedDataMetricScheduleStatusOption(s string) (DataMetricScheduleStatusOption, error) { From 0b528abb1083f6a645fa00fde0e6e5ea5c68d635 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jan=20Cie=C5=9Blak?= Date: Fri, 30 Aug 2024 15:29:06 +0200 Subject: [PATCH 10/13] wip --- MIGRATION_GUIDE.md | 16 ++++++++++++++++ docs/resources/view.md | 3 --- examples/resources/snowflake_view/resource.tf | 3 --- pkg/datasources/views.go | 2 +- pkg/resources/view.go | 2 -- 5 files changed, 17 insertions(+), 9 deletions(-) diff --git a/MIGRATION_GUIDE.md b/MIGRATION_GUIDE.md index f51691da54..f13167c897 100644 --- a/MIGRATION_GUIDE.md +++ b/MIGRATION_GUIDE.md @@ -23,6 +23,22 @@ across different versions. - `comment` - `owner_role_type` +### snowflake_views data source changes +New filtering options: +- `in` +- `like` +- `starts_with` +- `limit` +- `with_describe` + +New output fields +- `show_output` +- `describe_output` + +Breaking changes: +- `database` and `schema` are right now under `in` field +- `views` field now organizes output of show under `show_output` field and the output of describe under `describe_output` field. 
+ ### snowflake_view resource changes New fields: - `row_access_policy` diff --git a/docs/resources/view.md b/docs/resources/view.md index 21d9a37b3a..ab2682b498 100644 --- a/docs/resources/view.md +++ b/docs/resources/view.md @@ -50,19 +50,16 @@ resource "snowflake_view" "test" { column { column_name = "id" comment = "column comment" - } column { column_name = "address" projection_policy { policy_name = "projection_policy" } - masking_policy { policy_name = "masking_policy" using = ["address"] } - } row_access_policy { policy_name = "row_access_policy" diff --git a/examples/resources/snowflake_view/resource.tf b/examples/resources/snowflake_view/resource.tf index a0c4088f75..b41c2c308d 100644 --- a/examples/resources/snowflake_view/resource.tf +++ b/examples/resources/snowflake_view/resource.tf @@ -30,19 +30,16 @@ resource "snowflake_view" "test" { column { column_name = "id" comment = "column comment" - } column { column_name = "address" projection_policy { policy_name = "projection_policy" } - masking_policy { policy_name = "masking_policy" using = ["address"] } - } row_access_policy { policy_name = "row_access_policy" diff --git a/pkg/datasources/views.go b/pkg/datasources/views.go index c9b063589d..fa99c1becf 100644 --- a/pkg/datasources/views.go +++ b/pkg/datasources/views.go @@ -150,7 +150,7 @@ func ReadViews(ctx context.Context, d *schema.ResourceData, meta any) diag.Diagn } if v, ok := d.GetOk("limit"); ok { - l := v.([]interface{})[0].(map[string]any) + l := v.([]any)[0].(map[string]any) limit := sdk.LimitFrom{} if v, ok := l["rows"]; ok { rows := v.(int) diff --git a/pkg/resources/view.go b/pkg/resources/view.go index bf195e6d1a..435c5f38d9 100644 --- a/pkg/resources/view.go +++ b/pkg/resources/view.go @@ -466,7 +466,6 @@ func CreateView(orReplace bool) schema.CreateContextFunc { if err != nil { return diag.FromErr(fmt.Errorf("error adding data matric functions in view %v err = %w", id.Name(), err)) } - // TODO (SNOW-1348118 - next pr) changeSchedule := 
make([]sdk.ViewModifyDataMetricFunction, 0, len(addedRaw)) for i := range addedRaw { if addedRaw[i].ScheduleStatus != "" { @@ -718,7 +717,6 @@ func handleDataMetricFunctions(ctx context.Context, client *sdk.Client, id sdk.S for _, v := range dmfRef.RefArguments { columns = append(columns, v.Name) } - // TODO (SNOW-1348118 - next pr) var scheduleStatus sdk.DataMetricScheduleStatusOption status, err := sdk.ToDataMetricScheduleStatusOption(dmfRef.ScheduleStatus) if err != nil { From 24658c00462307f4b4e3257924b009c862df1f16 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jan=20Cie=C5=9Blak?= Date: Mon, 2 Sep 2024 13:12:19 +0200 Subject: [PATCH 11/13] Changes after review --- .../assert/objectassert/view_snowflake_ext.go | 72 ++++ .../resourceassert/view_resource_ext.go | 52 +++ .../collections/collection_helpers.go | 9 + pkg/resources/resource.go | 4 + .../testdata/TestAcc_View/columns/test.tf | 23 ++ .../TestAcc_View/columns/variables.tf | 27 ++ pkg/resources/view.go | 45 ++- pkg/resources/view_acceptance_test.go | 284 ++++++++++---- pkg/resources/view_test.go | 355 ++++++++++++++++++ 9 files changed, 793 insertions(+), 78 deletions(-) create mode 100644 pkg/acceptance/bettertestspoc/assert/resourceassert/view_resource_ext.go create mode 100644 pkg/resources/testdata/TestAcc_View/columns/test.tf create mode 100644 pkg/resources/testdata/TestAcc_View/columns/variables.tf create mode 100644 pkg/resources/view_test.go diff --git a/pkg/acceptance/bettertestspoc/assert/objectassert/view_snowflake_ext.go b/pkg/acceptance/bettertestspoc/assert/objectassert/view_snowflake_ext.go index 05598f49ad..7e06b64669 100644 --- a/pkg/acceptance/bettertestspoc/assert/objectassert/view_snowflake_ext.go +++ b/pkg/acceptance/bettertestspoc/assert/objectassert/view_snowflake_ext.go @@ -2,8 +2,12 @@ package objectassert import ( "fmt" + "slices" "testing" + acc "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance" + 
"github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/helpers" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk" ) @@ -28,3 +32,71 @@ func (v *ViewAssert) HasNonEmptyText() *ViewAssert { }) return v } + +func (v *ViewAssert) HasNoRowAccessPolicyReferences() *ViewAssert { + return v.hasNoPolicyReference(sdk.PolicyKindRowAccessPolicy) +} + +func (v *ViewAssert) HasNoAggregationPolicyReferences() *ViewAssert { + return v.hasNoPolicyReference(sdk.PolicyKindAggregationPolicy) +} + +func (v *ViewAssert) HasNoMaskingPolicyReferences() *ViewAssert { + return v.hasNoPolicyReference(sdk.PolicyKindMaskingPolicy) +} + +func (v *ViewAssert) HasNoProjectionPolicyReferences() *ViewAssert { + return v.hasNoPolicyReference(sdk.PolicyKindProjectionPolicy) +} + +func (v *ViewAssert) hasNoPolicyReference(kind sdk.PolicyKind) *ViewAssert { + v.AddAssertion(func(t *testing.T, o *sdk.View) error { + t.Helper() + refs, err := acc.TestClient().PolicyReferences.GetPolicyReferences(t, o.ID(), sdk.ObjectTypeView) + if err != nil { + return err + } + refs = slices.DeleteFunc(refs, func(reference helpers.PolicyReference) bool { + return reference.PolicyKind != string(kind) + }) + if len(refs) > 0 { + return fmt.Errorf("expected no %s policy references; got: %v", kind, refs) + } + return nil + }) + return v +} + +func (v *ViewAssert) HasRowAccessPolicyReferences(n int) *ViewAssert { + return v.hasPolicyReference(sdk.PolicyKindRowAccessPolicy, n) +} + +func (v *ViewAssert) HasAggregationPolicyReferences(n int) *ViewAssert { + return v.hasPolicyReference(sdk.PolicyKindAggregationPolicy, n) +} + +func (v *ViewAssert) HasMaskingPolicyReferences(n int) *ViewAssert { + return v.hasPolicyReference(sdk.PolicyKindMaskingPolicy, n) +} + +func (v *ViewAssert) HasProjectionPolicyReferences(n int) *ViewAssert { + return v.hasPolicyReference(sdk.PolicyKindProjectionPolicy, n) +} + +func (v *ViewAssert) hasPolicyReference(kind sdk.PolicyKind, n int) *ViewAssert { + 
v.AddAssertion(func(t *testing.T, o *sdk.View) error { + t.Helper() + refs, err := acc.TestClient().PolicyReferences.GetPolicyReferences(t, o.ID(), sdk.ObjectTypeView) + if err != nil { + return err + } + refs = slices.DeleteFunc(refs, func(reference helpers.PolicyReference) bool { + return reference.PolicyKind != string(kind) + }) + if len(refs) != n { + return fmt.Errorf("expected %d %s policy references; got: %d, %v", n, kind, len(refs), refs) + } + return nil + }) + return v +} diff --git a/pkg/acceptance/bettertestspoc/assert/resourceassert/view_resource_ext.go b/pkg/acceptance/bettertestspoc/assert/resourceassert/view_resource_ext.go new file mode 100644 index 0000000000..12736a6a30 --- /dev/null +++ b/pkg/acceptance/bettertestspoc/assert/resourceassert/view_resource_ext.go @@ -0,0 +1,52 @@ +package resourceassert + +import ( + "strconv" + + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/bettertestspoc/assert" +) + +func (v *ViewResourceAssert) HasColumnLength(len int) *ViewResourceAssert { + v.AddAssertion(assert.ValueSet("column.#", strconv.FormatInt(int64(len), 10))) + return v +} + +func (v *ViewResourceAssert) HasAggregationPolicyLength(len int) *ViewResourceAssert { + v.AddAssertion(assert.ValueSet("aggregation_policy.#", strconv.FormatInt(int64(len), 10))) + return v +} + +func (v *ViewResourceAssert) HasRowAccessPolicyLength(len int) *ViewResourceAssert { + v.AddAssertion(assert.ValueSet("row_access_policy.#", strconv.FormatInt(int64(len), 10))) + return v +} + +func (v *ViewResourceAssert) HasDataMetricScheduleLength(len int) *ViewResourceAssert { + v.AddAssertion(assert.ValueSet("data_metric_schedule.#", strconv.FormatInt(int64(len), 10))) + return v +} + +func (v *ViewResourceAssert) HasDataMetricFunctionLength(len int) *ViewResourceAssert { + v.AddAssertion(assert.ValueSet("data_metric_function.#", strconv.FormatInt(int64(len), 10))) + return v +} + +func (v *ViewResourceAssert) HasNoAggregationPolicyByLength() 
*ViewResourceAssert { + v.AddAssertion(assert.ValueNotSet("aggregation_policy.#")) + return v +} + +func (v *ViewResourceAssert) HasNoRowAccessPolicyByLength() *ViewResourceAssert { + v.AddAssertion(assert.ValueNotSet("row_access_policy.#")) + return v +} + +func (v *ViewResourceAssert) HasNoDataMetricScheduleByLength() *ViewResourceAssert { + v.AddAssertion(assert.ValueNotSet("data_metric_schedule.#")) + return v +} + +func (v *ViewResourceAssert) HasNoDataMetricFunctionByLength() *ViewResourceAssert { + v.AddAssertion(assert.ValueNotSet("data_metric_function.#")) + return v +} diff --git a/pkg/internal/collections/collection_helpers.go b/pkg/internal/collections/collection_helpers.go index 0244492488..18d855ea18 100644 --- a/pkg/internal/collections/collection_helpers.go +++ b/pkg/internal/collections/collection_helpers.go @@ -15,3 +15,12 @@ func FindOne[T any](collection []T, condition func(T) bool) (*T, error) { } return nil, ErrObjectNotFound } + +// TODO [SNOW-1473414]: move collection helpers fully with a separate PR +func Map[T any, R any](collection []T, mapper func(T) R) []R { + result := make([]R, len(collection)) + for i, elem := range collection { + result[i] = mapper(elem) + } + return result +} diff --git a/pkg/resources/resource.go b/pkg/resources/resource.go index 2a7437cad9..a4e1494f67 100644 --- a/pkg/resources/resource.go +++ b/pkg/resources/resource.go @@ -9,6 +9,10 @@ import ( "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" ) +type ResourceValueSetter interface { + Set(string, any) error +} + func DeleteResource(t string, builder func(string) *snowflake.Builder) func(*schema.ResourceData, interface{}) error { return func(d *schema.ResourceData, meta interface{}) error { client := meta.(*provider.Context).Client diff --git a/pkg/resources/testdata/TestAcc_View/columns/test.tf b/pkg/resources/testdata/TestAcc_View/columns/test.tf new file mode 100644 index 0000000000..fd5b201fe7 --- /dev/null +++ 
b/pkg/resources/testdata/TestAcc_View/columns/test.tf @@ -0,0 +1,23 @@ +resource "snowflake_view" "test" { + name = var.name + database = var.database + schema = var.schema + statement = var.statement + + column { + column_name = "ID" + + projection_policy { + policy_name = var.projection_name + } + + masking_policy { + policy_name = var.masking_name + using = var.masking_using + } + } + + column { + column_name = "FOO" + } +} diff --git a/pkg/resources/testdata/TestAcc_View/columns/variables.tf b/pkg/resources/testdata/TestAcc_View/columns/variables.tf new file mode 100644 index 0000000000..ba6e4bfe0d --- /dev/null +++ b/pkg/resources/testdata/TestAcc_View/columns/variables.tf @@ -0,0 +1,27 @@ +variable "name" { + type = string +} + +variable "database" { + type = string +} + +variable "schema" { + type = string +} + +variable "statement" { + type = string +} + +variable "projection_name" { + type = string +} + +variable "masking_name" { + type = string +} + +variable "masking_using" { + type = list(string) +} diff --git a/pkg/resources/view.go b/pkg/resources/view.go index 435c5f38d9..854175bdf0 100644 --- a/pkg/resources/view.go +++ b/pkg/resources/view.go @@ -502,27 +502,46 @@ func CreateView(orReplace bool) schema.CreateContextFunc { } func extractColumns(v any) ([]sdk.ViewColumnRequest, error) { + _, ok := v.([]any) + if v == nil || !ok { + return nil, fmt.Errorf("unable to extract columns, input is either nil or non expected type (%T): %v", v, v) + } columns := make([]sdk.ViewColumnRequest, len(v.([]any))) for i, columnConfigRaw := range v.([]any) { - columnConfig := columnConfigRaw.(map[string]any) - columnsReq := *sdk.NewViewColumnRequest(columnConfig["column_name"].(string)) - if len(columnConfig["projection_policy"].([]any)) > 0 { - projectionPolicyId, _, err := extractPolicyWithColumnsSet(columnConfig["projection_policy"], "") + columnConfig, ok := columnConfigRaw.(map[string]any) + if !ok { + return nil, fmt.Errorf("unable to extract column, non 
expected type of %T: %v", columnConfigRaw, columnConfigRaw) + } + + columnName, ok := columnConfig["column_name"] + if !ok { + return nil, fmt.Errorf("unable to extract column, missing column_name key in column") + } + columnsReq := *sdk.NewViewColumnRequest(columnName.(string)) + + projectionPolicy, ok := columnConfig["projection_policy"] + if ok && len(projectionPolicy.([]any)) > 0 { + projectionPolicyId, _, err := extractPolicyWithColumnsSet(projectionPolicy, "") if err != nil { return nil, err } columnsReq.WithProjectionPolicy(*sdk.NewViewColumnProjectionPolicyRequest(projectionPolicyId)) } - if len(columnConfig["masking_policy"].([]any)) > 0 { - maskingPolicyId, maskingPolicyColumns, err := extractPolicyWithColumnsList(columnConfig["masking_policy"], "using") + + maskingPolicy, ok := columnConfig["masking_policy"] + if ok && len(maskingPolicy.([]any)) > 0 { + maskingPolicyId, maskingPolicyColumns, err := extractPolicyWithColumnsList(maskingPolicy, "using") if err != nil { return nil, err } columnsReq.WithMaskingPolicy(*sdk.NewViewColumnMaskingPolicyRequest(maskingPolicyId).WithUsing(maskingPolicyColumns)) } - if commentRaw := columnConfig["comment"].(string); len(commentRaw) > 0 { - columnsReq.WithComment(commentRaw) + + comment, ok := columnConfig["comment"] + if ok && len(comment.(string)) > 0 { + columnsReq.WithComment(comment.(string)) } + columns[i] = columnsReq } return columns, nil @@ -552,7 +571,7 @@ func extractPolicyWithColumnsList(v any, columnsKey string) (sdk.SchemaObjectIde return sdk.SchemaObjectIdentifier{}, nil, err } if policyConfig[columnsKey] == nil { - return id, nil, nil + return id, nil, fmt.Errorf("unable to extract policy with column list, unable to find columnsKey: %s", columnsKey) } columnsRaw := expandStringList(policyConfig[columnsKey].([]any)) columns := make([]sdk.Column, len(columnsRaw)) @@ -746,7 +765,7 @@ func handleDataMetricFunctions(ctx context.Context, client *sdk.Client, id sdk.S }) } -func handleColumns(d 
*schema.ResourceData, columns []sdk.ViewDetails, policyRefs []sdk.PolicyReference) error { +func handleColumns(d ResourceValueSetter, columns []sdk.ViewDetails, policyRefs []sdk.PolicyReference) error { if len(columns) == 0 { return d.Set("column", nil) } @@ -754,7 +773,11 @@ func handleColumns(d *schema.ResourceData, columns []sdk.ViewDetails, policyRefs for i, column := range columns { columnsRaw[i] = map[string]any{ "column_name": column.Name, - "comment": column.Comment, + } + if column.Comment != nil { + columnsRaw[i]["comment"] = *column.Comment + } else { + columnsRaw[i]["comment"] = nil } projectionPolicy, err := collections.FindOne(policyRefs, func(r sdk.PolicyReference) bool { return r.PolicyKind == sdk.PolicyKindProjectionPolicy && r.RefColumnName != nil && *r.RefColumnName == column.Name diff --git a/pkg/resources/view_acceptance_test.go b/pkg/resources/view_acceptance_test.go index 5532e2899b..e641266880 100644 --- a/pkg/resources/view_acceptance_test.go +++ b/pkg/resources/view_acceptance_test.go @@ -5,6 +5,9 @@ import ( "regexp" "testing" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/bettertestspoc/assert/objectassert" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/internal/collections" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/helpers/random" acc "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance" @@ -61,7 +64,7 @@ func TestAcc_View_basic(t *testing.T) { otherStatement := fmt.Sprintf("SELECT foo, id FROM %s", table.ID().FullyQualifiedName()) comment := random.Comment() - // generators currently don't handle lists, so use the old way + // generators currently don't handle lists of objects, so use the old way basicView := func(configStatement string) config.Variables { return config.Variables{ "name": config.StringVariable(id.Name()), @@ -81,7 +84,7 @@ func TestAcc_View_basic(t *testing.T) { basicViewWithIsRecursive := basicView(otherStatement) 
basicViewWithIsRecursive["is_recursive"] = config.BoolVariable(true) - // generators currently don't handle lists, so use the old way + // generators currently don't handle lists of objects, so use the old way basicUpdate := func(rap, ap, functionId sdk.SchemaObjectIdentifier, statement, cron string, scheduleStatus sdk.DataMetricScheduleStatusOption) config.Variables { return config.Variables{ "name": config.StringVariable(id.Name()), @@ -124,8 +127,8 @@ func TestAcc_View_basic(t *testing.T) { HasNameString(id.Name()). HasStatementString(statement). HasDatabaseString(id.DatabaseName()). - HasSchemaString(id.SchemaName()), - assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "column.#", "2")), + HasSchemaString(id.SchemaName()). + HasColumnLength(2), ), }, // import - without optionals @@ -135,13 +138,12 @@ func TestAcc_View_basic(t *testing.T) { ResourceName: "snowflake_view.test", ImportState: true, ImportStateCheck: assert.AssertThatImport(t, - assert.CheckImport(importchecks.TestCheckResourceAttrInstanceState(resourceId, "name", id.Name())), - assert.CheckImport(importchecks.TestCheckResourceAttrInstanceState(resourceId, "column.#", "2")), resourceassert.ImportedViewResource(t, resourceId). HasNameString(id.Name()). HasDatabaseString(id.DatabaseName()). HasSchemaString(id.SchemaName()). - HasStatementString(statement), + HasStatementString(statement). + HasColumnLength(2), ), }, // set policies and dmfs externally @@ -164,11 +166,11 @@ func TestAcc_View_basic(t *testing.T) { HasNameString(id.Name()). HasStatementString(statement). HasDatabaseString(id.DatabaseName()). 
- HasSchemaString(id.SchemaName()), - assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "aggregation_policy.#", "0")), - assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "row_access_policy.#", "0")), - assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_schedule.#", "0")), - assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_function.#", "0")), + HasSchemaString(id.SchemaName()). + HasAggregationPolicyLength(0). + HasRowAccessPolicyLength(0). + HasDataMetricScheduleLength(0). + HasDataMetricFunctionLength(0), ), }, // set other fields @@ -185,19 +187,19 @@ func TestAcc_View_basic(t *testing.T) { HasStatementString(statement). HasDatabaseString(id.DatabaseName()). HasSchemaString(id.SchemaName()). - HasCommentString(comment), - assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "aggregation_policy.#", "1")), + HasCommentString(comment). + HasAggregationPolicyLength(1). + HasRowAccessPolicyLength(1). + HasDataMetricScheduleLength(1). 
+ HasDataMetricFunctionLength(1), assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "aggregation_policy.0.policy_name", aggregationPolicy.FullyQualifiedName())), assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "aggregation_policy.0.entity_key.#", "1")), assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "aggregation_policy.0.entity_key.0", "ID")), - assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "row_access_policy.#", "1")), assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "row_access_policy.0.policy_name", rowAccessPolicy.ID().FullyQualifiedName())), assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "row_access_policy.0.on.#", "1")), assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "row_access_policy.0.on.0", "ID")), - assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_schedule.#", "1")), assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_schedule.0.using_cron", cron)), assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_schedule.0.minutes", "0")), - assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_function.#", "1")), assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_function.0.function_name", functionId.FullyQualifiedName())), assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_function.0.on.#", "1")), assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_function.0.on.0", "ID")), @@ -212,19 +214,19 @@ func TestAcc_View_basic(t *testing.T) { HasStatementString(statement). HasDatabaseString(id.DatabaseName()). HasSchemaString(id.SchemaName()). - HasCommentString(comment), - assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "aggregation_policy.#", "1")), + HasCommentString(comment). + HasAggregationPolicyLength(1). 
+ HasRowAccessPolicyLength(1). + HasDataMetricScheduleLength(1). + HasDataMetricFunctionLength(1), assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "aggregation_policy.0.policy_name", aggregationPolicy2.FullyQualifiedName())), assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "aggregation_policy.0.entity_key.#", "1")), assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "aggregation_policy.0.entity_key.0", "ID")), - assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "row_access_policy.#", "1")), assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "row_access_policy.0.policy_name", rowAccessPolicy2.ID().FullyQualifiedName())), assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "row_access_policy.0.on.#", "1")), assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "row_access_policy.0.on.0", "ID")), - assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_schedule.#", "1")), assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_schedule.0.using_cron", cron2)), assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_schedule.0.minutes", "0")), - assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_function.#", "1")), assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_function.0.schedule_status", string(sdk.DataMetricScheduleStatusStarted))), assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_function.0.function_name", function2Id.FullyQualifiedName())), assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_function.0.on.#", "1")), @@ -240,19 +242,19 @@ func TestAcc_View_basic(t *testing.T) { HasStatementString(statement). HasDatabaseString(id.DatabaseName()). HasSchemaString(id.SchemaName()). 
- HasCommentString(comment), - assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "aggregation_policy.#", "1")), + HasCommentString(comment). + HasAggregationPolicyLength(1). + HasRowAccessPolicyLength(1). + HasDataMetricScheduleLength(1). + HasDataMetricFunctionLength(1), assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "aggregation_policy.0.policy_name", aggregationPolicy2.FullyQualifiedName())), assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "aggregation_policy.0.entity_key.#", "1")), assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "aggregation_policy.0.entity_key.0", "ID")), - assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "row_access_policy.#", "1")), assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "row_access_policy.0.policy_name", rowAccessPolicy2.ID().FullyQualifiedName())), assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "row_access_policy.0.on.#", "1")), assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "row_access_policy.0.on.0", "ID")), - assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_schedule.#", "1")), assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_schedule.0.using_cron", cron2)), assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_schedule.0.minutes", "0")), - assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_function.#", "1")), assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_function.0.schedule_status", string(sdk.DataMetricScheduleStatusSuspended))), assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_function.0.function_name", function2Id.FullyQualifiedName())), assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_function.0.on.#", "1")), @@ -268,19 +270,19 @@ func TestAcc_View_basic(t 
*testing.T) { HasStatementString(otherStatement). HasDatabaseString(id.DatabaseName()). HasSchemaString(id.SchemaName()). - HasCommentString(comment), - assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "aggregation_policy.#", "1")), + HasCommentString(comment). + HasAggregationPolicyLength(1). + HasRowAccessPolicyLength(1). + HasDataMetricScheduleLength(1). + HasDataMetricFunctionLength(1), assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "aggregation_policy.0.policy_name", aggregationPolicy.FullyQualifiedName())), assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "aggregation_policy.0.entity_key.#", "1")), assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "aggregation_policy.0.entity_key.0", "ID")), - assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "row_access_policy.#", "1")), assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "row_access_policy.0.policy_name", rowAccessPolicy.ID().FullyQualifiedName())), assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "row_access_policy.0.on.#", "1")), assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "row_access_policy.0.on.0", "ID")), - assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_schedule.#", "1")), assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_schedule.0.using_cron", cron)), assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_schedule.0.minutes", "0")), - assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_function.#", "1")), assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_function.0.function_name", functionId.FullyQualifiedName())), assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_function.0.on.#", "1")), assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_function.0.on.0", 
"ID")), @@ -298,19 +300,19 @@ func TestAcc_View_basic(t *testing.T) { HasStatementString(otherStatement). HasDatabaseString(id.DatabaseName()). HasSchemaString(id.SchemaName()). - HasCommentString(comment), - assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "aggregation_policy.#", "1")), + HasCommentString(comment). + HasAggregationPolicyLength(1). + HasRowAccessPolicyLength(1). + HasDataMetricScheduleLength(1). + HasDataMetricFunctionLength(1), assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "aggregation_policy.0.policy_name", aggregationPolicy.FullyQualifiedName())), assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "aggregation_policy.0.entity_key.#", "1")), assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "aggregation_policy.0.entity_key.0", "ID")), - assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "row_access_policy.#", "1")), assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "row_access_policy.0.policy_name", rowAccessPolicy.ID().FullyQualifiedName())), assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "row_access_policy.0.on.#", "1")), assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "row_access_policy.0.on.0", "ID")), - assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_schedule.#", "1")), assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_schedule.0.using_cron", cron)), assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_schedule.0.minutes", "0")), - assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_function.#", "1")), assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_function.0.function_name", functionId.FullyQualifiedName())), assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_function.0.on.#", "1")), 
assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_function.0.on.0", "ID")), @@ -329,19 +331,19 @@ func TestAcc_View_basic(t *testing.T) { HasStatementString(otherStatement). HasDatabaseString(id.DatabaseName()). HasSchemaString(id.SchemaName()). - HasCommentString(comment), - assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "aggregation_policy.#", "1")), + HasCommentString(comment). + HasAggregationPolicyLength(1). + HasRowAccessPolicyLength(1). + HasDataMetricScheduleLength(1). + HasDataMetricFunctionLength(1), assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "aggregation_policy.0.policy_name", aggregationPolicy.FullyQualifiedName())), assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "aggregation_policy.0.entity_key.#", "1")), assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "aggregation_policy.0.entity_key.0", "ID")), - assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "row_access_policy.#", "1")), assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "row_access_policy.0.policy_name", rowAccessPolicy.ID().FullyQualifiedName())), assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "row_access_policy.0.on.#", "1")), assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "row_access_policy.0.on.0", "ID")), - assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_schedule.#", "1")), assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_schedule.0.using_cron", cron)), assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_schedule.0.minutes", "0")), - assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_function.#", "1")), assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_function.0.function_name", functionId.FullyQualifiedName())), 
assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_function.0.on.#", "1")), assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_function.0.on.0", "ID")), @@ -362,12 +364,12 @@ func TestAcc_View_basic(t *testing.T) { HasCommentString(comment). HasIsSecureString("false"). HasIsTemporaryString("false"). - HasChangeTrackingString("false"), - assert.CheckImport(importchecks.TestCheckResourceAttrInstanceState(resourceId, "aggregation_policy.#", "1")), + HasChangeTrackingString("false"). + HasAggregationPolicyLength(1). + HasRowAccessPolicyLength(1), assert.CheckImport(importchecks.TestCheckResourceAttrInstanceState(resourceId, "aggregation_policy.0.policy_name", aggregationPolicy.FullyQualifiedName())), assert.CheckImport(importchecks.TestCheckResourceAttrInstanceState(resourceId, "aggregation_policy.0.entity_key.#", "1")), assert.CheckImport(importchecks.TestCheckResourceAttrInstanceState(resourceId, "aggregation_policy.0.entity_key.0", "ID")), - assert.CheckImport(importchecks.TestCheckResourceAttrInstanceState(resourceId, "row_access_policy.#", "1")), assert.CheckImport(importchecks.TestCheckResourceAttrInstanceState(resourceId, "row_access_policy.0.policy_name", rowAccessPolicy.ID().FullyQualifiedName())), assert.CheckImport(importchecks.TestCheckResourceAttrInstanceState(resourceId, "row_access_policy.0.on.#", "1")), assert.CheckImport(importchecks.TestCheckResourceAttrInstanceState(resourceId, "row_access_policy.0.on.0", "ID")), @@ -383,11 +385,11 @@ func TestAcc_View_basic(t *testing.T) { HasStatementString(otherStatement). HasDatabaseString(id.DatabaseName()). HasSchemaString(id.SchemaName()). 
- HasCommentString(""), - assert.Check(resource.TestCheckNoResourceAttr("snowflake_view.test", "aggregation_policy.#")), - assert.Check(resource.TestCheckNoResourceAttr("snowflake_view.test", "row_access_policy.#")), - assert.Check(resource.TestCheckNoResourceAttr("snowflake_view.test", "data_metric_schedule.#")), - assert.Check(resource.TestCheckNoResourceAttr("snowflake_view.test", "data_metric_function.#")), + HasCommentString(""). + HasNoAggregationPolicyByLength(). + HasNoRowAccessPolicyByLength(). + HasNoDataMetricScheduleByLength(). + HasNoDataMetricFunctionByLength(), ), }, // recreate - change is_recursive @@ -402,11 +404,11 @@ func TestAcc_View_basic(t *testing.T) { HasCommentString(""). HasIsRecursiveString("true"). HasIsTemporaryString("default"). - HasChangeTrackingString("default"), - assert.Check(resource.TestCheckNoResourceAttr("snowflake_view.test", "aggregation_policy.#")), - assert.Check(resource.TestCheckNoResourceAttr("snowflake_view.test", "row_access_policy.#")), - assert.Check(resource.TestCheckNoResourceAttr("snowflake_view.test", "data_metric_schedule.#")), - assert.Check(resource.TestCheckNoResourceAttr("snowflake_view.test", "data_metric_function.#")), + HasChangeTrackingString("default"). + HasNoAggregationPolicyByLength(). + HasNoRowAccessPolicyByLength(). + HasNoDataMetricScheduleByLength(). + HasNoDataMetricFunctionByLength(), ), }, }, @@ -457,7 +459,7 @@ func TestAcc_View_recursive(t *testing.T) { ConfigVariables: basicView, ResourceName: "snowflake_view.test", ImportState: true, - ImportStateCheck: assert.AssertThatImport(t, assert.CheckImport(importchecks.TestCheckResourceAttrInstanceState(helpers.EncodeResourceIdentifier(id), "name", id.Name())), + ImportStateCheck: assert.AssertThatImport(t, resourceassert.ImportedViewResource(t, helpers.EncodeResourceIdentifier(id)). HasNameString(id.Name()). HasDatabaseString(id.DatabaseName()). @@ -589,23 +591,23 @@ end;; HasCommentString("Terraform test resource"). HasIsSecureString("true"). 
HasIsTemporaryString("false"). - HasChangeTrackingString("true"), - assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_schedule.#", "1")), + HasChangeTrackingString("true"). + HasDataMetricScheduleLength(1). + HasDataMetricFunctionLength(1). + HasAggregationPolicyLength(1). + HasRowAccessPolicyLength(1). + HasColumnLength(2), assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_schedule.0.using_cron", "5 * * * * UTC")), assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_schedule.0.minutes", "0")), - assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_function.#", "1")), assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_function.0.function_name", functionId.FullyQualifiedName())), assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_function.0.on.#", "1")), assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "data_metric_function.0.on.0", "ID")), - assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "aggregation_policy.#", "1")), assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "aggregation_policy.0.policy_name", aggregationPolicy.FullyQualifiedName())), assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "aggregation_policy.0.entity_key.#", "1")), assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "aggregation_policy.0.entity_key.0", "ID")), - assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "row_access_policy.#", "1")), assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "row_access_policy.0.policy_name", rowAccessPolicy.ID().FullyQualifiedName())), assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "row_access_policy.0.on.#", "1")), assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "row_access_policy.0.on.0", "ID")), - 
assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "column.#", "2")), assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "column.0.column_name", "ID")), assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "column.0.masking_policy.#", "0")), assert.Check(resource.TestCheckResourceAttr("snowflake_view.test", "column.0.projection_policy.#", "0")), @@ -637,19 +639,20 @@ end;; HasSchemaString(id.SchemaName()). HasCommentString("Terraform test resource"). HasIsSecureString("true"). - HasIsTemporaryString("false").HasChangeTrackingString("true"), - assert.CheckImport(importchecks.TestCheckResourceAttrInstanceState(resourceId, "data_metric_schedule.#", "1")), + HasIsTemporaryString("false"). + HasChangeTrackingString("true"). + HasDataMetricScheduleLength(1). + HasDataMetricFunctionLength(1). + HasAggregationPolicyLength(1). + HasRowAccessPolicyLength(1), assert.CheckImport(importchecks.TestCheckResourceAttrInstanceState(resourceId, "data_metric_schedule.0.using_cron", "5 * * * * UTC")), assert.CheckImport(importchecks.TestCheckResourceAttrInstanceState(resourceId, "data_metric_schedule.0.minutes", "0")), - assert.CheckImport(importchecks.TestCheckResourceAttrInstanceState(resourceId, "data_metric_function.#", "1")), assert.CheckImport(importchecks.TestCheckResourceAttrInstanceState(resourceId, "data_metric_function.0.function_name", functionId.FullyQualifiedName())), assert.CheckImport(importchecks.TestCheckResourceAttrInstanceState(resourceId, "data_metric_function.0.on.#", "1")), assert.CheckImport(importchecks.TestCheckResourceAttrInstanceState(resourceId, "data_metric_function.0.on.0", "ID")), - assert.CheckImport(importchecks.TestCheckResourceAttrInstanceState(resourceId, "aggregation_policy.#", "1")), assert.CheckImport(importchecks.TestCheckResourceAttrInstanceState(resourceId, "aggregation_policy.0.policy_name", aggregationPolicy.FullyQualifiedName())), 
assert.CheckImport(importchecks.TestCheckResourceAttrInstanceState(resourceId, "aggregation_policy.0.entity_key.#", "1")), assert.CheckImport(importchecks.TestCheckResourceAttrInstanceState(resourceId, "aggregation_policy.0.entity_key.0", "ID")), - assert.CheckImport(importchecks.TestCheckResourceAttrInstanceState(resourceId, "row_access_policy.#", "1")), assert.CheckImport(importchecks.TestCheckResourceAttrInstanceState(resourceId, "row_access_policy.0.policy_name", rowAccessPolicy.ID().FullyQualifiedName())), assert.CheckImport(importchecks.TestCheckResourceAttrInstanceState(resourceId, "row_access_policy.0.on.#", "1")), assert.CheckImport(importchecks.TestCheckResourceAttrInstanceState(resourceId, "row_access_policy.0.on.0", "ID")), @@ -659,6 +662,153 @@ end;; }) } +func TestAcc_View_columns(t *testing.T) { + t.Setenv(string(testenvs.ConfigureClientOnce), "") + _ = testenvs.GetOrSkipTest(t, testenvs.EnableAcceptance) + acc.TestAccPreCheck(t) + + id := acc.TestClient().Ids.RandomSchemaObjectIdentifier() + table, tableCleanup := acc.TestClient().Table.CreateTableWithColumns(t, []sdk.TableColumnRequest{ + *sdk.NewTableColumnRequest("id", sdk.DataTypeNumber), + *sdk.NewTableColumnRequest("foo", sdk.DataTypeNumber), + *sdk.NewTableColumnRequest("bar", sdk.DataTypeNumber), + }) + t.Cleanup(tableCleanup) + statement := fmt.Sprintf("SELECT id, foo FROM %s", table.ID().FullyQualifiedName()) + + maskingPolicy, maskingPolicyCleanup := acc.TestClient().MaskingPolicy.CreateMaskingPolicyWithOptions(t, + acc.TestClient().Ids.SchemaId(), + []sdk.TableColumnSignature{ + { + Name: "One", + Type: sdk.DataTypeNumber, + }, + }, + sdk.DataTypeNumber, + ` +case + when One > 0 then One + else 0 +end;; +`, + new(sdk.CreateMaskingPolicyOptions), + ) + t.Cleanup(maskingPolicyCleanup) + + projectionPolicy, projectionPolicyCleanup := acc.TestClient().ProjectionPolicy.CreateProjectionPolicy(t) + t.Cleanup(projectionPolicyCleanup) + + // generators currently don't handle lists of objects, so 
use the old way + basicView := func(columns ...string) config.Variables { + return config.Variables{ + "name": config.StringVariable(id.Name()), + "database": config.StringVariable(id.DatabaseName()), + "schema": config.StringVariable(id.SchemaName()), + "statement": config.StringVariable(statement), + "columns": config.SetVariable( + collections.Map(columns, func(columnName string) config.Variable { + return config.MapVariable(map[string]config.Variable{ + "column_name": config.StringVariable(columnName), + }) + })..., + ), + } + } + + basicViewWithPolicies := func() config.Variables { + conf := basicView("ID", "FOO") + delete(conf, "columns") + conf["projection_name"] = config.StringVariable(projectionPolicy.FullyQualifiedName()) + conf["masking_name"] = config.StringVariable(maskingPolicy.ID().FullyQualifiedName()) + conf["masking_using"] = config.ListVariable(config.StringVariable("ID")) + return conf + } + + resource.Test(t, resource.TestCase{ + ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, + TerraformVersionChecks: []tfversion.TerraformVersionCheck{ + tfversion.RequireAbove(tfversion.Version1_5_0), + }, + CheckDestroy: acc.CheckDestroy(t, resources.View), + Steps: []resource.TestStep{ + // Columns without policies + { + ConfigDirectory: acc.ConfigurationDirectory("TestAcc_View/basic"), + ConfigVariables: basicView("ID", "FOO"), + Check: assert.AssertThat(t, + resourceassert.ViewResource(t, "snowflake_view.test"). + HasNameString(id.Name()). + HasStatementString(statement). + HasDatabaseString(id.DatabaseName()). + HasSchemaString(id.SchemaName()). 
+ HasColumnLength(2), + ), + }, + // Columns with policies added externally + { + ConfigDirectory: acc.ConfigurationDirectory("TestAcc_View/basic"), + ConfigVariables: basicView("ID", "FOO"), + ConfigPlanChecks: resource.ConfigPlanChecks{ + PreApply: []plancheck.PlanCheck{ + plancheck.ExpectResourceAction("snowflake_view.test", plancheck.ResourceActionUpdate), + }, + }, + PreConfig: func() { + acc.TestClient().View.Alter(t, sdk.NewAlterViewRequest(id).WithSetMaskingPolicyOnColumn(*sdk.NewViewSetColumnMaskingPolicyRequest("ID", maskingPolicy.ID()).WithUsing([]sdk.Column{{Value: "ID"}}))) + acc.TestClient().View.Alter(t, sdk.NewAlterViewRequest(id).WithSetProjectionPolicyOnColumn(*sdk.NewViewSetProjectionPolicyRequest("ID", projectionPolicy).WithForce(true))) + }, + Check: assert.AssertThat(t, + resourceassert.ViewResource(t, "snowflake_view.test"). + HasNameString(id.Name()). + HasStatementString(statement). + HasDatabaseString(id.DatabaseName()). + HasSchemaString(id.SchemaName()). + HasColumnLength(2), + objectassert.View(t, id). + HasNoMaskingPolicyReferences(). + HasNoProjectionPolicyReferences(), + ), + }, + // With all policies on columns + { + ConfigDirectory: acc.ConfigurationDirectory("TestAcc_View/columns"), + ConfigVariables: basicViewWithPolicies(), + Check: assert.AssertThat(t, + resourceassert.ViewResource(t, "snowflake_view.test"). + HasNameString(id.Name()). + HasStatementString(statement). + HasDatabaseString(id.DatabaseName()). + HasSchemaString(id.SchemaName()). + HasColumnLength(2), + objectassert.View(t, id). + HasMaskingPolicyReferences(1). 
+ HasProjectionPolicyReferences(1), + ), + }, + // Remove policies on columns externally + { + ConfigDirectory: acc.ConfigurationDirectory("TestAcc_View/columns"), + ConfigVariables: basicViewWithPolicies(), + PreConfig: func() { + acc.TestClient().View.Alter(t, sdk.NewAlterViewRequest(id).WithUnsetMaskingPolicyOnColumn(*sdk.NewViewUnsetColumnMaskingPolicyRequest("ID"))) + acc.TestClient().View.Alter(t, sdk.NewAlterViewRequest(id).WithUnsetProjectionPolicyOnColumn(*sdk.NewViewUnsetProjectionPolicyRequest("ID"))) + }, + Check: assert.AssertThat(t, + resourceassert.ViewResource(t, "snowflake_view.test"). + HasNameString(id.Name()). + HasStatementString(statement). + HasDatabaseString(id.DatabaseName()). + HasSchemaString(id.SchemaName()). + HasColumnLength(2), + objectassert.View(t, id). + HasMaskingPolicyReferences(1). + HasProjectionPolicyReferences(1), + ), + }, + }, + }) +} + func TestAcc_View_Rename(t *testing.T) { t.Setenv(string(testenvs.ConfigureClientOnce), "") statement := "SELECT ROLE_NAME, ROLE_OWNER FROM INFORMATION_SCHEMA.APPLICABLE_ROLES" diff --git a/pkg/resources/view_test.go b/pkg/resources/view_test.go new file mode 100644 index 0000000000..15df40852a --- /dev/null +++ b/pkg/resources/view_test.go @@ -0,0 +1,355 @@ +package resources + +import ( + "fmt" + "reflect" + "testing" + + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk" + "github.com/stretchr/testify/assert" +) + +type testResourceValueSetter struct { + internalMap map[string]any +} + +func newTestResourceValueSetter() *testResourceValueSetter { + return &testResourceValueSetter{ + internalMap: make(map[string]any), + } +} + +func (s *testResourceValueSetter) Set(key string, value any) error { + s.internalMap[key] = value + return nil +} + +func Test_handleColumns(t *testing.T) { + testCases := []struct { + InputColumns []sdk.ViewDetails + InputPolicyReferences []sdk.PolicyReference + Expected map[string]any + Error string + }{ + { + InputColumns: []sdk.ViewDetails{}, + 
InputPolicyReferences: []sdk.PolicyReference{}, + Expected: map[string]any{ + "column": nil, + }, + }, + { + InputColumns: []sdk.ViewDetails{ + { + Name: "name", + Comment: nil, + }, + }, + InputPolicyReferences: []sdk.PolicyReference{}, + Expected: map[string]any{ + "column": []map[string]any{ + { + "column_name": "name", + "comment": nil, + }, + }, + }, + }, + { + InputColumns: []sdk.ViewDetails{ + { + Name: "name", + Comment: sdk.String("comment"), + }, + }, + InputPolicyReferences: []sdk.PolicyReference{}, + Expected: map[string]any{ + "column": []map[string]any{ + { + "column_name": "name", + "comment": "comment", + }, + }, + }, + }, + { + InputColumns: []sdk.ViewDetails{ + { + Name: "name", + Comment: sdk.String("comment"), + }, + { + Name: "name2", + Comment: sdk.String("comment2"), + }, + }, + InputPolicyReferences: []sdk.PolicyReference{}, + Expected: map[string]any{ + "column": []map[string]any{ + { + "column_name": "name", + "comment": "comment", + }, + { + "column_name": "name2", + "comment": "comment2", + }, + }, + }, + }, + { + InputColumns: []sdk.ViewDetails{ + { + Name: "name", + Comment: sdk.String("comment"), + }, + { + Name: "name2", + Comment: sdk.String("comment2"), + }, + }, + InputPolicyReferences: []sdk.PolicyReference{ + { + PolicyDb: sdk.String("db"), + PolicySchema: sdk.String("sch"), + PolicyName: "policyName", + PolicyKind: sdk.PolicyKindProjectionPolicy, + RefColumnName: sdk.String("name"), + }, + }, + Expected: map[string]any{ + "column": []map[string]any{ + { + "column_name": "name", + "comment": "comment", + "projection_policy": []map[string]any{ + { + "policy_name": sdk.NewSchemaObjectIdentifier("db", "sch", "policyName").FullyQualifiedName(), + }, + }, + }, + { + "column_name": "name2", + "comment": "comment2", + }, + }, + }, + }, + { + InputColumns: []sdk.ViewDetails{ + { + Name: "name", + Comment: sdk.String("comment"), + }, + { + Name: "name2", + Comment: sdk.String("comment2"), + }, + }, + InputPolicyReferences: 
[]sdk.PolicyReference{ + { + PolicyDb: sdk.String("db"), + PolicySchema: sdk.String("sch"), + PolicyName: "policyName", + PolicyKind: sdk.PolicyKindProjectionPolicy, + RefColumnName: sdk.String("name"), + }, + { + PolicyDb: sdk.String("db"), + PolicySchema: sdk.String("sch"), + PolicyName: "policyName2", + PolicyKind: sdk.PolicyKindMaskingPolicy, + RefColumnName: sdk.String("name"), + RefArgColumnNames: sdk.String("[one,two]"), + }, + }, + Expected: map[string]any{ + "column": []map[string]any{ + { + "column_name": "name", + "comment": "comment", + "projection_policy": []map[string]any{ + { + "policy_name": sdk.NewSchemaObjectIdentifier("db", "sch", "policyName").FullyQualifiedName(), + }, + }, + "masking_policy": []map[string]any{ + { + "policy_name": sdk.NewSchemaObjectIdentifier("db", "sch", "policyName2").FullyQualifiedName(), + "using": []string{"name", "one", "two"}, + }, + }, + }, + { + "column_name": "name2", + "comment": "comment2", + }, + }, + }, + }, + } + + for i, tc := range testCases { + t.Run(fmt.Sprintf("handle columns(%d): %v - %v", i, tc.InputColumns, tc.InputPolicyReferences), func(t *testing.T) { + valueSetter := newTestResourceValueSetter() + err := handleColumns(valueSetter, tc.InputColumns, tc.InputPolicyReferences) + + if tc.Error != "" { + assert.NotNil(t, err) + assert.Contains(t, err.Error(), tc.Error) + } else { + assert.Nil(t, err) + assert.Equal(t, tc.Expected, valueSetter.internalMap) + } + }) + } +} + +func Test_extractColumns(t *testing.T) { + testCases := []struct { + Input any + Expected []sdk.ViewColumnRequest + Error string + }{ + { + Input: "", + Error: "unable to extract columns, input is either nil or non expected type (string): ", + }, + { + Input: nil, + Error: "unable to extract columns, input is either nil or non expected type (): ", + }, + { + Input: []any{""}, + Error: "unable to extract column, non expected type of string: ", + }, + { + Input: []any{ + map[string]any{}, + }, + Error: "unable to extract column, missing 
column_name key in column", + }, + { + Input: []any{ + map[string]any{ + "column_name": "abc", + }, + }, + Expected: []sdk.ViewColumnRequest{ + *sdk.NewViewColumnRequest("abc"), + }, + }, + { + Input: []any{ + map[string]any{ + "column_name": "abc", + }, + map[string]any{ + "column_name": "cba", + }, + }, + Expected: []sdk.ViewColumnRequest{ + *sdk.NewViewColumnRequest("abc"), + *sdk.NewViewColumnRequest("cba"), + }, + }, + { + Input: []any{ + map[string]any{ + "column_name": "abc", + "projection_policy": []any{ + map[string]any{ + "policy_name": "db.sch.proj", + }, + }, + "masking_policy": []any{ + map[string]any{ + "policy_name": "db.sch.mask", + "using": []any{"one", "two"}, + }, + }, + }, + map[string]any{ + "column_name": "cba", + }, + }, + Expected: []sdk.ViewColumnRequest{ + *sdk.NewViewColumnRequest("abc"). + WithProjectionPolicy(*sdk.NewViewColumnProjectionPolicyRequest(sdk.NewSchemaObjectIdentifier("db", "sch", "proj"))). + WithMaskingPolicy(*sdk.NewViewColumnMaskingPolicyRequest(sdk.NewSchemaObjectIdentifier("db", "sch", "mask")).WithUsing([]sdk.Column{{Value: "one"}, {Value: "two"}})), + *sdk.NewViewColumnRequest("cba"), + }, + }, + } + + for i, tc := range testCases { + t.Run(fmt.Sprintf("%d: %s", i, tc.Input), func(t *testing.T) { + req, err := extractColumns(tc.Input) + + if tc.Error != "" { + assert.Nil(t, req) + assert.NotNil(t, err) + assert.Contains(t, err.Error(), tc.Error) + } else { + assert.True(t, reflect.DeepEqual(tc.Expected, req)) + assert.Nil(t, err) + } + }) + } +} + +func Test_extractPolicyWithColumnsList(t *testing.T) { + testCases := []struct { + Input any + ColumnKey string + ExpectedId sdk.SchemaObjectIdentifier + ExpectedColumns []sdk.Column + Error string + }{ + { + Input: []any{ + map[string]any{ + "policy_name": "db.sch.pol", + "using": []any{"one", "two"}, + }, + }, + ColumnKey: "non-existing", + Error: "unable to extract policy with column list, unable to find columnsKey: non-existing", + }, + { + Input: []any{ + 
map[string]any{ + "policy_name": "db.sch.pol", + }, + }, + ColumnKey: "using", + Error: "unable to extract policy with column list, unable to find columnsKey: using", + }, + { + Input: []any{ + map[string]any{ + "policy_name": "db.sch.pol", + "using": []any{"one", "two"}, + }, + }, + ColumnKey: "using", + ExpectedId: sdk.NewSchemaObjectIdentifier("db", "sch", "pol"), + ExpectedColumns: []sdk.Column{{Value: "one"}, {Value: "two"}}, + }, + } + + for i, tc := range testCases { + t.Run(fmt.Sprintf("%d: %s", i, tc.Input), func(t *testing.T) { + id, cols, err := extractPolicyWithColumnsList(tc.Input, tc.ColumnKey) + + if tc.Error != "" { + assert.NotNil(t, err) + assert.Contains(t, err.Error(), tc.Error) + } else { + assert.Nil(t, err) + assert.Equal(t, tc.ExpectedId, id) + assert.Equal(t, tc.ExpectedColumns, cols) + } + }) + } +} From 528c71067c69b1f4c14e4b26dbc8974f23162608 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jan=20Cie=C5=9Blak?= Date: Mon, 2 Sep 2024 13:15:15 +0200 Subject: [PATCH 12/13] Changes after review --- pkg/resources/view.go | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pkg/resources/view.go b/pkg/resources/view.go index 854175bdf0..e5a832f5c8 100644 --- a/pkg/resources/view.go +++ b/pkg/resources/view.go @@ -779,7 +779,7 @@ func handleColumns(d ResourceValueSetter, columns []sdk.ViewDetails, policyRefs } else { columnsRaw[i]["comment"] = nil } - projectionPolicy, err := collections.FindOne(policyRefs, func(r sdk.PolicyReference) bool { + projectionPolicy, err := collections.FindFirst(policyRefs, func(r sdk.PolicyReference) bool { return r.PolicyKind == sdk.PolicyKindProjectionPolicy && r.RefColumnName != nil && *r.RefColumnName == column.Name }) if err == nil { @@ -793,7 +793,7 @@ func handleColumns(d ResourceValueSetter, columns []sdk.ViewDetails, policyRefs log.Printf("could not store projection policy name: policy db and schema can not be empty") } } - maskingPolicy, err := collections.FindOne(policyRefs, func(r 
sdk.PolicyReference) bool { + maskingPolicy, err := collections.FindFirst(policyRefs, func(r sdk.PolicyReference) bool { return r.PolicyKind == sdk.PolicyKindMaskingPolicy && r.RefColumnName != nil && *r.RefColumnName == column.Name }) if err == nil { From aeb90894e84c52142a3e885b80513ac9f226184a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jan=20Cie=C5=9Blak?= Date: Mon, 2 Sep 2024 16:47:42 +0200 Subject: [PATCH 13/13] Changes after review --- .../assert/objectassert/view_snowflake_ext.go | 41 +++++++++---------- .../helpers/projection_policy_client.go | 4 +- .../resources_acceptance_tests_arch_test.go | 7 +++- pkg/resources/stream_acceptance_test.go | 6 +++ .../OnView/test.tf | 3 ++ .../OnView_NoGrant/test.tf | 3 ++ pkg/resources/view_acceptance_test.go | 12 +++--- pkg/resources/view_test.go | 11 +---- 8 files changed, 48 insertions(+), 39 deletions(-) diff --git a/pkg/acceptance/bettertestspoc/assert/objectassert/view_snowflake_ext.go b/pkg/acceptance/bettertestspoc/assert/objectassert/view_snowflake_ext.go index 7e06b64669..494bdadc1a 100644 --- a/pkg/acceptance/bettertestspoc/assert/objectassert/view_snowflake_ext.go +++ b/pkg/acceptance/bettertestspoc/assert/objectassert/view_snowflake_ext.go @@ -5,7 +5,6 @@ import ( "slices" "testing" - acc "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/helpers" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk" @@ -33,26 +32,26 @@ func (v *ViewAssert) HasNonEmptyText() *ViewAssert { return v } -func (v *ViewAssert) HasNoRowAccessPolicyReferences() *ViewAssert { - return v.hasNoPolicyReference(sdk.PolicyKindRowAccessPolicy) +func (v *ViewAssert) HasNoRowAccessPolicyReferences(client *helpers.TestClient) *ViewAssert { + return v.hasNoPolicyReference(client, sdk.PolicyKindRowAccessPolicy) } -func (v *ViewAssert) HasNoAggregationPolicyReferences() *ViewAssert { - return 
v.hasNoPolicyReference(sdk.PolicyKindAggregationPolicy) +func (v *ViewAssert) HasNoAggregationPolicyReferences(client *helpers.TestClient) *ViewAssert { + return v.hasNoPolicyReference(client, sdk.PolicyKindAggregationPolicy) } -func (v *ViewAssert) HasNoMaskingPolicyReferences() *ViewAssert { - return v.hasNoPolicyReference(sdk.PolicyKindMaskingPolicy) +func (v *ViewAssert) HasNoMaskingPolicyReferences(client *helpers.TestClient) *ViewAssert { + return v.hasNoPolicyReference(client, sdk.PolicyKindMaskingPolicy) } -func (v *ViewAssert) HasNoProjectionPolicyReferences() *ViewAssert { - return v.hasNoPolicyReference(sdk.PolicyKindProjectionPolicy) +func (v *ViewAssert) HasNoProjectionPolicyReferences(client *helpers.TestClient) *ViewAssert { + return v.hasNoPolicyReference(client, sdk.PolicyKindProjectionPolicy) } -func (v *ViewAssert) hasNoPolicyReference(kind sdk.PolicyKind) *ViewAssert { +func (v *ViewAssert) hasNoPolicyReference(client *helpers.TestClient, kind sdk.PolicyKind) *ViewAssert { v.AddAssertion(func(t *testing.T, o *sdk.View) error { t.Helper() - refs, err := acc.TestClient().PolicyReferences.GetPolicyReferences(t, o.ID(), sdk.ObjectTypeView) + refs, err := client.PolicyReferences.GetPolicyReferences(t, o.ID(), sdk.ObjectTypeView) if err != nil { return err } @@ -67,26 +66,26 @@ func (v *ViewAssert) hasNoPolicyReference(kind sdk.PolicyKind) *ViewAssert { return v } -func (v *ViewAssert) HasRowAccessPolicyReferences(n int) *ViewAssert { - return v.hasPolicyReference(sdk.PolicyKindRowAccessPolicy, n) +func (v *ViewAssert) HasRowAccessPolicyReferences(client *helpers.TestClient, n int) *ViewAssert { + return v.hasPolicyReference(client, sdk.PolicyKindRowAccessPolicy, n) } -func (v *ViewAssert) HasAggregationPolicyReferences(n int) *ViewAssert { - return v.hasPolicyReference(sdk.PolicyKindAggregationPolicy, n) +func (v *ViewAssert) HasAggregationPolicyReferences(client *helpers.TestClient, n int) *ViewAssert { + return v.hasPolicyReference(client, 
sdk.PolicyKindAggregationPolicy, n) } -func (v *ViewAssert) HasMaskingPolicyReferences(n int) *ViewAssert { - return v.hasPolicyReference(sdk.PolicyKindMaskingPolicy, n) +func (v *ViewAssert) HasMaskingPolicyReferences(client *helpers.TestClient, n int) *ViewAssert { + return v.hasPolicyReference(client, sdk.PolicyKindMaskingPolicy, n) } -func (v *ViewAssert) HasProjectionPolicyReferences(n int) *ViewAssert { - return v.hasPolicyReference(sdk.PolicyKindProjectionPolicy, n) +func (v *ViewAssert) HasProjectionPolicyReferences(client *helpers.TestClient, n int) *ViewAssert { + return v.hasPolicyReference(client, sdk.PolicyKindProjectionPolicy, n) } -func (v *ViewAssert) hasPolicyReference(kind sdk.PolicyKind, n int) *ViewAssert { +func (v *ViewAssert) hasPolicyReference(client *helpers.TestClient, kind sdk.PolicyKind, n int) *ViewAssert { v.AddAssertion(func(t *testing.T, o *sdk.View) error { t.Helper() - refs, err := acc.TestClient().PolicyReferences.GetPolicyReferences(t, o.ID(), sdk.ObjectTypeView) + refs, err := client.PolicyReferences.GetPolicyReferences(t, o.ID(), sdk.ObjectTypeView) if err != nil { return err } diff --git a/pkg/acceptance/helpers/projection_policy_client.go b/pkg/acceptance/helpers/projection_policy_client.go index 4adf3c07d4..e79b5d28b5 100644 --- a/pkg/acceptance/helpers/projection_policy_client.go +++ b/pkg/acceptance/helpers/projection_policy_client.go @@ -31,7 +31,7 @@ func (c *ProjectionPolicyClient) CreateProjectionPolicy(t *testing.T) (sdk.Schem ctx := context.Background() id := c.ids.RandomSchemaObjectIdentifier() - _, err := c.client().ExecForTests(ctx, fmt.Sprintf(`CREATE PROJECTION POLICY %s AS () RETURNS PROJECTION_CONSTRAINT -> PROJECTION_CONSTRAINT(ALLOW => false)`, id.Name())) + _, err := c.client().ExecForTests(ctx, fmt.Sprintf(`CREATE PROJECTION POLICY %s AS () RETURNS PROJECTION_CONSTRAINT -> PROJECTION_CONSTRAINT(ALLOW => false)`, id.FullyQualifiedName())) require.NoError(t, err) return id, c.DropProjectionPolicyFunc(t, id) 
} @@ -41,7 +41,7 @@ func (c *ProjectionPolicyClient) DropProjectionPolicyFunc(t *testing.T, id sdk.S ctx := context.Background() return func() { - _, err := c.client().ExecForTests(ctx, fmt.Sprintf(`DROP PROJECTION POLICY IF EXISTS %s`, id.Name())) + _, err := c.client().ExecForTests(ctx, fmt.Sprintf(`DROP PROJECTION POLICY IF EXISTS %s`, id.FullyQualifiedName())) require.NoError(t, err) } } diff --git a/pkg/architests/resources_acceptance_tests_arch_test.go b/pkg/architests/resources_acceptance_tests_arch_test.go index 7e3ddad5ca..3cfef473cc 100644 --- a/pkg/architests/resources_acceptance_tests_arch_test.go +++ b/pkg/architests/resources_acceptance_tests_arch_test.go @@ -26,7 +26,12 @@ func TestArchCheck_AcceptanceTests_Resources(t *testing.T) { }) t.Run("there are no acceptance tests in other test files in the directory", func(t *testing.T) { - otherTestFiles := resourcesFiles.Filter(architest.FileNameFilterWithExclusionsProvider(architest.TestFileRegex, architest.AcceptanceTestFileRegex, regexp.MustCompile("helpers_test.go"))) + otherTestFiles := resourcesFiles.Filter(architest.FileNameFilterWithExclusionsProvider( + architest.TestFileRegex, + architest.AcceptanceTestFileRegex, + regexp.MustCompile("helpers_test.go"), + regexp.MustCompile("view_test.go"), + )) otherTestFiles.All(func(file *architest.File) { file.ExportedMethods().All(func(method *architest.Method) { diff --git a/pkg/resources/stream_acceptance_test.go b/pkg/resources/stream_acceptance_test.go index 1fccdd2e23..2e595fea91 100644 --- a/pkg/resources/stream_acceptance_test.go +++ b/pkg/resources/stream_acceptance_test.go @@ -295,6 +295,12 @@ resource "snowflake_view" "test" { change_tracking = true statement = "select * from \"${snowflake_table.test.name}\"" + column { + column_name = "column1" + } + column { + column_name = "column2" + } } resource "snowflake_stream" "test_stream" { diff --git a/pkg/resources/testdata/TestAcc_GrantPrivilegesToShare/OnView/test.tf 
b/pkg/resources/testdata/TestAcc_GrantPrivilegesToShare/OnView/test.tf index 931f79a9cf..f2b35dabb4 100644 --- a/pkg/resources/testdata/TestAcc_GrantPrivilegesToShare/OnView/test.tf +++ b/pkg/resources/testdata/TestAcc_GrantPrivilegesToShare/OnView/test.tf @@ -28,6 +28,9 @@ resource "snowflake_view" "test" { schema = snowflake_schema.test.name is_secure = true statement = "select \"id\" from \"${snowflake_database.test.name}\".\"${snowflake_schema.test.name}\".\"${snowflake_table.test.name}\"" + column { + column_name = "id" + } } resource "snowflake_grant_privileges_to_share" "test_setup" { diff --git a/pkg/resources/testdata/TestAcc_GrantPrivilegesToShare/OnView_NoGrant/test.tf b/pkg/resources/testdata/TestAcc_GrantPrivilegesToShare/OnView_NoGrant/test.tf index 1dcf526eed..4a4ec953c4 100644 --- a/pkg/resources/testdata/TestAcc_GrantPrivilegesToShare/OnView_NoGrant/test.tf +++ b/pkg/resources/testdata/TestAcc_GrantPrivilegesToShare/OnView_NoGrant/test.tf @@ -28,4 +28,7 @@ resource "snowflake_view" "test" { schema = snowflake_schema.test.name is_secure = true statement = "select \"id\" from \"${snowflake_database.test.name}\".\"${snowflake_schema.test.name}\".\"${snowflake_table.test.name}\"" + column { + column_name = "id" + } } diff --git a/pkg/resources/view_acceptance_test.go b/pkg/resources/view_acceptance_test.go index e641266880..0a41c72ebe 100644 --- a/pkg/resources/view_acceptance_test.go +++ b/pkg/resources/view_acceptance_test.go @@ -765,8 +765,8 @@ end;; HasSchemaString(id.SchemaName()). HasColumnLength(2), objectassert.View(t, id). - HasNoMaskingPolicyReferences(). - HasNoProjectionPolicyReferences(), + HasNoMaskingPolicyReferences(acc.TestClient()). + HasNoProjectionPolicyReferences(acc.TestClient()), ), }, // With all policies on columns @@ -781,8 +781,8 @@ end;; HasSchemaString(id.SchemaName()). HasColumnLength(2), objectassert.View(t, id). - HasMaskingPolicyReferences(1). 
- HasProjectionPolicyReferences(1), + HasMaskingPolicyReferences(acc.TestClient(), 1). + HasProjectionPolicyReferences(acc.TestClient(), 1), ), }, // Remove policies on columns externally @@ -801,8 +801,8 @@ end;; HasSchemaString(id.SchemaName()). HasColumnLength(2), objectassert.View(t, id). - HasMaskingPolicyReferences(1). - HasProjectionPolicyReferences(1), + HasMaskingPolicyReferences(acc.TestClient(), 1). + HasProjectionPolicyReferences(acc.TestClient(), 1), ), }, }, diff --git a/pkg/resources/view_test.go b/pkg/resources/view_test.go index 15df40852a..875fd01391 100644 --- a/pkg/resources/view_test.go +++ b/pkg/resources/view_test.go @@ -29,7 +29,6 @@ func Test_handleColumns(t *testing.T) { InputColumns []sdk.ViewDetails InputPolicyReferences []sdk.PolicyReference Expected map[string]any - Error string }{ { InputColumns: []sdk.ViewDetails{}, @@ -193,14 +192,8 @@ func Test_handleColumns(t *testing.T) { t.Run(fmt.Sprintf("handle columns(%d): %v - %v", i, tc.InputColumns, tc.InputPolicyReferences), func(t *testing.T) { valueSetter := newTestResourceValueSetter() err := handleColumns(valueSetter, tc.InputColumns, tc.InputPolicyReferences) - - if tc.Error != "" { - assert.NotNil(t, err) - assert.Contains(t, err.Error(), tc.Error) - } else { - assert.Nil(t, err) - assert.Equal(t, tc.Expected, valueSetter.internalMap) - } + assert.Nil(t, err) + assert.Equal(t, tc.Expected, valueSetter.internalMap) }) } }