From 4b48ffe068e08d3b114dd6ad45538fa1de2c053e Mon Sep 17 00:00:00 2001 From: Artur Sawicki Date: Wed, 28 Aug 2024 08:38:59 +0200 Subject: [PATCH 1/8] Prove and fix stage rename error References: #2972 --- pkg/acceptance/helpers/stage_client.go | 8 ++++ pkg/resources/stage.go | 20 ++++++---- pkg/resources/stage_acceptance_test.go | 51 ++++++++++++++++++++++++++ 3 files changed, 71 insertions(+), 8 deletions(-) diff --git a/pkg/acceptance/helpers/stage_client.go b/pkg/acceptance/helpers/stage_client.go index 5505237709..529d2f9552 100644 --- a/pkg/acceptance/helpers/stage_client.go +++ b/pkg/acceptance/helpers/stage_client.go @@ -128,3 +128,11 @@ func (c *StageClient) CopyIntoTableFromFile(t *testing.T, table, stage sdk.Schem MATCH_BY_COLUMN_NAME = CASE_INSENSITIVE`, table.FullyQualifiedName(), stage.FullyQualifiedName(), filename)) require.NoError(t, err) } + +func (c *StageClient) Rename(t *testing.T, id sdk.SchemaObjectIdentifier, newId sdk.SchemaObjectIdentifier) { + t.Helper() + ctx := context.Background() + + err := c.client().Alter(ctx, sdk.NewAlterStageRequest(id).WithRenameTo(&newId)) + require.NoError(t, err) +} diff --git a/pkg/resources/stage.go b/pkg/resources/stage.go index d102bd2a71..47a1747524 100644 --- a/pkg/resources/stage.go +++ b/pkg/resources/stage.go @@ -2,6 +2,7 @@ package resources import ( "context" + "errors" "fmt" "strings" @@ -174,14 +175,17 @@ func ReadStage(ctx context.Context, d *schema.ResourceData, meta any) diag.Diagn properties, err := client.Stages.Describe(ctx, id) if err != nil { - d.SetId("") - return diag.Diagnostics{ - diag.Diagnostic{ - Severity: diag.Error, - Summary: "Failed to describe stage", - Detail: fmt.Sprintf("Id: %s, Err: %s", d.Id(), err), - }, + if errors.Is(err, sdk.ErrObjectNotExistOrAuthorized) { + d.SetId("") + return diag.Diagnostics{ + diag.Diagnostic{ + Severity: diag.Warning, + Summary: "Failed to describe stage. 
Marking the resource as removed.", + Detail: fmt.Sprintf("Stage: %s, Err: %s", id.FullyQualifiedName(), err), + }, + } } + return diag.FromErr(err) } stage, err := client.Stages.ShowByID(ctx, id) @@ -191,7 +195,7 @@ func ReadStage(ctx context.Context, d *schema.ResourceData, meta any) diag.Diagn diag.Diagnostic{ Severity: diag.Error, Summary: "Failed to show stage by id", - Detail: fmt.Sprintf("Id: %s, Err: %s", d.Id(), err), + Detail: fmt.Sprintf("Stage: %s, Err: %s", id.FullyQualifiedName(), err), }, } } diff --git a/pkg/resources/stage_acceptance_test.go b/pkg/resources/stage_acceptance_test.go index 7275987e56..e5489fdff5 100644 --- a/pkg/resources/stage_acceptance_test.go +++ b/pkg/resources/stage_acceptance_test.go @@ -204,3 +204,54 @@ resource "snowflake_stage" "test" { } `, name, siNameSuffix, url, databaseName, schemaName) } + +func TestAcc_Stage_Issue2972(t *testing.T) { + stageId := acc.TestClient().Ids.RandomSchemaObjectIdentifier() + newId := acc.TestClient().Ids.RandomSchemaObjectIdentifierInSchema(stageId.SchemaId()) + resourceName := "snowflake_stage.test" + + resource.Test(t, resource.TestCase{ + ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, + PreCheck: func() { acc.TestAccPreCheck(t) }, + TerraformVersionChecks: []tfversion.TerraformVersionCheck{ + tfversion.RequireAbove(tfversion.Version1_5_0), + }, + CheckDestroy: acc.CheckDestroy(t, resources.Stage), + Steps: []resource.TestStep{ + { + Config: stageIssue2972Config(stageId), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr(resourceName, "database", stageId.DatabaseName()), + resource.TestCheckResourceAttr(resourceName, "schema", stageId.SchemaName()), + resource.TestCheckResourceAttr(resourceName, "name", stageId.Name()), + ), + }, + { + PreConfig: func() { + acc.TestClient().Stage.Rename(t, stageId, newId) + }, + ConfigPlanChecks: resource.ConfigPlanChecks{ + PreApply: []plancheck.PlanCheck{ + plancheck.ExpectResourceAction(resourceName, plancheck.ResourceActionCreate), + }, + }, + Config: stageIssue2972Config(stageId), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr(resourceName, "database", stageId.DatabaseName()), + resource.TestCheckResourceAttr(resourceName, "schema", stageId.SchemaName()), + resource.TestCheckResourceAttr(resourceName, "name", stageId.Name()), + ), + }, + }, + }) +} + +func stageIssue2972Config(stageId sdk.SchemaObjectIdentifier) string { + return fmt.Sprintf(` +resource "snowflake_stage" "test" { + name = "%[1]s" + database = "%[2]s" + schema = "%[3]s" +} +`, stageId.Name(), stageId.DatabaseName(), stageId.SchemaName()) +} From 20d51cfa986b43d592862d3f36e6081962f507d2 Mon Sep 17 00:00:00 2001 From: Artur Sawicki Date: Wed, 28 Aug 2024 11:11:56 +0200 Subject: [PATCH 2/8] Provide better diff suppressors for text and number data types References: #3007 --- pkg/internal/util/strings.go | 11 +++ pkg/internal/util/strings_test.go | 34 +++++++ pkg/resources/helpers.go | 33 ++++++- pkg/resources/helpers_test.go | 145 ++++++++++++++++++++++++++++++ pkg/sdk/data_types.go | 106 ++++++++++++++++------ pkg/sdk/data_types_test.go | 76 ++++++++++++++++ 6 files changed, 376 insertions(+), 29 deletions(-) create mode 100644 pkg/internal/util/strings.go create mode 100644 pkg/internal/util/strings_test.go diff --git a/pkg/internal/util/strings.go b/pkg/internal/util/strings.go new file mode 100644 index 0000000000..0b5021ea51 --- /dev/null +++ b/pkg/internal/util/strings.go @@ -0,0 +1,11 @@ +package util + +import "strings" + +func 
TrimAllPrefixes(text string, prefixes ...string) string { + result := text + for _, prefix := range prefixes { + result = strings.TrimPrefix(result, prefix) + } + return result +} diff --git a/pkg/internal/util/strings_test.go b/pkg/internal/util/strings_test.go new file mode 100644 index 0000000000..ab2560b65d --- /dev/null +++ b/pkg/internal/util/strings_test.go @@ -0,0 +1,34 @@ +package util + +import ( + "testing" + + "github.com/stretchr/testify/require" +) + +func Test_TrimAllPrefixes(t *testing.T) { + type test struct { + input string + prefixes []string + expected string + } + + tests := []test{ + {input: "VARCHAR(30)", prefixes: []string{"VARCHAR", "TEXT"}, expected: "(30)"}, + {input: "VARCHAR (30) ", prefixes: []string{"VARCHAR", "TEXT"}, expected: " (30) "}, + {input: "VARCHAR(30)", prefixes: []string{"VARCHAR"}, expected: "(30)"}, + {input: "VARCHAR(30)", prefixes: []string{}, expected: "VARCHAR(30)"}, + {input: "VARCHARVARCHAR(30)", prefixes: []string{"VARCHAR"}, expected: "VARCHAR(30)"}, + {input: "VARCHAR(30)", prefixes: []string{"NUMBER"}, expected: "VARCHAR(30)"}, + {input: "VARCHARTEXT(30)", prefixes: []string{"VARCHAR", "TEXT"}, expected: "(30)"}, + {input: "TEXTVARCHAR(30)", prefixes: []string{"VARCHAR", "TEXT"}, expected: "VARCHAR(30)"}, + } + + for _, tc := range tests { + tc := tc + t.Run(tc.input, func(t *testing.T) { + output := TrimAllPrefixes(tc.input, tc.prefixes...) + require.Equal(t, tc.expected, output) + }) + } +} diff --git a/pkg/resources/helpers.go b/pkg/resources/helpers.go index 7b91689d48..70a0c386ce 100644 --- a/pkg/resources/helpers.go +++ b/pkg/resources/helpers.go @@ -5,10 +5,10 @@ import ( "slices" "strings" - "github.com/hashicorp/terraform-plugin-sdk/v2/diag" - + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/internal/logging" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/snowflake" + "github.com/hashicorp/terraform-plugin-sdk/v2/diag" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" ) @@ -35,6 +35,35 @@ func dataTypeDiffSuppressFunc(_, old, new string, _ *schema.ResourceData) bool { return oldDT == newDT } +func DataTypeIssue3007DiffSuppressFunc(_, old, new string, _ *schema.ResourceData) bool { + oldDataType, err := sdk.ToDataType(old) + if err != nil { + return false + } + newDataType, err := sdk.ToDataType(new) + if err != nil { + return false + } + if oldDataType != newDataType { + return false + } + switch v := oldDataType; v { + case sdk.DataTypeNumber: + logging.DebugLogger.Printf("[DEBUG] Handling number data type diff suppression") + oldPrecision, oldScale := sdk.ParseNumberDataTypeRaw(old) + newPrecision, newScale := sdk.ParseNumberDataTypeRaw(new) + return oldPrecision == newPrecision && oldScale == newScale + case sdk.DataTypeVARCHAR: + logging.DebugLogger.Printf("[DEBUG] Handling text data type diff suppression") + oldLength := sdk.ParseVarcharDataTypeRaw(old) + newLength := sdk.ParseVarcharDataTypeRaw(new) + return oldLength == newLength + default: + logging.DebugLogger.Printf("[DEBUG] Diff suppression for %s can't be currently handled", v) + } + return true +} + func ignoreTrimSpaceSuppressFunc(_, old, new string, _ *schema.ResourceData) bool { return strings.TrimSpace(old) == strings.TrimSpace(new) } diff --git a/pkg/resources/helpers_test.go b/pkg/resources/helpers_test.go index 6447fa8e93..72d1e1305d 100644 --- a/pkg/resources/helpers_test.go +++ b/pkg/resources/helpers_test.go @@ -3,6 +3,7 @@ package resources_test import 
( "context" "fmt" + "github.com/stretchr/testify/require" "slices" "testing" @@ -258,3 +259,147 @@ func TestListDiff(t *testing.T) { }) } } + +func Test_DataTypeIssue3007DiffSuppressFunc(t *testing.T) { + testCases := []struct { + name string + old string + new string + expected bool + }{ + { + name: "different data type", + old: string(sdk.DataTypeVARCHAR), + new: string(sdk.DataTypeNumber), + expected: false, + }, + { + name: "same number data type without arguments", + old: string(sdk.DataTypeNumber), + new: string(sdk.DataTypeNumber), + expected: true, + }, + { + name: "same number data type different casing", + old: string(sdk.DataTypeNumber), + new: "number", + expected: true, + }, + { + name: "same text data type without arguments", + old: string(sdk.DataTypeVARCHAR), + new: string(sdk.DataTypeVARCHAR), + expected: true, + }, + { + name: "same other data type", + old: string(sdk.DataTypeFloat), + new: string(sdk.DataTypeFloat), + expected: true, + }, + { + name: "synonym number data type without arguments", + old: string(sdk.DataTypeNumber), + new: "DECIMAL", + expected: true, + }, + { + name: "synonym text data type without arguments", + old: string(sdk.DataTypeVARCHAR), + new: "TEXT", + expected: true, + }, + { + name: "synonym other data type without arguments", + old: string(sdk.DataTypeFloat), + new: "DOUBLE", + expected: true, + }, + { + name: "synonym number data type same precision, no scale", + old: "NUMBER(30)", + new: "DECIMAL(30)", + expected: true, + }, + { + name: "synonym number data type precision implicit and same", + old: "NUMBER", + new: fmt.Sprintf("DECIMAL(%d)", sdk.DefaultNumberPrecision), + expected: true, + }, + { + name: "synonym number data type precision implicit and different", + old: "NUMBER", + new: "DECIMAL(30)", + expected: false, + }, + { + name: "number data type different precisions, no scale", + old: "NUMBER(35)", + new: "NUMBER(30)", + expected: false, + }, + { + name: "synonym number data type same precision, different scale", + old: "NUMBER(30, 2)", + new: "DECIMAL(30, 1)", + expected: false, + }, + { + name: "synonym number data type default scale implicit and explicit", + old: "NUMBER(30)", + new: fmt.Sprintf("DECIMAL(30, %d)", sdk.DefaultNumberScale), + expected: true, + }, + { + name: "synonym number data type default scale implicit and different", + old: "NUMBER(30)", + new: "DECIMAL(30, 3)", + expected: false, + }, + { + name: "synonym number data type both precision and scale implicit and explicit", + old: "NUMBER", + new: fmt.Sprintf("DECIMAL(%d, %d)", sdk.DefaultNumberPrecision, sdk.DefaultNumberScale), + expected: true, + }, + { + name: "synonym number data type both precision and scale implicit and scale different", + old: "NUMBER", + new: fmt.Sprintf("DECIMAL(%d, 2)", sdk.DefaultNumberPrecision), + expected: false, + }, + { + name: "synonym text data type same length", + old: "VARCHAR(30)", + new: "TEXT(30)", + expected: true, + }, + { + name: "synonym text data type different length", + old: "VARCHAR(30)", + new: "TEXT(40)", + expected: false, + }, + { + name: "synonym text data type length implicit and same", + old: "VARCHAR", + new: fmt.Sprintf("TEXT(%d)", sdk.DefaultVarcharLength), + expected: true, + }, + { + name: "synonym text data type length implicit and different", + old: "VARCHAR", + new: "TEXT(40)", + expected: false, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(t *testing.T) { + result := resources.DataTypeIssue3007DiffSuppressFunc("", tc.old, tc.new, nil) + require.Equal(t, tc.expected, 
result) + }) + } +} diff --git a/pkg/sdk/data_types.go b/pkg/sdk/data_types.go index 9a8c46c07f..cae4f322f7 100644 --- a/pkg/sdk/data_types.go +++ b/pkg/sdk/data_types.go @@ -3,7 +3,11 @@ package sdk import ( "fmt" "slices" + "strconv" "strings" + + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/internal/logging" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/internal/util" ) // DataType is based on https://docs.snowflake.com/en/sql-reference/intro-summary-data-types. @@ -35,6 +39,21 @@ const ( DataTypeGeometry DataType = "GEOMETRY" ) +var DataTypeNumberSynonyms = []string{"NUMBER", "DECIMAL", "NUMERIC", "INT", "INTEGER", "BIGINT", "SMALLINT", "TINYINT", "BYTEINT"} +var DataTypeFloatSynonyms = []string{"FLOAT", "FLOAT4", "FLOAT8", "DOUBLE", "DOUBLE PRECISION", "REAL"} +var DataTypeVarcharSynonyms = []string{"VARCHAR", "CHAR", "CHARACTER", "STRING", "TEXT"} +var DataTypeBinarySynonyms = []string{"BINARY", "VARBINARY"} +var DataTypeBooleanSynonyms = []string{"BOOLEAN", "BOOL"} +var DataTypeTimestampLTZSynonyms = []string{"TIMESTAMP_LTZ"} +var DataTypeTimestampTZSynonyms = []string{"TIMESTAMP_TZ"} +var DataTypeTimestampNTZSynonyms = []string{"DATETIME", "TIMESTAMP", "TIMESTAMP_NTZ"} +var DataTypeTimeSynonyms = []string{"TIME"} +var DataTypeVectorSynonyms = []string{"VECTOR"} + +const DefaultNumberPrecision = 38 +const DefaultNumberScale = 0 +const DefaultVarcharLength = 16777216 + func ToDataType(s string) (DataType, error) { dType := strings.ToUpper(s) @@ -53,53 +72,36 @@ func ToDataType(s string) (DataType, error) { return DataTypeGeometry, nil } - numberSynonyms := []string{"NUMBER", "DECIMAL", "NUMERIC", "INT", "INTEGER", "BIGINT", "SMALLINT", "TINYINT", "BYTEINT"} - if slices.ContainsFunc(numberSynonyms, func(s string) bool { return strings.HasPrefix(dType, s) }) { + if slices.ContainsFunc(DataTypeNumberSynonyms, func(s string) bool { return strings.HasPrefix(dType, s) }) { return DataTypeNumber, nil } - - floatSynonyms := []string{"FLOAT", "FLOAT4", "FLOAT8", "DOUBLE", "DOUBLE PRECISION", "REAL"} - if slices.ContainsFunc(floatSynonyms, func(s string) bool { return strings.HasPrefix(dType, s) }) { + if slices.ContainsFunc(DataTypeFloatSynonyms, func(s string) bool { return strings.HasPrefix(dType, s) }) { return DataTypeFloat, nil } - varcharSynonyms := []string{"VARCHAR", "CHAR", "CHARACTER", "STRING", "TEXT"} - if slices.ContainsFunc(varcharSynonyms, func(s string) bool { return strings.HasPrefix(dType, s) }) { + if slices.ContainsFunc(DataTypeVarcharSynonyms, func(s string) bool { return strings.HasPrefix(dType, s) }) { return DataTypeVARCHAR, nil } - binarySynonyms := []string{"BINARY", "VARBINARY"} - if slices.ContainsFunc(binarySynonyms, func(s string) bool { return strings.HasPrefix(dType, s) }) { + if slices.ContainsFunc(DataTypeBinarySynonyms, func(s string) bool { return strings.HasPrefix(dType, s) }) { return DataTypeBinary, nil } - booleanSynonyms := []string{"BOOLEAN", "BOOL"} - if slices.Contains(booleanSynonyms, dType) { + if slices.Contains(DataTypeBooleanSynonyms, dType) { return DataTypeBoolean, nil } - - timestampLTZSynonyms := []string{"TIMESTAMP_LTZ"} - if slices.ContainsFunc(timestampLTZSynonyms, func(s string) bool { return strings.HasPrefix(dType, s) }) { + if slices.ContainsFunc(DataTypeTimestampLTZSynonyms, func(s string) bool { return strings.HasPrefix(dType, s) }) { return DataTypeTimestampLTZ, nil } - - timestampTZSynonyms := []string{"TIMESTAMP_TZ"} - if slices.ContainsFunc(timestampTZSynonyms, func(s string) bool { return 
strings.HasPrefix(dType, s) }) { + if slices.ContainsFunc(DataTypeTimestampTZSynonyms, func(s string) bool { return strings.HasPrefix(dType, s) }) { return DataTypeTimestampTZ, nil } - - timestampNTZSynonyms := []string{"DATETIME", "TIMESTAMP", "TIMESTAMP_NTZ"} - if slices.ContainsFunc(timestampNTZSynonyms, func(s string) bool { return strings.HasPrefix(dType, s) }) { + if slices.ContainsFunc(DataTypeTimestampNTZSynonyms, func(s string) bool { return strings.HasPrefix(dType, s) }) { return DataTypeTimestampNTZ, nil } - - timeSynonyms := []string{"TIME"} - if slices.ContainsFunc(timeSynonyms, func(s string) bool { return strings.HasPrefix(dType, s) }) { + if slices.ContainsFunc(DataTypeTimeSynonyms, func(s string) bool { return strings.HasPrefix(dType, s) }) { return DataTypeTime, nil } - - vectorSynonyms := []string{"VECTOR"} - if slices.ContainsFunc(vectorSynonyms, func(e string) bool { return strings.HasPrefix(dType, e) }) { + if slices.ContainsFunc(DataTypeVectorSynonyms, func(e string) bool { return strings.HasPrefix(dType, e) }) { return DataType(dType), nil } - return "", fmt.Errorf("invalid data type: %s", s) } @@ -112,3 +114,53 @@ func IsStringType(_type string) bool { strings.HasPrefix(t, "NVARCHAR") || strings.HasPrefix(t, "NCHAR") } + +func ParseNumberDataTypeRaw(rawDataType string) (int, int) { + r := util.TrimAllPrefixes(strings.TrimSpace(strings.ToUpper(rawDataType)), DataTypeNumberSynonyms...) + r = strings.TrimSpace(r) + if strings.HasPrefix(r, "(") && strings.HasSuffix(r, ")") { + parts := strings.Split(r[1:len(r)-1], ",") + switch l := len(parts); l { + case 1: + precision, err := strconv.Atoi(strings.TrimSpace(parts[0])) + if err == nil { + return precision, DefaultNumberScale + } else { + logging.DebugLogger.Printf(`[DEBUG] Could not parse number precision "%s", err: %v`, parts[0], err) + } + case 2: + precision, err1 := strconv.Atoi(strings.TrimSpace(parts[0])) + scale, err2 := strconv.Atoi(strings.TrimSpace(parts[1])) + if err1 == nil && err2 == nil { + return precision, scale + } else { + logging.DebugLogger.Printf(`[DEBUG] Could not parse number precision "%s" or scale "%s", errs: %v, %v`, parts[0], parts[1], err1, err2) + } + default: + logging.DebugLogger.Printf("[DEBUG] Unexpected length of number arguments") + } + } + logging.DebugLogger.Printf("[DEBUG] Returning default number precision and scale") + return DefaultNumberPrecision, DefaultNumberScale +} + +func ParseVarcharDataTypeRaw(rawDataType string) int { + r := util.TrimAllPrefixes(strings.TrimSpace(strings.ToUpper(rawDataType)), DataTypeVarcharSynonyms...) 
+ r = strings.TrimSpace(r) + if strings.HasPrefix(r, "(") && strings.HasSuffix(r, ")") { + parts := strings.Split(r[1:len(r)-1], ",") + switch l := len(parts); l { + case 1: + length, err := strconv.Atoi(strings.TrimSpace(parts[0])) + if err == nil { + return length + } else { + logging.DebugLogger.Printf(`[DEBUG] Could not parse varchar length "%s", err: %v`, parts[0], err) + } + default: + logging.DebugLogger.Printf("[DEBUG] Unexpected length of varchar arguments") + } + } + logging.DebugLogger.Printf("[DEBUG] Returning default varchar length") + return DefaultVarcharLength +} diff --git a/pkg/sdk/data_types_test.go b/pkg/sdk/data_types_test.go index 17abeca4c4..156e5dc8f2 100644 --- a/pkg/sdk/data_types_test.go +++ b/pkg/sdk/data_types_test.go @@ -1,8 +1,10 @@ package sdk import ( + "fmt" "testing" + "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) @@ -140,3 +142,77 @@ func TestIsStringType(t *testing.T) { }) } } + +func Test_ParseNumberDataTypeRaw(t *testing.T) { + type test struct { + input string + expectedPrecision int + expectedScale int + } + defaults := func(input string) test { + return test{input: input, expectedPrecision: DefaultNumberPrecision, expectedScale: DefaultNumberScale} + } + + tests := []test{ + {input: "NUMBER(30)", expectedPrecision: 30, expectedScale: DefaultNumberScale}, + {input: "NUMBER(30, 2)", expectedPrecision: 30, expectedScale: 2}, + {input: "decimal(30, 2)", expectedPrecision: 30, expectedScale: 2}, + {input: "NUMBER( 30 , 2 )", expectedPrecision: 30, expectedScale: 2}, + {input: " NUMBER ( 30 , 2 ) ", expectedPrecision: 30, expectedScale: 2}, + + // returns defaults if it can't parse arguments, data type is different, or no arguments were provided + defaults("VARCHAR(1, 2)"), + defaults("VARCHAR(1)"), + defaults("VARCHAR"), + defaults("NUMBER"), + defaults("NUMBER()"), + defaults("NUMBER(x)"), + defaults(fmt.Sprintf("NUMBER(%d)", DefaultNumberPrecision)), + defaults(fmt.Sprintf("NUMBER(%d, x)", DefaultNumberPrecision)), + defaults(fmt.Sprintf("NUMBER(x, %d)", DefaultNumberScale)), + defaults("NUMBER(1, 2, 3)"), + } + + for _, tc := range tests { + tc := tc + t.Run(tc.input, func(t *testing.T) { + precision, scale := ParseNumberDataTypeRaw(tc.input) + assert.Equal(t, tc.expectedPrecision, precision) + assert.Equal(t, tc.expectedScale, scale) + }) + } +} + +func Test_ParseVarcharDataTypeRaw(t *testing.T) { + type test struct { + input string + expectedLength int + } + defaults := func(input string) test { + return test{input: input, expectedLength: DefaultVarcharLength} + } + + tests := []test{ + {input: "VARCHAR(30)", expectedLength: 30}, + {input: "text(30)", expectedLength: 30}, + {input: "VARCHAR( 30 )", expectedLength: 30}, + {input: " VARCHAR ( 30 ) ", expectedLength: 30}, + + // returns defaults if it can't parse arguments, data type is different, or no arguments were provided + defaults("VARCHAR(1, 2)"), + defaults("VARCHAR(x)"), + defaults("VARCHAR"), + defaults("NUMBER"), + defaults("NUMBER()"), + defaults("NUMBER(x)"), + defaults(fmt.Sprintf("VARCHAR(%d)", DefaultVarcharLength)), + } + + for _, tc := range tests { + tc := tc + t.Run(tc.input, func(t *testing.T) { + length := ParseVarcharDataTypeRaw(tc.input) + assert.Equal(t, tc.expectedLength, length) + }) + } +} From 0989f3dbdb6a3c9566af2e1962cd5415bd4a3228 Mon Sep 17 00:00:00 2001 From: Artur Sawicki Date: Wed, 28 Aug 2024 11:17:16 +0200 Subject: [PATCH 3/8] Add function descriptions References: #3007 --- pkg/internal/util/strings.go | 1 + pkg/resources/helpers.go 
| 4 ++++ pkg/sdk/data_types.go | 6 ++++++ 3 files changed, 11 insertions(+) diff --git a/pkg/internal/util/strings.go b/pkg/internal/util/strings.go index 0b5021ea51..2e8aadf014 100644 --- a/pkg/internal/util/strings.go +++ b/pkg/internal/util/strings.go @@ -2,6 +2,7 @@ package util import "strings" +// TrimAllPrefixes removes all prefixes from the input. Order matters. func TrimAllPrefixes(text string, prefixes ...string) string { result := text for _, prefix := range prefixes { diff --git a/pkg/resources/helpers.go b/pkg/resources/helpers.go index 70a0c386ce..1b3f94f8da 100644 --- a/pkg/resources/helpers.go +++ b/pkg/resources/helpers.go @@ -35,6 +35,10 @@ func dataTypeDiffSuppressFunc(_, old, new string, _ *schema.ResourceData) bool { return oldDT == newDT } +// DataTypeIssue3007DiffSuppressFunc is a temporary solution to handle data type suppression problems. +// Currently, it handles only number and text data types. +// It falls back to Snowflake defaults for arguments if no arguments were provided for the data type. +// TODO [SNOW-1348103 or SNOW-1348106]: visit with functions and procedures rework func DataTypeIssue3007DiffSuppressFunc(_, old, new string, _ *schema.ResourceData) bool { oldDataType, err := sdk.ToDataType(old) if err != nil { diff --git a/pkg/sdk/data_types.go b/pkg/sdk/data_types.go index cae4f322f7..011e6340fd 100644 --- a/pkg/sdk/data_types.go +++ b/pkg/sdk/data_types.go @@ -115,6 +115,9 @@ func IsStringType(_type string) bool { strings.HasPrefix(t, "NCHAR") } +// ParseNumberDataTypeRaw extracts precision and scale from the raw number data type input. +// It returns defaults if it can't parse arguments, data type is different, or no arguments were provided. +// TODO [SNOW-1348103 or SNOW-1348106]: visit with functions and procedures rework func ParseNumberDataTypeRaw(rawDataType string) (int, int) { r := util.TrimAllPrefixes(strings.TrimSpace(strings.ToUpper(rawDataType)), DataTypeNumberSynonyms...) r = strings.TrimSpace(r) @@ -144,6 +147,9 @@ func ParseNumberDataTypeRaw(rawDataType string) (int, int) { return DefaultNumberPrecision, DefaultNumberScale } +// ParseVarcharDataTypeRaw extracts length from the raw text data type input. +// It returns default if it can't parse arguments, data type is different, or no length argument was provided. +// TODO [SNOW-1348103 or SNOW-1348106]: visit with functions and procedures rework func ParseVarcharDataTypeRaw(rawDataType string) int { r := util.TrimAllPrefixes(strings.TrimSpace(strings.ToUpper(rawDataType)), DataTypeVarcharSynonyms...) r = strings.TrimSpace(r) From 28aae6ec8eb6bee9073062dfedf0a21582ba61f1 Mon Sep 17 00:00:00 2001 From: Artur Sawicki Date: Wed, 28 Aug 2024 11:32:36 +0200 Subject: [PATCH 4/8] Prove and fix problems with data type suppression in the table resource (only for text and number types) References: #3007 --- pkg/resources/table.go | 2 +- pkg/resources/table_acceptance_test.go | 109 +++++++++++++++++++++++++ 2 files changed, 110 insertions(+), 1 deletion(-) diff --git a/pkg/resources/table.go b/pkg/resources/table.go index 13280eb9fc..ad10836b74 100644 --- a/pkg/resources/table.go +++ b/pkg/resources/table.go @@ -62,7 +62,7 @@ var tableSchema = map[string]*schema.Schema{ Required: true, Description: "Column type, e.g. VARIANT. 
For a full list of column types, see [Summary of Data Types](https://docs.snowflake.com/en/sql-reference/intro-summary-data-types).", ValidateFunc: dataTypeValidateFunc, - DiffSuppressFunc: dataTypeDiffSuppressFunc, + DiffSuppressFunc: DataTypeIssue3007DiffSuppressFunc, }, "nullable": { Type: schema.TypeBool, diff --git a/pkg/resources/table_acceptance_test.go b/pkg/resources/table_acceptance_test.go index 49d8bc461b..bbbfe06d8e 100644 --- a/pkg/resources/table_acceptance_test.go +++ b/pkg/resources/table_acceptance_test.go @@ -9,7 +9,9 @@ import ( acc "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance" r "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/resources" + tfjson "github.com/hashicorp/terraform-json" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/planchecks" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/internal/provider" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/provider/resources" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk" @@ -2090,3 +2092,110 @@ resource "snowflake_table" "test_table" { } `, name, databaseName, schemaName) } + +func TestAcc_Table_issue3007_textColumn(t *testing.T) { + databaseName := acc.TestClient().Ids.Alpha() + schemaName := acc.TestClient().Ids.Alpha() + name := acc.TestClient().Ids.Alpha() + resourceName := "snowflake_table.test_table" + + resource.Test(t, resource.TestCase{ + PreCheck: func() { acc.TestAccPreCheck(t) }, + TerraformVersionChecks: []tfversion.TerraformVersionCheck{ + tfversion.RequireAbove(tfversion.Version1_5_0), + }, + Steps: []resource.TestStep{ + { + ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, + Config: tableConfigIssue3007(name, databaseName, schemaName, "VARCHAR(3)"), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr(resourceName, "column.0.type", "NUMBER(11,2)"), + resource.TestCheckResourceAttr(resourceName, "column.1.type", "VARCHAR(3)"), + ), + }, + { + ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, + Config: tableConfigIssue3007(name, databaseName, schemaName, "VARCHAR(256)"), + ConfigPlanChecks: resource.ConfigPlanChecks{ + PreApply: []plancheck.PlanCheck{ + planchecks.ExpectChange(resourceName, "column.1.type", tfjson.ActionUpdate, sdk.String("VARCHAR(3)"), sdk.String("VARCHAR(256)")), + }, + }, + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr(resourceName, "column.0.type", "NUMBER(11,2)"), + resource.TestCheckResourceAttr(resourceName, "column.1.type", "VARCHAR(256)"), + ), + }, + }, + }) +} + +// TODO [SNOW-1348114]: visit with table rework (e.g. changing scale is not supported: err 040052 (22000): SQL compilation error: cannot change column SOME_COLUMN from type NUMBER(38,0) to NUMBER(11,2) because changing the scale of a number is not supported.) 
+func TestAcc_Table_issue3007_numberColumn(t *testing.T) { + databaseName := acc.TestClient().Ids.Alpha() + schemaName := acc.TestClient().Ids.Alpha() + name := acc.TestClient().Ids.Alpha() + resourceName := "snowflake_table.test_table" + + resource.Test(t, resource.TestCase{ + PreCheck: func() { acc.TestAccPreCheck(t) }, + TerraformVersionChecks: []tfversion.TerraformVersionCheck{ + tfversion.RequireAbove(tfversion.Version1_5_0), + }, + Steps: []resource.TestStep{ + { + ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, + Config: tableConfigIssue3007(name, databaseName, schemaName, "NUMBER"), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr(resourceName, "column.0.type", "NUMBER(11,2)"), + resource.TestCheckResourceAttr(resourceName, "column.1.type", "NUMBER(38,0)"), + ), + }, + { + ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, + Config: tableConfigIssue3007(name, databaseName, schemaName, "NUMBER(11)"), + ConfigPlanChecks: resource.ConfigPlanChecks{ + PreApply: []plancheck.PlanCheck{ + planchecks.ExpectChange(resourceName, "column.1.type", tfjson.ActionUpdate, sdk.String("NUMBER(38,0)"), sdk.String("NUMBER(11)")), + }, + }, + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr(resourceName, "column.0.type", "NUMBER(11,2)"), + resource.TestCheckResourceAttr(resourceName, "column.1.type", "NUMBER(11,0)"), + ), + }, + }, + }) +} + +func tableConfigIssue3007(name string, databaseName string, schemaName string, dataType string) string { + return fmt.Sprintf(` +resource "snowflake_database" "test_database" { + name = "%[2]s" +} + +resource "snowflake_schema" "test_schema" { + depends_on = [snowflake_database.test_database] + name = "%[3]s" + database = "%[2]s" +} + +resource "snowflake_table" "test_table" { + depends_on = [snowflake_schema.test_schema] + name = "%[1]s" + database = "%[2]s" + schema = "%[3]s" + comment = "Issue 3007 confirmation" + + column { + name = "ID" + type = "NUMBER(11,2)" + } + + column { + name = "SOME_COLUMN" + type = "%[4]s" + } +} +`, name, databaseName, schemaName, dataType) +} From 1f9ab467d57a11b20c67005cc066f62c58af2aa2 Mon Sep 17 00:00:00 2001 From: Artur Sawicki Date: Wed, 28 Aug 2024 11:40:15 +0200 Subject: [PATCH 5/8] Add more test steps References: #3007 --- pkg/resources/table_acceptance_test.go | 77 +++++++++++++++++++++++++- 1 file changed, 76 insertions(+), 1 deletion(-) diff --git a/pkg/resources/table_acceptance_test.go b/pkg/resources/table_acceptance_test.go index bbbfe06d8e..264c160156 100644 --- a/pkg/resources/table_acceptance_test.go +++ b/pkg/resources/table_acceptance_test.go @@ -2099,6 +2099,8 @@ func TestAcc_Table_issue3007_textColumn(t *testing.T) { name := acc.TestClient().Ids.Alpha() resourceName := "snowflake_table.test_table" + defaultVarchar := fmt.Sprintf("VARCHAR(%d)", sdk.DefaultVarcharLength) + resource.Test(t, resource.TestCase{ PreCheck: func() { acc.TestAccPreCheck(t) }, TerraformVersionChecks: []tfversion.TerraformVersionCheck{ @@ -2122,10 +2124,45 @@ func TestAcc_Table_issue3007_textColumn(t *testing.T) { }, }, Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr(resourceName, "column.0.type", "NUMBER(11,2)"), resource.TestCheckResourceAttr(resourceName, "column.1.type", "VARCHAR(256)"), ), }, + { + ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, + Config: tableConfigIssue3007(name, databaseName, schemaName, "VARCHAR"), + ConfigPlanChecks: resource.ConfigPlanChecks{ + PreApply: []plancheck.PlanCheck{ + 
planchecks.ExpectChange(resourceName, "column.1.type", tfjson.ActionUpdate, sdk.String("VARCHAR(256)"), sdk.String("VARCHAR")), + }, + }, + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr(resourceName, "column.1.type", defaultVarchar), + ), + }, + { + ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, + Config: tableConfigIssue3007(name, databaseName, schemaName, defaultVarchar), + ConfigPlanChecks: resource.ConfigPlanChecks{ + PreApply: []plancheck.PlanCheck{ + plancheck.ExpectResourceAction(resourceName, plancheck.ResourceActionNoop), + }, + }, + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr(resourceName, "column.1.type", defaultVarchar), + ), + }, + { + ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, + Config: tableConfigIssue3007(name, databaseName, schemaName, "text"), + ConfigPlanChecks: resource.ConfigPlanChecks{ + PreApply: []plancheck.PlanCheck{ + plancheck.ExpectResourceAction(resourceName, plancheck.ResourceActionNoop), + }, + }, + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr(resourceName, "column.1.type", defaultVarchar), + ), + }, }, }) } @@ -2137,6 +2174,8 @@ func TestAcc_Table_issue3007_numberColumn(t *testing.T) { name := acc.TestClient().Ids.Alpha() resourceName := "snowflake_table.test_table" + defaultNumber := fmt.Sprintf("NUMBER(%d,%d)", sdk.DefaultNumberPrecision, sdk.DefaultNumberScale) + resource.Test(t, resource.TestCase{ PreCheck: func() { acc.TestAccPreCheck(t) }, TerraformVersionChecks: []tfversion.TerraformVersionCheck{ @@ -2164,6 +2203,42 @@ func TestAcc_Table_issue3007_numberColumn(t *testing.T) { resource.TestCheckResourceAttr(resourceName, "column.1.type", "NUMBER(11,0)"), ), }, + { + ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, + Config: tableConfigIssue3007(name, databaseName, schemaName, "NUMBER"), + ConfigPlanChecks: resource.ConfigPlanChecks{ + PreApply: []plancheck.PlanCheck{ + planchecks.ExpectChange(resourceName, "column.1.type", tfjson.ActionUpdate, sdk.String("NUMBER(11,0)"), sdk.String("NUMBER")), + }, + }, + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr(resourceName, "column.1.type", defaultNumber), + ), + }, + { + ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, + Config: tableConfigIssue3007(name, databaseName, schemaName, defaultNumber), + ConfigPlanChecks: resource.ConfigPlanChecks{ + PreApply: []plancheck.PlanCheck{ + plancheck.ExpectResourceAction(resourceName, plancheck.ResourceActionNoop), + }, + }, + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr(resourceName, "column.1.type", defaultNumber), + ), + }, + { + ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, + Config: tableConfigIssue3007(name, databaseName, schemaName, "decimal"), + ConfigPlanChecks: resource.ConfigPlanChecks{ + PreApply: []plancheck.PlanCheck{ + plancheck.ExpectResourceAction(resourceName, plancheck.ResourceActionNoop), + }, + }, + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr(resourceName, "column.1.type", defaultNumber), + ), + }, }, }) } From d5eaad384af5b0a4293ec511a12aafd82e705bab Mon Sep 17 00:00:00 2001 From: Artur Sawicki Date: Wed, 28 Aug 2024 11:52:41 +0200 Subject: [PATCH 6/8] Add fixes to the migration guide References: #2972 #3007 --- MIGRATION_GUIDE.md | 24 ++++++++++++++++++++++++ 1 file changed, 24 insertions(+) diff --git a/MIGRATION_GUIDE.md b/MIGRATION_GUIDE.md index 8bbbc7fa7c..05c6014ff5 100644 --- a/MIGRATION_GUIDE.md +++ 
b/MIGRATION_GUIDE.md @@ -90,6 +90,30 @@ Some of the resources are excluded from this change: #### *(breaking change)* removed `qualified_name` from `snowflake_masking_policy`, `snowflake_network_rule`, `snowflake_password_policy` and `snowflake_table` Because of introducing a new `fully_qualified_name` field for all of the resources, `qualified_name` was removed from `snowflake_masking_policy`, `snowflake_network_rule`, `snowflake_password_policy` and `snowflake_table`. Please adjust your configurations. State is automatically migrated. +### snowflake_stage resource changes + +#### *(bugfix)* Correctly handle renamed/deleted stage + +Correctly handle the situation when a stage was renamed/deleted externally (previously this resulted in a permanent loop). No action is required on the user's side. + +Connected issues: [#2972](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2972) + +### snowflake_table resource changes + +#### *(bugfix)* Handle data type diff suppression better for text and number + +Data types are not handled entirely correctly inside the provider (read more, e.g., in [#2735](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2735)). This will be improved further with the upcoming function, procedure, and table rework. For now, diff suppression has been fixed for text and number data types in the table resource, with the following assumptions/limitations: +- for numbers the default precision is 38 and the default scale is 0 (following the [docs](https://docs.snowflake.com/en/sql-reference/data-types-numeric#number)) +- for number types the following types are treated as synonyms: `NUMBER`, `DECIMAL`, `NUMERIC`, `INT`, `INTEGER`, `BIGINT`, `SMALLINT`, `TINYINT`, `BYTEINT` +- for text the default length is 16777216 (following the [docs](https://docs.snowflake.com/en/sql-reference/data-types-text#varchar)) +- for text types the following types are treated as synonyms: `VARCHAR`, `CHAR`, `CHARACTER`, `STRING`, `TEXT` +- whitespace and casing are ignored +- if the type arguments cannot be parsed, the defaults are used, so a diff may be suppressed unexpectedly (please report such cases) + +No action is required on the user's side. 
+ +Connected issues: [#3007](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/3007) + ### snowflake_user resource changes #### *(breaking change)* user parameters added to snowflake_user resource From b702a210072708b5358cca3334c96128684772de Mon Sep 17 00:00:00 2001 From: Artur Sawicki Date: Wed, 28 Aug 2024 12:02:23 +0200 Subject: [PATCH 7/8] Run make pre-push --- pkg/resources/helpers_test.go | 2 +- pkg/sdk/data_types.go | 30 +++++++++++++++++------------- 2 files changed, 18 insertions(+), 14 deletions(-) diff --git a/pkg/resources/helpers_test.go b/pkg/resources/helpers_test.go index 72d1e1305d..c143d40f03 100644 --- a/pkg/resources/helpers_test.go +++ b/pkg/resources/helpers_test.go @@ -3,7 +3,6 @@ package resources_test import ( "context" "fmt" - "github.com/stretchr/testify/require" "slices" "testing" @@ -15,6 +14,7 @@ import ( "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" "github.com/hashicorp/terraform-plugin-testing/terraform" "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" ) func Test_GetPropertyAsPointer(t *testing.T) { diff --git a/pkg/sdk/data_types.go b/pkg/sdk/data_types.go index 011e6340fd..40ced4cb83 100644 --- a/pkg/sdk/data_types.go +++ b/pkg/sdk/data_types.go @@ -39,20 +39,24 @@ const ( DataTypeGeometry DataType = "GEOMETRY" ) -var DataTypeNumberSynonyms = []string{"NUMBER", "DECIMAL", "NUMERIC", "INT", "INTEGER", "BIGINT", "SMALLINT", "TINYINT", "BYTEINT"} -var DataTypeFloatSynonyms = []string{"FLOAT", "FLOAT4", "FLOAT8", "DOUBLE", "DOUBLE PRECISION", "REAL"} -var DataTypeVarcharSynonyms = []string{"VARCHAR", "CHAR", "CHARACTER", "STRING", "TEXT"} -var DataTypeBinarySynonyms = []string{"BINARY", "VARBINARY"} -var DataTypeBooleanSynonyms = []string{"BOOLEAN", "BOOL"} -var DataTypeTimestampLTZSynonyms = []string{"TIMESTAMP_LTZ"} -var DataTypeTimestampTZSynonyms = []string{"TIMESTAMP_TZ"} -var DataTypeTimestampNTZSynonyms = []string{"DATETIME", "TIMESTAMP", "TIMESTAMP_NTZ"} -var DataTypeTimeSynonyms = []string{"TIME"} -var DataTypeVectorSynonyms = []string{"VECTOR"} +var ( + DataTypeNumberSynonyms = []string{"NUMBER", "DECIMAL", "NUMERIC", "INT", "INTEGER", "BIGINT", "SMALLINT", "TINYINT", "BYTEINT"} + DataTypeFloatSynonyms = []string{"FLOAT", "FLOAT4", "FLOAT8", "DOUBLE", "DOUBLE PRECISION", "REAL"} + DataTypeVarcharSynonyms = []string{"VARCHAR", "CHAR", "CHARACTER", "STRING", "TEXT"} + DataTypeBinarySynonyms = []string{"BINARY", "VARBINARY"} + DataTypeBooleanSynonyms = []string{"BOOLEAN", "BOOL"} + DataTypeTimestampLTZSynonyms = []string{"TIMESTAMP_LTZ"} + DataTypeTimestampTZSynonyms = []string{"TIMESTAMP_TZ"} + DataTypeTimestampNTZSynonyms = []string{"DATETIME", "TIMESTAMP", "TIMESTAMP_NTZ"} + DataTypeTimeSynonyms = []string{"TIME"} + DataTypeVectorSynonyms = []string{"VECTOR"} +) -const DefaultNumberPrecision = 38 -const DefaultNumberScale = 0 -const DefaultVarcharLength = 16777216 +const ( + DefaultNumberPrecision = 38 + DefaultNumberScale = 0 + DefaultVarcharLength = 16777216 +) func ToDataType(s string) (DataType, error) { dType := strings.ToUpper(s) From f22630fa069cfc577116afd9facb275739712a5f Mon Sep 17 00:00:00 2001 From: Artur Sawicki Date: Wed, 28 Aug 2024 15:04:57 +0200 Subject: [PATCH 8/8] Fix after review --- pkg/resources/helpers.go | 6 ++-- pkg/resources/table_acceptance_test.go | 43 +++++++++----------------- 2 files changed, 17 insertions(+), 32 deletions(-) diff --git a/pkg/resources/helpers.go b/pkg/resources/helpers.go index 1b3f94f8da..f88ae30bd2 100644 --- a/pkg/resources/helpers.go 
+++ b/pkg/resources/helpers.go @@ -53,17 +53,17 @@ func DataTypeIssue3007DiffSuppressFunc(_, old, new string, _ *schema.ResourceDat } switch v := oldDataType; v { case sdk.DataTypeNumber: - logging.DebugLogger.Printf("[DEBUG] Handling number data type diff suppression") + logging.DebugLogger.Printf("[DEBUG] DataTypeIssue3007DiffSuppressFunc: Handling number data type diff suppression") oldPrecision, oldScale := sdk.ParseNumberDataTypeRaw(old) newPrecision, newScale := sdk.ParseNumberDataTypeRaw(new) return oldPrecision == newPrecision && oldScale == newScale case sdk.DataTypeVARCHAR: - logging.DebugLogger.Printf("[DEBUG] Handling text data type diff suppression") + logging.DebugLogger.Printf("[DEBUG] DataTypeIssue3007DiffSuppressFunc: Handling text data type diff suppression") oldLength := sdk.ParseVarcharDataTypeRaw(old) newLength := sdk.ParseVarcharDataTypeRaw(new) return oldLength == newLength default: - logging.DebugLogger.Printf("[DEBUG] Diff suppression for %s can't be currently handled", v) + logging.DebugLogger.Printf("[DEBUG] DataTypeIssue3007DiffSuppressFunc: Diff suppression for %s can't be currently handled", v) } return true } diff --git a/pkg/resources/table_acceptance_test.go b/pkg/resources/table_acceptance_test.go index 264c160156..a855fb2a49 100644 --- a/pkg/resources/table_acceptance_test.go +++ b/pkg/resources/table_acceptance_test.go @@ -2094,9 +2094,7 @@ resource "snowflake_table" "test_table" { } func TestAcc_Table_issue3007_textColumn(t *testing.T) { - databaseName := acc.TestClient().Ids.Alpha() - schemaName := acc.TestClient().Ids.Alpha() - name := acc.TestClient().Ids.Alpha() + tableId := acc.TestClient().Ids.RandomSchemaObjectIdentifier() resourceName := "snowflake_table.test_table" defaultVarchar := fmt.Sprintf("VARCHAR(%d)", sdk.DefaultVarcharLength) @@ -2109,7 +2107,7 @@ func TestAcc_Table_issue3007_textColumn(t *testing.T) { Steps: []resource.TestStep{ { ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, - Config: tableConfigIssue3007(name, databaseName, schemaName, "VARCHAR(3)"), + Config: tableConfigIssue3007(tableId, "VARCHAR(3)"), Check: resource.ComposeTestCheckFunc( resource.TestCheckResourceAttr(resourceName, "column.0.type", "NUMBER(11,2)"), resource.TestCheckResourceAttr(resourceName, "column.1.type", "VARCHAR(3)"), @@ -2117,7 +2115,7 @@ func TestAcc_Table_issue3007_textColumn(t *testing.T) { }, { ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, - Config: tableConfigIssue3007(name, databaseName, schemaName, "VARCHAR(256)"), + Config: tableConfigIssue3007(tableId, "VARCHAR(256)"), ConfigPlanChecks: resource.ConfigPlanChecks{ PreApply: []plancheck.PlanCheck{ planchecks.ExpectChange(resourceName, "column.1.type", tfjson.ActionUpdate, sdk.String("VARCHAR(3)"), sdk.String("VARCHAR(256)")), @@ -2129,7 +2127,7 @@ func TestAcc_Table_issue3007_textColumn(t *testing.T) { }, { ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, - Config: tableConfigIssue3007(name, databaseName, schemaName, "VARCHAR"), + Config: tableConfigIssue3007(tableId, "VARCHAR"), ConfigPlanChecks: resource.ConfigPlanChecks{ PreApply: []plancheck.PlanCheck{ planchecks.ExpectChange(resourceName, "column.1.type", tfjson.ActionUpdate, sdk.String("VARCHAR(256)"), sdk.String("VARCHAR")), @@ -2141,7 +2139,7 @@ func TestAcc_Table_issue3007_textColumn(t *testing.T) { }, { ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, - Config: tableConfigIssue3007(name, databaseName, schemaName, defaultVarchar), + Config: tableConfigIssue3007(tableId, 
defaultVarchar), ConfigPlanChecks: resource.ConfigPlanChecks{ PreApply: []plancheck.PlanCheck{ plancheck.ExpectResourceAction(resourceName, plancheck.ResourceActionNoop), @@ -2153,7 +2151,7 @@ func TestAcc_Table_issue3007_textColumn(t *testing.T) { }, { ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, - Config: tableConfigIssue3007(name, databaseName, schemaName, "text"), + Config: tableConfigIssue3007(tableId, "text"), ConfigPlanChecks: resource.ConfigPlanChecks{ PreApply: []plancheck.PlanCheck{ plancheck.ExpectResourceAction(resourceName, plancheck.ResourceActionNoop), @@ -2169,9 +2167,7 @@ func TestAcc_Table_issue3007_textColumn(t *testing.T) { // TODO [SNOW-1348114]: visit with table rework (e.g. changing scale is not supported: err 040052 (22000): SQL compilation error: cannot change column SOME_COLUMN from type NUMBER(38,0) to NUMBER(11,2) because changing the scale of a number is not supported.) func TestAcc_Table_issue3007_numberColumn(t *testing.T) { - databaseName := acc.TestClient().Ids.Alpha() - schemaName := acc.TestClient().Ids.Alpha() - name := acc.TestClient().Ids.Alpha() + tableId := acc.TestClient().Ids.RandomSchemaObjectIdentifier() resourceName := "snowflake_table.test_table" defaultNumber := fmt.Sprintf("NUMBER(%d,%d)", sdk.DefaultNumberPrecision, sdk.DefaultNumberScale) @@ -2184,7 +2180,7 @@ func TestAcc_Table_issue3007_numberColumn(t *testing.T) { Steps: []resource.TestStep{ { ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, - Config: tableConfigIssue3007(name, databaseName, schemaName, "NUMBER"), + Config: tableConfigIssue3007(tableId, "NUMBER"), Check: resource.ComposeTestCheckFunc( resource.TestCheckResourceAttr(resourceName, "column.0.type", "NUMBER(11,2)"), resource.TestCheckResourceAttr(resourceName, "column.1.type", "NUMBER(38,0)"), @@ -2192,7 +2188,7 @@ func TestAcc_Table_issue3007_numberColumn(t *testing.T) { }, { ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, - Config: tableConfigIssue3007(name, databaseName, schemaName, "NUMBER(11)"), + Config: tableConfigIssue3007(tableId, "NUMBER(11)"), ConfigPlanChecks: resource.ConfigPlanChecks{ PreApply: []plancheck.PlanCheck{ planchecks.ExpectChange(resourceName, "column.1.type", tfjson.ActionUpdate, sdk.String("NUMBER(38,0)"), sdk.String("NUMBER(11)")), @@ -2205,7 +2201,7 @@ func TestAcc_Table_issue3007_numberColumn(t *testing.T) { }, { ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, - Config: tableConfigIssue3007(name, databaseName, schemaName, "NUMBER"), + Config: tableConfigIssue3007(tableId, "NUMBER"), ConfigPlanChecks: resource.ConfigPlanChecks{ PreApply: []plancheck.PlanCheck{ planchecks.ExpectChange(resourceName, "column.1.type", tfjson.ActionUpdate, sdk.String("NUMBER(11,0)"), sdk.String("NUMBER")), @@ -2217,7 +2213,7 @@ func TestAcc_Table_issue3007_numberColumn(t *testing.T) { }, { ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, - Config: tableConfigIssue3007(name, databaseName, schemaName, defaultNumber), + Config: tableConfigIssue3007(tableId, defaultNumber), ConfigPlanChecks: resource.ConfigPlanChecks{ PreApply: []plancheck.PlanCheck{ plancheck.ExpectResourceAction(resourceName, plancheck.ResourceActionNoop), @@ -2229,7 +2225,7 @@ func TestAcc_Table_issue3007_numberColumn(t *testing.T) { }, { ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, - Config: tableConfigIssue3007(name, databaseName, schemaName, "decimal"), + Config: tableConfigIssue3007(tableId, "decimal"), ConfigPlanChecks: resource.ConfigPlanChecks{ 
PreApply: []plancheck.PlanCheck{ plancheck.ExpectResourceAction(resourceName, plancheck.ResourceActionNoop), @@ -2243,20 +2239,9 @@ func TestAcc_Table_issue3007_numberColumn(t *testing.T) { }) } -func tableConfigIssue3007(name string, databaseName string, schemaName string, dataType string) string { +func tableConfigIssue3007(tableId sdk.SchemaObjectIdentifier, dataType string) string { return fmt.Sprintf(` -resource "snowflake_database" "test_database" { - name = "%[2]s" -} - -resource "snowflake_schema" "test_schema" { - depends_on = [snowflake_database.test_database] - name = "%[3]s" - database = "%[2]s" -} - resource "snowflake_table" "test_table" { - depends_on = [snowflake_schema.test_schema] name = "%[1]s" database = "%[2]s" schema = "%[3]s" @@ -2272,5 +2257,5 @@ resource "snowflake_table" "test_table" { type = "%[4]s" } } -`, name, databaseName, schemaName, dataType) +`, tableId.Name(), tableId.DatabaseName(), tableId.SchemaName(), dataType) }
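
For illustration, a minimal usage sketch of the helpers introduced in this series, assuming the provider module's pkg/sdk and pkg/resources packages are importable as shown in the diffs above; the expected outputs follow the unit tests added in patch 2:

package main

import (
	"fmt"

	"github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/resources"
	"github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk"
)

func main() {
	// "NUMBER" without arguments falls back to the documented defaults: precision 38, scale 0.
	precision, scale := sdk.ParseNumberDataTypeRaw("NUMBER")
	fmt.Println(precision, scale) // 38 0

	// "text(30)" is handled as a VARCHAR synonym with an explicit length; casing and whitespace are ignored.
	length := sdk.ParseVarcharDataTypeRaw("text(30)")
	fmt.Println(length) // 30

	// The new suppressor treats an implicit default and its explicit synonym as equal,
	// so the table resource plans no change for NUMBER vs. DECIMAL(38, 0).
	suppressed := resources.DataTypeIssue3007DiffSuppressFunc("", "NUMBER", "DECIMAL(38, 0)", nil)
	fmt.Println(suppressed) // true
}

Under the same assumptions, equivalent text types such as VARCHAR and TEXT(16777216) are also suppressed, mirroring the Test_DataTypeIssue3007DiffSuppressFunc cases above.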