From 682606adea5e40befa7e599ced5aa7dc8570f80a Mon Sep 17 00:00:00 2001 From: Artur Sawicki Date: Tue, 10 Dec 2024 16:39:19 +0100 Subject: [PATCH 1/2] feat: Procedures sdk update (#3255) Update and test procedures SDK as part of ongoing functions&procedures rework: - Regenerate SDK after adjusting defs - Wrap procedure definitions in `$$` - Wrap arguments in double quotes - Extract ProcedureDetails out of DESCRIBE output rows - Add AutoEventLogging to the SDK - Adjust alter's structure (add set/unset) - Add missing fields to SHOW output - Use extended in for SHOW - Add/Generate assertions for procedure, procedure details, and procedure parameters - Add missing unit tests and adjust existing ones - Add missing integration tests and temporarily skip most of the existing ones, adjust them, or remove them - Adjust existing resource --- .../function_describe_snowflake_ext.go | 37 + .../objectassert/function_snowflake_ext.go | 37 + .../assert/objectassert/gen/sdk_object_def.go | 5 + .../procedure_describe_snowflake_ext.go | 393 ++++ .../objectassert/procedure_snowflake_ext.go | 59 + .../objectassert/procedure_snowflake_gen.go | 227 ++ .../gen/object_parameters_def.go | 12 + .../procedure_parameters_snowflake_gen.go | 190 ++ pkg/acceptance/helpers/function_client.go | 1 + .../helpers/function_setup_helpers.go | 41 +- pkg/acceptance/helpers/parameter_client.go | 11 + pkg/acceptance/helpers/procedure_client.go | 158 +- pkg/acceptance/testdatatypes/testdatatypes.go | 1 + pkg/datasources/procedures.go | 4 +- pkg/resources/external_function.go | 2 +- pkg/resources/function.go | 23 +- pkg/resources/procedure.go | 41 +- pkg/resources/procedure_acceptance_test.go | 4 +- pkg/sdk/common_types.go | 33 +- pkg/sdk/common_types_test.go | 35 + pkg/sdk/functions_impl_gen.go | 4 +- pkg/sdk/identifier_helpers.go | 7 +- pkg/sdk/parameters.go | 18 +- pkg/sdk/poc/README.md | 1 + pkg/sdk/procedures_def.go | 84 +- pkg/sdk/procedures_dto_builders_gen.go | 140 +- pkg/sdk/procedures_dto_gen.go | 124 +- pkg/sdk/procedures_ext.go | 146 ++ pkg/sdk/procedures_gen.go | 241 +- pkg/sdk/procedures_gen_test.go | 338 +-- pkg/sdk/procedures_impl_gen.go | 266 ++- pkg/sdk/procedures_validations_gen.go | 14 +- pkg/sdk/testint/functions_integration_test.go | 152 +- .../testint/procedures_integration_test.go | 1959 ++++++++++++++--- 34 files changed, 3958 insertions(+), 850 deletions(-) create mode 100644 pkg/acceptance/bettertestspoc/assert/objectassert/procedure_describe_snowflake_ext.go create mode 100644 pkg/acceptance/bettertestspoc/assert/objectassert/procedure_snowflake_ext.go create mode 100644 pkg/acceptance/bettertestspoc/assert/objectassert/procedure_snowflake_gen.go create mode 100644 pkg/acceptance/bettertestspoc/assert/objectparametersassert/procedure_parameters_snowflake_gen.go diff --git a/pkg/acceptance/bettertestspoc/assert/objectassert/function_describe_snowflake_ext.go b/pkg/acceptance/bettertestspoc/assert/objectassert/function_describe_snowflake_ext.go index c1b241aa1d..f540d487bd 100644 --- a/pkg/acceptance/bettertestspoc/assert/objectassert/function_describe_snowflake_ext.go +++ b/pkg/acceptance/bettertestspoc/assert/objectassert/function_describe_snowflake_ext.go @@ -2,11 +2,13 @@ package objectassert import ( "fmt" + "strings" "testing" acc "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/bettertestspoc/assert" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/internal/collections" 
"github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk" ) @@ -368,3 +370,38 @@ func (f *FunctionDetailsAssert) HasInstalledPackagesNotEmpty() *FunctionDetailsA }) return f } + +func (f *FunctionDetailsAssert) HasExactlyExternalAccessIntegrations(integrations ...sdk.AccountObjectIdentifier) *FunctionDetailsAssert { + f.AddAssertion(func(t *testing.T, o *sdk.FunctionDetails) error { + t.Helper() + if o.ExternalAccessIntegrations == nil { + return fmt.Errorf("expected external access integrations to have value; got: nil") + } + joined := strings.Join(collections.Map(integrations, func(ex sdk.AccountObjectIdentifier) string { return ex.FullyQualifiedName() }), ",") + expected := fmt.Sprintf(`[%s]`, joined) + if *o.ExternalAccessIntegrations != expected { + return fmt.Errorf("expected external access integrations: %v; got: %v", expected, *o.ExternalAccessIntegrations) + } + return nil + }) + return f +} + +func (f *FunctionDetailsAssert) HasExactlySecrets(expectedSecrets map[string]sdk.SchemaObjectIdentifier) *FunctionDetailsAssert { + f.AddAssertion(func(t *testing.T, o *sdk.FunctionDetails) error { + t.Helper() + if o.Secrets == nil { + return fmt.Errorf("expected secrets to have value; got: nil") + } + var parts []string + for k, v := range expectedSecrets { + parts = append(parts, fmt.Sprintf(`"%s":"\"%s\".\"%s\".%s"`, k, v.DatabaseName(), v.SchemaName(), v.Name())) + } + expected := fmt.Sprintf(`{%s}`, strings.Join(parts, ",")) + if *o.Secrets != expected { + return fmt.Errorf("expected secrets: %v; got: %v", expected, *o.Secrets) + } + return nil + }) + return f +} diff --git a/pkg/acceptance/bettertestspoc/assert/objectassert/function_snowflake_ext.go b/pkg/acceptance/bettertestspoc/assert/objectassert/function_snowflake_ext.go index 66e4253d20..aa8d17a022 100644 --- a/pkg/acceptance/bettertestspoc/assert/objectassert/function_snowflake_ext.go +++ b/pkg/acceptance/bettertestspoc/assert/objectassert/function_snowflake_ext.go @@ -2,8 +2,10 @@ package objectassert import ( "fmt" + "strings" "testing" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/internal/collections" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk" ) @@ -28,3 +30,38 @@ func (a *FunctionAssert) HasExternalAccessIntegrationsNil() *FunctionAssert { }) return a } + +func (f *FunctionAssert) HasExactlyExternalAccessIntegrations(integrations ...sdk.AccountObjectIdentifier) *FunctionAssert { + f.AddAssertion(func(t *testing.T, o *sdk.Function) error { + t.Helper() + if o.ExternalAccessIntegrations == nil { + return fmt.Errorf("expected external access integrations to have value; got: nil") + } + joined := strings.Join(collections.Map(integrations, func(ex sdk.AccountObjectIdentifier) string { return ex.FullyQualifiedName() }), ",") + expected := fmt.Sprintf(`[%s]`, joined) + if *o.ExternalAccessIntegrations != expected { + return fmt.Errorf("expected external access integrations: %v; got: %v", expected, *o.ExternalAccessIntegrations) + } + return nil + }) + return f +} + +func (f *FunctionAssert) HasExactlySecrets(expectedSecrets map[string]sdk.SchemaObjectIdentifier) *FunctionAssert { + f.AddAssertion(func(t *testing.T, o *sdk.Function) error { + t.Helper() + if o.Secrets == nil { + return fmt.Errorf("expected secrets to have value; got: nil") + } + var parts []string + for k, v := range expectedSecrets { + parts = append(parts, fmt.Sprintf(`"%s":"\"%s\".\"%s\".%s"`, k, v.DatabaseName(), v.SchemaName(), v.Name())) + } + expected := fmt.Sprintf(`{%s}`, strings.Join(parts, ",")) + if 
*o.Secrets != expected { + return fmt.Errorf("expected secrets: %v; got: %v", expected, *o.Secrets) + } + return nil + }) + return f +} diff --git a/pkg/acceptance/bettertestspoc/assert/objectassert/gen/sdk_object_def.go b/pkg/acceptance/bettertestspoc/assert/objectassert/gen/sdk_object_def.go index 3f5a88e827..06ab381d2d 100644 --- a/pkg/acceptance/bettertestspoc/assert/objectassert/gen/sdk_object_def.go +++ b/pkg/acceptance/bettertestspoc/assert/objectassert/gen/sdk_object_def.go @@ -97,6 +97,11 @@ var allStructs = []SdkObjectDef{ ObjectType: sdk.ObjectTypeFunction, ObjectStruct: sdk.Function{}, }, + { + IdType: "sdk.SchemaObjectIdentifierWithArguments", + ObjectType: sdk.ObjectTypeProcedure, + ObjectStruct: sdk.Procedure{}, + }, } func GetSdkObjectDetails() []genhelpers.SdkObjectDetails { diff --git a/pkg/acceptance/bettertestspoc/assert/objectassert/procedure_describe_snowflake_ext.go b/pkg/acceptance/bettertestspoc/assert/objectassert/procedure_describe_snowflake_ext.go new file mode 100644 index 0000000000..64011d14f9 --- /dev/null +++ b/pkg/acceptance/bettertestspoc/assert/objectassert/procedure_describe_snowflake_ext.go @@ -0,0 +1,393 @@ +package objectassert + +import ( + "fmt" + "strings" + "testing" + + acc "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance" + + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/bettertestspoc/assert" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/internal/collections" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk" +) + +// TODO [SNOW-1501905]: this file should be fully regenerated when adding and option to assert the results of describe +type ProcedureDetailsAssert struct { + *assert.SnowflakeObjectAssert[sdk.ProcedureDetails, sdk.SchemaObjectIdentifierWithArguments] +} + +func ProcedureDetails(t *testing.T, id sdk.SchemaObjectIdentifierWithArguments) *ProcedureDetailsAssert { + t.Helper() + return &ProcedureDetailsAssert{ + assert.NewSnowflakeObjectAssertWithProvider(sdk.ObjectType("PROCEDURE_DETAILS"), id, acc.TestClient().Procedure.DescribeDetails), + } +} + +func (f *ProcedureDetailsAssert) HasSignature(expected string) *ProcedureDetailsAssert { + f.AddAssertion(func(t *testing.T, o *sdk.ProcedureDetails) error { + t.Helper() + if o.Signature != expected { + return fmt.Errorf("expected signature: %v; got: %v", expected, o.Signature) + } + return nil + }) + return f +} + +func (f *ProcedureDetailsAssert) HasReturns(expected string) *ProcedureDetailsAssert { + f.AddAssertion(func(t *testing.T, o *sdk.ProcedureDetails) error { + t.Helper() + if o.Returns != expected { + return fmt.Errorf("expected returns: %v; got: %v", expected, o.Returns) + } + return nil + }) + return f +} + +func (f *ProcedureDetailsAssert) HasLanguage(expected string) *ProcedureDetailsAssert { + f.AddAssertion(func(t *testing.T, o *sdk.ProcedureDetails) error { + t.Helper() + if o.Language != expected { + return fmt.Errorf("expected language: %v; got: %v", expected, o.Language) + } + return nil + }) + return f +} + +func (f *ProcedureDetailsAssert) HasBody(expected string) *ProcedureDetailsAssert { + f.AddAssertion(func(t *testing.T, o *sdk.ProcedureDetails) error { + t.Helper() + if o.Body == nil { + return fmt.Errorf("expected body to have value; got: nil") + } + if *o.Body != expected { + return fmt.Errorf("expected body: %v; got: %v", expected, *o.Body) + } + return nil + }) + return f +} + +func (f *ProcedureDetailsAssert) HasNullHandling(expected string) *ProcedureDetailsAssert { + 
f.AddAssertion(func(t *testing.T, o *sdk.ProcedureDetails) error { + t.Helper() + if o.NullHandling == nil { + return fmt.Errorf("expected null handling to have value; got: nil") + } + if *o.NullHandling != expected { + return fmt.Errorf("expected null handling: %v; got: %v", expected, *o.NullHandling) + } + return nil + }) + return f +} + +func (f *ProcedureDetailsAssert) HasVolatility(expected string) *ProcedureDetailsAssert { + f.AddAssertion(func(t *testing.T, o *sdk.ProcedureDetails) error { + t.Helper() + if o.Volatility == nil { + return fmt.Errorf("expected volatility to have value; got: nil") + } + if *o.Volatility != expected { + return fmt.Errorf("expected volatility: %v; got: %v", expected, *o.Volatility) + } + return nil + }) + return f +} + +func (f *ProcedureDetailsAssert) HasExternalAccessIntegrations(expected string) *ProcedureDetailsAssert { + f.AddAssertion(func(t *testing.T, o *sdk.ProcedureDetails) error { + t.Helper() + if o.ExternalAccessIntegrations == nil { + return fmt.Errorf("expected external access integrations to have value; got: nil") + } + if *o.ExternalAccessIntegrations != expected { + return fmt.Errorf("expected external access integrations: %v; got: %v", expected, *o.ExternalAccessIntegrations) + } + return nil + }) + return f +} + +func (f *ProcedureDetailsAssert) HasSecrets(expected string) *ProcedureDetailsAssert { + f.AddAssertion(func(t *testing.T, o *sdk.ProcedureDetails) error { + t.Helper() + if o.Secrets == nil { + return fmt.Errorf("expected secrets to have value; got: nil") + } + if *o.Secrets != expected { + return fmt.Errorf("expected secrets: %v; got: %v", expected, *o.Secrets) + } + return nil + }) + return f +} + +func (f *ProcedureDetailsAssert) HasImports(expected string) *ProcedureDetailsAssert { + f.AddAssertion(func(t *testing.T, o *sdk.ProcedureDetails) error { + t.Helper() + if o.Imports == nil { + return fmt.Errorf("expected imports to have value; got: nil") + } + if *o.Imports != expected { + return fmt.Errorf("expected imports: %v; got: %v", expected, *o.Imports) + } + return nil + }) + return f +} + +func (f *ProcedureDetailsAssert) HasHandler(expected string) *ProcedureDetailsAssert { + f.AddAssertion(func(t *testing.T, o *sdk.ProcedureDetails) error { + t.Helper() + if o.Handler == nil { + return fmt.Errorf("expected handler to have value; got: nil") + } + if *o.Handler != expected { + return fmt.Errorf("expected handler: %v; got: %v", expected, *o.Handler) + } + return nil + }) + return f +} + +func (f *ProcedureDetailsAssert) HasRuntimeVersion(expected string) *ProcedureDetailsAssert { + f.AddAssertion(func(t *testing.T, o *sdk.ProcedureDetails) error { + t.Helper() + if o.RuntimeVersion == nil { + return fmt.Errorf("expected runtime version to have value; got: nil") + } + if *o.RuntimeVersion != expected { + return fmt.Errorf("expected runtime version: %v; got: %v", expected, *o.RuntimeVersion) + } + return nil + }) + return f +} + +func (f *ProcedureDetailsAssert) HasPackages(expected string) *ProcedureDetailsAssert { + f.AddAssertion(func(t *testing.T, o *sdk.ProcedureDetails) error { + t.Helper() + if o.Packages == nil { + return fmt.Errorf("expected packages to have value; got: nil") + } + if *o.Packages != expected { + return fmt.Errorf("expected packages: %v; got: %v", expected, *o.Packages) + } + return nil + }) + return f +} + +func (f *ProcedureDetailsAssert) HasTargetPath(expected string) *ProcedureDetailsAssert { + f.AddAssertion(func(t *testing.T, o *sdk.ProcedureDetails) error { + t.Helper() + if o.TargetPath 
== nil { + return fmt.Errorf("expected target path to have value; got: nil") + } + if *o.TargetPath != expected { + return fmt.Errorf("expected target path: %v; got: %v", expected, *o.TargetPath) + } + return nil + }) + return f +} + +func (f *ProcedureDetailsAssert) HasInstalledPackages(expected string) *ProcedureDetailsAssert { + f.AddAssertion(func(t *testing.T, o *sdk.ProcedureDetails) error { + t.Helper() + if o.InstalledPackages == nil { + return fmt.Errorf("expected installed packages to have value; got: nil") + } + if *o.InstalledPackages != expected { + return fmt.Errorf("expected installed packages: %v; got: %v", expected, *o.InstalledPackages) + } + return nil + }) + return f +} + +func (f *ProcedureDetailsAssert) HasExecuteAs(expected string) *ProcedureDetailsAssert { + f.AddAssertion(func(t *testing.T, o *sdk.ProcedureDetails) error { + t.Helper() + if o.ExecuteAs != expected { + return fmt.Errorf("expected execute as: %v; got: %v", expected, o.ExecuteAs) + } + return nil + }) + return f +} + +func (f *ProcedureDetailsAssert) HasBodyNil() *ProcedureDetailsAssert { + f.AddAssertion(func(t *testing.T, o *sdk.ProcedureDetails) error { + t.Helper() + if o.Body != nil { + return fmt.Errorf("expected body to be nil, was %v", *o.Body) + } + return nil + }) + return f +} + +func (f *ProcedureDetailsAssert) HasNullHandlingNil() *ProcedureDetailsAssert { + f.AddAssertion(func(t *testing.T, o *sdk.ProcedureDetails) error { + t.Helper() + if o.NullHandling != nil { + return fmt.Errorf("expected null handling to be nil, was %v", *o.NullHandling) + } + return nil + }) + return f +} + +func (f *ProcedureDetailsAssert) HasVolatilityNil() *ProcedureDetailsAssert { + f.AddAssertion(func(t *testing.T, o *sdk.ProcedureDetails) error { + t.Helper() + if o.Volatility != nil { + return fmt.Errorf("expected volatility to be nil, was %v", *o.Volatility) + } + return nil + }) + return f +} + +func (f *ProcedureDetailsAssert) HasExternalAccessIntegrationsNil() *ProcedureDetailsAssert { + f.AddAssertion(func(t *testing.T, o *sdk.ProcedureDetails) error { + t.Helper() + if o.ExternalAccessIntegrations != nil { + return fmt.Errorf("expected external access integrations to be nil, was %v", *o.ExternalAccessIntegrations) + } + return nil + }) + return f +} + +func (f *ProcedureDetailsAssert) HasSecretsNil() *ProcedureDetailsAssert { + f.AddAssertion(func(t *testing.T, o *sdk.ProcedureDetails) error { + t.Helper() + if o.Secrets != nil { + return fmt.Errorf("expected secrets to be nil, was %v", *o.Secrets) + } + return nil + }) + return f +} + +func (f *ProcedureDetailsAssert) HasImportsNil() *ProcedureDetailsAssert { + f.AddAssertion(func(t *testing.T, o *sdk.ProcedureDetails) error { + t.Helper() + if o.Imports != nil { + return fmt.Errorf("expected imports to be nil, was %v", *o.Imports) + } + return nil + }) + return f +} + +func (f *ProcedureDetailsAssert) HasHandlerNil() *ProcedureDetailsAssert { + f.AddAssertion(func(t *testing.T, o *sdk.ProcedureDetails) error { + t.Helper() + if o.Handler != nil { + return fmt.Errorf("expected handler to be nil, was %v", *o.Handler) + } + return nil + }) + return f +} + +func (f *ProcedureDetailsAssert) HasRuntimeVersionNil() *ProcedureDetailsAssert { + f.AddAssertion(func(t *testing.T, o *sdk.ProcedureDetails) error { + t.Helper() + if o.RuntimeVersion != nil { + return fmt.Errorf("expected runtime version to be nil, was %v", *o.RuntimeVersion) + } + return nil + }) + return f +} + +func (f *ProcedureDetailsAssert) HasPackagesNil() *ProcedureDetailsAssert { + 
f.AddAssertion(func(t *testing.T, o *sdk.ProcedureDetails) error { + t.Helper() + if o.Packages != nil { + return fmt.Errorf("expected packages to be nil, was %v", *o.Packages) + } + return nil + }) + return f +} + +func (f *ProcedureDetailsAssert) HasTargetPathNil() *ProcedureDetailsAssert { + f.AddAssertion(func(t *testing.T, o *sdk.ProcedureDetails) error { + t.Helper() + if o.TargetPath != nil { + return fmt.Errorf("expected target path to be nil, was %v", *o.TargetPath) + } + return nil + }) + return f +} + +func (f *ProcedureDetailsAssert) HasInstalledPackagesNil() *ProcedureDetailsAssert { + f.AddAssertion(func(t *testing.T, o *sdk.ProcedureDetails) error { + t.Helper() + if o.InstalledPackages != nil { + return fmt.Errorf("expected installed packages to be nil, was %v", *o.InstalledPackages) + } + return nil + }) + return f +} + +func (f *ProcedureDetailsAssert) HasInstalledPackagesNotEmpty() *ProcedureDetailsAssert { + f.AddAssertion(func(t *testing.T, o *sdk.ProcedureDetails) error { + t.Helper() + if o.InstalledPackages == nil { + return fmt.Errorf("expected installed packages to not be nil") + } + if *o.InstalledPackages == "" { + return fmt.Errorf("expected installed packages to not be empty") + } + return nil + }) + return f +} + +func (f *ProcedureDetailsAssert) HasExactlyExternalAccessIntegrations(integrations ...sdk.AccountObjectIdentifier) *ProcedureDetailsAssert { + f.AddAssertion(func(t *testing.T, o *sdk.ProcedureDetails) error { + t.Helper() + if o.ExternalAccessIntegrations == nil { + return fmt.Errorf("expected external access integrations to have value; got: nil") + } + joined := strings.Join(collections.Map(integrations, func(ex sdk.AccountObjectIdentifier) string { return ex.FullyQualifiedName() }), ",") + expected := fmt.Sprintf(`[%s]`, joined) + if *o.ExternalAccessIntegrations != expected { + return fmt.Errorf("expected external access integrations: %v; got: %v", expected, *o.ExternalAccessIntegrations) + } + return nil + }) + return f +} + +func (f *ProcedureDetailsAssert) HasExactlySecrets(expectedSecrets map[string]sdk.SchemaObjectIdentifier) *ProcedureDetailsAssert { + f.AddAssertion(func(t *testing.T, o *sdk.ProcedureDetails) error { + t.Helper() + if o.Secrets == nil { + return fmt.Errorf("expected secrets to have value; got: nil") + } + var parts []string + for k, v := range expectedSecrets { + parts = append(parts, fmt.Sprintf(`"%s":"\"%s\".\"%s\".%s"`, k, v.DatabaseName(), v.SchemaName(), v.Name())) + } + expected := fmt.Sprintf(`{%s}`, strings.Join(parts, ",")) + if *o.Secrets != expected { + return fmt.Errorf("expected secrets: %v; got: %v", expected, *o.Secrets) + } + return nil + }) + return f +} diff --git a/pkg/acceptance/bettertestspoc/assert/objectassert/procedure_snowflake_ext.go b/pkg/acceptance/bettertestspoc/assert/objectassert/procedure_snowflake_ext.go new file mode 100644 index 0000000000..12d5a384cf --- /dev/null +++ b/pkg/acceptance/bettertestspoc/assert/objectassert/procedure_snowflake_ext.go @@ -0,0 +1,59 @@ +package objectassert + +import ( + "fmt" + "strings" + "testing" + + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/internal/collections" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk" +) + +func (a *ProcedureAssert) HasCreatedOnNotEmpty() *ProcedureAssert { + a.AddAssertion(func(t *testing.T, o *sdk.Procedure) error { + t.Helper() + if o.CreatedOn == "" { + return fmt.Errorf("expected create_on to be not empty") + } + return nil + }) + return a +} + +func (a *ProcedureAssert) 
HasExternalAccessIntegrationsNil() *ProcedureAssert { + a.AddAssertion(func(t *testing.T, o *sdk.Procedure) error { + t.Helper() + if o.ExternalAccessIntegrations != nil { + return fmt.Errorf("expected external_access_integrations to be nil but was: %v", *o.ExternalAccessIntegrations) + } + return nil + }) + return a +} + +func (a *ProcedureAssert) HasSecretsNil() *ProcedureAssert { + a.AddAssertion(func(t *testing.T, o *sdk.Procedure) error { + t.Helper() + if o.Secrets != nil { + return fmt.Errorf("expected secrets to be nil but was: %v", *o.Secrets) + } + return nil + }) + return a +} + +func (f *ProcedureAssert) HasExactlyExternalAccessIntegrations(integrations ...sdk.AccountObjectIdentifier) *ProcedureAssert { + f.AddAssertion(func(t *testing.T, o *sdk.Procedure) error { + t.Helper() + if o.ExternalAccessIntegrations == nil { + return fmt.Errorf("expected external access integrations to have value; got: nil") + } + joined := strings.Join(collections.Map(integrations, func(ex sdk.AccountObjectIdentifier) string { return ex.FullyQualifiedName() }), ",") + expected := fmt.Sprintf(`[%s]`, joined) + if *o.ExternalAccessIntegrations != expected { + return fmt.Errorf("expected external access integrations: %v; got: %v", expected, *o.ExternalAccessIntegrations) + } + return nil + }) + return f +} diff --git a/pkg/acceptance/bettertestspoc/assert/objectassert/procedure_snowflake_gen.go b/pkg/acceptance/bettertestspoc/assert/objectassert/procedure_snowflake_gen.go new file mode 100644 index 0000000000..ef1d4c83cf --- /dev/null +++ b/pkg/acceptance/bettertestspoc/assert/objectassert/procedure_snowflake_gen.go @@ -0,0 +1,227 @@ +// Code generated by assertions generator; DO NOT EDIT. + +package objectassert + +// imports edited manually +import ( + "fmt" + "slices" + "testing" + + acc "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance" + + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/bettertestspoc/assert" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk" +) + +type ProcedureAssert struct { + *assert.SnowflakeObjectAssert[sdk.Procedure, sdk.SchemaObjectIdentifierWithArguments] +} + +func Procedure(t *testing.T, id sdk.SchemaObjectIdentifierWithArguments) *ProcedureAssert { + t.Helper() + return &ProcedureAssert{ + assert.NewSnowflakeObjectAssertWithProvider(sdk.ObjectTypeProcedure, id, acc.TestClient().Procedure.Show), + } +} + +func ProcedureFromObject(t *testing.T, procedure *sdk.Procedure) *ProcedureAssert { + t.Helper() + return &ProcedureAssert{ + assert.NewSnowflakeObjectAssertWithObject(sdk.ObjectTypeProcedure, procedure.ID(), procedure), + } +} + +func (p *ProcedureAssert) HasCreatedOn(expected string) *ProcedureAssert { + p.AddAssertion(func(t *testing.T, o *sdk.Procedure) error { + t.Helper() + if o.CreatedOn != expected { + return fmt.Errorf("expected created on: %v; got: %v", expected, o.CreatedOn) + } + return nil + }) + return p +} + +func (p *ProcedureAssert) HasName(expected string) *ProcedureAssert { + p.AddAssertion(func(t *testing.T, o *sdk.Procedure) error { + t.Helper() + if o.Name != expected { + return fmt.Errorf("expected name: %v; got: %v", expected, o.Name) + } + return nil + }) + return p +} + +func (p *ProcedureAssert) HasSchemaName(expected string) *ProcedureAssert { + p.AddAssertion(func(t *testing.T, o *sdk.Procedure) error { + t.Helper() + if o.SchemaName != expected { + return fmt.Errorf("expected schema name: %v; got: %v", expected, o.SchemaName) + } + return nil + }) + return p +} + +func (p 
*ProcedureAssert) HasIsBuiltin(expected bool) *ProcedureAssert { + p.AddAssertion(func(t *testing.T, o *sdk.Procedure) error { + t.Helper() + if o.IsBuiltin != expected { + return fmt.Errorf("expected is builtin: %v; got: %v", expected, o.IsBuiltin) + } + return nil + }) + return p +} + +func (p *ProcedureAssert) HasIsAggregate(expected bool) *ProcedureAssert { + p.AddAssertion(func(t *testing.T, o *sdk.Procedure) error { + t.Helper() + if o.IsAggregate != expected { + return fmt.Errorf("expected is aggregate: %v; got: %v", expected, o.IsAggregate) + } + return nil + }) + return p +} + +func (p *ProcedureAssert) HasIsAnsi(expected bool) *ProcedureAssert { + p.AddAssertion(func(t *testing.T, o *sdk.Procedure) error { + t.Helper() + if o.IsAnsi != expected { + return fmt.Errorf("expected is ansi: %v; got: %v", expected, o.IsAnsi) + } + return nil + }) + return p +} + +func (p *ProcedureAssert) HasMinNumArguments(expected int) *ProcedureAssert { + p.AddAssertion(func(t *testing.T, o *sdk.Procedure) error { + t.Helper() + if o.MinNumArguments != expected { + return fmt.Errorf("expected min num arguments: %v; got: %v", expected, o.MinNumArguments) + } + return nil + }) + return p +} + +func (p *ProcedureAssert) HasMaxNumArguments(expected int) *ProcedureAssert { + p.AddAssertion(func(t *testing.T, o *sdk.Procedure) error { + t.Helper() + if o.MaxNumArguments != expected { + return fmt.Errorf("expected max num arguments: %v; got: %v", expected, o.MaxNumArguments) + } + return nil + }) + return p +} + +func (p *ProcedureAssert) HasArgumentsOld(expected []sdk.DataType) *ProcedureAssert { + p.AddAssertion(func(t *testing.T, o *sdk.Procedure) error { + t.Helper() + // edited manually + if !slices.Equal(o.ArgumentsOld, expected) { + return fmt.Errorf("expected arguments old: %v; got: %v", expected, o.ArgumentsOld) + } + return nil + }) + return p +} + +func (p *ProcedureAssert) HasArgumentsRaw(expected string) *ProcedureAssert { + p.AddAssertion(func(t *testing.T, o *sdk.Procedure) error { + t.Helper() + if o.ArgumentsRaw != expected { + return fmt.Errorf("expected arguments raw: %v; got: %v", expected, o.ArgumentsRaw) + } + return nil + }) + return p +} + +func (p *ProcedureAssert) HasDescription(expected string) *ProcedureAssert { + p.AddAssertion(func(t *testing.T, o *sdk.Procedure) error { + t.Helper() + if o.Description != expected { + return fmt.Errorf("expected description: %v; got: %v", expected, o.Description) + } + return nil + }) + return p +} + +func (p *ProcedureAssert) HasCatalogName(expected string) *ProcedureAssert { + p.AddAssertion(func(t *testing.T, o *sdk.Procedure) error { + t.Helper() + if o.CatalogName != expected { + return fmt.Errorf("expected catalog name: %v; got: %v", expected, o.CatalogName) + } + return nil + }) + return p +} + +func (p *ProcedureAssert) HasIsTableFunction(expected bool) *ProcedureAssert { + p.AddAssertion(func(t *testing.T, o *sdk.Procedure) error { + t.Helper() + if o.IsTableFunction != expected { + return fmt.Errorf("expected is table function: %v; got: %v", expected, o.IsTableFunction) + } + return nil + }) + return p +} + +func (p *ProcedureAssert) HasValidForClustering(expected bool) *ProcedureAssert { + p.AddAssertion(func(t *testing.T, o *sdk.Procedure) error { + t.Helper() + if o.ValidForClustering != expected { + return fmt.Errorf("expected valid for clustering: %v; got: %v", expected, o.ValidForClustering) + } + return nil + }) + return p +} + +func (p *ProcedureAssert) HasIsSecure(expected bool) *ProcedureAssert { + p.AddAssertion(func(t 
*testing.T, o *sdk.Procedure) error { + t.Helper() + if o.IsSecure != expected { + return fmt.Errorf("expected is secure: %v; got: %v", expected, o.IsSecure) + } + return nil + }) + return p +} + +func (p *ProcedureAssert) HasSecrets(expected string) *ProcedureAssert { + p.AddAssertion(func(t *testing.T, o *sdk.Procedure) error { + t.Helper() + if o.Secrets == nil { + return fmt.Errorf("expected secrets to have value; got: nil") + } + if *o.Secrets != expected { + return fmt.Errorf("expected secrets: %v; got: %v", expected, *o.Secrets) + } + return nil + }) + return p +} + +func (p *ProcedureAssert) HasExternalAccessIntegrations(expected string) *ProcedureAssert { + p.AddAssertion(func(t *testing.T, o *sdk.Procedure) error { + t.Helper() + if o.ExternalAccessIntegrations == nil { + return fmt.Errorf("expected external access integrations to have value; got: nil") + } + if *o.ExternalAccessIntegrations != expected { + return fmt.Errorf("expected external access integrations: %v; got: %v", expected, *o.ExternalAccessIntegrations) + } + return nil + }) + return p +} diff --git a/pkg/acceptance/bettertestspoc/assert/objectparametersassert/gen/object_parameters_def.go b/pkg/acceptance/bettertestspoc/assert/objectparametersassert/gen/object_parameters_def.go index 7db9f72c07..fd716a8993 100644 --- a/pkg/acceptance/bettertestspoc/assert/objectparametersassert/gen/object_parameters_def.go +++ b/pkg/acceptance/bettertestspoc/assert/objectparametersassert/gen/object_parameters_def.go @@ -216,4 +216,16 @@ var allObjectsParameters = []SnowflakeObjectParameters{ {ParameterName: string(sdk.FunctionParameterTraceLevel), ParameterType: "sdk.TraceLevel", DefaultValue: "sdk.TraceLevelOff", DefaultLevel: "sdk.ParameterTypeSnowflakeDefault"}, }, }, + { + Name: "Procedure", + IdType: "sdk.SchemaObjectIdentifierWithArguments", + Level: sdk.ParameterTypeProcedure, + Parameters: []SnowflakeParameter{ + {ParameterName: string(sdk.ProcedureParameterAutoEventLogging), ParameterType: "sdk.AutoEventLogging", DefaultValue: "sdk.AutoEventLoggingOff", DefaultLevel: "sdk.ParameterTypeSnowflakeDefault"}, + {ParameterName: string(sdk.ProcedureParameterEnableConsoleOutput), ParameterType: "bool", DefaultValue: "false", DefaultLevel: "sdk.ParameterTypeSnowflakeDefault"}, + {ParameterName: string(sdk.ProcedureParameterLogLevel), ParameterType: "sdk.LogLevel", DefaultValue: "sdk.LogLevelOff", DefaultLevel: "sdk.ParameterTypeSnowflakeDefault"}, + {ParameterName: string(sdk.ProcedureParameterMetricLevel), ParameterType: "sdk.MetricLevel", DefaultValue: "sdk.MetricLevelNone", DefaultLevel: "sdk.ParameterTypeSnowflakeDefault"}, + {ParameterName: string(sdk.ProcedureParameterTraceLevel), ParameterType: "sdk.TraceLevel", DefaultValue: "sdk.TraceLevelOff", DefaultLevel: "sdk.ParameterTypeSnowflakeDefault"}, + }, + }, } diff --git a/pkg/acceptance/bettertestspoc/assert/objectparametersassert/procedure_parameters_snowflake_gen.go b/pkg/acceptance/bettertestspoc/assert/objectparametersassert/procedure_parameters_snowflake_gen.go new file mode 100644 index 0000000000..b425119010 --- /dev/null +++ b/pkg/acceptance/bettertestspoc/assert/objectparametersassert/procedure_parameters_snowflake_gen.go @@ -0,0 +1,190 @@ +// Code generated by assertions generator; DO NOT EDIT. 
+ +package objectparametersassert + +import ( + "testing" + + acc "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance" + + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/bettertestspoc/assert" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk" +) + +type ProcedureParametersAssert struct { + *assert.SnowflakeParametersAssert[sdk.SchemaObjectIdentifierWithArguments] +} + +func ProcedureParameters(t *testing.T, id sdk.SchemaObjectIdentifierWithArguments) *ProcedureParametersAssert { + t.Helper() + return &ProcedureParametersAssert{ + assert.NewSnowflakeParametersAssertWithProvider(id, sdk.ObjectTypeProcedure, acc.TestClient().Parameter.ShowProcedureParameters), + } +} + +func ProcedureParametersPrefetched(t *testing.T, id sdk.SchemaObjectIdentifierWithArguments, parameters []*sdk.Parameter) *ProcedureParametersAssert { + t.Helper() + return &ProcedureParametersAssert{ + assert.NewSnowflakeParametersAssertWithParameters(id, sdk.ObjectTypeProcedure, parameters), + } +} + +////////////////////////////// +// Generic parameter checks // +////////////////////////////// + +func (p *ProcedureParametersAssert) HasBoolParameterValue(parameterName sdk.ProcedureParameter, expected bool) *ProcedureParametersAssert { + p.AddAssertion(assert.SnowflakeParameterBoolValueSet(parameterName, expected)) + return p +} + +func (p *ProcedureParametersAssert) HasIntParameterValue(parameterName sdk.ProcedureParameter, expected int) *ProcedureParametersAssert { + p.AddAssertion(assert.SnowflakeParameterIntValueSet(parameterName, expected)) + return p +} + +func (p *ProcedureParametersAssert) HasStringParameterValue(parameterName sdk.ProcedureParameter, expected string) *ProcedureParametersAssert { + p.AddAssertion(assert.SnowflakeParameterValueSet(parameterName, expected)) + return p +} + +func (p *ProcedureParametersAssert) HasDefaultParameterValue(parameterName sdk.ProcedureParameter) *ProcedureParametersAssert { + p.AddAssertion(assert.SnowflakeParameterDefaultValueSet(parameterName)) + return p +} + +func (p *ProcedureParametersAssert) HasDefaultParameterValueOnLevel(parameterName sdk.ProcedureParameter, parameterType sdk.ParameterType) *ProcedureParametersAssert { + p.AddAssertion(assert.SnowflakeParameterDefaultValueOnLevelSet(parameterName, parameterType)) + return p +} + +/////////////////////////////// +// Aggregated generic checks // +/////////////////////////////// + +// HasAllDefaults checks if all the parameters: +// - have a default value by comparing current value of the sdk.Parameter with its default +// - have an expected level +func (p *ProcedureParametersAssert) HasAllDefaults() *ProcedureParametersAssert { + return p. + HasDefaultParameterValueOnLevel(sdk.ProcedureParameterAutoEventLogging, sdk.ParameterTypeSnowflakeDefault). + HasDefaultParameterValueOnLevel(sdk.ProcedureParameterEnableConsoleOutput, sdk.ParameterTypeSnowflakeDefault). + HasDefaultParameterValueOnLevel(sdk.ProcedureParameterLogLevel, sdk.ParameterTypeSnowflakeDefault). + HasDefaultParameterValueOnLevel(sdk.ProcedureParameterMetricLevel, sdk.ParameterTypeSnowflakeDefault). + HasDefaultParameterValueOnLevel(sdk.ProcedureParameterTraceLevel, sdk.ParameterTypeSnowflakeDefault) +} + +func (p *ProcedureParametersAssert) HasAllDefaultsExplicit() *ProcedureParametersAssert { + return p. + HasDefaultAutoEventLoggingValueExplicit(). + HasDefaultEnableConsoleOutputValueExplicit(). + HasDefaultLogLevelValueExplicit(). + HasDefaultMetricLevelValueExplicit(). 
+ HasDefaultTraceLevelValueExplicit() +} + +//////////////////////////// +// Parameter value checks // +//////////////////////////// + +func (p *ProcedureParametersAssert) HasAutoEventLogging(expected sdk.AutoEventLogging) *ProcedureParametersAssert { + p.AddAssertion(assert.SnowflakeParameterStringUnderlyingValueSet(sdk.ProcedureParameterAutoEventLogging, expected)) + return p +} + +func (p *ProcedureParametersAssert) HasEnableConsoleOutput(expected bool) *ProcedureParametersAssert { + p.AddAssertion(assert.SnowflakeParameterBoolValueSet(sdk.ProcedureParameterEnableConsoleOutput, expected)) + return p +} + +func (p *ProcedureParametersAssert) HasLogLevel(expected sdk.LogLevel) *ProcedureParametersAssert { + p.AddAssertion(assert.SnowflakeParameterStringUnderlyingValueSet(sdk.ProcedureParameterLogLevel, expected)) + return p +} + +func (p *ProcedureParametersAssert) HasMetricLevel(expected sdk.MetricLevel) *ProcedureParametersAssert { + p.AddAssertion(assert.SnowflakeParameterStringUnderlyingValueSet(sdk.ProcedureParameterMetricLevel, expected)) + return p +} + +func (p *ProcedureParametersAssert) HasTraceLevel(expected sdk.TraceLevel) *ProcedureParametersAssert { + p.AddAssertion(assert.SnowflakeParameterStringUnderlyingValueSet(sdk.ProcedureParameterTraceLevel, expected)) + return p +} + +//////////////////////////// +// Parameter level checks // +//////////////////////////// + +func (p *ProcedureParametersAssert) HasAutoEventLoggingLevel(expected sdk.ParameterType) *ProcedureParametersAssert { + p.AddAssertion(assert.SnowflakeParameterLevelSet(sdk.ProcedureParameterAutoEventLogging, expected)) + return p +} + +func (p *ProcedureParametersAssert) HasEnableConsoleOutputLevel(expected sdk.ParameterType) *ProcedureParametersAssert { + p.AddAssertion(assert.SnowflakeParameterLevelSet(sdk.ProcedureParameterEnableConsoleOutput, expected)) + return p +} + +func (p *ProcedureParametersAssert) HasLogLevelLevel(expected sdk.ParameterType) *ProcedureParametersAssert { + p.AddAssertion(assert.SnowflakeParameterLevelSet(sdk.ProcedureParameterLogLevel, expected)) + return p +} + +func (p *ProcedureParametersAssert) HasMetricLevelLevel(expected sdk.ParameterType) *ProcedureParametersAssert { + p.AddAssertion(assert.SnowflakeParameterLevelSet(sdk.ProcedureParameterMetricLevel, expected)) + return p +} + +func (p *ProcedureParametersAssert) HasTraceLevelLevel(expected sdk.ParameterType) *ProcedureParametersAssert { + p.AddAssertion(assert.SnowflakeParameterLevelSet(sdk.ProcedureParameterTraceLevel, expected)) + return p +} + +//////////////////////////////////// +// Parameter default value checks // +//////////////////////////////////// + +func (p *ProcedureParametersAssert) HasDefaultAutoEventLoggingValue() *ProcedureParametersAssert { + return p.HasDefaultParameterValue(sdk.ProcedureParameterAutoEventLogging) +} + +func (p *ProcedureParametersAssert) HasDefaultEnableConsoleOutputValue() *ProcedureParametersAssert { + return p.HasDefaultParameterValue(sdk.ProcedureParameterEnableConsoleOutput) +} + +func (p *ProcedureParametersAssert) HasDefaultLogLevelValue() *ProcedureParametersAssert { + return p.HasDefaultParameterValue(sdk.ProcedureParameterLogLevel) +} + +func (p *ProcedureParametersAssert) HasDefaultMetricLevelValue() *ProcedureParametersAssert { + return p.HasDefaultParameterValue(sdk.ProcedureParameterMetricLevel) +} + +func (p *ProcedureParametersAssert) HasDefaultTraceLevelValue() *ProcedureParametersAssert { + return p.HasDefaultParameterValue(sdk.ProcedureParameterTraceLevel) +} + 
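// Editorial usage sketch (not part of the generated file above): it shows how
// the fluent parameter assertions defined in this file are meant to compose in
// an acceptance test. The function name and the bare chained calls are
// assumptions for illustration; the assertion and sdk identifiers themselves
// all come from this patch.
func exampleProcedureParametersAssertions(t *testing.T, id sdk.SchemaObjectIdentifierWithArguments) {
	t.Helper()
	// A freshly created procedure is expected to report Snowflake defaults on
	// the SNOWFLAKE_DEFAULT level and to match the explicit default values.
	ProcedureParameters(t, id).
		HasAllDefaults().
		HasAllDefaultsExplicit()
	// Individual parameters can also be checked by value and by level.
	ProcedureParameters(t, id).
		HasAutoEventLogging(sdk.AutoEventLoggingOff).
		HasLogLevelLevel(sdk.ParameterTypeSnowflakeDefault)
}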
+///////////////////////////////////////////// +// Parameter explicit default value checks // +///////////////////////////////////////////// + +func (p *ProcedureParametersAssert) HasDefaultAutoEventLoggingValueExplicit() *ProcedureParametersAssert { + return p.HasAutoEventLogging(sdk.AutoEventLoggingOff) +} + +func (p *ProcedureParametersAssert) HasDefaultEnableConsoleOutputValueExplicit() *ProcedureParametersAssert { + return p.HasEnableConsoleOutput(false) +} + +func (p *ProcedureParametersAssert) HasDefaultLogLevelValueExplicit() *ProcedureParametersAssert { + return p.HasLogLevel(sdk.LogLevelOff) +} + +func (p *ProcedureParametersAssert) HasDefaultMetricLevelValueExplicit() *ProcedureParametersAssert { + return p.HasMetricLevel(sdk.MetricLevelNone) +} + +func (p *ProcedureParametersAssert) HasDefaultTraceLevelValueExplicit() *ProcedureParametersAssert { + return p.HasTraceLevel(sdk.TraceLevelOff) +} diff --git a/pkg/acceptance/helpers/function_client.go b/pkg/acceptance/helpers/function_client.go index ef8fde637b..4d9bf35aaa 100644 --- a/pkg/acceptance/helpers/function_client.go +++ b/pkg/acceptance/helpers/function_client.go @@ -232,6 +232,7 @@ func (c *FunctionClient) SampleScalaDefinition(t *testing.T, className string, f `, className, funcName, argName) } +// TODO [SNOW-1850370]: use input argument like in other samples func (c *FunctionClient) SampleSqlDefinition(t *testing.T) string { t.Helper() diff --git a/pkg/acceptance/helpers/function_setup_helpers.go b/pkg/acceptance/helpers/function_setup_helpers.go index ca4d29dd86..8f0447e443 100644 --- a/pkg/acceptance/helpers/function_setup_helpers.go +++ b/pkg/acceptance/helpers/function_setup_helpers.go @@ -54,6 +54,45 @@ func (c *TestClient) CreateSampleJavaFunctionAndJar(t *testing.T) *TmpFunction { } } +func (c *TestClient) CreateSampleJavaProcedureAndJar(t *testing.T) *TmpFunction { + t.Helper() + ctx := context.Background() + + className := fmt.Sprintf("TestClassAbc%s", random.AlphaLowerN(3)) + funcName := fmt.Sprintf("echoVarchar%s", random.AlphaLowerN(3)) + argName := fmt.Sprintf("arg%s", random.AlphaLowerN(3)) + dataType := testdatatypes.DataTypeVarchar_100 + + id := c.Ids.RandomSchemaObjectIdentifierWithArguments(sdk.LegacyDataTypeFrom(dataType)) + argument := sdk.NewProcedureArgumentRequest(argName, dataType) + dt := sdk.NewProcedureReturnsResultDataTypeRequest(dataType) + returns := sdk.NewProcedureReturnsRequest().WithResultDataType(*dt) + handler := fmt.Sprintf("%s.%s", className, funcName) + definition := c.Procedure.SampleJavaDefinition(t, className, funcName, argName) + jarName := fmt.Sprintf("tf-%d-%s.jar", time.Now().Unix(), random.AlphaN(5)) + targetPath := fmt.Sprintf("@~/%s", jarName) + packages := []sdk.ProcedurePackageRequest{*sdk.NewProcedurePackageRequest("com.snowflake:snowpark:1.14.0")} + + request := sdk.NewCreateForJavaProcedureRequest(id.SchemaObjectId(), *returns, "11", packages, handler). + WithArguments([]sdk.ProcedureArgumentRequest{*argument}). + WithTargetPath(targetPath). 
+ WithProcedureDefinitionWrapped(definition) + + err := c.context.client.Procedures.CreateForJava(ctx, request) + require.NoError(t, err) + t.Cleanup(c.Procedure.DropProcedureFunc(t, id)) + t.Cleanup(c.Stage.RemoveFromUserStageFunc(t, jarName)) + + return &TmpFunction{ + FunctionId: id, + ClassName: className, + FuncName: funcName, + ArgName: argName, + ArgType: dataType, + JarName: jarName, + } +} + func (c *TestClient) CreateSamplePythonFunctionAndModule(t *testing.T) *TmpFunction { t.Helper() ctx := context.Background() @@ -83,7 +122,7 @@ func (c *TestClient) CreateSamplePythonFunctionAndModule(t *testing.T) *TmpFunct return &TmpFunction{ FunctionId: id, - ModuleName: strings.ReplaceAll(moduleFileName, ".py", ""), + ModuleName: strings.TrimSuffix(moduleFileName, ".py"), FuncName: funcName, ArgName: argName, ArgType: dataType, diff --git a/pkg/acceptance/helpers/parameter_client.go b/pkg/acceptance/helpers/parameter_client.go index 902201d2ca..1e3a24da0b 100644 --- a/pkg/acceptance/helpers/parameter_client.go +++ b/pkg/acceptance/helpers/parameter_client.go @@ -102,6 +102,17 @@ func (c *ParameterClient) ShowFunctionParameters(t *testing.T, id sdk.SchemaObje return params } +func (c *ParameterClient) ShowProcedureParameters(t *testing.T, id sdk.SchemaObjectIdentifierWithArguments) []*sdk.Parameter { + t.Helper() + params, err := c.client().ShowParameters(context.Background(), &sdk.ShowParametersOptions{ + In: &sdk.ParametersIn{ + Procedure: id, + }, + }) + require.NoError(t, err) + return params +} + func (c *ParameterClient) UpdateAccountParameterTemporarily(t *testing.T, parameter sdk.AccountParameter, newValue string) func() { t.Helper() ctx := context.Background() diff --git a/pkg/acceptance/helpers/procedure_client.go b/pkg/acceptance/helpers/procedure_client.go index 34aec170f7..019d5f9299 100644 --- a/pkg/acceptance/helpers/procedure_client.go +++ b/pkg/acceptance/helpers/procedure_client.go @@ -2,9 +2,12 @@ package helpers import ( "context" + "fmt" "testing" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/testdatatypes" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk/datatypes" "github.com/stretchr/testify/require" ) @@ -24,6 +27,66 @@ func (c *ProcedureClient) client() sdk.Procedures { return c.context.client.Procedures } +func (c *ProcedureClient) CreateSql(t *testing.T) (*sdk.Procedure, func()) { + t.Helper() + dataType := testdatatypes.DataTypeFloat + id := c.ids.RandomSchemaObjectIdentifierWithArguments(sdk.LegacyDataTypeFrom(dataType)) + definition := c.SampleSqlDefinition(t) + return c.CreateSqlWithIdentifierAndArgument(t, id.SchemaObjectId(), dataType, definition) +} + +func (c *ProcedureClient) CreateSqlWithIdentifierAndArgument(t *testing.T, id sdk.SchemaObjectIdentifier, dataType datatypes.DataType, definition string) (*sdk.Procedure, func()) { + t.Helper() + ctx := context.Background() + + idWithArgs := sdk.NewSchemaObjectIdentifierWithArgumentsInSchema(id.SchemaId(), id.Name(), sdk.LegacyDataTypeFrom(dataType)) + argName := "x" + dt := sdk.NewProcedureReturnsResultDataTypeRequest(dataType) + returns := sdk.NewProcedureSQLReturnsRequest().WithResultDataType(*dt) + argument := sdk.NewProcedureArgumentRequest(argName, dataType) + + request := sdk.NewCreateForSQLProcedureRequestDefinitionWrapped(id, *returns, definition). 
+ WithArguments([]sdk.ProcedureArgumentRequest{*argument}) + + err := c.client().CreateForSQL(ctx, request) + require.NoError(t, err) + + procedure, err := c.client().ShowByID(ctx, idWithArgs) + require.NoError(t, err) + + return procedure, c.DropProcedureFunc(t, idWithArgs) +} + +func (c *ProcedureClient) CreateJava(t *testing.T) (*sdk.Procedure, func()) { + t.Helper() + ctx := context.Background() + + className := "TestFunc" + funcName := "echoVarchar" + argName := "x" + dataType := testdatatypes.DataTypeVarchar_100 + + id := c.ids.RandomSchemaObjectIdentifierWithArguments(sdk.LegacyDataTypeFrom(dataType)) + argument := sdk.NewProcedureArgumentRequest(argName, dataType) + dt := sdk.NewProcedureReturnsResultDataTypeRequest(dataType) + returns := sdk.NewProcedureReturnsRequest().WithResultDataType(*dt) + handler := fmt.Sprintf("%s.%s", className, funcName) + definition := c.SampleJavaDefinition(t, className, funcName, argName) + packages := []sdk.ProcedurePackageRequest{*sdk.NewProcedurePackageRequest("com.snowflake:snowpark:1.14.0")} + + request := sdk.NewCreateForJavaProcedureRequest(id.SchemaObjectId(), *returns, "11", packages, handler). + WithArguments([]sdk.ProcedureArgumentRequest{*argument}). + WithProcedureDefinitionWrapped(definition) + + err := c.client().CreateForJava(ctx, request) + require.NoError(t, err) + + function, err := c.client().ShowByID(ctx, id) + require.NoError(t, err) + + return function, c.DropProcedureFunc(t, id) +} + func (c *ProcedureClient) Create(t *testing.T, arguments ...sdk.DataType) *sdk.Procedure { t.Helper() return c.CreateWithIdentifier(t, c.ids.RandomSchemaObjectIdentifierWithArguments(arguments...)) @@ -37,7 +100,7 @@ func (c *ProcedureClient) CreateWithIdentifier(t *testing.T, id sdk.SchemaObject argumentRequests[i] = *sdk.NewProcedureArgumentRequest(c.ids.Alpha(), nil).WithArgDataTypeOld(argumentDataType) } err := c.client().CreateForSQL(ctx, - sdk.NewCreateForSQLProcedureRequest( + sdk.NewCreateForSQLProcedureRequestDefinitionWrapped( id.SchemaObjectId(), *sdk.NewProcedureSQLReturnsRequest().WithResultDataType(*sdk.NewProcedureReturnsResultDataTypeRequest(nil).WithResultDataTypeOld(sdk.DataTypeInt)), `BEGIN RETURN 1; END`).WithArguments(argumentRequests), @@ -53,3 +116,96 @@ func (c *ProcedureClient) CreateWithIdentifier(t *testing.T, id sdk.SchemaObject return procedure } + +func (c *ProcedureClient) DropProcedureFunc(t *testing.T, id sdk.SchemaObjectIdentifierWithArguments) func() { + t.Helper() + ctx := context.Background() + + return func() { + err := c.client().Drop(ctx, sdk.NewDropProcedureRequest(id).WithIfExists(true)) + require.NoError(t, err) + } +} + +func (c *ProcedureClient) Show(t *testing.T, id sdk.SchemaObjectIdentifierWithArguments) (*sdk.Procedure, error) { + t.Helper() + ctx := context.Background() + + return c.client().ShowByID(ctx, id) +} + +func (c *ProcedureClient) DescribeDetails(t *testing.T, id sdk.SchemaObjectIdentifierWithArguments) (*sdk.ProcedureDetails, error) { + t.Helper() + ctx := context.Background() + + return c.client().DescribeDetails(ctx, id) +} + +// Session argument is needed: https://docs.snowflake.com/en/developer-guide/stored-procedure/stored-procedures-java#data-access-example +// More references: https://docs.snowflake.com/en/developer-guide/stored-procedure/stored-procedures-java +func (c *ProcedureClient) SampleJavaDefinition(t *testing.T, className string, funcName string, argName string) string { + t.Helper() + + return fmt.Sprintf(` + import com.snowflake.snowpark_java.*; + class %[1]s { + public 
static String %[2]s(Session session, String %[3]s) { + return %[3]s; + } + } +`, className, funcName, argName) +} + +// For more references: https://docs.snowflake.com/en/developer-guide/stored-procedure/stored-procedures-javascript +func (c *ProcedureClient) SampleJavascriptDefinition(t *testing.T, argName string) string { + t.Helper() + + return fmt.Sprintf(` + if (%[1]s <= 0) { + return 1; + } else { + var result = 1; + for (var i = 2; i <= %[1]s; i++) { + result = result * i; + } + return result; + } +`, argName) +} + +func (c *ProcedureClient) SamplePythonDefinition(t *testing.T, funcName string, argName string) string { + t.Helper() + + return fmt.Sprintf(` +def %[1]s(%[2]s): + result = "" + for a in range(5): + result += %[2]s + return result +`, funcName, argName) +} + +// https://docs.snowflake.com/en/developer-guide/stored-procedure/stored-procedures-scala +func (c *ProcedureClient) SampleScalaDefinition(t *testing.T, className string, funcName string, argName string) string { + t.Helper() + + return fmt.Sprintf(` + import com.snowflake.snowpark_java.Session + + class %[1]s { + def %[2]s(session : Session, %[3]s : String): String = { + return %[3]s + } + } +`, className, funcName, argName) +} + +func (c *ProcedureClient) SampleSqlDefinition(t *testing.T) string { + t.Helper() + + return ` +BEGIN + RETURN 3.141592654::FLOAT; +END; +` +} diff --git a/pkg/acceptance/testdatatypes/testdatatypes.go b/pkg/acceptance/testdatatypes/testdatatypes.go index dc11e82f0e..48aa8fde51 100644 --- a/pkg/acceptance/testdatatypes/testdatatypes.go +++ b/pkg/acceptance/testdatatypes/testdatatypes.go @@ -2,6 +2,7 @@ package testdatatypes import "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk/datatypes" +// TODO [SNOW-1843440]: create using constructors (when we add them)? 
var ( DataTypeNumber_36_2, _ = datatypes.ParseDataType("NUMBER(36, 2)") DataTypeVarchar_100, _ = datatypes.ParseDataType("VARCHAR(100)") diff --git a/pkg/datasources/procedures.go b/pkg/datasources/procedures.go index a20c338a68..c0dd714ff1 100644 --- a/pkg/datasources/procedures.go +++ b/pkg/datasources/procedures.go @@ -79,10 +79,10 @@ func ReadContextProcedures(ctx context.Context, d *schema.ResourceData, meta int req := sdk.NewShowProcedureRequest() if databaseName != "" { - req.WithIn(sdk.In{Database: sdk.NewAccountObjectIdentifier(databaseName)}) + req.WithIn(sdk.ExtendedIn{In: sdk.In{Database: sdk.NewAccountObjectIdentifier(databaseName)}}) } if schemaName != "" { - req.WithIn(sdk.In{Schema: sdk.NewDatabaseObjectIdentifier(databaseName, schemaName)}) + req.WithIn(sdk.ExtendedIn{In: sdk.In{Schema: sdk.NewDatabaseObjectIdentifier(databaseName, schemaName)}}) } procedures, err := client.Procedures.Show(ctx, req) if err != nil { diff --git a/pkg/resources/external_function.go b/pkg/resources/external_function.go index 7ff5270ae2..5330787394 100644 --- a/pkg/resources/external_function.go +++ b/pkg/resources/external_function.go @@ -275,7 +275,7 @@ func CreateContextExternalFunction(ctx context.Context, d *schema.ResourceData, case v.(string) == "CALLED ON NULL INPUT": req.WithNullInputBehavior(sdk.NullInputBehaviorCalledOnNullInput) case v.(string) == "RETURNS NULL ON NULL INPUT": - req.WithNullInputBehavior(sdk.NullInputBehaviorReturnNullInput) + req.WithNullInputBehavior(sdk.NullInputBehaviorReturnsNullInput) default: req.WithNullInputBehavior(sdk.NullInputBehaviorStrict) } diff --git a/pkg/resources/function.go b/pkg/resources/function.go index a0440d33f2..38c6619a37 100644 --- a/pkg/resources/function.go +++ b/pkg/resources/function.go @@ -313,6 +313,8 @@ func createScalaFunction(ctx context.Context, d *schema.ResourceData, meta inter var runtimeVersion string if v, ok := d.GetOk("runtime_version"); ok { runtimeVersion = v.(string) + } else { + return diag.Errorf("Runtime version is required for Scala function") } // create request with required @@ -570,6 +572,9 @@ func ReadContextFunction(ctx context.Context, d *schema.ResourceData, meta inter } } for _, desc := range functionDetails { + if desc.Value == nil { + continue + } switch desc.Property { case "signature": // Format in Snowflake DB is: (argName argType, argName argType, ...) 
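// Editorial sketch of the nil-guard pattern this hunk introduces (and that the
// procedure hunks below repeat): DESCRIBE output rows now expose Value as a
// *string, so rows are skipped when Value is nil and dereferenced only after
// the guard. This standalone loop is a minimal illustration; the describeRow
// type is hypothetical, standing in for the sdk's describe-row struct.
type describeRow struct {
	Property string
	Value    *string
}

func readDescribeRows(rows []describeRow) map[string]string {
	out := make(map[string]string)
	for _, desc := range rows {
		if desc.Value == nil {
			continue // a property may come back with no value; never dereference nil
		}
		// Safe to dereference: nil values were filtered out above.
		out[desc.Property] = *desc.Value
	}
	return out
}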
@@ -590,15 +595,15 @@ func ReadContextFunction(ctx context.Context, d *schema.ResourceData, meta inter } } case "null handling": - if err := d.Set("null_input_behavior", desc.Value); err != nil { + if err := d.Set("null_input_behavior", *desc.Value); err != nil { diag.FromErr(err) } case "volatility": - if err := d.Set("return_behavior", desc.Value); err != nil { + if err := d.Set("return_behavior", *desc.Value); err != nil { diag.FromErr(err) } case "body": - if err := d.Set("statement", desc.Value); err != nil { + if err := d.Set("statement", *desc.Value); err != nil { diag.FromErr(err) } case "returns": @@ -614,11 +619,11 @@ func ReadContextFunction(ctx context.Context, d *schema.ResourceData, meta inter } case "language": if snowflake.Contains(languages, strings.ToLower(*desc.Value)) { - if err := d.Set("language", desc.Value); err != nil { + if err := d.Set("language", *desc.Value); err != nil { diag.FromErr(err) } } else { - log.Printf("[INFO] Unexpected language for function %v returned from Snowflake", desc.Value) + log.Printf("[INFO] Unexpected language for function %v returned from Snowflake", *desc.Value) } case "packages": value := strings.ReplaceAll(strings.ReplaceAll(strings.ReplaceAll(*desc.Value, "[", ""), "]", ""), "'", "") @@ -637,19 +642,19 @@ func ReadContextFunction(ctx context.Context, d *schema.ResourceData, meta inter } } case "handler": - if err := d.Set("handler", desc.Value); err != nil { + if err := d.Set("handler", *desc.Value); err != nil { diag.FromErr(err) } case "target_path": - if err := d.Set("target_path", desc.Value); err != nil { + if err := d.Set("target_path", *desc.Value); err != nil { diag.FromErr(err) } case "runtime_version": - if err := d.Set("runtime_version", desc.Value); err != nil { + if err := d.Set("runtime_version", *desc.Value); err != nil { diag.FromErr(err) } default: - log.Printf("[INFO] Unexpected function property %v returned from Snowflake with value %v", desc.Property, desc.Value) + log.Printf("[INFO] Unexpected function property %v returned from Snowflake with value %v", desc.Property, *desc.Value) } } diff --git a/pkg/resources/procedure.go b/pkg/resources/procedure.go index f7577833f9..8665f71d09 100644 --- a/pkg/resources/procedure.go +++ b/pkg/resources/procedure.go @@ -267,7 +267,7 @@ func createJavaProcedure(ctx context.Context, d *schema.ResourceData, meta inter } handler := d.Get("handler").(string) req := sdk.NewCreateForJavaProcedureRequest(id.SchemaObjectId(), *returns, runtimeVersion, packages, handler) - req.WithProcedureDefinition(procedureDefinition) + req.WithProcedureDefinitionWrapped(procedureDefinition) if len(args) > 0 { req.WithArguments(args) } @@ -322,7 +322,7 @@ func createJavaScriptProcedure(ctx context.Context, d *schema.ResourceData, meta return diags } procedureDefinition := d.Get("statement").(string) - req := sdk.NewCreateForJavaScriptProcedureRequest(id.SchemaObjectId(), nil, procedureDefinition).WithResultDataTypeOld(sdk.LegacyDataTypeFrom(returnDataType)) + req := sdk.NewCreateForJavaScriptProcedureRequestDefinitionWrapped(id.SchemaObjectId(), nil, procedureDefinition).WithResultDataTypeOld(sdk.LegacyDataTypeFrom(returnDataType)) if len(args) > 0 { req.WithArguments(args) } @@ -379,7 +379,7 @@ func createScalaProcedure(ctx context.Context, d *schema.ResourceData, meta inte } handler := d.Get("handler").(string) req := sdk.NewCreateForScalaProcedureRequest(id.SchemaObjectId(), *returns, runtimeVersion, packages, handler) - req.WithProcedureDefinition(procedureDefinition) + 
req.WithProcedureDefinitionWrapped(procedureDefinition) if len(args) > 0 { req.WithArguments(args) } @@ -433,7 +433,7 @@ func createSQLProcedure(ctx context.Context, d *schema.ResourceData, meta interf return diags } procedureDefinition := d.Get("statement").(string) - req := sdk.NewCreateForSQLProcedureRequest(id.SchemaObjectId(), *returns, procedureDefinition) + req := sdk.NewCreateForSQLProcedureRequestDefinitionWrapped(id.SchemaObjectId(), *returns, procedureDefinition) if len(args) > 0 { req.WithArguments(args) } @@ -490,7 +490,7 @@ func createPythonProcedure(ctx context.Context, d *schema.ResourceData, meta int } handler := d.Get("handler").(string) req := sdk.NewCreateForPythonProcedureRequest(id.SchemaObjectId(), *returns, runtimeVersion, packages, handler) - req.WithProcedureDefinition(procedureDefinition) + req.WithProcedureDefinitionWrapped(procedureDefinition) if len(args) > 0 { req.WithArguments(args) } @@ -570,10 +570,13 @@ func ReadContextProcedure(ctx context.Context, d *schema.ResourceData, meta inte } } for _, desc := range procedureDetails { + if desc.Value == nil { + continue + } switch desc.Property { case "signature": // Format in Snowflake DB is: (argName argType, argName argType, ...) - args := strings.ReplaceAll(strings.ReplaceAll(desc.Value, "(", ""), ")", "") + args := strings.ReplaceAll(strings.ReplaceAll(*desc.Value, "(", ""), ")", "") if args != "" { // Do nothing for functions without arguments argPairs := strings.Split(args, ", ") @@ -593,31 +596,31 @@ func ReadContextProcedure(ctx context.Context, d *schema.ResourceData, meta inte } } case "null handling": - if err := d.Set("null_input_behavior", desc.Value); err != nil { + if err := d.Set("null_input_behavior", *desc.Value); err != nil { return diag.FromErr(err) } case "body": - if err := d.Set("statement", desc.Value); err != nil { + if err := d.Set("statement", *desc.Value); err != nil { return diag.FromErr(err) } case "execute as": - if err := d.Set("execute_as", desc.Value); err != nil { + if err := d.Set("execute_as", *desc.Value); err != nil { return diag.FromErr(err) } case "returns": - if err := d.Set("return_type", desc.Value); err != nil { + if err := d.Set("return_type", *desc.Value); err != nil { return diag.FromErr(err) } case "language": - if err := d.Set("language", desc.Value); err != nil { + if err := d.Set("language", *desc.Value); err != nil { return diag.FromErr(err) } case "runtime_version": - if err := d.Set("runtime_version", desc.Value); err != nil { + if err := d.Set("runtime_version", *desc.Value); err != nil { return diag.FromErr(err) } case "packages": - packagesString := strings.ReplaceAll(strings.ReplaceAll(strings.ReplaceAll(desc.Value, "[", ""), "]", ""), "'", "") + packagesString := strings.ReplaceAll(strings.ReplaceAll(strings.ReplaceAll(*desc.Value, "[", ""), "]", ""), "'", "") if packagesString != "" { // Do nothing for Java / Python functions without packages packages := strings.Split(packagesString, ",") if err := d.Set("packages", packages); err != nil { @@ -625,7 +628,7 @@ func ReadContextProcedure(ctx context.Context, d *schema.ResourceData, meta inte } } case "imports": - importsString := strings.ReplaceAll(strings.ReplaceAll(strings.ReplaceAll(strings.ReplaceAll(desc.Value, "[", ""), "]", ""), "'", ""), " ", "") + importsString := strings.ReplaceAll(strings.ReplaceAll(strings.ReplaceAll(strings.ReplaceAll(*desc.Value, "[", ""), "]", ""), "'", ""), " ", "") if importsString != "" { // Do nothing for Java functions without imports imports := 
strings.Split(importsString, ",") if err := d.Set("imports", imports); err != nil { @@ -633,15 +636,15 @@ func ReadContextProcedure(ctx context.Context, d *schema.ResourceData, meta inte } } case "handler": - if err := d.Set("handler", desc.Value); err != nil { + if err := d.Set("handler", *desc.Value); err != nil { return diag.FromErr(err) } case "volatility": - if err := d.Set("return_behavior", desc.Value); err != nil { + if err := d.Set("return_behavior", *desc.Value); err != nil { return diag.FromErr(err) } default: - log.Printf("[INFO] Unexpected procedure property %v returned from Snowflake with value %v", desc.Property, desc.Value) + log.Printf("[INFO] Unexpected procedure property %v returned from Snowflake with value %v", desc.Property, *desc.Value) } } @@ -685,11 +688,11 @@ func UpdateContextProcedure(ctx context.Context, d *schema.ResourceData, meta in if d.HasChange("comment") { comment := d.Get("comment") if comment != "" { - if err := client.Procedures.Alter(ctx, sdk.NewAlterProcedureRequest(id).WithSetComment(comment.(string))); err != nil { + if err := client.Procedures.Alter(ctx, sdk.NewAlterProcedureRequest(id).WithSet(*sdk.NewProcedureSetRequest().WithComment(comment.(string)))); err != nil { return diag.FromErr(err) } } else { - if err := client.Procedures.Alter(ctx, sdk.NewAlterProcedureRequest(id).WithUnsetComment(true)); err != nil { + if err := client.Procedures.Alter(ctx, sdk.NewAlterProcedureRequest(id).WithUnset(*sdk.NewProcedureUnsetRequest().WithComment(true))); err != nil { return diag.FromErr(err) } } diff --git a/pkg/resources/procedure_acceptance_test.go b/pkg/resources/procedure_acceptance_test.go index 05cbfbfd73..1039ebc459 100644 --- a/pkg/resources/procedure_acceptance_test.go +++ b/pkg/resources/procedure_acceptance_test.go @@ -157,9 +157,9 @@ func TestAcc_Procedure_complex(t *testing.T) { resource.TestCheckResourceAttr(resourceName, "statement", statement), resource.TestCheckResourceAttr(resourceName, "execute_as", "CALLER"), resource.TestCheckResourceAttr(resourceName, "arguments.#", "2"), - resource.TestCheckResourceAttr(resourceName, "arguments.0.name", "ARG1"), + resource.TestCheckResourceAttr(resourceName, "arguments.0.name", "arg1"), resource.TestCheckResourceAttr(resourceName, "arguments.0.type", "VARCHAR"), - resource.TestCheckResourceAttr(resourceName, "arguments.1.name", "ARG2"), + resource.TestCheckResourceAttr(resourceName, "arguments.1.name", "arg2"), resource.TestCheckResourceAttr(resourceName, "arguments.1.type", "DATE"), resource.TestCheckResourceAttr(resourceName, "null_input_behavior", "RETURNS NULL ON NULL INPUT"), diff --git a/pkg/sdk/common_types.go b/pkg/sdk/common_types.go index ca2bc6b9b3..3678fe4e05 100644 --- a/pkg/sdk/common_types.go +++ b/pkg/sdk/common_types.go @@ -233,7 +233,7 @@ func NullInputBehaviorPointer(v NullInputBehavior) *NullInputBehavior { const ( NullInputBehaviorCalledOnNullInput NullInputBehavior = "CALLED ON NULL INPUT" - NullInputBehaviorReturnNullInput NullInputBehavior = "RETURNS NULL ON NULL INPUT" + NullInputBehaviorReturnsNullInput NullInputBehavior = "RETURNS NULL ON NULL INPUT" NullInputBehaviorStrict NullInputBehavior = "STRICT" ) @@ -380,6 +380,37 @@ var AllMetricLevels = []MetricLevel{ MetricLevelNone, } +type AutoEventLogging string + +const ( + AutoEventLoggingLogging AutoEventLogging = "LOGGING" + AutoEventLoggingTracing AutoEventLogging = "TRACING" + AutoEventLoggingAll AutoEventLogging = "ALL" + AutoEventLoggingOff AutoEventLogging = "OFF" +) + +func ToAutoEventLogging(value string) 
(AutoEventLogging, error) { + switch strings.ToUpper(value) { + case string(AutoEventLoggingLogging): + return AutoEventLoggingLogging, nil + case string(AutoEventLoggingTracing): + return AutoEventLoggingTracing, nil + case string(AutoEventLoggingAll): + return AutoEventLoggingAll, nil + case string(AutoEventLoggingOff): + return AutoEventLoggingOff, nil + default: + return "", fmt.Errorf("unknown auto event logging: %s", value) + } +} + +var AllAutoEventLoggings = []AutoEventLogging{ + AutoEventLoggingLogging, + AutoEventLoggingTracing, + AutoEventLoggingAll, + AutoEventLoggingOff, +} + // StringAllowEmpty is a wrapper on string to allow using empty strings in SQL. type StringAllowEmpty struct { Value string `ddl:"keyword,single_quotes"` diff --git a/pkg/sdk/common_types_test.go b/pkg/sdk/common_types_test.go index 1b5e5ecf9f..263d02ad2c 100644 --- a/pkg/sdk/common_types_test.go +++ b/pkg/sdk/common_types_test.go @@ -327,3 +327,38 @@ func Test_ToMetricLevel(t *testing.T) { }) } } + +func Test_ToAutoEventLogging(t *testing.T) { + testCases := []struct { + Name string + Input string + Expected AutoEventLogging + ExpectedError string + }{ + {Input: string(AutoEventLoggingLogging), Expected: AutoEventLoggingLogging}, + {Input: string(AutoEventLoggingTracing), Expected: AutoEventLoggingTracing}, + {Input: string(AutoEventLoggingAll), Expected: AutoEventLoggingAll}, + {Input: string(AutoEventLoggingOff), Expected: AutoEventLoggingOff}, + {Name: "validation: incorrect auto event logging", Input: "incorrect", ExpectedError: "unknown auto event logging: incorrect"}, + {Name: "validation: empty input", Input: "", ExpectedError: "unknown auto event logging: "}, + {Name: "validation: lower case input", Input: "all", Expected: AutoEventLoggingAll}, + } + + for _, tc := range testCases { + tc := tc + name := tc.Name + if name == "" { + name = fmt.Sprintf("%v auto event logging", tc.Input) + } + t.Run(name, func(t *testing.T) { + value, err := ToAutoEventLogging(tc.Input) + if tc.ExpectedError != "" { + assert.Empty(t, value) + assert.ErrorContains(t, err, tc.ExpectedError) + } else { + assert.NoError(t, err) + assert.Equal(t, tc.Expected, value) + } + }) + } +} diff --git a/pkg/sdk/functions_impl_gen.go b/pkg/sdk/functions_impl_gen.go index 8d9dbd1606..051a8fc994 100644 --- a/pkg/sdk/functions_impl_gen.go +++ b/pkg/sdk/functions_impl_gen.go @@ -467,7 +467,7 @@ func (r functionRow) convert() *Function { e := &Function{ CreatedOn: r.CreatedOn, Name: r.Name, - SchemaName: r.SchemaName, + SchemaName: strings.Trim(r.SchemaName, `"`), IsBuiltin: r.IsBuiltin == "Y", IsAggregate: r.IsAggregate == "Y", IsAnsi: r.IsAnsi == "Y", @@ -475,7 +475,7 @@ func (r functionRow) convert() *Function { MaxNumArguments: r.MaxNumArguments, ArgumentsRaw: r.Arguments, Description: r.Description, - CatalogName: r.CatalogName, + CatalogName: strings.Trim(r.CatalogName, `"`), IsTableFunction: r.IsTableFunction == "Y", ValidForClustering: r.ValidForClustering == "Y", IsExternalFunction: r.IsExternalFunction == "Y", diff --git a/pkg/sdk/identifier_helpers.go b/pkg/sdk/identifier_helpers.go index 90d1acdf44..7e857fb8de 100644 --- a/pkg/sdk/identifier_helpers.go +++ b/pkg/sdk/identifier_helpers.go @@ -324,7 +324,12 @@ func NewSchemaObjectIdentifierWithArguments(databaseName, schemaName, name strin if err != nil { log.Printf("[DEBUG] failed to normalize argument %d: %v, err = %v", i, argument, err) } - normalizedArguments[i] = LegacyDataTypeFrom(normalizedArgument) + // TODO [SNOW-1348103]: temporary workaround to fix panic resulting 
from TestAcc_Grants_To_AccountRole test (because of unsupported TABLE data type) + if normalizedArgument != nil { + normalizedArguments[i] = LegacyDataTypeFrom(normalizedArgument) + } else { + normalizedArguments[i] = "" + } } return SchemaObjectIdentifierWithArguments{ databaseName: strings.Trim(databaseName, `"`), diff --git a/pkg/sdk/parameters.go b/pkg/sdk/parameters.go index b651b673f3..4940f4ed0b 100644 --- a/pkg/sdk/parameters.go +++ b/pkg/sdk/parameters.go @@ -842,6 +842,16 @@ const ( FunctionParameterTraceLevel FunctionParameter = "TRACE_LEVEL" ) +type ProcedureParameter string + +const ( + ProcedureParameterAutoEventLogging ProcedureParameter = "AUTO_EVENT_LOGGING" + ProcedureParameterEnableConsoleOutput ProcedureParameter = "ENABLE_CONSOLE_OUTPUT" + ProcedureParameterLogLevel ProcedureParameter = "LOG_LEVEL" + ProcedureParameterMetricLevel ProcedureParameter = "METRIC_LEVEL" + ProcedureParameterTraceLevel ProcedureParameter = "TRACE_LEVEL" +) + // AccountParameters is based on https://docs.snowflake.com/en/sql-reference/parameters#account-parameters. type AccountParameters struct { // Account Parameters @@ -1359,11 +1369,12 @@ type ParametersIn struct { Task SchemaObjectIdentifier `ddl:"identifier" sql:"TASK"` Table SchemaObjectIdentifier `ddl:"identifier" sql:"TABLE"` Function SchemaObjectIdentifierWithArguments `ddl:"identifier" sql:"FUNCTION"` + Procedure SchemaObjectIdentifierWithArguments `ddl:"identifier" sql:"PROCEDURE"` } func (v *ParametersIn) validate() error { - if !anyValueSet(v.Session, v.Account, v.User, v.Warehouse, v.Database, v.Schema, v.Task, v.Table, v.Function) { - return errors.Join(errAtLeastOneOf("Session", "Account", "User", "Warehouse", "Database", "Schema", "Task", "Table", "Function")) + if !anyValueSet(v.Session, v.Account, v.User, v.Warehouse, v.Database, v.Schema, v.Task, v.Table, v.Function, v.Procedure) { + return errors.Join(errAtLeastOneOf("Session", "Account", "User", "Warehouse", "Database", "Schema", "Task", "Table", "Function", "Procedure")) } return nil } @@ -1381,6 +1392,7 @@ const ( ParameterTypeSchema ParameterType = "SCHEMA" ParameterTypeTask ParameterType = "TASK" ParameterTypeFunction ParameterType = "FUNCTION" + ParameterTypeProcedure ParameterType = "PROCEDURE" ) type Parameter struct { @@ -1511,6 +1523,8 @@ func (v *parameters) ShowObjectParameter(ctx context.Context, parameter ObjectPa opts.In.User = object.Name.(AccountObjectIdentifier) case ObjectTypeFunction: opts.In.Function = object.Name.(SchemaObjectIdentifierWithArguments) + case ObjectTypeProcedure: + opts.In.Procedure = object.Name.(SchemaObjectIdentifierWithArguments) default: return nil, fmt.Errorf("unsupported object type %s", object.Name) } diff --git a/pkg/sdk/poc/README.md b/pkg/sdk/poc/README.md index 46cb6e16b9..eabaf74e55 100644 --- a/pkg/sdk/poc/README.md +++ b/pkg/sdk/poc/README.md @@ -110,6 +110,7 @@ find a better solution to solve the issue (add more logic to the templates ?) - more clear definition of lists that can be empty vs cannot be empty - add empty ids in generated tests (TODO in random_test.go) - add optional imports (currently they have to be added manually, e.g. 
`datatypes.DataType`) +- add fourth type of quotes - double dollars ($$..$$) -> used for functions, procedures, and tasks ##### Known issues - generating two converts when Show and Desc use the same data structure diff --git a/pkg/sdk/procedures_def.go b/pkg/sdk/procedures_def.go index 0485b7711b..636da06187 100644 --- a/pkg/sdk/procedures_def.go +++ b/pkg/sdk/procedures_def.go @@ -5,14 +5,14 @@ import g "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk/poc/gen //go:generate go run ./poc/main.go var procedureArgument = g.NewQueryStruct("ProcedureArgument"). - Text("ArgName", g.KeywordOptions().NoQuotes().Required()). + Text("ArgName", g.KeywordOptions().DoubleQuotes().Required()). PredefinedQueryStructField("ArgDataTypeOld", "DataType", g.KeywordOptions().NoQuotes()). PredefinedQueryStructField("ArgDataType", "datatypes.DataType", g.ParameterOptions().NoQuotes().NoEquals().Required()). PredefinedQueryStructField("DefaultValue", "*string", g.ParameterOptions().NoEquals().SQL("DEFAULT")). WithValidation(g.ExactlyOneValueSet, "ArgDataTypeOld", "ArgDataType") var procedureColumn = g.NewQueryStruct("ProcedureColumn"). - Text("ColumnName", g.KeywordOptions().NoQuotes().Required()). + Text("ColumnName", g.KeywordOptions().DoubleQuotes().Required()). PredefinedQueryStructField("ColumnDataTypeOld", "DataType", g.KeywordOptions().NoQuotes()). PredefinedQueryStructField("ColumnDataType", "datatypes.DataType", g.ParameterOptions().NoQuotes().NoEquals().Required()). WithValidation(g.ExactlyOneValueSet, "ColumnDataTypeOld", "ColumnDataType") @@ -96,6 +96,8 @@ var ProceduresDef = g.NewInterface( g.KeywordOptions().SQL("RETURNS").Required(), ). SQL("LANGUAGE JAVA"). + PredefinedQueryStructField("NullInputBehavior", "*NullInputBehavior", g.KeywordOptions()). + PredefinedQueryStructField("ReturnResultsBehavior", "*ReturnResultsBehavior", g.KeywordOptions()). TextAssignment("RUNTIME_VERSION", g.ParameterOptions().SingleQuotes().Required()). ListQueryStructField( "Packages", @@ -111,10 +113,9 @@ var ProceduresDef = g.NewInterface( ListAssignment("EXTERNAL_ACCESS_INTEGRATIONS", "AccountObjectIdentifier", g.ParameterOptions().Parentheses()). ListAssignment("SECRETS", "SecretReference", g.ParameterOptions().Parentheses()). OptionalTextAssignment("TARGET_PATH", g.ParameterOptions().SingleQuotes()). - PredefinedQueryStructField("NullInputBehavior", "*NullInputBehavior", g.KeywordOptions()). OptionalTextAssignment("COMMENT", g.ParameterOptions().SingleQuotes()). PredefinedQueryStructField("ExecuteAs", "*ExecuteAs", g.KeywordOptions()). - PredefinedQueryStructField("ProcedureDefinition", "*string", g.ParameterOptions().NoEquals().SingleQuotes().SQL("AS")). + PredefinedQueryStructField("ProcedureDefinition", "*string", g.ParameterOptions().NoEquals().SQL("AS")). WithValidation(g.ValidateValueSet, "RuntimeVersion"). WithValidation(g.ValidateValueSet, "Packages"). WithValidation(g.ValidateValueSet, "Handler"). @@ -140,9 +141,10 @@ var ProceduresDef = g.NewInterface( OptionalSQL("NOT NULL"). SQL("LANGUAGE JAVASCRIPT"). PredefinedQueryStructField("NullInputBehavior", "*NullInputBehavior", g.KeywordOptions()). + PredefinedQueryStructField("ReturnResultsBehavior", "*ReturnResultsBehavior", g.KeywordOptions()). OptionalTextAssignment("COMMENT", g.ParameterOptions().SingleQuotes()). PredefinedQueryStructField("ExecuteAs", "*ExecuteAs", g.KeywordOptions()). - PredefinedQueryStructField("ProcedureDefinition", "string", g.ParameterOptions().NoEquals().SingleQuotes().SQL("AS").Required()). 
+ PredefinedQueryStructField("ProcedureDefinition", "string", g.ParameterOptions().NoEquals().SQL("AS").Required()). WithValidation(g.ValidateValueSet, "ProcedureDefinition"). WithValidation(g.ValidIdentifier, "name"). WithValidation(g.ExactlyOneValueSet, "ResultDataTypeOld", "ResultDataType"), @@ -167,6 +169,8 @@ var ProceduresDef = g.NewInterface( g.KeywordOptions().SQL("RETURNS").Required(), ). SQL("LANGUAGE PYTHON"). + PredefinedQueryStructField("NullInputBehavior", "*NullInputBehavior", g.KeywordOptions()). + PredefinedQueryStructField("ReturnResultsBehavior", "*ReturnResultsBehavior", g.KeywordOptions()). TextAssignment("RUNTIME_VERSION", g.ParameterOptions().SingleQuotes().Required()). ListQueryStructField( "Packages", @@ -181,10 +185,9 @@ var ProceduresDef = g.NewInterface( TextAssignment("HANDLER", g.ParameterOptions().SingleQuotes().Required()). ListAssignment("EXTERNAL_ACCESS_INTEGRATIONS", "AccountObjectIdentifier", g.ParameterOptions().Parentheses()). ListAssignment("SECRETS", "SecretReference", g.ParameterOptions().Parentheses()). - PredefinedQueryStructField("NullInputBehavior", "*NullInputBehavior", g.KeywordOptions()). OptionalTextAssignment("COMMENT", g.ParameterOptions().SingleQuotes()). PredefinedQueryStructField("ExecuteAs", "*ExecuteAs", g.KeywordOptions()). - PredefinedQueryStructField("ProcedureDefinition", "*string", g.ParameterOptions().NoEquals().SingleQuotes().SQL("AS")). + PredefinedQueryStructField("ProcedureDefinition", "*string", g.ParameterOptions().NoEquals().SQL("AS")). WithValidation(g.ValidateValueSet, "RuntimeVersion"). WithValidation(g.ValidateValueSet, "Packages"). WithValidation(g.ValidateValueSet, "Handler"). @@ -210,6 +213,8 @@ var ProceduresDef = g.NewInterface( g.KeywordOptions().SQL("RETURNS").Required(), ). SQL("LANGUAGE SCALA"). + PredefinedQueryStructField("NullInputBehavior", "*NullInputBehavior", g.KeywordOptions()). + PredefinedQueryStructField("ReturnResultsBehavior", "*ReturnResultsBehavior", g.KeywordOptions()). TextAssignment("RUNTIME_VERSION", g.ParameterOptions().SingleQuotes().Required()). ListQueryStructField( "Packages", @@ -222,11 +227,12 @@ var ProceduresDef = g.NewInterface( g.ParameterOptions().Parentheses().SQL("IMPORTS"), ). TextAssignment("HANDLER", g.ParameterOptions().SingleQuotes().Required()). + ListAssignment("EXTERNAL_ACCESS_INTEGRATIONS", "AccountObjectIdentifier", g.ParameterOptions().Parentheses()). + ListAssignment("SECRETS", "SecretReference", g.ParameterOptions().Parentheses()). OptionalTextAssignment("TARGET_PATH", g.ParameterOptions().SingleQuotes()). - PredefinedQueryStructField("NullInputBehavior", "*NullInputBehavior", g.KeywordOptions()). OptionalTextAssignment("COMMENT", g.ParameterOptions().SingleQuotes()). PredefinedQueryStructField("ExecuteAs", "*ExecuteAs", g.KeywordOptions()). - PredefinedQueryStructField("ProcedureDefinition", "*string", g.ParameterOptions().NoEquals().SingleQuotes().SQL("AS")). + PredefinedQueryStructField("ProcedureDefinition", "*string", g.ParameterOptions().NoEquals().SQL("AS")). WithValidation(g.ValidateValueSet, "RuntimeVersion"). WithValidation(g.ValidateValueSet, "Packages"). WithValidation(g.ValidateValueSet, "Handler"). @@ -253,9 +259,10 @@ var ProceduresDef = g.NewInterface( ). SQL("LANGUAGE SQL"). PredefinedQueryStructField("NullInputBehavior", "*NullInputBehavior", g.KeywordOptions()). + PredefinedQueryStructField("ReturnResultsBehavior", "*ReturnResultsBehavior", g.KeywordOptions()). OptionalTextAssignment("COMMENT", g.ParameterOptions().SingleQuotes()). 
PredefinedQueryStructField("ExecuteAs", "*ExecuteAs", g.KeywordOptions()). - PredefinedQueryStructField("ProcedureDefinition", "string", g.ParameterOptions().NoEquals().SingleQuotes().SQL("AS").Required()). + PredefinedQueryStructField("ProcedureDefinition", "string", g.ParameterOptions().NoEquals().SQL("AS").Required()). WithValidation(g.ValidateValueSet, "ProcedureDefinition"). WithValidation(g.ValidIdentifier, "name"), ).AlterOperation( @@ -266,16 +273,39 @@ var ProceduresDef = g.NewInterface( IfExists(). Name(). OptionalIdentifier("RenameTo", g.KindOfT[SchemaObjectIdentifier](), g.IdentifierOptions().SQL("RENAME TO")). - OptionalTextAssignment("SET COMMENT", g.ParameterOptions().SingleQuotes()). - OptionalTextAssignment("SET LOG_LEVEL", g.ParameterOptions().SingleQuotes()). - OptionalTextAssignment("SET TRACE_LEVEL", g.ParameterOptions().SingleQuotes()). - OptionalSQL("UNSET COMMENT"). + OptionalQueryStructField( + "Set", + g.NewQueryStruct("ProcedureSet"). + OptionalTextAssignment("COMMENT", g.ParameterOptions().SingleQuotes()). + ListAssignment("EXTERNAL_ACCESS_INTEGRATIONS", "AccountObjectIdentifier", g.ParameterOptions().Parentheses()). + OptionalQueryStructField("SecretsList", functionSecretsListWrapper, g.ParameterOptions().SQL("SECRETS").Parentheses()). + OptionalAssignment("AUTO_EVENT_LOGGING", g.KindOfTPointer[AutoEventLogging](), g.ParameterOptions().SingleQuotes()). + OptionalBooleanAssignment("ENABLE_CONSOLE_OUTPUT", nil). + OptionalAssignment("LOG_LEVEL", g.KindOfTPointer[LogLevel](), g.ParameterOptions().SingleQuotes()). + OptionalAssignment("METRIC_LEVEL", g.KindOfTPointer[MetricLevel](), g.ParameterOptions().SingleQuotes()). + OptionalAssignment("TRACE_LEVEL", g.KindOfTPointer[TraceLevel](), g.ParameterOptions().SingleQuotes()). + WithValidation(g.AtLeastOneValueSet, "Comment", "ExternalAccessIntegrations", "SecretsList", "AutoEventLogging", "EnableConsoleOutput", "LogLevel", "MetricLevel", "TraceLevel"), + g.ListOptions().SQL("SET"), + ). + OptionalQueryStructField( + "Unset", + g.NewQueryStruct("ProcedureUnset"). + OptionalSQL("COMMENT"). + OptionalSQL("EXTERNAL_ACCESS_INTEGRATIONS"). + OptionalSQL("AUTO_EVENT_LOGGING"). + OptionalSQL("ENABLE_CONSOLE_OUTPUT"). + OptionalSQL("LOG_LEVEL"). + OptionalSQL("METRIC_LEVEL"). + OptionalSQL("TRACE_LEVEL"). + WithValidation(g.AtLeastOneValueSet, "Comment", "ExternalAccessIntegrations", "AutoEventLogging", "EnableConsoleOutput", "LogLevel", "MetricLevel", "TraceLevel"), + g.ListOptions().SQL("UNSET"), + ). OptionalSetTags(). OptionalUnsetTags(). PredefinedQueryStructField("ExecuteAs", "*ExecuteAs", g.KeywordOptions()). WithValidation(g.ValidIdentifier, "name"). WithValidation(g.ValidIdentifierIfSet, "RenameTo"). - WithValidation(g.ExactlyOneValueSet, "RenameTo", "SetComment", "SetLogLevel", "SetTraceLevel", "UnsetComment", "SetTags", "UnsetTags", "ExecuteAs"), + WithValidation(g.ExactlyOneValueSet, "RenameTo", "Set", "Unset", "SetTags", "UnsetTags", "ExecuteAs"), ).DropOperation( "https://docs.snowflake.com/en/sql-reference/sql/drop-procedure", g.NewQueryStruct("DropProcedure"). @@ -300,7 +330,9 @@ var ProceduresDef = g.NewInterface( Field("catalog_name", "string"). Field("is_table_function", "string"). Field("valid_for_clustering", "string"). - Field("is_secure", "sql.NullString"), + Field("is_secure", "sql.NullString"). + OptionalText("secrets"). + OptionalText("external_access_integrations"), g.PlainStruct("Procedure"). Field("CreatedOn", "string"). Field("Name", "string"). 
@@ -315,12 +347,14 @@ var ProceduresDef = g.NewInterface( Field("CatalogName", "string"). Field("IsTableFunction", "bool"). Field("ValidForClustering", "bool"). - Field("IsSecure", "bool"), + Field("IsSecure", "bool"). + OptionalText("Secrets"). + OptionalText("ExternalAccessIntegrations"), g.NewQueryStruct("ShowProcedures"). Show(). SQL("PROCEDURES"). OptionalLike(). - OptionalIn(), // TODO: 'In' struct for procedures not support keyword "CLASS" now + OptionalExtendedIn(), ).ShowByIdOperation().DescribeOperation( g.DescriptionMappingKindSlice, "https://docs.snowflake.com/en/sql-reference/sql/desc-procedure", @@ -329,7 +363,7 @@ var ProceduresDef = g.NewInterface( Field("value", "sql.NullString"), g.PlainStruct("ProcedureDetail"). Field("Property", "string"). - Field("Value", "string"), + OptionalText("Value"), g.NewQueryStruct("DescribeProcedure"). Describe(). SQL("PROCEDURE"). @@ -362,6 +396,8 @@ var ProceduresDef = g.NewInterface( g.KeywordOptions().SQL("RETURNS").Required(), ). SQL("LANGUAGE JAVA"). + PredefinedQueryStructField("NullInputBehavior", "*NullInputBehavior", g.KeywordOptions()). + PredefinedQueryStructField("ProcedureDefinition", "*string", g.ParameterOptions().NoEquals().SingleQuotes().SQL("AS")). TextAssignment("RUNTIME_VERSION", g.ParameterOptions().SingleQuotes().Required()). ListQueryStructField( "Packages", @@ -374,8 +410,6 @@ var ProceduresDef = g.NewInterface( g.ParameterOptions().Parentheses().SQL("IMPORTS"), ). TextAssignment("HANDLER", g.ParameterOptions().SingleQuotes().Required()). - PredefinedQueryStructField("NullInputBehavior", "*NullInputBehavior", g.KeywordOptions()). - PredefinedQueryStructField("ProcedureDefinition", "*string", g.ParameterOptions().NoEquals().SingleQuotes().SQL("AS")). OptionalQueryStructField( "WithClause", procedureWithClause, @@ -408,6 +442,8 @@ var ProceduresDef = g.NewInterface( g.KeywordOptions().SQL("RETURNS").Required(), ). SQL("LANGUAGE SCALA"). + PredefinedQueryStructField("NullInputBehavior", "*NullInputBehavior", g.KeywordOptions()). + PredefinedQueryStructField("ProcedureDefinition", "*string", g.ParameterOptions().NoEquals().SingleQuotes().SQL("AS")). TextAssignment("RUNTIME_VERSION", g.ParameterOptions().SingleQuotes().Required()). ListQueryStructField( "Packages", @@ -420,8 +456,6 @@ var ProceduresDef = g.NewInterface( g.ParameterOptions().Parentheses().SQL("IMPORTS"), ). TextAssignment("HANDLER", g.ParameterOptions().SingleQuotes().Required()). - PredefinedQueryStructField("NullInputBehavior", "*NullInputBehavior", g.KeywordOptions()). - PredefinedQueryStructField("ProcedureDefinition", "*string", g.ParameterOptions().NoEquals().SingleQuotes().SQL("AS")). ListQueryStructField( "WithClauses", procedureWithClause, @@ -486,6 +520,8 @@ var ProceduresDef = g.NewInterface( g.KeywordOptions().SQL("RETURNS").Required(), ). SQL("LANGUAGE PYTHON"). + PredefinedQueryStructField("NullInputBehavior", "*NullInputBehavior", g.KeywordOptions()). + PredefinedQueryStructField("ProcedureDefinition", "*string", g.ParameterOptions().NoEquals().SingleQuotes().SQL("AS")). TextAssignment("RUNTIME_VERSION", g.ParameterOptions().SingleQuotes().Required()). ListQueryStructField( "Packages", @@ -498,8 +534,6 @@ var ProceduresDef = g.NewInterface( g.ParameterOptions().Parentheses().SQL("IMPORTS"), ). TextAssignment("HANDLER", g.ParameterOptions().SingleQuotes().Required()). - PredefinedQueryStructField("NullInputBehavior", "*NullInputBehavior", g.KeywordOptions()). 
- PredefinedQueryStructField("ProcedureDefinition", "*string", g.ParameterOptions().NoEquals().SingleQuotes().SQL("AS")). ListQueryStructField( "WithClauses", procedureWithClause, diff --git a/pkg/sdk/procedures_dto_builders_gen.go b/pkg/sdk/procedures_dto_builders_gen.go index 373852a62a..c9be49ffce 100644 --- a/pkg/sdk/procedures_dto_builders_gen.go +++ b/pkg/sdk/procedures_dto_builders_gen.go @@ -68,6 +68,11 @@ func (s *CreateForJavaProcedureRequest) WithNullInputBehavior(NullInputBehavior return s } +func (s *CreateForJavaProcedureRequest) WithReturnResultsBehavior(ReturnResultsBehavior ReturnResultsBehavior) *CreateForJavaProcedureRequest { + s.ReturnResultsBehavior = &ReturnResultsBehavior + return s +} + func (s *CreateForJavaProcedureRequest) WithComment(Comment string) *CreateForJavaProcedureRequest { s.Comment = &Comment return s @@ -227,6 +232,11 @@ func (s *CreateForJavaScriptProcedureRequest) WithNullInputBehavior(NullInputBeh return s } +func (s *CreateForJavaScriptProcedureRequest) WithReturnResultsBehavior(ReturnResultsBehavior ReturnResultsBehavior) *CreateForJavaScriptProcedureRequest { + s.ReturnResultsBehavior = &ReturnResultsBehavior + return s +} + func (s *CreateForJavaScriptProcedureRequest) WithComment(Comment string) *CreateForJavaScriptProcedureRequest { s.Comment = &Comment return s @@ -293,6 +303,11 @@ func (s *CreateForPythonProcedureRequest) WithNullInputBehavior(NullInputBehavio return s } +func (s *CreateForPythonProcedureRequest) WithReturnResultsBehavior(ReturnResultsBehavior ReturnResultsBehavior) *CreateForPythonProcedureRequest { + s.ReturnResultsBehavior = &ReturnResultsBehavior + return s +} + func (s *CreateForPythonProcedureRequest) WithComment(Comment string) *CreateForPythonProcedureRequest { s.Comment = &Comment return s @@ -349,6 +364,16 @@ func (s *CreateForScalaProcedureRequest) WithImports(Imports []ProcedureImportRe return s } +func (s *CreateForScalaProcedureRequest) WithExternalAccessIntegrations(ExternalAccessIntegrations []AccountObjectIdentifier) *CreateForScalaProcedureRequest { + s.ExternalAccessIntegrations = ExternalAccessIntegrations + return s +} + +func (s *CreateForScalaProcedureRequest) WithSecrets(Secrets []SecretReference) *CreateForScalaProcedureRequest { + s.Secrets = Secrets + return s +} + func (s *CreateForScalaProcedureRequest) WithTargetPath(TargetPath string) *CreateForScalaProcedureRequest { s.TargetPath = &TargetPath return s @@ -359,6 +384,11 @@ func (s *CreateForScalaProcedureRequest) WithNullInputBehavior(NullInputBehavior return s } +func (s *CreateForScalaProcedureRequest) WithReturnResultsBehavior(ReturnResultsBehavior ReturnResultsBehavior) *CreateForScalaProcedureRequest { + s.ReturnResultsBehavior = &ReturnResultsBehavior + return s +} + func (s *CreateForScalaProcedureRequest) WithComment(Comment string) *CreateForScalaProcedureRequest { s.Comment = &Comment return s @@ -411,6 +441,11 @@ func (s *CreateForSQLProcedureRequest) WithNullInputBehavior(NullInputBehavior N return s } +func (s *CreateForSQLProcedureRequest) WithReturnResultsBehavior(ReturnResultsBehavior ReturnResultsBehavior) *CreateForSQLProcedureRequest { + s.ReturnResultsBehavior = &ReturnResultsBehavior + return s +} + func (s *CreateForSQLProcedureRequest) WithComment(Comment string) *CreateForSQLProcedureRequest { s.Comment = &Comment return s @@ -458,23 +493,13 @@ func (s *AlterProcedureRequest) WithRenameTo(RenameTo SchemaObjectIdentifier) *A return s } -func (s *AlterProcedureRequest) WithSetComment(SetComment string) 
*AlterProcedureRequest { - s.SetComment = &SetComment - return s -} - -func (s *AlterProcedureRequest) WithSetLogLevel(SetLogLevel string) *AlterProcedureRequest { - s.SetLogLevel = &SetLogLevel +func (s *AlterProcedureRequest) WithSet(Set ProcedureSetRequest) *AlterProcedureRequest { + s.Set = &Set return s } -func (s *AlterProcedureRequest) WithSetTraceLevel(SetTraceLevel string) *AlterProcedureRequest { - s.SetTraceLevel = &SetTraceLevel - return s -} - -func (s *AlterProcedureRequest) WithUnsetComment(UnsetComment bool) *AlterProcedureRequest { - s.UnsetComment = &UnsetComment +func (s *AlterProcedureRequest) WithUnset(Unset ProcedureUnsetRequest) *AlterProcedureRequest { + s.Unset = &Unset return s } @@ -493,6 +518,91 @@ func (s *AlterProcedureRequest) WithExecuteAs(ExecuteAs ExecuteAs) *AlterProcedu return s } +func NewProcedureSetRequest() *ProcedureSetRequest { + return &ProcedureSetRequest{} +} + +func (s *ProcedureSetRequest) WithComment(Comment string) *ProcedureSetRequest { + s.Comment = &Comment + return s +} + +func (s *ProcedureSetRequest) WithExternalAccessIntegrations(ExternalAccessIntegrations []AccountObjectIdentifier) *ProcedureSetRequest { + s.ExternalAccessIntegrations = ExternalAccessIntegrations + return s +} + +func (s *ProcedureSetRequest) WithSecretsList(SecretsList SecretsListRequest) *ProcedureSetRequest { + s.SecretsList = &SecretsList + return s +} + +func (s *ProcedureSetRequest) WithAutoEventLogging(AutoEventLogging AutoEventLogging) *ProcedureSetRequest { + s.AutoEventLogging = &AutoEventLogging + return s +} + +func (s *ProcedureSetRequest) WithEnableConsoleOutput(EnableConsoleOutput bool) *ProcedureSetRequest { + s.EnableConsoleOutput = &EnableConsoleOutput + return s +} + +func (s *ProcedureSetRequest) WithLogLevel(LogLevel LogLevel) *ProcedureSetRequest { + s.LogLevel = &LogLevel + return s +} + +func (s *ProcedureSetRequest) WithMetricLevel(MetricLevel MetricLevel) *ProcedureSetRequest { + s.MetricLevel = &MetricLevel + return s +} + +func (s *ProcedureSetRequest) WithTraceLevel(TraceLevel TraceLevel) *ProcedureSetRequest { + s.TraceLevel = &TraceLevel + return s +} + +// NewSecretsListRequest removed manually - redeclared in functions + +func NewProcedureUnsetRequest() *ProcedureUnsetRequest { + return &ProcedureUnsetRequest{} +} + +func (s *ProcedureUnsetRequest) WithComment(Comment bool) *ProcedureUnsetRequest { + s.Comment = &Comment + return s +} + +func (s *ProcedureUnsetRequest) WithExternalAccessIntegrations(ExternalAccessIntegrations bool) *ProcedureUnsetRequest { + s.ExternalAccessIntegrations = &ExternalAccessIntegrations + return s +} + +func (s *ProcedureUnsetRequest) WithAutoEventLogging(AutoEventLogging bool) *ProcedureUnsetRequest { + s.AutoEventLogging = &AutoEventLogging + return s +} + +func (s *ProcedureUnsetRequest) WithEnableConsoleOutput(EnableConsoleOutput bool) *ProcedureUnsetRequest { + s.EnableConsoleOutput = &EnableConsoleOutput + return s +} + +func (s *ProcedureUnsetRequest) WithLogLevel(LogLevel bool) *ProcedureUnsetRequest { + s.LogLevel = &LogLevel + return s +} + +func (s *ProcedureUnsetRequest) WithMetricLevel(MetricLevel bool) *ProcedureUnsetRequest { + s.MetricLevel = &MetricLevel + return s +} + +func (s *ProcedureUnsetRequest) WithTraceLevel(TraceLevel bool) *ProcedureUnsetRequest { + s.TraceLevel = &TraceLevel + return s +} + func NewDropProcedureRequest( name SchemaObjectIdentifierWithArguments, ) *DropProcedureRequest { @@ -515,7 +625,7 @@ func (s *ShowProcedureRequest) WithLike(Like Like) 
*ShowProcedureRequest { return s } -func (s *ShowProcedureRequest) WithIn(In In) *ShowProcedureRequest { +func (s *ShowProcedureRequest) WithIn(In ExtendedIn) *ShowProcedureRequest { s.In = &In return s } diff --git a/pkg/sdk/procedures_dto_gen.go b/pkg/sdk/procedures_dto_gen.go index bf3e0a8d72..75d57c2448 100644 --- a/pkg/sdk/procedures_dto_gen.go +++ b/pkg/sdk/procedures_dto_gen.go @@ -38,6 +38,7 @@ type CreateForJavaProcedureRequest struct { Secrets []SecretReference TargetPath *string NullInputBehavior *NullInputBehavior + ReturnResultsBehavior *ReturnResultsBehavior Comment *string ExecuteAs *ExecuteAs ProcedureDefinition *string @@ -81,18 +82,19 @@ type ProcedureImportRequest struct { } type CreateForJavaScriptProcedureRequest struct { - OrReplace *bool - Secure *bool - name SchemaObjectIdentifier // required - Arguments []ProcedureArgumentRequest - CopyGrants *bool - ResultDataTypeOld DataType - ResultDataType datatypes.DataType // required - NotNull *bool - NullInputBehavior *NullInputBehavior - Comment *string - ExecuteAs *ExecuteAs - ProcedureDefinition string // required + OrReplace *bool + Secure *bool + name SchemaObjectIdentifier // required + Arguments []ProcedureArgumentRequest + CopyGrants *bool + ResultDataTypeOld DataType + ResultDataType datatypes.DataType // required + NotNull *bool + NullInputBehavior *NullInputBehavior + ReturnResultsBehavior *ReturnResultsBehavior + Comment *string + ExecuteAs *ExecuteAs + ProcedureDefinition string // required } type CreateForPythonProcedureRequest struct { @@ -109,40 +111,45 @@ type CreateForPythonProcedureRequest struct { ExternalAccessIntegrations []AccountObjectIdentifier Secrets []SecretReference NullInputBehavior *NullInputBehavior + ReturnResultsBehavior *ReturnResultsBehavior Comment *string ExecuteAs *ExecuteAs ProcedureDefinition *string } type CreateForScalaProcedureRequest struct { - OrReplace *bool - Secure *bool - name SchemaObjectIdentifier // required - Arguments []ProcedureArgumentRequest - CopyGrants *bool - Returns ProcedureReturnsRequest // required - RuntimeVersion string // required - Packages []ProcedurePackageRequest // required - Imports []ProcedureImportRequest - Handler string // required - TargetPath *string - NullInputBehavior *NullInputBehavior - Comment *string - ExecuteAs *ExecuteAs - ProcedureDefinition *string + OrReplace *bool + Secure *bool + name SchemaObjectIdentifier // required + Arguments []ProcedureArgumentRequest + CopyGrants *bool + Returns ProcedureReturnsRequest // required + RuntimeVersion string // required + Packages []ProcedurePackageRequest // required + Imports []ProcedureImportRequest + Handler string // required + ExternalAccessIntegrations []AccountObjectIdentifier + Secrets []SecretReference + TargetPath *string + NullInputBehavior *NullInputBehavior + ReturnResultsBehavior *ReturnResultsBehavior + Comment *string + ExecuteAs *ExecuteAs + ProcedureDefinition *string } type CreateForSQLProcedureRequest struct { - OrReplace *bool - Secure *bool - name SchemaObjectIdentifier // required - Arguments []ProcedureArgumentRequest - CopyGrants *bool - Returns ProcedureSQLReturnsRequest // required - NullInputBehavior *NullInputBehavior - Comment *string - ExecuteAs *ExecuteAs - ProcedureDefinition string // required + OrReplace *bool + Secure *bool + name SchemaObjectIdentifier // required + Arguments []ProcedureArgumentRequest + CopyGrants *bool + Returns ProcedureSQLReturnsRequest // required + NullInputBehavior *NullInputBehavior + ReturnResultsBehavior *ReturnResultsBehavior + 
Comment *string + ExecuteAs *ExecuteAs + ProcedureDefinition string // required } type ProcedureSQLReturnsRequest struct { @@ -152,16 +159,37 @@ type ProcedureSQLReturnsRequest struct { } type AlterProcedureRequest struct { - IfExists *bool - name SchemaObjectIdentifierWithArguments // required - RenameTo *SchemaObjectIdentifier - SetComment *string - SetLogLevel *string - SetTraceLevel *string - UnsetComment *bool - SetTags []TagAssociation - UnsetTags []ObjectIdentifier - ExecuteAs *ExecuteAs + IfExists *bool + name SchemaObjectIdentifierWithArguments // required + RenameTo *SchemaObjectIdentifier + Set *ProcedureSetRequest + Unset *ProcedureUnsetRequest + SetTags []TagAssociation + UnsetTags []ObjectIdentifier + ExecuteAs *ExecuteAs +} + +type ProcedureSetRequest struct { + Comment *string + ExternalAccessIntegrations []AccountObjectIdentifier + SecretsList *SecretsListRequest + AutoEventLogging *AutoEventLogging + EnableConsoleOutput *bool + LogLevel *LogLevel + MetricLevel *MetricLevel + TraceLevel *TraceLevel +} + +// SecretsListRequest removed manually - redeclaration with function + +type ProcedureUnsetRequest struct { + Comment *bool + ExternalAccessIntegrations *bool + AutoEventLogging *bool + EnableConsoleOutput *bool + LogLevel *bool + MetricLevel *bool + TraceLevel *bool } type DropProcedureRequest struct { @@ -171,7 +199,7 @@ type DropProcedureRequest struct { type ShowProcedureRequest struct { Like *Like - In *In + In *ExtendedIn } type DescribeProcedureRequest struct { diff --git a/pkg/sdk/procedures_ext.go b/pkg/sdk/procedures_ext.go index 31307bc2fb..a8ee2844bf 100644 --- a/pkg/sdk/procedures_ext.go +++ b/pkg/sdk/procedures_ext.go @@ -1,5 +1,151 @@ package sdk +import ( + "context" + "errors" + "fmt" + "strconv" + + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk/datatypes" +) + +const DefaultProcedureComment = "user-defined procedure" + func (v *Procedure) ID() SchemaObjectIdentifierWithArguments { return NewSchemaObjectIdentifierWithArguments(v.CatalogName, v.SchemaName, v.Name, v.ArgumentsOld...) } + +// ProcedureDetails contains aggregated describe results for the given procedure. 
+type ProcedureDetails struct {
+	Signature                  string  // present for all procedure types
+	Returns                    string  // present for all procedure types
+	Language                   string  // present for all procedure types
+	NullHandling               *string // present for all procedure types but SQL
+	Body                       *string // present for all procedure types (hidden when SECURE)
+	Volatility                 *string // present for all procedure types but SQL
+	ExternalAccessIntegrations *string // list; present for python, java, and scala
+	Secrets                    *string // map; present for python, java, and scala
+	Imports                    *string // list; present for python, java, and scala (hidden when SECURE)
+	Handler                    *string // present for python, java, and scala (hidden when SECURE)
+	RuntimeVersion             *string // present for python, java, and scala (hidden when SECURE)
+	Packages                   *string // list; present for python, java, and scala (hidden when SECURE)
+	TargetPath                 *string // present for scala and java (hidden when SECURE)
+	InstalledPackages          *string // list; present for python (hidden when SECURE)
+	ExecuteAs                  string  // present for all procedure types
+}
+
+func procedureDetailsFromRows(rows []ProcedureDetail) (*ProcedureDetails, error) {
+	v := &ProcedureDetails{}
+	var errs []error
+	for _, row := range rows {
+		switch row.Property {
+		case "signature":
+			errs = append(errs, row.setStringValueOrError("signature", &v.Signature))
+		case "returns":
+			errs = append(errs, row.setStringValueOrError("returns", &v.Returns))
+		case "language":
+			errs = append(errs, row.setStringValueOrError("language", &v.Language))
+		case "execute as":
+			errs = append(errs, row.setStringValueOrError("execute as", &v.ExecuteAs))
+		case "null handling":
+			v.NullHandling = row.Value
+		case "volatility":
+			v.Volatility = row.Value
+		case "body":
+			v.Body = row.Value
+		case "external_access_integrations":
+			v.ExternalAccessIntegrations = row.Value
+		case "secrets":
+			v.Secrets = row.Value
+		case "imports":
+			v.Imports = row.Value
+		case "handler":
+			v.Handler = row.Value
+		case "runtime_version":
+			v.RuntimeVersion = row.Value
+		case "packages":
+			v.Packages = row.Value
+		case "installed_packages":
+			v.InstalledPackages = row.Value
+		case "target_path":
+			v.TargetPath = row.Value
+		}
+	}
+	return v, errors.Join(errs...)
+} + +func (d *ProcedureDetail) setStringValueOrError(property string, field *string) error { + if d.Value == nil { + return fmt.Errorf("value expected for field %s", property) + } else { + *field = *d.Value + } + return nil +} + +func (d *ProcedureDetail) setOptionalBoolValueOrError(property string, field **bool) error { + if d.Value != nil && *d.Value != "" { + v, err := strconv.ParseBool(*d.Value) + if err != nil { + return fmt.Errorf("invalid value for field %s, err: %w", property, err) + } else { + *field = Bool(v) + } + } + return nil +} + +func (v *procedures) DescribeDetails(ctx context.Context, id SchemaObjectIdentifierWithArguments) (*ProcedureDetails, error) { + rows, err := v.Describe(ctx, id) + if err != nil { + return nil, err + } + return procedureDetailsFromRows(rows) +} + +func (v *procedures) ShowParameters(ctx context.Context, id SchemaObjectIdentifierWithArguments) ([]*Parameter, error) { + return v.client.Parameters.ShowParameters(ctx, &ShowParametersOptions{ + In: &ParametersIn{ + Procedure: id, + }, + }) +} + +func (s *CreateForJavaProcedureRequest) WithProcedureDefinitionWrapped(procedureDefinition string) *CreateForJavaProcedureRequest { + s.ProcedureDefinition = String(fmt.Sprintf(`$$%s$$`, procedureDefinition)) + return s +} + +func (s *CreateForPythonProcedureRequest) WithProcedureDefinitionWrapped(procedureDefinition string) *CreateForPythonProcedureRequest { + s.ProcedureDefinition = String(fmt.Sprintf(`$$%s$$`, procedureDefinition)) + return s +} + +func (s *CreateForScalaProcedureRequest) WithProcedureDefinitionWrapped(procedureDefinition string) *CreateForScalaProcedureRequest { + s.ProcedureDefinition = String(fmt.Sprintf(`$$%s$$`, procedureDefinition)) + return s +} + +func NewCreateForSQLProcedureRequestDefinitionWrapped( + name SchemaObjectIdentifier, + returns ProcedureSQLReturnsRequest, + procedureDefinition string, +) *CreateForSQLProcedureRequest { + s := CreateForSQLProcedureRequest{} + s.name = name + s.Returns = returns + s.ProcedureDefinition = fmt.Sprintf(`$$%s$$`, procedureDefinition) + return &s +} + +func NewCreateForJavaScriptProcedureRequestDefinitionWrapped( + name SchemaObjectIdentifier, + resultDataType datatypes.DataType, + procedureDefinition string, +) *CreateForJavaScriptProcedureRequest { + s := CreateForJavaScriptProcedureRequest{} + s.name = name + s.ResultDataType = resultDataType + s.ProcedureDefinition = fmt.Sprintf(`$$%s$$`, procedureDefinition) + return &s +} diff --git a/pkg/sdk/procedures_gen.go b/pkg/sdk/procedures_gen.go index c65e95e94a..0dbbf8f5a1 100644 --- a/pkg/sdk/procedures_gen.go +++ b/pkg/sdk/procedures_gen.go @@ -25,6 +25,10 @@ type Procedures interface { CreateAndCallForJavaScript(ctx context.Context, request *CreateAndCallForJavaScriptProcedureRequest) error CreateAndCallForPython(ctx context.Context, request *CreateAndCallForPythonProcedureRequest) error CreateAndCallForSQL(ctx context.Context, request *CreateAndCallForSQLProcedureRequest) error + + // DescribeDetails is added manually; it returns aggregated describe results for the given procedure. + DescribeDetails(ctx context.Context, id SchemaObjectIdentifierWithArguments) (*ProcedureDetails, error) + ShowParameters(ctx context.Context, id SchemaObjectIdentifierWithArguments) ([]*Parameter, error) } // CreateForJavaProcedureOptions is based on https://docs.snowflake.com/en/sql-reference/sql/create-procedure#java-handler. 
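
A sketch of the intended call flow for the `$$` wrapping helpers together with the manually added DescribeDetails; `id`, `returns`, `idWithArguments`, and the surrounding function are placeholders:

	// single quotes inside the body no longer need escaping, because the
	// definition is sent as $$ ... $$ instead of a single-quoted string
	definition := "BEGIN RETURN 'hi'; END;"
	req := sdk.NewCreateForSQLProcedureRequestDefinitionWrapped(id, returns, definition)
	if err := client.Procedures.CreateForSQL(ctx, req); err != nil {
		return err
	}

	// DESCRIBE rows are aggregated into one struct; optional properties stay
	// as *string and must be nil-checked before dereferencing
	details, err := client.Procedures.DescribeDetails(ctx, idWithArguments)
	if err != nil {
		return err
	}
	if details.Body != nil {
		log.Println(*details.Body)
	}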
@@ -38,6 +42,8 @@ type CreateForJavaProcedureOptions struct { CopyGrants *bool `ddl:"keyword" sql:"COPY GRANTS"` Returns ProcedureReturns `ddl:"keyword" sql:"RETURNS"` languageJava bool `ddl:"static" sql:"LANGUAGE JAVA"` + NullInputBehavior *NullInputBehavior `ddl:"keyword"` + ReturnResultsBehavior *ReturnResultsBehavior `ddl:"keyword"` RuntimeVersion string `ddl:"parameter,single_quotes" sql:"RUNTIME_VERSION"` Packages []ProcedurePackage `ddl:"parameter,parentheses" sql:"PACKAGES"` Imports []ProcedureImport `ddl:"parameter,parentheses" sql:"IMPORTS"` @@ -45,14 +51,13 @@ type CreateForJavaProcedureOptions struct { ExternalAccessIntegrations []AccountObjectIdentifier `ddl:"parameter,parentheses" sql:"EXTERNAL_ACCESS_INTEGRATIONS"` Secrets []SecretReference `ddl:"parameter,parentheses" sql:"SECRETS"` TargetPath *string `ddl:"parameter,single_quotes" sql:"TARGET_PATH"` - NullInputBehavior *NullInputBehavior `ddl:"keyword"` Comment *string `ddl:"parameter,single_quotes" sql:"COMMENT"` ExecuteAs *ExecuteAs `ddl:"keyword"` - ProcedureDefinition *string `ddl:"parameter,single_quotes,no_equals" sql:"AS"` + ProcedureDefinition *string `ddl:"parameter,no_equals" sql:"AS"` } type ProcedureArgument struct { - ArgName string `ddl:"keyword,no_quotes"` + ArgName string `ddl:"keyword,double_quotes"` ArgDataTypeOld DataType `ddl:"keyword,no_quotes"` ArgDataType datatypes.DataType `ddl:"parameter,no_quotes,no_equals"` DefaultValue *string `ddl:"parameter,no_equals" sql:"DEFAULT"` @@ -75,7 +80,7 @@ type ProcedureReturnsTable struct { } type ProcedureColumn struct { - ColumnName string `ddl:"keyword,no_quotes"` + ColumnName string `ddl:"keyword,double_quotes"` ColumnDataTypeOld DataType `ddl:"keyword,no_quotes"` ColumnDataType datatypes.DataType `ddl:"parameter,no_quotes,no_equals"` } @@ -90,22 +95,23 @@ type ProcedureImport struct { // CreateForJavaScriptProcedureOptions is based on https://docs.snowflake.com/en/sql-reference/sql/create-procedure#javascript-handler. 
type CreateForJavaScriptProcedureOptions struct { - create bool `ddl:"static" sql:"CREATE"` - OrReplace *bool `ddl:"keyword" sql:"OR REPLACE"` - Secure *bool `ddl:"keyword" sql:"SECURE"` - procedure bool `ddl:"static" sql:"PROCEDURE"` - name SchemaObjectIdentifier `ddl:"identifier"` - Arguments []ProcedureArgument `ddl:"list,must_parentheses"` - CopyGrants *bool `ddl:"keyword" sql:"COPY GRANTS"` - returns bool `ddl:"static" sql:"RETURNS"` - ResultDataTypeOld DataType `ddl:"parameter,no_equals"` - ResultDataType datatypes.DataType `ddl:"parameter,no_quotes,no_equals"` - NotNull *bool `ddl:"keyword" sql:"NOT NULL"` - languageJavascript bool `ddl:"static" sql:"LANGUAGE JAVASCRIPT"` - NullInputBehavior *NullInputBehavior `ddl:"keyword"` - Comment *string `ddl:"parameter,single_quotes" sql:"COMMENT"` - ExecuteAs *ExecuteAs `ddl:"keyword"` - ProcedureDefinition string `ddl:"parameter,single_quotes,no_equals" sql:"AS"` + create bool `ddl:"static" sql:"CREATE"` + OrReplace *bool `ddl:"keyword" sql:"OR REPLACE"` + Secure *bool `ddl:"keyword" sql:"SECURE"` + procedure bool `ddl:"static" sql:"PROCEDURE"` + name SchemaObjectIdentifier `ddl:"identifier"` + Arguments []ProcedureArgument `ddl:"list,must_parentheses"` + CopyGrants *bool `ddl:"keyword" sql:"COPY GRANTS"` + returns bool `ddl:"static" sql:"RETURNS"` + ResultDataTypeOld DataType `ddl:"parameter,no_equals"` + ResultDataType datatypes.DataType `ddl:"parameter,no_quotes,no_equals"` + NotNull *bool `ddl:"keyword" sql:"NOT NULL"` + languageJavascript bool `ddl:"static" sql:"LANGUAGE JAVASCRIPT"` + NullInputBehavior *NullInputBehavior `ddl:"keyword"` + ReturnResultsBehavior *ReturnResultsBehavior `ddl:"keyword"` + Comment *string `ddl:"parameter,single_quotes" sql:"COMMENT"` + ExecuteAs *ExecuteAs `ddl:"keyword"` + ProcedureDefinition string `ddl:"parameter,no_equals" sql:"AS"` } // CreateForPythonProcedureOptions is based on https://docs.snowflake.com/en/sql-reference/sql/create-procedure#python-handler. @@ -119,55 +125,60 @@ type CreateForPythonProcedureOptions struct { CopyGrants *bool `ddl:"keyword" sql:"COPY GRANTS"` Returns ProcedureReturns `ddl:"keyword" sql:"RETURNS"` languagePython bool `ddl:"static" sql:"LANGUAGE PYTHON"` + NullInputBehavior *NullInputBehavior `ddl:"keyword"` + ReturnResultsBehavior *ReturnResultsBehavior `ddl:"keyword"` RuntimeVersion string `ddl:"parameter,single_quotes" sql:"RUNTIME_VERSION"` Packages []ProcedurePackage `ddl:"parameter,parentheses" sql:"PACKAGES"` Imports []ProcedureImport `ddl:"parameter,parentheses" sql:"IMPORTS"` Handler string `ddl:"parameter,single_quotes" sql:"HANDLER"` ExternalAccessIntegrations []AccountObjectIdentifier `ddl:"parameter,parentheses" sql:"EXTERNAL_ACCESS_INTEGRATIONS"` Secrets []SecretReference `ddl:"parameter,parentheses" sql:"SECRETS"` - NullInputBehavior *NullInputBehavior `ddl:"keyword"` Comment *string `ddl:"parameter,single_quotes" sql:"COMMENT"` ExecuteAs *ExecuteAs `ddl:"keyword"` - ProcedureDefinition *string `ddl:"parameter,single_quotes,no_equals" sql:"AS"` + ProcedureDefinition *string `ddl:"parameter,no_equals" sql:"AS"` } // CreateForScalaProcedureOptions is based on https://docs.snowflake.com/en/sql-reference/sql/create-procedure#scala-handler. 
type CreateForScalaProcedureOptions struct { - create bool `ddl:"static" sql:"CREATE"` - OrReplace *bool `ddl:"keyword" sql:"OR REPLACE"` - Secure *bool `ddl:"keyword" sql:"SECURE"` - procedure bool `ddl:"static" sql:"PROCEDURE"` - name SchemaObjectIdentifier `ddl:"identifier"` - Arguments []ProcedureArgument `ddl:"list,must_parentheses"` - CopyGrants *bool `ddl:"keyword" sql:"COPY GRANTS"` - Returns ProcedureReturns `ddl:"keyword" sql:"RETURNS"` - languageScala bool `ddl:"static" sql:"LANGUAGE SCALA"` - RuntimeVersion string `ddl:"parameter,single_quotes" sql:"RUNTIME_VERSION"` - Packages []ProcedurePackage `ddl:"parameter,parentheses" sql:"PACKAGES"` - Imports []ProcedureImport `ddl:"parameter,parentheses" sql:"IMPORTS"` - Handler string `ddl:"parameter,single_quotes" sql:"HANDLER"` - TargetPath *string `ddl:"parameter,single_quotes" sql:"TARGET_PATH"` - NullInputBehavior *NullInputBehavior `ddl:"keyword"` - Comment *string `ddl:"parameter,single_quotes" sql:"COMMENT"` - ExecuteAs *ExecuteAs `ddl:"keyword"` - ProcedureDefinition *string `ddl:"parameter,single_quotes,no_equals" sql:"AS"` + create bool `ddl:"static" sql:"CREATE"` + OrReplace *bool `ddl:"keyword" sql:"OR REPLACE"` + Secure *bool `ddl:"keyword" sql:"SECURE"` + procedure bool `ddl:"static" sql:"PROCEDURE"` + name SchemaObjectIdentifier `ddl:"identifier"` + Arguments []ProcedureArgument `ddl:"list,must_parentheses"` + CopyGrants *bool `ddl:"keyword" sql:"COPY GRANTS"` + Returns ProcedureReturns `ddl:"keyword" sql:"RETURNS"` + languageScala bool `ddl:"static" sql:"LANGUAGE SCALA"` + NullInputBehavior *NullInputBehavior `ddl:"keyword"` + ReturnResultsBehavior *ReturnResultsBehavior `ddl:"keyword"` + RuntimeVersion string `ddl:"parameter,single_quotes" sql:"RUNTIME_VERSION"` + Packages []ProcedurePackage `ddl:"parameter,parentheses" sql:"PACKAGES"` + Imports []ProcedureImport `ddl:"parameter,parentheses" sql:"IMPORTS"` + Handler string `ddl:"parameter,single_quotes" sql:"HANDLER"` + ExternalAccessIntegrations []AccountObjectIdentifier `ddl:"parameter,parentheses" sql:"EXTERNAL_ACCESS_INTEGRATIONS"` + Secrets []SecretReference `ddl:"parameter,parentheses" sql:"SECRETS"` + TargetPath *string `ddl:"parameter,single_quotes" sql:"TARGET_PATH"` + Comment *string `ddl:"parameter,single_quotes" sql:"COMMENT"` + ExecuteAs *ExecuteAs `ddl:"keyword"` + ProcedureDefinition *string `ddl:"parameter,no_equals" sql:"AS"` } // CreateForSQLProcedureOptions is based on https://docs.snowflake.com/en/sql-reference/sql/create-procedure#snowflake-scripting-handler. 
type CreateForSQLProcedureOptions struct { - create bool `ddl:"static" sql:"CREATE"` - OrReplace *bool `ddl:"keyword" sql:"OR REPLACE"` - Secure *bool `ddl:"keyword" sql:"SECURE"` - procedure bool `ddl:"static" sql:"PROCEDURE"` - name SchemaObjectIdentifier `ddl:"identifier"` - Arguments []ProcedureArgument `ddl:"list,must_parentheses"` - CopyGrants *bool `ddl:"keyword" sql:"COPY GRANTS"` - Returns ProcedureSQLReturns `ddl:"keyword" sql:"RETURNS"` - languageSql bool `ddl:"static" sql:"LANGUAGE SQL"` - NullInputBehavior *NullInputBehavior `ddl:"keyword"` - Comment *string `ddl:"parameter,single_quotes" sql:"COMMENT"` - ExecuteAs *ExecuteAs `ddl:"keyword"` - ProcedureDefinition string `ddl:"parameter,single_quotes,no_equals" sql:"AS"` + create bool `ddl:"static" sql:"CREATE"` + OrReplace *bool `ddl:"keyword" sql:"OR REPLACE"` + Secure *bool `ddl:"keyword" sql:"SECURE"` + procedure bool `ddl:"static" sql:"PROCEDURE"` + name SchemaObjectIdentifier `ddl:"identifier"` + Arguments []ProcedureArgument `ddl:"list,must_parentheses"` + CopyGrants *bool `ddl:"keyword" sql:"COPY GRANTS"` + Returns ProcedureSQLReturns `ddl:"keyword" sql:"RETURNS"` + languageSql bool `ddl:"static" sql:"LANGUAGE SQL"` + NullInputBehavior *NullInputBehavior `ddl:"keyword"` + ReturnResultsBehavior *ReturnResultsBehavior `ddl:"keyword"` + Comment *string `ddl:"parameter,single_quotes" sql:"COMMENT"` + ExecuteAs *ExecuteAs `ddl:"keyword"` + ProcedureDefinition string `ddl:"parameter,no_equals" sql:"AS"` } type ProcedureSQLReturns struct { @@ -178,18 +189,39 @@ type ProcedureSQLReturns struct { // AlterProcedureOptions is based on https://docs.snowflake.com/en/sql-reference/sql/alter-procedure. type AlterProcedureOptions struct { - alter bool `ddl:"static" sql:"ALTER"` - procedure bool `ddl:"static" sql:"PROCEDURE"` - IfExists *bool `ddl:"keyword" sql:"IF EXISTS"` - name SchemaObjectIdentifierWithArguments `ddl:"identifier"` - RenameTo *SchemaObjectIdentifier `ddl:"identifier" sql:"RENAME TO"` - SetComment *string `ddl:"parameter,single_quotes" sql:"SET COMMENT"` - SetLogLevel *string `ddl:"parameter,single_quotes" sql:"SET LOG_LEVEL"` - SetTraceLevel *string `ddl:"parameter,single_quotes" sql:"SET TRACE_LEVEL"` - UnsetComment *bool `ddl:"keyword" sql:"UNSET COMMENT"` - SetTags []TagAssociation `ddl:"keyword" sql:"SET TAG"` - UnsetTags []ObjectIdentifier `ddl:"keyword" sql:"UNSET TAG"` - ExecuteAs *ExecuteAs `ddl:"keyword"` + alter bool `ddl:"static" sql:"ALTER"` + procedure bool `ddl:"static" sql:"PROCEDURE"` + IfExists *bool `ddl:"keyword" sql:"IF EXISTS"` + name SchemaObjectIdentifierWithArguments `ddl:"identifier"` + RenameTo *SchemaObjectIdentifier `ddl:"identifier" sql:"RENAME TO"` + Set *ProcedureSet `ddl:"list" sql:"SET"` + Unset *ProcedureUnset `ddl:"list" sql:"UNSET"` + SetTags []TagAssociation `ddl:"keyword" sql:"SET TAG"` + UnsetTags []ObjectIdentifier `ddl:"keyword" sql:"UNSET TAG"` + ExecuteAs *ExecuteAs `ddl:"keyword"` +} + +type ProcedureSet struct { + Comment *string `ddl:"parameter,single_quotes" sql:"COMMENT"` + ExternalAccessIntegrations []AccountObjectIdentifier `ddl:"parameter,parentheses" sql:"EXTERNAL_ACCESS_INTEGRATIONS"` + SecretsList *SecretsList `ddl:"parameter,parentheses" sql:"SECRETS"` + AutoEventLogging *AutoEventLogging `ddl:"parameter,single_quotes" sql:"AUTO_EVENT_LOGGING"` + EnableConsoleOutput *bool `ddl:"parameter" sql:"ENABLE_CONSOLE_OUTPUT"` + LogLevel *LogLevel `ddl:"parameter,single_quotes" sql:"LOG_LEVEL"` + MetricLevel *MetricLevel `ddl:"parameter,single_quotes" sql:"METRIC_LEVEL"` + 
TraceLevel *TraceLevel `ddl:"parameter,single_quotes" sql:"TRACE_LEVEL"` +} + +// SecretsList removed manually - redeclared in functions + +type ProcedureUnset struct { + Comment *bool `ddl:"keyword" sql:"COMMENT"` + ExternalAccessIntegrations *bool `ddl:"keyword" sql:"EXTERNAL_ACCESS_INTEGRATIONS"` + AutoEventLogging *bool `ddl:"keyword" sql:"AUTO_EVENT_LOGGING"` + EnableConsoleOutput *bool `ddl:"keyword" sql:"ENABLE_CONSOLE_OUTPUT"` + LogLevel *bool `ddl:"keyword" sql:"LOG_LEVEL"` + MetricLevel *bool `ddl:"keyword" sql:"METRIC_LEVEL"` + TraceLevel *bool `ddl:"keyword" sql:"TRACE_LEVEL"` } // DropProcedureOptions is based on https://docs.snowflake.com/en/sql-reference/sql/drop-procedure. @@ -202,45 +234,49 @@ type DropProcedureOptions struct { // ShowProcedureOptions is based on https://docs.snowflake.com/en/sql-reference/sql/show-procedures. type ShowProcedureOptions struct { - show bool `ddl:"static" sql:"SHOW"` - procedures bool `ddl:"static" sql:"PROCEDURES"` - Like *Like `ddl:"keyword" sql:"LIKE"` - In *In `ddl:"keyword" sql:"IN"` + show bool `ddl:"static" sql:"SHOW"` + procedures bool `ddl:"static" sql:"PROCEDURES"` + Like *Like `ddl:"keyword" sql:"LIKE"` + In *ExtendedIn `ddl:"keyword" sql:"IN"` } type procedureRow struct { - CreatedOn string `db:"created_on"` - Name string `db:"name"` - SchemaName string `db:"schema_name"` - IsBuiltin string `db:"is_builtin"` - IsAggregate string `db:"is_aggregate"` - IsAnsi string `db:"is_ansi"` - MinNumArguments int `db:"min_num_arguments"` - MaxNumArguments int `db:"max_num_arguments"` - Arguments string `db:"arguments"` - Description string `db:"description"` - CatalogName string `db:"catalog_name"` - IsTableFunction string `db:"is_table_function"` - ValidForClustering string `db:"valid_for_clustering"` - IsSecure sql.NullString `db:"is_secure"` + CreatedOn string `db:"created_on"` + Name string `db:"name"` + SchemaName string `db:"schema_name"` + IsBuiltin string `db:"is_builtin"` + IsAggregate string `db:"is_aggregate"` + IsAnsi string `db:"is_ansi"` + MinNumArguments int `db:"min_num_arguments"` + MaxNumArguments int `db:"max_num_arguments"` + Arguments string `db:"arguments"` + Description string `db:"description"` + CatalogName string `db:"catalog_name"` + IsTableFunction string `db:"is_table_function"` + ValidForClustering string `db:"valid_for_clustering"` + IsSecure sql.NullString `db:"is_secure"` + Secrets sql.NullString `db:"secrets"` + ExternalAccessIntegrations sql.NullString `db:"external_access_integrations"` } type Procedure struct { - CreatedOn string - Name string - SchemaName string - IsBuiltin bool - IsAggregate bool - IsAnsi bool - MinNumArguments int - MaxNumArguments int - ArgumentsOld []DataType - ArgumentsRaw string - Description string - CatalogName string - IsTableFunction bool - ValidForClustering bool - IsSecure bool + CreatedOn string + Name string + SchemaName string + IsBuiltin bool + IsAggregate bool + IsAnsi bool + MinNumArguments int + MaxNumArguments int + ArgumentsOld []DataType + ArgumentsRaw string + Description string + CatalogName string + IsTableFunction bool + ValidForClustering bool + IsSecure bool + Secrets *string + ExternalAccessIntegrations *string } // DescribeProcedureOptions is based on https://docs.snowflake.com/en/sql-reference/sql/desc-procedure. @@ -257,7 +293,7 @@ type procedureDetailRow struct { type ProcedureDetail struct { Property string - Value string + Value *string } // CallProcedureOptions is based on https://docs.snowflake.com/en/sql-reference/sql/call. 
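
A sketch of the extended SHOW support; `schemaId` is a placeholder, and `ExtendedIn` is assumed to embed the plain `In` struct the same way it does for functions:

	procs, err := client.Procedures.Show(ctx, sdk.NewShowProcedureRequest().
		WithIn(sdk.ExtendedIn{In: sdk.In{Schema: schemaId}}))
	if err != nil {
		return err
	}
	for _, p := range procs {
		// secrets and external_access_integrations are nullable SHOW columns,
		// hence *string on the Procedure struct
		if p.ExternalAccessIntegrations != nil {
			log.Println(p.Name, *p.ExternalAccessIntegrations)
		}
	}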
@@ -276,11 +312,11 @@ type CreateAndCallForJavaProcedureOptions struct { Arguments []ProcedureArgument `ddl:"list,must_parentheses"` Returns ProcedureReturns `ddl:"keyword" sql:"RETURNS"` languageJava bool `ddl:"static" sql:"LANGUAGE JAVA"` + NullInputBehavior *NullInputBehavior `ddl:"keyword"` RuntimeVersion string `ddl:"parameter,single_quotes" sql:"RUNTIME_VERSION"` Packages []ProcedurePackage `ddl:"parameter,parentheses" sql:"PACKAGES"` Imports []ProcedureImport `ddl:"parameter,parentheses" sql:"IMPORTS"` Handler string `ddl:"parameter,single_quotes" sql:"HANDLER"` - NullInputBehavior *NullInputBehavior `ddl:"keyword"` ProcedureDefinition *string `ddl:"parameter,single_quotes,no_equals" sql:"AS"` WithClause *ProcedureWithClause `ddl:"keyword"` call bool `ddl:"static" sql:"CALL"` @@ -288,6 +324,7 @@ type CreateAndCallForJavaProcedureOptions struct { CallArguments []string `ddl:"keyword,must_parentheses"` ScriptingVariable *string `ddl:"parameter,no_quotes,no_equals" sql:"INTO"` } + type ProcedureWithClause struct { prefix bool `ddl:"static" sql:","` CteName AccountObjectIdentifier `ddl:"identifier"` @@ -303,11 +340,11 @@ type CreateAndCallForScalaProcedureOptions struct { Arguments []ProcedureArgument `ddl:"list,must_parentheses"` Returns ProcedureReturns `ddl:"keyword" sql:"RETURNS"` languageScala bool `ddl:"static" sql:"LANGUAGE SCALA"` + NullInputBehavior *NullInputBehavior `ddl:"keyword"` RuntimeVersion string `ddl:"parameter,single_quotes" sql:"RUNTIME_VERSION"` Packages []ProcedurePackage `ddl:"parameter,parentheses" sql:"PACKAGES"` Imports []ProcedureImport `ddl:"parameter,parentheses" sql:"IMPORTS"` Handler string `ddl:"parameter,single_quotes" sql:"HANDLER"` - NullInputBehavior *NullInputBehavior `ddl:"keyword"` ProcedureDefinition *string `ddl:"parameter,single_quotes,no_equals" sql:"AS"` WithClauses []ProcedureWithClause `ddl:"keyword"` call bool `ddl:"static" sql:"CALL"` @@ -344,11 +381,11 @@ type CreateAndCallForPythonProcedureOptions struct { Arguments []ProcedureArgument `ddl:"list,must_parentheses"` Returns ProcedureReturns `ddl:"keyword" sql:"RETURNS"` languagePython bool `ddl:"static" sql:"LANGUAGE PYTHON"` + NullInputBehavior *NullInputBehavior `ddl:"keyword"` RuntimeVersion string `ddl:"parameter,single_quotes" sql:"RUNTIME_VERSION"` Packages []ProcedurePackage `ddl:"parameter,parentheses" sql:"PACKAGES"` Imports []ProcedureImport `ddl:"parameter,parentheses" sql:"IMPORTS"` Handler string `ddl:"parameter,single_quotes" sql:"HANDLER"` - NullInputBehavior *NullInputBehavior `ddl:"keyword"` ProcedureDefinition *string `ddl:"parameter,single_quotes,no_equals" sql:"AS"` WithClauses []ProcedureWithClause `ddl:"keyword"` call bool `ddl:"static" sql:"CALL"` diff --git a/pkg/sdk/procedures_gen_test.go b/pkg/sdk/procedures_gen_test.go index 12a3d030c4..f7e84503d2 100644 --- a/pkg/sdk/procedures_gen_test.go +++ b/pkg/sdk/procedures_gen_test.go @@ -27,12 +27,6 @@ func TestProcedures_CreateForJava(t *testing.T) { assertOptsInvalidJoinedErrors(t, opts, ErrNilOptions) }) - t.Run("validation: incorrect identifier", func(t *testing.T) { - opts := defaultOpts() - opts.name = emptySchemaObjectIdentifier - assertOptsInvalidJoinedErrors(t, opts, ErrInvalidObjectIdentifier) - }) - t.Run("validation: [opts.RuntimeVersion] should be set", func(t *testing.T) { opts := defaultOpts() opts.RuntimeVersion = "" @@ -51,6 +45,12 @@ func TestProcedures_CreateForJava(t *testing.T) { assertOptsInvalidJoinedErrors(t, opts, errNotSet("CreateForJavaProcedureOptions", "Handler")) }) + t.Run("validation: 
valid identifier for [opts.name]", func(t *testing.T) { + opts := defaultOpts() + opts.name = emptySchemaObjectIdentifier + assertOptsInvalidJoinedErrors(t, opts, ErrInvalidObjectIdentifier) + }) + t.Run("validation: exactly one field from [opts.Arguments.ArgDataTypeOld opts.Arguments.ArgDataType] should be present", func(t *testing.T) { opts := defaultOpts() opts.Arguments = []ProcedureArgument{ @@ -84,6 +84,12 @@ func TestProcedures_CreateForJava(t *testing.T) { assertOptsInvalidJoinedErrors(t, opts, errExactlyOneOf("CreateAndCallForSQLProcedureOptions.Returns.ResultDataType", "ResultDataTypeOld", "ResultDataType")) }) + t.Run("validation: exactly one field from [opts.Returns.ResultDataType opts.Returns.Table] should be present", func(t *testing.T) { + opts := defaultOpts() + opts.Returns = ProcedureReturns{} + assertOptsInvalidJoinedErrors(t, opts, errExactlyOneOf("CreateForJavaProcedureOptions.Returns", "ResultDataType", "Table")) + }) + t.Run("validation: exactly one field from [opts.Returns.ResultDataType.ResultDataTypeOld opts.Returns.ResultDataType.ResultDataType] should be present - two present", func(t *testing.T) { opts := defaultOpts() opts.Returns = ProcedureReturns{ @@ -132,13 +138,7 @@ func TestProcedures_CreateForJava(t *testing.T) { assertOptsInvalidJoinedErrors(t, opts, errExactlyOneOf("CreateAndCallForSQLProcedureOptions.Returns.Table.Columns", "ColumnDataTypeOld", "ColumnDataType")) }) - t.Run("validation: exactly one field from [opts.Returns.ResultDataType opts.Returns.Table] should be present", func(t *testing.T) { - opts := defaultOpts() - opts.Returns = ProcedureReturns{} - assertOptsInvalidJoinedErrors(t, opts, errExactlyOneOf("CreateForJavaProcedureOptions.Returns", "ResultDataType", "Table")) - }) - - t.Run("validation: function definition", func(t *testing.T) { + t.Run("validation: procedure definition", func(t *testing.T) { opts := defaultOpts() opts.TargetPath = String("@~/testfunc.jar") opts.Packages = []ProcedurePackage{ @@ -203,10 +203,11 @@ func TestProcedures_CreateForJava(t *testing.T) { } opts.TargetPath = String("@~/testfunc.jar") opts.NullInputBehavior = NullInputBehaviorPointer(NullInputBehaviorStrict) + opts.ReturnResultsBehavior = Pointer(ReturnResultsBehaviorImmutable) opts.Comment = String("test comment") opts.ExecuteAs = ExecuteAsPointer(ExecuteAsCaller) opts.ProcedureDefinition = String("return id + name;") - assertOptsValidAndSQLEquals(t, opts, `CREATE OR REPLACE SECURE PROCEDURE %s (id NUMBER, name VARCHAR DEFAULT 'test') COPY GRANTS RETURNS TABLE (country_code VARCHAR) LANGUAGE JAVA RUNTIME_VERSION = '1.8' PACKAGES = ('com.snowflake:snowpark:1.2.0') IMPORTS = ('test_jar.jar') HANDLER = 'TestFunc.echoVarchar' EXTERNAL_ACCESS_INTEGRATIONS = ("ext_integration") SECRETS = ('variable1' = %s, 'variable2' = %s) TARGET_PATH = '@~/testfunc.jar' STRICT COMMENT = 'test comment' EXECUTE AS CALLER AS 'return id + name;'`, id.FullyQualifiedName(), secretId.FullyQualifiedName(), secretId2.FullyQualifiedName()) + assertOptsValidAndSQLEquals(t, opts, `CREATE OR REPLACE SECURE PROCEDURE %s ("id" NUMBER, "name" VARCHAR DEFAULT 'test') COPY GRANTS RETURNS TABLE ("country_code" VARCHAR) LANGUAGE JAVA STRICT IMMUTABLE RUNTIME_VERSION = '1.8' PACKAGES = ('com.snowflake:snowpark:1.2.0') IMPORTS = ('test_jar.jar') HANDLER = 'TestFunc.echoVarchar' EXTERNAL_ACCESS_INTEGRATIONS = ("ext_integration") SECRETS = ('variable1' = %s, 'variable2' = %s) TARGET_PATH = '@~/testfunc.jar' COMMENT = 'test comment' EXECUTE AS CALLER AS return id + name;`, id.FullyQualifiedName(), 
secretId.FullyQualifiedName(), secretId2.FullyQualifiedName()) }) t.Run("all options", func(t *testing.T) { @@ -262,10 +263,11 @@ func TestProcedures_CreateForJava(t *testing.T) { } opts.TargetPath = String("@~/testfunc.jar") opts.NullInputBehavior = NullInputBehaviorPointer(NullInputBehaviorStrict) + opts.ReturnResultsBehavior = Pointer(ReturnResultsBehaviorImmutable) opts.Comment = String("test comment") opts.ExecuteAs = ExecuteAsPointer(ExecuteAsCaller) opts.ProcedureDefinition = String("return id + name;") - assertOptsValidAndSQLEquals(t, opts, `CREATE OR REPLACE SECURE PROCEDURE %s (id NUMBER(36, 2), name VARCHAR(100) DEFAULT 'test') COPY GRANTS RETURNS TABLE (country_code VARCHAR(100)) LANGUAGE JAVA RUNTIME_VERSION = '1.8' PACKAGES = ('com.snowflake:snowpark:1.2.0') IMPORTS = ('test_jar.jar') HANDLER = 'TestFunc.echoVarchar' EXTERNAL_ACCESS_INTEGRATIONS = ("ext_integration") SECRETS = ('variable1' = %s, 'variable2' = %s) TARGET_PATH = '@~/testfunc.jar' STRICT COMMENT = 'test comment' EXECUTE AS CALLER AS 'return id + name;'`, id.FullyQualifiedName(), secretId.FullyQualifiedName(), secretId2.FullyQualifiedName()) + assertOptsValidAndSQLEquals(t, opts, `CREATE OR REPLACE SECURE PROCEDURE %s ("id" NUMBER(36, 2), "name" VARCHAR(100) DEFAULT 'test') COPY GRANTS RETURNS TABLE ("country_code" VARCHAR(100)) LANGUAGE JAVA STRICT IMMUTABLE RUNTIME_VERSION = '1.8' PACKAGES = ('com.snowflake:snowpark:1.2.0') IMPORTS = ('test_jar.jar') HANDLER = 'TestFunc.echoVarchar' EXTERNAL_ACCESS_INTEGRATIONS = ("ext_integration") SECRETS = ('variable1' = %s, 'variable2' = %s) TARGET_PATH = '@~/testfunc.jar' COMMENT = 'test comment' EXECUTE AS CALLER AS return id + name;`, id.FullyQualifiedName(), secretId.FullyQualifiedName(), secretId2.FullyQualifiedName()) }) } @@ -290,6 +292,12 @@ func TestProcedures_CreateForJavaScript(t *testing.T) { assertOptsInvalidJoinedErrors(t, opts, errNotSet("CreateForJavaScriptProcedureOptions", "ProcedureDefinition")) }) + t.Run("validation: valid identifier for [opts.name]", func(t *testing.T) { + opts := defaultOpts() + opts.name = emptySchemaObjectIdentifier + assertOptsInvalidJoinedErrors(t, opts, ErrInvalidObjectIdentifier) + }) + t.Run("validation: exactly one field from [opts.ResultDataTypeOld opts.ResultDataType] should be present", func(t *testing.T) { opts := defaultOpts() assertOptsInvalidJoinedErrors(t, opts, errExactlyOneOf("CreateForJavaScriptProcedureOptions", "ResultDataTypeOld", "ResultDataType")) @@ -327,12 +335,6 @@ func TestProcedures_CreateForJavaScript(t *testing.T) { assertOptsInvalidJoinedErrors(t, opts, errExactlyOneOf("CreateForJavaScriptProcedureOptions.Arguments", "ArgDataTypeOld", "ArgDataType")) }) - t.Run("validation: incorrect identifier", func(t *testing.T) { - opts := defaultOpts() - opts.name = emptySchemaObjectIdentifier - assertOptsInvalidJoinedErrors(t, opts, ErrInvalidObjectIdentifier) - }) - // TODO [SNOW-1348106]: remove with old procedure removal for V1 t.Run("all options - old data types", func(t *testing.T) { opts := defaultOpts() @@ -349,10 +351,11 @@ func TestProcedures_CreateForJavaScript(t *testing.T) { opts.ResultDataTypeOld = "DOUBLE" opts.NotNull = Bool(true) opts.NullInputBehavior = NullInputBehaviorPointer(NullInputBehaviorStrict) + opts.ReturnResultsBehavior = Pointer(ReturnResultsBehaviorImmutable) opts.Comment = String("test comment") opts.ExecuteAs = ExecuteAsPointer(ExecuteAsCaller) opts.ProcedureDefinition = "return 1;" - assertOptsValidAndSQLEquals(t, opts, `CREATE OR REPLACE SECURE PROCEDURE %s (d DOUBLE DEFAULT 1.0) 
COPY GRANTS RETURNS DOUBLE NOT NULL LANGUAGE JAVASCRIPT STRICT COMMENT = 'test comment' EXECUTE AS CALLER AS 'return 1;'`, id.FullyQualifiedName()) + assertOptsValidAndSQLEquals(t, opts, `CREATE OR REPLACE SECURE PROCEDURE %s ("d" DOUBLE DEFAULT 1.0) COPY GRANTS RETURNS DOUBLE NOT NULL LANGUAGE JAVASCRIPT STRICT IMMUTABLE COMMENT = 'test comment' EXECUTE AS CALLER AS return 1;`, id.FullyQualifiedName()) }) t.Run("all options", func(t *testing.T) { @@ -370,10 +373,11 @@ func TestProcedures_CreateForJavaScript(t *testing.T) { opts.ResultDataType = dataTypeFloat opts.NotNull = Bool(true) opts.NullInputBehavior = NullInputBehaviorPointer(NullInputBehaviorStrict) + opts.ReturnResultsBehavior = Pointer(ReturnResultsBehaviorImmutable) opts.Comment = String("test comment") opts.ExecuteAs = ExecuteAsPointer(ExecuteAsCaller) opts.ProcedureDefinition = "return 1;" - assertOptsValidAndSQLEquals(t, opts, `CREATE OR REPLACE SECURE PROCEDURE %s (d FLOAT DEFAULT 1.0) COPY GRANTS RETURNS FLOAT NOT NULL LANGUAGE JAVASCRIPT STRICT COMMENT = 'test comment' EXECUTE AS CALLER AS 'return 1;'`, id.FullyQualifiedName()) + assertOptsValidAndSQLEquals(t, opts, `CREATE OR REPLACE SECURE PROCEDURE %s ("d" FLOAT DEFAULT 1.0) COPY GRANTS RETURNS FLOAT NOT NULL LANGUAGE JAVASCRIPT STRICT IMMUTABLE COMMENT = 'test comment' EXECUTE AS CALLER AS return 1;`, id.FullyQualifiedName()) }) } @@ -418,7 +422,7 @@ func TestProcedures_CreateForPython(t *testing.T) { assertOptsInvalidJoinedErrors(t, opts, errNotSet("CreateForPythonProcedureOptions", "Handler")) }) - t.Run("validation: incorrect identifier", func(t *testing.T) { + t.Run("validation: valid identifier for [opts.name]", func(t *testing.T) { opts := defaultOpts() opts.name = emptySchemaObjectIdentifier assertOptsInvalidJoinedErrors(t, opts, ErrInvalidObjectIdentifier) @@ -449,6 +453,21 @@ func TestProcedures_CreateForPython(t *testing.T) { assertOptsInvalidJoinedErrors(t, opts, errExactlyOneOf("CreateForPythonProcedureOptions.Arguments", "ArgDataTypeOld", "ArgDataType")) }) + t.Run("validation: exactly one field from [opts.Returns.ResultDataType opts.Returns.Table] should be present", func(t *testing.T) { + opts := defaultOpts() + opts.Returns = ProcedureReturns{} + assertOptsInvalidJoinedErrors(t, opts, errExactlyOneOf("CreateForPythonProcedureOptions.Returns", "ResultDataType", "Table")) + }) + + t.Run("validation: exactly one field from [opts.Returns.ResultDataType opts.Returns.Table] should be present - two present", func(t *testing.T) { + opts := defaultOpts() + opts.Returns = ProcedureReturns{ + ResultDataType: &ProcedureReturnsResultDataType{}, + Table: &ProcedureReturnsTable{}, + } + assertOptsInvalidJoinedErrors(t, opts, errExactlyOneOf("CreateForPythonProcedureOptions.Returns", "ResultDataType", "Table")) + }) + t.Run("validation: exactly one field from [opts.Returns.ResultDataType.ResultDataTypeOld opts.Returns.ResultDataType.ResultDataType] should be present", func(t *testing.T) { opts := defaultOpts() opts.Returns = ProcedureReturns{ @@ -505,12 +524,6 @@ func TestProcedures_CreateForPython(t *testing.T) { assertOptsInvalidJoinedErrors(t, opts, errExactlyOneOf("CreateAndCallForSQLProcedureOptions.Returns.Table.Columns", "ColumnDataTypeOld", "ColumnDataType")) }) - t.Run("validation: exactly one field from [opts.Returns.ResultDataType opts.Returns.Table] should be present", func(t *testing.T) { - opts := defaultOpts() - opts.Returns = ProcedureReturns{} - assertOptsInvalidJoinedErrors(t, opts, errExactlyOneOf("CreateForPythonProcedureOptions.Returns", 
"ResultDataType", "Table")) - }) - // TODO [SNOW-1348106]: remove with old procedure removal for V1 t.Run("all options - old data types", func(t *testing.T) { opts := defaultOpts() @@ -562,10 +575,11 @@ func TestProcedures_CreateForPython(t *testing.T) { }, } opts.NullInputBehavior = NullInputBehaviorPointer(NullInputBehaviorStrict) + opts.ReturnResultsBehavior = Pointer(ReturnResultsBehaviorImmutable) opts.Comment = String("test comment") opts.ExecuteAs = ExecuteAsPointer(ExecuteAsCaller) opts.ProcedureDefinition = String("import numpy as np") - assertOptsValidAndSQLEquals(t, opts, `CREATE OR REPLACE SECURE PROCEDURE %s (i int DEFAULT 1) COPY GRANTS RETURNS VARIANT NULL LANGUAGE PYTHON RUNTIME_VERSION = '3.8' PACKAGES = ('numpy', 'pandas') IMPORTS = ('numpy', 'pandas') HANDLER = 'udf' EXTERNAL_ACCESS_INTEGRATIONS = ("ext_integration") SECRETS = ('variable1' = %s, 'variable2' = %s) STRICT COMMENT = 'test comment' EXECUTE AS CALLER AS 'import numpy as np'`, id.FullyQualifiedName(), secretId.FullyQualifiedName(), secretId2.FullyQualifiedName()) + assertOptsValidAndSQLEquals(t, opts, `CREATE OR REPLACE SECURE PROCEDURE %s ("i" int DEFAULT 1) COPY GRANTS RETURNS VARIANT NULL LANGUAGE PYTHON STRICT IMMUTABLE RUNTIME_VERSION = '3.8' PACKAGES = ('numpy', 'pandas') IMPORTS = ('numpy', 'pandas') HANDLER = 'udf' EXTERNAL_ACCESS_INTEGRATIONS = ("ext_integration") SECRETS = ('variable1' = %s, 'variable2' = %s) COMMENT = 'test comment' EXECUTE AS CALLER AS import numpy as np`, id.FullyQualifiedName(), secretId.FullyQualifiedName(), secretId2.FullyQualifiedName()) }) t.Run("all options", func(t *testing.T) { @@ -618,10 +632,11 @@ func TestProcedures_CreateForPython(t *testing.T) { }, } opts.NullInputBehavior = NullInputBehaviorPointer(NullInputBehaviorStrict) + opts.ReturnResultsBehavior = Pointer(ReturnResultsBehaviorImmutable) opts.Comment = String("test comment") opts.ExecuteAs = ExecuteAsPointer(ExecuteAsCaller) opts.ProcedureDefinition = String("import numpy as np") - assertOptsValidAndSQLEquals(t, opts, `CREATE OR REPLACE SECURE PROCEDURE %s (i NUMBER(36, 2) DEFAULT 1) COPY GRANTS RETURNS VARIANT NULL LANGUAGE PYTHON RUNTIME_VERSION = '3.8' PACKAGES = ('numpy', 'pandas') IMPORTS = ('numpy', 'pandas') HANDLER = 'udf' EXTERNAL_ACCESS_INTEGRATIONS = ("ext_integration") SECRETS = ('variable1' = %s, 'variable2' = %s) STRICT COMMENT = 'test comment' EXECUTE AS CALLER AS 'import numpy as np'`, id.FullyQualifiedName(), secretId.FullyQualifiedName(), secretId2.FullyQualifiedName()) + assertOptsValidAndSQLEquals(t, opts, `CREATE OR REPLACE SECURE PROCEDURE %s ("i" NUMBER(36, 2) DEFAULT 1) COPY GRANTS RETURNS VARIANT NULL LANGUAGE PYTHON STRICT IMMUTABLE RUNTIME_VERSION = '3.8' PACKAGES = ('numpy', 'pandas') IMPORTS = ('numpy', 'pandas') HANDLER = 'udf' EXTERNAL_ACCESS_INTEGRATIONS = ("ext_integration") SECRETS = ('variable1' = %s, 'variable2' = %s) COMMENT = 'test comment' EXECUTE AS CALLER AS import numpy as np`, id.FullyQualifiedName(), secretId.FullyQualifiedName(), secretId2.FullyQualifiedName()) }) } @@ -664,7 +679,7 @@ func TestProcedures_CreateForScala(t *testing.T) { assertOptsInvalidJoinedErrors(t, opts, errNotSet("CreateForScalaProcedureOptions", "Handler")) }) - t.Run("validation: incorrect identifier", func(t *testing.T) { + t.Run("validation: valid identifier for [opts.name]", func(t *testing.T) { opts := defaultOpts() opts.name = emptySchemaObjectIdentifier assertOptsInvalidJoinedErrors(t, opts, ErrInvalidObjectIdentifier) @@ -695,6 +710,21 @@ func TestProcedures_CreateForScala(t *testing.T) { 
assertOptsInvalidJoinedErrors(t, opts, errExactlyOneOf("CreateForScalaProcedureOptions.Arguments", "ArgDataTypeOld", "ArgDataType")) }) + t.Run("validation: exactly one field from [opts.Returns.ResultDataType opts.Returns.Table] should be present", func(t *testing.T) { + opts := defaultOpts() + opts.Returns = ProcedureReturns{} + assertOptsInvalidJoinedErrors(t, opts, errExactlyOneOf("CreateForScalaProcedureOptions.Returns", "ResultDataType", "Table")) + }) + + t.Run("validation: exactly one field from [opts.Returns.ResultDataType opts.Returns.Table] should be present - two present", func(t *testing.T) { + opts := defaultOpts() + opts.Returns = ProcedureReturns{ + ResultDataType: &ProcedureReturnsResultDataType{}, + Table: &ProcedureReturnsTable{}, + } + assertOptsInvalidJoinedErrors(t, opts, errExactlyOneOf("CreateForScalaProcedureOptions.Returns", "ResultDataType", "Table")) + }) + t.Run("validation: exactly one field from [opts.Returns.ResultDataType.ResultDataTypeOld opts.Returns.ResultDataType.ResultDataType] should be present", func(t *testing.T) { opts := defaultOpts() opts.Returns = ProcedureReturns{ @@ -751,13 +781,7 @@ func TestProcedures_CreateForScala(t *testing.T) { assertOptsInvalidJoinedErrors(t, opts, errExactlyOneOf("CreateAndCallForSQLProcedureOptions.Returns.Table.Columns", "ColumnDataTypeOld", "ColumnDataType")) }) - t.Run("validation: exactly one field from [opts.Returns.ResultDataType opts.Returns.Table] should be present", func(t *testing.T) { - opts := defaultOpts() - opts.Returns = ProcedureReturns{} - assertOptsInvalidJoinedErrors(t, opts, errExactlyOneOf("CreateForScalaProcedureOptions.Returns", "ResultDataType", "Table")) - }) - - t.Run("validation: function definition", func(t *testing.T) { + t.Run("validation: procedure definition", func(t *testing.T) { opts := defaultOpts() opts.TargetPath = String("@~/testfunc.jar") opts.Packages = []ProcedurePackage{ @@ -801,10 +825,11 @@ func TestProcedures_CreateForScala(t *testing.T) { opts.Handler = "Echo.echoVarchar" opts.TargetPath = String("@~/testfunc.jar") opts.NullInputBehavior = NullInputBehaviorPointer(NullInputBehaviorStrict) + opts.ReturnResultsBehavior = Pointer(ReturnResultsBehaviorImmutable) opts.Comment = String("test comment") opts.ExecuteAs = ExecuteAsPointer(ExecuteAsCaller) opts.ProcedureDefinition = String("return x") - assertOptsValidAndSQLEquals(t, opts, `CREATE OR REPLACE SECURE PROCEDURE %s (x VARCHAR DEFAULT 'test') COPY GRANTS RETURNS VARCHAR NOT NULL LANGUAGE SCALA RUNTIME_VERSION = '2.0' PACKAGES = ('com.snowflake:snowpark:1.2.0') IMPORTS = ('@udf_libs/echohandler.jar') HANDLER = 'Echo.echoVarchar' TARGET_PATH = '@~/testfunc.jar' STRICT COMMENT = 'test comment' EXECUTE AS CALLER AS 'return x'`, id.FullyQualifiedName()) + assertOptsValidAndSQLEquals(t, opts, `CREATE OR REPLACE SECURE PROCEDURE %s ("x" VARCHAR DEFAULT 'test') COPY GRANTS RETURNS VARCHAR NOT NULL LANGUAGE SCALA STRICT IMMUTABLE RUNTIME_VERSION = '2.0' PACKAGES = ('com.snowflake:snowpark:1.2.0') IMPORTS = ('@udf_libs/echohandler.jar') HANDLER = 'Echo.echoVarchar' TARGET_PATH = '@~/testfunc.jar' COMMENT = 'test comment' EXECUTE AS CALLER AS return x`, id.FullyQualifiedName()) }) t.Run("all options", func(t *testing.T) { @@ -839,10 +864,11 @@ func TestProcedures_CreateForScala(t *testing.T) { opts.Handler = "Echo.echoVarchar" opts.TargetPath = String("@~/testfunc.jar") opts.NullInputBehavior = NullInputBehaviorPointer(NullInputBehaviorStrict) + opts.ReturnResultsBehavior = Pointer(ReturnResultsBehaviorImmutable) opts.Comment = 
String("test comment") opts.ExecuteAs = ExecuteAsPointer(ExecuteAsCaller) opts.ProcedureDefinition = String("return x") - assertOptsValidAndSQLEquals(t, opts, `CREATE OR REPLACE SECURE PROCEDURE %s (x VARCHAR(100) DEFAULT 'test') COPY GRANTS RETURNS VARCHAR(100) NOT NULL LANGUAGE SCALA RUNTIME_VERSION = '2.0' PACKAGES = ('com.snowflake:snowpark:1.2.0') IMPORTS = ('@udf_libs/echohandler.jar') HANDLER = 'Echo.echoVarchar' TARGET_PATH = '@~/testfunc.jar' STRICT COMMENT = 'test comment' EXECUTE AS CALLER AS 'return x'`, id.FullyQualifiedName()) + assertOptsValidAndSQLEquals(t, opts, `CREATE OR REPLACE SECURE PROCEDURE %s ("x" VARCHAR(100) DEFAULT 'test') COPY GRANTS RETURNS VARCHAR(100) NOT NULL LANGUAGE SCALA STRICT IMMUTABLE RUNTIME_VERSION = '2.0' PACKAGES = ('com.snowflake:snowpark:1.2.0') IMPORTS = ('@udf_libs/echohandler.jar') HANDLER = 'Echo.echoVarchar' TARGET_PATH = '@~/testfunc.jar' COMMENT = 'test comment' EXECUTE AS CALLER AS return x`, id.FullyQualifiedName()) }) } @@ -872,23 +898,12 @@ func TestProcedures_CreateForSQL(t *testing.T) { assertOptsInvalidJoinedErrors(t, opts, errNotSet("CreateForSQLProcedureOptions", "ProcedureDefinition")) }) - t.Run("validation: incorrect identifier", func(t *testing.T) { + t.Run("validation: valid identifier for [opts.name]", func(t *testing.T) { opts := defaultOpts() opts.name = emptySchemaObjectIdentifier assertOptsInvalidJoinedErrors(t, opts, ErrInvalidObjectIdentifier) }) - t.Run("create with no arguments", func(t *testing.T) { - opts := defaultOpts() - opts.Returns = ProcedureSQLReturns{ - ResultDataType: &ProcedureReturnsResultDataType{ - ResultDataType: dataTypeFloat, - }, - } - opts.ProcedureDefinition = "3.141592654::FLOAT" - assertOptsValidAndSQLEquals(t, opts, `CREATE PROCEDURE %s () RETURNS FLOAT LANGUAGE SQL AS '3.141592654::FLOAT'`, id.FullyQualifiedName()) - }) - t.Run("validation: exactly one field from [opts.Arguments.ArgDataTypeOld opts.Arguments.ArgDataType] should be present", func(t *testing.T) { opts := defaultOpts() opts.Arguments = []ProcedureArgument{ @@ -914,6 +929,21 @@ func TestProcedures_CreateForSQL(t *testing.T) { assertOptsInvalidJoinedErrors(t, opts, errExactlyOneOf("CreateForSQLProcedureOptions.Arguments", "ArgDataTypeOld", "ArgDataType")) }) + t.Run("validation: exactly one field from [opts.Returns.ResultDataType opts.Returns.Table] should be present", func(t *testing.T) { + opts := defaultOpts() + opts.Returns = ProcedureSQLReturns{} + assertOptsInvalidJoinedErrors(t, opts, errExactlyOneOf("CreateForSQLProcedureOptions.Returns", "ResultDataType", "Table")) + }) + + t.Run("validation: exactly one field from [opts.Returns.ResultDataType opts.Returns.Table] should be present - two present", func(t *testing.T) { + opts := defaultOpts() + opts.Returns = ProcedureSQLReturns{ + ResultDataType: &ProcedureReturnsResultDataType{}, + Table: &ProcedureReturnsTable{}, + } + assertOptsInvalidJoinedErrors(t, opts, errExactlyOneOf("CreateForSQLProcedureOptions.Returns", "ResultDataType", "Table")) + }) + t.Run("validation: exactly one field from [opts.Returns.ResultDataType.ResultDataTypeOld opts.Returns.ResultDataType.ResultDataType] should be present", func(t *testing.T) { opts := defaultOpts() opts.Returns = ProcedureSQLReturns{ @@ -970,12 +1000,6 @@ func TestProcedures_CreateForSQL(t *testing.T) { assertOptsInvalidJoinedErrors(t, opts, errExactlyOneOf("CreateForSQLProcedureOptions.Returns.Table.Columns", "ColumnDataTypeOld", "ColumnDataType")) }) - t.Run("validation: exactly one field from [opts.Returns.ResultDataType 
opts.Returns.Table] should be present", func(t *testing.T) { - opts := defaultOpts() - opts.Returns = ProcedureSQLReturns{} - assertOptsInvalidJoinedErrors(t, opts, errExactlyOneOf("CreateForSQLProcedureOptions.Returns", "ResultDataType", "Table")) - }) - // TODO [SNOW-1348106]: remove with old procedure removal for V1 t.Run("all options - old data types", func(t *testing.T) { opts := defaultOpts() @@ -996,10 +1020,11 @@ func TestProcedures_CreateForSQL(t *testing.T) { NotNull: Bool(true), } opts.NullInputBehavior = NullInputBehaviorPointer(NullInputBehaviorStrict) + opts.ReturnResultsBehavior = Pointer(ReturnResultsBehaviorImmutable) opts.Comment = String("test comment") opts.ExecuteAs = ExecuteAsPointer(ExecuteAsCaller) opts.ProcedureDefinition = "3.141592654::FLOAT" - assertOptsValidAndSQLEquals(t, opts, `CREATE OR REPLACE SECURE PROCEDURE %s (message VARCHAR DEFAULT 'test') COPY GRANTS RETURNS VARCHAR NOT NULL LANGUAGE SQL STRICT COMMENT = 'test comment' EXECUTE AS CALLER AS '3.141592654::FLOAT'`, id.FullyQualifiedName()) + assertOptsValidAndSQLEquals(t, opts, `CREATE OR REPLACE SECURE PROCEDURE %s ("message" VARCHAR DEFAULT 'test') COPY GRANTS RETURNS VARCHAR NOT NULL LANGUAGE SQL STRICT IMMUTABLE COMMENT = 'test comment' EXECUTE AS CALLER AS 3.141592654::FLOAT`, id.FullyQualifiedName()) }) t.Run("all options", func(t *testing.T) { @@ -1021,10 +1046,22 @@ func TestProcedures_CreateForSQL(t *testing.T) { NotNull: Bool(true), } opts.NullInputBehavior = NullInputBehaviorPointer(NullInputBehaviorStrict) + opts.ReturnResultsBehavior = Pointer(ReturnResultsBehaviorImmutable) opts.Comment = String("test comment") opts.ExecuteAs = ExecuteAsPointer(ExecuteAsCaller) opts.ProcedureDefinition = "3.141592654::FLOAT" - assertOptsValidAndSQLEquals(t, opts, `CREATE OR REPLACE SECURE PROCEDURE %s (message VARCHAR(100) DEFAULT 'test') COPY GRANTS RETURNS VARCHAR(100) NOT NULL LANGUAGE SQL STRICT COMMENT = 'test comment' EXECUTE AS CALLER AS '3.141592654::FLOAT'`, id.FullyQualifiedName()) + assertOptsValidAndSQLEquals(t, opts, `CREATE OR REPLACE SECURE PROCEDURE %s ("message" VARCHAR(100) DEFAULT 'test') COPY GRANTS RETURNS VARCHAR(100) NOT NULL LANGUAGE SQL STRICT IMMUTABLE COMMENT = 'test comment' EXECUTE AS CALLER AS 3.141592654::FLOAT`, id.FullyQualifiedName()) + }) + + t.Run("create with no arguments", func(t *testing.T) { + opts := defaultOpts() + opts.Returns = ProcedureSQLReturns{ + ResultDataType: &ProcedureReturnsResultDataType{ + ResultDataType: dataTypeFloat, + }, + } + opts.ProcedureDefinition = "3.141592654::FLOAT" + assertOptsValidAndSQLEquals(t, opts, `CREATE PROCEDURE %s () RETURNS FLOAT LANGUAGE SQL AS 3.141592654::FLOAT`, id.FullyQualifiedName()) }) } @@ -1042,7 +1079,7 @@ func TestProcedures_Drop(t *testing.T) { assertOptsInvalidJoinedErrors(t, opts, ErrNilOptions) }) - t.Run("validation: incorrect identifier", func(t *testing.T) { + t.Run("validation: valid identifier for [opts.name]", func(t *testing.T) { opts := defaultOpts() opts.name = emptySchemaObjectIdentifierWithArguments assertOptsInvalidJoinedErrors(t, opts, ErrInvalidObjectIdentifier) @@ -1062,8 +1099,8 @@ func TestProcedures_Drop(t *testing.T) { } func TestProcedures_Alter(t *testing.T) { - noArgsId := randomSchemaObjectIdentifierWithArguments() id := randomSchemaObjectIdentifierWithArguments(DataTypeVARCHAR, DataTypeNumber) + secretId := randomSchemaObjectIdentifier() defaultOpts := func() *AlterProcedureOptions { return &AlterProcedureOptions{ @@ -1077,17 +1114,40 @@ func TestProcedures_Alter(t *testing.T) { 
assertOptsInvalidJoinedErrors(t, opts, ErrNilOptions) }) - t.Run("validation: incorrect identifier", func(t *testing.T) { + t.Run("validation: valid identifier for [opts.name]", func(t *testing.T) { opts := defaultOpts() opts.name = emptySchemaObjectIdentifierWithArguments assertOptsInvalidJoinedErrors(t, opts, ErrInvalidObjectIdentifier) }) - t.Run("validation: exactly one field should be present", func(t *testing.T) { + t.Run("validation: valid identifier for [opts.RenameTo] if set", func(t *testing.T) { opts := defaultOpts() - opts.SetLogLevel = String("DEBUG") - opts.UnsetComment = Bool(true) - assertOptsInvalidJoinedErrors(t, opts, errExactlyOneOf("AlterProcedureOptions", "RenameTo", "SetComment", "SetLogLevel", "SetTraceLevel", "UnsetComment", "SetTags", "UnsetTags", "ExecuteAs")) + opts.RenameTo = Pointer(emptySchemaObjectIdentifier) + assertOptsInvalidJoinedErrors(t, opts, ErrInvalidObjectIdentifier) + }) + + t.Run("validation: exactly one field from [opts.RenameTo opts.Set opts.Unset opts.SetTags opts.UnsetTags opts.ExecuteAs] should be present", func(t *testing.T) { + opts := defaultOpts() + assertOptsInvalidJoinedErrors(t, opts, errExactlyOneOf("AlterProcedureOptions", "RenameTo", "Set", "Unset", "SetTags", "UnsetTags", "ExecuteAs")) + }) + + t.Run("validation: exactly one field from [opts.RenameTo opts.Set opts.Unset opts.SetTags opts.UnsetTags opts.ExecuteAs] should be present - two present", func(t *testing.T) { + opts := defaultOpts() + opts.Set = &ProcedureSet{} + opts.Unset = &ProcedureUnset{} + assertOptsInvalidJoinedErrors(t, opts, errExactlyOneOf("AlterProcedureOptions", "RenameTo", "Set", "Unset", "SetTags", "UnsetTags", "ExecuteAs")) + }) + + t.Run("validation: at least one of the fields [opts.Set.Comment opts.Set.ExternalAccessIntegrations opts.Set.SecretsList opts.Set.AutoEventLogging opts.Set.EnableConsoleOutput opts.Set.LogLevel opts.Set.MetricLevel opts.Set.TraceLevel] should be set", func(t *testing.T) { + opts := defaultOpts() + opts.Set = &ProcedureSet{} + assertOptsInvalidJoinedErrors(t, opts, errAtLeastOneOf("AlterProcedureOptions.Set", "Comment", "ExternalAccessIntegrations", "SecretsList", "AutoEventLogging", "EnableConsoleOutput", "LogLevel", "MetricLevel", "TraceLevel")) + }) + + t.Run("validation: at least one of the fields [opts.Unset.Comment opts.Unset.ExternalAccessIntegrations opts.Unset.AutoEventLogging opts.Unset.EnableConsoleOutput opts.Unset.LogLevel opts.Unset.MetricLevel opts.Unset.TraceLevel] should be set", func(t *testing.T) { + opts := defaultOpts() + opts.Unset = &ProcedureUnset{} + assertOptsInvalidJoinedErrors(t, opts, errAtLeastOneOf("AlterProcedureOptions.Unset", "Comment", "ExternalAccessIntegrations", "AutoEventLogging", "EnableConsoleOutput", "LogLevel", "MetricLevel", "TraceLevel")) }) t.Run("alter: rename to", func(t *testing.T) { @@ -1104,35 +1164,42 @@ func TestProcedures_Alter(t *testing.T) { assertOptsValidAndSQLEquals(t, opts, `ALTER PROCEDURE IF EXISTS %s EXECUTE AS CALLER`, id.FullyQualifiedName()) }) - t.Run("alter: set log level", func(t *testing.T) { + t.Run("alter: set", func(t *testing.T) { opts := defaultOpts() - opts.SetLogLevel = String("DEBUG") - assertOptsValidAndSQLEquals(t, opts, `ALTER PROCEDURE IF EXISTS %s SET LOG_LEVEL = 'DEBUG'`, id.FullyQualifiedName()) - }) - - t.Run("alter: set log level with no arguments", func(t *testing.T) { - opts := defaultOpts() - opts.name = noArgsId - opts.SetLogLevel = String("DEBUG") - assertOptsValidAndSQLEquals(t, opts, `ALTER PROCEDURE IF EXISTS %s SET LOG_LEVEL = 'DEBUG'`, 
noArgsId.FullyQualifiedName()) + opts.Set = &ProcedureSet{ + Comment: String("comment"), + TraceLevel: Pointer(TraceLevelOff), + } + assertOptsValidAndSQLEquals(t, opts, `ALTER PROCEDURE IF EXISTS %s SET COMMENT = 'comment', TRACE_LEVEL = 'OFF'`, id.FullyQualifiedName()) }) - t.Run("alter: set trace level", func(t *testing.T) { + t.Run("alter: set empty secrets", func(t *testing.T) { opts := defaultOpts() - opts.SetTraceLevel = String("DEBUG") - assertOptsValidAndSQLEquals(t, opts, `ALTER PROCEDURE IF EXISTS %s SET TRACE_LEVEL = 'DEBUG'`, id.FullyQualifiedName()) + opts.Set = &ProcedureSet{ + SecretsList: &SecretsList{}, + } + assertOptsValidAndSQLEquals(t, opts, `ALTER PROCEDURE IF EXISTS %s SET SECRETS = ()`, id.FullyQualifiedName()) }) - t.Run("alter: set comment", func(t *testing.T) { + t.Run("alter: set non-empty secrets", func(t *testing.T) { opts := defaultOpts() - opts.SetComment = String("comment") - assertOptsValidAndSQLEquals(t, opts, `ALTER PROCEDURE IF EXISTS %s SET COMMENT = 'comment'`, id.FullyQualifiedName()) + opts.Set = &ProcedureSet{ + SecretsList: &SecretsList{ + []SecretReference{ + {VariableName: "abc", Name: secretId}, + }, + }, + } + assertOptsValidAndSQLEquals(t, opts, `ALTER PROCEDURE IF EXISTS %s SET SECRETS = ('abc' = %s)`, id.FullyQualifiedName(), secretId.FullyQualifiedName()) }) - t.Run("alter: unset comment", func(t *testing.T) { + t.Run("alter: unset", func(t *testing.T) { opts := defaultOpts() - opts.UnsetComment = Bool(true) - assertOptsValidAndSQLEquals(t, opts, `ALTER PROCEDURE IF EXISTS %s UNSET COMMENT`, id.FullyQualifiedName()) + opts.Unset = &ProcedureUnset{ + Comment: Bool(true), + TraceLevel: Bool(true), + } + assertOptsValidAndSQLEquals(t, opts, `ALTER PROCEDURE IF EXISTS %s UNSET COMMENT, TRACE_LEVEL`, id.FullyQualifiedName()) }) t.Run("alter: set tags", func(t *testing.T) { @@ -1181,15 +1248,16 @@ func TestProcedures_Show(t *testing.T) { t.Run("show with in", func(t *testing.T) { opts := defaultOpts() - opts.In = &In{ - Account: Bool(true), + opts.In = &ExtendedIn{ + In: In{ + Account: Bool(true), + }, } assertOptsValidAndSQLEquals(t, opts, `SHOW PROCEDURES IN ACCOUNT`) }) } func TestProcedures_Describe(t *testing.T) { - noArgsId := randomSchemaObjectIdentifierWithArguments() id := randomSchemaObjectIdentifierWithArguments(DataTypeVARCHAR, DataTypeNumber) defaultOpts := func() *DescribeProcedureOptions { @@ -1203,18 +1271,12 @@ func TestProcedures_Describe(t *testing.T) { assertOptsInvalidJoinedErrors(t, opts, ErrNilOptions) }) - t.Run("validation: incorrect identifier", func(t *testing.T) { + t.Run("validation: valid identifier for [opts.name]", func(t *testing.T) { opts := defaultOpts() opts.name = emptySchemaObjectIdentifierWithArguments assertOptsInvalidJoinedErrors(t, opts, ErrInvalidObjectIdentifier) }) - t.Run("no arguments", func(t *testing.T) { - opts := defaultOpts() - opts.name = noArgsId - assertOptsValidAndSQLEquals(t, opts, `DESCRIBE PROCEDURE %s`, noArgsId.FullyQualifiedName()) - }) - t.Run("all options", func(t *testing.T) { opts := defaultOpts() assertOptsValidAndSQLEquals(t, opts, `DESCRIBE PROCEDURE %s`, id.FullyQualifiedName()) @@ -1235,7 +1297,7 @@ func TestProcedures_Call(t *testing.T) { assertOptsInvalidJoinedErrors(t, opts, ErrNilOptions) }) - t.Run("validation: incorrect identifier", func(t *testing.T) { + t.Run("validation: valid identifier for [opts.name]", func(t *testing.T) { opts := defaultOpts() opts.name = emptySchemaObjectIdentifier assertOptsInvalidJoinedErrors(t, opts, ErrInvalidObjectIdentifier) @@ -1265,7 
+1327,14 @@ func TestProcedures_CreateAndCallForJava(t *testing.T) { defaultOpts := func() *CreateAndCallForJavaProcedureOptions { return &CreateAndCallForJavaProcedureOptions{ - Name: id, + Name: id, + Handler: "TestFunc.echoVarchar", + Packages: []ProcedurePackage{ + { + Package: "com.snowflake:snowpark:1.2.0", + }, + }, + RuntimeVersion: "1.8", } } @@ -1274,18 +1343,51 @@ func TestProcedures_CreateAndCallForJava(t *testing.T) { assertOptsInvalidJoinedErrors(t, opts, ErrNilOptions) }) - t.Run("validation: incorrect identifier", func(t *testing.T) { + t.Run("validation: [opts.RuntimeVersion] should be set", func(t *testing.T) { + opts := defaultOpts() + opts.RuntimeVersion = "" + assertOptsInvalidJoinedErrors(t, opts, errNotSet("CreateAndCallForJavaProcedureOptions", "RuntimeVersion")) + }) + + t.Run("validation: [opts.Packages] should be set", func(t *testing.T) { + opts := defaultOpts() + opts.Packages = nil + assertOptsInvalidJoinedErrors(t, opts, errNotSet("CreateAndCallForJavaProcedureOptions", "Packages")) + }) + + t.Run("validation: [opts.Handler] should be set", func(t *testing.T) { + opts := defaultOpts() + opts.Handler = "" + assertOptsInvalidJoinedErrors(t, opts, errNotSet("CreateAndCallForJavaProcedureOptions", "Handler")) + }) + + t.Run("validation: valid identifier for [opts.ProcedureName]", func(t *testing.T) { + opts := defaultOpts() + opts.ProcedureName = emptyAccountObjectIdentifier + assertOptsInvalidJoinedErrors(t, opts, ErrInvalidObjectIdentifier) + }) + + t.Run("validation: valid identifier for [opts.Name]", func(t *testing.T) { opts := defaultOpts() opts.Name = emptyAccountObjectIdentifier assertOptsInvalidJoinedErrors(t, opts, ErrInvalidObjectIdentifier) }) - t.Run("validation: returns", func(t *testing.T) { + t.Run("validation: exactly one field from [opts.Returns.ResultDataType opts.Returns.Table] should be present", func(t *testing.T) { opts := defaultOpts() opts.Returns = ProcedureReturns{} assertOptsInvalidJoinedErrors(t, opts, errExactlyOneOf("CreateAndCallForJavaProcedureOptions.Returns", "ResultDataType", "Table")) }) + t.Run("validation: exactly one field from [opts.Returns.ResultDataType opts.Returns.Table] should be present - both present", func(t *testing.T) { + opts := defaultOpts() + opts.Returns = ProcedureReturns{ + ResultDataType: &ProcedureReturnsResultDataType{}, + Table: &ProcedureReturnsTable{}, + } + assertOptsInvalidJoinedErrors(t, opts, errExactlyOneOf("CreateAndCallForJavaProcedureOptions.Returns", "ResultDataType", "Table")) + }) + t.Run("validation: exactly one field should be present", func(t *testing.T) { opts := defaultOpts() opts.Returns = ProcedureReturns{ @@ -1304,18 +1406,6 @@ func TestProcedures_CreateAndCallForJava(t *testing.T) { assertOptsInvalidJoinedErrors(t, opts, errExactlyOneOf("CreateAndCallForJavaProcedureOptions.Returns", "ResultDataType", "Table")) }) - t.Run("validation: options are missing", func(t *testing.T) { - opts := defaultOpts() - opts.Returns = ProcedureReturns{ - ResultDataType: &ProcedureReturnsResultDataType{ - ResultDataType: dataTypeVarchar, - }, - } - assertOptsInvalidJoinedErrors(t, opts, errNotSet("CreateAndCallForJavaProcedureOptions", "Handler")) - assertOptsInvalidJoinedErrors(t, opts, errNotSet("CreateAndCallForJavaProcedureOptions", "RuntimeVersion")) - assertOptsInvalidJoinedErrors(t, opts, ErrInvalidObjectIdentifier) - }) - t.Run("no arguments", func(t *testing.T) { opts := defaultOpts() opts.Returns = ProcedureReturns{ @@ -1378,7 +1468,7 @@ func TestProcedures_CreateAndCallForJava(t 
*testing.T) { opts.ProcedureName = id opts.ScriptingVariable = String(":ret") opts.CallArguments = []string{"1", "rnd"} - assertOptsValidAndSQLEquals(t, opts, `WITH %s AS PROCEDURE (id NUMBER, name VARCHAR) RETURNS TABLE (country_code VARCHAR) LANGUAGE JAVA RUNTIME_VERSION = '1.8' PACKAGES = ('com.snowflake:snowpark:1.2.0') IMPORTS = ('test_jar.jar') HANDLER = 'TestFunc.echoVarchar' STRICT AS 'return id + name;' , %s (x, y) AS (select m.album_ID, m.album_name, b.band_name from music_albums) CALL %s (1, rnd) INTO :ret`, id.FullyQualifiedName(), cte.FullyQualifiedName(), id.FullyQualifiedName()) + assertOptsValidAndSQLEquals(t, opts, `WITH %s AS PROCEDURE ("id" NUMBER, "name" VARCHAR) RETURNS TABLE ("country_code" VARCHAR) LANGUAGE JAVA STRICT RUNTIME_VERSION = '1.8' PACKAGES = ('com.snowflake:snowpark:1.2.0') IMPORTS = ('test_jar.jar') HANDLER = 'TestFunc.echoVarchar' AS 'return id + name;' , %s (x, y) AS (select m.album_ID, m.album_name, b.band_name from music_albums) CALL %s (1, rnd) INTO :ret`, id.FullyQualifiedName(), cte.FullyQualifiedName(), id.FullyQualifiedName()) }) t.Run("all options", func(t *testing.T) { @@ -1426,7 +1516,7 @@ func TestProcedures_CreateAndCallForJava(t *testing.T) { opts.ProcedureName = id opts.ScriptingVariable = String(":ret") opts.CallArguments = []string{"1", "rnd"} - assertOptsValidAndSQLEquals(t, opts, `WITH %s AS PROCEDURE (id NUMBER(36, 2), name VARCHAR(100)) RETURNS TABLE (country_code VARCHAR(100)) LANGUAGE JAVA RUNTIME_VERSION = '1.8' PACKAGES = ('com.snowflake:snowpark:1.2.0') IMPORTS = ('test_jar.jar') HANDLER = 'TestFunc.echoVarchar' STRICT AS 'return id + name;' , %s (x, y) AS (select m.album_ID, m.album_name, b.band_name from music_albums) CALL %s (1, rnd) INTO :ret`, id.FullyQualifiedName(), cte.FullyQualifiedName(), id.FullyQualifiedName()) + assertOptsValidAndSQLEquals(t, opts, `WITH %s AS PROCEDURE ("id" NUMBER(36, 2), "name" VARCHAR(100)) RETURNS TABLE ("country_code" VARCHAR(100)) LANGUAGE JAVA STRICT RUNTIME_VERSION = '1.8' PACKAGES = ('com.snowflake:snowpark:1.2.0') IMPORTS = ('test_jar.jar') HANDLER = 'TestFunc.echoVarchar' AS 'return id + name;' , %s (x, y) AS (select m.album_ID, m.album_name, b.band_name from music_albums) CALL %s (1, rnd) INTO :ret`, id.FullyQualifiedName(), cte.FullyQualifiedName(), id.FullyQualifiedName()) }) } @@ -1550,7 +1640,7 @@ func TestProcedures_CreateAndCallForScala(t *testing.T) { opts.ProcedureName = id opts.ScriptingVariable = String(":ret") opts.CallArguments = []string{"1", "rnd"} - assertOptsValidAndSQLEquals(t, opts, `WITH %s AS PROCEDURE (id NUMBER, name VARCHAR) RETURNS TABLE (country_code VARCHAR) LANGUAGE SCALA RUNTIME_VERSION = '2.12' PACKAGES = ('com.snowflake:snowpark:1.2.0') IMPORTS = ('test_jar.jar') HANDLER = 'TestFunc.echoVarchar' STRICT AS 'return id + name;' , %s (x, y) AS (select m.album_ID, m.album_name, b.band_name from music_albums) CALL %s (1, rnd) INTO :ret`, id.FullyQualifiedName(), cte.FullyQualifiedName(), id.FullyQualifiedName()) + assertOptsValidAndSQLEquals(t, opts, `WITH %s AS PROCEDURE ("id" NUMBER, "name" VARCHAR) RETURNS TABLE ("country_code" VARCHAR) LANGUAGE SCALA STRICT RUNTIME_VERSION = '2.12' PACKAGES = ('com.snowflake:snowpark:1.2.0') IMPORTS = ('test_jar.jar') HANDLER = 'TestFunc.echoVarchar' AS 'return id + name;' , %s (x, y) AS (select m.album_ID, m.album_name, b.band_name from music_albums) CALL %s (1, rnd) INTO :ret`, id.FullyQualifiedName(), cte.FullyQualifiedName(), id.FullyQualifiedName()) }) t.Run("all options", func(t *testing.T) { @@ -1600,7 +1690,7 @@ func 
TestProcedures_CreateAndCallForScala(t *testing.T) { opts.ProcedureName = id opts.ScriptingVariable = String(":ret") opts.CallArguments = []string{"1", "rnd"} - assertOptsValidAndSQLEquals(t, opts, `WITH %s AS PROCEDURE (id NUMBER(36, 2), name VARCHAR(100)) RETURNS TABLE (country_code VARCHAR(100)) LANGUAGE SCALA RUNTIME_VERSION = '2.12' PACKAGES = ('com.snowflake:snowpark:1.2.0') IMPORTS = ('test_jar.jar') HANDLER = 'TestFunc.echoVarchar' STRICT AS 'return id + name;' , %s (x, y) AS (select m.album_ID, m.album_name, b.band_name from music_albums) CALL %s (1, rnd) INTO :ret`, id.FullyQualifiedName(), cte.FullyQualifiedName(), id.FullyQualifiedName()) + assertOptsValidAndSQLEquals(t, opts, `WITH %s AS PROCEDURE ("id" NUMBER(36, 2), "name" VARCHAR(100)) RETURNS TABLE ("country_code" VARCHAR(100)) LANGUAGE SCALA STRICT RUNTIME_VERSION = '2.12' PACKAGES = ('com.snowflake:snowpark:1.2.0') IMPORTS = ('test_jar.jar') HANDLER = 'TestFunc.echoVarchar' AS 'return id + name;' , %s (x, y) AS (select m.album_ID, m.album_name, b.band_name from music_albums) CALL %s (1, rnd) INTO :ret`, id.FullyQualifiedName(), cte.FullyQualifiedName(), id.FullyQualifiedName()) }) } @@ -1723,7 +1813,7 @@ func TestProcedures_CreateAndCallForPython(t *testing.T) { opts.ProcedureName = id opts.ScriptingVariable = String(":ret") opts.CallArguments = []string{"1"} - assertOptsValidAndSQLEquals(t, opts, `WITH %s AS PROCEDURE (i int DEFAULT 1) RETURNS VARIANT NULL LANGUAGE PYTHON RUNTIME_VERSION = '3.8' PACKAGES = ('numpy', 'pandas') IMPORTS = ('numpy', 'pandas') HANDLER = 'udf' STRICT AS 'import numpy as np' , %s (x, y) AS (select m.album_ID, m.album_name, b.band_name from music_albums) CALL %s (1) INTO :ret`, id.FullyQualifiedName(), cte.FullyQualifiedName(), id.FullyQualifiedName()) + assertOptsValidAndSQLEquals(t, opts, `WITH %s AS PROCEDURE ("i" int DEFAULT 1) RETURNS VARIANT NULL LANGUAGE PYTHON STRICT RUNTIME_VERSION = '3.8' PACKAGES = ('numpy', 'pandas') IMPORTS = ('numpy', 'pandas') HANDLER = 'udf' AS 'import numpy as np' , %s (x, y) AS (select m.album_ID, m.album_name, b.band_name from music_albums) CALL %s (1) INTO :ret`, id.FullyQualifiedName(), cte.FullyQualifiedName(), id.FullyQualifiedName()) }) t.Run("all options", func(t *testing.T) { @@ -1772,7 +1862,7 @@ func TestProcedures_CreateAndCallForPython(t *testing.T) { opts.ProcedureName = id opts.ScriptingVariable = String(":ret") opts.CallArguments = []string{"1"} - assertOptsValidAndSQLEquals(t, opts, `WITH %s AS PROCEDURE (i NUMBER(36, 2) DEFAULT 1) RETURNS VARIANT NULL LANGUAGE PYTHON RUNTIME_VERSION = '3.8' PACKAGES = ('numpy', 'pandas') IMPORTS = ('numpy', 'pandas') HANDLER = 'udf' STRICT AS 'import numpy as np' , %s (x, y) AS (select m.album_ID, m.album_name, b.band_name from music_albums) CALL %s (1) INTO :ret`, id.FullyQualifiedName(), cte.FullyQualifiedName(), id.FullyQualifiedName()) + assertOptsValidAndSQLEquals(t, opts, `WITH %s AS PROCEDURE ("i" NUMBER(36, 2) DEFAULT 1) RETURNS VARIANT NULL LANGUAGE PYTHON STRICT RUNTIME_VERSION = '3.8' PACKAGES = ('numpy', 'pandas') IMPORTS = ('numpy', 'pandas') HANDLER = 'udf' AS 'import numpy as np' , %s (x, y) AS (select m.album_ID, m.album_name, b.band_name from music_albums) CALL %s (1) INTO :ret`, id.FullyQualifiedName(), cte.FullyQualifiedName(), id.FullyQualifiedName()) }) } @@ -1835,7 +1925,7 @@ func TestProcedures_CreateAndCallForJavaScript(t *testing.T) { opts.ProcedureName = id opts.ScriptingVariable = String(":ret") opts.CallArguments = []string{"1"} - assertOptsValidAndSQLEquals(t, opts, `WITH %s AS 
PROCEDURE (d DOUBLE DEFAULT 1.0) RETURNS DOUBLE NOT NULL LANGUAGE JAVASCRIPT STRICT AS 'return 1;' , %s (x, y) AS (select m.album_ID, m.album_name, b.band_name from music_albums) CALL %s (1) INTO :ret`, id.FullyQualifiedName(), cte.FullyQualifiedName(), id.FullyQualifiedName()) + assertOptsValidAndSQLEquals(t, opts, `WITH %s AS PROCEDURE ("d" DOUBLE DEFAULT 1.0) RETURNS DOUBLE NOT NULL LANGUAGE JAVASCRIPT STRICT AS 'return 1;' , %s (x, y) AS (select m.album_ID, m.album_name, b.band_name from music_albums) CALL %s (1) INTO :ret`, id.FullyQualifiedName(), cte.FullyQualifiedName(), id.FullyQualifiedName()) }) t.Run("all options", func(t *testing.T) { @@ -1862,7 +1952,7 @@ func TestProcedures_CreateAndCallForJavaScript(t *testing.T) { opts.ProcedureName = id opts.ScriptingVariable = String(":ret") opts.CallArguments = []string{"1"} - assertOptsValidAndSQLEquals(t, opts, `WITH %s AS PROCEDURE (d FLOAT DEFAULT 1.0) RETURNS FLOAT NOT NULL LANGUAGE JAVASCRIPT STRICT AS 'return 1;' , %s (x, y) AS (select m.album_ID, m.album_name, b.band_name from music_albums) CALL %s (1) INTO :ret`, id.FullyQualifiedName(), cte.FullyQualifiedName(), id.FullyQualifiedName()) + assertOptsValidAndSQLEquals(t, opts, `WITH %s AS PROCEDURE ("d" FLOAT DEFAULT 1.0) RETURNS FLOAT NOT NULL LANGUAGE JAVASCRIPT STRICT AS 'return 1;' , %s (x, y) AS (select m.album_ID, m.album_name, b.band_name from music_albums) CALL %s (1) INTO :ret`, id.FullyQualifiedName(), cte.FullyQualifiedName(), id.FullyQualifiedName()) }) } @@ -1954,7 +2044,7 @@ func TestProcedures_CreateAndCallForSQL(t *testing.T) { opts.ProcedureName = id opts.ScriptingVariable = String(":ret") opts.CallArguments = []string{"1"} - assertOptsValidAndSQLEquals(t, opts, `WITH %s AS PROCEDURE (message VARCHAR DEFAULT 'test') RETURNS FLOAT LANGUAGE SQL STRICT AS '3.141592654::FLOAT' , %s (x, y) AS (select m.album_ID, m.album_name, b.band_name from music_albums) CALL %s (1) INTO :ret`, id.FullyQualifiedName(), cte.FullyQualifiedName(), id.FullyQualifiedName()) + assertOptsValidAndSQLEquals(t, opts, `WITH %s AS PROCEDURE ("message" VARCHAR DEFAULT 'test') RETURNS FLOAT LANGUAGE SQL STRICT AS '3.141592654::FLOAT' , %s (x, y) AS (select m.album_ID, m.album_name, b.band_name from music_albums) CALL %s (1) INTO :ret`, id.FullyQualifiedName(), cte.FullyQualifiedName(), id.FullyQualifiedName()) }) t.Run("all options", func(t *testing.T) { @@ -1984,6 +2074,6 @@ func TestProcedures_CreateAndCallForSQL(t *testing.T) { opts.ProcedureName = id opts.ScriptingVariable = String(":ret") opts.CallArguments = []string{"1"} - assertOptsValidAndSQLEquals(t, opts, `WITH %s AS PROCEDURE (message VARCHAR(100) DEFAULT 'test') RETURNS FLOAT LANGUAGE SQL STRICT AS '3.141592654::FLOAT' , %s (x, y) AS (select m.album_ID, m.album_name, b.band_name from music_albums) CALL %s (1) INTO :ret`, id.FullyQualifiedName(), cte.FullyQualifiedName(), id.FullyQualifiedName()) + assertOptsValidAndSQLEquals(t, opts, `WITH %s AS PROCEDURE ("message" VARCHAR(100) DEFAULT 'test') RETURNS FLOAT LANGUAGE SQL STRICT AS '3.141592654::FLOAT' , %s (x, y) AS (select m.album_ID, m.album_name, b.band_name from music_albums) CALL %s (1) INTO :ret`, id.FullyQualifiedName(), cte.FullyQualifiedName(), id.FullyQualifiedName()) }) } diff --git a/pkg/sdk/procedures_impl_gen.go b/pkg/sdk/procedures_impl_gen.go index e63cf1f386..5a7e1ce84e 100644 --- a/pkg/sdk/procedures_impl_gen.go +++ b/pkg/sdk/procedures_impl_gen.go @@ -60,7 +60,7 @@ func (v *procedures) Show(ctx context.Context, request *ShowProcedureRequest) ([ } func (v 
*procedures) ShowByID(ctx context.Context, id SchemaObjectIdentifierWithArguments) (*Procedure, error) { - procedures, err := v.Show(ctx, NewShowProcedureRequest().WithIn(In{Schema: id.SchemaId()}).WithLike(Like{String(id.Name())})) + procedures, err := v.Show(ctx, NewShowProcedureRequest().WithIn(ExtendedIn{In: In{Schema: id.SchemaId()}}).WithLike(Like{String(id.Name())})) if err != nil { return nil, err } @@ -123,6 +123,7 @@ func (r *CreateForJavaProcedureRequest) toOpts() *CreateForJavaProcedureOptions Secrets: r.Secrets, TargetPath: r.TargetPath, NullInputBehavior: r.NullInputBehavior, + ReturnResultsBehavior: r.ReturnResultsBehavior, Comment: r.Comment, ExecuteAs: r.ExecuteAs, ProcedureDefinition: r.ProcedureDefinition, @@ -130,7 +131,12 @@ func (r *CreateForJavaProcedureRequest) toOpts() *CreateForJavaProcedureOptions if r.Arguments != nil { s := make([]ProcedureArgument, len(r.Arguments)) for i, v := range r.Arguments { - s[i] = ProcedureArgument(v) + s[i] = ProcedureArgument{ + ArgName: v.ArgName, + ArgDataTypeOld: v.ArgDataTypeOld, + ArgDataType: v.ArgDataType, + DefaultValue: v.DefaultValue, + } } opts.Arguments = s } @@ -148,7 +154,11 @@ func (r *CreateForJavaProcedureRequest) toOpts() *CreateForJavaProcedureOptions if r.Returns.Table.Columns != nil { s := make([]ProcedureColumn, len(r.Returns.Table.Columns)) for i, v := range r.Returns.Table.Columns { - s[i] = ProcedureColumn(v) + s[i] = ProcedureColumn{ + ColumnName: v.ColumnName, + ColumnDataTypeOld: v.ColumnDataTypeOld, + ColumnDataType: v.ColumnDataType, + } } opts.Returns.Table.Columns = s } @@ -156,14 +166,18 @@ func (r *CreateForJavaProcedureRequest) toOpts() *CreateForJavaProcedureOptions if r.Packages != nil { s := make([]ProcedurePackage, len(r.Packages)) for i, v := range r.Packages { - s[i] = ProcedurePackage(v) + s[i] = ProcedurePackage{ + Package: v.Package, + } } opts.Packages = s } if r.Imports != nil { s := make([]ProcedureImport, len(r.Imports)) for i, v := range r.Imports { - s[i] = ProcedureImport(v) + s[i] = ProcedureImport{ + Import: v.Import, + } } opts.Imports = s } @@ -176,19 +190,25 @@ func (r *CreateForJavaScriptProcedureRequest) toOpts() *CreateForJavaScriptProce Secure: r.Secure, name: r.name, - CopyGrants: r.CopyGrants, - ResultDataTypeOld: r.ResultDataTypeOld, - ResultDataType: r.ResultDataType, - NotNull: r.NotNull, - NullInputBehavior: r.NullInputBehavior, - Comment: r.Comment, - ExecuteAs: r.ExecuteAs, - ProcedureDefinition: r.ProcedureDefinition, + CopyGrants: r.CopyGrants, + ResultDataTypeOld: r.ResultDataTypeOld, + ResultDataType: r.ResultDataType, + NotNull: r.NotNull, + NullInputBehavior: r.NullInputBehavior, + ReturnResultsBehavior: r.ReturnResultsBehavior, + Comment: r.Comment, + ExecuteAs: r.ExecuteAs, + ProcedureDefinition: r.ProcedureDefinition, } if r.Arguments != nil { s := make([]ProcedureArgument, len(r.Arguments)) for i, v := range r.Arguments { - s[i] = ProcedureArgument(v) + s[i] = ProcedureArgument{ + ArgName: v.ArgName, + ArgDataTypeOld: v.ArgDataTypeOld, + ArgDataType: v.ArgDataType, + DefaultValue: v.DefaultValue, + } } opts.Arguments = s } @@ -209,6 +229,7 @@ func (r *CreateForPythonProcedureRequest) toOpts() *CreateForPythonProcedureOpti ExternalAccessIntegrations: r.ExternalAccessIntegrations, Secrets: r.Secrets, NullInputBehavior: r.NullInputBehavior, + ReturnResultsBehavior: r.ReturnResultsBehavior, Comment: r.Comment, ExecuteAs: r.ExecuteAs, ProcedureDefinition: r.ProcedureDefinition, @@ -216,7 +237,12 @@ func (r *CreateForPythonProcedureRequest) toOpts() 
*CreateForPythonProcedureOpti if r.Arguments != nil { s := make([]ProcedureArgument, len(r.Arguments)) for i, v := range r.Arguments { - s[i] = ProcedureArgument(v) + s[i] = ProcedureArgument{ + ArgName: v.ArgName, + ArgDataTypeOld: v.ArgDataTypeOld, + ArgDataType: v.ArgDataType, + DefaultValue: v.DefaultValue, + } } opts.Arguments = s } @@ -234,7 +260,11 @@ func (r *CreateForPythonProcedureRequest) toOpts() *CreateForPythonProcedureOpti if r.Returns.Table.Columns != nil { s := make([]ProcedureColumn, len(r.Returns.Table.Columns)) for i, v := range r.Returns.Table.Columns { - s[i] = ProcedureColumn(v) + s[i] = ProcedureColumn{ + ColumnName: v.ColumnName, + ColumnDataTypeOld: v.ColumnDataTypeOld, + ColumnDataType: v.ColumnDataType, + } } opts.Returns.Table.Columns = s } @@ -242,14 +272,18 @@ func (r *CreateForPythonProcedureRequest) toOpts() *CreateForPythonProcedureOpti if r.Packages != nil { s := make([]ProcedurePackage, len(r.Packages)) for i, v := range r.Packages { - s[i] = ProcedurePackage(v) + s[i] = ProcedurePackage{ + Package: v.Package, + } } opts.Packages = s } if r.Imports != nil { s := make([]ProcedureImport, len(r.Imports)) for i, v := range r.Imports { - s[i] = ProcedureImport(v) + s[i] = ProcedureImport{ + Import: v.Import, + } } opts.Imports = s } @@ -266,17 +300,25 @@ func (r *CreateForScalaProcedureRequest) toOpts() *CreateForScalaProcedureOption RuntimeVersion: r.RuntimeVersion, - Handler: r.Handler, - TargetPath: r.TargetPath, - NullInputBehavior: r.NullInputBehavior, - Comment: r.Comment, - ExecuteAs: r.ExecuteAs, - ProcedureDefinition: r.ProcedureDefinition, + Handler: r.Handler, + ExternalAccessIntegrations: r.ExternalAccessIntegrations, + Secrets: r.Secrets, + TargetPath: r.TargetPath, + NullInputBehavior: r.NullInputBehavior, + ReturnResultsBehavior: r.ReturnResultsBehavior, + Comment: r.Comment, + ExecuteAs: r.ExecuteAs, + ProcedureDefinition: r.ProcedureDefinition, } if r.Arguments != nil { s := make([]ProcedureArgument, len(r.Arguments)) for i, v := range r.Arguments { - s[i] = ProcedureArgument(v) + s[i] = ProcedureArgument{ + ArgName: v.ArgName, + ArgDataTypeOld: v.ArgDataTypeOld, + ArgDataType: v.ArgDataType, + DefaultValue: v.DefaultValue, + } } opts.Arguments = s } @@ -294,7 +336,11 @@ func (r *CreateForScalaProcedureRequest) toOpts() *CreateForScalaProcedureOption if r.Returns.Table.Columns != nil { s := make([]ProcedureColumn, len(r.Returns.Table.Columns)) for i, v := range r.Returns.Table.Columns { - s[i] = ProcedureColumn(v) + s[i] = ProcedureColumn{ + ColumnName: v.ColumnName, + ColumnDataTypeOld: v.ColumnDataTypeOld, + ColumnDataType: v.ColumnDataType, + } } opts.Returns.Table.Columns = s } @@ -302,14 +348,18 @@ func (r *CreateForScalaProcedureRequest) toOpts() *CreateForScalaProcedureOption if r.Packages != nil { s := make([]ProcedurePackage, len(r.Packages)) for i, v := range r.Packages { - s[i] = ProcedurePackage(v) + s[i] = ProcedurePackage{ + Package: v.Package, + } } opts.Packages = s } if r.Imports != nil { s := make([]ProcedureImport, len(r.Imports)) for i, v := range r.Imports { - s[i] = ProcedureImport(v) + s[i] = ProcedureImport{ + Import: v.Import, + } } opts.Imports = s } @@ -324,15 +374,21 @@ func (r *CreateForSQLProcedureRequest) toOpts() *CreateForSQLProcedureOptions { CopyGrants: r.CopyGrants, - NullInputBehavior: r.NullInputBehavior, - Comment: r.Comment, - ExecuteAs: r.ExecuteAs, - ProcedureDefinition: r.ProcedureDefinition, + NullInputBehavior: r.NullInputBehavior, + ReturnResultsBehavior: r.ReturnResultsBehavior, + Comment: 
r.Comment, + ExecuteAs: r.ExecuteAs, + ProcedureDefinition: r.ProcedureDefinition, } if r.Arguments != nil { s := make([]ProcedureArgument, len(r.Arguments)) for i, v := range r.Arguments { - s[i] = ProcedureArgument(v) + s[i] = ProcedureArgument{ + ArgName: v.ArgName, + ArgDataTypeOld: v.ArgDataTypeOld, + ArgDataType: v.ArgDataType, + DefaultValue: v.DefaultValue, + } } opts.Arguments = s } @@ -343,6 +399,8 @@ func (r *CreateForSQLProcedureRequest) toOpts() *CreateForSQLProcedureOptions { opts.Returns.ResultDataType = &ProcedureReturnsResultDataType{ ResultDataTypeOld: r.Returns.ResultDataType.ResultDataTypeOld, ResultDataType: r.Returns.ResultDataType.ResultDataType, + Null: r.Returns.ResultDataType.Null, + NotNull: r.Returns.ResultDataType.NotNull, } } if r.Returns.Table != nil { @@ -350,7 +408,11 @@ func (r *CreateForSQLProcedureRequest) toOpts() *CreateForSQLProcedureOptions { if r.Returns.Table.Columns != nil { s := make([]ProcedureColumn, len(r.Returns.Table.Columns)) for i, v := range r.Returns.Table.Columns { - s[i] = ProcedureColumn(v) + s[i] = ProcedureColumn{ + ColumnName: v.ColumnName, + ColumnDataTypeOld: v.ColumnDataTypeOld, + ColumnDataType: v.ColumnDataType, + } } opts.Returns.Table.Columns = s } @@ -360,16 +422,41 @@ func (r *CreateForSQLProcedureRequest) toOpts() *CreateForSQLProcedureOptions { func (r *AlterProcedureRequest) toOpts() *AlterProcedureOptions { opts := &AlterProcedureOptions{ - IfExists: r.IfExists, - name: r.name, - RenameTo: r.RenameTo, - SetComment: r.SetComment, - SetLogLevel: r.SetLogLevel, - SetTraceLevel: r.SetTraceLevel, - UnsetComment: r.UnsetComment, - SetTags: r.SetTags, - UnsetTags: r.UnsetTags, - ExecuteAs: r.ExecuteAs, + IfExists: r.IfExists, + name: r.name, + RenameTo: r.RenameTo, + + SetTags: r.SetTags, + UnsetTags: r.UnsetTags, + ExecuteAs: r.ExecuteAs, + } + if r.Set != nil { + opts.Set = &ProcedureSet{ + Comment: r.Set.Comment, + ExternalAccessIntegrations: r.Set.ExternalAccessIntegrations, + + AutoEventLogging: r.Set.AutoEventLogging, + EnableConsoleOutput: r.Set.EnableConsoleOutput, + LogLevel: r.Set.LogLevel, + MetricLevel: r.Set.MetricLevel, + TraceLevel: r.Set.TraceLevel, + } + if r.Set.SecretsList != nil { + opts.Set.SecretsList = &SecretsList{ + SecretsList: r.Set.SecretsList.SecretsList, + } + } + } + if r.Unset != nil { + opts.Unset = &ProcedureUnset{ + Comment: r.Unset.Comment, + ExternalAccessIntegrations: r.Unset.ExternalAccessIntegrations, + AutoEventLogging: r.Unset.AutoEventLogging, + EnableConsoleOutput: r.Unset.EnableConsoleOutput, + LogLevel: r.Unset.LogLevel, + MetricLevel: r.Unset.MetricLevel, + TraceLevel: r.Unset.TraceLevel, + } } return opts } @@ -394,7 +481,7 @@ func (r procedureRow) convert() *Procedure { e := &Procedure{ CreatedOn: r.CreatedOn, Name: r.Name, - SchemaName: r.SchemaName, + SchemaName: strings.Trim(r.SchemaName, `"`), IsBuiltin: r.IsBuiltin == "Y", IsAggregate: r.IsAggregate == "Y", IsAnsi: r.IsAnsi == "Y", @@ -402,7 +489,7 @@ func (r procedureRow) convert() *Procedure { MaxNumArguments: r.MaxNumArguments, ArgumentsRaw: r.Arguments, Description: r.Description, - CatalogName: r.CatalogName, + CatalogName: strings.Trim(r.CatalogName, `"`), IsTableFunction: r.IsTableFunction == "Y", ValidForClustering: r.ValidForClustering == "Y", } @@ -431,8 +518,8 @@ func (r procedureDetailRow) convert() *ProcedureDetail { e := &ProcedureDetail{ Property: r.Property, } - if r.Value.Valid { - e.Value = r.Value.String + if r.Value.Valid && r.Value.String != "null" { + e.Value = String(r.Value.String) } return e } @@ 
-464,7 +551,12 @@ func (r *CreateAndCallForJavaProcedureRequest) toOpts() *CreateAndCallForJavaPro if r.Arguments != nil { s := make([]ProcedureArgument, len(r.Arguments)) for i, v := range r.Arguments { - s[i] = ProcedureArgument(v) + s[i] = ProcedureArgument{ + ArgName: v.ArgName, + ArgDataTypeOld: v.ArgDataTypeOld, + ArgDataType: v.ArgDataType, + DefaultValue: v.DefaultValue, + } } opts.Arguments = s } @@ -482,7 +574,11 @@ func (r *CreateAndCallForJavaProcedureRequest) toOpts() *CreateAndCallForJavaPro if r.Returns.Table.Columns != nil { s := make([]ProcedureColumn, len(r.Returns.Table.Columns)) for i, v := range r.Returns.Table.Columns { - s[i] = ProcedureColumn(v) + s[i] = ProcedureColumn{ + ColumnName: v.ColumnName, + ColumnDataTypeOld: v.ColumnDataTypeOld, + ColumnDataType: v.ColumnDataType, + } } opts.Returns.Table.Columns = s } @@ -490,14 +586,18 @@ func (r *CreateAndCallForJavaProcedureRequest) toOpts() *CreateAndCallForJavaPro if r.Packages != nil { s := make([]ProcedurePackage, len(r.Packages)) for i, v := range r.Packages { - s[i] = ProcedurePackage(v) + s[i] = ProcedurePackage{ + Package: v.Package, + } } opts.Packages = s } if r.Imports != nil { s := make([]ProcedureImport, len(r.Imports)) for i, v := range r.Imports { - s[i] = ProcedureImport(v) + s[i] = ProcedureImport{ + Import: v.Import, + } } opts.Imports = s } @@ -528,7 +628,12 @@ func (r *CreateAndCallForScalaProcedureRequest) toOpts() *CreateAndCallForScalaP if r.Arguments != nil { s := make([]ProcedureArgument, len(r.Arguments)) for i, v := range r.Arguments { - s[i] = ProcedureArgument(v) + s[i] = ProcedureArgument{ + ArgName: v.ArgName, + ArgDataTypeOld: v.ArgDataTypeOld, + ArgDataType: v.ArgDataType, + DefaultValue: v.DefaultValue, + } } opts.Arguments = s } @@ -546,7 +651,11 @@ func (r *CreateAndCallForScalaProcedureRequest) toOpts() *CreateAndCallForScalaP if r.Returns.Table.Columns != nil { s := make([]ProcedureColumn, len(r.Returns.Table.Columns)) for i, v := range r.Returns.Table.Columns { - s[i] = ProcedureColumn(v) + s[i] = ProcedureColumn{ + ColumnName: v.ColumnName, + ColumnDataTypeOld: v.ColumnDataTypeOld, + ColumnDataType: v.ColumnDataType, + } } opts.Returns.Table.Columns = s } @@ -554,14 +663,18 @@ func (r *CreateAndCallForScalaProcedureRequest) toOpts() *CreateAndCallForScalaP if r.Packages != nil { s := make([]ProcedurePackage, len(r.Packages)) for i, v := range r.Packages { - s[i] = ProcedurePackage(v) + s[i] = ProcedurePackage{ + Package: v.Package, + } } opts.Packages = s } if r.Imports != nil { s := make([]ProcedureImport, len(r.Imports)) for i, v := range r.Imports { - s[i] = ProcedureImport(v) + s[i] = ProcedureImport{ + Import: v.Import, + } } opts.Imports = s } @@ -596,7 +709,12 @@ func (r *CreateAndCallForJavaScriptProcedureRequest) toOpts() *CreateAndCallForJ if r.Arguments != nil { s := make([]ProcedureArgument, len(r.Arguments)) for i, v := range r.Arguments { - s[i] = ProcedureArgument(v) + s[i] = ProcedureArgument{ + ArgName: v.ArgName, + ArgDataTypeOld: v.ArgDataTypeOld, + ArgDataType: v.ArgDataType, + DefaultValue: v.DefaultValue, + } } opts.Arguments = s } @@ -631,7 +749,12 @@ func (r *CreateAndCallForPythonProcedureRequest) toOpts() *CreateAndCallForPytho if r.Arguments != nil { s := make([]ProcedureArgument, len(r.Arguments)) for i, v := range r.Arguments { - s[i] = ProcedureArgument(v) + s[i] = ProcedureArgument{ + ArgName: v.ArgName, + ArgDataTypeOld: v.ArgDataTypeOld, + ArgDataType: v.ArgDataType, + DefaultValue: v.DefaultValue, + } } opts.Arguments = s } @@ -649,7 +772,11 @@ 
func (r *CreateAndCallForPythonProcedureRequest) toOpts() *CreateAndCallForPytho if r.Returns.Table.Columns != nil { s := make([]ProcedureColumn, len(r.Returns.Table.Columns)) for i, v := range r.Returns.Table.Columns { - s[i] = ProcedureColumn(v) + s[i] = ProcedureColumn{ + ColumnName: v.ColumnName, + ColumnDataTypeOld: v.ColumnDataTypeOld, + ColumnDataType: v.ColumnDataType, + } } opts.Returns.Table.Columns = s } @@ -657,14 +784,18 @@ func (r *CreateAndCallForPythonProcedureRequest) toOpts() *CreateAndCallForPytho if r.Packages != nil { s := make([]ProcedurePackage, len(r.Packages)) for i, v := range r.Packages { - s[i] = ProcedurePackage(v) + s[i] = ProcedurePackage{ + Package: v.Package, + } } opts.Packages = s } if r.Imports != nil { s := make([]ProcedureImport, len(r.Imports)) for i, v := range r.Imports { - s[i] = ProcedureImport(v) + s[i] = ProcedureImport{ + Import: v.Import, + } } opts.Imports = s } @@ -696,7 +827,12 @@ func (r *CreateAndCallForSQLProcedureRequest) toOpts() *CreateAndCallForSQLProce if r.Arguments != nil { s := make([]ProcedureArgument, len(r.Arguments)) for i, v := range r.Arguments { - s[i] = ProcedureArgument(v) + s[i] = ProcedureArgument{ + ArgName: v.ArgName, + ArgDataTypeOld: v.ArgDataTypeOld, + ArgDataType: v.ArgDataType, + DefaultValue: v.DefaultValue, + } } opts.Arguments = s } @@ -714,7 +850,11 @@ func (r *CreateAndCallForSQLProcedureRequest) toOpts() *CreateAndCallForSQLProce if r.Returns.Table.Columns != nil { s := make([]ProcedureColumn, len(r.Returns.Table.Columns)) for i, v := range r.Returns.Table.Columns { - s[i] = ProcedureColumn(v) + s[i] = ProcedureColumn{ + ColumnName: v.ColumnName, + ColumnDataTypeOld: v.ColumnDataTypeOld, + ColumnDataType: v.ColumnDataType, + } } opts.Returns.Table.Columns = s } diff --git a/pkg/sdk/procedures_validations_gen.go b/pkg/sdk/procedures_validations_gen.go index 5e7557176f..8298767264 100644 --- a/pkg/sdk/procedures_validations_gen.go +++ b/pkg/sdk/procedures_validations_gen.go @@ -248,8 +248,18 @@ func (opts *AlterProcedureOptions) validate() error { if opts.RenameTo != nil && !ValidObjectIdentifier(opts.RenameTo) { errs = append(errs, ErrInvalidObjectIdentifier) } - if !exactlyOneValueSet(opts.RenameTo, opts.SetComment, opts.SetLogLevel, opts.SetTraceLevel, opts.UnsetComment, opts.SetTags, opts.UnsetTags, opts.ExecuteAs) { - errs = append(errs, errExactlyOneOf("AlterProcedureOptions", "RenameTo", "SetComment", "SetLogLevel", "SetTraceLevel", "UnsetComment", "SetTags", "UnsetTags", "ExecuteAs")) + if !exactlyOneValueSet(opts.RenameTo, opts.Set, opts.Unset, opts.SetTags, opts.UnsetTags, opts.ExecuteAs) { + errs = append(errs, errExactlyOneOf("AlterProcedureOptions", "RenameTo", "Set", "Unset", "SetTags", "UnsetTags", "ExecuteAs")) + } + if valueSet(opts.Set) { + if !anyValueSet(opts.Set.Comment, opts.Set.ExternalAccessIntegrations, opts.Set.SecretsList, opts.Set.AutoEventLogging, opts.Set.EnableConsoleOutput, opts.Set.LogLevel, opts.Set.MetricLevel, opts.Set.TraceLevel) { + errs = append(errs, errAtLeastOneOf("AlterProcedureOptions.Set", "Comment", "ExternalAccessIntegrations", "SecretsList", "AutoEventLogging", "EnableConsoleOutput", "LogLevel", "MetricLevel", "TraceLevel")) + } + } + if valueSet(opts.Unset) { + if !anyValueSet(opts.Unset.Comment, opts.Unset.ExternalAccessIntegrations, opts.Unset.AutoEventLogging, opts.Unset.EnableConsoleOutput, opts.Unset.LogLevel, opts.Unset.MetricLevel, opts.Unset.TraceLevel) { + errs = append(errs, errAtLeastOneOf("AlterProcedureOptions.Unset", "Comment", 
"ExternalAccessIntegrations", "AutoEventLogging", "EnableConsoleOutput", "LogLevel", "MetricLevel", "TraceLevel")) + } } return JoinErrors(errs...) } diff --git a/pkg/sdk/testint/functions_integration_test.go b/pkg/sdk/testint/functions_integration_test.go index fa5196dc95..4f7220062c 100644 --- a/pkg/sdk/testint/functions_integration_test.go +++ b/pkg/sdk/testint/functions_integration_test.go @@ -87,7 +87,7 @@ func TestInt_Functions(t *testing.T) { assertions.AssertThatObject(t, objectassert.FunctionFromObject(t, function). HasCreatedOnNotEmpty(). HasName(id.Name()). - HasSchemaName(fmt.Sprintf(`"%s"`, id.SchemaName())). + HasSchemaName(id.SchemaName()). HasIsBuiltin(false). HasIsAggregate(false). HasIsAnsi(false). @@ -96,7 +96,7 @@ func TestInt_Functions(t *testing.T) { HasArgumentsOld([]sdk.DataType{sdk.LegacyDataTypeFrom(dataType)}). HasArgumentsRaw(fmt.Sprintf(`%[1]s(%[2]s) RETURN %[2]s`, function.ID().Name(), dataType.ToLegacyDataTypeSql())). HasDescription(sdk.DefaultFunctionComment). - HasCatalogName(fmt.Sprintf(`"%s"`, id.DatabaseName())). + HasCatalogName(id.DatabaseName()). HasIsTableFunction(false). HasValidForClustering(false). HasIsSecure(false). @@ -151,7 +151,7 @@ func TestInt_Functions(t *testing.T) { WithOrReplace(true). WithArguments([]sdk.FunctionArgumentRequest{*argument}). WithCopyGrants(true). - WithNullInputBehavior(*sdk.NullInputBehaviorPointer(sdk.NullInputBehaviorReturnNullInput)). + WithNullInputBehavior(*sdk.NullInputBehaviorPointer(sdk.NullInputBehaviorReturnsNullInput)). WithReturnResultsBehavior(sdk.ReturnResultsBehaviorImmutable). WithReturnNullValues(sdk.ReturnNullValuesNotNull). WithRuntimeVersion("11"). @@ -181,7 +181,7 @@ func TestInt_Functions(t *testing.T) { assertions.AssertThatObject(t, objectassert.FunctionFromObject(t, function). HasCreatedOnNotEmpty(). HasName(id.Name()). - HasSchemaName(fmt.Sprintf(`"%s"`, id.SchemaName())). + HasSchemaName(id.SchemaName()). HasIsBuiltin(false). HasIsAggregate(false). HasIsAnsi(false). @@ -190,12 +190,12 @@ func TestInt_Functions(t *testing.T) { HasArgumentsOld([]sdk.DataType{sdk.LegacyDataTypeFrom(dataType)}). HasArgumentsRaw(fmt.Sprintf(`%[1]s(%[2]s) RETURN %[2]s`, function.ID().Name(), dataType.ToLegacyDataTypeSql())). HasDescription("comment"). - HasCatalogName(fmt.Sprintf(`"%s"`, id.DatabaseName())). + HasCatalogName(id.DatabaseName()). HasIsTableFunction(false). HasValidForClustering(false). HasIsSecure(false). - HasExternalAccessIntegrations(fmt.Sprintf(`[%s]`, externalAccessIntegration.FullyQualifiedName())). - HasSecrets(fmt.Sprintf(`{"abc":"\"%s\".\"%s\".%s"}`, secretId.DatabaseName(), secretId.SchemaName(), secretId.Name())). + HasExactlyExternalAccessIntegrations(externalAccessIntegration). + HasExactlySecrets(map[string]sdk.SchemaObjectIdentifier{"abc": secretId}). HasIsExternalFunction(false). HasLanguage("JAVA"). HasIsMemoizable(false). @@ -207,12 +207,12 @@ func TestInt_Functions(t *testing.T) { HasReturns(fmt.Sprintf(`%s NOT NULL`, dataType.ToSql())). HasLanguage("JAVA"). HasBody(definition). - HasNullHandling(string(sdk.NullInputBehaviorReturnNullInput)). + HasNullHandling(string(sdk.NullInputBehaviorReturnsNullInput)). HasVolatility(string(sdk.ReturnResultsBehaviorImmutable)). - HasExternalAccessIntegrations(fmt.Sprintf(`[%s]`, externalAccessIntegration.FullyQualifiedName())). + HasExactlyExternalAccessIntegrations(externalAccessIntegration). 
// TODO [SNOW-1348103]: parse to identifier list // TODO [SNOW-1348103]: check multiple secrets (to know how to parse) - HasSecrets(fmt.Sprintf(`{"abc":"\"%s\".\"%s\".%s"}`, secretId.DatabaseName(), secretId.SchemaName(), secretId.Name())). + HasExactlySecrets(map[string]sdk.SchemaObjectIdentifier{"abc": secretId}). HasImports(fmt.Sprintf(`[%s]`, tmpJavaFunction.JarLocation())). HasHandler(handler). HasRuntimeVersion("11"). @@ -253,7 +253,7 @@ func TestInt_Functions(t *testing.T) { assertions.AssertThatObject(t, objectassert.FunctionFromObject(t, function). HasCreatedOnNotEmpty(). HasName(id.Name()). - HasSchemaName(fmt.Sprintf(`"%s"`, id.SchemaName())). + HasSchemaName(id.SchemaName()). HasIsBuiltin(false). HasIsAggregate(false). HasIsAnsi(false). @@ -262,7 +262,7 @@ func TestInt_Functions(t *testing.T) { HasArgumentsOld([]sdk.DataType{sdk.LegacyDataTypeFrom(dataType)}). HasArgumentsRaw(fmt.Sprintf(`%[1]s(%[2]s) RETURN %[2]s`, function.ID().Name(), dataType.ToLegacyDataTypeSql())). HasDescription(sdk.DefaultFunctionComment). - HasCatalogName(fmt.Sprintf(`"%s"`, id.DatabaseName())). + HasCatalogName(id.DatabaseName()). HasIsTableFunction(false). HasValidForClustering(false). HasIsSecure(false). @@ -312,7 +312,7 @@ func TestInt_Functions(t *testing.T) { WithOrReplace(true). WithArguments([]sdk.FunctionArgumentRequest{*argument}). WithCopyGrants(true). - WithNullInputBehavior(*sdk.NullInputBehaviorPointer(sdk.NullInputBehaviorReturnNullInput)). + WithNullInputBehavior(*sdk.NullInputBehaviorPointer(sdk.NullInputBehaviorReturnsNullInput)). WithReturnResultsBehavior(sdk.ReturnResultsBehaviorImmutable). WithReturnNullValues(sdk.ReturnNullValuesNotNull). WithRuntimeVersion("11"). @@ -335,7 +335,7 @@ func TestInt_Functions(t *testing.T) { assertions.AssertThatObject(t, objectassert.FunctionFromObject(t, function). HasCreatedOnNotEmpty(). HasName(id.Name()). - HasSchemaName(fmt.Sprintf(`"%s"`, id.SchemaName())). + HasSchemaName(id.SchemaName()). HasIsBuiltin(false). HasIsAggregate(false). HasIsAnsi(false). @@ -344,12 +344,12 @@ func TestInt_Functions(t *testing.T) { HasArgumentsOld([]sdk.DataType{sdk.LegacyDataTypeFrom(dataType)}). HasArgumentsRaw(fmt.Sprintf(`%[1]s(%[2]s) RETURN %[2]s`, function.ID().Name(), dataType.ToLegacyDataTypeSql())). HasDescription("comment"). - HasCatalogName(fmt.Sprintf(`"%s"`, id.DatabaseName())). + HasCatalogName(id.DatabaseName()). HasIsTableFunction(false). HasValidForClustering(false). HasIsSecure(false). - HasExternalAccessIntegrations(fmt.Sprintf(`[%s]`, externalAccessIntegration.FullyQualifiedName())). - HasSecrets(fmt.Sprintf(`{"abc":"\"%s\".\"%s\".%s"}`, secretId.DatabaseName(), secretId.SchemaName(), secretId.Name())). + HasExactlyExternalAccessIntegrations(externalAccessIntegration). + HasExactlySecrets(map[string]sdk.SchemaObjectIdentifier{"abc": secretId}). HasIsExternalFunction(false). HasLanguage("JAVA"). HasIsMemoizable(false). @@ -361,10 +361,10 @@ func TestInt_Functions(t *testing.T) { HasReturns(fmt.Sprintf(`%s NOT NULL`, dataType.ToSql())). HasLanguage("JAVA"). HasBodyNil(). - HasNullHandling(string(sdk.NullInputBehaviorReturnNullInput)). + HasNullHandling(string(sdk.NullInputBehaviorReturnsNullInput)). HasVolatility(string(sdk.ReturnResultsBehaviorImmutable)). - HasExternalAccessIntegrations(fmt.Sprintf(`[%s]`, externalAccessIntegration.FullyQualifiedName())). - HasSecrets(fmt.Sprintf(`{"abc":"\"%s\".\"%s\".%s"}`, secretId.DatabaseName(), secretId.SchemaName(), secretId.Name())). + HasExactlyExternalAccessIntegrations(externalAccessIntegration). 
+ HasExactlySecrets(map[string]sdk.SchemaObjectIdentifier{"abc": secretId}). HasImports(fmt.Sprintf(`[%s]`, tmpJavaFunction.JarLocation())). HasHandler(handler). HasRuntimeVersion("11"). @@ -403,7 +403,7 @@ func TestInt_Functions(t *testing.T) { assertions.AssertThatObject(t, objectassert.FunctionFromObject(t, function). HasCreatedOnNotEmpty(). HasName(id.Name()). - HasSchemaName(fmt.Sprintf(`"%s"`, id.SchemaName())). + HasSchemaName(id.SchemaName()). HasIsBuiltin(false). HasIsAggregate(false). HasIsAnsi(false). @@ -412,7 +412,7 @@ func TestInt_Functions(t *testing.T) { HasArgumentsOld([]sdk.DataType{sdk.LegacyDataTypeFrom(dataType)}). HasArgumentsRaw(fmt.Sprintf(`%[1]s(%[2]s) RETURN %[2]s`, function.ID().Name(), dataType.ToLegacyDataTypeSql())). HasDescription(sdk.DefaultFunctionComment). - HasCatalogName(fmt.Sprintf(`"%s"`, id.DatabaseName())). + HasCatalogName(id.DatabaseName()). HasIsTableFunction(false). HasValidForClustering(false). HasIsSecure(false). @@ -462,7 +462,7 @@ func TestInt_Functions(t *testing.T) { WithArguments([]sdk.FunctionArgumentRequest{*argument}). WithCopyGrants(true). WithReturnNullValues(sdk.ReturnNullValuesNotNull). - WithNullInputBehavior(*sdk.NullInputBehaviorPointer(sdk.NullInputBehaviorReturnNullInput)). + WithNullInputBehavior(*sdk.NullInputBehaviorPointer(sdk.NullInputBehaviorReturnsNullInput)). WithReturnResultsBehavior(sdk.ReturnResultsBehaviorImmutable). WithComment("comment") @@ -476,7 +476,7 @@ func TestInt_Functions(t *testing.T) { assertions.AssertThatObject(t, objectassert.FunctionFromObject(t, function). HasCreatedOnNotEmpty(). HasName(id.Name()). - HasSchemaName(fmt.Sprintf(`"%s"`, id.SchemaName())). + HasSchemaName(id.SchemaName()). HasIsBuiltin(false). HasIsAggregate(false). HasIsAnsi(false). @@ -485,7 +485,7 @@ func TestInt_Functions(t *testing.T) { HasArgumentsOld([]sdk.DataType{sdk.LegacyDataTypeFrom(dataType)}). HasArgumentsRaw(fmt.Sprintf(`%[1]s(%[2]s) RETURN %[2]s`, function.ID().Name(), dataType.ToLegacyDataTypeSql())). HasDescription("comment"). - HasCatalogName(fmt.Sprintf(`"%s"`, id.DatabaseName())). + HasCatalogName(id.DatabaseName()). HasIsTableFunction(false). HasValidForClustering(false). HasIsSecure(false). @@ -502,7 +502,7 @@ func TestInt_Functions(t *testing.T) { HasReturns(fmt.Sprintf(`%s NOT NULL`, dataType.ToSql())). HasLanguage("JAVASCRIPT"). HasBody(definition). - HasNullHandling(string(sdk.NullInputBehaviorReturnNullInput)). + HasNullHandling(string(sdk.NullInputBehaviorReturnsNullInput)). HasVolatility(string(sdk.ReturnResultsBehaviorImmutable)). HasExternalAccessIntegrationsNil(). HasSecretsNil(). @@ -545,7 +545,7 @@ func TestInt_Functions(t *testing.T) { assertions.AssertThatObject(t, objectassert.FunctionFromObject(t, function). HasCreatedOnNotEmpty(). HasName(id.Name()). - HasSchemaName(fmt.Sprintf(`"%s"`, id.SchemaName())). + HasSchemaName(id.SchemaName()). HasIsBuiltin(false). HasIsAggregate(false). HasIsAnsi(false). @@ -554,7 +554,7 @@ func TestInt_Functions(t *testing.T) { HasArgumentsOld([]sdk.DataType{sdk.LegacyDataTypeFrom(dataType)}). HasArgumentsRaw(fmt.Sprintf(`%[1]s(%[2]s) RETURN %[2]s`, function.ID().Name(), dataType.ToLegacyDataTypeSql())). HasDescription(sdk.DefaultFunctionComment). - HasCatalogName(fmt.Sprintf(`"%s"`, id.DatabaseName())). + HasCatalogName(id.DatabaseName()). HasIsTableFunction(false). HasValidForClustering(false). HasIsSecure(false). @@ -605,7 +605,7 @@ func TestInt_Functions(t *testing.T) { WithArguments([]sdk.FunctionArgumentRequest{*argument}). WithCopyGrants(true). 
WithReturnNullValues(sdk.ReturnNullValuesNotNull). - WithNullInputBehavior(*sdk.NullInputBehaviorPointer(sdk.NullInputBehaviorReturnNullInput)). + WithNullInputBehavior(*sdk.NullInputBehaviorPointer(sdk.NullInputBehaviorReturnsNullInput)). WithReturnResultsBehavior(sdk.ReturnResultsBehaviorImmutable). WithComment("comment"). WithImports([]sdk.FunctionImportRequest{*sdk.NewFunctionImportRequest().WithImport(tmpPythonFunction.PythonModuleLocation())}). @@ -627,7 +627,7 @@ func TestInt_Functions(t *testing.T) { assertions.AssertThatObject(t, objectassert.FunctionFromObject(t, function). HasCreatedOnNotEmpty(). HasName(id.Name()). - HasSchemaName(fmt.Sprintf(`"%s"`, id.SchemaName())). + HasSchemaName(id.SchemaName()). HasIsBuiltin(false). HasIsAggregate(false). HasIsAnsi(false). @@ -636,12 +636,12 @@ func TestInt_Functions(t *testing.T) { HasArgumentsOld([]sdk.DataType{sdk.LegacyDataTypeFrom(dataType)}). HasArgumentsRaw(fmt.Sprintf(`%[1]s(%[2]s) RETURN %[2]s`, function.ID().Name(), dataType.ToLegacyDataTypeSql())). HasDescription("comment"). - HasCatalogName(fmt.Sprintf(`"%s"`, id.DatabaseName())). + HasCatalogName(id.DatabaseName()). HasIsTableFunction(false). HasValidForClustering(false). HasIsSecure(false). - HasExternalAccessIntegrations(fmt.Sprintf(`[%s]`, externalAccessIntegration.FullyQualifiedName())). - HasSecrets(fmt.Sprintf(`{"abc":"\"%s\".\"%s\".%s"}`, secretId.DatabaseName(), secretId.SchemaName(), secretId.Name())). + HasExactlyExternalAccessIntegrations(externalAccessIntegration). + HasExactlySecrets(map[string]sdk.SchemaObjectIdentifier{"abc": secretId}). HasIsExternalFunction(false). HasLanguage("PYTHON"). HasIsMemoizable(false). @@ -653,10 +653,10 @@ func TestInt_Functions(t *testing.T) { HasReturns(strings.ReplaceAll(dataType.ToSql(), " ", "")+" NOT NULL"). // TODO [SNOW-1348103]: do we care about this whitespace? HasLanguage("PYTHON"). HasBody(definition). - HasNullHandling(string(sdk.NullInputBehaviorReturnNullInput)). + HasNullHandling(string(sdk.NullInputBehaviorReturnsNullInput)). HasVolatility(string(sdk.ReturnResultsBehaviorImmutable)). - HasExternalAccessIntegrations(fmt.Sprintf(`[%s]`, externalAccessIntegration.FullyQualifiedName())). - HasSecrets(fmt.Sprintf(`{"abc":"\"%s\".\"%s\".%s"}`, secretId.DatabaseName(), secretId.SchemaName(), secretId.Name())). + HasExactlyExternalAccessIntegrations(externalAccessIntegration). + HasExactlySecrets(map[string]sdk.SchemaObjectIdentifier{"abc": secretId}). HasImports(fmt.Sprintf(`[%s]`, tmpPythonFunction.PythonModuleLocation())). HasHandler(funcName). HasRuntimeVersion("3.8"). @@ -694,7 +694,7 @@ func TestInt_Functions(t *testing.T) { assertions.AssertThatObject(t, objectassert.FunctionFromObject(t, function). HasCreatedOnNotEmpty(). HasName(id.Name()). - HasSchemaName(fmt.Sprintf(`"%s"`, id.SchemaName())). + HasSchemaName(id.SchemaName()). HasIsBuiltin(false). HasIsAggregate(false). HasIsAnsi(false). @@ -703,7 +703,7 @@ func TestInt_Functions(t *testing.T) { HasArgumentsOld([]sdk.DataType{sdk.LegacyDataTypeFrom(dataType)}). HasArgumentsRaw(fmt.Sprintf(`%[1]s(%[2]s) RETURN %[2]s`, function.ID().Name(), dataType.ToLegacyDataTypeSql())). HasDescription(sdk.DefaultFunctionComment). - HasCatalogName(fmt.Sprintf(`"%s"`, id.DatabaseName())). + HasCatalogName(id.DatabaseName()). HasIsTableFunction(false). HasValidForClustering(false). HasIsSecure(false). @@ -752,7 +752,7 @@ func TestInt_Functions(t *testing.T) { WithArguments([]sdk.FunctionArgumentRequest{*argument}). WithCopyGrants(true). 
WithReturnNullValues(sdk.ReturnNullValuesNotNull). - WithNullInputBehavior(*sdk.NullInputBehaviorPointer(sdk.NullInputBehaviorReturnNullInput)). + WithNullInputBehavior(*sdk.NullInputBehaviorPointer(sdk.NullInputBehaviorReturnsNullInput)). WithReturnResultsBehavior(sdk.ReturnResultsBehaviorImmutable). WithComment("comment"). WithPackages([]sdk.FunctionPackageRequest{ @@ -773,7 +773,7 @@ func TestInt_Functions(t *testing.T) { assertions.AssertThatObject(t, objectassert.FunctionFromObject(t, function). HasCreatedOnNotEmpty(). HasName(id.Name()). - HasSchemaName(fmt.Sprintf(`"%s"`, id.SchemaName())). + HasSchemaName(id.SchemaName()). HasIsBuiltin(false). HasIsAggregate(false). HasIsAnsi(false). @@ -782,12 +782,12 @@ func TestInt_Functions(t *testing.T) { HasArgumentsOld([]sdk.DataType{sdk.LegacyDataTypeFrom(dataType)}). HasArgumentsRaw(fmt.Sprintf(`%[1]s(%[2]s) RETURN %[2]s`, function.ID().Name(), dataType.ToLegacyDataTypeSql())). HasDescription("comment"). - HasCatalogName(fmt.Sprintf(`"%s"`, id.DatabaseName())). + HasCatalogName(id.DatabaseName()). HasIsTableFunction(false). HasValidForClustering(false). HasIsSecure(false). - HasExternalAccessIntegrations(fmt.Sprintf(`[%s]`, externalAccessIntegration.FullyQualifiedName())). - HasSecrets(fmt.Sprintf(`{"abc":"\"%s\".\"%s\".%s"}`, secretId.DatabaseName(), secretId.SchemaName(), secretId.Name())). + HasExactlyExternalAccessIntegrations(externalAccessIntegration). + HasExactlySecrets(map[string]sdk.SchemaObjectIdentifier{"abc": secretId}). HasIsExternalFunction(false). HasLanguage("PYTHON"). HasIsMemoizable(false). @@ -799,10 +799,10 @@ func TestInt_Functions(t *testing.T) { HasReturns(strings.ReplaceAll(dataType.ToSql(), " ", "")+" NOT NULL"). HasLanguage("PYTHON"). HasBodyNil(). - HasNullHandling(string(sdk.NullInputBehaviorReturnNullInput)). + HasNullHandling(string(sdk.NullInputBehaviorReturnsNullInput)). HasVolatility(string(sdk.ReturnResultsBehaviorImmutable)). - HasExternalAccessIntegrations(fmt.Sprintf(`[%s]`, externalAccessIntegration.FullyQualifiedName())). - HasSecrets(fmt.Sprintf(`{"abc":"\"%s\".\"%s\".%s"}`, secretId.DatabaseName(), secretId.SchemaName(), secretId.Name())). + HasExactlyExternalAccessIntegrations(externalAccessIntegration). + HasExactlySecrets(map[string]sdk.SchemaObjectIdentifier{"abc": secretId}). HasImports(fmt.Sprintf(`[%s]`, tmpPythonFunction.PythonModuleLocation())). HasHandler(tmpPythonFunction.PythonHandler()). HasRuntimeVersion("3.8"). @@ -843,7 +843,7 @@ func TestInt_Functions(t *testing.T) { assertions.AssertThatObject(t, objectassert.FunctionFromObject(t, function). HasCreatedOnNotEmpty(). HasName(id.Name()). - HasSchemaName(fmt.Sprintf(`"%s"`, id.SchemaName())). + HasSchemaName(id.SchemaName()). HasIsBuiltin(false). HasIsAggregate(false). HasIsAnsi(false). @@ -852,7 +852,7 @@ func TestInt_Functions(t *testing.T) { HasArgumentsOld([]sdk.DataType{sdk.LegacyDataTypeFrom(dataType)}). HasArgumentsRaw(fmt.Sprintf(`%[1]s(%[2]s) RETURN %[2]s`, function.ID().Name(), dataType.ToLegacyDataTypeSql())). HasDescription(sdk.DefaultFunctionComment). - HasCatalogName(fmt.Sprintf(`"%s"`, id.DatabaseName())). + HasCatalogName(id.DatabaseName()). HasIsTableFunction(false). HasValidForClustering(false). HasIsSecure(false). @@ -905,7 +905,7 @@ func TestInt_Functions(t *testing.T) { WithOrReplace(true). WithArguments([]sdk.FunctionArgumentRequest{*argument}). WithCopyGrants(true). - WithNullInputBehavior(*sdk.NullInputBehaviorPointer(sdk.NullInputBehaviorReturnNullInput)). 
+ WithNullInputBehavior(*sdk.NullInputBehaviorPointer(sdk.NullInputBehaviorReturnsNullInput)). WithReturnResultsBehavior(sdk.ReturnResultsBehaviorImmutable). WithReturnNullValues(sdk.ReturnNullValuesNotNull). WithComment("comment"). @@ -934,7 +934,7 @@ func TestInt_Functions(t *testing.T) { assertions.AssertThatObject(t, objectassert.FunctionFromObject(t, function). HasCreatedOnNotEmpty(). HasName(id.Name()). - HasSchemaName(fmt.Sprintf(`"%s"`, id.SchemaName())). + HasSchemaName(id.SchemaName()). HasIsBuiltin(false). HasIsAggregate(false). HasIsAnsi(false). @@ -943,12 +943,12 @@ func TestInt_Functions(t *testing.T) { HasArgumentsOld([]sdk.DataType{sdk.LegacyDataTypeFrom(dataType)}). HasArgumentsRaw(fmt.Sprintf(`%[1]s(%[2]s) RETURN %[2]s`, function.ID().Name(), dataType.ToLegacyDataTypeSql())). HasDescription("comment"). - HasCatalogName(fmt.Sprintf(`"%s"`, id.DatabaseName())). + HasCatalogName(id.DatabaseName()). HasIsTableFunction(false). HasValidForClustering(false). HasIsSecure(false). - HasExternalAccessIntegrations(fmt.Sprintf(`[%s]`, externalAccessIntegration.FullyQualifiedName())). - HasSecrets(fmt.Sprintf(`{"abc":"\"%s\".\"%s\".%s"}`, secretId.DatabaseName(), secretId.SchemaName(), secretId.Name())). + HasExactlyExternalAccessIntegrations(externalAccessIntegration). + HasExactlySecrets(map[string]sdk.SchemaObjectIdentifier{"abc": secretId}). HasIsExternalFunction(false). HasLanguage("SCALA"). HasIsMemoizable(false). @@ -960,10 +960,10 @@ func TestInt_Functions(t *testing.T) { HasReturns(fmt.Sprintf(`%s NOT NULL`, dataType.ToSql())). HasLanguage("SCALA"). HasBody(definition). - HasNullHandling(string(sdk.NullInputBehaviorReturnNullInput)). + HasNullHandling(string(sdk.NullInputBehaviorReturnsNullInput)). HasVolatility(string(sdk.ReturnResultsBehaviorImmutable)). - HasExternalAccessIntegrations(fmt.Sprintf(`[%s]`, externalAccessIntegration.FullyQualifiedName())). - HasSecrets(fmt.Sprintf(`{"abc":"\"%s\".\"%s\".%s"}`, secretId.DatabaseName(), secretId.SchemaName(), secretId.Name())). + HasExactlyExternalAccessIntegrations(externalAccessIntegration). + HasExactlySecrets(map[string]sdk.SchemaObjectIdentifier{"abc": secretId}). HasImports(fmt.Sprintf(`[%s]`, tmpJavaFunction.JarLocation())). HasHandler(handler). HasRuntimeVersion("2.12"). @@ -1002,7 +1002,7 @@ func TestInt_Functions(t *testing.T) { assertions.AssertThatObject(t, objectassert.FunctionFromObject(t, function). HasCreatedOnNotEmpty(). HasName(id.Name()). - HasSchemaName(fmt.Sprintf(`"%s"`, id.SchemaName())). + HasSchemaName(id.SchemaName()). HasIsBuiltin(false). HasIsAggregate(false). HasIsAnsi(false). @@ -1011,7 +1011,7 @@ func TestInt_Functions(t *testing.T) { HasArgumentsOld([]sdk.DataType{sdk.LegacyDataTypeFrom(dataType)}). HasArgumentsRaw(fmt.Sprintf(`%[1]s(%[2]s) RETURN %[2]s`, function.ID().Name(), dataType.ToLegacyDataTypeSql())). HasDescription(sdk.DefaultFunctionComment). - HasCatalogName(fmt.Sprintf(`"%s"`, id.DatabaseName())). + HasCatalogName(id.DatabaseName()). HasIsTableFunction(false). HasValidForClustering(false). HasIsSecure(false). @@ -1059,7 +1059,7 @@ func TestInt_Functions(t *testing.T) { WithOrReplace(true). WithArguments([]sdk.FunctionArgumentRequest{*argument}). WithCopyGrants(true). - WithNullInputBehavior(*sdk.NullInputBehaviorPointer(sdk.NullInputBehaviorReturnNullInput)). + WithNullInputBehavior(*sdk.NullInputBehaviorPointer(sdk.NullInputBehaviorReturnsNullInput)). WithReturnResultsBehavior(sdk.ReturnResultsBehaviorImmutable). WithReturnNullValues(sdk.ReturnNullValuesNotNull). 
WithComment("comment"). @@ -1081,7 +1081,7 @@ func TestInt_Functions(t *testing.T) { assertions.AssertThatObject(t, objectassert.FunctionFromObject(t, function). HasCreatedOnNotEmpty(). HasName(id.Name()). - HasSchemaName(fmt.Sprintf(`"%s"`, id.SchemaName())). + HasSchemaName(id.SchemaName()). HasIsBuiltin(false). HasIsAggregate(false). HasIsAnsi(false). @@ -1090,13 +1090,13 @@ func TestInt_Functions(t *testing.T) { HasArgumentsOld([]sdk.DataType{sdk.LegacyDataTypeFrom(dataType)}). HasArgumentsRaw(fmt.Sprintf(`%[1]s(%[2]s) RETURN %[2]s`, function.ID().Name(), dataType.ToLegacyDataTypeSql())). HasDescription("comment"). - HasCatalogName(fmt.Sprintf(`"%s"`, id.DatabaseName())). + HasCatalogName(id.DatabaseName()). HasIsTableFunction(false). HasValidForClustering(false). HasIsSecure(false). HasIsExternalFunction(false). - HasExternalAccessIntegrations(fmt.Sprintf(`[%s]`, externalAccessIntegration.FullyQualifiedName())). - HasSecrets(fmt.Sprintf(`{"abc":"\"%s\".\"%s\".%s"}`, secretId.DatabaseName(), secretId.SchemaName(), secretId.Name())). + HasExactlyExternalAccessIntegrations(externalAccessIntegration). + HasExactlySecrets(map[string]sdk.SchemaObjectIdentifier{"abc": secretId}). HasLanguage("SCALA"). HasIsMemoizable(false). HasIsDataMetric(false), @@ -1107,10 +1107,10 @@ func TestInt_Functions(t *testing.T) { HasReturns(fmt.Sprintf(`%s NOT NULL`, dataType.ToSql())). HasLanguage("SCALA"). HasBodyNil(). - HasNullHandling(string(sdk.NullInputBehaviorReturnNullInput)). + HasNullHandling(string(sdk.NullInputBehaviorReturnsNullInput)). HasVolatility(string(sdk.ReturnResultsBehaviorImmutable)). - HasExternalAccessIntegrations(fmt.Sprintf(`[%s]`, externalAccessIntegration.FullyQualifiedName())). - HasSecrets(fmt.Sprintf(`{"abc":"\"%s\".\"%s\".%s"}`, secretId.DatabaseName(), secretId.SchemaName(), secretId.Name())). + HasExactlyExternalAccessIntegrations(externalAccessIntegration). + HasExactlySecrets(map[string]sdk.SchemaObjectIdentifier{"abc": secretId}). HasImports(fmt.Sprintf(`[%s]`, tmpJavaFunction.JarLocation())). HasHandler(handler). HasRuntimeVersion("2.12"). @@ -1148,7 +1148,7 @@ func TestInt_Functions(t *testing.T) { assertions.AssertThatObject(t, objectassert.FunctionFromObject(t, function). HasCreatedOnNotEmpty(). HasName(id.Name()). - HasSchemaName(fmt.Sprintf(`"%s"`, id.SchemaName())). + HasSchemaName(id.SchemaName()). HasIsBuiltin(false). HasIsAggregate(false). HasIsAnsi(false). @@ -1157,7 +1157,7 @@ func TestInt_Functions(t *testing.T) { HasArgumentsOld([]sdk.DataType{sdk.LegacyDataTypeFrom(dataType)}). HasArgumentsRaw(fmt.Sprintf(`%[1]s(%[2]s) RETURN %[2]s`, function.ID().Name(), dataType.ToLegacyDataTypeSql())). HasDescription(sdk.DefaultFunctionComment). - HasCatalogName(fmt.Sprintf(`"%s"`, id.DatabaseName())). + HasCatalogName(id.DatabaseName()). HasIsTableFunction(false). HasValidForClustering(false). HasIsSecure(false). @@ -1221,7 +1221,7 @@ func TestInt_Functions(t *testing.T) { assertions.AssertThatObject(t, objectassert.FunctionFromObject(t, function). HasCreatedOnNotEmpty(). HasName(id.Name()). - HasSchemaName(fmt.Sprintf(`"%s"`, id.SchemaName())). + HasSchemaName(id.SchemaName()). HasIsBuiltin(false). HasIsAggregate(false). HasIsAnsi(false). @@ -1230,7 +1230,7 @@ func TestInt_Functions(t *testing.T) { HasArgumentsOld([]sdk.DataType{sdk.LegacyDataTypeFrom(dataType)}). HasArgumentsRaw(fmt.Sprintf(`%[1]s(%[2]s) RETURN %[2]s`, function.ID().Name(), dataType.ToLegacyDataTypeSql())). HasDescription("comment"). - HasCatalogName(fmt.Sprintf(`"%s"`, id.DatabaseName())). 
+ HasCatalogName(id.DatabaseName()). HasIsTableFunction(false). HasValidForClustering(false). HasIsSecure(false). @@ -1287,7 +1287,7 @@ func TestInt_Functions(t *testing.T) { assertions.AssertThatObject(t, objectassert.FunctionFromObject(t, function). HasCreatedOnNotEmpty(). HasName(id.Name()). - HasSchemaName(fmt.Sprintf(`"%s"`, id.SchemaName())). + HasSchemaName(id.SchemaName()). HasIsBuiltin(false). HasIsAggregate(false). HasIsAnsi(false). @@ -1296,7 +1296,7 @@ func TestInt_Functions(t *testing.T) { HasArgumentsOld([]sdk.DataType{}). HasArgumentsRaw(fmt.Sprintf(`%[1]s() RETURN %[2]s`, function.ID().Name(), dataType.ToLegacyDataTypeSql())). HasDescription(sdk.DefaultFunctionComment). - HasCatalogName(fmt.Sprintf(`"%s"`, id.DatabaseName())). + HasCatalogName(id.DatabaseName()). HasIsTableFunction(false). HasValidForClustering(false). HasIsSecure(false). @@ -1420,8 +1420,8 @@ func TestInt_Functions(t *testing.T) { ) assertions.AssertThatObject(t, objectassert.FunctionDetails(t, id). - HasExternalAccessIntegrations(fmt.Sprintf(`[%s]`, externalAccessIntegration.FullyQualifiedName())). - HasSecrets(fmt.Sprintf(`{"abc":"\"%s\".\"%s\".%s"}`, secretId.DatabaseName(), secretId.SchemaName(), secretId.Name())), + HasExactlyExternalAccessIntegrations(externalAccessIntegration). + HasExactlySecrets(map[string]sdk.SchemaObjectIdentifier{"abc": secretId}), ) assertParametersSet(t, objectparametersassert.FunctionParameters(t, id)) @@ -1442,8 +1442,8 @@ func TestInt_Functions(t *testing.T) { assertions.AssertThatObject(t, objectassert.Function(t, id). HasName(id.Name()). HasDescription(sdk.DefaultFunctionComment). - HasExternalAccessIntegrations("[]"). - HasSecrets(fmt.Sprintf(`{"abc":"\"%s\".\"%s\".%s"}`, secretId.DatabaseName(), secretId.SchemaName(), secretId.Name())), + HasExactlyExternalAccessIntegrations(). + HasExactlySecrets(map[string]sdk.SchemaObjectIdentifier{"abc": secretId}), ) assertions.AssertThatObject(t, objectassert.FunctionDetails(t, id). 
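Reviewer note: the reworked ALTER validation in procedures_validations_gen.go above enforces exactly one of RenameTo, Set, Unset, SetTags, UnsetTags, and ExecuteAs, plus at least one field inside Set (or Unset). Below is a minimal in-package sketch of options that satisfy those rules — not part of this patch; the identifier values are hypothetical, and it assumes the field types mirror the generated copies above and that the package's existing String/Pointer helpers apply:

// Sketch only (not part of this patch): exactly one top-level action (Set) is used,
// and Set carries at least one field, so validate() should return nil.
opts := &AlterProcedureOptions{
	name: NewSchemaObjectIdentifierWithArguments("db", "schema", "proc", DataTypeVARCHAR),
	Set: &ProcedureSet{
		Comment:  String("updated comment"),
		LogLevel: Pointer(LogLevelWarn),
	},
}
if err := opts.validate(); err != nil {
	panic(err) // not reached: setting e.g. RenameTo as well would trip errExactlyOneOf instead
}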
diff --git a/pkg/sdk/testint/procedures_integration_test.go b/pkg/sdk/testint/procedures_integration_test.go index 4543791c4d..2a69ef42c2 100644 --- a/pkg/sdk/testint/procedures_integration_test.go +++ b/pkg/sdk/testint/procedures_integration_test.go @@ -3,8 +3,16 @@ package testint import ( "errors" "fmt" + "strings" "testing" + "time" + assertions "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/bettertestspoc/assert" + + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/bettertestspoc/assert/objectassert" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/bettertestspoc/assert/objectparametersassert" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/helpers/random" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/testdatatypes" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/internal/collections" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk/datatypes" @@ -12,59 +20,1203 @@ import ( "github.com/stretchr/testify/require" ) -// todo: add tests for: -// - creating procedure with different languages from stages - -func TestInt_CreateProcedures(t *testing.T) { +// TODO [SNOW-1850370]: the 'ExtendedIn' struct for procedures does not support the keyword "CLASS" yet +// TODO [SNOW-1850370]: Call/CreateAndCall methods were not updated before V1 because we are not using them +func TestInt_Procedures(t *testing.T) { client := testClient(t) ctx := testContext(t) - cleanupProcedureHandle := func(id sdk.SchemaObjectIdentifierWithArguments) func() { - return func() { - err := client.Procedures.Drop(ctx, sdk.NewDropProcedureRequest(id)) - if errors.Is(err, sdk.ErrObjectNotExistOrAuthorized) { - return - } - require.NoError(t, err) - } + secretId := testClientHelper().Ids.RandomSchemaObjectIdentifier() + + networkRule, networkRuleCleanup := testClientHelper().NetworkRule.Create(t) + t.Cleanup(networkRuleCleanup) + + secret, secretCleanup := testClientHelper().Secret.CreateWithGenericString(t, secretId, "test_secret_string") + t.Cleanup(secretCleanup) + + externalAccessIntegration, externalAccessIntegrationCleanup := testClientHelper().ExternalAccessIntegration.CreateExternalAccessIntegrationWithNetworkRuleAndSecret(t, networkRule.ID(), secret.ID()) + t.Cleanup(externalAccessIntegrationCleanup) + + tmpJavaProcedure := testClientHelper().CreateSampleJavaProcedureAndJar(t) + tmpPythonFunction := testClientHelper().CreateSamplePythonFunctionAndModule(t) + + assertParametersSet := func(t *testing.T, procedureParametersAssert *objectparametersassert.ProcedureParametersAssert) { + t.Helper() + assertions.AssertThatObject(t, procedureParametersAssert. + // TODO [SNOW-1850370]: setting any value ends with invalid value [OFF] for parameter 'AUTO_EVENT_LOGGING' + // HasAutoEventLogging(sdk.AutoEventLoggingTracing). + HasEnableConsoleOutput(true). + HasLogLevel(sdk.LogLevelWarn). + HasMetricLevel(sdk.MetricLevelAll).
+ HasTraceLevel(sdk.TraceLevelAlways), + ) } - t.Run("create procedure for Java: returns result data type", func(t *testing.T) { - // https://docs.snowflake.com/en/developer-guide/stored-procedure/stored-procedures-java#reading-a-dynamically-specified-file-with-inputstream - name := "file_reader_java_proc_snowflakefile" - id := testClientHelper().Ids.NewSchemaObjectIdentifierWithArguments(name, sdk.DataTypeVARCHAR) + t.Run("create procedure for Java - inline minimal", func(t *testing.T) { + className := "TestFunc" + funcName := "echoVarchar" + argName := "x" + dataType := testdatatypes.DataTypeVarchar_100 - definition := ` - import java.io.InputStream; - import java.io.IOException; - import java.nio.charset.StandardCharsets; - import com.snowflake.snowpark_java.types.SnowflakeFile; - import com.snowflake.snowpark_java.Session; - class FileReader { - public String execute(Session session, String fileName) throws IOException { - InputStream input = SnowflakeFile.newInstance(fileName).getInputStream(); - return new String(input.readAllBytes(), StandardCharsets.UTF_8); - } - }` + id := testClientHelper().Ids.RandomSchemaObjectIdentifierWithArguments(sdk.LegacyDataTypeFrom(dataType)) + argument := sdk.NewProcedureArgumentRequest(argName, dataType) + dt := sdk.NewProcedureReturnsResultDataTypeRequest(dataType) + returns := sdk.NewProcedureReturnsRequest().WithResultDataType(*dt) + handler := fmt.Sprintf("%s.%s", className, funcName) + definition := testClientHelper().Procedure.SampleJavaDefinition(t, className, funcName, argName) + packages := []sdk.ProcedurePackageRequest{*sdk.NewProcedurePackageRequest("com.snowflake:snowpark:1.14.0")} - dt := sdk.NewProcedureReturnsResultDataTypeRequest(nil).WithResultDataTypeOld(sdk.DataTypeVARCHAR) + request := sdk.NewCreateForJavaProcedureRequest(id.SchemaObjectId(), *returns, "11", packages, handler). + WithArguments([]sdk.ProcedureArgumentRequest{*argument}). + WithProcedureDefinitionWrapped(definition) + + err := client.Procedures.CreateForJava(ctx, request) + require.NoError(t, err) + t.Cleanup(testClientHelper().Procedure.DropProcedureFunc(t, id)) + + procedure, err := client.Procedures.ShowByID(ctx, id) + require.NoError(t, err) + + assertions.AssertThatObject(t, objectassert.ProcedureFromObject(t, procedure). + HasCreatedOnNotEmpty(). + HasName(id.Name()). + HasSchemaName(id.SchemaName()). + HasIsBuiltin(false). + HasIsAggregate(false). + HasIsAnsi(false). + HasMinNumArguments(1). + HasMaxNumArguments(1). + HasArgumentsOld([]sdk.DataType{sdk.LegacyDataTypeFrom(dataType)}). + HasArgumentsRaw(fmt.Sprintf(`%[1]s(%[2]s) RETURN %[2]s`, procedure.ID().Name(), dataType.ToLegacyDataTypeSql())). + HasDescription(sdk.DefaultProcedureComment). + HasCatalogName(id.DatabaseName()). + HasIsTableFunction(false). + HasValidForClustering(false). + HasIsSecure(false). + HasExternalAccessIntegrationsNil(). + HasSecretsNil(), + ) + + assertions.AssertThatObject(t, objectassert.ProcedureDetails(t, procedure.ID()). + HasSignature(fmt.Sprintf(`(%s %s)`, argName, dataType.ToLegacyDataTypeSql())). + HasReturns(dataType.ToSql()). + HasLanguage("JAVA"). + HasBody(definition). + HasNullHandling(string(sdk.NullInputBehaviorCalledOnNullInput)). + HasVolatility(string(sdk.ReturnResultsBehaviorVolatile)). + HasExternalAccessIntegrationsNil(). + HasSecretsNil(). + HasImports(`[]`). + HasHandler(handler). + HasRuntimeVersion("11"). + HasPackages(`[com.snowflake:snowpark:1.14.0]`). + HasTargetPathNil(). + HasInstalledPackagesNil(). 
+ HasExecuteAs("OWNER"), + ) + + assertions.AssertThatObject(t, objectparametersassert.ProcedureParameters(t, id). + HasAllDefaults(). + HasAllDefaultsExplicit(), + ) + }) + + t.Run("create procedure for Java - inline full", func(t *testing.T) { + className := "TestFunc" + funcName := "echoVarchar" + argName := "x" + dataType := testdatatypes.DataTypeVarchar_100 + + id := testClientHelper().Ids.RandomSchemaObjectIdentifierWithArguments(sdk.LegacyDataTypeFrom(dataType)) + argument := sdk.NewProcedureArgumentRequest(argName, dataType) + dt := sdk.NewProcedureReturnsResultDataTypeRequest(dataType). + WithNotNull(true) returns := sdk.NewProcedureReturnsRequest().WithResultDataType(*dt) - argument := sdk.NewProcedureArgumentRequest("input", nil).WithArgDataTypeOld(sdk.DataTypeVARCHAR) - packages := []sdk.ProcedurePackageRequest{*sdk.NewProcedurePackageRequest("com.snowflake:snowpark:latest")} - request := sdk.NewCreateForJavaProcedureRequest(id.SchemaObjectId(), *returns, "11", packages, "FileReader.execute"). + handler := fmt.Sprintf("%s.%s", className, funcName) + definition := testClientHelper().Procedure.SampleJavaDefinition(t, className, funcName, argName) + jarName := fmt.Sprintf("tf-%d-%s.jar", time.Now().Unix(), random.AlphaN(5)) + targetPath := fmt.Sprintf("@~/%s", jarName) + packages := []sdk.ProcedurePackageRequest{ + *sdk.NewProcedurePackageRequest("com.snowflake:snowpark:1.14.0"), + *sdk.NewProcedurePackageRequest("com.snowflake:telemetry:0.1.0"), + } + + request := sdk.NewCreateForJavaProcedureRequest(id.SchemaObjectId(), *returns, "11", packages, handler). WithOrReplace(true). WithArguments([]sdk.ProcedureArgumentRequest{*argument}). - WithProcedureDefinition(definition) + WithCopyGrants(true). + WithNullInputBehavior(*sdk.NullInputBehaviorPointer(sdk.NullInputBehaviorReturnsNullInput)). + WithReturnResultsBehavior(sdk.ReturnResultsBehaviorImmutable). + WithComment("comment"). + WithImports([]sdk.ProcedureImportRequest{*sdk.NewProcedureImportRequest(tmpJavaProcedure.JarLocation())}). + WithExternalAccessIntegrations([]sdk.AccountObjectIdentifier{externalAccessIntegration}). + WithSecrets([]sdk.SecretReference{{VariableName: "abc", Name: secretId}}). + WithTargetPath(targetPath). + WithProcedureDefinitionWrapped(definition) + err := client.Procedures.CreateForJava(ctx, request) require.NoError(t, err) - t.Cleanup(cleanupProcedureHandle(id)) + t.Cleanup(testClientHelper().Procedure.DropProcedureFunc(t, id)) + t.Cleanup(testClientHelper().Stage.RemoveFromUserStageFunc(t, jarName)) + + function, err := client.Procedures.ShowByID(ctx, id) + require.NoError(t, err) + + assertions.AssertThatObject(t, objectassert.ProcedureFromObject(t, function). + HasCreatedOnNotEmpty(). + HasName(id.Name()). + HasSchemaName(id.SchemaName()). + HasIsBuiltin(false). + HasIsAggregate(false). + HasIsAnsi(false). + HasMinNumArguments(1). + HasMaxNumArguments(1). + HasArgumentsOld([]sdk.DataType{sdk.LegacyDataTypeFrom(dataType)}). + HasArgumentsRaw(fmt.Sprintf(`%[1]s(%[2]s) RETURN %[2]s`, function.ID().Name(), dataType.ToLegacyDataTypeSql())). + HasDescription("comment"). + HasCatalogName(id.DatabaseName()). + HasIsTableFunction(false). + HasValidForClustering(false). + HasIsSecure(false). + // TODO [SNOW-1850370]: apparently external access integrations and secrets are not filled out correctly for procedures + HasExternalAccessIntegrationsNil(). + HasSecretsNil(), + ) + + assertions.AssertThatObject(t, objectassert.ProcedureDetails(t, function.ID()). 
+ HasSignature(fmt.Sprintf(`(%s %s)`, argName, dataType.ToLegacyDataTypeSql())). + HasReturns(fmt.Sprintf(`%s NOT NULL`, dataType.ToSql())). + HasLanguage("JAVA"). + HasBody(definition). + HasNullHandling(string(sdk.NullInputBehaviorReturnsNullInput)). + HasVolatility(string(sdk.ReturnResultsBehaviorImmutable)). + HasExactlyExternalAccessIntegrations(externalAccessIntegration). + HasExactlySecrets(map[string]sdk.SchemaObjectIdentifier{"abc": secretId}). + HasImports(fmt.Sprintf(`[%s]`, tmpJavaProcedure.JarLocation())). + HasHandler(handler). + HasRuntimeVersion("11"). + HasPackages(`[com.snowflake:snowpark:1.14.0,com.snowflake:telemetry:0.1.0]`). + HasTargetPath(targetPath). + HasInstalledPackagesNil(). + HasExecuteAs("OWNER"), + ) + + assertions.AssertThatObject(t, objectparametersassert.ProcedureParameters(t, id). + HasAllDefaults(). + HasAllDefaultsExplicit(), + ) + }) - procedures, err := client.Procedures.Show(ctx, sdk.NewShowProcedureRequest()) + t.Run("create procedure for Java - staged minimal", func(t *testing.T) { + dataType := tmpJavaProcedure.ArgType + id := testClientHelper().Ids.RandomSchemaObjectIdentifierWithArguments(sdk.LegacyDataTypeFrom(dataType)) + + argName := "x" + argument := sdk.NewProcedureArgumentRequest(argName, dataType) + dt := sdk.NewProcedureReturnsResultDataTypeRequest(dataType) + returns := sdk.NewProcedureReturnsRequest().WithResultDataType(*dt) + handler := tmpJavaProcedure.JavaHandler() + importPath := tmpJavaProcedure.JarLocation() + packages := []sdk.ProcedurePackageRequest{ + *sdk.NewProcedurePackageRequest("com.snowflake:snowpark:1.14.0"), + *sdk.NewProcedurePackageRequest("com.snowflake:telemetry:0.1.0"), + } + + requestStaged := sdk.NewCreateForJavaProcedureRequest(id.SchemaObjectId(), *returns, "11", packages, handler). + WithArguments([]sdk.ProcedureArgumentRequest{*argument}). + WithImports([]sdk.ProcedureImportRequest{*sdk.NewProcedureImportRequest(importPath)}) + + err := client.Procedures.CreateForJava(ctx, requestStaged) + require.NoError(t, err) + t.Cleanup(testClientHelper().Procedure.DropProcedureFunc(t, id)) + + function, err := client.Procedures.ShowByID(ctx, id) + require.NoError(t, err) + + assertions.AssertThatObject(t, objectassert.ProcedureFromObject(t, function). + HasCreatedOnNotEmpty(). + HasName(id.Name()). + HasSchemaName(id.SchemaName()). + HasIsBuiltin(false). + HasIsAggregate(false). + HasIsAnsi(false). + HasMinNumArguments(1). + HasMaxNumArguments(1). + HasArgumentsOld([]sdk.DataType{sdk.LegacyDataTypeFrom(dataType)}). + HasArgumentsRaw(fmt.Sprintf(`%[1]s(%[2]s) RETURN %[2]s`, function.ID().Name(), dataType.ToLegacyDataTypeSql())). + HasDescription(sdk.DefaultProcedureComment). + HasCatalogName(id.DatabaseName()). + HasIsTableFunction(false). + HasValidForClustering(false). + HasIsSecure(false). + HasExternalAccessIntegrationsNil(). + HasSecretsNil(), + ) + + assertions.AssertThatObject(t, objectassert.ProcedureDetails(t, function.ID()). + HasSignature(fmt.Sprintf(`(%s %s)`, argName, dataType.ToLegacyDataTypeSql())). + HasReturns(dataType.ToSql()). + HasLanguage("JAVA"). + HasBodyNil(). + HasNullHandling(string(sdk.NullInputBehaviorCalledOnNullInput)). + HasVolatility(string(sdk.ReturnResultsBehaviorVolatile)). + HasExternalAccessIntegrationsNil(). + HasSecretsNil(). + HasImports(fmt.Sprintf(`[%s]`, importPath)). + HasHandler(handler). + HasRuntimeVersion("11"). + HasPackages(`[com.snowflake:snowpark:1.14.0,com.snowflake:telemetry:0.1.0]`). + HasTargetPathNil(). + HasInstalledPackagesNil(). 
+ HasExecuteAs("OWNER"), + ) + + assertions.AssertThatObject(t, objectparametersassert.ProcedureParameters(t, id). + HasAllDefaults(). + HasAllDefaultsExplicit(), + ) + }) + + t.Run("create procedure for Java - staged full", func(t *testing.T) { + dataType := tmpJavaProcedure.ArgType + id := testClientHelper().Ids.RandomSchemaObjectIdentifierWithArguments(sdk.LegacyDataTypeFrom(dataType)) + + argName := "x" + argument := sdk.NewProcedureArgumentRequest(argName, dataType) + dt := sdk.NewProcedureReturnsResultDataTypeRequest(dataType). + WithNotNull(true) + returns := sdk.NewProcedureReturnsRequest().WithResultDataType(*dt) + handler := tmpJavaProcedure.JavaHandler() + packages := []sdk.ProcedurePackageRequest{ + *sdk.NewProcedurePackageRequest("com.snowflake:snowpark:1.14.0"), + *sdk.NewProcedurePackageRequest("com.snowflake:telemetry:0.1.0"), + } + + requestStaged := sdk.NewCreateForJavaProcedureRequest(id.SchemaObjectId(), *returns, "11", packages, handler). + WithOrReplace(true). + WithArguments([]sdk.ProcedureArgumentRequest{*argument}). + WithCopyGrants(true). + WithNullInputBehavior(*sdk.NullInputBehaviorPointer(sdk.NullInputBehaviorReturnsNullInput)). + WithReturnResultsBehavior(sdk.ReturnResultsBehaviorImmutable). + WithComment("comment"). + WithImports([]sdk.ProcedureImportRequest{*sdk.NewProcedureImportRequest(tmpJavaProcedure.JarLocation())}). + WithExternalAccessIntegrations([]sdk.AccountObjectIdentifier{externalAccessIntegration}). + WithSecrets([]sdk.SecretReference{{VariableName: "abc", Name: secretId}}) + + err := client.Procedures.CreateForJava(ctx, requestStaged) + require.NoError(t, err) + t.Cleanup(testClientHelper().Procedure.DropProcedureFunc(t, id)) + + function, err := client.Procedures.ShowByID(ctx, id) + require.NoError(t, err) + + assertions.AssertThatObject(t, objectassert.ProcedureFromObject(t, function). + HasCreatedOnNotEmpty(). + HasName(id.Name()). + HasSchemaName(id.SchemaName()). + HasIsBuiltin(false). + HasIsAggregate(false). + HasIsAnsi(false). + HasMinNumArguments(1). + HasMaxNumArguments(1). + HasArgumentsOld([]sdk.DataType{sdk.LegacyDataTypeFrom(dataType)}). + HasArgumentsRaw(fmt.Sprintf(`%[1]s(%[2]s) RETURN %[2]s`, function.ID().Name(), dataType.ToLegacyDataTypeSql())). + HasDescription("comment"). + HasCatalogName(id.DatabaseName()). + HasIsTableFunction(false). + HasValidForClustering(false). + HasIsSecure(false). + HasExternalAccessIntegrationsNil(). + HasSecretsNil(), + ) + + assertions.AssertThatObject(t, objectassert.ProcedureDetails(t, function.ID()). + HasSignature(fmt.Sprintf(`(%s %s)`, argName, dataType.ToLegacyDataTypeSql())). + HasReturns(fmt.Sprintf(`%s NOT NULL`, dataType.ToSql())). + HasLanguage("JAVA"). + HasBodyNil(). + HasNullHandling(string(sdk.NullInputBehaviorReturnsNullInput)). + HasVolatility(string(sdk.ReturnResultsBehaviorImmutable)). + HasExactlyExternalAccessIntegrations(externalAccessIntegration). + HasExactlySecrets(map[string]sdk.SchemaObjectIdentifier{"abc": secretId}). + HasImports(fmt.Sprintf(`[%s]`, tmpJavaProcedure.JarLocation())). + HasHandler(handler). + HasRuntimeVersion("11"). + HasPackages(`[com.snowflake:snowpark:1.14.0,com.snowflake:telemetry:0.1.0]`). + HasTargetPathNil(). + HasInstalledPackagesNil(). + HasExecuteAs("OWNER"), + ) + + assertions.AssertThatObject(t, objectparametersassert.ProcedureParameters(t, id). + HasAllDefaults(). 
+ HasAllDefaultsExplicit(), + ) + }) + + t.Run("create procedure for Javascript - inline minimal", func(t *testing.T) { + dataType := testdatatypes.DataTypeFloat + id := testClientHelper().Ids.RandomSchemaObjectIdentifierWithArguments(sdk.LegacyDataTypeFrom(dataType)) + + argName := "d" + definition := testClientHelper().Procedure.SampleJavascriptDefinition(t, argName) + argument := sdk.NewProcedureArgumentRequest(argName, dataType) + + request := sdk.NewCreateForJavaScriptProcedureRequestDefinitionWrapped(id.SchemaObjectId(), dataType, definition). + WithArguments([]sdk.ProcedureArgumentRequest{*argument}) + + err := client.Procedures.CreateForJavaScript(ctx, request) require.NoError(t, err) - require.GreaterOrEqual(t, len(procedures), 1) + t.Cleanup(testClientHelper().Procedure.DropProcedureFunc(t, id)) + + function, err := client.Procedures.ShowByID(ctx, id) + require.NoError(t, err) + + assertions.AssertThatObject(t, objectassert.ProcedureFromObject(t, function). + HasCreatedOnNotEmpty(). + HasName(id.Name()). + HasSchemaName(id.SchemaName()). + HasIsBuiltin(false). + HasIsAggregate(false). + HasIsAnsi(false). + HasMinNumArguments(1). + HasMaxNumArguments(1). + HasArgumentsOld([]sdk.DataType{sdk.LegacyDataTypeFrom(dataType)}). + HasArgumentsRaw(fmt.Sprintf(`%[1]s(%[2]s) RETURN %[2]s`, function.ID().Name(), dataType.ToLegacyDataTypeSql())). + HasDescription(sdk.DefaultProcedureComment). + HasCatalogName(id.DatabaseName()). + HasIsTableFunction(false). + HasValidForClustering(false). + HasIsSecure(false). + HasExternalAccessIntegrationsNil(). + HasSecretsNil(), + ) + + assertions.AssertThatObject(t, objectassert.ProcedureDetails(t, function.ID()). + HasSignature(fmt.Sprintf(`(%s %s)`, argName, dataType.ToLegacyDataTypeSql())). + HasReturns(dataType.ToSql()). + HasLanguage("JAVASCRIPT"). + HasBody(definition). + HasNullHandling(string(sdk.NullInputBehaviorCalledOnNullInput)). + HasVolatility(string(sdk.ReturnResultsBehaviorVolatile)). + HasExternalAccessIntegrationsNil(). + HasSecretsNil(). + HasImportsNil(). + HasHandlerNil(). + HasRuntimeVersionNil(). + HasPackagesNil(). + HasTargetPathNil(). + HasInstalledPackagesNil(). + HasExecuteAs("OWNER"), + ) + + assertions.AssertThatObject(t, objectparametersassert.ProcedureParameters(t, id). + HasAllDefaults(). + HasAllDefaultsExplicit(), + ) + }) + + t.Run("create procedure for Javascript - inline full", func(t *testing.T) { + dataType := testdatatypes.DataTypeFloat + id := testClientHelper().Ids.RandomSchemaObjectIdentifierWithArguments(sdk.LegacyDataTypeFrom(dataType)) + + argName := "d" + definition := testClientHelper().Procedure.SampleJavascriptDefinition(t, argName) + argument := sdk.NewProcedureArgumentRequest(argName, dataType) + request := sdk.NewCreateForJavaScriptProcedureRequestDefinitionWrapped(id.SchemaObjectId(), dataType, definition). + WithOrReplace(true). + WithArguments([]sdk.ProcedureArgumentRequest{*argument}). + WithCopyGrants(true). + WithNotNull(true). + WithNullInputBehavior(*sdk.NullInputBehaviorPointer(sdk.NullInputBehaviorReturnsNullInput)). + WithReturnResultsBehavior(sdk.ReturnResultsBehaviorImmutable). + WithExecuteAs(sdk.ExecuteAsCaller). + WithComment("comment") + + err := client.Procedures.CreateForJavaScript(ctx, request) + require.NoError(t, err) + t.Cleanup(testClientHelper().Procedure.DropProcedureFunc(t, id)) + + function, err := client.Procedures.ShowByID(ctx, id) + require.NoError(t, err) + + assertions.AssertThatObject(t, objectassert.ProcedureFromObject(t, function). + HasCreatedOnNotEmpty(). 
+ HasName(id.Name()). + HasSchemaName(id.SchemaName()). + HasIsBuiltin(false). + HasIsAggregate(false). + HasIsAnsi(false). + HasMinNumArguments(1). + HasMaxNumArguments(1). + HasArgumentsOld([]sdk.DataType{sdk.LegacyDataTypeFrom(dataType)}). + HasArgumentsRaw(fmt.Sprintf(`%[1]s(%[2]s) RETURN %[2]s`, function.ID().Name(), dataType.ToLegacyDataTypeSql())). + HasDescription("comment"). + HasCatalogName(id.DatabaseName()). + HasIsTableFunction(false). + HasValidForClustering(false). + HasIsSecure(false). + HasExternalAccessIntegrationsNil(). + HasSecretsNil(), + ) + + assertions.AssertThatObject(t, objectassert.ProcedureDetails(t, function.ID()). + HasSignature(fmt.Sprintf(`(%s %s)`, argName, dataType.ToLegacyDataTypeSql())). + HasReturns(fmt.Sprintf(`%s NOT NULL`, dataType.ToSql())). + HasLanguage("JAVASCRIPT"). + HasBody(definition). + HasNullHandling(string(sdk.NullInputBehaviorReturnsNullInput)). + HasVolatility(string(sdk.ReturnResultsBehaviorImmutable)). + HasExternalAccessIntegrationsNil(). + HasSecretsNil(). + HasImportsNil(). + HasHandlerNil(). + HasRuntimeVersionNil(). + HasPackagesNil(). + HasTargetPathNil(). + HasInstalledPackagesNil(). + HasExecuteAs("CALLER"), + ) + + assertions.AssertThatObject(t, objectparametersassert.ProcedureParameters(t, id). + HasAllDefaults(). + HasAllDefaultsExplicit(), + ) + }) + + t.Run("create procedure for Python - inline minimal", func(t *testing.T) { + dataType := testdatatypes.DataTypeNumber_36_2 + id := testClientHelper().Ids.RandomSchemaObjectIdentifierWithArguments(sdk.LegacyDataTypeFrom(dataType)) + + argName := "i" + funcName := "dump" + definition := testClientHelper().Procedure.SamplePythonDefinition(t, funcName, argName) + dt := sdk.NewProcedureReturnsResultDataTypeRequest(dataType) + returns := sdk.NewProcedureReturnsRequest().WithResultDataType(*dt) + argument := sdk.NewProcedureArgumentRequest(argName, dataType) + packages := []sdk.ProcedurePackageRequest{ + *sdk.NewProcedurePackageRequest("snowflake-snowpark-python==1.14.0"), + } + request := sdk.NewCreateForPythonProcedureRequest(id.SchemaObjectId(), *returns, "3.8", packages, funcName). + WithArguments([]sdk.ProcedureArgumentRequest{*argument}). + WithProcedureDefinitionWrapped(definition) + + err := client.Procedures.CreateForPython(ctx, request) + require.NoError(t, err) + t.Cleanup(testClientHelper().Procedure.DropProcedureFunc(t, id)) + + function, err := client.Procedures.ShowByID(ctx, id) + require.NoError(t, err) + + assertions.AssertThatObject(t, objectassert.ProcedureFromObject(t, function). + HasCreatedOnNotEmpty(). + HasName(id.Name()). + HasSchemaName(id.SchemaName()). + HasIsBuiltin(false). + HasIsAggregate(false). + HasIsAnsi(false). + HasMinNumArguments(1). + HasMaxNumArguments(1). + HasArgumentsOld([]sdk.DataType{sdk.LegacyDataTypeFrom(dataType)}). + HasArgumentsRaw(fmt.Sprintf(`%[1]s(%[2]s) RETURN %[2]s`, function.ID().Name(), dataType.ToLegacyDataTypeSql())). + HasDescription(sdk.DefaultProcedureComment). + HasCatalogName(id.DatabaseName()). + HasIsTableFunction(false). + HasValidForClustering(false). + HasIsSecure(false). + HasExternalAccessIntegrationsNil(). + HasSecretsNil(), + ) + + assertions.AssertThatObject(t, objectassert.ProcedureDetails(t, function.ID()). + HasSignature(fmt.Sprintf(`(%s %s)`, argName, dataType.ToLegacyDataTypeSql())). + HasReturns(strings.ReplaceAll(dataType.ToSql(), " ", "")). + HasLanguage("PYTHON"). + HasBody(definition). + HasNullHandling(string(sdk.NullInputBehaviorCalledOnNullInput)). 
+ HasVolatility(string(sdk.ReturnResultsBehaviorVolatile)). + HasExternalAccessIntegrationsNil(). + HasSecretsNil(). + HasImports(`[]`). + HasHandler(funcName). + HasRuntimeVersion("3.8"). + HasPackages(`['snowflake-snowpark-python==1.14.0']`). + HasTargetPathNil(). + HasInstalledPackagesNotEmpty(). + HasExecuteAs("OWNER"), + ) + + assertions.AssertThatObject(t, objectparametersassert.ProcedureParameters(t, id). + HasAllDefaults(). + HasAllDefaultsExplicit(), + ) + }) + + t.Run("create procedure for Python - inline full", func(t *testing.T) { + dataType := testdatatypes.DataTypeNumber_36_2 + id := testClientHelper().Ids.RandomSchemaObjectIdentifierWithArguments(sdk.LegacyDataTypeFrom(dataType)) + + argName := "i" + funcName := "dump" + definition := testClientHelper().Procedure.SamplePythonDefinition(t, funcName, argName) + dt := sdk.NewProcedureReturnsResultDataTypeRequest(dataType). + WithNotNull(true) + returns := sdk.NewProcedureReturnsRequest().WithResultDataType(*dt) + argument := sdk.NewProcedureArgumentRequest(argName, dataType) + packages := []sdk.ProcedurePackageRequest{ + *sdk.NewProcedurePackageRequest("snowflake-snowpark-python==1.14.0"), + *sdk.NewProcedurePackageRequest("absl-py==0.10.0"), + } + + request := sdk.NewCreateForPythonProcedureRequest(id.SchemaObjectId(), *returns, "3.8", packages, funcName). + WithOrReplace(true). + WithArguments([]sdk.ProcedureArgumentRequest{*argument}). + WithCopyGrants(true). + WithNullInputBehavior(*sdk.NullInputBehaviorPointer(sdk.NullInputBehaviorReturnsNullInput)). + WithReturnResultsBehavior(sdk.ReturnResultsBehaviorImmutable). + WithComment("comment"). + WithImports([]sdk.ProcedureImportRequest{*sdk.NewProcedureImportRequest(tmpPythonFunction.PythonModuleLocation())}). + WithExternalAccessIntegrations([]sdk.AccountObjectIdentifier{externalAccessIntegration}). + WithSecrets([]sdk.SecretReference{{VariableName: "abc", Name: secretId}}). + WithExecuteAs(sdk.ExecuteAsCaller). + WithProcedureDefinitionWrapped(definition) + + err := client.Procedures.CreateForPython(ctx, request) + require.NoError(t, err) + t.Cleanup(testClientHelper().Procedure.DropProcedureFunc(t, id)) + + function, err := client.Procedures.ShowByID(ctx, id) + require.NoError(t, err) + + assertions.AssertThatObject(t, objectassert.ProcedureFromObject(t, function). + HasCreatedOnNotEmpty(). + HasName(id.Name()). + HasSchemaName(id.SchemaName()). + HasIsBuiltin(false). + HasIsAggregate(false). + HasIsAnsi(false). + HasMinNumArguments(1). + HasMaxNumArguments(1). + HasArgumentsOld([]sdk.DataType{sdk.LegacyDataTypeFrom(dataType)}). + HasArgumentsRaw(fmt.Sprintf(`%[1]s(%[2]s) RETURN %[2]s`, function.ID().Name(), dataType.ToLegacyDataTypeSql())). + HasDescription("comment"). + HasCatalogName(id.DatabaseName()). + HasIsTableFunction(false). + HasValidForClustering(false). + HasIsSecure(false). + HasExternalAccessIntegrationsNil(). + HasSecretsNil(), + ) + + assertions.AssertThatObject(t, objectassert.ProcedureDetails(t, function.ID()). + HasSignature(fmt.Sprintf(`(%s %s)`, argName, dataType.ToLegacyDataTypeSql())). + HasReturns(strings.ReplaceAll(dataType.ToSql(), " ", "")+" NOT NULL"). + HasLanguage("PYTHON"). + HasBody(definition). + HasNullHandling(string(sdk.NullInputBehaviorReturnsNullInput)). + HasVolatility(string(sdk.ReturnResultsBehaviorImmutable)). + HasExactlyExternalAccessIntegrations(externalAccessIntegration). + HasExactlySecrets(map[string]sdk.SchemaObjectIdentifier{"abc": secretId}). + HasImports(fmt.Sprintf(`[%s]`, tmpPythonFunction.PythonModuleLocation())). 
+ HasHandler(funcName). + HasRuntimeVersion("3.8"). + HasPackages(`['snowflake-snowpark-python==1.14.0','absl-py==0.10.0']`). + HasTargetPathNil(). + HasInstalledPackagesNotEmpty(). + HasExecuteAs("CALLER"), + ) + + assertions.AssertThatObject(t, objectparametersassert.ProcedureParameters(t, id). + HasAllDefaults(). + HasAllDefaultsExplicit(), + ) + }) + + t.Run("create procedure for Python - staged minimal", func(t *testing.T) { + dataType := testdatatypes.DataTypeVarchar_100 + id := testClientHelper().Ids.RandomSchemaObjectIdentifierWithArguments(sdk.LegacyDataTypeFrom(dataType)) + + argName := "i" + dt := sdk.NewProcedureReturnsResultDataTypeRequest(dataType) + returns := sdk.NewProcedureReturnsRequest().WithResultDataType(*dt) + argument := sdk.NewProcedureArgumentRequest(argName, dataType) + packages := []sdk.ProcedurePackageRequest{ + *sdk.NewProcedurePackageRequest("snowflake-snowpark-python==1.14.0"), + } + request := sdk.NewCreateForPythonProcedureRequest(id.SchemaObjectId(), *returns, "3.8", packages, tmpPythonFunction.PythonHandler()). + WithArguments([]sdk.ProcedureArgumentRequest{*argument}). + WithImports([]sdk.ProcedureImportRequest{*sdk.NewProcedureImportRequest(tmpPythonFunction.PythonModuleLocation())}) + + err := client.Procedures.CreateForPython(ctx, request) + require.NoError(t, err) + t.Cleanup(testClientHelper().Procedure.DropProcedureFunc(t, id)) + + function, err := client.Procedures.ShowByID(ctx, id) + require.NoError(t, err) + + assertions.AssertThatObject(t, objectassert.ProcedureFromObject(t, function). + HasCreatedOnNotEmpty(). + HasName(id.Name()). + HasSchemaName(id.SchemaName()). + HasIsBuiltin(false). + HasIsAggregate(false). + HasIsAnsi(false). + HasMinNumArguments(1). + HasMaxNumArguments(1). + HasArgumentsOld([]sdk.DataType{sdk.LegacyDataTypeFrom(dataType)}). + HasArgumentsRaw(fmt.Sprintf(`%[1]s(%[2]s) RETURN %[2]s`, function.ID().Name(), dataType.ToLegacyDataTypeSql())). + HasDescription(sdk.DefaultProcedureComment). + HasCatalogName(id.DatabaseName()). + HasIsTableFunction(false). + HasValidForClustering(false). + HasIsSecure(false). + HasExternalAccessIntegrationsNil(). + HasSecretsNil(), + ) + + assertions.AssertThatObject(t, objectassert.ProcedureDetails(t, function.ID()). + HasSignature(fmt.Sprintf(`(%s %s)`, argName, dataType.ToLegacyDataTypeSql())). + HasReturns(strings.ReplaceAll(dataType.ToSql(), " ", "")). + HasLanguage("PYTHON"). + HasBodyNil(). + HasNullHandling(string(sdk.NullInputBehaviorCalledOnNullInput)). + HasVolatility(string(sdk.ReturnResultsBehaviorVolatile)). + HasExternalAccessIntegrationsNil(). + HasSecretsNil(). + HasImports(fmt.Sprintf(`[%s]`, tmpPythonFunction.PythonModuleLocation())). + HasHandler(tmpPythonFunction.PythonHandler()). + HasRuntimeVersion("3.8"). + HasPackages(`['snowflake-snowpark-python==1.14.0']`). + HasTargetPathNil(). + HasInstalledPackagesNotEmpty(). + HasExecuteAs("OWNER"), + ) + + assertions.AssertThatObject(t, objectparametersassert.ProcedureParameters(t, id). + HasAllDefaults(). + HasAllDefaultsExplicit(), + ) + }) + + t.Run("create procedure for Python - staged full", func(t *testing.T) { + dataType := testdatatypes.DataTypeVarchar_100 + id := testClientHelper().Ids.RandomSchemaObjectIdentifierWithArguments(sdk.LegacyDataTypeFrom(dataType)) + + argName := "i" + dt := sdk.NewProcedureReturnsResultDataTypeRequest(dataType). 
+ WithNotNull(true) + returns := sdk.NewProcedureReturnsRequest().WithResultDataType(*dt) + argument := sdk.NewProcedureArgumentRequest(argName, dataType) + packages := []sdk.ProcedurePackageRequest{ + *sdk.NewProcedurePackageRequest("snowflake-snowpark-python==1.14.0"), + *sdk.NewProcedurePackageRequest("absl-py==0.10.0"), + } + + request := sdk.NewCreateForPythonProcedureRequest(id.SchemaObjectId(), *returns, "3.8", packages, tmpPythonFunction.PythonHandler()). + WithOrReplace(true). + WithArguments([]sdk.ProcedureArgumentRequest{*argument}). + WithCopyGrants(true). + WithNullInputBehavior(*sdk.NullInputBehaviorPointer(sdk.NullInputBehaviorReturnsNullInput)). + WithReturnResultsBehavior(sdk.ReturnResultsBehaviorImmutable). + WithComment("comment"). + WithExternalAccessIntegrations([]sdk.AccountObjectIdentifier{externalAccessIntegration}). + WithSecrets([]sdk.SecretReference{{VariableName: "abc", Name: secretId}}). + WithImports([]sdk.ProcedureImportRequest{*sdk.NewProcedureImportRequest(tmpPythonFunction.PythonModuleLocation())}). + WithExecuteAs(sdk.ExecuteAsCaller) + + err := client.Procedures.CreateForPython(ctx, request) + require.NoError(t, err) + t.Cleanup(testClientHelper().Procedure.DropProcedureFunc(t, id)) + + function, err := client.Procedures.ShowByID(ctx, id) + require.NoError(t, err) + + assertions.AssertThatObject(t, objectassert.ProcedureFromObject(t, function). + HasCreatedOnNotEmpty(). + HasName(id.Name()). + HasSchemaName(id.SchemaName()). + HasIsBuiltin(false). + HasIsAggregate(false). + HasIsAnsi(false). + HasMinNumArguments(1). + HasMaxNumArguments(1). + HasArgumentsOld([]sdk.DataType{sdk.LegacyDataTypeFrom(dataType)}). + HasArgumentsRaw(fmt.Sprintf(`%[1]s(%[2]s) RETURN %[2]s`, function.ID().Name(), dataType.ToLegacyDataTypeSql())). + HasDescription("comment"). + HasCatalogName(id.DatabaseName()). + HasIsTableFunction(false). + HasValidForClustering(false). + HasIsSecure(false). + HasExternalAccessIntegrationsNil(). + HasSecretsNil(), + ) + + assertions.AssertThatObject(t, objectassert.ProcedureDetails(t, function.ID()). + HasSignature(fmt.Sprintf(`(%s %s)`, argName, dataType.ToLegacyDataTypeSql())). + HasReturns(strings.ReplaceAll(dataType.ToSql(), " ", "")+" NOT NULL"). + HasLanguage("PYTHON"). + HasBodyNil(). + HasNullHandling(string(sdk.NullInputBehaviorReturnsNullInput)). + HasVolatility(string(sdk.ReturnResultsBehaviorImmutable)). + HasExactlyExternalAccessIntegrations(externalAccessIntegration). + HasExactlySecrets(map[string]sdk.SchemaObjectIdentifier{"abc": secretId}). + HasImports(fmt.Sprintf(`[%s]`, tmpPythonFunction.PythonModuleLocation())). + HasHandler(tmpPythonFunction.PythonHandler()). + HasRuntimeVersion("3.8"). + HasPackages(`['snowflake-snowpark-python==1.14.0','absl-py==0.10.0']`). + HasTargetPathNil(). + HasInstalledPackagesNotEmpty(). + HasExecuteAs("CALLER"), + ) + + assertions.AssertThatObject(t, objectparametersassert.ProcedureParameters(t, id). + HasAllDefaults(). 
+ HasAllDefaultsExplicit(), + ) + }) + + t.Run("create procedure for Scala - inline minimal", func(t *testing.T) { + className := "TestFunc" + funcName := "echoVarchar" + argName := "x" + dataType := testdatatypes.DataTypeVarchar_100 + + id := testClientHelper().Ids.RandomSchemaObjectIdentifierWithArguments(sdk.LegacyDataTypeFrom(dataType)) + + dt := sdk.NewProcedureReturnsResultDataTypeRequest(dataType) + returns := sdk.NewProcedureReturnsRequest().WithResultDataType(*dt) + definition := testClientHelper().Procedure.SampleScalaDefinition(t, className, funcName, argName) + argument := sdk.NewProcedureArgumentRequest(argName, dataType) + handler := fmt.Sprintf("%s.%s", className, funcName) + packages := []sdk.ProcedurePackageRequest{*sdk.NewProcedurePackageRequest("com.snowflake:snowpark:1.14.0")} + + request := sdk.NewCreateForScalaProcedureRequest(id.SchemaObjectId(), *returns, "2.12", packages, handler). + WithArguments([]sdk.ProcedureArgumentRequest{*argument}). + WithProcedureDefinitionWrapped(definition) + + err := client.Procedures.CreateForScala(ctx, request) + require.NoError(t, err) + t.Cleanup(testClientHelper().Procedure.DropProcedureFunc(t, id)) + + function, err := client.Procedures.ShowByID(ctx, id) + require.NoError(t, err) + + assertions.AssertThatObject(t, objectassert.ProcedureFromObject(t, function). + HasCreatedOnNotEmpty(). + HasName(id.Name()). + HasSchemaName(id.SchemaName()). + HasIsBuiltin(false). + HasIsAggregate(false). + HasIsAnsi(false). + HasMinNumArguments(1). + HasMaxNumArguments(1). + HasArgumentsOld([]sdk.DataType{sdk.LegacyDataTypeFrom(dataType)}). + HasArgumentsRaw(fmt.Sprintf(`%[1]s(%[2]s) RETURN %[2]s`, function.ID().Name(), dataType.ToLegacyDataTypeSql())). + HasDescription(sdk.DefaultProcedureComment). + HasCatalogName(id.DatabaseName()). + HasIsTableFunction(false). + HasValidForClustering(false). + HasIsSecure(false). + HasExternalAccessIntegrationsNil(). + HasSecretsNil(), + ) + + assertions.AssertThatObject(t, objectassert.ProcedureDetails(t, function.ID()). + HasSignature(fmt.Sprintf(`(%s %s)`, argName, dataType.ToLegacyDataTypeSql())). + HasReturns(dataType.ToSql()). + HasLanguage("SCALA"). + HasBody(definition). + HasNullHandling(string(sdk.NullInputBehaviorCalledOnNullInput)). + HasVolatility(string(sdk.ReturnResultsBehaviorVolatile)). + HasExternalAccessIntegrationsNil(). + HasSecretsNil(). + HasImports(`[]`). + HasHandler(handler). + HasRuntimeVersion("2.12"). + HasPackages(`[com.snowflake:snowpark:1.14.0]`). + HasTargetPathNil(). + HasInstalledPackagesNil(). + HasExecuteAs("OWNER"), + ) + + assertions.AssertThatObject(t, objectparametersassert.ProcedureParameters(t, id). + HasAllDefaults(). + HasAllDefaultsExplicit(), + ) + }) + + t.Run("create procedure for Scala - inline full", func(t *testing.T) { + className := "TestFunc" + funcName := "echoVarchar" + argName := "x" + dataType := testdatatypes.DataTypeVarchar_100 + + id := testClientHelper().Ids.RandomSchemaObjectIdentifierWithArguments(sdk.LegacyDataTypeFrom(dataType)) + + dt := sdk.NewProcedureReturnsResultDataTypeRequest(dataType). 
+ WithNotNull(true) + returns := sdk.NewProcedureReturnsRequest().WithResultDataType(*dt) + definition := testClientHelper().Procedure.SampleScalaDefinition(t, className, funcName, argName) + argument := sdk.NewProcedureArgumentRequest(argName, dataType) + handler := fmt.Sprintf("%s.%s", className, funcName) + jarName := fmt.Sprintf("tf-%d-%s.jar", time.Now().Unix(), random.AlphaN(5)) + targetPath := fmt.Sprintf("@~/%s", jarName) + packages := []sdk.ProcedurePackageRequest{ + *sdk.NewProcedurePackageRequest("com.snowflake:snowpark:1.14.0"), + *sdk.NewProcedurePackageRequest("com.snowflake:telemetry:0.1.0"), + } + + request := sdk.NewCreateForScalaProcedureRequest(id.SchemaObjectId(), *returns, "2.12", packages, handler). + WithOrReplace(true). + WithArguments([]sdk.ProcedureArgumentRequest{*argument}). + WithCopyGrants(true). + WithNullInputBehavior(*sdk.NullInputBehaviorPointer(sdk.NullInputBehaviorReturnsNullInput)). + WithReturnResultsBehavior(sdk.ReturnResultsBehaviorImmutable). + WithComment("comment"). + WithImports([]sdk.ProcedureImportRequest{*sdk.NewProcedureImportRequest(tmpJavaProcedure.JarLocation())}). + WithTargetPath(targetPath). + WithExecuteAs(sdk.ExecuteAsCaller). + WithExternalAccessIntegrations([]sdk.AccountObjectIdentifier{externalAccessIntegration}). + WithSecrets([]sdk.SecretReference{{VariableName: "abc", Name: secretId}}). + WithProcedureDefinitionWrapped(definition) + + err := client.Procedures.CreateForScala(ctx, request) + require.NoError(t, err) + t.Cleanup(testClientHelper().Procedure.DropProcedureFunc(t, id)) + t.Cleanup(testClientHelper().Stage.RemoveFromUserStageFunc(t, jarName)) + + function, err := client.Procedures.ShowByID(ctx, id) + require.NoError(t, err) + + assertions.AssertThatObject(t, objectassert.ProcedureFromObject(t, function). + HasCreatedOnNotEmpty(). + HasName(id.Name()). + HasSchemaName(id.SchemaName()). + HasIsBuiltin(false). + HasIsAggregate(false). + HasIsAnsi(false). + HasMinNumArguments(1). + HasMaxNumArguments(1). + HasArgumentsOld([]sdk.DataType{sdk.LegacyDataTypeFrom(dataType)}). + HasArgumentsRaw(fmt.Sprintf(`%[1]s(%[2]s) RETURN %[2]s`, function.ID().Name(), dataType.ToLegacyDataTypeSql())). + HasDescription("comment"). + HasCatalogName(id.DatabaseName()). + HasIsTableFunction(false). + HasValidForClustering(false). + HasIsSecure(false). + HasExternalAccessIntegrationsNil(). + HasSecretsNil(), + ) + + assertions.AssertThatObject(t, objectassert.ProcedureDetails(t, function.ID()). + HasSignature(fmt.Sprintf(`(%s %s)`, argName, dataType.ToLegacyDataTypeSql())). + HasReturns(fmt.Sprintf(`%s NOT NULL`, dataType.ToSql())). + HasLanguage("SCALA"). + HasBody(definition). + HasNullHandling(string(sdk.NullInputBehaviorReturnsNullInput)). + HasVolatility(string(sdk.ReturnResultsBehaviorImmutable)). + HasExactlyExternalAccessIntegrations(externalAccessIntegration). + HasExactlySecrets(map[string]sdk.SchemaObjectIdentifier{"abc": secretId}). + HasImports(fmt.Sprintf(`[%s]`, tmpJavaProcedure.JarLocation())). + HasHandler(handler). + HasRuntimeVersion("2.12"). + HasPackages(`[com.snowflake:snowpark:1.14.0,com.snowflake:telemetry:0.1.0]`). + HasTargetPath(targetPath). + HasInstalledPackagesNil(). + HasExecuteAs("CALLER"), + ) + + assertions.AssertThatObject(t, objectparametersassert.ProcedureParameters(t, id). + HasAllDefaults(). 
+ HasAllDefaultsExplicit(), + ) + }) + + t.Run("create procedure for Scala - staged minimal", func(t *testing.T) { + dataType := tmpJavaProcedure.ArgType + id := testClientHelper().Ids.RandomSchemaObjectIdentifierWithArguments(sdk.LegacyDataTypeFrom(dataType)) + + argName := "x" + argument := sdk.NewProcedureArgumentRequest(argName, dataType) + dt := sdk.NewProcedureReturnsResultDataTypeRequest(dataType) + returns := sdk.NewProcedureReturnsRequest().WithResultDataType(*dt) + handler := tmpJavaProcedure.JavaHandler() + importPath := tmpJavaProcedure.JarLocation() + packages := []sdk.ProcedurePackageRequest{*sdk.NewProcedurePackageRequest("com.snowflake:snowpark:1.14.0")} + + requestStaged := sdk.NewCreateForScalaProcedureRequest(id.SchemaObjectId(), *returns, "2.12", packages, handler). + WithArguments([]sdk.ProcedureArgumentRequest{*argument}). + WithImports([]sdk.ProcedureImportRequest{*sdk.NewProcedureImportRequest(importPath)}) + + err := client.Procedures.CreateForScala(ctx, requestStaged) + require.NoError(t, err) + t.Cleanup(testClientHelper().Procedure.DropProcedureFunc(t, id)) + + function, err := client.Procedures.ShowByID(ctx, id) + require.NoError(t, err) + + assertions.AssertThatObject(t, objectassert.ProcedureFromObject(t, function). + HasCreatedOnNotEmpty(). + HasName(id.Name()). + HasSchemaName(id.SchemaName()). + HasIsBuiltin(false). + HasIsAggregate(false). + HasIsAnsi(false). + HasMinNumArguments(1). + HasMaxNumArguments(1). + HasArgumentsOld([]sdk.DataType{sdk.LegacyDataTypeFrom(dataType)}). + HasArgumentsRaw(fmt.Sprintf(`%[1]s(%[2]s) RETURN %[2]s`, function.ID().Name(), dataType.ToLegacyDataTypeSql())). + HasDescription(sdk.DefaultProcedureComment). + HasCatalogName(id.DatabaseName()). + HasIsTableFunction(false). + HasValidForClustering(false). + HasIsSecure(false). + HasExternalAccessIntegrationsNil(). + HasSecretsNil(), + ) + + assertions.AssertThatObject(t, objectassert.ProcedureDetails(t, function.ID()). + HasSignature(fmt.Sprintf(`(%s %s)`, argName, dataType.ToLegacyDataTypeSql())). + HasReturns(dataType.ToSql()). + HasLanguage("SCALA"). + HasBodyNil(). + HasNullHandling(string(sdk.NullInputBehaviorCalledOnNullInput)). + HasVolatility(string(sdk.ReturnResultsBehaviorVolatile)). + HasExternalAccessIntegrationsNil(). + HasSecretsNil(). + HasImports(fmt.Sprintf(`[%s]`, importPath)). + HasHandler(handler). + HasRuntimeVersion("2.12"). + HasPackages(`[com.snowflake:snowpark:1.14.0]`). + HasTargetPathNil(). + HasInstalledPackagesNil(). + HasExecuteAs("OWNER"), + ) + + assertions.AssertThatObject(t, objectparametersassert.ProcedureParameters(t, id). + HasAllDefaults(). + HasAllDefaultsExplicit(), + ) + }) + + t.Run("create procedure for Scala - staged full", func(t *testing.T) { + dataType := tmpJavaProcedure.ArgType + id := testClientHelper().Ids.RandomSchemaObjectIdentifierWithArguments(sdk.LegacyDataTypeFrom(dataType)) + + argName := "x" + argument := sdk.NewProcedureArgumentRequest(argName, dataType) + handler := tmpJavaProcedure.JavaHandler() + + dt := sdk.NewProcedureReturnsResultDataTypeRequest(dataType). + WithNotNull(true) + returns := sdk.NewProcedureReturnsRequest().WithResultDataType(*dt) + packages := []sdk.ProcedurePackageRequest{ + *sdk.NewProcedurePackageRequest("com.snowflake:snowpark:1.14.0"), + *sdk.NewProcedurePackageRequest("com.snowflake:telemetry:0.1.0"), + } + + requestStaged := sdk.NewCreateForScalaProcedureRequest(id.SchemaObjectId(), *returns, "2.12", packages, handler). + WithOrReplace(true). 
+ WithArguments([]sdk.ProcedureArgumentRequest{*argument}). + WithCopyGrants(true). + WithNullInputBehavior(*sdk.NullInputBehaviorPointer(sdk.NullInputBehaviorReturnsNullInput)). + WithReturnResultsBehavior(sdk.ReturnResultsBehaviorImmutable). + WithComment("comment"). + WithExecuteAs(sdk.ExecuteAsCaller). + WithExternalAccessIntegrations([]sdk.AccountObjectIdentifier{externalAccessIntegration}). + WithSecrets([]sdk.SecretReference{{VariableName: "abc", Name: secretId}}). + WithImports([]sdk.ProcedureImportRequest{*sdk.NewProcedureImportRequest(tmpJavaProcedure.JarLocation())}) + + err := client.Procedures.CreateForScala(ctx, requestStaged) + require.NoError(t, err) + t.Cleanup(testClientHelper().Procedure.DropProcedureFunc(t, id)) + + function, err := client.Procedures.ShowByID(ctx, id) + require.NoError(t, err) + + assertions.AssertThatObject(t, objectassert.ProcedureFromObject(t, function). + HasCreatedOnNotEmpty(). + HasName(id.Name()). + HasSchemaName(id.SchemaName()). + HasIsBuiltin(false). + HasIsAggregate(false). + HasIsAnsi(false). + HasMinNumArguments(1). + HasMaxNumArguments(1). + HasArgumentsOld([]sdk.DataType{sdk.LegacyDataTypeFrom(dataType)}). + HasArgumentsRaw(fmt.Sprintf(`%[1]s(%[2]s) RETURN %[2]s`, function.ID().Name(), dataType.ToLegacyDataTypeSql())). + HasDescription("comment"). + HasCatalogName(id.DatabaseName()). + HasIsTableFunction(false). + HasValidForClustering(false). + HasIsSecure(false). + HasExternalAccessIntegrationsNil(). + HasSecretsNil(), + ) + + assertions.AssertThatObject(t, objectassert.ProcedureDetails(t, function.ID()). + HasSignature(fmt.Sprintf(`(%s %s)`, argName, dataType.ToLegacyDataTypeSql())). + HasReturns(fmt.Sprintf(`%s NOT NULL`, dataType.ToSql())). + HasLanguage("SCALA"). + HasBodyNil(). + HasNullHandling(string(sdk.NullInputBehaviorReturnsNullInput)). + HasVolatility(string(sdk.ReturnResultsBehaviorImmutable)). + HasExactlyExternalAccessIntegrations(externalAccessIntegration). + HasExactlySecrets(map[string]sdk.SchemaObjectIdentifier{"abc": secretId}). + HasImports(fmt.Sprintf(`[%s]`, tmpJavaProcedure.JarLocation())). + HasHandler(handler). + HasRuntimeVersion("2.12"). + HasPackages(`[com.snowflake:snowpark:1.14.0,com.snowflake:telemetry:0.1.0]`). + HasTargetPathNil(). + HasInstalledPackagesNil(). + HasExecuteAs("CALLER"), + ) + + assertions.AssertThatObject(t, objectparametersassert.ProcedureParameters(t, id). + HasAllDefaults(). + HasAllDefaultsExplicit(), + ) + }) + + t.Run("create procedure for SQL - inline minimal", func(t *testing.T) { + argName := "x" + dataType := testdatatypes.DataTypeFloat + id := testClientHelper().Ids.RandomSchemaObjectIdentifierWithArguments(sdk.LegacyDataTypeFrom(dataType)) + + definition := testClientHelper().Procedure.SampleSqlDefinition(t) + dt := sdk.NewProcedureReturnsResultDataTypeRequest(dataType) + returns := sdk.NewProcedureSQLReturnsRequest().WithResultDataType(*dt) + argument := sdk.NewProcedureArgumentRequest(argName, dataType) + request := sdk.NewCreateForSQLProcedureRequestDefinitionWrapped(id.SchemaObjectId(), *returns, definition). + WithArguments([]sdk.ProcedureArgumentRequest{*argument}) + + err := client.Procedures.CreateForSQL(ctx, request) + require.NoError(t, err) + t.Cleanup(testClientHelper().Procedure.DropProcedureFunc(t, id)) + + function, err := client.Procedures.ShowByID(ctx, id) + require.NoError(t, err) + + assertions.AssertThatObject(t, objectassert.ProcedureFromObject(t, function). + HasCreatedOnNotEmpty(). + HasName(id.Name()). + HasSchemaName(id.SchemaName()). 
+			HasIsBuiltin(false).
+			HasIsAggregate(false).
+			HasIsAnsi(false).
+			HasMinNumArguments(1).
+			HasMaxNumArguments(1).
+			HasArgumentsOld([]sdk.DataType{sdk.LegacyDataTypeFrom(dataType)}).
+			HasArgumentsRaw(fmt.Sprintf(`%[1]s(%[2]s) RETURN %[2]s`, function.ID().Name(), dataType.ToLegacyDataTypeSql())).
+			HasDescription(sdk.DefaultProcedureComment).
+			HasCatalogName(id.DatabaseName()).
+			HasIsTableFunction(false).
+			HasValidForClustering(false).
+			HasIsSecure(false).
+			HasExternalAccessIntegrationsNil().
+			HasSecretsNil(),
+		)
+
+		assertions.AssertThatObject(t, objectassert.ProcedureDetails(t, function.ID()).
+			HasSignature(fmt.Sprintf(`(%s %s)`, argName, dataType.ToLegacyDataTypeSql())).
+			HasReturns(dataType.ToSql()).
+			HasLanguage("SQL").
+			HasBody(definition).
+			HasNullHandlingNil().
+			HasVolatilityNil().
+			HasExternalAccessIntegrationsNil().
+			HasSecretsNil().
+			HasImportsNil().
+			HasHandlerNil().
+			HasRuntimeVersionNil().
+			HasPackagesNil().
+			HasTargetPathNil().
+			HasInstalledPackagesNil().
+			HasExecuteAs("OWNER"),
+		)
+
+		assertions.AssertThatObject(t, objectparametersassert.ProcedureParameters(t, id).
+			HasAllDefaults().
+			HasAllDefaultsExplicit(),
+		)
+	})
+
+	t.Run("create procedure for SQL - inline full", func(t *testing.T) {
+		argName := "x"
+		dataType := testdatatypes.DataTypeFloat
+		id := testClientHelper().Ids.RandomSchemaObjectIdentifierWithArguments(sdk.LegacyDataTypeFrom(dataType))
+
+		definition := testClientHelper().Procedure.SampleSqlDefinition(t)
+		dt := sdk.NewProcedureReturnsResultDataTypeRequest(dataType).
+			WithNotNull(true)
+		returns := sdk.NewProcedureSQLReturnsRequest().WithResultDataType(*dt)
+		argument := sdk.NewProcedureArgumentRequest(argName, dataType)
+		request := sdk.NewCreateForSQLProcedureRequestDefinitionWrapped(id.SchemaObjectId(), *returns, definition).
+			WithOrReplace(true).
+			WithArguments([]sdk.ProcedureArgumentRequest{*argument}).
+			WithCopyGrants(true).
+			WithNullInputBehavior(*sdk.NullInputBehaviorPointer(sdk.NullInputBehaviorReturnsNullInput)).
+			WithReturnResultsBehavior(sdk.ReturnResultsBehaviorImmutable).
+			WithExecuteAs(sdk.ExecuteAsCaller).
+			WithComment("comment")
+
+		err := client.Procedures.CreateForSQL(ctx, request)
+		require.NoError(t, err)
+		t.Cleanup(testClientHelper().Procedure.DropProcedureFunc(t, id))
+
+		function, err := client.Procedures.ShowByID(ctx, id)
+		require.NoError(t, err)
+
+		assertions.AssertThatObject(t, objectassert.ProcedureFromObject(t, function).
+			HasCreatedOnNotEmpty().
+			HasName(id.Name()).
+			HasSchemaName(id.SchemaName()).
+			HasIsBuiltin(false).
+			HasIsAggregate(false).
+			HasIsAnsi(false).
+			HasMinNumArguments(1).
+			HasMaxNumArguments(1).
+			HasArgumentsOld([]sdk.DataType{sdk.LegacyDataTypeFrom(dataType)}).
+			HasArgumentsRaw(fmt.Sprintf(`%[1]s(%[2]s) RETURN %[2]s`, function.ID().Name(), dataType.ToLegacyDataTypeSql())).
+			HasDescription("comment").
+			HasCatalogName(id.DatabaseName()).
+			HasIsTableFunction(false).
+			HasValidForClustering(false).
+			HasIsSecure(false).
+			HasExternalAccessIntegrationsNil().
+			HasSecretsNil(),
+		)
+
+		assertions.AssertThatObject(t, objectassert.ProcedureDetails(t, function.ID()).
+			HasSignature(fmt.Sprintf(`(%s %s)`, argName, dataType.ToLegacyDataTypeSql())).
+			HasReturns(fmt.Sprintf(`%s NOT NULL`, dataType.ToSql())).
+			HasLanguage("SQL").
+			HasBody(definition).
+			// TODO [SNOW-1348103]: null handling and volatility are not returned here, although they are present in the create syntax
+			HasNullHandlingNil().
+			HasVolatilityNil().
+			HasExternalAccessIntegrationsNil().
+ HasSecretsNil(). + HasImportsNil(). + HasHandlerNil(). + HasRuntimeVersionNil(). + HasPackagesNil(). + HasTargetPathNil(). + HasInstalledPackagesNil(). + HasExecuteAs("CALLER"), + ) + + assertions.AssertThatObject(t, objectparametersassert.ProcedureParameters(t, id). + HasAllDefaults(). + HasAllDefaultsExplicit(), + ) }) + // TODO [SNOW-1348103]: adjust or remove t.Run("create procedure for Java: returns table", func(t *testing.T) { + t.Skipf("Skipped for now; left as inspiration for resource rework as part of SNOW-1348103") + // https://docs.snowflake.com/en/developer-guide/stored-procedure/stored-procedures-java#specifying-return-column-names-and-types name := "filter_by_role" id := testClientHelper().Ids.NewSchemaObjectIdentifierWithArguments(name, sdk.DataTypeVARCHAR, sdk.DataTypeVARCHAR) @@ -89,17 +1241,20 @@ func TestInt_CreateProcedures(t *testing.T) { request := sdk.NewCreateForJavaProcedureRequest(id.SchemaObjectId(), *returns, "11", packages, "Filter.filterByRole"). WithOrReplace(true). WithArguments([]sdk.ProcedureArgumentRequest{*arg1, *arg2}). - WithProcedureDefinition(definition) + WithProcedureDefinitionWrapped(definition) err := client.Procedures.CreateForJava(ctx, request) require.NoError(t, err) - t.Cleanup(cleanupProcedureHandle(id)) + t.Cleanup(testClientHelper().Procedure.DropProcedureFunc(t, id)) procedures, err := client.Procedures.Show(ctx, sdk.NewShowProcedureRequest()) require.NoError(t, err) require.GreaterOrEqual(t, len(procedures), 1) }) + // TODO [SNOW-1348103]: adjust or remove t.Run("create procedure for Javascript", func(t *testing.T) { + t.Skipf("Skipped for now; left as inspiration for resource rework as part of SNOW-1348103") + // https://docs.snowflake.com/en/developer-guide/stored-procedure/stored-procedures-javascript#basic-examples name := "stproc1" id := testClientHelper().Ids.NewSchemaObjectIdentifierWithArguments(name, sdk.DataTypeFloat) @@ -116,37 +1271,43 @@ func TestInt_CreateProcedures(t *testing.T) { return "Failed: " + err; // Return a success/error indicator. }` argument := sdk.NewProcedureArgumentRequest("FLOAT_PARAM1", nil).WithArgDataTypeOld(sdk.DataTypeFloat) - request := sdk.NewCreateForJavaScriptProcedureRequest(id.SchemaObjectId(), nil, definition). + request := sdk.NewCreateForJavaScriptProcedureRequestDefinitionWrapped(id.SchemaObjectId(), nil, definition). WithResultDataTypeOld(sdk.DataTypeString). WithArguments([]sdk.ProcedureArgumentRequest{*argument}). WithNullInputBehavior(*sdk.NullInputBehaviorPointer(sdk.NullInputBehaviorStrict)). 
WithExecuteAs(*sdk.ExecuteAsPointer(sdk.ExecuteAsCaller)) err := client.Procedures.CreateForJavaScript(ctx, request) require.NoError(t, err) - t.Cleanup(cleanupProcedureHandle(id)) + t.Cleanup(testClientHelper().Procedure.DropProcedureFunc(t, id)) procedures, err := client.Procedures.Show(ctx, sdk.NewShowProcedureRequest()) require.NoError(t, err) require.GreaterOrEqual(t, len(procedures), 1) }) + // TODO [SNOW-1348103]: adjust or remove t.Run("create procedure for Javascript: no arguments", func(t *testing.T) { + t.Skipf("Skipped for now; left as inspiration for resource rework as part of SNOW-1348103") + // https://docs.snowflake.com/en/developer-guide/stored-procedure/stored-procedures-javascript#basic-examples name := "sp_pi" id := testClientHelper().Ids.NewSchemaObjectIdentifierWithArguments(name) definition := `return 3.1415926;` - request := sdk.NewCreateForJavaScriptProcedureRequest(id.SchemaObjectId(), nil, definition).WithResultDataTypeOld(sdk.DataTypeFloat).WithNotNull(true).WithOrReplace(true) + request := sdk.NewCreateForJavaScriptProcedureRequestDefinitionWrapped(id.SchemaObjectId(), nil, definition).WithResultDataTypeOld(sdk.DataTypeFloat).WithNotNull(true).WithOrReplace(true) err := client.Procedures.CreateForJavaScript(ctx, request) require.NoError(t, err) - t.Cleanup(cleanupProcedureHandle(id)) + t.Cleanup(testClientHelper().Procedure.DropProcedureFunc(t, id)) procedures, err := client.Procedures.Show(ctx, sdk.NewShowProcedureRequest()) require.NoError(t, err) require.GreaterOrEqual(t, len(procedures), 1) }) + // TODO [SNOW-1348103]: adjust or remove t.Run("create procedure for Scala: returns result data type", func(t *testing.T) { + t.Skipf("Skipped for now; left as inspiration for resource rework as part of SNOW-1348103") + // https://docs.snowflake.com/en/developer-guide/stored-procedure/stored-procedures-scala#reading-a-dynamically-specified-file-with-snowflakefile name := "file_reader_scala_proc_snowflakefile" id := testClientHelper().Ids.NewSchemaObjectIdentifierWithArguments(name, sdk.DataTypeVARCHAR) @@ -169,17 +1330,20 @@ func TestInt_CreateProcedures(t *testing.T) { request := sdk.NewCreateForScalaProcedureRequest(id.SchemaObjectId(), *returns, "2.12", packages, "FileReader.execute"). WithOrReplace(true). WithArguments([]sdk.ProcedureArgumentRequest{*argument}). - WithProcedureDefinition(definition) + WithProcedureDefinitionWrapped(definition) err := client.Procedures.CreateForScala(ctx, request) require.NoError(t, err) - t.Cleanup(cleanupProcedureHandle(id)) + t.Cleanup(testClientHelper().Procedure.DropProcedureFunc(t, id)) procedures, err := client.Procedures.Show(ctx, sdk.NewShowProcedureRequest()) require.NoError(t, err) require.GreaterOrEqual(t, len(procedures), 1) }) + // TODO [SNOW-1348103]: adjust or remove t.Run("create procedure for Scala: returns table", func(t *testing.T) { + t.Skipf("Skipped for now; left as inspiration for resource rework as part of SNOW-1348103") + // https://docs.snowflake.com/en/developer-guide/stored-procedure/stored-procedures-scala#specifying-return-column-names-and-types name := "filter_by_role" id := testClientHelper().Ids.NewSchemaObjectIdentifierWithArguments(name, sdk.DataTypeVARCHAR, sdk.DataTypeVARCHAR) @@ -205,17 +1369,20 @@ func TestInt_CreateProcedures(t *testing.T) { request := sdk.NewCreateForScalaProcedureRequest(id.SchemaObjectId(), *returns, "2.12", packages, "Filter.filterByRole"). WithOrReplace(true). WithArguments([]sdk.ProcedureArgumentRequest{*arg1, *arg2}). 
- WithProcedureDefinition(definition) + WithProcedureDefinitionWrapped(definition) err := client.Procedures.CreateForScala(ctx, request) require.NoError(t, err) - t.Cleanup(cleanupProcedureHandle(id)) + t.Cleanup(testClientHelper().Procedure.DropProcedureFunc(t, id)) procedures, err := client.Procedures.Show(ctx, sdk.NewShowProcedureRequest()) require.NoError(t, err) require.GreaterOrEqual(t, len(procedures), 1) }) + // TODO [SNOW-1348103]: adjust or remove t.Run("create procedure for Python: returns result data type", func(t *testing.T) { + t.Skipf("Skipped for now; left as inspiration for resource rework as part of SNOW-1348103") + // https://docs.snowflake.com/en/developer-guide/stored-procedure/stored-procedures-python#running-concurrent-tasks-with-worker-processes name := "joblib_multiprocessing_proc" id := testClientHelper().Ids.NewSchemaObjectIdentifierWithArguments(name, sdk.DataTypeInt) @@ -237,17 +1404,20 @@ def joblib_multiprocessing(session, i): request := sdk.NewCreateForPythonProcedureRequest(id.SchemaObjectId(), *returns, "3.8", packages, "joblib_multiprocessing"). WithOrReplace(true). WithArguments([]sdk.ProcedureArgumentRequest{*argument}). - WithProcedureDefinition(definition) + WithProcedureDefinitionWrapped(definition) err := client.Procedures.CreateForPython(ctx, request) require.NoError(t, err) - t.Cleanup(cleanupProcedureHandle(id)) + t.Cleanup(testClientHelper().Procedure.DropProcedureFunc(t, id)) procedures, err := client.Procedures.Show(ctx, sdk.NewShowProcedureRequest()) require.NoError(t, err) require.GreaterOrEqual(t, len(procedures), 1) }) + // TODO [SNOW-1348103]: adjust or remove t.Run("create procedure for Python: returns table", func(t *testing.T) { + t.Skipf("Skipped for now; left as inspiration for resource rework as part of SNOW-1348103") + // https://docs.snowflake.com/en/developer-guide/stored-procedure/stored-procedures-python#specifying-return-column-names-and-types name := "filterByRole" id := testClientHelper().Ids.NewSchemaObjectIdentifierWithArguments(name, sdk.DataTypeVARCHAR, sdk.DataTypeVARCHAR) @@ -268,17 +1438,20 @@ def filter_by_role(session, table_name, role): request := sdk.NewCreateForPythonProcedureRequest(id.SchemaObjectId(), *returns, "3.8", packages, "filter_by_role"). WithOrReplace(true). WithArguments([]sdk.ProcedureArgumentRequest{*arg1, *arg2}). 
-			WithProcedureDefinitionWrapped(definition)
+			WithProcedureDefinitionWrapped(definition)
 		err := client.Procedures.CreateForPython(ctx, request)
 		require.NoError(t, err)
-		t.Cleanup(cleanupProcedureHandle(id))
+		t.Cleanup(testClientHelper().Procedure.DropProcedureFunc(t, id))
 
 		procedures, err := client.Procedures.Show(ctx, sdk.NewShowProcedureRequest())
 		require.NoError(t, err)
 		require.GreaterOrEqual(t, len(procedures), 1)
 	})
 
+	// TODO [SNOW-1348103]: adjust or remove
 	t.Run("create procedure for SQL: returns result data type", func(t *testing.T) {
+		t.Skipf("Skipped for now; left as inspiration for resource rework as part of SNOW-1348103")
+
 		// https://docs.snowflake.com/en/developer-guide/stored-procedure/stored-procedures-snowflake-scripting
 		name := "output_message"
 		id := testClientHelper().Ids.NewSchemaObjectIdentifierWithArguments(name, sdk.DataTypeVARCHAR)
@@ -291,7 +1464,7 @@ def filter_by_role(session, table_name, role):
 		dt := sdk.NewProcedureReturnsResultDataTypeRequest(nil).WithResultDataTypeOld(sdk.DataTypeVARCHAR)
 		returns := sdk.NewProcedureSQLReturnsRequest().WithResultDataType(*dt).WithNotNull(true)
 		argument := sdk.NewProcedureArgumentRequest("message", nil).WithArgDataTypeOld(sdk.DataTypeVARCHAR)
-		request := sdk.NewCreateForSQLProcedureRequest(id.SchemaObjectId(), *returns, definition).
+		request := sdk.NewCreateForSQLProcedureRequestDefinitionWrapped(id.SchemaObjectId(), *returns, definition).
 			WithOrReplace(true).
 			// Suddenly this is erroring out, when it used to work without a problem. Must be an error with the Snowflake API.
 			// Created issue in docs-discuss channel. https://snowflake.slack.com/archives/C6380540P/p1707511734666249
@@ -299,18 +1472,21 @@ def filter_by_role(session, table_name, role):
 			// 001003 (42000): SQL compilation error:
 			// syntax error line 1 at position 210 unexpected 'NULL'.
 			// syntax error line 1 at position 215 unexpected 'ON'.
-			// WithNullInputBehavior(sdk.NullInputBehaviorPointer(sdk.NullInputBehaviorReturnNullInput)).
+			// WithNullInputBehavior(sdk.NullInputBehaviorPointer(sdk.NullInputBehaviorReturnsNullInput)).
 			WithArguments([]sdk.ProcedureArgumentRequest{*argument})
 
 		err := client.Procedures.CreateForSQL(ctx, request)
 		require.NoError(t, err)
-		t.Cleanup(cleanupProcedureHandle(id))
+		t.Cleanup(testClientHelper().Procedure.DropProcedureFunc(t, id))
 
 		procedures, err := client.Procedures.Show(ctx, sdk.NewShowProcedureRequest())
 		require.NoError(t, err)
 		require.GreaterOrEqual(t, len(procedures), 1)
 	})
 
+	// TODO [SNOW-1348103]: adjust or remove
 	t.Run("create procedure for SQL: returns table", func(t *testing.T) {
+		t.Skipf("Skipped for now; left as inspiration for resource rework as part of SNOW-1348103")
+
 		name := "find_invoice_by_id"
 		id := testClientHelper().Ids.NewSchemaObjectIdentifierWithArguments(name, sdk.DataTypeVARCHAR)
@@ -325,216 +1501,443 @@ def filter_by_role(session, table_name, role):
 		returnsTable := sdk.NewProcedureReturnsTableRequest().WithColumns([]sdk.ProcedureColumnRequest{*column1, *column2})
 		returns := sdk.NewProcedureSQLReturnsRequest().WithTable(*returnsTable)
 		argument := sdk.NewProcedureArgumentRequest("id", nil).WithArgDataTypeOld(sdk.DataTypeVARCHAR)
-		request := sdk.NewCreateForSQLProcedureRequest(id.SchemaObjectId(), *returns, definition).
+		request := sdk.NewCreateForSQLProcedureRequestDefinitionWrapped(id.SchemaObjectId(), *returns, definition).
 			WithOrReplace(true).
 			// SNOW-1051627 todo: uncomment once null input behavior is working again
-			// WithNullInputBehavior(sdk.NullInputBehaviorPointer(sdk.NullInputBehaviorReturnNullInput)).
+ // WithNullInputBehavior(sdk.NullInputBehaviorPointer(sdk.NullInputBehaviorReturnsNullInput)). WithArguments([]sdk.ProcedureArgumentRequest{*argument}) err := client.Procedures.CreateForSQL(ctx, request) require.NoError(t, err) - t.Cleanup(cleanupProcedureHandle(id)) + t.Cleanup(testClientHelper().Procedure.DropProcedureFunc(t, id)) procedures, err := client.Procedures.Show(ctx, sdk.NewShowProcedureRequest()) require.NoError(t, err) require.GreaterOrEqual(t, len(procedures), 1) }) -} - -func TestInt_OtherProcedureFunctions(t *testing.T) { - client := testClient(t) - ctx := testContext(t) - assertProcedure := func(t *testing.T, id sdk.SchemaObjectIdentifierWithArguments, secure bool) { - t.Helper() + t.Run("show parameters", func(t *testing.T) { + p, pCleanup := testClientHelper().Procedure.CreateSql(t) + t.Cleanup(pCleanup) + id := p.ID() - procedure, err := client.Procedures.ShowByID(ctx, id) + param, err := client.Parameters.ShowObjectParameter(ctx, sdk.ObjectParameterLogLevel, sdk.Object{ObjectType: sdk.ObjectTypeProcedure, Name: id}) require.NoError(t, err) + assert.Equal(t, string(sdk.LogLevelOff), param.Value) - assert.NotEmpty(t, procedure.CreatedOn) - assert.Equal(t, id.Name(), procedure.Name) - assert.Equal(t, false, procedure.IsBuiltin) - assert.Equal(t, false, procedure.IsAggregate) - assert.Equal(t, false, procedure.IsAnsi) - assert.Equal(t, 1, procedure.MinNumArguments) - assert.Equal(t, 1, procedure.MaxNumArguments) - assert.NotEmpty(t, procedure.ArgumentsOld) - assert.NotEmpty(t, procedure.ArgumentsRaw) - assert.NotEmpty(t, procedure.Description) - assert.NotEmpty(t, procedure.CatalogName) - assert.Equal(t, false, procedure.IsTableFunction) - assert.Equal(t, false, procedure.ValidForClustering) - assert.Equal(t, secure, procedure.IsSecure) - } - - cleanupProcedureHandle := func(id sdk.SchemaObjectIdentifierWithArguments) func() { - return func() { - err := client.Procedures.Drop(ctx, sdk.NewDropProcedureRequest(id)) - if errors.Is(err, sdk.ErrObjectNotExistOrAuthorized) { - return - } - require.NoError(t, err) - } - } + parameters, err := client.Parameters.ShowParameters(ctx, &sdk.ShowParametersOptions{ + In: &sdk.ParametersIn{ + Procedure: id, + }, + }) + require.NoError(t, err) - createProcedureForSQLHandle := func(t *testing.T, cleanup bool) *sdk.Procedure { - t.Helper() + assertions.AssertThatObject(t, objectparametersassert.ProcedureParametersPrefetched(t, id, parameters). + HasAllDefaults(). + HasAllDefaultsExplicit(), + ) - definition := ` - BEGIN - RETURN message; - END;` - id := testClientHelper().Ids.RandomSchemaObjectIdentifierWithArguments(sdk.DataTypeVARCHAR) - dt := sdk.NewProcedureReturnsResultDataTypeRequest(nil).WithResultDataTypeOld(sdk.DataTypeVARCHAR) - returns := sdk.NewProcedureSQLReturnsRequest().WithResultDataType(*dt).WithNotNull(true) - argument := sdk.NewProcedureArgumentRequest("message", nil).WithArgDataTypeOld(sdk.DataTypeVARCHAR) - request := sdk.NewCreateForSQLProcedureRequest(id.SchemaObjectId(), *returns, definition). - WithSecure(true). - WithOrReplace(true). - WithArguments([]sdk.ProcedureArgumentRequest{*argument}). 
-		WithExecuteAs(*sdk.ExecuteAsPointer(sdk.ExecuteAsCaller))
-		err := client.Procedures.CreateForSQL(ctx, request)
-		require.NoError(t, err)
-		if cleanup {
-			t.Cleanup(cleanupProcedureHandle(id))
-		}
-		procedure, err := client.Procedures.ShowByID(ctx, id)
+
+		// check that ShowParameters on procedure level works too
+		parameters, err = client.Procedures.ShowParameters(ctx, id)
 		require.NoError(t, err)
-		return procedure
-	}
+
+		assertions.AssertThatObject(t, objectparametersassert.ProcedureParametersPrefetched(t, id, parameters).
+			HasAllDefaults().
+			HasAllDefaultsExplicit(),
+		)
+	})
 
 	t.Run("alter procedure: rename", func(t *testing.T) {
-		f := createProcedureForSQLHandle(t, false)
+		p, pCleanup := testClientHelper().Procedure.CreateSql(t)
+		t.Cleanup(pCleanup)
+		id := p.ID()
 
-		id := f.ID()
-		nid := testClientHelper().Ids.RandomSchemaObjectIdentifier()
-		nidWithArguments := sdk.NewSchemaObjectIdentifierWithArguments(nid.DatabaseName(), nid.SchemaName(), nid.Name(), id.ArgumentDataTypes()...)
+		nid := testClientHelper().Ids.RandomSchemaObjectIdentifierWithArguments(id.ArgumentDataTypes()...)
 
-		err := client.Procedures.Alter(ctx, sdk.NewAlterProcedureRequest(id).WithRenameTo(nid))
-		if err != nil {
-			t.Cleanup(cleanupProcedureHandle(id))
-		} else {
-			t.Cleanup(cleanupProcedureHandle(nidWithArguments))
-		}
+		err := client.Procedures.Alter(ctx, sdk.NewAlterProcedureRequest(id).WithRenameTo(nid.SchemaObjectId()))
 		require.NoError(t, err)
+		t.Cleanup(testClientHelper().Procedure.DropProcedureFunc(t, nid))
 
 		_, err = client.Procedures.ShowByID(ctx, id)
 		assert.ErrorIs(t, err, collections.ErrObjectNotFound)
 
-		e, err := client.Procedures.ShowByID(ctx, nidWithArguments)
+		e, err := client.Procedures.ShowByID(ctx, nid)
 		require.NoError(t, err)
 		require.Equal(t, nid.Name(), e.Name)
 	})
 
-	t.Run("alter procedure: set log level", func(t *testing.T) {
-		f := createProcedureForSQLHandle(t, true)
-
-		id := f.ID()
-		err := client.Procedures.Alter(ctx, sdk.NewAlterProcedureRequest(id).WithSetLogLevel("DEBUG"))
-		require.NoError(t, err)
-		assertProcedure(t, id, true)
+	t.Run("alter procedure: set and unset all for Java", func(t *testing.T) {
+		p, pCleanup := testClientHelper().Procedure.CreateJava(t)
+		t.Cleanup(pCleanup)
+		id := p.ID()
+
+		assertions.AssertThatObject(t, objectassert.Procedure(t, id).
+			HasName(id.Name()).
+			HasDescription(sdk.DefaultProcedureComment),
+		)
+
+		assertions.AssertThatObject(t, objectassert.ProcedureDetails(t, id).
+			HasExternalAccessIntegrationsNil().
+			HasSecretsNil(),
+		)
+
+		assertions.AssertThatObject(t, objectparametersassert.ProcedureParameters(t, id).
+			HasAllDefaults().
+			HasAllDefaultsExplicit(),
+		)
+
+		request := sdk.NewAlterProcedureRequest(id).WithSet(*sdk.NewProcedureSetRequest().
+			WithExternalAccessIntegrations([]sdk.AccountObjectIdentifier{externalAccessIntegration}).
+			WithSecretsList(*sdk.NewSecretsListRequest([]sdk.SecretReference{{VariableName: "abc", Name: secretId}})).
+			// TODO [SNOW-1850370]: setting any value ends with: invalid value [OFF] for parameter 'AUTO_EVENT_LOGGING'
+			// WithAutoEventLogging(sdk.AutoEventLoggingAll).
+			WithEnableConsoleOutput(true).
+			WithLogLevel(sdk.LogLevelWarn).
+			WithMetricLevel(sdk.MetricLevelAll).
+			WithTraceLevel(sdk.TraceLevelAlways).
+			WithComment("new comment"),
+		)
+
+		err := client.Procedures.Alter(ctx, request)
+		require.NoError(t, err)
+
+		assertions.AssertThatObject(t, objectassert.Procedure(t, id).
+			HasName(id.Name()).
+			HasDescription("new comment"),
+		)
+
+		assertions.AssertThatObject(t, objectassert.ProcedureDetails(t, id).
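+			// after SET, the integrations and secrets become visible in DESCRIBE output (in SHOW they stay nil for procedures, as asserted further below):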
+			HasExactlyExternalAccessIntegrations(externalAccessIntegration).
+			HasExactlySecrets(map[string]sdk.SchemaObjectIdentifier{"abc": secretId}),
+		)
+
+		assertParametersSet(t, objectparametersassert.ProcedureParameters(t, id))
+
+		unsetRequest := sdk.NewAlterProcedureRequest(id).WithUnset(*sdk.NewProcedureUnsetRequest().
+			WithExternalAccessIntegrations(true).
+			// WithAutoEventLogging(true).
+			WithEnableConsoleOutput(true).
+			WithLogLevel(true).
+			WithMetricLevel(true).
+			WithTraceLevel(true).
+			WithComment(true),
+		)
+
+		err = client.Procedures.Alter(ctx, unsetRequest)
+		require.NoError(t, err)
+
+		assertions.AssertThatObject(t, objectassert.Procedure(t, id).
+			HasName(id.Name()).
+			HasDescription(sdk.DefaultProcedureComment).
+			// both nil, because they are always nil in SHOW for procedures
+			HasExternalAccessIntegrationsNil().
+			HasSecretsNil(),
+		)
+
+		assertions.AssertThatObject(t, objectassert.ProcedureDetails(t, id).
+			HasExternalAccessIntegrationsNil().
+			// TODO [SNOW-1850370]: apparently UNSET external access integrations cleans out secrets in the describe but leaves them in SHOW
+			HasSecretsNil(),
+		)
+
+		assertions.AssertThatObject(t, objectparametersassert.ProcedureParameters(t, id).
+			HasAllDefaults().
+			HasAllDefaultsExplicit(),
+		)
+
+		unsetSecretsRequest := sdk.NewAlterProcedureRequest(id).WithSet(*sdk.NewProcedureSetRequest().
+			WithSecretsList(*sdk.NewSecretsListRequest([]sdk.SecretReference{})),
+		)
+
+		err = client.Procedures.Alter(ctx, unsetSecretsRequest)
+		require.NoError(t, err)
+
+		assertions.AssertThatObject(t, objectassert.ProcedureDetails(t, id).
+			HasSecretsNil(),
+		)
 	})
 
-	t.Run("alter procedure: set trace level", func(t *testing.T) {
-		f := createProcedureForSQLHandle(t, true)
-
-		id := f.ID()
-		err := client.Procedures.Alter(ctx, sdk.NewAlterProcedureRequest(id).WithSetTraceLevel("ALWAYS"))
-		require.NoError(t, err)
-		assertProcedure(t, id, true)
+	t.Run("alter procedure: set and unset all for SQL", func(t *testing.T) {
+		p, pCleanup := testClientHelper().Procedure.CreateSql(t)
+		t.Cleanup(pCleanup)
+		id := p.ID()
+
+		assertions.AssertThatObject(t, objectparametersassert.ProcedureParameters(t, id).
+			HasAllDefaults().
+			HasAllDefaultsExplicit(),
+		)
+
+		request := sdk.NewAlterProcedureRequest(id).WithSet(*sdk.NewProcedureSetRequest().
+			// WithAutoEventLogging(sdk.AutoEventLoggingTracing).
+			WithEnableConsoleOutput(true).
+			WithLogLevel(sdk.LogLevelWarn).
+			WithMetricLevel(sdk.MetricLevelAll).
+			WithTraceLevel(sdk.TraceLevelAlways).
+			WithComment("new comment"),
+		)
+
+		err := client.Procedures.Alter(ctx, request)
+		require.NoError(t, err)
+
+		assertions.AssertThatObject(t, objectassert.Procedure(t, id).
+			HasName(id.Name()).
+			HasDescription("new comment"),
+		)
+
+		assertParametersSet(t, objectparametersassert.ProcedureParameters(t, id))
+
+		unsetRequest := sdk.NewAlterProcedureRequest(id).WithUnset(*sdk.NewProcedureUnsetRequest().
+			// WithAutoEventLogging(true).
+			WithEnableConsoleOutput(true).
+			WithLogLevel(true).
+			WithMetricLevel(true).
+			WithTraceLevel(true).
+			WithComment(true),
+		)
+
+		err = client.Procedures.Alter(ctx, unsetRequest)
+		require.NoError(t, err)
+
+		assertions.AssertThatObject(t, objectassert.Procedure(t, id).
+			HasCreatedOnNotEmpty().
+			HasName(id.Name()).
+			HasDescription(sdk.DefaultProcedureComment),
+		)
+
+		assertions.AssertThatObject(t, objectparametersassert.ProcedureParameters(t, id).
+			HasAllDefaults().
+ HasAllDefaultsExplicit(), + ) }) - t.Run("alter procedure: set comment", func(t *testing.T) { - f := createProcedureForSQLHandle(t, true) - - id := f.ID() - err := client.Procedures.Alter(ctx, sdk.NewAlterProcedureRequest(id).WithSetComment("comment")) - require.NoError(t, err) - assertProcedure(t, id, true) - }) + t.Run("alter procedure: set execute as", func(t *testing.T) { + p, pCleanup := testClientHelper().Procedure.CreateSql(t) + t.Cleanup(pCleanup) + id := p.ID() - t.Run("alter procedure: unset comment", func(t *testing.T) { - f := createProcedureForSQLHandle(t, true) + assertions.AssertThatObject(t, objectassert.ProcedureDetails(t, id). + HasExecuteAs("OWNER"), + ) - id := f.ID() - err := client.Procedures.Alter(ctx, sdk.NewAlterProcedureRequest(id).WithUnsetComment(true)) + err := client.Procedures.Alter(ctx, sdk.NewAlterProcedureRequest(id).WithExecuteAs(*sdk.ExecuteAsPointer(sdk.ExecuteAsCaller))) require.NoError(t, err) - assertProcedure(t, id, true) - }) - t.Run("alter procedure: set execute as", func(t *testing.T) { - f := createProcedureForSQLHandle(t, true) + assertions.AssertThatObject(t, objectassert.ProcedureDetails(t, id). + HasExecuteAs("CALLER"), + ) - id := f.ID() - err := client.Procedures.Alter(ctx, sdk.NewAlterProcedureRequest(id).WithExecuteAs(*sdk.ExecuteAsPointer(sdk.ExecuteAsOwner))) + err = client.Procedures.Alter(ctx, sdk.NewAlterProcedureRequest(id).WithExecuteAs(*sdk.ExecuteAsPointer(sdk.ExecuteAsOwner))) require.NoError(t, err) - assertProcedure(t, id, true) + + assertions.AssertThatObject(t, objectassert.ProcedureDetails(t, id). + HasExecuteAs("OWNER"), + ) }) - t.Run("show procedure for SQL: without like", func(t *testing.T) { - f1 := createProcedureForSQLHandle(t, true) - f2 := createProcedureForSQLHandle(t, true) + t.Run("show procedure: without like", func(t *testing.T) { + p1, pCleanup := testClientHelper().Procedure.CreateSql(t) + t.Cleanup(pCleanup) + p2, pCleanup2 := testClientHelper().Procedure.CreateSql(t) + t.Cleanup(pCleanup2) procedures, err := client.Procedures.Show(ctx, sdk.NewShowProcedureRequest()) require.NoError(t, err) require.GreaterOrEqual(t, len(procedures), 1) - require.Contains(t, procedures, *f1) - require.Contains(t, procedures, *f2) + require.Contains(t, procedures, *p1) + require.Contains(t, procedures, *p2) }) - t.Run("show procedure for SQL: with like", func(t *testing.T) { - f1 := createProcedureForSQLHandle(t, true) - f2 := createProcedureForSQLHandle(t, true) + t.Run("show procedure: with like", func(t *testing.T) { + p1, pCleanup := testClientHelper().Procedure.CreateSql(t) + t.Cleanup(pCleanup) + p2, pCleanup2 := testClientHelper().Procedure.CreateSql(t) + t.Cleanup(pCleanup2) - procedures, err := client.Procedures.Show(ctx, sdk.NewShowProcedureRequest().WithLike(sdk.Like{Pattern: &f1.Name})) + procedures, err := client.Procedures.Show(ctx, sdk.NewShowProcedureRequest().WithLike(sdk.Like{Pattern: &p1.Name})) require.NoError(t, err) require.Equal(t, 1, len(procedures)) - require.Contains(t, procedures, *f1) - require.NotContains(t, procedures, *f2) + require.Contains(t, procedures, *p1) + require.NotContains(t, procedures, *p2) }) - t.Run("show procedure for SQL: no matches", func(t *testing.T) { - procedures, err := client.Procedures.Show(ctx, sdk.NewShowProcedureRequest().WithLike(sdk.Like{Pattern: sdk.String("non-existing-id-pattern")})) + t.Run("show procedure: no matches", func(t *testing.T) { + procedures, err := client.Procedures.Show(ctx, sdk.NewShowProcedureRequest(). 
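+			// scope the SHOW to the test schema, so the non-matching pattern below cannot pick up procedures from other tests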
+ WithIn(sdk.ExtendedIn{In: sdk.In{Schema: testClientHelper().Ids.SchemaId()}}). + WithLike(sdk.Like{Pattern: sdk.String(NonExistingSchemaObjectIdentifier.Name())})) require.NoError(t, err) require.Equal(t, 0, len(procedures)) }) - t.Run("describe procedure for SQL", func(t *testing.T) { - f := createProcedureForSQLHandle(t, true) - id := f.ID() + t.Run("describe procedure: for SQL", func(t *testing.T) { + p, pCleanup := testClientHelper().Procedure.CreateSql(t) + t.Cleanup(pCleanup) + id := p.ID() details, err := client.Procedures.Describe(ctx, id) require.NoError(t, err) - pairs := make(map[string]string) + assert.Len(t, details, 5) + + pairs := make(map[string]*string) for _, detail := range details { pairs[detail.Property] = detail.Value } - require.Equal(t, "SQL", pairs["language"]) - require.Equal(t, "CALLER", pairs["execute as"]) - require.Equal(t, "(MESSAGE VARCHAR)", pairs["signature"]) - require.Equal(t, "\n\tBEGIN\n\t\tRETURN message;\n\tEND;", pairs["body"]) + assert.Equal(t, "(x FLOAT)", *pairs["signature"]) + assert.Equal(t, "FLOAT", *pairs["returns"]) + assert.Equal(t, "SQL", *pairs["language"]) + assert.Equal(t, "\nBEGIN\n\tRETURN 3.141592654::FLOAT;\nEND;\n", *pairs["body"]) + assert.Equal(t, "OWNER", *pairs["execute as"]) + }) + + t.Run("describe procedure: for Java", func(t *testing.T) { + p, pCleanup := testClientHelper().Procedure.CreateJava(t) + t.Cleanup(pCleanup) + id := p.ID() + + details, err := client.Procedures.Describe(ctx, id) + require.NoError(t, err) + assert.Len(t, details, 12) + + pairs := make(map[string]*string) + for _, detail := range details { + pairs[detail.Property] = detail.Value + } + assert.Equal(t, "(x VARCHAR)", *pairs["signature"]) + assert.Equal(t, "VARCHAR(100)", *pairs["returns"]) + assert.Equal(t, "JAVA", *pairs["language"]) + assert.NotEmpty(t, *pairs["body"]) + assert.Equal(t, string(sdk.NullInputBehaviorCalledOnNullInput), *pairs["null handling"]) + assert.Equal(t, string(sdk.VolatileTableKind), *pairs["volatility"]) + assert.Nil(t, pairs["external_access_integration"]) + assert.Nil(t, pairs["secrets"]) + assert.Equal(t, "[]", *pairs["imports"]) + assert.Equal(t, "TestFunc.echoVarchar", *pairs["handler"]) + assert.Equal(t, "11", *pairs["runtime_version"]) + assert.Equal(t, "OWNER", *pairs["execute as"]) }) t.Run("drop procedure for SQL", func(t *testing.T) { - definition := ` - BEGIN - RETURN message; - END;` - id := testClientHelper().Ids.RandomSchemaObjectIdentifierWithArguments(sdk.DataTypeVARCHAR) - dt := sdk.NewProcedureReturnsResultDataTypeRequest(nil).WithResultDataTypeOld(sdk.DataTypeVARCHAR) - returns := sdk.NewProcedureSQLReturnsRequest().WithResultDataType(*dt).WithNotNull(true) - argument := sdk.NewProcedureArgumentRequest("message", nil).WithArgDataTypeOld(sdk.DataTypeVARCHAR) - request := sdk.NewCreateForSQLProcedureRequest(id.SchemaObjectId(), *returns, definition). - WithOrReplace(true). - WithArguments([]sdk.ProcedureArgumentRequest{*argument}). 
-		WithExecuteAs(*sdk.ExecuteAsPointer(sdk.ExecuteAsCaller))
-		err := client.Procedures.CreateForSQL(ctx, request)
+		p, pCleanup := testClientHelper().Procedure.CreateJava(t)
+		t.Cleanup(pCleanup)
+		id := p.ID()
+
+		err := client.Procedures.Drop(ctx, sdk.NewDropProcedureRequest(id))
+		require.NoError(t, err)
+	})
+
+	t.Run("show by id - same name in different schemas", func(t *testing.T) {
+		schema, schemaCleanup := testClientHelper().Schema.CreateSchema(t)
+		t.Cleanup(schemaCleanup)
+
+		dataType := testdatatypes.DataTypeFloat
+		id1 := testClientHelper().Ids.RandomSchemaObjectIdentifierWithArguments(sdk.LegacyDataTypeFrom(dataType))
+		id2 := testClientHelper().Ids.NewSchemaObjectIdentifierWithArgumentsInSchema(id1.Name(), schema.ID(), sdk.LegacyDataTypeFrom(dataType))
+
+		_, pCleanup1 := testClientHelper().Procedure.CreateSqlWithIdentifierAndArgument(t, id1.SchemaObjectId(), dataType, testClientHelper().Procedure.SampleSqlDefinition(t))
+		t.Cleanup(pCleanup1)
+		_, pCleanup2 := testClientHelper().Procedure.CreateSqlWithIdentifierAndArgument(t, id2.SchemaObjectId(), dataType, testClientHelper().Procedure.SampleSqlDefinition(t))
+		t.Cleanup(pCleanup2)
+
+		e1, err := client.Procedures.ShowByID(ctx, id1)
 		require.NoError(t, err)
+		require.Equal(t, id1, e1.ID())
+
+		e2, err := client.Procedures.ShowByID(ctx, id2)
+		require.NoError(t, err)
+		require.Equal(t, id2, e2.ID())
+	})
+
+	t.Run("show procedure by id - same name, different arguments", func(t *testing.T) {
+		dataType := testdatatypes.DataTypeFloat
+		name := testClientHelper().Ids.Alpha()
+
+		id1 := testClientHelper().Ids.NewSchemaObjectIdentifierWithArgumentsInSchema(name, testClientHelper().Ids.SchemaId(), sdk.LegacyDataTypeFrom(dataType))
+		id2 := testClientHelper().Ids.NewSchemaObjectIdentifierWithArgumentsInSchema(name, testClientHelper().Ids.SchemaId(), sdk.DataTypeInt, sdk.DataTypeVARCHAR)
+
+		e := testClientHelper().Procedure.CreateWithIdentifier(t, id1)
+		testClientHelper().Procedure.CreateWithIdentifier(t, id2)
 
-		err = client.Procedures.Drop(ctx, sdk.NewDropProcedureRequest(id))
+		es, err := client.Procedures.ShowByID(ctx, id1)
 		require.NoError(t, err)
+		require.Equal(t, *e, *es)
 	})
+
+	// This test shows the behavior of detailed types (e.g. VARCHAR(20) and NUMBER(10, 0)) on the Snowflake side for procedures.
+	// For SHOW, the data type is generalized both for the argument and the return type (to e.g. VARCHAR and NUMBER).
+	// For DESCRIBE, the data type is generalized for the argument but works weirdly for the return type: the type is generalized to the canonical one, yet we also get the attributes.
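+	// Illustrative reading of the above (an assumption derived from the assertions in the loop below, not captured output):
+	// for an argument A declared as NUMBER(36, 5), SHOW reports the generalized legacy type, e.g. `add(NUMBER) RETURN NUMBER`,
+	// while DESCRIBE reports the signature "(A NUMBER)" but the return type "NUMBER(36,5)" (canonical form with attributes kept).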
+ for _, tc := range []string{ + "NUMBER(36, 5)", + "NUMBER(36)", + "NUMBER", + "DECIMAL", + "INTEGER", + "FLOAT", + "DOUBLE", + "VARCHAR", + "VARCHAR(20)", + "CHAR", + "CHAR(10)", + "TEXT", + "BINARY", + "BINARY(1000)", + "VARBINARY", + "BOOLEAN", + "DATE", + "DATETIME", + "TIME", + "TIMESTAMP_LTZ", + "TIMESTAMP_NTZ", + "TIMESTAMP_TZ", + "VARIANT", + "OBJECT", + "ARRAY", + "GEOGRAPHY", + "GEOMETRY", + "VECTOR(INT, 16)", + "VECTOR(FLOAT, 8)", + } { + tc := tc + t.Run(fmt.Sprintf("procedure returns non detailed data types of arguments for %s", tc), func(t *testing.T) { + procName := "add" + argName := "A" + dataType, err := datatypes.ParseDataType(tc) + require.NoError(t, err) + args := []sdk.ProcedureArgumentRequest{ + *sdk.NewProcedureArgumentRequest(argName, dataType), + } + oldDataType := sdk.LegacyDataTypeFrom(dataType) + idWithArguments := testClientHelper().Ids.RandomSchemaObjectIdentifierWithArguments(oldDataType) + + packages := []sdk.ProcedurePackageRequest{*sdk.NewProcedurePackageRequest("snowflake-snowpark-python")} + definition := fmt.Sprintf("def add(%[1]s): %[1]s", argName) + + err = client.Procedures.CreateForPython(ctx, sdk.NewCreateForPythonProcedureRequest( + idWithArguments.SchemaObjectId(), + *sdk.NewProcedureReturnsRequest().WithResultDataType(*sdk.NewProcedureReturnsResultDataTypeRequest(dataType)), + "3.8", + packages, + procName, + ). + WithArguments(args). + WithProcedureDefinitionWrapped(definition), + ) + require.NoError(t, err) + + procedure, err := client.Procedures.ShowByID(ctx, idWithArguments) + require.NoError(t, err) + assert.Equal(t, []sdk.DataType{oldDataType}, procedure.ArgumentsOld) + assert.Equal(t, fmt.Sprintf("%[1]s(%[2]s) RETURN %[2]s", idWithArguments.Name(), oldDataType), procedure.ArgumentsRaw) + + details, err := client.Procedures.Describe(ctx, idWithArguments) + require.NoError(t, err) + pairs := make(map[string]string) + for _, detail := range details { + pairs[detail.Property] = *detail.Value + } + assert.Equal(t, fmt.Sprintf("(%s %s)", argName, oldDataType), pairs["signature"]) + assert.Equal(t, dataType.Canonical(), pairs["returns"]) + }) + } } func TestInt_CallProcedure(t *testing.T) { @@ -574,13 +1977,13 @@ func TestInt_CallProcedure(t *testing.T) { definition := ` BEGIN - RETURN message; + RETURN MESSAGE; END;` id := testClientHelper().Ids.RandomSchemaObjectIdentifierWithArguments(sdk.DataTypeVARCHAR) dt := sdk.NewProcedureReturnsResultDataTypeRequest(nil).WithResultDataTypeOld(sdk.DataTypeVARCHAR) returns := sdk.NewProcedureSQLReturnsRequest().WithResultDataType(*dt).WithNotNull(true) - argument := sdk.NewProcedureArgumentRequest("message", nil).WithArgDataTypeOld(sdk.DataTypeVARCHAR) - request := sdk.NewCreateForSQLProcedureRequest(id.SchemaObjectId(), *returns, definition). + argument := sdk.NewProcedureArgumentRequest("MESSAGE", nil).WithArgDataTypeOld(sdk.DataTypeVARCHAR) + request := sdk.NewCreateForSQLProcedureRequestDefinitionWrapped(id.SchemaObjectId(), *returns, definition). WithSecure(true). WithOrReplace(true). WithArguments([]sdk.ProcedureArgumentRequest{*argument}). 
@@ -603,7 +2006,7 @@ func TestInt_CallProcedure(t *testing.T) { t.Run("call procedure for SQL: argument names", func(t *testing.T) { f := createProcedureForSQLHandle(t, true) - err := client.Procedures.Call(ctx, sdk.NewCallProcedureRequest(f.ID().SchemaObjectId()).WithCallArguments([]string{"message => 'hi'"})) + err := client.Procedures.Call(ctx, sdk.NewCallProcedureRequest(f.ID().SchemaObjectId()).WithCallArguments([]string{"MESSAGE => 'hi'"})) require.NoError(t, err) }) @@ -632,7 +2035,7 @@ func TestInt_CallProcedure(t *testing.T) { request := sdk.NewCreateForJavaProcedureRequest(id.SchemaObjectId(), *returns, "11", packages, "Filter.filterByRole"). WithOrReplace(true). WithArguments([]sdk.ProcedureArgumentRequest{*arg1, *arg2}). - WithProcedureDefinition(definition) + WithProcedureDefinitionWrapped(definition) err := client.Procedures.CreateForJava(ctx, request) require.NoError(t, err) t.Cleanup(cleanupProcedureHandle(id)) @@ -666,7 +2069,7 @@ func TestInt_CallProcedure(t *testing.T) { request := sdk.NewCreateForScalaProcedureRequest(id.SchemaObjectId(), *returns, "2.12", packages, "Filter.filterByRole"). WithOrReplace(true). WithArguments([]sdk.ProcedureArgumentRequest{*arg1, *arg2}). - WithProcedureDefinition(definition) + WithProcedureDefinitionWrapped(definition) err := client.Procedures.CreateForScala(ctx, request) require.NoError(t, err) t.Cleanup(cleanupProcedureHandle(id)) @@ -693,7 +2096,7 @@ func TestInt_CallProcedure(t *testing.T) { return "Failed: " + err; // Return a success/error indicator. }` arg := sdk.NewProcedureArgumentRequest("FLOAT_PARAM1", nil).WithArgDataTypeOld(sdk.DataTypeFloat) - request := sdk.NewCreateForJavaScriptProcedureRequest(id.SchemaObjectId(), nil, definition). + request := sdk.NewCreateForJavaScriptProcedureRequestDefinitionWrapped(id.SchemaObjectId(), nil, definition). WithResultDataTypeOld(sdk.DataTypeString). WithOrReplace(true). WithArguments([]sdk.ProcedureArgumentRequest{*arg}). @@ -713,7 +2116,7 @@ func TestInt_CallProcedure(t *testing.T) { id := sdk.NewSchemaObjectIdentifierWithArguments(databaseId.Name(), schemaId.Name(), name) definition := `return 3.1415926;` - request := sdk.NewCreateForJavaScriptProcedureRequest(id.SchemaObjectId(), nil, definition).WithResultDataTypeOld(sdk.DataTypeFloat).WithNotNull(true).WithOrReplace(true) + request := sdk.NewCreateForJavaScriptProcedureRequestDefinitionWrapped(id.SchemaObjectId(), nil, definition).WithResultDataTypeOld(sdk.DataTypeFloat).WithNotNull(true).WithOrReplace(true) err := client.Procedures.CreateForJavaScript(ctx, request) require.NoError(t, err) t.Cleanup(cleanupProcedureHandle(id)) @@ -739,7 +2142,7 @@ def filter_by_role(session, name, role): request := sdk.NewCreateForPythonProcedureRequest(id.SchemaObjectId(), *returns, "3.8", packages, "filter_by_role"). WithOrReplace(true). WithArguments([]sdk.ProcedureArgumentRequest{*arg1, *arg2}). 
- WithProcedureDefinition(definition) + WithProcedureDefinitionWrapped(definition) err := client.Procedures.CreateForPython(ctx, request) require.NoError(t, err) t.Cleanup(cleanupProcedureHandle(id)) @@ -876,13 +2279,13 @@ func TestInt_CreateAndCallProcedures(t *testing.T) { t.Run("create and call procedure for SQL: argument positions", func(t *testing.T) { definition := ` BEGIN - RETURN message; + RETURN MESSAGE; END;` name := testClientHelper().Ids.RandomAccountObjectIdentifier() dt := sdk.NewProcedureReturnsResultDataTypeRequest(nil).WithResultDataTypeOld(sdk.DataTypeVARCHAR) returns := sdk.NewProcedureReturnsRequest().WithResultDataType(*dt) - argument := sdk.NewProcedureArgumentRequest("message", nil).WithArgDataTypeOld(sdk.DataTypeVARCHAR) + argument := sdk.NewProcedureArgumentRequest("MESSAGE", nil).WithArgDataTypeOld(sdk.DataTypeVARCHAR) request := sdk.NewCreateAndCallForSQLProcedureRequest(name, *returns, definition, name). WithArguments([]sdk.ProcedureArgumentRequest{*argument}). WithCallArguments([]string{"message => 'hi'"}) @@ -949,155 +2352,3 @@ def filter_by_role(session, name, role): require.NoError(t, err) }) } - -func TestInt_ProceduresShowByID(t *testing.T) { - client := testClient(t) - ctx := testContext(t) - - cleanupProcedureHandle := func(id sdk.SchemaObjectIdentifierWithArguments) func() { - return func() { - err := client.Procedures.Drop(ctx, sdk.NewDropProcedureRequest(id)) - if errors.Is(err, sdk.ErrObjectNotExistOrAuthorized) { - return - } - require.NoError(t, err) - } - } - - createProcedureForSQLHandle := func(t *testing.T, id sdk.SchemaObjectIdentifierWithArguments) { - t.Helper() - - definition := ` - BEGIN - RETURN message; - END;` - dt := sdk.NewProcedureReturnsResultDataTypeRequest(nil).WithResultDataTypeOld(sdk.DataTypeVARCHAR) - returns := sdk.NewProcedureSQLReturnsRequest().WithResultDataType(*dt).WithNotNull(true) - argument := sdk.NewProcedureArgumentRequest("message", nil).WithArgDataTypeOld(sdk.DataTypeVARCHAR) - request := sdk.NewCreateForSQLProcedureRequest(id.SchemaObjectId(), *returns, definition). - WithArguments([]sdk.ProcedureArgumentRequest{*argument}). 
- WithExecuteAs(*sdk.ExecuteAsPointer(sdk.ExecuteAsCaller)) - err := client.Procedures.CreateForSQL(ctx, request) - require.NoError(t, err) - t.Cleanup(cleanupProcedureHandle(id)) - } - - t.Run("show by id - same name in different schemas", func(t *testing.T) { - schema, schemaCleanup := testClientHelper().Schema.CreateSchema(t) - t.Cleanup(schemaCleanup) - - id1 := testClientHelper().Ids.RandomSchemaObjectIdentifierWithArguments(sdk.DataTypeVARCHAR) - id2 := testClientHelper().Ids.NewSchemaObjectIdentifierWithArgumentsInSchema(id1.Name(), schema.ID(), sdk.DataTypeVARCHAR) - - createProcedureForSQLHandle(t, id1) - createProcedureForSQLHandle(t, id2) - - e1, err := client.Procedures.ShowByID(ctx, id1) - require.NoError(t, err) - require.Equal(t, id1, e1.ID()) - - e2, err := client.Procedures.ShowByID(ctx, id2) - require.NoError(t, err) - require.Equal(t, id2, e2.ID()) - }) - - t.Run("show procedure by id - different name, same arguments", func(t *testing.T) { - id1 := testClientHelper().Ids.RandomSchemaObjectIdentifierWithArguments(sdk.DataTypeInt, sdk.DataTypeFloat, sdk.DataTypeVARCHAR) - id2 := testClientHelper().Ids.RandomSchemaObjectIdentifierWithArguments(sdk.DataTypeInt, sdk.DataTypeFloat, sdk.DataTypeVARCHAR) - e := testClientHelper().Procedure.CreateWithIdentifier(t, id1) - testClientHelper().Procedure.CreateWithIdentifier(t, id2) - - es, err := client.Procedures.ShowByID(ctx, id1) - require.NoError(t, err) - require.Equal(t, *e, *es) - }) - - t.Run("show procedure by id - same name, different arguments", func(t *testing.T) { - name := testClientHelper().Ids.Alpha() - id1 := testClientHelper().Ids.NewSchemaObjectIdentifierWithArgumentsInSchema(name, testClientHelper().Ids.SchemaId(), sdk.DataTypeInt, sdk.DataTypeFloat, sdk.DataTypeVARCHAR) - id2 := testClientHelper().Ids.NewSchemaObjectIdentifierWithArgumentsInSchema(name, testClientHelper().Ids.SchemaId(), sdk.DataTypeInt, sdk.DataTypeVARCHAR) - e := testClientHelper().Procedure.CreateWithIdentifier(t, id1) - testClientHelper().Procedure.CreateWithIdentifier(t, id2) - - es, err := client.Procedures.ShowByID(ctx, id1) - require.NoError(t, err) - require.Equal(t, *e, *es) - }) - - // This test shows behavior of detailed types (e.g. VARCHAR(20) and NUMBER(10, 0)) on Snowflake side for procedures. - // For SHOW, data type is generalized both for argument and return type (to e.g. VARCHAR and NUMBER). - // FOR DESCRIBE, data type is generalized for argument and works weirdly for the return type: type is generalized to the canonical one, but we also get the attributes. 
- for _, tc := range []string{
- "NUMBER(36, 5)",
- "NUMBER(36)",
- "NUMBER",
- "DECIMAL",
- "INTEGER",
- "FLOAT",
- "DOUBLE",
- "VARCHAR",
- "VARCHAR(20)",
- "CHAR",
- "CHAR(10)",
- "TEXT",
- "BINARY",
- "BINARY(1000)",
- "VARBINARY",
- "BOOLEAN",
- "DATE",
- "DATETIME",
- "TIME",
- "TIMESTAMP_LTZ",
- "TIMESTAMP_NTZ",
- "TIMESTAMP_TZ",
- "VARIANT",
- "OBJECT",
- "ARRAY",
- "GEOGRAPHY",
- "GEOMETRY",
- "VECTOR(INT, 16)",
- "VECTOR(FLOAT, 8)",
- } {
- tc := tc
- t.Run(fmt.Sprintf("procedure returns non detailed data types of arguments for %s", tc), func(t *testing.T) {
- procName := "add"
- argName := "A"
- dataType, err := datatypes.ParseDataType(tc)
- require.NoError(t, err)
- args := []sdk.ProcedureArgumentRequest{
- *sdk.NewProcedureArgumentRequest(argName, dataType),
- }
- oldDataType := sdk.LegacyDataTypeFrom(dataType)
- idWithArguments := testClientHelper().Ids.RandomSchemaObjectIdentifierWithArguments(oldDataType)
-
- packages := []sdk.ProcedurePackageRequest{*sdk.NewProcedurePackageRequest("snowflake-snowpark-python")}
- definition := fmt.Sprintf("def add(%[1]s): %[1]s", argName)
-
- err = client.Procedures.CreateForPython(ctx, sdk.NewCreateForPythonProcedureRequest(
- idWithArguments.SchemaObjectId(),
- *sdk.NewProcedureReturnsRequest().WithResultDataType(*sdk.NewProcedureReturnsResultDataTypeRequest(dataType)),
- "3.8",
- packages,
- procName,
- ).
- WithArguments(args).
- WithProcedureDefinition(definition),
- )
- require.NoError(t, err)
-
- procedure, err := client.Procedures.ShowByID(ctx, idWithArguments)
- require.NoError(t, err)
- assert.Equal(t, []sdk.DataType{oldDataType}, procedure.ArgumentsOld)
- assert.Equal(t, fmt.Sprintf("%[1]s(%[2]s) RETURN %[2]s", idWithArguments.Name(), oldDataType), procedure.ArgumentsRaw)
-
- details, err := client.Procedures.Describe(ctx, idWithArguments)
- require.NoError(t, err)
- pairs := make(map[string]string)
- for _, detail := range details {
- pairs[detail.Property] = detail.Value
- }
- assert.Equal(t, fmt.Sprintf("(%s %s)", argName, oldDataType), pairs["signature"])
- assert.Equal(t, dataType.Canonical(), pairs["returns"])
- })
- }
-}

From 73b7e74bf44b1ae6ddc78cac752f2b7febb836cd Mon Sep 17 00:00:00 2001
From: Jakub Michalak
Date: Tue, 10 Dec 2024 16:46:36 +0100
Subject: [PATCH 2/2] fix: Minor fixes 2 (#3230)

- prove that using a network policy with lowercase characters in an OAuth
  integration fails in Snowflake
- update the migration guide regarding migrating provider configuration
- rename `datasource` to `data source`
- add missing examples and fix some examples in the docs
- add notes about missing fields (they will be handled in SNOW-1844996)
- adjust documentation for provider configuration
- improve quoting in import and resource configuration examples
- some examples of replication and grants were left unchanged because they
  show a nicer setup, in my opinion; this is open for discussion
- add info about external changes not being detected for certain fields
  or resource types
- change the infobox format for some notes (`[!WARNING]` -> `!> Note`)
  because the former isn't rendered properly in the registry
- fix empty `using` in view masking policies
- add links to the documentation of the referenced resources
- adjust enum validations to use `sdkValidation` with an SDK converter
  function instead of `StringInSlice`
- address
https://github.com/Snowflake-Labs/terraform-provider-snowflake/pull/3247
  by improving the documentation
- update lists in `v1-preparations` files

## Test Plan
* [ ] acceptance tests
* [x] integration tests

## References
https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/3198 https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/3251 ## TODO (next PRs): - check the migration guide once again --- MIGRATION_GUIDE.md | 13 ++- docs/data-sources/connections.md | 16 ++- docs/data-sources/database_roles.md | 6 +- docs/data-sources/databases.md | 4 +- docs/data-sources/masking_policies.md | 4 +- docs/data-sources/network_policies.md | 6 +- docs/data-sources/resource_monitors.md | 4 +- docs/data-sources/roles.md | 7 +- docs/data-sources/row_access_policies.md | 4 +- docs/data-sources/schemas.md | 7 +- docs/data-sources/secrets.md | 4 +- docs/data-sources/security_integrations.md | 4 +- docs/data-sources/streamlits.md | 4 +- docs/data-sources/streams.md | 4 +- docs/data-sources/tags.md | 4 +- docs/data-sources/users.md | 4 +- docs/data-sources/views.md | 4 +- docs/data-sources/warehouses.md | 4 +- docs/guides/unassigning_policies.md | 65 ++++++++++++ docs/index.md | 20 ++-- docs/resources/account_role.md | 4 +- ...tegration_with_authorization_code_grant.md | 8 +- ...ion_integration_with_client_credentials.md | 8 +- ...hentication_integration_with_jwt_bearer.md | 8 +- docs/resources/authentication_policy.md | 9 +- docs/resources/database.md | 27 +++-- docs/resources/database_role.md | 8 +- docs/resources/external_oauth_integration.md | 14 +-- docs/resources/external_volume.md | 4 +- docs/resources/grant_account_role.md | 14 +-- docs/resources/grant_application_role.md | 2 +- docs/resources/grant_database_role.md | 8 +- docs/resources/grant_ownership.md | 12 +-- .../grant_privileges_to_account_role.md | 4 +- .../grant_privileges_to_database_role.md | 4 +- docs/resources/grant_privileges_to_share.md | 12 +-- docs/resources/legacy_service_user.md | 8 +- docs/resources/masking_policy.md | 9 +- docs/resources/network_policy.md | 17 +-- docs/resources/network_rule.md | 3 +- .../oauth_integration_for_custom_clients.md | 28 ++--- ...th_integration_for_partner_applications.md | 8 +- docs/resources/password_policy.md | 3 +- docs/resources/primary_connection.md | 10 +- docs/resources/resource_monitor.md | 11 +- docs/resources/role.md | 2 +- docs/resources/row_access_policy.md | 10 +- docs/resources/saml2_integration.md | 10 +- docs/resources/schema.md | 11 +- docs/resources/scim_integration.md | 12 ++- docs/resources/secondary_connection.md | 12 +-- docs/resources/secondary_database.md | 11 +- .../secret_with_authorization_code_grant.md | 12 +-- .../secret_with_basic_authentication.md | 6 +- .../secret_with_client_credentials.md | 12 +-- docs/resources/secret_with_generic_string.md | 6 +- docs/resources/service_user.md | 6 +- docs/resources/shared_database.md | 11 +- docs/resources/stream.md | 2 +- docs/resources/stream_on_directory_table.md | 28 ++--- docs/resources/stream_on_external_table.md | 35 ++---- docs/resources/stream_on_table.md | 27 +++-- docs/resources/stream_on_view.md | 25 ++--- docs/resources/streamlit.md | 26 +++-- docs/resources/tag.md | 16 +-- .../tag_masking_policy_association.md | 2 +- docs/resources/task.md | 23 ++-- docs/resources/user.md | 12 +-- docs/resources/view.md | 27 ++--- docs/resources/warehouse.md | 38 +++++-- .../snowflake_connections/data-source.tf | 12 +++ .../snowflake_database_roles/data-source.tf | 2 +- .../snowflake_network_policies/data-source.tf | 2 +- .../snowflake_account_role/import.sh | 2 +- .../import.sh | 2 +- .../import.sh | 2 +- .../import.sh | 2 +- .../resources/snowflake_database/import.sh | 2 +- .../resources/snowflake_database/resource.tf 
| 16 +-- .../import.sh | 2 +- .../resource.tf | 4 +- .../snowflake_grant_account_role/resource.tf | 8 +- .../snowflake_network_policy/import.sh | 2 +- .../snowflake_network_policy/resource.tf | 6 +- .../import.sh | 2 +- .../resource.tf | 10 +- .../resource.tf | 2 +- .../snowflake_primary_connection/import.sh | 2 +- .../snowflake_primary_connection/resource.tf | 2 +- .../snowflake_resource_monitor/import.sh | 3 +- .../snowflake_resource_monitor/resource.tf | 4 +- .../snowflake_row_access_policy/resource.tf | 1 + .../snowflake_saml2_integration/import.sh | 2 +- examples/resources/snowflake_schema/import.sh | 1 - .../snowflake_scim_integration/import.sh | 2 +- .../snowflake_scim_integration/resource.tf | 4 +- .../snowflake_secondary_connection/import.sh | 2 +- .../resource.tf | 4 +- .../snowflake_secondary_database/import.sh | 2 +- .../resource.tf | 4 +- .../resource.tf | 4 +- .../snowflake_shared_database/import.sh | 2 +- .../resource.tf | 16 +-- .../resource.tf | 23 +--- .../snowflake_stream_on_table/resource.tf | 15 ++- .../snowflake_stream_on_view/resource.tf | 13 +-- .../resources/snowflake_streamlit/import.sh | 1 - .../resources/snowflake_streamlit/resource.tf | 7 +- examples/resources/snowflake_tag/resource.tf | 2 +- examples/resources/snowflake_task/import.sh | 3 +- examples/resources/snowflake_task/resource.tf | 4 +- examples/resources/snowflake_user/resource.tf | 4 +- examples/resources/snowflake_view/resource.tf | 9 +- .../resources/snowflake_warehouse/import.sh | 2 +- .../resources/snowflake_warehouse/resource.tf | 26 ++++- pkg/datasources/connections.go | 2 +- .../connections_acceptance_test.go | 31 ++++++ pkg/datasources/database_roles.go | 2 +- pkg/datasources/databases.go | 2 +- pkg/datasources/masking_policies.go | 2 +- pkg/datasources/network_policies.go | 2 +- pkg/datasources/resource_monitors.go | 2 +- pkg/datasources/roles.go | 2 +- pkg/datasources/row_access_policies.go | 2 +- pkg/datasources/schemas.go | 2 +- pkg/datasources/secrets.go | 2 +- pkg/datasources/security_integrations.go | 2 +- pkg/datasources/streamlits.go | 2 +- pkg/datasources/streams.go | 2 +- pkg/datasources/tags.go | 2 +- pkg/datasources/users.go | 2 +- pkg/datasources/views.go | 2 +- pkg/datasources/warehouses.go | 2 +- pkg/internal/tracking/context.go | 2 +- .../api_authentication_integration_common.go | 2 +- pkg/resources/custom_diffs.go | 2 +- pkg/resources/database.go | 4 +- pkg/resources/database_commons.go | 18 ++-- pkg/resources/database_role.go | 6 +- pkg/resources/diff_suppressions.go | 21 ++++ pkg/resources/diff_suppressions_test.go | 82 ++++++++++++++ pkg/resources/doc_helpers.go | 15 ++- pkg/resources/external_oauth_integration.go | 4 +- pkg/resources/grant_account_role.go | 6 +- pkg/resources/grant_application_role.go | 2 +- pkg/resources/grant_database_role.go | 8 +- pkg/resources/grant_ownership.go | 8 +- .../grant_privileges_to_account_role.go | 4 +- .../grant_privileges_to_database_role.go | 4 +- pkg/resources/grant_privileges_to_share.go | 12 +-- pkg/resources/network_policy.go | 4 +- pkg/resources/network_rule.go | 1 + .../oauth_integration_for_custom_clients.go | 10 +- ...th_integration_for_partner_applications.go | 2 +- pkg/resources/primary_connection.go | 2 +- pkg/resources/resource_monitor.go | 5 +- pkg/resources/saml2_integration.go | 2 +- pkg/resources/schema.go | 7 +- pkg/resources/scim_integration.go | 15 +-- .../scim_integration_acceptance_test.go | 4 +- pkg/resources/secondary_connection.go | 2 +- pkg/resources/secondary_database.go | 3 +- 
...ret_with_oauth_authorization_code_grant.go | 2 +- .../secret_with_oauth_client_credentials.go | 2 +- pkg/resources/shared_database.go | 9 +- .../shared_database_acceptance_test.go | 7 +- pkg/resources/stream_common.go | 2 +- pkg/resources/stream_on_directory_table.go | 2 +- pkg/resources/stream_on_external_table.go | 2 +- pkg/resources/stream_on_table.go | 2 +- pkg/resources/stream_on_view.go | 2 +- pkg/resources/streamlit.go | 17 +-- pkg/resources/tag.go | 5 +- pkg/resources/task.go | 4 +- pkg/resources/task_parameters.go | 2 +- .../testdata/TestAcc_View/columns/test.tf | 2 +- .../TestAcc_View/columns/variables.tf | 3 +- pkg/resources/user.go | 6 +- pkg/resources/view.go | 17 ++- pkg/resources/view_acceptance_test.go | 100 ++++++++++++++++++ pkg/resources/warehouse.go | 2 +- pkg/sdk/testint/client_integration_test.go | 5 +- ...urity_integrations_gen_integration_test.go | 30 ++++++ templates/data-sources/roles.md.tmpl | 3 + templates/data-sources/schemas.md.tmpl | 3 + templates/guides/unassigning_policies.md.tmpl | 65 ++++++++++++ templates/index.md.tmpl | 20 ++-- ...tion_with_authorization_code_grant.md.tmpl | 2 + ...ntegration_with_client_credentials.md.tmpl | 2 + ...cation_integration_with_jwt_bearer.md.tmpl | 2 + .../resources/authentication_policy.md.tmpl | 3 +- templates/resources/database.md.tmpl | 5 + .../external_oauth_integration.md.tmpl | 2 + templates/resources/masking_policy.md.tmpl | 3 +- templates/resources/network_policy.md.tmpl | 5 +- templates/resources/network_rule.md.tmpl | 35 ++++++ ...uth_integration_for_custom_clients.md.tmpl | 4 + ...tegration_for_partner_applications.md.tmpl | 2 + templates/resources/password_policy.md.tmpl | 3 +- .../resources/primary_connection.md.tmpl | 2 +- templates/resources/row_access_policy.md.tmpl | 3 +- templates/resources/saml2_integration.md.tmpl | 2 + templates/resources/schema.md.tmpl | 6 ++ templates/resources/scim_integration.md.tmpl | 2 + .../resources/secondary_connection.md.tmpl | 2 +- .../resources/secondary_database.md.tmpl | 5 + templates/resources/shared_database.md.tmpl | 5 + .../stream_on_directory_table.md.tmpl | 2 + .../stream_on_external_table.md.tmpl | 2 +- templates/resources/stream_on_table.md.tmpl | 2 +- templates/resources/stream_on_view.md.tmpl | 2 + templates/resources/streamlit.md.tmpl | 6 ++ templates/resources/tag.md.tmpl | 2 + templates/resources/view.md.tmpl | 4 +- templates/resources/warehouse.md.tmpl | 6 ++ v1-preparations/ESSENTIAL_GA_OBJECTS.MD | 2 +- .../LIST_OF_PREVIEW_FEATURES_FOR_V1.md | 2 + .../LIST_OF_STABLE_RESOURCES_FOR_V1.md | 11 +- 218 files changed, 1195 insertions(+), 632 deletions(-) create mode 100644 docs/guides/unassigning_policies.md create mode 100644 templates/guides/unassigning_policies.md.tmpl create mode 100644 templates/resources/network_rule.md.tmpl diff --git a/MIGRATION_GUIDE.md b/MIGRATION_GUIDE.md index dbfef7d7f6..2d14a2d000 100644 --- a/MIGRATION_GUIDE.md +++ b/MIGRATION_GUIDE.md @@ -275,6 +275,9 @@ Also, we added diff suppress function that prevents Terraform from showing diffe No change is required, the state will be migrated automatically. +#### *(breaking change)* Required warehouse +For this resource, the provider now uses [tag references](https://docs.snowflake.com/en/sql-reference/functions/tag_references) to get information about masking policies attached to tags. This function requires a warehouse in the connection. Please, make sure you have either set a `DEFAULT_WAREHOUSE` for the user, or specified a warehouse in the provider configuration. 
+
 ## v0.97.0 ➞ v0.98.0

 ### *(new feature)* snowflake_connections datasource
@@ -333,7 +336,7 @@ On our road to v1, we have decided to rework configuration to address the most c
 We have added new fields to match the ones in [the driver](https://pkg.go.dev/github.com/snowflakedb/gosnowflake#Config) and to simplify setting account name. Specifically:
 - `include_retry_reason`, `max_retry_count`, `driver_tracing`, `tmp_directory_path` and `disable_console_login` are the new fields that are supported in the driver
 - `disable_saml_url_check` will be added to the provider after upgrading the driver
-- `account_name` and `organization_name` were added to improve handling account names. Read more in [docs](https://docs.snowflake.com/en/user-guide/admin-account-identifier#using-an-account-name-as-an-identifier).
+- `account_name` and `organization_name` were added to improve handling account names. Execute `SELECT CURRENT_ORGANIZATION_NAME(), CURRENT_ACCOUNT_NAME();` to get the required values. Read more in [docs](https://docs.snowflake.com/en/user-guide/admin-account-identifier#using-an-account-name-as-an-identifier).

 #### *(behavior change)* changed configuration of driver log level
 To be more consistent with other configuration options, we have decided to add `driver_tracing` to the configuration schema. This value can also be configured by `SNOWFLAKE_DRIVER_TRACING` environmental variable and by `drivertracing` field in the TOML file. The previous `SF_TF_GOSNOWFLAKE_LOG_LEVEL` environmental variable is not supported now, and was removed from the provider.
@@ -354,6 +357,12 @@ provider "snowflake" {
 }
 ```

+This change may cause the connection host URL to change. If you get errors like
+```
+Error: open snowflake connection: Post "https://ORGANIZATION-ACCOUNT.snowflakecomputing.com:443/session/v1/login-request?requestId=[guid]&request_guid=[guid]&roleName=myrole": EOF
+```
+make sure that the host `ORGANIZATION-ACCOUNT.snowflakecomputing.com` can be reached from your network (i.e. is not blocked by a firewall).
+
 #### *(behavior change)* changed behavior of some fields
 For the fields that are not deprecated, we focused on improving validations and documentation. Also, we adjusted some fields to match our [driver's](https://github.com/snowflakedb/gosnowflake) defaults. Specifically:
 - Relaxed validations for enum fields like `protocol` and `authenticator`. Now, the case on such fields is ignored.
@@ -785,7 +794,7 @@ Removed fields:
 The value of these field will be removed from the state automatically.

 #### *(breaking change)* Required warehouse
-For this resource, the provider now uses [policy references](https://docs.snowflake.com/en/sql-reference/functions/policy_references) which requires a warehouse in the connection. Please, make sure you have either set a DEFAULT_WAREHOUSE for the user, or specified a warehouse in the provider configuration.
+For this resource, the provider now uses [policy references](https://docs.snowflake.com/en/sql-reference/functions/policy_references) which requires a warehouse in the connection. Please, make sure you have either set a `DEFAULT_WAREHOUSE` for the user, or specified a warehouse in the provider configuration.
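+
+For example, a minimal provider configuration with a warehouse set (a sketch; the warehouse name is a placeholder):
+```
+provider "snowflake" {
+  # ... other connection options ...
+  warehouse = "MY_WAREHOUSE"
+}
+```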
### Identifier changes diff --git a/docs/data-sources/connections.md b/docs/data-sources/connections.md index 5dff3c63ad..e794d01f9f 100644 --- a/docs/data-sources/connections.md +++ b/docs/data-sources/connections.md @@ -2,14 +2,14 @@ page_title: "snowflake_connections Data Source - terraform-provider-snowflake" subcategory: "" description: |- - Datasource used to get details of filtered connections. Filtering is aligned with the current possibilities for SHOW CONNECTIONS https://docs.snowflake.com/en/sql-reference/sql/show-connections query. The results of SHOW is encapsulated in one output collection connections. + Data source used to get details of filtered connections. Filtering is aligned with the current possibilities for SHOW CONNECTIONS https://docs.snowflake.com/en/sql-reference/sql/show-connections query. The results of SHOW is encapsulated in one output collection connections. --- !> **V1 release candidate** This data source is a release candidate for the V1. We do not expect significant changes in it before the V1. We will welcome any feedback and adjust the data source if needed. Any errors reported will be resolved with a higher priority. We encourage checking this data source out before the V1 release. Please follow the [migration guide](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/MIGRATION_GUIDE.md#v0970--v0980) to use it. # snowflake_connections (Data Source) -Datasource used to get details of filtered connections. Filtering is aligned with the current possibilities for [SHOW CONNECTIONS](https://docs.snowflake.com/en/sql-reference/sql/show-connections) query. The results of SHOW is encapsulated in one output collection `connections`. +Data source used to get details of filtered connections. Filtering is aligned with the current possibilities for [SHOW CONNECTIONS](https://docs.snowflake.com/en/sql-reference/sql/show-connections) query. The results of SHOW is encapsulated in one output collection `connections`. ## Example Usage @@ -39,6 +39,18 @@ data "snowflake_connections" "like_prefix" { output "like_prefix_output" { value = data.snowflake_connections.like_prefix.connections } + +# Ensure the number of connections is equal to at exactly one element (with the use of check block) +check "connection_check" { + data "snowflake_connections" "assert_with_check_block" { + like = "connection-name" + } + + assert { + condition = length(data.snowflake_connections.assert_with_check_block.connections) == 1 + error_message = "connections filtered by '${data.snowflake_connections.assert_with_check_block.like}' returned ${length(data.snowflake_connections.assert_with_check_block.connections)} connections where one was expected" + } +} ``` diff --git a/docs/data-sources/database_roles.md b/docs/data-sources/database_roles.md index baeb080b28..ccdfd274b7 100644 --- a/docs/data-sources/database_roles.md +++ b/docs/data-sources/database_roles.md @@ -2,14 +2,14 @@ page_title: "snowflake_database_roles Data Source - terraform-provider-snowflake" subcategory: "" description: |- - Datasource used to get details of filtered database roles. Filtering is aligned with the current possibilities for SHOW DATABASE ROLES https://docs.snowflake.com/en/sql-reference/sql/show-database-roles query (like and limit are supported). The results of SHOW is encapsulated in show_output collection. + Data source used to get details of filtered database roles. 
Filtering is aligned with the current possibilities for SHOW DATABASE ROLES https://docs.snowflake.com/en/sql-reference/sql/show-database-roles query (like and limit are supported). The results of SHOW is encapsulated in show_output collection. --- !> **V1 release candidate** This data source was reworked and is a release candidate for the V1. We do not expect significant changes in it before the V1. We will welcome any feedback and adjust the data source if needed. Any errors reported will be resolved with a higher priority. We encourage checking this data source out before the V1 release. Please follow the [migration guide](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/MIGRATION_GUIDE.md#v0920--v0930) to use it. # snowflake_database_roles (Data Source) -Datasource used to get details of filtered database roles. Filtering is aligned with the current possibilities for [SHOW DATABASE ROLES](https://docs.snowflake.com/en/sql-reference/sql/show-database-roles) query (`like` and `limit` are supported). The results of SHOW is encapsulated in show_output collection. +Data source used to get details of filtered database roles. Filtering is aligned with the current possibilities for [SHOW DATABASE ROLES](https://docs.snowflake.com/en/sql-reference/sql/show-database-roles) query (`like` and `limit` are supported). The results of SHOW is encapsulated in show_output collection. ## Example Usage @@ -60,7 +60,7 @@ data "snowflake_database_roles" "assert_with_postcondition" { # Ensure the number of database roles is equal to at exactly one element (with the use of check block) check "database_role_check" { - data "snowflake_resource_monitors" "assert_with_check_block" { + data "snowflake_database_roles" "assert_with_check_block" { in_database = "database-name" like = "database_role-name" } diff --git a/docs/data-sources/databases.md b/docs/data-sources/databases.md index a32b9f9da9..691ded55b1 100644 --- a/docs/data-sources/databases.md +++ b/docs/data-sources/databases.md @@ -2,14 +2,14 @@ page_title: "snowflake_databases Data Source - terraform-provider-snowflake" subcategory: "" description: |- - Datasource used to get details of filtered databases. Filtering is aligned with the current possibilities for SHOW DATABASES https://docs.snowflake.com/en/sql-reference/sql/show-databases query (like, starts_with, and limit are all supported). The results of SHOW, DESCRIBE, and SHOW PARAMETERS IN are encapsulated in one output collection. + Data source used to get details of filtered databases. Filtering is aligned with the current possibilities for SHOW DATABASES https://docs.snowflake.com/en/sql-reference/sql/show-databases query (like, starts_with, and limit are all supported). The results of SHOW, DESCRIBE, and SHOW PARAMETERS IN are encapsulated in one output collection. --- !> **V1 release candidate** This data source was reworked and is a release candidate for the V1. We do not expect significant changes in it before the V1. We will welcome any feedback and adjust the data source if needed. Any errors reported will be resolved with a higher priority. We encourage checking this data source out before the V1 release. Please follow the [migration guide](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/MIGRATION_GUIDE.md#v0920--v0930) to use it. # snowflake_databases (Data Source) -Datasource used to get details of filtered databases. 
Filtering is aligned with the current possibilities for [SHOW DATABASES](https://docs.snowflake.com/en/sql-reference/sql/show-databases) query (`like`, `starts_with`, and `limit` are all supported). The results of SHOW, DESCRIBE, and SHOW PARAMETERS IN are encapsulated in one output collection. +Data source used to get details of filtered databases. Filtering is aligned with the current possibilities for [SHOW DATABASES](https://docs.snowflake.com/en/sql-reference/sql/show-databases) query (`like`, `starts_with`, and `limit` are all supported). The results of SHOW, DESCRIBE, and SHOW PARAMETERS IN are encapsulated in one output collection. ## Example Usage diff --git a/docs/data-sources/masking_policies.md b/docs/data-sources/masking_policies.md index cc7e257c56..1facb000b9 100644 --- a/docs/data-sources/masking_policies.md +++ b/docs/data-sources/masking_policies.md @@ -2,14 +2,14 @@ page_title: "snowflake_masking_policies Data Source - terraform-provider-snowflake" subcategory: "" description: |- - Datasource used to get details of filtered masking policies. Filtering is aligned with the current possibilities for SHOW MASKING POLICIES https://docs.snowflake.com/en/sql-reference/sql/show-masking-policies query. The results of SHOW and DESCRIBE are encapsulated in one output collection masking_policies. + Data source used to get details of filtered masking policies. Filtering is aligned with the current possibilities for SHOW MASKING POLICIES https://docs.snowflake.com/en/sql-reference/sql/show-masking-policies query. The results of SHOW and DESCRIBE are encapsulated in one output collection masking_policies. --- !> **V1 release candidate** This data source was reworked and is a release candidate for the V1. We do not expect significant changes in it before the V1. We will welcome any feedback and adjust the data source if needed. Any errors reported will be resolved with a higher priority. We encourage checking this data source out before the V1 release. Please follow the [migration guide](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/MIGRATION_GUIDE.md#v0950--v0960) to use it. # snowflake_masking_policies (Data Source) -Datasource used to get details of filtered masking policies. Filtering is aligned with the current possibilities for [SHOW MASKING POLICIES](https://docs.snowflake.com/en/sql-reference/sql/show-masking-policies) query. The results of SHOW and DESCRIBE are encapsulated in one output collection `masking_policies`. +Data source used to get details of filtered masking policies. Filtering is aligned with the current possibilities for [SHOW MASKING POLICIES](https://docs.snowflake.com/en/sql-reference/sql/show-masking-policies) query. The results of SHOW and DESCRIBE are encapsulated in one output collection `masking_policies`. ## Example Usage diff --git a/docs/data-sources/network_policies.md b/docs/data-sources/network_policies.md index 9a930a231b..b46596fce8 100644 --- a/docs/data-sources/network_policies.md +++ b/docs/data-sources/network_policies.md @@ -2,14 +2,14 @@ page_title: "snowflake_network_policies Data Source - terraform-provider-snowflake" subcategory: "" description: |- - Datasource used to get details of filtered network policies. Filtering is aligned with the current possibilities for SHOW NETWORK POLICIES https://docs.snowflake.com/en/sql-reference/sql/show-network-policies query (like is supported). The results of SHOW and DESCRIBE are encapsulated in one output collection. 
+ Data source used to get details of filtered network policies. Filtering is aligned with the current possibilities for SHOW NETWORK POLICIES https://docs.snowflake.com/en/sql-reference/sql/show-network-policies query (like is supported). The results of SHOW and DESCRIBE are encapsulated in one output collection. --- !> **V1 release candidate** This data source was reworked and is a release candidate for the V1. We do not expect significant changes in it before the V1. We will welcome any feedback and adjust the data source if needed. Any errors reported will be resolved with a higher priority. We encourage checking this data source out before the V1 release. Please follow the [migration guide](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/MIGRATION_GUIDE.md#v0920--v0930) to use it. # snowflake_network_policies (Data Source) -Datasource used to get details of filtered network policies. Filtering is aligned with the current possibilities for [SHOW NETWORK POLICIES](https://docs.snowflake.com/en/sql-reference/sql/show-network-policies) query (`like` is supported). The results of SHOW and DESCRIBE are encapsulated in one output collection. +Data source used to get details of filtered network policies. Filtering is aligned with the current possibilities for [SHOW NETWORK POLICIES](https://docs.snowflake.com/en/sql-reference/sql/show-network-policies) query (`like` is supported). The results of SHOW and DESCRIBE are encapsulated in one output collection. ## Example Usage @@ -43,7 +43,7 @@ output "only_show_output" { # Ensure the number of network policies is equal to at least one element (with the use of postcondition) data "snowflake_network_policies" "assert_with_postcondition" { - starts_with = "network-policy-name" + like = "network-policy-name" lifecycle { postcondition { condition = length(self.network_policies) > 0 diff --git a/docs/data-sources/resource_monitors.md b/docs/data-sources/resource_monitors.md index f0da9f3394..6e44d1a023 100644 --- a/docs/data-sources/resource_monitors.md +++ b/docs/data-sources/resource_monitors.md @@ -2,14 +2,14 @@ page_title: "snowflake_resource_monitors Data Source - terraform-provider-snowflake" subcategory: "" description: |- - Datasource used to get details of filtered resource monitors. Filtering is aligned with the current possibilities for SHOW RESOURCE MONITORS https://docs.snowflake.com/en/sql-reference/sql/show-resource-monitors query (like is supported). The results of SHOW is encapsulated in show_output collection. + Data source used to get details of filtered resource monitors. Filtering is aligned with the current possibilities for SHOW RESOURCE MONITORS https://docs.snowflake.com/en/sql-reference/sql/show-resource-monitors query (like is supported). The results of SHOW is encapsulated in show_output collection. --- !> **V1 release candidate** This data source was reworked and is a release candidate for the V1. We do not expect significant changes in it before the V1. We will welcome any feedback and adjust the data source if needed. Any errors reported will be resolved with a higher priority. We encourage checking this data source out before the V1 release. Please follow the [migration guide](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/MIGRATION_GUIDE.md#v0950--v0960) to use it. # snowflake_resource_monitors (Data Source) -Datasource used to get details of filtered resource monitors. 
Filtering is aligned with the current possibilities for [SHOW RESOURCE MONITORS](https://docs.snowflake.com/en/sql-reference/sql/show-resource-monitors) query (`like` is supported). The results of SHOW is encapsulated in show_output collection. +Data source used to get details of filtered resource monitors. Filtering is aligned with the current possibilities for [SHOW RESOURCE MONITORS](https://docs.snowflake.com/en/sql-reference/sql/show-resource-monitors) query (`like` is supported). The results of SHOW is encapsulated in show_output collection. ## Example Usage diff --git a/docs/data-sources/roles.md b/docs/data-sources/roles.md index 8382bffa5b..0e5db6a28f 100644 --- a/docs/data-sources/roles.md +++ b/docs/data-sources/roles.md @@ -2,14 +2,17 @@ page_title: "snowflake_roles Data Source - terraform-provider-snowflake" subcategory: "" description: |- - Datasource used to get details of filtered roles. Filtering is aligned with the current possibilities for SHOW ROLES https://docs.snowflake.com/en/sql-reference/sql/show-roles query (like and in_class are all supported). The results of SHOW are encapsulated in one output collection. + Data source used to get details of filtered roles. Filtering is aligned with the current possibilities for SHOW ROLES https://docs.snowflake.com/en/sql-reference/sql/show-roles query (like and in_class are all supported). The results of SHOW are encapsulated in one output collection. --- !> **V1 release candidate** This datasource was reworked and is a release candidate for the V1. We do not expect significant changes in it before the V1. We will welcome any feedback and adjust the data source if needed. Any errors reported will be resolved with a higher priority. We encourage checking this data source out before the V1 release. Please follow the [migration guide](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/MIGRATION_GUIDE.md#v0920--v0930) to use it. + +-> **Note** Fields `STARTS WITH` and `LIMIT` are currently missing. They will be added in the future. + # snowflake_roles (Data Source) -Datasource used to get details of filtered roles. Filtering is aligned with the current possibilities for [SHOW ROLES](https://docs.snowflake.com/en/sql-reference/sql/show-roles) query (`like` and `in_class` are all supported). The results of SHOW are encapsulated in one output collection. +Data source used to get details of filtered roles. Filtering is aligned with the current possibilities for [SHOW ROLES](https://docs.snowflake.com/en/sql-reference/sql/show-roles) query (`like` and `in_class` are all supported). The results of SHOW are encapsulated in one output collection. ## Example Usage diff --git a/docs/data-sources/row_access_policies.md b/docs/data-sources/row_access_policies.md index b6ccb31cd8..1c7c7b6d28 100644 --- a/docs/data-sources/row_access_policies.md +++ b/docs/data-sources/row_access_policies.md @@ -2,14 +2,14 @@ page_title: "snowflake_row_access_policies Data Source - terraform-provider-snowflake" subcategory: "" description: |- - Datasource used to get details of filtered row access policies. Filtering is aligned with the current possibilities for SHOW ROW ACCESS POLICIES https://docs.snowflake.com/en/sql-reference/sql/show-row-access-policies query. The results of SHOW and DESCRIBE are encapsulated in one output collection row_access_policies. + Data source used to get details of filtered row access policies. 
Filtering is aligned with the current possibilities for SHOW ROW ACCESS POLICIES https://docs.snowflake.com/en/sql-reference/sql/show-row-access-policies query. The results of SHOW and DESCRIBE are encapsulated in one output collection row_access_policies. --- !> **V1 release candidate** This data source was reworked and is a release candidate for the V1. We do not expect significant changes in it before the V1. We will welcome any feedback and adjust the data source if needed. Any errors reported will be resolved with a higher priority. We encourage checking this data source out before the V1 release. Please follow the [migration guide](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/MIGRATION_GUIDE.md#v0950--v0960) to use it. # snowflake_row_access_policies (Data Source) -Datasource used to get details of filtered row access policies. Filtering is aligned with the current possibilities for [SHOW ROW ACCESS POLICIES](https://docs.snowflake.com/en/sql-reference/sql/show-row-access-policies) query. The results of SHOW and DESCRIBE are encapsulated in one output collection `row_access_policies`. +Data source used to get details of filtered row access policies. Filtering is aligned with the current possibilities for [SHOW ROW ACCESS POLICIES](https://docs.snowflake.com/en/sql-reference/sql/show-row-access-policies) query. The results of SHOW and DESCRIBE are encapsulated in one output collection `row_access_policies`. ## Example Usage diff --git a/docs/data-sources/schemas.md b/docs/data-sources/schemas.md index 81cc107919..5787b9bd7b 100644 --- a/docs/data-sources/schemas.md +++ b/docs/data-sources/schemas.md @@ -2,14 +2,17 @@ page_title: "snowflake_schemas Data Source - terraform-provider-snowflake" subcategory: "" description: |- - Datasource used to get details of filtered schemas. Filtering is aligned with the current possibilities for SHOW SCHEMAS https://docs.snowflake.com/en/sql-reference/sql/show-schemas query. The results of SHOW, DESCRIBE, and SHOW PARAMETERS IN are encapsulated in one output collection. + Data source used to get details of filtered schemas. Filtering is aligned with the current possibilities for SHOW SCHEMAS https://docs.snowflake.com/en/sql-reference/sql/show-schemas query. The results of SHOW, DESCRIBE, and SHOW PARAMETERS IN are encapsulated in one output collection. --- !> **V1 release candidate** This data source was reworked and is a release candidate for the V1. We do not expect significant changes in it before the V1. We will welcome any feedback and adjust the data source if needed. Any errors reported will be resolved with a higher priority. We encourage checking this data source out before the V1 release. Please follow the [migration guide](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/MIGRATION_GUIDE.md#v0930--v0940) to use it. + +-> **Note** Field `WITH PRIVILEGES` is currently missing. It will be added in the future. + # snowflake_schemas (Data Source) -Datasource used to get details of filtered schemas. Filtering is aligned with the current possibilities for [SHOW SCHEMAS](https://docs.snowflake.com/en/sql-reference/sql/show-schemas) query. The results of SHOW, DESCRIBE, and SHOW PARAMETERS IN are encapsulated in one output collection. +Data source used to get details of filtered schemas. Filtering is aligned with the current possibilities for [SHOW SCHEMAS](https://docs.snowflake.com/en/sql-reference/sql/show-schemas) query. 
The results of SHOW, DESCRIBE, and SHOW PARAMETERS IN are encapsulated in one output collection. ## Example Usage diff --git a/docs/data-sources/secrets.md b/docs/data-sources/secrets.md index 7271fd6090..397cfa9e3e 100644 --- a/docs/data-sources/secrets.md +++ b/docs/data-sources/secrets.md @@ -2,14 +2,14 @@ page_title: "snowflake_secrets Data Source - terraform-provider-snowflake" subcategory: "" description: |- - Datasource used to get details of filtered secrets. Filtering is aligned with the current possibilities for SHOW SECRETS https://docs.snowflake.com/en/sql-reference/sql/show-secrets query. The results of SHOW and DESCRIBE are encapsulated in one output collection secrets. + Data source used to get details of filtered secrets. Filtering is aligned with the current possibilities for SHOW SECRETS https://docs.snowflake.com/en/sql-reference/sql/show-secrets query. The results of SHOW and DESCRIBE are encapsulated in one output collection secrets. --- !> **V1 release candidate** This data source is a release candidate for the V1. We do not expect significant changes in it before the V1. We will welcome any feedback and adjust the data source if needed. Any errors reported will be resolved with a higher priority. We encourage checking this data source out before the V1 release. Please follow the [migration guide](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/MIGRATION_GUIDE.md#v0970--v0980) to use it. # snowflake_secrets (Data Source) -Datasource used to get details of filtered secrets. Filtering is aligned with the current possibilities for [SHOW SECRETS](https://docs.snowflake.com/en/sql-reference/sql/show-secrets) query. The results of SHOW and DESCRIBE are encapsulated in one output collection `secrets`. +Data source used to get details of filtered secrets. Filtering is aligned with the current possibilities for [SHOW SECRETS](https://docs.snowflake.com/en/sql-reference/sql/show-secrets) query. The results of SHOW and DESCRIBE are encapsulated in one output collection `secrets`. ## Example Usage diff --git a/docs/data-sources/security_integrations.md b/docs/data-sources/security_integrations.md index 4f0bc30c5b..833eb70663 100644 --- a/docs/data-sources/security_integrations.md +++ b/docs/data-sources/security_integrations.md @@ -2,14 +2,14 @@ page_title: "snowflake_security_integrations Data Source - terraform-provider-snowflake" subcategory: "" description: |- - Datasource used to get details of filtered security integrations. Filtering is aligned with the current possibilities for SHOW SECURITY INTEGRATIONS https://docs.snowflake.com/en/sql-reference/sql/show-integrations query (only like is supported). The results of SHOW and DESCRIBE are encapsulated in one output collection security_integrations. + Data source used to get details of filtered security integrations. Filtering is aligned with the current possibilities for SHOW SECURITY INTEGRATIONS https://docs.snowflake.com/en/sql-reference/sql/show-integrations query (only like is supported). The results of SHOW and DESCRIBE are encapsulated in one output collection security_integrations. --- !> **V1 release candidate** This data source was reworked and is a release candidate for the V1. We do not expect significant changes in it before the V1. We will welcome any feedback and adjust the data source if needed. Any errors reported will be resolved with a higher priority. We encourage checking this data source out before the V1 release. 
Please follow the [migration guide](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/MIGRATION_GUIDE.md#v0920--v0930) to use it. # snowflake_security_integrations (Data Source) -Datasource used to get details of filtered security integrations. Filtering is aligned with the current possibilities for [SHOW SECURITY INTEGRATIONS](https://docs.snowflake.com/en/sql-reference/sql/show-integrations) query (only `like` is supported). The results of SHOW and DESCRIBE are encapsulated in one output collection `security_integrations`. +Data source used to get details of filtered security integrations. Filtering is aligned with the current possibilities for [SHOW SECURITY INTEGRATIONS](https://docs.snowflake.com/en/sql-reference/sql/show-integrations) query (only `like` is supported). The results of SHOW and DESCRIBE are encapsulated in one output collection `security_integrations`. ## Example Usage diff --git a/docs/data-sources/streamlits.md b/docs/data-sources/streamlits.md index ef767a9082..3bb9c19549 100644 --- a/docs/data-sources/streamlits.md +++ b/docs/data-sources/streamlits.md @@ -2,14 +2,14 @@ page_title: "snowflake_streamlits Data Source - terraform-provider-snowflake" subcategory: "" description: |- - Datasource used to get details of filtered streamlits. Filtering is aligned with the current possibilities for SHOW STREAMLITS https://docs.snowflake.com/en/sql-reference/sql/show-streamlits query (only like is supported). The results of SHOW and DESCRIBE are encapsulated in one output collection streamlits. + Data source used to get details of filtered streamlits. Filtering is aligned with the current possibilities for SHOW STREAMLITS https://docs.snowflake.com/en/sql-reference/sql/show-streamlits query (only like is supported). The results of SHOW and DESCRIBE are encapsulated in one output collection streamlits. --- !> **V1 release candidate** This data source was reworked and is a release candidate for the V1. We do not expect significant changes in it before the V1. We will welcome any feedback and adjust the data source if needed. Any errors reported will be resolved with a higher priority. We encourage checking this data source out before the V1 release. Please follow the [migration guide](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/MIGRATION_GUIDE.md#v0930--v0940) to use it. # snowflake_streamlits (Data Source) -Datasource used to get details of filtered streamlits. Filtering is aligned with the current possibilities for [SHOW STREAMLITS](https://docs.snowflake.com/en/sql-reference/sql/show-streamlits) query (only `like` is supported). The results of SHOW and DESCRIBE are encapsulated in one output collection `streamlits`. +Data source used to get details of filtered streamlits. Filtering is aligned with the current possibilities for [SHOW STREAMLITS](https://docs.snowflake.com/en/sql-reference/sql/show-streamlits) query (only `like` is supported). The results of SHOW and DESCRIBE are encapsulated in one output collection `streamlits`. ## Example Usage diff --git a/docs/data-sources/streams.md b/docs/data-sources/streams.md index 62ca70cb22..23acd3a192 100644 --- a/docs/data-sources/streams.md +++ b/docs/data-sources/streams.md @@ -2,14 +2,14 @@ page_title: "snowflake_streams Data Source - terraform-provider-snowflake" subcategory: "" description: |- - Datasource used to get details of filtered streams. 
Filtering is aligned with the current possibilities for SHOW STREAMS https://docs.snowflake.com/en/sql-reference/sql/show-streams query. The results of SHOW and DESCRIBE are encapsulated in one output collection streams. + Data source used to get details of filtered streams. Filtering is aligned with the current possibilities for SHOW STREAMS https://docs.snowflake.com/en/sql-reference/sql/show-streams query. The results of SHOW and DESCRIBE are encapsulated in one output collection streams. --- !> **V1 release candidate** This data source was reworked and is a release candidate for the V1. We do not expect significant changes in it before the V1. We will welcome any feedback and adjust the data source if needed. Any errors reported will be resolved with a higher priority. We encourage checking this data source out before the V1 release. Please follow the [migration guide](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/MIGRATION_GUIDE.md#v0970--v0980) to use it. # snowflake_streams (Data Source) -Datasource used to get details of filtered streams. Filtering is aligned with the current possibilities for [SHOW STREAMS](https://docs.snowflake.com/en/sql-reference/sql/show-streams) query. The results of SHOW and DESCRIBE are encapsulated in one output collection `streams`. +Data source used to get details of filtered streams. Filtering is aligned with the current possibilities for [SHOW STREAMS](https://docs.snowflake.com/en/sql-reference/sql/show-streams) query. The results of SHOW and DESCRIBE are encapsulated in one output collection `streams`. ## Example Usage diff --git a/docs/data-sources/tags.md b/docs/data-sources/tags.md index bb8e360071..cde76cf652 100644 --- a/docs/data-sources/tags.md +++ b/docs/data-sources/tags.md @@ -2,14 +2,14 @@ page_title: "snowflake_tags Data Source - terraform-provider-snowflake" subcategory: "" description: |- - Datasource used to get details of filtered tags. Filtering is aligned with the current possibilities for SHOW TAGS https://docs.snowflake.com/en/sql-reference/sql/show-tags query. The results of SHOW are encapsulated in one output collection tags. + Data source used to get details of filtered tags. Filtering is aligned with the current possibilities for SHOW TAGS https://docs.snowflake.com/en/sql-reference/sql/show-tags query. The results of SHOW are encapsulated in one output collection tags. --- !> **V1 release candidate** This data source is a release candidate for the V1. We do not expect significant changes in it before the V1. We will welcome any feedback and adjust the data source if needed. Any errors reported will be resolved with a higher priority. We encourage checking this data source out before the V1 release. Please follow the [migration guide](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/MIGRATION_GUIDE.md#v0980--v0990) to use it. # snowflake_tags (Data Source) -Datasource used to get details of filtered tags. Filtering is aligned with the current possibilities for [SHOW TAGS](https://docs.snowflake.com/en/sql-reference/sql/show-tags) query. The results of SHOW are encapsulated in one output collection `tags`. +Data source used to get details of filtered tags. Filtering is aligned with the current possibilities for [SHOW TAGS](https://docs.snowflake.com/en/sql-reference/sql/show-tags) query. The results of SHOW are encapsulated in one output collection `tags`. 
## Example Usage diff --git a/docs/data-sources/users.md index c5e501510f..4a068375c9 100644 --- a/docs/data-sources/users.md +++ b/docs/data-sources/users.md @@ -2,14 +2,14 @@ page_title: "snowflake_users Data Source - terraform-provider-snowflake" subcategory: "" description: |- - Datasource used to get details of filtered users. Filtering is aligned with the current possibilities for SHOW USERS https://docs.snowflake.com/en/sql-reference/sql/show-users query. The results of SHOW, DESCRIBE, and SHOW PARAMETERS IN are encapsulated in one output collection. Important note is that when querying users you don't have permissions to, the querying options are limited. You won't get almost any field in show_output (only empty or default values), the DESCRIBE command cannot be called, so you have to set with_describe = false. Only parameters output is not affected by the lack of privileges. + Data source used to get details of filtered users. Filtering is aligned with the current possibilities for SHOW USERS https://docs.snowflake.com/en/sql-reference/sql/show-users query. The results of SHOW, DESCRIBE, and SHOW PARAMETERS IN are encapsulated in one output collection. Note that when querying users you do not have permissions to, the querying options are limited: show_output will contain almost no fields (only empty or default values) and the DESCRIBE command cannot be called, so you have to set with_describe = false. Only the parameters output is not affected by the lack of privileges. --- !> **V1 release candidate** This data source was reworked and is a release candidate for the V1. We do not expect significant changes in it before the V1. We will welcome any feedback and adjust the data source if needed. Any errors reported will be resolved with a higher priority. We encourage checking this data source out before the V1 release. Please follow the [migration guide](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/MIGRATION_GUIDE.md#v0920--v0930) to use it. # snowflake_users (Data Source) -Datasource used to get details of filtered users. Filtering is aligned with the current possibilities for [SHOW USERS](https://docs.snowflake.com/en/sql-reference/sql/show-users) query. The results of SHOW, DESCRIBE, and SHOW PARAMETERS IN are encapsulated in one output collection. Important note is that when querying users you don't have permissions to, the querying options are limited. You won't get almost any field in `show_output` (only empty or default values), the DESCRIBE command cannot be called, so you have to set `with_describe = false`. Only `parameters` output is not affected by the lack of privileges. +Data source used to get details of filtered users. Filtering is aligned with the current possibilities for [SHOW USERS](https://docs.snowflake.com/en/sql-reference/sql/show-users) query. The results of SHOW, DESCRIBE, and SHOW PARAMETERS IN are encapsulated in one output collection. Note that when querying users you do not have permissions to, the querying options are limited: `show_output` will contain almost no fields (only empty or default values) and the DESCRIBE command cannot be called, so you have to set `with_describe = false`. Only the `parameters` output is not affected by the lack of privileges.
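For the limited-privileges case described above, a minimal hedged sketch (the `like` pattern is illustrative; `with_describe = false` is the setting the description calls for):

```terraform
data "snowflake_users" "example" {
  like          = "USER_%" # illustrative pattern
  with_describe = false    # required when you cannot run DESCRIBE on the returned users
}
```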
## Example Usage diff --git a/docs/data-sources/views.md b/docs/data-sources/views.md index d4eeb988fc..9425ccbcde 100644 --- a/docs/data-sources/views.md +++ b/docs/data-sources/views.md @@ -2,14 +2,14 @@ page_title: "snowflake_views Data Source - terraform-provider-snowflake" subcategory: "" description: |- - Datasource used to get details of filtered views. Filtering is aligned with the current possibilities for SHOW VIEWS https://docs.snowflake.com/en/sql-reference/sql/show-views query (only like is supported). The results of SHOW and DESCRIBE are encapsulated in one output collection views. + Data source used to get details of filtered views. Filtering is aligned with the current possibilities for SHOW VIEWS https://docs.snowflake.com/en/sql-reference/sql/show-views query (only like is supported). The results of SHOW and DESCRIBE are encapsulated in one output collection views. --- !> **V1 release candidate** This data source was reworked and is a release candidate for the V1. We do not expect significant changes in it before the V1. We will welcome any feedback and adjust the data source if needed. Any errors reported will be resolved with a higher priority. We encourage checking this data source out before the V1 release. Please follow the [migration guide](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/MIGRATION_GUIDE.md#v094x--v0950) to use it. # snowflake_views (Data Source) -Datasource used to get details of filtered views. Filtering is aligned with the current possibilities for [SHOW VIEWS](https://docs.snowflake.com/en/sql-reference/sql/show-views) query (only `like` is supported). The results of SHOW and DESCRIBE are encapsulated in one output collection `views`. +Data source used to get details of filtered views. Filtering is aligned with the current possibilities for [SHOW VIEWS](https://docs.snowflake.com/en/sql-reference/sql/show-views) query (only `like` is supported). The results of SHOW and DESCRIBE are encapsulated in one output collection `views`. ## Example Usage diff --git a/docs/data-sources/warehouses.md b/docs/data-sources/warehouses.md index 99ce968572..2afcc6f502 100644 --- a/docs/data-sources/warehouses.md +++ b/docs/data-sources/warehouses.md @@ -2,14 +2,14 @@ page_title: "snowflake_warehouses Data Source - terraform-provider-snowflake" subcategory: "" description: |- - Datasource used to get details of filtered warehouses. Filtering is aligned with the current possibilities for SHOW WAREHOUSES https://docs.snowflake.com/en/sql-reference/sql/show-warehouses query (only like is supported). The results of SHOW, DESCRIBE, and SHOW PARAMETERS IN are encapsulated in one output collection. + Data source used to get details of filtered warehouses. Filtering is aligned with the current possibilities for SHOW WAREHOUSES https://docs.snowflake.com/en/sql-reference/sql/show-warehouses query (only like is supported). The results of SHOW, DESCRIBE, and SHOW PARAMETERS IN are encapsulated in one output collection. --- !> **V1 release candidate** This data source was reworked and is a release candidate for the V1. We do not expect significant changes in it before the V1. We will welcome any feedback and adjust the data source if needed. Any errors reported will be resolved with a higher priority. We encourage checking this data source out before the V1 release. Please follow the [migration guide](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/MIGRATION_GUIDE.md#v0920--v0930) to use it. 
# snowflake_warehouses (Data Source) -Datasource used to get details of filtered warehouses. Filtering is aligned with the current possibilities for [SHOW WAREHOUSES](https://docs.snowflake.com/en/sql-reference/sql/show-warehouses) query (only `like` is supported). The results of SHOW, DESCRIBE, and SHOW PARAMETERS IN are encapsulated in one output collection. +Data source used to get details of filtered warehouses. Filtering is aligned with the current possibilities for [SHOW WAREHOUSES](https://docs.snowflake.com/en/sql-reference/sql/show-warehouses) query (only `like` is supported). The results of SHOW, DESCRIBE, and SHOW PARAMETERS IN are encapsulated in one output collection. ## Example Usage diff --git a/docs/guides/unassigning_policies.md b/docs/guides/unassigning_policies.md new file mode 100644 index 0000000000..de5de63e86 --- /dev/null +++ b/docs/guides/unassigning_policies.md @@ -0,0 +1,65 @@ +--- +page_title: "Unassigning policies" +subcategory: "" +description: |- + +--- +# Unassigning policies + +For some objects, like network policies, Snowflake [docs](https://docs.snowflake.com/en/sql-reference/sql/drop-network-policy#usage-notes) suggest that a network policy cannot be dropped successfully if it is currently assigned to another object. Currently, the provider does not unassign such objects automatically. + +Before dropping the resource: +- if the objects the policy is assigned to are managed in Terraform, follow the example below +- if they are not managed in Terraform, list them with `SELECT * from table(information_schema.policy_references(policy_name=>''));` and unassign them manually with `ALTER ...` + +## Example + +When you have a configuration like +```terraform +resource "snowflake_network_policy" "example" { + name = "network_policy_name" +} + +resource "snowflake_oauth_integration_for_custom_clients" "example" { + name = "integration" + oauth_client_type = "CONFIDENTIAL" + oauth_redirect_uri = "https://example.com" + blocked_roles_list = ["ACCOUNTADMIN", "SECURITYADMIN"] + network_policy = snowflake_network_policy.example.fully_qualified_name +} +``` + +and try removing the network policy, Terraform fails with +``` +│ Error deleting network policy EXAMPLE, err = 001492 (42601): SQL compilation error: +│ Cannot perform Drop operation on network policy EXAMPLE. The policy is attached to INTEGRATION with name EXAMPLE. Unset the network policy from INTEGRATION and try the +│ Drop operation again. +``` + +In order to remove the policy correctly, first adjust the configuration to +```terraform +resource "snowflake_network_policy" "example" { + name = "network_policy_name" +} + +resource "snowflake_oauth_integration_for_custom_clients" "example" { + name = "integration" + oauth_client_type = "CONFIDENTIAL" + oauth_redirect_uri = "https://example.com" + blocked_roles_list = ["ACCOUNTADMIN", "SECURITYADMIN"] +} +``` + +Note that the configuration no longer assigns the network policy to the integration. Now, run `terraform apply`; this unassigns the policy in Snowflake. Then, adjust the configuration once again to +```terraform +resource "snowflake_oauth_integration_for_custom_clients" "example" { + name = "integration" + oauth_client_type = "CONFIDENTIAL" + oauth_redirect_uri = "https://example.com" + blocked_roles_list = ["ACCOUNTADMIN", "SECURITYADMIN"] +} +``` + +Now the network policy should be removed successfully. + +This behavior will be fixed in the provider in the future.
diff --git a/docs/index.md b/docs/index.md index 518af87d7c..9a69fadbe9 100644 --- a/docs/index.md +++ b/docs/index.md @@ -9,7 +9,7 @@ description: Manage SnowflakeDB with Terraform. ~> **Note** Please check the [migration guide](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/MIGRATION_GUIDE.md) when changing the version of the provider. --> **Note** the current roadmap is available in our GitHub repository: [ROADMAP.md](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/ROADMAP.md). +-> **Note** The current roadmap is available in our GitHub repository: [ROADMAP.md](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/ROADMAP.md). This is a Terraform provider plugin for managing [Snowflake](https://www.snowflake.com/) accounts. Coverage is focused on part of Snowflake related to access control. @@ -161,7 +161,7 @@ To export the variables into your provider: ```shell export SNOWFLAKE_USER="..." -export SNOWFLAKE_PRIVATE_KEY_PATH="~/.ssh/snowflake_key" +export SNOWFLAKE_PRIVATE_KEY="~/.ssh/snowflake_key" ``` ### Keypair Authentication Passphrase @@ -183,7 +183,7 @@ To export the variables into your provider: ```shell export SNOWFLAKE_USER="..." -export SNOWFLAKE_PRIVATE_KEY_PATH="~/.ssh/snowflake_key.p8" +export SNOWFLAKE_PRIVATE_KEY="~/.ssh/snowflake_key.p8" export SNOWFLAKE_PRIVATE_KEY_PASSPHRASE="..." ``` @@ -193,7 +193,7 @@ If you have an OAuth access token, export these credentials as environment varia ```shell export SNOWFLAKE_USER='...' -export SNOWFLAKE_OAUTH_ACCESS_TOKEN='...' +export SNOWFLAKE_TOKEN='...' ``` Note that once this access token expires, you'll need to request a new one through an external application. @@ -203,11 +203,11 @@ Note that once this access token expires, you'll need to request a new one throu If you have an OAuth Refresh token, export these credentials as environment variables: ```shell -export SNOWFLAKE_OAUTH_REFRESH_TOKEN='...' -export SNOWFLAKE_OAUTH_CLIENT_ID='...' -export SNOWFLAKE_OAUTH_CLIENT_SECRET='...' -export SNOWFLAKE_OAUTH_ENDPOINT='...' -export SNOWFLAKE_OAUTH_REDIRECT_URL='https://localhost.com' +export SNOWFLAKE_TOKEN_ACCESSOR_REFRESH_TOKEN='...' +export SNOWFLAKE_TOKEN_ACCESSOR_CLIENT_ID='...' +export SNOWFLAKE_TOKEN_ACCESSOR_CLIENT_SECRET='...' +export SNOWFLAKE_TOKEN_ACCESSOR_TOKEN_ENDPOINT='...' +export SNOWFLAKE_TOKEN_ACCESSOR_REDIRECT_URI='https://localhost.com' ``` Note that because access tokens have a short life (typically 10 minutes), a new access token will be generated by passing the refresh token. @@ -242,7 +242,7 @@ provider "snowflake" { ```bash export SNOWFLAKE_USER="..." -export SNOWFLAKE_PRIVATE_KEY_PATH="~/.ssh/snowflake_key" +export SNOWFLAKE_PRIVATE_KEY="~/.ssh/snowflake_key" ``` 3. In a TOML file (default in ~/.snowflake/config). Notice the use of different profiles. The profile name needs to be specified in the Terraform configuration file in the `profile` field. When this is not specified, the `default` profile is loaded. diff --git a/docs/resources/account_role.md b/docs/resources/account_role.md index e5c9a7068f..bc409840f6 100644 --- a/docs/resources/account_role.md +++ b/docs/resources/account_role.md @@ -33,7 +33,7 @@ resource "snowflake_account_role" "complete" { ### Required -- `name` (String) Identifier for the role; must be unique for your account.
Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"` +- `name` (String) Identifier for the role; must be unique for your account. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"`. ### Optional @@ -66,5 +66,5 @@ Read-Only: Import is supported using the following syntax: ```shell -terraform import snowflake_account_role.example "name" +terraform import snowflake_account_role.example '""' ``` diff --git a/docs/resources/api_authentication_integration_with_authorization_code_grant.md b/docs/resources/api_authentication_integration_with_authorization_code_grant.md index 7ff4240139..683d691d23 100644 --- a/docs/resources/api_authentication_integration_with_authorization_code_grant.md +++ b/docs/resources/api_authentication_integration_with_authorization_code_grant.md @@ -7,6 +7,8 @@ description: |- !> **V1 release candidate** This resource was reworked and is a release candidate for the V1. We do not expect significant changes in it before the V1. We will welcome any feedback and adjust the resource if needed. Any errors reported will be resolved with a higher priority. We encourage checking this resource out before the V1 release. Please follow the [migration guide](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/MIGRATION_GUIDE.md#v0920--v0930) to use it. +!> **Note** The provider does not detect external changes on security integration type. In this case, remove the integration of wrong type manually with `terraform destroy` and recreate the resource. It will be addressed in the future. + # snowflake_api_authentication_integration_with_authorization_code_grant (Resource) Resource used to manage api authentication security integration objects with authorization code grant. For more information, check [security integrations documentation](https://docs.snowflake.com/en/sql-reference/sql/create-security-integration-api-auth). @@ -45,9 +47,9 @@ resource "snowflake_api_authentication_integration_with_authorization_code_grant ### Required - `enabled` (Boolean) Specifies whether this security integration is enabled or disabled. -- `name` (String) Specifies the identifier (i.e. name) for the integration. This value must be unique in your account. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"` +- `name` (String) Specifies the identifier (i.e. name) for the integration. This value must be unique in your account. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"`. - `oauth_client_id` (String) Specifies the client ID for the OAuth application in the external service. 
-- `oauth_client_secret` (String) Specifies the client secret for the OAuth application in the ServiceNow instance from the previous step. The connector uses this to request an access token from the ServiceNow instance. +- `oauth_client_secret` (String) Specifies the client secret for the OAuth application in the ServiceNow instance from the previous step. The connector uses this to request an access token from the ServiceNow instance. External changes for this field won't be detected. In case you want to apply external changes, you can re-create the resource manually using "terraform taint". ### Optional @@ -234,5 +236,5 @@ Read-Only: Import is supported using the following syntax: ```shell -terraform import snowflake_api_authentication_integration_with_authorization_code_grant.example "name" +terraform import snowflake_api_authentication_integration_with_authorization_code_grant.example '""' ``` diff --git a/docs/resources/api_authentication_integration_with_client_credentials.md b/docs/resources/api_authentication_integration_with_client_credentials.md index 098bdf6ce8..539e6b51cb 100644 --- a/docs/resources/api_authentication_integration_with_client_credentials.md +++ b/docs/resources/api_authentication_integration_with_client_credentials.md @@ -7,6 +7,8 @@ description: |- !> **V1 release candidate** This resource was reworked and is a release candidate for the V1. We do not expect significant changes in it before the V1. We will welcome any feedback and adjust the resource if needed. Any errors reported will be resolved with a higher priority. We encourage checking this resource out before the V1 release. Please follow the [migration guide](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/MIGRATION_GUIDE.md#v0920--v0930) to use it. +!> **Note** The provider does not detect external changes on security integration type. In this case, remove the integration of wrong type manually with `terraform destroy` and recreate the resource. It will be addressed in the future. + # snowflake_api_authentication_integration_with_client_credentials (Resource) Resource used to manage api authentication security integration objects with client credentials. For more information, check [security integrations documentation](https://docs.snowflake.com/en/sql-reference/sql/create-security-integration-api-auth). @@ -43,9 +45,9 @@ resource "snowflake_api_authentication_integration_with_client_credentials" "tes ### Required - `enabled` (Boolean) Specifies whether this security integration is enabled or disabled. -- `name` (String) Specifies the identifier (i.e. name) for the integration. This value must be unique in your account. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"` +- `name` (String) Specifies the identifier (i.e. name) for the integration. This value must be unique in your account. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"`. - `oauth_client_id` (String) Specifies the client ID for the OAuth application in the external service. 
-- `oauth_client_secret` (String) Specifies the client secret for the OAuth application in the ServiceNow instance from the previous step. The connector uses this to request an access token from the ServiceNow instance. +- `oauth_client_secret` (String) Specifies the client secret for the OAuth application in the ServiceNow instance from the previous step. The connector uses this to request an access token from the ServiceNow instance. External changes for this field won't be detected. In case you want to apply external changes, you can re-create the resource manually using "terraform taint". ### Optional @@ -231,5 +233,5 @@ Read-Only: Import is supported using the following syntax: ```shell -terraform import snowflake_api_authentication_integration_with_client_credentials.example "name" +terraform import snowflake_api_authentication_integration_with_client_credentials.example '""' ``` diff --git a/docs/resources/api_authentication_integration_with_jwt_bearer.md b/docs/resources/api_authentication_integration_with_jwt_bearer.md index c4cdee9bdf..623a15d70e 100644 --- a/docs/resources/api_authentication_integration_with_jwt_bearer.md +++ b/docs/resources/api_authentication_integration_with_jwt_bearer.md @@ -7,6 +7,8 @@ description: |- !> **V1 release candidate** This resource was reworked and is a release candidate for the V1. We do not expect significant changes in it before the V1. We will welcome any feedback and adjust the resource if needed. Any errors reported will be resolved with a higher priority. We encourage checking this resource out before the V1 release. Please follow the [migration guide](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/MIGRATION_GUIDE.md#v0920--v0930) to use it. +!> **Note** The provider does not detect external changes on security integration type. In this case, remove the integration of wrong type manually with `terraform destroy` and recreate the resource. It will be addressed in the future. + # snowflake_api_authentication_integration_with_jwt_bearer (Resource) Resource used to manage api authentication security integration objects with jwt bearer. For more information, check [security integrations documentation](https://docs.snowflake.com/en/sql-reference/sql/create-security-integration-api-auth). @@ -46,10 +48,10 @@ resource "snowflake_api_authentication_integration_with_jwt_bearer" "test" { ### Required - `enabled` (Boolean) Specifies whether this security integration is enabled or disabled. -- `name` (String) Specifies the identifier (i.e. name) for the integration. This value must be unique in your account. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"` +- `name` (String) Specifies the identifier (i.e. name) for the integration. This value must be unique in your account. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"`. - `oauth_assertion_issuer` (String) - `oauth_client_id` (String) Specifies the client ID for the OAuth application in the external service. 
-- `oauth_client_secret` (String) Specifies the client secret for the OAuth application in the ServiceNow instance from the previous step. The connector uses this to request an access token from the ServiceNow instance. +- `oauth_client_secret` (String) Specifies the client secret for the OAuth application in the ServiceNow instance from the previous step. The connector uses this to request an access token from the ServiceNow instance. External changes for this field won't be detected. In case you want to apply external changes, you can re-create the resource manually using "terraform taint". ### Optional @@ -235,5 +237,5 @@ Read-Only: Import is supported using the following syntax: ```shell -terraform import snowflake_api_authentication_integration_with_jwt_bearer.example "name" +terraform import snowflake_api_authentication_integration_with_jwt_bearer.example '""' ``` diff --git a/docs/resources/authentication_policy.md b/docs/resources/authentication_policy.md index 926acdd4fb..bd78a8eef8 100644 --- a/docs/resources/authentication_policy.md +++ b/docs/resources/authentication_policy.md @@ -5,8 +5,7 @@ description: |- Resource used to manage authentication policy objects. For more information, check authentication policy documentation https://docs.snowflake.com/en/sql-reference/sql/create-authentication-policy. --- -> [!WARNING] -> According to Snowflake [docs](https://docs.snowflake.com/en/sql-reference/sql/drop-authentication-policy#usage-notes), an authentication policy cannot be dropped successfully if it is currently assigned to another object. Currently, the provider does not unassign such objects automatically. Before dropping the resource, list the assigned objects with `SELECT * from table(information_schema.policy_references(policy_name=>''));` and unassign them manually with `ALTER ...` or with updated Terraform configuration, if possible. +!> **Note** According to Snowflake [docs](https://docs.snowflake.com/en/sql-reference/sql/drop-authentication-policy#usage-notes), an authentication policy cannot be dropped successfully if it is currently assigned to another object. Currently, the provider does not unassign such objects automatically. Before dropping the resource, first unassign the policy from the relevant objects. See [guide](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/latest/docs/guides/unassigning_policies) for more details. # snowflake_authentication_policy (Resource) @@ -44,9 +43,9 @@ resource "snowflake_authentication_policy" "complete" { ### Required -- `database` (String) The database in which to create the authentication policy. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"` -- `name` (String) Specifies the identifier for the authentication policy. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"` -- `schema` (String) The schema in which to create the authentication policy. 
Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"` +- `database` (String) The database in which to create the authentication policy. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"`. +- `name` (String) Specifies the identifier for the authentication policy. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"`. +- `schema` (String) The schema in which to create the authentication policy. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"`. ### Optional diff --git a/docs/resources/database.md b/docs/resources/database.md index 998d9ca232..e5d16f93e4 100644 --- a/docs/resources/database.md +++ b/docs/resources/database.md @@ -7,6 +7,11 @@ description: |- !> **V1 release candidate** This resource was reworked and is a release candidate for the V1. We do not expect significant changes in it before the V1. We will welcome any feedback and adjust the resource if needed. Any errors reported will be resolved with a higher priority. We encourage checking this resource out before the V1 release. Please follow the [migration guide](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/MIGRATION_GUIDE.md#v0920--v0930) to use it. +!> **Note** The provider does not detect external changes on database type. In this case, remove the database of wrong type manually with `terraform destroy` and recreate the resource. It will be addressed in the future. + +!> **Note** A database cannot be dropped successfully if it contains network rule-network policy associations. The error looks like `098507 (2BP01): Cannot drop database DATABASE as it includes network rule - policy associations. +`. Currently, the provider does not unassign such objects automatically. Before dropping the resource, first unassign the network rule from the relevant objects. See [guide](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/latest/docs/guides/unassigning_policies) for more details. + # snowflake_database (Resource) Represents a standard database. If replication configuration is specified, the database is promoted to serve as a primary database for replication. 
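Before the replication-focused examples below, a minimal hedged sketch of a standard database (attribute values are illustrative and taken from the fuller example that follows):

```terraform
resource "snowflake_database" "standard" {
  name    = "database_name"        # must be unique for your account
  comment = "my standard database" # optional
}
```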
@@ -26,10 +31,9 @@ resource "snowflake_database" "primary" { comment = "my standard database" data_retention_time_in_days = 10 - data_retention_time_in_days_save = 10 max_data_extension_time_in_days = 20 - external_volume = "" - catalog = "" + external_volume = snowflake_external_volume.example.fully_qualified_name + catalog = snowflake_catalog.example.fully_qualified_name replace_invalid_characters = false default_ddl_collation = "en_US" storage_serialization_policy = "COMPATIBLE" @@ -56,11 +60,11 @@ resource "snowflake_database" "primary" { locals { replication_configs = [ { - account_identifier = "." + account_identifier = "\"\".\"\"" with_failover = true }, { - account_identifier = "." + account_identifier = "\"\".\"\"" with_failover = true }, ] @@ -68,10 +72,13 @@ locals { resource "snowflake_database" "primary" { name = "database_name" - for_each = local.replication_configs + for_each = { for rc in local.replication_configs : rc.account_identifier => rc } replication { - enable_to_account = each.value + enable_to_account { + account_identifier = each.value.account_identifier + with_failover = each.value.with_failover + } ignore_edition_check = true } } @@ -84,7 +91,7 @@ resource "snowflake_database" "primary" { ### Required -- `name` (String) Specifies the identifier for the database; must be unique for your account. As a best practice for [Database Replication and Failover](https://docs.snowflake.com/en/user-guide/db-replication-intro), it is recommended to give each secondary database the same name as its primary database. This practice supports referencing fully-qualified objects (i.e. '..') by other objects in the same database, such as querying a fully-qualified table name in a view. If a secondary database has a different name from the primary database, then these object references would break in the secondary database. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"` +- `name` (String) Specifies the identifier for the database; must be unique for your account. As a best practice for [Database Replication and Failover](https://docs.snowflake.com/en/user-guide/db-replication-intro), it is recommended to give each secondary database the same name as its primary database. This practice supports referencing fully-qualified objects (i.e. '..') by other objects in the same database, such as querying a fully-qualified table name in a view. If a secondary database has a different name from the primary database, then these object references would break in the secondary database. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"`. ### Optional @@ -130,7 +137,7 @@ Optional: Required: -- `account_identifier` (String) Specifies account identifier for which replication should be enabled. The account identifiers should be in the form of `"".""`. +- `account_identifier` (String) Specifies account identifier for which replication should be enabled. The account identifiers should be in the form of `"".""`. For more information about this resource, see [docs](./account). 
Optional: @@ -141,5 +148,5 @@ Optional: Import is supported using the following syntax: ```shell -terraform import snowflake_database.example 'database_name' +terraform import snowflake_database.example '""' ``` diff --git a/docs/resources/database_role.md b/docs/resources/database_role.md index f0097ccb89..706ceba760 100644 --- a/docs/resources/database_role.md +++ b/docs/resources/database_role.md @@ -2,14 +2,14 @@ page_title: "snowflake_database_role Resource - terraform-provider-snowflake" subcategory: "" description: |- - + Resource used to manage database roles. For more information, check database roles documentation https://docs.snowflake.com/en/sql-reference/sql/create-database-role. --- !> **V1 release candidate** This resource was reworked and is a release candidate for the V1. We do not expect significant changes in it before the V1. We will welcome any feedback and adjust the resource if needed. Any errors reported will be resolved with a higher priority. We encourage checking this resource out before the V1 release. # snowflake_database_role (Resource) - +Resource used to manage database roles. For more information, check [database roles documentation](https://docs.snowflake.com/en/sql-reference/sql/create-database-role). ## Example Usage @@ -32,8 +32,8 @@ resource "snowflake_database_role" "test_database_role" { ### Required -- `database` (String) The database in which to create the database role. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"` -- `name` (String) Specifies the identifier for the database role. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"` +- `database` (String) The database in which to create the database role. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"`. +- `name` (String) Specifies the identifier for the database role. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"`. ### Optional diff --git a/docs/resources/external_oauth_integration.md b/docs/resources/external_oauth_integration.md index 2d6473cfd9..37550af92e 100644 --- a/docs/resources/external_oauth_integration.md +++ b/docs/resources/external_oauth_integration.md @@ -7,6 +7,8 @@ description: |- !> **V1 release candidate** This resource was reworked and is a release candidate for the V1. We do not expect significant changes in it before the V1. We will welcome any feedback and adjust the resource if needed. Any errors reported will be resolved with a higher priority. We encourage checking this resource out before the V1 release. 
Please follow the [migration guide](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/MIGRATION_GUIDE.md#v0920--v0930) to use it. +!> **Note** The provider does not detect external changes on security integration type. In this case, remove the integration of wrong type manually with `terraform destroy` and recreate the resource. It will be addressed in the future. + # snowflake_external_oauth_integration (Resource) Resource used to manage external oauth security integration objects. For more information, check [security integrations documentation](https://docs.snowflake.com/en/sql-reference/sql/create-security-integration-oauth-external). @@ -27,7 +29,7 @@ resource "snowflake_external_oauth_integration" "test" { resource "snowflake_external_oauth_integration" "test" { comment = "comment" enabled = true - external_oauth_allowed_roles_list = ["user1"] + external_oauth_allowed_roles_list = [snowflake_role.one.fully_qualified_name] external_oauth_any_role_mode = "ENABLE" external_oauth_audience_list = ["https://example.com"] external_oauth_issuer = "issuer" @@ -45,7 +47,7 @@ resource "snowflake_external_oauth_integration" "test" { enabled = true external_oauth_any_role_mode = "ENABLE" external_oauth_audience_list = ["https://example.com"] - external_oauth_blocked_roles_list = ["user1"] + external_oauth_blocked_roles_list = [snowflake_role.one.fully_qualified_name] external_oauth_issuer = "issuer" external_oauth_rsa_public_key = file("key.pem") external_oauth_rsa_public_key_2 = file("key2.pem") @@ -70,15 +72,15 @@ resource "snowflake_external_oauth_integration" "test" { - `external_oauth_snowflake_user_mapping_attribute` (String) Indicates which Snowflake user record attribute should be used to map the access token to a Snowflake user record. Valid values are (case-insensitive): `LOGIN_NAME` | `EMAIL_ADDRESS`. - `external_oauth_token_user_mapping_claim` (Set of String) Specifies the access token claim or claims that can be used to map the access token to a Snowflake user record. If removed from the config, the resource is recreated. - `external_oauth_type` (String) Specifies the OAuth 2.0 authorization server to be Okta, Microsoft Azure AD, Ping Identity PingFederate, or a Custom OAuth 2.0 authorization server. Valid values are (case-insensitive): `OKTA` | `AZURE` | `PING_FEDERATE` | `CUSTOM`. -- `name` (String) Specifies the name of the External Oath integration. This name follows the rules for Object Identifiers. The name should be unique among security integrations in your account. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"` +- `name` (String) Specifies the name of the External OAuth integration. This name follows the rules for Object Identifiers. The name should be unique among security integrations in your account. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"`. ### Optional - `comment` (String) Specifies a comment for the OAuth integration. -- `external_oauth_allowed_roles_list` (Set of String) Specifies the list of roles that the client can set as the primary role.
+- `external_oauth_allowed_roles_list` (Set of String) Specifies the list of roles that the client can set as the primary role. For more information about this resource, see [docs](./account_role). - `external_oauth_any_role_mode` (String) Specifies whether the OAuth client or user can use a role that is not defined in the OAuth access token. Valid values are (case-insensitive): `DISABLE` | `ENABLE` | `ENABLE_FOR_PRIVILEGE`. - `external_oauth_audience_list` (Set of String) Specifies additional values that can be used for the access token's audience validation on top of using the Customer's Snowflake Account URL -- `external_oauth_blocked_roles_list` (Set of String) Specifies the list of roles that a client cannot set as the primary role. By default, this list includes the ACCOUNTADMIN, ORGADMIN and SECURITYADMIN roles. To remove these privileged roles from the list, use the ALTER ACCOUNT command to set the EXTERNAL_OAUTH_ADD_PRIVILEGED_ROLES_TO_BLOCKED_LIST account parameter to FALSE. +- `external_oauth_blocked_roles_list` (Set of String) Specifies the list of roles that a client cannot set as the primary role. By default, this list includes the ACCOUNTADMIN, ORGADMIN and SECURITYADMIN roles. To remove these privileged roles from the list, use the ALTER ACCOUNT command to set the EXTERNAL_OAUTH_ADD_PRIVILEGED_ROLES_TO_BLOCKED_LIST account parameter to FALSE. For more information about this resource, see [docs](./account_role). - `external_oauth_jws_keys_url` (Set of String) Specifies the endpoint or a list of endpoints from which to download public keys or certificates to validate an External OAuth access token. The maximum number of URLs that can be specified in the list is 3. If removed from the config, the resource is recreated. - `external_oauth_rsa_public_key` (String) Specifies a Base64-encoded RSA public key, without the -----BEGIN PUBLIC KEY----- and -----END PUBLIC KEY----- headers. If removed from the config, the resource is recreated. - `external_oauth_rsa_public_key_2` (String) Specifies a second RSA public key, without the -----BEGIN PUBLIC KEY----- and -----END PUBLIC KEY----- headers. Used for key rotation. If removed from the config, the resource is recreated. @@ -293,5 +295,5 @@ Read-Only: Import is supported using the following syntax: ```shell -terraform import snowflake_external_oauth_integration.example "name" +terraform import snowflake_external_oauth_integration.example '""' ``` diff --git a/docs/resources/external_volume.md b/docs/resources/external_volume.md index 249d2a60f4..943f610e93 100644 --- a/docs/resources/external_volume.md +++ b/docs/resources/external_volume.md @@ -16,7 +16,7 @@ Resource used to manage external volume objects. For more information, check [ex ### Required -- `name` (String) Identifier for the external volume; must be unique for your account. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"` +- `name` (String) Identifier for the external volume; must be unique for your account. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"`. 
- `storage_location` (Block List, Min: 1) List of named cloud storage locations in different regions and, optionally, cloud platforms. Minimum 1 required. The order of the list is important as it impacts the active storage location, and updates will be triggered if it changes. Note that not all parameter combinations are valid as they depend on the given storage_provider. Consult [the docs](https://docs.snowflake.com/en/sql-reference/sql/create-external-volume#cloud-provider-parameters-cloudproviderparams) for more details on this. (see [below for nested schema](#nestedblock--storage_location)) ### Optional @@ -37,7 +37,7 @@ Resource used to manage external volume objects. For more information, check [ex Required: - `storage_base_url` (String) Specifies the base URL for your cloud storage location. -- `storage_location_name` (String) Name of the storage location. Must be unique for the external volume. Do not use the name `terraform_provider_sentinel_storage_location` - this is reserved for the provider for performing update operations. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"` +- `storage_location_name` (String) Name of the storage location. Must be unique for the external volume. Do not use the name `terraform_provider_sentinel_storage_location` - this is reserved for the provider for performing update operations. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"`. - `storage_provider` (String) Specifies the cloud storage provider that stores your data files. Valid values are (case-insensitive): `GCS` | `AZURE` | `S3` | `S3GOV`. Optional: diff --git a/docs/resources/grant_account_role.md b/docs/resources/grant_account_role.md index b01aad31af..c98a6b08f7 100644 --- a/docs/resources/grant_account_role.md +++ b/docs/resources/grant_account_role.md @@ -19,11 +19,11 @@ description: |- ################################## resource "snowflake_account_role" "role" { - name = var.role_name + name = "ROLE" } resource "snowflake_account_role" "parent_role" { - name = var.parent_role_name + name = "PARENT_ROLE" } resource "snowflake_grant_account_role" "g" { @@ -37,11 +37,11 @@ resource "snowflake_grant_account_role" "g" { ################################## resource "snowflake_account_role" "role" { - name = var.role_name + name = "ROLE" } resource "snowflake_user" "user" { - name = var.user_name + name = "USER" } resource "snowflake_grant_account_role" "g" { @@ -58,12 +58,12 @@ resource "snowflake_grant_account_role" "g" { ### Required -- `role_name` (String) The fully qualified name of the role which will be granted to the user or parent role. +- `role_name` (String) The fully qualified name of the role which will be granted to the user or parent role. For more information about this resource, see [docs](./account_role). ### Optional -- `parent_role_name` (String) The fully qualified name of the parent role which will create a parent-child relationship between the roles. -- `user_name` (String) The fully qualified name of the user on which specified role will be granted. 
+- `parent_role_name` (String) The fully qualified name of the parent role which will create a parent-child relationship between the roles. For more information about this resource, see [docs](./account_role). +- `user_name` (String) The fully qualified name of the user on which specified role will be granted. For more information about this resource, see [docs](./user). ### Read-Only diff --git a/docs/resources/grant_application_role.md b/docs/resources/grant_application_role.md index 9578e2b830..989c27e7c2 100644 --- a/docs/resources/grant_application_role.md +++ b/docs/resources/grant_application_role.md @@ -55,7 +55,7 @@ resource "snowflake_grant_application_role" "g" { ### Optional - `application_name` (String) The fully qualified name of the application on which application role will be granted. -- `parent_account_role_name` (String) The fully qualified name of the account role on which application role will be granted. +- `parent_account_role_name` (String) The fully qualified name of the account role on which application role will be granted. For more information about this resource, see [docs](./account_role). ### Read-Only diff --git a/docs/resources/grant_database_role.md b/docs/resources/grant_database_role.md index 0071eaf5a0..9ac1dcf848 100644 --- a/docs/resources/grant_database_role.md +++ b/docs/resources/grant_database_role.md @@ -69,13 +69,13 @@ resource "snowflake_grant_database_role" "g" { ### Required -- `database_role_name` (String) The fully qualified name of the database role which will be granted to share or parent role. +- `database_role_name` (String) The fully qualified name of the database role which will be granted to share or parent role. For more information about this resource, see [docs](./database_role). ### Optional -- `parent_database_role_name` (String) The fully qualified name of the parent database role which will create a parent-child relationship between the roles. -- `parent_role_name` (String) The fully qualified name of the parent account role which will create a parent-child relationship between the roles. -- `share_name` (String) The fully qualified name of the share on which privileges will be granted. +- `parent_database_role_name` (String) The fully qualified name of the parent database role which will create a parent-child relationship between the roles. For more information about this resource, see [docs](./database_role). +- `parent_role_name` (String) The fully qualified name of the parent account role which will create a parent-child relationship between the roles. For more information about this resource, see [docs](./account_role). +- `share_name` (String) The fully qualified name of the share on which privileges will be granted. For more information about this resource, see [docs](./share). ### Read-Only diff --git a/docs/resources/grant_ownership.md b/docs/resources/grant_ownership.md index 03eb48c244..9536f727b4 100644 --- a/docs/resources/grant_ownership.md +++ b/docs/resources/grant_ownership.md @@ -252,8 +252,8 @@ To set the `AUTO_REFRESH` property back to `TRUE` (after you transfer ownership) ### Optional -- `account_role_name` (String) The fully qualified name of the account role to which privileges will be granted. -- `database_role_name` (String) The fully qualified name of the database role to which privileges will be granted. +- `account_role_name` (String) The fully qualified name of the account role to which privileges will be granted. For more information about this resource, see [docs](./account_role). 
+- `database_role_name` (String) The fully qualified name of the database role to which privileges will be granted. For more information about this resource, see [docs](./database_role). - `outbound_privileges` (String) Specifies whether to remove or transfer all existing outbound privileges on the object when ownership is transferred to a new role. Available options are: REVOKE for removing existing privileges and COPY to transfer them with ownership. For more information head over to [Snowflake documentation](https://docs.snowflake.com/en/sql-reference/sql/grant-ownership#optional-parameters). ### Read-Only @@ -279,8 +279,8 @@ Required: Optional: -- `in_database` (String) The fully qualified name of the database. -- `in_schema` (String) The fully qualified name of the schema. +- `in_database` (String) The fully qualified name of the database. For more information about this resource, see [docs](./database). +- `in_schema` (String) The fully qualified name of the schema. For more information about this resource, see [docs](./schema). @@ -292,8 +292,8 @@ Required: Optional: -- `in_database` (String) The fully qualified name of the database. -- `in_schema` (String) The fully qualified name of the schema. +- `in_database` (String) The fully qualified name of the database. For more information about this resource, see [docs](./database). +- `in_schema` (String) The fully qualified name of the schema. For more information about this resource, see [docs](./schema). ## Import diff --git a/docs/resources/grant_privileges_to_account_role.md b/docs/resources/grant_privileges_to_account_role.md index a314c5dd16..dfc4dee7f0 100644 --- a/docs/resources/grant_privileges_to_account_role.md +++ b/docs/resources/grant_privileges_to_account_role.md @@ -264,7 +264,7 @@ resource "snowflake_grant_privileges_to_account_role" "example" { ### Required -- `account_role_name` (String) The fully qualified name of the account role to which privileges will be granted. +- `account_role_name` (String) The fully qualified name of the account role to which privileges will be granted. For more information about this resource, see [docs](./account_role). ### Optional @@ -275,7 +275,7 @@ resource "snowflake_grant_privileges_to_account_role" "example" { - `on_account_object` (Block List, Max: 1) Specifies the account object on which privileges will be granted (see [below for nested schema](#nestedblock--on_account_object)) - `on_schema` (Block List, Max: 1) Specifies the schema on which privileges will be granted. (see [below for nested schema](#nestedblock--on_schema)) - `on_schema_object` (Block List, Max: 1) Specifies the schema object on which privileges will be granted. (see [below for nested schema](#nestedblock--on_schema_object)) -- `privileges` (Set of String) The privileges to grant on the account role. +- `privileges` (Set of String) The privileges to grant on the account role. This field is case-sensitive; use only upper-case privileges. - `with_grant_option` (Boolean) Specifies whether the grantee can grant the privileges to other users. 
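Because the `privileges` field above is case-sensitive, a minimal hedged sketch (the privilege names are illustrative, and the `on_account` flag is assumed from the full schema of this resource):

```terraform
resource "snowflake_grant_privileges_to_account_role" "sketch" {
  account_role_name = snowflake_account_role.example.fully_qualified_name
  on_account        = true
  privileges        = ["CREATE DATABASE", "EXECUTE TASK"] # upper-case only
}
```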
### Read-Only diff --git a/docs/resources/grant_privileges_to_database_role.md b/docs/resources/grant_privileges_to_database_role.md index 81f34a561a..9ad169db1b 100644 --- a/docs/resources/grant_privileges_to_database_role.md +++ b/docs/resources/grant_privileges_to_database_role.md @@ -182,14 +182,14 @@ resource "snowflake_grant_privileges_to_database_role" "example" { ### Required -- `database_role_name` (String) The fully qualified name of the database role to which privileges will be granted. +- `database_role_name` (String) The fully qualified name of the database role to which privileges will be granted. For more information about this resource, see [docs](./database_role). ### Optional - `all_privileges` (Boolean) Grant all privileges on the database role. - `always_apply` (Boolean) If true, the resource will always produce a “plan” and on “apply” it will re-grant defined privileges. It is supposed to be used only in “grant privileges on all X’s in database / schema Y” or “grant all privileges to X” scenarios to make sure that every new object in a given database / schema is granted by the account role and every new privilege is granted to the database role. Important note: this flag is not compliant with the Terraform assumptions of the config being eventually convergent (producing an empty plan). - `always_apply_trigger` (String) This is a helper field and should not be set. Its main purpose is to help to achieve the functionality described by the always_apply field. -- `on_database` (String) The fully qualified name of the database on which privileges will be granted. +- `on_database` (String) The fully qualified name of the database on which privileges will be granted. For more information about this resource, see [docs](./database). - `on_schema` (Block List, Max: 1) Specifies the schema on which privileges will be granted. (see [below for nested schema](#nestedblock--on_schema)) - `on_schema_object` (Block List, Max: 1) Specifies the schema object on which privileges will be granted. (see [below for nested schema](#nestedblock--on_schema_object)) - `privileges` (Set of String) The privileges to grant on the database role. diff --git a/docs/resources/grant_privileges_to_share.md b/docs/resources/grant_privileges_to_share.md index cbfb9e14fb..f22c2cb496 100644 --- a/docs/resources/grant_privileges_to_share.md +++ b/docs/resources/grant_privileges_to_share.md @@ -106,17 +106,17 @@ resource "snowflake_grant_privileges_to_share" "example" { ### Required - `privileges` (Set of String) The privileges to grant on the share. See available list of privileges: https://docs.snowflake.com/en/sql-reference/sql/grant-privilege-share#syntax -- `to_share` (String) The fully qualified name of the share on which privileges will be granted. +- `to_share` (String) The fully qualified name of the share on which privileges will be granted. For more information about this resource, see [docs](./share). ### Optional - `on_all_tables_in_schema` (String) The fully qualified identifier for the schema for which the specified privilege will be granted for all tables. -- `on_database` (String) The fully qualified name of the database on which privileges will be granted. +- `on_database` (String) The fully qualified name of the database on which privileges will be granted. For more information about this resource, see [docs](./database). - `on_function` (String) The fully qualified name of the function on which privileges will be granted. 
-- `on_schema` (String) The fully qualified name of the schema on which privileges will be granted. -- `on_table` (String) The fully qualified name of the table on which privileges will be granted. -- `on_tag` (String) The fully qualified name of the tag on which privileges will be granted. -- `on_view` (String) The fully qualified name of the view on which privileges will be granted. +- `on_schema` (String) The fully qualified name of the schema on which privileges will be granted. For more information about this resource, see [docs](./schema). +- `on_table` (String) The fully qualified name of the table on which privileges will be granted. For more information about this resource, see [docs](./table). +- `on_tag` (String) The fully qualified name of the tag on which privileges will be granted. For more information about this resource, see [docs](./tag). +- `on_view` (String) The fully qualified name of the view on which privileges will be granted. For more information about this resource, see [docs](./view). ### Read-Only diff --git a/docs/resources/legacy_service_user.md b/docs/resources/legacy_service_user.md index 57ef504370..35847df2a0 100644 --- a/docs/resources/legacy_service_user.md +++ b/docs/resources/legacy_service_user.md @@ -123,7 +123,7 @@ resource "snowflake_legacy_service_user" "u" { ### Required -- `name` (String) Name of the user. Note that if you do not supply login_name this will be used as login_name. Check the [docs](https://docs.snowflake.net/manuals/sql-reference/sql/create-user.html#required-parameters). Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"` +- `name` (String) Name of the user. Note that if you do not supply login_name this will be used as login_name. Check the [docs](https://docs.snowflake.net/manuals/sql-reference/sql/create-user.html#required-parameters). Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"`. ### Optional @@ -144,9 +144,9 @@ resource "snowflake_legacy_service_user" "u" { - `date_output_format` (String) Specifies the display format for the DATE data type. For more information, see [Date and time input and output formats](https://docs.snowflake.com/en/sql-reference/date-time-input-output). For more information, check [DATE_OUTPUT_FORMAT docs](https://docs.snowflake.com/en/sql-reference/parameters#date-output-format). - `days_to_expiry` (Number) Specifies the number of days after which the user status is set to `Expired` and the user is no longer allowed to log in. This is useful for defining temporary users (i.e. users who should only have access to Snowflake for a limited time period). In general, you should not set this property for [account administrators](https://docs.snowflake.com/en/user-guide/security-access-control-considerations.html#label-accountadmin-users) (i.e. users with the `ACCOUNTADMIN` role) because Snowflake locks them out when they become `Expired`. External changes for this field won't be detected. In case you want to apply external changes, you can re-create the resource manually using "terraform taint". 
- `default_namespace` (String) Specifies the namespace (database only or database and schema) that is active by default for the user’s session upon login. Note that the CREATE USER operation does not verify that the namespace exists. -- `default_role` (String) Specifies the role that is active by default for the user’s session upon login. Note that specifying a default role for a user does **not** grant the role to the user. The role must be granted explicitly to the user using the [GRANT ROLE](https://docs.snowflake.com/en/sql-reference/sql/grant-role) command. In addition, the CREATE USER operation does not verify that the role exists. +- `default_role` (String) Specifies the role that is active by default for the user’s session upon login. Note that specifying a default role for a user does **not** grant the role to the user. The role must be granted explicitly to the user using the [GRANT ROLE](https://docs.snowflake.com/en/sql-reference/sql/grant-role) command. In addition, the CREATE USER operation does not verify that the role exists. For more information about this resource, see [docs](./account_role). - `default_secondary_roles_option` (String) Specifies the secondary roles that are active for the user’s session upon login. Valid values are (case-insensitive): `DEFAULT` | `NONE` | `ALL`. More information can be found in [doc](https://docs.snowflake.com/en/sql-reference/sql/create-user#optional-object-properties-objectproperties). -- `default_warehouse` (String) Specifies the virtual warehouse that is active by default for the user’s session upon login. Note that the CREATE USER operation does not verify that the warehouse exists. +- `default_warehouse` (String) Specifies the virtual warehouse that is active by default for the user’s session upon login. Note that the CREATE USER operation does not verify that the warehouse exists. For more information about this resource, see [docs](./warehouse). - `disabled` (String) Specifies whether the user is disabled, which prevents logging in and aborts all the currently-running queries for the user. Available options are: "true" or "false". When the value is not set in the configuration the provider will put "default" there which means to use the Snowflake default for this value. - `display_name` (String) Name displayed for the user in the Snowflake web interface. - `email` (String, Sensitive) Email address for the user. @@ -169,7 +169,7 @@ resource "snowflake_legacy_service_user" "u" { - `network_policy` (String) Specifies the network policy to enforce for your account. Network policies enable restricting access to your account based on users’ IP address. For more details, see [Controlling network traffic with network policies](https://docs.snowflake.com/en/user-guide/network-policies). Any existing network policy (created using [CREATE NETWORK POLICY](https://docs.snowflake.com/en/sql-reference/sql/create-network-policy)). For more information, check [NETWORK_POLICY docs](https://docs.snowflake.com/en/sql-reference/parameters#network-policy). - `noorder_sequence_as_default` (Boolean) Specifies whether the ORDER or NOORDER property is set by default when you create a new sequence or add a new table column. The ORDER and NOORDER properties determine whether or not the values are generated for the sequence or auto-incremented column in [increasing or decreasing order](https://docs.snowflake.com/en/user-guide/querying-sequences.html#label-querying-sequences-increasing-values). 
For more information, check [NOORDER_SEQUENCE_AS_DEFAULT docs](https://docs.snowflake.com/en/sql-reference/parameters#noorder-sequence-as-default). - `odbc_treat_decimal_as_int` (Boolean) Specifies how ODBC processes columns that have a scale of zero (0). For more information, check [ODBC_TREAT_DECIMAL_AS_INT docs](https://docs.snowflake.com/en/sql-reference/parameters#odbc-treat-decimal-as-int). -- `password` (String, Sensitive) Password for the user. **WARNING:** this will put the password in the terraform state file. Use carefully. +- `password` (String, Sensitive) Password for the user. **WARNING:** this will put the password in the terraform state file. Use carefully. External changes for this field won't be detected. In case you want to apply external changes, you can re-create the resource manually using "terraform taint". - `prevent_unload_to_internal_stages` (Boolean) Specifies whether to prevent data unload operations to internal (Snowflake) stages using [COPY INTO ](https://docs.snowflake.com/en/sql-reference/sql/copy-into-location) statements. For more information, check [PREVENT_UNLOAD_TO_INTERNAL_STAGES docs](https://docs.snowflake.com/en/sql-reference/parameters#prevent-unload-to-internal-stages). - `query_tag` (String) Optional string that can be used to tag queries and other SQL statements executed within a session. The tags are displayed in the output of the [QUERY_HISTORY, QUERY_HISTORY_BY_*](https://docs.snowflake.com/en/sql-reference/functions/query_history) functions. For more information, check [QUERY_TAG docs](https://docs.snowflake.com/en/sql-reference/parameters#query-tag). - `quoted_identifiers_ignore_case` (Boolean) Specifies whether letters in double-quoted object identifiers are stored and resolved as uppercase letters. By default, Snowflake preserves the case of alphabetic characters when storing and resolving double-quoted identifiers (see [Identifier resolution](https://docs.snowflake.com/en/sql-reference/identifiers-syntax.html#label-identifier-casing)). You can use this parameter in situations in which [third-party applications always use double quotes around identifiers](https://docs.snowflake.com/en/sql-reference/identifiers-syntax.html#label-identifier-casing-parameter). For more information, check [QUOTED_IDENTIFIERS_IGNORE_CASE docs](https://docs.snowflake.com/en/sql-reference/parameters#quoted-identifiers-ignore-case). diff --git a/docs/resources/masking_policy.md b/docs/resources/masking_policy.md index cb34591a7f..4adb212f90 100644 --- a/docs/resources/masking_policy.md +++ b/docs/resources/masking_policy.md @@ -7,8 +7,7 @@ description: |- !> **V1 release candidate** This resource was reworked and is a release candidate for the V1. We do not expect significant changes in it before the V1. We will welcome any feedback and adjust the resource if needed. Any errors reported will be resolved with a higher priority. We encourage checking this resource out before the V1 release. Please follow the [migration guide](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/MIGRATION_GUIDE.md#v0950--v0960) to use it. -> [!WARNING] -> According to Snowflake [docs](https://docs.snowflake.com/en/sql-reference/sql/drop-masking-policy#usage-notes), a masking policy cannot be dropped successfully if it is currently assigned to another object. Currently, the provider does not unassign such objects automatically. 
Before dropping the resource, list the assigned objects with `SELECT * from table(information_schema.policy_references(policy_name=>''));` and unassign them manually with `ALTER ...` or with updated Terraform configuration, if possible. +!> **Note** According to Snowflake [docs](https://docs.snowflake.com/en/sql-reference/sql/drop-masking-policy#usage-notes), a masking policy cannot be dropped successfully if it is currently assigned to another object. Currently, the provider does not unassign such objects automatically. Before dropping the resource, first unassign the policy from the relevant objects. See [guide](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/latest/docs/guides/unassigning_policies) for more details. # snowflake_masking_policy (Resource) @@ -89,10 +88,10 @@ EOF - `argument` (Block List, Min: 1) List of the arguments for the masking policy. The first column and its data type always indicate the column data type values to mask or tokenize in the subsequent policy conditions. Note that you can not specify a virtual column as the first column argument in a conditional masking policy. (see [below for nested schema](#nestedblock--argument)) - `body` (String) Specifies the SQL expression that transforms the data. To mitigate permadiff on this field, the provider replaces blank characters with a space. This can lead to false positives in cases where a change in case or run of whitespace is semantically significant. -- `database` (String) The database in which to create the masking policy. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"` -- `name` (String) Specifies the identifier for the masking policy; must be unique for the database and schema in which the masking policy is created. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"` +- `database` (String) The database in which to create the masking policy. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"`. +- `name` (String) Specifies the identifier for the masking policy; must be unique for the database and schema in which the masking policy is created. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"`. - `return_data_type` (String) The return data type must match the input data type of the first column that is specified as an input column. For more information about data types, check [Snowflake docs](https://docs.snowflake.com/en/sql-reference/intro-summary-data-types). -- `schema` (String) The schema in which to create the masking policy. 
Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"` +- `schema` (String) The schema in which to create the masking policy. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"`. ### Optional diff --git a/docs/resources/network_policy.md b/docs/resources/network_policy.md index f6eda6e0d8..b77d2776f1 100644 --- a/docs/resources/network_policy.md +++ b/docs/resources/network_policy.md @@ -7,8 +7,9 @@ description: |- !> **V1 release candidate** This resource was reworked and is a release candidate for the V1. We do not expect significant changes in it before the V1. We will welcome any feedback and adjust the resource if needed. Any errors reported will be resolved with a higher priority. We encourage checking this resource out before the V1 release. Please follow the [migration guide](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/MIGRATION_GUIDE.md#v0920--v0930) to use it. -> [!WARNING] -> According to Snowflake [docs](https://docs.snowflake.com/en/sql-reference/sql/drop-network-policy#usage-notes), a network policy cannot be dropped successfully if it is currently assigned to another object. Currently, the provider does not unassign such objects automatically. Before dropping the resource, list the assigned objects with `SELECT * from table(information_schema.policy_references(policy_name=>''));` and unassign them manually with `ALTER ...` or with updated Terraform configuration, if possible. +!> **Note** According to Snowflake [docs](https://docs.snowflake.com/en/sql-reference/sql/drop-network-policy#usage-notes), a network policy cannot be dropped successfully if it is currently assigned to another object. Currently, the provider does not unassign such objects automatically. Before dropping the resource, first unassign the policy from the relevant objects. See [guide](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/latest/docs/guides/unassigning_policies) for more details. + +!> **Note** Due to technical limitations in the Terraform SDK, changes in `allowed_network_rule_list` and `blocked_network_rule_list` do not cause a diff for `show_output` and `describe_output`. # snowflake_network_policy (Resource) @@ -25,8 +26,8 @@ resource "snowflake_network_policy" "basic" { ## Complete (with every optional set) resource "snowflake_network_policy" "complete" { name = "network_policy_name" - allowed_network_rule_list = [""] - blocked_network_rule_list = [""] + allowed_network_rule_list = [snowflake_network_rule.one.fully_qualified_name] + blocked_network_rule_list = [snowflake_network_rule.two.fully_qualified_name] allowed_ip_list = ["192.168.1.0/24"] blocked_ip_list = ["192.168.1.99"] comment = "my network policy" @@ -40,14 +41,14 @@ resource "snowflake_network_policy" "complete" { ### Required -- `name` (String) Specifies the identifier for the network policy; must be unique for the account in which the network policy is created.
Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"` +- `name` (String) Specifies the identifier for the network policy; must be unique for the account in which the network policy is created. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"`. ### Optional - `allowed_ip_list` (Set of String) Specifies one or more IPv4 addresses (CIDR notation) that are allowed access to your Snowflake account. -- `allowed_network_rule_list` (Set of String) Specifies a list of fully qualified network rules that contain the network identifiers that are allowed access to Snowflake. +- `allowed_network_rule_list` (Set of String) Specifies a list of fully qualified network rules that contain the network identifiers that are allowed access to Snowflake. For more information about this resource, see [docs](./network_rule). - `blocked_ip_list` (Set of String) Specifies one or more IPv4 addresses (CIDR notation) that are denied access to your Snowflake account. **Do not** add `0.0.0.0/0` to `blocked_ip_list`, in order to block all IP addresses except a select list, you only need to add IP addresses to `allowed_ip_list`. -- `blocked_network_rule_list` (Set of String) Specifies a list of fully qualified network rules that contain the network identifiers that are denied access to Snowflake. +- `blocked_network_rule_list` (Set of String) Specifies a list of fully qualified network rules that contain the network identifiers that are denied access to Snowflake. For more information about this resource, see [docs](./network_rule). - `comment` (String) Specifies a comment for the network policy. ### Read-Only @@ -86,5 +87,5 @@ Read-Only: Import is supported using the following syntax: ```shell -terraform import snowflake_network_policy.example "name" +terraform import snowflake_network_policy.example '""' ``` diff --git a/docs/resources/network_rule.md b/docs/resources/network_rule.md index a9a723dd4e..64052fbcfb 100644 --- a/docs/resources/network_rule.md +++ b/docs/resources/network_rule.md @@ -5,6 +5,8 @@ description: |- --- +!> **Note** A network rule cannot be dropped successfully if it is currently assigned to a network policy. Currently, the provider does not unassign such objects automatically. Before dropping the resource, first unassign the network rule from the relevant objects. See [guide](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/latest/docs/guides/unassigning_policies) for more details. + # snowflake_network_rule (Resource) @@ -22,7 +24,6 @@ resource "snowflake_network_rule" "rule" { value_list = ["192.168.0.100/24", "29.254.123.20"] } ``` - -> **Note** Instead of using fully_qualified_name, you can reference objects managed outside Terraform by constructing a correct ID, consult [identifiers guide](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/latest/docs/guides/identifiers#new-computed-fully-qualified-name-field-in-resources). 
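Because the policy references rules by their fully qualified names, a short sketch may help connect the two resources; it assumes the `snowflake_network_rule.rule` resource from the example above and illustrates why the rule cannot be dropped while a policy still lists it:

```terraform
# A network policy referencing the rule; the rule must be removed from
# allowed_network_rule_list (or the policy destroyed) before the rule
# itself can be dropped.
resource "snowflake_network_policy" "sketch" {
  name                      = "network_policy_name"
  allowed_network_rule_list = [snowflake_network_rule.rule.fully_qualified_name]
}
```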
diff --git a/docs/resources/oauth_integration_for_custom_clients.md b/docs/resources/oauth_integration_for_custom_clients.md index 2ed8b0521e..8a5182a45d 100644 --- a/docs/resources/oauth_integration_for_custom_clients.md +++ b/docs/resources/oauth_integration_for_custom_clients.md @@ -7,6 +7,10 @@ description: |- !> **V1 release candidate** This resource was reworked and is a release candidate for the V1. We do not expect significant changes in it before the V1. We will welcome any feedback and adjust the resource if needed. Any errors reported will be resolved with a higher priority. We encourage checking this resource out before the V1 release. Please follow the [migration guide](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/MIGRATION_GUIDE.md#v0920--v0930) to use it. +!> **Note** Setting a network policy with lowercase letters does not work correctly in Snowflake (see [issue](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/3229)). As a workaround, set the network policy with uppercase letters only, or use `unsafe_execute` with the network policy ID wrapped in `'`. + +!> **Note** The provider does not detect external changes on security integration type. In this case, remove the integration of the wrong type manually with `terraform destroy` and recreate the resource. This limitation will be addressed in the future. + # snowflake_oauth_integration_for_custom_clients (Resource) Resource used to manage oauth security integration for custom clients objects. For more information, check [security integrations documentation](https://docs.snowflake.com/en/sql-reference/sql/create-security-integration-oauth-snowflake). @@ -16,7 +20,7 @@ Resource used to manage oauth security integration for custom clients objects. F ```terraform # basic resource resource "snowflake_oauth_integration_for_custom_clients" "basic" { - name = "saml_integration" + name = "integration" oauth_client_type = "CONFIDENTIAL" oauth_redirect_uri = "https://example.com" blocked_roles_list = ["ACCOUNTADMIN", "SECURITYADMIN"] @@ -24,18 +28,18 @@ # resource with all fields set resource "snowflake_oauth_integration_for_custom_clients" "complete" { - name = "saml_integration" + name = "integration" oauth_client_type = "CONFIDENTIAL" oauth_redirect_uri = "https://example.com" enabled = "true" oauth_allow_non_tls_redirect_uri = "true" oauth_enforce_pkce = "true" oauth_use_secondary_roles = "NONE" - pre_authorized_roles_list = ["role_id1", "role_id2"] - blocked_roles_list = ["ACCOUNTADMIN", "SECURITYADMIN", "role_id1", "role_id2"] + pre_authorized_roles_list = [snowflake_role.one.fully_qualified_name, snowflake_role.two.fully_qualified_name] + blocked_roles_list = ["ACCOUNTADMIN", "SECURITYADMIN", snowflake_role.three.fully_qualified_name, snowflake_role.four.fully_qualified_name] oauth_issue_refresh_tokens = "true" oauth_refresh_token_validity = 87600 - network_policy = "network_policy_id" + network_policy = snowflake_network_policy.example.fully_qualified_name oauth_client_rsa_public_key = file("rsa.pub") oauth_client_rsa_public_key_2 = file("rsa2.pub") comment = "my oauth integration" @@ -49,8 +53,8 @@ resource "snowflake_oauth_integration_for_custom_clients" "complete" { ### Required -- `blocked_roles_list` (Set of String) A set of Snowflake roles that a user cannot explicitly consent to using after authenticating. -- `name` (String) Specifies the name of the OAuth integration. This name follows the rules for Object Identifiers.
The name should be unique among security integrations in your account. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"` +- `blocked_roles_list` (Set of String) A set of Snowflake roles that a user cannot explicitly consent to using after authenticating. For more information about this resource, see [docs](./account_role). +- `name` (String) Specifies the name of the OAuth integration. This name follows the rules for Object Identifiers. The name should be unique among security integrations in your account. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"`. - `oauth_client_type` (String) Specifies the type of client being registered. Snowflake supports both confidential and public clients. Valid options are: `PUBLIC` | `CONFIDENTIAL`. - `oauth_redirect_uri` (String) Specifies the client URI. After a user is authenticated, the web browser is redirected to this URI. @@ -58,15 +62,15 @@ resource "snowflake_oauth_integration_for_custom_clients" "complete" { - `comment` (String) Specifies a comment for the OAuth integration. - `enabled` (String) Specifies whether this OAuth integration is enabled or disabled. Available options are: "true" or "false". When the value is not set in the configuration the provider will put "default" there which means to use the Snowflake default for this value. -- `network_policy` (String) Specifies an existing network policy. This network policy controls network traffic that is attempting to exchange an authorization code for an access or refresh token or to use a refresh token to obtain a new access token. +- `network_policy` (String) Specifies an existing network policy. This network policy controls network traffic that is attempting to exchange an authorization code for an access or refresh token or to use a refresh token to obtain a new access token. For more information about this resource, see [docs](./network_policy). - `oauth_allow_non_tls_redirect_uri` (String) If true, allows setting oauth_redirect_uri to a URI not protected by TLS. Available options are: "true" or "false". When the value is not set in the configuration the provider will put "default" there which means to use the Snowflake default for this value. -- `oauth_client_rsa_public_key` (String) Specifies a Base64-encoded RSA public key, without the -----BEGIN PUBLIC KEY----- and -----END PUBLIC KEY----- headers. External changes for this field won't be detected. In case you want to apply external changes, you can re-create the resource using `terraform taint`. -- `oauth_client_rsa_public_key_2` (String) Specifies a Base64-encoded RSA public key, without the -----BEGIN PUBLIC KEY----- and -----END PUBLIC KEY----- headers. External changes for this field won't be detected. In case you want to apply external changes, you can re-create the resource using `terraform taint`. +- `oauth_client_rsa_public_key` (String) Specifies a Base64-encoded RSA public key, without the -----BEGIN PUBLIC KEY----- and -----END PUBLIC KEY----- headers. External changes for this field won't be detected. 
In case you want to apply external changes, you can re-create the resource manually using "terraform taint". - `oauth_enforce_pkce` (String) Boolean that specifies whether Proof Key for Code Exchange (PKCE) should be required for the integration. Available options are: "true" or "false". When the value is not set in the configuration the provider will put "default" there which means to use the Snowflake default for this value. - `oauth_issue_refresh_tokens` (String) Specifies whether to allow the client to exchange a refresh token for an access token when the current access token has expired. Available options are: "true" or "false". When the value is not set in the configuration the provider will put "default" there which means to use the Snowflake default for this value. - `oauth_refresh_token_validity` (Number) Specifies how long refresh tokens should be valid (in seconds). OAUTH_ISSUE_REFRESH_TOKENS must be set to TRUE. - `oauth_use_secondary_roles` (String) Specifies whether default secondary roles set in the user properties are activated by default in the session being opened. Valid options are: `IMPLICIT` | `NONE`. -- `pre_authorized_roles_list` (Set of String) A set of Snowflake roles that a user does not need to explicitly consent to using after authenticating. +- `pre_authorized_roles_list` (Set of String) A set of Snowflake roles that a user does not need to explicitly consent to using after authenticating. For more information about this resource, see [docs](./account_role). ### Read-Only @@ -327,5 +331,5 @@ Read-Only: Import is supported using the following syntax: ```shell -terraform import snowflake_oauth_integration_for_custom_clients.example "name" +terraform import snowflake_oauth_integration_for_custom_clients.example '""' ``` diff --git a/docs/resources/oauth_integration_for_partner_applications.md b/docs/resources/oauth_integration_for_partner_applications.md index 0d9f1c139e..48311dbca2 100644 --- a/docs/resources/oauth_integration_for_partner_applications.md +++ b/docs/resources/oauth_integration_for_partner_applications.md @@ -7,6 +7,8 @@ description: |- !> **V1 release candidate** This resource was reworked and is a release candidate for the V1. We do not expect significant changes in it before the V1. We will welcome any feedback and adjust the resource if needed. Any errors reported will be resolved with a higher priority. We encourage checking this resource out before the V1 release. Please follow the [migration guide](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/MIGRATION_GUIDE.md#v0920--v0930) to use it. +!> **Note** The provider does not detect external changes on security integration type. In this case, remove the integration of the wrong type manually with `terraform destroy` and recreate the resource. This limitation will be addressed in the future. + # snowflake_oauth_integration_for_partner_applications (Resource) Resource used to manage oauth security integration for partner applications objects. For more information, check [security integrations documentation](https://docs.snowflake.com/en/sql-reference/sql/create-security-integration-oauth-snowflake).
@@ -30,7 +32,7 @@ resource "snowflake_oauth_integration_for_partner_applications" "test" { oauth_issue_refresh_tokens = "true" oauth_refresh_token_validity = 3600 oauth_use_secondary_roles = "IMPLICIT" - blocked_roles_list = ["ACCOUNTADMIN", "SECURITYADMIN", "role_id1", "role_id2"] + blocked_roles_list = ["ACCOUNTADMIN", "SECURITYADMIN", snowflake_role.one.fully_qualified_name, snowflake_role.two.fully_qualified_name] comment = "example oauth integration for partner applications" } ``` @@ -42,8 +44,8 @@ resource "snowflake_oauth_integration_for_partner_applications" "test" { ### Required -- `blocked_roles_list` (Set of String) A set of Snowflake roles that a user cannot explicitly consent to using after authenticating. -- `name` (String) Specifies the name of the OAuth integration. This name follows the rules for Object Identifiers. The name should be unique among security integrations in your account. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"` +- `blocked_roles_list` (Set of String) A set of Snowflake roles that a user cannot explicitly consent to using after authenticating. For more information about this resource, see [docs](./account_role). +- `name` (String) Specifies the name of the OAuth integration. This name follows the rules for Object Identifiers. The name should be unique among security integrations in your account. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"`. - `oauth_client` (String) Creates an OAuth interface between Snowflake and a partner application. Valid options are: `LOOKER` | `TABLEAU_DESKTOP` | `TABLEAU_SERVER`. ### Optional diff --git a/docs/resources/password_policy.md b/docs/resources/password_policy.md index c88f79b2f5..e3131cb1a3 100644 --- a/docs/resources/password_policy.md +++ b/docs/resources/password_policy.md @@ -5,8 +5,7 @@ description: |- A password policy specifies the requirements that must be met to create and reset a password to authenticate to Snowflake. --- -> [!WARNING] -> According to Snowflake [docs](https://docs.snowflake.com/en/sql-reference/sql/drop-password-policy#usage-notes), a password policy cannot be dropped successfully if it is currently assigned to another object. Currently, the provider does not unassign such objects automatically. Before dropping the resource, list the assigned objects with `SELECT * from table(information_schema.policy_references(policy_name=>''));` and unassign them manually with `ALTER ...` or with updated Terraform configuration, if possible. +!> **Note** According to Snowflake [docs](https://docs.snowflake.com/en/sql-reference/sql/drop-password-policy#usage-notes), a password policy cannot be dropped successfully if it is currently assigned to another object. Currently, the provider does not unassign such objects automatically. Before dropping the resource, first unassign the policy from the relevant objects. See [guide](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/latest/docs/guides/unassigning_policies) for more details. 
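As a sketch of the unassign-before-drop workflow described in the note above, the policy attachment can be managed in the same configuration so that Terraform unassigns the policy before dropping it; the `snowflake_account_password_policy_attachment` resource and all names here are illustrative assumptions:

```terraform
# Managing the attachment alongside the policy lets `terraform destroy`
# unassign the policy from the account before the policy is dropped.
resource "snowflake_password_policy" "example" {
  database = "EXAMPLE_DB"
  schema   = "EXAMPLE_SCHEMA"
  name     = "EXAMPLE_PASSWORD_POLICY"
}

resource "snowflake_account_password_policy_attachment" "example" {
  password_policy = snowflake_password_policy.example.fully_qualified_name
}
```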
# snowflake_password_policy (Resource) diff --git a/docs/resources/primary_connection.md b/docs/resources/primary_connection.md index 27d726a362..20009789d2 100644 --- a/docs/resources/primary_connection.md +++ b/docs/resources/primary_connection.md @@ -24,14 +24,14 @@ resource "snowflake_primary_connection" "complete" { name = "connection_name" comment = "my complete connection" enable_failover_to_accounts = [ - "." + "\"\".\"\"" ] } ``` -> **Note** Instead of using fully_qualified_name, you can reference objects managed outside Terraform by constructing a correct ID, consult [identifiers guide](../docs/guides/identifiers#new-computed-fully-qualified-name-field-in-resources). --> **Note** To demote `snowflake_primary_connection` to [`snowflake_secondary_connection`](./secondary_connection), resources need to be migrated manually. For guidance on removing and importing resources into the state check [resource migration](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/resource_migration.md). Remove the resource from the state, then recreate it in manually using: +-> **Note** To demote `snowflake_primary_connection` to [`snowflake_secondary_connection`](./secondary_connection), resources need to be migrated manually. For guidance on removing and importing resources into the state check [resource migration](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/resource_migration.md). Remove the resource from the state with [terraform state rm](https://developer.hashicorp.com/terraform/cli/commands/state/rm), then recreate it manually using: ``` CREATE CONNECTION AS REPLICA OF ..; ``` @@ -43,12 +43,12 @@ and then import it as the `snowflake_secondary_connection`. ### Required -- `name` (String) String that specifies the identifier (i.e. name) for the connection. Must start with an alphabetic character and may only contain letters, decimal digits (0-9), and underscores (_). For a primary connection, the name must be unique across connection names and account names in the organization. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"` +- `name` (String) String that specifies the identifier (i.e. name) for the connection. Must start with an alphabetic character and may only contain letters, decimal digits (0-9), and underscores (_). For a primary connection, the name must be unique across connection names and account names in the organization. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"`. ### Optional - `comment` (String) Specifies a comment for the connection. -- `enable_failover_to_accounts` (List of String) Enables failover for given connection to provided accounts.
Specifies a list of accounts in your organization where a secondary connection for this primary connection can be promoted to serve as the primary connection. Include your organization name for each account in the list. For more information about this resource, see [docs](./account). ### Read-Only @@ -80,5 +80,5 @@ Read-Only: Import is supported using the following syntax: ```shell -terraform import snowflake_primary_connection.example 'connection_name' +terraform import snowflake_primary_connection.example '""' ``` diff --git a/docs/resources/resource_monitor.md b/docs/resources/resource_monitor.md index c5eb401268..a674411c2f 100644 --- a/docs/resources/resource_monitor.md +++ b/docs/resources/resource_monitor.md @@ -31,7 +31,7 @@ resource "snowflake_resource_monitor" "minimal_working" { name = "resource-monitor-name" credit_quota = 100 suspend_trigger = 100 - notify_users = ["USERONE", "USERTWO"] + notify_users = [snowflake_user.one.fully_qualified_name, snowflake_user.two.fully_qualified_name] } resource "snowflake_resource_monitor" "complete" { @@ -46,7 +46,7 @@ resource "snowflake_resource_monitor" "complete" { suspend_trigger = 50 suspend_immediate_trigger = 90 - notify_users = ["USERONE", "USERTWO"] + notify_users = [snowflake_user.one.fully_qualified_name, snowflake_user.two.fully_qualified_name] } ``` -> **Note** Instead of using fully_qualified_name, you can reference objects managed outside Terraform by constructing a correct ID, consult [identifiers guide](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/latest/docs/guides/identifiers#new-computed-fully-qualified-name-field-in-resources). @@ -57,7 +57,7 @@ resource "snowflake_resource_monitor" "complete" { ### Required -- `name` (String) Identifier for the resource monitor; must be unique for your account. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"` +- `name` (String) Identifier for the resource monitor; must be unique for your account. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"`. ### Optional @@ -65,7 +65,7 @@ resource "snowflake_resource_monitor" "complete" { - `end_timestamp` (String) The date and time when the resource monitor suspends the assigned warehouses. - `frequency` (String) The frequency interval at which the credit usage resets to 0. Valid values are (case-insensitive): `MONTHLY` | `DAILY` | `WEEKLY` | `YEARLY` | `NEVER`. If you set a `frequency` for a resource monitor, you must also set `start_timestamp`. If you specify `NEVER` for the frequency, the credit usage for the warehouse does not reset. After removing this field from the config, the previously set value will be preserved on the Snowflake side, not the default value. That's due to Snowflake limitation and the lack of unset functionality for this parameter. - `notify_triggers` (Set of Number) Specifies a list of percentages of the credit quota. After reaching any of the values the users passed in the notify_users field will be notified (to receive the notification they should have notifications enabled). Values over 100 are supported. 
-- `notify_users` (Set of String) Specifies the list of users (their identifiers) to receive email notifications on resource monitors. +- `notify_users` (Set of String) Specifies the list of users (their identifiers) to receive email notifications on resource monitors. For more information about this resource, see [docs](./user). - `start_timestamp` (String) The date and time when the resource monitor starts monitoring credit usage for the assigned warehouses. If you set a `start_timestamp` for a resource monitor, you must also set `frequency`. After removing this field from the config, the previously set value will be preserved on the Snowflake side, not the default value. That's due to Snowflake limitation and the lack of unset functionality for this parameter. - `suspend_immediate_trigger` (Number) Represents a numeric value specified as a percentage of the credit quota. Values over 100 are supported. After reaching this value, all assigned warehouses immediately cancel any currently running queries or statements. In addition, this action sends a notification to all users who have enabled notifications for themselves. - `suspend_trigger` (Number) Represents a numeric value specified as a percentage of the credit quota. Values over 100 are supported. After reaching this value, all assigned warehouses while allowing currently running queries to complete will be suspended. No new queries can be executed by the warehouses until the credit quota for the resource monitor is increased. In addition, this action sends a notification to all users who have enabled notifications for themselves. @@ -100,6 +100,5 @@ Read-Only: Import is supported using the following syntax: ```shell -# format is the resource monitor name -terraform import snowflake_resource_monitor.example 'resourceMonitorName' +terraform import snowflake_resource_monitor.example '""' ``` diff --git a/docs/resources/role.md b/docs/resources/role.md index cf79b2cdf3..6dba422fba 100644 --- a/docs/resources/role.md +++ b/docs/resources/role.md @@ -34,7 +34,7 @@ resource "snowflake_role" "complete" { ### Required -- `name` (String) Identifier for the role; must be unique for your account. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"` +- `name` (String) Identifier for the role; must be unique for your account. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"`. ### Optional diff --git a/docs/resources/row_access_policy.md b/docs/resources/row_access_policy.md index 0023f99391..5f243e1b1c 100644 --- a/docs/resources/row_access_policy.md +++ b/docs/resources/row_access_policy.md @@ -7,8 +7,7 @@ description: |- !> **V1 release candidate** This resource was reworked and is a release candidate for the V1. We do not expect significant changes in it before the V1. We will welcome any feedback and adjust the resource if needed. Any errors reported will be resolved with a higher priority. We encourage checking this resource out before the V1 release. 
Please follow the [migration guide](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/MIGRATION_GUIDE.md#v0950--v0960) to use it. -> [!WARNING] -> According to Snowflake [docs](https://docs.snowflake.com/en/sql-reference/sql/drop-row-access-policy#usage-notes), a row access policy cannot be dropped successfully if it is currently assigned to another object. Currently, the provider does not unassign such objects automatically. Before dropping the resource, list the assigned objects with `SELECT * from table(information_schema.policy_references(policy_name=>''));` and unassign them manually with `ALTER ...` or with updated Terraform configuration, if possible. +!> **Note** According to Snowflake [docs](https://docs.snowflake.com/en/sql-reference/sql/drop-row-access-policy#usage-notes), a row access policy cannot be dropped successfully if it is currently assigned to another object. Currently, the provider does not unassign such objects automatically. Before dropping the resource, first unassign the policy from the relevant objects. See [guide](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/latest/docs/guides/unassigning_policies) for more details. # snowflake_row_access_policy (Resource) @@ -17,6 +16,7 @@ Resource used to manage row access policy objects. For more information, check [ ## Example Usage ```terraform +# resource with all fields set resource "snowflake_row_access_policy" "example_row_access_policy" { name = "EXAMPLE_ROW_ACCESS_POLICY" database = "EXAMPLE_DB" @@ -47,9 +47,9 @@ resource "snowflake_row_access_policy" "example_row_access_policy" { - `argument` (Block List, Min: 1) List of the arguments for the row access policy. A signature specifies a set of attributes that must be considered to determine whether the row is accessible. The attribute values come from the database object (e.g. table or view) to be protected by the row access policy. If any argument name or type is changed, the resource is recreated. (see [below for nested schema](#nestedblock--argument)) - `body` (String) Specifies the SQL expression. The expression can be any boolean-valued SQL expression. To mitigate permadiff on this field, the provider replaces blank characters with a space. This can lead to false positives in cases where a change in case or run of whitespace is semantically significant. -- `database` (String) The database in which to create the row access policy. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"` -- `name` (String) Specifies the identifier for the row access policy; must be unique for the database and schema in which the row access policy is created. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"` -- `schema` (String) The schema in which to create the row access policy. 
Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"` +- `database` (String) The database in which to create the row access policy. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"`. +- `name` (String) Specifies the identifier for the row access policy; must be unique for the database and schema in which the row access policy is created. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"`. +- `schema` (String) The schema in which to create the row access policy. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"`. ### Optional diff --git a/docs/resources/saml2_integration.md b/docs/resources/saml2_integration.md index 38e45dc4d8..fa55be5e87 100644 --- a/docs/resources/saml2_integration.md +++ b/docs/resources/saml2_integration.md @@ -2,14 +2,16 @@ page_title: "snowflake_saml2_integration Resource - terraform-provider-snowflake" subcategory: "" description: |- - Resource used to manage saml2 security integration objects. For more information, check security integrations documentation https://docs.snowflake.com/en/sql-reference/sql/create-security-integration-saml2. + Resource used to manage SAML2 security integration objects. For more information, check security integrations documentation https://docs.snowflake.com/en/sql-reference/sql/create-security-integration-saml2. --- !> **V1 release candidate** This resource was reworked and is a release candidate for the V1. We do not expect significant changes in it before the V1. We will welcome any feedback and adjust the resource if needed. Any errors reported will be resolved with a higher priority. We encourage checking this resource out before the V1 release. Please follow the [migration guide](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/MIGRATION_GUIDE.md#v0920--v0930) to use it. +!> **Note** The provider does not detect external changes on security integration type. In this case, remove the integration of the wrong type manually with `terraform destroy` and recreate the resource. This limitation will be addressed in the future. + # snowflake_saml2_integration (Resource) -Resource used to manage saml2 security integration objects. For more information, check [security integrations documentation](https://docs.snowflake.com/en/sql-reference/sql/create-security-integration-saml2). +Resource used to manage SAML2 security integration objects. For more information, check [security integrations documentation](https://docs.snowflake.com/en/sql-reference/sql/create-security-integration-saml2).
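For orientation before the full example below, a minimal sketch with only the required fields; all values are placeholders, and `saml2_x509_cert` is assumed to be required alongside the fields listed under Required:

```terraform
# A minimal SAML2 integration sketch with placeholder values.
resource "snowflake_saml2_integration" "sketch" {
  name            = "saml_integration"
  saml2_issuer    = "test_issuer"
  saml2_provider  = "CUSTOM"
  saml2_sso_url   = "https://example.com/sso"
  saml2_x509_cert = file("cert.pem") # assumed required; see the schema below
}
```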
## Example Usage @@ -53,7 +55,7 @@ resource "snowflake_saml2_integration" "test" { ### Required -- `name` (String) Specifies the name of the SAML2 integration. This name follows the rules for Object Identifiers. The name should be unique among security integrations in your account. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"` +- `name` (String) Specifies the name of the SAML2 integration. This name follows the rules for Object Identifiers. The name should be unique among security integrations in your account. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"`. - `saml2_issuer` (String) The string containing the IdP EntityID / Issuer. - `saml2_provider` (String) The string describing the IdP. Valid options are: `OKTA` | `ADFS` | `CUSTOM`. - `saml2_sso_url` (String) The string containing the IdP SSO URL, where the user should be redirected by Snowflake (the Service Provider) with a SAML AuthnRequest message. @@ -333,5 +335,5 @@ Read-Only: Import is supported using the following syntax: ```shell -terraform import snowflake_saml2_integration.example "name" +terraform import snowflake_saml2_integration.example '""' ``` diff --git a/docs/resources/schema.md b/docs/resources/schema.md index 622d864fef..3c919e9d74 100644 --- a/docs/resources/schema.md +++ b/docs/resources/schema.md @@ -7,6 +7,12 @@ description: |- !> **V1 release candidate** This resource was reworked and is a release candidate for the V1. We do not expect significant changes in it before the V1. We will welcome any feedback and adjust the resource if needed. Any errors reported will be resolved with a higher priority. We encourage checking this resource out before the V1 release. Please follow the [migration guide](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/MIGRATION_GUIDE.md#v0930--v0940) to use it. + +-> **Note** Field `CLASSIFICATION_ROLE` is currently missing. It will be added in the future. + +!> **Note** A schema cannot be dropped successfully if it contains network rule-network policy associations. The error looks like `098508 (2BP01): Cannot drop schema SCHEMA as it includes network rule - policy associations.`. Currently, the provider does not unassign such objects automatically. Before dropping the resource, first unassign the network rule from the relevant objects. See [guide](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/latest/docs/guides/unassigning_policies) for more details. + # snowflake_schema (Resource) Resource used to manage schema objects. For more information, check [schema documentation](https://docs.snowflake.com/en/sql-reference/sql/create-schema). @@ -56,8 +62,8 @@ resource "snowflake_schema" "schema" { ### Required -- `database` (String) The database in which to create the schema. -- `name` (String) Specifies the identifier for the schema; must be unique for the database in which the schema is created. When the name is `PUBLIC`, during creation the provider checks if this schema has already been created and, in such case, `ALTER` is used to match the desired state.
+- `database` (String) The database in which to create the schema. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"`. +- `name` (String) Specifies the identifier for the schema; must be unique for the database in which the schema is created. When the name is `PUBLIC`, during creation the provider checks if this schema has already been created and, in such case, `ALTER` is used to match the desired state. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"`. ### Optional @@ -350,6 +356,5 @@ Read-Only: Import is supported using the following syntax: ```shell -# format is . terraform import snowflake_schema.example '"".""' ``` diff --git a/docs/resources/scim_integration.md b/docs/resources/scim_integration.md index c3e8ee35a9..ae782bcbe0 100644 --- a/docs/resources/scim_integration.md +++ b/docs/resources/scim_integration.md @@ -7,6 +7,8 @@ description: |- !> **V1 release candidate** This resource was reworked and is a release candidate for the V1. We do not expect significant changes in it before the V1. We will welcome any feedback and adjust the resource if needed. Any errors reported will be resolved with a higher priority. We encourage checking this resource out before the V1 release. Please follow the [migration guide](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/MIGRATION_GUIDE.md#v0920--v0930) to use it. +!> **Note** The provider does not detect external changes on security integration type. In this case, remove the integration of the wrong type manually with `terraform destroy` and recreate the resource. This limitation will be addressed in the future. + # snowflake_scim_integration (Resource) Resource used to manage scim security integration objects. For more information, check [security integrations documentation](https://docs.snowflake.com/en/sql-reference/sql/create-security-integration-scim). @@ -20,14 +22,16 @@ resource "snowflake_scim_integration" "test" { enabled = true scim_client = "GENERIC" sync_password = true + run_as_role = "GENERIC_SCIM_PROVISIONER" } + # resource with all fields set resource "snowflake_scim_integration" "test" { name = "test" enabled = true scim_client = "GENERIC" sync_password = true - network_policy = "network_policy_test" + network_policy = snowflake_network_policy.example.fully_qualified_name run_as_role = "GENERIC_SCIM_PROVISIONER" comment = "foo" } @@ -41,14 +45,14 @@ resource "snowflake_scim_integration" "test" { ### Required - `enabled` (Boolean) Specify whether the security integration is enabled. -- `name` (String) String that specifies the identifier (i.e. name) for the integration; must be unique in your account.
Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"`.
 - `run_as_role` (String) Specify the SCIM role in Snowflake that owns any users and roles that are imported from the identity provider into Snowflake using SCIM. The provider assumes that the specified role already exists. Valid options are: `OKTA_PROVISIONER` | `AAD_PROVISIONER` | `GENERIC_SCIM_PROVISIONER`.
 - `scim_client` (String) Specifies the client type for the scim integration. Valid options are: `OKTA` | `AZURE` | `GENERIC`.
 
 ### Optional
 
 - `comment` (String) Specifies a comment for the integration.
-- `network_policy` (String) Specifies an existing network policy that controls SCIM network traffic.
+- `network_policy` (String) Specifies an existing network policy that controls SCIM network traffic. For more information about this resource, see [docs](./network_policy).
 - `sync_password` (String) Specifies whether to enable or disable the synchronization of a user password from an Okta SCIM client as part of the API request to Snowflake. This property is not supported for Azure SCIM. Available options are: "true" or "false". When the value is not set in the configuration the provider will put "default" there which means to use the Snowflake default for this value.
 
 ### Read-Only
@@ -142,5 +146,5 @@ Read-Only:
 Import is supported using the following syntax:
 
 ```shell
-terraform import snowflake_scim_integration.example "name"
+terraform import snowflake_scim_integration.example '"<name>"'
 ```
diff --git a/docs/resources/secondary_connection.md b/docs/resources/secondary_connection.md
index 416e593392..566c4e507b 100644
--- a/docs/resources/secondary_connection.md
+++ b/docs/resources/secondary_connection.md
@@ -17,20 +17,20 @@ Resource used to manage secondary (replicated) connections. To manage primary co
 ## Minimal
 resource "snowflake_secondary_connection" "basic" {
   name          = "connection_name"
-  as_replica_of = "<organization_name>.<account_name>.<connection_name>"
+  as_replica_of = "\"<organization_name>\".\"<account_name>\".\"<connection_name>\""
 }
 
 ## Complete (with every optional set)
 resource "snowflake_secondary_connection" "complete" {
   name          = "connection_name"
-  as_replica_of = "<organization_name>.<account_name>.<connection_name>"
+  as_replica_of = "\"<organization_name>\".\"<account_name>\".\"<connection_name>\""
   comment       = "my complete secondary connection"
 }
 ```
 
 -> **Note** Instead of using fully_qualified_name, you can reference objects managed outside Terraform by constructing a correct ID, consult [identifiers guide](../guides/identifiers#new-computed-fully-qualified-name-field-in-resources).
 
--> **Note** To promote `snowflake_secondary_connection` to [`snowflake_primary_connection`](./primary_connection), resources need to be migrated manually. For guidance on removing and importing resources into the state check [resource migration](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/resource_migration.md). Remove the resource from the state, then promote it manually using:
+-> **Note** To promote `snowflake_secondary_connection` to [`snowflake_primary_connection`](./primary_connection), resources need to be migrated manually. For guidance on removing and importing resources into the state check [resource migration](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/resource_migration.md).
Remove the resource from the state with [terraform state rm](https://developer.hashicorp.com/terraform/cli/commands/state/rm), then promote it manually using:
 ```
 ALTER CONNECTION <connection_name> PRIMARY;
 ```
@@ -42,8 +42,8 @@ and then import it as the `snowflake_primary_connection`.
 
 ### Required
 
-- `as_replica_of` (String) Specifies the identifier for a primary connection from which to create a replica (i.e. a secondary connection).
-- `name` (String) String that specifies the identifier (i.e. name) for the connection. Must start with an alphabetic character and may only contain letters, decimal digits (0-9), and underscores (_). For a secondary connection, the name must match the name of its primary connection. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"`
+- `as_replica_of` (String) Specifies the identifier for a primary connection from which to create a replica (i.e. a secondary connection). For more information about this resource, see [docs](./primary_connection).
+- `name` (String) String that specifies the identifier (i.e. name) for the connection. Must start with an alphabetic character and may only contain letters, decimal digits (0-9), and underscores (_). For a secondary connection, the name must match the name of its primary connection. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"`.
 
 ### Optional
 
@@ -79,5 +79,5 @@ Read-Only:
 Import is supported using the following syntax:
 
 ```shell
-terraform import snowflake_secondary_connection.example 'secondary_connection_name'
+terraform import snowflake_secondary_connection.example '"<secondary_connection_name>"'
 ```
diff --git a/docs/resources/secondary_database.md b/docs/resources/secondary_database.md
index 95fbaf8815..652f875871 100644
--- a/docs/resources/secondary_database.md
+++ b/docs/resources/secondary_database.md
@@ -8,6 +8,11 @@ description: |-
 
 !> **V1 release candidate** This resource was reworked and is a release candidate for the V1. We do not expect significant changes in it before the V1. We will welcome any feedback and adjust the resource if needed. Any errors reported will be resolved with a higher priority. We encourage checking this resource out before the V1 release. Please follow the [migration guide](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/MIGRATION_GUIDE.md#v0920--v0930) to use it.
 
+!> **Note** The provider does not detect external changes on database type. In this case, remove the database of the wrong type manually with `terraform destroy` and recreate the resource. It will be addressed in the future.
+
+!> **Note** A database cannot be dropped successfully if it contains network rule-network policy associations. The error looks like `098507 (2BP01): Cannot drop database DATABASE as it includes network rule - policy associations.
+`. Currently, the provider does not unassign such objects automatically. Before dropping the resource, first unassign the network rule from the relevant objects. See [guide](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/latest/docs/guides/unassigning_policies) for more details.
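+
+For illustration, unassigning a network rule referenced by a network policy before destroying the database might look like the following minimal sketch (`MY_POLICY` and `MY_DB.MY_SCHEMA.MY_RULE` are hypothetical names; see the linked guide for the authoritative steps):
+
+```
+-- Hypothetical names: the policy MY_POLICY references the network rule MY_DB.MY_SCHEMA.MY_RULE.
+ALTER NETWORK POLICY MY_POLICY REMOVE ALLOWED_NETWORK_RULE_LIST = (MY_DB.MY_SCHEMA.MY_RULE);
+```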
+
 # snowflake_secondary_database (Resource)
 
 ~> **Note** The snowflake_secondary_database resource doesn't refresh itself, as the best practice is to use tasks scheduled for a certain interval. Check out the examples to see how to set up the refresh task. For SQL-based replication guide, see the [official documentation](https://docs.snowflake.com/en/user-guide/db-replication-config#replicating-a-database-to-another-account).
@@ -92,8 +97,8 @@ resource "snowflake_task" "refresh_secondary_database" {
 
 ### Required
 
-- `as_replica_of` (String) A fully qualified path to a database to create a replica from. A fully qualified path follows the format of `"<organization_name>"."<account_name>"."<database_name>"`.
-- `name` (String) Specifies the identifier for the database; must be unique for your account. As a best practice for [Database Replication and Failover](https://docs.snowflake.com/en/user-guide/db-replication-intro), it is recommended to give each secondary database the same name as its primary database. This practice supports referencing fully-qualified objects (i.e. '<db>.<schema>.<object>') by other objects in the same database, such as querying a fully-qualified table name in a view. If a secondary database has a different name from the primary database, then these object references would break in the secondary database. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"`
+- `as_replica_of` (String) A fully qualified path to a database to create a replica from. A fully qualified path follows the format of `"<organization_name>"."<account_name>"."<database_name>"`. For more information about this resource, see [docs](./database).
+- `name` (String) Specifies the identifier for the database; must be unique for your account. As a best practice for [Database Replication and Failover](https://docs.snowflake.com/en/user-guide/db-replication-intro), it is recommended to give each secondary database the same name as its primary database. This practice supports referencing fully-qualified objects (i.e. '<db>.<schema>.<object>') by other objects in the same database, such as querying a fully-qualified table name in a view. If a secondary database has a different name from the primary database, then these object references would break in the secondary database. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"`.
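+
+As an illustration of the `as_replica_of` format above, a minimal sketch (`MYORG`, `ACCOUNT1`, and `PRIMARY_DB` are hypothetical organization, account, and database names):
+
+```terraform
+resource "snowflake_secondary_database" "example" {
+  # Best practice: reuse the primary database name.
+  name          = "PRIMARY_DB"
+  as_replica_of = "\"MYORG\".\"ACCOUNT1\".\"PRIMARY_DB\""
+}
+```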
 ### Optional
 
@@ -126,5 +131,5 @@ resource "snowflake_task" "refresh_secondary_database" {
 Import is supported using the following syntax:
 
 ```shell
-terraform import snowflake_secondary_database.example 'secondary_database_name'
+terraform import snowflake_secondary_database.example '"<secondary_database_name>"'
 ```
diff --git a/docs/resources/secret_with_authorization_code_grant.md b/docs/resources/secret_with_authorization_code_grant.md
index 3217db19dc..5f4ea693fd 100644
--- a/docs/resources/secret_with_authorization_code_grant.md
+++ b/docs/resources/secret_with_authorization_code_grant.md
@@ -19,7 +19,7 @@ resource "snowflake_secret_with_authorization_code_grant" "test" {
   name = "EXAMPLE_SECRET"
   database = "EXAMPLE_DB"
   schema = "EXAMPLE_SCHEMA"
-  api_authentication = "EXAMPLE_SECURITY_INTEGRATION_NAME"
+  api_authentication = snowflake_api_authentication_integration_with_authorization_code_grant.example.fully_qualified_name
   oauth_refresh_token = "EXAMPLE_TOKEN"
   oauth_refresh_token_expiry_time = "2025-01-02 15:04:01"
 }
@@ -29,7 +29,7 @@ resource "snowflake_secret_with_authorization_code_grant" "test" {
   name = "EXAMPLE_SECRET"
   database = "EXAMPLE_DB"
   schema = "EXAMPLE_SCHEMA"
-  api_authentication = "EXAMPLE_SECURITY_INTEGRATION_NAME"
+  api_authentication = snowflake_api_authentication_integration_with_authorization_code_grant.example.fully_qualified_name
   oauth_refresh_token = "EXAMPLE_TOKEN"
   oauth_refresh_token_expiry_time = "2025-01-02 15:04:01"
   comment = "EXAMPLE_COMMENT"
@@ -43,12 +43,12 @@ resource "snowflake_secret_with_authorization_code_grant" "test" {
 
 ### Required
 
-- `api_authentication` (String) Specifies the name value of the Snowflake security integration that connects Snowflake to an external service.
-- `database` (String) The database in which to create the secret Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"`
-- `name` (String) String that specifies the identifier (i.e. name) for the secret, must be unique in your schema. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"`
+- `api_authentication` (String) Specifies the name value of the Snowflake security integration that connects Snowflake to an external service. For more information about this resource, see [docs](./api_authentication_integration_with_authorization_code_grant).
+- `database` (String) The database in which to create the secret. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"`.
+- `name` (String) String that specifies the identifier (i.e. name) for the secret, must be unique in your schema. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"`.
 - `oauth_refresh_token` (String, Sensitive) Specifies the token as a string that is used to obtain a new access token from the OAuth authorization server when the access token expires. External changes for this field won't be detected. In case you want to apply external changes, you can re-create the resource manually using "terraform taint".
 - `oauth_refresh_token_expiry_time` (String) Specifies the timestamp as a string when the OAuth refresh token expires. Accepted string formats: YYYY-MM-DD, YYYY-MM-DD HH:MI, YYYY-MM-DD HH:MI:SS, YYYY-MM-DD HH:MI
-- `schema` (String) The schema in which to create the secret. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"`
+- `schema` (String) The schema in which to create the secret. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"`.
 
 ### Optional
diff --git a/docs/resources/secret_with_basic_authentication.md b/docs/resources/secret_with_basic_authentication.md
index 66d99164b3..e6b0d88c1b 100644
--- a/docs/resources/secret_with_basic_authentication.md
+++ b/docs/resources/secret_with_basic_authentication.md
@@ -41,10 +41,10 @@ resource "snowflake_secret_with_basic_authentication" "test" {
 
 ### Required
 
-- `database` (String) The database in which to create the secret Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"`
-- `name` (String) String that specifies the identifier (i.e. name) for the secret, must be unique in your schema. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"`
+- `database` (String) The database in which to create the secret. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"`.
+- `name` (String) String that specifies the identifier (i.e. name) for the secret, must be unique in your schema. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"`.
 - `password` (String, Sensitive) Specifies the password value to store in the secret. External changes for this field won't be detected. In case you want to apply external changes, you can re-create the resource manually using "terraform taint".
-- `schema` (String) The schema in which to create the secret.
Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"`
+- `schema` (String) The schema in which to create the secret. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"`.
 - `username` (String, Sensitive) Specifies the username value to store in the secret.
 
 ### Optional
diff --git a/docs/resources/secret_with_client_credentials.md b/docs/resources/secret_with_client_credentials.md
index 0e5ad14903..6dd8757a59 100644
--- a/docs/resources/secret_with_client_credentials.md
+++ b/docs/resources/secret_with_client_credentials.md
@@ -19,7 +19,7 @@ resource "snowflake_secret_with_client_credentials" "test" {
   name = "EXAMPLE_SECRET"
   database = "EXAMPLE_DB"
   schema = "EXAMPLE_SCHEMA"
-  api_authentication = "EXAMPLE_SECURITY_INTEGRATION_NAME"
+  api_authentication = snowflake_api_authentication_integration_with_client_credentials.example.fully_qualified_name
   oauth_scopes = ["useraccount", "testscope"]
 }
 
@@ -28,7 +28,7 @@ resource "snowflake_secret_with_client_credentials" "test" {
   name = "EXAMPLE_SECRET"
   database = "EXAMPLE_DB"
   schema = "EXAMPLE_SCHEMA"
-  api_authentication = "EXAMPLE_SECURITY_INTEGRATION_NAME"
+  api_authentication = snowflake_api_authentication_integration_with_client_credentials.example.fully_qualified_name
   oauth_scopes = ["useraccount", "testscope"]
   comment = "EXAMPLE_COMMENT"
 }
@@ -41,11 +41,11 @@ resource "snowflake_secret_with_client_credentials" "test" {
 
 ### Required
 
-- `api_authentication` (String) Specifies the name value of the Snowflake security integration that connects Snowflake to an external service.
-- `database` (String) The database in which to create the secret Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"`
-- `name` (String) String that specifies the identifier (i.e. name) for the secret, must be unique in your schema. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"`
+- `api_authentication` (String) Specifies the name value of the Snowflake security integration that connects Snowflake to an external service. For more information about this resource, see [docs](./api_authentication_integration_with_client_credentials).
+- `database` (String) The database in which to create the secret. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"`.
+- `name` (String) String that specifies the identifier (i.e. name) for the secret, must be unique in your schema.
Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"`.
 - `oauth_scopes` (Set of String) Specifies a list of scopes to use when making a request from the OAuth server by a role with USAGE on the integration during the OAuth client credentials flow.
-- `schema` (String) The schema in which to create the secret. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"`
+- `schema` (String) The schema in which to create the secret. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"`.
 
 ### Optional
diff --git a/docs/resources/secret_with_generic_string.md b/docs/resources/secret_with_generic_string.md
index 408e71592d..4c0b426ab9 100644
--- a/docs/resources/secret_with_generic_string.md
+++ b/docs/resources/secret_with_generic_string.md
@@ -39,9 +39,9 @@ resource "snowflake_secret_with_generic_string" "test" {
 
 ### Required
 
-- `database` (String) The database in which to create the secret Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"`
-- `name` (String) String that specifies the identifier (i.e. name) for the secret, must be unique in your schema. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"`
-- `schema` (String) The schema in which to create the secret. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"`
+- `database` (String) The database in which to create the secret. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"`.
+- `name` (String) String that specifies the identifier (i.e. name) for the secret, must be unique in your schema. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"`.
+- `schema` (String) The schema in which to create the secret.
Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"`. - `secret_string` (String, Sensitive) Specifies the string to store in the secret. The string can be an API token or a string of sensitive value that can be used in the handler code of a UDF or stored procedure. For details, see [Creating and using an external access integration](https://docs.snowflake.com/en/developer-guide/external-network-access/creating-using-external-network-access). You should not use this property to store any kind of OAuth token; use one of the other secret types for your OAuth use cases. External changes for this field won't be detected. In case you want to apply external changes, you can re-create the resource manually using "terraform taint". ### Optional diff --git a/docs/resources/service_user.md b/docs/resources/service_user.md index eba0597df1..7daa93d14b 100644 --- a/docs/resources/service_user.md +++ b/docs/resources/service_user.md @@ -120,7 +120,7 @@ resource "snowflake_service_user" "u" { ### Required -- `name` (String) Name of the user. Note that if you do not supply login_name this will be used as login_name. Check the [docs](https://docs.snowflake.net/manuals/sql-reference/sql/create-user.html#required-parameters). Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"` +- `name` (String) Name of the user. Note that if you do not supply login_name this will be used as login_name. Check the [docs](https://docs.snowflake.net/manuals/sql-reference/sql/create-user.html#required-parameters). Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"`. ### Optional @@ -141,9 +141,9 @@ resource "snowflake_service_user" "u" { - `date_output_format` (String) Specifies the display format for the DATE data type. For more information, see [Date and time input and output formats](https://docs.snowflake.com/en/sql-reference/date-time-input-output). For more information, check [DATE_OUTPUT_FORMAT docs](https://docs.snowflake.com/en/sql-reference/parameters#date-output-format). - `days_to_expiry` (Number) Specifies the number of days after which the user status is set to `Expired` and the user is no longer allowed to log in. This is useful for defining temporary users (i.e. users who should only have access to Snowflake for a limited time period). In general, you should not set this property for [account administrators](https://docs.snowflake.com/en/user-guide/security-access-control-considerations.html#label-accountadmin-users) (i.e. users with the `ACCOUNTADMIN` role) because Snowflake locks them out when they become `Expired`. External changes for this field won't be detected. In case you want to apply external changes, you can re-create the resource manually using "terraform taint". 
 - `default_namespace` (String) Specifies the namespace (database only or database and schema) that is active by default for the user’s session upon login. Note that the CREATE USER operation does not verify that the namespace exists.
-- `default_role` (String) Specifies the role that is active by default for the user’s session upon login. Note that specifying a default role for a user does **not** grant the role to the user. The role must be granted explicitly to the user using the [GRANT ROLE](https://docs.snowflake.com/en/sql-reference/sql/grant-role) command. In addition, the CREATE USER operation does not verify that the role exists.
+- `default_role` (String) Specifies the role that is active by default for the user’s session upon login. Note that specifying a default role for a user does **not** grant the role to the user. The role must be granted explicitly to the user using the [GRANT ROLE](https://docs.snowflake.com/en/sql-reference/sql/grant-role) command. In addition, the CREATE USER operation does not verify that the role exists. For more information about this resource, see [docs](./account_role).
 - `default_secondary_roles_option` (String) Specifies the secondary roles that are active for the user’s session upon login. Valid values are (case-insensitive): `DEFAULT` | `NONE` | `ALL`. More information can be found in [doc](https://docs.snowflake.com/en/sql-reference/sql/create-user#optional-object-properties-objectproperties).
-- `default_warehouse` (String) Specifies the virtual warehouse that is active by default for the user’s session upon login. Note that the CREATE USER operation does not verify that the warehouse exists.
+- `default_warehouse` (String) Specifies the virtual warehouse that is active by default for the user’s session upon login. Note that the CREATE USER operation does not verify that the warehouse exists. For more information about this resource, see [docs](./warehouse).
 - `disabled` (String) Specifies whether the user is disabled, which prevents logging in and aborts all the currently-running queries for the user. Available options are: "true" or "false". When the value is not set in the configuration the provider will put "default" there which means to use the Snowflake default for this value.
 - `display_name` (String) Name displayed for the user in the Snowflake web interface.
 - `email` (String, Sensitive) Email address for the user.
diff --git a/docs/resources/shared_database.md b/docs/resources/shared_database.md
index 574ff4f28c..daf52b60bd 100644
--- a/docs/resources/shared_database.md
+++ b/docs/resources/shared_database.md
@@ -7,6 +7,11 @@ description: |-
 
 !> **V1 release candidate** This resource was reworked and is a release candidate for the V1. We do not expect significant changes in it before the V1. We will welcome any feedback and adjust the resource if needed. Any errors reported will be resolved with a higher priority. We encourage checking this resource out before the V1 release. Please follow the [migration guide](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/MIGRATION_GUIDE.md#v0920--v0930) to use it.
 
+!> **Note** The provider does not detect external changes on database type. In this case, remove the database of the wrong type manually with `terraform destroy` and recreate the resource. It will be addressed in the future.
+
+!> **Note** A database cannot be dropped successfully if it contains network rule-network policy associations.
The error looks like `098507 (2BP01): Cannot drop database DATABASE as it includes network rule - policy associations.
+`. Currently, the provider does not unassign such objects automatically. Before dropping the resource, first unassign the network rule from the relevant objects. See [guide](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/latest/docs/guides/unassigning_policies) for more details.
+
 # snowflake_shared_database (Resource)
 
 A shared database creates a database from a share provided by another Snowflake account. For more information about shares, see [Introduction to Secure Data Sharing](https://docs.snowflake.com/en/user-guide/data-sharing-intro).
@@ -75,8 +80,8 @@ resource "snowflake_shared_database" "test" {
 
 ### Required
 
-- `from_share` (String) A fully qualified path to a share from which the database will be created. A fully qualified path follows the format of `"<organization_name>"."<account_name>"."<share_name>"`.
-- `name` (String) Specifies the identifier for the database; must be unique for your account. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"`
+- `from_share` (String) A fully qualified path to a share from which the database will be created. A fully qualified path follows the format of `"<organization_name>"."<account_name>"."<share_name>"`. For more information about this resource, see [docs](./share).
+- `name` (String) Specifies the identifier for the database; must be unique for your account. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"`.
 
 ### Optional
 
@@ -106,5 +111,5 @@ resource "snowflake_shared_database" "test" {
 Import is supported using the following syntax:
 
 ```shell
-terraform import snowflake_shared_database.example 'shared_database_name'
+terraform import snowflake_shared_database.example '"<shared_database_name>"'
 ```
diff --git a/docs/resources/stream.md b/docs/resources/stream.md
index 1a36f91eb3..2cf478e3ba 100644
--- a/docs/resources/stream.md
+++ b/docs/resources/stream.md
@@ -7,7 +7,7 @@ description: |-
 
 # snowflake_stream (Resource)
 
-~> **Deprecation** This resource is deprecated and will be removed in a future major version release. Please use one of the new resources instead: `snowflake_stream_on_directory_table` | `snowflake_stream_on_external_table` | `snowflake_stream_on_table` | `snowflake_stream_on_view`
+~> **Deprecation** This resource is deprecated and will be removed in a future major version release. Please use one of the new resources instead: `snowflake_stream_on_directory_table` | `snowflake_stream_on_external_table` | `snowflake_stream_on_table` | `snowflake_stream_on_view`.
 
 ## Example Usage
diff --git a/docs/resources/stream_on_directory_table.md b/docs/resources/stream_on_directory_table.md
index 1913610345..4f1ebf9772 100644
--- a/docs/resources/stream_on_directory_table.md
+++ b/docs/resources/stream_on_directory_table.md
@@ -7,6 +7,8 @@ description: |-
 
 !> **V1 release candidate** This resource was reworked and is a release candidate for the V1. We do not expect significant changes in it before the V1. We will welcome any feedback and adjust the resource if needed. Any errors reported will be resolved with a higher priority.
We encourage checking this resource out before the V1 release. Please follow the [migration guide](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/MIGRATION_GUIDE.md#v0970--v0980) to use it.
 
+~> **Note about copy_grants** Fields like `stage` and `stale` can not be ALTERed on Snowflake side (check [docs](https://docs.snowflake.com/en/sql-reference/sql/alter-stream)), and a change on these fields means recreation of the resource. ForceNew can not be used because it does not preserve grants from `copy_grants`. Beware that even though a change is marked as an update, the resource is recreated.
+
 # snowflake_stream_on_directory_table (Resource)
 
 Resource used to manage streams on directory tables. For more information, check [stream documentation](https://docs.snowflake.com/en/sql-reference/sql/create-stream).
@@ -14,21 +16,13 @@ Resource used to manage streams on directory tables. For more information, check
 ## Example Usage
 
 ```terraform
-resource "snowflake_stage" "example_stage" {
-  name = "EXAMPLE_STAGE"
-  url = "s3://com.example.bucket/prefix"
-  database = "EXAMPLE_DB"
-  schema = "EXAMPLE_SCHEMA"
-  credentials = "AWS_KEY_ID='${var.example_aws_key_id}' AWS_SECRET_KEY='${var.example_aws_secret_key}'"
-}
-
 # basic resource
 resource "snowflake_stream_on_directory_table" "stream" {
   name = "stream"
   schema = "schema"
   database = "database"
 
-  stage = snowflake_stage.stage.fully_qualified_name
+  stage = snowflake_stage.example.fully_qualified_name
 }
 
 
@@ -39,11 +33,7 @@ resource "snowflake_stream_on_directory_table" "stream" {
   database = "database"
   copy_grants = true
-  stage = snowflake_stage.stage.fully_qualified_name
-
-  at {
-    statement = "8e5d0ca9-005e-44e6-b858-a8f5b37c5726"
-  }
+  stage = snowflake_stage.example.fully_qualified_name
 
   comment = "A stream."
 }
@@ -56,15 +46,15 @@ resource "snowflake_stream_on_directory_table" "stream" {
 
 ### Required
 
-- `database` (String) The database in which to create the stream. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"`
-- `name` (String) Specifies the identifier for the stream; must be unique for the database and schema in which the stream is created. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"`
-- `schema` (String) The schema in which to create the stream. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"`
-- `stage` (String) Specifies an identifier for the stage the stream will monitor. Due to Snowflake limitations, the provider can not read the stage's database and schema. For stages, Snowflake returns only partially qualified name instead of fully qualified name. Please use stages located in the same schema as the stream.
Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"`
+- `database` (String) The database in which to create the stream. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"`.
+- `name` (String) Specifies the identifier for the stream; must be unique for the database and schema in which the stream is created. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"`.
+- `schema` (String) The schema in which to create the stream. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"`.
+- `stage` (String) Specifies an identifier for the stage the stream will monitor. Due to Snowflake limitations, the provider can not read the stage's database and schema. For stages, Snowflake returns only a partially qualified name instead of a fully qualified name. Please use stages located in the same schema as the stream. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"`. For more information about this resource, see [docs](./stage).
 
 ### Optional
 
 - `comment` (String) Specifies a comment for the stream.
-- `copy_grants` (Boolean) Retains the access permissions from the original stream when a stream is recreated using the OR REPLACE clause. That is sometimes used when the provider detects changes for fields that can not be changed by ALTER. This value will not have any effect when creating a new stream.
+- `copy_grants` (Boolean) Retains the access permissions from the original stream when a stream is recreated using the OR REPLACE clause. This is used when the provider detects changes for fields that can not be changed by ALTER. This value will not have any effect when creating a new object with Terraform.
 
 ### Read-Only
diff --git a/docs/resources/stream_on_external_table.md b/docs/resources/stream_on_external_table.md
index a01c1a93bc..48ca9d2e26 100644
--- a/docs/resources/stream_on_external_table.md
+++ b/docs/resources/stream_on_external_table.md
@@ -7,7 +7,7 @@ description: |-
 
 !> **V1 release candidate** This resource was reworked and is a release candidate for the V1. We do not expect significant changes in it before the V1. We will welcome any feedback and adjust the resource if needed. Any errors reported will be resolved with a higher priority. We encourage checking this resource out before the V1 release.
Please follow the [migration guide](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/MIGRATION_GUIDE.md#v0960--v0970) to use it.
 
-!> **Note about copy_grants** Fields like `external_table`, `insert_only`, `at`, `before` can not be ALTERed on Snowflake side (check [docs](https://docs.snowflake.com/en/sql-reference/sql/alter-stream)), and a change means recreation of the resource. ForceNew can not be used because it does not preserve grants from `copy_grants`. Beware that even though a change is marked as update, the resource is recreated.
+~> **Note about copy_grants** Fields like `external_table`, `insert_only`, `at`, `before` and `stale` can not be ALTERed on Snowflake side (check [docs](https://docs.snowflake.com/en/sql-reference/sql/alter-stream)), and a change on these fields means recreation of the resource. ForceNew can not be used because it does not preserve grants from `copy_grants`. Beware that even though a change is marked as an update, the resource is recreated.
 
 # snowflake_stream_on_external_table (Resource)
 
@@ -16,32 +16,13 @@ Resource used to manage streams on external tables. For more information, check
 ## Example Usage
 
 ```terraform
-resource "snowflake_external_table" "external_table" {
-  database = "db"
-  schema = "schema"
-  name = "external_table"
-  comment = "External table"
-  file_format = "TYPE = CSV FIELD_DELIMITER = '|'"
-  location = "@stage/directory/"
-
-  column {
-    name = "id"
-    type = "int"
-  }
-
-  column {
-    name = "data"
-    type = "text"
-  }
-}
-
 # basic resource
 resource "snowflake_stream_on_external_table" "stream" {
   name = "stream"
   schema = "schema"
   database = "database"
 
-  external_table = snowflake_external_table.external_table.fully_qualified_name
+  external_table = snowflake_external_table.example.fully_qualified_name
 }
 
 
@@ -52,7 +33,7 @@ resource "snowflake_stream_on_external_table" "stream" {
   database = "database"
   copy_grants = true
-  external_table = snowflake_external_table.external_table.fully_qualified_name
+  external_table = snowflake_external_table.example.fully_qualified_name
   insert_only = "true"
 
   at {
@@ -70,17 +51,17 @@ resource "snowflake_stream_on_external_table" "stream" {
 
 ### Required
 
-- `database` (String) The database in which to create the stream. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"`
-- `external_table` (String) Specifies an identifier for the external table the stream will monitor. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"`
-- `name` (String) Specifies the identifier for the stream; must be unique for the database and schema in which the stream is created. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"`
-- `schema` (String) The schema in which to create the stream.
Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"` +- `database` (String) The database in which to create the stream. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"`. +- `external_table` (String) Specifies an identifier for the external table the stream will monitor. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"`. For more information about this resource, see [docs](./external_table). +- `name` (String) Specifies the identifier for the stream; must be unique for the database and schema in which the stream is created. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"`. +- `schema` (String) The schema in which to create the stream. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"`. ### Optional - `at` (Block List, Max: 1) This field specifies that the request is inclusive of any changes made by a statement or transaction with a timestamp equal to the specified parameter. Due to Snowflake limitations, the provider does not detect external changes on this field. External changes for this field won't be detected. In case you want to apply external changes, you can re-create the resource manually using "terraform taint". (see [below for nested schema](#nestedblock--at)) - `before` (Block List, Max: 1) This field specifies that the request refers to a point immediately preceding the specified parameter. This point in time is just before the statement, identified by its query ID, is completed. Due to Snowflake limitations, the provider does not detect external changes on this field. External changes for this field won't be detected. In case you want to apply external changes, you can re-create the resource manually using "terraform taint". (see [below for nested schema](#nestedblock--before)) - `comment` (String) Specifies a comment for the stream. -- `copy_grants` (Boolean) Retains the access permissions from the original stream when a stream is recreated using the OR REPLACE clause. That is sometimes used when the provider detects changes for fields that can not be changed by ALTER. This value will not have any effect when creating a new stream. +- `copy_grants` (Boolean) Retains the access permissions from the original stream when a stream is recreated using the OR REPLACE clause. This is used when the provider detects changes for fields that can not be changed by ALTER. 
This value will not have any effect when creating a new object with Terraform.
 - `insert_only` (String) Specifies whether this is an insert-only stream. Available options are: "true" or "false". When the value is not set in the configuration the provider will put "default" there which means to use the Snowflake default for this value.
 
 ### Read-Only
diff --git a/docs/resources/stream_on_table.md b/docs/resources/stream_on_table.md
index 244f5b97a5..67361e3aaa 100644
--- a/docs/resources/stream_on_table.md
+++ b/docs/resources/stream_on_table.md
@@ -7,7 +7,7 @@ description: |-
 
 !> **V1 release candidate** This resource was reworked and is a release candidate for the V1. We do not expect significant changes in it before the V1. We will welcome any feedback and adjust the resource if needed. Any errors reported will be resolved with a higher priority. We encourage checking this resource out before the V1 release. Please follow the [migration guide](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/MIGRATION_GUIDE.md#v0960--v0970) to use it.
 
-!> **Note about copy_grants** Fields like `table`, `append_only`, `at`, `before`, `show_initial_rows` can not be ALTERed on Snowflake side (check [docs](https://docs.snowflake.com/en/sql-reference/sql/alter-stream)), and a change means recreation of the resource. ForceNew can not be used because it does not preserve grants from `copy_grants`. Beware that even though a change is marked as update, the resource is recreated.
+~> **Note about copy_grants** Fields like `table`, `append_only`, `at`, `before`, `show_initial_rows` and `stale` can not be ALTERed on Snowflake side (check [docs](https://docs.snowflake.com/en/sql-reference/sql/alter-stream)), and a change on these fields means recreation of the resource. ForceNew can not be used because it does not preserve grants from `copy_grants`. Beware that even though a change is marked as an update, the resource is recreated.
 
 # snowflake_stream_on_table (Resource)
 
@@ -16,18 +16,15 @@ Resource used to manage streams on tables. For more information, check [stream d
 ## Example Usage
 
 ```terraform
-resource "snowflake_table" "table" {
-  database = "database"
+# basic resource
+resource "snowflake_stream_on_table" "stream" {
+  name = "stream"
   schema = "schema"
-  name = "name"
+  database = "database"
 
-  column {
-    type = "NUMBER(38,0)"
-    name = "id"
-  }
+  table = snowflake_table.example.fully_qualified_name
 }
 
-
 # resource with more fields set
 resource "snowflake_stream_on_table" "stream" {
   name = "stream"
@@ -35,7 +32,7 @@ resource "snowflake_stream_on_table" "stream" {
   database = "database"
   copy_grants = true
-  table = snowflake_table.table.fully_qualified_name
+  table = snowflake_table.example.fully_qualified_name
   append_only = "true"
   show_initial_rows = "true"
 
@@ -54,10 +51,10 @@ resource "snowflake_stream_on_table" "stream" {
 
 ### Required
 
-- `database` (String) The database in which to create the stream. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"`
-- `name` (String) Specifies the identifier for the stream; must be unique for the database and schema in which the stream is created.
Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"` -- `schema` (String) The schema in which to create the stream. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"` -- `table` (String) Specifies an identifier for the table the stream will monitor. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"` +- `database` (String) The database in which to create the stream. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"`. +- `name` (String) Specifies the identifier for the stream; must be unique for the database and schema in which the stream is created. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"`. +- `schema` (String) The schema in which to create the stream. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"`. +- `table` (String) Specifies an identifier for the table the stream will monitor. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"`. For more information about this resource, see [docs](./table). ### Optional @@ -65,7 +62,7 @@ resource "snowflake_stream_on_table" "stream" { - `at` (Block List, Max: 1) This field specifies that the request is inclusive of any changes made by a statement or transaction with a timestamp equal to the specified parameter. Due to Snowflake limitations, the provider does not detect external changes on this field. External changes for this field won't be detected. In case you want to apply external changes, you can re-create the resource manually using "terraform taint". (see [below for nested schema](#nestedblock--at)) - `before` (Block List, Max: 1) This field specifies that the request refers to a point immediately preceding the specified parameter. This point in time is just before the statement, identified by its query ID, is completed. Due to Snowflake limitations, the provider does not detect external changes on this field. External changes for this field won't be detected. 
In case you want to apply external changes, you can re-create the resource manually using "terraform taint". (see [below for nested schema](#nestedblock--before))
 - `comment` (String) Specifies a comment for the stream.
-- `copy_grants` (Boolean) Retains the access permissions from the original stream when a stream is recreated using the OR REPLACE clause. That is sometimes used when the provider detects changes for fields that can not be changed by ALTER. This value will not have any effect when creating a new stream.
+- `copy_grants` (Boolean) Retains the access permissions from the original stream when a stream is recreated using the OR REPLACE clause. This is used when the provider detects changes for fields that can not be changed by ALTER. This value will not have any effect when creating a new object with Terraform.
 - `show_initial_rows` (String) Specifies whether to return all existing rows in the source table as row inserts the first time the stream is consumed. Available options are: "true" or "false". When the value is not set in the configuration the provider will put "default" there which means to use the Snowflake default for this value. External changes for this field won't be detected. In case you want to apply external changes, you can re-create the resource manually using "terraform taint".
 
 ### Read-Only
diff --git a/docs/resources/stream_on_view.md b/docs/resources/stream_on_view.md
index ea8c406eb4..4a9ae5607b 100644
--- a/docs/resources/stream_on_view.md
+++ b/docs/resources/stream_on_view.md
@@ -7,6 +7,8 @@ description: |-
 
 !> **V1 release candidate** This resource was reworked and is a release candidate for the V1. We do not expect significant changes in it before the V1. We will welcome any feedback and adjust the resource if needed. Any errors reported will be resolved with a higher priority. We encourage checking this resource out before the V1 release. Please follow the [migration guide](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/MIGRATION_GUIDE.md#v0970--v0980) to use it.
 
+~> **Note about copy_grants** Fields like `view`, `append_only`, `at`, `before`, `show_initial_rows` and `stale` can not be ALTERed on Snowflake side (check [docs](https://docs.snowflake.com/en/sql-reference/sql/alter-stream)), and a change on these fields means recreation of the resource. ForceNew can not be used because it does not preserve grants from `copy_grants`. Beware that even though a change is marked as an update, the resource is recreated.
+
 # snowflake_stream_on_view (Resource)
 
 Resource used to manage streams on views. For more information, check [stream documentation](https://docs.snowflake.com/en/sql-reference/sql/create-stream).
@@ -14,22 +16,13 @@ Resource used to manage streams on views.
For more information, check [stream documentation](https://docs.snowflake.com/en/sql-reference/sql/create-stream). ## Example Usage ```terraform -resource "snowflake_view" "view" { - database = "database" - schema = "schema" - name = "view" - statement = <<-SQL - select * from foo; -SQL -} - # basic resource resource "snowflake_stream_on_view" "stream" { name = "stream" schema = "schema" database = "database" - view = snowflake_view.view.fully_qualified_name + view = snowflake_view.example.fully_qualified_name } # resource with additional fields @@ -39,7 +32,7 @@ resource "snowflake_stream_on_view" "stream" { name = "stream" schema = "schema" database = "database" copy_grants = true - view = snowflake_view.view.fully_qualified_name + view = snowflake_view.example.fully_qualified_name append_only = "true" show_initial_rows = "true" @@ -58,10 +51,10 @@ resource "snowflake_stream_on_view" "stream" { ### Required -- `database` (String) The database in which to create the stream. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"` -- `name` (String) Specifies the identifier for the stream; must be unique for the database and schema in which the stream is created. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"` -- `schema` (String) The schema in which to create the stream. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"` -- `view` (String) Specifies an identifier for the view the stream will monitor. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"` +- `database` (String) The database in which to create the stream. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"`. +- `name` (String) Specifies the identifier for the stream; must be unique for the database and schema in which the stream is created. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"`. +- `schema` (String) The schema in which to create the stream. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"`. +- `view` (String) Specifies an identifier for the view the stream will monitor. 
Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"`. For more information about this resource, see [docs](./view). ### Optional @@ -69,7 +62,7 @@ resource "snowflake_stream_on_view" "stream" { - `at` (Block List, Max: 1) This field specifies that the request is inclusive of any changes made by a statement or transaction with a timestamp equal to the specified parameter. Due to Snowflake limitations, the provider does not detect external changes on this field. External changes for this field won't be detected. In case you want to apply external changes, you can re-create the resource manually using "terraform taint". (see [below for nested schema](#nestedblock--at)) - `before` (Block List, Max: 1) This field specifies that the request refers to a point immediately preceding the specified parameter. This point in time is just before the statement, identified by its query ID, is completed. Due to Snowflake limitations, the provider does not detect external changes on this field. External changes for this field won't be detected. In case you want to apply external changes, you can re-create the resource manually using "terraform taint". (see [below for nested schema](#nestedblock--before)) - `comment` (String) Specifies a comment for the stream. -- `copy_grants` (Boolean) Retains the access permissions from the original stream when a stream is recreated using the OR REPLACE clause. That is sometimes used when the provider detects changes for fields that can not be changed by ALTER. This value will not have any effect when creating a new stream. +- `copy_grants` (Boolean) Retains the access permissions from the original stream when a stream is recreated using the OR REPLACE clause. This is used when the provider detects changes for fields that can not be changed by ALTER. This value will not have any effect when creating a new object with Terraform. - `show_initial_rows` (String) Specifies whether to return all existing rows in the source table as row inserts the first time the stream is consumed. Available options are: "true" or "false". When the value is not set in the configuration the provider will put "default" there which means to use the Snowflake default for this value. External changes for this field won't be detected. In case you want to apply external changes, you can re-create the resource manually using "terraform taint". ### Read-Only diff --git a/docs/resources/streamlit.md b/docs/resources/streamlit.md index 25b6d5a28d..2087870c40 100644 --- a/docs/resources/streamlit.md +++ b/docs/resources/streamlit.md @@ -7,6 +7,12 @@ description: |- !> **V1 release candidate** This resource was reworked and is a release candidate for the V1. We do not expect significant changes in it before the V1. We will welcome any feedback and adjust the resource if needed. Any errors reported will be resolved with a higher priority. We encourage checking this resource out before the V1 release. Please follow the [migration guide](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/MIGRATION_GUIDE.md#v0930--v0940) to use it. + +!> **Note** Setting a query warehouse with lowercase letters does not work correctly in Snowflake. 
As a workaround, set the query warehouse with uppercase letters only, or use unsafe_execute with the query warehouse ID wrapped in `'` (single quotes). + + +-> **Note** Field `IMPORTS` is currently missing. It will be added in the future. + # snowflake_streamlit (Resource) Resource used to manage streamlits objects. For more information, check [streamlit documentation](https://docs.snowflake.com/en/sql-reference/commands-streamlit). @@ -19,18 +25,19 @@ resource "snowflake_streamlit" "streamlit" { database = "database" schema = "schema" name = "streamlit" - stage = "streamlit_db.streamlit_schema.streamlit_stage" + stage = snowflake_stage.example.fully_qualified_name main_file = "/streamlit_main.py" } + # resource with all fields set resource "snowflake_streamlit" "streamlit" { database = "database" schema = "schema" name = "streamlit" - stage = "streamlit_db.streamlit_schema.streamlit_stage" + stage = snowflake_stage.example.fully_qualified_name directory_location = "src" main_file = "streamlit_main.py" - query_warehouse = "warehouse" + query_warehouse = snowflake_warehouse.example.fully_qualified_name external_access_integrations = ["integration_id"] title = "title" comment = "comment" @@ -44,18 +51,18 @@ resource "snowflake_streamlit" "streamlit" { ### Required -- `database` (String) The database in which to create the streamlit -- `main_file` (String) Specifies the filename of the Streamlit Python application. This filename is relative to the value of `root_location` -- `name` (String) String that specifies the identifier (i.e. name) for the streamlit; must be unique in your account. -- `schema` (String) The schema in which to create the streamlit. -- `stage` (String) The stage in which streamlit files are located. +- `database` (String) The database in which to create the streamlit. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"`. +- `main_file` (String) Specifies the filename of the Streamlit Python application. This filename is relative to the value of `directory_location`. +- `name` (String) String that specifies the identifier (i.e. name) for the streamlit; must be unique in your account. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"`. +- `schema` (String) The schema in which to create the streamlit. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"`. +- `stage` (String) The stage in which streamlit files are located. For more information about this resource, see [docs](./stage). ### Optional - `comment` (String) Specifies a comment for the streamlit. - `directory_location` (String) Specifies the full path to the named stage containing the Streamlit Python files, media files, and the environment.yml file. - `external_access_integrations` (Set of String) External access integrations connected to the Streamlit. 
-- `query_warehouse` (String) Specifies the warehouse where SQL queries issued by the Streamlit application are run. +- `query_warehouse` (String) Specifies the warehouse where SQL queries issued by the Streamlit application are run. Due to Snowflake limitations, the warehouse identifier can consist of only upper-cased letters. For more information about this resource, see [docs](./warehouse). - `title` (String) Specifies a title for the Streamlit app to display in Snowsight. ### Read-Only @@ -104,6 +111,5 @@ Read-Only: Import is supported using the following syntax: ```shell -# format is .. terraform import snowflake_schema.example '""."".""' ``` diff --git a/docs/resources/tag.md b/docs/resources/tag.md index 0d8c2ba31c..92a4c51caf 100644 --- a/docs/resources/tag.md +++ b/docs/resources/tag.md @@ -2,14 +2,16 @@ page_title: "snowflake_tag Resource - terraform-provider-snowflake" subcategory: "" description: |- - Resource used to manage tags. For more information, check tag documentation https://docs.snowflake.com/en/sql-reference/sql/create-tag. + Resource used to manage tags. For more information, check tag documentation https://docs.snowflake.com/en/sql-reference/sql/create-tag. For assigning tags to Snowflake objects, see tag_association resource https://registry.terraform.io/providers/Snowflake-Labs/snowflake/latest/docs/resources/tag_association. --- !> **V1 release candidate** This resource was reworked and is a release candidate for the V1. We do not expect significant changes in it before the V1. We will welcome any feedback and adjust the resource if needed. Any errors reported will be resolved with a higher priority. We encourage checking this resource out before the V1 release. Please follow the [migration guide](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/MIGRATION_GUIDE.md#v0980--v0990) to use it. +~> **Required warehouse** For this resource, the provider now uses [tag references](https://docs.snowflake.com/en/sql-reference/functions/tag_references) to get information about masking policies attached to tags. This function requires a warehouse in the connection. Please, make sure you have either set a `DEFAULT_WAREHOUSE` for the user, or specified a warehouse in the provider configuration. + # snowflake_tag (Resource) -Resource used to manage tags. For more information, check [tag documentation](https://docs.snowflake.com/en/sql-reference/sql/create-tag). +Resource used to manage tags. For more information, check [tag documentation](https://docs.snowflake.com/en/sql-reference/sql/create-tag). For assigning tags to Snowflake objects, see [tag_association resource](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/latest/docs/resources/tag_association). ## Example Usage @@ -28,7 +30,7 @@ resource "snowflake_tag" "tag" { schema = "schema" comment = "comment" allowed_values = ["finance", "engineering", ""] - masking_policies = [snowfalke_masking_policy.masking_policy.fully_qualified_name] + masking_policies = [snowflake_masking_policy.example.fully_qualified_name] } ``` -> **Note** Instead of using fully_qualified_name, you can reference objects managed outside Terraform by constructing a correct ID, consult [identifiers guide](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/latest/docs/guides/identifiers#new-computed-fully-qualified-name-field-in-resources). @@ -39,15 +41,15 @@ resource "snowflake_tag" "tag" { ### Required -- `database` (String) The database in which to create the tag. 
Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"` -- `name` (String) Specifies the identifier for the tag; must be unique for the database in which the tag is created. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"` -- `schema` (String) The schema in which to create the tag. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"` +- `database` (String) The database in which to create the tag. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"`. +- `name` (String) Specifies the identifier for the tag; must be unique for the database in which the tag is created. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"`. +- `schema` (String) The schema in which to create the tag. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"`. ### Optional - `allowed_values` (Set of String) Set of allowed values for the tag. - `comment` (String) Specifies a comment for the tag. -- `masking_policies` (Set of String) Set of masking policies for the tag. A tag can support one masking policy for each data type. If masking policies are assigned to the tag, before dropping the tag, the provider automatically unassigns them. +- `masking_policies` (Set of String) Set of masking policies for the tag. A tag can support one masking policy for each data type. If masking policies are assigned to the tag, before dropping the tag, the provider automatically unassigns them. For more information about this resource, see [docs](./masking_policy). ### Read-Only diff --git a/docs/resources/tag_masking_policy_association.md b/docs/resources/tag_masking_policy_association.md index 36d6c8943a..ed23cfb3dc 100644 --- a/docs/resources/tag_masking_policy_association.md +++ b/docs/resources/tag_masking_policy_association.md @@ -7,7 +7,7 @@ description: |- # snowflake_tag_masking_policy_association (Resource) -~> **Deprecation** This resource is deprecated and will be removed in a future major version release. Please use one of the new resources instead: `snowflake_tag` +~> **Deprecation** This resource is deprecated and will be removed in a future major version release. 
Please use the new `snowflake_tag` resource instead. Attach a masking policy to a tag. Requires a current warehouse to be set. Either with SNOWFLAKE_WAREHOUSE env variable or in current session. If no warehouse is provided, a temporary warehouse will be created. diff --git a/docs/resources/task.md b/docs/resources/task.md index 8343f80763..8f9a2c6034 100644 --- a/docs/resources/task.md +++ b/docs/resources/task.md @@ -69,13 +69,13 @@ resource "snowflake_task" "test" { database = "database" schema = "schema" name = "task" - warehouse = "warehouse" + warehouse = snowflake_warehouse.example.fully_qualified_name started = true sql_statement = "select 1" config = "{\"key\":\"value\"}" allow_overlapping_execution = true - error_integration = "" + error_integration = snowflake_notification_integration.example.fully_qualified_name when = "SYSTEM$STREAM_HAS_DATA('')" comment = "complete task" @@ -151,16 +151,16 @@ resource "snowflake_task" "test" { ### Required -- `database` (String) The database in which to create the task. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"` -- `name` (String) Specifies the identifier for the task; must be unique for the database and schema in which the task is created. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"` -- `schema` (String) The schema in which to create the task. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"` +- `database` (String) The database in which to create the task. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"`. +- `name` (String) Specifies the identifier for the task; must be unique for the database and schema in which the task is created. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"`. +- `schema` (String) The schema in which to create the task. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"`. - `sql_statement` (String) Any single SQL statement, or a call to a stored procedure, executed when the task runs. - `started` (Boolean) Specifies if the task should be started or suspended. 
### Optional - `abort_detached_query` (Boolean) Specifies the action that Snowflake performs for in-progress queries if connectivity is lost due to abrupt termination of a session (e.g. network outage, browser termination, service interruption). For more information, check [ABORT_DETACHED_QUERY docs](https://docs.snowflake.com/en/sql-reference/parameters#abort-detached-query). -- `after` (Set of String) Specifies one or more predecessor tasks for the current task. Use this option to [create a DAG](https://docs.snowflake.com/en/user-guide/tasks-graphs.html#label-task-dag) of tasks or add this task to an existing DAG. A DAG is a series of tasks that starts with a scheduled root task and is linked together by dependencies. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"` +- `after` (Set of String) Specifies one or more predecessor tasks for the current task. Use this option to [create a DAG](https://docs.snowflake.com/en/user-guide/tasks-graphs.html#label-task-dag) of tasks or add this task to an existing DAG. A DAG is a series of tasks that starts with a scheduled root task and is linked together by dependencies. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"`. - `allow_overlapping_execution` (String) By default, Snowflake ensures that only one instance of a particular DAG is allowed to run at a time, setting the parameter value to TRUE permits DAG runs to overlap. Available options are: "true" or "false". When the value is not set in the configuration the provider will put "default" there which means to use the Snowflake default for this value. - `autocommit` (Boolean) Specifies whether autocommit is enabled for the session. Autocommit determines whether a DML statement, when executed without an active transaction, is automatically committed after the statement successfully completes. For more information, see [Transactions](https://docs.snowflake.com/en/sql-reference/transactions). For more information, check [AUTOCOMMIT docs](https://docs.snowflake.com/en/sql-reference/parameters#autocommit). - `binary_input_format` (String) The format of VARCHAR values passed as input to VARCHAR-to-BINARY conversion functions. For more information, see [Binary input and output](https://docs.snowflake.com/en/sql-reference/binary-input-output). For more information, check [BINARY_INPUT_FORMAT docs](https://docs.snowflake.com/en/sql-reference/parameters#binary-input-format). @@ -178,10 +178,10 @@ resource "snowflake_task" "test" { - `date_input_format` (String) Specifies the input format for the DATE data type. For more information, see [Date and time input and output formats](https://docs.snowflake.com/en/sql-reference/date-time-input-output). For more information, check [DATE_INPUT_FORMAT docs](https://docs.snowflake.com/en/sql-reference/parameters#date-input-format). - `date_output_format` (String) Specifies the display format for the DATE data type. For more information, see [Date and time input and output formats](https://docs.snowflake.com/en/sql-reference/date-time-input-output). 
For more information, check [DATE_OUTPUT_FORMAT docs](https://docs.snowflake.com/en/sql-reference/parameters#date-output-format). - `enable_unload_physical_type_optimization` (Boolean) Specifies whether to set the schema for unloaded Parquet files based on the logical column data types (i.e. the types in the unload SQL query or source table) or on the unloaded column values (i.e. the smallest data types and precision that support the values in the output columns of the unload SQL statement or source table). For more information, check [ENABLE_UNLOAD_PHYSICAL_TYPE_OPTIMIZATION docs](https://docs.snowflake.com/en/sql-reference/parameters#enable-unload-physical-type-optimization). -- `error_integration` (String) Specifies the name of the notification integration used for error notifications. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"` +- `error_integration` (String) Specifies the name of the notification integration used for error notifications. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"`. For more information about this resource, see [docs](./notification_integration). - `error_on_nondeterministic_merge` (Boolean) Specifies whether to return an error when the [MERGE](https://docs.snowflake.com/en/sql-reference/sql/merge) command is used to update or delete a target row that joins multiple source rows and the system cannot determine the action to perform on the target row. For more information, check [ERROR_ON_NONDETERMINISTIC_MERGE docs](https://docs.snowflake.com/en/sql-reference/parameters#error-on-nondeterministic-merge). - `error_on_nondeterministic_update` (Boolean) Specifies whether to return an error when the [UPDATE](https://docs.snowflake.com/en/sql-reference/sql/update) command is used to update a target row that joins multiple source rows and the system cannot determine the action to perform on the target row. For more information, check [ERROR_ON_NONDETERMINISTIC_UPDATE docs](https://docs.snowflake.com/en/sql-reference/parameters#error-on-nondeterministic-update). -- `finalize` (String) Specifies the name of a root task that the finalizer task is associated with. Finalizer tasks run after all other tasks in the task graph run to completion. You can define the SQL of a finalizer task to handle notifications and the release and cleanup of resources that a task graph uses. For more information, see [Release and cleanup of task graphs](https://docs.snowflake.com/en/user-guide/tasks-graphs.html#label-finalizer-task). Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"` +- `finalize` (String) Specifies the name of a root task that the finalizer task is associated with. Finalizer tasks run after all other tasks in the task graph run to completion. 
You can define the SQL of a finalizer task to handle notifications and the release and cleanup of resources that a task graph uses. For more information, see [Release and cleanup of task graphs](https://docs.snowflake.com/en/user-guide/tasks-graphs.html#label-finalizer-task). Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"`. - `geography_output_format` (String) Display format for [GEOGRAPHY values](https://docs.snowflake.com/en/sql-reference/data-types-geospatial.html#label-data-types-geography). For more information, check [GEOGRAPHY_OUTPUT_FORMAT docs](https://docs.snowflake.com/en/sql-reference/parameters#geography-output-format). - `geometry_output_format` (String) Display format for [GEOMETRY values](https://docs.snowflake.com/en/sql-reference/data-types-geospatial.html#label-data-types-geometry). For more information, check [GEOMETRY_OUTPUT_FORMAT docs](https://docs.snowflake.com/en/sql-reference/parameters#geometry-output-format). - `jdbc_treat_timestamp_ntz_as_utc` (Boolean) Specifies how JDBC processes TIMESTAMP_NTZ values. For more information, check [JDBC_TREAT_TIMESTAMP_NTZ_AS_UTC docs](https://docs.snowflake.com/en/sql-reference/parameters#jdbc-treat-timestamp-ntz-as-utc). @@ -219,10 +219,10 @@ resource "snowflake_task" "test" { - `two_digit_century_start` (Number) Specifies the “century start” year for 2-digit years (i.e. the earliest year such dates can represent). This parameter prevents ambiguous dates when importing or converting data with the `YY` date format component (i.e. years represented as 2 digits). For more information, check [TWO_DIGIT_CENTURY_START docs](https://docs.snowflake.com/en/sql-reference/parameters#two-digit-century-start). - `unsupported_ddl_action` (String) Determines if an unsupported (i.e. non-default) value specified for a constraint property returns an error. For more information, check [UNSUPPORTED_DDL_ACTION docs](https://docs.snowflake.com/en/sql-reference/parameters#unsupported-ddl-action). - `use_cached_result` (Boolean) Specifies whether to reuse persisted query results, if available, when a matching query is submitted. For more information, check [USE_CACHED_RESULT docs](https://docs.snowflake.com/en/sql-reference/parameters#use-cached-result). -- `user_task_managed_initial_warehouse_size` (String) Specifies the size of the compute resources to provision for the first run of the task, before a task history is available for Snowflake to determine an ideal size. Once a task has successfully completed a few runs, Snowflake ignores this parameter setting. Valid values are (case-insensitive): %s. (Conflicts with warehouse) For more information, check [USER_TASK_MANAGED_INITIAL_WAREHOUSE_SIZE docs](https://docs.snowflake.com/en/sql-reference/parameters#user-task-managed-initial-warehouse-size). +- `user_task_managed_initial_warehouse_size` (String) Specifies the size of the compute resources to provision for the first run of the task, before a task history is available for Snowflake to determine an ideal size. Once a task has successfully completed a few runs, Snowflake ignores this parameter setting. Valid values are (case-insensitive): %s. (Conflicts with warehouse). For more information about warehouses, see [docs](./warehouse). 
For more information, check [USER_TASK_MANAGED_INITIAL_WAREHOUSE_SIZE docs](https://docs.snowflake.com/en/sql-reference/parameters#user-task-managed-initial-warehouse-size). - `user_task_minimum_trigger_interval_in_seconds` (Number) Minimum amount of time between Triggered Task executions in seconds For more information, check [USER_TASK_MINIMUM_TRIGGER_INTERVAL_IN_SECONDS docs](https://docs.snowflake.com/en/sql-reference/parameters#user-task-minimum-trigger-interval-in-seconds). - `user_task_timeout_ms` (Number) Specifies the time limit on a single run of the task before it times out (in milliseconds). For more information, check [USER_TASK_TIMEOUT_MS docs](https://docs.snowflake.com/en/sql-reference/parameters#user-task-timeout-ms). -- `warehouse` (String) The warehouse the task will use. Omit this parameter to use Snowflake-managed compute resources for runs of this task. Due to Snowflake limitations warehouse identifier can consist of only upper-cased letters. (Conflicts with user_task_managed_initial_warehouse_size) +- `warehouse` (String) The warehouse the task will use. Omit this parameter to use Snowflake-managed compute resources for runs of this task. Due to Snowflake limitations, the warehouse identifier can consist of only upper-cased letters. (Conflicts with user_task_managed_initial_warehouse_size). For more information about this resource, see [docs](./warehouse). - `week_of_year_policy` (Number) Specifies how the weeks in a given year are computed. `0`: The semantics used are equivalent to the ISO semantics, in which a week belongs to a given year if at least 4 days of that week are in that year. `1`: January 1 is included in the first week of the year and December 31 is included in the last week of the year. For more information, check [WEEK_OF_YEAR_POLICY docs](https://docs.snowflake.com/en/sql-reference/parameters#week-of-year-policy). - `week_start` (Number) Specifies the first day of the week (used by week-related date functions). `0`: Legacy Snowflake behavior is used (i.e. ISO-like semantics). `1` (Monday) to `7` (Sunday): All the week-related functions use weeks that start on the specified day of the week. For more information, check [WEEK_START docs](https://docs.snowflake.com/en/sql-reference/parameters#week-start). - `when` (String) Specifies a Boolean SQL expression; multiple conditions joined with AND/OR are supported. When a task is triggered (based on its SCHEDULE or AFTER setting), it validates the conditions of the expression to determine whether to execute. If the conditions of the expression are not met, then the task skips the current run. Any tasks that identify this task as a predecessor also don’t run. 
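+
+For example, a minimal sketch of combining `after` and `when` in a two-task DAG. All names, the schedule, and the stream identifiers are illustrative, and the `schedule` block follows the reworked resource's syntax (an assumption to verify against the example usage above):
+
+```terraform
+# root task that starts the DAG on a schedule
+resource "snowflake_task" "root" {
+  database      = "database"
+  schema        = "schema"
+  name          = "root_task"
+  warehouse     = snowflake_warehouse.example.fully_qualified_name
+  started       = true
+  sql_statement = "select 1"
+
+  schedule {
+    minutes = 5
+  }
+}
+
+# child task that runs after the root task, but only when one of the streams has data
+resource "snowflake_task" "child" {
+  database      = "database"
+  schema        = "schema"
+  name          = "child_task"
+  warehouse     = snowflake_warehouse.example.fully_qualified_name
+  started       = true
+  sql_statement = "select 1"
+
+  after = [snowflake_task.root.fully_qualified_name]
+  when  = "SYSTEM$STREAM_HAS_DATA('STREAM_A') OR SYSTEM$STREAM_HAS_DATA('STREAM_B')"
+}
+```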
@@ -1046,6 +1046,5 @@ Read-Only: Import is supported using the following syntax: ```shell -# format is database name | schema name | task name -terraform import snowflake_task.example 'dbName|schemaName|taskName' +terraform import snowflake_task.example '""."".""' ``` diff --git a/docs/resources/user.md b/docs/resources/user.md index 0f542d36b9..b1bb599467 100644 --- a/docs/resources/user.md +++ b/docs/resources/user.md @@ -40,9 +40,9 @@ resource "snowflake_user" "user" { display_name = "Snowflake User display name" email = "user@snowflake.example" - default_warehouse = "warehouse" + default_warehouse = snowflake_warehouse.example.fully_qualified_name default_secondary_roles_option = "ALL" - default_role = "role1" + default_role = snowflake_role.example.fully_qualified_name default_namespace = "some.namespace" mins_to_unlock = 9 @@ -128,7 +128,7 @@ resource "snowflake_user" "u" { ### Required -- `name` (String) Name of the user. Note that if you do not supply login_name this will be used as login_name. Check the [docs](https://docs.snowflake.net/manuals/sql-reference/sql/create-user.html#required-parameters). Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"` +- `name` (String) Name of the user. Note that if you do not supply login_name this will be used as login_name. Check the [docs](https://docs.snowflake.net/manuals/sql-reference/sql/create-user.html#required-parameters). Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"`. ### Optional @@ -149,9 +149,9 @@ resource "snowflake_user" "u" { - `date_output_format` (String) Specifies the display format for the DATE data type. For more information, see [Date and time input and output formats](https://docs.snowflake.com/en/sql-reference/date-time-input-output). For more information, check [DATE_OUTPUT_FORMAT docs](https://docs.snowflake.com/en/sql-reference/parameters#date-output-format). - `days_to_expiry` (Number) Specifies the number of days after which the user status is set to `Expired` and the user is no longer allowed to log in. This is useful for defining temporary users (i.e. users who should only have access to Snowflake for a limited time period). In general, you should not set this property for [account administrators](https://docs.snowflake.com/en/user-guide/security-access-control-considerations.html#label-accountadmin-users) (i.e. users with the `ACCOUNTADMIN` role) because Snowflake locks them out when they become `Expired`. External changes for this field won't be detected. In case you want to apply external changes, you can re-create the resource manually using "terraform taint". - `default_namespace` (String) Specifies the namespace (database only or database and schema) that is active by default for the user’s session upon login. Note that the CREATE USER operation does not verify that the namespace exists. -- `default_role` (String) Specifies the role that is active by default for the user’s session upon login. Note that specifying a default role for a user does **not** grant the role to the user. 
The role must be granted explicitly to the user using the [GRANT ROLE](https://docs.snowflake.com/en/sql-reference/sql/grant-role) command. In addition, the CREATE USER operation does not verify that the role exists. +- `default_role` (String) Specifies the role that is active by default for the user’s session upon login. Note that specifying a default role for a user does **not** grant the role to the user. The role must be granted explicitly to the user using the [GRANT ROLE](https://docs.snowflake.com/en/sql-reference/sql/grant-role) command. In addition, the CREATE USER operation does not verify that the role exists. For more information about this resource, see [docs](./account_role). - `default_secondary_roles_option` (String) Specifies the secondary roles that are active for the user’s session upon login. Valid values are (case-insensitive): `DEFAULT` | `NONE` | `ALL`. More information can be found in [doc](https://docs.snowflake.com/en/sql-reference/sql/create-user#optional-object-properties-objectproperties). -- `default_warehouse` (String) Specifies the virtual warehouse that is active by default for the user’s session upon login. Note that the CREATE USER operation does not verify that the warehouse exists. +- `default_warehouse` (String) Specifies the virtual warehouse that is active by default for the user’s session upon login. Note that the CREATE USER operation does not verify that the warehouse exists. For more information about this resource, see [docs](./warehouse). - `disable_mfa` (String) Allows enabling or disabling [multi-factor authentication](https://docs.snowflake.com/en/user-guide/security-mfa). Available options are: "true" or "false". When the value is not set in the configuration the provider will put "default" there which means to use the Snowflake default for this value. External changes for this field won't be detected. In case you want to apply external changes, you can re-create the resource manually using "terraform taint". - `disabled` (String) Specifies whether the user is disabled, which prevents logging in and aborts all the currently-running queries for the user. Available options are: "true" or "false". When the value is not set in the configuration the provider will put "default" there which means to use the Snowflake default for this value. - `display_name` (String) Name displayed for the user in the Snowflake web interface. @@ -179,7 +179,7 @@ resource "snowflake_user" "u" { - `network_policy` (String) Specifies the network policy to enforce for your account. Network policies enable restricting access to your account based on users’ IP address. For more details, see [Controlling network traffic with network policies](https://docs.snowflake.com/en/user-guide/network-policies). Any existing network policy (created using [CREATE NETWORK POLICY](https://docs.snowflake.com/en/sql-reference/sql/create-network-policy)). For more information, check [NETWORK_POLICY docs](https://docs.snowflake.com/en/sql-reference/parameters#network-policy). - `noorder_sequence_as_default` (Boolean) Specifies whether the ORDER or NOORDER property is set by default when you create a new sequence or add a new table column. The ORDER and NOORDER properties determine whether or not the values are generated for the sequence or auto-incremented column in [increasing or decreasing order](https://docs.snowflake.com/en/user-guide/querying-sequences.html#label-querying-sequences-increasing-values). 
For more information, check [NOORDER_SEQUENCE_AS_DEFAULT docs](https://docs.snowflake.com/en/sql-reference/parameters#noorder-sequence-as-default). - `odbc_treat_decimal_as_int` (Boolean) Specifies how ODBC processes columns that have a scale of zero (0). For more information, check [ODBC_TREAT_DECIMAL_AS_INT docs](https://docs.snowflake.com/en/sql-reference/parameters#odbc-treat-decimal-as-int). -- `password` (String, Sensitive) Password for the user. **WARNING:** this will put the password in the terraform state file. Use carefully. +- `password` (String, Sensitive) Password for the user. **WARNING:** this will put the password in the terraform state file. Use carefully. External changes for this field won't be detected. In case you want to apply external changes, you can re-create the resource manually using "terraform taint". - `prevent_unload_to_internal_stages` (Boolean) Specifies whether to prevent data unload operations to internal (Snowflake) stages using [COPY INTO ](https://docs.snowflake.com/en/sql-reference/sql/copy-into-location) statements. For more information, check [PREVENT_UNLOAD_TO_INTERNAL_STAGES docs](https://docs.snowflake.com/en/sql-reference/parameters#prevent-unload-to-internal-stages). - `query_tag` (String) Optional string that can be used to tag queries and other SQL statements executed within a session. The tags are displayed in the output of the [QUERY_HISTORY, QUERY_HISTORY_BY_*](https://docs.snowflake.com/en/sql-reference/functions/query_history) functions. For more information, check [QUERY_TAG docs](https://docs.snowflake.com/en/sql-reference/parameters#query-tag). - `quoted_identifiers_ignore_case` (Boolean) Specifies whether letters in double-quoted object identifiers are stored and resolved as uppercase letters. By default, Snowflake preserves the case of alphabetic characters when storing and resolving double-quoted identifiers (see [Identifier resolution](https://docs.snowflake.com/en/sql-reference/identifiers-syntax.html#label-identifier-casing)). You can use this parameter in situations in which [third-party applications always use double quotes around identifiers](https://docs.snowflake.com/en/sql-reference/identifiers-syntax.html#label-identifier-casing-parameter). For more information, check [QUOTED_IDENTIFIERS_IGNORE_CASE docs](https://docs.snowflake.com/en/sql-reference/parameters#quoted-identifiers-ignore-case). diff --git a/docs/resources/view.md b/docs/resources/view.md index 4295f33051..9a4efafe6d 100644 --- a/docs/resources/view.md +++ b/docs/resources/view.md @@ -7,10 +7,10 @@ description: |- !> **V1 release candidate** This resource was reworked and is a release candidate for the V1. We do not expect significant changes in it before the V1. We will welcome any feedback and adjust the resource if needed. Any errors reported will be resolved with a higher priority. We encourage checking this resource out before the V1 release. Please follow the [migration guide](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/MIGRATION_GUIDE.md#v094x--v0950) to use it. -!> **Note about copy_grants** Fields like `is_recursive`, `is_temporary`, `copy_grants` and `statement` can not be ALTERed on Snowflake side (check [docs](https://docs.snowflake.com/en/sql-reference/sql/alter-view)), and a change means recreation of the resource. ForceNew can not be used because it does not preserve grants from `copy_grants`. Beware that even though a change is marked as update, the resource is recreated. 
- !> Due to Snowflake limitations, to properly compute diff on `statement` field, the provider parses a `text` field which contains the whole CREATE query used to create the resource. We recommend not using special characters, especially `(`, `,`, `)` in any of the fields, if possible. +~> **Note about copy_grants** Fields like `is_recursive`, `is_temporary`, `copy_grants` and `statement` can not be ALTERed on Snowflake side (check [docs](https://docs.snowflake.com/en/sql-reference/sql/alter-view)), and a change on these fields means recreation of the resource. ForceNew can not be used because it does not preserve grants from `copy_grants`. Beware that even though a change is marked as update, the resource is recreated. + ~> **Required warehouse** For this resource, the provider uses [policy references](https://docs.snowflake.com/en/sql-reference/functions/policy_references) which requires a warehouse in the connection. Please, make sure you have either set a DEFAULT_WAREHOUSE for the user, or specified a warehouse in the provider configuration. # snowflake_view (Resource) @@ -59,12 +59,12 @@ resource "snowflake_view" "test" { policy_name = "projection_policy" } masking_policy { - policy_name = "masking_policy" + policy_name = snowflake_masking_policy.example.fully_qualified_name using = ["address"] } } row_access_policy { - policy_name = "row_access_policy" + policy_name = snowflake_row_access_policy.example.fully_qualified_name on = ["id"] } aggregation_policy { @@ -72,8 +72,9 @@ resource "snowflake_view" "test" { entity_key = ["id"] } data_metric_function { - function_name = "data_metric_function" - on = ["id"] + function_name = "data_metric_function" + on = ["id"] + schedule_status = "STARTED" } data_metric_schedule { using_cron = "15 * * * * UTC" @@ -91,10 +92,10 @@ SQL ### Required -- `database` (String) The database in which to create the view. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"` -- `name` (String) Specifies the identifier for the view; must be unique for the schema in which the view is created. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"` -- `schema` (String) The schema in which to create the view. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"` -- `statement` (String) Specifies the query used to create the view. +- `database` (String) The database in which to create the view. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"`. +- `name` (String) Specifies the identifier for the view; must be unique for the schema in which the view is created. 
Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"`. +- `schema` (String) The schema in which to create the view. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"`. +- `statement` (String) Specifies the query used to create the view. To mitigate permadiff on this field, the provider replaces blank characters with a space. This can lead to false positives in cases where a change in case or run of whitespace is semantically significant. ### Optional @@ -102,7 +103,7 @@ SQL - `change_tracking` (String) Specifies to enable or disable change tracking on the table. Available options are: "true" or "false". When the value is not set in the configuration the provider will put "default" there which means to use the Snowflake default for this value. - `column` (Block List) If you want to change the name of a column or add a comment to a column in the new view, include a column list that specifies the column names and (if needed) comments about the columns. You do not need to specify the data types of the columns. If this field is not specified, columns are inferred from the `statement` field by Snowflake. (see [below for nested schema](#nestedblock--column)) - `comment` (String) Specifies a comment for the view. -- `copy_grants` (Boolean) Retains the access permissions from the original view when a new view is created using the OR REPLACE clause. +- `copy_grants` (Boolean) Retains the access permissions from the original view when a view is recreated using the OR REPLACE clause. This is used when the provider detects changes for fields that can not be changed by ALTER. This value will not have any effect when creating a new object with Terraform (see the example below). - `data_metric_function` (Block Set) Data metric functions used for the view. (see [below for nested schema](#nestedblock--data_metric_function)) - `data_metric_schedule` (Block List, Max: 1) Specifies the schedule to run the data metric functions periodically. (see [below for nested schema](#nestedblock--data_metric_schedule)) - `is_recursive` (String) Specifies that the view can refer to itself using recursive syntax without necessarily using a CTE (common table expression). Available options are: "true" or "false". When the value is not set in the configuration the provider will put "default" there which means to use the Snowflake default for this value. @@ -147,7 +148,7 @@ Optional: Required: -- `policy_name` (String) Specifies the masking policy to set on a column. +- `policy_name` (String) Specifies the masking policy to set on a column. For more information about this resource, see [docs](./masking_policy). Optional: @@ -188,7 +189,7 @@ Optional: Required: - `on` (Set of String) Defines which columns are affected by the policy. -- `policy_name` (String) Row access policy name. +- `policy_name` (String) Row access policy name. For more information about this resource, see [docs](./row_access_policy). 
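+
+A minimal sketch of the `copy_grants` behavior described above: a view that keeps its grants when a change to a non-ALTERable field (here, `statement`) forces an `OR REPLACE` recreation. All identifiers are illustrative:
+
+```terraform
+resource "snowflake_view" "with_copy_grants" {
+  database    = "database"
+  schema      = "schema"
+  name        = "view"
+  copy_grants = true
+
+  # editing this statement later recreates the view with OR REPLACE,
+  # and copy_grants preserves the privileges granted on it
+  statement = "select id from foo"
+}
+```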
diff --git a/docs/resources/warehouse.md b/docs/resources/warehouse.md index af0fbe914d..44f1ca5e65 100644 --- a/docs/resources/warehouse.md +++ b/docs/resources/warehouse.md @@ -7,6 +7,12 @@ description: |- !> **V1 release candidate** This resource was reworked and is a release candidate for the V1. We do not expect significant changes in it before the V1. We will welcome any feedback and adjust the resource if needed. Any errors reported will be resolved with a higher priority. We encourage checking this resource out before the V1 release. Please follow the [migration guide](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/MIGRATION_GUIDE.md#v0920--v0930) to use it. + +-> **Note** Field `RESOURCE_CONSTRAINT` is currently missing. It will be added in the future. + + +-> **Note** Assigning resource monitors to warehouses requires ACCOUNTADMIN role. To do this, either manage the warehouse resource with ACCOUNTADMIN role, or use [unsafe_execute](./unsafe_execute) instead. See [this issue](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/3019) for more details. + # snowflake_warehouse (Resource) Resource used to manage warehouse objects. For more information, check [warehouse documentation](https://docs.snowflake.com/en/sql-reference/commands-warehouse). @@ -14,10 +20,30 @@ Resource used to manage warehouse objects. For more information, check [warehous ## Example Usage ```terraform +# Resource with required fields +resource "snowflake_warehouse" "warehouse" { + name = "WAREHOUSE" +} + +# Resource with all fields resource "snowflake_warehouse" "warehouse" { - name = "test" - comment = "foo" - warehouse_size = "small" + name = "WAREHOUSE" + warehouse_type = "SNOWPARK-OPTIMIZED" + warehouse_size = "MEDIUM" + max_cluster_count = 4 + min_cluster_count = 2 + scaling_policy = "ECONOMY" + auto_suspend = 1200 + auto_resume = false + initially_suspended = false + resource_monitor = snowflake_resource_monitor.monitor.fully_qualified_name + comment = "An example warehouse." + enable_query_acceleration = true + query_acceleration_max_scale_factor = 4 + + max_concurrency_level = 4 + statement_queued_timeout_in_seconds = 5 + statement_timeout_in_seconds = 86400 } ``` -> **Note** Instead of using fully_qualified_name, you can reference objects managed outside Terraform by constructing a correct ID, consult [identifiers guide](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/latest/docs/guides/identifiers#new-computed-fully-qualified-name-field-in-resources). @@ -28,7 +54,7 @@ resource "snowflake_warehouse" "warehouse" { ### Required -- `name` (String) Identifier for the virtual warehouse; must be unique for your account. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"` +- `name` (String) Identifier for the virtual warehouse; must be unique for your account. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"`. 
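+
+A hedged sketch of the `unsafe_execute` workaround mentioned in the resource monitor note above; the identifiers are illustrative, and the `UNSET RESOURCE_MONITOR` revert syntax is an assumption to verify against Snowflake's ALTER WAREHOUSE docs:
+
+```terraform
+resource "snowflake_unsafe_execute" "assign_resource_monitor" {
+  # run with a role that can set resource monitors (e.g. ACCOUNTADMIN); identifiers are placeholders
+  execute = "ALTER WAREHOUSE \"WAREHOUSE\" SET RESOURCE_MONITOR = \"MONITOR\""
+  # detaches the monitor when the resource is destroyed (assumed syntax)
+  revert = "ALTER WAREHOUSE \"WAREHOUSE\" UNSET RESOURCE_MONITOR"
+}
+```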
### Optional @@ -41,7 +67,7 @@ resource "snowflake_warehouse" "warehouse" { - `max_concurrency_level` (Number) Object parameter that specifies the concurrency level for SQL statements (i.e. queries and DML) executed by a warehouse. - `min_cluster_count` (Number) Specifies the minimum number of server clusters for the warehouse (only applies to multi-cluster warehouses). - `query_acceleration_max_scale_factor` (Number) Specifies the maximum scale factor for leasing compute resources for query acceleration. The scale factor is used as a multiplier based on warehouse size. -- `resource_monitor` (String) Specifies the name of a resource monitor that is explicitly assigned to the warehouse. +- `resource_monitor` (String) Specifies the name of a resource monitor that is explicitly assigned to the warehouse. For more information about this resource, see [docs](./resource_monitor). - `scaling_policy` (String) Specifies the policy for automatically starting and shutting down clusters in a multi-cluster warehouse running in Auto-scale mode. Valid values are (case-insensitive): `STANDARD` | `ECONOMY`. - `statement_queued_timeout_in_seconds` (Number) Object parameter that specifies the time, in seconds, a SQL statement (query, DDL, DML, etc.) can be queued on a warehouse before it is canceled by the system. - `statement_timeout_in_seconds` (Number) Specifies the time, in seconds, after which a running SQL statement (query, DDL, DML, etc.) is canceled by the system @@ -139,5 +165,5 @@ Read-Only: Import is supported using the following syntax: ```shell -terraform import snowflake_warehouse.example warehouseName +terraform import snowflake_warehouse.example '""' ``` diff --git a/examples/data-sources/snowflake_connections/data-source.tf b/examples/data-sources/snowflake_connections/data-source.tf index b32fd92e5e..58e2b8ea5e 100644 --- a/examples/data-sources/snowflake_connections/data-source.tf +++ b/examples/data-sources/snowflake_connections/data-source.tf @@ -23,3 +23,15 @@ data "snowflake_connections" "like_prefix" { output "like_prefix_output" { value = data.snowflake_connections.like_prefix.connections } + +# Ensure the number of connections is exactly one (with the use of a check block) +check "connection_check" { + data "snowflake_connections" "assert_with_check_block" { + like = "connection-name" + } + + assert { + condition = length(data.snowflake_connections.assert_with_check_block.connections) == 1 + error_message = "connections filtered by '${data.snowflake_connections.assert_with_check_block.like}' returned ${length(data.snowflake_connections.assert_with_check_block.connections)} connections where one was expected" + } +} diff --git a/examples/data-sources/snowflake_database_roles/data-source.tf b/examples/data-sources/snowflake_database_roles/data-source.tf index ff07fdc68b..29621c29e9 100644 --- a/examples/data-sources/snowflake_database_roles/data-source.tf +++ b/examples/data-sources/snowflake_database_roles/data-source.tf @@ -44,7 +44,7 @@ data "snowflake_database_roles" "assert_with_postcondition" { # Ensure the number of database roles is equal to at exactly one element (with the use of check block) check "database_role_check" { - data "snowflake_resource_monitors" "assert_with_check_block" { + data "snowflake_database_roles" "assert_with_check_block" { in_database = "database-name" like = "database_role-name" } diff --git a/examples/data-sources/snowflake_network_policies/data-source.tf index 
55b7ce844a..496ade396f 100644 --- a/examples/data-sources/snowflake_network_policies/data-source.tf +++ b/examples/data-sources/snowflake_network_policies/data-source.tf @@ -27,7 +27,7 @@ output "only_show_output" { # Ensure the number of network policies is at least one (with the use of postcondition) data "snowflake_network_policies" "assert_with_postcondition" { - starts_with = "network-policy-name" + like = "network-policy-name" lifecycle { postcondition { condition = length(self.network_policies) > 0 diff --git a/examples/resources/snowflake_account_role/import.sh b/examples/resources/snowflake_account_role/import.sh index d7d6ebddbe..28bc0caf54 100644 --- a/examples/resources/snowflake_account_role/import.sh +++ b/examples/resources/snowflake_account_role/import.sh @@ -1 +1 @@ -terraform import snowflake_account_role.example "name" +terraform import snowflake_account_role.example '"<name>"' diff --git a/examples/resources/snowflake_api_authentication_integration_with_authorization_code_grant/import.sh b/examples/resources/snowflake_api_authentication_integration_with_authorization_code_grant/import.sh index c641594f3b..d825e7f812 100644 --- a/examples/resources/snowflake_api_authentication_integration_with_authorization_code_grant/import.sh +++ b/examples/resources/snowflake_api_authentication_integration_with_authorization_code_grant/import.sh @@ -1 +1 @@ -terraform import snowflake_api_authentication_integration_with_authorization_code_grant.example "name" +terraform import snowflake_api_authentication_integration_with_authorization_code_grant.example '"<name>"' diff --git a/examples/resources/snowflake_api_authentication_integration_with_client_credentials/import.sh b/examples/resources/snowflake_api_authentication_integration_with_client_credentials/import.sh index d3454c9a27..60b80879ac 100644 --- a/examples/resources/snowflake_api_authentication_integration_with_client_credentials/import.sh +++ b/examples/resources/snowflake_api_authentication_integration_with_client_credentials/import.sh @@ -1 +1 @@ -terraform import snowflake_api_authentication_integration_with_client_credentials.example "name" +terraform import snowflake_api_authentication_integration_with_client_credentials.example '"<name>"' diff --git a/examples/resources/snowflake_api_authentication_integration_with_jwt_bearer/import.sh b/examples/resources/snowflake_api_authentication_integration_with_jwt_bearer/import.sh index b1cb40660a..a3d29286ad 100644 --- a/examples/resources/snowflake_api_authentication_integration_with_jwt_bearer/import.sh +++ b/examples/resources/snowflake_api_authentication_integration_with_jwt_bearer/import.sh @@ -1 +1 @@ -terraform import snowflake_api_authentication_integration_with_jwt_bearer.example "name" +terraform import snowflake_api_authentication_integration_with_jwt_bearer.example '"<name>"' diff --git a/examples/resources/snowflake_database/import.sh b/examples/resources/snowflake_database/import.sh index 8a30774299..add2afbd03 100644 --- a/examples/resources/snowflake_database/import.sh +++ b/examples/resources/snowflake_database/import.sh @@ -1 +1 @@ -terraform import snowflake_database.example 'database_name' +terraform import snowflake_database.example '"<database_name>"' diff --git a/examples/resources/snowflake_database/resource.tf b/examples/resources/snowflake_database/resource.tf index 13c1833c6b..616e0b2c00 100644 --- a/examples/resources/snowflake_database/resource.tf +++ b/examples/resources/snowflake_database/resource.tf @@ -10,10 +10,9 @@ resource "snowflake_database" "primary" { comment
= "my standard database" data_retention_time_in_days = 10 - data_retention_time_in_days_save = 10 max_data_extension_time_in_days = 20 - external_volume = "" - catalog = "" + external_volume = snowflake_external_volume.example.fully_qualified_name + catalog = snowflake_catalog.example.fully_qualified_name replace_invalid_characters = false default_ddl_collation = "en_US" storage_serialization_policy = "COMPATIBLE" @@ -40,11 +39,11 @@ resource "snowflake_database" "primary" { locals { replication_configs = [ { - account_identifier = "." + account_identifier = "\"\".\"\"" with_failover = true }, { - account_identifier = "." + account_identifier = "\"\".\"\"" with_failover = true }, ] @@ -52,10 +51,13 @@ locals { resource "snowflake_database" "primary" { name = "database_name" - for_each = local.replication_configs + for_each = { for rc in local.replication_configs : rc.account_identifier => rc } replication { - enable_to_account = each.value + enable_to_account { + account_identifier = each.value.account_identifier + with_failover = each.value.with_failover + } ignore_edition_check = true } } diff --git a/examples/resources/snowflake_external_oauth_integration/import.sh b/examples/resources/snowflake_external_oauth_integration/import.sh index 8029ac973e..d4ad4dd90d 100644 --- a/examples/resources/snowflake_external_oauth_integration/import.sh +++ b/examples/resources/snowflake_external_oauth_integration/import.sh @@ -1 +1 @@ -terraform import snowflake_external_oauth_integration.example "name" +terraform import snowflake_external_oauth_integration.example '""' diff --git a/examples/resources/snowflake_external_oauth_integration/resource.tf b/examples/resources/snowflake_external_oauth_integration/resource.tf index ef29249ace..017f0edf04 100644 --- a/examples/resources/snowflake_external_oauth_integration/resource.tf +++ b/examples/resources/snowflake_external_oauth_integration/resource.tf @@ -11,7 +11,7 @@ resource "snowflake_external_oauth_integration" "test" { resource "snowflake_external_oauth_integration" "test" { comment = "comment" enabled = true - external_oauth_allowed_roles_list = ["user1"] + external_oauth_allowed_roles_list = [snowflake_role.one.fully_qualified_name] external_oauth_any_role_mode = "ENABLE" external_oauth_audience_list = ["https://example.com"] external_oauth_issuer = "issuer" @@ -29,7 +29,7 @@ resource "snowflake_external_oauth_integration" "test" { enabled = true external_oauth_any_role_mode = "ENABLE" external_oauth_audience_list = ["https://example.com"] - external_oauth_blocked_roles_list = ["user1"] + external_oauth_blocked_roles_list = [snowflake_role.one.fully_qualified_name] external_oauth_issuer = "issuer" external_oauth_rsa_public_key = file("key.pem") external_oauth_rsa_public_key_2 = file("key2.pem") diff --git a/examples/resources/snowflake_grant_account_role/resource.tf b/examples/resources/snowflake_grant_account_role/resource.tf index d3af9ba081..7558d13da7 100644 --- a/examples/resources/snowflake_grant_account_role/resource.tf +++ b/examples/resources/snowflake_grant_account_role/resource.tf @@ -3,11 +3,11 @@ ################################## resource "snowflake_account_role" "role" { - name = var.role_name + name = "ROLE" } resource "snowflake_account_role" "parent_role" { - name = var.parent_role_name + name = "PARENT_ROLE" } resource "snowflake_grant_account_role" "g" { @@ -21,11 +21,11 @@ resource "snowflake_grant_account_role" "g" { ################################## resource "snowflake_account_role" "role" { - name = var.role_name + name = 
"ROLE" } resource "snowflake_user" "user" { - name = var.user_name + name = "USER" } resource "snowflake_grant_account_role" "g" { diff --git a/examples/resources/snowflake_network_policy/import.sh b/examples/resources/snowflake_network_policy/import.sh index e9f2b372ac..9da953c5d9 100644 --- a/examples/resources/snowflake_network_policy/import.sh +++ b/examples/resources/snowflake_network_policy/import.sh @@ -1 +1 @@ -terraform import snowflake_network_policy.example "name" +terraform import snowflake_network_policy.example '""' diff --git a/examples/resources/snowflake_network_policy/resource.tf b/examples/resources/snowflake_network_policy/resource.tf index d6cfe4bb62..5ff38f82b7 100644 --- a/examples/resources/snowflake_network_policy/resource.tf +++ b/examples/resources/snowflake_network_policy/resource.tf @@ -6,9 +6,9 @@ resource "snowflake_network_policy" "basic" { ## Complete (with every optional set) resource "snowflake_network_policy" "complete" { name = "network_policy_name" - allowed_network_rule_list = [""] - blocked_network_rule_list = [""] + allowed_network_rule_list = [snowflake_network_rule.one.fully_qualified_name] + blocked_network_rule_list = [snowflake_network_rule.two.fully_qualified_name] allowed_ip_list = ["192.168.1.0/24"] blocked_ip_list = ["192.168.1.99"] comment = "my network policy" -} \ No newline at end of file +} diff --git a/examples/resources/snowflake_oauth_integration_for_custom_clients/import.sh b/examples/resources/snowflake_oauth_integration_for_custom_clients/import.sh index beeddc5d18..94acb1cc2d 100644 --- a/examples/resources/snowflake_oauth_integration_for_custom_clients/import.sh +++ b/examples/resources/snowflake_oauth_integration_for_custom_clients/import.sh @@ -1 +1 @@ -terraform import snowflake_oauth_integration_for_custom_clients.example "name" +terraform import snowflake_oauth_integration_for_custom_clients.example '""' diff --git a/examples/resources/snowflake_oauth_integration_for_custom_clients/resource.tf b/examples/resources/snowflake_oauth_integration_for_custom_clients/resource.tf index 77f64e69ba..c48c536a33 100644 --- a/examples/resources/snowflake_oauth_integration_for_custom_clients/resource.tf +++ b/examples/resources/snowflake_oauth_integration_for_custom_clients/resource.tf @@ -1,6 +1,6 @@ # basic resource resource "snowflake_oauth_integration_for_custom_clients" "basic" { - name = "saml_integration" + name = "integration" oauth_client_type = "CONFIDENTIAL" oauth_redirect_uri = "https://example.com" blocked_roles_list = ["ACCOUNTADMIN", "SECURITYADMIN"] @@ -8,18 +8,18 @@ resource "snowflake_oauth_integration_for_custom_clients" "basic" { # resource with all fields set resource "snowflake_oauth_integration_for_custom_clients" "complete" { - name = "saml_integration" + name = "integration" oauth_client_type = "CONFIDENTIAL" oauth_redirect_uri = "https://example.com" enabled = "true" oauth_allow_non_tls_redirect_uri = "true" oauth_enforce_pkce = "true" oauth_use_secondary_roles = "NONE" - pre_authorized_roles_list = ["role_id1", "role_id2"] - blocked_roles_list = ["ACCOUNTADMIN", "SECURITYADMIN", "role_id1", "role_id2"] + pre_authorized_roles_list = [snowflake_role.one.fully_qualified_name, snowflake_role.two.fully_qualified_name] + blocked_roles_list = ["ACCOUNTADMIN", "SECURITYADMIN", snowflake_role.three.fully_qualified_name, snowflake_role.four.fully_qualified_name] oauth_issue_refresh_tokens = "true" oauth_refresh_token_validity = 87600 - network_policy = "network_policy_id" + network_policy = 
snowflake_network_policy.example.fully_qualified_name oauth_client_rsa_public_key = file("rsa.pub") oauth_client_rsa_public_key_2 = file("rsa2.pub") comment = "my oauth integration" diff --git a/examples/resources/snowflake_oauth_integration_for_partner_applications/resource.tf b/examples/resources/snowflake_oauth_integration_for_partner_applications/resource.tf index f6a52145a2..1c8a7830c2 100644 --- a/examples/resources/snowflake_oauth_integration_for_partner_applications/resource.tf +++ b/examples/resources/snowflake_oauth_integration_for_partner_applications/resource.tf @@ -14,6 +14,6 @@ resource "snowflake_oauth_integration_for_partner_applications" "test" { oauth_issue_refresh_tokens = "true" oauth_refresh_token_validity = 3600 oauth_use_secondary_roles = "IMPLICIT" - blocked_roles_list = ["ACCOUNTADMIN", "SECURITYADMIN", "role_id1", "role_id2"] + blocked_roles_list = ["ACCOUNTADMIN", "SECURITYADMIN", snowflake_role.one.fully_qualified_name, snowflake_role.two.fully_qualified_name] comment = "example oauth integration for partner applications" } diff --git a/examples/resources/snowflake_primary_connection/import.sh b/examples/resources/snowflake_primary_connection/import.sh index 743bf79921..7813816225 100644 --- a/examples/resources/snowflake_primary_connection/import.sh +++ b/examples/resources/snowflake_primary_connection/import.sh @@ -1 +1 @@ -terraform import snowflake_primary_connection.example 'connection_name' +terraform import snowflake_primary_connection.example '"<connection_name>"' diff --git a/examples/resources/snowflake_primary_connection/resource.tf b/examples/resources/snowflake_primary_connection/resource.tf index b9fe410b72..ba7c7da919 100644 --- a/examples/resources/snowflake_primary_connection/resource.tf +++ b/examples/resources/snowflake_primary_connection/resource.tf @@ -8,6 +8,6 @@ resource "snowflake_primary_connection" "complete" { name = "connection_name" comment = "my complete connection" enable_failover_to_accounts = [ - "<organization_name>.<account_name>"
+ "\"\".\"\"" ] } diff --git a/examples/resources/snowflake_resource_monitor/import.sh b/examples/resources/snowflake_resource_monitor/import.sh index 2fd060d6ed..c0cc9809c7 100644 --- a/examples/resources/snowflake_resource_monitor/import.sh +++ b/examples/resources/snowflake_resource_monitor/import.sh @@ -1,2 +1 @@ -# format is the resource monitor name -terraform import snowflake_resource_monitor.example 'resourceMonitorName' +terraform import snowflake_resource_monitor.example '""' diff --git a/examples/resources/snowflake_resource_monitor/resource.tf b/examples/resources/snowflake_resource_monitor/resource.tf index 45273e869d..5d9a960ee8 100644 --- a/examples/resources/snowflake_resource_monitor/resource.tf +++ b/examples/resources/snowflake_resource_monitor/resource.tf @@ -9,7 +9,7 @@ resource "snowflake_resource_monitor" "minimal_working" { name = "resource-monitor-name" credit_quota = 100 suspend_trigger = 100 - notify_users = ["USERONE", "USERTWO"] + notify_users = [snowflake_user.one.fully_qualified_name, snowflake_user.two.fully_qualified_name] } resource "snowflake_resource_monitor" "complete" { @@ -24,5 +24,5 @@ resource "snowflake_resource_monitor" "complete" { suspend_trigger = 50 suspend_immediate_trigger = 90 - notify_users = ["USERONE", "USERTWO"] + notify_users = [snowflake_user.one.fully_qualified_name, snowflake_user.two.fully_qualified_name] } diff --git a/examples/resources/snowflake_row_access_policy/resource.tf b/examples/resources/snowflake_row_access_policy/resource.tf index c4ff60b7be..c266158eb0 100644 --- a/examples/resources/snowflake_row_access_policy/resource.tf +++ b/examples/resources/snowflake_row_access_policy/resource.tf @@ -1,3 +1,4 @@ +# resource with all fields set resource "snowflake_row_access_policy" "example_row_access_policy" { name = "EXAMPLE_ROW_ACCESS_POLICY" database = "EXAMPLE_DB" diff --git a/examples/resources/snowflake_saml2_integration/import.sh b/examples/resources/snowflake_saml2_integration/import.sh index bf68b01c98..d6643bf352 100644 --- a/examples/resources/snowflake_saml2_integration/import.sh +++ b/examples/resources/snowflake_saml2_integration/import.sh @@ -1 +1 @@ -terraform import snowflake_saml2_integration.example "name" +terraform import snowflake_saml2_integration.example '""' diff --git a/examples/resources/snowflake_schema/import.sh b/examples/resources/snowflake_schema/import.sh index dea2bb90cf..fe3ede8b35 100644 --- a/examples/resources/snowflake_schema/import.sh +++ b/examples/resources/snowflake_schema/import.sh @@ -1,2 +1 @@ -# format is . 
terraform import snowflake_schema.example '"<database_name>"."<schema_name>"' diff --git a/examples/resources/snowflake_scim_integration/import.sh b/examples/resources/snowflake_scim_integration/import.sh index 365c14b973..467137f2f3 100644 --- a/examples/resources/snowflake_scim_integration/import.sh +++ b/examples/resources/snowflake_scim_integration/import.sh @@ -1 +1 @@ -terraform import snowflake_scim_integration.example "name" +terraform import snowflake_scim_integration.example '"<name>"' diff --git a/examples/resources/snowflake_scim_integration/resource.tf b/examples/resources/snowflake_scim_integration/resource.tf index 8e3417fae2..445860f22a 100644 --- a/examples/resources/snowflake_scim_integration/resource.tf +++ b/examples/resources/snowflake_scim_integration/resource.tf @@ -4,14 +4,16 @@ resource "snowflake_scim_integration" "test" { enabled = true scim_client = "GENERIC" sync_password = true + run_as_role = "GENERIC_SCIM_PROVISIONER" } + # resource with all fields set resource "snowflake_scim_integration" "test" { name = "test" enabled = true scim_client = "GENERIC" sync_password = true - network_policy = "network_policy_test" + network_policy = snowflake_network_policy.example.fully_qualified_name run_as_role = "GENERIC_SCIM_PROVISIONER" comment = "foo" } diff --git a/examples/resources/snowflake_secondary_connection/import.sh b/examples/resources/snowflake_secondary_connection/import.sh index 4de28135f7..b78ed2332d 100644 --- a/examples/resources/snowflake_secondary_connection/import.sh +++ b/examples/resources/snowflake_secondary_connection/import.sh @@ -1 +1 @@ -terraform import snowflake_secondary_connection.example 'secondary_connection_name' +terraform import snowflake_secondary_connection.example '"<secondary_connection_name>"' diff --git a/examples/resources/snowflake_secondary_connection/resource.tf b/examples/resources/snowflake_secondary_connection/resource.tf index 17d32c0820..66617dd596 100644 --- a/examples/resources/snowflake_secondary_connection/resource.tf +++ b/examples/resources/snowflake_secondary_connection/resource.tf @@ -1,12 +1,12 @@ ## Minimal resource "snowflake_secondary_connection" "basic" { name = "connection_name" - as_replica_of = "<organization_name>.<account_name>.<connection_name>" + as_replica_of = "\"<organization_name>\".\"<account_name>\".\"<connection_name>\"" } ## Complete (with every optional set) resource "snowflake_secondary_connection" "complete" { name = "connection_name" - as_replica_of = "<organization_name>.<account_name>.<connection_name>"
+ as_replica_of = "\"\".\"\".\"\"" comment = "my complete secondary connection" } diff --git a/examples/resources/snowflake_secondary_database/import.sh b/examples/resources/snowflake_secondary_database/import.sh index f183eac8ac..2896ef9be8 100644 --- a/examples/resources/snowflake_secondary_database/import.sh +++ b/examples/resources/snowflake_secondary_database/import.sh @@ -1 +1 @@ -terraform import snowflake_secondary_database.example 'secondary_database_name' +terraform import snowflake_secondary_database.example '""' diff --git a/examples/resources/snowflake_secret_with_authorization_code_grant/resource.tf b/examples/resources/snowflake_secret_with_authorization_code_grant/resource.tf index bb45a36e87..f6a499b028 100644 --- a/examples/resources/snowflake_secret_with_authorization_code_grant/resource.tf +++ b/examples/resources/snowflake_secret_with_authorization_code_grant/resource.tf @@ -3,7 +3,7 @@ resource "snowflake_secret_with_authorization_code_grant" "test" { name = "EXAMPLE_SECRET" database = "EXAMPLE_DB" schema = "EXAMPLE_SCHEMA" - api_authentication = "EXAMPLE_SECURITY_INTEGRATION_NAME" + api_authentication = snowflake_api_authentication_integration_with_authorization_code_grant.example.fully_qualified_name oauth_refresh_token = "EXAMPLE_TOKEN" oauth_refresh_token_expiry_time = "2025-01-02 15:04:01" } @@ -13,7 +13,7 @@ resource "snowflake_secret_with_authorization_code_grant" "test" { name = "EXAMPLE_SECRET" database = "EXAMPLE_DB" schema = "EXAMPLE_SCHEMA" - api_authentication = "EXAMPLE_SECURITY_INTEGRATION_NAME" + api_authentication = snowflake_api_authentication_integration_with_authorization_code_grant.example.fully_qualified_name oauth_refresh_token = "EXAMPLE_TOKEN" oauth_refresh_token_expiry_time = "2025-01-02 15:04:01" comment = "EXAMPLE_COMMENT" diff --git a/examples/resources/snowflake_secret_with_client_credentials/resource.tf b/examples/resources/snowflake_secret_with_client_credentials/resource.tf index baaf605e67..c62aecf252 100644 --- a/examples/resources/snowflake_secret_with_client_credentials/resource.tf +++ b/examples/resources/snowflake_secret_with_client_credentials/resource.tf @@ -3,7 +3,7 @@ resource "snowflake_secret_with_client_credentials" "test" { name = "EXAMPLE_SECRET" database = "EXAMPLE_DB" schema = "EXAMPLE_SCHEMA" - api_authentication = "EXAMPLE_SECURITY_INTEGRATION_NAME" + api_authentication = snowflake_api_authentication_integration_with_client_credentials.example.fully_qualified_name oauth_scopes = ["useraccount", "testscope"] } @@ -12,7 +12,7 @@ resource "snowflake_secret_with_client_credentials" "test" { name = "EXAMPLE_SECRET" database = "EXAMPLE_DB" schema = "EXAMPLE_SCHEMA" - api_authentication = "EXAMPLE_SECURITY_INTEGRATION_NAME" + api_authentication = snowflake_api_authentication_integration_with_client_credentials.example.fully_qualified_name oauth_scopes = ["useraccount", "testscope"] comment = "EXAMPLE_COMMENT" } diff --git a/examples/resources/snowflake_shared_database/import.sh b/examples/resources/snowflake_shared_database/import.sh index 6cf900566c..8c39bba7ee 100644 --- a/examples/resources/snowflake_shared_database/import.sh +++ b/examples/resources/snowflake_shared_database/import.sh @@ -1 +1 @@ -terraform import snowflake_shared_database.example 'shared_database_name' +terraform import snowflake_shared_database.example '""' diff --git a/examples/resources/snowflake_stream_on_directory_table/resource.tf b/examples/resources/snowflake_stream_on_directory_table/resource.tf index ab85c22f29..70188045e0 100644 --- 
a/examples/resources/snowflake_stream_on_directory_table/resource.tf +++ b/examples/resources/snowflake_stream_on_directory_table/resource.tf @@ -1,18 +1,10 @@ -resource "snowflake_stage" "example_stage" { - name = "EXAMPLE_STAGE" - url = "s3://com.example.bucket/prefix" - database = "EXAMPLE_DB" - schema = "EXAMPLE_SCHEMA" - credentials = "AWS_KEY_ID='${var.example_aws_key_id}' AWS_SECRET_KEY='${var.example_aws_secret_key}'" -} - # basic resource resource "snowflake_stream_on_directory_table" "stream" { name = "stream" schema = "schema" database = "database" - stage = snowflake_stage.stage.fully_qualified_name + stage = snowflake_stage.example.fully_qualified_name } @@ -23,11 +15,7 @@ resource "snowflake_stream_on_directory_table" "stream" { database = "database" copy_grants = true - stage = snowflake_stage.stage.fully_qualified_name - - at { - statement = "8e5d0ca9-005e-44e6-b858-a8f5b37c5726" - } + stage = snowflake_stage.example.fully_qualified_name comment = "A stream." } diff --git a/examples/resources/snowflake_stream_on_external_table/resource.tf b/examples/resources/snowflake_stream_on_external_table/resource.tf index 964cb0f342..bdd0073b2a 100644 --- a/examples/resources/snowflake_stream_on_external_table/resource.tf +++ b/examples/resources/snowflake_stream_on_external_table/resource.tf @@ -1,29 +1,10 @@ -resource "snowflake_external_table" "external_table" { - database = "db" - schema = "schema" - name = "external_table" - comment = "External table" - file_format = "TYPE = CSV FIELD_DELIMITER = '|'" - location = "@stage/directory/" - - column { - name = "id" - type = "int" - } - - column { - name = "data" - type = "text" - } -} - # basic resource resource "snowflake_stream_on_external_table" "stream" { name = "stream" schema = "schema" database = "database" - external_table = snowflake_external_table.external_table.fully_qualified_name + external_table = snowflake_external_table.example.fully_qualified_name } @@ -34,7 +15,7 @@ resource "snowflake_stream_on_external_table" "stream" { database = "database" copy_grants = true - external_table = snowflake_external_table.external_table.fully_qualified_name + external_table = snowflake_external_table.example.fully_qualified_name insert_only = "true" at { diff --git a/examples/resources/snowflake_stream_on_table/resource.tf b/examples/resources/snowflake_stream_on_table/resource.tf index c3bf45a71e..52a80e0380 100644 --- a/examples/resources/snowflake_stream_on_table/resource.tf +++ b/examples/resources/snowflake_stream_on_table/resource.tf @@ -1,15 +1,12 @@ -resource "snowflake_table" "table" { - database = "database" +# basic resource +resource "snowflake_stream_on_table" "stream" { + name = "stream" schema = "schema" - name = "name" + database = "database" - column { - type = "NUMBER(38,0)" - name = "id" - } + table = snowflake_table.example.fully_qualified_name } - # resource with more fields set resource "snowflake_stream_on_table" "stream" { name = "stream" @@ -17,7 +14,7 @@ resource "snowflake_stream_on_table" "stream" { database = "database" copy_grants = true - table = snowflake_table.table.fully_qualified_name + table = snowflake_table.example.fully_qualified_name append_only = "true" show_initial_rows = "true" diff --git a/examples/resources/snowflake_stream_on_view/resource.tf b/examples/resources/snowflake_stream_on_view/resource.tf index 754c893418..e0a7304ed5 100644 --- a/examples/resources/snowflake_stream_on_view/resource.tf +++ b/examples/resources/snowflake_stream_on_view/resource.tf @@ -1,19 +1,10 @@ -resource 
"snowflake_view" "view" { - database = "database" - schema = "schema" - name = "view" - statement = <<-SQL - select * from foo; -SQL -} - # basic resource resource "snowflake_stream_on_view" "stream" { name = "stream" schema = "schema" database = "database" - view = snowflake_view.view.fully_qualified_name + view = snowflake_view.example.fully_qualified_name } # resource with additional fields @@ -23,7 +14,7 @@ resource "snowflake_stream_on_view" "stream" { database = "database" copy_grants = true - view = snowflake_view.view.fully_qualified_name + view = snowflake_view.example.fully_qualified_name append_only = "true" show_initial_rows = "true" diff --git a/examples/resources/snowflake_streamlit/import.sh b/examples/resources/snowflake_streamlit/import.sh index aadf0a2952..35d7591655 100644 --- a/examples/resources/snowflake_streamlit/import.sh +++ b/examples/resources/snowflake_streamlit/import.sh @@ -1,2 +1 @@ -# format is .. terraform import snowflake_schema.example '""."".""' diff --git a/examples/resources/snowflake_streamlit/resource.tf b/examples/resources/snowflake_streamlit/resource.tf index e84ed3a08f..a5eae82e6e 100644 --- a/examples/resources/snowflake_streamlit/resource.tf +++ b/examples/resources/snowflake_streamlit/resource.tf @@ -3,18 +3,19 @@ resource "snowflake_streamlit" "streamlit" { database = "database" schema = "schema" name = "streamlit" - stage = "streamlit_db.streamlit_schema.streamlit_stage" + stage = snowflake_stage.example.fully_qualified_name main_file = "/streamlit_main.py" } + # resource with all fields set resource "snowflake_streamlit" "streamlit" { database = "database" schema = "schema" name = "streamlit" - stage = "streamlit_db.streamlit_schema.streamlit_stage" + stage = snowflake_stage.example.fully_qualified_name directory_location = "src" main_file = "streamlit_main.py" - query_warehouse = "warehouse" + query_warehouse = snowflake_warehouse.example.fully_qualified_name external_access_integrations = ["integration_id"] title = "title" comment = "comment" diff --git a/examples/resources/snowflake_tag/resource.tf b/examples/resources/snowflake_tag/resource.tf index 9c99ab0503..ee284d78ea 100644 --- a/examples/resources/snowflake_tag/resource.tf +++ b/examples/resources/snowflake_tag/resource.tf @@ -12,5 +12,5 @@ resource "snowflake_tag" "tag" { schema = "schema" comment = "comment" allowed_values = ["finance", "engineering", ""] - masking_policies = [snowfalke_masking_policy.masking_policy.fully_qualified_name] + masking_policies = [snowfalke_masking_policy.example.fully_qualified_name] } diff --git a/examples/resources/snowflake_task/import.sh b/examples/resources/snowflake_task/import.sh index 18f4e0bda8..1ea62df133 100644 --- a/examples/resources/snowflake_task/import.sh +++ b/examples/resources/snowflake_task/import.sh @@ -1,2 +1 @@ -# format is database name | schema name | task name -terraform import snowflake_task.example 'dbName|schemaName|taskName' +terraform import snowflake_task.example '""."".""' diff --git a/examples/resources/snowflake_task/resource.tf b/examples/resources/snowflake_task/resource.tf index da18d05a81..839946a3b2 100644 --- a/examples/resources/snowflake_task/resource.tf +++ b/examples/resources/snowflake_task/resource.tf @@ -53,13 +53,13 @@ resource "snowflake_task" "test" { database = "database" schema = "schema" name = "task" - warehouse = "warehouse" + warehouse = snowflake_warehouse.example.fully_qualified_name started = true sql_statement = "select 1" config = "{\"key\":\"value\"}" allow_overlapping_execution = true - 
error_integration = "" + error_integration = snowflake_notification_integration.example.fully_qualified_name when = "SYSTEM$STREAM_HAS_DATA('')" comment = "complete task" diff --git a/examples/resources/snowflake_user/resource.tf b/examples/resources/snowflake_user/resource.tf index 1892a4568f..70f8e4a0d5 100644 --- a/examples/resources/snowflake_user/resource.tf +++ b/examples/resources/snowflake_user/resource.tf @@ -16,9 +16,9 @@ resource "snowflake_user" "user" { display_name = "Snowflake User display name" email = "user@snowflake.example" - default_warehouse = "warehouse" + default_warehouse = snowflake_warehouse.example.fully_qualified_name default_secondary_roles_option = "ALL" - default_role = "role1" + default_role = snowflake_role.example.fully_qualified_name default_namespace = "some.namespace" mins_to_unlock = 9 diff --git a/examples/resources/snowflake_view/resource.tf b/examples/resources/snowflake_view/resource.tf index b41c2c308d..f44106e8c5 100644 --- a/examples/resources/snowflake_view/resource.tf +++ b/examples/resources/snowflake_view/resource.tf @@ -37,12 +37,12 @@ resource "snowflake_view" "test" { policy_name = "projection_policy" } masking_policy { - policy_name = "masking_policy" + policy_name = snowflake_masking_policy.example.fully_qualified_name using = ["address"] } } row_access_policy { - policy_name = "row_access_policy" + policy_name = snowflake_row_access_policy.example.fully_qualified_name on = ["id"] } aggregation_policy { @@ -50,8 +50,9 @@ resource "snowflake_view" "test" { entity_key = ["id"] } data_metric_function { - function_name = "data_metric_function" - on = ["id"] + function_name = "data_metric_function" + on = ["id"] + schedule_status = "STARTED" } data_metric_schedule { using_cron = "15 * * * * UTC" diff --git a/examples/resources/snowflake_warehouse/import.sh b/examples/resources/snowflake_warehouse/import.sh index 6fe5aa5ab8..e9e01ef33b 100644 --- a/examples/resources/snowflake_warehouse/import.sh +++ b/examples/resources/snowflake_warehouse/import.sh @@ -1 +1 @@ -terraform import snowflake_warehouse.example warehouseName +terraform import snowflake_warehouse.example '""' diff --git a/examples/resources/snowflake_warehouse/resource.tf b/examples/resources/snowflake_warehouse/resource.tf index 4172366508..1c08611115 100644 --- a/examples/resources/snowflake_warehouse/resource.tf +++ b/examples/resources/snowflake_warehouse/resource.tf @@ -1,5 +1,25 @@ +# Resource with required fields resource "snowflake_warehouse" "warehouse" { - name = "test" - comment = "foo" - warehouse_size = "small" + name = "WAREHOUSE" +} + +# Resource with all fields +resource "snowflake_warehouse" "warehouse" { + name = "WAREHOUSE" + warehouse_type = "SNOWPARK-OPTIMIZED" + warehouse_size = "MEDIUM" + max_cluster_count = 4 + min_cluster_count = 2 + scaling_policy = "ECONOMY" + auto_suspend = 1200 + auto_resume = false + initially_suspended = false + resource_monitor = snowflake_resource_monitor.monitor.fully_qualified_name + comment = "An example warehouse." 
+ enable_query_acceleration = true + query_acceleration_max_scale_factor = 4 + + max_concurrency_level = 4 + statement_queued_timeout_in_seconds = 5 + statement_timeout_in_seconds = 86400 } diff --git a/pkg/datasources/connections.go b/pkg/datasources/connections.go index bc3c59c378..14f4f4f3d2 100644 --- a/pkg/datasources/connections.go +++ b/pkg/datasources/connections.go @@ -38,7 +38,7 @@ func Connections() *schema.Resource { return &schema.Resource{ ReadContext: TrackingReadWrapper(datasources.Connections, ReadConnections), Schema: connectionsSchema, - Description: "Datasource used to get details of filtered connections. Filtering is aligned with the current possibilities for [SHOW CONNECTIONS](https://docs.snowflake.com/en/sql-reference/sql/show-connections) query. The results of SHOW is encapsulated in one output collection `connections`.", + Description: "Data source used to get details of filtered connections. Filtering is aligned with the current possibilities for [SHOW CONNECTIONS](https://docs.snowflake.com/en/sql-reference/sql/show-connections) query. The results of SHOW are encapsulated in one output collection `connections`.", } } diff --git a/pkg/datasources/connections_acceptance_test.go b/pkg/datasources/connections_acceptance_test.go index 5e71034391..0e91f4b6bd 100644 --- a/pkg/datasources/connections_acceptance_test.go +++ b/pkg/datasources/connections_acceptance_test.go @@ -2,6 +2,7 @@ package datasources_test import ( "fmt" + "regexp" "strings" "testing" @@ -221,3 +222,33 @@ func connectionAndSecondaryConnectionDatasourceWithLike(like string) string { } `, like) } + +func TestAcc_Connections_NotFound_WithPostConditions(t *testing.T) { + resource.Test(t, resource.TestCase{ + ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, + TerraformVersionChecks: []tfversion.TerraformVersionCheck{ + tfversion.RequireAbove(tfversion.Version1_5_0), + }, + Steps: []resource.TestStep{ + { + Config: connectionNonExisting(), + ExpectError: regexp.MustCompile("there should be at least one connection"), + }, + }, + }) +} + +func connectionNonExisting() string { + return ` +data "snowflake_connections" "test" { + like = "non-existing-connection" + + lifecycle { + postcondition { + condition = length(self.connections) > 0 + error_message = "there should be at least one connection" + } + } +} +` +} diff --git a/pkg/datasources/database_roles.go b/pkg/datasources/database_roles.go index 570b75fd60..43548bcb9b 100644 --- a/pkg/datasources/database_roles.go +++ b/pkg/datasources/database_roles.go @@ -68,7 +68,7 @@ func DatabaseRoles() *schema.Resource { return &schema.Resource{ ReadContext: TrackingReadWrapper(datasources.DatabaseRoles, ReadDatabaseRoles), Schema: databaseRolesSchema, - Description: "Datasource used to get details of filtered database roles. Filtering is aligned with the current possibilities for [SHOW DATABASE ROLES](https://docs.snowflake.com/en/sql-reference/sql/show-database-roles) query (`like` and `limit` are supported). The results of SHOW is encapsulated in show_output collection.", + Description: "Data source used to get details of filtered database roles. Filtering is aligned with the current possibilities for [SHOW DATABASE ROLES](https://docs.snowflake.com/en/sql-reference/sql/show-database-roles) query (`like` and `limit` are supported).
The results of SHOW are encapsulated in show_output collection.", } } diff --git a/pkg/datasources/databases.go b/pkg/datasources/databases.go index 21fb414aed..f3173dec13 100644 --- a/pkg/datasources/databases.go +++ b/pkg/datasources/databases.go @@ -95,7 +95,7 @@ func Databases() *schema.Resource { return &schema.Resource{ ReadContext: TrackingReadWrapper(datasources.Databases, ReadDatabases), Schema: databasesSchema, - Description: "Datasource used to get details of filtered databases. Filtering is aligned with the current possibilities for [SHOW DATABASES](https://docs.snowflake.com/en/sql-reference/sql/show-databases) query (`like`, `starts_with`, and `limit` are all supported). The results of SHOW, DESCRIBE, and SHOW PARAMETERS IN are encapsulated in one output collection.", + Description: "Data source used to get details of filtered databases. Filtering is aligned with the current possibilities for [SHOW DATABASES](https://docs.snowflake.com/en/sql-reference/sql/show-databases) query (`like`, `starts_with`, and `limit` are all supported). The results of SHOW, DESCRIBE, and SHOW PARAMETERS IN are encapsulated in one output collection.", } } diff --git a/pkg/datasources/masking_policies.go b/pkg/datasources/masking_policies.go index 670e29b760..c757088430 100644 --- a/pkg/datasources/masking_policies.go +++ b/pkg/datasources/masking_policies.go @@ -121,7 +121,7 @@ func MaskingPolicies() *schema.Resource { return &schema.Resource{ ReadContext: TrackingReadWrapper(datasources.MaskingPolicies, ReadMaskingPolicies), Schema: maskingPoliciesSchema, - Description: "Datasource used to get details of filtered masking policies. Filtering is aligned with the current possibilities for [SHOW MASKING POLICIES](https://docs.snowflake.com/en/sql-reference/sql/show-masking-policies) query. The results of SHOW and DESCRIBE are encapsulated in one output collection `masking_policies`.", + Description: "Data source used to get details of filtered masking policies. Filtering is aligned with the current possibilities for [SHOW MASKING POLICIES](https://docs.snowflake.com/en/sql-reference/sql/show-masking-policies) query. The results of SHOW and DESCRIBE are encapsulated in one output collection `masking_policies`.", } } diff --git a/pkg/datasources/network_policies.go b/pkg/datasources/network_policies.go index 6aa615d136..035372d0dc 100644 --- a/pkg/datasources/network_policies.go +++ b/pkg/datasources/network_policies.go @@ -56,7 +56,7 @@ func NetworkPolicies() *schema.Resource { return &schema.Resource{ ReadContext: TrackingReadWrapper(datasources.NetworkPolicies, ReadNetworkPolicies), Schema: networkPoliciesSchema, - Description: "Datasource used to get details of filtered network policies. Filtering is aligned with the current possibilities for [SHOW NETWORK POLICIES](https://docs.snowflake.com/en/sql-reference/sql/show-network-policies) query (`like` is supported). The results of SHOW and DESCRIBE are encapsulated in one output collection.", + Description: "Data source used to get details of filtered network policies. Filtering is aligned with the current possibilities for [SHOW NETWORK POLICIES](https://docs.snowflake.com/en/sql-reference/sql/show-network-policies) query (`like` is supported).
The results of SHOW and DESCRIBE are encapsulated in one output collection.", } } diff --git a/pkg/datasources/resource_monitors.go b/pkg/datasources/resource_monitors.go index 8a3825034f..74cbfce6a9 100644 --- a/pkg/datasources/resource_monitors.go +++ b/pkg/datasources/resource_monitors.go @@ -43,7 +43,7 @@ func ResourceMonitors() *schema.Resource { return &schema.Resource{ ReadContext: TrackingReadWrapper(datasources.ResourceMonitors, ReadResourceMonitors), Schema: resourceMonitorsSchema, - Description: "Datasource used to get details of filtered resource monitors. Filtering is aligned with the current possibilities for [SHOW RESOURCE MONITORS](https://docs.snowflake.com/en/sql-reference/sql/show-resource-monitors) query (`like` is supported). The results of SHOW is encapsulated in show_output collection.", + Description: "Data source used to get details of filtered resource monitors. Filtering is aligned with the current possibilities for [SHOW RESOURCE MONITORS](https://docs.snowflake.com/en/sql-reference/sql/show-resource-monitors) query (`like` is supported). The results of SHOW are encapsulated in show_output collection.", } } diff --git a/pkg/datasources/roles.go b/pkg/datasources/roles.go index 7278988f7d..bdce14c43e 100644 --- a/pkg/datasources/roles.go +++ b/pkg/datasources/roles.go @@ -51,7 +51,7 @@ func Roles() *schema.Resource { return &schema.Resource{ ReadContext: TrackingReadWrapper(datasources.Roles, ReadRoles), Schema: rolesSchema, - Description: "Datasource used to get details of filtered roles. Filtering is aligned with the current possibilities for [SHOW ROLES](https://docs.snowflake.com/en/sql-reference/sql/show-roles) query (`like` and `in_class` are all supported). The results of SHOW are encapsulated in one output collection.", + Description: "Data source used to get details of filtered roles. Filtering is aligned with the current possibilities for [SHOW ROLES](https://docs.snowflake.com/en/sql-reference/sql/show-roles) query (`like` and `in_class` are both supported). The results of SHOW are encapsulated in one output collection.", } } diff --git a/pkg/datasources/row_access_policies.go b/pkg/datasources/row_access_policies.go index 22d8607422..a5cd35ee87 100644 --- a/pkg/datasources/row_access_policies.go +++ b/pkg/datasources/row_access_policies.go @@ -117,7 +117,7 @@ func RowAccessPolicies() *schema.Resource { return &schema.Resource{ ReadContext: TrackingReadWrapper(datasources.RowAccessPolicies, ReadRowAccessPolicies), Schema: rowAccessPoliciesSchema, - Description: "Datasource used to get details of filtered row access policies. Filtering is aligned with the current possibilities for [SHOW ROW ACCESS POLICIES](https://docs.snowflake.com/en/sql-reference/sql/show-row-access-policies) query. The results of SHOW and DESCRIBE are encapsulated in one output collection `row_access_policies`.", + Description: "Data source used to get details of filtered row access policies. Filtering is aligned with the current possibilities for [SHOW ROW ACCESS POLICIES](https://docs.snowflake.com/en/sql-reference/sql/show-row-access-policies) query.
The results of SHOW and DESCRIBE are encapsulated in one output collection `row_access_policies`.", } } diff --git a/pkg/datasources/schemas.go b/pkg/datasources/schemas.go index bab420e5e3..aec6eac8cc 100644 --- a/pkg/datasources/schemas.go +++ b/pkg/datasources/schemas.go @@ -131,7 +131,7 @@ func Schemas() *schema.Resource { return &schema.Resource{ ReadContext: TrackingReadWrapper(datasources.Schemas, ReadSchemas), Schema: schemasSchema, - Description: "Datasource used to get details of filtered schemas. Filtering is aligned with the current possibilities for [SHOW SCHEMAS](https://docs.snowflake.com/en/sql-reference/sql/show-schemas) query. The results of SHOW, DESCRIBE, and SHOW PARAMETERS IN are encapsulated in one output collection.", + Description: "Data source used to get details of filtered schemas. Filtering is aligned with the current possibilities for [SHOW SCHEMAS](https://docs.snowflake.com/en/sql-reference/sql/show-schemas) query. The results of SHOW, DESCRIBE, and SHOW PARAMETERS IN are encapsulated in one output collection.", } } diff --git a/pkg/datasources/secrets.go b/pkg/datasources/secrets.go index c40ec7aa6e..3102e6dba6 100644 --- a/pkg/datasources/secrets.go +++ b/pkg/datasources/secrets.go @@ -100,7 +100,7 @@ func Secrets() *schema.Resource { return &schema.Resource{ ReadContext: TrackingReadWrapper(datasources.Secrets, ReadSecrets), Schema: secretsSchema, - Description: "Datasource used to get details of filtered secrets. Filtering is aligned with the current possibilities for [SHOW SECRETS](https://docs.snowflake.com/en/sql-reference/sql/show-secrets) query. The results of SHOW and DESCRIBE are encapsulated in one output collection `secrets`.", + Description: "Data source used to get details of filtered secrets. Filtering is aligned with the current possibilities for [SHOW SECRETS](https://docs.snowflake.com/en/sql-reference/sql/show-secrets) query. The results of SHOW and DESCRIBE are encapsulated in one output collection `secrets`.", } } diff --git a/pkg/datasources/security_integrations.go b/pkg/datasources/security_integrations.go index 6418f6e4fc..6de6728a1c 100644 --- a/pkg/datasources/security_integrations.go +++ b/pkg/datasources/security_integrations.go @@ -56,7 +56,7 @@ func SecurityIntegrations() *schema.Resource { return &schema.Resource{ ReadContext: TrackingReadWrapper(datasources.SecurityIntegrations, ReadSecurityIntegrations), Schema: securityIntegrationsSchema, - Description: "Datasource used to get details of filtered security integrations. Filtering is aligned with the current possibilities for [SHOW SECURITY INTEGRATIONS](https://docs.snowflake.com/en/sql-reference/sql/show-integrations) query (only `like` is supported). The results of SHOW and DESCRIBE are encapsulated in one output collection `security_integrations`.", + Description: "Data source used to get details of filtered security integrations. Filtering is aligned with the current possibilities for [SHOW SECURITY INTEGRATIONS](https://docs.snowflake.com/en/sql-reference/sql/show-integrations) query (only `like` is supported). 
The results of SHOW and DESCRIBE are encapsulated in one output collection `security_integrations`.", } } diff --git a/pkg/datasources/streamlits.go b/pkg/datasources/streamlits.go index 889a23548f..e9fb8a364e 100644 --- a/pkg/datasources/streamlits.go +++ b/pkg/datasources/streamlits.go @@ -104,7 +104,7 @@ func Streamlits() *schema.Resource { return &schema.Resource{ ReadContext: TrackingReadWrapper(datasources.Streamlits, ReadStreamlits), Schema: streamlitsSchema, - Description: "Datasource used to get details of filtered streamlits. Filtering is aligned with the current possibilities for [SHOW STREAMLITS](https://docs.snowflake.com/en/sql-reference/sql/show-streamlits) query (only `like` is supported). The results of SHOW and DESCRIBE are encapsulated in one output collection `streamlits`.", + Description: "Data source used to get details of filtered streamlits. Filtering is aligned with the current possibilities for [SHOW STREAMLITS](https://docs.snowflake.com/en/sql-reference/sql/show-streamlits) query (only `like` is supported). The results of SHOW and DESCRIBE are encapsulated in one output collection `streamlits`.", } } diff --git a/pkg/datasources/streams.go b/pkg/datasources/streams.go index 4323fb19d2..50ed824825 100644 --- a/pkg/datasources/streams.go +++ b/pkg/datasources/streams.go @@ -56,7 +56,7 @@ func Streams() *schema.Resource { return &schema.Resource{ ReadContext: TrackingReadWrapper(datasources.Streams, ReadStreams), Schema: streamsSchema, - Description: "Datasource used to get details of filtered streams. Filtering is aligned with the current possibilities for [SHOW STREAMS](https://docs.snowflake.com/en/sql-reference/sql/show-streams) query. The results of SHOW and DESCRIBE are encapsulated in one output collection `streams`.", + Description: "Data source used to get details of filtered streams. Filtering is aligned with the current possibilities for [SHOW STREAMS](https://docs.snowflake.com/en/sql-reference/sql/show-streams) query. The results of SHOW and DESCRIBE are encapsulated in one output collection `streams`.", } } diff --git a/pkg/datasources/tags.go b/pkg/datasources/tags.go index 6fee2d84d6..20d755df39 100644 --- a/pkg/datasources/tags.go +++ b/pkg/datasources/tags.go @@ -40,7 +40,7 @@ func Tags() *schema.Resource { return &schema.Resource{ ReadContext: TrackingReadWrapper(datasources.Tags, ReadTags), Schema: tagsSchema, - Description: "Datasource used to get details of filtered tags. Filtering is aligned with the current possibilities for [SHOW TAGS](https://docs.snowflake.com/en/sql-reference/sql/show-tags) query. The results of SHOW are encapsulated in one output collection `tags`.", + Description: "Data source used to get details of filtered tags. Filtering is aligned with the current possibilities for [SHOW TAGS](https://docs.snowflake.com/en/sql-reference/sql/show-tags) query. The results of SHOW are encapsulated in one output collection `tags`.", } } diff --git a/pkg/datasources/users.go b/pkg/datasources/users.go index 5afe984886..4d5c3be40a 100644 --- a/pkg/datasources/users.go +++ b/pkg/datasources/users.go @@ -95,7 +95,7 @@ func Users() *schema.Resource { return &schema.Resource{ ReadContext: TrackingReadWrapper(datasources.Users, ReadUsers), Schema: usersSchema, - Description: "Datasource used to get details of filtered users. Filtering is aligned with the current possibilities for [SHOW USERS](https://docs.snowflake.com/en/sql-reference/sql/show-users) query. 
The results of SHOW, DESCRIBE, and SHOW PARAMETERS IN are encapsulated in one output collection. Important note is that when querying users you don't have permissions to, the querying options are limited. You won't get almost any field in `show_output` (only empty or default values), the DESCRIBE command cannot be called, so you have to set `with_describe = false`. Only `parameters` output is not affected by the lack of privileges.", + Description: "Data source used to get details of filtered users. Filtering is aligned with the current possibilities for [SHOW USERS](https://docs.snowflake.com/en/sql-reference/sql/show-users) query. The results of SHOW, DESCRIBE, and SHOW PARAMETERS IN are encapsulated in one output collection. Note that when querying users you don't have permissions to, the querying options are limited: you will get almost no fields in `show_output` (only empty or default values), and the DESCRIBE command cannot be called, so you have to set `with_describe = false`. Only the `parameters` output is not affected by the lack of privileges.", } } diff --git a/pkg/datasources/views.go b/pkg/datasources/views.go index 12a1bcdf14..64d64c7a68 100644 --- a/pkg/datasources/views.go +++ b/pkg/datasources/views.go @@ -110,7 +110,7 @@ func Views() *schema.Resource { return &schema.Resource{ ReadContext: TrackingReadWrapper(datasources.Views, ReadViews), Schema: viewsSchema, - Description: "Datasource used to get details of filtered views. Filtering is aligned with the current possibilities for [SHOW VIEWS](https://docs.snowflake.com/en/sql-reference/sql/show-views) query (only `like` is supported). The results of SHOW and DESCRIBE are encapsulated in one output collection `views`.", + Description: "Data source used to get details of filtered views. Filtering is aligned with the current possibilities for [SHOW VIEWS](https://docs.snowflake.com/en/sql-reference/sql/show-views) query (only `like` is supported). The results of SHOW and DESCRIBE are encapsulated in one output collection `views`.", } } diff --git a/pkg/datasources/warehouses.go b/pkg/datasources/warehouses.go index 9c42fb5e07..2399f33872 100644 --- a/pkg/datasources/warehouses.go +++ b/pkg/datasources/warehouses.go @@ -70,7 +70,7 @@ func Warehouses() *schema.Resource { return &schema.Resource{ ReadContext: TrackingReadWrapper(datasources.Warehouses, ReadWarehouses), Schema: warehousesSchema, - Description: "Datasource used to get details of filtered warehouses. Filtering is aligned with the current possibilities for [SHOW WAREHOUSES](https://docs.snowflake.com/en/sql-reference/sql/show-warehouses) query (only `like` is supported). The results of SHOW, DESCRIBE, and SHOW PARAMETERS IN are encapsulated in one output collection.", + Description: "Data source used to get details of filtered warehouses. Filtering is aligned with the current possibilities for [SHOW WAREHOUSES](https://docs.snowflake.com/en/sql-reference/sql/show-warehouses) query (only `like` is supported).
The results of SHOW, DESCRIBE, and SHOW PARAMETERS IN are encapsulated in one output collection.", } } diff --git a/pkg/internal/tracking/context.go b/pkg/internal/tracking/context.go index 8fdbf8c03f..f228fa4773 100644 --- a/pkg/internal/tracking/context.go +++ b/pkg/internal/tracking/context.go @@ -10,7 +10,7 @@ import ( const ( CurrentSchemaVersion string = "1" - ProviderVersion string = "v0.99.0" // TODO(SNOW-1814934): Currently hardcoded, make it computed + ProviderVersion string = "v0.100.0" // TODO(SNOW-1814934): Currently hardcoded, make it computed MetadataPrefix string = "terraform_provider_usage_tracking" ) diff --git a/pkg/resources/api_authentication_integration_common.go b/pkg/resources/api_authentication_integration_common.go index 8791a5d0df..b70f42cbc2 100644 --- a/pkg/resources/api_authentication_integration_common.go +++ b/pkg/resources/api_authentication_integration_common.go @@ -33,7 +33,7 @@ var apiAuthCommonSchema = map[string]*schema.Schema{ "oauth_client_secret": { Type: schema.TypeString, Required: true, - Description: "Specifies the client secret for the OAuth application in the ServiceNow instance from the previous step. The connector uses this to request an access token from the ServiceNow instance.", + Description: externalChangesNotDetectedFieldDescription("Specifies the client secret for the OAuth application in the ServiceNow instance from the previous step. The connector uses this to request an access token from the ServiceNow instance."), }, "oauth_token_endpoint": { Type: schema.TypeString, diff --git a/pkg/resources/custom_diffs.go b/pkg/resources/custom_diffs.go index 872c7ed24f..eb33b246b7 100644 --- a/pkg/resources/custom_diffs.go +++ b/pkg/resources/custom_diffs.go @@ -275,7 +275,7 @@ func RecreateWhenStreamIsStale() schema.CustomizeDiffFunc { func RecreateWhenResourceBoolFieldChangedExternally(boolField string, wantValue bool) schema.CustomizeDiffFunc { return func(_ context.Context, diff *schema.ResourceDiff, _ interface{}) error { if n := diff.Get(boolField); n != nil { - logging.DebugLogger.Printf("[DEBUG] new external value for %v: %v\n", boolField, n.(bool)) + logging.DebugLogger.Printf("[DEBUG] new external value for %v: %v, recreating the resource...\n", boolField, n.(bool)) if n.(bool) != wantValue { return errors.Join(diff.SetNew(boolField, wantValue), diff.ForceNew(boolField)) diff --git a/pkg/resources/database.go b/pkg/resources/database.go index b9b0935bcc..2bd4784e78 100644 --- a/pkg/resources/database.go +++ b/pkg/resources/database.go @@ -9,6 +9,7 @@ import ( "time" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/provider/resources" + providerresources "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/provider/resources" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/internal/collections" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/internal/util" @@ -61,7 +62,7 @@ var databaseSchema = map[string]*schema.Schema{ Type: schema.TypeString, Required: true, // TODO(SNOW-1438810): Add account identifier validator - Description: "Specifies account identifier for which replication should be enabled. The account identifiers should be in the form of `\"\".\"\"`.", + Description: relatedResourceDescription("Specifies account identifier for which replication should be enabled. 
The account identifiers should be in the form of `\"<organization_name>\".\"<account_name>\"`.", providerresources.Account), }, "with_failover": { Type: schema.TypeBool, @@ -477,6 +478,7 @@ func DeleteDatabase(ctx context.Context, d *schema.ResourceData, meta any) diag. return diag.FromErr(err) } + // TODO(SNOW-1818849): unassign network policies inside the database before dropping err = client.Databases.Drop(ctx, id, &sdk.DropDatabaseOptions{ IfExists: sdk.Bool(true), }) diff --git a/pkg/resources/database_commons.go b/pkg/resources/database_commons.go index ab40bc7085..0b12163e17 100644 --- a/pkg/resources/database_commons.go +++ b/pkg/resources/database_commons.go @@ -89,19 +89,15 @@ func init() { Name: sdk.ObjectParameterLogLevel, Type: schema.TypeString, Description: fmt.Sprintf("Specifies the severity level of messages that should be ingested and made available in the active event table. Valid options are: %v. Messages at the specified level (and at more severe levels) are ingested. For more information, see [LOG_LEVEL](https://docs.snowflake.com/en/sql-reference/parameters.html#label-log-level).", sdk.AsStringList(sdk.AllLogLevels)), - ValidateDiag: StringInSlice(sdk.AsStringList(sdk.AllLogLevels), true), - DiffSuppress: func(k, oldValue, newValue string, d *schema.ResourceData) bool { - return strings.EqualFold(oldValue, newValue) - }, + ValidateDiag: sdkValidation(sdk.ToLogLevel), + DiffSuppress: NormalizeAndCompare(sdk.ToLogLevel), }, { Name: sdk.ObjectParameterTraceLevel, Type: schema.TypeString, Description: fmt.Sprintf("Controls how trace events are ingested into the event table. Valid options are: %v. For information about levels, see [TRACE_LEVEL](https://docs.snowflake.com/en/sql-reference/parameters.html#label-trace-level).", sdk.AsStringList(sdk.AllTraceLevels)), - ValidateDiag: StringInSlice(sdk.AsStringList(sdk.AllTraceLevels), true), - DiffSuppress: func(k, oldValue, newValue string, d *schema.ResourceData) bool { - return strings.EqualFold(oldValue, newValue) - }, + ValidateDiag: sdkValidation(sdk.ToTraceLevel), + DiffSuppress: NormalizeAndCompare(sdk.ToTraceLevel), }, { Name: sdk.ObjectParameterMaxDataExtensionTimeInDays, @@ -118,10 +114,8 @@ func init() { Name: sdk.ObjectParameterStorageSerializationPolicy, Type: schema.TypeString, Description: fmt.Sprintf("The storage serialization policy for Iceberg tables that use Snowflake as the catalog. Valid options are: %v. COMPATIBLE: Snowflake performs encoding and compression of data files that ensures interoperability with third-party compute engines. OPTIMIZED: Snowflake performs encoding and compression of data files that ensures the best table performance within Snowflake.
For more information, see [STORAGE_SERIALIZATION_POLICY](https://docs.snowflake.com/en/sql-reference/parameters#storage-serialization-policy).", sdk.AsStringList(sdk.AllStorageSerializationPolicies)), - ValidateDiag: StringInSlice(sdk.AsStringList(sdk.AllStorageSerializationPolicies), true), - DiffSuppress: func(k, oldValue, newValue string, d *schema.ResourceData) bool { - return strings.EqualFold(oldValue, newValue) - }, + ValidateDiag: sdkValidation(sdk.ToStorageSerializationPolicy), + DiffSuppress: NormalizeAndCompare(sdk.ToStorageSerializationPolicy), }, { Name: sdk.ObjectParameterSuspendTaskAfterNumFailures, diff --git a/pkg/resources/database_role.go b/pkg/resources/database_role.go index 8888c01f1d..86d9ab84e4 100644 --- a/pkg/resources/database_role.go +++ b/pkg/resources/database_role.go @@ -51,11 +51,15 @@ var databaseRoleSchema = map[string]*schema.Schema{ func DatabaseRole() *schema.Resource { return &schema.Resource{ + SchemaVersion: 1, + CreateContext: TrackingCreateWrapper(resources.DatabaseRole, CreateDatabaseRole), ReadContext: TrackingReadWrapper(resources.DatabaseRole, ReadDatabaseRole), UpdateContext: TrackingUpdateWrapper(resources.DatabaseRole, UpdateDatabaseRole), DeleteContext: TrackingDeleteWrapper(resources.DatabaseRole, DeleteDatabaseRole), + Description: "Resource used to manage database roles. For more information, check [database roles documentation](https://docs.snowflake.com/en/sql-reference/sql/create-database-role).", + Schema: databaseRoleSchema, Importer: &schema.ResourceImporter{ StateContext: TrackingImportWrapper(resources.DatabaseRole, ImportName[sdk.DatabaseObjectIdentifier]), }, CustomizeDiff: TrackingCustomDiffWrapper(resources.DatabaseRole, customdiff.All( ComputedIfAnyAttributeChanged(databaseRoleSchema, ShowOutputAttributeName, "comment", "name"), + ComputedIfAnyAttributeChanged(databaseRoleSchema, FullyQualifiedNameAttributeName, "name"), )), - SchemaVersion: 1, StateUpgraders: []schema.StateUpgrader{ { Version: 0, diff --git a/pkg/resources/diff_suppressions.go b/pkg/resources/diff_suppressions.go index 14efa760b2..4e80bbf0e4 100644 --- a/pkg/resources/diff_suppressions.go +++ b/pkg/resources/diff_suppressions.go @@ -265,6 +265,27 @@ func IgnoreNewEmptyListOrSubfields(ignoredSubfields ...string) schema.SchemaDiff } } +// IgnoreMatchingColumnNameAndMaskingPolicyUsingFirstElem ignores a diff when the first element of USING matches the column name.
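+// For example (illustrative column name): a view column "address" configured with a
+// masking policy but without USING gets USING ("address") reported back by Snowflake;
+// the first element equals the column name, so this function suppresses the diff.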
+// see USING section in https://docs.snowflake.com/en/sql-reference/sql/create-view#optional-parameters +func IgnoreMatchingColumnNameAndMaskingPolicyUsingFirstElem() schema.SchemaDiffSuppressFunc { + return func(k, old, new string, d *schema.ResourceData) bool { + // suppress diff when the name of the column matches the name of using + parts := strings.SplitN(k, ".", 6) + if len(parts) < 6 { + log.Printf("[DEBUG] invalid resource key: %s", parts) + return false + } + // key is element count + if parts[5] == "#" && old == "1" && new == "0" { + return true + } + colNameKey := strings.Join([]string{parts[0], parts[1], "column_name"}, ".") + colName := d.Get(colNameKey).(string) + + return new == "" && old == colName + } +} + func ignoreTrimSpaceSuppressFunc(_, old, new string, _ *schema.ResourceData) bool { return strings.TrimSpace(old) == strings.TrimSpace(new) } diff --git a/pkg/resources/diff_suppressions_test.go b/pkg/resources/diff_suppressions_test.go index 7c54f03938..e34a75114d 100644 --- a/pkg/resources/diff_suppressions_test.go +++ b/pkg/resources/diff_suppressions_test.go @@ -204,3 +204,85 @@ func Test_ignoreNewEmptyList(t *testing.T) { }) } } + +func Test_IgnoreMatchingColumnNameAndMaskingPolicyUsingFirstElem(t *testing.T) { + resourceSchema := map[string]*schema.Schema{ + "column": { + Type: schema.TypeList, + Optional: true, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "column_name": { + Type: schema.TypeString, + Required: true, + }, + "masking_policy": { + Type: schema.TypeList, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "using": { + Type: schema.TypeList, + Optional: true, + Elem: &schema.Schema{ + Type: schema.TypeString, + }, + }, + }, + }, + }, + }, + }, + }, + } + resourceData := func(using ...any) map[string]any { + return map[string]any{ + "column": []any{ + map[string]any{ + "column_name": "foo", + "masking_policy": []any{ + map[string]any{ + "using": using, + }, + }, + }, + }, + } + } + tests := []struct { + name string + key string + old string + new string + resourceData *schema.ResourceData + wantSuppress bool + }{ + { + name: "suppress when USING is not specified in the config, but is in the state - check count", + key: "column.0.masking_policy.0.using.#", + old: "1", + new: "0", + resourceData: schema.TestResourceDataRaw(t, resourceSchema, resourceData("foo")), + wantSuppress: true, + }, + { + name: "suppress when USING is not specified in the config, but is in the state - check elem", + key: "column.0.masking_policy.0.using.0", + old: "foo", + new: "", + resourceData: schema.TestResourceDataRaw(t, resourceSchema, resourceData("foo")), + wantSuppress: true, + }, + { + name: "do not suppress when there is column name mismatch", + key: "column.0.masking_policy.0.using.0", + old: "foo", + new: "bar", + resourceData: schema.TestResourceDataRaw(t, resourceSchema, resourceData("foo")), + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + require.Equal(t, tt.wantSuppress, resources.IgnoreMatchingColumnNameAndMaskingPolicyUsingFirstElem()(tt.key, tt.old, tt.new, tt.resourceData)) + }) + } +} diff --git a/pkg/resources/doc_helpers.go b/pkg/resources/doc_helpers.go index eec39dcf4a..eb437015f9 100644 --- a/pkg/resources/doc_helpers.go +++ b/pkg/resources/doc_helpers.go @@ -5,6 +5,7 @@ import ( "strings" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/internal/provider/docs" + providerresources "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/provider/resources" ) func 
possibleValuesListed[T ~string | ~int](values []T) string { @@ -28,11 +29,11 @@ func externalChangesNotDetectedFieldDescription(description string) string { } func withPrivilegedRolesDescription(description, paramName string) string { - return fmt.Sprintf(`%s By default, this list includes the ACCOUNTADMIN, ORGADMIN and SECURITYADMIN roles. To remove these privileged roles from the list, use the ALTER ACCOUNT command to set the %s account parameter to FALSE. `, description, paramName) + return fmt.Sprintf(`%s By default, this list includes the ACCOUNTADMIN, ORGADMIN and SECURITYADMIN roles. To remove these privileged roles from the list, use the ALTER ACCOUNT command to set the %s account parameter to FALSE.`, description, paramName) } func blocklistedCharactersFieldDescription(description string) string { - return fmt.Sprintf(`%s Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: %s`, description, characterList([]rune{'|', '.', '"'})) + return fmt.Sprintf(`%s Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: %s.`, description, characterList([]rune{'|', '.', '"'})) } func diffSuppressStatementFieldDescription(description string) string { @@ -44,5 +45,13 @@ func dataTypeFieldDescription(description string) string { } func deprecatedResourceDescription(alternatives ...string) string { - return fmt.Sprintf(`This resource is deprecated and will be removed in a future major version release. Please use one of the new resources instead: %s`, possibleValuesListed(alternatives)) + return fmt.Sprintf(`This resource is deprecated and will be removed in a future major version release. Please use one of the new resources instead: %s.`, possibleValuesListed(alternatives)) +} + +func copyGrantsDescription(description string) string { + return fmt.Sprintf("%s This is used when the provider detects changes for fields that can not be changed by ALTER. 
This value will not have any effect when creating a new object with Terraform.", description) +} + +func relatedResourceDescription(description string, resource providerresources.Resource) string { + return fmt.Sprintf(`%s For more information about this resource, see [docs](./%s).`, description, strings.TrimPrefix(resource.String(), "snowflake_")) } diff --git a/pkg/resources/external_oauth_integration.go b/pkg/resources/external_oauth_integration.go index 8ec52d3327..785efca961 100644 --- a/pkg/resources/external_oauth_integration.go +++ b/pkg/resources/external_oauth_integration.go @@ -88,7 +88,7 @@ var externalOauthIntegrationSchema = map[string]*schema.Schema{ Type: schema.TypeSet, Elem: &schema.Schema{Type: schema.TypeString}, Optional: true, - Description: withPrivilegedRolesDescription("Specifies the list of roles that a client cannot set as the primary role.", string(sdk.AccountParameterExternalOAuthAddPrivilegedRolesToBlockedList)), + Description: relatedResourceDescription(withPrivilegedRolesDescription("Specifies the list of roles that a client cannot set as the primary role.", string(sdk.AccountParameterExternalOAuthAddPrivilegedRolesToBlockedList)), resources.AccountRole), DiffSuppressFunc: IgnoreValuesFromSetIfParamSet("external_oauth_blocked_roles_list", string(sdk.AccountParameterExternalOAuthAddPrivilegedRolesToBlockedList), privilegedRoles), ConflictsWith: []string{"external_oauth_allowed_roles_list"}, }, @@ -96,7 +96,7 @@ var externalOauthIntegrationSchema = map[string]*schema.Schema{ Type: schema.TypeSet, Elem: &schema.Schema{Type: schema.TypeString}, Optional: true, - Description: "Specifies the list of roles that the client can set as the primary role.", + Description: relatedResourceDescription("Specifies the list of roles that the client can set as the primary role.", resources.AccountRole), ConflictsWith: []string{"external_oauth_blocked_roles_list"}, }, "external_oauth_audience_list": { diff --git a/pkg/resources/grant_account_role.go b/pkg/resources/grant_account_role.go index ce02dc6f9c..1fd010f3f7 100644 --- a/pkg/resources/grant_account_role.go +++ b/pkg/resources/grant_account_role.go @@ -21,14 +21,14 @@ var grantAccountRoleSchema = map[string]*schema.Schema{ "role_name": { Type: schema.TypeString, Required: true, - Description: "The fully qualified name of the role which will be granted to the user or parent role.", + Description: relatedResourceDescription("The fully qualified name of the role which will be granted to the user or parent role.", resources.AccountRole), ForceNew: true, ValidateDiagFunc: IsValidIdentifier[sdk.AccountObjectIdentifier](), }, "user_name": { Type: schema.TypeString, Optional: true, - Description: "The fully qualified name of the user on which specified role will be granted.", + Description: relatedResourceDescription("The fully qualified name of the user on which specified role will be granted.", resources.User), ForceNew: true, ValidateDiagFunc: IsValidIdentifier[sdk.AccountObjectIdentifier](), ExactlyOneOf: []string{ @@ -39,7 +39,7 @@ var grantAccountRoleSchema = map[string]*schema.Schema{ "parent_role_name": { Type: schema.TypeString, Optional: true, - Description: "The fully qualified name of the parent role which will create a parent-child relationship between the roles.", + Description: relatedResourceDescription("The fully qualified name of the parent role which will create a parent-child relationship between the roles.", resources.AccountRole), ForceNew: true, ValidateDiagFunc: 
IsValidIdentifier[sdk.AccountObjectIdentifier](), ExactlyOneOf: []string{ diff --git a/pkg/resources/grant_application_role.go b/pkg/resources/grant_application_role.go index d1f12ebf54..952649e9d6 100644 --- a/pkg/resources/grant_application_role.go +++ b/pkg/resources/grant_application_role.go @@ -28,7 +28,7 @@ var grantApplicationRoleSchema = map[string]*schema.Schema{ "parent_account_role_name": { Type: schema.TypeString, Optional: true, - Description: "The fully qualified name of the account role on which application role will be granted.", + Description: relatedResourceDescription("The fully qualified name of the account role on which application role will be granted.", resources.AccountRole), ForceNew: true, ValidateDiagFunc: IsValidIdentifier[sdk.AccountObjectIdentifier](), DiffSuppressFunc: suppressIdentifierQuoting, diff --git a/pkg/resources/grant_database_role.go b/pkg/resources/grant_database_role.go index e67946fc5d..226cba801b 100644 --- a/pkg/resources/grant_database_role.go +++ b/pkg/resources/grant_database_role.go @@ -20,7 +20,7 @@ var grantDatabaseRoleSchema = map[string]*schema.Schema{ "database_role_name": { Type: schema.TypeString, Required: true, - Description: "The fully qualified name of the database role which will be granted to share or parent role.", + Description: relatedResourceDescription("The fully qualified name of the database role which will be granted to share or parent role.", resources.DatabaseRole), ForceNew: true, ValidateDiagFunc: IsValidIdentifier[sdk.DatabaseObjectIdentifier](), DiffSuppressFunc: suppressIdentifierQuoting, @@ -28,7 +28,7 @@ var grantDatabaseRoleSchema = map[string]*schema.Schema{ "parent_role_name": { Type: schema.TypeString, Optional: true, - Description: "The fully qualified name of the parent account role which will create a parent-child relationship between the roles.", + Description: relatedResourceDescription("The fully qualified name of the parent account role which will create a parent-child relationship between the roles.", resources.AccountRole), ForceNew: true, ValidateDiagFunc: IsValidIdentifier[sdk.AccountObjectIdentifier](), DiffSuppressFunc: suppressIdentifierQuoting, @@ -41,7 +41,7 @@ var grantDatabaseRoleSchema = map[string]*schema.Schema{ "parent_database_role_name": { Type: schema.TypeString, Optional: true, - Description: "The fully qualified name of the parent database role which will create a parent-child relationship between the roles.", + Description: relatedResourceDescription("The fully qualified name of the parent database role which will create a parent-child relationship between the roles.", resources.DatabaseRole), ForceNew: true, ValidateDiagFunc: IsValidIdentifier[sdk.DatabaseObjectIdentifier](), DiffSuppressFunc: suppressIdentifierQuoting, @@ -54,7 +54,7 @@ var grantDatabaseRoleSchema = map[string]*schema.Schema{ "share_name": { Type: schema.TypeString, Optional: true, - Description: "The fully qualified name of the share on which privileges will be granted.", + Description: relatedResourceDescription("The fully qualified name of the share on which privileges will be granted.", resources.Share), ForceNew: true, ValidateDiagFunc: IsValidIdentifier[sdk.AccountObjectIdentifier](), DiffSuppressFunc: suppressIdentifierQuoting, diff --git a/pkg/resources/grant_ownership.go b/pkg/resources/grant_ownership.go index 24077bcd48..887d991613 100644 --- a/pkg/resources/grant_ownership.go +++ b/pkg/resources/grant_ownership.go @@ -21,7 +21,7 @@ var grantOwnershipSchema = map[string]*schema.Schema{ Type: 
schema.TypeString, Optional: true, ForceNew: true, - Description: "The fully qualified name of the account role to which privileges will be granted.", + Description: relatedResourceDescription("The fully qualified name of the account role to which privileges will be granted.", resources.AccountRole), ValidateDiagFunc: IsValidIdentifier[sdk.AccountObjectIdentifier](), DiffSuppressFunc: suppressIdentifierQuoting, ExactlyOneOf: []string{ @@ -33,7 +33,7 @@ var grantOwnershipSchema = map[string]*schema.Schema{ Type: schema.TypeString, Optional: true, ForceNew: true, - Description: "The fully qualified name of the database role to which privileges will be granted.", + Description: relatedResourceDescription("The fully qualified name of the database role to which privileges will be granted.", resources.DatabaseRole), ValidateDiagFunc: IsValidIdentifier[sdk.DatabaseObjectIdentifier](), DiffSuppressFunc: suppressIdentifierQuoting, ExactlyOneOf: []string{ @@ -132,7 +132,7 @@ func grantOwnershipBulkOperationSchema(branchName string) map[string]*schema.Sch Type: schema.TypeString, Optional: true, ForceNew: true, - Description: "The fully qualified name of the database.", + Description: relatedResourceDescription("The fully qualified name of the database.", resources.Database), ValidateDiagFunc: IsValidIdentifier[sdk.AccountObjectIdentifier](), DiffSuppressFunc: suppressIdentifierQuoting, ExactlyOneOf: []string{ @@ -144,7 +144,7 @@ func grantOwnershipBulkOperationSchema(branchName string) map[string]*schema.Sch Type: schema.TypeString, Optional: true, ForceNew: true, - Description: "The fully qualified name of the schema.", + Description: relatedResourceDescription("The fully qualified name of the schema.", resources.Schema), ValidateDiagFunc: IsValidIdentifier[sdk.DatabaseObjectIdentifier](), DiffSuppressFunc: suppressIdentifierQuoting, ExactlyOneOf: []string{ diff --git a/pkg/resources/grant_privileges_to_account_role.go b/pkg/resources/grant_privileges_to_account_role.go index ccce92e80b..33a3860cc8 100644 --- a/pkg/resources/grant_privileges_to_account_role.go +++ b/pkg/resources/grant_privileges_to_account_role.go @@ -25,7 +25,7 @@ var grantPrivilegesToAccountRoleSchema = map[string]*schema.Schema{ Type: schema.TypeString, Required: true, ForceNew: true, - Description: "The fully qualified name of the account role to which privileges will be granted.", + Description: relatedResourceDescription("The fully qualified name of the account role to which privileges will be granted.", resources.AccountRole), ValidateDiagFunc: IsValidIdentifier[sdk.AccountObjectIdentifier](), DiffSuppressFunc: suppressIdentifierQuoting, }, @@ -36,7 +36,7 @@ var grantPrivilegesToAccountRoleSchema = map[string]*schema.Schema{ "privileges": { Type: schema.TypeSet, Optional: true, - Description: "The privileges to grant on the account role.", + Description: "The privileges to grant on the account role. 
This field is case-sensitive; use only upper-case privileges.", ExactlyOneOf: []string{ "privileges", "all_privileges", diff --git a/pkg/resources/grant_privileges_to_database_role.go b/pkg/resources/grant_privileges_to_database_role.go index 04e4375c91..ec6ca642e1 100644 --- a/pkg/resources/grant_privileges_to_database_role.go +++ b/pkg/resources/grant_privileges_to_database_role.go @@ -25,7 +25,7 @@ var grantPrivilegesToDatabaseRoleSchema = map[string]*schema.Schema{ Type: schema.TypeString, Required: true, ForceNew: true, - Description: "The fully qualified name of the database role to which privileges will be granted.", + Description: relatedResourceDescription("The fully qualified name of the database role to which privileges will be granted.", resources.DatabaseRole), ValidateDiagFunc: IsValidIdentifier[sdk.DatabaseObjectIdentifier](), DiffSuppressFunc: suppressIdentifierQuoting, }, @@ -75,7 +75,7 @@ var grantPrivilegesToDatabaseRoleSchema = map[string]*schema.Schema{ Type: schema.TypeString, Optional: true, ForceNew: true, - Description: "The fully qualified name of the database on which privileges will be granted.", + Description: relatedResourceDescription("The fully qualified name of the database on which privileges will be granted.", resources.Database), ValidateDiagFunc: IsValidIdentifier[sdk.AccountObjectIdentifier](), DiffSuppressFunc: suppressIdentifierQuoting, ExactlyOneOf: []string{ diff --git a/pkg/resources/grant_privileges_to_share.go b/pkg/resources/grant_privileges_to_share.go index 30d4cc2e71..c83ef72137 100644 --- a/pkg/resources/grant_privileges_to_share.go +++ b/pkg/resources/grant_privileges_to_share.go @@ -31,7 +31,7 @@ var grantPrivilegesToShareSchema = map[string]*schema.Schema{ Type: schema.TypeString, Required: true, ForceNew: true, - Description: "The fully qualified name of the share on which privileges will be granted.", + Description: relatedResourceDescription("The fully qualified name of the share on which privileges will be granted.", resources.Share), ValidateDiagFunc: IsValidIdentifier[sdk.AccountObjectIdentifier](), DiffSuppressFunc: suppressIdentifierQuoting, }, @@ -45,7 +45,7 @@ var grantPrivilegesToShareSchema = map[string]*schema.Schema{ Type: schema.TypeString, Optional: true, ForceNew: true, - Description: "The fully qualified name of the database on which privileges will be granted.", + Description: relatedResourceDescription("The fully qualified name of the database on which privileges will be granted.", resources.Database), ValidateDiagFunc: IsValidIdentifier[sdk.AccountObjectIdentifier](), DiffSuppressFunc: suppressIdentifierQuoting, ExactlyOneOf: grantPrivilegesToShareGrantExactlyOneOfValidation, @@ -54,7 +54,7 @@ var grantPrivilegesToShareSchema = map[string]*schema.Schema{ Type: schema.TypeString, Optional: true, ForceNew: true, - Description: "The fully qualified name of the schema on which privileges will be granted.", + Description: relatedResourceDescription("The fully qualified name of the schema on which privileges will be granted.", resources.Schema), ValidateDiagFunc: IsValidIdentifier[sdk.DatabaseObjectIdentifier](), DiffSuppressFunc: suppressIdentifierQuoting, ExactlyOneOf: grantPrivilegesToShareGrantExactlyOneOfValidation, @@ -63,7 +63,7 @@ var grantPrivilegesToShareSchema = map[string]*schema.Schema{ Type: schema.TypeString, Optional: true, ForceNew: true, - Description: "The fully qualified name of the table on which privileges will be granted.", + Description: relatedResourceDescription("The fully qualified name of the 
table on which privileges will be granted.", resources.Table), ValidateDiagFunc: IsValidIdentifier[sdk.SchemaObjectIdentifier](), DiffSuppressFunc: suppressIdentifierQuoting, ExactlyOneOf: grantPrivilegesToShareGrantExactlyOneOfValidation, @@ -81,7 +81,7 @@ var grantPrivilegesToShareSchema = map[string]*schema.Schema{ Type: schema.TypeString, Optional: true, ForceNew: true, - Description: "The fully qualified name of the tag on which privileges will be granted.", + Description: relatedResourceDescription("The fully qualified name of the tag on which privileges will be granted.", resources.Tag), ValidateDiagFunc: IsValidIdentifier[sdk.SchemaObjectIdentifier](), DiffSuppressFunc: suppressIdentifierQuoting, ExactlyOneOf: grantPrivilegesToShareGrantExactlyOneOfValidation, @@ -90,7 +90,7 @@ var grantPrivilegesToShareSchema = map[string]*schema.Schema{ Type: schema.TypeString, Optional: true, ForceNew: true, - Description: "The fully qualified name of the view on which privileges will be granted.", + Description: relatedResourceDescription("The fully qualified name of the view on which privileges will be granted.", resources.View), ValidateDiagFunc: IsValidIdentifier[sdk.SchemaObjectIdentifier](), DiffSuppressFunc: suppressIdentifierQuoting, ExactlyOneOf: grantPrivilegesToShareGrantExactlyOneOfValidation, diff --git a/pkg/resources/network_policy.go b/pkg/resources/network_policy.go index 26ab6a1b31..318df5001b 100644 --- a/pkg/resources/network_policy.go +++ b/pkg/resources/network_policy.go @@ -34,7 +34,7 @@ var networkPolicySchema = map[string]*schema.Schema{ }, DiffSuppressFunc: NormalizeAndCompareIdentifiersInSet("allowed_network_rule_list"), Optional: true, - Description: "Specifies a list of fully qualified network rules that contain the network identifiers that are allowed access to Snowflake.", + Description: relatedResourceDescription("Specifies a list of fully qualified network rules that contain the network identifiers that are allowed access to Snowflake.", resources.NetworkRule), }, "blocked_network_rule_list": { Type: schema.TypeSet, @@ -44,7 +44,7 @@ var networkPolicySchema = map[string]*schema.Schema{ }, DiffSuppressFunc: NormalizeAndCompareIdentifiersInSet("blocked_network_rule_list"), Optional: true, - Description: "Specifies a list of fully qualified network rules that contain the network identifiers that are denied access to Snowflake.", + Description: relatedResourceDescription("Specifies a list of fully qualified network rules that contain the network identifiers that are denied access to Snowflake.", resources.NetworkRule), }, "allowed_ip_list": { Type: schema.TypeSet, diff --git a/pkg/resources/network_rule.go b/pkg/resources/network_rule.go index cc965da040..5ba1a49c55 100644 --- a/pkg/resources/network_rule.go +++ b/pkg/resources/network_rule.go @@ -223,6 +223,7 @@ func DeleteContextNetworkRule(ctx context.Context, d *schema.ResourceData, meta client := meta.(*provider.Context).Client id := helpers.DecodeSnowflakeID(name).(sdk.SchemaObjectIdentifier) + // TODO(SNOW-1818849): unassign network rules before dropping if err := client.NetworkRules.Drop(ctx, sdk.NewDropNetworkRuleRequest(id).WithIfExists(sdk.Bool(true))); err != nil { diag.FromErr(err) } diff --git a/pkg/resources/oauth_integration_for_custom_clients.go b/pkg/resources/oauth_integration_for_custom_clients.go index 8f737d8dc6..0d3b6e6040 100644 --- a/pkg/resources/oauth_integration_for_custom_clients.go +++ b/pkg/resources/oauth_integration_for_custom_clients.go @@ -81,7 +81,7 @@ var 
oauthIntegrationForCustomClientsSchema = map[string]*schema.Schema{ ValidateDiagFunc: IsValidIdentifier[sdk.AccountObjectIdentifier](), }, Optional: true, - Description: "A set of Snowflake roles that a user does not need to explicitly consent to using after authenticating.", + Description: relatedResourceDescription("A set of Snowflake roles that a user does not need to explicitly consent to using after authenticating.", resources.AccountRole), }, "blocked_roles_list": { Type: schema.TypeSet, @@ -91,7 +91,7 @@ var oauthIntegrationForCustomClientsSchema = map[string]*schema.Schema{ }, // TODO(SNOW-1517937): Check if can make optional Required: true, - Description: "A set of Snowflake roles that a user cannot explicitly consent to using after authenticating.", + Description: relatedResourceDescription("A set of Snowflake roles that a user cannot explicitly consent to using after authenticating.", resources.AccountRole), }, "oauth_issue_refresh_tokens": { Type: schema.TypeString, @@ -111,7 +111,7 @@ var oauthIntegrationForCustomClientsSchema = map[string]*schema.Schema{ "network_policy": { Type: schema.TypeString, Optional: true, - Description: "Specifies an existing network policy. This network policy controls network traffic that is attempting to exchange an authorization code for an access or refresh token or to use a refresh token to obtain a new access token.", + Description: relatedResourceDescription("Specifies an existing network policy. This network policy controls network traffic that is attempting to exchange an authorization code for an access or refresh token or to use a refresh token to obtain a new access token.", resources.NetworkPolicy), ValidateDiagFunc: IsValidIdentifier[sdk.AccountObjectIdentifier](), DiffSuppressFunc: suppressIdentifierQuoting, }, @@ -119,13 +119,13 @@ var oauthIntegrationForCustomClientsSchema = map[string]*schema.Schema{ Type: schema.TypeString, Optional: true, DiffSuppressFunc: ignoreTrimSpaceSuppressFunc, - Description: "Specifies a Base64-encoded RSA public key, without the -----BEGIN PUBLIC KEY----- and -----END PUBLIC KEY----- headers. External changes for this field won't be detected. In case you want to apply external changes, you can re-create the resource using `terraform taint`.", + Description: externalChangesNotDetectedFieldDescription("Specifies a Base64-encoded RSA public key, without the -----BEGIN PUBLIC KEY----- and -----END PUBLIC KEY----- headers."), }, "oauth_client_rsa_public_key_2": { Type: schema.TypeString, Optional: true, DiffSuppressFunc: ignoreTrimSpaceSuppressFunc, - Description: "Specifies a Base64-encoded RSA public key, without the -----BEGIN PUBLIC KEY----- and -----END PUBLIC KEY----- headers. External changes for this field won't be detected. 
In case you want to apply external changes, you can re-create the resource using `terraform taint`.", + Description: externalChangesNotDetectedFieldDescription("Specifies a Base64-encoded RSA public key, without the -----BEGIN PUBLIC KEY----- and -----END PUBLIC KEY----- headers."), }, "comment": { Type: schema.TypeString, diff --git a/pkg/resources/oauth_integration_for_partner_applications.go b/pkg/resources/oauth_integration_for_partner_applications.go index 3fc0d5a586..7781c1973f 100644 --- a/pkg/resources/oauth_integration_for_partner_applications.go +++ b/pkg/resources/oauth_integration_for_partner_applications.go @@ -83,7 +83,7 @@ var oauthIntegrationForPartnerApplicationsSchema = map[string]*schema.Schema{ }, // TODO(SNOW-1517937): Check if can make optional Required: true, - Description: "A set of Snowflake roles that a user cannot explicitly consent to using after authenticating.", + Description: relatedResourceDescription("A set of Snowflake roles that a user cannot explicitly consent to using after authenticating.", resources.AccountRole), DiffSuppressFunc: IgnoreChangeToCurrentSnowflakeListValueInDescribe("blocked_roles_list"), }, "comment": { diff --git a/pkg/resources/primary_connection.go b/pkg/resources/primary_connection.go index 3dc4f8444b..3c4487d855 100644 --- a/pkg/resources/primary_connection.go +++ b/pkg/resources/primary_connection.go @@ -32,7 +32,7 @@ var primaryConnectionSchema = map[string]*schema.Schema{ "enable_failover_to_accounts": { Type: schema.TypeList, Optional: true, - Description: "Enables failover for given connection to provided accounts. Specifies a list of accounts in your organization where a secondary connection for this primary connection can be promoted to serve as the primary connection. Include your organization name for each account in the list.", + Description: relatedResourceDescription("Enables failover for given connection to provided accounts. Specifies a list of accounts in your organization where a secondary connection for this primary connection can be promoted to serve as the primary connection. 
Include your organization name for each account in the list.", resources.Account), Elem: &schema.Schema{ Type: schema.TypeString, DiffSuppressFunc: suppressIdentifierQuoting, diff --git a/pkg/resources/resource_monitor.go b/pkg/resources/resource_monitor.go index 53c07c77e4..ab9f9c7082 100644 --- a/pkg/resources/resource_monitor.go +++ b/pkg/resources/resource_monitor.go @@ -30,9 +30,10 @@ var resourceMonitorSchema = map[string]*schema.Schema{ "notify_users": { Type: schema.TypeSet, Optional: true, - Description: "Specifies the list of users (their identifiers) to receive email notifications on resource monitors.", + Description: relatedResourceDescription("Specifies the list of users (their identifiers) to receive email notifications on resource monitors.", resources.User), Elem: &schema.Schema{ - Type: schema.TypeString, + Type: schema.TypeString, + DiffSuppressFunc: suppressIdentifierQuoting, }, }, "credit_quota": { diff --git a/pkg/resources/saml2_integration.go b/pkg/resources/saml2_integration.go index cde54cfd63..b62f2317f7 100644 --- a/pkg/resources/saml2_integration.go +++ b/pkg/resources/saml2_integration.go @@ -161,7 +161,7 @@ func SAML2Integration() *schema.Resource { ReadContext: TrackingReadWrapper(resources.Saml2SecurityIntegration, ReadContextSAML2Integration(true)), UpdateContext: TrackingUpdateWrapper(resources.Saml2SecurityIntegration, UpdateContextSAML2Integration), DeleteContext: TrackingDeleteWrapper(resources.Saml2SecurityIntegration, DeleteContextSAM2LIntegration), - Description: "Resource used to manage saml2 security integration objects. For more information, check [security integrations documentation](https://docs.snowflake.com/en/sql-reference/sql/create-security-integration-saml2).", + Description: "Resource used to manage SAML2 security integration objects. For more information, check [security integrations documentation](https://docs.snowflake.com/en/sql-reference/sql/create-security-integration-saml2).", Schema: saml2IntegrationSchema, Importer: &schema.ResourceImporter{ diff --git a/pkg/resources/schema.go b/pkg/resources/schema.go index 66e4be7a28..32fd784e5d 100644 --- a/pkg/resources/schema.go +++ b/pkg/resources/schema.go @@ -25,13 +25,13 @@ var schemaSchema = map[string]*schema.Schema{ "name": { Type: schema.TypeString, Required: true, - Description: "Specifies the identifier for the schema; must be unique for the database in which the schema is created. When the name is `PUBLIC`, during creation the provider checks if this schema has already been created and, in such case, `ALTER` is used to match the desired state.", + Description: blocklistedCharactersFieldDescription("Specifies the identifier for the schema; must be unique for the database in which the schema is created. When the name is `PUBLIC`, during creation the provider checks if this schema has already been created and, in such case, `ALTER` is used to match the desired state."), DiffSuppressFunc: suppressIdentifierQuoting, }, "database": { Type: schema.TypeString, Required: true, - Description: "The database in which to create the schema.", + Description: blocklistedCharactersFieldDescription("The database in which to create the schema."), ForceNew: true, DiffSuppressFunc: suppressIdentifierQuoting, }, @@ -91,6 +91,8 @@ var schemaSchema = map[string]*schema.Schema{ // Schema returns a pointer to the resource representing a schema. 
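// The resource below is wired with SchemaVersion 2 and migrates older states through
// the StateUpgraders it declares. A minimal sketch of the upgrader shape (hypothetical
// example only, not the actual migration in this package):
//
//	func exampleUpgradeV0ToV1(ctx context.Context, rawState map[string]any, meta any) (map[string]any, error) {
//		// e.g. rewrite a legacy pipe-separated id ("db|name") to the current dotted form
//		if id, ok := rawState["id"].(string); ok {
//			rawState["id"] = strings.ReplaceAll(id, "|", ".")
//		}
//		return rawState, nil
//	}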
func Schema() *schema.Resource { return &schema.Resource{ + SchemaVersion: 2, + CreateContext: TrackingCreateWrapper(resources.Schema, CreateContextSchema), ReadContext: TrackingReadWrapper(resources.Schema, ReadContextSchema(true)), UpdateContext: TrackingUpdateWrapper(resources.Schema, UpdateContextSchema), @@ -110,7 +112,6 @@ func Schema() *schema.Resource { StateContext: TrackingImportWrapper(resources.Schema, ImportSchema), }, - SchemaVersion: 2, StateUpgraders: []schema.StateUpgrader{ { Version: 0, diff --git a/pkg/resources/scim_integration.go b/pkg/resources/scim_integration.go index bf97e28f1a..f5482200fa 100644 --- a/pkg/resources/scim_integration.go +++ b/pkg/resources/scim_integration.go @@ -41,8 +41,8 @@ var scimIntegrationSchema = map[string]*schema.Schema{ Required: true, ForceNew: true, Description: fmt.Sprintf("Specifies the client type for the scim integration. Valid options are: %v.", possibleValuesListed(sdk.AllScimSecurityIntegrationScimClients)), - ValidateDiagFunc: StringInSlice(sdk.AsStringList(sdk.AllScimSecurityIntegrationScimClients), true), - DiffSuppressFunc: ignoreCaseAndTrimSpaceSuppressFunc, + ValidateDiagFunc: sdkValidation(sdk.ToScimSecurityIntegrationScimClientOption), + DiffSuppressFunc: NormalizeAndCompare(sdk.ToScimSecurityIntegrationScimClientOption), }, "run_as_role": { Type: schema.TypeString, @@ -50,19 +50,14 @@ var scimIntegrationSchema = map[string]*schema.Schema{ ForceNew: true, Description: fmt.Sprintf("Specify the SCIM role in Snowflake that owns any users and roles that are imported from the identity provider into Snowflake using SCIM."+ " Provider assumes that the specified role is already provided. Valid options are: %v.", possibleValuesListed(sdk.AllScimSecurityIntegrationRunAsRoles)), - ValidateDiagFunc: StringInSlice(sdk.AsStringList(sdk.AllScimSecurityIntegrationRunAsRoles), true), - DiffSuppressFunc: func(k, old, new string, d *schema.ResourceData) bool { - normalize := func(s string) string { - return strings.ToUpper(strings.ReplaceAll(s, "-", "")) - } - return normalize(old) == normalize(new) - }, + ValidateDiagFunc: sdkValidation(sdk.ToScimSecurityIntegrationRunAsRoleOption), + DiffSuppressFunc: NormalizeAndCompare(sdk.ToScimSecurityIntegrationRunAsRoleOption), }, "network_policy": { Type: schema.TypeString, ValidateDiagFunc: IsValidIdentifier[sdk.AccountObjectIdentifier](), Optional: true, - Description: "Specifies an existing network policy that controls SCIM network traffic.", + Description: relatedResourceDescription("Specifies an existing network policy that controls SCIM network traffic.", resources.NetworkPolicy), DiffSuppressFunc: SuppressIfAny(suppressIdentifierQuoting, IgnoreChangeToCurrentSnowflakeListValueInDescribe("network_policy")), }, "sync_password": { diff --git a/pkg/resources/scim_integration_acceptance_test.go b/pkg/resources/scim_integration_acceptance_test.go index aa8f838cdc..277dbed973 100644 --- a/pkg/resources/scim_integration_acceptance_test.go +++ b/pkg/resources/scim_integration_acceptance_test.go @@ -282,7 +282,7 @@ func TestAcc_ScimIntegration_InvalidScimClient(t *testing.T) { { ConfigDirectory: acc.ConfigurationDirectory("TestAcc_ScimIntegration/complete"), ConfigVariables: m(), - ExpectError: regexp.MustCompile(`expected \[{{} scim_client}] to be one of \["OKTA" "AZURE" "GENERIC"], got invalid`), + ExpectError: regexp.MustCompile(`invalid ScimSecurityIntegrationScimClientOption: INVALID`), }, }, }) @@ -311,7 +311,7 @@ func TestAcc_ScimIntegration_InvalidRunAsRole(t *testing.T) { { ConfigDirectory: 
acc.ConfigurationDirectory("TestAcc_ScimIntegration/complete"), ConfigVariables: m(), - ExpectError: regexp.MustCompile(`expected \[{{} run_as_role}] to be one of \["OKTA_PROVISIONER" "AAD_PROVISIONER" "GENERIC_SCIM_PROVISIONER"], got invalid`), + ExpectError: regexp.MustCompile(`invalid ScimSecurityIntegrationRunAsRoleOption: INVALID`), }, }, }) diff --git a/pkg/resources/secondary_connection.go b/pkg/resources/secondary_connection.go index 6f9dbca91c..b948b106f1 100644 --- a/pkg/resources/secondary_connection.go +++ b/pkg/resources/secondary_connection.go @@ -33,7 +33,7 @@ var secondaryConnectionSchema = map[string]*schema.Schema{ Type: schema.TypeString, Required: true, ForceNew: true, - Description: "Specifies the identifier for a primary connection from which to create a replica (i.e. a secondary connection).", + Description: relatedResourceDescription("Specifies the identifier for a primary connection from which to create a replica (i.e. a secondary connection).", resources.PrimaryConnection), DiffSuppressFunc: suppressIdentifierQuoting, }, "comment": { diff --git a/pkg/resources/secondary_database.go b/pkg/resources/secondary_database.go index 448f9e5179..528a6ebf4d 100644 --- a/pkg/resources/secondary_database.go +++ b/pkg/resources/secondary_database.go @@ -28,7 +28,7 @@ var secondaryDatabaseSchema = map[string]*schema.Schema{ Type: schema.TypeString, Required: true, ForceNew: true, - Description: "A fully qualified path to a database to create a replica from. A fully qualified path follows the format of `\"\".\"\".\"\"`.", + Description: relatedResourceDescription("A fully qualified path to a database to create a replica from. A fully qualified path follows the format of `\"\".\"\".\"\"`.", resources.Database), // TODO(SNOW-1495079): Add validation when ExternalObjectIdentifier will be available in IsValidIdentifierDescription: "A fully qualified path to a database to create a replica from. 
DiffSuppressFunc: suppressIdentifierQuoting, }, @@ -237,6 +237,7 @@ func DeleteSecondaryDatabase(ctx context.Context, d *schema.ResourceData, meta a return diag.FromErr(err) } + // TODO(SNOW-1818849): unassign network policies inside the database before dropping err = client.Databases.Drop(ctx, id, &sdk.DropDatabaseOptions{ IfExists: sdk.Bool(true), }) diff --git a/pkg/resources/secret_with_oauth_authorization_code_grant.go b/pkg/resources/secret_with_oauth_authorization_code_grant.go index 6a9eaa85cf..e823b741df 100644 --- a/pkg/resources/secret_with_oauth_authorization_code_grant.go +++ b/pkg/resources/secret_with_oauth_authorization_code_grant.go @@ -36,7 +36,7 @@ var secretAuthorizationCodeGrantSchema = func() map[string]*schema.Schema { Type: schema.TypeString, ValidateDiagFunc: IsValidIdentifier[sdk.AccountObjectIdentifier](), Required: true, - Description: "Specifies the name value of the Snowflake security integration that connects Snowflake to an external service.", + Description: relatedResourceDescription("Specifies the name value of the Snowflake security integration that connects Snowflake to an external service.", resources.ApiAuthenticationIntegrationWithAuthorizationCodeGrant), DiffSuppressFunc: suppressIdentifierQuoting, }, } diff --git a/pkg/resources/secret_with_oauth_client_credentials.go b/pkg/resources/secret_with_oauth_client_credentials.go index 1df7c77feb..816385e3e2 100644 --- a/pkg/resources/secret_with_oauth_client_credentials.go +++ b/pkg/resources/secret_with_oauth_client_credentials.go @@ -24,7 +24,7 @@ var secretClientCredentialsSchema = func() map[string]*schema.Schema { Type: schema.TypeString, ValidateDiagFunc: IsValidIdentifier[sdk.AccountObjectIdentifier](), Required: true, - Description: "Specifies the name value of the Snowflake security integration that connects Snowflake to an external service.", + Description: relatedResourceDescription("Specifies the name value of the Snowflake security integration that connects Snowflake to an external service.", resources.ApiAuthenticationIntegrationWithClientCredentials), DiffSuppressFunc: suppressIdentifierQuoting, }, "oauth_scopes": { diff --git a/pkg/resources/shared_database.go b/pkg/resources/shared_database.go index 537bafb03a..50cc1d34bc 100644 --- a/pkg/resources/shared_database.go +++ b/pkg/resources/shared_database.go @@ -28,7 +28,7 @@ var sharedDatabaseSchema = map[string]*schema.Schema{ Type: schema.TypeString, Required: true, ForceNew: true, - Description: "A fully qualified path to a share from which the database will be created. A fully qualified path follows the format of `\"\".\"\".\"\"`.", + Description: relatedResourceDescription("A fully qualified path to a share from which the database will be created. A fully qualified path follows the format of `\"\".\"\".\"\"`.", resources.Share), // TODO(SNOW-1495079): Add validation when ExternalObjectIdentifier will be available in IsValidIdentifier
DiffSuppressFunc: suppressIdentifierQuoting, }, @@ -37,7 +37,7 @@ var sharedDatabaseSchema = map[string]*schema.Schema{ Optional: true, Description: "Specifies a comment for the database.", }, - // TODO(SNOW-1325381): Add it as an item to discuss and either remove or uncomment (and implement) it + // TODO(SNOW-1843347): Add it as an item to discuss and either remove or uncomment (and implement) it // "is_transient": { // Type: schema.TypeBool, // Optional: true, @@ -80,7 +80,7 @@ func CreateSharedDatabase(ctx context.Context, d *schema.ResourceData, meta any) } opts := &sdk.CreateSharedDatabaseOptions{ - // TODO(SNOW-1325381) + // TODO(SNOW-1843347) // Transient: GetPropertyAsPointer[bool](d, "is_transient"), Comment: GetConfigPropertyAsPointerAllowingZeroValue[string](d, "comment"), } @@ -179,7 +179,7 @@ func ReadSharedDatabase(ctx context.Context, d *schema.ResourceData, meta any) d } } - // TODO(SNOW-1325381) + // TODO(SNOW-1843347) // if err := d.Set("is_transient", database.Transient); err != nil { // return diag.FromErr(err) // } @@ -207,6 +207,7 @@ func DeleteSharedDatabase(ctx context.Context, d *schema.ResourceData, meta any) return diag.FromErr(err) } + // TODO(SNOW-1818849): unassign network policies inside the database before dropping err = client.Databases.Drop(ctx, id, &sdk.DropDatabaseOptions{ IfExists: sdk.Bool(true), }) diff --git a/pkg/resources/shared_database_acceptance_test.go b/pkg/resources/shared_database_acceptance_test.go index 5c05b8dce2..2108bfb776 100644 --- a/pkg/resources/shared_database_acceptance_test.go +++ b/pkg/resources/shared_database_acceptance_test.go @@ -262,9 +262,10 @@ func TestAcc_CreateSharedDatabase_InvalidValues(t *testing.T) { { ConfigVariables: configVariables, ConfigDirectory: acc.ConfigurationDirectory("TestAcc_SharedDatabase/complete"), - ExpectError: regexp.MustCompile(`(expected \[{{} log_level}\] to be one of \[\"TRACE\" \"DEBUG\" \"INFO\" \"WARN\" \"ERROR\" \"FATAL\" \"OFF\"\], got invalid_value)|` + `(expected \[{{} trace_level}\] to be one of \[\"ALWAYS\" \"ON_EVENT\" \"OFF\"\], got invalid_value)|` + `(expected \[{{} storage_serialization_policy}\] to be one of \[\"COMPATIBLE\" \"OPTIMIZED\"\], got invalid_value)`), + ExpectError: regexp.MustCompile(`(unknown log level: invalid_value)|` + `(unknown trace level: invalid_value)|` + `(unknown storage serialization policy: invalid_value)|` + `(invalid warehouse size:)`), }, }, }) diff --git a/pkg/resources/stream_common.go b/pkg/resources/stream_common.go index dde23c0ea8..1c6b039fe0 100644 --- a/pkg/resources/stream_common.go +++ b/pkg/resources/stream_common.go @@ -38,7 +38,7 @@ var streamCommonSchema = map[string]*schema.Schema{ Type: schema.TypeBool, Optional: true, Default: false, - Description: "Retains the access permissions from the original stream when a stream is recreated using the OR REPLACE clause. That is sometimes used when the provider detects changes for fields that can not be changed by ALTER. This value will not have any effect when creating a new stream.", + Description: copyGrantsDescription("Retains the access permissions from the original stream when a stream is recreated using the OR REPLACE clause."), // Changing ONLY copy grants should have no effect. It is only used as an "option" during CREATE OR REPLACE - when other attributes change, it's not an object state. There is no point in recreating the object when only this field is changed. 
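// A minimal sketch of the suppression applied below (illustrative only, assuming the
// helper keys off the resource id; the real IgnoreAfterCreation lives in this package):
//
//	func IgnoreAfterCreation(_, _, _ string, d *schema.ResourceData) bool {
//		// An empty id means the resource has not been created yet, so the diff is shown;
//		// after creation the value only matters for CREATE OR REPLACE, so it is suppressed.
//		return d.Id() != ""
//	}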
DiffSuppressFunc: IgnoreAfterCreation, }, diff --git a/pkg/resources/stream_on_directory_table.go b/pkg/resources/stream_on_directory_table.go index c341f6b9f7..b3e726f97e 100644 --- a/pkg/resources/stream_on_directory_table.go +++ b/pkg/resources/stream_on_directory_table.go @@ -23,7 +23,7 @@ var streamOnDirectoryTableSchema = func() map[string]*schema.Schema { "stage": { Type: schema.TypeString, Required: true, - Description: blocklistedCharactersFieldDescription("Specifies an identifier for the stage the stream will monitor. Due to Snowflake limitations, the provider can not read the stage's database and schema. For stages, Snowflake returns only partially qualified name instead of fully qualified name. Please use stages located in the same schema as the stream."), + Description: relatedResourceDescription(blocklistedCharactersFieldDescription("Specifies an identifier for the stage the stream will monitor. Due to Snowflake limitations, the provider can not read the stage's database and schema. For stages, Snowflake returns only partially qualified name instead of fully qualified name. Please use stages located in the same schema as the stream."), resources.Stage), // TODO (SNOW-1733130): the returned value is not a fully qualified name DiffSuppressFunc: SuppressIfAny(suppressIdentifierQuotingPartiallyQualifiedName, IgnoreChangeToCurrentSnowflakeValueInShow("stage")), ValidateDiagFunc: IsValidIdentifier[sdk.SchemaObjectIdentifier](), diff --git a/pkg/resources/stream_on_external_table.go b/pkg/resources/stream_on_external_table.go index 05d4b3289a..be12b44638 100644 --- a/pkg/resources/stream_on_external_table.go +++ b/pkg/resources/stream_on_external_table.go @@ -23,7 +23,7 @@ var streamOnExternalTableSchema = func() map[string]*schema.Schema { "external_table": { Type: schema.TypeString, Required: true, - Description: blocklistedCharactersFieldDescription("Specifies an identifier for the external table the stream will monitor."), + Description: relatedResourceDescription(blocklistedCharactersFieldDescription("Specifies an identifier for the external table the stream will monitor."), resources.ExternalTable), DiffSuppressFunc: SuppressIfAny(suppressIdentifierQuoting, IgnoreChangeToCurrentSnowflakeValueInShow("table_name")), ValidateDiagFunc: IsValidIdentifier[sdk.SchemaObjectIdentifier](), }, diff --git a/pkg/resources/stream_on_table.go b/pkg/resources/stream_on_table.go index cc9b56f371..76506f0b4a 100644 --- a/pkg/resources/stream_on_table.go +++ b/pkg/resources/stream_on_table.go @@ -23,7 +23,7 @@ var streamOnTableSchema = func() map[string]*schema.Schema { "table": { Type: schema.TypeString, Required: true, - Description: blocklistedCharactersFieldDescription("Specifies an identifier for the table the stream will monitor."), + Description: relatedResourceDescription(blocklistedCharactersFieldDescription("Specifies an identifier for the table the stream will monitor."), resources.Table), DiffSuppressFunc: SuppressIfAny(suppressIdentifierQuoting, IgnoreChangeToCurrentSnowflakeValueInShow("table_name")), ValidateDiagFunc: IsValidIdentifier[sdk.SchemaObjectIdentifier](), }, diff --git a/pkg/resources/stream_on_view.go b/pkg/resources/stream_on_view.go index b093ed726b..016a63eba0 100644 --- a/pkg/resources/stream_on_view.go +++ b/pkg/resources/stream_on_view.go @@ -23,7 +23,7 @@ var StreamOnViewSchema = func() map[string]*schema.Schema { "view": { Type: schema.TypeString, Required: true, - Description: blocklistedCharactersFieldDescription("Specifies an identifier for the view the 
stream will monitor."), + Description: relatedResourceDescription(blocklistedCharactersFieldDescription("Specifies an identifier for the view the stream will monitor."), resources.View), DiffSuppressFunc: SuppressIfAny(suppressIdentifierQuoting, IgnoreChangeToCurrentSnowflakeValueInShow("table_name")), ValidateDiagFunc: IsValidIdentifier[sdk.SchemaObjectIdentifier](), }, diff --git a/pkg/resources/streamlit.go b/pkg/resources/streamlit.go index c7b8f84ca1..ff18febaca 100644 --- a/pkg/resources/streamlit.go +++ b/pkg/resources/streamlit.go @@ -24,27 +24,27 @@ var streamlitSchema = map[string]*schema.Schema{ "name": { Type: schema.TypeString, Required: true, - Description: "String that specifies the identifier (i.e. name) for the streamlit; must be unique in your account.", + Description: blocklistedCharactersFieldDescription("String that specifies the identifier (i.e. name) for the streamlit; must be unique in your account."), DiffSuppressFunc: suppressIdentifierQuoting, }, "database": { Type: schema.TypeString, Required: true, - Description: "The database in which to create the streamlit", + Description: blocklistedCharactersFieldDescription("The database in which to create the streamlit"), ForceNew: true, DiffSuppressFunc: suppressIdentifierQuoting, }, "schema": { Type: schema.TypeString, Required: true, - Description: "The schema in which to create the streamlit.", + Description: blocklistedCharactersFieldDescription("The schema in which to create the streamlit."), ForceNew: true, DiffSuppressFunc: suppressIdentifierQuoting, }, "stage": { Type: schema.TypeString, Required: true, - Description: "The stage in which streamlit files are located.", + Description: relatedResourceDescription("The stage in which streamlit files are located.", resources.Stage), ValidateDiagFunc: IsValidIdentifier[sdk.SchemaObjectIdentifier](), DiffSuppressFunc: SuppressIfAny(suppressIdentifierQuoting, IgnoreChangeToCurrentSnowflakeValueInDescribe("root_location")), }, @@ -57,14 +57,14 @@ var streamlitSchema = map[string]*schema.Schema{ "main_file": { Type: schema.TypeString, Required: true, - Description: "Specifies the filename of the Streamlit Python application. This filename is relative to the value of `root_location`", + Description: "Specifies the filename of the Streamlit Python application. This filename is relative to the value of `directory_location`", DiffSuppressFunc: IgnoreChangeToCurrentSnowflakeValueInDescribe("main_file"), }, "query_warehouse": { Type: schema.TypeString, ValidateDiagFunc: IsValidIdentifier[sdk.AccountObjectIdentifier](), Optional: true, - Description: "Specifies the warehouse where SQL queries issued by the Streamlit application are run.", + Description: relatedResourceDescription("Specifies the warehouse where SQL queries issued by the Streamlit application are run. 
Due to Snowflake limitations, the warehouse identifier can consist of only upper-cased letters.", resources.Warehouse), DiffSuppressFunc: SuppressIfAny(suppressIdentifierQuoting, IgnoreChangeToCurrentSnowflakeValueInShow("query_warehouse")), }, "external_access_integrations": { @@ -75,7 +75,7 @@ var streamlitSchema = map[string]*schema.Schema{ }, Optional: true, Description: "External access integrations connected to the Streamlit.", - DiffSuppressFunc: SuppressIfAny(suppressIdentifierQuoting, IgnoreChangeToCurrentSnowflakeValueInDescribe("external_access_integrations")), + DiffSuppressFunc: SuppressIfAny(NormalizeAndCompareIdentifiersInSet("external_access_integrations"), IgnoreChangeToCurrentSnowflakeValueInDescribe("external_access_integrations")), }, "title": { Type: schema.TypeString, @@ -108,6 +108,8 @@ var streamlitSchema = map[string]*schema.Schema{ func Streamlit() *schema.Resource { return &schema.Resource{ + SchemaVersion: 1, + CreateContext: TrackingCreateWrapper(resources.Streamlit, CreateContextStreamlit), ReadContext: TrackingReadWrapper(resources.Streamlit, ReadContextStreamlit), UpdateContext: TrackingUpdateWrapper(resources.Streamlit, UpdateContextStreamlit), @@ -125,7 +127,6 @@ func Streamlit() *schema.Resource { ComputedIfAnyAttributeChanged(streamlitSchema, DescribeOutputAttributeName, "title", "comment", "root_location", "main_file", "query_warehouse", "external_access_integrations"), )), - SchemaVersion: 1, StateUpgraders: []schema.StateUpgrader{ { Version: 0, diff --git a/pkg/resources/tag.go b/pkg/resources/tag.go index 668617450e..7695fd318b 100644 --- a/pkg/resources/tag.go +++ b/pkg/resources/tag.go @@ -59,7 +59,7 @@ var tagSchema = map[string]*schema.Schema{ }, Optional: true, DiffSuppressFunc: NormalizeAndCompareIdentifiersInSet("masking_policies"), - Description: "Set of masking policies for the tag. A tag can support one masking policy for each data type. If masking policies are assigned to the tag, before dropping the tag, the provider automatically unassigns them.", + Description: relatedResourceDescription("Set of masking policies for the tag. A tag can support one masking policy for each data type. If masking policies are assigned to the tag, before dropping the tag, the provider automatically unassigns them.", resources.MaskingPolicy), }, FullyQualifiedNameAttributeName: schemas.FullyQualifiedNameSchema, ShowOutputAttributeName: { @@ -114,7 +114,7 @@ func Tag() *schema.Resource { ReadContext: TrackingReadWrapper(resources.Tag, ReadContextTag), UpdateContext: TrackingUpdateWrapper(resources.Tag, UpdateContextTag), DeleteContext: TrackingDeleteWrapper(resources.Tag, DeleteContextTag), - Description: "Resource used to manage tags. For more information, check [tag documentation](https://docs.snowflake.com/en/sql-reference/sql/create-tag).", + Description: "Resource used to manage tags. For more information, check [tag documentation](https://docs.snowflake.com/en/sql-reference/sql/create-tag). For assigning tags to Snowflake objects, see [tag_association resource](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/latest/docs/resources/tag_association).", CustomizeDiff: TrackingCustomDiffWrapper(resources.Tag, customdiff.All( ComputedIfAnyAttributeChanged(tagSchema, ShowOutputAttributeName, "name", "comment", "allowed_values"), @@ -190,7 +190,6 @@ func ReadContextTag(ctx context.Context, d *schema.ResourceData, meta any) diag. 
return diag.FromErr(err) } errs := errors.Join( - d.Set("name", tag.Name), d.Set(FullyQualifiedNameAttributeName, id.FullyQualifiedName()), d.Set(ShowOutputAttributeName, []map[string]any{schemas.TagToSchema(tag)}), d.Set("comment", tag.Comment), diff --git a/pkg/resources/task.go b/pkg/resources/task.go index f9f697fda3..8cab5404f8 100644 --- a/pkg/resources/task.go +++ b/pkg/resources/task.go @@ -66,7 +66,7 @@ var taskSchema = map[string]*schema.Schema{ Optional: true, ValidateDiagFunc: IsValidIdentifier[sdk.AccountObjectIdentifier](), DiffSuppressFunc: suppressIdentifierQuoting, - Description: "The warehouse the task will use. Omit this parameter to use Snowflake-managed compute resources for runs of this task. Due to Snowflake limitations warehouse identifier can consist of only upper-cased letters. (Conflicts with user_task_managed_initial_warehouse_size)", + Description: relatedResourceDescription("The warehouse the task will use. Omit this parameter to use Snowflake-managed compute resources for runs of this task. Due to Snowflake limitations warehouse identifier can consist of only upper-cased letters. (Conflicts with user_task_managed_initial_warehouse_size)", resources.Warehouse), ConflictsWith: []string{"user_task_managed_initial_warehouse_size"}, }, "schedule": { @@ -113,7 +113,7 @@ var taskSchema = map[string]*schema.Schema{ Optional: true, ValidateDiagFunc: IsValidIdentifier[sdk.AccountObjectIdentifier](), DiffSuppressFunc: SuppressIfAny(suppressIdentifierQuoting, IgnoreChangeToCurrentSnowflakeValueInShow("error_integration")), - Description: blocklistedCharactersFieldDescription("Specifies the name of the notification integration used for error notifications."), + Description: relatedResourceDescription(blocklistedCharactersFieldDescription("Specifies the name of the notification integration used for error notifications."), resources.NotificationIntegration), }, "comment": { Type: schema.TypeString, diff --git a/pkg/resources/task_parameters.go b/pkg/resources/task_parameters.go index 0c6f24d66f..5609bca254 100644 --- a/pkg/resources/task_parameters.go +++ b/pkg/resources/task_parameters.go @@ -85,7 +85,7 @@ func init() { // task parameters {Name: sdk.TaskParameterSuspendTaskAfterNumFailures, Type: schema.TypeInt, ValidateDiag: validation.ToDiagFunc(validation.IntAtLeast(0)), Description: "Specifies the number of consecutive failed task runs after which the current task is suspended automatically. The default is 0 (no automatic suspension)."}, {Name: sdk.TaskParameterTaskAutoRetryAttempts, Type: schema.TypeInt, ValidateDiag: validation.ToDiagFunc(validation.IntAtLeast(0)), Description: "Specifies the number of automatic task graph retry attempts. If any task graphs complete in a FAILED state, Snowflake can automatically retry the task graphs from the last task in the graph that failed."}, - {Name: sdk.TaskParameterUserTaskManagedInitialWarehouseSize, Type: schema.TypeString, ValidateDiag: sdkValidation(sdk.ToWarehouseSize), DiffSuppress: NormalizeAndCompare(sdk.ToWarehouseSize), ConflictsWith: []string{"warehouse"}, Description: "Specifies the size of the compute resources to provision for the first run of the task, before a task history is available for Snowflake to determine an ideal size. Once a task has successfully completed a few runs, Snowflake ignores this parameter setting. Valid values are (case-insensitive): %s. 
(Conflicts with warehouse)"}, + {Name: sdk.TaskParameterUserTaskManagedInitialWarehouseSize, Type: schema.TypeString, ValidateDiag: sdkValidation(sdk.ToWarehouseSize), DiffSuppress: NormalizeAndCompare(sdk.ToWarehouseSize), ConflictsWith: []string{"warehouse"}, Description: "Specifies the size of the compute resources to provision for the first run of the task, before a task history is available for Snowflake to determine an ideal size. Once a task has successfully completed a few runs, Snowflake ignores this parameter setting. Valid values are (case-insensitive): %s. (Conflicts with warehouse). For more information about warehouses, see [docs](./warehouse)."}, {Name: sdk.TaskParameterUserTaskMinimumTriggerIntervalInSeconds, Type: schema.TypeInt, ValidateDiag: validation.ToDiagFunc(validation.IntAtLeast(0)), Description: "Minimum amount of time between Triggered Task executions in seconds"}, {Name: sdk.TaskParameterUserTaskTimeoutMs, Type: schema.TypeInt, ValidateDiag: validation.ToDiagFunc(validation.IntAtLeast(0)), Description: "Specifies the time limit on a single run of the task before it times out (in milliseconds)."}, // session params diff --git a/pkg/resources/testdata/TestAcc_View/columns/test.tf b/pkg/resources/testdata/TestAcc_View/columns/test.tf index fd5b201fe7..7c76773ae6 100644 --- a/pkg/resources/testdata/TestAcc_View/columns/test.tf +++ b/pkg/resources/testdata/TestAcc_View/columns/test.tf @@ -13,7 +13,7 @@ resource "snowflake_view" "test" { masking_policy { policy_name = var.masking_name - using = var.masking_using + using = try(var.masking_using, null) } } diff --git a/pkg/resources/testdata/TestAcc_View/columns/variables.tf b/pkg/resources/testdata/TestAcc_View/columns/variables.tf index ba6e4bfe0d..462b89d908 100644 --- a/pkg/resources/testdata/TestAcc_View/columns/variables.tf +++ b/pkg/resources/testdata/TestAcc_View/columns/variables.tf @@ -23,5 +23,6 @@ variable "masking_name" { } variable "masking_using" { - type = list(string) + type = list(string) + default = null } diff --git a/pkg/resources/user.go b/pkg/resources/user.go index 8bf0fde1b0..1fb6f15127 100644 --- a/pkg/resources/user.go +++ b/pkg/resources/user.go @@ -33,7 +33,7 @@ var userSchema = map[string]*schema.Schema{ Type: schema.TypeString, Optional: true, Sensitive: true, - Description: "Password for the user. **WARNING:** this will put the password in the terraform state file. Use carefully.", + Description: externalChangesNotDetectedFieldDescription("Password for the user. **WARNING:** this will put the password in the terraform state file. Use carefully."), }, "login_name": { Type: schema.TypeString, @@ -106,7 +106,7 @@ var userSchema = map[string]*schema.Schema{ Type: schema.TypeString, Optional: true, DiffSuppressFunc: suppressIdentifierQuoting, - Description: "Specifies the virtual warehouse that is active by default for the user’s session upon login. Note that the CREATE USER operation does not verify that the warehouse exists.", + Description: relatedResourceDescription("Specifies the virtual warehouse that is active by default for the user’s session upon login. Note that the CREATE USER operation does not verify that the warehouse exists.", resources.Warehouse), }, "default_namespace": { Type: schema.TypeString, @@ -118,7 +118,7 @@ var userSchema = map[string]*schema.Schema{ Type: schema.TypeString, Optional: true, DiffSuppressFunc: suppressIdentifierQuoting, - Description: "Specifies the role that is active by default for the user’s session upon login. 
Note that specifying a default role for a user does **not** grant the role to the user. The role must be granted explicitly to the user using the [GRANT ROLE](https://docs.snowflake.com/en/sql-reference/sql/grant-role) command. In addition, the CREATE USER operation does not verify that the role exists.", + Description: relatedResourceDescription("Specifies the role that is active by default for the user’s session upon login. Note that specifying a default role for a user does **not** grant the role to the user. The role must be granted explicitly to the user using the [GRANT ROLE](https://docs.snowflake.com/en/sql-reference/sql/grant-role) command. In addition, the CREATE USER operation does not verify that the role exists.", resources.AccountRole), }, "default_secondary_roles_option": { Type: schema.TypeString, diff --git a/pkg/resources/view.go b/pkg/resources/view.go index aaa77b53d3..71717adf96 100644 --- a/pkg/resources/view.go +++ b/pkg/resources/view.go @@ -49,7 +49,7 @@ var viewSchema = map[string]*schema.Schema{ Type: schema.TypeBool, Optional: true, Default: false, - Description: "Retains the access permissions from the original view when a new view is created using the OR REPLACE clause.", + Description: copyGrantsDescription("Retains the access permissions from the original view when a view is recreated using the OR REPLACE clause."), DiffSuppressFunc: IgnoreAfterCreation, }, "is_secure": { @@ -94,6 +94,7 @@ var viewSchema = map[string]*schema.Schema{ Required: true, Description: "Identifier of the data metric function to add to the table or view or drop from the table or view. This function identifier must be provided without arguments in parenthesis.", DiffSuppressFunc: suppressIdentifierQuoting, + ValidateDiagFunc: IsValidIdentifier[sdk.SchemaObjectIdentifier](), }, "on": { Type: schema.TypeSet, @@ -159,7 +160,8 @@ var viewSchema = map[string]*schema.Schema{ Type: schema.TypeString, Required: true, DiffSuppressFunc: suppressIdentifierQuoting, - Description: "Specifies the masking policy to set on a column.", + ValidateDiagFunc: IsValidIdentifier[sdk.SchemaObjectIdentifier](), + Description: relatedResourceDescription("Specifies the masking policy to set on a column.", resources.MaskingPolicy), }, "using": { Type: schema.TypeList, @@ -167,7 +169,8 @@ var viewSchema = map[string]*schema.Schema{ Elem: &schema.Schema{ Type: schema.TypeString, }, - Description: "Specifies the arguments to pass into the conditional masking policy SQL expression. The first column in the list specifies the column for the policy conditions to mask or tokenize the data and must match the column to which the masking policy is set. The additional columns specify the columns to evaluate to determine whether to mask or tokenize the data in each row of the query result when a query is made on the first column. If the USING clause is omitted, Snowflake treats the conditional masking policy as a normal masking policy.", + DiffSuppressFunc: IgnoreMatchingColumnNameAndMaskingPolicyUsingFirstElem(), + Description: "Specifies the arguments to pass into the conditional masking policy SQL expression. The first column in the list specifies the column for the policy conditions to mask or tokenize the data and must match the column to which the masking policy is set. The additional columns specify the columns to evaluate to determine whether to mask or tokenize the data in each row of the query result when a query is made on the first column. 
If the USING clause is omitted, Snowflake treats the conditional masking policy as a normal masking policy.", }, }, }, @@ -182,6 +185,7 @@ var viewSchema = map[string]*schema.Schema{ Type: schema.TypeString, Required: true, DiffSuppressFunc: suppressIdentifierQuoting, + ValidateDiagFunc: IsValidIdentifier[sdk.SchemaObjectIdentifier](), Description: "Specifies the projection policy to set on a column.", }, }, @@ -212,7 +216,8 @@ var viewSchema = map[string]*schema.Schema{ Type: schema.TypeString, Required: true, DiffSuppressFunc: suppressIdentifierQuoting, - Description: "Row access policy name.", + ValidateDiagFunc: IsValidIdentifier[sdk.SchemaObjectIdentifier](), + Description: relatedResourceDescription("Row access policy name.", resources.RowAccessPolicy), }, "on": { Type: schema.TypeSet, @@ -236,6 +241,7 @@ var viewSchema = map[string]*schema.Schema{ Type: schema.TypeString, Required: true, DiffSuppressFunc: suppressIdentifierQuoting, + ValidateDiagFunc: IsValidIdentifier[sdk.SchemaObjectIdentifier](), Description: "Aggregation policy name.", }, "entity_key": { @@ -253,7 +259,7 @@ var viewSchema = map[string]*schema.Schema{ "statement": { Type: schema.TypeString, Required: true, - Description: "Specifies the query used to create the view.", + Description: diffSuppressStatementFieldDescription("Specifies the query used to create the view."), DiffSuppressFunc: DiffSuppressStatement, }, ShowOutputAttributeName: { @@ -917,6 +923,7 @@ func UpdateView(ctx context.Context, d *schema.ResourceData, meta any) diag.Diag return diag.FromErr(fmt.Errorf("error setting change_tracking for view %v: %w", d.Id(), err)) } } else { + // No UNSET for CHANGE_TRACKING, so set false instead. err := client.Views.Alter(ctx, sdk.NewAlterViewRequest(id).WithSetChangeTracking(false)) if err != nil { return diag.FromErr(fmt.Errorf("error unsetting change_tracking for view %v: %w", d.Id(), err)) diff --git a/pkg/resources/view_acceptance_test.go b/pkg/resources/view_acceptance_test.go index b64f342a1a..b72822016e 100644 --- a/pkg/resources/view_acceptance_test.go +++ b/pkg/resources/view_acceptance_test.go @@ -770,6 +770,106 @@ end;; }) } +func TestAcc_View_columnsWithMaskingPolicyWithoutUsing(t *testing.T) { + t.Setenv(string(testenvs.ConfigureClientOnce), "") + _ = testenvs.GetOrSkipTest(t, testenvs.EnableAcceptance) + acc.TestAccPreCheck(t) + + id := acc.TestClient().Ids.RandomSchemaObjectIdentifier() + table, tableCleanup := acc.TestClient().Table.CreateWithColumns(t, []sdk.TableColumnRequest{ + *sdk.NewTableColumnRequest("id", sdk.DataTypeNumber), + *sdk.NewTableColumnRequest("foo", sdk.DataTypeNumber), + *sdk.NewTableColumnRequest("bar", sdk.DataTypeNumber), + }) + t.Cleanup(tableCleanup) + statement := fmt.Sprintf("SELECT id, foo FROM %s", table.ID().FullyQualifiedName()) + + maskingPolicy, maskingPolicyCleanup := acc.TestClient().MaskingPolicy.CreateMaskingPolicyWithOptions(t, + []sdk.TableColumnSignature{ + { + Name: "One", + Type: sdk.DataTypeNumber, + }, + }, + sdk.DataTypeNumber, + ` +case + when One > 0 then One + else 0 +end;; +`, + new(sdk.CreateMaskingPolicyOptions), + ) + t.Cleanup(maskingPolicyCleanup) + + projectionPolicy, projectionPolicyCleanup := acc.TestClient().ProjectionPolicy.CreateProjectionPolicy(t) + t.Cleanup(projectionPolicyCleanup) + + // generators currently don't handle lists of objects, so use the old way + viewWithPolicies := func() config.Variables { + conf := config.Variables{ + "name": config.StringVariable(id.Name()), + "database": config.StringVariable(id.DatabaseName()), + 
"schema": config.StringVariable(id.SchemaName()), + "statement": config.StringVariable(statement), + } + conf["projection_name"] = config.StringVariable(projectionPolicy.FullyQualifiedName()) + conf["masking_name"] = config.StringVariable(maskingPolicy.ID().FullyQualifiedName()) + return conf + } + + resource.Test(t, resource.TestCase{ + ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, + TerraformVersionChecks: []tfversion.TerraformVersionCheck{ + tfversion.RequireAbove(tfversion.Version1_5_0), + }, + CheckDestroy: acc.CheckDestroy(t, resources.View), + Steps: []resource.TestStep{ + // With all policies on columns + { + ConfigDirectory: acc.ConfigurationDirectory("TestAcc_View/columns"), + ConfigVariables: viewWithPolicies(), + Check: assert.AssertThat(t, + resourceassert.ViewResource(t, "snowflake_view.test"). + HasNameString(id.Name()). + HasStatementString(statement). + HasDatabaseString(id.DatabaseName()). + HasSchemaString(id.SchemaName()). + HasColumnLength(2), + objectassert.View(t, id). + HasMaskingPolicyReferences(acc.TestClient(), 1). + HasProjectionPolicyReferences(acc.TestClient(), 1), + ), + }, + // Remove policies on columns externally + { + ConfigDirectory: acc.ConfigurationDirectory("TestAcc_View/columns"), + ConfigVariables: viewWithPolicies(), + PreConfig: func() { + acc.TestClient().View.Alter(t, sdk.NewAlterViewRequest(id).WithUnsetMaskingPolicyOnColumn(*sdk.NewViewUnsetColumnMaskingPolicyRequest("ID"))) + acc.TestClient().View.Alter(t, sdk.NewAlterViewRequest(id).WithUnsetProjectionPolicyOnColumn(*sdk.NewViewUnsetProjectionPolicyRequest("ID"))) + }, + ConfigPlanChecks: resource.ConfigPlanChecks{ + PreApply: []plancheck.PlanCheck{ + plancheck.ExpectResourceAction("snowflake_view.test", plancheck.ResourceActionUpdate), + }, + }, + Check: assert.AssertThat(t, + resourceassert.ViewResource(t, "snowflake_view.test"). + HasNameString(id.Name()). + HasStatementString(statement). + HasDatabaseString(id.DatabaseName()). + HasSchemaString(id.SchemaName()). + HasColumnLength(2), + objectassert.View(t, id). + HasMaskingPolicyReferences(acc.TestClient(), 1). 
+ HasProjectionPolicyReferences(acc.TestClient(), 1), + ), + }, + }, + }) +} + func TestAcc_View_Rename(t *testing.T) { t.Setenv(string(testenvs.ConfigureClientOnce), "") statement := "SELECT ROLE_NAME, ROLE_OWNER FROM INFORMATION_SCHEMA.APPLICABLE_ROLES" diff --git a/pkg/resources/warehouse.go b/pkg/resources/warehouse.go index 2c6f5bd57a..8925a5ef95 100644 --- a/pkg/resources/warehouse.go +++ b/pkg/resources/warehouse.go @@ -90,7 +90,7 @@ var warehouseSchema = map[string]*schema.Schema{ Optional: true, ValidateDiagFunc: IsValidIdentifier[sdk.AccountObjectIdentifier](), DiffSuppressFunc: SuppressIfAny(suppressIdentifierQuoting, IgnoreChangeToCurrentSnowflakeValueInShow("resource_monitor")), - Description: "Specifies the name of a resource monitor that is explicitly assigned to the warehouse.", + Description: relatedResourceDescription("Specifies the name of a resource monitor that is explicitly assigned to the warehouse.", resources.ResourceMonitor), }, "comment": { Type: schema.TypeString, diff --git a/pkg/sdk/testint/client_integration_test.go b/pkg/sdk/testint/client_integration_test.go index 2522155196..cbf8a52391 100644 --- a/pkg/sdk/testint/client_integration_test.go +++ b/pkg/sdk/testint/client_integration_test.go @@ -4,6 +4,7 @@ import ( "context" "database/sql" "os" + "strings" "testing" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/testprofiles" @@ -54,7 +55,9 @@ func TestInt_Client_NewClient(t *testing.T) { require.NotNil(t, config) account := config.Account - t.Setenv(snowflakeenvs.Account, account) + parts := strings.Split(account, "-") + t.Setenv(snowflakeenvs.OrganizationName, parts[0]) + t.Setenv(snowflakeenvs.AccountName, parts[1]) dir, err := os.UserHomeDir() require.NoError(t, err) diff --git a/pkg/sdk/testint/security_integrations_gen_integration_test.go b/pkg/sdk/testint/security_integrations_gen_integration_test.go index 3f6ee32ddf..0ae5905195 100644 --- a/pkg/sdk/testint/security_integrations_gen_integration_test.go +++ b/pkg/sdk/testint/security_integrations_gen_integration_test.go @@ -530,6 +530,18 @@ func TestInt_SecurityIntegrations(t *testing.T) { assertSecurityIntegration(t, integration, id, "OAUTH - CUSTOM", true, "a") }) + // Prove that creating a security integration with a specified network policy id with lower case characters fails. This is a bug in Snowflake. + // https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/3229 + t.Run("CreateOauthCustom_issue3229", func(t *testing.T) { + id := testClientHelper().Ids.RandomAccountObjectIdentifierWithPrefix("test") + networkPolicy, networkPolicyCleanup := testClientHelper().NetworkPolicy.CreateNetworkPolicyWithRequest(t, sdk.NewCreateNetworkPolicyRequest(id)) + t.Cleanup(networkPolicyCleanup) + + req := sdk.NewCreateOauthForCustomClientsSecurityIntegrationRequest(id, sdk.OauthSecurityIntegrationClientTypePublic, "https://example.com").WithNetworkPolicy(networkPolicy.ID()) + err := client.SecurityIntegrations.CreateOauthForCustomClients(ctx, req) + require.ErrorContains(t, err, "object does not exist or not authorized") + }) + t.Run("CreateSaml2", func(t *testing.T) { _, id, issuer := createSAML2Integration(t, func(r *sdk.CreateSaml2SecurityIntegrationRequest) { r.WithAllowedEmailPatterns([]sdk.EmailPattern{{Pattern: "^(.+dev)@example.com$"}}). 
@@ -924,6 +936,24 @@ func TestInt_SecurityIntegrations(t *testing.T) { assert.Contains(t, details, sdk.SecurityIntegrationProperty{Name: "OAUTH_CLIENT_RSA_PUBLIC_KEY_2_FP", Type: "String", Value: "", Default: ""}) }) + // Prove that altering a security integration with a specified network policy id with lower case characters fails. This is a bug in Snowflake. + // https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/3229 + t.Run("AlterOauthCustom_issue3229", func(t *testing.T) { + networkPolicyId := testClientHelper().Ids.RandomAccountObjectIdentifierWithPrefix("test") + networkPolicy, networkPolicyCleanup := testClientHelper().NetworkPolicy.CreateNetworkPolicyWithRequest(t, sdk.NewCreateNetworkPolicyRequest(networkPolicyId)) + t.Cleanup(networkPolicyCleanup) + + _, id := createOauthCustom(t, nil) + + setRequest := sdk.NewAlterOauthForCustomClientsSecurityIntegrationRequest(id). + WithSet( + *sdk.NewOauthForCustomClientsIntegrationSetRequest(). + WithNetworkPolicy(networkPolicy.ID()), + ) + err := client.SecurityIntegrations.AlterOauthForCustomClients(ctx, setRequest) + require.ErrorContains(t, err, "object does not exist or not authorized") + }) + t.Run("AlterSAML2Integration", func(t *testing.T) { _, id, issuer := createSAML2Integration(t, nil) diff --git a/templates/data-sources/roles.md.tmpl b/templates/data-sources/roles.md.tmpl index 60acfefd96..da95cdd5c0 100644 --- a/templates/data-sources/roles.md.tmpl +++ b/templates/data-sources/roles.md.tmpl @@ -11,6 +11,9 @@ description: |- !> **V1 release candidate** This datasource was reworked and is a release candidate for the V1. We do not expect significant changes in it before the V1. We will welcome any feedback and adjust the data source if needed. Any errors reported will be resolved with a higher priority. We encourage checking this data source out before the V1 release. Please follow the [migration guide](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/MIGRATION_GUIDE.md#v0920--v0930) to use it. + +-> **Note** Fields `STARTS WITH` and `LIMIT` are currently missing. They will be added in the future. + # {{.Name}} ({{.Type}}) {{ .Description | trimspace }} diff --git a/templates/data-sources/schemas.md.tmpl b/templates/data-sources/schemas.md.tmpl index 0b004f8501..67da95dca4 100644 --- a/templates/data-sources/schemas.md.tmpl +++ b/templates/data-sources/schemas.md.tmpl @@ -11,6 +11,9 @@ description: |- !> **V1 release candidate** This data source was reworked and is a release candidate for the V1. We do not expect significant changes in it before the V1. We will welcome any feedback and adjust the data source if needed. Any errors reported will be resolved with a higher priority. We encourage checking this data source out before the V1 release. Please follow the [migration guide](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/MIGRATION_GUIDE.md#v0930--v0940) to use it. + +-> **Note** Field `WITH PRIVILEGES` is currently missing. It will be added in the future. 
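The two data-source notes above track `STARTS WITH` and `LIMIT` support that the data sources do not expose yet. For context, a minimal Go sketch of what those options do in raw SQL, assuming a `database/sql` connection through the gosnowflake driver; the DSN and the `APP_` prefix are placeholders:

```go
package main

import (
	"database/sql"
	"fmt"
	"log"

	_ "github.com/snowflakedb/gosnowflake" // registers the "snowflake" driver
)

func main() {
	// Placeholder DSN; real values come from the provider configuration.
	db, err := sql.Open("snowflake", "user:password@my_org-my_account/my_db/my_schema")
	if err != nil {
		log.Fatal(err)
	}
	defer db.Close()

	// SHOW ROLES already supports server-side filtering and paging;
	// the data sources do not expose these options yet.
	rows, err := db.Query("SHOW ROLES STARTS WITH 'APP_' LIMIT 10")
	if err != nil {
		log.Fatal(err)
	}
	defer rows.Close()

	cols, err := rows.Columns()
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println("SHOW ROLES columns:", cols)
}
```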
+ # {{.Name}} ({{.Type}}) {{ .Description | trimspace }} diff --git a/templates/guides/unassigning_policies.md.tmpl b/templates/guides/unassigning_policies.md.tmpl new file mode 100644 index 0000000000..de5de63e86 --- /dev/null +++ b/templates/guides/unassigning_policies.md.tmpl @@ -0,0 +1,65 @@ +--- +page_title: "Unassigning policies" +subcategory: "" +description: |- + +--- +# Unassigning policies + +For some objects, like network policies, Snowflake [docs](https://docs.snowflake.com/en/sql-reference/sql/drop-network-policy#usage-notes) state that the policy cannot be dropped successfully if it is currently assigned to another object. Currently, the provider does not unassign such objects automatically. + +Before dropping the resource: +- if the objects the policy is assigned to are managed in Terraform, follow the example below +- if they are not managed in Terraform, list them with `SELECT * from table(information_schema.policy_references(policy_name=>''));` and unassign them manually with `ALTER ...` + +## Example + +When you have a configuration like +```terraform +resource "snowflake_network_policy" "example" { + name = "network_policy_name" +} + +resource "snowflake_oauth_integration_for_custom_clients" "example" { + name = "integration" + oauth_client_type = "CONFIDENTIAL" + oauth_redirect_uri = "https://example.com" + blocked_roles_list = ["ACCOUNTADMIN", "SECURITYADMIN"] + network_policy = snowflake_network_policy.example.fully_qualified_name +} +``` + +and try removing the network policy, Terraform fails with +``` +│ Error deleting network policy EXAMPLE, err = 001492 (42601): SQL compilation error: +│ Cannot perform Drop operation on network policy EXAMPLE. The policy is attached to INTEGRATION with name EXAMPLE. Unset the network policy from INTEGRATION and try the +│ Drop operation again. +``` + +To remove the policy correctly, first adjust the configuration to +```terraform +resource "snowflake_network_policy" "example" { + name = "network_policy_name" +} + +resource "snowflake_oauth_integration_for_custom_clients" "example" { + name = "integration" + oauth_client_type = "CONFIDENTIAL" + oauth_redirect_uri = "https://example.com" + blocked_roles_list = ["ACCOUNTADMIN", "SECURITYADMIN"] +} +``` + +Note that the `network_policy` argument has been removed from the integration. Now run `terraform apply`; this unassigns the policy in Snowflake. Then adjust the configuration once again to +```terraform +resource "snowflake_oauth_integration_for_custom_clients" "example" { + name = "integration" + oauth_client_type = "CONFIDENTIAL" + oauth_redirect_uri = "https://example.com" + blocked_roles_list = ["ACCOUNTADMIN", "SECURITYADMIN"] +} +``` + +Now the network policy should be removed successfully. + +This behavior will be fixed in the provider in the future. diff --git a/templates/index.md.tmpl b/templates/index.md.tmpl index 11f3ba84bb..99bf6b0369 100644 --- a/templates/index.md.tmpl +++ b/templates/index.md.tmpl @@ -9,7 +9,7 @@ description: Manage SnowflakeDB with Terraform. ~> **Note** Please check the [migration guide](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/MIGRATION_GUIDE.md) when changing the version of the provider. --> **Note** the current roadmap is available in our GitHub repository: [ROADMAP.md](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/ROADMAP.md). 
+-> **Note** The current roadmap is available in our GitHub repository: [ROADMAP.md](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/ROADMAP.md). This is a terraform provider plugin for managing [Snowflake](https://www.snowflake.com/) accounts. Coverage is focused on part of Snowflake related to access control. @@ -55,7 +55,7 @@ To export the variables into your provider: ```shell export SNOWFLAKE_USER="..." -export SNOWFLAKE_PRIVATE_KEY_PATH="~/.ssh/snowflake_key" +export SNOWFLAKE_PRIVATE_KEY="~/.ssh/snowflake_key" ``` ### Keypair Authentication Passphrase @@ -77,7 +77,7 @@ To export the variables into your provider: ```shell export SNOWFLAKE_USER="..." -export SNOWFLAKE_PRIVATE_KEY_PATH="~/.ssh/snowflake_key.p8" +export SNOWFLAKE_PRIVATE_KEY="~/.ssh/snowflake_key.p8" export SNOWFLAKE_PRIVATE_KEY_PASSPHRASE="..." ``` @@ -87,7 +87,7 @@ If you have an OAuth access token, export these credentials as environment varia ```shell export SNOWFLAKE_USER='...' -export SNOWFLAKE_OAUTH_ACCESS_TOKEN='...' +export SNOWFLAKE_TOKEN='...' ``` Note that once this access token expires, you'll need to request a new one through an external application. @@ -97,11 +97,11 @@ Note that once this access token expires, you'll need to request a new one throu If you have an OAuth Refresh token, export these credentials as environment variables: ```shell -export SNOWFLAKE_OAUTH_REFRESH_TOKEN='...' -export SNOWFLAKE_OAUTH_CLIENT_ID='...' -export SNOWFLAKE_OAUTH_CLIENT_SECRET='...' -export SNOWFLAKE_OAUTH_ENDPOINT='...' -export SNOWFLAKE_OAUTH_REDIRECT_URL='https://localhost.com' +export SNOWFLAKE_TOKEN_ACCESSOR_REFRESH_TOKEN='...' +export SNOWFLAKE_TOKEN_ACCESSOR_CLIENT_ID='...' +export SNOWFLAKE_TOKEN_ACCESSOR_CLIENT_SECRET='...' +export SNOWFLAKE_TOKEN_ACCESSOR_TOKEN_ENDPOINT='...' +export SNOWFLAKE_TOKEN_ACCESSOR_REDIRECT_URI='https://localhost.com' ``` Note that because access tokens have a short life (typically 10 minutes), a new access token will be generated from the refresh token. @@ -136,7 +136,7 @@ provider "snowflake" { ```bash export SNOWFLAKE_USER="..." -export SNOWFLAKE_PRIVATE_KEY_PATH="~/.ssh/snowflake_key" +export SNOWFLAKE_PRIVATE_KEY="~/.ssh/snowflake_key" ``` 3. In a TOML file (default in ~/.snowflake/config). Notice the use of different profiles. The profile name needs to be specified in the Terraform configuration file in `profile` field. When this is not specified, `default` profile is loaded. diff --git a/templates/resources/api_authentication_integration_with_authorization_code_grant.md.tmpl b/templates/resources/api_authentication_integration_with_authorization_code_grant.md.tmpl index 28e2af568d..fe7454c7f2 100644 --- a/templates/resources/api_authentication_integration_with_authorization_code_grant.md.tmpl +++ b/templates/resources/api_authentication_integration_with_authorization_code_grant.md.tmpl @@ -11,6 +11,8 @@ description: |- !> **V1 release candidate** This resource was reworked and is a release candidate for the V1. We do not expect significant changes in it before the V1. We will welcome any feedback and adjust the resource if needed. Any errors reported will be resolved with a higher priority. We encourage checking this resource out before the V1 release. Please follow the [migration guide](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/MIGRATION_GUIDE.md#v0920--v0930) to use it. +!> **Note** The provider does not detect external changes on security integration type. 
In this case, remove the integration of wrong type manually with `terraform destroy` and recreate the resource. It will be addressed in the future. + # {{.Name}} ({{.Type}}) {{ .Description | trimspace }} diff --git a/templates/resources/api_authentication_integration_with_client_credentials.md.tmpl b/templates/resources/api_authentication_integration_with_client_credentials.md.tmpl index 28e2af568d..fe7454c7f2 100644 --- a/templates/resources/api_authentication_integration_with_client_credentials.md.tmpl +++ b/templates/resources/api_authentication_integration_with_client_credentials.md.tmpl @@ -11,6 +11,8 @@ description: |- !> **V1 release candidate** This resource was reworked and is a release candidate for the V1. We do not expect significant changes in it before the V1. We will welcome any feedback and adjust the resource if needed. Any errors reported will be resolved with a higher priority. We encourage checking this resource out before the V1 release. Please follow the [migration guide](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/MIGRATION_GUIDE.md#v0920--v0930) to use it. +!> **Note** The provider does not detect external changes on security integration type. In this case, remove the integration of wrong type manually with `terraform destroy` and recreate the resource. It will be addressed in the future. + # {{.Name}} ({{.Type}}) {{ .Description | trimspace }} diff --git a/templates/resources/api_authentication_integration_with_jwt_bearer.md.tmpl b/templates/resources/api_authentication_integration_with_jwt_bearer.md.tmpl index 28e2af568d..fe7454c7f2 100644 --- a/templates/resources/api_authentication_integration_with_jwt_bearer.md.tmpl +++ b/templates/resources/api_authentication_integration_with_jwt_bearer.md.tmpl @@ -11,6 +11,8 @@ description: |- !> **V1 release candidate** This resource was reworked and is a release candidate for the V1. We do not expect significant changes in it before the V1. We will welcome any feedback and adjust the resource if needed. Any errors reported will be resolved with a higher priority. We encourage checking this resource out before the V1 release. Please follow the [migration guide](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/MIGRATION_GUIDE.md#v0920--v0930) to use it. +!> **Note** The provider does not detect external changes on security integration type. In this case, remove the integration of wrong type manually with `terraform destroy` and recreate the resource. It will be addressed in the future. + # {{.Name}} ({{.Type}}) {{ .Description | trimspace }} diff --git a/templates/resources/authentication_policy.md.tmpl b/templates/resources/authentication_policy.md.tmpl index 93b46362ee..ca835354ca 100644 --- a/templates/resources/authentication_policy.md.tmpl +++ b/templates/resources/authentication_policy.md.tmpl @@ -9,8 +9,7 @@ description: |- {{- end }} --- -> [!WARNING] -> According to Snowflake [docs](https://docs.snowflake.com/en/sql-reference/sql/drop-authentication-policy#usage-notes), an authentication policy cannot be dropped successfully if it is currently assigned to another object. Currently, the provider does not unassign such objects automatically. Before dropping the resource, list the assigned objects with `SELECT * from table(information_schema.policy_references(policy_name=>''));` and unassign them manually with `ALTER ...` or with updated Terraform configuration, if possible. 
+!> **Note** According to Snowflake [docs](https://docs.snowflake.com/en/sql-reference/sql/drop-authentication-policy#usage-notes), an authentication policy cannot be dropped successfully if it is currently assigned to another object. Currently, the provider does not unassign such objects automatically. Before dropping the resource, first unassign the policy from the relevant objects. See [guide](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/latest/docs/guides/unassigning_policies) for more details. # {{.Name}} ({{.Type}}) diff --git a/templates/resources/database.md.tmpl b/templates/resources/database.md.tmpl index 28e2af568d..719a06f49e 100644 --- a/templates/resources/database.md.tmpl +++ b/templates/resources/database.md.tmpl @@ -11,6 +11,11 @@ description: |- !> **V1 release candidate** This resource was reworked and is a release candidate for the V1. We do not expect significant changes in it before the V1. We will welcome any feedback and adjust the resource if needed. Any errors reported will be resolved with a higher priority. We encourage checking this resource out before the V1 release. Please follow the [migration guide](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/MIGRATION_GUIDE.md#v0920--v0930) to use it. +!> **Note** The provider does not detect external changes on database type. In this case, remove the database of wrong type manually with `terraform destroy` and recreate the resource. It will be addressed in the future. + +!> **Note** A database cannot be dropped successfully if it contains network rule-network policy associations. The error looks like `098507 (2BP01): Cannot drop database DATABASE as it includes network rule - policy associations. +`. Currently, the provider does not unassign such objects automatically. Before dropping the resource, first unassign the network rule from the relevant objects. See [guide](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/latest/docs/guides/unassigning_policies) for more details. + # {{.Name}} ({{.Type}}) {{ .Description | trimspace }} diff --git a/templates/resources/external_oauth_integration.md.tmpl b/templates/resources/external_oauth_integration.md.tmpl index 28e2af568d..fe7454c7f2 100644 --- a/templates/resources/external_oauth_integration.md.tmpl +++ b/templates/resources/external_oauth_integration.md.tmpl @@ -11,6 +11,8 @@ description: |- !> **V1 release candidate** This resource was reworked and is a release candidate for the V1. We do not expect significant changes in it before the V1. We will welcome any feedback and adjust the resource if needed. Any errors reported will be resolved with a higher priority. We encourage checking this resource out before the V1 release. Please follow the [migration guide](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/MIGRATION_GUIDE.md#v0920--v0930) to use it. +!> **Note** The provider does not detect external changes on security integration type. In this case, remove the integration of wrong type manually with `terraform destroy` and recreate the resource. It will be addressed in the future. 
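The drop-related notes above all come down to the same pre-flight check: list what a policy is attached to before dropping it. A minimal sketch of that lookup from Go, assuming an already-open `database/sql` connection to Snowflake; the column names follow the documented `POLICY_REFERENCES` output:

```go
import (
	"database/sql"
	"fmt"
)

// listPolicyReferences prints the objects a policy is currently attached to,
// using the same INFORMATION_SCHEMA table function mentioned in the notes above.
func listPolicyReferences(db *sql.DB, policyName string) error {
	rows, err := db.Query(
		"SELECT ref_entity_name, ref_entity_domain FROM TABLE(information_schema.policy_references(policy_name => ?))",
		policyName,
	)
	if err != nil {
		return err
	}
	defer rows.Close()

	for rows.Next() {
		var name, domain string
		if err := rows.Scan(&name, &domain); err != nil {
			return err
		}
		fmt.Printf("policy %s is attached to %s (%s)\n", policyName, name, domain)
	}
	return rows.Err()
}
```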
+ # {{.Name}} ({{.Type}}) {{ .Description | trimspace }} diff --git a/templates/resources/masking_policy.md.tmpl b/templates/resources/masking_policy.md.tmpl index 8c42e823de..c516e8e1b1 100644 --- a/templates/resources/masking_policy.md.tmpl +++ b/templates/resources/masking_policy.md.tmpl @@ -11,8 +11,7 @@ description: |- !> **V1 release candidate** This resource was reworked and is a release candidate for the V1. We do not expect significant changes in it before the V1. We will welcome any feedback and adjust the resource if needed. Any errors reported will be resolved with a higher priority. We encourage checking this resource out before the V1 release. Please follow the [migration guide](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/MIGRATION_GUIDE.md#v0950--v0960) to use it. -> [!WARNING] -> According to Snowflake [docs](https://docs.snowflake.com/en/sql-reference/sql/drop-masking-policy#usage-notes), a masking policy cannot be dropped successfully if it is currently assigned to another object. Currently, the provider does not unassign such objects automatically. Before dropping the resource, list the assigned objects with `SELECT * from table(information_schema.policy_references(policy_name=>''));` and unassign them manually with `ALTER ...` or with updated Terraform configuration, if possible. +!> **Note** According to Snowflake [docs](https://docs.snowflake.com/en/sql-reference/sql/drop-masking-policy#usage-notes), a masking policy cannot be dropped successfully if it is currently assigned to another object. Currently, the provider does not unassign such objects automatically. Before dropping the resource, first unassign the policy from the relevant objects. See [guide](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/latest/docs/guides/unassigning_policies) for more details. # {{.Name}} ({{.Type}}) diff --git a/templates/resources/network_policy.md.tmpl b/templates/resources/network_policy.md.tmpl index c509e6a3e9..1432fcbee1 100644 --- a/templates/resources/network_policy.md.tmpl +++ b/templates/resources/network_policy.md.tmpl @@ -11,8 +11,9 @@ description: |- !> **V1 release candidate** This resource was reworked and is a release candidate for the V1. We do not expect significant changes in it before the V1. We will welcome any feedback and adjust the resource if needed. Any errors reported will be resolved with a higher priority. We encourage checking this resource out before the V1 release. Please follow the [migration guide](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/MIGRATION_GUIDE.md#v0920--v0930) to use it. -> [!WARNING] -> According to Snowflake [docs](https://docs.snowflake.com/en/sql-reference/sql/drop-network-policy#usage-notes), a network policy cannot be dropped successfully if it is currently assigned to another object. Currently, the provider does not unassign such objects automatically. Before dropping the resource, list the assigned objects with `SELECT * from table(information_schema.policy_references(policy_name=>''));` and unassign them manually with `ALTER ...` or with updated Terraform configuration, if possible. +!> **Note** According to Snowflake [docs](https://docs.snowflake.com/en/sql-reference/sql/drop-network-policy#usage-notes), a network policy cannot be dropped successfully if it is currently assigned to another object. Currently, the provider does not unassign such objects automatically. Before dropping the resource, first unassign the policy from the relevant objects. 
See [guide](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/latest/docs/guides/unassigning_policies) for more details. + +!> **Note** Due to technical limitations in the Terraform SDK, changes in `allowed_network_rule_list` and `blocked_network_rule_list` do not cause a diff for `show_output` and `describe_output`. # {{.Name}} ({{.Type}}) diff --git a/templates/resources/network_rule.md.tmpl b/templates/resources/network_rule.md.tmpl new file mode 100644 index 0000000000..c96f3e8a41 --- /dev/null +++ b/templates/resources/network_rule.md.tmpl @@ -0,0 +1,35 @@ +--- +page_title: "{{.Name}} {{.Type}} - {{.ProviderName}}" +subcategory: "" +description: |- +{{ if gt (len (split .Description "")) 1 -}} +{{ index (split .Description "") 1 | plainmarkdown | trimspace | prefixlines " " }} +{{- else -}} +{{ .Description | plainmarkdown | trimspace | prefixlines " " }} +{{- end }} +--- + +!> **Note** A network rule cannot be dropped successfully if it is currently assigned to a network policy. Currently, the provider does not unassign such objects automatically. Before dropping the resource, first unassign the network rule from the relevant objects. See [guide](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/latest/docs/guides/unassigning_policies) for more details. + +# {{.Name}} ({{.Type}}) + +{{ .Description | trimspace }} + +{{ if .HasExample -}} +## Example Usage + +{{ tffile (printf "examples/resources/%s/resource.tf" .Name)}} +-> **Note** Instead of using fully_qualified_name, you can reference objects managed outside Terraform by constructing a correct ID, consult [identifiers guide](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/latest/docs/guides/identifiers#new-computed-fully-qualified-name-field-in-resources). + + +{{- end }} + +{{ .SchemaMarkdown | trimspace }} +{{- if .HasImport }} + +## Import + +Import is supported using the following syntax: + +{{ codefile "shell" (printf "examples/resources/%s/import.sh" .Name)}} +{{- end }} diff --git a/templates/resources/oauth_integration_for_custom_clients.md.tmpl b/templates/resources/oauth_integration_for_custom_clients.md.tmpl index 28e2af568d..dc107a14ad 100644 --- a/templates/resources/oauth_integration_for_custom_clients.md.tmpl +++ b/templates/resources/oauth_integration_for_custom_clients.md.tmpl @@ -11,6 +11,10 @@ description: |- !> **V1 release candidate** This resource was reworked and is a release candidate for the V1. We do not expect significant changes in it before the V1. We will welcome any feedback and adjust the resource if needed. Any errors reported will be resolved with a higher priority. We encourage checking this resource out before the V1 release. Please follow the [migration guide](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/MIGRATION_GUIDE.md#v0920--v0930) to use it. +!> **Note** Setting a network policy with lowercase letters does not work correctly in Snowflake (see [issue](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/3229)). As a workaround, set the network policy with uppercase letters only, or use unsafe_execute with the network policy ID wrapped in `'`. + +!> **Note** The provider does not detect external changes on security integration type. In this case, remove the integration of wrong type manually with `terraform destroy` and recreate the resource. It will be addressed in the future. 
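The lowercase network policy workaround above is, at its core, an identifier-quoting problem: the SDK renders identifiers double-quoted, and Snowflake resolves a quoted lowercase name case-sensitively. A minimal sketch using the provider's `sdk` package; the values in the comments are our expectation, not captured output:

```go
package main

import (
	"fmt"

	"github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk"
)

func main() {
	// Identifiers rendered by the SDK are double-quoted, so a lowercase name
	// becomes a case-sensitive identifier on the Snowflake side.
	upper := sdk.NewAccountObjectIdentifier("MY_POLICY")
	lower := sdk.NewAccountObjectIdentifier("my_policy")

	fmt.Println(upper.FullyQualifiedName()) // expected: "MY_POLICY"
	fmt.Println(lower.FullyQualifiedName()) // expected: "my_policy" (case-sensitive)
}
```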
+ # {{.Name}} ({{.Type}}) {{ .Description | trimspace }} diff --git a/templates/resources/oauth_integration_for_partner_applications.md.tmpl b/templates/resources/oauth_integration_for_partner_applications.md.tmpl index 28e2af568d..fe7454c7f2 100644 --- a/templates/resources/oauth_integration_for_partner_applications.md.tmpl +++ b/templates/resources/oauth_integration_for_partner_applications.md.tmpl @@ -11,6 +11,8 @@ description: |- !> **V1 release candidate** This resource was reworked and is a release candidate for the V1. We do not expect significant changes in it before the V1. We will welcome any feedback and adjust the resource if needed. Any errors reported will be resolved with a higher priority. We encourage checking this resource out before the V1 release. Please follow the [migration guide](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/MIGRATION_GUIDE.md#v0920--v0930) to use it. +!> **Note** The provider does not detect external changes on security integration type. In this case, remove the integration of wrong type manually with `terraform destroy` and recreate the resource. It will be addressed in the future. + # {{.Name}} ({{.Type}}) {{ .Description | trimspace }} diff --git a/templates/resources/password_policy.md.tmpl b/templates/resources/password_policy.md.tmpl index 28771e2c07..2dbed59233 100644 --- a/templates/resources/password_policy.md.tmpl +++ b/templates/resources/password_policy.md.tmpl @@ -9,8 +9,7 @@ description: |- {{- end }} --- -> [!WARNING] -> According to Snowflake [docs](https://docs.snowflake.com/en/sql-reference/sql/drop-password-policy#usage-notes), a password policy cannot be dropped successfully if it is currently assigned to another object. Currently, the provider does not unassign such objects automatically. Before dropping the resource, list the assigned objects with `SELECT * from table(information_schema.policy_references(policy_name=>''));` and unassign them manually with `ALTER ...` or with updated Terraform configuration, if possible. +!> **Note** According to Snowflake [docs](https://docs.snowflake.com/en/sql-reference/sql/drop-password-policy#usage-notes), a password policy cannot be dropped successfully if it is currently assigned to another object. Currently, the provider does not unassign such objects automatically. Before dropping the resource, first unassign the policy from the relevant objects. See [guide](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/latest/docs/guides/unassigning_policies) for more details. # {{.Name}} ({{.Type}}) diff --git a/templates/resources/primary_connection.md.tmpl b/templates/resources/primary_connection.md.tmpl index a4e271811a..76cb507a90 100644 --- a/templates/resources/primary_connection.md.tmpl +++ b/templates/resources/primary_connection.md.tmpl @@ -22,7 +22,7 @@ description: |- -> **Note** Instead of using fully_qualified_name, you can reference objects managed outside Terraform by constructing a correct ID, consult [identifiers guide](../docs/guides/identifiers#new-computed-fully-qualified-name-field-in-resources). --> **Note** To demote `snowflake_primary_connection` to [`snowflake_secondary_connection`](./secondary_connection), resources need to be migrated manually. For guidance on removing and importing resources into the state check [resource migration](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/resource_migration.md). 
Remove the resource from the state, then recreate it in manually using: +-> **Note** To demote `snowflake_primary_connection` to [`snowflake_secondary_connection`](./secondary_connection), resources need to be migrated manually. For guidance on removing and importing resources into the state check [resource migration](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/resource_migration.md). Remove the resource from the state with [terraform state rm](https://developer.hashicorp.com/terraform/cli/commands/state/rm), then recreate it manually using: ``` CREATE CONNECTION AS REPLICA OF ..; ``` diff --git a/templates/resources/row_access_policy.md.tmpl b/templates/resources/row_access_policy.md.tmpl index eed337762b..eeff47ba51 100644 --- a/templates/resources/row_access_policy.md.tmpl +++ b/templates/resources/row_access_policy.md.tmpl @@ -11,8 +11,7 @@ description: |- !> **V1 release candidate** This resource was reworked and is a release candidate for the V1. We do not expect significant changes in it before the V1. We will welcome any feedback and adjust the resource if needed. Any errors reported will be resolved with a higher priority. We encourage checking this resource out before the V1 release. Please follow the [migration guide](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/MIGRATION_GUIDE.md#v0950--v0960) to use it. -> [!WARNING] -> According to Snowflake [docs](https://docs.snowflake.com/en/sql-reference/sql/drop-row-access-policy#usage-notes), a row access policy cannot be dropped successfully if it is currently assigned to another object. Currently, the provider does not unassign such objects automatically. Before dropping the resource, list the assigned objects with `SELECT * from table(information_schema.policy_references(policy_name=>''));` and unassign them manually with `ALTER ...` or with updated Terraform configuration, if possible. +!> **Note** According to Snowflake [docs](https://docs.snowflake.com/en/sql-reference/sql/drop-row-access-policy#usage-notes), a row access policy cannot be dropped successfully if it is currently assigned to another object. Currently, the provider does not unassign such objects automatically. Before dropping the resource, first unassign the policy from the relevant objects. See [guide](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/latest/docs/guides/unassigning_policies) for more details. # {{.Name}} ({{.Type}}) diff --git a/templates/resources/saml2_integration.md.tmpl b/templates/resources/saml2_integration.md.tmpl index 28e2af568d..fe7454c7f2 100644 --- a/templates/resources/saml2_integration.md.tmpl +++ b/templates/resources/saml2_integration.md.tmpl @@ -11,6 +11,8 @@ description: |- !> **V1 release candidate** This resource was reworked and is a release candidate for the V1. We do not expect significant changes in it before the V1. We will welcome any feedback and adjust the resource if needed. Any errors reported will be resolved with a higher priority. We encourage checking this resource out before the V1 release. Please follow the [migration guide](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/MIGRATION_GUIDE.md#v0920--v0930) to use it. +!> **Note** The provider does not detect external changes on security integration type. 
+ # {{.Name}} ({{.Type}}) {{ .Description | trimspace }} diff --git a/templates/resources/schema.md.tmpl b/templates/resources/schema.md.tmpl index d1045d341c..dea0e19916 100644 --- a/templates/resources/schema.md.tmpl +++ b/templates/resources/schema.md.tmpl @@ -11,6 +11,12 @@ description: |- !> **V1 release candidate** This resource was reworked and is a release candidate for the V1. We do not expect significant changes in it before the V1. We will welcome any feedback and adjust the resource if needed. Any errors reported will be resolved with a higher priority. We encourage checking this resource out before the V1 release. Please follow the [migration guide](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/MIGRATION_GUIDE.md#v0930--v0940) to use it. + +-> **Note** Field `CLASSIFICATION_ROLE` is currently missing. It will be added in the future. + +!> **Note** A schema cannot be dropped successfully if it contains network rule-network policy associations. The error looks like `098508 (2BP01): Cannot drop schema SCHEMA as it includes network rule - policy associations. +`. Currently, the provider does not unassign such objects automatically. Before dropping the resource, first unassign the network rule from the relevant objects. See [guide](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/latest/docs/guides/unassigning_policies) for more details. + # {{.Name}} ({{.Type}}) {{ .Description | trimspace }} diff --git a/templates/resources/scim_integration.md.tmpl b/templates/resources/scim_integration.md.tmpl index 28e2af568d..fe7454c7f2 100644 --- a/templates/resources/scim_integration.md.tmpl +++ b/templates/resources/scim_integration.md.tmpl @@ -11,6 +11,8 @@ description: |- !> **V1 release candidate** This resource was reworked and is a release candidate for the V1. We do not expect significant changes in it before the V1. We will welcome any feedback and adjust the resource if needed. Any errors reported will be resolved with a higher priority. We encourage checking this resource out before the V1 release. Please follow the [migration guide](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/MIGRATION_GUIDE.md#v0920--v0930) to use it. +!> **Note** The provider does not detect external changes on security integration type. In this case, remove the integration of wrong type manually with `terraform destroy` and recreate the resource. It will be addressed in the future. + # {{.Name}} ({{.Type}}) {{ .Description | trimspace }} diff --git a/templates/resources/secondary_connection.md.tmpl b/templates/resources/secondary_connection.md.tmpl index b6955369ea..d54554f7d1 100644 --- a/templates/resources/secondary_connection.md.tmpl +++ b/templates/resources/secondary_connection.md.tmpl @@ -22,7 +22,7 @@ description: |- -> **Note** Instead of using fully_qualified_name, you can reference objects managed outside Terraform by constructing a correct ID, consult [identifiers guide](../guides/identifiers#new-computed-fully-qualified-name-field-in-resources). --> **Note** To promote `snowflake_secondary_connection` to [`snowflake_primary_connection`](./primary_connection), resources need to be migrated manually. For guidance on removing and importing resources into the state check [resource migration](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/resource_migration.md). 
Remove the resource from the state, then promote it manually using: +-> **Note** To promote `snowflake_secondary_connection` to [`snowflake_primary_connection`](./primary_connection), resources need to be migrated manually. For guidance on removing and importing resources into the state check [resource migration](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/resource_migration.md). Remove the resource from the state with [terraform state rm](https://developer.hashicorp.com/terraform/cli/commands/state/rm), then promote it manually using: ``` ALTER CONNECTION PRIMARY; ``` diff --git a/templates/resources/secondary_database.md.tmpl b/templates/resources/secondary_database.md.tmpl index 35c607137b..acb3bfb61c 100644 --- a/templates/resources/secondary_database.md.tmpl +++ b/templates/resources/secondary_database.md.tmpl @@ -12,6 +12,11 @@ description: |- !> **V1 release candidate** This resource was reworked and is a release candidate for the V1. We do not expect significant changes in it before the V1. We will welcome any feedback and adjust the resource if needed. Any errors reported will be resolved with a higher priority. We encourage checking this resource out before the V1 release. Please follow the [migration guide](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/MIGRATION_GUIDE.md#v0920--v0930) to use it. +!> **Note** The provider does not detect external changes on database type. In this case, remove the database of wrong type manually with `terraform destroy` and recreate the resource. It will be addressed in the future. + +!> **Note** A database cannot be dropped successfully if it contains network rule-network policy associations. The error looks like `098507 (2BP01): Cannot drop database DATABASE as it includes network rule - policy associations. +`. Currently, the provider does not unassign such objects automatically. Before dropping the resource, first unassign the network rule from the relevant objects. See [guide](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/latest/docs/guides/unassigning_policies) for more details. + # {{.Name}} ({{.Type}}) ~> **Note** The snowflake_secondary_database resource doesn't refresh itself, as the best practice is to use tasks scheduled for a certain interval. Check out the examples to see how to set up the refresh task. For SQL-based replication guide, see the [official documentation](https://docs.snowflake.com/en/user-guide/db-replication-config#replicating-a-database-to-another-account). diff --git a/templates/resources/shared_database.md.tmpl b/templates/resources/shared_database.md.tmpl index 28e2af568d..719a06f49e 100644 --- a/templates/resources/shared_database.md.tmpl +++ b/templates/resources/shared_database.md.tmpl @@ -11,6 +11,11 @@ description: |- !> **V1 release candidate** This resource was reworked and is a release candidate for the V1. We do not expect significant changes in it before the V1. We will welcome any feedback and adjust the resource if needed. Any errors reported will be resolved with a higher priority. We encourage checking this resource out before the V1 release. Please follow the [migration guide](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/MIGRATION_GUIDE.md#v0920--v0930) to use it. +!> **Note** The provider does not detect external changes on database type. In this case, remove the database of wrong type manually with `terraform destroy` and recreate the resource. 
It will be addressed in the future. + +!> **Note** A database cannot be dropped successfully if it contains network rule-network policy associations. The error looks like `098507 (2BP01): Cannot drop database DATABASE as it includes network rule - policy associations. +`. Currently, the provider does not unassign such objects automatically. Before dropping the resource, first unassign the network rule from the relevant objects. See [guide](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/latest/docs/guides/unassigning_policies) for more details. + # {{.Name}} ({{.Type}}) {{ .Description | trimspace }} diff --git a/templates/resources/stream_on_directory_table.md.tmpl b/templates/resources/stream_on_directory_table.md.tmpl index be9cb3fb69..6a7aa75378 100644 --- a/templates/resources/stream_on_directory_table.md.tmpl +++ b/templates/resources/stream_on_directory_table.md.tmpl @@ -11,6 +11,8 @@ description: |- !> **V1 release candidate** This resource was reworked and is a release candidate for the V1. We do not expect significant changes in it before the V1. We will welcome any feedback and adjust the resource if needed. Any errors reported will be resolved with a higher priority. We encourage checking this resource out before the V1 release. Please follow the [migration guide](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/MIGRATION_GUIDE.md#v0970--v0980) to use it. +~> **Note about copy_grants** Fields like `stage` and `stale` can not be ALTERed on Snowflake side (check [docs](https://docs.snowflake.com/en/sql-reference/sql/alter-stream)), and a change on these fields means recreation of the resource. ForceNew can not be used because it does not preserve grants from `copy_grants`. Beware that even though a change is marked as update, the resource is recreated. + # {{.Name}} ({{.Type}}) {{ .Description | trimspace }} diff --git a/templates/resources/stream_on_external_table.md.tmpl b/templates/resources/stream_on_external_table.md.tmpl index c82f23be97..8a062a52e7 100644 --- a/templates/resources/stream_on_external_table.md.tmpl +++ b/templates/resources/stream_on_external_table.md.tmpl @@ -11,7 +11,7 @@ description: |- !> **V1 release candidate** This resource was reworked and is a release candidate for the V1. We do not expect significant changes in it before the V1. We will welcome any feedback and adjust the resource if needed. Any errors reported will be resolved with a higher priority. We encourage checking this resource out before the V1 release. Please follow the [migration guide](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/MIGRATION_GUIDE.md#v0960--v0970) to use it. -!> **Note about copy_grants** Fields like `external_table`, `insert_only`, `at`, `before` can not be ALTERed on Snowflake side (check [docs](https://docs.snowflake.com/en/sql-reference/sql/alter-stream)), and a change means recreation of the resource. ForceNew can not be used because it does not preserve grants from `copy_grants`. Beware that even though a change is marked as update, the resource is recreated. +~> **Note about copy_grants** Fields like `external_table`, `insert_only`, `at`, `before` and `stale` can not be ALTERed on Snowflake side (check [docs](https://docs.snowflake.com/en/sql-reference/sql/alter-stream)), and a change on these fields means recreation of the resource. ForceNew can not be used because it does not preserve grants from `copy_grants`. Beware that even though a change is marked as update, the resource is recreated. 
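The copy_grants notes above describe recreating a stream instead of altering it so that existing privileges survive. A minimal sketch of that recreation step in plain SQL from Go, assuming an open `database/sql` connection and placeholder object names:

```go
import "database/sql"

// recreateStream rebuilds a stream in place. COPY GRANTS keeps the privileges
// that a plain drop-and-create (ForceNew) would lose.
func recreateStream(db *sql.DB) error {
	_, err := db.Exec(`CREATE OR REPLACE STREAM my_db.my_schema.my_stream
	COPY GRANTS
	ON TABLE my_db.my_schema.my_table
	APPEND_ONLY = TRUE`)
	return err
}
```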
# {{.Name}} ({{.Type}}) diff --git a/templates/resources/stream_on_table.md.tmpl b/templates/resources/stream_on_table.md.tmpl index 53dd2b9daf..270789e8c5 100644 --- a/templates/resources/stream_on_table.md.tmpl +++ b/templates/resources/stream_on_table.md.tmpl @@ -11,7 +11,7 @@ description: |- !> **V1 release candidate** This resource was reworked and is a release candidate for the V1. We do not expect significant changes in it before the V1. We will welcome any feedback and adjust the resource if needed. Any errors reported will be resolved with a higher priority. We encourage checking this resource out before the V1 release. Please follow the [migration guide](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/MIGRATION_GUIDE.md#v0960--v0970) to use it. -!> **Note about copy_grants** Fields like `table`, `append_only`, `at`, `before`, `show_initial_rows` can not be ALTERed on Snowflake side (check [docs](https://docs.snowflake.com/en/sql-reference/sql/alter-stream)), and a change means recreation of the resource. ForceNew can not be used because it does not preserve grants from `copy_grants`. Beware that even though a change is marked as update, the resource is recreated. +~> **Note about copy_grants** Fields like `table`, `append_only`, `at`, `before`, `show_initial_rows` and `stale` can not be ALTERed on Snowflake side (check [docs](https://docs.snowflake.com/en/sql-reference/sql/alter-stream)), and a change on these fields means recreation of the resource. ForceNew can not be used because it does not preserve grants from `copy_grants`. Beware that even though a change is marked as update, the resource is recreated. # {{.Name}} ({{.Type}}) diff --git a/templates/resources/stream_on_view.md.tmpl b/templates/resources/stream_on_view.md.tmpl index be9cb3fb69..31e3a88a68 100644 --- a/templates/resources/stream_on_view.md.tmpl +++ b/templates/resources/stream_on_view.md.tmpl @@ -11,6 +11,8 @@ description: |- !> **V1 release candidate** This resource was reworked and is a release candidate for the V1. We do not expect significant changes in it before the V1. We will welcome any feedback and adjust the resource if needed. Any errors reported will be resolved with a higher priority. We encourage checking this resource out before the V1 release. Please follow the [migration guide](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/MIGRATION_GUIDE.md#v0970--v0980) to use it. +~> **Note about copy_grants** Fields like `view`, `append_only`, `at`, `before`, `show_initial_rows` and `stale` can not be ALTERed on Snowflake side (check [docs](https://docs.snowflake.com/en/sql-reference/sql/alter-stream)), and a change on these fields means recreation of the resource. ForceNew can not be used because it does not preserve grants from `copy_grants`. Beware that even though a change is marked as update, the resource is recreated. + # {{.Name}} ({{.Type}}) {{ .Description | trimspace }} diff --git a/templates/resources/streamlit.md.tmpl b/templates/resources/streamlit.md.tmpl index d1045d341c..dfcd9aef7f 100644 --- a/templates/resources/streamlit.md.tmpl +++ b/templates/resources/streamlit.md.tmpl @@ -11,6 +11,12 @@ description: |- !> **V1 release candidate** This resource was reworked and is a release candidate for the V1. We do not expect significant changes in it before the V1. We will welcome any feedback and adjust the resource if needed. Any errors reported will be resolved with a higher priority. 
We encourage checking this resource out before the V1 release. Please follow the [migration guide](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/MIGRATION_GUIDE.md#v0930--v0940) to use it.
+
+!> **Note** Setting a query warehouse with lowercase letters does not work correctly in Snowflake. As a workaround, set the query warehouse with uppercase letters only, or use unsafe_execute with the query warehouse ID wrapped in `'`.
+
+
+-> **Note** Field `IMPORTS` is currently missing. It will be added in the future.
+
 # {{.Name}} ({{.Type}})

 {{ .Description | trimspace }}
diff --git a/templates/resources/tag.md.tmpl b/templates/resources/tag.md.tmpl
index 7a876a0017..d040e9d5b0 100644
--- a/templates/resources/tag.md.tmpl
+++ b/templates/resources/tag.md.tmpl
@@ -11,6 +11,8 @@ description: |-

 !> **V1 release candidate** This resource was reworked and is a release candidate for the V1. We do not expect significant changes in it before the V1. We will welcome any feedback and adjust the resource if needed. Any errors reported will be resolved with a higher priority. We encourage checking this resource out before the V1 release. Please follow the [migration guide](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/MIGRATION_GUIDE.md#v0980--v0990) to use it.

+~> **Required warehouse** For this resource, the provider now uses [tag references](https://docs.snowflake.com/en/sql-reference/functions/tag_references) to get information about masking policies attached to tags. This function requires a warehouse in the connection. Please make sure you have either set a `DEFAULT_WAREHOUSE` for the user or specified a warehouse in the provider configuration.
+
 # {{.Name}} ({{.Type}})

 {{ .Description | trimspace }}
diff --git a/templates/resources/view.md.tmpl b/templates/resources/view.md.tmpl
index 636271fb96..f9d69cd5de 100644
--- a/templates/resources/view.md.tmpl
+++ b/templates/resources/view.md.tmpl
@@ -11,10 +11,10 @@ description: |-

 !> **V1 release candidate** This resource was reworked and is a release candidate for the V1. We do not expect significant changes in it before the V1. We will welcome any feedback and adjust the resource if needed. Any errors reported will be resolved with a higher priority. We encourage checking this resource out before the V1 release. Please follow the [migration guide](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/MIGRATION_GUIDE.md#v094x--v0950) to use it.

-!> **Note about copy_grants** Fields like `is_recursive`, `is_temporary`, `copy_grants` and `statement` can not be ALTERed on Snowflake side (check [docs](https://docs.snowflake.com/en/sql-reference/sql/alter-view)), and a change means recreation of the resource. ForceNew can not be used because it does not preserve grants from `copy_grants`. Beware that even though a change is marked as update, the resource is recreated.
-
 !> Due to Snowflake limitations, to properly compute diff on `statement` field, the provider parses a `text` field which contains the whole CREATE query used to create the resource. We recommend not using special characters, especially `(`, `,`, `)` in any of the fields, if possible.

+~> **Note about copy_grants** Fields like `is_recursive`, `is_temporary`, `copy_grants` and `statement` cannot be ALTERed on the Snowflake side (check [docs](https://docs.snowflake.com/en/sql-reference/sql/alter-view)), and a change on these fields means recreation of the resource.
ForceNew cannot be used because it does not preserve grants from `copy_grants`. Beware that even though a change is marked as an update, the resource is recreated.
+
 ~> **Required warehouse** For this resource, the provider uses [policy references](https://docs.snowflake.com/en/sql-reference/functions/policy_references) which requires a warehouse in the connection. Please, make sure you have either set a DEFAULT_WAREHOUSE for the user, or specified a warehouse in the provider configuration.

 # {{.Name}} ({{.Type}})
diff --git a/templates/resources/warehouse.md.tmpl b/templates/resources/warehouse.md.tmpl
index 28e2af568d..21df1ad83c 100644
--- a/templates/resources/warehouse.md.tmpl
+++ b/templates/resources/warehouse.md.tmpl
@@ -11,6 +11,12 @@ description: |-

 !> **V1 release candidate** This resource was reworked and is a release candidate for the V1. We do not expect significant changes in it before the V1. We will welcome any feedback and adjust the resource if needed. Any errors reported will be resolved with a higher priority. We encourage checking this resource out before the V1 release. Please follow the [migration guide](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/MIGRATION_GUIDE.md#v0920--v0930) to use it.
+
+-> **Note** Field `RESOURCE_CONSTRAINT` is currently missing. It will be added in the future.
+
+
+-> **Note** Assigning resource monitors to warehouses requires the ACCOUNTADMIN role. To do this, either manage the warehouse resource with the ACCOUNTADMIN role or use [unsafe_execute](./unsafe_execute) instead. See [this issue](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/3019) for more details.
+
 # {{.Name}} ({{.Type}})

 {{ .Description | trimspace }}
diff --git a/v1-preparations/ESSENTIAL_GA_OBJECTS.MD b/v1-preparations/ESSENTIAL_GA_OBJECTS.MD
index 2f69537f57..c020bce7b3 100644
--- a/v1-preparations/ESSENTIAL_GA_OBJECTS.MD
+++ b/v1-preparations/ESSENTIAL_GA_OBJECTS.MD
@@ -33,7 +33,7 @@ newer provider versions.
We will address these while working on the given object | STREAMLIT | 🚀 | - | | TABLE | 👨‍💻 | [#2997](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2997), [#2844](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2844), [#2839](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2839), [#2735](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2735), [#2733](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2733), [#2683](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2683), [#2676](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2676), [#2674](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2674), [#2629](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2629), [#2418](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2418), [#2415](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2415), [#2406](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2406), [#2236](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2236), [#2035](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2035), [#1823](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1823), [#1799](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1799), [#1764](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1764), [#1600](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1600), [#1387](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1387), [#1272](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1272), [#1271](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1271), [#1248](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1248), [#1241](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1241), [#1146](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1146), [#1032](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1032), [#420](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/420) | | TAG | 👨‍💻 | [#2943](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2943), [#2598](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2598), [#1910](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1910), [#1909](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1909), [#1862](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1862), [#1806](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1806), [#1657](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1657), [#1496](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1496), [#1443](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1443), [#1394](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1394), [#1372](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1372), [#1074](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1074) | -| TASK | 👨‍💻 | [#3136](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/3136), [#1419](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1419), 
[#1250](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1250), [#1194](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1194), [#1088](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1088) | +| TASK | 🚀 | [#3136](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/3136), [#1419](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1419), [#1250](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1250), [#1194](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1194), [#1088](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1088) | | VIEW | 🚀 | issues in the older versions: [resources](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues?q=label%3Aresource%3Aview+) and [datasources](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues?q=label%3Adata_source%3Aviews+) | | snowflake_unsafe_execute | 👨‍💻 | [#2934](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2934) | diff --git a/v1-preparations/LIST_OF_PREVIEW_FEATURES_FOR_V1.md b/v1-preparations/LIST_OF_PREVIEW_FEATURES_FOR_V1.md index dcb77aa33d..8eb3d34669 100644 --- a/v1-preparations/LIST_OF_PREVIEW_FEATURES_FOR_V1.md +++ b/v1-preparations/LIST_OF_PREVIEW_FEATURES_FOR_V1.md @@ -45,6 +45,8 @@ * [snowflake_system_get_aws_sns_iam_policy](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.97.0/docs/data-sources/system_get_aws_sns_iam_policy) (datasource) * [snowflake_system_get_privatelink_config](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.97.0/docs/data-sources/system_get_privatelink_config) (datasource) * [snowflake_system_get_snowflake_platform_info](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.97.0/docs/data-sources/system_get_snowflake_platform_info) (datasource) +* [snowflake_table](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.98.0/docs/resources/table) +* [snowflake_tables](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.98.0/docs/data-sources/tables) (datasource) * [snowflake_table_column_masking_policy_application](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.97.0/docs/resources/table_column_masking_policy_application) * [snowflake_table_constraint](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.97.0/docs/resources/table_constraint) (undecided - may be deleted instead) * [snowflake_user_public_keys](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.97.0/docs/resources/user_public_keys) diff --git a/v1-preparations/LIST_OF_STABLE_RESOURCES_FOR_V1.md b/v1-preparations/LIST_OF_STABLE_RESOURCES_FOR_V1.md index b196a5a418..bb7a9d5426 100644 --- a/v1-preparations/LIST_OF_STABLE_RESOURCES_FOR_V1.md +++ b/v1-preparations/LIST_OF_STABLE_RESOURCES_FOR_V1.md @@ -1,6 +1,6 @@ We estimate the given list to be accurate, but it may be subject to small changes: -* Account (in progress) +* Account * [snowflake_account](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.98.0/docs/resources/account) * [snowflake_accounts](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.98.0/docs/data-sources/accounts) (datasource) * Connection @@ -37,7 +37,7 @@ We estimate the given list to be accurate, but it may be subject to small change * Network Policy * 
[snowflake_network_policy](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.98.0/docs/resources/network_policy) * [snowflake_network_policies](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.98.0/docs/data-sources/network_policies) (datasource) -* Procedure (in progress) +* Procedure (in progress) * snowflake_procedure_java * snowflake_procedure_javascript * snowflake_procedure_python @@ -85,14 +85,11 @@ We estimate the given list to be accurate, but it may be subject to small change * Streamlit * [snowflake_streamlit](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.98.0/docs/resources/streamlit) * [snowflake_streamlits](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.98.0/docs/data-sources/streamlits) (datasource) -* Table (in progress) - * [snowflake_table](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.98.0/docs/resources/table) - * [snowflake_tables](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.98.0/docs/data-sources/tables) (datasource) -* Tag (in progress) +* Tag * [snowflake_tag](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.98.0/docs/resources/tag) * [snowflake_tag_association](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.98.0/docs/resources/tag_association) * [snowflake_tags](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.98.0/docs/data-sources/tags) (datasource) -* Task (in progress) +* Task * [snowflake_task](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.98.0/docs/resources/task) * [snowflake_tasks](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/0.98.0/docs/data-sources/tasks) (datasource) * User