From 151ce84eb78dfb00aafd0b3c72384377cfc6b779 Mon Sep 17 00:00:00 2001 From: Jakub Michalak Date: Tue, 22 Oct 2024 13:18:53 +0200 Subject: [PATCH 1/5] wip --- MIGRATION_GUIDE.md | 16 + pkg/datasources/common.go | 85 ++++ pkg/datasources/streams.go | 120 ++--- pkg/datasources/streams_acceptance_test.go | 414 +++++++++++++++--- .../TestAcc_Streams/non_existing/test.tf | 10 + .../TestAcc_Streams/optionals_set/test.tf | 16 + .../optionals_set/variables.tf | 23 + .../TestAcc_Streams/optionals_unset/test.tf | 17 + .../optionals_unset/variables.tf | 23 + templates/data-sources/streams.md.tmpl | 24 + 10 files changed, 640 insertions(+), 108 deletions(-) create mode 100644 pkg/datasources/testdata/TestAcc_Streams/non_existing/test.tf create mode 100644 pkg/datasources/testdata/TestAcc_Streams/optionals_set/test.tf create mode 100644 pkg/datasources/testdata/TestAcc_Streams/optionals_set/variables.tf create mode 100644 pkg/datasources/testdata/TestAcc_Streams/optionals_unset/test.tf create mode 100644 pkg/datasources/testdata/TestAcc_Streams/optionals_unset/variables.tf create mode 100644 templates/data-sources/streams.md.tmpl diff --git a/MIGRATION_GUIDE.md b/MIGRATION_GUIDE.md index a6145e2b1f..1b3e733777 100644 --- a/MIGRATION_GUIDE.md +++ b/MIGRATION_GUIDE.md @@ -9,6 +9,22 @@ across different versions. ## v0.97.0 ➞ v0.98.0 +### snowflake_masking_policies data source changes +New filtering options: +- `in` +- `limit` +- `with_describe` + +New output fields: +- `show_output` +- `describe_output` + +Breaking changes: +- `database` and `schema` are now nested under the `in` field +- the `streams` field now organizes the output of SHOW under the `show_output` field and the output of DESCRIBE under the `describe_output` field. + +Please adjust your Terraform configuration files. + ### *(behavior change)* handling copy_grants Currently, resources like `snowflake_view`, `snowflake_stream_on_table`, `snowflake_stream_on_external_table` and `snowflake_stream_on_directory_table` support `copy_grants` field corresponding with `COPY GRANTS` during `CREATE`. The current behavior is that, when a change leading for recreation is detected (meaning a change that can not be handled by ALTER, but only by `CREATE OR REPLACE`), `COPY GRANTS` are used during recreation when `copy_grants` is set to `true`. Changing this field without changes in other field results in a noop because in this case there is no need to recreate a resource.
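For readers adjusting configurations per the streams data source migration notes above, a minimal before/after sketch might look like the following. This is an illustration only and is not part of the changeset; the `DB`/`SCHEMA` identifiers and the output blocks are hypothetical.

```terraform
# Before (v0.97.x): database and schema were top-level required arguments,
# and stream fields were read from a flat list.
data "snowflake_streams" "example" {
  database = "DB"
  schema   = "SCHEMA"
}

output "first_stream_name" {
  value = data.snowflake_streams.example.streams[0].name
}

# After (v0.98.x): scoping moves into the `in` block, and the SHOW/DESCRIBE
# results are nested under `show_output` and `describe_output`.
data "snowflake_streams" "example" {
  in {
    schema = "\"DB\".\"SCHEMA\"" # fully qualified schema name
  }
}

output "first_stream_name" {
  value = data.snowflake_streams.example.streams[0].show_output[0].name
}
```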
diff --git a/pkg/datasources/common.go b/pkg/datasources/common.go index eebc540006..6f8ab20169 100644 --- a/pkg/datasources/common.go +++ b/pkg/datasources/common.go @@ -1,10 +1,89 @@ package datasources import ( + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/resources" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" ) +var likeSchema = &schema.Schema{ + Type: schema.TypeString, + Optional: true, + Description: "Filters the output with **case-insensitive** pattern, with support for SQL wildcard characters (`%` and `_`).", +} + +var extendedInSchema = &schema.Schema{ + Type: schema.TypeList, + Optional: true, + Description: "IN clause to filter the list of objects", + MaxItems: 1, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "account": { + Type: schema.TypeBool, + Optional: true, + Description: "Returns records for the entire account.", + ExactlyOneOf: []string{"in.0.account", "in.0.database", "in.0.schema", "in.0.application", "in.0.application_package"}, + }, + "database": { + Type: schema.TypeString, + Optional: true, + Description: "Returns records for the current database in use or for a specified database.", + ExactlyOneOf: []string{"in.0.account", "in.0.database", "in.0.schema", "in.0.application", "in.0.application_package"}, + ValidateDiagFunc: resources.IsValidIdentifier[sdk.AccountObjectIdentifier](), + }, + "schema": { + Type: schema.TypeString, + Optional: true, + Description: "Returns records for the current schema in use or a specified schema. Use fully qualified name.", + ExactlyOneOf: []string{"in.0.account", "in.0.database", "in.0.schema", "in.0.application", "in.0.application_package"}, + ValidateDiagFunc: resources.IsValidIdentifier[sdk.DatabaseObjectIdentifier](), + }, + "application": { + Type: schema.TypeString, + Optional: true, + Description: "Returns records for the specified application.", + ExactlyOneOf: []string{"in.0.account", "in.0.database", "in.0.schema", "in.0.application", "in.0.application_package"}, + ValidateDiagFunc: resources.IsValidIdentifier[sdk.AccountObjectIdentifier](), + }, + "application_package": { + Type: schema.TypeString, + Optional: true, + Description: "Returns records for the specified application package.", + ExactlyOneOf: []string{"in.0.account", "in.0.database", "in.0.schema", "in.0.application", "in.0.application_package"}, + ValidateDiagFunc: resources.IsValidIdentifier[sdk.AccountObjectIdentifier](), + }, + }, + }, +} + +var startsWithSchema = &schema.Schema{ + Type: schema.TypeString, + Optional: true, + Description: "Filters the output with **case-sensitive** characters indicating the beginning of the object name.", +} + +var limitFromSchema = &schema.Schema{ + Type: schema.TypeList, + Optional: true, + Description: "Limits the number of rows returned. If the `limit.from` is set, then the limit will start from the first element matched by the expression. The expression is only used to match with the first element, later on the elements are not matched by the prefix, but you can enforce a certain pattern with `starts_with` or `like`.", + MaxItems: 1, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "rows": { + Type: schema.TypeInt, + Required: true, + Description: "The maximum number of rows to return.", + }, + "from": { + Type: schema.TypeString, + Optional: true, + Description: "Specifies a **case-sensitive** pattern that is used to match object name. 
After the first match, the limit on the number of rows will be applied.", + }, + }, + }, +} + func handleLike(d *schema.ResourceData, setField **sdk.Like) { if likePattern, ok := d.GetOk("like"); ok { *setField = &sdk.Like{ @@ -13,6 +92,12 @@ func handleLike(d *schema.ResourceData, setField **sdk.Like) { } } +func handleStartsWith(d *schema.ResourceData, setField **string) { + if startsWith, ok := d.GetOk("starts_with"); ok { + *setField = sdk.String(startsWith.(string)) + } +} + func handleLimitFrom(d *schema.ResourceData, setField **sdk.LimitFrom) { if v, ok := d.GetOk("limit"); ok { l := v.([]any)[0].(map[string]any) diff --git a/pkg/datasources/streams.go b/pkg/datasources/streams.go index cde2bcbcda..d992f2d669 100644 --- a/pkg/datasources/streams.go +++ b/pkg/datasources/streams.go @@ -2,53 +2,48 @@ package datasources import ( "context" - "fmt" - "log" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/internal/provider" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/resources" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/schemas" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk" + "github.com/hashicorp/terraform-plugin-sdk/v2/diag" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" ) var streamsSchema = map[string]*schema.Schema{ - "database": { - Type: schema.TypeString, - Required: true, - Description: "The database from which to return the streams from.", - }, - "schema": { - Type: schema.TypeString, - Required: true, - Description: "The schema from which to return the streams from.", + "with_describe": { + Type: schema.TypeBool, + Optional: true, + Default: true, + Description: "Runs DESC STREAM for each stream returned by SHOW STREAMS. The output of describe is saved to the description field. By default this value is set to true.", }, + "like": likeSchema, + "in": extendedInSchema, + "starts_with": startsWithSchema, + "limit": limitFromSchema, "streams": { Type: schema.TypeList, Computed: true, - Description: "The streams in the schema", + Description: "Holds the aggregated output of all streams details queries.", Elem: &schema.Resource{ Schema: map[string]*schema.Schema{ - "name": { - Type: schema.TypeString, - Computed: true, - }, - "database": { - Type: schema.TypeString, - Computed: true, - }, - "schema": { - Type: schema.TypeString, - Computed: true, + resources.ShowOutputAttributeName: { + Type: schema.TypeList, + Computed: true, + Description: "Holds the output of SHOW STREAMS.", + Elem: &schema.Resource{ + Schema: schemas.ShowStreamSchema, + }, }, - "comment": { - Type: schema.TypeString, - Optional: true, - Computed: true, - }, - "table": { - Type: schema.TypeString, - Optional: true, - Computed: true, + resources.DescribeOutputAttributeName: { + Type: schema.TypeList, + Computed: true, + Description: "Holds the output of DESCRIBE STREAM.", + Elem: &schema.Resource{ + Schema: schemas.DescribeStreamSchema, + }, }, }, }, @@ -57,40 +52,49 @@ var streamsSchema = map[string]*schema.Schema{ func Streams() *schema.Resource { return &schema.Resource{ - Read: ReadStreams, - Schema: streamsSchema, + ReadContext: ReadStreams, + Schema: streamsSchema, + Description: "Datasource used to get details of filtered streams. Filtering is aligned with the current possibilities for [SHOW STREAMS](https://docs.snowflake.com/en/sql-reference/sql/show-streams) query. 
The results of SHOW and DESCRIBE are encapsulated in one output collection `streams`.", } } -func ReadStreams(d *schema.ResourceData, meta interface{}) error { +func ReadStreams(ctx context.Context, d *schema.ResourceData, meta any) diag.Diagnostics { client := meta.(*provider.Context).Client - ctx := context.Background() - databaseName := d.Get("database").(string) - schemaName := d.Get("schema").(string) + req := sdk.ShowStreamRequest{} - currentStreams, err := client.Streams.Show(ctx, sdk.NewShowStreamRequest(). - WithIn(sdk.ExtendedIn{ - In: sdk.In{ - Schema: sdk.NewDatabaseObjectIdentifier(databaseName, schemaName), - }, - })) + handleLike(d, &req.Like) + handleLimitFrom(d, &req.Limit) + handleStartsWith(d, &req.StartsWith) + err := handleExtendedIn(d, &req.In) if err != nil { - log.Printf("[DEBUG] streams in schema (%s) not found", d.Id()) - d.SetId("") - return nil + return diag.FromErr(err) } - streams := make([]map[string]any, len(currentStreams)) - for i, stream := range currentStreams { - streams[i] = map[string]any{ - "name": stream.Name, - "database": stream.DatabaseName, - "schema": stream.SchemaName, - "comment": stream.Comment, - "table": stream.TableName, - } + streams, err := client.Streams.Show(ctx, &req) + if err != nil { + return diag.FromErr(err) } + d.SetId("streams_read") + + flattenedStreams := make([]map[string]any, len(streams)) + for i, stream := range streams { + stream := stream + var streamDescriptions []map[string]any + if d.Get("with_describe").(bool) { + describeOutput, err := client.Streams.Describe(ctx, stream.ID()) + if err != nil { + return diag.FromErr(err) + } + streamDescriptions = []map[string]any{schemas.StreamDescriptionToSchema(*describeOutput)} + } - d.SetId(fmt.Sprintf(`%v|%v`, databaseName, schemaName)) - return d.Set("streams", streams) + flattenedStreams[i] = map[string]any{ + resources.ShowOutputAttributeName: []map[string]any{schemas.StreamToSchema(&stream)}, + resources.DescribeOutputAttributeName: streamDescriptions, + } + } + if err := d.Set("streams", flattenedStreams); err != nil { + return diag.FromErr(err) + } + return nil } diff --git a/pkg/datasources/streams_acceptance_test.go b/pkg/datasources/streams_acceptance_test.go index cf2e9326a5..b7203b5a9a 100644 --- a/pkg/datasources/streams_acceptance_test.go +++ b/pkg/datasources/streams_acceptance_test.go @@ -2,81 +2,395 @@ package datasources_test import ( "fmt" + "regexp" "testing" acc "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/bettertestspoc/assert" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/bettertestspoc/assert/resourceshowoutputassert" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/bettertestspoc/config" + testconfig "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/bettertestspoc/config" + tfconfig "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/bettertestspoc/config" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/bettertestspoc/config/model" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/helpers/random" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/testenvs" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/internal/snowflakeroles" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk" "github.com/hashicorp/terraform-plugin-testing/helper/resource" 
"github.com/hashicorp/terraform-plugin-testing/tfversion" ) func TestAcc_Streams(t *testing.T) { - databaseName := acc.TestClient().Ids.Alpha() - schemaName := acc.TestClient().Ids.Alpha() - streamName := acc.TestClient().Ids.Alpha() - tableName := acc.TestClient().Ids.Alpha() + _ = testenvs.GetOrSkipTest(t, testenvs.EnableAcceptance) + acc.TestAccPreCheck(t) + id := acc.TestClient().Ids.RandomSchemaObjectIdentifier() + table, cleanupTable := acc.TestClient().Table.CreateWithChangeTracking(t) + t.Cleanup(cleanupTable) + + streamOnTable := model.StreamOnTable("test", id.DatabaseName(), id.Name(), id.SchemaName(), table.ID().FullyQualifiedName()). + WithAppendOnly("true"). + WithComment("foo") + + dsName := "data.snowflake_streams.test" resource.Test(t, resource.TestCase{ ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, PreCheck: func() { acc.TestAccPreCheck(t) }, TerraformVersionChecks: []tfversion.TerraformVersionCheck{ tfversion.RequireAbove(tfversion.Version1_5_0), }, - CheckDestroy: nil, Steps: []resource.TestStep{ { - Config: streams(databaseName, schemaName, tableName, streamName), + ConfigDirectory: acc.ConfigurationDirectory("TestAcc_Streams/optionals_set"), + ConfigVariables: tfconfig.ConfigVariablesFromModel(t, streamOnTable), + Check: assert.AssertThat(t, + assert.Check(resource.TestCheckResourceAttr(dsName, "streams.#", "1")), + + resourceshowoutputassert.StreamsDatasourceShowOutput(t, "snowflake_streams.test"). + HasCreatedOnNotEmpty(). + HasName(id.Name()). + HasDatabaseName(id.DatabaseName()). + HasSchemaName(id.SchemaName()). + HasOwner(snowflakeroles.Accountadmin.Name()). + HasTableName(table.ID().FullyQualifiedName()). + HasSourceType(sdk.StreamSourceTypeTable). + HasBaseTables([]sdk.SchemaObjectIdentifier{table.ID()}). + HasType("DELTA"). + HasStale("false"). + HasMode(sdk.StreamModeAppendOnly). + HasStaleAfterNotEmpty(). + HasInvalidReason("N/A"). 
+ HasOwnerRoleType("ROLE"), + assert.Check(resource.TestCheckResourceAttrSet(dsName, "streams.0.describe_output.0.created_on")), + assert.Check(resource.TestCheckResourceAttr(dsName, "streams.0.describe_output.0.name", id.Name())), + assert.Check(resource.TestCheckResourceAttr(dsName, "streams.0.describe_output.0.database_name", id.DatabaseName())), + assert.Check(resource.TestCheckResourceAttr(dsName, "streams.0.describe_output.0.schema_name", id.SchemaName())), + assert.Check(resource.TestCheckResourceAttr(dsName, "streams.0.describe_output.0.owner", snowflakeroles.Accountadmin.Name())), + assert.Check(resource.TestCheckResourceAttr(dsName, "streams.0.describe_output.0.comment", "foo")), + assert.Check(resource.TestCheckResourceAttr(dsName, "streams.0.describe_output.0.table_name", table.ID().FullyQualifiedName())), + assert.Check(resource.TestCheckResourceAttr(dsName, "streams.0.describe_output.0.source_type", string(sdk.StreamSourceTypeTable))), + assert.Check(resource.TestCheckResourceAttr(dsName, "streams.0.describe_output.0.base_tables.#", "1")), + assert.Check(resource.TestCheckResourceAttr(dsName, "streams.0.describe_output.0.base_tables.0", table.ID().FullyQualifiedName())), + assert.Check(resource.TestCheckResourceAttr(dsName, "streams.0.describe_output.0.type", "DELTA")), + assert.Check(resource.TestCheckResourceAttr(dsName, "streams.0.describe_output.0.stale", "false")), + assert.Check(resource.TestCheckResourceAttr(dsName, "streams.0.describe_output.0.mode", string(sdk.StreamModeAppendOnly))), + assert.Check(resource.TestCheckResourceAttrSet(dsName, "streams.0.describe_output.0.stale_after")), + assert.Check(resource.TestCheckResourceAttr(dsName, "streams.0.describe_output.0.owner_role_type", "ROLE")), + ), + }, + { + ConfigDirectory: acc.ConfigurationDirectory("TestAcc_Streams/optionals_unset"), + ConfigVariables: tfconfig.ConfigVariablesFromModel(t, streamOnTable), + + Check: assert.AssertThat(t, + assert.Check(resource.TestCheckResourceAttr(dsName, "streams.#", "1")), + + resourceshowoutputassert.StreamsDatasourceShowOutput(t, "snowflake_streams.test"). + HasCreatedOnNotEmpty(). + HasName(id.Name()). + HasDatabaseName(id.DatabaseName()). + HasSchemaName(id.SchemaName()). + HasOwner(snowflakeroles.Accountadmin.Name()). + HasTableName(table.ID().FullyQualifiedName()). + HasSourceType(sdk.StreamSourceTypeTable). + HasBaseTables([]sdk.SchemaObjectIdentifier{table.ID()}). + HasType("DELTA"). + HasStale("false"). + HasMode(sdk.StreamModeAppendOnly). + HasStaleAfterNotEmpty(). + HasInvalidReason("N/A"). + HasOwnerRoleType("ROLE"), + assert.Check(resource.TestCheckResourceAttr(dsName, "streams.0.describe_output.#", "0")), + ), + }, + }, + }) +} + +func TestAcc_StreamOnTable(t *testing.T) { + _ = testenvs.GetOrSkipTest(t, testenvs.EnableAcceptance) + acc.TestAccPreCheck(t) + id := acc.TestClient().Ids.RandomSchemaObjectIdentifier() + + table, cleanupTable := acc.TestClient().Table.CreateWithChangeTracking(t) + t.Cleanup(cleanupTable) + + streamOnTable := model.StreamOnTable("test", id.DatabaseName(), id.Name(), id.SchemaName(), table.ID().FullyQualifiedName()). + WithAppendOnly("true"). 
+ WithComment("foo") + + resourceName := "snowflake_stream_on_table.test" + dsName := "data.snowflake_streams.test" + resource.Test(t, resource.TestCase{ + ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, + PreCheck: func() { acc.TestAccPreCheck(t) }, + TerraformVersionChecks: []tfversion.TerraformVersionCheck{ + tfversion.RequireAbove(tfversion.Version1_5_0), + }, + Steps: []resource.TestStep{ + { + Config: config.FromModel(t, streamOnTable) + streamsDatasource(id.Name(), resourceName), + Check: assert.AssertThat(t, + assert.Check(resource.TestCheckResourceAttr(dsName, "streams.#", "1")), + resourceshowoutputassert.StreamsDatasourceShowOutput(t, "snowflake_streams.test"). + HasCreatedOnNotEmpty(). + HasName(id.Name()). + HasDatabaseName(id.DatabaseName()). + HasSchemaName(id.SchemaName()). + HasOwner(snowflakeroles.Accountadmin.Name()). + HasTableName(table.ID().FullyQualifiedName()). + HasSourceType(sdk.StreamSourceTypeTable). + HasBaseTables([]sdk.SchemaObjectIdentifier{table.ID()}). + HasType("DELTA"). + HasStale("false"). + HasMode(sdk.StreamModeAppendOnly). + HasStaleAfterNotEmpty(). + HasInvalidReason("N/A"). + HasOwnerRoleType("ROLE"), + assert.Check(resource.TestCheckResourceAttrSet(dsName, "streams.0.describe_output.0.created_on")), + assert.Check(resource.TestCheckResourceAttr(dsName, "streams.0.describe_output.0.name", id.Name())), + assert.Check(resource.TestCheckResourceAttr(dsName, "streams.0.describe_output.0.database_name", id.DatabaseName())), + assert.Check(resource.TestCheckResourceAttr(dsName, "streams.0.describe_output.0.schema_name", id.SchemaName())), + assert.Check(resource.TestCheckResourceAttr(dsName, "streams.0.describe_output.0.owner", snowflakeroles.Accountadmin.Name())), + assert.Check(resource.TestCheckResourceAttr(dsName, "streams.0.describe_output.0.comment", "foo")), + assert.Check(resource.TestCheckResourceAttr(dsName, "streams.0.describe_output.0.table_name", table.ID().FullyQualifiedName())), + assert.Check(resource.TestCheckResourceAttr(dsName, "streams.0.describe_output.0.source_type", string(sdk.StreamSourceTypeTable))), + assert.Check(resource.TestCheckResourceAttr(dsName, "streams.0.describe_output.0.base_tables.#", "1")), + assert.Check(resource.TestCheckResourceAttr(dsName, "streams.0.describe_output.0.base_tables.0", table.ID().FullyQualifiedName())), + assert.Check(resource.TestCheckResourceAttr(dsName, "streams.0.describe_output.0.type", "DELTA")), + assert.Check(resource.TestCheckResourceAttr(dsName, "streams.0.describe_output.0.stale", "false")), + assert.Check(resource.TestCheckResourceAttr(dsName, "streams.0.describe_output.0.mode", string(sdk.StreamModeAppendOnly))), + assert.Check(resource.TestCheckResourceAttrSet(dsName, "streams.0.describe_output.0.stale_after")), + assert.Check(resource.TestCheckResourceAttr(dsName, "streams.0.describe_output.0.owner_role_type", "ROLE")), + ), + }, + }, + }) +} + +func TestAcc_StreamOnExternalTable(t *testing.T) { + _ = testenvs.GetOrSkipTest(t, testenvs.EnableAcceptance) + acc.TestAccPreCheck(t) + id := acc.TestClient().Ids.RandomSchemaObjectIdentifier() + resourceName := "snowflake_stream_on_external_table.test" + dsName := "data.snowflake_streams.test" + + stageID := acc.TestClient().Ids.RandomSchemaObjectIdentifier() + stageLocation := fmt.Sprintf("@%s", stageID.FullyQualifiedName()) + _, stageCleanup := acc.TestClient().Stage.CreateStageWithURL(t, stageID) + t.Cleanup(stageCleanup) + + externalTable, externalTableCleanup := acc.TestClient().ExternalTable.CreateWithLocation(t, 
stageLocation) + t.Cleanup(externalTableCleanup) + + model := model.StreamOnExternalTableBase("test", id, externalTable.ID()). + WithCopyGrants(true). + WithComment("foo") + + resource.Test(t, resource.TestCase{ + ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, + TerraformVersionChecks: []tfversion.TerraformVersionCheck{ + tfversion.RequireAbove(tfversion.Version1_5_0), + }, + Steps: []resource.TestStep{ + { + Config: config.FromModel(t, model) + streamsDatasource(id.Name(), resourceName), + Check: assert.AssertThat(t, + assert.Check(resource.TestCheckResourceAttr(dsName, "streams.#", "1")), + resourceshowoutputassert.StreamsDatasourceShowOutput(t, "snowflake_streams.test"). + HasCreatedOnNotEmpty(). + HasName(id.Name()). + HasDatabaseName(id.DatabaseName()). + HasSchemaName(id.SchemaName()). + HasOwner(snowflakeroles.Accountadmin.Name()). + HasTableName(externalTable.ID().FullyQualifiedName()). + HasSourceType(sdk.StreamSourceTypeExternalTable). + HasBaseTables([]sdk.SchemaObjectIdentifier{externalTable.ID()}). + HasType("DELTA"). + HasStale("false"). + HasMode(sdk.StreamModeInsertOnly). + HasStaleAfterNotEmpty(). + HasInvalidReason("N/A"). + HasOwnerRoleType("ROLE"), + assert.Check(resource.TestCheckResourceAttrSet(dsName, "streams.0.describe_output.0.created_on")), + assert.Check(resource.TestCheckResourceAttr(dsName, "streams.0.describe_output.0.name", id.Name())), + assert.Check(resource.TestCheckResourceAttr(dsName, "streams.0.describe_output.0.database_name", id.DatabaseName())), + assert.Check(resource.TestCheckResourceAttr(dsName, "streams.0.describe_output.0.schema_name", id.SchemaName())), + assert.Check(resource.TestCheckResourceAttr(dsName, "streams.0.describe_output.0.owner", snowflakeroles.Accountadmin.Name())), + assert.Check(resource.TestCheckResourceAttr(dsName, "streams.0.describe_output.0.comment", "foo")), + assert.Check(resource.TestCheckResourceAttr(dsName, "streams.0.describe_output.0.table_name", externalTable.ID().FullyQualifiedName())), + assert.Check(resource.TestCheckResourceAttr(dsName, "streams.0.describe_output.0.source_type", string(sdk.StreamSourceTypeExternalTable))), + assert.Check(resource.TestCheckResourceAttr(dsName, "streams.0.describe_output.0.base_tables.#", "1")), + assert.Check(resource.TestCheckResourceAttr(dsName, "streams.0.describe_output.0.base_tables.0", externalTable.ID().FullyQualifiedName())), + assert.Check(resource.TestCheckResourceAttr(dsName, "streams.0.describe_output.0.type", "DELTA")), + assert.Check(resource.TestCheckResourceAttr(dsName, "streams.0.describe_output.0.stale", "false")), + assert.Check(resource.TestCheckResourceAttr(dsName, "streams.0.describe_output.0.mode", string(sdk.StreamModeInsertOnly))), + assert.Check(resource.TestCheckResourceAttrSet(dsName, "streams.0.describe_output.0.stale_after")), + assert.Check(resource.TestCheckResourceAttr(dsName, "streams.0.describe_output.0.owner_role_type", "ROLE")), + ), + }, + }, + }) +} + +func TestAcc_StreamOnDirectoryTable(t *testing.T) { + _ = testenvs.GetOrSkipTest(t, testenvs.EnableAcceptance) + acc.TestAccPreCheck(t) + id := acc.TestClient().Ids.RandomSchemaObjectIdentifier() + resourceName := "snowflake_stream_on_directory_table.test" + dsName := "data.snowflake_streams.test" + + stage, cleanupStage := acc.TestClient().Stage.CreateStageWithDirectory(t) + t.Cleanup(cleanupStage) + + model := model.StreamOnDirectoryTable("test", id.DatabaseName(), id.Name(), id.SchemaName(), stage.ID().FullyQualifiedName()). 
+ WithComment("foo") + + resource.Test(t, resource.TestCase{ + ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, + TerraformVersionChecks: []tfversion.TerraformVersionCheck{ + tfversion.RequireAbove(tfversion.Version1_5_0), + }, + Steps: []resource.TestStep{ + { + Config: config.FromModel(t, model) + streamsDatasource(id.Name(), resourceName), + Check: assert.AssertThat(t, + assert.Check(resource.TestCheckResourceAttr(dsName, "streams.#", "1")), + resourceshowoutputassert.StreamsDatasourceShowOutput(t, "snowflake_streams.test"). + HasCreatedOnNotEmpty(). + HasName(id.Name()). + HasDatabaseName(id.DatabaseName()). + HasSchemaName(id.SchemaName()). + HasOwner(snowflakeroles.Accountadmin.Name()). + HasTableName(stage.ID().Name()). + HasSourceType(sdk.StreamSourceTypeStage). + HasBaseTablesPartiallyQualified(stage.ID().Name()). + HasType("DELTA"). + HasStale("false"). + HasMode(sdk.StreamModeDefault). + HasStaleAfterNotEmpty(). + HasInvalidReason("N/A"). + HasOwnerRoleType("ROLE"), + assert.Check(resource.TestCheckResourceAttrSet(dsName, "streams.0.describe_output.0.created_on")), + assert.Check(resource.TestCheckResourceAttr(dsName, "streams.0.describe_output.0.name", id.Name())), + assert.Check(resource.TestCheckResourceAttr(dsName, "streams.0.describe_output.0.database_name", id.DatabaseName())), + assert.Check(resource.TestCheckResourceAttr(dsName, "streams.0.describe_output.0.schema_name", id.SchemaName())), + assert.Check(resource.TestCheckResourceAttr(dsName, "streams.0.describe_output.0.owner", snowflakeroles.Accountadmin.Name())), + assert.Check(resource.TestCheckResourceAttr(dsName, "streams.0.describe_output.0.comment", "foo")), + assert.Check(resource.TestCheckResourceAttr(dsName, "streams.0.describe_output.0.table_name", stage.ID().Name())), + assert.Check(resource.TestCheckResourceAttr(dsName, "streams.0.describe_output.0.source_type", string(sdk.StreamSourceTypeStage))), + assert.Check(resource.TestCheckResourceAttr(dsName, "streams.0.describe_output.0.base_tables.#", "1")), + assert.Check(resource.TestCheckResourceAttr(dsName, "streams.0.describe_output.0.base_tables.0", stage.ID().Name())), + assert.Check(resource.TestCheckResourceAttr(dsName, "streams.0.describe_output.0.type", "DELTA")), + assert.Check(resource.TestCheckResourceAttr(dsName, "streams.0.describe_output.0.stale", "false")), + assert.Check(resource.TestCheckResourceAttr(dsName, "streams.0.describe_output.0.mode", string(sdk.StreamModeDefault))), + assert.Check(resource.TestCheckResourceAttrSet(dsName, "streams.0.describe_output.0.stale_after")), + assert.Check(resource.TestCheckResourceAttr(dsName, "streams.0.describe_output.0.owner_role_type", "ROLE")), + ), + }, + }, + }) +} + +// TODO (this pr): after merge, test stream on views. 
+ +func streamsDatasource(like, resourceName string) string { + return fmt.Sprintf(` +data "snowflake_streams" "test" { + depends_on = [%s] + + like = "%s" +} +`, resourceName, like) +} + +func TestAcc_Streams_Filtering(t *testing.T) { + _ = testenvs.GetOrSkipTest(t, testenvs.EnableAcceptance) + acc.TestAccPreCheck(t) + + prefix := random.AlphaN(4) + id1 := acc.TestClient().Ids.RandomSchemaObjectIdentifierWithPrefix(prefix) + id2 := acc.TestClient().Ids.RandomSchemaObjectIdentifierWithPrefix(prefix) + id3 := acc.TestClient().Ids.RandomSchemaObjectIdentifier() + + table, cleanupTable := acc.TestClient().Table.CreateWithChangeTracking(t) + t.Cleanup(cleanupTable) + + model1 := model.StreamOnTable("test_1", id1.DatabaseName(), id1.Name(), id1.SchemaName(), table.ID().FullyQualifiedName()) + model2 := model.StreamOnTable("test_2", id2.DatabaseName(), id2.Name(), id2.SchemaName(), table.ID().FullyQualifiedName()) + model3 := model.StreamOnTable("test_3", id3.DatabaseName(), id3.Name(), id3.SchemaName(), table.ID().FullyQualifiedName()) + + resource.Test(t, resource.TestCase{ + ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, + TerraformVersionChecks: []tfversion.TerraformVersionCheck{ + tfversion.RequireAbove(tfversion.Version1_5_0), + }, + PreCheck: func() { acc.TestAccPreCheck(t) }, + Steps: []resource.TestStep{ + { + Config: testconfig.FromModel(t, model1) + testconfig.FromModel(t, model2) + testconfig.FromModel(t, model3) + streamsDatasourceLike(id1.Name()), Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr("data.snowflake_streams.t", "database", databaseName), - resource.TestCheckResourceAttr("data.snowflake_streams.t", "schema", schemaName), - resource.TestCheckResourceAttrSet("data.snowflake_streams.t", "streams.#"), - resource.TestCheckResourceAttr("data.snowflake_streams.t", "streams.#", "1"), - resource.TestCheckResourceAttr("data.snowflake_streams.t", "streams.0.name", streamName), + resource.TestCheckResourceAttr("data.snowflake_streams.test", "streams.#", "1"), + ), + }, + { + Config: testconfig.FromModel(t, model1) + testconfig.FromModel(t, model2) + testconfig.FromModel(t, model3) + streamsDatasourceLike(prefix+"%"), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr("data.snowflake_streams.test", "streams.#", "2"), ), }, }, }) } -func streams(databaseName string, schemaName string, tableName string, streamName string) string { +func streamsDatasourceLike(like string) string { return fmt.Sprintf(` +data "snowflake_streams" "test" { + depends_on = [snowflake_stream_on_table.test_1, snowflake_stream_on_table.test_2, snowflake_stream_on_table.test_3] + + like = "%s" +} +`, like) +} - resource snowflake_database "test_database" { - name = "%v" - } - - resource snowflake_schema "test_schema" { - name = "%v" - database = snowflake_database.test_database.name - } - - resource snowflake_table "test_stream_on_table" { - database = snowflake_database.test_database.name - schema = snowflake_schema.test_schema.name - change_tracking = true - name = "%v" - comment = "Terraform acceptance test" - column { - name = "column1" - type = "VARIANT" - } - column { - name = "column2" - type = "VARCHAR" - } - } - - resource snowflake_stream "test_stream" { - database = snowflake_database.test_database.name - schema = snowflake_schema.test_schema.name - name = "%v" - comment = "Terraform acceptance test" - on_table = "${snowflake_database.test_database.name}.${snowflake_schema.test_schema.name}.${snowflake_table.test_stream_on_table.name}" - } - - 
data snowflake_streams "t" { - database = snowflake_stream.test_stream.database - schema = snowflake_stream.test_stream.schema - } - `, databaseName, schemaName, tableName, streamName) +func TestAcc_Streams_emptyIn(t *testing.T) { + resource.Test(t, resource.TestCase{ + ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, + PreCheck: func() { acc.TestAccPreCheck(t) }, + TerraformVersionChecks: []tfversion.TerraformVersionCheck{ + tfversion.RequireAbove(tfversion.Version1_5_0), + }, + CheckDestroy: nil, + Steps: []resource.TestStep{ + { + Config: streamsDatasourceEmptyIn(), + ExpectError: regexp.MustCompile("Invalid combination of arguments"), + }, + }, + }) +} + +func streamsDatasourceEmptyIn() string { + return ` +data "snowflake_streams" "test" { + in { + } +} +` +} + +func TestAcc_Streams_NotFound_WithPostConditions(t *testing.T) { + resource.Test(t, resource.TestCase{ + ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, + TerraformVersionChecks: []tfversion.TerraformVersionCheck{ + tfversion.RequireAbove(tfversion.Version1_5_0), + }, + Steps: []resource.TestStep{ + { + ConfigDirectory: acc.ConfigurationDirectory("TestAcc_Streams/non_existing"), + ExpectError: regexp.MustCompile("there should be at least one stream"), + }, + }, + }) } diff --git a/pkg/datasources/testdata/TestAcc_Streams/non_existing/test.tf b/pkg/datasources/testdata/TestAcc_Streams/non_existing/test.tf new file mode 100644 index 0000000000..15d5a1ca02 --- /dev/null +++ b/pkg/datasources/testdata/TestAcc_Streams/non_existing/test.tf @@ -0,0 +1,10 @@ +data "snowflake_streams" "test" { + like = "non-existing-stream" + + lifecycle { + postcondition { + condition = length(self.streams) > 0 + error_message = "there should be at least one stream" + } + } +} diff --git a/pkg/datasources/testdata/TestAcc_Streams/optionals_set/test.tf b/pkg/datasources/testdata/TestAcc_Streams/optionals_set/test.tf new file mode 100644 index 0000000000..6bf1691b43 --- /dev/null +++ b/pkg/datasources/testdata/TestAcc_Streams/optionals_set/test.tf @@ -0,0 +1,16 @@ +resource "snowflake_stream_on_table" "test" { + name = var.name + schema = var.schema + database = var.database + + table = var.table + append_only = var.append_only + + comment = var.comment +} + +data "snowflake_streams" "test" { + depends_on = [snowflake_stream_on_table.test] + + like = var.name +} diff --git a/pkg/datasources/testdata/TestAcc_Streams/optionals_set/variables.tf b/pkg/datasources/testdata/TestAcc_Streams/optionals_set/variables.tf new file mode 100644 index 0000000000..97babc44d7 --- /dev/null +++ b/pkg/datasources/testdata/TestAcc_Streams/optionals_set/variables.tf @@ -0,0 +1,23 @@ +variable "name" { + type = string +} + +variable "database" { + type = string +} + +variable "schema" { + type = string +} + +variable "comment" { + type = string +} + +variable "table" { + type = string +} + +variable "append_only" { + type = string +} diff --git a/pkg/datasources/testdata/TestAcc_Streams/optionals_unset/test.tf b/pkg/datasources/testdata/TestAcc_Streams/optionals_unset/test.tf new file mode 100644 index 0000000000..76250d97e2 --- /dev/null +++ b/pkg/datasources/testdata/TestAcc_Streams/optionals_unset/test.tf @@ -0,0 +1,17 @@ +resource "snowflake_stream_on_table" "test" { + name = var.name + schema = var.schema + database = var.database + + table = var.table + append_only = var.append_only + + comment = var.comment +} + +data "snowflake_streams" "test" { + depends_on = [snowflake_stream_on_table.test] + + with_describe = false + like = var.name +} 
diff --git a/pkg/datasources/testdata/TestAcc_Streams/optionals_unset/variables.tf b/pkg/datasources/testdata/TestAcc_Streams/optionals_unset/variables.tf new file mode 100644 index 0000000000..97babc44d7 --- /dev/null +++ b/pkg/datasources/testdata/TestAcc_Streams/optionals_unset/variables.tf @@ -0,0 +1,23 @@ +variable "name" { + type = string +} + +variable "database" { + type = string +} + +variable "schema" { + type = string +} + +variable "comment" { + type = string +} + +variable "table" { + type = string +} + +variable "append_only" { + type = string +} diff --git a/templates/data-sources/streams.md.tmpl b/templates/data-sources/streams.md.tmpl new file mode 100644 index 0000000000..daa12f5a7e --- /dev/null +++ b/templates/data-sources/streams.md.tmpl @@ -0,0 +1,24 @@ +--- +page_title: "{{.Name}} {{.Type}} - {{.ProviderName}}" +subcategory: "" +description: |- +{{ if gt (len (split .Description "")) 1 -}} +{{ index (split .Description "") 1 | plainmarkdown | trimspace | prefixlines " " }} +{{- else -}} +{{ .Description | plainmarkdown | trimspace | prefixlines " " }} +{{- end }} +--- + +!> **V1 release candidate** This data source was reworked and is a release candidate for the V1. We do not expect significant changes in it before the V1. We will welcome any feedback and adjust the data source if needed. Any errors reported will be resolved with a higher priority. We encourage checking this data source out before the V1 release. Please follow the [migration guide](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/MIGRATION_GUIDE.md#v0970--v0980) to use it. + +# {{.Name}} ({{.Type}}) + +{{ .Description | trimspace }} + +{{ if .HasExample -}} +## Example Usage + +{{ tffile (printf "examples/data-sources/%s/data-source.tf" .Name)}} +{{- end }} + +{{ .SchemaMarkdown | trimspace }} From afad1f8085793ccb3179988c13e7313fd4d3b1a1 Mon Sep 17 00:00:00 2001 From: Jakub Michalak Date: Wed, 23 Oct 2024 13:05:29 +0200 Subject: [PATCH 2/5] pre push --- MIGRATION_GUIDE.md | 2 + docs/data-sources/streams.md | 87 ++++++++++++++++++++++++++++++++---- 2 files changed, 80 insertions(+), 9 deletions(-) diff --git a/MIGRATION_GUIDE.md b/MIGRATION_GUIDE.md index 1b3e733777..b797f4f81c 100644 --- a/MIGRATION_GUIDE.md +++ b/MIGRATION_GUIDE.md @@ -11,7 +11,9 @@ across different versions. ### snowflake_masking_policies data source changes New filtering options: +- `like` - `in` +- `starts_with` - `limit` - `with_describe` diff --git a/docs/data-sources/streams.md b/docs/data-sources/streams.md index 41359f236d..ee575faa06 100644 --- a/docs/data-sources/streams.md +++ b/docs/data-sources/streams.md @@ -2,12 +2,14 @@ page_title: "snowflake_streams Data Source - terraform-provider-snowflake" subcategory: "" description: |- - + Datasource used to get details of filtered streams. Filtering is aligned with the current possibilities for SHOW STREAMS https://docs.snowflake.com/en/sql-reference/sql/show-streams query. The results of SHOW and DESCRIBE are encapsulated in one output collection streams. --- -# snowflake_streams (Data Source) +!> **V1 release candidate** This data source was reworked and is a release candidate for the V1. We do not expect significant changes in it before the V1. We will welcome any feedback and adjust the data source if needed. Any errors reported will be resolved with a higher priority. We encourage checking this data source out before the V1 release. 
Please follow the [migration guide](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/MIGRATION_GUIDE.md#v0970--v0980) to use it. +# snowflake_streams (Data Source) +Datasource used to get details of filtered streams. Filtering is aligned with the current possibilities for [SHOW STREAMS](https://docs.snowflake.com/en/sql-reference/sql/show-streams) query. The results of SHOW and DESCRIBE are encapsulated in one output collection `streams`. ## Example Usage @@ -21,23 +23,90 @@ data "snowflake_streams" "current" { ## Schema -### Required +### Optional -- `database` (String) The database from which to return the streams from. -- `schema` (String) The schema from which to return the streams from. +- `in` (Block List, Max: 1) IN clause to filter the list of objects (see [below for nested schema](#nestedblock--in)) +- `like` (String) Filters the output with **case-insensitive** pattern, with support for SQL wildcard characters (`%` and `_`). +- `limit` (Block List, Max: 1) Limits the number of rows returned. If the `limit.from` is set, then the limit will start from the first element matched by the expression. The expression is only used to match with the first element, later on the elements are not matched by the prefix, but you can enforce a certain pattern with `starts_with` or `like`. (see [below for nested schema](#nestedblock--limit)) +- `starts_with` (String) Filters the output with **case-sensitive** characters indicating the beginning of the object name. +- `with_describe` (Boolean) Runs DESC STREAM for each stream returned by SHOW STREAMS. The output of describe is saved to the description field. By default this value is set to true. ### Read-Only - `id` (String) The ID of this resource. -- `streams` (List of Object) The streams in the schema (see [below for nested schema](#nestedatt--streams)) +- `streams` (List of Object) Holds the aggregated output of all streams details queries. (see [below for nested schema](#nestedatt--streams)) + + +### Nested Schema for `in` +Optional: + +- `account` (Boolean) Returns records for the entire account. +- `application` (String) Returns records for the specified application. +- `application_package` (String) Returns records for the specified application package. +- `database` (String) Returns records for the current database in use or for a specified database. +- `schema` (String) Returns records for the current schema in use or a specified schema. Use fully qualified name. + + + +### Nested Schema for `limit` +Required: + +- `rows` (Number) The maximum number of rows to return. + +Optional: + +- `from` (String) Specifies a **case-sensitive** pattern that is used to match object name. After the first match, the limit on the number of rows will be applied. 
+ ### Nested Schema for `streams` Read-Only: +- `describe_output` (List of Object) (see [below for nested schema](#nestedobjatt--streams--describe_output)) +- `show_output` (List of Object) (see [below for nested schema](#nestedobjatt--streams--show_output)) + + +### Nested Schema for `streams.describe_output` + +Read-Only: + +- `base_tables` (List of String) +- `comment` (String) +- `created_on` (String) +- `database_name` (String) +- `invalid_reason` (String) +- `mode` (String) +- `name` (String) +- `owner` (String) +- `owner_role_type` (String) +- `schema_name` (String) +- `source_type` (String) +- `stale` (Boolean) +- `stale_after` (String) +- `table_name` (String) +- `type` (String) + + + +### Nested Schema for `streams.show_output` + +Read-Only: + +- `base_tables` (List of String) - `comment` (String) -- `database` (String) +- `created_on` (String) +- `database_name` (String) +- `invalid_reason` (String) +- `mode` (String) - `name` (String) -- `schema` (String) -- `table` (String) +- `owner` (String) +- `owner_role_type` (String) +- `schema_name` (String) +- `source_type` (String) +- `stale` (Boolean) +- `stale_after` (String) +- `table_name` (String) +- `type` (String) From c3a13e4598673dab6be9c40e9b1bde35d0b3a09f Mon Sep 17 00:00:00 2001 From: Jakub Michalak Date: Wed, 30 Oct 2024 11:35:34 +0100 Subject: [PATCH 3/5] Add missing test --- .../stream_show_output_ext.go | 2 +- pkg/datasources/streams_acceptance_test.go | 71 +++++++++++++++++-- .../stream_on_view_acceptance_test.go | 12 ++-- 3 files changed, 73 insertions(+), 12 deletions(-) diff --git a/pkg/acceptance/bettertestspoc/assert/resourceshowoutputassert/stream_show_output_ext.go b/pkg/acceptance/bettertestspoc/assert/resourceshowoutputassert/stream_show_output_ext.go index 84756872f6..0daeaf4db2 100644 --- a/pkg/acceptance/bettertestspoc/assert/resourceshowoutputassert/stream_show_output_ext.go +++ b/pkg/acceptance/bettertestspoc/assert/resourceshowoutputassert/stream_show_output_ext.go @@ -30,7 +30,7 @@ func (s *StreamShowOutputAssert) HasStaleAfterNotEmpty() *StreamShowOutputAssert return s } -func (s *StreamShowOutputAssert) HasBaseTables(ids []sdk.SchemaObjectIdentifier) *StreamShowOutputAssert { +func (s *StreamShowOutputAssert) HasBaseTables(ids ...sdk.SchemaObjectIdentifier) *StreamShowOutputAssert { s.AddAssertion(assert.ResourceShowOutputValueSet("base_tables.#", strconv.FormatInt(int64(len(ids)), 10))) for i := range ids { s.AddAssertion(assert.ResourceShowOutputValueSet(fmt.Sprintf("base_tables.%d", i), ids[i].FullyQualifiedName())) diff --git a/pkg/datasources/streams_acceptance_test.go b/pkg/datasources/streams_acceptance_test.go index b7203b5a9a..1b732d92c4 100644 --- a/pkg/datasources/streams_acceptance_test.go +++ b/pkg/datasources/streams_acceptance_test.go @@ -55,7 +55,7 @@ func TestAcc_Streams(t *testing.T) { HasOwner(snowflakeroles.Accountadmin.Name()). HasTableName(table.ID().FullyQualifiedName()). HasSourceType(sdk.StreamSourceTypeTable). - HasBaseTables([]sdk.SchemaObjectIdentifier{table.ID()}). + HasBaseTables(table.ID()). HasType("DELTA"). HasStale("false"). HasMode(sdk.StreamModeAppendOnly). @@ -94,7 +94,7 @@ func TestAcc_Streams(t *testing.T) { HasOwner(snowflakeroles.Accountadmin.Name()). HasTableName(table.ID().FullyQualifiedName()). HasSourceType(sdk.StreamSourceTypeTable). - HasBaseTables([]sdk.SchemaObjectIdentifier{table.ID()}). + HasBaseTables(table.ID()). HasType("DELTA"). HasStale("false"). HasMode(sdk.StreamModeAppendOnly). 
@@ -141,7 +141,7 @@ func TestAcc_StreamOnTable(t *testing.T) { HasOwner(snowflakeroles.Accountadmin.Name()). HasTableName(table.ID().FullyQualifiedName()). HasSourceType(sdk.StreamSourceTypeTable). - HasBaseTables([]sdk.SchemaObjectIdentifier{table.ID()}). + HasBaseTables(table.ID()). HasType("DELTA"). HasStale("false"). HasMode(sdk.StreamModeAppendOnly). @@ -206,7 +206,7 @@ func TestAcc_StreamOnExternalTable(t *testing.T) { HasOwner(snowflakeroles.Accountadmin.Name()). HasTableName(externalTable.ID().FullyQualifiedName()). HasSourceType(sdk.StreamSourceTypeExternalTable). - HasBaseTables([]sdk.SchemaObjectIdentifier{externalTable.ID()}). + HasBaseTables(externalTable.ID()). HasType("DELTA"). HasStale("false"). HasMode(sdk.StreamModeInsertOnly). @@ -293,7 +293,68 @@ func TestAcc_StreamOnDirectoryTable(t *testing.T) { }) } -// TODO (this pr): after merge, test stream on views. +func TestAcc_StreamOnView(t *testing.T) { + _ = testenvs.GetOrSkipTest(t, testenvs.EnableAcceptance) + acc.TestAccPreCheck(t) + id := acc.TestClient().Ids.RandomSchemaObjectIdentifier() + resourceName := "snowflake_stream_on_view.test" + dsName := "data.snowflake_streams.test" + + table, cleanupTable := acc.TestClient().Table.CreateWithChangeTracking(t) + t.Cleanup(cleanupTable) + statement := fmt.Sprintf("SELECT * FROM %s", table.ID().FullyQualifiedName()) + view, cleanupView := acc.TestClient().View.CreateView(t, statement) + t.Cleanup(cleanupView) + + model := model.StreamOnView("test", id.DatabaseName(), id.Name(), id.SchemaName(), view.ID().FullyQualifiedName()). + WithComment("foo"). + WithAppendOnly("true") + + resource.Test(t, resource.TestCase{ + ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, + TerraformVersionChecks: []tfversion.TerraformVersionCheck{ + tfversion.RequireAbove(tfversion.Version1_5_0), + }, + Steps: []resource.TestStep{ + { + Config: config.FromModel(t, model) + streamsDatasource(id.Name(), resourceName), + Check: assert.AssertThat(t, + assert.Check(resource.TestCheckResourceAttr(dsName, "streams.#", "1")), + resourceshowoutputassert.StreamsDatasourceShowOutput(t, "snowflake_streams.test"). + HasCreatedOnNotEmpty(). + HasName(id.Name()). + HasDatabaseName(id.DatabaseName()). + HasSchemaName(id.SchemaName()). + HasOwner(snowflakeroles.Accountadmin.Name()). + HasTableName(view.ID().FullyQualifiedName()). + HasSourceType(sdk.StreamSourceTypeView). + HasBaseTables(table.ID()). + HasType("DELTA"). + HasStale("false"). + HasMode(sdk.StreamModeAppendOnly). + HasStaleAfterNotEmpty(). + HasInvalidReason("N/A"). 
+ HasOwnerRoleType("ROLE"), + assert.Check(resource.TestCheckResourceAttrSet(dsName, "streams.0.describe_output.0.created_on")), + assert.Check(resource.TestCheckResourceAttr(dsName, "streams.0.describe_output.0.name", id.Name())), + assert.Check(resource.TestCheckResourceAttr(dsName, "streams.0.describe_output.0.database_name", id.DatabaseName())), + assert.Check(resource.TestCheckResourceAttr(dsName, "streams.0.describe_output.0.schema_name", id.SchemaName())), + assert.Check(resource.TestCheckResourceAttr(dsName, "streams.0.describe_output.0.owner", snowflakeroles.Accountadmin.Name())), + assert.Check(resource.TestCheckResourceAttr(dsName, "streams.0.describe_output.0.comment", "foo")), + assert.Check(resource.TestCheckResourceAttr(dsName, "streams.0.describe_output.0.table_name", view.ID().FullyQualifiedName())), + assert.Check(resource.TestCheckResourceAttr(dsName, "streams.0.describe_output.0.source_type", string(sdk.StreamSourceTypeView))), + assert.Check(resource.TestCheckResourceAttr(dsName, "streams.0.describe_output.0.base_tables.#", "1")), + assert.Check(resource.TestCheckResourceAttr(dsName, "streams.0.describe_output.0.base_tables.0", table.ID().FullyQualifiedName())), + assert.Check(resource.TestCheckResourceAttr(dsName, "streams.0.describe_output.0.type", "DELTA")), + assert.Check(resource.TestCheckResourceAttr(dsName, "streams.0.describe_output.0.stale", "false")), + assert.Check(resource.TestCheckResourceAttr(dsName, "streams.0.describe_output.0.mode", string(sdk.StreamModeAppendOnly))), + assert.Check(resource.TestCheckResourceAttrSet(dsName, "streams.0.describe_output.0.stale_after")), + assert.Check(resource.TestCheckResourceAttr(dsName, "streams.0.describe_output.0.owner_role_type", "ROLE")), + ), + }, + }, + }) +} func streamsDatasource(like, resourceName string) string { return fmt.Sprintf(` diff --git a/pkg/resources/stream_on_view_acceptance_test.go b/pkg/resources/stream_on_view_acceptance_test.go index b72f236d85..fef512a7ef 100644 --- a/pkg/resources/stream_on_view_acceptance_test.go +++ b/pkg/resources/stream_on_view_acceptance_test.go @@ -87,7 +87,7 @@ func TestAcc_StreamOnView_Basic(t *testing.T) { HasOwner(snowflakeroles.Accountadmin.Name()). HasTableName(view.ID().FullyQualifiedName()). HasSourceType(sdk.StreamSourceTypeView). - HasBaseTables([]sdk.SchemaObjectIdentifier{table.ID()}). + HasBaseTables(table.ID()). HasType("DELTA"). HasStale("false"). HasMode(sdk.StreamModeDefault). @@ -145,7 +145,7 @@ func TestAcc_StreamOnView_Basic(t *testing.T) { HasOwner(snowflakeroles.Accountadmin.Name()). HasTableName(view.ID().FullyQualifiedName()). HasSourceType(sdk.StreamSourceTypeView). - HasBaseTables([]sdk.SchemaObjectIdentifier{table.ID()}). + HasBaseTables(table.ID()). HasType("DELTA"). HasStale("false"). HasMode(sdk.StreamModeAppendOnly). @@ -197,7 +197,7 @@ func TestAcc_StreamOnView_Basic(t *testing.T) { HasOwner(snowflakeroles.Accountadmin.Name()). HasTableName(view.ID().FullyQualifiedName()). HasSourceType(sdk.StreamSourceTypeView). - HasBaseTables([]sdk.SchemaObjectIdentifier{table.ID()}). + HasBaseTables(table.ID()). HasType("DELTA"). HasStale("false"). HasMode(sdk.StreamModeAppendOnly). @@ -246,7 +246,7 @@ func TestAcc_StreamOnView_Basic(t *testing.T) { HasOwner(snowflakeroles.Accountadmin.Name()). HasTableName(view.ID().FullyQualifiedName()). HasSourceType(sdk.StreamSourceTypeView). - HasBaseTables([]sdk.SchemaObjectIdentifier{table.ID()}). + HasBaseTables(table.ID()). HasType("DELTA"). HasStale("false"). HasMode(sdk.StreamModeDefault). 
@@ -642,7 +642,7 @@ func TestAcc_StreamOnView_At(t *testing.T) { HasComment("foo"). HasTableName(view.ID().FullyQualifiedName()). HasSourceType(sdk.StreamSourceTypeView). - HasBaseTables([]sdk.SchemaObjectIdentifier{table.ID()}). + HasBaseTables(table.ID()). HasType("DELTA"). HasStale("false"). HasMode(sdk.StreamModeAppendOnly). @@ -762,7 +762,7 @@ func TestAcc_StreamOnView_Before(t *testing.T) { HasComment("foo"). HasTableName(view.ID().FullyQualifiedName()). HasSourceType(sdk.StreamSourceTypeView). - HasBaseTables([]sdk.SchemaObjectIdentifier{table.ID()}). + HasBaseTables(table.ID()). HasType("DELTA"). HasStale("false"). HasMode(sdk.StreamModeAppendOnly). From 52e3cf807f93fa71fe3d3582e14064ec1e0bbb2a Mon Sep 17 00:00:00 2001 From: Jakub Michalak Date: Wed, 30 Oct 2024 11:43:26 +0100 Subject: [PATCH 4/5] pre push --- .../stream_on_external_table_acceptance_test.go | 14 +++++++------- pkg/resources/stream_on_table_acceptance_test.go | 12 ++++++------ 2 files changed, 13 insertions(+), 13 deletions(-) diff --git a/pkg/resources/stream_on_external_table_acceptance_test.go b/pkg/resources/stream_on_external_table_acceptance_test.go index c926a507b9..292c383ee3 100644 --- a/pkg/resources/stream_on_external_table_acceptance_test.go +++ b/pkg/resources/stream_on_external_table_acceptance_test.go @@ -92,7 +92,7 @@ func TestAcc_StreamOnExternalTable_Basic(t *testing.T) { HasOwner(snowflakeroles.Accountadmin.Name()). HasTableName(externalTable.ID().FullyQualifiedName()). HasSourceType(sdk.StreamSourceTypeExternalTable). - HasBaseTables([]sdk.SchemaObjectIdentifier{externalTable.ID()}). + HasBaseTables(externalTable.ID()). HasType("DELTA"). HasStale("false"). HasMode(sdk.StreamModeInsertOnly). @@ -154,7 +154,7 @@ func TestAcc_StreamOnExternalTable_Basic(t *testing.T) { HasOwner(snowflakeroles.Accountadmin.Name()). HasTableName(externalTable.ID().FullyQualifiedName()). HasSourceType(sdk.StreamSourceTypeExternalTable). - HasBaseTables([]sdk.SchemaObjectIdentifier{externalTable.ID()}). + HasBaseTables(externalTable.ID()). HasType("DELTA"). HasStale("false"). HasMode(sdk.StreamModeInsertOnly). @@ -206,7 +206,7 @@ func TestAcc_StreamOnExternalTable_Basic(t *testing.T) { HasOwner(snowflakeroles.Accountadmin.Name()). HasTableName(externalTable.ID().FullyQualifiedName()). HasSourceType(sdk.StreamSourceTypeExternalTable). - HasBaseTables([]sdk.SchemaObjectIdentifier{externalTable.ID()}). + HasBaseTables(externalTable.ID()). HasType("DELTA"). HasStale("false"). HasMode(sdk.StreamModeInsertOnly). @@ -255,7 +255,7 @@ func TestAcc_StreamOnExternalTable_Basic(t *testing.T) { HasOwner(snowflakeroles.Accountadmin.Name()). HasTableName(externalTable.ID().FullyQualifiedName()). HasSourceType(sdk.StreamSourceTypeExternalTable). - HasBaseTables([]sdk.SchemaObjectIdentifier{externalTable.ID()}). + HasBaseTables(externalTable.ID()). HasType("DELTA"). HasStale("false"). HasMode(sdk.StreamModeInsertOnly). @@ -304,7 +304,7 @@ func TestAcc_StreamOnExternalTable_Basic(t *testing.T) { HasOwner(snowflakeroles.Accountadmin.Name()). HasTableName(externalTable.ID().FullyQualifiedName()). HasSourceType(sdk.StreamSourceTypeExternalTable). - HasBaseTables([]sdk.SchemaObjectIdentifier{externalTable.ID()}). + HasBaseTables(externalTable.ID()). HasType("DELTA"). HasStale("false"). HasMode(sdk.StreamModeInsertOnly). @@ -708,7 +708,7 @@ func TestAcc_StreamOnExternalTable_At(t *testing.T) { HasComment("foo"). HasTableName(externalTable.ID().FullyQualifiedName()). HasSourceType(sdk.StreamSourceTypeExternalTable). 
- HasBaseTables([]sdk.SchemaObjectIdentifier{externalTable.ID()}). + HasBaseTables(externalTable.ID()). HasType("DELTA"). HasStale("false"). HasMode(sdk.StreamModeInsertOnly). @@ -800,7 +800,7 @@ func TestAcc_StreamOnExternalTable_Before(t *testing.T) { HasComment("foo"). HasTableName(externalTable.ID().FullyQualifiedName()). HasSourceType(sdk.StreamSourceTypeExternalTable). - HasBaseTables([]sdk.SchemaObjectIdentifier{externalTable.ID()}). + HasBaseTables(externalTable.ID()). HasType("DELTA"). HasStale("false"). HasMode(sdk.StreamModeInsertOnly). diff --git a/pkg/resources/stream_on_table_acceptance_test.go b/pkg/resources/stream_on_table_acceptance_test.go index 7f1a2d9651..7f18495559 100644 --- a/pkg/resources/stream_on_table_acceptance_test.go +++ b/pkg/resources/stream_on_table_acceptance_test.go @@ -82,7 +82,7 @@ func TestAcc_StreamOnTable_Basic(t *testing.T) { HasOwner(snowflakeroles.Accountadmin.Name()). HasTableName(table.ID().FullyQualifiedName()). HasSourceType(sdk.StreamSourceTypeTable). - HasBaseTables([]sdk.SchemaObjectIdentifier{table.ID()}). + HasBaseTables(table.ID()). HasType("DELTA"). HasStale("false"). HasMode(sdk.StreamModeDefault). @@ -140,7 +140,7 @@ func TestAcc_StreamOnTable_Basic(t *testing.T) { HasOwner(snowflakeroles.Accountadmin.Name()). HasTableName(table.ID().FullyQualifiedName()). HasSourceType(sdk.StreamSourceTypeTable). - HasBaseTables([]sdk.SchemaObjectIdentifier{table.ID()}). + HasBaseTables(table.ID()). HasType("DELTA"). HasStale("false"). HasMode(sdk.StreamModeAppendOnly). @@ -192,7 +192,7 @@ func TestAcc_StreamOnTable_Basic(t *testing.T) { HasOwner(snowflakeroles.Accountadmin.Name()). HasTableName(table.ID().FullyQualifiedName()). HasSourceType(sdk.StreamSourceTypeTable). - HasBaseTables([]sdk.SchemaObjectIdentifier{table.ID()}). + HasBaseTables(table.ID()). HasType("DELTA"). HasStale("false"). HasMode(sdk.StreamModeAppendOnly). @@ -241,7 +241,7 @@ func TestAcc_StreamOnTable_Basic(t *testing.T) { HasOwner(snowflakeroles.Accountadmin.Name()). HasTableName(table.ID().FullyQualifiedName()). HasSourceType(sdk.StreamSourceTypeTable). - HasBaseTables([]sdk.SchemaObjectIdentifier{table.ID()}). + HasBaseTables(table.ID()). HasType("DELTA"). HasStale("false"). HasMode(sdk.StreamModeDefault). @@ -619,7 +619,7 @@ func TestAcc_StreamOnTable_At(t *testing.T) { HasComment("foo"). HasTableName(table.ID().FullyQualifiedName()). HasSourceType(sdk.StreamSourceTypeTable). - HasBaseTables([]sdk.SchemaObjectIdentifier{table.ID()}). + HasBaseTables(table.ID()). HasType("DELTA"). HasStale("false"). HasMode(sdk.StreamModeAppendOnly). @@ -736,7 +736,7 @@ func TestAcc_StreamOnTable_Before(t *testing.T) { HasComment("foo"). HasTableName(table.ID().FullyQualifiedName()). HasSourceType(sdk.StreamSourceTypeTable). - HasBaseTables([]sdk.SchemaObjectIdentifier{table.ID()}). + HasBaseTables(table.ID()). HasType("DELTA"). HasStale("false"). HasMode(sdk.StreamModeAppendOnly). From cf229e94d1d4b94332dc691588539bf72d21dce7 Mon Sep 17 00:00:00 2001 From: Jakub Michalak Date: Wed, 30 Oct 2024 11:44:58 +0100 Subject: [PATCH 5/5] typo --- MIGRATION_GUIDE.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/MIGRATION_GUIDE.md b/MIGRATION_GUIDE.md index 2974840dba..9fe0fec214 100644 --- a/MIGRATION_GUIDE.md +++ b/MIGRATION_GUIDE.md @@ -9,7 +9,7 @@ across different versions. ## v0.97.0 ➞ v0.98.0 -### snowflake_streamsdata source changes +### snowflake_streams data source changes New filtering options: - `like` - `in`
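As a closing illustration of how the new filtering options exercised by the tests above compose, a sketch of a combined query follows. The schema name, prefix, and limit values are invented for the example and are not taken from the changeset.

```terraform
data "snowflake_streams" "filtered" {
  # Skip the per-stream DESC STREAM call; only SHOW output is collected.
  with_describe = false

  in {
    schema = "\"DB\".\"SCHEMA\"" # fully qualified schema name
  }

  # Case-sensitive prefix and case-insensitive pattern can be combined.
  starts_with = "LOG_"
  like        = "log_%"

  # Return at most 10 rows, counting from the first name matching "LOG_2024".
  limit {
    rows = 10
    from = "LOG_2024"
  }
}
```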