Skip to content

Commit

Permalink
Further fixes
Browse files Browse the repository at this point in the history
  • Loading branch information
sfc-gh-jmichalak committed Jul 15, 2024
1 parent 1578573 commit bc9b60e
Show file tree
Hide file tree
Showing 8 changed files with 191 additions and 27 deletions.
51 changes: 49 additions & 2 deletions docs/data-sources/streamlits.md
Original file line number Diff line number Diff line change
Expand Up @@ -40,6 +40,29 @@ output "like_prefix_output" {
value = data.snowflake_streamlits.like_prefix.streamlits
}
# Filtering (limit)
data "snowflake_streamlits" "limit" {
limit {
rows = 10
from = "prefix-"
}
}
output "limit_output" {
value = data.snowflake_streamlits.limit.streamlits
}
# Filtering (in)
data "snowflake_streamlits" "in" {
in {
database = "database"
}
}
output "in_output" {
value = data.snowflake_streamlits.in.streamlits
}
# Without additional data (to limit the number of calls made for every found streamlit)
data "snowflake_streamlits" "only_show" {
# with_describe is turned on by default and it calls DESCRIBE STREAMLIT for every streamlit found and attaches its output to streamlits.*.describe_output field
Expand Down Expand Up @@ -79,14 +102,38 @@ check "streamlit_check" {

### Optional

- `in` (Block List, Max: 1) IN clause to filter the list of streamlits (see [below for nested schema](#nestedblock--in))
- `like` (String) Filters the output with **case-insensitive** pattern, with support for SQL wildcard characters (`%` and `_`).
- `limit` (Block List, Max: 1) Limits the number of rows returned. If the `limit.from` is set, then the limit will start from the first element matched by the expression. The expression is only used to match with the first element, later on the elements are not matched by the prefix, but you can enforce a certain pattern with `starts_with` or `like`. (see [below for nested schema](#nestedblock--limit))
- `with_describe` (Boolean) Runs DESC STREAMLIT for each streamlit returned by SHOW STREAMLITS. The output of describe is saved to the description field. By default this value is set to true.

### Read-Only

- `id` (String) The ID of this resource.
- `streamlits` (List of Object) Holds the aggregated output of all streamlits details queries. (see [below for nested schema](#nestedatt--streamlits))

<a id="nestedblock--in"></a>
### Nested Schema for `in`

Optional:

- `account` (Boolean) Returns records for the entire account.
- `database` (String) Returns records for the current database in use or for a specified database (db_name).
- `schema` (String) Returns records for the current schema in use or a specified schema (schema_name).


<a id="nestedblock--limit"></a>
### Nested Schema for `limit`

Required:

- `rows` (Number) The maximum number of rows to return.

Optional:

- `from` (String) Specifies a **case-sensitive** pattern that is used to match an object name. After the first match, the limit on the number of rows will be applied.


<a id="nestedatt--streamlits"></a>
### Nested Schema for `streamlits`

Expand All @@ -103,14 +150,14 @@ Read-Only:
- `default_packages` (String)
- `external_access_integrations` (Set of String)
- `external_access_secrets` (String)
- `import_urls` (List of String)
- `import_urls` (Set of String)
- `main_file` (String)
- `name` (String)
- `query_warehouse` (String)
- `root_location` (String)
- `title` (String)
- `url_id` (String)
- `user_packages` (List of String)
- `user_packages` (Set of String)


<a id="nestedobjatt--streamlits--show_output"></a>
Expand Down
6 changes: 3 additions & 3 deletions docs/resources/streamlit.md
Original file line number Diff line number Diff line change
Expand Up @@ -42,7 +42,7 @@ resource "snowflake_streamlit" "streamlit" {

### Required

- `database` (String) The database in which to create the Cortex search service.
- `database` (String) The database in which to create the streamlit.
- `main_file` (String) Specifies the filename of the Streamlit Python application. This filename is relative to the value of `root_location`
- `name` (String) String that specifies the identifier (i.e. name) for the streamlit; must be unique in your account.
- `schema` (String) The schema in which to create the streamlit.
Expand Down Expand Up @@ -70,14 +70,14 @@ Read-Only:
- `default_packages` (String)
- `external_access_integrations` (Set of String)
- `external_access_secrets` (String)
- `import_urls` (List of String)
- `import_urls` (Set of String)
- `main_file` (String)
- `name` (String)
- `query_warehouse` (String)
- `root_location` (String)
- `title` (String)
- `url_id` (String)
- `user_packages` (List of String)
- `user_packages` (Set of String)


<a id="nestedatt--show_output"></a>
Expand Down
23 changes: 23 additions & 0 deletions examples/data-sources/snowflake_streamlits/data-source.tf
Original file line number Diff line number Diff line change
Expand Up @@ -24,6 +24,29 @@ output "like_prefix_output" {
value = data.snowflake_streamlits.like_prefix.streamlits
}

# Filtering (limit)
data "snowflake_streamlits" "limit" {
limit {
rows = 10
from = "prefix-"
}
}

output "limit_output" {
value = data.snowflake_streamlits.limit.streamlits
}

# Filtering (in)
data "snowflake_streamlits" "in" {
in {
database = "database"
}
}

output "in_output" {
value = data.snowflake_streamlits.in.streamlits
}

# Without additional data (to limit the number of calls made for every found streamlit)
data "snowflake_streamlits" "only_show" {
# with_describe is turned on by default and it calls DESCRIBE STREAMLIT for every streamlit found and attaches its output to streamlits.*.describe_output field
Expand Down
73 changes: 73 additions & 0 deletions pkg/helpers/helpers_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,7 @@ import (
"testing"

"github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)

Expand Down Expand Up @@ -221,3 +222,75 @@ func Test_DecodeSnowflakeAccountIdentifier(t *testing.T) {
require.ErrorContains(t, err, fmt.Sprintf("unable to read identifier: %s", id))
})
}

// TestParseRootLocation verifies that ParseRootLocation splits a stage root
// location of the form `@db.schema.stage/optional/path` into a fully
// qualified, quoted identifier and the trailing path, across quoted,
// partially quoted, and unquoted identifier inputs.
func TestParseRootLocation(t *testing.T) {
	tests := []struct {
		name     string
		location string
		id       string // expected FullyQualifiedName of the parsed identifier
		path     string // expected path component following the identifier
		wantErr  bool
	}{
		{
			name:     "unquoted",
			location: `@a.b.c`,
			id:       `"a"."b"."c"`,
		},
		{
			name:     "unquoted with path",
			location: `@a.b.c/foo`,
			id:       `"a"."b"."c"`,
			path:     `foo`,
		},
		{
			name:     "partially quoted",
			location: `@"a".b.c`,
			id:       `"a"."b"."c"`,
		},
		{
			name:     "partially quoted with path",
			location: `@"a".b.c/foo`,
			id:       `"a"."b"."c"`,
			path:     `foo`,
		},
		{
			name:     "quoted",
			location: `@"a"."b"."c"`,
			id:       `"a"."b"."c"`,
		},
		{
			name:     "quoted with path",
			location: `@"a"."b"."c"/foo`,
			id:       `"a"."b"."c"`,
			path:     `foo`,
		},
		{
			name:     "unquoted with path with dots",
			location: `@a.b.c/foo.d`,
			id:       `"a"."b"."c"`,
			path:     `foo.d`,
		},
		{
			name:     "quoted with path with dots",
			location: `@"a"."b"."c"/foo.d`,
			id:       `"a"."b"."c"`,
			path:     `foo.d`,
		},
		{
			name:     "invalid location",
			location: `@foo`,
			wantErr:  true,
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			gotId, gotPath, err := ParseRootLocation(tt.location)
			if tt.wantErr {
				// On an expected error, gotId is a zero value; asserting on it
				// (as the previous version did) could panic or mislead.
				require.Error(t, err)
				return
			}
			require.NoError(t, err)
			assert.Equal(t, tt.id, gotId.FullyQualifiedName())
			assert.Equal(t, tt.path, gotPath)
		})
	}
}
25 changes: 9 additions & 16 deletions pkg/resources/streamlit.go
Original file line number Diff line number Diff line change
Expand Up @@ -264,23 +264,16 @@ func ReadContextStreamlit(withExternalChangesMarking bool) schema.ReadContextFun
if err := d.Set("main_file", streamlitDetails.MainFile); err != nil {
return diag.FromErr(err)
}
if withExternalChangesMarking {
if err = handleExternalChangesToObjectInShow(d,
showMapping{"query_warehouse", "query_warehouse", streamlit.QueryWarehouse, streamlit.QueryWarehouse, nil},
showMapping{"external_access_integrations", "external_access_integrations", streamlitDetails.ExternalAccessIntegrations, streamlitDetails.ExternalAccessIntegrations, nil},
showMapping{"title", "title", streamlit.Title, streamlit.Title, nil},
showMapping{"comment", "comment", streamlit.Comment, streamlit.Comment, nil},
); err != nil {
return diag.FromErr(err)
}
if err = d.Set("query_warehouse", streamlit.QueryWarehouse); err != nil {
return diag.FromErr(err)
}

if err = setStateToValuesFromConfig(d, streamlitSchema, []string{
"query_warehouse",
"external_access_integrations",
"title",
"comment",
}); err != nil {
if err = d.Set("external_access_integrations", streamlitDetails.ExternalAccessIntegrations); err != nil {
return diag.FromErr(err)
}
if err = d.Set("title", streamlit.Title); err != nil {
return diag.FromErr(err)
}
if err = d.Set("comment", streamlit.Comment); err != nil {
return diag.FromErr(err)
}

Expand Down
13 changes: 7 additions & 6 deletions pkg/resources/streamlit_acceptance_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -352,13 +352,14 @@ func TestAcc_Streamlit_Rename(t *testing.T) {
newId := acc.TestClient().Ids.RandomSchemaObjectIdentifier()
stage, stageCleanup := acc.TestClient().Stage.CreateStageInSchema(t, schemaId)
t.Cleanup(stageCleanup)
m := func(name, mainFile string) map[string]config.Variable {
m := func(name, comment string) map[string]config.Variable {
return map[string]config.Variable{
"database": config.StringVariable(databaseId.Name()),
"schema": config.StringVariable(schemaId.Name()),
"stage": config.StringVariable(stage.ID().FullyQualifiedName()),
"name": config.StringVariable(name),
"main_file": config.StringVariable(mainFile),
"main_file": config.StringVariable("foo"),
"comment": config.StringVariable(comment),
}
}
resource.Test(t, resource.TestCase{
Expand All @@ -369,16 +370,16 @@ func TestAcc_Streamlit_Rename(t *testing.T) {
CheckDestroy: acc.CheckDestroy(t, resources.NetworkPolicy),
Steps: []resource.TestStep{
{
ConfigDirectory: acc.ConfigurationDirectory("TestAcc_Streamlit/basic"),
ConfigDirectory: acc.ConfigurationDirectory("TestAcc_Streamlit/basicWithComment"),
ConfigVariables: m(id.Name(), "foo"),
Check: resource.ComposeAggregateTestCheckFunc(
resource.TestCheckResourceAttr("snowflake_streamlit.test", "name", id.Name()),
resource.TestCheckResourceAttr("snowflake_streamlit.test", "show_output.0.name", id.Name()),
resource.TestCheckResourceAttr("snowflake_streamlit.test", "show_output.0.main_file", "foo"),
resource.TestCheckResourceAttr("snowflake_streamlit.test", "show_output.0.comment", "foo"),
),
},
{
ConfigDirectory: acc.ConfigurationDirectory("TestAcc_Streamlit/basic"),
ConfigDirectory: acc.ConfigurationDirectory("TestAcc_Streamlit/basicWithComment"),
ConfigVariables: m(newId.Name(), "bar"),
ConfigPlanChecks: resource.ConfigPlanChecks{
PreApply: []plancheck.PlanCheck{
Expand All @@ -388,7 +389,7 @@ func TestAcc_Streamlit_Rename(t *testing.T) {
Check: resource.ComposeAggregateTestCheckFunc(
resource.TestCheckResourceAttr("snowflake_streamlit.test", "name", newId.Name()),
resource.TestCheckResourceAttr("snowflake_streamlit.test", "show_output.0.name", newId.Name()),
resource.TestCheckResourceAttr("snowflake_streamlit.test", "show_output.0.main_file", "bar"),
resource.TestCheckResourceAttr("snowflake_streamlit.test", "show_output.0.comment", "bar"),
),
},
},
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,8 @@
# Minimal streamlit resource for the acceptance tests; every attribute is
# driven by an input variable so the same configuration can be reused across
# test steps (e.g. rename and comment-change scenarios).
resource "snowflake_streamlit" "test" {
database = var.database
schema = var.schema
stage = var.stage
name = var.name
main_file = var.main_file
comment = var.comment
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,19 @@

# Input variables for the basicWithComment streamlit test configuration.
# All values are supplied by the Go acceptance test via config.Variable.

# Database in which the streamlit is created.
variable "database" {
type = string
}
# Schema in which the streamlit is created.
variable "schema" {
type = string
}
# Fully qualified name of the stage holding the streamlit files.
variable "stage" {
type = string
}
# Name of the streamlit resource.
variable "name" {
type = string
}
# Filename of the Streamlit Python application, relative to the stage root.
variable "main_file" {
type = string
}
# Free-form comment attached to the streamlit.
variable "comment" {
type = string
}

0 comments on commit bc9b60e

Please sign in to comment.