diff --git a/.github/titleLint.yml b/.github/titleLint.yml
new file mode 100644
index 0000000000..ea44d96763
--- /dev/null
+++ b/.github/titleLint.yml
@@ -0,0 +1 @@
+regex: (feat|fix|docs|chore|style|refactor|perf|test): .*
diff --git a/.gitignore b/.gitignore
index 77c5fa4517..fcf980d7e5 100644
--- a/.gitignore
+++ b/.gitignore
@@ -21,4 +21,7 @@ terraform-provider-snowflake*
 crash.log
 .envrc
 bin
-dist
\ No newline at end of file
+dist
+
+# JetBrains
+.idea/
\ No newline at end of file
diff --git a/README.md b/README.md
index 4643a1f45d..a11b993b21 100644
--- a/README.md
+++ b/README.md
@@ -110,6 +110,8 @@ If you are using the Standard Snowflake plan, it's recommended you also set up t
 
 **Note: releases can only be done by those with keybase pgp keys allowed in the terraform registry.**
 
+Releases will be performed once a week on **Monday around 11am PST**. If your change is more urgent and you need to use it sooner, use the commit hash.
+
 Releases are done by [goreleaser](https://goreleaser.com/) and run by our make files. There are two goreleaser configs, `.goreleaser.yml` for regular releases and `.goreleaser.prerelease.yml` for doing prereleases (for testing).
 
 Releases are [published to the terraform registry](https://registry.terraform.io/providers/chanzuckerberg/snowflake/latest), which requires that releases be signed.
diff --git a/VERSION b/VERSION
index 94a5fe438a..68275195b0 100644
--- a/VERSION
+++ b/VERSION
@@ -1 +1 @@
-0.25.0
\ No newline at end of file
+0.25.12
\ No newline at end of file
diff --git a/docs/data-sources/current_account.md b/docs/data-sources/current_account.md
new file mode 100644
index 0000000000..fd56e38cb3
--- /dev/null
+++ b/docs/data-sources/current_account.md
@@ -0,0 +1,38 @@
+---
+# generated by https://github.com/hashicorp/terraform-plugin-docs
+page_title: "snowflake_current_account Data Source - terraform-provider-snowflake"
+subcategory: ""
+description: |-
+
+---
+
+# snowflake_current_account (Data Source)
+
+
+
+## Example Usage
+
+```terraform
+data "snowflake_current_account" "this" {}
+
+resource "aws_ssm_parameter" "snowflake_account_url" {
+  name  = "/snowflake/account_url"
+  type  = "String"
+  value = data.snowflake_current_account.this.url
+}
+```
+
+## Schema
+
+### Optional
+
+- **id** (String) The ID of this resource.
+
+### Read-Only
+
+- **account** (String) The Snowflake Account ID; as returned by CURRENT_ACCOUNT().
+- **region** (String) The Snowflake Region; as returned by CURRENT_REGION().
+- **url** (String) The Snowflake URL.
+
+
diff --git a/docs/data-sources/system_generate_scim_access_token.md b/docs/data-sources/system_generate_scim_access_token.md
new file mode 100644
index 0000000000..e76ceaaa48
--- /dev/null
+++ b/docs/data-sources/system_generate_scim_access_token.md
@@ -0,0 +1,36 @@
+---
+# generated by https://github.com/hashicorp/terraform-plugin-docs
+page_title: "snowflake_system_generate_scim_access_token Data Source - terraform-provider-snowflake"
+subcategory: ""
+description: |-
+
+---
+
+# snowflake_system_generate_scim_access_token (Data Source)
+
+
+
+## Example Usage
+
+```terraform
+data "snowflake_system_generate_scim_access_token" "scim" {
+  integration_name = "AAD_PROVISIONING"
+}
+```
+
+## Schema
+
+### Required
+
+- **integration_name** (String) SCIM Integration Name
+
+### Optional
+
+- **id** (String) The ID of this resource.
+
+### Read-Only
+
+- **access_token** (String) SCIM Access Token
+
+
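+The generated token is typically handed off to whatever system drives SCIM provisioning. A sketch following the same SSM pattern as the `snowflake_current_account` example above (the parameter name is illustrative):
+
+```terraform
+resource "aws_ssm_parameter" "scim_access_token" {
+  name  = "/snowflake/scim_access_token"
+  type  = "SecureString"
+  value = data.snowflake_system_generate_scim_access_token.scim.access_token
+}
+```
+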
diff --git a/docs/data-sources/system_get_privatelink_config.md b/docs/data-sources/system_get_privatelink_config.md
new file mode 100644
index 0000000000..38a911c669
--- /dev/null
+++ b/docs/data-sources/system_get_privatelink_config.md
@@ -0,0 +1,85 @@
+---
+# generated by https://github.com/hashicorp/terraform-plugin-docs
+page_title: "snowflake_system_get_privatelink_config Data Source - terraform-provider-snowflake"
+subcategory: ""
+description: |-
+
+---
+
+# snowflake_system_get_privatelink_config (Data Source)
+
+
+
+## Example Usage
+
+```terraform
+data "snowflake_system_get_privatelink_config" "snowflake_private_link" {}
+
+resource "aws_security_group" "snowflake_private_link" {
+  vpc_id = var.vpc_id
+
+  ingress {
+    from_port   = 80
+    to_port     = 80
+    cidr_blocks = var.vpc_cidr
+    protocol    = "tcp"
+  }
+
+  ingress {
+    from_port   = 443
+    to_port     = 443
+    cidr_blocks = var.vpc_cidr
+    protocol    = "tcp"
+  }
+}
+
+resource "aws_vpc_endpoint" "snowflake_private_link" {
+  vpc_id              = var.vpc_id
+  service_name        = data.snowflake_system_get_privatelink_config.snowflake_private_link.aws_vpce_id
+  vpc_endpoint_type   = "Interface"
+  security_group_ids  = [aws_security_group.snowflake_private_link.id]
+  subnet_ids          = var.subnet_ids
+  private_dns_enabled = false
+}
+
+resource "aws_route53_zone" "snowflake_private_link" {
+  name = "privatelink.snowflakecomputing.com"
+
+  vpc {
+    vpc_id = var.vpc_id
+  }
+}
+
+resource "aws_route53_record" "snowflake_private_link_url" {
+  zone_id = aws_route53_zone.snowflake_private_link.zone_id
+  name    = data.snowflake_system_get_privatelink_config.snowflake_private_link.account_url
+  type    = "CNAME"
+  ttl     = "300"
+  records = [aws_vpc_endpoint.snowflake_private_link.dns_entry[0]["dns_name"]]
+}
+
+resource "aws_route53_record" "snowflake_private_link_oscp_url" {
+  zone_id = aws_route53_zone.snowflake_private_link.zone_id
+  name    = data.snowflake_system_get_privatelink_config.snowflake_private_link.oscp_url
+  type    = "CNAME"
+  ttl     = "300"
+  records = [aws_vpc_endpoint.snowflake_private_link.dns_entry[0]["dns_name"]]
+}
+```
+
+## Schema
+
+### Optional
+
+- **id** (String) The ID of this resource.
+
+### Read-Only
+
+- **account_name** (String) The name of your Snowflake account.
+- **account_url** (String) The URL used to connect to Snowflake through AWS PrivateLink or Azure Private Link.
+- **aws_vpce_id** (String) The AWS VPCE ID for your account.
+- **azure_pls_id** (String) The Azure Private Link Service ID for your account.
+- **oscp_url** (String) The OCSP URL corresponding to your Snowflake account that uses AWS PrivateLink or Azure Private Link.
+
+
diff --git a/docs/data-sources/system_get_snowflake_platform_info.md b/docs/data-sources/system_get_snowflake_platform_info.md
new file mode 100644
index 0000000000..48d099ef9a
--- /dev/null
+++ b/docs/data-sources/system_get_snowflake_platform_info.md
@@ -0,0 +1,27 @@
+---
+# generated by https://github.com/hashicorp/terraform-plugin-docs
+page_title: "snowflake_system_get_snowflake_platform_info Data Source - terraform-provider-snowflake"
+subcategory: ""
+description: |-
+
+---
+
+# snowflake_system_get_snowflake_platform_info (Data Source)
+
+
+
+
+
+## Schema
+
+### Optional
+
+- **id** (String) The ID of this resource.
+
+### Read-Only
+
+- **aws_vpc_ids** (List of String) Snowflake AWS Virtual Private Cloud IDs
+- **azure_vnet_subnet_ids** (List of String) Snowflake Azure Virtual Network Subnet IDs
+
+
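+A minimal usage sketch (it matches the file added under `examples/data-sources/system_get_snowflake_platform_info` later in this change):
+
+```terraform
+data "snowflake_system_get_snowflake_platform_info" "current" {}
+```
+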
diff --git a/docs/index.md b/docs/index.md
index 4d2c5eb98c..991313b79c 100644
--- a/docs/index.md
+++ b/docs/index.md
@@ -19,9 +19,15 @@ provider snowflake {
   region   = "..."
 
   // optional, exactly one must be set
-  password           = "..."
-  oauth_access_token = "..."
-  private_key_path   = "..."
+  password            = "..."
+  oauth_access_token  = "..."
+  private_key_path    = "..."
+  private_key         = "..."
+  oauth_refresh_token = "..."
+  oauth_client_id     = "..."
+  oauth_client_secret = "..."
+  oauth_endpoint      = "..."
+  oauth_redirect_url  = "..."
 
   // optional
   role = "..."
@@ -44,6 +50,11 @@ provider snowflake {
 
 - **browser_auth** (Boolean)
 - **oauth_access_token** (String, Sensitive)
+- **oauth_client_id** (String, Sensitive)
+- **oauth_client_secret** (String, Sensitive)
+- **oauth_endpoint** (String, Sensitive)
+- **oauth_redirect_url** (String, Sensitive)
+- **oauth_refresh_token** (String, Sensitive)
 - **password** (String, Sensitive)
 - **private_key** (String, Sensitive)
 - **private_key_path** (String, Sensitive)
@@ -56,6 +67,7 @@ The Snowflake provider supports multiple ways to authenticate:
 
 * Password
 * OAuth Access Token
+* OAuth Refresh Token
 * Browser Auth
 * Private Key
@@ -90,6 +102,20 @@ export SNOWFLAKE_OAUTH_ACCESS_TOKEN='...'
 
 Note that once this access token expires, you'll need to request a new one through an external application.
 
+### OAuth Refresh Token
+
+If you have an OAuth Refresh token, export these credentials as environment variables:
+
+```shell
+export SNOWFLAKE_OAUTH_REFRESH_TOKEN='...'
+export SNOWFLAKE_OAUTH_CLIENT_ID='...'
+export SNOWFLAKE_OAUTH_CLIENT_SECRET='...'
+export SNOWFLAKE_OAUTH_ENDPOINT='...'
+export SNOWFLAKE_OAUTH_REDIRECT_URL='https://localhost.com'
+```
+
+Note that because access tokens are short-lived (typically 10 minutes), the provider uses the refresh token to generate a new access token when needed.
+
 ### Username and Password Environment Variables
 
 If you choose to use Username and Password Authentication, export these credentials:
@@ -113,10 +139,24 @@ In addition to [generic `provider` arguments](https://www.terraform.io/docs/conf
 * `password` - (optional) Password for username+password auth. Cannot be used with `browser_auth` or
   `private_key_path`. Can be sourced from `SNOWFLAKE_PASSWORD` environment variable.
 * `oauth_access_token` - (optional) Token for use with OAuth. Generating the token is left to other
-  tools. Cannot be used with `browser_auth`, `private_key_path` or `password`. Can be source from
-  `SNOWFLAKE_OAUTH_ACCESS_TOKEN` environment variable.
+  tools. Cannot be used with `browser_auth`, `private_key_path`, `oauth_refresh_token` or `password`.
+  Can be sourced from `SNOWFLAKE_OAUTH_ACCESS_TOKEN` environment variable.
+* `oauth_refresh_token` - (optional) Token for use with OAuth. Setup and generation of the token is
+  left to other tools. Should be used in conjunction with `oauth_client_id`, `oauth_client_secret`,
+  `oauth_endpoint`, `oauth_redirect_url`. Cannot be used with `browser_auth`, `private_key_path`,
+  `oauth_access_token` or `password`. Can be sourced from `SNOWFLAKE_OAUTH_REFRESH_TOKEN` environment
+  variable.
+* `oauth_client_id` - (optional) Required when `oauth_refresh_token` is used. Can be sourced from
+  `SNOWFLAKE_OAUTH_CLIENT_ID` environment variable.
+* `oauth_client_secret` - (optional) Required when `oauth_refresh_token` is used. Can be sourced from
+  `SNOWFLAKE_OAUTH_CLIENT_SECRET` environment variable.
+* `oauth_endpoint` - (optional) Required when `oauth_refresh_token` is used. Can be sourced from
+  `SNOWFLAKE_OAUTH_ENDPOINT` environment variable.
+* `oauth_redirect_url` - (optional) Required when `oauth_refresh_token` is used. Can be sourced from
+  `SNOWFLAKE_OAUTH_REDIRECT_URL` environment variable.
 * `private_key_path` - (optional) Path to a private key for using keypair authentication. Cannot be
   used with `browser_auth`, `oauth_access_token` or `password`. Can be sourced from
   `SNOWFLAKE_PRIVATE_KEY_PATH` environment variable.
 * `role` - (optional) Snowflake role to use for operations. If left unset, the default role for the
   user will be used. Can come from the `SNOWFLAKE_ROLE` environment variable.
+
diff --git a/docs/resources/account_grant.md b/docs/resources/account_grant.md
index 09cde25591..696cffbf56 100644
--- a/docs/resources/account_grant.md
+++ b/docs/resources/account_grant.md
@@ -35,6 +35,6 @@ resource snowflake_account_grant grant {
 Import is supported using the following syntax:
 
 ```shell
-# format is account name | privilege | true/false for with_grant_option
-terraform import snowflake_account_grant.example 'accountName|USAGE|true'
+# format is account name | | | privilege | true/false for with_grant_option
+terraform import snowflake_account_grant.example 'accountName|||USAGE|true'
 ```
diff --git a/docs/resources/file_format.md b/docs/resources/file_format.md
new file mode 100644
index 0000000000..182eed8e2e
--- /dev/null
+++ b/docs/resources/file_format.md
@@ -0,0 +1,77 @@
+---
+# generated by https://github.com/hashicorp/terraform-plugin-docs
+page_title: "snowflake_file_format Resource - terraform-provider-snowflake"
+subcategory: ""
+description: |-
+
+---
+
+# snowflake_file_format (Resource)
+
+
+
+## Example Usage
+
+```terraform
+resource "snowflake_file_format" "example_file_format" {
+  name        = "EXAMPLE_FILE_FORMAT"
+  database    = "EXAMPLE_DB"
+  schema      = "EXAMPLE_SCHEMA"
+  format_type = "CSV"
+}
+```
+
+## Schema
+
+### Required
+
+- **database** (String) The database in which to create the file format.
+- **format_type** (String) Specifies the format of the input files (for data loading) or output files (for data unloading).
+- **name** (String) Specifies the identifier for the file format; must be unique for the database and schema in which the file format is created.
+- **schema** (String) The schema in which to create the file format.
+
+### Optional
+
+- **allow_duplicate** (Boolean) Boolean that specifies to allow duplicate object field names (only the last one will be preserved).
+- **binary_as_text** (Boolean) Boolean that specifies whether to interpret columns with no defined logical data type as UTF-8 text.
+- **binary_format** (String) Defines the encoding format for binary input or output.
+- **comment** (String) Specifies a comment for the file format.
+- **compression** (String) Specifies the current compression algorithm for the data file.
+- **date_format** (String) Defines the format of date values in the data files (data loading) or table (data unloading).
+- **disable_auto_convert** (Boolean) Boolean that specifies whether the XML parser disables automatic conversion of numeric and Boolean values from text to native representation.
+- **disable_snowflake_data** (Boolean) Boolean that specifies whether the XML parser disables recognition of Snowflake semi-structured data tags.
+- **empty_field_as_null** (Boolean) Specifies whether to insert SQL NULL for empty fields in an input file, which are represented by two successive delimiters. +- **enable_octal** (Boolean) Boolean that enables parsing of octal numbers. +- **encoding** (String) String (constant) that specifies the character set of the source data when loading data into a table. +- **error_on_column_count_mismatch** (Boolean) Boolean that specifies whether to generate a parsing error if the number of delimited columns (i.e. fields) in an input file does not match the number of columns in the corresponding table. +- **escape** (String) Single character string used as the escape character for field values. +- **escape_unenclosed_field** (String) Single character string used as the escape character for unenclosed field values only. +- **field_delimiter** (String) Specifies one or more singlebyte or multibyte characters that separate fields in an input file (data loading) or unloaded file (data unloading). +- **field_optionally_enclosed_by** (String) Character used to enclose strings. +- **file_extension** (String) Specifies the extension for files unloaded to a stage. +- **id** (String) The ID of this resource. +- **ignore_utf8_errors** (Boolean) Boolean that specifies whether UTF-8 encoding errors produce error conditions. +- **null_if** (List of String) String used to convert to and from SQL NULL. +- **preserve_space** (Boolean) Boolean that specifies whether the XML parser preserves leading and trailing spaces in element content. +- **record_delimiter** (String) Specifies one or more singlebyte or multibyte characters that separate records in an input file (data loading) or unloaded file (data unloading). +- **replace_invalid_characters** (Boolean) Boolean that specifies whether to replace invalid UTF-8 characters with the Unicode replacement character (�). +- **skip_blank_lines** (Boolean) Boolean that specifies to skip any blank lines encountered in the data files. +- **skip_byte_order_mark** (Boolean) Boolean that specifies whether to skip the BOM (byte order mark), if present in a data file. +- **skip_header** (Number) Number of lines at the start of the file to skip. +- **strip_null_values** (Boolean) Boolean that instructs the JSON parser to remove object fields or array elements containing null values. +- **strip_outer_array** (Boolean) Boolean that instructs the JSON parser to remove outer brackets. +- **strip_outer_element** (Boolean) Boolean that specifies whether the XML parser strips out the outer XML element, exposing 2nd level elements as separate documents. +- **time_format** (String) Defines the format of time values in the data files (data loading) or table (data unloading). +- **timestamp_format** (String) Defines the format of timestamp values in the data files (data loading) or table (data unloading). +- **trim_space** (Boolean) Boolean that specifies whether to remove white space from fields. +- **validate_utf8** (Boolean) Boolean that specifies whether to validate UTF-8 character encoding in string column data. 
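+
+As a fuller sketch, a CSV file format using several of the options above (the option values are illustrative, not defaults):
+
+```terraform
+resource "snowflake_file_format" "example_csv_format" {
+  name        = "EXAMPLE_CSV_FORMAT"
+  database    = "EXAMPLE_DB"
+  schema      = "EXAMPLE_SCHEMA"
+  format_type = "CSV"
+
+  compression                  = "AUTO"
+  field_delimiter              = ","
+  skip_header                  = 1
+  field_optionally_enclosed_by = "\""
+  null_if                      = ["NULL", ""]
+  trim_space                   = true
+}
+```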
+
+## Import
+
+Import is supported using the following syntax:
+
+```shell
+# format is database name | schema name | file format name
+terraform import snowflake_file_format.example 'dbName|schemaName|fileFormatName'
+```
diff --git a/docs/resources/notification_integration.md b/docs/resources/notification_integration.md
new file mode 100644
index 0000000000..1866b7f47b
--- /dev/null
+++ b/docs/resources/notification_integration.md
@@ -0,0 +1,69 @@
+---
+# generated by https://github.com/hashicorp/terraform-plugin-docs
+page_title: "snowflake_notification_integration Resource - terraform-provider-snowflake"
+subcategory: ""
+description: |-
+
+---
+
+# snowflake_notification_integration (Resource)
+
+
+
+## Example Usage
+
+```terraform
+resource snowflake_notification_integration integration {
+  name    = "notification"
+  comment = "A notification integration."
+
+  enabled   = true
+  type      = "QUEUE"
+  direction = "OUTBOUND"
+
+  # AZURE_STORAGE_QUEUE
+  notification_provider           = "AZURE_STORAGE_QUEUE"
+  azure_storage_queue_primary_uri = "..."
+  azure_tenant_id                 = "..."
+
+  # AWS_SQS (alternative provider; set these instead of the Azure arguments above)
+  # notification_provider = "AWS_SQS"
+  # aws_sqs_arn           = "..."
+  # aws_sqs_role_arn      = "..."
+}
+```
+
+## Schema
+
+### Required
+
+- **name** (String)
+
+### Optional
+
+- **aws_sqs_arn** (String) AWS SQS queue ARN for notification integration to connect to
+- **aws_sqs_role_arn** (String) AWS IAM role ARN for notification integration to assume
+- **azure_storage_queue_primary_uri** (String) The queue ID for the Azure Queue Storage queue created for Event Grid notifications
+- **azure_tenant_id** (String) The ID of the Azure Active Directory tenant used for identity management
+- **comment** (String)
+- **direction** (String) Direction of the cloud messaging with respect to Snowflake (required only for error notifications)
+- **enabled** (Boolean)
+- **gcp_pubsub_subscription_name** (String) The subscription id that Snowflake will listen to when using the GCP_PUBSUB provider.
+- **id** (String) The ID of this resource.
+- **notification_provider** (String) The third-party cloud message queuing service (e.g. AZURE_STORAGE_QUEUE, AWS_SQS)
+- **type** (String) A type of integration
+
+### Read-Only
+
+- **aws_sqs_external_id** (String) The external ID that Snowflake will use when assuming the AWS role
+- **aws_sqs_iam_user_arn** (String) The Snowflake user that will attempt to assume the AWS role.
+- **created_on** (String) Date and time when the notification integration was created.
+
+## Import
+
+Import is supported using the following syntax:
+
+```shell
+terraform import snowflake_notification_integration.example name
+```
diff --git a/docs/resources/pipe.md b/docs/resources/pipe.md
index e44d21f5d8..a51229c4b4 100644
--- a/docs/resources/pipe.md
+++ b/docs/resources/pipe.md
@@ -44,7 +44,9 @@ resource snowflake_pipe pipe {
 - **auto_ingest** (Boolean) Specifies an auto_ingest param for the pipe.
 - **aws_sns_topic_arn** (String) Specifies the Amazon Resource Name (ARN) for the SNS topic for your S3 bucket.
 - **comment** (String) Specifies a comment for the pipe.
+- **error_integration** (String) Specifies the name of the notification integration used for error notifications.
 - **id** (String) The ID of this resource.
+- **integration** (String) Specifies an integration for the pipe.
 
 ### Read-Only
 
diff --git a/docs/resources/role_grants.md b/docs/resources/role_grants.md
index 063d9b0a3a..eedd14558d 100644
--- a/docs/resources/role_grants.md
+++ b/docs/resources/role_grants.md
@@ -33,8 +33,6 @@ resource "snowflake_role" "other_role" {
 }
 
 resource "snowflake_role_grants" "grants" {
-  name = "foo"
-
   role_name = "${snowflake_role.role.name}"
 
   roles = [
diff --git a/docs/resources/scim_integration.md b/docs/resources/scim_integration.md
new file mode 100644
index 0000000000..1126afe76d
--- /dev/null
+++ b/docs/resources/scim_integration.md
@@ -0,0 +1,48 @@
+---
+# generated by https://github.com/hashicorp/terraform-plugin-docs
+page_title: "snowflake_scim_integration Resource - terraform-provider-snowflake"
+subcategory: ""
+description: |-
+
+---
+
+# snowflake_scim_integration (Resource)
+
+
+
+## Example Usage
+
+```terraform
+resource "snowflake_scim_integration" "aad" {
+  name             = "AAD_PROVISIONING"
+  network_policy   = "AAD_NETWORK_POLICY"
+  provisioner_role = "AAD_PROVISIONER"
+  scim_client      = "AZURE"
+}
+```
+
+## Schema
+
+### Required
+
+- **name** (String) Specifies the name of the SCIM integration. This name follows the rules for Object Identifiers. The name should be unique among security integrations in your account.
+- **provisioner_role** (String) Specify the SCIM role in Snowflake that owns any users and roles that are imported from the identity provider into Snowflake using SCIM.
+- **scim_client** (String) Specifies the client type for the scim integration
+
+### Optional
+
+- **id** (String) The ID of this resource.
+- **network_policy** (String) Specifies an existing network policy active for your account. The network policy restricts the list of user IP addresses when exchanging an authorization code for an access or refresh token and when using a refresh token to obtain a new access token. If this parameter is not set, the network policy for the account (if any) is used instead.
+
+### Read-Only
+
+- **created_on** (String) Date and time when the SCIM integration was created.
+
+## Import
+
+Import is supported using the following syntax:
+
+```shell
+terraform import snowflake_scim_integration.example name
+```
diff --git a/docs/resources/sequence.md b/docs/resources/sequence.md
new file mode 100644
index 0000000000..95e020c51a
--- /dev/null
+++ b/docs/resources/sequence.md
@@ -0,0 +1,51 @@
+---
+# generated by https://github.com/hashicorp/terraform-plugin-docs
+page_title: "snowflake_sequence Resource - terraform-provider-snowflake"
+subcategory: ""
+description: |-
+
+---
+
+# snowflake_sequence (Resource)
+
+
+
+## Example Usage
+
+```terraform
+resource "snowflake_database" "test_database" {
+  name = "things"
+}
+
+resource "snowflake_schema" "test_schema" {
+  name     = "things"
+  database = snowflake_database.test_database.name
+}
+
+resource "snowflake_sequence" "test_sequence" {
+  database = snowflake_database.test_database.name
+  schema   = snowflake_schema.test_schema.name
+  name     = "thing_counter"
+}
+```
+
+## Schema
+
+### Required
+
+- **database** (String) The database in which to create the sequence. Don't use the | character.
+- **name** (String) Specifies the name for the sequence.
+- **schema** (String) The schema in which to create the sequence. Don't use the | character.
+
+### Optional
+
+- **comment** (String) Specifies a comment for the sequence.
+- **id** (String) The ID of this resource.
+- **increment** (Number) The amount the sequence will increase by each time it is used.
+
+### Read-Only
+
+- **next_value** (Number) The next value the sequence will provide.
+
+
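+The `increment` attribute changes the step size; a small sketch reusing the resources from the example above (the values are illustrative):
+
+```terraform
+resource "snowflake_sequence" "by_ten" {
+  database  = snowflake_database.test_database.name
+  schema    = snowflake_schema.test_schema.name
+  name      = "counter_by_ten"
+  increment = 10
+}
+```
+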
diff --git a/docs/resources/stream.md b/docs/resources/stream.md
index 6590c59ba5..e303aa78c2 100644
--- a/docs/resources/stream.md
+++ b/docs/resources/stream.md
@@ -42,6 +42,7 @@ resource snowflake_stream stream {
 - **comment** (String) Specifies a comment for the stream.
 - **id** (String) The ID of this resource.
 - **on_table** (String) Name of the table the stream will monitor.
+- **show_initial_rows** (Boolean) Specifies whether to return all existing rows in the source table as row inserts the first time the stream is consumed.
 
 ### Read-Only
diff --git a/docs/resources/table.md b/docs/resources/table.md
index bf9058aae9..9e0b74c305 100644
--- a/docs/resources/table.md
+++ b/docs/resources/table.md
@@ -14,20 +14,33 @@ description: |-
 
 ```terraform
 resource snowflake_table table {
-  database = "database"
-  schema   = "schmea"
-  name     = "table"
-  comment  = "A table."
-  owner    = "me"
+  database   = "database"
+  schema     = "schema"
+  name       = "table"
+  comment    = "A table."
+  cluster_by = ["to_date(DATE)"]
+
+  column {
+    name     = "id"
+    type     = "int"
+    nullable = true
+  }
 
   column {
-    name = "id"
-    type = "int"
+    name     = "data"
+    type     = "text"
+    nullable = false
   }
 
   column {
-    name = "data"
-    type = "text"
+    name = "DATE"
+    type = "TIMESTAMP_NTZ(9)"
+  }
+
+  primary_key {
+    name = "my_key"
+    keys = ["data"]
+  }
 }
 ```
@@ -44,8 +57,10 @@ resource snowflake_table table {
 
 ### Optional
 
+- **cluster_by** (List of String) A list of one or more table columns/expressions to be used as clustering key(s) for the table
 - **comment** (String) Specifies a comment for the table.
 - **id** (String) The ID of this resource.
+- **primary_key** (Block List, Max: 1) Definitions of primary key constraint to create on table (see [below for nested schema](#nestedblock--primary_key))
 
 ### Read-Only
 
@@ -59,6 +74,22 @@ Required:
 
 - **name** (String) Column name
 - **type** (String) Column type, e.g. VARIANT
 
+Optional:
+
+- **nullable** (Boolean) Whether this column can contain null values. **Note**: Depending on your Snowflake version, the default value will not suffice if this column is used in a primary key constraint.
+
+
+
+### Nested Schema for `primary_key`
+
+Required:
+
+- **keys** (List of String) Columns to use in primary key
+
+Optional:
+
+- **name** (String) Name of constraint
+
 ## Import
 
 Import is supported using the following syntax:
diff --git a/docs/resources/user_public_keys.md b/docs/resources/user_public_keys.md
new file mode 100644
index 0000000000..decaa24c11
--- /dev/null
+++ b/docs/resources/user_public_keys.md
@@ -0,0 +1,28 @@
+---
+# generated by https://github.com/hashicorp/terraform-plugin-docs
+page_title: "snowflake_user_public_keys Resource - terraform-provider-snowflake"
+subcategory: ""
+description: |-
+
+---
+
+# snowflake_user_public_keys (Resource)
+
+
+
+
+
+## Schema
+
+### Required
+
+- **name** (String) Name of the user.
+
+### Optional
+
+- **id** (String) The ID of this resource.
+- **rsa_public_key** (String) Specifies the user’s RSA public key; used for key-pair authentication. Must be on 1 line without header and trailer.
+- **rsa_public_key_2** (String) Specifies the user’s second RSA public key; used to rotate the public and private keys for key-pair authentication based on an expiration schedule set by your organization. Must be on 1 line without header and trailer.
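+
+A minimal usage sketch (the user name and key material are placeholders; the key must be the base64 body only, without the PEM header and footer):
+
+```terraform
+resource "snowflake_user_public_keys" "example" {
+  name           = "EXAMPLE_USER"
+  rsa_public_key = "MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8A..."
+}
+```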
+
+
diff --git a/docs/resources/warehouse.md b/docs/resources/warehouse.md
index 56ca19b58e..213c8b1bfe 100644
--- a/docs/resources/warehouse.md
+++ b/docs/resources/warehouse.md
@@ -35,9 +35,11 @@ resource snowflake_warehouse w {
 - **id** (String) The ID of this resource.
 - **initially_suspended** (Boolean) Specifies whether the warehouse is created initially in the ‘Suspended’ state.
 - **max_cluster_count** (Number) Specifies the maximum number of server clusters for the warehouse.
+- **max_concurrency_level** (Number) Object parameter that specifies the concurrency level for SQL statements (i.e. queries and DML) executed by a warehouse.
 - **min_cluster_count** (Number) Specifies the minimum number of server clusters for the warehouse (only applies to multi-cluster warehouses).
 - **resource_monitor** (String) Specifies the name of a resource monitor that is explicitly assigned to the warehouse.
 - **scaling_policy** (String) Specifies the policy for automatically starting and shutting down clusters in a multi-cluster warehouse running in Auto-scale mode.
+- **statement_queued_timeout_in_seconds** (Number) Object parameter that specifies the time, in seconds, a SQL statement (query, DDL, DML, etc.) can be queued on a warehouse before it is canceled by the system.
 - **statement_timeout_in_seconds** (Number) Specifies the time, in seconds, after which a running SQL statement (query, DDL, DML, etc.) is canceled by the system
 - **wait_for_provisioning** (Boolean) Specifies whether the warehouse, after being resized, waits for all the servers to provision before executing any queued or new queries.
 - **warehouse_size** (String)
diff --git a/examples/data-sources/snowflake_current_account/data-source.tf b/examples/data-sources/snowflake_current_account/data-source.tf
new file mode 100644
index 0000000000..eff3a1746e
--- /dev/null
+++ b/examples/data-sources/snowflake_current_account/data-source.tf
@@ -0,0 +1,7 @@
+data "snowflake_current_account" "this" {}
+
+resource "aws_ssm_parameter" "snowflake_account_url" {
+  name  = "/snowflake/account_url"
+  type  = "String"
+  value = data.snowflake_current_account.this.url
+}
diff --git a/examples/data-sources/snowflake_system_generate_scim_access_token/data-source.tf b/examples/data-sources/snowflake_system_generate_scim_access_token/data-source.tf
new file mode 100644
index 0000000000..5fb1d2b134
--- /dev/null
+++ b/examples/data-sources/snowflake_system_generate_scim_access_token/data-source.tf
@@ -0,0 +1,3 @@
+data "snowflake_system_generate_scim_access_token" "scim" {
+  integration_name = "AAD_PROVISIONING"
+}
\ No newline at end of file
diff --git a/examples/data-sources/snowflake_system_get_privatelink_config/data-source.tf b/examples/data-sources/snowflake_system_get_privatelink_config/data-source.tf
new file mode 100644
index 0000000000..65e854c6f4
--- /dev/null
+++ b/examples/data-sources/snowflake_system_get_privatelink_config/data-source.tf
@@ -0,0 +1,52 @@
+data "snowflake_system_get_privatelink_config" "snowflake_private_link" {}
+
+resource "aws_security_group" "snowflake_private_link" {
+  vpc_id = var.vpc_id
+
+  ingress {
+    from_port   = 80
+    to_port     = 80
+    cidr_blocks = var.vpc_cidr
+    protocol    = "tcp"
+  }
+
+  ingress {
+    from_port   = 443
+    to_port     = 443
+    cidr_blocks = var.vpc_cidr
+    protocol    = "tcp"
+  }
+}
+
+resource "aws_vpc_endpoint" "snowflake_private_link" {
+  vpc_id              = var.vpc_id
+  service_name        = data.snowflake_system_get_privatelink_config.snowflake_private_link.aws_vpce_id
+  vpc_endpoint_type   = "Interface"
+  security_group_ids  = [aws_security_group.snowflake_private_link.id]
+  subnet_ids          = var.subnet_ids
+  private_dns_enabled = false
+}
+
+resource "aws_route53_zone" "snowflake_private_link" {
+  name = "privatelink.snowflakecomputing.com"
+
+  vpc {
+    vpc_id = var.vpc_id
+  }
+}
+
+resource "aws_route53_record" "snowflake_private_link_url" {
+  zone_id = aws_route53_zone.snowflake_private_link.zone_id
+  name    = data.snowflake_system_get_privatelink_config.snowflake_private_link.account_url
+  type    = "CNAME"
+  ttl     = "300"
+  records = [aws_vpc_endpoint.snowflake_private_link.dns_entry[0]["dns_name"]]
+}
+
+resource "aws_route53_record" "snowflake_private_link_oscp_url" {
+  zone_id = aws_route53_zone.snowflake_private_link.zone_id
+  name    = data.snowflake_system_get_privatelink_config.snowflake_private_link.oscp_url
+  type    = "CNAME"
+  ttl     = "300"
+  records = [aws_vpc_endpoint.snowflake_private_link.dns_entry[0]["dns_name"]]
+}
diff --git a/examples/data-sources/system_get_snowflake_platform_info/data-source.tf b/examples/data-sources/system_get_snowflake_platform_info/data-source.tf
new file mode 100644
index 0000000000..22687d0b29
--- /dev/null
+++ b/examples/data-sources/system_get_snowflake_platform_info/data-source.tf
@@ -0,0 +1 @@
+data "snowflake_system_get_snowflake_platform_info" "current" {}
\ No newline at end of file
diff --git a/examples/provider/provider.tf b/examples/provider/provider.tf
index 4c5ceca185..439fd203a2 100644
--- a/examples/provider/provider.tf
+++ b/examples/provider/provider.tf
@@ -5,9 +5,15 @@ provider snowflake {
   region   = "..."
 
   // optional, exactly one must be set
-  password           = "..."
-  oauth_access_token = "..."
-  private_key_path   = "..."
+  password            = "..."
+  oauth_access_token  = "..."
+  private_key_path    = "..."
+  private_key         = "..."
+  oauth_refresh_token = "..."
+  oauth_client_id     = "..."
+  oauth_client_secret = "..."
+  oauth_endpoint      = "..."
+  oauth_redirect_url  = "..."
 
   // optional
   role = "..."
diff --git a/examples/resources/snowflake_account_grant/import.sh b/examples/resources/snowflake_account_grant/import.sh
index 7d40d4ffb3..52c22aaf68 100644
--- a/examples/resources/snowflake_account_grant/import.sh
+++ b/examples/resources/snowflake_account_grant/import.sh
@@ -1,2 +1,2 @@
-# format is account name | privilege | true/false for with_grant_option
-terraform import snowflake_account_grant.example 'accountName|USAGE|true'
+# format is account name | | | privilege | true/false for with_grant_option
+terraform import snowflake_account_grant.example 'accountName|||USAGE|true'
diff --git a/examples/resources/snowflake_file_format/import.sh b/examples/resources/snowflake_file_format/import.sh
new file mode 100644
index 0000000000..beb5797593
--- /dev/null
+++ b/examples/resources/snowflake_file_format/import.sh
@@ -0,0 +1,2 @@
+# format is database name | schema name | file format name
+terraform import snowflake_file_format.example 'dbName|schemaName|fileFormatName'
diff --git a/examples/resources/snowflake_file_format/resource.tf b/examples/resources/snowflake_file_format/resource.tf
new file mode 100644
index 0000000000..eae5b523de
--- /dev/null
+++ b/examples/resources/snowflake_file_format/resource.tf
@@ -0,0 +1,6 @@
+resource "snowflake_file_format" "example_file_format" {
+  name        = "EXAMPLE_FILE_FORMAT"
+  database    = "EXAMPLE_DB"
+  schema      = "EXAMPLE_SCHEMA"
+  format_type = "CSV"
+}
diff --git a/examples/resources/snowflake_notification_integration/import.sh b/examples/resources/snowflake_notification_integration/import.sh
new file mode 100644
index 0000000000..6a31e9d046
--- /dev/null
+++ b/examples/resources/snowflake_notification_integration/import.sh
@@ -0,0 +1 @@
+terraform import snowflake_notification_integration.example name
diff --git a/examples/resources/snowflake_notification_integration/resource.tf b/examples/resources/snowflake_notification_integration/resource.tf
new file mode 100644
index 0000000000..d7fb63d88e
--- /dev/null
+++ b/examples/resources/snowflake_notification_integration/resource.tf
@@ -0,0 +1,18 @@
+resource snowflake_notification_integration integration {
+  name    = "notification"
+  comment = "A notification integration."
+
+  enabled   = true
+  type      = "QUEUE"
+  direction = "OUTBOUND"
+
+  # AZURE_STORAGE_QUEUE
+  notification_provider           = "AZURE_STORAGE_QUEUE"
+  azure_storage_queue_primary_uri = "..."
+  azure_tenant_id                 = "..."
+
+  # AWS_SQS (alternative provider; set these instead of the Azure arguments above)
+  # notification_provider = "AWS_SQS"
+  # aws_sqs_arn           = "..."
+  # aws_sqs_role_arn      = "..."
+}
diff --git a/examples/resources/snowflake_role_grants/resource.tf b/examples/resources/snowflake_role_grants/resource.tf
index 77b9225867..65664e3002 100644
--- a/examples/resources/snowflake_role_grants/resource.tf
+++ b/examples/resources/snowflake_role_grants/resource.tf
@@ -18,8 +18,6 @@ resource "snowflake_role" "other_role" {
 }
 
 resource "snowflake_role_grants" "grants" {
-  name = "foo"
-
   role_name = "${snowflake_role.role.name}"
 
   roles = [
diff --git a/examples/resources/snowflake_scim_integration/import.sh b/examples/resources/snowflake_scim_integration/import.sh
new file mode 100644
index 0000000000..86e162e445
--- /dev/null
+++ b/examples/resources/snowflake_scim_integration/import.sh
@@ -0,0 +1 @@
+terraform import snowflake_scim_integration.example name
diff --git a/examples/resources/snowflake_scim_integration/resource.tf b/examples/resources/snowflake_scim_integration/resource.tf
new file mode 100644
index 0000000000..4d2f73f23a
--- /dev/null
+++ b/examples/resources/snowflake_scim_integration/resource.tf
@@ -0,0 +1,6 @@
+resource "snowflake_scim_integration" "aad" {
+  name             = "AAD_PROVISIONING"
+  network_policy   = "AAD_NETWORK_POLICY"
+  provisioner_role = "AAD_PROVISIONER"
+  scim_client      = "AZURE"
+}
\ No newline at end of file
diff --git a/examples/resources/snowflake_sequence/resource.tf b/examples/resources/snowflake_sequence/resource.tf
new file mode 100644
index 0000000000..d3754f3bd9
--- /dev/null
+++ b/examples/resources/snowflake_sequence/resource.tf
@@ -0,0 +1,14 @@
+resource "snowflake_database" "test_database" {
+  name = "things"
+}
+
+resource "snowflake_schema" "test_schema" {
+  name     = "things"
+  database = snowflake_database.test_database.name
+}
+
+resource "snowflake_sequence" "test_sequence" {
+  database = snowflake_database.test_database.name
+  schema   = snowflake_schema.test_schema.name
+  name     = "thing_counter"
+}
diff --git a/examples/resources/snowflake_table/resource.tf b/examples/resources/snowflake_table/resource.tf
index 32b6b2e871..0578d91ff6 100644
--- a/examples/resources/snowflake_table/resource.tf
+++ b/examples/resources/snowflake_table/resource.tf
@@ -1,17 +1,30 @@
 resource snowflake_table table {
-  database = "database"
-  schema   = "schmea"
-  name     = "table"
-  comment  = "A table."
-  owner    = "me"
+  database   = "database"
+  schema     = "schema"
+  name       = "table"
+  comment    = "A table."
+ cluster_by = ["to_date(DATE)"] + + column { + name = "id" + type = "int" + nullable = true + } column { - name = "id" - type = "int" + name = "data" + type = "text" + nullable = false } column { - name = "data" - type = "text" + name = "DATE" + type = "TIMESTAMP_NTZ(9)" + } + + primary_key { + name = "my_key" + keys = ["data"] + } } diff --git a/go.mod b/go.mod index 806065e250..2a4993cfdd 100644 --- a/go.mod +++ b/go.mod @@ -7,9 +7,10 @@ require ( github.com/Pallinder/go-randomdata v1.2.0 github.com/apparentlymart/go-cidr v1.1.0 // indirect github.com/armon/go-radix v1.0.0 // indirect + github.com/aws/aws-sdk-go v1.37.0 // indirect github.com/chanzuckerberg/go-misc v0.0.0-20201208181439-ea3e3e39e194 - github.com/hashicorp/go-getter v1.5.1 // indirect - github.com/hashicorp/hcl/v2 v2.8.1 // indirect + github.com/hashicorp/hcl/v2 v2.8.2 // indirect + github.com/hashicorp/terraform-exec v0.13.3 // indirect github.com/hashicorp/terraform-plugin-docs v0.4.0 github.com/hashicorp/terraform-plugin-sdk/v2 v2.4.4 github.com/jmoiron/sqlx v1.3.1 @@ -19,6 +20,7 @@ require ( github.com/posener/complete v1.2.1 // indirect github.com/snowflakedb/gosnowflake v1.4.1 github.com/stretchr/testify v1.7.0 - golang.org/x/crypto v0.0.0-20201208171446-5f87f3452ae9 - golang.org/x/tools v0.1.0 + golang.org/x/crypto v0.0.0-20210322153248-0c34fe9e7dc2 + golang.org/x/tools v0.1.5 + google.golang.org/api v0.34.0 // indirect ) diff --git a/go.sum b/go.sum index 5acd58f0e7..501a15dc8d 100644 --- a/go.sum +++ b/go.sum @@ -12,8 +12,10 @@ cloud.google.com/go v0.53.0/go.mod h1:fp/UouUEsRkN6ryDKNW/Upv/JBKnv6WDthjR6+vze6 cloud.google.com/go v0.54.0/go.mod h1:1rq2OEkV3YMf6n/9ZvGWI3GWw0VoqH/1x2nd8Is/bPc= cloud.google.com/go v0.56.0/go.mod h1:jr7tqZxxKOVYizybht9+26Z/gUq7tiRzu+ACVAMbKVk= cloud.google.com/go v0.57.0/go.mod h1:oXiQ6Rzq3RAkkY7N6t3TcE6jE+CIBBbA36lwQ1JyzZs= -cloud.google.com/go v0.61.0 h1:NLQf5e1OMspfNT1RAHOB3ublr1TW3YTXO8OiWwVjK2U= cloud.google.com/go v0.61.0/go.mod h1:XukKJg4Y7QsUu0Hxg3qQKUWR4VuWivmyMK2+rUyxAqw= +cloud.google.com/go v0.62.0/go.mod h1:jmCYTdRCQuc1PHIIJ/maLInMho30T/Y0M4hTdTShOYc= +cloud.google.com/go v0.65.0 h1:Dg9iHVQfrhq82rUNu9ZxUDrJLaxFUe/HlCVaLyRruq8= +cloud.google.com/go v0.65.0/go.mod h1:O5N8zS7uWy9vkA9vayVHs65eM1ubvY4h553ofrNHObY= cloud.google.com/go/bigquery v1.0.1/go.mod h1:i/xbL2UlR5RvWAURpBYZTtm/cXjCha9lbfbpx4poX+o= cloud.google.com/go/bigquery v1.3.0/go.mod h1:PjpwJnslEMmckchkHFfq+HTD2DmtT67aNFKH1/VBDHE= cloud.google.com/go/bigquery v1.4.0/go.mod h1:S8dzgnTigyfTmLBfrtrhyYhwRxG72rYxvftPBK2Dvzc= @@ -74,6 +76,8 @@ github.com/Masterminds/semver v1.5.0/go.mod h1:MB6lktGJrhw8PrUyiEoblNEGEQ+RzHPF0 github.com/Masterminds/sprig v2.22.0+incompatible h1:z4yfnGrZ7netVz+0EDJ0Wi+5VZCSYp4Z0m2dk6cEM60= github.com/Masterminds/sprig v2.22.0+incompatible/go.mod h1:y6hNFY5UBTIWBxnzTeuNhlNS5hqE0NB0E6fgfo2Br3o= github.com/Microsoft/go-winio v0.4.14/go.mod h1:qXqCSQ3Xa7+6tgxaGTIe4Kpcdsi+P8jBhyzoq1bpyYA= +github.com/Microsoft/go-winio v0.4.16 h1:FtSW/jqD+l4ba5iPBj9CODVtgfYAD8w2wS923g/cFDk= +github.com/Microsoft/go-winio v0.4.16/go.mod h1:XB6nPKklQyQ7GC9LdcBEcBl8PF76WugXOPRXwdLnMv0= github.com/NYTimes/gziphandler v0.0.0-20170623195520-56545f4a5d46/go.mod h1:3wb06e3pkSAbeQ52E9H9iFoQsEEwGN64994WTCIhntQ= github.com/Netflix/go-expect v0.0.0-20180615182759-c93bf25de8e8/go.mod h1:oX5x61PbNXchhh0oikYAH+4Pcfw5LKv21+Jnpr6r6Pc= github.com/OneOfOne/xxhash v1.2.2/go.mod h1:HSdplMjZKSmBqAxg5vPj2TmRDmfkzw+cTzAElWljhcU= @@ -108,6 +112,8 @@ github.com/apparentlymart/go-textseg v1.0.0 h1:rRmlIsPEEhUTIKQb7T++Nz/A5Q6C9IuX2 
github.com/apparentlymart/go-textseg v1.0.0/go.mod h1:z96Txxhf3xSFMPmb5X/1W05FF/Nj9VFpLOpjS5yuumk= github.com/apparentlymart/go-textseg/v12 v12.0.0 h1:bNEQyAGak9tojivJNkoqWErVCQbjdL7GzRt3F8NvfJ0= github.com/apparentlymart/go-textseg/v12 v12.0.0/go.mod h1:S/4uRK2UtaQttw1GenVJEynmyUenKwP++x/+DdGV/Ec= +github.com/apparentlymart/go-textseg/v13 v13.0.0 h1:Y+KvPE1NYz0xl601PVImeQfFyEy6iT90AvPUL1NNfNw= +github.com/apparentlymart/go-textseg/v13 v13.0.0/go.mod h1:ZK2fH7c4NqDTLtiYLvIkEghdlcqw7yxLeM89kiTRPUo= github.com/armon/consul-api v0.0.0-20180202201655-eb2c6b5be1b6/go.mod h1:grANhF5doyWs3UAsr3K4I6qtAmlQcZDesFNEHPZAzj8= github.com/armon/go-radix v0.0.0-20180808171621-7fddfc383310/go.mod h1:ufUuZ+zHj4x4TnLV4JWEpy2hxWSpsRywHrMgIH9cCH8= github.com/armon/go-radix v1.0.0 h1:F4z6KzEeeQIMeLFa97iZU6vupzoecKdU5TX24SNppXI= @@ -120,8 +126,9 @@ github.com/aws/aws-sdk-go v1.15.78/go.mod h1:E3/ieXAlvM0XWO57iftYVDLLvQ824smPP3A github.com/aws/aws-sdk-go v1.16.26/go.mod h1:KmX6BPdI08NWTb3/sm4ZGu5ShLoqVDhKgpiN924inxo= github.com/aws/aws-sdk-go v1.25.3/go.mod h1:KmX6BPdI08NWTb3/sm4ZGu5ShLoqVDhKgpiN924inxo= github.com/aws/aws-sdk-go v1.27.1/go.mod h1:KmX6BPdI08NWTb3/sm4ZGu5ShLoqVDhKgpiN924inxo= -github.com/aws/aws-sdk-go v1.34.10 h1:VU78gcf/3wA4HNEDCHidK738l7K0Bals4SJnfnvXOtY= github.com/aws/aws-sdk-go v1.34.10/go.mod h1:5zCpMtNQVjRREroY7sYe8lOMRSxkhG6MZveU8YkpAk0= +github.com/aws/aws-sdk-go v1.37.0 h1:GzFnhOIsrGyQ69s7VgqtrG2BG8v7X7vwB3Xpbd/DBBk= +github.com/aws/aws-sdk-go v1.37.0/go.mod h1:hcU610XS61/+aQV88ixoOzUoG7v3b31pl2zKMmprdro= github.com/aymerick/raymond v2.0.3-0.20180322193309-b565731e1464+incompatible/go.mod h1:osfaiScAUVup+UC9Nfq76eWqDhXlp+4UYaA8uhTBO6g= github.com/beorn7/perks v0.0.0-20180321164747-3a771d992973/go.mod h1:Dwedo/Wpr24TaqPxmxbtue+5NUziq4I4S80YR8gNf3Q= github.com/beorn7/perks v1.0.0/go.mod h1:KWe93zE9D1o94FZ5RNwFwVgaQK1VOXiVxmqh+CedLV8= @@ -234,12 +241,15 @@ github.com/go-errors/errors v1.0.2-0.20180813162953-d98b870cc4e0/go.mod h1:f4zRH github.com/go-errors/errors v1.1.1/go.mod h1:psDX2osz5VnTOnFWbDeWwS7yejl+uV3FEWEp4lssFEs= github.com/go-git/gcfg v1.5.0 h1:Q5ViNfGF8zFgyJWPqYwA7qGFoMTEiBmdlkcfRmpIMa4= github.com/go-git/gcfg v1.5.0/go.mod h1:5m20vg6GwYabIxaOonVkTdrILxQMpEShl1xiMF4ua+E= -github.com/go-git/go-billy/v5 v5.0.0 h1:7NQHvd9FVid8VL4qVUMm8XifBK+2xCoZ2lSk0agRrHM= github.com/go-git/go-billy/v5 v5.0.0/go.mod h1:pmpqyWchKfYfrkb/UVH4otLvyi/5gJlGI4Hb3ZqZ3W0= -github.com/go-git/go-git-fixtures/v4 v4.0.1 h1:q+IFMfLx200Q3scvt2hN79JsEzy4AmBTp/pqnefH+Bc= +github.com/go-git/go-billy/v5 v5.1.0 h1:4pl5BV4o7ZG/lterP4S6WzJ6xr49Ba5ET9ygheTYahk= +github.com/go-git/go-billy/v5 v5.1.0/go.mod h1:pmpqyWchKfYfrkb/UVH4otLvyi/5gJlGI4Hb3ZqZ3W0= github.com/go-git/go-git-fixtures/v4 v4.0.1/go.mod h1:m+ICp2rF3jDhFgEZ/8yziagdT1C+ZpZcrJjappBCDSw= -github.com/go-git/go-git/v5 v5.1.0 h1:HxJn9g/E7eYvKW3Fm7Jt4ee8LXfPOm/H1cdDu8vEssk= +github.com/go-git/go-git-fixtures/v4 v4.0.2-0.20200613231340-f56387b50c12 h1:PbKy9zOy4aAKrJ5pibIRpVO2BXnK1Tlcg+caKI7Ox5M= +github.com/go-git/go-git-fixtures/v4 v4.0.2-0.20200613231340-f56387b50c12/go.mod h1:m+ICp2rF3jDhFgEZ/8yziagdT1C+ZpZcrJjappBCDSw= github.com/go-git/go-git/v5 v5.1.0/go.mod h1:ZKfuPUoY1ZqIG4QG9BDBh3G4gLM5zvPuSJAozQrZuyM= +github.com/go-git/go-git/v5 v5.3.0 h1:8WKMtJR2j8RntEXR/uvTKagfEt4GYlwQ7mntE4+0GWc= +github.com/go-git/go-git/v5 v5.3.0/go.mod h1:xdX4bWJ48aOrdhnl2XqHYstHbbp6+LFS4r4X+lNVprw= github.com/go-gl/glfw v0.0.0-20190409004039-e6da0acd62b1/go.mod h1:vR7hzQXu2zJy9AVAgeJqvqgH9Q5CA+iKCZ2gyEVpxRU= github.com/go-gl/glfw/v3.3/glfw 
v0.0.0-20191125211704-12ad95a8df72/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8= github.com/go-gl/glfw/v3.3/glfw v0.0.0-20200222043503-6f7a984d4dc4/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8= @@ -317,8 +327,10 @@ github.com/google/go-cmp v0.3.1/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMyw github.com/google/go-cmp v0.4.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.4.1/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.5.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= -github.com/google/go-cmp v0.5.2 h1:X2ev0eStA3AbceY54o37/0PQ/UWqKEiiO2dKL5OPaFM= +github.com/google/go-cmp v0.5.1/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.5.2/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.5.5 h1:Khx7svrCpmxxtHBq5j2mp/xVjsi8hQMfNLvJFAlrGgU= +github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-containerregistry v0.0.0-20200110202235-f4fb41bf00a3/go.mod h1:2wIuQute9+hhWqvL3vEI7YB0EKluF4WcPzI1eAliazk= github.com/google/go-github/v27 v27.0.6/go.mod h1:/0Gr8pJ55COkmv+S/yPKCczSkUPIM/LnFyubufRNIS0= github.com/google/go-querystring v1.0.0/go.mod h1:odCYkC5MyYFN7vkCjXpyrEuKhc/BUO6wN/zVPAxq5ck= @@ -368,14 +380,15 @@ github.com/hashicorp/errwrap v1.0.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brv github.com/hashicorp/go-checkpoint v0.5.0 h1:MFYpPZCnQqQTE18jFwSII6eUQrD/oxMFp3mlgcqk5mU= github.com/hashicorp/go-checkpoint v0.5.0/go.mod h1:7nfLNL10NsxqO4iWuW6tWW0HjZuDrwkBuEQsVcpCOgg= github.com/hashicorp/go-cleanhttp v0.5.0/go.mod h1:JpRdi6/HCYpAwUzNwuwqhbovhLtngrth3wmdIIUrZ80= -github.com/hashicorp/go-cleanhttp v0.5.1 h1:dH3aiDG9Jvb5r5+bYHsikaOUIpcM0xvgMXVoDkXMzJM= github.com/hashicorp/go-cleanhttp v0.5.1/go.mod h1:JpRdi6/HCYpAwUzNwuwqhbovhLtngrth3wmdIIUrZ80= +github.com/hashicorp/go-cleanhttp v0.5.2 h1:035FKYIWjmULyFRBKPs8TBQoi0x6d9G4xc9neXJWAZQ= +github.com/hashicorp/go-cleanhttp v0.5.2/go.mod h1:kO/YDlP8L1346E6Sodw+PrpBSV4/SoxCXGY6BqNFT48= github.com/hashicorp/go-cty v1.4.1-0.20200414143053-d3edf31b6320 h1:1/D3zfFHttUKaCaGKZ/dR2roBXv0vKbSCnssIldfQdI= github.com/hashicorp/go-cty v1.4.1-0.20200414143053-d3edf31b6320/go.mod h1:EiZBMaudVLy8fmjf9Npq1dq9RalhveqZG5w/yz3mHWs= github.com/hashicorp/go-getter v1.4.0/go.mod h1:7qxyCd8rBfcShwsvxgIguu4KbS3l8bUCwg2Umn7RjeY= github.com/hashicorp/go-getter v1.5.0/go.mod h1:a7z7NPPfNQpJWcn4rSWFtdrSldqLdLPEF3d8nFMsSLM= -github.com/hashicorp/go-getter v1.5.1 h1:lM9sM02nvEApQGFgkXxWbhfqtyN+AyhQmi+MaMdBDOI= -github.com/hashicorp/go-getter v1.5.1/go.mod h1:a7z7NPPfNQpJWcn4rSWFtdrSldqLdLPEF3d8nFMsSLM= +github.com/hashicorp/go-getter v1.5.3 h1:NF5+zOlQegim+w/EUhSLh6QhXHmZMEeHLQzllkQ3ROU= +github.com/hashicorp/go-getter v1.5.3/go.mod h1:BrrV/1clo8cCYu6mxvboYg+KutTiFnXjMEgDD8+i7ZI= github.com/hashicorp/go-hclog v0.0.0-20180709165350-ff2cf002a8dd/go.mod h1:9bjs9uLqI8l75knNv3lV1kA55veR+WUPSiKIWcQHudI= github.com/hashicorp/go-hclog v0.14.1/go.mod h1:whpDNt7SSdeAju8AWKIWsul05p54N/39EeqMAyrmvFQ= github.com/hashicorp/go-hclog v0.15.0 h1:qMuK0wxsoW4D0ddCCYwPSTm4KQv1X1ke3WmPWZ0Mvsk= @@ -396,23 +409,26 @@ github.com/hashicorp/go-uuid v1.0.1 h1:fv1ep09latC32wFoVwnqcnKJGnMSdBanPczbHAYm1 github.com/hashicorp/go-uuid v1.0.1/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/bN7x4byOro= github.com/hashicorp/go-version v1.1.0/go.mod h1:fltr4n8CU8Ke44wwGCBoEymUuxUHl09ZGVZPK5anwXA= github.com/hashicorp/go-version v1.2.0/go.mod h1:fltr4n8CU8Ke44wwGCBoEymUuxUHl09ZGVZPK5anwXA= 
-github.com/hashicorp/go-version v1.2.1 h1:zEfKbn2+PDgroKdiOzqiE8rsmLqU2uwi5PB5pBJ3TkI= github.com/hashicorp/go-version v1.2.1/go.mod h1:fltr4n8CU8Ke44wwGCBoEymUuxUHl09ZGVZPK5anwXA= +github.com/hashicorp/go-version v1.3.0 h1:McDWVJIU/y+u1BRV06dPaLfLCaT7fUTJLp5r04x7iNw= +github.com/hashicorp/go-version v1.3.0/go.mod h1:fltr4n8CU8Ke44wwGCBoEymUuxUHl09ZGVZPK5anwXA= github.com/hashicorp/golang-lru v0.5.0/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8= github.com/hashicorp/golang-lru v0.5.1/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8= github.com/hashicorp/golang-lru v0.5.3/go.mod h1:iADmTwqILo4mZ8BN3D2Q6+9jd8WM5uGBxy+E8yxSoD4= github.com/hashicorp/hcl v1.0.0 h1:0Anlzjpi4vEasTeNFn2mLJgTSwt0+6sfsiTG8qcWGx4= github.com/hashicorp/hcl v1.0.0/go.mod h1:E5yfLk+7swimpb2L/Alb/PJmXilQ/rhwaUYs4T20WEQ= github.com/hashicorp/hcl/v2 v2.3.0/go.mod h1:d+FwDBbOLvpAM3Z6J7gPj/VoAGkNe/gm352ZhjJ/Zv8= -github.com/hashicorp/hcl/v2 v2.8.1 h1:FJ60CIYaMyJOKzPndhMyjiz353Fd+2jr6PodF5Xzb08= -github.com/hashicorp/hcl/v2 v2.8.1/go.mod h1:bQTN5mpo+jewjJgh8jr0JUguIi7qPHUF6yIfAEN3jqY= +github.com/hashicorp/hcl/v2 v2.8.2 h1:wmFle3D1vu0okesm8BTLVDyJ6/OL9DCLUwn0b2OptiY= +github.com/hashicorp/hcl/v2 v2.8.2/go.mod h1:bQTN5mpo+jewjJgh8jr0JUguIi7qPHUF6yIfAEN3jqY= github.com/hashicorp/logutils v1.0.0 h1:dLEQVugN8vlakKOUE3ihGLTZJRB4j+M2cdTm/ORI65Y= github.com/hashicorp/logutils v1.0.0/go.mod h1:QIAnNjmIWmVIIkWDTG1z5v++HQmx9WQRO+LraFDTW64= github.com/hashicorp/terraform-exec v0.12.0/go.mod h1:SGhto91bVRlgXQWcJ5znSz+29UZIa8kpBbkGwQ+g9E8= -github.com/hashicorp/terraform-exec v0.13.0 h1:1Pth+pdWJAufJuWWjaVOVNEkoRTOjGn3hQpAqj4aPdg= github.com/hashicorp/terraform-exec v0.13.0/go.mod h1:SGhto91bVRlgXQWcJ5znSz+29UZIa8kpBbkGwQ+g9E8= -github.com/hashicorp/terraform-json v0.8.0 h1:XObQ3PgqU52YLQKEaJ08QtUshAfN3yu4u8ebSW0vztc= +github.com/hashicorp/terraform-exec v0.13.3 h1:R6L2mNpDGSEqtLrSONN8Xth0xYwNrnEVzDz6LF/oJPk= +github.com/hashicorp/terraform-exec v0.13.3/go.mod h1:SSg6lbUsVB3DmFyCPjBPklqf6EYGX0TlQ6QTxOlikDU= github.com/hashicorp/terraform-json v0.8.0/go.mod h1:3defM4kkMfttwiE7VakJDwCd4R+umhSQnvJwORXbprE= +github.com/hashicorp/terraform-json v0.10.0 h1:9syPD/Y5t+3uFjG8AiWVPu1bklJD8QB8iTCaJASc8oQ= +github.com/hashicorp/terraform-json v0.10.0/go.mod h1:3defM4kkMfttwiE7VakJDwCd4R+umhSQnvJwORXbprE= github.com/hashicorp/terraform-plugin-docs v0.4.0 h1:xJIXsMzBFwBvC1zcjoNz743GL2tNEfYFFU9+Hjp4Uek= github.com/hashicorp/terraform-plugin-docs v0.4.0/go.mod h1:fKj/V3t45tiXpSlUms/0G4OrBayyWpbUJ4WtLjBkINU= github.com/hashicorp/terraform-plugin-go v0.2.1 h1:EW/R8bB2Zbkjmugzsy1d27yS8/0454b3MtYHkzOknqA= @@ -431,8 +447,9 @@ github.com/ianlancetaylor/demangle v0.0.0-20181102032728-5e5cf60278f6/go.mod h1: github.com/imdario/mergo v0.3.5/go.mod h1:2EnlNZ0deacrJVfApfmtdGgDfMuh/nq6Ok1EcJh5FfA= github.com/imdario/mergo v0.3.7/go.mod h1:2EnlNZ0deacrJVfApfmtdGgDfMuh/nq6Ok1EcJh5FfA= github.com/imdario/mergo v0.3.9/go.mod h1:2EnlNZ0deacrJVfApfmtdGgDfMuh/nq6Ok1EcJh5FfA= -github.com/imdario/mergo v0.3.11 h1:3tnifQM4i+fbajXKBHXWEH+KvNHqojZ778UH75j3bGA= github.com/imdario/mergo v0.3.11/go.mod h1:jmQim1M+e3UYxmgPu/WyfjB3N3VflVyUjjjwH0dnCYA= +github.com/imdario/mergo v0.3.12 h1:b6R2BslTbIEToALKP7LxUvijTsNI9TAe80pLWN2g/HU= +github.com/imdario/mergo v0.3.12/go.mod h1:jmQim1M+e3UYxmgPu/WyfjB3N3VflVyUjjjwH0dnCYA= github.com/imkira/go-interpol v1.1.0/go.mod h1:z0h2/2T3XF8kyEPpRgJ3kmNv+C43p+I/CoI+jC3w2iA= github.com/inconshreveable/mousetrap v1.0.0/go.mod h1:PxqpIevigyE2G7u3NXJIT2ANytuPF1OarO4DADm73n8= github.com/iris-contrib/blackfriday v2.0.0+incompatible/go.mod 
h1:UzZ2bDEoaSGPbkg6SAB4att1aAwTmVIx/5gCVqeyUdI= @@ -442,12 +459,16 @@ github.com/iris-contrib/schema v0.0.1/go.mod h1:urYA3uvUNG1TIIjOSCzHr9/LmbQo8LrO github.com/jbenet/go-context v0.0.0-20150711004518-d14ea06fba99 h1:BQSFePA1RWJOlocH6Fxy8MmwDt+yVQYULKfN0RoTN8A= github.com/jbenet/go-context v0.0.0-20150711004518-d14ea06fba99/go.mod h1:1lJo3i6rXxKeerYnT8Nvf0QmHCRC1n8sfWVwXF2Frvo= github.com/jessevdk/go-flags v1.4.0/go.mod h1:4FA24M0QyGHXBuZZK/XkWh8h0e1EYbRYJSGM75WSRxI= +github.com/jessevdk/go-flags v1.5.0/go.mod h1:Fw0T6WPc1dYxT4mKEZRfG5kJhaTDP9pj1c2EWnYs/m4= github.com/jhump/protoreflect v1.6.0 h1:h5jfMVslIg6l29nsMs0D8Wj17RDVdNYti0vDN/PZZoE= github.com/jhump/protoreflect v1.6.0/go.mod h1:eaTn3RZAmMBcV0fifFvlm6VHNz3wSkYyXYWUh7ymB74= github.com/jmespath/go-jmespath v0.0.0-20160202185014-0b12d6b521d8/go.mod h1:Nht3zPeWKUH0NzdCt2Blrr5ys8VGpn0CEB0cQHVjt7k= github.com/jmespath/go-jmespath v0.0.0-20180206201540-c2b33e8439af/go.mod h1:Nht3zPeWKUH0NzdCt2Blrr5ys8VGpn0CEB0cQHVjt7k= -github.com/jmespath/go-jmespath v0.3.0 h1:OS12ieG61fsCg5+qLJ+SsW9NicxNkg3b25OyT2yCeUc= github.com/jmespath/go-jmespath v0.3.0/go.mod h1:9QtRXoHjLGCJ5IBSaohpXITPlowMeeYCZ7fLUTSywik= +github.com/jmespath/go-jmespath v0.4.0 h1:BEgLn5cpjn8UN1mAw4NjwDrS35OdebyEtFe+9YPoQUg= +github.com/jmespath/go-jmespath v0.4.0/go.mod h1:T8mJZnbsbmF+m6zOOFylbeCJqk5+pHWvzYPziyZiYoo= +github.com/jmespath/go-jmespath/internal/testify v1.5.1 h1:shLQSRRSCCPj3f2gpwzGwWFoC7ycTf1rcQZHOlsJ6N8= +github.com/jmespath/go-jmespath/internal/testify v1.5.1/go.mod h1:L3OGu8Wl2/fWfCI6z80xFu9LTZmf1ZRjMHUOPmWr69U= github.com/jmoiron/sqlx v1.3.1 h1:aLN7YINNZ7cYOPK3QC83dbM6KT0NMqVMw961TqrejlE= github.com/jmoiron/sqlx v1.3.1/go.mod h1:2BljVx/86SuTyjE+aPYlHCTNvZrnJXghYGpNiXLBMCQ= github.com/joefitzgerald/rainbow-reporter v0.1.0/go.mod h1:481CNgqmVHQZzdIbN52CupLJyoVwB10FQ/IQlF1pdL8= @@ -470,8 +491,9 @@ github.com/kataras/iris/v12 v12.0.1/go.mod h1:udK4vLQKkdDqMGJJVd/msuMtN6hpYJhg/l github.com/kataras/neffos v0.0.10/go.mod h1:ZYmJC07hQPW67eKuzlfY7SO3bC0mw83A3j6im82hfqw= github.com/kataras/pio v0.0.0-20190103105442-ea782b38602d/go.mod h1:NV88laa9UiiDuX9AhMbDPkGYSPugBOV6yTZB1l2K9Z0= github.com/kballard/go-shellquote v0.0.0-20180428030007-95032a82bc51/go.mod h1:CzGEWj7cYgsdH8dAjBGEr58BoE7ScuLd+fwFZ44+/x8= -github.com/kevinburke/ssh_config v0.0.0-20190725054713-01f96b0aa0cd h1:Coekwdh0v2wtGp9Gmz1Ze3eVRAWJMLokvN3QjdzCHLY= github.com/kevinburke/ssh_config v0.0.0-20190725054713-01f96b0aa0cd/go.mod h1:CT57kijsi8u/K/BOFA39wgDQJ9CxiF4nAY/ojJ6r6mM= +github.com/kevinburke/ssh_config v0.0.0-20201106050909-4977a11b4351 h1:DowS9hvgyYSX4TO5NpyC606/Z4SxnNYbT+WX27or6Ck= +github.com/kevinburke/ssh_config v0.0.0-20201106050909-4977a11b4351/go.mod h1:CT57kijsi8u/K/BOFA39wgDQJ9CxiF4nAY/ojJ6r6mM= github.com/keybase/go-crypto v0.0.0-20161004153544-93f5b35093ba/go.mod h1:ghbZscTyKdM07+Fw3KSi0hcJm+AlEUWj8QLlPtijN/M= github.com/kisielk/errcheck v1.1.0/go.mod h1:EZBBE59ingxPouuu3KfxchcWSUPOHkagtvWXihfKN4Q= github.com/kisielk/errcheck v1.2.0/go.mod h1:/BMXB+zMLi60iA8Vv6Ksmxu/1UDYcXs4uQLJ+jE2L00= @@ -480,13 +502,16 @@ github.com/klauspost/compress v1.8.2/go.mod h1:RyIbtBH6LamlWaDj8nUwkbUhJ87Yi3uG0 github.com/klauspost/compress v1.9.0/go.mod h1:RyIbtBH6LamlWaDj8nUwkbUhJ87Yi3uG0guNDohfE1A= github.com/klauspost/compress v1.10.10/go.mod h1:aoV0uJVorq1K+umq18yTdKaF57EivdYsUV+/s2qKfXs= github.com/klauspost/compress v1.10.11/go.mod h1:aoV0uJVorq1K+umq18yTdKaF57EivdYsUV+/s2qKfXs= +github.com/klauspost/compress v1.11.2 h1:MiK62aErc3gIiVEtyzKfeOHgW7atJb5g/KNX5m3c2nQ= +github.com/klauspost/compress 
v1.11.2/go.mod h1:aoV0uJVorq1K+umq18yTdKaF57EivdYsUV+/s2qKfXs= github.com/klauspost/cpuid v1.2.1/go.mod h1:Pj4uuM528wm8OyEC2QMXAi2YiTZ96dNQPGgoMS4s3ek= github.com/konsorten/go-windows-terminal-sequences v1.0.1/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ= github.com/konsorten/go-windows-terminal-sequences v1.0.2/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ= github.com/konsorten/go-windows-terminal-sequences v1.0.3/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ= github.com/kr/logfmt v0.0.0-20140226030751-b84e30acd515/go.mod h1:+0opPa2QZZtGFBFZlji/RkVcI2GknAs/DXo4wKdlNEc= -github.com/kr/pretty v0.1.0 h1:L/CwN0zerZDmRFUapSPitk6f+Q3+0za1rQkzVuMiMFI= github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo= +github.com/kr/pretty v0.2.1 h1:Fmg33tUaq4/8ym9TJN1x7sLJnHVwhP33CNkpYV/7rwI= +github.com/kr/pretty v0.2.1/go.mod h1:ipq/a2n7PKx3OHsz4KJII5eveXtPO4qwEXGdVfWzfnI= github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ= github.com/kr/pty v1.1.4/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ= github.com/kr/pty v1.1.5/go.mod h1:9r2w37qlBe7rQ6e1fg1S/9xpWHSnaqNdHD3WcMdbPDA= @@ -569,7 +594,6 @@ github.com/mxk/go-flowrate v0.0.0-20140419014527-cca7078d478f/go.mod h1:ZdcZmHo+ github.com/nats-io/nats.go v1.8.1/go.mod h1:BrFz9vVn0fU3AcH9Vn4Kd7W0NpJ651tD5omQ3M8LwxM= github.com/nats-io/nkeys v0.0.2/go.mod h1:dab7URMsZm6Z/jp9Z5UGa87Uutgc2mVpXLC4B7TDb/4= github.com/nats-io/nuid v1.0.1/go.mod h1:19wcPz3Ph3q0Jbyiqsd0kePYG7A95tJPxeL+1OSON2c= -github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e h1:fD57ERR4JtEqsWbfPhv4DMiApHyliiK5xCTNVSPiaAs= github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e/go.mod h1:zD1mROLANZcx1PVRCS0qkT7pwLkGfwJo4zjcN/Tysno= github.com/nightlyone/lockfile v1.0.0/go.mod h1:rywoIealpdNse2r832aiD9jRk8ErCatROs6LzC841CI= github.com/nlopes/slack v0.6.0/go.mod h1:JzQ9m3PMAqcpeCam7UaHSuBuupz7CmpjehYMayT6YOk= @@ -638,8 +662,9 @@ github.com/ryanuber/columnize v2.1.0+incompatible/go.mod h1:sm1tb6uqfes/u+d4ooFo github.com/satori/go.uuid v1.2.0/go.mod h1:dA0hQrYB0VpLJoorglMZABFdXlWrHn1NEOzdhQKdks0= github.com/sclevine/spec v1.2.0/go.mod h1:W4J29eT/Kzv7/b9IWLB055Z+qvVC9vt0Arko24q7p+U= github.com/sergi/go-diff v1.0.0/go.mod h1:0CfEIISq7TuYL3j771MWULgwwjU+GofnZX9QAmXWZgo= -github.com/sergi/go-diff v1.1.0 h1:we8PVUC3FE2uYfodKH/nBHMSetSfHDR6scGdBi+erh0= github.com/sergi/go-diff v1.1.0/go.mod h1:STckp+ISIX8hZLjrqAeVduY0gWCT9IjLuqbuNXdaHfM= +github.com/sergi/go-diff v1.2.0 h1:XU+rvMAioB0UC3q1MFrIQy4Vo5/4VsRDQQXHsEya6xQ= +github.com/sergi/go-diff v1.2.0/go.mod h1:STckp+ISIX8hZLjrqAeVduY0gWCT9IjLuqbuNXdaHfM= github.com/shurcooL/sanitized_anchor_name v1.0.0/go.mod h1:1NzhyTcUVG4SuEtjjoZeVRXNmyL/1OwPU0+IJeTBvfc= github.com/sirupsen/logrus v1.2.0/go.mod h1:LxeOpSwHxABJmUn/MG1IvRgCAasNZTLOkJPxbbu5VWo= github.com/sirupsen/logrus v1.4.1/go.mod h1:ni0Sbl8bgC9z8RoU9G6nDWqqs/fq4eDPysMBDgk/93Q= @@ -669,7 +694,6 @@ github.com/spf13/viper v1.3.2/go.mod h1:ZiWeW+zYFKm7srdB9IoDzzZXaJaI5eL9QjNiN/DM github.com/spf13/viper v1.4.0/go.mod h1:PTJ7Z/lr49W6bUbkmS1V3by4uWynFiR9p7+dSq/yZzE= github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= github.com/stretchr/objx v0.1.1/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= -github.com/stretchr/objx v0.2.0 h1:Hbg2NidpLE8veEBkEZTL3CvlkUIVzuU9jDplZO54c48= github.com/stretchr/objx v0.2.0/go.mod h1:qt09Ya8vawLte6SNmTgCsAVtYtaKzEcn8ATUoHMkEqE= github.com/stretchr/testify v0.0.0-20151208002404-e3a8ff8ce365/go.mod 
h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs= github.com/stretchr/testify v1.2.0/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs= @@ -706,8 +730,9 @@ github.com/vmihailenco/msgpack v4.0.4+incompatible/go.mod h1:fy3FlTQTDXWkZ7Bh6Ac github.com/vmihailenco/msgpack/v4 v4.3.12/go.mod h1:gborTTJjAo/GWTqqRjrLCn9pgNN+NXzzngzBKDPIqw4= github.com/vmihailenco/tagparser v0.1.1/go.mod h1:OeAg3pn3UbLjkWt+rN9oFYB6u/cQgqMEUPoW2WPyhdI= github.com/vmware/govmomi v0.20.3/go.mod h1:URlwyTFZX72RmxtxuaFL2Uj3fD1JTvZdx59bHWk6aFU= -github.com/xanzy/ssh-agent v0.2.1 h1:TCbipTQL2JiiCprBWx9frJ2eJlCYT00NmctrHxVAr70= github.com/xanzy/ssh-agent v0.2.1/go.mod h1:mLlQY/MoOhWBj+gOGMQkOeiEvkx+8pJSI+0Bx9h2kr4= +github.com/xanzy/ssh-agent v0.3.0 h1:wUMzuKtKilRgBAD1sUb8gOwwRr2FGoBVumcjoOACClI= +github.com/xanzy/ssh-agent v0.3.0/go.mod h1:3s9xbODqPuuhK9JV1R321M/FlMZSBvE5aY6eAcqrDh0= github.com/xeipuuv/gojsonpointer v0.0.0-20180127040702-4e3ac2762d5f/go.mod h1:N2zxlSyiKSe5eX1tZViRH5QA0qijqEDrYZiPEAiq3wU= github.com/xeipuuv/gojsonreference v0.0.0-20180127040603-bd5ef7bd5415/go.mod h1:GwrjFmJcFw6At/Gs6z4yjiIwzuJ1/+UwLxMQDVQXShQ= github.com/xeipuuv/gojsonschema v1.2.0/go.mod h1:anYRn/JVcOK2ZgGU+IjEV4nwlhoK5sQluxsYJ78Id3Y= @@ -721,11 +746,13 @@ github.com/yuin/goldmark v1.1.25/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9de github.com/yuin/goldmark v1.1.27/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= github.com/yuin/goldmark v1.1.32/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= +github.com/yuin/goldmark v1.3.5/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1Zlc8k= github.com/zalando/go-keyring v0.1.0/go.mod h1:RaxNwUITJaHVdQ0VC7pELPZ3tOWn13nr0gZMZEhpVU0= github.com/zclconf/go-cty v1.2.0/go.mod h1:hOPWgoHbaTUnI5k4D2ld+GRpFJSCe6bCM7m1q/N4PQ8= github.com/zclconf/go-cty v1.2.1/go.mod h1:hOPWgoHbaTUnI5k4D2ld+GRpFJSCe6bCM7m1q/N4PQ8= -github.com/zclconf/go-cty v1.7.1 h1:AvsC01GMhMLFL8CgEYdHGM+yLnnDOwhPAYcgTkeF0Gw= github.com/zclconf/go-cty v1.7.1/go.mod h1:VDR4+I79ubFBGm1uJac1226K5yANQFHeauxPBoP54+o= +github.com/zclconf/go-cty v1.8.2 h1:u+xZfBKgpycDnTNjPhGiTEYZS5qS/Sb5MqSfm7vzcjg= +github.com/zclconf/go-cty v1.8.2/go.mod h1:vVKLxnk3puL4qRAv72AO+W99LUD4da90g3uUAzyuvAk= github.com/zclconf/go-cty-debug v0.0.0-20191215020915-b22d67c1ba0b/go.mod h1:ZRKQfBXbGkpdV6QMzT3rU1kSTAnfu1dO8dPKjYprgj8= go.etcd.io/bbolt v1.3.2/go.mod h1:IbVyRI1SCnLcuJnV2u8VeU0CEYM7e686BmAb1XKL+uU= go.etcd.io/bbolt v1.3.3/go.mod h1:IbVyRI1SCnLcuJnV2u8VeU0CEYM7e686BmAb1XKL+uU= @@ -759,8 +786,8 @@ golang.org/x/crypto v0.0.0-20200302210943-78000ba7a073/go.mod h1:LzIPMQfyMNhhGPh golang.org/x/crypto v0.0.0-20200510223506-06a226fb4e37/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= golang.org/x/crypto v0.0.0-20200820211705-5c72a883971a/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= -golang.org/x/crypto v0.0.0-20201208171446-5f87f3452ae9 h1:sYNJzB4J8toYPQTM6pAkcmBRgw9SnQKP9oXCHfgy604= -golang.org/x/crypto v0.0.0-20201208171446-5f87f3452ae9/go.mod h1:jdWPYTVW3xRLrWPugEBEK3UY2ZEsg3UU495nc5E+M+I= +golang.org/x/crypto v0.0.0-20210322153248-0c34fe9e7dc2 h1:It14KIkyBFYkHkwZ7k45minvA9aorojkyjGk9KJ5B/w= +golang.org/x/crypto v0.0.0-20210322153248-0c34fe9e7dc2/go.mod h1:T9bdIzuCu7OtxOm1hfPfRQxPLYneinmdGuTeoZ9dtd4= golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= golang.org/x/exp 
v0.0.0-20190125153040-c74c464bbbf2/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= golang.org/x/exp v0.0.0-20190306152737-a1d7652674e8/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= @@ -793,8 +820,9 @@ golang.org/x/mod v0.1.0/go.mod h1:0QHyrYULN0/3qlju5TqG8bIK38QM8yzMo5ekMj3DlcY= golang.org/x/mod v0.1.1-0.20191105210325-c90efee705ee/go.mod h1:QqPTAvyqsEbceGzBzNggFXnrqF1CaUcvgkdR5Ot7KZg= golang.org/x/mod v0.1.1-0.20191107180719-034126e5016b/go.mod h1:QqPTAvyqsEbceGzBzNggFXnrqF1CaUcvgkdR5Ot7KZg= golang.org/x/mod v0.2.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= -golang.org/x/mod v0.3.0 h1:RM4zey1++hCTbCVQfnWeKs9/IEsaBLA8vTkd0WVtmH4= golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= +golang.org/x/mod v0.4.2 h1:Gz96sIWK3OalVv/I/qNygP42zyoKp3xptRVCWRFEBvo= +golang.org/x/mod v0.4.2/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/net v0.0.0-20170114055629-f2499483f923/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20180530234432-1e491301e022/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= @@ -833,14 +861,18 @@ golang.org/x/net v0.0.0-20200520182314-0ba52f642ac2/go.mod h1:qpuaurCH72eLCgpAm/ golang.org/x/net v0.0.0-20200625001655-4c5254603344/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA= golang.org/x/net v0.0.0-20200707034311-ab3426394381/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA= golang.org/x/net v0.0.0-20200822124328-c89045814202/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA= -golang.org/x/net v0.0.0-20201021035429-f5854403a974 h1:IX6qOQeG5uLjB/hjjwjedwfjND0hgjPMMyO1RoIXQNI= -golang.org/x/net v0.0.0-20201021035429-f5854403a974/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= +golang.org/x/net v0.0.0-20201110031124-69a78807bb2b/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= +golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= +golang.org/x/net v0.0.0-20210326060303-6b1517762897/go.mod h1:uSPa2vr4CLtc/ILN5odXGNXS6mhrKVzTaCXzk9m6W3k= +golang.org/x/net v0.0.0-20210405180319-a5a99cb37ef4 h1:4nGaVu0QrbjT/AK2PRLuQfQuh6DJve+pELhqTdAj3x0= +golang.org/x/net v0.0.0-20210405180319-a5a99cb37ef4/go.mod h1:p54w0d4576C0XHj96bSt6lcn1PtDYWL6XObtHCRCNQM= golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= golang.org/x/oauth2 v0.0.0-20190604053449-0f29369cfe45/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= golang.org/x/oauth2 v0.0.0-20191202225959-858c2ad4c8b6/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= -golang.org/x/oauth2 v0.0.0-20200107190931-bf48bf16ab8d h1:TzXSXBo42m9gQenoE3b9BGiEpg5IG2JkU5FkPIawgtw= golang.org/x/oauth2 v0.0.0-20200107190931-bf48bf16ab8d/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= +golang.org/x/oauth2 v0.0.0-20200902213428-5d25da1a8d43 h1:ld7aEMNHoBnnDAX15v1T6z31v8HwR2A9FYOuAhWqkwc= +golang.org/x/oauth2 v0.0.0-20200902213428-5d25da1a8d43/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4/go.mod 
h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= @@ -849,7 +881,7 @@ golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJ golang.org/x/sync v0.0.0-20190911185100-cd5d95a43a6e/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20200317015054-43a5402ce75a/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20200625203802-6e8e738ad208/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20210220032951-036812b2e83c/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sys v0.0.0-20170830134202-bb24a47a89ea/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20180823144017-11551d06cbcc/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= @@ -876,6 +908,7 @@ golang.org/x/sys v0.0.0-20190626221950-04f50cda93cb/go.mod h1:h1NjWce9XRLGQEsW7w golang.org/x/sys v0.0.0-20190726091711-fc99dfbffb4e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20190813064441-fde4db37ae7a/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20190826190057-c7b8b68b1456/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190916202348-b4ddaad3f8a3/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20191001151750-bb3f8db39f24/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20191008105621-543471e840be/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20191010194322-b09406accb47/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= @@ -897,19 +930,26 @@ golang.org/x/sys v0.0.0-20200501052902-10377860bb8e/go.mod h1:h1NjWce9XRLGQEsW7w golang.org/x/sys v0.0.0-20200511232937-7e40ca221e25/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200515095857-1151b9dac4a9/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200523222454-059865788121/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200803210538-64077c9b5642/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200824131525-c12d262b63d8/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200905004654-be1d3432aa8f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20210119212857-b64e53b001e4 h1:myAQVi0cGEoqQVR5POX+8RR2mrocKqNN1hmeMqhX27k= -golang.org/x/sys v0.0.0-20210119212857-b64e53b001e4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/term v0.0.0-20201117132131-f5c789dd3221 h1:/ZHdbVpdR/jk3g30/d4yUL0JU9kksj8+F/bnQUVLGDM= -golang.org/x/term v0.0.0-20201117132131-f5c789dd3221/go.mod h1:Nr5EML6q2oocZ2LXRh80K7BxOlk5/8JxuGnuhpl+muw= +golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210320140829-1e4c9ba3b0c4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210324051608-47abb6519492/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210330210617-4fbd30eecc44/go.mod 
h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210510120138-977fb7262007 h1:gG67DSER+11cZvqIMb8S8bt0vZtiN6xWYARwirrOSfE= +golang.org/x/sys v0.0.0-20210510120138-977fb7262007/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1 h1:v+OssWQX+hTHEmOBgwxdZxK4zHq3yOs8F9J7mk0PY8E= +golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= golang.org/x/text v0.0.0-20160726164857-2910a502d2bf/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.0.0-20170915032832-14c0d48ead0c/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.1-0.20180807135948-17ff2d5776d2/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk= -golang.org/x/text v0.3.3 h1:cokOdA+Jmi5PJGXLlLllQSgYigAEfHXJAERHVMaCc2k= golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= +golang.org/x/text v0.3.5 h1:i6eZZ+zk0SOf0xgBpEpPD18qWcJda6q1sxt3S0kzyUQ= +golang.org/x/text v0.3.5/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/time v0.0.0-20180412165947-fbb02b2291d2/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= @@ -964,8 +1004,14 @@ golang.org/x/tools v0.0.0-20200512131952-2bc93b1c0c88/go.mod h1:EkVYQZoAsY45+roY golang.org/x/tools v0.0.0-20200515010526-7d3b6ebf133d/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= golang.org/x/tools v0.0.0-20200618134242-20370b0cb4b2/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= golang.org/x/tools v0.0.0-20200713011307-fd294ab11aed/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= -golang.org/x/tools v0.1.0 h1:po9/4sTYwZU9lPhi1tOrb4hCv3qrhiQ77LZfGa2OjwY= -golang.org/x/tools v0.1.0/go.mod h1:xkSsbof2nBLbhDlRMhhhyNLN/zl3eTqcnHD5viDpcZ0= +golang.org/x/tools v0.0.0-20200729194436-6467de6f59a7/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= +golang.org/x/tools v0.0.0-20200804011535-6c149bb5ef0d/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= +golang.org/x/tools v0.0.0-20200825202427-b303f430e36d/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= +golang.org/x/tools v0.0.0-20200904185747-39188db58858/go.mod h1:Cj7w3i3Rnn0Xh82ur9kSqwfTHTeVxaDqrfMjpcNT6bE= +golang.org/x/tools v0.1.4 h1:cVngSRcfgyZCzys3KYOpCFa+4dqX/Oub9tAq00ttGVs= +golang.org/x/tools v0.1.4/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= +golang.org/x/tools v0.1.5 h1:ouewzE6p+/VEB31YYnTbEJdi8pFqKp4P4n85vwo3DHA= +golang.org/x/tools v0.1.5/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= @@ -989,8 +1035,10 @@ google.golang.org/api v0.20.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/ google.golang.org/api v0.22.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE= google.golang.org/api v0.24.0/go.mod h1:lIXQywCXRcnZPGlsd8NbLnOjtAoL6em04bJ9+z0MncE= 
google.golang.org/api v0.28.0/go.mod h1:lIXQywCXRcnZPGlsd8NbLnOjtAoL6em04bJ9+z0MncE= -google.golang.org/api v0.29.0 h1:BaiDisFir8O4IJxvAabCGGkQ6yCJegNQqSVoYUNAnbk= google.golang.org/api v0.29.0/go.mod h1:Lcubydp8VUV7KeIHD9z2Bys/sm/vGKnG1UHuDBSrHWM= +google.golang.org/api v0.30.0/go.mod h1:QGmEvQ87FHZNiUVJkT14jQNYJ4ZJjdRF23ZXz5138Fc= +google.golang.org/api v0.34.0 h1:k40adF3uR+6x/+hO5Dh4ZFUqFp67vxvbpafFiJxl10A= +google.golang.org/api v0.34.0/go.mod h1:/XrVsuzM0rZmrsbjJutiuftIzeuTQcEeaYcSk/mQ1dg= google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM= google.golang.org/appengine v1.4.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= google.golang.org/appengine v1.5.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= @@ -1026,8 +1074,12 @@ google.golang.org/genproto v0.0.0-20200511104702-f5ebc3bea380/go.mod h1:55QSHmfG google.golang.org/genproto v0.0.0-20200515170657-fc4c6c6a6587/go.mod h1:YsZOwe1myG/8QRHRsmBRE1LrgQY60beZKjly0O1fX9U= google.golang.org/genproto v0.0.0-20200526211855-cb27e3aa2013/go.mod h1:NbSheEEYHJ7i3ixzK3sjbqSGDJWnxyFXZblF3eUsNvo= google.golang.org/genproto v0.0.0-20200618031413-b414f8b61790/go.mod h1:jDfRM7FcilCzHH/e9qn6dsT145K34l5v+OpcnNgKAAA= -google.golang.org/genproto v0.0.0-20200711021454-869866162049 h1:YFTFpQhgvrLrmxtiIncJxFXeCyq84ixuKWVCaCAi9Oc= google.golang.org/genproto v0.0.0-20200711021454-869866162049/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20200729003335-053ba62fc06f/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20200804131852-c06518451d9c/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20200825200019-8632dd797987/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20200904004341-0bd0a958aa1d h1:92D1fum1bJLKSdr11OJ+54YeCMCGYIygTA7R/YZxH5M= +google.golang.org/genproto v0.0.0-20200904004341-0bd0a958aa1d/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= google.golang.org/grpc v1.8.0/go.mod h1:yo6s7OP7yaDglbqo1J04qKzAhqBH6lvTonzMVmEdcZw= google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c= google.golang.org/grpc v1.20.1/go.mod h1:10oTOabMzJvdu6/UiuZezV6QK5dSlG84ov/aaiqXj38= @@ -1043,6 +1095,8 @@ google.golang.org/grpc v1.27.1/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8 google.golang.org/grpc v1.28.0/go.mod h1:rpkK4SK4GF4Ach/+MFLZUBavHOvF2JJB5uozKKal+60= google.golang.org/grpc v1.29.1/go.mod h1:itym6AZVZYACWQqET3MqgPpjcuV5QH3BxFS3IjizoKk= google.golang.org/grpc v1.30.0/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak= +google.golang.org/grpc v1.31.0/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak= +google.golang.org/grpc v1.31.1/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak= google.golang.org/grpc v1.32.0 h1:zWTV+LMdc3kaiJMSTOFz2UgSBgx8RNQoTGiZu3fR9S0= google.golang.org/grpc v1.32.0/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak= google.golang.org/protobuf v0.0.0-20200109180630-ec00e32a8dfd/go.mod h1:DFci5gLYBciE7Vtevhsrf46CRTquxDuWsQurQQe4oz8= @@ -1061,8 +1115,9 @@ gopkg.in/alexcesaro/statsd.v2 v2.0.0/go.mod h1:i0ubccKGzBVNBpdGV5MocxyA/XlLUJzA7 gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= 
-gopkg.in/check.v1 v1.0.0-20200227125254-8fa46927fb4f h1:BLraFXnmrev5lT+xlilqcH8XK9/i0At2xKjWk4p6zsU= gopkg.in/check.v1 v1.0.0-20200227125254-8fa46927fb4f/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk= +gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q= gopkg.in/cheggaaa/pb.v1 v1.0.25/go.mod h1:V/YB90LKu/1FcN3WVnfiiE5oMCibMjukxqG/qStrOgw= gopkg.in/cheggaaa/pb.v1 v1.0.27/go.mod h1:V/YB90LKu/1FcN3WVnfiiE5oMCibMjukxqG/qStrOgw= gopkg.in/errgo.v2 v2.1.0/go.mod h1:hNsd1EY+bozCKY1Ytp96fpM3vjJbqLJn88ws8XvfDNI= diff --git a/pkg/datasources/current_account.go b/pkg/datasources/current_account.go new file mode 100644 index 0000000000..52b5b5de90 --- /dev/null +++ b/pkg/datasources/current_account.go @@ -0,0 +1,64 @@ +package datasources + +import ( + "database/sql" + "fmt" + "log" + + "github.com/chanzuckerberg/terraform-provider-snowflake/pkg/snowflake" + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" +) + +var currentAccountSchema = map[string]*schema.Schema{ + "account": { + Type: schema.TypeString, + Computed: true, + Description: "The Snowflake Account ID; as returned by CURRENT_ACCOUNT().", + }, + + "region": { + Type: schema.TypeString, + Computed: true, + Description: "The Snowflake Region; as returned by CURRENT_REGION()", + }, + + "url": { + Type: schema.TypeString, + Computed: true, + Description: "The Snowflake URL.", + }, +} + +// CurrentAccount the Snowflake current account resource +func CurrentAccount() *schema.Resource { + return &schema.Resource{ + Read: ReadCurrentAccount, + Schema: currentAccountSchema, + } +} + +// ReadCurrentAccount read the current snowflake account information +func ReadCurrentAccount(d *schema.ResourceData, meta interface{}) error { + db := meta.(*sql.DB) + acc, err := snowflake.ReadCurrentAccount(db) + + if err != nil { + log.Printf("[DEBUG] current_account failed to decode") + d.SetId("") + return nil + } + + d.SetId(fmt.Sprintf("%s.%s", acc.Account, acc.Region)) + d.Set("account", acc.Account) + d.Set("region", acc.Region) + url, err := acc.AccountURL() + + if err != nil { + log.Printf("[DEBUG] generating snowflake url failed") + d.SetId("") + return nil + } + + d.Set("url", url) + return nil +} diff --git a/pkg/datasources/current_account_acceptance_test.go b/pkg/datasources/current_account_acceptance_test.go new file mode 100644 index 0000000000..9b5438df46 --- /dev/null +++ b/pkg/datasources/current_account_acceptance_test.go @@ -0,0 +1,30 @@ +package datasources_test + +import ( + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" +) + +func TestAccCurrentAccount(t *testing.T) { + resource.ParallelTest(t, resource.TestCase{ + Providers: providers(), + Steps: []resource.TestStep{ + { + Config: currentAccount(), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttrSet("data.snowflake_current_account.p", "account"), + resource.TestCheckResourceAttrSet("data.snowflake_current_account.p", "region"), + resource.TestCheckResourceAttrSet("data.snowflake_current_account.p", "url"), + ), + }, + }, + }) +} + +func currentAccount() string { + s := ` + data snowflake_current_account p {} + ` + return s +} diff --git a/pkg/datasources/system_generate_scim_access_token.go b/pkg/datasources/system_generate_scim_access_token.go new file mode 100644 index 0000000000..0967b08599 --- /dev/null +++ 
b/pkg/datasources/system_generate_scim_access_token.go
@@ -0,0 +1,54 @@
+package datasources
+
+import (
+	"database/sql"
+	"log"
+
+	"github.com/chanzuckerberg/terraform-provider-snowflake/pkg/snowflake"
+	"github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema"
+)
+
+var systemGenerateSCIMAccessTokenSchema = map[string]*schema.Schema{
+	"integration_name": {
+		Type:        schema.TypeString,
+		Required:    true,
+		Description: "SCIM Integration Name",
+	},
+	"access_token": {
+		Type:        schema.TypeString,
+		Computed:    true,
+		Description: "SCIM Access Token",
+	},
+}
+
+func SystemGenerateSCIMAccessToken() *schema.Resource {
+	return &schema.Resource{
+		Read:   ReadSystemGenerateSCIMAccessToken,
+		Schema: systemGenerateSCIMAccessTokenSchema,
+	}
+}
+
+// ReadSystemGenerateSCIMAccessToken implements schema.ReadFunc
+func ReadSystemGenerateSCIMAccessToken(d *schema.ResourceData, meta interface{}) error {
+	db := meta.(*sql.DB)
+	integrationName := d.Get("integration_name").(string)
+
+	sel := snowflake.SystemGenerateSCIMAccessToken(integrationName).Select()
+	row := snowflake.QueryRow(db, sel)
+	accessToken, err := snowflake.ScanSCIMAccessToken(row)
+	if err == sql.ErrNoRows {
+		// If not found, mark resource to be removed from statefile during apply or refresh
+		log.Printf("[DEBUG] system_generate_scim_access_token (%s) not found", d.Id())
+		d.SetId("")
+		return nil
+	}
+
+	if err != nil {
+		log.Printf("[DEBUG] system_generate_scim_access_token (%s) failed to generate (%q)", d.Id(), err.Error())
+		d.SetId("")
+		return nil
+	}
+
+	d.SetId(integrationName)
+	return d.Set("access_token", accessToken.Token)
+}
diff --git a/pkg/datasources/system_generate_scim_access_token_acceptance_test.go b/pkg/datasources/system_generate_scim_access_token_acceptance_test.go
new file mode 100644
index 0000000000..b11bce81f2
--- /dev/null
+++ b/pkg/datasources/system_generate_scim_access_token_acceptance_test.go
@@ -0,0 +1,63 @@
+package datasources_test
+
+import (
+	"fmt"
+	"strings"
+	"testing"
+
+	"github.com/hashicorp/terraform-plugin-sdk/v2/helper/acctest"
+	"github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource"
+)
+
+func TestAcc_SystemGenerateSCIMAccessToken(t *testing.T) {
+	scimIntName := strings.ToUpper(acctest.RandStringFromCharSet(10, acctest.CharSetAlpha))
+	resource.ParallelTest(t, resource.TestCase{
+		Providers: providers(),
+		Steps: []resource.TestStep{
+			{
+				Config: generateAccessTokenConfig(scimIntName),
+				Check: resource.ComposeTestCheckFunc(
+					resource.TestCheckResourceAttr("data.snowflake_system_generate_scim_access_token.p", "integration_name", scimIntName),
+					resource.TestCheckResourceAttrSet("data.snowflake_system_generate_scim_access_token.p", "access_token"),
+				),
+			},
+		},
+	})
+}
+
+func generateAccessTokenConfig(name string) string {
+	return fmt.Sprintf(`
+	resource "snowflake_role" "azured" {
+		name = "AAD_PROVISIONER"
+		comment = "test comment"
+	}
+
+	resource "snowflake_account_grant" "azurecud" {
+		roles     = [snowflake_role.azured.name]
+		privilege = "CREATE USER"
+	}
+	resource "snowflake_account_grant" "azurecrd" {
+		roles     = [snowflake_role.azured.name]
+		privilege = "CREATE ROLE"
+	}
+	resource "snowflake_role_grants" "azured" {
+		role_name = snowflake_role.azured.name
+		roles     = ["ACCOUNTADMIN"]
+	}
+
+	resource "snowflake_scim_integration" "azured" {
+		name = "%s"
+		scim_client = "AZURE"
+		provisioner_role = snowflake_role.azured.name
+		depends_on = [
+			snowflake_account_grant.azurecud,
+			snowflake_account_grant.azurecrd,
+			snowflake_role_grants.azured
+		]
+	}
+
+	data 
snowflake_system_generate_scim_access_token p { + integration_name = snowflake_scim_integration.azured.name + } + `, name) +} diff --git a/pkg/datasources/system_get_privatelink_config.go b/pkg/datasources/system_get_privatelink_config.go new file mode 100644 index 0000000000..2566976c12 --- /dev/null +++ b/pkg/datasources/system_get_privatelink_config.go @@ -0,0 +1,86 @@ +package datasources + +import ( + "database/sql" + "log" + + "github.com/chanzuckerberg/terraform-provider-snowflake/pkg/snowflake" + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" +) + +var systemGetPrivateLinkConfigSchema = map[string]*schema.Schema{ + "account_name": { + Type: schema.TypeString, + Computed: true, + Description: "The name of your Snowflake account.", + }, + + "account_url": { + Type: schema.TypeString, + Computed: true, + Description: "The URL used to connect to Snowflake through AWS PrivateLink or Azure Private Link.", + }, + + "oscp_url": { + Type: schema.TypeString, + Computed: true, + Description: "The OCSP URL corresponding to your Snowflake account that uses AWS PrivateLink or Azure Private Link.", + }, + + "aws_vpce_id": { + Type: schema.TypeString, + Computed: true, + Description: "The AWS VPCE ID for your account.", + }, + + "azure_pls_id": { + Type: schema.TypeString, + Computed: true, + Description: "The Azure Private Link Service ID for your account.", + }, +} + +func SystemGetPrivateLinkConfig() *schema.Resource { + return &schema.Resource{ + Read: ReadSystemGetPrivateLinkConfig, + Schema: systemGetPrivateLinkConfigSchema, + } +} + +// ReadSystemGetPrivateLinkConfig implements schema.ReadFunc +func ReadSystemGetPrivateLinkConfig(d *schema.ResourceData, meta interface{}) error { + db := meta.(*sql.DB) + + sel := snowflake.SystemGetPrivateLinkConfigQuery() + row := snowflake.QueryRow(db, sel) + rawConfig, err := snowflake.ScanPrivateLinkConfig(row) + + if err == sql.ErrNoRows { + // If not found, mark resource to be removed from statefile during apply or refresh + log.Print("[DEBUG] system_get_privatelink_config not found") + d.SetId("") + return nil + } + + config, err := rawConfig.GetStructuredConfig() + if err != nil { + log.Printf("[DEBUG] system_get_privatelink_config failed to decode") + d.SetId("") + return nil + } + + d.SetId(config.AccountName) + d.Set("account_name", config.AccountName) + d.Set("account_url", config.AccountURL) + d.Set("oscp_url", config.OSCPURL) + + if config.AwsVpceID != "" { + d.Set("aws_vpce_id", config.AwsVpceID) + } + + if config.AzurePrivateLinkServiceID != "" { + d.Set("azure_pls_id", config.AzurePrivateLinkServiceID) + } + + return nil +} diff --git a/pkg/datasources/system_get_privatelink_config_acceptance_test.go b/pkg/datasources/system_get_privatelink_config_acceptance_test.go new file mode 100644 index 0000000000..eb1ec9f1df --- /dev/null +++ b/pkg/datasources/system_get_privatelink_config_acceptance_test.go @@ -0,0 +1,31 @@ +package datasources_test + +import ( + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" +) + +func TestAccSystemGetPrivateLinkConfig_aws(t *testing.T) { + resource.ParallelTest(t, resource.TestCase{ + Providers: providers(), + Steps: []resource.TestStep{ + { + Config: privateLinkConfig(), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttrSet("data.snowflake_system_get_privatelink_config.p", "account_name"), + resource.TestCheckResourceAttrSet("data.snowflake_system_get_privatelink_config.p", "account_url"), + 
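+					// Note: the data source schema names this attribute "oscp_url",
+					// so the check below uses that spelling for the OCSP URL.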
resource.TestCheckResourceAttrSet("data.snowflake_system_get_privatelink_config.p", "oscp_url"), + resource.TestCheckResourceAttrSet("data.snowflake_system_get_privatelink_config.p", "aws_vpce_id"), + ), + }, + }, + }) +} + +func privateLinkConfig() string { + s := ` + data snowflake_system_get_privatelink_config p {} + ` + return s +} diff --git a/pkg/datasources/system_get_snowflake_platform_info.go b/pkg/datasources/system_get_snowflake_platform_info.go new file mode 100644 index 0000000000..886582438d --- /dev/null +++ b/pkg/datasources/system_get_snowflake_platform_info.go @@ -0,0 +1,74 @@ +package datasources + +import ( + "database/sql" + "fmt" + "log" + + "github.com/chanzuckerberg/terraform-provider-snowflake/pkg/snowflake" + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" + "github.com/pkg/errors" +) + +var systemGetSnowflakePlatformInfoSchema = map[string]*schema.Schema{ + "azure_vnet_subnet_ids": { + Type: schema.TypeList, + Elem: &schema.Schema{Type: schema.TypeString}, + Computed: true, + Description: "Snowflake Azure Virtual Network Subnet IDs", + }, + "aws_vpc_ids": { + Type: schema.TypeList, + Elem: &schema.Schema{Type: schema.TypeString}, + Computed: true, + Description: "Snowflake AWS Virtual Private Cloud IDs", + }, +} + +func SystemGetSnowflakePlatformInfo() *schema.Resource { + return &schema.Resource{ + Read: ReadSystemGetSnowflakePlatformInfo, + Schema: systemGetSnowflakePlatformInfoSchema, + } +} + +// ReadSystemGetSnowflakePlatformInfo implements schema.ReadFunc +func ReadSystemGetSnowflakePlatformInfo(d *schema.ResourceData, meta interface{}) error { + db := meta.(*sql.DB) + sel := snowflake.SystemGetSnowflakePlatformInfoQuery() + row := snowflake.QueryRow(db, sel) + + acc, err := snowflake.ReadCurrentAccount(db) + if err != nil { + // If not found, mark resource to be removed from statefile during apply or refresh + d.SetId("") + log.Printf("[DEBUG] current_account failed to decode") + return errors.Wrap(err, "error current_account") + } + + d.SetId(fmt.Sprintf("%s.%s", acc.Account, acc.Region)) + + rawInfo, err := snowflake.ScanSnowflakePlatformInfo(row) + if err == sql.ErrNoRows { + // If not found, mark resource to be removed from statefile during apply or refresh + log.Print("[DEBUG] system_get_snowflake_platform_info not found") + return errors.Wrap(err, "error system_get_snowflake_platform_info") + } + + info, err := rawInfo.GetStructuredConfig() + if err != nil { + log.Printf("[DEBUG] system_get_snowflake_platform_info failed to decode") + d.SetId("") + return errors.Wrap(err, "error system_get_snowflake_platform_info") + } + + if err = d.Set("azure_vnet_subnet_ids", info.AzureVnetSubnetIds); err != nil { + return errors.Wrap(err, "error system_get_snowflake_platform_info") + } + + if err = d.Set("aws_vpc_ids", info.AwsVpcIds); err != nil { + return errors.Wrap(err, "error system_get_snowflake_platform_info") + } + + return nil +} diff --git a/pkg/datasources/system_get_snowflake_platform_info_acceptance_test.go b/pkg/datasources/system_get_snowflake_platform_info_acceptance_test.go new file mode 100644 index 0000000000..dc196d8ab8 --- /dev/null +++ b/pkg/datasources/system_get_snowflake_platform_info_acceptance_test.go @@ -0,0 +1,29 @@ +package datasources_test + +import ( + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" +) + +func TestAccSystemGetSnowflakePlatformInfo(t *testing.T) { + resource.ParallelTest(t, resource.TestCase{ + Providers: providers(), + Steps: []resource.TestStep{ + { + Config: 
snowflakePlatformInfo(), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttrSet("data.snowflake_system_get_snowflake_platform_info.p", "aws_vpc_ids.#"), + resource.TestCheckResourceAttrSet("data.snowflake_system_get_snowflake_platform_info.p", "azure_vnet_subnet_ids.#"), + ), + }, + }, + }) +} + +func snowflakePlatformInfo() string { + s := ` + data snowflake_system_get_snowflake_platform_info "p" {} + ` + return s +} diff --git a/pkg/provider/provider.go b/pkg/provider/provider.go index 7ed3821e56..2673bf947e 100644 --- a/pkg/provider/provider.go +++ b/pkg/provider/provider.go @@ -2,6 +2,8 @@ package provider import ( "crypto/rsa" + "encoding/json" + "io" "io/ioutil" "github.com/chanzuckerberg/terraform-provider-snowflake/pkg/datasources" @@ -12,6 +14,12 @@ import ( "github.com/pkg/errors" "github.com/snowflakedb/gosnowflake" "golang.org/x/crypto/ssh" + + "fmt" + "net/http" + "net/url" + "strconv" + "strings" ) // Provider is a provider @@ -33,21 +41,61 @@ func Provider() *schema.Provider { Optional: true, DefaultFunc: schema.EnvDefaultFunc("SNOWFLAKE_PASSWORD", nil), Sensitive: true, - ConflictsWith: []string{"browser_auth", "private_key_path", "private_key", "oauth_access_token"}, + ConflictsWith: []string{"browser_auth", "private_key_path", "private_key", "oauth_access_token", "oauth_refresh_token"}, }, "oauth_access_token": { Type: schema.TypeString, Optional: true, DefaultFunc: schema.EnvDefaultFunc("SNOWFLAKE_OAUTH_ACCESS_TOKEN", nil), Sensitive: true, - ConflictsWith: []string{"browser_auth", "private_key_path", "private_key", "password"}, + ConflictsWith: []string{"browser_auth", "private_key_path", "private_key", "password", "oauth_refresh_token"}, + }, + "oauth_refresh_token": { + Type: schema.TypeString, + Optional: true, + DefaultFunc: schema.EnvDefaultFunc("SNOWFLAKE_OAUTH_REFRESH_TOKEN", nil), + Sensitive: true, + ConflictsWith: []string{"browser_auth", "private_key_path", "private_key", "password", "oauth_access_token"}, + RequiredWith: []string{"oauth_client_id", "oauth_client_secret", "oauth_endpoint", "oauth_redirect_url"}, + }, + "oauth_client_id": { + Type: schema.TypeString, + Optional: true, + DefaultFunc: schema.EnvDefaultFunc("SNOWFLAKE_OAUTH_CLIENT_ID", nil), + Sensitive: true, + ConflictsWith: []string{"browser_auth", "private_key_path", "private_key", "password", "oauth_access_token"}, + RequiredWith: []string{"oauth_refresh_token", "oauth_client_secret", "oauth_endpoint", "oauth_redirect_url"}, + }, + "oauth_client_secret": { + Type: schema.TypeString, + Optional: true, + DefaultFunc: schema.EnvDefaultFunc("SNOWFLAKE_OAUTH_CLIENT_SECRET", nil), + Sensitive: true, + ConflictsWith: []string{"browser_auth", "private_key_path", "private_key", "password", "oauth_access_token"}, + RequiredWith: []string{"oauth_client_id", "oauth_refresh_token", "oauth_endpoint", "oauth_redirect_url"}, + }, + "oauth_endpoint": { + Type: schema.TypeString, + Optional: true, + DefaultFunc: schema.EnvDefaultFunc("SNOWFLAKE_OAUTH_ENDPOINT", nil), + Sensitive: true, + ConflictsWith: []string{"browser_auth", "private_key_path", "private_key", "password", "oauth_access_token"}, + RequiredWith: []string{"oauth_client_id", "oauth_client_secret", "oauth_refresh_token", "oauth_redirect_url"}, + }, + "oauth_redirect_url": { + Type: schema.TypeString, + Optional: true, + DefaultFunc: schema.EnvDefaultFunc("SNOWFLAKE_OAUTH_REDIRECT_URL", nil), + Sensitive: true, + ConflictsWith: []string{"browser_auth", "private_key_path", "private_key", "password", "oauth_access_token"}, + 
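+				// The refresh-token flow needs all five oauth_* settings together, so
+				// each field requires the other four (RequiredWith) and conflicts with
+				// the password, browser, key-pair, and static access-token auth methods.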
RequiredWith: []string{"oauth_client_id", "oauth_client_secret", "oauth_endpoint", "oauth_refresh_token"}, }, "browser_auth": { Type: schema.TypeBool, Optional: true, DefaultFunc: schema.EnvDefaultFunc("SNOWFLAKE_USE_BROWSER_AUTH", nil), Sensitive: false, - ConflictsWith: []string{"password", "private_key_path", "private_key", "oauth_access_token"}, + ConflictsWith: []string{"password", "private_key_path", "private_key", "oauth_access_token", "oauth_refresh_token"}, }, "private_key_path": { Type: schema.TypeString, @@ -61,7 +109,7 @@ func Provider() *schema.Provider { Optional: true, DefaultFunc: schema.EnvDefaultFunc("SNOWFLAKE_PRIVATE_KEY", nil), Sensitive: true, - ConflictsWith: []string{"browser_auth", "password", "oauth_access_token", "private_key_path"}, + ConflictsWith: []string{"browser_auth", "password", "oauth_access_token", "private_key_path", "oauth_refresh_token"}, }, "role": { Type: schema.TypeString, @@ -74,11 +122,9 @@ func Provider() *schema.Provider { DefaultFunc: schema.EnvDefaultFunc("SNOWFLAKE_REGION", "us-west-2"), }, }, - ResourcesMap: getResources(), - DataSourcesMap: map[string]*schema.Resource{ - "snowflake_system_get_aws_sns_iam_policy": datasources.SystemGetAWSSNSIAMPolicy(), - }, - ConfigureFunc: ConfigureProvider, + ResourcesMap: getResources(), + DataSourcesMap: getDataSources(), + ConfigureFunc: ConfigureProvider, } } @@ -106,10 +152,12 @@ func GetGrantResources() resources.TerraformGrantResources { } func getResources() map[string]*schema.Resource { + // NOTE(): do not add grant resources here others := map[string]*schema.Resource{ "snowflake_api_integration": resources.APIIntegration(), "snowflake_database": resources.Database(), "snowflake_external_function": resources.ExternalFunction(), + "snowflake_file_format": resources.FileFormat(), "snowflake_managed_account": resources.ManagedAccount(), "snowflake_masking_policy": resources.MaskingPolicy(), "snowflake_materialized_view": resources.MaterializedView(), @@ -117,17 +165,21 @@ func getResources() map[string]*schema.Resource { "snowflake_network_policy": resources.NetworkPolicy(), "snowflake_pipe": resources.Pipe(), "snowflake_resource_monitor": resources.ResourceMonitor(), - "snowflake_role_grants": resources.RoleGrants(), "snowflake_role": resources.Role(), + "snowflake_role_grants": resources.RoleGrants(), "snowflake_schema": resources.Schema(), + "snowflake_scim_integration": resources.SCIMIntegration(), + "snowflake_sequence": resources.Sequence(), "snowflake_share": resources.Share(), "snowflake_stage": resources.Stage(), "snowflake_storage_integration": resources.StorageIntegration(), + "snowflake_notification_integration": resources.NotificationIntegration(), "snowflake_stream": resources.Stream(), "snowflake_table": resources.Table(), "snowflake_external_table": resources.ExternalTable(), "snowflake_task": resources.Task(), "snowflake_user": resources.User(), + "snowflake_user_public_keys": resources.UserPublicKeys(), "snowflake_view": resources.View(), "snowflake_warehouse": resources.Warehouse(), } @@ -138,6 +190,18 @@ func getResources() map[string]*schema.Resource { ) } +func getDataSources() map[string]*schema.Resource { + dataSources := map[string]*schema.Resource{ + "snowflake_current_account": datasources.CurrentAccount(), + "snowflake_system_generate_scim_access_token": datasources.SystemGenerateSCIMAccessToken(), + "snowflake_system_get_aws_sns_iam_policy": datasources.SystemGetAWSSNSIAMPolicy(), + "snowflake_system_get_privatelink_config": datasources.SystemGetPrivateLinkConfig(), + 
"snowflake_system_get_snowflake_platform_info": datasources.SystemGetSnowflakePlatformInfo(), + } + + return dataSources +} + func ConfigureProvider(s *schema.ResourceData) (interface{}, error) { account := s.Get("account").(string) user := s.Get("username").(string) @@ -148,6 +212,19 @@ func ConfigureProvider(s *schema.ResourceData) (interface{}, error) { oauthAccessToken := s.Get("oauth_access_token").(string) region := s.Get("region").(string) role := s.Get("role").(string) + oauthRefreshToken := s.Get("oauth_refresh_token").(string) + oauthClientID := s.Get("oauth_client_id").(string) + oauthClientSecret := s.Get("oauth_client_secret").(string) + oauthEndpoint := s.Get("oauth_endpoint").(string) + oauthRedirectURL := s.Get("oauth_redirect_url").(string) + + if oauthRefreshToken != "" { + accessToken, err := GetOauthAccessToken(oauthEndpoint, oauthClientID, oauthClientSecret, GetOauthData(oauthRefreshToken, oauthRedirectURL)) + if err != nil { + return nil, errors.Wrap(err, "could not retreive access token from refresh token") + } + oauthAccessToken = accessToken + } dsn, err := DSN( account, @@ -160,7 +237,6 @@ func ConfigureProvider(s *schema.ResourceData) (interface{}, error) { region, role, ) - if err != nil { return nil, errors.Wrap(err, "could not build dsn for snowflake connection") } @@ -259,3 +335,61 @@ func ParsePrivateKey(privateKeyBytes []byte) (*rsa.PrivateKey, error) { } return rsaPrivateKey, nil } + +type Result struct { + AccessToken string `json:"access_token"` + TokenType string `json:"token_type"` + ExpiresIn int `json:"expires_in"` +} + +func GetOauthData(refreshToken, redirectUrl string) url.Values { + data := url.Values{} + data.Set("grant_type", "refresh_token") + data.Set("refresh_token", refreshToken) + data.Set("redirect_uri", redirectUrl) + return data +} + +func GetOauthRequest(dataContent io.Reader, endPoint, clientId, clientSecret string) (*http.Request, error) { + request, err := http.NewRequest("POST", endPoint, dataContent) + if err != nil { + return nil, errors.Wrap(err, "Request to the endpoint could not be completed") + } + request.SetBasicAuth(clientId, clientSecret) + request.Header.Set("Content-Type", "application/x-www-form-urlencoded;charset=UTF-8") + return request, nil + +} + +func GetOauthAccessToken( + endPoint, + client_id, + client_secret string, + data url.Values) (string, error) { + + client := &http.Client{} + request, err := GetOauthRequest(strings.NewReader(data.Encode()), endPoint, client_id, client_secret) + if err != nil { + return "", errors.Wrap(err, "Oauth request returned an error:") + } + + var result Result + + response, err := client.Do(request) + if err != nil { + return "", errors.Wrap(err, "Response status returned an error:") + } + if response.StatusCode != 200 { + return "", errors.New(fmt.Sprintf("Response status code: %s: %s", strconv.Itoa(response.StatusCode), http.StatusText(response.StatusCode))) + } + defer response.Body.Close() + body, err := ioutil.ReadAll(response.Body) + if err != nil { + return "", errors.Wrap(err, "Response body was not able to be parsed") + } + err = json.Unmarshal(body, &result) + if err != nil { + return "", errors.Wrap(err, "Error parsing JSON from Snowflake") + } + return result.AccessToken, nil +} diff --git a/pkg/provider/provider_test.go b/pkg/provider/provider_test.go index f1a96be4eb..e129625641 100644 --- a/pkg/provider/provider_test.go +++ b/pkg/provider/provider_test.go @@ -1,6 +1,14 @@ package provider_test import ( + "bytes" + "encoding/json" + "io/ioutil" + "net/http" + 
"net/url" + "reflect" + "strconv" + "strings" "testing" "github.com/chanzuckerberg/terraform-provider-snowflake/pkg/provider" @@ -88,3 +96,149 @@ func TestOAuthDSN(t *testing.T) { }) } } + +func TestGetOauthDATA(t *testing.T) { + type param struct { + refresh_token, + redirect_url string + } + refresh_token := "ETMsDgAAAXdeJNwXABRBRVMvQ0JDL1BLQ1M1UGFwPu1hHM3UoUexZBtXW+0cE7KJx2yoUV0ysWu3HKwhJ1v/iEa1Np5EdjGDsBqedR15aFb8NstLTWDUoTJPuQNZRJTjJeuxrX/JUM3/wzcrKt2zDf6QIpkfLXuSlDH4VABeqsaRdl5z6bE9VJVgAUKgZwizwedHAt6pcJgFcQffYZPaY=" + redirect_url := "https://localhost.com" + cases := []struct { + name string + param param + want string + wantErr bool + }{ + {"simpleData", param{refresh_token, redirect_url}, + "grant_type=refresh_token&redirect_uri=https%3A%2F%2Flocalhost.com&refresh_token=ETMsDgAAAXdeJNwXABRBRVMvQ0JDL1BLQ1M1UGFwPu1hHM3UoUexZBtXW%2B0cE7KJx2yoUV0ysWu3HKwhJ1v%2FiEa1Np5EdjGDsBqedR15aFb8NstLTWDUoTJPuQNZRJTjJeuxrX%2FJUM3%2FwzcrKt2zDf6QIpkfLXuSlDH4VABeqsaRdl5z6bE9VJVgAUKgZwizwedHAt6pcJgFcQffYZPaY%3D", + false}, + {"errorData", param{"no_refresh_token", redirect_url}, + "grant_type=refresh_token&redirect_uri=https%3A%2F%2Flocalhost.com&refresh_token=no_refresh_token", + false}, + } + for _, tt := range cases { + t.Run(tt.name, func(t *testing.T) { + got := provider.GetOauthData(tt.param.refresh_token, tt.param.redirect_url) + want, err := url.ParseQuery(tt.want) + if (err != nil) != tt.wantErr { + t.Errorf("GetData() error = %v, dsn = %v, wantErr %v", err, got, tt.wantErr) + return + } + if !reflect.DeepEqual(got, want) { + t.Errorf("GetData() = %v, want %v", got, tt.want) + } + + }) + } +} + +func TestGetOauthResponse(t *testing.T) { + type param struct { + dataStuff, + endpoint, + clientid, + clientscret string + } + dataStuff := "grant_type=refresh_token&redirect_uri=https%3A%2F%2Flocalhost.com&refresh_token=ETMsDgAAAXdeJNwXABRBRVMvQ0JDL1BLQ1M1UGFwPu1hHM3UoUexZBtXW%2B0cE7KJx2yoUV0ysWu3HKwhJ1v%2FiEa1Np5EdjGDsBqedR15aFb8NstLTWDUoTJPuQNZRJTjJeuxrX%2FJUM3%2FwzcrKt2zDf6QIpkfLXuSlDH4VABeqsaRdl5z6bE9VJVgAUKgZwizwedHAt6pcJgFcQffYZPaY%3D" + endpoint := "https://example.snowflakecomputing.com/oauth/token-request" + clientid := "nWsfd+gowithgoiwm1vJvGLckmLIMPS=" + clientsecret := "ThjKLFMD45wKIgVTecwVXguZrt+yHG1Ydth8eeQB34XU=" + cases := []struct { + name string + param param + want string + wantErr bool + }{ + {"simpleContent", param{dataStuff, endpoint, clientid, clientsecret}, + "application/x-www-form-urlencoded;charset=UTF-8", + false}, + } + for _, tt := range cases { + t.Run(tt.name, func(t *testing.T) { + got, err := provider.GetOauthRequest(strings.NewReader(tt.param.dataStuff), tt.param.endpoint, tt.param.clientid, tt.param.clientscret) + if err != nil { + t.Errorf("GetOauthRequest() %v", err) + } + if !reflect.DeepEqual(got.Header.Get("Content-Type"), tt.want) { + t.Errorf("GetResponse() = %v, want %v", got, tt.want) + } + }) + } +} + +// RoundTripFunc . +type RoundTripFunc func(req *http.Request) *http.Response + +// RoundTrip . 
+func (f RoundTripFunc) RoundTrip(req *http.Request) (*http.Response, error) { + return f(req), nil +} + +//NewTestClient returns *http.Client with Transport replaced to avoid making real calls +func NewTestClient(fn RoundTripFunc) *http.Client { + return &http.Client{ + Transport: RoundTripFunc(fn), + } +} + +func TestGetOauthAccessToken(t *testing.T) { + type param struct { + dataStuff, + endpoint, + clientid, + clientsecret string + } + dataStuff := "grant_type=refresh_token&redirect_uri=https%3A%2F%2Flocalhost.com&refresh_token=ETMsDgAAAXdeJNwXABRBRVMvQ0JDL1BLQ1M1UGFwPu1hHM3UoUexZBtXW%2B0cE7KJx2yoUV0ysWu3HKwhJ1v%2FiEa1Np5EdjGDsBqedR15aFb8NstLTWDUoTJPuQNZRJTjJeuxrX%2FJUM3%2FwzcrKt2zDf6QIpkfLXuSlDH4VABeqsaRdl5z6bE9VJVgAUKgZwizwedHAt6pcJgFcQffYZPaY%3D" + endpoint := "https://example.snowflakecomputing.com/oauth/token-request" + clientid := "nWsfd+gowithgoiwm1vJvGLckmLIMPS=" + clientsecret := "ThjKLFMD45wKIgVTecwVXguZrt+yHG1Ydth8eeQB34XU=" + cases := []struct { + name string + param param + want string + statuscode string + wantTok string + wantErr bool + }{ + {"simpleAccessToken", param{dataStuff, endpoint, clientid, clientsecret}, + `{"access_token": "ABCDEFGHIabchefghiJKLMNOPQRjklmnopqrSTUVWXYZstuvwxyz","token_type": "Bearer","expires_in": 600}`, + "200", "ABCDEFGHIabchefghiJKLMNOPQRjklmnopqrSTUVWXYZstuvwxyz", false}, + {"errorAccessToken", param{dataStuff, endpoint, clientid, clientsecret}, + "", + "404", "", false}, + } + for _, tt := range cases { + t.Run(tt.name, func(t *testing.T) { + client := NewTestClient(func(req *http.Request) *http.Response { + // Test request parameters + statusCODE, err := strconv.Atoi(tt.statuscode) + if err != nil { + t.Errorf("Invalid statuscode type %v", err) + } + return &http.Response{ + StatusCode: statusCODE, + Body: ioutil.NopCloser(bytes.NewBufferString(tt.want)), + Header: make(http.Header), + } + }) + req_got, err := provider.GetOauthRequest(strings.NewReader(tt.param.dataStuff), tt.param.endpoint, tt.param.clientid, tt.param.clientsecret) + if err != nil { + t.Errorf("GetOauthRequest() %v", err) + } + body, err := client.Do(req_got) + if err != nil { + t.Errorf("Body was not returned %v", err) + } + got, err := ioutil.ReadAll(body.Body) + if err != nil { + t.Errorf("Response body was not able to be parsed %v", err) + } + var result provider.Result + json.Unmarshal([]byte(got), &result) + if result.AccessToken != tt.wantTok { + t.Errorf("TestGetAccessToken() = %v, want %v", result.AccessToken, tt.want) + } + }) + } +} diff --git a/pkg/resources/account_grant.go b/pkg/resources/account_grant.go index 93d7732bbd..dc202f2b12 100644 --- a/pkg/resources/account_grant.go +++ b/pkg/resources/account_grant.go @@ -54,6 +54,9 @@ func AccountGrant() *TerraformGrantResource { Update: UpdateAccountGrant, Schema: accountGrantSchema, + Importer: &schema.ResourceImporter{ + StateContext: schema.ImportStatePassthroughContext, + }, }, ValidPrivs: validAccountPrivileges, } diff --git a/pkg/resources/external_table.go b/pkg/resources/external_table.go index e95570e93a..afc599ca71 100644 --- a/pkg/resources/external_table.go +++ b/pkg/resources/external_table.go @@ -212,8 +212,8 @@ func CreateExternalTable(data *schema.ResourceData, meta interface{}) error { // Set optionals if v, ok := data.GetOk("partition_by"); ok { - partiionBys := expandStringList(v.(*schema.Set).List()) - builder.WithPartitionBys(partiionBys) + partitionBys := expandStringList(v.([]interface{})) + builder.WithPartitionBys(partitionBys) } if v, ok := data.GetOk("pattern"); ok { diff --git 
a/pkg/resources/external_table_acceptance_test.go b/pkg/resources/external_table_acceptance_test.go index 517a32af8c..0c0b7abe85 100644 --- a/pkg/resources/external_table_acceptance_test.go +++ b/pkg/resources/external_table_acceptance_test.go @@ -68,13 +68,13 @@ resource "snowflake_external_table" "test_table" { comment = "Terraform acceptance test" column { name = "column1" - type = "TIMESTAMP_NTZ(9)" - as = "($1:\"CreatedDate\"::timestamp)" + type = "STRING" + as = "TO_VARCHAR(TO_TIMESTAMP_NTZ(value:unix_timestamp_property::NUMBER, 3), 'yyyy-mm-dd-hh')" } column { name = "column2" type = "TIMESTAMP_NTZ(9)" - as = "($1:\"CreatedDate\"::timestamp)" + as = "($1:'CreatedDate'::timestamp)" } file_format = "TYPE = CSV" location = "@${snowflake_database.test.name}.${snowflake_schema.test.name}.${snowflake_stage.test.name}" diff --git a/pkg/resources/file_format.go b/pkg/resources/file_format.go new file mode 100644 index 0000000000..0431f4aeac --- /dev/null +++ b/pkg/resources/file_format.go @@ -0,0 +1,1143 @@ +package resources + +import ( + "bytes" + "database/sql" + "encoding/csv" + "fmt" + "strings" + + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/validation" + "github.com/pkg/errors" + + "github.com/chanzuckerberg/terraform-provider-snowflake/pkg/snowflake" +) + +const ( + fileFormatIDDelimiter = '|' +) + +// valid format type options for each File Format Type +// https://docs.snowflake.com/en/sql-reference/sql/create-file-format.html#syntax +var formatTypeOptions = map[string][]string{ + "CSV": { + "compression", + "record_delimiter", + "field_delimiter", + "file_extension", + "skip_header", + "skip_blank_lines", + "date_format", + "time_format", + "timestamp_format", + "binary_format", + "escape", + "escape_unenclosed_field", + "trim_space", + "field_optionally_enclosed_by", + "null_if", + "error_on_column_count_mismatch", + "replace_invalid_characters", + "validate_utf8", + "empty_field_as_null", + "skip_byte_order_mark", + "encoding", + }, + "JSON": { + "compression", + "date_format", + "time_format", + "timestamp_format", + "binary_format", + "trim_space", + "null_if", + "file_extension", + "enable_octal", + "allow_duplicate", + "strip_outer_array", + "strip_null_values", + "replace_invalid_characters", + "ignore_utf8_errors", + "skip_byte_order_mark", + }, + "AVRO": { + "compression", + "trim_space", + "null_if", + }, + "ORC": { + "trim_space", + "null_if", + }, + "PARQUET": { + "compression", + "binary_as_text", + "trim_space", + "null_if", + }, + "XML": { + "compression", + "ignore_utf8_errors", + "preserve_space", + "strip_outer_element", + "disable_snowflake_data", + "disable_auto_convert", + "skip_byte_order_mark", + }, +} + +var fileFormatSchema = map[string]*schema.Schema{ + "name": { + Type: schema.TypeString, + Required: true, + Description: "Specifies the identifier for the file format; must be unique for the database and schema in which the file format is created.", + ForceNew: true, + }, + "database": { + Type: schema.TypeString, + Required: true, + Description: "The database in which to create the file format.", + ForceNew: true, + }, + "schema": { + Type: schema.TypeString, + Required: true, + Description: "The schema in which to create the file format.", + ForceNew: true, + }, + "format_type": { + Type: schema.TypeString, + Required: true, + Description: "Specifies the format of the input files (for data loading) or output files (for data unloading).", + ForceNew: true, + ValidateFunc: 
validation.StringInSlice([]string{"CSV", "JSON", "AVRO", "ORC", "PARQUET", "XML"}, true), + }, + "compression": { + Type: schema.TypeString, + Optional: true, + Description: "Specifies the current compression algorithm for the data file.", + }, + "record_delimiter": { + Type: schema.TypeString, + Optional: true, + Description: "Specifies one or more singlebyte or multibyte characters that separate records in an input file (data loading) or unloaded file (data unloading).", + }, + "field_delimiter": { + Type: schema.TypeString, + Optional: true, + Description: "Specifies one or more singlebyte or multibyte characters that separate fields in an input file (data loading) or unloaded file (data unloading).", + }, + "file_extension": { + Type: schema.TypeString, + Optional: true, + Description: "Specifies the extension for files unloaded to a stage.", + }, + "skip_header": { + Type: schema.TypeInt, + Optional: true, + Description: "Number of lines at the start of the file to skip.", + }, + "skip_blank_lines": { + Type: schema.TypeBool, + Optional: true, + Description: "Boolean that specifies to skip any blank lines encountered in the data files.", + }, + "date_format": { + Type: schema.TypeString, + Optional: true, + Description: "Defines the format of date values in the data files (data loading) or table (data unloading).", + }, + "time_format": { + Type: schema.TypeString, + Optional: true, + Description: "Defines the format of time values in the data files (data loading) or table (data unloading).", + }, + "timestamp_format": { + Type: schema.TypeString, + Optional: true, + Description: "Defines the format of timestamp values in the data files (data loading) or table (data unloading).", + }, + "binary_format": { + Type: schema.TypeString, + Optional: true, + Description: "Defines the encoding format for binary input or output.", + }, + "escape": { + Type: schema.TypeString, + Optional: true, + Description: "Single character string used as the escape character for field values.", + }, + "escape_unenclosed_field": { + Type: schema.TypeString, + Optional: true, + Description: "Single character string used as the escape character for unenclosed field values only.", + }, + "trim_space": { + Type: schema.TypeBool, + Optional: true, + Description: "Boolean that specifies whether to remove white space from fields.", + }, + "field_optionally_enclosed_by": { + Type: schema.TypeString, + Optional: true, + Description: "Character used to enclose strings.", + }, + "null_if": { + Type: schema.TypeList, + Elem: &schema.Schema{Type: schema.TypeString}, + Optional: true, + Description: "String used to convert to and from SQL NULL.", + }, + "error_on_column_count_mismatch": { + Type: schema.TypeBool, + Optional: true, + Description: "Boolean that specifies whether to generate a parsing error if the number of delimited columns (i.e. 
fields) in an input file does not match the number of columns in the corresponding table.", + }, + "replace_invalid_characters": { + Type: schema.TypeBool, + Optional: true, + Description: "Boolean that specifies whether to replace invalid UTF-8 characters with the Unicode replacement character (�).", + }, + "validate_utf8": { + Type: schema.TypeBool, + Optional: true, + Description: "Boolean that specifies whether to validate UTF-8 character encoding in string column data.", + }, + "empty_field_as_null": { + Type: schema.TypeBool, + Optional: true, + Description: "Specifies whether to insert SQL NULL for empty fields in an input file, which are represented by two successive delimiters.", + }, + "skip_byte_order_mark": { + Type: schema.TypeBool, + Optional: true, + Description: "Boolean that specifies whether to skip the BOM (byte order mark), if present in a data file.", + }, + "encoding": { + Type: schema.TypeString, + Optional: true, + Description: "String (constant) that specifies the character set of the source data when loading data into a table.", + }, + "enable_octal": { + Type: schema.TypeBool, + Optional: true, + Description: "Boolean that enables parsing of octal numbers.", + }, + "allow_duplicate": { + Type: schema.TypeBool, + Optional: true, + Description: "Boolean that specifies to allow duplicate object field names (only the last one will be preserved).", + }, + "strip_outer_array": { + Type: schema.TypeBool, + Optional: true, + Description: "Boolean that instructs the JSON parser to remove outer brackets.", + }, + "strip_null_values": { + Type: schema.TypeBool, + Optional: true, + Description: "Boolean that instructs the JSON parser to remove object fields or array elements containing null values.", + }, + "ignore_utf8_errors": { + Type: schema.TypeBool, + Optional: true, + Description: "Boolean that specifies whether UTF-8 encoding errors produce error conditions.", + }, + "binary_as_text": { + Type: schema.TypeBool, + Optional: true, + Description: "Boolean that specifies whether to interpret columns with no defined logical data type as UTF-8 text.", + }, + "preserve_space": { + Type: schema.TypeBool, + Optional: true, + Description: "Boolean that specifies whether the XML parser preserves leading and trailing spaces in element content.", + }, + "strip_outer_element": { + Type: schema.TypeBool, + Optional: true, + Description: "Boolean that specifies whether the XML parser strips out the outer XML element, exposing 2nd level elements as separate documents.", + }, + "disable_snowflake_data": { + Type: schema.TypeBool, + Optional: true, + Description: "Boolean that specifies whether the XML parser disables recognition of Snowflake semi-structured data tags.", + }, + "disable_auto_convert": { + Type: schema.TypeBool, + Optional: true, + Description: "Boolean that specifies whether the XML parser disables automatic conversion of numeric and Boolean values from text to native representation.", + }, + "comment": { + Type: schema.TypeString, + Optional: true, + Description: "Specifies a comment for the file format.", + }, +} + +type fileFormatID struct { + DatabaseName string + SchemaName string + FileFormatName string +} + +func (ffi *fileFormatID) String() (string, error) { + var buf bytes.Buffer + csvWriter := csv.NewWriter(&buf) + csvWriter.Comma = fileFormatIDDelimiter + err := csvWriter.WriteAll([][]string{{ffi.DatabaseName, ffi.SchemaName, ffi.FileFormatName}}) + if err != nil { + return "", err + } + + return strings.TrimSpace(buf.String()), nil +} + +// FileFormat 
returns a pointer to the resource representing a file format +func FileFormat() *schema.Resource { + return &schema.Resource{ + Create: CreateFileFormat, + Read: ReadFileFormat, + Update: UpdateFileFormat, + Delete: DeleteFileFormat, + Exists: FileFormatExists, + + Schema: fileFormatSchema, + Importer: &schema.ResourceImporter{ + State: schema.ImportStatePassthrough, + }, + } +} + +// CreateFileFormat implements schema.CreateFunc +func CreateFileFormat(data *schema.ResourceData, meta interface{}) error { + db := meta.(*sql.DB) + + dbName := data.Get("database").(string) + schemaName := data.Get("schema").(string) + fileFormatName := data.Get("name").(string) + + builder := snowflake.FileFormat(fileFormatName, dbName, schemaName) + + formatType := data.Get("format_type").(string) + builder.WithFormatType(formatType) + + // Set optionals + if v, ok, err := getFormatTypeOption(data, formatType, "compression"); ok && err == nil { + builder.WithCompression(v.(string)) + } else if err != nil { + return err + } + + if v, ok, err := getFormatTypeOption(data, formatType, "record_delimiter"); ok && err == nil { + builder.WithRecordDelimiter(v.(string)) + } else if err != nil { + return err + } + + if v, ok, err := getFormatTypeOption(data, formatType, "field_delimiter"); ok && err == nil { + builder.WithFieldDelimiter(v.(string)) + } else if err != nil { + return err + } + + if v, ok, err := getFormatTypeOption(data, formatType, "file_extension"); ok && err == nil { + builder.WithFileExtension(v.(string)) + } else if err != nil { + return err + } + + if v, ok, err := getFormatTypeOption(data, formatType, "skip_header"); ok && err == nil { + builder.WithSkipHeader(v.(int)) + } else if err != nil { + return err + } + + if v, ok, err := getFormatTypeOption(data, formatType, "skip_blank_lines"); ok && err == nil { + builder.WithSkipBlankLines(v.(bool)) + } else if err != nil { + return err + } + + if v, ok, err := getFormatTypeOption(data, formatType, "date_format"); ok && err == nil { + builder.WithDateFormat(v.(string)) + } else if err != nil { + return err + } + + if v, ok, err := getFormatTypeOption(data, formatType, "time_format"); ok && err == nil { + builder.WithTimeFormat(v.(string)) + } else if err != nil { + return err + } + + if v, ok, err := getFormatTypeOption(data, formatType, "timestamp_format"); ok && err == nil { + builder.WithTimestampFormat(v.(string)) + } else if err != nil { + return err + } + + if v, ok, err := getFormatTypeOption(data, formatType, "binary_format"); ok && err == nil { + builder.WithBinaryFormat(v.(string)) + } else if err != nil { + return err + } + + if v, ok, err := getFormatTypeOption(data, formatType, "escape"); ok && err == nil { + builder.WithEscape(v.(string)) + } else if err != nil { + return err + } + + if v, ok, err := getFormatTypeOption(data, formatType, "escape_unenclosed_field"); ok && err == nil { + builder.WithEscapeUnenclosedField(v.(string)) + } else if err != nil { + return err + } + + if v, ok, err := getFormatTypeOption(data, formatType, "trim_space"); ok && err == nil { + builder.WithTrimSpace(v.(bool)) + } else if err != nil { + return err + } + + if v, ok, err := getFormatTypeOption(data, formatType, "field_optionally_enclosed_by"); ok && err == nil { + builder.WithFieldOptionallyEnclosedBy(v.(string)) + } else if err != nil { + return err + } + + if v, ok, err := getFormatTypeOption(data, formatType, "null_if"); ok && err == nil { + builder.WithNullIf(expandStringList(v.([]interface{}))) + } else if err != nil { + return err + } + + if v, 
ok, err := getFormatTypeOption(data, formatType, "error_on_column_count_mismatch"); ok && err == nil { + builder.WithErrorOnColumnCountMismatch(v.(bool)) + } else if err != nil { + return err + } + + if v, ok, err := getFormatTypeOption(data, formatType, "replace_invalid_characters"); ok && err == nil { + builder.WithReplaceInvalidCharacters(v.(bool)) + } else if err != nil { + return err + } + + if v, ok, err := getFormatTypeOption(data, formatType, "validate_utf8"); ok && err == nil { + builder.WithValidateUTF8(v.(bool)) + } else if err != nil { + return err + } + + if v, ok, err := getFormatTypeOption(data, formatType, "empty_field_as_null"); ok && err == nil { + builder.WithEmptyFieldAsNull(v.(bool)) + } else if err != nil { + return err + } + + if v, ok, err := getFormatTypeOption(data, formatType, "skip_byte_order_mark"); ok && err == nil { + builder.WithSkipByteOrderMark(v.(bool)) + } else if err != nil { + return err + } + + if v, ok, err := getFormatTypeOption(data, formatType, "encoding"); ok && err == nil { + builder.WithEncoding(v.(string)) + } else if err != nil { + return err + } + + if v, ok, err := getFormatTypeOption(data, formatType, "enable_octal"); ok && err == nil { + builder.WithEnableOctal(v.(bool)) + } else if err != nil { + return err + } + + if v, ok, err := getFormatTypeOption(data, formatType, "allow_duplicate"); ok && err == nil { + builder.WithAllowDuplicate(v.(bool)) + } else if err != nil { + return err + } + + if v, ok, err := getFormatTypeOption(data, formatType, "strip_outer_array"); ok && err == nil { + builder.WithStripOuterArray(v.(bool)) + } else if err != nil { + return err + } + + if v, ok, err := getFormatTypeOption(data, formatType, "strip_null_values"); ok && err == nil { + builder.WithStripNullValues(v.(bool)) + } else if err != nil { + return err + } + + if v, ok, err := getFormatTypeOption(data, formatType, "ignore_utf8_errors"); ok && err == nil { + builder.WithIgnoreUTF8Errors(v.(bool)) + } else if err != nil { + return err + } + + if v, ok, err := getFormatTypeOption(data, formatType, "binary_as_text"); ok && err == nil { + builder.WithBinaryAsText(v.(bool)) + } else if err != nil { + return err + } + + if v, ok, err := getFormatTypeOption(data, formatType, "preserve_space"); ok && err == nil { + builder.WithPreserveSpace(v.(bool)) + } else if err != nil { + return err + } + + if v, ok, err := getFormatTypeOption(data, formatType, "strip_outer_element"); ok && err == nil { + builder.WithStripOuterElement(v.(bool)) + } else if err != nil { + return err + } + + if v, ok, err := getFormatTypeOption(data, formatType, "disable_snowflake_data"); ok && err == nil { + builder.WithDisableSnowflakeData(v.(bool)) + } else if err != nil { + return err + } + + if v, ok, err := getFormatTypeOption(data, formatType, "disable_auto_convert"); ok && err == nil { + builder.WithDisableAutoConvert(v.(bool)) + } else if err != nil { + return err + } + + if v, ok := data.GetOk("comment"); ok { + builder.WithComment(v.(string)) + } + + q := builder.Create() + + err := snowflake.Exec(db, q) + if err != nil { + return errors.Wrapf(err, "error creating file format %v", fileFormatName) + } + + fileFormatID := &fileFormatID{ + DatabaseName: dbName, + SchemaName: schemaName, + FileFormatName: fileFormatName, + } + dataIDInput, err := fileFormatID.String() + if err != nil { + return err + } + data.SetId(dataIDInput) + + return ReadFileFormat(data, meta) +} + +// ReadFileFormat implements schema.ReadFunc +func ReadFileFormat(data *schema.ResourceData, meta interface{}) 
error { + db := meta.(*sql.DB) + fileFormatID, err := fileFormatIDFromString(data.Id()) + if err != nil { + return err + } + + dbName := fileFormatID.DatabaseName + schemaName := fileFormatID.SchemaName + fileFormatName := fileFormatID.FileFormatName + + ff := snowflake.FileFormat(fileFormatName, dbName, schemaName).Show() + row := snowflake.QueryRow(db, ff) + + f, err := snowflake.ScanFileFormatShow(row) + if err != nil { + return err + } + + opts, err := snowflake.ParseFormatOptions(f.FormatOptions.String) + if err != nil { + return err + } + + err = data.Set("name", f.FileFormatName.String) + if err != nil { + return err + } + + err = data.Set("database", f.DatabaseName.String) + if err != nil { + return err + } + + err = data.Set("schema", f.SchemaName.String) + if err != nil { + return err + } + + err = data.Set("format_type", opts.Type) + if err != nil { + return err + } + + err = data.Set("compression", opts.Compression) + if err != nil { + return err + } + + err = data.Set("record_delimiter", opts.RecordDelimiter) + if err != nil { + return err + } + + err = data.Set("field_delimiter", opts.FieldDelimiter) + if err != nil { + return err + } + + err = data.Set("file_extension", opts.FileExtension) + if err != nil { + return err + } + + err = data.Set("skip_header", opts.SkipHeader) + if err != nil { + return err + } + + err = data.Set("skip_blank_lines", opts.SkipBlankLines) + if err != nil { + return err + } + + err = data.Set("date_format", opts.DateFormat) + if err != nil { + return err + } + + err = data.Set("time_format", opts.TimeFormat) + if err != nil { + return err + } + + err = data.Set("timestamp_format", opts.TimestampFormat) + if err != nil { + return err + } + + err = data.Set("binary_format", opts.BinaryFormat) + if err != nil { + return err + } + + err = data.Set("escape", opts.Escape) + if err != nil { + return err + } + + err = data.Set("escape_unenclosed_field", opts.EscapeUnenclosedField) + if err != nil { + return err + } + + err = data.Set("trim_space", opts.TrimSpace) + if err != nil { + return err + } + + err = data.Set("field_optionally_enclosed_by", opts.FieldOptionallyEnclosedBy) + if err != nil { + return err + } + + err = data.Set("null_if", opts.NullIf) + if err != nil { + return err + } + + err = data.Set("error_on_column_count_mismatch", opts.ErrorOnColumnCountMismatch) + if err != nil { + return err + } + + err = data.Set("replace_invalid_characters", opts.ReplaceInvalidCharacters) + if err != nil { + return err + } + + err = data.Set("validate_utf8", opts.ValidateUTF8) + if err != nil { + return err + } + + err = data.Set("empty_field_as_null", opts.EmptyFieldAsNull) + if err != nil { + return err + } + + err = data.Set("skip_byte_order_mark", opts.SkipByteOrderMark) + if err != nil { + return err + } + + err = data.Set("encoding", opts.Encoding) + if err != nil { + return err + } + + err = data.Set("enable_octal", opts.EnabelOctal) + if err != nil { + return err + } + + err = data.Set("allow_duplicate", opts.AllowDuplicate) + if err != nil { + return err + } + + err = data.Set("strip_outer_array", opts.StripOuterArray) + if err != nil { + return err + } + + err = data.Set("strip_null_values", opts.StripNullValues) + if err != nil { + return err + } + + err = data.Set("ignore_utf8_errors", opts.IgnoreUTF8Errors) + if err != nil { + return err + } + + err = data.Set("binary_as_text", opts.BinaryAsText) + if err != nil { + return err + } + + err = data.Set("preserve_space", opts.PreserveSpace) + if err != nil { + return err + } + + err = 
data.Set("strip_outer_element", opts.StripOuterElement) + if err != nil { + return err + } + + err = data.Set("disable_snowflake_data", opts.DisableSnowflakeData) + if err != nil { + return err + } + + err = data.Set("disable_auto_convert", opts.DisableAutoConvert) + if err != nil { + return err + } + + err = data.Set("comment", f.Comment.String) + if err != nil { + return err + } + + return nil +} + +// UpdateFileFormat implements schema.UpdateFunc +func UpdateFileFormat(data *schema.ResourceData, meta interface{}) error { + fileFormatID, err := fileFormatIDFromString(data.Id()) + if err != nil { + return err + } + + dbName := fileFormatID.DatabaseName + schemaName := fileFormatID.SchemaName + fileFormatName := fileFormatID.FileFormatName + + builder := snowflake.FileFormat(fileFormatName, dbName, schemaName) + fmt.Println(builder) + + db := meta.(*sql.DB) + if data.HasChange("compression") { + change := data.Get("compression") + q := builder.ChangeCompression(change.(string)) + err := snowflake.Exec(db, q) + if err != nil { + return errors.Wrapf(err, "error updating file format compression on %v", data.Id()) + } + } + + if data.HasChange("record_delimiter") { + change := data.Get("record_delimiter") + q := builder.ChangeRecordDelimiter(change.(string)) + err := snowflake.Exec(db, q) + if err != nil { + return errors.Wrapf(err, "error updating file format record delimiter on %v", data.Id()) + } + } + + if data.HasChange("field_delimiter") { + change := data.Get("field_delimiter") + q := builder.ChangeFieldDelimiter(change.(string)) + err := snowflake.Exec(db, q) + if err != nil { + return errors.Wrapf(err, "error updating file format field delimiter on %v", data.Id()) + } + } + + if data.HasChange("file_extension") { + change := data.Get("file_extension") + q := builder.ChangeFileExtension(change.(string)) + err := snowflake.Exec(db, q) + if err != nil { + return errors.Wrapf(err, "error updating file format file extension on %v", data.Id()) + } + + } + + if data.HasChange("skip_header") { + change := data.Get("skip_header") + q := builder.ChangeSkipHeader(change.(int)) + err := snowflake.Exec(db, q) + if err != nil { + return errors.Wrapf(err, "error updating file format skip header on %v", data.Id()) + } + } + + if data.HasChange("skip_blank_lines") { + change := data.Get("skip_blank_lines") + q := builder.ChangeSkipBlankLines(change.(bool)) + err := snowflake.Exec(db, q) + if err != nil { + return errors.Wrapf(err, "error updating file format skip blank lines on %v", data.Id()) + } + } + + if data.HasChange("date_format") { + change := data.Get("date_format") + q := builder.ChangeDateFormat(change.(string)) + err := snowflake.Exec(db, q) + if err != nil { + return errors.Wrapf(err, "error updating file format date format on %v", data.Id()) + } + } + + if data.HasChange("time_format") { + change := data.Get("time_format") + q := builder.ChangeTimeFormat(change.(string)) + err := snowflake.Exec(db, q) + if err != nil { + return errors.Wrapf(err, "error updating file format time format on %v", data.Id()) + } + } + + if data.HasChange("timestamp_format") { + change := data.Get("timestamp_format") + q := builder.ChangeTimestampFormat(change.(string)) + err := snowflake.Exec(db, q) + if err != nil { + return errors.Wrapf(err, "error updating file format timstamp format on %v", data.Id()) + } + } + + if data.HasChange("binary_format") { + change := data.Get("binary_format") + q := builder.ChangeBinaryFormat(change.(string)) + err := snowflake.Exec(db, q) + if err != nil { + return 
errors.Wrapf(err, "error updating file format binary format on %v", data.Id()) + } + } + + if data.HasChange("escape") { + change := data.Get("escape") + q := builder.ChangeEscape(change.(string)) + err := snowflake.Exec(db, q) + if err != nil { + return errors.Wrapf(err, "error updating file format escape on %v", data.Id()) + } + } + + if data.HasChange("escape_unenclosed_field") { + change := data.Get("escape_unenclosed_field") + q := builder.ChangeEscapeUnenclosedField(change.(string)) + err := snowflake.Exec(db, q) + if err != nil { + return errors.Wrapf(err, "error updating file format escape_unenclosed_field on %v", data.Id()) + } + } + + if data.HasChange("field_optionally_enclosed_by") { + change := data.Get("field_optionally_enclosed_by") + q := builder.ChangeFieldOptionallyEnclosedBy(change.(string)) + err := snowflake.Exec(db, q) + if err != nil { + return errors.Wrapf(err, "error updating file format field_optionally_enclosed_by on %v", data.Id()) + } + } + + if data.HasChange("encoding") { + change := data.Get("encoding") + q := builder.ChangeEncoding(change.(string)) + err := snowflake.Exec(db, q) + if err != nil { + return errors.Wrapf(err, "error updating file format encoding on %v", data.Id()) + } + } + + if data.HasChange("comment") { + change := data.Get("comment") + q := builder.ChangeComment(change.(string)) + err := snowflake.Exec(db, q) + if err != nil { + return errors.Wrapf(err, "error updating file format comment on %v", data.Id()) + } + } + + if data.HasChange("trim_space") { + change := data.Get("trim_space") + q := builder.ChangeTrimSpace(change.(bool)) + err := snowflake.Exec(db, q) + if err != nil { + return errors.Wrapf(err, "error updating file format trim_space on %v", data.Id()) + } + } + + if data.HasChange("error_on_column_count_mismatch") { + change := data.Get("error_on_column_count_mismatch") + q := builder.ChangeErrorOnColumnCountMismatch(change.(bool)) + err := snowflake.Exec(db, q) + if err != nil { + return errors.Wrapf(err, "error updating file format error_on_column_count_mismatch on %v", data.Id()) + } + } + + if data.HasChange("replace_invalid_characters") { + change := data.Get("replace_invalid_characters") + q := builder.ChangeReplaceInvalidCharacters(change.(bool)) + err := snowflake.Exec(db, q) + if err != nil { + return errors.Wrapf(err, "error updating file format replace_invalid_characters on %v", data.Id()) + } + } + + if data.HasChange("validate_utf8") { + change := data.Get("validate_utf8") + q := builder.ChangeValidateUTF8(change.(bool)) + err := snowflake.Exec(db, q) + if err != nil { + return errors.Wrapf(err, "error updating file format validate_utf8 on %v", data.Id()) + } + } + + if data.HasChange("empty_field_as_null") { + change := data.Get("empty_field_as_null") + q := builder.ChangeEmptyFieldAsNull(change.(bool)) + err := snowflake.Exec(db, q) + if err != nil { + return errors.Wrapf(err, "error updating file format empty_field_as_null on %v", data.Id()) + } + } + + if data.HasChange("skip_byte_order_mark") { + change := data.Get("skip_byte_order_mark") + q := builder.ChangeSkipByteOrderMark(change.(bool)) + err := snowflake.Exec(db, q) + if err != nil { + return errors.Wrapf(err, "error updating file format skip_byte_order_mark on %v", data.Id()) + } + } + + if data.HasChange("enable_octal") { + change := data.Get("enable_octal") + q := builder.ChangeEnableOctal(change.(bool)) + err := snowflake.Exec(db, q) + if err != nil { + return errors.Wrapf(err, "error updating file format enable_octal on %v", data.Id()) + } + } + + 
if data.HasChange("allow_duplicate") { + change := data.Get("allow_duplicate") + q := builder.ChangeAllowDuplicate(change.(bool)) + err := snowflake.Exec(db, q) + if err != nil { + return errors.Wrapf(err, "error updating file format allow_duplicate on %v", data.Id()) + } + } + + if data.HasChange("strip_outer_array") { + change := data.Get("strip_outer_array") + q := builder.ChangeStripOuterArray(change.(bool)) + err := snowflake.Exec(db, q) + if err != nil { + return errors.Wrapf(err, "error updating file format strip_outer_array on %v", data.Id()) + } + } + + if data.HasChange("strip_null_values") { + change := data.Get("strip_null_values") + q := builder.ChangeStripNullValues(change.(bool)) + err := snowflake.Exec(db, q) + if err != nil { + return errors.Wrapf(err, "error updating file format strip_null_values on %v", data.Id()) + } + } + + if data.HasChange("ignore_utf8_errors") { + change := data.Get("ignore_utf8_errors") + q := builder.ChangeIgnoreUTF8Errors(change.(bool)) + err := snowflake.Exec(db, q) + if err != nil { + return errors.Wrapf(err, "error updating file format ignore_utf8_errors on %v", data.Id()) + } + } + + if data.HasChange("binary_as_text") { + change := data.Get("binary_as_text") + q := builder.ChangeBinaryAsText(change.(bool)) + err := snowflake.Exec(db, q) + if err != nil { + return errors.Wrapf(err, "error updating file format binary_as_text on %v", data.Id()) + } + } + + if data.HasChange("preserve_space") { + change := data.Get("preserve_space") + q := builder.ChangePreserveSpace(change.(bool)) + err := snowflake.Exec(db, q) + if err != nil { + return errors.Wrapf(err, "error updating file format preserve_space on %v", data.Id()) + } + } + + if data.HasChange("strip_outer_element") { + change := data.Get("strip_outer_element") + q := builder.ChangeStripOuterElement(change.(bool)) + err := snowflake.Exec(db, q) + if err != nil { + return errors.Wrapf(err, "error updating file format strip_outer_element on %v", data.Id()) + } + } + + if data.HasChange("disable_snowflake_data") { + change := data.Get("disable_snowflake_data") + q := builder.ChangeDisableSnowflakeData(change.(bool)) + err := snowflake.Exec(db, q) + if err != nil { + return errors.Wrapf(err, "error updating file format disable_snowflake_data on %v", data.Id()) + } + } + + if data.HasChange("disable_auto_convert") { + change := data.Get("disable_auto_convert") + q := builder.ChangeDisableAutoConvert(change.(bool)) + err := snowflake.Exec(db, q) + if err != nil { + return errors.Wrapf(err, "error updating file format disable_auto_convert on %v", data.Id()) + } + } + + if data.HasChange("null_if") { + change := data.Get("null_if") + q := builder.ChangeNullIf(expandStringList(change.([]interface{}))) + err := snowflake.Exec(db, q) + if err != nil { + return errors.Wrapf(err, "error updating file format null_if on %v", data.Id()) + } + } + + return ReadFileFormat(data, meta) +} + +// DeleteFileFormat implements schema.DeleteFunc +func DeleteFileFormat(data *schema.ResourceData, meta interface{}) error { + db := meta.(*sql.DB) + fileFormatID, err := fileFormatIDFromString(data.Id()) + if err != nil { + return err + } + + dbName := fileFormatID.DatabaseName + schemaName := fileFormatID.SchemaName + fileFormatName := fileFormatID.FileFormatName + + q := snowflake.FileFormat(fileFormatName, dbName, schemaName).Drop() + + err = snowflake.Exec(db, q) + if err != nil { + return errors.Wrapf(err, "error deleting file format %v", data.Id()) + } + + data.SetId("") + + return nil +} + +// FileFormatExists 
implements schema.ExistsFunc +func FileFormatExists(data *schema.ResourceData, meta interface{}) (bool, error) { + db := meta.(*sql.DB) + fileFormatID, err := fileFormatIDFromString(data.Id()) + if err != nil { + return false, err + } + + dbName := fileFormatID.DatabaseName + schemaName := fileFormatID.SchemaName + fileFormatName := fileFormatID.FileFormatName + + q := snowflake.FileFormat(fileFormatName, dbName, schemaName).Describe() + rows, err := db.Query(q) + if err != nil { + return false, err + } + defer rows.Close() + + if rows.Next() { + return true, nil + } + + return false, nil +} + +func fileFormatIDFromString(stringID string) (*fileFormatID, error) { + reader := csv.NewReader(strings.NewReader(stringID)) + reader.Comma = fileFormatIDDelimiter + lines, err := reader.ReadAll() + if err != nil { + return nil, fmt.Errorf("not CSV compatible") + } + + if len(lines) != 1 { + return nil, fmt.Errorf("expected exactly 1 line") + } + if len(lines[0]) != 3 { + return nil, fmt.Errorf("expected 3 fields") + } + + return &fileFormatID{ + DatabaseName: lines[0][0], + SchemaName: lines[0][1], + FileFormatName: lines[0][2], + }, nil +} + +func getFormatTypeOption(d *schema.ResourceData, formatType, formatTypeOption string) (interface{}, bool, error) { + validFormatTypeOptions := formatTypeOptions[formatType] + if v, ok := d.GetOk(formatTypeOption); ok { + if err := validateFormatTypeOptions(formatType, formatTypeOption, validFormatTypeOptions); err != nil { + return nil, true, err + } + return v, true, nil + } + return nil, false, nil +} + +func validateFormatTypeOptions(formatType, formatTypeOption string, validFormatTypeOptions []string) error { + for _, f := range validFormatTypeOptions { + if f == formatTypeOption { + return nil + } + } + return fmt.Errorf("%v is an invalid format type option for format type %v", formatTypeOption, formatType) +}
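The ID helpers above round-trip the resource's Terraform ID through a single CSV record with a custom delimiter. A minimal, self-contained sketch of that encoding and decoding, using '|' as a stand-in for fileFormatIDDelimiter (the real constant is defined elsewhere in this package):

```go
package main

import (
	"bytes"
	"encoding/csv"
	"fmt"
	"strings"
)

func main() {
	// Encode, as fileFormatID.String() does: one CSV record with a custom delimiter.
	var buf bytes.Buffer
	w := csv.NewWriter(&buf)
	w.Comma = '|' // assumption: stands in for fileFormatIDDelimiter
	if err := w.WriteAll([][]string{{"MY_DB", "MY_SCHEMA", "MY_FORMAT"}}); err != nil {
		panic(err)
	}
	id := strings.TrimSpace(buf.String())
	fmt.Println(id) // MY_DB|MY_SCHEMA|MY_FORMAT

	// Decode, as fileFormatIDFromString does: expect exactly one line with 3 fields.
	r := csv.NewReader(strings.NewReader(id))
	r.Comma = '|'
	lines, err := r.ReadAll()
	if err != nil || len(lines) != 1 || len(lines[0]) != 3 {
		panic("malformed file format ID")
	}
	fmt.Println(lines[0][0], lines[0][1], lines[0][2]) // MY_DB MY_SCHEMA MY_FORMAT
}
```

Going through the csv package rather than a plain strings.Join/Split means a name that happens to contain the delimiter is quoted instead of silently corrupting the ID.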
"YYY-MM-DD"), + resource.TestCheckResourceAttr("snowflake_file_format.test", "time_format", "HH24:MI"), + resource.TestCheckResourceAttr("snowflake_file_format.test", "timestamp_format", "YYYY-MM-DD HH24:MI:SS.FFTZH:TZM"), + resource.TestCheckResourceAttr("snowflake_file_format.test", "binary_format", "UTF8"), + resource.TestCheckResourceAttr("snowflake_file_format.test", "escape", "\\"), + resource.TestCheckResourceAttr("snowflake_file_format.test", "escape_unenclosed_field", "!"), + resource.TestCheckResourceAttr("snowflake_file_format.test", "trim_space", "true"), + resource.TestCheckResourceAttr("snowflake_file_format.test", "field_optionally_enclosed_by", "'"), + resource.TestCheckResourceAttr("snowflake_file_format.test", "null_if.#", "1"), + resource.TestCheckResourceAttr("snowflake_file_format.test", "null_if.0", "NULL"), + resource.TestCheckResourceAttr("snowflake_file_format.test", "error_on_column_count_mismatch", "true"), + resource.TestCheckResourceAttr("snowflake_file_format.test", "replace_invalid_characters", "true"), + resource.TestCheckResourceAttr("snowflake_file_format.test", "validate_utf8", "false"), + resource.TestCheckResourceAttr("snowflake_file_format.test", "empty_field_as_null", "false"), + resource.TestCheckResourceAttr("snowflake_file_format.test", "skip_byte_order_mark", "false"), + resource.TestCheckResourceAttr("snowflake_file_format.test", "encoding", "UTF-16"), + resource.TestCheckResourceAttr("snowflake_file_format.test", "comment", "Terraform acceptance test"), + ), + }, + }, + }) +} + +func TestAcc_FileFormatJSON(t *testing.T) { + accName := acctest.RandStringFromCharSet(10, acctest.CharSetAlpha) + + resource.ParallelTest(t, resource.TestCase{ + Providers: providers(), + Steps: []resource.TestStep{ + { + Config: fileFormatConfigJSON(accName), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr("snowflake_file_format.test", "name", accName), + resource.TestCheckResourceAttr("snowflake_file_format.test", "database", accName), + resource.TestCheckResourceAttr("snowflake_file_format.test", "schema", accName), + resource.TestCheckResourceAttr("snowflake_file_format.test", "format_type", "JSON"), + resource.TestCheckResourceAttr("snowflake_file_format.test", "compression", "GZIP"), + resource.TestCheckResourceAttr("snowflake_file_format.test", "date_format", "YYY-MM-DD"), + resource.TestCheckResourceAttr("snowflake_file_format.test", "time_format", "HH24:MI"), + resource.TestCheckResourceAttr("snowflake_file_format.test", "timestamp_format", "YYYY-MM-DD HH24:MI:SS.FFTZH:TZM"), + resource.TestCheckResourceAttr("snowflake_file_format.test", "binary_format", "UTF8"), + resource.TestCheckResourceAttr("snowflake_file_format.test", "trim_space", "true"), + resource.TestCheckResourceAttr("snowflake_file_format.test", "null_if.#", "1"), + resource.TestCheckResourceAttr("snowflake_file_format.test", "null_if.0", "NULL"), + resource.TestCheckResourceAttr("snowflake_file_format.test", "file_extension", ".jsn"), + resource.TestCheckResourceAttr("snowflake_file_format.test", "enable_octal", "true"), + resource.TestCheckResourceAttr("snowflake_file_format.test", "allow_duplicate", "true"), + resource.TestCheckResourceAttr("snowflake_file_format.test", "strip_outer_array", "true"), + resource.TestCheckResourceAttr("snowflake_file_format.test", "strip_null_values", "true"), + resource.TestCheckResourceAttr("snowflake_file_format.test", "ignore_utf8_errors", "true"), + resource.TestCheckResourceAttr("snowflake_file_format.test", "skip_byte_order_mark", 
"false"), + resource.TestCheckResourceAttr("snowflake_file_format.test", "comment", "Terraform acceptance test"), + ), + }, + }, + }) +} + +func TestAcc_FileFormatAvro(t *testing.T) { + accName := acctest.RandStringFromCharSet(10, acctest.CharSetAlpha) + + resource.ParallelTest(t, resource.TestCase{ + Providers: providers(), + Steps: []resource.TestStep{ + { + Config: fileFormatConfigAvro(accName), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr("snowflake_file_format.test", "name", accName), + resource.TestCheckResourceAttr("snowflake_file_format.test", "database", accName), + resource.TestCheckResourceAttr("snowflake_file_format.test", "schema", accName), + resource.TestCheckResourceAttr("snowflake_file_format.test", "format_type", "AVRO"), + resource.TestCheckResourceAttr("snowflake_file_format.test", "compression", "GZIP"), + resource.TestCheckResourceAttr("snowflake_file_format.test", "trim_space", "true"), + resource.TestCheckResourceAttr("snowflake_file_format.test", "null_if.#", "1"), + resource.TestCheckResourceAttr("snowflake_file_format.test", "null_if.0", "NULL"), + resource.TestCheckResourceAttr("snowflake_file_format.test", "comment", "Terraform acceptance test"), + ), + }, + }, + }) +} + +func TestAcc_FileFormatORC(t *testing.T) { + accName := acctest.RandStringFromCharSet(10, acctest.CharSetAlpha) + + resource.ParallelTest(t, resource.TestCase{ + Providers: providers(), + Steps: []resource.TestStep{ + { + Config: fileFormatConfigORC(accName), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr("snowflake_file_format.test", "name", accName), + resource.TestCheckResourceAttr("snowflake_file_format.test", "database", accName), + resource.TestCheckResourceAttr("snowflake_file_format.test", "schema", accName), + resource.TestCheckResourceAttr("snowflake_file_format.test", "format_type", "ORC"), + resource.TestCheckResourceAttr("snowflake_file_format.test", "trim_space", "true"), + resource.TestCheckResourceAttr("snowflake_file_format.test", "null_if.#", "1"), + resource.TestCheckResourceAttr("snowflake_file_format.test", "null_if.0", "NULL"), + resource.TestCheckResourceAttr("snowflake_file_format.test", "comment", "Terraform acceptance test"), + ), + }, + }, + }) +} + +func TestAcc_FileFormatParquet(t *testing.T) { + accName := acctest.RandStringFromCharSet(10, acctest.CharSetAlpha) + + resource.ParallelTest(t, resource.TestCase{ + Providers: providers(), + Steps: []resource.TestStep{ + { + Config: fileFormatConfigParquet(accName), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr("snowflake_file_format.test", "name", accName), + resource.TestCheckResourceAttr("snowflake_file_format.test", "database", accName), + resource.TestCheckResourceAttr("snowflake_file_format.test", "schema", accName), + resource.TestCheckResourceAttr("snowflake_file_format.test", "format_type", "PARQUET"), + resource.TestCheckResourceAttr("snowflake_file_format.test", "compression", "SNAPPY"), + resource.TestCheckResourceAttr("snowflake_file_format.test", "binary_as_text", "true"), + resource.TestCheckResourceAttr("snowflake_file_format.test", "trim_space", "true"), + resource.TestCheckResourceAttr("snowflake_file_format.test", "null_if.#", "1"), + resource.TestCheckResourceAttr("snowflake_file_format.test", "null_if.0", "NULL"), + resource.TestCheckResourceAttr("snowflake_file_format.test", "comment", "Terraform acceptance test"), + ), + }, + }, + }) +} + +func TestAcc_FileFormatXML(t *testing.T) { + accName := 
acctest.RandStringFromCharSet(10, acctest.CharSetAlpha) + + resource.ParallelTest(t, resource.TestCase{ + Providers: providers(), + Steps: []resource.TestStep{ + { + Config: fileFormatConfigXML(accName), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr("snowflake_file_format.test", "name", accName), + resource.TestCheckResourceAttr("snowflake_file_format.test", "database", accName), + resource.TestCheckResourceAttr("snowflake_file_format.test", "schema", accName), + resource.TestCheckResourceAttr("snowflake_file_format.test", "format_type", "XML"), + resource.TestCheckResourceAttr("snowflake_file_format.test", "compression", "GZIP"), + resource.TestCheckResourceAttr("snowflake_file_format.test", "preserve_space", "true"), + resource.TestCheckResourceAttr("snowflake_file_format.test", "strip_outer_element", "true"), + resource.TestCheckResourceAttr("snowflake_file_format.test", "disable_snowflake_data", "true"), + resource.TestCheckResourceAttr("snowflake_file_format.test", "disable_auto_convert", "true"), + resource.TestCheckResourceAttr("snowflake_file_format.test", "skip_byte_order_mark", "false"), + resource.TestCheckResourceAttr("snowflake_file_format.test", "comment", "Terraform acceptance test"), + ), + }, + }, + }) +}
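+ +// Config builders for the acceptance tests above; each creates a dedicated database and schema alongside the file format under test.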
+func fileFormatConfigCSV(n string) string { + return fmt.Sprintf(` +resource "snowflake_database" "test" { + name = "%v" + comment = "Terraform acceptance test" +} + +resource "snowflake_schema" "test" { + name = "%v" + database = snowflake_database.test.name + comment = "Terraform acceptance test" +} + +resource "snowflake_file_format" "test" { + name = "%v" + database = snowflake_database.test.name + schema = snowflake_schema.test.name + format_type = "CSV" + compression = "GZIP" + record_delimiter = "\r" + field_delimiter = ";" + file_extension = ".ssv" + skip_header = 1 + skip_blank_lines = true + date_format = "YYYY-MM-DD" + time_format = "HH24:MI" + timestamp_format = "YYYY-MM-DD HH24:MI:SS.FFTZH:TZM" + binary_format = "UTF8" + escape = "\\" + escape_unenclosed_field = "!" + trim_space = true + field_optionally_enclosed_by = "'" + null_if = ["NULL"] + error_on_column_count_mismatch = true + replace_invalid_characters = true + validate_utf8 = false + empty_field_as_null = false + skip_byte_order_mark = false + encoding = "UTF-16" + comment = "Terraform acceptance test" +} +`, n, n, n) +} + +func fileFormatConfigJSON(n string) string { + return fmt.Sprintf(` +resource "snowflake_database" "test" { + name = "%v" + comment = "Terraform acceptance test" +} + +resource "snowflake_schema" "test" { + name = "%v" + database = snowflake_database.test.name + comment = "Terraform acceptance test" +} + +resource "snowflake_file_format" "test" { + name = "%v" + database = snowflake_database.test.name + schema = snowflake_schema.test.name + format_type = "JSON" + compression = "GZIP" + date_format = "YYYY-MM-DD" + time_format = "HH24:MI" + timestamp_format = "YYYY-MM-DD HH24:MI:SS.FFTZH:TZM" + binary_format = "UTF8" + trim_space = true + null_if = ["NULL"] + file_extension = ".jsn" + enable_octal = true + allow_duplicate = true + strip_outer_array = true + strip_null_values = true + ignore_utf8_errors = true + skip_byte_order_mark = false + comment = "Terraform acceptance test" +} +`, n, n, n) +} + +func fileFormatConfigAvro(n string) string { + return fmt.Sprintf(` +resource "snowflake_database" "test" { + name = "%v" + comment = "Terraform acceptance test" +} + +resource "snowflake_schema" "test" { + name = "%v" + database = snowflake_database.test.name + comment = "Terraform acceptance test" +} + +resource "snowflake_file_format" "test" { + name = "%v" + database = snowflake_database.test.name + schema = snowflake_schema.test.name + format_type = "AVRO" + compression = "GZIP" + trim_space = true + null_if = ["NULL"] + comment = "Terraform acceptance test" +} +`, n, n, n) +} + +func fileFormatConfigORC(n string) string { + return fmt.Sprintf(` +resource "snowflake_database" "test" { + name = "%v" + comment = "Terraform acceptance test" +} + +resource "snowflake_schema" "test" { + name = "%v" + database = snowflake_database.test.name + comment = "Terraform acceptance test" +} + +resource "snowflake_file_format" "test" { + name = "%v" + database = snowflake_database.test.name + schema = snowflake_schema.test.name + format_type = "ORC" + trim_space = true + null_if = ["NULL"] + comment = "Terraform acceptance test" +} +`, n, n, n) +} + +func fileFormatConfigParquet(n string) string { + return fmt.Sprintf(` +resource "snowflake_database" "test" { + name = "%v" + comment = "Terraform acceptance test" +} + +resource "snowflake_schema" "test" { + name = "%v" + database = snowflake_database.test.name + comment = "Terraform acceptance test" +} + +resource "snowflake_file_format" "test" { + name = "%v" + database = snowflake_database.test.name + schema = snowflake_schema.test.name + format_type = "PARQUET" + compression = "SNAPPY" + binary_as_text = true + trim_space = true + null_if = ["NULL"] + comment = "Terraform acceptance test" +} +`, n, n, n) +} + +func fileFormatConfigXML(n string) string { + return fmt.Sprintf(` +resource "snowflake_database" "test" { + name = "%v" + comment = "Terraform acceptance test" +} + +resource "snowflake_schema" "test" { + name = "%v" + database = snowflake_database.test.name + comment = "Terraform acceptance test" +} + +resource "snowflake_file_format" "test" { + name = "%v" + database = snowflake_database.test.name + schema = snowflake_schema.test.name + format_type = "XML" + compression = "GZIP" + ignore_utf8_errors = true + preserve_space =
true + strip_outer_element = true + disable_snowflake_data = true + disable_auto_convert = true + skip_byte_order_mark = false + comment = "Terraform acceptance test" +} +`, n, n, n) +} diff --git a/pkg/resources/file_format_test.go b/pkg/resources/file_format_test.go new file mode 100644 index 0000000000..a0016551c9 --- /dev/null +++ b/pkg/resources/file_format_test.go @@ -0,0 +1,73 @@ +package resources_test + +import ( + "database/sql" + "testing" + + sqlmock "github.com/DATA-DOG/go-sqlmock" + "github.com/chanzuckerberg/terraform-provider-snowflake/pkg/provider" + "github.com/chanzuckerberg/terraform-provider-snowflake/pkg/resources" + . "github.com/chanzuckerberg/terraform-provider-snowflake/pkg/testhelpers" + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" + "github.com/stretchr/testify/require" +) + +func TestFileFormat(t *testing.T) { + r := require.New(t) + err := resources.FileFormat().InternalValidate(provider.Provider().Schema, true) + r.NoError(err) +} + +func TestFileFormatCreate(t *testing.T) { + r := require.New(t) + + in := map[string]interface{}{ + "name": "test_file_format", + "database": "test_db", + "schema": "test_schema", + "format_type": "CSV", + "null_if": []interface{}{"NULL"}, + "validate_utf8": true, + "error_on_column_count_mismatch": true, + "comment": "great comment", + } + d := schema.TestResourceDataRaw(t, resources.FileFormat().Schema, in) + r.NotNil(d) + + WithMockDb(t, func(db *sql.DB, mock sqlmock.Sqlmock) { + mock.ExpectExec( + `^CREATE FILE FORMAT "test_db"."test_schema"."test_file_format" TYPE = 'CSV' NULL_IF = \('NULL'\) SKIP_BLANK_LINES = false TRIM_SPACE = false ERROR_ON_COLUMN_COUNT_MISMATCH = true REPLACE_INVALID_CHARACTERS = false VALIDATE_UTF8 = true EMPTY_FIELD_AS_NULL = false SKIP_BYTE_ORDER_MARK = false COMMENT = 'great comment'$`, + ).WillReturnResult(sqlmock.NewResult(1, 1)) + expectReadFileFormat(mock) + err := resources.CreateFileFormat(d, db) + r.NoError(err) + }) +} + +func TestFileFormatCreateInvalidOptions(t *testing.T) { + r := require.New(t) + + in := map[string]interface{}{ + "name": "test_file_format", + "database": "test_db", + "schema": "test_schema", + "format_type": "JSON", + "null_if": []interface{}{"NULL"}, + "validate_utf8": true, + "comment": "great comment", + } + d := schema.TestResourceDataRaw(t, resources.FileFormat().Schema, in) + r.NotNil(d) + + WithMockDb(t, func(db *sql.DB, mock sqlmock.Sqlmock) { + err := resources.CreateFileFormat(d, db) + r.EqualError(err, "validate_utf8 is an invalid format type option for format type JSON") + }) +} + +func expectReadFileFormat(mock sqlmock.Sqlmock) { + rows := sqlmock.NewRows([]string{ + "created_on", "name", "database_name", "schema_name", "type", "owner", "comment", "format_options"}, + ).AddRow("2019-12-23 17:20:50.088 +0000", "test_file_format", "test_db", "test_schema", "CSV", "test", "great comment", `{"TYPE":"CSV","RECORD_DELIMITER":"\n","FIELD_DELIMITER":",","FILE_EXTENSION":null,"SKIP_HEADER":0,"DATE_FORMAT":"AUTO","TIME_FORMAT":"AUTO","TIMESTAMP_FORMAT":"AUTO","BINARY_FORMAT":"HEX","ESCAPE":"NONE","ESCAPE_UNENCLOSED_FIELD":"\\","TRIM_SPACE":false,"FIELD_OPTIONALLY_ENCLOSED_BY":"NONE","NULL_IF":["\\N"],"COMPRESSION":"AUTO","ERROR_ON_COLUMN_COUNT_MISMATCH":false,"VALIDATE_UTF8":false,"SKIP_BLANK_LINES":false,"REPLACE_INVALID_CHARACTERS":false,"EMPTY_FIELD_AS_NULL":false,"SKIP_BYTE_ORDER_MARK":false,"ENCODING":"UTF8"}`) + mock.ExpectQuery(`^SHOW FILE FORMATS LIKE 'test_file_format' IN SCHEMA "test_db"."test_schema"$`).WillReturnRows(rows) +}
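As expectReadFileFormat illustrates, SHOW FILE FORMATS returns the type-specific options as a JSON blob in the format_options column, which ReadFileFormat hands to snowflake.ParseFormatOptions. A rough, self-contained sketch of that decoding step, using a generic map where the real code uses a typed struct in the snowflake package:

```go
package main

import (
	"encoding/json"
	"fmt"
)

func main() {
	// A trimmed-down format_options payload, shaped like the one mocked above.
	raw := `{"TYPE":"CSV","RECORD_DELIMITER":"\n","SKIP_HEADER":0,"NULL_IF":["\\N"],"TRIM_SPACE":false}`

	var opts map[string]interface{}
	if err := json.Unmarshal([]byte(raw), &opts); err != nil {
		panic(err)
	}
	fmt.Println(opts["TYPE"], opts["SKIP_HEADER"], opts["NULL_IF"]) // CSV 0 [\N]
}
```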
diff --git a/pkg/resources/helpers_test.go b/pkg/resources/helpers_test.go index ff87873cb0..512cbfadab 100644 --- a/pkg/resources/helpers_test.go +++ b/pkg/resources/helpers_test.go @@ -129,6 +129,14 @@ func resourceMonitor(t *testing.T, id string, params map[string]interface{}) *sc return d } +func sequence(t *testing.T, id string, params map[string]interface{}) *schema.ResourceData { + r := require.New(t) + d := schema.TestResourceDataRaw(t, resources.Sequence().Schema, params) + r.NotNil(d) + d.SetId(id) + return d +} + func share(t *testing.T, id string, params map[string]interface{}) *schema.ResourceData { r := require.New(t) d := schema.TestResourceDataRaw(t, resources.Share().Schema, params) @@ -184,6 +192,14 @@ func apiIntegration(t *testing.T, id string, params map[string]interface{}) *sch return d } +func scimIntegration(t *testing.T, id string, params map[string]interface{}) *schema.ResourceData { + r := require.New(t) + d := schema.TestResourceDataRaw(t, resources.SCIMIntegration().Schema, params) + r.NotNil(d) + d.SetId(id) + return d +} + func externalFunction(t *testing.T, id string, params map[string]interface{}) *schema.ResourceData { r := require.New(t) d := schema.TestResourceDataRaw(t, resources.ExternalFunction().Schema, params) @@ -199,6 +215,13 @@ func storageIntegration(t *testing.T, id string, params map[string]interface{}) d.SetId(id) return d } +func notificationIntegration(t *testing.T, id string, params map[string]interface{}) *schema.ResourceData { + r := require.New(t) + d := schema.TestResourceDataRaw(t, resources.NotificationIntegration().Schema, params) + r.NotNil(d) + d.SetId(id) + return d +} func table(t *testing.T, id string, params map[string]interface{}) *schema.ResourceData { r := require.New(t) diff --git a/pkg/resources/integration_grant.go b/pkg/resources/integration_grant.go index 6baf52d481..de6bc25ef6 100644 --- a/pkg/resources/integration_grant.go +++ b/pkg/resources/integration_grant.go @@ -50,6 +50,9 @@ func IntegrationGrant() *TerraformGrantResource { Delete: DeleteIntegrationGrant, Schema: integrationGrantSchema, + Importer: &schema.ResourceImporter{ + StateContext: schema.ImportStatePassthroughContext, + }, }, ValidPrivs: validIntegrationPrivileges, } diff --git a/pkg/resources/masking_policy.go b/pkg/resources/masking_policy.go index cd08798c53..ef24a55be5 100644 --- a/pkg/resources/masking_policy.go +++ b/pkg/resources/masking_policy.go @@ -45,7 +45,6 @@ var maskingPolicySchema = map[string]*schema.Schema{ Type: schema.TypeString, Required: true, Description: "Specifies the SQL expression that transforms the data.", - ForceNew: true, }, "return_data_type": { Type: schema.TypeString, diff --git a/pkg/resources/notification_integration.go b/pkg/resources/notification_integration.go new file mode 100644 index 0000000000..094a582f09 --- /dev/null +++ b/pkg/resources/notification_integration.go @@ -0,0 +1,351 @@ +package resources + +import ( + "database/sql" + "fmt" + "log" + "strings" + + "github.com/chanzuckerberg/terraform-provider-snowflake/pkg/snowflake" + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/validation" +) + +var notificationIntegrationSchema = map[string]*schema.Schema{ + // The first part of the schema is shared between all integration vendors + "name": &schema.Schema{ + Type: schema.TypeString, + Required: true, + ForceNew: true, + }, + "enabled": &schema.Schema{ + Type: schema.TypeBool, + Optional: true, + Default: true, + }, + "type": &schema.Schema{ + Type:
schema.TypeString, + Optional: true, + Default: "QUEUE", + ValidateFunc: validation.StringInSlice([]string{"QUEUE"}, true), + Description: "A type of integration", + }, + "direction": &schema.Schema{ + Type: schema.TypeString, + Optional: true, + ValidateFunc: validation.StringInSlice([]string{"INBOUND", "OUTBOUND"}, true), + Description: "Direction of the cloud messaging with respect to Snowflake (required only for error notifications)", + }, + // This part of the schema is the cloudProviderParams in the Snowflake documentation and differs between vendors + "notification_provider": &schema.Schema{ + Type: schema.TypeString, + Optional: true, + ValidateFunc: validation.StringInSlice([]string{"AZURE_STORAGE_QUEUE", "AWS_SQS", "GCP_PUBSUB"}, true), + Description: "The third-party cloud message queuing service (e.g. AZURE_STORAGE_QUEUE, AWS_SQS)", + }, + "azure_storage_queue_primary_uri": &schema.Schema{ + Type: schema.TypeString, + Optional: true, + Description: "The queue ID for the Azure Queue Storage queue created for Event Grid notifications", + }, + "azure_tenant_id": &schema.Schema{ + Type: schema.TypeString, + Optional: true, + Description: "The ID of the Azure Active Directory tenant used for identity management", + }, + "aws_sqs_external_id": &schema.Schema{ + Type: schema.TypeString, + Computed: true, + Description: "The external ID that Snowflake will use when assuming the AWS role", + }, + "aws_sqs_iam_user_arn": { + Type: schema.TypeString, + Computed: true, + Description: "The Snowflake user that will attempt to assume the AWS role.", + }, + "aws_sqs_arn": &schema.Schema{ + Type: schema.TypeString, + Optional: true, + Description: "AWS SQS queue ARN for notification integration to connect to", + }, + "aws_sqs_role_arn": &schema.Schema{ + Type: schema.TypeString, + Optional: true, + Description: "AWS IAM role ARN for notification integration to assume", + }, + "comment": &schema.Schema{ + Type: schema.TypeString, + Optional: true, + Description: "A comment for the integration", + }, + "created_on": &schema.Schema{ + Type: schema.TypeString, + Computed: true, + Description: "Date and time when the notification integration was created.", + }, + "gcp_pubsub_subscription_name": &schema.Schema{ + Type: schema.TypeString, + Optional: true, + Description: "The subscription id that Snowflake will listen to when using the GCP_PUBSUB provider.", + }, +} + +// NotificationIntegration returns a pointer to the resource representing a notification integration +func NotificationIntegration() *schema.Resource { + return &schema.Resource{ + Create: CreateNotificationIntegration, + Read: ReadNotificationIntegration, + Update: UpdateNotificationIntegration, + Delete: DeleteNotificationIntegration, + Exists: NotificationIntegrationExists, + + Schema: notificationIntegrationSchema, + Importer: &schema.ResourceImporter{ + State: schema.ImportStatePassthrough, + }, + } +} + +// CreateNotificationIntegration implements schema.CreateFunc +func CreateNotificationIntegration(data *schema.ResourceData, meta interface{}) error { + db := meta.(*sql.DB) + name := data.Get("name").(string) + + stmt := snowflake.NotificationIntegration(name).Create() + + // Set required fields + stmt.SetString(`TYPE`, data.Get("type").(string)) + stmt.SetBool(`ENABLED`, data.Get("enabled").(bool)) + + // Set optional fields + if v, ok := data.GetOk("comment"); ok { + stmt.SetString(`COMMENT`, v.(string)) + } + if v, ok := data.GetOk("direction"); ok { + stmt.SetString(`DIRECTION`, v.(string)) + } + if v, ok :=
data.GetOk("azure_tenant_id"); ok { + stmt.SetString(`AZURE_TENANT_ID`, v.(string)) + } + if v, ok := data.GetOk("notification_provider"); ok { + stmt.SetString(`NOTIFICATION_PROVIDER`, v.(string)) + } + if v, ok := data.GetOk("azure_storage_queue_primary_uri"); ok { + stmt.SetString(`AZURE_STORAGE_QUEUE_PRIMARY_URI`, v.(string)) + } + if v, ok := data.GetOk("azure_tenant_id"); ok { + stmt.SetString(`AZURE_TENANT_ID`, v.(string)) + } + if v, ok := data.GetOk("aws_sqs_arn"); ok { + stmt.SetString(`AWS_SQS_ARN`, v.(string)) + } + if v, ok := data.GetOk("aws_sqs_role_arn"); ok { + stmt.SetString(`AWS_SQS_ROLE_ARN`, v.(string)) + } + if v, ok := data.GetOk("gcp_pubsub_subscription_name"); ok { + stmt.SetString(`GCP_PUBSUB_SUBSCRIPTION_NAME`, v.(string)) + } + + err := snowflake.Exec(db, stmt.Statement()) + if err != nil { + return fmt.Errorf("error creating notification integration: %w", err) + } + + data.SetId(name) + + return ReadNotificationIntegration(data, meta) +} + +// ReadNotificationIntegration implements schema.ReadFunc +func ReadNotificationIntegration(data *schema.ResourceData, meta interface{}) error { + db := meta.(*sql.DB) + id := data.Id() + + stmt := snowflake.NotificationIntegration(data.Id()).Show() + row := snowflake.QueryRow(db, stmt) + + // Some properties can come from the SHOW INTEGRATION call + + s, err := snowflake.ScanNotificationIntegration(row) + if err != nil { + return fmt.Errorf("Could not show notification integration: %w", err) + } + + // Note: category must be NOTIFICATION or something is broken + if c := s.Category.String; c != "NOTIFICATION" { + return fmt.Errorf("Expected %v to be a NOTIFICATION integration, got %v", id, c) + } + + if err := data.Set("name", s.Name.String); err != nil { + return err + } + + // Snowflake returns "QUEUE - AZURE_STORAGE_QUEUE" instead of simple "QUEUE" as a type + // so it needs to be parsed in order to not show a diff in Terraform + typeParts := strings.Split(s.Type.String, "-") + parsedType := strings.TrimSpace(typeParts[0]) + if err = data.Set("type", parsedType); err != nil { + return err + } + + if err := data.Set("created_on", s.CreatedOn.String); err != nil { + return err + } + + if err := data.Set("enabled", s.Enabled.Bool); err != nil { + return err + } + + // Some properties come from the DESCRIBE INTEGRATION call + // We need to grab them in a loop + var k, pType string + var v, d interface{} + stmt = snowflake.NotificationIntegration(data.Id()).Describe() + rows, err := db.Query(stmt) + if err != nil { + return fmt.Errorf("Could not describe notification integration: %w", err) + } + defer rows.Close() + for rows.Next() { + if err := rows.Scan(&k, &pType, &v, &d); err != nil { + return err + } + switch k { + case "ENABLED": + // We set this using the SHOW INTEGRATION call so let's ignore it here + case "DIRECTION": + if err = data.Set("direction", v.(string)); err != nil { + return err + } + case "NOTIFICATION_PROVIDER": + if err = data.Set("notification_provider", v.(string)); err != nil { + return err + } + case "AZURE_STORAGE_QUEUE_PRIMARY_URI": + if err = data.Set("azure_storage_queue_primary_uri", v.(string)); err != nil { + return err + } + case "AZURE_TENANT_ID": + if err = data.Set("azure_tenant_id", v.(string)); err != nil { + return err + } + case "AWS_SQS_ARN": + if err = data.Set("aws_sqs_arn", v.(string)); err != nil { + return err + } + case "AWS_SQS_ROLE_ARN": + if err = data.Set("aws_sqs_role_arn", v.(string)); err != nil { + return err + } + case "AWS_SQS_EXTERNAL_ID": + if err = 
data.Set("aws_sqs_external_id", v.(string)); err != nil { + return err + } + case "AWS_SQS_IAM_USER_ARN": + if err = data.Set("aws_sqs_iam_user_arn", v.(string)); err != nil { + return err + } + case "GCP_PUBSUB_SUBSCRIPTION_NAME": + if err = data.Set("gcp_pubsub_subscription_name", v.(string)); err != nil { + return err + } + default: + log.Printf("[WARN] unexpected property %v returned from Snowflake", k) + } + } + + return err +} + +// UpdateNotificationIntegration implements schema.UpdateFunc +func UpdateNotificationIntegration(data *schema.ResourceData, meta interface{}) error { + db := meta.(*sql.DB) + id := data.Id() + + stmt := snowflake.NotificationIntegration(id).Alter() + + // This is required in case the only change is to UNSET STORAGE_ALLOWED_LOCATIONS. + // Not sure if there is a more elegant way of determining this + var runSetStatement bool + + if data.HasChange("comment") { + runSetStatement = true + stmt.SetString("COMMENT", data.Get("comment").(string)) + } + + if data.HasChange("type") { + runSetStatement = true + stmt.SetString("TYPE", data.Get("type").(string)) + } + + if data.HasChange("enabled") { + runSetStatement = true + stmt.SetBool(`ENABLED`, data.Get("enabled").(bool)) + } + + if data.HasChange("direction") { + runSetStatement = true + stmt.SetString("DIRECTION", data.Get("direction").(string)) + } + + if data.HasChange("notification_provider") { + runSetStatement = true + stmt.SetString("NOTIFICATION_PROVIDER", data.Get("notification_provider").(string)) + } + + if data.HasChange("azure_storage_queue_primary_uri") { + runSetStatement = true + stmt.SetString("AZURE_STORAGE_QUEUE_PRIMARY_URI", data.Get("azure_storage_queue_primary_uri").(string)) + } + + if data.HasChange("azure_tenant_id") { + runSetStatement = true + stmt.SetString("AZURE_TENANT_ID", data.Get("azure_tenant_id").(string)) + } + + if data.HasChange("aws_sqs_arn") { + runSetStatement = true + stmt.SetString("AWS_SQS_ARN", data.Get("aws_sqs_arn").(string)) + } + + if data.HasChange("aws_sqs_role_arn") { + runSetStatement = true + stmt.SetString("AWS_SQS_ROLE_ARN", data.Get("aws_sqs_role_arn").(string)) + } + + if data.HasChange("gcp_pubsub_subscription_name") { + runSetStatement = true + stmt.SetString("GCP_PUBSUB_SUBSCRIPTION_NAME", data.Get("gcp_pubsub_subscription_name").(string)) + } + + if runSetStatement { + if err := snowflake.Exec(db, stmt.Statement()); err != nil { + return fmt.Errorf("error updating notification integration: %w", err) + } + } + + return ReadNotificationIntegration(data, meta) +} + +// DeleteNotificationIntegration implements schema.DeleteFunc +func DeleteNotificationIntegration(data *schema.ResourceData, meta interface{}) error { + return DeleteResource("", snowflake.NotificationIntegration)(data, meta) +} + +// NotificationIntegrationExists implements schema.ExistsFunc +func NotificationIntegrationExists(data *schema.ResourceData, meta interface{}) (bool, error) { + db := meta.(*sql.DB) + id := data.Id() + + stmt := snowflake.NotificationIntegration(id).Show() + rows, err := db.Query(stmt) + if err != nil { + return false, err + } + defer rows.Close() + + if rows.Next() { + return true, nil + } + return false, nil +} diff --git a/pkg/resources/notification_integration_acceptance_test.go b/pkg/resources/notification_integration_acceptance_test.go new file mode 100644 index 0000000000..8801d499e4 --- /dev/null +++ b/pkg/resources/notification_integration_acceptance_test.go @@ -0,0 +1,73 @@ +package resources_test + +import ( + "fmt" + "os" + "strings" + "testing" + + 
"github.com/hashicorp/terraform-plugin-sdk/v2/helper/acctest" + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" +) + +func TestAcc_NotificationIntegration(t *testing.T) { + if _, ok := os.LookupEnv("SKIP_NOTIFICATION_INTEGRATION_TESTS"); ok { + t.Skip("Skipping TestAccNotificationIntegration") + } + accName := strings.ToUpper(acctest.RandStringFromCharSet(10, acctest.CharSetAlpha)) + storageUri := "azure://great-bucket/great-path/" + tenant := "some-guid" + + resource.Test(t, resource.TestCase{ + Providers: providers(), + Steps: []resource.TestStep{ + { + Config: azureNotificationIntegrationConfig(accName, storageUri, tenant), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr("snowflake_notification_integration.test", "name", accName), + resource.TestCheckResourceAttr("snowflake_notification_integration.test", "notification_provider", "AZURE_STORAGE_QUEUE"), + resource.TestCheckResourceAttr("snowflake_notification_integration.test", "azure_storage_queue_primary_uri", storageUri), + resource.TestCheckResourceAttr("snowflake_notification_integration.test", "azure_tenant_id", tenant), + ), + }, + }, + }) + + pubsubName := "projects/project-1234/subscriptions/sub2" + resource.Test(t, resource.TestCase{ + Providers: providers(), + Steps: []resource.TestStep{ + { + Config: gcpNotificationIntegrationConfig(accName, pubsubName), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr("snowflake_notification_integration.test", "name", accName), + resource.TestCheckResourceAttr("snowflake_notification_integration.test", "notification_provider", "GCP_PUBSUB"), + resource.TestCheckResourceAttr("snowflake_notification_integration.test", "gcp_pubsub_subscription_name", pubsubName), + ), + }, + }, + }) +} + +func azureNotificationIntegrationConfig(name string, azureStorageQueuePrimaryUri string, azureTenantId string) string { + s := ` +resource "snowflake_notification_integration" "test" { + name = "%s" + notification_provider = "%s" + azure_storage_queue_primary_uri = "%s" + azure_tenant_id = "%s" +} +` + return fmt.Sprintf(s, name, "AZURE_STORAGE_QUEUE", azureStorageQueuePrimaryUri, azureTenantId) +} + +func gcpNotificationIntegrationConfig(name string, gcpPubsubSubscriptionName string) string { + s := ` +resource "snowflake_notification_integration" "test" { + name = "%s" + notification_provider = "%s" + gcp_pubsub_subscription_name = "%s" +} +` + return fmt.Sprintf(s, name, "GCP_PUBSUB", gcpPubsubSubscriptionName) +} diff --git a/pkg/resources/notification_integration_test.go b/pkg/resources/notification_integration_test.go new file mode 100644 index 0000000000..786c240be8 --- /dev/null +++ b/pkg/resources/notification_integration_test.go @@ -0,0 +1,146 @@ +package resources_test + +import ( + "database/sql" + "testing" + + sqlmock "github.com/DATA-DOG/go-sqlmock" + "github.com/chanzuckerberg/terraform-provider-snowflake/pkg/provider" + "github.com/chanzuckerberg/terraform-provider-snowflake/pkg/resources" + . 
"github.com/chanzuckerberg/terraform-provider-snowflake/pkg/testhelpers" + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" + "github.com/stretchr/testify/require" +) + +func TestNotificationIntegration(t *testing.T) { + r := require.New(t) + err := resources.NotificationIntegration().InternalValidate(provider.Provider().Schema, true) + r.NoError(err) +} + +func TestNotificationIntegrationCreate(t *testing.T) { + testCases := []struct { + notificationProvider string + raw map[string]interface{} + expectSQL string + }{ + { + notificationProvider: "AZURE_STORAGE_QUEUE", + raw: map[string]interface{}{ + "name": "test_notification_integration", + "comment": "great comment", + "notification_provider": "AZURE_STORAGE_QUEUE", + "azure_storage_queue_primary_uri": "azure://great-bucket/great-path/", + "azure_tenant_id": "some-guid", + }, + expectSQL: `^CREATE NOTIFICATION INTEGRATION "test_notification_integration" AZURE_STORAGE_QUEUE_PRIMARY_URI='azure://great-bucket/great-path/' AZURE_TENANT_ID='some-guid' COMMENT='great comment' NOTIFICATION_PROVIDER='AZURE_STORAGE_QUEUE' TYPE='QUEUE' ENABLED=true$`, + }, + { + notificationProvider: "AWS_SQS", + raw: map[string]interface{}{ + "name": "test_notification_integration", + "comment": "great comment", + "direction": "OUTBOUND", + "notification_provider": "AWS_SQS", + "aws_sqs_arn": "some-sqs-arn", + "aws_sqs_role_arn": "some-iam-role-arn", + }, + expectSQL: `^CREATE NOTIFICATION INTEGRATION "test_notification_integration" AWS_SQS_ARN='some-sqs-arn' AWS_SQS_ROLE_ARN='some-iam-role-arn' COMMENT='great comment' DIRECTION='OUTBOUND' NOTIFICATION_PROVIDER='AWS_SQS' TYPE='QUEUE' ENABLED=true$`, + }, + { + notificationProvider: "GCP_PUBSUB", + raw: map[string]interface{}{ + "name": "test_notification_integration", + "comment": "great comment", + "notification_provider": "GCP_PUBSUB", + "gcp_pubsub_subscription_name": "some-gcp-sub-name", + }, + expectSQL: `^CREATE NOTIFICATION INTEGRATION "test_notification_integration" COMMENT='great comment' GCP_PUBSUB_SUBSCRIPTION_NAME='some-gcp-sub-name' NOTIFICATION_PROVIDER='GCP_PUBSUB' TYPE='QUEUE' ENABLED=true$`, + }, + } + for _, testCase := range testCases { + r := require.New(t) + d := schema.TestResourceDataRaw(t, resources.NotificationIntegration().Schema, testCase.raw) + r.NotNil(d) + + WithMockDb(t, func(db *sql.DB, mock sqlmock.Sqlmock) { + mock.ExpectExec(testCase.expectSQL).WillReturnResult(sqlmock.NewResult(1, 1)) + expectReadNotificationIntegration(mock, testCase.notificationProvider) + + err := resources.CreateNotificationIntegration(d, db) + r.NoError(err) + }) + } +} + +func TestNotificationIntegrationRead(t *testing.T) { + testCases := []struct { + notificationProvider string + }{ + { + notificationProvider: "AZURE_STORAGE_QUEUE", + }, + { + notificationProvider: "AWS_SQS", + }, + { + notificationProvider: "GCP_PUBSUB", + }, + } + for _, testCase := range testCases { + r := require.New(t) + + d := notificationIntegration(t, "test_notification_integration", map[string]interface{}{"name": "test_notification_integration"}) + + WithMockDb(t, func(db *sql.DB, mock sqlmock.Sqlmock) { + expectReadNotificationIntegration(mock, testCase.notificationProvider) + + err := resources.ReadNotificationIntegration(d, db) + r.NoError(err) + }) + } +} + +func TestNotificationIntegrationDelete(t *testing.T) { + r := require.New(t) + + d := notificationIntegration(t, "drop_it", map[string]interface{}{"name": "drop_it"}) + + WithMockDb(t, func(db *sql.DB, mock sqlmock.Sqlmock) { + mock.ExpectExec(`DROP 
NOTIFICATION INTEGRATION "drop_it"`).WillReturnResult(sqlmock.NewResult(1, 1)) + err := resources.DeleteNotificationIntegration(d, db) + r.NoError(err) + }) +} + +func expectReadNotificationIntegration(mock sqlmock.Sqlmock, notificationProvider string) { + showRows := sqlmock.NewRows([]string{ + "name", "type", "category", "enabled", "created_on"}, + ).AddRow("test_notification_integration", "QUEUE", "NOTIFICATION", true, "now") + mock.ExpectQuery(`^SHOW NOTIFICATION INTEGRATIONS LIKE 'test_notification_integration'$`).WillReturnRows(showRows) + + descRows := sqlmock.NewRows([]string{ + "property", "property_type", "property_value", "property_default", + }).AddRow("ENABLED", "Boolean", true, false) + + switch notificationProvider { + case "AZURE_STORAGE_QUEUE": + descRows = descRows. + AddRow("NOTIFICATION_PROVIDER", "String", notificationProvider, nil). + AddRow("AZURE_STORAGE_QUEUE_PRIMARY_URI", "String", "azure://great-bucket/great-path/", nil). + AddRow("AZURE_TENANT_ID", "String", "some-guid", nil) + case "AWS_SQS": + descRows = descRows. + AddRow("NOTIFICATION_PROVIDER", "String", notificationProvider, nil). + AddRow("DIRECTION", "String", "OUTBOUND", nil). + AddRow("AWS_SQS_ARN", "String", "some-sqs-arn", nil). + AddRow("AWS_SQS_ROLE_ARN", "String", "some-iam-role-arn", nil). + AddRow("AWS_SQS_EXTERNAL_ID", "String", "AGreatExternalID", nil). + AddRow("AWS_SQS_IAM_USER_ARN", "String", "some-iam-user-arn", nil) + case "GCP_PUBSUB": + descRows = descRows. + AddRow("NOTIFICATION_PROVIDER", "String", notificationProvider, nil). + AddRow("GCP_PUBSUB_SUBSCRIPTION_NAME", "String", "some-gcp-sub-name", nil) + } + mock.ExpectQuery(`DESCRIBE NOTIFICATION INTEGRATION "test_notification_integration"$`).WillReturnRows(descRows) +} diff --git a/pkg/resources/pipe.go b/pkg/resources/pipe.go index 6c9f2c0c37..5d66c49ad8 100644 --- a/pkg/resources/pipe.go +++ b/pkg/resources/pipe.go @@ -60,7 +60,12 @@ var pipeSchema = map[string]*schema.Schema{ Optional: true, Description: "Specifies the Amazon Resource Name (ARN) for the SNS topic for your S3 bucket.", }, - "notification_channel": { + "integration": &schema.Schema{ + Type: schema.TypeString, + Optional: true, + Description: "Specifies an integration for the pipe.", + }, + "notification_channel": &schema.Schema{ Type: schema.TypeString, Computed: true, Description: "Amazon Resource Name of the Amazon SQS queue for the stage named in the DEFINITION column.", @@ -70,6 +75,11 @@ var pipeSchema = map[string]*schema.Schema{ Computed: true, Description: "Name of the role that owns the pipe.", }, + "error_integration": &schema.Schema{ + Type: schema.TypeString, + Optional: true, + Description: "Specifies the name of the notification integration used for error notifications.", + }, } func Pipe() *schema.Resource { @@ -167,6 +177,14 @@ func CreatePipe(d *schema.ResourceData, meta interface{}) error { builder.WithAwsSnsTopicArn(v.(string)) } + if v, ok := d.GetOk("integration"); ok { + builder.WithIntegration(v.(string)) + } + + if v, ok := d.GetOk("error_integration"); ok { + builder.WithErrorIntegration((v.(string))) + } + q := builder.Create() err := snowflake.Exec(db, q) @@ -258,6 +276,11 @@ func ReadPipe(d *schema.ResourceData, meta interface{}) error { return err } + err = d.Set("error_integration", pipe.ErrorIntegration.String) + if err != nil { + return err + } + return nil } @@ -284,6 +307,15 @@ func UpdatePipe(d *schema.ResourceData, meta interface{}) error { } } + if d.HasChange("error_integration") { + errorIntegration := 
d.Get("error_integration") + q := builder.ChangeErrorIntegration(errorIntegration.(string)) + err := snowflake.Exec(db, q) + if err != nil { + return errors.Wrapf(err, "error updating pipe error_integration on %v", d.Id()) + } + } + return ReadPipe(d, meta) } diff --git a/pkg/resources/pipe_test.go b/pkg/resources/pipe_test.go index 98eef1db29..07c514017a 100644 --- a/pkg/resources/pipe_test.go +++ b/pkg/resources/pipe_test.go @@ -66,7 +66,7 @@ func TestPipeRead(t *testing.T) { func expectReadPipe(mock sqlmock.Sqlmock) { rows := sqlmock.NewRows([]string{ - "created_on", "name", "database_name", "schema_name", "definition", "owner", "notification_channel", "comment"}, - ).AddRow("2019-12-23 17:20:50.088 +0000", "test_pipe", "test_db", "test_schema", "test definition", "N", "test", "great comment") + "created_on", "name", "database_name", "schema_name", "definition", "owner", "notification_channel", "comment", "error_integration"}, + ).AddRow("2019-12-23 17:20:50.088 +0000", "test_pipe", "test_db", "test_schema", "test definition", "N", "test", "great comment", "test_integration") mock.ExpectQuery(`^SHOW PIPES LIKE 'test_pipe' IN SCHEMA "test_db"."test_schema"$`).WillReturnRows(rows) } diff --git a/pkg/resources/privileges.go b/pkg/resources/privileges.go index 6f7d60be07..4368b04ae6 100644 --- a/pkg/resources/privileges.go +++ b/pkg/resources/privileges.go @@ -35,6 +35,7 @@ const ( privilegeCreateProcedure Privilege = "CREATE PROCEDURE" privilegeCreateExternalTable Privilege = "CREATE EXTERNAL TABLE" privilegeCreateMaterializedView Privilege = "CREATE MATERIALIZED VIEW" + privilegeCreateRowAccessPolicy Privilege = "CREATE ROW ACCESS POLICY" privilegeCreateTemporaryTable Privilege = "CREATE TEMPORARY TABLE" privilegeCreateMaskingPolicy Privilege = "CREATE MASKING POLICY" privilegeCreateShare Privilege = "CREATE SHARE" diff --git a/pkg/resources/role_grants.go b/pkg/resources/role_grants.go index ac377b6dc4..37ec3905e7 100644 --- a/pkg/resources/role_grants.go +++ b/pkg/resources/role_grants.go @@ -98,6 +98,9 @@ func ReadRoleGrants(d *schema.ResourceData, meta interface{}) error { db := meta.(*sql.DB) roleName := d.Id() + tfRoles := expandStringList(d.Get("roles").(*schema.Set).List()) + tfUsers := expandStringList(d.Get("users").(*schema.Set).List()) + roles := make([]string, 0) users := make([]string, 0) @@ -109,9 +112,17 @@ func ReadRoleGrants(d *schema.ResourceData, meta interface{}) error { for _, grant := range grants { switch grant.GrantedTo.String { case "ROLE": - roles = append(roles, grant.GranteeName.String) + for _, tfRole := range tfRoles { + if tfRole == grant.GranteeName.String { + roles = append(roles, grant.GranteeName.String) + } + } case "USER": - users = append(users, grant.GranteeName.String) + for _, tfUser := range tfUsers { + if tfUser == grant.GranteeName.String { + users = append(users, grant.GranteeName.String) + } + } default: return fmt.Errorf("unknown grant type %s", grant.GrantedTo.String) } diff --git a/pkg/resources/role_grants_acceptance_test.go b/pkg/resources/role_grants_acceptance_test.go index c86238660a..a4852dace4 100644 --- a/pkg/resources/role_grants_acceptance_test.go +++ b/pkg/resources/role_grants_acceptance_test.go @@ -150,9 +150,10 @@ func TestAcc_GrantRole(t *testing.T) { }, // IMPORT { - ResourceName: "snowflake_role_grants.w", - ImportState: true, - ImportStateVerify: true, + ResourceName: "snowflake_role_grants.w", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"roles", "users"}, }, }, }) @@ -181,42 
+182,44 @@ resource "snowflake_user" "u2" { func rgConfig(role1, role2, role3, user1, user2 string) string { s := ` -%s + %s -resource "snowflake_role_grants" "w" { - role_name = "${snowflake_role.r.name}" - roles = ["${snowflake_role.r2.name}", "${snowflake_role.r3.name}"] - users = ["${snowflake_user.u.name}", "${snowflake_user.u2.name}"] -} -` + resource "snowflake_role_grants" "w" { + role_name = "${snowflake_role.r.name}" + roles = ["${snowflake_role.r2.name}", "${snowflake_role.r3.name}"] + users = ["${snowflake_user.u.name}", "${snowflake_user.u2.name}"] + } + ` return fmt.Sprintf(s, rolesAndUser(role1, role2, role3, user1, user2)) } func rgConfig2(role1, role2, role3, user1, user2 string) string { s := ` -%s + %s + + resource "snowflake_role_grants" "w" { + role_name = "${snowflake_role.r.name}" + roles = ["${snowflake_role.r2.name}"] + users = ["${snowflake_user.u.name}", "${snowflake_user.u2.name}"] + } + ` -resource "snowflake_role_grants" "w" { - role_name = "${snowflake_role.r.name}" - roles = ["${snowflake_role.r2.name}"] - users = ["${snowflake_user.u.name}", "${snowflake_user.u2.name}"] -} -` return fmt.Sprintf(s, rolesAndUser(role1, role2, role3, user1, user2)) } func rgConfig3(role1, role2, role3, user1, user2 string) string { s := ` -%s + %s + + resource "snowflake_role_grants" "w" { + role_name = "${snowflake_role.r.name}" + roles = ["${snowflake_role.r2.name}", "${snowflake_role.r3.name}"] + users = ["${snowflake_user.u.name}"] + } + ` -resource "snowflake_role_grants" "w" { - role_name = "${snowflake_role.r.name}" - roles = ["${snowflake_role.r2.name}", "${snowflake_role.r3.name}"] - users = ["${snowflake_user.u.name}"] -} -` return fmt.Sprintf(s, rolesAndUser(role1, role2, role3, user1, user2)) } diff --git a/pkg/resources/schema_grant.go b/pkg/resources/schema_grant.go index b89d5599d0..fd7b58ed53 100644 --- a/pkg/resources/schema_grant.go +++ b/pkg/resources/schema_grant.go @@ -24,6 +24,7 @@ var validSchemaPrivileges = NewPrivilegeSet( privilegeCreateProcedure, privilegeCreateExternalTable, privilegeCreateMaterializedView, + privilegeCreateRowAccessPolicy, privilegeCreateTemporaryTable, privilegeCreateMaskingPolicy, privilegeAddSearchOptimization, diff --git a/pkg/resources/scim_integration.go b/pkg/resources/scim_integration.go new file mode 100644 index 0000000000..458c25bfc2 --- /dev/null +++ b/pkg/resources/scim_integration.go @@ -0,0 +1,212 @@ +package resources + +import ( + "database/sql" + "fmt" + "log" + "strings" + + "github.com/chanzuckerberg/terraform-provider-snowflake/pkg/snowflake" + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/validation" + "github.com/pkg/errors" +) + +var scimIntegrationSchema = map[string]*schema.Schema{ + "name": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + Description: "Specifies the name of the SCIM integration. This name follows the rules for Object Identifiers. 
The name should be unique among security integrations in your account.",
+	},
+	"scim_client": {
+		Type:         schema.TypeString,
+		Required:     true,
+		Description:  "Specifies the client type for the SCIM integration.",
+		ValidateFunc: validation.StringInSlice([]string{
+			"OKTA", "AZURE", "CUSTOM",
+		}, true),
+		DiffSuppressFunc: func(k, old, new string, d *schema.ResourceData) bool {
+			normalize := func(s string) string {
+				return strings.ToUpper(strings.Replace(s, "-", "", -1))
+			}
+			return normalize(old) == normalize(new)
+		},
+	},
+	"provisioner_role": {
+		Type:        schema.TypeString,
+		Required:    true,
+		Description: "Specifies the SCIM role in Snowflake that owns any users and roles that are imported from the identity provider into Snowflake using SCIM.",
+		ValidateFunc: validation.StringInSlice([]string{
+			"OKTA_PROVISIONER", "AAD_PROVISIONER", "GENERIC_SCIM_PROVISIONER",
+		}, true),
+		DiffSuppressFunc: func(k, old, new string, d *schema.ResourceData) bool {
+			normalize := func(s string) string {
+				return strings.ToUpper(strings.Replace(s, "-", "", -1))
+			}
+			return normalize(old) == normalize(new)
+		},
+	},
+	"network_policy": {
+		Type:        schema.TypeString,
+		Optional:    true,
+		Description: "Specifies an existing network policy active for your account. The network policy restricts the list of user IP addresses when exchanging an authorization code for an access or refresh token and when using a refresh token to obtain a new access token. If this parameter is not set, the network policy for the account (if any) is used instead.",
+	},
+	"created_on": {
+		Type:        schema.TypeString,
+		Computed:    true,
+		Description: "Date and time when the SCIM integration was created.",
+	},
+}
+
+// SCIMIntegration returns a pointer to the resource representing a SCIM security integration
+func SCIMIntegration() *schema.Resource {
+	return &schema.Resource{
+		Create: CreateSCIMIntegration,
+		Read:   ReadSCIMIntegration,
+		Update: UpdateSCIMIntegration,
+		Delete: DeleteSCIMIntegration,
+
+		Schema: scimIntegrationSchema,
+		Importer: &schema.ResourceImporter{
+			StateContext: schema.ImportStatePassthroughContext,
+		},
+	}
+}
+
+// CreateSCIMIntegration implements schema.CreateFunc
+func CreateSCIMIntegration(d *schema.ResourceData, meta interface{}) error {
+	db := meta.(*sql.DB)
+	name := d.Get("name").(string)
+
+	stmt := snowflake.ScimIntegration(name).Create()
+
+	// Set required fields
+	stmt.SetRaw(`TYPE=SCIM`)
+	stmt.SetString(`SCIM_CLIENT`, d.Get("scim_client").(string))
+	stmt.SetString(`RUN_AS_ROLE`, d.Get("provisioner_role").(string))
+
+	// Set optional fields
+	if _, ok := d.GetOk("network_policy"); ok {
+		stmt.SetString(`NETWORK_POLICY`, d.Get("network_policy").(string))
+	}
+
+	err := snowflake.Exec(db, stmt.Statement())
+	if err != nil {
+		return errors.Wrap(err, "error creating security integration")
+	}
+
+	d.SetId(name)
+
+	return ReadSCIMIntegration(d, meta)
+}
+
+// ReadSCIMIntegration implements schema.ReadFunc
+func ReadSCIMIntegration(d *schema.ResourceData, meta interface{}) error {
+	db := meta.(*sql.DB)
+	id := d.Id()
+
+	stmt := snowflake.ScimIntegration(id).Show()
+	row := snowflake.QueryRow(db, stmt)
+
+	// Some properties can come from the SHOW INTEGRATION call
+
+	s, err := snowflake.ScanScimIntegration(row)
+	if err != nil {
+		return errors.Wrap(err, "could not show security integration")
+	}
+
+	// Note: the category must be SECURITY; anything else indicates a broken integration
+	if c := s.Category.String; c != "SECURITY" {
+		return fmt.Errorf("expected %v to be a SECURITY integration, got %v", id, c)
+	}
+
+	if err := 
d.Set("scim_client", strings.TrimPrefix(s.IntegrationType.String, "SCIM - ")); err != nil { + return err + } + + if err := d.Set("name", s.Name.String); err != nil { + return err + } + + if err := d.Set("created_on", s.CreatedOn.String); err != nil { + return err + } + + // Some properties come from the DESCRIBE INTEGRATION call + // We need to grab them in a loop + var k, pType string + var v, unused interface{} + stmt = snowflake.ScimIntegration(id).Describe() + rows, err := db.Query(stmt) + if err != nil { + return errors.Wrap(err, "could not describe security integration") + } + defer rows.Close() + for rows.Next() { + if err := rows.Scan(&k, &pType, &v, &unused); err != nil { + return errors.Wrap(err, "unable to parse security integration rows") + } + switch k { + case "NETWORK_POLICY": + if err = d.Set("network_policy", v.(string)); err != nil { + return errors.Wrap(err, "unable to set network policy for security integration") + } + case "RUN_AS_ROLE": + if err = d.Set("provisioner_role", v.(string)); err != nil { + return errors.Wrap(err, "unable to set provisioner role for security integration") + } + default: + log.Printf("[WARN] unexpected security integration property %v returned from Snowflake", k) + } + } + + return err +} + +// UpdateSCIMIntegration implements schema.UpdateFunc +func UpdateSCIMIntegration(d *schema.ResourceData, meta interface{}) error { + db := meta.(*sql.DB) + id := d.Id() + + stmt := snowflake.ScimIntegration(id).Alter() + + var runSetStatement bool + + if d.HasChange("scim_client") { + runSetStatement = true + stmt.SetString(`SCIM_CLIENT`, d.Get("scim_client").(string)) + } + + if d.HasChange("provisioner_role") { + runSetStatement = true + stmt.SetString(`RUN_AS_ROLE`, d.Get("provisioner_role").(string)) + } + + // We need to UNSET this if we remove all api blocked prefixes. 
+ if d.HasChange("network_policy") { + v := d.Get("network_policy").(string) + if len(v) == 0 { + err := snowflake.Exec(db, fmt.Sprintf(`ALTER SECURITY INTEGRATION %v UNSET NETWORK_POLICY`, id)) + if err != nil { + return errors.Wrap(err, "error unsetting network_policy") + } + } else { + runSetStatement = true + stmt.SetString(`NETWORK_POLICY`, d.Get("network_policy").(string)) + } + } + + if runSetStatement { + if err := snowflake.Exec(db, stmt.Statement()); err != nil { + return errors.Wrap(err, "error updating security integration") + } + } + + return ReadSCIMIntegration(d, meta) +} + +// DeleteSCIMIntegration implements schema.DeleteFunc +func DeleteSCIMIntegration(d *schema.ResourceData, meta interface{}) error { + return DeleteResource("", snowflake.ScimIntegration)(d, meta) +} diff --git a/pkg/resources/scim_integration_acceptance_test.go b/pkg/resources/scim_integration_acceptance_test.go new file mode 100644 index 0000000000..90445cbf58 --- /dev/null +++ b/pkg/resources/scim_integration_acceptance_test.go @@ -0,0 +1,83 @@ +package resources_test + +import ( + "fmt" + "os" + "strings" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/acctest" + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" +) + +func TestAcc_ScimIntegration(t *testing.T) { + if _, ok := os.LookupEnv("SKIP_SCIM_INTEGRATION_TESTS"); ok { + t.Skip("Skipping TestAccScimIntegration") + } + + scimIntName := strings.ToUpper(acctest.RandStringFromCharSet(10, acctest.CharSetAlpha)) + scimProvisionerRole := "AAD_PROVISIONER" + scimNetworkPolicy := strings.ToUpper(acctest.RandStringFromCharSet(10, acctest.CharSetAlpha)) + + resource.ParallelTest(t, resource.TestCase{ + Providers: providers(), + Steps: []resource.TestStep{ + { + Config: scimIntegrationConfig_azure(scimIntName, scimProvisionerRole, scimNetworkPolicy), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr("snowflake_scim_integration.test", "name", scimIntName), + resource.TestCheckResourceAttr("snowflake_scim_integration.test", "scim_client", "AZURE"), + resource.TestCheckResourceAttr("snowflake_scim_integration.test", "provisioner_role", scimProvisionerRole), + resource.TestCheckResourceAttr("snowflake_scim_integration.test", "network_policy", scimNetworkPolicy), + resource.TestCheckResourceAttrSet("snowflake_scim_integration.test", "created_on"), + ), + }, + { + ResourceName: "snowflake_scim_integration.test", + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func scimIntegrationConfig_azure(name string, role string, policy string) string { + return fmt.Sprintf(` + resource "snowflake_role" "azure" { + name = "%s" + comment = "test comment" + } + + resource "snowflake_account_grant" "azurecua" { + roles = [snowflake_role.azure.name] + privilege = "CREATE USER" + } + + resource "snowflake_account_grant" "azurecra" { + roles = [snowflake_role.azure.name] + privilege = "CREATE ROLE" + } + + resource "snowflake_role_grants" "azure" { + role_name = snowflake_role.azure.name + roles = ["ACCOUNTADMIN"] + } + + resource "snowflake_network_policy" "azure" { + name = "%s" + allowed_ip_list = ["192.168.0.100/24", "29.254.123.20"] + } + + resource "snowflake_scim_integration" "test" { + name = "%s" + scim_client = "AZURE" + provisioner_role = snowflake_role.azure.name + network_policy = snowflake_network_policy.azure.name + depends_on = [ + snowflake_account_grant.azurecua, + snowflake_account_grant.azurecra, + snowflake_role_grants.azure + ] + } + `, role, policy, name) +} diff --git 
a/pkg/resources/scim_integration_test.go b/pkg/resources/scim_integration_test.go new file mode 100644 index 0000000000..2c373c9543 --- /dev/null +++ b/pkg/resources/scim_integration_test.go @@ -0,0 +1,81 @@ +package resources_test + +import ( + "database/sql" + "testing" + + sqlmock "github.com/DATA-DOG/go-sqlmock" + "github.com/chanzuckerberg/terraform-provider-snowflake/pkg/provider" + "github.com/chanzuckerberg/terraform-provider-snowflake/pkg/resources" + . "github.com/chanzuckerberg/terraform-provider-snowflake/pkg/testhelpers" + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" + "github.com/stretchr/testify/require" +) + +func TestSCIMIntegration(t *testing.T) { + r := require.New(t) + err := resources.SCIMIntegration().InternalValidate(provider.Provider().Schema, true) + r.NoError(err) +} + +func TestSCIMIntegrationCreate(t *testing.T) { + r := require.New(t) + + in := map[string]interface{}{ + "name": "test_scim_integration", + "scim_client": "AZURE", + "provisioner_role": "AAD_PROVISIONER", + "network_policy": "AAD_NETWORK_POLICY", + } + d := schema.TestResourceDataRaw(t, resources.SCIMIntegration().Schema, in) + r.NotNil(d) + + WithMockDb(t, func(db *sql.DB, mock sqlmock.Sqlmock) { + mock.ExpectExec( + `^CREATE SECURITY INTEGRATION "test_scim_integration" TYPE=SCIM NETWORK_POLICY='AAD_NETWORK_POLICY' RUN_AS_ROLE='AAD_PROVISIONER' SCIM_CLIENT='AZURE'$`, + ).WillReturnResult(sqlmock.NewResult(1, 1)) + expectReadSCIMIntegration(mock) + + err := resources.CreateSCIMIntegration(d, db) + r.NoError(err) + }) +} + +func TestSCIMIntegrationRead(t *testing.T) { + r := require.New(t) + + d := scimIntegration(t, "test_scim_integration", map[string]interface{}{"name": "test_scim_integration"}) + + WithMockDb(t, func(db *sql.DB, mock sqlmock.Sqlmock) { + expectReadSCIMIntegration(mock) + + err := resources.ReadSCIMIntegration(d, db) + r.NoError(err) + }) +} + +func TestSCIMIntegrationDelete(t *testing.T) { + r := require.New(t) + + d := scimIntegration(t, "drop_it", map[string]interface{}{"name": "drop_it"}) + + WithMockDb(t, func(db *sql.DB, mock sqlmock.Sqlmock) { + mock.ExpectExec(`DROP SECURITY INTEGRATION "drop_it"`).WillReturnResult(sqlmock.NewResult(1, 1)) + err := resources.DeleteSCIMIntegration(d, db) + r.NoError(err) + }) +} + +func expectReadSCIMIntegration(mock sqlmock.Sqlmock) { + showRows := sqlmock.NewRows([]string{ + "name", "type", "category", "created_on"}, + ).AddRow("test_scim_integration", "SCIM - AZURE", "SECURITY", "now") + mock.ExpectQuery(`^SHOW SECURITY INTEGRATIONS LIKE 'test_scim_integration'$`).WillReturnRows(showRows) + + descRows := sqlmock.NewRows([]string{ + "property", "property_type", "property_value", "property_default", + }).AddRow("NETWORK_POLICY", "String", "AAD_NETWORK_POLICY", nil). 
+ AddRow("RUN_AS_ROLE", "String", "AAD_PROVISIONER", nil) + + mock.ExpectQuery(`DESCRIBE SECURITY INTEGRATION "test_scim_integration"$`).WillReturnRows(descRows) +} diff --git a/pkg/resources/sequence.go b/pkg/resources/sequence.go new file mode 100644 index 0000000000..9ec2e44c3e --- /dev/null +++ b/pkg/resources/sequence.go @@ -0,0 +1,200 @@ +package resources + +import ( + "database/sql" + "fmt" + "log" + "strconv" + + "github.com/chanzuckerberg/terraform-provider-snowflake/pkg/snowflake" + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" + "github.com/pkg/errors" +) + +var sequenceSchema = map[string]*schema.Schema{ + "name": { + Type: schema.TypeString, + Required: true, + Description: "Specifies the name for the sequence.", + }, + "comment": { + Type: schema.TypeString, + Optional: true, + Default: "", + Description: "Specifies a comment for the sequence.", + }, + "increment": { + Type: schema.TypeInt, + Optional: true, + Default: 1, + Description: "The amount the sequence will increase by each time it is used", + }, + "database": { + Type: schema.TypeString, + Required: true, + Description: "The database in which to create the sequence. Don't use the | character.", + }, + "schema": { + Type: schema.TypeString, + Required: true, + Description: "The schema in which to create the sequence. Don't use the | character.", + }, + "next_value": { + Type: schema.TypeInt, + Description: "The next value the sequence will provide.", + Computed: true, + }, +} + +var sequenceProperties = []string{"comment", "data_retention_time_in_days"} + +// Sequence returns a pointer to the resource representing a sequence +func Sequence() *schema.Resource { + return &schema.Resource{ + Create: CreateSequence, + Read: ReadSequence, + Delete: DeleteSequence, + Update: UpdateSequence, + + Schema: sequenceSchema, + Importer: &schema.ResourceImporter{ + StateContext: schema.ImportStatePassthroughContext, + }, + } +} + +// CreateSequence implements schema.CreateFunc +func CreateSequence(d *schema.ResourceData, meta interface{}) error { + db := meta.(*sql.DB) + database := d.Get("database").(string) + schema := d.Get("schema").(string) + name := d.Get("name").(string) + + sq := snowflake.Sequence(name, database, schema) + + if i, ok := d.GetOk("increment"); ok { + sq.WithIncrement(i.(int)) + } + + if v, ok := d.GetOk("comment"); ok { + sq.WithComment(v.(string)) + } + + err := snowflake.Exec(db, sq.Create()) + if err != nil { + return errors.Wrapf(err, "error creating sequence") + } + + return ReadSequence(d, meta) +} + +// ReadSequence implements schema.ReadFunc +func ReadSequence(d *schema.ResourceData, meta interface{}) error { + db := meta.(*sql.DB) + database := d.Get("database").(string) + schema := d.Get("schema").(string) + name := d.Get("name").(string) + + stmt := snowflake.Sequence(name, database, schema).Show() + row := snowflake.QueryRow(db, stmt) + + sequence, err := snowflake.ScanSequence(row) + + if err != nil { + if err == sql.ErrNoRows { + // If not found, mark resource to be removed from statefile during apply or refresh + log.Printf("[DEBUG] sequence (%s) not found", d.Id()) + d.SetId("") + return nil + } + return errors.Wrap(err, "unable to scan row for SHOW SEQUENCES") + } + + err = d.Set("schema", sequence.SchemaName.String) + if err != nil { + return err + } + + err = d.Set("database", sequence.DBName.String) + if err != nil { + return err + } + + err = d.Set("comment", sequence.Comment.String) + if err != nil { + return err + } + + i, err := 
strconv.ParseInt(sequence.Increment.String, 10, 64) + if err != nil { + return err + } + + err = d.Set("increment", i) + if err != nil { + return err + } + + i, err = strconv.ParseInt(sequence.NextValue.String, 10, 64) + if err != nil { + return err + } + + err = d.Set("next_value", i) + if err != nil { + return err + } + + d.SetId(fmt.Sprintf(`%v|%v|%v`, sequence.DBName.String, sequence.SchemaName.String, sequence.Name.String)) + if err != nil { + return err + } + + return err +} + +func UpdateSequence(d *schema.ResourceData, meta interface{}) error { + db := meta.(*sql.DB) + database := d.Get("database").(string) + schema := d.Get("schema").(string) + name := d.Get("name").(string) + next := d.Get("next_value").(int) + + DeleteSequence(d, meta) + + sq := snowflake.Sequence(name, database, schema) + + if i, ok := d.GetOk("increment"); ok { + sq.WithIncrement(i.(int)) + } + + if v, ok := d.GetOk("comment"); ok { + sq.WithComment(v.(string)) + } + + sq.WithStart(next) + + err := snowflake.Exec(db, sq.Create()) + if err != nil { + return errors.Wrapf(err, "error creating sequence") + } + + return ReadSequence(d, meta) +} + +func DeleteSequence(d *schema.ResourceData, meta interface{}) error { + db := meta.(*sql.DB) + database := d.Get("database").(string) + schema := d.Get("schema").(string) + name := d.Get("name").(string) + + stmt := snowflake.Sequence(name, database, schema).Drop() + + err := snowflake.Exec(db, stmt) + if err != nil { + return errors.Wrapf(err, "error dropping sequence %s", name) + } + + d.SetId("") + return nil +} diff --git a/pkg/resources/sequence_acceptance_test.go b/pkg/resources/sequence_acceptance_test.go new file mode 100644 index 0000000000..a65bf7d1f0 --- /dev/null +++ b/pkg/resources/sequence_acceptance_test.go @@ -0,0 +1,115 @@ +package resources_test + +import ( + "fmt" + "strings" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/acctest" + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" +) + +func TestAcc_Sequence(t *testing.T) { + accName := strings.ToUpper(acctest.RandStringFromCharSet(10, acctest.CharSetAlpha)) + accRename := strings.ToUpper(acctest.RandStringFromCharSet(10, acctest.CharSetAlpha)) + + resource.ParallelTest(t, resource.TestCase{ + Providers: providers(), + Steps: []resource.TestStep{ + // CREATE + { + Config: sequenceConfig(accName, accName), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr("snowflake_sequence.test_sequence", "name", accName), + resource.TestCheckResourceAttr("snowflake_sequence.test_sequence", "next_value", "1"), + ), + }, + // Set comment and rename + { + Config: sequenceConfigWithComment(accName, accRename, "look at me I am a comment"), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr("snowflake_sequence.test_sequence", "name", accRename), + resource.TestCheckResourceAttr("snowflake_sequence.test_sequence", "comment", "look at me I am a comment"), + resource.TestCheckResourceAttr("snowflake_sequence.test_sequence", "next_value", "1"), + ), + }, + // Unset comment and set increment + { + Config: sequenceConfigWithIncrement(accName, accName), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr("snowflake_sequence.test_sequence", "name", accName), + resource.TestCheckResourceAttr("snowflake_sequence.test_sequence", "comment", ""), + resource.TestCheckResourceAttr("snowflake_sequence.test_sequence", "next_value", "1"), + resource.TestCheckResourceAttr("snowflake_sequence.test_sequence", "increment", "32"), + ), + }, + }, 
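+		// next_value is expected to stay at 1 across every step: renames and
+		// increment changes go through UpdateSequence, which drops the sequence
+		// and recreates it with START = the next_value read from state.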
+ }) +} + +func sequenceConfigWithIncrement(name, sequenceName string) string { + s := ` +resource "snowflake_database" "test_database" { + name = "%s" + comment = "Terraform acceptance test" +} + +resource "snowflake_schema" "test_schema" { + name = "%s" + database = snowflake_database.test_database.name + comment = "Terraform acceptance test" +} + +resource "snowflake_sequence" "test_sequence" { + database = snowflake_database.test_database.name + schema = snowflake_schema.test_schema.name + name = "%s" + increment = 32 +} +` + return fmt.Sprintf(s, name, name, sequenceName) +} +func sequenceConfig(name, sequenceName string) string { + s := ` +resource "snowflake_database" "test_database" { + name = "%s" + comment = "Terraform acceptance test" +} + +resource "snowflake_schema" "test_schema" { + name = "%s" + database = snowflake_database.test_database.name + comment = "Terraform acceptance test" +} + +resource "snowflake_sequence" "test_sequence" { + database = snowflake_database.test_database.name + schema = snowflake_schema.test_schema.name + name = "%s" +} +` + return fmt.Sprintf(s, name, name, sequenceName) +} + +func sequenceConfigWithComment(name, sequenceName, comment string) string { + s := ` +resource "snowflake_database" "test_database" { + name = "%s" + comment = "Terraform acceptance test" +} + +resource "snowflake_schema" "test_schema" { + name = "%s" + database = snowflake_database.test_database.name + comment = "Terraform acceptance test" +} + +resource "snowflake_sequence" "test_sequence" { + database = snowflake_database.test_database.name + schema = snowflake_schema.test_schema.name + name = "%s" + comment = "%s" +} +` + return fmt.Sprintf(s, name, name, sequenceName, comment) +} diff --git a/pkg/resources/sequence_test.go b/pkg/resources/sequence_test.go new file mode 100644 index 0000000000..c0bcf84893 --- /dev/null +++ b/pkg/resources/sequence_test.go @@ -0,0 +1,121 @@ +package resources_test + +import ( + "database/sql" + "testing" + + sqlmock "github.com/DATA-DOG/go-sqlmock" + "github.com/chanzuckerberg/terraform-provider-snowflake/pkg/provider" + "github.com/chanzuckerberg/terraform-provider-snowflake/pkg/resources" + . 
"github.com/chanzuckerberg/terraform-provider-snowflake/pkg/testhelpers" + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" + "github.com/stretchr/testify/require" +) + +func TestSequence(t *testing.T) { + r := require.New(t) + err := resources.Sequence().InternalValidate(provider.Provider().Schema, true) + r.NoError(err) +} + +func TestSequenceCreate(t *testing.T) { + r := require.New(t) + + in := map[string]interface{}{ + "name": "good_name", + "schema": "schema", + "database": "database", + "comment": "great comment", + } + d := schema.TestResourceDataRaw(t, resources.Sequence().Schema, in) + r.NotNil(d) + + WithMockDb(t, func(db *sql.DB, mock sqlmock.Sqlmock) { + mock.ExpectExec(`CREATE SEQUENCE "database"."schema"."good_name" COMMENT = 'great comment`).WillReturnResult(sqlmock.NewResult(1, 1)) + + rows := sqlmock.NewRows([]string{ + "name", + "database_name", + "schema_name", + "next_value", + "interval", + "created_on", + "owner", + "comment", + }).AddRow( + "good_name", + "database", + "schema", + "25", + "1", + "created_on", + "owner", + "mock comment", + ) + mock.ExpectQuery(`SHOW SEQUENCES LIKE 'good_name' IN SCHEMA "database"."schema"`).WillReturnRows(rows) + err := resources.CreateSequence(d, db) + r.NoError(err) + r.Equal("database|schema|good_name", d.Id()) + }) +} + +func TestSequenceRead(t *testing.T) { + r := require.New(t) + in := map[string]interface{}{ + "name": "good_name", + "schema": "schema", + "database": "database", + } + + d := sequence(t, "good_name", in) + + WithMockDb(t, func(db *sql.DB, mock sqlmock.Sqlmock) { + rows := sqlmock.NewRows([]string{ + "name", + "database_name", + "schema_name", + "next_value", + "interval", + "created_on", + "owner", + "comment", + }).AddRow( + "good_name", + "database", + "schema", + "5", + "25", + "created_on", + "owner", + "mock comment", + ) + mock.ExpectQuery(`SHOW SEQUENCES LIKE 'good_name' IN SCHEMA "database"."schema"`).WillReturnRows(rows) + err := resources.ReadSequence(d, db) + r.NoError(err) + r.Equal("good_name", d.Get("name").(string)) + r.Equal("schema", d.Get("schema").(string)) + r.Equal("database", d.Get("database").(string)) + r.Equal("mock comment", d.Get("comment").(string)) + r.Equal(25, d.Get("increment").(int)) + r.Equal(5, d.Get("next_value").(int)) + r.Equal("database|schema|good_name", d.Id()) + }) +} + +func TestSequenceDelete(t *testing.T) { + r := require.New(t) + in := map[string]interface{}{ + "name": "drop_it", + "schema": "schema", + "database": "database", + } + + d := sequence(t, "drop_it", in) + + WithMockDb(t, func(db *sql.DB, mock sqlmock.Sqlmock) { + mock.ExpectExec(`DROP SEQUENCE "database"."schema"."drop_it"`).WillReturnResult(sqlmock.NewResult(1, 1)) + err := resources.DeleteSequence(d, db) + r.NoError(err) + r.Equal("", d.Id()) + }) +} diff --git a/pkg/resources/stream.go b/pkg/resources/stream.go index f6c480ffb1..437685e004 100644 --- a/pkg/resources/stream.go +++ b/pkg/resources/stream.go @@ -45,14 +45,23 @@ var streamSchema = map[string]*schema.Schema{ "on_table": { Type: schema.TypeString, Optional: true, + ForceNew: true, Description: "Name of the table the stream will monitor.", }, "append_only": { Type: schema.TypeBool, Optional: true, + ForceNew: true, Default: false, Description: "Type of the stream that will be created.", }, + "show_initial_rows": { + Type: schema.TypeBool, + Optional: true, + ForceNew: true, + Default: false, + Description: "Specifies whether to return all existing rows in the source table as row inserts the first time the stream is consumed.", + 
}, "owner": { Type: schema.TypeString, Computed: true, @@ -160,6 +169,7 @@ func CreateStream(d *schema.ResourceData, meta interface{}) error { name := d.Get("name").(string) onTable := d.Get("on_table").(string) appendOnly := d.Get("append_only").(bool) + showInitialRows := d.Get("show_initial_rows").(bool) builder := snowflake.Stream(name, database, schema) @@ -170,6 +180,7 @@ func CreateStream(d *schema.ResourceData, meta interface{}) error { builder.WithOnTable(resultOnTable.DatabaseName, resultOnTable.SchemaName, resultOnTable.OnTableName) builder.WithAppendOnly(appendOnly) + builder.WithShowInitialRows(showInitialRows) // Set optionals if v, ok := d.GetOk("comment"); ok { @@ -226,6 +237,26 @@ func ReadStream(d *schema.ResourceData, meta interface{}) error { return err } + err = d.Set("on_table", stream.TableName.String) + if err != nil { + return err + } + + err = d.Set("append_only", stream.AppendOnly) + if err != nil { + return err + } + + err = d.Set("show_initial_rows", stream.ShowInitialRows) + if err != nil { + return err + } + + err = d.Set("comment", stream.Comment.String) + if err != nil { + return err + } + err = d.Set("owner", stream.Owner.String) if err != nil { return err diff --git a/pkg/resources/stream_acceptance_test.go b/pkg/resources/stream_acceptance_test.go index fa36d8f34f..b2efcbbdac 100644 --- a/pkg/resources/stream_acceptance_test.go +++ b/pkg/resources/stream_acceptance_test.go @@ -23,7 +23,8 @@ func TestAcc_Stream(t *testing.T) { resource.TestCheckResourceAttr("snowflake_stream.test_stream", "schema", accName), resource.TestCheckResourceAttr("snowflake_stream.test_stream", "on_table", fmt.Sprintf("%s.%s.%s", accName, accName, "STREAM_ON_TABLE")), resource.TestCheckResourceAttr("snowflake_stream.test_stream", "comment", "Terraform acceptance test"), - checkBool("snowflake_stream.test_stream", "append_only", true), + checkBool("snowflake_stream.test_stream", "append_only", false), + checkBool("snowflake_stream.test_stream", "show_initial_rows", false), ), }, }, @@ -64,8 +65,6 @@ resource "snowflake_stream" "test_stream" { name = "%s" comment = "Terraform acceptance test" on_table = "${snowflake_database.test_database.name}.${snowflake_schema.test_schema.name}.${snowflake_table.test_stream_on_table.name}" - append_only = true - } ` return fmt.Sprintf(s, name, name, name) diff --git a/pkg/resources/stream_grant_acceptance_test.go b/pkg/resources/stream_grant_acceptance_test.go index d209583e2b..77694d2364 100644 --- a/pkg/resources/stream_grant_acceptance_test.go +++ b/pkg/resources/stream_grant_acceptance_test.go @@ -96,11 +96,10 @@ resource "snowflake_stream" "test" { name = "{{ .stream_name }}" comment = "Terraform acceptance test" on_table = "${snowflake_database.test.name}.${snowflake_schema.test.name}.${snowflake_table.test.name}" - append_only = true } resource "snowflake_stream_grant" "test" { - database_name = snowflake_database.test.name + database_name = snowflake_database.test.name roles = [snowflake_role.test.name] schema_name = snowflake_schema.test.name stream_name = snowflake_stream.test.name @@ -140,7 +139,7 @@ resource "snowflake_role" "test" { } resource "snowflake_stream_grant" "test" { - database_name = snowflake_database.test.name + database_name = snowflake_database.test.name roles = [snowflake_role.test.name] schema_name = snowflake_schema.test.name on_future = true diff --git a/pkg/resources/stream_test.go b/pkg/resources/stream_test.go index 3f166a9ac8..f05358102a 100644 --- a/pkg/resources/stream_test.go +++ 
b/pkg/resources/stream_test.go @@ -22,17 +22,18 @@ func TestStreamCreate(t *testing.T) { r := require.New(t) in := map[string]interface{}{ - "name": "stream_name", - "database": "database_name", - "schema": "schema_name", - "comment": "great comment", - "on_table": "target_db.target_schema.target_table", - "append_only": true, + "name": "stream_name", + "database": "database_name", + "schema": "schema_name", + "comment": "great comment", + "on_table": "target_db.target_schema.target_table", + "append_only": true, + "show_initial_rows": true, } d := stream(t, "database_name|schema_name|stream_name", in) WithMockDb(t, func(db *sql.DB, mock sqlmock.Sqlmock) { - mock.ExpectExec(`CREATE STREAM "database_name"."schema_name"."stream_name" ON TABLE "target_db"."target_schema"."target_table" COMMENT = 'great comment' APPEND_ONLY = true`).WillReturnResult(sqlmock.NewResult(1, 1)) + mock.ExpectExec(`CREATE STREAM "database_name"."schema_name"."stream_name" ON TABLE "target_db"."target_schema"."target_table" COMMENT = 'great comment' APPEND_ONLY = true SHOW_INITIAL_ROWS = true`).WillReturnResult(sqlmock.NewResult(1, 1)) expectStreamRead(mock) err := resources.CreateStream(d, db) r.NoError(err) @@ -48,14 +49,14 @@ func expectStreamRead(mock sqlmock.Sqlmock) { func TestStreamRead(t *testing.T) { r := require.New(t) - d := stream(t, "database_name|schema_name|stream_name", map[string]interface{}{"name": "stream_name", "comment": "mock comment"}) + d := stream(t, "database_name|schema_name|stream_name", map[string]interface{}{"name": "stream_name", "comment": "grand comment"}) WithMockDb(t, func(db *sql.DB, mock sqlmock.Sqlmock) { expectStreamRead(mock) err := resources.ReadStream(d, db) r.NoError(err) r.Equal("stream_name", d.Get("name").(string)) - r.Equal("mock comment", d.Get("comment").(string)) + r.Equal("grand comment", d.Get("comment").(string)) // Test when resource is not found, checking if state will be empty r.NotEmpty(d.State()) diff --git a/pkg/resources/table.go b/pkg/resources/table.go index 5db15aca7e..f7a1897939 100644 --- a/pkg/resources/table.go +++ b/pkg/resources/table.go @@ -36,6 +36,12 @@ var tableSchema = map[string]*schema.Schema{ ForceNew: true, Description: "The database in which to create the table.", }, + "cluster_by": { + Type: schema.TypeList, + Elem: &schema.Schema{Type: schema.TypeString}, + Optional: true, + Description: "A list of one or more table columns/expressions to be used as clustering key(s) for the table", + }, "column": { Type: schema.TypeList, Required: true, @@ -53,6 +59,12 @@ var tableSchema = map[string]*schema.Schema{ Required: true, Description: "Column type, e.g. VARIANT", }, + "nullable": { + Type: schema.TypeBool, + Optional: true, + Default: true, + Description: "Whether this column can contain null values. 
**Note**: Depending on your Snowflake version, the default value will not suffice if this column is used in a primary key constraint.", + }, }, }, }, @@ -66,6 +78,29 @@ var tableSchema = map[string]*schema.Schema{ Computed: true, Description: "Name of the role that owns the table.", }, + "primary_key": { + Type: schema.TypeList, + Optional: true, + MaxItems: 1, + Description: "Definitions of primary key constraint to create on table", + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "name": { + Type: schema.TypeString, + Optional: true, + Description: "Name of constraint", + }, + "keys": { + Type: schema.TypeList, + Elem: &schema.Schema{ + Type: schema.TypeString, + }, + Required: true, + Description: "Columns to use in primary key", + }, + }, + }, + }, } func Table() *schema.Resource { @@ -131,11 +166,12 @@ func tableIDFromString(stringID string) (*tableID, error) { type column struct { name string dataType string + nullable bool } func (c column) toSnowflakeColumn() snowflake.Column { sC := snowflake.Column{} - return *sC.WithName(c.name).WithType(c.dataType) + return *sC.WithName(c.name).WithType(c.dataType).WithNullable(c.nullable) } type columns []column @@ -165,20 +201,34 @@ func (old columns) getNewIn(new columns) (added columns) { return } -func (old columns) getChangedTypes(new columns) (changed columns) { - changed = columns{} +type changedColumns []changedColumn + +type changedColumn struct { + newColumn column //our new column + changedDataType bool + changedNullConstraint bool +} + +func (old columns) getChangedColumnProperties(new columns) (changed changedColumns) { + changed = changedColumns{} for _, cO := range old { for _, cN := range new { + changeColumn := changedColumn{cN, false, false} if cO.name == cN.name && cO.dataType != cN.dataType { - changed = append(changed, cN) + changeColumn.changedDataType = true + } + if cO.name == cN.name && cO.nullable != cN.nullable { + changeColumn.changedNullConstraint = true } + + changed = append(changed, changeColumn) } } return } -func (old columns) diffs(new columns) (removed columns, added columns, changed columns) { - return old.getNewIn(new), new.getNewIn(old), old.getChangedTypes(new) +func (old columns) diffs(new columns) (removed columns, added columns, changed changedColumns) { + return old.getNewIn(new), new.getNewIn(old), old.getChangedColumnProperties(new) } func getColumn(from interface{}) (to column) { @@ -186,6 +236,7 @@ func getColumn(from interface{}) (to column) { return column{ name: c["name"].(string), dataType: c["type"].(string), + nullable: c["nullable"].(bool), } } @@ -198,6 +249,29 @@ func getColumns(from interface{}) (to columns) { return to } +type primarykey struct { + name string + keys []string +} + +func getPrimaryKey(from interface{}) (to primarykey) { + pk := from.([]interface{}) + to = primarykey{} + if len(pk) > 0 { + pkDetails := pk[0].(map[string]interface{}) + to.name = pkDetails["name"].(string) + to.keys = expandStringList(pkDetails["keys"].([]interface{})) + return to + } + return to +} + +func (pk primarykey) toSnowflakePrimaryKey() snowflake.PrimaryKey { + snowPk := snowflake.PrimaryKey{} + return *snowPk.WithName(pk.name).WithKeys(pk.keys) + +} + // CreateTable implements schema.CreateFunc func CreateTable(d *schema.ResourceData, meta interface{}) error { db := meta.(*sql.DB) @@ -206,6 +280,7 @@ func CreateTable(d *schema.ResourceData, meta interface{}) error { name := d.Get("name").(string) columns := getColumns(d.Get("column").([]interface{})) + builder := 
snowflake.TableWithColumnDefinitions(name, database, schema, columns.toSnowflakeColumns()) // Set optionals @@ -213,6 +288,15 @@ func CreateTable(d *schema.ResourceData, meta interface{}) error { builder.WithComment(v.(string)) } + if v, ok := d.GetOk("cluster_by"); ok { + builder.WithClustering(expandStringList(v.([]interface{}))) + } + + if v, ok := d.GetOk("primary_key"); ok { + pk := getPrimaryKey(v.([]interface{})) + builder.WithPrimaryKey(pk.toSnowflakePrimaryKey()) + } + stmt := builder.Create() err := snowflake.Exec(db, stmt) if err != nil { @@ -265,14 +349,26 @@ func ReadTable(d *schema.ResourceData, meta interface{}) error { return err } + showPkrows, err := snowflake.Query(db, builder.ShowPrimaryKeys()) + if err != nil { + return err + } + + pkDescription, err := snowflake.ScanPrimaryKeyDescription(showPkrows) + if err != nil { + return err + } + // Set the relevant data in the state toSet := map[string]interface{}{ - "name": table.TableName.String, - "owner": table.Owner.String, - "database": tableID.DatabaseName, - "schema": tableID.SchemaName, - "comment": table.Comment.String, - "column": snowflake.NewColumns(tableDescription).Flatten(), + "name": table.TableName.String, + "owner": table.Owner.String, + "database": tableID.DatabaseName, + "schema": tableID.SchemaName, + "comment": table.Comment.String, + "column": snowflake.NewColumns(tableDescription).Flatten(), + "cluster_by": snowflake.ClusterStatementToList(table.ClusterBy.String), + "primary_key": snowflake.FlattenTablePrimaryKey(pkDescription), } for key, val := range toSet { @@ -306,6 +402,23 @@ func UpdateTable(d *schema.ResourceData, meta interface{}) error { return errors.Wrapf(err, "error updating table comment on %v", d.Id()) } } + + if d.HasChange("cluster_by") { + cb := expandStringList(d.Get("cluster_by").([]interface{})) + + var q string + if len(cb) != 0 { + builder.WithClustering(cb) + q = builder.ChangeClusterBy(builder.GetClusterKeyString()) + } else { + q = builder.DropClustering() + } + + err := snowflake.Exec(db, q) + if err != nil { + return errors.Wrapf(err, "error updating table clustering on %v", d.Id()) + } + } if d.HasChange("column") { old, new := d.GetChange("column") removed, added, changed := getColumns(old).diffs(getColumns(new)) @@ -317,17 +430,56 @@ func UpdateTable(d *schema.ResourceData, meta interface{}) error { } } for _, cA := range added { - q := builder.AddColumn(cA.name, cA.dataType) + q := builder.AddColumn(cA.name, cA.dataType, cA.nullable) err := snowflake.Exec(db, q) if err != nil { return errors.Wrapf(err, "error adding column on %v", d.Id()) } } for _, cA := range changed { - q := builder.ChangeColumnType(cA.name, cA.dataType) + + if cA.changedDataType { + + q := builder.ChangeColumnType(cA.newColumn.name, cA.newColumn.dataType) + err := snowflake.Exec(db, q) + if err != nil { + return errors.Wrapf(err, "error changing property on %v", d.Id()) + + } + } + if cA.changedNullConstraint { + + q := builder.ChangeNullConstraint(cA.newColumn.name, cA.newColumn.nullable) + err := snowflake.Exec(db, q) + if err != nil { + return errors.Wrapf(err, "error changing property on %v", d.Id()) + + } + } + + } + } + if d.HasChange("primary_key") { + opk, npk := d.GetChange("primary_key") + + newpk := getPrimaryKey(npk) + oldpk := getPrimaryKey(opk) + + if len(oldpk.keys) > 0 || len(newpk.keys) == 0 { + //drop our pk if there was an old primary key, or pk has been removed + q := builder.DropPrimaryKey() + err := snowflake.Exec(db, q) + if err != nil { + return errors.Wrapf(err, "error changing 
primary key first on %v", d.Id()) + } + } + + if len(newpk.keys) > 0 { + // add our new pk + q := builder.ChangePrimaryKey(newpk.toSnowflakePrimaryKey()) err := snowflake.Exec(db, q) if err != nil { - return errors.Wrapf(err, "error changing column type on %v", d.Id()) + return errors.Wrapf(err, "error changing property on %v", d.Id()) } } } diff --git a/pkg/resources/table_acceptance_test.go b/pkg/resources/table_acceptance_test.go index 74bbc3ede8..4570b7a03d 100644 --- a/pkg/resources/table_acceptance_test.go +++ b/pkg/resources/table_acceptance_test.go @@ -12,6 +12,8 @@ import ( func TestAcc_Table(t *testing.T) { accName := strings.ToUpper(acctest.RandStringFromCharSet(10, acctest.CharSetAlpha)) + table2Name := strings.ToUpper(acctest.RandStringFromCharSet(10, acctest.CharSetAlpha)) + resource.ParallelTest(t, resource.TestCase{ Providers: providers(), Steps: []resource.TestStep{ @@ -27,6 +29,7 @@ func TestAcc_Table(t *testing.T) { resource.TestCheckResourceAttr("snowflake_table.test_table", "column.0.type", "VARIANT"), resource.TestCheckResourceAttr("snowflake_table.test_table", "column.1.name", "column2"), resource.TestCheckResourceAttr("snowflake_table.test_table", "column.1.type", "VARCHAR(16)"), + resource.TestCheckNoResourceAttr("snowflake_table.test_table", "primary_key"), ), }, { @@ -41,6 +44,111 @@ func TestAcc_Table(t *testing.T) { resource.TestCheckResourceAttr("snowflake_table.test_table", "column.0.type", "VARCHAR(16777216)"), resource.TestCheckResourceAttr("snowflake_table.test_table", "column.1.name", "column3"), resource.TestCheckResourceAttr("snowflake_table.test_table", "column.1.type", "FLOAT"), + resource.TestCheckNoResourceAttr("snowflake_table.test_table", "cluster_by"), + resource.TestCheckNoResourceAttr("snowflake_table.test_table", "primary_key"), + ), + }, + { + Config: tableConfig3(accName, table2Name), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr("snowflake_table.test_table2", "name", table2Name), + resource.TestCheckResourceAttr("snowflake_table.test_table2", "database", accName), + resource.TestCheckResourceAttr("snowflake_table.test_table2", "schema", accName), + resource.TestCheckResourceAttr("snowflake_table.test_table2", "comment", "Terraform acceptance test"), + resource.TestCheckResourceAttr("snowflake_table.test_table2", "column.#", "2"), + resource.TestCheckResourceAttr("snowflake_table.test_table2", "column.0.name", "COL1"), + resource.TestCheckResourceAttr("snowflake_table.test_table2", "column.0.type", "VARCHAR(16777216)"), + resource.TestCheckResourceAttr("snowflake_table.test_table2", "column.1.name", "col2"), + resource.TestCheckResourceAttr("snowflake_table.test_table2", "cluster_by.#", "1"), + resource.TestCheckResourceAttr("snowflake_table.test_table2", "cluster_by.0", "COL1"), + resource.TestCheckResourceAttr("snowflake_table.test_table2", "column.1.type", "FLOAT"), + ), + }, + { + Config: tableConfig4(accName, table2Name), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr("snowflake_table.test_table2", "name", table2Name), + resource.TestCheckResourceAttr("snowflake_table.test_table2", "database", accName), + resource.TestCheckResourceAttr("snowflake_table.test_table2", "schema", accName), + resource.TestCheckResourceAttr("snowflake_table.test_table2", "comment", "Terraform acceptance test"), + resource.TestCheckResourceAttr("snowflake_table.test_table2", "column.#", "2"), + resource.TestCheckResourceAttr("snowflake_table.test_table2", "column.0.name", "COL1"), + 
resource.TestCheckResourceAttr("snowflake_table.test_table2", "column.0.type", "VARCHAR(16777216)"), + resource.TestCheckResourceAttr("snowflake_table.test_table2", "column.1.name", "col2"), + resource.TestCheckResourceAttr("snowflake_table.test_table2", "cluster_by.#", "2"), + resource.TestCheckResourceAttr("snowflake_table.test_table2", "cluster_by.1", "\"col2\""), + ), + }, + { + Config: tableConfig5(accName, table2Name), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr("snowflake_table.test_table2", "name", table2Name), + resource.TestCheckResourceAttr("snowflake_table.test_table2", "database", accName), + resource.TestCheckResourceAttr("snowflake_table.test_table2", "schema", accName), + resource.TestCheckResourceAttr("snowflake_table.test_table2", "comment", "Terraform acceptance test"), + resource.TestCheckResourceAttr("snowflake_table.test_table2", "column.#", "2"), + resource.TestCheckResourceAttr("snowflake_table.test_table2", "column.0.name", "COL1"), + resource.TestCheckResourceAttr("snowflake_table.test_table2", "column.0.type", "VARCHAR(16777216)"), + resource.TestCheckResourceAttr("snowflake_table.test_table2", "column.1.name", "col2"), + resource.TestCheckResourceAttr("snowflake_table.test_table2", "cluster_by.#", "2"), + resource.TestCheckResourceAttr("snowflake_table.test_table2", "cluster_by.0", "\"col2\""), + ), + }, + { + Config: tableConfig6(accName), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr("snowflake_table.test_table", "name", accName), + resource.TestCheckResourceAttr("snowflake_table.test_table", "database", accName), + resource.TestCheckResourceAttr("snowflake_table.test_table", "schema", accName), + resource.TestCheckResourceAttr("snowflake_table.test_table", "comment", "Terraform acceptance test"), + resource.TestCheckResourceAttr("snowflake_table.test_table", "column.#", "2"), + resource.TestCheckResourceAttr("snowflake_table.test_table", "column.0.name", "column2"), + resource.TestCheckResourceAttr("snowflake_table.test_table", "column.0.type", "VARCHAR(16777216)"), + resource.TestCheckResourceAttr("snowflake_table.test_table", "column.0.nullable", "true"), + resource.TestCheckResourceAttr("snowflake_table.test_table", "column.1.name", "column3"), + resource.TestCheckResourceAttr("snowflake_table.test_table", "column.1.type", "FLOAT"), + resource.TestCheckResourceAttr("snowflake_table.test_table", "column.1.nullable", "false"), + resource.TestCheckNoResourceAttr("snowflake_table.test_table", "cluster_by"), + resource.TestCheckNoResourceAttr("snowflake_table.test_table", "primary_key"), + ), + }, + { + Config: tableConfig7(accName), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr("snowflake_table.test_table", "name", accName), + resource.TestCheckResourceAttr("snowflake_table.test_table", "database", accName), + resource.TestCheckResourceAttr("snowflake_table.test_table", "schema", accName), + resource.TestCheckResourceAttr("snowflake_table.test_table", "comment", "Terraform acceptance test"), + resource.TestCheckResourceAttr("snowflake_table.test_table", "column.#", "2"), + resource.TestCheckResourceAttr("snowflake_table.test_table", "column.0.name", "column2"), + resource.TestCheckResourceAttr("snowflake_table.test_table", "column.0.type", "VARCHAR(16777216)"), + resource.TestCheckResourceAttr("snowflake_table.test_table", "column.0.nullable", "true"), + resource.TestCheckResourceAttr("snowflake_table.test_table", "column.1.name", "column3"), + 
resource.TestCheckResourceAttr("snowflake_table.test_table", "column.1.type", "FLOAT"), + resource.TestCheckResourceAttr("snowflake_table.test_table", "column.1.nullable", "false"), + resource.TestCheckNoResourceAttr("snowflake_table.test_table", "cluster_by"), + resource.TestCheckResourceAttr("snowflake_table.test_table", "primary_key.0.keys.0", "column2"), + resource.TestCheckResourceAttr("snowflake_table.test_table", "primary_key.0.name", ""), + ), + }, + { + Config: tableConfig8(accName), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr("snowflake_table.test_table", "name", accName), + resource.TestCheckResourceAttr("snowflake_table.test_table", "database", accName), + resource.TestCheckResourceAttr("snowflake_table.test_table", "schema", accName), + resource.TestCheckResourceAttr("snowflake_table.test_table", "comment", "Terraform acceptance test"), + resource.TestCheckResourceAttr("snowflake_table.test_table", "column.#", "2"), + resource.TestCheckResourceAttr("snowflake_table.test_table", "column.0.name", "column2"), + resource.TestCheckResourceAttr("snowflake_table.test_table", "column.0.type", "VARCHAR(16777216)"), + resource.TestCheckResourceAttr("snowflake_table.test_table", "column.0.nullable", "true"), + resource.TestCheckResourceAttr("snowflake_table.test_table", "column.1.name", "column3"), + resource.TestCheckResourceAttr("snowflake_table.test_table", "column.1.type", "FLOAT"), + resource.TestCheckResourceAttr("snowflake_table.test_table", "column.1.nullable", "false"), + resource.TestCheckNoResourceAttr("snowflake_table.test_table", "cluster_by"), + resource.TestCheckResourceAttr("snowflake_table.test_table", "primary_key.0.keys.0", "column2"), + resource.TestCheckResourceAttr("snowflake_table.test_table", "primary_key.0.keys.1", "column3"), + resource.TestCheckResourceAttr("snowflake_table.test_table", "primary_key.0.name", "new_name"), ), }, }, @@ -108,3 +216,203 @@ resource "snowflake_table" "test_table" { ` return fmt.Sprintf(s, name, name, name) } + +func tableConfig3(name string, table2Name string) string { + s := ` +resource "snowflake_database" "test_database" { + name = "%s" + comment = "Terraform acceptance test" +} + +resource "snowflake_schema" "test_schema" { + name = "%s" + database = snowflake_database.test_database.name + comment = "Terraform acceptance test" +} + +resource "snowflake_table" "test_table2" { + database = snowflake_database.test_database.name + schema = snowflake_schema.test_schema.name + name = "%s" + comment = "Terraform acceptance test" + cluster_by = ["COL1"] + column { + name = "COL1" + type = "VARCHAR(16777216)" + } + column { + name = "col2" + type = "FLOAT" + } +} +` + return fmt.Sprintf(s, name, name, table2Name) +} + +func tableConfig4(name string, table2Name string) string { + s := ` +resource "snowflake_database" "test_database" { + name = "%s" + comment = "Terraform acceptance test" +} + +resource "snowflake_schema" "test_schema" { + name = "%s" + database = snowflake_database.test_database.name + comment = "Terraform acceptance test" +} + +resource "snowflake_table" "test_table2" { + database = snowflake_database.test_database.name + schema = snowflake_schema.test_schema.name + name = "%s" + comment = "Terraform acceptance test" + cluster_by = ["COL1","\"col2\""] + column { + name = "COL1" + type = "VARCHAR(16777216)" + } + column { + name = "col2" + type = "FLOAT" + } +} +` + return fmt.Sprintf(s, name, name, table2Name) +} + +func tableConfig5(name string, table2Name string) string { + s := ` +resource 
"snowflake_database" "test_database" { + name = "%s" + comment = "Terraform acceptance test" +} + +resource "snowflake_schema" "test_schema" { + name = "%s" + database = snowflake_database.test_database.name + comment = "Terraform acceptance test" +} + +resource "snowflake_table" "test_table2" { + database = snowflake_database.test_database.name + schema = snowflake_schema.test_schema.name + name = "%s" + comment = "Terraform acceptance test" + cluster_by = ["\"col2\"","COL1"] + column { + name = "COL1" + type = "VARCHAR(16777216)" + } + column { + name = "col2" + type = "FLOAT" + } +} +` + return fmt.Sprintf(s, name, name, table2Name) +} + +func tableConfig6(name string) string { + s := ` +resource "snowflake_database" "test_database" { + name = "%s" + comment = "Terraform acceptance test" +} + +resource "snowflake_schema" "test_schema" { + name = "%s" + database = snowflake_database.test_database.name + comment = "Terraform acceptance test" +} + +resource "snowflake_table" "test_table" { + database = snowflake_database.test_database.name + schema = snowflake_schema.test_schema.name + name = "%s" + comment = "Terraform acceptance test" + column { + name = "column2" + type = "VARCHAR(16777216)" + } + column { + name = "column3" + type = "FLOAT" + nullable = false + } +} +` + return fmt.Sprintf(s, name, name, name) +} + +func tableConfig7(name string) string { + s := ` +resource "snowflake_database" "test_database" { + name = "%s" + comment = "Terraform acceptance test" +} + +resource "snowflake_schema" "test_schema" { + name = "%s" + database = snowflake_database.test_database.name + comment = "Terraform acceptance test" +} + +resource "snowflake_table" "test_table" { + database = snowflake_database.test_database.name + schema = snowflake_schema.test_schema.name + name = "%s" + comment = "Terraform acceptance test" + column { + name = "column2" + type = "VARCHAR(16777216)" + } + column { + name = "column3" + type = "FLOAT" + nullable = false + } + primary_key { + name = "" + keys = ["column2"] + } +} +` + return fmt.Sprintf(s, name, name, name) +} + +func tableConfig8(name string) string { + s := ` +resource "snowflake_database" "test_database" { + name = "%s" + comment = "Terraform acceptance test" +} + +resource "snowflake_schema" "test_schema" { + name = "%s" + database = snowflake_database.test_database.name + comment = "Terraform acceptance test" +} + +resource "snowflake_table" "test_table" { + database = snowflake_database.test_database.name + schema = snowflake_schema.test_schema.name + name = "%s" + comment = "Terraform acceptance test" + column { + name = "column2" + type = "VARCHAR(16777216)" + } + column { + name = "column3" + type = "FLOAT" + nullable = false + } + primary_key { + name = "new_name" + keys = ["column2","column3"] + } +} +` + return fmt.Sprintf(s, name, name, name) +} diff --git a/pkg/resources/table_test.go b/pkg/resources/table_test.go index 65ed8270c5..ddb6f0dc52 100644 --- a/pkg/resources/table_test.go +++ b/pkg/resources/table_test.go @@ -22,16 +22,17 @@ func TestTableCreate(t *testing.T) { r := require.New(t) in := map[string]interface{}{ - "name": "good_name", - "database": "database_name", - "schema": "schema_name", - "comment": "great comment", - "column": []interface{}{map[string]interface{}{"name": "column1", "type": "OBJECT"}, map[string]interface{}{"name": "column2", "type": "VARCHAR"}}, + "name": "good_name", + "database": "database_name", + "schema": "schema_name", + "comment": "great comment", + "column": 
[]interface{}{map[string]interface{}{"name": "column1", "type": "OBJECT"}, map[string]interface{}{"name": "column2", "type": "VARCHAR", "nullable": false}},
+ "primary_key": []interface{}{map[string]interface{}{"name": "MY_KEY", "keys": []interface{}{"column1"}}},
 }

 d := table(t, "database_name|schema_name|good_name", in)

 WithMockDb(t, func(db *sql.DB, mock sqlmock.Sqlmock) {
- mock.ExpectExec(`CREATE TABLE "database_name"."schema_name"."good_name" \("column1" OBJECT, "column2" VARCHAR\) COMMENT = 'great comment'`).WillReturnResult(sqlmock.NewResult(1, 1))
+ mock.ExpectExec(`CREATE TABLE "database_name"."schema_name"."good_name" \("column1" OBJECT, "column2" VARCHAR NOT NULL ,CONSTRAINT "MY_KEY" PRIMARY KEY\("column1"\)\) COMMENT = 'great comment'`).WillReturnResult(sqlmock.NewResult(1, 1))
 expectTableRead(mock)
 err := resources.CreateTable(d, db)
 r.NoError(err)
@@ -43,11 +44,16 @@ func expectTableRead(mock sqlmock.Sqlmock) {
 rows := sqlmock.NewRows([]string{"name", "type", "kind", "null?", "default", "primary key", "unique key", "check", "expression", "comment"}).AddRow("good_name", "VARCHAR()", "COLUMN", "Y", "NULL", "NULL", "N", "N", "NULL", "mock comment")
 mock.ExpectQuery(`SHOW TABLES LIKE 'good_name' IN SCHEMA "database_name"."schema_name"`).WillReturnRows(rows)

- describeRows := sqlmock.NewRows([]string{"name", "type", "kind"}).
- AddRow("column1", "OBJECT", "COLUMN").
- AddRow("column2", "VARCHAR", "COLUMN")
+ describeRows := sqlmock.NewRows([]string{"name", "type", "kind", "null?"}).
+ AddRow("column1", "OBJECT", "COLUMN", "Y").
+ AddRow("column2", "VARCHAR", "COLUMN", "N")

 mock.ExpectQuery(`DESC TABLE "database_name"."schema_name"."good_name"`).WillReturnRows(describeRows)
+
+ pkRows := sqlmock.NewRows([]string{"column_name", "key_sequence", "constraint_name"}).AddRow("column1", "1", "MY_PK")
+
+ mock.ExpectQuery(`SHOW PRIMARY KEYS IN TABLE "database_name"."schema_name"."good_name"`).WillReturnRows(pkRows)
+
 }

 func TestTableRead(t *testing.T) {
diff --git a/pkg/resources/task.go b/pkg/resources/task.go
index dd0f5ac310..e481fab4e5 100644
--- a/pkg/resources/task.go
+++ b/pkg/resources/task.go
@@ -82,10 +82,11 @@ var taskSchema = map[string]*schema.Schema{
 Description: "Specifies a Boolean SQL expression; multiple conditions joined with AND/OR are supported.",
 },
 "sql_statement": {
- Type: schema.TypeString,
- Required: true,
- Description: "Any single SQL statement, or a call to a stored procedure, executed when the task runs.",
- ForceNew: false,
+ Type: schema.TypeString,
+ Required: true,
+ Description: "Any single SQL statement, or a call to a stored procedure, executed when the task runs.",
+ ForceNew: false,
+ DiffSuppressFunc: DiffSuppressStatement,
 },
}
diff --git a/pkg/resources/user_public_keys.go b/pkg/resources/user_public_keys.go
new file mode 100644
index 0000000000..d05dbc0107
--- /dev/null
+++ b/pkg/resources/user_public_keys.go
@@ -0,0 +1,176 @@
+package resources
+
+import (
+ "database/sql"
+ "fmt"
+ "log"
+ "strings"
+
+ "github.com/chanzuckerberg/go-misc/sets"
+ "github.com/chanzuckerberg/terraform-provider-snowflake/pkg/snowflake"
+ "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema"
+)
+
+var userPublicKeyProperties = []string{
+ "rsa_public_key",
+ "rsa_public_key_2",
+}
+
+// sanitize input to suppress spurious diffs, etc.
+func publicKeyStateFunc(v interface{}) string {
+ value := v.(string)
+ value = strings.TrimSuffix(value, "\n")
+ return value
+}
+
+var userPublicKeysSchema = map[string]*schema.Schema{
+ "name": {
+ Type: schema.TypeString,
+ Required: true,
+ Description: "Name of the user.",
+ ForceNew: true,
+ },
+
+ "rsa_public_key": {
+ Type: schema.TypeString,
+ Optional: true,
+ Description: "Specifies the user’s RSA public key; used for key-pair authentication. Must be on 1 line without header and trailer.",
+ StateFunc: publicKeyStateFunc,
+ },
+ "rsa_public_key_2": {
+ Type: schema.TypeString,
+ Optional: true,
+ Description: "Specifies the user’s second RSA public key; used to rotate the public and private keys for key-pair authentication based on an expiration schedule set by your organization. Must be on 1 line without header and trailer.",
+ StateFunc: publicKeyStateFunc,
+ },
+}
+
+func UserPublicKeys() *schema.Resource {
+ return &schema.Resource{
+ Create: CreateUserPublicKeys,
+ Read: ReadUserPublicKeys,
+ Update: UpdateUserPublicKeys,
+ Delete: DeleteUserPublicKeys,
+
+ Schema: userPublicKeysSchema,
+ Importer: &schema.ResourceImporter{
+ StateContext: schema.ImportStatePassthroughContext,
+ },
+ }
+}
+
+func checkUserExists(db *sql.DB, name string) (bool, error) {
+ // First check if user exists
+ stmt := snowflake.User(name).Show()
+ row := snowflake.QueryRow(db, stmt)
+ _, err := snowflake.ScanUser(row)
+ if err == sql.ErrNoRows {
+ log.Printf("[DEBUG] user (%s) not found", name)
+ return false, nil
+ }
+ if err != nil {
+ return false, err
+ }
+
+ return true, nil
+}
+
+func ReadUserPublicKeys(d *schema.ResourceData, meta interface{}) error {
+ db := meta.(*sql.DB)
+ id := d.Id()
+
+ exists, err := checkUserExists(db, id)
+ if err != nil {
+ return err
+ }
+ // If not found, mark resource to be removed from statefile during apply or refresh
+ if !exists {
+ d.SetId("")
+ return nil
+ }
+ // we can't really read the public keys back from Snowflake so assume they haven't changed
+ return nil
+}
+
+func CreateUserPublicKeys(d *schema.ResourceData, meta interface{}) error {
+ db := meta.(*sql.DB)
+ name := d.Get("name").(string)
+
+ for _, prop := range userPublicKeyProperties {
+ publicKey, publicKeyOK := d.GetOk(prop)
+ if !publicKeyOK {
+ continue
+ }
+ err := updateUserPublicKeys(db, name, prop, publicKey.(string))
+ if err != nil {
+ return err
+ }
+ }
+
+ d.SetId(name)
+ return ReadUserPublicKeys(d, meta)
+}
+
+func UpdateUserPublicKeys(d *schema.ResourceData, meta interface{}) error {
+ db := meta.(*sql.DB)
+ name := d.Id()
+
+ propsToSet := map[string]string{}
+ propsToUnset := sets.NewStringSet()
+
+ for _, prop := range userPublicKeyProperties {
+ // if key hasn't changed, continue
+ if !d.HasChange(prop) {
+ continue
+ }
+ // if it has changed then we should do something about it
+ publicKey, publicKeyOK := d.GetOk(prop)
+ if publicKeyOK { // if set, then we should update the value
+ propsToSet[prop] = publicKey.(string)
+ } else { // if now unset, we should unset the property (by name) on the user
+ propsToUnset.Add(prop)
+ }
+ }
+
+ // set the keys we decided should be set
+ for prop, value := range propsToSet {
+ err := updateUserPublicKeys(db, name, prop, value)
+ if err != nil {
+ return err
+ }
+ }
+
+ // unset the keys we decided should be unset
+ for _, prop := range propsToUnset.List() {
+ err := unsetUserPublicKeys(db, name, prop)
+ if err != nil {
+ return err
+ }
+ }
+ // re-sync
+ return ReadUserPublicKeys(d, meta)
+}
+
+func DeleteUserPublicKeys(d *schema.ResourceData, meta interface{}) error {
+ db := meta.(*sql.DB)
+ name := d.Id()
+
+ for _, prop := range userPublicKeyProperties {
+ err := unsetUserPublicKeys(db, name, prop)
+ if err != nil {
+ return err
+ }
+ }
+
+ d.SetId("")
+ return nil
+} + +func updateUserPublicKeys(db *sql.DB, name string, prop string, value string) error { + stmt := fmt.Sprintf(`ALTER USER "%s" SET %s = '%s'`, name, prop, value) + return snowflake.Exec(db, stmt) +} +func unsetUserPublicKeys(db *sql.DB, name string, prop string) error { + stmt := fmt.Sprintf(`ALTER USER "%s" UNSET %s`, name, prop) + return snowflake.Exec(db, stmt) +} diff --git a/pkg/resources/user_public_keys_acceptance_test.go b/pkg/resources/user_public_keys_acceptance_test.go new file mode 100644 index 0000000000..865561c3e1 --- /dev/null +++ b/pkg/resources/user_public_keys_acceptance_test.go @@ -0,0 +1,87 @@ +package resources_test + +import ( + "bytes" + "strings" + "testing" + "text/template" + + "github.com/chanzuckerberg/terraform-provider-snowflake/pkg/testhelpers" + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/acctest" + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" + "github.com/stretchr/testify/require" +) + +func TestAcc_UserPublicKeys(t *testing.T) { + r := require.New(t) + prefix := strings.ToUpper(acctest.RandStringFromCharSet(10, acctest.CharSetAlpha)) + sshkey1, err := testhelpers.Fixture("userkey1") + r.NoError(err) + sshkey2, err := testhelpers.Fixture("userkey2") + r.NoError(err) + + resource.ParallelTest(t, resource.TestCase{ + Providers: providers(), + Steps: []resource.TestStep{ + { + Config: uPublicKeysConfig(r, PublicKeyData{ + Prefix: prefix, + PublicKey1: sshkey1, + PublicKey2: sshkey2, + }), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr("snowflake_user.w", "name", prefix), + + resource.TestCheckResourceAttr("snowflake_user_public_keys.foobar", "rsa_public_key", sshkey1), + resource.TestCheckResourceAttr("snowflake_user_public_keys.foobar", "rsa_public_key_2", sshkey2), + ), + }, + // IMPORT + { + ResourceName: "snowflake_user.w", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"password", "rsa_public_key", "rsa_public_key_2", "must_change_password"}, + }, + }, + }) +} + +type PublicKeyData struct { + Prefix string + PublicKey1 string + PublicKey2 string +} + +func uPublicKeysConfig(r *require.Assertions, data PublicKeyData) string { + t := ` +resource "snowflake_user" "w" { + name = "{{.Prefix}}" + comment = "test comment" + login_name = "{{.Prefix}}_login" + display_name = "Display Name" + first_name = "Marcin" + last_name = "Zukowski" + email = "fake@email.com" + disabled = false + default_warehouse="foo" + default_role="foo" + default_namespace="foo" +} + +resource "snowflake_user_public_keys" "foobar" { + name = snowflake_user.w.name + rsa_public_key = < 0 { + account_id = fmt.Sprintf("%s.%s", account_id, region_id) + } + return fmt.Sprintf("https://%s.snowflakecomputing.com", account_id), nil + } + + return "", fmt.Errorf("Failed to map Snowflake account region %s to a region_id", acc.Region) +} diff --git a/pkg/snowflake/current_account_test.go b/pkg/snowflake/current_account_test.go new file mode 100644 index 0000000000..9b8c1717a0 --- /dev/null +++ b/pkg/snowflake/current_account_test.go @@ -0,0 +1,72 @@ +package snowflake_test + +import ( + "testing" + + sqlmock "github.com/DATA-DOG/go-sqlmock" + "github.com/chanzuckerberg/terraform-provider-snowflake/pkg/snowflake" + "github.com/jmoiron/sqlx" + "github.com/stretchr/testify/require" +) + +func TestCurrentAccountSelect(t *testing.T) { + r := require.New(t) + r.Equal(`SELECT CURRENT_ACCOUNT() AS "account", CURRENT_REGION() AS "region";`, snowflake.SelectCurrentAccount()) +} + +func TestCurrentAccountRead(t 
*testing.T) { + type testCaseEntry struct { + account string + region string + url string + } + + testCases := map[string]testCaseEntry{ + "aws oregon": { + "ab1234", + "AWS_US_WEST_2", + "https://ab1234.snowflakecomputing.com", + }, + "aws n virginia": { + "cd5678", + "AWS_US_EAST_1", + "https://cd5678.us-east-1.snowflakecomputing.com", + }, + "aws canada central": { + "ef9012", + "AWS_CA_CENTRAL_1", + "https://ef9012.ca-central-1.aws.snowflakecomputing.com", + }, + "gcp canada central": { + "gh3456", + "gcp_us_central1", + "https://gh3456.us-central1.gcp.snowflakecomputing.com", + }, + "azure washington": { + "ij7890", + "azure_westus2", + "https://ij7890.west-us-2.azure.snowflakecomputing.com", + }, + } + + for name, testCase := range testCases { + t.Run(name, func(t *testing.T) { + r := require.New(t) + mockDB, mock, err := sqlmock.New() + r.NoError(err) + defer mockDB.Close() + sqlxDB := sqlx.NewDb(mockDB, "sqlmock") + + rows := sqlmock.NewRows([]string{"account", "region"}).AddRow(testCase.account, testCase.region) + mock.ExpectQuery(`SELECT CURRENT_ACCOUNT\(\) AS "account", CURRENT_REGION\(\) AS "region";`).WillReturnRows(rows) + + acc, err := snowflake.ReadCurrentAccount(sqlxDB.DB) + r.NoError(err) + r.Equal(testCase.account, acc.Account) + r.Equal(testCase.region, acc.Region) + url, err := acc.AccountURL() + r.NoError(err) + r.Equal(testCase.url, url) + }) + } +} diff --git a/pkg/snowflake/external_table.go b/pkg/snowflake/external_table.go index b82d0b957e..502d8f21e7 100644 --- a/pkg/snowflake/external_table.go +++ b/pkg/snowflake/external_table.go @@ -113,14 +113,15 @@ func (tb *ExternalTableBuilder) Create() string { q.WriteString(fmt.Sprintf(` (`)) columnDefinitions := []string{} for _, columnDefinition := range tb.columns { - columnDefinitions = append(columnDefinitions, fmt.Sprintf(`"%v" %v AS %v`, EscapeString(columnDefinition["name"]), EscapeString(columnDefinition["type"]), EscapeString(columnDefinition["as"]))) + columnDefinitions = append(columnDefinitions, fmt.Sprintf(`"%v" %v AS %v`, EscapeString(columnDefinition["name"]), EscapeString(columnDefinition["type"]), columnDefinition["as"])) } q.WriteString(strings.Join(columnDefinitions, ", ")) q.WriteString(fmt.Sprintf(`)`)) - if len(tb.partitionBys) > 1 { - q.WriteString(` PARTIION BY `) + if len(tb.partitionBys) > 0 { + q.WriteString(` PARTITION BY ( `) q.WriteString(EscapeString(strings.Join(tb.partitionBys, ", "))) + q.WriteString(` )`) } q.WriteString(` WITH LOCATION = ` + EscapeString(tb.location)) diff --git a/pkg/snowflake/file_format.go b/pkg/snowflake/file_format.go new file mode 100644 index 0000000000..403798cb7d --- /dev/null +++ b/pkg/snowflake/file_format.go @@ -0,0 +1,623 @@ +package snowflake + +import ( + "database/sql" + "encoding/json" + "fmt" + "strings" + + "github.com/jmoiron/sqlx" +) + +// FileFormatBuilder abstracts the creation of SQL queries for a Snowflake file format +type FileFormatBuilder struct { + name string + db string + schema string + formatType string + compression string + recordDelimiter string + fieldDelimiter string + fileExtension string + skipHeader int + skipBlankLines bool + dateFormat string + timeFormat string + timestampFormat string + binaryFormat string + escape string + escapeUnenclosedField string + trimSpace bool + fieldOptionallyEnclosedBy string + nullIf []string + errorOnColumnCountMismatch bool + replaceInvalidCharacters bool + validateUTF8 bool + emptyFieldAsNull bool + skipByteOrderMark bool + encoding string + enableOctal bool + allowDuplicate bool + 
stripOuterArray bool
+ stripNullValues bool
+ ignoreUTF8Errors bool
+ binaryAsText bool
+ preserveSpace bool
+ stripOuterElement bool
+ disableSnowflakeData bool
+ disableAutoConvert bool
+ comment string
+}
+
+// QualifiedName prepends the db and schema and escapes everything nicely
+func (ffb *FileFormatBuilder) QualifiedName() string {
+ var n strings.Builder
+
+ n.WriteString(fmt.Sprintf(`"%v"."%v"."%v"`, ffb.db, ffb.schema, ffb.name))
+
+ return n.String()
+}
+
+// WithFormatType adds a format type to the FileFormatBuilder
+func (ffb *FileFormatBuilder) WithFormatType(f string) *FileFormatBuilder {
+ ffb.formatType = f
+ return ffb
+}
+
+// WithCompression adds compression to the FileFormatBuilder
+func (ffb *FileFormatBuilder) WithCompression(c string) *FileFormatBuilder {
+ ffb.compression = c
+ return ffb
+}
+
+// WithRecordDelimiter adds a record delimiter to the FileFormatBuilder
+func (ffb *FileFormatBuilder) WithRecordDelimiter(r string) *FileFormatBuilder {
+ ffb.recordDelimiter = r
+ return ffb
+}
+
+// WithFieldDelimiter adds a field delimiter to the FileFormatBuilder
+func (ffb *FileFormatBuilder) WithFieldDelimiter(f string) *FileFormatBuilder {
+ ffb.fieldDelimiter = f
+ return ffb
+}
+
+// WithFileExtension adds a file extension to the FileFormatBuilder
+func (ffb *FileFormatBuilder) WithFileExtension(f string) *FileFormatBuilder {
+ ffb.fileExtension = f
+ return ffb
+}
+
+// WithSkipHeader adds skip header to the FileFormatBuilder
+func (ffb *FileFormatBuilder) WithSkipHeader(n int) *FileFormatBuilder {
+ ffb.skipHeader = n
+ return ffb
+}
+
+// WithSkipBlankLines adds skip blank lines to the FileFormatBuilder
+func (ffb *FileFormatBuilder) WithSkipBlankLines(n bool) *FileFormatBuilder {
+ ffb.skipBlankLines = n
+ return ffb
+}
+
+// WithDateFormat adds date format to the FileFormatBuilder
+func (ffb *FileFormatBuilder) WithDateFormat(s string) *FileFormatBuilder {
+ ffb.dateFormat = s
+ return ffb
+}
+
+// WithTimeFormat adds time format to the FileFormatBuilder
+func (ffb *FileFormatBuilder) WithTimeFormat(s string) *FileFormatBuilder {
+ ffb.timeFormat = s
+ return ffb
+}
+
+// WithTimestampFormat adds timestamp format to the FileFormatBuilder
+func (ffb *FileFormatBuilder) WithTimestampFormat(s string) *FileFormatBuilder {
+ ffb.timestampFormat = s
+ return ffb
+}
+
+// WithBinaryFormat adds binary format to the FileFormatBuilder
+func (ffb *FileFormatBuilder) WithBinaryFormat(s string) *FileFormatBuilder {
+ ffb.binaryFormat = s
+ return ffb
+}
+
+// WithEscape adds escape to the FileFormatBuilder
+func (ffb *FileFormatBuilder) WithEscape(s string) *FileFormatBuilder {
+ ffb.escape = s
+ return ffb
+}
+
+// WithEscapeUnenclosedField adds escape unenclosed field to the FileFormatBuilder
+func (ffb *FileFormatBuilder) WithEscapeUnenclosedField(s string) *FileFormatBuilder {
+ ffb.escapeUnenclosedField = s
+ return ffb
+}
+
+// WithTrimSpace adds trim space to the FileFormatBuilder
+func (ffb *FileFormatBuilder) WithTrimSpace(n bool) *FileFormatBuilder {
+ ffb.trimSpace = n
+ return ffb
+}
+
+// WithFieldOptionallyEnclosedBy adds field optionally enclosed by to the FileFormatBuilder
+func (ffb *FileFormatBuilder) WithFieldOptionallyEnclosedBy(s string) *FileFormatBuilder {
+ ffb.fieldOptionallyEnclosedBy = s
+ return ffb
+}
+
+// WithNullIf adds null if to the FileFormatBuilder
+func (ffb *FileFormatBuilder) WithNullIf(s []string) *FileFormatBuilder {
+ ffb.nullIf = s
+ return ffb
+}
+
+// WithErrorOnColumnCountMismatch adds error on column count mismatch to the FileFormatBuilder
+func (ffb *FileFormatBuilder) WithErrorOnColumnCountMismatch(n bool) *FileFormatBuilder {
+ ffb.errorOnColumnCountMismatch = n
+ return ffb
+}
+
+// WithReplaceInvalidCharacters adds replace invalid characters to the FileFormatBuilder
+func (ffb *FileFormatBuilder) WithReplaceInvalidCharacters(n bool) *FileFormatBuilder {
+ ffb.replaceInvalidCharacters = n
+ return ffb
+}
+
+// WithValidateUTF8 adds validate utf8 to the FileFormatBuilder
+func (ffb *FileFormatBuilder) WithValidateUTF8(n bool) *FileFormatBuilder {
+ ffb.validateUTF8 = n
+ return ffb
+}
+
+// WithEmptyFieldAsNull adds empty field as null to the FileFormatBuilder
+func (ffb *FileFormatBuilder) WithEmptyFieldAsNull(n bool) *FileFormatBuilder {
+ ffb.emptyFieldAsNull = n
+ return ffb
+}
+
+// WithSkipByteOrderMark adds skip byte order mark to the FileFormatBuilder
+func (ffb *FileFormatBuilder) WithSkipByteOrderMark(n bool) *FileFormatBuilder {
+ ffb.skipByteOrderMark = n
+ return ffb
+}
+
+// WithEnableOctal adds enable octal to the FileFormatBuilder
+func (ffb *FileFormatBuilder) WithEnableOctal(n bool) *FileFormatBuilder {
+ ffb.enableOctal = n
+ return ffb
+}
+
+// WithAllowDuplicate adds allow duplicate to the FileFormatBuilder
+func (ffb *FileFormatBuilder) WithAllowDuplicate(n bool) *FileFormatBuilder {
+ ffb.allowDuplicate = n
+ return ffb
+}
+
+// WithStripOuterArray adds strip outer array to the FileFormatBuilder
+func (ffb *FileFormatBuilder) WithStripOuterArray(n bool) *FileFormatBuilder {
+ ffb.stripOuterArray = n
+ return ffb
+}
+
+// WithStripNullValues adds strip null values to the FileFormatBuilder
+func (ffb *FileFormatBuilder) WithStripNullValues(n bool) *FileFormatBuilder {
+ ffb.stripNullValues = n
+ return ffb
+}
+
+// WithIgnoreUTF8Errors adds ignore UTF8 errors to the FileFormatBuilder
+func (ffb *FileFormatBuilder) WithIgnoreUTF8Errors(n bool) *FileFormatBuilder {
+ ffb.ignoreUTF8Errors = n
+ return ffb
+}
+
+// WithBinaryAsText adds binary as text to the FileFormatBuilder
+func (ffb *FileFormatBuilder) WithBinaryAsText(n bool) *FileFormatBuilder {
+ ffb.binaryAsText = n
+ return ffb
+}
+
+// WithPreserveSpace adds preserve space to the FileFormatBuilder
+func (ffb *FileFormatBuilder) WithPreserveSpace(n bool) *FileFormatBuilder {
+ ffb.preserveSpace = n
+ return ffb
+}
+
+// WithStripOuterElement adds strip outer element to the FileFormatBuilder
+func (ffb *FileFormatBuilder) WithStripOuterElement(n bool) *FileFormatBuilder {
+ ffb.stripOuterElement = n
+ return ffb
+}
+
+// WithDisableSnowflakeData adds disable Snowflake data to the FileFormatBuilder
+func (ffb *FileFormatBuilder) WithDisableSnowflakeData(n bool) *FileFormatBuilder {
+ ffb.disableSnowflakeData = n
+ return ffb
+}
+
+// WithDisableAutoConvert adds disable auto convert to the FileFormatBuilder
+func (ffb *FileFormatBuilder) WithDisableAutoConvert(n bool) *FileFormatBuilder {
+ ffb.disableAutoConvert = n
+ return ffb
+}
+
+// WithEncoding adds encoding to the FileFormatBuilder
+func (ffb *FileFormatBuilder) WithEncoding(e string) *FileFormatBuilder {
+ ffb.encoding = e
+ return ffb
+}
+
+// WithComment adds a comment to the FileFormatBuilder
+func (ffb *FileFormatBuilder) WithComment(c string) *FileFormatBuilder {
+ ffb.comment = c
+ return ffb
+}
+
+// FileFormat returns a pointer to a Builder that abstracts the DDL operations for a file format.
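+//
+// A minimal usage sketch (hypothetical names; executing the rendered SQL via
+// snowflake.Exec is assumed to happen in the calling resource code):
+//
+//	q := FileFormat("my_format", "my_db", "my_schema").
+//		WithFormatType("CSV").
+//		WithCompression("AUTO").
+//		Create()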
+// +// Supported DDL operations are: +// - CREATE FILE FORMAT +// - ALTER FILE FORMAT +// - DROP FILE FORMAT +// - SHOW FILE FORMATS +// - DESCRIBE FILE FORMAT +// +// [Snowflake Reference](https://docs.snowflake.com/en/sql-reference/sql/create-file-format.html) +func FileFormat(name, db, schema string) *FileFormatBuilder { + return &FileFormatBuilder{ + name: name, + db: db, + schema: schema, + } +} + +// Create returns the SQL query that will create a new file format. +func (ffb *FileFormatBuilder) Create() string { + q := strings.Builder{} + q.WriteString(`CREATE`) + + q.WriteString(fmt.Sprintf(` FILE FORMAT %v`, ffb.QualifiedName())) + q.WriteString(fmt.Sprintf(` TYPE = '%v'`, ffb.formatType)) + + if ffb.compression != "" { + q.WriteString(fmt.Sprintf(` COMPRESSION = '%v'`, ffb.compression)) + } + + if ffb.recordDelimiter != "" { + q.WriteString(fmt.Sprintf(` RECORD_DELIMITER = '%v'`, ffb.recordDelimiter)) + } + + if ffb.fieldDelimiter != "" { + q.WriteString(fmt.Sprintf(` FIELD_DELIMITER = '%v'`, ffb.fieldDelimiter)) + } + + if ffb.fileExtension != "" { + q.WriteString(fmt.Sprintf(` FILE_EXTENSION = '%v'`, ffb.fileExtension)) + } + + if ffb.skipHeader > 0 { + q.WriteString(fmt.Sprintf(` SKIP_HEADER = %v`, ffb.skipHeader)) + } + + if ffb.dateFormat != "" { + q.WriteString(fmt.Sprintf(` DATE_FORMAT = '%v'`, ffb.dateFormat)) + } + + if ffb.timeFormat != "" { + q.WriteString(fmt.Sprintf(` TIME_FORMAT = '%v'`, ffb.timeFormat)) + } + + if ffb.timestampFormat != "" { + q.WriteString(fmt.Sprintf(` TIMESTAMP_FORMAT = '%v'`, ffb.timestampFormat)) + } + + if ffb.binaryFormat != "" { + q.WriteString(fmt.Sprintf(` BINARY_FORMAT = '%v'`, ffb.binaryFormat)) + } + + if ffb.escape != "" { + q.WriteString(fmt.Sprintf(` ESCAPE = '%v'`, EscapeString(ffb.escape))) + } + + if ffb.escapeUnenclosedField != "" { + q.WriteString(fmt.Sprintf(` ESCAPE_UNENCLOSED_FIELD = '%v'`, ffb.escapeUnenclosedField)) + } + + if ffb.fieldOptionallyEnclosedBy != "" { + q.WriteString(fmt.Sprintf(` FIELD_OPTIONALLY_ENCLOSED_BY = '%v'`, EscapeString(ffb.fieldOptionallyEnclosedBy))) + } + + if len(ffb.nullIf) > 0 { + nullIfStr := "'" + strings.Join(ffb.nullIf, "', '") + "'" + q.WriteString(fmt.Sprintf(` NULL_IF = (%v)`, nullIfStr)) + } else if strings.ToUpper(ffb.formatType) != "XML" { + q.WriteString(` NULL_IF = ()`) + } + + if ffb.encoding != "" { + q.WriteString(fmt.Sprintf(` ENCODING = '%v'`, ffb.encoding)) + } + + // set boolean values + if ffb.formatType == "CSV" { + q.WriteString(fmt.Sprintf(` SKIP_BLANK_LINES = %v`, ffb.skipBlankLines)) + q.WriteString(fmt.Sprintf(` TRIM_SPACE = %v`, ffb.trimSpace)) + q.WriteString(fmt.Sprintf(` ERROR_ON_COLUMN_COUNT_MISMATCH = %v`, ffb.errorOnColumnCountMismatch)) + q.WriteString(fmt.Sprintf(` REPLACE_INVALID_CHARACTERS = %v`, ffb.replaceInvalidCharacters)) + q.WriteString(fmt.Sprintf(` VALIDATE_UTF8 = %v`, ffb.validateUTF8)) + q.WriteString(fmt.Sprintf(` EMPTY_FIELD_AS_NULL = %v`, ffb.emptyFieldAsNull)) + q.WriteString(fmt.Sprintf(` SKIP_BYTE_ORDER_MARK = %v`, ffb.skipByteOrderMark)) + } else if ffb.formatType == "JSON" { + q.WriteString(fmt.Sprintf(` TRIM_SPACE = %v`, ffb.trimSpace)) + q.WriteString(fmt.Sprintf(` ENABLE_OCTAL = %v`, ffb.enableOctal)) + q.WriteString(fmt.Sprintf(` ALLOW_DUPLICATE = %v`, ffb.allowDuplicate)) + q.WriteString(fmt.Sprintf(` STRIP_OUTER_ARRAY = %v`, ffb.stripOuterArray)) + q.WriteString(fmt.Sprintf(` STRIP_NULL_VALUES = %v`, ffb.stripNullValues)) + q.WriteString(fmt.Sprintf(` REPLACE_INVALID_CHARACTERS = %v`, ffb.replaceInvalidCharacters)) + 
q.WriteString(fmt.Sprintf(` IGNORE_UTF8_ERRORS = %v`, ffb.ignoreUTF8Errors)) + q.WriteString(fmt.Sprintf(` SKIP_BYTE_ORDER_MARK = %v`, ffb.skipByteOrderMark)) + } else if ffb.formatType == "AVRO" || ffb.formatType == "ORC" { + q.WriteString(fmt.Sprintf(` TRIM_SPACE = %v`, ffb.trimSpace)) + } else if ffb.formatType == "PARQUET" { + q.WriteString(fmt.Sprintf(` BINARY_AS_TEXT = %v`, ffb.binaryAsText)) + q.WriteString(fmt.Sprintf(` TRIM_SPACE = %v`, ffb.trimSpace)) + } else if ffb.formatType == "XML" { + q.WriteString(fmt.Sprintf(` IGNORE_UTF8_ERRORS = %v`, ffb.ignoreUTF8Errors)) + q.WriteString(fmt.Sprintf(` PRESERVE_SPACE = %v`, ffb.preserveSpace)) + q.WriteString(fmt.Sprintf(` STRIP_OUTER_ELEMENT = %v`, ffb.stripOuterElement)) + q.WriteString(fmt.Sprintf(` DISABLE_SNOWFLAKE_DATA = %v`, ffb.disableSnowflakeData)) + q.WriteString(fmt.Sprintf(` DISABLE_AUTO_CONVERT = %v`, ffb.disableAutoConvert)) + q.WriteString(fmt.Sprintf(` SKIP_BYTE_ORDER_MARK = %v`, ffb.skipByteOrderMark)) + } + + if ffb.comment != "" { + q.WriteString(fmt.Sprintf(` COMMENT = '%v'`, EscapeString(ffb.comment))) + } + + return q.String() +} + +// ChangeComment returns the SQL query that will update the comment on the file format. +func (ffb *FileFormatBuilder) ChangeComment(c string) string { + return fmt.Sprintf(`ALTER FILE FORMAT %v SET COMMENT = '%v'`, ffb.QualifiedName(), c) +} + +// RemoveComment returns the SQL query that will remove the comment on the file format. +func (ffb *FileFormatBuilder) RemoveComment() string { + return fmt.Sprintf(`ALTER FILE FORMAT %v UNSET COMMENT`, ffb.QualifiedName()) +} + +// ChangeCompression returns the SQL query that will update the compression on the file format. +func (ffb *FileFormatBuilder) ChangeCompression(c string) string { + return fmt.Sprintf(`ALTER FILE FORMAT %v SET COMPRESSION = '%v'`, ffb.QualifiedName(), c) +} + +// ChangeRecordDelimiter returns the SQL query that will update the record delimiter on the file format. +func (ffb *FileFormatBuilder) ChangeRecordDelimiter(c string) string { + return fmt.Sprintf(`ALTER FILE FORMAT %v SET RECORD_DELIMITER = '%v'`, ffb.QualifiedName(), c) +} + +// ChangeDateFormat returns the SQL query that will update the date format on the file format. +func (ffb *FileFormatBuilder) ChangeDateFormat(c string) string { + return fmt.Sprintf(`ALTER FILE FORMAT %v SET DATE_FORMAT = '%v'`, ffb.QualifiedName(), c) +} + +// ChangeTimeFormat returns the SQL query that will update the time format on the file format. +func (ffb *FileFormatBuilder) ChangeTimeFormat(c string) string { + return fmt.Sprintf(`ALTER FILE FORMAT %v SET TIME_FORMAT = '%v'`, ffb.QualifiedName(), c) +} + +// ChangeTimestampFormat returns the SQL query that will update the timestamp format on the file format. +func (ffb *FileFormatBuilder) ChangeTimestampFormat(c string) string { + return fmt.Sprintf(`ALTER FILE FORMAT %v SET TIMESTAMP_FORMAT = '%v'`, ffb.QualifiedName(), c) +} + +// ChangeBinaryFormat returns the SQL query that will update the binary format on the file format. +func (ffb *FileFormatBuilder) ChangeBinaryFormat(c string) string { + return fmt.Sprintf(`ALTER FILE FORMAT %v SET BINARY_FORMAT = '%v'`, ffb.QualifiedName(), c) +} + +// ChangeErrorOnColumnCountMismatch returns the SQL query that will update the error_on_column_count_mismatch on the file format. 
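+//
+// For example (hypothetical format name; the statement is only rendered here, not run):
+//
+//	q := FileFormat("my_format", "my_db", "my_schema").ChangeErrorOnColumnCountMismatch(true)
+//	// q == `ALTER FILE FORMAT "my_db"."my_schema"."my_format" SET ERROR_ON_COLUMN_COUNT_MISMATCH = true`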
+func (ffb *FileFormatBuilder) ChangeErrorOnColumnCountMismatch(c bool) string {
+ return fmt.Sprintf(`ALTER FILE FORMAT %v SET ERROR_ON_COLUMN_COUNT_MISMATCH = %v`, ffb.QualifiedName(), c)
+}
+
+// ChangeValidateUTF8 returns the SQL query that will update the validate_utf8 on the file format.
+func (ffb *FileFormatBuilder) ChangeValidateUTF8(c bool) string {
+ return fmt.Sprintf(`ALTER FILE FORMAT %v SET VALIDATE_UTF8 = %v`, ffb.QualifiedName(), c)
+}
+
+// ChangeEmptyFieldAsNull returns the SQL query that will update the empty_field_as_null on the file format.
+func (ffb *FileFormatBuilder) ChangeEmptyFieldAsNull(c bool) string {
+ return fmt.Sprintf(`ALTER FILE FORMAT %v SET EMPTY_FIELD_AS_NULL = %v`, ffb.QualifiedName(), c)
+}
+
+// ChangeEscape returns the SQL query that will update the escape on the file format.
+func (ffb *FileFormatBuilder) ChangeEscape(c string) string {
+ return fmt.Sprintf(`ALTER FILE FORMAT %v SET ESCAPE = '%v'`, ffb.QualifiedName(), c)
+}
+
+// ChangeEscapeUnenclosedField returns the SQL query that will update the escape unenclosed field on the file format.
+func (ffb *FileFormatBuilder) ChangeEscapeUnenclosedField(c string) string {
+ return fmt.Sprintf(`ALTER FILE FORMAT %v SET ESCAPE_UNENCLOSED_FIELD = '%v'`, ffb.QualifiedName(), c)
+}
+
+// ChangeFileExtension returns the SQL query that will update the FILE_EXTENSION on the file format.
+func (ffb *FileFormatBuilder) ChangeFileExtension(c string) string {
+ return fmt.Sprintf(`ALTER FILE FORMAT %v SET FILE_EXTENSION = '%v'`, ffb.QualifiedName(), c)
+}
+
+// ChangeFieldDelimiter returns the SQL query that will update the FIELD_DELIMITER on the file format.
+func (ffb *FileFormatBuilder) ChangeFieldDelimiter(c string) string {
+ return fmt.Sprintf(`ALTER FILE FORMAT %v SET FIELD_DELIMITER = '%v'`, ffb.QualifiedName(), c)
+}
+
+// ChangeFieldOptionallyEnclosedBy returns the SQL query that will update the field optionally enclosed by on the file format.
+func (ffb *FileFormatBuilder) ChangeFieldOptionallyEnclosedBy(c string) string {
+ return fmt.Sprintf(`ALTER FILE FORMAT %v SET FIELD_OPTIONALLY_ENCLOSED_BY = '%v'`, ffb.QualifiedName(), c)
+}
+
+// ChangeNullIf returns the SQL query that will update the null if on the file format.
+func (ffb *FileFormatBuilder) ChangeNullIf(c []string) string {
+ nullIfStr := ""
+ if len(c) > 0 {
+ nullIfStr = "'" + strings.Join(c, "', '") + "'"
+ }
+ return fmt.Sprintf(`ALTER FILE FORMAT %v SET NULL_IF = (%v)`, ffb.QualifiedName(), nullIfStr)
+}
+
+// ChangeEncoding returns the SQL query that will update the encoding on the file format.
+func (ffb *FileFormatBuilder) ChangeEncoding(c string) string {
+ return fmt.Sprintf(`ALTER FILE FORMAT %v SET ENCODING = '%v'`, ffb.QualifiedName(), c)
+}
+
+// ChangeSkipHeader returns the SQL query that will update the skip header on the file format.
+func (ffb *FileFormatBuilder) ChangeSkipHeader(c int) string {
+ return fmt.Sprintf(`ALTER FILE FORMAT %v SET SKIP_HEADER = %v`, ffb.QualifiedName(), c)
+}
+
+// ChangeSkipBlankLines returns the SQL query that will update SKIP_BLANK_LINES on the file format.
+func (ffb *FileFormatBuilder) ChangeSkipBlankLines(c bool) string {
+ return fmt.Sprintf(`ALTER FILE FORMAT %v SET SKIP_BLANK_LINES = %v`, ffb.QualifiedName(), c)
+}
+
+// ChangeTrimSpace returns the SQL query that will update TRIM_SPACE on the file format.
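+//
+// A usage sketch (assumes a *sql.DB handle named db is in scope; Exec is this
+// package's statement runner):
+//
+//	q := FileFormat("my_format", "my_db", "my_schema").ChangeTrimSpace(true)
+//	err := Exec(db, q)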
+func (ffb *FileFormatBuilder) ChangeTrimSpace(c bool) string { + return fmt.Sprintf(`ALTER FILE FORMAT %v SET TRIM_SPACE = %v`, ffb.QualifiedName(), c) +} + +// ChangeEnableOctal returns the SQL query that will update ENABLE_OCTAL on the file format. +func (ffb *FileFormatBuilder) ChangeEnableOctal(c bool) string { + return fmt.Sprintf(`ALTER FILE FORMAT %v SET ENABLE_OCTAL = %v`, ffb.QualifiedName(), c) +} + +// ChangeAllowDuplicate returns the SQL query that will update ALLOW_DUPLICATE on the file format. +func (ffb *FileFormatBuilder) ChangeAllowDuplicate(c bool) string { + return fmt.Sprintf(`ALTER FILE FORMAT %v SET ALLOW_DUPLICATE = %v`, ffb.QualifiedName(), c) +} + +// ChangeStripOuterArray returns the SQL query that will update STRIP_OUTER_ARRAY on the file format. +func (ffb *FileFormatBuilder) ChangeStripOuterArray(c bool) string { + return fmt.Sprintf(`ALTER FILE FORMAT %v SET STRIP_OUTER_ARRAY = %v`, ffb.QualifiedName(), c) +} + +// ChangeStripNullValues returns the SQL query that will update STRIP_NULL_VALUES on the file format. +func (ffb *FileFormatBuilder) ChangeStripNullValues(c bool) string { + return fmt.Sprintf(`ALTER FILE FORMAT %v SET STRIP_NULL_VALUES = %v`, ffb.QualifiedName(), c) +} + +// ChangeReplaceInvalidCharacters returns the SQL query that will update REPLACE_INVALID_CHARACTERS on the file format. +func (ffb *FileFormatBuilder) ChangeReplaceInvalidCharacters(c bool) string { + return fmt.Sprintf(`ALTER FILE FORMAT %v SET REPLACE_INVALID_CHARACTERS = %v`, ffb.QualifiedName(), c) +} + +// ChangeIgnoreUTF8Errors returns the SQL query that will update IGNORE_UTF8_ERRORS on the file format. +func (ffb *FileFormatBuilder) ChangeIgnoreUTF8Errors(c bool) string { + return fmt.Sprintf(`ALTER FILE FORMAT %v SET IGNORE_UTF8_ERRORS = %v`, ffb.QualifiedName(), c) +} + +// ChangeSkipByteOrderMark returns the SQL query that will update SKIP_BYTE_ORDER_MARK on the file format. +func (ffb *FileFormatBuilder) ChangeSkipByteOrderMark(c bool) string { + return fmt.Sprintf(`ALTER FILE FORMAT %v SET SKIP_BYTE_ORDER_MARK = %v`, ffb.QualifiedName(), c) +} + +// ChangeBinaryAsText returns the SQL query that will update BINARY_AS_TEXT on the file format. +func (ffb *FileFormatBuilder) ChangeBinaryAsText(c bool) string { + return fmt.Sprintf(`ALTER FILE FORMAT %v SET BINARY_AS_TEXT = %v`, ffb.QualifiedName(), c) +} + +// ChangePreserveSpace returns the SQL query that will update PRESERVE_SPACE on the file format. +func (ffb *FileFormatBuilder) ChangePreserveSpace(c bool) string { + return fmt.Sprintf(`ALTER FILE FORMAT %v SET PRESERVE_SPACE = %v`, ffb.QualifiedName(), c) +} + +// ChangeStripOuterElement returns the SQL query that will update STRIP_OUTER_ELEMENT on the file format. +func (ffb *FileFormatBuilder) ChangeStripOuterElement(c bool) string { + return fmt.Sprintf(`ALTER FILE FORMAT %v SET STRIP_OUTER_ELEMENT = %v`, ffb.QualifiedName(), c) +} + +// ChangeDisableSnowflakeData returns the SQL query that will update DISABLE_SNOWFLAKE_DATA on the file format. +func (ffb *FileFormatBuilder) ChangeDisableSnowflakeData(c bool) string { + return fmt.Sprintf(`ALTER FILE FORMAT %v SET DISABLE_SNOWFLAKE_DATA = %v`, ffb.QualifiedName(), c) +} + +// ChangeDisableAutoConvert returns the SQL query that will update DISABLE_AUTO_CONVERT on the file format. +func (ffb *FileFormatBuilder) ChangeDisableAutoConvert(c bool) string { + return fmt.Sprintf(`ALTER FILE FORMAT %v SET DISABLE_AUTO_CONVERT = %v`, ffb.QualifiedName(), c) +} + +// Drop returns the SQL query that will drop a file format. 
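+//
+// For example (hypothetical identifiers):
+//
+//	q := FileFormat("my_format", "my_db", "my_schema").Drop()
+//	// q == `DROP FILE FORMAT "my_db"."my_schema"."my_format"`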
+func (ffb *FileFormatBuilder) Drop() string {
+ return fmt.Sprintf(`DROP FILE FORMAT %v`, ffb.QualifiedName())
+}
+
+// Describe returns the SQL query that will describe a file format.
+func (ffb *FileFormatBuilder) Describe() string {
+ return fmt.Sprintf(`DESCRIBE FILE FORMAT %v`, ffb.QualifiedName())
+}
+
+// Show returns the SQL query that will show a file format.
+func (ffb *FileFormatBuilder) Show() string {
+ return fmt.Sprintf(`SHOW FILE FORMATS LIKE '%v' IN SCHEMA "%v"."%v"`, ffb.name, ffb.db, ffb.schema)
+}
+
+type fileFormatShow struct {
+ CreatedOn sql.NullString `db:"created_on"`
+ FileFormatName sql.NullString `db:"name"`
+ DatabaseName sql.NullString `db:"database_name"`
+ SchemaName sql.NullString `db:"schema_name"`
+ FormatType sql.NullString `db:"type"`
+ Owner sql.NullString `db:"owner"`
+ Comment sql.NullString `db:"comment"`
+ FormatOptions sql.NullString `db:"format_options"`
+}
+
+type fileFormatOptions struct {
+ Type string `json:"TYPE"`
+ Compression string `json:"COMPRESSION,omitempty"`
+ RecordDelimiter string `json:"RECORD_DELIMITER,omitempty"`
+ FieldDelimiter string `json:"FIELD_DELIMITER,omitempty"`
+ FileExtension string `json:"FILE_EXTENSION,omitempty"`
+ SkipHeader int `json:"SKIP_HEADER,omitempty"`
+ DateFormat string `json:"DATE_FORMAT,omitempty"`
+ TimeFormat string `json:"TIME_FORMAT,omitempty"`
+ TimestampFormat string `json:"TIMESTAMP_FORMAT,omitempty"`
+ BinaryFormat string `json:"BINARY_FORMAT,omitempty"`
+ Escape string `json:"ESCAPE,omitempty"`
+ EscapeUnenclosedField string `json:"ESCAPE_UNENCLOSED_FIELD,omitempty"`
+ TrimSpace bool `json:"TRIM_SPACE,omitempty"`
+ FieldOptionallyEnclosedBy string `json:"FIELD_OPTIONALLY_ENCLOSED_BY,omitempty"`
+ NullIf []string `json:"NULL_IF,omitempty"`
+ ErrorOnColumnCountMismatch bool `json:"ERROR_ON_COLUMN_COUNT_MISMATCH,omitempty"`
+ ValidateUTF8 bool `json:"VALIDATE_UTF8,omitempty"`
+ SkipBlankLines bool `json:"SKIP_BLANK_LINES,omitempty"`
+ ReplaceInvalidCharacters bool `json:"REPLACE_INVALID_CHARACTERS,omitempty"`
+ EmptyFieldAsNull bool `json:"EMPTY_FIELD_AS_NULL,omitempty"`
+ SkipByteOrderMark bool `json:"SKIP_BYTE_ORDER_MARK,omitempty"`
+ Encoding string `json:"ENCODING,omitempty"`
+ EnabelOctal bool `json:"ENABLE_OCTAL,omitempty"`
+ AllowDuplicate bool `json:"ALLOW_DUPLICATE,omitempty"`
+ StripOuterArray bool `json:"STRIP_OUTER_ARRAY,omitempty"`
+ StripNullValues bool `json:"STRIP_NULL_VALUES,omitempty"`
+ IgnoreUTF8Errors bool `json:"IGNORE_UTF8_ERRORS,omitempty"`
+ BinaryAsText bool `json:"BINARY_AS_TEXT,omitempty"`
+ PreserveSpace bool `json:"PRESERVE_SPACE,omitempty"`
+ StripOuterElement bool `json:"STRIP_OUTER_ELEMENT,omitempty"`
+ DisableSnowflakeData bool `json:"DISABLE_SNOWFLAKE_DATA,omitempty"`
+ DisableAutoConvert bool `json:"DISABLE_AUTO_CONVERT,omitempty"`
+}
+
+func ScanFileFormatShow(row *sqlx.Row) (*fileFormatShow, error) {
+ r := &fileFormatShow{}
+ err := row.StructScan(r)
+ return r, err
+}
+
+func ParseFormatOptions(fileOptions string) (*fileFormatOptions, error) {
+ ff := &fileFormatOptions{}
+ err := json.Unmarshal([]byte(fileOptions), ff)
+ return ff, err
+}
diff --git a/pkg/snowflake/file_format_test.go b/pkg/snowflake/file_format_test.go
new file mode 100644
index 0000000000..50dbee85b6
--- /dev/null
+++ b/pkg/snowflake/file_format_test.go
@@ -0,0 +1,269 @@
+package snowflake
+
+import (
+ "testing"
+
+ "github.com/stretchr/testify/require"
+)
+
+func TestFileFormatCreateCSV(t *testing.T) {
+ r := require.New(t)
+ f := FileFormat("test_file_format", "test_db",
"test_schema") + r.Equal(f.QualifiedName(), `"test_db"."test_schema"."test_file_format"`) + + f.WithFormatType("CSV") + f.WithCompression("AUTO") + f.WithRecordDelimiter("\\n") + f.WithFieldDelimiter(",") + f.WithFileExtension(".CSV") + f.WithSkipHeader(1) + f.WithSkipBlankLines(true) + f.WithDateFormat("AUTO") + f.WithTimeFormat("AUTO") + f.WithTimestampFormat("AUTO") + f.WithBinaryFormat("HEX") + f.WithEscape("None") + f.WithEscapeUnenclosedField("None") + f.WithTrimSpace(true) + f.WithFieldOptionallyEnclosedBy("\"") + f.WithNullIf([]string{}) + f.WithErrorOnColumnCountMismatch(false) + f.WithReplaceInvalidCharacters(false) + f.WithValidateUTF8(false) + f.WithEmptyFieldAsNull(true) + f.WithSkipByteOrderMark(true) + f.WithEncoding("UTF8") + + r.Equal(`CREATE FILE FORMAT "test_db"."test_schema"."test_file_format" TYPE = 'CSV' COMPRESSION = 'AUTO' RECORD_DELIMITER = '\n' FIELD_DELIMITER = ',' FILE_EXTENSION = '.CSV' SKIP_HEADER = 1 DATE_FORMAT = 'AUTO' TIME_FORMAT = 'AUTO' TIMESTAMP_FORMAT = 'AUTO' BINARY_FORMAT = 'HEX' ESCAPE = 'None' ESCAPE_UNENCLOSED_FIELD = 'None' FIELD_OPTIONALLY_ENCLOSED_BY = '"' NULL_IF = () ENCODING = 'UTF8' SKIP_BLANK_LINES = true TRIM_SPACE = true ERROR_ON_COLUMN_COUNT_MISMATCH = false REPLACE_INVALID_CHARACTERS = false VALIDATE_UTF8 = false EMPTY_FIELD_AS_NULL = true SKIP_BYTE_ORDER_MARK = true`, f.Create()) +} + +func TestFileFormatCreateJSON(t *testing.T) { + r := require.New(t) + f := FileFormat("test_file_format_json", "test_db", "test_schema") + r.Equal(f.QualifiedName(), `"test_db"."test_schema"."test_file_format_json"`) + + f.WithFormatType("JSON") + f.WithCompression("AUTO") + f.WithDateFormat("AUTO") + f.WithTimeFormat("AUTO") + f.WithTimestampFormat("AUTO") + f.WithBinaryFormat("HEX") + f.WithTrimSpace(true) + f.WithNullIf([]string{"\\n", "NULL"}) + f.WithAllowDuplicate(false) + f.WithStripOuterArray(false) + f.WithStripNullValues(false) + f.WithIgnoreUTF8Errors(true) + + r.Equal(`CREATE FILE FORMAT "test_db"."test_schema"."test_file_format_json" TYPE = 'JSON' COMPRESSION = 'AUTO' DATE_FORMAT = 'AUTO' TIME_FORMAT = 'AUTO' TIMESTAMP_FORMAT = 'AUTO' BINARY_FORMAT = 'HEX' NULL_IF = ('\n', 'NULL') TRIM_SPACE = true ENABLE_OCTAL = false ALLOW_DUPLICATE = false STRIP_OUTER_ARRAY = false STRIP_NULL_VALUES = false REPLACE_INVALID_CHARACTERS = false IGNORE_UTF8_ERRORS = true SKIP_BYTE_ORDER_MARK = false`, f.Create()) +} + +func TestFileFormatChangeComment(t *testing.T) { + r := require.New(t) + f := FileFormat("test_file_format", "test_db", "test_schema") + r.Equal(`ALTER FILE FORMAT "test_db"."test_schema"."test_file_format" SET COMMENT = 'worst format ever'`, f.ChangeComment("worst format ever")) +} + +func TestFileFormatChangeCompression(t *testing.T) { + r := require.New(t) + f := FileFormat("test_file_format", "test_db", "test_schema") + r.Equal(`ALTER FILE FORMAT "test_db"."test_schema"."test_file_format" SET COMPRESSION = 'GZIP'`, f.ChangeCompression("GZIP")) +} + +func TestFileFormatChangeRecordDelimiter(t *testing.T) { + r := require.New(t) + f := FileFormat("test_file_format", "test_db", "test_schema") + r.Equal(`ALTER FILE FORMAT "test_db"."test_schema"."test_file_format" SET RECORD_DELIMITER = '|'`, f.ChangeRecordDelimiter("|")) +} + +func TestFileFormatChangeFieldDelimiter(t *testing.T) { + r := require.New(t) + f := FileFormat("test_file_format", "test_db", "test_schema") + r.Equal(`ALTER FILE FORMAT "test_db"."test_schema"."test_file_format" SET FIELD_DELIMITER = '|'`, f.ChangeFieldDelimiter("|")) +} + +func TestFileFormatChangeFileExtension(t 
*testing.T) { + r := require.New(t) + f := FileFormat("test_file_format", "test_db", "test_schema") + r.Equal(`ALTER FILE FORMAT "test_db"."test_schema"."test_file_format" SET FILE_EXTENSION = '.csv.gz'`, f.ChangeFileExtension(".csv.gz")) +} + +func TestFileFormatChangeDateFormat(t *testing.T) { + r := require.New(t) + f := FileFormat("test_file_format", "test_db", "test_schema") + r.Equal(`ALTER FILE FORMAT "test_db"."test_schema"."test_file_format" SET DATE_FORMAT = 'AUTO'`, f.ChangeDateFormat("AUTO")) +} + +func TestFileFormatChangeTimeFormat(t *testing.T) { + r := require.New(t) + f := FileFormat("test_file_format", "test_db", "test_schema") + r.Equal(`ALTER FILE FORMAT "test_db"."test_schema"."test_file_format" SET TIME_FORMAT = 'AUTO'`, f.ChangeTimeFormat("AUTO")) +} + +func TestFileFormatChangeTimestampFormat(t *testing.T) { + r := require.New(t) + f := FileFormat("test_file_format", "test_db", "test_schema") + r.Equal(`ALTER FILE FORMAT "test_db"."test_schema"."test_file_format" SET TIMESTAMP_FORMAT = 'AUTO'`, f.ChangeTimestampFormat("AUTO")) +} + +func TestFileFormatChangeBinaryFormat(t *testing.T) { + r := require.New(t) + f := FileFormat("test_file_format", "test_db", "test_schema") + r.Equal(`ALTER FILE FORMAT "test_db"."test_schema"."test_file_format" SET BINARY_FORMAT = 'AUTO'`, f.ChangeBinaryFormat("AUTO")) +} + +func TestFileFormatChangeValidateUTF8(t *testing.T) { + r := require.New(t) + f := FileFormat("test_file_format", "test_db", "test_schema") + r.Equal(`ALTER FILE FORMAT "test_db"."test_schema"."test_file_format" SET VALIDATE_UTF8 = true`, f.ChangeValidateUTF8(true)) +} + +func TestFileFormatChangeEmptyFieldAsNull(t *testing.T) { + r := require.New(t) + f := FileFormat("test_file_format", "test_db", "test_schema") + r.Equal(`ALTER FILE FORMAT "test_db"."test_schema"."test_file_format" SET EMPTY_FIELD_AS_NULL = true`, f.ChangeEmptyFieldAsNull(true)) +} + +func TestFileFormatChangeErrorOnColumnCountMismatch(t *testing.T) { + r := require.New(t) + f := FileFormat("test_file_format", "test_db", "test_schema") + r.Equal(`ALTER FILE FORMAT "test_db"."test_schema"."test_file_format" SET ERROR_ON_COLUMN_COUNT_MISMATCH = true`, f.ChangeErrorOnColumnCountMismatch(true)) +} + +func TestFileFormatChangeEscape(t *testing.T) { + r := require.New(t) + f := FileFormat("test_file_format", "test_db", "test_schema") + r.Equal(`ALTER FILE FORMAT "test_db"."test_schema"."test_file_format" SET ESCAPE = 'None'`, f.ChangeEscape("None")) +} + +func TestFileFormatChangeFieldOptionallyEnclosedBy(t *testing.T) { + r := require.New(t) + f := FileFormat("test_file_format", "test_db", "test_schema") + r.Equal(`ALTER FILE FORMAT "test_db"."test_schema"."test_file_format" SET FIELD_OPTIONALLY_ENCLOSED_BY = 'None'`, f.ChangeFieldOptionallyEnclosedBy("None")) +} + +func TestFileFormatChangeEscapeUnenclosedField(t *testing.T) { + r := require.New(t) + f := FileFormat("test_file_format", "test_db", "test_schema") + r.Equal(`ALTER FILE FORMAT "test_db"."test_schema"."test_file_format" SET ESCAPE_UNENCLOSED_FIELD = '\'`, f.ChangeEscapeUnenclosedField("\\")) +} + +func TestFileFormatChangeNullIf(t *testing.T) { + r := require.New(t) + f := FileFormat("test_file_format", "test_db", "test_schema") + r.Equal(`ALTER FILE FORMAT "test_db"."test_schema"."test_file_format" SET NULL_IF = ()`, f.ChangeNullIf([]string{})) +} + +func TestFileFormatChangeEncoding(t *testing.T) { + r := require.New(t) + f := FileFormat("test_file_format", "test_db", "test_schema") + r.Equal(`ALTER FILE FORMAT 
"test_db"."test_schema"."test_file_format" SET ENCODING = 'UTF8'`, f.ChangeEncoding("UTF8")) +} + +func TestFileFormatChangeSkipHeader(t *testing.T) { + r := require.New(t) + f := FileFormat("test_file_format", "test_db", "test_schema") + r.Equal(`ALTER FILE FORMAT "test_db"."test_schema"."test_file_format" SET SKIP_HEADER = 2`, f.ChangeSkipHeader(2)) +} + +func TestFileFormatChangeSkipBlankLines(t *testing.T) { + r := require.New(t) + f := FileFormat("test_file_format", "test_db", "test_schema") + r.Equal(`ALTER FILE FORMAT "test_db"."test_schema"."test_file_format" SET SKIP_BLANK_LINES = true`, f.ChangeSkipBlankLines(true)) +} + +func TestFileFormatChangeTrimSpace(t *testing.T) { + r := require.New(t) + f := FileFormat("test_file_format", "test_db", "test_schema") + r.Equal(`ALTER FILE FORMAT "test_db"."test_schema"."test_file_format" SET TRIM_SPACE = true`, f.ChangeTrimSpace(true)) +} + +func TestFileFormatChangeEnableOctal(t *testing.T) { + r := require.New(t) + f := FileFormat("test_file_format", "test_db", "test_schema") + r.Equal(`ALTER FILE FORMAT "test_db"."test_schema"."test_file_format" SET ENABLE_OCTAL = true`, f.ChangeEnableOctal(true)) +} + +func TestFileFormatChangeAllowDuplicate(t *testing.T) { + r := require.New(t) + f := FileFormat("test_file_format", "test_db", "test_schema") + r.Equal(`ALTER FILE FORMAT "test_db"."test_schema"."test_file_format" SET ALLOW_DUPLICATE = true`, f.ChangeAllowDuplicate(true)) +} + +func TestFileFormatChangeStripOuterArray(t *testing.T) { + r := require.New(t) + f := FileFormat("test_file_format", "test_db", "test_schema") + r.Equal(`ALTER FILE FORMAT "test_db"."test_schema"."test_file_format" SET STRIP_OUTER_ARRAY = false`, f.ChangeStripOuterArray(false)) +} + +func TestFileFormatChangeStripNullValues(t *testing.T) { + r := require.New(t) + f := FileFormat("test_file_format", "test_db", "test_schema") + r.Equal(`ALTER FILE FORMAT "test_db"."test_schema"."test_file_format" SET STRIP_NULL_VALUES = false`, f.ChangeStripNullValues(false)) +} + +func TestFileFormatChangeReplaceInvalidCharacters(t *testing.T) { + r := require.New(t) + f := FileFormat("test_file_format", "test_db", "test_schema") + r.Equal(`ALTER FILE FORMAT "test_db"."test_schema"."test_file_format" SET REPLACE_INVALID_CHARACTERS = false`, f.ChangeReplaceInvalidCharacters(false)) +} + +func TestFileFormatChangeIgnoreUTF8Errors(t *testing.T) { + r := require.New(t) + f := FileFormat("test_file_format", "test_db", "test_schema") + r.Equal(`ALTER FILE FORMAT "test_db"."test_schema"."test_file_format" SET IGNORE_UTF8_ERRORS = false`, f.ChangeIgnoreUTF8Errors(false)) +} + +func TestFileFormatChangeSkipByteOrderMark(t *testing.T) { + r := require.New(t) + f := FileFormat("test_file_format", "test_db", "test_schema") + r.Equal(`ALTER FILE FORMAT "test_db"."test_schema"."test_file_format" SET SKIP_BYTE_ORDER_MARK = false`, f.ChangeSkipByteOrderMark(false)) +} + +func TestFileFormatChangeBinaryAsText(t *testing.T) { + r := require.New(t) + f := FileFormat("test_file_format", "test_db", "test_schema") + r.Equal(`ALTER FILE FORMAT "test_db"."test_schema"."test_file_format" SET BINARY_AS_TEXT = false`, f.ChangeBinaryAsText(false)) +} + +func TestFileFormatChangePreserveSpace(t *testing.T) { + r := require.New(t) + f := FileFormat("test_file_format", "test_db", "test_schema") + r.Equal(`ALTER FILE FORMAT "test_db"."test_schema"."test_file_format" SET PRESERVE_SPACE = false`, f.ChangePreserveSpace(false)) +} + +func TestFileFormatChangeStripOuterElement(t *testing.T) { + r := require.New(t) + f 
:= FileFormat("test_file_format", "test_db", "test_schema") + r.Equal(`ALTER FILE FORMAT "test_db"."test_schema"."test_file_format" SET STRIP_OUTER_ELEMENT = true`, f.ChangeStripOuterElement(true)) +} + +func TestFileFormatChangeDisableSnowflakeData(t *testing.T) { + r := require.New(t) + f := FileFormat("test_file_format", "test_db", "test_schema") + r.Equal(`ALTER FILE FORMAT "test_db"."test_schema"."test_file_format" SET DISABLE_SNOWFLAKE_DATA = true`, f.ChangeDisableSnowflakeData(true)) +} + +func TestFileFormatChangeDisableAutoConvert(t *testing.T) { + r := require.New(t) + f := FileFormat("test_file_format", "test_db", "test_schema") + r.Equal(`ALTER FILE FORMAT "test_db"."test_schema"."test_file_format" SET DISABLE_AUTO_CONVERT = true`, f.ChangeDisableAutoConvert(true)) +} + +func TestFileFormatDrop(t *testing.T) { + r := require.New(t) + f := FileFormat("test_file_format", "test_db", "test_schema") + r.Equal(`DROP FILE FORMAT "test_db"."test_schema"."test_file_format"`, f.Drop()) +} + +func TestFileFormatDescribe(t *testing.T) { + r := require.New(t) + f := FileFormat("test_file_format", "test_db", "test_schema") + r.Equal(`DESCRIBE FILE FORMAT "test_db"."test_schema"."test_file_format"`, f.Describe()) +} + +func TestFileFormatShow(t *testing.T) { + r := require.New(t) + f := FileFormat("test_file_format", "test_db", "test_schema") + r.Equal(`SHOW FILE FORMATS LIKE 'test_file_format' IN SCHEMA "test_db"."test_schema"`, f.Show()) +} diff --git a/pkg/snowflake/generic.go b/pkg/snowflake/generic.go index 513bf1d353..bb3038c881 100644 --- a/pkg/snowflake/generic.go +++ b/pkg/snowflake/generic.go @@ -3,7 +3,6 @@ package snowflake import ( "bytes" "fmt" - "sort" "strings" "text/template" ) @@ -11,15 +10,17 @@ import ( type EntityType string const ( - ApiIntegrationType EntityType = "API INTEGRATION" - DatabaseType EntityType = "DATABASE" - ManagedAccountType EntityType = "MANAGED ACCOUNT" - ResourceMonitorType EntityType = "RESOURCE MONITOR" - RoleType EntityType = "ROLE" - ShareType EntityType = "SHARE" - StorageIntegrationType EntityType = "STORAGE INTEGRATION" - UserType EntityType = "USER" - WarehouseType EntityType = "WAREHOUSE" + ApiIntegrationType EntityType = "API INTEGRATION" + DatabaseType EntityType = "DATABASE" + ManagedAccountType EntityType = "MANAGED ACCOUNT" + ResourceMonitorType EntityType = "RESOURCE MONITOR" + RoleType EntityType = "ROLE" + ShareType EntityType = "SHARE" + StorageIntegrationType EntityType = "STORAGE INTEGRATION" + NotificationIntegrationType EntityType = "NOTIFICATION INTEGRATION" + SecurityIntegrationType EntityType = "SECURITY INTEGRATION" + UserType EntityType = "USER" + WarehouseType EntityType = "WAREHOUSE" ) type Builder struct { @@ -108,24 +109,24 @@ func (ab *AlterPropertiesBuilder) Statement() string { sb.WriteString(ab.rawStatement) - for k, v := range ab.stringProperties { - sb.WriteString(fmt.Sprintf(" %s='%s'", strings.ToUpper(k), EscapeString(v))) + for _, k := range sortStrings(ab.stringProperties) { + sb.WriteString(fmt.Sprintf(" %s='%s'", strings.ToUpper(k), EscapeString(ab.stringProperties[k]))) } - for k, v := range ab.stringListProperties { - sb.WriteString(fmt.Sprintf(" %s=%s", strings.ToUpper(k), formatStringList(v))) + for _, k := range sortStringList(ab.stringListProperties) { + sb.WriteString(fmt.Sprintf(" %s=%s", strings.ToUpper(k), formatStringList(ab.stringListProperties[k]))) } - for k, v := range ab.boolProperties { - sb.WriteString(fmt.Sprintf(" %s=%t", strings.ToUpper(k), v)) + for _, k := range 
sortStringsBool(ab.boolProperties) { + sb.WriteString(fmt.Sprintf(" %s=%t", strings.ToUpper(k), ab.boolProperties[k])) } - for k, v := range ab.intProperties { - sb.WriteString(fmt.Sprintf(" %s=%d", strings.ToUpper(k), v)) + for _, k := range sortStringsInt(ab.intProperties) { + sb.WriteString(fmt.Sprintf(" %s=%d", strings.ToUpper(k), ab.intProperties[k])) } - for k, v := range ab.floatProperties { - sb.WriteString(fmt.Sprintf(" %s=%.2f", strings.ToUpper(k), v)) + for _, k := range sortStringsFloat(ab.floatProperties) { + sb.WriteString(fmt.Sprintf(" %s=%.2f", strings.ToUpper(k), ab.floatProperties[k])) } return sb.String() @@ -186,52 +187,23 @@ func (b *CreateBuilder) Statement() string { sb.WriteString(b.rawStatement) - sortedStringProperties := make([]string, 0) - for k := range b.stringProperties { - sortedStringProperties = append(sortedStringProperties, k) - } - sort.Strings(sortedStringProperties) - - for _, k := range sortedStringProperties { + for _, k := range sortStrings(b.stringProperties) { sb.WriteString(fmt.Sprintf(" %s='%s'", strings.ToUpper(k), EscapeString(b.stringProperties[k]))) } - sortedStringListProperties := make([]string, 0) - for k := range b.stringListProperties { - sortedStringListProperties = append(sortedStringListProperties, k) - } - - for _, k := range sortedStringListProperties { + for _, k := range sortStringList(b.stringListProperties) { sb.WriteString(fmt.Sprintf(" %s=%s", strings.ToUpper(k), formatStringList(b.stringListProperties[k]))) } - sortedBoolProperties := make([]string, 0) - for k := range b.boolProperties { - sortedBoolProperties = append(sortedBoolProperties, k) - } - sort.Strings(sortedBoolProperties) - - for _, k := range sortedBoolProperties { + for _, k := range sortStringsBool(b.boolProperties) { sb.WriteString(fmt.Sprintf(" %s=%t", strings.ToUpper(k), b.boolProperties[k])) } - sortedIntProperties := make([]string, 0) - for k := range b.intProperties { - sortedIntProperties = append(sortedIntProperties, k) - } - sort.Strings(sortedIntProperties) - - for _, k := range sortedIntProperties { + for _, k := range sortStringsInt(b.intProperties) { sb.WriteString(fmt.Sprintf(" %s=%d", strings.ToUpper(k), b.intProperties[k])) } - sortedFloatProperties := make([]string, 0) - for k := range b.floatProperties { - sortedFloatProperties = append(sortedFloatProperties, k) - } - sort.Strings(sortedFloatProperties) - - for _, k := range sortedFloatProperties { + for _, k := range sortStringsFloat(b.floatProperties) { sb.WriteString(fmt.Sprintf(" %s=%.2f", strings.ToUpper(k), b.floatProperties[k])) } diff --git a/pkg/snowflake/notification_integration.go b/pkg/snowflake/notification_integration.go new file mode 100644 index 0000000000..d4c40010b2 --- /dev/null +++ b/pkg/snowflake/notification_integration.go @@ -0,0 +1,38 @@ +package snowflake + +import ( + "database/sql" + + "github.com/jmoiron/sqlx" +) + +// NotificationIntegration returns a pointer to a Builder that abstracts the DDL operations for a notification integration. 
+// +// Supported DDL operations are: +// - CREATE NOTIFICATION INTEGRATION +// - ALTER NOTIFICATION INTEGRATION +// - DROP INTEGRATION +// - SHOW INTEGRATIONS +// - DESCRIBE INTEGRATION +// +// [Snowflake Reference](https://docs.snowflake.com/en/sql-reference/ddl-user-security.html#notification-integrations) +func NotificationIntegration(name string) *Builder { + return &Builder{ + entityType: NotificationIntegrationType, + name: name, + } +} + +type notificationIntegration struct { + Name sql.NullString `db:"name"` + Category sql.NullString `db:"category"` + Type sql.NullString `db:"type"` + CreatedOn sql.NullString `db:"created_on"` + Enabled sql.NullBool `db:"enabled"` +} + +func ScanNotificationIntegration(row *sqlx.Row) (*notificationIntegration, error) { + r := &notificationIntegration{} + err := row.StructScan(r) + return r, err +} diff --git a/pkg/snowflake/notification_integration_test.go b/pkg/snowflake/notification_integration_test.go new file mode 100644 index 0000000000..78c18f9fd3 --- /dev/null +++ b/pkg/snowflake/notification_integration_test.go @@ -0,0 +1,47 @@ +package snowflake_test + +import ( + "testing" + + "github.com/chanzuckerberg/terraform-provider-snowflake/pkg/snowflake" + "github.com/stretchr/testify/require" +) + +func TestNotificationIntegration_Azure(t *testing.T) { + r := require.New(t) + builder := snowflake.NotificationIntegration("azure") + r.NotNil(builder) + + q := builder.Show() + r.Equal("SHOW NOTIFICATION INTEGRATIONS LIKE 'azure'", q) + + c := builder.Create() + + c.SetString(`type`, `QUEUE`) + c.SetString(`azure_storage_queue_primary_uri`, `azure://my-bucket/my-path/`) + c.SetString(`azure_tenant_id`, `some-guid`) + c.SetBool(`enabled`, true) + q = c.Statement() + + r.Equal(`CREATE NOTIFICATION INTEGRATION "azure" AZURE_STORAGE_QUEUE_PRIMARY_URI='azure://my-bucket/my-path/' AZURE_TENANT_ID='some-guid' TYPE='QUEUE' ENABLED=true`, q) +} + +func TestNotificationIntegration_AWS(t *testing.T) { + r := require.New(t) + builder := snowflake.NotificationIntegration("aws_sqs") + r.NotNil(builder) + + q := builder.Show() + r.Equal("SHOW NOTIFICATION INTEGRATIONS LIKE 'aws_sqs'", q) + + c := builder.Create() + + c.SetString(`type`, `QUEUE`) + c.SetString(`direction`, `OUTBOUND`) + c.SetString(`aws_sqs_arn`, `some-sqs-arn`) + c.SetString(`aws_sqs_role_arn`, `some-iam-role-arn`) + c.SetBool(`enabled`, true) + q = c.Statement() + + r.Equal(`CREATE NOTIFICATION INTEGRATION "aws_sqs" AWS_SQS_ARN='some-sqs-arn' AWS_SQS_ROLE_ARN='some-iam-role-arn' DIRECTION='OUTBOUND' TYPE='QUEUE' ENABLED=true`, q) +} diff --git a/pkg/snowflake/pipe.go b/pkg/snowflake/pipe.go index 2cb5ee2771..b0f5f13e14 100644 --- a/pkg/snowflake/pipe.go +++ b/pkg/snowflake/pipe.go @@ -1,6 +1,7 @@ package snowflake import ( + "database/sql" "fmt" "strings" @@ -9,13 +10,15 @@ import ( // PipeBuilder abstracts the creation of SQL queries for a Snowflake schema type PipeBuilder struct { - name string - db string - schema string - autoIngest bool - awsSnsTopicArn string - comment string - copyStatement string + name string + db string + schema string + autoIngest bool + awsSnsTopicArn string + comment string + copyStatement string + integration string + errorIntegration string } // QualifiedName prepends the db and schema if set and escapes everything nicely @@ -63,6 +66,18 @@ func (pb *PipeBuilder) WithCopyStatement(s string) *PipeBuilder { return pb } +// WithIntegration adds Integration specification to the PipeBuilder +func (pb *PipeBuilder) WithIntegration(s string) *PipeBuilder { + pb.integration
= s + return pb +} + +// WithErrorIntegration adds ErrorIntegration specification to the PipeBuilder +func (pb *PipeBuilder) WithErrorIntegration(s string) *PipeBuilder { + pb.errorIntegration = s + return pb +} + // Pipe returns a pointer to a Builder that abstracts the DDL operations for a pipe. // // Supported DDL operations are: @@ -91,6 +106,14 @@ func (pb *PipeBuilder) Create() string { q.WriteString(` AUTO_INGEST = TRUE`) } + if pb.integration != "" { + q.WriteString(fmt.Sprintf(` INTEGRATION = '%v'`, EscapeString(pb.integration))) + } + + if pb.errorIntegration != "" { + q.WriteString(fmt.Sprintf(` ERROR_INTEGRATION = '%v'`, EscapeString(pb.errorIntegration))) + } + if pb.awsSnsTopicArn != "" { q.WriteString(fmt.Sprintf(` AWS_SNS_TOPIC = '%v'`, EscapeString(pb.awsSnsTopicArn))) } @@ -115,6 +138,11 @@ func (pb *PipeBuilder) RemoveComment() string { return fmt.Sprintf(`ALTER PIPE %v UNSET COMMENT`, pb.QualifiedName()) } +// ChangeErrorIntegration returns the SQL query that will update the error_integration on the pipe. +func (pb *PipeBuilder) ChangeErrorIntegration(c string) string { + return fmt.Sprintf(`ALTER PIPE %v SET ERROR_INTEGRATION = %v`, pb.QualifiedName(), EscapeString(c)) +} + // Drop returns the SQL query that will drop a pipe. func (pb *PipeBuilder) Drop() string { return fmt.Sprintf(`DROP PIPE %v`, pb.QualifiedName()) } @@ -126,14 +154,16 @@ func (pb *PipeBuilder) Show() string { } type pipe struct { - Createdon string `db:"created_on"` - Name string `db:"name"` - DatabaseName string `db:"database_name"` - SchemaName string `db:"schema_name"` - Definition string `db:"definition"` - Owner string `db:"owner"` - NotificationChannel *string `db:"notification_channel"` - Comment string `db:"comment"` + Createdon string `db:"created_on"` + Name string `db:"name"` + DatabaseName string `db:"database_name"` + SchemaName string `db:"schema_name"` + Definition string `db:"definition"` + Owner string `db:"owner"` + NotificationChannel *string `db:"notification_channel"` + Comment string `db:"comment"` + Integration sql.NullString `db:"integration"` + ErrorIntegration sql.NullString `db:"error_integration"` } func ScanPipe(row *sqlx.Row) (*pipe, error) { diff --git a/pkg/snowflake/pipe_test.go b/pkg/snowflake/pipe_test.go index 8e5f613761..e19a02050e 100644 --- a/pkg/snowflake/pipe_test.go +++ b/pkg/snowflake/pipe_test.go @@ -24,6 +24,10 @@ func TestPipeCreate(t *testing.T) { s.WithAwsSnsTopicArn("arn:aws:sns:us-east-1:1234567890123456:mytopic") r.Equal(s.Create(), `CREATE PIPE "test_db"."test_schema"."test_pipe" AUTO_INGEST = TRUE AWS_SNS_TOPIC = 'arn:aws:sns:us-east-1:1234567890123456:mytopic' COMMENT = 'Yeehaw' AS test copy statement `) + + s.WithIntegration("myintegration") + r.Equal(s.Create(), `CREATE PIPE "test_db"."test_schema"."test_pipe" AUTO_INGEST = TRUE INTEGRATION = 'myintegration' AWS_SNS_TOPIC = 'arn:aws:sns:us-east-1:1234567890123456:mytopic' COMMENT = 'Yeehaw' AS test copy statement `) + } func TestPipeChangeComment(t *testing.T) { diff --git a/pkg/snowflake/scim_integration.go b/pkg/snowflake/scim_integration.go new file mode 100644 index 0000000000..5039ccb1e8 --- /dev/null +++ b/pkg/snowflake/scim_integration.go @@ -0,0 +1,37 @@ +package snowflake + +import ( + "database/sql" + + "github.com/jmoiron/sqlx" + "github.com/pkg/errors" +) + +// ScimIntegration returns a pointer to a Builder that abstracts the DDL operations for a SCIM security integration.
+// +// Supported DDL operations are: +// - CREATE SECURITY INTEGRATION +// - ALTER SECURITY INTEGRATION +// - DROP INTEGRATION +// - SHOW INTEGRATIONS +// - DESCRIBE INTEGRATION +// +// [Snowflake Reference](https://docs.snowflake.com/en/sql-reference/ddl-user-security.html#security-integrations) +func ScimIntegration(name string) *Builder { + return &Builder{ + entityType: SecurityIntegrationType, + name: name, + } +} + +type scimIntegration struct { + Name sql.NullString `db:"name"` + Category sql.NullString `db:"category"` + IntegrationType sql.NullString `db:"type"` + CreatedOn sql.NullString `db:"created_on"` +} + +func ScanScimIntegration(row *sqlx.Row) (*scimIntegration, error) { + r := &scimIntegration{} + return r, errors.Wrap(row.StructScan(r), "error scanning struct") +} diff --git a/pkg/snowflake/scim_integration_test.go b/pkg/snowflake/scim_integration_test.go new file mode 100644 index 0000000000..91e17f1953 --- /dev/null +++ b/pkg/snowflake/scim_integration_test.go @@ -0,0 +1,38 @@ +package snowflake_test + +import ( + "testing" + + "github.com/chanzuckerberg/terraform-provider-snowflake/pkg/snowflake" + "github.com/stretchr/testify/require" +) + +func TestScimIntegration(t *testing.T) { + r := require.New(t) + builder := snowflake.ScimIntegration("aad_provisioning") + r.NotNil(builder) + + q := builder.Show() + r.Equal("SHOW SECURITY INTEGRATIONS LIKE 'aad_provisioning'", q) + + q = builder.Describe() + r.Equal("DESCRIBE SECURITY INTEGRATION \"aad_provisioning\"", q) + + c := builder.Create() + c.SetRaw(`TYPE=scim`) + c.SetString(`scim_client`, "azure") + c.SetString(`run_as_role`, "AAD_PROVISIONER") + q = c.Statement() + r.Equal(`CREATE SECURITY INTEGRATION "aad_provisioning" TYPE=scim RUN_AS_ROLE='AAD_PROVISIONER' SCIM_CLIENT='azure'`, q) + + d := builder.Alter() + d.SetRaw(`TYPE=scim`) + d.SetString(`scim_client`, "azure") + d.SetString(`run_as_role`, "AAD_PROVISIONER") + d.SetString(`network_policy`, "aad_policy") + q = d.Statement() + r.Equal(`ALTER SECURITY INTEGRATION "aad_provisioning" SET TYPE=scim NETWORK_POLICY='aad_policy' RUN_AS_ROLE='AAD_PROVISIONER' SCIM_CLIENT='azure'`, q) + + e := builder.Drop() + r.Equal(`DROP SECURITY INTEGRATION "aad_provisioning"`, e) +} diff --git a/pkg/snowflake/sequence.go b/pkg/snowflake/sequence.go new file mode 100644 index 0000000000..04925c0e5c --- /dev/null +++ b/pkg/snowflake/sequence.go @@ -0,0 +1,109 @@ +package snowflake + +import ( + "database/sql" + "fmt" + "log" + "strings" + + "github.com/jmoiron/sqlx" + "github.com/pkg/errors" +) + +// Sequence returns a pointer to a SequenceBuilder for a sequence +func Sequence(name, db, schema string) *SequenceBuilder { + return &SequenceBuilder{ + name: name, + db: db, + schema: schema, + increment: 1, + start: 1, + } +} + +type sequence struct { + Name sql.NullString `db:"name"` + DBName sql.NullString `db:"database_name"` + SchemaName sql.NullString `db:"schema_name"` + NextValue sql.NullString `db:"next_value"` + Increment sql.NullString `db:"interval"` + CreatedOn sql.NullString `db:"created_on"` + Owner sql.NullString `db:"owner"` + Comment sql.NullString `db:"comment"` +} + +type SequenceBuilder struct { + name string + db string + schema string + increment int + comment string + start int +} + +// Drop returns the SQL query that will drop a sequence. +func (sb *SequenceBuilder) Drop() string { + return fmt.Sprintf(`DROP SEQUENCE %v`, sb.QualifiedName()) +} + +// Show returns the SQL query that will show a sequence.
+func (sb *SequenceBuilder) Show() string { + return fmt.Sprintf(`SHOW SEQUENCES LIKE '%v' IN SCHEMA "%v"."%v"`, sb.name, sb.db, sb.schema) +} + +func (sb *SequenceBuilder) Create() string { + q := strings.Builder{} + q.WriteString(fmt.Sprintf(`CREATE SEQUENCE %v`, sb.QualifiedName())) + if sb.start != 1 { + q.WriteString(fmt.Sprintf(` START = %d`, sb.start)) + } + if sb.increment != 1 { + q.WriteString(fmt.Sprintf(` INCREMENT = %d`, sb.increment)) + } + if sb.comment != "" { + q.WriteString(fmt.Sprintf(` COMMENT = '%v'`, EscapeString(sb.comment))) + } + return q.String() +} + +func (sb *SequenceBuilder) WithComment(comment string) *SequenceBuilder { + sb.comment = comment + return sb +} + +func (sb *SequenceBuilder) WithIncrement(increment int) *SequenceBuilder { + sb.increment = increment + return sb +} + +func (sb *SequenceBuilder) WithStart(start int) *SequenceBuilder { + sb.start = start + return sb +} + +func (sb *SequenceBuilder) QualifiedName() string { + return fmt.Sprintf(`"%v"."%v"."%v"`, sb.db, sb.schema, sb.name) +} + +func ScanSequence(row *sqlx.Row) (*sequence, error) { + d := &sequence{} + e := row.StructScan(d) + return d, e +} + +func ListSequences(sdb *sqlx.DB) ([]sequence, error) { + stmt := "SHOW SEQUENCES" + rows, err := sdb.Queryx(stmt) + if err != nil { + return nil, err + } + defer rows.Close() + + dbs := []sequence{} + err = sqlx.StructScan(rows, &dbs) + if err == sql.ErrNoRows { + log.Printf("[DEBUG] no sequence found") + return nil, nil + } + return dbs, errors.Wrapf(err, "unable to scan row for %s", stmt) +} diff --git a/pkg/snowflake/sequence_test.go b/pkg/snowflake/sequence_test.go new file mode 100644 index 0000000000..1d0e599521 --- /dev/null +++ b/pkg/snowflake/sequence_test.go @@ -0,0 +1,35 @@ +package snowflake + +import ( + "testing" + + "github.com/stretchr/testify/require" +) + +func TestSequenceCreate(t *testing.T) { + r := require.New(t) + s := Sequence("test_sequence", "test_db", "test_schema") + + r.Equal(`"test_db"."test_schema"."test_sequence"`, s.QualifiedName()) + + r.Equal(`CREATE SEQUENCE "test_db"."test_schema"."test_sequence"`, s.Create()) + + s.WithComment("Test Comment") + r.Equal(`CREATE SEQUENCE "test_db"."test_schema"."test_sequence" COMMENT = 'Test Comment'`, s.Create()) + s.WithIncrement(5) + r.Equal(`CREATE SEQUENCE "test_db"."test_schema"."test_sequence" INCREMENT = 5 COMMENT = 'Test Comment'`, s.Create()) + s.WithStart(26) + r.Equal(`CREATE SEQUENCE "test_db"."test_schema"."test_sequence" START = 26 INCREMENT = 5 COMMENT = 'Test Comment'`, s.Create()) +} + +func TestSequenceDrop(t *testing.T) { + r := require.New(t) + s := Sequence("test_sequence", "test_db", "test_schema") + r.Equal(`DROP SEQUENCE "test_db"."test_schema"."test_sequence"`, s.Drop()) +} + +func TestSequenceShow(t *testing.T) { + r := require.New(t) + s := Sequence("test_sequence", "test_db", "test_schema") + r.Equal(`SHOW SEQUENCES LIKE 'test_sequence' IN SCHEMA "test_db"."test_schema"`, s.Show()) +} diff --git a/pkg/snowflake/sorting.go b/pkg/snowflake/sorting.go new file mode 100644 index 0000000000..cd4babfe7d --- /dev/null +++ b/pkg/snowflake/sorting.go @@ -0,0 +1,50 @@ +package snowflake + +import ( + "sort" +) + +func sortStrings(strs map[string]string) []string { + sortedStringProperties := []string{} + for k := range strs { + sortedStringProperties = append(sortedStringProperties, k) + } + sort.Strings(sortedStringProperties) + return sortedStringProperties +} + +func sortStringList(strs map[string][]string) []string { + sortedStringProperties := 
[]string{} + for k := range strs { + sortedStringProperties = append(sortedStringProperties, k) + } + sort.Strings(sortedStringProperties) + return sortedStringProperties +} + +func sortStringsInt(strs map[string]int) []string { + sortedStringProperties := []string{} + for k := range strs { + sortedStringProperties = append(sortedStringProperties, k) + } + sort.Strings(sortedStringProperties) + return sortedStringProperties +} + +func sortStringsFloat(strs map[string]float64) []string { + sortedStringProperties := []string{} + for k := range strs { + sortedStringProperties = append(sortedStringProperties, k) + } + sort.Strings(sortedStringProperties) + return sortedStringProperties +} + +func sortStringsBool(strs map[string]bool) []string { + sortedStringProperties := []string{} + for k := range strs { + sortedStringProperties = append(sortedStringProperties, k) + } + sort.Strings(sortedStringProperties) + return sortedStringProperties +} diff --git a/pkg/snowflake/sorting_test.go b/pkg/snowflake/sorting_test.go new file mode 100644 index 0000000000..92f40fdb22 --- /dev/null +++ b/pkg/snowflake/sorting_test.go @@ -0,0 +1,19 @@ +package snowflake + +import ( + "testing" + + "github.com/stretchr/testify/require" +) + +func TestSortStrings(t *testing.T) { + r := require.New(t) + + ss := []string{"a", "b", "c"} + + r.Equal(ss, sortStrings(map[string]string{"c": "", "b": "", "a": ""})) + r.Equal(ss, sortStringList(map[string][]string{"c": {}, "b": {}, "a": {}})) + r.Equal(ss, sortStringsInt(map[string]int{"c": 0, "b": 1, "a": 2})) + r.Equal(ss, sortStringsFloat(map[string]float64{"c": 0, "b": 1, "a": 2})) + r.Equal(ss, sortStringsBool(map[string]bool{"c": true, "b": false, "a": true})) +} diff --git a/pkg/snowflake/stream.go b/pkg/snowflake/stream.go index 536b651c77..b4ae420618 100644 --- a/pkg/snowflake/stream.go +++ b/pkg/snowflake/stream.go @@ -10,12 +10,13 @@ import ( // StreamBuilder abstracts the creation of SQL queries for a Snowflake stream type StreamBuilder struct { - name string - db string - schema string - onTable string - appendOnly bool - comment string + name string + db string + schema string + onTable string + appendOnly bool + showInitialRows bool + comment string } // QualifiedName prepends the db and schema if set and escapes everything nicely @@ -50,12 +51,12 @@ func (sb *StreamBuilder) WithOnTable(d string, s string, t string) *StreamBuilde } func (sb *StreamBuilder) WithAppendOnly(b bool) *StreamBuilder { - sb.appendOnly = false - - if b { - sb.appendOnly = b - } + sb.appendOnly = b + return sb +} +func (sb *StreamBuilder) WithShowInitialRows(b bool) *StreamBuilder { + sb.showInitialRows = b return sb } @@ -89,6 +90,8 @@ func (sb *StreamBuilder) Create() string { q.WriteString(fmt.Sprintf(` APPEND_ONLY = %v`, sb.appendOnly)) + q.WriteString(fmt.Sprintf(` SHOW_INITIAL_ROWS = %v`, sb.showInitialRows)) + return q.String() } @@ -113,16 +116,18 @@ func (sb *StreamBuilder) Show() string { } type descStreamRow struct { - CreatedOn sql.NullString `db:"created_on"` - StreamName sql.NullString `db:"name"` - DatabaseName sql.NullString `db:"database_name"` - SchemaName sql.NullString `db:"schema_name"` - Owner sql.NullString `db:"owner"` - Comment sql.NullString `db:"comment"` - TableName sql.NullString `db:"table_name"` - Type sql.NullString `db:"type"` - Stale sql.NullString `db:"stale"` - Mode sql.NullString `db:"mode"` + CreatedOn sql.NullString `db:"created_on"` + StreamName sql.NullString `db:"name"` + DatabaseName sql.NullString `db:"database_name"` + SchemaName 
sql.NullString `db:"schema_name"` + Owner sql.NullString `db:"owner"` + Comment sql.NullString `db:"comment"` + AppendOnly bool `db:"append_only"` + ShowInitialRows bool `db:"show_initial_rows"` + TableName sql.NullString `db:"table_name"` + Type sql.NullString `db:"type"` + Stale sql.NullString `db:"stale"` + Mode sql.NullString `db:"mode"` } func ScanStream(row *sqlx.Row) (*descStreamRow, error) { diff --git a/pkg/snowflake/stream_test.go b/pkg/snowflake/stream_test.go index 9b93f77624..9ebadf6680 100644 --- a/pkg/snowflake/stream_test.go +++ b/pkg/snowflake/stream_test.go @@ -11,13 +11,16 @@ func TestStreamCreate(t *testing.T) { s := Stream("test_stream", "test_db", "test_schema") s.WithOnTable("test_db", "test_schema", "test_target_table") - r.Equal(s.Create(), `CREATE STREAM "test_db"."test_schema"."test_stream" ON TABLE "test_db"."test_schema"."test_target_table" APPEND_ONLY = false`) + r.Equal(s.Create(), `CREATE STREAM "test_db"."test_schema"."test_stream" ON TABLE "test_db"."test_schema"."test_target_table" APPEND_ONLY = false SHOW_INITIAL_ROWS = false`) s.WithComment("Test Comment") - r.Equal(s.Create(), `CREATE STREAM "test_db"."test_schema"."test_stream" ON TABLE "test_db"."test_schema"."test_target_table" COMMENT = 'Test Comment' APPEND_ONLY = false`) + r.Equal(s.Create(), `CREATE STREAM "test_db"."test_schema"."test_stream" ON TABLE "test_db"."test_schema"."test_target_table" COMMENT = 'Test Comment' APPEND_ONLY = false SHOW_INITIAL_ROWS = false`) + + s.WithShowInitialRows(true) + r.Equal(s.Create(), `CREATE STREAM "test_db"."test_schema"."test_stream" ON TABLE "test_db"."test_schema"."test_target_table" COMMENT = 'Test Comment' APPEND_ONLY = false SHOW_INITIAL_ROWS = true`) s.WithAppendOnly(true) - r.Equal(s.Create(), `CREATE STREAM "test_db"."test_schema"."test_stream" ON TABLE "test_db"."test_schema"."test_target_table" COMMENT = 'Test Comment' APPEND_ONLY = true`) + r.Equal(s.Create(), `CREATE STREAM "test_db"."test_schema"."test_stream" ON TABLE "test_db"."test_schema"."test_target_table" COMMENT = 'Test Comment' APPEND_ONLY = true SHOW_INITIAL_ROWS = true`) } func TestStreamChangeComment(t *testing.T) { diff --git a/pkg/snowflake/system_generate_scim_access_token.go b/pkg/snowflake/system_generate_scim_access_token.go new file mode 100644 index 0000000000..fd9be3f92e --- /dev/null +++ b/pkg/snowflake/system_generate_scim_access_token.go @@ -0,0 +1,35 @@ +package snowflake + +import ( + "fmt" + + "github.com/jmoiron/sqlx" +) + +// SystemGenerateSCIMAccessTokenBuilder abstracts calling the SYSTEM$GENERATE_SCIM_ACCESS_TOKEN system function +type SystemGenerateSCIMAccessTokenBuilder struct { + integrationName string +} + +// SystemGenerateSCIMAccessToken returns a pointer to a builder that abstracts calling the SYSTEM$GENERATE_SCIM_ACCESS_TOKEN system function +func SystemGenerateSCIMAccessToken(integrationName string) *SystemGenerateSCIMAccessTokenBuilder { + return &SystemGenerateSCIMAccessTokenBuilder{ + integrationName: integrationName, + } +} + +// Select generates the select statement for obtaining the scim access token +func (pb *SystemGenerateSCIMAccessTokenBuilder) Select() string { + return fmt.Sprintf(`SELECT SYSTEM$GENERATE_SCIM_ACCESS_TOKEN('%v') AS "token"`, pb.integrationName) +} + +type scimAccessToken struct { + Token string `db:"token"` +} + +// ScanSCIMAccessToken converts a scanned row into a scimAccessToken +func ScanSCIMAccessToken(row *sqlx.Row) (*scimAccessToken, error) { + p := &scimAccessToken{} + e := row.StructScan(p) + return p, e +} diff --git
a/pkg/snowflake/system_generate_scim_access_token_test.go b/pkg/snowflake/system_generate_scim_access_token_test.go new file mode 100644 index 0000000000..881912a81a --- /dev/null +++ b/pkg/snowflake/system_generate_scim_access_token_test.go @@ -0,0 +1,14 @@ +package snowflake + +import ( + "testing" + + "github.com/stretchr/testify/require" +) + +func TestSystemGenerateSCIMAccessToken(t *testing.T) { + r := require.New(t) + sb := SystemGenerateSCIMAccessToken("AAD_PROVISIONING") + + r.Equal(sb.Select(), `SELECT SYSTEM$GENERATE_SCIM_ACCESS_TOKEN('AAD_PROVISIONING') AS "token"`) +} diff --git a/pkg/snowflake/system_get_privatelink_config.go b/pkg/snowflake/system_get_privatelink_config.go new file mode 100644 index 0000000000..2fc50de760 --- /dev/null +++ b/pkg/snowflake/system_get_privatelink_config.go @@ -0,0 +1,64 @@ +package snowflake + +import ( + "encoding/json" + + "github.com/jmoiron/sqlx" +) + +func SystemGetPrivateLinkConfigQuery() string { + return `SELECT SYSTEM$GET_PRIVATELINK_CONFIG() AS "config"` +} + +type RawPrivateLinkConfig struct { + Config string `db:"config"` +} + +type privateLinkConfigInternal struct { + AccountName string `json:"privatelink-account-name"` + AwsVpceID string `json:"privatelink-vpce-id,omitempty"` + AzurePrivateLinkServiceID string `json:"privatelink-pls-id,omitempty"` + AccountURL string `json:"privatelink-account-url"` + OSCPURL string `json:"privatelink-ocsp-url,omitempty"` + TypodOSCPURL string `json:"privatelink_ocsp-url,omitempty"` // because Snowflake returns this key for AWS, but we don't have an Azure account to verify against +} + +type PrivateLinkConfig struct { + AccountName string + AwsVpceID string + AzurePrivateLinkServiceID string + AccountURL string + OSCPURL string +} + +func ScanPrivateLinkConfig(row *sqlx.Row) (*RawPrivateLinkConfig, error) { + config := &RawPrivateLinkConfig{} + err := row.StructScan(config) + return config, err +} + +func (r *RawPrivateLinkConfig) GetStructuredConfig() (*PrivateLinkConfig, error) { + config := &privateLinkConfigInternal{} + err := json.Unmarshal([]byte(r.Config), config) + if err != nil { + return nil, err + } + + return config.getPrivateLinkConfig() +} + +func (i *privateLinkConfigInternal) getPrivateLinkConfig() (*PrivateLinkConfig, error) { + config := &PrivateLinkConfig{ + i.AccountName, + i.AwsVpceID, + i.AzurePrivateLinkServiceID, + i.AccountURL, + i.OSCPURL, + } + + if i.TypodOSCPURL != "" { + config.OSCPURL = i.TypodOSCPURL + } + + return config, nil +} diff --git a/pkg/snowflake/system_get_privatelink_config_test.go b/pkg/snowflake/system_get_privatelink_config_test.go new file mode 100644 index 0000000000..2a0e8c4f6f --- /dev/null +++ b/pkg/snowflake/system_get_privatelink_config_test.go @@ -0,0 +1,65 @@ +package snowflake + +import ( + "testing" + + "github.com/stretchr/testify/require" +) + +func TestSystemGetPrivateLinkConfigQuery(t *testing.T) { + r := require.New(t) + sb := SystemGetPrivateLinkConfigQuery() + + r.Equal(sb, `SELECT SYSTEM$GET_PRIVATELINK_CONFIG() AS "config"`) +} + +func TestSystemGetPrivateLinkGetStructuredConfigAws(t *testing.T) { + r := require.New(t) + + raw := &RawPrivateLinkConfig{ + Config: `{"privatelink-account-name":"ab1234.eu-west-1.privatelink","privatelink-vpce-id":"com.amazonaws.vpce.eu-west-1.vpce-svc-123456789abcdef12","privatelink-account-url":"ab1234.eu-west-1.privatelink.snowflakecomputing.com","privatelink_ocsp-url":"ocsp.ab1234.eu-west-1.privatelink.snowflakecomputing.com"}`, + } + + c, e := raw.GetStructuredConfig() + r.Nil(e) + +
r.Equal("ab1234.eu-west-1.privatelink", c.AccountName) + r.Equal("com.amazonaws.vpce.eu-west-1.vpce-svc-123456789abcdef12", c.AwsVpceID) + r.Equal("", c.AzurePrivateLinkServiceID) + r.Equal("ab1234.eu-west-1.privatelink.snowflakecomputing.com", c.AccountURL) + r.Equal("ocsp.ab1234.eu-west-1.privatelink.snowflakecomputing.com", c.OSCPURL) +} + +func TestSystemGetPrivateLinkGetStructuredConfigAwsAsPerDocumentation(t *testing.T) { + r := require.New(t) + + raw := &RawPrivateLinkConfig{ + Config: `{"privatelink-account-name":"ab1234.eu-west-1.privatelink","privatelink-vpce-id":"com.amazonaws.vpce.eu-west-1.vpce-svc-123456789abcdef12","privatelink-account-url":"ab1234.eu-west-1.privatelink.snowflakecomputing.com","privatelink-ocsp-url":"ocsp.ab1234.eu-west-1.privatelink.snowflakecomputing.com"}`, + } + + c, e := raw.GetStructuredConfig() + r.Nil(e) + + r.Equal("ab1234.eu-west-1.privatelink", c.AccountName) + r.Equal("com.amazonaws.vpce.eu-west-1.vpce-svc-123456789abcdef12", c.AwsVpceID) + r.Equal("", c.AzurePrivateLinkServiceID) + r.Equal("ab1234.eu-west-1.privatelink.snowflakecomputing.com", c.AccountURL) + r.Equal("ocsp.ab1234.eu-west-1.privatelink.snowflakecomputing.com", c.OSCPURL) +} + +func TestSystemGetPrivateLinkGetStructuredConfigAzure(t *testing.T) { + r := require.New(t) + + raw := &RawPrivateLinkConfig{ + Config: `{"privatelink-account-name":"ab1234.east-us-2.azure.privatelink","privatelink-pls-id":"sf-pvlinksvc-azeastus2.east-us-2.azure.something","privatelink-account-url":"ab1234.east-us-2.azure.privatelink.snowflakecomputing.com","privatelink_ocsp-url":"ocsp.ab1234.east-us-2.azure.privatelink.snowflakecomputing.com"}`, + } + + c, e := raw.GetStructuredConfig() + r.Nil(e) + + r.Equal("ab1234.east-us-2.azure.privatelink", c.AccountName) + r.Equal("", c.AwsVpceID) + r.Equal("sf-pvlinksvc-azeastus2.east-us-2.azure.something", c.AzurePrivateLinkServiceID) + r.Equal("ab1234.east-us-2.azure.privatelink.snowflakecomputing.com", c.AccountURL) + r.Equal("ocsp.ab1234.east-us-2.azure.privatelink.snowflakecomputing.com", c.OSCPURL) +} diff --git a/pkg/snowflake/system_get_snowflake_platform_info.go b/pkg/snowflake/system_get_snowflake_platform_info.go new file mode 100644 index 0000000000..dd204ab510 --- /dev/null +++ b/pkg/snowflake/system_get_snowflake_platform_info.go @@ -0,0 +1,50 @@ +package snowflake + +import ( + "encoding/json" + + "github.com/jmoiron/sqlx" +) + +func SystemGetSnowflakePlatformInfoQuery() string { + return `SELECT SYSTEM$GET_SNOWFLAKE_PLATFORM_INFO() AS "info"` +} + +type RawSnowflakePlatformInfo struct { + Info string `db:"info"` +} + +type snowflakePlatformInfoInternal struct { + AzureVnetSubnetIds []string `json:"snowflake-vnet-subnet-id,omitempty"` + AwsVpcIds []string `json:"snowflake-vpc-id,omitempty"` +} + +type SnowflakePlatformInfo struct { + AzureVnetSubnetIds []string + AwsVpcIds []string +} + +func ScanSnowflakePlatformInfo(row *sqlx.Row) (*RawSnowflakePlatformInfo, error) { + info := &RawSnowflakePlatformInfo{} + err := row.StructScan(info) + return info, err +} + +func (r *RawSnowflakePlatformInfo) GetStructuredConfig() (*SnowflakePlatformInfo, error) { + info := &snowflakePlatformInfoInternal{} + err := json.Unmarshal([]byte(r.Info), info) + if err != nil { + return nil, err + } + + return info.getSnowflakePlatformInfo() +} + +func (i *snowflakePlatformInfoInternal) getSnowflakePlatformInfo() (*SnowflakePlatformInfo, error) { + config := &SnowflakePlatformInfo{ + i.AzureVnetSubnetIds, + i.AwsVpcIds, + } + + return config, nil +} diff --git 
a/pkg/snowflake/system_get_snowflake_platform_info_test.go b/pkg/snowflake/system_get_snowflake_platform_info_test.go new file mode 100644 index 0000000000..cad7240ee8 --- /dev/null +++ b/pkg/snowflake/system_get_snowflake_platform_info_test.go @@ -0,0 +1,42 @@ +package snowflake + +import ( + "testing" + + "github.com/stretchr/testify/require" +) + +func TestSystemGetSnowflakePlatformInfoQuery(t *testing.T) { + r := require.New(t) + sb := SystemGetSnowflakePlatformInfoQuery() + + r.Equal(sb, `SELECT SYSTEM$GET_SNOWFLAKE_PLATFORM_INFO() AS "info"`) +} + +func TestSystemGetSnowflakePlatformInfoGetStructuredConfigAws(t *testing.T) { + r := require.New(t) + + raw := &RawSnowflakePlatformInfo{ + Info: `{"snowflake-vpc-id": ["vpc-1", "vpc-2"]}`, + } + + c, e := raw.GetStructuredConfig() + r.Nil(e) + + r.Equal([]string{"vpc-1", "vpc-2"}, c.AwsVpcIds) + r.Equal([]string(nil), c.AzureVnetSubnetIds) +} + +func TestSystemGetSnowflakePlatformInfoGetStructuredConfigAzure(t *testing.T) { + r := require.New(t) + + raw := &RawSnowflakePlatformInfo{ + Info: `{"snowflake-vnet-subnet-id": ["/subscription/1/1", "/subscription/1/2"]}`, + } + + c, e := raw.GetStructuredConfig() + r.Nil(e) + + r.Equal([]string{"/subscription/1/1", "/subscription/1/2"}, c.AzureVnetSubnetIds) + r.Equal([]string(nil), c.AwsVpcIds) +} diff --git a/pkg/snowflake/table.go b/pkg/snowflake/table.go index 6d6b1063e3..16b76ee4fa 100644 --- a/pkg/snowflake/table.go +++ b/pkg/snowflake/table.go @@ -3,14 +3,32 @@ package snowflake import ( "database/sql" "fmt" + "sort" + "strconv" "strings" "github.com/jmoiron/sqlx" ) +type PrimaryKey struct { + name string + keys []string +} + +func (pk *PrimaryKey) WithName(name string) *PrimaryKey { + pk.name = name + return pk +} + +func (pk *PrimaryKey) WithKeys(keys []string) *PrimaryKey { + pk.keys = keys + return pk +} + type Column struct { - name string - _type string // type is reserved + name string + _type string // type is reserved + nullable bool } func (c *Column) WithName(name string) *Column { @@ -22,11 +40,66 @@ func (c *Column) WithType(t string) *Column { return c } -func (c *Column) getColumnDefinition() string { +func (c *Column) WithNullable(nullable bool) *Column { + c.nullable = nullable + return c +} + +func (c *Column) getColumnDefinition(withInlineConstraints bool) string { + if c == nil { return "" } - return fmt.Sprintf(`"%v" %v`, EscapeString(c.name), EscapeString(c._type)) + var colDef strings.Builder + colDef.WriteString(fmt.Sprintf(`"%v" %v`, EscapeString(c.name), EscapeString(c._type))) + if withInlineConstraints { + if !c.nullable { + colDef.WriteString(` NOT NULL`) + } + } + + return colDef.String() + +} + +func FlattenTablePrimaryKey(pkds []primaryKeyDescription) []interface{} { + flattened := []interface{}{} + if len(pkds) == 0 { + return flattened + } + + sort.SliceStable(pkds, func(i, j int) bool { + num1, _ := strconv.Atoi(pkds[i].KeySequence.String) + num2, _ := strconv.Atoi(pkds[j].KeySequence.String) + return num1 < num2 + }) + // sort our keys on the key sequence + + flat := map[string]interface{}{} + var keys []string + var name string + var nameSet bool + + for _, pk := range pkds { + // set as empty string; SYS_CONSTRAINT means it was an unnamed constraint + if strings.Contains(pk.ConstraintName.String, "SYS_CONSTRAINT") && !nameSet { + name = "" + nameSet = true + } + if !nameSet { + name = pk.ConstraintName.String + nameSet = true + } + + keys = append(keys, pk.ColumnName.String) + + } + + flat["name"] = name + flat["keys"] = keys + flattened =
append(flattened, flat) + return flattened + } type Columns []Column @@ -39,8 +112,9 @@ func NewColumns(tds []tableDescription) Columns { continue } cs = append(cs, Column{ - name: td.Name.String, - _type: td.Type.String, + name: td.Name.String, + _type: td.Type.String, + nullable: td.IsNullable(), }) } return Columns(cs) @@ -52,17 +126,18 @@ func (c Columns) Flatten() []interface{} { flat := map[string]interface{}{} flat["name"] = col.name flat["type"] = col._type + flat["nullable"] = col.nullable flattened = append(flattened, flat) } return flattened } -func (c Columns) getColumnDefinitions() string { +func (c Columns) getColumnDefinitions(withInlineConstraints bool) string { // TODO(el): verify Snowflake reflects column order back in desc table calls columnDefinitions := []string{} for _, column := range c { - columnDefinitions = append(columnDefinitions, column.getColumnDefinition()) + columnDefinitions = append(columnDefinitions, column.getColumnDefinition(withInlineConstraints)) } // NOTE: intentionally blank leading space @@ -71,11 +146,13 @@ // TableBuilder abstracts the creation of SQL queries for a Snowflake schema type TableBuilder struct { - name string - db string - schema string - columns Columns - comment string + name string + db string + schema string + columns Columns + comment string + clusterBy []string + primaryKey PrimaryKey } // QualifiedName prepends the db and schema if set and escapes everything nicely @@ -111,6 +188,82 @@ func (tb *TableBuilder) WithColumns(c Columns) *TableBuilder { return tb } +// WithClustering adds cluster keys/expressions to TableBuilder +func (tb *TableBuilder) WithClustering(c []string) *TableBuilder { + tb.clusterBy = c + return tb +} + +func (tb *TableBuilder) WithPrimaryKey(pk PrimaryKey) *TableBuilder { + tb.primaryKey = pk + return tb +} + +// GetClusterKeyString returns the table's cluster keys/expressions as a comma-separated string +func (tb *TableBuilder) GetClusterKeyString() string { + + return JoinStringList(tb.clusterBy[:], ", ") +} + +func JoinStringList(instrings []string, delimiter string) string { + + return fmt.Sprint(strings.Join(instrings[:], delimiter)) + +} + +func quoteStringList(instrings []string) []string { + var clean []string + for _, word := range instrings { + quoted := fmt.Sprintf(`"%s"`, word) + clean = append(clean, quoted) + + } + return clean + +} + +func (tb *TableBuilder) getCreateStatementBody() string { + var q strings.Builder + + colDef := tb.columns.getColumnDefinitions(true) + + if len(tb.primaryKey.keys) > 0 { + colDef = strings.TrimSuffix(colDef, ")") // strip the trailing parenthesis + q.WriteString(colDef) + if tb.primaryKey.name != "" { + q.WriteString(fmt.Sprintf(` ,CONSTRAINT "%v" PRIMARY KEY(%v)`, tb.primaryKey.name, JoinStringList(quoteStringList(tb.primaryKey.keys), ","))) + + } else { + q.WriteString(fmt.Sprintf(` ,PRIMARY KEY(%v)`, JoinStringList(quoteStringList(tb.primaryKey.keys), ","))) + } + + q.WriteString(")") // add the closing parenthesis + } else { + q.WriteString(colDef) + } + + return q.String() +} + +// ClusterStatementToList takes the literal Snowflake cluster statement returned from SHOW TABLES and converts it to a list of keys.
+func ClusterStatementToList(clusterStatement string) []string { + if clusterStatement == "" { + return nil + } + + cleanStatement := strings.TrimSuffix(strings.Replace(clusterStatement, "LINEAR(", "", 1), ")") + // remove cluster statement and trailing parenthesis + + var clean []string + + for _, s := range strings.Split(cleanStatement, ",") { + clean = append(clean, strings.TrimSpace(s)) + } + + return clean + +} + // Table returns a pointer to a Builder that abstracts the DDL operations for a table. // // Supported DDL operations are: @@ -146,27 +299,39 @@ func TableWithColumnDefinitions(name, db, schema string, columns Columns) *Table func (tb *TableBuilder) Create() string { q := strings.Builder{} q.WriteString(fmt.Sprintf(`CREATE TABLE %v`, tb.QualifiedName())) - q.WriteString(tb.columns.getColumnDefinitions()) + q.WriteString(tb.getCreateStatementBody()) if tb.comment != "" { q.WriteString(fmt.Sprintf(` COMMENT = '%v'`, EscapeString(tb.comment))) } + if tb.clusterBy != nil { + // add the optional clustering statement + q.WriteString(fmt.Sprintf(` CLUSTER BY LINEAR(%v)`, tb.GetClusterKeyString())) + + } + return q.String() } +// ChangeClusterBy returns the SQL query to change clustering on the table +func (tb *TableBuilder) ChangeClusterBy(cb string) string { + return fmt.Sprintf(`ALTER TABLE %v CLUSTER BY LINEAR(%v)`, tb.QualifiedName(), cb) +} + // ChangeComment returns the SQL query that will update the comment on the table. func (tb *TableBuilder) ChangeComment(c string) string { return fmt.Sprintf(`ALTER TABLE %v SET COMMENT = '%v'`, tb.QualifiedName(), EscapeString(c)) } // AddColumn returns the SQL query that will add a new column to the table. -func (tb *TableBuilder) AddColumn(name string, dataType string) string { col := Column{ - name: name, - _type: dataType, +func (tb *TableBuilder) AddColumn(name string, dataType string, nullable bool) string { col := Column{ + name: name, + _type: dataType, + nullable: nullable, } - return fmt.Sprintf(`ALTER TABLE %s ADD COLUMN %s`, tb.QualifiedName(), col.getColumnDefinition()) + return fmt.Sprintf(`ALTER TABLE %s ADD COLUMN %s`, tb.QualifiedName(), col.getColumnDefinition(true)) } // DropColumn returns the SQL query that will drop a column from the table. @@ -180,7 +345,7 @@ func (tb *TableBuilder) ChangeColumnType(name string, dataType string) string { name: name, _type: dataType, } - return fmt.Sprintf(`ALTER TABLE %s MODIFY COLUMN %s`, tb.QualifiedName(), col.getColumnDefinition()) + return fmt.Sprintf(`ALTER TABLE %s MODIFY COLUMN %s`, tb.QualifiedName(), col.getColumnDefinition(false)) } // RemoveComment returns the SQL query that will remove the comment on the table.
@@ -188,6 +353,33 @@ func (tb *TableBuilder) RemoveComment() string { return fmt.Sprintf(`ALTER TABLE %v UNSET COMMENT`, tb.QualifiedName()) } +// ChangeNullConstraint returns the SQL query to set or unset a NOT NULL constraint on a column +func (tb *TableBuilder) ChangeNullConstraint(name string, nullable bool) string { + if nullable { + return fmt.Sprintf(`ALTER TABLE %s MODIFY COLUMN "%s" DROP NOT NULL`, tb.QualifiedName(), name) + } else { + return fmt.Sprintf(`ALTER TABLE %s MODIFY COLUMN "%s" SET NOT NULL`, tb.QualifiedName(), name) + } +} + +func (tb *TableBuilder) ChangePrimaryKey(newPk PrimaryKey) string { + tb.WithPrimaryKey(newPk) + pks := JoinStringList(quoteStringList(newPk.keys), ", ") + if tb.primaryKey.name != "" { + return fmt.Sprintf(`ALTER TABLE %s ADD CONSTRAINT "%v" PRIMARY KEY(%v)`, tb.QualifiedName(), tb.primaryKey.name, pks) + } + return fmt.Sprintf(`ALTER TABLE %s ADD PRIMARY KEY(%v)`, tb.QualifiedName(), pks) +} + +func (tb *TableBuilder) DropPrimaryKey() string { + return fmt.Sprintf(`ALTER TABLE %s DROP PRIMARY KEY`, tb.QualifiedName()) +} + +// DropClustering returns the SQL query that will remove data clustering from the table +func (tb *TableBuilder) DropClustering() string { + return fmt.Sprintf(`ALTER TABLE %v DROP CLUSTERING KEY`, tb.QualifiedName()) +} + // Drop returns the SQL query that will drop a table. func (tb *TableBuilder) Drop() string { return fmt.Sprintf(`DROP TABLE %v`, tb.QualifiedName()) } @@ -202,6 +394,10 @@ func (tb *TableBuilder) ShowColumns() string { return fmt.Sprintf(`DESC TABLE %s`, tb.QualifiedName()) } +func (tb *TableBuilder) ShowPrimaryKeys() string { + return fmt.Sprintf(`SHOW PRIMARY KEYS IN TABLE %s`, tb.QualifiedName()) +} + type table struct { CreatedOn sql.NullString `db:"created_on"` TableName sql.NullString `db:"name"` @@ -225,9 +421,24 @@ func ScanTable(row *sqlx.Row) (*table, error) { } type tableDescription struct { - Name sql.NullString `db:"name"` - Type sql.NullString `db:"type"` - Kind sql.NullString `db:"kind"` + Name sql.NullString `db:"name"` + Type sql.NullString `db:"type"` + Kind sql.NullString `db:"kind"` + Nullable sql.NullString `db:"null?"` +} + +func (td *tableDescription) IsNullable() bool { + if td.Nullable.String == "Y" { + return true + } else { + return false + } +} + +type primaryKeyDescription struct { + ColumnName sql.NullString `db:"column_name"` + KeySequence sql.NullString `db:"key_sequence"` + ConstraintName sql.NullString `db:"constraint_name"` } func ScanTableDescription(rows *sqlx.Rows) ([]tableDescription, error) { @@ -242,3 +453,16 @@ } return tds, rows.Err() } + +func ScanPrimaryKeyDescription(rows *sqlx.Rows) ([]primaryKeyDescription, error) { + pkds := []primaryKeyDescription{} + for rows.Next() { + pk := primaryKeyDescription{} + err := rows.StructScan(&pk) + if err != nil { + return nil, err + } + pkds = append(pkds, pk) + } + return pkds, rows.Err() +} diff --git a/pkg/snowflake/table_test.go b/pkg/snowflake/table_test.go index 8a17e40163..fa2d1320a2 100644 --- a/pkg/snowflake/table_test.go +++ b/pkg/snowflake/table_test.go @@ -11,12 +11,14 @@ func TestTableCreate(t *testing.T) { s := Table("test_table", "test_db", "test_schema") cols := []Column{ { - name: "column1", - _type: "OBJECT", + name: "column1", + _type: "OBJECT", + nullable: true, }, { - name: "column2", - _type: "VARCHAR", + name: "column2", + _type: "VARCHAR", + nullable: true, }, } @@ -27,6 +29,12 @@ s.WithComment("Test Comment") r.Equal(s.Create(),
`CREATE TABLE "test_db"."test_schema"."test_table" ("column1" OBJECT, "column2" VARCHAR) COMMENT = 'Test Comment'`) + + s.WithClustering([]string{"column1"}) + r.Equal(s.Create(), `CREATE TABLE "test_db"."test_schema"."test_table" ("column1" OBJECT, "column2" VARCHAR) COMMENT = 'Test Comment' CLUSTER BY LINEAR(column1)`) + + s.WithPrimaryKey(PrimaryKey{name: "MY_KEY", keys: []string{"column1"}}) + r.Equal(s.Create(), `CREATE TABLE "test_db"."test_schema"."test_table" ("column1" OBJECT, "column2" VARCHAR ,CONSTRAINT "MY_KEY" PRIMARY KEY("column1")) COMMENT = 'Test Comment' CLUSTER BY LINEAR(column1)`) } func TestTableChangeComment(t *testing.T) { @@ -44,7 +52,7 @@ func TestTableRemoveComment(t *testing.T) { func TestTableAddColumn(t *testing.T) { r := require.New(t) s := Table("test_table", "test_db", "test_schema") - r.Equal(s.AddColumn("new_column", "VARIANT"), `ALTER TABLE "test_db"."test_schema"."test_table" ADD COLUMN "new_column" VARIANT`) + r.Equal(s.AddColumn("new_column", "VARIANT", true), `ALTER TABLE "test_db"."test_schema"."test_table" ADD COLUMN "new_column" VARIANT`) } func TestTableDropColumn(t *testing.T) { @@ -59,6 +67,18 @@ func TestTableChangeColumnType(t *testing.T) { r.Equal(s.ChangeColumnType("old_column", "BIGINT"), `ALTER TABLE "test_db"."test_schema"."test_table" MODIFY COLUMN "old_column" BIGINT`) } +func TestTableChangeClusterBy(t *testing.T) { + r := require.New(t) + s := Table("test_table", "test_db", "test_schema") + r.Equal(s.ChangeClusterBy("column2, column3"), `ALTER TABLE "test_db"."test_schema"."test_table" CLUSTER BY LINEAR(column2, column3)`) +} + +func TestTableDropClusterBy(t *testing.T) { + r := require.New(t) + s := Table("test_table", "test_db", "test_schema") + r.Equal(s.DropClustering(), `ALTER TABLE "test_db"."test_schema"."test_table" DROP CLUSTERING KEY`) +} + func TestTableDrop(t *testing.T) { r := require.New(t) s := Table("test_table", "test_db", "test_schema") @@ -70,3 +90,27 @@ func TestTableShow(t *testing.T) { s := Table("test_table", "test_db", "test_schema") r.Equal(s.Show(), `SHOW TABLES LIKE 'test_table' IN SCHEMA "test_db"."test_schema"`) } + +func TestTableShowPrimaryKeys(t *testing.T) { + r := require.New(t) + s := Table("test_table", "test_db", "test_schema") + r.Equal(s.ShowPrimaryKeys(), `SHOW PRIMARY KEYS IN TABLE "test_db"."test_schema"."test_table"`) +} + +func TestTableDropPrimaryKeys(t *testing.T) { + r := require.New(t) + s := Table("test_table", "test_db", "test_schema") + r.Equal(s.DropPrimaryKey(), `ALTER TABLE "test_db"."test_schema"."test_table" DROP PRIMARY KEY`) +} + +func TestTableChangePrimaryKeysWithConstraintName(t *testing.T) { + r := require.New(t) + s := Table("test_table", "test_db", "test_schema") + r.Equal(s.ChangePrimaryKey(PrimaryKey{name: "MY_KEY", keys: []string{"column1", "column2"}}), `ALTER TABLE "test_db"."test_schema"."test_table" ADD CONSTRAINT "MY_KEY" PRIMARY KEY("column1", "column2")`) +} + +func TestTableChangePrimaryKeysWithoutConstraintName(t *testing.T) { + r := require.New(t) + s := Table("test_table", "test_db", "test_schema") + r.Equal(s.ChangePrimaryKey(PrimaryKey{name: "", keys: []string{"column1", "column2"}}), `ALTER TABLE "test_db"."test_schema"."test_table" ADD PRIMARY KEY("column1", "column2")`) +} diff --git a/pkg/snowflake/user.go b/pkg/snowflake/user.go index ecd4605712..c213cabd1b 100644 --- a/pkg/snowflake/user.go +++ b/pkg/snowflake/user.go @@ -33,3 +33,8 @@ func ScanUser(row *sqlx.Row) (*user, error) { err := row.StructScan(r) return r, err } + +type DescribeUserProp 
struct { + Property string `db:"property"` + Value sql.NullString `db:"value"` +} diff --git a/pkg/snowflake/warehouse.go b/pkg/snowflake/warehouse.go index 8ed466ab77..25b344569d 100644 --- a/pkg/snowflake/warehouse.go +++ b/pkg/snowflake/warehouse.go @@ -16,35 +16,38 @@ func Warehouse(name string) *Builder { // warehouse is a go representation of a grant that can be used in conjunction // with github.com/jmoiron/sqlx type warehouse struct { - Name string `db:"name"` - State string `db:"state"` - Type string `db:"type"` - Size string `db:"size"` - MinClusterCount int64 `db:"min_cluster_count"` - MaxClusterCount int64 `db:"max_cluster_count"` - StartedClusters int64 `db:"started_clusters"` - Running int64 `db:"running"` - Queued int64 `db:"queued"` - IsDefault string `db:"is_default"` - IsCurrent string `db:"is_current"` - AutoSuspend int64 `db:"auto_suspend"` - AutoResume bool `db:"auto_resume"` - Available string `db:"available"` - Provisioning string `db:"provisioning"` - Quiescing string `db:"quiescing"` - Other string `db:"other"` - CreatedOn time.Time `db:"created_on"` - ResumedOn time.Time `db:"resumed_on"` - UpdatedOn time.Time `db:"updated_on"` - Owner string `db:"owner"` - Comment string `db:"comment"` - ResourceMonitor string `db:"resource_monitor"` - Actives int64 `db:"actives"` - Pendings int64 `db:"pendings"` - Failed int64 `db:"failed"` - Suspended int64 `db:"suspended"` - UUID string `db:"uuid"` - ScalingPolicy string `db:"scaling_policy"` + Name string `db:"name"` + State string `db:"state"` + Type string `db:"type"` + Size string `db:"size"` + MinClusterCount int64 `db:"min_cluster_count"` + MaxClusterCount int64 `db:"max_cluster_count"` + StartedClusters int64 `db:"started_clusters"` + Running int64 `db:"running"` + Queued int64 `db:"queued"` + IsDefault string `db:"is_default"` + IsCurrent string `db:"is_current"` + AutoSuspend int64 `db:"auto_suspend"` + AutoResume bool `db:"auto_resume"` + Available string `db:"available"` + Provisioning string `db:"provisioning"` + Quiescing string `db:"quiescing"` + Other string `db:"other"` + CreatedOn time.Time `db:"created_on"` + ResumedOn time.Time `db:"resumed_on"` + UpdatedOn time.Time `db:"updated_on"` + Owner string `db:"owner"` + Comment string `db:"comment"` + ResourceMonitor string `db:"resource_monitor"` + StatementTimeoutInSeconds int64 `db:"statement_timeout_in_seconds"` + StatementQueuedTimeoutInSeconds int64 `db:"statement_queued_timeout_in_seconds"` + MaxConcurrencyLevel int64 `db:"max_concurrency_level"` + Actives int64 `db:"actives"` + Pendings int64 `db:"pendings"` + Failed int64 `db:"failed"` + Suspended int64 `db:"suspended"` + UUID string `db:"uuid"` + ScalingPolicy string `db:"scaling_policy"` } func ScanWarehouse(row *sqlx.Row) (*warehouse, error) { diff --git a/templates/index.md.tmpl b/templates/index.md.tmpl index 8cae55e0cf..67b932d53c 100644 --- a/templates/index.md.tmpl +++ b/templates/index.md.tmpl @@ -25,6 +25,7 @@ The Snowflake provider support multiple ways to authenticate: * Password * OAuth Access Token +* OAuth Refresh Token * Browser Auth * Private Key @@ -59,6 +60,20 @@ export SNOWFLAKE_OAUTH_ACCESS_TOKEN='...' Note that once this access token expires, you'll need to request a new one through an external application. +### OAuth Refresh Token + +If you have an OAuth Refresh token, export these credentials as environment variables: + +```shell +export SNOWFLAKE_OAUTH_REFRESH_TOKEN='...' +export SNOWFLAKE_OAUTH_CLIENT_ID='...' +export SNOWFLAKE_OAUTH_CLIENT_SECRET='...' 
+export SNOWFLAKE_OAUTH_ENDPOINT='...' +export SNOWFLAKE_OAUTH_REDIRECT_URL='https://localhost.com' +``` + +Note that access tokens are short-lived (typically about 10 minutes); when a refresh token is supplied, a new access token will be generated automatically. + ### Username and Password Environment Variables If you choose to use Username and Password Authentication, export these credentials: @@ -82,10 +97,24 @@ In addition to [generic `provider` arguments](https://www.terraform.io/docs/conf * `password` - (optional) Password for username+password auth. Cannot be used with `browser_auth` or `private_key_path`. Can be source from `SNOWFLAKE_PASSWORD` environment variable. * `oauth_access_token` - (optional) Token for use with OAuth. Generating the token is left to other - tools. Cannot be used with `browser_auth`, `private_key_path` or `password`. Can be source from - `SNOWFLAKE_OAUTH_ACCESS_TOKEN` environment variable. + tools. Cannot be used with `browser_auth`, `private_key_path`, `oauth_refresh_token` or `password`. + Can be sourced from `SNOWFLAKE_OAUTH_ACCESS_TOKEN` environment variable. +* `oauth_refresh_token` - (optional) Token for use with OAuth. Setup and generation of the token is + left to other tools. Should be used in conjunction with `oauth_client_id`, `oauth_client_secret`, + `oauth_endpoint`, `oauth_redirect_url`. Cannot be used with `browser_auth`, `private_key_path`, + `oauth_access_token` or `password`. Can be sourced from `SNOWFLAKE_OAUTH_REFRESH_TOKEN` environment + variable. +* `oauth_client_id` - (optional) Required when `oauth_refresh_token` is used. Can be sourced from + `SNOWFLAKE_OAUTH_CLIENT_ID` environment variable. +* `oauth_client_secret` - (optional) Required when `oauth_refresh_token` is used. Can be sourced from + `SNOWFLAKE_OAUTH_CLIENT_SECRET` environment variable. +* `oauth_endpoint` - (optional) Required when `oauth_refresh_token` is used. Can be sourced from + `SNOWFLAKE_OAUTH_ENDPOINT` environment variable. +* `oauth_redirect_url` - (optional) Required when `oauth_refresh_token` is used. Can be sourced from + `SNOWFLAKE_OAUTH_REDIRECT_URL` environment variable. * `private_key_path` - (optional) Path to a private key for using keypair authentication.. Cannot be used with `browser_auth`, `oauth_access_token` or `password`. Can be source from `SNOWFLAKE_PRIVATE_KEY_PATH` environment variable. * `role` - (optional) Snowflake role to use for operations. If left unset, default role for user will be used. Can come from the `SNOWFLAKE_ROLE` environment variable. +
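The usage sketches below are editorial notes, not part of the patch. First, the new `SequenceBuilder` from `pkg/snowflake/sequence.go`: `START` and `INCREMENT` are only emitted when they differ from the default of 1. The sequence, database, and schema names here are hypothetical.

```go
package main

import (
	"fmt"

	"github.com/chanzuckerberg/terraform-provider-snowflake/pkg/snowflake"
)

func main() {
	// Chain the setters, then render the CREATE SEQUENCE statement.
	s := snowflake.Sequence("order_id_seq", "test_db", "test_schema").
		WithStart(100).
		WithIncrement(10).
		WithComment("order ids")

	fmt.Println(s.Create())
	// CREATE SEQUENCE "test_db"."test_schema"."order_id_seq" START = 100 INCREMENT = 10 COMMENT = 'order ids'
}
```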
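The `sortStrings*` helpers in `pkg/snowflake/sorting.go` give `Statement()` a deterministic property order (string properties sorted by key, then list, bool, int, and float properties), which is what the notification-integration tests assert. A sketch reusing the property names from the AWS test above:

```go
package main

import (
	"fmt"

	"github.com/chanzuckerberg/terraform-provider-snowflake/pkg/snowflake"
)

func main() {
	c := snowflake.NotificationIntegration("aws_sqs").Create()
	c.SetString("type", "QUEUE")
	c.SetString("direction", "OUTBOUND")
	c.SetString("aws_sqs_arn", "some-sqs-arn")
	c.SetString("aws_sqs_role_arn", "some-iam-role-arn")
	c.SetBool("enabled", true)

	// Keys are upper-cased and emitted in sorted order, booleans last
	// (expected output, wrapped here for readability):
	// CREATE NOTIFICATION INTEGRATION "aws_sqs" AWS_SQS_ARN='some-sqs-arn'
	//   AWS_SQS_ROLE_ARN='some-iam-role-arn' DIRECTION='OUTBOUND' TYPE='QUEUE' ENABLED=true
	fmt.Println(c.Statement())
}
```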
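For the pipe changes, a sketch assuming the existing `Pipe(name, db, schema)` constructor (its signature is not shown in this diff) and hypothetical integration and copy-statement values. Note that `Create()` quotes `ERROR_INTEGRATION` as a string literal, while `ChangeErrorIntegration` emits the value unquoted.

```go
package main

import (
	"fmt"

	"github.com/chanzuckerberg/terraform-provider-snowflake/pkg/snowflake"
)

func main() {
	p := snowflake.Pipe("test_pipe", "test_db", "test_schema").
		WithCopyStatement("COPY INTO t FROM @stage"). // hypothetical copy statement
		WithErrorIntegration("my_error_integration")  // hypothetical notification integration name

	// ERROR_INTEGRATION is emitted after AUTO_INGEST and before AWS_SNS_TOPIC / COMMENT.
	fmt.Println(p.Create())

	fmt.Println(p.ChangeErrorIntegration("my_error_integration"))
	// ALTER PIPE "test_db"."test_schema"."test_pipe" SET ERROR_INTEGRATION = my_error_integration
}
```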
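Last, the extended `TableBuilder`: inline `NOT NULL` constraints, an optional named primary-key constraint, and linear clustering all flow into `Create()`. A sketch with hypothetical table and column names; the expected statement in the comment follows `getCreateStatementBody` above, which splices the primary-key clause inside the column list's closing parenthesis.

```go
package main

import (
	"fmt"

	"github.com/chanzuckerberg/terraform-provider-snowflake/pkg/snowflake"
)

func main() {
	id := (&snowflake.Column{}).WithName("id").WithType("NUMBER(38,0)").WithNullable(false)
	name := (&snowflake.Column{}).WithName("name").WithType("VARCHAR").WithNullable(true)
	pk := (&snowflake.PrimaryKey{}).WithName("pk_orders").WithKeys([]string{"id"})

	t := snowflake.Table("orders", "test_db", "test_schema").
		WithColumns(snowflake.Columns{*id, *name}).
		WithPrimaryKey(*pk).
		WithClustering([]string{"id"})

	fmt.Println(t.Create())
	// Expected (wrapped here for readability):
	// CREATE TABLE "test_db"."test_schema"."orders" ("id" NUMBER(38,0) NOT NULL, "name" VARCHAR
	//   ,CONSTRAINT "pk_orders" PRIMARY KEY("id")) CLUSTER BY LINEAR(id)
}
```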