fix: formatting and linting (#12)
- fixes linting issues
- starts adding unit tests
davenportjw authored Mar 24, 2023
1 parent 8a4a72b commit 5e55357
Showing 19 changed files with 222 additions and 130 deletions.
35 changes: 35 additions & 0 deletions .kitchen.yml
@@ -0,0 +1,35 @@
# Copyright 2022 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

---
provisioner:
name: terraform

platforms:
- name: local

verifier:
name: terraform
systems:
- name: system
backend: gcp

suites:
- name: full
driver:
name: terraform
command_timeout: 1800
root_module_directory: test/fixtures/full
# setting version verification to false since it requires TF to be less than v1.1
verify_version: false
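
The `full` suite points Kitchen-Terraform at `test/fixtures/full` as its root module, and that fixture is not part of this diff. A rough sketch of what such a fixture might contain, assuming it simply wraps the repository's root module and takes a project from the test setup (all names here are illustrative):

```hcl
# test/fixtures/full/main.tf (hypothetical fixture, not part of this commit)
variable "project_id" {
  type = string
}

module "analytics_lakehouse" {
  source     = "../../.."     # repository root, where the module under test lives
  project_id = var.project_id # supplied by the test setup
  region     = "us-central1"
}
```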
15 changes: 12 additions & 3 deletions README.md
@@ -39,14 +39,23 @@ Functional examples are included in the

| Name | Description | Type | Default | Required |
|------|-------------|------|---------|:--------:|
| bucket\_name | The name of the bucket to create | `string` | n/a | yes |
| project\_id | The project ID to deploy to | `string` | n/a | yes |
| deletion\_protection | Whether or not to protect GCS resources from deletion when the solution is modified or changed. | `string` | `true` | no |
| enable\_apis | Whether or not to enable the underlying APIs in this solution. | `string` | `true` | no |
| force\_destroy | Whether or not to protect BigQuery resources from deletion when the solution is modified or changed. | `string` | `true` | no |
| labels | A map of labels to apply to contained resources. | `map(string)` | <pre>{<br> "edw-bigquery": true<br>}</pre> | no |
| project\_id | Google Cloud Project ID | `string` | n/a | yes |
| public\_data\_bucket | Public Data bucket for access | `string` | `"data-analytics-demos"` | no |
| region | Google Cloud Region | `string` | `"us-central1"` | no |
| use\_case\_short | Short name for use case | `string` | `"lakehouse"` | no |

## Outputs

| Name | Description |
|------|-------------|
| bucket\_name | Name of the bucket |
| call\_workflows\_create\_iceberg\_table | Output of the Iceberg tables workflow |
| call\_workflows\_create\_views\_and\_others | Output of the create view workflow |
| workflow\_return\_bucket\_copy | Output of the bucket copy workflow |
| workflow\_return\_create\_bq\_tables | Output of the create BigQuery tables workflow |

<!-- END OF PRE-COMMIT-TERRAFORM DOCS HOOK -->

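Taken together, the input and output tables above imply a call shaped roughly like the following. This is a sketch based only on the documented variables; the module source is a placeholder, not the blueprint's published address:

```hcl
module "analytics_lakehouse" {
  # Placeholder source: point this at the module's actual registry or Git location.
  source = "../analytics-lakehouse"

  project_id = "my-project-id" # required: Google Cloud Project ID
  region     = "us-central1"   # optional: matches the documented default

  labels = {
    "edw-bigquery" = true
  }
}

# Surface one of the documented outputs from the calling configuration.
output "lakehouse_bucket_name" {
  value = module.analytics_lakehouse.bucket_name
}
```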
2 changes: 1 addition & 1 deletion assets/yaml/bucket_copy.yaml
@@ -46,4 +46,4 @@ main:
destinationBucket: ${dest_bucket}
# destinationObject: ${object.name}
- finish:
return: ${copied_objects}
return: ${copied_objects}
2 changes: 1 addition & 1 deletion assets/yaml/dataplex_create_taxonomy.yaml
@@ -15,4 +15,4 @@ main:
displayName: "my taxonomy display"
result: operation
- returnResult:
return: ${operation}
return: ${operation}
2 changes: 1 addition & 1 deletion assets/yaml/workflow_create_gcp_lakehouse_tables.yaml
@@ -1,6 +1,6 @@
- assignStep:
assign:
- results: {}
- results: {}
- map:
gcp_tbl_order_items: $${"CREATE OR REPLACE EXTERNAL TABLE `gcp_lakehouse_ds.gcp_tbl_order_items` WITH CONNECTION `us-central1.gcp_lakehouse_connection` OPTIONS(format ='Parquet', uris = ['gs://gcp-lakehouse-edw-export-" + sys.get_env("GOOGLE_CLOUD_PROJECT_ID") + "/thelook_ecommerce/order_items-0*.Parquet'], max_staleness = INTERVAL 30 MINUTE, metadata_cache_mode = 'AUTOMATIC');"}
gcp_tbl_orders: $${"CREATE OR REPLACE EXTERNAL TABLE `gcp_lakehouse_ds.gcp_tbl_orders` WITH CONNECTION `us-central1.gcp_lakehouse_connection` OPTIONS(format ='Parquet', uris = ['gs://gcp-lakehouse-edw-export-" + sys.get_env("GOOGLE_CLOUD_PROJECT_ID") + "/thelook_ecommerce/orders-*.Parquet'], max_staleness = INTERVAL 30 MINUTE, metadata_cache_mode = 'AUTOMATIC');"}
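Both DDL statements attach their external tables to the BigLake connection `us-central1.gcp_lakehouse_connection`, which this diff does not create. A minimal sketch of the Terraform resource such a connection usually corresponds to (the resource label and project wiring are assumptions):

```hcl
resource "google_bigquery_connection" "gcp_lakehouse_connection" {
  connection_id = "gcp_lakehouse_connection"
  project       = module.project-services.project_id
  location      = "us-central1"

  # A cloud_resource connection provisions a service account that BigLake
  # external tables use to read the Parquet objects out of GCS.
  cloud_resource {}
}
```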
8 changes: 4 additions & 4 deletions assets/yaml/workflow_create_views_and_others.yaml
@@ -1,16 +1,16 @@
- assignStep:
assign:
- marketing_user: ${marketing_user}
- data_analyst_user: ${data_analyst_user}
- results: {}
- data_analyst_user: ${data_analyst_user}
- results: {}
- map:
row_policy_usa_filter: $${"CREATE OR REPLACE ROW ACCESS POLICY usa_filter ON `" + sys.get_env("GOOGLE_CLOUD_PROJECT_ID") + ".gcp_lakehouse_ds.gcp_tbl_users` GRANT TO ('serviceAccount:" + data_analyst_user + "') FILTER USING (Country = 'United States')"}
row_policy_product_category_filter: $${"CREATE OR REPLACE ROW ACCESS POLICY product_category_filter ON `" + sys.get_env("GOOGLE_CLOUD_PROJECT_ID") + ".gcp_lakehouse_ds.gcp_tbl_products` GRANT TO ('serviceAccount:" + marketing_user + "') FILTER USING (Category = 'Swim' or Category = 'Active' or Category = 'Fashion Hoodies & Sweatshirts')"}
create_view_ecommerce: $${"call gcp_lakehouse_ds.create_view_ecommerce()"}
- loopStep:
for:
value: key
in: $${keys(map)}
value: key
in: $${keys(map)}
steps:
- runQuery:
call: googleapis.bigquery.v2.jobs.query
18 changes: 9 additions & 9 deletions dataplex.tf
@@ -1,7 +1,7 @@
resource "google_project_service_identity" "dataplex_sa" {
provider = google-beta
project = module.project-services.project_id
service = "dataplex.googleapis.com"
provider = google-beta
project = module.project-services.project_id
service = "dataplex.googleapis.com"
depends_on = [time_sleep.wait_after_all_workflows]
}

@@ -15,7 +15,7 @@ resource "google_dataplex_lake" "gcp_primary" {
gcp-lake = "exists"
}

project = module.project-services.project_id
project = module.project-services.project_id
depends_on = [time_sleep.wait_after_all_workflows]

}
@@ -39,14 +39,14 @@ resource "google_dataplex_zone" "gcp_primary_zone" {
display_name = "Zone 1"
labels = {}
project = module.project-services.project_id
depends_on = [time_sleep.wait_after_all_workflows]
depends_on = [time_sleep.wait_after_all_workflows]
}

# Give Dataplex access to the BigLake bucket
resource "google_project_iam_member" "dataplex_bucket_access" {
project = module.project-services.project_id
role = "roles/dataplex.serviceAgent"
member = "serviceAccount:${google_project_service_identity.dataplex_sa.email}"
project = module.project-services.project_id
role = "roles/dataplex.serviceAgent"
member = "serviceAccount:${google_project_service_identity.dataplex_sa.email}"
depends_on = [time_sleep.wait_after_all_workflows]
}

@@ -67,7 +67,7 @@ resource "google_dataplex_asset" "gcp_primary_asset" {
type = "STORAGE_BUCKET"
}

project = module.project-services.project_id
project = module.project-services.project_id
depends_on = [time_sleep.wait_after_all_workflows, google_project_iam_member.dataplex_bucket_access]

}
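
The fragments above follow the standard Dataplex hierarchy: a lake contains a zone, the bucket is attached to the zone as a `STORAGE_BUCKET` asset, and the Dataplex service agent needs project IAM before the asset can discover the bucket. Reassembled as one resource, the asset plausibly looks like this (names not visible in the diff are assumed):

```hcl
resource "google_dataplex_asset" "gcp_primary_asset" {
  name          = "gcp-primary-asset" # assumed
  location      = var.region
  lake          = google_dataplex_lake.gcp_primary.name
  dataplex_zone = google_dataplex_zone.gcp_primary_zone.name

  discovery_spec {
    enabled = true
  }

  resource_spec {
    # Bucket reference is assumed; the diff only shows the type.
    name = "projects/${module.project-services.project_id}/buckets/gcp-lakehouse-data"
    type = "STORAGE_BUCKET"
  }

  project    = module.project-services.project_id
  depends_on = [time_sleep.wait_after_all_workflows, google_project_iam_member.dataplex_bucket_access]
}
```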
3 changes: 0 additions & 3 deletions examples/simple_example/README.md
@@ -7,14 +7,11 @@ This example illustrates how to use the `` module.

| Name | Description | Type | Default | Required |
|------|-------------|------|---------|:--------:|
| bucket\_name | The name of the bucket to create. | `string` | n/a | yes |
| project\_id | The ID of the project in which to provision resources. | `string` | n/a | yes |

## Outputs

| Name | Description |
|------|-------------|
| bucket\_name | The name of the bucket. |

<!-- END OF PRE-COMMIT-TERRAFORM DOCS HOOK -->

4 changes: 4 additions & 0 deletions examples/simple_example/main.tf
@@ -14,3 +14,7 @@
* limitations under the License.
*/

module "analytics_lakehouse" {
source = "/"

}
1 change: 0 additions & 1 deletion examples/simple_example/outputs.tf
@@ -13,4 +13,3 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/

5 changes: 0 additions & 5 deletions examples/simple_example/variables.tf
@@ -13,8 +13,3 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/

variable "project_id" {
description = "The ID of the project in which to provision resources."
type = string
}
39 changes: 17 additions & 22 deletions main.tf
@@ -14,10 +14,6 @@
* limitations under the License.
*/

data "google_project" "project" {
project_id = var.project_id
}

module "project-services" {
source = "terraform-google-modules/project-factory/google//modules/project_services"
version = "13.0.0"
@@ -145,65 +141,64 @@ resource "time_sleep" "wait_after_all_resources" {
google_project_iam_member.connectionPermissionGrant,
google_workflows_workflow.workflows_create_gcp_biglake_tables,
data.google_storage_project_service_account.gcs_account
]
]
}

resource "time_sleep" "wait_after_all_workflows" {
create_duration = "60s"
depends_on = [data.http.call_workflows_bucket_copy_run,
data.http.call_workflows_create_gcp_biglake_tables_run,
data.http.call_workflows_create_iceberg_table,
data.http.call_workflows_create_views_and_others
]
}
# Execute workflows
data "google_client_config" "current" {
}
provider "http" {

resource "time_sleep" "wait_after_all_workflows" {
create_duration = "30s"
depends_on = [data.http.call_workflows_bucket_copy_run,
data.http.call_workflows_create_gcp_biglake_tables_run,
data.http.call_workflows_create_iceberg_table,
data.http.call_workflows_create_views_and_others
]
}

data "http" "call_workflows_bucket_copy_run" {
url = "https://workflowexecutions.googleapis.com/v1/projects/${module.project-services.project_id}/locations/${var.region}/workflows/${google_workflows_workflow.workflow_bucket_copy.name}/executions"
url = "https://workflowexecutions.googleapis.com/v1/projects/${module.project-services.project_id}/locations/${var.region}/workflows/${google_workflows_workflow.workflow_bucket_copy.name}/executions"
method = "POST"
request_headers = {
Accept = "application/json"
Authorization = "Bearer ${data.google_client_config.current.access_token}" }
depends_on = [
depends_on = [
time_sleep.wait_after_all_resources
]
}

data "http" "call_workflows_create_gcp_biglake_tables_run" {
url = "https://workflowexecutions.googleapis.com/v1/projects/${module.project-services.project_id}/locations/${var.region}/workflows/${google_workflows_workflow.workflows_create_gcp_biglake_tables.name}/executions"
url = "https://workflowexecutions.googleapis.com/v1/projects/${module.project-services.project_id}/locations/${var.region}/workflows/${google_workflows_workflow.workflows_create_gcp_biglake_tables.name}/executions"
method = "POST"
request_headers = {
Accept = "application/json"
Authorization = "Bearer ${data.google_client_config.current.access_token}" }
depends_on = [
depends_on = [
time_sleep.wait_after_all_resources
]
}

resource "time_sleep" "wait_after_bucket_copy" {
create_duration = "30s"
depends_on = [data.http.call_workflows_bucket_copy_run
]
]
}

data "http" "call_workflows_create_views_and_others" {
url = "https://workflowexecutions.googleapis.com/v1/projects/${module.project-services.project_id}/locations/${var.region}/workflows/${google_workflows_workflow.workflow_create_views_and_others.name}/executions"
url = "https://workflowexecutions.googleapis.com/v1/projects/${module.project-services.project_id}/locations/${var.region}/workflows/${google_workflows_workflow.workflow_create_views_and_others.name}/executions"
method = "POST"
request_headers = {
Accept = "application/json"
Authorization = "Bearer ${data.google_client_config.current.access_token}" }
depends_on = [
time_sleep.wait_after_all_resources,
data.http.call_workflows_create_gcp_biglake_tables_run
data.http.call_workflows_create_gcp_biglake_tables_run
]
}

data "http" "call_workflows_create_iceberg_table" {
url = "https://workflowexecutions.googleapis.com/v1/projects/${module.project-services.project_id}/locations/${var.region}/workflows/${google_workflows_workflow.initial-workflow-pyspark.name}/executions"
url = "https://workflowexecutions.googleapis.com/v1/projects/${module.project-services.project_id}/locations/${var.region}/workflows/${google_workflows_workflow.initial-workflow-pyspark.name}/executions"
method = "POST"
request_headers = {
Accept = "application/json"
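The blocks above repeat one pattern per workflow: a `time_sleep` gate, then a `data "http"` document that POSTs an empty body to the Workflow Executions API, which starts an execution at apply time. Stripped to its essentials, the pattern looks like this (a sketch; the workflow resource name here is invented):

```hcl
# Let IAM grants and workflow deployment settle before invoking.
resource "time_sleep" "wait_before_invoke" {
  create_duration = "30s"
  depends_on      = [google_workflows_workflow.example] # assumed workflow resource
}

# POSTing to .../executions starts one execution of the named workflow.
data "http" "invoke_example_workflow" {
  url    = "https://workflowexecutions.googleapis.com/v1/projects/${var.project_id}/locations/${var.region}/workflows/example/executions"
  method = "POST"

  request_headers = {
    Accept        = "application/json"
    Authorization = "Bearer ${data.google_client_config.current.access_token}"
  }

  depends_on = [time_sleep.wait_before_invoke]
}
```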
12 changes: 8 additions & 4 deletions outputs.tf
@@ -15,16 +15,20 @@
*/

output "workflow_return_bucket_copy" {
value = data.http.call_workflows_bucket_copy_run.response_body
description = "Output of the bucket copy workflow"
value = data.http.call_workflows_bucket_copy_run.response_body
}

output "workflow_return_create_bq_tables" {
value = data.http.call_workflows_create_gcp_biglake_tables_run.response_body
description = "Output of the create bigquery tables workflow"
value = data.http.call_workflows_create_gcp_biglake_tables_run.response_body
}

output "call_workflows_create_views_and_others" {
value = data.http.call_workflows_create_views_and_others.response_body
description = "Output of the create view workflow"
value = data.http.call_workflows_create_views_and_others.response_body
}
output "call_workflows_create_iceberg_table" {
value = data.http.call_workflows_create_iceberg_table.response_body
description = "Output of the iceberg tables workflow"
value = data.http.call_workflows_create_iceberg_table.response_body
}
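
Each of these outputs is the raw JSON body returned when the workflow execution is created, so a consumer can decode it to read a single field. A sketch, assuming the usual Workflow Executions response shape (`name`, `state`, and so on):

```hcl
locals {
  bucket_copy_execution = jsondecode(module.analytics_lakehouse.workflow_return_bucket_copy)
}

output "bucket_copy_state" {
  # Typically "ACTIVE" right after creation; "SUCCEEDED" once the run finishes.
  value = try(local.bucket_copy_execution.state, "UNKNOWN")
}
```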
47 changes: 47 additions & 0 deletions test/integration/analytics_lakehouse/analytics_lakehouse_test.go
@@ -0,0 +1,47 @@
// Copyright 2023 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package multiple_buckets

import (
"testing"
"time"

"github.com/GoogleCloudPlatform/cloud-foundation-toolkit/infra/blueprint-test/pkg/gcloud"
"github.com/GoogleCloudPlatform/cloud-foundation-toolkit/infra/blueprint-test/pkg/tft"
"github.com/stretchr/testify/assert"
)

// Retry if these errors are encountered.
var retryErrors = map[string]string{
// IAM for Eventarc service agent is eventually consistent
".*Permission denied while using the Eventarc Service Agent.*": "Eventarc Service Agent IAM is eventually consistent",
}

func TestAnalyticsLakehouse(t *testing.T) {
dwh := tft.NewTFBlueprintTest(t, tft.WithRetryableTerraformErrors(retryErrors, 10, time.Minute))

dwh.DefineVerify(func(assert *assert.Assertions) {
dwh.DefaultVerify(assert)

projectID := dwh.GetTFSetupStringOutput("project_id")
bucket := dwh.GetStringOutput("raw_bucket")

bucketOP := gcloud.Runf(t, "storage buckets describe gs://%s --project %s", bucket, projectID)
assert.Equal("US-CENTRAL1", bucketOP.Get("location").String(), "should be in us-central1")
assert.Equal("STANDARD", bucketOP.Get("storageClass").String(), "should have standard storageClass")
//TODO: Add additional asserts for other resources
})
dwh.Test()
}
39 changes: 0 additions & 39 deletions test/integration/simple_example/simple_example_test.go

This file was deleted.
