fix: rolling back PHS creation in deployment (#105)
bradmiro authored Dec 15, 2023
1 parent e854da8 commit f5acf8e
Showing 3 changed files with 2 additions and 39 deletions.
27 changes: 0 additions & 27 deletions dataproc.tf
@@ -103,30 +103,3 @@ resource "google_project_iam_member" "bq_connection_iam_biglake" {
   role   = "roles/biglake.admin"
   member = "serviceAccount:${google_bigquery_connection.ds_connection.cloud_resource[0].service_account_id}"
 }
-
-resource "google_dataproc_cluster" "phs" {
-  name    = "gcp-${var.use_case_short}-phs-${random_id.id.hex}"
-  project = module.project-services.project_id
-  region  = var.region
-  cluster_config {
-    staging_bucket = google_storage_bucket.phs-staging-bucket.name
-    temp_bucket    = google_storage_bucket.phs-temp-bucket.name
-    gce_cluster_config {
-      service_account = google_service_account.dataproc_service_account.email
-      subnetwork      = google_compute_subnetwork.subnet.name
-    }
-    software_config {
-      override_properties = {
-        "dataproc:dataproc.allow.zero.workers" = "true"
-        "spark:spark.history.fs.logDirectory"  = "gs://${google_storage_bucket.spark-log-directory.name}/phs/*/spark-job-history"
-      }
-    }
-    endpoint_config {
-      enable_http_port_access = "true"
-    }
-  }
-
-  depends_on = [
-    google_project_iam_member.dataproc_sa_roles
-  ]
-}
4 changes: 2 additions & 2 deletions
@@ -47,8 +47,8 @@ func TestAnalyticsLakehouse(t *testing.T) {
 
 	verifyNoVMs := func() (bool, error) {
 		currentComputeInstances := gcloud.Runf(t, "compute instances list --project %s", projectID).Array()
-		// There should only be 1 compute instance (Dataproc PHS). Wait to destroy if other instances exist.
-		if len(currentComputeInstances) > 1 {
+		// If compute instances is greater than 0, wait and check again until 0 to complete destroy
+		if len(currentComputeInstances) > 0 {
 			return true, nil
 		}
 		return false, nil
10 changes: 0 additions & 10 deletions workflows.tf
@@ -170,13 +170,3 @@ resource "time_sleep" "wait_after_all_workflows" {
     data.http.call_workflows_project_setup,
   ]
 }
-
-# Stop the PHS cluster after creation since it costs too much.
-# tflint-ignore: terraform_unused_declarations
-data "http" "call_stop_cluster" {
-  url    = "https://dataproc.googleapis.com/v1/projects/${module.project-services.project_id}/regions/${var.region}/clusters/${google_dataproc_cluster.phs.name}:stop"
-  method = "POST"
-  request_headers = {
-    Accept        = "application/json"
-    Authorization = "Bearer ${data.google_client_config.current.access_token}" }
-}
