Commit
Applying terraform format to all files
ilakhtenkov committed Nov 16, 2022
1 parent 95c6691 commit cf5b234
Showing 9 changed files with 158 additions and 88 deletions.
50 changes: 50 additions & 0 deletions .github/workflows/ci.yaml
@@ -0,0 +1,50 @@
name: Check quality gates
on:
  push:
    branches: [ main ]
  pull_request:
    branches: [ main ]

jobs:
  tflint:
    name: Run TFlint
    runs-on: ubuntu-latest

    steps:
      - uses: actions/checkout@v2
        name: Checkout source code

      - uses: actions/cache@v2
        name: Cache plugin dir
        with:
          path: ~/.tflint.d/plugins
          key: ubuntu-latest-tflint-${{ hashFiles('.tflint.hcl') }}

      - uses: terraform-linters/setup-tflint@v2
        name: Setup TFLint
        with:
          tflint_version: v0.42.2

      - name: Show version
        run: tflint --version

      - name: Init TFLint
        run: tflint --init

      - name: Run TFLint
        run: tflint -f compact
  fmt:
    name: Run Terraform format
    runs-on: ubuntu-latest

    steps:
      - uses: actions/checkout@v2
        name: Checkout source code

      - uses: hashicorp/setup-terraform@v2
        with:
          terraform_version: 1.1.7
        name: Install Terraform

      - name: Run terraform fmt
        run: terraform fmt -check=true -write=false
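
Note: the fmt job fails the build when any file deviates from canonical style, since "terraform fmt -check" exits non-zero and "-write=false" keeps CI from rewriting files. What the formatter changes is almost entirely whitespace, most visibly the alignment of "=" across consecutive arguments in a block. A minimal before/after sketch (illustrative HCL, not taken from this repository):

# Before: inconsistent spacing around "="
resource "google_pubsub_topic" "example" {
  name = "example-topic"
  message_retention_duration = "86400s"
}

# After "terraform fmt": "=" aligned on the longest argument name
resource "google_pubsub_topic" "example" {
  name                       = "example-topic"
  message_retention_duration = "86400s"
}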
41 changes: 15 additions & 26 deletions main.tf
@@ -12,15 +12,6 @@
# See the License for the specific language governing permissions and
# limitations under the License.

-terraform {
-  required_version = ">= 0.13"
-}
-
-provider "google" {
-  project = var.project
-  region  = var.region
-}

data "google_project" "project" {}

data "google_client_openid_userinfo" "provider_identity" {}
@@ -44,29 +35,27 @@ locals {
  dataflow_temporary_gcs_bucket_path = "tmp/"

  dataflow_splunk_template_gcs_path = "gs://dataflow-templates/${var.dataflow_template_version}/Cloud_PubSub_to_Splunk"
  dataflow_pubsub_template_gcs_path = "gs://dataflow-templates/${var.dataflow_template_version}/Cloud_PubSub_to_Cloud_PubSub"

  # If provided, set Dataflow worker to new user-managed service account;
  # otherwise, use Compute Engine default service account
  dataflow_worker_service_account = ((var.dataflow_worker_service_account != "")
    ? "${var.dataflow_worker_service_account}@${var.project}.iam.gserviceaccount.com"
-    : "${data.google_project.project.number}-compute@developer.gserviceaccount.com")
+  : "${data.google_project.project.number}-compute@developer.gserviceaccount.com")

-  subnet_name = coalesce(var.subnet, "${var.network}-${var.region}")
+  subnet_name                = coalesce(var.subnet, "${var.network}-${var.region}")
  project_log_sink_name      = "${var.dataflow_job_name}-project-log-sink"
  organization_log_sink_name = "${var.dataflow_job_name}-organization-log-sink"

-  dataflow_main_job_name = "${var.dataflow_job_name}-main-${random_id.dataflow_job_instance.hex}"
+  dataflow_main_job_name   = "${var.dataflow_job_name}-main-${random_id.dataflow_job_instance.hex}"
  dataflow_replay_job_name = "${var.dataflow_job_name}-replay-${random_id.dataflow_job_instance.hex}"

-  dataflow_input_topic_name = "${var.dataflow_job_name}-input-topic"
-  dataflow_input_subscription_name = "${var.dataflow_job_name}-input-subscription"
+  dataflow_input_topic_name             = "${var.dataflow_job_name}-input-topic"
+  dataflow_input_subscription_name      = "${var.dataflow_job_name}-input-subscription"
  dataflow_output_deadletter_topic_name = "${var.dataflow_job_name}-deadletter-topic"
-  dataflow_output_deadletter_sub_name = "${var.dataflow_job_name}-deadletter-subscription"
+  dataflow_output_deadletter_sub_name   = "${var.dataflow_job_name}-deadletter-subscription"

  # Dataflow job parameters (not externalized for this project)
-  dataflow_job_include_pubsub_message = true
-  dataflow_job_enable_batch_logs = false
+  dataflow_job_include_pubsub_message       = true
+  dataflow_job_enable_batch_logs            = false
  dataflow_job_enable_gzip_http_compression = true

  # Metrics scope for Monitoring dashboard defaults to project unless explicitly provided
@@ -83,7 +72,7 @@ resource "google_pubsub_subscription" "dataflow_input_pubsub_subscription" {

  # messages retained for 7 days (max)
  message_retention_duration = "604800s"
-  ack_deadline_seconds = 30
+  ack_deadline_seconds       = 30

  # subscription never expires
  expiration_policy {
@@ -92,9 +81,9 @@ resource "google_pubsub_subscription" "dataflow_input_pubsub_subscription" {
}

resource "google_logging_project_sink" "project_log_sink" {
name = local.project_log_sink_name
name = local.project_log_sink_name
destination = "pubsub.googleapis.com/projects/${var.project}/topics/${google_pubsub_topic.dataflow_input_pubsub_topic.name}"
filter = var.log_filter
filter = var.log_filter

unique_writer_identity = true
}
@@ -109,17 +98,17 @@ resource "google_logging_project_sink" "project_log_sink" {
# }

output "dataflow_job_id" {
value = google_dataflow_job.dataflow_job.job_id
value = google_dataflow_job.dataflow_job.job_id
}

output "dataflow_input_topic" {
value = google_pubsub_topic.dataflow_input_pubsub_topic.name
value = google_pubsub_topic.dataflow_input_pubsub_topic.name
}

output "dataflow_output_deadletter_subscription" {
value = google_pubsub_subscription.dataflow_deadletter_pubsub_sub.name
value = google_pubsub_subscription.dataflow_deadletter_pubsub_sub.name
}

output "dataflow_log_export_dashboard" {
value = google_monitoring_dashboard.splunk-export-pipeline-dashboard.id
value = google_monitoring_dashboard.splunk-export-pipeline-dashboard.id
}
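
Two expressions in this file carry the real logic and are only realigned by the commit: a conditional that falls back to the Compute Engine default service account, and coalesce(), which skips null and empty-string arguments. A self-contained sketch of both fallback idioms (variable names here are hypothetical):

# Sketch only: the default-fallback patterns used in the locals above
locals {
  # Conditional default: use the user-managed account when one is named
  worker_sa = var.worker_sa_id != "" ? "${var.worker_sa_id}@${var.project}.iam.gserviceaccount.com" : "${data.google_project.project.number}-compute@developer.gserviceaccount.com"

  # coalesce() returns the first argument that is neither null nor ""
  subnet_name = coalesce(var.subnet, "${var.network}-${var.region}")
}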
4 changes: 2 additions & 2 deletions monitoring.tf
@@ -14,13 +14,13 @@

resource "google_monitoring_group" "splunk-export-pipeline-group" {
display_name = "Splunk Log Export Group"
project = local.scoping_project
project = local.scoping_project

filter = "resource.metadata.name=starts_with(\"${var.dataflow_job_name}\")"
}

resource "google_monitoring_dashboard" "splunk-export-pipeline-dashboard" {
project = local.scoping_project
project = local.scoping_project
dashboard_json = <<EOF
{
"displayName": "Splunk Log Export Ops",
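
The dashboard body (truncated by the diff view above) is raw JSON in a heredoc, which terraform fmt leaves untouched. An alternative worth knowing, sketched here with a hypothetical minimal payload, is jsonencode(), which lets Terraform handle quoting and interpolation inside the structure:

resource "google_monitoring_dashboard" "example" {
  project        = local.scoping_project
  dashboard_json = jsonencode({
    displayName = "Splunk Log Export Ops"  # hypothetical minimal payload
    gridLayout  = { widgets = [] }
  })
}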
6 changes: 3 additions & 3 deletions network.tf
@@ -27,12 +27,12 @@ resource "google_compute_subnetwork" "splunk_subnet" {
resource "google_dns_policy" "splunk_network_dns_policy" {
count = var.create_network == true ? 1 : 0

name = "${var.network}-dns-policy"
name = "${var.network}-dns-policy"

enable_logging = true

networks {
network_url = google_compute_network.splunk_export[count.index].id
network_url = google_compute_network.splunk_export[count.index].id
}
}

@@ -59,7 +59,7 @@ resource "google_compute_router_nat" "dataflow_nat" {
  region                             = google_compute_router.dataflow_to_splunk_router[count.index].region
  nat_ip_allocate_option             = "MANUAL_ONLY"
  source_subnetwork_ip_ranges_to_nat = "LIST_OF_SUBNETWORKS"
-  nat_ips = google_compute_address.dataflow_nat_ip_address.*.self_link
+  nat_ips                            = google_compute_address.dataflow_nat_ip_address[*].self_link
  min_ports_per_vm                   = 128
  subnetwork {
    name = google_compute_subnetwork.splunk_subnet[count.index].id
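
One change in this file is more than whitespace: nat_ips moves from the legacy attribute splat .*.self_link to the full splat [*].self_link introduced in Terraform 0.12. Both expand a counted resource into a list of attribute values; the bracketed form additionally converts a null left-hand side to an empty list. A small sketch (resource and values hypothetical):

# With count = 2 on the address resource, both expressions yield a two-element list:
#   google_compute_address.example.*.self_link    # legacy attribute splat
#   google_compute_address.example[*].self_link   # full splat, preferred since 0.12
# e.g. [".../addresses/nat-ip-0", ".../addresses/nat-ip-1"]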
26 changes: 13 additions & 13 deletions permissions.tf
@@ -14,51 +14,51 @@

resource "google_pubsub_topic_iam_binding" "input_sub_publisher" {
project = google_pubsub_topic.dataflow_input_pubsub_topic.project
topic = google_pubsub_topic.dataflow_input_pubsub_topic.name
role = "roles/pubsub.publisher"
topic = google_pubsub_topic.dataflow_input_pubsub_topic.name
role = "roles/pubsub.publisher"
members = [
google_logging_project_sink.project_log_sink.writer_identity
]
}

resource "google_pubsub_subscription_iam_binding" "input_sub_subscriber" {
project = google_pubsub_subscription.dataflow_input_pubsub_subscription.project
project = google_pubsub_subscription.dataflow_input_pubsub_subscription.project
subscription = google_pubsub_subscription.dataflow_input_pubsub_subscription.name
role = "roles/pubsub.subscriber"
role = "roles/pubsub.subscriber"
members = [
"serviceAccount:${local.dataflow_worker_service_account}"
]
}

resource "google_pubsub_subscription_iam_binding" "input_sub_viewer" {
project = google_pubsub_subscription.dataflow_input_pubsub_subscription.project
project = google_pubsub_subscription.dataflow_input_pubsub_subscription.project
subscription = google_pubsub_subscription.dataflow_input_pubsub_subscription.name
role = "roles/pubsub.viewer"
role = "roles/pubsub.viewer"
members = [
"serviceAccount:${local.dataflow_worker_service_account}"
]
}

resource "google_pubsub_topic_iam_binding" "deadletter_topic_publisher" {
project = google_pubsub_topic.dataflow_deadletter_pubsub_topic.project
topic = google_pubsub_topic.dataflow_deadletter_pubsub_topic.name
role = "roles/pubsub.publisher"
topic = google_pubsub_topic.dataflow_deadletter_pubsub_topic.name
role = "roles/pubsub.publisher"
members = [
"serviceAccount:${local.dataflow_worker_service_account}"
]
}

resource "google_storage_bucket_iam_binding" "dataflow_worker_bucket_access" {
bucket = google_storage_bucket.dataflow_job_temp_bucket.name
role = "roles/storage.objectAdmin"
role = "roles/storage.objectAdmin"
members = [
"serviceAccount:${local.dataflow_worker_service_account}"
]
}

resource "google_project_iam_binding" "dataflow_worker_role" {
project = var.project
role = "roles/dataflow.worker"
role = "roles/dataflow.worker"
members = [
"serviceAccount:${local.dataflow_worker_service_account}"
]
@@ -73,11 +73,11 @@ resource "google_project_iam_binding" "dataflow_worker_role" {
# deployment will return an error. For security purposes, we do not modify access to existing
# default Compute Engine service account
resource "google_service_account_iam_binding" "terraform_caller_impersonate_dataflow_worker" {
count = (var.dataflow_worker_service_account != "") ? 1 : 0
count = (var.dataflow_worker_service_account != "") ? 1 : 0
service_account_id = google_service_account.dataflow_worker_service_account[0].id
role = "roles/iam.serviceAccountUser"
role = "roles/iam.serviceAccountUser"

members = [
"user:${data.google_client_openid_userinfo.provider_identity.email}"
"user:${data.google_client_openid_userinfo.provider_identity.email}"
]
}
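
A note on the resource family used throughout this file: every google_*_iam_binding is authoritative for its role, so Terraform removes members it does not know about. The additive counterpart is google_*_iam_member, which manages only the single grant. A sketch of the additive form for the subscriber role above (same identifiers, alternative resource type, not what this module does):

resource "google_pubsub_subscription_iam_member" "input_sub_subscriber_additive" {
  project      = google_pubsub_subscription.dataflow_input_pubsub_subscription.project
  subscription = google_pubsub_subscription.dataflow_input_pubsub_subscription.name
  role         = "roles/pubsub.subscriber"
  member       = "serviceAccount:${local.dataflow_worker_service_account}"
}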
54 changes: 27 additions & 27 deletions pipeline.tf
@@ -31,50 +31,50 @@ resource "google_pubsub_subscription" "dataflow_deadletter_pubsub_sub" {
}

resource "google_storage_bucket" "dataflow_job_temp_bucket" {
name = local.dataflow_temporary_gcs_bucket_name
location = var.region
name = local.dataflow_temporary_gcs_bucket_name
location = var.region
storage_class = "REGIONAL"
}

resource "google_storage_bucket_object" "dataflow_job_temp_object" {
name = local.dataflow_temporary_gcs_bucket_path
name = local.dataflow_temporary_gcs_bucket_path
content = "Placeholder for Dataflow to write temporary files"
bucket = google_storage_bucket.dataflow_job_temp_bucket.name
bucket = google_storage_bucket.dataflow_job_temp_bucket.name
}

resource "google_service_account" "dataflow_worker_service_account" {
count = (var.dataflow_worker_service_account != "") ? 1 : 0
account_id = var.dataflow_worker_service_account
display_name = "Dataflow worker service account to execute pipeline operations"
count = (var.dataflow_worker_service_account != "") ? 1 : 0
account_id = var.dataflow_worker_service_account
display_name = "Dataflow worker service account to execute pipeline operations"
}

resource "google_dataflow_job" "dataflow_job" {
name = local.dataflow_main_job_name
template_gcs_path = local.dataflow_splunk_template_gcs_path
temp_gcs_location = "gs://${local.dataflow_temporary_gcs_bucket_name}/${local.dataflow_temporary_gcs_bucket_path}"
name = local.dataflow_main_job_name
template_gcs_path = local.dataflow_splunk_template_gcs_path
temp_gcs_location = "gs://${local.dataflow_temporary_gcs_bucket_name}/${local.dataflow_temporary_gcs_bucket_path}"
service_account_email = local.dataflow_worker_service_account
machine_type = var.dataflow_job_machine_type
max_workers = var.dataflow_job_machine_count
machine_type = var.dataflow_job_machine_type
max_workers = var.dataflow_job_machine_count
parameters = merge({
inputSubscription = google_pubsub_subscription.dataflow_input_pubsub_subscription.id
outputDeadletterTopic = google_pubsub_topic.dataflow_deadletter_pubsub_topic.id
url = var.splunk_hec_url
token = var.splunk_hec_token
parallelism = var.dataflow_job_parallelism
batchCount = var.dataflow_job_batch_count
includePubsubMessage = local.dataflow_job_include_pubsub_message
inputSubscription = google_pubsub_subscription.dataflow_input_pubsub_subscription.id
outputDeadletterTopic = google_pubsub_topic.dataflow_deadletter_pubsub_topic.id
url = var.splunk_hec_url
token = var.splunk_hec_token
parallelism = var.dataflow_job_parallelism
batchCount = var.dataflow_job_batch_count
includePubsubMessage = local.dataflow_job_include_pubsub_message
disableCertificateValidation = var.dataflow_job_disable_certificate_validation
enableBatchLogs = local.dataflow_job_enable_batch_logs # Supported as of 2022-03-21-00_RC01
enableGzipHttpCompression = local.dataflow_job_enable_gzip_http_compression # Supported as of 2022-04-25-00_RC00
},
enableBatchLogs = local.dataflow_job_enable_batch_logs # Supported as of 2022-03-21-00_RC01
enableGzipHttpCompression = local.dataflow_job_enable_gzip_http_compression # Supported as of 2022-04-25-00_RC00
},
(var.dataflow_job_udf_gcs_path != "" && var.dataflow_job_udf_function_name != "") ?
{
javascriptTextTransformGcsPath = var.dataflow_job_udf_gcs_path
javascriptTextTransformGcsPath = var.dataflow_job_udf_gcs_path
javascriptTextTransformFunctionName = var.dataflow_job_udf_function_name
} : {})
region = var.region
network = var.network
subnetwork = "regions/${var.region}/subnetworks/${local.subnet_name}"
} : {})
region = var.region
network = var.network
subnetwork = "regions/${var.region}/subnetworks/${local.subnet_name}"
ip_configuration = "WORKER_IP_PRIVATE"

depends_on = [
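
The parameters argument above uses a pattern worth calling out: merge() plus a conditional empty map, so the two UDF parameters are only passed to the template when both variables are set. A reduced, hypothetical sketch of the same idiom:

locals {
  base_params = {
    url = var.splunk_hec_url
  }
  # Appended only when a UDF is configured; merge(x, {}) is a no-op otherwise
  udf_params = var.dataflow_job_udf_gcs_path != "" ? {
    javascriptTextTransformGcsPath = var.dataflow_job_udf_gcs_path
  } : {}
  job_params = merge(local.base_params, local.udf_params)
}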
6 changes: 6 additions & 0 deletions providers.tf
@@ -0,0 +1,6 @@
provider "random" {}

provider "google" {
project = var.project
region = var.region
}
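
The terraform {} block with required_version that was dropped from main.tf does not appear in this file; it presumably lands in one of the two changed files this page does not render. For orientation only, a conventional companion to the provider blocks above pins versions like the following (an assumption about structure, not a claim about this commit):

terraform {
  required_version = ">= 0.13"

  required_providers {
    google = {
      source = "hashicorp/google"
    }
    random = {
      source = "hashicorp/random"
    }
  }
}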