diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml new file mode 100644 index 0000000..1c8cb71 --- /dev/null +++ b/.github/workflows/ci.yaml @@ -0,0 +1,50 @@ +name: Check quality gates +on: + push: + branches: [ main ] + pull_request: + branches: [ main ] + +jobs: + tflint: + name: Run TFLint + runs-on: ubuntu-latest + + steps: + - uses: actions/checkout@v4 + name: Checkout source code + + - uses: actions/cache@v4 + name: Cache plugin dir + with: + path: ~/.tflint.d/plugins + key: ubuntu-latest-tflint-${{ hashFiles('.tflint.hcl') }} + + - uses: terraform-linters/setup-tflint@v2 + name: Setup TFLint + with: + tflint_version: v0.42.2 + + - name: Show version + run: tflint --version + + - name: Init TFLint + run: tflint --init + + - name: Run TFLint + run: tflint -f compact + fmt: + name: Run Terraform format + runs-on: ubuntu-latest + + steps: + - uses: actions/checkout@v4 + name: Checkout source code + + - uses: hashicorp/setup-terraform@v2 + with: + terraform_version: 1.1.7 + name: Install Terraform + + - name: Run terraform fmt + run: terraform fmt -check=true -write=false diff --git a/main.tf b/main.tf index 2b2ecd9..498c6b0 100644 --- a/main.tf +++ b/main.tf @@ -12,15 +12,6 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-terraform { - required_version = ">= 0.13" -} - -provider "google" { - project = var.project - region = var.region -} - data "google_project" "project" {} data "google_client_openid_userinfo" "provider_identity" {} @@ -44,29 +35,31 @@ locals { dataflow_temporary_gcs_bucket_path = "tmp/" dataflow_splunk_template_gcs_path = "gs://dataflow-templates/${var.dataflow_template_version}/Cloud_PubSub_to_Splunk" + # tflint-ignore: terraform_unused_declarations dataflow_pubsub_template_gcs_path = "gs://dataflow-templates/${var.dataflow_template_version}/Cloud_PubSub_to_Cloud_PubSub" # If provided, set Dataflow worker to new user-managed service account; # otherwise, use Compute Engine default service account dataflow_worker_service_account = ((var.dataflow_worker_service_account != "") ? "${var.dataflow_worker_service_account}@${var.project}.iam.gserviceaccount.com" - : "${data.google_project.project.number}-compute@developer.gserviceaccount.com") + : "${data.google_project.project.number}-compute@developer.gserviceaccount.com") - subnet_name = coalesce(var.subnet, "${var.network}-${var.region}") + subnet_name = coalesce(var.subnet, "${var.network}-${var.region}") project_log_sink_name = "${var.dataflow_job_name}-project-log-sink" + # tflint-ignore: terraform_unused_declarations organization_log_sink_name = "${var.dataflow_job_name}-organization-log-sink" - dataflow_main_job_name = "${var.dataflow_job_name}-main-${random_id.dataflow_job_instance.hex}" + dataflow_main_job_name = "${var.dataflow_job_name}-main-${random_id.dataflow_job_instance.hex}" dataflow_replay_job_name = "${var.dataflow_job_name}-replay-${random_id.dataflow_job_instance.hex}" - dataflow_input_topic_name = "${var.dataflow_job_name}-input-topic" - dataflow_input_subscription_name = "${var.dataflow_job_name}-input-subscription" + dataflow_input_topic_name = "${var.dataflow_job_name}-input-topic" + dataflow_input_subscription_name = "${var.dataflow_job_name}-input-subscription" 
dataflow_output_deadletter_topic_name = "${var.dataflow_job_name}-deadletter-topic" - dataflow_output_deadletter_sub_name = "${var.dataflow_job_name}-deadletter-subscription" + dataflow_output_deadletter_sub_name = "${var.dataflow_job_name}-deadletter-subscription" # Dataflow job parameters (not externalized for this project) - dataflow_job_include_pubsub_message = true - dataflow_job_enable_batch_logs = false + dataflow_job_include_pubsub_message = true + dataflow_job_enable_batch_logs = false dataflow_job_enable_gzip_http_compression = true # Metrics scope for Monitoring dashboard defaults to project unless explicitly provided @@ -83,7 +76,7 @@ resource "google_pubsub_subscription" "dataflow_input_pubsub_subscription" { # messages retained for 7 days (max) message_retention_duration = "604800s" - ack_deadline_seconds = 30 + ack_deadline_seconds = 30 # subscription never expires expiration_policy { @@ -92,9 +85,9 @@ resource "google_pubsub_subscription" "dataflow_input_pubsub_subscription" { } resource "google_logging_project_sink" "project_log_sink" { - name = local.project_log_sink_name + name = local.project_log_sink_name destination = "pubsub.googleapis.com/projects/${var.project}/topics/${google_pubsub_topic.dataflow_input_pubsub_topic.name}" - filter = var.log_filter + filter = var.log_filter unique_writer_identity = true } @@ -109,17 +102,17 @@ resource "google_logging_project_sink" "project_log_sink" { # } output "dataflow_job_id" { - value = google_dataflow_job.dataflow_job.job_id + value = google_dataflow_job.dataflow_job.job_id } output "dataflow_input_topic" { - value = google_pubsub_topic.dataflow_input_pubsub_topic.name + value = google_pubsub_topic.dataflow_input_pubsub_topic.name } output "dataflow_output_deadletter_subscription" { - value = google_pubsub_subscription.dataflow_deadletter_pubsub_sub.name + value = google_pubsub_subscription.dataflow_deadletter_pubsub_sub.name } output "dataflow_log_export_dashboard" { - value = 
google_monitoring_dashboard.splunk-export-pipeline-dashboard.id + value = google_monitoring_dashboard.splunk-export-pipeline-dashboard.id } diff --git a/monitoring.tf b/monitoring.tf index 56dadcd..daf7b33 100644 --- a/monitoring.tf +++ b/monitoring.tf @@ -14,13 +14,13 @@ resource "google_monitoring_group" "splunk-export-pipeline-group" { display_name = "Splunk Log Export Group" - project = local.scoping_project + project = local.scoping_project filter = "resource.metadata.name=starts_with(\"${var.dataflow_job_name}\")" } resource "google_monitoring_dashboard" "splunk-export-pipeline-dashboard" { - project = local.scoping_project + project = local.scoping_project dashboard_json = <