diff --git a/google/resource_dataflow_job.go b/google/resource_dataflow_job.go index ff4996b3419..1cf0a9eaee8 100644 --- a/google/resource_dataflow_job.go +++ b/google/resource_dataflow_job.go @@ -122,6 +122,13 @@ func resourceDataflowJob() *schema.Resource { Optional: true, ForceNew: true, }, + + "ip_configuration": { + Type: schema.TypeString, + Optional: true, + ForceNew: true, + ValidateFunc: validation.StringInSlice([]string{"WORKER_IP_PUBLIC", "WORKER_IP_PRIVATE", ""}, false), + }, }, } } @@ -154,6 +161,7 @@ func resourceDataflowJobCreate(d *schema.ResourceData, meta interface{}) error { Subnetwork: d.Get("subnetwork").(string), TempLocation: d.Get("temp_gcs_location").(string), MachineType: d.Get("machine_type").(string), + IpConfiguration: d.Get("ip_configuration").(string), AdditionalUserLabels: labels, Zone: zone, } diff --git a/google/resource_dataflow_job_test.go b/google/resource_dataflow_job_test.go index 37ffc05f180..ae38eeb8d63 100644 --- a/google/resource_dataflow_job_test.go +++ b/google/resource_dataflow_job_test.go @@ -131,6 +131,24 @@ func TestAccDataflowJobCreateWithLabels(t *testing.T) { }) } +func TestAccDataflowJobCreateWithIpConfig(t *testing.T) { + t.Parallel() + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testAccCheckDataflowJobDestroy, + Steps: []resource.TestStep{ + { + Config: testAccDataflowJobWithIpConfig, + Check: resource.ComposeTestCheckFunc( + testAccDataflowJobExists( + "google_dataflow_job.big_data"), + ), + }, + }, + }) +} + func testAccCheckDataflowJobDestroy(s *terraform.State) error { for _, rs := range s.RootModule().Resources { if rs.Type != "google_dataflow_job" { @@ -519,6 +537,33 @@ resource "google_dataflow_job" "big_data" { on_delete = "cancel" }`, acctest.RandString(10), acctest.RandString(10), acctest.RandString(10), getTestProjectFromEnv()) +var testAccDataflowJobWithIpConfig = fmt.Sprintf(` +resource "google_storage_bucket" "temp" 
{ + name = "dfjob-test-%s-temp" + + force_destroy = true +} + +resource "google_dataflow_job" "big_data" { + name = "dfjob-test-%s" + + template_gcs_path = "gs://dataflow-templates/wordcount/template_file" + temp_gcs_location = "${google_storage_bucket.temp.url}" + machine_type = "n1-standard-2" + + parameters = { + inputFile = "gs://dataflow-samples/shakespeare/kinglear.txt" + output = "${google_storage_bucket.temp.url}/output" + } + + ip_configuration = "WORKER_IP_PRIVATE" + + zone = "us-central1-f" + project = "%s" + + on_delete = "cancel" +}`, acctest.RandString(10), acctest.RandString(10), getTestProjectFromEnv()) + func testAccDataflowJobWithLabels(key string) string { return fmt.Sprintf(` resource "google_storage_bucket" "temp" { diff --git a/website/docs/r/dataflow_job.html.markdown b/website/docs/r/dataflow_job.html.markdown index 9cc2e1e102f..8b3901b4cf8 100644 --- a/website/docs/r/dataflow_job.html.markdown +++ b/website/docs/r/dataflow_job.html.markdown @@ -54,6 +54,7 @@ The following arguments are supported: * `network` - (Optional) The network to which VMs will be assigned. If it is not provided, "default" will be used. * `subnetwork` - (Optional) The subnetwork to which VMs will be assigned. Should be of the form "regions/REGION/subnetworks/SUBNETWORK". * `machine_type` - (Optional) The machine type to use for the job. +* `ip_configuration` - (Optional) The configuration for VM IPs. Options are `"WORKER_IP_PUBLIC"` or `"WORKER_IP_PRIVATE"`. ## Attributes Reference