Address comments

zli82016 committed Sep 18, 2024
1 parent 6697157 commit 3c7b7f1
Showing 6 changed files with 74 additions and 42 deletions.
16 changes: 10 additions & 6 deletions mmv1/products/dataproc/Batch.yaml
@@ -1,4 +1,4 @@
-# Copyright 2023 Google Inc.
+# Copyright 2024 Google Inc.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
@@ -49,7 +49,8 @@ examples:
test_vars_overrides:
subnetwork_name: 'acctest.BootstrapSubnetWithFirewallForDataprocBatches(t, "dataproc-spark-test-network", "dataproc-spark-test-subnetwork")'
prevent_destroy: 'false'
-    skip_import_test: true
+    ignore_read_extra:
+      - 'runtime_config.0.properties'
- !ruby/object:Provider::Terraform::Examples
name: 'dataproc_batch_sparksql'
primary_resource_id: 'example_batch_sparsql'
@@ -63,7 +64,8 @@ examples:
test_vars_overrides:
subnetwork_name: 'acctest.BootstrapSubnetWithFirewallForDataprocBatches(t, "dataproc-sparksql-test-network", "dataproc-sparksql-test-subnetwork")'
prevent_destroy: 'false'
-    skip_import_test: true
+    ignore_read_extra:
+      - 'runtime_config.0.properties'
- !ruby/object:Provider::Terraform::Examples
name: 'dataproc_batch_pyspark'
primary_resource_id: 'example_batch_pyspark'
@@ -77,7 +79,8 @@ examples:
test_vars_overrides:
subnetwork_name: 'acctest.BootstrapSubnetWithFirewallForDataprocBatches(t, "dataproc-pyspark-test-network", "dataproc-pyspark-test-subnetwork")'
prevent_destroy: 'false'
-    skip_import_test: true
+    ignore_read_extra:
+      - 'runtime_config.0.properties'
- !ruby/object:Provider::Terraform::Examples
name: 'dataproc_batch_sparkr'
primary_resource_id: 'example_batch_sparkr'
@@ -91,10 +94,11 @@ examples:
test_vars_overrides:
subnetwork_name: 'acctest.BootstrapSubnetWithFirewallForDataprocBatches(t, "dataproc-pyspark-test-network", "dataproc-pyspark-test-subnetwork")'
prevent_destroy: 'false'
-    skip_import_test: true
+    ignore_read_extra:
+      - 'runtime_config.0.properties'
custom_code: !ruby/object:Provider::Terraform::CustomCode
decoder: templates/terraform/decoders/cloud_dataproc_batch.go.erb
-  constants: templates/terraform/constants/cloud_dataproc_batch.go.erb
+  constants: templates/terraform/constants/cloud_dataproc_batch.go
parameters:
- !ruby/object:Api::Type::String
name: 'location'
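Replacing `skip_import_test: true` with `ignore_read_extra` re-enables the generated import tests for these examples while skipping verification of `runtime_config.0.properties`, which the API rewrites by merging in its own defaults. As a rough sketch of the kind of import step this is expected to produce (assuming, as is typical for mmv1, that `ignore_read_extra` entries flow into `ImportStateVerifyIgnore`; the helper name, resource address, and import path below are illustrative):

```go
package dataproc_test

import (
	"github.com/hashicorp/terraform-plugin-testing/helper/resource"
)

// batchImportStep sketches the import-verification step the generator is
// expected to emit: the import runs and state is verified, but the properties
// field that the server merges with its own defaults is not compared.
func batchImportStep(resourceAddress string) resource.TestStep {
	return resource.TestStep{
		ResourceName:            resourceAddress,
		ImportState:             true,
		ImportStateVerify:       true,
		ImportStateVerifyIgnore: []string{"runtime_config.0.properties"},
	}
}
```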
3 changes: 0 additions & 3 deletions mmv1/products/dataproc/product.yaml
@@ -17,8 +17,5 @@ versions:
- !ruby/object:Api::Product::Version
name: ga
base_url: https://dataproc.googleapis.com/v1/
-  - !ruby/object:Api::Product::Version
-    name: beta
-    base_url: https://dataproc.googleapis.com/v1beta2/
scopes:
- https://www.googleapis.com/auth/cloud-identity
@@ -3,13 +3,13 @@
* version. We are suppressing this server generated subminor.
*/
func CloudDataprocBatchRuntimeConfigVersionDiffSuppressFunc(old, new string) bool {
-	if old == "" || strings.HasPrefix(new, old) {
-		return true
-	}
+	if old != "" && strings.HasPrefix(new, old) || (new != "" && strings.HasPrefix(old, new)) {
+		return true
+	}

-	return false
+	return old == new
}

func CloudDataprocBatchRuntimeConfigVersionDiffSuppress(_, old, new string, d *schema.ResourceData) bool {
return CloudDataprocBatchRuntimeConfigVersionDiffSuppressFunc(old, new)
}
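For orientation: the wrapper above has the Plugin SDK's `DiffSuppressFunc` signature, so the generated resource can hang it directly off the version field. A minimal sketch of that wiring, assuming a plain string field (the field definition below is illustrative, not the generated schema):

```go
package dataproc

import (
	"github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema"
)

// Illustrative only: attaching the suppress wrapper (defined in the constants
// snippet above) to a version field so a configured "2.2" no longer diffs
// against the server-expanded "2.2.100", while genuinely different versions
// (e.g. "2.1" vs "2.2.100") still do.
var runtimeConfigVersionSchema = &schema.Schema{
	Type:             schema.TypeString,
	Optional:         true,
	Computed:         true,
	DiffSuppressFunc: CloudDataprocBatchRuntimeConfigVersionDiffSuppress,
}
```

The table-driven unit test added later in this commit exercises the same prefix logic in both directions without needing a schema.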
@@ -15,7 +15,6 @@
if obj1, ok := res["runtimeConfig"]; ok {
if rconfig, ok := obj1.(map[string]interface{}); ok {
if obj2, ok := rconfig["properties"]; ok {
-rconfig["version"] = d.Get("runtime_config.0.version").(string)
if properties, ok := obj2.(map[string]interface{}); ok {
// Update effective_properties to include both server set and client set properties
propertiesCopy := make(map[string]interface{})
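The decoder hunk is shown truncated, so, purely as a sketch of the merge it performs: every property reported by the server (client-set and Dataproc-injected alike) is copied into an effective-properties map, while the user-declared `runtime_config.0.properties` is left exactly as configured. The function name and the `effectiveProperties` key below are assumptions for illustration:

```go
package dataproc

// copyEffectiveProperties mirrors the server-merged property map into an
// "effectiveProperties" entry on the decoded response, leaving the original
// "properties" map untouched for the user-facing field.
func copyEffectiveProperties(res map[string]interface{}) map[string]interface{} {
	if obj1, ok := res["runtimeConfig"]; ok {
		if rconfig, ok := obj1.(map[string]interface{}); ok {
			if obj2, ok := rconfig["properties"]; ok {
				if properties, ok := obj2.(map[string]interface{}); ok {
					propertiesCopy := make(map[string]interface{}, len(properties))
					for k, v := range properties {
						propertiesCopy[k] = v
					}
					rconfig["effectiveProperties"] = propertiesCopy
				}
			}
		}
	}
	return res
}
```

The removed line above also stops the decoder from overwriting the server-reported version with the configured value, which appears to be what makes the relaxed, two-way prefix suppression necessary.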
@@ -0,0 +1,59 @@
package dataproc

import (
"testing"
)

func TestCloudDataprocBatchRuntimeConfigVersionDiffSuppress(t *testing.T) {
cases := map[string]struct {
Old, New string
ExpectDiffSuppress bool
}{
"old version is empty, new version has a value": {
Old: "",
New: "2.2.100",
ExpectDiffSuppress: false,
},
"old version is the prefix of the new version": {
Old: "2.2",
New: "2.2.100",
ExpectDiffSuppress: true,
},
"old version is not the prefix of the new version": {
Old: "2.1",
New: "2.2.100",
ExpectDiffSuppress: false,
},
"new version is empty, old version has a value": {
Old: "2.2.100",
New: "",
ExpectDiffSuppress: false,
},
"new version is the prefix of the old version": {
Old: "2.2.100",
New: "2.2",
ExpectDiffSuppress: true,
},
"new version is not the prefix of the old version": {
Old: "2.2.100",
New: "2.1",
ExpectDiffSuppress: false,
},
"old version is the same with the new version": {
Old: "2.2.100",
New: "2.2.100",
ExpectDiffSuppress: true,
},
"both new version and old version are empty string": {
Old: "",
New: "",
ExpectDiffSuppress: true,
},
}

for tn, tc := range cases {
if CloudDataprocBatchRuntimeConfigVersionDiffSuppressFunc(tc.Old, tc.New) != tc.ExpectDiffSuppress {
t.Errorf("bad: %s, %q => %q expect DiffSuppress to return %t", tn, tc.Old, tc.New, tc.ExpectDiffSuppress)
}
}
}

This file was deleted.
