Skip to content

Commit

Permalink
Fix salesforce params specification in google_bigquery_data_transfer_config (#11232)
Browse files Browse the repository at this point in the history
  • Loading branch information
wj-chen authored Jul 29, 2024
1 parent 7cebdc2 commit 57061b9
Show file tree
Hide file tree
Showing 4 changed files with 97 additions and 2 deletions.
7 changes: 7 additions & 0 deletions mmv1/products/bigquerydatatransfer/Config.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -46,6 +46,13 @@ examples:
vars:
display_name: 'my-query'
dataset_id: 'my_dataset'
- !ruby/object:Provider::Terraform::Examples
skip_test: true
name: 'bigquerydatatransfer_config_salesforce'
primary_resource_id: 'salesforce_config'
vars:
display_name: 'my-salesforce-config'
dataset_id: 'my_dataset'
parameters:
- !ruby/object:Api::Type::String
name: 'location'
Expand Down
20 changes: 18 additions & 2 deletions mmv1/templates/terraform/encoders/bigquery_data_transfer.go.erb
Original file line number Diff line number Diff line change
Expand Up @@ -17,8 +17,24 @@ if !ok {
paramMap = make(map[string]string)
}

// Convert the flat map[string]string held in state into the mixed-type map
// the API expects. Values that parse as non-scalar JSON (arrays, objects,
// booleans, null) are sent as their decoded type; everything else stays a
// string. (The previous stale declaration of params as map[string]string
// conflicted with the := redeclaration below and has been removed.)
params := map[string]interface{}{}

for k, v := range paramMap.(map[string]string) {
	var value interface{}
	if err := json.Unmarshal([]byte(v), &value); err != nil {
		// Not valid JSON: pass the raw value through as a plain string.
		params[k] = v
	} else {
		switch value.(type) {
		case float64:
			// Numbers keep their original string representation so the
			// API sees exactly what the user wrote (avoids float64
			// formatting/precision changes for large integers).
			params[k] = v
		default:
			// Arrays, objects, booleans, and null are sent as their
			// unmarshalled JSON type.
			params[k] = value
		}
	}
}

for _, sp := range sensitiveParams {
if auth, _ := d.GetOkExists("sensitive_params.0." + sp); auth != "" {
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,23 @@
# Example: BigQuery Data Transfer config backed by the Salesforce connector.
# ERB placeholders (<%= ... %>) are filled in by the example generator.
data "google_project" "project" {
}

# Destination dataset the Salesforce transfer writes into.
resource "google_bigquery_dataset" "my_dataset" {
dataset_id = "<%= ctx[:vars]['dataset_id'] %>"
description = "My dataset"
location = "asia-northeast1"
}
# Transfer config: connector.* params are Salesforce OAuth/user credentials
# (placeholder values here); "assets" lists the Salesforce objects to copy.
resource "google_bigquery_data_transfer_config" "<%= ctx[:primary_resource_id] %>" {
display_name = "<%= ctx[:vars]['display_name'] %>"
location = "asia-northeast1"
data_source_id = "salesforce"
schedule = "first sunday of quarter 00:00"
destination_dataset_id = google_bigquery_dataset.my_dataset.dataset_id
params = {
"connector.authentication.oauth.clientId" = "client-id"
"connector.authentication.oauth.clientSecret" = "client-secret"
"connector.authentication.username" = "username"
"connector.authentication.password" = "password"
"connector.authentication.securityToken" = "security-token"
"assets" = "[asset-a, asset-b]"
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -301,6 +301,7 @@ func TestAccBigqueryDataTransferConfig(t *testing.T) {
"booleanParam": testAccBigqueryDataTransferConfig_copy_booleanParam,
"update_params": testAccBigqueryDataTransferConfig_force_new_update_params,
"update_service_account": testAccBigqueryDataTransferConfig_scheduledQuery_update_service_account,
"salesforce": testAccBigqueryDataTransferConfig_salesforce_basic,
}

for name, tc := range testCases {
Expand Down Expand Up @@ -570,6 +571,27 @@ func testAccCheckDataTransferServiceAccountNamePrefix(resourceName string, prefi
}
}

// testAccBigqueryDataTransferConfig_salesforce_basic creates a Salesforce
// transfer config and verifies it can be imported back into state.
// "location" is excluded from import verification — presumably it is carried
// in the resource URL rather than the returned body (TODO confirm).
func testAccBigqueryDataTransferConfig_salesforce_basic(t *testing.T) {
	suffix := acctest.RandString(t, 10)

	steps := []resource.TestStep{
		{
			// Apply the Salesforce transfer config.
			Config: testAccBigqueryDataTransferConfig_salesforce(suffix),
		},
		{
			// Re-import and check the state round-trips.
			ResourceName:            "google_bigquery_data_transfer_config.salesforce_config",
			ImportState:             true,
			ImportStateVerify:       true,
			ImportStateVerifyIgnore: []string{"location"},
		},
	}

	acctest.VcrTest(t, resource.TestCase{
		PreCheck:                 func() { acctest.AccTestPreCheck(t) },
		ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t),
		CheckDestroy:             testAccCheckBigqueryDataTransferConfigDestroyProducer(t),
		Steps:                    steps,
	})
}

func testAccBigqueryDataTransferConfig_scheduledQuery(random_suffix, random_suffix2, schedule, start_time, end_time, letter string) string {
return fmt.Sprintf(`
data "google_project" "project" {}
Expand Down Expand Up @@ -810,3 +832,30 @@ resource "google_bigquery_data_transfer_config" "query_config" {
}
`, service_account, service_account, service_account, random_suffix, random_suffix, service_account)
}

func testAccBigqueryDataTransferConfig_salesforce(randomSuffix string) string {
return fmt.Sprintf(`
resource "google_bigquery_dataset" "dataset" {
dataset_id = "tf_test_%s"
friendly_name = "foo"
description = "bar"
location = "US"
}
resource "google_bigquery_data_transfer_config" "salesforce_config" {
display_name = "tf-test-%s"
data_source_id = "salesforce"
destination_dataset_id = google_bigquery_dataset.dataset.dataset_id
location = google_bigquery_dataset.dataset.location
params = {
"connector.authentication.oauth.clientId" = ""
"connector.authentication.oauth.clientSecret" = ""
"connector.authentication.username" = ""
"connector.authentication.password" = ""
"connector.authentication.securityToken" = ""
"assets" = "[asset-a, asset-b]"
}
}
`, randomSuffix, randomSuffix)
}

0 comments on commit 57061b9

Please sign in to comment.