From 242827e8bebf8d132e46eac4bb19e7ba135b3fdc Mon Sep 17 00:00:00 2001
From: wj-chen
Date: Fri, 20 Sep 2024 10:34:33 -0700
Subject: [PATCH] Stop sending external data configuration schema when
 updating google_bigquery_table (#11739)

---
 .../bigquery/resource_bigquery_table.go       |  5 ++++
 .../bigquery/resource_bigquery_table_test.go  | 23 +++++++++++++++----
 2 files changed, 23 insertions(+), 5 deletions(-)

diff --git a/mmv1/third_party/terraform/services/bigquery/resource_bigquery_table.go b/mmv1/third_party/terraform/services/bigquery/resource_bigquery_table.go
index 0afadb925de8..5a3f99a8c3b6 100644
--- a/mmv1/third_party/terraform/services/bigquery/resource_bigquery_table.go
+++ b/mmv1/third_party/terraform/services/bigquery/resource_bigquery_table.go
@@ -1895,6 +1895,11 @@ func resourceBigQueryTableUpdate(d *schema.ResourceData, meta interface{}) error
 		return err
 	}
 
+	if table.ExternalDataConfiguration != nil && table.ExternalDataConfiguration.Schema != nil {
+		log.Printf("[INFO] Removing ExternalDataConfiguration.Schema when updating BigQuery table %s", d.Id())
+		table.ExternalDataConfiguration.Schema = nil
+	}
+
 	log.Printf("[INFO] Updating BigQuery table: %s", d.Id())
 
 	project, err := tpgresource.GetProject(d, config)
diff --git a/mmv1/third_party/terraform/services/bigquery/resource_bigquery_table_test.go b/mmv1/third_party/terraform/services/bigquery/resource_bigquery_table_test.go
index f47b7031258b..3a4f990d6773 100644
--- a/mmv1/third_party/terraform/services/bigquery/resource_bigquery_table_test.go
+++ b/mmv1/third_party/terraform/services/bigquery/resource_bigquery_table_test.go
@@ -228,7 +228,7 @@ func TestAccBigQueryTable_HivePartitioning(t *testing.T) {
 	})
 }
 
-func TestAccBigQueryTable_HivePartitioningCustomSchema(t *testing.T) {
+func TestAccBigQueryTable_HivePartitioningCustomSchema_update(t *testing.T) {
 	t.Parallel()
 	bucketName := acctest.TestBucketName(t)
 	resourceName := "google_bigquery_table.test"
@@ -241,13 +241,22 @@
 		CheckDestroy: testAccCheckBigQueryTableDestroyProducer(t),
 		Steps: []resource.TestStep{
 			{
-				Config: testAccBigQueryTableHivePartitioningCustomSchema(bucketName, datasetID, tableID),
+				Config: testAccBigQueryTableHivePartitioningCustomSchema(bucketName, datasetID, tableID, "old-label"),
 			},
 			{
 				ResourceName:            resourceName,
 				ImportState:             true,
 				ImportStateVerify:       true,
-				ImportStateVerifyIgnore: []string{"external_data_configuration.0.schema", "deletion_protection"},
+				ImportStateVerifyIgnore: []string{"external_data_configuration.0.schema", "labels", "deletion_protection"},
+			},
+			{
+				Config: testAccBigQueryTableHivePartitioningCustomSchema(bucketName, datasetID, tableID, "new-label"),
+			},
+			{
+				ResourceName:            resourceName,
+				ImportState:             true,
+				ImportStateVerify:       true,
+				ImportStateVerifyIgnore: []string{"external_data_configuration.0.schema", "labels", "deletion_protection"},
 			},
 		},
 	})
@@ -2129,7 +2138,7 @@ resource "google_bigquery_table" "test" {
 `, bucketName, datasetID, tableID)
 }
 
-func testAccBigQueryTableHivePartitioningCustomSchema(bucketName, datasetID, tableID string) string {
+func testAccBigQueryTableHivePartitioningCustomSchema(bucketName, datasetID, tableID, tableLabel string) string {
 	return fmt.Sprintf(`
 resource "google_storage_bucket" "test" {
   name          = "%s"
   table_id   = "%s"
   dataset_id = google_bigquery_dataset.test.dataset_id
 
+  labels = {
+    label = "%s"
+  }
+
   external_data_configuration {
     source_format = "NEWLINE_DELIMITED_JSON"
     autodetect    = false
@@ -2178,7 +2191,7 @@ EOH
   }
   depends_on = ["google_storage_bucket_object.test"]
 }
-`, bucketName, datasetID, tableID)
+`, bucketName, datasetID, tableID, tableLabel)
 }
 
 func testAccBigQueryTableAvroPartitioning(bucketName, avroFilePath, datasetID, tableID string) string {