From 188a4b8b9fb39dbbe8de6d387e94ca5bf2e8dd91 Mon Sep 17 00:00:00 2001 From: The Magician Date: Fri, 8 Nov 2024 10:56:49 -0800 Subject: [PATCH] adds support for the Dataproc on GDC SparkApplication resource (#12237) (#20242) [upstream:f954b7c9dab564bf88a97b24f94e9d795889faaf] Signed-off-by: Modular Magician --- .changelog/12237.txt | 3 + google/provider/provider_mmv1_resources.go | 5 +- ...resource_dataproc_gdc_spark_application.go | 1413 +++++++++++++++++ ..._gdc_spark_application_generated_meta.yaml | 5 + ...oc_gdc_spark_application_generated_test.go | 363 +++++ ..._dataproc_gdc_spark_application_sweeper.go | 143 ++ ...taproc_gdc_spark_application.html.markdown | 466 ++++++ 7 files changed, 2396 insertions(+), 2 deletions(-) create mode 100644 .changelog/12237.txt create mode 100644 google/services/dataprocgdc/resource_dataproc_gdc_spark_application.go create mode 100644 google/services/dataprocgdc/resource_dataproc_gdc_spark_application_generated_meta.yaml create mode 100644 google/services/dataprocgdc/resource_dataproc_gdc_spark_application_generated_test.go create mode 100644 google/services/dataprocgdc/resource_dataproc_gdc_spark_application_sweeper.go create mode 100644 website/docs/r/dataproc_gdc_spark_application.html.markdown diff --git a/.changelog/12237.txt b/.changelog/12237.txt new file mode 100644 index 00000000000..b51dae12f3e --- /dev/null +++ b/.changelog/12237.txt @@ -0,0 +1,3 @@ +```release-note:new-resource +`google_dataproc_gdc_spark_application` +``` \ No newline at end of file diff --git a/google/provider/provider_mmv1_resources.go b/google/provider/provider_mmv1_resources.go index 019e7e06545..e9b326ffe2e 100644 --- a/google/provider/provider_mmv1_resources.go +++ b/google/provider/provider_mmv1_resources.go @@ -453,9 +453,9 @@ var handwrittenIAMDatasources = map[string]*schema.Resource{ } // Resources -// Generated resources: 487 +// Generated resources: 488 // Generated IAM resources: 261 -// Total generated resources: 748 +// Total generated resources: 749 var generatedResources = map[string]*schema.Resource{ "google_folder_access_approval_settings": accessapproval.ResourceAccessApprovalFolderSettings(), "google_organization_access_approval_settings": accessapproval.ResourceAccessApprovalOrganizationSettings(), @@ -794,6 +794,7 @@ var generatedResources = map[string]*schema.Resource{ "google_dataproc_batch": dataproc.ResourceDataprocBatch(), "google_dataproc_gdc_application_environment": dataprocgdc.ResourceDataprocGdcApplicationEnvironment(), "google_dataproc_gdc_service_instance": dataprocgdc.ResourceDataprocGdcServiceInstance(), + "google_dataproc_gdc_spark_application": dataprocgdc.ResourceDataprocGdcSparkApplication(), "google_dataproc_metastore_federation": dataprocmetastore.ResourceDataprocMetastoreFederation(), "google_dataproc_metastore_federation_iam_binding": tpgiamresource.ResourceIamBinding(dataprocmetastore.DataprocMetastoreFederationIamSchema, dataprocmetastore.DataprocMetastoreFederationIamUpdaterProducer, dataprocmetastore.DataprocMetastoreFederationIdParseFunc), "google_dataproc_metastore_federation_iam_member": tpgiamresource.ResourceIamMember(dataprocmetastore.DataprocMetastoreFederationIamSchema, dataprocmetastore.DataprocMetastoreFederationIamUpdaterProducer, dataprocmetastore.DataprocMetastoreFederationIdParseFunc), diff --git a/google/services/dataprocgdc/resource_dataproc_gdc_spark_application.go b/google/services/dataprocgdc/resource_dataproc_gdc_spark_application.go new file mode 100644 index 00000000000..ec631873a5b --- /dev/null +++ 
b/google/services/dataprocgdc/resource_dataproc_gdc_spark_application.go @@ -0,0 +1,1413 @@ +// Copyright (c) HashiCorp, Inc. +// SPDX-License-Identifier: MPL-2.0 + +// ---------------------------------------------------------------------------- +// +// *** AUTO GENERATED CODE *** Type: MMv1 *** +// +// ---------------------------------------------------------------------------- +// +// This file is automatically generated by Magic Modules and manual +// changes will be clobbered when the file is regenerated. +// +// Please read more about how to change this file in +// .github/CONTRIBUTING.md. +// +// ---------------------------------------------------------------------------- + +package dataprocgdc + +import ( + "fmt" + "log" + "net/http" + "reflect" + "time" + + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/customdiff" + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" + + "github.com/hashicorp/terraform-provider-google/google/tpgresource" + transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" +) + +func ResourceDataprocGdcSparkApplication() *schema.Resource { + return &schema.Resource{ + Create: resourceDataprocGdcSparkApplicationCreate, + Read: resourceDataprocGdcSparkApplicationRead, + Update: resourceDataprocGdcSparkApplicationUpdate, + Delete: resourceDataprocGdcSparkApplicationDelete, + + Importer: &schema.ResourceImporter{ + State: resourceDataprocGdcSparkApplicationImport, + }, + + Timeouts: &schema.ResourceTimeout{ + Create: schema.DefaultTimeout(20 * time.Minute), + Update: schema.DefaultTimeout(20 * time.Minute), + Delete: schema.DefaultTimeout(20 * time.Minute), + }, + + CustomizeDiff: customdiff.All( + tpgresource.SetLabelsDiff, + tpgresource.SetAnnotationsDiff, + tpgresource.DefaultProviderProject, + ), + + Schema: map[string]*schema.Schema{ + "location": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + Description: `The location of the spark application.`, + }, + "serviceinstance": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + Description: `The id of the service instance to which this spark application belongs.`, + }, + "spark_application_id": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + Description: `The id of the application`, + }, + "annotations": { + Type: schema.TypeMap, + Optional: true, + ForceNew: true, + Description: `The annotations to associate with this application. Annotations may be used to store client information, but are not used by the server. + +**Note**: This field is non-authoritative, and will only manage the annotations present in your configuration. +Please refer to the field 'effective_annotations' for all of the annotations present on the resource.`, + Elem: &schema.Schema{Type: schema.TypeString}, + }, + "application_environment": { + Type: schema.TypeString, + Optional: true, + ForceNew: true, + Description: `An ApplicationEnvironment from which to inherit configuration properties.`, + }, + "dependency_images": { + Type: schema.TypeList, + Optional: true, + ForceNew: true, + Description: `List of container image uris for additional file dependencies. Dependent files are sequentially copied from each image. 
If a file with the same name exists in 2 images then the file from later image is used.`, + Elem: &schema.Schema{ + Type: schema.TypeString, + }, + }, + "display_name": { + Type: schema.TypeString, + Optional: true, + ForceNew: true, + Description: `User-provided human-readable name to be used in user interfaces.`, + }, + "labels": { + Type: schema.TypeMap, + Optional: true, + Description: `The labels to associate with this application. Labels may be used for filtering and billing tracking. + +**Note**: This field is non-authoritative, and will only manage the labels present in your configuration. +Please refer to the field 'effective_labels' for all of the labels present on the resource.`, + Elem: &schema.Schema{Type: schema.TypeString}, + }, + "namespace": { + Type: schema.TypeString, + Optional: true, + ForceNew: true, + Description: `The Kubernetes namespace in which to create the application. This namespace must already exist on the cluster.`, + }, + "properties": { + Type: schema.TypeMap, + Optional: true, + ForceNew: true, + Description: `application-specific properties.`, + Elem: &schema.Schema{Type: schema.TypeString}, + }, + "pyspark_application_config": { + Type: schema.TypeList, + Optional: true, + ForceNew: true, + Description: `Represents the PySparkApplicationConfig.`, + MaxItems: 1, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "main_python_file_uri": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + Description: `The HCFS URI of the main Python file to use as the driver. Must be a .py file.`, + }, + "archive_uris": { + Type: schema.TypeList, + Optional: true, + ForceNew: true, + Description: `HCFS URIs of archives to be extracted into the working directory of each executor. Supported file types: .jar, .tar, .tar.gz, .tgz, and .zip.`, + Elem: &schema.Schema{ + Type: schema.TypeString, + }, + }, + "args": { + Type: schema.TypeList, + Optional: true, + ForceNew: true, + Description: `The arguments to pass to the driver. Do not include arguments, such as '--conf', that can be set as job properties, since a collision may occur that causes an incorrect job submission.`, + Elem: &schema.Schema{ + Type: schema.TypeString, + }, + }, + "file_uris": { + Type: schema.TypeList, + Optional: true, + ForceNew: true, + Description: `HCFS URIs of files to be placed in the working directory of each executor. Useful for naively parallel tasks.`, + Elem: &schema.Schema{ + Type: schema.TypeString, + }, + }, + "jar_file_uris": { + Type: schema.TypeList, + Optional: true, + ForceNew: true, + Description: `HCFS URIs of jar files to add to the CLASSPATHs of the Python driver and tasks.`, + Elem: &schema.Schema{ + Type: schema.TypeString, + }, + }, + "python_file_uris": { + Type: schema.TypeList, + Optional: true, + ForceNew: true, + Description: `HCFS file URIs of Python files to pass to the PySpark framework. 
Supported file types: .py, .egg, and .zip.`, + Elem: &schema.Schema{ + Type: schema.TypeString, + }, + }, + }, + }, + ExactlyOneOf: []string{"pyspark_application_config", "spark_application_config", "spark_sql_application_config", "spark_r_application_config"}, + }, + "spark_application_config": { + Type: schema.TypeList, + Optional: true, + ForceNew: true, + Description: `Represents the SparkApplicationConfig.`, + MaxItems: 1, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "archive_uris": { + Type: schema.TypeList, + Optional: true, + ForceNew: true, + Description: `HCFS URIs of archives to be extracted into the working directory of each executor. Supported file types: '.jar', '.tar', '.tar.gz', '.tgz', and '.zip'.`, + Elem: &schema.Schema{ + Type: schema.TypeString, + }, + }, + "args": { + Type: schema.TypeList, + Optional: true, + ForceNew: true, + Description: `The arguments to pass to the driver. Do not include arguments that can be set as application properties, such as '--conf', since a collision can occur that causes an incorrect application submission.`, + Elem: &schema.Schema{ + Type: schema.TypeString, + }, + }, + "file_uris": { + Type: schema.TypeList, + Optional: true, + ForceNew: true, + Description: `HCFS URIs of files to be placed in the working directory of each executor.`, + Elem: &schema.Schema{ + Type: schema.TypeString, + }, + }, + "jar_file_uris": { + Type: schema.TypeList, + Optional: true, + ForceNew: true, + Description: `HCFS URIs of jar files to add to the classpath of the Spark driver and tasks.`, + Elem: &schema.Schema{ + Type: schema.TypeString, + }, + }, + "main_class": { + Type: schema.TypeString, + Optional: true, + ForceNew: true, + Description: `The name of the driver main class. The jar file that contains the class must be in the classpath or specified in 'jar_file_uris'.`, + }, + "main_jar_file_uri": { + Type: schema.TypeString, + Optional: true, + ForceNew: true, + Description: `The HCFS URI of the jar file that contains the main class.`, + }, + }, + }, + ExactlyOneOf: []string{"pyspark_application_config", "spark_application_config", "spark_sql_application_config", "spark_r_application_config"}, + }, + "spark_r_application_config": { + Type: schema.TypeList, + Optional: true, + ForceNew: true, + Description: `Represents the SparkRApplicationConfig.`, + MaxItems: 1, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "main_r_file_uri": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + Description: `The HCFS URI of the main R file to use as the driver. Must be a .R file.`, + }, + "archive_uris": { + Type: schema.TypeList, + Optional: true, + ForceNew: true, + Description: `HCFS URIs of archives to be extracted into the working directory of each executor. Supported file types: .jar, .tar, .tar.gz, .tgz, and .zip.`, + Elem: &schema.Schema{ + Type: schema.TypeString, + }, + }, + "args": { + Type: schema.TypeList, + Optional: true, + ForceNew: true, + Description: `The arguments to pass to the driver. Do not include arguments, such as '--conf', that can be set as job properties, since a collision may occur that causes an incorrect job submission.`, + Elem: &schema.Schema{ + Type: schema.TypeString, + }, + }, + "file_uris": { + Type: schema.TypeList, + Optional: true, + ForceNew: true, + Description: `HCFS URIs of files to be placed in the working directory of each executor. 
Useful for naively parallel tasks.`, + Elem: &schema.Schema{ + Type: schema.TypeString, + }, + }, + }, + }, + ExactlyOneOf: []string{"pyspark_application_config", "spark_application_config", "spark_sql_application_config", "spark_r_application_config"}, + }, + "spark_sql_application_config": { + Type: schema.TypeList, + Optional: true, + ForceNew: true, + Description: `Represents the SparkSqlApplicationConfig.`, + MaxItems: 1, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "jar_file_uris": { + Type: schema.TypeList, + Optional: true, + ForceNew: true, + Description: `HCFS URIs of jar files to be added to the Spark CLASSPATH.`, + Elem: &schema.Schema{ + Type: schema.TypeString, + }, + }, + "query_file_uri": { + Type: schema.TypeString, + Optional: true, + ForceNew: true, + Description: `The HCFS URI of the script that contains SQL queries.`, + }, + "query_list": { + Type: schema.TypeList, + Optional: true, + ForceNew: true, + Description: `Represents a list of queries.`, + MaxItems: 1, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "queries": { + Type: schema.TypeList, + Required: true, + ForceNew: true, + Description: `The queries to run.`, + Elem: &schema.Schema{ + Type: schema.TypeString, + }, + }, + }, + }, + }, + "script_variables": { + Type: schema.TypeMap, + Optional: true, + ForceNew: true, + Description: `Mapping of query variable names to values (equivalent to the Spark SQL command: SET 'name="value";').`, + Elem: &schema.Schema{Type: schema.TypeString}, + }, + }, + }, + ExactlyOneOf: []string{"pyspark_application_config", "spark_application_config", "spark_sql_application_config", "spark_r_application_config"}, + }, + "version": { + Type: schema.TypeString, + Optional: true, + ForceNew: true, + Description: `The Dataproc version of this application.`, + }, + "create_time": { + Type: schema.TypeString, + Computed: true, + Description: `The timestamp when the resource was created.`, + }, + "effective_annotations": { + Type: schema.TypeMap, + Computed: true, + ForceNew: true, + Description: `All of annotations (key/value pairs) present on the resource in GCP, including the annotations configured through Terraform, other clients and services.`, + Elem: &schema.Schema{Type: schema.TypeString}, + }, + "effective_labels": { + Type: schema.TypeMap, + Computed: true, + ForceNew: true, + Description: `All of labels (key/value pairs) present on the resource in GCP, including the labels configured through Terraform, other clients and services.`, + Elem: &schema.Schema{Type: schema.TypeString}, + }, + "monitoring_endpoint": { + Type: schema.TypeString, + Computed: true, + Description: `URL for a monitoring UI for this application (for eventual Spark PHS/UI support). Out of scope for private GA.`, + }, + "name": { + Type: schema.TypeString, + Computed: true, + Description: `Identifier. The name of the application. Format: projects/{project}/locations/{location}/serviceInstances/{service_instance}/sparkApplications/{application}`, + }, + "output_uri": { + Type: schema.TypeString, + Computed: true, + Description: `An HCFS URI pointing to the location of stdout and stderr of the application. Mainly useful for Pantheon and gcloud. Not in scope for private GA.`, + }, + "reconciling": { + Type: schema.TypeBool, + Computed: true, + Description: `Whether the application is currently reconciling. 
True if the current state of the resource does not match the intended state, and the system is working to reconcile them, whether or not the change was user initiated.`, + }, + "state": { + Type: schema.TypeString, + Computed: true, + Description: `The current state. +Possible values: +* 'STATE_UNSPECIFIED' +* 'PENDING' +* 'RUNNING' +* 'CANCELLING' +* 'CANCELLED' +* 'SUCCEEDED' +* 'FAILED'`, + }, + "state_message": { + Type: schema.TypeString, + Computed: true, + Description: `A message explaining the current state.`, + }, + "terraform_labels": { + Type: schema.TypeMap, + Computed: true, + Description: `The combination of labels configured directly on the resource + and default labels configured on the provider.`, + Elem: &schema.Schema{Type: schema.TypeString}, + }, + "uid": { + Type: schema.TypeString, + Computed: true, + Description: `System generated unique identifier for this application, formatted as UUID4.`, + }, + "update_time": { + Type: schema.TypeString, + Computed: true, + Description: `The timestamp when the resource was most recently updated.`, + }, + "project": { + Type: schema.TypeString, + Optional: true, + Computed: true, + ForceNew: true, + }, + }, + UseJSONNumber: true, + } +} + +func resourceDataprocGdcSparkApplicationCreate(d *schema.ResourceData, meta interface{}) error { + config := meta.(*transport_tpg.Config) + userAgent, err := tpgresource.GenerateUserAgentString(d, config.UserAgent) + if err != nil { + return err + } + + obj := make(map[string]interface{}) + pysparkApplicationConfigProp, err := expandDataprocGdcSparkApplicationPysparkApplicationConfig(d.Get("pyspark_application_config"), d, config) + if err != nil { + return err + } else if v, ok := d.GetOkExists("pyspark_application_config"); !tpgresource.IsEmptyValue(reflect.ValueOf(pysparkApplicationConfigProp)) && (ok || !reflect.DeepEqual(v, pysparkApplicationConfigProp)) { + obj["pysparkApplicationConfig"] = pysparkApplicationConfigProp + } + sparkApplicationConfigProp, err := expandDataprocGdcSparkApplicationSparkApplicationConfig(d.Get("spark_application_config"), d, config) + if err != nil { + return err + } else if v, ok := d.GetOkExists("spark_application_config"); !tpgresource.IsEmptyValue(reflect.ValueOf(sparkApplicationConfigProp)) && (ok || !reflect.DeepEqual(v, sparkApplicationConfigProp)) { + obj["sparkApplicationConfig"] = sparkApplicationConfigProp + } + sparkRApplicationConfigProp, err := expandDataprocGdcSparkApplicationSparkRApplicationConfig(d.Get("spark_r_application_config"), d, config) + if err != nil { + return err + } else if v, ok := d.GetOkExists("spark_r_application_config"); !tpgresource.IsEmptyValue(reflect.ValueOf(sparkRApplicationConfigProp)) && (ok || !reflect.DeepEqual(v, sparkRApplicationConfigProp)) { + obj["sparkRApplicationConfig"] = sparkRApplicationConfigProp + } + sparkSqlApplicationConfigProp, err := expandDataprocGdcSparkApplicationSparkSqlApplicationConfig(d.Get("spark_sql_application_config"), d, config) + if err != nil { + return err + } else if v, ok := d.GetOkExists("spark_sql_application_config"); !tpgresource.IsEmptyValue(reflect.ValueOf(sparkSqlApplicationConfigProp)) && (ok || !reflect.DeepEqual(v, sparkSqlApplicationConfigProp)) { + obj["sparkSqlApplicationConfig"] = sparkSqlApplicationConfigProp + } + displayNameProp, err := expandDataprocGdcSparkApplicationDisplayName(d.Get("display_name"), d, config) + if err != nil { + return err + } else if v, ok := d.GetOkExists("display_name"); !tpgresource.IsEmptyValue(reflect.ValueOf(displayNameProp)) && (ok || 
!reflect.DeepEqual(v, displayNameProp)) { + obj["displayName"] = displayNameProp + } + propertiesProp, err := expandDataprocGdcSparkApplicationProperties(d.Get("properties"), d, config) + if err != nil { + return err + } else if v, ok := d.GetOkExists("properties"); !tpgresource.IsEmptyValue(reflect.ValueOf(propertiesProp)) && (ok || !reflect.DeepEqual(v, propertiesProp)) { + obj["properties"] = propertiesProp + } + versionProp, err := expandDataprocGdcSparkApplicationVersion(d.Get("version"), d, config) + if err != nil { + return err + } else if v, ok := d.GetOkExists("version"); !tpgresource.IsEmptyValue(reflect.ValueOf(versionProp)) && (ok || !reflect.DeepEqual(v, versionProp)) { + obj["version"] = versionProp + } + applicationEnvironmentProp, err := expandDataprocGdcSparkApplicationApplicationEnvironment(d.Get("application_environment"), d, config) + if err != nil { + return err + } else if v, ok := d.GetOkExists("application_environment"); !tpgresource.IsEmptyValue(reflect.ValueOf(applicationEnvironmentProp)) && (ok || !reflect.DeepEqual(v, applicationEnvironmentProp)) { + obj["applicationEnvironment"] = applicationEnvironmentProp + } + namespaceProp, err := expandDataprocGdcSparkApplicationNamespace(d.Get("namespace"), d, config) + if err != nil { + return err + } else if v, ok := d.GetOkExists("namespace"); !tpgresource.IsEmptyValue(reflect.ValueOf(namespaceProp)) && (ok || !reflect.DeepEqual(v, namespaceProp)) { + obj["namespace"] = namespaceProp + } + dependencyImagesProp, err := expandDataprocGdcSparkApplicationDependencyImages(d.Get("dependency_images"), d, config) + if err != nil { + return err + } else if v, ok := d.GetOkExists("dependency_images"); !tpgresource.IsEmptyValue(reflect.ValueOf(dependencyImagesProp)) && (ok || !reflect.DeepEqual(v, dependencyImagesProp)) { + obj["dependencyImages"] = dependencyImagesProp + } + labelsProp, err := expandDataprocGdcSparkApplicationEffectiveLabels(d.Get("effective_labels"), d, config) + if err != nil { + return err + } else if v, ok := d.GetOkExists("effective_labels"); !tpgresource.IsEmptyValue(reflect.ValueOf(labelsProp)) && (ok || !reflect.DeepEqual(v, labelsProp)) { + obj["labels"] = labelsProp + } + annotationsProp, err := expandDataprocGdcSparkApplicationEffectiveAnnotations(d.Get("effective_annotations"), d, config) + if err != nil { + return err + } else if v, ok := d.GetOkExists("effective_annotations"); !tpgresource.IsEmptyValue(reflect.ValueOf(annotationsProp)) && (ok || !reflect.DeepEqual(v, annotationsProp)) { + obj["annotations"] = annotationsProp + } + + url, err := tpgresource.ReplaceVars(d, config, "{{DataprocGdcBasePath}}projects/{{project}}/locations/{{location}}/serviceInstances/{{serviceinstance}}/sparkApplications?sparkApplicationId={{spark_application_id}}") + if err != nil { + return err + } + + log.Printf("[DEBUG] Creating new SparkApplication: %#v", obj) + billingProject := "" + + project, err := tpgresource.GetProject(d, config) + if err != nil { + return fmt.Errorf("Error fetching project for SparkApplication: %s", err) + } + billingProject = project + + // err == nil indicates that the billing_project value was found + if bp, err := tpgresource.GetBillingProject(d, config); err == nil { + billingProject = bp + } + + headers := make(http.Header) + res, err := transport_tpg.SendRequest(transport_tpg.SendRequestOptions{ + Config: config, + Method: "POST", + Project: billingProject, + RawURL: url, + UserAgent: userAgent, + Body: obj, + Timeout: d.Timeout(schema.TimeoutCreate), + Headers: headers, + }) + if err 
!= nil { + return fmt.Errorf("Error creating SparkApplication: %s", err) + } + + // Store the ID now + id, err := tpgresource.ReplaceVars(d, config, "projects/{{project}}/locations/{{location}}/serviceInstances/{{serviceinstance}}/sparkApplications/{{spark_application_id}}") + if err != nil { + return fmt.Errorf("Error constructing id: %s", err) + } + d.SetId(id) + + err = DataprocGdcOperationWaitTime( + config, res, project, "Creating SparkApplication", userAgent, + d.Timeout(schema.TimeoutCreate)) + + if err != nil { + // The resource didn't actually create + d.SetId("") + return fmt.Errorf("Error waiting to create SparkApplication: %s", err) + } + + log.Printf("[DEBUG] Finished creating SparkApplication %q: %#v", d.Id(), res) + + return resourceDataprocGdcSparkApplicationRead(d, meta) +} + +func resourceDataprocGdcSparkApplicationRead(d *schema.ResourceData, meta interface{}) error { + config := meta.(*transport_tpg.Config) + userAgent, err := tpgresource.GenerateUserAgentString(d, config.UserAgent) + if err != nil { + return err + } + + url, err := tpgresource.ReplaceVars(d, config, "{{DataprocGdcBasePath}}projects/{{project}}/locations/{{location}}/serviceInstances/{{serviceinstance}}/sparkApplications/{{spark_application_id}}") + if err != nil { + return err + } + + billingProject := "" + + project, err := tpgresource.GetProject(d, config) + if err != nil { + return fmt.Errorf("Error fetching project for SparkApplication: %s", err) + } + billingProject = project + + // err == nil indicates that the billing_project value was found + if bp, err := tpgresource.GetBillingProject(d, config); err == nil { + billingProject = bp + } + + headers := make(http.Header) + res, err := transport_tpg.SendRequest(transport_tpg.SendRequestOptions{ + Config: config, + Method: "GET", + Project: billingProject, + RawURL: url, + UserAgent: userAgent, + Headers: headers, + }) + if err != nil { + return transport_tpg.HandleNotFoundError(err, d, fmt.Sprintf("DataprocGdcSparkApplication %q", d.Id())) + } + + if err := d.Set("project", project); err != nil { + return fmt.Errorf("Error reading SparkApplication: %s", err) + } + + if err := d.Set("pyspark_application_config", flattenDataprocGdcSparkApplicationPysparkApplicationConfig(res["pysparkApplicationConfig"], d, config)); err != nil { + return fmt.Errorf("Error reading SparkApplication: %s", err) + } + if err := d.Set("spark_application_config", flattenDataprocGdcSparkApplicationSparkApplicationConfig(res["sparkApplicationConfig"], d, config)); err != nil { + return fmt.Errorf("Error reading SparkApplication: %s", err) + } + if err := d.Set("spark_r_application_config", flattenDataprocGdcSparkApplicationSparkRApplicationConfig(res["sparkRApplicationConfig"], d, config)); err != nil { + return fmt.Errorf("Error reading SparkApplication: %s", err) + } + if err := d.Set("spark_sql_application_config", flattenDataprocGdcSparkApplicationSparkSqlApplicationConfig(res["sparkSqlApplicationConfig"], d, config)); err != nil { + return fmt.Errorf("Error reading SparkApplication: %s", err) + } + if err := d.Set("name", flattenDataprocGdcSparkApplicationName(res["name"], d, config)); err != nil { + return fmt.Errorf("Error reading SparkApplication: %s", err) + } + if err := d.Set("uid", flattenDataprocGdcSparkApplicationUid(res["uid"], d, config)); err != nil { + return fmt.Errorf("Error reading SparkApplication: %s", err) + } + if err := d.Set("display_name", flattenDataprocGdcSparkApplicationDisplayName(res["displayName"], d, config)); err != nil { + return 
fmt.Errorf("Error reading SparkApplication: %s", err) + } + if err := d.Set("create_time", flattenDataprocGdcSparkApplicationCreateTime(res["createTime"], d, config)); err != nil { + return fmt.Errorf("Error reading SparkApplication: %s", err) + } + if err := d.Set("update_time", flattenDataprocGdcSparkApplicationUpdateTime(res["updateTime"], d, config)); err != nil { + return fmt.Errorf("Error reading SparkApplication: %s", err) + } + if err := d.Set("state", flattenDataprocGdcSparkApplicationState(res["state"], d, config)); err != nil { + return fmt.Errorf("Error reading SparkApplication: %s", err) + } + if err := d.Set("reconciling", flattenDataprocGdcSparkApplicationReconciling(res["reconciling"], d, config)); err != nil { + return fmt.Errorf("Error reading SparkApplication: %s", err) + } + if err := d.Set("labels", flattenDataprocGdcSparkApplicationLabels(res["labels"], d, config)); err != nil { + return fmt.Errorf("Error reading SparkApplication: %s", err) + } + if err := d.Set("annotations", flattenDataprocGdcSparkApplicationAnnotations(res["annotations"], d, config)); err != nil { + return fmt.Errorf("Error reading SparkApplication: %s", err) + } + if err := d.Set("output_uri", flattenDataprocGdcSparkApplicationOutputUri(res["outputUri"], d, config)); err != nil { + return fmt.Errorf("Error reading SparkApplication: %s", err) + } + if err := d.Set("monitoring_endpoint", flattenDataprocGdcSparkApplicationMonitoringEndpoint(res["monitoringEndpoint"], d, config)); err != nil { + return fmt.Errorf("Error reading SparkApplication: %s", err) + } + if err := d.Set("properties", flattenDataprocGdcSparkApplicationProperties(res["properties"], d, config)); err != nil { + return fmt.Errorf("Error reading SparkApplication: %s", err) + } + if err := d.Set("state_message", flattenDataprocGdcSparkApplicationStateMessage(res["stateMessage"], d, config)); err != nil { + return fmt.Errorf("Error reading SparkApplication: %s", err) + } + if err := d.Set("version", flattenDataprocGdcSparkApplicationVersion(res["version"], d, config)); err != nil { + return fmt.Errorf("Error reading SparkApplication: %s", err) + } + if err := d.Set("application_environment", flattenDataprocGdcSparkApplicationApplicationEnvironment(res["applicationEnvironment"], d, config)); err != nil { + return fmt.Errorf("Error reading SparkApplication: %s", err) + } + if err := d.Set("namespace", flattenDataprocGdcSparkApplicationNamespace(res["namespace"], d, config)); err != nil { + return fmt.Errorf("Error reading SparkApplication: %s", err) + } + if err := d.Set("dependency_images", flattenDataprocGdcSparkApplicationDependencyImages(res["dependencyImages"], d, config)); err != nil { + return fmt.Errorf("Error reading SparkApplication: %s", err) + } + if err := d.Set("terraform_labels", flattenDataprocGdcSparkApplicationTerraformLabels(res["labels"], d, config)); err != nil { + return fmt.Errorf("Error reading SparkApplication: %s", err) + } + if err := d.Set("effective_labels", flattenDataprocGdcSparkApplicationEffectiveLabels(res["labels"], d, config)); err != nil { + return fmt.Errorf("Error reading SparkApplication: %s", err) + } + if err := d.Set("effective_annotations", flattenDataprocGdcSparkApplicationEffectiveAnnotations(res["annotations"], d, config)); err != nil { + return fmt.Errorf("Error reading SparkApplication: %s", err) + } + + return nil +} + +func resourceDataprocGdcSparkApplicationUpdate(d *schema.ResourceData, meta interface{}) error { + // Only the root field "labels" and "terraform_labels" are mutable + 
return resourceDataprocGdcSparkApplicationRead(d, meta) +} + +func resourceDataprocGdcSparkApplicationDelete(d *schema.ResourceData, meta interface{}) error { + config := meta.(*transport_tpg.Config) + userAgent, err := tpgresource.GenerateUserAgentString(d, config.UserAgent) + if err != nil { + return err + } + + billingProject := "" + + project, err := tpgresource.GetProject(d, config) + if err != nil { + return fmt.Errorf("Error fetching project for SparkApplication: %s", err) + } + billingProject = project + + url, err := tpgresource.ReplaceVars(d, config, "{{DataprocGdcBasePath}}projects/{{project}}/locations/{{location}}/serviceInstances/{{serviceinstance}}/sparkApplications/{{spark_application_id}}") + if err != nil { + return err + } + + var obj map[string]interface{} + + // err == nil indicates that the billing_project value was found + if bp, err := tpgresource.GetBillingProject(d, config); err == nil { + billingProject = bp + } + + headers := make(http.Header) + + log.Printf("[DEBUG] Deleting SparkApplication %q", d.Id()) + res, err := transport_tpg.SendRequest(transport_tpg.SendRequestOptions{ + Config: config, + Method: "DELETE", + Project: billingProject, + RawURL: url, + UserAgent: userAgent, + Body: obj, + Timeout: d.Timeout(schema.TimeoutDelete), + Headers: headers, + }) + if err != nil { + return transport_tpg.HandleNotFoundError(err, d, "SparkApplication") + } + + err = DataprocGdcOperationWaitTime( + config, res, project, "Deleting SparkApplication", userAgent, + d.Timeout(schema.TimeoutDelete)) + + if err != nil { + return err + } + + log.Printf("[DEBUG] Finished deleting SparkApplication %q: %#v", d.Id(), res) + return nil +} + +func resourceDataprocGdcSparkApplicationImport(d *schema.ResourceData, meta interface{}) ([]*schema.ResourceData, error) { + config := meta.(*transport_tpg.Config) + if err := tpgresource.ParseImportId([]string{ + "^projects/(?P<project>[^/]+)/locations/(?P<location>[^/]+)/serviceInstances/(?P<serviceinstance>[^/]+)/sparkApplications/(?P<spark_application_id>[^/]+)$", + "^(?P<project>[^/]+)/(?P<location>[^/]+)/(?P<serviceinstance>[^/]+)/(?P<spark_application_id>[^/]+)$", + "^(?P<location>[^/]+)/(?P<serviceinstance>[^/]+)/(?P<spark_application_id>[^/]+)$", + }, d, config); err != nil { + return nil, err + } + + // Replace import id for the resource id + id, err := tpgresource.ReplaceVars(d, config, "projects/{{project}}/locations/{{location}}/serviceInstances/{{serviceinstance}}/sparkApplications/{{spark_application_id}}") + if err != nil { + return nil, fmt.Errorf("Error constructing id: %s", err) + } + d.SetId(id) + + return []*schema.ResourceData{d}, nil +} + +func flattenDataprocGdcSparkApplicationPysparkApplicationConfig(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + if v == nil { + return nil + } + original := v.(map[string]interface{}) + if len(original) == 0 { + return nil + } + transformed := make(map[string]interface{}) + transformed["main_python_file_uri"] = + flattenDataprocGdcSparkApplicationPysparkApplicationConfigMainPythonFileUri(original["mainPythonFileUri"], d, config) + transformed["args"] = + flattenDataprocGdcSparkApplicationPysparkApplicationConfigArgs(original["args"], d, config) + transformed["python_file_uris"] = + flattenDataprocGdcSparkApplicationPysparkApplicationConfigPythonFileUris(original["pythonFileUris"], d, config) + transformed["jar_file_uris"] = + flattenDataprocGdcSparkApplicationPysparkApplicationConfigJarFileUris(original["jarFileUris"], d, config) + transformed["file_uris"] = + flattenDataprocGdcSparkApplicationPysparkApplicationConfigFileUris(original["fileUris"], d, config) + transformed["archive_uris"] = + 
flattenDataprocGdcSparkApplicationPysparkApplicationConfigArchiveUris(original["archiveUris"], d, config) + return []interface{}{transformed} +} +func flattenDataprocGdcSparkApplicationPysparkApplicationConfigMainPythonFileUri(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + return v +} + +func flattenDataprocGdcSparkApplicationPysparkApplicationConfigArgs(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + return v +} + +func flattenDataprocGdcSparkApplicationPysparkApplicationConfigPythonFileUris(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + return v +} + +func flattenDataprocGdcSparkApplicationPysparkApplicationConfigJarFileUris(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + return v +} + +func flattenDataprocGdcSparkApplicationPysparkApplicationConfigFileUris(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + return v +} + +func flattenDataprocGdcSparkApplicationPysparkApplicationConfigArchiveUris(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + return v +} + +func flattenDataprocGdcSparkApplicationSparkApplicationConfig(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + if v == nil { + return nil + } + original := v.(map[string]interface{}) + if len(original) == 0 { + return nil + } + transformed := make(map[string]interface{}) + transformed["main_jar_file_uri"] = + flattenDataprocGdcSparkApplicationSparkApplicationConfigMainJarFileUri(original["mainJarFileUri"], d, config) + transformed["main_class"] = + flattenDataprocGdcSparkApplicationSparkApplicationConfigMainClass(original["mainClass"], d, config) + transformed["args"] = + flattenDataprocGdcSparkApplicationSparkApplicationConfigArgs(original["args"], d, config) + transformed["jar_file_uris"] = + flattenDataprocGdcSparkApplicationSparkApplicationConfigJarFileUris(original["jarFileUris"], d, config) + transformed["file_uris"] = + flattenDataprocGdcSparkApplicationSparkApplicationConfigFileUris(original["fileUris"], d, config) + transformed["archive_uris"] = + flattenDataprocGdcSparkApplicationSparkApplicationConfigArchiveUris(original["archiveUris"], d, config) + return []interface{}{transformed} +} +func flattenDataprocGdcSparkApplicationSparkApplicationConfigMainJarFileUri(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + return v +} + +func flattenDataprocGdcSparkApplicationSparkApplicationConfigMainClass(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + return v +} + +func flattenDataprocGdcSparkApplicationSparkApplicationConfigArgs(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + return v +} + +func flattenDataprocGdcSparkApplicationSparkApplicationConfigJarFileUris(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + return v +} + +func flattenDataprocGdcSparkApplicationSparkApplicationConfigFileUris(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + return v +} + +func flattenDataprocGdcSparkApplicationSparkApplicationConfigArchiveUris(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + return v +} + +func flattenDataprocGdcSparkApplicationSparkRApplicationConfig(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + if v == nil { + return nil + } 
+ original := v.(map[string]interface{}) + if len(original) == 0 { + return nil + } + transformed := make(map[string]interface{}) + transformed["main_r_file_uri"] = + flattenDataprocGdcSparkApplicationSparkRApplicationConfigMainRFileUri(original["mainRFileUri"], d, config) + transformed["args"] = + flattenDataprocGdcSparkApplicationSparkRApplicationConfigArgs(original["args"], d, config) + transformed["file_uris"] = + flattenDataprocGdcSparkApplicationSparkRApplicationConfigFileUris(original["fileUris"], d, config) + transformed["archive_uris"] = + flattenDataprocGdcSparkApplicationSparkRApplicationConfigArchiveUris(original["archiveUris"], d, config) + return []interface{}{transformed} +} +func flattenDataprocGdcSparkApplicationSparkRApplicationConfigMainRFileUri(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + return v +} + +func flattenDataprocGdcSparkApplicationSparkRApplicationConfigArgs(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + return v +} + +func flattenDataprocGdcSparkApplicationSparkRApplicationConfigFileUris(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + return v +} + +func flattenDataprocGdcSparkApplicationSparkRApplicationConfigArchiveUris(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + return v +} + +func flattenDataprocGdcSparkApplicationSparkSqlApplicationConfig(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + if v == nil { + return nil + } + original := v.(map[string]interface{}) + if len(original) == 0 { + return nil + } + transformed := make(map[string]interface{}) + transformed["query_file_uri"] = + flattenDataprocGdcSparkApplicationSparkSqlApplicationConfigQueryFileUri(original["queryFileUri"], d, config) + transformed["query_list"] = + flattenDataprocGdcSparkApplicationSparkSqlApplicationConfigQueryList(original["queryList"], d, config) + transformed["script_variables"] = + flattenDataprocGdcSparkApplicationSparkSqlApplicationConfigScriptVariables(original["scriptVariables"], d, config) + transformed["jar_file_uris"] = + flattenDataprocGdcSparkApplicationSparkSqlApplicationConfigJarFileUris(original["jarFileUris"], d, config) + return []interface{}{transformed} +} +func flattenDataprocGdcSparkApplicationSparkSqlApplicationConfigQueryFileUri(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + return v +} + +func flattenDataprocGdcSparkApplicationSparkSqlApplicationConfigQueryList(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + if v == nil { + return nil + } + original := v.(map[string]interface{}) + if len(original) == 0 { + return nil + } + transformed := make(map[string]interface{}) + transformed["queries"] = + flattenDataprocGdcSparkApplicationSparkSqlApplicationConfigQueryListQueries(original["queries"], d, config) + return []interface{}{transformed} +} +func flattenDataprocGdcSparkApplicationSparkSqlApplicationConfigQueryListQueries(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + return v +} + +func flattenDataprocGdcSparkApplicationSparkSqlApplicationConfigScriptVariables(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + return v +} + +func flattenDataprocGdcSparkApplicationSparkSqlApplicationConfigJarFileUris(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + return v +} + +func 
flattenDataprocGdcSparkApplicationName(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + return v +} + +func flattenDataprocGdcSparkApplicationUid(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + return v +} + +func flattenDataprocGdcSparkApplicationDisplayName(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + return v +} + +func flattenDataprocGdcSparkApplicationCreateTime(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + return v +} + +func flattenDataprocGdcSparkApplicationUpdateTime(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + return v +} + +func flattenDataprocGdcSparkApplicationState(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + return v +} + +func flattenDataprocGdcSparkApplicationReconciling(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + return v +} + +func flattenDataprocGdcSparkApplicationLabels(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + if v == nil { + return v + } + + transformed := make(map[string]interface{}) + if l, ok := d.GetOkExists("labels"); ok { + for k := range l.(map[string]interface{}) { + transformed[k] = v.(map[string]interface{})[k] + } + } + + return transformed +} + +func flattenDataprocGdcSparkApplicationAnnotations(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + if v == nil { + return v + } + + transformed := make(map[string]interface{}) + if l, ok := d.GetOkExists("annotations"); ok { + for k := range l.(map[string]interface{}) { + transformed[k] = v.(map[string]interface{})[k] + } + } + + return transformed +} + +func flattenDataprocGdcSparkApplicationOutputUri(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + return v +} + +func flattenDataprocGdcSparkApplicationMonitoringEndpoint(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + return v +} + +func flattenDataprocGdcSparkApplicationProperties(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + return v +} + +func flattenDataprocGdcSparkApplicationStateMessage(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + return v +} + +func flattenDataprocGdcSparkApplicationVersion(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + return v +} + +func flattenDataprocGdcSparkApplicationApplicationEnvironment(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + return v +} + +func flattenDataprocGdcSparkApplicationNamespace(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + return v +} + +func flattenDataprocGdcSparkApplicationDependencyImages(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + return v +} + +func flattenDataprocGdcSparkApplicationTerraformLabels(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + if v == nil { + return v + } + + transformed := make(map[string]interface{}) + if l, ok := d.GetOkExists("terraform_labels"); ok { + for k := range l.(map[string]interface{}) { + transformed[k] = v.(map[string]interface{})[k] + } + } + + return transformed +} + +func flattenDataprocGdcSparkApplicationEffectiveLabels(v interface{}, d *schema.ResourceData, config 
*transport_tpg.Config) interface{} { + return v +} + +func flattenDataprocGdcSparkApplicationEffectiveAnnotations(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + return v +} + +func expandDataprocGdcSparkApplicationPysparkApplicationConfig(v interface{}, d tpgresource.TerraformResourceData, config *transport_tpg.Config) (interface{}, error) { + l := v.([]interface{}) + if len(l) == 0 || l[0] == nil { + return nil, nil + } + raw := l[0] + original := raw.(map[string]interface{}) + transformed := make(map[string]interface{}) + + transformedMainPythonFileUri, err := expandDataprocGdcSparkApplicationPysparkApplicationConfigMainPythonFileUri(original["main_python_file_uri"], d, config) + if err != nil { + return nil, err + } else if val := reflect.ValueOf(transformedMainPythonFileUri); val.IsValid() && !tpgresource.IsEmptyValue(val) { + transformed["mainPythonFileUri"] = transformedMainPythonFileUri + } + + transformedArgs, err := expandDataprocGdcSparkApplicationPysparkApplicationConfigArgs(original["args"], d, config) + if err != nil { + return nil, err + } else if val := reflect.ValueOf(transformedArgs); val.IsValid() && !tpgresource.IsEmptyValue(val) { + transformed["args"] = transformedArgs + } + + transformedPythonFileUris, err := expandDataprocGdcSparkApplicationPysparkApplicationConfigPythonFileUris(original["python_file_uris"], d, config) + if err != nil { + return nil, err + } else if val := reflect.ValueOf(transformedPythonFileUris); val.IsValid() && !tpgresource.IsEmptyValue(val) { + transformed["pythonFileUris"] = transformedPythonFileUris + } + + transformedJarFileUris, err := expandDataprocGdcSparkApplicationPysparkApplicationConfigJarFileUris(original["jar_file_uris"], d, config) + if err != nil { + return nil, err + } else if val := reflect.ValueOf(transformedJarFileUris); val.IsValid() && !tpgresource.IsEmptyValue(val) { + transformed["jarFileUris"] = transformedJarFileUris + } + + transformedFileUris, err := expandDataprocGdcSparkApplicationPysparkApplicationConfigFileUris(original["file_uris"], d, config) + if err != nil { + return nil, err + } else if val := reflect.ValueOf(transformedFileUris); val.IsValid() && !tpgresource.IsEmptyValue(val) { + transformed["fileUris"] = transformedFileUris + } + + transformedArchiveUris, err := expandDataprocGdcSparkApplicationPysparkApplicationConfigArchiveUris(original["archive_uris"], d, config) + if err != nil { + return nil, err + } else if val := reflect.ValueOf(transformedArchiveUris); val.IsValid() && !tpgresource.IsEmptyValue(val) { + transformed["archiveUris"] = transformedArchiveUris + } + + return transformed, nil +} + +func expandDataprocGdcSparkApplicationPysparkApplicationConfigMainPythonFileUri(v interface{}, d tpgresource.TerraformResourceData, config *transport_tpg.Config) (interface{}, error) { + return v, nil +} + +func expandDataprocGdcSparkApplicationPysparkApplicationConfigArgs(v interface{}, d tpgresource.TerraformResourceData, config *transport_tpg.Config) (interface{}, error) { + return v, nil +} + +func expandDataprocGdcSparkApplicationPysparkApplicationConfigPythonFileUris(v interface{}, d tpgresource.TerraformResourceData, config *transport_tpg.Config) (interface{}, error) { + return v, nil +} + +func expandDataprocGdcSparkApplicationPysparkApplicationConfigJarFileUris(v interface{}, d tpgresource.TerraformResourceData, config *transport_tpg.Config) (interface{}, error) { + return v, nil +} + +func expandDataprocGdcSparkApplicationPysparkApplicationConfigFileUris(v 
interface{}, d tpgresource.TerraformResourceData, config *transport_tpg.Config) (interface{}, error) { + return v, nil +} + +func expandDataprocGdcSparkApplicationPysparkApplicationConfigArchiveUris(v interface{}, d tpgresource.TerraformResourceData, config *transport_tpg.Config) (interface{}, error) { + return v, nil +} + +func expandDataprocGdcSparkApplicationSparkApplicationConfig(v interface{}, d tpgresource.TerraformResourceData, config *transport_tpg.Config) (interface{}, error) { + l := v.([]interface{}) + if len(l) == 0 || l[0] == nil { + return nil, nil + } + raw := l[0] + original := raw.(map[string]interface{}) + transformed := make(map[string]interface{}) + + transformedMainJarFileUri, err := expandDataprocGdcSparkApplicationSparkApplicationConfigMainJarFileUri(original["main_jar_file_uri"], d, config) + if err != nil { + return nil, err + } else if val := reflect.ValueOf(transformedMainJarFileUri); val.IsValid() && !tpgresource.IsEmptyValue(val) { + transformed["mainJarFileUri"] = transformedMainJarFileUri + } + + transformedMainClass, err := expandDataprocGdcSparkApplicationSparkApplicationConfigMainClass(original["main_class"], d, config) + if err != nil { + return nil, err + } else if val := reflect.ValueOf(transformedMainClass); val.IsValid() && !tpgresource.IsEmptyValue(val) { + transformed["mainClass"] = transformedMainClass + } + + transformedArgs, err := expandDataprocGdcSparkApplicationSparkApplicationConfigArgs(original["args"], d, config) + if err != nil { + return nil, err + } else if val := reflect.ValueOf(transformedArgs); val.IsValid() && !tpgresource.IsEmptyValue(val) { + transformed["args"] = transformedArgs + } + + transformedJarFileUris, err := expandDataprocGdcSparkApplicationSparkApplicationConfigJarFileUris(original["jar_file_uris"], d, config) + if err != nil { + return nil, err + } else if val := reflect.ValueOf(transformedJarFileUris); val.IsValid() && !tpgresource.IsEmptyValue(val) { + transformed["jarFileUris"] = transformedJarFileUris + } + + transformedFileUris, err := expandDataprocGdcSparkApplicationSparkApplicationConfigFileUris(original["file_uris"], d, config) + if err != nil { + return nil, err + } else if val := reflect.ValueOf(transformedFileUris); val.IsValid() && !tpgresource.IsEmptyValue(val) { + transformed["fileUris"] = transformedFileUris + } + + transformedArchiveUris, err := expandDataprocGdcSparkApplicationSparkApplicationConfigArchiveUris(original["archive_uris"], d, config) + if err != nil { + return nil, err + } else if val := reflect.ValueOf(transformedArchiveUris); val.IsValid() && !tpgresource.IsEmptyValue(val) { + transformed["archiveUris"] = transformedArchiveUris + } + + return transformed, nil +} + +func expandDataprocGdcSparkApplicationSparkApplicationConfigMainJarFileUri(v interface{}, d tpgresource.TerraformResourceData, config *transport_tpg.Config) (interface{}, error) { + return v, nil +} + +func expandDataprocGdcSparkApplicationSparkApplicationConfigMainClass(v interface{}, d tpgresource.TerraformResourceData, config *transport_tpg.Config) (interface{}, error) { + return v, nil +} + +func expandDataprocGdcSparkApplicationSparkApplicationConfigArgs(v interface{}, d tpgresource.TerraformResourceData, config *transport_tpg.Config) (interface{}, error) { + return v, nil +} + +func expandDataprocGdcSparkApplicationSparkApplicationConfigJarFileUris(v interface{}, d tpgresource.TerraformResourceData, config *transport_tpg.Config) (interface{}, error) { + return v, nil +} + +func 
expandDataprocGdcSparkApplicationSparkApplicationConfigFileUris(v interface{}, d tpgresource.TerraformResourceData, config *transport_tpg.Config) (interface{}, error) { + return v, nil +} + +func expandDataprocGdcSparkApplicationSparkApplicationConfigArchiveUris(v interface{}, d tpgresource.TerraformResourceData, config *transport_tpg.Config) (interface{}, error) { + return v, nil +} + +func expandDataprocGdcSparkApplicationSparkRApplicationConfig(v interface{}, d tpgresource.TerraformResourceData, config *transport_tpg.Config) (interface{}, error) { + l := v.([]interface{}) + if len(l) == 0 || l[0] == nil { + return nil, nil + } + raw := l[0] + original := raw.(map[string]interface{}) + transformed := make(map[string]interface{}) + + transformedMainRFileUri, err := expandDataprocGdcSparkApplicationSparkRApplicationConfigMainRFileUri(original["main_r_file_uri"], d, config) + if err != nil { + return nil, err + } else if val := reflect.ValueOf(transformedMainRFileUri); val.IsValid() && !tpgresource.IsEmptyValue(val) { + transformed["mainRFileUri"] = transformedMainRFileUri + } + + transformedArgs, err := expandDataprocGdcSparkApplicationSparkRApplicationConfigArgs(original["args"], d, config) + if err != nil { + return nil, err + } else if val := reflect.ValueOf(transformedArgs); val.IsValid() && !tpgresource.IsEmptyValue(val) { + transformed["args"] = transformedArgs + } + + transformedFileUris, err := expandDataprocGdcSparkApplicationSparkRApplicationConfigFileUris(original["file_uris"], d, config) + if err != nil { + return nil, err + } else if val := reflect.ValueOf(transformedFileUris); val.IsValid() && !tpgresource.IsEmptyValue(val) { + transformed["fileUris"] = transformedFileUris + } + + transformedArchiveUris, err := expandDataprocGdcSparkApplicationSparkRApplicationConfigArchiveUris(original["archive_uris"], d, config) + if err != nil { + return nil, err + } else if val := reflect.ValueOf(transformedArchiveUris); val.IsValid() && !tpgresource.IsEmptyValue(val) { + transformed["archiveUris"] = transformedArchiveUris + } + + return transformed, nil +} + +func expandDataprocGdcSparkApplicationSparkRApplicationConfigMainRFileUri(v interface{}, d tpgresource.TerraformResourceData, config *transport_tpg.Config) (interface{}, error) { + return v, nil +} + +func expandDataprocGdcSparkApplicationSparkRApplicationConfigArgs(v interface{}, d tpgresource.TerraformResourceData, config *transport_tpg.Config) (interface{}, error) { + return v, nil +} + +func expandDataprocGdcSparkApplicationSparkRApplicationConfigFileUris(v interface{}, d tpgresource.TerraformResourceData, config *transport_tpg.Config) (interface{}, error) { + return v, nil +} + +func expandDataprocGdcSparkApplicationSparkRApplicationConfigArchiveUris(v interface{}, d tpgresource.TerraformResourceData, config *transport_tpg.Config) (interface{}, error) { + return v, nil +} + +func expandDataprocGdcSparkApplicationSparkSqlApplicationConfig(v interface{}, d tpgresource.TerraformResourceData, config *transport_tpg.Config) (interface{}, error) { + l := v.([]interface{}) + if len(l) == 0 || l[0] == nil { + return nil, nil + } + raw := l[0] + original := raw.(map[string]interface{}) + transformed := make(map[string]interface{}) + + transformedQueryFileUri, err := expandDataprocGdcSparkApplicationSparkSqlApplicationConfigQueryFileUri(original["query_file_uri"], d, config) + if err != nil { + return nil, err + } else if val := reflect.ValueOf(transformedQueryFileUri); val.IsValid() && !tpgresource.IsEmptyValue(val) { + 
transformed["queryFileUri"] = transformedQueryFileUri + } + + transformedQueryList, err := expandDataprocGdcSparkApplicationSparkSqlApplicationConfigQueryList(original["query_list"], d, config) + if err != nil { + return nil, err + } else if val := reflect.ValueOf(transformedQueryList); val.IsValid() && !tpgresource.IsEmptyValue(val) { + transformed["queryList"] = transformedQueryList + } + + transformedScriptVariables, err := expandDataprocGdcSparkApplicationSparkSqlApplicationConfigScriptVariables(original["script_variables"], d, config) + if err != nil { + return nil, err + } else if val := reflect.ValueOf(transformedScriptVariables); val.IsValid() && !tpgresource.IsEmptyValue(val) { + transformed["scriptVariables"] = transformedScriptVariables + } + + transformedJarFileUris, err := expandDataprocGdcSparkApplicationSparkSqlApplicationConfigJarFileUris(original["jar_file_uris"], d, config) + if err != nil { + return nil, err + } else if val := reflect.ValueOf(transformedJarFileUris); val.IsValid() && !tpgresource.IsEmptyValue(val) { + transformed["jarFileUris"] = transformedJarFileUris + } + + return transformed, nil +} + +func expandDataprocGdcSparkApplicationSparkSqlApplicationConfigQueryFileUri(v interface{}, d tpgresource.TerraformResourceData, config *transport_tpg.Config) (interface{}, error) { + return v, nil +} + +func expandDataprocGdcSparkApplicationSparkSqlApplicationConfigQueryList(v interface{}, d tpgresource.TerraformResourceData, config *transport_tpg.Config) (interface{}, error) { + l := v.([]interface{}) + if len(l) == 0 || l[0] == nil { + return nil, nil + } + raw := l[0] + original := raw.(map[string]interface{}) + transformed := make(map[string]interface{}) + + transformedQueries, err := expandDataprocGdcSparkApplicationSparkSqlApplicationConfigQueryListQueries(original["queries"], d, config) + if err != nil { + return nil, err + } else if val := reflect.ValueOf(transformedQueries); val.IsValid() && !tpgresource.IsEmptyValue(val) { + transformed["queries"] = transformedQueries + } + + return transformed, nil +} + +func expandDataprocGdcSparkApplicationSparkSqlApplicationConfigQueryListQueries(v interface{}, d tpgresource.TerraformResourceData, config *transport_tpg.Config) (interface{}, error) { + return v, nil +} + +func expandDataprocGdcSparkApplicationSparkSqlApplicationConfigScriptVariables(v interface{}, d tpgresource.TerraformResourceData, config *transport_tpg.Config) (map[string]string, error) { + if v == nil { + return map[string]string{}, nil + } + m := make(map[string]string) + for k, val := range v.(map[string]interface{}) { + m[k] = val.(string) + } + return m, nil +} + +func expandDataprocGdcSparkApplicationSparkSqlApplicationConfigJarFileUris(v interface{}, d tpgresource.TerraformResourceData, config *transport_tpg.Config) (interface{}, error) { + return v, nil +} + +func expandDataprocGdcSparkApplicationDisplayName(v interface{}, d tpgresource.TerraformResourceData, config *transport_tpg.Config) (interface{}, error) { + return v, nil +} + +func expandDataprocGdcSparkApplicationProperties(v interface{}, d tpgresource.TerraformResourceData, config *transport_tpg.Config) (map[string]string, error) { + if v == nil { + return map[string]string{}, nil + } + m := make(map[string]string) + for k, val := range v.(map[string]interface{}) { + m[k] = val.(string) + } + return m, nil +} + +func expandDataprocGdcSparkApplicationVersion(v interface{}, d tpgresource.TerraformResourceData, config *transport_tpg.Config) (interface{}, error) { + return v, nil +} + +func 
expandDataprocGdcSparkApplicationApplicationEnvironment(v interface{}, d tpgresource.TerraformResourceData, config *transport_tpg.Config) (interface{}, error) { + return v, nil +} + +func expandDataprocGdcSparkApplicationNamespace(v interface{}, d tpgresource.TerraformResourceData, config *transport_tpg.Config) (interface{}, error) { + return v, nil +} + +func expandDataprocGdcSparkApplicationDependencyImages(v interface{}, d tpgresource.TerraformResourceData, config *transport_tpg.Config) (interface{}, error) { + return v, nil +} + +func expandDataprocGdcSparkApplicationEffectiveLabels(v interface{}, d tpgresource.TerraformResourceData, config *transport_tpg.Config) (map[string]string, error) { + if v == nil { + return map[string]string{}, nil + } + m := make(map[string]string) + for k, val := range v.(map[string]interface{}) { + m[k] = val.(string) + } + return m, nil +} + +func expandDataprocGdcSparkApplicationEffectiveAnnotations(v interface{}, d tpgresource.TerraformResourceData, config *transport_tpg.Config) (map[string]string, error) { + if v == nil { + return map[string]string{}, nil + } + m := make(map[string]string) + for k, val := range v.(map[string]interface{}) { + m[k] = val.(string) + } + return m, nil +} diff --git a/google/services/dataprocgdc/resource_dataproc_gdc_spark_application_generated_meta.yaml b/google/services/dataprocgdc/resource_dataproc_gdc_spark_application_generated_meta.yaml new file mode 100644 index 00000000000..7584c1c2a85 --- /dev/null +++ b/google/services/dataprocgdc/resource_dataproc_gdc_spark_application_generated_meta.yaml @@ -0,0 +1,5 @@ +resource: 'google_dataproc_gdc_spark_application' +generation_type: 'mmv1' +api_service_name: 'dataprocgdc.googleapis.com' +api_version: 'v1' +api_resource_type_kind: 'SparkApplication' diff --git a/google/services/dataprocgdc/resource_dataproc_gdc_spark_application_generated_test.go b/google/services/dataprocgdc/resource_dataproc_gdc_spark_application_generated_test.go new file mode 100644 index 00000000000..1711e070caa --- /dev/null +++ b/google/services/dataprocgdc/resource_dataproc_gdc_spark_application_generated_test.go @@ -0,0 +1,363 @@ +// Copyright (c) HashiCorp, Inc. +// SPDX-License-Identifier: MPL-2.0 + +// ---------------------------------------------------------------------------- +// +// *** AUTO GENERATED CODE *** Type: MMv1 *** +// +// ---------------------------------------------------------------------------- +// +// This file is automatically generated by Magic Modules and manual +// changes will be clobbered when the file is regenerated. +// +// Please read more about how to change this file in +// .github/CONTRIBUTING.md. 
+// +// ---------------------------------------------------------------------------- + +package dataprocgdc_test + +import ( + "fmt" + "strings" + "testing" + + "github.com/hashicorp/terraform-plugin-testing/helper/resource" + "github.com/hashicorp/terraform-plugin-testing/terraform" + + "github.com/hashicorp/terraform-provider-google/google/acctest" + "github.com/hashicorp/terraform-provider-google/google/tpgresource" + transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" +) + +func TestAccDataprocGdcSparkApplication_dataprocgdcSparkapplicationBasicExample(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "project": "gdce-cluster-monitoring", + "random_suffix": acctest.RandString(t, 10), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckDataprocGdcSparkApplicationDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccDataprocGdcSparkApplication_dataprocgdcSparkapplicationBasicExample(context), + }, + { + ResourceName: "google_dataproc_gdc_spark_application.spark-application", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"annotations", "labels", "location", "serviceinstance", "spark_application_id", "terraform_labels"}, + }, + }, + }) +} + +func testAccDataprocGdcSparkApplication_dataprocgdcSparkapplicationBasicExample(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_dataproc_gdc_spark_application" "spark-application" { + spark_application_id = "tf-test-tf-e2e-spark-app-basic%{random_suffix}" + serviceinstance = "do-not-delete-dataproc-gdc-instance" + project = "%{project}" + location = "us-west2" + namespace = "default" + spark_application_config { + main_class = "org.apache.spark.examples.SparkPi" + jar_file_uris = ["file:///usr/lib/spark/examples/jars/spark-examples.jar"] + args = ["10000"] + } +} +`, context) +} + +func TestAccDataprocGdcSparkApplication_dataprocgdcSparkapplicationExample(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "project": "gdce-cluster-monitoring", + "random_suffix": acctest.RandString(t, 10), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckDataprocGdcSparkApplicationDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccDataprocGdcSparkApplication_dataprocgdcSparkapplicationExample(context), + }, + { + ResourceName: "google_dataproc_gdc_spark_application.spark-application", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"annotations", "labels", "location", "serviceinstance", "spark_application_id", "terraform_labels"}, + }, + }, + }) +} + +func testAccDataprocGdcSparkApplication_dataprocgdcSparkapplicationExample(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_dataproc_gdc_application_environment" "app_env" { + application_environment_id = "tf-test-tf-e2e-spark-app-env%{random_suffix}" + serviceinstance = "do-not-delete-dataproc-gdc-instance" + project = "%{project}" + location = "us-west2" + namespace = "default" +} + +resource "google_dataproc_gdc_spark_application" "spark-application" { + spark_application_id = "tf-test-tf-e2e-spark-app%{random_suffix}" + serviceinstance = "do-not-delete-dataproc-gdc-instance" + project = 
"%{project}" + location = "us-west2" + namespace = "default" + labels = { + "test-label": "label-value" + } + annotations = { + "an_annotation": "annotation_value" + } + properties = { + "spark.executor.instances": "2" + } + application_environment = google_dataproc_gdc_application_environment.app_env.name + version = "1.2" + spark_application_config { + main_jar_file_uri = "file:///usr/lib/spark/examples/jars/spark-examples.jar" + jar_file_uris = ["file:///usr/lib/spark/examples/jars/spark-examples.jar"] + archive_uris = ["file://usr/lib/spark/examples/spark-examples.jar"] + file_uris = ["file:///usr/lib/spark/examples/jars/spark-examples.jar"] + } +} +`, context) +} + +func TestAccDataprocGdcSparkApplication_dataprocgdcSparkapplicationPysparkExample(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "project": "gdce-cluster-monitoring", + "random_suffix": acctest.RandString(t, 10), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckDataprocGdcSparkApplicationDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccDataprocGdcSparkApplication_dataprocgdcSparkapplicationPysparkExample(context), + }, + { + ResourceName: "google_dataproc_gdc_spark_application.spark-application", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"annotations", "labels", "location", "serviceinstance", "spark_application_id", "terraform_labels"}, + }, + }, + }) +} + +func testAccDataprocGdcSparkApplication_dataprocgdcSparkapplicationPysparkExample(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_dataproc_gdc_spark_application" "spark-application" { + spark_application_id = "tf-test-tf-e2e-pyspark-app%{random_suffix}" + serviceinstance = "do-not-delete-dataproc-gdc-instance" + project = "%{project}" + location = "us-west2" + namespace = "default" + display_name = "A Pyspark application for a Terraform create test" + dependency_images = ["gcr.io/some/image"] + pyspark_application_config { + main_python_file_uri = "gs://goog-dataproc-initialization-actions-us-west2/conda/test_conda.py" + jar_file_uris = ["file:///usr/lib/spark/examples/jars/spark-examples.jar"] + python_file_uris = ["gs://goog-dataproc-initialization-actions-us-west2/conda/get-sys-exec.py"] + file_uris = ["file://usr/lib/spark/examples/spark-examples.jar"] + archive_uris = ["file://usr/lib/spark/examples/spark-examples.jar"] + args = ["10"] + } +} +`, context) +} + +func TestAccDataprocGdcSparkApplication_dataprocgdcSparkapplicationSparkrExample(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "project": "gdce-cluster-monitoring", + "random_suffix": acctest.RandString(t, 10), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckDataprocGdcSparkApplicationDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccDataprocGdcSparkApplication_dataprocgdcSparkapplicationSparkrExample(context), + }, + { + ResourceName: "google_dataproc_gdc_spark_application.spark-application", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"annotations", "labels", "location", "serviceinstance", "spark_application_id", "terraform_labels"}, + }, + }, + }) +} + +func 
testAccDataprocGdcSparkApplication_dataprocgdcSparkapplicationSparkrExample(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_dataproc_gdc_spark_application" "spark-application" { + spark_application_id = "tf-test-tf-e2e-sparkr-app%{random_suffix}" + serviceinstance = "do-not-delete-dataproc-gdc-instance" + project = "%{project}" + location = "us-west2" + namespace = "default" + display_name = "A SparkR application for a Terraform create test" + spark_r_application_config { + main_r_file_uri = "gs://some-bucket/something.R" + file_uris = ["file://usr/lib/spark/examples/spark-examples.jar"] + archive_uris = ["file://usr/lib/spark/examples/spark-examples.jar"] + args = ["10"] + } +} +`, context) +} + +func TestAccDataprocGdcSparkApplication_dataprocgdcSparkapplicationSparksqlExample(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "project": "gdce-cluster-monitoring", + "random_suffix": acctest.RandString(t, 10), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckDataprocGdcSparkApplicationDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccDataprocGdcSparkApplication_dataprocgdcSparkapplicationSparksqlExample(context), + }, + { + ResourceName: "google_dataproc_gdc_spark_application.spark-application", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"annotations", "labels", "location", "serviceinstance", "spark_application_id", "terraform_labels"}, + }, + }, + }) +} + +func testAccDataprocGdcSparkApplication_dataprocgdcSparkapplicationSparksqlExample(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_dataproc_gdc_spark_application" "spark-application" { + spark_application_id = "tf-test-tf-e2e-sparksql-app%{random_suffix}" + serviceinstance = "do-not-delete-dataproc-gdc-instance" + project = "%{project}" + location = "us-west2" + namespace = "default" + display_name = "A SparkSql application for a Terraform create test" + spark_sql_application_config { + jar_file_uris = ["file:///usr/lib/spark/examples/jars/spark-examples.jar"] + query_list { + queries = ["show tables;"] + } + script_variables = { + "MY_VAR": "1" + } + } +} +`, context) +} + +func TestAccDataprocGdcSparkApplication_dataprocgdcSparkapplicationSparksqlQueryFileExample(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "project": "gdce-cluster-monitoring", + "random_suffix": acctest.RandString(t, 10), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckDataprocGdcSparkApplicationDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccDataprocGdcSparkApplication_dataprocgdcSparkapplicationSparksqlQueryFileExample(context), + }, + { + ResourceName: "google_dataproc_gdc_spark_application.spark-application", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"annotations", "labels", "location", "serviceinstance", "spark_application_id", "terraform_labels"}, + }, + }, + }) +} + +func testAccDataprocGdcSparkApplication_dataprocgdcSparkapplicationSparksqlQueryFileExample(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_dataproc_gdc_spark_application" "spark-application" { + spark_application_id = 
"tf-test-tf-e2e-sparksql-app%{random_suffix}" + serviceinstance = "do-not-delete-dataproc-gdc-instance" + project = "%{project}" + location = "us-west2" + namespace = "default" + display_name = "A SparkSql application for a Terraform create test" + spark_sql_application_config { + jar_file_uris = ["file:///usr/lib/spark/examples/jars/spark-examples.jar"] + query_file_uri = "gs://some-bucket/something.sql" + script_variables = { + "MY_VAR": "1" + } + } +} +`, context) +} + +func testAccCheckDataprocGdcSparkApplicationDestroyProducer(t *testing.T) func(s *terraform.State) error { + return func(s *terraform.State) error { + for name, rs := range s.RootModule().Resources { + if rs.Type != "google_dataproc_gdc_spark_application" { + continue + } + if strings.HasPrefix(name, "data.") { + continue + } + + config := acctest.GoogleProviderConfig(t) + + url, err := tpgresource.ReplaceVarsForTest(config, rs, "{{DataprocGdcBasePath}}projects/{{project}}/locations/{{location}}/serviceInstances/{{serviceinstance}}/sparkApplications/{{spark_application_id}}") + if err != nil { + return err + } + + billingProject := "" + + if config.BillingProject != "" { + billingProject = config.BillingProject + } + + _, err = transport_tpg.SendRequest(transport_tpg.SendRequestOptions{ + Config: config, + Method: "GET", + Project: billingProject, + RawURL: url, + UserAgent: config.UserAgent, + }) + if err == nil { + return fmt.Errorf("DataprocGdcSparkApplication still exists at %s", url) + } + } + + return nil + } +} diff --git a/google/services/dataprocgdc/resource_dataproc_gdc_spark_application_sweeper.go b/google/services/dataprocgdc/resource_dataproc_gdc_spark_application_sweeper.go new file mode 100644 index 00000000000..fbf4db49579 --- /dev/null +++ b/google/services/dataprocgdc/resource_dataproc_gdc_spark_application_sweeper.go @@ -0,0 +1,143 @@ +// Copyright (c) HashiCorp, Inc. +// SPDX-License-Identifier: MPL-2.0 + +// ---------------------------------------------------------------------------- +// +// *** AUTO GENERATED CODE *** Type: MMv1 *** +// +// ---------------------------------------------------------------------------- +// +// This file is automatically generated by Magic Modules and manual +// changes will be clobbered when the file is regenerated. +// +// Please read more about how to change this file in +// .github/CONTRIBUTING.md. 
+// +// ---------------------------------------------------------------------------- + +package dataprocgdc + +import ( + "context" + "log" + "strings" + "testing" + + "github.com/hashicorp/terraform-provider-google/google/envvar" + "github.com/hashicorp/terraform-provider-google/google/sweeper" + "github.com/hashicorp/terraform-provider-google/google/tpgresource" + transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" +) + +func init() { + sweeper.AddTestSweepers("DataprocGdcSparkApplication", testSweepDataprocGdcSparkApplication) +} + +// At the time of writing, the CI only passes us-central1 as the region +func testSweepDataprocGdcSparkApplication(region string) error { + resourceName := "DataprocGdcSparkApplication" + log.Printf("[INFO][SWEEPER_LOG] Starting sweeper for %s", resourceName) + + config, err := sweeper.SharedConfigForRegion(region) + if err != nil { + log.Printf("[INFO][SWEEPER_LOG] error getting shared config for region: %s", err) + return err + } + + err = config.LoadAndValidate(context.Background()) + if err != nil { + log.Printf("[INFO][SWEEPER_LOG] error loading: %s", err) + return err + } + + t := &testing.T{} + billingId := envvar.GetTestBillingAccountFromEnv(t) + + // Setup variables to replace in list template + d := &tpgresource.ResourceDataMock{ + FieldsInSchema: map[string]interface{}{ + "project": config.Project, + "region": region, + "location": region, + "zone": "-", + "billing_account": billingId, + }, + } + + listTemplate := strings.Split("https://dataprocgdc.googleapis.com/v1/projects/{{project}}/locations/{{location}}/serviceInstances/{{serviceinstance}}/sparkApplications", "?")[0] + listUrl, err := tpgresource.ReplaceVars(d, config, listTemplate) + if err != nil { + log.Printf("[INFO][SWEEPER_LOG] error preparing sweeper list url: %s", err) + return nil + } + + res, err := transport_tpg.SendRequest(transport_tpg.SendRequestOptions{ + Config: config, + Method: "GET", + Project: config.Project, + RawURL: listUrl, + UserAgent: config.UserAgent, + }) + if err != nil { + log.Printf("[INFO][SWEEPER_LOG] Error in response from request %s: %s", listUrl, err) + return nil + } + + resourceList, ok := res["sparkApplications"] + if !ok { + log.Printf("[INFO][SWEEPER_LOG] Nothing found in response.") + return nil + } + + rl := resourceList.([]interface{}) + + log.Printf("[INFO][SWEEPER_LOG] Found %d items in %s list response.", len(rl), resourceName) + // Keep count of items that aren't sweepable for logging. + nonPrefixCount := 0 + for _, ri := range rl { + obj := ri.(map[string]interface{}) + var name string + // Id detected in the delete URL, attempt to use id. 
+ if obj["id"] != nil { + name = tpgresource.GetResourceNameFromSelfLink(obj["id"].(string)) + } else if obj["name"] != nil { + name = tpgresource.GetResourceNameFromSelfLink(obj["name"].(string)) + } else { + log.Printf("[INFO][SWEEPER_LOG] %s resource name and id were nil", resourceName) + return nil + } + // Skip resources that shouldn't be sweeped + if !sweeper.IsSweepableTestResource(name) { + nonPrefixCount++ + continue + } + + deleteTemplate := "https://dataprocgdc.googleapis.com/v1/projects/{{project}}/locations/{{location}}/serviceInstances/{{serviceinstance}}/sparkApplications/{{spark_application_id}}" + deleteUrl, err := tpgresource.ReplaceVars(d, config, deleteTemplate) + if err != nil { + log.Printf("[INFO][SWEEPER_LOG] error preparing delete url: %s", err) + return nil + } + deleteUrl = deleteUrl + name + + // Don't wait on operations as we may have a lot to delete + _, err = transport_tpg.SendRequest(transport_tpg.SendRequestOptions{ + Config: config, + Method: "DELETE", + Project: config.Project, + RawURL: deleteUrl, + UserAgent: config.UserAgent, + }) + if err != nil { + log.Printf("[INFO][SWEEPER_LOG] Error deleting for url %s : %s", deleteUrl, err) + } else { + log.Printf("[INFO][SWEEPER_LOG] Sent delete request for %s resource: %s", resourceName, name) + } + } + + if nonPrefixCount > 0 { + log.Printf("[INFO][SWEEPER_LOG] %d items were non-sweepable and skipped.", nonPrefixCount) + } + + return nil +} diff --git a/website/docs/r/dataproc_gdc_spark_application.html.markdown b/website/docs/r/dataproc_gdc_spark_application.html.markdown new file mode 100644 index 00000000000..57ad62dfd2c --- /dev/null +++ b/website/docs/r/dataproc_gdc_spark_application.html.markdown @@ -0,0 +1,466 @@ +--- +# ---------------------------------------------------------------------------- +# +# *** AUTO GENERATED CODE *** Type: MMv1 *** +# +# ---------------------------------------------------------------------------- +# +# This file is automatically generated by Magic Modules and manual +# changes will be clobbered when the file is regenerated. +# +# Please read more about how to change this file in +# .github/CONTRIBUTING.md. +# +# ---------------------------------------------------------------------------- +subcategory: "Dataproc on GDC" +description: |- + A Spark application is a single Spark workload run on a GDC cluster. +--- + +# google_dataproc_gdc_spark_application + +A Spark application is a single Spark workload run on a GDC cluster. 
+ + +To get more information about SparkApplication, see: + +* [API documentation](https://cloud.google.com/dataproc-gdc/docs/reference/rest/v1/projects.locations.serviceInstances.sparkApplications) +* How-to Guides + * [Dataproc Intro](https://cloud.google.com/dataproc/) + + +## Example Usage - Dataprocgdc Sparkapplication Basic + + +```hcl +resource "google_dataproc_gdc_spark_application" "spark-application" { + spark_application_id = "tf-e2e-spark-app-basic" + serviceinstance = "do-not-delete-dataproc-gdc-instance" + project = "my-project" + location = "us-west2" + namespace = "default" + spark_application_config { + main_class = "org.apache.spark.examples.SparkPi" + jar_file_uris = ["file:///usr/lib/spark/examples/jars/spark-examples.jar"] + args = ["10000"] + } +} +``` + +## Example Usage - Dataprocgdc Sparkapplication + + +```hcl +resource "google_dataproc_gdc_application_environment" "app_env" { + application_environment_id = "tf-e2e-spark-app-env" + serviceinstance = "do-not-delete-dataproc-gdc-instance" + project = "my-project" + location = "us-west2" + namespace = "default" +} + +resource "google_dataproc_gdc_spark_application" "spark-application" { + spark_application_id = "tf-e2e-spark-app" + serviceinstance = "do-not-delete-dataproc-gdc-instance" + project = "my-project" + location = "us-west2" + namespace = "default" + labels = { + "test-label": "label-value" + } + annotations = { + "an_annotation": "annotation_value" + } + properties = { + "spark.executor.instances": "2" + } + application_environment = google_dataproc_gdc_application_environment.app_env.name + version = "1.2" + spark_application_config { + main_jar_file_uri = "file:///usr/lib/spark/examples/jars/spark-examples.jar" + jar_file_uris = ["file:///usr/lib/spark/examples/jars/spark-examples.jar"] + archive_uris = ["file://usr/lib/spark/examples/spark-examples.jar"] + file_uris = ["file:///usr/lib/spark/examples/jars/spark-examples.jar"] + } +} +``` + +## Example Usage - Dataprocgdc Sparkapplication Pyspark + + +```hcl +resource "google_dataproc_gdc_spark_application" "spark-application" { + spark_application_id = "tf-e2e-pyspark-app" + serviceinstance = "do-not-delete-dataproc-gdc-instance" + project = "my-project" + location = "us-west2" + namespace = "default" + display_name = "A Pyspark application for a Terraform create test" + dependency_images = ["gcr.io/some/image"] + pyspark_application_config { + main_python_file_uri = "gs://goog-dataproc-initialization-actions-us-west2/conda/test_conda.py" + jar_file_uris = ["file:///usr/lib/spark/examples/jars/spark-examples.jar"] + python_file_uris = ["gs://goog-dataproc-initialization-actions-us-west2/conda/get-sys-exec.py"] + file_uris = ["file://usr/lib/spark/examples/spark-examples.jar"] + archive_uris = ["file://usr/lib/spark/examples/spark-examples.jar"] + args = ["10"] + } +} +``` + +## Example Usage - Dataprocgdc Sparkapplication Sparkr + + +```hcl +resource "google_dataproc_gdc_spark_application" "spark-application" { + spark_application_id = "tf-e2e-sparkr-app" + serviceinstance = "do-not-delete-dataproc-gdc-instance" + project = "my-project" + location = "us-west2" + namespace = "default" + display_name = "A SparkR application for a Terraform create test" + spark_r_application_config { + main_r_file_uri = "gs://some-bucket/something.R" + file_uris = ["file://usr/lib/spark/examples/spark-examples.jar"] + archive_uris = ["file://usr/lib/spark/examples/spark-examples.jar"] + args = ["10"] + } +} +``` + +## Example Usage - Dataprocgdc Sparkapplication Sparksql + 
+
+```hcl
+resource "google_dataproc_gdc_spark_application" "spark-application" {
+  spark_application_id = "tf-e2e-sparksql-app"
+  serviceinstance = "do-not-delete-dataproc-gdc-instance"
+  project = "my-project"
+  location = "us-west2"
+  namespace = "default"
+  display_name = "A SparkSql application for a Terraform create test"
+  spark_sql_application_config {
+    jar_file_uris = ["file:///usr/lib/spark/examples/jars/spark-examples.jar"]
+    query_list {
+      queries = ["show tables;"]
+    }
+    script_variables = {
+      "MY_VAR": "1"
+    }
+  }
+}
+```
+
+## Example Usage - Dataprocgdc Sparkapplication Sparksql Query File
+
+
+```hcl
+resource "google_dataproc_gdc_spark_application" "spark-application" {
+  spark_application_id = "tf-e2e-sparksql-app"
+  serviceinstance = "do-not-delete-dataproc-gdc-instance"
+  project = "my-project"
+  location = "us-west2"
+  namespace = "default"
+  display_name = "A SparkSql application for a Terraform create test"
+  spark_sql_application_config {
+    jar_file_uris = ["file:///usr/lib/spark/examples/jars/spark-examples.jar"]
+    query_file_uri = "gs://some-bucket/something.sql"
+    script_variables = {
+      "MY_VAR": "1"
+    }
+  }
+}
+```
+
+## Argument Reference
+
+The following arguments are supported:
+
+
+* `location` -
+  (Required)
+  The location of the Spark application.
+
+* `serviceinstance` -
+  (Required)
+  The id of the service instance to which this Spark application belongs.
+
+* `spark_application_id` -
+  (Required)
+  The id of the application.
+
+
+- - -
+
+
+* `pyspark_application_config` -
+  (Optional)
+  Represents the PySparkApplicationConfig.
+  Structure is [documented below](#nested_pyspark_application_config).
+
+* `spark_application_config` -
+  (Optional)
+  Represents the SparkApplicationConfig.
+  Structure is [documented below](#nested_spark_application_config).
+
+* `spark_r_application_config` -
+  (Optional)
+  Represents the SparkRApplicationConfig.
+  Structure is [documented below](#nested_spark_r_application_config).
+
+* `spark_sql_application_config` -
+  (Optional)
+  Represents the SparkSqlApplicationConfig.
+  Structure is [documented below](#nested_spark_sql_application_config).
+
+* `display_name` -
+  (Optional)
+  User-provided human-readable name to be used in user interfaces.
+
+* `labels` -
+  (Optional)
+  The labels to associate with this application. Labels may be used for filtering and billing tracking.
+  **Note**: This field is non-authoritative, and will only manage the labels present in your configuration.
+  Please refer to the field `effective_labels` for all of the labels present on the resource.
+
+* `annotations` -
+  (Optional)
+  The annotations to associate with this application. Annotations may be used to store client information, but are not used by the server.
+  **Note**: This field is non-authoritative, and will only manage the annotations present in your configuration.
+  Please refer to the field `effective_annotations` for all of the annotations present on the resource.
+
+* `properties` -
+  (Optional)
+  Application-specific properties.
+
+* `version` -
+  (Optional)
+  The Dataproc version of this application.
+
+* `application_environment` -
+  (Optional)
+  An ApplicationEnvironment from which to inherit configuration properties.
+
+* `namespace` -
+  (Optional)
+  The Kubernetes namespace in which to create the application. This namespace must already exist on the cluster.
+
+* `dependency_images` -
+  (Optional)
+  List of container image uris for additional file dependencies. Dependent files are sequentially copied from each image. If a file with the same name exists in two images, the file from the later image is used.
+
+* `project` - (Optional) The ID of the project in which the resource belongs.
+    If it is not provided, the provider project is used.
+
+
+The `pyspark_application_config` block supports:
+
+* `main_python_file_uri` -
+  (Required)
+  The HCFS URI of the main Python file to use as the driver. Must be a .py file.
+
+* `args` -
+  (Optional)
+  The arguments to pass to the driver. Do not include arguments, such as `--conf`, that can be set as job properties, since a collision may occur that causes an incorrect job submission.
+
+* `python_file_uris` -
+  (Optional)
+  HCFS file URIs of Python files to pass to the PySpark framework. Supported file types: .py, .egg, and .zip.
+
+* `jar_file_uris` -
+  (Optional)
+  HCFS URIs of jar files to add to the CLASSPATHs of the Python driver and tasks.
+
+* `file_uris` -
+  (Optional)
+  HCFS URIs of files to be placed in the working directory of each executor. Useful for naively parallel tasks.
+
+* `archive_uris` -
+  (Optional)
+  HCFS URIs of archives to be extracted into the working directory of each executor. Supported file types: .jar, .tar, .tar.gz, .tgz, and .zip.
+
+The `spark_application_config` block supports:
+
+* `main_jar_file_uri` -
+  (Optional)
+  The HCFS URI of the jar file that contains the main class.
+
+* `main_class` -
+  (Optional)
+  The name of the driver main class. The jar file that contains the class must be in the classpath or specified in `jar_file_uris`.
+
+* `args` -
+  (Optional)
+  The arguments to pass to the driver. Do not include arguments that can be set as application properties, such as `--conf`, since a collision can occur that causes an incorrect application submission.
+
+* `jar_file_uris` -
+  (Optional)
+  HCFS URIs of jar files to add to the classpath of the Spark driver and tasks.
+
+* `file_uris` -
+  (Optional)
+  HCFS URIs of files to be placed in the working directory of each executor.
+
+* `archive_uris` -
+  (Optional)
+  HCFS URIs of archives to be extracted into the working directory of each executor. Supported file types: `.jar`, `.tar`, `.tar.gz`, `.tgz`, and `.zip`.
+
+The `spark_r_application_config` block supports:
+
+* `main_r_file_uri` -
+  (Required)
+  The HCFS URI of the main R file to use as the driver. Must be a .R file.
+
+* `args` -
+  (Optional)
+  The arguments to pass to the driver. Do not include arguments, such as `--conf`, that can be set as job properties, since a collision may occur that causes an incorrect job submission.
+
+* `file_uris` -
+  (Optional)
+  HCFS URIs of files to be placed in the working directory of each executor. Useful for naively parallel tasks.
+
+* `archive_uris` -
+  (Optional)
+  HCFS URIs of archives to be extracted into the working directory of each executor. Supported file types: .jar, .tar, .tar.gz, .tgz, and .zip.
+
+The `spark_sql_application_config` block supports:
+
+* `query_file_uri` -
+  (Optional)
+  The HCFS URI of the script that contains SQL queries.
+
+* `query_list` -
+  (Optional)
+  Represents a list of queries.
+  Structure is [documented below](#nested_query_list).
+
+* `script_variables` -
+  (Optional)
+  Mapping of query variable names to values (equivalent to the Spark SQL command: SET `name="value";`).
+
+* `jar_file_uris` -
+  (Optional)
+  HCFS URIs of jar files to be added to the Spark CLASSPATH.
+
+
+The `query_list` block supports:
+
+* `queries` -
+  (Required)
+  The queries to run.
+
+## Attributes Reference
+
+In addition to the arguments listed above, the following computed attributes are exported:
+
+* `id` - an identifier for the resource with format `projects/{{project}}/locations/{{location}}/serviceInstances/{{serviceinstance}}/sparkApplications/{{spark_application_id}}`
+
+* `name` -
+  Identifier. The name of the application. Format: projects/{project}/locations/{location}/serviceInstances/{service_instance}/sparkApplications/{application}
+
+* `uid` -
+  System-generated unique identifier for this application, formatted as UUID4.
+
+* `create_time` -
+  The timestamp when the resource was created.
+
+* `update_time` -
+  The timestamp when the resource was most recently updated.
+
+* `state` -
+  The current state.
+  Possible values:
+  * `STATE_UNSPECIFIED`
+  * `PENDING`
+  * `RUNNING`
+  * `CANCELLING`
+  * `CANCELLED`
+  * `SUCCEEDED`
+  * `FAILED`
+
+* `reconciling` -
+  Whether the application is currently reconciling. True if the current state of the resource does not match the intended state, and the system is working to reconcile them, whether or not the change was user initiated.
+
+* `output_uri` -
+  An HCFS URI pointing to the location of the application's stdout and stderr. Mainly useful for Pantheon and gcloud. Not in scope for private GA.
+
+* `monitoring_endpoint` -
+  URL for a monitoring UI for this application (for eventual Spark PHS/UI support). Out of scope for private GA.
+
+* `state_message` -
+  A message explaining the current state.
+
+* `terraform_labels` -
+  The combination of labels configured directly on the resource
+  and default labels configured on the provider.
+
+* `effective_labels` -
+  All of the labels (key/value pairs) present on the resource in GCP, including the labels configured through Terraform, other clients and services.
+
+* `effective_annotations` -
+  All of the annotations (key/value pairs) present on the resource in GCP, including the annotations configured through Terraform, other clients and services.
+
+
+## Timeouts
+
+This resource provides the following
+[Timeouts](https://developer.hashicorp.com/terraform/plugin/sdkv2/resources/retries-and-customizable-timeouts) configuration options:
+
+- `create` - Default is 20 minutes.
+- `update` - Default is 20 minutes.
+- `delete` - Default is 20 minutes.
+
+## Import
+
+
+SparkApplication can be imported using any of these accepted formats:
+
+* `projects/{{project}}/locations/{{location}}/serviceInstances/{{serviceinstance}}/sparkApplications/{{spark_application_id}}`
+* `{{project}}/{{location}}/{{serviceinstance}}/{{spark_application_id}}`
+* `{{location}}/{{serviceinstance}}/{{spark_application_id}}`
+
+
+In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import SparkApplication using one of the formats above. For example:
+
+```tf
+import {
+  id = "projects/{{project}}/locations/{{location}}/serviceInstances/{{serviceinstance}}/sparkApplications/{{spark_application_id}}"
+  to = google_dataproc_gdc_spark_application.default
+}
+```
+
+When using the [`terraform import` command](https://developer.hashicorp.com/terraform/cli/commands/import), SparkApplication can be imported using one of the formats above. For example:
+
+```
+$ terraform import google_dataproc_gdc_spark_application.default projects/{{project}}/locations/{{location}}/serviceInstances/{{serviceinstance}}/sparkApplications/{{spark_application_id}}
+$ terraform import google_dataproc_gdc_spark_application.default {{project}}/{{location}}/{{serviceinstance}}/{{spark_application_id}}
+$ terraform import google_dataproc_gdc_spark_application.default {{location}}/{{serviceinstance}}/{{spark_application_id}}
+```
+
+## User Project Overrides
+
+This resource supports [User Project Overrides](https://registry.terraform.io/providers/hashicorp/google/latest/docs/guides/provider_reference#user_project_override).
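+
+For example, a provider block that opts in to user project overrides might look like the following sketch (the billing project name below is a placeholder, not a value required by this resource):
+
+```hcl
+provider "google" {
+  # Route quota checks and billing for supported requests to the project
+  # below instead of the project that owns the resource.
+  user_project_override = true
+  billing_project       = "my-billing-project"
+}
+```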