diff --git a/azurerm/config.go b/azurerm/config.go index eadb2a8f6784..e3d521e5bb1b 100644 --- a/azurerm/config.go +++ b/azurerm/config.go @@ -226,6 +226,7 @@ type ArmClient struct { // Data Factory dataFactoryClient datafactory.FactoriesClient + dataFactoryDatasetClient datafactory.DatasetsClient dataFactoryLinkedServiceClient datafactory.LinkedServicesClient // Data Lake Store @@ -886,6 +887,10 @@ func (c *ArmClient) registerDataFactoryClients(endpoint, subscriptionId string, c.configureClient(&dataFactoryClient.Client, auth) c.dataFactoryClient = dataFactoryClient + dataFactoryDatasetClient := datafactory.NewDatasetsClientWithBaseURI(endpoint, subscriptionId) + c.configureClient(&dataFactoryDatasetClient.Client, auth) + c.dataFactoryDatasetClient = dataFactoryDatasetClient + dataFactoryLinkedServiceClient := datafactory.NewLinkedServicesClientWithBaseURI(endpoint, subscriptionId) c.configureClient(&dataFactoryLinkedServiceClient.Client, auth) c.dataFactoryLinkedServiceClient = dataFactoryLinkedServiceClient diff --git a/azurerm/data_factory.go b/azurerm/data_factory.go new file mode 100644 index 000000000000..48db9dbf683c --- /dev/null +++ b/azurerm/data_factory.go @@ -0,0 +1,54 @@ +package azurerm + +import ( + "log" + + "github.com/Azure/azure-sdk-for-go/services/datafactory/mgmt/2018-06-01/datafactory" +) + +func expandDataFactoryParameters(input map[string]interface{}) map[string]*datafactory.ParameterSpecification { + output := make(map[string]*datafactory.ParameterSpecification) + + for k, v := range input { + output[k] = &datafactory.ParameterSpecification{ + Type: datafactory.ParameterTypeString, + DefaultValue: v.(string), + } + } + + return output +} + +func flattenDataFactoryParameters(input map[string]*datafactory.ParameterSpecification) map[string]interface{} { + output := make(map[string]interface{}) + + for k, v := range input { + if v != nil { + // we only support string parameters at this time + val, ok := v.DefaultValue.(string) + if !ok { + 
log.Printf("[DEBUG] Skipping parameter %q since it's not a string", k) + } + + output[k] = val + } + } + + return output +} + +func flattenDataFactoryAnnotations(input *[]interface{}) []string { + annotations := make([]string, 0) + if input == nil { + return annotations + } + + for _, annotation := range *input { + val, ok := annotation.(string) + if !ok { + log.Printf("[DEBUG] Skipping annotation %q since it's not a string", val) + } + annotations = append(annotations, val) + } + return annotations +} diff --git a/azurerm/provider.go b/azurerm/provider.go index 1051b7ba04f8..e7ba93070e4f 100644 --- a/azurerm/provider.go +++ b/azurerm/provider.go @@ -217,6 +217,7 @@ func Provider() terraform.ResourceProvider { "azurerm_container_service": resourceArmContainerService(), "azurerm_cosmosdb_account": resourceArmCosmosDBAccount(), "azurerm_data_factory": resourceArmDataFactory(), + "azurerm_data_factory_dataset_sql_server_table": resourceArmDataFactoryDatasetSQLServerTable(), "azurerm_data_factory_linked_service_sql_server": resourceArmDataFactoryLinkedServiceSQLServer(), "azurerm_data_lake_analytics_account": resourceArmDataLakeAnalyticsAccount(), "azurerm_data_lake_analytics_firewall_rule": resourceArmDataLakeAnalyticsFirewallRule(), diff --git a/azurerm/resource_arm_data_factory_dataset_sql_server_table.go b/azurerm/resource_arm_data_factory_dataset_sql_server_table.go new file mode 100644 index 000000000000..454e1e7c0fc9 --- /dev/null +++ b/azurerm/resource_arm_data_factory_dataset_sql_server_table.go @@ -0,0 +1,369 @@ +package azurerm + +import ( + "fmt" + "log" + "regexp" + + "github.com/Azure/azure-sdk-for-go/services/datafactory/mgmt/2018-06-01/datafactory" + "github.com/hashicorp/terraform/helper/schema" + "github.com/hashicorp/terraform/helper/validation" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/tf" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/validate" + 
"github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +func resourceArmDataFactoryDatasetSQLServerTable() *schema.Resource { + return &schema.Resource{ + Create: resourceArmDataFactoryDatasetSQLServerTableCreateOrUpdate, + Read: resourceArmDataFactoryDatasetSQLServerTableRead, + Update: resourceArmDataFactoryDatasetSQLServerTableCreateOrUpdate, + Delete: resourceArmDataFactoryDatasetSQLServerTableDelete, + + Importer: &schema.ResourceImporter{ + State: schema.ImportStatePassthrough, + }, + + Schema: map[string]*schema.Schema{ + "name": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: validateAzureRMDataFactoryLinkedServiceDatasetName, + }, + + "data_factory_name": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: validation.StringMatch( + regexp.MustCompile(`^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$`), + `Invalid name for Data Factory, see https://docs.microsoft.com/en-us/azure/data-factory/naming-rules`, + ), + }, + + "resource_group_name": resourceGroupNameSchema(), + + "linked_service_name": { + Type: schema.TypeString, + Required: true, + ValidateFunc: validate.NoEmptyStrings, + }, + + "table_name": { + Type: schema.TypeString, + Optional: true, + ValidateFunc: validate.NoEmptyStrings, + }, + + "parameters": { + Type: schema.TypeMap, + Optional: true, + }, + + "description": { + Type: schema.TypeString, + Optional: true, + ValidateFunc: validate.NoEmptyStrings, + }, + + "annotations": { + Type: schema.TypeList, + Optional: true, + Elem: &schema.Schema{ + Type: schema.TypeString, + }, + }, + + "folder": { + Type: schema.TypeString, + Optional: true, + ValidateFunc: validate.NoEmptyStrings, + }, + + "additional_properties": { + Type: schema.TypeMap, + Optional: true, + }, + + "schema_column": { + Type: schema.TypeList, + Optional: true, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "name": { + Type: schema.TypeString, + Required: true, + ValidateFunc: 
validate.NoEmptyStrings,
+						},
+						"type": {
+							Type:     schema.TypeString,
+							Optional: true,
+							ValidateFunc: validation.StringInSlice([]string{
+								"Byte",
+								"Byte[]",
+								"Boolean",
+								"Date",
+								"DateTime",
+								"DateTimeOffset",
+								"Decimal",
+								"Double",
+								"Guid",
+								"Int16",
+								"Int32",
+								"Int64",
+								"Single",
+								"String",
+								"TimeSpan",
+							}, false),
+						},
+						"description": {
+							Type:         schema.TypeString,
+							Optional:     true,
+							ValidateFunc: validate.NoEmptyStrings,
+						},
+					},
+				},
+			},
+		},
+	}
+}
+
+func resourceArmDataFactoryDatasetSQLServerTableCreateOrUpdate(d *schema.ResourceData, meta interface{}) error {
+	client := meta.(*ArmClient).dataFactoryDatasetClient
+	ctx := meta.(*ArmClient).StopContext
+
+	name := d.Get("name").(string)
+	dataFactoryName := d.Get("data_factory_name").(string)
+	resourceGroup := d.Get("resource_group_name").(string)
+
+	if requireResourcesToBeImported && d.IsNewResource() {
+		existing, err := client.Get(ctx, resourceGroup, dataFactoryName, name, "")
+		if err != nil {
+			if !utils.ResponseWasNotFound(existing.Response) {
+				return fmt.Errorf("Error checking for presence of existing Data Factory Dataset SQL Server Table %q (Data Factory %q / Resource Group %q): %s", name, dataFactoryName, resourceGroup, err)
+			}
+		}
+
+		if existing.ID != nil && *existing.ID != "" {
+			return tf.ImportAsExistsError("azurerm_data_factory_dataset_sql_server_table", *existing.ID)
+		}
+	}
+
+	sqlServerDatasetProperties := datafactory.SQLServerTableDatasetTypeProperties{
+		TableName: d.Get("table_name").(string),
+	}
+
+	linkedServiceName := d.Get("linked_service_name").(string)
+	linkedServiceType := "LinkedServiceReference"
+	linkedService := &datafactory.LinkedServiceReference{
+		ReferenceName: &linkedServiceName,
+		Type:          &linkedServiceType,
+	}
+
+	description := d.Get("description").(string)
+	sqlServerTableset := datafactory.SQLServerTableDataset{
+		SQLServerTableDatasetTypeProperties: &sqlServerDatasetProperties,
+		LinkedServiceName:                   linkedService,
+		Description:                         &description,
+	}
+
+ if v, ok := d.GetOk("folder"); ok { + name := v.(string) + sqlServerTableset.Folder = &datafactory.DatasetFolder{ + Name: &name, + } + } + + if v, ok := d.GetOk("parameters"); ok { + sqlServerTableset.Parameters = expandDataFactoryParameters(v.(map[string]interface{})) + } + + if v, ok := d.GetOk("annotations"); ok { + annotations := v.([]interface{}) + sqlServerTableset.Annotations = &annotations + } + + if v, ok := d.GetOk("additional_properties"); ok { + sqlServerTableset.AdditionalProperties = v.(map[string]interface{}) + } + + if v, ok := d.GetOk("schema_column"); ok { + sqlServerTableset.Structure = expandDataFactoryDatasetStructure(v.([]interface{})) + } + + datasetType := string(datafactory.TypeSQLServerTable) + dataset := datafactory.DatasetResource{ + Properties: &sqlServerTableset, + Type: &datasetType, + } + + if _, err := client.CreateOrUpdate(ctx, resourceGroup, dataFactoryName, name, dataset, ""); err != nil { + return fmt.Errorf("Error creating/updating Data Factory Dataset SQL Server Table %q (Data Factory %q / Resource Group %q): %s", name, dataFactoryName, resourceGroup, err) + } + + resp, err := client.Get(ctx, resourceGroup, dataFactoryName, name, "") + if err != nil { + return fmt.Errorf("Error retrieving Data Factory Dataset SQL Server Table %q (Data Factory %q / Resource Group %q): %s", name, dataFactoryName, resourceGroup, err) + } + + if resp.ID == nil { + return fmt.Errorf("Cannot read Data Factory Dataset SQL Server Table %q (Data Factory %q / Resource Group %q): %s", name, dataFactoryName, resourceGroup, err) + } + + d.SetId(*resp.ID) + + return resourceArmDataFactoryDatasetSQLServerTableRead(d, meta) +} + +func resourceArmDataFactoryDatasetSQLServerTableRead(d *schema.ResourceData, meta interface{}) error { + client := meta.(*ArmClient).dataFactoryDatasetClient + ctx := meta.(*ArmClient).StopContext + + id, err := parseAzureResourceID(d.Id()) + if err != nil { + return err + } + resourceGroup := id.ResourceGroup + dataFactoryName := 
id.Path["factories"]
+	name := id.Path["datasets"]
+
+	resp, err := client.Get(ctx, resourceGroup, dataFactoryName, name, "")
+	if err != nil {
+		if utils.ResponseWasNotFound(resp.Response) {
+			d.SetId("")
+			return nil
+		}
+
+		return fmt.Errorf("Error retrieving Data Factory Dataset SQL Server Table %q (Data Factory %q / Resource Group %q): %s", name, dataFactoryName, resourceGroup, err)
+	}
+
+	d.Set("name", resp.Name)
+	d.Set("resource_group_name", resourceGroup)
+	d.Set("data_factory_name", dataFactoryName)
+
+	sqlServerTable, ok := resp.Properties.AsSQLServerTableDataset()
+	if !ok {
+		return fmt.Errorf("Error classifying Data Factory Dataset SQL Server Table %q (Data Factory %q / Resource Group %q): Expected: %q Received: %q", name, dataFactoryName, resourceGroup, datafactory.TypeSQLServerTable, *resp.Type)
+	}
+
+	d.Set("additional_properties", sqlServerTable.AdditionalProperties)
+
+	if sqlServerTable.Description != nil {
+		d.Set("description", sqlServerTable.Description)
+	}
+
+	parameters := flattenDataFactoryParameters(sqlServerTable.Parameters)
+	if err := d.Set("parameters", parameters); err != nil {
+		return fmt.Errorf("Error setting `parameters`: %+v", err)
+	}
+
+	annotations := flattenDataFactoryAnnotations(sqlServerTable.Annotations)
+	if err := d.Set("annotations", annotations); err != nil {
+		return fmt.Errorf("Error setting `annotations`: %+v", err)
+	}
+
+	if linkedService := sqlServerTable.LinkedServiceName; linkedService != nil {
+		if linkedService.ReferenceName != nil {
+			d.Set("linked_service_name", linkedService.ReferenceName)
+		}
+	}
+
+	if properties := sqlServerTable.SQLServerTableDatasetTypeProperties; properties != nil {
+		val, ok := properties.TableName.(string)
+		if !ok {
+			log.Printf("[DEBUG] Skipping `table_name` since it's not a string")
+		} else {
+			d.Set("table_name", val)
+		}
+	}
+
+	if folder := sqlServerTable.Folder; folder != nil {
+		if folder.Name != nil {
+			d.Set("folder", folder.Name)
+		}
+	}
+
+	structureColumns :=
flattenDataFactoryStructureColumns(sqlServerTable.Structure) + if err := d.Set("schema_column", structureColumns); err != nil { + return fmt.Errorf("Error setting `schema_column`: %+v", err) + } + + return nil +} + +func resourceArmDataFactoryDatasetSQLServerTableDelete(d *schema.ResourceData, meta interface{}) error { + client := meta.(*ArmClient).dataFactoryDatasetClient + ctx := meta.(*ArmClient).StopContext + + id, err := parseAzureResourceID(d.Id()) + if err != nil { + return err + } + resourceGroup := id.ResourceGroup + dataFactoryName := id.Path["factories"] + name := id.Path["datasets"] + + response, err := client.Delete(ctx, resourceGroup, dataFactoryName, name) + if err != nil { + if !utils.ResponseWasNotFound(response) { + return fmt.Errorf("Error deleting Data Factory Dataset SQL Server Table %q (Data Factory %q / Resource Group %q): %s", name, dataFactoryName, resourceGroup, err) + } + } + + return nil +} + +// DatasetColumn describes the attributes needed to specify a structure column for a dataset +type DatasetColumn struct { + Name string `json:"name,omitempty"` + Description string `json:"description,omitempty"` + Type string `json:"type,omitempty"` +} + +func expandDataFactoryDatasetStructure(input []interface{}) interface{} { + columns := make([]DatasetColumn, 0) + for _, column := range input { + attrs := column.(map[string]interface{}) + + datasetColumn := DatasetColumn{ + Name: attrs["name"].(string), + } + if attrs["description"] != nil { + datasetColumn.Description = attrs["description"].(string) + } + if attrs["type"] != nil { + datasetColumn.Type = attrs["type"].(string) + } + columns = append(columns, datasetColumn) + } + return columns +} + +func flattenDataFactoryStructureColumns(input interface{}) []interface{} { + output := make([]interface{}, 0) + + columns, ok := input.([]interface{}) + if !ok { + return columns + } + + for _, v := range columns { + column, ok := v.(map[string]interface{}) + if !ok { + continue + } + result := 
make(map[string]interface{}) + if column["name"] != nil { + result["name"] = column["name"] + } + if column["type"] != nil { + result["type"] = column["type"] + } + if column["description"] != nil { + result["description"] = column["description"] + } + output = append(output, result) + } + return output +} diff --git a/azurerm/resource_arm_data_factory_dataset_sql_server_table_test.go b/azurerm/resource_arm_data_factory_dataset_sql_server_table_test.go new file mode 100644 index 000000000000..30107bfa3668 --- /dev/null +++ b/azurerm/resource_arm_data_factory_dataset_sql_server_table_test.go @@ -0,0 +1,273 @@ +package azurerm + +import ( + "fmt" + "net/http" + "testing" + + "github.com/hashicorp/terraform/helper/resource" + "github.com/hashicorp/terraform/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" + + "github.com/hashicorp/terraform/helper/acctest" +) + +func TestAccAzureRMDataFactoryDatasetSQLServerTable_basic(t *testing.T) { + ri := acctest.RandInt() + config := testAccAzureRMDataFactoryDatasetSQLServerTable_basic(ri, testLocation()) + resourceName := "azurerm_data_factory_dataset_sql_server_table.test" + + resource.ParallelTest(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testCheckAzureRMDataFactoryDatasetSQLServerTableDestroy, + Steps: []resource.TestStep{ + { + Config: config, + Check: resource.ComposeTestCheckFunc( + testCheckAzureRMDataFactoryDatasetSQLServerTableExists(resourceName), + ), + }, + { + ResourceName: resourceName, + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func TestAccAzureRMDataFactoryDatasetSQLServerTable_update(t *testing.T) { + ri := acctest.RandInt() + config := testAccAzureRMDataFactoryDatasetSQLServerTable_update1(ri, testLocation()) + config2 := testAccAzureRMDataFactoryDatasetSQLServerTable_update2(ri, testLocation()) + resourceName := "azurerm_data_factory_dataset_sql_server_table.test" + + 
resource.ParallelTest(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testCheckAzureRMDataFactoryDatasetSQLServerTableDestroy, + Steps: []resource.TestStep{ + { + Config: config, + Check: resource.ComposeTestCheckFunc( + testCheckAzureRMDataFactoryDatasetSQLServerTableExists(resourceName), + resource.TestCheckResourceAttr(resourceName, "parameters.%", "2"), + resource.TestCheckResourceAttr(resourceName, "annotations.#", "3"), + resource.TestCheckResourceAttr(resourceName, "schema_column.#", "1"), + resource.TestCheckResourceAttr(resourceName, "additional_properties.%", "2"), + resource.TestCheckResourceAttr(resourceName, "description", "test description"), + ), + }, + { + Config: config2, + Check: resource.ComposeTestCheckFunc( + testCheckAzureRMDataFactoryDatasetSQLServerTableExists(resourceName), + resource.TestCheckResourceAttr(resourceName, "parameters.%", "3"), + resource.TestCheckResourceAttr(resourceName, "annotations.#", "2"), + resource.TestCheckResourceAttr(resourceName, "schema_column.#", "2"), + resource.TestCheckResourceAttr(resourceName, "additional_properties.%", "1"), + resource.TestCheckResourceAttr(resourceName, "description", "test description 2"), + ), + }, + { + ResourceName: resourceName, + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func testCheckAzureRMDataFactoryDatasetSQLServerTableExists(name string) resource.TestCheckFunc { + return func(s *terraform.State) error { + // Ensure we have enough information in state to look up in API + rs, ok := s.RootModule().Resources[name] + if !ok { + return fmt.Errorf("Not found: %s", name) + } + + name := rs.Primary.Attributes["name"] + resourceGroup, hasResourceGroup := rs.Primary.Attributes["resource_group_name"] + dataFactoryName := rs.Primary.Attributes["data_factory_name"] + if !hasResourceGroup { + return fmt.Errorf("Bad: no resource group found in state for Data Factory: %s", name) + } + + client := 
testAccProvider.Meta().(*ArmClient).dataFactoryDatasetClient + ctx := testAccProvider.Meta().(*ArmClient).StopContext + + resp, err := client.Get(ctx, resourceGroup, dataFactoryName, name, "") + if err != nil { + return fmt.Errorf("Bad: Get on dataFactoryDatasetClient: %+v", err) + } + + if utils.ResponseWasNotFound(resp.Response) { + return fmt.Errorf("Bad: Data Factory Dataset SQL Server Table %q (data factory name: %q / resource group: %q) does not exist", name, dataFactoryName, resourceGroup) + } + + return nil + } +} + +func testCheckAzureRMDataFactoryDatasetSQLServerTableDestroy(s *terraform.State) error { + client := testAccProvider.Meta().(*ArmClient).dataFactoryDatasetClient + ctx := testAccProvider.Meta().(*ArmClient).StopContext + + for _, rs := range s.RootModule().Resources { + if rs.Type != "azurerm_data_factory_dataset_sql_server_table" { + continue + } + + name := rs.Primary.Attributes["name"] + resourceGroup := rs.Primary.Attributes["resource_group_name"] + dataFactoryName := rs.Primary.Attributes["data_factory_name"] + + resp, err := client.Get(ctx, resourceGroup, dataFactoryName, name, "") + + if err != nil { + return nil + } + + if resp.StatusCode != http.StatusNotFound { + return fmt.Errorf("Data Factory Dataset SQL Server Table still exists:\n%#v", resp.Properties) + } + } + + return nil +} + +func testAccAzureRMDataFactoryDatasetSQLServerTable_basic(rInt int, location string) string { + return fmt.Sprintf(` +resource "azurerm_resource_group" "test" { + name = "acctestrg-%d" + location = "%s" +} + +resource "azurerm_data_factory" "test" { + name = "acctestdf%d" + location = "${azurerm_resource_group.test.location}" + resource_group_name = "${azurerm_resource_group.test.name}" +} + +resource "azurerm_data_factory_linked_service_sql_server" "test" { + name = "acctestlssql%d" + resource_group_name = "${azurerm_resource_group.test.name}" + data_factory_name = "${azurerm_data_factory.test.name}" + connection_string = "Integrated Security=False;Data 
Source=test;Initial Catalog=test;User ID=test;Password=test" +} + +resource "azurerm_data_factory_dataset_sql_server_table" "test" { + name = "acctestds%d" + resource_group_name = "${azurerm_resource_group.test.name}" + data_factory_name = "${azurerm_data_factory.test.name}" + linked_service_name = "${azurerm_data_factory_linked_service_sql_server.test.name}" +} +`, rInt, location, rInt, rInt, rInt) +} + +func testAccAzureRMDataFactoryDatasetSQLServerTable_update1(rInt int, location string) string { + return fmt.Sprintf(` +resource "azurerm_resource_group" "test" { + name = "acctestrg-%d" + location = "%s" +} + +resource "azurerm_data_factory" "test" { + name = "acctestdf%d" + location = "${azurerm_resource_group.test.location}" + resource_group_name = "${azurerm_resource_group.test.name}" +} + +resource "azurerm_data_factory_linked_service_sql_server" "test" { + name = "acctestlssql%d" + resource_group_name = "${azurerm_resource_group.test.name}" + data_factory_name = "${azurerm_data_factory.test.name}" + connection_string = "Integrated Security=False;Data Source=test;Initial Catalog=test;User ID=test;Password=test" +} + +resource "azurerm_data_factory_dataset_sql_server_table" "test" { + name = "acctestds%d" + resource_group_name = "${azurerm_resource_group.test.name}" + data_factory_name = "${azurerm_data_factory.test.name}" + linked_service_name = "${azurerm_data_factory_linked_service_sql_server.test.name}" + + description = "test description" + annotations = ["test1", "test2", "test3"] + table_name = "testTable" + folder = "testFolder" + + parameters { + "foo" = "test1" + "bar" = "test2" + } + + additional_properties { + "foo" = "test1" + "bar" = "test2" + } + + schema_column { + name = "test1" + type = "Byte" + description = "description" + } +} +`, rInt, location, rInt, rInt, rInt) +} + +func testAccAzureRMDataFactoryDatasetSQLServerTable_update2(rInt int, location string) string { + return fmt.Sprintf(` +resource "azurerm_resource_group" "test" { + name = 
"acctestrg-%d" + location = "%s" +} + +resource "azurerm_data_factory" "test" { + name = "acctestdf%d" + location = "${azurerm_resource_group.test.location}" + resource_group_name = "${azurerm_resource_group.test.name}" +} + +resource "azurerm_data_factory_linked_service_sql_server" "test" { + name = "acctestlssql%d" + resource_group_name = "${azurerm_resource_group.test.name}" + data_factory_name = "${azurerm_data_factory.test.name}" + connection_string = "Integrated Security=False;Data Source=test;Initial Catalog=test;User ID=test;Password=test" +} + +resource "azurerm_data_factory_dataset_sql_server_table" "test" { + name = "acctestds%d" + resource_group_name = "${azurerm_resource_group.test.name}" + data_factory_name = "${azurerm_data_factory.test.name}" + linked_service_name = "${azurerm_data_factory_linked_service_sql_server.test.name}" + + description = "test description 2" + annotations = ["test1", "test2"] + table_name = "testTable" + folder = "testFolder" + + parameters { + "foo" = "test1" + "bar" = "test2" + "buzz" = "test3" + } + + additional_properties { + "foo" = "test1" + } + + schema_column { + name = "test1" + type = "Byte" + description = "description" + } + + schema_column { + name = "test2" + type = "Byte" + description = "description" + } +} +`, rInt, location, rInt, rInt, rInt) +} diff --git a/azurerm/resource_arm_data_factory_linked_service_sql_server.go b/azurerm/resource_arm_data_factory_linked_service_sql_server.go index 157fe98e2bd9..145deade7b21 100644 --- a/azurerm/resource_arm_data_factory_linked_service_sql_server.go +++ b/azurerm/resource_arm_data_factory_linked_service_sql_server.go @@ -31,7 +31,7 @@ func resourceArmDataFactoryLinkedServiceSQLServer() *schema.Resource { Type: schema.TypeString, Required: true, ForceNew: true, - ValidateFunc: validateAzureRMDataFactoryLinkedServiceName, + ValidateFunc: validateAzureRMDataFactoryLinkedServiceDatasetName, }, "data_factory_name": { @@ -120,7 +120,7 @@ func 
resourceArmDataFactoryLinkedServiceSQLServerCreateOrUpdate(d *schema.Resour } if v, ok := d.GetOk("parameters"); ok { - sqlServerLinkedService.Parameters = expandDataFactoryLinkedServiceParameters(v.(map[string]interface{})) + sqlServerLinkedService.Parameters = expandDataFactoryParameters(v.(map[string]interface{})) } if v, ok := d.GetOk("integration_runtime_name"); ok { @@ -128,11 +128,12 @@ func resourceArmDataFactoryLinkedServiceSQLServerCreateOrUpdate(d *schema.Resour } if v, ok := d.GetOk("additional_properties"); ok { - sqlServerLinkedService.AdditionalProperties = expandDataFactoryLinkedServiceAdditionalProperties(v.(map[string]interface{})) + sqlServerLinkedService.AdditionalProperties = v.(map[string]interface{}) } if v, ok := d.GetOk("annotations"); ok { - sqlServerLinkedService.Annotations = expandDataFactoryLinkedServiceAnnotations(v.([]interface{})) + annotations := v.([]interface{}) + sqlServerLinkedService.Annotations = &annotations } linkedService := datafactory.LinkedServiceResource{ @@ -194,12 +195,12 @@ func resourceArmDataFactoryLinkedServiceSQLServerRead(d *schema.ResourceData, me d.Set("description", *sqlServer.Description) } - annotations := flattenDataFactoryLinkedServiceAnnotations(sqlServer.Annotations) + annotations := flattenDataFactoryAnnotations(sqlServer.Annotations) if err := d.Set("annotations", annotations); err != nil { return fmt.Errorf("Error setting `annotations`: %+v", err) } - parameters := flattenDataFactoryLinkedServiceParameters(sqlServer.Parameters) + parameters := flattenDataFactoryParameters(sqlServer.Parameters) if err := d.Set("parameters", parameters); err != nil { return fmt.Errorf("Error setting `parameters`: %+v", err) } @@ -278,7 +279,7 @@ func azureRmDataFactoryLinkedServiceConnectionStringDiff(k, old string, new stri return true } -func validateAzureRMDataFactoryLinkedServiceName(v interface{}, k string) (warnings []string, errors []error) { +func validateAzureRMDataFactoryLinkedServiceDatasetName(v 
interface{}, k string) (warnings []string, errors []error) { value := v.(string) if regexp.MustCompile(`^[-.+?/<>*%&:\\]+$`).MatchString(value) { errors = append(errors, fmt.Errorf("any of '-' '.', '+', '?', '/', '<', '>', '*', '%%', '&', ':', '\\', are not allowed in %q: %q", k, value)) @@ -295,70 +296,3 @@ func expandDataFactoryLinkedServiceIntegrationRuntime(integrationRuntimeName str Type: &typeString, } } - -func expandDataFactoryLinkedServiceParameters(input map[string]interface{}) map[string]*datafactory.ParameterSpecification { - output := make(map[string]*datafactory.ParameterSpecification) - - for k, v := range input { - output[k] = &datafactory.ParameterSpecification{ - Type: datafactory.ParameterTypeString, - DefaultValue: v.(string), - } - } - - return output -} - -func expandDataFactoryLinkedServiceAdditionalProperties(input map[string]interface{}) map[string]interface{} { - output := make(map[string]interface{}) - - for k, v := range input { - output[k] = v - } - - return output -} - -func flattenDataFactoryLinkedServiceParameters(input map[string]*datafactory.ParameterSpecification) map[string]interface{} { - output := make(map[string]interface{}) - - for k, v := range input { - if v != nil { - // we only support string parameters at this time - val, ok := v.DefaultValue.(string) - if !ok { - log.Printf("[DEBUG] Skipping parameter %q since it's not a string", k) - } - - output[k] = val - } - } - - return output -} - -func expandDataFactoryLinkedServiceAnnotations(input []interface{}) *[]interface{} { - annotations := make([]interface{}, 0) - - for _, annotation := range input { - annotations = append(annotations, annotation.(string)) - } - - return &annotations -} - -func flattenDataFactoryLinkedServiceAnnotations(input *[]interface{}) []string { - annotations := make([]string, 0) - if input == nil { - return annotations - } - - for _, annotation := range *input { - val, ok := annotation.(string) - if !ok { - log.Printf("[DEBUG] Skipping annotation 
%q since it's not a string", val) - } - annotations = append(annotations, val) - } - return annotations -} diff --git a/azurerm/resource_arm_data_factory_linked_service_sql_server_test.go b/azurerm/resource_arm_data_factory_linked_service_sql_server_test.go index 1248b89de068..a2d7503aba00 100644 --- a/azurerm/resource_arm_data_factory_linked_service_sql_server_test.go +++ b/azurerm/resource_arm_data_factory_linked_service_sql_server_test.go @@ -65,10 +65,10 @@ func TestAccAzureRMDataFactoryLinkedServiceSQLServer_basic(t *testing.T) { Check: resource.ComposeTestCheckFunc( testCheckAzureRMDataFactoryLinkedServiceSQLServerExists(resourceName), resource.TestCheckResourceAttr(resourceName, "parameters.%", "2"), - resource.TestCheckResourceAttrSet(resourceName, "connection_string"), resource.TestCheckResourceAttr(resourceName, "annotations.#", "3"), resource.TestCheckResourceAttr(resourceName, "additional_properties.%", "2"), resource.TestCheckResourceAttr(resourceName, "description", "test description"), + resource.TestCheckResourceAttrSet(resourceName, "connection_string"), ), }, { @@ -76,10 +76,10 @@ func TestAccAzureRMDataFactoryLinkedServiceSQLServer_basic(t *testing.T) { Check: resource.ComposeTestCheckFunc( testCheckAzureRMDataFactoryLinkedServiceSQLServerExists(resourceName), resource.TestCheckResourceAttr(resourceName, "parameters.%", "3"), - resource.TestCheckResourceAttrSet(resourceName, "connection_string"), resource.TestCheckResourceAttr(resourceName, "annotations.#", "2"), resource.TestCheckResourceAttr(resourceName, "additional_properties.%", "1"), resource.TestCheckResourceAttr(resourceName, "description", "test description 2"), + resource.TestCheckResourceAttrSet(resourceName, "connection_string"), ), }, { diff --git a/website/azurerm.erb b/website/azurerm.erb index 7e811386196c..83b860e9e9e4 100644 --- a/website/azurerm.erb +++ b/website/azurerm.erb @@ -716,6 +716,9 @@ > azurerm_data_factory + > + azurerm_data_factory_dataset_sql_server_table + > 
azurerm_data_factory_linked_service_sql_server
diff --git a/website/docs/r/data_factory_dataset_sql_server_table.html.markdown b/website/docs/r/data_factory_dataset_sql_server_table.html.markdown
new file mode 100644
index 000000000000..111026143bf6
--- /dev/null
+++ b/website/docs/r/data_factory_dataset_sql_server_table.html.markdown
@@ -0,0 +1,91 @@
+---
+layout: "azurerm"
+page_title: "Azure Resource Manager: azurerm_data_factory_dataset_sql_server_table"
+sidebar_current: "docs-azurerm-resource-data-factory-dataset-sql-server-table"
+description: |-
+  Manage a SQL Server Table Dataset inside an Azure Data Factory.
+---
+
+# azurerm_data_factory_dataset_sql_server_table
+
+Manage a SQL Server Table Dataset inside an Azure Data Factory.
+
+## Example Usage
+
+```hcl
+resource "azurerm_resource_group" "example" {
+  name     = "example"
+  location = "northeurope"
+}
+
+resource "azurerm_data_factory" "example" {
+  name                = "example"
+  location            = "${azurerm_resource_group.example.location}"
+  resource_group_name = "${azurerm_resource_group.example.name}"
+}
+
+resource "azurerm_data_factory_linked_service_sql_server" "example" {
+  name                = "example"
+  resource_group_name = "${azurerm_resource_group.example.name}"
+  data_factory_name   = "${azurerm_data_factory.example.name}"
+  connection_string   = "Integrated Security=False;Data Source=test;Initial Catalog=test;User ID=test;Password=test"
+}
+
+resource "azurerm_data_factory_dataset_sql_server_table" "example" {
+  name                = "example"
+  resource_group_name = "${azurerm_resource_group.example.name}"
+  data_factory_name   = "${azurerm_data_factory.example.name}"
+  linked_service_name = "${azurerm_data_factory_linked_service_sql_server.example.name}"
+}
+```
+
+## Argument Reference
+
+The following arguments are supported:
+
+* `name` - (Required) Specifies the name of the Data Factory Dataset SQL Server Table. Changing this forces a new resource to be created. Must be globally unique.
See the [Microsoft documentation](https://docs.microsoft.com/en-us/azure/data-factory/naming-rules) for all restrictions. + +* `resource_group_name` - (Required) The name of the resource group in which to create the Data Factory Dataset SQL Server Table. Changing this forces a new resource + +* `data_factory_name` - (Required) The Data Factory name in which to associate the Dataset with. Changing this forces a new resource. + +* `linked_service_name` - (Required) The Data Factory Linked Service name in which to associate the Dataset with. + +* `table_name` - (Optional) The table name of the Data Factory Dataset SQL Server Table. + +* `folder` - (Optional) The folder that this Dataset is in. If not specified, the Dataset will appear at the root level. + +* `schema_column` - (Optional) A `schema_column` block as defined below. + +* `description` - (Optional) The description for the Data Factory Dataset SQL Server Table. + +* `annotations` - (Optional) List of tags that can be used for describing the Data Factory Dataset SQL Server Table. + +* `parameters` - (Optional) A map of parameters to associate with the Data Factory Dataset SQL Server Table. + +* `additional_properties` - (Optional) A map of additional properties to associate with the Data Factory Dataset SQL Server Table. + +--- + +A `schema_column` block supports the following: + +* `name` - (Required) The name of the column. + +* `type` - (Optional) Type of the column. Valid values are `Byte`, `Byte[]`, `Boolean`, `Date`, `DateTime`,`DateTimeOffset`, `Decimal`, `Double`, `Guid`, `Int16`, `Int32`, `Int64`, `Single`, `String`, `TimeSpan`. Please note these values are case sensitive. + +* `description` - (Optional) The description of the column. + + +## Attributes Reference + +The following attributes are exported: + +* `id` - The ID of the Data Factory Dataset. + +## Import + +Data Factory Dataset SQL Server Table can be imported using the `resource id`, e.g. 
+ +```shell +terraform import azurerm_data_factory_dataset_sql_server_table.example /subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/example/providers/Microsoft.DataFactory/factories/example/datasets/example +```