From 14efb950b99a9007256f11b4ed5601f43ee6a08b Mon Sep 17 00:00:00 2001 From: Steph Date: Tue, 14 Dec 2021 13:11:47 +0100 Subject: [PATCH 1/4] deprecate data_factory_name --- ...ata_factory_dataset_azure_blob_resource.go | 41 +++++++++++--- ...actory_dataset_azure_blob_resource_test.go | 10 ++-- .../data_factory_dataset_binary_resource.go | 45 ++++++++++++---- ...ta_factory_dataset_binary_resource_test.go | 18 +++---- ...actory_dataset_cosmosdb_sqlapi_resource.go | 44 ++++++++++++--- ...y_dataset_cosmosdb_sqlapi_resource_test.go | 10 ++-- ...factory_dataset_delimited_text_resource.go | 46 +++++++++++++--- ...ry_dataset_delimited_text_resource_test.go | 24 ++++----- .../data_factory_dataset_http_resource.go | 41 +++++++++++--- ...data_factory_dataset_http_resource_test.go | 12 ++--- .../data_factory_dataset_json_resource.go | 46 +++++++++++++--- ...data_factory_dataset_json_resource_test.go | 46 ++++++++-------- .../data_factory_dataset_mysql_resource.go | 44 ++++++++++++--- ...ata_factory_dataset_mysql_resource_test.go | 12 ++--- .../data_factory_dataset_parquet_resource.go | 42 ++++++++++++--- ...a_factory_dataset_parquet_resource_test.go | 16 +++--- ...ata_factory_dataset_postgresql_resource.go | 44 ++++++++++++--- ...actory_dataset_postgresql_resource_test.go | 12 ++--- ...data_factory_dataset_snowflake_resource.go | 42 ++++++++++++--- ...factory_dataset_snowflake_resource_test.go | 24 ++++----- ...ctory_dataset_sql_server_table_resource.go | 42 ++++++++++++--- ..._dataset_sql_server_table_resource_test.go | 12 ++--- ...tory_integration_runtime_azure_resource.go | 45 +++++++++++++--- ...integration_runtime_azure_resource_test.go | 12 ++--- ...integration_runtime_azure_ssis_resource.go | 45 +++++++++++++--- ...ration_runtime_azure_ssis_resource_test.go | 16 +++--- ...ntegration_runtime_self_hosted_resource.go | 43 ++++++++++++--- ...ation_runtime_self_hosted_resource_test.go | 4 +- .../data_factory_pipeline_resource.go | 41 +++++++++++--- .../data_factory_pipeline_resource_test.go | 10 ++-- .../data_factory_trigger_schedule_resource.go | 53 ++++++++++++------- ..._factory_trigger_schedule_resource_test.go | 14 ++--- 32 files changed, 709 insertions(+), 247 deletions(-) diff --git a/internal/services/datafactory/data_factory_dataset_azure_blob_resource.go b/internal/services/datafactory/data_factory_dataset_azure_blob_resource.go index a504931968c1..7127479ae97b 100644 --- a/internal/services/datafactory/data_factory_dataset_azure_blob_resource.go +++ b/internal/services/datafactory/data_factory_dataset_azure_blob_resource.go @@ -43,12 +43,23 @@ func resourceDataFactoryDatasetAzureBlob() *pluginsdk.Resource { ValidateFunc: validate.LinkedServiceDatasetName, }, - // TODO: replace with `data_factory_id` in 3.0 + // TODO remove in 3.0 "data_factory_name": { Type: pluginsdk.TypeString, Required: true, ForceNew: true, ValidateFunc: validate.DataFactoryName(), + Deprecated: "`data_factory_name` is deprecated in favour of `data_factory_id` and will be removed in version 3.0 of the AzureRM provider", + ExactlyOneOf: []string{"data_factory_id"}, + }, + + "data_factory_id": { + Type: pluginsdk.TypeString, + Optional: true, // TODO set to Required in 3.0 + Computed: true, // TODO remove in 3.0 + ForceNew: true, + ValidateFunc: validate.DataFactoryID, + ExactlyOneOf: []string{"data_factory_name"}, }, // There's a bug in the Azure API where this is returned in lower-case @@ -172,13 +183,27 @@ func resourceDataFactoryDatasetAzureBlobCreateUpdate(d *pluginsdk.ResourceData, ctx, cancel := 
timeouts.ForCreateUpdate(meta.(*clients.Client).StopContext, d) defer cancel() - id := parse.NewDataSetID(subscriptionId, d.Get("resource_group_name").(string), d.Get("data_factory_name").(string), d.Get("name").(string)) + // TODO remove/simplify this after deprecation in 3.0 + var err error + var dataFactoryId *parse.DataFactoryId + if v := d.Get("data_factory_name").(string); v != "" { + newDataFactoryId := parse.NewDataFactoryID(subscriptionId, d.Get("resource_group_name").(string), d.Get("data_factory_name").(string)) + dataFactoryId = &newDataFactoryId + } + if v := d.Get("data_factory_id").(string); v != "" { + dataFactoryId, err = parse.DataFactoryID(v) + if err != nil { + return err + } + } + + id := parse.NewDataSetID(subscriptionId, dataFactoryId.ResourceGroup, dataFactoryId.FactoryName, d.Get("name").(string)) if d.IsNewResource() { existing, err := client.Get(ctx, id.ResourceGroup, id.FactoryName, id.Name, "") if err != nil { if !utils.ResponseWasNotFound(existing.Response) { - return fmt.Errorf("checking for presence of existing Data Factory Dataset Azure Blob %s: %+v", id, err) + return fmt.Errorf("checking for presence of existing %s: %+v", id, err) } } @@ -234,7 +259,7 @@ func resourceDataFactoryDatasetAzureBlobCreateUpdate(d *pluginsdk.ResourceData, } if _, err := client.CreateOrUpdate(ctx, id.ResourceGroup, id.FactoryName, id.Name, dataset, ""); err != nil { - return fmt.Errorf("creating/updating Data Factory Dataset Azure Blob %s: %+v", id, err) + return fmt.Errorf("creating/updating %s: %+v", id, err) } d.SetId(id.ID()) @@ -252,6 +277,8 @@ func resourceDataFactoryDatasetAzureBlobRead(d *pluginsdk.ResourceData, meta int return err } + dataFactoryId := parse.NewDataFactoryID(id.SubscriptionId, id.ResourceGroup, id.FactoryName) + resp, err := client.Get(ctx, id.ResourceGroup, id.FactoryName, id.Name, "") if err != nil { if utils.ResponseWasNotFound(resp.Response) { @@ -259,12 +286,14 @@ func resourceDataFactoryDatasetAzureBlobRead(d *pluginsdk.ResourceData, meta int return nil } - return fmt.Errorf("retrieving Data Factory Dataset Azure Blob %s: %+v", *id, err) + return fmt.Errorf("retrieving %s: %+v", *id, err) } d.Set("name", id.Name) d.Set("resource_group_name", id.ResourceGroup) + // TODO remove in 3.0 d.Set("data_factory_name", id.FactoryName) + d.Set("data_factory_id", dataFactoryId.ID()) azureBlobTable, ok := resp.Properties.AsAzureBlobDataset() if !ok { @@ -329,7 +358,7 @@ func resourceDataFactoryDatasetAzureBlobDelete(d *pluginsdk.ResourceData, meta i response, err := client.Delete(ctx, id.ResourceGroup, id.FactoryName, id.Name) if err != nil { if !utils.ResponseWasNotFound(response) { - return fmt.Errorf("deleting Data Factory Dataset Azure Blob %s: %+v", *id, err) + return fmt.Errorf("deleting %s: %+v", *id, err) } } diff --git a/internal/services/datafactory/data_factory_dataset_azure_blob_resource_test.go b/internal/services/datafactory/data_factory_dataset_azure_blob_resource_test.go index d01fa8da69f6..fe4df778237a 100644 --- a/internal/services/datafactory/data_factory_dataset_azure_blob_resource_test.go +++ b/internal/services/datafactory/data_factory_dataset_azure_blob_resource_test.go @@ -86,7 +86,7 @@ func (t DatasetAzureBlobResource) Exists(ctx context.Context, clients *clients.C resp, err := clients.DataFactory.DatasetClient.Get(ctx, id.ResourceGroup, id.FactoryName, id.Name, "") if err != nil { - return nil, fmt.Errorf("reading Data Factory Dataset Azure Blob (%s): %+v", *id, err) + return nil, fmt.Errorf("reading %s: %+v", *id, err) } return 
utils.Bool(resp.ID != nil), nil @@ -112,14 +112,14 @@ resource "azurerm_data_factory" "test" { resource "azurerm_data_factory_linked_service_azure_blob_storage" "test" { name = "acctestlssql%d" resource_group_name = azurerm_resource_group.test.name - data_factory_name = azurerm_data_factory.test.name + data_factory_id = azurerm_data_factory.test.id connection_string = "DefaultEndpointsProtocol=https;AccountName=foo;AccountKey=bar" } resource "azurerm_data_factory_dataset_azure_blob" "test" { name = "acctestds%d" resource_group_name = azurerm_resource_group.test.name - data_factory_name = azurerm_data_factory.test.name + data_factory_id = azurerm_data_factory.test.id linked_service_name = azurerm_data_factory_linked_service_azure_blob_storage.test.name path = "foo" @@ -204,7 +204,7 @@ resource "azurerm_data_factory" "test" { resource "azurerm_data_factory_linked_service_azure_blob_storage" "test" { name = "acctestlssql%d" resource_group_name = azurerm_resource_group.test.name - data_factory_name = azurerm_data_factory.test.name + data_factory_id = azurerm_data_factory.test.id connection_string = "DefaultEndpointsProtocol=https;AccountName=foo;AccountKey=bar" } @@ -266,7 +266,7 @@ resource "azurerm_data_factory" "test" { resource "azurerm_data_factory_linked_service_azure_blob_storage" "test" { name = "acctestlssql%d" resource_group_name = azurerm_resource_group.test.name - data_factory_name = azurerm_data_factory.test.name + data_factory_id = azurerm_data_factory.test.id connection_string = "DefaultEndpointsProtocol=https;AccountName=foo;AccountKey=bar" } diff --git a/internal/services/datafactory/data_factory_dataset_binary_resource.go b/internal/services/datafactory/data_factory_dataset_binary_resource.go index 51aadb0ad9ac..0c77d252b1ff 100644 --- a/internal/services/datafactory/data_factory_dataset_binary_resource.go +++ b/internal/services/datafactory/data_factory_dataset_binary_resource.go @@ -42,12 +42,23 @@ func resourceDataFactoryDatasetBinary() *pluginsdk.Resource { ValidateFunc: validate.LinkedServiceDatasetName, }, - // TODO: replace with `data_factory_id` in 3.0 + // TODO remove in 3.0 "data_factory_name": { Type: pluginsdk.TypeString, Required: true, ForceNew: true, ValidateFunc: validate.DataFactoryName(), + Deprecated: "`data_factory_name` is deprecated in favour of `data_factory_id` and will be removed in version 3.0 of the AzureRM provider", + ExactlyOneOf: []string{"data_factory_id"}, + }, + + "data_factory_id": { + Type: pluginsdk.TypeString, + Optional: true, // TODO set to Required in 3.0 + Computed: true, // TODO remove in 3.0 + ForceNew: true, + ValidateFunc: validate.DataFactoryID, + ExactlyOneOf: []string{"data_factory_name"}, }, // There's a bug in the Azure API where this is returned in lower-case @@ -241,13 +252,27 @@ func resourceDataFactoryDatasetBinaryCreateUpdate(d *pluginsdk.ResourceData, met ctx, cancel := timeouts.ForCreateUpdate(meta.(*clients.Client).StopContext, d) defer cancel() - id := parse.NewDataSetID(subscriptionId, d.Get("resource_group_name").(string), d.Get("data_factory_name").(string), d.Get("name").(string)) + // TODO remove/simplify this after deprecation in 3.0 + var err error + var dataFactoryId *parse.DataFactoryId + if v := d.Get("data_factory_name").(string); v != "" { + newDataFactoryId := parse.NewDataFactoryID(subscriptionId, d.Get("resource_group_name").(string), d.Get("data_factory_name").(string)) + dataFactoryId = &newDataFactoryId + } + if v := d.Get("data_factory_id").(string); v != "" { + dataFactoryId, err = 
parse.DataFactoryID(v) + if err != nil { + return err + } + } + + id := parse.NewDataSetID(subscriptionId, dataFactoryId.ResourceGroup, dataFactoryId.FactoryName, d.Get("name").(string)) if d.IsNewResource() { existing, err := client.Get(ctx, id.ResourceGroup, id.FactoryName, id.Name, "") if err != nil { if !utils.ResponseWasNotFound(existing.Response) { - return fmt.Errorf("checking for presence of existing Data Factory Dataset Binary %q (Data Factory %q / Resource Group %q): %s", id.Name, id.FactoryName, id.ResourceGroup, err) + return fmt.Errorf("checking for presence of existing %s: %+v", id, err) } } @@ -305,11 +330,7 @@ func resourceDataFactoryDatasetBinaryCreateUpdate(d *pluginsdk.ResourceData, met } if _, err := client.CreateOrUpdate(ctx, id.ResourceGroup, id.FactoryName, id.Name, dataset, ""); err != nil { - return fmt.Errorf("creating/updating Data Factory Dataset Binary %q (Data Factory %q / Resource Group %q): %s", id.Name, id.FactoryName, id.ResourceGroup, err) - } - - if _, err := client.Get(ctx, id.ResourceGroup, id.FactoryName, id.Name, ""); err != nil { - return fmt.Errorf("retrieving Data Factory Dataset Binary %q (Data Factory %q / Resource Group %q): %s", id.Name, id.FactoryName, id.ResourceGroup, err) + return fmt.Errorf("creating/updating %s: %+v", id, err) } d.SetId(id.ID()) @@ -327,6 +348,8 @@ func resourceDataFactoryDatasetBinaryRead(d *pluginsdk.ResourceData, meta interf return err } + dataFactoryId := parse.NewDataFactoryID(id.SubscriptionId, id.ResourceGroup, id.FactoryName) + resp, err := client.Get(ctx, id.ResourceGroup, id.FactoryName, id.Name, "") if err != nil { if utils.ResponseWasNotFound(resp.Response) { @@ -334,12 +357,14 @@ func resourceDataFactoryDatasetBinaryRead(d *pluginsdk.ResourceData, meta interf return nil } - return fmt.Errorf("retrieving Data Factory Dataset Binary %q (Data Factory %q / Resource Group %q): %s", id.Name, id.FactoryName, id.ResourceGroup, err) + return fmt.Errorf("retrieving %s: %+v", *id, err) } d.Set("name", id.Name) d.Set("resource_group_name", id.ResourceGroup) + // TODO remove in 3.0 d.Set("data_factory_name", id.FactoryName) + d.Set("data_factory_id", dataFactoryId.ID()) binaryTable, ok := resp.Properties.AsBinaryDataset() if !ok { @@ -410,7 +435,7 @@ func resourceDataFactoryDatasetBinaryDelete(d *pluginsdk.ResourceData, meta inte response, err := client.Delete(ctx, id.ResourceGroup, id.FactoryName, id.Name) if err != nil { if !utils.ResponseWasNotFound(response) { - return fmt.Errorf("deleting Data Factory Dataset Binary %q (Data Factory %q / Resource Group %q): %s", id.Name, id.FactoryName, id.ResourceGroup, err) + return fmt.Errorf("deleting %s: %+v", *id, err) } } diff --git a/internal/services/datafactory/data_factory_dataset_binary_resource_test.go b/internal/services/datafactory/data_factory_dataset_binary_resource_test.go index e1f7092ef3b8..6e950c0c5895 100644 --- a/internal/services/datafactory/data_factory_dataset_binary_resource_test.go +++ b/internal/services/datafactory/data_factory_dataset_binary_resource_test.go @@ -100,7 +100,7 @@ func (t DatasetBinaryResource) Exists(ctx context.Context, clients *clients.Clie resp, err := clients.DataFactory.DatasetClient.Get(ctx, id.ResourceGroup, id.FactoryName, id.Name, "") if err != nil { - return nil, fmt.Errorf("reading Data Factory Dataset Binary (%s): %+v", id, err) + return nil, fmt.Errorf("reading %s: %+v", *id, err) } return utils.Bool(resp.ID != nil), nil @@ -141,14 +141,14 @@ resource "azurerm_data_factory" "test" { resource 
"azurerm_data_factory_linked_service_azure_blob_storage" "test" { name = "acctestlsblob%d" resource_group_name = azurerm_resource_group.test.name - data_factory_name = azurerm_data_factory.test.name + data_factory_id = azurerm_data_factory.test.id connection_string = azurerm_storage_account.test.primary_connection_string } resource "azurerm_data_factory_dataset_binary" "test" { name = "acctestds%d" resource_group_name = azurerm_resource_group.test.name - data_factory_name = azurerm_data_factory.test.name + data_factory_id = azurerm_data_factory.test.id linked_service_name = azurerm_data_factory_linked_service_azure_blob_storage.test.name azure_blob_storage_location { @@ -193,14 +193,14 @@ resource "azurerm_data_factory" "test" { resource "azurerm_data_factory_linked_service_azure_blob_storage" "test" { name = "acctestlsblob%d" resource_group_name = azurerm_resource_group.test.name - data_factory_name = azurerm_data_factory.test.name + data_factory_id = azurerm_data_factory.test.id connection_string = azurerm_storage_account.test.primary_connection_string } resource "azurerm_data_factory_dataset_binary" "test" { name = "acctestds%d" resource_group_name = azurerm_resource_group.test.name - data_factory_name = azurerm_data_factory.test.name + data_factory_id = azurerm_data_factory.test.id linked_service_name = azurerm_data_factory_linked_service_azure_blob_storage.test.name azure_blob_storage_location { @@ -232,7 +232,7 @@ resource "azurerm_data_factory" "test" { resource "azurerm_data_factory_linked_service_web" "test" { name = "acctestlsweb%d" resource_group_name = azurerm_resource_group.test.name - data_factory_name = azurerm_data_factory.test.name + data_factory_id = azurerm_data_factory.test.id authentication_type = "Anonymous" url = "https://www.bing.com" } @@ -240,7 +240,7 @@ resource "azurerm_data_factory_linked_service_web" "test" { resource "azurerm_data_factory_dataset_binary" "test" { name = "acctestds%d" resource_group_name = azurerm_resource_group.test.name - data_factory_name = azurerm_data_factory.test.name + data_factory_id = azurerm_data_factory.test.id linked_service_name = azurerm_data_factory_linked_service_web.test.name http_server_location { @@ -277,7 +277,7 @@ resource "azurerm_data_factory" "test" { resource "azurerm_data_factory_linked_service_sftp" "test" { name = "acctestlssftp%d" resource_group_name = azurerm_resource_group.test.name - data_factory_name = azurerm_data_factory.test.name + data_factory_id = azurerm_data_factory.test.id authentication_type = "Basic" host = "http://www.bing.com" port = 22 @@ -288,7 +288,7 @@ resource "azurerm_data_factory_linked_service_sftp" "test" { resource "azurerm_data_factory_dataset_binary" "test" { name = "acctestds%d" resource_group_name = azurerm_resource_group.test.name - data_factory_name = azurerm_data_factory.test.name + data_factory_id = azurerm_data_factory.test.id linked_service_name = azurerm_data_factory_linked_service_sftp.test.name sftp_server_location { diff --git a/internal/services/datafactory/data_factory_dataset_cosmosdb_sqlapi_resource.go b/internal/services/datafactory/data_factory_dataset_cosmosdb_sqlapi_resource.go index 621f7e33dda0..9c4b89355143 100644 --- a/internal/services/datafactory/data_factory_dataset_cosmosdb_sqlapi_resource.go +++ b/internal/services/datafactory/data_factory_dataset_cosmosdb_sqlapi_resource.go @@ -44,12 +44,24 @@ func resourceDataFactoryDatasetCosmosDbSQLAPI() *pluginsdk.Resource { ValidateFunc: validate.LinkedServiceDatasetName, }, - // TODO: replace with `data_factory_id` in 
3.0 + // TODO remove in 3.0 "data_factory_name": { Type: pluginsdk.TypeString, - Required: true, + Optional: true, + Computed: true, ForceNew: true, ValidateFunc: validate.DataFactoryName(), + Deprecated: "`data_factory_name` is deprecated in favour of `data_factory_id` and will be removed in version 3.0 of the AzureRM provider", + ExactlyOneOf: []string{"data_factory_id"}, + }, + + "data_factory_id": { + Type: pluginsdk.TypeString, + Optional: true, // TODO set to Required in 3.0 + Computed: true, // TODO remove in 3.0 + ForceNew: true, + ValidateFunc: validate.DataFactoryID, + ExactlyOneOf: []string{"data_factory_name"}, }, // There's a bug in the Azure API where this is returned in lower-case @@ -154,13 +166,27 @@ func resourceDataFactoryDatasetCosmosDbSQLAPICreateUpdate(d *pluginsdk.ResourceD ctx, cancel := timeouts.ForCreateUpdate(meta.(*clients.Client).StopContext, d) defer cancel() - id := parse.NewDataSetID(subscriptionId, d.Get("resource_group_name").(string), d.Get("data_factory_name").(string), d.Get("name").(string)) + // TODO remove/simplify this after deprecation in 3.0 + var err error + var dataFactoryId *parse.DataFactoryId + if v := d.Get("data_factory_name").(string); v != "" { + newDataFactoryId := parse.NewDataFactoryID(subscriptionId, d.Get("resource_group_name").(string), d.Get("data_factory_name").(string)) + dataFactoryId = &newDataFactoryId + } + if v := d.Get("data_factory_id").(string); v != "" { + dataFactoryId, err = parse.DataFactoryID(v) + if err != nil { + return err + } + } + + id := parse.NewDataSetID(subscriptionId, dataFactoryId.ResourceGroup, dataFactoryId.FactoryName, d.Get("name").(string)) if d.IsNewResource() { existing, err := client.Get(ctx, id.ResourceGroup, id.FactoryName, id.Name, "") if err != nil { if !utils.ResponseWasNotFound(existing.Response) { - return fmt.Errorf("checking for presence of existing Data Factory Dataset CosmosDB SQL API %s: %+v", id, err) + return fmt.Errorf("checking for presence of existing %s: %+v", id, err) } } @@ -219,7 +245,7 @@ func resourceDataFactoryDatasetCosmosDbSQLAPICreateUpdate(d *pluginsdk.ResourceD } if _, err := client.CreateOrUpdate(ctx, id.ResourceGroup, id.FactoryName, id.Name, dataset, ""); err != nil { - return fmt.Errorf("creating/updating Data Factory Dataset CosmosDB SQL API %s: %+v", id, err) + return fmt.Errorf("creating/updating %s: %+v", id, err) } d.SetId(id.ID()) @@ -237,6 +263,8 @@ func resourceDataFactoryDatasetCosmosDbSQLAPIRead(d *pluginsdk.ResourceData, met return err } + dataFactoryId := parse.NewDataFactoryID(id.SubscriptionId, id.ResourceGroup, id.FactoryName) + resp, err := client.Get(ctx, id.ResourceGroup, id.FactoryName, id.Name, "") if err != nil { if utils.ResponseWasNotFound(resp.Response) { @@ -244,12 +272,14 @@ func resourceDataFactoryDatasetCosmosDbSQLAPIRead(d *pluginsdk.ResourceData, met return nil } - return fmt.Errorf("retrieving Data Factory Dataset CosmosDB SQL API %s: %+v", *id, err) + return fmt.Errorf("retrieving %s: %+v", *id, err) } d.Set("name", id.Name) d.Set("resource_group_name", id.ResourceGroup) + // TODO remove in 3.0 d.Set("data_factory_name", id.FactoryName) + d.Set("data_factory_id", dataFactoryId.ID()) cosmosDbTable, ok := resp.Properties.AsCosmosDbSQLAPICollectionDataset() if !ok { @@ -314,7 +344,7 @@ func resourceDataFactoryDatasetCosmosDbSQLAPIDelete(d *pluginsdk.ResourceData, m response, err := client.Delete(ctx, id.ResourceGroup, id.FactoryName, id.Name) if err != nil { if !utils.ResponseWasNotFound(response) { - return fmt.Errorf("deleting Data 
Factory Dataset CosmosDB SQL API %s: %+v", *id, err) + return fmt.Errorf("deleting %s: %+v", *id, err) } } diff --git a/internal/services/datafactory/data_factory_dataset_cosmosdb_sqlapi_resource_test.go b/internal/services/datafactory/data_factory_dataset_cosmosdb_sqlapi_resource_test.go index fffdc2c4b0e1..d473ade75304 100644 --- a/internal/services/datafactory/data_factory_dataset_cosmosdb_sqlapi_resource_test.go +++ b/internal/services/datafactory/data_factory_dataset_cosmosdb_sqlapi_resource_test.go @@ -71,7 +71,7 @@ func (t DatasetCosmosDbSQLAPIResource) Exists(ctx context.Context, clients *clie resp, err := clients.DataFactory.DatasetClient.Get(ctx, id.ResourceGroup, id.FactoryName, id.Name, "") if err != nil { - return nil, fmt.Errorf("reading Data Factory Dataset CosomsDB SQL (%s): %+v", *id, err) + return nil, fmt.Errorf("reading %s: %+v", *id, err) } return utils.Bool(resp.ID != nil), nil @@ -97,14 +97,14 @@ resource "azurerm_data_factory" "test" { resource "azurerm_data_factory_linked_service_cosmosdb" "test" { name = "acctestlscosmosdb%d" resource_group_name = azurerm_resource_group.test.name - data_factory_name = azurerm_data_factory.test.name + data_factory_id = azurerm_data_factory.test.id connection_string = "Server=test;Port=3306;Database=test;User=test;SSLMode=1;UseSystemTrustStore=0;Password=test" } resource "azurerm_data_factory_dataset_cosmosdb_sqlapi" "test" { name = "acctestds%d" resource_group_name = azurerm_resource_group.test.name - data_factory_name = azurerm_data_factory.test.name + data_factory_id = azurerm_data_factory.test.id linked_service_name = azurerm_data_factory_linked_service_cosmosdb.test.name collection_name = "Foo" @@ -133,14 +133,14 @@ resource "azurerm_data_factory" "test" { resource "azurerm_data_factory_linked_service_cosmosdb" "test" { name = "acctestlscosmosdb%d" resource_group_name = azurerm_resource_group.test.name - data_factory_name = azurerm_data_factory.test.name + data_factory_id = azurerm_data_factory.test.id connection_string = "Server=test;Port=3306;Database=test;User=test;SSLMode=1;UseSystemTrustStore=0;Password=test" } resource "azurerm_data_factory_dataset_cosmosdb_sqlapi" "test" { name = "acctestds%d" resource_group_name = azurerm_resource_group.test.name - data_factory_name = azurerm_data_factory.test.name + data_factory_id = azurerm_data_factory.test.id linked_service_name = azurerm_data_factory_linked_service_cosmosdb.test.name collection_name = "Foo" diff --git a/internal/services/datafactory/data_factory_dataset_delimited_text_resource.go b/internal/services/datafactory/data_factory_dataset_delimited_text_resource.go index be867897fa8b..92266c0cf019 100644 --- a/internal/services/datafactory/data_factory_dataset_delimited_text_resource.go +++ b/internal/services/datafactory/data_factory_dataset_delimited_text_resource.go @@ -44,12 +44,24 @@ func resourceDataFactoryDatasetDelimitedText() *pluginsdk.Resource { ValidateFunc: validate.LinkedServiceDatasetName, }, - // TODO: replace with `data_factory_id` in 3.0 + // TODO remove in 3.0 "data_factory_name": { Type: pluginsdk.TypeString, - Required: true, + Optional: true, + Computed: true, ForceNew: true, ValidateFunc: validate.DataFactoryName(), + Deprecated: "`data_factory_name` is deprecated in favour of `data_factory_id` and will be removed in version 3.0 of the AzureRM provider", + ExactlyOneOf: []string{"data_factory_id"}, + }, + + "data_factory_id": { + Type: pluginsdk.TypeString, + Optional: true, // TODO set to Required in 3.0 + Computed: true, // TODO remove in 3.0 + 
ForceNew: true, + ValidateFunc: validate.DataFactoryID, + ExactlyOneOf: []string{"data_factory_name"}, }, // There's a bug in the Azure API where this is returned in lower-case @@ -315,13 +327,27 @@ func resourceDataFactoryDatasetDelimitedTextCreateUpdate(d *pluginsdk.ResourceDa ctx, cancel := timeouts.ForCreateUpdate(meta.(*clients.Client).StopContext, d) defer cancel() - id := parse.NewDataSetID(subscriptionId, d.Get("resource_group_name").(string), d.Get("data_factory_name").(string), d.Get("name").(string)) + // TODO remove/simplify this after deprecation in 3.0 + var err error + var dataFactoryId *parse.DataFactoryId + if v := d.Get("data_factory_name").(string); v != "" { + newDataFactoryId := parse.NewDataFactoryID(subscriptionId, d.Get("resource_group_name").(string), d.Get("data_factory_name").(string)) + dataFactoryId = &newDataFactoryId + } + if v := d.Get("data_factory_id").(string); v != "" { + dataFactoryId, err = parse.DataFactoryID(v) + if err != nil { + return err + } + } + + id := parse.NewDataSetID(subscriptionId, dataFactoryId.ResourceGroup, dataFactoryId.FactoryName, d.Get("name").(string)) if d.IsNewResource() { existing, err := client.Get(ctx, id.ResourceGroup, id.FactoryName, id.Name, "") if err != nil { if !utils.ResponseWasNotFound(existing.Response) { - return fmt.Errorf("checking for presence of existing Data Factory Dataset DelimitedText %s: %+v", id, err) + return fmt.Errorf("checking for presence of existing %s: %+v", id, err) } } @@ -332,7 +358,7 @@ func resourceDataFactoryDatasetDelimitedTextCreateUpdate(d *pluginsdk.ResourceDa location := expandDataFactoryDatasetLocation(d) if location == nil { - return fmt.Errorf("One of `http_server_location`, `azure_blob_storage_location` must be specified to create a DataFactory Delimited Text Dataset") + return fmt.Errorf("one of `http_server_location`, `azure_blob_storage_location` must be specified to create a DataFactory Delimited Text Dataset") } delimited_textDatasetProperties := datafactory.DelimitedTextDatasetTypeProperties{ @@ -421,7 +447,7 @@ func resourceDataFactoryDatasetDelimitedTextCreateUpdate(d *pluginsdk.ResourceDa } if _, err := client.CreateOrUpdate(ctx, id.ResourceGroup, id.FactoryName, id.Name, dataset, ""); err != nil { - return fmt.Errorf("creating/updating Data Factory Dataset DelimitedText %s: %+v", id, err) + return fmt.Errorf("creating/updating %s: %+v", id, err) } d.SetId(id.ID()) @@ -439,6 +465,8 @@ func resourceDataFactoryDatasetDelimitedTextRead(d *pluginsdk.ResourceData, meta return err } + dataFactoryId := parse.NewDataFactoryID(id.SubscriptionId, id.ResourceGroup, id.FactoryName) + resp, err := client.Get(ctx, id.ResourceGroup, id.FactoryName, id.Name, "") if err != nil { if utils.ResponseWasNotFound(resp.Response) { @@ -446,12 +474,14 @@ func resourceDataFactoryDatasetDelimitedTextRead(d *pluginsdk.ResourceData, meta return nil } - return fmt.Errorf("retrieving Data Factory Dataset DelimitedText %q (Data Factory %q / Resource Group %q): %s", id.Name, id.FactoryName, id.ResourceGroup, err) + return fmt.Errorf("retrieving %s: %+v", *id, err) } d.Set("name", resp.Name) d.Set("resource_group_name", id.ResourceGroup) + // TODO remove in 3.0 d.Set("data_factory_name", id.FactoryName) + d.Set("data_factory_id", dataFactoryId.ID()) delimited_textTable, ok := resp.Properties.AsDelimitedTextDataset() if !ok { @@ -583,7 +613,7 @@ func resourceDataFactoryDatasetDelimitedTextDelete(d *pluginsdk.ResourceData, me response, err := client.Delete(ctx, id.ResourceGroup, id.FactoryName, id.Name) if err 
!= nil { if !utils.ResponseWasNotFound(response) { - return fmt.Errorf("deleting Data Factory Dataset DelimitedText %q (Data Factory %q / Resource Group %q): %s", id.Name, id.FactoryName, id.ResourceGroup, err) + return fmt.Errorf("deleting %s: %+v", *id, err) } } diff --git a/internal/services/datafactory/data_factory_dataset_delimited_text_resource_test.go b/internal/services/datafactory/data_factory_dataset_delimited_text_resource_test.go index 27e2764bef5e..5c928f09bb5e 100644 --- a/internal/services/datafactory/data_factory_dataset_delimited_text_resource_test.go +++ b/internal/services/datafactory/data_factory_dataset_delimited_text_resource_test.go @@ -128,7 +128,7 @@ func (t DatasetDelimitedTextResource) Exists(ctx context.Context, clients *clien resp, err := clients.DataFactory.DatasetClient.Get(ctx, id.ResourceGroup, id.FactoryName, id.Name, "") if err != nil { - return nil, fmt.Errorf("reading Data Factory Dataset Delimited Text (%s): %+v", *id, err) + return nil, fmt.Errorf("reading %s: %+v", *id, err) } return utils.Bool(resp.ID != nil), nil @@ -154,7 +154,7 @@ resource "azurerm_data_factory" "test" { resource "azurerm_data_factory_linked_service_web" "test" { name = "acctestlsweb%d" resource_group_name = azurerm_resource_group.test.name - data_factory_name = azurerm_data_factory.test.name + data_factory_id = azurerm_data_factory.test.id authentication_type = "Anonymous" url = "https://www.bing.com" } @@ -162,7 +162,7 @@ resource "azurerm_data_factory_linked_service_web" "test" { resource "azurerm_data_factory_dataset_delimited_text" "test" { name = "acctestds%d" resource_group_name = azurerm_resource_group.test.name - data_factory_name = azurerm_data_factory.test.name + data_factory_id = azurerm_data_factory.test.id linked_service_name = azurerm_data_factory_linked_service_web.test.name http_server_location { @@ -203,7 +203,7 @@ resource "azurerm_data_factory" "test" { resource "azurerm_data_factory_linked_service_web" "test" { name = "acctestlsweb%d" resource_group_name = azurerm_resource_group.test.name - data_factory_name = azurerm_data_factory.test.name + data_factory_id = azurerm_data_factory.test.id authentication_type = "Anonymous" url = "http://www.bing.com" } @@ -211,7 +211,7 @@ resource "azurerm_data_factory_linked_service_web" "test" { resource "azurerm_data_factory_dataset_delimited_text" "test" { name = "acctestds%d" resource_group_name = azurerm_resource_group.test.name - data_factory_name = azurerm_data_factory.test.name + data_factory_id = azurerm_data_factory.test.id linked_service_name = azurerm_data_factory_linked_service_web.test.name http_server_location { @@ -278,7 +278,7 @@ resource "azurerm_data_factory" "test" { resource "azurerm_data_factory_linked_service_web" "test" { name = "acctestlsweb%d" resource_group_name = azurerm_resource_group.test.name - data_factory_name = azurerm_data_factory.test.name + data_factory_id = azurerm_data_factory.test.id authentication_type = "Anonymous" url = "http://www.bing.com" } @@ -286,7 +286,7 @@ resource "azurerm_data_factory_linked_service_web" "test" { resource "azurerm_data_factory_dataset_delimited_text" "test" { name = "acctestds%d" resource_group_name = azurerm_resource_group.test.name - data_factory_name = azurerm_data_factory.test.name + data_factory_id = azurerm_data_factory.test.id linked_service_name = azurerm_data_factory_linked_service_web.test.name http_server_location { @@ -372,14 +372,14 @@ resource "azurerm_data_factory" "test" { resource "azurerm_data_factory_linked_service_azure_blob_storage" 
"test" { name = "acctestlsblob%d" resource_group_name = azurerm_resource_group.test.name - data_factory_name = azurerm_data_factory.test.name + data_factory_id = azurerm_data_factory.test.id connection_string = azurerm_storage_account.test.primary_connection_string } resource "azurerm_data_factory_dataset_delimited_text" "test" { name = "acctestds%d" resource_group_name = azurerm_resource_group.test.name - data_factory_name = azurerm_data_factory.test.name + data_factory_id = azurerm_data_factory.test.id linked_service_name = azurerm_data_factory_linked_service_azure_blob_storage.test.name azure_blob_storage_location { @@ -426,14 +426,14 @@ resource "azurerm_data_factory" "test" { resource "azurerm_data_factory_linked_service_azure_blob_storage" "test" { name = "acctestlsblob%d" resource_group_name = azurerm_resource_group.test.name - data_factory_name = azurerm_data_factory.test.name + data_factory_id = azurerm_data_factory.test.id connection_string = azurerm_storage_account.test.primary_connection_string } resource "azurerm_data_factory_dataset_delimited_text" "test" { name = "acctestds%d" resource_group_name = azurerm_resource_group.test.name - data_factory_name = azurerm_data_factory.test.name + data_factory_id = azurerm_data_factory.test.id linked_service_name = azurerm_data_factory_linked_service_azure_blob_storage.test.name azure_blob_storage_location { @@ -502,7 +502,7 @@ resource "azurerm_role_assignment" "test" { resource "azurerm_data_factory_linked_service_data_lake_storage_gen2" "test" { name = "acctestDataLakeStorage%d" resource_group_name = azurerm_resource_group.test.name - data_factory_name = azurerm_data_factory.test.name + data_factory_id = azurerm_data_factory.test.id use_managed_identity = true url = azurerm_storage_account.test.primary_dfs_endpoint } diff --git a/internal/services/datafactory/data_factory_dataset_http_resource.go b/internal/services/datafactory/data_factory_dataset_http_resource.go index d497f0388619..f88770adff4e 100644 --- a/internal/services/datafactory/data_factory_dataset_http_resource.go +++ b/internal/services/datafactory/data_factory_dataset_http_resource.go @@ -44,12 +44,23 @@ func resourceDataFactoryDatasetHTTP() *pluginsdk.Resource { ValidateFunc: validate.LinkedServiceDatasetName, }, - // TODO: replace with `data_factory_id` in 3.0 + // TODO remove in 3.0 "data_factory_name": { Type: pluginsdk.TypeString, Required: true, ForceNew: true, ValidateFunc: validate.DataFactoryName(), + Deprecated: "`data_factory_name` is deprecated in favour of `data_factory_id` and will be removed in version 3.0 of the AzureRM provider", + ExactlyOneOf: []string{"data_factory_id"}, + }, + + "data_factory_id": { + Type: pluginsdk.TypeString, + Optional: true, // TODO set to Required in 3.0 + Computed: true, // TODO remove in 3.0 + ForceNew: true, + ValidateFunc: validate.DataFactoryID, + ExactlyOneOf: []string{"data_factory_name"}, }, // There's a bug in the Azure API where this is returned in lower-case @@ -168,13 +179,27 @@ func resourceDataFactoryDatasetHTTPCreateUpdate(d *pluginsdk.ResourceData, meta ctx, cancel := timeouts.ForCreateUpdate(meta.(*clients.Client).StopContext, d) defer cancel() - id := parse.NewDataSetID(subscriptionId, d.Get("resource_group_name").(string), d.Get("data_factory_name").(string), d.Get("name").(string)) + // TODO remove/simplify this after deprecation in 3.0 + var err error + var dataFactoryId *parse.DataFactoryId + if v := d.Get("data_factory_name").(string); v != "" { + newDataFactoryId := 
parse.NewDataFactoryID(subscriptionId, d.Get("resource_group_name").(string), d.Get("data_factory_name").(string)) + dataFactoryId = &newDataFactoryId + } + if v := d.Get("data_factory_id").(string); v != "" { + dataFactoryId, err = parse.DataFactoryID(v) + if err != nil { + return err + } + } + + id := parse.NewDataSetID(subscriptionId, dataFactoryId.ResourceGroup, dataFactoryId.FactoryName, d.Get("name").(string)) if d.IsNewResource() { existing, err := client.Get(ctx, id.ResourceGroup, id.FactoryName, id.Name, "") if err != nil { if !utils.ResponseWasNotFound(existing.Response) { - return fmt.Errorf("checking for presence of existing Data Factory Dataset HTTP %s: %+v", id, err) + return fmt.Errorf("checking for presence of existing %s: %+v", id, err) } } @@ -235,7 +260,7 @@ func resourceDataFactoryDatasetHTTPCreateUpdate(d *pluginsdk.ResourceData, meta } if _, err := client.CreateOrUpdate(ctx, id.ResourceGroup, id.FactoryName, id.Name, dataset, ""); err != nil { - return fmt.Errorf("creating/updating Data Factory Dataset HTTP %s: %+v", id, err) + return fmt.Errorf("creating/updating %s: %+v", id, err) } d.SetId(id.ID()) @@ -253,6 +278,8 @@ func resourceDataFactoryDatasetHTTPRead(d *pluginsdk.ResourceData, meta interfac return err } + dataFactoryId := parse.NewDataFactoryID(id.SubscriptionId, id.ResourceGroup, id.FactoryName) + resp, err := client.Get(ctx, id.ResourceGroup, id.FactoryName, id.Name, "") if err != nil { if utils.ResponseWasNotFound(resp.Response) { @@ -260,12 +287,14 @@ func resourceDataFactoryDatasetHTTPRead(d *pluginsdk.ResourceData, meta interfac return nil } - return fmt.Errorf("retrieving Data Factory Dataset HTTP %s: %+v", *id, err) + return fmt.Errorf("retrieving %s: %+v", *id, err) } d.Set("name", id.Name) d.Set("resource_group_name", id.ResourceGroup) + // TODO remove in 3.0 d.Set("data_factory_name", id.FactoryName) + d.Set("data_factory_id", dataFactoryId.ID()) httpTable, ok := resp.Properties.AsHTTPDataset() if !ok { @@ -342,7 +371,7 @@ func resourceDataFactoryDatasetHTTPDelete(d *pluginsdk.ResourceData, meta interf response, err := client.Delete(ctx, id.ResourceGroup, id.FactoryName, id.Name) if err != nil { if !utils.ResponseWasNotFound(response) { - return fmt.Errorf("deleting Data Factory Dataset HTTP %s: %+v", *id, err) + return fmt.Errorf("deleting %s: %+v", *id, err) } } diff --git a/internal/services/datafactory/data_factory_dataset_http_resource_test.go b/internal/services/datafactory/data_factory_dataset_http_resource_test.go index 134e160022e4..de5c746a3648 100644 --- a/internal/services/datafactory/data_factory_dataset_http_resource_test.go +++ b/internal/services/datafactory/data_factory_dataset_http_resource_test.go @@ -71,7 +71,7 @@ func (t DatasetHTTPResource) Exists(ctx context.Context, clients *clients.Client resp, err := clients.DataFactory.DatasetClient.Get(ctx, id.ResourceGroup, id.FactoryName, id.Name, "") if err != nil { - return nil, fmt.Errorf("reading Data Factory Dataset HTTP (%s): %+v", *id, err) + return nil, fmt.Errorf("reading %s: %+v", *id, err) } return utils.Bool(resp.ID != nil), nil @@ -97,7 +97,7 @@ resource "azurerm_data_factory" "test" { resource "azurerm_data_factory_linked_service_web" "test" { name = "acctestlsweb%d" resource_group_name = azurerm_resource_group.test.name - data_factory_name = azurerm_data_factory.test.name + data_factory_id = azurerm_data_factory.test.id authentication_type = "Anonymous" url = "http://www.bing.com" @@ -106,7 +106,7 @@ resource "azurerm_data_factory_linked_service_web" "test" { resource 
"azurerm_data_factory_dataset_http" "test" { name = "acctestds%d" resource_group_name = azurerm_resource_group.test.name - data_factory_name = azurerm_data_factory.test.name + data_factory_id = azurerm_data_factory.test.id linked_service_name = azurerm_data_factory_linked_service_web.test.name relative_url = "/foo/bar" @@ -137,7 +137,7 @@ resource "azurerm_data_factory" "test" { resource "azurerm_data_factory_linked_service_web" "test" { name = "acctestlsweb%d" resource_group_name = azurerm_resource_group.test.name - data_factory_name = azurerm_data_factory.test.name + data_factory_id = azurerm_data_factory.test.id authentication_type = "Anonymous" url = "http://www.bing.com" @@ -146,7 +146,7 @@ resource "azurerm_data_factory_linked_service_web" "test" { resource "azurerm_data_factory_dataset_http" "test" { name = "acctestds%d" resource_group_name = azurerm_resource_group.test.name - data_factory_name = azurerm_data_factory.test.name + data_factory_id = azurerm_data_factory.test.id linked_service_name = azurerm_data_factory_linked_service_web.test.name relative_url = "/foo/bar" @@ -196,7 +196,7 @@ resource "azurerm_data_factory" "test" { resource "azurerm_data_factory_linked_service_web" "test" { name = "acctestlsweb%d" resource_group_name = azurerm_resource_group.test.name - data_factory_name = azurerm_data_factory.test.name + data_factory_id = azurerm_data_factory.test.id authentication_type = "Anonymous" url = "http://www.bing.com" } diff --git a/internal/services/datafactory/data_factory_dataset_json_resource.go b/internal/services/datafactory/data_factory_dataset_json_resource.go index c30037622c46..8123d1253a13 100644 --- a/internal/services/datafactory/data_factory_dataset_json_resource.go +++ b/internal/services/datafactory/data_factory_dataset_json_resource.go @@ -44,12 +44,24 @@ func resourceDataFactoryDatasetJSON() *pluginsdk.Resource { ValidateFunc: validate.LinkedServiceDatasetName, }, - // TODO: replace with `data_factory_id` in 3.0 + // TODO remove in 3.0 "data_factory_name": { Type: pluginsdk.TypeString, - Required: true, + Optional: true, + Computed: true, ForceNew: true, ValidateFunc: validate.DataFactoryName(), + Deprecated: "`data_factory_name` is deprecated in favour of `data_factory_id` and will be removed in version 3.0 of the AzureRM provider", + ExactlyOneOf: []string{"data_factory_id"}, + }, + + "data_factory_id": { + Type: pluginsdk.TypeString, + Optional: true, // TODO set to Required in 3.0 + Computed: true, // TODO remove in 3.0 + ForceNew: true, + ValidateFunc: validate.DataFactoryID, + ExactlyOneOf: []string{"data_factory_name"}, }, // There's a bug in the Azure API where this is returned in lower-case @@ -230,13 +242,27 @@ func resourceDataFactoryDatasetJSONCreateUpdate(d *pluginsdk.ResourceData, meta ctx, cancel := timeouts.ForCreateUpdate(meta.(*clients.Client).StopContext, d) defer cancel() - id := parse.NewDataSetID(subscriptionId, d.Get("resource_group_name").(string), d.Get("data_factory_name").(string), d.Get("name").(string)) + // TODO remove/simplify this after deprecation in 3.0 + var err error + var dataFactoryId *parse.DataFactoryId + if v := d.Get("data_factory_name").(string); v != "" { + newDataFactoryId := parse.NewDataFactoryID(subscriptionId, d.Get("resource_group_name").(string), d.Get("data_factory_name").(string)) + dataFactoryId = &newDataFactoryId + } + if v := d.Get("data_factory_id").(string); v != "" { + dataFactoryId, err = parse.DataFactoryID(v) + if err != nil { + return err + } + } + + id := parse.NewDataSetID(subscriptionId, 
dataFactoryId.ResourceGroup, dataFactoryId.FactoryName, d.Get("name").(string)) if d.IsNewResource() { existing, err := client.Get(ctx, id.ResourceGroup, id.FactoryName, id.Name, "") if err != nil { if !utils.ResponseWasNotFound(existing.Response) { - return fmt.Errorf("checking for presence of existing Data Factory Dataset JSON %s: %+v", id, err) + return fmt.Errorf("checking for presence of existing %s: %+v", id, err) } } @@ -247,7 +273,7 @@ func resourceDataFactoryDatasetJSONCreateUpdate(d *pluginsdk.ResourceData, meta location := expandDataFactoryDatasetLocation(d) if location == nil { - return fmt.Errorf("One of `http_server_location`, `azure_blob_storage_location` must be specified to create a DataFactory Delimited Text Dataset") + return fmt.Errorf("one of `http_server_location`, `azure_blob_storage_location` must be specified to create a DataFactory Delimited Text Dataset") } jsonDatasetProperties := datafactory.JSONDatasetTypeProperties{ @@ -301,7 +327,7 @@ func resourceDataFactoryDatasetJSONCreateUpdate(d *pluginsdk.ResourceData, meta } if _, err := client.CreateOrUpdate(ctx, id.ResourceGroup, id.FactoryName, id.Name, dataset, ""); err != nil { - return fmt.Errorf("creating/updating Data Factory Dataset JSON %s: %+v", id, err) + return fmt.Errorf("creating/updating %s: %+v", id, err) } d.SetId(id.ID()) @@ -319,6 +345,8 @@ func resourceDataFactoryDatasetJSONRead(d *pluginsdk.ResourceData, meta interfac return err } + dataFactoryId := parse.NewDataFactoryID(id.SubscriptionId, id.ResourceGroup, id.FactoryName) + resp, err := client.Get(ctx, id.ResourceGroup, id.FactoryName, id.Name, "") if err != nil { if utils.ResponseWasNotFound(resp.Response) { @@ -326,12 +354,14 @@ func resourceDataFactoryDatasetJSONRead(d *pluginsdk.ResourceData, meta interfac return nil } - return fmt.Errorf("retrieving Data Factory Dataset JSON %s: %+v", *id, err) + return fmt.Errorf("retrieving %s: %+v", *id, err) } d.Set("name", id.Name) d.Set("resource_group_name", id.ResourceGroup) + // TODO remove in 3.0 d.Set("data_factory_name", id.FactoryName) + d.Set("data_factory_id", dataFactoryId.ID()) jsonTable, ok := resp.Properties.AsJSONDataset() if !ok { @@ -407,7 +437,7 @@ func resourceDataFactoryDatasetJSONDelete(d *pluginsdk.ResourceData, meta interf response, err := client.Delete(ctx, id.ResourceGroup, id.FactoryName, id.Name) if err != nil { if !utils.ResponseWasNotFound(response) { - return fmt.Errorf("deleting Data Factory Dataset JSON %s: %+v", *id, err) + return fmt.Errorf("deleting %s: %+v", *id, err) } } diff --git a/internal/services/datafactory/data_factory_dataset_json_resource_test.go b/internal/services/datafactory/data_factory_dataset_json_resource_test.go index 55277e7be4d2..aa3d0bcca36c 100644 --- a/internal/services/datafactory/data_factory_dataset_json_resource_test.go +++ b/internal/services/datafactory/data_factory_dataset_json_resource_test.go @@ -63,6 +63,21 @@ func TestAccDataFactoryDatasetJSON_update(t *testing.T) { }) } +func TestAccDataFactoryDatasetJSON_blob(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_data_factory_dataset_json", "test") + r := DatasetJSONResource{} + + data.ResourceTest(t, r, []acceptance.TestStep{ + { + Config: r.blob(data), + Check: acceptance.ComposeTestCheckFunc( + check.That(data.ResourceName).ExistsInAzure(r), + ), + }, + data.ImportStep(), + }) +} + func (t DatasetJSONResource) Exists(ctx context.Context, clients *clients.Client, state *pluginsdk.InstanceState) (*bool, error) { id, err := parse.DataSetID(state.ID) if err != nil { @@ 
-71,7 +86,7 @@ func (t DatasetJSONResource) Exists(ctx context.Context, clients *clients.Client resp, err := clients.DataFactory.DatasetClient.Get(ctx, id.ResourceGroup, id.FactoryName, id.Name, "") if err != nil { - return nil, fmt.Errorf("reading Data Factory Dataset JSON (%s): %+v", *id, err) + return nil, fmt.Errorf("reading %s: %+v", *id, err) } return utils.Bool(resp.ID != nil), nil @@ -97,7 +112,7 @@ resource "azurerm_data_factory" "test" { resource "azurerm_data_factory_linked_service_web" "test" { name = "acctestlsweb%d" resource_group_name = azurerm_resource_group.test.name - data_factory_name = azurerm_data_factory.test.name + data_factory_id = azurerm_data_factory.test.id authentication_type = "Anonymous" url = "http://www.bing.com" } @@ -105,7 +120,7 @@ resource "azurerm_data_factory_linked_service_web" "test" { resource "azurerm_data_factory_dataset_json" "test" { name = "acctestds%d" resource_group_name = azurerm_resource_group.test.name - data_factory_name = azurerm_data_factory.test.name + data_factory_id = azurerm_data_factory.test.id linked_service_name = azurerm_data_factory_linked_service_web.test.name http_server_location { @@ -139,7 +154,7 @@ resource "azurerm_data_factory" "test" { resource "azurerm_data_factory_linked_service_web" "test" { name = "acctestlsweb%d" resource_group_name = azurerm_resource_group.test.name - data_factory_name = azurerm_data_factory.test.name + data_factory_id = azurerm_data_factory.test.id authentication_type = "Anonymous" url = "http://www.bing.com" } @@ -147,7 +162,7 @@ resource "azurerm_data_factory_linked_service_web" "test" { resource "azurerm_data_factory_dataset_json" "test" { name = "acctestds%d" resource_group_name = azurerm_resource_group.test.name - data_factory_name = azurerm_data_factory.test.name + data_factory_id = azurerm_data_factory.test.id linked_service_name = azurerm_data_factory_linked_service_web.test.name http_server_location { @@ -203,7 +218,7 @@ resource "azurerm_data_factory" "test" { resource "azurerm_data_factory_linked_service_web" "test" { name = "acctestlsweb%d" resource_group_name = azurerm_resource_group.test.name - data_factory_name = azurerm_data_factory.test.name + data_factory_id = azurerm_data_factory.test.id authentication_type = "Anonymous" url = "http://www.bing.com" } @@ -251,21 +266,6 @@ resource "azurerm_data_factory_dataset_json" "test" { `, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger, data.RandomInteger) } -func TestAccDataFactoryDatasetJSON_blob(t *testing.T) { - data := acceptance.BuildTestData(t, "azurerm_data_factory_dataset_json", "test") - r := DatasetJSONResource{} - - data.ResourceTest(t, r, []acceptance.TestStep{ - { - Config: r.blob(data), - Check: acceptance.ComposeTestCheckFunc( - check.That(data.ResourceName).ExistsInAzure(r), - ), - }, - data.ImportStep(), - }) -} - func (DatasetJSONResource) blob(data acceptance.TestData) string { return fmt.Sprintf(` provider "azurerm" { @@ -301,14 +301,14 @@ resource "azurerm_data_factory" "test" { resource "azurerm_data_factory_linked_service_azure_blob_storage" "test" { name = "acctestlsblob%d" resource_group_name = azurerm_resource_group.test.name - data_factory_name = azurerm_data_factory.test.name + data_factory_id = azurerm_data_factory.test.id connection_string = azurerm_storage_account.test.primary_connection_string } resource "azurerm_data_factory_dataset_json" "test" { name = "acctestds%d" resource_group_name = azurerm_resource_group.test.name - data_factory_name = 
azurerm_data_factory.test.name + data_factory_id = azurerm_data_factory.test.id linked_service_name = azurerm_data_factory_linked_service_azure_blob_storage.test.name azure_blob_storage_location { diff --git a/internal/services/datafactory/data_factory_dataset_mysql_resource.go b/internal/services/datafactory/data_factory_dataset_mysql_resource.go index 272a90d014ad..75c0a4907d10 100644 --- a/internal/services/datafactory/data_factory_dataset_mysql_resource.go +++ b/internal/services/datafactory/data_factory_dataset_mysql_resource.go @@ -44,12 +44,24 @@ func resourceDataFactoryDatasetMySQL() *pluginsdk.Resource { ValidateFunc: validate.LinkedServiceDatasetName, }, - // TODO: replace with `data_factory_id` in 3.0 + // TODO remove in 3.0 "data_factory_name": { Type: pluginsdk.TypeString, - Required: true, + Optional: true, + Computed: true, ForceNew: true, ValidateFunc: validate.DataFactoryName(), + Deprecated: "`data_factory_name` is deprecated in favour of `data_factory_id` and will be removed in version 3.0 of the AzureRM provider", + ExactlyOneOf: []string{"data_factory_id"}, + }, + + "data_factory_id": { + Type: pluginsdk.TypeString, + Optional: true, // TODO set to Required in 3.0 + Computed: true, // TODO remove in 3.0 + ForceNew: true, + ValidateFunc: validate.DataFactoryID, + ExactlyOneOf: []string{"data_factory_name"}, }, // There's a bug in the Azure API where this is returned in lower-case @@ -153,13 +165,27 @@ func resourceDataFactoryDatasetMySQLCreateUpdate(d *pluginsdk.ResourceData, meta ctx, cancel := timeouts.ForCreateUpdate(meta.(*clients.Client).StopContext, d) defer cancel() - id := parse.NewDataSetID(subscriptionId, d.Get("resource_group_name").(string), d.Get("data_factory_name").(string), d.Get("name").(string)) + // TODO remove/simplify this after deprecation in 3.0 + var err error + var dataFactoryId *parse.DataFactoryId + if v := d.Get("data_factory_name").(string); v != "" { + newDataFactoryId := parse.NewDataFactoryID(subscriptionId, d.Get("resource_group_name").(string), d.Get("data_factory_name").(string)) + dataFactoryId = &newDataFactoryId + } + if v := d.Get("data_factory_id").(string); v != "" { + dataFactoryId, err = parse.DataFactoryID(v) + if err != nil { + return err + } + } + + id := parse.NewDataSetID(subscriptionId, dataFactoryId.ResourceGroup, dataFactoryId.FactoryName, d.Get("name").(string)) if d.IsNewResource() { existing, err := client.Get(ctx, id.ResourceGroup, id.FactoryName, id.Name, "") if err != nil { if !utils.ResponseWasNotFound(existing.Response) { - return fmt.Errorf("checking for presence of existing Data Factory Dataset MySQL %s: %+v", id, err) + return fmt.Errorf("checking for presence of existing %s: %+v", id, err) } } @@ -217,7 +243,7 @@ func resourceDataFactoryDatasetMySQLCreateUpdate(d *pluginsdk.ResourceData, meta } if _, err := client.CreateOrUpdate(ctx, id.ResourceGroup, id.FactoryName, id.Name, dataset, ""); err != nil { - return fmt.Errorf("creating/updating Data Factory Dataset MySQL %s: %+v", id, err) + return fmt.Errorf("creating/updating %s: %+v", id, err) } d.SetId(id.ID()) @@ -235,6 +261,8 @@ func resourceDataFactoryDatasetMySQLRead(d *pluginsdk.ResourceData, meta interfa return err } + dataFactoryId := parse.NewDataFactoryID(id.SubscriptionId, id.ResourceGroup, id.FactoryName) + resp, err := client.Get(ctx, id.ResourceGroup, id.FactoryName, id.Name, "") if err != nil { if utils.ResponseWasNotFound(resp.Response) { @@ -242,12 +270,14 @@ func resourceDataFactoryDatasetMySQLRead(d *pluginsdk.ResourceData, meta interfa 
return nil } - return fmt.Errorf("retrieving Data Factory Dataset MySQL %s: %+v", *id, err) + return fmt.Errorf("retrieving %s: %+v", *id, err) } d.Set("name", id.Name) d.Set("resource_group_name", id.ResourceGroup) + // TODO remove in 3.0 d.Set("data_factory_name", id.FactoryName) + d.Set("data_factory_id", dataFactoryId.ID()) mysqlTable, ok := resp.Properties.AsRelationalTableDataset() if !ok { @@ -312,7 +342,7 @@ func resourceDataFactoryDatasetMySQLDelete(d *pluginsdk.ResourceData, meta inter response, err := client.Delete(ctx, id.ResourceGroup, id.FactoryName, id.Name) if err != nil { if !utils.ResponseWasNotFound(response) { - return fmt.Errorf("deleting Data Factory Dataset MySQL %s: %+v", *id, err) + return fmt.Errorf("deleting %s: %+v", *id, err) } } diff --git a/internal/services/datafactory/data_factory_dataset_mysql_resource_test.go b/internal/services/datafactory/data_factory_dataset_mysql_resource_test.go index 9baf87589e20..0c1a1d680bc6 100644 --- a/internal/services/datafactory/data_factory_dataset_mysql_resource_test.go +++ b/internal/services/datafactory/data_factory_dataset_mysql_resource_test.go @@ -70,7 +70,7 @@ func (t DatasetMySQLResource) Exists(ctx context.Context, clients *clients.Clien resp, err := clients.DataFactory.DatasetClient.Get(ctx, id.ResourceGroup, id.FactoryName, id.Name, "") if err != nil { - return nil, fmt.Errorf("reading Data Factory Dataset MySQL (%s): %+v", *id, err) + return nil, fmt.Errorf("reading %s: %+v", *id, err) } return utils.Bool(resp.ID != nil), nil @@ -96,14 +96,14 @@ resource "azurerm_data_factory" "test" { resource "azurerm_data_factory_linked_service_mysql" "test" { name = "acctestlssql%d" resource_group_name = azurerm_resource_group.test.name - data_factory_name = azurerm_data_factory.test.name + data_factory_id = azurerm_data_factory.test.id connection_string = "Server=test;Port=3306;Database=test;User=test;SSLMode=1;UseSystemTrustStore=0;Password=test" } resource "azurerm_data_factory_dataset_mysql" "test" { name = "acctestds%d" resource_group_name = azurerm_resource_group.test.name - data_factory_name = azurerm_data_factory.test.name + data_factory_id = azurerm_data_factory.test.id linked_service_name = azurerm_data_factory_linked_service_mysql.test.name } `, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger, data.RandomInteger) @@ -129,14 +129,14 @@ resource "azurerm_data_factory" "test" { resource "azurerm_data_factory_linked_service_mysql" "test" { name = "acctestlssql%d" resource_group_name = azurerm_resource_group.test.name - data_factory_name = azurerm_data_factory.test.name + data_factory_id = azurerm_data_factory.test.id connection_string = "Server=test;Port=3306;Database=test;User=test;SSLMode=1;UseSystemTrustStore=0;Password=test" } resource "azurerm_data_factory_dataset_mysql" "test" { name = "acctestds%d" resource_group_name = azurerm_resource_group.test.name - data_factory_name = azurerm_data_factory.test.name + data_factory_id = azurerm_data_factory.test.id linked_service_name = azurerm_data_factory_linked_service_mysql.test.name description = "test description" @@ -183,7 +183,7 @@ resource "azurerm_data_factory" "test" { resource "azurerm_data_factory_linked_service_mysql" "test" { name = "acctestlssql%d" resource_group_name = azurerm_resource_group.test.name - data_factory_name = azurerm_data_factory.test.name + data_factory_id = azurerm_data_factory.test.id connection_string = "Server=test;Port=3306;Database=test;User=test;SSLMode=1;UseSystemTrustStore=0;Password=test" } diff 
--git a/internal/services/datafactory/data_factory_dataset_parquet_resource.go b/internal/services/datafactory/data_factory_dataset_parquet_resource.go index 8b833838ed2f..c6ffeb5be9bd 100644 --- a/internal/services/datafactory/data_factory_dataset_parquet_resource.go +++ b/internal/services/datafactory/data_factory_dataset_parquet_resource.go @@ -44,12 +44,24 @@ func resourceDataFactoryDatasetParquet() *pluginsdk.Resource { ValidateFunc: validate.LinkedServiceDatasetName, }, - // TODO: replace with `data_factory_id` in 3.0 + // TODO remove in 3.0 "data_factory_name": { Type: pluginsdk.TypeString, - Required: true, + Optional: true, + Computed: true, ForceNew: true, ValidateFunc: validate.DataFactoryName(), + Deprecated: "`data_factory_name` is deprecated in favour of `data_factory_id` and will be removed in version 3.0 of the AzureRM provider", + ExactlyOneOf: []string{"data_factory_id"}, + }, + + "data_factory_id": { + Type: pluginsdk.TypeString, + Optional: true, // TODO set to Required in 3.0 + Computed: true, // TODO remove in 3.0 + ForceNew: true, + ValidateFunc: validate.DataFactoryID, + ExactlyOneOf: []string{"data_factory_name"}, }, // There's a bug in the Azure API where this is returned in lower-case @@ -245,13 +257,27 @@ func resourceDataFactoryDatasetParquetCreateUpdate(d *pluginsdk.ResourceData, me subscriptionId := meta.(*clients.Client).Account.SubscriptionId defer cancel() - id := parse.NewDataSetID(subscriptionId, d.Get("resource_group_name").(string), d.Get("data_factory_name").(string), d.Get("name").(string)) + // TODO remove/simplify this after deprecation in 3.0 + var err error + var dataFactoryId *parse.DataFactoryId + if v := d.Get("data_factory_name").(string); v != "" { + newDataFactoryId := parse.NewDataFactoryID(subscriptionId, d.Get("resource_group_name").(string), d.Get("data_factory_name").(string)) + dataFactoryId = &newDataFactoryId + } + if v := d.Get("data_factory_id").(string); v != "" { + dataFactoryId, err = parse.DataFactoryID(v) + if err != nil { + return err + } + } + + id := parse.NewDataSetID(subscriptionId, dataFactoryId.ResourceGroup, dataFactoryId.FactoryName, d.Get("name").(string)) if d.IsNewResource() { existing, err := client.Get(ctx, id.ResourceGroup, id.FactoryName, id.Name, "") if err != nil { if !utils.ResponseWasNotFound(existing.Response) { - return fmt.Errorf("checking for presence of existing Data Factory Dataset Parquet %s: %+v", id, err) + return fmt.Errorf("checking for presence of existing %s: %+v", id, err) } } @@ -334,6 +360,8 @@ func resourceDataFactoryDatasetParquetRead(d *pluginsdk.ResourceData, meta inter return err } + dataFactoryId := parse.NewDataFactoryID(id.SubscriptionId, id.ResourceGroup, id.FactoryName) + resp, err := client.Get(ctx, id.ResourceGroup, id.FactoryName, id.Name, "") if err != nil { if utils.ResponseWasNotFound(resp.Response) { @@ -341,12 +369,14 @@ func resourceDataFactoryDatasetParquetRead(d *pluginsdk.ResourceData, meta inter return nil } - return fmt.Errorf("retrieving Data Factory Dataset Parquet %s: %+v", *id, err) + return fmt.Errorf("retrieving %s: %+v", *id, err) } d.Set("name", resp.Name) d.Set("resource_group_name", id.ResourceGroup) + // TODO remove in 3.0 d.Set("data_factory_name", id.FactoryName) + d.Set("data_factory_id", dataFactoryId.ID()) parquetTable, ok := resp.Properties.AsParquetDataset() if !ok { @@ -422,7 +452,7 @@ func resourceDataFactoryDatasetParquetDelete(d *pluginsdk.ResourceData, meta int response, err := client.Delete(ctx, id.ResourceGroup, id.FactoryName, id.Name) if 
err != nil { if !utils.ResponseWasNotFound(response) { - return fmt.Errorf("deleting Data Factory Dataset Parquet %s: %+v", *id, err) + return fmt.Errorf("deleting %s: %+v", *id, err) } } diff --git a/internal/services/datafactory/data_factory_dataset_parquet_resource_test.go b/internal/services/datafactory/data_factory_dataset_parquet_resource_test.go index 17fe069f4317..e1b7c31fcda1 100644 --- a/internal/services/datafactory/data_factory_dataset_parquet_resource_test.go +++ b/internal/services/datafactory/data_factory_dataset_parquet_resource_test.go @@ -88,7 +88,7 @@ func (t DatasetParquetResource) Exists(ctx context.Context, clients *clients.Cli resp, err := clients.DataFactory.DatasetClient.Get(ctx, id.ResourceGroup, id.FactoryName, id.Name, "") if err != nil { - return nil, fmt.Errorf("reading Data Factory Dataset Parquet (%s): %+v", *id, err) + return nil, fmt.Errorf("reading %s: %+v", *id, err) } return utils.Bool(resp.ID != nil), nil @@ -114,7 +114,7 @@ resource "azurerm_data_factory" "test" { resource "azurerm_data_factory_linked_service_web" "test" { name = "acctestlsweb%d" resource_group_name = azurerm_resource_group.test.name - data_factory_name = azurerm_data_factory.test.name + data_factory_id = azurerm_data_factory.test.id authentication_type = "Anonymous" url = "https://www.bing.com" } @@ -122,7 +122,7 @@ resource "azurerm_data_factory_linked_service_web" "test" { resource "azurerm_data_factory_dataset_parquet" "test" { name = "acctestds%d" resource_group_name = azurerm_resource_group.test.name - data_factory_name = azurerm_data_factory.test.name + data_factory_id = azurerm_data_factory.test.id linked_service_name = azurerm_data_factory_linked_service_web.test.name http_server_location { @@ -154,7 +154,7 @@ resource "azurerm_data_factory" "test" { resource "azurerm_data_factory_linked_service_web" "test" { name = "acctestlsweb%d" resource_group_name = azurerm_resource_group.test.name - data_factory_name = azurerm_data_factory.test.name + data_factory_id = azurerm_data_factory.test.id authentication_type = "Anonymous" url = "http://www.bing.com" } @@ -162,7 +162,7 @@ resource "azurerm_data_factory_linked_service_web" "test" { resource "azurerm_data_factory_dataset_parquet" "test" { name = "acctestds%d" resource_group_name = azurerm_resource_group.test.name - data_factory_name = azurerm_data_factory.test.name + data_factory_id = azurerm_data_factory.test.id linked_service_name = azurerm_data_factory_linked_service_web.test.name http_server_location { @@ -217,7 +217,7 @@ resource "azurerm_data_factory" "test" { resource "azurerm_data_factory_linked_service_web" "test" { name = "acctestlsweb%d" resource_group_name = azurerm_resource_group.test.name - data_factory_name = azurerm_data_factory.test.name + data_factory_id = azurerm_data_factory.test.id authentication_type = "Anonymous" url = "http://www.bing.com" } @@ -225,7 +225,7 @@ resource "azurerm_data_factory_linked_service_web" "test" { resource "azurerm_data_factory_dataset_parquet" "test" { name = "acctestds%d" resource_group_name = azurerm_resource_group.test.name - data_factory_name = azurerm_data_factory.test.name + data_factory_id = azurerm_data_factory.test.id linked_service_name = azurerm_data_factory_linked_service_web.test.name http_server_location { @@ -303,7 +303,7 @@ resource "azurerm_data_factory" "test" { resource "azurerm_data_factory_linked_service_azure_blob_storage" "test" { name = "acctestlsblob%d" resource_group_name = azurerm_resource_group.test.name - data_factory_name = azurerm_data_factory.test.name 
+ data_factory_id = azurerm_data_factory.test.id connection_string = azurerm_storage_account.test.primary_connection_string } diff --git a/internal/services/datafactory/data_factory_dataset_postgresql_resource.go b/internal/services/datafactory/data_factory_dataset_postgresql_resource.go index 2f8acbb3f61f..ac8bd746fe70 100644 --- a/internal/services/datafactory/data_factory_dataset_postgresql_resource.go +++ b/internal/services/datafactory/data_factory_dataset_postgresql_resource.go @@ -44,12 +44,24 @@ func resourceDataFactoryDatasetPostgreSQL() *pluginsdk.Resource { ValidateFunc: validate.LinkedServiceDatasetName, }, - // TODO: replace with `data_factory_id` in 3.0 + // TODO remove in 3.0 "data_factory_name": { Type: pluginsdk.TypeString, - Required: true, + Optional: true, + Computed: true, ForceNew: true, ValidateFunc: validate.DataFactoryName(), + Deprecated: "`data_factory_name` is deprecated in favour of `data_factory_id` and will be removed in version 3.0 of the AzureRM provider", + ExactlyOneOf: []string{"data_factory_id"}, + }, + + "data_factory_id": { + Type: pluginsdk.TypeString, + Optional: true, // TODO set to Required in 3.0 + Computed: true, // TODO remove in 3.0 + ForceNew: true, + ValidateFunc: validate.DataFactoryID, + ExactlyOneOf: []string{"data_factory_name"}, }, // There's a bug in the Azure API where this is returned in lower-case @@ -153,13 +165,27 @@ func resourceDataFactoryDatasetPostgreSQLCreateUpdate(d *pluginsdk.ResourceData, ctx, cancel := timeouts.ForCreateUpdate(meta.(*clients.Client).StopContext, d) defer cancel() - id := parse.NewDataSetID(subscriptionId, d.Get("resource_group_name").(string), d.Get("data_factory_name").(string), d.Get("name").(string)) + // TODO remove/simplify this after deprecation in 3.0 + var err error + var dataFactoryId *parse.DataFactoryId + if v := d.Get("data_factory_name").(string); v != "" { + newDataFactoryId := parse.NewDataFactoryID(subscriptionId, d.Get("resource_group_name").(string), d.Get("data_factory_name").(string)) + dataFactoryId = &newDataFactoryId + } + if v := d.Get("data_factory_id").(string); v != "" { + dataFactoryId, err = parse.DataFactoryID(v) + if err != nil { + return err + } + } + + id := parse.NewDataSetID(subscriptionId, dataFactoryId.ResourceGroup, dataFactoryId.FactoryName, d.Get("name").(string)) if d.IsNewResource() { existing, err := client.Get(ctx, id.ResourceGroup, id.FactoryName, id.Name, "") if err != nil { if !utils.ResponseWasNotFound(existing.Response) { - return fmt.Errorf("checking for presence of existing Data Factory Dataset PostgreSQL %s: %+v", id, err) + return fmt.Errorf("checking for presence of existing %s: %+v", id, err) } } @@ -217,7 +243,7 @@ func resourceDataFactoryDatasetPostgreSQLCreateUpdate(d *pluginsdk.ResourceData, } if _, err := client.CreateOrUpdate(ctx, id.ResourceGroup, id.FactoryName, id.Name, dataset, ""); err != nil { - return fmt.Errorf("creating/updating Data Factory Dataset PostgreSQL %s: %+v", id, err) + return fmt.Errorf("creating/updating %s: %+v", id, err) } d.SetId(id.ID()) @@ -235,6 +261,8 @@ func resourceDataFactoryDatasetPostgreSQLRead(d *pluginsdk.ResourceData, meta in return err } + dataFactoryId := parse.NewDataFactoryID(id.SubscriptionId, id.ResourceGroup, id.FactoryName) + resp, err := client.Get(ctx, id.ResourceGroup, id.FactoryName, id.Name, "") if err != nil { if utils.ResponseWasNotFound(resp.Response) { @@ -242,12 +270,14 @@ func resourceDataFactoryDatasetPostgreSQLRead(d *pluginsdk.ResourceData, meta in return nil } - return 
fmt.Errorf("retrieving Data Factory Dataset PostgreSQL %s: %+v", *id, err) + return fmt.Errorf("retrieving %s: %+v", *id, err) } d.Set("name", id.Name) d.Set("resource_group_name", id.ResourceGroup) + // TODO remove in 3.0 d.Set("data_factory_name", id.FactoryName) + d.Set("data_factory_id", dataFactoryId.ID()) postgresqlTable, ok := resp.Properties.AsRelationalTableDataset() if !ok { @@ -312,7 +342,7 @@ func resourceDataFactoryDatasetPostgreSQLDelete(d *pluginsdk.ResourceData, meta response, err := client.Delete(ctx, id.ResourceGroup, id.FactoryName, id.Name) if err != nil { if !utils.ResponseWasNotFound(response) { - return fmt.Errorf("deleting Data Factory Dataset PostgreSQL %s: %+v", *id, err) + return fmt.Errorf("deleting %s: %+v", *id, err) } } diff --git a/internal/services/datafactory/data_factory_dataset_postgresql_resource_test.go b/internal/services/datafactory/data_factory_dataset_postgresql_resource_test.go index 5d9ee19d849e..0eb627541058 100644 --- a/internal/services/datafactory/data_factory_dataset_postgresql_resource_test.go +++ b/internal/services/datafactory/data_factory_dataset_postgresql_resource_test.go @@ -70,7 +70,7 @@ func (t DatasetPostgreSQLResource) Exists(ctx context.Context, clients *clients. resp, err := clients.DataFactory.DatasetClient.Get(ctx, id.ResourceGroup, id.FactoryName, id.Name, "") if err != nil { - return nil, fmt.Errorf("reading Data Factory Dataset PostgreSQL (%s): %+v", *id, err) + return nil, fmt.Errorf("reading %s: %+v", *id, err) } return utils.Bool(resp.ID != nil), nil @@ -96,14 +96,14 @@ resource "azurerm_data_factory" "test" { resource "azurerm_data_factory_linked_service_postgresql" "test" { name = "acctestlssql%d" resource_group_name = azurerm_resource_group.test.name - data_factory_name = azurerm_data_factory.test.name + data_factory_id = azurerm_data_factory.test.id connection_string = "Host=example;Port=5432;Database=example;UID=example;EncryptionMethod=0;Password=example" } resource "azurerm_data_factory_dataset_postgresql" "test" { name = "acctestds%d" resource_group_name = azurerm_resource_group.test.name - data_factory_name = azurerm_data_factory.test.name + data_factory_id = azurerm_data_factory.test.id linked_service_name = azurerm_data_factory_linked_service_postgresql.test.name } `, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger, data.RandomInteger) @@ -129,14 +129,14 @@ resource "azurerm_data_factory" "test" { resource "azurerm_data_factory_linked_service_postgresql" "test" { name = "acctestlssql%d" resource_group_name = azurerm_resource_group.test.name - data_factory_name = azurerm_data_factory.test.name + data_factory_id = azurerm_data_factory.test.id connection_string = "Host=example;Port=5432;Database=example;UID=example;EncryptionMethod=0;Password=example" } resource "azurerm_data_factory_dataset_postgresql" "test" { name = "acctestds%d" resource_group_name = azurerm_resource_group.test.name - data_factory_name = azurerm_data_factory.test.name + data_factory_id = azurerm_data_factory.test.id linked_service_name = azurerm_data_factory_linked_service_postgresql.test.name description = "test description" @@ -183,7 +183,7 @@ resource "azurerm_data_factory" "test" { resource "azurerm_data_factory_linked_service_postgresql" "test" { name = "acctestlssql%d" resource_group_name = azurerm_resource_group.test.name - data_factory_name = azurerm_data_factory.test.name + data_factory_id = azurerm_data_factory.test.id connection_string = 
"Host=example;Port=5432;Database=example;UID=example;EncryptionMethod=0;Password=example" } diff --git a/internal/services/datafactory/data_factory_dataset_snowflake_resource.go b/internal/services/datafactory/data_factory_dataset_snowflake_resource.go index 86335401bc74..b8d7f75e0fd8 100644 --- a/internal/services/datafactory/data_factory_dataset_snowflake_resource.go +++ b/internal/services/datafactory/data_factory_dataset_snowflake_resource.go @@ -44,12 +44,24 @@ func resourceDataFactoryDatasetSnowflake() *pluginsdk.Resource { ValidateFunc: validate.LinkedServiceDatasetName, }, - // TODO: replace with `data_factory_id` in 3.0 + // TODO remove in 3.0 "data_factory_name": { Type: pluginsdk.TypeString, - Required: true, + Optional: true, + Computed: true, ForceNew: true, ValidateFunc: validate.DataFactoryName(), + Deprecated: "`data_factory_name` is deprecated in favour of `data_factory_id` and will be removed in version 3.0 of the AzureRM provider", + ExactlyOneOf: []string{"data_factory_id"}, + }, + + "data_factory_id": { + Type: pluginsdk.TypeString, + Optional: true, // TODO set to Required in 3.0 + Computed: true, // TODO remove in 3.0 + ForceNew: true, + ValidateFunc: validate.DataFactoryID, + ExactlyOneOf: []string{"data_factory_name"}, }, // There's a bug in the Azure API where this is returned in lower-case @@ -228,13 +240,27 @@ func resourceDataFactoryDatasetSnowflakeCreateUpdate(d *pluginsdk.ResourceData, ctx, cancel := timeouts.ForCreateUpdate(meta.(*clients.Client).StopContext, d) defer cancel() - id := parse.NewDataSetID(subscriptionId, d.Get("resource_group_name").(string), d.Get("data_factory_name").(string), d.Get("name").(string)) + // TODO remove/simplify this after deprecation in 3.0 + var err error + var dataFactoryId *parse.DataFactoryId + if v := d.Get("data_factory_name").(string); v != "" { + newDataFactoryId := parse.NewDataFactoryID(subscriptionId, d.Get("resource_group_name").(string), d.Get("data_factory_name").(string)) + dataFactoryId = &newDataFactoryId + } + if v := d.Get("data_factory_id").(string); v != "" { + dataFactoryId, err = parse.DataFactoryID(v) + if err != nil { + return err + } + } + + id := parse.NewDataSetID(subscriptionId, dataFactoryId.ResourceGroup, dataFactoryId.FactoryName, d.Get("name").(string)) if d.IsNewResource() { existing, err := client.Get(ctx, id.ResourceGroup, id.FactoryName, id.Name, "") if err != nil { if !utils.ResponseWasNotFound(existing.Response) { - return fmt.Errorf("checking for presence of existing Data Factory Dataset Snowflake %s: %+v", id, err) + return fmt.Errorf("checking for presence of existing %s: %+v", id, err) } } @@ -297,7 +323,7 @@ func resourceDataFactoryDatasetSnowflakeCreateUpdate(d *pluginsdk.ResourceData, } if _, err := client.CreateOrUpdate(ctx, id.ResourceGroup, id.FactoryName, id.Name, dataset, ""); err != nil { - return fmt.Errorf("creating/updating Data Factory Dataset Snowflake %s: %+v", id, err) + return fmt.Errorf("creating/updating %s: %+v", id, err) } d.SetId(id.ID()) @@ -315,6 +341,8 @@ func resourceDataFactoryDatasetSnowflakeRead(d *pluginsdk.ResourceData, meta int return err } + dataFactoryId := parse.NewDataFactoryID(id.SubscriptionId, id.ResourceGroup, id.FactoryName) + resp, err := client.Get(ctx, id.ResourceGroup, id.FactoryName, id.Name, "") if err != nil { if utils.ResponseWasNotFound(resp.Response) { @@ -327,7 +355,9 @@ func resourceDataFactoryDatasetSnowflakeRead(d *pluginsdk.ResourceData, meta int d.Set("name", id.Name) d.Set("resource_group_name", id.ResourceGroup) + // TODO 
remove in 3.0 d.Set("data_factory_name", id.FactoryName) + d.Set("data_factory_id", dataFactoryId.ID()) snowflakeTable, ok := resp.Properties.AsSnowflakeDataset() if !ok { @@ -403,7 +433,7 @@ func resourceDataFactoryDatasetSnowflakeDelete(d *pluginsdk.ResourceData, meta i response, err := client.Delete(ctx, id.ResourceGroup, id.FactoryName, id.Name) if err != nil { if !utils.ResponseWasNotFound(response) { - return fmt.Errorf("deleting Data Factory Dataset Snowflake %s: %+v", *id, err) + return fmt.Errorf("deleting %s: %+v", *id, err) } } diff --git a/internal/services/datafactory/data_factory_dataset_snowflake_resource_test.go b/internal/services/datafactory/data_factory_dataset_snowflake_resource_test.go index d1ccdb6033f2..f124af3b1707 100644 --- a/internal/services/datafactory/data_factory_dataset_snowflake_resource_test.go +++ b/internal/services/datafactory/data_factory_dataset_snowflake_resource_test.go @@ -82,7 +82,7 @@ func (t DatasetSnowflakeResource) Exists(ctx context.Context, clients *clients.C resp, err := clients.DataFactory.DatasetClient.Get(ctx, id.ResourceGroup, id.FactoryName, id.Name, "") if err != nil { - return nil, fmt.Errorf("reading Data Factory Dataset Snowflake (%s): %+v", *id, err) + return nil, fmt.Errorf("reading %s: %+v", *id, err) } return utils.Bool(resp.ID != nil), nil @@ -118,14 +118,14 @@ resource "azurerm_data_factory" "test" { resource "azurerm_data_factory_linked_service_key_vault" "test" { name = "linkkv" resource_group_name = azurerm_resource_group.test.name - data_factory_name = azurerm_data_factory.test.name + data_factory_id = azurerm_data_factory.test.id key_vault_id = azurerm_key_vault.test.id } resource "azurerm_data_factory_linked_service_snowflake" "test" { name = "linksnowflake" resource_group_name = azurerm_resource_group.test.name - data_factory_name = azurerm_data_factory.test.name + data_factory_id = azurerm_data_factory.test.id connection_string = "jdbc:snowflake://account.region.snowflakecomputing.com/?user=user&db=db&warehouse=wh" key_vault_password { @@ -137,7 +137,7 @@ resource "azurerm_data_factory_linked_service_snowflake" "test" { resource "azurerm_data_factory_dataset_snowflake" "test" { name = "acctestds%d" resource_group_name = azurerm_resource_group.test.name - data_factory_name = azurerm_data_factory.test.name + data_factory_id = azurerm_data_factory.test.id linked_service_name = azurerm_data_factory_linked_service_snowflake.test.name } `, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger, data.RandomInteger) @@ -173,14 +173,14 @@ resource "azurerm_data_factory" "test" { resource "azurerm_data_factory_linked_service_key_vault" "test" { name = "linkkv" resource_group_name = azurerm_resource_group.test.name - data_factory_name = azurerm_data_factory.test.name + data_factory_id = azurerm_data_factory.test.id key_vault_id = azurerm_key_vault.test.id } resource "azurerm_data_factory_linked_service_snowflake" "test" { name = "linksnowflake" resource_group_name = azurerm_resource_group.test.name - data_factory_name = azurerm_data_factory.test.name + data_factory_id = azurerm_data_factory.test.id connection_string = "jdbc:snowflake://account.region.snowflakecomputing.com/?user=user&db=db&warehouse=wh" key_vault_password { @@ -192,7 +192,7 @@ resource "azurerm_data_factory_linked_service_snowflake" "test" { resource "azurerm_data_factory_dataset_snowflake" "test" { name = "acctestds%d" resource_group_name = azurerm_resource_group.test.name - data_factory_name = azurerm_data_factory.test.name + 
data_factory_id = azurerm_data_factory.test.id linked_service_name = azurerm_data_factory_linked_service_snowflake.test.name description = "test description" @@ -246,14 +246,14 @@ resource "azurerm_data_factory" "test" { resource "azurerm_data_factory_linked_service_key_vault" "test" { name = "linkkv" resource_group_name = azurerm_resource_group.test.name - data_factory_name = azurerm_data_factory.test.name + data_factory_id = azurerm_data_factory.test.id key_vault_id = azurerm_key_vault.test.id } resource "azurerm_data_factory_linked_service_snowflake" "test" { name = "linksnowflake" resource_group_name = azurerm_resource_group.test.name - data_factory_name = azurerm_data_factory.test.name + data_factory_id = azurerm_data_factory.test.id connection_string = "jdbc:snowflake://account.region.snowflakecomputing.com/?user=user&db=db&warehouse=wh" key_vault_password { @@ -265,7 +265,7 @@ resource "azurerm_data_factory_linked_service_snowflake" "test" { resource "azurerm_data_factory_dataset_snowflake" "test" { name = "acctestds%d" resource_group_name = azurerm_resource_group.test.name - data_factory_name = azurerm_data_factory.test.name + data_factory_id = azurerm_data_factory.test.id linked_service_name = azurerm_data_factory_linked_service_snowflake.test.name description = "test description 2" @@ -329,14 +329,14 @@ resource "azurerm_data_factory" "test" { resource "azurerm_data_factory_linked_service_key_vault" "test" { name = "linkkv" resource_group_name = azurerm_resource_group.test.name - data_factory_name = azurerm_data_factory.test.name + data_factory_id = azurerm_data_factory.test.id key_vault_id = azurerm_key_vault.test.id } resource "azurerm_data_factory_linked_service_snowflake" "test" { name = "linksnowflake" resource_group_name = azurerm_resource_group.test.name - data_factory_name = azurerm_data_factory.test.name + data_factory_id = azurerm_data_factory.test.id connection_string = "jdbc:snowflake://account.region.snowflakecomputing.com/?user=user&db=db&warehouse=wh" key_vault_password { diff --git a/internal/services/datafactory/data_factory_dataset_sql_server_table_resource.go b/internal/services/datafactory/data_factory_dataset_sql_server_table_resource.go index 86721ca6c095..941a2bd8a9e4 100644 --- a/internal/services/datafactory/data_factory_dataset_sql_server_table_resource.go +++ b/internal/services/datafactory/data_factory_dataset_sql_server_table_resource.go @@ -44,12 +44,24 @@ func resourceDataFactoryDatasetSQLServerTable() *pluginsdk.Resource { ValidateFunc: validate.LinkedServiceDatasetName, }, - // TODO: replace with `data_factory_id` in 3.0 + // TODO remove in 3.0 "data_factory_name": { Type: pluginsdk.TypeString, - Required: true, + Optional: true, + Computed: true, ForceNew: true, ValidateFunc: validate.DataFactoryName(), + Deprecated: "`data_factory_name` is deprecated in favour of `data_factory_id` and will be removed in version 3.0 of the AzureRM provider", + ExactlyOneOf: []string{"data_factory_id"}, + }, + + "data_factory_id": { + Type: pluginsdk.TypeString, + Optional: true, // TODO set to Required in 3.0 + Computed: true, // TODO remove in 3.0 + ForceNew: true, + ValidateFunc: validate.DataFactoryID, + ExactlyOneOf: []string{"data_factory_name"}, }, // There's a bug in the Azure API where this is returned in lower-case @@ -153,13 +165,27 @@ func resourceDataFactoryDatasetSQLServerTableCreateUpdate(d *pluginsdk.ResourceD ctx, cancel := timeouts.ForCreateUpdate(meta.(*clients.Client).StopContext, d) defer cancel() - id := parse.NewDataSetID(subscriptionId, 
d.Get("resource_group_name").(string), d.Get("data_factory_name").(string), d.Get("name").(string)) + // TODO remove/simplify this after deprecation in 3.0 + var err error + var dataFactoryId *parse.DataFactoryId + if v := d.Get("data_factory_name").(string); v != "" { + newDataFactoryId := parse.NewDataFactoryID(subscriptionId, d.Get("resource_group_name").(string), d.Get("data_factory_name").(string)) + dataFactoryId = &newDataFactoryId + } + if v := d.Get("data_factory_id").(string); v != "" { + dataFactoryId, err = parse.DataFactoryID(v) + if err != nil { + return err + } + } + + id := parse.NewDataSetID(subscriptionId, dataFactoryId.ResourceGroup, dataFactoryId.FactoryName, d.Get("name").(string)) if d.IsNewResource() { existing, err := client.Get(ctx, id.ResourceGroup, id.FactoryName, id.Name, "") if err != nil { if !utils.ResponseWasNotFound(existing.Response) { - return fmt.Errorf("checking for presence of existing Data Factory Dataset SQL Server Table %s: %+v", id, err) + return fmt.Errorf("checking for presence of existing %s: %+v", id, err) } } @@ -217,7 +243,7 @@ func resourceDataFactoryDatasetSQLServerTableCreateUpdate(d *pluginsdk.ResourceD } if _, err := client.CreateOrUpdate(ctx, id.ResourceGroup, id.FactoryName, id.Name, dataset, ""); err != nil { - return fmt.Errorf("creating/updating Data Factory Dataset SQL Server Table %s: %+v", id, err) + return fmt.Errorf("creating/updating %s: %+v", id, err) } d.SetId(id.ID()) @@ -235,6 +261,8 @@ func resourceDataFactoryDatasetSQLServerTableRead(d *pluginsdk.ResourceData, met return err } + dataFactoryId := parse.NewDataFactoryID(id.SubscriptionId, id.ResourceGroup, id.FactoryName) + resp, err := client.Get(ctx, id.ResourceGroup, id.FactoryName, id.Name, "") if err != nil { if utils.ResponseWasNotFound(resp.Response) { @@ -247,7 +275,9 @@ func resourceDataFactoryDatasetSQLServerTableRead(d *pluginsdk.ResourceData, met d.Set("name", id.Name) d.Set("resource_group_name", id.ResourceGroup) + // TODO remove in 3.0 d.Set("data_factory_name", id.FactoryName) + d.Set("data_factory_id", dataFactoryId.ID()) sqlServerTable, ok := resp.Properties.AsSQLServerTableDataset() if !ok { @@ -312,7 +342,7 @@ func resourceDataFactoryDatasetSQLServerTableDelete(d *pluginsdk.ResourceData, m response, err := client.Delete(ctx, id.ResourceGroup, id.FactoryName, id.Name) if err != nil { if !utils.ResponseWasNotFound(response) { - return fmt.Errorf("deleting Data Factory Dataset SQL Server Table %s: %+v", *id, err) + return fmt.Errorf("deleting %s: %+v", *id, err) } } diff --git a/internal/services/datafactory/data_factory_dataset_sql_server_table_resource_test.go b/internal/services/datafactory/data_factory_dataset_sql_server_table_resource_test.go index 3b7d0aea678f..f0d8f6bff39a 100644 --- a/internal/services/datafactory/data_factory_dataset_sql_server_table_resource_test.go +++ b/internal/services/datafactory/data_factory_dataset_sql_server_table_resource_test.go @@ -70,7 +70,7 @@ func (t DatasetSQLServerTableResource) Exists(ctx context.Context, clients *clie resp, err := clients.DataFactory.DatasetClient.Get(ctx, id.ResourceGroup, id.FactoryName, id.Name, "") if err != nil { - return nil, fmt.Errorf("reading Data Factory Dataset SQL Server Table (%s): %+v", *id, err) + return nil, fmt.Errorf("reading %s: %+v", *id, err) } return utils.Bool(resp.ID != nil), nil @@ -96,14 +96,14 @@ resource "azurerm_data_factory" "test" { resource "azurerm_data_factory_linked_service_sql_server" "test" { name = "acctestlssql%d" resource_group_name = 
azurerm_resource_group.test.name - data_factory_name = azurerm_data_factory.test.name + data_factory_id = azurerm_data_factory.test.id connection_string = "Integrated Security=False;Data Source=test;Initial Catalog=test;User ID=test;Password=test" } resource "azurerm_data_factory_dataset_sql_server_table" "test" { name = "acctestds%d" resource_group_name = azurerm_resource_group.test.name - data_factory_name = azurerm_data_factory.test.name + data_factory_id = azurerm_data_factory.test.id linked_service_name = azurerm_data_factory_linked_service_sql_server.test.name } `, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger, data.RandomInteger) @@ -129,14 +129,14 @@ resource "azurerm_data_factory" "test" { resource "azurerm_data_factory_linked_service_sql_server" "test" { name = "acctestlssql%d" resource_group_name = azurerm_resource_group.test.name - data_factory_name = azurerm_data_factory.test.name + data_factory_id = azurerm_data_factory.test.id connection_string = "Integrated Security=False;Data Source=test;Initial Catalog=test;User ID=test;Password=test" } resource "azurerm_data_factory_dataset_sql_server_table" "test" { name = "acctestds%d" resource_group_name = azurerm_resource_group.test.name - data_factory_name = azurerm_data_factory.test.name + data_factory_id = azurerm_data_factory.test.id linked_service_name = azurerm_data_factory_linked_service_sql_server.test.name description = "test description" @@ -183,7 +183,7 @@ resource "azurerm_data_factory" "test" { resource "azurerm_data_factory_linked_service_sql_server" "test" { name = "acctestlssql%d" resource_group_name = azurerm_resource_group.test.name - data_factory_name = azurerm_data_factory.test.name + data_factory_id = azurerm_data_factory.test.id connection_string = "Integrated Security=False;Data Source=test;Initial Catalog=test;User ID=test;Password=test" } diff --git a/internal/services/datafactory/data_factory_integration_runtime_azure_resource.go b/internal/services/datafactory/data_factory_integration_runtime_azure_resource.go index 04b600fc84a3..93884877f170 100644 --- a/internal/services/datafactory/data_factory_integration_runtime_azure_resource.go +++ b/internal/services/datafactory/data_factory_integration_runtime_azure_resource.go @@ -53,11 +53,24 @@ func resourceDataFactoryIntegrationRuntimeAzure() *pluginsdk.Resource { Optional: true, }, + // TODO remove in 3.0 "data_factory_name": { Type: pluginsdk.TypeString, - Required: true, + Optional: true, + Computed: true, ForceNew: true, ValidateFunc: validate.DataFactoryName(), + Deprecated: "`data_factory_name` is deprecated in favour of `data_factory_id` and will be removed in version 3.0 of the AzureRM provider", + ExactlyOneOf: []string{"data_factory_id"}, + }, + + "data_factory_id": { + Type: pluginsdk.TypeString, + Optional: true, // TODO set to Required in 3.0 + Computed: true, // TODO remove in 3.0 + ForceNew: true, + ValidateFunc: validate.DataFactoryID, + ExactlyOneOf: []string{"data_factory_name"}, }, "resource_group_name": azure.SchemaResourceGroupName(), @@ -122,14 +135,28 @@ func resourceDataFactoryIntegrationRuntimeAzureCreateUpdate(d *pluginsdk.Resourc ctx, cancel := timeouts.ForCreateUpdate(meta.(*clients.Client).StopContext, d) defer cancel() - id := parse.NewIntegrationRuntimeID(subscriptionId, d.Get("resource_group_name").(string), d.Get("data_factory_name").(string), d.Get("name").(string)) + // TODO remove/simplify this after deprecation in 3.0 + var err error + var dataFactoryId *parse.DataFactoryId + if v := 
d.Get("data_factory_name").(string); v != "" { + newDataFactoryId := parse.NewDataFactoryID(subscriptionId, d.Get("resource_group_name").(string), d.Get("data_factory_name").(string)) + dataFactoryId = &newDataFactoryId + } + if v := d.Get("data_factory_id").(string); v != "" { + dataFactoryId, err = parse.DataFactoryID(v) + if err != nil { + return err + } + } + + id := parse.NewIntegrationRuntimeID(subscriptionId, dataFactoryId.ResourceGroup, dataFactoryId.FactoryName, d.Get("name").(string)) if d.IsNewResource() { existing, err := client.Get(ctx, id.ResourceGroup, id.FactoryName, id.Name, "") if err != nil { if !utils.ResponseWasNotFound(existing.Response) { - return fmt.Errorf("checking for presence of existing Data Factory Azure %s: %+v", id, err) + return fmt.Errorf("checking for presence of existing %s: %+v", id, err) } } @@ -170,7 +197,7 @@ func resourceDataFactoryIntegrationRuntimeAzureCreateUpdate(d *pluginsdk.Resourc } if _, err := client.CreateOrUpdate(ctx, id.ResourceGroup, id.FactoryName, id.Name, integrationRuntime, ""); err != nil { - return fmt.Errorf("creating/updating Data Factory Azure %s: %+v", id, err) + return fmt.Errorf("creating/updating %s: %+v", id, err) } d.SetId(id.ID()) @@ -188,6 +215,8 @@ func resourceDataFactoryIntegrationRuntimeAzureRead(d *pluginsdk.ResourceData, m return err } + dataFactoryId := parse.NewDataFactoryID(id.SubscriptionId, id.ResourceGroup, id.FactoryName) + resp, err := client.Get(ctx, id.ResourceGroup, id.FactoryName, id.Name, "") if err != nil { if utils.ResponseWasNotFound(resp.Response) { @@ -195,12 +224,14 @@ func resourceDataFactoryIntegrationRuntimeAzureRead(d *pluginsdk.ResourceData, m return nil } - return fmt.Errorf("retrieving Data Factory Azure %s: %+v", *id, err) + return fmt.Errorf("retrieving %s: %+v", *id, err) } d.Set("name", id.Name) - d.Set("data_factory_name", id.FactoryName) d.Set("resource_group_name", id.ResourceGroup) + // TODO remove in 3.0 + d.Set("data_factory_name", id.FactoryName) + d.Set("data_factory_id", dataFactoryId.ID()) managedIntegrationRuntime, convertSuccess := resp.Properties.AsManagedIntegrationRuntime() if !convertSuccess { @@ -257,7 +288,7 @@ func resourceDataFactoryIntegrationRuntimeAzureDelete(d *pluginsdk.ResourceData, if err != nil { if !utils.ResponseWasNotFound(response) { - return fmt.Errorf("deleting Data Factory Azure %s: %+v", *id, err) + return fmt.Errorf("deleting %s: %+v", *id, err) } } diff --git a/internal/services/datafactory/data_factory_integration_runtime_azure_resource_test.go b/internal/services/datafactory/data_factory_integration_runtime_azure_resource_test.go index 109c947422a5..e24dcab5b837 100644 --- a/internal/services/datafactory/data_factory_integration_runtime_azure_resource_test.go +++ b/internal/services/datafactory/data_factory_integration_runtime_azure_resource_test.go @@ -145,7 +145,7 @@ resource "azurerm_data_factory" "test" { resource "azurerm_data_factory_integration_runtime_azure" "test" { name = "azure-integration-runtime" - data_factory_name = azurerm_data_factory.test.name + data_factory_id = azurerm_data_factory.test.id resource_group_name = azurerm_resource_group.test.name location = azurerm_resource_group.test.location } @@ -171,7 +171,7 @@ resource "azurerm_data_factory" "test" { resource "azurerm_data_factory_integration_runtime_azure" "test" { name = "azure-integration-runtime" - data_factory_name = azurerm_data_factory.test.name + data_factory_id = azurerm_data_factory.test.id resource_group_name = azurerm_resource_group.test.name location = 
azurerm_resource_group.test.location compute_type = "ComputeOptimized" @@ -198,7 +198,7 @@ resource "azurerm_data_factory" "test" { resource "azurerm_data_factory_integration_runtime_azure" "test" { name = "azure-integration-runtime" - data_factory_name = azurerm_data_factory.test.name + data_factory_id = azurerm_data_factory.test.id resource_group_name = azurerm_resource_group.test.name location = azurerm_resource_group.test.location core_count = 16 @@ -225,7 +225,7 @@ resource "azurerm_data_factory" "test" { resource "azurerm_data_factory_integration_runtime_azure" "test" { name = "azure-integration-runtime" - data_factory_name = azurerm_data_factory.test.name + data_factory_id = azurerm_data_factory.test.id resource_group_name = azurerm_resource_group.test.name location = azurerm_resource_group.test.location time_to_live_min = 10 @@ -252,7 +252,7 @@ resource "azurerm_data_factory" "test" { resource "azurerm_data_factory_integration_runtime_azure" "test" { name = "azure-integration-runtime" - data_factory_name = azurerm_data_factory.test.name + data_factory_id = azurerm_data_factory.test.id resource_group_name = azurerm_resource_group.test.name location = azurerm_resource_group.test.location cleanup_enabled = %t @@ -296,7 +296,7 @@ func (t IntegrationRuntimeAzureResource) Exists(ctx context.Context, clients *cl resp, err := clients.DataFactory.IntegrationRuntimesClient.Get(ctx, id.ResourceGroup, id.FactoryName, id.Name, "") if err != nil { - return nil, fmt.Errorf("reading Data Factory Azure (%s): %+v", *id, err) + return nil, fmt.Errorf("reading %s: %+v", *id, err) } return utils.Bool(resp.ID != nil), nil diff --git a/internal/services/datafactory/data_factory_integration_runtime_azure_ssis_resource.go b/internal/services/datafactory/data_factory_integration_runtime_azure_ssis_resource.go index 2200ba850708..3e9471ef727a 100644 --- a/internal/services/datafactory/data_factory_integration_runtime_azure_ssis_resource.go +++ b/internal/services/datafactory/data_factory_integration_runtime_azure_ssis_resource.go @@ -53,11 +53,24 @@ func resourceDataFactoryIntegrationRuntimeAzureSsis() *pluginsdk.Resource { Optional: true, }, + // TODO remove in 3.0 "data_factory_name": { Type: pluginsdk.TypeString, - Required: true, + Optional: true, + Computed: true, ForceNew: true, ValidateFunc: validate.DataFactoryName(), + Deprecated: "`data_factory_name` is deprecated in favour of `data_factory_id` and will be removed in version 3.0 of the AzureRM provider", + ExactlyOneOf: []string{"data_factory_id"}, + }, + + "data_factory_id": { + Type: pluginsdk.TypeString, + Optional: true, // TODO set to Required in 3.0 + Computed: true, // TODO remove in 3.0 + ForceNew: true, + ValidateFunc: validate.DataFactoryID, + ExactlyOneOf: []string{"data_factory_name"}, }, "resource_group_name": azure.SchemaResourceGroupName(), @@ -427,13 +440,27 @@ func resourceDataFactoryIntegrationRuntimeAzureSsisCreateUpdate(d *pluginsdk.Res ctx, cancel := timeouts.ForCreateUpdate(meta.(*clients.Client).StopContext, d) defer cancel() - id := parse.NewIntegrationRuntimeID(subscriptionId, d.Get("resource_group_name").(string), d.Get("data_factory_name").(string), d.Get("name").(string)) + // TODO remove/simplify this after deprecation in 3.0 + var err error + var dataFactoryId *parse.DataFactoryId + if v := d.Get("data_factory_name").(string); v != "" { + newDataFactoryId := parse.NewDataFactoryID(subscriptionId, d.Get("resource_group_name").(string), d.Get("data_factory_name").(string)) + dataFactoryId = &newDataFactoryId + } + if v 
:= d.Get("data_factory_id").(string); v != "" { + dataFactoryId, err = parse.DataFactoryID(v) + if err != nil { + return err + } + } + + id := parse.NewIntegrationRuntimeID(subscriptionId, dataFactoryId.ResourceGroup, dataFactoryId.FactoryName, d.Get("name").(string)) if d.IsNewResource() { existing, err := client.Get(ctx, id.ResourceGroup, id.FactoryName, id.Name, "") if err != nil { if !utils.ResponseWasNotFound(existing.Response) { - return fmt.Errorf("checking for presence of existing Data Factory Azure-SSIS %s: %+v", id, err) + return fmt.Errorf("checking for presence of existing %s: %+v", id, err) } } @@ -460,7 +487,7 @@ func resourceDataFactoryIntegrationRuntimeAzureSsisCreateUpdate(d *pluginsdk.Res } if _, err := client.CreateOrUpdate(ctx, id.ResourceGroup, id.FactoryName, id.Name, integrationRuntime, ""); err != nil { - return fmt.Errorf("creating/updating Data Factory Azure-SSIS %s: %+v", id, err) + return fmt.Errorf("creating/updating %s: %+v", id, err) } d.SetId(id.ID()) @@ -478,6 +505,8 @@ func resourceDataFactoryIntegrationRuntimeAzureSsisRead(d *pluginsdk.ResourceDat return err } + dataFactoryId := parse.NewDataFactoryID(id.SubscriptionId, id.ResourceGroup, id.FactoryName) + resp, err := client.Get(ctx, id.ResourceGroup, id.FactoryName, id.Name, "") if err != nil { if utils.ResponseWasNotFound(resp.Response) { @@ -485,12 +514,14 @@ func resourceDataFactoryIntegrationRuntimeAzureSsisRead(d *pluginsdk.ResourceDat return nil } - return fmt.Errorf("retrieving Data Factory Azure-SSIS %s: %+v", *id, err) + return fmt.Errorf("retrieving %s: %+v", *id, err) } d.Set("name", id.Name) - d.Set("data_factory_name", id.FactoryName) d.Set("resource_group_name", id.ResourceGroup) + // TODO remove in 3.0 + d.Set("data_factory_name", id.FactoryName) + d.Set("data_factory_id", dataFactoryId.ID()) managedIntegrationRuntime, convertSuccess := resp.Properties.AsManagedIntegrationRuntime() if !convertSuccess { @@ -564,7 +595,7 @@ func resourceDataFactoryIntegrationRuntimeAzureSsisDelete(d *pluginsdk.ResourceD response, err := client.Delete(ctx, id.ResourceGroup, id.FactoryName, id.Name) if err != nil { if !utils.ResponseWasNotFound(response) { - return fmt.Errorf("deleting Data Factory Azure-SSIS %s: %+v", *id, err) + return fmt.Errorf("deleting %s: %+v", *id, err) } } diff --git a/internal/services/datafactory/data_factory_integration_runtime_azure_ssis_resource_test.go b/internal/services/datafactory/data_factory_integration_runtime_azure_ssis_resource_test.go index 774e2d3ccd3c..10af5516243a 100644 --- a/internal/services/datafactory/data_factory_integration_runtime_azure_ssis_resource_test.go +++ b/internal/services/datafactory/data_factory_integration_runtime_azure_ssis_resource_test.go @@ -169,7 +169,7 @@ resource "azurerm_data_factory" "test" { resource "azurerm_data_factory_integration_runtime_azure_ssis" "test" { name = "managed-integration-runtime" - data_factory_name = azurerm_data_factory.test.name + data_factory_id = azurerm_data_factory.test.id resource_group_name = azurerm_resource_group.test.name location = azurerm_resource_group.test.location node_size = "Standard_D8_v3" @@ -301,14 +301,14 @@ JSON resource "azurerm_data_factory_integration_runtime_self_hosted" "test" { name = "acctestSIRsh%[1]d" - data_factory_name = azurerm_data_factory.test.name + data_factory_id = azurerm_data_factory.test.id resource_group_name = azurerm_resource_group.test.name } resource "azurerm_data_factory_integration_runtime_azure_ssis" "test" { name = "acctestiras%[1]d" description = "acctest" - 
data_factory_name = azurerm_data_factory.test.name + data_factory_id = azurerm_data_factory.test.id resource_group_name = azurerm_resource_group.test.name location = azurerm_resource_group.test.location @@ -409,7 +409,7 @@ resource "azurerm_data_factory" "test" { resource "azurerm_data_factory_integration_runtime_azure_ssis" "test" { name = "acctestiras%[1]d" description = "acctest" - data_factory_name = azurerm_data_factory.test.name + data_factory_id = azurerm_data_factory.test.id resource_group_name = azurerm_resource_group.test.name location = azurerm_resource_group.test.location @@ -570,20 +570,20 @@ JSON resource "azurerm_data_factory_linked_service_key_vault" "test" { name = "acctestlinkkv%[1]d" resource_group_name = azurerm_resource_group.test.name - data_factory_name = azurerm_data_factory.test.name + data_factory_id = azurerm_data_factory.test.id key_vault_id = azurerm_key_vault.test.id } resource "azurerm_data_factory_integration_runtime_self_hosted" "test" { name = "acctestSIRsh%[1]d" - data_factory_name = azurerm_data_factory.test.name + data_factory_id = azurerm_data_factory.test.id resource_group_name = azurerm_resource_group.test.name } resource "azurerm_data_factory_integration_runtime_azure_ssis" "test" { name = "acctestiras%[1]d" description = "acctest" - data_factory_name = azurerm_data_factory.test.name + data_factory_id = azurerm_data_factory.test.id resource_group_name = azurerm_resource_group.test.name location = azurerm_resource_group.test.location @@ -731,7 +731,7 @@ func (t IntegrationRuntimeManagedSsisResource) Exists(ctx context.Context, clien resp, err := clients.DataFactory.IntegrationRuntimesClient.Get(ctx, id.ResourceGroup, id.FactoryName, id.Name, "") if err != nil { - return nil, fmt.Errorf("reading Data Factory Integration Runtime Managed SSIS (%s): %+v", *id, err) + return nil, fmt.Errorf("reading %s: %+v", *id, err) } return utils.Bool(resp.ID != nil), nil diff --git a/internal/services/datafactory/data_factory_integration_runtime_self_hosted_resource.go b/internal/services/datafactory/data_factory_integration_runtime_self_hosted_resource.go index e669810f9aaf..9f0094c35a8e 100644 --- a/internal/services/datafactory/data_factory_integration_runtime_self_hosted_resource.go +++ b/internal/services/datafactory/data_factory_integration_runtime_self_hosted_resource.go @@ -48,11 +48,24 @@ func resourceDataFactoryIntegrationRuntimeSelfHosted() *pluginsdk.Resource { ), }, + // TODO remove in 3.0 "data_factory_name": { Type: pluginsdk.TypeString, - Required: true, + Optional: true, + Computed: true, ForceNew: true, ValidateFunc: validate.DataFactoryName(), + Deprecated: "`data_factory_name` is deprecated in favour of `data_factory_id` and will be removed in version 3.0 of the AzureRM provider", + ExactlyOneOf: []string{"data_factory_id"}, + }, + + "data_factory_id": { + Type: pluginsdk.TypeString, + Optional: true, // TODO set to Required in 3.0 + Computed: true, // TODO remove in 3.0 + ForceNew: true, + ValidateFunc: validate.DataFactoryID, + ExactlyOneOf: []string{"data_factory_name"}, }, "resource_group_name": azure.SchemaResourceGroupName(), @@ -98,13 +111,27 @@ func resourceDataFactoryIntegrationRuntimeSelfHostedCreateUpdate(d *pluginsdk.Re ctx, cancel := timeouts.ForCreateUpdate(meta.(*clients.Client).StopContext, d) defer cancel() - id := parse.NewIntegrationRuntimeID(subscriptionId, d.Get("resource_group_name").(string), d.Get("data_factory_name").(string), d.Get("name").(string)) + // TODO remove/simplify this after deprecation in 3.0 + var err error + 
var dataFactoryId *parse.DataFactoryId + if v := d.Get("data_factory_name").(string); v != "" { + newDataFactoryId := parse.NewDataFactoryID(subscriptionId, d.Get("resource_group_name").(string), d.Get("data_factory_name").(string)) + dataFactoryId = &newDataFactoryId + } + if v := d.Get("data_factory_id").(string); v != "" { + dataFactoryId, err = parse.DataFactoryID(v) + if err != nil { + return err + } + } + + id := parse.NewIntegrationRuntimeID(subscriptionId, dataFactoryId.ResourceGroup, dataFactoryId.FactoryName, d.Get("name").(string)) if d.IsNewResource() { existing, err := client.Get(ctx, id.ResourceGroup, id.FactoryName, id.Name, "") if err != nil { if !utils.ResponseWasNotFound(existing.Response) { - return fmt.Errorf("checking for presence of existing Data Factory Self-Hosted %s: %+v", id, err) + return fmt.Errorf("checking for presence of existing %s: %+v", id, err) } } @@ -151,6 +178,8 @@ func resourceDataFactoryIntegrationRuntimeSelfHostedRead(d *pluginsdk.ResourceDa return err } + dataFactoryId := parse.NewDataFactoryID(id.SubscriptionId, id.ResourceGroup, id.FactoryName) + resp, err := client.Get(ctx, id.ResourceGroup, id.FactoryName, id.Name, "") if err != nil { if utils.ResponseWasNotFound(resp.Response) { @@ -158,12 +187,14 @@ func resourceDataFactoryIntegrationRuntimeSelfHostedRead(d *pluginsdk.ResourceDa return nil } - return fmt.Errorf("retrieving Data Factory Self-Hosted %s: %+v", *id, err) + return fmt.Errorf("retrieving %s: %+v", *id, err) } d.Set("name", id.Name) - d.Set("data_factory_name", id.FactoryName) d.Set("resource_group_name", id.ResourceGroup) + // TODO remove in 3.0 + d.Set("data_factory_name", id.FactoryName) + d.Set("data_factory_id", dataFactoryId.ID()) selfHostedIntegrationRuntime, convertSuccess := resp.Properties.AsSelfHostedIntegrationRuntime() @@ -217,7 +248,7 @@ func resourceDataFactoryIntegrationRuntimeSelfHostedDelete(d *pluginsdk.Resource response, err := client.Delete(ctx, id.ResourceGroup, id.FactoryName, id.Name) if err != nil { if !utils.ResponseWasNotFound(response) { - return fmt.Errorf("deleting Data Factory SelfHosted %s: %+v", *id, err) + return fmt.Errorf("deleting %s: %+v", *id, err) } } return nil diff --git a/internal/services/datafactory/data_factory_integration_runtime_self_hosted_resource_test.go b/internal/services/datafactory/data_factory_integration_runtime_self_hosted_resource_test.go index b927118b6efa..8ddc467268e3 100644 --- a/internal/services/datafactory/data_factory_integration_runtime_self_hosted_resource_test.go +++ b/internal/services/datafactory/data_factory_integration_runtime_self_hosted_resource_test.go @@ -66,7 +66,7 @@ resource "azurerm_data_factory" "test" { resource "azurerm_data_factory_integration_runtime_self_hosted" "test" { name = "acctestSIR%d" - data_factory_name = azurerm_data_factory.test.name + data_factory_id = azurerm_data_factory.test.id resource_group_name = azurerm_resource_group.test.name } `, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) @@ -222,7 +222,7 @@ func (t IntegrationRuntimeSelfHostedResource) Exists(ctx context.Context, client resp, err := clients.DataFactory.IntegrationRuntimesClient.Get(ctx, id.ResourceGroup, id.FactoryName, id.Name, "") if err != nil { - return nil, fmt.Errorf("reading Data Factory Integration Runtime Self Hosted (%s): %+v", *id, err) + return nil, fmt.Errorf("reading %s: %+v", *id, err) } return utils.Bool(resp.ID != nil), nil diff --git a/internal/services/datafactory/data_factory_pipeline_resource.go 
b/internal/services/datafactory/data_factory_pipeline_resource.go index 4baf14758292..1bd5e29b7855 100644 --- a/internal/services/datafactory/data_factory_pipeline_resource.go +++ b/internal/services/datafactory/data_factory_pipeline_resource.go @@ -44,11 +44,24 @@ func resourceDataFactoryPipeline() *pluginsdk.Resource { ValidateFunc: validate.DataFactoryPipelineAndTriggerName(), }, + // TODO remove in 3.0 "data_factory_name": { Type: pluginsdk.TypeString, - Required: true, + Optional: true, + Computed: true, ForceNew: true, ValidateFunc: validate.DataFactoryName(), + Deprecated: "`data_factory_name` is deprecated in favour of `data_factory_id` and will be removed in version 3.0 of the AzureRM provider", + ExactlyOneOf: []string{"data_factory_id"}, + }, + + "data_factory_id": { + Type: pluginsdk.TypeString, + Optional: true, // TODO set to Required in 3.0 + Computed: true, // TODO remove in 3.0 + ForceNew: true, + ValidateFunc: validate.DataFactoryID, + ExactlyOneOf: []string{"data_factory_name"}, }, // There's a bug in the Azure API where this is returned in lower-case @@ -117,15 +130,27 @@ func resourceDataFactoryPipelineCreateUpdate(d *pluginsdk.ResourceData, meta int ctx, cancel := timeouts.ForCreateUpdate(meta.(*clients.Client).StopContext, d) defer cancel() - log.Printf("[INFO] preparing arguments for Data Factory Pipeline creation.") + // TODO remove/simplify this after deprecation in 3.0 + var err error + var dataFactoryId *parse.DataFactoryId + if v := d.Get("data_factory_name").(string); v != "" { + newDataFactoryId := parse.NewDataFactoryID(subscriptionId, d.Get("resource_group_name").(string), d.Get("data_factory_name").(string)) + dataFactoryId = &newDataFactoryId + } + if v := d.Get("data_factory_id").(string); v != "" { + dataFactoryId, err = parse.DataFactoryID(v) + if err != nil { + return err + } + } - id := parse.NewPipelineID(subscriptionId, d.Get("resource_group_name").(string), d.Get("data_factory_name").(string), d.Get("name").(string)) + id := parse.NewPipelineID(subscriptionId, dataFactoryId.ResourceGroup, dataFactoryId.FactoryName, d.Get("name").(string)) if d.IsNewResource() { existing, err := client.Get(ctx, id.ResourceGroup, id.FactoryName, id.Name, "") if err != nil { if !utils.ResponseWasNotFound(existing.Response) { - return fmt.Errorf("checking for presence of existing Data Factory %s: %+v", id, err) + return fmt.Errorf("checking for presence of existing %s: %+v", id, err) } } @@ -180,7 +205,7 @@ func resourceDataFactoryPipelineCreateUpdate(d *pluginsdk.ResourceData, meta int } if _, err := client.CreateOrUpdate(ctx, id.ResourceGroup, id.FactoryName, id.Name, config, ""); err != nil { - return fmt.Errorf("creating Data Factory %s: %+v", id, err) + return fmt.Errorf("creating %s: %+v", id, err) } d.SetId(id.ID()) @@ -198,6 +223,8 @@ func resourceDataFactoryPipelineRead(d *pluginsdk.ResourceData, meta interface{} return err } + dataFactoryId := parse.NewDataFactoryID(id.SubscriptionId, id.ResourceGroup, id.FactoryName) + resp, err := client.Get(ctx, id.ResourceGroup, id.FactoryName, id.Name, "") if err != nil { if utils.ResponseWasNotFound(resp.Response) { @@ -210,7 +237,9 @@ func resourceDataFactoryPipelineRead(d *pluginsdk.ResourceData, meta interface{} d.Set("name", resp.Name) d.Set("resource_group_name", id.ResourceGroup) + // TODO remove in 3.0 d.Set("data_factory_name", id.FactoryName) + d.Set("data_factory_id", dataFactoryId.ID()) if props := resp.Pipeline; props != nil { d.Set("description", props.Description) @@ -275,7 +304,7 @@ func 
resourceDataFactoryPipelineDelete(d *pluginsdk.ResourceData, meta interface } if _, err = client.Delete(ctx, id.ResourceGroup, id.FactoryName, id.Name); err != nil { - return fmt.Errorf("deleting Data Factory %s: %+v", *id, err) + return fmt.Errorf("deleting %s: %+v", *id, err) } return nil diff --git a/internal/services/datafactory/data_factory_pipeline_resource_test.go b/internal/services/datafactory/data_factory_pipeline_resource_test.go index 653ea6f47a19..ed3723c1d1f0 100644 --- a/internal/services/datafactory/data_factory_pipeline_resource_test.go +++ b/internal/services/datafactory/data_factory_pipeline_resource_test.go @@ -104,7 +104,7 @@ func (t PipelineResource) Exists(ctx context.Context, clients *clients.Client, s resp, err := clients.DataFactory.PipelinesClient.Get(ctx, id.ResourceGroup, id.FactoryName, id.Name, "") if err != nil { - return nil, fmt.Errorf("reading Data Factory %s: %+v", *id, err) + return nil, fmt.Errorf("reading %s: %+v", *id, err) } return utils.Bool(resp.ID != nil), nil @@ -150,7 +150,7 @@ resource "azurerm_data_factory" "test" { resource "azurerm_data_factory_pipeline" "test" { name = "acctest%d" resource_group_name = azurerm_resource_group.test.name - data_factory_name = azurerm_data_factory.test.name + data_factory_id = azurerm_data_factory.test.id } `, data.RandomInteger, data.Locations.Primary, data.RandomInteger, data.RandomInteger) } @@ -175,7 +175,7 @@ resource "azurerm_data_factory" "test" { resource "azurerm_data_factory_pipeline" "test" { name = "acctest%d" resource_group_name = azurerm_resource_group.test.name - data_factory_name = azurerm_data_factory.test.name + data_factory_id = azurerm_data_factory.test.id annotations = ["test1", "test2", "test3"] description = "test description" @@ -252,7 +252,7 @@ resource "azurerm_data_factory" "test" { resource "azurerm_data_factory_pipeline" "test" { name = "acctest%d" resource_group_name = azurerm_resource_group.test.name - data_factory_name = azurerm_data_factory.test.name + data_factory_id = azurerm_data_factory.test.id variables = { "bob" = "item1" } @@ -294,7 +294,7 @@ resource "azurerm_data_factory" "test" { resource "azurerm_data_factory_pipeline" "test" { name = "acctest%d" resource_group_name = azurerm_resource_group.test.name - data_factory_name = azurerm_data_factory.test.name + data_factory_id = azurerm_data_factory.test.id variables = { "bob" = "item1" } diff --git a/internal/services/datafactory/data_factory_trigger_schedule_resource.go b/internal/services/datafactory/data_factory_trigger_schedule_resource.go index a386c4a1e41f..ff7b6584ee20 100644 --- a/internal/services/datafactory/data_factory_trigger_schedule_resource.go +++ b/internal/services/datafactory/data_factory_trigger_schedule_resource.go @@ -50,11 +50,24 @@ func resourceDataFactoryTriggerSchedule() *pluginsdk.Resource { // BUG: https://github.com/Azure/azure-rest-api-specs/issues/5788 "resource_group_name": azure.SchemaResourceGroupNameDiffSuppress(), + // TODO remove in 3.0 "data_factory_name": { Type: pluginsdk.TypeString, - Required: true, + Optional: true, + Computed: true, ForceNew: true, ValidateFunc: validate.DataFactoryName(), + Deprecated: "`data_factory_name` is deprecated in favour of `data_factory_id` and will be removed in version 3.0 of the AzureRM provider", + ExactlyOneOf: []string{"data_factory_id"}, + }, + + "data_factory_id": { + Type: pluginsdk.TypeString, + Optional: true, // TODO set to Required in 3.0 + Computed: true, // TODO remove in 3.0 + ForceNew: true, + ValidateFunc: validate.DataFactoryID, + 
ExactlyOneOf: []string{"data_factory_name"}, }, "description": { @@ -217,17 +230,26 @@ func resourceDataFactoryTriggerScheduleCreateUpdate(d *pluginsdk.ResourceData, m ctx, cancel := timeouts.ForCreateUpdate(meta.(*clients.Client).StopContext, d) defer cancel() - log.Printf("[INFO] preparing arguments for Data Factory Trigger Schedule creation.") - - dataFactoryId := parse.NewDataFactoryID(subscriptionId, d.Get("resource_group_name").(string), d.Get("data_factory_name").(string)) - + // TODO remove/simplify this after deprecation in 3.0 + var err error + var dataFactoryId *parse.DataFactoryId + if v := d.Get("data_factory_name").(string); v != "" { + newDataFactoryId := parse.NewDataFactoryID(subscriptionId, d.Get("resource_group_name").(string), d.Get("data_factory_name").(string)) + dataFactoryId = &newDataFactoryId + } + if v := d.Get("data_factory_id").(string); v != "" { + dataFactoryId, err = parse.DataFactoryID(v) + if err != nil { + return err + } + } id := parse.NewTriggerID(subscriptionId, dataFactoryId.ResourceGroup, dataFactoryId.FactoryName, d.Get("name").(string)) if d.IsNewResource() { existing, err := client.Get(ctx, id.ResourceGroup, id.FactoryName, id.Name, "") if err != nil { if !utils.ResponseWasNotFound(existing.Response) { - return fmt.Errorf("checking for presence of existing Data Factory %s: %+v", id, err) + return fmt.Errorf("checking for presence of existing %s: %+v", id, err) } } @@ -283,16 +305,7 @@ func resourceDataFactoryTriggerScheduleCreateUpdate(d *pluginsdk.ResourceData, m } if _, err := client.CreateOrUpdate(ctx, id.ResourceGroup, id.FactoryName, id.Name, trigger, ""); err != nil { - return fmt.Errorf("creating Data Factory %s: %+v", id, err) - } - - read, err := client.Get(ctx, id.ResourceGroup, id.FactoryName, id.Name, "") - if err != nil { - return fmt.Errorf("retrieving Data Factory %s: %+v", id, err) - } - - if read.ID == nil { - return fmt.Errorf("cannot read Data Factory %s", id) + return fmt.Errorf("creating %s: %+v", id, err) } if d.Get("activated").(bool) { @@ -320,6 +333,8 @@ func resourceDataFactoryTriggerScheduleRead(d *pluginsdk.ResourceData, meta inte return err } + dataFactoryId := parse.NewDataFactoryID(id.SubscriptionId, id.ResourceGroup, id.FactoryName) + resp, err := client.Get(ctx, id.ResourceGroup, id.FactoryName, id.Name, "") if err != nil { if utils.ResponseWasNotFound(resp.Response) { @@ -327,12 +342,14 @@ func resourceDataFactoryTriggerScheduleRead(d *pluginsdk.ResourceData, meta inte log.Printf("[DEBUG] Data Factory Trigger Schedule %q was not found in Resource Group %q - removing from state!", id.Name, id.ResourceGroup) return nil } - return fmt.Errorf("reading the state of Data Factory Trigger Schedule %q: %+v", id.Name, err) + return fmt.Errorf("retrieving %s: %+v", *id, err) } d.Set("name", resp.Name) d.Set("resource_group_name", id.ResourceGroup) + // TODO remove in 3.0 d.Set("data_factory_name", id.FactoryName) + d.Set("data_factory_id", dataFactoryId.ID()) scheduleTriggerProps, ok := resp.Properties.AsScheduleTrigger() if !ok { @@ -397,7 +414,7 @@ func resourceDataFactoryTriggerScheduleDelete(d *pluginsdk.ResourceData, meta in } if _, err = client.Delete(ctx, id.ResourceGroup, id.FactoryName, id.Name); err != nil { - return fmt.Errorf("deleting Data Factory %s: %+v", *id, err) + return fmt.Errorf("deleting %s: %+v", *id, err) } return nil diff --git a/internal/services/datafactory/data_factory_trigger_schedule_resource_test.go b/internal/services/datafactory/data_factory_trigger_schedule_resource_test.go index 
23b9b368a753..982dc7200da3 100644 --- a/internal/services/datafactory/data_factory_trigger_schedule_resource_test.go +++ b/internal/services/datafactory/data_factory_trigger_schedule_resource_test.go @@ -92,7 +92,7 @@ func (t TriggerScheduleResource) Exists(ctx context.Context, clients *clients.Cl resp, err := clients.DataFactory.TriggersClient.Get(ctx, id.ResourceGroup, id.FactoryName, id.Name, "") if err != nil { - return nil, fmt.Errorf("reading Data Factory Trigger Schedule (%s): %+v", id, err) + return nil, fmt.Errorf("reading %s: %+v", id, err) } return utils.Bool(resp.ID != nil), nil @@ -118,7 +118,7 @@ resource "azurerm_data_factory" "test" { resource "azurerm_data_factory_pipeline" "test" { name = "acctest%d" resource_group_name = azurerm_resource_group.test.name - data_factory_name = azurerm_data_factory.test.name + data_factory_id = azurerm_data_factory.test.id parameters = { test = "testparameter" @@ -127,7 +127,7 @@ resource "azurerm_data_factory_pipeline" "test" { resource "azurerm_data_factory_trigger_schedule" "test" { name = "acctestdf%d" - data_factory_name = azurerm_data_factory.test.name + data_factory_id = azurerm_data_factory.test.id resource_group_name = azurerm_resource_group.test.name pipeline_name = azurerm_data_factory_pipeline.test.name @@ -200,7 +200,7 @@ resource "azurerm_data_factory" "test" { resource "azurerm_data_factory_pipeline" "test" { name = "acctest%d" resource_group_name = azurerm_resource_group.test.name - data_factory_name = azurerm_data_factory.test.name + data_factory_id = azurerm_data_factory.test.id parameters = { test = "testparameter" @@ -209,7 +209,7 @@ resource "azurerm_data_factory_pipeline" "test" { resource "azurerm_data_factory_trigger_schedule" "test" { name = "acctestdf%d" - data_factory_name = azurerm_data_factory.test.name + data_factory_id = azurerm_data_factory.test.id resource_group_name = azurerm_resource_group.test.name pipeline_name = azurerm_data_factory_pipeline.test.name @@ -246,7 +246,7 @@ resource "azurerm_data_factory" "test" { resource "azurerm_data_factory_pipeline" "test" { name = "acctest%d" resource_group_name = azurerm_resource_group.test.name - data_factory_name = azurerm_data_factory.test.name + data_factory_id = azurerm_data_factory.test.id parameters = { test = "testparameter" @@ -255,7 +255,7 @@ resource "azurerm_data_factory_pipeline" "test" { resource "azurerm_data_factory_trigger_schedule" "test" { name = "acctestdf%d" - data_factory_name = azurerm_data_factory.test.name + data_factory_id = azurerm_data_factory.test.id resource_group_name = azurerm_resource_group.test.name pipeline_name = azurerm_data_factory_pipeline.test.name From 65f3e9d7e484b15b723a3be5c266c598ed54358d Mon Sep 17 00:00:00 2001 From: Steph Date: Tue, 14 Dec 2021 13:36:46 +0100 Subject: [PATCH 2/4] update docs --- .../r/data_factory_dataset_azure_blob.html.markdown | 12 +++++++++--- .../docs/r/data_factory_dataset_binary.html.markdown | 12 +++++++++--- ...ata_factory_dataset_cosmosdb_sqlapi.html.markdown | 12 +++++++++--- ...data_factory_dataset_delimited_text.html.markdown | 12 +++++++++--- .../docs/r/data_factory_dataset_http.html.markdown | 12 +++++++++--- .../docs/r/data_factory_dataset_json.html.markdown | 12 +++++++++--- .../docs/r/data_factory_dataset_mysql.html.markdown | 12 +++++++++--- .../r/data_factory_dataset_parquet.html.markdown | 4 ++-- .../r/data_factory_dataset_postgresql.html.markdown | 12 +++++++++--- .../r/data_factory_dataset_snowflake.html.markdown | 12 +++++++++--- 
...ta_factory_dataset_sql_server_table.html.markdown | 12 +++++++++--- ...a_factory_integration_runtime_azure.html.markdown | 10 ++++++++-- ...tory_integration_runtime_azure_ssis.html.markdown | 10 ++++++++-- ...ory_integration_runtime_self_hosted.html.markdown | 12 +++++++++--- website/docs/r/data_factory_pipeline.html.markdown | 12 +++++++++--- .../r/data_factory_trigger_blob_event.html.markdown | 2 +- .../data_factory_trigger_custom_event.html.markdown | 2 +- .../r/data_factory_trigger_schedule.html.markdown | 12 +++++++++--- .../r/data_factory_tumbling_window.html.markdown | 2 +- 19 files changed, 138 insertions(+), 48 deletions(-) diff --git a/website/docs/r/data_factory_dataset_azure_blob.html.markdown b/website/docs/r/data_factory_dataset_azure_blob.html.markdown index 1e91cd67a95c..e24b228af937 100644 --- a/website/docs/r/data_factory_dataset_azure_blob.html.markdown +++ b/website/docs/r/data_factory_dataset_azure_blob.html.markdown @@ -32,14 +32,14 @@ resource "azurerm_data_factory" "example" { resource "azurerm_data_factory_linked_service_azure_blob_storage" "example" { name = "example" resource_group_name = azurerm_resource_group.example.name - data_factory_name = azurerm_data_factory.example.name + data_factory_id = azurerm_data_factory.example.id connection_string = data.azurerm_storage_account.example.primary_connection_string } resource "azurerm_data_factory_dataset_azure_blob" "example" { name = "example" resource_group_name = azurerm_resource_group.example.name - data_factory_name = azurerm_data_factory.example.name + data_factory_id = azurerm_data_factory.example.id linked_service_name = azurerm_data_factory_linked_service_azure_blob_storage.example.name path = "foo" @@ -55,7 +55,13 @@ The following supported arguments are common across all Azure Data Factory Datas * `resource_group_name` - (Required) The name of the resource group in which to create the Data Factory Dataset. Changing this forces a new resource -* `data_factory_name` - (Required) The Data Factory name in which to associate the Dataset with. Changing this forces a new resource. +* `data_factory_id` - (Optional) The Data Factory ID in which to associate the Dataset with. Changing this forces a new resource. + +* `data_factory_name` - (Optional) The Data Factory name in which to associate the Dataset with. Changing this forces a new resource. + +-> **Note:** This property has been deprecated in favour of the `data_factory_id` property and will be removed in version 3.0 of the provider. + +-> **Note:** At least one of `data_factory_id` or `data_factory_name` must be set. * `linked_service_name` - (Required) The Data Factory Linked Service name in which to associate the Dataset with.
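The documentation entries above (and the analogous ones that follow for the other dataset types) all describe the same user-facing migration: `data_factory_id` becomes the preferred way to attach a dataset to its factory, while `data_factory_name` remains only until 3.0. As an illustrative sketch that is not part of the patch itself, assuming the `example` resource group, data factory and linked service from the documentation example above, a configuration supplies exactly one of the two arguments, which is what the `ExactlyOneOf` entries added in the first commit enforce:

```hcl
# Preferred form going forward: attach the dataset via the Data Factory resource ID.
resource "azurerm_data_factory_dataset_azure_blob" "example" {
  name                = "example"
  resource_group_name = azurerm_resource_group.example.name
  data_factory_id     = azurerm_data_factory.example.id # exactly one of data_factory_id / data_factory_name may be set
  linked_service_name = azurerm_data_factory_linked_service_azure_blob_storage.example.name
  path                = "foo"
}
```

Declaring both `data_factory_id` and `data_factory_name` on the same resource is rejected during validation, and declaring neither is equally invalid.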
diff --git a/website/docs/r/data_factory_dataset_binary.html.markdown b/website/docs/r/data_factory_dataset_binary.html.markdown index 5a5056144a64..aca0af20a404 100644 --- a/website/docs/r/data_factory_dataset_binary.html.markdown +++ b/website/docs/r/data_factory_dataset_binary.html.markdown @@ -27,7 +27,7 @@ resource "azurerm_data_factory" "example" { resource "azurerm_data_factory_linked_service_sftp" "example" { name = "example" resource_group_name = azurerm_resource_group.example.name - data_factory_name = azurerm_data_factory.example.name + data_factory_id = azurerm_data_factory.example.id authentication_type = "Basic" host = "http://www.bing.com" @@ -39,7 +39,7 @@ resource "azurerm_data_factory_linked_service_sftp" "example" { resource "azurerm_data_factory_dataset_binary" "example" { name = "example" resource_group_name = azurerm_resource_group.example.name - data_factory_name = azurerm_data_factory.example.name + data_factory_id = azurerm_data_factory.example.id linked_service_name = azurerm_data_factory_linked_service_sftp.example.name sftp_server_location { @@ -55,7 +55,13 @@ The following arguments are supported: * `name` - (Required) Specifies the name of the Data Factory Binary Dataset. Changing this forces a new resource to be created. Must be globally unique. See the [Microsoft documentation](https://docs.microsoft.com/en-us/azure/data-factory/naming-rules) for all restrictions. -* `data_factory_name` - (Required) The Data Factory name in which to associate the Binary Dataset with. Changing this forces a new resource. +* `data_factory_id` - (Optional) The Data Factory ID in which to associate the Binary Dataset with. Changing this forces a new resource. + +* `data_factory_name` - (Optional) The Data Factory name in which to associate the Binary Dataset with. Changing this forces a new resource. + +-> **Note:** This property has been deprecated in favour of the `data_factory_id` property and will be removed in version 3.0 of the provider. + +-> **Note:** At least one of `data_factory_id` or `data_factory_name` must be set. * `linked_service_name` - (Required) The Data Factory Linked Service name in which to associate the Binary Dataset with.
diff --git a/website/docs/r/data_factory_dataset_cosmosdb_sqlapi.html.markdown b/website/docs/r/data_factory_dataset_cosmosdb_sqlapi.html.markdown index acdcf848bb47..160be4463936 100644 --- a/website/docs/r/data_factory_dataset_cosmosdb_sqlapi.html.markdown +++ b/website/docs/r/data_factory_dataset_cosmosdb_sqlapi.html.markdown @@ -32,7 +32,7 @@ resource "azurerm_data_factory" "example" { resource "azurerm_data_factory_linked_service_cosmosdb" "example" { name = "example" resource_group_name = azurerm_resource_group.example.name - data_factory_name = azurerm_data_factory.example.name + data_factory_id = azurerm_data_factory.example.id account_endpoint = azurerm_cosmosdb_account.example.endpoint account_key = data.azurerm_cosmosdb_account.example.primary_access_key database = "foo" @@ -41,7 +41,7 @@ resource "azurerm_data_factory_linked_service_cosmosdb" "example" { resource "azurerm_data_factory_dataset_cosmosdb_sqlapi" "example" { name = "example" resource_group_name = azurerm_resource_group.example.name - data_factory_name = azurerm_data_factory.example.name + data_factory_id = azurerm_data_factory.example.id linked_service_name = azurerm_data_factory_linked_service_cosmosdb.example.name collection_name = "bar" @@ -56,7 +56,13 @@ The following supported arguments are common across all Azure Data Factory Datas * `resource_group_name` - (Required) The name of the resource group in which to create the Data Factory Dataset. Changing this forces a new resource -* `data_factory_name` - (Required) The Data Factory name in which to associate the Dataset with. Changing this forces a new resource. +* `data_factory_id` - (Optional) The Data Factory ID in which to associate the Dataset with. Changing this forces a new resource. + +* `data_factory_name` - (Optional) The Data Factory name in which to associate the Dataset with. Changing this forces a new resource. + +-> **Note:** This property has been deprecated in favour of the `data_factory_id` property and will be removed in version 3.0 of the provider. + +-> **Note:** At least one of `data_factory_id` or `data_factory_name` must be set. * `linked_service_name` - (Required) The Data Factory Linked Service name in which to associate the Dataset with.
diff --git a/website/docs/r/data_factory_dataset_delimited_text.html.markdown b/website/docs/r/data_factory_dataset_delimited_text.html.markdown index f468435d3633..ca5ac19383c3 100644 --- a/website/docs/r/data_factory_dataset_delimited_text.html.markdown +++ b/website/docs/r/data_factory_dataset_delimited_text.html.markdown @@ -27,7 +27,7 @@ resource "azurerm_data_factory" "example" { resource "azurerm_data_factory_linked_service_web" "example" { name = "example" resource_group_name = azurerm_resource_group.example.name - data_factory_name = azurerm_data_factory.example.name + data_factory_id = azurerm_data_factory.example.id authentication_type = "Anonymous" url = "https://www.bing.com" } @@ -35,7 +35,7 @@ resource "azurerm_data_factory_linked_service_web" "example" { resource "azurerm_data_factory_dataset_delimited_text" "example" { name = "example" resource_group_name = azurerm_resource_group.example.name - data_factory_name = azurerm_data_factory.example.name + data_factory_id = azurerm_data_factory.example.id linked_service_name = azurerm_data_factory_linked_service_web.example.name http_server_location { @@ -62,7 +62,13 @@ The following supported arguments are common across all Azure Data Factory Datas * `resource_group_name` - (Required) The name of the resource group in which to create the Data Factory Dataset. Changing this forces a new resource -* `data_factory_name` - (Required) The Data Factory name in which to associate the Dataset with. Changing this forces a new resource. +* `data_factory_id` - (Optional) The Data Factory ID in which to associate the Dataset with. Changing this forces a new resource. + +* `data_factory_name` - (Optional) The Data Factory name in which to associate the Dataset with. Changing this forces a new resource. + +-> **Note:** This property has been deprecated in favour of the `data_factory_id` property and will be removed in version 3.0 of the provider. + +-> **Note:** At least one of `data_factory_id` or `data_factory_name` must be set. * `linked_service_name` - (Required) The Data Factory Linked Service name in which to associate the Dataset with. diff --git a/website/docs/r/data_factory_dataset_http.html.markdown b/website/docs/r/data_factory_dataset_http.html.markdown index 75d00c22da94..3c6e6e0db8e4 100644 --- a/website/docs/r/data_factory_dataset_http.html.markdown +++ b/website/docs/r/data_factory_dataset_http.html.markdown @@ -27,7 +27,7 @@ resource "azurerm_data_factory" "example" { resource "azurerm_data_factory_linked_service_web" "example" { name = "example" resource_group_name = azurerm_resource_group.example.name - data_factory_name = azurerm_data_factory.example.name + data_factory_id = azurerm_data_factory.example.id authentication_type = "Anonymous" url = "https://www.bing.com" } @@ -35,7 +35,7 @@ resource "azurerm_data_factory_linked_service_web" "example" { resource "azurerm_data_factory_dataset_http" "example" { name = "example" resource_group_name = azurerm_resource_group.example.name - data_factory_name = azurerm_data_factory.example.name + data_factory_id = azurerm_data_factory.example.id linked_service_name = azurerm_data_factory_linked_service_web.example.name relative_url = "http://www.bing.com" @@ -53,7 +53,13 @@ The following supported arguments are common across all Azure Data Factory Datas * `resource_group_name` - (Required) The name of the resource group in which to create the Data Factory Dataset.
Changing this forces a new resource -* `data_factory_name` - (Required) The Data Factory name in which to associate the Dataset with. Changing this forces a new resource. +* `data_factory_id` - (Optional) The Data Factory ID in which to associate the Dataset with. Changing this forces a new resource. + +* `data_factory_name` - (Optional) The Data Factory name in which to associate the Dataset with. Changing this forces a new resource. + +-> **Note:** This property has been deprecated in favour of the `data_factory_id` property and will be removed in version 3.0 of the provider. + +-> **Note:** At least one of `data_factory_id` or `data_factory_name` must be set. * `linked_service_name` - (Required) The Data Factory Linked Service name in which to associate the Dataset with. diff --git a/website/docs/r/data_factory_dataset_json.html.markdown b/website/docs/r/data_factory_dataset_json.html.markdown index 8f319e1a022b..0788be846b82 100644 --- a/website/docs/r/data_factory_dataset_json.html.markdown +++ b/website/docs/r/data_factory_dataset_json.html.markdown @@ -27,7 +27,7 @@ resource "azurerm_data_factory" "example" { resource "azurerm_data_factory_linked_service_web" "example" { name = "example" resource_group_name = azurerm_resource_group.example.name - data_factory_name = azurerm_data_factory.example.name + data_factory_id = azurerm_data_factory.example.id authentication_type = "Anonymous" url = "https://www.bing.com" } @@ -35,7 +35,7 @@ resource "azurerm_data_factory_linked_service_web" "example" { resource "azurerm_data_factory_dataset_json" "example" { name = "example" resource_group_name = azurerm_resource_group.example.name - data_factory_name = azurerm_data_factory.example.name + data_factory_id = azurerm_data_factory.example.id linked_service_name = azurerm_data_factory_linked_service_web.example.name http_server_location { @@ -57,7 +57,13 @@ The following supported arguments are common across all Azure Data Factory Datas * `resource_group_name` - (Required) The name of the resource group in which to create the Data Factory Dataset. Changing this forces a new resource -* `data_factory_name` - (Required) The Data Factory name in which to associate the Dataset with. Changing this forces a new resource. +* `data_factory_id` - (Optional) The Data Factory ID in which to associate the Dataset with. Changing this forces a new resource. + +* `data_factory_name` - (Optional) The Data Factory name in which to associate the Dataset with. Changing this forces a new resource. + +-> **Note:** This property has been deprecated in favour of the `data_factory_id` property and will be removed in version 3.0 of the provider. + +-> **Note:** At least one of `data_factory_id` or `data_factory_name` must be set. * `linked_service_name` - (Required) The Data Factory Linked Service name in which to associate the Dataset with.
diff --git a/website/docs/r/data_factory_dataset_mysql.html.markdown b/website/docs/r/data_factory_dataset_mysql.html.markdown index 829d66cd724c..ba5714247440 100644 --- a/website/docs/r/data_factory_dataset_mysql.html.markdown +++ b/website/docs/r/data_factory_dataset_mysql.html.markdown @@ -27,14 +27,14 @@ resource "azurerm_data_factory" "example" { resource "azurerm_data_factory_linked_service_mysql" "example" { name = "example" resource_group_name = azurerm_resource_group.example.name - data_factory_name = azurerm_data_factory.example.name + data_factory_id = azurerm_data_factory.example.id connection_string = "Server=test;Port=3306;Database=test;User=test;SSLMode=1;UseSystemTrustStore=0;Password=test" } resource "azurerm_data_factory_dataset_mysql" "example" { name = "example" resource_group_name = azurerm_resource_group.example.name - data_factory_name = azurerm_data_factory.example.name + data_factory_id = azurerm_data_factory.example.id linked_service_name = azurerm_data_factory_linked_service_mysql.example.name } ``` @@ -47,7 +47,13 @@ The following supported arguments are common across all Azure Data Factory Datas * `resource_group_name` - (Required) The name of the resource group in which to create the Data Factory Dataset MySQL. Changing this forces a new resource -* `data_factory_name` - (Required) The Data Factory name in which to associate the Dataset with. Changing this forces a new resource. +* `data_factory_id` - (Optional) The Data Factory ID in which to associate the Dataset with. Changing this forces a new resource. + +* `data_factory_name` - (Optional) The Data Factory name in which to associate the Dataset with. Changing this forces a new resource. + +-> **Note:** This property has been deprecated in favour of the `data_factory_id` property and will be removed in version 3.0 of the provider. + +-> **Note:** At least one of `data_factory_id` or `data_factory_name` must be set. * `linked_service_name` - (Required) The Data Factory Linked Service name in which to associate the Dataset with.
diff --git a/website/docs/r/data_factory_dataset_parquet.html.markdown b/website/docs/r/data_factory_dataset_parquet.html.markdown index 1397d378b701..ce71455bd730 100644 --- a/website/docs/r/data_factory_dataset_parquet.html.markdown +++ b/website/docs/r/data_factory_dataset_parquet.html.markdown @@ -27,7 +27,7 @@ resource "azurerm_data_factory" "example" { resource "azurerm_data_factory_linked_service_web" "example" { name = "example" resource_group_name = azurerm_resource_group.example.name - data_factory_name = azurerm_data_factory.example.name + data_factory_id = azurerm_data_factory.example.id authentication_type = "Anonymous" url = "https://www.bing.com" } @@ -35,7 +35,7 @@ resource "azurerm_data_factory_linked_service_web" "example" { resource "azurerm_data_factory_dataset_parquet" "example" { name = "example" resource_group_name = azurerm_resource_group.example.name - data_factory_name = azurerm_data_factory.example.name + data_factory_id = azurerm_data_factory.example.id linked_service_name = azurerm_data_factory_linked_service_web.example.name http_server_location { diff --git a/website/docs/r/data_factory_dataset_postgresql.html.markdown b/website/docs/r/data_factory_dataset_postgresql.html.markdown index 55b9ea70764c..ebceda2392f0 100644 --- a/website/docs/r/data_factory_dataset_postgresql.html.markdown +++ b/website/docs/r/data_factory_dataset_postgresql.html.markdown @@ -27,14 +27,14 @@ resource "azurerm_data_factory" "example" { resource "azurerm_data_factory_linked_service_postgresql" "example" { name = "example" resource_group_name = azurerm_resource_group.example.name - data_factory_name = azurerm_data_factory.example.name + data_factory_id = azurerm_data_factory.example.id connection_string = "Host=example;Port=5432;Database=example;UID=example;EncryptionMethod=0;Password=example" } resource "azurerm_data_factory_dataset_postgresql" "example" { name = "example" resource_group_name = azurerm_resource_group.example.name - data_factory_name = azurerm_data_factory.example.name + data_factory_id = azurerm_data_factory.example.id linked_service_name = azurerm_data_factory_linked_service_postgresql.example.name } ``` @@ -47,7 +47,13 @@ The following arguments are supported: * `resource_group_name` - (Required) The name of the resource group in which to create the Data Factory Dataset PostgreSQL. Changing this forces a new resource -* `data_factory_name` - (Required) The Data Factory name in which to associate the Dataset with. Changing this forces a new resource. +* `data_factory_id` - (Optional) The Data Factory ID in which to associate the Dataset with. Changing this forces a new resource. + +* `data_factory_name` - (Optional) The Data Factory name in which to associate the Dataset with. Changing this forces a new resource. + +-> **Note:** This property has been deprecated in favour of the `data_factory_id` property and will be removed in version 3.0 of the provider. + +-> **Note:** At least one of `data_factory_id` or `data_factory_name` must be set. * `linked_service_name` - (Required) The Data Factory Linked Service name in which to associate the Dataset with.
diff --git a/website/docs/r/data_factory_dataset_snowflake.html.markdown b/website/docs/r/data_factory_dataset_snowflake.html.markdown index bd6ca8a599f0..1204cb05421a 100644 --- a/website/docs/r/data_factory_dataset_snowflake.html.markdown +++ b/website/docs/r/data_factory_dataset_snowflake.html.markdown @@ -27,14 +27,14 @@ resource "azurerm_data_factory" "example" { resource "azurerm_data_factory_linked_service_snowflake" "example" { name = "example" resource_group_name = azurerm_resource_group.example.name - data_factory_name = azurerm_data_factory.example.name + data_factory_id = azurerm_data_factory.example.id connection_string = "jdbc:snowflake://account.region.snowflakecomputing.com/?user=user&db=db&warehouse=wh" } resource "azurerm_data_factory_dataset_snowflake" "example" { name = "example" resource_group_name = azurerm_resource_group.test.name - data_factory_name = azurerm_data_factory.test.name + data_factory_id = azurerm_data_factory.test.id linked_service_name = azurerm_data_factory_linked_service_snowflake.test.name schema_name = "foo_schema" @@ -50,7 +50,13 @@ The following arguments are supported: * `resource_group_name` - (Required) The name of the resource group in which to create the Data Factory Dataset Snowflake. Changing this forces a new resource -* `data_factory_name` - (Required) The Data Factory name in which to associate the Dataset with. Changing this forces a new resource. +* `data_factory_id` - (Optional) The Data Factory ID in which to associate the Dataset with. Changing this forces a new resource. + +* `data_factory_name` - (Optional) The Data Factory name in which to associate the Dataset with. Changing this forces a new resource. + +-> **Note:** This property has been deprecated in favour of the `data_factory_id` property and will be removed in version 3.0 of the provider. + +-> **Note:** At least one of `data_factory_id` or `data_factory_name` must be set. * `linked_service_name` - (Required) The Data Factory Linked Service name in which to associate the Dataset with. diff --git a/website/docs/r/data_factory_dataset_sql_server_table.html.markdown b/website/docs/r/data_factory_dataset_sql_server_table.html.markdown index 7d6aad98c5ca..942628eaf9ce 100644 --- a/website/docs/r/data_factory_dataset_sql_server_table.html.markdown +++ b/website/docs/r/data_factory_dataset_sql_server_table.html.markdown @@ -27,14 +27,14 @@ resource "azurerm_data_factory" "example" { resource "azurerm_data_factory_linked_service_sql_server" "example" { name = "example" resource_group_name = azurerm_resource_group.example.name - data_factory_name = azurerm_data_factory.example.name + data_factory_id = azurerm_data_factory.example.id connection_string = "Integrated Security=False;Data Source=test;Initial Catalog=test;User ID=test;Password=test" } resource "azurerm_data_factory_dataset_sql_server_table" "example" { name = "example" resource_group_name = azurerm_resource_group.example.name - data_factory_name = azurerm_data_factory.example.name + data_factory_id = azurerm_data_factory.example.id linked_service_name = azurerm_data_factory_linked_service_sql_server.example.name } ``` @@ -47,7 +47,13 @@ The following arguments are supported: * `resource_group_name` - (Required) The name of the resource group in which to create the Data Factory Dataset SQL Server Table. Changing this forces a new resource -* `data_factory_name` - (Required) The Data Factory name in which to associate the Dataset with. Changing this forces a new resource.
+* `data_factory_id` - (Optional) The Data Factory ID in which to associate the Dataset with. Changing this forces a new resource. + +* `data_factory_name` - (Optional) The Data Factory name in which to associate the Dataset with. Changing this forces a new resource. + +-> **Note:** This property has been deprecated in favour of the `data_factory_id` property and will be removed in version 3.0 of the provider. + +-> **Note:** At least one of `data_factory_id` or `data_factory_name` must be set. * `linked_service_name` - (Required) The Data Factory Linked Service name in which to associate the Dataset with. diff --git a/website/docs/r/data_factory_integration_runtime_azure.html.markdown b/website/docs/r/data_factory_integration_runtime_azure.html.markdown index 063198381528..f7308463281d 100644 --- a/website/docs/r/data_factory_integration_runtime_azure.html.markdown +++ b/website/docs/r/data_factory_integration_runtime_azure.html.markdown @@ -26,7 +26,7 @@ resource "azurerm_data_factory" "example" { resource "azurerm_data_factory_integration_runtime_azure" "example" { name = "example" - data_factory_name = azurerm_data_factory.example.name + data_factory_id = azurerm_data_factory.example.id resource_group_name = azurerm_resource_group.example.name location = azurerm_resource_group.example.location } @@ -38,7 +38,13 @@ The following arguments are supported: * `name` - (Required) Specifies the name of the Managed Integration Runtime. Changing this forces a new resource to be created. Must be globally unique. See the [Microsoft documentation](https://docs.microsoft.com/en-us/azure/data-factory/naming-rules) for all restrictions. -* `data_factory_name` - (Required) Specifies the name of the Data Factory the Managed Integration Runtime belongs to. Changing this forces a new resource to be created. +* `data_factory_id` - (Optional) The Data Factory ID in which to associate the Managed Integration Runtime with. Changing this forces a new resource. + +* `data_factory_name` - (Optional) The Data Factory name in which to associate the Managed Integration Runtime with. Changing this forces a new resource. + +-> **Note:** This property has been deprecated in favour of the `data_factory_id` property and will be removed in version 3.0 of the provider. + +-> **Note:** At least one of `data_factory_id` or `data_factory_name` must be set. * `resource_group_name` - (Required) The name of the resource group in which to create the Managed Integration Runtime. Changing this forces a new resource to be created. diff --git a/website/docs/r/data_factory_integration_runtime_azure_ssis.html.markdown b/website/docs/r/data_factory_integration_runtime_azure_ssis.html.markdown index 920fcef13801..e73003255084 100644 --- a/website/docs/r/data_factory_integration_runtime_azure_ssis.html.markdown +++ b/website/docs/r/data_factory_integration_runtime_azure_ssis.html.markdown @@ -26,7 +26,7 @@ resource "azurerm_data_factory" "example" { resource "azurerm_data_factory_integration_runtime_azure_ssis" "example" { name = "example" - data_factory_name = azurerm_data_factory.example.name + data_factory_id = azurerm_data_factory.example.id resource_group_name = azurerm_resource_group.example.name location = azurerm_resource_group.example.location @@ -40,7 +40,13 @@ The following arguments are supported: * `name` - (Required) Specifies the name of the Azure-SSIS Integration Runtime. Changing this forces a new resource to be created. Must be globally unique.
See the [Microsoft documentation](https://docs.microsoft.com/en-us/azure/data-factory/naming-rules) for all restrictions. -* `data_factory_name` - (Required) Specifies the name of the Data Factory the Azure-SSIS Integration Runtime belongs to. Changing this forces a new resource to be created. +* `data_factory_id` - (Optional) The Data Factory ID in which to associate the Azure-SSIS Integration Runtime with. Changing this forces a new resource. + +* `data_factory_name` - (Optional) The Data Factory name in which to associate the Azure-SSIS Integration Runtime with. Changing this forces a new resource. + +-> **Note:** This property has been deprecated in favour of the `data_factory_id` property and will be removed in version 3.0 of the provider. + +-> **Note:** At least one of `data_factory_id` or `data_factory_name` must be set. * `resource_group_name` - (Required) The name of the resource group in which to create the Azure-SSIS Integration Runtime. Changing this forces a new resource to be created. diff --git a/website/docs/r/data_factory_integration_runtime_self_hosted.html.markdown b/website/docs/r/data_factory_integration_runtime_self_hosted.html.markdown index 5888579cf6df..6a91bb0ed334 100644 --- a/website/docs/r/data_factory_integration_runtime_self_hosted.html.markdown +++ b/website/docs/r/data_factory_integration_runtime_self_hosted.html.markdown @@ -26,8 +26,8 @@ resource "azurerm_data_factory" "example" { resource "azurerm_data_factory_integration_runtime_self_hosted" "example" { name = "example" - resource_group_name = "example" - data_factory_name = "example" + resource_group_name = azurerm_resource_group.example.name + data_factory_id = azurerm_data_factory.example.id } ``` @@ -35,7 +35,13 @@ resource "azurerm_data_factory_integration_runtime_self_hosted" "example" { The following arguments are supported: -* `data_factory_name` - (Required) Changing this forces a new Data Factory Self-hosted Integration Runtime to be created. +* `data_factory_id` - (Optional) The Data Factory ID in which to associate the Self-hosted Integration Runtime with. Changing this forces a new resource. + +* `data_factory_name` - (Optional) The Data Factory name in which to associate the Self-hosted Integration Runtime with. Changing this forces a new resource. + +-> **Note:** This property has been deprecated in favour of the `data_factory_id` property and will be removed in version 3.0 of the provider. + +-> **Note:** At least one of `data_factory_id` or `data_factory_name` must be set. * `name` - (Required) The name which should be used for this Data Factory. Changing this forces a new Data Factory Self-hosted Integration Runtime to be created.
diff --git a/website/docs/r/data_factory_pipeline.html.markdown b/website/docs/r/data_factory_pipeline.html.markdown index a213ba72e03c..2add8153a6a7 100644 --- a/website/docs/r/data_factory_pipeline.html.markdown +++ b/website/docs/r/data_factory_pipeline.html.markdown @@ -27,7 +27,7 @@ resource "azurerm_data_factory" "example" { resource "azurerm_data_factory_pipeline" "example" { name = "example" resource_group_name = azurerm_resource_group.example.name - data_factory_name = azurerm_data_factory.example.name + data_factory_id = azurerm_data_factory.example.id } ``` @@ -37,7 +37,7 @@ resource "azurerm_data_factory_pipeline" "example" { resource "azurerm_data_factory_pipeline" "test" { name = "acctest%d" resource_group_name = azurerm_resource_group.test.name - data_factory_name = azurerm_data_factory.test.name + data_factory_id = azurerm_data_factory.test.id variables = { "bob" = "item1" } @@ -66,7 +66,13 @@ The following arguments are supported: * `resource_group_name` - (Required) The name of the resource group in which to create the Data Factory Pipeline. Changing this forces a new resource -* `data_factory_name` - (Required) The Data Factory name in which to associate the Pipeline with. Changing this forces a new resource. +* `data_factory_id` - (Optional) The Data Factory ID in which to associate the Pipeline with. Changing this forces a new resource. + +* `data_factory_name` - (Optional) The Data Factory name in which to associate the Pipeline with. Changing this forces a new resource. + +-> **Note:** This property has been deprecated in favour of the `data_factory_id` property and will be removed in version 3.0 of the provider. + +-> **Note:** At least one of `data_factory_id` or `data_factory_name` must be set. * `description` - (Optional) The description for the Data Factory Pipeline.
diff --git a/website/docs/r/data_factory_trigger_blob_event.html.markdown b/website/docs/r/data_factory_trigger_blob_event.html.markdown index 2c78c723b7f4..d107cf571108 100644 --- a/website/docs/r/data_factory_trigger_blob_event.html.markdown +++ b/website/docs/r/data_factory_trigger_blob_event.html.markdown @@ -27,7 +27,7 @@ resource "azurerm_data_factory" "example" { resource "azurerm_data_factory_pipeline" "example" { name = "example" resource_group_name = azurerm_resource_group.example.name - data_factory_name = azurerm_data_factory.example.name + data_factory_id = azurerm_data_factory.example.id } resource "azurerm_storage_account" "example" { diff --git a/website/docs/r/data_factory_trigger_custom_event.html.markdown b/website/docs/r/data_factory_trigger_custom_event.html.markdown index 040ab07a4afa..c1d1a8f9c7fb 100644 --- a/website/docs/r/data_factory_trigger_custom_event.html.markdown +++ b/website/docs/r/data_factory_trigger_custom_event.html.markdown @@ -27,7 +27,7 @@ resource "azurerm_data_factory" "example" { resource "azurerm_data_factory_pipeline" "example" { name = "example" resource_group_name = azurerm_resource_group.example.name - data_factory_name = azurerm_data_factory.example.name + data_factory_id = azurerm_data_factory.example.id } resource "azurerm_eventgrid_topic" "example" { diff --git a/website/docs/r/data_factory_trigger_schedule.html.markdown b/website/docs/r/data_factory_trigger_schedule.html.markdown index cd2c1045f8f8..37d10dce3067 100644 --- a/website/docs/r/data_factory_trigger_schedule.html.markdown +++ b/website/docs/r/data_factory_trigger_schedule.html.markdown @@ -27,12 +27,12 @@ resource "azurerm_data_factory" "example" { resource "azurerm_data_factory_pipeline" "test" { name = "example" resource_group_name = azurerm_resource_group.test.name - data_factory_name = azurerm_data_factory.test.name + data_factory_id = azurerm_data_factory.test.id } resource "azurerm_data_factory_trigger_schedule" "test" { name = "example" - data_factory_name = azurerm_data_factory.test.name + data_factory_id = azurerm_data_factory.test.id resource_group_name = azurerm_resource_group.test.name pipeline_name = azurerm_data_factory_pipeline.test.name @@ -49,7 +49,13 @@ The following arguments are supported: * `resource_group_name` - (Required) The name of the resource group in which to create the Data Factory Schedule Trigger. Changing this forces a new resource -* `data_factory_name` - (Required) The Data Factory name in which to associate the Schedule Trigger with. Changing this forces a new resource. +* `data_factory_id` - (Optional) The Data Factory ID in which to associate the Schedule Trigger with. Changing this forces a new resource. + +* `data_factory_name` - (Optional) The Data Factory name in which to associate the Schedule Trigger with. Changing this forces a new resource. + +-> **Note:** This property has been deprecated in favour of the `data_factory_id` property and will be removed in version 3.0 of the provider. + +-> **Note:** At least one of `data_factory_id` or `data_factory_name` must be set. * `pipeline_name` - (Required) The Data Factory Pipeline name that the trigger will act on.
diff --git a/website/docs/r/data_factory_tumbling_window.html.markdown b/website/docs/r/data_factory_tumbling_window.html.markdown index 61eae230752a..b057ccb06707 100644 --- a/website/docs/r/data_factory_tumbling_window.html.markdown +++ b/website/docs/r/data_factory_tumbling_window.html.markdown @@ -27,7 +27,7 @@ resource "azurerm_data_factory" "example" { resource "azurerm_data_factory_pipeline" "example" { name = "example" resource_group_name = azurerm_resource_group.example.name - data_factory_name = azurerm_data_factory.example.name + data_factory_id = azurerm_data_factory.example.id } resource "azurerm_data_factory_trigger_tumbling_window" "example" { From 5be82ac84b4ad4842082f2f62e11c500614b7a0d Mon Sep 17 00:00:00 2001 From: Steph Date: Tue, 14 Dec 2021 13:53:26 +0100 Subject: [PATCH 3/4] set data_factory_name to optional --- .../datafactory/data_factory_dataset_azure_blob_resource.go | 2 +- .../datafactory/data_factory_dataset_binary_resource.go | 2 +- .../services/datafactory/data_factory_dataset_http_resource.go | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/internal/services/datafactory/data_factory_dataset_azure_blob_resource.go b/internal/services/datafactory/data_factory_dataset_azure_blob_resource.go index 7127479ae97b..693ca500078f 100644 --- a/internal/services/datafactory/data_factory_dataset_azure_blob_resource.go +++ b/internal/services/datafactory/data_factory_dataset_azure_blob_resource.go @@ -46,7 +46,7 @@ func resourceDataFactoryDatasetAzureBlob() *pluginsdk.Resource { // TODO remove in 3.0 "data_factory_name": { Type: pluginsdk.TypeString, - Required: true, + Optional: true, ForceNew: true, ValidateFunc: validate.DataFactoryName(), Deprecated: "`data_factory_name` is deprecated in favour of `data_factory_id` and will be removed in version 3.0 of the AzureRM provider", diff --git a/internal/services/datafactory/data_factory_dataset_binary_resource.go b/internal/services/datafactory/data_factory_dataset_binary_resource.go index 0c77d252b1ff..e21e8cb1e041 100644 --- a/internal/services/datafactory/data_factory_dataset_binary_resource.go +++ b/internal/services/datafactory/data_factory_dataset_binary_resource.go @@ -45,7 +45,7 @@ func resourceDataFactoryDatasetBinary() *pluginsdk.Resource { // TODO remove in 3.0 "data_factory_name": { Type: pluginsdk.TypeString, - Required: true, + Optional: true, ForceNew: true, ValidateFunc: validate.DataFactoryName(), Deprecated: "`data_factory_name` is deprecated in favour of `data_factory_id` and will be removed in version 3.0 of the AzureRM provider", diff --git a/internal/services/datafactory/data_factory_dataset_http_resource.go b/internal/services/datafactory/data_factory_dataset_http_resource.go index f88770adff4e..11cc26c47de5 100644 --- a/internal/services/datafactory/data_factory_dataset_http_resource.go +++ b/internal/services/datafactory/data_factory_dataset_http_resource.go @@ -47,7 +47,7 @@ func resourceDataFactoryDatasetHTTP() *pluginsdk.Resource { // TODO remove in 3.0 "data_factory_name": { Type: pluginsdk.TypeString, - Required: true, + Optional: true, ForceNew: true, ValidateFunc: validate.DataFactoryName(), Deprecated: "`data_factory_name` is deprecated in favour of `data_factory_id` and will be removed in version 3.0 of the AzureRM provider", From a2374f34abbb444b0171faa42db53bd8ae7bfa5e Mon Sep 17 00:00:00 2001 From: Steph Date: Tue, 14 Dec 2021 14:05:48 +0100 Subject: [PATCH 4/4] set data_factory_name to computed --- .../datafactory/data_factory_dataset_azure_blob_resource.go | 1 + 
.../services/datafactory/data_factory_dataset_binary_resource.go | 1 + .../services/datafactory/data_factory_dataset_http_resource.go | 1 + 3 files changed, 3 insertions(+) diff --git a/internal/services/datafactory/data_factory_dataset_azure_blob_resource.go b/internal/services/datafactory/data_factory_dataset_azure_blob_resource.go index 693ca500078f..26a558252751 100644 --- a/internal/services/datafactory/data_factory_dataset_azure_blob_resource.go +++ b/internal/services/datafactory/data_factory_dataset_azure_blob_resource.go @@ -47,6 +47,7 @@ func resourceDataFactoryDatasetAzureBlob() *pluginsdk.Resource { "data_factory_name": { Type: pluginsdk.TypeString, Optional: true, + Computed: true, ForceNew: true, ValidateFunc: validate.DataFactoryName(), Deprecated: "`data_factory_name` is deprecated in favour of `data_factory_id` and will be removed in version 3.0 of the AzureRM provider", diff --git a/internal/services/datafactory/data_factory_dataset_binary_resource.go b/internal/services/datafactory/data_factory_dataset_binary_resource.go index e21e8cb1e041..bd69091885fd 100644 --- a/internal/services/datafactory/data_factory_dataset_binary_resource.go +++ b/internal/services/datafactory/data_factory_dataset_binary_resource.go @@ -46,6 +46,7 @@ func resourceDataFactoryDatasetBinary() *pluginsdk.Resource { "data_factory_name": { Type: pluginsdk.TypeString, Optional: true, + Computed: true, ForceNew: true, ValidateFunc: validate.DataFactoryName(), Deprecated: "`data_factory_name` is deprecated in favour of `data_factory_id` and will be removed in version 3.0 of the AzureRM provider", diff --git a/internal/services/datafactory/data_factory_dataset_http_resource.go b/internal/services/datafactory/data_factory_dataset_http_resource.go index 11cc26c47de5..7ebd7e0b8739 100644 --- a/internal/services/datafactory/data_factory_dataset_http_resource.go +++ b/internal/services/datafactory/data_factory_dataset_http_resource.go @@ -48,6 +48,7 @@ func resourceDataFactoryDatasetHTTP() *pluginsdk.Resource { "data_factory_name": { Type: pluginsdk.TypeString, Optional: true, + Computed: true, ForceNew: true, ValidateFunc: validate.DataFactoryName(), Deprecated: "`data_factory_name` is deprecated in favour of `data_factory_id` and will be removed in version 3.0 of the AzureRM provider",
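The last two commits loosen `data_factory_name` to Optional and Computed on the three dataset resources above, matching the treatment the other resources received in the first commit. In practice, a configuration written before this change keeps planning cleanly, now with a deprecation warning, until 3.0. As a hedged sketch that is not part of the patch, assuming the `example` data factory and web linked service used in the HTTP dataset documentation, the upgrade path looks roughly like this:

```hcl
# Still accepted during the deprecation window, but now emits the
# deprecation warning added by this change.
resource "azurerm_data_factory_dataset_http" "legacy" {
  name                = "legacy"
  resource_group_name = azurerm_resource_group.example.name
  data_factory_name   = azurerm_data_factory.example.name
  linked_service_name = azurerm_data_factory_linked_service_web.example.name
  relative_url        = "http://www.bing.com"
}

# Target shape for 3.0: the same dataset addressed by its Data Factory ID.
resource "azurerm_data_factory_dataset_http" "example" {
  name                = "example"
  resource_group_name = azurerm_resource_group.example.name
  data_factory_id     = azurerm_data_factory.example.id
  linked_service_name = azurerm_data_factory_linked_service_web.example.name
  relative_url        = "http://www.bing.com"
}
```

Because the read functions now populate both attributes and `data_factory_id` is Computed, swapping the argument in an existing configuration should not, by itself, plan a replacement.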