From 7fbfc1d4cf99d5f49ad2b02c9c9303a28f2b9b8b Mon Sep 17 00:00:00 2001 From: Kshitij Chawla <166698309+kshitij-microsoft@users.noreply.github.com> Date: Mon, 7 Oct 2024 21:33:06 +0530 Subject: [PATCH 1/5] Spark version upgrade to 3.3 (#3400) --- .../pipeline.yml | 4 ++-- .../add-column-and-word-count-using-spark/pipeline.yml | 4 ++-- .../spark/serverless-spark-pipeline-default-identity.yml | 2 +- .../spark/serverless-spark-pipeline-managed-identity.yml | 2 +- cli/jobs/spark/serverless-spark-pipeline-user-identity.yml | 2 +- .../spark/serverless-spark-standalone-default-identity.yml | 2 +- .../spark/serverless-spark-standalone-managed-identity.yml | 2 +- .../spark/serverless-spark-standalone-user-identity.yml | 2 +- cli/jobs/spark/setup-attached-resources.sh | 2 +- .../automation-test/test_featurestore_cli_samples.ipynb | 2 +- .../automation-test/test_featurestore_sdk_samples.ipynb | 2 +- .../automation-test/test_featurestore_vnet_samples.ipynb | 2 +- .../featurestore/arm-template/parameters.json | 2 +- .../featurestore/featurestore_with_offline_setting.yaml | 2 +- .../notebooks/sdk_and_cli/4.Provision-feature-store.ipynb | 2 +- .../network_isolation/Network-isolation-feature-store.ipynb | 2 +- .../fraud_model/pipelines/batch_inference_pipeline.yaml | 2 +- .../project/fraud_model/pipelines/training_pipeline.yaml | 2 +- .../pipeline_with_spark_nodes.ipynb | 2 +- .../aml-demand-forecast-mm-pipeline.ipynb | 6 +++--- .../spark/automation/run_interactive_session_notebook.ipynb | 2 +- sdk/python/jobs/spark/setup_spark.sh | 2 +- sdk/python/jobs/spark/submit_spark_pipeline_jobs.ipynb | 6 +++--- sdk/python/jobs/spark/submit_spark_standalone_jobs.ipynb | 6 +++--- .../spark/submit_spark_standalone_jobs_managed_vnet.ipynb | 4 ++-- 25 files changed, 34 insertions(+), 34 deletions(-) diff --git a/cli/jobs/pipelines-with-components/shakespear_sample_and_word_count_using_spark/pipeline.yml b/cli/jobs/pipelines-with-components/shakespear_sample_and_word_count_using_spark/pipeline.yml index 9a75e550d3c..c5548182990 100644 --- a/cli/jobs/pipelines-with-components/shakespear_sample_and_word_count_using_spark/pipeline.yml +++ b/cli/jobs/pipelines-with-components/shakespear_sample_and_word_count_using_spark/pipeline.yml @@ -32,7 +32,7 @@ jobs: resources: instance_type: Standard_E8S_V3 - runtime_version: 3.1.0 + runtime_version: 3.3.0 count_word: type: spark @@ -44,6 +44,6 @@ jobs: resources: instance_type: Standard_E8S_V3 - runtime_version: 3.1.0 + runtime_version: 3.3.0 diff --git a/cli/jobs/pipelines/add-column-and-word-count-using-spark/pipeline.yml b/cli/jobs/pipelines/add-column-and-word-count-using-spark/pipeline.yml index 472bd6340b6..0ab705de8e5 100644 --- a/cli/jobs/pipelines/add-column-and-word-count-using-spark/pipeline.yml +++ b/cli/jobs/pipelines/add-column-and-word-count-using-spark/pipeline.yml @@ -36,7 +36,7 @@ jobs: resources: instance_type: Standard_E8S_V3 - runtime_version: 3.1.0 + runtime_version: 3.3.0 conf: spark.driver.cores: 2 @@ -71,7 +71,7 @@ jobs: resources: instance_type: Standard_E8S_V3 - runtime_version: 3.1.0 + runtime_version: 3.3.0 conf: spark.driver.cores: 2 diff --git a/cli/jobs/spark/serverless-spark-pipeline-default-identity.yml b/cli/jobs/spark/serverless-spark-pipeline-default-identity.yml index a334b5d824e..31b905d295f 100644 --- a/cli/jobs/spark/serverless-spark-pipeline-default-identity.yml +++ b/cli/jobs/spark/serverless-spark-pipeline-default-identity.yml @@ -22,4 +22,4 @@ jobs: resources: instance_type: standard_e8s_v3 - runtime_version: "3.2" + runtime_version: "3.3" 
diff --git a/cli/jobs/spark/serverless-spark-pipeline-managed-identity.yml b/cli/jobs/spark/serverless-spark-pipeline-managed-identity.yml index 225d51ffc9f..6822ea9f9fa 100644 --- a/cli/jobs/spark/serverless-spark-pipeline-managed-identity.yml +++ b/cli/jobs/spark/serverless-spark-pipeline-managed-identity.yml @@ -25,4 +25,4 @@ jobs: resources: instance_type: standard_e8s_v3 - runtime_version: "3.2" + runtime_version: "3.3" diff --git a/cli/jobs/spark/serverless-spark-pipeline-user-identity.yml b/cli/jobs/spark/serverless-spark-pipeline-user-identity.yml index 9c1dcd6ea84..b5ec9d05ae6 100644 --- a/cli/jobs/spark/serverless-spark-pipeline-user-identity.yml +++ b/cli/jobs/spark/serverless-spark-pipeline-user-identity.yml @@ -25,4 +25,4 @@ jobs: resources: instance_type: standard_e8s_v3 - runtime_version: "3.2" + runtime_version: "3.3" diff --git a/cli/jobs/spark/serverless-spark-standalone-default-identity.yml b/cli/jobs/spark/serverless-spark-standalone-default-identity.yml index 5f6683b1cf5..49a21c4ed4d 100644 --- a/cli/jobs/spark/serverless-spark-standalone-default-identity.yml +++ b/cli/jobs/spark/serverless-spark-standalone-default-identity.yml @@ -31,5 +31,5 @@ args: >- resources: instance_type: standard_e4s_v3 - runtime_version: "3.2" + runtime_version: "3.3" \ No newline at end of file diff --git a/cli/jobs/spark/serverless-spark-standalone-managed-identity.yml b/cli/jobs/spark/serverless-spark-standalone-managed-identity.yml index f84f841a3bb..1f4af1781fd 100644 --- a/cli/jobs/spark/serverless-spark-standalone-managed-identity.yml +++ b/cli/jobs/spark/serverless-spark-standalone-managed-identity.yml @@ -34,5 +34,5 @@ identity: resources: instance_type: standard_e4s_v3 - runtime_version: "3.2" + runtime_version: "3.3" \ No newline at end of file diff --git a/cli/jobs/spark/serverless-spark-standalone-user-identity.yml b/cli/jobs/spark/serverless-spark-standalone-user-identity.yml index a4d448d2d6f..86ce8695749 100644 --- a/cli/jobs/spark/serverless-spark-standalone-user-identity.yml +++ b/cli/jobs/spark/serverless-spark-standalone-user-identity.yml @@ -34,5 +34,5 @@ identity: resources: instance_type: standard_e4s_v3 - runtime_version: "3.2" + runtime_version: "3.3" \ No newline at end of file diff --git a/cli/jobs/spark/setup-attached-resources.sh b/cli/jobs/spark/setup-attached-resources.sh index ba40f4b18c8..c01e506f512 100644 --- a/cli/jobs/spark/setup-attached-resources.sh +++ b/cli/jobs/spark/setup-attached-resources.sh @@ -27,7 +27,7 @@ az storage account create --name $GEN2_STORAGE_NAME --resource-group $RESOURCE_G az storage fs create -n $GEN2_FILE_SYSTEM --account-name $GEN2_STORAGE_NAME az synapse workspace create --name $SYNAPSE_WORKSPACE_NAME --resource-group $RESOURCE_GROUP --storage-account $GEN2_STORAGE_NAME --file-system $GEN2_FILE_SYSTEM --sql-admin-login-user $SQL_ADMIN_LOGIN_USER --sql-admin-login-password $RANDOM_STRING --location $LOCATION az role assignment create --role "Storage Blob Data Owner" --assignee $AML_USER_MANAGED_ID_OID --scope /subscriptions/$SUBSCRIPTION_ID/resourceGroups/$RESOURCE_GROUP/providers/Microsoft.Storage/storageAccounts/$GEN2_STORAGE_NAME/blobServices/default/containers/$GEN2_FILE_SYSTEM -az synapse spark pool create --name $SPARK_POOL_NAME --workspace-name $SYNAPSE_WORKSPACE_NAME --resource-group $RESOURCE_GROUP --spark-version 3.2 --node-count 3 --node-size Medium --min-node-count 3 --max-node-count 10 --enable-auto-scale true +az synapse spark pool create --name $SPARK_POOL_NAME --workspace-name $SYNAPSE_WORKSPACE_NAME 
--resource-group $RESOURCE_GROUP --spark-version 3.3 --node-count 3 --node-size Medium --min-node-count 3 --max-node-count 10 --enable-auto-scale true az synapse workspace firewall-rule create --name allowAll --workspace-name $SYNAPSE_WORKSPACE_NAME --resource-group $RESOURCE_GROUP --start-ip-address 0.0.0.0 --end-ip-address 255.255.255.255 if [[ "$2" == *"managed-identity"* ]] diff --git a/sdk/python/featurestore_sample/automation-test/test_featurestore_cli_samples.ipynb b/sdk/python/featurestore_sample/automation-test/test_featurestore_cli_samples.ipynb index 64946533685..43c6c8c0f9c 100644 --- a/sdk/python/featurestore_sample/automation-test/test_featurestore_cli_samples.ipynb +++ b/sdk/python/featurestore_sample/automation-test/test_featurestore_cli_samples.ipynb @@ -69,7 +69,7 @@ " executor_instances=1,\n", " resources={\n", " \"instance_type\": \"Standard_E8S_V3\",\n", - " \"runtime_version\": \"3.2.0\",\n", + " \"runtime_version\": \"3.3.0\",\n", " },\n", " conf={\"spark.synapse.library.python.env\": contents},\n", ")\n", diff --git a/sdk/python/featurestore_sample/automation-test/test_featurestore_sdk_samples.ipynb b/sdk/python/featurestore_sample/automation-test/test_featurestore_sdk_samples.ipynb index 9ac17ef6f91..ce7d781e9bc 100644 --- a/sdk/python/featurestore_sample/automation-test/test_featurestore_sdk_samples.ipynb +++ b/sdk/python/featurestore_sample/automation-test/test_featurestore_sdk_samples.ipynb @@ -69,7 +69,7 @@ " executor_instances=1,\n", " resources={\n", " \"instance_type\": \"Standard_E8S_V3\",\n", - " \"runtime_version\": \"3.2.0\",\n", + " \"runtime_version\": \"3.3.0\",\n", " },\n", " conf={\"spark.synapse.library.python.env\": contents},\n", ")\n", diff --git a/sdk/python/featurestore_sample/automation-test/test_featurestore_vnet_samples.ipynb b/sdk/python/featurestore_sample/automation-test/test_featurestore_vnet_samples.ipynb index 4cc57cd7b2d..7714ae3b873 100644 --- a/sdk/python/featurestore_sample/automation-test/test_featurestore_vnet_samples.ipynb +++ b/sdk/python/featurestore_sample/automation-test/test_featurestore_vnet_samples.ipynb @@ -69,7 +69,7 @@ " executor_instances=1,\n", " resources={\n", " \"instance_type\": \"Standard_E8S_V3\",\n", - " \"runtime_version\": \"3.2.0\",\n", + " \"runtime_version\": \"3.3.0\",\n", " },\n", " conf={\"spark.synapse.library.python.env\": contents},\n", ")\n", diff --git a/sdk/python/featurestore_sample/featurestore/arm-template/parameters.json b/sdk/python/featurestore_sample/featurestore/arm-template/parameters.json index 182338b5581..29042fc5280 100644 --- a/sdk/python/featurestore_sample/featurestore/arm-template/parameters.json +++ b/sdk/python/featurestore_sample/featurestore/arm-template/parameters.json @@ -177,7 +177,7 @@ } }, "spark_runtime_version": { - "value": "3.2.0" + "value": "3.3.0" }, "offlineStoreStorageAccountOption": { "value": "new" diff --git a/sdk/python/featurestore_sample/featurestore/featurestore_with_offline_setting.yaml b/sdk/python/featurestore_sample/featurestore/featurestore_with_offline_setting.yaml index c069b0867e2..85b3ba116e8 100644 --- a/sdk/python/featurestore_sample/featurestore/featurestore_with_offline_setting.yaml +++ b/sdk/python/featurestore_sample/featurestore/featurestore_with_offline_setting.yaml @@ -13,4 +13,4 @@ materialization_identity: resource_id: /subscriptions/{sub-id}/resourceGroups/{rg}/providers/Microsoft.ManagedIdentity/userAssignedIdentities/{uai-name} compute_runtime: - spark_runtime_version: '3.2' \ No newline at end of file + spark_runtime_version: '3.3' 
\ No newline at end of file diff --git a/sdk/python/featurestore_sample/notebooks/sdk_and_cli/4.Provision-feature-store.ipynb b/sdk/python/featurestore_sample/notebooks/sdk_and_cli/4.Provision-feature-store.ipynb index bb604c3919c..7708c9c37ff 100644 --- a/sdk/python/featurestore_sample/notebooks/sdk_and_cli/4.Provision-feature-store.ipynb +++ b/sdk/python/featurestore_sample/notebooks/sdk_and_cli/4.Provision-feature-store.ipynb @@ -1428,7 +1428,7 @@ "```json\n", "\"parameters\": {\n", " \"spark_runtime_version\": {\n", - " \"value\": \"3.2.0\"\n", + " \"value\": \"3.3.0\"\n", " }\n", "}\n", "```" diff --git a/sdk/python/featurestore_sample/notebooks/sdk_and_cli/network_isolation/Network-isolation-feature-store.ipynb b/sdk/python/featurestore_sample/notebooks/sdk_and_cli/network_isolation/Network-isolation-feature-store.ipynb index ea6c20ce5a0..bff5f75cba2 100644 --- a/sdk/python/featurestore_sample/notebooks/sdk_and_cli/network_isolation/Network-isolation-feature-store.ipynb +++ b/sdk/python/featurestore_sample/notebooks/sdk_and_cli/network_isolation/Network-isolation-feature-store.ipynb @@ -936,7 +936,7 @@ " \"$schema\": \"http://azureml/sdk-2-0/FeatureStore.json\",\n", " \"name\": featurestore_name,\n", " \"location\": featurestore_location,\n", - " \"compute_runtime\": {\"spark_runtime_version\": \"3.2\"},\n", + " \"compute_runtime\": {\"spark_runtime_version\": \"3.3\"},\n", " \"offline_store\": {\n", " \"type\": \"azure_data_lake_gen2\",\n", " \"target\": offline_store_gen2_container_arm_id,\n", diff --git a/sdk/python/featurestore_sample/project/fraud_model/pipelines/batch_inference_pipeline.yaml b/sdk/python/featurestore_sample/project/fraud_model/pipelines/batch_inference_pipeline.yaml index 7b1393cdc0b..00b5e72f5fa 100644 --- a/sdk/python/featurestore_sample/project/fraud_model/pipelines/batch_inference_pipeline.yaml +++ b/sdk/python/featurestore_sample/project/fraud_model/pipelines/batch_inference_pipeline.yaml @@ -28,7 +28,7 @@ jobs: observation_data_format: parquet resources: instance_type: standard_e4s_v3 - runtime_version: "3.2" + runtime_version: "3.3" outputs: output_data: conf: diff --git a/sdk/python/featurestore_sample/project/fraud_model/pipelines/training_pipeline.yaml b/sdk/python/featurestore_sample/project/fraud_model/pipelines/training_pipeline.yaml index ec3fbc580fe..aacb6b22782 100644 --- a/sdk/python/featurestore_sample/project/fraud_model/pipelines/training_pipeline.yaml +++ b/sdk/python/featurestore_sample/project/fraud_model/pipelines/training_pipeline.yaml @@ -28,7 +28,7 @@ jobs: observation_data_format: parquet resources: instance_type: standard_e4s_v3 - runtime_version: "3.2" + runtime_version: "3.3" outputs: output_data: conf: diff --git a/sdk/python/jobs/pipelines/1i_pipeline_with_spark_nodes/pipeline_with_spark_nodes.ipynb b/sdk/python/jobs/pipelines/1i_pipeline_with_spark_nodes/pipeline_with_spark_nodes.ipynb index d737b0df4c5..adbd60d5b6f 100644 --- a/sdk/python/jobs/pipelines/1i_pipeline_with_spark_nodes/pipeline_with_spark_nodes.ipynb +++ b/sdk/python/jobs/pipelines/1i_pipeline_with_spark_nodes/pipeline_with_spark_nodes.ipynb @@ -171,7 +171,7 @@ " kmeans_clustering = spark_kmeans(file_input=train_data)\n", " kmeans_clustering.resources = {\n", " \"instance_type\": \"Standard_E8S_V3\",\n", - " \"runtime_version\": \"3.2.0\",\n", + " \"runtime_version\": \"3.3.0\",\n", " }\n", " kmeans_clustering.outputs.output.mode = InputOutputModes.DIRECT\n", "\n", diff --git 
a/sdk/python/jobs/pipelines/1k_demand_forecast_pipeline/aml-demand-forecast-mm-pipeline/aml-demand-forecast-mm-pipeline.ipynb b/sdk/python/jobs/pipelines/1k_demand_forecast_pipeline/aml-demand-forecast-mm-pipeline/aml-demand-forecast-mm-pipeline.ipynb index c1c178d6c1d..1a84a012993 100644 --- a/sdk/python/jobs/pipelines/1k_demand_forecast_pipeline/aml-demand-forecast-mm-pipeline/aml-demand-forecast-mm-pipeline.ipynb +++ b/sdk/python/jobs/pipelines/1k_demand_forecast_pipeline/aml-demand-forecast-mm-pipeline/aml-demand-forecast-mm-pipeline.ipynb @@ -601,7 +601,7 @@ "|Property|Description|\n", "|:-|:-|\n", "| **instance_type** | A key that defines the compute instance type to be used for the serverless Spark compute. The following instance types are currently supported:|\n", - "| **runtime_version** | A key that defines the Spark runtime version. The following Spark runtime versions are currently supported: |\n", + "| **runtime_version** | A key that defines the Spark runtime version. The following Spark runtime versions are currently supported: |\n", "| **driver_cores** | The he number of cores allocated for the Spark driver. |\n", "| **driver_memory** | The allocated memory for the Spark exedriver, with a size unit suffix `k`, `m`, `g` or `t` (for example, `512m`, `2g`). |\n", "| **executor_cores** | The number of cores allocated for the Spark executor. |\n", @@ -639,7 +639,7 @@ "if USE_PARTITIONING_COMPONENT:\n", " spark_parameters = dict(\n", " instance_type=\"Standard_E4S_V3\",\n", - " runtime_version=\"3.2.0\",\n", + " runtime_version=\"3.3.0\",\n", " driver_cores=1,\n", " driver_memory=\"2g\",\n", " executor_cores=2,\n", @@ -704,7 +704,7 @@ "\n", " partition_step.resources = {\n", " \"instance_type\": spark_parameters.get(\"instance_type\", \"Standard_E4S_V3\"),\n", - " \"runtime_version\": str(spark_parameters.get(\"runtime_version\", \"3.2.0\")),\n", + " \"runtime_version\": str(spark_parameters.get(\"runtime_version\", \"3.3.0\")),\n", " }\n", " partition_step.conf = {\n", " \"spark.driver.cores\": spark_parameters.get(\"driver_cores\", 1),\n", diff --git a/sdk/python/jobs/spark/automation/run_interactive_session_notebook.ipynb b/sdk/python/jobs/spark/automation/run_interactive_session_notebook.ipynb index 1d87178f339..9181cd793d8 100644 --- a/sdk/python/jobs/spark/automation/run_interactive_session_notebook.ipynb +++ b/sdk/python/jobs/spark/automation/run_interactive_session_notebook.ipynb @@ -45,7 +45,7 @@ " executor_instances=2,\n", " resources={\n", " \"instance_type\": \"Standard_E8S_V3\",\n", - " \"runtime_version\": \"3.2.0\",\n", + " \"runtime_version\": \"3.3.0\",\n", " },\n", ")\n", "\n", diff --git a/sdk/python/jobs/spark/setup_spark.sh b/sdk/python/jobs/spark/setup_spark.sh index 371164d6f19..faea23617d9 100644 --- a/sdk/python/jobs/spark/setup_spark.sh +++ b/sdk/python/jobs/spark/setup_spark.sh @@ -152,7 +152,7 @@ else az storage fs create -n $GEN2_FILE_SYSTEM --account-name $GEN2_STORAGE_NAME az synapse workspace create --name $SYNAPSE_WORKSPACE_NAME --resource-group $RESOURCE_GROUP --storage-account $GEN2_STORAGE_NAME --file-system $GEN2_FILE_SYSTEM --sql-admin-login-user $SQL_ADMIN_LOGIN_USER --sql-admin-login-password $RANDOM_STRING --location $LOCATION az role assignment create --role "Storage Blob Data Owner" --assignee $AML_USER_MANAGED_ID_OID --scope /subscriptions/$SUBSCRIPTION_ID/resourceGroups/$RESOURCE_GROUP/providers/Microsoft.Storage/storageAccounts/$GEN2_STORAGE_NAME/blobServices/default/containers/$GEN2_FILE_SYSTEM - az synapse spark pool create --name 
$SPARK_POOL_NAME --workspace-name $SYNAPSE_WORKSPACE_NAME --resource-group $RESOURCE_GROUP --spark-version 3.2 --node-count 3 --node-size Medium --min-node-count 3 --max-node-count 10 --enable-auto-scale true + az synapse spark pool create --name $SPARK_POOL_NAME --workspace-name $SYNAPSE_WORKSPACE_NAME --resource-group $RESOURCE_GROUP --spark-version 3.3 --node-count 3 --node-size Medium --min-node-count 3 --max-node-count 10 --enable-auto-scale true az synapse workspace firewall-rule create --name allowAll --workspace-name $SYNAPSE_WORKSPACE_NAME --resource-group $RESOURCE_GROUP --start-ip-address 0.0.0.0 --end-ip-address 255.255.255.255 # diff --git a/sdk/python/jobs/spark/submit_spark_pipeline_jobs.ipynb b/sdk/python/jobs/spark/submit_spark_pipeline_jobs.ipynb index 8f7b672f80c..1a336efc40d 100644 --- a/sdk/python/jobs/spark/submit_spark_pipeline_jobs.ipynb +++ b/sdk/python/jobs/spark/submit_spark_pipeline_jobs.ipynb @@ -342,7 +342,7 @@ " spark_step.identity = ManagedIdentityConfiguration()\n", " spark_step.resources = {\n", " \"instance_type\": \"Standard_E8S_V3\",\n", - " \"runtime_version\": \"3.2.0\",\n", + " \"runtime_version\": \"3.3.0\",\n", " }\n", "\n", "\n", @@ -422,7 +422,7 @@ " spark_step.identity = UserIdentityConfiguration()\n", " spark_step.resources = {\n", " \"instance_type\": \"Standard_E8S_V3\",\n", - " \"runtime_version\": \"3.2.0\",\n", + " \"runtime_version\": \"3.3.0\",\n", " }\n", "\n", "\n", @@ -501,7 +501,7 @@ " spark_step.outputs.wrangled_data.mode = InputOutputModes.DIRECT\n", " spark_step.resources = {\n", " \"instance_type\": \"Standard_E8S_V3\",\n", - " \"runtime_version\": \"3.2.0\",\n", + " \"runtime_version\": \"3.3.0\",\n", " }\n", "\n", "\n", diff --git a/sdk/python/jobs/spark/submit_spark_standalone_jobs.ipynb b/sdk/python/jobs/spark/submit_spark_standalone_jobs.ipynb index 522a421ad75..245c2671222 100644 --- a/sdk/python/jobs/spark/submit_spark_standalone_jobs.ipynb +++ b/sdk/python/jobs/spark/submit_spark_standalone_jobs.ipynb @@ -267,7 +267,7 @@ " executor_instances=2,\n", " resources={\n", " \"instance_type\": \"Standard_E8S_V3\",\n", - " \"runtime_version\": \"3.2.0\",\n", + " \"runtime_version\": \"3.3.0\",\n", " },\n", " inputs={\n", " \"titanic_data\": Input(\n", @@ -329,7 +329,7 @@ " executor_instances=2,\n", " resources={\n", " \"instance_type\": \"Standard_E8S_V3\",\n", - " \"runtime_version\": \"3.2.0\",\n", + " \"runtime_version\": \"3.3.0\",\n", " },\n", " inputs={\n", " \"titanic_data\": Input(\n", @@ -391,7 +391,7 @@ " executor_instances=2,\n", " resources={\n", " \"instance_type\": \"Standard_E8S_V3\",\n", - " \"runtime_version\": \"3.2.0\",\n", + " \"runtime_version\": \"3.3.0\",\n", " },\n", " inputs={\n", " \"titanic_data\": Input(\n", diff --git a/sdk/python/jobs/spark/submit_spark_standalone_jobs_managed_vnet.ipynb b/sdk/python/jobs/spark/submit_spark_standalone_jobs_managed_vnet.ipynb index 515ea84b65b..cfa920dfae9 100644 --- a/sdk/python/jobs/spark/submit_spark_standalone_jobs_managed_vnet.ipynb +++ b/sdk/python/jobs/spark/submit_spark_standalone_jobs_managed_vnet.ipynb @@ -531,7 +531,7 @@ " executor_instances=2,\n", " resources={\n", " \"instance_type\": \"Standard_E8S_V3\",\n", - " \"runtime_version\": \"3.2.0\",\n", + " \"runtime_version\": \"3.3.0\",\n", " },\n", " inputs={\n", " \"titanic_data\": Input(\n", @@ -765,7 +765,7 @@ " executor_instances=2,\n", " resources={\n", " \"instance_type\": \"Standard_E8S_V3\",\n", - " \"runtime_version\": \"3.2.0\",\n", + " \"runtime_version\": \"3.3.0\",\n", " },\n", " inputs={\n", " 
\"titanic_data\": Input(\n", From 1f76114d60855be2ecf39f81824b933b4e1a3aed Mon Sep 17 00:00:00 2001 From: jeff-shepherd <39775772+jeff-shepherd@users.noreply.github.com> Date: Mon, 7 Oct 2024 21:19:33 -0700 Subject: [PATCH 2/5] Ignore google.protobuf message (#3408) --- .github/test/scripts/check_notebook_output.py | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/test/scripts/check_notebook_output.py b/.github/test/scripts/check_notebook_output.py index e5ba49abe78..5d46fffeaf6 100644 --- a/.github/test/scripts/check_notebook_output.py +++ b/.github/test/scripts/check_notebook_output.py @@ -81,6 +81,7 @@ "google.protobuf.service module is deprecated. RPC implementations should provide code generator plugins " "which generate code specific to the RPC implementation. service.py will be removed in Jan 2025" ), + "from google.protobuf import service as _service", ] with open(full_name, "r") as notebook_file: From 5d2fa2a8a92049f869767753767393404c11480e Mon Sep 17 00:00:00 2001 From: Rahul Kumar <74648335+iamrk04@users.noreply.github.com> Date: Tue, 8 Oct 2024 21:44:40 +0530 Subject: [PATCH 3/5] Fix classification and tf nb (#3410) * update tf-mnist nb * fix classification nb * remove unwanted change * fix code style --- .../automl-classification-task-bankmarketing.ipynb | 3 ++- .../jobs/single-step/tensorflow/mnist/tensorflow-mnist.ipynb | 2 +- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/sdk/python/jobs/automl-standalone-jobs/automl-classification-task-bankmarketing/automl-classification-task-bankmarketing.ipynb b/sdk/python/jobs/automl-standalone-jobs/automl-classification-task-bankmarketing/automl-classification-task-bankmarketing.ipynb index ac475cfe9b9..f16a09d578e 100644 --- a/sdk/python/jobs/automl-standalone-jobs/automl-classification-task-bankmarketing/automl-classification-task-bankmarketing.ipynb +++ b/sdk/python/jobs/automl-standalone-jobs/automl-classification-task-bankmarketing/automl-classification-task-bankmarketing.ipynb @@ -954,7 +954,8 @@ "with open(model_file, \"r\") as model_stream:\n", " model_yaml = yaml.safe_load(model_stream)\n", " training_environment_name = (\n", - " \"AzureML-AutoML:\" + model_yaml[\"metadata\"][\"azureml.base_image\"].split(\":\")[-1]\n", + " \"AzureML-ai-ml-automl:\"\n", + " + model_yaml[\"metadata\"][\"azureml.base_image\"].split(\":\")[-1]\n", " )\n", " print(\"Training emvironment {}\".format(training_environment_name))" ] diff --git a/sdk/python/jobs/single-step/tensorflow/mnist/tensorflow-mnist.ipynb b/sdk/python/jobs/single-step/tensorflow/mnist/tensorflow-mnist.ipynb index 855d27b15c1..4c8cf4c12f2 100644 --- a/sdk/python/jobs/single-step/tensorflow/mnist/tensorflow-mnist.ipynb +++ b/sdk/python/jobs/single-step/tensorflow/mnist/tensorflow-mnist.ipynb @@ -128,7 +128,7 @@ "job = command(\n", " code=\"./src\", # local path where the code is stored\n", " command=\"python main.py\",\n", - " environment=\"AzureML-tensorflow-2.12-cuda11@latest\",\n", + " environment=\"AzureML-tensorflow-2.16-cuda11@latest\",\n", " display_name=\"tensorflow-mnist-example\"\n", " # experiment_name: tensorflow-mnist-example\n", " # description: Train a basic neural network with TensorFlow on the MNIST dataset.\n", From 67d9ae16d7ca61b290ceed083df2e86d5d055f4a Mon Sep 17 00:00:00 2001 From: Amit Chauhan <70937115+achauhan-scc@users.noreply.github.com> Date: Wed, 9 Oct 2024 10:17:23 +0530 Subject: [PATCH 4/5] upgrading aml nuget package (#3411) --- .../Assets/Code/CodeOperations.cs | 12 ++--- .../Assets/Component/ComponentOperations.cs | 24 
++++----- .../Assets/Data/DataOperations.cs | 46 ++++++++-------- .../Environment/EnvironmentOperations.cs | 14 ++--- .../Assets/Model/ModelOperations.cs | 16 +++--- .../AzureML-Samples-CSharp.csproj | 6 +-- .../Batch/BatchEndpointOperations.cs | 51 +++++++++--------- .../Online/ManagedOnlineEndpointOperations.cs | 53 ++++++++++--------- 8 files changed, 112 insertions(+), 110 deletions(-) diff --git a/sdk/dotnet/AzureML-Samples-CSharp/Assets/Code/CodeOperations.cs b/sdk/dotnet/AzureML-Samples-CSharp/Assets/Code/CodeOperations.cs index cebd1365fa9..b69712eb02a 100644 --- a/sdk/dotnet/AzureML-Samples-CSharp/Assets/Code/CodeOperations.cs +++ b/sdk/dotnet/AzureML-Samples-CSharp/Assets/Code/CodeOperations.cs @@ -19,7 +19,7 @@ internal class CodeOperations /// /// // - public static async Task GetOrCreateCodeVersionAsync( + public static async Task GetOrCreateCodeVersionAsync( ArmClient armClient, ResourceGroupResource resourceGroup, string workspaceName, @@ -32,13 +32,13 @@ public static async Task GetOrCreateCodeVersionAsync( string resourceId = $"{ws.Id}/codes/{codeName}"; var id = new ResourceIdentifier(resourceId); - CodeContainerResource codeContainerDataResource = armClient.GetCodeContainerResource(id); + MachineLearningCodeContainerResource codeContainerDataResource = armClient.GetMachineLearningCodeContainerResource(id); - CodeVersionProperties properties = new CodeVersionProperties { CodeUri = new Uri(codeUri) }; - CodeVersionData data = new CodeVersionData(properties); + MachineLearningCodeVersionProperties properties = new MachineLearningCodeVersionProperties { CodeUri = new Uri(codeUri) }; + MachineLearningCodeVersionData data = new MachineLearningCodeVersionData(properties); - ArmOperation CodeVersionResourceOperation = await codeContainerDataResource.GetCodeVersions().CreateOrUpdateAsync(WaitUntil.Completed, version, data); - CodeVersionResource codeVersionResource = CodeVersionResourceOperation.Value; + ArmOperation CodeVersionResourceOperation = await codeContainerDataResource.GetMachineLearningCodeVersions().CreateOrUpdateAsync(WaitUntil.Completed, version, data); + MachineLearningCodeVersionResource codeVersionResource = CodeVersionResourceOperation.Value; Console.WriteLine($"codeVersionResource {codeVersionResource.Data.Id} created."); return codeVersionResource; diff --git a/sdk/dotnet/AzureML-Samples-CSharp/Assets/Component/ComponentOperations.cs b/sdk/dotnet/AzureML-Samples-CSharp/Assets/Component/ComponentOperations.cs index 4ea1a1f3162..8deb6e8bd11 100644 --- a/sdk/dotnet/AzureML-Samples-CSharp/Assets/Component/ComponentOperations.cs +++ b/sdk/dotnet/AzureML-Samples-CSharp/Assets/Component/ComponentOperations.cs @@ -20,7 +20,7 @@ internal class ComponentOperations /// /// // - public static async Task GetOrCreateComponentVersionAsync( + public static async Task GetOrCreateComponentVersionAsync( ArmClient armClient, ResourceGroupResource resourceGroup, string workspaceName, @@ -34,7 +34,7 @@ public static async Task GetOrCreateComponentVersionAs string resourceId = $"{ws.Id}/components/{componentName}"; var id = new ResourceIdentifier(resourceId); - ComponentContainerResource componentContainerResource = armClient.GetComponentContainerResource(id); + MachineLearningComponentContainerResource componentContainerResource = armClient.GetMachineLearningComponentContainerResource(id); JObject jsonObject = JObject.Parse(@"{ '$schema': 'https://azuremlschemas.azureedge.net/latest/commandComponent.schema.json', @@ -80,11 +80,11 @@ public static async Task 
GetOrCreateComponentVersionAs }, }"); - ComponentVersionProperties properties = new ComponentVersionProperties { ComponentSpec = new BinaryData(jsonObject.ToString()) }; - ComponentVersionData data = new ComponentVersionData(properties); + MachineLearningComponentVersionProperties properties = new MachineLearningComponentVersionProperties { ComponentSpec = new BinaryData(jsonObject.ToString()) }; + MachineLearningComponentVersionData data = new MachineLearningComponentVersionData(properties); - ArmOperation componentVersionResourceOperation = await componentContainerResource.GetComponentVersions().CreateOrUpdateAsync(WaitUntil.Completed, version, data); - ComponentVersionResource componentVersionResource = componentVersionResourceOperation.Value; + ArmOperation componentVersionResourceOperation = await componentContainerResource.GetMachineLearningComponentVersions().CreateOrUpdateAsync(WaitUntil.Completed, version, data); + MachineLearningComponentVersionResource componentVersionResource = componentVersionResourceOperation.Value; Console.WriteLine($"ComponentVersionResource {componentVersionResource.Id} created."); return componentVersionResource; } @@ -102,7 +102,7 @@ public static async Task GetOrCreateComponentVersionAs /// /// // - public static async Task GetOrCreateComponentVersion_Pipeline_Async( + public static async Task GetOrCreateComponentVersion_Pipeline_Async( ArmClient armClient, ResourceGroupResource resourceGroup, string workspaceName, @@ -116,7 +116,7 @@ public static async Task GetOrCreateComponentVersion_P string resourceId = $"{ws.Id}/components/{componentName}"; var id = new ResourceIdentifier(resourceId); - ComponentContainerResource componentContainerResource = armClient.GetComponentContainerResource(id); + MachineLearningComponentContainerResource componentContainerResource = armClient.GetMachineLearningComponentContainerResource(id); JObject jsonObject = JObject.Parse(@"{ '$schema': 'https://azuremlschemas.azureedge.net/latest/commandComponent.schema.json', @@ -150,11 +150,11 @@ public static async Task GetOrCreateComponentVersion_P }, }"); - ComponentVersionProperties properties = new ComponentVersionProperties { ComponentSpec = new BinaryData(jsonObject.ToString()) }; - ComponentVersionData data = new ComponentVersionData(properties); + MachineLearningComponentVersionProperties properties = new MachineLearningComponentVersionProperties { ComponentSpec = new BinaryData(jsonObject.ToString()) }; + MachineLearningComponentVersionData data = new MachineLearningComponentVersionData(properties); - ArmOperation componentVersionResourceOperation = await componentContainerResource.GetComponentVersions().CreateOrUpdateAsync(WaitUntil.Completed, version, data); - ComponentVersionResource componentVersionResource = componentVersionResourceOperation.Value; + ArmOperation componentVersionResourceOperation = await componentContainerResource.GetMachineLearningComponentVersions().CreateOrUpdateAsync(WaitUntil.Completed, version, data); + MachineLearningComponentVersionResource componentVersionResource = componentVersionResourceOperation.Value; Console.WriteLine($"ComponentVersionResource {componentVersionResource.Id} created."); return componentVersionResource; } diff --git a/sdk/dotnet/AzureML-Samples-CSharp/Assets/Data/DataOperations.cs b/sdk/dotnet/AzureML-Samples-CSharp/Assets/Data/DataOperations.cs index a9a913b19fc..c78efde95b2 100644 --- a/sdk/dotnet/AzureML-Samples-CSharp/Assets/Data/DataOperations.cs +++ b/sdk/dotnet/AzureML-Samples-CSharp/Assets/Data/DataOperations.cs 
@@ -19,7 +19,7 @@ internal class DataOperations /// /// // - public static async Task GetOrCreateDataVersionAsync( + public static async Task GetOrCreateDataVersionAsync( ArmClient armClient, ResourceGroupResource resourceGroup, string workspaceName, @@ -30,16 +30,16 @@ public static async Task GetOrCreateDataVersionAsync( MachineLearningWorkspaceResource ws = await resourceGroup.GetMachineLearningWorkspaces().GetAsync(workspaceName); string resourceId = $"{ws.Id}/data/{dataName}"; var id = new ResourceIdentifier(resourceId); - DataContainerResource dataContainerResource = armClient.GetDataContainerResource(id); + MachineLearningDataContainerResource dataContainerResource = armClient.GetMachineLearningDataContainerResource(id); - bool exists = await dataContainerResource.GetDataVersionBases().ExistsAsync(version); + bool exists = await dataContainerResource.GetMachineLearningDataVersions().ExistsAsync(version); - DataVersionBaseResource dataVersionBaseResource; + MachineLearningDataVersionResource dataVersionBaseResource; if (exists) { Console.WriteLine($"DataVersionBaseResource {dataName} exists."); - dataVersionBaseResource = await dataContainerResource.GetDataVersionBases().GetAsync(version); + dataVersionBaseResource = await dataContainerResource.GetMachineLearningDataVersions().GetAsync(version); Console.WriteLine($"DataVersionBaseResource details: {dataVersionBaseResource.Data.Id}"); } else @@ -47,7 +47,7 @@ public static async Task GetOrCreateDataVersionAsync( Console.WriteLine($"Creating DataVersionBaseResource {dataName}"); // UriFolderDataVersion, or UriFileDataVersion or MLTableData - DataVersionBaseProperties properties = new UriFileDataVersion(new Uri("https://pipelinedata.blob.core.windows.net/sampledata/nytaxi/")) + MachineLearningDataVersionProperties properties = new MachineLearningUriFileDataVersion(new Uri("https://pipelinedata.blob.core.windows.net/sampledata/nytaxi/")) { Description = "Test description", Tags = new Dictionary { { "tag-name-1", "tag-value-1" } }, @@ -56,9 +56,9 @@ public static async Task GetOrCreateDataVersionAsync( IsArchived = false, }; - DataVersionBaseData data = new DataVersionBaseData(properties); + MachineLearningDataVersionData data = new MachineLearningDataVersionData(properties); - ArmOperation dataVersionBaseResourceOperation = await dataContainerResource.GetDataVersionBases().CreateOrUpdateAsync(WaitUntil.Completed, version, data); + ArmOperation dataVersionBaseResourceOperation = await dataContainerResource.GetMachineLearningDataVersions().CreateOrUpdateAsync(WaitUntil.Completed, version, data); dataVersionBaseResource = dataVersionBaseResourceOperation.Value; Console.WriteLine($"DataVersionBaseResource {dataVersionBaseResource.Data.Id} created."); } @@ -77,9 +77,9 @@ public static async Task ListDataAsync( { Console.WriteLine("Listing Datasets in the workspace..."); MachineLearningWorkspaceResource ws = await resourceGroup.GetMachineLearningWorkspaces().GetAsync(workspaceName); - DataContainerCollection dataContainerCollection = ws.GetDataContainers(); - AsyncPageable response = dataContainerCollection.GetAllAsync(); - await foreach (DataContainerResource dataContainerResource in response) + MachineLearningDataContainerCollection dataContainerCollection = ws.GetMachineLearningDataContainers(); + AsyncPageable response = dataContainerCollection.GetAllAsync(); + await foreach (MachineLearningDataContainerResource dataContainerResource in response) { Console.WriteLine(dataContainerResource.Data.Name); } @@ -99,34 +99,34 @@ public static 
async Task ListDatastoreAsync( { Console.WriteLine("Listing Datastore in the workspace..."); MachineLearningWorkspaceResource ws = await resourceGroup.GetMachineLearningWorkspaces().GetAsync(workspaceName); - DatastoreCollection datastoreCollection = ws.GetDatastores(); - AsyncPageable response = datastoreCollection.GetAllAsync(); - await foreach (DatastoreResource datastoreResource in response) + MachineLearningDatastoreCollection datastoreCollection = ws.GetMachineLearningDatastores(); + AsyncPageable response = datastoreCollection.GetAllAsync(); + await foreach (MachineLearningDatastoreResource datastoreResource in response) { - DatastoreProperties properties = datastoreResource.Data.Properties; + MachineLearningDatastoreProperties properties = datastoreResource.Data.Properties; switch (properties) { - case AzureFileDatastore: - AzureFileDatastore azureFileDatastore = (AzureFileDatastore)datastoreResource.Data.Properties; + case MachineLearningAzureFileDatastore: + MachineLearningAzureFileDatastore azureFileDatastore = (MachineLearningAzureFileDatastore)datastoreResource.Data.Properties; Console.WriteLine($"AccountName {azureFileDatastore.AccountName}"); Console.WriteLine($"FileShareName {azureFileDatastore.FileShareName}"); Console.WriteLine($"Endpoint {azureFileDatastore.Endpoint}"); break; - case AzureBlobDatastore: - AzureBlobDatastore azureBlobDatastore = (AzureBlobDatastore)datastoreResource.Data.Properties; + case MachineLearningAzureBlobDatastore: + MachineLearningAzureBlobDatastore azureBlobDatastore = (MachineLearningAzureBlobDatastore)datastoreResource.Data.Properties; Console.WriteLine($"AccountName {azureBlobDatastore.AccountName}"); Console.WriteLine($"ContainerName {azureBlobDatastore.ContainerName}"); Console.WriteLine($"Endpoint {azureBlobDatastore.Endpoint}"); break; - case AzureDataLakeGen1Datastore: - AzureDataLakeGen1Datastore azureDataLakeGen1Datastore = (AzureDataLakeGen1Datastore)datastoreResource.Data.Properties; + case MachineLearningAzureDataLakeGen1Datastore: + MachineLearningAzureDataLakeGen1Datastore azureDataLakeGen1Datastore = (MachineLearningAzureDataLakeGen1Datastore)datastoreResource.Data.Properties; Console.WriteLine($"StoreName {azureDataLakeGen1Datastore.StoreName}"); break; - case AzureDataLakeGen2Datastore: - AzureDataLakeGen2Datastore azureDataLakeGen2Datastore = (AzureDataLakeGen2Datastore)datastoreResource.Data.Properties; + case MachineLearningAzureDataLakeGen2Datastore: + MachineLearningAzureDataLakeGen2Datastore azureDataLakeGen2Datastore = (MachineLearningAzureDataLakeGen2Datastore)datastoreResource.Data.Properties; Console.WriteLine($"AccountName {azureDataLakeGen2Datastore.AccountName}"); Console.WriteLine($"Filesystem {azureDataLakeGen2Datastore.Filesystem}"); Console.WriteLine($"Endpoint {azureDataLakeGen2Datastore.Endpoint}"); diff --git a/sdk/dotnet/AzureML-Samples-CSharp/Assets/Environment/EnvironmentOperations.cs b/sdk/dotnet/AzureML-Samples-CSharp/Assets/Environment/EnvironmentOperations.cs index 9dbe3b15653..4d6979bb60f 100644 --- a/sdk/dotnet/AzureML-Samples-CSharp/Assets/Environment/EnvironmentOperations.cs +++ b/sdk/dotnet/AzureML-Samples-CSharp/Assets/Environment/EnvironmentOperations.cs @@ -20,7 +20,7 @@ internal class EnvironmentOperations /// /// // - public static async Task GetOrCreateEnvironmentVersionAsync( + public static async Task GetOrCreateEnvironmentVersionAsync( ArmClient armClient, ResourceGroupResource resourceGroup, string workspaceName, @@ -32,7 +32,7 @@ public static async Task GetOrCreateEnvironmentVersi 
string resourceId = $"{ws.Id}/environments/{environmentName}"; var id = new ResourceIdentifier(resourceId); - EnvironmentContainerResource environmentContainerResource = armClient.GetEnvironmentContainerResource(id); + MachineLearningEnvironmentContainerResource environmentContainerResource = armClient.GetMachineLearningEnvironmentContainerResource(id); var condaDependences = new JObject(); condaDependences["channels"] = new JArray() { "conda-forge" }; @@ -60,20 +60,20 @@ public static async Task GetOrCreateEnvironmentVersi condaDependences["dependencies"] = dependencies; Console.WriteLine($"condaDependences: {condaDependences}"); - EnvironmentVersionProperties properties = new EnvironmentVersionProperties + MachineLearningEnvironmentVersionProperties properties = new MachineLearningEnvironmentVersionProperties { Description = "Test", CondaFile = condaDependences.ToString(), Tags = { { "key1", "value1" }, { "key2", "value2" } }, - OSType = OperatingSystemType.Linux, + OSType = MachineLearningOperatingSystemType.Linux, IsAnonymous = false, Image = "mcr.microsoft.com/azureml/openmpi4.1.0-ubuntu20.04", }; - EnvironmentVersionData data = new EnvironmentVersionData(properties); + MachineLearningEnvironmentVersionData data = new MachineLearningEnvironmentVersionData(properties); - ArmOperation environmentVersionResourceOperation = await environmentContainerResource.GetEnvironmentVersions().CreateOrUpdateAsync(WaitUntil.Completed, version, data); - EnvironmentVersionResource environmentVersionResource = environmentVersionResourceOperation.Value; + ArmOperation environmentVersionResourceOperation = await environmentContainerResource.GetMachineLearningEnvironmentVersions().CreateOrUpdateAsync(WaitUntil.Completed, version, data); + MachineLearningEnvironmentVersionResource environmentVersionResource = environmentVersionResourceOperation.Value; Console.WriteLine($"EnvironmentVersionResource {environmentVersionResource.Data.Id} created."); return environmentVersionResource; diff --git a/sdk/dotnet/AzureML-Samples-CSharp/Assets/Model/ModelOperations.cs b/sdk/dotnet/AzureML-Samples-CSharp/Assets/Model/ModelOperations.cs index 3b4e1eceba1..ac5eafc3fd3 100644 --- a/sdk/dotnet/AzureML-Samples-CSharp/Assets/Model/ModelOperations.cs +++ b/sdk/dotnet/AzureML-Samples-CSharp/Assets/Model/ModelOperations.cs @@ -19,7 +19,7 @@ class ModelOperations /// /// // - public static async Task GetOrCreateModelVersionAsync( + public static async Task GetOrCreateModelVersionAsync( ArmClient armClient, ResourceGroupResource resourceGroup, string workspaceName, @@ -32,25 +32,25 @@ public static async Task GetOrCreateModelVersionAsync( string resourceId = $"{ws.Id}/models/{modelName}"; var id = new ResourceIdentifier(resourceId); - ModelContainerResource modelContainerResource = armClient.GetModelContainerResource(id); + MachineLearningModelContainerResource modelContainerResource = armClient.GetMachineLearningModelContainerResource(id); - ModelVersionProperties properties = new ModelVersionProperties + MachineLearningModelVersionProperties properties = new MachineLearningModelVersionProperties { JobName = "TestJob", Description = "Test Description for ModelContainer", Tags = new Dictionary { { "tag-name-1", "tag-value-1" } }, IsAnonymous = false, Properties = new Dictionary { { "property-name-1", "property-value-1" } }, - Flavors = new Dictionary() { { "python_function", new FlavorData { Data = new Dictionary() { { "loader_module", "test" } } } } }, + Flavors = new Dictionary() { { "python_function", new 
MachineLearningFlavorData { Data = new Dictionary() { { "loader_module", "test" } } } } }, IsArchived = false, - ModelType = ModelType.CustomModel, + ModelType = "CustomModel", ModelUri = new Uri(modelUri), }; - ModelVersionData data = new ModelVersionData(properties); + MachineLearningModelVersionData data = new MachineLearningModelVersionData(properties); - ArmOperation ModelVersionResourceOperation = await modelContainerResource.GetModelVersions().CreateOrUpdateAsync(WaitUntil.Completed, version, data); - ModelVersionResource modelVersionResource = ModelVersionResourceOperation.Value; + ArmOperation ModelVersionResourceOperation = await modelContainerResource.GetMachineLearningModelVersions().CreateOrUpdateAsync(WaitUntil.Completed, version, data); + MachineLearningModelVersionResource modelVersionResource = ModelVersionResourceOperation.Value; Console.WriteLine($"ModelVersionResource {modelVersionResource.Data.Id} created."); return modelVersionResource; diff --git a/sdk/dotnet/AzureML-Samples-CSharp/AzureML-Samples-CSharp.csproj b/sdk/dotnet/AzureML-Samples-CSharp/AzureML-Samples-CSharp.csproj index 362359edc47..80d8b9c79d6 100644 --- a/sdk/dotnet/AzureML-Samples-CSharp/AzureML-Samples-CSharp.csproj +++ b/sdk/dotnet/AzureML-Samples-CSharp/AzureML-Samples-CSharp.csproj @@ -13,9 +13,9 @@ - - - + + + diff --git a/sdk/dotnet/AzureML-Samples-CSharp/Endpoints/Batch/BatchEndpointOperations.cs b/sdk/dotnet/AzureML-Samples-CSharp/Endpoints/Batch/BatchEndpointOperations.cs index 04c603b7e0a..3057783c4ed 100644 --- a/sdk/dotnet/AzureML-Samples-CSharp/Endpoints/Batch/BatchEndpointOperations.cs +++ b/sdk/dotnet/AzureML-Samples-CSharp/Endpoints/Batch/BatchEndpointOperations.cs @@ -1,4 +1,5 @@ -using Azure.ResourceManager; +using Azure.Core; +using Azure.ResourceManager; using Azure.ResourceManager.MachineLearning; using Azure.ResourceManager.MachineLearning.Models; using Azure.ResourceManager.Resources; @@ -19,7 +20,7 @@ internal class BatchEndpointOperations /// Location. 
/// // - public static async Task GetOrCreateBatchEndpointAsync( + public static async Task GetOrCreateBatchEndpointAsync( ResourceGroupResource resourceGroup, string workspaceName, string endpointName, @@ -27,26 +28,26 @@ public static async Task GetOrCreateBatchEndpointAsync( { Console.WriteLine("Creating a BatchEndpoint..."); MachineLearningWorkspaceResource ws = await resourceGroup.GetMachineLearningWorkspaces().GetAsync(workspaceName); - bool exists = await ws.GetBatchEndpoints().ExistsAsync(endpointName); + bool exists = await ws.GetMachineLearningBatchEndpoints().ExistsAsync(endpointName); - BatchEndpointResource endpointResource; + MachineLearningBatchEndpointResource endpointResource; if (exists) { Console.WriteLine($"BatchEndpoint {endpointName} exists."); - endpointResource = await ws.GetBatchEndpoints().GetAsync(endpointName); + endpointResource = await ws.GetMachineLearningBatchEndpoints().GetAsync(endpointName); Console.WriteLine($"BatchEndpointResource details: {endpointResource.Data.Id}"); } else { Console.WriteLine($"BatchEndpoint {endpointName} does not exist."); - BatchEndpointProperties properties = new BatchEndpointProperties(EndpointAuthMode.AADToken) + MachineLearningBatchEndpointProperties properties = new MachineLearningBatchEndpointProperties(MachineLearningEndpointAuthMode.AadToken) { Description = "test batch endpoint", Properties = { { "additionalProp1", "value1" } }, }; - BatchEndpointData data = new BatchEndpointData(location, properties) + MachineLearningBatchEndpointData data = new MachineLearningBatchEndpointData(location, properties) { Kind = "BatchSample", Sku = new MachineLearningSku("Default") @@ -59,7 +60,7 @@ public static async Task GetOrCreateBatchEndpointAsync( Identity = new ManagedServiceIdentity(ResourceManager.Models.ManagedServiceIdentityType.SystemAssigned), }; - ArmOperation endpointResourceOperation = await ws.GetBatchEndpoints().CreateOrUpdateAsync(WaitUntil.Completed, endpointName, data); + ArmOperation endpointResourceOperation = await ws.GetMachineLearningBatchEndpoints().CreateOrUpdateAsync(WaitUntil.Completed, endpointName, data); endpointResource = endpointResourceOperation.Value; Console.WriteLine($"BatchEndpointResource {endpointResource.Data.Id} created."); } @@ -85,7 +86,7 @@ public static async Task GetOrCreateBatchEndpointAsync( /// /// // - public static async Task GetOrCreateBatchDeploymentAsync( + public static async Task GetOrCreateBatchDeploymentAsync( ResourceGroupResource resourceGroup, string workspaceName, string endpointName, @@ -98,53 +99,53 @@ public static async Task GetOrCreateBatchDeploymentAsyn { Console.WriteLine("Creating a BatchDeployment..."); MachineLearningWorkspaceResource ws = await resourceGroup.GetMachineLearningWorkspaces().GetAsync(workspaceName); - BatchEndpointResource endpointResource = await ws.GetBatchEndpoints().GetAsync(endpointName); + MachineLearningBatchEndpointResource endpointResource = await ws.GetMachineLearningBatchEndpoints().GetAsync(endpointName); Console.WriteLine(endpointResource.Data.Id); - bool exists = await endpointResource.GetBatchDeployments().ExistsAsync(deploymentName); + bool exists = await endpointResource.GetMachineLearningBatchDeployments().ExistsAsync(deploymentName); - BatchDeploymentResource deploymentResource; + MachineLearningBatchDeploymentResource deploymentResource; if (exists) { Console.WriteLine($"BatchDeployment {deploymentName} exists."); - deploymentResource = await endpointResource.GetBatchDeployments().GetAsync(deploymentName); + deploymentResource = 
await endpointResource.GetMachineLearningBatchDeployments().GetAsync(deploymentName); Console.WriteLine($"BatchDeploymentResource details: {deploymentResource.Data.Id}"); } else { Console.WriteLine($"BatchDeployment {deploymentName} does not exist."); - BatchDeploymentProperties properties = new BatchDeploymentProperties + MachineLearningBatchDeploymentProperties properties = new MachineLearningBatchDeploymentProperties { Description = "This is a batch deployment", ErrorThreshold = 10, MaxConcurrencyPerInstance = 5, - LoggingLevel = BatchLoggingLevel.Info, + LoggingLevel = MachineLearningBatchLoggingLevel.Info, MiniBatchSize = 10, OutputFileName = "mypredictions.csv", - OutputAction = BatchOutputAction.AppendRow, + OutputAction = MachineLearningBatchOutputAction.AppendRow, Properties = { { "additionalProp1", "value1" } }, EnvironmentId = environmentId, Compute = computeId, - Resources = new ResourceConfiguration { InstanceCount = 1, }, + Resources = new MachineLearningDeploymentResourceConfiguration { InstanceCount = 1, }, EnvironmentVariables = new Dictionary { { "TestVariable", "TestValue" }, }, - RetrySettings = new BatchRetrySettings + RetrySettings = new MachineLearningBatchRetrySettings { MaxRetries = 4, Timeout = new TimeSpan(0, 3, 0), }, - CodeConfiguration = new CodeConfiguration("main.py") + CodeConfiguration = new MachineLearningCodeConfiguration("main.py") { - CodeId = codeArtifactId, + CodeId = new ResourceIdentifier(codeArtifactId), }, - Model = new IdAssetReference(modelId), + Model = new MachineLearningIdAssetReference(new ResourceIdentifier(modelId)), }; - BatchDeploymentData data = new BatchDeploymentData(location, properties) + MachineLearningBatchDeploymentData data = new MachineLearningBatchDeploymentData(location, properties) { Kind = "SampleBatchDeployment", Sku = new MachineLearningSku("Default") @@ -156,7 +157,7 @@ public static async Task GetOrCreateBatchDeploymentAsyn }, }; - ArmOperation endpointResourceOperation = await endpointResource.GetBatchDeployments().CreateOrUpdateAsync(WaitUntil.Completed, deploymentName, data); + ArmOperation endpointResourceOperation = await endpointResource.GetMachineLearningBatchDeployments().CreateOrUpdateAsync(WaitUntil.Completed, deploymentName, data); deploymentResource = endpointResourceOperation.Value; Console.WriteLine($"BatchDeploymentResource {deploymentResource.Data.Id} created."); } @@ -178,9 +179,9 @@ public static async Task ListBatchDeploymentsAsync( { Console.WriteLine("Listing all Batch deployments in the workspace..."); MachineLearningWorkspaceResource ws = await resourceGroup.GetMachineLearningWorkspaces().GetAsync(workspaceName); - foreach (var edp in ws.GetBatchEndpoints().GetAll()) + foreach (var edp in ws.GetMachineLearningBatchEndpoints().GetAll()) { - foreach (var dep in edp.GetBatchDeployments().GetAll()) + foreach (var dep in edp.GetMachineLearningBatchDeployments().GetAll()) { Console.WriteLine(dep.Data.Name); } diff --git a/sdk/dotnet/AzureML-Samples-CSharp/Endpoints/Online/ManagedOnlineEndpointOperations.cs b/sdk/dotnet/AzureML-Samples-CSharp/Endpoints/Online/ManagedOnlineEndpointOperations.cs index cfa0b2c2ce1..9a92493bbe6 100644 --- a/sdk/dotnet/AzureML-Samples-CSharp/Endpoints/Online/ManagedOnlineEndpointOperations.cs +++ b/sdk/dotnet/AzureML-Samples-CSharp/Endpoints/Online/ManagedOnlineEndpointOperations.cs @@ -1,4 +1,5 @@ -using Azure.MachineLearning.Samples.Shared; +using Azure.Core; +using Azure.MachineLearning.Samples.Shared; using Azure.ResourceManager; using 
Azure.ResourceManager.MachineLearning;
 using Azure.ResourceManager.MachineLearning.Models;
@@ -20,7 +21,7 @@ internal class ManagedOnlineEndpointOperations
         ///
         ///
         //
-        public static async Task<OnlineEndpointResource> GetOrCreateOnlineEndpointAsync(
+        public static async Task<MachineLearningOnlineEndpointResource> GetOrCreateOnlineEndpointAsync(
             ResourceGroupResource resourceGroup,
             string workspaceName,
             string endpointName,
@@ -28,19 +29,19 @@ public static async Task GetOrCreateOnlineEndpointAsync(
         {
             Console.WriteLine("Creating an OnlineEndpoint...");
             MachineLearningWorkspaceResource ws = await resourceGroup.GetMachineLearningWorkspaces().GetAsync(workspaceName);
-            bool exists = await ws.GetOnlineEndpoints().ExistsAsync(endpointName);
+            bool exists = await ws.GetMachineLearningOnlineEndpoints().ExistsAsync(endpointName);
-            OnlineEndpointResource endpointResource;
+            MachineLearningOnlineEndpointResource endpointResource;
             if (exists)
             {
                 Console.WriteLine($"OnlineEndpoint {endpointName} exists.");
-                endpointResource = await ws.GetOnlineEndpoints().GetAsync(endpointName);
+                endpointResource = await ws.GetMachineLearningOnlineEndpoints().GetAsync(endpointName);
                 Console.WriteLine($"OnlineEndpointResource details: {endpointResource.Data.Id}");
             }
             else
             {
                 Console.WriteLine($"OnlineEndpoint {endpointName} does not exist.");
-                OnlineEndpointProperties properties = new OnlineEndpointProperties(EndpointAuthMode.AMLToken)
+                MachineLearningOnlineEndpointProperties properties = new MachineLearningOnlineEndpointProperties(MachineLearningEndpointAuthMode.AmlToken)
                 {
                     //ARM resource ID of the compute if it exists.
                     //Compute = "",
@@ -54,7 +55,7 @@ public static async Task GetOrCreateOnlineEndpointAsync(
                 };
                 // ManagedServiceIdentity Identity = new ManagedServiceIdentity(ManagedServiceIdentityType.SystemAssigned);
-                OnlineEndpointData OnlineEndpointData = new OnlineEndpointData(location, properties)
+                MachineLearningOnlineEndpointData OnlineEndpointData = new MachineLearningOnlineEndpointData(location, properties)
                 {
                     Kind = "SampleKind",
                     // Identity = ManagedServiceIdentity(Azure.ResourceManager.MachineLearningServices.Models.ManagedServiceIdentityType.SystemAssigned),
@@ -70,7 +71,7 @@ public static async Task GetOrCreateOnlineEndpointAsync(
                 };
                 // new OnlineEndpointTrackedResourceData(Location.WestUS2, properties) { Kind = "SampleKind", Identity = identity };
-                ArmOperation<OnlineEndpointResource> endpointResourceOperation = await ws.GetOnlineEndpoints().CreateOrUpdateAsync(WaitUntil.Completed, endpointName, OnlineEndpointData);
+                ArmOperation<MachineLearningOnlineEndpointResource> endpointResourceOperation = await ws.GetMachineLearningOnlineEndpoints().CreateOrUpdateAsync(WaitUntil.Completed, endpointName, OnlineEndpointData);
                 endpointResource = endpointResourceOperation.Value;
                 Console.WriteLine($"OnlineEndpointResource {endpointResource.Data.Id} created.");
             }
@@ -94,7 +95,7 @@ public static async Task GetOrCreateOnlineEndpointAsync(
         ///
         ///
         //
-        public static async Task<OnlineDeploymentResource> GetOrCreateOnlineDeploymentAsync(
+        public static async Task<MachineLearningOnlineDeploymentResource> GetOrCreateOnlineDeploymentAsync(
             ResourceGroupResource resourceGroup,
             string workspaceName,
             string endpointName,
@@ -106,19 +107,19 @@ public static async Task GetOrCreateOnlineDeploymentAs
         {
             Console.WriteLine("Creating a OnlineDeployment...");
             MachineLearningWorkspaceResource ws = await resourceGroup.GetMachineLearningWorkspaces().GetAsync(workspaceName);
-            OnlineEndpointResource endpointResource = await ws.GetOnlineEndpoints().GetAsync(endpointName);
+            MachineLearningOnlineEndpointResource endpointResource = await ws.GetMachineLearningOnlineEndpoints().GetAsync(endpointName);
             Console.WriteLine(endpointResource.Data.Id);
-            bool exists = await endpointResource.GetOnlineDeployments().ExistsAsync(deploymentName);
+            bool exists = await endpointResource.GetMachineLearningOnlineDeployments().ExistsAsync(deploymentName);
             // https://docs.microsoft.com/azure/machine-learning/how-to-troubleshoot-online-endpoints
-            OnlineDeploymentResource deploymentResource;
+            MachineLearningOnlineDeploymentResource deploymentResource;
             if (exists)
             {
                 Console.WriteLine($"OnlineDeployment {deploymentName} exists.");
-                deploymentResource = await endpointResource.GetOnlineDeployments().GetAsync(deploymentName);
+                deploymentResource = await endpointResource.GetMachineLearningOnlineDeployments().GetAsync(deploymentName);
                 Console.WriteLine($"OnlineDeploymentResource details: {deploymentResource.Data.Id}");
             }
             else
@@ -136,27 +137,27 @@ public static async Task GetOrCreateOnlineDeploymentAs
                 //scaleSettings = new DefaultScaleSettings();
-                var managedOnlineDeploymentDetails = new ManagedOnlineDeployment
+                var managedOnlineDeploymentDetails = new MachineLearningManagedOnlineDeployment
                 {
                     Description = "This is a test online deployment",
                     // EgressPublicNetworkAccess=EgressPublicNetworkAccessType.Disabled,
                     // The path to mount the model in custom container.
                     // Custom model mount path for curated environments is not supported
                     // ModelMountPath = "/var/mountpath",
-                    PrivateNetworkConnection = false,
+                    EgressPublicNetworkAccess = MachineLearningEgressPublicNetworkAccessType.Disabled,
                     Properties = { { "additionalProp1", "value1" } },
                     EnvironmentId = environmentId,
                     EnvironmentVariables = new Dictionary<string, string> { { "TestVariable", "TestValue" } },
-                    RequestSettings = new OnlineRequestSettings
+                    RequestSettings = new MachineLearningOnlineRequestSettings
                     {
                         MaxQueueWait = TimeSpan.FromMilliseconds(30),
                         RequestTimeout = TimeSpan.FromMilliseconds(60),
                         MaxConcurrentRequestsPerInstance = 3,
                     },
-                    LivenessProbe = new ProbeSettings
+                    LivenessProbe = new MachineLearningProbeSettings
                     {
                         FailureThreshold = 10,
                         SuccessThreshold = 1,
@@ -165,7 +166,7 @@ public static async Task GetOrCreateOnlineDeploymentAs
                         Period = TimeSpan.FromSeconds(2),
                     },
                     // Only for ManagedOnlineDeployment
-                    ReadinessProbe = new ProbeSettings
+                    ReadinessProbe = new MachineLearningProbeSettings
                     {
                         FailureThreshold = 10,
                         SuccessThreshold = 1,
@@ -174,16 +175,16 @@ public static async Task GetOrCreateOnlineDeploymentAs
                         Period = TimeSpan.FromSeconds(2),
                     },
                     AppInsightsEnabled = false,
-                    CodeConfiguration = new CodeConfiguration("main.py")
+                    CodeConfiguration = new MachineLearningCodeConfiguration("main.py")
                     {
-                        CodeId = codeArtifactId,
+                        CodeId = new ResourceIdentifier(codeArtifactId),
                    },
                     InstanceType = "Standard_F2s_v2",
                     Model = modelId,
                     // ScaleSettings = new DefaultScaleSettings(),
                 };
-                OnlineDeploymentData data = new OnlineDeploymentData(location, managedOnlineDeploymentDetails)
+                MachineLearningOnlineDeploymentData data = new MachineLearningOnlineDeploymentData(location, managedOnlineDeploymentDetails)
                 {
                     Kind = "SampleKindDeployment",
                     Sku = new MachineLearningSku("Default")
@@ -195,7 +196,7 @@ public static async Task GetOrCreateOnlineDeploymentAs
                     },
                 };
-                ArmOperation<OnlineDeploymentResource> deploymentResourceOperation = await endpointResource.GetOnlineDeployments().CreateOrUpdateAsync(WaitUntil.Completed, deploymentName, data);
+                ArmOperation<MachineLearningOnlineDeploymentResource> deploymentResourceOperation = await endpointResource.GetMachineLearningOnlineDeployments().CreateOrUpdateAsync(WaitUntil.Completed, deploymentName, data);
                 deploymentResource = deploymentResourceOperation.Value;
                 Console.WriteLine($"OnlineDeploymentResource {deploymentResource.Data.Id} created.");
             }
@@ -217,9 +218,9 @@ public static async Task ListOnlineDeploymentsAsync(
         {
             Console.WriteLine("Listing all Online deployments in the workspace...");
             MachineLearningWorkspaceResource ws = await resourceGroup.GetMachineLearningWorkspaces().GetAsync(workspaceName);
-            foreach (var edp in ws.GetOnlineEndpoints().GetAll())
+            foreach (var edp in ws.GetMachineLearningOnlineEndpoints().GetAll())
             {
-                foreach (var dep in edp.GetOnlineDeployments().GetAll())
+                foreach (var dep in edp.GetMachineLearningOnlineDeployments().GetAll())
                 {
                     Console.WriteLine(dep.Data.Name);
                 }
@@ -252,12 +253,12 @@ private static async Task InvokeOnlineEndpoint(
                 }");
             MachineLearningWorkspaceResource ws = await resourceGroup.GetMachineLearningWorkspaces().GetAsync(workspaceName);
-            bool exists = await ws.GetOnlineEndpoints().ExistsAsync(endpointName);
+            bool exists = await ws.GetMachineLearningOnlineEndpoints().ExistsAsync(endpointName);
             if (exists)
             {
                 Console.WriteLine($"OnlineEndpoint {endpointName} exists.");
-                OnlineEndpointResource endpointResource = await ws.GetOnlineEndpoints().GetAsync(endpointName);
+                MachineLearningOnlineEndpointResource endpointResource = await ws.GetMachineLearningOnlineEndpoints().GetAsync(endpointName);
                 Console.WriteLine($"OnlineEndpointResource details: {endpointResource.Data.Id}");
                 var scoringUri = endpointResource.Data.Properties.ScoringUri;
From c7b8161c70299b55b742da966764e948e1bfd0ba Mon Sep 17 00:00:00 2001
From: Vinutha Karanth
Date: Thu, 10 Oct 2024 12:12:49 -0700
Subject: [PATCH 5/5] add (#3416)

---
 ...sponsibleaidashboard-housing-classification.yml | 14 +++++++-------
 ...esponsibleaidashboard-programmer-regression.yml | 14 +++++++-------
 2 files changed, 14 insertions(+), 14 deletions(-)

diff --git a/cli/responsible-ai/cli-responsibleaidashboard-housing-classification/cli-responsibleaidashboard-housing-classification.yml b/cli/responsible-ai/cli-responsibleaidashboard-housing-classification/cli-responsibleaidashboard-housing-classification.yml
index 4fb0870ee45..8675ed1e4ef 100644
--- a/cli/responsible-ai/cli-responsibleaidashboard-housing-classification/cli-responsibleaidashboard-housing-classification.yml
+++ b/cli/responsible-ai/cli-responsibleaidashboard-housing-classification/cli-responsibleaidashboard-housing-classification.yml
@@ -39,7 +39,7 @@ jobs:
   create_rai_job:
     type: command
-    component: azureml://registries/azureml/components/microsoft_azureml_rai_tabular_insight_constructor/versions/0.15.0
+    component: azureml://registries/azureml/components/microsoft_azureml_rai_tabular_insight_constructor/versions/0.17.0
     limits:
       timeout: 3600
     inputs:
@@ -54,7 +54,7 @@ jobs:
   explain_01:
     type: command
-    component: azureml://registries/azureml/components/microsoft_azureml_rai_tabular_explanation/versions/0.15.0
+    component: azureml://registries/azureml/components/microsoft_azureml_rai_tabular_explanation/versions/0.17.0
     limits:
       timeout: 7200
     inputs:
@@ -63,7 +63,7 @@ jobs:
   causal_01:
     type: command
-    component: azureml://registries/azureml/components/microsoft_azureml_rai_tabular_causal/versions/0.15.0
+    component: azureml://registries/azureml/components/microsoft_azureml_rai_tabular_causal/versions/0.17.0
     limits:
       timeout: 7200
     inputs:
@@ -75,7 +75,7 @@ jobs:
   counterfactual_01:
     type: command
-    component: azureml://registries/azureml/components/microsoft_azureml_rai_tabular_counterfactual/versions/0.15.0
+    component: azureml://registries/azureml/components/microsoft_azureml_rai_tabular_counterfactual/versions/0.17.0
     limits:
       timeout: 7200
     inputs:
@@ -88,14 +88,14 @@ jobs:
     limits:
       timeout: 7200
     type: command
-    component: azureml://registries/azureml/components/microsoft_azureml_rai_tabular_erroranalysis/versions/0.15.0
+    component: azureml://registries/azureml/components/microsoft_azureml_rai_tabular_erroranalysis/versions/0.17.0
     inputs:
       rai_insights_dashboard: ${{parent.jobs.create_rai_job.outputs.rai_insights_dashboard}}
       max_depth: 3
   gather_01:
     type: command
-    component: azureml://registries/azureml/components/microsoft_azureml_rai_tabular_insight_gather/versions/0.15.0
+    component: azureml://registries/azureml/components/microsoft_azureml_rai_tabular_insight_gather/versions/0.17.0
     limits:
       timeout: 7200
     inputs:
@@ -107,7 +107,7 @@ jobs:
   scorecard_01:
     type: command
-    component: azureml://registries/azureml/components/microsoft_azureml_rai_tabular_score_card/versions/0.15.0
+    component: azureml://registries/azureml/components/microsoft_azureml_rai_tabular_score_card/versions/0.17.0
     inputs:
       dashboard: ${{parent.jobs.gather_01.outputs.dashboard}}
       pdf_generation_config:
diff --git a/cli/responsible-ai/cli-responsibleaidashboard-programmer-regression/cli-responsibleaidashboard-programmer-regression.yml b/cli/responsible-ai/cli-responsibleaidashboard-programmer-regression/cli-responsibleaidashboard-programmer-regression.yml
index dd9a655bd19..68765894ffd 100644
--- a/cli/responsible-ai/cli-responsibleaidashboard-programmer-regression/cli-responsibleaidashboard-programmer-regression.yml
+++ b/cli/responsible-ai/cli-responsibleaidashboard-programmer-regression/cli-responsibleaidashboard-programmer-regression.yml
@@ -33,7 +33,7 @@ jobs:
   create_rai_job:
     type: command
-    component: azureml://registries/azureml/components/microsoft_azureml_rai_tabular_insight_constructor/versions/0.15.0
+    component: azureml://registries/azureml/components/microsoft_azureml_rai_tabular_insight_constructor/versions/0.17.0
     limits:
       timeout: 3600
     inputs:
@@ -47,7 +47,7 @@ jobs:
   explain_01:
     type: command
-    component: azureml://registries/azureml/components/microsoft_azureml_rai_tabular_explanation/versions/0.15.0
+    component: azureml://registries/azureml/components/microsoft_azureml_rai_tabular_explanation/versions/0.17.0
     limits:
       timeout: 7200
     inputs:
@@ -56,7 +56,7 @@ jobs:
   causal_01:
     type: command
-    component: azureml://registries/azureml/components/microsoft_azureml_rai_tabular_causal/versions/0.15.0
+    component: azureml://registries/azureml/components/microsoft_azureml_rai_tabular_causal/versions/0.17.0
     limits:
       timeout: 7200
     inputs:
@@ -68,7 +68,7 @@ jobs:
   counterfactual_01:
     type: command
-    component: azureml://registries/azureml/components/microsoft_azureml_rai_tabular_counterfactual/versions/0.15.0
+    component: azureml://registries/azureml/components/microsoft_azureml_rai_tabular_counterfactual/versions/0.17.0
     limits:
       timeout: 7200
     inputs:
@@ -81,7 +81,7 @@ jobs:
     limits:
       timeout: 7200
     type: command
-    component: azureml://registries/azureml/components/microsoft_azureml_rai_tabular_erroranalysis/versions/0.15.0
+    component: azureml://registries/azureml/components/microsoft_azureml_rai_tabular_erroranalysis/versions/0.17.0
     inputs:
       rai_insights_dashboard: ${{parent.jobs.create_rai_job.outputs.rai_insights_dashboard}}
       max_depth: 3
@@ -89,7 +89,7 @@ jobs:
   gather_01:
     type: command
-    component: azureml://registries/azureml/components/microsoft_azureml_rai_tabular_insight_gather/versions/0.15.0
+    component: azureml://registries/azureml/components/microsoft_azureml_rai_tabular_insight_gather/versions/0.17.0
     limits:
       timeout: 7200
     inputs:
@@ -101,7 +101,7 @@ jobs:
   scorecard_01:
     type: command
-    component: azureml://registries/azureml/components/microsoft_azureml_rai_tabular_score_card/versions/0.15.0
+    component: azureml://registries/azureml/components/microsoft_azureml_rai_tabular_score_card/versions/0.17.0
     inputs:
       dashboard: ${{parent.jobs.gather_01.outputs.dashboard}}
       pdf_generation_config:
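As a usage note (not part of the patch), pipeline definitions like the two Responsible AI YAML files above are typically submitted with the Azure ML CLI v2; the resource group and workspace names here are placeholders:

az ml job create --file cli/responsible-ai/cli-responsibleaidashboard-housing-classification/cli-responsibleaidashboard-housing-classification.yml --resource-group <resource-group> --workspace-name <workspace>

After this change, a fresh run resolves the RAI tabular components from the azureml registry at version 0.17.0 instead of 0.15.0; no other pipeline inputs are affected.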