diff --git a/cli/jobs/pipelines-with-components/shakespear_sample_and_word_count_using_spark/pipeline.yml b/cli/jobs/pipelines-with-components/shakespear_sample_and_word_count_using_spark/pipeline.yml
index 9a75e550d3c..c5548182990 100644
--- a/cli/jobs/pipelines-with-components/shakespear_sample_and_word_count_using_spark/pipeline.yml
+++ b/cli/jobs/pipelines-with-components/shakespear_sample_and_word_count_using_spark/pipeline.yml
@@ -32,7 +32,7 @@ jobs:
     resources:
       instance_type: Standard_E8S_V3
-      runtime_version: 3.1.0
+      runtime_version: 3.3.0
 
   count_word:
     type: spark
@@ -44,6 +44,6 @@ jobs:
     resources:
       instance_type: Standard_E8S_V3
-      runtime_version: 3.1.0
+      runtime_version: 3.3.0
diff --git a/cli/jobs/pipelines/add-column-and-word-count-using-spark/pipeline.yml b/cli/jobs/pipelines/add-column-and-word-count-using-spark/pipeline.yml
index 472bd6340b6..0ab705de8e5 100644
--- a/cli/jobs/pipelines/add-column-and-word-count-using-spark/pipeline.yml
+++ b/cli/jobs/pipelines/add-column-and-word-count-using-spark/pipeline.yml
@@ -36,7 +36,7 @@ jobs:
     resources:
       instance_type: Standard_E8S_V3
-      runtime_version: 3.1.0
+      runtime_version: 3.3.0
     conf:
       spark.driver.cores: 2
@@ -71,7 +71,7 @@ jobs:
     resources:
       instance_type: Standard_E8S_V3
-      runtime_version: 3.1.0
+      runtime_version: 3.3.0
     conf:
       spark.driver.cores: 2
diff --git a/cli/jobs/spark/serverless-spark-pipeline-default-identity.yml b/cli/jobs/spark/serverless-spark-pipeline-default-identity.yml
index a334b5d824e..31b905d295f 100644
--- a/cli/jobs/spark/serverless-spark-pipeline-default-identity.yml
+++ b/cli/jobs/spark/serverless-spark-pipeline-default-identity.yml
@@ -22,4 +22,4 @@ jobs:
     resources:
       instance_type: standard_e8s_v3
-      runtime_version: "3.2"
+      runtime_version: "3.3"
diff --git a/cli/jobs/spark/serverless-spark-pipeline-managed-identity.yml b/cli/jobs/spark/serverless-spark-pipeline-managed-identity.yml
index 225d51ffc9f..6822ea9f9fa 100644
--- a/cli/jobs/spark/serverless-spark-pipeline-managed-identity.yml
+++ b/cli/jobs/spark/serverless-spark-pipeline-managed-identity.yml
@@ -25,4 +25,4 @@ jobs:
     resources:
       instance_type: standard_e8s_v3
-      runtime_version: "3.2"
+      runtime_version: "3.3"
diff --git a/cli/jobs/spark/serverless-spark-pipeline-user-identity.yml b/cli/jobs/spark/serverless-spark-pipeline-user-identity.yml
index 9c1dcd6ea84..b5ec9d05ae6 100644
--- a/cli/jobs/spark/serverless-spark-pipeline-user-identity.yml
+++ b/cli/jobs/spark/serverless-spark-pipeline-user-identity.yml
@@ -25,4 +25,4 @@ jobs:
     resources:
       instance_type: standard_e8s_v3
-      runtime_version: "3.2"
+      runtime_version: "3.3"
diff --git a/cli/jobs/spark/serverless-spark-standalone-default-identity.yml b/cli/jobs/spark/serverless-spark-standalone-default-identity.yml
index 5f6683b1cf5..49a21c4ed4d 100644
--- a/cli/jobs/spark/serverless-spark-standalone-default-identity.yml
+++ b/cli/jobs/spark/serverless-spark-standalone-default-identity.yml
@@ -31,5 +31,5 @@ args: >-
 resources:
   instance_type: standard_e4s_v3
-  runtime_version: "3.2"
+  runtime_version: "3.3"
\ No newline at end of file
diff --git a/cli/jobs/spark/serverless-spark-standalone-managed-identity.yml b/cli/jobs/spark/serverless-spark-standalone-managed-identity.yml
index f84f841a3bb..1f4af1781fd 100644
--- a/cli/jobs/spark/serverless-spark-standalone-managed-identity.yml
+++ b/cli/jobs/spark/serverless-spark-standalone-managed-identity.yml
@@ -34,5 +34,5 @@ identity:
 resources:
   instance_type: standard_e4s_v3
-  runtime_version: "3.2"
+  runtime_version: "3.3"
\ No newline at end of file
diff --git a/cli/jobs/spark/serverless-spark-standalone-user-identity.yml b/cli/jobs/spark/serverless-spark-standalone-user-identity.yml
index a4d448d2d6f..86ce8695749 100644
--- a/cli/jobs/spark/serverless-spark-standalone-user-identity.yml
+++ b/cli/jobs/spark/serverless-spark-standalone-user-identity.yml
@@ -34,5 +34,5 @@ identity:
 resources:
   instance_type: standard_e4s_v3
-  runtime_version: "3.2"
+  runtime_version: "3.3"
\ No newline at end of file
diff --git a/cli/jobs/spark/setup-attached-resources.sh b/cli/jobs/spark/setup-attached-resources.sh
index ba40f4b18c8..c01e506f512 100644
--- a/cli/jobs/spark/setup-attached-resources.sh
+++ b/cli/jobs/spark/setup-attached-resources.sh
@@ -27,7 +27,7 @@ az storage account create --name $GEN2_STORAGE_NAME --resource-group $RESOURCE_G
 az storage fs create -n $GEN2_FILE_SYSTEM --account-name $GEN2_STORAGE_NAME
 az synapse workspace create --name $SYNAPSE_WORKSPACE_NAME --resource-group $RESOURCE_GROUP --storage-account $GEN2_STORAGE_NAME --file-system $GEN2_FILE_SYSTEM --sql-admin-login-user $SQL_ADMIN_LOGIN_USER --sql-admin-login-password $RANDOM_STRING --location $LOCATION
 az role assignment create --role "Storage Blob Data Owner" --assignee $AML_USER_MANAGED_ID_OID --scope /subscriptions/$SUBSCRIPTION_ID/resourceGroups/$RESOURCE_GROUP/providers/Microsoft.Storage/storageAccounts/$GEN2_STORAGE_NAME/blobServices/default/containers/$GEN2_FILE_SYSTEM
-az synapse spark pool create --name $SPARK_POOL_NAME --workspace-name $SYNAPSE_WORKSPACE_NAME --resource-group $RESOURCE_GROUP --spark-version 3.2 --node-count 3 --node-size Medium --min-node-count 3 --max-node-count 10 --enable-auto-scale true
+az synapse spark pool create --name $SPARK_POOL_NAME --workspace-name $SYNAPSE_WORKSPACE_NAME --resource-group $RESOURCE_GROUP --spark-version 3.3 --node-count 3 --node-size Medium --min-node-count 3 --max-node-count 10 --enable-auto-scale true
 az synapse workspace firewall-rule create --name allowAll --workspace-name $SYNAPSE_WORKSPACE_NAME --resource-group $RESOURCE_GROUP --start-ip-address 0.0.0.0 --end-ip-address 255.255.255.255
 
 if [[ "$2" == *"managed-identity"* ]]
diff --git a/sdk/python/featurestore_sample/automation-test/test_featurestore_cli_samples.ipynb b/sdk/python/featurestore_sample/automation-test/test_featurestore_cli_samples.ipynb
index 64946533685..43c6c8c0f9c 100644
--- a/sdk/python/featurestore_sample/automation-test/test_featurestore_cli_samples.ipynb
+++ b/sdk/python/featurestore_sample/automation-test/test_featurestore_cli_samples.ipynb
@@ -69,7 +69,7 @@
     "    executor_instances=1,\n",
     "    resources={\n",
     "        \"instance_type\": \"Standard_E8S_V3\",\n",
-    "        \"runtime_version\": \"3.2.0\",\n",
+    "        \"runtime_version\": \"3.3.0\",\n",
     "    },\n",
     "    conf={\"spark.synapse.library.python.env\": contents},\n",
     ")\n",
diff --git a/sdk/python/featurestore_sample/automation-test/test_featurestore_sdk_samples.ipynb b/sdk/python/featurestore_sample/automation-test/test_featurestore_sdk_samples.ipynb
index 9ac17ef6f91..ce7d781e9bc 100644
--- a/sdk/python/featurestore_sample/automation-test/test_featurestore_sdk_samples.ipynb
+++ b/sdk/python/featurestore_sample/automation-test/test_featurestore_sdk_samples.ipynb
@@ -69,7 +69,7 @@
     "    executor_instances=1,\n",
     "    resources={\n",
     "        \"instance_type\": \"Standard_E8S_V3\",\n",
-    "        \"runtime_version\": \"3.2.0\",\n",
+    "        \"runtime_version\": \"3.3.0\",\n",
     "    },\n",
     "    conf={\"spark.synapse.library.python.env\": contents},\n",
     ")\n",
diff --git a/sdk/python/featurestore_sample/automation-test/test_featurestore_vnet_samples.ipynb b/sdk/python/featurestore_sample/automation-test/test_featurestore_vnet_samples.ipynb
index 4cc57cd7b2d..7714ae3b873 100644
--- a/sdk/python/featurestore_sample/automation-test/test_featurestore_vnet_samples.ipynb
+++ b/sdk/python/featurestore_sample/automation-test/test_featurestore_vnet_samples.ipynb
@@ -69,7 +69,7 @@
     "    executor_instances=1,\n",
     "    resources={\n",
     "        \"instance_type\": \"Standard_E8S_V3\",\n",
-    "        \"runtime_version\": \"3.2.0\",\n",
+    "        \"runtime_version\": \"3.3.0\",\n",
     "    },\n",
     "    conf={\"spark.synapse.library.python.env\": contents},\n",
     ")\n",
diff --git a/sdk/python/featurestore_sample/featurestore/arm-template/parameters.json b/sdk/python/featurestore_sample/featurestore/arm-template/parameters.json
index 182338b5581..29042fc5280 100644
--- a/sdk/python/featurestore_sample/featurestore/arm-template/parameters.json
+++ b/sdk/python/featurestore_sample/featurestore/arm-template/parameters.json
@@ -177,7 +177,7 @@
       }
     },
     "spark_runtime_version": {
-      "value": "3.2.0"
+      "value": "3.3.0"
     },
     "offlineStoreStorageAccountOption": {
       "value": "new"
diff --git a/sdk/python/featurestore_sample/featurestore/featurestore_with_offline_setting.yaml b/sdk/python/featurestore_sample/featurestore/featurestore_with_offline_setting.yaml
index c069b0867e2..85b3ba116e8 100644
--- a/sdk/python/featurestore_sample/featurestore/featurestore_with_offline_setting.yaml
+++ b/sdk/python/featurestore_sample/featurestore/featurestore_with_offline_setting.yaml
@@ -13,4 +13,4 @@ materialization_identity:
   resource_id: /subscriptions/{sub-id}/resourceGroups/{rg}/providers/Microsoft.ManagedIdentity/userAssignedIdentities/{uai-name}
 compute_runtime:
-  spark_runtime_version: '3.2'
\ No newline at end of file
+  spark_runtime_version: '3.3'
\ No newline at end of file
diff --git a/sdk/python/featurestore_sample/notebooks/sdk_and_cli/4.Provision-feature-store.ipynb b/sdk/python/featurestore_sample/notebooks/sdk_and_cli/4.Provision-feature-store.ipynb
index bb604c3919c..7708c9c37ff 100644
--- a/sdk/python/featurestore_sample/notebooks/sdk_and_cli/4.Provision-feature-store.ipynb
+++ b/sdk/python/featurestore_sample/notebooks/sdk_and_cli/4.Provision-feature-store.ipynb
@@ -1428,7 +1428,7 @@
     "```json\n",
     "\"parameters\": {\n",
     "    \"spark_runtime_version\": {\n",
-    "        \"value\": \"3.2.0\"\n",
+    "        \"value\": \"3.3.0\"\n",
     "    }\n",
     "}\n",
     "```"
diff --git a/sdk/python/featurestore_sample/notebooks/sdk_and_cli/network_isolation/Network-isolation-feature-store.ipynb b/sdk/python/featurestore_sample/notebooks/sdk_and_cli/network_isolation/Network-isolation-feature-store.ipynb
index ea6c20ce5a0..bff5f75cba2 100644
--- a/sdk/python/featurestore_sample/notebooks/sdk_and_cli/network_isolation/Network-isolation-feature-store.ipynb
+++ b/sdk/python/featurestore_sample/notebooks/sdk_and_cli/network_isolation/Network-isolation-feature-store.ipynb
@@ -936,7 +936,7 @@
     "    \"$schema\": \"http://azureml/sdk-2-0/FeatureStore.json\",\n",
     "    \"name\": featurestore_name,\n",
     "    \"location\": featurestore_location,\n",
-    "    \"compute_runtime\": {\"spark_runtime_version\": \"3.2\"},\n",
+    "    \"compute_runtime\": {\"spark_runtime_version\": \"3.3\"},\n",
     "    \"offline_store\": {\n",
     "        \"type\": \"azure_data_lake_gen2\",\n",
     "        \"target\": offline_store_gen2_container_arm_id,\n",
diff --git a/sdk/python/featurestore_sample/project/fraud_model/pipelines/batch_inference_pipeline.yaml b/sdk/python/featurestore_sample/project/fraud_model/pipelines/batch_inference_pipeline.yaml
index 7b1393cdc0b..00b5e72f5fa 100644
--- a/sdk/python/featurestore_sample/project/fraud_model/pipelines/batch_inference_pipeline.yaml
+++ b/sdk/python/featurestore_sample/project/fraud_model/pipelines/batch_inference_pipeline.yaml
@@ -28,7 +28,7 @@ jobs:
       observation_data_format: parquet
     resources:
       instance_type: standard_e4s_v3
-      runtime_version: "3.2"
+      runtime_version: "3.3"
     outputs:
       output_data:
         conf:
diff --git a/sdk/python/featurestore_sample/project/fraud_model/pipelines/training_pipeline.yaml b/sdk/python/featurestore_sample/project/fraud_model/pipelines/training_pipeline.yaml
index ec3fbc580fe..aacb6b22782 100644
--- a/sdk/python/featurestore_sample/project/fraud_model/pipelines/training_pipeline.yaml
+++ b/sdk/python/featurestore_sample/project/fraud_model/pipelines/training_pipeline.yaml
@@ -28,7 +28,7 @@ jobs:
       observation_data_format: parquet
     resources:
       instance_type: standard_e4s_v3
-      runtime_version: "3.2"
+      runtime_version: "3.3"
     outputs:
       output_data:
         conf:
diff --git a/sdk/python/jobs/pipelines/1i_pipeline_with_spark_nodes/pipeline_with_spark_nodes.ipynb b/sdk/python/jobs/pipelines/1i_pipeline_with_spark_nodes/pipeline_with_spark_nodes.ipynb
index d737b0df4c5..adbd60d5b6f 100644
--- a/sdk/python/jobs/pipelines/1i_pipeline_with_spark_nodes/pipeline_with_spark_nodes.ipynb
+++ b/sdk/python/jobs/pipelines/1i_pipeline_with_spark_nodes/pipeline_with_spark_nodes.ipynb
@@ -171,7 +171,7 @@
     "    kmeans_clustering = spark_kmeans(file_input=train_data)\n",
     "    kmeans_clustering.resources = {\n",
     "        \"instance_type\": \"Standard_E8S_V3\",\n",
-    "        \"runtime_version\": \"3.2.0\",\n",
+    "        \"runtime_version\": \"3.3.0\",\n",
     "    }\n",
     "    kmeans_clustering.outputs.output.mode = InputOutputModes.DIRECT\n",
     "\n",
diff --git a/sdk/python/jobs/pipelines/1k_demand_forecast_pipeline/aml-demand-forecast-mm-pipeline/aml-demand-forecast-mm-pipeline.ipynb b/sdk/python/jobs/pipelines/1k_demand_forecast_pipeline/aml-demand-forecast-mm-pipeline/aml-demand-forecast-mm-pipeline.ipynb
index c1c178d6c1d..1a84a012993 100644
--- a/sdk/python/jobs/pipelines/1k_demand_forecast_pipeline/aml-demand-forecast-mm-pipeline/aml-demand-forecast-mm-pipeline.ipynb
+++ b/sdk/python/jobs/pipelines/1k_demand_forecast_pipeline/aml-demand-forecast-mm-pipeline/aml-demand-forecast-mm-pipeline.ipynb
@@ -601,7 +601,7 @@
     "|Property|Description|\n",
     "|:-|:-|\n",
     "| **instance_type** | A key that defines the compute instance type to be used for the serverless Spark compute. The following instance types are currently supported: |\n",
-    "| **runtime_version** | A key that defines the Spark runtime version. The following Spark runtime versions are currently supported: |\n",
+    "| **runtime_version** | A key that defines the Spark runtime version. The following Spark runtime versions are currently supported: |\n",
     "| **driver_cores** | The number of cores allocated for the Spark driver. |\n",
     "| **driver_memory** | The allocated memory for the Spark driver, with a size unit suffix `k`, `m`, `g` or `t` (for example, `512m`, `2g`). |\n",
     "| **executor_cores** | The number of cores allocated for the Spark executor. |\n",
@@ -639,7 +639,7 @@
     "if USE_PARTITIONING_COMPONENT:\n",
     "    spark_parameters = dict(\n",
     "        instance_type=\"Standard_E4S_V3\",\n",
-    "        runtime_version=\"3.2.0\",\n",
+    "        runtime_version=\"3.3.0\",\n",
     "        driver_cores=1,\n",
     "        driver_memory=\"2g\",\n",
     "        executor_cores=2,\n",
@@ -704,7 +704,7 @@
     "\n",
     "    partition_step.resources = {\n",
     "        \"instance_type\": spark_parameters.get(\"instance_type\", \"Standard_E4S_V3\"),\n",
-    "        \"runtime_version\": str(spark_parameters.get(\"runtime_version\", \"3.2.0\")),\n",
+    "        \"runtime_version\": str(spark_parameters.get(\"runtime_version\", \"3.3.0\")),\n",
     "    }\n",
     "    partition_step.conf = {\n",
     "        \"spark.driver.cores\": spark_parameters.get(\"driver_cores\", 1),\n",
diff --git a/sdk/python/jobs/spark/automation/run_interactive_session_notebook.ipynb b/sdk/python/jobs/spark/automation/run_interactive_session_notebook.ipynb
index 1d87178f339..9181cd793d8 100644
--- a/sdk/python/jobs/spark/automation/run_interactive_session_notebook.ipynb
+++ b/sdk/python/jobs/spark/automation/run_interactive_session_notebook.ipynb
@@ -45,7 +45,7 @@
     "    executor_instances=2,\n",
     "    resources={\n",
     "        \"instance_type\": \"Standard_E8S_V3\",\n",
-    "        \"runtime_version\": \"3.2.0\",\n",
+    "        \"runtime_version\": \"3.3.0\",\n",
     "    },\n",
     ")\n",
     "\n",
diff --git a/sdk/python/jobs/spark/setup_spark.sh b/sdk/python/jobs/spark/setup_spark.sh
index 371164d6f19..faea23617d9 100644
--- a/sdk/python/jobs/spark/setup_spark.sh
+++ b/sdk/python/jobs/spark/setup_spark.sh
@@ -152,7 +152,7 @@ else
     az storage fs create -n $GEN2_FILE_SYSTEM --account-name $GEN2_STORAGE_NAME
     az synapse workspace create --name $SYNAPSE_WORKSPACE_NAME --resource-group $RESOURCE_GROUP --storage-account $GEN2_STORAGE_NAME --file-system $GEN2_FILE_SYSTEM --sql-admin-login-user $SQL_ADMIN_LOGIN_USER --sql-admin-login-password $RANDOM_STRING --location $LOCATION
     az role assignment create --role "Storage Blob Data Owner" --assignee $AML_USER_MANAGED_ID_OID --scope /subscriptions/$SUBSCRIPTION_ID/resourceGroups/$RESOURCE_GROUP/providers/Microsoft.Storage/storageAccounts/$GEN2_STORAGE_NAME/blobServices/default/containers/$GEN2_FILE_SYSTEM
-    az synapse spark pool create --name $SPARK_POOL_NAME --workspace-name $SYNAPSE_WORKSPACE_NAME --resource-group $RESOURCE_GROUP --spark-version 3.2 --node-count 3 --node-size Medium --min-node-count 3 --max-node-count 10 --enable-auto-scale true
+    az synapse spark pool create --name $SPARK_POOL_NAME --workspace-name $SYNAPSE_WORKSPACE_NAME --resource-group $RESOURCE_GROUP --spark-version 3.3 --node-count 3 --node-size Medium --min-node-count 3 --max-node-count 10 --enable-auto-scale true
     az synapse workspace firewall-rule create --name allowAll --workspace-name $SYNAPSE_WORKSPACE_NAME --resource-group $RESOURCE_GROUP --start-ip-address 0.0.0.0 --end-ip-address 255.255.255.255
 
 #
diff --git a/sdk/python/jobs/spark/submit_spark_pipeline_jobs.ipynb b/sdk/python/jobs/spark/submit_spark_pipeline_jobs.ipynb
index 8f7b672f80c..1a336efc40d 100644
--- a/sdk/python/jobs/spark/submit_spark_pipeline_jobs.ipynb
+++ b/sdk/python/jobs/spark/submit_spark_pipeline_jobs.ipynb
@@ -342,7 +342,7 @@
     "    spark_step.identity = ManagedIdentityConfiguration()\n",
     "    spark_step.resources = {\n",
     "        \"instance_type\": \"Standard_E8S_V3\",\n",
-    "        \"runtime_version\": \"3.2.0\",\n",
+    "        \"runtime_version\": \"3.3.0\",\n",
     "    }\n",
     "\n",
     "\n",
@@ -422,7 +422,7 @@
     "    spark_step.identity = UserIdentityConfiguration()\n",
     "    spark_step.resources = {\n",
     "        \"instance_type\": \"Standard_E8S_V3\",\n",
-    "        \"runtime_version\": \"3.2.0\",\n",
+    "        \"runtime_version\": \"3.3.0\",\n",
     "    }\n",
     "\n",
     "\n",
@@ -501,7 +501,7 @@
     "    spark_step.outputs.wrangled_data.mode = InputOutputModes.DIRECT\n",
     "    spark_step.resources = {\n",
     "        \"instance_type\": \"Standard_E8S_V3\",\n",
-    "        \"runtime_version\": \"3.2.0\",\n",
+    "        \"runtime_version\": \"3.3.0\",\n",
     "    }\n",
     "\n",
     "\n",
diff --git a/sdk/python/jobs/spark/submit_spark_standalone_jobs.ipynb b/sdk/python/jobs/spark/submit_spark_standalone_jobs.ipynb
index 522a421ad75..245c2671222 100644
--- a/sdk/python/jobs/spark/submit_spark_standalone_jobs.ipynb
+++ b/sdk/python/jobs/spark/submit_spark_standalone_jobs.ipynb
@@ -267,7 +267,7 @@
     "    executor_instances=2,\n",
     "    resources={\n",
     "        \"instance_type\": \"Standard_E8S_V3\",\n",
-    "        \"runtime_version\": \"3.2.0\",\n",
+    "        \"runtime_version\": \"3.3.0\",\n",
     "    },\n",
     "    inputs={\n",
     "        \"titanic_data\": Input(\n",
@@ -329,7 +329,7 @@
     "    executor_instances=2,\n",
     "    resources={\n",
     "        \"instance_type\": \"Standard_E8S_V3\",\n",
-    "        \"runtime_version\": \"3.2.0\",\n",
+    "        \"runtime_version\": \"3.3.0\",\n",
     "    },\n",
     "    inputs={\n",
     "        \"titanic_data\": Input(\n",
@@ -391,7 +391,7 @@
     "    executor_instances=2,\n",
     "    resources={\n",
     "        \"instance_type\": \"Standard_E8S_V3\",\n",
-    "        \"runtime_version\": \"3.2.0\",\n",
+    "        \"runtime_version\": \"3.3.0\",\n",
     "    },\n",
     "    inputs={\n",
     "        \"titanic_data\": Input(\n",
diff --git a/sdk/python/jobs/spark/submit_spark_standalone_jobs_managed_vnet.ipynb b/sdk/python/jobs/spark/submit_spark_standalone_jobs_managed_vnet.ipynb
index 515ea84b65b..cfa920dfae9 100644
--- a/sdk/python/jobs/spark/submit_spark_standalone_jobs_managed_vnet.ipynb
+++ b/sdk/python/jobs/spark/submit_spark_standalone_jobs_managed_vnet.ipynb
@@ -531,7 +531,7 @@
     "    executor_instances=2,\n",
     "    resources={\n",
     "        \"instance_type\": \"Standard_E8S_V3\",\n",
-    "        \"runtime_version\": \"3.2.0\",\n",
+    "        \"runtime_version\": \"3.3.0\",\n",
     "    },\n",
     "    inputs={\n",
     "        \"titanic_data\": Input(\n",
@@ -765,7 +765,7 @@
     "    executor_instances=2,\n",
     "    resources={\n",
     "        \"instance_type\": \"Standard_E8S_V3\",\n",
-    "        \"runtime_version\": \"3.2.0\",\n",
+    "        \"runtime_version\": \"3.3.0\",\n",
     "    },\n",
     "    inputs={\n",
     "        \"titanic_data\": Input(\n",