From a97f1d0ad0e7b92203f35c5b0b9af3a314952e05 Mon Sep 17 00:00:00 2001
From: Hongye Sun
Date: Wed, 21 Aug 2019 14:04:17 -0700
Subject: [PATCH] Updated component images to version 151c5349f13bea9d626c988563c04c0a86210c21

---
 components/dataflow/predict/component.yaml            |  2 +-
 components/dataflow/tfdv/component.yaml                |  2 +-
 components/dataflow/tfma/component.yaml                |  2 +-
 components/dataflow/tft/component.yaml                 |  2 +-
 components/gcp/bigquery/query/component.yaml           |  2 +-
 components/gcp/dataflow/launch_python/component.yaml   |  2 +-
 .../gcp/dataflow/launch_template/component.yaml        |  2 +-
 .../gcp/dataproc/create_cluster/component.yaml         |  2 +-
 .../gcp/dataproc/delete_cluster/component.yaml         |  2 +-
 .../gcp/dataproc/submit_hadoop_job/component.yaml      |  2 +-
 .../gcp/dataproc/submit_hive_job/component.yaml        |  2 +-
 .../gcp/dataproc/submit_pig_job/component.yaml         |  2 +-
 .../gcp/dataproc/submit_pyspark_job/component.yaml     |  2 +-
 .../gcp/dataproc/submit_spark_job/component.yaml       |  2 +-
 .../gcp/dataproc/submit_sparksql_job/component.yaml    |  2 +-
 .../gcp/ml_engine/batch_predict/component.yaml         |  2 +-
 components/gcp/ml_engine/deploy/component.yaml         |  2 +-
 components/gcp/ml_engine/train/component.yaml          |  2 +-
 components/kubeflow/deployer/component.yaml            |  2 +-
 components/kubeflow/dnntrainer/component.yaml          |  2 +-
 .../kubeflow/launcher/kubeflow_tfjob_launcher_op.py    |  2 +-
 components/kubeflow/launcher/src/train.template.yaml   |  6 +++---
 components/local/confusion_matrix/component.yaml       |  2 +-
 components/local/roc/component.yaml                    |  2 +-
 .../kubeflow_tf_serving/kubeflow_tf_serving.ipynb      |  2 +-
 .../core/xgboost_training_cm/xgboost_training_cm.py    | 12 ++++++------
 26 files changed, 33 insertions(+), 33 deletions(-)

diff --git a/components/dataflow/predict/component.yaml b/components/dataflow/predict/component.yaml
index 30f2098b97a..6fae9083fb3 100644
--- a/components/dataflow/predict/component.yaml
+++ b/components/dataflow/predict/component.yaml
@@ -15,7 +15,7 @@ outputs:
 - {name: Predictions dir, type: GCSPath, description: 'GCS or local directory.'} #Will contain prediction_results-* and schema.json files; TODO: Split outputs and replace dir with single file # type: {GCSPath: {path_type: Directory}}
 implementation:
   container:
-    image: gcr.io/ml-pipeline/ml-pipeline-dataflow-tf-predict:0517114dc2b365a4a6d95424af6157ead774eff3
+    image: gcr.io/ml-pipeline/ml-pipeline-dataflow-tf-predict:151c5349f13bea9d626c988563c04c0a86210c21
     command: [python2, /ml/predict.py]
     args: [
       --data, {inputValue: Data file pattern},
diff --git a/components/dataflow/tfdv/component.yaml b/components/dataflow/tfdv/component.yaml
index f5fffcffb08..b72a358fa73 100644
--- a/components/dataflow/tfdv/component.yaml
+++ b/components/dataflow/tfdv/component.yaml
@@ -18,7 +18,7 @@ outputs:
 - {name: Validation result, type: String, description: Indicates whether anomalies were detected or not.}
 implementation:
   container:
-    image: gcr.io/ml-pipeline/ml-pipeline-dataflow-tfdv:0517114dc2b365a4a6d95424af6157ead774eff3
+    image: gcr.io/ml-pipeline/ml-pipeline-dataflow-tfdv:151c5349f13bea9d626c988563c04c0a86210c21
     command: [python2, /ml/validate.py]
     args: [
       --csv-data-for-inference, {inputValue: Inference data},
diff --git a/components/dataflow/tfma/component.yaml b/components/dataflow/tfma/component.yaml
index 975fa6108b9..bc865d6d4c0 100644
--- a/components/dataflow/tfma/component.yaml
+++ b/components/dataflow/tfma/component.yaml
@@ -17,7 +17,7 @@ outputs:
 - {name: Analysis results dir, type: GCSPath, description: GCS or local directory where the analysis results should were written.} # type: {GCSPath: {path_type: Directory}}
 implementation:
   container:
-    image: gcr.io/ml-pipeline/ml-pipeline-dataflow-tfma:0517114dc2b365a4a6d95424af6157ead774eff3
+    image: gcr.io/ml-pipeline/ml-pipeline-dataflow-tfma:151c5349f13bea9d626c988563c04c0a86210c21
     command: [python2, /ml/model_analysis.py]
     args: [
       --model, {inputValue: Model},
diff --git a/components/dataflow/tft/component.yaml b/components/dataflow/tft/component.yaml
index bffa00681ca..c77ea6886ba 100644
--- a/components/dataflow/tft/component.yaml
+++ b/components/dataflow/tft/component.yaml
@@ -12,7 +12,7 @@ outputs:
 - {name: Transformed data dir, type: GCSPath} # type: {GCSPath: {path_type: Directory}}
 implementation:
   container:
-    image: gcr.io/ml-pipeline/ml-pipeline-dataflow-tft:0517114dc2b365a4a6d95424af6157ead774eff3
+    image: gcr.io/ml-pipeline/ml-pipeline-dataflow-tft:151c5349f13bea9d626c988563c04c0a86210c21
     command: [python2, /ml/transform.py]
     args: [
       --train, {inputValue: Training data file pattern},
diff --git a/components/gcp/bigquery/query/component.yaml b/components/gcp/bigquery/query/component.yaml
index 0d8842e3efd..07f29549b40 100644
--- a/components/gcp/bigquery/query/component.yaml
+++ b/components/gcp/bigquery/query/component.yaml
@@ -57,7 +57,7 @@ outputs:
     type: GCSPath
 implementation:
   container:
-    image: gcr.io/ml-pipeline/ml-pipeline-gcp:0517114dc2b365a4a6d95424af6157ead774eff3
+    image: gcr.io/ml-pipeline/ml-pipeline-gcp:151c5349f13bea9d626c988563c04c0a86210c21
     args: [
       kfp_component.google.bigquery, query,
      --query, {inputValue: query},
diff --git a/components/gcp/dataflow/launch_python/component.yaml b/components/gcp/dataflow/launch_python/component.yaml
index 55756e88a3f..920408aa48d 100644
--- a/components/gcp/dataflow/launch_python/component.yaml
+++ b/components/gcp/dataflow/launch_python/component.yaml
@@ -51,7 +51,7 @@ outputs:
     type: String
 implementation:
   container:
-    image: gcr.io/ml-pipeline/ml-pipeline-gcp:0517114dc2b365a4a6d95424af6157ead774eff3
+    image: gcr.io/ml-pipeline/ml-pipeline-gcp:151c5349f13bea9d626c988563c04c0a86210c21
     args: [
       kfp_component.google.dataflow, launch_python,
       --python_file_path, {inputValue: python_file_path},
diff --git a/components/gcp/dataflow/launch_template/component.yaml b/components/gcp/dataflow/launch_template/component.yaml
index 3817bac9401..09f29d0fab9 100644
--- a/components/gcp/dataflow/launch_template/component.yaml
+++ b/components/gcp/dataflow/launch_template/component.yaml
@@ -61,7 +61,7 @@ outputs:
     type: String
 implementation:
   container:
-    image: gcr.io/ml-pipeline/ml-pipeline-gcp:0517114dc2b365a4a6d95424af6157ead774eff3
+    image: gcr.io/ml-pipeline/ml-pipeline-gcp:151c5349f13bea9d626c988563c04c0a86210c21
     args: [
       kfp_component.google.dataflow, launch_template,
       --project_id, {inputValue: project_id},
diff --git a/components/gcp/dataproc/create_cluster/component.yaml b/components/gcp/dataproc/create_cluster/component.yaml
index f517d1631e1..c5cad047204 100644
--- a/components/gcp/dataproc/create_cluster/component.yaml
+++ b/components/gcp/dataproc/create_cluster/component.yaml
@@ -68,7 +68,7 @@ outputs:
     type: String
 implementation:
   container:
-    image: gcr.io/ml-pipeline/ml-pipeline-gcp:0517114dc2b365a4a6d95424af6157ead774eff3
+    image: gcr.io/ml-pipeline/ml-pipeline-gcp:151c5349f13bea9d626c988563c04c0a86210c21
     args: [
       kfp_component.google.dataproc, create_cluster,
       --project_id, {inputValue: project_id},
diff --git a/components/gcp/dataproc/delete_cluster/component.yaml b/components/gcp/dataproc/delete_cluster/component.yaml
index d16d2a58a80..15654a8f24e 100644
--- a/components/gcp/dataproc/delete_cluster/component.yaml
+++ b/components/gcp/dataproc/delete_cluster/component.yaml
@@ -36,7 +36,7 @@ inputs:
     type: Integer
 implementation:
   container:
-    image: gcr.io/ml-pipeline/ml-pipeline-gcp:0517114dc2b365a4a6d95424af6157ead774eff3
+    image: gcr.io/ml-pipeline/ml-pipeline-gcp:151c5349f13bea9d626c988563c04c0a86210c21
     args: [
       kfp_component.google.dataproc, delete_cluster,
       --project_id, {inputValue: project_id},
diff --git a/components/gcp/dataproc/submit_hadoop_job/component.yaml b/components/gcp/dataproc/submit_hadoop_job/component.yaml
index d62609a1bea..8e73c7a4d7a 100644
--- a/components/gcp/dataproc/submit_hadoop_job/component.yaml
+++ b/components/gcp/dataproc/submit_hadoop_job/component.yaml
@@ -78,7 +78,7 @@ outputs:
     type: String
 implementation:
   container:
-    image: gcr.io/ml-pipeline/ml-pipeline-gcp:0517114dc2b365a4a6d95424af6157ead774eff3
+    image: gcr.io/ml-pipeline/ml-pipeline-gcp:151c5349f13bea9d626c988563c04c0a86210c21
     args: [
       kfp_component.google.dataproc, submit_hadoop_job,
       --project_id, {inputValue: project_id},
diff --git a/components/gcp/dataproc/submit_hive_job/component.yaml b/components/gcp/dataproc/submit_hive_job/component.yaml
index 54250a22e5a..3140de61005 100644
--- a/components/gcp/dataproc/submit_hive_job/component.yaml
+++ b/components/gcp/dataproc/submit_hive_job/component.yaml
@@ -73,7 +73,7 @@ outputs:
     type: String
 implementation:
   container:
-    image: gcr.io/ml-pipeline/ml-pipeline-gcp:0517114dc2b365a4a6d95424af6157ead774eff3
+    image: gcr.io/ml-pipeline/ml-pipeline-gcp:151c5349f13bea9d626c988563c04c0a86210c21
     args: [
       kfp_component.google.dataproc, submit_hive_job,
       --project_id, {inputValue: project_id},
diff --git a/components/gcp/dataproc/submit_pig_job/component.yaml b/components/gcp/dataproc/submit_pig_job/component.yaml
index 26a4fb9f998..52e314afe1c 100644
--- a/components/gcp/dataproc/submit_pig_job/component.yaml
+++ b/components/gcp/dataproc/submit_pig_job/component.yaml
@@ -73,7 +73,7 @@ outputs:
     type: String
 implementation:
   container:
-    image: gcr.io/ml-pipeline/ml-pipeline-gcp:0517114dc2b365a4a6d95424af6157ead774eff3
+    image: gcr.io/ml-pipeline/ml-pipeline-gcp:151c5349f13bea9d626c988563c04c0a86210c21
     args: [
       kfp_component.google.dataproc, submit_pig_job,
       --project_id, {inputValue: project_id},
diff --git a/components/gcp/dataproc/submit_pyspark_job/component.yaml b/components/gcp/dataproc/submit_pyspark_job/component.yaml
index d3ab4f617ec..30036ea18c0 100644
--- a/components/gcp/dataproc/submit_pyspark_job/component.yaml
+++ b/components/gcp/dataproc/submit_pyspark_job/component.yaml
@@ -67,7 +67,7 @@ outputs:
     type: String
 implementation:
   container:
-    image: gcr.io/ml-pipeline/ml-pipeline-gcp:0517114dc2b365a4a6d95424af6157ead774eff3
+    image: gcr.io/ml-pipeline/ml-pipeline-gcp:151c5349f13bea9d626c988563c04c0a86210c21
     args: [
       kfp_component.google.dataproc, submit_pyspark_job,
       --project_id, {inputValue: project_id},
diff --git a/components/gcp/dataproc/submit_spark_job/component.yaml b/components/gcp/dataproc/submit_spark_job/component.yaml
index 825e6769952..d585b97e0af 100644
--- a/components/gcp/dataproc/submit_spark_job/component.yaml
+++ b/components/gcp/dataproc/submit_spark_job/component.yaml
@@ -74,7 +74,7 @@ outputs:
     type: String
 implementation:
   container:
-    image: gcr.io/ml-pipeline/ml-pipeline-gcp:0517114dc2b365a4a6d95424af6157ead774eff3
+    image: gcr.io/ml-pipeline/ml-pipeline-gcp:151c5349f13bea9d626c988563c04c0a86210c21
     args: [
       kfp_component.google.dataproc, submit_spark_job,
       --project_id, {inputValue: project_id},
diff --git a/components/gcp/dataproc/submit_sparksql_job/component.yaml b/components/gcp/dataproc/submit_sparksql_job/component.yaml
index 1a9b30535a7..119e8592d35 100644
--- a/components/gcp/dataproc/submit_sparksql_job/component.yaml
+++ b/components/gcp/dataproc/submit_sparksql_job/component.yaml
@@ -73,7 +73,7 @@ outputs:
     type: String
 implementation:
   container:
-    image: gcr.io/ml-pipeline/ml-pipeline-gcp:0517114dc2b365a4a6d95424af6157ead774eff3
+    image: gcr.io/ml-pipeline/ml-pipeline-gcp:151c5349f13bea9d626c988563c04c0a86210c21
     args: [
       kfp_component.google.dataproc, submit_sparksql_job,
       --project_id, {inputValue: project_id},
diff --git a/components/gcp/ml_engine/batch_predict/component.yaml b/components/gcp/ml_engine/batch_predict/component.yaml
index 8b4a4c7bfdf..0a3bbf1fb36 100644
--- a/components/gcp/ml_engine/batch_predict/component.yaml
+++ b/components/gcp/ml_engine/batch_predict/component.yaml
@@ -67,7 +67,7 @@ outputs:
     type: String
 implementation:
   container:
-    image: gcr.io/ml-pipeline/ml-pipeline-gcp:0517114dc2b365a4a6d95424af6157ead774eff3
+    image: gcr.io/ml-pipeline/ml-pipeline-gcp:151c5349f13bea9d626c988563c04c0a86210c21
     args: [
       kfp_component.google.ml_engine, batch_predict,
       --project_id, {inputValue: project_id},
diff --git a/components/gcp/ml_engine/deploy/component.yaml b/components/gcp/ml_engine/deploy/component.yaml
index b0a9d86ff65..5b4280cefdb 100644
--- a/components/gcp/ml_engine/deploy/component.yaml
+++ b/components/gcp/ml_engine/deploy/component.yaml
@@ -93,7 +93,7 @@ outputs:
     type: String
 implementation:
   container:
-    image: gcr.io/ml-pipeline/ml-pipeline-gcp:0517114dc2b365a4a6d95424af6157ead774eff3
+    image: gcr.io/ml-pipeline/ml-pipeline-gcp:151c5349f13bea9d626c988563c04c0a86210c21
     args: [
       kfp_component.google.ml_engine, deploy,
       --model_uri, {inputValue: model_uri},
diff --git a/components/gcp/ml_engine/train/component.yaml b/components/gcp/ml_engine/train/component.yaml
index 5102db4e96d..0f6dd63adcc 100644
--- a/components/gcp/ml_engine/train/component.yaml
+++ b/components/gcp/ml_engine/train/component.yaml
@@ -101,7 +101,7 @@ outputs:
     type: GCSPath
 implementation:
   container:
-    image: gcr.io/ml-pipeline/ml-pipeline-gcp:0517114dc2b365a4a6d95424af6157ead774eff3
+    image: gcr.io/ml-pipeline/ml-pipeline-gcp:151c5349f13bea9d626c988563c04c0a86210c21
     args: [
       kfp_component.google.ml_engine, train,
       --project_id, {inputValue: project_id},
diff --git a/components/kubeflow/deployer/component.yaml b/components/kubeflow/deployer/component.yaml
index 50916b1ced5..583615f702f 100644
--- a/components/kubeflow/deployer/component.yaml
+++ b/components/kubeflow/deployer/component.yaml
@@ -11,7 +11,7 @@ inputs:
 # - {name: Endppoint URI, type: Serving URI, description: 'URI of the deployed prediction service..'}
 implementation:
   container:
-    image: gcr.io/ml-pipeline/ml-pipeline-kubeflow-deployer:0517114dc2b365a4a6d95424af6157ead774eff3
+    image: gcr.io/ml-pipeline/ml-pipeline-kubeflow-deployer:151c5349f13bea9d626c988563c04c0a86210c21
     command: [/bin/deploy.sh]
     args: [
       --model-export-path, {inputValue: Model dir},
diff --git a/components/kubeflow/dnntrainer/component.yaml b/components/kubeflow/dnntrainer/component.yaml
index a1ff245bd8c..70b5a25a9ee 100644
--- a/components/kubeflow/dnntrainer/component.yaml
+++ b/components/kubeflow/dnntrainer/component.yaml
@@ -15,7 +15,7 @@ outputs:
 - {name: Training output dir, type: GCSPath, description: 'GCS or local directory.'} # type: {GCSPath: {path_type: Directory}}
 implementation:
   container:
-    image: gcr.io/ml-pipeline/ml-pipeline-kubeflow-tf-trainer:0517114dc2b365a4a6d95424af6157ead774eff3
+    image: gcr.io/ml-pipeline/ml-pipeline-kubeflow-tf-trainer:151c5349f13bea9d626c988563c04c0a86210c21
     command: [python2, -m, trainer.task]
     args: [
       --transformed-data-dir, {inputValue: Transformed data dir},
diff --git a/components/kubeflow/launcher/kubeflow_tfjob_launcher_op.py b/components/kubeflow/launcher/kubeflow_tfjob_launcher_op.py
index 4d5b7febbbf..be10cdc8115 100644
--- a/components/kubeflow/launcher/kubeflow_tfjob_launcher_op.py
+++ b/components/kubeflow/launcher/kubeflow_tfjob_launcher_op.py
@@ -17,7 +17,7 @@
 def kubeflow_tfjob_launcher_op(container_image, command, number_of_workers: int, number_of_parameter_servers: int, tfjob_timeout_minutes: int, output_dir=None, step_name='TFJob-launcher'):
   return dsl.ContainerOp(
     name = step_name,
-    image = 'gcr.io/ml-pipeline/ml-pipeline-kubeflow-tf:0517114dc2b365a4a6d95424af6157ead774eff3',
+    image = 'gcr.io/ml-pipeline/ml-pipeline-kubeflow-tf:151c5349f13bea9d626c988563c04c0a86210c21',
     arguments = [
       '--workers', number_of_workers,
       '--pss', number_of_parameter_servers,
diff --git a/components/kubeflow/launcher/src/train.template.yaml b/components/kubeflow/launcher/src/train.template.yaml
index fd851389080..4e8d660e72b 100644
--- a/components/kubeflow/launcher/src/train.template.yaml
+++ b/components/kubeflow/launcher/src/train.template.yaml
@@ -26,7 +26,7 @@ spec:
         spec:
           containers:
          - name: tensorflow
-            image: gcr.io/ml-pipeline/ml-pipeline-kubeflow-tf-trainer:0517114dc2b365a4a6d95424af6157ead774eff3
+            image: gcr.io/ml-pipeline/ml-pipeline-kubeflow-tf-trainer:151c5349f13bea9d626c988563c04c0a86210c21
             command:
             - python
             - -m
@@ -49,7 +49,7 @@ spec:
         spec:
           containers:
          - name: tensorflow
-            image: gcr.io/ml-pipeline/ml-pipeline-kubeflow-tf-trainer:0517114dc2b365a4a6d95424af6157ead774eff3
+            image: gcr.io/ml-pipeline/ml-pipeline-kubeflow-tf-trainer:151c5349f13bea9d626c988563c04c0a86210c21
             command:
             - python
             - -m
@@ -72,7 +72,7 @@ spec:
        spec:
          containers:
          - name: tensorflow
-            image: gcr.io/ml-pipeline/ml-pipeline-kubeflow-tf-trainer:0517114dc2b365a4a6d95424af6157ead774eff3
+            image: gcr.io/ml-pipeline/ml-pipeline-kubeflow-tf-trainer:151c5349f13bea9d626c988563c04c0a86210c21
             command:
             - python
             - -m
diff --git a/components/local/confusion_matrix/component.yaml b/components/local/confusion_matrix/component.yaml
index 4e9a098a7ac..12d21d15e10 100644
--- a/components/local/confusion_matrix/component.yaml
+++ b/components/local/confusion_matrix/component.yaml
@@ -9,7 +9,7 @@ inputs:
 # - {name: Metrics, type: Metrics}
 implementation:
   container:
-    image: gcr.io/ml-pipeline/ml-pipeline-local-confusion-matrix:0517114dc2b365a4a6d95424af6157ead774eff3
+    image: gcr.io/ml-pipeline/ml-pipeline-local-confusion-matrix:151c5349f13bea9d626c988563c04c0a86210c21
     command: [python2, /ml/confusion_matrix.py]
     args: [
       --predictions, {inputValue: Predictions},
diff --git a/components/local/roc/component.yaml b/components/local/roc/component.yaml
index 54d09b81ca3..4110ca34ee0 100644
--- a/components/local/roc/component.yaml
+++ b/components/local/roc/component.yaml
@@ -11,7 +11,7 @@ inputs:
 # - {name: Metrics, type: Metrics}
 implementation:
   container:
-    image: gcr.io/ml-pipeline/ml-pipeline-local-confusion-matrix:0517114dc2b365a4a6d95424af6157ead774eff3
+    image: gcr.io/ml-pipeline/ml-pipeline-local-confusion-matrix:151c5349f13bea9d626c988563c04c0a86210c21
     command: [python2, /ml/roc.py]
     args: [
       --predictions, {inputValue: Predictions dir},
diff --git a/samples/core/kubeflow_tf_serving/kubeflow_tf_serving.ipynb b/samples/core/kubeflow_tf_serving/kubeflow_tf_serving.ipynb
index 16215b0df15..25e98ce2233 100644
--- a/samples/core/kubeflow_tf_serving/kubeflow_tf_serving.ipynb
+++ b/samples/core/kubeflow_tf_serving/kubeflow_tf_serving.ipynb
@@ -71,7 +71,7 @@
     "EXPERIMENT_NAME = 'serving_component'\n",
     "MODEL_VERSION = '1' # A number representing the version model \n",
     "OUTPUT_BUCKET = 'gs://%s-serving-component' % PROJECT_NAME # A GCS bucket for asset outputs\n",
-    "KUBEFLOW_DEPLOYER_IMAGE = 'gcr.io/ml-pipeline/ml-pipeline-kubeflow-deployer:fe639f41661d8e17fcda64ff8242127620b80ba0'\n",
+    "KUBEFLOW_DEPLOYER_IMAGE = 'gcr.io/ml-pipeline/ml-pipeline-kubeflow-deployer:151c5349f13bea9d626c988563c04c0a86210c21'\n",
     "MODEL_PATH = '%s/%s' % (OUTPUT_BUCKET,MODEL_NAME) \n",
     "MODEL_VERSION_PATH = '%s/%s/%s' % (OUTPUT_BUCKET,MODEL_NAME,MODEL_VERSION)"
    ]
diff --git a/samples/core/xgboost_training_cm/xgboost_training_cm.py b/samples/core/xgboost_training_cm/xgboost_training_cm.py
index f5a6d0575fb..14b885b8ded 100755
--- a/samples/core/xgboost_training_cm/xgboost_training_cm.py
+++ b/samples/core/xgboost_training_cm/xgboost_training_cm.py
@@ -36,7 +36,7 @@ def dataproc_create_cluster_op(
 ):
     return dsl.ContainerOp(
         name='Dataproc - Create cluster',
-        image='gcr.io/ml-pipeline/ml-pipeline-dataproc-create-cluster:0517114dc2b365a4a6d95424af6157ead774eff3',
+        image='gcr.io/ml-pipeline/ml-pipeline-dataproc-create-cluster:151c5349f13bea9d626c988563c04c0a86210c21',
         arguments=[
             '--project', project,
             '--region', region,
@@ -56,7 +56,7 @@ def dataproc_delete_cluster_op(
 ):
     return dsl.ContainerOp(
         name='Dataproc - Delete cluster',
-        image='gcr.io/ml-pipeline/ml-pipeline-dataproc-delete-cluster:0517114dc2b365a4a6d95424af6157ead774eff3',
+        image='gcr.io/ml-pipeline/ml-pipeline-dataproc-delete-cluster:151c5349f13bea9d626c988563c04c0a86210c21',
         arguments=[
             '--project', project,
             '--region', region,
@@ -76,7 +76,7 @@ def dataproc_analyze_op(
 ):
     return dsl.ContainerOp(
         name='Dataproc - Analyze',
-        image='gcr.io/ml-pipeline/ml-pipeline-dataproc-analyze:0517114dc2b365a4a6d95424af6157ead774eff3',
+        image='gcr.io/ml-pipeline/ml-pipeline-dataproc-analyze:151c5349f13bea9d626c988563c04c0a86210c21',
         arguments=[
             '--project', project,
             '--region', region,
@@ -103,7 +103,7 @@ def dataproc_transform_op(
 ):
     return dsl.ContainerOp(
         name='Dataproc - Transform',
-        image='gcr.io/ml-pipeline/ml-pipeline-dataproc-transform:0517114dc2b365a4a6d95424af6157ead774eff3',
+        image='gcr.io/ml-pipeline/ml-pipeline-dataproc-transform:151c5349f13bea9d626c988563c04c0a86210c21',
         arguments=[
             '--project', project,
             '--region', region,
@@ -141,7 +141,7 @@ def dataproc_train_op(

     return dsl.ContainerOp(
         name='Dataproc - Train XGBoost model',
-        image='gcr.io/ml-pipeline/ml-pipeline-dataproc-train:0517114dc2b365a4a6d95424af6157ead774eff3',
+        image='gcr.io/ml-pipeline/ml-pipeline-dataproc-train:151c5349f13bea9d626c988563c04c0a86210c21',
         arguments=[
             '--project', project,
             '--region', region,
@@ -174,7 +174,7 @@ def dataproc_predict_op(
 ):
     return dsl.ContainerOp(
         name='Dataproc - Predict with XGBoost model',
-        image='gcr.io/ml-pipeline/ml-pipeline-dataproc-predict:0517114dc2b365a4a6d95424af6157ead774eff3',
+        image='gcr.io/ml-pipeline/ml-pipeline-dataproc-predict:151c5349f13bea9d626c988563c04c0a86210c21',
         arguments=[
             '--project', project,
             '--region', region,