From a0d4be3851ef2dacfba112f883fe9607e342fa45 Mon Sep 17 00:00:00 2001
From: Yasser Elsayed
Date: Wed, 16 Jan 2019 17:30:49 -0800
Subject: [PATCH] component release script (#694)

---
 .../launcher/kubeflow_tfjob_launcher_op.py        |  2 +-
 .../kubeflow/launcher/src/train.template.yaml     |  6 +++---
 .../kubeflow-training-classification.py           | 10 +++++-----
 ...eFlow Pipeline Using TFX OSS Components.ipynb  | 14 +++++++-------
 samples/resnet-cmle/resnet-train-pipeline.py      |  6 +++---
 samples/tfx/taxi-cab-classification-pipeline.py   | 16 ++++++++--------
 samples/xgboost-spark/xgboost-training-cm.py      | 16 ++++++++--------
 7 files changed, 35 insertions(+), 35 deletions(-)

diff --git a/components/kubeflow/launcher/kubeflow_tfjob_launcher_op.py b/components/kubeflow/launcher/kubeflow_tfjob_launcher_op.py
index 884476e0c99..d905247ebec 100644
--- a/components/kubeflow/launcher/kubeflow_tfjob_launcher_op.py
+++ b/components/kubeflow/launcher/kubeflow_tfjob_launcher_op.py
@@ -17,7 +17,7 @@
 def kubeflow_tfjob_launcher_op(container_image, command, number_of_workers: int, number_of_parameter_servers: int, tfjob_timeout_minutes: int, output_dir=None, step_name='TFJob-launcher'):
     return dsl.ContainerOp(
         name = step_name,
-        image = 'gcr.io/ml-pipeline/ml-pipeline-kubeflow-tf:d3c4add0a95e930c70a330466d0923827784eb9a',
+        image = 'gcr.io/ml-pipeline/ml-pipeline-kubeflow-tf:2ed60100d1db9efeb38c6c358f90b21c144179be',
         arguments = [
             '--workers', number_of_workers,
             '--pss', number_of_parameter_servers,
diff --git a/components/kubeflow/launcher/src/train.template.yaml b/components/kubeflow/launcher/src/train.template.yaml
index 29db697e96f..d04df755ca6 100644
--- a/components/kubeflow/launcher/src/train.template.yaml
+++ b/components/kubeflow/launcher/src/train.template.yaml
@@ -26,7 +26,7 @@ spec:
         spec:
           containers:
           - name: tensorflow
-            image: gcr.io/ml-pipeline/ml-pipeline-kubeflow-tf-trainer:d3c4add0a95e930c70a330466d0923827784eb9a
+            image: gcr.io/ml-pipeline/ml-pipeline-kubeflow-tf-trainer:2ed60100d1db9efeb38c6c358f90b21c144179be
             command:
             - python
             - -m
@@ -49,7 +49,7 @@ spec:
         spec:
          containers:
           - name: tensorflow
-            image: gcr.io/ml-pipeline/ml-pipeline-kubeflow-tf-trainer:d3c4add0a95e930c70a330466d0923827784eb9a
+            image: gcr.io/ml-pipeline/ml-pipeline-kubeflow-tf-trainer:2ed60100d1db9efeb38c6c358f90b21c144179be
             command:
             - python
             - -m
@@ -72,7 +72,7 @@ spec:
         spec:
          containers:
           - name: tensorflow
-            image: gcr.io/ml-pipeline/ml-pipeline-kubeflow-tf-trainer:d3c4add0a95e930c70a330466d0923827784eb9a
+            image: gcr.io/ml-pipeline/ml-pipeline-kubeflow-tf-trainer:2ed60100d1db9efeb38c6c358f90b21c144179be
             command:
             - python
             - -m
diff --git a/samples/kubeflow-tf/kubeflow-training-classification.py b/samples/kubeflow-tf/kubeflow-training-classification.py
index 6a854987de8..8ed69a20dff 100755
--- a/samples/kubeflow-tf/kubeflow-training-classification.py
+++ b/samples/kubeflow-tf/kubeflow-training-classification.py
@@ -21,7 +21,7 @@
 def dataflow_tf_transform_op(train_data: 'GcsUri', evaluation_data: 'GcsUri', schema: 'GcsUri[text/json]', project: 'GcpProject', preprocess_mode, preprocess_module: 'GcsUri[text/code/python]', transform_output: 'GcsUri[Directory]', step_name='preprocess'):
     return dsl.ContainerOp(
         name = step_name,
-        image = 'gcr.io/ml-pipeline/ml-pipeline-dataflow-tft:d3c4add0a95e930c70a330466d0923827784eb9a',
+        image = 'gcr.io/ml-pipeline/ml-pipeline-dataflow-tft:2ed60100d1db9efeb38c6c358f90b21c144179be',
         arguments = [
             '--train', train_data,
             '--eval', evaluation_data,
@@ -38,7 +38,7 @@ def dataflow_tf_transform_op(train_data: 'GcsUri', evaluation_data: 'GcsUri', sc
 def kubeflow_tf_training_op(transformed_data_dir, schema: 'GcsUri[text/json]', learning_rate: float, hidden_layer_size: int, steps: int, target, preprocess_module: 'GcsUri[text/code/python]', training_output: 'GcsUri[Directory]', step_name='training', use_gpu=False):
     kubeflow_tf_training_op = dsl.ContainerOp(
         name = step_name,
-        image = 'gcr.io/ml-pipeline/ml-pipeline-kubeflow-tf-trainer:d3c4add0a95e930c70a330466d0923827784eb9a',
+        image = 'gcr.io/ml-pipeline/ml-pipeline-kubeflow-tf-trainer:2ed60100d1db9efeb38c6c358f90b21c144179be',
         arguments = [
             '--transformed-data-dir', transformed_data_dir,
             '--schema', schema,
@@ -52,7 +52,7 @@ def kubeflow_tf_training_op(transformed_data_dir, schema: 'GcsUri[text/json]', l
         file_outputs = {'train': '/output.txt'}
     )
     if use_gpu:
-        kubeflow_tf_training_op.image = 'gcr.io/ml-pipeline/ml-pipeline-kubeflow-tf-trainer-gpu:d3c4add0a95e930c70a330466d0923827784eb9a'
+        kubeflow_tf_training_op.image = 'gcr.io/ml-pipeline/ml-pipeline-kubeflow-tf-trainer-gpu:2ed60100d1db9efeb38c6c358f90b21c144179be'
         kubeflow_tf_training_op.set_gpu_limit(1)
 
     return kubeflow_tf_training_op
@@ -60,7 +60,7 @@ def kubeflow_tf_training_op(transformed_data_dir, schema: 'GcsUri[text/json]', l
 def dataflow_tf_predict_op(evaluation_data: 'GcsUri', schema: 'GcsUri[text/json]', target: str, model: 'TensorFlow model', predict_mode, project: 'GcpProject', prediction_output: 'GcsUri', step_name='prediction'):
     return dsl.ContainerOp(
         name = step_name,
-        image = 'gcr.io/ml-pipeline/ml-pipeline-dataflow-tf-predict:d3c4add0a95e930c70a330466d0923827784eb9a',
+        image = 'gcr.io/ml-pipeline/ml-pipeline-dataflow-tf-predict:2ed60100d1db9efeb38c6c358f90b21c144179be',
         arguments = [
             '--data', evaluation_data,
             '--schema', schema,
@@ -76,7 +76,7 @@ def dataflow_tf_predict_op(evaluation_data: 'GcsUri', schema: 'GcsUri[text/json]
 def confusion_matrix_op(predictions, output, step_name='confusionmatrix'):
     return dsl.ContainerOp(
         name = step_name,
-        image = 'gcr.io/ml-pipeline/ml-pipeline-local-confusion-matrix:d3c4add0a95e930c70a330466d0923827784eb9a',
+        image = 'gcr.io/ml-pipeline/ml-pipeline-local-confusion-matrix:2ed60100d1db9efeb38c6c358f90b21c144179be',
         arguments = [
             '--predictions', predictions,
             '--output', output,
diff --git a/samples/notebooks/KubeFlow Pipeline Using TFX OSS Components.ipynb b/samples/notebooks/KubeFlow Pipeline Using TFX OSS Components.ipynb
index b8d60c8c8ab..633ccf65289 100644
--- a/samples/notebooks/KubeFlow Pipeline Using TFX OSS Components.ipynb
+++ b/samples/notebooks/KubeFlow Pipeline Using TFX OSS Components.ipynb
@@ -43,13 +43,13 @@
     "EVAL_DATA = 'gs://ml-pipeline-playground/tfx/taxi-cab-classification/eval.csv'\n",
     "HIDDEN_LAYER_SIZE = '1500'\n",
     "STEPS = 3000\n",
-    "DATAFLOW_TFDV_IMAGE = 'gcr.io/ml-pipeline/ml-pipeline-dataflow-tfdv:d3c4add0a95e930c70a330466d0923827784eb9a'\n",
-    "DATAFLOW_TFT_IMAGE = 'gcr.io/ml-pipeline/ml-pipeline-dataflow-tft:d3c4add0a95e930c70a330466d0923827784eb9a'\n",
-    "DATAFLOW_TFMA_IMAGE = 'gcr.io/ml-pipeline/ml-pipeline-dataflow-tfma:d3c4add0a95e930c70a330466d0923827784eb9a'\n",
-    "DATAFLOW_TF_PREDICT_IMAGE = 'gcr.io/ml-pipeline/ml-pipeline-dataflow-tf-predict:d3c4add0a95e930c70a330466d0923827784eb9a'\n",
-    "KUBEFLOW_TF_TRAINER_IMAGE = 'gcr.io/ml-pipeline/ml-pipeline-kubeflow-tf-trainer:d3c4add0a95e930c70a330466d0923827784eb9a'\n",
-    "KUBEFLOW_TF_TRAINER_GPU_IMAGE = 'gcr.io/ml-pipeline/ml-pipeline-kubeflow-tf-trainer-gpu:d3c4add0a95e930c70a330466d0923827784eb9a'\n",
-    "KUBEFLOW_DEPLOYER_IMAGE = 'gcr.io/ml-pipeline/ml-pipeline-kubeflow-deployer:d3c4add0a95e930c70a330466d0923827784eb9a'\n",
+    "DATAFLOW_TFDV_IMAGE = 'gcr.io/ml-pipeline/ml-pipeline-dataflow-tfdv:2ed60100d1db9efeb38c6c358f90b21c144179be'\n",
+    "DATAFLOW_TFT_IMAGE = 'gcr.io/ml-pipeline/ml-pipeline-dataflow-tft:2ed60100d1db9efeb38c6c358f90b21c144179be'\n",
+    "DATAFLOW_TFMA_IMAGE = 'gcr.io/ml-pipeline/ml-pipeline-dataflow-tfma:2ed60100d1db9efeb38c6c358f90b21c144179be'\n",
+    "DATAFLOW_TF_PREDICT_IMAGE = 'gcr.io/ml-pipeline/ml-pipeline-dataflow-tf-predict:2ed60100d1db9efeb38c6c358f90b21c144179be'\n",
+    "KUBEFLOW_TF_TRAINER_IMAGE = 'gcr.io/ml-pipeline/ml-pipeline-kubeflow-tf-trainer:2ed60100d1db9efeb38c6c358f90b21c144179be'\n",
+    "KUBEFLOW_TF_TRAINER_GPU_IMAGE = 'gcr.io/ml-pipeline/ml-pipeline-kubeflow-tf-trainer-gpu:2ed60100d1db9efeb38c6c358f90b21c144179be'\n",
+    "KUBEFLOW_DEPLOYER_IMAGE = 'gcr.io/ml-pipeline/ml-pipeline-kubeflow-deployer:2ed60100d1db9efeb38c6c358f90b21c144179be'\n",
     "DEPLOYER_MODEL = 'notebook_tfx_taxi'\n",
     "DEPLOYER_VERSION_DEV = 'dev'\n",
     "DEPLOYER_VERSION_PROD = 'prod'"
diff --git a/samples/resnet-cmle/resnet-train-pipeline.py b/samples/resnet-cmle/resnet-train-pipeline.py
index 96ffafe54d1..fde25cefa0e 100644
--- a/samples/resnet-cmle/resnet-train-pipeline.py
+++ b/samples/resnet-cmle/resnet-train-pipeline.py
@@ -22,7 +22,7 @@ def resnet_preprocess_op(project_id: 'GcpProject', output: 'GcsUri', train_csv:
                          validation_csv: 'GcsUri[text/csv]', labels, step_name='preprocess'):
     return dsl.ContainerOp(
         name = step_name,
-        image = 'gcr.io/ml-pipeline/resnet-preprocess:d3c4add0a95e930c70a330466d0923827784eb9a',
+        image = 'gcr.io/ml-pipeline/resnet-preprocess:2ed60100d1db9efeb38c6c358f90b21c144179be',
         arguments = [
             '--project_id', project_id,
             '--output', output,
@@ -38,7 +38,7 @@ def resnet_train_op(data_dir, output: 'GcsUri', region: 'GcpRegion', depth: int,
                     num_eval_images: int, num_label_classes: int, tf_version, step_name='train'):
     return dsl.ContainerOp(
         name = step_name,
-        image = 'gcr.io/ml-pipeline/resnet-train:d3c4add0a95e930c70a330466d0923827784eb9a',
+        image = 'gcr.io/ml-pipeline/resnet-train:2ed60100d1db9efeb38c6c358f90b21c144179be',
         arguments = [
             '--data_dir', data_dir,
             '--output', output,
@@ -60,7 +60,7 @@ def resnet_deploy_op(model_dir, model, version, project_id: 'GcpProject', region
                      tf_version, step_name='deploy'):
     return dsl.ContainerOp(
         name = step_name,
-        image = 'gcr.io/ml-pipeline/resnet-deploy:d3c4add0a95e930c70a330466d0923827784eb9a',
+        image = 'gcr.io/ml-pipeline/resnet-deploy:2ed60100d1db9efeb38c6c358f90b21c144179be',
         arguments = [
             '--model', model,
             '--version', version,
diff --git a/samples/tfx/taxi-cab-classification-pipeline.py b/samples/tfx/taxi-cab-classification-pipeline.py
index 1888caa7f88..6f16707fe5a 100755
--- a/samples/tfx/taxi-cab-classification-pipeline.py
+++ b/samples/tfx/taxi-cab-classification-pipeline.py
@@ -21,7 +21,7 @@
 def dataflow_tf_data_validation_op(inference_data: 'GcsUri', validation_data: 'GcsUri', column_names: 'GcsUri[text/json]', key_columns, project: 'GcpProject', mode, validation_output: 'GcsUri[Directory]', step_name='validation'):
     return dsl.ContainerOp(
         name = step_name,
-        image = 'gcr.io/ml-pipeline/ml-pipeline-dataflow-tfdv:d3c4add0a95e930c70a330466d0923827784eb9a',
+        image = 'gcr.io/ml-pipeline/ml-pipeline-dataflow-tfdv:2ed60100d1db9efeb38c6c358f90b21c144179be',
         arguments = [
             '--csv-data-for-inference', inference_data,
             '--csv-data-to-validate', validation_data,
@@ -40,7 +40,7 @@ def dataflow_tf_data_validation_op(inference_data: 'GcsUri', validation_data: 'G
 def dataflow_tf_transform_op(train_data: 'GcsUri', evaluation_data: 'GcsUri', schema: 'GcsUri[text/json]', project: 'GcpProject', preprocess_mode, preprocess_module: 'GcsUri[text/code/python]', transform_output: 'GcsUri[Directory]', step_name='preprocess'):
     return dsl.ContainerOp(
         name = step_name,
-        image = 'gcr.io/ml-pipeline/ml-pipeline-dataflow-tft:d3c4add0a95e930c70a330466d0923827784eb9a',
+        image = 'gcr.io/ml-pipeline/ml-pipeline-dataflow-tft:2ed60100d1db9efeb38c6c358f90b21c144179be',
         arguments = [
             '--train', train_data,
             '--eval', evaluation_data,
@@ -57,7 +57,7 @@ def dataflow_tf_transform_op(train_data: 'GcsUri', evaluation_data: 'GcsUri', sc
 def tf_train_op(transformed_data_dir, schema: 'GcsUri[text/json]', learning_rate: float, hidden_layer_size: int, steps: int, target: str, preprocess_module: 'GcsUri[text/code/python]', training_output: 'GcsUri[Directory]', step_name='training'):
     return dsl.ContainerOp(
         name = step_name,
-        image = 'gcr.io/ml-pipeline/ml-pipeline-kubeflow-tf-trainer:d3c4add0a95e930c70a330466d0923827784eb9a',
+        image = 'gcr.io/ml-pipeline/ml-pipeline-kubeflow-tf-trainer:2ed60100d1db9efeb38c6c358f90b21c144179be',
         arguments = [
             '--transformed-data-dir', transformed_data_dir,
             '--schema', schema,
@@ -74,7 +74,7 @@ def tf_train_op(transformed_data_dir, schema: 'GcsUri[text/json]', learning_rate
 def dataflow_tf_model_analyze_op(model: 'TensorFlow model', evaluation_data: 'GcsUri', schema: 'GcsUri[text/json]', project: 'GcpProject', analyze_mode, analyze_slice_column, analysis_output: 'GcsUri', step_name='analysis'):
     return dsl.ContainerOp(
         name = step_name,
-        image = 'gcr.io/ml-pipeline/ml-pipeline-dataflow-tfma:d3c4add0a95e930c70a330466d0923827784eb9a',
+        image = 'gcr.io/ml-pipeline/ml-pipeline-dataflow-tfma:2ed60100d1db9efeb38c6c358f90b21c144179be',
         arguments = [
             '--model', model,
             '--eval', evaluation_data,
@@ -91,7 +91,7 @@ def dataflow_tf_model_analyze_op(model: 'TensorFlow model', evaluation_data: 'Gc
 def dataflow_tf_predict_op(evaluation_data: 'GcsUri', schema: 'GcsUri[text/json]', target: str, model: 'TensorFlow model', predict_mode, project: 'GcpProject', prediction_output: 'GcsUri', step_name='prediction'):
     return dsl.ContainerOp(
         name = step_name,
-        image = 'gcr.io/ml-pipeline/ml-pipeline-dataflow-tf-predict:d3c4add0a95e930c70a330466d0923827784eb9a',
+        image = 'gcr.io/ml-pipeline/ml-pipeline-dataflow-tf-predict:2ed60100d1db9efeb38c6c358f90b21c144179be',
         arguments = [
             '--data', evaluation_data,
             '--schema', schema,
@@ -108,7 +108,7 @@ def dataflow_tf_predict_op(evaluation_data: 'GcsUri', schema: 'GcsUri[text/json]
 def confusion_matrix_op(predictions: 'GcsUri', output: 'GcsUri', step_name='confusion_matrix'):
     return dsl.ContainerOp(
         name=step_name,
-        image='gcr.io/ml-pipeline/ml-pipeline-local-confusion-matrix:d3c4add0a95e930c70a330466d0923827784eb9a',
+        image='gcr.io/ml-pipeline/ml-pipeline-local-confusion-matrix:2ed60100d1db9efeb38c6c358f90b21c144179be',
         arguments=[
             '--output', '%s/{{workflow.name}}/confusionmatrix' % output,
             '--predictions', predictions,
@@ -119,7 +119,7 @@ def confusion_matrix_op(predictions: 'GcsUri', output: 'GcsUri', step_name='conf
 def roc_op(predictions: 'GcsUri', output: 'GcsUri', step_name='roc'):
     return dsl.ContainerOp(
         name=step_name,
-        image='gcr.io/ml-pipeline/ml-pipeline-local-roc:d3c4add0a95e930c70a330466d0923827784eb9a',
+        image='gcr.io/ml-pipeline/ml-pipeline-local-roc:2ed60100d1db9efeb38c6c358f90b21c144179be',
         arguments=[
             '--output', '%s/{{workflow.name}}/roc' % output,
             '--predictions', predictions,
@@ -130,7 +130,7 @@ def roc_op(predictions: 'GcsUri', output: 'GcsUri', step_name='roc'):
 def kubeflow_deploy_op(model: 'TensorFlow model', tf_server_name, step_name='deploy'):
     return dsl.ContainerOp(
         name = step_name,
-        image = 'gcr.io/ml-pipeline/ml-pipeline-kubeflow-deployer:d3c4add0a95e930c70a330466d0923827784eb9a',
+        image = 'gcr.io/ml-pipeline/ml-pipeline-kubeflow-deployer:2ed60100d1db9efeb38c6c358f90b21c144179be',
         arguments = [
             '--model-path', model,
             '--server-name', tf_server_name
diff --git a/samples/xgboost-spark/xgboost-training-cm.py b/samples/xgboost-spark/xgboost-training-cm.py
index cbd768b5427..e624ac22cb2 100755
--- a/samples/xgboost-spark/xgboost-training-cm.py
+++ b/samples/xgboost-spark/xgboost-training-cm.py
@@ -26,7 +26,7 @@ class CreateClusterOp(dsl.ContainerOp):
   def __init__(self, name, project, region, staging):
     super(CreateClusterOp, self).__init__(
       name=name,
-      image='gcr.io/ml-pipeline/ml-pipeline-dataproc-create-cluster:d3c4add0a95e930c70a330466d0923827784eb9a',
+      image='gcr.io/ml-pipeline/ml-pipeline-dataproc-create-cluster:2ed60100d1db9efeb38c6c358f90b21c144179be',
       arguments=[
         '--project', project,
         '--region', region,
@@ -41,7 +41,7 @@ class DeleteClusterOp(dsl.ContainerOp):
   def __init__(self, name, project, region):
     super(DeleteClusterOp, self).__init__(
       name=name,
-      image='gcr.io/ml-pipeline/ml-pipeline-dataproc-delete-cluster:d3c4add0a95e930c70a330466d0923827784eb9a',
+      image='gcr.io/ml-pipeline/ml-pipeline-dataproc-delete-cluster:2ed60100d1db9efeb38c6c358f90b21c144179be',
       arguments=[
         '--project', project,
         '--region', region,
@@ -55,7 +55,7 @@ class AnalyzeOp(dsl.ContainerOp):
   def __init__(self, name, project, region, cluster_name, schema, train_data, output):
     super(AnalyzeOp, self).__init__(
      name=name,
-      image='gcr.io/ml-pipeline/ml-pipeline-dataproc-analyze:d3c4add0a95e930c70a330466d0923827784eb9a',
+      image='gcr.io/ml-pipeline/ml-pipeline-dataproc-analyze:2ed60100d1db9efeb38c6c358f90b21c144179be',
       arguments=[
         '--project', project,
         '--region', region,
@@ -73,7 +73,7 @@ def __init__(self, name, project, region, cluster_name, train_data, eval_data,
               target, analysis, output):
     super(TransformOp, self).__init__(
       name=name,
-      image='gcr.io/ml-pipeline/ml-pipeline-dataproc-transform:d3c4add0a95e930c70a330466d0923827784eb9a',
+      image='gcr.io/ml-pipeline/ml-pipeline-dataproc-transform:2ed60100d1db9efeb38c6c358f90b21c144179be',
       arguments=[
         '--project', project,
        '--region', region,
@@ -98,7 +98,7 @@ def __init__(self, name, project, region, cluster_name, train_data, eval_data,
 
     super(TrainerOp, self).__init__(
       name=name,
-      image='gcr.io/ml-pipeline/ml-pipeline-dataproc-train:d3c4add0a95e930c70a330466d0923827784eb9a',
+      image='gcr.io/ml-pipeline/ml-pipeline-dataproc-train:2ed60100d1db9efeb38c6c358f90b21c144179be',
       arguments=[
         '--project', project,
         '--region', region,
@@ -121,7 +121,7 @@ class PredictOp(dsl.ContainerOp):
   def __init__(self, name, project, region, cluster_name, data, model, target, analysis, output):
     super(PredictOp, self).__init__(
       name=name,
-      image='gcr.io/ml-pipeline/ml-pipeline-dataproc-predict:d3c4add0a95e930c70a330466d0923827784eb9a',
+      image='gcr.io/ml-pipeline/ml-pipeline-dataproc-predict:2ed60100d1db9efeb38c6c358f90b21c144179be',
       arguments=[
         '--project', project,
         '--region', region,
@@ -141,7 +141,7 @@ class ConfusionMatrixOp(dsl.ContainerOp):
   def __init__(self, name, predictions, output):
     super(ConfusionMatrixOp, self).__init__(
       name=name,
-      image='gcr.io/ml-pipeline/ml-pipeline-local-confusion-matrix:d3c4add0a95e930c70a330466d0923827784eb9a',
+      image='gcr.io/ml-pipeline/ml-pipeline-local-confusion-matrix:2ed60100d1db9efeb38c6c358f90b21c144179be',
       arguments=[
         '--output', output,
         '--predictions', predictions
@@ -153,7 +153,7 @@ class RocOp(dsl.ContainerOp):
   def __init__(self, name, predictions, trueclass, output):
     super(RocOp, self).__init__(
       name=name,
-      image='gcr.io/ml-pipeline/ml-pipeline-local-roc:d3c4add0a95e930c70a330466d0923827784eb9a',
+      image='gcr.io/ml-pipeline/ml-pipeline-local-roc:2ed60100d1db9efeb38c6c358f90b21c144179be',
      arguments=[
        '--output', output,
        '--predictions', predictions,