Samples - Cleaned up unnecessary usage of PipelineParam (#1631)
Ark-kun authored and k8s-ci-robot committed Jul 23, 2019
1 parent 8bc4644 commit 3da6e90
Showing 8 changed files with 90 additions and 105 deletions.
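In the Kubeflow Pipelines Python SDK, the `@dsl.pipeline` decorator converts each function argument into a `PipelineParam` at compile time, so wrapping the defaults in explicit `dsl.PipelineParam(name=..., value=...)` calls is unnecessary. A minimal sketch of the pattern this commit applies across the samples (the pipeline name and bucket value below are hypothetical, not taken from the diff):

import kfp.dsl as dsl

# Before: the default was wrapped in an explicit PipelineParam.
# def my_pipeline(bucket=dsl.PipelineParam(name='bucket', value='gs://my-bucket')):
#     ...

# After: a plain Python default; the decorator creates the PipelineParam
# (and the corresponding input box in the UI) automatically.
@dsl.pipeline(name='Example', description='Plain defaults instead of explicit PipelineParam')
def my_pipeline(bucket='gs://my-bucket'):
    pass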
23 changes: 12 additions & 11 deletions contrib/samples/openvino/deployer/deployer.py
@@ -6,17 +6,18 @@
description='Deploy OpenVINO Model Server instance in Kubernetes'
)
def openvino_predict(
-    model_export_path = dsl.PipelineParam(name='model-export-path', value='gs://intelai_public_models/resnet_50_i8'),
-    server_name = dsl.PipelineParam(name='server-name', value='resnet'),
-    log_level = dsl.PipelineParam(name='log-level', value='DEBUG'),
-    batch_size = dsl.PipelineParam(name='batch-size', value='auto'),
-    model_version_policy = dsl.PipelineParam(name='model-version-policy', value='{"latest": { "num_versions":2 }}'),
-    replicas = dsl.PipelineParam(name='replicas', value=1),
-    images_list = dsl.PipelineParam(name='evaluation-images-list', value='https://raw.githubusercontent.com/IntelAI/OpenVINO-model-server/master/example_client/input_images.txt'),
-    image_path_prefix = dsl.PipelineParam(name='image-path-prefix', value='https://github.com/IntelAI/OpenVINO-model-server/raw/master/example_client/'),
-    model_input_name = dsl.PipelineParam(name='model-input-name', value='data'),
-    model_output_name = dsl.PipelineParam(name='model-output-name', value='prob'),
-    model_input_size = dsl.PipelineParam(name='model-input-size', value=224)):
+    model_export_path='gs://intelai_public_models/resnet_50_i8',
+    server_name='resnet',
+    log_level='DEBUG',
+    batch_size='auto',
+    model_version_policy='{"latest": { "num_versions":2 }}',
+    replicas=1,
+    images_list='https://raw.githubusercontent.com/IntelAI/OpenVINO-model-server/master/example_client/input_images.txt',
+    image_path_prefix='https://github.com/IntelAI/OpenVINO-model-server/raw/master/example_client/',
+    model_input_name='data',
+    model_output_name='prob',
+    model_input_size=224
+):


"""A one-step pipeline."""
17 changes: 9 additions & 8 deletions contrib/samples/openvino/predict/numpy_predict.py
@@ -6,14 +6,15 @@
description='Execute prediction operation for the dataset from numpy file and test accuracy and latency'
)
def openvino_predict(
-    model_bin = dsl.PipelineParam(name='model-bin-path', value='gs://intelai_public_models/resnet_50_i8/1/resnet_50_i8.bin'),
-    model_xml = dsl.PipelineParam(name='model-xml-path', value='gs://intelai_public_models/resnet_50_i8/1/resnet_50_i8.xml'),
-    generated_model_dir = dsl.PipelineParam(name='generated-model-dir', value='gs://your-bucket/folder'),
-    input_numpy_file = dsl.PipelineParam(name='input-numpy-file', value='gs://intelai_public_models/images/imgs.npy'),
-    label_numpy_file = dsl.PipelineParam(name='label-numpy-file', value='gs://intelai_public_models/images/lbs.npy'),
-    batch_size = dsl.PipelineParam(name='batch-size', value=1),
-    scale_div = dsl.PipelineParam(name='scale-input-divide', value=1),
-    scale_sub = dsl.PipelineParam(name='scale-input-substract', value=0)):
+    model_bin='gs://intelai_public_models/resnet_50_i8/1/resnet_50_i8.bin',
+    model_xml='gs://intelai_public_models/resnet_50_i8/1/resnet_50_i8.xml',
+    generated_model_dir='gs://your-bucket/folder',
+    input_numpy_file='gs://intelai_public_models/images/imgs.npy',
+    label_numpy_file='gs://intelai_public_models/images/lbs.npy',
+    batch_size=1,
+    scale_div=1,
+    scale_sub=0
+):

"""A one-step pipeline."""
dsl.ContainerOp(
19 changes: 10 additions & 9 deletions contrib/samples/openvino/tf-slim/tf-slim.py
@@ -5,15 +5,16 @@
description='Generate slim models and optimize them with OpenVINO'
)
def tf_slim_optimize(
-    model_name = dsl.PipelineParam(name='model-name', value='resnet_v1_50'),
-    num_classes = dsl.PipelineParam(name='num-classes', value=1000),
-    checkpoint_url = dsl.PipelineParam(name='checkpoint-url', value='http://download.tensorflow.org/models/resnet_v1_50_2016_08_28.tar.gz'),
-    batch_size = dsl.PipelineParam(name='batch-size', value=1),
-    export_dir = dsl.PipelineParam(name='tf-export-dir', value='/tmp/export'),
-    generated_model_dir = dsl.PipelineParam(name='generated-model-dir', value='gs://your-bucket/folder'),
-    mo_options = dsl.PipelineParam(name='mo-option', value='--saved_model_dir .'),
-    input_numpy_file = dsl.PipelineParam(name='input-numpy-file', value='gs://intelai_public_models/images/imgs.npy'),
-    label_numpy_file = dsl.PipelineParam(name='label-numpy-file', value='gs://intelai_public_models/images/lbs.npy')):
+    model_name='resnet_v1_50',
+    num_classes=1000,
+    checkpoint_url='http://download.tensorflow.org/models/resnet_v1_50_2016_08_28.tar.gz',
+    batch_size=1,
+    export_dir='/tmp/export',
+    generated_model_dir='gs://your-bucket/folder',
+    mo_options='--saved_model_dir .',
+    input_numpy_file='gs://intelai_public_models/images/imgs.npy',
+    label_numpy_file='gs://intelai_public_models/images/lbs.npy'
+):

slim = dsl.ContainerOp(
name='Create_model',
18 changes: 9 additions & 9 deletions samples/ai-platform/Chicago Crime Pipeline.ipynb
@@ -222,15 +222,15 @@
")\n",
"\n",
"def pipeline(\n",
-" data_gcs_path=dsl.PipelineParam(name='data_gcs_path', value=DATA_GCS_PATH),\n",
-" gcs_working_dir=dsl.PipelineParam(name='gcs_working_dir', value=GCS_WORKING_DIR),\n",
-" project_id=dsl.PipelineParam(name='project_id', value=PROJECT_ID),\n",
-" python_module=dsl.PipelineParam(name='python_module', value=PYTHON_MODULE),\n",
-" region=dsl.PipelineParam(name='region', value=REGION),\n",
-" runtime_version=dsl.PipelineParam(name='runtime_version', value=RUNTIME_VERSION),\n",
-" package_uris=dsl.PipelineParam(name='package_uris', value=PACKAGE_URIS),\n",
-" trainer_output_gcs_path=dsl.PipelineParam(name='trainer_output_gcs_path', value=TRAINER_OUTPUT_GCS_PATH),\n",
-" trainer_args=dsl.PipelineParam(name='trainer_args', value=TRAINER_ARGS),\n",
+" data_gcs_path=DATA_GCS_PATH,\n",
+" gcs_working_dir=GCS_WORKING_DIR,\n",
+" project_id=PROJECT_ID,\n",
+" python_module=PYTHON_MODULE,\n",
+" region=REGION,\n",
+" runtime_version=RUNTIME_VERSION,\n",
+" package_uris=PACKAGE_URIS,\n",
+" trainer_output_gcs_path=TRAINER_OUTPUT_GCS_PATH,\n",
+" trainer_args=TRAINER_ARGS,\n",
"): \n",
" download_task = download(project_id,\n",
" data_gcs_path)\n",
100 changes: 42 additions & 58 deletions samples/notebooks/KubeFlow Pipeline Using TFX OSS Components.ipynb
@@ -253,20 +253,20 @@
"def taxi_cab_classification(\n",
" output,\n",
" project,\n",
-" column_names=dsl.PipelineParam(name='column-names', value='gs://ml-pipeline-playground/tfx/taxi-cab-classification/column-names.json'),\n",
-" key_columns=dsl.PipelineParam(name='key-columns', value='trip_start_timestamp'),\n",
-" train=dsl.PipelineParam(name='train', value=TRAIN_DATA),\n",
-" evaluation=dsl.PipelineParam(name='evaluation', value=EVAL_DATA),\n",
-" validation_mode=dsl.PipelineParam(name='validation-mode', value='local'),\n",
-" preprocess_mode=dsl.PipelineParam(name='preprocess-mode', value='local'),\n",
-" preprocess_module: dsl.PipelineParam=dsl.PipelineParam(name='preprocess-module', value='gs://ml-pipeline-playground/tfx/taxi-cab-classification/preprocessing.py'),\n",
-" target=dsl.PipelineParam(name='target', value='tips'),\n",
-" learning_rate=dsl.PipelineParam(name='learning-rate', value=0.1),\n",
-" hidden_layer_size=dsl.PipelineParam(name='hidden-layer-size', value=HIDDEN_LAYER_SIZE),\n",
-" steps=dsl.PipelineParam(name='steps', value=STEPS),\n",
-" predict_mode=dsl.PipelineParam(name='predict-mode', value='local'),\n",
-" analyze_mode=dsl.PipelineParam(name='analyze-mode', value='local'),\n",
-" analyze_slice_column=dsl.PipelineParam(name='analyze-slice-column', value='trip_start_hour')):\n",
+" column_names='gs://ml-pipeline-playground/tfx/taxi-cab-classification/column-names.json',\n",
+" key_columns='trip_start_timestamp',\n",
+" train=TRAIN_DATA,\n",
+" evaluation=EVAL_DATA,\n",
+" validation_mode='local',\n",
+" preprocess_mode='local',\n",
+" preprocess_module: dsl.PipelineParam='gs://ml-pipeline-playground/tfx/taxi-cab-classification/preprocessing.py',\n",
+" target='tips',\n",
+" learning_rate=0.1,\n",
+" hidden_layer_size=HIDDEN_LAYER_SIZE,\n",
+" steps=STEPS,\n",
+" predict_mode='local',\n",
+" analyze_mode='local',\n",
+" analyze_slice_column='trip_start_hour'):\n",
"\n",
" # set the flag to use GPU trainer\n",
" use_gpu = False\n",
@@ -508,28 +508,20 @@
" project,\n",
" model,\n",
" version,\n",
-" column_names=dsl.PipelineParam(\n",
-" name='column-names',\n",
-" value='gs://ml-pipeline-playground/tfx/taxi-cab-classification/column-names.json'),\n",
-" key_columns=dsl.PipelineParam(name='key-columns', value='trip_start_timestamp'),\n",
-" train=dsl.PipelineParam(\n",
-" name='train',\n",
-" value=TRAIN_DATA),\n",
-" evaluation=dsl.PipelineParam(\n",
-" name='evaluation',\n",
-" value=EVAL_DATA),\n",
-" validation_mode=dsl.PipelineParam(name='validation-mode', value='local'),\n",
-" preprocess_mode=dsl.PipelineParam(name='preprocess-mode', value='local'),\n",
-" preprocess_module: dsl.PipelineParam=dsl.PipelineParam(\n",
-" name='preprocess-module',\n",
-" value='gs://ml-pipeline-playground/tfx/taxi-cab-classification/preprocessing.py'),\n",
-" target=dsl.PipelineParam(name='target', value='tips'),\n",
-" learning_rate=dsl.PipelineParam(name='learning-rate', value=0.1),\n",
-" hidden_layer_size=dsl.PipelineParam(name='hidden-layer-size', value=HIDDEN_LAYER_SIZE),\n",
-" steps=dsl.PipelineParam(name='steps', value=STEPS),\n",
-" predict_mode=dsl.PipelineParam(name='predict-mode', value='local'),\n",
-" analyze_mode=dsl.PipelineParam(name='analyze-mode', value='local'),\n",
-" analyze_slice_column=dsl.PipelineParam(name='analyze-slice-column', value='trip_start_hour')):\n",
+" column_names='gs://ml-pipeline-playground/tfx/taxi-cab-classification/column-names.json',\n",
+" key_columns='trip_start_timestamp',\n",
+" train=TRAIN_DATA,\n",
+" evaluation=EVAL_DATA,\n",
+" validation_mode='local',\n",
+" preprocess_mode='local',\n",
+" preprocess_module='gs://ml-pipeline-playground/tfx/taxi-cab-classification/preprocessing.py',\n",
+" target='tips',\n",
+" learning_rate=0.1,\n",
+" hidden_layer_size=HIDDEN_LAYER_SIZE,\n",
+" steps=STEPS,\n",
+" predict_mode='local',\n",
+" analyze_mode='local',\n",
+" analyze_slice_column='trip_start_hour'):\n",
" \n",
" \n",
" validation_output = '%s/{{workflow.name}}/validation' % output\n",
@@ -634,28 +626,20 @@
" project,\n",
" model,\n",
" version,\n",
-" column_names=dsl.PipelineParam(\n",
-" name='column-names',\n",
-" value='gs://ml-pipeline-playground/tfx/taxi-cab-classification/column-names.json'),\n",
-" key_columns=dsl.PipelineParam(name='key-columns', value='trip_start_timestamp'),\n",
-" train=dsl.PipelineParam(\n",
-" name='train',\n",
-" value=TRAIN_DATA),\n",
-" evaluation=dsl.PipelineParam(\n",
-" name='evaluation',\n",
-" value=EVAL_DATA),\n",
-" validation_mode=dsl.PipelineParam(name='validation-mode', value='local'),\n",
-" preprocess_mode=dsl.PipelineParam(name='preprocess-mode', value='local'),\n",
-" preprocess_module: dsl.PipelineParam=dsl.PipelineParam(\n",
-" name='preprocess-module',\n",
-" value='gs://ml-pipeline-playground/tfx/taxi-cab-classification/preprocessing.py'),\n",
-" target=dsl.PipelineParam(name='target', value='tips'),\n",
-" learning_rate=dsl.PipelineParam(name='learning-rate', value=0.1),\n",
-" hidden_layer_size=dsl.PipelineParam(name='hidden-layer-size', value=HIDDEN_LAYER_SIZE),\n",
-" steps=dsl.PipelineParam(name='steps', value=STEPS),\n",
-" predict_mode=dsl.PipelineParam(name='predict-mode', value='local'),\n",
-" analyze_mode=dsl.PipelineParam(name='analyze-mode', value='local'),\n",
-" analyze_slice_column=dsl.PipelineParam(name='analyze-slice-column', value='trip_start_hour')):\n",
+" column_names='gs://ml-pipeline-playground/tfx/taxi-cab-classification/column-names.json',\n",
+" key_columns='trip_start_timestamp',\n",
+" train=TRAIN_DATA,\n",
+" evaluation=EVAL_DATA,\n",
+" validation_mode='local',\n",
+" preprocess_mode='local',\n",
+" preprocess_module='gs://ml-pipeline-playground/tfx/taxi-cab-classification/preprocessing.py',\n",
+" target='tips',\n",
+" learning_rate=0.1,\n",
+" hidden_layer_size=HIDDEN_LAYER_SIZE,\n",
+" steps=STEPS,\n",
+" predict_mode='local',\n",
+" analyze_mode='local',\n",
+" analyze_slice_column='trip_start_hour'):\n",
" \n",
" \n",
" validation_output = '%s/{{workflow.name}}/validation' % output\n",
10 changes: 4 additions & 6 deletions samples/notebooks/Local Development Quickstart.ipynb
@@ -152,7 +152,7 @@
"\n",
"# Defines the pipeline.\n",
"@dsl.pipeline(name='List GCS blobs', description='Lists GCS blobs.')\n",
"def pipeline_func(bucket_name=dsl.PipelineParam('bucket')):\n",
"def pipeline_func(bucket_name):\n",
" list_blobs_task = list_blobs_op(bucket_name)\n",
"\n",
"# Compile the pipeline to a file.\n",
@@ -357,8 +357,7 @@
" name='List GCS Blobs',\n",
" description='Takes a GCS bucket name as input and lists the blobs.'\n",
")\n",
"def pipeline_func(\n",
" bucket=kfp.dsl.PipelineParam('bucket', value='Enter your bucket name here.')):\n",
"def pipeline_func(bucket='Enter your bucket name here.'):\n",
" list_blobs_task = list_gcs_blobs_op('List', bucket)\n",
"\n",
"# Compile the pipeline to a file.\n",
@@ -524,7 +523,7 @@
"metadata": {},
"source": [
"## 3 Create your workflow as a Python function\n",
"Define your pipeline as a Python function. ` @kfp.dsl.pipeline` is a required decoration including `name` and `description` properties. `pipeline_func` defines the pipeline. `bucket=kfp.dsl.PipelineParam(...)` specifies that the pipeline takes an input parameter `bucket`. Later when you load the pipeline, `kfp.dsl.PipelineParam('bucket', value='Enter your bucket name here.')` will create an input box in the UI with the initial value `Enter your bucket name here.`. You can change the initial value with your bucket name at runtime. `list_gcs_blobs_op('List', bucket)` will create a component named `List` that lists the blobs. `view_input_op('View', list_blobs_task.outputs['blobs'])` will create a component named `View` that views a CSV. `list_blobs_task.outputs['blobs']` tells the pipeline to take the output of the first component stored as string in `blobs.txt` as an input for the second component."
"Define your pipeline as a Python function. ` @kfp.dsl.pipeline` is a required decoration including `name` and `description` properties. `pipeline_func` defines the pipeline with the `bucket` parameter. When the user uploads the pipeline to the system and starts creating a new run from it, they'll see the an input box for the `bucket` parameter with the initial value `Enter your bucket name here.`. You can change the initial value with your bucket name at runtime. `list_gcs_blobs_op('List', bucket)` will create a component named `List` that lists the blobs. `view_input_op('View', list_blobs_task.outputs['blobs'])` will create a component named `View` that views a CSV. `list_blobs_task.outputs['blobs']` tells the pipeline to take the output of the first component stored as string in `blobs.txt` as an input for the second component."
]
},
{
@@ -551,8 +550,7 @@
" name='Quickstart pipeline',\n",
" description='Takes a GCS bucket name views a CSV input file in the bucket.'\n",
")\n",
"def pipeline_func(bucket=kfp.dsl.PipelineParam(\n",
" 'bucket', value='Enter your bucket name here.')):\n",
"def pipeline_func(bucket='Enter your bucket name here.'):\n",
" list_blobs_task = list_gcs_blobs_op('List', bucket)\n",
" view_input_task = view_input_op('View', list_blobs_task.outputs['blobs'])\n",
"\n",
2 changes: 1 addition & 1 deletion sdk/python/tests/compiler/testdata/param_op_transform.py
@@ -14,7 +14,7 @@ def _add_common_labels(op: dsl.ContainerOp) -> dsl.ContainerOp:
description="Test that parameters used in Op transformation functions as pod labels "
"would be correcly identified and set as arguments in he generated yaml"
)
-def param_substitutions(param = dsl.PipelineParam(name='param')):
+def param_substitutions(param):
dsl.get_pipeline_conf().op_transformers.append(add_common_labels(param))

op = dsl.ContainerOp(
6 changes: 3 additions & 3 deletions sdk/python/tests/components/test_python_op.py
@@ -294,9 +294,9 @@ def add(a: float, b: float) -> float:
description='A pipeline that performs arithmetic calculations.'
)
def calc_pipeline(
-    a1=dsl.PipelineParam('a1'),
-    a2=dsl.PipelineParam('a2', value='7'),
-    a3=dsl.PipelineParam('a3', value='17'),
+    a1,
+    a2='7',
+    a3='17',
):
task_1 = add_op(a1, a2)
task_2 = add_op2(a1, a2)
