diff --git a/components/gcp/bigquery/query/component.yaml b/components/gcp/bigquery/query/component.yaml index d5ab09d4fd2..76d682b17c2 100644 --- a/components/gcp/bigquery/query/component.yaml +++ b/components/gcp/bigquery/query/component.yaml @@ -68,10 +68,10 @@ implementation: --table_id, {inputValue: table_id}, --dataset_location, {inputValue: dataset_location}, --output_gcs_path, {inputValue: output_gcs_path}, - --job_config, {inputValue: job_config} + --job_config, {inputValue: job_config}, + --ui-metadata-path, {outputPath: MLPipeline UI metadata}, ] env: KFP_POD_NAME: "{{pod.name}}" fileOutputs: output_gcs_path: /tmp/kfp/output/bigquery/query-output-path.txt - MLPipeline UI metadata: /mlpipeline-ui-metadata.json diff --git a/components/gcp/container/component_sdk/python/kfp_component/launcher/__main__.py b/components/gcp/container/component_sdk/python/kfp_component/launcher/__main__.py index 7d9937935c9..20445e995aa 100644 --- a/components/gcp/container/component_sdk/python/kfp_component/launcher/__main__.py +++ b/components/gcp/container/component_sdk/python/kfp_component/launcher/__main__.py @@ -15,6 +15,7 @@ import argparse import fire import importlib +import os import sys import logging from .launcher import launch @@ -26,9 +27,19 @@ def main(): description='Launch a python module or file.') parser.add_argument('file_or_module', type=str, help='Either a python file path or a module name.') + parser.add_argument( + '--ui-metadata-path', + type=str, + default='/mlpipeline-ui-metadata.json', + help='Path for the file where the mlpipeline-ui-metadata.json data ' + 'should be written.') parser.add_argument('args', nargs=argparse.REMAINDER) args = parser.parse_args() - launch(args.file_or_module, args.args) + + if args.ui_metadata_path: + os.environ['KFP_UI_METADATA_PATH'] = args.ui_metadata_path + + launch(args.file_or_module, args.args) if __name__ == '__main__': main() \ No newline at end of file diff --git 
a/components/gcp/container/component_sdk/python/kfp_component/launcher/launcher.py b/components/gcp/container/component_sdk/python/kfp_component/launcher/launcher.py index 362e0ebfed1..1527942ad89 100644 --- a/components/gcp/container/component_sdk/python/kfp_component/launcher/launcher.py +++ b/components/gcp/container/component_sdk/python/kfp_component/launcher/launcher.py @@ -28,6 +28,8 @@ def launch(file_or_module, args): Returns: The return value from the launched function. """ + if not isinstance(args, list): + args = [args] try: module = importlib.import_module(file_or_module) except Exception: @@ -42,4 +44,4 @@ def launch(file_or_module, args): except Exception: logging.error('Failed to find the module or file: {}'.format(file_or_module)) sys.exit(1) - return fire.Fire(module, command=args, name=module.__name__) \ No newline at end of file + return fire.Fire(module, command=[] + args, name=module.__name__) \ No newline at end of file diff --git a/components/gcp/dataflow/launch_python/component.yaml b/components/gcp/dataflow/launch_python/component.yaml index eb296f8e05b..0cd95d36fe8 100644 --- a/components/gcp/dataflow/launch_python/component.yaml +++ b/components/gcp/dataflow/launch_python/component.yaml @@ -61,10 +61,10 @@ implementation: --staging_dir, {inputValue: staging_dir}, --requirements_file_path, {inputValue: requirements_file_path}, --args, {inputValue: args}, - --wait_interval, {inputValue: wait_interval} + --wait_interval, {inputValue: wait_interval}, + --ui-metadata-path, {outputPath: MLPipeline UI metadata}, ] env: KFP_POD_NAME: "{{pod.name}}" fileOutputs: job_id: /tmp/kfp/output/dataflow/job_id.txt - MLPipeline UI metadata: /mlpipeline-ui-metadata.json diff --git a/components/gcp/dataflow/launch_template/component.yaml b/components/gcp/dataflow/launch_template/component.yaml index cc55e38eb67..267d9d2c770 100644 --- a/components/gcp/dataflow/launch_template/component.yaml +++ b/components/gcp/dataflow/launch_template/component.yaml @@ -73,9 
+73,9 @@ implementation: --validate_only, {inputValue: validate_only}, --staging_dir, {inputValue: staging_dir}, --wait_interval, {inputValue: wait_interval}, + --ui-metadata-path, {outputPath: MLPipeline UI metadata}, ] env: KFP_POD_NAME: "{{pod.name}}" fileOutputs: job_id: /tmp/kfp/output/dataflow/job_id.txt - MLPipeline UI metadata: /mlpipeline-ui-metadata.json diff --git a/components/gcp/dataproc/create_cluster/component.yaml b/components/gcp/dataproc/create_cluster/component.yaml index 99ea1530681..09c3a4b0001 100644 --- a/components/gcp/dataproc/create_cluster/component.yaml +++ b/components/gcp/dataproc/create_cluster/component.yaml @@ -81,10 +81,10 @@ implementation: --config_bucket, {inputValue: config_bucket}, --image_version, {inputValue: image_version}, --cluster, {inputValue: cluster}, - --wait_interval, {inputValue: wait_interval} + --wait_interval, {inputValue: wait_interval}, + --ui-metadata-path, {outputPath: MLPipeline UI metadata}, ] env: KFP_POD_NAME: "{{pod.name}}" fileOutputs: cluster_name: /tmp/kfp/output/dataproc/cluster_name.txt - MLPipeline UI metadata: /mlpipeline-ui-metadata.json diff --git a/components/gcp/dataproc/submit_hadoop_job/component.yaml b/components/gcp/dataproc/submit_hadoop_job/component.yaml index 8c3e3b3eff7..90a41d9555d 100644 --- a/components/gcp/dataproc/submit_hadoop_job/component.yaml +++ b/components/gcp/dataproc/submit_hadoop_job/component.yaml @@ -91,10 +91,10 @@ implementation: --args, {inputValue: args}, --hadoop_job, {inputValue: hadoop_job}, --job, {inputValue: job}, - --wait_interval, {inputValue: wait_interval} + --wait_interval, {inputValue: wait_interval}, + --ui-metadata-path, {outputPath: MLPipeline UI metadata}, ] env: KFP_POD_NAME: "{{pod.name}}" fileOutputs: job_id: /tmp/kfp/output/dataproc/job_id.txt - MLPipeline UI metadata: /mlpipeline-ui-metadata.json diff --git a/components/gcp/dataproc/submit_hive_job/component.yaml b/components/gcp/dataproc/submit_hive_job/component.yaml index 
88a141f3d8d..13d672c9110 100644 --- a/components/gcp/dataproc/submit_hive_job/component.yaml +++ b/components/gcp/dataproc/submit_hive_job/component.yaml @@ -86,10 +86,10 @@ implementation: --script_variables, {inputValue: script_variables}, --hive_job, {inputValue: hive_job}, --job, {inputValue: job}, - --wait_interval, {inputValue: wait_interval} + --wait_interval, {inputValue: wait_interval}, + --ui-metadata-path, {outputPath: MLPipeline UI metadata}, ] env: KFP_POD_NAME: "{{pod.name}}" fileOutputs: job_id: /tmp/kfp/output/dataproc/job_id.txt - MLPipeline UI metadata: /mlpipeline-ui-metadata.json diff --git a/components/gcp/dataproc/submit_pig_job/component.yaml b/components/gcp/dataproc/submit_pig_job/component.yaml index 8aa0c9d8b12..ae4ac03ceee 100644 --- a/components/gcp/dataproc/submit_pig_job/component.yaml +++ b/components/gcp/dataproc/submit_pig_job/component.yaml @@ -86,10 +86,10 @@ implementation: --script_variables, {inputValue: script_variables}, --pig_job, {inputValue: pig_job}, --job, {inputValue: job}, - --wait_interval, {inputValue: wait_interval} + --wait_interval, {inputValue: wait_interval}, + --ui-metadata-path, {outputPath: MLPipeline UI metadata}, ] env: KFP_POD_NAME: "{{pod.name}}" fileOutputs: job_id: /tmp/kfp/output/dataproc/job_id.txt - MLPipeline UI metadata: /mlpipeline-ui-metadata.json diff --git a/components/gcp/dataproc/submit_pyspark_job/component.yaml b/components/gcp/dataproc/submit_pyspark_job/component.yaml index 5b0e44126e0..28315602deb 100644 --- a/components/gcp/dataproc/submit_pyspark_job/component.yaml +++ b/components/gcp/dataproc/submit_pyspark_job/component.yaml @@ -79,10 +79,10 @@ implementation: --args, {inputValue: args}, --pyspark_job, {inputValue: pyspark_job}, --job, {inputValue: job}, - --wait_interval, {inputValue: wait_interval} + --wait_interval, {inputValue: wait_interval}, + --ui-metadata-path, {outputPath: MLPipeline UI metadata}, ] env: KFP_POD_NAME: "{{pod.name}}" fileOutputs: job_id: 
/tmp/kfp/output/dataproc/job_id.txt - MLPipeline UI metadata: /mlpipeline-ui-metadata.json diff --git a/components/gcp/dataproc/submit_spark_job/component.yaml b/components/gcp/dataproc/submit_spark_job/component.yaml index 0ff18b42738..228f91740df 100644 --- a/components/gcp/dataproc/submit_spark_job/component.yaml +++ b/components/gcp/dataproc/submit_spark_job/component.yaml @@ -87,10 +87,10 @@ implementation: --args, {inputValue: args}, --spark_job, {inputValue: spark_job}, --job, {inputValue: job}, - --wait_interval, {inputValue: wait_interval} + --wait_interval, {inputValue: wait_interval}, + --ui-metadata-path, {outputPath: MLPipeline UI metadata}, ] env: KFP_POD_NAME: "{{pod.name}}" fileOutputs: job_id: /tmp/kfp/output/dataproc/job_id.txt - MLPipeline UI metadata: /mlpipeline-ui-metadata.json diff --git a/components/gcp/dataproc/submit_sparksql_job/component.yaml b/components/gcp/dataproc/submit_sparksql_job/component.yaml index d84753273d0..ac2ccc11fb8 100644 --- a/components/gcp/dataproc/submit_sparksql_job/component.yaml +++ b/components/gcp/dataproc/submit_sparksql_job/component.yaml @@ -86,10 +86,10 @@ implementation: --script_variables, {inputValue: script_variables}, --sparksql_job, {inputValue: sparksql_job}, --job, {inputValue: job}, - --wait_interval, {inputValue: wait_interval} + --wait_interval, {inputValue: wait_interval}, + --ui-metadata-path, {outputPath: MLPipeline UI metadata}, ] env: KFP_POD_NAME: "{{pod.name}}" fileOutputs: job_id: /tmp/kfp/output/dataproc/job_id.txt - MLPipeline UI metadata: /mlpipeline-ui-metadata.json diff --git a/components/gcp/ml_engine/batch_predict/component.yaml b/components/gcp/ml_engine/batch_predict/component.yaml index 9c4b7017bed..bf3a7995cca 100644 --- a/components/gcp/ml_engine/batch_predict/component.yaml +++ b/components/gcp/ml_engine/batch_predict/component.yaml @@ -81,10 +81,10 @@ implementation: --output_data_format, {inputValue: output_data_format}, --prediction_input, {inputValue: prediction_input}, 
--job_id_prefix, {inputValue: job_id_prefix}, - --wait_interval, {inputValue: wait_interval} + --wait_interval, {inputValue: wait_interval}, + --ui-metadata-path, {outputPath: MLPipeline UI metadata}, ] env: KFP_POD_NAME: "{{pod.name}}" fileOutputs: job_id: /tmp/kfp/output/ml_engine/job_id.txt - MLPipeline UI metadata: /mlpipeline-ui-metadata.json diff --git a/components/gcp/ml_engine/deploy/component.yaml b/components/gcp/ml_engine/deploy/component.yaml index 1f3190a1d2f..ee3046aac6d 100644 --- a/components/gcp/ml_engine/deploy/component.yaml +++ b/components/gcp/ml_engine/deploy/component.yaml @@ -109,6 +109,7 @@ implementation: --replace_existing_version, {inputValue: replace_existing_version}, --set_default, {inputValue: set_default}, --wait_interval, {inputValue: wait_interval}, + --ui-metadata-path, {outputPath: MLPipeline UI metadata}, ] env: KFP_POD_NAME: "{{pod.name}}" @@ -116,4 +117,3 @@ implementation: model_uri: /tmp/kfp/output/ml_engine/model_uri.txt model_name: /tmp/kfp/output/ml_engine/model_name.txt version_name: /tmp/kfp/output/ml_engine/version_name.txt - MLPipeline UI metadata: /mlpipeline-ui-metadata.json diff --git a/components/gcp/ml_engine/train/component.yaml b/components/gcp/ml_engine/train/component.yaml index afbbc3b5630..03cf5dff301 100644 --- a/components/gcp/ml_engine/train/component.yaml +++ b/components/gcp/ml_engine/train/component.yaml @@ -118,11 +118,11 @@ implementation: --worker_image_uri, {inputValue: worker_image_uri}, --training_input, {inputValue: training_input}, --job_id_prefix, {inputValue: job_id_prefix}, - --wait_interval, {inputValue: wait_interval} + --wait_interval, {inputValue: wait_interval}, + --ui-metadata-path, {outputPath: MLPipeline UI metadata}, ] env: KFP_POD_NAME: "{{pod.name}}" fileOutputs: job_id: /tmp/kfp/output/ml_engine/job_id.txt job_dir: /tmp/kfp/output/ml_engine/job_dir.txt - MLPipeline UI metadata: /mlpipeline-ui-metadata.json