diff --git a/sdk/python/kfp/compiler/_default_transformers.py b/sdk/python/kfp/compiler/_default_transformers.py index c80311c458a..1072fe49589 100644 --- a/sdk/python/kfp/compiler/_default_transformers.py +++ b/sdk/python/kfp/compiler/_default_transformers.py @@ -12,43 +12,16 @@ # See the License for the specific language governing permissions and # limitations under the License. -import re -from typing import Callable, Dict, Optional, Text - from ..dsl._container_op import BaseOp, ContainerOp -# Pod label indicating the SDK type from which the pipeline is -# generated. By default it's set to kfp. -_SDK_ENV_LABEL = 'pipelines.kubeflow.org/pipeline-sdk-type' -_SDK_ENV_DEFAULT = 'kfp' - -# Common prefix of KFP OOB components url paths. -_OOB_COMPONENT_PATH_PREFIX = 'https://raw.githubusercontent.com/kubeflow/'\ - 'pipelines' - -# Key for component origin path pod label. -COMPONENT_PATH_LABEL_KEY = 'pipelines.kubeflow.org/component_origin_path' - -# Key for component spec digest pod label. -COMPONENT_DIGEST_LABEL_KEY = 'pipelines.kubeflow.org/component_digest' - - -def get_default_telemetry_labels() -> Dict[Text, Text]: - """Returns the default pod labels for telemetry purpose.""" - result = { - _SDK_ENV_LABEL: _SDK_ENV_DEFAULT, - } - return result - - def add_pod_env(op: BaseOp) -> BaseOp: """Adds pod environment info to ContainerOp. """ - if isinstance(op, ContainerOp) and op.pod_labels and op.pod_labels.get('add-pod-env', None) == 'true': + if isinstance(op, ContainerOp) and op.pod_labels and 'add-pod-env' in op.pod_labels and op.pod_labels['add-pod-env'] == 'true': from kubernetes import client as k8s_client op.container.add_env_variable( k8s_client.V1EnvVar( - name='KFP_POD_NAME', + name='KFP_POD_NAME', value_from=k8s_client.V1EnvVarSource( field_ref=k8s_client.V1ObjectFieldSelector( field_path='metadata.name' @@ -57,7 +30,7 @@ def add_pod_env(op: BaseOp) -> BaseOp: ) ).add_env_variable( k8s_client.V1EnvVar( - name='KFP_NAMESPACE', + name='KFP_NAMESPACE', value_from=k8s_client.V1EnvVarSource( field_ref=k8s_client.V1ObjectFieldSelector( field_path='metadata.namespace' @@ -65,56 +38,4 @@ def add_pod_env(op: BaseOp) -> BaseOp: ) ) ) - return op - - -def add_pod_labels(labels: Optional[Dict[Text, Text]] = None) -> Callable: - """Adds provided pod labels to each pod.""" - - def _add_pod_labels(task): - for k, v in labels.items(): - # Only append but not update. - # This is needed to bypass TFX pipelines/components. - if k not in task.pod_labels: - task.add_pod_label(k, v) - return task - - return _add_pod_labels - - -def _remove_suffix(string: Text, suffix: Text) -> Text: - """Removes the suffix from a string.""" - if suffix and string.endswith(suffix): - return string[:-len(suffix)] - else: - return string - - -def add_name_for_oob_components() -> Callable: - """Adds the OOB component name if applicable.""" - - def _add_name_for_oob_components(task): - # Detect the component origin uri in component_ref if exists, and - # attach the OOB component name as a pod label. - component_ref = getattr(task, '_component_ref', None) - if component_ref: - if component_ref.url: - origin_path = _remove_suffix( - component_ref.url, 'component.yaml').rstrip('/') - # Only include KFP OOB components. - if origin_path.startswith(_OOB_COMPONENT_PATH_PREFIX): - origin_path = origin_path.split('/', 7)[-1] - else: - return task - # Clean the label to comply with the k8s label convention. 
- origin_path = re.sub('[^-a-z0-9A-Z_.]', '.', origin_path) - origin_path_label = origin_path[-63:].strip('-_.') - task.add_pod_label(COMPONENT_PATH_LABEL_KEY, origin_path_label) - if component_ref.digest: - # We can only preserve the first 63 digits of the digest. - task.add_pod_label( - COMPONENT_DIGEST_LABEL_KEY, component_ref.digest[:63]) - - return task - - return _add_name_for_oob_components \ No newline at end of file + return op \ No newline at end of file diff --git a/sdk/python/kfp/compiler/compiler.py b/sdk/python/kfp/compiler/compiler.py index ec6bf520258..d3786d11932 100644 --- a/sdk/python/kfp/compiler/compiler.py +++ b/sdk/python/kfp/compiler/compiler.py @@ -27,7 +27,7 @@ from .. import dsl from ._k8s_helper import convert_k8s_obj_to_json, sanitize_k8s_name from ._op_to_template import _op_to_template -from ._default_transformers import add_pod_env, add_pod_labels, add_name_for_oob_components, get_default_telemetry_labels +from ._default_transformers import add_pod_env from ..components.structures import InputSpec from ..components._yaml_utils import dump_yaml @@ -768,8 +768,7 @@ def _create_workflow(self, pipeline_description: Text=None, params_list: List[dsl.PipelineParam]=None, pipeline_conf: dsl.PipelineConf = None, - allow_telemetry: bool = True, - ) -> Dict[Text, Any]: + ) -> Dict[Text, Any]: """ Internal implementation of create_workflow.""" params_list = params_list or [] @@ -829,14 +828,6 @@ def _create_workflow(self, default=param.value) for param in params_list] op_transformers = [add_pod_env] - # By default adds telemetry instruments. Users can opt out toggling - # allow_telemetry. - # Also, TFX pipelines will be bypassed for pipeline compiled by tfx>0.21.4. - if allow_telemetry: - pod_labels = get_default_telemetry_labels() - op_transformers.append(add_pod_labels(pod_labels)) - op_transformers.append(add_name_for_oob_components()) - op_transformers.extend(pipeline_conf.op_transformers) workflow = self._create_pipeline_workflow( @@ -895,14 +886,7 @@ def _compile(self, pipeline_func, pipeline_conf: dsl.PipelineConf = None): """Compile the given pipeline function into workflow.""" return self._create_workflow(pipeline_func=pipeline_func, pipeline_conf=pipeline_conf) - def compile( - self, - pipeline_func, - package_path, - type_check=True, - pipeline_conf: dsl.PipelineConf = None, - allow_telemetry: bool = True, - ): + def compile(self, pipeline_func, package_path, type_check=True, pipeline_conf: dsl.PipelineConf = None): """Compile the given pipeline function into workflow yaml. Args: @@ -910,9 +894,6 @@ def compile( package_path: the output workflow tar.gz file path. for example, "~/a.tar.gz" type_check: whether to enable the type check or not, default: False. pipeline_conf: PipelineConf instance. Can specify op transforms, image pull secrets and other pipeline-level configuration options. Overrides any configuration that may be set by the pipeline. - allow_telemetry: If set to true, two pod labels will be attached to k8s - pods spawned by this pipeline: 1) pipeline SDK style, 2) pipeline random - ID. 
""" import kfp type_check_old_value = kfp.TYPE_CHECK @@ -921,8 +902,7 @@ def compile( self._create_and_write_workflow( pipeline_func=pipeline_func, pipeline_conf=pipeline_conf, - package_path=package_path, - allow_telemetry=allow_telemetry) + package_path=package_path) finally: kfp.TYPE_CHECK = type_check_old_value @@ -969,8 +949,7 @@ def _create_and_write_workflow( pipeline_description: Text=None, params_list: List[dsl.PipelineParam]=None, pipeline_conf: dsl.PipelineConf=None, - package_path: Text=None, - allow_telemetry: bool=True + package_path: Text=None ) -> None: """Compile the given pipeline function and dump it to specified file format.""" workflow = self._create_workflow( @@ -978,8 +957,7 @@ def _create_and_write_workflow( pipeline_name, pipeline_description, params_list, - pipeline_conf, - allow_telemetry) + pipeline_conf) self._write_workflow(workflow, package_path) _validate_workflow(workflow) diff --git a/sdk/python/kfp/compiler/main.py b/sdk/python/kfp/compiler/main.py index 557e64370ea..db20502ecf4 100644 --- a/sdk/python/kfp/compiler/main.py +++ b/sdk/python/kfp/compiler/main.py @@ -23,16 +23,6 @@ import tempfile from deprecated.sphinx import deprecated -def _str2bool(v): - if isinstance(v, bool): - return v - if v.lower() in ('yes', 'true', 't', 'y', '1'): - return True - elif v.lower() in ('no', 'false', 'f', 'n', '0'): - return False - else: - raise argparse.ArgumentTypeError('Boolean value expected.') - def parse_arguments(): """Parse command line arguments.""" @@ -57,16 +47,12 @@ def parse_arguments(): parser.add_argument('--disable-type-check', action='store_true', help='disable the type check, default is enabled.') - parser.add_argument('--disable-telemetry', - action='store_true', - help='disable adding telemetry labels, default is enabled.') args = parser.parse_args() return args -def _compile_pipeline_function( - pipeline_funcs, function_name, output_path, type_check, allow_telemetry): +def _compile_pipeline_function(pipeline_funcs, function_name, output_path, type_check): if len(pipeline_funcs) == 0: raise ValueError('A function with @dsl.pipeline decorator is required in the py file.') @@ -82,8 +68,7 @@ def _compile_pipeline_function( else: pipeline_func = pipeline_funcs[0] - kfp.compiler.Compiler().compile( - pipeline_func, output_path, type_check, allow_telemetry=allow_telemetry) + kfp.compiler.Compiler().compile(pipeline_func, output_path, type_check) class PipelineCollectorContext(): @@ -105,31 +90,26 @@ def __exit__(self, *args): Please switch to compiling pipeline files or functions. 
If you use this feature please create an issue in https://github.com/kubeflow/pipelines/issues .''' ) -def compile_package( - package_path, namespace, function_name, output_path, type_check, allow_telemetry): +def compile_package(package_path, namespace, function_name, output_path, type_check): tmpdir = tempfile.mkdtemp() sys.path.insert(0, tmpdir) try: subprocess.check_call(['python3', '-m', 'pip', 'install', package_path, '-t', tmpdir]) with PipelineCollectorContext() as pipeline_funcs: __import__(namespace) - _compile_pipeline_function( - pipeline_funcs, function_name, output_path, type_check, - allow_telemetry=allow_telemetry) + _compile_pipeline_function(pipeline_funcs, function_name, output_path, type_check) finally: del sys.path[0] shutil.rmtree(tmpdir) -def compile_pyfile(pyfile, function_name, output_path, type_check, allow_telemetry): +def compile_pyfile(pyfile, function_name, output_path, type_check): sys.path.insert(0, os.path.dirname(pyfile)) try: filename = os.path.basename(pyfile) with PipelineCollectorContext() as pipeline_funcs: __import__(os.path.splitext(filename)[0]) - _compile_pipeline_function( - pipeline_funcs, function_name, output_path, type_check, - allow_telemetry=allow_telemetry) + _compile_pipeline_function(pipeline_funcs, function_name, output_path, type_check) finally: del sys.path[0] @@ -140,22 +120,9 @@ def main(): (args.py is not None and args.package is not None)): raise ValueError('Either --py or --package is needed but not both.') if args.py: - compile_pyfile( - args.py, - args.function, - args.output, - not args.disable_type_check, - not args.disable_telemetry - ) + compile_pyfile(args.py, args.function, args.output, not args.disable_type_check) else: if args.namespace is None: raise ValueError('--namespace is required for compiling packages.') - compile_package( - args.package, - args.namespace, - args.function, - args.output, - not args.disable_type_check, - not args.disable_telemetry - ) + compile_package(args.package, args.namespace, args.function, args.output, not args.disable_type_check) diff --git a/sdk/python/tests/compiler/compiler_tests.py b/sdk/python/tests/compiler/compiler_tests.py index 3a175ed15f6..593e5053622 100644 --- a/sdk/python/tests/compiler/compiler_tests.py +++ b/sdk/python/tests/compiler/compiler_tests.py @@ -26,8 +26,6 @@ import unittest import yaml -from kfp import components -from kfp.compiler._default_transformers import COMPONENT_DIGEST_LABEL_KEY, COMPONENT_PATH_LABEL_KEY from kfp.dsl._component import component from kfp.dsl import ContainerOp, pipeline from kfp.dsl.types import Integer, InconsistentTypeException @@ -42,11 +40,6 @@ def some_op(): command=['sleep 1'], ) -_TEST_GCS_DOWNLOAD_COMPONENT_URL = 'https://raw.githubusercontent.com/kubeflow/'\ - 'pipelines/2dac60c400ad8767b452649d08f328df'\ - 'af230f96/components/google-cloud/storage/'\ - 'download/component.yaml' - class TestCompiler(unittest.TestCase): # Define the places of samples covered by unit tests. 
@@ -718,27 +711,6 @@ def some_pipeline(): container = template.get('container', None) if container: self.assertEqual(template['retryStrategy']['limit'], 5) - - def test_oob_component_label(self): - gcs_download_op = components.load_component_from_url( - _TEST_GCS_DOWNLOAD_COMPONENT_URL) - - @dsl.pipeline(name='some_pipeline') - def some_pipeline(): - _download_task = gcs_download_op('gs://some_bucket/some_dir/some_file') - - workflow_dict = compiler.Compiler()._compile(some_pipeline) - - found_download_task = False - for template in workflow_dict['spec']['templates']: - if template.get('container', None): - found_download_task = True - self.assertEqual( - template['metadata']['labels'][COMPONENT_PATH_LABEL_KEY], - 'google-cloud.storage.download') - self.assertIsNotNone( - template['metadata']['labels'].get(COMPONENT_DIGEST_LABEL_KEY)) - self.assertTrue(found_download_task, 'download task not found in workflow.') def test_image_pull_policy(self): def some_op(): diff --git a/sdk/python/tests/compiler/testdata/add_pod_env.yaml b/sdk/python/tests/compiler/testdata/add_pod_env.yaml index d2c9ef6dbcd..a45e831445a 100644 --- a/sdk/python/tests/compiler/testdata/add_pod_env.yaml +++ b/sdk/python/tests/compiler/testdata/add_pod_env.yaml @@ -28,7 +28,6 @@ spec: image: library/bash metadata: labels: - pipelines.kubeflow.org/pipeline-sdk-type: kfp add-pod-env: 'true' name: echo - dag: diff --git a/sdk/python/tests/compiler/testdata/artifact_location.yaml b/sdk/python/tests/compiler/testdata/artifact_location.yaml deleted file mode 100644 index e69de29bb2d..00000000000 diff --git a/sdk/python/tests/compiler/testdata/basic.yaml b/sdk/python/tests/compiler/testdata/basic.yaml index fd1ae511186..4b025b23288 100644 --- a/sdk/python/tests/compiler/testdata/basic.yaml +++ b/sdk/python/tests/compiler/testdata/basic.yaml @@ -57,7 +57,6 @@ spec: - -c image: python:3.5-jessie name: exiting - metadata: {'labels': {'pipelines.kubeflow.org/pipeline-sdk-type': 'kfp'}} - container: args: - python -c "from collections import Counter; words = Counter('{{inputs.parameters.message}}'.split()); @@ -73,9 +72,6 @@ spec: parameters: - name: message name: get-frequent - metadata: - labels: - pipelines.kubeflow.org/pipeline-sdk-type: kfp outputs: artifacts: - name: get-frequent-word @@ -102,8 +98,6 @@ spec: - name: get-frequent-word - name: outputpath metadata: - labels: - pipelines.kubeflow.org/pipeline-sdk-type: kfp annotations: tf-version.cloud-tpus.google.com: "1.12" name: save diff --git a/sdk/python/tests/compiler/testdata/basic_no_decorator.yaml b/sdk/python/tests/compiler/testdata/basic_no_decorator.yaml index 3db83fe038c..ed27c418f0b 100644 --- a/sdk/python/tests/compiler/testdata/basic_no_decorator.yaml +++ b/sdk/python/tests/compiler/testdata/basic_no_decorator.yaml @@ -58,9 +58,6 @@ spec: - -c image: python:3.5-jessie name: exiting - metadata: - labels: - pipelines.kubeflow.org/pipeline-sdk-type: kfp - container: args: - python -c "from collections import Counter; words = Counter('{{inputs.parameters.message}}'.split()); @@ -76,9 +73,6 @@ spec: parameters: - name: message name: get-frequent - metadata: - labels: - pipelines.kubeflow.org/pipeline-sdk-type: kfp outputs: artifacts: - name: get-frequent-word @@ -105,8 +99,6 @@ spec: - name: get-frequent-word - name: outputpath metadata: - labels: - pipelines.kubeflow.org/pipeline-sdk-type: kfp annotations: tf-version.cloud-tpus.google.com: "1.12" name: save diff --git a/sdk/python/tests/compiler/testdata/coin.yaml b/sdk/python/tests/compiler/testdata/coin.yaml index 
e0461d44997..db0fb7cab48 100644 --- a/sdk/python/tests/compiler/testdata/coin.yaml +++ b/sdk/python/tests/compiler/testdata/coin.yaml @@ -75,9 +75,6 @@ spec: - -c image: python:alpine3.6 name: flip - metadata: - labels: - pipelines.kubeflow.org/pipeline-sdk-type: kfp outputs: artifacts: - name: flip-output @@ -95,9 +92,6 @@ spec: - -c image: python:alpine3.6 name: flip-again - metadata: - labels: - pipelines.kubeflow.org/pipeline-sdk-type: kfp outputs: artifacts: - name: flip-again-output @@ -135,9 +129,6 @@ spec: parameters: - name: flip-again-output name: print1 - metadata: - labels: - pipelines.kubeflow.org/pipeline-sdk-type: kfp - container: command: - echo @@ -147,6 +138,3 @@ spec: parameters: - name: flip-again-output name: print2 - metadata: - labels: - pipelines.kubeflow.org/pipeline-sdk-type: kfp diff --git a/sdk/python/tests/compiler/testdata/compose.yaml b/sdk/python/tests/compiler/testdata/compose.yaml index 52a1a7d13a2..3402b71526b 100644 --- a/sdk/python/tests/compiler/testdata/compose.yaml +++ b/sdk/python/tests/compiler/testdata/compose.yaml @@ -36,9 +36,6 @@ spec: parameters: - name: url name: download - metadata: - labels: - pipelines.kubeflow.org/pipeline-sdk-type: kfp outputs: artifacts: - name: download-downloaded @@ -90,9 +87,6 @@ spec: parameters: - name: download-downloaded name: get-frequent - metadata: - labels: - pipelines.kubeflow.org/pipeline-sdk-type: kfp outputs: artifacts: - name: get-frequent-word @@ -114,6 +108,3 @@ spec: - name: get-frequent-word - name: outputpath name: save - metadata: - labels: - pipelines.kubeflow.org/pipeline-sdk-type: kfp diff --git a/sdk/python/tests/compiler/testdata/default_value.yaml b/sdk/python/tests/compiler/testdata/default_value.yaml index 3485d912d86..b728c3b9def 100644 --- a/sdk/python/tests/compiler/testdata/default_value.yaml +++ b/sdk/python/tests/compiler/testdata/default_value.yaml @@ -56,9 +56,6 @@ spec: parameters: - name: url name: download - metadata: - labels: - pipelines.kubeflow.org/pipeline-sdk-type: kfp outputs: artifacts: - name: download-downloaded @@ -78,6 +75,3 @@ spec: parameters: - name: download-downloaded name: echo - metadata: - labels: - pipelines.kubeflow.org/pipeline-sdk-type: kfp diff --git a/sdk/python/tests/compiler/testdata/imagepullsecrets.yaml b/sdk/python/tests/compiler/testdata/imagepullsecrets.yaml index 18635a0a2a4..111c70eaddc 100644 --- a/sdk/python/tests/compiler/testdata/imagepullsecrets.yaml +++ b/sdk/python/tests/compiler/testdata/imagepullsecrets.yaml @@ -25,9 +25,6 @@ spec: parameters: - name: message name: get-frequent - metadata: - labels: - pipelines.kubeflow.org/pipeline-sdk-type: kfp outputs: artifacts: - name: get-frequent-word diff --git a/sdk/python/tests/compiler/testdata/input_artifact_raw_value.yaml b/sdk/python/tests/compiler/testdata/input_artifact_raw_value.yaml index be6638c8181..52c3df06063 100644 --- a/sdk/python/tests/compiler/testdata/input_artifact_raw_value.yaml +++ b/sdk/python/tests/compiler/testdata/input_artifact_raw_value.yaml @@ -22,9 +22,6 @@ spec: raw: data: Constant artifact value name: component-with-inline-input-artifact - metadata: - labels: - pipelines.kubeflow.org/pipeline-sdk-type: kfp - container: command: - cat @@ -37,9 +34,6 @@ spec: raw: data: Constant artifact value name: component-with-input-artifact - metadata: - labels: - pipelines.kubeflow.org/pipeline-sdk-type: kfp - container: command: - cat @@ -52,9 +46,6 @@ spec: raw: data: hard-coded artifact value name: component-with-input-artifact-2 - metadata: - labels: - 
pipelines.kubeflow.org/pipeline-sdk-type: kfp - container: command: - cat @@ -67,9 +58,6 @@ spec: raw: data: Text from a file with hard-coded artifact value name: component-with-input-artifact-3 - metadata: - labels: - pipelines.kubeflow.org/pipeline-sdk-type: kfp - dag: tasks: - name: component-with-inline-input-artifact diff --git a/sdk/python/tests/compiler/testdata/loop_over_lightweight_output.yaml b/sdk/python/tests/compiler/testdata/loop_over_lightweight_output.yaml index d231b4e6566..6139c6503df 100644 --- a/sdk/python/tests/compiler/testdata/loop_over_lightweight_output.yaml +++ b/sdk/python/tests/compiler/testdata/loop_over_lightweight_output.yaml @@ -44,8 +44,6 @@ - "name": |- produce-list-data_list-loop-item "metadata": - labels: - pipelines.kubeflow.org/pipeline-sdk-type: kfp "annotations": "pipelines.kubeflow.org/component_spec": |- {"inputs": [{"name": "data"}], "name": "Consume data"} @@ -106,8 +104,6 @@ "image": |- busybox "metadata": - labels: - pipelines.kubeflow.org/pipeline-sdk-type: kfp "annotations": "pipelines.kubeflow.org/component_spec": |- {"name": "Produce list", "outputs": [{"name": "data_list"}]} diff --git a/sdk/python/tests/compiler/testdata/parallelfor_item_argument_resolving.yaml b/sdk/python/tests/compiler/testdata/parallelfor_item_argument_resolving.yaml index f8231e3e15a..9031d0344a4 100644 --- a/sdk/python/tests/compiler/testdata/parallelfor_item_argument_resolving.yaml +++ b/sdk/python/tests/compiler/testdata/parallelfor_item_argument_resolving.yaml @@ -14,7 +14,7 @@ spec: container: args: - "--param1" - - "{{inputs.parameters.produce-list-of-strings-Output}}" + - "{{inputs.parameters.produce-list-of-strings-output}}" command: - python3 - "-u" @@ -27,24 +27,36 @@ spec: _parser = argparse.ArgumentParser(prog='Consume', description='') _parser.add_argument("--param1", dest="param1", type=str, required=True, default=argparse.SUPPRESS) _parsed_args = vars(_parser.parse_args()) + _output_files = _parsed_args.pop("_output_paths", []) _outputs = consume(**_parsed_args) - image: "python:3.7" + + _output_serializers = [ + + ] + + import os + for idx, output_file in enumerate(_output_files): + try: + os.makedirs(os.path.dirname(output_file)) + except OSError: + pass + with open(output_file, 'w') as f: + f.write(_output_serializers[idx](_outputs[idx])) + image: "tensorflow/tensorflow:1.13.2-py3" inputs: parameters: - - name: produce-list-of-strings-Output + name: produce-list-of-strings-output metadata: - labels: - pipelines.kubeflow.org/pipeline-sdk-type: kfp - annotations: + annotations: pipelines.kubeflow.org/component_spec: "{\"inputs\": [{\"name\": \"param1\"}], \"name\": \"Consume\"}" name: consume - container: args: - "--param1" - - "{{inputs.parameters.produce-list-of-strings-Output-loop-item}}" + - "{{inputs.parameters.produce-list-of-strings-output-loop-item}}" command: - python3 - "-u" @@ -57,16 +69,28 @@ spec: _parser = argparse.ArgumentParser(prog='Consume', description='') _parser.add_argument("--param1", dest="param1", type=str, required=True, default=argparse.SUPPRESS) _parsed_args = vars(_parser.parse_args()) + _output_files = _parsed_args.pop("_output_paths", []) _outputs = consume(**_parsed_args) - image: "python:3.7" + + _output_serializers = [ + + ] + + import os + for idx, output_file in enumerate(_output_files): + try: + os.makedirs(os.path.dirname(output_file)) + except OSError: + pass + with open(output_file, 'w') as f: + f.write(_output_serializers[idx](_outputs[idx])) + image: "tensorflow/tensorflow:1.13.2-py3" inputs: parameters: - - 
name: produce-list-of-strings-Output-loop-item + name: produce-list-of-strings-output-loop-item metadata: - labels: - pipelines.kubeflow.org/pipeline-sdk-type: kfp annotations: pipelines.kubeflow.org/component_spec: "{\"inputs\": [{\"name\": \"param1\"}], \"name\": \"Consume\"}" name: consume-2 @@ -74,7 +98,7 @@ spec: container: args: - "--param1" - - "{{inputs.parameters.produce-str-Output}}" + - "{{inputs.parameters.produce-str-output}}" command: - python3 - "-u" @@ -87,16 +111,28 @@ spec: _parser = argparse.ArgumentParser(prog='Consume', description='') _parser.add_argument("--param1", dest="param1", type=str, required=True, default=argparse.SUPPRESS) _parsed_args = vars(_parser.parse_args()) + _output_files = _parsed_args.pop("_output_paths", []) _outputs = consume(**_parsed_args) - image: "python:3.7" + + _output_serializers = [ + + ] + + import os + for idx, output_file in enumerate(_output_files): + try: + os.makedirs(os.path.dirname(output_file)) + except OSError: + pass + with open(output_file, 'w') as f: + f.write(_output_serializers[idx](_outputs[idx])) + image: "tensorflow/tensorflow:1.13.2-py3" inputs: parameters: - - name: produce-str-Output + name: produce-str-output metadata: - labels: - pipelines.kubeflow.org/pipeline-sdk-type: kfp annotations: pipelines.kubeflow.org/component_spec: "{\"inputs\": [{\"name\": \"param1\"}], \"name\": \"Consume\"}" name: consume-3 @@ -104,7 +140,7 @@ spec: container: args: - "--param1" - - "{{inputs.parameters.produce-list-of-ints-Output}}" + - "{{inputs.parameters.produce-list-of-ints-output}}" command: - python3 - "-u" @@ -117,16 +153,28 @@ spec: _parser = argparse.ArgumentParser(prog='Consume', description='') _parser.add_argument("--param1", dest="param1", type=str, required=True, default=argparse.SUPPRESS) _parsed_args = vars(_parser.parse_args()) + _output_files = _parsed_args.pop("_output_paths", []) _outputs = consume(**_parsed_args) - image: "python:3.7" + + _output_serializers = [ + + ] + + import os + for idx, output_file in enumerate(_output_files): + try: + os.makedirs(os.path.dirname(output_file)) + except OSError: + pass + with open(output_file, 'w') as f: + f.write(_output_serializers[idx](_outputs[idx])) + image: "tensorflow/tensorflow:1.13.2-py3" inputs: parameters: - - name: produce-list-of-ints-Output + name: produce-list-of-ints-output metadata: - labels: - pipelines.kubeflow.org/pipeline-sdk-type: kfp annotations: pipelines.kubeflow.org/component_spec: "{\"inputs\": [{\"name\": \"param1\"}], \"name\": \"Consume\"}" name: consume-4 @@ -134,7 +182,7 @@ spec: container: args: - "--param1" - - "{{inputs.parameters.produce-list-of-ints-Output-loop-item}}" + - "{{inputs.parameters.produce-list-of-ints-output-loop-item}}" command: - python3 - "-u" @@ -147,16 +195,28 @@ spec: _parser = argparse.ArgumentParser(prog='Consume', description='') _parser.add_argument("--param1", dest="param1", type=str, required=True, default=argparse.SUPPRESS) _parsed_args = vars(_parser.parse_args()) + _output_files = _parsed_args.pop("_output_paths", []) _outputs = consume(**_parsed_args) - image: "python:3.7" + + _output_serializers = [ + + ] + + import os + for idx, output_file in enumerate(_output_files): + try: + os.makedirs(os.path.dirname(output_file)) + except OSError: + pass + with open(output_file, 'w') as f: + f.write(_output_serializers[idx](_outputs[idx])) + image: "tensorflow/tensorflow:1.13.2-py3" inputs: parameters: - - name: produce-list-of-ints-Output-loop-item + name: produce-list-of-ints-output-loop-item metadata: - labels: - 
pipelines.kubeflow.org/pipeline-sdk-type: kfp annotations: pipelines.kubeflow.org/component_spec: "{\"inputs\": [{\"name\": \"param1\"}], \"name\": \"Consume\"}" name: consume-5 @@ -164,7 +224,7 @@ spec: container: args: - "--param1" - - "{{inputs.parameters.produce-list-of-dicts-Output}}" + - "{{inputs.parameters.produce-list-of-dicts-output}}" command: - python3 - "-u" @@ -177,16 +237,28 @@ spec: _parser = argparse.ArgumentParser(prog='Consume', description='') _parser.add_argument("--param1", dest="param1", type=str, required=True, default=argparse.SUPPRESS) _parsed_args = vars(_parser.parse_args()) + _output_files = _parsed_args.pop("_output_paths", []) _outputs = consume(**_parsed_args) - image: "python:3.7" + + _output_serializers = [ + + ] + + import os + for idx, output_file in enumerate(_output_files): + try: + os.makedirs(os.path.dirname(output_file)) + except OSError: + pass + with open(output_file, 'w') as f: + f.write(_output_serializers[idx](_outputs[idx])) + image: "tensorflow/tensorflow:1.13.2-py3" inputs: parameters: - - name: produce-list-of-dicts-Output + name: produce-list-of-dicts-output metadata: - labels: - pipelines.kubeflow.org/pipeline-sdk-type: kfp annotations: pipelines.kubeflow.org/component_spec: "{\"inputs\": [{\"name\": \"param1\"}], \"name\": \"Consume\"}" name: consume-6 @@ -194,7 +266,7 @@ spec: container: args: - "--param1" - - "{{inputs.parameters.produce-list-of-dicts-Output-loop-item-subvar-aaa}}" + - "{{inputs.parameters.produce-list-of-dicts-output-loop-item-subvar-aaa}}" command: - python3 - "-u" @@ -207,16 +279,28 @@ spec: _parser = argparse.ArgumentParser(prog='Consume', description='') _parser.add_argument("--param1", dest="param1", type=str, required=True, default=argparse.SUPPRESS) _parsed_args = vars(_parser.parse_args()) + _output_files = _parsed_args.pop("_output_paths", []) _outputs = consume(**_parsed_args) - image: "python:3.7" + + _output_serializers = [ + + ] + + import os + for idx, output_file in enumerate(_output_files): + try: + os.makedirs(os.path.dirname(output_file)) + except OSError: + pass + with open(output_file, 'w') as f: + f.write(_output_serializers[idx](_outputs[idx])) + image: "tensorflow/tensorflow:1.13.2-py3" inputs: parameters: - - name: produce-list-of-dicts-Output-loop-item-subvar-aaa + name: produce-list-of-dicts-output-loop-item-subvar-aaa metadata: - labels: - pipelines.kubeflow.org/pipeline-sdk-type: kfp annotations: pipelines.kubeflow.org/component_spec: "{\"inputs\": [{\"name\": \"param1\"}], \"name\": \"Consume\"}" name: consume-7 @@ -227,34 +311,34 @@ spec: arguments: parameters: - - name: produce-list-of-strings-Output - value: "{{inputs.parameters.produce-list-of-strings-Output}}" + name: produce-list-of-strings-output + value: "{{inputs.parameters.produce-list-of-strings-output}}" name: consume template: consume - arguments: parameters: - - name: produce-list-of-strings-Output-loop-item - value: "{{inputs.parameters.produce-list-of-strings-Output-loop-item}}" + name: produce-list-of-strings-output-loop-item + value: "{{inputs.parameters.produce-list-of-strings-output-loop-item}}" name: consume-2 template: consume-2 - arguments: parameters: - - name: produce-str-Output - value: "{{inputs.parameters.produce-str-Output}}" + name: produce-str-output + value: "{{inputs.parameters.produce-str-output}}" name: consume-3 template: consume-3 inputs: parameters: - - name: produce-list-of-strings-Output + name: produce-list-of-strings-output - - name: produce-list-of-strings-Output-loop-item + name: 
produce-list-of-strings-output-loop-item - - name: produce-str-Output + name: produce-str-output name: for-loop-for-loop-00000001-1 - dag: @@ -263,24 +347,24 @@ spec: arguments: parameters: - - name: produce-list-of-ints-Output - value: "{{inputs.parameters.produce-list-of-ints-Output}}" + name: produce-list-of-ints-output + value: "{{inputs.parameters.produce-list-of-ints-output}}" name: consume-4 template: consume-4 - arguments: parameters: - - name: produce-list-of-ints-Output-loop-item - value: "{{inputs.parameters.produce-list-of-ints-Output-loop-item}}" + name: produce-list-of-ints-output-loop-item + value: "{{inputs.parameters.produce-list-of-ints-output-loop-item}}" name: consume-5 template: consume-5 inputs: parameters: - - name: produce-list-of-ints-Output + name: produce-list-of-ints-output - - name: produce-list-of-ints-Output-loop-item + name: produce-list-of-ints-output-loop-item name: for-loop-for-loop-00000002-2 - dag: @@ -289,24 +373,24 @@ spec: arguments: parameters: - - name: produce-list-of-dicts-Output - value: "{{inputs.parameters.produce-list-of-dicts-Output}}" + name: produce-list-of-dicts-output + value: "{{inputs.parameters.produce-list-of-dicts-output}}" name: consume-6 template: consume-6 - arguments: parameters: - - name: produce-list-of-dicts-Output-loop-item-subvar-aaa - value: "{{inputs.parameters.produce-list-of-dicts-Output-loop-item-subvar-aaa}}" + name: produce-list-of-dicts-output-loop-item-subvar-aaa + value: "{{inputs.parameters.produce-list-of-dicts-output-loop-item-subvar-aaa}}" name: consume-7 template: consume-7 inputs: parameters: - - name: produce-list-of-dicts-Output + name: produce-list-of-dicts-output - - name: produce-list-of-dicts-Output-loop-item-subvar-aaa + name: produce-list-of-dicts-output-loop-item-subvar-aaa name: for-loop-for-loop-00000003-3 - dag: @@ -315,48 +399,48 @@ spec: arguments: parameters: - - name: produce-list-of-strings-Output - value: "{{tasks.produce-list-of-strings.outputs.parameters.produce-list-of-strings-Output}}" + name: produce-list-of-strings-output + value: "{{tasks.produce-list-of-strings.outputs.parameters.produce-list-of-strings-output}}" - - name: produce-list-of-strings-Output-loop-item + name: produce-list-of-strings-output-loop-item value: "{{item}}" - - name: produce-str-Output - value: "{{tasks.produce-str.outputs.parameters.produce-str-Output}}" + name: produce-str-output + value: "{{tasks.produce-str.outputs.parameters.produce-str-output}}" dependencies: - produce-list-of-strings - produce-str name: for-loop-for-loop-00000001-1 template: for-loop-for-loop-00000001-1 - withParam: "{{tasks.produce-list-of-strings.outputs.parameters.produce-list-of-strings-Output}}" + withParam: "{{tasks.produce-list-of-strings.outputs.parameters.produce-list-of-strings-output}}" - arguments: parameters: - - name: produce-list-of-ints-Output - value: "{{tasks.produce-list-of-ints.outputs.parameters.produce-list-of-ints-Output}}" + name: produce-list-of-ints-output + value: "{{tasks.produce-list-of-ints.outputs.parameters.produce-list-of-ints-output}}" - - name: produce-list-of-ints-Output-loop-item + name: produce-list-of-ints-output-loop-item value: "{{item}}" dependencies: - produce-list-of-ints name: for-loop-for-loop-00000002-2 template: for-loop-for-loop-00000002-2 - withParam: "{{tasks.produce-list-of-ints.outputs.parameters.produce-list-of-ints-Output}}" + withParam: "{{tasks.produce-list-of-ints.outputs.parameters.produce-list-of-ints-output}}" - arguments: parameters: - - name: produce-list-of-dicts-Output - 
value: "{{tasks.produce-list-of-dicts.outputs.parameters.produce-list-of-dicts-Output}}" + name: produce-list-of-dicts-output + value: "{{tasks.produce-list-of-dicts.outputs.parameters.produce-list-of-dicts-output}}" - - name: produce-list-of-dicts-Output-loop-item-subvar-aaa + name: produce-list-of-dicts-output-loop-item-subvar-aaa value: "{{item.aaa}}" dependencies: - produce-list-of-dicts name: for-loop-for-loop-00000003-3 template: for-loop-for-loop-00000003-3 - withParam: "{{tasks.produce-list-of-dicts.outputs.parameters.produce-list-of-dicts-Output}}" + withParam: "{{tasks.produce-list-of-dicts.outputs.parameters.produce-list-of-dicts-output}}" - name: produce-list-of-dicts template: produce-list-of-dicts @@ -380,7 +464,7 @@ spec: - "-u" - "-c" - | - def produce_list_of_dicts(): + def produce_list_of_dicts() : return ([{"aaa": "aaa1", "bbb": "bbb1"}, {"aaa": "aaa2", "bbb": "bbb2"}],) def _serialize_json(obj) -> str: @@ -392,7 +476,7 @@ spec: return obj.to_struct() else: raise TypeError("Object of type '%s' is not JSON serializable and does not have .to_struct() method." % obj.__class__.__name__) - return json.dumps(obj, default=default_serializer, sort_keys=True) + return json.dumps(obj, default=default_serializer) import argparse _parser = argparse.ArgumentParser(prog='Produce list of dicts', description='') @@ -417,21 +501,19 @@ spec: pass with open(output_file, 'w') as f: f.write(_output_serializers[idx](_outputs[idx])) - image: "python:3.7" + image: "tensorflow/tensorflow:1.13.2-py3" metadata: - labels: - pipelines.kubeflow.org/pipeline-sdk-type: kfp annotations: pipelines.kubeflow.org/component_spec: "{\"name\": \"Produce list of dicts\", \"outputs\": [{\"name\": \"Output\", \"type\": \"JsonArray\"}]}" name: produce-list-of-dicts outputs: artifacts: - - name: produce-list-of-dicts-Output + name: produce-list-of-dicts-output path: /tmp/outputs/Output/data parameters: - - name: produce-list-of-dicts-Output + name: produce-list-of-dicts-output valueFrom: path: /tmp/outputs/Output/data - @@ -444,7 +526,7 @@ spec: - "-u" - "-c" - | - def produce_list_of_ints(): + def produce_list_of_ints() : return ([1234567890, 987654321],) def _serialize_json(obj) -> str: @@ -456,7 +538,7 @@ spec: return obj.to_struct() else: raise TypeError("Object of type '%s' is not JSON serializable and does not have .to_struct() method." 
% obj.__class__.__name__) - return json.dumps(obj, default=default_serializer, sort_keys=True) + return json.dumps(obj, default=default_serializer) import argparse _parser = argparse.ArgumentParser(prog='Produce list of ints', description='') @@ -481,21 +563,19 @@ spec: pass with open(output_file, 'w') as f: f.write(_output_serializers[idx](_outputs[idx])) - image: "python:3.7" + image: "tensorflow/tensorflow:1.13.2-py3" metadata: - labels: - pipelines.kubeflow.org/pipeline-sdk-type: kfp annotations: pipelines.kubeflow.org/component_spec: "{\"name\": \"Produce list of ints\", \"outputs\": [{\"name\": \"Output\", \"type\": \"JsonArray\"}]}" name: produce-list-of-ints outputs: artifacts: - - name: produce-list-of-ints-Output + name: produce-list-of-ints-output path: /tmp/outputs/Output/data parameters: - - name: produce-list-of-ints-Output + name: produce-list-of-ints-output valueFrom: path: /tmp/outputs/Output/data - @@ -508,7 +588,7 @@ spec: - "-u" - "-c" - | - def produce_list_of_strings(): + def produce_list_of_strings() : return (["a", "z"],) def _serialize_json(obj) -> str: @@ -520,7 +600,7 @@ spec: return obj.to_struct() else: raise TypeError("Object of type '%s' is not JSON serializable and does not have .to_struct() method." % obj.__class__.__name__) - return json.dumps(obj, default=default_serializer, sort_keys=True) + return json.dumps(obj, default=default_serializer) import argparse _parser = argparse.ArgumentParser(prog='Produce list of strings', description='') @@ -545,21 +625,19 @@ spec: pass with open(output_file, 'w') as f: f.write(_output_serializers[idx](_outputs[idx])) - image: "python:3.7" + image: "tensorflow/tensorflow:1.13.2-py3" metadata: - labels: - pipelines.kubeflow.org/pipeline-sdk-type: kfp annotations: pipelines.kubeflow.org/component_spec: "{\"name\": \"Produce list of strings\", \"outputs\": [{\"name\": \"Output\", \"type\": \"JsonArray\"}]}" name: produce-list-of-strings outputs: artifacts: - - name: produce-list-of-strings-Output + name: produce-list-of-strings-output path: /tmp/outputs/Output/data parameters: - - name: produce-list-of-strings-Output + name: produce-list-of-strings-output valueFrom: path: /tmp/outputs/Output/data - @@ -572,7 +650,7 @@ spec: - "-u" - "-c" - | - def produce_str(): + def produce_str() : return "Hello" def _serialize_str(str_value: str) -> str: @@ -603,20 +681,18 @@ spec: pass with open(output_file, 'w') as f: f.write(_output_serializers[idx](_outputs[idx])) - image: "python:3.7" + image: "tensorflow/tensorflow:1.13.2-py3" metadata: - labels: - pipelines.kubeflow.org/pipeline-sdk-type: kfp annotations: pipelines.kubeflow.org/component_spec: "{\"name\": \"Produce str\", \"outputs\": [{\"name\": \"Output\", \"type\": \"String\"}]}" name: produce-str outputs: artifacts: - - name: produce-str-Output + name: produce-str-output path: /tmp/outputs/Output/data parameters: - - name: produce-str-Output + name: produce-str-output valueFrom: path: /tmp/outputs/Output/data diff --git a/sdk/python/tests/compiler/testdata/param_op_transform.yaml b/sdk/python/tests/compiler/testdata/param_op_transform.yaml index 09880f9e013..b0da3751f66 100644 --- a/sdk/python/tests/compiler/testdata/param_op_transform.yaml +++ b/sdk/python/tests/compiler/testdata/param_op_transform.yaml @@ -21,7 +21,6 @@ spec: - name: param metadata: labels: - pipelines.kubeflow.org/pipeline-sdk-type: kfp param: '{{inputs.parameters.param}}' name: cop - dag: diff --git a/sdk/python/tests/compiler/testdata/param_substitutions.yaml 
b/sdk/python/tests/compiler/testdata/param_substitutions.yaml index cd74997e64c..76f1f1a9f8b 100644 --- a/sdk/python/tests/compiler/testdata/param_substitutions.yaml +++ b/sdk/python/tests/compiler/testdata/param_substitutions.yaml @@ -23,17 +23,11 @@ spec: parameters: - name: create-volume-name name: cop - metadata: - labels: - pipelines.kubeflow.org/pipeline-sdk-type: kfp volumes: - name: create-volume persistentVolumeClaim: claimName: '{{inputs.parameters.create-volume-name}}' - name: create-volume - metadata: - labels: - pipelines.kubeflow.org/pipeline-sdk-type: kfp outputs: parameters: - name: create-volume-manifest diff --git a/sdk/python/tests/compiler/testdata/pipelineparams.yaml b/sdk/python/tests/compiler/testdata/pipelineparams.yaml index a07dfb8495a..174fdeb1493 100644 --- a/sdk/python/tests/compiler/testdata/pipelineparams.yaml +++ b/sdk/python/tests/compiler/testdata/pipelineparams.yaml @@ -29,9 +29,6 @@ spec: value: '10' templates: - name: download - metadata: - labels: - pipelines.kubeflow.org/pipeline-sdk-type: kfp inputs: parameters: - name: sleep_ms @@ -57,9 +54,6 @@ spec: args: - -text="hello world" - name: echo - metadata: - labels: - pipelines.kubeflow.org/pipeline-sdk-type: kfp inputs: parameters: - name: download-downloaded diff --git a/sdk/python/tests/compiler/testdata/preemptible_tpu_gpu.yaml b/sdk/python/tests/compiler/testdata/preemptible_tpu_gpu.yaml index d2a1c68bc57..85fb22644c2 100644 --- a/sdk/python/tests/compiler/testdata/preemptible_tpu_gpu.yaml +++ b/sdk/python/tests/compiler/testdata/preemptible_tpu_gpu.yaml @@ -32,9 +32,6 @@ spec: limits: nvidia.com/gpu: 1 name: flip - metadata: - labels: - pipelines.kubeflow.org/pipeline-sdk-type: kfp outputs: artifacts: - name: flip-output diff --git a/sdk/python/tests/compiler/testdata/recursive_do_while.yaml b/sdk/python/tests/compiler/testdata/recursive_do_while.yaml index 80e58dd586e..3ed3bbf88f3 100644 --- a/sdk/python/tests/compiler/testdata/recursive_do_while.yaml +++ b/sdk/python/tests/compiler/testdata/recursive_do_while.yaml @@ -32,9 +32,6 @@ spec: - -c image: python:alpine3.6 name: flip - metadata: - labels: - pipelines.kubeflow.org/pipeline-sdk-type: kfp outputs: artifacts: - name: flip-output @@ -52,9 +49,6 @@ spec: - -c image: python:alpine3.6 name: flip-2 - metadata: - labels: - pipelines.kubeflow.org/pipeline-sdk-type: kfp outputs: artifacts: - name: flip-2-output @@ -68,9 +62,6 @@ spec: - -c image: python:alpine3.6 name: flip-3 - metadata: - labels: - pipelines.kubeflow.org/pipeline-sdk-type: kfp outputs: artifacts: - name: flip-3-output @@ -138,9 +129,6 @@ spec: parameters: - name: flip-output name: print - metadata: - labels: - pipelines.kubeflow.org/pipeline-sdk-type: kfp - container: command: - echo @@ -150,6 +138,3 @@ spec: parameters: - name: flip-output name: print-2 - metadata: - labels: - pipelines.kubeflow.org/pipeline-sdk-type: kfp diff --git a/sdk/python/tests/compiler/testdata/recursive_while.yaml b/sdk/python/tests/compiler/testdata/recursive_while.yaml index e75b75b3939..eeed7c8ee74 100644 --- a/sdk/python/tests/compiler/testdata/recursive_while.yaml +++ b/sdk/python/tests/compiler/testdata/recursive_while.yaml @@ -46,9 +46,6 @@ spec: - -c image: python:alpine3.6 name: flip - metadata: - labels: - pipelines.kubeflow.org/pipeline-sdk-type: kfp outputs: artifacts: - name: flip-output @@ -66,9 +63,6 @@ spec: - -c image: python:alpine3.6 name: flip-2 - metadata: - labels: - pipelines.kubeflow.org/pipeline-sdk-type: kfp outputs: artifacts: - name: flip-2-output @@ -82,9 +76,6 @@ spec: - 
-c image: python:alpine3.6 name: flip-3 - metadata: - labels: - pipelines.kubeflow.org/pipeline-sdk-type: kfp outputs: artifacts: - name: flip-3-output @@ -140,9 +131,6 @@ spec: parameters: - name: flip-output name: print - metadata: - labels: - pipelines.kubeflow.org/pipeline-sdk-type: kfp - container: command: - echo @@ -152,6 +140,3 @@ spec: parameters: - name: flip-output name: print-2 - metadata: - labels: - pipelines.kubeflow.org/pipeline-sdk-type: kfp diff --git a/sdk/python/tests/compiler/testdata/resourceop_basic.yaml b/sdk/python/tests/compiler/testdata/resourceop_basic.yaml index 080f07916c9..666606a62ab 100644 --- a/sdk/python/tests/compiler/testdata/resourceop_basic.yaml +++ b/sdk/python/tests/compiler/testdata/resourceop_basic.yaml @@ -27,9 +27,6 @@ spec: parameters: - name: create-my-secret-name name: cop - metadata: - labels: - pipelines.kubeflow.org/pipeline-sdk-type: kfp volumes: - name: my-secret secret: @@ -39,9 +36,6 @@ spec: - name: password - name: username name: create-my-secret - metadata: - labels: - pipelines.kubeflow.org/pipeline-sdk-type: kfp outputs: parameters: - name: create-my-secret-manifest diff --git a/sdk/python/tests/compiler/testdata/sidecar.yaml b/sdk/python/tests/compiler/testdata/sidecar.yaml index a6803534882..cab18f2784f 100644 --- a/sdk/python/tests/compiler/testdata/sidecar.yaml +++ b/sdk/python/tests/compiler/testdata/sidecar.yaml @@ -43,13 +43,7 @@ spec: command: - sh - "-c" - metadata: - labels: - pipelines.kubeflow.org/pipeline-sdk-type: kfp - name: echo - metadata: - labels: - pipelines.kubeflow.org/pipeline-sdk-type: kfp inputs: parameters: - name: download-downloaded diff --git a/sdk/python/tests/compiler/testdata/timeout.yaml b/sdk/python/tests/compiler/testdata/timeout.yaml index 26d8bea0420..95895f1671d 100644 --- a/sdk/python/tests/compiler/testdata/timeout.yaml +++ b/sdk/python/tests/compiler/testdata/timeout.yaml @@ -30,9 +30,6 @@ spec: - -c image: python:alpine3.6 name: random-failure - metadata: - labels: - pipelines.kubeflow.org/pipeline-sdk-type: kfp - container: args: - import random; import sys; exit_code = random.choice([0,1]); print(exit_code); @@ -42,6 +39,3 @@ spec: - -c image: python:alpine3.6 name: random-failure-2 - metadata: - labels: - pipelines.kubeflow.org/pipeline-sdk-type: kfp diff --git a/sdk/python/tests/compiler/testdata/volume.yaml b/sdk/python/tests/compiler/testdata/volume.yaml index 870309114bf..6e7026b31f8 100644 --- a/sdk/python/tests/compiler/testdata/volume.yaml +++ b/sdk/python/tests/compiler/testdata/volume.yaml @@ -40,9 +40,6 @@ spec: - mountPath: /secret/gcp-credentials name: gcp-credentials name: download - metadata: - labels: - pipelines.kubeflow.org/pipeline-sdk-type: kfp outputs: artifacts: - name: download-downloaded @@ -66,9 +63,6 @@ spec: parameters: - name: download-downloaded name: echo - metadata: - labels: - pipelines.kubeflow.org/pipeline-sdk-type: kfp - dag: tasks: - name: download diff --git a/sdk/python/tests/compiler/testdata/volume_snapshotop_rokurl.yaml b/sdk/python/tests/compiler/testdata/volume_snapshotop_rokurl.yaml index be305de17f3..2d9e6570671 100644 --- a/sdk/python/tests/compiler/testdata/volume_snapshotop_rokurl.yaml +++ b/sdk/python/tests/compiler/testdata/volume_snapshotop_rokurl.yaml @@ -16,9 +16,6 @@ spec: parameters: - name: create-volume-1-name name: create-snapshot-1 - metadata: - labels: - pipelines.kubeflow.org/pipeline-sdk-type: kfp outputs: parameters: - name: create-snapshot-1-manifest @@ -40,9 +37,6 @@ spec: parameters: - name: create-volume-2-name name: 
create-snapshot-2 - metadata: - labels: - pipelines.kubeflow.org/pipeline-sdk-type: kfp outputs: parameters: - name: create-snapshot-2-manifest @@ -64,9 +58,6 @@ spec: parameters: - name: rok_url name: create-volume-1 - metadata: - labels: - pipelines.kubeflow.org/pipeline-sdk-type: kfp outputs: parameters: - name: create-volume-1-manifest @@ -89,9 +80,6 @@ spec: - name: create-snapshot-1-name - name: create-snapshot-1-size name: create-volume-2 - metadata: - labels: - pipelines.kubeflow.org/pipeline-sdk-type: kfp outputs: parameters: - name: create-volume-2-manifest @@ -114,9 +102,6 @@ spec: - name: create-snapshot-2-name - name: create-snapshot-2-size name: create-volume-3 - metadata: - labels: - pipelines.kubeflow.org/pipeline-sdk-type: kfp outputs: parameters: - name: create-volume-3-manifest @@ -148,9 +133,6 @@ spec: parameters: - name: create-volume-1-name name: step1-concat - metadata: - labels: - pipelines.kubeflow.org/pipeline-sdk-type: kfp volumes: - name: create-volume-1 persistentVolumeClaim: @@ -168,9 +150,6 @@ spec: parameters: - name: create-volume-2-name name: step2-gunzip - metadata: - labels: - pipelines.kubeflow.org/pipeline-sdk-type: kfp volumes: - name: create-volume-2 persistentVolumeClaim: @@ -187,9 +166,6 @@ spec: parameters: - name: create-volume-3-name name: step3-output - metadata: - labels: - pipelines.kubeflow.org/pipeline-sdk-type: kfp volumes: - name: create-volume-3 persistentVolumeClaim: diff --git a/sdk/python/tests/compiler/testdata/volume_snapshotop_sequential.yaml b/sdk/python/tests/compiler/testdata/volume_snapshotop_sequential.yaml index c34153b2789..a0d9ac8f2ab 100644 --- a/sdk/python/tests/compiler/testdata/volume_snapshotop_sequential.yaml +++ b/sdk/python/tests/compiler/testdata/volume_snapshotop_sequential.yaml @@ -13,9 +13,6 @@ spec: serviceAccountName: pipeline-runner templates: - name: create-volume - metadata: - labels: - pipelines.kubeflow.org/pipeline-sdk-type: kfp outputs: parameters: - name: create-volume-manifest @@ -47,9 +44,6 @@ spec: - name: create-volume-name - name: url name: step1-ingest - metadata: - labels: - pipelines.kubeflow.org/pipeline-sdk-type: kfp volumes: - name: create-volume persistentVolumeClaim: @@ -58,9 +52,6 @@ spec: parameters: - name: create-volume-name name: step1-snap - metadata: - labels: - pipelines.kubeflow.org/pipeline-sdk-type: kfp outputs: parameters: - name: step1-snap-manifest @@ -92,9 +83,6 @@ spec: parameters: - name: create-volume-name name: step2-gunzip - metadata: - labels: - pipelines.kubeflow.org/pipeline-sdk-type: kfp volumes: - name: create-volume persistentVolumeClaim: @@ -103,9 +91,6 @@ spec: parameters: - name: create-volume-name name: step2-snap - metadata: - labels: - pipelines.kubeflow.org/pipeline-sdk-type: kfp outputs: parameters: - name: step2-snap-manifest @@ -137,9 +122,6 @@ spec: parameters: - name: create-volume-name name: step3-copy - metadata: - labels: - pipelines.kubeflow.org/pipeline-sdk-type: kfp volumes: - name: create-volume persistentVolumeClaim: @@ -148,9 +130,6 @@ spec: parameters: - name: create-volume-name name: step3-snap - metadata: - labels: - pipelines.kubeflow.org/pipeline-sdk-type: kfp outputs: parameters: - name: step3-snap-manifest @@ -181,9 +160,6 @@ spec: parameters: - name: create-volume-name name: step4-output - metadata: - labels: - pipelines.kubeflow.org/pipeline-sdk-type: kfp volumes: - name: create-volume persistentVolumeClaim: diff --git a/sdk/python/tests/compiler/testdata/volumeop_basic.yaml b/sdk/python/tests/compiler/testdata/volumeop_basic.yaml 
index efb62868898..e2ab8103572 100644 --- a/sdk/python/tests/compiler/testdata/volumeop_basic.yaml +++ b/sdk/python/tests/compiler/testdata/volumeop_basic.yaml @@ -26,9 +26,6 @@ spec: parameters: - name: create-pvc-name name: cop - metadata: - labels: - pipelines.kubeflow.org/pipeline-sdk-type: kfp volumes: - name: create-pvc persistentVolumeClaim: @@ -37,9 +34,6 @@ spec: parameters: - name: size name: create-pvc - metadata: - labels: - pipelines.kubeflow.org/pipeline-sdk-type: kfp outputs: parameters: - name: create-pvc-manifest diff --git a/sdk/python/tests/compiler/testdata/volumeop_dag.yaml b/sdk/python/tests/compiler/testdata/volumeop_dag.yaml index f13bd96ae78..420deff0e56 100644 --- a/sdk/python/tests/compiler/testdata/volumeop_dag.yaml +++ b/sdk/python/tests/compiler/testdata/volumeop_dag.yaml @@ -12,9 +12,6 @@ spec: serviceAccountName: pipeline-runner templates: - name: create-pvc - metadata: - labels: - pipelines.kubeflow.org/pipeline-sdk-type: kfp outputs: parameters: - name: create-pvc-manifest @@ -45,9 +42,6 @@ spec: parameters: - name: create-pvc-name name: step1 - metadata: - labels: - pipelines.kubeflow.org/pipeline-sdk-type: kfp volumes: - name: create-pvc persistentVolumeClaim: @@ -66,9 +60,6 @@ spec: parameters: - name: create-pvc-name name: step2 - metadata: - labels: - pipelines.kubeflow.org/pipeline-sdk-type: kfp volumes: - name: create-pvc persistentVolumeClaim: @@ -87,9 +78,6 @@ spec: parameters: - name: create-pvc-name name: step3 - metadata: - labels: - pipelines.kubeflow.org/pipeline-sdk-type: kfp volumes: - name: create-pvc persistentVolumeClaim: diff --git a/sdk/python/tests/compiler/testdata/volumeop_parallel.yaml b/sdk/python/tests/compiler/testdata/volumeop_parallel.yaml index c7b07029fdd..cf278e50692 100644 --- a/sdk/python/tests/compiler/testdata/volumeop_parallel.yaml +++ b/sdk/python/tests/compiler/testdata/volumeop_parallel.yaml @@ -12,9 +12,6 @@ spec: serviceAccountName: pipeline-runner templates: - name: create-pvc - metadata: - labels: - pipelines.kubeflow.org/pipeline-sdk-type: kfp outputs: parameters: - name: create-pvc-manifest @@ -45,9 +42,6 @@ spec: parameters: - name: create-pvc-name name: step1 - metadata: - labels: - pipelines.kubeflow.org/pipeline-sdk-type: kfp volumes: - name: create-pvc persistentVolumeClaim: @@ -66,9 +60,6 @@ spec: parameters: - name: create-pvc-name name: step2 - metadata: - labels: - pipelines.kubeflow.org/pipeline-sdk-type: kfp volumes: - name: create-pvc persistentVolumeClaim: @@ -87,9 +78,6 @@ spec: parameters: - name: create-pvc-name name: step3 - metadata: - labels: - pipelines.kubeflow.org/pipeline-sdk-type: kfp volumes: - name: create-pvc persistentVolumeClaim: diff --git a/sdk/python/tests/compiler/testdata/volumeop_sequential.yaml b/sdk/python/tests/compiler/testdata/volumeop_sequential.yaml index 30293db89c6..0d9fe372799 100644 --- a/sdk/python/tests/compiler/testdata/volumeop_sequential.yaml +++ b/sdk/python/tests/compiler/testdata/volumeop_sequential.yaml @@ -12,9 +12,6 @@ spec: serviceAccountName: pipeline-runner templates: - name: mypvc - metadata: - labels: - pipelines.kubeflow.org/pipeline-sdk-type: kfp outputs: parameters: - name: mypvc-manifest @@ -45,9 +42,6 @@ spec: parameters: - name: mypvc-name name: step1 - metadata: - labels: - pipelines.kubeflow.org/pipeline-sdk-type: kfp volumes: - name: mypvc persistentVolumeClaim: @@ -66,9 +60,6 @@ spec: parameters: - name: mypvc-name name: step2 - metadata: - labels: - pipelines.kubeflow.org/pipeline-sdk-type: kfp volumes: - name: mypvc 
persistentVolumeClaim: @@ -86,9 +77,6 @@ spec: parameters: - name: mypvc-name name: step3 - metadata: - labels: - pipelines.kubeflow.org/pipeline-sdk-type: kfp volumes: - name: mypvc persistentVolumeClaim: diff --git a/sdk/python/tests/compiler/testdata/withitem_basic.yaml b/sdk/python/tests/compiler/testdata/withitem_basic.yaml index 26e1a2b5bc4..f19512bbd2e 100644 --- a/sdk/python/tests/compiler/testdata/withitem_basic.yaml +++ b/sdk/python/tests/compiler/testdata/withitem_basic.yaml @@ -47,9 +47,6 @@ spec: - name: loop-item-param-00000001-subvar-a - name: my_pipe_param name: my-in-coop1 - metadata: - labels: - pipelines.kubeflow.org/pipeline-sdk-type: kfp - container: args: - echo op2 {{inputs.parameters.loop-item-param-00000001-subvar-b}} @@ -61,9 +58,6 @@ spec: parameters: - name: loop-item-param-00000001-subvar-b name: my-in-coop2 - metadata: - labels: - pipelines.kubeflow.org/pipeline-sdk-type: kfp - container: args: - echo {{inputs.parameters.my_pipe_param}} @@ -75,9 +69,6 @@ spec: parameters: - name: my_pipe_param name: my-out-cop - metadata: - labels: - pipelines.kubeflow.org/pipeline-sdk-type: kfp - dag: tasks: - arguments: diff --git a/sdk/python/tests/compiler/testdata/withitem_nested.yaml b/sdk/python/tests/compiler/testdata/withitem_nested.yaml index 7f12f304d66..b1f8bed1be8 100644 --- a/sdk/python/tests/compiler/testdata/withitem_nested.yaml +++ b/sdk/python/tests/compiler/testdata/withitem_nested.yaml @@ -79,9 +79,6 @@ spec: - name: loop-item-param-00000001-subvar-a - name: my_pipe_param name: my-in-coop1 - metadata: - labels: - pipelines.kubeflow.org/pipeline-sdk-type: kfp - container: args: - echo op2 {{inputs.parameters.loop-item-param-00000001-subvar-b}} @@ -93,9 +90,6 @@ spec: parameters: - name: loop-item-param-00000001-subvar-b name: my-in-coop2 - metadata: - labels: - pipelines.kubeflow.org/pipeline-sdk-type: kfp - container: args: - echo op1 {{inputs.parameters.loop-item-param-00000001-subvar-a}} {{inputs.parameters.loop-item-param-00000002}} @@ -110,9 +104,6 @@ spec: - name: loop-item-param-00000002 - name: my_pipe_param name: my-inner-inner-coop - metadata: - labels: - pipelines.kubeflow.org/pipeline-sdk-type: kfp - container: args: - echo {{inputs.parameters.my_pipe_param}} @@ -124,9 +115,6 @@ spec: parameters: - name: my_pipe_param name: my-out-cop - metadata: - labels: - pipelines.kubeflow.org/pipeline-sdk-type: kfp - dag: tasks: - arguments: diff --git a/sdk/python/tests/compiler/testdata/withparam_global.yaml b/sdk/python/tests/compiler/testdata/withparam_global.yaml index 6ad2c5939d8..a60c4eaf0db 100644 --- a/sdk/python/tests/compiler/testdata/withparam_global.yaml +++ b/sdk/python/tests/compiler/testdata/withparam_global.yaml @@ -36,9 +36,6 @@ spec: parameters: - name: loopidy_doop-loop-item name: my-in-cop1 - metadata: - labels: - pipelines.kubeflow.org/pipeline-sdk-type: kfp - container: args: - python -c "import json; import sys; json.dump([i for i in range(20, 31)], @@ -48,9 +45,6 @@ spec: - -c image: python:alpine3.6 name: my-out-cop0 - metadata: - labels: - pipelines.kubeflow.org/pipeline-sdk-type: kfp outputs: artifacts: - name: my-out-cop0-out @@ -70,9 +64,6 @@ spec: parameters: - name: my-out-cop0-out name: my-out-cop2 - metadata: - labels: - pipelines.kubeflow.org/pipeline-sdk-type: kfp - dag: tasks: - arguments: diff --git a/sdk/python/tests/compiler/testdata/withparam_global_dict.yaml b/sdk/python/tests/compiler/testdata/withparam_global_dict.yaml index d2ac3f5a3d2..f78f3c69b5b 100644 --- 
a/sdk/python/tests/compiler/testdata/withparam_global_dict.yaml +++ b/sdk/python/tests/compiler/testdata/withparam_global_dict.yaml @@ -36,9 +36,6 @@ spec: parameters: - name: loopidy_doop-loop-item-subvar-a name: my-in-cop1 - metadata: - labels: - pipelines.kubeflow.org/pipeline-sdk-type: kfp - container: args: - python -c "import json; import sys; json.dump([i for i in range(20, 31)], @@ -48,9 +45,6 @@ spec: - -c image: python:alpine3.6 name: my-out-cop0 - metadata: - labels: - pipelines.kubeflow.org/pipeline-sdk-type: kfp outputs: artifacts: - name: my-out-cop0-out @@ -70,9 +64,6 @@ spec: parameters: - name: my-out-cop0-out name: my-out-cop2 - metadata: - labels: - pipelines.kubeflow.org/pipeline-sdk-type: kfp - dag: tasks: - arguments: diff --git a/sdk/python/tests/compiler/testdata/withparam_output.yaml b/sdk/python/tests/compiler/testdata/withparam_output.yaml index 35e3887c591..8978d1d055b 100644 --- a/sdk/python/tests/compiler/testdata/withparam_output.yaml +++ b/sdk/python/tests/compiler/testdata/withparam_output.yaml @@ -33,9 +33,6 @@ spec: parameters: - name: my-out-cop0-out-loop-item name: my-in-cop1 - metadata: - labels: - pipelines.kubeflow.org/pipeline-sdk-type: kfp - container: args: - python -c "import json; import sys; json.dump([i for i in range(20, 31)], @@ -45,9 +42,6 @@ spec: - -c image: python:alpine3.6 name: my-out-cop0 - metadata: - labels: - pipelines.kubeflow.org/pipeline-sdk-type: kfp outputs: artifacts: - name: my-out-cop0-out @@ -67,9 +61,6 @@ spec: parameters: - name: my-out-cop0-out name: my-out-cop2 - metadata: - labels: - pipelines.kubeflow.org/pipeline-sdk-type: kfp - dag: tasks: - arguments: diff --git a/sdk/python/tests/compiler/testdata/withparam_output_dict.yaml b/sdk/python/tests/compiler/testdata/withparam_output_dict.yaml index 4613eb6ae65..89aa8bb4b2a 100644 --- a/sdk/python/tests/compiler/testdata/withparam_output_dict.yaml +++ b/sdk/python/tests/compiler/testdata/withparam_output_dict.yaml @@ -33,9 +33,6 @@ spec: parameters: - name: my-out-cop0-out-loop-item-subvar-a name: my-in-cop1 - metadata: - labels: - pipelines.kubeflow.org/pipeline-sdk-type: kfp - container: args: - 'python -c "import json; import sys; json.dump([{''a'': 1, ''b'': 2}, {''a'': @@ -45,9 +42,6 @@ spec: - -c image: python:alpine3.6 name: my-out-cop0 - metadata: - labels: - pipelines.kubeflow.org/pipeline-sdk-type: kfp outputs: artifacts: - name: my-out-cop0-out @@ -67,9 +61,6 @@ spec: parameters: - name: my-out-cop0-out name: my-out-cop2 - metadata: - labels: - pipelines.kubeflow.org/pipeline-sdk-type: kfp - dag: tasks: - arguments:
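For reference, the patch above removes the default telemetry transformers (`get_default_telemetry_labels`, `add_pod_labels`, `add_name_for_oob_components`) but keeps the `op_transformers.extend(pipeline_conf.op_transformers)` hook in `_create_workflow`, so pipeline authors who still want pod labels of this kind can attach their own transformer at compile time. A minimal sketch, assuming the KFP v1 SDK's `dsl.PipelineConf.add_op_transformer` API; the label key/value and pipeline body are illustrative only, not anything defined by this patch:

    import kfp
    from kfp import dsl


    def add_sdk_label(op):
        # Op transformer: attach a custom pod label to every ContainerOp,
        # roughly mirroring what the removed add_pod_labels() default did.
        # 'example.org/sdk-type' is an illustrative key, not a KFP convention.
        if isinstance(op, dsl.ContainerOp) and 'example.org/sdk-type' not in (op.pod_labels or {}):
            op.add_pod_label('example.org/sdk-type', 'kfp')
        return op


    @dsl.pipeline(name='labelled-pipeline')
    def labelled_pipeline():
        dsl.ContainerOp(name='echo', image='library/bash',
                        command=['sh', '-c', 'echo hello'])


    conf = dsl.PipelineConf()
    conf.add_op_transformer(add_sdk_label)  # assumed API from the KFP v1 SDK
    kfp.compiler.Compiler().compile(labelled_pipeline, 'labelled_pipeline.tar.gz',
                                    pipeline_conf=conf)

Because `_create_workflow` still extends `op_transformers` with `pipeline_conf.op_transformers`, a transformer registered this way runs over every op during compilation, just as the removed defaults did.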