From 61fea6ba1f369dde39139c5151a91fae3329b3d5 Mon Sep 17 00:00:00 2001
From: Chen Sun
Date: Sun, 24 Oct 2021 17:10:07 -0700
Subject: [PATCH 01/31] fix(sdk.v2): Fix InputPathPlaceholder changed to
 InputValuePlaceholder (#6794)

* fix InputPathPlaceholder changed to InputValuePlaceholder

* release note
---
 sdk/RELEASE.md                                |  2 ++
 .../v2/components/experimental/structures.py  | 25 +++++++++++++++++--
 .../experimental/structures_test.py           |  4 +--
 3 files changed, 27 insertions(+), 4 deletions(-)

diff --git a/sdk/RELEASE.md b/sdk/RELEASE.md
index d5d689e1b0b..1c533c4f37d 100644
--- a/sdk/RELEASE.md
+++ b/sdk/RELEASE.md
@@ -12,6 +12,8 @@
 
 ## Bug Fixes and Other Changes
 
+* Fix placeholder mapping error in v2. [\#6794](https://github.com/kubeflow/pipelines/pull/6794)
+
 ## Documentation Updates
 
 # 1.8.6

diff --git a/sdk/python/kfp/v2/components/experimental/structures.py b/sdk/python/kfp/v2/components/experimental/structures.py
index 8d7c6f332b9..3632bc26a70 100644
--- a/sdk/python/kfp/v2/components/experimental/structures.py
+++ b/sdk/python/kfp/v2/components/experimental/structures.py
@@ -443,12 +443,33 @@ def _transform_arg(arg: Union[str, Dict[str, str]]) -> ValidCommandArgs:
             key: _transform_arg(command)
             for key, command in implementation.pop('env', {}).items()
         }
-        container_spec = ContainerSpec.parse_obj(implementation)
+
+        container_spec = ContainerSpec(image=implementation['image'])
+
+        # Workaround for https://github.com/samuelcolvin/pydantic/issues/2079
+        def _copy_model(obj):
+            if isinstance(obj, BaseModel):
+                return obj.copy(deep=True)
+            return obj
+
+        # Must assign these after the constructor call, otherwise it won't work.
+        if implementation['commands']:
+            container_spec.commands = [
+                _copy_model(cmd) for cmd in implementation['commands']
+            ]
+        if implementation['arguments']:
+            container_spec.arguments = [
+                _copy_model(arg) for arg in implementation['arguments']
+            ]
+        if implementation['env']:
+            container_spec.env = {
+                k: _copy_model(v) for k, v in implementation['env'].items()
+            }
 
         return ComponentSpec(
             name=component_dict.get('name', 'name'),
             description=component_dict.get('description'),
-            implementation=Implementation(container=container_spec,),
+            implementation=Implementation(container=container_spec),
             inputs={
                 spec['name']: InputSpec(
                     type=spec.get('type', 'Artifact'),

diff --git a/sdk/python/kfp/v2/components/experimental/structures_test.py b/sdk/python/kfp/v2/components/experimental/structures_test.py
index 56f5f6bff22..4ae034f06ee 100644
--- a/sdk/python/kfp/v2/components/experimental/structures_test.py
+++ b/sdk/python/kfp/v2/components/experimental/structures_test.py
@@ -364,7 +364,7 @@ def test_component_spec_load_from_v1_component_yaml(self):
         component_yaml_v1 = textwrap.dedent("""\
         name: Component with 2 inputs and 2 outputs
         inputs:
-        - {name: Input parameter}
+        - {name: Input parameter, type: String}
         - {name: Input artifact}
         outputs:
         - {name: Output 1}
@@ -413,7 +413,7 @@ def test_component_spec_load_from_v1_component_yaml(self):
                     env={},
                 )),
             inputs={
-                'Input parameter': structures.InputSpec(type='Artifact'),
+                'Input parameter': structures.InputSpec(type='String'),
                 'Input artifact': structures.InputSpec(type='Artifact')
             },
             outputs={

From 5dfae1d8cc8db7cc52a646691f2fc7f6641dea19 Mon Sep 17 00:00:00 2001
From: Gerard Casas Saez
Date: Sun, 24 Oct 2021 22:24:07 -0600
Subject: [PATCH 02/31] feat(sdk): Add version description optionally to Py
 SDK.
 Part of #6256 (#6472)

* add description optionally

* add release notes

* add new line

* capture if description is not accepted by server

* add todo comment

* add better description api missing exception
---
 sdk/RELEASE.md            |  1 +
 sdk/python/kfp/_client.py | 25 ++++++++++++++++++++-----
 2 files changed, 21 insertions(+), 5 deletions(-)

diff --git a/sdk/RELEASE.md b/sdk/RELEASE.md
index 1c533c4f37d..d93553e89d1 100644
--- a/sdk/RELEASE.md
+++ b/sdk/RELEASE.md
@@ -199,6 +199,7 @@
 ## Major Features and Improvements
 
 * Support container environment variable in v2. [\#6515](https://github.com/kubeflow/pipelines/pull/6515)
+* Add optional support to specify description for pipeline version [\#6472](https://github.com/kubeflow/pipelines/issues/6472).
 
 ## Breaking Changes
 
diff --git a/sdk/python/kfp/_client.py b/sdk/python/kfp/_client.py
index 4292cbdeced..1932a4db781 100644
--- a/sdk/python/kfp/_client.py
+++ b/sdk/python/kfp/_client.py
@@ -1263,7 +1263,8 @@ def upload_pipeline_version(self,
                                 pipeline_package_path,
                                 pipeline_version_name: str,
                                 pipeline_id: Optional[str] = None,
-                                pipeline_name: Optional[str] = None):
+                                pipeline_name: Optional[str] = None,
+                                description: Optional[str] = None):
         """Uploads a new version of the pipeline to the Kubeflow Pipelines
         cluster.
 
@@ -1272,6 +1273,7 @@ def upload_pipeline_version(self,
             pipeline_version_name: Name of the pipeline version to be shown in the UI.
             pipeline_id: Optional. Id of the pipeline.
             pipeline_name: Optional. Name of the pipeline.
+            description: Optional. Description of the pipeline version to be shown in the UI.
 
         Returns:
             Server response object containing pipeline id and other information.
 
         Throws:
@@ -1285,11 +1287,24 @@ def upload_pipeline_version(self,
 
         if pipeline_name:
             pipeline_id = self.get_pipeline_id(pipeline_name)
-
-        response = self._upload_api.upload_pipeline_version(
-            pipeline_package_path,
+        kwargs = dict(
             name=pipeline_version_name,
-            pipelineid=pipeline_id)
+            pipelineid=pipeline_id,
+        )
+
+        if description:
+            kwargs['description'] = description
+        try:
+            response = self._upload_api.upload_pipeline_version(
+                pipeline_package_path, **kwargs)
+        except kfp_server_api.exceptions.ApiTypeError as e:
+            # TODO: Remove this once we drop support for kfp_server_api < 1.7.1
+            if 'description' in e.message and 'unexpected keyword argument' in e.message:
+                raise NotImplementedError(
+                    'Pipeline version description is not supported in current kfp-server-api pypi package. Upgrade to 1.7.1 or above'
+                )
+            else:
+                raise e
 
         if self._is_ipython():
             import IPython

From ec9a8ed6cdb98f7874be455ec0c0aac3dd9176e1 Mon Sep 17 00:00:00 2001
From: Chen Sun
Date: Sun, 24 Oct 2021 21:42:27 -0700
Subject: [PATCH 03/31] Update RELEASE.md (#6797)

---
 sdk/RELEASE.md | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/sdk/RELEASE.md b/sdk/RELEASE.md
index d93553e89d1..7e7deb19b5e 100644
--- a/sdk/RELEASE.md
+++ b/sdk/RELEASE.md
@@ -2,6 +2,8 @@
 
 ## Major Features and Improvements
 
+* Add optional support to specify description for pipeline version [\#6472](https://github.com/kubeflow/pipelines/issues/6472).
+
 ## Breaking Changes
 
 ### For Pipeline Authors
@@ -199,7 +201,6 @@
 ## Major Features and Improvements
 
 * Support container environment variable in v2. [\#6515](https://github.com/kubeflow/pipelines/pull/6515)
-* Add optional support to specify description for pipeline version [\#6472](https://github.com/kubeflow/pipelines/issues/6472).
## Breaking Changes From 4abc4fd1874f7937a193d31dbbe650618c88ca95 Mon Sep 17 00:00:00 2001 From: James Liu <37026441+zijianjoy@users.noreply.github.com> Date: Sun, 24 Oct 2021 23:15:07 -0700 Subject: [PATCH 04/31] fix(manifests): Upgrade kpt to 1.0.0-beta.6. Fix #5368 (#6595) * manifests: Upgrade kpt to 1.0.0-beta.6 * readme * add krmignore --- manifests/kustomize/hack/test.sh | 5 +- .../kustomize/third-party/argo/.krmignore | 1 + manifests/kustomize/third-party/argo/Kptfile | 6 + .../kustomize/third-party/argo/README.md | 6 +- .../argo/upstream/manifests/Kptfile | 13 +- .../argo-server/argo-server-deployment.yaml | 6 +- .../base/argo-server/argo-server-sa.yaml | 2 +- .../base/argo-server/argo-server-service.yaml | 2 +- .../base/argo-server/kustomization.yaml | 1 - .../argoproj.io_clusterworkflowtemplates.yaml | 2 +- .../crds/full/argoproj.io_cronworkflows.yaml | 2 +- .../argoproj.io_workfloweventbindings.yaml | 2 +- .../base/crds/full/argoproj.io_workflows.yaml | 194 +++++++++++++++++- .../full/argoproj.io_workflowtemplates.yaml | 2 +- .../base/crds/full/kustomization.yaml | 1 - .../manifests/base/crds/kustomization.yaml | 1 - .../argoproj.io_clusterworkflowtemplates.yaml | 2 +- .../minimal/argoproj.io_cronworkflows.yaml | 2 +- .../argoproj.io_workfloweventbindings.yaml | 2 +- .../crds/minimal/argoproj.io_workflows.yaml | 2 +- .../argoproj.io_workflowtemplates.yaml | 2 +- .../base/crds/minimal/kustomization.yaml | 1 - .../manifests/base/kustomization.yaml | 1 - .../workflow-controller/kustomization.yaml | 1 - .../workflow-controller-configmap.yaml | 2 +- .../workflow-controller-deployment.yaml | 2 +- .../workflow-controller-metrics-service.yaml | 2 +- .../workflow-controller-sa.yaml | 2 +- .../argo-server-clusterole.yaml | 2 +- .../argo-server-clusterolebinding.yaml | 2 +- .../argo-server-rbac/kustomization.yaml | 1 - .../cluster-install/kustomization.yaml | 1 - .../kustomization.yaml | 1 - .../workflow-aggregate-roles.yaml | 8 +- .../workflow-controller-clusterrole.yaml | 12 +- ...orkflow-controller-clusterrolebinding.yaml | 2 +- .../workflow-controller-role.yaml | 3 +- .../workflow-controller-rolebinding.yaml | 2 +- .../argo-server-rbac/argo-server-role.yaml | 2 +- .../argo-server-rolebinding.yaml | 2 +- .../argo-server-rbac/kustomization.yaml | 1 - .../namespace-install/kustomization.yaml | 6 +- .../overlays/argo-server-deployment.json | 7 - .../overlays/argo-server-deployment.yaml | 3 + .../workflow-controller-deployment.json | 7 - .../workflow-controller-deployment.yaml | 3 + .../kustomization.yaml | 1 - .../workflow-controller-role.yaml | 2 +- .../workflow-controller-rolebinding.yaml | 3 +- .../base/argo-server-sso-secret.yaml | 2 +- .../base/artifact-repositories-configmap.yaml | 4 +- .../base/cluster-workflow-template-rbac.yaml | 9 +- .../base/kubelet-executor-clusterrole.yaml | 4 +- ...t-executor-default-clusterrolebinding.yaml | 2 +- .../quick-start/base/kustomization.yaml | 2 - .../quick-start/base/minio/kustomization.yaml | 3 +- .../quick-start/base/minio/minio-deploy.yaml | 4 +- .../quick-start/base/minio/minio-pod.yaml | 4 +- .../quick-start/base/minio/minio-service.yaml | 3 +- .../base/minio/my-minio-cred-secret.yaml | 2 +- .../base/overlays/argo-server-deployment.yaml | 4 +- .../workflow-controller-configmap.yaml | 2 +- .../base/prometheus/kustomization.yaml | 1 - .../prometheus/prometheus-config-cluster.yaml | 2 +- .../prometheus/prometheus-deployment.yaml | 8 +- .../base/prometheus/prometheus-service.yaml | 2 +- ...argo-workflows-webhook-clients-secret.yaml | 6 +- 
.../base/webhooks/github.com-rolebinding.yaml | 4 +- .../base/webhooks/github.com-sa.yaml | 2 +- .../base/webhooks/kustomization.yaml | 1 - .../submit-workflow-template-role.yaml | 4 +- .../base/workflow-default-rolebinding.yaml | 2 +- .../quick-start/base/workflow-role.yaml | 4 +- .../quick-start/minimal/kustomization.yaml | 1 - .../mysql/argo-mysql-config-secret.yaml | 2 +- .../quick-start/mysql/kustomization.yaml | 4 +- .../quick-start/mysql/mysql-deployment.yaml | 4 +- .../quick-start/mysql/mysql-service.yaml | 4 +- .../workflow-controller-configmap.yaml | 4 +- .../postgres/argo-postgres-config-secret.yaml | 2 +- .../quick-start/postgres/kustomization.yaml | 4 +- .../workflow-controller-configmap.yaml | 4 +- .../postgres/postgres-deployment.yaml | 4 +- .../postgres/postgres-service.yaml | 4 +- .../quick-start/sso/dex/dev-svc.yaml | 2 +- .../manifests/quick-start/sso/dex/dex-cm.yaml | 4 +- .../quick-start/sso/dex/dex-deploy.yaml | 4 +- .../manifests/quick-start/sso/dex/dex-rb.yaml | 2 +- .../quick-start/sso/dex/dex-role.yaml | 2 +- .../manifests/quick-start/sso/dex/dex-sa.yaml | 2 +- .../quick-start/sso/dex/kustomization.yaml | 2 - .../quick-start/sso/kustomization.yaml | 4 +- .../sso/overlays/argo-server-sa.yaml | 2 +- .../workflow-controller-configmap.yaml | 2 +- 94 files changed, 322 insertions(+), 160 deletions(-) create mode 100644 manifests/kustomize/third-party/argo/.krmignore create mode 100644 manifests/kustomize/third-party/argo/Kptfile delete mode 100644 manifests/kustomize/third-party/argo/upstream/manifests/namespace-install/overlays/argo-server-deployment.json create mode 100644 manifests/kustomize/third-party/argo/upstream/manifests/namespace-install/overlays/argo-server-deployment.yaml delete mode 100644 manifests/kustomize/third-party/argo/upstream/manifests/namespace-install/overlays/workflow-controller-deployment.json create mode 100644 manifests/kustomize/third-party/argo/upstream/manifests/namespace-install/overlays/workflow-controller-deployment.yaml diff --git a/manifests/kustomize/hack/test.sh b/manifests/kustomize/hack/test.sh index 58d938a093d..7e251fbf614 100755 --- a/manifests/kustomize/hack/test.sh +++ b/manifests/kustomize/hack/test.sh @@ -56,7 +56,4 @@ do kustomize build "${MANIFESTS_DIR}/${path}" >/dev/null done -# TODO(Bobgy): fix this for kpt v1 -# verify these manifests work with kpt -# to prevent issues like https://github.com/kubeflow/pipelines/issues/5368 -# kpt cfg tree "${MANIFESTS_DIR}" >/dev/null +kpt pkg tree "${MANIFESTS_DIR}" >/dev/null diff --git a/manifests/kustomize/third-party/argo/.krmignore b/manifests/kustomize/third-party/argo/.krmignore new file mode 100644 index 00000000000..045951300cf --- /dev/null +++ b/manifests/kustomize/third-party/argo/.krmignore @@ -0,0 +1 @@ +upstream diff --git a/manifests/kustomize/third-party/argo/Kptfile b/manifests/kustomize/third-party/argo/Kptfile new file mode 100644 index 00000000000..bf0baf9863a --- /dev/null +++ b/manifests/kustomize/third-party/argo/Kptfile @@ -0,0 +1,6 @@ +apiVersion: kpt.dev/v1 +kind: Kptfile +metadata: + name: argo +info: + description: argo Kptfile in order to ignore upstream/ folder. diff --git a/manifests/kustomize/third-party/argo/README.md b/manifests/kustomize/third-party/argo/README.md index 77962195720..b5f82380158 100644 --- a/manifests/kustomize/third-party/argo/README.md +++ b/manifests/kustomize/third-party/argo/README.md @@ -20,6 +20,10 @@ Refer to [third_party/argo/README.md](../../../../third_party/argo/README.md). 
### Upgrade argo manifests +Requirement: + +Use kpt version above 1.0.0-beta.6, refer to [kpt installation](https://kpt.dev/installation/) for downloading kpt. + As one step of above, we need to upgrade argo manifests in this folder. 1. Run: @@ -29,5 +33,3 @@ As one step of above, we need to upgrade argo manifests in this folder. ``` Note, argo version is pulled from [third_party/argo/VERSION](../../../../third_party/argo/VERSION). Edit the VERSION file first. - -2. Manually edit [YAML patches](upstream/manifests/namespace-install/overlays) to JSON patches, because YAML patches confuses kpt, refer to [#5368](https://github.com/kubeflow/pipelines/issues/5368). diff --git a/manifests/kustomize/third-party/argo/upstream/manifests/Kptfile b/manifests/kustomize/third-party/argo/upstream/manifests/Kptfile index 5dd50819bbb..35f40b5f539 100644 --- a/manifests/kustomize/third-party/argo/upstream/manifests/Kptfile +++ b/manifests/kustomize/third-party/argo/upstream/manifests/Kptfile @@ -1,11 +1,18 @@ -apiVersion: kpt.dev/v1alpha1 +apiVersion: kpt.dev/v1 kind: Kptfile metadata: name: manifests upstream: type: git git: - commit: a245fe67db56d2808fb78c6079d08404cbee91aa repo: https://github.com/argoproj/argo-workflows directory: /manifests - ref: v3.1.1 + ref: v3.1.6 + updateStrategy: resource-merge +upstreamLock: + type: git + git: + repo: https://github.com/argoproj/argo-workflows + directory: /manifests + ref: v3.1.6 + commit: 14e1278572b28d8b1854858ce7de355ce60199c9 diff --git a/manifests/kustomize/third-party/argo/upstream/manifests/base/argo-server/argo-server-deployment.yaml b/manifests/kustomize/third-party/argo/upstream/manifests/base/argo-server/argo-server-deployment.yaml index d01698af536..189b104119a 100644 --- a/manifests/kustomize/third-party/argo/upstream/manifests/base/argo-server/argo-server-deployment.yaml +++ b/manifests/kustomize/third-party/argo/upstream/manifests/base/argo-server/argo-server-deployment.yaml @@ -1,6 +1,6 @@ apiVersion: apps/v1 kind: Deployment -metadata: +metadata: # kpt-merge: /argo-server name: argo-server spec: selector: @@ -19,7 +19,7 @@ spec: capabilities: drop: - ALL - args: [ server ] + args: [server] ports: - name: web containerPort: 2746 @@ -35,7 +35,7 @@ spec: name: tmp volumes: - name: tmp - emptyDir: { } + emptyDir: {} securityContext: runAsNonRoot: true nodeSelector: diff --git a/manifests/kustomize/third-party/argo/upstream/manifests/base/argo-server/argo-server-sa.yaml b/manifests/kustomize/third-party/argo/upstream/manifests/base/argo-server/argo-server-sa.yaml index 1d07b8da9e1..0f5cab2bc0b 100644 --- a/manifests/kustomize/third-party/argo/upstream/manifests/base/argo-server/argo-server-sa.yaml +++ b/manifests/kustomize/third-party/argo/upstream/manifests/base/argo-server/argo-server-sa.yaml @@ -1,4 +1,4 @@ apiVersion: v1 kind: ServiceAccount -metadata: +metadata: # kpt-merge: /argo-server name: argo-server diff --git a/manifests/kustomize/third-party/argo/upstream/manifests/base/argo-server/argo-server-service.yaml b/manifests/kustomize/third-party/argo/upstream/manifests/base/argo-server/argo-server-service.yaml index 0c6e58d30ed..6c2207ac38f 100644 --- a/manifests/kustomize/third-party/argo/upstream/manifests/base/argo-server/argo-server-service.yaml +++ b/manifests/kustomize/third-party/argo/upstream/manifests/base/argo-server/argo-server-service.yaml @@ -1,6 +1,6 @@ apiVersion: v1 kind: Service -metadata: +metadata: # kpt-merge: /argo-server name: argo-server spec: selector: diff --git 
a/manifests/kustomize/third-party/argo/upstream/manifests/base/argo-server/kustomization.yaml b/manifests/kustomize/third-party/argo/upstream/manifests/base/argo-server/kustomization.yaml index 3817bd729b1..e17bc754ce5 100644 --- a/manifests/kustomize/third-party/argo/upstream/manifests/base/argo-server/kustomization.yaml +++ b/manifests/kustomize/third-party/argo/upstream/manifests/base/argo-server/kustomization.yaml @@ -1,6 +1,5 @@ apiVersion: kustomize.config.k8s.io/v1beta1 kind: Kustomization - resources: - argo-server-deployment.yaml - argo-server-sa.yaml diff --git a/manifests/kustomize/third-party/argo/upstream/manifests/base/crds/full/argoproj.io_clusterworkflowtemplates.yaml b/manifests/kustomize/third-party/argo/upstream/manifests/base/crds/full/argoproj.io_clusterworkflowtemplates.yaml index 31d1586b5f9..a5129ef46de 100644 --- a/manifests/kustomize/third-party/argo/upstream/manifests/base/crds/full/argoproj.io_clusterworkflowtemplates.yaml +++ b/manifests/kustomize/third-party/argo/upstream/manifests/base/crds/full/argoproj.io_clusterworkflowtemplates.yaml @@ -1,7 +1,7 @@ # This is an auto-generated file. DO NOT EDIT apiVersion: apiextensions.k8s.io/v1 kind: CustomResourceDefinition -metadata: +metadata: # kpt-merge: /clusterworkflowtemplates.argoproj.io name: clusterworkflowtemplates.argoproj.io spec: group: argoproj.io diff --git a/manifests/kustomize/third-party/argo/upstream/manifests/base/crds/full/argoproj.io_cronworkflows.yaml b/manifests/kustomize/third-party/argo/upstream/manifests/base/crds/full/argoproj.io_cronworkflows.yaml index 7956d040f33..dc4dfa72e0f 100644 --- a/manifests/kustomize/third-party/argo/upstream/manifests/base/crds/full/argoproj.io_cronworkflows.yaml +++ b/manifests/kustomize/third-party/argo/upstream/manifests/base/crds/full/argoproj.io_cronworkflows.yaml @@ -1,7 +1,7 @@ # This is an auto-generated file. DO NOT EDIT apiVersion: apiextensions.k8s.io/v1 kind: CustomResourceDefinition -metadata: +metadata: # kpt-merge: /cronworkflows.argoproj.io name: cronworkflows.argoproj.io spec: group: argoproj.io diff --git a/manifests/kustomize/third-party/argo/upstream/manifests/base/crds/full/argoproj.io_workfloweventbindings.yaml b/manifests/kustomize/third-party/argo/upstream/manifests/base/crds/full/argoproj.io_workfloweventbindings.yaml index 11bef39e4d0..f22205c8a15 100644 --- a/manifests/kustomize/third-party/argo/upstream/manifests/base/crds/full/argoproj.io_workfloweventbindings.yaml +++ b/manifests/kustomize/third-party/argo/upstream/manifests/base/crds/full/argoproj.io_workfloweventbindings.yaml @@ -1,7 +1,7 @@ # This is an auto-generated file. DO NOT EDIT apiVersion: apiextensions.k8s.io/v1 kind: CustomResourceDefinition -metadata: +metadata: # kpt-merge: /workfloweventbindings.argoproj.io name: workfloweventbindings.argoproj.io spec: group: argoproj.io diff --git a/manifests/kustomize/third-party/argo/upstream/manifests/base/crds/full/argoproj.io_workflows.yaml b/manifests/kustomize/third-party/argo/upstream/manifests/base/crds/full/argoproj.io_workflows.yaml index dbfe8979aff..be666e1d8be 100644 --- a/manifests/kustomize/third-party/argo/upstream/manifests/base/crds/full/argoproj.io_workflows.yaml +++ b/manifests/kustomize/third-party/argo/upstream/manifests/base/crds/full/argoproj.io_workflows.yaml @@ -1,7 +1,7 @@ # This is an auto-generated file. 
DO NOT EDIT apiVersion: apiextensions.k8s.io/v1 kind: CustomResourceDefinition -metadata: +metadata: # kpt-merge: /workflows.argoproj.io name: workflows.argoproj.io spec: group: argoproj.io @@ -14042,6 +14042,198 @@ spec: properties: artifactRepositoryRef: properties: + artifactRepository: + properties: + archiveLogs: + type: boolean + artifactory: + properties: + passwordSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + repoURL: + type: string + usernameSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + gcs: + properties: + bucket: + type: string + keyFormat: + type: string + serviceAccountKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + type: object + hdfs: + properties: + addresses: + items: + type: string + type: array + force: + type: boolean + hdfsUser: + type: string + krbCCacheSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + krbConfigConfigMap: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + krbKeytabSecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + krbRealm: + type: string + krbServicePrincipalName: + type: string + krbUsername: + type: string + pathFormat: + type: string + type: object + oss: + properties: + accessKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + bucket: + type: string + createBucketIfNotPresent: + type: boolean + endpoint: + type: string + keyFormat: + type: string + lifecycleRule: + properties: + markDeletionAfterDays: + format: int32 + type: integer + markInfrequentAccessAfterDays: + format: int32 + type: integer + type: object + secretKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + securityToken: + type: string + type: object + s3: + properties: + accessKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + bucket: + type: string + createBucketIfNotPresent: + properties: + objectLocking: + type: boolean + type: object + endpoint: + type: string + insecure: + type: boolean + keyFormat: + type: string + keyPrefix: + type: string + region: + type: string + roleARN: + type: string + secretKeySecret: + properties: + key: + type: string + name: + type: string + optional: + type: boolean + required: + - key + type: object + useSDKCreds: + type: boolean + type: object + type: object configMap: type: string default: diff --git a/manifests/kustomize/third-party/argo/upstream/manifests/base/crds/full/argoproj.io_workflowtemplates.yaml b/manifests/kustomize/third-party/argo/upstream/manifests/base/crds/full/argoproj.io_workflowtemplates.yaml index b5662e7c541..2586a5d2f0b 100644 --- a/manifests/kustomize/third-party/argo/upstream/manifests/base/crds/full/argoproj.io_workflowtemplates.yaml +++ b/manifests/kustomize/third-party/argo/upstream/manifests/base/crds/full/argoproj.io_workflowtemplates.yaml @@ -1,7 +1,7 @@ # This is an auto-generated file. 
DO NOT EDIT apiVersion: apiextensions.k8s.io/v1 kind: CustomResourceDefinition -metadata: +metadata: # kpt-merge: /workflowtemplates.argoproj.io name: workflowtemplates.argoproj.io spec: group: argoproj.io diff --git a/manifests/kustomize/third-party/argo/upstream/manifests/base/crds/full/kustomization.yaml b/manifests/kustomize/third-party/argo/upstream/manifests/base/crds/full/kustomization.yaml index a593d88d02d..a6245453ea3 100644 --- a/manifests/kustomize/third-party/argo/upstream/manifests/base/crds/full/kustomization.yaml +++ b/manifests/kustomize/third-party/argo/upstream/manifests/base/crds/full/kustomization.yaml @@ -1,6 +1,5 @@ apiVersion: kustomize.config.k8s.io/v1beta1 kind: Kustomization - resources: - argoproj.io_clusterworkflowtemplates.yaml - argoproj.io_cronworkflows.yaml diff --git a/manifests/kustomize/third-party/argo/upstream/manifests/base/crds/kustomization.yaml b/manifests/kustomize/third-party/argo/upstream/manifests/base/crds/kustomization.yaml index 3ccdade18fc..25ffbeccf0a 100644 --- a/manifests/kustomize/third-party/argo/upstream/manifests/base/crds/kustomization.yaml +++ b/manifests/kustomize/third-party/argo/upstream/manifests/base/crds/kustomization.yaml @@ -1,5 +1,4 @@ apiVersion: kustomize.config.k8s.io/v1beta1 kind: Kustomization - resources: - minimal diff --git a/manifests/kustomize/third-party/argo/upstream/manifests/base/crds/minimal/argoproj.io_clusterworkflowtemplates.yaml b/manifests/kustomize/third-party/argo/upstream/manifests/base/crds/minimal/argoproj.io_clusterworkflowtemplates.yaml index 889dfe2f508..90c8fb8adc2 100644 --- a/manifests/kustomize/third-party/argo/upstream/manifests/base/crds/minimal/argoproj.io_clusterworkflowtemplates.yaml +++ b/manifests/kustomize/third-party/argo/upstream/manifests/base/crds/minimal/argoproj.io_clusterworkflowtemplates.yaml @@ -1,6 +1,6 @@ apiVersion: apiextensions.k8s.io/v1 kind: CustomResourceDefinition -metadata: +metadata: # kpt-merge: /clusterworkflowtemplates.argoproj.io name: clusterworkflowtemplates.argoproj.io spec: group: argoproj.io diff --git a/manifests/kustomize/third-party/argo/upstream/manifests/base/crds/minimal/argoproj.io_cronworkflows.yaml b/manifests/kustomize/third-party/argo/upstream/manifests/base/crds/minimal/argoproj.io_cronworkflows.yaml index 812cf114348..1809239a5a9 100644 --- a/manifests/kustomize/third-party/argo/upstream/manifests/base/crds/minimal/argoproj.io_cronworkflows.yaml +++ b/manifests/kustomize/third-party/argo/upstream/manifests/base/crds/minimal/argoproj.io_cronworkflows.yaml @@ -1,6 +1,6 @@ apiVersion: apiextensions.k8s.io/v1 kind: CustomResourceDefinition -metadata: +metadata: # kpt-merge: /cronworkflows.argoproj.io name: cronworkflows.argoproj.io spec: group: argoproj.io diff --git a/manifests/kustomize/third-party/argo/upstream/manifests/base/crds/minimal/argoproj.io_workfloweventbindings.yaml b/manifests/kustomize/third-party/argo/upstream/manifests/base/crds/minimal/argoproj.io_workfloweventbindings.yaml index 22e2ecf0f6b..ab81216d69a 100644 --- a/manifests/kustomize/third-party/argo/upstream/manifests/base/crds/minimal/argoproj.io_workfloweventbindings.yaml +++ b/manifests/kustomize/third-party/argo/upstream/manifests/base/crds/minimal/argoproj.io_workfloweventbindings.yaml @@ -1,6 +1,6 @@ apiVersion: apiextensions.k8s.io/v1 kind: CustomResourceDefinition -metadata: +metadata: # kpt-merge: /workfloweventbindings.argoproj.io name: workfloweventbindings.argoproj.io spec: group: argoproj.io diff --git 
a/manifests/kustomize/third-party/argo/upstream/manifests/base/crds/minimal/argoproj.io_workflows.yaml b/manifests/kustomize/third-party/argo/upstream/manifests/base/crds/minimal/argoproj.io_workflows.yaml index 6e646adc7d4..f13fb38dbba 100644 --- a/manifests/kustomize/third-party/argo/upstream/manifests/base/crds/minimal/argoproj.io_workflows.yaml +++ b/manifests/kustomize/third-party/argo/upstream/manifests/base/crds/minimal/argoproj.io_workflows.yaml @@ -1,6 +1,6 @@ apiVersion: apiextensions.k8s.io/v1 kind: CustomResourceDefinition -metadata: +metadata: # kpt-merge: /workflows.argoproj.io name: workflows.argoproj.io spec: group: argoproj.io diff --git a/manifests/kustomize/third-party/argo/upstream/manifests/base/crds/minimal/argoproj.io_workflowtemplates.yaml b/manifests/kustomize/third-party/argo/upstream/manifests/base/crds/minimal/argoproj.io_workflowtemplates.yaml index 8165ec25b8b..d2c87bf5715 100644 --- a/manifests/kustomize/third-party/argo/upstream/manifests/base/crds/minimal/argoproj.io_workflowtemplates.yaml +++ b/manifests/kustomize/third-party/argo/upstream/manifests/base/crds/minimal/argoproj.io_workflowtemplates.yaml @@ -1,6 +1,6 @@ apiVersion: apiextensions.k8s.io/v1 kind: CustomResourceDefinition -metadata: +metadata: # kpt-merge: /workflowtemplates.argoproj.io name: workflowtemplates.argoproj.io spec: group: argoproj.io diff --git a/manifests/kustomize/third-party/argo/upstream/manifests/base/crds/minimal/kustomization.yaml b/manifests/kustomize/third-party/argo/upstream/manifests/base/crds/minimal/kustomization.yaml index a593d88d02d..a6245453ea3 100644 --- a/manifests/kustomize/third-party/argo/upstream/manifests/base/crds/minimal/kustomization.yaml +++ b/manifests/kustomize/third-party/argo/upstream/manifests/base/crds/minimal/kustomization.yaml @@ -1,6 +1,5 @@ apiVersion: kustomize.config.k8s.io/v1beta1 kind: Kustomization - resources: - argoproj.io_clusterworkflowtemplates.yaml - argoproj.io_cronworkflows.yaml diff --git a/manifests/kustomize/third-party/argo/upstream/manifests/base/kustomization.yaml b/manifests/kustomize/third-party/argo/upstream/manifests/base/kustomization.yaml index 0b9a73341d7..02094eb9c9d 100644 --- a/manifests/kustomize/third-party/argo/upstream/manifests/base/kustomization.yaml +++ b/manifests/kustomize/third-party/argo/upstream/manifests/base/kustomization.yaml @@ -1,6 +1,5 @@ apiVersion: kustomize.config.k8s.io/v1beta1 kind: Kustomization - resources: - crds - workflow-controller diff --git a/manifests/kustomize/third-party/argo/upstream/manifests/base/workflow-controller/kustomization.yaml b/manifests/kustomize/third-party/argo/upstream/manifests/base/workflow-controller/kustomization.yaml index 5ed907d651e..e3b2bf3b74b 100644 --- a/manifests/kustomize/third-party/argo/upstream/manifests/base/workflow-controller/kustomization.yaml +++ b/manifests/kustomize/third-party/argo/upstream/manifests/base/workflow-controller/kustomization.yaml @@ -1,6 +1,5 @@ apiVersion: kustomize.config.k8s.io/v1beta1 kind: Kustomization - resources: - workflow-controller-configmap.yaml - workflow-controller-deployment.yaml diff --git a/manifests/kustomize/third-party/argo/upstream/manifests/base/workflow-controller/workflow-controller-configmap.yaml b/manifests/kustomize/third-party/argo/upstream/manifests/base/workflow-controller/workflow-controller-configmap.yaml index d28f4edb3f2..3d1f2534d61 100644 --- a/manifests/kustomize/third-party/argo/upstream/manifests/base/workflow-controller/workflow-controller-configmap.yaml +++ 
b/manifests/kustomize/third-party/argo/upstream/manifests/base/workflow-controller/workflow-controller-configmap.yaml @@ -1,4 +1,4 @@ apiVersion: v1 kind: ConfigMap -metadata: +metadata: # kpt-merge: /workflow-controller-configmap name: workflow-controller-configmap diff --git a/manifests/kustomize/third-party/argo/upstream/manifests/base/workflow-controller/workflow-controller-deployment.yaml b/manifests/kustomize/third-party/argo/upstream/manifests/base/workflow-controller/workflow-controller-deployment.yaml index 3deacb6e026..86676a377d8 100644 --- a/manifests/kustomize/third-party/argo/upstream/manifests/base/workflow-controller/workflow-controller-deployment.yaml +++ b/manifests/kustomize/third-party/argo/upstream/manifests/base/workflow-controller/workflow-controller-deployment.yaml @@ -1,6 +1,6 @@ apiVersion: apps/v1 kind: Deployment -metadata: +metadata: # kpt-merge: /workflow-controller name: workflow-controller spec: selector: diff --git a/manifests/kustomize/third-party/argo/upstream/manifests/base/workflow-controller/workflow-controller-metrics-service.yaml b/manifests/kustomize/third-party/argo/upstream/manifests/base/workflow-controller/workflow-controller-metrics-service.yaml index d040adaa08a..c646a2482fa 100644 --- a/manifests/kustomize/third-party/argo/upstream/manifests/base/workflow-controller/workflow-controller-metrics-service.yaml +++ b/manifests/kustomize/third-party/argo/upstream/manifests/base/workflow-controller/workflow-controller-metrics-service.yaml @@ -1,6 +1,6 @@ apiVersion: v1 kind: Service -metadata: +metadata: # kpt-merge: /workflow-controller-metrics name: workflow-controller-metrics labels: app: workflow-controller diff --git a/manifests/kustomize/third-party/argo/upstream/manifests/base/workflow-controller/workflow-controller-sa.yaml b/manifests/kustomize/third-party/argo/upstream/manifests/base/workflow-controller/workflow-controller-sa.yaml index f3d5885df98..bc31906f374 100644 --- a/manifests/kustomize/third-party/argo/upstream/manifests/base/workflow-controller/workflow-controller-sa.yaml +++ b/manifests/kustomize/third-party/argo/upstream/manifests/base/workflow-controller/workflow-controller-sa.yaml @@ -1,4 +1,4 @@ apiVersion: v1 kind: ServiceAccount -metadata: +metadata: # kpt-merge: /argo name: argo diff --git a/manifests/kustomize/third-party/argo/upstream/manifests/cluster-install/argo-server-rbac/argo-server-clusterole.yaml b/manifests/kustomize/third-party/argo/upstream/manifests/cluster-install/argo-server-rbac/argo-server-clusterole.yaml index f954115b005..506221526aa 100644 --- a/manifests/kustomize/third-party/argo/upstream/manifests/cluster-install/argo-server-rbac/argo-server-clusterole.yaml +++ b/manifests/kustomize/third-party/argo/upstream/manifests/cluster-install/argo-server-rbac/argo-server-clusterole.yaml @@ -1,6 +1,6 @@ apiVersion: rbac.authorization.k8s.io/v1 kind: ClusterRole -metadata: +metadata: # kpt-merge: /argo-server-cluster-role name: argo-server-cluster-role rules: - apiGroups: diff --git a/manifests/kustomize/third-party/argo/upstream/manifests/cluster-install/argo-server-rbac/argo-server-clusterolebinding.yaml b/manifests/kustomize/third-party/argo/upstream/manifests/cluster-install/argo-server-rbac/argo-server-clusterolebinding.yaml index 6d7ac27ff5e..1e54be5bcfb 100644 --- a/manifests/kustomize/third-party/argo/upstream/manifests/cluster-install/argo-server-rbac/argo-server-clusterolebinding.yaml +++ 
b/manifests/kustomize/third-party/argo/upstream/manifests/cluster-install/argo-server-rbac/argo-server-clusterolebinding.yaml @@ -1,6 +1,6 @@ apiVersion: rbac.authorization.k8s.io/v1 kind: ClusterRoleBinding -metadata: +metadata: # kpt-merge: /argo-server-binding name: argo-server-binding roleRef: apiGroup: rbac.authorization.k8s.io diff --git a/manifests/kustomize/third-party/argo/upstream/manifests/cluster-install/argo-server-rbac/kustomization.yaml b/manifests/kustomize/third-party/argo/upstream/manifests/cluster-install/argo-server-rbac/kustomization.yaml index 6ae17fe059a..91d213a3943 100644 --- a/manifests/kustomize/third-party/argo/upstream/manifests/cluster-install/argo-server-rbac/kustomization.yaml +++ b/manifests/kustomize/third-party/argo/upstream/manifests/cluster-install/argo-server-rbac/kustomization.yaml @@ -1,6 +1,5 @@ apiVersion: kustomize.config.k8s.io/v1beta1 kind: Kustomization - resources: - argo-server-clusterole.yaml - argo-server-clusterolebinding.yaml diff --git a/manifests/kustomize/third-party/argo/upstream/manifests/cluster-install/kustomization.yaml b/manifests/kustomize/third-party/argo/upstream/manifests/cluster-install/kustomization.yaml index a1698bd2ac7..7f5d34ea7f8 100644 --- a/manifests/kustomize/third-party/argo/upstream/manifests/cluster-install/kustomization.yaml +++ b/manifests/kustomize/third-party/argo/upstream/manifests/cluster-install/kustomization.yaml @@ -1,6 +1,5 @@ apiVersion: kustomize.config.k8s.io/v1beta1 kind: Kustomization - resources: - ../base - ./workflow-controller-rbac diff --git a/manifests/kustomize/third-party/argo/upstream/manifests/cluster-install/workflow-controller-rbac/kustomization.yaml b/manifests/kustomize/third-party/argo/upstream/manifests/cluster-install/workflow-controller-rbac/kustomization.yaml index 0ce4eff69b5..ab7574c6ea5 100644 --- a/manifests/kustomize/third-party/argo/upstream/manifests/cluster-install/workflow-controller-rbac/kustomization.yaml +++ b/manifests/kustomize/third-party/argo/upstream/manifests/cluster-install/workflow-controller-rbac/kustomization.yaml @@ -1,6 +1,5 @@ apiVersion: kustomize.config.k8s.io/v1beta1 kind: Kustomization - resources: - workflow-aggregate-roles.yaml - workflow-controller-clusterrole.yaml diff --git a/manifests/kustomize/third-party/argo/upstream/manifests/cluster-install/workflow-controller-rbac/workflow-aggregate-roles.yaml b/manifests/kustomize/third-party/argo/upstream/manifests/cluster-install/workflow-controller-rbac/workflow-aggregate-roles.yaml index b632e22bc35..4b83b5986d2 100644 --- a/manifests/kustomize/third-party/argo/upstream/manifests/cluster-install/workflow-controller-rbac/workflow-aggregate-roles.yaml +++ b/manifests/kustomize/third-party/argo/upstream/manifests/cluster-install/workflow-controller-rbac/workflow-aggregate-roles.yaml @@ -1,6 +1,6 @@ apiVersion: rbac.authorization.k8s.io/v1 kind: ClusterRole -metadata: +metadata: # kpt-merge: /argo-aggregate-to-view name: argo-aggregate-to-view labels: rbac.authorization.k8s.io/aggregate-to-view: "true" @@ -22,11 +22,10 @@ rules: - get - list - watch - --- apiVersion: rbac.authorization.k8s.io/v1 kind: ClusterRole -metadata: +metadata: # kpt-merge: /argo-aggregate-to-edit name: argo-aggregate-to-edit labels: rbac.authorization.k8s.io/aggregate-to-edit: "true" @@ -53,11 +52,10 @@ rules: - patch - update - watch - --- apiVersion: rbac.authorization.k8s.io/v1 kind: ClusterRole -metadata: +metadata: # kpt-merge: /argo-aggregate-to-admin name: argo-aggregate-to-admin labels: 
rbac.authorization.k8s.io/aggregate-to-admin: "true" diff --git a/manifests/kustomize/third-party/argo/upstream/manifests/cluster-install/workflow-controller-rbac/workflow-controller-clusterrole.yaml b/manifests/kustomize/third-party/argo/upstream/manifests/cluster-install/workflow-controller-rbac/workflow-controller-clusterrole.yaml index 0bc7edd4f12..fbe9456779d 100644 --- a/manifests/kustomize/third-party/argo/upstream/manifests/cluster-install/workflow-controller-rbac/workflow-controller-clusterrole.yaml +++ b/manifests/kustomize/third-party/argo/upstream/manifests/cluster-install/workflow-controller-rbac/workflow-controller-clusterrole.yaml @@ -1,6 +1,6 @@ apiVersion: rbac.authorization.k8s.io/v1 kind: ClusterRole -metadata: +metadata: # kpt-merge: /argo-cluster-role name: argo-cluster-role rules: - apiGroups: @@ -83,10 +83,10 @@ rules: - create - patch - apiGroups: - - "policy" + - "policy" resources: - - poddisruptionbudgets + - poddisruptionbudgets verbs: - - create - - get - - delete + - create + - get + - delete diff --git a/manifests/kustomize/third-party/argo/upstream/manifests/cluster-install/workflow-controller-rbac/workflow-controller-clusterrolebinding.yaml b/manifests/kustomize/third-party/argo/upstream/manifests/cluster-install/workflow-controller-rbac/workflow-controller-clusterrolebinding.yaml index b4e54ea67c9..0988ebd474b 100644 --- a/manifests/kustomize/third-party/argo/upstream/manifests/cluster-install/workflow-controller-rbac/workflow-controller-clusterrolebinding.yaml +++ b/manifests/kustomize/third-party/argo/upstream/manifests/cluster-install/workflow-controller-rbac/workflow-controller-clusterrolebinding.yaml @@ -1,6 +1,6 @@ apiVersion: rbac.authorization.k8s.io/v1 kind: ClusterRoleBinding -metadata: +metadata: # kpt-merge: /argo-binding name: argo-binding roleRef: apiGroup: rbac.authorization.k8s.io diff --git a/manifests/kustomize/third-party/argo/upstream/manifests/cluster-install/workflow-controller-rbac/workflow-controller-role.yaml b/manifests/kustomize/third-party/argo/upstream/manifests/cluster-install/workflow-controller-rbac/workflow-controller-role.yaml index a4693c35864..4a9ae4fc181 100755 --- a/manifests/kustomize/third-party/argo/upstream/manifests/cluster-install/workflow-controller-rbac/workflow-controller-role.yaml +++ b/manifests/kustomize/third-party/argo/upstream/manifests/cluster-install/workflow-controller-rbac/workflow-controller-role.yaml @@ -1,6 +1,6 @@ apiVersion: rbac.authorization.k8s.io/v1 kind: Role -metadata: +metadata: # kpt-merge: /argo-role name: argo-role rules: - apiGroups: @@ -17,4 +17,3 @@ rules: - secrets verbs: - get - diff --git a/manifests/kustomize/third-party/argo/upstream/manifests/cluster-install/workflow-controller-rbac/workflow-controller-rolebinding.yaml b/manifests/kustomize/third-party/argo/upstream/manifests/cluster-install/workflow-controller-rbac/workflow-controller-rolebinding.yaml index 191f34d44cd..cb4f33d0431 100644 --- a/manifests/kustomize/third-party/argo/upstream/manifests/cluster-install/workflow-controller-rbac/workflow-controller-rolebinding.yaml +++ b/manifests/kustomize/third-party/argo/upstream/manifests/cluster-install/workflow-controller-rbac/workflow-controller-rolebinding.yaml @@ -1,6 +1,6 @@ apiVersion: rbac.authorization.k8s.io/v1 kind: RoleBinding -metadata: +metadata: # kpt-merge: /argo-binding name: argo-binding roleRef: apiGroup: rbac.authorization.k8s.io diff --git a/manifests/kustomize/third-party/argo/upstream/manifests/namespace-install/argo-server-rbac/argo-server-role.yaml 
b/manifests/kustomize/third-party/argo/upstream/manifests/namespace-install/argo-server-rbac/argo-server-role.yaml index 546be634146..ca14f4c34f5 100644 --- a/manifests/kustomize/third-party/argo/upstream/manifests/namespace-install/argo-server-rbac/argo-server-role.yaml +++ b/manifests/kustomize/third-party/argo/upstream/manifests/namespace-install/argo-server-rbac/argo-server-role.yaml @@ -1,6 +1,6 @@ apiVersion: rbac.authorization.k8s.io/v1 kind: Role -metadata: +metadata: # kpt-merge: /argo-server-role name: argo-server-role rules: - apiGroups: diff --git a/manifests/kustomize/third-party/argo/upstream/manifests/namespace-install/argo-server-rbac/argo-server-rolebinding.yaml b/manifests/kustomize/third-party/argo/upstream/manifests/namespace-install/argo-server-rbac/argo-server-rolebinding.yaml index d92f0a50965..6d0e838150f 100644 --- a/manifests/kustomize/third-party/argo/upstream/manifests/namespace-install/argo-server-rbac/argo-server-rolebinding.yaml +++ b/manifests/kustomize/third-party/argo/upstream/manifests/namespace-install/argo-server-rbac/argo-server-rolebinding.yaml @@ -1,6 +1,6 @@ apiVersion: rbac.authorization.k8s.io/v1 kind: RoleBinding -metadata: +metadata: # kpt-merge: /argo-server-binding name: argo-server-binding roleRef: apiGroup: rbac.authorization.k8s.io diff --git a/manifests/kustomize/third-party/argo/upstream/manifests/namespace-install/argo-server-rbac/kustomization.yaml b/manifests/kustomize/third-party/argo/upstream/manifests/namespace-install/argo-server-rbac/kustomization.yaml index 57a0718f655..c1ca948941f 100644 --- a/manifests/kustomize/third-party/argo/upstream/manifests/namespace-install/argo-server-rbac/kustomization.yaml +++ b/manifests/kustomize/third-party/argo/upstream/manifests/namespace-install/argo-server-rbac/kustomization.yaml @@ -1,6 +1,5 @@ apiVersion: kustomize.config.k8s.io/v1beta1 kind: Kustomization - resources: - argo-server-role.yaml - argo-server-rolebinding.yaml diff --git a/manifests/kustomize/third-party/argo/upstream/manifests/namespace-install/kustomization.yaml b/manifests/kustomize/third-party/argo/upstream/manifests/namespace-install/kustomization.yaml index 808268c9b0e..a3c7fe6fbcf 100644 --- a/manifests/kustomize/third-party/argo/upstream/manifests/namespace-install/kustomization.yaml +++ b/manifests/kustomize/third-party/argo/upstream/manifests/namespace-install/kustomization.yaml @@ -1,21 +1,19 @@ apiVersion: kustomize.config.k8s.io/v1beta1 kind: Kustomization - resources: - ../base - ./argo-server-rbac - ./workflow-controller-rbac - patchesJson6902: - target: version: v1 group: apps kind: Deployment name: workflow-controller - path: ./overlays/workflow-controller-deployment.json + path: ./overlays/workflow-controller-deployment.yaml - target: version: v1 group: apps kind: Deployment name: argo-server - path: ./overlays/argo-server-deployment.json + path: ./overlays/argo-server-deployment.yaml diff --git a/manifests/kustomize/third-party/argo/upstream/manifests/namespace-install/overlays/argo-server-deployment.json b/manifests/kustomize/third-party/argo/upstream/manifests/namespace-install/overlays/argo-server-deployment.json deleted file mode 100644 index b8d82b0c752..00000000000 --- a/manifests/kustomize/third-party/argo/upstream/manifests/namespace-install/overlays/argo-server-deployment.json +++ /dev/null @@ -1,7 +0,0 @@ -[ - { - "op": "add", - "path": "/spec/template/spec/containers/0/args/-", - "value": "--namespaced" - } -] diff --git 
a/manifests/kustomize/third-party/argo/upstream/manifests/namespace-install/overlays/argo-server-deployment.yaml b/manifests/kustomize/third-party/argo/upstream/manifests/namespace-install/overlays/argo-server-deployment.yaml new file mode 100644 index 00000000000..90fd8a53dcb --- /dev/null +++ b/manifests/kustomize/third-party/argo/upstream/manifests/namespace-install/overlays/argo-server-deployment.yaml @@ -0,0 +1,3 @@ +- op: add + path: /spec/template/spec/containers/0/args/- + value: --namespaced diff --git a/manifests/kustomize/third-party/argo/upstream/manifests/namespace-install/overlays/workflow-controller-deployment.json b/manifests/kustomize/third-party/argo/upstream/manifests/namespace-install/overlays/workflow-controller-deployment.json deleted file mode 100644 index b8d82b0c752..00000000000 --- a/manifests/kustomize/third-party/argo/upstream/manifests/namespace-install/overlays/workflow-controller-deployment.json +++ /dev/null @@ -1,7 +0,0 @@ -[ - { - "op": "add", - "path": "/spec/template/spec/containers/0/args/-", - "value": "--namespaced" - } -] diff --git a/manifests/kustomize/third-party/argo/upstream/manifests/namespace-install/overlays/workflow-controller-deployment.yaml b/manifests/kustomize/third-party/argo/upstream/manifests/namespace-install/overlays/workflow-controller-deployment.yaml new file mode 100644 index 00000000000..90fd8a53dcb --- /dev/null +++ b/manifests/kustomize/third-party/argo/upstream/manifests/namespace-install/overlays/workflow-controller-deployment.yaml @@ -0,0 +1,3 @@ +- op: add + path: /spec/template/spec/containers/0/args/- + value: --namespaced diff --git a/manifests/kustomize/third-party/argo/upstream/manifests/namespace-install/workflow-controller-rbac/kustomization.yaml b/manifests/kustomize/third-party/argo/upstream/manifests/namespace-install/workflow-controller-rbac/kustomization.yaml index f7b23b2b0bc..1f9553fe592 100644 --- a/manifests/kustomize/third-party/argo/upstream/manifests/namespace-install/workflow-controller-rbac/kustomization.yaml +++ b/manifests/kustomize/third-party/argo/upstream/manifests/namespace-install/workflow-controller-rbac/kustomization.yaml @@ -1,6 +1,5 @@ apiVersion: kustomize.config.k8s.io/v1beta1 kind: Kustomization - resources: - workflow-controller-role.yaml - workflow-controller-rolebinding.yaml diff --git a/manifests/kustomize/third-party/argo/upstream/manifests/namespace-install/workflow-controller-rbac/workflow-controller-role.yaml b/manifests/kustomize/third-party/argo/upstream/manifests/namespace-install/workflow-controller-rbac/workflow-controller-role.yaml index 908a73626a4..65613ed92d9 100644 --- a/manifests/kustomize/third-party/argo/upstream/manifests/namespace-install/workflow-controller-rbac/workflow-controller-role.yaml +++ b/manifests/kustomize/third-party/argo/upstream/manifests/namespace-install/workflow-controller-rbac/workflow-controller-role.yaml @@ -1,6 +1,6 @@ apiVersion: rbac.authorization.k8s.io/v1 kind: Role -metadata: +metadata: # kpt-merge: /argo-role name: argo-role rules: - apiGroups: diff --git a/manifests/kustomize/third-party/argo/upstream/manifests/namespace-install/workflow-controller-rbac/workflow-controller-rolebinding.yaml b/manifests/kustomize/third-party/argo/upstream/manifests/namespace-install/workflow-controller-rbac/workflow-controller-rolebinding.yaml index f8ac7ec0d60..300cb6cd04c 100644 --- a/manifests/kustomize/third-party/argo/upstream/manifests/namespace-install/workflow-controller-rbac/workflow-controller-rolebinding.yaml +++ 
b/manifests/kustomize/third-party/argo/upstream/manifests/namespace-install/workflow-controller-rbac/workflow-controller-rolebinding.yaml @@ -1,6 +1,6 @@ apiVersion: rbac.authorization.k8s.io/v1 kind: RoleBinding -metadata: +metadata: # kpt-merge: /argo-binding name: argo-binding roleRef: apiGroup: rbac.authorization.k8s.io @@ -9,4 +9,3 @@ roleRef: subjects: - kind: ServiceAccount name: argo - diff --git a/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/base/argo-server-sso-secret.yaml b/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/base/argo-server-sso-secret.yaml index 173d6b314a4..cc6fab198dd 100644 --- a/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/base/argo-server-sso-secret.yaml +++ b/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/base/argo-server-sso-secret.yaml @@ -1,6 +1,6 @@ kind: Secret apiVersion: v1 -metadata: +metadata: # kpt-merge: /argo-server-sso name: argo-server-sso stringData: clientID: argo-server diff --git a/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/base/artifact-repositories-configmap.yaml b/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/base/artifact-repositories-configmap.yaml index cd2033a3d68..b548f41a416 100644 --- a/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/base/artifact-repositories-configmap.yaml +++ b/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/base/artifact-repositories-configmap.yaml @@ -1,6 +1,6 @@ apiVersion: v1 kind: ConfigMap -metadata: +metadata: # kpt-merge: /artifact-repositories name: artifact-repositories annotations: # you'll want to change the default over time, e.g. when you move to new storage solution, @@ -31,4 +31,4 @@ data: secretKeySecret: name: my-minio-cred key: secretkey - empty: "" \ No newline at end of file + empty: "" diff --git a/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/base/cluster-workflow-template-rbac.yaml b/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/base/cluster-workflow-template-rbac.yaml index 388ea11b53b..8b3c37c9cb2 100644 --- a/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/base/cluster-workflow-template-rbac.yaml +++ b/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/base/cluster-workflow-template-rbac.yaml @@ -1,6 +1,6 @@ apiVersion: rbac.authorization.k8s.io/v1 kind: ClusterRole -metadata: +metadata: # kpt-merge: /argo-server-clusterworkflowtemplate-role name: argo-server-clusterworkflowtemplate-role rules: - apiGroups: @@ -18,7 +18,7 @@ rules: --- apiVersion: rbac.authorization.k8s.io/v1 kind: ClusterRole -metadata: +metadata: # kpt-merge: /argo-clusterworkflowtemplate-role name: argo-clusterworkflowtemplate-role rules: - apiGroups: @@ -33,7 +33,7 @@ rules: --- apiVersion: rbac.authorization.k8s.io/v1 kind: ClusterRoleBinding -metadata: +metadata: # kpt-merge: /argo-clusterworkflowtemplate-role-binding name: argo-clusterworkflowtemplate-role-binding roleRef: apiGroup: rbac.authorization.k8s.io @@ -46,7 +46,7 @@ subjects: --- apiVersion: rbac.authorization.k8s.io/v1 kind: ClusterRoleBinding -metadata: +metadata: # kpt-merge: /argo-server-clusterworkflowtemplate-role-binding name: argo-server-clusterworkflowtemplate-role-binding roleRef: apiGroup: rbac.authorization.k8s.io @@ -56,4 +56,3 @@ subjects: - kind: ServiceAccount name: argo-server namespace: argo - diff --git 
a/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/base/kubelet-executor-clusterrole.yaml b/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/base/kubelet-executor-clusterrole.yaml index d6678596491..9d5e609cd7b 100644 --- a/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/base/kubelet-executor-clusterrole.yaml +++ b/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/base/kubelet-executor-clusterrole.yaml @@ -1,6 +1,6 @@ apiVersion: rbac.authorization.k8s.io/v1 kind: ClusterRole -metadata: +metadata: # kpt-merge: /kubelet-executor name: kubelet-executor rules: # This allows the kubelet executor. @@ -9,4 +9,4 @@ rules: resources: - nodes/proxy verbs: - - get \ No newline at end of file + - get diff --git a/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/base/kubelet-executor-default-clusterrolebinding.yaml b/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/base/kubelet-executor-default-clusterrolebinding.yaml index f0aff8e6c48..3697ef637aa 100644 --- a/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/base/kubelet-executor-default-clusterrolebinding.yaml +++ b/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/base/kubelet-executor-default-clusterrolebinding.yaml @@ -1,6 +1,6 @@ apiVersion: rbac.authorization.k8s.io/v1 kind: ClusterRoleBinding -metadata: +metadata: # kpt-merge: /kubelet-executor-default name: kubelet-executor-default roleRef: apiGroup: rbac.authorization.k8s.io diff --git a/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/base/kustomization.yaml b/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/base/kustomization.yaml index b809453642d..8d9f7a8d40f 100644 --- a/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/base/kustomization.yaml +++ b/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/base/kustomization.yaml @@ -1,6 +1,5 @@ apiVersion: kustomize.config.k8s.io/v1beta1 kind: Kustomization - resources: - ../../namespace-install - minio @@ -12,7 +11,6 @@ resources: - workflow-default-rolebinding.yaml - cluster-workflow-template-rbac.yaml - artifact-repositories-configmap.yaml - patchesStrategicMerge: - overlays/workflow-controller-configmap.yaml - overlays/argo-server-deployment.yaml diff --git a/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/base/minio/kustomization.yaml b/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/base/minio/kustomization.yaml index 2d81059de41..1199d38982b 100644 --- a/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/base/minio/kustomization.yaml +++ b/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/base/minio/kustomization.yaml @@ -1,7 +1,6 @@ apiVersion: kustomize.config.k8s.io/v1beta1 kind: Kustomization - resources: - minio-deploy.yaml - minio-service.yaml - - my-minio-cred-secret.yaml \ No newline at end of file + - my-minio-cred-secret.yaml diff --git a/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/base/minio/minio-deploy.yaml b/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/base/minio/minio-deploy.yaml index 967b917413d..4a75c8316ef 100644 --- a/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/base/minio/minio-deploy.yaml +++ b/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/base/minio/minio-deploy.yaml @@ -1,6 +1,6 @@ apiVersion: apps/v1 kind: Deployment -metadata: 
+metadata: # kpt-merge: /minio name: minio labels: app: minio @@ -39,4 +39,4 @@ spec: path: /minio/health/live port: 9000 initialDelaySeconds: 5 - periodSeconds: 10 \ No newline at end of file + periodSeconds: 10 diff --git a/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/base/minio/minio-pod.yaml b/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/base/minio/minio-pod.yaml index 707fea3835a..8c617a68fa8 100644 --- a/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/base/minio/minio-pod.yaml +++ b/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/base/minio/minio-pod.yaml @@ -1,6 +1,6 @@ apiVersion: v1 kind: Pod -metadata: +metadata: # kpt-merge: /minio name: minio labels: app: minio @@ -31,4 +31,4 @@ spec: path: /minio/health/live port: 9000 initialDelaySeconds: 5 - periodSeconds: 10 \ No newline at end of file + periodSeconds: 10 diff --git a/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/base/minio/minio-service.yaml b/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/base/minio/minio-service.yaml index 6abf4af4471..e7b017d8eef 100644 --- a/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/base/minio/minio-service.yaml +++ b/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/base/minio/minio-service.yaml @@ -1,6 +1,6 @@ apiVersion: v1 kind: Service -metadata: +metadata: # kpt-merge: /minio name: minio labels: app: minio @@ -11,4 +11,3 @@ spec: - protocol: TCP port: 9000 targetPort: 9000 - diff --git a/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/base/minio/my-minio-cred-secret.yaml b/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/base/minio/my-minio-cred-secret.yaml index 7a29e26b985..52d9564592f 100644 --- a/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/base/minio/my-minio-cred-secret.yaml +++ b/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/base/minio/my-minio-cred-secret.yaml @@ -3,7 +3,7 @@ stringData: accesskey: admin secretkey: password kind: Secret -metadata: +metadata: # kpt-merge: /my-minio-cred name: my-minio-cred labels: app: minio diff --git a/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/base/overlays/argo-server-deployment.yaml b/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/base/overlays/argo-server-deployment.yaml index eab93f168c2..9fab5a80f1c 100644 --- a/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/base/overlays/argo-server-deployment.yaml +++ b/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/base/overlays/argo-server-deployment.yaml @@ -1,6 +1,6 @@ apiVersion: apps/v1 kind: Deployment -metadata: +metadata: # kpt-merge: /argo-server name: argo-server spec: template: @@ -13,4 +13,4 @@ spec: - --auth-mode - server - --auth-mode - - client \ No newline at end of file + - client diff --git a/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/base/overlays/workflow-controller-configmap.yaml b/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/base/overlays/workflow-controller-configmap.yaml index c358ece92ec..4cc36b4c6f2 100644 --- a/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/base/overlays/workflow-controller-configmap.yaml +++ b/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/base/overlays/workflow-controller-configmap.yaml @@ -57,5 +57,5 @@ data: scope: sensor-logs url: 
http://logging-facility?namespace=${metadata.namespace}&podName=${metadata.name}&startedAt=${status.startedAt}&finishedAt=${status.finishedAt} kind: ConfigMap -metadata: +metadata: # kpt-merge: /workflow-controller-configmap name: workflow-controller-configmap diff --git a/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/base/prometheus/kustomization.yaml b/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/base/prometheus/kustomization.yaml index 4fa48f13b70..32db8231d01 100644 --- a/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/base/prometheus/kustomization.yaml +++ b/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/base/prometheus/kustomization.yaml @@ -1,6 +1,5 @@ apiVersion: kustomize.config.k8s.io/v1beta1 kind: Kustomization - resources: - prometheus-deployment.yaml - prometheus-config-cluster.yaml diff --git a/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/base/prometheus/prometheus-config-cluster.yaml b/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/base/prometheus/prometheus-config-cluster.yaml index e5b849d3822..c99a33569dc 100644 --- a/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/base/prometheus/prometheus-config-cluster.yaml +++ b/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/base/prometheus/prometheus-config-cluster.yaml @@ -1,6 +1,6 @@ apiVersion: v1 kind: ConfigMap -metadata: +metadata: # kpt-merge: /prometheus-config name: prometheus-config data: prometheus.yaml: | diff --git a/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/base/prometheus/prometheus-deployment.yaml b/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/base/prometheus/prometheus-deployment.yaml index 886b4e3a176..f604b2ab38d 100644 --- a/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/base/prometheus/prometheus-deployment.yaml +++ b/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/base/prometheus/prometheus-deployment.yaml @@ -8,7 +8,7 @@ # be modified if the default is overriden. 
apiVersion: apps/v1 kind: Deployment -metadata: +metadata: # kpt-merge: /prometheus name: prometheus spec: replicas: 1 @@ -27,11 +27,9 @@ spec: args: - --config.file=/config/prometheus.yaml volumeMounts: - - name: config - mountPath: /config + - name: config + mountPath: /config volumes: - name: config configMap: name: prometheus-config - - diff --git a/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/base/prometheus/prometheus-service.yaml b/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/base/prometheus/prometheus-service.yaml index 4f07a56d0fe..26ce49d6e18 100644 --- a/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/base/prometheus/prometheus-service.yaml +++ b/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/base/prometheus/prometheus-service.yaml @@ -1,6 +1,6 @@ apiVersion: v1 kind: Service -metadata: +metadata: # kpt-merge: /prometheus name: prometheus spec: selector: diff --git a/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/base/webhooks/argo-workflows-webhook-clients-secret.yaml b/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/base/webhooks/argo-workflows-webhook-clients-secret.yaml index 566b49951f2..079013e8d7c 100644 --- a/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/base/webhooks/argo-workflows-webhook-clients-secret.yaml +++ b/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/base/webhooks/argo-workflows-webhook-clients-secret.yaml @@ -1,6 +1,6 @@ kind: Secret apiVersion: v1 -metadata: +metadata: # kpt-merge: /argo-workflows-webhook-clients name: argo-workflows-webhook-clients # The data keys must be the name of a service account. stringData: @@ -17,6 +17,6 @@ stringData: type: github secret: "shh!" # https://docs.gitlab.com/ee/user/project/integrations/webhooks.html - gitlab.com: | + gitlab.com: |- type: gitlab - secret: "shh!" \ No newline at end of file + secret: "shh!" 
diff --git a/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/base/webhooks/github.com-rolebinding.yaml b/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/base/webhooks/github.com-rolebinding.yaml index 6477163c4ec..959f8bd55ce 100644 --- a/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/base/webhooks/github.com-rolebinding.yaml +++ b/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/base/webhooks/github.com-rolebinding.yaml @@ -1,6 +1,6 @@ apiVersion: rbac.authorization.k8s.io/v1 kind: RoleBinding -metadata: +metadata: # kpt-merge: /github.com name: github.com roleRef: apiGroup: rbac.authorization.k8s.io @@ -9,4 +9,4 @@ roleRef: subjects: - kind: ServiceAccount name: github.com - namespace: argo \ No newline at end of file + namespace: argo diff --git a/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/base/webhooks/github.com-sa.yaml b/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/base/webhooks/github.com-sa.yaml index 7e42d02edb2..a2591092f65 100644 --- a/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/base/webhooks/github.com-sa.yaml +++ b/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/base/webhooks/github.com-sa.yaml @@ -1,4 +1,4 @@ apiVersion: v1 kind: ServiceAccount -metadata: +metadata: # kpt-merge: /github.com name: github.com diff --git a/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/base/webhooks/kustomization.yaml b/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/base/webhooks/kustomization.yaml index ffef982a706..162751ce9cc 100644 --- a/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/base/webhooks/kustomization.yaml +++ b/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/base/webhooks/kustomization.yaml @@ -1,6 +1,5 @@ apiVersion: kustomize.config.k8s.io/v1beta1 kind: Kustomization - resources: - submit-workflow-template-role.yaml - github.com-sa.yaml diff --git a/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/base/webhooks/submit-workflow-template-role.yaml b/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/base/webhooks/submit-workflow-template-role.yaml index 82dd187abd3..b1dee3fedb8 100644 --- a/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/base/webhooks/submit-workflow-template-role.yaml +++ b/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/base/webhooks/submit-workflow-template-role.yaml @@ -2,7 +2,7 @@ # You could tighten this further (but perhaps impractically) by using `resourceNames` apiVersion: rbac.authorization.k8s.io/v1 kind: Role -metadata: +metadata: # kpt-merge: /submit-workflow-template name: submit-workflow-template rules: - apiGroups: @@ -22,4 +22,4 @@ rules: resources: - workflows verbs: - - create \ No newline at end of file + - create diff --git a/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/base/workflow-default-rolebinding.yaml b/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/base/workflow-default-rolebinding.yaml index 9cca0400776..52ec46ba539 100644 --- a/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/base/workflow-default-rolebinding.yaml +++ b/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/base/workflow-default-rolebinding.yaml @@ -1,6 +1,6 @@ apiVersion: rbac.authorization.k8s.io/v1 kind: RoleBinding -metadata: +metadata: # kpt-merge: 
/workflow-default-binding name: workflow-default-binding roleRef: apiGroup: rbac.authorization.k8s.io diff --git a/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/base/workflow-role.yaml b/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/base/workflow-role.yaml index d7f4ee34f50..ace0d759bfb 100644 --- a/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/base/workflow-role.yaml +++ b/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/base/workflow-role.yaml @@ -1,7 +1,7 @@ # https://argoproj.github.io/argo-workflows/workflow-rbac/ apiVersion: rbac.authorization.k8s.io/v1 kind: Role -metadata: +metadata: # kpt-merge: /workflow-role name: workflow-role rules: # pod get/watch is used to identify the container IDs of the current pod @@ -47,4 +47,4 @@ rules: - workflows verbs: - create - - get \ No newline at end of file + - get diff --git a/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/minimal/kustomization.yaml b/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/minimal/kustomization.yaml index aa0b761e856..27bb5cb0b48 100644 --- a/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/minimal/kustomization.yaml +++ b/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/minimal/kustomization.yaml @@ -1,5 +1,4 @@ apiVersion: kustomize.config.k8s.io/v1beta1 kind: Kustomization - resources: - ../base diff --git a/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/mysql/argo-mysql-config-secret.yaml b/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/mysql/argo-mysql-config-secret.yaml index ad496bc6137..cfe014fef26 100644 --- a/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/mysql/argo-mysql-config-secret.yaml +++ b/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/mysql/argo-mysql-config-secret.yaml @@ -3,7 +3,7 @@ stringData: username: mysql password: password kind: Secret -metadata: +metadata: # kpt-merge: /argo-mysql-config name: argo-mysql-config labels: app: mysql diff --git a/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/mysql/kustomization.yaml b/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/mysql/kustomization.yaml index 97b3ca0a1ed..edacf51ff4a 100644 --- a/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/mysql/kustomization.yaml +++ b/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/mysql/kustomization.yaml @@ -1,11 +1,9 @@ apiVersion: kustomize.config.k8s.io/v1beta1 kind: Kustomization - resources: - ../base - argo-mysql-config-secret.yaml - mysql-deployment.yaml - mysql-service.yaml - patchesStrategicMerge: - - overlays/workflow-controller-configmap.yaml \ No newline at end of file + - overlays/workflow-controller-configmap.yaml diff --git a/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/mysql/mysql-deployment.yaml b/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/mysql/mysql-deployment.yaml index d41ad079fb1..4334cd9d5b5 100644 --- a/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/mysql/mysql-deployment.yaml +++ b/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/mysql/mysql-deployment.yaml @@ -1,6 +1,6 @@ apiVersion: apps/v1 kind: Deployment -metadata: +metadata: # kpt-merge: /mysql name: mysql labels: app: mysql @@ -34,4 +34,4 @@ spec: initialDelaySeconds: 15 timeoutSeconds: 2 nodeSelector: - kubernetes.io/os: 
linux \ No newline at end of file + kubernetes.io/os: linux diff --git a/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/mysql/mysql-service.yaml b/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/mysql/mysql-service.yaml index 98be938e3f1..45a03a018d6 100644 --- a/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/mysql/mysql-service.yaml +++ b/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/mysql/mysql-service.yaml @@ -1,6 +1,6 @@ apiVersion: v1 kind: Service -metadata: +metadata: # kpt-merge: /mysql name: mysql labels: app: mysql @@ -10,4 +10,4 @@ spec: ports: - protocol: TCP port: 3306 - targetPort: 3306 \ No newline at end of file + targetPort: 3306 diff --git a/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/mysql/overlays/workflow-controller-configmap.yaml b/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/mysql/overlays/workflow-controller-configmap.yaml index c20578d1482..31b15d50bdd 100644 --- a/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/mysql/overlays/workflow-controller-configmap.yaml +++ b/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/mysql/overlays/workflow-controller-configmap.yaml @@ -20,5 +20,5 @@ data: name: argo-mysql-config key: password kind: ConfigMap -metadata: - name: workflow-controller-configmap \ No newline at end of file +metadata: # kpt-merge: /workflow-controller-configmap + name: workflow-controller-configmap diff --git a/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/postgres/argo-postgres-config-secret.yaml b/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/postgres/argo-postgres-config-secret.yaml index 2a154b8c572..e14013fa585 100644 --- a/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/postgres/argo-postgres-config-secret.yaml +++ b/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/postgres/argo-postgres-config-secret.yaml @@ -3,7 +3,7 @@ stringData: username: postgres password: password kind: Secret -metadata: +metadata: # kpt-merge: /argo-postgres-config name: argo-postgres-config labels: app: postgres diff --git a/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/postgres/kustomization.yaml b/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/postgres/kustomization.yaml index b039183f45f..a70a0cc26b3 100644 --- a/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/postgres/kustomization.yaml +++ b/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/postgres/kustomization.yaml @@ -1,11 +1,9 @@ apiVersion: kustomize.config.k8s.io/v1beta1 kind: Kustomization - resources: - ../base - argo-postgres-config-secret.yaml - postgres-deployment.yaml - postgres-service.yaml - patchesStrategicMerge: - - overlays/workflow-controller-configmap.yaml \ No newline at end of file + - overlays/workflow-controller-configmap.yaml diff --git a/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/postgres/overlays/workflow-controller-configmap.yaml b/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/postgres/overlays/workflow-controller-configmap.yaml index 6675ce86e2d..28daa5b87e9 100644 --- a/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/postgres/overlays/workflow-controller-configmap.yaml +++ b/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/postgres/overlays/workflow-controller-configmap.yaml @@ 
-20,5 +20,5 @@ data: name: argo-postgres-config key: password kind: ConfigMap -metadata: - name: workflow-controller-configmap \ No newline at end of file +metadata: # kpt-merge: /workflow-controller-configmap + name: workflow-controller-configmap diff --git a/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/postgres/postgres-deployment.yaml b/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/postgres/postgres-deployment.yaml index c22a5009196..4f88e4040b8 100644 --- a/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/postgres/postgres-deployment.yaml +++ b/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/postgres/postgres-deployment.yaml @@ -1,6 +1,6 @@ apiVersion: apps/v1 kind: Deployment -metadata: +metadata: # kpt-merge: /postgres name: postgres labels: app: postgres @@ -28,4 +28,4 @@ spec: initialDelaySeconds: 15 timeoutSeconds: 2 nodeSelector: - kubernetes.io/os: linux \ No newline at end of file + kubernetes.io/os: linux diff --git a/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/postgres/postgres-service.yaml b/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/postgres/postgres-service.yaml index e59ffa43721..b613893694e 100644 --- a/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/postgres/postgres-service.yaml +++ b/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/postgres/postgres-service.yaml @@ -1,6 +1,6 @@ apiVersion: v1 kind: Service -metadata: +metadata: # kpt-merge: /postgres name: postgres labels: app: postgres @@ -10,4 +10,4 @@ spec: ports: - protocol: TCP port: 5432 - targetPort: 5432 \ No newline at end of file + targetPort: 5432 diff --git a/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/sso/dex/dev-svc.yaml b/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/sso/dex/dev-svc.yaml index 29e2f7c7d67..cd3697563b8 100644 --- a/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/sso/dex/dev-svc.yaml +++ b/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/sso/dex/dev-svc.yaml @@ -1,6 +1,6 @@ apiVersion: v1 kind: Service -metadata: +metadata: # kpt-merge: /dex name: dex spec: ports: diff --git a/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/sso/dex/dex-cm.yaml b/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/sso/dex/dex-cm.yaml index c41c67cc595..45a1355fe91 100644 --- a/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/sso/dex/dex-cm.yaml +++ b/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/sso/dex/dex-cm.yaml @@ -29,5 +29,5 @@ data: username: admin userID: 08a8684b-db88-4b73-90a9-3cd1661f5466 kind: ConfigMap -metadata: - name: dex \ No newline at end of file +metadata: # kpt-merge: /dex + name: dex diff --git a/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/sso/dex/dex-deploy.yaml b/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/sso/dex/dex-deploy.yaml index 5a622bb20b8..0f15abc600a 100644 --- a/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/sso/dex/dex-deploy.yaml +++ b/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/sso/dex/dex-deploy.yaml @@ -1,6 +1,6 @@ apiVersion: apps/v1 kind: Deployment -metadata: +metadata: # kpt-merge: /dex labels: app: dex name: dex @@ -29,4 +29,4 @@ spec: volumes: - name: config configMap: - name: dex \ No newline at end of file + name: dex diff --git 
a/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/sso/dex/dex-rb.yaml b/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/sso/dex/dex-rb.yaml index 9a3f2788ce2..ce07b692e53 100644 --- a/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/sso/dex/dex-rb.yaml +++ b/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/sso/dex/dex-rb.yaml @@ -1,6 +1,6 @@ apiVersion: rbac.authorization.k8s.io/v1 kind: RoleBinding -metadata: +metadata: # kpt-merge: /dex name: dex roleRef: apiGroup: rbac.authorization.k8s.io diff --git a/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/sso/dex/dex-role.yaml b/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/sso/dex/dex-role.yaml index ff1ab9aacd4..bb9a4fb94b1 100644 --- a/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/sso/dex/dex-role.yaml +++ b/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/sso/dex/dex-role.yaml @@ -1,6 +1,6 @@ apiVersion: rbac.authorization.k8s.io/v1 kind: Role -metadata: +metadata: # kpt-merge: /dex name: dex rules: - apiGroups: diff --git a/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/sso/dex/dex-sa.yaml b/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/sso/dex/dex-sa.yaml index 97a137459c0..561106c4ec4 100644 --- a/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/sso/dex/dex-sa.yaml +++ b/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/sso/dex/dex-sa.yaml @@ -1,4 +1,4 @@ apiVersion: v1 kind: ServiceAccount -metadata: +metadata: # kpt-merge: /dex name: dex diff --git a/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/sso/dex/kustomization.yaml b/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/sso/dex/kustomization.yaml index 09b3bdbaca0..8ea85c17de0 100644 --- a/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/sso/dex/kustomization.yaml +++ b/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/sso/dex/kustomization.yaml @@ -1,9 +1,7 @@ apiVersion: kustomize.config.k8s.io/v1beta1 kind: Kustomization - commonLabels: "app.kubernetes.io/part-of": "dex" - resources: - dex-cm.yaml - dex-role.yaml diff --git a/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/sso/kustomization.yaml b/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/sso/kustomization.yaml index 3981219ff0e..70aafea6549 100644 --- a/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/sso/kustomization.yaml +++ b/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/sso/kustomization.yaml @@ -1,10 +1,8 @@ apiVersion: kustomize.config.k8s.io/v1beta1 kind: Kustomization - resources: - ../base - dex - patchesStrategicMerge: - overlays/workflow-controller-configmap.yaml - - overlays/argo-server-sa.yaml \ No newline at end of file + - overlays/argo-server-sa.yaml diff --git a/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/sso/overlays/argo-server-sa.yaml b/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/sso/overlays/argo-server-sa.yaml index 0cd3393f2a3..c09b7ac93d7 100644 --- a/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/sso/overlays/argo-server-sa.yaml +++ b/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/sso/overlays/argo-server-sa.yaml @@ -1,6 +1,6 @@ apiVersion: v1 kind: ServiceAccount -metadata: +metadata: # kpt-merge: /argo-server 
name: argo-server annotations: workflows.argoproj.io/rbac-rule: "'authors' in groups && email == 'kilgore@kilgore.trout'" diff --git a/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/sso/overlays/workflow-controller-configmap.yaml b/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/sso/overlays/workflow-controller-configmap.yaml index b502c7f7c5e..1d7ff51133d 100644 --- a/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/sso/overlays/workflow-controller-configmap.yaml +++ b/manifests/kustomize/third-party/argo/upstream/manifests/quick-start/sso/overlays/workflow-controller-configmap.yaml @@ -15,5 +15,5 @@ data: rbac: enabled: true kind: ConfigMap -metadata: +metadata: # kpt-merge: /workflow-controller-configmap name: workflow-controller-configmap From 31ff9ad27714a35d9937a40a17a291813764e565 Mon Sep 17 00:00:00 2001 From: James Liu <37026441+zijianjoy@users.noreply.github.com> Date: Mon, 25 Oct 2021 09:35:09 -0700 Subject: [PATCH 05/31] feat(frontend): SubDAG styling for KFPv2. Fix #6419 (#6744) * feat(frontend): SubDAG styling for KFPv2 * update button --- frontend/src/components/graph/SubDagNode.tsx | 30 ++++++++++++------- .../tabs/StaticNodeDetailsV2.test.tsx | 11 ++++--- .../components/tabs/StaticNodeDetailsV2.tsx | 2 +- frontend/src/index.tsx | 2 +- frontend/tailwind.config.js | 5 +++- 5 files changed, 30 insertions(+), 20 deletions(-) diff --git a/frontend/src/components/graph/SubDagNode.tsx b/frontend/src/components/graph/SubDagNode.tsx index d36db3eaf35..0339bf99fb0 100644 --- a/frontend/src/components/graph/SubDagNode.tsx +++ b/frontend/src/components/graph/SubDagNode.tsx @@ -18,6 +18,8 @@ import CropFreeIcon from '@material-ui/icons/CropFree'; import React from 'react'; import { Handle, Position } from 'react-flow-renderer'; import { SubDagFlowElementData } from './Constants'; +// import ExpandLessIcon from '@material-ui/icons/ExpandLess'; +// import ExpandMoreIcon from '@material-ui/icons/ExpandMore'; interface SubDagNodeProps { id: string; @@ -38,26 +40,32 @@ function SubDagNode({ id, data }: SubDagNodeProps) { <> diff --git a/frontend/src/components/tabs/StaticNodeDetailsV2.test.tsx b/frontend/src/components/tabs/StaticNodeDetailsV2.test.tsx index 39862ae3df4..f6d58335049 100644 --- a/frontend/src/components/tabs/StaticNodeDetailsV2.test.tsx +++ b/frontend/src/components/tabs/StaticNodeDetailsV2.test.tsx @@ -14,15 +14,14 @@ * limitations under the License. 
*/ -import { render, waitFor, screen } from '@testing-library/react'; +import { render, screen } from '@testing-library/react'; import React from 'react'; +import * as lightweightPipelineTemplate from 'src/data/test/mock_lightweight_python_functions_v2_pipeline.json'; +import * as subdagPipelineTemplate from 'src/data/test/pipeline_with_loops_and_conditions.json'; import { testBestPractices } from 'src/TestUtils'; import { CommonTestWrapper } from 'src/TestWrapper'; import { StaticNodeDetailsV2 } from './StaticNodeDetailsV2'; -import * as lightweightPipelineTemplate from 'src/data/test/mock_lightweight_python_functions_v2_pipeline.json'; -import * as subdagPipelineTemplate from 'src/data/test/pipeline_with_loops_and_conditions.json'; - testBestPractices(); describe('StaticNodeDetailsV2', () => { @@ -139,7 +138,7 @@ describe('StaticNodeDetailsV2', () => { > , ); - screen.getByText('Open Sub-DAG'); + screen.getByText('Open Workflow'); screen.getByText('pipelineparam--flip-coin-op-Output'); expect(screen.getAllByText('STRING').length).toEqual(2); @@ -162,7 +161,7 @@ describe('StaticNodeDetailsV2', () => { > , ); - screen.getByText('Open Sub-DAG'); + screen.getByText('Open Workflow'); screen.getByText('pipelineparam--flip-coin-op-Output'); expect(screen.getAllByText('STRING').length).toEqual(4); diff --git a/frontend/src/components/tabs/StaticNodeDetailsV2.tsx b/frontend/src/components/tabs/StaticNodeDetailsV2.tsx index c1c63dc0645..feafe064ad2 100644 --- a/frontend/src/components/tabs/StaticNodeDetailsV2.tsx +++ b/frontend/src/components/tabs/StaticNodeDetailsV2.tsx @@ -120,7 +120,7 @@ function TaskNodeDetail({ {componentDag && (
)} diff --git a/frontend/src/index.tsx b/frontend/src/index.tsx index d7b032bfbdf..9fa7ea8fcbd 100644 --- a/frontend/src/index.tsx +++ b/frontend/src/index.tsx @@ -14,6 +14,7 @@ * limitations under the License. */ +// import './CSSReset'; import 'src/build/tailwind.output.css'; import MuiThemeProvider from '@material-ui/core/styles/MuiThemeProvider'; import * as React from 'react'; @@ -23,7 +24,6 @@ import { HashRouter } from 'react-router-dom'; import { cssRule } from 'typestyle'; import Router from './components/Router'; import { fonts, theme } from './Css'; -import './CSSReset'; import { initFeatures } from './features'; import { Deployments, KFP_FLAGS } from './lib/Flags'; import { GkeMetadataProvider } from './lib/GkeMetadata'; diff --git a/frontend/tailwind.config.js b/frontend/tailwind.config.js index a3244dc5ec3..c64c24e8c46 100644 --- a/frontend/tailwind.config.js +++ b/frontend/tailwind.config.js @@ -114,7 +114,10 @@ module.exports = { }, }, variants: { - extend: {}, + extend: { + borderColor: ['group-focus'], + textColor: ['group-focus'], + }, }, plugins: [], }; From ced672a628618d387ff723913702276ef9f17599 Mon Sep 17 00:00:00 2001 From: Sina Chavoshi Date: Mon, 25 Oct 2021 11:36:02 -0700 Subject: [PATCH 06/31] chore(components/google-cloud): Add Sample for DataflowPythonJobOp. PiperOrigin-RevId: 405451678 --- .../DataflowPythonJobOp_sample.ipynb | 216 ++++++++++++++++++ 1 file changed, 216 insertions(+) create mode 100644 components/google-cloud/google_cloud_pipeline_components/experimental/dataflow/python_job/DataflowPythonJobOp_sample.ipynb diff --git a/components/google-cloud/google_cloud_pipeline_components/experimental/dataflow/python_job/DataflowPythonJobOp_sample.ipynb b/components/google-cloud/google_cloud_pipeline_components/experimental/dataflow/python_job/DataflowPythonJobOp_sample.ipynb new file mode 100644 index 00000000000..252aa281941 --- /dev/null +++ b/components/google-cloud/google_cloud_pipeline_components/experimental/dataflow/python_job/DataflowPythonJobOp_sample.ipynb @@ -0,0 +1,216 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": null, + "id": "483b2f3f", + "metadata": { + "id": "483b2f3f" + }, + "outputs": [], + "source": [ + "# Copyright 2021 Google LLC\n", + "#\n", + "# Licensed under the Apache License, Version 2.0 (the \"License\");\n", + "# you may not use this file except in compliance with the License.\n", + "# You may obtain a copy of the License at\n", + "#\n", + "# https://www.apache.org/licenses/LICENSE-2.0\n", + "#\n", + "# Unless required by applicable law or agreed to in writing, software\n", + "# distributed under the License is distributed on an \"AS IS\" BASIS,\n", + "# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n", + "# See the License for the specific language governing permissions and\n", + "# limitations under the License." + ] + }, + { + "cell_type": "markdown", + "id": "81db39f3", + "metadata": { + "id": "81db39f3" + }, + "source": [ + "# Vertex Pipelines: Dataflow Python Job OP\n", + "\n", + "## Overview\n", + "This notebook shows how to use the `DataflowPythonJobOp` to create a Python Dataflow Job component. `DataflowPythonJobOp` creates a pipeline component that prepares data by submitting an Apache Beam job (authored in Python) to Cloud Dataflow for execution. The Python Beam code is run with Cloud Dataflow Runner. 
Learn more about the [Google Cloud Dataflow Runner](https://beam.apache.org/documentation/runners/dataflow/).\n", + "\n", + "\n", + "For more details on the `DataflowPythonJobOp` interface, please see the [API doc](https://google-cloud-pipeline-components.readthedocs.io/)." + ] + }, + { + "cell_type": "markdown", + "id": "94160504", + "metadata": { + "id": "94160504" + }, + "source": [ + "### Install required packages" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "a5db0dbb", + "metadata": { + "id": "a5db0dbb" + }, + "outputs": [], + "source": [ + "!pip3 install -U google-cloud-pipeline-components -q" + ] + }, + { + "cell_type": "markdown", + "id": "8e2d800b", + "metadata": { + "id": "8e2d800b" + }, + "source": [ + "## Before you begin\n", + "Set your Project ID, Location, Pipeline Root, and a few parameters required for the Dataflow sample." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "f8da5659", + "metadata": { + "id": "f8da5659" + }, + "outputs": [], + "source": [ + "PROJECT_ID = 'YOUR_PROJECT_ID'\n", + "LOCATION = \"us-central1\"\n", + "PIPELINE_ROOT = 'gs://YOUR_BUCKET_NAME' # No ending slash\n", + "\n", + "# Dataflow sample parameters\n", + "PIPELINE_NAME = 'dataflow-pipeline-sample'\n", + "OUTPUT_FILE = '{}/wc/wordcount.out'.format(PIPELINE_ROOT)" + ] + }, + { + "cell_type": "markdown", + "id": "6bb598bc", + "metadata": { + "id": "6bb598bc" + }, + "source": [ + "### Import libraries" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "501938b8", + "metadata": { + "id": "501938b8" + }, + "outputs": [], + "source": [ + "from google_cloud_pipeline_components.experimental.dataflow import DataflowPythonJobOp\n", + "from google_cloud_pipeline_components.experimental.wait_gcp_resources import WaitGcpResourcesOp" + ] + }, + { + "cell_type": "markdown", + "id": "847962bd", + "metadata": { + "id": "847962bd" + }, + "source": [ + "## Create a pipeline using DataflowPythonJobOp and WaitGcpResourcesOp\n", + "In this section, we create a pipeline using the `DataflowPythonJobOp` and the [Apache Beam WordCount Examples](https://beam.apache.org/get-started/wordcount-example/). Then we use the `WaitGcpResourcesOp` to poll the resource status and wait for it to finish.\n", + "To use the `WaitGcpResourcesOp` component, first create the `DataflowPythonJobOp` component, which outputs a JSON-formatted [gcp_resources proto](https://github.com/kubeflow/pipelines/tree/master/components/google-cloud/google_cloud_pipeline_components/experimental/proto), then pass it to the wait component."
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "ed9621e0", + "metadata": { + "id": "ed9621e0" + }, + "outputs": [], + "source": [ + "import kfp.dsl as dsl\n", + "import json\n", + "\n", + "@dsl.pipeline(\n", + " name=PIPELINE_NAME,\n", + " description='Dataflow launch python pipeline'\n", + ")\n", + "def pipeline(\n", + " python_file_path:str = 'gs://ml-pipeline-playground/samples/dataflow/wc/wc.py',\n", + " project_id:str = PROJECT_ID,\n", + " location:str = LOCATION,\n", + " staging_dir:str = PIPELINE_ROOT,\n", + " requirements_file_path:str = 'gs://ml-pipeline-playground/samples/dataflow/wc/requirements.txt',\n", + "):\n", + " dataflow_python_op = DataflowPythonJobOp(\n", + " project=project_id,\n", + " location=location,\n", + " python_module_path=python_file_path,\n", + " temp_location = staging_dir,\n", + " requirements_file_path = requirements_file_path,\n", + " args = json.dumps(['--output', OUTPUT_FILE]),\n", + " )\n", + " dataflow_wait_op = WaitGcpResourcesOp(\n", + " gcp_resources = dataflow_python_op.outputs[\"gcp_resources\"])" + ] + }, + { + "cell_type": "markdown", + "id": "d90f0434", + "metadata": { + "id": "d90f0434" + }, + "source": [ + "You can proceed to compile and run the pipeline from here as usual." + ] + } + ], + "metadata": { + "colab": { + "collapsed_sections": [], + "name": "DataflowPythonJobOp_sample.ipynb", + "private_outputs": true, + "provenance": [ + { + "file_id": "/piper/depot/google3/third_party/py/google_cloud_pipeline_components/google_cloud_pipeline_components/experimental/dataflow/python_job/DataflowPythonJobOp_sample.ipynb?workspaceId=chavoshi:dataflow_component::citc", + "timestamp": 1634795827499 + }, + { + "file_id": "1QWlRC8HvyvZuFek3kaUQ-8DuQQIUyY6_", + "timestamp": 1634795779033 + } + ] + }, + "environment": { + "name": "common-cpu.m73", + "type": "gcloud", + "uri": "gcr.io/deeplearning-platform-release/base-cpu:m73" + }, + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.7.10" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} From b7a455d9fe9a1b2112c4316d5567b8b9b2bb06e9 Mon Sep 17 00:00:00 2001 From: James Liu <37026441+zijianjoy@users.noreply.github.com> Date: Mon, 25 Oct 2021 12:34:42 -0700 Subject: [PATCH 07/31] feat(frontend): View pipeline from run ID in KFPv2. Fix #6758 (#6759) * feat(frontend): View pipeline from run ID in KFPv2. * feat: update the linkable pipeline view UX when creating a new run from a pipeline string.
* fix test snapshot --- .../v1/runtime}/hello-world-runtime.ts | 0 .../hello-world-with-steps-runtime.ts | 0 .../v1/runtime}/integration-test-runtime.ts | 0 .../{ => data/v1/runtime}/json-runtime.ts | 0 .../v1/runtime}/large-graph-runtime.ts | 0 .../v1/runtime}/mock-coinflip-runtime.ts | 0 .../v1/runtime}/mock-error-runtime.ts | 0 .../v1/runtime}/mock-retry-runtime.ts | 0 .../v1/runtime}/mock-xgboost-runtime.ts | 0 .../v1/runtime}/mock-xgboost-small-runtime.ts | 0 .../template}/mock-conditional-template.yaml | 0 .../v1/template}/mock-recursive-template.yaml | 0 .../data/v1/template/mock-template-str.json | 963 ++++++++++++++++++ .../{ => data/v1/template}/mock-template.yaml | 0 frontend/mock-backend/fixed-data.ts | 23 +- frontend/mock-backend/mock-api-middleware.ts | 8 +- frontend/src/Css.tsx | 1 + frontend/src/lib/v2/WorkflowUtils.ts | 27 + frontend/src/pages/NewRun.test.tsx | 2 +- frontend/src/pages/NewRun.tsx | 16 +- frontend/src/pages/PipelineDetails.tsx | 12 +- frontend/src/pages/RunDetailsRouter.tsx | 28 +- .../pages/__snapshots__/NewRun.test.tsx.snap | 23 +- 23 files changed, 1045 insertions(+), 58 deletions(-) rename frontend/mock-backend/{ => data/v1/runtime}/hello-world-runtime.ts (100%) rename frontend/mock-backend/{ => data/v1/runtime}/hello-world-with-steps-runtime.ts (100%) rename frontend/mock-backend/{ => data/v1/runtime}/integration-test-runtime.ts (100%) rename frontend/mock-backend/{ => data/v1/runtime}/json-runtime.ts (100%) rename frontend/mock-backend/{ => data/v1/runtime}/large-graph-runtime.ts (100%) rename frontend/mock-backend/{ => data/v1/runtime}/mock-coinflip-runtime.ts (100%) rename frontend/mock-backend/{ => data/v1/runtime}/mock-error-runtime.ts (100%) rename frontend/mock-backend/{ => data/v1/runtime}/mock-retry-runtime.ts (100%) rename frontend/mock-backend/{ => data/v1/runtime}/mock-xgboost-runtime.ts (100%) rename frontend/mock-backend/{ => data/v1/runtime}/mock-xgboost-small-runtime.ts (100%) rename frontend/mock-backend/{ => data/v1/template}/mock-conditional-template.yaml (100%) rename frontend/mock-backend/{ => data/v1/template}/mock-recursive-template.yaml (100%) create mode 100644 frontend/mock-backend/data/v1/template/mock-template-str.json rename frontend/mock-backend/{ => data/v1/template}/mock-template.yaml (100%) diff --git a/frontend/mock-backend/hello-world-runtime.ts b/frontend/mock-backend/data/v1/runtime/hello-world-runtime.ts similarity index 100% rename from frontend/mock-backend/hello-world-runtime.ts rename to frontend/mock-backend/data/v1/runtime/hello-world-runtime.ts diff --git a/frontend/mock-backend/hello-world-with-steps-runtime.ts b/frontend/mock-backend/data/v1/runtime/hello-world-with-steps-runtime.ts similarity index 100% rename from frontend/mock-backend/hello-world-with-steps-runtime.ts rename to frontend/mock-backend/data/v1/runtime/hello-world-with-steps-runtime.ts diff --git a/frontend/mock-backend/integration-test-runtime.ts b/frontend/mock-backend/data/v1/runtime/integration-test-runtime.ts similarity index 100% rename from frontend/mock-backend/integration-test-runtime.ts rename to frontend/mock-backend/data/v1/runtime/integration-test-runtime.ts diff --git a/frontend/mock-backend/json-runtime.ts b/frontend/mock-backend/data/v1/runtime/json-runtime.ts similarity index 100% rename from frontend/mock-backend/json-runtime.ts rename to frontend/mock-backend/data/v1/runtime/json-runtime.ts diff --git a/frontend/mock-backend/large-graph-runtime.ts b/frontend/mock-backend/data/v1/runtime/large-graph-runtime.ts similarity 
index 100% rename from frontend/mock-backend/large-graph-runtime.ts rename to frontend/mock-backend/data/v1/runtime/large-graph-runtime.ts diff --git a/frontend/mock-backend/mock-coinflip-runtime.ts b/frontend/mock-backend/data/v1/runtime/mock-coinflip-runtime.ts similarity index 100% rename from frontend/mock-backend/mock-coinflip-runtime.ts rename to frontend/mock-backend/data/v1/runtime/mock-coinflip-runtime.ts diff --git a/frontend/mock-backend/mock-error-runtime.ts b/frontend/mock-backend/data/v1/runtime/mock-error-runtime.ts similarity index 100% rename from frontend/mock-backend/mock-error-runtime.ts rename to frontend/mock-backend/data/v1/runtime/mock-error-runtime.ts diff --git a/frontend/mock-backend/mock-retry-runtime.ts b/frontend/mock-backend/data/v1/runtime/mock-retry-runtime.ts similarity index 100% rename from frontend/mock-backend/mock-retry-runtime.ts rename to frontend/mock-backend/data/v1/runtime/mock-retry-runtime.ts diff --git a/frontend/mock-backend/mock-xgboost-runtime.ts b/frontend/mock-backend/data/v1/runtime/mock-xgboost-runtime.ts similarity index 100% rename from frontend/mock-backend/mock-xgboost-runtime.ts rename to frontend/mock-backend/data/v1/runtime/mock-xgboost-runtime.ts diff --git a/frontend/mock-backend/mock-xgboost-small-runtime.ts b/frontend/mock-backend/data/v1/runtime/mock-xgboost-small-runtime.ts similarity index 100% rename from frontend/mock-backend/mock-xgboost-small-runtime.ts rename to frontend/mock-backend/data/v1/runtime/mock-xgboost-small-runtime.ts diff --git a/frontend/mock-backend/mock-conditional-template.yaml b/frontend/mock-backend/data/v1/template/mock-conditional-template.yaml similarity index 100% rename from frontend/mock-backend/mock-conditional-template.yaml rename to frontend/mock-backend/data/v1/template/mock-conditional-template.yaml diff --git a/frontend/mock-backend/mock-recursive-template.yaml b/frontend/mock-backend/data/v1/template/mock-recursive-template.yaml similarity index 100% rename from frontend/mock-backend/mock-recursive-template.yaml rename to frontend/mock-backend/data/v1/template/mock-recursive-template.yaml diff --git a/frontend/mock-backend/data/v1/template/mock-template-str.json b/frontend/mock-backend/data/v1/template/mock-template-str.json new file mode 100644 index 00000000000..1e4a5f6c0c6 --- /dev/null +++ b/frontend/mock-backend/data/v1/template/mock-template-str.json @@ -0,0 +1,963 @@ +{ + "apiVersion": "argoproj.io/v1alpha1", + "kind": "Workflow", + "metadata": { + "generateName": "xgboosttrainer-" + }, + "spec": { + "arguments": { + "parameters": [ + { + "name": "output" + }, + { + "name": "project" + }, + { + "name": "region", + "value": "us-central1" + }, + { + "name": "train-data", + "value": "gs://ml-pipeline-playground/sfpd/train.csv" + }, + { + "name": "eval-data", + "value": "gs://ml-pipeline-playground/sfpd/eval.csv" + }, + { + "name": "schema", + "value": "gs://ml-pipeline-playground/sfpd/schema.json" + }, + { + "name": "target", + "value": "resolution" + }, + { + "name": "rounds", + "value": "200" + }, + { + "name": "workers", + "value": "2" + }, + { + "name": "true-label", + "value": "ACTION" + } + ] + }, + "entrypoint": "xgboosttrainer", + "onExit": "delete-cluster", + "templates": [ + { + "container": { + "args": [ + "--project", + "{{inputs.parameters.project}}", + "--region", + "{{inputs.parameters.region}}", + "--cluster", + "{{inputs.parameters.create-cluster-output}}", + "--schema", + "{{inputs.parameters.schema}}", + "--train", + "{{inputs.parameters.train-data}}", + 
"--output", + "{{inputs.parameters.output}}/{{workflow.name}}/analysis" + ], + "image": "gcr.io/ml-pipeline/ml-pipeline-dataproc-analyze:0.0.42" + }, + "inputs": { + "parameters": [ + { + "name": "create-cluster-output" + }, + { + "name": "output" + }, + { + "name": "project" + }, + { + "name": "region" + }, + { + "name": "schema" + }, + { + "name": "train-data" + } + ] + }, + "name": "analyze", + "outputs": { + "artifacts": [ + { + "name": "mlpipeline-ui-metadata", + "path": "/mlpipeline-ui-metadata.json", + "s3": { + "accessKeySecret": { + "key": "accesskey", + "name": "mlpipeline-minio-artifact" + }, + "bucket": "mlpipeline", + "endpoint": "minio-service.kubeflow:9000", + "insecure": true, + "key": "runs/{{workflow.uid}}/{{pod.name}}/mlpipeline-ui-metadata.tgz", + "secretKeySecret": { + "key": "secretkey", + "name": "mlpipeline-minio-artifact" + } + } + } + ], + "parameters": [ + { + "name": "analyze-output", + "valueFrom": { + "path": "/output.txt" + } + } + ] + } + }, + { + "container": { + "args": [ + "--output", + "{{inputs.parameters.output}}/{{workflow.name}}/confusionmatrix", + "--predictions", + "{{inputs.parameters.predict-output}}" + ], + "image": "gcr.io/ml-pipeline/ml-pipeline-local-confusion-matrix:0.0.42" + }, + "inputs": { + "parameters": [ + { + "name": "output" + }, + { + "name": "predict-output" + } + ] + }, + "name": "confusion-matrix", + "outputs": { + "artifacts": [ + { + "name": "mlpipeline-ui-metadata", + "path": "/mlpipeline-ui-metadata.json", + "s3": { + "accessKeySecret": { + "key": "accesskey", + "name": "mlpipeline-minio-artifact" + }, + "bucket": "mlpipeline", + "endpoint": "minio-service.kubeflow:9000", + "insecure": true, + "key": "runs/{{workflow.uid}}/{{pod.name}}/mlpipeline-ui-metadata.tgz", + "secretKeySecret": { + "key": "secretkey", + "name": "mlpipeline-minio-artifact" + } + } + } + ] + } + }, + { + "container": { + "args": [ + "--project", + "{{inputs.parameters.project}}", + "--region", + "{{inputs.parameters.region}}", + "--name", + "xgb-{{workflow.name}}", + "--staging", + "{{inputs.parameters.output}}" + ], + "image": "gcr.io/ml-pipeline/ml-pipeline-dataproc-create-cluster:0.0.42" + }, + "inputs": { + "parameters": [ + { + "name": "output" + }, + { + "name": "project" + }, + { + "name": "region" + } + ] + }, + "name": "create-cluster", + "outputs": { + "artifacts": [ + { + "name": "mlpipeline-ui-metadata", + "path": "/mlpipeline-ui-metadata.json", + "s3": { + "accessKeySecret": { + "key": "accesskey", + "name": "mlpipeline-minio-artifact" + }, + "bucket": "mlpipeline", + "endpoint": "minio-service.kubeflow:9000", + "insecure": true, + "key": "runs/{{workflow.uid}}/{{pod.name}}/mlpipeline-ui-metadata.tgz", + "secretKeySecret": { + "key": "secretkey", + "name": "mlpipeline-minio-artifact" + } + } + } + ], + "parameters": [ + { + "name": "create-cluster-output", + "valueFrom": { + "path": "/output.txt" + } + } + ] + } + }, + { + "container": { + "args": [ + "--project", + "{{inputs.parameters.project}}", + "--region", + "{{inputs.parameters.region}}", + "--name", + "xgb-{{workflow.name}}" + ], + "image": "gcr.io/ml-pipeline/ml-pipeline-dataproc-delete-cluster:0.0.42" + }, + "inputs": { + "parameters": [ + { + "name": "project" + }, + { + "name": "region" + } + ] + }, + "name": "delete-cluster", + "outputs": { + "artifacts": [ + { + "name": "mlpipeline-ui-metadata", + "path": "/mlpipeline-ui-metadata.json", + "s3": { + "accessKeySecret": { + "key": "accesskey", + "name": "mlpipeline-minio-artifact" + }, + "bucket": "mlpipeline", + "endpoint": 
"minio-service.kubeflow:9000", + "insecure": true, + "key": "runs/{{workflow.uid}}/{{pod.name}}/mlpipeline-ui-metadata.tgz", + "secretKeySecret": { + "key": "secretkey", + "name": "mlpipeline-minio-artifact" + } + } + } + ] + } + }, + { + "dag": { + "tasks": [ + { + "arguments": { + "parameters": [ + { + "name": "create-cluster-output", + "value": "{{tasks.create-cluster.outputs.parameters.create-cluster-output}}" + }, + { + "name": "output", + "value": "{{inputs.parameters.output}}" + }, + { + "name": "project", + "value": "{{inputs.parameters.project}}" + }, + { + "name": "region", + "value": "{{inputs.parameters.region}}" + }, + { + "name": "schema", + "value": "{{inputs.parameters.schema}}" + }, + { + "name": "train-data", + "value": "{{inputs.parameters.train-data}}" + } + ] + }, + "dependencies": [ + "create-cluster" + ], + "name": "analyze", + "template": "analyze" + }, + { + "arguments": { + "parameters": [ + { + "name": "output", + "value": "{{inputs.parameters.output}}" + }, + { + "name": "predict-output", + "value": "{{tasks.predict.outputs.parameters.predict-output}}" + } + ] + }, + "dependencies": [ + "predict" + ], + "name": "confusion-matrix", + "template": "confusion-matrix" + }, + { + "arguments": { + "parameters": [ + { + "name": "output", + "value": "{{inputs.parameters.output}}" + }, + { + "name": "project", + "value": "{{inputs.parameters.project}}" + }, + { + "name": "region", + "value": "{{inputs.parameters.region}}" + } + ] + }, + "name": "create-cluster", + "template": "create-cluster" + }, + { + "arguments": { + "parameters": [ + { + "name": "analyze-output", + "value": "{{tasks.analyze.outputs.parameters.analyze-output}}" + }, + { + "name": "create-cluster-output", + "value": "{{tasks.create-cluster.outputs.parameters.create-cluster-output}}" + }, + { + "name": "output", + "value": "{{inputs.parameters.output}}" + }, + { + "name": "project", + "value": "{{inputs.parameters.project}}" + }, + { + "name": "region", + "value": "{{inputs.parameters.region}}" + }, + { + "name": "target", + "value": "{{inputs.parameters.target}}" + }, + { + "name": "train-output", + "value": "{{tasks.train.outputs.parameters.train-output}}" + }, + { + "name": "transform-eval", + "value": "{{tasks.transform.outputs.parameters.transform-eval}}" + } + ] + }, + "dependencies": [ + "analyze", + "create-cluster", + "train", + "transform" + ], + "name": "predict", + "template": "predict" + }, + { + "arguments": { + "parameters": [ + { + "name": "output", + "value": "{{inputs.parameters.output}}" + }, + { + "name": "predict-output", + "value": "{{tasks.predict.outputs.parameters.predict-output}}" + }, + { + "name": "true-label", + "value": "{{inputs.parameters.true-label}}" + } + ] + }, + "dependencies": [ + "predict" + ], + "name": "roc", + "template": "roc" + }, + { + "arguments": { + "parameters": [ + { + "name": "analyze-output", + "value": "{{tasks.analyze.outputs.parameters.analyze-output}}" + }, + { + "name": "create-cluster-output", + "value": "{{tasks.create-cluster.outputs.parameters.create-cluster-output}}" + }, + { + "name": "output", + "value": "{{inputs.parameters.output}}" + }, + { + "name": "project", + "value": "{{inputs.parameters.project}}" + }, + { + "name": "region", + "value": "{{inputs.parameters.region}}" + }, + { + "name": "rounds", + "value": "{{inputs.parameters.rounds}}" + }, + { + "name": "target", + "value": "{{inputs.parameters.target}}" + }, + { + "name": "transform-eval", + "value": "{{tasks.transform.outputs.parameters.transform-eval}}" + }, + { + "name": 
"transform-train", + "value": "{{tasks.transform.outputs.parameters.transform-train}}" + }, + { + "name": "workers", + "value": "{{inputs.parameters.workers}}" + } + ] + }, + "dependencies": [ + "analyze", + "create-cluster", + "transform" + ], + "name": "train", + "template": "train" + }, + { + "arguments": { + "parameters": [ + { + "name": "analyze-output", + "value": "{{tasks.analyze.outputs.parameters.analyze-output}}" + }, + { + "name": "create-cluster-output", + "value": "{{tasks.create-cluster.outputs.parameters.create-cluster-output}}" + }, + { + "name": "eval-data", + "value": "{{inputs.parameters.eval-data}}" + }, + { + "name": "output", + "value": "{{inputs.parameters.output}}" + }, + { + "name": "project", + "value": "{{inputs.parameters.project}}" + }, + { + "name": "region", + "value": "{{inputs.parameters.region}}" + }, + { + "name": "target", + "value": "{{inputs.parameters.target}}" + }, + { + "name": "train-data", + "value": "{{inputs.parameters.train-data}}" + } + ] + }, + "dependencies": [ + "analyze", + "create-cluster" + ], + "name": "transform", + "template": "transform" + } + ] + }, + "inputs": { + "parameters": [ + { + "name": "eval-data" + }, + { + "name": "output" + }, + { + "name": "project" + }, + { + "name": "region" + }, + { + "name": "rounds" + }, + { + "name": "schema" + }, + { + "name": "target" + }, + { + "name": "train-data" + }, + { + "name": "true-label" + }, + { + "name": "workers" + } + ] + }, + "name": "exit-handler-1" + }, + { + "container": { + "args": [ + "--project", + "{{inputs.parameters.project}}", + "--region", + "{{inputs.parameters.region}}", + "--cluster", + "{{inputs.parameters.create-cluster-output}}", + "--predict", + "{{inputs.parameters.transform-eval}}", + "--analysis", + "{{inputs.parameters.analyze-output}}", + "--target", + "{{inputs.parameters.target}}", + "--package", + "gs://ml-pipeline-playground/xgboost4j-example-0.8-SNAPSHOT-jar-with-dependencies.jar", + "--model", + "{{inputs.parameters.train-output}}", + "--output", + "{{inputs.parameters.output}}/{{workflow.name}}/predict" + ], + "image": "gcr.io/ml-pipeline/ml-pipeline-dataproc-predict:0.0.42" + }, + "inputs": { + "parameters": [ + { + "name": "analyze-output" + }, + { + "name": "create-cluster-output" + }, + { + "name": "output" + }, + { + "name": "project" + }, + { + "name": "region" + }, + { + "name": "target" + }, + { + "name": "train-output" + }, + { + "name": "transform-eval" + } + ] + }, + "name": "predict", + "outputs": { + "artifacts": [ + { + "name": "mlpipeline-ui-metadata", + "path": "/mlpipeline-ui-metadata.json", + "s3": { + "accessKeySecret": { + "key": "accesskey", + "name": "mlpipeline-minio-artifact" + }, + "bucket": "mlpipeline", + "endpoint": "minio-service.kubeflow:9000", + "insecure": true, + "key": "runs/{{workflow.uid}}/{{pod.name}}/mlpipeline-ui-metadata.tgz", + "secretKeySecret": { + "key": "secretkey", + "name": "mlpipeline-minio-artifact" + } + } + } + ], + "parameters": [ + { + "name": "predict-output", + "valueFrom": { + "path": "/output.txt" + } + } + ] + } + }, + { + "container": { + "args": [ + "--output", + "{{inputs.parameters.output}}/{{workflow.name}}/roc", + "--predictions", + "{{inputs.parameters.predict-output}}", + "--trueclass", + "{{inputs.parameters.true-label}}" + ], + "image": "gcr.io/ml-pipeline/ml-pipeline-local-roc:0.0.42" + }, + "inputs": { + "parameters": [ + { + "name": "output" + }, + { + "name": "predict-output" + }, + { + "name": "true-label" + } + ] + }, + "name": "roc", + "outputs": { + "artifacts": [ + { + 
"name": "mlpipeline-ui-metadata", + "path": "/mlpipeline-ui-metadata.json", + "s3": { + "accessKeySecret": { + "key": "accesskey", + "name": "mlpipeline-minio-artifact" + }, + "bucket": "mlpipeline", + "endpoint": "minio-service.kubeflow:9000", + "insecure": true, + "key": "runs/{{workflow.uid}}/{{pod.name}}/mlpipeline-ui-metadata.tgz", + "secretKeySecret": { + "key": "secretkey", + "name": "mlpipeline-minio-artifact" + } + } + } + ] + } + }, + { + "container": { + "args": [ + "--project", + "{{inputs.parameters.project}}", + "--region", + "{{inputs.parameters.region}}", + "--cluster", + "{{inputs.parameters.create-cluster-output}}", + "--train", + "{{inputs.parameters.transform-train}}", + "--eval", + "{{inputs.parameters.transform-eval}}", + "--analysis", + "{{inputs.parameters.analyze-output}}", + "--target", + "{{inputs.parameters.target}}", + "--package", + "gs://ml-pipeline-playground/xgboost4j-example-0.8-SNAPSHOT-jar-with-dependencies.jar", + "--workers", + "{{inputs.parameters.workers}}", + "--rounds", + "{{inputs.parameters.rounds}}", + "--conf", + "gs://ml-pipeline-playground/trainconfcla.json", + "--output", + "{{inputs.parameters.output}}/{{workflow.name}}/model" + ], + "image": "gcr.io/ml-pipeline/ml-pipeline-dataproc-train:0.0.42" + }, + "inputs": { + "parameters": [ + { + "name": "analyze-output" + }, + { + "name": "create-cluster-output" + }, + { + "name": "output" + }, + { + "name": "project" + }, + { + "name": "region" + }, + { + "name": "rounds" + }, + { + "name": "target" + }, + { + "name": "transform-eval" + }, + { + "name": "transform-train" + }, + { + "name": "workers" + } + ] + }, + "name": "train", + "outputs": { + "artifacts": [ + { + "name": "mlpipeline-ui-metadata", + "path": "/mlpipeline-ui-metadata.json", + "s3": { + "accessKeySecret": { + "key": "accesskey", + "name": "mlpipeline-minio-artifact" + }, + "bucket": "mlpipeline", + "endpoint": "minio-service.kubeflow:9000", + "insecure": true, + "key": "runs/{{workflow.uid}}/{{pod.name}}/mlpipeline-ui-metadata.tgz", + "secretKeySecret": { + "key": "secretkey", + "name": "mlpipeline-minio-artifact" + } + } + } + ], + "parameters": [ + { + "name": "train-output", + "valueFrom": { + "path": "/output.txt" + } + } + ] + } + }, + { + "container": { + "args": [ + "--project", + "{{inputs.parameters.project}}", + "--region", + "{{inputs.parameters.region}}", + "--cluster", + "{{inputs.parameters.create-cluster-output}}", + "--train", + "{{inputs.parameters.train-data}}", + "--eval", + "{{inputs.parameters.eval-data}}", + "--analysis", + "{{inputs.parameters.analyze-output}}", + "--target", + "{{inputs.parameters.target}}", + "--output", + "{{inputs.parameters.output}}/{{workflow.name}}/transform" + ], + "image": "gcr.io/ml-pipeline/ml-pipeline-dataproc-transform:0.0.42" + }, + "inputs": { + "parameters": [ + { + "name": "analyze-output" + }, + { + "name": "create-cluster-output" + }, + { + "name": "eval-data" + }, + { + "name": "output" + }, + { + "name": "project" + }, + { + "name": "region" + }, + { + "name": "target" + }, + { + "name": "train-data" + } + ] + }, + "name": "transform", + "outputs": { + "artifacts": [ + { + "name": "mlpipeline-ui-metadata", + "path": "/mlpipeline-ui-metadata.json", + "s3": { + "accessKeySecret": { + "key": "accesskey", + "name": "mlpipeline-minio-artifact" + }, + "bucket": "mlpipeline", + "endpoint": "minio-service.kubeflow:9000", + "insecure": true, + "key": "runs/{{workflow.uid}}/{{pod.name}}/mlpipeline-ui-metadata.tgz", + "secretKeySecret": { + "key": "secretkey", + "name": 
"mlpipeline-minio-artifact" + } + } + } + ], + "parameters": [ + { + "name": "transform-eval", + "valueFrom": { + "path": "/output_eval.txt" + } + }, + { + "name": "transform-train", + "valueFrom": { + "path": "/output_train.txt" + } + } + ] + } + }, + { + "dag": { + "tasks": [ + { + "arguments": { + "parameters": [ + { + "name": "eval-data", + "value": "{{inputs.parameters.eval-data}}" + }, + { + "name": "output", + "value": "{{inputs.parameters.output}}" + }, + { + "name": "project", + "value": "{{inputs.parameters.project}}" + }, + { + "name": "region", + "value": "{{inputs.parameters.region}}" + }, + { + "name": "rounds", + "value": "{{inputs.parameters.rounds}}" + }, + { + "name": "schema", + "value": "{{inputs.parameters.schema}}" + }, + { + "name": "target", + "value": "{{inputs.parameters.target}}" + }, + { + "name": "train-data", + "value": "{{inputs.parameters.train-data}}" + }, + { + "name": "true-label", + "value": "{{inputs.parameters.true-label}}" + }, + { + "name": "workers", + "value": "{{inputs.parameters.workers}}" + } + ] + }, + "name": "exit-handler-1", + "template": "exit-handler-1" + } + ] + }, + "inputs": { + "parameters": [ + { + "name": "eval-data" + }, + { + "name": "output" + }, + { + "name": "project" + }, + { + "name": "region" + }, + { + "name": "rounds" + }, + { + "name": "schema" + }, + { + "name": "target" + }, + { + "name": "train-data" + }, + { + "name": "true-label" + }, + { + "name": "workers" + } + ] + }, + "name": "xgboosttrainer" + } + ] + } +} diff --git a/frontend/mock-backend/mock-template.yaml b/frontend/mock-backend/data/v1/template/mock-template.yaml similarity index 100% rename from frontend/mock-backend/mock-template.yaml rename to frontend/mock-backend/data/v1/template/mock-template.yaml diff --git a/frontend/mock-backend/fixed-data.ts b/frontend/mock-backend/fixed-data.ts index 98e72ef3b90..2b80b3ee010 100644 --- a/frontend/mock-backend/fixed-data.ts +++ b/frontend/mock-backend/fixed-data.ts @@ -18,14 +18,15 @@ import { ApiPipeline, ApiPipelineVersion } from '../src/apis/pipeline'; import { ApiRelationship, ApiResourceType, ApiRunDetail, RunMetricFormat } from '../src/apis/run'; import v2_lightweight_python_pipeline from './data/v2/pipeline/mock_lightweight_python_functions_v2_pipeline.json'; import xgboost_sample_pipeline from './data/v2/pipeline/xgboost_sample_pipeline.json'; -import helloWorldRun from './hello-world-runtime'; -import helloWorldWithStepsRun from './hello-world-with-steps-runtime'; -import jsonRun from './json-runtime'; -import largeGraph from './large-graph-runtime'; -import coinflipRun from './mock-coinflip-runtime'; -import errorRun from './mock-error-runtime'; -import retryRun from './mock-retry-runtime'; -import xgboostRun from './mock-xgboost-runtime'; +import helloWorldRun from './data/v1/runtime/hello-world-runtime'; +import helloWorldWithStepsRun from './data/v1/runtime/hello-world-with-steps-runtime'; +import jsonRun from './data/v1/runtime/json-runtime'; +import largeGraph from './data/v1/runtime/large-graph-runtime'; +import coinflipRun from './data/v1/runtime/mock-coinflip-runtime'; +import errorRun from './data/v1/runtime/mock-error-runtime'; +import retryRun from './data/v1/runtime/mock-retry-runtime'; +import xgboostRun from './data/v1/runtime/mock-xgboost-runtime'; +import mock_template from './data/v1/template/mock-template-str.json'; function padStartTwoZeroes(str: string): string { let padded = str || ''; @@ -199,7 +200,7 @@ const pipelines: ApiPipeline[] = [ created_at: new 
Date('2019-10-25T20:59:23.000Z'), description: 'A pipeline using [markdown](https://en.wikipedia.org/wiki/Markdown) for description.', - id: '8fbe3bd6-a01f-11e8-98d0-529269fb1461', + id: '8fbe3bd6-a01f-11e8-98d0-529269fb1499', name: 'Markdown description', parameters: [], }, @@ -829,7 +830,7 @@ const runs: ApiRunDetail[] = [ { name: 'paramName1', value: 'paramVal1' }, { name: 'paramName2', value: 'paramVal2' }, ], - workflow_manifest: JSON.stringify(helloWorldRun), + workflow_manifest: JSON.stringify(mock_template), }, resource_references: [ { @@ -855,7 +856,7 @@ const runs: ApiRunDetail[] = [ id: '808ecf03-ee3b-48c6-9fa1-5f14ad11a3f8', name: 'Very large graph', pipeline_spec: { - workflow_manifest: JSON.stringify(largeGraph), + workflow_manifest: JSON.stringify(mock_template), }, resource_references: [ { diff --git a/frontend/mock-backend/mock-api-middleware.ts b/frontend/mock-backend/mock-api-middleware.ts index 4142e127c5e..48c6ab5f098 100644 --- a/frontend/mock-backend/mock-api-middleware.ts +++ b/frontend/mock-backend/mock-api-middleware.ts @@ -35,7 +35,7 @@ import { PIPELINE_VERSIONS_LIST_MAP, v2PipelineSpecMap, } from './fixed-data'; -import helloWorldRuntime from './integration-test-runtime'; +import helloWorldRuntime from './data/v1/runtime/integration-test-runtime'; import proxyMiddleware from './proxy-middleware'; const rocMetadataJsonPath = './eval-output/metadata.json'; @@ -511,11 +511,11 @@ export default (app: express.Application) => { } let filePath = ''; if (req.params.pid === namedPipelines.noParams.id) { - filePath = './mock-backend/mock-conditional-template.yaml'; + filePath = './mock-backend/data/v1/template/mock-conditional-template.yaml'; } else if (req.params.pid === namedPipelines.unstructuredText.id) { - filePath = './mock-backend/mock-recursive-template.yaml'; + filePath = './mock-backend/data/v1/template/mock-recursive-template.yaml'; } else { - filePath = './mock-backend/mock-template.yaml'; + filePath = './mock-backend/data/v1/template/mock-template.yaml'; } if (v2PipelineSpecMap.has(req.params.pid)) { const specPath = v2PipelineSpecMap.get(req.params.pid); diff --git a/frontend/src/Css.tsx b/frontend/src/Css.tsx index 12d04032aed..4047a925d96 100644 --- a/frontend/src/Css.tsx +++ b/frontend/src/Css.tsx @@ -266,6 +266,7 @@ export const commonCss = stylesheet({ '&:hover': { color: color.linkLight, textDecoration: 'underline', + cursor: 'pointer', }, }, color: color.strong, diff --git a/frontend/src/lib/v2/WorkflowUtils.ts b/frontend/src/lib/v2/WorkflowUtils.ts index 2551875b8fc..0491c88c747 100644 --- a/frontend/src/lib/v2/WorkflowUtils.ts +++ b/frontend/src/lib/v2/WorkflowUtils.ts @@ -12,8 +12,13 @@ // See the License for the specific language governing permissions and // limitations under the License. 
+import jsyaml from 'js-yaml'; +import { FeatureKey, isFeatureEnabled } from 'src/features'; import { PipelineSpec } from 'src/generated/pipeline_spec'; import { ml_pipelines } from 'src/generated/pipeline_spec/pbjs_ml_pipelines'; +import * as StaticGraphParser from 'src/lib/StaticGraphParser'; +import { convertFlowElements } from 'src/lib/v2/StaticFlow'; +import * as WorkflowUtils from 'src/lib/v2/WorkflowUtils'; import { Workflow } from 'third_party/argo-ui/argo_template'; export function isV2Pipeline(workflow: Workflow): boolean { @@ -37,3 +42,25 @@ export function convertJsonToV2PipelineSpec(template: string): PipelineSpec { const pipelineSpec = PipelineSpec.deserializeBinary(buffer); return pipelineSpec; } + +// This needs to be changed to use pipeline_manifest vs workflow_manifest to distinguish V1 and V2. +export function isPipelineSpec(templateString: string) { + if (!templateString) { + return false; + } + try { + const template = jsyaml.safeLoad(templateString); + if (WorkflowUtils.isArgoWorkflowTemplate(template)) { + StaticGraphParser.createGraph(template!); + return false; + } else if (isFeatureEnabled(FeatureKey.V2)) { + const pipelineSpec = WorkflowUtils.convertJsonToV2PipelineSpec(templateString); + convertFlowElements(pipelineSpec); + return true; + } else { + return false; + } + } catch (err) { + return false; + } +} diff --git a/frontend/src/pages/NewRun.test.tsx b/frontend/src/pages/NewRun.test.tsx index e2c7ea752db..9e17cf6afd7 100644 --- a/frontend/src/pages/NewRun.test.tsx +++ b/frontend/src/pages/NewRun.test.tsx @@ -1128,7 +1128,7 @@ describe('NewRun', () => { await TestUtils.flushPromises(); expect(tree.state('useWorkflowFromRun')).toBe(true); - expect(tree.state('usePipelineFromRunLabel')).toBe('Using pipeline from previous page'); + expect(tree.state('usePipelineFromRunLabel')).toBe('Using pipeline from previous page.'); expect(tree).toMatchSnapshot(); }); diff --git a/frontend/src/pages/NewRun.tsx b/frontend/src/pages/NewRun.tsx index 5beaf02dc8a..f7d3d107dda 100644 --- a/frontend/src/pages/NewRun.tsx +++ b/frontend/src/pages/NewRun.tsx @@ -221,10 +221,18 @@ export class NewRun extends Page<{ namespace?: string }, NewRunState> {
Run details
{/* Pipeline selection */} - {!!workflowFromRun && ( + {workflowFromRun && (
- {usePipelineFromRunLabel} - {!!originalRunId && [View pipeline]} +
+ {usePipelineFromRunLabel} +
+
+ {originalRunId && ( + + [View pipeline] + + )} +
)} {!useWorkflowFromRun && ( @@ -919,7 +927,7 @@ export class NewRun extends Page<{ namespace?: string }, NewRunState> { const parameters = RunUtils.getParametersFromRun(runWithEmbeddedPipeline); this.setStateSafe({ parameters, - usePipelineFromRunLabel: 'Using pipeline from previous page', + usePipelineFromRunLabel: 'Using pipeline from previous page.', useWorkflowFromRun: true, workflowFromRun: workflow, }); diff --git a/frontend/src/pages/PipelineDetails.tsx b/frontend/src/pages/PipelineDetails.tsx index 143d33e516c..5ff09710ec7 100644 --- a/frontend/src/pages/PipelineDetails.tsx +++ b/frontend/src/pages/PipelineDetails.tsx @@ -204,11 +204,17 @@ class PipelineDetails extends Page<{}, PipelineDetailsState> { try { const runDetails = await Apis.runServiceApi.getRun(fromRunId); - // Convert the run's pipeline spec to YAML to be displayed as the pipeline's source. + // V1: Convert the run's pipeline spec to YAML to be displayed as the pipeline's source. + // V2: Use the pipeline spec string directly because it is already in JSON format. try { - const pipelineSpec = JSON.parse(RunUtils.getWorkflowManifest(runDetails.run) || '{}'); + const workflowManifestString = RunUtils.getWorkflowManifest(runDetails.run) || ''; + const workflowManifest = JSON.parse(workflowManifestString || '{}'); try { - templateString = JsYaml.safeDump(pipelineSpec); + if (WorkflowUtils.isPipelineSpec(workflowManifestString)) { + templateString = workflowManifestString; + } else { + templateString = JsYaml.safeDump(workflowManifest); + } } catch (err) { await this.showPageError( `Failed to parse pipeline spec from run with ID: ${runDetails.run!.id}.`, diff --git a/frontend/src/pages/RunDetailsRouter.tsx b/frontend/src/pages/RunDetailsRouter.tsx index 55ea2c4f713..ce7cf48fcd5 100644 --- a/frontend/src/pages/RunDetailsRouter.tsx +++ b/frontend/src/pages/RunDetailsRouter.tsx @@ -14,15 +14,11 @@ * limitations under the License. */ -import jsyaml from 'js-yaml'; import React from 'react'; import { useQuery } from 'react-query'; import { ApiRunDetail } from 'src/apis/run'; import { RouteParams } from 'src/components/Router'; -import { FeatureKey, isFeatureEnabled } from 'src/features'; import { Apis } from 'src/lib/Apis'; -import * as StaticGraphParser from 'src/lib/StaticGraphParser'; -import { convertFlowElements } from 'src/lib/v2/StaticFlow'; import * as WorkflowUtils from 'src/lib/v2/WorkflowUtils'; import EnhancedRunDetails, { RunDetailsProps } from 'src/pages/RunDetails'; import { RunDetailsV2 } from 'src/pages/RunDetailsV2'; @@ -50,7 +46,7 @@ export default function RunDetailsRouter(props: RunDetailsProps) { data.run.pipeline_spec.workflow_manifest ) { // TODO(zijianjoy): We need to switch to use pipeline_manifest for new API implementation. - const isV2Pipeline = isPipelineSpec(data.run.pipeline_spec.workflow_manifest); + const isV2Pipeline = WorkflowUtils.isPipelineSpec(data.run.pipeline_spec.workflow_manifest); if (isV2Pipeline) { return ( ; } -
-function isPipelineSpec(templateString: string) { - if (!templateString) { - return false; - } - try { - const template = jsyaml.safeLoad(templateString); - if (WorkflowUtils.isArgoWorkflowTemplate(template)) { - StaticGraphParser.createGraph(template!); - return false; - } else if (isFeatureEnabled(FeatureKey.V2)) { - const pipelineSpec = WorkflowUtils.convertJsonToV2PipelineSpec(templateString); - convertFlowElements(pipelineSpec); - return true; - } else { - return false; - } - } catch (err) { - return false; - } -} diff --git a/frontend/src/pages/__snapshots__/NewRun.test.tsx.snap b/frontend/src/pages/__snapshots__/NewRun.test.tsx.snap index d8c7d19451c..13f5158314f 100644 --- a/frontend/src/pages/__snapshots__/NewRun.test.tsx.snap +++ b/frontend/src/pages/__snapshots__/NewRun.test.tsx.snap @@ -13,15 +13,22 @@ exports[`NewRun arriving from pipeline details page indicates that a pipeline is Run details
- - Using pipeline from previous page - - + + Using pipeline from previous page. + +
+
- [View pipeline] - + + [View pipeline] + +
Date: Mon, 25 Oct 2021 12:58:42 -0700 Subject: [PATCH 08/31] feat(frontend): Update unknown and invalid execution node style (#6795) --- frontend/src/components/graph/ExecutionNode.tsx | 9 ++++++--- frontend/src/stories/v2/NodeGallery.stories.tsx | 9 +++++++++ 2 files changed, 15 insertions(+), 3 deletions(-) diff --git a/frontend/src/components/graph/ExecutionNode.tsx b/frontend/src/components/graph/ExecutionNode.tsx index 2309b8217ad..bbe8cc48030 100644 --- a/frontend/src/components/graph/ExecutionNode.tsx +++ b/frontend/src/components/graph/ExecutionNode.tsx @@ -26,6 +26,7 @@ import StopCircle from 'src/icons/StopCircle'; import { Execution } from 'src/third_party/mlmd'; import { classes } from 'typestyle'; import { ExecutionFlowElementData } from './Constants'; +import MoreHorizIcon from '@material-ui/icons/MoreHoriz'; export interface ExecutionNodeProps { id: string; @@ -93,9 +94,10 @@ function getIcon(state: Execution.State | undefined) { switch (state) { case Execution.State.UNKNOWN: return getStateIconWrapper( - , + , 'bg-mui-grey-200', ); + case Execution.State.NEW: return getStateIconWrapper( , @@ -117,11 +119,12 @@ ); case Execution.State.COMPLETE: return getStateIconWrapper( - , + , 'bg-mui-green-50', ); default: - throw new Error('Unknown exeuction state: ' + state); + console.error('Unknown execution state: ' + state); + return getStateIconWrapper(, 'bg-black'); } } diff --git a/frontend/src/stories/v2/NodeGallery.stories.tsx b/frontend/src/stories/v2/NodeGallery.stories.tsx index 72ab3bcd9dd..c66b3025e62 100644 --- a/frontend/src/stories/v2/NodeGallery.stories.tsx +++ b/frontend/src/stories/v2/NodeGallery.stories.tsx @@ -102,6 +102,15 @@ const elements = [ state: Execution.State.FAILED, } as ExecutionFlowElementData, }, + { + id: '9', + type: NodeTypeNames.EXECUTION, + position: { x: 100, y: 900 }, + data: { + label: 'invalid execution node', + state: 8 as Execution.State, + } as ExecutionFlowElementData, + }, { id: '101', type: NodeTypeNames.ARTIFACT, From 642ede7f6ff88fd693936b970a0cf317080464e3 Mon Sep 17 00:00:00 2001 From: Sina Chavoshi Date: Mon, 25 Oct 2021 15:23:58 -0700 Subject: [PATCH 09/31] chore(components/google-cloud): Add E2E tests for Dataflow component PiperOrigin-RevId: 405504497 --- .../container/aiplatform/Dockerfile | 3 + .../experimental/dataflow/__init__.py | 14 ++++ .../dataflow_python_job_remote_runner.py | 15 +++-- .../test_dataflow_python_job_remote_runner.py | 65 ++++++++++++++++++- 4 files changed, 90 insertions(+), 7 deletions(-) create mode 100644 components/google-cloud/google_cloud_pipeline_components/container/experimental/dataflow/__init__.py diff --git a/components/google-cloud/google_cloud_pipeline_components/container/aiplatform/Dockerfile b/components/google-cloud/google_cloud_pipeline_components/container/aiplatform/Dockerfile index 7c238887e62..bfff3064075 100644 --- a/components/google-cloud/google_cloud_pipeline_components/container/aiplatform/Dockerfile +++ b/components/google-cloud/google_cloud_pipeline_components/container/aiplatform/Dockerfile @@ -24,6 +24,9 @@ RUN pip3 install --upgrade pip RUN pip3 install -U google-cloud-aiplatform RUN pip3 install -U google-cloud-storage +# Required by dataflow_launcher +RUN pip3 install -U apache_beam[gcp] + # Install main package (switch to using pypi package for official release) RUN pip3 install "git+https://github.com/kubeflow/pipelines.git#egg=google-cloud-pipeline-components&subdirectory=components/google-cloud" diff --git
a/components/google-cloud/google_cloud_pipeline_components/container/experimental/dataflow/__init__.py b/components/google-cloud/google_cloud_pipeline_components/container/experimental/dataflow/__init__.py new file mode 100644 index 00000000000..19156cbabee --- /dev/null +++ b/components/google-cloud/google_cloud_pipeline_components/container/experimental/dataflow/__init__.py @@ -0,0 +1,14 @@ +# Copyright 2021 The Kubeflow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Google Cloud Pipeline Dataflow Remote Components.""" diff --git a/components/google-cloud/google_cloud_pipeline_components/container/experimental/dataflow/dataflow_python_job_remote_runner.py b/components/google-cloud/google_cloud_pipeline_components/container/experimental/dataflow/dataflow_python_job_remote_runner.py index b5e2a363bc7..0e0e92a6e31 100644 --- a/components/google-cloud/google_cloud_pipeline_components/container/experimental/dataflow/dataflow_python_job_remote_runner.py +++ b/components/google-cloud/google_cloud_pipeline_components/container/experimental/dataflow/dataflow_python_job_remote_runner.py @@ -13,12 +13,13 @@ # limitations under the License. """Module for launching Dataflow python jobs.""" +import json import logging import os import re import subprocess import tempfile -from typing import Tuple, Optional, List +from typing import Tuple, Optional from google.cloud import storage from google_cloud_pipeline_components.proto import gcp_resources_pb2 @@ -31,7 +32,7 @@ def create_python_job(python_module_path: str, location: str, temp_location: str, requirements_file_path: str = '', - args: Optional[List[str]] = None): + args: Optional[str] = '[]'): """Creates a Dataflow python job. Args: @@ -43,7 +44,7 @@ def create_python_job(python_module_path: str, during the execution of the pipeline. requirements_file_path: Optional, the gcs or local path to the pip requirements file. - args: The list of args to pass to the python file. + args: The JsonArray list of args to pass to the python file. 
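Here the args contract moves from a Python list to a JSON-serialized string, which the runner decodes with json.loads before building the Dataflow command. A minimal sketch of that round-trip, assuming only the standard json module; the argument values are illustrative, not taken from a real pipeline:

import json

# Caller side: serialize the argument list before passing it to the
# component (the values here are made up for illustration).
args = json.dumps(['--input', 'gs://my-bucket/data.csv'])

# Runner side, mirroring the create_python_job change in this patch:
# decode the JSON string back into a list, treating an empty value as no args.
args_list = json.loads(args) if args else []
assert args_list == ['--input', 'gs://my-bucket/data.csv']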
Returns: And instance of GCPResouces proto with the dataflow Job ID which is stored @@ -54,9 +55,13 @@ def create_python_job(python_module_path: str, job_id = None if requirements_file_path: install_requirements(requirements_file_path) + args_list = [] + if args: + args_list = json.loads(args) python_file_path = stage_file(python_module_path) - cmd = prepare_cmd(project, location, python_file_path, args, temp_location) + cmd = prepare_cmd(project, location, python_file_path, args_list, + temp_location) sub_process = Process(cmd) for line in sub_process.read_lines(): job_id, location = extract_job_id_and_location(line) @@ -83,7 +88,7 @@ def prepare_cmd(project_id, region, python_file_path, args, temp_location): '--temp_location', temp_location ] - return (['python', '-u', python_file_path] + dataflow_args + args) + return (['python3', '-u', python_file_path] + dataflow_args + args) def extract_job_id_and_location(line): diff --git a/components/google-cloud/tests/container/experimental/dataflow/test_dataflow_python_job_remote_runner.py b/components/google-cloud/tests/container/experimental/dataflow/test_dataflow_python_job_remote_runner.py index 6b99625645a..2ea97ff5384 100644 --- a/components/google-cloud/tests/container/experimental/dataflow/test_dataflow_python_job_remote_runner.py +++ b/components/google-cloud/tests/container/experimental/dataflow/test_dataflow_python_job_remote_runner.py @@ -38,6 +38,7 @@ def setUp(self): os.getenv('TEST_UNDECLARED_OUTPUTS_DIR'), 'local_file') self._requirement_file_path = f'gs://{self._test_bucket_name}/requirements.txt' self._job_id = 'test_job_id' + self._args = ['test_arg'] def tearDown(self): super(DataflowPythonJobRemoteRunnerUtilsTests, self).tearDown() @@ -113,10 +114,10 @@ def test_prepare_cmd_returns_correct_command_values(self): project_id=self._project, region=self._location, python_file_path=self._local_file_path, - args=['test_arg'], + args=self._args, temp_location=self._gcs_temp_path) expected_results = [ - 'python', '-u', self._local_file_path, '--runner', 'DataflowRunner', + 'python3', '-u', self._local_file_path, '--runner', 'DataflowRunner', '--project', self._project, '--region', self._location, '--temp_location', self._gcs_temp_path, 'test_arg' ] @@ -149,6 +150,66 @@ def test_create_python_job_raises_error_on_no_job_id( temp_location=self._gcs_temp_path) mock_process_client.wait_and_check.assert_called_once_with() + @mock.patch.object( + dataflow_python_job_remote_runner, 'stage_file', autospec=True) + @mock.patch.object( + dataflow_python_job_remote_runner, 'prepare_cmd', autospec=True) + @mock.patch.object( + dataflow_python_job_remote_runner, 'Process', autospec=True) + @mock.patch.object( + dataflow_python_job_remote_runner, + 'extract_job_id_and_location', + autospec=True) + def test_create_python_job_parses_with_empty_args_list_parses_correctly( + self, mock_extract_job_id_and_location, mock_process, mock_prepare_cmd, + unused_mock_stage_file): + mock_process_client = mock.Mock() + mock_process.return_value = mock_process_client + mock_process_client.read_lines.return_value = ['test_line'] + mock_extract_job_id_and_location.return_value = (None, None) + + with self.assertRaises(RuntimeError): + dataflow_python_job_remote_runner.create_python_job( + python_module_path=self._local_file_path, + project=self._project, + gcp_resources=self._gcp_resources, + location=self._location, + temp_location=self._gcs_temp_path) + mock_prepare_cmd.assert_called_once_with(self._project, self._location, + mock.ANY, [], self._gcs_temp_path) + 
mock_process_client.wait_and_check.assert_called_once_with() + + @mock.patch.object( + dataflow_python_job_remote_runner, 'stage_file', autospec=True) + @mock.patch.object( + dataflow_python_job_remote_runner, 'prepare_cmd', autospec=True) + @mock.patch.object( + dataflow_python_job_remote_runner, 'Process', autospec=True) + @mock.patch.object( + dataflow_python_job_remote_runner, + 'extract_job_id_and_location', + autospec=True) + def test_create_python_job_parses_with_json_array_args_list_parses_correctly( + self, mock_extract_job_id_and_location, mock_process, mock_prepare_cmd, + unused_mock_stage_file): + mock_process_client = mock.Mock() + mock_process.return_value = mock_process_client + mock_process_client.read_lines.return_value = ['test_line'] + mock_extract_job_id_and_location.return_value = (None, None) + + with self.assertRaises(RuntimeError): + dataflow_python_job_remote_runner.create_python_job( + python_module_path=self._local_file_path, + project=self._project, + gcp_resources=self._gcp_resources, + location=self._location, + temp_location=self._gcs_temp_path, + args=json.dumps(self._args)) + mock_prepare_cmd.assert_called_once_with(self._project, self._location, + mock.ANY, self._args, + self._gcs_temp_path) + mock_process_client.wait_and_check.assert_called_once_with() + @mock.patch.object( dataflow_python_job_remote_runner, 'stage_file', autospec=True) @mock.patch.object( From 2e3fb5efff5623d3cd4fff0cfb3585cb46d8eac9 Mon Sep 17 00:00:00 2001 From: Sina Chavoshi Date: Tue, 26 Oct 2021 00:18:09 -0700 Subject: [PATCH 10/31] chore(components/google-cloud): Replace executor input with json escaped placeholder. PiperOrigin-RevId: 405579501 --- .../experimental/custom_job/custom_job.py | 199 ++- .../custom_job/unit/test_custom_job.py | 1092 ++++++++++------- 2 files changed, 738 insertions(+), 553 deletions(-) diff --git a/components/google-cloud/google_cloud_pipeline_components/experimental/custom_job/custom_job.py b/components/google-cloud/google_cloud_pipeline_components/experimental/custom_job/custom_job.py index 9917378d53e..11060319606 100644 --- a/components/google-cloud/google_cloud_pipeline_components/experimental/custom_job/custom_job.py +++ b/components/google-cloud/google_cloud_pipeline_components/experimental/custom_job/custom_job.py @@ -17,115 +17,117 @@ # TODO(chavoshi): switch to using V2 only once it is ready. import copy import json -import logging import tempfile -from typing import Callable, List, Optional, Mapping, Any, Dict +from typing import Callable, Any, Dict, List, Mapping, Optional + +from google_cloud_pipeline_components.aiplatform import utils from kfp import components +from kfp.components import structures from kfp.dsl import dsl_utils from kfp.v2.components.types import type_utils -from google_cloud_pipeline_components.aiplatform import utils -from kfp.components import structures _DEFAULT_CUSTOM_JOB_CONTAINER_IMAGE = utils.DEFAULT_CONTAINER_IMAGE -# Using an empty placeholder instead of "{{$.json_escape[1]}}" while -# backend support has not been enabled. -_EXECUTOR_PLACE_HOLDER_REPLACEMENT = json.dumps({"outputs": {"outputFile": "tmp/temp_output_file"}}) + +# Executor replacement is used because executor content needs to be jsonified before +# injection into the payload, since the payload is already a JSON-serialized string.
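A minimal sketch of the substitution this enables, assuming an illustrative args list; the constant mirrors the assignment that follows, and the loop mirrors the replacement applied below to the container and python_package specs:

# Illustrative args list; '{{{{$}}}}' is the literal executor-input marker
# that gets swapped for the backend's json_escape placeholder.
_EXECUTOR_PLACE_HOLDER_REPLACEMENT = '{{$.json_escape[1]}}'

args = ['--executor_input', '{{{{$}}}}', '--function_to_execute', 'train']
for idx, val in enumerate(args):
    if val == '{{{{$}}}}':
        args[idx] = _EXECUTOR_PLACE_HOLDER_REPLACEMENT

assert args[1] == '{{$.json_escape[1]}}'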
+_EXECUTOR_PLACE_HOLDER_REPLACEMENT = '{{$.json_escape[1]}}' def custom_training_job_op( - component_spec: Callable, - display_name: Optional[str] = "", + component_spec: Callable, # pylint: disable=g-bare-generic + display_name: Optional[str] = '', replica_count: Optional[int] = 1, - machine_type: Optional[str] = "n1-standard-4", - accelerator_type: Optional[str] = "", + machine_type: Optional[str] = 'n1-standard-4', + accelerator_type: Optional[str] = '', accelerator_count: Optional[int] = 1, - boot_disk_type: Optional[str] = "pd-ssd", + boot_disk_type: Optional[str] = 'pd-ssd', boot_disk_size_gb: Optional[int] = 100, - timeout: Optional[str] = "", + timeout: Optional[str] = '', restart_job_on_worker_restart: Optional[bool] = False, - service_account: Optional[str] = "", - network: Optional[str] = "", + service_account: Optional[str] = '', + network: Optional[str] = '', worker_pool_specs: Optional[List[Mapping[str, Any]]] = None, - encryption_spec_key_name: Optional[str] = "", - tensorboard: Optional[str] = "", - base_output_directory: Optional[str] = "", + encryption_spec_key_name: Optional[str] = '', + tensorboard: Optional[str] = '', + base_output_directory: Optional[str] = '', labels: Optional[Dict[str, str]] = None, -) -> Callable: +) -> Callable: # pylint: disable=g-bare-generic """Run a pipeline task using Vertex AI custom training job. - For detailed doc of the service, please refer to - https://cloud.google.com/vertex-ai/docs/training/create-custom-job + For detailed doc of the service, please refer to + https://cloud.google.com/vertex-ai/docs/training/create-custom-job - Args: - component_spec: The task (ContainerOp) object to run as Vertex AI custom - job. - display_name (Optional[str]): The name of the custom job. If not provided - the component_spec.name will be used instead. - replica_count (Optional[int]): The number of replicas to be split between - master workerPoolSpec and worker workerPoolSpec. (master always has 1 - replica). - machine_type (Optional[str]): The type of the machine to run the custom - job. The default value is "n1-standard-4". For more details about this - input config, see - https://cloud.google.com/vertex-ai/docs/training/configure-compute#machine-types. - accelerator_type (Optional[str]): The type of accelerator(s) that may be - attached to the machine as per accelerator_count. For more details - about this input config, see - https://cloud.google.com/vertex-ai/docs/reference/rest/v1/MachineSpec#acceleratortype. - accelerator_count (Optional[int]): The number of accelerators to attach to - the machine. Defaults to 1 if accelerator_type is set. - boot_disk_type (Optional[str]): - Type of the boot disk (default is "pd-ssd"). Valid values: "pd-ssd" - (Persistent Disk Solid State Drive) or "pd-standard" (Persistent Disk - Hard Disk Drive). - boot_disk_size_gb (Optional[int]): Size in GB of the boot disk (default is - 100GB). - timeout (Optional[str]): The maximum job running time. The default is 7 - days. A duration in seconds with up to nine fractional digits, - terminated by 's'. - Example: "3.5s". - restart_job_on_worker_restart (Optional[bool]): Restarts the entire - CustomJob if a worker gets restarted. This feature can be used by - distributed training jobs that are not resilient to workers leaving and - joining a job. - service_account (Optional[str]): Sets the default service account for - workload run-as account. 
The service account running the pipeline - (https://cloud.google.com/vertex-ai/docs/pipelines/configure-project#service-account) - submitting jobs must have act-as permission on this run-as account. - If unspecified, the Vertex AI Custom Code Service - Agent(https://cloud.google.com/vertex-ai/docs/general/access-control#service-agents) - for the CustomJob's project is used. - network (Optional[str]): The full name of the Compute Engine network to - which the job should be peered. For example, - projects/12345/global/networks/myVPC. Format is of the form - projects/{project}/global/networks/{network}. Where {project} is a - project number, as in 12345, and {network} is a network name. Private - services access must already be configured for the network. If left - unspecified, the job is not peered with any network. - worker_pool_specs (Optional[List[Mapping[str, Any]]]): Worker_pool_specs - for distributed training. This - will overwite all other cluster configurations. For details, please see: - https://cloud.google.com/ai-platform-unified/docs/training/distributed-training - encryption_spec_key_name (Optional[str]): Customer-managed encryption key - options for the CustomJob. If this is set, then all resources created by - the CustomJob will be encrypted with the provided encryption key. - tensorboard (Optional[str]): The name of a Vertex AI Tensorboard resource - to which this CustomJob will upload Tensorboard logs. - base_output_directory (Optional[str]): The Cloud Storage location to store - the output of this CustomJob or - HyperparameterTuningJob. see below for more details: - https://cloud.google.com/vertex-ai/docs/reference/rest/v1/GcsDestination - labels (Optional[Dict[str, str]]): The labels with user-defined metadata - to organize CustomJobs. - See https://goo.gl/xmQnxf for more information. + Args: + component_spec: The task (ContainerOp) object to run as Vertex AI custom + job. + display_name (Optional[str]): The name of the custom job. If not provided + the component_spec.name will be used instead. + replica_count (Optional[int]): The number of replicas to be split between + master workerPoolSpec and worker workerPoolSpec. (master always has 1 + replica). + machine_type (Optional[str]): The type of the machine to run the custom + job. The default value is "n1-standard-4". For more details about this + input config, see + https://cloud.google.com/vertex-ai/docs/training/configure-compute#machine-types. + accelerator_type (Optional[str]): The type of accelerator(s) that may be + attached to the machine as per accelerator_count. For more details about + this input config, see + https://cloud.google.com/vertex-ai/docs/reference/rest/v1/MachineSpec#acceleratortype. + accelerator_count (Optional[int]): The number of accelerators to attach to + the machine. Defaults to 1 if accelerator_type is set. + boot_disk_type (Optional[str]): + Type of the boot disk (default is "pd-ssd"). Valid values: "pd-ssd" + (Persistent Disk Solid State Drive) or "pd-standard" (Persistent Disk + Hard Disk Drive). + boot_disk_size_gb (Optional[int]): Size in GB of the boot disk (default is + 100GB). + timeout (Optional[str]): The maximum job running time. The default is 7 + days. A duration in seconds with up to nine fractional digits, terminated + by 's', for example: "3.5s". + restart_job_on_worker_restart (Optional[bool]): Restarts the entire + CustomJob if a worker gets restarted. This feature can be used by + distributed training jobs that are not resilient to workers leaving and + joining a job. 
+ service_account (Optional[str]): Sets the default service account for + workload run-as account. The service account running the pipeline + (https://cloud.google.com/vertex-ai/docs/pipelines/configure-project#service-account) + submitting jobs must have act-as permission on this run-as account. If + unspecified, the Vertex AI Custom Code Service + Agent(https://cloud.google.com/vertex-ai/docs/general/access-control#service-agents) + for the CustomJob's project is used. + network (Optional[str]): The full name of the Compute Engine network to + which the job should be peered. For example, + projects/12345/global/networks/myVPC. Format is of the form + projects/{project}/global/networks/{network}. Where {project} is a project + number, as in 12345, and {network} is a network name. Private services + access must already be configured for the network. If left unspecified, + the job is not peered with any network. + worker_pool_specs (Optional[List[Mapping[str, Any]]]): Worker_pool_specs for + distributed training. This + will overwrite all other cluster configurations. For details, please see: + https://cloud.google.com/ai-platform-unified/docs/training/distributed-training + encryption_spec_key_name (Optional[str]): Customer-managed encryption key + options for the CustomJob. If this is set, then all resources created by + the CustomJob will be encrypted with the provided encryption key. + tensorboard (Optional[str]): The name of a Vertex AI Tensorboard resource + to which this CustomJob will upload Tensorboard logs. + base_output_directory (Optional[str]): The Cloud Storage location to store + the output of this CustomJob or + HyperparameterTuningJob. See below for more details: + https://cloud.google.com/vertex-ai/docs/reference/rest/v1/GcsDestination + labels (Optional[Dict[str, str]]): The labels with user-defined metadata + to organize CustomJobs. + See https://goo.gl/xmQnxf for more information. - Returns: - A Custom Job component operator correspoinding to the input component - operator. - """ + Returns: + A Custom Job component operator corresponding to the input component + operator. + """ job_spec = {} input_specs = [] output_specs = [] + + # pytype: disable=attribute-error + if component_spec.component_spec.inputs: input_specs = component_spec.component_spec.inputs if component_spec.component_spec.outputs: @@ -147,10 +149,7 @@ def _is_output_parameter(output_key: str) -> bool: if 'args' in container_spec: dsl_utils.resolve_cmd_lines(container_spec['args'], _is_output_parameter) - # Temporarily remove {{{{$}}}} executor_input arg as it is not supported by the backend. - logging.info( - 'Setting executor_input to empty, as it is currently not supported by the backend.' - ) + # Replace executor place holder with the json escaped placeholder. for idx, val in enumerate(container_spec['args']): if val == '{{{{$}}}}': container_spec['args'][idx] = _EXECUTOR_PLACE_HOLDER_REPLACEMENT @@ -160,10 +159,7 @@ def _is_output_parameter(output_key: str) -> bool: python_spec = worker_pool_spec['python_package_spec'] if 'args' in python_spec: dsl_utils.resolve_cmd_lines(python_spec['args'], _is_output_parameter) - # Temporarily remove {{{{$}}}} executor_input arg as it is not supported by the backend. - logging.info( - 'Setting executor_input to empty, as it is currently not supported by the backend.' - ) + # Replace executor place holder with the json escaped placeholder.
for idx, val in enumerate(python_spec['args']): if val == '{{{{$}}}}': python_spec['args'][idx] = _EXECUTOR_PLACE_HOLDER_REPLACEMENT @@ -203,11 +199,7 @@ def _is_output_parameter(output_key: str) -> bool: container_args_copy = component_spec.component_spec.implementation.container.args.copy( ) dsl_utils.resolve_cmd_lines(container_args_copy, _is_output_parameter) - # Temporarily remove {{{{$}}}} executor_input arg as it is not supported by the backend. - logging.info( - 'Setting executor_input to empty, as it is currently not supported by the backend.' - 'This may result in python componnet artifacts not working correctly.' - ) + # Replace executor place holder with the json escaped placeholder. for idx, val in enumerate(container_args_copy): if val == '{{{{$}}}}': container_args_copy[idx] = _EXECUTOR_PLACE_HOLDER_REPLACEMENT @@ -224,12 +216,12 @@ def _is_output_parameter(output_key: str) -> bool: worker_pool_spec['disk_spec']['boot_disk_size_gb'] = boot_disk_size_gb job_spec['worker_pool_specs'] = [worker_pool_spec] - if replica_count > 1: + if int(replica_count) > 1: additional_worker_pool_spec = copy.deepcopy(worker_pool_spec) additional_worker_pool_spec['replica_count'] = str(replica_count - 1) job_spec['worker_pool_specs'].append(additional_worker_pool_spec) - #TODO(chavoshi): Use input parameter instead of hard coded string label. + # TODO(chavoshi): Use input parameter instead of hard coded string label. # This requires Dictionary input type to be supported in V2. if labels is not None: job_spec['labels'] = labels @@ -318,6 +310,9 @@ def _is_output_parameter(output_key: str) -> bool: structures.OutputPathPlaceholder(output_name='gcp_resources'), ], ))) + + # pytype: enable=attribute-error + component_path = tempfile.mktemp() custom_job_component_spec.save(component_path) diff --git a/components/google-cloud/tests/experimental/custom_job/unit/test_custom_job.py b/components/google-cloud/tests/experimental/custom_job/unit/test_custom_job.py index 429e1f5053d..9ff3fa1a2bb 100644 --- a/components/google-cloud/tests/experimental/custom_job/unit/test_custom_job.py +++ b/components/google-cloud/tests/experimental/custom_job/unit/test_custom_job.py @@ -13,22 +13,22 @@ # limitations under the License. 
"""Test Vertex AI Custom Job Client module.""" -import unittest from google_cloud_pipeline_components.experimental.custom_job import custom_job from kfp import components -from kfp.v2.dsl import component + +import unittest class VertexAICustomJobUtilsTests(unittest.TestCase): - def setUp(self): - super(VertexAICustomJobUtilsTests, self).setUp() - custom_job._DEFAULT_CUSTOM_JOB_CONTAINER_IMAGE = 'test_launcher_image' + def setUp(self): + super(VertexAICustomJobUtilsTests, self).setUp() + custom_job._DEFAULT_CUSTOM_JOB_CONTAINER_IMAGE = 'test_launcher_image' - def _create_a_container_based_component(self) -> callable: - """Creates a test container based component factory.""" + def _create_a_container_based_component(self) -> callable: + """Creates a test container based component factory.""" - return components.load_component_from_text(""" + return components.load_component_from_text(""" name: ContainerComponent inputs: - {name: input_text, type: String, description: "Represents an input parameter."} @@ -47,471 +47,661 @@ def _create_a_container_based_component(self) -> callable: - {outputPath: output_value} """) - def test_run_as_vertex_ai_custom_job_on_container_spec_with_defualts_values_converts_correctly( - self): - expected_results = { - 'name': 'ContainerComponent', - 'inputs': [{ - 'name': 'input_text', - 'type': 'String', - 'description': 'Represents an input parameter.' - }, { - 'name': 'base_output_directory', - 'type': 'String', - 'default': '', - 'optional': True - }, { - 'name': 'tensorboard', - 'type': 'String', - 'default': '', - 'optional': True - }, { - 'name': 'network', - 'type': 'String', - 'default': '', - 'optional': True - }, { - 'name': 'service_account', - 'type': 'String', - 'default': '', - 'optional': True - }, { - 'name': 'project', - 'type': 'String' - }, { - 'name': 'location', - 'type': 'String' - }], - 'outputs': [{ - 'name': 'output_value', - 'type': 'String', - 'description': 'Represents an output paramter.' - }, { - 'name': 'gcp_resources', - 'type': 'String' - }], - 'implementation': { - 'container': { - 'image': - 'test_launcher_image', - 'command': [ - 'python3', '-u', '-m', - 'google_cloud_pipeline_components.container.experimental.gcp_launcher.launcher' - ], - 'args': [ - '--type', 'CustomJob', '--payload', - '{"display_name": "ContainerComponent", "job_spec": {"worker_pool_specs": [{"machine_spec": {"machine_type": "n1-standard-4"}, "replica_count": 1, "container_spec": {"image_uri": "google/cloud-sdk:latest", "command": ["sh", "-c", "set -e -x\\necho \\"$0, this is an output parameter\\"\\n", "{{$.inputs.parameters[\'input_text\']}}", "{{$.outputs.parameters[\'output_value\'].output_file}}"]}, "disk_spec": {"boot_disk_type": "pd-ssd", "boot_disk_size_gb": 100}}], "service_account": "{{$.inputs.parameters[\'service_account\']}}", "network": "{{$.inputs.parameters[\'network\']}}", "tensorboard": "{{$.inputs.parameters[\'tensorboard\']}}", "base_output_directory": {"output_uri_prefix": "{{$.inputs.parameters[\'base_output_directory\']}}"}}}', - '--project', { - 'inputValue': 'project' - }, '--location', { - 'inputValue': 'location' - }, '--gcp_resources', { - 'outputPath': 'gcp_resources' - } - ] - } + def test_run_as_vertex_ai_custom_job_on_container_spec_with_defualts_values_converts_correctly( + self): + expected_results = { + 'name': 'ContainerComponent', + 'inputs': [{ + 'name': 'input_text', + 'type': 'String', + 'description': 'Represents an input parameter.' 
+ }, { + 'name': 'base_output_directory', + 'type': 'String', + 'default': '', + 'optional': True + }, { + 'name': 'tensorboard', + 'type': 'String', + 'default': '', + 'optional': True + }, { + 'name': 'network', + 'type': 'String', + 'default': '', + 'optional': True + }, { + 'name': 'service_account', + 'type': 'String', + 'default': '', + 'optional': True + }, { + 'name': 'project', + 'type': 'String' + }, { + 'name': 'location', + 'type': 'String' + }], + 'outputs': [{ + 'name': 'output_value', + 'type': 'String', + 'description': 'Represents an output paramter.' + }, { + 'name': 'gcp_resources', + 'type': 'String' + }], + 'implementation': { + 'container': { + 'image': + 'test_launcher_image', + 'command': [ + 'python3', '-u', '-m', + 'google_cloud_pipeline_components.container.experimental.gcp_launcher.launcher' + ], + 'args': [ + '--type', 'CustomJob', '--payload', + '{"display_name": "ContainerComponent", "job_spec": ' + '{"worker_pool_specs": [{"machine_spec": {"machine_type": ' + '"n1-standard-4"}, "replica_count": 1, "container_spec": ' + '{"image_uri": "google/cloud-sdk:latest", "command": ' + '["sh", "-c", "set -e -x\\necho \\"$0, this is an output ' + 'parameter\\"\\n", ' + '"{{$.inputs.parameters[\'input_text\']}}", ' + '"{{$.outputs.parameters[\'output_value\'].output_file}}"]},' + ' "disk_spec": {"boot_disk_type": "pd-ssd", ' + '"boot_disk_size_gb": 100}}], "service_account": ' + '"{{$.inputs.parameters[\'service_account\']}}", ' + '"network": "{{$.inputs.parameters[\'network\']}}", ' + '"tensorboard": ' + '"{{$.inputs.parameters[\'tensorboard\']}}", ' + '"base_output_directory": {"output_uri_prefix": ' + '"{{$.inputs.parameters[\'base_output_directory\']}}"}}}', + '--project', { + 'inputValue': 'project' + }, '--location', { + 'inputValue': 'location' + }, '--gcp_resources', { + 'outputPath': 'gcp_resources' + } + ] } } - component_factory_function = self._create_a_container_based_component() - custom_job_spec = custom_job.custom_training_job_op( - component_factory_function) - self.assertDictEqual(custom_job_spec.component_spec.to_dict(), - expected_results) - - def test_run_as_vertex_ai_custom_with_worker_poolspec_container_spec_converts_correctly( - self): - component_factory_function = self._create_a_container_based_component() - worker_pool_spec = [{ - 'machine_spec': { - 'machine_type': 'test_machine_type' - }, - 'replica_count': 2, - 'container_spec': { - 'image_uri': 'test_image_uri', - 'command': ['test_command'], - 'args': ['test_args'] - } - }] - - expected_sub_results = { - 'implementation': { - 'container': { - 'image': - 'test_launcher_image', - 'command': [ - 'python3', '-u', '-m', - 'google_cloud_pipeline_components.container.experimental.gcp_launcher.launcher' - ], - 'args': [ - '--type', 'CustomJob', '--payload', - '{"display_name": "ContainerComponent", "job_spec": {"worker_pool_specs": [{"machine_spec": {"machine_type": "test_machine_type"}, "replica_count": 2, "container_spec": {"image_uri": "test_image_uri", "command": ["test_command"], "args": ["test_args"]}}], "service_account": "{{$.inputs.parameters[\'service_account\']}}", "network": "{{$.inputs.parameters[\'network\']}}", "tensorboard": "{{$.inputs.parameters[\'tensorboard\']}}", "base_output_directory": {"output_uri_prefix": "{{$.inputs.parameters[\'base_output_directory\']}}"}}}', - '--project', { - 'inputValue': 'project' - }, '--location', { - 'inputValue': 'location' - }, '--gcp_resources', { - 'outputPath': 'gcp_resources' - } - ] - } + } + component_factory_function = 
self._create_a_container_based_component() + custom_job_spec = custom_job.custom_training_job_op( + component_factory_function) + self.assertDictEqual(custom_job_spec.component_spec.to_dict(), + expected_results) + + def test_run_as_vertex_ai_custom_with_worker_poolspec_container_spec_converts_correctly( + self): + component_factory_function = self._create_a_container_based_component() + worker_pool_spec = [{ + 'machine_spec': { + 'machine_type': 'test_machine_type' + }, + 'replica_count': 2, + 'container_spec': { + 'image_uri': 'test_image_uri', + 'command': ['test_command'], + 'args': ['test_args'] + } + }] + + expected_sub_results = { + 'implementation': { + 'container': { + 'image': + 'test_launcher_image', + 'command': [ + 'python3', '-u', '-m', + 'google_cloud_pipeline_components.container.experimental.gcp_launcher.launcher' + ], + 'args': [ + '--type', 'CustomJob', '--payload', + '{"display_name": "ContainerComponent", "job_spec": ' + '{"worker_pool_specs": [{"machine_spec": {"machine_type": ' + '"test_machine_type"}, "replica_count": 2, ' + '"container_spec": {"image_uri": "test_image_uri", ' + '"command": ["test_command"], "args": ["test_args"]}}], ' + '"service_account": ' + '"{{$.inputs.parameters[\'service_account\']}}", ' + '"network": "{{$.inputs.parameters[\'network\']}}", ' + '"tensorboard": ' + '"{{$.inputs.parameters[\'tensorboard\']}}", ' + '"base_output_directory": {"output_uri_prefix": ' + '"{{$.inputs.parameters[\'base_output_directory\']}}"}}}', + '--project', { + 'inputValue': 'project' + }, '--location', { + 'inputValue': 'location' + }, '--gcp_resources', { + 'outputPath': 'gcp_resources' + } + ] } } - custom_job_spec = custom_job.custom_training_job_op( - component_factory_function, worker_pool_specs=worker_pool_spec) - - self.assertDictContainsSubset( - subset=expected_sub_results, - dictionary=custom_job_spec.component_spec.to_dict()) - - def test_run_as_vertex_ai_custom_with_python_package_spec_converts_correctly( - self): - component_factory_function = self._create_a_container_based_component() - python_package_spec = [{'python_package_spec': {'args': ['test_args']}}] - - expected_sub_results = { - 'implementation': { - 'container': { - 'image': - 'test_launcher_image', - 'command': [ - 'python3', '-u', '-m', - 'google_cloud_pipeline_components.container.experimental.gcp_launcher.launcher' - ], - 'args': [ - '--type', 'CustomJob', '--payload', - '{"display_name": "ContainerComponent", "job_spec": {"worker_pool_specs": [{"python_package_spec": {"args": ["test_args"]}}], "service_account": "{{$.inputs.parameters[\'service_account\']}}", "network": "{{$.inputs.parameters[\'network\']}}", "tensorboard": "{{$.inputs.parameters[\'tensorboard\']}}", "base_output_directory": {"output_uri_prefix": "{{$.inputs.parameters[\'base_output_directory\']}}"}}}', - '--project', { - 'inputValue': 'project' - }, '--location', { - 'inputValue': 'location' - }, '--gcp_resources', { - 'outputPath': 'gcp_resources' - } - ] - } + } + custom_job_spec = custom_job.custom_training_job_op( + component_factory_function, worker_pool_specs=worker_pool_spec) + + self.assertDictContainsSubset( + subset=expected_sub_results, + dictionary=custom_job_spec.component_spec.to_dict()) + + def test_run_as_vertex_ai_custom_with_python_package_spec_converts_correctly( + self): + component_factory_function = self._create_a_container_based_component() + python_package_spec = [{'python_package_spec': {'args': ['test_args']}}] + + expected_sub_results = { + 'implementation': { + 'container': { + 'image': + 
'test_launcher_image', + 'command': [ + 'python3', '-u', '-m', + 'google_cloud_pipeline_components.container.experimental.gcp_launcher.launcher' + ], + 'args': [ + '--type', 'CustomJob', '--payload', + '{"display_name": "ContainerComponent", "job_spec": ' + '{"worker_pool_specs": [{"python_package_spec": {"args": ' + '["test_args"]}}], "service_account": ' + '"{{$.inputs.parameters[\'service_account\']}}", ' + '"network": "{{$.inputs.parameters[\'network\']}}", ' + '"tensorboard": ' + '"{{$.inputs.parameters[\'tensorboard\']}}", ' + '"base_output_directory": {"output_uri_prefix": ' + '"{{$.inputs.parameters[\'base_output_directory\']}}"}}}', + '--project', { + 'inputValue': 'project' + }, '--location', { + 'inputValue': 'location' + }, '--gcp_resources', { + 'outputPath': 'gcp_resources' + } + ] } } - custom_job_spec = custom_job.custom_training_job_op( - component_factory_function, worker_pool_specs=python_package_spec) - - self.assertDictContainsSubset( - subset=expected_sub_results, - dictionary=custom_job_spec.component_spec.to_dict()) - - def test_run_as_vertex_ai_custom_with_accelerator_type_and_count_converts_correctly( - self): - component_factory_function = self._create_a_container_based_component() - - expected_sub_results = { - 'implementation': { - 'container': { - 'image': - 'test_launcher_image', - 'command': [ - 'python3', '-u', '-m', - 'google_cloud_pipeline_components.container.experimental.gcp_launcher.launcher' - ], - 'args': [ - '--type', 'CustomJob', '--payload', - '{"display_name": "ContainerComponent", "job_spec": {"worker_pool_specs": [{"machine_spec": {"machine_type": "n1-standard-4", "accelerator_type": "test_accelerator_type", "accelerator_count": 2}, "replica_count": 1, "container_spec": {"image_uri": "google/cloud-sdk:latest", "command": ["sh", "-c", "set -e -x\\necho \\"$0, this is an output parameter\\"\\n", "{{$.inputs.parameters[\'input_text\']}}", "{{$.outputs.parameters[\'output_value\'].output_file}}"]}, "disk_spec": {"boot_disk_type": "pd-ssd", "boot_disk_size_gb": 100}}], "service_account": "{{$.inputs.parameters[\'service_account\']}}", "network": "{{$.inputs.parameters[\'network\']}}", "tensorboard": "{{$.inputs.parameters[\'tensorboard\']}}", "base_output_directory": {"output_uri_prefix": "{{$.inputs.parameters[\'base_output_directory\']}}"}}}', - '--project', { - 'inputValue': 'project' - }, '--location', { - 'inputValue': 'location' - }, '--gcp_resources', { - 'outputPath': 'gcp_resources' - } - ] - } + } + custom_job_spec = custom_job.custom_training_job_op( + component_factory_function, worker_pool_specs=python_package_spec) + + self.assertDictContainsSubset( + subset=expected_sub_results, + dictionary=custom_job_spec.component_spec.to_dict()) + + def test_run_as_vertex_ai_custom_with_accelerator_type_and_count_converts_correctly( + self): + component_factory_function = self._create_a_container_based_component() + + expected_sub_results = { + 'implementation': { + 'container': { + 'image': + 'test_launcher_image', + 'command': [ + 'python3', '-u', '-m', + 'google_cloud_pipeline_components.container.experimental.gcp_launcher.launcher' + ], + 'args': [ + '--type', 'CustomJob', '--payload', + '{"display_name": "ContainerComponent", "job_spec": ' + '{"worker_pool_specs": [{"machine_spec": {"machine_type": ' + '"n1-standard-4", "accelerator_type": ' + '"test_accelerator_type", "accelerator_count": 2}, ' + '"replica_count": 1, "container_spec": {"image_uri": ' + '"google/cloud-sdk:latest", "command": ["sh", "-c", "set ' + '-e -x\\necho \\"$0, this is 
an output parameter\\"\\n", ' + '"{{$.inputs.parameters[\'input_text\']}}", ' + '"{{$.outputs.parameters[\'output_value\'].output_file}}"]},' + ' "disk_spec": {"boot_disk_type": "pd-ssd", ' + '"boot_disk_size_gb": 100}}], "service_account": ' + '"{{$.inputs.parameters[\'service_account\']}}", ' + '"network": "{{$.inputs.parameters[\'network\']}}", ' + '"tensorboard": ' + '"{{$.inputs.parameters[\'tensorboard\']}}", ' + '"base_output_directory": {"output_uri_prefix": ' + '"{{$.inputs.parameters[\'base_output_directory\']}}"}}}', + '--project', { + 'inputValue': 'project' + }, '--location', { + 'inputValue': 'location' + }, '--gcp_resources', { + 'outputPath': 'gcp_resources' + } + ] } } - custom_job_spec = custom_job.custom_training_job_op( - component_factory_function, - accelerator_type="test_accelerator_type", - accelerator_count=2) - - self.assertDictContainsSubset( - subset=expected_sub_results, - dictionary=custom_job_spec.component_spec.to_dict()) - - def test_run_as_vertex_ai_custom_with_boot_disk_type_and_size_converts_correctly( - self): - component_factory_function = self._create_a_container_based_component() - - expected_sub_results = { - 'implementation': { - 'container': { - 'image': - 'test_launcher_image', - 'command': [ - 'python3', '-u', '-m', - 'google_cloud_pipeline_components.container.experimental.gcp_launcher.launcher' - ], - 'args': [ - '--type', 'CustomJob', '--payload', - '{"display_name": "ContainerComponent", "job_spec": {"worker_pool_specs": [{"machine_spec": {"machine_type": "n1-standard-4"}, "replica_count": 1, "container_spec": {"image_uri": "google/cloud-sdk:latest", "command": ["sh", "-c", "set -e -x\\necho \\"$0, this is an output parameter\\"\\n", "{{$.inputs.parameters[\'input_text\']}}", "{{$.outputs.parameters[\'output_value\'].output_file}}"]}, "disk_spec": {"boot_disk_type": "pd-ssd", "boot_disk_size_gb": 100}}, {"machine_spec": {"machine_type": "n1-standard-4"}, "replica_count": "1", "container_spec": {"image_uri": "google/cloud-sdk:latest", "command": ["sh", "-c", "set -e -x\\necho \\"$0, this is an output parameter\\"\\n", "{{$.inputs.parameters[\'input_text\']}}", "{{$.outputs.parameters[\'output_value\'].output_file}}"]}, "disk_spec": {"boot_disk_type": "pd-ssd", "boot_disk_size_gb": 100}}], "service_account": "{{$.inputs.parameters[\'service_account\']}}", "network": "{{$.inputs.parameters[\'network\']}}", "tensorboard": "{{$.inputs.parameters[\'tensorboard\']}}", "base_output_directory": {"output_uri_prefix": "{{$.inputs.parameters[\'base_output_directory\']}}"}}}', - '--project', { - 'inputValue': 'project' - }, '--location', { - 'inputValue': 'location' - }, '--gcp_resources', { - 'outputPath': 'gcp_resources' - } - ] - } + } + custom_job_spec = custom_job.custom_training_job_op( + component_factory_function, + accelerator_type='test_accelerator_type', + accelerator_count=2) + + self.assertDictContainsSubset( + subset=expected_sub_results, + dictionary=custom_job_spec.component_spec.to_dict()) + + def test_run_as_vertex_ai_custom_with_boot_disk_type_and_size_converts_correctly( + self): + component_factory_function = self._create_a_container_based_component() + + expected_sub_results = { + 'implementation': { + 'container': { + 'image': + 'test_launcher_image', + 'command': [ + 'python3', '-u', '-m', + 'google_cloud_pipeline_components.container.experimental.gcp_launcher.launcher' + ], + 'args': [ + '--type', 'CustomJob', '--payload', + '{"display_name": "ContainerComponent", "job_spec": ' + '{"worker_pool_specs": [{"machine_spec": 
{"machine_type": ' + '"n1-standard-4"}, "replica_count": 1, "container_spec": ' + '{"image_uri": "google/cloud-sdk:latest", "command": ' + '["sh", "-c", "set -e -x\\necho \\"$0, this is an output ' + 'parameter\\"\\n", ' + '"{{$.inputs.parameters[\'input_text\']}}", ' + '"{{$.outputs.parameters[\'output_value\'].output_file}}"]},' + ' "disk_spec": {"boot_disk_type": "pd-ssd", ' + '"boot_disk_size_gb": 100}}, {"machine_spec": ' + '{"machine_type": "n1-standard-4"}, "replica_count": "1", ' + '"container_spec": {"image_uri": ' + '"google/cloud-sdk:latest", "command": ["sh", "-c", "set ' + '-e -x\\necho \\"$0, this is an output parameter\\"\\n", ' + '"{{$.inputs.parameters[\'input_text\']}}", ' + '"{{$.outputs.parameters[\'output_value\'].output_file}}"]},' + ' "disk_spec": {"boot_disk_type": "pd-ssd", ' + '"boot_disk_size_gb": 100}}], "service_account": ' + '"{{$.inputs.parameters[\'service_account\']}}", ' + '"network": "{{$.inputs.parameters[\'network\']}}", ' + '"tensorboard": ' + '"{{$.inputs.parameters[\'tensorboard\']}}", ' + '"base_output_directory": {"output_uri_prefix": ' + '"{{$.inputs.parameters[\'base_output_directory\']}}"}}}', + '--project', { + 'inputValue': 'project' + }, '--location', { + 'inputValue': 'location' + }, '--gcp_resources', { + 'outputPath': 'gcp_resources' + } + ] } } - custom_job_spec = custom_job.custom_training_job_op( - component_factory_function, replica_count=2) - - self.assertDictContainsSubset( - subset=expected_sub_results, - dictionary=custom_job_spec.component_spec.to_dict()) - - def test_run_as_vertex_ai_custom_with_replica_count_greater_than_1_converts_correctly( - self): - component_factory_function = self._create_a_container_based_component() - - expected_sub_results = { - 'implementation': { - 'container': { - 'image': - 'test_launcher_image', - 'command': [ - 'python3', '-u', '-m', - 'google_cloud_pipeline_components.container.experimental.gcp_launcher.launcher' - ], - 'args': [ - '--type', 'CustomJob', '--payload', - '{"display_name": "ContainerComponent", "job_spec": {"worker_pool_specs": [{"machine_spec": {"machine_type": "n1-standard-4"}, "replica_count": 1, "container_spec": {"image_uri": "google/cloud-sdk:latest", "command": ["sh", "-c", "set -e -x\\necho \\"$0, this is an output parameter\\"\\n", "{{$.inputs.parameters[\'input_text\']}}", "{{$.outputs.parameters[\'output_value\'].output_file}}"]}, "disk_spec": {"boot_disk_type": "pd-ssd", "boot_disk_size_gb": 100}}, {"machine_spec": {"machine_type": "n1-standard-4"}, "replica_count": "1", "container_spec": {"image_uri": "google/cloud-sdk:latest", "command": ["sh", "-c", "set -e -x\\necho \\"$0, this is an output parameter\\"\\n", "{{$.inputs.parameters[\'input_text\']}}", "{{$.outputs.parameters[\'output_value\'].output_file}}"]}, "disk_spec": {"boot_disk_type": "pd-ssd", "boot_disk_size_gb": 100}}], "service_account": "{{$.inputs.parameters[\'service_account\']}}", "network": "{{$.inputs.parameters[\'network\']}}", "tensorboard": "{{$.inputs.parameters[\'tensorboard\']}}", "base_output_directory": {"output_uri_prefix": "{{$.inputs.parameters[\'base_output_directory\']}}"}}}', - '--project', { - 'inputValue': 'project' - }, '--location', { - 'inputValue': 'location' - }, '--gcp_resources', { - 'outputPath': 'gcp_resources' - } - ] - } + } + custom_job_spec = custom_job.custom_training_job_op( + component_factory_function, replica_count=2) + + self.assertDictContainsSubset( + subset=expected_sub_results, + dictionary=custom_job_spec.component_spec.to_dict()) + + def 
test_run_as_vertex_ai_custom_with_replica_count_greater_than_1_converts_correctly( + self): + component_factory_function = self._create_a_container_based_component() + + expected_sub_results = { + 'implementation': { + 'container': { + 'image': + 'test_launcher_image', + 'command': [ + 'python3', '-u', '-m', + 'google_cloud_pipeline_components.container.experimental.gcp_launcher.launcher' + ], + 'args': [ + '--type', 'CustomJob', '--payload', + '{"display_name": "ContainerComponent", "job_spec": ' + '{"worker_pool_specs": [{"machine_spec": {"machine_type": ' + '"n1-standard-4"}, "replica_count": 1, "container_spec": ' + '{"image_uri": "google/cloud-sdk:latest", "command": ' + '["sh", "-c", "set -e -x\\necho \\"$0, this is an output ' + 'parameter\\"\\n", ' + '"{{$.inputs.parameters[\'input_text\']}}", ' + '"{{$.outputs.parameters[\'output_value\'].output_file}}"]},' + ' "disk_spec": {"boot_disk_type": "pd-ssd", ' + '"boot_disk_size_gb": 100}}, {"machine_spec": ' + '{"machine_type": "n1-standard-4"}, "replica_count": "1", ' + '"container_spec": {"image_uri": ' + '"google/cloud-sdk:latest", "command": ["sh", "-c", "set ' + '-e -x\\necho \\"$0, this is an output parameter\\"\\n", ' + '"{{$.inputs.parameters[\'input_text\']}}", ' + '"{{$.outputs.parameters[\'output_value\'].output_file}}"]},' + ' "disk_spec": {"boot_disk_type": "pd-ssd", ' + '"boot_disk_size_gb": 100}}], "service_account": ' + '"{{$.inputs.parameters[\'service_account\']}}", ' + '"network": "{{$.inputs.parameters[\'network\']}}", ' + '"tensorboard": ' + '"{{$.inputs.parameters[\'tensorboard\']}}", ' + '"base_output_directory": {"output_uri_prefix": ' + '"{{$.inputs.parameters[\'base_output_directory\']}}"}}}', + '--project', { + 'inputValue': 'project' + }, '--location', { + 'inputValue': 'location' + }, '--gcp_resources', { + 'outputPath': 'gcp_resources' + } + ] } } - custom_job_spec = custom_job.custom_training_job_op( - component_factory_function, replica_count=2) - - self.assertDictContainsSubset( - subset=expected_sub_results, - dictionary=custom_job_spec.component_spec.to_dict()) - - def test_run_as_vertex_ai_custom_with_time_out_converts_correctly(self): - component_factory_function = self._create_a_container_based_component() - - expected_sub_results = { - 'implementation': { - 'container': { - 'image': - 'test_launcher_image', - 'command': [ - 'python3', '-u', '-m', - 'google_cloud_pipeline_components.container.experimental.gcp_launcher.launcher' - ], - 'args': [ - '--type', 'CustomJob', '--payload', - '{"display_name": "ContainerComponent", "job_spec": {"worker_pool_specs": [{"machine_spec": {"machine_type": "n1-standard-4"}, "replica_count": 1, "container_spec": {"image_uri": "google/cloud-sdk:latest", "command": ["sh", "-c", "set -e -x\\necho \\"$0, this is an output parameter\\"\\n", "{{$.inputs.parameters[\'input_text\']}}", "{{$.outputs.parameters[\'output_value\'].output_file}}"]}, "disk_spec": {"boot_disk_type": "pd-ssd", "boot_disk_size_gb": 100}}], "scheduling": {"timeout": 2}, "service_account": "{{$.inputs.parameters[\'service_account\']}}", "network": "{{$.inputs.parameters[\'network\']}}", "tensorboard": "{{$.inputs.parameters[\'tensorboard\']}}", "base_output_directory": {"output_uri_prefix": "{{$.inputs.parameters[\'base_output_directory\']}}"}}}', - '--project', { - 'inputValue': 'project' - }, '--location', { - 'inputValue': 'location' - }, '--gcp_resources', { - 'outputPath': 'gcp_resources' - } - ] - } + } + custom_job_spec = custom_job.custom_training_job_op( + component_factory_function, 
replica_count=2) + + self.assertDictContainsSubset( + subset=expected_sub_results, + dictionary=custom_job_spec.component_spec.to_dict()) + + def test_run_as_vertex_ai_custom_with_time_out_converts_correctly(self): + component_factory_function = self._create_a_container_based_component() + + expected_sub_results = { + 'implementation': { + 'container': { + 'image': + 'test_launcher_image', + 'command': [ + 'python3', '-u', '-m', + 'google_cloud_pipeline_components.container.experimental.gcp_launcher.launcher' + ], + 'args': [ + '--type', 'CustomJob', '--payload', + '{"display_name": "ContainerComponent", "job_spec": ' + '{"worker_pool_specs": [{"machine_spec": {"machine_type": ' + '"n1-standard-4"}, "replica_count": 1, "container_spec": ' + '{"image_uri": "google/cloud-sdk:latest", "command": ' + '["sh", "-c", "set -e -x\\necho \\"$0, this is an output ' + 'parameter\\"\\n", ' + '"{{$.inputs.parameters[\'input_text\']}}", ' + '"{{$.outputs.parameters[\'output_value\'].output_file}}"]},' + ' "disk_spec": {"boot_disk_type": "pd-ssd", ' + '"boot_disk_size_gb": 100}}], "scheduling": {"timeout": ' + '2}, "service_account": ' + '"{{$.inputs.parameters[\'service_account\']}}", ' + '"network": "{{$.inputs.parameters[\'network\']}}", ' + '"tensorboard": ' + '"{{$.inputs.parameters[\'tensorboard\']}}", ' + '"base_output_directory": {"output_uri_prefix": ' + '"{{$.inputs.parameters[\'base_output_directory\']}}"}}}', + '--project', { + 'inputValue': 'project' + }, '--location', { + 'inputValue': 'location' + }, '--gcp_resources', { + 'outputPath': 'gcp_resources' + } + ] } } - custom_job_spec = custom_job.custom_training_job_op( - component_factory_function, timeout=2) - - self.assertDictContainsSubset( - subset=expected_sub_results, - dictionary=custom_job_spec.component_spec.to_dict()) - - def test_run_as_vertex_ai_custom_with_restart_job_on_worker_restart_converts_correctly( - self): - component_factory_function = self._create_a_container_based_component() - - expected_sub_results = { - 'implementation': { - 'container': { - 'image': - 'test_launcher_image', - 'command': [ - 'python3', '-u', '-m', - 'google_cloud_pipeline_components.container.experimental.gcp_launcher.launcher' - ], - 'args': [ - '--type', 'CustomJob', '--payload', - '{"display_name": "ContainerComponent", "job_spec": {"worker_pool_specs": [{"machine_spec": {"machine_type": "n1-standard-4"}, "replica_count": 1, "container_spec": {"image_uri": "google/cloud-sdk:latest", "command": ["sh", "-c", "set -e -x\\necho \\"$0, this is an output parameter\\"\\n", "{{$.inputs.parameters[\'input_text\']}}", "{{$.outputs.parameters[\'output_value\'].output_file}}"]}, "disk_spec": {"boot_disk_type": "pd-ssd", "boot_disk_size_gb": 100}}], "scheduling": {"restart_job_on_worker_restart": true}, "service_account": "{{$.inputs.parameters[\'service_account\']}}", "network": "{{$.inputs.parameters[\'network\']}}", "tensorboard": "{{$.inputs.parameters[\'tensorboard\']}}", "base_output_directory": {"output_uri_prefix": "{{$.inputs.parameters[\'base_output_directory\']}}"}}}', - '--project', { - 'inputValue': 'project' - }, '--location', { - 'inputValue': 'location' - }, '--gcp_resources', { - 'outputPath': 'gcp_resources' - } - ] - } + } + custom_job_spec = custom_job.custom_training_job_op( + component_factory_function, timeout=2) + + self.assertDictContainsSubset( + subset=expected_sub_results, + dictionary=custom_job_spec.component_spec.to_dict()) + + def test_run_as_vertex_ai_custom_with_restart_job_on_worker_restart_converts_correctly( + self): + 
component_factory_function = self._create_a_container_based_component() + + expected_sub_results = { + 'implementation': { + 'container': { + 'image': + 'test_launcher_image', + 'command': [ + 'python3', '-u', '-m', + 'google_cloud_pipeline_components.container.experimental.gcp_launcher.launcher' + ], + 'args': [ + '--type', 'CustomJob', '--payload', + '{"display_name": "ContainerComponent", "job_spec": ' + '{"worker_pool_specs": [{"machine_spec": {"machine_type": ' + '"n1-standard-4"}, "replica_count": 1, "container_spec": ' + '{"image_uri": "google/cloud-sdk:latest", "command": ' + '["sh", "-c", "set -e -x\\necho \\"$0, this is an output ' + 'parameter\\"\\n", ' + '"{{$.inputs.parameters[\'input_text\']}}", ' + '"{{$.outputs.parameters[\'output_value\'].output_file}}"]},' + ' "disk_spec": {"boot_disk_type": "pd-ssd", ' + '"boot_disk_size_gb": 100}}], "scheduling": ' + '{"restart_job_on_worker_restart": true}, ' + '"service_account": ' + '"{{$.inputs.parameters[\'service_account\']}}", ' + '"network": "{{$.inputs.parameters[\'network\']}}", ' + '"tensorboard": ' + '"{{$.inputs.parameters[\'tensorboard\']}}", ' + '"base_output_directory": {"output_uri_prefix": ' + '"{{$.inputs.parameters[\'base_output_directory\']}}"}}}', + '--project', { + 'inputValue': 'project' + }, '--location', { + 'inputValue': 'location' + }, '--gcp_resources', { + 'outputPath': 'gcp_resources' + } + ] } } - custom_job_spec = custom_job.custom_training_job_op( - component_factory_function, restart_job_on_worker_restart=True) - - self.assertDictContainsSubset( - subset=expected_sub_results, - dictionary=custom_job_spec.component_spec.to_dict()) - - def test_run_as_vertex_ai_custom_with_custom_service_account_converts_correctly( - self): - component_factory_function = self._create_a_container_based_component() - - expected_sub_results = { - 'implementation': { - 'container': { - 'image': - 'test_launcher_image', - 'command': [ - 'python3', '-u', '-m', - 'google_cloud_pipeline_components.container.experimental.gcp_launcher.launcher' - ], - 'args': [ - '--type', 'CustomJob', '--payload', - '{"display_name": "ContainerComponent", "job_spec": {"worker_pool_specs": [{"machine_spec": {"machine_type": "n1-standard-4"}, "replica_count": 1, "container_spec": {"image_uri": "google/cloud-sdk:latest", "command": ["sh", "-c", "set -e -x\\necho \\"$0, this is an output parameter\\"\\n", "{{$.inputs.parameters[\'input_text\']}}", "{{$.outputs.parameters[\'output_value\'].output_file}}"]}, "disk_spec": {"boot_disk_type": "pd-ssd", "boot_disk_size_gb": 100}}], "service_account": "{{$.inputs.parameters[\'service_account\']}}", "network": "{{$.inputs.parameters[\'network\']}}", "tensorboard": "{{$.inputs.parameters[\'tensorboard\']}}", "base_output_directory": {"output_uri_prefix": "{{$.inputs.parameters[\'base_output_directory\']}}"}}}', - '--project', { - 'inputValue': 'project' - }, '--location', { - 'inputValue': 'location' - }, '--gcp_resources', { - 'outputPath': 'gcp_resources' - } - ] - } + } + custom_job_spec = custom_job.custom_training_job_op( + component_factory_function, restart_job_on_worker_restart=True) + + self.assertDictContainsSubset( + subset=expected_sub_results, + dictionary=custom_job_spec.component_spec.to_dict()) + + def test_run_as_vertex_ai_custom_with_custom_service_account_converts_correctly( + self): + component_factory_function = self._create_a_container_based_component() + + expected_sub_results = { + 'implementation': { + 'container': { + 'image': + 'test_launcher_image', + 'command': [ + 'python3', 
'-u', '-m', + 'google_cloud_pipeline_components.container.experimental.gcp_launcher.launcher' + ], + 'args': [ + '--type', 'CustomJob', '--payload', + '{"display_name": "ContainerComponent", "job_spec": ' + '{"worker_pool_specs": [{"machine_spec": {"machine_type": ' + '"n1-standard-4"}, "replica_count": 1, "container_spec": ' + '{"image_uri": "google/cloud-sdk:latest", "command": ' + '["sh", "-c", "set -e -x\\necho \\"$0, this is an output ' + 'parameter\\"\\n", ' + '"{{$.inputs.parameters[\'input_text\']}}", ' + '"{{$.outputs.parameters[\'output_value\'].output_file}}"]},' + ' "disk_spec": {"boot_disk_type": "pd-ssd", ' + '"boot_disk_size_gb": 100}}], "service_account": ' + '"{{$.inputs.parameters[\'service_account\']}}", ' + '"network": "{{$.inputs.parameters[\'network\']}}", ' + '"tensorboard": ' + '"{{$.inputs.parameters[\'tensorboard\']}}", ' + '"base_output_directory": {"output_uri_prefix": ' + '"{{$.inputs.parameters[\'base_output_directory\']}}"}}}', + '--project', { + 'inputValue': 'project' + }, '--location', { + 'inputValue': 'location' + }, '--gcp_resources', { + 'outputPath': 'gcp_resources' + } + ] } } - custom_job_spec = custom_job.custom_training_job_op( - component_factory_function, service_account='test_service_account') - - self.assertDictContainsSubset( - subset=expected_sub_results, - dictionary=custom_job_spec.component_spec.to_dict()) - - def test_run_as_vertex_ai_custom_with_display_name_converts_correctly(self): - component_factory_function = self._create_a_container_based_component() - - expected_sub_results = { - 'implementation': { - 'container': { - 'image': - 'test_launcher_image', - 'command': [ - 'python3', '-u', '-m', - 'google_cloud_pipeline_components.container.experimental.gcp_launcher.launcher' - ], - 'args': [ - '--type', 'CustomJob', '--payload', - '{"display_name": "test_display_name", "job_spec": {"worker_pool_specs": [{"machine_spec": {"machine_type": "n1-standard-4"}, "replica_count": 1, "container_spec": {"image_uri": "google/cloud-sdk:latest", "command": ["sh", "-c", "set -e -x\\necho \\"$0, this is an output parameter\\"\\n", "{{$.inputs.parameters[\'input_text\']}}", "{{$.outputs.parameters[\'output_value\'].output_file}}"]}, "disk_spec": {"boot_disk_type": "pd-ssd", "boot_disk_size_gb": 100}}], "service_account": "{{$.inputs.parameters[\'service_account\']}}", "network": "{{$.inputs.parameters[\'network\']}}", "tensorboard": "{{$.inputs.parameters[\'tensorboard\']}}", "base_output_directory": {"output_uri_prefix": "{{$.inputs.parameters[\'base_output_directory\']}}"}}}', - '--project', { - 'inputValue': 'project' - }, '--location', { - 'inputValue': 'location' - }, '--gcp_resources', { - 'outputPath': 'gcp_resources' - } - ] - } + } + custom_job_spec = custom_job.custom_training_job_op( + component_factory_function, service_account='test_service_account') + + self.assertDictContainsSubset( + subset=expected_sub_results, + dictionary=custom_job_spec.component_spec.to_dict()) + + def test_run_as_vertex_ai_custom_with_display_name_converts_correctly(self): + component_factory_function = self._create_a_container_based_component() + + expected_sub_results = { + 'implementation': { + 'container': { + 'image': + 'test_launcher_image', + 'command': [ + 'python3', '-u', '-m', + 'google_cloud_pipeline_components.container.experimental.gcp_launcher.launcher' + ], + 'args': [ + '--type', 'CustomJob', '--payload', + '{"display_name": "test_display_name", "job_spec": ' + '{"worker_pool_specs": [{"machine_spec": {"machine_type": ' + '"n1-standard-4"}, 
"replica_count": 1, "container_spec": ' + '{"image_uri": "google/cloud-sdk:latest", "command": ' + '["sh", "-c", "set -e -x\\necho \\"$0, this is an output ' + 'parameter\\"\\n", ' + '"{{$.inputs.parameters[\'input_text\']}}", ' + '"{{$.outputs.parameters[\'output_value\'].output_file}}"]},' + ' "disk_spec": {"boot_disk_type": "pd-ssd", ' + '"boot_disk_size_gb": 100}}], "service_account": ' + '"{{$.inputs.parameters[\'service_account\']}}", ' + '"network": "{{$.inputs.parameters[\'network\']}}", ' + '"tensorboard": ' + '"{{$.inputs.parameters[\'tensorboard\']}}", ' + '"base_output_directory": {"output_uri_prefix": ' + '"{{$.inputs.parameters[\'base_output_directory\']}}"}}}', + '--project', { + 'inputValue': 'project' + }, '--location', { + 'inputValue': 'location' + }, '--gcp_resources', { + 'outputPath': 'gcp_resources' + } + ] } } - custom_job_spec = custom_job.custom_training_job_op( - component_factory_function, display_name='test_display_name') - - self.assertDictContainsSubset( - subset=expected_sub_results, - dictionary=custom_job_spec.component_spec.to_dict()) - - def test_run_as_vertex_ai_custom_without_container_spec_or_python_package_spec_correctly( - self): - component_factory_function = self._create_a_container_based_component() - - worker_pool_spec = [{ - 'machine_spec': { - 'machine_type': 'test_machine_type' - }, - 'replica_count': 2 - }] - with self.assertRaises(ValueError): - custom_job_spec = custom_job.custom_training_job_op( - component_factory_function, worker_pool_specs=worker_pool_spec) - - def test_run_as_vertex_ai_custom_with_network_converts_correctly(self): - component_factory_function = self._create_a_container_based_component() - - expected_sub_results = { - 'implementation': { - 'container': { - 'image': - 'test_launcher_image', - 'command': [ - 'python3', '-u', '-m', - 'google_cloud_pipeline_components.container.experimental.gcp_launcher.launcher' - ], - 'args': [ - '--type', 'CustomJob', '--payload', - '{"display_name": "ContainerComponent", "job_spec": {"worker_pool_specs": [{"machine_spec": {"machine_type": "n1-standard-4"}, "replica_count": 1, "container_spec": {"image_uri": "google/cloud-sdk:latest", "command": ["sh", "-c", "set -e -x\\necho \\"$0, this is an output parameter\\"\\n", "{{$.inputs.parameters[\'input_text\']}}", "{{$.outputs.parameters[\'output_value\'].output_file}}"]}, "disk_spec": {"boot_disk_type": "pd-ssd", "boot_disk_size_gb": 100}}], "service_account": "{{$.inputs.parameters[\'service_account\']}}", "network": "{{$.inputs.parameters[\'network\']}}", "tensorboard": "{{$.inputs.parameters[\'tensorboard\']}}", "base_output_directory": {"output_uri_prefix": "{{$.inputs.parameters[\'base_output_directory\']}}"}}}', - '--project', { - 'inputValue': 'project' - }, '--location', { - 'inputValue': 'location' - }, '--gcp_resources', { - 'outputPath': 'gcp_resources' - } - ] - } + } + custom_job_spec = custom_job.custom_training_job_op( + component_factory_function, display_name='test_display_name') + + self.assertDictContainsSubset( + subset=expected_sub_results, + dictionary=custom_job_spec.component_spec.to_dict()) + + def test_run_as_vertex_ai_custom_without_container_spec_or_python_package_spec_correctly( + self): + component_factory_function = self._create_a_container_based_component() + + worker_pool_spec = [{ + 'machine_spec': { + 'machine_type': 'test_machine_type' + }, + 'replica_count': 2 + }] + with self.assertRaises(ValueError): + custom_job_spec = custom_job.custom_training_job_op( + component_factory_function, 
worker_pool_specs=worker_pool_spec) + + def test_run_as_vertex_ai_custom_with_network_converts_correctly(self): + component_factory_function = self._create_a_container_based_component() + + expected_sub_results = { + 'implementation': { + 'container': { + 'image': + 'test_launcher_image', + 'command': [ + 'python3', '-u', '-m', + 'google_cloud_pipeline_components.container.experimental.gcp_launcher.launcher' + ], + 'args': [ + '--type', 'CustomJob', '--payload', + '{"display_name": "ContainerComponent", "job_spec": ' + '{"worker_pool_specs": [{"machine_spec": {"machine_type": ' + '"n1-standard-4"}, "replica_count": 1, "container_spec": ' + '{"image_uri": "google/cloud-sdk:latest", "command": ' + '["sh", "-c", "set -e -x\\necho \\"$0, this is an output ' + 'parameter\\"\\n", ' + '"{{$.inputs.parameters[\'input_text\']}}", ' + '"{{$.outputs.parameters[\'output_value\'].output_file}}"]},' + ' "disk_spec": {"boot_disk_type": "pd-ssd", ' + '"boot_disk_size_gb": 100}}], "service_account": ' + '"{{$.inputs.parameters[\'service_account\']}}", ' + '"network": "{{$.inputs.parameters[\'network\']}}", ' + '"tensorboard": ' + '"{{$.inputs.parameters[\'tensorboard\']}}", ' + '"base_output_directory": {"output_uri_prefix": ' + '"{{$.inputs.parameters[\'base_output_directory\']}}"}}}', + '--project', { + 'inputValue': 'project' + }, '--location', { + 'inputValue': 'location' + }, '--gcp_resources', { + 'outputPath': 'gcp_resources' + } + ] } } - custom_job_spec = custom_job.custom_training_job_op( - component_factory_function, network='test_network') - - self.assertDictContainsSubset( - subset=expected_sub_results, - dictionary=custom_job_spec.component_spec.to_dict()) - - def test_run_as_vertex_ai_custom_with_labels_converts_correctly(self): - component_factory_function = self._create_a_container_based_component() - - expected_sub_results = { - 'implementation': { - 'container': { - 'image': - 'test_launcher_image', - 'command': [ - 'python3', '-u', '-m', - 'google_cloud_pipeline_components.container.experimental.gcp_launcher.launcher' - ], - 'args': [ - '--type', 'CustomJob', '--payload', - '{"display_name": "ContainerComponent", "job_spec": {"worker_pool_specs": [{"machine_spec": {"machine_type": "n1-standard-4"}, "replica_count": 1, "container_spec": {"image_uri": "google/cloud-sdk:latest", "command": ["sh", "-c", "set -e -x\\necho \\"$0, this is an output parameter\\"\\n", "{{$.inputs.parameters[\'input_text\']}}", "{{$.outputs.parameters[\'output_value\'].output_file}}"]}, "disk_spec": {"boot_disk_type": "pd-ssd", "boot_disk_size_gb": 100}}], "labels": {"test_key": "test_value"}, "service_account": "{{$.inputs.parameters[\'service_account\']}}", "network": "{{$.inputs.parameters[\'network\']}}", "tensorboard": "{{$.inputs.parameters[\'tensorboard\']}}", "base_output_directory": {"output_uri_prefix": "{{$.inputs.parameters[\'base_output_directory\']}}"}}}', - '--project', { - 'inputValue': 'project' - }, '--location', { - 'inputValue': 'location' - }, '--gcp_resources', { - 'outputPath': 'gcp_resources' - } - ] - } + } + custom_job_spec = custom_job.custom_training_job_op( + component_factory_function, network='test_network') + + self.assertDictContainsSubset( + subset=expected_sub_results, + dictionary=custom_job_spec.component_spec.to_dict()) + + def test_run_as_vertex_ai_custom_with_labels_converts_correctly(self): + component_factory_function = self._create_a_container_based_component() + + expected_sub_results = { + 'implementation': { + 'container': { + 'image': + 'test_launcher_image', + 
'command': [ + 'python3', '-u', '-m', + 'google_cloud_pipeline_components.container.experimental.gcp_launcher.launcher' + ], + 'args': [ + '--type', 'CustomJob', '--payload', + '{"display_name": "ContainerComponent", "job_spec": ' + '{"worker_pool_specs": [{"machine_spec": {"machine_type": ' + '"n1-standard-4"}, "replica_count": 1, "container_spec": ' + '{"image_uri": "google/cloud-sdk:latest", "command": ' + '["sh", "-c", "set -e -x\\necho \\"$0, this is an output ' + 'parameter\\"\\n", ' + '"{{$.inputs.parameters[\'input_text\']}}", ' + '"{{$.outputs.parameters[\'output_value\'].output_file}}"]},' + ' "disk_spec": {"boot_disk_type": "pd-ssd", ' + '"boot_disk_size_gb": 100}}], "labels": {"test_key": ' + '"test_value"}, "service_account": ' + '"{{$.inputs.parameters[\'service_account\']}}", ' + '"network": "{{$.inputs.parameters[\'network\']}}", ' + '"tensorboard": ' + '"{{$.inputs.parameters[\'tensorboard\']}}", ' + '"base_output_directory": {"output_uri_prefix": ' + '"{{$.inputs.parameters[\'base_output_directory\']}}"}}}', + '--project', { + 'inputValue': 'project' + }, '--location', { + 'inputValue': 'location' + }, '--gcp_resources', { + 'outputPath': 'gcp_resources' + } + ] } } - custom_job_spec = custom_job.custom_training_job_op( - component_factory_function, labels={"test_key": "test_value"}) + } + custom_job_spec = custom_job.custom_training_job_op( + component_factory_function, labels={'test_key': 'test_value'}) - self.assertDictContainsSubset( - subset=expected_sub_results, - dictionary=custom_job_spec.component_spec.to_dict()) + self.assertDictContainsSubset( + subset=expected_sub_results, + dictionary=custom_job_spec.component_spec.to_dict()) From 02b96f0a63e419294dd70fded0e91a660cef74f6 Mon Sep 17 00:00:00 2001 From: Chen Sun Date: Tue, 26 Oct 2021 14:12:37 -0700 Subject: [PATCH 11/31] feat(sdk.v2): Implement v2 experimental compiler. (#6803) * Implement v2 experimental compiler. 
* add release note * Support protobuf.Value in Condition operand --- sdk/RELEASE.md | 2 + sdk/python/kfp/v2/compiler/compiler_utils.py | 5 + .../kfp/v2/compiler/experimental/__init__.py | 15 + .../kfp/v2/compiler/experimental/compiler.py | 1022 +++++++++++++++++ .../v2/compiler/experimental/compiler_test.py | 82 ++ .../experimental/pipeline_spec_builder.py | 858 ++++++++++++++ .../pipeline_spec_builder_test.py | 168 +++ sdk/python/kfp/v2/compiler/main.py | 54 +- .../compiler_cli_tests/compiler_cli_tests.py | 61 +- ...perimental_pipeline_with_exit_handler.json | 182 +++ ...experimental_pipeline_with_exit_handler.py | 67 ++ .../experimental_pipeline_with_loops.json | 442 +++++++ .../experimental_pipeline_with_loops.py | 103 ++ ..._pipeline_with_nested_conditions_yaml.json | 539 +++++++++ ...al_pipeline_with_nested_conditions_yaml.py | 91 ++ .../experimental_two_step_pipeline.json | 126 ++ .../experimental_two_step_pipeline.py | 69 ++ .../test_data/experimental_v2_component.json | 214 ++-- .../test_data/experimental_v2_component.py | 4 +- sdk/python/kfp/v2/components/__init__.py | 2 + .../components/experimental/base_component.py | 6 +- .../experimental/component_factory.py | 443 +++++++ .../experimental/for_loop.py | 18 +- .../experimental/for_loop_test.py | 4 +- .../v2/components/experimental/pipeline.py | 148 +++ .../experimental/pipeline_channel.py | 90 +- .../components/experimental/pipeline_task.py | 144 ++- .../experimental/pipeline_task_test.py | 21 +- .../v2/components/experimental/structures.py | 5 + .../v2/components/experimental/tasks_group.py | 219 ++++ .../components/experimental/yaml_component.py | 29 + .../components/types/experimental/__init__.py | 13 + .../types/experimental/type_utils.py | 279 +++++ sdk/python/kfp/v2/components/utils.py | 45 + sdk/python/kfp/v2/components/utils_test.py | 48 + .../kfp/v2/dsl/experimental/__init__.py | 35 + sdk/python/requirements.in | 2 +- sdk/python/requirements.txt | 2 +- sdk/python/setup.py | 2 +- 39 files changed, 5423 insertions(+), 236 deletions(-) create mode 100644 sdk/python/kfp/v2/compiler/experimental/__init__.py create mode 100644 sdk/python/kfp/v2/compiler/experimental/compiler.py create mode 100644 sdk/python/kfp/v2/compiler/experimental/compiler_test.py create mode 100644 sdk/python/kfp/v2/compiler/experimental/pipeline_spec_builder.py create mode 100644 sdk/python/kfp/v2/compiler/experimental/pipeline_spec_builder_test.py create mode 100644 sdk/python/kfp/v2/compiler_cli_tests/test_data/experimental_pipeline_with_exit_handler.json create mode 100644 sdk/python/kfp/v2/compiler_cli_tests/test_data/experimental_pipeline_with_exit_handler.py create mode 100644 sdk/python/kfp/v2/compiler_cli_tests/test_data/experimental_pipeline_with_loops.json create mode 100644 sdk/python/kfp/v2/compiler_cli_tests/test_data/experimental_pipeline_with_loops.py create mode 100644 sdk/python/kfp/v2/compiler_cli_tests/test_data/experimental_pipeline_with_nested_conditions_yaml.json create mode 100644 sdk/python/kfp/v2/compiler_cli_tests/test_data/experimental_pipeline_with_nested_conditions_yaml.py create mode 100644 sdk/python/kfp/v2/compiler_cli_tests/test_data/experimental_two_step_pipeline.json create mode 100644 sdk/python/kfp/v2/compiler_cli_tests/test_data/experimental_two_step_pipeline.py create mode 100644 sdk/python/kfp/v2/components/experimental/component_factory.py rename sdk/python/kfp/v2/{dsl => components}/experimental/for_loop.py (94%) rename sdk/python/kfp/v2/{dsl => components}/experimental/for_loop_test.py (98%) create mode 100644 
sdk/python/kfp/v2/components/experimental/pipeline.py create mode 100644 sdk/python/kfp/v2/components/experimental/tasks_group.py create mode 100644 sdk/python/kfp/v2/components/experimental/yaml_component.py create mode 100644 sdk/python/kfp/v2/components/types/experimental/__init__.py create mode 100644 sdk/python/kfp/v2/components/types/experimental/type_utils.py diff --git a/sdk/RELEASE.md b/sdk/RELEASE.md index 7e7deb19b5e..619925490b1 100644 --- a/sdk/RELEASE.md +++ b/sdk/RELEASE.md @@ -3,6 +3,7 @@ ## Major Features and Improvements * Add optional support to specify description for pipeline version [\#6472](https://github.com/kubeflow/pipelines/issues/6472). +* New v2 experimental compiler. [\#6803](https://github.com/kubeflow/pipelines/pull/6803) ## Breaking Changes @@ -15,6 +16,7 @@ ## Bug Fixes and Other Changes * Fix placeholder mapping error in v2. [\#6794](https://github.com/kubeflow/pipelines/pull/6794) +* Depends on `kfp-pipeline-spec>=0.1.13,<0.2.0` [\#6803](https://github.com/kubeflow/pipelines/pull/6803) ## Documentation Updates diff --git a/sdk/python/kfp/v2/compiler/compiler_utils.py b/sdk/python/kfp/v2/compiler/compiler_utils.py index 99356e29112..ac17ec4235e 100644 --- a/sdk/python/kfp/v2/compiler/compiler_utils.py +++ b/sdk/python/kfp/v2/compiler/compiler_utils.py @@ -96,6 +96,11 @@ def validate_pipeline_name(name: str) -> None: def is_v2_component(op: _container_op.ContainerOp) -> bool: """Determines whether a component is a KFP v2 component.""" + + # TODO: migrate v2 component to PipelineTask + if not isinstance(op, _container_op.ContainerOp): + return False + component_spec = op._metadata return (component_spec and component_spec.metadata and component_spec.metadata.annotations and diff --git a/sdk/python/kfp/v2/compiler/experimental/__init__.py b/sdk/python/kfp/v2/compiler/experimental/__init__.py new file mode 100644 index 00000000000..a5cd6411321 --- /dev/null +++ b/sdk/python/kfp/v2/compiler/experimental/__init__.py @@ -0,0 +1,15 @@ +# Copyright 2021 The Kubeflow Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from kfp.v2.compiler.experimental.compiler import Compiler diff --git a/sdk/python/kfp/v2/compiler/experimental/compiler.py b/sdk/python/kfp/v2/compiler/experimental/compiler.py new file mode 100644 index 00000000000..7ee7fe3901d --- /dev/null +++ b/sdk/python/kfp/v2/compiler/experimental/compiler.py @@ -0,0 +1,1022 @@ +# Copyright 2021 The Kubeflow Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""KFP DSL v2 compiler. 
+ +This is an experimental implementation of KFP compiler that compiles KFP +pipeline into Pipeline IR: +https://docs.google.com/document/d/1PUDuSQ8vmeKSBloli53mp7GIvzekaY7sggg6ywy35Dk/ +""" + +import collections +import inspect +import json +import uuid +import warnings +from typing import (Any, Callable, Dict, List, Mapping, Optional, Set, Tuple, + Union) + +import kfp +import kfp.v2.dsl.experimental as dsl +from google.protobuf import json_format +from kfp.pipeline_spec import pipeline_spec_pb2 +from kfp.v2.compiler import compiler_utils +from kfp.v2.compiler.experimental import pipeline_spec_builder as builder +from kfp.v2.components import utils as component_utils +from kfp.v2.components.experimental import component_factory +from kfp.v2.components.experimental import for_loop +from kfp.v2.components.experimental import pipeline_channel +from kfp.v2.components.experimental import pipeline_task +from kfp.v2.components.experimental import tasks_group +from kfp.v2.components.types import artifact_types +from kfp.v2.components.types.experimental import type_utils + +_GroupOrTask = Union[tasks_group.TasksGroup, pipeline_task.PipelineTask] + + +class Compiler: + """Experimental DSL compiler that targets the PipelineSpec IR. + + It compiles pipeline function into PipelineSpec json string. + PipelineSpec is the IR protobuf message that defines a pipeline: + https://github.com/kubeflow/pipelines/blob/237795539f7b85bac77435e2464367226ee19391/api/v2alpha1/pipeline_spec.proto#L8 + In this initial implementation, we only support components authored through + Component yaml spec. And we don't support advanced features like conditions, + static and dynamic loops, etc. + + Example:: + + @dsl.pipeline( + name='name', + description='description', + ) + def my_pipeline(a: int = 1, b: str = "default value"): + ... + + kfp.v2.compiler.Compiler().compile( + pipeline_func=my_pipeline, + package_path='path/to/pipeline.json', + ) + """ + + def compile( + self, + pipeline_func: Callable[..., Any], + package_path: str, + pipeline_name: Optional[str] = None, + pipeline_parameters: Optional[Mapping[str, Any]] = None, + type_check: bool = True, + ) -> None: + """Compile the given pipeline function into pipeline job json. + + Args: + pipeline_func: Pipeline function with @dsl.pipeline decorator. + package_path: The output pipeline spec .json file path. For example, + "~/pipeline_spec.json". + pipeline_name: Optional; the name of the pipeline. + pipeline_parameters: Optional; the mapping from parameter names to + values. + type_check: Optional; whether to enable the type check or not. + Default is True. + """ + type_check_old_value = kfp.TYPE_CHECK + try: + kfp.TYPE_CHECK = type_check + pipeline_spec = self._create_pipeline_v2( + pipeline_func=pipeline_func, + pipeline_name=pipeline_name, + pipeline_parameters_override=pipeline_parameters, + ) + self._write_pipeline_spec_json( + pipeline_spec=pipeline_spec, + output_path=package_path, + ) + finally: + kfp.TYPE_CHECK = type_check_old_value + + def _create_pipeline_v2( + self, + pipeline_func: Callable[..., Any], + pipeline_name: Optional[str] = None, + pipeline_parameters_override: Optional[Mapping[str, Any]] = None, + ) -> pipeline_spec_pb2.PipelineSpec: + """Creates a pipeline instance and constructs the pipeline spec from + it. + + Args: + pipeline_func: The pipeline function with @dsl.pipeline decorator. + pipeline_name: Optional; the name of the pipeline. + pipeline_parameters_override: Optional; the mapping from parameter + names to values. 
+
+        Returns:
+            A PipelineSpec proto representing the compiled pipeline.
+        """
+
+        # Create the arg list with no default values and call pipeline function.
+        # Assign type information to the PipelineChannels.
+        pipeline_meta = component_factory.extract_component_interface(
+            pipeline_func)
+        pipeline_name = pipeline_name or pipeline_meta.name
+
+        pipeline_root = getattr(pipeline_func, 'pipeline_root', None)
+
+        args_list = []
+        signature = inspect.signature(pipeline_func)
+
+        for arg_name in signature.parameters:
+            arg_type = pipeline_meta.inputs[arg_name].type
+            if not type_utils.is_parameter_type(arg_type):
+                raise TypeError(
+                    'The pipeline argument "{arg_name}" is viewed as an artifact'
+                    ' due to its type "{arg_type}", and we currently do not '
+                    'support passing artifacts as pipeline inputs. Consider type'
+                    ' annotating the argument with a primitive type, such as '
+                    '"str", "int", "float", "bool", "dict", and "list".'.format(
+                        arg_name=arg_name, arg_type=arg_type))
+            args_list.append(
+                dsl.PipelineParameterChannel(
+                    name=arg_name, channel_type=arg_type))
+
+        with dsl.Pipeline(pipeline_name) as dsl_pipeline:
+            pipeline_func(*args_list)
+
+        if not dsl_pipeline.tasks:
+            raise ValueError('Task is missing from pipeline.')
+
+        self._validate_exit_handler(dsl_pipeline)
+
+        pipeline_inputs = pipeline_meta.inputs or {}
+
+        # Verify that pipeline_parameters_override contains only input names
+        # that match the pipeline inputs definition.
+        pipeline_parameters_override = pipeline_parameters_override or {}
+        for input_name in pipeline_parameters_override:
+            if input_name not in pipeline_inputs:
+                raise ValueError(
+                    'Pipeline parameter {} does not match any known '
+                    'pipeline argument.'.format(input_name))
+
+        # Fill in the default values.
+        args_list_with_defaults = [
+            dsl.PipelineParameterChannel(
+                name=input_name,
+                channel_type=input_spec.type,
+                value=pipeline_parameters_override.get(input_name) or
+                input_spec.default,
+            ) for input_name, input_spec in pipeline_inputs.items()
+        ]
+
+        # Make the pipeline group name unique to prevent name clashes with
+        # templates.
+        pipeline_group = dsl_pipeline.groups[0]
+        pipeline_group.name = uuid.uuid4().hex
+
+        pipeline_spec = self._create_pipeline_spec(
+            pipeline_args=args_list_with_defaults,
+            pipeline=dsl_pipeline,
+        )
+
+        if pipeline_root:
+            pipeline_spec.default_pipeline_root = pipeline_root
+
+        return pipeline_spec
+
+    def _write_pipeline_spec_json(
+        self,
+        pipeline_spec: pipeline_spec_pb2.PipelineSpec,
+        output_path: str,
+    ) -> None:
+        """Writes pipeline spec into a json file.
+
+        Args:
+            pipeline_spec: IR pipeline spec.
+            output_path: The file path to be written.
+
+        Raises:
+            ValueError: if the specified output path doesn't end with the
+                acceptable extension ".json".
+        """
+        json_text = json_format.MessageToJson(pipeline_spec, sort_keys=True)
+
+        if output_path.endswith('.json'):
+            with open(output_path, 'w') as json_file:
+                json_file.write(json_text)
+        else:
+            raise ValueError(
+                'The output path {} should end with ".json".'.format(
+                    output_path))
+
+    def _validate_exit_handler(self, pipeline: dsl.Pipeline) -> None:
+        """Makes sure there is at most one global exit handler.
+
+        This is temporary to be compatible with KFP v1.
+
+        Raises:
+            ValueError: if there is more than one exit handler.
+ """ + + def _validate_exit_handler_helper( + group: tasks_group.TasksGroup, + exiting_task_names: List[str], + handler_exists: bool, + ) -> None: + + if isinstance(group, dsl.ExitHandler): + if handler_exists or len(exiting_task_names) > 1: + raise ValueError( + 'Only one global exit_handler is allowed and all ops need to be included.' + ) + handler_exists = True + + if group.tasks: + exiting_task_names.extend([x.name for x in group.tasks]) + + for group in group.groups: + _validate_exit_handler_helper( + group=group, + exiting_task_names=exiting_task_names, + handler_exists=handler_exists, + ) + + _validate_exit_handler_helper( + group=pipeline.groups[0], + exiting_task_names=[], + handler_exists=False, + ) + + def _create_pipeline_spec( + self, + pipeline_args: List[dsl.PipelineChannel], + pipeline: dsl.Pipeline, + ) -> pipeline_spec_pb2.PipelineSpec: + """Creates a pipeline spec object. + + Args: + pipeline_args: The list of pipeline input parameters. + pipeline: The instantiated pipeline object. + + Returns: + A PipelineSpec proto representing the compiled pipeline. + + Raises: + ValueError if the argument is of unsupported types. + """ + compiler_utils.validate_pipeline_name(pipeline.name) + + deployment_config = pipeline_spec_pb2.PipelineDeploymentConfig() + pipeline_spec = pipeline_spec_pb2.PipelineSpec() + + pipeline_spec.pipeline_info.name = pipeline.name + pipeline_spec.sdk_version = 'kfp-{}'.format(kfp.__version__) + # Schema version 2.1.0 is required for kfp-pipeline-spec>0.1.13 + pipeline_spec.schema_version = '2.1.0' + + pipeline_spec.root.CopyFrom( + builder.build_component_spec_for_group( + pipeline_channels=pipeline_args, + is_root_group=True, + )) + + root_group = pipeline.groups[0] + + all_groups = self._get_all_groups(root_group) + group_name_to_group = {group.name: group for group in all_groups} + task_name_to_parent_groups, group_name_to_parent_groups = ( + self._get_parent_groups(root_group)) + condition_channels = self._get_condition_channels_for_tasks(root_group) + name_to_for_loop_group = { + group_name: group + for group_name, group in group_name_to_group.items() + if isinstance(group, dsl.ParallelFor) + } + inputs = self._get_inputs_for_all_groups( + pipeline=pipeline, + pipeline_args=pipeline_args, + root_group=root_group, + task_name_to_parent_groups=task_name_to_parent_groups, + group_name_to_parent_groups=group_name_to_parent_groups, + condition_channels=condition_channels, + name_to_for_loop_group=name_to_for_loop_group, + ) + dependencies = self._get_dependencies( + pipeline=pipeline, + root_group=root_group, + task_name_to_parent_groups=task_name_to_parent_groups, + group_name_to_parent_groups=group_name_to_parent_groups, + group_name_to_group=group_name_to_group, + condition_channels=condition_channels, + ) + + for group in all_groups: + self._build_spec_by_group( + pipeline_spec=pipeline_spec, + deployment_config=deployment_config, + group=group, + inputs=inputs, + dependencies=dependencies, + rootgroup_name=root_group.name, + task_name_to_parent_groups=task_name_to_parent_groups, + group_name_to_parent_groups=group_name_to_parent_groups, + name_to_for_loop_group=name_to_for_loop_group, + ) + + # TODO: refactor to support multiple exit handler per pipeline. 
+        if pipeline.groups[0].groups:
+            first_group = pipeline.groups[0].groups[0]
+            if isinstance(first_group, dsl.ExitHandler):
+                exit_task = first_group.exit_task
+                exit_task_name = component_utils.sanitize_task_name(
+                    exit_task.name)
+                exit_handler_group_task_name = component_utils.sanitize_task_name(
+                    first_group.name)
+                input_parameters_in_current_dag = [
+                    input_name for input_name in
+                    pipeline_spec.root.input_definitions.parameters
+                ]
+                exit_task_task_spec = builder.build_task_spec_for_exit_task(
+                    task=exit_task,
+                    dependent_task=exit_handler_group_task_name,
+                    pipeline_inputs=pipeline_spec.root.input_definitions,
+                )
+
+                exit_task_component_spec = builder.build_component_spec_for_task(
+                    task=exit_task)
+
+                exit_task_container_spec = builder.build_container_spec_for_task(
+                    task=exit_task)
+
+                # Add exit task task spec.
+                pipeline_spec.root.dag.tasks[exit_task_name].CopyFrom(
+                    exit_task_task_spec)
+
+                # Add exit task component spec if it does not exist.
+                component_name = exit_task_task_spec.component_ref.name
+                if component_name not in pipeline_spec.components:
+                    pipeline_spec.components[component_name].CopyFrom(
+                        exit_task_component_spec)
+
+                # Add exit task container spec if it does not exist.
+                executor_label = exit_task_component_spec.executor_label
+                if executor_label not in deployment_config.executors:
+                    deployment_config.executors[
+                        executor_label].container.CopyFrom(
+                            exit_task_container_spec)
+                    pipeline_spec.deployment_spec.update(
+                        json_format.MessageToDict(deployment_config))
+
+        return pipeline_spec
+
+    def _get_all_groups(
+        self,
+        root_group: tasks_group.TasksGroup,
+    ) -> List[tasks_group.TasksGroup]:
+        """Gets all groups (not including tasks) in a pipeline.
+
+        Args:
+            root_group: The root group of a pipeline.
+
+        Returns:
+            A list of all groups in topological order (parent first).
+        """
+        all_groups = []
+
+        def _get_all_groups_helper(
+            group: tasks_group.TasksGroup,
+            all_groups: List[tasks_group.TasksGroup],
+        ):
+            all_groups.append(group)
+            for group in group.groups:
+                _get_all_groups_helper(group, all_groups)
+
+        _get_all_groups_helper(root_group, all_groups)
+        return all_groups
+
+    def _get_parent_groups(
+        self,
+        root_group: tasks_group.TasksGroup,
+    ) -> Tuple[Mapping[str, List[_GroupOrTask]],
+               Mapping[str, List[_GroupOrTask]]]:
+        """Get parent groups that contain the specified tasks.
+
+        Each pipeline has a root group. Each group has a list of tasks (leaf)
+        and groups.
+        This function traverses the tree and gets ancestor groups for all
+        tasks.
+
+        Args:
+            root_group: The root group of a pipeline.
+
+        Returns:
+            A tuple. The first item is a mapping of task names to parent
+            groups, and the second item is a mapping of group names to parent
+            groups.
+            A list of parent groups is a list of ancestor groups including the
+            task/group itself. The list is sorted in a way that the farthest
+            parent group is the first and the task/group itself is the last.
+ """ + + def _get_parent_groups_helper( + current_groups: List[tasks_group.TasksGroup], + tasks_to_groups: Dict[str, List[_GroupOrTask]], + groups_to_groups: Dict[str, List[_GroupOrTask]], + ) -> None: + root_group = current_groups[-1] + for group in root_group.groups: + + groups_to_groups[group.name] = [x.name for x in current_groups + ] + [group.name] + current_groups.append(group) + + _get_parent_groups_helper( + current_groups=current_groups, + tasks_to_groups=tasks_to_groups, + groups_to_groups=groups_to_groups, + ) + del current_groups[-1] + + for task in root_group.tasks: + tasks_to_groups[task.name] = [x.name for x in current_groups + ] + [task.name] + + tasks_to_groups = {} + groups_to_groups = {} + current_groups = [root_group] + + _get_parent_groups_helper( + current_groups=current_groups, + tasks_to_groups=tasks_to_groups, + groups_to_groups=groups_to_groups, + ) + return (tasks_to_groups, groups_to_groups) + + # TODO: do we really need this? + def _get_condition_channels_for_tasks( + self, + root_group: tasks_group.TasksGroup, + ) -> Mapping[str, Set[dsl.PipelineChannel]]: + """Gets channels referenced in conditions of tasks' parents. + + Args: + root_group: The root group of a pipeline. + + Returns: + A mapping of task name to a set of pipeline channels appeared in its + parent dsl.Condition groups. + """ + conditions = collections.defaultdict(set) + + def _get_condition_channels_for_tasks_helper( + group, + current_conditions_channels, + ): + new_current_conditions_channels = current_conditions_channels + if isinstance(group, dsl.Condition): + new_current_conditions_channels = list( + current_conditions_channels) + if isinstance(group.condition.left_operand, + dsl.PipelineChannel): + new_current_conditions_channels.append( + group.condition.left_operand) + if isinstance(group.condition.right_operand, + dsl.PipelineChannel): + new_current_conditions_channels.append( + group.condition.right_operand) + for task in group.tasks: + for channel in new_current_conditions_channels: + conditions[task.name].add(channel) + for group in group.groups: + _get_condition_channels_for_tasks_helper( + group, new_current_conditions_channels) + + _get_condition_channels_for_tasks_helper(root_group, []) + return conditions + + def _get_inputs_for_all_groups( + self, + pipeline: dsl.Pipeline, + pipeline_args: List[dsl.PipelineChannel], + root_group: tasks_group.TasksGroup, + task_name_to_parent_groups: Mapping[str, List[_GroupOrTask]], + group_name_to_parent_groups: Mapping[str, List[tasks_group.TasksGroup]], + condition_channels: Mapping[str, Set[dsl.PipelineParameterChannel]], + name_to_for_loop_group: Mapping[str, dsl.ParallelFor], + ) -> Mapping[str, List[Tuple[dsl.PipelineChannel, str]]]: + """Get inputs and outputs of each group and op. + + Args: + pipeline: The instantiated pipeline object. + pipeline_args: The list of pipeline function arguments as + PipelineChannel. + root_group: The root group of the pipeline. + task_name_to_parent_groups: The dict of task name to list of parent + groups. + group_name_to_parent_groups: The dict of group name to list of + parent groups. + condition_channels: The dict of task name to a set of pipeline + channels referenced by its parent condition groups. + name_to_for_loop_group: The dict of for loop group name to loop + group. + + Returns: + A mapping with key being the group/task names and values being list + of tuples (channel, producing_task_name). + producing_task_name is the name of the task that produces the + channel. 
If the channel is a pipeline argument (no producer task), then producing_task_name is None.
+        """
+        inputs = collections.defaultdict(set)
+
+        for task in pipeline.tasks.values():
+            # A task's inputs and all channels used in conditions for that
+            # task are considered.
+            task_inputs = task.channel_inputs
+            task_condition_inputs = list(condition_channels[task.name])
+
+            for channel in task.channel_inputs + task_condition_inputs:
+
+                # If the value is already provided (immediate value), then no
+                # need to expose it as input for its parent groups.
+                if getattr(channel, 'value', None):
+                    continue
+
+                # channels_to_add could be a list of PipelineChannels when loop
+                # args are involved. Given a nested loops example as follows:
+                #
+                #   def my_pipeline(loop_parameter: list):
+                #       with dsl.ParallelFor(loop_parameter) as item:
+                #           with dsl.ParallelFor(item.p_a) as item_p_a:
+                #               print_op(item_p_a.q_a)
+                #
+                # The print_op takes an input of
+                # {{channel:task=;name=loop_parameter-loop-item-subvar-p_a-loop-item-subvar-q_a;}}.
+                # Given this, we calculate the list of PipelineChannels
+                # potentially needed across DAG levels as follows:
+                #
+                # [{{channel:task=;name=loop_parameter-loop-item-subvar-p_a-loop-item-subvar-q_a}},
+                #  {{channel:task=;name=loop_parameter-loop-item-subvar-p_a-loop-item}},
+                #  {{channel:task=;name=loop_parameter-loop-item-subvar-p_a}},
+                #  {{channel:task=;name=loop_parameter-loop-item}},
+                #  {{channel:task=;name=loop_parameter}}]
+                #
+                # For the above example, the first loop needs the input of
+                # {{channel:task=;name=loop_parameter}},
+                # the second loop needs the input of
+                # {{channel:task=;name=loop_parameter-loop-item}}
+                # and the print_op needs the input of
+                # {{channel:task=;name=loop_parameter-loop-item-subvar-p_a-loop-item}}
+                #
+                # When we traverse a DAG in a top-down direction, we add
+                # channels from the end, and pop them out when they are no
+                # longer needed by the sub-DAG.
+                # When we traverse a DAG in a bottom-up direction, we add
+                # channels from the front, and pop them out when they are no
+                # longer needed by the parent DAG.
+                channels_to_add = collections.deque()
+                channel_to_add = channel
+
+                while isinstance(channel_to_add, (
+                        for_loop.LoopArgument,
+                        for_loop.LoopArgumentVariable,
+                )):
+                    channels_to_add.append(channel_to_add)
+                    if isinstance(channel_to_add,
+                                  for_loop.LoopArgumentVariable):
+                        channel_to_add = channel_to_add.loop_argument
+                    elif isinstance(channel_to_add.items_or_pipeline_channel,
+                                    dsl.PipelineChannel):
+                        channel_to_add = channel_to_add.items_or_pipeline_channel
+                    else:
+                        break
+
+                if isinstance(channel_to_add, dsl.PipelineChannel):
+                    channels_to_add.append(channel_to_add)
+
+                if channel.task_name:
+                    # The PipelineChannel is produced by a task.
+
+                    upstream_task = pipeline.tasks[channel.task_name]
+                    upstream_groups, downstream_groups = (
+                        self._get_uncommon_ancestors(
+                            task_name_to_parent_groups=task_name_to_parent_groups,
+                            group_name_to_parent_groups=group_name_to_parent_groups,
+                            task1=upstream_task,
+                            task2=task,
+                        ))
+
+                    for i, group_name in enumerate(downstream_groups):
+                        if i == 0:
+                            # If it is the first uncommon downstream group,
+                            # then the input comes from the first uncommon
+                            # upstream group.
+                            producer_task = upstream_groups[0]
+                        else:
+                            # If not the first downstream group, then the input
+                            # is passed down from its ancestor groups so the
+                            # upstream group is None.
+                            producer_task = None
+
+                        inputs[group_name].add(
+                            (channels_to_add[-1], producer_task))
+
+                        if group_name in name_to_for_loop_group:
+                            loop_group = name_to_for_loop_group[group_name]
+
+                            # Pop the last element from channels_to_add if it
+                            # is found in the current (loop) DAG. Downstream
+                            # tasks only need the more specific versions of it.
+                            if channels_to_add[
+                                    -1].full_name in loop_group.loop_argument.full_name:
+                                channels_to_add.pop()
+                                if not channels_to_add:
+                                    break
+
+                else:
+                    # The PipelineChannel is not produced by a task. It's
+                    # either a top-level pipeline input, or a constant value
+                    # used as loop items.
+
+                    # TODO: revisit if this is correct.
+                    if getattr(task, 'is_exit_handler', False):
+                        continue
+
+                    # For a PipelineChannel resulting from a constant value
+                    # used as loop items, we have to go bottom-up because the
+                    # PipelineChannel can originate from the middle of a DAG,
+                    # where it is neither needed by nor visible to its parent
+                    # DAG.
+                    if isinstance(
+                            channel,
+                            (for_loop.LoopArgument, for_loop.LoopArgumentVariable
+                            )) and channel.is_with_items_loop_argument:
+                        for group_name in task_name_to_parent_groups[
+                                task.name][::-1]:
+
+                            inputs[group_name].add((channels_to_add[0], None))
+                            if group_name in name_to_for_loop_group:
+                                # for example:
+                                # loop_group.loop_argument.name = 'loop-item-param-1'
+                                # channel.name = 'loop-item-param-1-subvar-a'
+                                loop_group = name_to_for_loop_group[group_name]
+
+                                if channels_to_add[
+                                        0].full_name in loop_group.loop_argument.full_name:
+                                    channels_to_add.popleft()
+                                    if not channels_to_add:
+                                        break
+                    else:
+                        # For a PipelineChannel from a pipeline input, go
+                        # top-down just like we do for a PipelineChannel
+                        # produced by a task.
+                        for group_name in task_name_to_parent_groups[task.name]:
+
+                            inputs[group_name].add((channels_to_add[-1], None))
+                            if group_name in name_to_for_loop_group:
+                                loop_group = name_to_for_loop_group[group_name]
+
+                                if channels_to_add[
+                                        -1].full_name in loop_group.loop_argument.full_name:
+                                    channels_to_add.pop()
+                                    if not channels_to_add:
+                                        break
+
+        return inputs
+
+    def _get_uncommon_ancestors(
+        self,
+        task_name_to_parent_groups: Mapping[str, List[_GroupOrTask]],
+        group_name_to_parent_groups: Mapping[str, List[tasks_group.TasksGroup]],
+        task1: _GroupOrTask,
+        task2: _GroupOrTask,
+    ) -> Tuple[List[_GroupOrTask], List[_GroupOrTask]]:
+        """Gets the unique ancestors between two tasks.
+
+        For example, if task1's ancestor groups are [root, G1, G2, G3, task1]
+        and task2's ancestor groups are [root, G1, G4, task2], then it returns
+        a tuple ([G2, G3, task1], [G4, task2]).
+
+        Args:
+            task_name_to_parent_groups: The dict of task name to list of
+                parent groups.
+            group_name_to_parent_groups: The dict of group name to list of
+                parent groups.
+            task1: One of the two tasks.
+            task2: The other task.
+
+        Returns:
+            A tuple of lists of uncommon ancestors for each task.
+ """ + if task1.name in task_name_to_parent_groups: + task1_groups = task_name_to_parent_groups[task1.name] + elif task1.name in group_name_to_parent_groups: + task1_groups = group_name_to_parent_groups[task1.name] + else: + raise ValueError(task1.name + ' does not exist.') + + if task2.name in task_name_to_parent_groups: + task2_groups = task_name_to_parent_groups[task2.name] + elif task2.name in group_name_to_parent_groups: + task2_groups = group_name_to_parent_groups[task2.name] + else: + raise ValueError(task2.name + ' does not exist.') + + both_groups = [task1_groups, task2_groups] + common_groups_len = sum( + 1 for x in zip(*both_groups) if x == (x[0],) * len(x)) + group1 = task1_groups[common_groups_len:] + group2 = task2_groups[common_groups_len:] + return (group1, group2) + + # TODO: revisit for dependency that breaks through DAGs. + def _get_dependencies( + self, + pipeline: dsl.Pipeline, + root_group: tasks_group.TasksGroup, + task_name_to_parent_groups: Mapping[str, List[_GroupOrTask]], + group_name_to_parent_groups: Mapping[str, List[tasks_group.TasksGroup]], + group_name_to_group: Mapping[str, tasks_group.TasksGroup], + condition_channels: Dict[str, dsl.PipelineChannel], + ) -> Mapping[str, List[_GroupOrTask]]: + """Gets dependent groups and tasks for all tasks and groups. + + Args: + pipeline: The instantiated pipeline object. + root_group: The root group of the pipeline. + task_name_to_parent_groups: The dict of task name to list of parent + groups. + group_name_to_parent_groups: The dict of group name to list of + parent groups. + group_name_to_group: The dict of group name to group. + condition_channels: The dict of task name to a set of pipeline + channels referenced by its parent condition groups. + + Returns: + A Mapping where key is group/task name, value is a list of dependent + groups/tasks. The dependencies are calculated in the following way: + if task2 depends on task1, and their ancestors are + [root, G1, G2, task1] and [root, G1, G3, G4, task2], then G3 is + dependent on G2. Basically dependency only exists in the first + uncommon ancesters in their ancesters chain. Only sibling + groups/tasks can have dependencies. 
+ """ + dependencies = collections.defaultdict(set) + for task in pipeline.tasks.values(): + upstream_task_names = set() + task_condition_inputs = list(condition_channels[task.name]) + for channel in task.channel_inputs + task_condition_inputs: + if channel.task_name: + upstream_task_names.add(channel.task_name) + upstream_task_names |= set(task.dependent_tasks) + + for upstream_task_name in upstream_task_names: + # the dependent op could be either a BaseOp or an opsgroup + if upstream_task_name in pipeline.tasks: + upstream_task = pipeline.tasks[upstream_task_name] + elif upstream_task_name in group_name_to_group: + upstream_task = group_name_to_group[upstream_task_name] + else: + raise ValueError( + f'Compiler cannot find task: {upstream_task_name}.') + + upstream_groups, downstream_groups = self._get_uncommon_ancestors( + task_name_to_parent_groups=task_name_to_parent_groups, + group_name_to_parent_groups=group_name_to_parent_groups, + task1=upstream_task, + task2=task, + ) + dependencies[downstream_groups[0]].add(upstream_groups[0]) + + return dependencies + + def _build_spec_by_group( + self, + pipeline_spec: pipeline_spec_pb2.PipelineSpec, + deployment_config: pipeline_spec_pb2.PipelineDeploymentConfig, + group: tasks_group.TasksGroup, + inputs: Mapping[str, List[Tuple[dsl.PipelineChannel, str]]], + dependencies: Dict[str, List[_GroupOrTask]], + rootgroup_name: str, + task_name_to_parent_groups: Mapping[str, List[_GroupOrTask]], + group_name_to_parent_groups: Mapping[str, List[tasks_group.TasksGroup]], + name_to_for_loop_group: Mapping[str, dsl.ParallelFor], + ) -> None: + """Generates IR spec given a TasksGroup. + + Args: + pipeline_spec: The pipeline_spec to update in place. + deployment_config: The deployment_config to hold all executors. The + spec is updated in place. + group: The TasksGroup to generate spec for. + inputs: The inputs dictionary. The keys are group/task names and the + values are lists of tuples (channel, producing_task_name). + dependencies: The group dependencies dictionary. The keys are group + or task names, and the values are lists of dependent groups or + tasks. + rootgroup_name: The name of the group root. Used to determine whether + the component spec for the current group should be the root dag. + task_name_to_parent_groups: The dict of task name to parent groups. + Key is task name. Value is a list of ancestor groups including + the task itself. The list of a given task is sorted in a way that + the farthest group is the first and the task itself is the last. + group_name_to_parent_groups: The dict of group name to parent groups. + Key is the group name. Value is a list of ancestor groups + including the group itself. The list of a given group is sorted + in a way that the farthest group is the first and the group + itself is the last. + name_to_for_loop_group: The dict of for loop group name to loop + group. + """ + group_component_name = component_utils.sanitize_component_name( + group.name) + + if group.name == rootgroup_name: + group_component_spec = pipeline_spec.root + else: + group_component_spec = pipeline_spec.components[ + group_component_name] + + task_name_to_task_spec = {} + task_name_to_component_spec = {} + + # Generate task specs and component specs for the dag. 
+        subgroups = group.groups + group.tasks
+        for subgroup in subgroups:
+
+            subgroup_inputs = inputs.get(subgroup.name, [])
+            subgroup_channels = [channel for channel, _ in subgroup_inputs]
+
+            subgroup_component_name = (
+                component_utils.sanitize_component_name(subgroup.name))
+
+            tasks_in_current_dag = [
+                component_utils.sanitize_task_name(subgroup.name)
+                for subgroup in subgroups
+            ]
+            input_parameters_in_current_dag = [
+                input_name for input_name in
+                group_component_spec.input_definitions.parameters
+            ]
+            input_artifacts_in_current_dag = [
+                input_name for input_name in
+                group_component_spec.input_definitions.artifacts
+            ]
+            is_parent_component_root = (
+                group_component_spec == pipeline_spec.root)
+
+            if isinstance(subgroup, pipeline_task.PipelineTask):
+
+                subgroup_task_spec = builder.build_task_spec_for_task(
+                    task=subgroup,
+                    parent_component_inputs=group_component_spec
+                    .input_definitions,
+                    tasks_in_current_dag=tasks_in_current_dag,
+                    input_parameters_in_current_dag=input_parameters_in_current_dag,
+                    input_artifacts_in_current_dag=input_artifacts_in_current_dag,
+                )
+                task_name_to_task_spec[subgroup.name] = subgroup_task_spec
+
+                subgroup_component_spec = builder.build_component_spec_for_task(
+                    task=subgroup)
+                task_name_to_component_spec[
+                    subgroup.name] = subgroup_component_spec
+
+                # TODO: handle importer spec.
+
+                subgroup_container_spec = builder.build_container_spec_for_task(
+                    task=subgroup)
+
+                if compiler_utils.is_v2_component(subgroup):
+                    compiler_utils.refactor_v2_container_spec(
+                        subgroup_container_spec)
+
+                executor_label = subgroup_component_spec.executor_label
+
+                if executor_label not in deployment_config.executors:
+                    deployment_config.executors[
+                        executor_label].container.CopyFrom(
+                            subgroup_container_spec)
+
+            elif isinstance(subgroup, dsl.ParallelFor):
+
+                # "Punch the hole", adding additional inputs (other than loop
+                # arguments which will be handled separately) needed by its
+                # subgroups or tasks.
+                loop_subgroup_channels = []
+
+                for channel in subgroup_channels:
+                    # Skip 'withItems' loop arguments if they come from an
+                    # inner loop.
+                    if isinstance(
+                            channel,
+                            (for_loop.LoopArgument, for_loop.LoopArgumentVariable
+                            )) and channel.is_with_items_loop_argument:
+                        withitems_loop_arg_found_in_self_or_upstream = False
+                        for group_name in group_name_to_parent_groups[
+                                subgroup.name][::-1]:
+                            if group_name in name_to_for_loop_group:
+                                loop_group = name_to_for_loop_group[group_name]
+                                if channel.name in loop_group.loop_argument.name:
+                                    withitems_loop_arg_found_in_self_or_upstream = True
+                                    break
+                        if not withitems_loop_arg_found_in_self_or_upstream:
+                            continue
+                    loop_subgroup_channels.append(channel)
+
+                if subgroup.items_is_pipeline_channel:
+                    # This loop_argument is based on a pipeline channel, i.e.,
+                    # rather than a static list, it is either the output of
+                    # another task or a global pipeline input parameter.
+                    loop_subgroup_channels.append(
+                        subgroup.loop_argument.items_or_pipeline_channel)
+
+                loop_subgroup_channels.append(subgroup.loop_argument)
+
+                subgroup_component_spec = builder.build_component_spec_for_group(
+                    pipeline_channels=loop_subgroup_channels,
+                    is_root_group=False,
+                )
+
+                subgroup_task_spec = builder.build_task_spec_for_group(
+                    group=subgroup,
+                    pipeline_channels=loop_subgroup_channels,
+                    tasks_in_current_dag=tasks_in_current_dag,
+                    is_parent_component_root=is_parent_component_root,
+                )
+
+            elif isinstance(subgroup, dsl.Condition):
+
+                # "Punch the hole", adding inputs needed by its subgroups or
+                # tasks.
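+                # For example, for a hypothetical condition such as
+                #
+                #     with dsl.Condition(train_task.outputs['accuracy'] > 0.9):
+                #         deploy_op()
+                #
+                # the channel behind train_task.outputs['accuracy'] appears in
+                # an operand, so it is appended below and thereby surfaced as
+                # an input of the condition group's component spec, just like
+                # loop arguments are for ParallelFor above.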
+ condition_subgroup_channels = list(subgroup_channels) + for operand in [ + subgroup.condition.left_operand, + subgroup.condition.right_operand, + ]: + if isinstance(operand, dsl.PipelineChannel): + condition_subgroup_channels.append(operand) + + subgroup_component_spec = builder.build_component_spec_for_group( + pipeline_channels=condition_subgroup_channels, + is_root_group=False, + ) + + subgroup_task_spec = builder.build_task_spec_for_group( + group=subgroup, + pipeline_channels=condition_subgroup_channels, + tasks_in_current_dag=tasks_in_current_dag, + is_parent_component_root=is_parent_component_root, + ) + + elif isinstance(subgroup, dsl.ExitHandler): + + subgroup_component_spec = builder.build_component_spec_for_group( + pipeline_channels=subgroup_channels, + is_root_group=False, + ) + + subgroup_task_spec = builder.build_task_spec_for_group( + group=subgroup, + pipeline_channels=subgroup_channels, + tasks_in_current_dag=tasks_in_current_dag, + is_parent_component_root=is_parent_component_root, + ) + + else: + raise RuntimeError( + f'Unexpected task/group type: Got {subgroup} of type ' + f'{type(subgroup)}.') + + # Generate dependencies section for this task. + if dependencies.get(subgroup.name, None): + group_dependencies = list(dependencies[subgroup.name]) + group_dependencies.sort() + subgroup_task_spec.dependent_tasks.extend([ + component_utils.sanitize_task_name(dep) + for dep in group_dependencies + ]) + + # Add component spec if not exists + if subgroup_component_name not in pipeline_spec.components: + pipeline_spec.components[subgroup_component_name].CopyFrom( + subgroup_component_spec) + + # Add task spec + group_component_spec.dag.tasks[subgroup.name].CopyFrom( + subgroup_task_spec) + + pipeline_spec.deployment_spec.update( + json_format.MessageToDict(deployment_config)) + + # Surface metrics outputs to the top. + builder.populate_metrics_in_dag_outputs( + tasks=group.tasks, + task_name_to_parent_groups=task_name_to_parent_groups, + task_name_to_task_spec=task_name_to_task_spec, + task_name_to_component_spec=task_name_to_component_spec, + pipeline_spec=pipeline_spec, + ) diff --git a/sdk/python/kfp/v2/compiler/experimental/compiler_test.py b/sdk/python/kfp/v2/compiler/experimental/compiler_test.py new file mode 100644 index 00000000000..d3da50b2209 --- /dev/null +++ b/sdk/python/kfp/v2/compiler/experimental/compiler_test.py @@ -0,0 +1,82 @@ +# Copyright 2021 The Kubeflow Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+"""Tests for kfp.v2.compiler.experimental.compiler.""" + +import json +import os +import shutil +import tempfile +import unittest + +from kfp.v2 import components +from kfp.v2.compiler.experimental import compiler +import kfp.v2.dsl.experimental as dsl +from kfp.v2.components.types import type_utils + + +class CompilerTest(unittest.TestCase): + + def test_compile_simple_pipeline(self): + + tmpdir = tempfile.mkdtemp() + try: + producer_op = components.load_component_from_text(""" + name: producer + inputs: + - {name: input_param, type: String} + outputs: + - {name: output_model, type: Model} + - {name: output_value, type: Integer} + implementation: + container: + image: gcr.io/my-project/my-image:tag + args: + - {inputValue: input_param} + - {outputPath: output_model} + - {outputPath: output_value} + """) + + consumer_op = components.load_component_from_text(""" + name: consumer + inputs: + - {name: input_model, type: Model} + - {name: input_value, type: Integer} + implementation: + container: + image: gcr.io/my-project/my-image:tag + args: + - {inputPath: input_model} + - {inputValue: input_value} + """) + + @dsl.pipeline(name='test-pipeline') + def simple_pipeline(pipeline_input: str = 'Hello KFP!'): + producer = producer_op(input_param=pipeline_input) + consumer = consumer_op( + input_model=producer.outputs['output_model'], + input_value=producer.outputs['output_value']) + + target_json_file = os.path.join(tmpdir, 'result.json') + compiler.Compiler().compile( + pipeline_func=simple_pipeline, package_path=target_json_file) + + self.assertTrue(os.path.exists(target_json_file)) + with open(target_json_file, 'r') as f: + print(f.read()) + finally: + shutil.rmtree(tmpdir) + + +if __name__ == '__main__': + unittest.main() diff --git a/sdk/python/kfp/v2/compiler/experimental/pipeline_spec_builder.py b/sdk/python/kfp/v2/compiler/experimental/pipeline_spec_builder.py new file mode 100644 index 00000000000..6c5e55ff751 --- /dev/null +++ b/sdk/python/kfp/v2/compiler/experimental/pipeline_spec_builder.py @@ -0,0 +1,858 @@ +# Copyright 2021 The Kubeflow Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+"""Functions for creating PipelineSpec proto objects.""" + +import json +from typing import List, Mapping, Optional, Tuple, Union + +from google.protobuf import struct_pb2 +from kfp.pipeline_spec import pipeline_spec_pb2 +from kfp.v2.components import utils as component_utils +from kfp.v2.components.experimental import for_loop +from kfp.v2.components.experimental import pipeline_channel +from kfp.v2.components.experimental import pipeline_task +from kfp.v2.components.experimental import placeholders +from kfp.v2.components.experimental import structures +from kfp.v2.components.experimental import tasks_group +from kfp.v2.components.types import artifact_types +from kfp.v2.components.types.experimental import type_utils + +_GroupOrTask = Union[tasks_group.TasksGroup, pipeline_task.PipelineTask] + + +def _additional_input_name_for_pipeline_channel( + channel_or_name: Union[pipeline_channel.PipelineChannel, str]) -> str: + """Gets the name for an additional (compiler-injected) input.""" + + # Adding a prefix to avoid (reduce chance of) name collision between the + # original component inputs and the injected input. + return 'pipelinechannel--' + ( + channel_or_name.full_name if isinstance( + channel_or_name, pipeline_channel.PipelineChannel) else + channel_or_name) + + +def _to_protobuf_value(value: type_utils.PARAMETER_TYPES) -> struct_pb2.Value: + """Creates a google.protobuf.struct_pb2.Value message out of a provide + value. + + Args: + value: The value to be converted to Value message. + + Returns: + A google.protobuf.struct_pb2.Value message. + + Raises: + ValueError if the given value is not one of the parameter types. + """ + if isinstance(value, str): + return struct_pb2.Value(string_value=value) + elif isinstance(value, (int, float)): + return struct_pb2.Value(number_value=value) + elif isinstance(value, bool): + return struct_pb2.Value(bool_value=value) + elif isinstance(value, dict): + return struct_pb2.Value( + struct_value=struct_pb2.Struct( + fields={k: _to_protobuf_value(v) for k, v in value.items()})) + elif isinstance(value, list): + return struct_pb2.Value( + list_value=struct_pb2.ListValue( + values=[_to_protobuf_value(v) for v in value])) + else: + raise ValueError('Value must be one of the following types: ' + 'str, int, float, bool, dict, and list. Got: ' + f'"{value}" of type "{type(value)}".') + + +def build_task_spec_for_task( + task: pipeline_task.PipelineTask, + parent_component_inputs: pipeline_spec_pb2.ComponentInputsSpec, + tasks_in_current_dag: List[str], + input_parameters_in_current_dag: List[str], + input_artifacts_in_current_dag: List[str], +) -> pipeline_spec_pb2.PipelineTaskSpec: + """Builds PipelineTaskSpec for a pipeline task. + + A task input may reference an output outside its immediate DAG. + For instance:: + + random_num = random_num_op(...) + with dsl.Condition(random_num.output > 5): + print_op('%s > 5' % random_num.output) + + In this example, `dsl.Condition` forms a subDAG with one task from `print_op` + inside the subDAG. The task of `print_op` references output from `random_num` + task, which is outside the sub-DAG. When compiling to IR, such cross DAG + reference is disallowed. So we need to "punch a hole" in the sub-DAG to make + the input available in the subDAG component inputs if it's not already there, + Next, we can call this method to fix the tasks inside the subDAG to make them + reference the component inputs instead of directly referencing the original + producer task. + + Args: + task: The task to build a PipelineTaskSpec for. 
+ parent_component_inputs: The task's parent component's input specs. + tasks_in_current_dag: The list of tasks names for tasks in the same dag. + input_parameters_in_current_dag: The list of input parameters in the DAG + component. + input_artifacts_in_current_dag: The list of input artifacts in the DAG + component. + + Returns: + A PipelineTaskSpec object representing the task. + """ + pipeline_task_spec = pipeline_spec_pb2.PipelineTaskSpec() + pipeline_task_spec.task_info.name = ( + task.task_spec.display_name or task.name) + # Use task.name for component_ref.name because we may customize component + # spec for individual tasks to work around the lack of optional inputs + # support in IR. + pipeline_task_spec.component_ref.name = ( + component_utils.sanitize_component_name(task.name)) + pipeline_task_spec.caching_options.enable_cache = ( + task.task_spec.enable_caching) + + for input_name, input_value in task.inputs.items(): + input_type = task.component_spec.inputs[input_name].type + + if isinstance(input_value, pipeline_channel.PipelineArtifactChannel): + + if input_value.task_name: + # Value is produced by an upstream task. + if input_value.task_name in tasks_in_current_dag: + # Dependent task within the same DAG. + pipeline_task_spec.inputs.artifacts[ + input_name].task_output_artifact.producer_task = ( + component_utils.sanitize_task_name( + input_value.task_name)) + pipeline_task_spec.inputs.artifacts[ + input_name].task_output_artifact.output_artifact_key = ( + input_value.name) + else: + # Dependent task not from the same DAG. + component_input_artifact = ( + _additional_input_name_for_pipeline_channel(input_value) + ) + assert component_input_artifact in parent_component_inputs.artifacts, \ + 'component_input_artifact: {} not found. All inputs: {}'.format( + component_input_artifact, parent_component_inputs) + pipeline_task_spec.inputs.artifacts[ + input_name].component_input_artifact = ( + component_input_artifact) + else: + raise RuntimeError( + f'Artifacts must be produced by a task. Got {input_value}.') + + elif isinstance(input_value, pipeline_channel.PipelineParameterChannel): + + if input_value.task_name: + # Value is produced by an upstream task. + if input_value.task_name in tasks_in_current_dag: + # Dependent task within the same DAG. + pipeline_task_spec.inputs.parameters[ + input_name].task_output_parameter.producer_task = ( + component_utils.sanitize_task_name( + input_value.task_name)) + pipeline_task_spec.inputs.parameters[ + input_name].task_output_parameter.output_parameter_key = ( + input_value.name) + else: + # Dependent task not from the same DAG. + component_input_parameter = ( + _additional_input_name_for_pipeline_channel(input_value) + ) + assert component_input_parameter in parent_component_inputs.parameters, \ + 'component_input_parameter: {} not found. All inputs: {}'.format( + component_input_parameter, parent_component_inputs) + pipeline_task_spec.inputs.parameters[ + input_name].component_input_parameter = ( + component_input_parameter) + else: + # Value is from pipeline input. 
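+                # Example (illustrative): a root-level task consuming the
+                # pipeline parameter 'text' references 'text' directly, while
+                # a task nested one dag deeper references the forwarded input
+                # 'pipelinechannel--text' injected by the compiler.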
+                component_input_parameter = input_value.full_name
+                if component_input_parameter not in parent_component_inputs.parameters:
+                    component_input_parameter = (
+                        _additional_input_name_for_pipeline_channel(input_value)
+                    )
+                pipeline_task_spec.inputs.parameters[
+                    input_name].component_input_parameter = (
+                        component_input_parameter)
+
+        elif isinstance(input_value, for_loop.LoopArgument):
+
+            component_input_parameter = (
+                _additional_input_name_for_pipeline_channel(input_value))
+            assert component_input_parameter in parent_component_inputs.parameters, \
+                'component_input_parameter: {} not found. All inputs: {}'.format(
+                    component_input_parameter, parent_component_inputs)
+            pipeline_task_spec.inputs.parameters[
+                input_name].component_input_parameter = (
+                    component_input_parameter)
+
+        elif isinstance(input_value, for_loop.LoopArgumentVariable):
+
+            component_input_parameter = (
+                _additional_input_name_for_pipeline_channel(
+                    input_value.loop_argument))
+            assert component_input_parameter in parent_component_inputs.parameters, \
+                'component_input_parameter: {} not found. All inputs: {}'.format(
+                    component_input_parameter, parent_component_inputs)
+            pipeline_task_spec.inputs.parameters[
+                input_name].component_input_parameter = (
+                    component_input_parameter)
+            pipeline_task_spec.inputs.parameters[
+                input_name].parameter_expression_selector = (
+                    'parseJson(string_value)["{}"]'.format(
+                        input_value.subvar_name))
+
+        elif isinstance(input_value, str):
+
+            # Handle extra input due to string concat
+            pipeline_channels = (
+                pipeline_channel.extract_pipeline_channels_from_any(input_value)
+            )
+            for channel in pipeline_channels:
+                # The value contains PipelineChannel placeholders which need
+                # to be replaced, and the input needs to be added to the task
+                # spec.
+
+                # Form the name for the compiler injected input, and make sure
+                # it doesn't collide with any existing input names.
+                additional_input_name = (
+                    _additional_input_name_for_pipeline_channel(channel))
+
+                # We don't expect a collision to happen because we prefix the
+                # name of the additional input with 'pipelinechannel--'. But
+                # just in case a collision does happen, throw a RuntimeError
+                # so that we aren't surprised at runtime.
+                for existing_input_name, _ in task.inputs.items():
+                    if existing_input_name == additional_input_name:
+                        raise RuntimeError(
+                            'Name collision between existing input name '
+                            '{} and compiler injected input name {}'.format(
+                                existing_input_name, additional_input_name))
+
+                additional_input_placeholder = (
+                    placeholders.input_parameter_placeholder(
+                        additional_input_name))
+                input_value = input_value.replace(channel.pattern,
+                                                  additional_input_placeholder)
+
+                if channel.task_name:
+                    # Value is produced by an upstream task.
+                    if channel.task_name in tasks_in_current_dag:
+                        # Dependent task within the same DAG. Both the
+                        # producer task and the output key go on the injected
+                        # additional input, not on the original input name.
+                        pipeline_task_spec.inputs.parameters[
+                            additional_input_name].task_output_parameter.producer_task = (
+                                component_utils.sanitize_task_name(
+                                    channel.task_name))
+                        pipeline_task_spec.inputs.parameters[
+                            additional_input_name].task_output_parameter.output_parameter_key = (
+                                channel.name)
+                    else:
+                        # Dependent task not from the same DAG.
+                        component_input_parameter = (
+                            _additional_input_name_for_pipeline_channel(channel)
+                        )
+                        assert component_input_parameter in parent_component_inputs.parameters, \
+                            'component_input_parameter: {} not found. All inputs: {}'.format(
+                                component_input_parameter, parent_component_inputs)
+                        pipeline_task_spec.inputs.parameters[
+                            additional_input_name].component_input_parameter = (
+                                component_input_parameter)
+                else:
+                    # Value is from a pipeline input (or possibly a loop
+                    # argument).
+                    component_input_parameter = channel.full_name
+                    if component_input_parameter not in parent_component_inputs.parameters:
+                        component_input_parameter = (
+                            _additional_input_name_for_pipeline_channel(channel)
+                        )
+                    pipeline_task_spec.inputs.parameters[
+                        additional_input_name].component_input_parameter = (
+                            component_input_parameter)
+
+            pipeline_task_spec.inputs.parameters[
+                input_name].runtime_value.constant.string_value = input_value
+
+        elif isinstance(input_value, (str, int, float, bool, dict, list)):
+
+            pipeline_task_spec.inputs.parameters[
+                input_name].runtime_value.constant.CopyFrom(
+                    _to_protobuf_value(input_value))
+
+        else:
+            raise ValueError(
+                'Input argument supports only the following types: '
+                'str, int, float, bool, dict, and list. '
+                f'Got {input_value} of type {type(input_value)}.')
+
+    return pipeline_task_spec
+
+
+def build_component_spec_for_task(
+    task: pipeline_task.PipelineTask) -> pipeline_spec_pb2.ComponentSpec:
+    """Builds ComponentSpec for a pipeline task.
+
+    Args:
+        task: The task to build a ComponentSpec for.
+
+    Returns:
+        A ComponentSpec object for the task.
+    """
+    component_spec = pipeline_spec_pb2.ComponentSpec()
+    component_spec.executor_label = component_utils.sanitize_executor_label(
+        task.name)
+
+    for input_name, input_spec in (task.component_spec.inputs or {}).items():
+
+        # Skip inputs not present, as a workaround to support optional inputs.
+        if input_name not in task.inputs:
+            continue
+
+        if type_utils.is_parameter_type(input_spec.type):
+            component_spec.input_definitions.parameters[
+                input_name].parameter_type = type_utils.get_parameter_type(
+                    input_spec.type)
+        else:
+            component_spec.input_definitions.artifacts[
+                input_name].artifact_type.CopyFrom(
+                    type_utils.get_artifact_type_schema(input_spec.type))
+
+    for output_name, output_spec in (task.component_spec.outputs or {}).items():
+        if type_utils.is_parameter_type(output_spec.type):
+            component_spec.output_definitions.parameters[
+                output_name].parameter_type = type_utils.get_parameter_type(
+                    output_spec.type)
+        else:
+            component_spec.output_definitions.artifacts[
+                output_name].artifact_type.CopyFrom(
+                    type_utils.get_artifact_type_schema(output_spec.type))
+
+    return component_spec
+
+
+def build_container_spec_for_task(
+    task: pipeline_task.PipelineTask
+) -> pipeline_spec_pb2.PipelineDeploymentConfig.PipelineContainerSpec:
+    """Builds PipelineContainerSpec for a pipeline task.
+
+    Args:
+        task: The task to build a PipelineContainerSpec for.
+
+    Returns:
+        A PipelineContainerSpec object for the task.
+    """
+    container_spec = (
+        pipeline_spec_pb2.PipelineDeploymentConfig.PipelineContainerSpec(
+            image=task.container_spec.image,
+            command=task.container_spec.commands,
+            args=task.container_spec.arguments,
+        ))
+
+    if task.container_spec.env is not None:
+        # Repeated proto fields do not support direct assignment; use extend.
+        container_spec.env.extend([
+            pipeline_spec_pb2.PipelineDeploymentConfig.PipelineContainerSpec
+            .EnvVar(name=name, value=value)
+            for name, value in task.container_spec.env.items()
+        ])
+
+    if task.container_spec.resources is not None:
+        container_spec.resources.cpu_limit = (
+            task.container_spec.resources.cpu_limit)
+        container_spec.resources.memory_limit = (
+            task.container_spec.resources.memory_limit)
+        if task.container_spec.resources.accelerator_count is not None:
+            container_spec.resources.accelerator.CopyFrom(
+                pipeline_spec_pb2.PipelineDeploymentConfig.PipelineContainerSpec
+                .ResourceSpec.AcceleratorConfig(
+                    type=task.container_spec.resources.accelerator_type,
+                    count=task.container_spec.resources.accelerator_count,
+                ))
+
+    return container_spec
+
+
+def _fill_in_component_input_default_value(
+    component_spec: pipeline_spec_pb2.ComponentSpec,
+    input_name: str,
+    default_value: Optional[type_utils.PARAMETER_TYPES],
+) -> None:
+    """Fills in the default of component input parameter.
+
+    Args:
+        component_spec: The ComponentSpec to update in place.
+        input_name: The name of the input parameter.
+        default_value: The default value of the input parameter.
+    """
+    if default_value is None:
+        return
+
+    parameter_type = component_spec.input_definitions.parameters[
+        input_name].parameter_type
+    if pipeline_spec_pb2.ParameterType.NUMBER_INTEGER == parameter_type:
+        component_spec.input_definitions.parameters[
+            input_name].default_value.number_value = default_value
+    elif pipeline_spec_pb2.ParameterType.NUMBER_DOUBLE == parameter_type:
+        component_spec.input_definitions.parameters[
+            input_name].default_value.number_value = default_value
+    elif pipeline_spec_pb2.ParameterType.STRING == parameter_type:
+        component_spec.input_definitions.parameters[
+            input_name].default_value.string_value = default_value
+    elif pipeline_spec_pb2.ParameterType.BOOLEAN == parameter_type:
+        component_spec.input_definitions.parameters[
+            input_name].default_value.bool_value = default_value
+    elif pipeline_spec_pb2.ParameterType.STRUCT == parameter_type:
+        component_spec.input_definitions.parameters[
+            input_name].default_value.CopyFrom(
+                _to_protobuf_value(default_value))
+    elif pipeline_spec_pb2.ParameterType.LIST == parameter_type:
+        component_spec.input_definitions.parameters[
+            input_name].default_value.CopyFrom(
+                _to_protobuf_value(default_value))
+
+
+def build_component_spec_for_group(
+    pipeline_channels: List[pipeline_channel.PipelineChannel],
+    is_root_group: bool,
+) -> pipeline_spec_pb2.ComponentSpec:
+    """Builds ComponentSpec for a TasksGroup.
+
+    Args:
+        pipeline_channels: The list of pipeline channels referenced by the
+            group.
+        is_root_group: Whether the group is the root dag.
+
+    Returns:
+        A ComponentSpec object representing the group.
+    """
+    component_spec = pipeline_spec_pb2.ComponentSpec()
+
+    for channel in pipeline_channels:
+
+        input_name = (
+            channel.name if is_root_group else
+            _additional_input_name_for_pipeline_channel(channel))
+
+        if isinstance(channel, pipeline_channel.PipelineArtifactChannel):
+            component_spec.input_definitions.artifacts[
+                input_name].artifact_type.CopyFrom(
+                    type_utils.get_artifact_type_schema(channel.channel_type))
+        else:
+            # channel is one of PipelineParameterChannel, LoopArgument, or
+            # LoopArgumentVariable.
+            component_spec.input_definitions.parameters[
+                input_name].parameter_type = type_utils.get_parameter_type(
+                    channel.channel_type)
+
+            # TODO: should we fill in default value for all groups and tasks?
+            if is_root_group:
+                _fill_in_component_input_default_value(
+                    component_spec=component_spec,
+                    input_name=input_name,
+                    default_value=channel.value,
+                )
+
+    return component_spec
+
+
+def _pop_input_from_task_spec(
+    task_spec: pipeline_spec_pb2.PipelineTaskSpec,
+    input_name: str,
+) -> None:
+    """Removes an input from task spec inputs.
+
+    Args:
+        task_spec: The pipeline task spec to update in place.
+        input_name: The name of the input, which could be an artifact or
+            parameter.
+    """
+    task_spec.inputs.artifacts.pop(input_name)
+    task_spec.inputs.parameters.pop(input_name)
+
+    if task_spec.inputs == pipeline_spec_pb2.TaskInputsSpec():
+        task_spec.ClearField('inputs')
+
+
+def _update_task_spec_for_loop_group(
+    group: tasks_group.ParallelFor,
+    pipeline_task_spec: pipeline_spec_pb2.PipelineTaskSpec,
+) -> None:
+    """Updates PipelineTaskSpec for loop group.
+
+    Args:
+        group: The loop group to update task spec for.
+        pipeline_task_spec: The pipeline task spec to update in place.
+    """
+    if group.items_is_pipeline_channel:
+        loop_items_channel = group.loop_argument.items_or_pipeline_channel
+        input_parameter_name = _additional_input_name_for_pipeline_channel(
+            loop_items_channel)
+        loop_argument_item_name = _additional_input_name_for_pipeline_channel(
+            group.loop_argument.full_name)
+
+        loop_arguments_item = '{}-{}'.format(
+            input_parameter_name, for_loop.LoopArgument.LOOP_ITEM_NAME_BASE)
+        assert loop_arguments_item == loop_argument_item_name
+
+        pipeline_task_spec.parameter_iterator.items.input_parameter = (
+            input_parameter_name)
+        pipeline_task_spec.parameter_iterator.item_input = (
+            loop_argument_item_name)
+
+        # If the loop items channel is itself a LoopArgumentVariable, handle
+        # the subvar name.
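+        # Illustrative case (hypothetical pipeline, not from this PR's tests):
+        #
+        #   with dsl.ParallelFor([{'a': [1, 2]}, {'a': [3, 4]}]) as item:
+        #       with dsl.ParallelFor(item.a) as inner_item:
+        #           ...
+        #
+        # The inner loop iterates over subvar 'a' of the outer loop's item, so
+        # its input needs the selector parseJson(string_value)["a"] to pull
+        # the list out of the serialized item.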
+ if isinstance(loop_items_channel, for_loop.LoopArgumentVariable): + pipeline_task_spec.inputs.parameters[ + input_parameter_name].parameter_expression_selector = ( + 'parseJson(string_value)["{}"]'.format( + loop_items_channel.subvar_name)) + pipeline_task_spec.inputs.parameters[ + input_parameter_name].component_input_parameter = ( + _additional_input_name_for_pipeline_channel( + loop_items_channel.loop_argument)) + + remove_input_name = loop_argument_item_name + else: + input_parameter_name = _additional_input_name_for_pipeline_channel( + group.loop_argument) + raw_values = group.loop_argument.items_or_pipeline_channel + + pipeline_task_spec.parameter_iterator.items.raw = json.dumps( + raw_values, sort_keys=True) + pipeline_task_spec.parameter_iterator.item_input = ( + input_parameter_name) + + _pop_input_from_task_spec( + task_spec=pipeline_task_spec, + input_name=pipeline_task_spec.parameter_iterator.item_input) + + +def _resolve_condition_operands( + left_operand: Union[str, pipeline_channel.PipelineChannel], + right_operand: Union[str, pipeline_channel.PipelineChannel], +) -> Tuple[str, str]: + """Resolves values and PipelineChannels for condition operands. + + Args: + left_operand: The left operand of a condition expression. + right_operand: The right operand of a condition expression. + + Returns: + A tuple of the resolved operands values: + (left_operand_value, right_operand_value). + """ + + # Pre-scan the operand to get the type of constant value if there's any. + # The value_type can be used to backfill missing PipelineChannel.channel_type. + value_type = None + for value_or_reference in [left_operand, right_operand]: + if isinstance(value_or_reference, pipeline_channel.PipelineChannel): + parameter_type = type_utils.get_parameter_type( + value_or_reference.channel_type) + if parameter_type in [ + pipeline_spec_pb2.ParameterType.STRUCT, + pipeline_spec_pb2.ParameterType.LIST, + pipeline_spec_pb2.ParameterType + .PARAMETER_TYPE_ENUM_UNSPECIFIED, + ]: + input_name = _additional_input_name_for_pipeline_channel( + value_or_reference) + raise ValueError('Conditional requires scalar parameter values' + ' for comparison. Found input "{}" of type {}' + ' in pipeline definition instead.'.format( + input_name, + value_or_reference.channel_type)) + parameter_types = set() + for value_or_reference in [left_operand, right_operand]: + if isinstance(value_or_reference, pipeline_channel.PipelineChannel): + parameter_type = type_utils.get_parameter_type( + value_or_reference.channel_type) + else: + parameter_type = type_utils.get_parameter_type( + type(value_or_reference).__name__) + + parameter_types.add(parameter_type) + + if len(parameter_types) == 2: + # Two different types being compared. The only possible types are + # String, Boolean, Double and Integer. We'll promote the other type + # using the following precedence: + # String > Boolean > Double > Integer + if pipeline_spec_pb2.ParameterType.STRING in parameter_types: + canonical_parameter_type = pipeline_spec_pb2.ParameterType.STRING + elif pipeline_spec_pb2.ParameterType.BOOLEAN in parameter_types: + canonical_parameter_type = pipeline_spec_pb2.ParameterType.BOOLEAN + else: + # Must be a double and int, promote to double. 
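+            # e.g. comparing an Integer task output against the constant 1.5
+            # promotes the comparison to NUMBER_DOUBLE, and the int operand
+            # is then wrapped with double(...) further below.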
+            assert pipeline_spec_pb2.ParameterType.NUMBER_DOUBLE in parameter_types, \
+                'Types: {} [{} {}]'.format(
+                    parameter_types, left_operand, right_operand)
+            assert pipeline_spec_pb2.ParameterType.NUMBER_INTEGER in parameter_types, \
+                'Types: {} [{} {}]'.format(
+                    parameter_types, left_operand, right_operand)
+            canonical_parameter_type = pipeline_spec_pb2.ParameterType.NUMBER_DOUBLE
+    elif len(parameter_types) == 1:  # Both operands are the same type.
+        canonical_parameter_type = parameter_types.pop()
+    else:
+        # Probably shouldn't happen.
+        raise ValueError('Unable to determine operand types for'
+                         ' "{}" and "{}"'.format(left_operand, right_operand))
+
+    operand_values = []
+    for value_or_reference in [left_operand, right_operand]:
+        if isinstance(value_or_reference, pipeline_channel.PipelineChannel):
+            input_name = _additional_input_name_for_pipeline_channel(
+                value_or_reference)
+            operand_value = "inputs.parameter_values['{input_name}']".format(
+                input_name=input_name)
+            parameter_type = type_utils.get_parameter_type(
+                value_or_reference.channel_type)
+            if parameter_type == pipeline_spec_pb2.ParameterType.NUMBER_INTEGER:
+                operand_value = 'int({})'.format(operand_value)
+        elif isinstance(value_or_reference, str):
+            operand_value = "'{}'".format(value_or_reference)
+            parameter_type = pipeline_spec_pb2.ParameterType.STRING
+        elif isinstance(value_or_reference, bool):
+            # Booleans need to be compared as 'true' or 'false' in CEL.
+            operand_value = str(value_or_reference).lower()
+            parameter_type = pipeline_spec_pb2.ParameterType.BOOLEAN
+        elif isinstance(value_or_reference, int):
+            operand_value = str(value_or_reference)
+            parameter_type = pipeline_spec_pb2.ParameterType.NUMBER_INTEGER
+        else:
+            assert isinstance(value_or_reference, float), value_or_reference
+            operand_value = str(value_or_reference)
+            parameter_type = pipeline_spec_pb2.ParameterType.NUMBER_DOUBLE
+
+        if parameter_type != canonical_parameter_type:
+            # Type-cast so CEL does not complain.
+            if canonical_parameter_type == pipeline_spec_pb2.ParameterType.STRING:
+                assert parameter_type in [
+                    pipeline_spec_pb2.ParameterType.BOOLEAN,
+                    pipeline_spec_pb2.ParameterType.NUMBER_INTEGER,
+                    pipeline_spec_pb2.ParameterType.NUMBER_DOUBLE,
+                ]
+                operand_value = "'{}'".format(operand_value)
+            elif canonical_parameter_type == pipeline_spec_pb2.ParameterType.BOOLEAN:
+                assert parameter_type in [
+                    pipeline_spec_pb2.ParameterType.NUMBER_INTEGER,
+                    pipeline_spec_pb2.ParameterType.NUMBER_DOUBLE,
+                ]
+                # A number is truthy when it is non-zero.
+                operand_value = 'true' if int(operand_value) != 0 else 'false'
+            else:
+                assert canonical_parameter_type == pipeline_spec_pb2.ParameterType.NUMBER_DOUBLE
+                assert parameter_type == pipeline_spec_pb2.ParameterType.NUMBER_INTEGER
+                operand_value = 'double({})'.format(operand_value)
+
+        operand_values.append(operand_value)
+
+    return tuple(operand_values)
+
+
+def _update_task_spec_for_condition_group(
+    group: tasks_group.Condition,
+    pipeline_task_spec: pipeline_spec_pb2.PipelineTaskSpec,
+) -> None:
+    """Updates PipelineTaskSpec for condition group.
+
+    Args:
+        group: The condition group to update task spec for.
+        pipeline_task_spec: The pipeline task spec to update in place.
+    """
+    left_operand_value, right_operand_value = _resolve_condition_operands(
+        group.condition.left_operand, group.condition.right_operand)
+
+    condition_string = (
+        f'{left_operand_value} {group.condition.operator} {right_operand_value}'
+    )
+    pipeline_task_spec.trigger_policy.CopyFrom(
+        pipeline_spec_pb2.PipelineTaskSpec.TriggerPolicy(
+            condition=condition_string))
+
+
+def build_task_spec_for_exit_task(
+    task: pipeline_task.PipelineTask,
+    dependent_task: str,
+    pipeline_inputs: pipeline_spec_pb2.ComponentInputsSpec,
+) -> pipeline_spec_pb2.PipelineTaskSpec:
+    """Builds PipelineTaskSpec for an exit handler's exit task.
+
+    Args:
+        task: The exit handler's exit task to build task spec for.
+        dependent_task: The dependent task name for the exit task, i.e. the
+            name of the exit handler group.
+        pipeline_inputs: The pipeline level input definitions.
+
+    Returns:
+        A PipelineTaskSpec object representing the exit task.
+    """
+    pipeline_task_spec = build_task_spec_for_task(
+        task=task,
+        parent_component_inputs=pipeline_inputs,
+        tasks_in_current_dag=[],  # Does not matter for exit task
+        input_parameters_in_current_dag=pipeline_inputs.parameters.keys(),
+        input_artifacts_in_current_dag=[],
+    )
+    pipeline_task_spec.dependent_tasks.extend([dependent_task])
+    pipeline_task_spec.trigger_policy.strategy = (
+        pipeline_spec_pb2.PipelineTaskSpec.TriggerPolicy.TriggerStrategy
+        .ALL_UPSTREAM_TASKS_COMPLETED)
+
+    return pipeline_task_spec
+
+
+def build_task_spec_for_group(
+    group: tasks_group.TasksGroup,
+    pipeline_channels: List[pipeline_channel.PipelineChannel],
+    tasks_in_current_dag: List[str],
+    is_parent_component_root: bool,
+) -> pipeline_spec_pb2.PipelineTaskSpec:
+    """Builds PipelineTaskSpec for a group.
+
+    Args:
+        group: The group to build PipelineTaskSpec for.
+        pipeline_channels: The list of pipeline channels referenced by the
+            group.
+        tasks_in_current_dag: The list of task names for tasks in the same
+            dag.
+        is_parent_component_root: Whether the parent component is the
+            pipeline's root dag.
+
+    Returns:
+        A PipelineTaskSpec object representing the group.
+ """ + pipeline_task_spec = pipeline_spec_pb2.PipelineTaskSpec() + pipeline_task_spec.task_info.name = group.name + pipeline_task_spec.component_ref.name = ( + component_utils.sanitize_component_name(group.name)) + + for channel in pipeline_channels: + + channel_full_name = channel.full_name + subvar_name = None + if isinstance(channel, for_loop.LoopArgumentVariable): + channel_full_name = channel.loop_argument.full_name + subvar_name = channel.subvar_name + + input_name = _additional_input_name_for_pipeline_channel(channel) + + channel_name = channel.name + if subvar_name: + pipeline_task_spec.inputs.parameters[ + input_name].parameter_expression_selector = ( + 'parseJson(string_value)["{}"]'.format(subvar_name)) + if not channel.is_with_items_loop_argument: + channel_name = channel.items_or_pipeline_channel.name + + if isinstance(channel, pipeline_channel.PipelineArtifactChannel): + if channel.task_name and channel.task_name in tasks_in_current_dag: + pipeline_task_spec.inputs.artifacts[ + input_name].task_output_artifact.producer_task = ( + component_utils.sanitize_task_name(channel.task_name)) + pipeline_task_spec.inputs.artifacts[ + input_name].task_output_artifact.output_artifact_key = ( + channel_name) + else: + pipeline_task_spec.inputs.artifacts[ + input_name].component_input_artifact = ( + channel_full_name + if is_parent_component_root else input_name) + else: + # channel is one of PipelineParameterChannel, LoopArgument, or + # LoopArgumentVariable + if channel.task_name and channel.task_name in tasks_in_current_dag: + pipeline_task_spec.inputs.parameters[ + input_name].task_output_parameter.producer_task = ( + component_utils.sanitize_task_name(channel.task_name)) + pipeline_task_spec.inputs.parameters[ + input_name].task_output_parameter.output_parameter_key = ( + channel_name) + else: + pipeline_task_spec.inputs.parameters[ + input_name].component_input_parameter = ( + channel_full_name if is_parent_component_root else + _additional_input_name_for_pipeline_channel( + channel_full_name)) + + if isinstance(group, tasks_group.ParallelFor): + _update_task_spec_for_loop_group( + group=group, + pipeline_task_spec=pipeline_task_spec, + ) + elif isinstance(group, tasks_group.Condition): + _update_task_spec_for_condition_group( + group=group, + pipeline_task_spec=pipeline_task_spec, + ) + + return pipeline_task_spec + + +def populate_metrics_in_dag_outputs( + tasks: List[pipeline_task.PipelineTask], + task_name_to_parent_groups: Mapping[str, List[_GroupOrTask]], + task_name_to_task_spec: Mapping[str, pipeline_spec_pb2.PipelineTaskSpec], + task_name_to_component_spec: Mapping[str, pipeline_spec_pb2.ComponentSpec], + pipeline_spec: pipeline_spec_pb2.PipelineSpec, +) -> None: + """Populates metrics artifacts in DAG outputs. + + Args: + tasks: The list of tasks that may produce metrics outputs. + task_name_to_parent_groups: The dict of task name to parent groups. + Key is the task's name. Value is a list of ancestor groups including + the task itself. The list of a given op is sorted in a way that the + farthest group is the first and the task itself is the last. + task_name_to_task_spec: The dict of task name to PipelineTaskSpec. + task_name_to_component_spec: The dict of task name to ComponentSpec. + pipeline_spec: The pipeline_spec to update in-place. + """ + for task in tasks: + task_spec = task_name_to_task_spec[task.name] + component_spec = task_name_to_component_spec[task.name] + + # Get the tuple of (component_name, task_name) of all its parent groups. 
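+        # For a metrics-producing task nested as root > condition-1 > task,
+        # this ends up (illustratively) as:
+        #   [('comp-condition-1', 'condition-1'), ('_root', '')]
+        # i.e. the nearest enclosing group comes first once reversed below.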
+        parent_components_and_tasks = [('_root', '')]
+        # Skip the task itself and the root group, which cannot be retrieved
+        # via name.
+        for group_name in task_name_to_parent_groups[task.name][1:-1]:
+            parent_components_and_tasks.append(
+                (component_utils.sanitize_component_name(group_name),
+                 component_utils.sanitize_task_name(group_name)))
+        # Reverse the order so that the farthest group comes last.
+        parent_components_and_tasks.reverse()
+
+        for output_name, artifact_spec in \
+            component_spec.output_definitions.artifacts.items():
+
+            if artifact_spec.artifact_type.WhichOneof(
+                    'kind'
+            ) == 'schema_title' and artifact_spec.artifact_type.schema_title in [
+                    artifact_types.Metrics.TYPE_NAME,
+                    artifact_types.ClassificationMetrics.TYPE_NAME,
+            ]:
+                unique_output_name = '{}-{}'.format(task.name, output_name)
+
+                sub_task_name = task.name
+                sub_task_output = output_name
+                for component_name, task_name in parent_components_and_tasks:
+                    group_component_spec = (
+                        pipeline_spec.root if component_name == '_root' else
+                        pipeline_spec.components[component_name])
+                    group_component_spec.output_definitions.artifacts[
+                        unique_output_name].CopyFrom(artifact_spec)
+                    group_component_spec.dag.outputs.artifacts[
+                        unique_output_name].artifact_selectors.append(
+                            pipeline_spec_pb2.DagOutputsSpec
+                            .ArtifactSelectorSpec(
+                                producer_subtask=sub_task_name,
+                                output_artifact_key=sub_task_output,
+                            ))
+                    sub_task_name = task_name
+                    sub_task_output = unique_output_name
diff --git a/sdk/python/kfp/v2/compiler/experimental/pipeline_spec_builder_test.py b/sdk/python/kfp/v2/compiler/experimental/pipeline_spec_builder_test.py
new file mode 100644
index 00000000000..7dc459641d3
--- /dev/null
+++ b/sdk/python/kfp/v2/compiler/experimental/pipeline_spec_builder_test.py
@@ -0,0 +1,168 @@
+# Copyright 2021 The Kubeflow Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Tests for kfp.v2.compiler.experimental.pipeline_spec_builder.""" + +import unittest + +from absl.testing import parameterized +from google.protobuf import json_format +from google.protobuf import struct_pb2 +from kfp.pipeline_spec import pipeline_spec_pb2 +from kfp.v2.compiler.experimental import pipeline_spec_builder +from kfp.v2.components.experimental import pipeline_channel +from kfp.v2.components.experimental import structures + + +class PipelineSpecBuilderTest(parameterized.TestCase): + + def setUp(self): + self.maxDiff = None + + @parameterized.parameters( + { + 'channel': + pipeline_channel.PipelineParameterChannel( + name='output1', task_name='task1', channel_type='String'), + 'expected': + 'pipelinechannel--task1-output1', + }, + { + 'channel': + pipeline_channel.PipelineArtifactChannel( + name='output1', task_name='task1', channel_type='Artifact'), + 'expected': + 'pipelinechannel--task1-output1', + }, + { + 'channel': + pipeline_channel.PipelineParameterChannel( + name='param1', channel_type='String'), + 'expected': + 'pipelinechannel--param1', + }, + ) + def test_additional_input_name_for_pipeline_channel(self, channel, + expected): + self.assertEqual( + expected, + pipeline_spec_builder._additional_input_name_for_pipeline_channel( + channel)) + + @parameterized.parameters( + { + 'parameter_type': pipeline_spec_pb2.ParameterType.NUMBER_INTEGER, + 'default_value': None, + 'expected': struct_pb2.Value(), + }, + { + 'parameter_type': pipeline_spec_pb2.ParameterType.NUMBER_INTEGER, + 'default_value': 1, + 'expected': struct_pb2.Value(number_value=1), + }, + { + 'parameter_type': pipeline_spec_pb2.ParameterType.NUMBER_DOUBLE, + 'default_value': 1.2, + 'expected': struct_pb2.Value(number_value=1.2), + }, + { + 'parameter_type': pipeline_spec_pb2.ParameterType.STRING, + 'default_value': 'text', + 'expected': struct_pb2.Value(string_value='text'), + }, + { + 'parameter_type': pipeline_spec_pb2.ParameterType.BOOLEAN, + 'default_value': True, + 'expected': struct_pb2.Value(bool_value=True), + }, + { + 'parameter_type': pipeline_spec_pb2.ParameterType.BOOLEAN, + 'default_value': False, + 'expected': struct_pb2.Value(bool_value=False), + }, + { + 'parameter_type': + pipeline_spec_pb2.ParameterType.STRUCT, + 'default_value': { + 'a': 1, + 'b': 2, + }, + 'expected': + struct_pb2.Value( + struct_value=struct_pb2.Struct( + fields={ + 'a': struct_pb2.Value(number_value=1), + 'b': struct_pb2.Value(number_value=2), + })), + }, + { + 'parameter_type': + pipeline_spec_pb2.ParameterType.LIST, + 'default_value': ['a', 'b'], + 'expected': + struct_pb2.Value( + list_value=struct_pb2.ListValue(values=[ + struct_pb2.Value(string_value='a'), + struct_pb2.Value(string_value='b'), + ])), + }, + { + 'parameter_type': + pipeline_spec_pb2.ParameterType.LIST, + 'default_value': [{ + 'a': 1, + 'b': 2 + }, { + 'a': 10, + 'b': 20 + }], + 'expected': + struct_pb2.Value( + list_value=struct_pb2.ListValue(values=[ + struct_pb2.Value( + struct_value=struct_pb2.Struct( + fields={ + 'a': struct_pb2.Value(number_value=1), + 'b': struct_pb2.Value(number_value=2), + })), + struct_pb2.Value( + struct_value=struct_pb2.Struct( + fields={ + 'a': struct_pb2.Value(number_value=10), + 'b': struct_pb2.Value(number_value=20), + })), + ])), + }, + ) + def test_fill_in_component_input_default_value(self, parameter_type, + default_value, expected): + component_spec = pipeline_spec_pb2.ComponentSpec( + input_definitions=pipeline_spec_pb2.ComponentInputsSpec( + parameters={ + 'input1': + 
pipeline_spec_pb2.ComponentInputsSpec.ParameterSpec( + parameter_type=parameter_type) + })) + pipeline_spec_builder._fill_in_component_input_default_value( + component_spec=component_spec, + input_name='input1', + default_value=default_value) + + self.assertEqual( + expected, + component_spec.input_definitions.parameters['input1'].default_value, + ) + + +if __name__ == '__main__': + unittest.main() diff --git a/sdk/python/kfp/v2/compiler/main.py b/sdk/python/kfp/v2/compiler/main.py index d02223b475c..b80f1bb00c3 100644 --- a/sdk/python/kfp/v2/compiler/main.py +++ b/sdk/python/kfp/v2/compiler/main.py @@ -20,7 +20,8 @@ from typing import Any, Callable, List, Mapping, Optional import kfp.dsl as dsl -import kfp.v2.compiler as compiler +from kfp.v2 import compiler +from kfp.v2.compiler.experimental import compiler as experimental_compiler def parse_arguments() -> argparse.Namespace: @@ -51,15 +52,24 @@ def parse_arguments() -> argparse.Namespace: '--disable-type-check', action='store_true', help='disable the type check, default is enabled.') + parser.add_argument( + '--use-experimental', + action='store_true', + help='Whether to use the experimental compiler. This is a temporary flag.' + ) args = parser.parse_args() return args -def _compile_pipeline_function(pipeline_funcs: List[Callable], - function_name: Optional[str], - pipeline_parameters: Optional[Mapping[str, Any]], - package_path: str, type_check: bool) -> None: +def _compile_pipeline_function( + pipeline_funcs: List[Callable], + function_name: Optional[str], + pipeline_parameters: Optional[Mapping[str, Any]], + package_path: str, + type_check: bool, + use_experimental: bool, +) -> None: """Compiles a pipeline function. Args: @@ -91,11 +101,19 @@ def _compile_pipeline_function(pipeline_funcs: List[Callable], else: pipeline_func = pipeline_funcs[0] - compiler.Compiler().compile( - pipeline_func=pipeline_func, - pipeline_parameters=pipeline_parameters, - package_path=package_path, - type_check=type_check) + if use_experimental: + experimental_compiler.Compiler().compile( + pipeline_func=pipeline_func, + pipeline_parameters=pipeline_parameters, + package_path=package_path, + type_check=type_check) + + else: + compiler.Compiler().compile( + pipeline_func=pipeline_func, + pipeline_parameters=pipeline_parameters, + package_path=package_path, + type_check=type_check) class PipelineCollectorContext(): @@ -115,9 +133,14 @@ def __exit__(self, *args): dsl._pipeline._pipeline_decorator_handler = self.old_handler -def compile_pyfile(pyfile: str, function_name: Optional[str], - pipeline_parameters: Optional[Mapping[str, Any]], - package_path: str, type_check: bool) -> None: +def compile_pyfile( + pyfile: str, + function_name: Optional[str], + pipeline_parameters: Optional[Mapping[str, Any]], + package_path: str, + type_check: bool, + use_experimental: bool, +) -> None: """Compiles a pipeline written in a .py file. 
Args: @@ -137,7 +160,9 @@ def compile_pyfile(pyfile: str, function_name: Optional[str], function_name=function_name, pipeline_parameters=pipeline_parameters, package_path=package_path, - type_check=type_check) + type_check=type_check, + use_experimental=use_experimental, + ) finally: del sys.path[0] @@ -152,4 +177,5 @@ def main(): pipeline_parameters=args.pipeline_parameters, package_path=args.output, type_check=not args.disable_type_check, + use_experimental=args.use_experimental, ) diff --git a/sdk/python/kfp/v2/compiler_cli_tests/compiler_cli_tests.py b/sdk/python/kfp/v2/compiler_cli_tests/compiler_cli_tests.py index 0af13e89ee7..a5523f86803 100644 --- a/sdk/python/kfp/v2/compiler_cli_tests/compiler_cli_tests.py +++ b/sdk/python/kfp/v2/compiler_cli_tests/compiler_cli_tests.py @@ -21,21 +21,22 @@ import unittest -def _ignore_kfp_version_helper(file): +def _ignore_kfp_version_helper(spec): """Ignores kfp sdk versioning in command. Takes in a JSON input and ignores the kfp sdk versioning in command for comparison between compiled file and goldens. """ - if 'executors' in file['pipelineSpec']['deploymentSpec']: - for executor in file['pipelineSpec']['deploymentSpec']['executors']: - file['pipelineSpec']['deploymentSpec']['executors'][ - executor] = json.loads( - re.sub( - "'kfp==(\d+).(\d+).(\d+)'", 'kfp', - json.dumps(file['pipelineSpec']['deploymentSpec'] - ['executors'][executor]))) - return file + pipeline_spec = spec['pipelineSpec'] if 'pipelineSpec' in spec else spec + + if 'executors' in pipeline_spec['deploymentSpec']: + for executor in pipeline_spec['deploymentSpec']['executors']: + pipeline_spec['deploymentSpec']['executors'][executor] = json.loads( + re.sub( + "'kfp==(\d+).(\d+).(\d+)'", 'kfp', + json.dumps(pipeline_spec['deploymentSpec']['executors'] + [executor]))) + return spec class CompilerCliTests(unittest.TestCase): @@ -44,13 +45,23 @@ def setUp(self) -> None: self.maxDiff = None return super().setUp() - def _test_compile_py_to_json(self, file_base_name, additional_arguments=[]): + def _test_compile_py_to_json( + self, + file_base_name, + additional_arguments=None, + use_experimental=False, + ): test_data_dir = os.path.join(os.path.dirname(__file__), 'test_data') py_file = os.path.join(test_data_dir, '{}.py'.format(file_base_name)) tmpdir = tempfile.mkdtemp() golden_compiled_file = os.path.join(test_data_dir, file_base_name + '.json') + if additional_arguments is None: + additional_arguments = [] + if use_experimental: + additional_arguments.append('--use-experimental') + def _compile(target_output_file: str): subprocess.check_call([ 'dsl-compile-v2', '--py', py_file, '--output', @@ -61,7 +72,9 @@ def _load_compiled_file(filename: str): with open(filename, 'r') as f: contents = json.load(f) # Correct the sdkVersion - del contents['pipelineSpec']['sdkVersion'] + pipeline_spec = contents[ + 'pipelineSpec'] if 'pipelineSpec' in contents else contents + del pipeline_spec['sdkVersion'] return _ignore_kfp_version_helper(contents) try: @@ -90,6 +103,14 @@ def test_two_step_pipeline(self): 'two_step_pipeline', ['--pipeline-parameters', '{"text":"Hello KFP!"}']) + def test_two_step_pipeline_experimental(self): + self._test_compile_py_to_json( + 'experimental_two_step_pipeline', [ + '--pipeline-parameters', + '{"text":"Hello KFP!"}', + ], + use_experimental=True) + def test_pipeline_with_importer(self): self._test_compile_py_to_json('pipeline_with_importer') @@ -123,9 +144,18 @@ def test_pipeline_with_nested_conditions(self): def test_pipeline_with_nested_conditions_yaml(self): 
self._test_compile_py_to_json('pipeline_with_nested_conditions_yaml') + def test_pipeline_with_nested_conditions_yaml_experimental(self): + self._test_compile_py_to_json( + 'experimental_pipeline_with_nested_conditions_yaml', + use_experimental=True) + def test_pipeline_with_loops(self): self._test_compile_py_to_json('pipeline_with_loops') + def test_pipeline_with_loops_experimental(self): + self._test_compile_py_to_json( + 'experimental_pipeline_with_loops', use_experimental=True) + def test_pipeline_with_nested_loops(self): self._test_compile_py_to_json('pipeline_with_nested_loops') @@ -155,6 +185,10 @@ def test_pipeline_with_metrics_outputs(self): def test_pipeline_with_exit_handler(self): self._test_compile_py_to_json('pipeline_with_exit_handler') + def test_pipeline_with_exit_handler_experimental(self): + self._test_compile_py_to_json( + 'experimental_pipeline_with_exit_handler', use_experimental=True) + def test_pipeline_with_env(self): self._test_compile_py_to_json('pipeline_with_env') @@ -162,7 +196,8 @@ def test_v2_component_with_optional_inputs(self): self._test_compile_py_to_json('v2_component_with_optional_inputs') def test_experimental_v2_component(self): - self._test_compile_py_to_json('experimental_v2_component') + self._test_compile_py_to_json( + 'experimental_v2_component', use_experimental=True) def test_pipeline_with_gcpc_types(self): self._test_compile_py_to_json('pipeline_with_gcpc_types') diff --git a/sdk/python/kfp/v2/compiler_cli_tests/test_data/experimental_pipeline_with_exit_handler.json b/sdk/python/kfp/v2/compiler_cli_tests/test_data/experimental_pipeline_with_exit_handler.json new file mode 100644 index 00000000000..28249e636d0 --- /dev/null +++ b/sdk/python/kfp/v2/compiler_cli_tests/test_data/experimental_pipeline_with_exit_handler.json @@ -0,0 +1,182 @@ +{ + "components": { + "comp-exit-handler-1": { + "dag": { + "tasks": { + "fail-op": { + "cachingOptions": { + "enableCache": true + }, + "componentRef": { + "name": "comp-fail-op" + }, + "inputs": { + "parameters": { + "msg": { + "runtimeValue": { + "constant": "Task failed." 
+ } + } + } + }, + "taskInfo": { + "name": "fail-op" + } + }, + "print-op-2": { + "cachingOptions": { + "enableCache": true + }, + "componentRef": { + "name": "comp-print-op-2" + }, + "inputs": { + "parameters": { + "msg": { + "componentInputParameter": "pipelinechannel--message" + } + } + }, + "taskInfo": { + "name": "print-op-2" + } + } + } + }, + "inputDefinitions": { + "parameters": { + "pipelinechannel--message": { + "parameterType": "STRING" + } + } + } + }, + "comp-fail-op": { + "executorLabel": "exec-fail-op", + "inputDefinitions": { + "parameters": { + "msg": { + "parameterType": "STRING" + } + } + } + }, + "comp-print-op": { + "executorLabel": "exec-print-op", + "inputDefinitions": { + "parameters": { + "msg": { + "parameterType": "STRING" + } + } + } + }, + "comp-print-op-2": { + "executorLabel": "exec-print-op-2", + "inputDefinitions": { + "parameters": { + "msg": { + "parameterType": "STRING" + } + } + } + } + }, + "deploymentSpec": { + "executors": { + "exec-fail-op": { + "container": { + "command": [ + "sh", + "-c", + "set -e -x\necho \"$0\"\nexit 1\n", + "{{$.inputs.parameters['msg']}}" + ], + "image": "alpine" + } + }, + "exec-print-op": { + "container": { + "command": [ + "sh", + "-c", + "set -e -x\necho \"$0\"\n", + "{{$.inputs.parameters['msg']}}" + ], + "image": "alpine" + } + }, + "exec-print-op-2": { + "container": { + "command": [ + "sh", + "-c", + "set -e -x\necho \"$0\"\n", + "{{$.inputs.parameters['msg']}}" + ], + "image": "alpine" + } + } + } + }, + "pipelineInfo": { + "name": "pipeline-with-exit-handler" + }, + "root": { + "dag": { + "tasks": { + "exit-handler-1": { + "componentRef": { + "name": "comp-exit-handler-1" + }, + "inputs": { + "parameters": { + "pipelinechannel--message": { + "componentInputParameter": "message" + } + } + }, + "taskInfo": { + "name": "exit-handler-1" + } + }, + "print-op": { + "cachingOptions": { + "enableCache": true + }, + "componentRef": { + "name": "comp-print-op" + }, + "dependentTasks": [ + "exit-handler-1" + ], + "inputs": { + "parameters": { + "msg": { + "runtimeValue": { + "constant": "Exit handler has worked!" + } + } + } + }, + "taskInfo": { + "name": "print-op" + }, + "triggerPolicy": { + "strategy": "ALL_UPSTREAM_TASKS_COMPLETED" + } + } + } + }, + "inputDefinitions": { + "parameters": { + "message": { + "defaultValue": "Hello World!", + "parameterType": "STRING" + } + } + } + }, + "schemaVersion": "2.1.0", + "sdkVersion": "kfp-1.8.6" +} \ No newline at end of file diff --git a/sdk/python/kfp/v2/compiler_cli_tests/test_data/experimental_pipeline_with_exit_handler.py b/sdk/python/kfp/v2/compiler_cli_tests/test_data/experimental_pipeline_with_exit_handler.py new file mode 100644 index 00000000000..fe00e9240f5 --- /dev/null +++ b/sdk/python/kfp/v2/compiler_cli_tests/test_data/experimental_pipeline_with_exit_handler.py @@ -0,0 +1,67 @@ +# Copyright 2021 The Kubeflow Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
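+# Note how this pipeline maps to the compiled golden JSON above: the exit
+# task ("print-op") gets dependentTasks ["exit-handler-1"] and triggerPolicy
+# strategy ALL_UPSTREAM_TASKS_COMPLETED, which is what lets it run even
+# though "fail-op" inside the handler fails.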
+"""Pipeline using ExitHandler.""" + +from kfp.v2 import components +import kfp.v2.dsl.experimental as dsl +from kfp.v2.compiler.experimental import compiler + +print_op = components.load_component_from_text(""" +name: print op +inputs: +- {name: msg, type: String} +implementation: + container: + image: alpine + command: + - sh + - -c + - | + set -e -x + echo "$0" + - {inputValue: msg} +""") + +fail_op = components.load_component_from_text(""" +name: fail op +inputs: +- {name: msg, type: String} +implementation: + container: + image: alpine + command: + - sh + - -c + - | + set -e -x + echo "$0" + exit 1 + - {inputValue: msg} +""") + + +@dsl.pipeline(name='pipeline-with-exit-handler') +def my_pipeline(message: str = 'Hello World!'): + + exit_task = print_op(msg='Exit handler has worked!') + + with dsl.ExitHandler(exit_task): + print_op(msg=message) + fail_op(msg='Task failed.') + + +if __name__ == '__main__': + compiler.Compiler().compile( + pipeline_func=my_pipeline, + package_path=__file__.replace('.py', '.json')) diff --git a/sdk/python/kfp/v2/compiler_cli_tests/test_data/experimental_pipeline_with_loops.json b/sdk/python/kfp/v2/compiler_cli_tests/test_data/experimental_pipeline_with_loops.json new file mode 100644 index 00000000000..9a20d3dde4d --- /dev/null +++ b/sdk/python/kfp/v2/compiler_cli_tests/test_data/experimental_pipeline_with_loops.json @@ -0,0 +1,442 @@ +{ + "components": { + "comp-args-generator-op": { + "executorLabel": "exec-args-generator-op", + "outputDefinitions": { + "parameters": { + "output": { + "parameterType": "LIST" + } + } + } + }, + "comp-for-loop-1": { + "dag": { + "tasks": { + "print-op": { + "cachingOptions": { + "enableCache": true + }, + "componentRef": { + "name": "comp-print-op" + }, + "inputs": { + "parameters": { + "msg": { + "componentInputParameter": "pipelinechannel--loop_parameter-loop-item" + } + } + }, + "taskInfo": { + "name": "print-op" + } + } + } + }, + "inputDefinitions": { + "parameters": { + "pipelinechannel--loop_parameter": { + "parameterType": "LIST" + }, + "pipelinechannel--loop_parameter-loop-item": { + "parameterType": "STRING" + } + } + } + }, + "comp-for-loop-2": { + "dag": { + "tasks": { + "print-op-2": { + "cachingOptions": { + "enableCache": true + }, + "componentRef": { + "name": "comp-print-op-2" + }, + "inputs": { + "parameters": { + "msg": { + "componentInputParameter": "pipelinechannel--args-generator-op-output-loop-item", + "parameterExpressionSelector": "parseJson(string_value)[\"A_a\"]" + } + } + }, + "taskInfo": { + "name": "print-op-2" + } + }, + "print-op-3": { + "cachingOptions": { + "enableCache": true + }, + "componentRef": { + "name": "comp-print-op-3" + }, + "inputs": { + "parameters": { + "msg": { + "componentInputParameter": "pipelinechannel--args-generator-op-output-loop-item", + "parameterExpressionSelector": "parseJson(string_value)[\"B_b\"]" + } + } + }, + "taskInfo": { + "name": "print-op-3" + } + }, + "print-struct": { + "cachingOptions": { + "enableCache": true + }, + "componentRef": { + "name": "comp-print-struct" + }, + "inputs": { + "parameters": { + "msg": { + "componentInputParameter": "pipelinechannel--args-generator-op-output-loop-item" + } + } + }, + "taskInfo": { + "name": "print-struct" + } + } + } + }, + "inputDefinitions": { + "parameters": { + "pipelinechannel--args-generator-op-output": { + "parameterType": "LIST" + }, + "pipelinechannel--args-generator-op-output-loop-item": { + "parameterType": "STRUCT" + } + } + } + }, + "comp-for-loop-4": { + "dag": { + "tasks": { + "print-op-4": { + 
"cachingOptions": { + "enableCache": true + }, + "componentRef": { + "name": "comp-print-op-4" + }, + "inputs": { + "parameters": { + "msg": { + "componentInputParameter": "pipelinechannel--loop-item-param-3", + "parameterExpressionSelector": "parseJson(string_value)[\"A_a\"]" + } + } + }, + "taskInfo": { + "name": "print-op-4" + } + }, + "print-op-5": { + "cachingOptions": { + "enableCache": true + }, + "componentRef": { + "name": "comp-print-op-5" + }, + "inputs": { + "parameters": { + "msg": { + "componentInputParameter": "pipelinechannel--loop-item-param-3", + "parameterExpressionSelector": "parseJson(string_value)[\"B_b\"]" + } + } + }, + "taskInfo": { + "name": "print-op-5" + } + }, + "print-struct-2": { + "cachingOptions": { + "enableCache": true + }, + "componentRef": { + "name": "comp-print-struct-2" + }, + "inputs": { + "parameters": { + "msg": { + "componentInputParameter": "pipelinechannel--loop-item-param-3" + } + } + }, + "taskInfo": { + "name": "print-struct-2" + } + } + } + }, + "inputDefinitions": { + "parameters": { + "pipelinechannel--loop-item-param-3": { + "parameterType": "STRUCT" + } + } + } + }, + "comp-print-op": { + "executorLabel": "exec-print-op", + "inputDefinitions": { + "parameters": { + "msg": { + "parameterType": "STRING" + } + } + } + }, + "comp-print-op-2": { + "executorLabel": "exec-print-op-2", + "inputDefinitions": { + "parameters": { + "msg": { + "parameterType": "STRING" + } + } + } + }, + "comp-print-op-3": { + "executorLabel": "exec-print-op-3", + "inputDefinitions": { + "parameters": { + "msg": { + "parameterType": "STRING" + } + } + } + }, + "comp-print-op-4": { + "executorLabel": "exec-print-op-4", + "inputDefinitions": { + "parameters": { + "msg": { + "parameterType": "STRING" + } + } + } + }, + "comp-print-op-5": { + "executorLabel": "exec-print-op-5", + "inputDefinitions": { + "parameters": { + "msg": { + "parameterType": "STRING" + } + } + } + }, + "comp-print-struct": { + "executorLabel": "exec-print-struct", + "inputDefinitions": { + "parameters": { + "msg": { + "parameterType": "STRUCT" + } + } + } + }, + "comp-print-struct-2": { + "executorLabel": "exec-print-struct-2", + "inputDefinitions": { + "parameters": { + "msg": { + "parameterType": "STRUCT" + } + } + } + } + }, + "deploymentSpec": { + "executors": { + "exec-args-generator-op": { + "container": { + "command": [ + "sh", + "-c", + "set -e -x\necho \"[{'A_a': '1', 'B_b': '2'}, {'A_a': '10', 'B_b': '20'}]\" > \"$0\"\n", + "{{$.outputs.parameters['output'].output_file}}" + ], + "image": "alpine" + } + }, + "exec-print-op": { + "container": { + "command": [ + "sh", + "-c", + "set -e -x\necho \"$0\"\n", + "{{$.inputs.parameters['msg']}}" + ], + "image": "alpine" + } + }, + "exec-print-op-2": { + "container": { + "command": [ + "sh", + "-c", + "set -e -x\necho \"$0\"\n", + "{{$.inputs.parameters['msg']}}" + ], + "image": "alpine" + } + }, + "exec-print-op-3": { + "container": { + "command": [ + "sh", + "-c", + "set -e -x\necho \"$0\"\n", + "{{$.inputs.parameters['msg']}}" + ], + "image": "alpine" + } + }, + "exec-print-op-4": { + "container": { + "command": [ + "sh", + "-c", + "set -e -x\necho \"$0\"\n", + "{{$.inputs.parameters['msg']}}" + ], + "image": "alpine" + } + }, + "exec-print-op-5": { + "container": { + "command": [ + "sh", + "-c", + "set -e -x\necho \"$0\"\n", + "{{$.inputs.parameters['msg']}}" + ], + "image": "alpine" + } + }, + "exec-print-struct": { + "container": { + "command": [ + "sh", + "-c", + "set -e -x\necho \"$0\"\n", + "{{$.inputs.parameters['msg']}}" + ], + 
"image": "alpine" + } + }, + "exec-print-struct-2": { + "container": { + "command": [ + "sh", + "-c", + "set -e -x\necho \"$0\"\n", + "{{$.inputs.parameters['msg']}}" + ], + "image": "alpine" + } + } + } + }, + "pipelineInfo": { + "name": "pipeline-with-loops" + }, + "root": { + "dag": { + "tasks": { + "args-generator-op": { + "cachingOptions": { + "enableCache": true + }, + "componentRef": { + "name": "comp-args-generator-op" + }, + "taskInfo": { + "name": "args-generator-op" + } + }, + "for-loop-1": { + "componentRef": { + "name": "comp-for-loop-1" + }, + "inputs": { + "parameters": { + "pipelinechannel--loop_parameter": { + "componentInputParameter": "loop_parameter" + } + } + }, + "parameterIterator": { + "itemInput": "pipelinechannel--loop_parameter-loop-item", + "items": { + "inputParameter": "pipelinechannel--loop_parameter" + } + }, + "taskInfo": { + "name": "for-loop-1" + } + }, + "for-loop-2": { + "componentRef": { + "name": "comp-for-loop-2" + }, + "dependentTasks": [ + "args-generator-op" + ], + "inputs": { + "parameters": { + "pipelinechannel--args-generator-op-output": { + "taskOutputParameter": { + "outputParameterKey": "output", + "producerTask": "args-generator-op" + } + } + } + }, + "parameterIterator": { + "itemInput": "pipelinechannel--args-generator-op-output-loop-item", + "items": { + "inputParameter": "pipelinechannel--args-generator-op-output" + } + }, + "taskInfo": { + "name": "for-loop-2" + } + }, + "for-loop-4": { + "componentRef": { + "name": "comp-for-loop-4" + }, + "parameterIterator": { + "itemInput": "pipelinechannel--loop-item-param-3", + "items": { + "raw": "[{\"A_a\": \"1\", \"B_b\": \"2\"}, {\"A_a\": \"10\", \"B_b\": \"20\"}]" + } + }, + "taskInfo": { + "name": "for-loop-4" + } + } + } + }, + "inputDefinitions": { + "parameters": { + "loop_parameter": { + "parameterType": "LIST" + } + } + } + }, + "schemaVersion": "2.1.0", + "sdkVersion": "kfp-1.8.6" +} \ No newline at end of file diff --git a/sdk/python/kfp/v2/compiler_cli_tests/test_data/experimental_pipeline_with_loops.py b/sdk/python/kfp/v2/compiler_cli_tests/test_data/experimental_pipeline_with_loops.py new file mode 100644 index 00000000000..c8fe6acdc2a --- /dev/null +++ b/sdk/python/kfp/v2/compiler_cli_tests/test_data/experimental_pipeline_with_loops.py @@ -0,0 +1,103 @@ +# Copyright 2021 The Kubeflow Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from typing import List + +from kfp.v2 import components +import kfp.v2.dsl.experimental as dsl +from kfp.v2.compiler.experimental import compiler + +# @component +# def args_generator_op() -> List[str]: +# return [{'A_a': '1', 'B_b': '2'}, {'A_a': '10', 'B_b': '20'}] + +args_generator_op = components.load_component_from_text(""" +name: Args generator op +outputs: +- {name: output, type: "List[Dict[str, str]]"} +implementation: + container: + image: alpine + command: + - sh + - -c + - | + set -e -x + echo "[{'A_a': '1', 'B_b': '2'}, {'A_a': '10', 'B_b': '20'}]" > "$0" + - {outputPath: output} +""") + +# @component +# def print_op(msg: str): +# print(msg) + +print_op = components.load_component_from_text(""" +name: print op +inputs: +- {name: msg, type: String} +implementation: + container: + image: alpine + command: + - sh + - -c + - | + set -e -x + echo "$0" + - {inputValue: msg} +""") + +print_struct = components.load_component_from_text(""" +name: print struct +inputs: +- {name: msg, type: Dict} +implementation: + container: + image: alpine + command: + - sh + - -c + - | + set -e -x + echo "$0" + - {inputValue: msg} +""") + + +@dsl.pipeline(name='pipeline-with-loops') +def my_pipeline(loop_parameter: List[str]): + + # Loop argument is from a pipeline input + with dsl.ParallelFor(loop_parameter) as item: + print_op(msg=item) + + # Loop argument is from a component output + args_generator = args_generator_op() + with dsl.ParallelFor(args_generator.output) as item: + print_struct(msg=item) + print_op(msg=item.A_a) + print_op(msg=item.B_b) + + # Loop argument is a static value known at compile time + loop_args = [{'A_a': '1', 'B_b': '2'}, {'A_a': '10', 'B_b': '20'}] + with dsl.ParallelFor(loop_args) as item: + print_struct(msg=item) + print_op(msg=item.A_a) + print_op(msg=item.B_b) + + +if __name__ == '__main__': + compiler.Compiler().compile( + pipeline_func=my_pipeline, + package_path=__file__.replace('.py', '.json')) diff --git a/sdk/python/kfp/v2/compiler_cli_tests/test_data/experimental_pipeline_with_nested_conditions_yaml.json b/sdk/python/kfp/v2/compiler_cli_tests/test_data/experimental_pipeline_with_nested_conditions_yaml.json new file mode 100644 index 00000000000..59c34e2b35a --- /dev/null +++ b/sdk/python/kfp/v2/compiler_cli_tests/test_data/experimental_pipeline_with_nested_conditions_yaml.json @@ -0,0 +1,539 @@ +{ + "components": { + "comp-condition-1": { + "dag": { + "tasks": { + "condition-2": { + "componentRef": { + "name": "comp-condition-2" + }, + "dependentTasks": [ + "generate-random-number" + ], + "inputs": { + "parameters": { + "pipelinechannel--flip-coin-output": { + "componentInputParameter": "pipelinechannel--flip-coin-output" + }, + "pipelinechannel--generate-random-number-output": { + "taskOutputParameter": { + "outputParameterKey": "output", + "producerTask": "generate-random-number" + } + } + } + }, + "taskInfo": { + "name": "condition-2" + }, + "triggerPolicy": { + "condition": "int(inputs.parameter_values['pipelinechannel--generate-random-number-output']) > 5" + } + }, + "condition-3": { + "componentRef": { + "name": "comp-condition-3" + }, + "dependentTasks": [ + "generate-random-number" + ], + "inputs": { + "parameters": { + "pipelinechannel--flip-coin-output": { + "componentInputParameter": "pipelinechannel--flip-coin-output" + }, + "pipelinechannel--generate-random-number-output": { + "taskOutputParameter": { + "outputParameterKey": "output", + "producerTask": "generate-random-number" + } + } + } + }, + "taskInfo": { + "name": "condition-3" + }, + 
"triggerPolicy": { + "condition": "int(inputs.parameter_values['pipelinechannel--generate-random-number-output']) <= 5" + } + }, + "generate-random-number": { + "cachingOptions": { + "enableCache": true + }, + "componentRef": { + "name": "comp-generate-random-number" + }, + "taskInfo": { + "name": "generate-random-number" + } + } + } + }, + "inputDefinitions": { + "parameters": { + "pipelinechannel--flip-coin-output": { + "parameterType": "STRING" + } + } + } + }, + "comp-condition-2": { + "dag": { + "tasks": { + "print": { + "cachingOptions": { + "enableCache": true + }, + "componentRef": { + "name": "comp-print" + }, + "inputs": { + "parameters": { + "msg": { + "runtimeValue": { + "constant": "heads and {{$.inputs.parameters['pipelinechannel--generate-random-number-output']}} > 5!" + } + }, + "pipelinechannel--generate-random-number-output": { + "componentInputParameter": "pipelinechannel--generate-random-number-output" + } + } + }, + "taskInfo": { + "name": "print" + } + } + } + }, + "inputDefinitions": { + "parameters": { + "pipelinechannel--flip-coin-output": { + "parameterType": "STRING" + }, + "pipelinechannel--generate-random-number-output": { + "parameterType": "NUMBER_INTEGER" + } + } + } + }, + "comp-condition-3": { + "dag": { + "tasks": { + "print-2": { + "cachingOptions": { + "enableCache": true + }, + "componentRef": { + "name": "comp-print-2" + }, + "inputs": { + "parameters": { + "msg": { + "runtimeValue": { + "constant": "heads and {{$.inputs.parameters['pipelinechannel--generate-random-number-output']}} <= 5!" + } + }, + "pipelinechannel--generate-random-number-output": { + "componentInputParameter": "pipelinechannel--generate-random-number-output" + } + } + }, + "taskInfo": { + "name": "print-2" + } + } + } + }, + "inputDefinitions": { + "parameters": { + "pipelinechannel--flip-coin-output": { + "parameterType": "STRING" + }, + "pipelinechannel--generate-random-number-output": { + "parameterType": "NUMBER_INTEGER" + } + } + } + }, + "comp-condition-4": { + "dag": { + "tasks": { + "condition-5": { + "componentRef": { + "name": "comp-condition-5" + }, + "dependentTasks": [ + "generate-random-number-2" + ], + "inputs": { + "parameters": { + "pipelinechannel--flip-coin-output": { + "componentInputParameter": "pipelinechannel--flip-coin-output" + }, + "pipelinechannel--generate-random-number-2-output": { + "taskOutputParameter": { + "outputParameterKey": "output", + "producerTask": "generate-random-number-2" + } + } + } + }, + "taskInfo": { + "name": "condition-5" + }, + "triggerPolicy": { + "condition": "int(inputs.parameter_values['pipelinechannel--generate-random-number-2-output']) > 15" + } + }, + "condition-6": { + "componentRef": { + "name": "comp-condition-6" + }, + "dependentTasks": [ + "generate-random-number-2" + ], + "inputs": { + "parameters": { + "pipelinechannel--flip-coin-output": { + "componentInputParameter": "pipelinechannel--flip-coin-output" + }, + "pipelinechannel--generate-random-number-2-output": { + "taskOutputParameter": { + "outputParameterKey": "output", + "producerTask": "generate-random-number-2" + } + } + } + }, + "taskInfo": { + "name": "condition-6" + }, + "triggerPolicy": { + "condition": "int(inputs.parameter_values['pipelinechannel--generate-random-number-2-output']) <= 15" + } + }, + "generate-random-number-2": { + "cachingOptions": { + "enableCache": true + }, + "componentRef": { + "name": "comp-generate-random-number-2" + }, + "taskInfo": { + "name": "generate-random-number-2" + } + } + } + }, + "inputDefinitions": { + "parameters": { + 
"pipelinechannel--flip-coin-output": { + "parameterType": "STRING" + } + } + } + }, + "comp-condition-5": { + "dag": { + "tasks": { + "print-3": { + "cachingOptions": { + "enableCache": true + }, + "componentRef": { + "name": "comp-print-3" + }, + "inputs": { + "parameters": { + "msg": { + "runtimeValue": { + "constant": "tails and {{$.inputs.parameters['pipelinechannel--generate-random-number-2-output']}} > 15!" + } + }, + "pipelinechannel--generate-random-number-2-output": { + "componentInputParameter": "pipelinechannel--generate-random-number-2-output" + } + } + }, + "taskInfo": { + "name": "print-3" + } + } + } + }, + "inputDefinitions": { + "parameters": { + "pipelinechannel--flip-coin-output": { + "parameterType": "STRING" + }, + "pipelinechannel--generate-random-number-2-output": { + "parameterType": "NUMBER_INTEGER" + } + } + } + }, + "comp-condition-6": { + "dag": { + "tasks": { + "print-4": { + "cachingOptions": { + "enableCache": true + }, + "componentRef": { + "name": "comp-print-4" + }, + "inputs": { + "parameters": { + "msg": { + "runtimeValue": { + "constant": "tails and {{$.inputs.parameters['pipelinechannel--generate-random-number-2-output']}} <= 15!" + } + }, + "pipelinechannel--generate-random-number-2-output": { + "componentInputParameter": "pipelinechannel--generate-random-number-2-output" + } + } + }, + "taskInfo": { + "name": "print-4" + } + } + } + }, + "inputDefinitions": { + "parameters": { + "pipelinechannel--flip-coin-output": { + "parameterType": "STRING" + }, + "pipelinechannel--generate-random-number-2-output": { + "parameterType": "NUMBER_INTEGER" + } + } + } + }, + "comp-flip-coin": { + "executorLabel": "exec-flip-coin", + "outputDefinitions": { + "parameters": { + "output": { + "parameterType": "STRING" + } + } + } + }, + "comp-generate-random-number": { + "executorLabel": "exec-generate-random-number", + "outputDefinitions": { + "parameters": { + "output": { + "parameterType": "NUMBER_INTEGER" + } + } + } + }, + "comp-generate-random-number-2": { + "executorLabel": "exec-generate-random-number-2", + "outputDefinitions": { + "parameters": { + "output": { + "parameterType": "NUMBER_INTEGER" + } + } + } + }, + "comp-print": { + "executorLabel": "exec-print", + "inputDefinitions": { + "parameters": { + "msg": { + "parameterType": "STRING" + } + } + } + }, + "comp-print-2": { + "executorLabel": "exec-print-2", + "inputDefinitions": { + "parameters": { + "msg": { + "parameterType": "STRING" + } + } + } + }, + "comp-print-3": { + "executorLabel": "exec-print-3", + "inputDefinitions": { + "parameters": { + "msg": { + "parameterType": "STRING" + } + } + } + }, + "comp-print-4": { + "executorLabel": "exec-print-4", + "inputDefinitions": { + "parameters": { + "msg": { + "parameterType": "STRING" + } + } + } + } + }, + "deploymentSpec": { + "executors": { + "exec-flip-coin": { + "container": { + "args": [ + "mkdir -p \"$(dirname $0)\" && python -c \"import random; result = 'heads' if random.randint(0,1) == 0 else 'tails'; print(result, end='')\" | tee $0", + "{{$.outputs.parameters['output'].output_file}}" + ], + "command": [ + "sh", + "-c" + ], + "image": "python:alpine3.6" + } + }, + "exec-generate-random-number": { + "container": { + "args": [ + "mkdir -p \"$(dirname $2)\" && python -c \"import random; print(random.randint($0, $1), end='')\" | tee $2", + "0", + "9", + "{{$.outputs.parameters['output'].output_file}}" + ], + "command": [ + "sh", + "-c" + ], + "image": "python:alpine3.6" + } + }, + "exec-generate-random-number-2": { + "container": { + "args": [ + 
"mkdir -p \"$(dirname $2)\" && python -c \"import random; print(random.randint($0, $1), end='')\" | tee $2", + "10", + "19", + "{{$.outputs.parameters['output'].output_file}}" + ], + "command": [ + "sh", + "-c" + ], + "image": "python:alpine3.6" + } + }, + "exec-print": { + "container": { + "command": [ + "echo", + "{{$.inputs.parameters['msg']}}" + ], + "image": "python:alpine3.6" + } + }, + "exec-print-2": { + "container": { + "command": [ + "echo", + "{{$.inputs.parameters['msg']}}" + ], + "image": "python:alpine3.6" + } + }, + "exec-print-3": { + "container": { + "command": [ + "echo", + "{{$.inputs.parameters['msg']}}" + ], + "image": "python:alpine3.6" + } + }, + "exec-print-4": { + "container": { + "command": [ + "echo", + "{{$.inputs.parameters['msg']}}" + ], + "image": "python:alpine3.6" + } + } + } + }, + "pipelineInfo": { + "name": "conditional-execution-pipeline" + }, + "root": { + "dag": { + "tasks": { + "condition-1": { + "componentRef": { + "name": "comp-condition-1" + }, + "dependentTasks": [ + "flip-coin" + ], + "inputs": { + "parameters": { + "pipelinechannel--flip-coin-output": { + "taskOutputParameter": { + "outputParameterKey": "output", + "producerTask": "flip-coin" + } + } + } + }, + "taskInfo": { + "name": "condition-1" + }, + "triggerPolicy": { + "condition": "inputs.parameter_values['pipelinechannel--flip-coin-output'] == 'heads'" + } + }, + "condition-4": { + "componentRef": { + "name": "comp-condition-4" + }, + "dependentTasks": [ + "flip-coin" + ], + "inputs": { + "parameters": { + "pipelinechannel--flip-coin-output": { + "taskOutputParameter": { + "outputParameterKey": "output", + "producerTask": "flip-coin" + } + } + } + }, + "taskInfo": { + "name": "condition-4" + }, + "triggerPolicy": { + "condition": "inputs.parameter_values['pipelinechannel--flip-coin-output'] == 'tails'" + } + }, + "flip-coin": { + "cachingOptions": { + "enableCache": true + }, + "componentRef": { + "name": "comp-flip-coin" + }, + "taskInfo": { + "name": "flip-coin" + } + } + } + } + }, + "schemaVersion": "2.1.0", + "sdkVersion": "kfp-1.8.6" +} \ No newline at end of file diff --git a/sdk/python/kfp/v2/compiler_cli_tests/test_data/experimental_pipeline_with_nested_conditions_yaml.py b/sdk/python/kfp/v2/compiler_cli_tests/test_data/experimental_pipeline_with_nested_conditions_yaml.py new file mode 100644 index 00000000000..2bbfd1ff504 --- /dev/null +++ b/sdk/python/kfp/v2/compiler_cli_tests/test_data/experimental_pipeline_with_nested_conditions_yaml.py @@ -0,0 +1,91 @@ +# Copyright 2021 The Kubeflow Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from kfp.v2 import components +import kfp.v2.dsl.experimental as dsl +from kfp.v2.compiler.experimental import compiler + + +def random_num_op(low, high): + """Generate a random number between low and high.""" + return components.load_component_from_text(""" + name: Generate random number + outputs: + - {name: output, type: Integer} + implementation: + container: + image: python:alpine3.6 + command: + - sh + - -c + args: + - mkdir -p "$(dirname $2)" && python -c "import random; print(random.randint($0, $1), end='')" | tee $2 + - "%s" + - "%s" + - {outputPath: output} + """ % (low, high)) + + +flip_coin_op = components.load_component_from_text(""" + name: Flip coin + outputs: + - {name: output, type: String} + implementation: + container: + image: python:alpine3.6 + command: + - sh + - -c + args: + - mkdir -p "$(dirname $0)" && python -c "import random; result = \'heads\' if random.randint(0,1) == 0 else \'tails\'; print(result, end='')" | tee $0 + - {outputPath: output} + """) + +print_op = components.load_component_from_text(""" + name: Print + inputs: + - {name: msg, type: String} + implementation: + container: + image: python:alpine3.6 + command: + - echo + - {inputValue: msg} + """) + + +@dsl.pipeline( + name='conditional-execution-pipeline', + description='Shows how to use dsl.Condition().') +def my_pipeline(): + flip = flip_coin_op() + with dsl.Condition(flip.output == 'heads'): + random_num_head = random_num_op(0, 9)() + with dsl.Condition(random_num_head.output > 5): + print_op(msg='heads and %s > 5!' % random_num_head.output) + with dsl.Condition(random_num_head.output <= 5): + print_op(msg='heads and %s <= 5!' % random_num_head.output) + + with dsl.Condition(flip.output == 'tails'): + random_num_tail = random_num_op(10, 19)() + with dsl.Condition(random_num_tail.output > 15): + print_op(msg='tails and %s > 15!' % random_num_tail.output) + with dsl.Condition(random_num_tail.output <= 15): + print_op(msg='tails and %s <= 15!' 
% random_num_tail.output) + + +if __name__ == '__main__': + compiler.Compiler().compile( + pipeline_func=my_pipeline, + package_path=__file__.replace('.py', '.json')) diff --git a/sdk/python/kfp/v2/compiler_cli_tests/test_data/experimental_two_step_pipeline.json b/sdk/python/kfp/v2/compiler_cli_tests/test_data/experimental_two_step_pipeline.json new file mode 100644 index 00000000000..aeb050a7a11 --- /dev/null +++ b/sdk/python/kfp/v2/compiler_cli_tests/test_data/experimental_two_step_pipeline.json @@ -0,0 +1,126 @@ +{ + "components": { + "comp-read-from-gcs": { + "executorLabel": "exec-read-from-gcs", + "inputDefinitions": { + "artifacts": { + "input_gcs_path": { + "artifactType": { + "schemaTitle": "system.Artifact", + "schemaVersion": "0.0.1" + } + } + } + } + }, + "comp-write-to-gcs": { + "executorLabel": "exec-write-to-gcs", + "inputDefinitions": { + "parameters": { + "text": { + "parameterType": "STRING" + } + } + }, + "outputDefinitions": { + "artifacts": { + "output_gcs_path": { + "artifactType": { + "schemaTitle": "system.Artifact", + "schemaVersion": "0.0.1" + } + } + } + } + } + }, + "defaultPipelineRoot": "dummy_root", + "deploymentSpec": { + "executors": { + "exec-read-from-gcs": { + "container": { + "command": [ + "sh", + "-c", + "set -e -x\ngsutil cat \"$0\"\n", + "{{$.inputs.artifacts['input_gcs_path'].uri}}" + ], + "image": "google/cloud-sdk:slim" + } + }, + "exec-write-to-gcs": { + "container": { + "command": [ + "sh", + "-c", + "set -e -x\necho \"$0\" | gsutil cp - \"$1\"\n", + "{{$.inputs.parameters['text']}}", + "{{$.outputs.artifacts['output_gcs_path'].uri}}" + ], + "image": "google/cloud-sdk:slim" + } + } + } + }, + "pipelineInfo": { + "name": "simple-two-step-pipeline" + }, + "root": { + "dag": { + "tasks": { + "read-from-gcs": { + "cachingOptions": { + "enableCache": true + }, + "componentRef": { + "name": "comp-read-from-gcs" + }, + "dependentTasks": [ + "write-to-gcs" + ], + "inputs": { + "artifacts": { + "input_gcs_path": { + "taskOutputArtifact": { + "outputArtifactKey": "output_gcs_path", + "producerTask": "write-to-gcs" + } + } + } + }, + "taskInfo": { + "name": "Consumer" + } + }, + "write-to-gcs": { + "cachingOptions": { + "enableCache": true + }, + "componentRef": { + "name": "comp-write-to-gcs" + }, + "inputs": { + "parameters": { + "text": { + "componentInputParameter": "text" + } + } + }, + "taskInfo": { + "name": "Producer" + } + } + } + }, + "inputDefinitions": { + "parameters": { + "text": { + "defaultValue": "Hello KFP!", + "parameterType": "STRING" + } + } + } + }, + "schemaVersion": "2.1.0", + "sdkVersion": "kfp-1.8.6" +} \ No newline at end of file diff --git a/sdk/python/kfp/v2/compiler_cli_tests/test_data/experimental_two_step_pipeline.py b/sdk/python/kfp/v2/compiler_cli_tests/test_data/experimental_two_step_pipeline.py new file mode 100644 index 00000000000..6573685f36b --- /dev/null +++ b/sdk/python/kfp/v2/compiler_cli_tests/test_data/experimental_two_step_pipeline.py @@ -0,0 +1,69 @@ +# Copyright 2020 The Kubeflow Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + +import pathlib + +from kfp.v2 import components +import kfp.v2.dsl.experimental as dsl +from kfp.v2.compiler.experimental import compiler + +component_op_1 = components.load_component_from_text(""" +name: Write to GCS +inputs: +- {name: text, type: String, description: 'Content to be written to GCS'} +outputs: +- {name: output_gcs_path, type: GCSPath, description: 'GCS file path'} +implementation: + container: + image: google/cloud-sdk:slim + command: + - sh + - -c + - | + set -e -x + echo "$0" | gsutil cp - "$1" + - {inputValue: text} + - {outputUri: output_gcs_path} +""") + +component_op_2 = components.load_component_from_text(""" +name: Read from GCS +inputs: +- {name: input_gcs_path, type: GCSPath, description: 'GCS file path'} +implementation: + container: + image: google/cloud-sdk:slim + command: + - sh + - -c + - | + set -e -x + gsutil cat "$0" + - {inputUri: input_gcs_path} +""") + + +@dsl.pipeline(name='simple-two-step-pipeline', pipeline_root='dummy_root') +def my_pipeline(text: str = 'Hello world!'): + component_1 = component_op_1(text=text).set_display_name('Producer') + component_2 = component_op_2( + input_gcs_path=component_1.outputs['output_gcs_path']) + component_2.set_display_name('Consumer') + + +if __name__ == '__main__': + compiler.Compiler().compile( + pipeline_func=my_pipeline, + pipeline_parameters={'text': 'Hello KFP!'}, + package_path=__file__.replace('.py', '.json')) diff --git a/sdk/python/kfp/v2/compiler_cli_tests/test_data/experimental_v2_component.json b/sdk/python/kfp/v2/compiler_cli_tests/test_data/experimental_v2_component.json index f4e83197b6e..ffe4e66548a 100644 --- a/sdk/python/kfp/v2/compiler_cli_tests/test_data/experimental_v2_component.json +++ b/sdk/python/kfp/v2/compiler_cli_tests/test_data/experimental_v2_component.json @@ -1,136 +1,128 @@ { - "pipelineSpec": { - "components": { - "comp-component-1": { - "executorLabel": "exec-component-1", - "inputDefinitions": { - "parameters": { - "input1": { - "type": "STRING" - } - } - }, - "outputDefinitions": { - "parameters": { - "output1": { - "type": "STRING" - } + "components": { + "comp-component-1": { + "executorLabel": "exec-component-1", + "inputDefinitions": { + "parameters": { + "input1": { + "parameterType": "STRING" } } }, - "comp-component-1-2": { - "executorLabel": "exec-component-1-2", - "inputDefinitions": { - "parameters": { - "input1": { - "type": "STRING" - } - } - }, - "outputDefinitions": { - "parameters": { - "output1": { - "type": "STRING" - } + "outputDefinitions": { + "parameters": { + "output1": { + "parameterType": "STRING" } } } }, - "deploymentSpec": { - "executors": { - "exec-component-1": { - "container": { - "command": [ - "sh", - "-c", - "set -ex\necho \"$0\" > \"$1\"", - "{{$.inputs.parameters['input1']}}", - "{{$.outputs.parameters['output1'].output_file}}" - ], - "image": "alpine" + "comp-component-1-2": { + "executorLabel": "exec-component-1-2", + "inputDefinitions": { + "parameters": { + "input1": { + "parameterType": "STRING" } - }, - "exec-component-1-2": { - "container": { - "command": [ - "sh", - "-c", - "set -ex\necho \"$0\" > \"$1\"", - "{{$.inputs.parameters['input1']}}", - "{{$.outputs.parameters['output1'].output_file}}" - ], - "image": "alpine" + } + }, + "outputDefinitions": { + "parameters": { + "output1": { + "parameterType": "STRING" } } } - }, - "pipelineInfo": { - "name": "experimental-v2-component" - }, - "root": { - "dag": { - "tasks": { - 
"component-1": { - "cachingOptions": { - "enableCache": true - }, - "componentRef": { - "name": "comp-component-1" - }, - "inputs": { - "parameters": { - "input1": { - "componentInputParameter": "text" - } + } + }, + "defaultPipelineRoot": "dummy_root", + "deploymentSpec": { + "executors": { + "exec-component-1": { + "container": { + "command": [ + "sh", + "-c", + "set -ex\necho \"$0\" > \"$1\"", + "{{$.inputs.parameters['input1']}}", + "{{$.outputs.parameters['output1'].output_file}}" + ], + "image": "alpine" + } + }, + "exec-component-1-2": { + "container": { + "command": [ + "sh", + "-c", + "set -ex\necho \"$0\" > \"$1\"", + "{{$.inputs.parameters['input1']}}", + "{{$.outputs.parameters['output1'].output_file}}" + ], + "image": "alpine" + } + } + } + }, + "pipelineInfo": { + "name": "experimental-v2-component" + }, + "root": { + "dag": { + "tasks": { + "component-1": { + "cachingOptions": { + "enableCache": true + }, + "componentRef": { + "name": "comp-component-1" + }, + "inputs": { + "parameters": { + "input1": { + "componentInputParameter": "text" } - }, - "taskInfo": { - "name": "component-1" } }, - "component-1-2": { - "cachingOptions": { - "enableCache": true - }, - "componentRef": { - "name": "comp-component-1-2" - }, - "dependentTasks": [ - "component-1" - ], - "inputs": { - "parameters": { - "input1": { - "taskOutputParameter": { - "outputParameterKey": "output1", - "producerTask": "component-1" - } + "taskInfo": { + "name": "component-1" + } + }, + "component-1-2": { + "cachingOptions": { + "enableCache": true + }, + "componentRef": { + "name": "comp-component-1-2" + }, + "dependentTasks": [ + "component-1" + ], + "inputs": { + "parameters": { + "input1": { + "taskOutputParameter": { + "outputParameterKey": "output1", + "producerTask": "component-1" } } - }, - "taskInfo": { - "name": "component-1-2" } - } - } - }, - "inputDefinitions": { - "parameters": { - "text": { - "type": "STRING" + }, + "taskInfo": { + "name": "component-1-2" } } } }, - "schemaVersion": "2.0.0", - "sdkVersion": "kfp-1.7.2" - }, - "runtimeConfig": { - "gcsOutputDirectory": "dummy_root", - "parameters": { - "text": { - "stringValue": "Hello world!" + "inputDefinitions": { + "parameters": { + "text": { + "defaultValue": "Hello world!", + "parameterType": "STRING" + } } } - } + }, + "schemaVersion": "2.1.0", + "sdkVersion": "kfp-1.8.6" } \ No newline at end of file diff --git a/sdk/python/kfp/v2/compiler_cli_tests/test_data/experimental_v2_component.py b/sdk/python/kfp/v2/compiler_cli_tests/test_data/experimental_v2_component.py index d0993c725f0..89913b62526 100644 --- a/sdk/python/kfp/v2/compiler_cli_tests/test_data/experimental_v2_component.py +++ b/sdk/python/kfp/v2/compiler_cli_tests/test_data/experimental_v2_component.py @@ -14,8 +14,8 @@ from kfp.v2.components.experimental import base_component from kfp.v2.components.experimental import structures -from kfp.v2 import dsl -from kfp.v2 import compiler +import kfp.v2.dsl.experimental as dsl +from kfp.v2.compiler.experimental import compiler class TestComponent(base_component.BaseComponent): diff --git a/sdk/python/kfp/v2/components/__init__.py b/sdk/python/kfp/v2/components/__init__.py index b4447dd5838..1059eb6b6f3 100644 --- a/sdk/python/kfp/v2/components/__init__.py +++ b/sdk/python/kfp/v2/components/__init__.py @@ -11,3 +11,5 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
+ +from kfp.v2.components.experimental.yaml_component import load_component_from_text diff --git a/sdk/python/kfp/v2/components/experimental/base_component.py b/sdk/python/kfp/v2/components/experimental/base_component.py index 41bd79ec824..35430b3cbb1 100644 --- a/sdk/python/kfp/v2/components/experimental/base_component.py +++ b/sdk/python/kfp/v2/components/experimental/base_component.py @@ -36,7 +36,7 @@ def __init__(self, component_spec: structures.ComponentSpec): self.component_spec = component_spec self.name = component_spec.name - self._component_inputs = set(self.component_spec.inputs.keys()) + self._component_inputs = set((self.component_spec.inputs or {}).keys()) def __call__(self, *args, **kwargs) -> pipeline_task.PipelineTask: """Creates a PipelineTask object. @@ -63,12 +63,12 @@ def __call__(self, *args, **kwargs) -> pipeline_task.PipelineTask: task_inputs[k] = v # Fill in default value if there was no user provided value - for name, input_spec in self.component_spec.inputs.items(): + for name, input_spec in (self.component_spec.inputs or {}).items(): if input_spec.default is not None and name not in task_inputs: task_inputs[name] = input_spec.default missing_arguments = [ - name for name in self.component_spec.inputs + name for name in (self.component_spec.inputs or {}) if name not in task_inputs ] if missing_arguments: diff --git a/sdk/python/kfp/v2/components/experimental/component_factory.py b/sdk/python/kfp/v2/components/experimental/component_factory.py new file mode 100644 index 00000000000..abf5ed28cbf --- /dev/null +++ b/sdk/python/kfp/v2/components/experimental/component_factory.py @@ -0,0 +1,443 @@ +# Copyright 2021 The Kubeflow Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +import dataclasses +import inspect +import itertools +import pathlib +import re +import textwrap +from typing import Callable, List, Optional, Tuple +import warnings + +import docstring_parser + +from kfp import components as v1_components +from kfp.components import _components, _data_passing +from kfp.v2.components.experimental import structures +from kfp.v2.components.types import artifact_types, type_annotations + +_DEFAULT_BASE_IMAGE = 'python:3.7' + + +@dataclasses.dataclass +class ComponentInfo(): + """A dataclass capturing registered v2 components. + + This will likely be subsumed/augmented with v2 BaseComponent. + """ + name: str + function_name: str + func: Callable + target_image: str + module_path: pathlib.Path + component_spec: structures.ComponentSpec + output_component_file: Optional[str] = None + base_image: str = _DEFAULT_BASE_IMAGE + + +# A map from function_name to components. This is always populated when a +# module containing KFP v2 components is loaded. Primarily used by KFP CLI +# component builder to package components in a file into containers. 
+REGISTERED_MODULES = None + + +def _python_function_name_to_component_name(name): + name_with_spaces = re.sub(' +', ' ', name.replace('_', ' ')).strip(' ') + return name_with_spaces[0].upper() + name_with_spaces[1:] + + +_INSTALL_PYTHON_PACKAGES_SCRIPT = ''' +if ! [ -x "$(command -v pip)" ]; then + python3 -m ensurepip || python3 -m ensurepip --user || apt-get install python3-pip +fi + +PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet \ + --no-warn-script-location {package_list} && "$0" "$@" +''' + + +def _get_packages_to_install_command( + package_list: Optional[List[str]] = None) -> List[str]: + result = [] + if package_list: + result = [ + 'sh', '-c', + _INSTALL_PYTHON_PACKAGES_SCRIPT.format(package_list=' '.join( + [repr(str(package)) for package in package_list])) + ] + return result + + +def _get_default_kfp_package_path() -> str: + import kfp + return 'kfp=={}'.format(kfp.__version__) + + +def _get_function_source_definition(func: Callable) -> str: + func_code = inspect.getsource(func) + + # Function might be defined in some indented scope (e.g. in another + # function). We need to handle this and properly dedent the function source + # code + func_code = textwrap.dedent(func_code) + func_code_lines = func_code.split('\n') + + # Removing possible decorators (can be multiline) until the function + # definition is found + func_code_lines = itertools.dropwhile(lambda x: not x.startswith('def'), + func_code_lines) + + if not func_code_lines: + raise ValueError( + 'Failed to dedent and clean up the source of function "{}". ' + 'It is probably not properly indented.'.format(func.__name__)) + + return '\n'.join(func_code_lines) + + +def _annotation_to_type_struct(annotation): + if not annotation or annotation == inspect.Parameter.empty: + return None + if hasattr(annotation, 'to_dict'): + annotation = annotation.to_dict() + if isinstance(annotation, dict): + return annotation + if isinstance(annotation, type): + type_struct = _data_passing.get_canonical_type_name_for_type(annotation) + if type_struct: + return type_struct + if issubclass(annotation, artifact_types.Artifact + ) and not annotation.TYPE_NAME.startswith('system.'): + # For artifact classes not under the `system` namespace, + # use its TYPE_NAME as-is. 
+ type_name = annotation.TYPE_NAME + else: + type_name = str(annotation.__name__) + elif hasattr( + annotation, '__forward_arg__' + ): # Handling typing.ForwardRef('Type_name') (the name was _ForwardRef in python 3.5-3.6) + type_name = str(annotation.__forward_arg__) + else: + type_name = str(annotation) + + # It's also possible to get the converter by type name + type_struct = _data_passing.get_canonical_type_name_for_type(type_name) + if type_struct: + return type_struct + return type_name + + +def _maybe_make_unique(name: str, names: List[str]): + if name not in names: + return name + + for i in range(2, 100): + unique_name = '{}_{}'.format(name, i) + if unique_name not in names: + return unique_name + + raise RuntimeError('Too many arguments with the name {}'.format(name)) + + +# TODO: switch to v2 structures +def extract_component_interface(func: Callable) -> structures.ComponentSpec: + single_output_name_const = 'Output' + + signature = inspect.signature(func) + parameters = list(signature.parameters.values()) + + parsed_docstring = docstring_parser.parse(inspect.getdoc(func)) + doc_dict = {p.arg_name: p.description for p in parsed_docstring.params} + + inputs = {} + outputs = {} + + input_names = set() + output_names = set() + for parameter in parameters: + parameter_type = type_annotations.maybe_strip_optional_from_annotation( + parameter.annotation) + passing_style = None + io_name = parameter.name + + if type_annotations.is_artifact_annotation(parameter_type): + # passing_style is either type_annotations.InputAnnotation or + # type_annotations.OutputAnnotation. + passing_style = type_annotations.get_io_artifact_annotation( + parameter_type) + + # parameter_type is type_annotations.Artifact or one of its subclasses. + parameter_type = type_annotations.get_io_artifact_class( + parameter_type) + if not issubclass(parameter_type, artifact_types.Artifact): + raise ValueError( + 'Input[T] and Output[T] are only supported when T is a ' + 'subclass of Artifact. Found `{} with type {}`'.format( + io_name, parameter_type)) + + if parameter.default is not inspect.Parameter.empty: + raise ValueError( + 'Default values for Input/Output artifacts are not supported.' + ) + # elif isinstance(parameter_type, + # (v1_components.InputPath, v1_components.OutputPath)): + # raise TypeError( + # 'In v2 components, please import the Python function' + # ' annotations `InputPath` and `OutputPath` from' + # ' package `kfp.v2.dsl` instead of `kfp.dsl`.') + elif isinstance( + parameter_type, + (type_annotations.InputPath, type_annotations.OutputPath)): + passing_style = type(parameter_type) + parameter_type = parameter_type.type + if parameter.default is not inspect.Parameter.empty and not ( + passing_style == type_annotations.InputPath and + parameter.default is None): + raise ValueError( + 'Path inputs only support default values of None. 
Default'
+                    ' values for outputs are not supported.')
+
+        type_struct = _annotation_to_type_struct(parameter_type)
+
+        if passing_style in [
+                type_annotations.OutputAnnotation, type_annotations.OutputPath
+        ]:
+            io_name = _maybe_make_unique(io_name, output_names)
+            output_names.add(io_name)
+            output_spec = structures.OutputSpec(
+                type=type_struct, description=doc_dict.get(parameter.name))
+            # output_spec._passing_style = passing_style
+            # output_spec._parameter_name = parameter.name
+            outputs[io_name] = output_spec
+        else:
+            io_name = _maybe_make_unique(io_name, input_names)
+            input_names.add(io_name)
+            input_spec = structures.InputSpec(
+                type=type_struct, description=doc_dict.get(parameter.name))
+            if parameter.default is not inspect.Parameter.empty:
+                # input_spec.optional = True
+                if parameter.default is not None:
+                    outer_type_name = list(type_struct.keys())[0] if isinstance(
+                        type_struct, dict) else type_struct
+                    try:
+                        input_spec.default = _data_passing.serialize_value(
+                            parameter.default, outer_type_name)
+                    except Exception as ex:
+                        warnings.warn(
+                            'Could not serialize the default value of the'
+                            ' parameter "{}". {}'.format(parameter.name, ex))
+            # input_spec._passing_style = passing_style
+            # input_spec._parameter_name = parameter.name
+            inputs[io_name] = input_spec
+
+    # Analyzing the return type annotations.
+    return_ann = signature.return_annotation
+    if hasattr(return_ann, '_fields'):  # NamedTuple
+        # Getting field type annotations.
+        # __annotations__ does not exist in python 3.5 and earlier
+        # _field_types does not exist in python 3.9 and later
+        field_annotations = getattr(return_ann,
+                                    '__annotations__', None) or getattr(
+                                        return_ann, '_field_types', None)
+        for field_name in return_ann._fields:
+            type_struct = None
+            if field_annotations:
+                type_struct = _annotation_to_type_struct(
+                    field_annotations.get(field_name, None))
+
+            output_name = _maybe_make_unique(field_name, output_names)
+            output_names.add(output_name)
+            output_spec = structures.OutputSpec(type=type_struct)
+            # output_spec._passing_style = None
+            # output_spec._return_tuple_field_name = field_name
+            outputs[output_name] = output_spec
+    # Deprecated dict-based way of declaring multiple outputs. Was only used by
+    # the @component decorator
+    elif isinstance(return_ann, dict):
+        warnings.warn(
+            "The ability to specify multiple outputs using the dict syntax"
+            " has been deprecated. It will be removed soon after release"
+            " 0.1.32. Please use typing.NamedTuple to declare multiple"
+            " outputs.")
+        for output_name, output_type_annotation in return_ann.items():
+            output_type_struct = _annotation_to_type_struct(
+                output_type_annotation)
+            output_spec = structures.OutputSpec(type=output_type_struct)
+            outputs[output_name] = output_spec
+    elif signature.return_annotation is not None and signature.return_annotation != inspect.Parameter.empty:
+        output_name = _maybe_make_unique(single_output_name_const, output_names)
+        # Fixes exotic, but possible collision:
+        #   `def func(output_path: OutputPath()) -> str: ...`
+        output_names.add(output_name)
+        type_struct = _annotation_to_type_struct(signature.return_annotation)
+        output_spec = structures.OutputSpec(type=type_struct,)
+        # output_spec._passing_style = None
+        outputs[output_name] = output_spec
+
+    # Component name and description are derived from the function's name and
+    # docstring. The name can be overridden by setting the func.__name__
+    # attribute (or the legacy func._component_human_name attribute). 
The + # description can be overridden by setting the func.__doc__ attribute (or + # the legacy func._component_description attribute). + component_name = getattr(func, '_component_human_name', + None) or _python_function_name_to_component_name( + func.__name__) + description = getattr(func, '_component_description', + None) or parsed_docstring.short_description + if description: + description = description.strip() + + component_spec = structures.ComponentSpec( + name=component_name, + description=description, + inputs=inputs if inputs else None, + outputs=outputs if outputs else None, + # Dummy implementation to bypass model validation. + implementation=structures.Implementation(), + ) + return component_spec + + +def _get_command_and_args_for_lightweight_component( + func: Callable) -> Tuple[List[str], List[str]]: + imports_source = [ + "import kfp", + "from kfp.v2 import dsl", + "from kfp.v2.dsl import *", + "from typing import *", + ] + + func_source = _get_function_source_definition(func) + source = textwrap.dedent(""" + {imports_source} + + {func_source}\n""").format( + imports_source='\n'.join(imports_source), func_source=func_source) + command = [ + 'sh', + '-ec', + textwrap.dedent('''\ + program_path=$(mktemp -d) + printf "%s" "$0" > "$program_path/ephemeral_component.py" + python3 -m kfp.v2.components.executor_main \ + --component_module_path \ + "$program_path/ephemeral_component.py" \ + "$@" + '''), + source, + ] + + args = [ + "--executor_input", + structures.ExecutorInputPlaceholder(), + "--function_to_execute", + func.__name__, + ] + + return command, args + + +def _get_command_and_args_for_containerized_component( + function_name: str) -> Tuple[List[str], List[str]]: + command = [ + 'python3', + '-m', + 'kfp.v2.components.executor_main', + ] + + args = [ + "--executor_input", + structures.ExecutorInputPlaceholder(), + "--function_to_execute", + function_name, + ] + return command, args + + +def create_component_from_func(func: Callable, + base_image: Optional[str] = None, + target_image: Optional[str] = None, + packages_to_install: List[str] = None, + output_component_file: Optional[str] = None, + install_kfp_package: bool = True, + kfp_package_path: Optional[str] = None): + """Implementation for the @component decorator. + + The decorator is defined under component_decorator.py. See the + decorator for the canonical documentation for this function. 
+ """ + packages_to_install = packages_to_install or [] + + if install_kfp_package and target_image is None: + if kfp_package_path is None: + kfp_package_path = _get_default_kfp_package_path() + packages_to_install.append(kfp_package_path) + + packages_to_install_command = _get_packages_to_install_command( + package_list=packages_to_install) + + command = [] + args = [] + if base_image is None: + base_image = _DEFAULT_BASE_IMAGE + + component_image = base_image + + if target_image: + component_image = target_image + command, args = _get_command_and_args_for_containerized_component( + function_name=func.__name__,) + else: + command, args = _get_command_and_args_for_lightweight_component( + func=func) + + component_spec = extract_component_interface(func) + component_spec.implementation = structures.ContainerImplementation( + container=structures.ContainerSpec( + image=component_image, + command=packages_to_install_command + command, + args=args, + )) + + module_path = pathlib.Path(inspect.getsourcefile(func)) + module_path.resolve() + + component_name = _python_function_name_to_component_name(func.__name__) + component_info = ComponentInfo( + name=component_name, + function_name=func.__name__, + func=func, + target_image=target_image, + module_path=module_path, + component_spec=component_spec, + output_component_file=output_component_file, + base_image=base_image) + + if REGISTERED_MODULES is not None: + REGISTERED_MODULES[component_name] = component_info + + if output_component_file: + component_spec.save(output_component_file) + + # TODO(KFPv2): Replace with v2 BaseComponent. + task_factory = _components._create_task_factory_from_component_spec( + component_spec) + + # TODO(KFPv2): Once this returns a BaseComponent, we should check for this + # in the Executor, and get the appropriate callable. For now, we'll look for + # this special attribute to hold the Python function in the task factory + # during runtime. 
+    setattr(task_factory, 'python_func', func)
+
+    return task_factory
diff --git a/sdk/python/kfp/v2/dsl/experimental/for_loop.py b/sdk/python/kfp/v2/components/experimental/for_loop.py
similarity index 94%
rename from sdk/python/kfp/v2/dsl/experimental/for_loop.py
rename to sdk/python/kfp/v2/components/experimental/for_loop.py
index 72dfafe3d10..a8bf62cd615 100644
--- a/sdk/python/kfp/v2/dsl/experimental/for_loop.py
+++ b/sdk/python/kfp/v2/components/experimental/for_loop.py
@@ -127,6 +127,8 @@ def __init__(
             items = list(items)
         self.items_or_pipeline_channel = items
+        self.is_with_items_loop_argument = not isinstance(
+            items, pipeline_channel.PipelineChannel)
         self._referenced_subvars: Dict[str, LoopArgumentVariable] = {}
 
         if isinstance(items, list) and isinstance(items[0], dict):
@@ -165,7 +167,7 @@ def from_pipeline_channel(
             items=channel,
             name_override=channel.name + '-' + cls.LOOP_ITEM_NAME_BASE,
             task_name=channel.task_name,
-            channel_type=_get_loop_item_type(channel.channel_type),
+            channel_type=_get_loop_item_type(channel.channel_type) or 'String',
         )
 
     @classmethod
@@ -244,9 +246,21 @@ def __init__(
                 subvar_name=subvar_name,
             ),
             task_name=loop_argument.task_name,
-            channel_type=_get_subvar_type(loop_argument.channel_type),
+            channel_type=_get_subvar_type(loop_argument.channel_type) or
+            'String',
         )
 
+    @property
+    def items_or_pipeline_channel(
+            self) -> Union[ItemList, pipeline_channel.PipelineChannel]:
+        """Returns the loop argument items."""
+        return self.loop_argument.items_or_pipeline_channel
+
+    @property
+    def is_with_items_loop_argument(self) -> bool:
+        """Whether the loop argument originates from raw items."""
+        return self.loop_argument.is_with_items_loop_argument
+
     def _subvar_name_is_legal(self, proposed_variable_name: str) -> bool:
         """Returns True if the subvar name is legal."""
         return re.match(self.LEGAL_SUBVAR_NAME_REGEX,
diff --git a/sdk/python/kfp/v2/dsl/experimental/for_loop_test.py b/sdk/python/kfp/v2/components/experimental/for_loop_test.py
similarity index 98%
rename from sdk/python/kfp/v2/dsl/experimental/for_loop_test.py
rename to sdk/python/kfp/v2/components/experimental/for_loop_test.py
index 2db03089e01..6ff780cc3c6 100644
--- a/sdk/python/kfp/v2/dsl/experimental/for_loop_test.py
+++ b/sdk/python/kfp/v2/components/experimental/for_loop_test.py
@@ -11,12 +11,12 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-"""Tests for kfp.v2.dsl.experimental.for_loop."""
+"""Tests for kfp.v2.components.experimental.for_loop."""
 
 import unittest
 
 from absl.testing import parameterized
 from kfp.v2.components.experimental import pipeline_channel
-from kfp.v2.dsl.experimental import for_loop
+from kfp.v2.components.experimental import for_loop
 
 
 class ForLoopTest(parameterized.TestCase):
diff --git a/sdk/python/kfp/v2/components/experimental/pipeline.py b/sdk/python/kfp/v2/components/experimental/pipeline.py
new file mode 100644
index 00000000000..69a6d29470f
--- /dev/null
+++ b/sdk/python/kfp/v2/components/experimental/pipeline.py
@@ -0,0 +1,148 @@
+# Copyright 2021 The Kubeflow Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Definition for Pipeline."""
+
+from kfp.v2.components.experimental import pipeline_task
+from kfp.v2.components.experimental import tasks_group
+from kfp.v2.components import utils
+
+
+class Pipeline:
+    """A pipeline contains a list of tasks.
+
+    This class is not supposed to be used by pipeline authors since pipeline
+    authors can use pipeline functions (decorated with @pipeline) to reference
+    their pipelines.
+    This class is useful for implementing a compiler. For example, the compiler
+    can use the following to get the pipeline object and its tasks:
+
+    Example:
+      ::
+
+        with Pipeline() as p:
+            pipeline_func(*args_list)
+
+        traverse(p.tasks)
+
+    Attributes:
+        name: The name of the pipeline.
+        tasks: A dict mapping task name to PipelineTask.
+        groups: The stack of TasksGroups, with the root group at index 0.
+    """
+
+    # _default_pipeline is set when the compiler runs "with Pipeline()"
+    _default_pipeline = None
+
+    @staticmethod
+    def get_default_pipeline():
+        """Gets the default pipeline."""
+        return Pipeline._default_pipeline
+
+    def __init__(self, name: str):
+        """Creates a new instance of Pipeline.
+
+        Args:
+            name: The name of the pipeline.
+        """
+        self.name = name
+        self.tasks = {}
+        # Add the root group.
+        self.groups = [
+            tasks_group.TasksGroup(
+                group_type=tasks_group.TasksGroupType.PIPELINE, name=name)
+        ]
+        self._group_id = 0
+
+    def __enter__(self):
+
+        if Pipeline._default_pipeline:
+            raise Exception('Nested pipelines are not allowed.')
+
+        Pipeline._default_pipeline = self
+
+        def register_task_and_generate_id(task: pipeline_task.PipelineTask):
+            return self.add_task(
+                task=task,
+                add_to_group=not getattr(task, 'is_exit_handler', False))
+
+        self._old_register_task_handler = (
+            pipeline_task.PipelineTask.register_task_handler)
+        pipeline_task.PipelineTask.register_task_handler = (
+            register_task_and_generate_id)
+        return self
+
+    def __exit__(self, *unused_args):
+
+        Pipeline._default_pipeline = None
+        pipeline_task.PipelineTask.register_task_handler = (
+            self._old_register_task_handler)
+
+    def add_task(
+        self,
+        task: pipeline_task.PipelineTask,
+        add_to_group: bool,
+    ) -> str:
+        """Adds a new task.
+
+        Args:
+            task: A PipelineTask instance.
+            add_to_group: Whether to add the task to the current group. Expect
+                True for all tasks except the exit handler.
+
+        Returns:
+            A unique task name.
+        """
+        # Sanitizing the task name.
+        # Technically this could be delayed to the compilation stage, but string
+        # serialization of PipelineChannels makes unsanitized names problematic.
+        task_name = utils.maybe_rename_for_k8s(task.component_spec.name)
+        # If there is an existing task with this name, then generate a new name.
+        task_name = utils.make_name_unique_by_adding_index(
+            task_name, list(self.tasks.keys()), '-')
+        if task_name == '':
+            task_name = utils.make_name_unique_by_adding_index(
+                'task', list(self.tasks.keys()), '-')
+
+        self.tasks[task_name] = task
+        if add_to_group:
+            self.groups[-1].tasks.append(task)
+
+        return task_name
+
+    def push_tasks_group(self, group: 'tasks_group.TasksGroup'):
+        """Pushes a TasksGroup into the stack.
+
+        Args:
+            group: A TasksGroup. Typically it is one of ExitHandler, Condition,
+                and ParallelFor.
+ """ + self.groups[-1].groups.append(group) + self.groups.append(group) + + def pop_tasks_group(self): + """Removes the current TasksGroup from the stack.""" + del self.groups[-1] + + def remove_task_from_groups(self, task: pipeline_task.PipelineTask): + """Removes a task from the pipeline. + + This is useful for excluding exit handler from the pipeline. + """ + for group in self.groups: + group.remove_task_recursive(task) + + def get_next_group_id(self) -> str: + """Gets the next id for a new group.""" + self._group_id += 1 + return str(self._group_id) diff --git a/sdk/python/kfp/v2/components/experimental/pipeline_channel.py b/sdk/python/kfp/v2/components/experimental/pipeline_channel.py index 19ad4b84dee..64377a6ccc1 100644 --- a/sdk/python/kfp/v2/components/experimental/pipeline_channel.py +++ b/sdk/python/kfp/v2/components/experimental/pipeline_channel.py @@ -19,7 +19,7 @@ from typing import Dict, List, Optional, Union from kfp.v2.components import utils -from kfp.v2.components.types import type_utils +from kfp.v2.components.types.experimental import type_utils @dataclasses.dataclass @@ -38,12 +38,10 @@ class ConditionOperator: # The string template used to generate the placeholder of a PipelineChannel. _PIPELINE_CHANNEL_PLACEHOLDER_TEMPLATE = ( - '{{channel:task=%s;name=%s;type=%s;}}' -) + '{{channel:task=%s;name=%s;type=%s;}}') # The regex for parsing PipelineChannel placeholders from a string. _PIPELINE_CHANNEL_PLACEHOLDER_REGEX = ( - r'{{channel:task=([\w\s_-]*);name=([\w\s_-]+);type=([\w\s{}":_-]*);}}' -) + r'{{channel:task=([\w\s_-]*);name=([\w\s_-]+);type=([\w\s{}":_-]*);}}') class PipelineChannel(abc.ABC): @@ -55,14 +53,14 @@ class PipelineChannel(abc.ABC): components. Attributes: - name: The name of the pipeline channel. - channel_type: The type of the pipeline channel. - task_name: The name of the task that produces the pipeline channel. - None means it is not produced by any task, so if None, either user - constructs it directly (for providing an immediate value), or it is a - pipeline function argument. - pattern: The serialized string regex pattern this pipeline channel created - from. + name: The name of the pipeline channel. + channel_type: The type of the pipeline channel. + task_name: The name of the task that produces the pipeline channel. + None means it is not produced by any task, so if None, either user + constructs it directly (for providing an immediate value), or it is + a pipeline function argument. + pattern: The serialized string regex pattern this pipeline channel + created from. """ @abc.abstractmethod @@ -75,21 +73,22 @@ def __init__( """Initializes a PipelineChannel instance. Args: - name: The name of the pipeline channel. - channel_type: The type of the pipeline channel. - task_name: Optional; The name of the task that produces the pipeline - channel. + name: The name of the pipeline channel. The name will be sanitized + to be k8s compatible. + channel_type: The type of the pipeline channel. + task_name: Optional; The name of the task that produces the pipeline + channel. If provided, the task name will be sanitized to be k8s + compatible. Raises: - ValueError: If name or task_name contains invalid characters. - ValueError: If both task_name and value are set. + ValueError: If name or task_name contains invalid characters. + ValueError: If both task_name and value are set. 
""" valid_name_regex = r'^[A-Za-z][A-Za-z0-9\s_-]*$' if not re.match(valid_name_regex, name): raise ValueError( 'Only letters, numbers, spaces, "_", and "-" are allowed in the ' - 'name. Must begin with a letter. Got name: {}'.format(name) - ) + 'name. Must begin with a letter. Got name: {}'.format(name)) self.name = name self.channel_type = channel_type @@ -123,9 +122,8 @@ def __str__(self) -> str: channel_type = self.channel_type or '' if isinstance(channel_type, dict): channel_type = json.dumps(channel_type) - return _PIPELINE_CHANNEL_PLACEHOLDER_TEMPLATE % ( - task_name, name, channel_type - ) + return _PIPELINE_CHANNEL_PLACEHOLDER_TEMPLATE % (task_name, name, + channel_type) def __repr__(self) -> str: """Representation of the PipelineChannel. @@ -251,9 +249,41 @@ def __init__( ) +def create_pipeline_channel( + name: str, + channel_type: Union[str, Dict], + task_name: Optional[str], + value: Optional[type_utils.PARAMETER_TYPES] = None, +) -> PipelineChannel: + """Creates a PipelineChannel object. + + Args: + name: The name of the channel. + channel_type: The type of the channel, which decides whether it is an + PipelineParameterChannel or PipelineArtifactChannel + task_name: Optional; the task that produced the channel. + value: Optional; the realized value for a channel. + + Returns: + A PipelineParameterChannel or PipelineArtifactChannel object. + """ + if type_utils.is_parameter_type(channel_type): + return PipelineParameterChannel( + name=name, + channel_type=channel_type, + task_name=task_name, + value=value, + ) + else: + return PipelineArtifactChannel( + name=name, + channel_type=channel_type, + task_name=task_name, + ) + + def extract_pipeline_channels_from_string( - payload: str -) -> List[PipelineChannel]: + payload: str) -> List[PipelineChannel]: """Extracts a list of PipelineChannel instances from the payload string. Note: this function removes all duplicate matches. @@ -280,15 +310,15 @@ def extract_pipeline_channels_from_string( if type_utils.is_parameter_type(channel_type): pipeline_channel = PipelineParameterChannel( - name=utils.maybe_rename_for_k8s(name), + name=name, channel_type=channel_type, - task_name=utils.maybe_rename_for_k8s(task_name), + task_name=task_name, ) else: pipeline_channel = PipelineArtifactChannel( - name=utils.maybe_rename_for_k8s(name), + name=name, channel_type=channel_type, - task_name=utils.maybe_rename_for_k8s(task_name), + task_name=task_name, ) unique_channels.add(pipeline_channel) diff --git a/sdk/python/kfp/v2/components/experimental/pipeline_task.py b/sdk/python/kfp/v2/components/experimental/pipeline_task.py index 55d9bf10762..44b777a0790 100644 --- a/sdk/python/kfp/v2/components/experimental/pipeline_task.py +++ b/sdk/python/kfp/v2/components/experimental/pipeline_task.py @@ -15,25 +15,20 @@ import re import copy -from typing import Any, List, Mapping, Optional, Union +from typing import Any, Callable, List, Mapping, Optional, Union -from kfp.dsl import _component_bridge from kfp.v2.components.experimental import constants from kfp.v2.components.experimental import pipeline_channel from kfp.v2.components.experimental import placeholders from kfp.v2.components.experimental import structures -from kfp.v2.components.types import type_utils +from kfp.v2.components.types.experimental import type_utils -# TODO(chensun): return PipelineTask object instead of ContainerOp object. 
def create_pipeline_task( component_spec: structures.ComponentSpec, arguments: Mapping[str, Any], -) -> "ContainerOp": # pytype: disable=name-error - return _component_bridge._create_container_op_from_component_and_arguments( - component_spec=component_spec.to_v1_component_spec(), - arguments=arguments, - ) +) -> 'PipelineTask': # pytype: disable=name-error + return PipelineTask(component_spec=component_spec, arguments=arguments) class PipelineTask: @@ -43,11 +38,16 @@ class PipelineTask: `.after()`, `.set_memory_limit()`, `enable_caching()`, etc. Attributes: + name: The name of the task. Unique within its parent group. + outputs: task_spec: The task spec of the task. component_spec: The component spec of the task. container_spec: The resolved container spec of the task. """ + # To be override by pipeline `register_task_and_generate_id` + register_task_handler = lambda task: task.component_spec.name + def __init__( self, component_spec: structures.ComponentSpec, @@ -59,6 +59,8 @@ def __init__( component_spec: The component definition. arguments: The dictionary of component arguments. """ + arguments = arguments or {} + for input_name, argument_value in arguments.items(): if input_name not in component_spec.inputs: @@ -100,8 +102,7 @@ def __init__( self.component_spec = component_spec self.task_spec = structures.TaskSpec( - # The name of the task is subject to change due to component reuse. - name=component_spec.name, + name=self.register_task_handler(), inputs={ input_name: value for input_name, value in arguments.items() }, @@ -115,6 +116,60 @@ def __init__( arguments=arguments, ) + self._outputs = { + output_name: pipeline_channel.create_pipeline_channel( + name=output_name, + channel_type=output_spec.type, + task_name=self.task_spec.name, + ) for output_name, output_spec in ( + component_spec.outputs or {}).items() + } + + self._inputs = arguments + + self._channel_inputs = [ + value for _, value in arguments.items() + if isinstance(value, pipeline_channel.PipelineChannel) + ] + pipeline_channel.extract_pipeline_channels_from_any([ + value for _, value in arguments.items() + if not isinstance(value, pipeline_channel.PipelineChannel) + ]) + + @property + def name(self) -> str: + """Returns the name of the task.""" + return self.task_spec.name + + @property + def inputs( + self + ) -> List[Union[type_utils.PARAMETER_TYPES, + pipeline_channel.PipelineChannel]]: + """Returns the list of actual inputs passed to the task.""" + return self._inputs + + @property + def channel_inputs(self) -> List[pipeline_channel.PipelineChannel]: + """Returns the list of all PipelineChannels passed to the task.""" + return self._channel_inputs + + @property + def output(self) -> pipeline_channel.PipelineChannel: + """Returns the single output object (a PipelineChannel) of the task.""" + if len(self._outputs) != 1: + raise AttributeError + return list(self._outputs.values())[0] + + @property + def outputs(self) -> Mapping[str, pipeline_channel.PipelineChannel]: + """Returns the dictionary of outputs (PipelineChannels) of the task.""" + return self._outputs + + @property + def dependent_tasks(self) -> List[str]: + """Returns the list of dependent task names.""" + return self.task_spec.dependent_tasks + def _resolve_command_line_and_arguments( self, component_spec: structures.ComponentSpec, @@ -130,7 +185,7 @@ def _resolve_command_line_and_arguments( if not component_spec.implementation.container: raise TypeError( - 'Only container components have command line to resolve') + 'Only container components have 
command line to resolve.') component_inputs = component_spec.inputs or {} inputs_dict = { @@ -272,8 +327,8 @@ def expand_argument_list(argument_list) -> Optional[List[str]]: return expanded_list container_spec = component_spec.implementation.container - resolved_container_spec = copy.deepcopy(container_spec) + resolved_container_spec = copy.deepcopy(container_spec) resolved_container_spec.commands = expand_argument_list( container_spec.commands) resolved_container_spec.arguments = expand_argument_list( @@ -313,16 +368,16 @@ def set_cpu_limit(self, cpu: str) -> 'PipelineTask': else: cpu = float(cpu) - if self.component_spec.implementation.container is not None: - if self.component_spec.implementation.container.resources is not None: - self.component_spec.implementation.container.resources.cpu_limit = cpu - else: - self.component_spec.implementation.container.resources = structures.ResourceSpec( - cpu_limit=cpu) - else: + if self.container_spec is None: raise ValueError( 'There is no container specified in implementation') + if self.container_spec.resources is not None: + self.container_spec.resources.cpu_limit = cpu + else: + self.container_spec.resources = structures.ResourceSpec( + cpu_limit=cpu) + return self def set_gpu_limit(self, gpu: str) -> 'PipelineTask': @@ -339,15 +394,16 @@ def set_gpu_limit(self, gpu: str) -> 'PipelineTask': gpu = int(gpu) - if self.component_spec.implementation.container is not None: - if self.component_spec.implementation.container.resources is not None: - self.component_spec.implementation.container.resources.accelerator_count = gpu - else: - self.component_spec.implementation.container.resources = structures.ResourceSpec( - accelerator_count=gpu) - else: + if self.container_spec is None: raise ValueError( 'There is no container specified in implementation') + + if self.container_spec.resources is not None: + self.container_spec.resources.accelerator_count = gpu + else: + self.container_spec.resources = structures.ResourceSpec( + accelerator_count=gpu) + return self def set_memory_limit(self, memory: str) -> 'PipelineTask': @@ -396,16 +452,16 @@ def set_memory_limit(self, memory: str) -> 'PipelineTask': # By default interpret as a plain integer, in the unit of Bytes. memory = float(memory) / constants._G - if self.component_spec.implementation.container is not None: - if self.component_spec.implementation.container.resources is not None: - self.component_spec.implementation.container.resources.memory_limit = memory - else: - self.component_spec.implementation.container.resources = structures.ResourceSpec( - memory_limit=memory) - else: + if self.container_spec is None: raise ValueError( 'There is no container specified in implementation') + if self.container_spec.resources is not None: + self.container_spec.resources.memory_limit = memory + else: + self.container_spec.resources = structures.ResourceSpec( + memory_limit=memory) + return self def add_node_selector_constraint(self, accelerator: str) -> 'PipelineTask': @@ -418,18 +474,18 @@ def add_node_selector_constraint(self, accelerator: str) -> 'PipelineTask': Returns: Self return to allow chained setting calls. 
""" - if self.component_spec.implementation.container is not None: - if self.component_spec.implementation.container.resources is not None: - self.component_spec.implementation.container.resources.accelerator_type = accelerator - if self.component_spec.implementation.container.resources.accelerator_count is None: - self.component_spec.implementation.container.resources.accelerator_count = 1 - else: - self.component_spec.implementation.container.resources = structures.ResourceSpec( - accelerator_count=1, accelerator_type=accelerator) - else: + if self.container_spec is None: raise ValueError( 'There is no container specified in implementation') + if self.container_spec.resources is not None: + self.container_spec.resources.accelerator_type = accelerator + if self.container_spec.resources.accelerator_count is None: + self.container_spec.resources.accelerator_count = 1 + else: + self.container_spec.resources = structures.ResourceSpec( + accelerator_count=1, accelerator_type=accelerator) + return self def set_display_name(self, name: str) -> 'PipelineTask': @@ -441,7 +497,7 @@ def set_display_name(self, name: str) -> 'PipelineTask': Returns: Self return to allow chained setting calls. """ - self.task_spec.name = name + self.task_spec.display_name = name return self def after(self, *tasks) -> 'PipelineTask': diff --git a/sdk/python/kfp/v2/components/experimental/pipeline_task_test.py b/sdk/python/kfp/v2/components/experimental/pipeline_task_test.py index aee256d246b..a059f948306 100644 --- a/sdk/python/kfp/v2/components/experimental/pipeline_task_test.py +++ b/sdk/python/kfp/v2/components/experimental/pipeline_task_test.py @@ -246,9 +246,8 @@ def test_set_valid_cpu_limit(self, cpu_limit: str, arguments={'input1': 'value'}, ) task.set_cpu_limit(cpu_limit) - self.assertEqual( - expected_cpu_number, - task.component_spec.implementation.container.resources.cpu_limit) + self.assertEqual(expected_cpu_number, + task.container_spec.resources.cpu_limit) @parameterized.parameters( { @@ -263,9 +262,8 @@ def test_set_valid_gpu_limit(self, gpu_limit: str, arguments={'input1': 'value'}, ) task.set_gpu_limit(gpu_limit) - self.assertEqual( - expected_gpu_number, task.component_spec.implementation.container - .resources.accelerator_count) + self.assertEqual(expected_gpu_number, + task.container_spec.resources.accelerator_count) @parameterized.parameters( { @@ -328,9 +326,8 @@ def test_set_memory_limit(self, memory: str, expected_memory_number: int): arguments={'input1': 'value'}, ) task.set_memory_limit(memory) - self.assertEqual( - expected_memory_number, - task.component_spec.implementation.container.resources.memory_limit) + self.assertEqual(expected_memory_number, + task.container_spec.resources.memory_limit) def test_add_node_selector_constraint_type_only(self): task = pipeline_task.PipelineTask( @@ -342,7 +339,7 @@ def test_add_node_selector_constraint_type_only(self): self.assertEqual( structures.ResourceSpec( accelerator_type='NVIDIA_TESLA_K80', accelerator_count=1), - task.component_spec.implementation.container.resources) + task.container_spec.resources) def test_add_node_selector_constraint_accelerator_count(self): task = pipeline_task.PipelineTask( @@ -354,7 +351,7 @@ def test_add_node_selector_constraint_accelerator_count(self): self.assertEqual( structures.ResourceSpec( accelerator_type='TPU_V3', accelerator_count=5), - task.component_spec.implementation.container.resources) + task.container_spec.resources) def test_set_display_name(self): task = pipeline_task.PipelineTask( @@ -363,7 +360,7 @@ def 
test_set_display_name(self): arguments={'input1': 'value'}, ) task.set_display_name('test_name') - self.assertEqual('test_name', task.task_spec.name) + self.assertEqual('test_name', task.task_spec.display_name) if __name__ == '__main__': diff --git a/sdk/python/kfp/v2/components/experimental/structures.py b/sdk/python/kfp/v2/components/experimental/structures.py index 3632bc26a70..fe2fc07be1a 100644 --- a/sdk/python/kfp/v2/components/experimental/structures.py +++ b/sdk/python/kfp/v2/components/experimental/structures.py @@ -37,10 +37,12 @@ class InputSpec(BaseModel): Attributes: type: The type of the input. default: Optional; the default value for the input. + description: Optional: the user description of the input. """ # TODO(ji-yaqi): Add logic to cast default value into the specified type. type: str default: Optional[Union[str, int, float, bool, dict, list]] = None + description: Optional[str] = None class OutputSpec(BaseModel): @@ -221,6 +223,8 @@ class TaskSpec(BaseModel): from the [items][] collection. enable_caching: Optional; whether or not to enable caching for the task. Default is True. + display_name: Optional; the display name of the task. If not specified, + the task name will be used as the display name. """ name: str inputs: Mapping[str, Any] @@ -231,6 +235,7 @@ class TaskSpec(BaseModel): iterator_items: Optional[Any] = None iterator_item_input: Optional[str] = None enable_caching: bool = True + display_name: Optional[str] = None class DagSpec(BaseModel): diff --git a/sdk/python/kfp/v2/components/experimental/tasks_group.py b/sdk/python/kfp/v2/components/experimental/tasks_group.py new file mode 100644 index 00000000000..7848fe9c113 --- /dev/null +++ b/sdk/python/kfp/v2/components/experimental/tasks_group.py @@ -0,0 +1,219 @@ +# Copyright 2021 The Kubeflow Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Definition for TasksGroup.""" + +import enum +from typing import Optional, Union + +from kfp.v2.components.experimental import for_loop +from kfp.v2.components.experimental import pipeline +from kfp.v2.components.experimental import pipeline_channel +from kfp.v2.components.experimental import pipeline_task + + +class TasksGroupType(str, enum.Enum): + """Types of TasksGroup.""" + PIPELINE = 'pipeline' + CONDITION = 'condition' + FOR_LOOP = 'for-loop' + EXIT_HANDLER = 'exit-handler' + + +class TasksGroup: + """Represents a logical group of tasks and groups of TasksGroups. + + This class is the base class for groups of tasks, such as tasks + sharing an exit handler, a condition branch, or a loop. This class + is not supposed to be used by pipeline authors. It is useful for + implementing a compiler. + + Attributes: + group_type: The type of the TasksGroup. + tasks: A list of all PipelineTasks in this group. + groups: A list of TasksGroups in this group. + name: The optional user given name of the group. + dependencies: A list of tasks or groups this group depends on. 
+    """
+
+    def __init__(
+        self,
+        group_type: TasksGroupType,
+        name: Optional[str] = None,
+    ):
+        """Creates a new instance of TasksGroup.
+
+        Args:
+            group_type: The type of the group.
+            name: Optional; the name of the group.
+        """
+        self.group_type = group_type
+        self.tasks = list()
+        self.groups = list()
+        self.name = name
+        self.dependencies = []
+
+    def __enter__(self):
+        if not pipeline.Pipeline.get_default_pipeline():
+            raise ValueError('Default pipeline not defined.')
+
+        self._make_name_unique()
+
+        pipeline.Pipeline.get_default_pipeline().push_tasks_group(self)
+        return self
+
+    def __exit__(self, *unused_args):
+        pipeline.Pipeline.get_default_pipeline().pop_tasks_group()
+
+    def _make_name_unique(self):
+        """Generates a unique TasksGroup name in the pipeline."""
+        if not pipeline.Pipeline.get_default_pipeline():
+            raise ValueError('Default pipeline not defined.')
+
+        self.name = (
+            self.group_type + '-' +
+            ('' if self.name is None else self.name + '-') +
+            pipeline.Pipeline.get_default_pipeline().get_next_group_id())
+        self.name = self.name.replace('_', '-')
+
+    def remove_task_recursive(self, task: pipeline_task.PipelineTask):
+        """Removes a task from the group recursively."""
+        if self.tasks and task in self.tasks:
+            self.tasks.remove(task)
+        for group in self.groups or []:
+            group.remove_task_recursive(task)
+
+
+class ExitHandler(TasksGroup):
+    """Represents an exit handler that is invoked upon exiting a group of
+    tasks.
+
+    Example:
+      ::
+
+        exit_task = ExitComponent(...)
+        with ExitHandler(exit_task):
+            task1 = MyComponent1(...)
+            task2 = MyComponent2(...)
+
+    Attributes:
+        exit_task: The exit handler task.
+    """
+
+    def __init__(
+        self,
+        exit_task: pipeline_task.PipelineTask,
+        name: Optional[str] = None,
+    ):
+        """Initializes an ExitHandler task group.
+
+        Args:
+            exit_task: The task invoked upon exiting a group of other tasks.
+            name: Optional; the name of the exit handler group.
+
+        Raises:
+            ValueError: Raised if the exit_task is invalid.
+        """
+        super().__init__(group_type=TasksGroupType.EXIT_HANDLER, name=name)
+
+        if exit_task.dependent_tasks:
+            raise ValueError('exit_task cannot depend on any other tasks.')
+
+        # Remove exit_task from any group it may have been added to.
+        pipeline.Pipeline.get_default_pipeline().remove_task_from_groups(
+            exit_task)
+
+        # Set is_exit_handler since the compiler might be using this attribute.
+        exit_task.is_exit_handler = True
+
+        self.exit_task = exit_task
+
+
+class Condition(TasksGroup):
+    """Represents a condition group with a condition expression.
+
+    Example:
+      ::
+
+        with Condition(param1=='pizza', '[param1 is pizza]'):
+            task1 = MyComponent1(...)
+            task2 = MyComponent2(...)
+
+    Attributes:
+        condition: The condition expression.
+    """
+
+    def __init__(
+        self,
+        condition: pipeline_channel.ConditionOperator,
+        name: Optional[str] = None,
+    ):
+        """Initializes a conditional task group.
+
+        Args:
+            condition: The condition expression.
+            name: Optional; the name of the condition group.
+        """
+        super().__init__(group_type=TasksGroupType.CONDITION, name=name)
+        self.condition = condition
+
+
+class ParallelFor(TasksGroup):
+    """Represents a parallel for loop over a static set of items.
+
+    Example:
+      ::
+
+        with dsl.ParallelFor([{'a': 1, 'b': 10}, {'a': 2, 'b': 20}]) as item:
+            task1 = MyComponent(..., item.a)
+            task2 = MyComponent(..., item.b)
+
+    In this case :code:`task1` and :code:`task2` would each be executed twice,
+    once with :code:`item.a=1, item.b=10` and once with
+    :code:`item.a=2, item.b=20`.
+
+    Attributes:
+        loop_argument: The argument for each loop iteration.
+ items_is_pipeline_channel: Whether the loop items is PipelineChannel + instead of raw items. + """ + + def __init__( + self, + items: Union[for_loop.ItemList, pipeline_channel.PipelineChannel], + name: Optional[str] = None, + ): + """Initializes a for loop task group. + + Args: + items: The argument to loop over. It can be either a raw list or a + pipeline channel. + name: Optional; the name of the for loop group. + """ + super().__init__(group_type=TasksGroupType.FOR_LOOP, name=name) + + if isinstance(items, pipeline_channel.PipelineChannel): + self.loop_argument = for_loop.LoopArgument.from_pipeline_channel( + items) + self.items_is_pipeline_channel = True + else: + self.loop_argument = for_loop.LoopArgument.from_raw_items( + raw_items=items, + name_code=pipeline.Pipeline.get_default_pipeline() + .get_next_group_id(), + ) + self.items_is_pipeline_channel = False + + def __enter__(self) -> for_loop.LoopArgument: + super().__enter__() + return self.loop_argument diff --git a/sdk/python/kfp/v2/components/experimental/yaml_component.py b/sdk/python/kfp/v2/components/experimental/yaml_component.py new file mode 100644 index 00000000000..02993f51a73 --- /dev/null +++ b/sdk/python/kfp/v2/components/experimental/yaml_component.py @@ -0,0 +1,29 @@ +# Copyright 2021 The Kubeflow Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Functions for loading component from yaml.""" + +from kfp.v2.components.experimental import base_component +from kfp.v2.components.experimental import structures + + +class YamlComponent(base_component.BaseComponent): + + def execute(self, *args, **kwargs): + pass + + +def load_component_from_text(text: str) -> base_component.BaseComponent: + """Loads component from text.""" + return YamlComponent( + structures.ComponentSpec.load_from_component_yaml(text)) diff --git a/sdk/python/kfp/v2/components/types/experimental/__init__.py b/sdk/python/kfp/v2/components/types/experimental/__init__.py new file mode 100644 index 00000000000..b4447dd5838 --- /dev/null +++ b/sdk/python/kfp/v2/components/types/experimental/__init__.py @@ -0,0 +1,13 @@ +# Copyright 2021 The Kubeflow Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
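Taken together, the new experimental modules above compose roughly as follows. This is a minimal sketch, not part of the patch: the component YAML and the names `printer` and `demo-pipeline` are illustrative, and it assumes the component returned by `load_component_from_text` exposes the parsed spec as `component_spec`.

    import textwrap

    from kfp.v2.components.experimental import pipeline
    from kfp.v2.components.experimental import pipeline_task
    from kfp.v2.components.experimental import tasks_group
    from kfp.v2.components.experimental import yaml_component

    # Illustrative single-input component; any valid component YAML works.
    printer = yaml_component.load_component_from_text(
        textwrap.dedent("""\
            name: Print text
            inputs:
            - {name: text, type: String}
            implementation:
              container:
                image: alpine
                command: [echo, {inputValue: text}]
            """))

    with pipeline.Pipeline(name='demo-pipeline') as p:
        # Entering ParallelFor pushes a for-loop TasksGroup onto p.groups and
        # yields the per-iteration loop argument.
        with tasks_group.ParallelFor(['a', 'b']) as item:
            # Inside `with Pipeline()`, each PipelineTask registers itself
            # through the swapped-in register_task_handler, which assigns a
            # unique, k8s-safe task name and appends it to the current group.
            task = pipeline_task.PipelineTask(
                component_spec=printer.component_spec,  # assumed attribute
                arguments={'text': item},
            )

    print(list(p.tasks.keys()))  # e.g. ['print-text']

Leaving the `with Pipeline()` block restores the previous `register_task_handler`, so a component instantiated outside any pipeline context falls back to the default handler, which simply returns the component name.
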
diff --git a/sdk/python/kfp/v2/components/types/experimental/type_utils.py b/sdk/python/kfp/v2/components/types/experimental/type_utils.py new file mode 100644 index 00000000000..1ebd2e68f1a --- /dev/null +++ b/sdk/python/kfp/v2/components/types/experimental/type_utils.py @@ -0,0 +1,279 @@ +# Copyright 2020 The Kubeflow Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Utilities for component I/O type mapping.""" +import inspect +import re +import warnings +from typing import Dict, List, Optional, Type, Union + +from kfp.components import structures, type_annotation_utils +from kfp.pipeline_spec import pipeline_spec_pb2 +from kfp.v2.components.types import artifact_types + +PARAMETER_TYPES = Union[str, int, float, bool, dict, list] + +# ComponentSpec I/O types to DSL ontology artifact classes mapping. +_ARTIFACT_CLASSES_MAPPING = { + 'model': artifact_types.Model, + 'dataset': artifact_types.Dataset, + 'metrics': artifact_types.Metrics, + 'classificationmetrics': artifact_types.ClassificationMetrics, + 'slicedclassificationmetrics': artifact_types.SlicedClassificationMetrics, + 'html': artifact_types.HTML, + 'markdown': artifact_types.Markdown, +} + +_GOOGLE_TYPES_PATTERN = r'^google.[A-Za-z]+$' +_GOOGLE_TYPES_VERSION = '0.0.1' + +# ComponentSpec I/O types to (IR) PipelineTaskSpec I/O types mapping. +# The keys are normalized (lowercased). These are types viewed as Parameters. +# The values are the corresponding IR parameter primitive types. +_PARAMETER_TYPES_MAPPING = { + 'integer': pipeline_spec_pb2.ParameterType.NUMBER_INTEGER, + 'int': pipeline_spec_pb2.ParameterType.NUMBER_INTEGER, + 'double': pipeline_spec_pb2.ParameterType.NUMBER_DOUBLE, + 'float': pipeline_spec_pb2.ParameterType.NUMBER_DOUBLE, + 'string': pipeline_spec_pb2.ParameterType.STRING, + 'str': pipeline_spec_pb2.ParameterType.STRING, + 'text': pipeline_spec_pb2.ParameterType.STRING, + 'bool': pipeline_spec_pb2.ParameterType.BOOLEAN, + 'boolean': pipeline_spec_pb2.ParameterType.BOOLEAN, + 'dict': pipeline_spec_pb2.ParameterType.STRUCT, + 'list': pipeline_spec_pb2.ParameterType.LIST, + 'jsonobject': pipeline_spec_pb2.ParameterType.STRUCT, + 'jsonarray': pipeline_spec_pb2.ParameterType.LIST, +} + +# Mapping primitive types to their IR message field names. +# This is used in constructing condition strings. +_PARAMETER_TYPES_VALUE_REFERENCE_MAPPING = { + pipeline_spec_pb2.ParameterType.NUMBER_INTEGER: 'number_value', + pipeline_spec_pb2.ParameterType.NUMBER_DOUBLE: 'number_value', + pipeline_spec_pb2.ParameterType.STRING: 'string_value', + pipeline_spec_pb2.ParameterType.BOOLEAN: 'bool_value', + pipeline_spec_pb2.ParameterType.STRUCT: 'struct_value', + pipeline_spec_pb2.ParameterType.LIST: 'list_value', +} + + +def is_parameter_type(type_name: Optional[Union[str, dict]]) -> bool: + """Check if a ComponentSpec I/O type is considered as a parameter type. + + Args: + type_name: type name of the ComponentSpec I/O type. + + Returns: + True if the type name maps to a parameter type else False. 
+    """
+    if isinstance(type_name, str):
+        type_name = type_annotation_utils.get_short_type_name(type_name)
+    elif isinstance(type_name, dict):
+        type_name = list(type_name.keys())[0]
+    else:
+        return False
+
+    return type_name.lower() in _PARAMETER_TYPES_MAPPING
+
+
+def get_artifact_type_schema(
+    artifact_class_or_type_name: Optional[Union[str,
+                                                Type[artifact_types.Artifact]]]
+) -> pipeline_spec_pb2.ArtifactTypeSchema:
+    """Gets the IR I/O artifact type message for the given ComponentSpec I/O
+    type."""
+    artifact_class = artifact_types.Artifact
+    if isinstance(artifact_class_or_type_name, str):
+        if re.match(_GOOGLE_TYPES_PATTERN, artifact_class_or_type_name):
+            return pipeline_spec_pb2.ArtifactTypeSchema(
+                schema_title=artifact_class_or_type_name,
+                schema_version=_GOOGLE_TYPES_VERSION,
+            )
+        artifact_class = _ARTIFACT_CLASSES_MAPPING.get(
+            artifact_class_or_type_name.lower(), artifact_types.Artifact)
+    elif inspect.isclass(artifact_class_or_type_name) and issubclass(
+            artifact_class_or_type_name, artifact_types.Artifact):
+        artifact_class = artifact_class_or_type_name
+
+    return pipeline_spec_pb2.ArtifactTypeSchema(
+        schema_title=artifact_class.TYPE_NAME,
+        schema_version=artifact_class.VERSION)
+
+
+def get_parameter_type(
+    param_type: Optional[Union[Type, str, dict]]
+) -> pipeline_spec_pb2.ParameterType:
+    """Gets the IR I/O parameter type for the given ComponentSpec I/O type.
+
+    Args:
+        param_type: Type of the ComponentSpec I/O type. Can be a primitive
+            Python builtin type, a type name, or a dict type spec.
+
+    Returns:
+        The enum value of the mapped IR I/O primitive type.
+
+    Raises:
+        AttributeError: if type_name is not a string type.
+    """
+    if type(param_type) == type:
+        type_name = param_type.__name__
+    elif isinstance(param_type, dict):
+        type_name = list(param_type.keys())[0]
+    else:
+        type_name = type_annotation_utils.get_short_type_name(str(param_type))
+    return _PARAMETER_TYPES_MAPPING.get(type_name.lower())
+
+
+def get_parameter_type_name(
+        param_type: Optional[Union[Type, str, dict]]) -> str:
+    """Gets the parameter type name."""
+    return pipeline_spec_pb2.ParameterType.ParameterTypeEnum.Name(
+        get_parameter_type(param_type))
+
+
+def get_parameter_type_field_name(type_name: Optional[str]) -> str:
+    """Gets the IR field name for the given primitive type.
+
+    For example: 'str' -> 'string_value', 'double' -> 'number_value', etc.
+
+    Args:
+        type_name: Type name of the ComponentSpec I/O primitive type.
+
+    Returns:
+        The IR value reference field name.
+
+    Raises:
+        AttributeError: if type_name is not a string type.
+    """
+    return _PARAMETER_TYPES_VALUE_REFERENCE_MAPPING.get(
+        get_parameter_type(type_name))
+
+
+def get_input_artifact_type_schema(
+    input_name: str,
+    inputs: List[structures.InputSpec],
+) -> Optional[pipeline_spec_pb2.ArtifactTypeSchema]:
+    """Finds the input artifact type by input name.
+
+    Args:
+        input_name: The name of the component input.
+        inputs: The list of InputSpec.
+
+    Returns:
+        The artifact type schema of the input.
+
+    Raises:
+        AssertionError: if the input is not found, or is found but is not an
+            artifact type.
+    """
+    for component_input in inputs:
+        if component_input.name == input_name:
+            assert not is_parameter_type(
+                component_input.type), 'Input is not an artifact type.'
+            return get_artifact_type_schema(component_input.type)
+    assert False, 'Input not found.'
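Before the compatibility checks that follow, a few concrete mappings may help. This is a hypothetical interpreter session derived from the tables and helpers above; the import path follows this patch, and none of these calls are part of the file itself.

    from kfp.pipeline_spec import pipeline_spec_pb2
    from kfp.v2.components.types.experimental import type_utils

    # Parameter-vs-artifact classification uses normalized type names.
    assert type_utils.is_parameter_type('String')         # 'string' is a parameter
    assert type_utils.is_parameter_type({'Integer': {}})  # dict types use the key
    assert not type_utils.is_parameter_type('Model')      # artifact, not parameter

    # Builtin types, type names, and dict type specs map to IR enum values.
    assert (type_utils.get_parameter_type(float) ==
            pipeline_spec_pb2.ParameterType.NUMBER_DOUBLE)
    assert type_utils.get_parameter_type_name(dict) == 'STRUCT'

    # Both 'float' and 'double' resolve to the shared 'number_value' field,
    # per _PARAMETER_TYPES_VALUE_REFERENCE_MAPPING above.
    assert type_utils.get_parameter_type_field_name('double') == 'number_value'
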
+ + +class InconsistentTypeException(Exception): + """InconsistencyTypeException is raised when two types are not + consistent.""" + pass + + +class InconsistentTypeWarning(Warning): + """InconsistentTypeWarning is issued when two types are not consistent.""" + pass + + +def verify_type_compatibility( + given_type: Union[str, dict], + expected_type: Union[str, dict], + error_message_prefix: str, +) -> bool: + """Verifies the given argument type is compatible with the expected type. + + Args: + given_type: The type of the argument passed to the input. + expected_type: The declared type of the input. + error_message_prefix: The prefix for the error message. + + Returns: + True if types are compatible, and False if otherwise. + + Raises: + InconsistentTypeException if types are incompatible and TYPE_CHECK==True. + """ + + # Generic "Artifact" type is compatible with any specific artifact types. + if not is_parameter_type( + str(given_type)) and (str(given_type).lower() == "artifact" or + str(expected_type).lower() == "artifact"): + return True + + # Normalize parameter type names. + if is_parameter_type(given_type): + given_type = get_parameter_type_name(given_type) + if is_parameter_type(expected_type): + expected_type = get_parameter_type_name(expected_type) + + types_are_compatible = _check_types(given_type, expected_type) + + if not types_are_compatible: + error_text = error_message_prefix + ( + 'Argument type "{}" is incompatible with the input type "{}"' + ).format(str(given_type), str(expected_type)) + import kfp + if kfp.TYPE_CHECK: + raise InconsistentTypeException(error_text) + else: + warnings.warn(InconsistentTypeWarning(error_text)) + return types_are_compatible + + +def _check_types( + given_type: Union[str, dict], + expected_type: Union[str, dict], +): + if isinstance(given_type, str): + given_type = {given_type: {}} + if isinstance(expected_type, str): + expected_type = {expected_type: {}} + return _check_dict_types(given_type, expected_type) + + +def _check_dict_types( + given_type: dict, + expected_type: dict, +): + given_type_name, _ = list(given_type.items())[0] + expected_type_name, _ = list(expected_type.items())[0] + if given_type_name == "" or expected_type_name == "": + # If the type name is empty, it matches any types + return True + if given_type_name != expected_type_name: + print("type name " + str(given_type_name) + + " is different from expected: " + str(expected_type_name)) + return False + type_name = given_type_name + for type_property in given_type[type_name]: + if type_property not in expected_type[type_name]: + print(type_name + " has a property " + str(type_property) + + " that the latter does not.") + return False + if given_type[type_name][type_property] != expected_type[type_name][ + type_property]: + print(type_name + " has a property " + str(type_property) + + " with value: " + str(given_type[type_name][type_property]) + + " and " + str(expected_type[type_name][type_property])) + return False + return True diff --git a/sdk/python/kfp/v2/components/utils.py b/sdk/python/kfp/v2/components/utils.py index 3eaf7ae8895..3e1f6d93dcc 100644 --- a/sdk/python/kfp/v2/components/utils.py +++ b/sdk/python/kfp/v2/components/utils.py @@ -18,6 +18,10 @@ import re import sys import types +from typing import List + +_COMPONENT_NAME_PREFIX = 'comp-' +_EXECUTOR_LABEL_PREFIX = 'exec-' def load_module(module_name: str, module_directory: str) -> types.ModuleType: @@ -55,3 +59,44 @@ def maybe_rename_for_k8s(name: str) -> str: """ return re.sub('-+', '-', 
re.sub('[^-0-9a-z]+', '-', name.lower())).lstrip('-').rstrip('-') + + +def sanitize_component_name(name: str) -> str: + """Sanitizes component name.""" + return _COMPONENT_NAME_PREFIX + maybe_rename_for_k8s(name) + + +def sanitize_task_name(name: str) -> str: + """Sanitizes task name.""" + return maybe_rename_for_k8s(name) + + +def sanitize_executor_label(label: str) -> str: + """Sanitizes executor label.""" + return _EXECUTOR_LABEL_PREFIX + maybe_rename_for_k8s(label) + + +def make_name_unique_by_adding_index( + name: str, + collection: List[str], + delimiter: str, +) -> str: + """Makes a unique name by adding index. + + The index starts from 2 and increase by 1 until we find a unique name. + + Args: + name: The original name. + collection: The collection of existing names. + delimiter: The delimiter to connect the original name and an index. + + Returns: + A unique name composed of name+delimiter+next index + """ + unique_name = name + if unique_name in collection: + for i in range(2, sys.maxsize**10): + unique_name = name + delimiter + str(i) + if unique_name not in collection: + break + return unique_name diff --git a/sdk/python/kfp/v2/components/utils_test.py b/sdk/python/kfp/v2/components/utils_test.py index 1f52ac5b186..6d0ce09aed5 100644 --- a/sdk/python/kfp/v2/components/utils_test.py +++ b/sdk/python/kfp/v2/components/utils_test.py @@ -38,6 +38,54 @@ class UtilsTest(parameterized.TestCase): def test_maybe_rename_for_k8s(self, original, expected): self.assertEqual(utils.maybe_rename_for_k8s(original), expected) + def test_sanitize_component_name(self): + self.assertEqual('comp-my-component', + utils.sanitize_component_name('My component')) + + def test_sanitize_executor_label(self): + self.assertEqual('exec-my-component', + utils.sanitize_executor_label('My component')) + + def test_sanitize_task_name(self): + self.assertEqual('my-component-1', + utils.sanitize_task_name('My component 1')) + + @parameterized.parameters( + { + 'name': 'some-name', + 'collection': [], + 'delimiter': '-', + 'expected': 'some-name' + }, + { + 'name': 'some-name', + 'collection': ['some-name'], + 'delimiter': '+', + 'expected': 'some-name+2' + }, + { + 'name': 'some-name', + 'collection': ['some-name', 'some-name-2'], + 'delimiter': '-', + 'expected': 'some-name-3' + }, + { + 'name': 'some-name-2', + 'collection': ['some-name', 'some-name-2'], + 'delimiter': '-', + 'expected': 'some-name-2-2' + }, + ) + def test_make_name_unique_by_adding_index(self, name, collection, delimiter, + expected): + self.assertEqual( + expected, + utils.make_name_unique_by_adding_index( + name=name, + collection=collection, + delimiter=delimiter, + )) + if __name__ == '__main__': unittest.main() diff --git a/sdk/python/kfp/v2/dsl/experimental/__init__.py b/sdk/python/kfp/v2/dsl/experimental/__init__.py index b4447dd5838..1f5b3dd13c5 100644 --- a/sdk/python/kfp/v2/dsl/experimental/__init__.py +++ b/sdk/python/kfp/v2/dsl/experimental/__init__.py @@ -11,3 +11,38 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
+ +from kfp.v2.components.experimental.pipeline import Pipeline + +from kfp.v2.components.importer_node import importer +from kfp.v2.dsl import ( + pipeline, + component, +) +from kfp.v2.components.types.artifact_types import ( + Artifact, + ClassificationMetrics, + Dataset, + HTML, + Markdown, + Metrics, + Model, + SlicedClassificationMetrics, +) +from kfp.v2.components.types.type_annotations import ( + Input, + Output, + InputPath, + OutputPath, +) +from kfp.v2.components.experimental.pipeline_channel import ( + PipelineArtifactChannel, + PipelineChannel, + PipelineParameterChannel, +) +from kfp.v2.components.experimental.pipeline_task import PipelineTask +from kfp.v2.components.experimental.tasks_group import ( + Condition, + ExitHandler, + ParallelFor, +) diff --git a/sdk/python/requirements.in b/sdk/python/requirements.in index 7ae3de7820e..fc31e2ab85c 100644 --- a/sdk/python/requirements.in +++ b/sdk/python/requirements.in @@ -31,7 +31,7 @@ typer>=0.3.2,<1.0 # kfp.v2 absl-py>=0.9,<=0.11 -kfp-pipeline-spec>=0.1.11,<0.2.0 +kfp-pipeline-spec>=0.1.13,<0.2.0 fire>=0.3.1,<1 google-api-python-client>=1.7.8,<2 pydantic>=1.8.2,<2 diff --git a/sdk/python/requirements.txt b/sdk/python/requirements.txt index 2d3646acc93..73ca463603b 100644 --- a/sdk/python/requirements.txt +++ b/sdk/python/requirements.txt @@ -65,7 +65,7 @@ idna==3.2 # via requests jsonschema==3.2.0 # via -r requirements.in -kfp-pipeline-spec==0.1.12 +kfp-pipeline-spec==0.1.13 # via -r requirements.in kfp-server-api==1.7.0 # via -r requirements.in diff --git a/sdk/python/setup.py b/sdk/python/setup.py index 555494b55ec..bea4e3eeae7 100644 --- a/sdk/python/setup.py +++ b/sdk/python/setup.py @@ -47,7 +47,7 @@ 'Deprecated>=1.2.7,<2', 'strip-hints>=0.1.8,<1', 'docstring-parser>=0.7.3,<1', - 'kfp-pipeline-spec>=0.1.10,<0.2.0', + 'kfp-pipeline-spec>=0.1.13,<0.2.0', 'fire>=0.3.1,<1', 'protobuf>=3.13.0,<4', 'uritemplate>=3.0.1,<4', From 003dfa4d9b8f1e5b5d75b7564da11c83d1f40800 Mon Sep 17 00:00:00 2001 From: Sina Chavoshi Date: Tue, 26 Oct 2021 16:39:51 -0700 Subject: [PATCH 12/31] chore(components/google-cloud):Internal clean up. 
PiperOrigin-RevId: 405764253 --- .../experimental/dataflow/dataflow_python_job_remote_runner.py | 1 + 1 file changed, 1 insertion(+) diff --git a/components/google-cloud/google_cloud_pipeline_components/container/experimental/dataflow/dataflow_python_job_remote_runner.py b/components/google-cloud/google_cloud_pipeline_components/container/experimental/dataflow/dataflow_python_job_remote_runner.py index 0e0e92a6e31..958c09d32ee 100644 --- a/components/google-cloud/google_cloud_pipeline_components/container/experimental/dataflow/dataflow_python_job_remote_runner.py +++ b/components/google-cloud/google_cloud_pipeline_components/container/experimental/dataflow/dataflow_python_job_remote_runner.py @@ -64,6 +64,7 @@ def create_python_job(python_module_path: str, temp_location) sub_process = Process(cmd) for line in sub_process.read_lines(): + logging.info('DataflowRunner output: %s', line) job_id, location = extract_job_id_and_location(line) if job_id: logging.info('Found job id %s and location %s.', job_id, location) From 4360652e66a1d572076d4551e496aa693eab5770 Mon Sep 17 00:00:00 2001 From: "Yuan (Bob) Gong" <4957653+Bobgy@users.noreply.github.com> Date: Wed, 27 Oct 2021 08:48:27 +0800 Subject: [PATCH 13/31] test(backend/integration): pipeline API integration tests for v2 spec (#6798) * test(backend/integration): pipeline API integration tests for v2 spec * test: update PipelineClientInterface --- .../apiserver/server/test/v2-hello-world.json | 66 ++++++++++++ backend/src/apiserver/server/util.go | 12 ++- backend/src/apiserver/server/util_test.go | 9 ++ .../client/api_server/pipeline_client.go | 31 +----- .../client/api_server/pipeline_client_fake.go | 23 ++-- backend/src/common/util/template_util.go | 5 + backend/test/integration/pipeline_api_test.go | 101 ++++++++++-------- .../integration/pipeline_version_api_test.go | 88 ++++++++++----- backend/test/integration/upgrade_test.go | 11 +- backend/test/resources/v2-hello-world.json | 66 ++++++++++++ 10 files changed, 294 insertions(+), 118 deletions(-) create mode 100644 backend/src/apiserver/server/test/v2-hello-world.json create mode 100644 backend/test/resources/v2-hello-world.json diff --git a/backend/src/apiserver/server/test/v2-hello-world.json b/backend/src/apiserver/server/test/v2-hello-world.json new file mode 100644 index 00000000000..455016c25fe --- /dev/null +++ b/backend/src/apiserver/server/test/v2-hello-world.json @@ -0,0 +1,66 @@ +{ + "components": { + "comp-hello-world": { + "executorLabel": "exec-hello-world", + "inputDefinitions": { + "parameters": { + "text": { + "type": "STRING" + } + } + } + } + }, + "deploymentSpec": { + "executors": { + "exec-hello-world": { + "container": { + "args": ["--text", "{{$.inputs.parameters['text']}}"], + "command": [ + "sh", + "-ec", + "program_path=$(mktemp)\nprintf \"%s\" \"$0\" > \"$program_path\"\npython3 -u \"$program_path\" \"$@\"\n", + "def hello_world(text):\n print(text)\n return text\n\nimport argparse\n_parser = argparse.ArgumentParser(prog='Hello world', description='')\n_parser.add_argument(\"--text\", dest=\"text\", type=str, required=True, default=argparse.SUPPRESS)\n_parsed_args = vars(_parser.parse_args())\n\n_outputs = hello_world(**_parsed_args)\n" + ], + "image": "python:3.7" + } + } + } + }, + "pipelineInfo": { + "name": "hello-world" + }, + "root": { + "dag": { + "tasks": { + "hello-world": { + "cachingOptions": { + "enableCache": true + }, + "componentRef": { + "name": "comp-hello-world" + }, + "inputs": { + "parameters": { + "text": { + "componentInputParameter": 
"text" + } + } + }, + "taskInfo": { + "name": "hello-world" + } + } + } + }, + "inputDefinitions": { + "parameters": { + "text": { + "type": "STRING" + } + } + } + }, + "schemaVersion": "2.0.0", + "sdkVersion": "kfp-1.6.5" +} diff --git a/backend/src/apiserver/server/util.go b/backend/src/apiserver/server/util.go index cfd71b07449..ba4232a6ace 100644 --- a/backend/src/apiserver/server/util.go +++ b/backend/src/apiserver/server/util.go @@ -59,14 +59,14 @@ func loadFile(fileReader io.Reader, maxFileLength int) ([]byte, error) { return pipelineFile[:size], nil } -func isSupportedPipelineFormat(fileName string, compressedFile []byte) bool { - return isYamlFile(fileName) || isCompressedTarballFile(compressedFile) || isZipFile(compressedFile) -} - func isYamlFile(fileName string) bool { return strings.HasSuffix(fileName, ".yaml") || strings.HasSuffix(fileName, ".yml") } +func isJSONFile(fileName string) bool { + return strings.HasSuffix(fileName, ".json") +} + func isPipelineYamlFile(fileName string) bool { return fileName == "pipeline.yaml" } @@ -168,12 +168,14 @@ func ReadPipelineFile(fileName string, fileReader io.Reader, maxFileLength int) switch { case isYamlFile(fileName): processedFile = pipelineFileBytes + case isJSONFile(fileName): + processedFile = pipelineFileBytes case isZipFile(pipelineFileBytes): processedFile, err = DecompressPipelineZip(pipelineFileBytes) case isCompressedTarballFile(pipelineFileBytes): processedFile, err = DecompressPipelineTarball(pipelineFileBytes) default: - return nil, util.NewInvalidInputError("Unexpected pipeline file format. Support .zip, .tar.gz or YAML.") + return nil, util.NewInvalidInputError("Unexpected pipeline file format. Support .zip, .tar.gz, .json or YAML.") } if err != nil { return nil, util.Wrap(err, "Error decompress the pipeline file") diff --git a/backend/src/apiserver/server/util_test.go b/backend/src/apiserver/server/util_test.go index 2d6d3e686b4..0249eaccf78 100644 --- a/backend/src/apiserver/server/util_test.go +++ b/backend/src/apiserver/server/util_test.go @@ -127,6 +127,15 @@ func TestReadPipelineFile_YAML(t *testing.T) { assert.Equal(t, expectedFileBytes, fileBytes) } +func TestReadPipelineFile_JSON(t *testing.T) { + file, _ := os.Open("test/v2-hello-world.json") + fileBytes, err := ReadPipelineFile("v2-hello-world.json", file, MaxFileLength) + assert.Nil(t, err) + + expectedFileBytes, _ := ioutil.ReadFile("test/v2-hello-world.json") + assert.Equal(t, expectedFileBytes, fileBytes) +} + func TestReadPipelineFile_Zip(t *testing.T) { file, _ := os.Open("test/arguments_zip/arguments-parameters.zip") pipelineFile, err := ReadPipelineFile("arguments-parameters.zip", file, MaxFileLength) diff --git a/backend/src/common/client/api_server/pipeline_client.go b/backend/src/common/client/api_server/pipeline_client.go index 2e51fa4da0d..ca12f8e86fa 100644 --- a/backend/src/common/client/api_server/pipeline_client.go +++ b/backend/src/common/client/api_server/pipeline_client.go @@ -3,8 +3,6 @@ package api_server import ( "fmt" - workflowapi "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1" - "github.com/ghodss/yaml" "github.com/go-openapi/strfmt" apiclient "github.com/kubeflow/pipelines/backend/api/go_http_client/pipeline_client" params "github.com/kubeflow/pipelines/backend/api/go_http_client/pipeline_client/pipeline_service" @@ -19,7 +17,7 @@ type PipelineInterface interface { Create(params *params.CreatePipelineParams) (*model.APIPipeline, error) Get(params *params.GetPipelineParams) (*model.APIPipeline, error) Delete(params 
*params.DeletePipelineParams) error - GetTemplate(params *params.GetTemplateParams) (*workflowapi.Workflow, error) + GetTemplate(params *params.GetTemplateParams) (util.Template, error) List(params *params.ListPipelinesParams) ([]*model.APIPipeline, int, string, error) ListAll(params *params.ListPipelinesParams, maxResultSize int) ( []*model.APIPipeline, error) @@ -138,8 +136,7 @@ func (c *PipelineClient) Delete(parameters *params.DeletePipelineParams) error { return nil } -func (c *PipelineClient) GetTemplate(parameters *params.GetTemplateParams) ( - *workflowapi.Workflow, error) { +func (c *PipelineClient) GetTemplate(parameters *params.GetTemplateParams) (util.Template, error) { // Create context with timeout ctx, cancel := context.WithTimeout(context.Background(), apiServerDefaultTimeout) defer cancel() @@ -160,16 +157,7 @@ func (c *PipelineClient) GetTemplate(parameters *params.GetTemplateParams) ( } // Unmarshal response - var workflow workflowapi.Workflow - err = yaml.Unmarshal([]byte(response.Payload.Template), &workflow) - if err != nil { - return nil, util.NewUserError(err, - fmt.Sprintf("Failed to unmarshal reponse. Params: '%+v'. Response: '%s'", parameters, - response.Payload.Template), - fmt.Sprintf("Failed to unmarshal reponse")) - } - - return &workflow, nil + return util.NewTemplate([]byte(response.Payload.Template)) } func (c *PipelineClient) List(parameters *params.ListPipelinesParams) ( @@ -298,7 +286,7 @@ func (c *PipelineClient) GetPipelineVersion(parameters *params.GetPipelineVersio } func (c *PipelineClient) GetPipelineVersionTemplate(parameters *params.GetPipelineVersionTemplateParams) ( - *workflowapi.Workflow, error) { + util.Template, error) { // Create context with timeout ctx, cancel := context.WithTimeout(context.Background(), apiServerDefaultTimeout) defer cancel() @@ -319,14 +307,5 @@ func (c *PipelineClient) GetPipelineVersionTemplate(parameters *params.GetPipeli } // Unmarshal response - var workflow workflowapi.Workflow - err = yaml.Unmarshal([]byte(response.Payload.Template), &workflow) - if err != nil { - return nil, util.NewUserError(err, - fmt.Sprintf("Failed to unmarshal reponse. Params: '%+v'. 
Response: '%s'", parameters, - response.Payload.Template), - fmt.Sprintf("Failed to unmarshal reponse")) - } - - return &workflow, nil + return util.NewTemplate([]byte(response.Payload.Template)) } diff --git a/backend/src/common/client/api_server/pipeline_client_fake.go b/backend/src/common/client/api_server/pipeline_client_fake.go index 85386381678..e55e2d427b0 100644 --- a/backend/src/common/client/api_server/pipeline_client_fake.go +++ b/backend/src/common/client/api_server/pipeline_client_fake.go @@ -6,11 +6,11 @@ import ( "path" workflowapi "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1" - "github.com/ghodss/yaml" "github.com/go-openapi/strfmt" params "github.com/kubeflow/pipelines/backend/api/go_http_client/pipeline_client/pipeline_service" pipelineparams "github.com/kubeflow/pipelines/backend/api/go_http_client/pipeline_client/pipeline_service" pipelinemodel "github.com/kubeflow/pipelines/backend/api/go_http_client/pipeline_model" + "github.com/kubeflow/pipelines/backend/src/common/util" metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" ) @@ -42,13 +42,18 @@ func getDefaultWorkflow() *workflowapi.Workflow { }} } +func getDefaultTemplate() util.Template { + tmpl, _ := util.NewArgoTemplateFromWorkflow(&workflowapi.Workflow{ + ObjectMeta: metav1.ObjectMeta{ + Namespace: "MY_NAMESPACE", + Name: "MY_NAME", + }}) + return tmpl +} + func getDefaultWorkflowAsString() string { - workflow := getDefaultWorkflow() - result, err := yaml.Marshal(workflow) - if err != nil { - return "no workflow" - } - return string(result) + tmpl := getDefaultTemplate() + return string(tmpl.Bytes()) } type PipelineClientFake struct{} @@ -87,12 +92,12 @@ func (c *PipelineClientFake) Delete(params *pipelineparams.DeletePipelineParams) } func (c *PipelineClientFake) GetTemplate(params *pipelineparams.GetTemplateParams) ( - *workflowapi.Workflow, error) { + util.Template, error) { switch params.ID { case PipelineForClientErrorTest: return nil, fmt.Errorf(ClientErrorString) default: - return getDefaultWorkflow(), nil + return getDefaultTemplate(), nil } } diff --git a/backend/src/common/util/template_util.go b/backend/src/common/util/template_util.go index 16c8f516542..19f7af96f3d 100644 --- a/backend/src/common/util/template_util.go +++ b/backend/src/common/util/template_util.go @@ -20,6 +20,7 @@ import ( "strings" "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1" + workflowapi "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1" "github.com/argoproj/argo-workflows/v3/workflow/validate" "github.com/ghodss/yaml" "github.com/kubeflow/pipelines/api/v2alpha1/go/pipelinespec" @@ -164,6 +165,10 @@ func NewArgoTemplate(bytes []byte) (*ArgoTemplate, error) { return &ArgoTemplate{wf}, nil } +func NewArgoTemplateFromWorkflow(wf *workflowapi.Workflow) (*ArgoTemplate, error) { + return &ArgoTemplate{wf: &Workflow{wf}}, nil +} + func (t *ArgoTemplate) Bytes() []byte { if t == nil { return nil diff --git a/backend/test/integration/pipeline_api_test.go b/backend/test/integration/pipeline_api_test.go index f6d6a4c437c..3e16b021103 100644 --- a/backend/test/integration/pipeline_api_test.go +++ b/backend/test/integration/pipeline_api_test.go @@ -7,8 +7,6 @@ import ( "github.com/kubeflow/pipelines/backend/test" - "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1" - "github.com/ghodss/yaml" "github.com/golang/glog" params "github.com/kubeflow/pipelines/backend/api/go_http_client/pipeline_client/pipeline_service" model 
"github.com/kubeflow/pipelines/backend/api/go_http_client/pipeline_model" @@ -17,14 +15,16 @@ import ( "github.com/kubeflow/pipelines/backend/src/common/client/api_server" "github.com/kubeflow/pipelines/backend/src/common/util" "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" "github.com/stretchr/testify/suite" ) // This test suit tests various methods to import pipeline to pipeline system, including +// - upload v2 pipeline spec JSON file // - upload yaml file // - upload tarball file // - providing YAML file url -// - Providing tarball file url +// - providing tarball file url type PipelineApiTest struct { suite.Suite namespace string @@ -65,14 +65,20 @@ func (s *PipelineApiTest) TestPipelineAPI() { test.DeleteAllPipelines(s.pipelineClient, t) + /* ------ Upload v2 pipeline spec JSON --------*/ + v2HelloPipeline, err := s.pipelineUploadClient.UploadFile("../resources/v2-hello-world.json", uploadParams.NewUploadPipelineParams()) + require.Nil(t, err) + assert.Equal(t, "v2-hello-world.json", v2HelloPipeline.Name) + /* ---------- Upload pipelines YAML ---------- */ + time.Sleep(1 * time.Second) argumentYAMLPipeline, err := s.pipelineUploadClient.UploadFile("../resources/arguments-parameters.yaml", uploadParams.NewUploadPipelineParams()) - assert.Nil(t, err) + require.Nil(t, err) assert.Equal(t, "arguments-parameters.yaml", argumentYAMLPipeline.Name) /* ---------- Upload the same pipeline again. Should fail due to name uniqueness ---------- */ _, err = s.pipelineUploadClient.UploadFile("../resources/arguments-parameters.yaml", uploadParams.NewUploadPipelineParams()) - assert.NotNil(t, err) + require.NotNil(t, err) assert.Contains(t, err.Error(), "Failed to upload pipeline.") /* ---------- Import pipeline YAML by URL ---------- */ @@ -80,14 +86,14 @@ func (s *PipelineApiTest) TestPipelineAPI() { sequentialPipeline, err := s.pipelineClient.Create(¶ms.CreatePipelineParams{ Body: &model.APIPipeline{Name: "sequential", URL: &model.APIURL{ PipelineURL: "https://storage.googleapis.com/ml-pipeline-dataset/sequential.yaml"}}}) - assert.Nil(t, err) + require.Nil(t, err) assert.Equal(t, "sequential", sequentialPipeline.Name) /* ---------- Upload pipelines zip ---------- */ time.Sleep(1 * time.Second) argumentUploadPipeline, err := s.pipelineUploadClient.UploadFile( "../resources/arguments.pipeline.zip", &uploadParams.UploadPipelineParams{Name: util.StringPointer("zip-arguments-parameters")}) - assert.Nil(t, err) + require.Nil(t, err) assert.Equal(t, "zip-arguments-parameters", argumentUploadPipeline.Name) /* ---------- Import pipeline tarball by URL ---------- */ @@ -95,14 +101,14 @@ func (s *PipelineApiTest) TestPipelineAPI() { argumentUrlPipeline, err := s.pipelineClient.Create(¶ms.CreatePipelineParams{ Body: &model.APIPipeline{URL: &model.APIURL{ PipelineURL: "https://storage.googleapis.com/ml-pipeline-dataset/arguments.pipeline.zip"}}}) - assert.Nil(t, err) + require.Nil(t, err) assert.Equal(t, "arguments.pipeline.zip", argumentUrlPipeline.Name) /* ---------- Verify list pipeline works ---------- */ pipelines, totalSize, _, err := s.pipelineClient.List(¶ms.ListPipelinesParams{}) - assert.Nil(t, err) - assert.Equal(t, 4, len(pipelines)) - assert.Equal(t, 4, totalSize) + require.Nil(t, err) + assert.Equal(t, 5, len(pipelines)) + assert.Equal(t, 5, totalSize) for _, p := range pipelines { // Sampling one of the pipelines and verify the result is expected. 
if p.Name == "arguments-parameters.yaml" { @@ -113,37 +119,39 @@ func (s *PipelineApiTest) TestPipelineAPI() { /* ---------- Verify list pipeline sorted by names ---------- */ listFirstPagePipelines, totalSize, nextPageToken, err := s.pipelineClient.List( ¶ms.ListPipelinesParams{PageSize: util.Int32Pointer(2), SortBy: util.StringPointer("name")}) - assert.Nil(t, err) + require.Nil(t, err) assert.Equal(t, 2, len(listFirstPagePipelines)) - assert.Equal(t, 4, totalSize) + assert.Equal(t, 5, totalSize) assert.Equal(t, "arguments-parameters.yaml", listFirstPagePipelines[0].Name) assert.Equal(t, "arguments.pipeline.zip", listFirstPagePipelines[1].Name) assert.NotEmpty(t, nextPageToken) listSecondPagePipelines, totalSize, nextPageToken, err := s.pipelineClient.List( - ¶ms.ListPipelinesParams{PageToken: util.StringPointer(nextPageToken), PageSize: util.Int32Pointer(2), SortBy: util.StringPointer("name")}) - assert.Nil(t, err) - assert.Equal(t, 2, len(listSecondPagePipelines)) - assert.Equal(t, 4, totalSize) + ¶ms.ListPipelinesParams{PageToken: util.StringPointer(nextPageToken), PageSize: util.Int32Pointer(3), SortBy: util.StringPointer("name")}) + require.Nil(t, err) + assert.Equal(t, 3, len(listSecondPagePipelines)) + assert.Equal(t, 5, totalSize) assert.Equal(t, "sequential", listSecondPagePipelines[0].Name) - assert.Equal(t, "zip-arguments-parameters", listSecondPagePipelines[1].Name) + assert.Equal(t, "v2-hello-world.json", listSecondPagePipelines[1].Name) + assert.Equal(t, "zip-arguments-parameters", listSecondPagePipelines[2].Name) assert.Empty(t, nextPageToken) /* ---------- Verify list pipeline sorted by creation time ---------- */ listFirstPagePipelines, totalSize, nextPageToken, err = s.pipelineClient.List( - ¶ms.ListPipelinesParams{PageSize: util.Int32Pointer(2), SortBy: util.StringPointer("created_at")}) - assert.Nil(t, err) - assert.Equal(t, 2, len(listFirstPagePipelines)) - assert.Equal(t, 4, totalSize) - assert.Equal(t, "arguments-parameters.yaml", listFirstPagePipelines[0].Name) - assert.Equal(t, "sequential", listFirstPagePipelines[1].Name) + ¶ms.ListPipelinesParams{PageSize: util.Int32Pointer(3), SortBy: util.StringPointer("created_at")}) + require.Nil(t, err) + assert.Equal(t, 3, len(listFirstPagePipelines)) + assert.Equal(t, 5, totalSize) + assert.Equal(t, "v2-hello-world.json", listFirstPagePipelines[0].Name) + assert.Equal(t, "arguments-parameters.yaml", listFirstPagePipelines[1].Name) + assert.Equal(t, "sequential", listFirstPagePipelines[2].Name) assert.NotEmpty(t, nextPageToken) listSecondPagePipelines, totalSize, nextPageToken, err = s.pipelineClient.List( - ¶ms.ListPipelinesParams{PageToken: util.StringPointer(nextPageToken), PageSize: util.Int32Pointer(2), SortBy: util.StringPointer("created_at")}) - assert.Nil(t, err) + ¶ms.ListPipelinesParams{PageToken: util.StringPointer(nextPageToken), PageSize: util.Int32Pointer(3), SortBy: util.StringPointer("created_at")}) + require.Nil(t, err) assert.Equal(t, 2, len(listSecondPagePipelines)) - assert.Equal(t, 4, totalSize) + assert.Equal(t, 5, totalSize) assert.Equal(t, "zip-arguments-parameters", listSecondPagePipelines[0].Name) assert.Equal(t, "arguments.pipeline.zip", listSecondPagePipelines[1].Name) assert.Empty(t, nextPageToken) @@ -155,37 +163,44 @@ func (s *PipelineApiTest) TestPipelineAPI() { /* ---------- List pipelines sorted by names descend order ---------- */ listFirstPagePipelines, totalSize, nextPageToken, err = s.pipelineClient.List( - ¶ms.ListPipelinesParams{PageSize: util.Int32Pointer(2), SortBy: 
util.StringPointer("name desc")}) - assert.Nil(t, err) - assert.Equal(t, 2, len(listFirstPagePipelines)) - assert.Equal(t, 4, totalSize) + ¶ms.ListPipelinesParams{PageSize: util.Int32Pointer(3), SortBy: util.StringPointer("name desc")}) + require.Nil(t, err) + assert.Equal(t, 3, len(listFirstPagePipelines)) + assert.Equal(t, 5, totalSize) assert.Equal(t, "zip-arguments-parameters", listFirstPagePipelines[0].Name) - assert.Equal(t, "sequential", listFirstPagePipelines[1].Name) + assert.Equal(t, "v2-hello-world.json", listFirstPagePipelines[1].Name) + assert.Equal(t, "sequential", listFirstPagePipelines[2].Name) assert.NotEmpty(t, nextPageToken) listSecondPagePipelines, totalSize, nextPageToken, err = s.pipelineClient.List(¶ms.ListPipelinesParams{ - PageToken: util.StringPointer(nextPageToken), PageSize: util.Int32Pointer(2), SortBy: util.StringPointer("name desc")}) - assert.Nil(t, err) + PageToken: util.StringPointer(nextPageToken), PageSize: util.Int32Pointer(3), SortBy: util.StringPointer("name desc")}) + require.Nil(t, err) assert.Equal(t, 2, len(listSecondPagePipelines)) - assert.Equal(t, 4, totalSize) + assert.Equal(t, 5, totalSize) assert.Equal(t, "arguments.pipeline.zip", listSecondPagePipelines[0].Name) assert.Equal(t, "arguments-parameters.yaml", listSecondPagePipelines[1].Name) assert.Empty(t, nextPageToken) /* ---------- Verify get pipeline works ---------- */ pipeline, err := s.pipelineClient.Get(¶ms.GetPipelineParams{ID: argumentYAMLPipeline.ID}) - assert.Nil(t, err) + require.Nil(t, err) verifyPipeline(t, pipeline) /* ---------- Verify get template works ---------- */ template, err := s.pipelineClient.GetTemplate(¶ms.GetTemplateParams{ID: argumentYAMLPipeline.ID}) - assert.Nil(t, err) - expected, err := ioutil.ReadFile("../resources/arguments-parameters.yaml") - assert.Nil(t, err) - var expectedWorkflow v1alpha1.Workflow - err = yaml.Unmarshal(expected, &expectedWorkflow) - assert.Nil(t, err) - assert.Equal(t, expectedWorkflow, *template) + require.Nil(t, err) + bytes, err := ioutil.ReadFile("../resources/arguments-parameters.yaml") + require.Nil(t, err) + expected, err := util.NewTemplate(bytes) + assert.Equal(t, expected, template) + + template, err = s.pipelineClient.GetTemplate(¶ms.GetTemplateParams{ID: v2HelloPipeline.ID}) + require.Nil(t, err) + bytes, err = ioutil.ReadFile("../resources/v2-hello-world.json") + require.Nil(t, err) + expected, err = util.NewTemplate(bytes) + expected.OverrideV2PipelineName("v2-hello-world.json", "") + assert.Equal(t, expected, template) } func verifyPipeline(t *testing.T, pipeline *model.APIPipeline) { diff --git a/backend/test/integration/pipeline_version_api_test.go b/backend/test/integration/pipeline_version_api_test.go index 708657749cd..4a6c8d6d552 100644 --- a/backend/test/integration/pipeline_version_api_test.go +++ b/backend/test/integration/pipeline_version_api_test.go @@ -5,8 +5,6 @@ import ( "testing" "time" - "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1" - "github.com/ghodss/yaml" "github.com/golang/glog" params "github.com/kubeflow/pipelines/backend/api/go_http_client/pipeline_client/pipeline_service" "github.com/kubeflow/pipelines/backend/api/go_http_client/pipeline_model" @@ -15,6 +13,7 @@ import ( "github.com/kubeflow/pipelines/backend/src/common/util" "github.com/kubeflow/pipelines/backend/test" "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" "github.com/stretchr/testify/suite" ) @@ -56,7 +55,7 @@ func (s *PipelineVersionApiTest) SetupTest() { s.cleanUp() } -func (s 
*PipelineVersionApiTest) TestPipelineVersionAPI() {
+func (s *PipelineVersionApiTest) TestArgoSpec() {
 	t := s.T()
 
 	test.DeleteAllPipelines(s.pipelineClient, t)
@@ -66,12 +65,12 @@ func (s *PipelineVersionApiTest) TestPipelineVersionAPI() {
 	pipelineName := "test_pipeline"
 	pipelineParams.SetName(&pipelineName)
 	pipeline, err := s.pipelineUploadClient.UploadFile("../resources/arguments-parameters.yaml", pipelineParams)
-	assert.Nil(t, err)
+	require.Nil(t, err)
 	assert.Equal(t, "test_pipeline", pipeline.Name)
 
 	/* ---------- Get pipeline id ---------- */
 	pipelines, totalSize, _, err := s.pipelineClient.List(&params.ListPipelinesParams{})
-	assert.Nil(t, err)
+	require.Nil(t, err)
 	assert.Equal(t, 1, len(pipelines))
 	assert.Equal(t, 1, totalSize)
 	pipelineId := pipelines[0].ID
@@ -81,28 +80,28 @@ func (s *PipelineVersionApiTest) TestPipelineVersionAPI() {
 	pipelineVersionParams := uploadParams.NewUploadPipelineVersionParams()
 	pipelineVersionParams.SetPipelineid(&pipelineId)
 	argumentYAMLPipelineVersion, err := s.pipelineUploadClient.UploadPipelineVersion("../resources/arguments-parameters.yaml", pipelineVersionParams)
-	assert.Nil(t, err)
+	require.Nil(t, err)
 	assert.Equal(t, "arguments-parameters.yaml", argumentYAMLPipelineVersion.Name)
 
 	/* ---------- Update pipeline default version ---------- */
 	time.Sleep(1 * time.Second)
 	sortBy := "created_at"
 	versions, _, _, err := s.pipelineClient.ListPipelineVersions(&params.ListPipelineVersionsParams{ResourceKeyID: &pipelineId, SortBy: &sortBy})
-	assert.Nil(t, err)
+	require.Nil(t, err)
 
 	err = s.pipelineClient.UpdateDefaultVersion(&params.UpdatePipelineDefaultVersionParams{PipelineID: pipelineId, VersionID: versions[0].ID})
-	assert.Nil(t, err)
+	require.Nil(t, err)
 	time.Sleep(1 * time.Second)
 	pipelineSelected, err := s.pipelineClient.Get(&params.GetPipelineParams{ID: pipelineId})
-	assert.Nil(t, err)
+	require.Nil(t, err)
 	assert.Equal(t, pipelineSelected.DefaultVersion.ID, versions[0].ID)
 
 	/* ---------- Upload the same pipeline version again. 
Should fail due to name uniqueness ---------- */ time.Sleep(1 * time.Second) _, err = s.pipelineUploadClient.UploadPipelineVersion("../resources/arguments-parameters.yaml", uploadParams.NewUploadPipelineVersionParams()) - assert.NotNil(t, err) + require.NotNil(t, err) assert.Contains(t, err.Error(), "Failed to upload pipeline version.") /* ---------- Import pipeline version YAML by URL ---------- */ @@ -120,7 +119,7 @@ func (s *PipelineVersionApiTest) TestPipelineVersionAPI() { }, }, }}) - assert.Nil(t, err) + require.Nil(t, err) assert.Equal(t, "sequential", sequentialPipelineVersion.Name) /* ---------- Upload pipeline version zip ---------- */ @@ -130,7 +129,7 @@ func (s *PipelineVersionApiTest) TestPipelineVersionAPI() { Name: util.StringPointer("zip-arguments-parameters"), Pipelineid: util.StringPointer(pipelineId), }) - assert.Nil(t, err) + require.Nil(t, err) assert.Equal(t, "zip-arguments-parameters", argumentUploadPipelineVersion.Name) /* ---------- Import pipeline tarball by URL ---------- */ @@ -148,7 +147,7 @@ func (s *PipelineVersionApiTest) TestPipelineVersionAPI() { }, }, }}) - assert.Nil(t, err) + require.Nil(t, err) assert.Equal(t, "arguments", argumentUrlPipelineVersion.Name) /* ---------- Verify list pipeline version works ---------- */ @@ -156,7 +155,7 @@ func (s *PipelineVersionApiTest) TestPipelineVersionAPI() { ResourceKeyID: util.StringPointer(pipelineId), ResourceKeyType: util.StringPointer("PIPELINE"), }) - assert.Nil(t, err) + require.Nil(t, err) assert.Equal(t, 5, len(pipelineVersions)) assert.Equal(t, 5, totalSize) for _, p := range pipelineVersions { @@ -181,7 +180,7 @@ func (s *PipelineVersionApiTest) TestPipelineVersionAPI() { ResourceKeyID: util.StringPointer(pipelineId), ResourceKeyType: util.StringPointer("PIPELINE"), }) - assert.Nil(t, err) + require.Nil(t, err) assert.Equal(t, 3, len(listFirstPagePipelineVersions)) assert.Equal(t, 5, totalSize) assert.Equal(t, "arguments", listFirstPagePipelineVersions[0].Name) @@ -197,7 +196,7 @@ func (s *PipelineVersionApiTest) TestPipelineVersionAPI() { ResourceKeyID: util.StringPointer(pipelineId), ResourceKeyType: util.StringPointer("PIPELINE"), }) - assert.Nil(t, err) + require.Nil(t, err) assert.Equal(t, 2, len(listSecondPagePipelineVersions)) assert.Equal(t, 5, totalSize) assert.Equal(t, "test_pipeline", listSecondPagePipelineVersions[0].Name) @@ -212,7 +211,7 @@ func (s *PipelineVersionApiTest) TestPipelineVersionAPI() { ResourceKeyID: util.StringPointer(pipelineId), ResourceKeyType: util.StringPointer("PIPELINE"), }) - assert.Nil(t, err) + require.Nil(t, err) assert.Equal(t, 3, len(listFirstPagePipelineVersions)) assert.Equal(t, 5, totalSize) assert.Equal(t, "test_pipeline", listFirstPagePipelineVersions[0].Name) @@ -228,7 +227,7 @@ func (s *PipelineVersionApiTest) TestPipelineVersionAPI() { ResourceKeyID: util.StringPointer(pipelineId), ResourceKeyType: util.StringPointer("PIPELINE"), }) - assert.Nil(t, err) + require.Nil(t, err) assert.Equal(t, 2, len(listSecondPagePipelineVersions)) assert.Equal(t, 5, totalSize) assert.Equal(t, "zip-arguments-parameters", listSecondPagePipelineVersions[0].Name) @@ -252,7 +251,7 @@ func (s *PipelineVersionApiTest) TestPipelineVersionAPI() { ResourceKeyID: util.StringPointer(pipelineId), ResourceKeyType: util.StringPointer("PIPELINE"), }) - assert.Nil(t, err) + require.Nil(t, err) assert.Equal(t, 3, len(listFirstPagePipelineVersions)) assert.Equal(t, 5, totalSize) assert.Equal(t, "zip-arguments-parameters", listFirstPagePipelineVersions[0].Name) @@ -268,7 +267,7 @@ func (s 
*PipelineVersionApiTest) TestPipelineVersionAPI() {
 		ResourceKeyID:   util.StringPointer(pipelineId),
 		ResourceKeyType: util.StringPointer("PIPELINE"),
 	})
-	assert.Nil(t, err)
+	require.Nil(t, err)
 	assert.Equal(t, 2, len(listSecondPagePipelineVersions))
 	assert.Equal(t, 5, totalSize)
 	assert.Equal(t, "arguments-parameters.yaml", listSecondPagePipelineVersions[0].Name)
@@ -277,7 +276,7 @@ func (s *PipelineVersionApiTest) TestPipelineVersionAPI() {
 
 	/* ---------- Verify get pipeline version works ---------- */
 	pipelineVersion, err := s.pipelineClient.GetPipelineVersion(&params.GetPipelineVersionParams{VersionID: argumentUrlPipelineVersion.ID})
-	assert.Nil(t, err)
+	require.Nil(t, err)
 	assert.Equal(t, pipelineVersion.Name, "arguments")
 	assert.NotNil(t, pipelineVersion.CreatedAt)
 	assert.Equal(t, pipelineVersion.Parameters,
@@ -288,13 +287,46 @@ func (s *PipelineVersionApiTest) TestPipelineVersionAPI() {
 
 	/* ---------- Verify get template works ---------- */
 	template, err := s.pipelineClient.GetPipelineVersionTemplate(&params.GetPipelineVersionTemplateParams{VersionID: argumentYAMLPipelineVersion.ID})
-	assert.Nil(t, err)
-	expected, err := ioutil.ReadFile("../resources/arguments-parameters.yaml")
-	assert.Nil(t, err)
-	var expectedWorkflow v1alpha1.Workflow
-	err = yaml.Unmarshal(expected, &expectedWorkflow)
-	assert.Nil(t, err)
-	assert.Equal(t, expectedWorkflow, *template)
+	require.Nil(t, err)
+	bytes, err := ioutil.ReadFile("../resources/arguments-parameters.yaml")
+	require.Nil(t, err)
+	expected, err := util.NewTemplate(bytes)
+	require.Nil(t, err)
+	assert.Equal(t, expected, template)
+}
+
+func (s *PipelineVersionApiTest) TestV2Spec() {
+	t := s.T()
+
+	test.DeleteAllPipelines(s.pipelineClient, t)
+
+	/* ---------- Upload a pipeline YAML ---------- */
+	pipelineParams := uploadParams.NewUploadPipelineParams()
+	pipelineName := "test_v2_pipeline"
+	pipelineParams.SetName(&pipelineName)
+	pipeline, err := s.pipelineUploadClient.UploadFile("../resources/arguments-parameters.yaml", pipelineParams)
+	require.Nil(t, err)
+	assert.Equal(t, "test_v2_pipeline", pipeline.Name)
+
+	/* ---------- Upload a pipeline version with v2 pipeline spec JSON ---------- */
+	time.Sleep(1 * time.Second)
+	v2Version, err := s.pipelineUploadClient.UploadPipelineVersion(
+		"../resources/v2-hello-world.json", &uploadParams.UploadPipelineVersionParams{
+			Name:       util.StringPointer("v2-hello-world"),
+			Pipelineid: util.StringPointer(pipeline.ID),
+		})
+	require.Nil(t, err)
+	assert.Equal(t, "v2-hello-world", v2Version.Name)
+
+	/* ---------- Verify get template works ---------- */
+	template, err := s.pipelineClient.GetPipelineVersionTemplate(&params.GetPipelineVersionTemplateParams{VersionID: v2Version.ID})
+	require.Nil(t, err)
+	bytes, err := ioutil.ReadFile("../resources/v2-hello-world.json")
+	require.Nil(t, err)
+	expected, err := util.NewTemplate(bytes)
+	require.Nil(t, err)
+	expected.OverrideV2PipelineName("test_v2_pipeline", "")
+	assert.Equal(t, expected, template)
 }
 
 func TestPipelineVersionAPI(t *testing.T) {
diff --git a/backend/test/integration/upgrade_test.go b/backend/test/integration/upgrade_test.go
index 4fc6d17cf5e..98245546c20 100644
--- a/backend/test/integration/upgrade_test.go
+++ b/backend/test/integration/upgrade_test.go
@@ -6,8 +6,6 @@ import (
 	"testing"
 	"time"
 
-	"github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1"
-	"github.com/ghodss/yaml"
 	"github.com/golang/glog"
 	"github.com/stretchr/testify/assert"
 	"github.com/stretchr/testify/require"
@@ -232,12 +230,11 @@ func (s *UpgradeTests) 
VerifyPipelines() { /* ---------- Verify get template works ---------- */ template, err := s.pipelineClient.GetTemplate(&pipelineParams.GetTemplateParams{ID: pipelines[0].ID}) require.Nil(t, err) - expected, err := ioutil.ReadFile("../resources/arguments-parameters.yaml") + bytes, err := ioutil.ReadFile("../resources/arguments-parameters.yaml") require.Nil(t, err) - var expectedWorkflow v1alpha1.Workflow - err = yaml.Unmarshal(expected, &expectedWorkflow) - assert.Nil(t, err) - assert.Equal(t, expectedWorkflow, *template) + expected, err := util.NewTemplate(bytes) + require.Nil(t, err) + assert.Equal(t, expected, template) } func (s *UpgradeTests) PrepareRuns() { diff --git a/backend/test/resources/v2-hello-world.json b/backend/test/resources/v2-hello-world.json new file mode 100644 index 00000000000..455016c25fe --- /dev/null +++ b/backend/test/resources/v2-hello-world.json @@ -0,0 +1,66 @@ +{ + "components": { + "comp-hello-world": { + "executorLabel": "exec-hello-world", + "inputDefinitions": { + "parameters": { + "text": { + "type": "STRING" + } + } + } + } + }, + "deploymentSpec": { + "executors": { + "exec-hello-world": { + "container": { + "args": ["--text", "{{$.inputs.parameters['text']}}"], + "command": [ + "sh", + "-ec", + "program_path=$(mktemp)\nprintf \"%s\" \"$0\" > \"$program_path\"\npython3 -u \"$program_path\" \"$@\"\n", + "def hello_world(text):\n print(text)\n return text\n\nimport argparse\n_parser = argparse.ArgumentParser(prog='Hello world', description='')\n_parser.add_argument(\"--text\", dest=\"text\", type=str, required=True, default=argparse.SUPPRESS)\n_parsed_args = vars(_parser.parse_args())\n\n_outputs = hello_world(**_parsed_args)\n" + ], + "image": "python:3.7" + } + } + } + }, + "pipelineInfo": { + "name": "hello-world" + }, + "root": { + "dag": { + "tasks": { + "hello-world": { + "cachingOptions": { + "enableCache": true + }, + "componentRef": { + "name": "comp-hello-world" + }, + "inputs": { + "parameters": { + "text": { + "componentInputParameter": "text" + } + } + }, + "taskInfo": { + "name": "hello-world" + } + } + } + }, + "inputDefinitions": { + "parameters": { + "text": { + "type": "STRING" + } + } + } + }, + "schemaVersion": "2.0.0", + "sdkVersion": "kfp-1.6.5" +} From fca4c1c2fde7ed89fcfbfa01070a62a3b00e2e10 Mon Sep 17 00:00:00 2001 From: Ajay Gopinathan Date: Wed, 27 Oct 2021 09:17:03 -0700 Subject: [PATCH 14/31] Fix missing experimental type module in setup.py. 
(#6816) --- sdk/python/setup.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/sdk/python/setup.py b/sdk/python/setup.py index bea4e3eeae7..6167cc745be 100644 --- a/sdk/python/setup.py +++ b/sdk/python/setup.py @@ -52,9 +52,9 @@ 'protobuf>=3.13.0,<4', 'uritemplate>=3.0.1,<4', 'pydantic>=1.8.2,<2', + 'typer>=0.3.2,<1.0', # Standard library backports 'dataclasses;python_version<"3.7"', - 'typer>=0.3.2,<1.0', 'typing-extensions>=3.7.4,<4;python_version<"3.9"', ] @@ -118,6 +118,7 @@ def find_version(*file_path_parts): 'kfp.v2.compiler', 'kfp.v2.components', 'kfp.v2.components.types', + 'kfp.v2.components.types.experimental', 'kfp.v2.components.experimental', 'kfp.v2.dsl', 'kfp.v2.google.client', From 7768f28de3e9f96ef4776c1ea1fc7551c9180135 Mon Sep 17 00:00:00 2001 From: Chen Sun Date: Wed, 27 Oct 2021 10:58:39 -0700 Subject: [PATCH 15/31] fix(sdk.v2): Fix missing experimental modules in setup.py (#6817) * fix missing experimental modules * fix typo * fix test --- sdk/python/kfp/v2/compiler/experimental/compiler_test.py | 3 +-- .../test_data/experimental_pipeline_with_exit_handler.py | 2 +- .../test_data/experimental_pipeline_with_loops.py | 2 +- .../experimental_pipeline_with_nested_conditions_yaml.py | 2 +- .../test_data/experimental_two_step_pipeline.py | 4 +--- sdk/python/kfp/v2/components/__init__.py | 2 -- sdk/python/kfp/v2/components/experimental/__init__.py | 2 ++ sdk/python/setup.py | 2 ++ 8 files changed, 9 insertions(+), 10 deletions(-) diff --git a/sdk/python/kfp/v2/compiler/experimental/compiler_test.py b/sdk/python/kfp/v2/compiler/experimental/compiler_test.py index d3da50b2209..c8ff48cb8f5 100644 --- a/sdk/python/kfp/v2/compiler/experimental/compiler_test.py +++ b/sdk/python/kfp/v2/compiler/experimental/compiler_test.py @@ -19,10 +19,9 @@ import tempfile import unittest -from kfp.v2 import components +import kfp.v2.components.experimental as components from kfp.v2.compiler.experimental import compiler import kfp.v2.dsl.experimental as dsl -from kfp.v2.components.types import type_utils class CompilerTest(unittest.TestCase): diff --git a/sdk/python/kfp/v2/compiler_cli_tests/test_data/experimental_pipeline_with_exit_handler.py b/sdk/python/kfp/v2/compiler_cli_tests/test_data/experimental_pipeline_with_exit_handler.py index fe00e9240f5..fb88b5252f9 100644 --- a/sdk/python/kfp/v2/compiler_cli_tests/test_data/experimental_pipeline_with_exit_handler.py +++ b/sdk/python/kfp/v2/compiler_cli_tests/test_data/experimental_pipeline_with_exit_handler.py @@ -13,7 +13,7 @@ # limitations under the License. 
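# Why the setup.py change above matters: setuptools packages only what is
# explicitly listed in `packages`, so a subpackage left off the list is
# silently omitted from the wheel and only fails at import time. A sketch of
# one way to avoid hand-maintaining that list (an alternative approach, not
# what this patch does):
from setuptools import find_packages

packages = find_packages(include=['kfp', 'kfp.*'])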
"""Pipeline using ExitHandler.""" -from kfp.v2 import components +import kfp.v2.components.experimental as components import kfp.v2.dsl.experimental as dsl from kfp.v2.compiler.experimental import compiler diff --git a/sdk/python/kfp/v2/compiler_cli_tests/test_data/experimental_pipeline_with_loops.py b/sdk/python/kfp/v2/compiler_cli_tests/test_data/experimental_pipeline_with_loops.py index c8fe6acdc2a..522b97a67cb 100644 --- a/sdk/python/kfp/v2/compiler_cli_tests/test_data/experimental_pipeline_with_loops.py +++ b/sdk/python/kfp/v2/compiler_cli_tests/test_data/experimental_pipeline_with_loops.py @@ -14,7 +14,7 @@ from typing import List -from kfp.v2 import components +import kfp.v2.components.experimental as components import kfp.v2.dsl.experimental as dsl from kfp.v2.compiler.experimental import compiler diff --git a/sdk/python/kfp/v2/compiler_cli_tests/test_data/experimental_pipeline_with_nested_conditions_yaml.py b/sdk/python/kfp/v2/compiler_cli_tests/test_data/experimental_pipeline_with_nested_conditions_yaml.py index 2bbfd1ff504..ea9c84db652 100644 --- a/sdk/python/kfp/v2/compiler_cli_tests/test_data/experimental_pipeline_with_nested_conditions_yaml.py +++ b/sdk/python/kfp/v2/compiler_cli_tests/test_data/experimental_pipeline_with_nested_conditions_yaml.py @@ -12,7 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. -from kfp.v2 import components +import kfp.v2.components.experimental as components import kfp.v2.dsl.experimental as dsl from kfp.v2.compiler.experimental import compiler diff --git a/sdk/python/kfp/v2/compiler_cli_tests/test_data/experimental_two_step_pipeline.py b/sdk/python/kfp/v2/compiler_cli_tests/test_data/experimental_two_step_pipeline.py index 6573685f36b..bba6bbe4437 100644 --- a/sdk/python/kfp/v2/compiler_cli_tests/test_data/experimental_two_step_pipeline.py +++ b/sdk/python/kfp/v2/compiler_cli_tests/test_data/experimental_two_step_pipeline.py @@ -12,9 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. -import pathlib - -from kfp.v2 import components +import kfp.v2.components.experimental as components import kfp.v2.dsl.experimental as dsl from kfp.v2.compiler.experimental import compiler diff --git a/sdk/python/kfp/v2/components/__init__.py b/sdk/python/kfp/v2/components/__init__.py index 1059eb6b6f3..b4447dd5838 100644 --- a/sdk/python/kfp/v2/components/__init__.py +++ b/sdk/python/kfp/v2/components/__init__.py @@ -11,5 +11,3 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. - -from kfp.v2.components.experimental.yaml_component import load_component_from_text diff --git a/sdk/python/kfp/v2/components/experimental/__init__.py b/sdk/python/kfp/v2/components/experimental/__init__.py index b4447dd5838..1059eb6b6f3 100644 --- a/sdk/python/kfp/v2/components/experimental/__init__.py +++ b/sdk/python/kfp/v2/components/experimental/__init__.py @@ -11,3 +11,5 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
+ +from kfp.v2.components.experimental.yaml_component import load_component_from_text diff --git a/sdk/python/setup.py b/sdk/python/setup.py index 6167cc745be..75ba23b331b 100644 --- a/sdk/python/setup.py +++ b/sdk/python/setup.py @@ -116,11 +116,13 @@ def find_version(*file_path_parts): 'kfp.notebook', 'kfp.v2', 'kfp.v2.compiler', + 'kfp.v2.compiler.experimental', 'kfp.v2.components', 'kfp.v2.components.types', 'kfp.v2.components.types.experimental', 'kfp.v2.components.experimental', 'kfp.v2.dsl', + 'kfp.v2.dsl.experimental', 'kfp.v2.google.client', 'kfp.v2.google.experimental', ], From 40d8242bb000c7709c49661ea5a416b55c19769a Mon Sep 17 00:00:00 2001 From: ryansteakley <37981995+ryansteakley@users.noreply.github.com> Date: Wed, 27 Oct 2021 11:08:25 -0700 Subject: [PATCH 16/31] chore: update aws sagemaker components tests to kfp 1.7.0 (#6805) * Update to support kfp 1.7.0 * use variable for s3 bucket name --- components/aws/sagemaker/dev_requirements.txt | 2 +- .../aws/sagemaker/tests/integration_tests/.env.example | 2 +- .../aws/sagemaker/tests/integration_tests/environment.yml | 2 +- .../tests/integration_tests/scripts/run_integration_tests | 2 +- .../sagemaker/tests/integration_tests/utils/minio_utils.py | 5 ++++- 5 files changed, 8 insertions(+), 5 deletions(-) diff --git a/components/aws/sagemaker/dev_requirements.txt b/components/aws/sagemaker/dev_requirements.txt index 26bc936c97a..5e745028f88 100644 --- a/components/aws/sagemaker/dev_requirements.txt +++ b/components/aws/sagemaker/dev_requirements.txt @@ -1,7 +1,7 @@ # Install the production requirements before installing these ones # pip install -r requirements.txt # Development requirements -kfp==0.5.1 +kfp==1.7.0 docformatter==1.3.1 black==19.10b0 coverage==5.1 diff --git a/components/aws/sagemaker/tests/integration_tests/.env.example b/components/aws/sagemaker/tests/integration_tests/.env.example index 66cc4e0ee09..3e8b8d4ad45 100644 --- a/components/aws/sagemaker/tests/integration_tests/.env.example +++ b/components/aws/sagemaker/tests/integration_tests/.env.example @@ -17,7 +17,7 @@ S3_DATA_BUCKET=my-data-bucket # SKIP_FSX_TESTS=true # Version of the KFP to install on the cluster -# KFP_VERSION=0.5.1 +# KFP_VERSION=1.7.0 # If you have an IAM role that the EKS cluster should assume for the "assume role" tests # ASSUMED_ROLE_NAME=my-assumed-role \ No newline at end of file diff --git a/components/aws/sagemaker/tests/integration_tests/environment.yml b/components/aws/sagemaker/tests/integration_tests/environment.yml index 92d7cd229f4..07a67cac7ef 100644 --- a/components/aws/sagemaker/tests/integration_tests/environment.yml +++ b/components/aws/sagemaker/tests/integration_tests/environment.yml @@ -14,7 +14,7 @@ dependencies: - filelock=3.0.* - pip: - kubernetes==11.0.* - - kfp==0.5.* + - kfp==1.7.* - minio==5.0.10 - sagemaker==2.1.* - ruamel.yaml==0.16.* diff --git a/components/aws/sagemaker/tests/integration_tests/scripts/run_integration_tests b/components/aws/sagemaker/tests/integration_tests/scripts/run_integration_tests index 900fff2e3a9..860a44703df 100755 --- a/components/aws/sagemaker/tests/integration_tests/scripts/run_integration_tests +++ b/components/aws/sagemaker/tests/integration_tests/scripts/run_integration_tests @@ -22,7 +22,7 @@ EKS_NODE_COUNT=${EKS_NODE_COUNT:-"1"} # The initial node count of the EKS cluste EKS_PUBLIC_SUBNETS=${EKS_PUBLIC_SUBNETS:-""} EKS_PRIVATE_SUBNETS=${EKS_PRIVATE_SUBNETS:-""} SKIP_KFP_OIDC_SETUP=${SKIP_KFP_OIDC_SETUP:-"false"} -KFP_VERSION=${KFP_VERSION:-"0.5.1"} 
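# Context for the version bump below: KFP 1.7 moved to Argo 3.x, whose
# workflow status points at a shared artifact repository
# (artifactRepositoryRef) instead of embedding the S3 bucket in every
# artifact entry; the minio_utils.py change later in this patch reads the
# bucket from that reference.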
+KFP_VERSION=${KFP_VERSION:-"1.7.0"} ### Testing parameters MINIO_LOCAL_PORT=${MINIO_LOCAL_PORT:-9000} diff --git a/components/aws/sagemaker/tests/integration_tests/utils/minio_utils.py b/components/aws/sagemaker/tests/integration_tests/utils/minio_utils.py index 23bfc40b38f..d8bf5eb07f0 100644 --- a/components/aws/sagemaker/tests/integration_tests/utils/minio_utils.py +++ b/components/aws/sagemaker/tests/integration_tests/utils/minio_utils.py @@ -24,6 +24,9 @@ def get_artifact_in_minio(workflow_json, step_name, artifact_name, output_dir): for artifact in node["outputs"]["artifacts"]: if artifact["name"] == artifact_name: s3_data = artifact["s3"] + s3_bucket = workflow_json["status"]["artifactRepositoryRef"][ + "artifactRepository" + ]["s3"]["bucket"] minio_client = Minio( "localhost:{}".format(minio_port), access_key=minio_access_key, @@ -31,7 +34,7 @@ def get_artifact_in_minio(workflow_json, step_name, artifact_name, output_dir): secure=False, ) output_file = os.path.join(output_dir, artifact_name + ".tgz") - minio_client.fget_object(s3_data["bucket"], s3_data["key"], output_file) + minio_client.fget_object(s3_bucket, s3_data["key"], output_file) # https://docs.min.io/docs/python-client-api-reference.html#fget_object return output_file From ec4ab2dc4c8a8e3d285e46ee3cf8ee33b98196de Mon Sep 17 00:00:00 2001 From: Joshua Carp Date: Thu, 28 Oct 2021 02:41:30 -0400 Subject: [PATCH 17/31] fix(sdk): Add missing retry policy. (#6808) --- sdk/RELEASE.md | 1 + sdk/python/kfp/dsl/_container_op.py | 1 + 2 files changed, 2 insertions(+) diff --git a/sdk/RELEASE.md b/sdk/RELEASE.md index 619925490b1..6db44765f4a 100644 --- a/sdk/RELEASE.md +++ b/sdk/RELEASE.md @@ -17,6 +17,7 @@ * Fix placeholder mapping error in v2. [\#6794](https://github.com/kubeflow/pipelines/pull/6794) * Depends on `kfp-pipeline-spec>=0.1.13,<0.2.0` [\#6803](https://github.com/kubeflow/pipelines/pull/6803) +* Add `OnTransientError` to allowed retry policies [\#6808](https://github.com/kubeflow/pipelines/pull/6808) ## Documentation Updates diff --git a/sdk/python/kfp/dsl/_container_op.py b/sdk/python/kfp/dsl/_container_op.py index acb07011171..f48c13c28f4 100644 --- a/sdk/python/kfp/dsl/_container_op.py +++ b/sdk/python/kfp/dsl/_container_op.py @@ -40,6 +40,7 @@ 'Always', 'OnError', 'OnFailure', + 'OnTransientError', ) # Shorthand for PipelineContainerSpec From 37d3234ce519bc019ed5eb549f076a8f2599312e Mon Sep 17 00:00:00 2001 From: Joshua Carp Date: Thu, 28 Oct 2021 04:15:50 -0400 Subject: [PATCH 18/31] feat(sdk): Add filters to python client. (#6748) * Add filters to python client. * Add filter docstrings. h/t @chensun Co-authored-by: Chen Sun --- sdk/RELEASE.md | 3 ++- sdk/python/kfp/_client.py | 46 +++++++++++++++++++++++++++++---------- 2 files changed, 37 insertions(+), 12 deletions(-) diff --git a/sdk/RELEASE.md b/sdk/RELEASE.md index 6db44765f4a..31b715af90f 100644 --- a/sdk/RELEASE.md +++ b/sdk/RELEASE.md @@ -16,8 +16,9 @@ ## Bug Fixes and Other Changes * Fix placeholder mapping error in v2. 
[\#6794](https://github.com/kubeflow/pipelines/pull/6794) -* Depends on `kfp-pipeline-spec>=0.1.13,<0.2.0` [\#6803](https://github.com/kubeflow/pipelines/pull/6803) * Add `OnTransientError` to allowed retry policies [\#6808](https://github.com/kubeflow/pipelines/pull/6808) +* Add optional `filter` argument to list methods of KFP client [\#6748](https://github.com/kubeflow/pipelines/pull/6748) +* Depends on `kfp-pipeline-spec>=0.1.13,<0.2.0` [\#6803](https://github.com/kubeflow/pipelines/pull/6803) ## Documentation Updates diff --git a/sdk/python/kfp/_client.py b/sdk/python/kfp/_client.py index 1932a4db781..d2c19788f74 100644 --- a/sdk/python/kfp/_client.py +++ b/sdk/python/kfp/_client.py @@ -506,7 +506,8 @@ def list_experiments(self, page_token='', page_size=10, sort_by='', - namespace=None): + namespace=None, + filter=None): """List experiments. Args: @@ -516,6 +517,8 @@ def list_experiments(self, namespace: Kubernetes namespace where the experiment was created. For single user deployment, leave it as None; For multi user, input a namespace where the user is authorized. + filter: A url-encoded, JSON-serialized Filter protocol buffer + (see [filter.proto](https://github.com/kubeflow/pipelines/blob/master/backend/api/filter.proto)). Returns: A response object including a list of experiments and next page token. @@ -527,7 +530,8 @@ def list_experiments(self, sort_by=sort_by, resource_reference_key_type=kfp_server_api.models.api_resource_type .ApiResourceType.NAMESPACE, - resource_reference_key_id=namespace) + resource_reference_key_id=namespace, + filter=filter) return response def get_experiment(self, @@ -644,19 +648,24 @@ def _override_caching_options(self, workflow: dict, enable_caching: bool): 'pipelines.kubeflow.org/enable_caching'] = str( enable_caching).lower() - def list_pipelines(self, page_token='', page_size=10, sort_by=''): + def list_pipelines(self, page_token='', page_size=10, sort_by='', filter=None): """List pipelines. Args: page_token: Token for starting of the page. page_size: Size of the page. sort_by: one of 'field_name', 'field_name desc'. For example, 'name desc'. + filter: A url-encoded, JSON-serialized Filter protocol buffer + (see [filter.proto](https://github.com/kubeflow/pipelines/blob/master/backend/api/filter.proto)). Returns: A response object including a list of pipelines and next page token. """ return self._pipelines_api.list_pipelines( - page_token=page_token, page_size=page_size, sort_by=sort_by) + page_token=page_token, + page_size=page_size, + sort_by=sort_by, + filter=filter) # TODO: provide default namespace, similar to kubectl default namespaces. def run_pipeline( @@ -1089,7 +1098,8 @@ def list_runs(self, page_size=10, sort_by='', experiment_id=None, - namespace=None): + namespace=None, + filter=None): """List runs, optionally can be filtered by experiment or namespace. Args: @@ -1100,6 +1110,8 @@ def list_runs(self, namespace: Kubernetes namespace to filter upon. For single user deployment, leave it as None; For multi user, input a namespace where the user is authorized. + filter: A url-encoded, JSON-serialized Filter protocol buffer + (see [filter.proto](https://github.com/kubeflow/pipelines/blob/master/backend/api/filter.proto)). Returns: A response object including a list of experiments and next page token. 
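# A usage sketch for the new `filter` argument (values are illustrative): the
# argument is a JSON-serialized Filter message from filter.proto, where op 1
# means EQUALS.
import json
import kfp

client = kfp.Client()
name_filter = json.dumps({
    'predicates': [{
        'key': 'name',
        'op': 1,  # EQUALS
        'string_value': 'my-run',
    }]
})
runs = client.list_runs(page_size=5, filter=name_filter)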
@@ -1112,7 +1124,8 @@ def list_runs(self,
                 sort_by=sort_by,
                 resource_reference_key_type=kfp_server_api.models
                 .api_resource_type.ApiResourceType.EXPERIMENT,
-                resource_reference_key_id=experiment_id)
+                resource_reference_key_id=experiment_id,
+                filter=filter)
         elif namespace:
             response = self._run_api.list_runs(
                 page_token=page_token,
@@ -1120,17 +1133,22 @@ def list_runs(self,
                 sort_by=sort_by,
                 resource_reference_key_type=kfp_server_api.models
                 .api_resource_type.ApiResourceType.NAMESPACE,
-                resource_reference_key_id=namespace)
+                resource_reference_key_id=namespace,
+                filter=filter)
         else:
             response = self._run_api.list_runs(
-                page_token=page_token, page_size=page_size, sort_by=sort_by)
+                page_token=page_token,
+                page_size=page_size,
+                sort_by=sort_by,
+                filter=filter)
         return response
 
     def list_recurring_runs(self,
                             page_token='',
                             page_size=10,
                             sort_by='',
-                            experiment_id=None):
+                            experiment_id=None,
+                            filter=None):
         """List recurring runs.
 
         Args:
@@ -1138,6 +1156,8 @@ def list_recurring_runs(self,
             page_size: Size of the page.
             sort_by: One of 'field_name', 'field_name desc'. For example, 'name desc'.
             experiment_id: Experiment id to filter upon.
+            filter: A url-encoded, JSON-serialized Filter protocol buffer
+                (see [filter.proto](https://github.com/kubeflow/pipelines/blob/master/backend/api/filter.proto)).
 
         Returns:
             A response object including a list of recurring_runs and next page token.
@@ -1149,10 +1169,14 @@ def list_recurring_runs(self,
                 sort_by=sort_by,
                 resource_reference_key_type=kfp_server_api.models
                 .api_resource_type.ApiResourceType.EXPERIMENT,
-                resource_reference_key_id=experiment_id)
+                resource_reference_key_id=experiment_id,
+                filter=filter)
         else:
             response = self._job_api.list_jobs(
-                page_token=page_token, page_size=page_size, sort_by=sort_by)
+                page_token=page_token,
+                page_size=page_size,
+                sort_by=sort_by,
+                filter=filter)
         return response
 
     def get_recurring_run(self, job_id):
From 41275b42287542642cccf7f4a5f45104a99b70b6 Mon Sep 17 00:00:00 2001
From: dinimicky
Date: Thu, 28 Oct 2021 16:33:00 +0800
Subject: [PATCH 19/31] fix(sdk): fix the specified
 'mlpipeline-ui-metadata'/'mlpipeline-metrics' paths being overridden by
 default values (#6796)

* fix the issue where the 'mlpipeline-ui-metadata' and 'mlpipeline-metrics' paths were replaced by the default path from '_components._generate_output_file_name(output.name)'
* reformat code
* Update _container_op.py
* Update RELEASE.md
---
 sdk/RELEASE.md                      |  1 +
 sdk/python/kfp/dsl/_container_op.py | 10 ++++++++--
 2 files changed, 9 insertions(+), 2 deletions(-)

diff --git a/sdk/RELEASE.md b/sdk/RELEASE.md
index 31b715af90f..0212deeeea6 100644
--- a/sdk/RELEASE.md
+++ b/sdk/RELEASE.md
@@ -15,6 +15,7 @@
 
 ## Bug Fixes and Other Changes
 
+* Fix user-specified 'mlpipeline-ui-metadata' and 'mlpipeline-metrics' paths being overridden by default values [\#6796](https://github.com/kubeflow/pipelines/pull/6796)
 * Fix placeholder mapping error in v2. 
[\#6794](https://github.com/kubeflow/pipelines/pull/6794) * Add `OnTransientError` to allowed retry policies [\#6808](https://github.com/kubeflow/pipelines/pull/6808) * Add optional `filter` argument to list methods of KFP client [\#6748](https://github.com/kubeflow/pipelines/pull/6748) diff --git a/sdk/python/kfp/dsl/_container_op.py b/sdk/python/kfp/dsl/_container_op.py index f48c13c28f4..9284c85c96e 100644 --- a/sdk/python/kfp/dsl/_container_op.py +++ b/sdk/python/kfp/dsl/_container_op.py @@ -1441,10 +1441,16 @@ def _set_metadata(self, self.outputs.update(declared_outputs) for output in self._metadata.outputs: - if output.name not in self.file_outputs: + if output.name in self.file_outputs: + continue + is_legacy_name, normalized_name = _is_legacy_output_name( + output.name) + if is_legacy_name and normalized_name in self.output_artifact_paths: + output_filename = self.output_artifact_paths[normalized_name] + else: output_filename = _components._generate_output_file_name( output.name) - self.file_outputs[output.name] = output_filename + self.file_outputs[output.name] = output_filename if not kfp.COMPILING_FOR_V2: for output_name, path in dict(self.file_outputs).items(): From 3e6c776360f1bbdbaf78bf1a3c0cd1c2662acf3d Mon Sep 17 00:00:00 2001 From: Omar Marzouk Date: Thu, 28 Oct 2021 19:27:57 +0200 Subject: [PATCH 20/31] feat(sdk): Add load_component_from_spec. Fixes #5708 #3748 (#6690) * add load_component_from_spec * update release.md * update RELEASE.md * update RELEASE.md --- sdk/RELEASE.md | 1 + sdk/python/kfp/components/_components.py | 21 ++++++++++++++++++++- 2 files changed, 21 insertions(+), 1 deletion(-) diff --git a/sdk/RELEASE.md b/sdk/RELEASE.md index 0212deeeea6..e9bca58f7c4 100644 --- a/sdk/RELEASE.md +++ b/sdk/RELEASE.md @@ -66,6 +66,7 @@ * Add v2 placeholder variables [\#6693](https://github.com/kubeflow/pipelines/pull/6693) * Add a new command in KFP's CLI, `components`, that enables users to manage and build v2 components in a container with Docker [\#6417](https://github.com/kubeflow/pipelines/pull/6417) +* Add `load_component_from_spec` for SDK v1 which brings back the ability to build components directly in python, using `ComponentSpec` [\#6690](https://github.com/kubeflow/pipelines/pull/6690) ## Breaking Changes diff --git a/sdk/python/kfp/components/_components.py b/sdk/python/kfp/components/_components.py index 5fbe2320cac..325a8f672b1 100644 --- a/sdk/python/kfp/components/_components.py +++ b/sdk/python/kfp/components/_components.py @@ -33,7 +33,7 @@ _default_component_name = 'Component' -def load_component(filename=None, url=None, text=None): +def load_component(filename=None, url=None, text=None, component_spec=None): """Loads component from text, file or URL and creates a task factory function. @@ -43,6 +43,7 @@ def load_component(filename=None, url=None, text=None): filename: Path of local file containing the component definition. url: The URL of the component file data. text: A string containing the component file data. + component_spec: A ComponentSpec containing the component definition. Returns: A factory function with a strongly-typed signature. 
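# A minimal usage sketch for the new spec-based loading (imports reference the
# v1 internal modules, since this patch only touches _components.py; names and
# values are illustrative):
from kfp.components._components import load_component_from_spec
from kfp.components._structures import (ComponentSpec, ContainerImplementation,
                                        ContainerSpec)

echo_op = load_component_from_spec(
    ComponentSpec(
        name='Echo',
        implementation=ContainerImplementation(
            container=ContainerSpec(image='alpine',
                                    command=['echo', 'hello']))))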
@@ -61,6 +62,8 @@ def load_component(filename=None, url=None, text=None):
         return load_component_from_url(url)
     elif text:
         return load_component_from_text(text)
+    elif component_spec:
+        return load_component_from_spec(component_spec)
     else:
         raise ValueError('Need to specify a source')
 
@@ -120,6 +123,22 @@ def load_component_from_text(text):
         component_spec=component_spec)
 
 
+def load_component_from_spec(component_spec):
+    """Loads component from a ComponentSpec and creates a task factory function.
+
+    Args:
+        component_spec: A ComponentSpec containing the component definition.
+
+    Returns:
+        A factory function with a strongly-typed signature.
+        Once called with the required arguments, the factory constructs a pipeline task instance (ContainerOp).
+    """
+    if component_spec is None:
+        raise TypeError
+    return _create_task_factory_from_component_spec(
+        component_spec=component_spec)
+
+
 def _fix_component_uri(uri: str) -> str:
     #Handling Google Cloud Storage URIs
     if uri.startswith('gs://'):
From 0c40154f6de8adb3632a83a4f94d49358e7a1e30 Mon Sep 17 00:00:00 2001
From: Yaqi Ji
Date: Thu, 28 Oct 2021 10:53:26 -0700
Subject: [PATCH 21/31] chore: Release KFP SDK and v2 launcher 1.8.7 (#6823)

* Update RELEASE.md

* Update __init__.py

* Update v2_compat.py
---
 sdk/RELEASE.md                       | 26 ++++++++++++++++++++++++++
 sdk/python/kfp/__init__.py           |  2 +-
 sdk/python/kfp/compiler/v2_compat.py |  2 +-
 3 files changed, 28 insertions(+), 2 deletions(-)

diff --git a/sdk/RELEASE.md b/sdk/RELEASE.md
index e9bca58f7c4..984fc863e8e 100644
--- a/sdk/RELEASE.md
+++ b/sdk/RELEASE.md
@@ -2,17 +2,41 @@
 
 ## Major Features and Improvements
 
+## Breaking Changes
+
+### For Pipeline Authors
+
+### For Component Authors
+
+## Deprecations
+
+## Bug Fixes and Other Changes
+
+## Documentation Updates
+
+# 1.8.7
+
+## Major Features and Improvements
+
 * Add optional support to specify description for pipeline version [\#6472](https://github.com/kubeflow/pipelines/issues/6472).
 * New v2 experimental compiler. [\#6803](https://github.com/kubeflow/pipelines/pull/6803)
 
 ## Breaking Changes
 
+* N/A
+
 ### For Pipeline Authors
 
+* N/A
+
 ### For Component Authors
 
+* N/A
+
 ## Deprecations
 
+* N/A
+
 ## Bug Fixes and Other Changes
 
 * Fix user-specified 'mlpipeline-ui-metadata' and 'mlpipeline-metrics' paths being overridden by default values [\#6796](https://github.com/kubeflow/pipelines/pull/6796)
 * Fix placeholder mapping error in v2. [\#6794](https://github.com/kubeflow/pipelines/pull/6794)
 * Add `OnTransientError` to allowed retry policies [\#6808](https://github.com/kubeflow/pipelines/pull/6808)
 * Add optional `filter` argument to list methods of KFP client [\#6748](https://github.com/kubeflow/pipelines/pull/6748)
 * Depends on `kfp-pipeline-spec>=0.1.13,<0.2.0` [\#6803](https://github.com/kubeflow/pipelines/pull/6803)
 
 ## Documentation Updates
 
+* N/A
+
 # 1.8.6
 
 ## Major Features and Improvements
 
diff --git a/sdk/python/kfp/__init__.py b/sdk/python/kfp/__init__.py
index b050a9a4d7a..b6b280508e0 100644
--- a/sdk/python/kfp/__init__.py
+++ b/sdk/python/kfp/__init__.py
@@ -16,7 +16,7 @@
 # https://packaging.python.org/guides/packaging-namespace-packages/#pkgutil-style-namespace-packages
 __path__ = __import__("pkgutil").extend_path(__path__, __name__)
 
-__version__ = '1.8.6'
+__version__ = '1.8.7'
 
 from . import components
 from . 
import containers diff --git a/sdk/python/kfp/compiler/v2_compat.py b/sdk/python/kfp/compiler/v2_compat.py index 307e4352bb8..d96d045efe0 100644 --- a/sdk/python/kfp/compiler/v2_compat.py +++ b/sdk/python/kfp/compiler/v2_compat.py @@ -22,7 +22,7 @@ from kfp.v2 import compiler from kubernetes import client as k8s_client -_DEFAULT_LAUNCHER_IMAGE = "gcr.io/ml-pipeline/kfp-launcher:1.8.6" +_DEFAULT_LAUNCHER_IMAGE = "gcr.io/ml-pipeline/kfp-launcher:1.8.7" def update_op(op: dsl.ContainerOp, From 0be57c38802b85d0b09b703e60b1b3131a1b9863 Mon Sep 17 00:00:00 2001 From: Ajay Gopinathan Date: Thu, 28 Oct 2021 14:25:51 -0700 Subject: [PATCH 22/31] feat(sdk): Use google.protobuf.Value in v2 for passing parameters. (#6804) * Use google.protobuf.Value in v2 for passing parameters. * retest samples. * Fix tests. * Update release, more cleanup. * Use github.com/kubeflow/pipelines/api from same repo. * Run go mod tidy * chore: go mod tidy * fix v2 compile error and clean up unused code * pr comments. * update goldens * Fix metadata recording. * Update kfp mlmd client. * fix test again * another try. * chore: migrate v2 DAG driver input parameters to protobuf.Value + small refactorings * fix v2 launcher + clean up * fix a compile error * fix a few more tests * fix number parsing * clean up * disable cache_v2 test. Co-authored-by: Yuan Gong --- api/go.mod | 5 +- api/go.sum | 116 ++++++++++- api/v2alpha1/cache_key.proto | 4 +- api/v2alpha1/go/cachekey/cache_key.pb.go | 189 +++++++++-------- samples/test/config.yaml | 6 +- ...eight_python_functions_v2_pipeline_test.py | 34 ++-- samples/test/util.py | 169 ++++++++------- sdk/RELEASE.md | 3 +- .../v2_compatible_two_step_pipeline.yaml | 24 +-- ...wo_step_pipeline_with_custom_launcher.yaml | 20 +- sdk/python/kfp/compiler/v2_compat.py | 17 +- sdk/python/kfp/dsl/_component_bridge.py | 56 +++-- sdk/python/kfp/dsl/component_spec.py | 9 +- sdk/python/kfp/dsl/component_spec_test.py | 38 ++-- sdk/python/kfp/dsl/type_utils.py | 1 - sdk/python/kfp/v2/compiler/compiler.py | 147 +++++++++++--- sdk/python/kfp/v2/compiler/compiler_utils.py | 35 ++-- .../kfp/v2/compiler/compiler_utils_test.py | 14 +- ...htweight_python_functions_v2_pipeline.json | 53 ++--- ...ight_python_functions_v2_with_outputs.json | 38 ++-- .../test_data/pipeline_with_after.json | 22 +- .../pipeline_with_concat_placeholder.json | 10 +- .../test_data/pipeline_with_condition.json | 28 ++- .../pipeline_with_custom_job_spec.json | 20 +- .../test_data/pipeline_with_env.json | 6 +- .../test_data/pipeline_with_exit_handler.json | 34 ++-- .../test_data/pipeline_with_gcpc_types.json | 6 +- .../pipeline_with_if_placeholder.json | 14 +- .../test_data/pipeline_with_importer.json | 34 ++-- .../test_data/pipeline_with_loops.json | 48 ++--- .../pipeline_with_loops_and_conditions.json | 192 +++++++++--------- .../pipeline_with_metrics_outputs.json | 10 +- .../pipeline_with_nested_conditions.json | 34 ++-- .../pipeline_with_nested_conditions_yaml.json | 66 +++--- .../test_data/pipeline_with_nested_loops.json | 65 ++++-- .../test_data/pipeline_with_ontology.json | 30 ++- ...ipeline_with_params_containing_format.json | 42 ++-- .../pipeline_with_resource_spec.json | 30 ++- .../pipeline_with_reused_component.json | 44 ++-- .../pipeline_with_various_io_types.json | 32 ++- .../test_data/two_step_pipeline.json | 14 +- .../v2_component_with_optional_inputs.json | 18 +- .../test_data/xgboost_sample_pipeline.json | 136 +++++-------- sdk/python/kfp/v2/components/executor.py | 31 ++- sdk/python/kfp/v2/components/executor_main.py | 3 + 
sdk/python/kfp/v2/components/executor_test.py | 162 +++++---------- .../kfp/v2/components/types/type_utils.py | 89 ++++---- .../v2/components/types/type_utils_test.py | 46 ++--- v2/cacheutils/cache.go | 35 +--- v2/component/launcher.go | 147 +++++++------- v2/component/launcher_v2.go | 48 ++--- v2/component/runtime_info.go | 54 +++-- v2/component/runtime_info_test.go | 33 ++- v2/driver/driver.go | 83 +++----- v2/go.mod | 8 +- v2/go.sum | 22 +- v2/metadata/client.go | 93 +++------ v2/metadata/converter.go | 85 +++++++- v2/metadata/model.go | 42 ++-- v2/test/Makefile | 2 +- 60 files changed, 1524 insertions(+), 1372 deletions(-) diff --git a/api/go.mod b/api/go.mod index ae6924db045..b147b9cdf79 100644 --- a/api/go.mod +++ b/api/go.mod @@ -2,4 +2,7 @@ module github.com/kubeflow/pipelines/api go 1.16 -require google.golang.org/protobuf v1.27.1 +require ( + google.golang.org/genproto v0.0.0-20211026145609-4688e4c4e024 + google.golang.org/protobuf v1.27.1 +) diff --git a/api/go.sum b/api/go.sum index 03b1917b5a4..700a627369e 100644 --- a/api/go.sum +++ b/api/go.sum @@ -1,8 +1,122 @@ +cloud.google.com/go v0.26.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw= +cloud.google.com/go v0.34.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw= +github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU= +github.com/OneOfOne/xxhash v1.2.2/go.mod h1:HSdplMjZKSmBqAxg5vPj2TmRDmfkzw+cTzAElWljhcU= +github.com/antihax/optional v1.0.0/go.mod h1:uupD/76wgC+ih3iEmQUL+0Ugr19nfwCT1kdvxnR2qWY= +github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU= +github.com/cespare/xxhash v1.1.0/go.mod h1:XrSqR1VqqWfGrhpAt58auRo0WTKS1nRRg3ghfAqPWnc= +github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDkc90ppPyw= +github.com/cncf/udpa/go v0.0.0-20191209042840-269d4d468f6f/go.mod h1:M8M6+tZqaGXZJjfX53e64911xZQV5JYwmTeXPW+k8Sc= +github.com/cncf/udpa/go v0.0.0-20201120205902-5459f2c99403/go.mod h1:WmhPx2Nbnhtbo57+VJT5O0JRkEi1Wbu0z5j0R8u5Hbk= +github.com/cncf/xds/go v0.0.0-20210312221358-fbca930ec8ed/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= +github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/envoyproxy/go-control-plane v0.9.0/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4= +github.com/envoyproxy/go-control-plane v0.9.1-0.20191026205805-5f8ba28d4473/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4= +github.com/envoyproxy/go-control-plane v0.9.4/go.mod h1:6rpuAdCZL397s3pYoYcLgu1mIlRU8Am5FuJP05cCM98= +github.com/envoyproxy/go-control-plane v0.9.9-0.20201210154907-fd9021fe5dad/go.mod h1:cXg6YxExXjJnVBQHBLXeUAgxn2UodCpnH306RInaBQk= +github.com/envoyproxy/go-control-plane v0.9.9-0.20210512163311-63b5d3c536b0/go.mod h1:hliV/p42l8fGbc6Y9bQ70uLwIvmJyVE5k4iMKlh8wCQ= +github.com/envoyproxy/protoc-gen-validate v0.1.0/go.mod h1:iSmxcyjqTsJpI2R4NaDN7+kN2VEUnK/pcBlmesArF7c= +github.com/ghodss/yaml v1.0.0/go.mod h1:4dBDuWmgqj2HViK6kFavaiC9ZROes6MMH2rRYeMEF04= +github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b/go.mod h1:SBH7ygxi8pfUlaOkMMuAQtPIUF8ecWP5IEl/CR7VP2Q= +github.com/golang/mock v1.1.1/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A= +github.com/golang/protobuf v1.2.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= +github.com/golang/protobuf v1.3.2/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= +github.com/golang/protobuf v1.3.3/go.mod h1:vzj43D7+SQXF/4pzW/hwtAqwc6iTitCiVSaWz5lYuqw= +github.com/golang/protobuf 
v1.4.0-rc.1/go.mod h1:ceaxUfeHdC40wWswd/P6IGgMaK3YpKi5j83Wpe3EHw8= +github.com/golang/protobuf v1.4.0-rc.1.0.20200221234624-67d41d38c208/go.mod h1:xKAWHe0F5eneWXFV3EuXVDTCmh+JuBKY0li0aMyXATA= +github.com/golang/protobuf v1.4.0-rc.2/go.mod h1:LlEzMj4AhA7rCAGe4KMBDvJI+AwstrUpVNzEA03Pprs= +github.com/golang/protobuf v1.4.0-rc.4.0.20200313231945-b860323f09d0/go.mod h1:WU3c8KckQ9AFe+yFwt9sWVRKCVIyN9cPHBJSNnbL67w= +github.com/golang/protobuf v1.4.0/go.mod h1:jodUvKwWbYaEsadDk5Fwe5c77LiNKVO9IDvqG2KuDX0= +github.com/golang/protobuf v1.4.1/go.mod h1:U8fpvMrcmy5pZrNK1lt4xCsGvpyWQ/VVv6QDs8UjoX8= +github.com/golang/protobuf v1.4.2/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw735rRwI= +github.com/golang/protobuf v1.4.3/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw735rRwI= github.com/golang/protobuf v1.5.0/go.mod h1:FsONVRAS9T7sI+LIUmWTfcYkHO4aIWwzhcaSAoJOfIk= +github.com/golang/protobuf v1.5.2/go.mod h1:XVQd3VNwM+JqD3oG2Ue2ip4fOMUkwXdXDdiuN0vRsmY= +github.com/google/go-cmp v0.2.0/go.mod h1:oXzfMopK8JAjlY9xF4vHSVASa0yLyX7SntLO5aqRK0M= +github.com/google/go-cmp v0.3.0/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU= +github.com/google/go-cmp v0.3.1/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU= +github.com/google/go-cmp v0.4.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.5.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.5.5 h1:Khx7svrCpmxxtHBq5j2mp/xVjsi8hQMfNLvJFAlrGgU= github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= -golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543 h1:E7g+9GITq07hpfrRu66IVDexMakfv52eLZ2CXBWiKr4= +github.com/google/uuid v1.1.2/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= +github.com/grpc-ecosystem/grpc-gateway v1.16.0/go.mod h1:BDjrQk3hbvj6Nolgz8mAMFbcEtjT1g+wF4CSlocrBnw= +github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= +github.com/prometheus/client_model v0.0.0-20190812154241-14fe0d1b01d4/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= +github.com/rogpeppe/fastuuid v1.2.0/go.mod h1:jVj6XXZzXRy/MSR5jhDC/2q6DgLz+nrA6LYCDYWNEvQ= +github.com/spaolacci/murmur3 v0.0.0-20180118202830-f09979ecbc72/go.mod h1:JwIasOWyU6f++ZhiEuf87xNszmSA2myDM2Kzu9HwQUA= +github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= +github.com/stretchr/testify v1.5.1/go.mod h1:5W2xD1RspED5o8YsWQXVCued0rvSQ+mT+I5cxcmMvtA= +go.opentelemetry.io/proto/otlp v0.7.0/go.mod h1:PqfVotwruBrMGOCsRd/89rSnXhoiJIqeYNgFYFoEGnI= +golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= +golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= +golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= +golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE= +golang.org/x/lint v0.0.0-20190227174305-5b3e6a55c961/go.mod h1:wehouNa3lNwaWXcvxsM5YxQ5yQlVC4a0KAMCusXpPoU= +golang.org/x/lint v0.0.0-20190313153728-d0100b6bd8b3/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= +golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20190108225652-1e06a53dbb7e/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net 
v0.0.0-20190213061140-3a22650c66bd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20190311183353-d8887717615a/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= +golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= +golang.org/x/net v0.0.0-20200822124328-c89045814202/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA= +golang.org/x/net v0.0.0-20210405180319-a5a99cb37ef4/go.mod h1:p54w0d4576C0XHj96bSt6lcn1PtDYWL6XObtHCRCNQM= +golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= +golang.org/x/oauth2 v0.0.0-20200107190931-bf48bf16ab8d/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= +golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200323222414-85ca7c5b95cd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210330210617-4fbd30eecc44/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210510120138-977fb7262007/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= +golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= +golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= +golang.org/x/text v0.3.5/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= +golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= +golang.org/x/tools v0.0.0-20190114222345-bf090417da8b/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= +golang.org/x/tools v0.0.0-20190226205152-f727befe758c/go.mod h1:9Yl7xja0Znq3iFh3HoIrodX9oNMXvdceNzlUR8zjMvY= +golang.org/x/tools v0.0.0-20190311212946-11955173bddd/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= +golang.org/x/tools v0.0.0-20190524140312-2c0ae7006135/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1 h1:go1bK/D/BFZV2I8cIQd1NKEZ+0owSTG1fDTci4IqFcE= +golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM= +google.golang.org/appengine v1.4.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= +google.golang.org/genproto v0.0.0-20180817151627-c66870c02cf8/go.mod h1:JiN7NxoALGmiZfu7CAH4rXhgtRTLTxftemlI0sWmxmc= +google.golang.org/genproto v0.0.0-20190819201941-24fa4b261c55/go.mod 
h1:DMBHOl98Agz4BDEuKkezgsaosCRResVns1a3J2ZsMNc= +google.golang.org/genproto v0.0.0-20200513103714-09dca8ec2884/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200526211855-cb27e3aa2013/go.mod h1:NbSheEEYHJ7i3ixzK3sjbqSGDJWnxyFXZblF3eUsNvo= +google.golang.org/genproto v0.0.0-20211026145609-4688e4c4e024 h1:aePO4E0x+Urj9V5NQHjqOpaNG4oMeHQq0l2ob05z5tI= +google.golang.org/genproto v0.0.0-20211026145609-4688e4c4e024/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= +google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c= +google.golang.org/grpc v1.23.0/go.mod h1:Y5yQAOtifL1yxbo5wqy6BxZv8vAUGQwXBOALyacEbxg= +google.golang.org/grpc v1.25.1/go.mod h1:c3i+UQWmh7LiEpx4sFZnkU36qjEYZ0imhYfXVyQciAY= +google.golang.org/grpc v1.27.0/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk= +google.golang.org/grpc v1.33.1/go.mod h1:fr5YgcSWrqhRRxogOsw7RzIpsmvOZ6IcH4kBYTpR3n0= +google.golang.org/grpc v1.36.0/go.mod h1:qjiiYl8FncCW8feJPdyg3v6XW24KsRHe+dy9BAGRRjU= +google.golang.org/grpc v1.40.0/go.mod h1:ogyxbiOoUXAkP+4+xa6PZSE9DZgIHtSpzjDTB9KAK34= +google.golang.org/protobuf v0.0.0-20200109180630-ec00e32a8dfd/go.mod h1:DFci5gLYBciE7Vtevhsrf46CRTquxDuWsQurQQe4oz8= +google.golang.org/protobuf v0.0.0-20200221191635-4d8936d0db64/go.mod h1:kwYJMbMJ01Woi6D6+Kah6886xMZcty6N08ah7+eCXa0= +google.golang.org/protobuf v0.0.0-20200228230310-ab0ca4ff8a60/go.mod h1:cfTl7dwQJ+fmap5saPgwCLgHXTUD7jkjRqWcaiX5VyM= +google.golang.org/protobuf v1.20.1-0.20200309200217-e05f789c0967/go.mod h1:A+miEFZTKqfCUM6K7xSMQL9OKL/b6hQv+e19PK+JZNE= +google.golang.org/protobuf v1.21.0/go.mod h1:47Nbq4nVaFHyn7ilMalzfO3qCViNmqZ2kzikPIcrTAo= +google.golang.org/protobuf v1.22.0/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU= +google.golang.org/protobuf v1.23.0/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU= +google.golang.org/protobuf v1.23.1-0.20200526195155-81db48ad09cc/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU= +google.golang.org/protobuf v1.25.0/go.mod h1:9JNX74DMeImyA3h4bdi1ymwjUzf21/xIlbajtzgsN7c= google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp09yW+WbY/TyQbw= +google.golang.org/protobuf v1.26.0/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc= google.golang.org/protobuf v1.27.1 h1:SnqbnDw1V7RiZcXPx5MEeqPv2s79L9i7BJUlG/+RurQ= google.golang.org/protobuf v1.27.1/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc= +gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.2.3/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +honnef.co/go/tools v0.0.0-20190102054323-c2f93a96b099/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= +honnef.co/go/tools v0.0.0-20190523083050-ea95bdfd59fc/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= diff --git a/api/v2alpha1/cache_key.proto b/api/v2alpha1/cache_key.proto index 396828138d7..cc071f99cf9 100644 --- a/api/v2alpha1/cache_key.proto +++ b/api/v2alpha1/cache_key.proto @@ -17,7 +17,8 @@ syntax = "proto3"; option go_package = "github.com/kubeflow/pipelines/api/v2alpha1/go/cachekey"; package ml_pipelines; -import "google/protobuf/any.proto"; +// import "google/protobuf/any.proto"; +import "google/protobuf/struct.proto"; import "pipeline_spec.proto"; message CacheKey { @@ -26,6 +27,7 @@ message CacheKey { map outputArtifactsSpec = 3; map outputParametersSpec=4; ContainerSpec containerSpec=5; + map 
input_parameter_values = 6; } message ContainerSpec { diff --git a/api/v2alpha1/go/cachekey/cache_key.pb.go b/api/v2alpha1/go/cachekey/cache_key.pb.go index cd32c176c04..ba70352ca51 100644 --- a/api/v2alpha1/go/cachekey/cache_key.pb.go +++ b/api/v2alpha1/go/cachekey/cache_key.pb.go @@ -24,7 +24,7 @@ import ( pipelinespec "github.com/kubeflow/pipelines/api/v2alpha1/go/pipelinespec" protoreflect "google.golang.org/protobuf/reflect/protoreflect" protoimpl "google.golang.org/protobuf/runtime/protoimpl" - _ "google.golang.org/protobuf/types/known/anypb" + structpb "google.golang.org/protobuf/types/known/structpb" reflect "reflect" sync "sync" ) @@ -46,6 +46,7 @@ type CacheKey struct { OutputArtifactsSpec map[string]*pipelinespec.RuntimeArtifact `protobuf:"bytes,3,rep,name=outputArtifactsSpec,proto3" json:"outputArtifactsSpec,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` OutputParametersSpec map[string]string `protobuf:"bytes,4,rep,name=outputParametersSpec,proto3" json:"outputParametersSpec,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` ContainerSpec *ContainerSpec `protobuf:"bytes,5,opt,name=containerSpec,proto3" json:"containerSpec,omitempty"` + InputParameterValues map[string]*structpb.Value `protobuf:"bytes,6,rep,name=input_parameter_values,json=inputParameterValues,proto3" json:"input_parameter_values,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` } func (x *CacheKey) Reset() { @@ -115,6 +116,13 @@ func (x *CacheKey) GetContainerSpec() *ContainerSpec { return nil } +func (x *CacheKey) GetInputParameterValues() map[string]*structpb.Value { + if x != nil { + return x.InputParameterValues + } + return nil +} + type ContainerSpec struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache @@ -222,73 +230,86 @@ var File_cache_key_proto protoreflect.FileDescriptor var file_cache_key_proto_rawDesc = []byte{ 0x0a, 0x0f, 0x63, 0x61, 0x63, 0x68, 0x65, 0x5f, 0x6b, 0x65, 0x79, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x12, 0x0c, 0x6d, 0x6c, 0x5f, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x1a, - 0x19, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, - 0x2f, 0x61, 0x6e, 0x79, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x13, 0x70, 0x69, 0x70, 0x65, - 0x6c, 0x69, 0x6e, 0x65, 0x5f, 0x73, 0x70, 0x65, 0x63, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x22, - 0xbd, 0x06, 0x0a, 0x08, 0x43, 0x61, 0x63, 0x68, 0x65, 0x4b, 0x65, 0x79, 0x12, 0x5e, 0x0a, 0x12, - 0x69, 0x6e, 0x70, 0x75, 0x74, 0x41, 0x72, 0x74, 0x69, 0x66, 0x61, 0x63, 0x74, 0x4e, 0x61, 0x6d, - 0x65, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x2e, 0x2e, 0x6d, 0x6c, 0x5f, 0x70, 0x69, + 0x1c, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, + 0x2f, 0x73, 0x74, 0x72, 0x75, 0x63, 0x74, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x13, 0x70, + 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x5f, 0x73, 0x70, 0x65, 0x63, 0x2e, 0x70, 0x72, 0x6f, + 0x74, 0x6f, 0x22, 0x86, 0x08, 0x0a, 0x08, 0x43, 0x61, 0x63, 0x68, 0x65, 0x4b, 0x65, 0x79, 0x12, + 0x5e, 0x0a, 0x12, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x41, 0x72, 0x74, 0x69, 0x66, 0x61, 0x63, 0x74, + 0x4e, 0x61, 0x6d, 0x65, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x2e, 0x2e, 0x6d, 0x6c, + 0x5f, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2e, 0x43, 0x61, 0x63, 0x68, 0x65, + 0x4b, 0x65, 0x79, 0x2e, 0x49, 0x6e, 0x70, 0x75, 0x74, 0x41, 0x72, 0x74, 0x69, 0x66, 
0x61, 0x63, + 0x74, 0x4e, 0x61, 0x6d, 0x65, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, 0x12, 0x69, 0x6e, 0x70, + 0x75, 0x74, 0x41, 0x72, 0x74, 0x69, 0x66, 0x61, 0x63, 0x74, 0x4e, 0x61, 0x6d, 0x65, 0x73, 0x12, + 0x55, 0x0a, 0x0f, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, + 0x72, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x2b, 0x2e, 0x6d, 0x6c, 0x5f, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2e, 0x43, 0x61, 0x63, 0x68, 0x65, 0x4b, 0x65, 0x79, - 0x2e, 0x49, 0x6e, 0x70, 0x75, 0x74, 0x41, 0x72, 0x74, 0x69, 0x66, 0x61, 0x63, 0x74, 0x4e, 0x61, - 0x6d, 0x65, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, 0x12, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x41, - 0x72, 0x74, 0x69, 0x66, 0x61, 0x63, 0x74, 0x4e, 0x61, 0x6d, 0x65, 0x73, 0x12, 0x55, 0x0a, 0x0f, - 0x69, 0x6e, 0x70, 0x75, 0x74, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x73, 0x18, - 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x2b, 0x2e, 0x6d, 0x6c, 0x5f, 0x70, 0x69, 0x70, 0x65, 0x6c, - 0x69, 0x6e, 0x65, 0x73, 0x2e, 0x43, 0x61, 0x63, 0x68, 0x65, 0x4b, 0x65, 0x79, 0x2e, 0x49, 0x6e, - 0x70, 0x75, 0x74, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x73, 0x45, 0x6e, 0x74, - 0x72, 0x79, 0x52, 0x0f, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, - 0x65, 0x72, 0x73, 0x12, 0x61, 0x0a, 0x13, 0x6f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x41, 0x72, 0x74, - 0x69, 0x66, 0x61, 0x63, 0x74, 0x73, 0x53, 0x70, 0x65, 0x63, 0x18, 0x03, 0x20, 0x03, 0x28, 0x0b, - 0x32, 0x2f, 0x2e, 0x6d, 0x6c, 0x5f, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2e, - 0x43, 0x61, 0x63, 0x68, 0x65, 0x4b, 0x65, 0x79, 0x2e, 0x4f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x41, - 0x72, 0x74, 0x69, 0x66, 0x61, 0x63, 0x74, 0x73, 0x53, 0x70, 0x65, 0x63, 0x45, 0x6e, 0x74, 0x72, - 0x79, 0x52, 0x13, 0x6f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x41, 0x72, 0x74, 0x69, 0x66, 0x61, 0x63, - 0x74, 0x73, 0x53, 0x70, 0x65, 0x63, 0x12, 0x64, 0x0a, 0x14, 0x6f, 0x75, 0x74, 0x70, 0x75, 0x74, - 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x73, 0x53, 0x70, 0x65, 0x63, 0x18, 0x04, - 0x20, 0x03, 0x28, 0x0b, 0x32, 0x30, 0x2e, 0x6d, 0x6c, 0x5f, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, - 0x6e, 0x65, 0x73, 0x2e, 0x43, 0x61, 0x63, 0x68, 0x65, 0x4b, 0x65, 0x79, 0x2e, 0x4f, 0x75, 0x74, + 0x2e, 0x49, 0x6e, 0x70, 0x75, 0x74, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x73, + 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, 0x0f, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x50, 0x61, 0x72, 0x61, + 0x6d, 0x65, 0x74, 0x65, 0x72, 0x73, 0x12, 0x61, 0x0a, 0x13, 0x6f, 0x75, 0x74, 0x70, 0x75, 0x74, + 0x41, 0x72, 0x74, 0x69, 0x66, 0x61, 0x63, 0x74, 0x73, 0x53, 0x70, 0x65, 0x63, 0x18, 0x03, 0x20, + 0x03, 0x28, 0x0b, 0x32, 0x2f, 0x2e, 0x6d, 0x6c, 0x5f, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, + 0x65, 0x73, 0x2e, 0x43, 0x61, 0x63, 0x68, 0x65, 0x4b, 0x65, 0x79, 0x2e, 0x4f, 0x75, 0x74, 0x70, + 0x75, 0x74, 0x41, 0x72, 0x74, 0x69, 0x66, 0x61, 0x63, 0x74, 0x73, 0x53, 0x70, 0x65, 0x63, 0x45, + 0x6e, 0x74, 0x72, 0x79, 0x52, 0x13, 0x6f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x41, 0x72, 0x74, 0x69, + 0x66, 0x61, 0x63, 0x74, 0x73, 0x53, 0x70, 0x65, 0x63, 0x12, 0x64, 0x0a, 0x14, 0x6f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x73, 0x53, 0x70, 0x65, - 0x63, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, 0x14, 0x6f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x50, 0x61, - 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x73, 0x53, 0x70, 0x65, 0x63, 0x12, 0x41, 0x0a, 0x0d, - 0x63, 0x6f, 0x6e, 0x74, 0x61, 0x69, 0x6e, 0x65, 0x72, 0x53, 0x70, 0x65, 0x63, 0x18, 0x05, 0x20, - 0x01, 0x28, 
0x0b, 0x32, 0x1b, 0x2e, 0x6d, 0x6c, 0x5f, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, - 0x65, 0x73, 0x2e, 0x43, 0x6f, 0x6e, 0x74, 0x61, 0x69, 0x6e, 0x65, 0x72, 0x53, 0x70, 0x65, 0x63, - 0x52, 0x0d, 0x63, 0x6f, 0x6e, 0x74, 0x61, 0x69, 0x6e, 0x65, 0x72, 0x53, 0x70, 0x65, 0x63, 0x1a, - 0x65, 0x0a, 0x17, 0x49, 0x6e, 0x70, 0x75, 0x74, 0x41, 0x72, 0x74, 0x69, 0x66, 0x61, 0x63, 0x74, - 0x4e, 0x61, 0x6d, 0x65, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, - 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x34, 0x0a, 0x05, - 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1e, 0x2e, 0x6d, 0x6c, - 0x5f, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2e, 0x41, 0x72, 0x74, 0x69, 0x66, - 0x61, 0x63, 0x74, 0x4e, 0x61, 0x6d, 0x65, 0x4c, 0x69, 0x73, 0x74, 0x52, 0x05, 0x76, 0x61, 0x6c, - 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x1a, 0x57, 0x0a, 0x14, 0x49, 0x6e, 0x70, 0x75, 0x74, 0x50, - 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, + 0x63, 0x18, 0x04, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x30, 0x2e, 0x6d, 0x6c, 0x5f, 0x70, 0x69, 0x70, + 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2e, 0x43, 0x61, 0x63, 0x68, 0x65, 0x4b, 0x65, 0x79, 0x2e, + 0x4f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x73, + 0x53, 0x70, 0x65, 0x63, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, 0x14, 0x6f, 0x75, 0x74, 0x70, 0x75, + 0x74, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x73, 0x53, 0x70, 0x65, 0x63, 0x12, + 0x41, 0x0a, 0x0d, 0x63, 0x6f, 0x6e, 0x74, 0x61, 0x69, 0x6e, 0x65, 0x72, 0x53, 0x70, 0x65, 0x63, + 0x18, 0x05, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1b, 0x2e, 0x6d, 0x6c, 0x5f, 0x70, 0x69, 0x70, 0x65, + 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2e, 0x43, 0x6f, 0x6e, 0x74, 0x61, 0x69, 0x6e, 0x65, 0x72, 0x53, + 0x70, 0x65, 0x63, 0x52, 0x0d, 0x63, 0x6f, 0x6e, 0x74, 0x61, 0x69, 0x6e, 0x65, 0x72, 0x53, 0x70, + 0x65, 0x63, 0x12, 0x66, 0x0a, 0x16, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x5f, 0x70, 0x61, 0x72, 0x61, + 0x6d, 0x65, 0x74, 0x65, 0x72, 0x5f, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x18, 0x06, 0x20, 0x03, + 0x28, 0x0b, 0x32, 0x30, 0x2e, 0x6d, 0x6c, 0x5f, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, + 0x73, 0x2e, 0x43, 0x61, 0x63, 0x68, 0x65, 0x4b, 0x65, 0x79, 0x2e, 0x49, 0x6e, 0x70, 0x75, 0x74, + 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x45, + 0x6e, 0x74, 0x72, 0x79, 0x52, 0x14, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x50, 0x61, 0x72, 0x61, 0x6d, + 0x65, 0x74, 0x65, 0x72, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x1a, 0x65, 0x0a, 0x17, 0x49, 0x6e, + 0x70, 0x75, 0x74, 0x41, 0x72, 0x74, 0x69, 0x66, 0x61, 0x63, 0x74, 0x4e, 0x61, 0x6d, 0x65, 0x73, + 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, + 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x34, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, + 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1e, 0x2e, 0x6d, 0x6c, 0x5f, 0x70, 0x69, 0x70, 0x65, + 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2e, 0x41, 0x72, 0x74, 0x69, 0x66, 0x61, 0x63, 0x74, 0x4e, 0x61, + 0x6d, 0x65, 0x4c, 0x69, 0x73, 0x74, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, + 0x01, 0x1a, 0x57, 0x0a, 0x14, 0x49, 0x6e, 0x70, 0x75, 0x74, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, + 0x74, 0x65, 0x72, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, + 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x29, 0x0a, 0x05, 0x76, + 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 
0x20, 0x01, 0x28, 0x0b, 0x32, 0x13, 0x2e, 0x6d, 0x6c, 0x5f, + 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2e, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x52, + 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x1a, 0x65, 0x0a, 0x18, 0x4f, 0x75, + 0x74, 0x70, 0x75, 0x74, 0x41, 0x72, 0x74, 0x69, 0x66, 0x61, 0x63, 0x74, 0x73, 0x53, 0x70, 0x65, + 0x63, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, + 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x33, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, + 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1d, 0x2e, 0x6d, 0x6c, 0x5f, 0x70, 0x69, 0x70, + 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2e, 0x52, 0x75, 0x6e, 0x74, 0x69, 0x6d, 0x65, 0x41, 0x72, + 0x74, 0x69, 0x66, 0x61, 0x63, 0x74, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, + 0x01, 0x1a, 0x47, 0x0a, 0x19, 0x4f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x50, 0x61, 0x72, 0x61, 0x6d, + 0x65, 0x74, 0x65, 0x72, 0x73, 0x53, 0x70, 0x65, 0x63, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, - 0x12, 0x29, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, - 0x13, 0x2e, 0x6d, 0x6c, 0x5f, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2e, 0x56, - 0x61, 0x6c, 0x75, 0x65, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x1a, - 0x65, 0x0a, 0x18, 0x4f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x41, 0x72, 0x74, 0x69, 0x66, 0x61, 0x63, - 0x74, 0x73, 0x53, 0x70, 0x65, 0x63, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, - 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x33, 0x0a, - 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1d, 0x2e, 0x6d, - 0x6c, 0x5f, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2e, 0x52, 0x75, 0x6e, 0x74, - 0x69, 0x6d, 0x65, 0x41, 0x72, 0x74, 0x69, 0x66, 0x61, 0x63, 0x74, 0x52, 0x05, 0x76, 0x61, 0x6c, - 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x1a, 0x47, 0x0a, 0x19, 0x4f, 0x75, 0x74, 0x70, 0x75, 0x74, - 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x73, 0x53, 0x70, 0x65, 0x63, 0x45, 0x6e, - 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, - 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x14, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, - 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x22, - 0x3f, 0x0a, 0x0d, 0x43, 0x6f, 0x6e, 0x74, 0x61, 0x69, 0x6e, 0x65, 0x72, 0x53, 0x70, 0x65, 0x63, - 0x12, 0x14, 0x0a, 0x05, 0x69, 0x6d, 0x61, 0x67, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, - 0x05, 0x69, 0x6d, 0x61, 0x67, 0x65, 0x12, 0x18, 0x0a, 0x07, 0x63, 0x6d, 0x64, 0x41, 0x72, 0x67, - 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x09, 0x52, 0x07, 0x63, 0x6d, 0x64, 0x41, 0x72, 0x67, 0x73, - 0x22, 0x38, 0x0a, 0x10, 0x41, 0x72, 0x74, 0x69, 0x66, 0x61, 0x63, 0x74, 0x4e, 0x61, 0x6d, 0x65, - 0x4c, 0x69, 0x73, 0x74, 0x12, 0x24, 0x0a, 0x0d, 0x61, 0x72, 0x74, 0x69, 0x66, 0x61, 0x63, 0x74, - 0x4e, 0x61, 0x6d, 0x65, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x09, 0x52, 0x0d, 0x61, 0x72, 0x74, - 0x69, 0x66, 0x61, 0x63, 0x74, 0x4e, 0x61, 0x6d, 0x65, 0x73, 0x42, 0x38, 0x5a, 0x36, 0x67, 0x69, - 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x6b, 0x75, 0x62, 0x65, 0x66, 0x6c, 0x6f, - 0x77, 0x2f, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2f, 0x61, 0x70, 0x69, 0x2f, - 0x76, 0x32, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2f, 0x67, 
0x6f, 0x2f, 0x63, 0x61, 0x63, 0x68, - 0x65, 0x6b, 0x65, 0x79, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, + 0x12, 0x14, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, + 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x1a, 0x5f, 0x0a, 0x19, 0x49, 0x6e, + 0x70, 0x75, 0x74, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x56, 0x61, 0x6c, 0x75, + 0x65, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, + 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x2c, 0x0a, 0x05, 0x76, 0x61, 0x6c, + 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x16, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, + 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x56, 0x61, 0x6c, 0x75, 0x65, + 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x22, 0x3f, 0x0a, 0x0d, 0x43, + 0x6f, 0x6e, 0x74, 0x61, 0x69, 0x6e, 0x65, 0x72, 0x53, 0x70, 0x65, 0x63, 0x12, 0x14, 0x0a, 0x05, + 0x69, 0x6d, 0x61, 0x67, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x69, 0x6d, 0x61, + 0x67, 0x65, 0x12, 0x18, 0x0a, 0x07, 0x63, 0x6d, 0x64, 0x41, 0x72, 0x67, 0x73, 0x18, 0x02, 0x20, + 0x03, 0x28, 0x09, 0x52, 0x07, 0x63, 0x6d, 0x64, 0x41, 0x72, 0x67, 0x73, 0x22, 0x38, 0x0a, 0x10, + 0x41, 0x72, 0x74, 0x69, 0x66, 0x61, 0x63, 0x74, 0x4e, 0x61, 0x6d, 0x65, 0x4c, 0x69, 0x73, 0x74, + 0x12, 0x24, 0x0a, 0x0d, 0x61, 0x72, 0x74, 0x69, 0x66, 0x61, 0x63, 0x74, 0x4e, 0x61, 0x6d, 0x65, + 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x09, 0x52, 0x0d, 0x61, 0x72, 0x74, 0x69, 0x66, 0x61, 0x63, + 0x74, 0x4e, 0x61, 0x6d, 0x65, 0x73, 0x42, 0x38, 0x5a, 0x36, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, + 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x6b, 0x75, 0x62, 0x65, 0x66, 0x6c, 0x6f, 0x77, 0x2f, 0x70, 0x69, + 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2f, 0x61, 0x70, 0x69, 0x2f, 0x76, 0x32, 0x61, 0x6c, + 0x70, 0x68, 0x61, 0x31, 0x2f, 0x67, 0x6f, 0x2f, 0x63, 0x61, 0x63, 0x68, 0x65, 0x6b, 0x65, 0x79, + 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, } var ( @@ -303,7 +324,7 @@ func file_cache_key_proto_rawDescGZIP() []byte { return file_cache_key_proto_rawDescData } -var file_cache_key_proto_msgTypes = make([]protoimpl.MessageInfo, 7) +var file_cache_key_proto_msgTypes = make([]protoimpl.MessageInfo, 8) var file_cache_key_proto_goTypes = []interface{}{ (*CacheKey)(nil), // 0: ml_pipelines.CacheKey (*ContainerSpec)(nil), // 1: ml_pipelines.ContainerSpec @@ -312,23 +333,27 @@ var file_cache_key_proto_goTypes = []interface{}{ nil, // 4: ml_pipelines.CacheKey.InputParametersEntry nil, // 5: ml_pipelines.CacheKey.OutputArtifactsSpecEntry nil, // 6: ml_pipelines.CacheKey.OutputParametersSpecEntry - (*pipelinespec.Value)(nil), // 7: ml_pipelines.Value - (*pipelinespec.RuntimeArtifact)(nil), // 8: ml_pipelines.RuntimeArtifact + nil, // 7: ml_pipelines.CacheKey.InputParameterValuesEntry + (*pipelinespec.Value)(nil), // 8: ml_pipelines.Value + (*pipelinespec.RuntimeArtifact)(nil), // 9: ml_pipelines.RuntimeArtifact + (*structpb.Value)(nil), // 10: google.protobuf.Value } var file_cache_key_proto_depIdxs = []int32{ - 3, // 0: ml_pipelines.CacheKey.inputArtifactNames:type_name -> ml_pipelines.CacheKey.InputArtifactNamesEntry - 4, // 1: ml_pipelines.CacheKey.inputParameters:type_name -> ml_pipelines.CacheKey.InputParametersEntry - 5, // 2: ml_pipelines.CacheKey.outputArtifactsSpec:type_name -> ml_pipelines.CacheKey.OutputArtifactsSpecEntry - 6, // 3: ml_pipelines.CacheKey.outputParametersSpec:type_name -> ml_pipelines.CacheKey.OutputParametersSpecEntry - 1, // 4: 
ml_pipelines.CacheKey.containerSpec:type_name -> ml_pipelines.ContainerSpec - 2, // 5: ml_pipelines.CacheKey.InputArtifactNamesEntry.value:type_name -> ml_pipelines.ArtifactNameList - 7, // 6: ml_pipelines.CacheKey.InputParametersEntry.value:type_name -> ml_pipelines.Value - 8, // 7: ml_pipelines.CacheKey.OutputArtifactsSpecEntry.value:type_name -> ml_pipelines.RuntimeArtifact - 8, // [8:8] is the sub-list for method output_type - 8, // [8:8] is the sub-list for method input_type - 8, // [8:8] is the sub-list for extension type_name - 8, // [8:8] is the sub-list for extension extendee - 0, // [0:8] is the sub-list for field type_name + 3, // 0: ml_pipelines.CacheKey.inputArtifactNames:type_name -> ml_pipelines.CacheKey.InputArtifactNamesEntry + 4, // 1: ml_pipelines.CacheKey.inputParameters:type_name -> ml_pipelines.CacheKey.InputParametersEntry + 5, // 2: ml_pipelines.CacheKey.outputArtifactsSpec:type_name -> ml_pipelines.CacheKey.OutputArtifactsSpecEntry + 6, // 3: ml_pipelines.CacheKey.outputParametersSpec:type_name -> ml_pipelines.CacheKey.OutputParametersSpecEntry + 1, // 4: ml_pipelines.CacheKey.containerSpec:type_name -> ml_pipelines.ContainerSpec + 7, // 5: ml_pipelines.CacheKey.input_parameter_values:type_name -> ml_pipelines.CacheKey.InputParameterValuesEntry + 2, // 6: ml_pipelines.CacheKey.InputArtifactNamesEntry.value:type_name -> ml_pipelines.ArtifactNameList + 8, // 7: ml_pipelines.CacheKey.InputParametersEntry.value:type_name -> ml_pipelines.Value + 9, // 8: ml_pipelines.CacheKey.OutputArtifactsSpecEntry.value:type_name -> ml_pipelines.RuntimeArtifact + 10, // 9: ml_pipelines.CacheKey.InputParameterValuesEntry.value:type_name -> google.protobuf.Value + 10, // [10:10] is the sub-list for method output_type + 10, // [10:10] is the sub-list for method input_type + 10, // [10:10] is the sub-list for extension type_name + 10, // [10:10] is the sub-list for extension extendee + 0, // [0:10] is the sub-list for field type_name } func init() { file_cache_key_proto_init() } @@ -380,7 +405,7 @@ func file_cache_key_proto_init() { GoPackagePath: reflect.TypeOf(x{}).PkgPath(), RawDescriptor: file_cache_key_proto_rawDesc, NumEnums: 0, - NumMessages: 7, + NumMessages: 8, NumExtensions: 0, NumServices: 0, }, diff --git a/samples/test/config.yaml b/samples/test/config.yaml index 216b8675622..680ced3909f 100644 --- a/samples/test/config.yaml +++ b/samples/test/config.yaml @@ -90,5 +90,7 @@ path: samples.v2.producer_consumer_param_test - name: pipeline_with_importer path: samples.v2.pipeline_with_importer_test -- name: cache_v2 - path: samples.v2.cache_test +# TODO(Bobgy): Re-enable after figuring out V2 Engine +# and protobuf.Value support. 
+# - name: cache_v2 +# path: samples.v2.cache_test diff --git a/samples/test/lightweight_python_functions_v2_pipeline_test.py b/samples/test/lightweight_python_functions_v2_pipeline_test.py index 49519910c9a..19942fd7ab2 100644 --- a/samples/test/lightweight_python_functions_v2_pipeline_test.py +++ b/samples/test/lightweight_python_functions_v2_pipeline_test.py @@ -61,9 +61,12 @@ def verify(run: kfp_server_api.ApiRun, mlmd_connection_config, **kwargs): 'type': 'system.Dataset' }], 'parameters': { - 'output_bool_parameter_path': 'True', - 'output_dict_parameter_path': '{"A": 1, "B": 2}', - 'output_list_parameter_path': '["a", "b", "c"]', + 'output_bool_parameter_path': True, + 'output_dict_parameter_path': { + "A": 1, + "B": 2 + }, + 'output_list_parameter_path': ["a", "b", "c"], 'output_parameter_path': 'message' } }, @@ -89,9 +92,12 @@ def verify(run: kfp_server_api.ApiRun, mlmd_connection_config, **kwargs): 'type': 'system.Dataset' }], 'parameters': { - 'input_bool': 'True', - 'input_dict': '{"A": 1, "B": 2}', - 'input_list': '["a", "b", "c"]', + 'input_bool': True, + 'input_dict': { + "A": 1, + "B": 2 + }, + 'input_list': ["a", "b", "c"], 'message': 'message', 'num_steps': 100, } @@ -116,14 +122,10 @@ def verify(run: kfp_server_api.ApiRun, mlmd_connection_config, **kwargs): run_pipeline_func([ - TestCase( - pipeline_func=pipeline, - verify_func=verify, - mode=dsl.PipelineExecutionMode.V2_COMPATIBLE - ), - TestCase( - pipeline_func=pipeline, - verify_func=verify, - mode=dsl.PipelineExecutionMode.V2_ENGINE - ), + TestCase(pipeline_func=pipeline, + verify_func=verify, + mode=dsl.PipelineExecutionMode.V2_COMPATIBLE), + TestCase(pipeline_func=pipeline, + verify_func=verify, + mode=dsl.PipelineExecutionMode.V2_ENGINE), ]) diff --git a/samples/test/util.py b/samples/test/util.py index 2a04f49d5cb..bfe9e860b7c 100644 --- a/samples/test/util.py +++ b/samples/test/util.py @@ -35,10 +35,9 @@ # Add **kwargs, so that when new arguments are added, this doesn't fail for # unknown arguments. 
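# Individual verify functions can therefore declare only the arguments they actually use.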
def _default_verify_func( - run_id: int, run: kfp_server_api.ApiRun, - mlmd_connection_config: metadata_store_pb2.MetadataStoreClientConfig, - **kwargs -): + run_id: int, run: kfp_server_api.ApiRun, + mlmd_connection_config: metadata_store_pb2.MetadataStoreClientConfig, + **kwargs): assert run.status == 'Succeeded' @@ -66,7 +65,6 @@ def run_pipeline_func(test_cases: List[TestCase]): :param pipeline_func: pipeline function to run :type pipeline_func: function """ - def test_wrapper( run_pipeline: Callable[ [Callable, kfp.dsl.PipelineExecutionMode, bool, dict], @@ -74,12 +72,10 @@ def test_wrapper( mlmd_connection_config: metadata_store_pb2.MetadataStoreClientConfig, ): for case in test_cases: - run_detail = run_pipeline( - pipeline_func=case.pipeline_func, - mode=case.mode, - enable_caching=case.enable_caching, - arguments=case.arguments or {} - ) + run_detail = run_pipeline(pipeline_func=case.pipeline_func, + mode=case.mode, + enable_caching=case.enable_caching, + arguments=case.arguments or {}) pipeline_runtime: kfp_server_api.ApiPipelineRuntime = run_detail.pipeline_runtime argo_workflow = json.loads(pipeline_runtime.workflow_manifest) argo_workflow_name = argo_workflow.get('metadata').get('name') @@ -96,7 +92,7 @@ def test_wrapper( _run_test(test_wrapper) -def _retry_with_backoff(fn: Callable, retries=5, backoff_in_seconds=1): +def _retry_with_backoff(fn: Callable, retries=3, backoff_in_seconds=1): i = 0 while True: try: @@ -114,7 +110,6 @@ def _retry_with_backoff(fn: Callable, retries=5, backoff_in_seconds=1): def _run_test(callback): - def main( output_directory: Optional[str] = None, # example host: Optional[str] = None, @@ -158,18 +153,25 @@ def main( if output_directory is None: output_directory = os.getenv('KFP_OUTPUT_DIRECTORY') if metadata_service_host is None: - metadata_service_host = os.getenv( - 'METADATA_GRPC_SERVICE_HOST', 'metadata-grpc-service' - ) + metadata_service_host = os.getenv('METADATA_GRPC_SERVICE_HOST', + 'metadata-grpc-service') if launcher_image is None: launcher_image = os.getenv('KFP_LAUNCHER_IMAGE') + if launcher_v2_image is None: + launcher_v2_image = os.getenv('KFP_LAUNCHER_V2_IMAGE') + if not launcher_v2_image: + raise Exception("launcher_v2_image is empty") + if driver_image is None: + driver_image = os.getenv('KFP_DRIVER_IMAGE') + if not driver_image: + raise Exception("driver_image is empty") client = kfp.Client(host=host) def run_pipeline( pipeline_func: Callable, - mode: kfp.dsl.PipelineExecutionMode = kfp.dsl.PipelineExecutionMode. - V2_COMPATIBLE, + mode: kfp.dsl.PipelineExecutionMode = kfp.dsl. 
+ PipelineExecutionMode.V2_COMPATIBLE, enable_caching: bool = False, arguments: Optional[dict] = None, ) -> kfp_server_api.ApiRunDetail: @@ -190,7 +192,7 @@ def _create_run(): launcher_v2_image=launcher_v2_image, pipeline_root=output_directory, enable_caching=enable_caching, - arguments = { + arguments={ **arguments, }, ) @@ -202,8 +204,7 @@ def _create_run(): cpu_request='0.5', cpu_limit='1', memory_limit='512Mi', - ) - ) + )) if mode == kfp.dsl.PipelineExecutionMode.V1_LEGACY: conf.add_op_transformer(disable_cache) return client.create_run_from_pipeline_func( @@ -250,10 +251,8 @@ def _create_run(): host=metadata_service_host, port=metadata_service_port, ) - callback( - run_pipeline=run_pipeline, - mlmd_connection_config=mlmd_connection_config - ) + callback(run_pipeline=run_pipeline, + mlmd_connection_config=mlmd_connection_config) import fire fire.Fire(main) @@ -266,15 +265,14 @@ def run_v2_pipeline( launcher_v2_image: str, pipeline_root: str, enable_caching: bool, - arguments: Mapping[str, str], - + arguments: Mapping[str, str], ): import tempfile import subprocess - original_pipeline_job = tempfile.mktemp(suffix='.json', prefix="original_pipeline_job") - kfp.v2.compiler.Compiler().compile( - pipeline_func=fn, package_path=original_pipeline_job - ) + original_pipeline_job = tempfile.mktemp(suffix='.json', + prefix="original_pipeline_job") + kfp.v2.compiler.Compiler().compile(pipeline_func=fn, + package_path=original_pipeline_job) # remove following overriding logic once we use create_run_from_job_spec to trigger kfp pipeline run with open(original_pipeline_job) as f: @@ -285,10 +283,10 @@ def run_v2_pipeline( for task in component['dag']['tasks'].values(): task['cachingOptions'] = {'enableCache': enable_caching} for k, v in arguments.items(): - parameter_value_dict = pipeline_job_dict['runtimeConfig']['parameters'][k] - for type, _ in parameter_value_dict.items(): - parameter_value_dict[type] = v - pipeline_job_dict['runtimeConfig']['parameters'][k] = parameter_value_dict + parameter_value = pipeline_job_dict['runtimeConfig'][ + 'parameterValues'][k] + pipeline_job_dict['runtimeConfig']['parameterValues'][ + k] = parameter_value pipeline_job = tempfile.mktemp(suffix='.json', prefix="pipeline_job") with open(pipeline_job, 'w') as f: @@ -310,8 +308,9 @@ def run_v2_pipeline( # call v2 backend compiler CLI to compile pipeline spec to argo workflow subprocess.check_call(args, stdout=f) return client.create_run_from_pipeline_package( - pipeline_file=argo_workflow_spec, arguments={}, enable_caching=enable_caching - ) + pipeline_file=argo_workflow_spec, + arguments={}, + enable_caching=enable_caching) def simplify_proto_struct(data: dict) -> dict: @@ -348,14 +347,11 @@ def new( # The original field is custom_properties, but MessageToDict converts it # to customProperties. 
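# (json_format.MessageToDict camel-cases proto field names by default; pass preserving_proto_field_name=True to keep snake_case.)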
metadata = simplify_proto_struct( - MessageToDict(mlmd_artifact).get('customProperties', {}) - ) - return cls( - name=artifact_name, - type=mlmd_artifact_type.name, - uri=mlmd_artifact.uri, - metadata=metadata - ) + MessageToDict(mlmd_artifact).get('customProperties', {})) + return cls(name=artifact_name, + type=mlmd_artifact_type.name, + uri=mlmd_artifact.uri, + metadata=metadata) @dataclass @@ -390,13 +386,13 @@ def get_dict(self): @classmethod def new( - cls, - context: metadata_store_pb2.Context, - execution: metadata_store_pb2.Execution, - execution_types_by_id, # dict[int, metadata_store_pb2.ExecutionType] - events_by_execution_id, # dict[int, List[metadata_store_pb2.Event]] - artifacts_by_id, # dict[int, metadata_store_pb2.Artifact] - artifact_types_by_id, # dict[int, metadata_store_pb2.ArtifactType] + cls, + context: metadata_store_pb2.Context, + execution: metadata_store_pb2.Execution, + execution_types_by_id, # dict[int, metadata_store_pb2.ExecutionType] + events_by_execution_id, # dict[int, List[metadata_store_pb2.Event]] + artifacts_by_id, # dict[int, metadata_store_pb2.Artifact] + artifact_types_by_id, # dict[int, metadata_store_pb2.ArtifactType] ): execution_type = execution_types_by_id[execution.type_id] params = _parse_parameters(execution) @@ -404,18 +400,16 @@ def new( input_artifacts = [] output_artifacts = [] if events: - input_artifacts_info = [(e.artifact_id, e) - for e in events - if e.type == metadata_store_pb2.Event.INPUT] + input_artifacts_info = [(e.artifact_id, e) for e in events + if e.type == metadata_store_pb2.Event.INPUT + ] output_artifacts_info = [ - (e.artifact_id, e) - for e in events + (e.artifact_id, e) for e in events if e.type == metadata_store_pb2.Event.OUTPUT ] - def kfp_artifact( - aid: int, e: metadata_store_pb2.Event - ) -> KfpArtifact: + def kfp_artifact(aid: int, + e: metadata_store_pb2.Event) -> KfpArtifact: mlmd_artifact = artifacts_by_id[aid] mlmd_type = artifact_types_by_id[mlmd_artifact.type_id] return KfpArtifact.new( @@ -437,17 +431,14 @@ def kfp_artifact( name=execution.custom_properties.get('task_name').string_value, type=execution_type.name, state=execution.last_known_state, - inputs=TaskInputs( - parameters=params['inputs'], artifacts=input_artifacts - ), - outputs=TaskOutputs( - parameters=params['outputs'], artifacts=output_artifacts - ), + inputs=TaskInputs(parameters=params['inputs'], + artifacts=input_artifacts), + outputs=TaskOutputs(parameters=params['outputs'], + artifacts=output_artifacts), ) class KfpMlmdClient: - def __init__( self, mlmd_connection_config: Optional[ @@ -468,33 +459,25 @@ def get_tasks(self, run_id: str): ) if not run_context: raise Exception( - f'Cannot find system.PipelineRun context "{run_id}"' - ) + f'Cannot find system.PipelineRun context "{run_id}"') logging.info( - f'run_context: name={run_context.name} id={run_context.id}' - ) + f'run_context: name={run_context.name} id={run_context.id}') executions = self.mlmd_store.get_executions_by_context( - context_id=run_context.id - ) + context_id=run_context.id) execution_types = self.mlmd_store.get_execution_types_by_id( - list(set([e.type_id for e in executions])) - ) + list(set([e.type_id for e in executions]))) execution_types_by_id = {et.id: et for et in execution_types} - events = self.mlmd_store.get_events_by_execution_ids([ - e.id for e in executions - ]) + events = self.mlmd_store.get_events_by_execution_ids( + [e.id for e in executions]) events_by_execution_id = {} for e in events: - events_by_execution_id[ - e.execution_id - ] = 
(events_by_execution_id.get(e.execution_id) or []) + [e] + events_by_execution_id[e.execution_id] = ( + events_by_execution_id.get(e.execution_id) or []) + [e] artifacts = self.mlmd_store.get_artifacts_by_context( - context_id=run_context.id - ) + context_id=run_context.id) artifacts_by_id = {a.id: a for a in artifacts} artifact_types = self.mlmd_store.get_artifact_types_by_id( - list(set([a.type_id for a in artifacts])) - ) + list(set([a.type_id for a in artifacts]))) artifact_types_by_id = {at.id: at for at in artifact_types} _validate_executions_have_task_names(executions) tasks = [ @@ -538,12 +521,20 @@ def _parse_parameters(execution: metadata_store_pb2.Execution) -> dict: parameters['inputs'][name[len('input:'):]] = raw_value if name.startswith('output:'): parameters['outputs'][name[len('output:'):]] = raw_value + if name == "inputs" and value.HasField('struct_value'): + for k, v in simplify_proto_struct( + MessageToDict(value))["structValue"].items(): + parameters['inputs'][k] = v + if name == "outputs" and value.HasField('struct_value'): + for k, v in simplify_proto_struct( + MessageToDict(value))["structValue"].items(): + parameters['outputs'][k] = v return parameters def disable_cache(task): - # Skip tasks which are not container ops. - if not isinstance(task, kfp.dsl.ContainerOp): - return task - task.execution_options.caching_strategy.max_cache_staleness = "P0D" + # Skip tasks which are not container ops. + if not isinstance(task, kfp.dsl.ContainerOp): return task + task.execution_options.caching_strategy.max_cache_staleness = "P0D" + return task diff --git a/sdk/RELEASE.md b/sdk/RELEASE.md index 984fc863e8e..f36414e8892 100644 --- a/sdk/RELEASE.md +++ b/sdk/RELEASE.md @@ -19,7 +19,8 @@ ## Major Features and Improvements * Add optional support to specify description for pipeline version [\#6472](https://github.com/kubeflow/pipelines/issues/6472). -* New v2 experimental compiler. [\#6803](https://github.com/kubeflow/pipelines/pull/6803) +* New v2 experimental compiler [\#6803](https://github.com/kubeflow/pipelines/pull/6803). +* Support passing parameters in v2 using google.protobuf.Value [\#6804](https://github.com/kubeflow/pipelines/pull/6804). 
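As a rough illustration of what that protobuf.Value support means (a hedged sketch, not code from this patch): each pipeline parameter is wrapped in a `google.protobuf.Value`, whose `bool_value`, `number_value`, `string_value`, `list_value`, and `struct_value` fields correspond to the new `BOOLEAN`, `NUMBER_INTEGER`/`NUMBER_DOUBLE`, `STRING`, `LIST`, and `STRUCT` parameter types. The helper name below is hypothetical; it mirrors the `_get_value` conversion added to `compiler_utils.py` later in this patch.

```python
from google.protobuf import struct_pb2

def to_protobuf_value(value) -> struct_pb2.Value:
    """Wraps a Python parameter value in a google.protobuf.Value."""
    result = struct_pb2.Value()
    # bool must be tested before int: bool is a subclass of int in Python.
    if isinstance(value, bool):
        result.bool_value = value            # -> BOOLEAN
    elif isinstance(value, (int, float)):
        result.number_value = float(value)   # -> NUMBER_INTEGER / NUMBER_DOUBLE
    elif isinstance(value, str):
        result.string_value = value          # -> STRING
    elif isinstance(value, list):
        result.list_value.extend(value)      # -> LIST
    elif isinstance(value, dict):
        result.struct_value.update(value)    # -> STRUCT
    else:
        raise TypeError('Unsupported parameter value: {!r}'.format(value))
    return result

# e.g. to_protobuf_value({'A': 1, 'B': 2}) carries the dict as a Struct,
# which is why the test expectations above change from the serialized
# string '{"A": 1, "B": 2}' to a real dict.
```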
## Breaking Changes diff --git a/sdk/python/kfp/compiler/testdata/v2_compatible_two_step_pipeline.yaml b/sdk/python/kfp/compiler/testdata/v2_compatible_two_step_pipeline.yaml index 624b426fc17..a6263af8e7c 100644 --- a/sdk/python/kfp/compiler/testdata/v2_compatible_two_step_pipeline.yaml +++ b/sdk/python/kfp/compiler/testdata/v2_compatible_two_step_pipeline.yaml @@ -3,15 +3,15 @@ kind: Workflow metadata: generateName: my-test-pipeline- annotations: - pipelines.kubeflow.org/kfp_sdk_version: 1.8.5 - pipelines.kubeflow.org/pipeline_compilation_time: '2021-10-13T17:35:51.450220' + pipelines.kubeflow.org/kfp_sdk_version: 1.8.6 + pipelines.kubeflow.org/pipeline_compilation_time: '2021-10-26T15:02:07.868312' pipelines.kubeflow.org/pipeline_spec: '{"inputs": [{"default": "gs://output-directory/v2-artifacts", "name": "pipeline-root"}, {"default": "pipeline/my-test-pipeline", "name": "pipeline-name"}], "name": "my-test-pipeline"}' pipelines.kubeflow.org/v2_pipeline: "true" labels: pipelines.kubeflow.org/v2_pipeline: "true" - pipelines.kubeflow.org/kfp_sdk_version: 1.8.5 + pipelines.kubeflow.org/kfp_sdk_version: 1.8.6 spec: entrypoint: my-test-pipeline templates: @@ -49,7 +49,7 @@ spec: python3 -m ensurepip || python3 -m ensurepip --user || apt-get install python3-pip fi - PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location 'kfp==1.8.5' && "$0" "$@" + PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location 'kfp==1.8.6' && "$0" "$@" - sh - -ec - | @@ -103,8 +103,8 @@ spec: fieldRef: {fieldPath: 'metadata.labels[''pipelines.kubeflow.org/enable_caching'']'} - {name: KFP_V2_IMAGE, value: 'python:3.7'} - {name: KFP_V2_RUNTIME_INFO, value: '{"inputParameters": {"some_int": {"type": - "INT"}, "uri": {"type": "STRING"}}, "inputArtifacts": {}, "outputParameters": - {"output_parameter_one": {"type": "INT", "path": "/tmp/outputs/output_parameter_one/data"}}, + "NUMBER_INTEGER"}, "uri": {"type": "STRING"}}, "inputArtifacts": {}, "outputParameters": + {"output_parameter_one": {"type": "NUMBER_INTEGER", "path": "/tmp/outputs/output_parameter_one/data"}}, "outputArtifacts": {"output_dataset_one": {"schemaTitle": "system.Dataset", "instanceSchema": "", "schemaVersion": "0.0.1", "metadataPath": "/tmp/outputs/output_dataset_one/data"}}}'} envFrom: @@ -129,13 +129,13 @@ spec: pipelines.kubeflow.org/component_ref: '{}' pipelines.kubeflow.org/arguments.parameters: '{"some_int": "12", "uri": "uri-to-import"}' labels: - pipelines.kubeflow.org/kfp_sdk_version: 1.8.5 + pipelines.kubeflow.org/kfp_sdk_version: 1.8.6 pipelines.kubeflow.org/pipeline-sdk-type: kfp pipelines.kubeflow.org/v2_component: "true" pipelines.kubeflow.org/enable_caching: "true" initContainers: - command: [launcher, --copy, /kfp-launcher/launch] - image: gcr.io/ml-pipeline/kfp-launcher:1.8.5 + image: gcr.io/ml-pipeline/kfp-launcher:1.8.6 name: kfp-launcher mirrorVolumeMounts: true volumes: @@ -151,7 +151,7 @@ spec: python3 -m ensurepip || python3 -m ensurepip --user || apt-get install python3-pip fi - PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location 'kfp==1.8.5' && "$0" "$@" + PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location 'kfp==1.8.6' && "$0" "$@" - sh - -ec - | @@ -210,7 +210,7 @@ spec: fieldRef: {fieldPath: 'metadata.labels[''pipelines.kubeflow.org/enable_caching'']'} - {name: KFP_V2_IMAGE, value: 'python:3.7'} - {name: KFP_V2_RUNTIME_INFO, value: '{"inputParameters": {"num_steps": {"type": - "INT"}}, 
"inputArtifacts": {"dataset": {"metadataPath": "/tmp/inputs/dataset/data", + "NUMBER_INTEGER"}}, "inputArtifacts": {"dataset": {"metadataPath": "/tmp/inputs/dataset/data", "schemaTitle": "system.Dataset", "instanceSchema": "", "schemaVersion": "0.0.1"}}, "outputParameters": {}, "outputArtifacts": {"model": {"schemaTitle": "system.Model", "instanceSchema": "", "schemaVersion": "0.0.1", "metadataPath": @@ -236,13 +236,13 @@ spec: pipelines.kubeflow.org/component_ref: '{}' pipelines.kubeflow.org/arguments.parameters: '{"num_steps": "{{inputs.parameters.preprocess-output_parameter_one}}"}' labels: - pipelines.kubeflow.org/kfp_sdk_version: 1.8.5 + pipelines.kubeflow.org/kfp_sdk_version: 1.8.6 pipelines.kubeflow.org/pipeline-sdk-type: kfp pipelines.kubeflow.org/v2_component: "true" pipelines.kubeflow.org/enable_caching: "true" initContainers: - command: [launcher, --copy, /kfp-launcher/launch] - image: gcr.io/ml-pipeline/kfp-launcher:1.8.5 + image: gcr.io/ml-pipeline/kfp-launcher:1.8.6 name: kfp-launcher mirrorVolumeMounts: true volumes: diff --git a/sdk/python/kfp/compiler/testdata/v2_compatible_two_step_pipeline_with_custom_launcher.yaml b/sdk/python/kfp/compiler/testdata/v2_compatible_two_step_pipeline_with_custom_launcher.yaml index 393a9db4d3e..48f9066128d 100644 --- a/sdk/python/kfp/compiler/testdata/v2_compatible_two_step_pipeline_with_custom_launcher.yaml +++ b/sdk/python/kfp/compiler/testdata/v2_compatible_two_step_pipeline_with_custom_launcher.yaml @@ -3,15 +3,15 @@ kind: Workflow metadata: generateName: my-test-pipeline-with-custom-launcher- annotations: - pipelines.kubeflow.org/kfp_sdk_version: 1.8.5 - pipelines.kubeflow.org/pipeline_compilation_time: '2021-10-13T17:35:51.012522' + pipelines.kubeflow.org/kfp_sdk_version: 1.8.6 + pipelines.kubeflow.org/pipeline_compilation_time: '2021-10-26T15:02:07.414964' pipelines.kubeflow.org/pipeline_spec: '{"inputs": [{"default": "gs://output-directory/v2-artifacts", "name": "pipeline-root"}, {"default": "pipeline/my-test-pipeline-with-custom-launcher", "name": "pipeline-name"}], "name": "my-test-pipeline-with-custom-launcher"}' pipelines.kubeflow.org/v2_pipeline: "true" labels: pipelines.kubeflow.org/v2_pipeline: "true" - pipelines.kubeflow.org/kfp_sdk_version: 1.8.5 + pipelines.kubeflow.org/kfp_sdk_version: 1.8.6 spec: entrypoint: my-test-pipeline-with-custom-launcher templates: @@ -49,7 +49,7 @@ spec: python3 -m ensurepip || python3 -m ensurepip --user || apt-get install python3-pip fi - PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location 'kfp==1.8.5' && "$0" "$@" + PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location 'kfp==1.8.6' && "$0" "$@" - sh - -ec - | @@ -103,8 +103,8 @@ spec: fieldRef: {fieldPath: 'metadata.labels[''pipelines.kubeflow.org/enable_caching'']'} - {name: KFP_V2_IMAGE, value: 'python:3.7'} - {name: KFP_V2_RUNTIME_INFO, value: '{"inputParameters": {"some_int": {"type": - "INT"}, "uri": {"type": "STRING"}}, "inputArtifacts": {}, "outputParameters": - {"output_parameter_one": {"type": "INT", "path": "/tmp/outputs/output_parameter_one/data"}}, + "NUMBER_INTEGER"}, "uri": {"type": "STRING"}}, "inputArtifacts": {}, "outputParameters": + {"output_parameter_one": {"type": "NUMBER_INTEGER", "path": "/tmp/outputs/output_parameter_one/data"}}, "outputArtifacts": {"output_dataset_one": {"schemaTitle": "system.Dataset", "instanceSchema": "", "schemaVersion": "0.0.1", "metadataPath": "/tmp/outputs/output_dataset_one/data"}}}'} envFrom: @@ -129,7 +129,7 @@ spec: 
pipelines.kubeflow.org/component_ref: '{}' pipelines.kubeflow.org/arguments.parameters: '{"some_int": "12", "uri": "uri-to-import"}' labels: - pipelines.kubeflow.org/kfp_sdk_version: 1.8.5 + pipelines.kubeflow.org/kfp_sdk_version: 1.8.6 pipelines.kubeflow.org/pipeline-sdk-type: kfp pipelines.kubeflow.org/v2_component: "true" pipelines.kubeflow.org/enable_caching: "true" @@ -151,7 +151,7 @@ spec: python3 -m ensurepip || python3 -m ensurepip --user || apt-get install python3-pip fi - PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location 'kfp==1.8.5' && "$0" "$@" + PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location 'kfp==1.8.6' && "$0" "$@" - sh - -ec - | @@ -210,7 +210,7 @@ spec: fieldRef: {fieldPath: 'metadata.labels[''pipelines.kubeflow.org/enable_caching'']'} - {name: KFP_V2_IMAGE, value: 'python:3.7'} - {name: KFP_V2_RUNTIME_INFO, value: '{"inputParameters": {"num_steps": {"type": - "INT"}}, "inputArtifacts": {"dataset": {"metadataPath": "/tmp/inputs/dataset/data", + "NUMBER_INTEGER"}}, "inputArtifacts": {"dataset": {"metadataPath": "/tmp/inputs/dataset/data", "schemaTitle": "system.Dataset", "instanceSchema": "", "schemaVersion": "0.0.1"}}, "outputParameters": {}, "outputArtifacts": {"model": {"schemaTitle": "system.Model", "instanceSchema": "", "schemaVersion": "0.0.1", "metadataPath": @@ -236,7 +236,7 @@ spec: pipelines.kubeflow.org/component_ref: '{}' pipelines.kubeflow.org/arguments.parameters: '{"num_steps": "{{inputs.parameters.preprocess-output_parameter_one}}"}' labels: - pipelines.kubeflow.org/kfp_sdk_version: 1.8.5 + pipelines.kubeflow.org/kfp_sdk_version: 1.8.6 pipelines.kubeflow.org/pipeline-sdk-type: kfp pipelines.kubeflow.org/v2_component: "true" pipelines.kubeflow.org/enable_caching: "true" diff --git a/sdk/python/kfp/compiler/v2_compat.py b/sdk/python/kfp/compiler/v2_compat.py index d96d045efe0..f6d05fdcfa1 100644 --- a/sdk/python/kfp/compiler/v2_compat.py +++ b/sdk/python/kfp/compiler/v2_compat.py @@ -141,12 +141,13 @@ def update_op(op: dsl.ContainerOp, component_spec = op.component_spec for parameter, spec in sorted( component_spec.input_definitions.parameters.items()): - parameter_info = { - "type": - pipeline_spec_pb2.PrimitiveType.PrimitiveTypeEnum.Name(spec.type - ), - } - op.command += [f"{parameter}={op._parameter_arguments[parameter]}"] + parameter_type = pipeline_spec_pb2.ParameterType.ParameterTypeEnum.Name( + spec.parameter_type) + parameter_info = {"type": parameter_type} + + parameter_value = op._parameter_arguments[parameter] + op.command += [f"{parameter}={parameter_value}"] + runtime_info["inputParameters"][parameter] = parameter_info op.command += ["--"] @@ -164,8 +165,8 @@ def update_op(op: dsl.ContainerOp, component_spec.output_definitions.parameters.items()): parameter_info = { "type": - pipeline_spec_pb2.PrimitiveType.PrimitiveTypeEnum.Name(spec.type - ), + pipeline_spec_pb2.ParameterType.ParameterTypeEnum.Name( + spec.parameter_type), "path": op.file_outputs[parameter], } diff --git a/sdk/python/kfp/dsl/_component_bridge.py b/sdk/python/kfp/dsl/_component_bridge.py index a7277fe3518..c61a6cb0d1f 100644 --- a/sdk/python/kfp/dsl/_component_bridge.py +++ b/sdk/python/kfp/dsl/_component_bridge.py @@ -20,7 +20,7 @@ from typing import Any, Mapping, Optional import kfp -from kfp.components import _structures +from kfp.components import _structures, _data_passing from kfp.components import _components from kfp.components import _naming from kfp import dsl @@ -199,6 +199,12 @@ def 
_create_container_op_from_component_and_arguments( default_value = int(default_value) elif input_spec.type == 'Float': default_value = float(default_value) + elif (type_utils.is_parameter_type(input_spec.type) and + kfp.COMPILING_FOR_V2): + parameter_type = type_utils.get_parameter_type(input_spec.type) + default_value = type_utils.deserialize_parameter_value( + value=default_value, parameter_type=parameter_type) + arguments[input_spec.name] = default_value # Check types of the reference arguments and serialize PipelineParams @@ -264,13 +270,22 @@ def _create_container_op_from_component_and_arguments( name_to_spec_type = {} if component_meta.inputs: name_to_spec_type = { - input.name: input.type for input in component_meta.inputs + input.name: { + 'type': input.type, + 'default': input.default, + } for input in component_meta.inputs } + if kfp.COMPILING_FOR_V2: for name, spec_type in name_to_spec_type.items(): if (name in original_arguments and - type_utils.is_parameter_type(spec_type)): - task._parameter_arguments[name] = str(original_arguments[name]) + type_utils.is_parameter_type(spec_type['type'])): + if isinstance(original_arguments[name], (list, dict)): + task._parameter_arguments[name] = json.dumps( + original_arguments[name]) + else: + task._parameter_arguments[name] = str( + original_arguments[name]) for name in list(task.artifact_arguments.keys()): if name in task._parameter_arguments: @@ -573,24 +588,35 @@ def _resolve_ir_placeholders_v2( input_type = component_spec._inputs_dict[input_name].type if type_utils.is_parameter_type(input_type): pipeline_task_spec.inputs.parameters[ - input_name].runtime_value.constant_value.string_value = ( - argument_value) + input_name].runtime_value.constant.string_value = argument_value elif isinstance(argument_value, int): argument_type = 'Integer' pipeline_task_spec.inputs.parameters[ - input_name].runtime_value.constant_value.int_value = ( - argument_value) + input_name].runtime_value.constant.number_value = argument_value elif isinstance(argument_value, float): argument_type = 'Float' pipeline_task_spec.inputs.parameters[ - input_name].runtime_value.constant_value.double_value = ( + input_name].runtime_value.constant.number_value = argument_value + elif isinstance(argument_value, bool): + argument_type = 'Bool' + pipeline_task_spec.inputs.parameters[ + input_name].runtime_value.constant.bool_value = argument_value + elif isinstance(argument_value, list): + argument_type = 'List' + + # Convert any PipelineParams to strings. 
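+                    # A PipelineParam inside the list serializes to its placeholder
+                    # string, e.g. '{{pipelineparam:op=some-op;name=output}}' (names
+                    # here are illustrative).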
+ argument_value = map( + lambda x: str(x) + if isinstance(x, dsl.PipelineParam) else x, argument_value) + + pipeline_task_spec.inputs.parameters[ + input_name].runtime_value.constant.list_value.extend( argument_value) - elif isinstance(argument_value, - (dict, list, bool)) and kfp.COMPILING_FOR_V2: - argument_type = type(argument_value).__name__ + elif isinstance(argument_value, dict): + argument_type = 'Dict' pipeline_task_spec.inputs.parameters[ - input_name].runtime_value.constant_value.string_value = ( - json.dumps(argument_value)) + input_name].runtime_value.constant.struct_value.update( + argument_value) elif isinstance(argument_value, _container_op.ContainerOp): raise TypeError( f'ContainerOp object {input_name} was passed to component as an ' @@ -625,7 +651,7 @@ def _resolve_ir_placeholders_v2( if argument_is_parameter_type else 'Artifact', input_name=input_name, input_type=input_type, - input_category='Paramter' + input_category='Parameter' if input_is_parameter_type else 'Artifact', )) diff --git a/sdk/python/kfp/dsl/component_spec.py b/sdk/python/kfp/dsl/component_spec.py index 756849e8574..57d70e0f93c 100644 --- a/sdk/python/kfp/dsl/component_spec.py +++ b/sdk/python/kfp/dsl/component_spec.py @@ -98,7 +98,7 @@ def build_component_spec_from_structure( continue if type_utils.is_parameter_type(input_spec.type): result.input_definitions.parameters[ - input_spec.name].type = type_utils.get_parameter_type( + input_spec.name].parameter_type = type_utils.get_parameter_type( input_spec.type) else: result.input_definitions.artifacts[ @@ -108,7 +108,8 @@ def build_component_spec_from_structure( for output_spec in component_spec.outputs or []: if type_utils.is_parameter_type(output_spec.type): result.output_definitions.parameters[ - output_spec.name].type = type_utils.get_parameter_type( + output_spec + .name].parameter_type = type_utils.get_parameter_type( output_spec.type) else: result.output_definitions.artifacts[ @@ -141,7 +142,7 @@ def build_component_inputs_spec( if type_utils.is_parameter_type(param.param_type): component_spec.input_definitions.parameters[ - input_name].type = type_utils.get_parameter_type( + input_name].parameter_type = type_utils.get_parameter_type( param.param_type) elif input_name not in getattr(component_spec.input_definitions, 'parameters', []): @@ -164,7 +165,7 @@ def build_component_outputs_spec( output_name = param.full_name if type_utils.is_parameter_type(param.param_type): component_spec.output_definitions.parameters[ - output_name].type = type_utils.get_parameter_type( + output_name].parameter_type = type_utils.get_parameter_type( param.param_type) elif output_name not in getattr(component_spec.output_definitions, 'parameters', []): diff --git a/sdk/python/kfp/dsl/component_spec_test.py b/sdk/python/kfp/dsl/component_spec_test.py index b0919b75ed1..73a49923eeb 100644 --- a/sdk/python/kfp/dsl/component_spec_test.py +++ b/sdk/python/kfp/dsl/component_spec_test.py @@ -72,10 +72,10 @@ def test_build_component_spec_from_structure(self): }, 'parameters': { 'input2': { - 'type': 'STRING' + 'parameterType': 'STRING' }, 'input3': { - 'type': 'INT' + 'parameterType': 'NUMBER_INTEGER' } } }, @@ -118,13 +118,13 @@ def test_build_component_spec_from_structure(self): }, 'parameters': { 'input2': { - 'type': 'INT' + 'parameterType': 'NUMBER_INTEGER' }, 'input3': { - 'type': 'STRING' + 'parameterType': 'STRING' }, 'input4': { - 'type': 'DOUBLE' + 'parameterType': 'NUMBER_DOUBLE' } } } @@ -144,13 +144,13 @@ def test_build_component_spec_from_structure(self): }, 
'parameters': { 'pipelineparam--input2': { - 'type': 'INT' + 'parameterType': 'NUMBER_INTEGER' }, 'pipelineparam--input3': { - 'type': 'STRING' + 'parameterType': 'STRING' }, 'pipelineparam--input4': { - 'type': 'DOUBLE' + 'parameterType': 'NUMBER_DOUBLE' } } } @@ -194,13 +194,13 @@ def test_build_component_outputs_spec(self): }, 'parameters': { 'output2': { - 'type': 'INT' + 'parameterType': 'NUMBER_INTEGER' }, 'output3': { - 'type': 'STRING' + 'parameterType': 'STRING' }, 'output4': { - 'type': 'DOUBLE' + 'parameterType': 'NUMBER_DOUBLE' } } } @@ -343,7 +343,7 @@ def test_build_task_inputs_spec(self, is_parent_component_root, }, 'parameters': { 'param1': { - 'type': 'STRING' + 'parameterType': 'STRING' }, } }, @@ -419,10 +419,10 @@ def test_build_task_inputs_spec(self, is_parent_component_root, }, 'parameters': { 'pipelineparam--op-2-output2' : { - 'type': 'INT' + 'parameterType': 'NUMBER_INTEGER' }, 'pipelineparam--param1': { - 'type': 'STRING' + 'parameterType': 'STRING' }, } }, @@ -483,9 +483,9 @@ def test_pop_input_from_component_spec(self): component_spec.input_definitions.artifacts[ 'input1'].artifact_type.schema_title = 'system.Dataset' component_spec.input_definitions.parameters[ - 'input2'].type = pipeline_spec_pb2.PrimitiveType.STRING + 'input2'].parameter_type = pipeline_spec_pb2.ParameterType.STRING component_spec.input_definitions.parameters[ - 'input3'].type = pipeline_spec_pb2.PrimitiveType.DOUBLE + 'input3'].parameter_type = pipeline_spec_pb2.ParameterType.NUMBER_DOUBLE # pop an artifact, and there're other inputs left dsl_component_spec.pop_input_from_component_spec( @@ -494,10 +494,10 @@ def test_pop_input_from_component_spec(self): 'inputDefinitions': { 'parameters': { 'input2': { - 'type': 'STRING' + 'parameterType': 'STRING' }, 'input3': { - 'type': 'DOUBLE' + 'parameterType': 'NUMBER_DOUBLE' } } }, @@ -514,7 +514,7 @@ def test_pop_input_from_component_spec(self): 'inputDefinitions': { 'parameters': { 'input3': { - 'type': 'DOUBLE' + 'parameterType': 'NUMBER_DOUBLE' } } }, diff --git a/sdk/python/kfp/dsl/type_utils.py b/sdk/python/kfp/dsl/type_utils.py index 8d11e478b60..7c61f1d426e 100644 --- a/sdk/python/kfp/dsl/type_utils.py +++ b/sdk/python/kfp/dsl/type_utils.py @@ -26,5 +26,4 @@ is_parameter_type = type_utils.is_parameter_type get_artifact_type_schema = type_utils.get_artifact_type_schema get_parameter_type = type_utils.get_parameter_type -get_parameter_type_field_name = type_utils.get_parameter_type_field_name get_input_artifact_type_schema = type_utils.get_input_artifact_type_schema diff --git a/sdk/python/kfp/v2/compiler/compiler.py b/sdk/python/kfp/v2/compiler/compiler.py index 3c9400375ff..873b3d662a7 100644 --- a/sdk/python/kfp/v2/compiler/compiler.py +++ b/sdk/python/kfp/v2/compiler/compiler.py @@ -549,37 +549,116 @@ def _resolve_condition_operands( operand2: Union[str, dsl.PipelineParam]) -> Tuple[str, str]: """Resolves values and PipelineParams for condition operands.""" - # Pre-scan the operand to get the type of constant value if there's any. - # The value_type can be used to backfill missing PipelineParam.param_type. - value_type = None for value_or_reference in [operand1, operand2]: + if not isinstance(value_or_reference, + (dsl.PipelineParam, int, float, bool, str)): + raise ValueError('Conditional requires scalar constant values' + ' for comparison. Found "{}" of type {}' + ' in pipeline definition instead.'.format( + value_or_reference, + type(value_or_reference))) + + # Check specified type of PipelineParam is a scalar as well. 
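+            # (LIST, STRUCT, and unspecified parameter types are rejected below:
+            # CEL comparisons require scalar operands.)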
if isinstance(value_or_reference, dsl.PipelineParam): - continue - if isinstance(value_or_reference, float): - value_type = 'Float' - elif isinstance(value_or_reference, int): - value_type = 'Integer' + parameter_type = type_utils.get_parameter_type( + value_or_reference.param_type) + + if parameter_type in [ + pipeline_spec_pb2.ParameterType.STRUCT, + pipeline_spec_pb2.ParameterType.LIST, + pipeline_spec_pb2.ParameterType + .PARAMETER_TYPE_ENUM_UNSPECIFIED, + ]: + input_name = dsl_component_spec.additional_input_name_for_pipelineparam( + value_or_reference) + raise ValueError( + 'Conditional requires scalar parameter values' + ' for comparison. Found input "{}" of type {}' + ' in pipeline definition instead.'.format( + input_name, value_or_reference.param_type)) + + parameter_types = set() + for value_or_reference in [operand1, operand2]: + if isinstance(value_or_reference, dsl.PipelineParam): + parameter_type = type_utils.get_parameter_type( + value_or_reference.param_type) else: - value_type = 'String' + parameter_type = type_utils.get_parameter_type( + type(value_or_reference).__name__) + + parameter_types.add(parameter_type) + + if len(parameter_types) == 2: + # Two different types being compared. The only possible types are + # String, Boolean, Double and Integer. We'll promote the other type + # using the following precedence: + # String > Boolean > Double > Integer + if pipeline_spec_pb2.ParameterType.STRING in parameter_types: + canonical_parameter_type = pipeline_spec_pb2.ParameterType.STRING + elif pipeline_spec_pb2.ParameterType.BOOLEAN in parameter_types: + canonical_parameter_type = pipeline_spec_pb2.ParameterType.BOOLEAN + else: + # Must be a double and int, promote to double. + assert pipeline_spec_pb2.ParameterType.NUMBER_DOUBLE in parameter_types, 'Types: {} [{} {}]'.format( + parameter_types, operand1, operand2) + assert pipeline_spec_pb2.ParameterType.NUMBER_INTEGER in parameter_types, 'Types: {} [{} {}]'.format( + parameter_types, operand1, operand2) + canonical_parameter_type = pipeline_spec_pb2.ParameterType.NUMBER_DOUBLE + elif len(parameter_types) == 1: # Both operands are the same type. + canonical_parameter_type = parameter_types.pop() + else: + # Probably shouldn't happen. + raise ValueError('Unable to determine operand types for' + ' "{}" and "{}"'.format(operand1, operand2)) operand_values = [] for value_or_reference in [operand1, operand2]: if isinstance(value_or_reference, dsl.PipelineParam): input_name = dsl_component_spec.additional_input_name_for_pipelineparam( value_or_reference) - # Condition operand is always parameters for now. - value_or_reference.param_type = ( - value_or_reference.param_type or value_type) - operand_values.append( - "inputs.parameters['{input_name}'].{value_field}".format( - input_name=input_name, - value_field=type_utils.get_parameter_type_field_name( - value_or_reference.param_type))) + operand_value = "inputs.parameter_values['{input_name}']".format( + input_name=input_name) + parameter_type = type_utils.get_parameter_type( + value_or_reference.param_type) + if parameter_type == pipeline_spec_pb2.ParameterType.NUMBER_INTEGER: + operand_value = 'int({})'.format(operand_value) + elif isinstance(value_or_reference, str): + operand_value = "'{}'".format(value_or_reference) + parameter_type = pipeline_spec_pb2.ParameterType.STRING + elif isinstance(value_or_reference, bool): + # Booleans need to be compared as 'true' or 'false' in CEL. 
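+                # str(True).lower() yields 'true', which matches CEL's boolean
+                # literal syntax.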
+ operand_value = str(value_or_reference).lower() + parameter_type = pipeline_spec_pb2.ParameterType.BOOLEAN + elif isinstance(value_or_reference, int): + operand_value = str(value_or_reference) + parameter_type = pipeline_spec_pb2.ParameterType.NUMBER_INTEGER else: - if isinstance(value_or_reference, str): - operand_values.append("'{}'".format(value_or_reference)) + assert isinstance(value_or_reference, float), value_or_reference + operand_value = str(value_or_reference) + parameter_type = pipeline_spec_pb2.ParameterType.NUMBER_DOUBLE + + if parameter_type != canonical_parameter_type: + # Type-cast so CEL does not complain. + if canonical_parameter_type == pipeline_spec_pb2.ParameterType.STRING: + assert parameter_type in [ + pipeline_spec_pb2.ParameterType.BOOLEAN, + pipeline_spec_pb2.ParameterType.NUMBER_INTEGER, + pipeline_spec_pb2.ParameterType.NUMBER_DOUBLE, + ] + operand_value = "'{}'".format(operand_value) + elif canonical_parameter_type == pipeline_spec_pb2.ParameterType.BOOLEAN: + assert parameter_type in [ + pipeline_spec_pb2.ParameterType.NUMBER_INTEGER, + pipeline_spec_pb2.ParameterType.NUMBER_DOUBLE, + ] + operand_value = 'true' if int( + operand_value) == 0 else 'false' else: - operand_values.append(str(value_or_reference)) + assert canonical_parameter_type == pipeline_spec_pb2.ParameterType.NUMBER_DOUBLE + assert parameter_type == pipeline_spec_pb2.ParameterType.NUMBER_INTEGER + operand_value = 'double({})'.format(operand_value) + + operand_values.append(operand_value) return tuple(operand_values) @@ -822,7 +901,7 @@ def _group_to_dag_spec( _for_loop.LoopArguments.LOOP_ITEM_NAME_BASE) subgroup_component_spec.input_definitions.parameters[ - loop_arguments_item].type = pipeline_spec_pb2.PrimitiveType.STRING + loop_arguments_item].parameter_type = pipeline_spec_pb2.ParameterType.STRING subgroup_task_spec.parameter_iterator.items.input_parameter = ( input_parameter_name) subgroup_task_spec.parameter_iterator.item_input = ( @@ -986,8 +1065,8 @@ def _create_pipeline_spec( pipeline_spec.pipeline_info.name = pipeline.name pipeline_spec.sdk_version = 'kfp-{}'.format(kfp.__version__) - # Schema version 2.0.0 is required for kfp-pipeline-spec>0.1.3.1 - pipeline_spec.schema_version = '2.0.0' + # Schema version 2.1.0 is required for kfp-pipeline-spec>0.1.3.1 + pipeline_spec.schema_version = '2.1.0' dsl_component_spec.build_component_inputs_spec( component_spec=pipeline_spec.root, @@ -1204,13 +1283,21 @@ def _create_pipeline_v2( # Fill in the default values. 
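# Rough sketch, not the patch's code, of what the resolved operands produce
# downstream: a CEL triggerPolicy condition that indexes the flattened
# inputs.parameter_values map instead of the old
# inputs.parameters[...].string_value accessors. The helper here is
# hypothetical; the compiled JSON test data later in this patch shows the
# real compiler output.
def build_trigger_condition(input_name: str, op: str, literal: str) -> str:
    # String literals are single-quoted for CEL; parameters are referenced
    # through the parameter_values map introduced by schema version 2.1.0.
    return "inputs.parameter_values['{}'] {} '{}'".format(
        input_name, op, literal)

assert (
    build_trigger_condition('pipelineparam--flip-coin-Output', '==', 'heads')
    == "inputs.parameter_values['pipelineparam--flip-coin-Output'] == 'heads'")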
args_list_with_defaults = [] if pipeline_meta.inputs: - args_list_with_defaults = [ - dsl.PipelineParam( - sanitize_k8s_name(input_spec.name, True), - param_type=input_spec.type, - value=input_spec.default) - for input_spec in pipeline_meta.inputs - ] + args_list_with_defaults = [] + for input_spec in pipeline_meta.inputs: + default_value = input_spec.default + + if input_spec.default is not None: + parameter_type = type_utils.get_parameter_type( + input_spec.type) + default_value = type_utils.deserialize_parameter_value( + value=input_spec.default, parameter_type=parameter_type) + + args_list_with_defaults.append( + dsl.PipelineParam( + sanitize_k8s_name(input_spec.name, True), + param_type=input_spec.type, + value=default_value)) # Making the pipeline group name unique to prevent name clashes with templates pipeline_group = dsl_pipeline.groups[0] diff --git a/sdk/python/kfp/v2/compiler/compiler_utils.py b/sdk/python/kfp/v2/compiler/compiler_utils.py index ac17ec4235e..73919f51635 100644 --- a/sdk/python/kfp/v2/compiler/compiler_utils.py +++ b/sdk/python/kfp/v2/compiler/compiler_utils.py @@ -16,6 +16,8 @@ import re from typing import Any, Mapping, Optional, Union +from google.protobuf import struct_pb2 + from kfp.containers import _component_builder from kfp.dsl import _container_op from kfp.dsl import _pipeline_param @@ -31,7 +33,7 @@ def build_runtime_config_spec( pipeline_parameters: Optional[Mapping[ str, _pipeline_param.PipelineParam]] = None, ) -> pipeline_spec_pb2.PipelineJob.RuntimeConfig: - """Converts pipeine parameters to runtime parameters mapping. + """Converts pipeline parameters to runtime parameters mapping. Args: output_directory: The root of pipeline outputs. @@ -42,32 +44,33 @@ def build_runtime_config_spec( A pipeline job RuntimeConfig object. """ - def _get_value( - param: _pipeline_param.PipelineParam) -> pipeline_spec_pb2.Value: - assert param.value is not None, 'None values should be filterd out.' + def _get_value(param: _pipeline_param.PipelineParam) -> struct_pb2.Value: + assert param.value is not None, 'None values should be filtered out.' - result = pipeline_spec_pb2.Value() + result = struct_pb2.Value() # TODO(chensun): remove defaulting to 'String' for None param_type once we # fix importer behavior. param_type = type_utils.get_parameter_type(param.param_type or 'String') - if param_type == pipeline_spec_pb2.PrimitiveType.INT: - result.int_value = int(param.value) - elif param_type == pipeline_spec_pb2.PrimitiveType.DOUBLE: - result.double_value = float(param.value) - elif param_type == pipeline_spec_pb2.PrimitiveType.STRING: - result.string_value = str(param.value) + if (param_type == pipeline_spec_pb2.ParameterType.NUMBER_INTEGER or + param_type == pipeline_spec_pb2.ParameterType.NUMBER_DOUBLE): + result.number_value = float(param.value) + elif param_type == pipeline_spec_pb2.ParameterType.STRING: + result.string_value = param.value + elif param_type == pipeline_spec_pb2.ParameterType.BOOLEAN: + result.bool_value = param.value + elif param_type == pipeline_spec_pb2.ParameterType.LIST: + result.list_value.extend(param.value) + elif param_type == pipeline_spec_pb2.ParameterType.STRUCT: + result.struct_value.update(param.value) else: - # For every other type, defaults to 'String'. - # TODO(chensun): remove this default behavior once we migrate from - # `pipeline_spec_pb2.Value` to `protobuf.Value`. 
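# Usage sketch, not the patch's code, for the struct_pb2.Value conversion in
# the _get_value hunk above. Unlike the retired pipeline_spec_pb2.Value,
# which only offered int_value / double_value / string_value,
# google.protobuf.struct_pb2.Value carries numbers, strings, booleans, lists
# and structs natively. That is what lets runtimeConfig serialize
# "input3": [1, 2, 3] as a real JSON list in the test data below instead of
# a JSON-encoded string.
from google.protobuf import json_format, struct_pb2

number = struct_pb2.Value()
number.number_value = float(2)  # NUMBER_INTEGER and NUMBER_DOUBLE both land here.

flag = struct_pb2.Value()
flag.bool_value = True  # BOOLEAN

items = struct_pb2.Value()
items.list_value.extend([1, 2, 3])  # LIST; the well-known type accepts plain Python values.

record = struct_pb2.Value()
record.struct_value.update({'A': 1, 'B': 2})  # STRUCT

print(json_format.MessageToJson(items))  # prints the JSON list form, e.g. [1.0, 2.0, 3.0]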
- result.string_value = str(param.value) + raise ValueError('Unknown type for PipelineParam {}'.format(param)) return result parameters = pipeline_parameters or {} return pipeline_spec_pb2.PipelineJob.RuntimeConfig( gcs_output_directory=output_directory, - parameters={ + parameter_values={ k: _get_value(v) for k, v in parameters.items() if v.value is not None diff --git a/sdk/python/kfp/v2/compiler/compiler_utils_test.py b/sdk/python/kfp/v2/compiler/compiler_utils_test.py index 6fd52ab58db..bc0a6965290 100644 --- a/sdk/python/kfp/v2/compiler/compiler_utils_test.py +++ b/sdk/python/kfp/v2/compiler/compiler_utils_test.py @@ -34,16 +34,10 @@ def assertProtoEquals(self, proto1: message.Message, def test_build_runtime_config_spec(self): expected_dict = { 'gcsOutputDirectory': 'gs://path', - 'parameters': { - 'input1': { - 'stringValue': 'test' - }, - 'input2': { - 'intValue': 2 - }, - 'input3': { - 'stringValue': '[1, 2, 3]' - } + 'parameterValues': { + 'input1': 'test', + 'input2': 2, + 'input3': [1, 2, 3] } } expected_spec = pipeline_spec_pb2.PipelineJob.RuntimeConfig() diff --git a/sdk/python/kfp/v2/compiler_cli_tests/test_data/lightweight_python_functions_v2_pipeline.json b/sdk/python/kfp/v2/compiler_cli_tests/test_data/lightweight_python_functions_v2_pipeline.json index 49aa2ab3b0a..52bc28b1b32 100644 --- a/sdk/python/kfp/v2/compiler_cli_tests/test_data/lightweight_python_functions_v2_pipeline.json +++ b/sdk/python/kfp/v2/compiler_cli_tests/test_data/lightweight_python_functions_v2_pipeline.json @@ -6,13 +6,13 @@ "inputDefinitions": { "parameters": { "input_dict_parameter": { - "type": "STRING" + "parameterType": "STRUCT" }, "input_list_parameter": { - "type": "STRING" + "parameterType": "LIST" }, "message": { - "type": "STRING" + "parameterType": "STRING" } } }, @@ -33,16 +33,16 @@ }, "parameters": { "output_bool_parameter_path": { - "type": "STRING" + "parameterType": "BOOLEAN" }, "output_dict_parameter_path": { - "type": "STRING" + "parameterType": "STRUCT" }, "output_list_parameter_path": { - "type": "STRING" + "parameterType": "LIST" }, "output_parameter_path": { - "type": "STRING" + "parameterType": "STRING" } } } @@ -66,19 +66,19 @@ }, "parameters": { "input_bool": { - "type": "STRING" + "parameterType": "BOOLEAN" }, "input_dict": { - "type": "STRING" + "parameterType": "STRUCT" }, "input_list": { - "type": "STRING" + "parameterType": "LIST" }, "message": { - "type": "STRING" + "parameterType": "STRING" }, "num_steps": { - "type": "INT" + "parameterType": "NUMBER_INTEGER" } } }, @@ -107,7 +107,7 @@ "command": [ "sh", "-c", - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location 'kfp==1.8.5' && \"$0\" \"$@\"\n", + "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location 'kfp==1.8.6' && \"$0\" \"$@\"\n", "sh", "-ec", "program_path=$(mktemp -d)\nprintf \"%s\" \"$0\" > \"$program_path/ephemeral_component.py\"\npython3 -m kfp.v2.components.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n", @@ -127,7 +127,7 @@ "command": [ "sh", "-c", - "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location 'kfp==1.8.5' && \"$0\" \"$@\"\n", + "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location 'kfp==1.8.6' && \"$0\" \"$@\"\n", "sh", "-ec", "program_path=$(mktemp -d)\nprintf \"%s\" \"$0\" > \"$program_path/ephemeral_component.py\"\npython3 -m kfp.v2.components.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n", @@ -158,9 +158,11 @@ }, "input_list_parameter": { "runtimeValue": { - "constantValue": { - "stringValue": "[\"a\", \"b\", \"c\"]" - } + "constant": [ + "a", + "b", + "c" + ] } }, "message": { @@ -224,9 +226,7 @@ }, "num_steps": { "runtimeValue": { - "constantValue": { - "intValue": "100" - } + "constant": 100.0 } } } @@ -240,22 +240,23 @@ "inputDefinitions": { "parameters": { "input_dict": { - "type": "STRING" + "parameterType": "STRUCT" }, "message": { - "type": "STRING" + "parameterType": "STRING" } } } }, - "schemaVersion": "2.0.0", - "sdkVersion": "kfp-1.8.5" + "schemaVersion": "2.1.0", + "sdkVersion": "kfp-1.8.6" }, "runtimeConfig": { "gcsOutputDirectory": "dummy_root", - "parameters": { + "parameterValues": { "input_dict": { - "stringValue": "{\"A\": 1, \"B\": 2}" + "A": 1.0, + "B": 2.0 } } } diff --git a/sdk/python/kfp/v2/compiler_cli_tests/test_data/lightweight_python_functions_v2_with_outputs.json b/sdk/python/kfp/v2/compiler_cli_tests/test_data/lightweight_python_functions_v2_with_outputs.json index 875c6edbf41..9761d894ff1 100644 --- a/sdk/python/kfp/v2/compiler_cli_tests/test_data/lightweight_python_functions_v2_with_outputs.json +++ b/sdk/python/kfp/v2/compiler_cli_tests/test_data/lightweight_python_functions_v2_with_outputs.json @@ -6,17 +6,17 @@ "inputDefinitions": { "parameters": { "first": { - "type": "INT" + "parameterType": "NUMBER_INTEGER" }, "second": { - "type": "INT" + "parameterType": "NUMBER_INTEGER" } } }, "outputDefinitions": { "parameters": { "Output": { - "type": "INT" + "parameterType": "NUMBER_INTEGER" } } } @@ -26,17 +26,17 @@ "inputDefinitions": { "parameters": { "first": { - "type": "STRING" + "parameterType": "STRING" }, "second": { - "type": "STRING" + "parameterType": "STRING" } } }, "outputDefinitions": { "parameters": { "Output": { - "type": "STRING" + "parameterType": "STRING" } } } @@ -46,10 +46,10 @@ "inputDefinitions": { "parameters": { "message": { - "type": "STRING" + "parameterType": "STRING" }, "number": { - "type": "INT" + "parameterType": "NUMBER_INTEGER" } } }, @@ -93,7 +93,7 @@ }, "parameters": { "scalar": { - "type": "STRING" + "parameterType": "STRING" } } } @@ -112,7 +112,7 @@ "command": [ "sh", "-c", - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location 'kfp==1.8.5' && \"$0\" \"$@\"\n", + "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location 'kfp==1.8.6' && \"$0\" \"$@\"\n", "sh", "-ec", "program_path=$(mktemp -d)\nprintf \"%s\" \"$0\" > \"$program_path/ephemeral_component.py\"\npython3 -m kfp.v2.components.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n", @@ -132,7 +132,7 @@ "command": [ "sh", "-c", - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location 'kfp==1.8.5' && \"$0\" \"$@\"\n", + "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location 'kfp==1.8.6' && \"$0\" \"$@\"\n", "sh", "-ec", "program_path=$(mktemp -d)\nprintf \"%s\" \"$0\" > \"$program_path/ephemeral_component.py\"\npython3 -m kfp.v2.components.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n", @@ -152,7 +152,7 @@ "command": [ "sh", "-c", - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location 'kfp==1.8.5' && \"$0\" \"$@\"\n", + "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location 'kfp==1.8.6' && \"$0\" \"$@\"\n", "sh", "-ec", "program_path=$(mktemp -d)\nprintf \"%s\" \"$0\" > \"$program_path/ephemeral_component.py\"\npython3 -m kfp.v2.components.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n", @@ -172,7 +172,7 @@ "command": [ "sh", "-c", - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location 'kfp==1.8.5' && \"$0\" \"$@\"\n", + "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location 'kfp==1.8.6' && \"$0\" \"$@\"\n", "sh", "-ec", "program_path=$(mktemp -d)\nprintf \"%s\" \"$0\" > \"$program_path/ephemeral_component.py\"\npython3 -m kfp.v2.components.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n", @@ -303,16 +303,16 @@ "inputDefinitions": { "parameters": { "first_message": { - "type": "STRING" + "parameterType": "STRING" }, "first_number": { - "type": "INT" + "parameterType": "NUMBER_INTEGER" }, "second_message": { - "type": "STRING" + "parameterType": "STRING" }, "second_number": { - "type": "INT" + "parameterType": "NUMBER_INTEGER" } } }, @@ -327,8 +327,8 @@ } } }, - "schemaVersion": "2.0.0", - "sdkVersion": "kfp-1.8.5" + "schemaVersion": "2.1.0", + "sdkVersion": "kfp-1.8.6" }, "runtimeConfig": { "gcsOutputDirectory": "dummy_root" diff --git a/sdk/python/kfp/v2/compiler_cli_tests/test_data/pipeline_with_after.json b/sdk/python/kfp/v2/compiler_cli_tests/test_data/pipeline_with_after.json index cf4076ad05a..5b4ba87fda6 100644 --- a/sdk/python/kfp/v2/compiler_cli_tests/test_data/pipeline_with_after.json +++ b/sdk/python/kfp/v2/compiler_cli_tests/test_data/pipeline_with_after.json @@ -6,7 +6,7 @@ "inputDefinitions": { "parameters": { "text": { - "type": "STRING" + "parameterType": "STRING" } } } @@ -16,7 +16,7 @@ "inputDefinitions": { "parameters": { "text": { - "type": "STRING" + "parameterType": "STRING" } } } @@ -26,7 +26,7 @@ "inputDefinitions": { "parameters": { "text": { - "type": "STRING" + "parameterType": "STRING" } } } @@ -86,9 +86,7 @@ "parameters": { "text": { "runtimeValue": { - "constantValue": { - "stringValue": "1st task" - } + "constant": "1st task" } } } @@ -111,9 +109,7 @@ "parameters": { "text": { "runtimeValue": { - "constantValue": { - "stringValue": "2nd task" - } + "constant": "2nd task" } } } @@ -137,9 +133,7 @@ "parameters": { "text": { "runtimeValue": { - "constantValue": { - "stringValue": "3rd task" - } + "constant": "3rd task" } } } @@ -151,8 +145,8 @@ } } }, - "schemaVersion": "2.0.0", - "sdkVersion": "kfp-1.7.2" + "schemaVersion": "2.1.0", + "sdkVersion": "kfp-1.8.6" }, "runtimeConfig": { "gcsOutputDirectory": "dummy_root" diff --git a/sdk/python/kfp/v2/compiler_cli_tests/test_data/pipeline_with_concat_placeholder.json b/sdk/python/kfp/v2/compiler_cli_tests/test_data/pipeline_with_concat_placeholder.json index 675d57e486d..4eb955c2f4b 100644 --- a/sdk/python/kfp/v2/compiler_cli_tests/test_data/pipeline_with_concat_placeholder.json +++ b/sdk/python/kfp/v2/compiler_cli_tests/test_data/pipeline_with_concat_placeholder.json @@ -6,7 +6,7 @@ "inputDefinitions": { "parameters": { "input_prefix": { - "type": "STRING" + "parameterType": "STRING" } } } @@ -42,9 +42,7 @@ "parameters": { "input_prefix": { "runtimeValue": { - "constantValue": { - "stringValue": "some prefix:" - } + "constant": "some prefix:" } } } @@ -56,8 +54,8 @@ } } }, - "schemaVersion": "2.0.0", - "sdkVersion": "kfp-1.7.2" + "schemaVersion": "2.1.0", + "sdkVersion": "kfp-1.8.6" }, "runtimeConfig": { "gcsOutputDirectory": "dummy_root" diff --git a/sdk/python/kfp/v2/compiler_cli_tests/test_data/pipeline_with_condition.json b/sdk/python/kfp/v2/compiler_cli_tests/test_data/pipeline_with_condition.json index b960c550daa..bee99cfa87c 100644 --- a/sdk/python/kfp/v2/compiler_cli_tests/test_data/pipeline_with_condition.json +++ 
b/sdk/python/kfp/v2/compiler_cli_tests/test_data/pipeline_with_condition.json @@ -60,10 +60,10 @@ "inputDefinitions": { "parameters": { "pipelineparam--flip-coin-Output": { - "type": "STRING" + "parameterType": "STRING" }, "pipelineparam--text": { - "type": "STRING" + "parameterType": "STRING" } } } @@ -73,7 +73,7 @@ "outputDefinitions": { "parameters": { "Output": { - "type": "STRING" + "parameterType": "STRING" } } } @@ -83,7 +83,7 @@ "outputDefinitions": { "parameters": { "Output": { - "type": "STRING" + "parameterType": "STRING" } } } @@ -93,7 +93,7 @@ "inputDefinitions": { "parameters": { "msg": { - "type": "STRING" + "parameterType": "STRING" } } } @@ -103,7 +103,7 @@ "inputDefinitions": { "parameters": { "msg": { - "type": "STRING" + "parameterType": "STRING" } } } @@ -113,7 +113,7 @@ "inputDefinitions": { "parameters": { "msg": { - "type": "STRING" + "parameterType": "STRING" } } } @@ -228,7 +228,7 @@ "name": "condition-1" }, "triggerPolicy": { - "condition": "inputs.parameters['pipelineparam--flip-coin-Output'].string_value == 'heads'" + "condition": "inputs.parameter_values['pipelineparam--flip-coin-Output'] == 'heads'" } }, "flip-coin": { @@ -269,20 +269,18 @@ "inputDefinitions": { "parameters": { "text": { - "type": "STRING" + "parameterType": "STRING" } } } }, - "schemaVersion": "2.0.0", - "sdkVersion": "kfp-1.7.2" + "schemaVersion": "2.1.0", + "sdkVersion": "kfp-1.8.6" }, "runtimeConfig": { "gcsOutputDirectory": "dummy_root", - "parameters": { - "text": { - "stringValue": "condition test" - } + "parameterValues": { + "text": "condition test" } } } \ No newline at end of file diff --git a/sdk/python/kfp/v2/compiler_cli_tests/test_data/pipeline_with_custom_job_spec.json b/sdk/python/kfp/v2/compiler_cli_tests/test_data/pipeline_with_custom_job_spec.json index 1e4b89576f6..38e72ae8754 100644 --- a/sdk/python/kfp/v2/compiler_cli_tests/test_data/pipeline_with_custom_job_spec.json +++ b/sdk/python/kfp/v2/compiler_cli_tests/test_data/pipeline_with_custom_job_spec.json @@ -6,7 +6,7 @@ "inputDefinitions": { "parameters": { "input1": { - "type": "STRING" + "parameterType": "STRING" } } } @@ -16,7 +16,7 @@ "inputDefinitions": { "parameters": { "input1": { - "type": "STRING" + "parameterType": "STRING" } } } @@ -41,7 +41,7 @@ "command": [ "sh", "-c", - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location 'kfp==1.8.5' && \"$0\" \"$@\"\n", + "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location 'kfp==1.8.6' && \"$0\" \"$@\"\n", "sh", "-ec", "program_path=$(mktemp -d)\nprintf \"%s\" \"$0\" > \"$program_path/ephemeral_component.py\"\npython3 -m kfp.v2.components.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n", @@ -65,7 +65,7 @@ "command": [ "sh", "-c", - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location 'kfp==1.8.5' && \"$0\" \"$@\"\n", + "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location 'kfp==1.8.6' && \"$0\" \"$@\"\n", "sh", "-ec", "program_path=$(mktemp -d)\nprintf \"%s\" \"$0\" > \"$program_path/ephemeral_component.py\"\npython3 -m kfp.v2.components.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n", @@ -128,9 +128,7 @@ "parameters": { "input1": { "runtimeValue": { - "constantValue": { - "stringValue": "hello-world" - } + "constant": "hello-world" } } } @@ -150,9 +148,7 @@ "parameters": { "input1": { "runtimeValue": { - "constantValue": { - "stringValue": "advanced setting - raw workerPoolSpec" - } + "constant": "advanced setting - raw workerPoolSpec" } } } @@ -164,8 +160,8 @@ } } }, - "schemaVersion": "2.0.0", - "sdkVersion": "kfp-1.8.5" + "schemaVersion": "2.1.0", + "sdkVersion": "kfp-1.8.6" }, "runtimeConfig": { "gcsOutputDirectory": "dummy_root" diff --git a/sdk/python/kfp/v2/compiler_cli_tests/test_data/pipeline_with_env.json b/sdk/python/kfp/v2/compiler_cli_tests/test_data/pipeline_with_env.json index f2c3c3ddeb4..78ec22e9660 100644 --- a/sdk/python/kfp/v2/compiler_cli_tests/test_data/pipeline_with_env.json +++ b/sdk/python/kfp/v2/compiler_cli_tests/test_data/pipeline_with_env.json @@ -41,7 +41,7 @@ "command": [ "sh", "-c", - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location 'kfp==1.8.5' && \"$0\" \"$@\"\n", + "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location 'kfp==1.8.6' && \"$0\" \"$@\"\n", "sh", "-ec", "program_path=$(mktemp -d)\nprintf \"%s\" \"$0\" > \"$program_path/ephemeral_component.py\"\npython3 -m kfp.v2.components.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n", @@ -89,8 +89,8 @@ } } }, - "schemaVersion": "2.0.0", - "sdkVersion": "kfp-1.8.5" + "schemaVersion": "2.1.0", + "sdkVersion": "kfp-1.8.6" }, "runtimeConfig": { "gcsOutputDirectory": "dummy_root" diff --git a/sdk/python/kfp/v2/compiler_cli_tests/test_data/pipeline_with_exit_handler.json b/sdk/python/kfp/v2/compiler_cli_tests/test_data/pipeline_with_exit_handler.json index 7fb94bdfa35..12850386836 100644 --- a/sdk/python/kfp/v2/compiler_cli_tests/test_data/pipeline_with_exit_handler.json +++ b/sdk/python/kfp/v2/compiler_cli_tests/test_data/pipeline_with_exit_handler.json @@ -15,9 +15,7 @@ "parameters": { "message": { "runtimeValue": { - "constantValue": { - "stringValue": "Task failed." - } + "constant": "Task failed." } } } @@ -49,7 +47,7 @@ "inputDefinitions": { "parameters": { "pipelineparam--message": { - "type": "STRING" + "parameterType": "STRING" } } } @@ -59,7 +57,7 @@ "inputDefinitions": { "parameters": { "message": { - "type": "STRING" + "parameterType": "STRING" } } } @@ -69,7 +67,7 @@ "inputDefinitions": { "parameters": { "message": { - "type": "STRING" + "parameterType": "STRING" } } } @@ -79,7 +77,7 @@ "inputDefinitions": { "parameters": { "message": { - "type": "STRING" + "parameterType": "STRING" } } } @@ -98,7 +96,7 @@ "command": [ "sh", "-c", - "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location 'kfp==1.8.5' && \"$0\" \"$@\"\n", + "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location 'kfp==1.8.6' && \"$0\" \"$@\"\n", "sh", "-ec", "program_path=$(mktemp -d)\nprintf \"%s\" \"$0\" > \"$program_path/ephemeral_component.py\"\npython3 -m kfp.v2.components.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n", @@ -118,7 +116,7 @@ "command": [ "sh", "-c", - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location 'kfp==1.8.5' && \"$0\" \"$@\"\n", + "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location 'kfp==1.8.6' && \"$0\" \"$@\"\n", "sh", "-ec", "program_path=$(mktemp -d)\nprintf \"%s\" \"$0\" > \"$program_path/ephemeral_component.py\"\npython3 -m kfp.v2.components.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n", @@ -138,7 +136,7 @@ "command": [ "sh", "-c", - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location 'kfp==1.8.5' && \"$0\" \"$@\"\n", + "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location 'kfp==1.8.6' && \"$0\" \"$@\"\n", "sh", "-ec", "program_path=$(mktemp -d)\nprintf \"%s\" \"$0\" > \"$program_path/ephemeral_component.py\"\npython3 -m kfp.v2.components.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n", @@ -181,9 +179,7 @@ "parameters": { "message": { "runtimeValue": { - "constantValue": { - "stringValue": "Exit handler has worked!" - } + "constant": "Exit handler has worked!" } } } @@ -200,20 +196,18 @@ "inputDefinitions": { "parameters": { "message": { - "type": "STRING" + "parameterType": "STRING" } } } }, - "schemaVersion": "2.0.0", - "sdkVersion": "kfp-1.8.5" + "schemaVersion": "2.1.0", + "sdkVersion": "kfp-1.8.6" }, "runtimeConfig": { "gcsOutputDirectory": "dummy_root", - "parameters": { - "message": { - "stringValue": "Hello World!" - } + "parameterValues": { + "message": "Hello World!" } } } \ No newline at end of file diff --git a/sdk/python/kfp/v2/compiler_cli_tests/test_data/pipeline_with_gcpc_types.json b/sdk/python/kfp/v2/compiler_cli_tests/test_data/pipeline_with_gcpc_types.json index 61d2f0113bc..efc5297d903 100644 --- a/sdk/python/kfp/v2/compiler_cli_tests/test_data/pipeline_with_gcpc_types.json +++ b/sdk/python/kfp/v2/compiler_cli_tests/test_data/pipeline_with_gcpc_types.json @@ -41,7 +41,7 @@ "command": [ "sh", "-c", - "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location 'kfp==1.8.5' && \"$0\" \"$@\"\n", + "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location 'kfp==1.8.6' && \"$0\" \"$@\"\n", "sh", "-ec", "program_path=$(mktemp -d)\nprintf \"%s\" \"$0\" > \"$program_path/ephemeral_component.py\"\npython3 -m kfp.v2.components.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n", @@ -107,8 +107,8 @@ } } }, - "schemaVersion": "2.0.0", - "sdkVersion": "kfp-1.8.5" + "schemaVersion": "2.1.0", + "sdkVersion": "kfp-1.8.6" }, "runtimeConfig": {} } \ No newline at end of file diff --git a/sdk/python/kfp/v2/compiler_cli_tests/test_data/pipeline_with_if_placeholder.json b/sdk/python/kfp/v2/compiler_cli_tests/test_data/pipeline_with_if_placeholder.json index d354707f554..a3e0df000bb 100644 --- a/sdk/python/kfp/v2/compiler_cli_tests/test_data/pipeline_with_if_placeholder.json +++ b/sdk/python/kfp/v2/compiler_cli_tests/test_data/pipeline_with_if_placeholder.json @@ -6,10 +6,10 @@ "inputDefinitions": { "parameters": { "optional_input_1": { - "type": "STRING" + "parameterType": "STRING" }, "required_input": { - "type": "STRING" + "parameterType": "STRING" } } } @@ -64,19 +64,19 @@ "inputDefinitions": { "parameters": { "input0": { - "type": "STRING" + "parameterType": "STRING" }, "input1": { - "type": "STRING" + "parameterType": "STRING" }, "input2": { - "type": "STRING" + "parameterType": "STRING" } } } }, - "schemaVersion": "2.0.0", - "sdkVersion": "kfp-1.7.2" + "schemaVersion": "2.1.0", + "sdkVersion": "kfp-1.8.6" }, "runtimeConfig": { "gcsOutputDirectory": "dummy_root" diff --git a/sdk/python/kfp/v2/compiler_cli_tests/test_data/pipeline_with_importer.json b/sdk/python/kfp/v2/compiler_cli_tests/test_data/pipeline_with_importer.json index 94001ca7ff6..07622538962 100644 --- a/sdk/python/kfp/v2/compiler_cli_tests/test_data/pipeline_with_importer.json +++ b/sdk/python/kfp/v2/compiler_cli_tests/test_data/pipeline_with_importer.json @@ -51,10 +51,10 @@ "inputDefinitions": { "parameters": { "pipelineparam--dataset2": { - "type": "STRING" + "parameterType": "STRING" }, "pipelineparam--train-scalar": { - "type": "STRING" + "parameterType": "STRING" } } } @@ -124,14 +124,14 @@ "inputDefinitions": { "parameters": { "value": { - "type": "STRING" + "parameterType": "STRING" } } }, "outputDefinitions": { "parameters": { "Output": { - "type": "STRING" + "parameterType": "STRING" } } } @@ -159,7 +159,7 @@ }, "parameters": { "scalar": { - "type": "STRING" + "parameterType": "STRING" } } } @@ -187,7 +187,7 @@ }, "parameters": { "scalar": { - "type": "STRING" + "parameterType": "STRING" } } } @@ -215,7 +215,7 @@ }, "parameters": { "scalar": { - "type": "STRING" + "parameterType": "STRING" } } } @@ -286,7 +286,7 @@ "command": [ "sh", "-c", - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location 'kfp==1.8.5' && \"$0\" \"$@\"\n", + "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location 'kfp==1.8.6' && \"$0\" \"$@\"\n", "sh", "-ec", "program_path=$(mktemp -d)\nprintf \"%s\" \"$0\" > \"$program_path/ephemeral_component.py\"\npython3 -m kfp.v2.components.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n", @@ -306,7 +306,7 @@ "command": [ "sh", "-c", - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location 'kfp==1.8.5' && \"$0\" \"$@\"\n", + "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location 'kfp==1.8.6' && \"$0\" \"$@\"\n", "sh", "-ec", "program_path=$(mktemp -d)\nprintf \"%s\" \"$0\" > \"$program_path/ephemeral_component.py\"\npython3 -m kfp.v2.components.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n", @@ -326,7 +326,7 @@ "command": [ "sh", "-c", - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location 'kfp==1.8.5' && \"$0\" \"$@\"\n", + "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location 'kfp==1.8.6' && \"$0\" \"$@\"\n", "sh", "-ec", "program_path=$(mktemp -d)\nprintf \"%s\" \"$0\" > \"$program_path/ephemeral_component.py\"\npython3 -m kfp.v2.components.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n", @@ -367,7 +367,7 @@ "name": "condition-1" }, "triggerPolicy": { - "condition": "inputs.parameters['pipelineparam--train-scalar'].string_value == '123'" + "condition": "inputs.parameter_values['pipelineparam--train-scalar'] == '123'" } }, "importer": { @@ -487,20 +487,18 @@ "inputDefinitions": { "parameters": { "dataset2": { - "type": "STRING" + "parameterType": "STRING" } } } }, - "schemaVersion": "2.0.0", - "sdkVersion": "kfp-1.8.5" + "schemaVersion": "2.1.0", + "sdkVersion": "kfp-1.8.6" }, "runtimeConfig": { "gcsOutputDirectory": "dummy_root", - "parameters": { - "dataset2": { - "stringValue": "gs://ml-pipeline-playground/shakespeare2.txt" - } + "parameterValues": { + "dataset2": "gs://ml-pipeline-playground/shakespeare2.txt" } } } \ No newline at end of file diff --git a/sdk/python/kfp/v2/compiler_cli_tests/test_data/pipeline_with_loops.json b/sdk/python/kfp/v2/compiler_cli_tests/test_data/pipeline_with_loops.json index e8b2082bcd5..4b1804eb688 100644 --- a/sdk/python/kfp/v2/compiler_cli_tests/test_data/pipeline_with_loops.json +++ b/sdk/python/kfp/v2/compiler_cli_tests/test_data/pipeline_with_loops.json @@ -6,7 +6,7 @@ "outputDefinitions": { "parameters": { "Output": { - "type": "STRING" + "parameterType": "LIST" } } } @@ -37,10 +37,10 @@ "inputDefinitions": { "parameters": { "pipelineparam--loop_parameter": { - "type": "STRING" + "parameterType": "LIST" }, "pipelineparam--loop_parameter-loop-item": { - "type": "STRING" + "parameterType": "STRING" } } } @@ -109,10 
+109,10 @@ "inputDefinitions": { "parameters": { "pipelineparam--args-generator-op-Output": { - "type": "STRING" + "parameterType": "LIST" }, "pipelineparam--args-generator-op-Output-loop-item": { - "type": "STRING" + "parameterType": "STRING" } } } @@ -181,7 +181,7 @@ "inputDefinitions": { "parameters": { "pipelineparam--loop-item-param-3": { - "type": "STRING" + "parameterType": "STRING" } } } @@ -191,7 +191,7 @@ "inputDefinitions": { "parameters": { "msg": { - "type": "STRING" + "parameterType": "STRING" } } } @@ -201,7 +201,7 @@ "inputDefinitions": { "parameters": { "msg": { - "type": "STRING" + "parameterType": "STRING" } } } @@ -211,7 +211,7 @@ "inputDefinitions": { "parameters": { "msg": { - "type": "STRING" + "parameterType": "STRING" } } } @@ -221,7 +221,7 @@ "inputDefinitions": { "parameters": { "msg": { - "type": "STRING" + "parameterType": "STRING" } } } @@ -231,7 +231,7 @@ "inputDefinitions": { "parameters": { "msg": { - "type": "STRING" + "parameterType": "STRING" } } } @@ -241,7 +241,7 @@ "inputDefinitions": { "parameters": { "msg": { - "type": "STRING" + "parameterType": "STRING" } } } @@ -251,7 +251,7 @@ "inputDefinitions": { "parameters": { "msg": { - "type": "STRING" + "parameterType": "STRING" } } } @@ -270,7 +270,7 @@ "command": [ "sh", "-c", - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location 'kfp==1.8.5' && \"$0\" \"$@\"\n", + "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location 'kfp==1.8.6' && \"$0\" \"$@\"\n", "sh", "-ec", "program_path=$(mktemp -d)\nprintf \"%s\" \"$0\" > \"$program_path/ephemeral_component.py\"\npython3 -m kfp.v2.components.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n", @@ -290,7 +290,7 @@ "command": [ "sh", "-c", - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location 'kfp==1.8.5' && \"$0\" \"$@\"\n", + "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location 'kfp==1.8.6' && \"$0\" \"$@\"\n", "sh", "-ec", "program_path=$(mktemp -d)\nprintf \"%s\" \"$0\" > \"$program_path/ephemeral_component.py\"\npython3 -m kfp.v2.components.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n", @@ -310,7 +310,7 @@ "command": [ "sh", "-c", - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location 'kfp==1.8.5' && \"$0\" \"$@\"\n", + "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location 'kfp==1.8.6' && \"$0\" \"$@\"\n", "sh", "-ec", "program_path=$(mktemp -d)\nprintf \"%s\" \"$0\" > \"$program_path/ephemeral_component.py\"\npython3 -m kfp.v2.components.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n", @@ -330,7 +330,7 @@ "command": [ "sh", "-c", - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location 'kfp==1.8.5' && \"$0\" \"$@\"\n", + "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location 'kfp==1.8.6' && \"$0\" \"$@\"\n", "sh", "-ec", "program_path=$(mktemp -d)\nprintf \"%s\" \"$0\" > \"$program_path/ephemeral_component.py\"\npython3 -m kfp.v2.components.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n", @@ -350,7 +350,7 @@ "command": [ "sh", "-c", - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location 'kfp==1.8.5' && \"$0\" \"$@\"\n", + "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location 'kfp==1.8.6' && \"$0\" \"$@\"\n", "sh", "-ec", "program_path=$(mktemp -d)\nprintf \"%s\" \"$0\" > \"$program_path/ephemeral_component.py\"\npython3 -m kfp.v2.components.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n", @@ -370,7 +370,7 @@ "command": [ "sh", "-c", - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location 'kfp==1.8.5' && \"$0\" \"$@\"\n", + "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location 'kfp==1.8.6' && \"$0\" \"$@\"\n", "sh", "-ec", "program_path=$(mktemp -d)\nprintf \"%s\" \"$0\" > \"$program_path/ephemeral_component.py\"\npython3 -m kfp.v2.components.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n", @@ -390,7 +390,7 @@ "command": [ "sh", "-c", - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location 'kfp==1.8.5' && \"$0\" \"$@\"\n", + "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location 'kfp==1.8.6' && \"$0\" \"$@\"\n", "sh", "-ec", "program_path=$(mktemp -d)\nprintf \"%s\" \"$0\" > \"$program_path/ephemeral_component.py\"\npython3 -m kfp.v2.components.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n", @@ -410,7 +410,7 @@ "command": [ "sh", "-c", - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location 'kfp==1.8.5' && \"$0\" \"$@\"\n", + "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location 'kfp==1.8.6' && \"$0\" \"$@\"\n", "sh", "-ec", "program_path=$(mktemp -d)\nprintf \"%s\" \"$0\" > \"$program_path/ephemeral_component.py\"\npython3 -m kfp.v2.components.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n", @@ -505,13 +505,13 @@ "inputDefinitions": { "parameters": { "loop_parameter": { - "type": "STRING" + "parameterType": "LIST" } } } }, - "schemaVersion": "2.0.0", - "sdkVersion": "kfp-1.8.5" + "schemaVersion": "2.1.0", + "sdkVersion": "kfp-1.8.6" }, "runtimeConfig": {} } \ No newline at end of file diff --git a/sdk/python/kfp/v2/compiler_cli_tests/test_data/pipeline_with_loops_and_conditions.json b/sdk/python/kfp/v2/compiler_cli_tests/test_data/pipeline_with_loops_and_conditions.json index b6d548523c6..cb23a9445cc 100644 --- a/sdk/python/kfp/v2/compiler_cli_tests/test_data/pipeline_with_loops_and_conditions.json +++ b/sdk/python/kfp/v2/compiler_cli_tests/test_data/pipeline_with_loops_and_conditions.json @@ -6,7 +6,7 @@ "outputDefinitions": { "parameters": { "Output": { - "type": "STRING" + "parameterType": "LIST" } } } @@ -16,7 +16,7 @@ "outputDefinitions": { "parameters": { "Output": { - "type": "STRING" + "parameterType": "LIST" } } } @@ -79,16 +79,16 @@ "inputDefinitions": { "parameters": { "pipelineparam--args-generator-op-Output": { - "type": "STRING" + "parameterType": "LIST" }, "pipelineparam--flip-coin-op-Output": { - "type": "STRING" + "parameterType": "STRING" }, "pipelineparam--loop_parameter": { - "type": "STRING" + "parameterType": "LIST" }, "pipelineparam--msg": { - "type": "STRING" + "parameterType": "STRING" } } } @@ -107,9 +107,7 @@ "parameters": { "msg": { "runtimeValue": { - "constantValue": { - "stringValue": "1" - } + "constant": "1" } } } @@ -123,10 +121,10 @@ "inputDefinitions": { "parameters": { "pipelineparam--flip-coin-op-Output": { - "type": "STRING" + "parameterType": "STRING" }, "pipelineparam--loop-item-param-11": { - "type": "STRING" + "parameterType": "STRING" } } } @@ -164,10 +162,10 @@ "inputDefinitions": { "parameters": { "pipelineparam--loop_parameter-loop-item": { - "type": "STRING" + "parameterType": "STRING" }, "pipelineparam--loop_parameter-loop-item-subvar-A_a": { - "type": "STRING" + "parameterType": "STRING" } } } @@ -199,13 +197,13 @@ "inputDefinitions": { "parameters": { "pipelineparam--args-generator-op-Output-loop-item": { - "type": "STRING" + "parameterType": "STRING" }, "pipelineparam--args-generator-op-Output-loop-item-subvar-A_a": { - "type": "STRING" + "parameterType": "STRING" }, 
"pipelineparam--flip-coin-op-Output": { - "type": "STRING" + "parameterType": "STRING" } } } @@ -237,10 +235,10 @@ "inputDefinitions": { "parameters": { "pipelineparam--args-generator-op-Output-loop-item": { - "type": "STRING" + "parameterType": "STRING" }, "pipelineparam--flip-coin-op-Output": { - "type": "STRING" + "parameterType": "STRING" } } } @@ -277,13 +275,13 @@ "inputDefinitions": { "parameters": { "pipelineparam--args-generator-op-Output-loop-item": { - "type": "STRING" + "parameterType": "STRING" }, "pipelineparam--args-generator-op-Output-loop-item-subvar-A_a": { - "type": "STRING" + "parameterType": "STRING" }, "pipelineparam--flip-coin-op-Output": { - "type": "STRING" + "parameterType": "STRING" } } } @@ -293,7 +291,7 @@ "outputDefinitions": { "parameters": { "Output": { - "type": "STRING" + "parameterType": "STRING" } } } @@ -328,16 +326,16 @@ "inputDefinitions": { "parameters": { "pipelineparam--args-generator-op-2-Output": { - "type": "STRING" + "parameterType": "LIST" }, "pipelineparam--args-generator-op-2-Output-loop-item": { - "type": "STRING" + "parameterType": "STRING" }, "pipelineparam--flip-coin-op-Output": { - "type": "STRING" + "parameterType": "STRING" }, "pipelineparam--loop_parameter-loop-item": { - "type": "STRING" + "parameterType": "STRING" } } } @@ -363,7 +361,7 @@ "name": "condition-13" }, "triggerPolicy": { - "condition": "inputs.parameters['pipelineparam--loop-item-param-11'].string_value == '1'" + "condition": "inputs.parameter_values['pipelineparam--loop-item-param-11'] == '1'" } }, "print-op-8": { @@ -389,10 +387,10 @@ "inputDefinitions": { "parameters": { "pipelineparam--flip-coin-op-Output": { - "type": "STRING" + "parameterType": "STRING" }, "pipelineparam--loop-item-param-11": { - "type": "STRING" + "parameterType": "STRING" } } } @@ -419,7 +417,7 @@ "name": "condition-15" }, "triggerPolicy": { - "condition": "inputs.parameters['pipelineparam--loop_parameter-loop-item-subvar-A_a'].string_value == 'heads'" + "condition": "inputs.parameter_values['pipelineparam--loop_parameter-loop-item-subvar-A_a'] == 'heads'" } } } @@ -427,10 +425,10 @@ "inputDefinitions": { "parameters": { "pipelineparam--loop_parameter": { - "type": "STRING" + "parameterType": "LIST" }, "pipelineparam--loop_parameter-loop-item": { - "type": "STRING" + "parameterType": "STRING" } } } @@ -461,13 +459,13 @@ "inputDefinitions": { "parameters": { "pipelineparam--loop_parameter-loop-item": { - "type": "STRING" + "parameterType": "STRING" }, "pipelineparam--loop_parameter-loop-item-subvar-B_b": { - "type": "STRING" + "parameterType": "STRING" }, "pipelineparam--loop_parameter-loop-item-subvar-B_b-loop-item": { - "type": "STRING" + "parameterType": "STRING" } } } @@ -497,7 +495,7 @@ "name": "condition-3" }, "triggerPolicy": { - "condition": "inputs.parameters['pipelineparam--args-generator-op-Output-loop-item-subvar-A_a'].string_value == 'heads'" + "condition": "inputs.parameter_values['pipelineparam--args-generator-op-Output-loop-item-subvar-A_a'] == 'heads'" } }, "condition-4": { @@ -518,7 +516,7 @@ "name": "condition-4" }, "triggerPolicy": { - "condition": "inputs.parameters['pipelineparam--flip-coin-op-Output'].string_value == 'heads'" + "condition": "inputs.parameter_values['pipelineparam--flip-coin-op-Output'] == 'heads'" } }, "condition-5": { @@ -543,7 +541,7 @@ "name": "condition-5" }, "triggerPolicy": { - "condition": "inputs.parameters['pipelineparam--args-generator-op-Output-loop-item-subvar-A_a'].string_value == 'tails'" + "condition": 
"inputs.parameter_values['pipelineparam--args-generator-op-Output-loop-item-subvar-A_a'] == 'tails'" } }, "for-loop-12": { @@ -645,22 +643,22 @@ "inputDefinitions": { "parameters": { "pipelineparam--args-generator-op-2-Output": { - "type": "STRING" + "parameterType": "LIST" }, "pipelineparam--args-generator-op-Output": { - "type": "STRING" + "parameterType": "LIST" }, "pipelineparam--args-generator-op-Output-loop-item": { - "type": "STRING" + "parameterType": "STRING" }, "pipelineparam--flip-coin-op-Output": { - "type": "STRING" + "parameterType": "STRING" }, "pipelineparam--loop_parameter": { - "type": "STRING" + "parameterType": "LIST" }, "pipelineparam--msg": { - "type": "STRING" + "parameterType": "STRING" } } } @@ -691,13 +689,13 @@ "inputDefinitions": { "parameters": { "pipelineparam--args-generator-op-Output-loop-item": { - "type": "STRING" + "parameterType": "STRING" }, "pipelineparam--flip-coin-op-Output": { - "type": "STRING" + "parameterType": "STRING" }, "pipelineparam--loop-item-param-6": { - "type": "STRING" + "parameterType": "STRING" } } } @@ -728,16 +726,16 @@ "inputDefinitions": { "parameters": { "pipelineparam--args-generator-op-Output-loop-item": { - "type": "STRING" + "parameterType": "STRING" }, "pipelineparam--args-generator-op-Output-loop-item-subvar-B_b": { - "type": "STRING" + "parameterType": "STRING" }, "pipelineparam--args-generator-op-Output-loop-item-subvar-B_b-loop-item": { - "type": "STRING" + "parameterType": "STRING" }, "pipelineparam--flip-coin-op-Output": { - "type": "STRING" + "parameterType": "STRING" } } } @@ -795,16 +793,16 @@ "inputDefinitions": { "parameters": { "pipelineparam--args-generator-op-2-Output": { - "type": "STRING" + "parameterType": "LIST" }, "pipelineparam--flip-coin-op-Output": { - "type": "STRING" + "parameterType": "STRING" }, "pipelineparam--loop_parameter": { - "type": "STRING" + "parameterType": "LIST" }, "pipelineparam--loop_parameter-loop-item": { - "type": "STRING" + "parameterType": "STRING" } } } @@ -814,7 +812,7 @@ "inputDefinitions": { "parameters": { "msg": { - "type": "STRING" + "parameterType": "STRING" } } } @@ -824,7 +822,7 @@ "inputDefinitions": { "parameters": { "msg": { - "type": "STRING" + "parameterType": "STRING" } } } @@ -834,7 +832,7 @@ "inputDefinitions": { "parameters": { "msg": { - "type": "STRING" + "parameterType": "STRING" } } } @@ -844,7 +842,7 @@ "inputDefinitions": { "parameters": { "msg": { - "type": "STRING" + "parameterType": "STRING" } } } @@ -854,7 +852,7 @@ "inputDefinitions": { "parameters": { "msg": { - "type": "STRING" + "parameterType": "STRING" } } } @@ -864,7 +862,7 @@ "inputDefinitions": { "parameters": { "msg": { - "type": "STRING" + "parameterType": "STRING" } } } @@ -874,7 +872,7 @@ "inputDefinitions": { "parameters": { "msg": { - "type": "STRING" + "parameterType": "STRING" } } } @@ -884,10 +882,10 @@ "inputDefinitions": { "parameters": { "msg": { - "type": "STRING" + "parameterType": "STRING" }, "msg2": { - "type": "STRING" + "parameterType": "STRING" } } } @@ -897,7 +895,7 @@ "inputDefinitions": { "parameters": { "msg": { - "type": "STRING" + "parameterType": "STRING" } } } @@ -907,7 +905,7 @@ "inputDefinitions": { "parameters": { "msg": { - "type": "STRING" + "parameterType": "STRING" } } } @@ -926,7 +924,7 @@ "command": [ "sh", "-c", - "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location 'kfp==1.8.5' && \"$0\" \"$@\"\n", + "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location 'kfp==1.8.6' && \"$0\" \"$@\"\n", "sh", "-ec", "program_path=$(mktemp -d)\nprintf \"%s\" \"$0\" > \"$program_path/ephemeral_component.py\"\npython3 -m kfp.v2.components.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n", @@ -946,7 +944,7 @@ "command": [ "sh", "-c", - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location 'kfp==1.8.5' && \"$0\" \"$@\"\n", + "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location 'kfp==1.8.6' && \"$0\" \"$@\"\n", "sh", "-ec", "program_path=$(mktemp -d)\nprintf \"%s\" \"$0\" > \"$program_path/ephemeral_component.py\"\npython3 -m kfp.v2.components.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n", @@ -966,7 +964,7 @@ "command": [ "sh", "-c", - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location 'kfp==1.8.5' && \"$0\" \"$@\"\n", + "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location 'kfp==1.8.6' && \"$0\" \"$@\"\n", "sh", "-ec", "program_path=$(mktemp -d)\nprintf \"%s\" \"$0\" > \"$program_path/ephemeral_component.py\"\npython3 -m kfp.v2.components.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n", @@ -986,7 +984,7 @@ "command": [ "sh", "-c", - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location 'kfp==1.8.5' && \"$0\" \"$@\"\n", + "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location 'kfp==1.8.6' && \"$0\" \"$@\"\n", "sh", "-ec", "program_path=$(mktemp -d)\nprintf \"%s\" \"$0\" > \"$program_path/ephemeral_component.py\"\npython3 -m kfp.v2.components.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n", @@ -1006,7 +1004,7 @@ "command": [ "sh", "-c", - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location 'kfp==1.8.5' && \"$0\" \"$@\"\n", + "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location 'kfp==1.8.6' && \"$0\" \"$@\"\n", "sh", "-ec", "program_path=$(mktemp -d)\nprintf \"%s\" \"$0\" > \"$program_path/ephemeral_component.py\"\npython3 -m kfp.v2.components.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n", @@ -1026,7 +1024,7 @@ "command": [ "sh", "-c", - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location 'kfp==1.8.5' && \"$0\" \"$@\"\n", + "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location 'kfp==1.8.6' && \"$0\" \"$@\"\n", "sh", "-ec", "program_path=$(mktemp -d)\nprintf \"%s\" \"$0\" > \"$program_path/ephemeral_component.py\"\npython3 -m kfp.v2.components.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n", @@ -1046,7 +1044,7 @@ "command": [ "sh", "-c", - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location 'kfp==1.8.5' && \"$0\" \"$@\"\n", + "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location 'kfp==1.8.6' && \"$0\" \"$@\"\n", "sh", "-ec", "program_path=$(mktemp -d)\nprintf \"%s\" \"$0\" > \"$program_path/ephemeral_component.py\"\npython3 -m kfp.v2.components.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n", @@ -1066,7 +1064,7 @@ "command": [ "sh", "-c", - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location 'kfp==1.8.5' && \"$0\" \"$@\"\n", + "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location 'kfp==1.8.6' && \"$0\" \"$@\"\n", "sh", "-ec", "program_path=$(mktemp -d)\nprintf \"%s\" \"$0\" > \"$program_path/ephemeral_component.py\"\npython3 -m kfp.v2.components.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n", @@ -1086,7 +1084,7 @@ "command": [ "sh", "-c", - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location 'kfp==1.8.5' && \"$0\" \"$@\"\n", + "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location 'kfp==1.8.6' && \"$0\" \"$@\"\n", "sh", "-ec", "program_path=$(mktemp -d)\nprintf \"%s\" \"$0\" > \"$program_path/ephemeral_component.py\"\npython3 -m kfp.v2.components.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n", @@ -1106,7 +1104,7 @@ "command": [ "sh", "-c", - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location 'kfp==1.8.5' && \"$0\" \"$@\"\n", + "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location 'kfp==1.8.6' && \"$0\" \"$@\"\n", "sh", "-ec", "program_path=$(mktemp -d)\nprintf \"%s\" \"$0\" > \"$program_path/ephemeral_component.py\"\npython3 -m kfp.v2.components.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n", @@ -1126,7 +1124,7 @@ "command": [ "sh", "-c", - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location 'kfp==1.8.5' && \"$0\" \"$@\"\n", + "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location 'kfp==1.8.6' && \"$0\" \"$@\"\n", "sh", "-ec", "program_path=$(mktemp -d)\nprintf \"%s\" \"$0\" > \"$program_path/ephemeral_component.py\"\npython3 -m kfp.v2.components.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n", @@ -1146,7 +1144,7 @@ "command": [ "sh", "-c", - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location 'kfp==1.8.5' && \"$0\" \"$@\"\n", + "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location 'kfp==1.8.6' && \"$0\" \"$@\"\n", "sh", "-ec", "program_path=$(mktemp -d)\nprintf \"%s\" \"$0\" > \"$program_path/ephemeral_component.py\"\npython3 -m kfp.v2.components.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n", @@ -1166,7 +1164,7 @@ "command": [ "sh", "-c", - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location 'kfp==1.8.5' && \"$0\" \"$@\"\n", + "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location 'kfp==1.8.6' && \"$0\" \"$@\"\n", "sh", "-ec", "program_path=$(mktemp -d)\nprintf \"%s\" \"$0\" > \"$program_path/ephemeral_component.py\"\npython3 -m kfp.v2.components.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n", @@ -1228,7 +1226,7 @@ "name": "condition-1" }, "triggerPolicy": { - "condition": "inputs.parameters['pipelineparam--flip-coin-op-Output'].string_value != 'no-such-result'" + "condition": "inputs.parameter_values['pipelineparam--flip-coin-op-Output'] != 'no-such-result'" } }, "flip-coin-op": { @@ -1268,25 +1266,37 @@ "inputDefinitions": { "parameters": { "loop_parameter": { - "type": "STRING" + "parameterType": "LIST" }, "msg": { - "type": "STRING" + "parameterType": "STRING" } } } }, - "schemaVersion": "2.0.0", - "sdkVersion": "kfp-1.8.5" + "schemaVersion": "2.1.0", + "sdkVersion": "kfp-1.8.6" }, "runtimeConfig": { - "parameters": { - "loop_parameter": { - "stringValue": "[{\"A_a\": \"heads\", \"B_b\": [\"A\", \"B\"]}, {\"A_a\": \"tails\", \"B_b\": [\"X\", \"Y\", \"Z\"]}]" - }, - "msg": { - "stringValue": "hello" - } + "parameterValues": { + "loop_parameter": [ + { + "A_a": "heads", + "B_b": [ + "A", + "B" + ] + }, + { + "A_a": "tails", + "B_b": [ + "X", + "Y", + "Z" + ] + } + ], + "msg": "hello" } } } \ No newline at end of file diff --git a/sdk/python/kfp/v2/compiler_cli_tests/test_data/pipeline_with_metrics_outputs.json b/sdk/python/kfp/v2/compiler_cli_tests/test_data/pipeline_with_metrics_outputs.json index 6c03ced8c8f..a7d052b95d2 100644 --- a/sdk/python/kfp/v2/compiler_cli_tests/test_data/pipeline_with_metrics_outputs.json +++ b/sdk/python/kfp/v2/compiler_cli_tests/test_data/pipeline_with_metrics_outputs.json @@ -32,7 +32,7 @@ "inputDefinitions": { "parameters": { "pipelineparam--loop-item-param-1": { - "type": "STRING" + "parameterType": "STRING" } } }, @@ -87,7 +87,7 @@ "command": [ "sh", "-c", - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location 'kfp==1.8.5' && \"$0\" \"$@\"\n", + "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location 'kfp==1.8.6' && \"$0\" \"$@\"\n", "sh", "-ec", "program_path=$(mktemp -d)\nprintf \"%s\" \"$0\" > \"$program_path/ephemeral_component.py\"\npython3 -m kfp.v2.components.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n", @@ -107,7 +107,7 @@ "command": [ "sh", "-c", - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location 'kfp==1.8.5' && \"$0\" \"$@\"\n", + "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location 'kfp==1.8.6' && \"$0\" \"$@\"\n", "sh", "-ec", "program_path=$(mktemp -d)\nprintf \"%s\" \"$0\" > \"$program_path/ephemeral_component.py\"\npython3 -m kfp.v2.components.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n", @@ -188,8 +188,8 @@ } } }, - "schemaVersion": "2.0.0", - "sdkVersion": "kfp-1.8.5" + "schemaVersion": "2.1.0", + "sdkVersion": "kfp-1.8.6" }, "runtimeConfig": { "gcsOutputDirectory": "dummy_root" diff --git a/sdk/python/kfp/v2/compiler_cli_tests/test_data/pipeline_with_nested_conditions.json b/sdk/python/kfp/v2/compiler_cli_tests/test_data/pipeline_with_nested_conditions.json index ec28059eff1..0cc5c12e27a 100644 --- a/sdk/python/kfp/v2/compiler_cli_tests/test_data/pipeline_with_nested_conditions.json +++ b/sdk/python/kfp/v2/compiler_cli_tests/test_data/pipeline_with_nested_conditions.json @@ -31,7 +31,7 @@ "name": "condition-2" }, "triggerPolicy": { - "condition": "inputs.parameters['pipelineparam--flip-coin-2-Output'].string_value == inputs.parameters['pipelineparam--flip-coin-3-Output'].string_value" + "condition": "inputs.parameter_values['pipelineparam--flip-coin-2-Output'] == inputs.parameter_values['pipelineparam--flip-coin-3-Output']" } }, "flip-coin-3": { @@ -74,10 +74,10 @@ "inputDefinitions": { "parameters": { "pipelineparam--flip-coin-2-Output": { - "type": "STRING" + "parameterType": "STRING" }, "pipelineparam--flip-coin-Output": { - "type": "STRING" + "parameterType": "STRING" } } } @@ -125,13 +125,13 @@ "inputDefinitions": { "parameters": { "pipelineparam--flip-coin-2-Output": { - "type": "STRING" + "parameterType": "STRING" }, "pipelineparam--flip-coin-3-Output": { - "type": "STRING" + "parameterType": "STRING" }, "pipelineparam--flip-coin-Output": { - "type": "STRING" + "parameterType": "STRING" } } } @@ -141,7 +141,7 @@ "outputDefinitions": { "parameters": { "Output": { - "type": "STRING" + "parameterType": "STRING" } } } @@ -151,7 +151,7 @@ "outputDefinitions": { "parameters": { "Output": { - "type": "STRING" + "parameterType": "STRING" } } } @@ -161,7 +161,7 @@ "outputDefinitions": { "parameters": { "Output": { - "type": "STRING" + "parameterType": "STRING" } } } @@ -171,7 +171,7 @@ "outputDefinitions": { "parameters": { "Output": { - "type": "STRING" + "parameterType": "STRING" } } } @@ -181,7 +181,7 @@ "inputDefinitions": { "parameters": { "msg": { - "type": "STRING" + "parameterType": "STRING" } } } @@ -191,7 +191,7 @@ "inputDefinitions": { "parameters": { "msg": { - "type": "STRING" + "parameterType": "STRING" } } } @@ -201,7 +201,7 @@ "inputDefinitions": { "parameters": { "msg": { - "type": "STRING" + "parameterType": "STRING" } } } @@ -211,7 +211,7 @@ "inputDefinitions": { "parameters": { "msg": { - "type": "STRING" + "parameterType": "STRING" } } } @@ -375,7 +375,7 @@ "name": "condition-1" }, "triggerPolicy": { - "condition": "inputs.parameters['pipelineparam--flip-coin-Output'].string_value != 'no-such-result'" + "condition": "inputs.parameter_values['pipelineparam--flip-coin-Output'] != 'no-such-result'" } }, "flip-coin": { @@ -451,8 +451,8 @@ } } }, - "schemaVersion": "2.0.0", - "sdkVersion": "kfp-1.7.2" + "schemaVersion": "2.1.0", + "sdkVersion": "kfp-1.8.6" }, "runtimeConfig": { "gcsOutputDirectory": "dummy_root" diff --git 
a/sdk/python/kfp/v2/compiler_cli_tests/test_data/pipeline_with_nested_conditions_yaml.json b/sdk/python/kfp/v2/compiler_cli_tests/test_data/pipeline_with_nested_conditions_yaml.json index 7cff1928080..6325680bca0 100644 --- a/sdk/python/kfp/v2/compiler_cli_tests/test_data/pipeline_with_nested_conditions_yaml.json +++ b/sdk/python/kfp/v2/compiler_cli_tests/test_data/pipeline_with_nested_conditions_yaml.json @@ -28,7 +28,7 @@ "name": "condition-2" }, "triggerPolicy": { - "condition": "inputs.parameters['pipelineparam--generate-random-number-output'].int_value > 5" + "condition": "int(inputs.parameter_values['pipelineparam--generate-random-number-output']) > 5" } }, "condition-3": { @@ -55,7 +55,7 @@ "name": "condition-3" }, "triggerPolicy": { - "condition": "inputs.parameters['pipelineparam--generate-random-number-output'].int_value <= 5" + "condition": "int(inputs.parameter_values['pipelineparam--generate-random-number-output']) <= 5" } }, "generate-random-number": { @@ -74,7 +74,7 @@ "inputDefinitions": { "parameters": { "pipelineparam--flip-coin-output": { - "type": "STRING" + "parameterType": "STRING" } } } @@ -93,9 +93,7 @@ "parameters": { "msg": { "runtimeValue": { - "constantValue": { - "stringValue": "heads and {{$.inputs.parameters['pipelineparam--generate-random-number-output']}} > 5!" - } + "constant": "heads and {{$.inputs.parameters['pipelineparam--generate-random-number-output']}} > 5!" } }, "pipelineparam--generate-random-number-output": { @@ -112,10 +110,10 @@ "inputDefinitions": { "parameters": { "pipelineparam--flip-coin-output": { - "type": "STRING" + "parameterType": "STRING" }, "pipelineparam--generate-random-number-output": { - "type": "INT" + "parameterType": "NUMBER_INTEGER" } } } @@ -134,9 +132,7 @@ "parameters": { "msg": { "runtimeValue": { - "constantValue": { - "stringValue": "heads and {{$.inputs.parameters['pipelineparam--generate-random-number-output']}} <= 5!" - } + "constant": "heads and {{$.inputs.parameters['pipelineparam--generate-random-number-output']}} <= 5!" } }, "pipelineparam--generate-random-number-output": { @@ -153,10 +149,10 @@ "inputDefinitions": { "parameters": { "pipelineparam--flip-coin-output": { - "type": "STRING" + "parameterType": "STRING" }, "pipelineparam--generate-random-number-output": { - "type": "INT" + "parameterType": "NUMBER_INTEGER" } } } @@ -188,7 +184,7 @@ "name": "condition-5" }, "triggerPolicy": { - "condition": "inputs.parameters['pipelineparam--generate-random-number-2-output'].int_value > 15" + "condition": "int(inputs.parameter_values['pipelineparam--generate-random-number-2-output']) > 15" } }, "condition-6": { @@ -215,7 +211,7 @@ "name": "condition-6" }, "triggerPolicy": { - "condition": "inputs.parameters['pipelineparam--generate-random-number-2-output'].int_value <= 15" + "condition": "int(inputs.parameter_values['pipelineparam--generate-random-number-2-output']) <= 15" } }, "generate-random-number-2": { @@ -234,7 +230,7 @@ "inputDefinitions": { "parameters": { "pipelineparam--flip-coin-output": { - "type": "STRING" + "parameterType": "STRING" } } } @@ -253,9 +249,7 @@ "parameters": { "msg": { "runtimeValue": { - "constantValue": { - "stringValue": "tails and {{$.inputs.parameters['pipelineparam--generate-random-number-2-output']}} > 15!" - } + "constant": "tails and {{$.inputs.parameters['pipelineparam--generate-random-number-2-output']}} > 15!" 
} }, "pipelineparam--generate-random-number-2-output": { @@ -272,10 +266,10 @@ "inputDefinitions": { "parameters": { "pipelineparam--flip-coin-output": { - "type": "STRING" + "parameterType": "STRING" }, "pipelineparam--generate-random-number-2-output": { - "type": "INT" + "parameterType": "NUMBER_INTEGER" } } } @@ -294,9 +288,7 @@ "parameters": { "msg": { "runtimeValue": { - "constantValue": { - "stringValue": "tails and {{$.inputs.parameters['pipelineparam--generate-random-number-2-output']}} <= 15!" - } + "constant": "tails and {{$.inputs.parameters['pipelineparam--generate-random-number-2-output']}} <= 15!" } }, "pipelineparam--generate-random-number-2-output": { @@ -313,10 +305,10 @@ "inputDefinitions": { "parameters": { "pipelineparam--flip-coin-output": { - "type": "STRING" + "parameterType": "STRING" }, "pipelineparam--generate-random-number-2-output": { - "type": "INT" + "parameterType": "NUMBER_INTEGER" } } } @@ -326,7 +318,7 @@ "outputDefinitions": { "parameters": { "output": { - "type": "STRING" + "parameterType": "STRING" } } } @@ -336,7 +328,7 @@ "outputDefinitions": { "parameters": { "output": { - "type": "INT" + "parameterType": "NUMBER_INTEGER" } } } @@ -346,7 +338,7 @@ "outputDefinitions": { "parameters": { "output": { - "type": "INT" + "parameterType": "NUMBER_INTEGER" } } } @@ -356,7 +348,7 @@ "inputDefinitions": { "parameters": { "msg": { - "type": "STRING" + "parameterType": "STRING" } } } @@ -366,7 +358,7 @@ "inputDefinitions": { "parameters": { "msg": { - "type": "STRING" + "parameterType": "STRING" } } } @@ -376,7 +368,7 @@ "inputDefinitions": { "parameters": { "msg": { - "type": "STRING" + "parameterType": "STRING" } } } @@ -386,7 +378,7 @@ "inputDefinitions": { "parameters": { "msg": { - "type": "STRING" + "parameterType": "STRING" } } } @@ -502,7 +494,7 @@ "name": "condition-1" }, "triggerPolicy": { - "condition": "inputs.parameters['pipelineparam--flip-coin-output'].string_value == 'heads'" + "condition": "inputs.parameter_values['pipelineparam--flip-coin-output'] == 'heads'" } }, "condition-4": { @@ -526,7 +518,7 @@ "name": "condition-4" }, "triggerPolicy": { - "condition": "inputs.parameters['pipelineparam--flip-coin-output'].string_value == 'tails'" + "condition": "inputs.parameter_values['pipelineparam--flip-coin-output'] == 'tails'" } }, "flip-coin": { @@ -543,8 +535,8 @@ } } }, - "schemaVersion": "2.0.0", - "sdkVersion": "kfp-1.8.4" + "schemaVersion": "2.1.0", + "sdkVersion": "kfp-1.8.6" }, "runtimeConfig": { "gcsOutputDirectory": "dummy_root" diff --git a/sdk/python/kfp/v2/compiler_cli_tests/test_data/pipeline_with_nested_loops.json b/sdk/python/kfp/v2/compiler_cli_tests/test_data/pipeline_with_nested_loops.json index 4a5c7b8eb4a..6b6b73cd03b 100644 --- a/sdk/python/kfp/v2/compiler_cli_tests/test_data/pipeline_with_nested_loops.json +++ b/sdk/python/kfp/v2/compiler_cli_tests/test_data/pipeline_with_nested_loops.json @@ -34,10 +34,10 @@ "inputDefinitions": { "parameters": { "pipelineparam--loop_parameter": { - "type": "STRING" + "parameterType": "LIST" }, "pipelineparam--loop_parameter-loop-item": { - "type": "STRING" + "parameterType": "STRING" } } } @@ -69,13 +69,13 @@ "inputDefinitions": { "parameters": { "pipelineparam--loop_parameter-loop-item": { - "type": "STRING" + "parameterType": "STRING" }, "pipelineparam--loop_parameter-loop-item-subvar-p_a": { - "type": "STRING" + "parameterType": "STRING" }, "pipelineparam--loop_parameter-loop-item-subvar-p_a-loop-item": { - "type": "STRING" + "parameterType": "STRING" } } } @@ -127,7 +127,7 @@ 
"inputDefinitions": { "parameters": { "pipelineparam--loop-item-param-3": { - "type": "STRING" + "parameterType": "STRING" } } } @@ -161,10 +161,10 @@ "inputDefinitions": { "parameters": { "pipelineparam--loop-item-param-3": { - "type": "STRING" + "parameterType": "STRING" }, "pipelineparam--loop-item-param-5": { - "type": "STRING" + "parameterType": "STRING" } } } @@ -174,7 +174,7 @@ "inputDefinitions": { "parameters": { "msg": { - "type": "STRING" + "parameterType": "STRING" } } } @@ -184,7 +184,7 @@ "inputDefinitions": { "parameters": { "msg": { - "type": "STRING" + "parameterType": "STRING" } } } @@ -194,10 +194,10 @@ "inputDefinitions": { "parameters": { "msg": { - "type": "STRING" + "parameterType": "STRING" }, "msg2": { - "type": "STRING" + "parameterType": "STRING" } } } @@ -216,7 +216,7 @@ "command": [ "sh", "-c", - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location 'kfp==1.8.5' && \"$0\" \"$@\"\n", + "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location 'kfp==1.8.6' && \"$0\" \"$@\"\n", "sh", "-ec", "program_path=$(mktemp -d)\nprintf \"%s\" \"$0\" > \"$program_path/ephemeral_component.py\"\npython3 -m kfp.v2.components.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n", @@ -236,7 +236,7 @@ "command": [ "sh", "-c", - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location 'kfp==1.8.5' && \"$0\" \"$@\"\n", + "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location 'kfp==1.8.6' && \"$0\" \"$@\"\n", "sh", "-ec", "program_path=$(mktemp -d)\nprintf \"%s\" \"$0\" > \"$program_path/ephemeral_component.py\"\npython3 -m kfp.v2.components.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n", @@ -256,7 +256,7 @@ "command": [ "sh", "-c", - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location 'kfp==1.8.5' && \"$0\" \"$@\"\n", + "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location 'kfp==1.8.6' && \"$0\" \"$@\"\n", "sh", "-ec", "program_path=$(mktemp -d)\nprintf \"%s\" \"$0\" > \"$program_path/ephemeral_component.py\"\npython3 -m kfp.v2.components.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n", @@ -313,19 +313,40 @@ "inputDefinitions": { "parameters": { "loop_parameter": { - "type": "STRING" + "parameterType": "LIST" } } } }, - "schemaVersion": "2.0.0", - "sdkVersion": "kfp-1.8.5" + "schemaVersion": "2.1.0", + "sdkVersion": "kfp-1.8.6" }, "runtimeConfig": { - "parameters": { - "loop_parameter": { - "stringValue": "[{\"p_a\": [{\"q_a\": 1}, {\"q_a\": 2}], \"p_b\": \"hello\"}, {\"p_a\": [{\"q_a\": 11}, {\"q_a\": 22}], \"p_b\": \"halo\"}]" - } + "parameterValues": { + "loop_parameter": [ + { + "p_a": [ + { + "q_a": 1.0 + }, + { + "q_a": 2.0 + } + ], + "p_b": "hello" + }, + { + "p_a": [ + { + "q_a": 11.0 + }, + { + "q_a": 22.0 + } + ], + "p_b": "halo" + } + ] } } } \ No newline at end of file diff --git a/sdk/python/kfp/v2/compiler_cli_tests/test_data/pipeline_with_ontology.json b/sdk/python/kfp/v2/compiler_cli_tests/test_data/pipeline_with_ontology.json index 74bb82f1bbd..97cf271e27d 100644 --- a/sdk/python/kfp/v2/compiler_cli_tests/test_data/pipeline_with_ontology.json +++ b/sdk/python/kfp/v2/compiler_cli_tests/test_data/pipeline_with_ontology.json @@ -6,7 +6,7 @@ "inputDefinitions": { "parameters": { "input_location": { - "type": "STRING" + "parameterType": "STRING" } } }, @@ -34,10 +34,10 @@ }, "parameters": { "n_epochs": { - "type": "INT" + "parameterType": "NUMBER_INTEGER" }, "optimizer": { - "type": "STRING" + "parameterType": "STRING" } } }, @@ -144,32 +144,26 @@ "inputDefinitions": { "parameters": { "input_location": { - "type": "STRING" + "parameterType": "STRING" }, "n_epochs": { - "type": "INT" + "parameterType": "NUMBER_INTEGER" }, "optimizer": { - "type": "STRING" + "parameterType": "STRING" } } } }, - "schemaVersion": "2.0.0", - "sdkVersion": "kfp-1.7.2" + "schemaVersion": "2.1.0", + "sdkVersion": "kfp-1.8.6" }, "runtimeConfig": { "gcsOutputDirectory": "dummy_root", - "parameters": { - "input_location": { - "stringValue": "gs://test-bucket/pipeline_root" - }, - "n_epochs": { - "intValue": "200" - }, - "optimizer": { - "stringValue": "sgd" - } + "parameterValues": { + "input_location": "gs://test-bucket/pipeline_root", + "n_epochs": 200.0, + "optimizer": "sgd" } } } \ No newline at end of file diff --git a/sdk/python/kfp/v2/compiler_cli_tests/test_data/pipeline_with_params_containing_format.json b/sdk/python/kfp/v2/compiler_cli_tests/test_data/pipeline_with_params_containing_format.json index d12508fe456..0b61181cd03 100644 --- a/sdk/python/kfp/v2/compiler_cli_tests/test_data/pipeline_with_params_containing_format.json +++ b/sdk/python/kfp/v2/compiler_cli_tests/test_data/pipeline_with_params_containing_format.json @@ -21,9 +21,7 @@ }, "text2": { "runtimeValue": { - "constantValue": { - "stringValue": " and {{$.inputs.parameters['pipelineparam--name']}}." - } + "constant": " and {{$.inputs.parameters['pipelineparam--name']}}." 
} } } @@ -37,10 +35,10 @@ "inputDefinitions": { "parameters": { "pipelineparam--loop-item-param-1": { - "type": "STRING" + "parameterType": "STRING" }, "pipelineparam--name": { - "type": "STRING" + "parameterType": "STRING" } } } @@ -50,14 +48,14 @@ "inputDefinitions": { "parameters": { "text": { - "type": "STRING" + "parameterType": "STRING" } } }, "outputDefinitions": { "parameters": { "Output": { - "type": "STRING" + "parameterType": "STRING" } } } @@ -67,14 +65,14 @@ "inputDefinitions": { "parameters": { "text": { - "type": "STRING" + "parameterType": "STRING" } } }, "outputDefinitions": { "parameters": { "Output": { - "type": "STRING" + "parameterType": "STRING" } } } @@ -84,17 +82,17 @@ "inputDefinitions": { "parameters": { "text1": { - "type": "STRING" + "parameterType": "STRING" }, "text2": { - "type": "STRING" + "parameterType": "STRING" } } }, "outputDefinitions": { "parameters": { "Output": { - "type": "STRING" + "parameterType": "STRING" } } } @@ -198,9 +196,7 @@ }, "text": { "runtimeValue": { - "constantValue": { - "stringValue": "Hello {{$.inputs.parameters['pipelineparam--name']}}" - } + "constant": "Hello {{$.inputs.parameters['pipelineparam--name']}}" } } } @@ -229,9 +225,7 @@ }, "text": { "runtimeValue": { - "constantValue": { - "stringValue": "{{$.inputs.parameters['pipelineparam--print-op-Output']}}, again." - } + "constant": "{{$.inputs.parameters['pipelineparam--print-op-Output']}}, again." } } } @@ -245,20 +239,18 @@ "inputDefinitions": { "parameters": { "name": { - "type": "STRING" + "parameterType": "STRING" } } } }, - "schemaVersion": "2.0.0", - "sdkVersion": "kfp-1.7.2" + "schemaVersion": "2.1.0", + "sdkVersion": "kfp-1.8.6" }, "runtimeConfig": { "gcsOutputDirectory": "dummy_root", - "parameters": { - "name": { - "stringValue": "KFP" - } + "parameterValues": { + "name": "KFP" } } } \ No newline at end of file diff --git a/sdk/python/kfp/v2/compiler_cli_tests/test_data/pipeline_with_resource_spec.json b/sdk/python/kfp/v2/compiler_cli_tests/test_data/pipeline_with_resource_spec.json index e1d4044de0d..c9e9476baef 100644 --- a/sdk/python/kfp/v2/compiler_cli_tests/test_data/pipeline_with_resource_spec.json +++ b/sdk/python/kfp/v2/compiler_cli_tests/test_data/pipeline_with_resource_spec.json @@ -6,7 +6,7 @@ "inputDefinitions": { "parameters": { "input_location": { - "type": "STRING" + "parameterType": "STRING" } } }, @@ -34,10 +34,10 @@ }, "parameters": { "n_epochs": { - "type": "INT" + "parameterType": "NUMBER_INTEGER" }, "optimizer": { - "type": "STRING" + "parameterType": "STRING" } } }, @@ -152,32 +152,26 @@ "inputDefinitions": { "parameters": { "input_location": { - "type": "STRING" + "parameterType": "STRING" }, "n_epochs": { - "type": "INT" + "parameterType": "NUMBER_INTEGER" }, "optimizer": { - "type": "STRING" + "parameterType": "STRING" } } } }, - "schemaVersion": "2.0.0", - "sdkVersion": "kfp-1.7.2" + "schemaVersion": "2.1.0", + "sdkVersion": "kfp-1.8.6" }, "runtimeConfig": { "gcsOutputDirectory": "dummy_root", - "parameters": { - "input_location": { - "stringValue": "gs://test-bucket/pipeline_root" - }, - "n_epochs": { - "intValue": "200" - }, - "optimizer": { - "stringValue": "sgd" - } + "parameterValues": { + "input_location": "gs://test-bucket/pipeline_root", + "n_epochs": 200.0, + "optimizer": "sgd" } } } \ No newline at end of file diff --git a/sdk/python/kfp/v2/compiler_cli_tests/test_data/pipeline_with_reused_component.json b/sdk/python/kfp/v2/compiler_cli_tests/test_data/pipeline_with_reused_component.json index 41f8a6f44d1..4891dd4133f 100644 --- 
a/sdk/python/kfp/v2/compiler_cli_tests/test_data/pipeline_with_reused_component.json +++ b/sdk/python/kfp/v2/compiler_cli_tests/test_data/pipeline_with_reused_component.json @@ -6,17 +6,17 @@ "inputDefinitions": { "parameters": { "op1": { - "type": "INT" + "parameterType": "NUMBER_INTEGER" }, "op2": { - "type": "INT" + "parameterType": "NUMBER_INTEGER" } } }, "outputDefinitions": { "parameters": { "sum": { - "type": "INT" + "parameterType": "NUMBER_INTEGER" } } } @@ -26,17 +26,17 @@ "inputDefinitions": { "parameters": { "op1": { - "type": "INT" + "parameterType": "NUMBER_INTEGER" }, "op2": { - "type": "INT" + "parameterType": "NUMBER_INTEGER" } } }, "outputDefinitions": { "parameters": { "sum": { - "type": "INT" + "parameterType": "NUMBER_INTEGER" } } } @@ -46,17 +46,17 @@ "inputDefinitions": { "parameters": { "op1": { - "type": "INT" + "parameterType": "NUMBER_INTEGER" }, "op2": { - "type": "INT" + "parameterType": "NUMBER_INTEGER" } } }, "outputDefinitions": { "parameters": { "sum": { - "type": "INT" + "parameterType": "NUMBER_INTEGER" } } } @@ -125,9 +125,7 @@ }, "op2": { "runtimeValue": { - "constantValue": { - "intValue": "3" - } + "constant": 3.0 } } } @@ -183,9 +181,7 @@ }, "op2": { "runtimeValue": { - "constantValue": { - "intValue": "7" - } + "constant": 7.0 } } } @@ -199,26 +195,22 @@ "inputDefinitions": { "parameters": { "a": { - "type": "INT" + "parameterType": "NUMBER_INTEGER" }, "b": { - "type": "INT" + "parameterType": "NUMBER_INTEGER" } } } }, - "schemaVersion": "2.0.0", - "sdkVersion": "kfp-1.7.2" + "schemaVersion": "2.1.0", + "sdkVersion": "kfp-1.8.6" }, "runtimeConfig": { "gcsOutputDirectory": "dummy_root", - "parameters": { - "a": { - "intValue": "2" - }, - "b": { - "intValue": "5" - } + "parameterValues": { + "a": 2.0, + "b": 5.0 } } } \ No newline at end of file diff --git a/sdk/python/kfp/v2/compiler_cli_tests/test_data/pipeline_with_various_io_types.json b/sdk/python/kfp/v2/compiler_cli_tests/test_data/pipeline_with_various_io_types.json index cfda8706bed..4cf44688eef 100644 --- a/sdk/python/kfp/v2/compiler_cli_tests/test_data/pipeline_with_various_io_types.json +++ b/sdk/python/kfp/v2/compiler_cli_tests/test_data/pipeline_with_various_io_types.json @@ -56,7 +56,7 @@ }, "parameters": { "input_a": { - "type": "INT" + "parameterType": "NUMBER_INTEGER" } } } @@ -66,16 +66,16 @@ "inputDefinitions": { "parameters": { "input_1": { - "type": "STRING" + "parameterType": "STRING" }, "input_2": { - "type": "DOUBLE" + "parameterType": "NUMBER_DOUBLE" }, "input_3": { - "type": "STRING" + "parameterType": "STRING" }, "input_4": { - "type": "STRING" + "parameterType": "STRING" } } }, @@ -132,7 +132,7 @@ }, "parameters": { "output_1": { - "type": "INT" + "parameterType": "NUMBER_INTEGER" } } } @@ -270,9 +270,7 @@ }, "input_2": { "runtimeValue": { - "constantValue": { - "doubleValue": 3.1415926 - } + "constant": 3.1415926 } }, "input_3": { @@ -292,26 +290,24 @@ "inputDefinitions": { "parameters": { "input1": { - "type": "STRING" + "parameterType": "STRING" }, "input3": { - "type": "STRING" + "parameterType": "STRING" }, "input4": { - "type": "STRING" + "parameterType": "STRING" } } } }, - "schemaVersion": "2.0.0", - "sdkVersion": "kfp-1.8.3" + "schemaVersion": "2.1.0", + "sdkVersion": "kfp-1.8.6" }, "runtimeConfig": { "gcsOutputDirectory": "dummy_root", - "parameters": { - "input4": { - "stringValue": "" - } + "parameterValues": { + "input4": "" } } } \ No newline at end of file diff --git a/sdk/python/kfp/v2/compiler_cli_tests/test_data/two_step_pipeline.json 
b/sdk/python/kfp/v2/compiler_cli_tests/test_data/two_step_pipeline.json index 6e32cad5603..5c979bf7e5e 100644 --- a/sdk/python/kfp/v2/compiler_cli_tests/test_data/two_step_pipeline.json +++ b/sdk/python/kfp/v2/compiler_cli_tests/test_data/two_step_pipeline.json @@ -19,7 +19,7 @@ "inputDefinitions": { "parameters": { "text": { - "type": "STRING" + "parameterType": "STRING" } } }, @@ -115,20 +115,18 @@ "inputDefinitions": { "parameters": { "text": { - "type": "STRING" + "parameterType": "STRING" } } } }, - "schemaVersion": "2.0.0", - "sdkVersion": "kfp-1.7.2" + "schemaVersion": "2.1.0", + "sdkVersion": "kfp-1.8.6" }, "runtimeConfig": { "gcsOutputDirectory": "dummy_root", - "parameters": { - "text": { - "stringValue": "Hello KFP!" - } + "parameterValues": { + "text": "Hello KFP!" } } } \ No newline at end of file diff --git a/sdk/python/kfp/v2/compiler_cli_tests/test_data/v2_component_with_optional_inputs.json b/sdk/python/kfp/v2/compiler_cli_tests/test_data/v2_component_with_optional_inputs.json index 8a6bf06390a..8ce7a0a5942 100644 --- a/sdk/python/kfp/v2/compiler_cli_tests/test_data/v2_component_with_optional_inputs.json +++ b/sdk/python/kfp/v2/compiler_cli_tests/test_data/v2_component_with_optional_inputs.json @@ -6,10 +6,10 @@ "inputDefinitions": { "parameters": { "input1": { - "type": "STRING" + "parameterType": "STRING" }, "input2": { - "type": "STRING" + "parameterType": "STRING" } } } @@ -28,7 +28,7 @@ "command": [ "sh", "-c", - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location 'kfp==1.8.5' && \"$0\" \"$@\"\n", + "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location 'kfp==1.8.6' && \"$0\" \"$@\"\n", "sh", "-ec", "program_path=$(mktemp -d)\nprintf \"%s\" \"$0\" > \"$program_path/ephemeral_component.py\"\npython3 -m kfp.v2.components.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n", @@ -56,16 +56,12 @@ "parameters": { "input1": { "runtimeValue": { - "constantValue": { - "stringValue": "Hello" - } + "constant": "Hello" } }, "input2": { "runtimeValue": { - "constantValue": { - "stringValue": "World" - } + "constant": "World" } } } @@ -77,8 +73,8 @@ } } }, - "schemaVersion": "2.0.0", - "sdkVersion": "kfp-1.8.5" + "schemaVersion": "2.1.0", + "sdkVersion": "kfp-1.8.6" }, "runtimeConfig": { "gcsOutputDirectory": "dummy_root" diff --git a/sdk/python/kfp/v2/compiler_cli_tests/test_data/xgboost_sample_pipeline.json b/sdk/python/kfp/v2/compiler_cli_tests/test_data/xgboost_sample_pipeline.json index d6354d2785f..68aac70d7d4 100644 --- a/sdk/python/kfp/v2/compiler_cli_tests/test_data/xgboost_sample_pipeline.json +++ b/sdk/python/kfp/v2/compiler_cli_tests/test_data/xgboost_sample_pipeline.json @@ -6,16 +6,16 @@ "inputDefinitions": { "parameters": { "Format": { - "type": "STRING" + "parameterType": "STRING" }, "Limit": { - "type": "INT" + "parameterType": "NUMBER_INTEGER" }, "Select": { - "type": "STRING" + "parameterType": "STRING" }, "Where": { - "type": "STRING" + "parameterType": "STRING" } } }, @@ -72,7 +72,7 @@ }, "parameters": { "label_column": { - "type": "INT" + "parameterType": "NUMBER_INTEGER" } } }, @@ -106,7 +106,7 @@ }, "parameters": { "label_column_name": { - "type": "STRING" + "parameterType": "STRING" } } 
}, @@ -140,7 +140,7 @@ }, "parameters": { "label_column_name": { - "type": "STRING" + "parameterType": "STRING" } } }, @@ -174,7 +174,7 @@ }, "parameters": { "label_column": { - "type": "INT" + "parameterType": "NUMBER_INTEGER" } } }, @@ -202,25 +202,25 @@ }, "parameters": { "booster": { - "type": "STRING" + "parameterType": "STRING" }, "label_column": { - "type": "INT" + "parameterType": "NUMBER_INTEGER" }, "learning_rate": { - "type": "DOUBLE" + "parameterType": "NUMBER_DOUBLE" }, "max_depth": { - "type": "INT" + "parameterType": "NUMBER_INTEGER" }, "min_split_loss": { - "type": "DOUBLE" + "parameterType": "NUMBER_DOUBLE" }, "num_iterations": { - "type": "INT" + "parameterType": "NUMBER_INTEGER" }, "objective": { - "type": "STRING" + "parameterType": "STRING" } } }, @@ -254,25 +254,25 @@ }, "parameters": { "booster": { - "type": "STRING" + "parameterType": "STRING" }, "label_column_name": { - "type": "STRING" + "parameterType": "STRING" }, "learning_rate": { - "type": "DOUBLE" + "parameterType": "NUMBER_DOUBLE" }, "max_depth": { - "type": "INT" + "parameterType": "NUMBER_INTEGER" }, "min_split_loss": { - "type": "DOUBLE" + "parameterType": "NUMBER_DOUBLE" }, "num_iterations": { - "type": "INT" + "parameterType": "NUMBER_INTEGER" }, "objective": { - "type": "STRING" + "parameterType": "STRING" } } }, @@ -518,30 +518,22 @@ "parameters": { "Format": { "runtimeValue": { - "constantValue": { - "stringValue": "csv" - } + "constant": "csv" } }, "Limit": { "runtimeValue": { - "constantValue": { - "intValue": "10000" - } + "constant": 10000.0 } }, "Select": { "runtimeValue": { - "constantValue": { - "stringValue": "tips,trip_seconds,trip_miles,pickup_community_area,dropoff_community_area,fare,tolls,extras,trip_total" - } + "constant": "tips,trip_seconds,trip_miles,pickup_community_area,dropoff_community_area,fare,tolls,extras,trip_total" } }, "Where": { "runtimeValue": { - "constantValue": { - "stringValue": "trip_start_timestamp >= \"2019-01-01\" AND trip_start_timestamp < \"2019-02-01\"" - } + "constant": "trip_start_timestamp >= \"2019-01-01\" AND trip_start_timestamp < \"2019-02-01\"" } } } @@ -603,9 +595,7 @@ "parameters": { "label_column": { "runtimeValue": { - "constantValue": { - "intValue": "0" - } + "constant": 0.0 } } } @@ -643,9 +633,7 @@ "parameters": { "label_column_name": { "runtimeValue": { - "constantValue": { - "stringValue": "tips" - } + "constant": "tips" } } } @@ -683,9 +671,7 @@ "parameters": { "label_column_name": { "runtimeValue": { - "constantValue": { - "stringValue": "tips" - } + "constant": "tips" } } } @@ -723,9 +709,7 @@ "parameters": { "label_column": { "runtimeValue": { - "constantValue": { - "intValue": "0" - } + "constant": 0.0 } } } @@ -756,51 +740,37 @@ "parameters": { "booster": { "runtimeValue": { - "constantValue": { - "stringValue": "gbtree" - } + "constant": "gbtree" } }, "label_column": { "runtimeValue": { - "constantValue": { - "intValue": "0" - } + "constant": 0.0 } }, "learning_rate": { "runtimeValue": { - "constantValue": { - "doubleValue": 0.3 - } + "constant": 0.3 } }, "max_depth": { "runtimeValue": { - "constantValue": { - "intValue": "6" - } + "constant": 6.0 } }, "min_split_loss": { "runtimeValue": { - "constantValue": { - "doubleValue": 0.0 - } + "constant": 0.0 } }, "num_iterations": { "runtimeValue": { - "constantValue": { - "intValue": "200" - } + "constant": 200.0 } }, "objective": { "runtimeValue": { - "constantValue": { - "stringValue": "reg:squarederror" - } + "constant": "reg:squarederror" } } } @@ -831,51 +801,37 @@ "parameters": { 
"booster": { "runtimeValue": { - "constantValue": { - "stringValue": "gbtree" - } + "constant": "gbtree" } }, "label_column_name": { "runtimeValue": { - "constantValue": { - "stringValue": "tips" - } + "constant": "tips" } }, "learning_rate": { "runtimeValue": { - "constantValue": { - "doubleValue": 0.3 - } + "constant": 0.3 } }, "max_depth": { "runtimeValue": { - "constantValue": { - "intValue": "6" - } + "constant": 6.0 } }, "min_split_loss": { "runtimeValue": { - "constantValue": { - "doubleValue": 0.0 - } + "constant": 0.0 } }, "num_iterations": { "runtimeValue": { - "constantValue": { - "intValue": "200" - } + "constant": 200.0 } }, "objective": { "runtimeValue": { - "constantValue": { - "stringValue": "reg:squarederror" - } + "constant": "reg:squarederror" } } } @@ -887,8 +843,8 @@ } } }, - "schemaVersion": "2.0.0", - "sdkVersion": "kfp-1.7.2" + "schemaVersion": "2.1.0", + "sdkVersion": "kfp-1.8.6" }, "runtimeConfig": { "gcsOutputDirectory": "dummy_root" diff --git a/sdk/python/kfp/v2/components/executor.py b/sdk/python/kfp/v2/components/executor.py index 2e0a57f06ec..12783ac796f 100644 --- a/sdk/python/kfp/v2/components/executor.py +++ b/sdk/python/kfp/v2/components/executor.py @@ -68,6 +68,13 @@ def _get_output_artifact(self, name: str): def _get_input_parameter_value(self, parameter_name: str, parameter_type: Any): + parameter_values = self._input.get('inputs', + {}).get('parameterValues', None) + + if parameter_values is not None: + value = parameter_values.get(parameter_name) + return value + parameter = self._input.get('inputs', {}).get('parameters', {}).get(parameter_name, None) @@ -120,20 +127,22 @@ def _get_input_artifact_path(self, artifact_name: str): def _write_output_parameter_value(self, name: str, value: Union[str, int, float, bool, dict, list, Dict, List]): - if type(value) == str: - output = {'stringValue': value} - elif type(value) == int: - output = {'intValue': value} - elif type(value) == float: - output = {'doubleValue': value} + if isinstance(value, (float, int)): + output = str(value) + elif isinstance(value, str): + # value is already a string. + output = value + elif isinstance(value, (bool, list, dict)): + output = json.dumps(value) else: - # For bool, list, dict, List, Dict, json serialize the value. 
- output = {'stringValue': json.dumps(value)} + raise ValueError( + 'Unable to serialize unknown type `{}` for parameter' + ' input with value `{}`'.format(value, type(value))) - if not self._executor_output.get('parameters'): - self._executor_output['parameters'] = {} + if not self._executor_output.get('parameterValues'): + self._executor_output['parameterValues'] = {} - self._executor_output['parameters'][name] = output + self._executor_output['parameterValues'][name] = value def _write_output_artifact_payload(self, name: str, value: Any): path = self._get_output_artifact_path(name) diff --git a/sdk/python/kfp/v2/components/executor_main.py b/sdk/python/kfp/v2/components/executor_main.py index 0b535110d96..612b7396a08 100644 --- a/sdk/python/kfp/v2/components/executor_main.py +++ b/sdk/python/kfp/v2/components/executor_main.py @@ -94,6 +94,9 @@ def executor_main(): executor_input = json.loads(args.executor_input) function_to_execute = getattr(module, func_name) + logging.info('Got executor_input:\n{}'.format( + json.dumps(executor_input, indent=4))) + executor = component_executor.Executor( executor_input=executor_input, function_to_execute=function_to_execute) diff --git a/sdk/python/kfp/v2/components/executor_test.py b/sdk/python/kfp/v2/components/executor_test.py index 607f9a7fa2e..a46e6a153dc 100644 --- a/sdk/python/kfp/v2/components/executor_test.py +++ b/sdk/python/kfp/v2/components/executor_test.py @@ -29,10 +29,8 @@ _EXECUTOR_INPUT = """\ { "inputs": { - "parameters": { - "input_parameter": { - "stringValue": "Hello, KFP" - } + "parameterValues": { + "input_parameter": "Hello, KFP" }, "artifacts": { "input_artifact_one_path": { @@ -217,16 +215,10 @@ def test_function_string_output(self): executor_input = """\ { "inputs": { - "parameters": { - "first_message": { - "stringValue": "Hello" - }, - "second_message": { - "stringValue": "" - }, - "third_message": { - "stringValue": "World" - } + "parameterValues": { + "first_message": "Hello", + "second_message": "", + "third_message": "World" } }, "outputs": { @@ -252,10 +244,8 @@ def test_func( 'r') as f: output_metadata = json.loads(f.read()) self.assertDictEqual(output_metadata, { - "parameters": { - "Output": { - "stringValue": "Hello, , World" - } + "parameterValues": { + "Output": "Hello, , World" }, }) @@ -263,13 +253,9 @@ def test_function_with_int_output(self): executor_input = """\ { "inputs": { - "parameters": { - "first": { - "intValue": 40 - }, - "second": { - "intValue": 2 - } + "parameterValues": { + "first": 40, + "second": 2 } }, "outputs": { @@ -291,10 +277,8 @@ def test_func(first: int, second: int) -> int: 'r') as f: output_metadata = json.loads(f.read()) self.assertDictEqual(output_metadata, { - "parameters": { - "Output": { - "intValue": 42 - } + "parameterValues": { + "Output": 42 }, }) @@ -302,13 +286,9 @@ def test_function_with_float_output(self): executor_input = """\ { "inputs": { - "parameters": { - "first": { - "doubleValue": 0.0 - }, - "second": { - "doubleValue": 1.2 - } + "parameterValues": { + "first": 0.0, + "second": 1.2 } }, "outputs": { @@ -330,10 +310,8 @@ def test_func(first: float, second: float) -> float: 'r') as f: output_metadata = json.loads(f.read()) self.assertDictEqual(output_metadata, { - "parameters": { - "Output": { - "doubleValue": 1.2 - } + "parameterValues": { + "Output": 1.2 }, }) @@ -341,13 +319,9 @@ def test_function_with_list_output(self): executor_input = """\ { "inputs": { - "parameters": { - "first": { - "intValue": 40 - }, - "second": { - "intValue": 2 - } + "parameterValues": 
{ + "first": 40, + "second": 2 } }, "outputs": { @@ -369,10 +343,8 @@ def test_func(first: int, second: int) -> List: 'r') as f: output_metadata = json.loads(f.read()) self.assertDictEqual(output_metadata, { - "parameters": { - "Output": { - "stringValue": "[40, 2]" - } + "parameterValues": { + "Output": [40, 2] }, }) @@ -380,13 +352,9 @@ def test_function_with_dict_output(self): executor_input = """\ { "inputs": { - "parameters": { - "first": { - "intValue": 40 - }, - "second": { - "intValue": 2 - } + "parameterValues": { + "first": 40, + "second": 2 } }, "outputs": { @@ -408,9 +376,10 @@ def test_func(first: int, second: int) -> Dict: 'r') as f: output_metadata = json.loads(f.read()) self.assertDictEqual(output_metadata, { - "parameters": { - "Output": { - "stringValue": "{\"first\": 40, \"second\": 2}" + 'parameterValues': { + 'Output': { + 'first': 40, + 'second': 2 } }, }) @@ -419,13 +388,9 @@ def test_function_with_typed_list_output(self): executor_input = """\ { "inputs": { - "parameters": { - "first": { - "intValue": 40 - }, - "second": { - "intValue": 2 - } + "parameterValues": { + "first": 40, + "second": 2 } }, "outputs": { @@ -447,10 +412,8 @@ def test_func(first: int, second: int) -> List[int]: 'r') as f: output_metadata = json.loads(f.read()) self.assertDictEqual(output_metadata, { - "parameters": { - "Output": { - "stringValue": "[40, 2]" - } + "parameterValues": { + "Output": [40, 2] }, }) @@ -458,13 +421,9 @@ def test_function_with_typed_dict_output(self): executor_input = """\ { "inputs": { - "parameters": { - "first": { - "intValue": 40 - }, - "second": { - "intValue": 2 - } + "parameterValues": { + "first": 40, + "second": 2 } }, "outputs": { @@ -486,9 +445,10 @@ def test_func(first: int, second: int) -> Dict[str, int]: 'r') as f: output_metadata = json.loads(f.read()) self.assertDictEqual(output_metadata, { - "parameters": { + "parameterValues": { "Output": { - "stringValue": "{\"first\": 40, \"second\": 2}" + "first": 40, + "second": 2 } }, }) @@ -497,13 +457,9 @@ def test_artifact_output(self): executor_input = """\ { "inputs": { - "parameters": { - "first": { - "stringValue": "Hello" - }, - "second": { - "stringValue": "World" - } + "parameterValues": { + "first": "Hello", + "second": "World" } }, "outputs": { @@ -617,13 +573,9 @@ def func_returning_plain_tuple() -> NamedTuple('Outputs', [ }] } }, - "parameters": { - "output_string": { - "stringValue": "Some output string" - }, - "output_int": { - "intValue": 101 - } + "parameterValues": { + "output_int": 101, + "output_string": "Some output string" }, }) @@ -637,13 +589,9 @@ def test_function_with_optional_inputs(self): executor_input = """\ { "inputs": { - "parameters": { - "first_message": { - "stringValue": "Hello" - }, - "second_message": { - "stringValue": "World" - } + "parameterValues": { + "first_message": "Hello", + "second_message": "World" } }, "outputs": { @@ -672,12 +620,10 @@ def test_func( output_metadata = json.loads(f.read()) self.assertDictEqual( output_metadata, { - "parameters": { - "Output": { - "stringValue": "Hello (), " - "World (), " - "None ()." - } + "parameterValues": { + "Output": "Hello (), " + "World (), " + "None ()." }, }) diff --git a/sdk/python/kfp/v2/components/types/type_utils.py b/sdk/python/kfp/v2/components/types/type_utils.py index 911e00369a9..2959431d358 100644 --- a/sdk/python/kfp/v2/components/types/type_utils.py +++ b/sdk/python/kfp/v2/components/types/type_utils.py @@ -13,6 +13,7 @@ # limitations under the License. 
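# Hedged sketch (the names here are illustrative, not part of this module):
# the remapping below is the behavioral core of the 2.0.0 -> 2.1.0 move.
# PrimitiveType collapsed every non-numeric type to STRING, whereas
# ParameterType keeps booleans, lists and dicts distinct, as the test
# updates in this patch confirm.
_OLD_PRIMITIVE_TYPES = {'bool': 'STRING', 'list': 'STRING', 'dict': 'STRING'}
_NEW_PARAMETER_TYPES = {'bool': 'BOOLEAN', 'list': 'LIST', 'dict': 'STRUCT'}

def _lookup(type_name: str, table: dict) -> str:
    # Keys are normalized (lowercased), matching _PARAMETER_TYPES_MAPPING.
    return table[type_name.lower()]

assert _lookup('Bool', _OLD_PRIMITIVE_TYPES) == 'STRING'
assert _lookup('Dict', _NEW_PARAMETER_TYPES) == 'STRUCT'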
"""Utilities for component I/O type mapping.""" import inspect +import json import re import warnings from typing import Dict, List, Optional, Type, Union @@ -41,27 +42,19 @@ # The keys are normalized (lowercased). These are types viewed as Parameters. # The values are the corresponding IR parameter primitive types. _PARAMETER_TYPES_MAPPING = { - 'integer': pipeline_spec_pb2.PrimitiveType.INT, - 'int': pipeline_spec_pb2.PrimitiveType.INT, - 'double': pipeline_spec_pb2.PrimitiveType.DOUBLE, - 'float': pipeline_spec_pb2.PrimitiveType.DOUBLE, - 'string': pipeline_spec_pb2.PrimitiveType.STRING, - 'str': pipeline_spec_pb2.PrimitiveType.STRING, - 'text': pipeline_spec_pb2.PrimitiveType.STRING, - 'bool': pipeline_spec_pb2.PrimitiveType.STRING, - 'boolean': pipeline_spec_pb2.PrimitiveType.STRING, - 'dict': pipeline_spec_pb2.PrimitiveType.STRING, - 'list': pipeline_spec_pb2.PrimitiveType.STRING, - 'jsonobject': pipeline_spec_pb2.PrimitiveType.STRING, - 'jsonarray': pipeline_spec_pb2.PrimitiveType.STRING, -} - -# Mapping primitive types to their IR message field names. -# This is used in constructing condition strings. -_PARAMETER_TYPES_VALUE_REFERENCE_MAPPING = { - pipeline_spec_pb2.PrimitiveType.INT: 'int_value', - pipeline_spec_pb2.PrimitiveType.DOUBLE: 'double_value', - pipeline_spec_pb2.PrimitiveType.STRING: 'string_value', + 'integer': pipeline_spec_pb2.ParameterType.NUMBER_INTEGER, + 'int': pipeline_spec_pb2.ParameterType.NUMBER_INTEGER, + 'double': pipeline_spec_pb2.ParameterType.NUMBER_DOUBLE, + 'float': pipeline_spec_pb2.ParameterType.NUMBER_DOUBLE, + 'string': pipeline_spec_pb2.ParameterType.STRING, + 'str': pipeline_spec_pb2.ParameterType.STRING, + 'text': pipeline_spec_pb2.ParameterType.STRING, + 'bool': pipeline_spec_pb2.ParameterType.BOOLEAN, + 'boolean': pipeline_spec_pb2.ParameterType.BOOLEAN, + 'dict': pipeline_spec_pb2.ParameterType.STRUCT, + 'list': pipeline_spec_pb2.ParameterType.LIST, + 'jsonobject': pipeline_spec_pb2.ParameterType.STRUCT, + 'jsonarray': pipeline_spec_pb2.ParameterType.LIST, } @@ -110,7 +103,7 @@ def get_artifact_type_schema( def get_parameter_type( param_type: Optional[Union[Type, str, dict]] -) -> pipeline_spec_pb2.PrimitiveType: +) -> pipeline_spec_pb2.ParameterType: """Get the IR I/O parameter type for the given ComponentSpec I/O type. Args: @@ -132,22 +125,44 @@ def get_parameter_type( return _PARAMETER_TYPES_MAPPING.get(type_name.lower()) -def get_parameter_type_field_name(type_name: Optional[str]) -> str: - """Get the IR field name for the given primitive type. - - For example: 'str' -> 'string_value', 'double' -> 'double_value', etc. - - Args: - type_name: type name of the ComponentSpec I/O primitive type. - - Returns: - The IR value reference field name. +def deserialize_parameter_value( + value: str, parameter_type: pipeline_spec_pb2.ParameterType +) -> Union[str, float, int, bool, list, dict]: + if parameter_type == pipeline_spec_pb2.ParameterType.NUMBER_DOUBLE: + result = float(value) + elif parameter_type == pipeline_spec_pb2.ParameterType.NUMBER_INTEGER: + result = int(value) + elif parameter_type == pipeline_spec_pb2.ParameterType.STRING: + # value is already a string. 
+ result = value + elif parameter_type == pipeline_spec_pb2.ParameterType.BOOLEAN: + result = (value == 'True' or value == 'true') + elif parameter_type == pipeline_spec_pb2.ParameterType.LIST: + result = json.loads(value) + elif parameter_type == pipeline_spec_pb2.ParameterType.STRUCT: + result = json.loads(value) + else: + raise ValueError( + 'Unknown parameter type `{}` for input with value `{}`'.format( + parameter_type, value)) + + return result + + +def serialize_parameter_value( + value: Union[str, float, int, bool, list, dict]) -> str: + if isinstance(value, (float, int)): + result = str(value) + elif isinstance(value, str): + # value is already a string. + result = value + elif isinstance(value, (bool, list, dict)): + result = json.dumps(value) + else: + raise ValueError('Unable to serialize unknown type `{}` for parameter' + ' input with value `{}`'.format(value, type(value))) - Raises: - AttributeError: if type_name is not a string type. - """ - return _PARAMETER_TYPES_VALUE_REFERENCE_MAPPING.get( - get_parameter_type(type_name)) + return result def get_input_artifact_type_schema( diff --git a/sdk/python/kfp/v2/components/types/type_utils_test.py b/sdk/python/kfp/v2/components/types/type_utils_test.py index 2fa1fbc7b6b..9a315867c55 100644 --- a/sdk/python/kfp/v2/components/types/type_utils_test.py +++ b/sdk/python/kfp/v2/components/types/type_utils_test.py @@ -199,71 +199,71 @@ def test_get_artifact_type_schema(self, artifact_class_or_type_name, @parameterized.parameters( { 'given_type': 'Int', - 'expected_type': pb.PrimitiveType.INT, + 'expected_type': pb.ParameterType.NUMBER_INTEGER, }, { 'given_type': 'Integer', - 'expected_type': pb.PrimitiveType.INT, + 'expected_type': pb.ParameterType.NUMBER_INTEGER, }, { 'given_type': int, - 'expected_type': pb.PrimitiveType.INT, + 'expected_type': pb.ParameterType.NUMBER_INTEGER, }, { 'given_type': 'Double', - 'expected_type': pb.PrimitiveType.DOUBLE, + 'expected_type': pb.ParameterType.NUMBER_DOUBLE, }, { 'given_type': 'Float', - 'expected_type': pb.PrimitiveType.DOUBLE, + 'expected_type': pb.ParameterType.NUMBER_DOUBLE, }, { 'given_type': float, - 'expected_type': pb.PrimitiveType.DOUBLE, + 'expected_type': pb.ParameterType.NUMBER_DOUBLE, }, { 'given_type': 'String', - 'expected_type': pb.PrimitiveType.STRING, + 'expected_type': pb.ParameterType.STRING, }, { 'given_type': 'Text', - 'expected_type': pb.PrimitiveType.STRING, + 'expected_type': pb.ParameterType.STRING, }, { 'given_type': str, - 'expected_type': pb.PrimitiveType.STRING, + 'expected_type': pb.ParameterType.STRING, }, { 'given_type': 'Boolean', - 'expected_type': pb.PrimitiveType.STRING, + 'expected_type': pb.ParameterType.BOOLEAN, }, { 'given_type': bool, - 'expected_type': pb.PrimitiveType.STRING, + 'expected_type': pb.ParameterType.BOOLEAN, }, { 'given_type': 'Dict', - 'expected_type': pb.PrimitiveType.STRING, + 'expected_type': pb.ParameterType.STRUCT, }, { 'given_type': dict, - 'expected_type': pb.PrimitiveType.STRING, + 'expected_type': pb.ParameterType.STRUCT, }, { 'given_type': 'List', - 'expected_type': pb.PrimitiveType.STRING, + 'expected_type': pb.ParameterType.LIST, }, { 'given_type': list, - 'expected_type': pb.PrimitiveType.STRING, + 'expected_type': pb.ParameterType.LIST, }, { 'given_type': Dict[str, int], - 'expected_type': pb.PrimitiveType.STRING, + 'expected_type': pb.ParameterType.STRUCT, }, { 'given_type': List[Any], - 'expected_type': pb.PrimitiveType.STRING, + 'expected_type': pb.ParameterType.LIST, }, { 'given_type': { @@ -271,7 +271,7 @@ def 
test_get_artifact_type_schema(self, artifact_class_or_type_name, 'data_type': 'proto:tfx.components.trainer.TrainArgs' } }, - 'expected_type': pb.PrimitiveType.STRING, + 'expected_type': pb.ParameterType.STRUCT, }, ) def test_get_parameter_type(self, given_type, expected_type): @@ -279,7 +279,7 @@ def test_get_parameter_type(self, given_type, expected_type): type_utils.get_parameter_type(given_type)) # Test get parameter by Python type. - self.assertEqual(pb.PrimitiveType.INT, + self.assertEqual(pb.ParameterType.NUMBER_INTEGER, type_utils.get_parameter_type(int)) def test_get_parameter_type_invalid(self): @@ -314,14 +314,6 @@ def test_get_input_artifact_type_schema(self): type_utils.get_input_artifact_type_schema('input3', input_specs).schema_title) - def test_get_parameter_type_field_name(self): - self.assertEqual('string_value', - type_utils.get_parameter_type_field_name('String')) - self.assertEqual('int_value', - type_utils.get_parameter_type_field_name('Integer')) - self.assertEqual('double_value', - type_utils.get_parameter_type_field_name('Float')) - @parameterized.parameters( { 'given_type': 'String', diff --git a/v2/cacheutils/cache.go b/v2/cacheutils/cache.go index d3336c19531..048c7ffdd82 100644 --- a/v2/cacheutils/cache.go +++ b/v2/cacheutils/cache.go @@ -7,17 +7,15 @@ import ( "encoding/json" "fmt" "os" - "strconv" - "strings" "google.golang.org/grpc" "google.golang.org/protobuf/encoding/protojson" + "google.golang.org/protobuf/types/known/structpb" "github.com/golang/glog" "github.com/kubeflow/pipelines/api/v2alpha1/go/cachekey" "github.com/kubeflow/pipelines/api/v2alpha1/go/pipelinespec" api "github.com/kubeflow/pipelines/v2/kfp-api" - "github.com/kubeflow/pipelines/v2/third_party/ml_metadata" ) const ( @@ -61,6 +59,7 @@ func GenerateCacheKey( InputParameters: make(map[string]*pipelinespec.Value), OutputArtifactsSpec: make(map[string]*pipelinespec.RuntimeArtifact), OutputParametersSpec: make(map[string]string), + InputParameterValues: make(map[string]*structpb.Value), } for inputArtifactName, inputArtifactList := range inputs.GetArtifacts() { @@ -77,6 +76,10 @@ func GenerateCacheKey( } } + for inputParameterName, inputParameterValue := range inputs.GetParameterValues() { + cacheKey.InputParameterValues[inputParameterName] = inputParameterValue + } + for outputArtifactName, outputArtifactList := range outputs.GetArtifacts() { if len(outputArtifactList.Artifacts) == 0 { continue @@ -189,29 +192,3 @@ func (c *Client) CreateExecutionCache(ctx context.Context, task *api.Task) error } return nil } - -func GetMLMDOutputParams(cachedExecution *ml_metadata.Execution) (map[string]string, error) { - mlmdOutputParameters := make(map[string]string) - for customPropName, customPropValue := range cachedExecution.CustomProperties { - if strings.HasPrefix(customPropName, "output:") { - slice := strings.Split(customPropName, ":") - if len(slice) != 2 { - return nil, fmt.Errorf("failed to parse output parameter from MLMD execution custom property %v", customPropName) - } - outputParamName := slice[1] - var outputParamValue string - switch t := customPropValue.Value.(type) { - case *ml_metadata.Value_StringValue: - outputParamValue = customPropValue.GetStringValue() - case *ml_metadata.Value_DoubleValue: - outputParamValue = strconv.FormatFloat(customPropValue.GetDoubleValue(), 'f', -1, 64) - case *ml_metadata.Value_IntValue: - outputParamValue = strconv.FormatInt(customPropValue.GetIntValue(), 10) - default: - return nil, fmt.Errorf("unknown PipelineSpec Value type %T", t) - } - 
mlmdOutputParameters[outputParamName] = outputParamValue - } - } - return mlmdOutputParameters, nil -} diff --git a/v2/component/launcher.go b/v2/component/launcher.go index a6959f12793..35d9448a415 100644 --- a/v2/component/launcher.go +++ b/v2/component/launcher.go @@ -18,6 +18,7 @@ package component import ( "context" + "encoding/json" "errors" "flag" "fmt" @@ -42,9 +43,11 @@ import ( "gocloud.dev/blob" _ "gocloud.dev/blob/gcsblob" "google.golang.org/protobuf/encoding/protojson" + "google.golang.org/protobuf/types/known/structpb" "k8s.io/client-go/kubernetes" "k8s.io/client-go/rest" ) + const OutputMetadataFilepath = "/tmp/kfp_outputs/output_metadata.json" // Launcher is used to launch KFP components. It handles the recording of the @@ -276,19 +279,12 @@ func (l *Launcher) executeWithCacheHit(ctx context.Context, executorInput *pipel if err != nil { return fmt.Errorf("failure while transfering cachedMLMDExecutionID %s from string to int64: %w", cachedMLMDExecutionID, err) } - executions, err := l.metadataClient.GetExecutions(ctx, []int64{cachedMLMDExecutionIDInt64}) + execution, err := l.metadataClient.GetExecution(ctx, cachedMLMDExecutionIDInt64) if err != nil { return fmt.Errorf("failure while getting execution of cachedMLMDExecutionID %v: %w", cachedMLMDExecutionIDInt64, err) } - if len(executions) == 0 { - return fmt.Errorf("the execution with id %s does not exist in MLMD", cachedMLMDExecutionID) - } - if len(executions) > 1 { - return fmt.Errorf("got multiple executions with id %s in MLMD", cachedMLMDExecutionID) - } - cachedExecution := executions[0] - outputParameters, err := l.storeOutputParameterValueFromCache(cachedExecution) + outputParameters, err := l.storeOutputParameterValueFromCache(execution) if err != nil { return fmt.Errorf("failed to store output parameter value from cache: %w", err) } @@ -304,47 +300,26 @@ func (l *Launcher) executeWithCacheHit(ctx context.Context, executorInput *pipel return nil } -func (l *Launcher) storeOutputParameterValueFromCache(cachedExecution *pb.Execution) (*metadata.Parameters, error) { - mlmdOutputParameters, err := cacheutils.GetMLMDOutputParams(cachedExecution) +func (l *Launcher) storeOutputParameterValueFromCache(execution *metadata.Execution) (map[string]*structpb.Value, error) { + _, outputs, err := execution.GetParameters() if err != nil { return nil, err } - // Read output parameters. 
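// Hedged sketch (the function name is hypothetical; structpb comes from the
// import this diff adds to launcher.go): with 2.1.0 the per-type
// Int/String/Double maps go away, and each cached output is a
// *structpb.Value rendered to text once, roughly the job that
// metadata.PbValueToText is used for later in this hunk.
func renderCachedParam(v *structpb.Value) (string, error) {
	// Plain strings are written without JSON quoting; numbers, booleans,
	// lists and structs fall back to their JSON encoding.
	if s, ok := v.Kind.(*structpb.Value_StringValue); ok {
		return s.StringValue, nil
	}
	b, err := v.MarshalJSON()
	return string(b), err
}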
- outputParameters := &metadata.Parameters{ - IntParameters: make(map[string]int64), - StringParameters: make(map[string]string), - DoubleParameters: make(map[string]float64), - } - for name, param := range l.runtimeInfo.OutputParameters { filename := param.Path - outputParamValue, ok := mlmdOutputParameters[name] + value, ok := outputs[name] if !ok { return nil, fmt.Errorf("can't find parameter %v in mlmdOutputParameters", name) } - if err := ioutil.WriteFile(filename, []byte(outputParamValue), 0644); err != nil { - return nil, fmt.Errorf("failed to write output parameter %q to file %q: %w", name, filename, err) + text, err := metadata.PbValueToText(value) + if err != nil { + return nil, err } - switch param.Type { - case "STRING": - outputParameters.StringParameters[name] = outputParamValue - case "INT": - i, err := strconv.ParseInt(strings.TrimSpace(outputParamValue), 10, 0) - if err != nil { - return nil, fmt.Errorf("failed to parse parameter name=%q value =%v to int: %w", name, outputParamValue, err) - } - outputParameters.IntParameters[name] = i - case "DOUBLE": - f, err := strconv.ParseFloat(strings.TrimSpace(outputParamValue), 0) - if err != nil { - return nil, fmt.Errorf("failed to parse parameter name=%q value =%v to double: %w", name, outputParamValue, err) - } - outputParameters.DoubleParameters[name] = f - default: - return nil, fmt.Errorf("unknown type. Expected STRING, INT or DOUBLE") + if err := ioutil.WriteFile(filename, []byte(text), 0644); err != nil { + return nil, fmt.Errorf("failed to write output parameter %q to file %q: %w", name, filename, err) } } - return outputParameters, nil + return outputs, nil } func (l *Launcher) storeOutputArtifactMetadataFromCache(ctx context.Context, executorInputOutputs *pipelinespec.ExecutorInput_Outputs, cachedMLMDExecutionID int64) ([]*metadata.OutputArtifact, error) { @@ -496,22 +471,14 @@ func (l *Launcher) publish(ctx context.Context, executorInput *pipelinespec.Exec } func (l *Launcher) dumpOutputParameters(executorOutput *pipelinespec.ExecutorOutput) error { - for name, parameter := range executorOutput.Parameters { + for name, parameter := range executorOutput.ParameterValues { wrap := func(err error) error { return fmt.Errorf("failed to dump output parameter %q in executor output to disk: %w", name, err) } - var value string - switch t := parameter.Value.(type) { - case *pipelinespec.Value_StringValue: - value = parameter.GetStringValue() - case *pipelinespec.Value_DoubleValue: - value = strconv.FormatFloat(parameter.GetDoubleValue(), 'f', -1, 64) - case *pipelinespec.Value_IntValue: - value = strconv.FormatInt(parameter.GetIntValue(), 10) - default: - return wrap(fmt.Errorf("unknown PipelineSpec Value type %T", t)) + value, err := metadata.PbValueToText(parameter) + if err != nil { + return wrap(err) } - outputParam, ok := l.runtimeInfo.OutputParameters[name] if !ok { return wrap(fmt.Errorf("parameter is not defined in component")) @@ -608,37 +575,49 @@ func (l *Launcher) dumpOutputArtifactsMetadata(outputArtifacts []*metadata.Outpu return nil } -func (l *Launcher) readOutputParameters() (*metadata.Parameters, error) { - outputParameters := &metadata.Parameters{ - IntParameters: make(map[string]int64), - StringParameters: make(map[string]string), - DoubleParameters: make(map[string]float64), - } +func (l *Launcher) readOutputParameters() (map[string]*structpb.Value, error) { + outputParameters := make(map[string]*structpb.Value) + for n, op := range l.runtimeInfo.OutputParameters { - msg := func(err error) error { + wrap := 
func(err error) error { return fmt.Errorf("Failed to read output parameter name=%q type=%q path=%q: %w", n, op.Type, op.Path, err) } + b, err := ioutil.ReadFile(op.Path) if err != nil { - return nil, msg(err) + return nil, wrap(err) } switch op.Type { case "STRING": - outputParameters.StringParameters[n] = string(b) - case "INT": - i, err := strconv.ParseInt(strings.TrimSpace(string(b)), 10, 0) + outputParameters[n] = structpb.NewStringValue(string(b)) + case "NUMBER_INTEGER", "NUMBER_DOUBLE": + f, err := strconv.ParseFloat(strings.TrimSpace(string(b)), 0) if err != nil { - return nil, msg(err) + return nil, wrap(fmt.Errorf("failed to parse number parameter: %w", err)) } - outputParameters.IntParameters[n] = i - case "DOUBLE": - f, err := strconv.ParseFloat(strings.TrimSpace(string(b)), 0) + outputParameters[n] = structpb.NewNumberValue(f) + case "BOOLEAN": + b, err := strconv.ParseBool(strings.TrimSpace(string(b))) if err != nil { - return nil, msg(err) + return nil, wrap(fmt.Errorf("failed to parse boolean parameter: %w", err)) } - outputParameters.DoubleParameters[n] = f + outputParameters[n] = structpb.NewBoolValue(b) + case "LIST": + value := &structpb.Value{} + if err := value.UnmarshalJSON(b); err != nil { + return nil, wrap(fmt.Errorf("failed to parse list parameter: %w", err)) + + } + outputParameters[n] = value + case "STRUCT": + value := &structpb.Value{} + if err := value.UnmarshalJSON(b); err != nil { + return nil, wrap(fmt.Errorf("failed to parse dict parameter: %w", err)) + + } + outputParameters[n] = value default: - return nil, msg(fmt.Errorf("unknown type. Expected STRING, INT or DOUBLE")) + return nil, wrap(fmt.Errorf("unknown ParameterType %q", op.Type)) } } return outputParameters, nil @@ -783,15 +762,27 @@ func getPlaceholders(executorInput *pipelinespec.ExecutorInput) (placeholders ma } // Prepare input parameter placeholders. 
- for name, parameter := range executorInput.Inputs.Parameters { + for name, parameter := range executorInput.Inputs.ParameterValues { key := fmt.Sprintf(`{{$.inputs.parameters['%s']}}`, name) - switch t := parameter.Value.(type) { - case *pipelinespec.Value_StringValue: + switch t := parameter.Kind.(type) { + case *structpb.Value_StringValue: placeholders[key] = parameter.GetStringValue() - case *pipelinespec.Value_DoubleValue: - placeholders[key] = strconv.FormatFloat(parameter.GetDoubleValue(), 'f', -1, 64) - case *pipelinespec.Value_IntValue: - placeholders[key] = strconv.FormatInt(parameter.GetIntValue(), 10) + case *structpb.Value_NumberValue: + placeholders[key] = strconv.FormatFloat(parameter.GetNumberValue(), 'f', -1, 64) + case *structpb.Value_BoolValue: + placeholders[key] = strconv.FormatBool(parameter.GetBoolValue()) + case *structpb.Value_ListValue: + b, err := json.Marshal(parameter.GetListValue()) + if err != nil { + return nil, fmt.Errorf("failed to JSON-marshal list input parameter %q: %w", name, err) + } + placeholders[key] = string(b) + case *structpb.Value_StructValue: + b, err := json.Marshal(parameter.GetStructValue()) + if err != nil { + return nil, fmt.Errorf("failed to JSON-marshal dict input parameter %q: %w", name, err) + } + placeholders[key] = string(b) default: return nil, fmt.Errorf("unknown PipelineSpec Value type %T", t) } @@ -838,8 +829,8 @@ func mergeRuntimeArtifacts(src, dst *pipelinespec.RuntimeArtifact) { func getExecutorOutputFile(path string) (*pipelinespec.ExecutorOutput, error) { // collect user executor output file executorOutput := &pipelinespec.ExecutorOutput{ - Parameters: map[string]*pipelinespec.Value{}, - Artifacts: map[string]*pipelinespec.ArtifactList{}, + ParameterValues: map[string]*structpb.Value{}, + Artifacts: map[string]*pipelinespec.ArtifactList{}, } _, err := os.Stat(path) diff --git a/v2/component/launcher_v2.go b/v2/component/launcher_v2.go index f7f6e442f3b..89cbe38aca1 100644 --- a/v2/component/launcher_v2.go +++ b/v2/component/launcher_v2.go @@ -5,20 +5,22 @@ import ( "context" "encoding/json" "fmt" - "github.com/golang/protobuf/ptypes/timestamp" - "github.com/kubeflow/pipelines/v2/cacheutils" - api "github.com/kubeflow/pipelines/v2/kfp-api" "io/ioutil" "strconv" "strings" "time" + "github.com/golang/protobuf/ptypes/timestamp" + "github.com/kubeflow/pipelines/v2/cacheutils" + api "github.com/kubeflow/pipelines/v2/kfp-api" + "github.com/kubeflow/pipelines/api/v2alpha1/go/pipelinespec" "github.com/kubeflow/pipelines/v2/metadata" "github.com/kubeflow/pipelines/v2/objectstore" pb "github.com/kubeflow/pipelines/v2/third_party/ml_metadata" "gocloud.dev/blob" "google.golang.org/protobuf/encoding/protojson" + "google.golang.org/protobuf/types/known/structpb" "k8s.io/client-go/kubernetes" "k8s.io/client-go/rest" ) @@ -28,7 +30,7 @@ type LauncherV2Options struct { PodName, PodUID, MLMDServerAddress, - MLMDServerPort , + MLMDServerPort, PipelineName, RunID string } @@ -98,7 +100,7 @@ func NewLauncherV2(ctx context.Context, executionID int64, executorInputJSON, co options: *opts, metadataClient: metadataClient, k8sClient: k8sClient, - cacheClient: cacheClient, + cacheClient: cacheClient, }, nil } @@ -145,7 +147,7 @@ func (l *LauncherV2) Execute(ctx context.Context) (err error) { RunId: l.options.RunID, MlmdExecutionID: strconv.FormatInt(id, 10), CreatedAt: ×tamp.Timestamp{Seconds: executedStartedTime}, - FinishedAt: ×tamp.Timestamp{Seconds: time.Now().Unix()}, + FinishedAt: ×tamp.Timestamp{Seconds: time.Now().Unix()}, Fingerprint: 
fingerPrint, } return l.cacheClient.CreateExecutionCache(ctx, task) @@ -212,17 +214,14 @@ func (l *LauncherV2) publish(ctx context.Context, execution *metadata.Execution, err = fmt.Errorf("failed to publish results to ML Metadata: %w", err) } }() - outputParameters, err := metadata.NewParameters(executorOutput.GetParameters()) - if err != nil { - return err - } + outputParameters := executorOutput.GetParameterValues() // TODO(Bobgy): upload output artifacts. // TODO(Bobgy): when adding artifacts, we will need execution.pipeline to be non-nil, because we need // to publish output artifacts to the context too. return l.metadataClient.PublishExecution(ctx, execution, outputParameters, outputArtifacts, pb.Execution_COMPLETE) } -func executeV2(ctx context.Context, executorInput *pipelinespec.ExecutorInput, component *pipelinespec.ComponentSpec, cmd string, args []string, bucket *blob.Bucket, bucketConfig *objectstore.Config, metadataClient *metadata.Client, namespace string, k8sClient *kubernetes.Clientset ) (*pipelinespec.ExecutorOutput, []*metadata.OutputArtifact, error) { +func executeV2(ctx context.Context, executorInput *pipelinespec.ExecutorInput, component *pipelinespec.ComponentSpec, cmd string, args []string, bucket *blob.Bucket, bucketConfig *objectstore.Config, metadataClient *metadata.Client, namespace string, k8sClient *kubernetes.Clientset) (*pipelinespec.ExecutorOutput, []*metadata.OutputArtifact, error) { executorOutput, err := execute(ctx, executorInput, cmd, args, bucket, bucketConfig, namespace, k8sClient) if err != nil { return nil, nil, err @@ -251,10 +250,10 @@ func executeV2(ctx context.Context, executorInput *pipelinespec.ExecutorInput, c // collectOutputParameters collect output parameters from local disk and add them // to executor output. func collectOutputParameters(executorInput *pipelinespec.ExecutorInput, executorOutput *pipelinespec.ExecutorOutput, component *pipelinespec.ComponentSpec) error { - if executorOutput.Parameters == nil { - executorOutput.Parameters = make(map[string]*pipelinespec.Value) + if executorOutput.ParameterValues == nil { + executorOutput.ParameterValues = make(map[string]*structpb.Value) } - outputParameters := executorOutput.GetParameters() + outputParameters := executorOutput.GetParameterValues() for name, param := range executorInput.GetOutputs().GetParameters() { _, ok := outputParameters[name] if ok { @@ -274,24 +273,11 @@ func collectOutputParameters(executorInput *pipelinespec.ExecutorInput, executor if err != nil { return msg(err) } - switch paramSpec.GetType() { - case pipelinespec.PrimitiveType_STRING: - outputParameters[name] = metadata.StringValue(string(b)) - case pipelinespec.PrimitiveType_INT: - i, err := strconv.ParseInt(strings.TrimSpace(string(b)), 10, 0) - if err != nil { - return msg(err) - } - outputParameters[name] = metadata.IntValue(i) - case pipelinespec.PrimitiveType_DOUBLE: - f, err := strconv.ParseFloat(strings.TrimSpace(string(b)), 0) - if err != nil { - return msg(err) - } - outputParameters[name] = metadata.DoubleValue(f) - default: - return msg(fmt.Errorf("unknown type. 
Expected STRING, INT or DOUBLE")) + value, err := metadata.TextToPbValue(string(b), paramSpec.GetParameterType()) + if err != nil { + return msg(err) } + outputParameters[name] = value } return nil } diff --git a/v2/component/runtime_info.go b/v2/component/runtime_info.go index 81826c88263..f5cca36fdfe 100644 --- a/v2/component/runtime_info.go +++ b/v2/component/runtime_info.go @@ -29,7 +29,13 @@ import ( ) type inputParameter struct { - // Type should be one of "INT", "STRING" or "DOUBLE". + // Type should be one of: + // - STRING + // - NUMBER_INTEGER + // - NUMBER_DOUBLE + // - BOOLEAN + // - LIST + // - STRUCT Type string // File used to read input parameters. Value string @@ -46,7 +52,13 @@ type inputArtifact struct { } type outputParameter struct { - // Type should be one of "INT", "STRING" or "DOUBLE". + // Type should be one of: + // - STRING + // - NUMBER_INTEGER + // - NUMBER_DOUBLE + // - BOOLEAN + // - LIST + // - STRUCT Type string // File used to write output parameters to. Path string @@ -153,8 +165,8 @@ type generateOutputURI func(outputName string) string func (r *runtimeInfo) generateExecutorInput(genOutputURI generateOutputURI, outputMetadataFilepath string) (*pipelinespec.ExecutorInput, error) { inputs := &pipelinespec.ExecutorInput_Inputs{ - Parameters: make(map[string]*pipelinespec.Value), - Artifacts: make(map[string]*pipelinespec.ArtifactList), + ParameterValues: make(map[string]*structpb.Value), + Artifacts: make(map[string]*pipelinespec.ArtifactList), } outputs := &pipelinespec.ExecutorInput_Outputs{ @@ -164,26 +176,38 @@ func (r *runtimeInfo) generateExecutorInput(genOutputURI generateOutputURI, outp } for name, ip := range r.InputParameters { - value := &pipelinespec.Value{} + var value *structpb.Value switch ip.Type { case "STRING": - value.Value = &pipelinespec.Value_StringValue{StringValue: ip.Value} - case "INT": - i, err := strconv.ParseInt(ip.Value, 10, 0) + value = structpb.NewStringValue(ip.Value) + case "NUMBER_INTEGER", "NUMBER_DOUBLE": + f, err := strconv.ParseFloat(ip.Value, 0) if err != nil { - return nil, fmt.Errorf("failed to parse int parameter %q from '%v': %w", name, i, err) + return nil, fmt.Errorf("failed to parse number parameter %q from '%v': %w", name, ip.Value, err) } - value.Value = &pipelinespec.Value_IntValue{IntValue: i} - case "DOUBLE": - f, err := strconv.ParseFloat(ip.Value, 0) + value = structpb.NewNumberValue(f) + case "BOOLEAN": + b, err := strconv.ParseBool(ip.Value) if err != nil { - return nil, fmt.Errorf("failed to parse double parameter %q from '%v': %w", name, f, err) + return nil, fmt.Errorf("failed to parse boolean parameter %q from '%v': %w", name, ip.Value, err) + } + value = structpb.NewBoolValue(b) + case "LIST": + value = &structpb.Value{} + if err := value.UnmarshalJSON([]byte(ip.Value)); err != nil { + return nil, fmt.Errorf("failed to parse list parameter %q from '%v': %w", name, ip.Value, err) + + } + case "STRUCT": + value = &structpb.Value{} + if err := value.UnmarshalJSON([]byte(ip.Value)); err != nil { + return nil, fmt.Errorf("failed to parse struct parameter %q from '%v': %w", name, ip.Value, err) + } - value.Value = &pipelinespec.Value_DoubleValue{DoubleValue: f} default: return nil, fmt.Errorf("unknown ParameterType for parameter %q: %q", name, ip.Type) } - inputs.Parameters[name] = value + inputs.ParameterValues[name] = value } for name, ia := range r.InputArtifacts { diff --git a/v2/component/runtime_info_test.go b/v2/component/runtime_info_test.go index 25a268fb24e..ac0d11c084d 100644 --- 
a/v2/component/runtime_info_test.go +++ b/v2/component/runtime_info_test.go @@ -194,7 +194,15 @@ func TestExecutorInputGeneration(t *testing.T) { "type": "STRING" }, "num_steps": { - "type": "INT" + "type": "NUMBER_INTEGER" + }, + "list_parameter": { + "type": "LIST", + "value": "[1, 2, 3]" + }, + "dict_parameter": { + "type": "STRUCT", + "value": "{\"key_1\": \"value_1\", \"key_2\": 2}" } }, "inputArtifacts": { @@ -215,7 +223,7 @@ func TestExecutorInputGeneration(t *testing.T) { "path": "/tmp/outputs/output_parameter_one/data" }, "output_parameter_two": { - "type": "INT", + "type": "NUMBER_INTEGER", "path": "/tmp/outputs/output_parameter_two/data" } }, @@ -234,9 +242,22 @@ func TestExecutorInputGeneration(t *testing.T) { }`, dataset_one_path, dataset_two_path), want: &pipelinespec.ExecutorInput{ Inputs: &pipelinespec.ExecutorInput_Inputs{ - Parameters: map[string]*pipelinespec.Value{ - "message": {Value: &pipelinespec.Value_StringValue{StringValue: "Some string value with { \"special\": \"chars\" }"}}, - "num_steps": {Value: &pipelinespec.Value_IntValue{IntValue: 5}}, + ParameterValues: map[string]*structpb.Value{ + "message": structpb.NewStringValue("Some string value with { \"special\": \"chars\" }"), + "num_steps": structpb.NewNumberValue(5), + "list_parameter": structpb.NewListValue(&structpb.ListValue{ + Values: []*structpb.Value{ + structpb.NewNumberValue(1), + structpb.NewNumberValue(2), + structpb.NewNumberValue(3), + }, + }), + "dict_parameter": structpb.NewStructValue(&structpb.Struct{ + Fields: map[string]*structpb.Value{ + "key_1": structpb.NewStringValue("value_1"), + "key_2": structpb.NewNumberValue(2), + }, + }), }, Artifacts: map[string]*pipelinespec.ArtifactList{ "dataset_one": { @@ -312,7 +333,7 @@ func TestExecutorInputGeneration(t *testing.T) { } if diff := cmp.Diff(test.want, got, cmpopts.EquateEmpty(), protocmp.Transform()); diff != "" { - t.Errorf("generateExecutorInput() = %+v, want %+v\nDiff (-want, +got)\n%s", got, test.want, diff) + t.Errorf("generateExecutorInput() =\n%+v\nWant:\n%+v\nDiff (-want, +got)\n%s", got, test.want, diff) s, _ := json.MarshalIndent(test.want, "", " ") fmt.Printf("Want\n%s", s) } diff --git a/v2/driver/driver.go b/v2/driver/driver.go index 085683aab9a..1290ccddd89 100644 --- a/v2/driver/driver.go +++ b/v2/driver/driver.go @@ -3,7 +3,12 @@ package driver import ( "context" "fmt" + "path" + "strconv" + "strings" + "github.com/golang/glog" + structpb "github.com/golang/protobuf/ptypes/struct" "github.com/kubeflow/pipelines/api/v2alpha1/go/pipelinespec" "github.com/kubeflow/pipelines/v2/cacheutils" "github.com/kubeflow/pipelines/v2/component" @@ -12,9 +17,6 @@ import ( pb "github.com/kubeflow/pipelines/v2/third_party/ml_metadata" "k8s.io/client-go/kubernetes" "k8s.io/client-go/rest" - "path" - "strconv" - "strings" ) // TODO Move driver to component package @@ -33,7 +35,7 @@ type Options struct { // required only by container driver DAGExecutionID int64 DAGContextID int64 - Container *pipelinespec.PipelineDeploymentConfig_PipelineContainerSpec + Container *pipelinespec.PipelineDeploymentConfig_PipelineContainerSpec // required only by root DAG driver Namespace string } @@ -106,7 +108,7 @@ func RootDAG(ctx context.Context, opts Options, mlmd *metadata.Client) (executio } executorInput := &pipelinespec.ExecutorInput{ Inputs: &pipelinespec.ExecutorInput_Inputs{ - Parameters: opts.RuntimeConfig.Parameters, + ParameterValues: opts.RuntimeConfig.GetParameterValues(), }, } // TODO(Bobgy): validate executorInput matches component spec types @@ 
-205,7 +207,7 @@ func Container(ctx context.Context, opts Options, mlmd *metadata.Client, cacheCl if err != nil { return nil, fmt.Errorf("failure while getting fingerPrint: %w", err) } - cachedMLMDExecutionID, err := cacheClient.GetExecutionCache(fingerPrint, "pipeline/" + opts.PipelineName, opts.Namespace) + cachedMLMDExecutionID, err := cacheClient.GetExecutionCache(fingerPrint, "pipeline/"+opts.PipelineName, opts.Namespace) if err != nil { return nil, fmt.Errorf("failure while getting executionCache: %w", err) } @@ -224,21 +226,17 @@ func Container(ctx context.Context, opts Options, mlmd *metadata.Client, cacheCl if err != nil { return nil, err } - outputParameters, err := metadata.NewParameters(executorOutput.GetParameters()) - if err != nil { - return nil, err - } // TODO(Bobgy): upload output artifacts. // TODO(Bobgy): when adding artifacts, we will need execution.pipeline to be non-nil, because we need // to publish output artifacts to the context too. - if err := mlmd.PublishExecution(ctx, createdExecution, outputParameters, outputArtifacts, pb.Execution_CACHED); err != nil { + if err := mlmd.PublishExecution(ctx, createdExecution, executorOutput.GetParameterValues(), outputArtifacts, pb.Execution_CACHED); err != nil { return nil, fmt.Errorf("failed to publish cached execution: %w", err) } glog.Infof("Cached") - return &Execution{ + return &Execution{ ID: createdExecution.GetID(), ExecutorInput: executorInput, - Cached: true, + Cached: true, }, nil } @@ -257,57 +255,19 @@ func reuseCachedOutputs(ctx context.Context, executorInput *pipelinespec.Executo if err != nil { return nil, nil, fmt.Errorf("failure while getting execution of cachedMLMDExecutionID %v: %w", cachedMLMDExecutionIDInt64, err) } - cachedExecution := execution.GetExecution() executorOutput := &pipelinespec.ExecutorOutput{ - Parameters: map[string]*pipelinespec.Value{}, - Artifacts: map[string]*pipelinespec.ArtifactList{}, + Artifacts: map[string]*pipelinespec.ArtifactList{}, } - if err := collectOutPutParametersFromCache(executorOutput, outputDefinitions, executorInput, cachedExecution); err != nil { + _, outputs, err := execution.GetParameters() + if err != nil { return nil, nil, fmt.Errorf("failed to collect output parameters from cache: %w", err) } + executorOutput.ParameterValues = outputs outputArtifacts, err := collectOutputArtifactMetadataFromCache(ctx, executorInput, cachedMLMDExecutionIDInt64, mlmd) if err != nil { return nil, nil, fmt.Errorf("failed collect output artifact metadata from cache: %w", err) } return executorOutput, outputArtifacts, nil - -} - -func collectOutPutParametersFromCache(executorOutput *pipelinespec.ExecutorOutput, outputDefinitions *pipelinespec.ComponentOutputsSpec, executorInput *pipelinespec.ExecutorInput, cachedExecution *pb.Execution, ) error { - mlmdOutputParameters, err := cacheutils.GetMLMDOutputParams(cachedExecution) - if err != nil { - return err - } - outputParameters := executorOutput.GetParameters() - for name, _ := range executorInput.GetOutputs().GetParameters() { - paramSpec, ok := outputDefinitions.GetParameters()[name] - if !ok { - return fmt.Errorf("can't find parameter %v in outputDefinitions", name) - } - outputParamValue, ok := mlmdOutputParameters[name] - if !ok { - return fmt.Errorf("can't find parameter %v in mlmdOutputParameters", name) - } - switch paramSpec.GetType() { - case pipelinespec.PrimitiveType_STRING: - outputParameters[name] = metadata.StringValue(outputParamValue) - case pipelinespec.PrimitiveType_INT: - i, err := 
strconv.ParseInt(strings.TrimSpace(outputParamValue), 10, 0) - if err != nil { - return fmt.Errorf("failed to parse parameter name=%q value =%v to int: %w", name, outputParamValue, err) - } - outputParameters[name] = metadata.IntValue(i) - case pipelinespec.PrimitiveType_DOUBLE: - f, err := strconv.ParseFloat(strings.TrimSpace(outputParamValue), 0) - if err != nil { - return fmt.Errorf("failed to parse parameter name=%q value =%v to double: %w", name, outputParamValue, err) - } - outputParameters[name] = metadata.DoubleValue(f) - default: - return fmt.Errorf("unknown type. Expected STRING, INT or DOUBLE") - } - } - return nil } func collectOutputArtifactMetadataFromCache(ctx context.Context, executorInput *pipelinespec.ExecutorInput, cachedMLMDExecutionID int64, mlmd *metadata.Client) ([]*metadata.OutputArtifact, error) { @@ -391,8 +351,8 @@ func resolveInputs(ctx context.Context, dag *metadata.DAG, task *pipelinespec.Pi } glog.Infof("parent DAG input parameters %+v", inputParams) inputs := &pipelinespec.ExecutorInput_Inputs{ - Parameters: make(map[string]*pipelinespec.Value), - Artifacts: make(map[string]*pipelinespec.ArtifactList), + ParameterValues: make(map[string]*structpb.Value), + Artifacts: make(map[string]*pipelinespec.ArtifactList), } // get executions in context on demand var tasksCache map[string]*metadata.Execution @@ -424,7 +384,7 @@ func resolveInputs(ctx context.Context, dag *metadata.DAG, task *pipelinespec.Pi if !ok { return nil, paramError(fmt.Errorf("parent DAG does not have input parameter %s", componentInput)) } - inputs.Parameters[name] = v + inputs.ParameterValues[name] = v case *pipelinespec.TaskInputsSpec_InputParameterSpec_TaskOutputParameter: taskOutput := paramSpec.GetTaskOutputParameter() @@ -450,10 +410,13 @@ func resolveInputs(ctx context.Context, dag *metadata.DAG, task *pipelinespec.Pi if !ok { return nil, paramError(fmt.Errorf("cannot find output parameter key %q in producer task %q", taskOutput.GetOutputParameterKey(), taskOutput.GetProducerTask())) } - inputs.Parameters[name] = param + inputs.ParameterValues[name] = param case *pipelinespec.TaskInputsSpec_InputParameterSpec_RuntimeValue: runtimeValue := paramSpec.GetRuntimeValue() switch t := runtimeValue.Value.(type) { + case *pipelinespec.ValueOrRuntimeParameter_Constant: + inputs.ParameterValues[name] = runtimeValue.GetConstant() + // TODO(v2): clean up pipelinespec.Value usages case *pipelinespec.ValueOrRuntimeParameter_ConstantValue: inputs.Parameters[name] = runtimeValue.GetConstantValue() default: @@ -516,7 +479,7 @@ func resolveInputs(ctx context.Context, dag *metadata.DAG, task *pipelinespec.Pi func provisionOutputs(pipelineRoot, taskName string, outputsSpec *pipelinespec.ComponentOutputsSpec) *pipelinespec.ExecutorInput_Outputs { outputs := &pipelinespec.ExecutorInput_Outputs{ - Artifacts: make(map[string]*pipelinespec.ArtifactList), + Artifacts: make(map[string]*pipelinespec.ArtifactList), Parameters: make(map[string]*pipelinespec.ExecutorInput_OutputParameter), OutputFile: component.OutputMetadataFilepath, } diff --git a/v2/go.mod b/v2/go.mod index 882479c4a48..02f95fa4e3b 100644 --- a/v2/go.mod +++ b/v2/go.mod @@ -7,18 +7,20 @@ require ( github.com/aws/aws-sdk-go v1.36.1 github.com/ghodss/yaml v1.0.1-0.20190212211648-25d852aebe32 github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b - github.com/golang/protobuf v1.5.0 + github.com/golang/protobuf v1.5.2 github.com/google/go-cmp v0.5.5 github.com/google/uuid v1.1.2 github.com/grpc-ecosystem/go-grpc-middleware v1.3.0 
github.com/kubeflow/pipelines/api v0.0.0-20211020193552-20f28631517d github.com/stretchr/testify v1.7.0 gocloud.dev v0.22.0 - google.golang.org/genproto v0.0.0-20201203001206-6486ece9c497 - google.golang.org/grpc v1.36.0 + google.golang.org/genproto v0.0.0-20211026145609-4688e4c4e024 + google.golang.org/grpc v1.40.0 google.golang.org/protobuf v1.27.1 gopkg.in/yaml.v2 v2.4.0 k8s.io/api v0.20.4 k8s.io/apimachinery v0.21.2 k8s.io/client-go v0.20.4 ) + +replace github.com/kubeflow/pipelines/api => ../api diff --git a/v2/go.sum b/v2/go.sum index 6383d392a69..623eb83b0bc 100644 --- a/v2/go.sum +++ b/v2/go.sum @@ -198,6 +198,7 @@ github.com/cloudfoundry/jibber_jabber v0.0.0-20151120183258-bcc4c8345a21/go.mod github.com/cncf/udpa/go v0.0.0-20191209042840-269d4d468f6f/go.mod h1:M8M6+tZqaGXZJjfX53e64911xZQV5JYwmTeXPW+k8Sc= github.com/cncf/udpa/go v0.0.0-20200629203442-efcf912fb354/go.mod h1:WmhPx2Nbnhtbo57+VJT5O0JRkEi1Wbu0z5j0R8u5Hbk= github.com/cncf/udpa/go v0.0.0-20201120205902-5459f2c99403/go.mod h1:WmhPx2Nbnhtbo57+VJT5O0JRkEi1Wbu0z5j0R8u5Hbk= +github.com/cncf/xds/go v0.0.0-20210312221358-fbca930ec8ed/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= github.com/cockroachdb/datadriven v0.0.0-20190809214429-80d97fb3cbaa/go.mod h1:zn76sxSg3SzpJ0PPJaLDCu+Bu0Lg3sKTORVIj19EIF8= github.com/colinmarc/hdfs v1.1.4-0.20180802165501-48eb8d6c34a9/go.mod h1:0DumPviB681UcSuJErAbDIOx6SIaJWj463TymfZG02I= github.com/colinmarc/hdfs v1.1.4-0.20180805212432-9746310a4d31/go.mod h1:vSBumefK4HA5uiRSwNP+3ofgrEoScpCS2MMWcWXEuQ4= @@ -258,6 +259,7 @@ github.com/envoyproxy/go-control-plane v0.9.1-0.20191026205805-5f8ba28d4473/go.m github.com/envoyproxy/go-control-plane v0.9.4/go.mod h1:6rpuAdCZL397s3pYoYcLgu1mIlRU8Am5FuJP05cCM98= github.com/envoyproxy/go-control-plane v0.9.7/go.mod h1:cwu0lG7PUMfa9snN8LXBig5ynNVH9qI8YYLbd1fK2po= github.com/envoyproxy/go-control-plane v0.9.9-0.20201210154907-fd9021fe5dad/go.mod h1:cXg6YxExXjJnVBQHBLXeUAgxn2UodCpnH306RInaBQk= +github.com/envoyproxy/go-control-plane v0.9.9-0.20210512163311-63b5d3c536b0/go.mod h1:hliV/p42l8fGbc6Y9bQ70uLwIvmJyVE5k4iMKlh8wCQ= github.com/envoyproxy/protoc-gen-validate v0.1.0/go.mod h1:iSmxcyjqTsJpI2R4NaDN7+kN2VEUnK/pcBlmesArF7c= github.com/evanphx/json-patch v4.2.0+incompatible/go.mod h1:50XU6AFN0ol/bzJsmQLiYLvXMP4fmwYFNcr97nuDLSk= github.com/evanphx/json-patch v4.5.0+incompatible/go.mod h1:50XU6AFN0ol/bzJsmQLiYLvXMP4fmwYFNcr97nuDLSk= @@ -451,8 +453,9 @@ github.com/golang/protobuf v1.4.0/go.mod h1:jodUvKwWbYaEsadDk5Fwe5c77LiNKVO9IDvq github.com/golang/protobuf v1.4.1/go.mod h1:U8fpvMrcmy5pZrNK1lt4xCsGvpyWQ/VVv6QDs8UjoX8= github.com/golang/protobuf v1.4.2/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw735rRwI= github.com/golang/protobuf v1.4.3/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw735rRwI= -github.com/golang/protobuf v1.5.0 h1:LUVKkCeviFUMKqHa4tXIIij/lbhnMbP7Fn5wKdKkRh4= github.com/golang/protobuf v1.5.0/go.mod h1:FsONVRAS9T7sI+LIUmWTfcYkHO4aIWwzhcaSAoJOfIk= +github.com/golang/protobuf v1.5.2 h1:ROPKBNFfQgOUMifHyP+KYbvpjbdoFNs+aK7DXlji0Tw= +github.com/golang/protobuf v1.5.2/go.mod h1:XVQd3VNwM+JqD3oG2Ue2ip4fOMUkwXdXDdiuN0vRsmY= github.com/golang/snappy v0.0.1/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q= github.com/google/btree v0.0.0-20180813153112-4030bb1f1f0c/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ= github.com/google/btree v1.0.0/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ= @@ -642,8 +645,6 @@ github.com/kr/pty v1.1.8/go.mod h1:O1sed60cT9XZ5uDucP5qwvh+TE3NnUj51EiZO/lmSfw= github.com/kr/text v0.1.0/go.mod 
h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI= github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY= github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE= -github.com/kubeflow/pipelines/api v0.0.0-20211020193552-20f28631517d h1:Dga88zE1XJZUWDejk7qjZSXjKPYQvMRo8Y1CghdTwQ4= -github.com/kubeflow/pipelines/api v0.0.0-20211020193552-20f28631517d/go.mod h1:ItI8RjFTt0RY6X0g6B3VocSaphuE+DNuNTzAY9NF8EY= github.com/labstack/echo v3.2.1+incompatible/go.mod h1:0INS7j/VjnFxD4E2wkz67b8cVwCLbBmJyDaka6Cmk1s= github.com/labstack/gommon v0.2.7/go.mod h1:/tj9csK2iPSBvn+3NLM9e52usepMtrd5ilFYA+wQNJ4= github.com/leodido/go-urn v1.2.0/go.mod h1:+8+nEpDfqqsY+g338gtMEUOtuK+4dEMhiQEgxpxOKII= @@ -938,6 +939,7 @@ go.opencensus.io v0.22.3/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= go.opencensus.io v0.22.4/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= go.opencensus.io v0.22.5 h1:dntmOdLpSpHlVqbW5Eay97DelsZHe+55D+xC6i0dDS0= go.opencensus.io v0.22.5/go.mod h1:5pWMHQbX5EPX2/62yrJeAkowc+lfs/XD7Uxpq3pI6kk= +go.opentelemetry.io/proto/otlp v0.7.0/go.mod h1:PqfVotwruBrMGOCsRd/89rSnXhoiJIqeYNgFYFoEGnI= go.uber.org/atomic v1.3.2/go.mod h1:gD2HeocX3+yG+ygLZcrzQJaqmWj9AIm7n08wl/qW/PE= go.uber.org/atomic v1.4.0/go.mod h1:gD2HeocX3+yG+ygLZcrzQJaqmWj9AIm7n08wl/qW/PE= go.uber.org/atomic v1.6.0/go.mod h1:sABNBOSYdrvTF6hTgEIbc7YasKWGhgEQZyfxyTvoXHQ= @@ -1073,8 +1075,9 @@ golang.org/x/net v0.0.0-20201110031124-69a78807bb2b/go.mod h1:sp8m0HH+o8qH0wwXwY golang.org/x/net v0.0.0-20201202161906-c7110b5ffcbb/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= golang.org/x/net v0.0.0-20210224082022-3d97a244fca7/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= golang.org/x/net v0.0.0-20210226101413-39120d07d75e/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= -golang.org/x/net v0.0.0-20210226172049-e18ecbb05110 h1:qWPm9rbaAMKs8Bq/9LRpbMqxWRVUAQwMI9fVrssnTfw= golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= +golang.org/x/net v0.0.0-20210405180319-a5a99cb37ef4 h1:4nGaVu0QrbjT/AK2PRLuQfQuh6DJve+pELhqTdAj3x0= +golang.org/x/net v0.0.0-20210405180319-a5a99cb37ef4/go.mod h1:p54w0d4576C0XHj96bSt6lcn1PtDYWL6XObtHCRCNQM= golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= golang.org/x/oauth2 v0.0.0-20181106182150-f42d05182288/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= @@ -1164,8 +1167,10 @@ golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7w golang.org/x/sys v0.0.0-20201201145000-ef89a241ccb3/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20201202213521-69691e467435/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210225134936-a50acf3fe073/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20210426230700-d19ff857e887 h1:dXfMednGJh/SUUFjTLsWJz3P+TQt9qnR11GgeI3vWKs= +golang.org/x/sys v0.0.0-20210330210617-4fbd30eecc44/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210426230700-d19ff857e887/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210510120138-977fb7262007 h1:gG67DSER+11cZvqIMb8S8bt0vZtiN6xWYARwirrOSfE= +golang.org/x/sys v0.0.0-20210510120138-977fb7262007/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/term 
v0.0.0-20201117132131-f5c789dd3221/go.mod h1:Nr5EML6q2oocZ2LXRh80K7BxOlk5/8JxuGnuhpl+muw= golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1 h1:v+OssWQX+hTHEmOBgwxdZxK4zHq3yOs8F9J7mk0PY8E= golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= @@ -1347,8 +1352,9 @@ google.golang.org/genproto v0.0.0-20200914193844-75d14daec038/go.mod h1:FWY/as6D google.golang.org/genproto v0.0.0-20200921151605-7abf4a1a14d5/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= google.golang.org/genproto v0.0.0-20201109203340-2640f1f9cdfb/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= google.golang.org/genproto v0.0.0-20201201144952-b05cb90ed32e/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= -google.golang.org/genproto v0.0.0-20201203001206-6486ece9c497 h1:jDYzwXmX9tLnuG4sL85HPmE1ruErXOopALp2i/0AHnI= google.golang.org/genproto v0.0.0-20201203001206-6486ece9c497/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20211026145609-4688e4c4e024 h1:aePO4E0x+Urj9V5NQHjqOpaNG4oMeHQq0l2ob05z5tI= +google.golang.org/genproto v0.0.0-20211026145609-4688e4c4e024/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c= google.golang.org/grpc v1.20.1/go.mod h1:10oTOabMzJvdu6/UiuZezV6QK5dSlG84ov/aaiqXj38= google.golang.org/grpc v1.21.0/go.mod h1:oYelfM1adQP15Ek0mdvEgi9Df8B9CZIaU1084ijfRaM= @@ -1369,8 +1375,9 @@ google.golang.org/grpc v1.32.0/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM google.golang.org/grpc v1.33.1/go.mod h1:fr5YgcSWrqhRRxogOsw7RzIpsmvOZ6IcH4kBYTpR3n0= google.golang.org/grpc v1.33.2/go.mod h1:JMHMWHQWaTccqQQlmk3MJZS+GWXOdAesneDmEnv2fbc= google.golang.org/grpc v1.34.0/go.mod h1:WotjhfgOW/POjDeRt8vscBtXq+2VjORFy659qA51WJ8= -google.golang.org/grpc v1.36.0 h1:o1bcQ6imQMIOpdrO3SWf2z5RV72WbDwdXuK0MDlc8As= google.golang.org/grpc v1.36.0/go.mod h1:qjiiYl8FncCW8feJPdyg3v6XW24KsRHe+dy9BAGRRjU= +google.golang.org/grpc v1.40.0 h1:AGJ0Ih4mHjSeibYkFGh1dD9KJ/eOtZ93I6hoHhukQ5Q= +google.golang.org/grpc v1.40.0/go.mod h1:ogyxbiOoUXAkP+4+xa6PZSE9DZgIHtSpzjDTB9KAK34= google.golang.org/grpc/examples v0.0.0-20201226181154-53788aa5dcb4/go.mod h1:Ly7ZA/ARzg8fnPU9TyZIxoz33sEUuWX7txiqs8lPTgE= google.golang.org/protobuf v0.0.0-20200109180630-ec00e32a8dfd/go.mod h1:DFci5gLYBciE7Vtevhsrf46CRTquxDuWsQurQQe4oz8= google.golang.org/protobuf v0.0.0-20200221191635-4d8936d0db64/go.mod h1:kwYJMbMJ01Woi6D6+Kah6886xMZcty6N08ah7+eCXa0= @@ -1383,6 +1390,7 @@ google.golang.org/protobuf v1.23.1-0.20200526195155-81db48ad09cc/go.mod h1:EGpAD google.golang.org/protobuf v1.24.0/go.mod h1:r/3tXBNzIEhYS9I1OUVjXDlt8tc493IdKGjtUeSXeh4= google.golang.org/protobuf v1.25.0/go.mod h1:9JNX74DMeImyA3h4bdi1ymwjUzf21/xIlbajtzgsN7c= google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp09yW+WbY/TyQbw= +google.golang.org/protobuf v1.26.0/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc= google.golang.org/protobuf v1.27.1 h1:SnqbnDw1V7RiZcXPx5MEeqPv2s79L9i7BJUlG/+RurQ= google.golang.org/protobuf v1.27.1/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc= gopkg.in/alecthomas/kingpin.v2 v2.2.6/go.mod h1:FMv+mEhP44yOT+4EoQTLFTRgOQ1FBLkstjWtayDeSgw= diff --git a/v2/metadata/client.go b/v2/metadata/client.go index d63fcc235a9..52e31005ddc 100644 --- a/v2/metadata/client.go +++ b/v2/metadata/client.go @@ -36,6 +36,7 @@ import ( "google.golang.org/grpc/status" "google.golang.org/protobuf/encoding/protojson" "google.golang.org/protobuf/proto" + 
"google.golang.org/protobuf/types/known/structpb" "gopkg.in/yaml.v2" ) @@ -98,41 +99,12 @@ func NewClient(serverAddress, serverPort string) (*Client, error) { }, nil } -// Parameters is used to represent input or output parameters (which are scalar -// values) from pipeline components. -type Parameters struct { - IntParameters map[string]int64 - StringParameters map[string]string - DoubleParameters map[string]float64 -} - -func NewParameters(params map[string]*pipelinespec.Value) (*Parameters, error) { - result := &Parameters{ - IntParameters: make(map[string]int64), - StringParameters: make(map[string]string), - DoubleParameters: make(map[string]float64), - } - for name, parameter := range params { - switch t := parameter.Value.(type) { - case *pipelinespec.Value_StringValue: - result.StringParameters[name] = parameter.GetStringValue() - case *pipelinespec.Value_IntValue: - result.IntParameters[name] = parameter.GetIntValue() - case *pipelinespec.Value_DoubleValue: - result.DoubleParameters[name] = parameter.GetDoubleValue() - default: - return nil, fmt.Errorf("failed to convert from map[string]*pipelinespec.Value to metadata.Parameters: unknown parameter type for parameter name=%q: %T", name, t) - } - } - return result, nil -} - // ExecutionConfig represents the input parameters and artifacts to an Execution. type ExecutionConfig struct { - InputParameters *Parameters + InputParameters map[string]*structpb.Value InputArtifactIDs map[string][]int64 TaskName, PodName, PodUID, Namespace, - Image, CachedMLMDExecutionID, ExecutionType , FingerPrint string + Image, CachedMLMDExecutionID, ExecutionType, FingerPrint string // a temporary flag to special case some logic for root DAG IsRootDAG bool } @@ -400,21 +372,21 @@ func getArtifactName(eventPath *pb.Event_Path) (string, error) { // PublishExecution publishes the specified execution with the given output // parameters, artifacts and state. -func (c *Client) PublishExecution(ctx context.Context, execution *Execution, outputParameters *Parameters, outputArtifacts []*OutputArtifact, state pb.Execution_State) error { +func (c *Client) PublishExecution(ctx context.Context, execution *Execution, outputParameters map[string]*structpb.Value, outputArtifacts []*OutputArtifact, state pb.Execution_State) error { e := execution.execution e.LastKnownState = state.Enum() if outputParameters != nil { // Record output parameters. 
- for n, p := range outputParameters.IntParameters { - e.CustomProperties["output:"+n] = intValue(p) - } - for n, p := range outputParameters.DoubleParameters { - e.CustomProperties["output:"+n] = doubleValue(p) + outputs := &pb.Value_StructValue{ + StructValue: &structpb.Struct{ + Fields: make(map[string]*structpb.Value), + }, } - for n, p := range outputParameters.StringParameters { - e.CustomProperties["output:"+n] = stringValue(p) + for n, p := range outputParameters { + outputs.StructValue.Fields[n] = p } + e.CustomProperties[keyOutputs] = &pb.Value{Value: outputs} } contexts := []*pb.Context{} @@ -427,8 +399,7 @@ func (c *Client) PublishExecution(ctx context.Context, execution *Execution, out } for _, oa := range outputArtifacts { - aePair := &pb.PutExecutionRequest_ArtifactAndEvent{ - } + aePair := &pb.PutExecutionRequest_ArtifactAndEvent{} if oa.Artifact.GetId() == 0 { glog.Infof("the id of output artifact is not set, will create new artifact when publishing execution") aePair = &pb.PutExecutionRequest_ArtifactAndEvent{ @@ -456,16 +427,18 @@ func (c *Client) PublishExecution(ctx context.Context, execution *Execution, out // metadata keys const ( - keyDisplayName = "display_name" - keyTaskName = "task_name" - keyImage = "image" - keyPodName = "pod_name" - keyPodUID = "pod_uid" - keyNamespace = "namespace" - keyResourceName = "resource_name" - keyPipelineRoot = "pipeline_root" - keyCacheFingerPrint = "cache_fingerprint" + keyDisplayName = "display_name" + keyTaskName = "task_name" + keyImage = "image" + keyPodName = "pod_name" + keyPodUID = "pod_uid" + keyNamespace = "namespace" + keyResourceName = "resource_name" + keyPipelineRoot = "pipeline_root" + keyCacheFingerPrint = "cache_fingerprint" keyCachedExecutionID = "cached_execution_id" + keyInputs = "inputs" + keyOutputs = "outputs" ) // CreateExecution creates a new MLMD execution under the specified Pipeline. 
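Aside on the two hunks above: instead of one MLMD custom property per parameter with an "input:"/"output:" name prefix, the client now packs the whole parameter map into a single STRUCT-valued property stored under keyInputs or keyOutputs. Below is a minimal sketch of that packing step. It reuses the ml_metadata and structpb types shown in this diff, but the helper name packParameters and the standalone package are illustrative assumptions, not code from this patch.

package main

import (
	pb "github.com/kubeflow/pipelines/v2/third_party/ml_metadata"
	"google.golang.org/protobuf/types/known/structpb"
)

// packParameters (hypothetical helper) stores an entire parameter map as one
// STRUCT-valued custom property; key is expected to be keyInputs ("inputs")
// or keyOutputs ("outputs") as defined in the const block above.
func packParameters(e *pb.Execution, key string, params map[string]*structpb.Value) {
	st := &structpb.Struct{Fields: make(map[string]*structpb.Value, len(params))}
	for name, v := range params {
		st.Fields[name] = v // each value keeps its native structpb representation
	}
	if e.CustomProperties == nil {
		e.CustomProperties = make(map[string]*pb.Value)
	}
	e.CustomProperties[key] = &pb.Value{Value: &pb.Value_StructValue{StructValue: st}}
}

Reading the map back is the mirror image: look up CustomProperties[keyInputs] or CustomProperties[keyOutputs] and walk GetStructValue().GetFields(), which is exactly what GetParameters in v2/metadata/model.go does later in this patch.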
@@ -498,15 +471,15 @@ func (c *Client) CreateExecution(ctx context.Context, pipeline *Pipeline, config } if config.InputParameters != nil { - for k, v := range config.InputParameters.StringParameters { - e.CustomProperties["input:"+k] = stringValue(v) - } - for k, v := range config.InputParameters.IntParameters { - e.CustomProperties["input:"+k] = intValue(v) + inputs := &pb.Value_StructValue{ + StructValue: &structpb.Struct{ + Fields: make(map[string]*structpb.Value), + }, } - for k, v := range config.InputParameters.DoubleParameters { - e.CustomProperties["input:"+k] = doubleValue(v) + for n, p := range config.InputParameters { + inputs.StructValue.Fields[n] = p } + e.CustomProperties[keyInputs] = &pb.Value{Value: inputs} } req := &pb.PutExecutionRequest{ @@ -984,11 +957,7 @@ func GenerateExecutionConfig(executorInput *pipelinespec.ExecutorInput) (*Execut } } - parameters, err := NewParameters(executorInput.Inputs.Parameters) - if err != nil { - return nil, err - } - ecfg.InputParameters = parameters + ecfg.InputParameters = executorInput.Inputs.ParameterValues return ecfg, nil } diff --git a/v2/metadata/converter.go b/v2/metadata/converter.go index f2a537f05ad..f939e798210 100644 --- a/v2/metadata/converter.go +++ b/v2/metadata/converter.go @@ -1,8 +1,10 @@ package metadata import ( + "encoding/json" "fmt" "strconv" + "strings" "github.com/kubeflow/pipelines/api/v2alpha1/go/pipelinespec" pb "github.com/kubeflow/pipelines/v2/third_party/ml_metadata" @@ -41,6 +43,87 @@ func IntValue(v int64) *pipelinespec.Value { } } +func PbValueToText(v *structpb.Value) (string, error) { + wrap := func(err error) error { + return fmt.Errorf("failed to convert structpb.Value to text: %w", err) + } + if v == nil { + return "", nil + } + var text string + switch t := v.Kind.(type) { + case *structpb.Value_StringValue: + text = v.GetStringValue() + case *structpb.Value_NumberValue: + text = strconv.FormatFloat(v.GetNumberValue(), 'f', -1, 64) + case *structpb.Value_BoolValue: + text = strconv.FormatBool(v.GetBoolValue()) + case *structpb.Value_ListValue: + b, err := json.Marshal(v.GetListValue()) + if err != nil { + return "", wrap(fmt.Errorf("failed to JSON-marshal a list: %w", err)) + } + text = string(b) + case *structpb.Value_StructValue: + b, err := json.Marshal(v.GetStructValue()) + if err != nil { + return "", wrap(fmt.Errorf("failed to JSON-marshal a struct: %w", err)) + } + text = string(b) + default: + return "", wrap(fmt.Errorf("unknown type %T", t)) + } + return text, nil +} + +func TextToPbValue(text string, t pipelinespec.ParameterType_ParameterTypeEnum) (*structpb.Value, error) { + msg := func(err error) error { + return fmt.Errorf("textToPbValue(text=%q, t=%q) failed: %w", text, t, err) + } + switch t { + case pipelinespec.ParameterType_STRING: + return structpb.NewStringValue(text), nil + case pipelinespec.ParameterType_NUMBER_INTEGER: + i, err := strconv.ParseInt(strings.TrimSpace(text), 10, 0) + if err != nil { + return nil, msg(err) + } + return structpb.NewNumberValue(float64(i)), nil + case pipelinespec.ParameterType_NUMBER_DOUBLE: + f, err := strconv.ParseFloat(strings.TrimSpace(text), 0) + if err != nil { + return nil, msg(err) + } + return structpb.NewNumberValue(f), nil + case pipelinespec.ParameterType_BOOLEAN: + v, err := strconv.ParseBool(strings.TrimSpace(text)) + if err != nil { + return nil, msg(err) + } + return structpb.NewBoolValue(v), nil + case pipelinespec.ParameterType_LIST: + v := &structpb.Value{} + if err := v.UnmarshalJSON([]byte(text)); err != nil { + return nil, 
msg(err) + } + if _, ok := v.GetKind().(*structpb.Value_ListValue); !ok { + return nil, msg(fmt.Errorf("unexpected type")) + } + return v, nil + case pipelinespec.ParameterType_STRUCT: + v := &structpb.Value{} + if err := v.UnmarshalJSON([]byte(text)); err != nil { + return nil, msg(err) + } + if _, ok := v.GetKind().(*structpb.Value_StructValue); !ok { + return nil, msg(fmt.Errorf("unexpected type")) + } + return v, nil + default: + return nil, msg(fmt.Errorf("unknown type. Expected STRING, NUMBER_INTEGER, NUMBER_DOUBLE, BOOLEAN, LIST or STRUCT")) + } +} + func pipelineSpecValueToMLMDValue(v *pipelinespec.Value) (*pb.Value, error) { switch t := v.Value.(type) { case *pipelinespec.Value_StringValue: @@ -173,5 +256,3 @@ func toRuntimeArtifact(artifact *pb.Artifact) (*pipelinespec.RuntimeArtifact, er return rta, nil } - - diff --git a/v2/metadata/model.go b/v2/metadata/model.go index 62fdd2ab854..cf4989ce0ee 100644 --- a/v2/metadata/model.go +++ b/v2/metadata/model.go @@ -18,19 +18,20 @@ package metadata import ( "fmt" - "strings" - "github.com/kubeflow/pipelines/api/v2alpha1/go/pipelinespec" + pb "github.com/kubeflow/pipelines/v2/third_party/ml_metadata" + "google.golang.org/protobuf/types/known/structpb" ) -const ( - prefixInput = "input:" - prefixOutput = "output:" -) +// A hacky way to get Execution from pb.Execution, usually you should get +// an Execution from this metadata package directly without using ml_metadata.Execution +func NewExecution(e *pb.Execution) *Execution { + return &Execution{execution: e} +} -func (e *Execution) GetParameters() (inputs, outputs map[string]*pipelinespec.Value, err error) { - inputs = make(map[string]*pipelinespec.Value) - outputs = make(map[string]*pipelinespec.Value) +func (e *Execution) GetParameters() (inputs, outputs map[string]*structpb.Value, err error) { + inputs = make(map[string]*structpb.Value) + outputs = make(map[string]*structpb.Value) defer func() { if err != nil { err = fmt.Errorf("execution(ID=%v).GetParameters failed: %w", e.GetID(), err) @@ -39,21 +40,14 @@ func (e *Execution) GetParameters() (inputs, outputs map[string]*pipelinespec.Va if e == nil || e.execution == nil { return nil, nil, nil } - for key, value := range e.execution.CustomProperties { - if strings.HasPrefix(key, prefixInput) { - name := strings.TrimPrefix(key, prefixInput) - kfpValue, err := mlmdValueToPipelineSpecValue(value) - if err != nil { - return nil, nil, err - } - inputs[name] = kfpValue - } else if strings.HasPrefix(key, prefixOutput) { - name := strings.TrimPrefix(key, prefixOutput) - kfpValue, err := mlmdValueToPipelineSpecValue(value) - if err != nil { - return nil, nil, err - } - outputs[name] = kfpValue + if stored_inputs, ok := e.execution.CustomProperties[keyInputs]; ok { + for name, value := range stored_inputs.GetStructValue().GetFields() { + inputs[name] = value + } + } + if stored_outputs, ok := e.execution.CustomProperties[keyOutputs]; ok { + for name, value := range stored_outputs.GetStructValue().GetFields() { + outputs[name] = value } } return inputs, outputs, nil diff --git a/v2/test/Makefile b/v2/test/Makefile index fb739dc6cc4..1aac77773e6 100644 --- a/v2/test/Makefile +++ b/v2/test/Makefile @@ -21,7 +21,7 @@ sample-test: context --host $(HOST) \ --gcs_root $(GCS_ROOT)/data \ --gcr_root $(GCR_ROOT) \ - --kfp_package_path $(KFP_PACKAGE_PATH) + --kfp_package_path "$(KFP_PACKAGE_PATH)" .PHONY: context context: From 4cf34e2d0890f144df519fc335a3c50fbecdb146 Mon Sep 17 00:00:00 2001 From: James Liu <37026441+zijianjoy@users.noreply.github.com> 
Date: Thu, 28 Oct 2021 15:17:51 -0700 Subject: [PATCH 23/31] feat: upgrade argo to v3.1.14 (#6809) * feat: upgrade argo to v3.1.14 * go mod tidy * fix license versions --- .cloudbuild.yaml | 4 ++-- .release.cloudbuild.yaml | 20 +++++++++---------- backend/Dockerfile | 2 +- backend/third_party_licenses/apiserver.csv | 2 +- backend/third_party_licenses/cache_server.csv | 2 +- .../persistence_agent.csv | 2 +- backend/third_party_licenses/swf.csv | 2 +- go.mod | 2 +- go.sum | 4 ++-- .../kubeflow-pipelines/templates/argo.yaml | 6 +++--- .../gcp_marketplace/test/snapshot-base.yaml | 6 +++--- .../test/snapshot-emissary.yaml | 6 +++--- ...apshot-managed-storage-with-db-prefix.yaml | 6 +++--- .../test/snapshot-managed-storage.yaml | 6 +++--- .../workflow-controller-configmap-patch.yaml | 6 +++--- .../workflow-controller-configmap-patch.yaml | 6 +++--- .../workflow-controller-configmap-patch.yaml | 6 +++--- .../workflow-controller-configmap-patch.yaml | 6 +++--- .../workflow-controller-deployment-patch.yaml | 4 ++-- .../argo/upstream/manifests/Kptfile | 4 ++-- test/install-argo-cli.sh | 4 ++-- test/tag_for_hosted.sh | 8 ++++---- third_party/argo/Dockerfile.argoexec | 4 ++-- .../argo/Dockerfile.workflow-controller | 4 ++-- third_party/argo/README.md | 2 +- third_party/argo/VERSION | 2 +- third_party/argo/go-licenses.yaml | 2 +- third_party/argo/imp-1-update-notices.sh | 2 +- third_party/argo/licenses-argoexec.csv | 2 +- .../argo/licenses-workflow-controller.csv | 2 +- 30 files changed, 67 insertions(+), 67 deletions(-) diff --git a/.cloudbuild.yaml b/.cloudbuild.yaml index 6b9b0ce5a57..746ecde8c13 100644 --- a/.cloudbuild.yaml +++ b/.cloudbuild.yaml @@ -192,10 +192,10 @@ steps: args: ['pull', 'gcr.io/cloudsql-docker/gce-proxy:1.14'] id: 'pullCloudsqlProxy' - name: 'gcr.io/cloud-builders/docker' - args: ['pull', 'gcr.io/ml-pipeline/argoexec:v3.1.6-patch-license-compliance'] + args: ['pull', 'gcr.io/ml-pipeline/argoexec:v3.1.14-license-compliance'] id: 'pullArgoExecutor' - name: 'gcr.io/cloud-builders/docker' - args: ['pull', 'gcr.io/ml-pipeline/workflow-controller:v3.1.6-patch-license-compliance'] + args: ['pull', 'gcr.io/ml-pipeline/workflow-controller:v3.1.14-license-compliance'] id: 'pullArgoWorkflowController' # Tag for Hosted - SemVersion to Major.Minor parsing diff --git a/.release.cloudbuild.yaml b/.release.cloudbuild.yaml index 6b77611ffe9..7afbd310b9c 100644 --- a/.release.cloudbuild.yaml +++ b/.release.cloudbuild.yaml @@ -478,14 +478,14 @@ steps: docker push gcr.io/ml-pipeline/google/pipelines-test/cloudsqlproxy:$(cat /workspace/mm.ver) - name: 'gcr.io/cloud-builders/docker' - args: ['pull', 'gcr.io/ml-pipeline/argoexec:v3.1.6-patch-license-compliance'] + args: ['pull', 'gcr.io/ml-pipeline/argoexec:v3.1.14-license-compliance'] id: 'pullArgoExecutor' - name: 'gcr.io/cloud-builders/docker' - args: ['tag', 'gcr.io/ml-pipeline/argoexec:v3.1.6-patch-license-compliance', 'gcr.io/ml-pipeline/google/pipelines/argoexecutor:$TAG_NAME'] + args: ['tag', 'gcr.io/ml-pipeline/argoexec:v3.1.14-license-compliance', 'gcr.io/ml-pipeline/google/pipelines/argoexecutor:$TAG_NAME'] id: 'tagArgoExecutorForMarketplace' waitFor: ['pullArgoExecutor'] - name: 'gcr.io/cloud-builders/docker' - args: ['tag', 'gcr.io/ml-pipeline/argoexec:v3.1.6-patch-license-compliance', 'gcr.io/ml-pipeline/google/pipelines-test/argoexecutor:$TAG_NAME'] + args: ['tag', 'gcr.io/ml-pipeline/argoexec:v3.1.14-license-compliance', 'gcr.io/ml-pipeline/google/pipelines-test/argoexecutor:$TAG_NAME'] id: 'tagArgoExecutorForMarketplaceTest' waitFor: 
['pullArgoExecutor'] - id: 'tagArgoExecutorForMarketplaceMajorMinor' @@ -495,20 +495,20 @@ steps: args: - -ceux - | - docker tag gcr.io/ml-pipeline/argoexec:v3.1.6-patch-license-compliance gcr.io/ml-pipeline/google/pipelines/argoexecutor:$(cat /workspace/mm.ver) - docker tag gcr.io/ml-pipeline/argoexec:v3.1.6-patch-license-compliance gcr.io/ml-pipeline/google/pipelines-test/argoexecutor:$(cat /workspace/mm.ver) + docker tag gcr.io/ml-pipeline/argoexec:v3.1.14-license-compliance gcr.io/ml-pipeline/google/pipelines/argoexecutor:$(cat /workspace/mm.ver) + docker tag gcr.io/ml-pipeline/argoexec:v3.1.14-license-compliance gcr.io/ml-pipeline/google/pipelines-test/argoexecutor:$(cat /workspace/mm.ver) docker push gcr.io/ml-pipeline/google/pipelines/argoexecutor:$(cat /workspace/mm.ver) docker push gcr.io/ml-pipeline/google/pipelines-test/argoexecutor:$(cat /workspace/mm.ver) - name: 'gcr.io/cloud-builders/docker' - args: ['pull', 'gcr.io/ml-pipeline/workflow-controller:v3.1.6-patch-license-compliance'] + args: ['pull', 'gcr.io/ml-pipeline/workflow-controller:v3.1.14-license-compliance'] id: 'pullArgoWorkflowController' - name: 'gcr.io/cloud-builders/docker' - args: ['tag', 'gcr.io/ml-pipeline/workflow-controller:v3.1.6-patch-license-compliance', 'gcr.io/ml-pipeline/google/pipelines/argoworkflowcontroller:$TAG_NAME'] + args: ['tag', 'gcr.io/ml-pipeline/workflow-controller:v3.1.14-license-compliance', 'gcr.io/ml-pipeline/google/pipelines/argoworkflowcontroller:$TAG_NAME'] id: 'tagArgoWorkflowControllerForMarketplace' waitFor: ['pullArgoWorkflowController'] - name: 'gcr.io/cloud-builders/docker' - args: ['tag', 'gcr.io/ml-pipeline/workflow-controller:v3.1.6-patch-license-compliance', 'gcr.io/ml-pipeline/google/pipelines-test/argoworkflowcontroller:$TAG_NAME'] + args: ['tag', 'gcr.io/ml-pipeline/workflow-controller:v3.1.14-license-compliance', 'gcr.io/ml-pipeline/google/pipelines-test/argoworkflowcontroller:$TAG_NAME'] id: 'tagArgoWorkflowControllerForMarketplaceTest' waitFor: ['pullArgoWorkflowController'] - id: 'tagArgoWorkflowControllerForMarketplaceMajorMinor' @@ -518,8 +518,8 @@ steps: args: - -ceux - | - docker tag gcr.io/ml-pipeline/workflow-controller:v3.1.6-patch-license-compliance gcr.io/ml-pipeline/google/pipelines/argoworkflowcontroller:$(cat /workspace/mm.ver) - docker tag gcr.io/ml-pipeline/workflow-controller:v3.1.6-patch-license-compliance gcr.io/ml-pipeline/google/pipelines-test/argoworkflowcontroller:$(cat /workspace/mm.ver) + docker tag gcr.io/ml-pipeline/workflow-controller:v3.1.14-license-compliance gcr.io/ml-pipeline/google/pipelines/argoworkflowcontroller:$(cat /workspace/mm.ver) + docker tag gcr.io/ml-pipeline/workflow-controller:v3.1.14-license-compliance gcr.io/ml-pipeline/google/pipelines-test/argoworkflowcontroller:$(cat /workspace/mm.ver) docker push gcr.io/ml-pipeline/google/pipelines/argoworkflowcontroller:$(cat /workspace/mm.ver) docker push gcr.io/ml-pipeline/google/pipelines-test/argoworkflowcontroller:$(cat /workspace/mm.ver) diff --git a/backend/Dockerfile b/backend/Dockerfile index 30bbe940ef6..ff38619ea52 100644 --- a/backend/Dockerfile +++ b/backend/Dockerfile @@ -33,7 +33,7 @@ COPY backend/requirements.txt . 
RUN python3 -m pip install -r requirements.txt --no-cache-dir # Downloading Argo CLI so that the samples are validated -ENV ARGO_VERSION v3.1.6 +ENV ARGO_VERSION v3.1.14 RUN curl -sLO https://github.com/argoproj/argo-workflows/releases/download/${ARGO_VERSION}/argo-linux-amd64.gz && \ gunzip argo-linux-amd64.gz && \ chmod +x argo-linux-amd64 && \ diff --git a/backend/third_party_licenses/apiserver.csv b/backend/third_party_licenses/apiserver.csv index fc0737256b1..492f5f67004 100644 --- a/backend/third_party_licenses/apiserver.csv +++ b/backend/third_party_licenses/apiserver.csv @@ -9,7 +9,7 @@ github.com/PuerkitoBio/purell, https://github.com/PuerkitoBio/purell/blob/v1.1.1 github.com/PuerkitoBio/urlesc, https://github.com/PuerkitoBio/urlesc/blob/de5bf2ad4578/LICENSE, BSD-3-Clause github.com/VividCortex/mysqlerr, https://github.com/VividCortex/mysqlerr/blob/6c6b55f8796f/LICENSE, MIT github.com/antonmedv/expr, https://github.com/antonmedv/expr/blob/v1.8.8/LICENSE, MIT -github.com/argoproj/argo-workflows/v3, https://github.com/argoproj/argo-workflows/blob/v3.1.6/LICENSE, Apache-2.0 +github.com/argoproj/argo-workflows/v3, https://github.com/argoproj/argo-workflows/blob/v3.1.14/LICENSE, Apache-2.0 github.com/argoproj/pkg, https://github.com/argoproj/pkg/blob/v0.10.1/LICENSE, Apache-2.0 github.com/asaskevich/govalidator, https://github.com/asaskevich/govalidator/blob/7a23bdc65eef/LICENSE, MIT github.com/beorn7/perks, https://github.com/beorn7/perks/blob/v1.0.1/LICENSE, MIT diff --git a/backend/third_party_licenses/cache_server.csv b/backend/third_party_licenses/cache_server.csv index def541f04a1..f9b11ebbae8 100644 --- a/backend/third_party_licenses/cache_server.csv +++ b/backend/third_party_licenses/cache_server.csv @@ -7,7 +7,7 @@ github.com/Masterminds/sprig, https://github.com/Masterminds/sprig/blob/v2.22.0/ github.com/PuerkitoBio/purell, https://github.com/PuerkitoBio/purell/blob/v1.1.1/LICENSE, BSD-3-Clause github.com/PuerkitoBio/urlesc, https://github.com/PuerkitoBio/urlesc/blob/de5bf2ad4578/LICENSE, BSD-3-Clause github.com/antonmedv/expr, https://github.com/antonmedv/expr/blob/v1.8.8/LICENSE, MIT -github.com/argoproj/argo-workflows/v3, https://github.com/argoproj/argo-workflows/blob/v3.1.6/LICENSE, Apache-2.0 +github.com/argoproj/argo-workflows/v3, https://github.com/argoproj/argo-workflows/blob/v3.1.14/LICENSE, Apache-2.0 github.com/argoproj/pkg, https://github.com/argoproj/pkg/blob/v0.10.1/LICENSE, Apache-2.0 github.com/asaskevich/govalidator, https://github.com/asaskevich/govalidator/blob/7a23bdc65eef/LICENSE, MIT github.com/beorn7/perks, https://github.com/beorn7/perks/blob/v1.0.1/LICENSE, MIT diff --git a/backend/third_party_licenses/persistence_agent.csv b/backend/third_party_licenses/persistence_agent.csv index 889f02c2d93..a9dbcae0b54 100644 --- a/backend/third_party_licenses/persistence_agent.csv +++ b/backend/third_party_licenses/persistence_agent.csv @@ -9,7 +9,7 @@ github.com/Masterminds/sprig, https://github.com/Masterminds/sprig/blob/v2.22.0/ github.com/PuerkitoBio/purell, https://github.com/PuerkitoBio/purell/blob/v1.1.1/LICENSE, BSD-3-Clause github.com/PuerkitoBio/urlesc, https://github.com/PuerkitoBio/urlesc/blob/de5bf2ad4578/LICENSE, BSD-3-Clause github.com/antonmedv/expr, https://github.com/antonmedv/expr/blob/v1.8.8/LICENSE, MIT -github.com/argoproj/argo-workflows/v3, https://github.com/argoproj/argo-workflows/blob/v3.1.6/LICENSE, Apache-2.0 +github.com/argoproj/argo-workflows/v3, https://github.com/argoproj/argo-workflows/blob/v3.1.14/LICENSE, Apache-2.0 
github.com/argoproj/pkg, https://github.com/argoproj/pkg/blob/v0.10.1/LICENSE, Apache-2.0 github.com/asaskevich/govalidator, https://github.com/asaskevich/govalidator/blob/7a23bdc65eef/LICENSE, MIT github.com/beorn7/perks, https://github.com/beorn7/perks/blob/v1.0.1/LICENSE, MIT diff --git a/backend/third_party_licenses/swf.csv b/backend/third_party_licenses/swf.csv index 8063554eda3..9a2b34bfbfc 100644 --- a/backend/third_party_licenses/swf.csv +++ b/backend/third_party_licenses/swf.csv @@ -9,7 +9,7 @@ github.com/Masterminds/sprig, https://github.com/Masterminds/sprig/blob/v2.22.0/ github.com/PuerkitoBio/purell, https://github.com/PuerkitoBio/purell/blob/v1.1.1/LICENSE, BSD-3-Clause github.com/PuerkitoBio/urlesc, https://github.com/PuerkitoBio/urlesc/blob/de5bf2ad4578/LICENSE, BSD-3-Clause github.com/antonmedv/expr, https://github.com/antonmedv/expr/blob/v1.8.8/LICENSE, MIT -github.com/argoproj/argo-workflows/v3, https://github.com/argoproj/argo-workflows/blob/v3.1.6/LICENSE, Apache-2.0 +github.com/argoproj/argo-workflows/v3, https://github.com/argoproj/argo-workflows/blob/v3.1.14/LICENSE, Apache-2.0 github.com/argoproj/pkg, https://github.com/argoproj/pkg/blob/v0.10.1/LICENSE, Apache-2.0 github.com/asaskevich/govalidator, https://github.com/asaskevich/govalidator/blob/7a23bdc65eef/LICENSE, MIT github.com/beorn7/perks, https://github.com/beorn7/perks/blob/v1.0.1/LICENSE, MIT diff --git a/go.mod b/go.mod index c05230a4077..69e68931501 100644 --- a/go.mod +++ b/go.mod @@ -3,7 +3,7 @@ module github.com/kubeflow/pipelines require ( github.com/Masterminds/squirrel v0.0.0-20190107164353-fa735ea14f09 github.com/VividCortex/mysqlerr v0.0.0-20170204212430-6c6b55f8796f - github.com/argoproj/argo-workflows/v3 v3.1.6 + github.com/argoproj/argo-workflows/v3 v3.1.14 github.com/cenkalti/backoff v2.2.1+incompatible github.com/denisenkom/go-mssqldb v0.0.0-20181014144952-4e0d7dc8888f // indirect github.com/eapache/go-resiliency v1.2.0 diff --git a/go.sum b/go.sum index 350c2490d17..386336ab954 100644 --- a/go.sum +++ b/go.sum @@ -127,8 +127,8 @@ github.com/apache/thrift v0.13.0/go.mod h1:cp2SuWMxlEZw2r+iP2GNCdIi4C1qmUzdZFSVb github.com/ardielle/ardielle-go v1.5.2/go.mod h1:I4hy1n795cUhaVt/ojz83SNVCYIGsAFAONtv2Dr7HUI= github.com/ardielle/ardielle-tools v1.5.4/go.mod h1:oZN+JRMnqGiIhrzkRN9l26Cej9dEx4jeNG6A+AdkShk= github.com/argoproj/argo-events v1.4.0/go.mod h1:wI5A0U3Wj9ZvfPn3ioL18Dz29+7aibtlyU9pS0Ry+bg= -github.com/argoproj/argo-workflows/v3 v3.1.6 h1:axha5XDjvJ7zjtUg/Zq+4KGyglFA5HPhQPGl4nnsGAA= -github.com/argoproj/argo-workflows/v3 v3.1.6/go.mod h1:AOj9yCLSNPCCxEF/PT+0dMZCDBDWIGX6EL6PPvqTyMc= +github.com/argoproj/argo-workflows/v3 v3.1.14 h1:JTcCK2el7sTWfvbDJw+hcZ/1sCa5igPq6AxIodv7egw= +github.com/argoproj/argo-workflows/v3 v3.1.14/go.mod h1:AOj9yCLSNPCCxEF/PT+0dMZCDBDWIGX6EL6PPvqTyMc= github.com/argoproj/pkg v0.9.0/go.mod h1:ra+bQPmbVAoEL+gYSKesuigt4m49i3Qa3mE/xQcjCiA= github.com/argoproj/pkg v0.10.1 h1:B7y7IqEFKNaNGg82U0COeVe/V5uj4Dum027yFe5DxRU= github.com/argoproj/pkg v0.10.1/go.mod h1:ra+bQPmbVAoEL+gYSKesuigt4m49i3Qa3mE/xQcjCiA= diff --git a/manifests/gcp_marketplace/chart/kubeflow-pipelines/templates/argo.yaml b/manifests/gcp_marketplace/chart/kubeflow-pipelines/templates/argo.yaml index 742ab0e1202..6c592dbb6dc 100644 --- a/manifests/gcp_marketplace/chart/kubeflow-pipelines/templates/argo.yaml +++ b/manifests/gcp_marketplace/chart/kubeflow-pipelines/templates/argo.yaml @@ -341,9 +341,9 @@ subjects: apiVersion: v1 data: # References: - # * 
https://github.com/argoproj/argo-workflows/blob/v3.1.6/config/config.go - # * https://github.com/argoproj/argo-workflows/blob/v3.1.6/docs/workflow-controller-configmap.md - # * https://github.com/argoproj/argo-workflows/blob/v3.1.6/docs/workflow-controller-configmap.yaml + # * https://github.com/argoproj/argo-workflows/blob/v3.1.14/config/config.go + # * https://github.com/argoproj/argo-workflows/blob/v3.1.14/docs/workflow-controller-configmap.md + # * https://github.com/argoproj/argo-workflows/blob/v3.1.14/docs/workflow-controller-configmap.yaml # emissary executor is a more portable default, see https://github.com/kubeflow/pipelines/issues/1654. containerRuntimeExecutor: '{{ if .Values.executor.emissary }}emissary{{ else }}docker{{ end }}' diff --git a/manifests/gcp_marketplace/test/snapshot-base.yaml b/manifests/gcp_marketplace/test/snapshot-base.yaml index 21220a38a76..09d1d32ff68 100644 --- a/manifests/gcp_marketplace/test/snapshot-base.yaml +++ b/manifests/gcp_marketplace/test/snapshot-base.yaml @@ -826,9 +826,9 @@ subjects: apiVersion: v1 data: # References: - # * https://github.com/argoproj/argo-workflows/blob/v3.1.6/config/config.go - # * https://github.com/argoproj/argo-workflows/blob/v3.1.6/docs/workflow-controller-configmap.md - # * https://github.com/argoproj/argo-workflows/blob/v3.1.6/docs/workflow-controller-configmap.yaml + # * https://github.com/argoproj/argo-workflows/blob/v3.1.14/config/config.go + # * https://github.com/argoproj/argo-workflows/blob/v3.1.14/docs/workflow-controller-configmap.md + # * https://github.com/argoproj/argo-workflows/blob/v3.1.14/docs/workflow-controller-configmap.yaml # emissary executor is a more portable default, see https://github.com/kubeflow/pipelines/issues/1654. containerRuntimeExecutor: 'docker' diff --git a/manifests/gcp_marketplace/test/snapshot-emissary.yaml b/manifests/gcp_marketplace/test/snapshot-emissary.yaml index a88a9c1d9c2..05b3de79a98 100644 --- a/manifests/gcp_marketplace/test/snapshot-emissary.yaml +++ b/manifests/gcp_marketplace/test/snapshot-emissary.yaml @@ -826,9 +826,9 @@ subjects: apiVersion: v1 data: # References: - # * https://github.com/argoproj/argo-workflows/blob/v3.1.6/config/config.go - # * https://github.com/argoproj/argo-workflows/blob/v3.1.6/docs/workflow-controller-configmap.md - # * https://github.com/argoproj/argo-workflows/blob/v3.1.6/docs/workflow-controller-configmap.yaml + # * https://github.com/argoproj/argo-workflows/blob/v3.1.14/config/config.go + # * https://github.com/argoproj/argo-workflows/blob/v3.1.14/docs/workflow-controller-configmap.md + # * https://github.com/argoproj/argo-workflows/blob/v3.1.14/docs/workflow-controller-configmap.yaml # emissary executor is a more portable default, see https://github.com/kubeflow/pipelines/issues/1654. 
containerRuntimeExecutor: 'emissary' diff --git a/manifests/gcp_marketplace/test/snapshot-managed-storage-with-db-prefix.yaml b/manifests/gcp_marketplace/test/snapshot-managed-storage-with-db-prefix.yaml index 653055a5fb3..3dfc448d688 100644 --- a/manifests/gcp_marketplace/test/snapshot-managed-storage-with-db-prefix.yaml +++ b/manifests/gcp_marketplace/test/snapshot-managed-storage-with-db-prefix.yaml @@ -841,9 +841,9 @@ subjects: apiVersion: v1 data: # References: - # * https://github.com/argoproj/argo-workflows/blob/v3.1.6/config/config.go - # * https://github.com/argoproj/argo-workflows/blob/v3.1.6/docs/workflow-controller-configmap.md - # * https://github.com/argoproj/argo-workflows/blob/v3.1.6/docs/workflow-controller-configmap.yaml + # * https://github.com/argoproj/argo-workflows/blob/v3.1.14/config/config.go + # * https://github.com/argoproj/argo-workflows/blob/v3.1.14/docs/workflow-controller-configmap.md + # * https://github.com/argoproj/argo-workflows/blob/v3.1.14/docs/workflow-controller-configmap.yaml # emissary executor is a more portable default, see https://github.com/kubeflow/pipelines/issues/1654. containerRuntimeExecutor: 'docker' diff --git a/manifests/gcp_marketplace/test/snapshot-managed-storage.yaml b/manifests/gcp_marketplace/test/snapshot-managed-storage.yaml index 6aeaf12edea..df29961e227 100644 --- a/manifests/gcp_marketplace/test/snapshot-managed-storage.yaml +++ b/manifests/gcp_marketplace/test/snapshot-managed-storage.yaml @@ -841,9 +841,9 @@ subjects: apiVersion: v1 data: # References: - # * https://github.com/argoproj/argo-workflows/blob/v3.1.6/config/config.go - # * https://github.com/argoproj/argo-workflows/blob/v3.1.6/docs/workflow-controller-configmap.md - # * https://github.com/argoproj/argo-workflows/blob/v3.1.6/docs/workflow-controller-configmap.yaml + # * https://github.com/argoproj/argo-workflows/blob/v3.1.14/config/config.go + # * https://github.com/argoproj/argo-workflows/blob/v3.1.14/docs/workflow-controller-configmap.md + # * https://github.com/argoproj/argo-workflows/blob/v3.1.14/docs/workflow-controller-configmap.yaml # emissary executor is a more portable default, see https://github.com/kubeflow/pipelines/issues/1654. 
containerRuntimeExecutor: 'docker' diff --git a/manifests/kustomize/env/platform-agnostic-multi-user-emissary/workflow-controller-configmap-patch.yaml b/manifests/kustomize/env/platform-agnostic-multi-user-emissary/workflow-controller-configmap-patch.yaml index 24303c0081d..7af58d85276 100644 --- a/manifests/kustomize/env/platform-agnostic-multi-user-emissary/workflow-controller-configmap-patch.yaml +++ b/manifests/kustomize/env/platform-agnostic-multi-user-emissary/workflow-controller-configmap-patch.yaml @@ -4,9 +4,9 @@ metadata: name: workflow-controller-configmap data: # References: - # * https://github.com/argoproj/argo-workflows/blob/v3.1.6/config/config.go - # * https://github.com/argoproj/argo-workflows/blob/v3.1.6/docs/workflow-controller-configmap.md - # * https://github.com/argoproj/argo-workflows/blob/v3.1.6/docs/workflow-controller-configmap.yaml + # * https://github.com/argoproj/argo-workflows/blob/v3.1.14/config/config.go + # * https://github.com/argoproj/argo-workflows/blob/v3.1.14/docs/workflow-controller-configmap.md + # * https://github.com/argoproj/argo-workflows/blob/v3.1.14/docs/workflow-controller-configmap.yaml # Emissary Executor: https://argoproj.github.io/argo-workflows/workflow-executors/#emissary-emissary containerRuntimeExecutor: emissary diff --git a/manifests/kustomize/env/platform-agnostic-multi-user-pns/workflow-controller-configmap-patch.yaml b/manifests/kustomize/env/platform-agnostic-multi-user-pns/workflow-controller-configmap-patch.yaml index 6da9a704ae8..89650130098 100644 --- a/manifests/kustomize/env/platform-agnostic-multi-user-pns/workflow-controller-configmap-patch.yaml +++ b/manifests/kustomize/env/platform-agnostic-multi-user-pns/workflow-controller-configmap-patch.yaml @@ -4,9 +4,9 @@ metadata: name: workflow-controller-configmap data: # References: - # * https://github.com/argoproj/argo-workflows/blob/v3.1.6/config/config.go - # * https://github.com/argoproj/argo-workflows/blob/v3.1.6/docs/workflow-controller-configmap.md - # * https://github.com/argoproj/argo-workflows/blob/v3.1.6/docs/workflow-controller-configmap.yaml + # * https://github.com/argoproj/argo-workflows/blob/v3.1.14/config/config.go + # * https://github.com/argoproj/argo-workflows/blob/v3.1.14/docs/workflow-controller-configmap.md + # * https://github.com/argoproj/argo-workflows/blob/v3.1.14/docs/workflow-controller-configmap.yaml # pns executor is a more portable default, see https://github.com/kubeflow/pipelines/issues/1654. 
containerRuntimeExecutor: pns diff --git a/manifests/kustomize/env/platform-agnostic-pns/workflow-controller-configmap-patch.yaml b/manifests/kustomize/env/platform-agnostic-pns/workflow-controller-configmap-patch.yaml index 6d83453f7c3..0a2ee2fab9c 100644 --- a/manifests/kustomize/env/platform-agnostic-pns/workflow-controller-configmap-patch.yaml +++ b/manifests/kustomize/env/platform-agnostic-pns/workflow-controller-configmap-patch.yaml @@ -4,9 +4,9 @@ metadata: name: workflow-controller-configmap data: # References: - # * https://github.com/argoproj/argo-workflows/blob/v3.1.6/config/config.go - # * https://github.com/argoproj/argo-workflows/blob/v3.1.6/docs/workflow-controller-configmap.md - # * https://github.com/argoproj/argo-workflows/blob/v3.1.6/docs/workflow-controller-configmap.yaml + # * https://github.com/argoproj/argo-workflows/blob/v3.1.14/config/config.go + # * https://github.com/argoproj/argo-workflows/blob/v3.1.14/docs/workflow-controller-configmap.md + # * https://github.com/argoproj/argo-workflows/blob/v3.1.14/docs/workflow-controller-configmap.yaml # pns executor is a more portable default, see https://github.com/kubeflow/pipelines/issues/1654. # However, it is flaky for containers that run really fast, see https://github.com/kubeflow/pipelines/issues/5285. diff --git a/manifests/kustomize/third-party/argo/base/workflow-controller-configmap-patch.yaml b/manifests/kustomize/third-party/argo/base/workflow-controller-configmap-patch.yaml index 51962aa60e4..67bb79c0a1d 100644 --- a/manifests/kustomize/third-party/argo/base/workflow-controller-configmap-patch.yaml +++ b/manifests/kustomize/third-party/argo/base/workflow-controller-configmap-patch.yaml @@ -4,9 +4,9 @@ metadata: name: workflow-controller-configmap data: # References: - # * https://github.com/argoproj/argo-workflows/blob/v3.1.6/config/config.go - # * https://github.com/argoproj/argo-workflows/blob/v3.1.6/docs/workflow-controller-configmap.md - # * https://github.com/argoproj/argo-workflows/blob/v3.1.6/docs/workflow-controller-configmap.yaml + # * https://github.com/argoproj/argo-workflows/blob/v3.1.14/config/config.go + # * https://github.com/argoproj/argo-workflows/blob/v3.1.14/docs/workflow-controller-configmap.md + # * https://github.com/argoproj/argo-workflows/blob/v3.1.14/docs/workflow-controller-configmap.yaml # emissary executor is a more portable default, see https://github.com/kubeflow/pipelines/issues/1654. 
containerRuntimeExecutor: docker diff --git a/manifests/kustomize/third-party/argo/base/workflow-controller-deployment-patch.yaml b/manifests/kustomize/third-party/argo/base/workflow-controller-deployment-patch.yaml index c5bfa821422..91a9317f4ba 100644 --- a/manifests/kustomize/third-party/argo/base/workflow-controller-deployment-patch.yaml +++ b/manifests/kustomize/third-party/argo/base/workflow-controller-deployment-patch.yaml @@ -7,12 +7,12 @@ spec: spec: containers: - name: workflow-controller - image: gcr.io/ml-pipeline/workflow-controller:v3.1.6-patch-license-compliance + image: gcr.io/ml-pipeline/workflow-controller:v3.1.14-license-compliance args: - --configmap - workflow-controller-configmap - --executor-image - - gcr.io/ml-pipeline/argoexec:v3.1.6-patch-license-compliance + - gcr.io/ml-pipeline/argoexec:v3.1.14-license-compliance resources: requests: cpu: 100m diff --git a/manifests/kustomize/third-party/argo/upstream/manifests/Kptfile b/manifests/kustomize/third-party/argo/upstream/manifests/Kptfile index 35f40b5f539..d950696fb09 100644 --- a/manifests/kustomize/third-party/argo/upstream/manifests/Kptfile +++ b/manifests/kustomize/third-party/argo/upstream/manifests/Kptfile @@ -7,12 +7,12 @@ upstream: git: repo: https://github.com/argoproj/argo-workflows directory: /manifests - ref: v3.1.6 + ref: v3.1.14 updateStrategy: resource-merge upstreamLock: type: git git: repo: https://github.com/argoproj/argo-workflows directory: /manifests - ref: v3.1.6 + ref: v3.1.14 commit: 14e1278572b28d8b1854858ce7de355ce60199c9 diff --git a/test/install-argo-cli.sh b/test/install-argo-cli.sh index 434ac974acc..0b28c401206 100755 --- a/test/install-argo-cli.sh +++ b/test/install-argo-cli.sh @@ -18,9 +18,9 @@ set -ex DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" > /dev/null && pwd)" REPO_ROOT="${DIR}/.." -# ARGO_VERSION="$(cat ${REPO_ROOT}/third_party/argo/VERSION)" +ARGO_VERSION="$(cat ${REPO_ROOT}/third_party/argo/VERSION)" +# ARGO_VERSION=v3.1.14 OS=${OS:-"linux-amd64"} -ARGO_VERSION=v3.1.6 # if argo is not installed if ! 
which argo; then diff --git a/test/tag_for_hosted.sh b/test/tag_for_hosted.sh index 3fa49df6b52..30966e18895 100755 --- a/test/tag_for_hosted.sh +++ b/test/tag_for_hosted.sh @@ -120,12 +120,12 @@ docker tag gcr.io/cloudsql-docker/gce-proxy:1.14 gcr.io/$PROJECT_ID/hosted/$COMM docker push gcr.io/$PROJECT_ID/hosted/$COMMIT_SHA/cloudsqlproxy:$SEM_VER docker push gcr.io/$PROJECT_ID/hosted/$COMMIT_SHA/cloudsqlproxy:$MM_VER -docker tag gcr.io/ml-pipeline/argoexec:v3.1.6-patch-license-compliance gcr.io/$PROJECT_ID/hosted/$COMMIT_SHA/argoexecutor:$SEM_VER -docker tag gcr.io/ml-pipeline/argoexec:v3.1.6-patch-license-compliance gcr.io/$PROJECT_ID/hosted/$COMMIT_SHA/argoexecutor:$MM_VER +docker tag gcr.io/ml-pipeline/argoexec:v3.1.14-license-compliance gcr.io/$PROJECT_ID/hosted/$COMMIT_SHA/argoexecutor:$SEM_VER +docker tag gcr.io/ml-pipeline/argoexec:v3.1.14-license-compliance gcr.io/$PROJECT_ID/hosted/$COMMIT_SHA/argoexecutor:$MM_VER docker push gcr.io/$PROJECT_ID/hosted/$COMMIT_SHA/argoexecutor:$SEM_VER docker push gcr.io/$PROJECT_ID/hosted/$COMMIT_SHA/argoexecutor:$MM_VER -docker tag gcr.io/ml-pipeline/workflow-controller:v3.1.6-patch-license-compliance gcr.io/$PROJECT_ID/hosted/$COMMIT_SHA/argoworkflowcontroller:$SEM_VER -docker tag gcr.io/ml-pipeline/workflow-controller:v3.1.6-patch-license-compliance gcr.io/$PROJECT_ID/hosted/$COMMIT_SHA/argoworkflowcontroller:$MM_VER +docker tag gcr.io/ml-pipeline/workflow-controller:v3.1.14-license-compliance gcr.io/$PROJECT_ID/hosted/$COMMIT_SHA/argoworkflowcontroller:$SEM_VER +docker tag gcr.io/ml-pipeline/workflow-controller:v3.1.14-license-compliance gcr.io/$PROJECT_ID/hosted/$COMMIT_SHA/argoworkflowcontroller:$MM_VER docker push gcr.io/$PROJECT_ID/hosted/$COMMIT_SHA/argoworkflowcontroller:$SEM_VER docker push gcr.io/$PROJECT_ID/hosted/$COMMIT_SHA/argoworkflowcontroller:$MM_VER diff --git a/third_party/argo/Dockerfile.argoexec b/third_party/argo/Dockerfile.argoexec index e806510b010..7074a4ae94d 100644 --- a/third_party/argo/Dockerfile.argoexec +++ b/third_party/argo/Dockerfile.argoexec @@ -13,9 +13,9 @@ # limitations under the License. ARG TAG -# FROM docker.io/argoproj/argoexec:${TAG} +FROM docker.io/argoproj/argoexec:${TAG} # Use the following path when we need to fork temporarily. -FROM gcr.io/ml-pipeline-test/argoexec:v3.1.6-patch +# FROM gcr.io/ml-pipeline-test/argoexec:v3.1.14 # Copy notices, licenses and source code. COPY NOTICES/argoexec /NOTICES diff --git a/third_party/argo/Dockerfile.workflow-controller b/third_party/argo/Dockerfile.workflow-controller index 95ce3b443de..382d0513b9a 100644 --- a/third_party/argo/Dockerfile.workflow-controller +++ b/third_party/argo/Dockerfile.workflow-controller @@ -13,9 +13,9 @@ # limitations under the License. ARG TAG -# FROM docker.io/argoproj/workflow-controller:${TAG} +FROM docker.io/argoproj/workflow-controller:${TAG} # Use the following path when we need to fork temporarily. -FROM gcr.io/ml-pipeline-test/workflow-controller:v3.1.6-patch +# FROM gcr.io/ml-pipeline-test/workflow-controller:v3.1.14 # Copy notices, licenses and source code. COPY NOTICES/workflow-controller /NOTICES diff --git a/third_party/argo/README.md b/third_party/argo/README.md index 20dcd27fa38..0c5b28b32e5 100644 --- a/third_party/argo/README.md +++ b/third_party/argo/README.md @@ -21,7 +21,7 @@ Instructions: 1. Set version of argo you want to upgrade to, for example: ```bash - ARGO_TAG=v3.1.6 + ARGO_TAG=v3.1.14 ``` 1. 
```bash diff --git a/third_party/argo/VERSION b/third_party/argo/VERSION index 23b17965bf1..3b63c19c99c 100644 --- a/third_party/argo/VERSION +++ b/third_party/argo/VERSION @@ -1 +1 @@ -v3.1.6-patch +v3.1.14 diff --git a/third_party/argo/go-licenses.yaml b/third_party/argo/go-licenses.yaml index c455fb42a30..a518e2ad835 100644 --- a/third_party/argo/go-licenses.yaml +++ b/third_party/argo/go-licenses.yaml @@ -1,6 +1,6 @@ module: go: - version: v3.1.6 + version: v3.1.14 overrides: - name: github.com/davecgh/go-spew version: v1.1.1 diff --git a/third_party/argo/imp-1-update-notices.sh b/third_party/argo/imp-1-update-notices.sh index dd3cb3d7d42..88b575526c9 100755 --- a/third_party/argo/imp-1-update-notices.sh +++ b/third_party/argo/imp-1-update-notices.sh @@ -38,7 +38,7 @@ which go-licenses >/dev/null || (echo "go-licenses not found in PATH" && exit 1) # Clean up generated files rm -rf "${DIR}/NOTICES" -cd "$WORK_DIR" +cd "${WORK_DIR}" gh repo clone argoproj/argo-workflows cd argo-workflows REPO="${WORK_DIR}/argo-workflows" diff --git a/third_party/argo/licenses-argoexec.csv b/third_party/argo/licenses-argoexec.csv index 6cd7ab818a7..7ec9c550641 100644 --- a/third_party/argo/licenses-argoexec.csv +++ b/third_party/argo/licenses-argoexec.csv @@ -1,5 +1,5 @@ # Generated by https://github.com/google/go-licenses/v2. DO NOT EDIT. -github.com/argoproj/argo-workflows/v3, https://github.com/argoproj/argo-workflows/blob/v3.1.2/LICENSE, Apache-2.0 +github.com/argoproj/argo-workflows/v3, https://github.com/argoproj/argo-workflows/blob/v3.1.14/LICENSE, Apache-2.0 cloud.google.com/go, https://github.com/googleapis/google-cloud-go/blob/v0.55.0/LICENSE, Apache-2.0 cloud.google.com/go, https://github.com/googleapis/google-cloud-go/blob/v0.55.0/cmd/go-cloud-debug-agent/internal/debug/elf/elf.go, BSD-2-Clause cloud.google.com/go/storage, https://github.com/googleapis/google-cloud-go/blob/storage/v1.6.0/LICENSE, Apache-2.0 diff --git a/third_party/argo/licenses-workflow-controller.csv b/third_party/argo/licenses-workflow-controller.csv index 93ef2566407..f0d7daa6fcc 100644 --- a/third_party/argo/licenses-workflow-controller.csv +++ b/third_party/argo/licenses-workflow-controller.csv @@ -1,5 +1,5 @@ # Generated by https://github.com/google/go-licenses/v2. DO NOT EDIT. -github.com/argoproj/argo-workflows/v3, https://github.com/argoproj/argo-workflows/blob/v3.1.2/LICENSE, Apache-2.0 +github.com/argoproj/argo-workflows/v3, https://github.com/argoproj/argo-workflows/blob/v3.1.14/LICENSE, Apache-2.0 cloud.google.com/go, https://github.com/googleapis/google-cloud-go/blob/v0.55.0/LICENSE, Apache-2.0 cloud.google.com/go, https://github.com/googleapis/google-cloud-go/blob/v0.55.0/cmd/go-cloud-debug-agent/internal/debug/elf/elf.go, BSD-2-Clause github.com/Azure/go-autorest/autorest, https://github.com/Azure/go-autorest/blob/autorest/v0.11.1/LICENSE, Apache-2.0 From 95c145be611710248cebacb3b526cafbf8eea07e Mon Sep 17 00:00:00 2001 From: Chen Sun Date: Thu, 28 Oct 2021 15:29:51 -0700 Subject: [PATCH 24/31] chore(sdk): Fix RELEASE.md (#6824) --- sdk/RELEASE.md | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/sdk/RELEASE.md b/sdk/RELEASE.md index f36414e8892..dd6a6f2d315 100644 --- a/sdk/RELEASE.md +++ b/sdk/RELEASE.md @@ -2,6 +2,8 @@ ## Major Features and Improvements +* Support passing parameters in v2 using google.protobuf.Value [\#6804](https://github.com/kubeflow/pipelines/pull/6804). 
+ ## Breaking Changes ### For Pipeline Authors @@ -20,7 +22,6 @@ * Add optional support to specify description for pipeline version [\#6472](https://github.com/kubeflow/pipelines/issues/6472). * New v2 experimental compiler [\#6803](https://github.com/kubeflow/pipelines/pull/6803). -* Support passing parameters in v2 using google.protobuf.Value [\#6804](https://github.com/kubeflow/pipelines/pull/6804). ## Breaking Changes From fb6ff0a064de67518585edaff23cfeae398a2233 Mon Sep 17 00:00:00 2001 From: Yang Pan Date: Thu, 28 Oct 2021 16:41:44 -0700 Subject: [PATCH 25/31] chore(component): Disable the cache when building the image PiperOrigin-RevId: 406246359 --- .../container/aiplatform/cloudbuild.yaml | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/components/google-cloud/google_cloud_pipeline_components/container/aiplatform/cloudbuild.yaml b/components/google-cloud/google_cloud_pipeline_components/container/aiplatform/cloudbuild.yaml index a1ac90e8e05..5b6d27b061b 100644 --- a/components/google-cloud/google_cloud_pipeline_components/container/aiplatform/cloudbuild.yaml +++ b/components/google-cloud/google_cloud_pipeline_components/container/aiplatform/cloudbuild.yaml @@ -2,5 +2,4 @@ steps: - name: 'gcr.io/kaniko-project/executor:latest' args: - --destination=gcr.io/$PROJECT_ID/google-cloud-pipeline-components:latest - - --cache=true - - --cache-ttl=12h + - --cache=false From ea2e5be81a87f95cefded563d86b90925e48a73e Mon Sep 17 00:00:00 2001 From: Chen Sun Date: Thu, 28 Oct 2021 18:29:51 -0700 Subject: [PATCH 26/31] feat(sdk.v2): Implement experimental `@component` component (#6825) * Implement experimental `@component` component * release note --- sdk/RELEASE.md | 1 + sdk/python/kfp/v2/compiler/compiler_utils.py | 3 +- .../kfp/v2/compiler/experimental/compiler.py | 16 +- .../experimental/pipeline_spec_builder.py | 2 +- sdk/python/kfp/v2/compiler/main.py | 20 +- .../compiler_cli_tests/compiler_cli_tests.py | 10 + ...htweight_python_functions_v2_pipeline.json | 257 ++++++++++++++ ...ightweight_python_functions_v2_pipeline.py | 138 ++++++++ ...ight_python_functions_v2_with_outputs.json | 332 ++++++++++++++++++ ...weight_python_functions_v2_with_outputs.py | 79 +++++ .../components/experimental/base_component.py | 13 +- .../experimental/component_decorator.py | 111 ++++++ .../experimental/component_factory.py | 53 +-- .../{pipeline.py => pipeline_context.py} | 48 +++ .../components/experimental/placeholders.py | 2 +- .../experimental/placeholders_test.py | 2 +- .../experimental/python_component.py | 39 ++ .../v2/components/experimental/structures.py | 2 + .../v2/components/experimental/tasks_group.py | 20 +- .../components/experimental/yaml_component.py | 3 +- .../types/experimental/type_utils.py | 26 ++ sdk/python/kfp/v2/dsl/__init__.py | 3 +- .../kfp/v2/dsl/experimental/__init__.py | 34 +- 23 files changed, 1125 insertions(+), 89 deletions(-) create mode 100644 sdk/python/kfp/v2/compiler_cli_tests/test_data/experimental_lightweight_python_functions_v2_pipeline.json create mode 100644 sdk/python/kfp/v2/compiler_cli_tests/test_data/experimental_lightweight_python_functions_v2_pipeline.py create mode 100644 sdk/python/kfp/v2/compiler_cli_tests/test_data/experimental_lightweight_python_functions_v2_with_outputs.json create mode 100644 sdk/python/kfp/v2/compiler_cli_tests/test_data/experimental_lightweight_python_functions_v2_with_outputs.py create mode 100644 sdk/python/kfp/v2/components/experimental/component_decorator.py rename 
sdk/python/kfp/v2/components/experimental/{pipeline.py => pipeline_context.py} (76%) create mode 100644 sdk/python/kfp/v2/components/experimental/python_component.py diff --git a/sdk/RELEASE.md b/sdk/RELEASE.md index dd6a6f2d315..afcfc8b6f51 100644 --- a/sdk/RELEASE.md +++ b/sdk/RELEASE.md @@ -3,6 +3,7 @@ ## Major Features and Improvements * Support passing parameters in v2 using google.protobuf.Value [\#6804](https://github.com/kubeflow/pipelines/pull/6804). +* Implement experimental v2 `@component` component [\#6825](https://github.com/kubeflow/pipelines/pull/6825) ## Breaking Changes diff --git a/sdk/python/kfp/v2/compiler/compiler_utils.py b/sdk/python/kfp/v2/compiler/compiler_utils.py index 73919f51635..fbee332020f 100644 --- a/sdk/python/kfp/v2/compiler/compiler_utils.py +++ b/sdk/python/kfp/v2/compiler/compiler_utils.py @@ -97,10 +97,10 @@ def validate_pipeline_name(name: str) -> None: '`dsl.pipeline(name=...)` decorator.' % name) +# TODO: drop this method once experimental compiler merge back def is_v2_component(op: _container_op.ContainerOp) -> bool: """Determines whether a component is a KFP v2 component.""" - # TODO: migrate v2 component to PipelineTask if not isinstance(op, _container_op.ContainerOp): return False @@ -111,6 +111,7 @@ def is_v2_component(op: _container_op.ContainerOp) -> bool: _component_builder.V2_COMPONENT_ANNOTATION) == 'true') +# TODO: drop this method once experimental compiler merge back def refactor_v2_container_spec(container_spec: PipelineContainerSpec) -> None: """Refactor the container spec for a v2 component.""" if not '--function_name' in container_spec.args: diff --git a/sdk/python/kfp/v2/compiler/experimental/compiler.py b/sdk/python/kfp/v2/compiler/experimental/compiler.py index 7ee7fe3901d..31024012ac3 100644 --- a/sdk/python/kfp/v2/compiler/experimental/compiler.py +++ b/sdk/python/kfp/v2/compiler/experimental/compiler.py @@ -36,6 +36,7 @@ from kfp.v2.components.experimental import component_factory from kfp.v2.components.experimental import for_loop from kfp.v2.components.experimental import pipeline_channel +from kfp.v2.components.experimental import pipeline_context from kfp.v2.components.experimental import pipeline_task from kfp.v2.components.experimental import tasks_group from kfp.v2.components.types import artifact_types @@ -148,7 +149,7 @@ def _create_pipeline_v2( dsl.PipelineParameterChannel( name=arg_name, channel_type=arg_type)) - with dsl.Pipeline(pipeline_name) as dsl_pipeline: + with pipeline_context.Pipeline(pipeline_name) as dsl_pipeline: pipeline_func(*args_list) if not dsl_pipeline.tasks: @@ -217,7 +218,8 @@ def _write_pipeline_spec_json( 'The output path {} should ends with ".json".'.format( output_path)) - def _validate_exit_handler(self, pipeline: dsl.Pipeline) -> None: + def _validate_exit_handler(self, + pipeline: pipeline_context.Pipeline) -> None: """Makes sure there is only one global exit handler. This is temporary to be compatible with KFP v1. @@ -258,7 +260,7 @@ def _validate_exit_handler_helper( def _create_pipeline_spec( self, pipeline_args: List[dsl.PipelineChannel], - pipeline: dsl.Pipeline, + pipeline: pipeline_context.Pipeline, ) -> pipeline_spec_pb2.PipelineSpec: """Creates a pipeline spec object. 
@@ -502,7 +504,7 @@ def _get_condition_channels_for_tasks_helper( def _get_inputs_for_all_groups( self, - pipeline: dsl.Pipeline, + pipeline: pipeline_context.Pipeline, pipeline_args: List[dsl.PipelineChannel], root_group: tasks_group.TasksGroup, task_name_to_parent_groups: Mapping[str, List[_GroupOrTask]], @@ -736,7 +738,7 @@ def _get_uncommon_ancestors( # TODO: revisit for dependency that breaks through DAGs. def _get_dependencies( self, - pipeline: dsl.Pipeline, + pipeline: pipeline_context.Pipeline, root_group: tasks_group.TasksGroup, task_name_to_parent_groups: Mapping[str, List[_GroupOrTask]], group_name_to_parent_groups: Mapping[str, List[tasks_group.TasksGroup]], @@ -891,10 +893,6 @@ def _build_spec_by_group( subgroup_container_spec = builder.build_container_spec_for_task( task=subgroup) - if compiler_utils.is_v2_component(subgroup): - compiler_utils.refactor_v2_container_spec( - subgroup_container_spec) - executor_label = subgroup_component_spec.executor_label if executor_label not in deployment_config.executors: diff --git a/sdk/python/kfp/v2/compiler/experimental/pipeline_spec_builder.py b/sdk/python/kfp/v2/compiler/experimental/pipeline_spec_builder.py index 6c5e55ff751..b2339c57584 100644 --- a/sdk/python/kfp/v2/compiler/experimental/pipeline_spec_builder.py +++ b/sdk/python/kfp/v2/compiler/experimental/pipeline_spec_builder.py @@ -837,7 +837,7 @@ def populate_metrics_in_dag_outputs( artifact_types.Metrics.TYPE_NAME, artifact_types.ClassificationMetrics.TYPE_NAME, ]: - unique_output_name = '{}-{}'.format(op.name, output_name) + unique_output_name = '{}-{}'.format(task.name, output_name) sub_task_name = task.name sub_task_output = output_name diff --git a/sdk/python/kfp/v2/compiler/main.py b/sdk/python/kfp/v2/compiler/main.py index b80f1bb00c3..5a2a7386830 100644 --- a/sdk/python/kfp/v2/compiler/main.py +++ b/sdk/python/kfp/v2/compiler/main.py @@ -22,6 +22,7 @@ import kfp.dsl as dsl from kfp.v2 import compiler from kfp.v2.compiler.experimental import compiler as experimental_compiler +from kfp.v2.components.experimental import pipeline_context def parse_arguments() -> argparse.Namespace: @@ -118,6 +119,10 @@ def _compile_pipeline_function( class PipelineCollectorContext(): + # TODO: remove this once experimental merge back + def __init__(self, use_experimental: bool): + self.use_experimental = use_experimental + def __enter__(self): pipeline_funcs = [] @@ -125,12 +130,19 @@ def add_pipeline(func: Callable) -> Callable: pipeline_funcs.append(func) return func - self.old_handler = dsl._pipeline._pipeline_decorator_handler - dsl._pipeline._pipeline_decorator_handler = add_pipeline + if self.use_experimental: + self.old_handler = pipeline_context.pipeline_decorator_handler + pipeline_context.pipeline_decorator_handler = add_pipeline + else: + self.old_handler = dsl._pipeline._pipeline_decorator_handler + dsl._pipeline._pipeline_decorator_handler = add_pipeline return pipeline_funcs def __exit__(self, *args): - dsl._pipeline._pipeline_decorator_handler = self.old_handler + if self.use_experimental: + pipeline_context.pipeline_decorator_handler = self.old_handler + else: + dsl._pipeline._pipeline_decorator_handler = self.old_handler def compile_pyfile( @@ -153,7 +165,7 @@ def compile_pyfile( sys.path.insert(0, os.path.dirname(pyfile)) try: filename = os.path.basename(pyfile) - with PipelineCollectorContext() as pipeline_funcs: + with PipelineCollectorContext(use_experimental) as pipeline_funcs: __import__(os.path.splitext(filename)[0]) _compile_pipeline_function( 
pipeline_funcs=pipeline_funcs, diff --git a/sdk/python/kfp/v2/compiler_cli_tests/compiler_cli_tests.py b/sdk/python/kfp/v2/compiler_cli_tests/compiler_cli_tests.py index a5523f86803..16849e072bb 100644 --- a/sdk/python/kfp/v2/compiler_cli_tests/compiler_cli_tests.py +++ b/sdk/python/kfp/v2/compiler_cli_tests/compiler_cli_tests.py @@ -169,10 +169,20 @@ def test_lightweight_python_functions_v2_pipeline(self): self._test_compile_py_to_json( 'lightweight_python_functions_v2_pipeline') + def test_lightweight_python_functions_v2_pipeline_experimental(self): + self._test_compile_py_to_json( + 'experimental_lightweight_python_functions_v2_pipeline', + use_experimental=True) + def test_lightweight_python_functions_v2_with_outputs(self): self._test_compile_py_to_json( 'lightweight_python_functions_v2_with_outputs') + def test_lightweight_python_functions_v2_with_outputs_experimental(self): + self._test_compile_py_to_json( + 'experimental_lightweight_python_functions_v2_with_outputs', + use_experimental=True) + def test_xgboost_sample_pipeline(self): self._test_compile_py_to_json('xgboost_sample_pipeline') diff --git a/sdk/python/kfp/v2/compiler_cli_tests/test_data/experimental_lightweight_python_functions_v2_pipeline.json b/sdk/python/kfp/v2/compiler_cli_tests/test_data/experimental_lightweight_python_functions_v2_pipeline.json new file mode 100644 index 00000000000..6a5e02d72eb --- /dev/null +++ b/sdk/python/kfp/v2/compiler_cli_tests/test_data/experimental_lightweight_python_functions_v2_pipeline.json @@ -0,0 +1,257 @@ +{ + "components": { + "comp-preprocess": { + "executorLabel": "exec-preprocess", + "inputDefinitions": { + "parameters": { + "input_dict_parameter": { + "parameterType": "STRUCT" + }, + "input_list_parameter": { + "parameterType": "LIST" + }, + "message": { + "parameterType": "STRING" + } + } + }, + "outputDefinitions": { + "artifacts": { + "output_dataset_one": { + "artifactType": { + "schemaTitle": "system.Dataset", + "schemaVersion": "0.0.1" + } + }, + "output_dataset_two_path": { + "artifactType": { + "schemaTitle": "system.Dataset", + "schemaVersion": "0.0.1" + } + } + }, + "parameters": { + "output_bool_parameter_path": { + "parameterType": "BOOLEAN" + }, + "output_dict_parameter_path": { + "parameterType": "STRUCT" + }, + "output_list_parameter_path": { + "parameterType": "LIST" + }, + "output_parameter_path": { + "parameterType": "STRING" + } + } + } + }, + "comp-train": { + "executorLabel": "exec-train", + "inputDefinitions": { + "artifacts": { + "dataset_one_path": { + "artifactType": { + "schemaTitle": "system.Dataset", + "schemaVersion": "0.0.1" + } + }, + "dataset_two": { + "artifactType": { + "schemaTitle": "system.Dataset", + "schemaVersion": "0.0.1" + } + } + }, + "parameters": { + "input_bool": { + "parameterType": "BOOLEAN" + }, + "input_dict": { + "parameterType": "STRUCT" + }, + "input_list": { + "parameterType": "LIST" + }, + "message": { + "parameterType": "STRING" + }, + "num_steps": { + "parameterType": "NUMBER_INTEGER" + } + } + }, + "outputDefinitions": { + "artifacts": { + "model": { + "artifactType": { + "schemaTitle": "system.Model", + "schemaVersion": "0.0.1" + } + } + } + } + } + }, + "defaultPipelineRoot": "dummy_root", + "deploymentSpec": { + "executors": { + "exec-preprocess": { + "container": { + "args": [ + "--executor_input", + "{{$}}", + "--function_to_execute", + "preprocess" + ], + "command": [ + "sh", + "-c", + "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location 'kfp==1.8.6' && \"$0\" \"$@\"\n", + "sh", + "-ec", + "program_path=$(mktemp -d)\nprintf \"%s\" \"$0\" > \"$program_path/ephemeral_component.py\"\npython3 -m kfp.v2.components.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n", + "\nimport kfp\nfrom kfp.v2 import dsl\nfrom kfp.v2.dsl import *\nfrom typing import *\n\ndef preprocess(\n # An input parameter of type string.\n message: str,\n # An input parameter of type dict.\n input_dict_parameter: Dict[str, int],\n # An input parameter of type list.\n input_list_parameter: List[str],\n # Use Output[T] to get a metadata-rich handle to the output artifact\n # of type `Dataset`.\n output_dataset_one: Output[Dataset],\n # A locally accessible filepath for another output artifact of type\n # `Dataset`.\n output_dataset_two_path: OutputPath('Dataset'),\n # A locally accessible filepath for an output parameter of type string.\n output_parameter_path: OutputPath(str),\n # A locally accessible filepath for an output parameter of type bool.\n output_bool_parameter_path: OutputPath(bool),\n # A locally accessible filepath for an output parameter of type dict.\n output_dict_parameter_path: OutputPath(Dict[str, int]),\n # A locally accessible filepath for an output parameter of type list.\n output_list_parameter_path: OutputPath(List[str]),\n):\n \"\"\"Dummy preprocessing step.\"\"\"\n\n # Use Dataset.path to access a local file path for writing.\n # One can also use Dataset.uri to access the actual URI file path.\n with open(output_dataset_one.path, 'w') as f:\n f.write(message)\n\n # OutputPath is used to just pass the local file path of the output artifact\n # to the function.\n with open(output_dataset_two_path, 'w') as f:\n f.write(message)\n\n with open(output_parameter_path, 'w') as f:\n f.write(message)\n\n with open(output_bool_parameter_path, 'w') as f:\n f.write(\n str(True)) # use either `str()` or `json.dumps()` for bool values.\n\n import json\n with open(output_dict_parameter_path, 'w') as f:\n f.write(json.dumps(input_dict_parameter))\n\n with open(output_list_parameter_path, 'w') as f:\n f.write(json.dumps(input_list_parameter))\n\n" + ], + "image": "python:3.7" + } + }, + "exec-train": { + "container": { + "args": [ + "--executor_input", + "{{$}}", + "--function_to_execute", + "train" + ], + "command": [ + "sh", + "-c", + "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location 'kfp==1.8.6' && \"$0\" \"$@\"\n", + "sh", + "-ec", + "program_path=$(mktemp -d)\nprintf \"%s\" \"$0\" > \"$program_path/ephemeral_component.py\"\npython3 -m kfp.v2.components.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n", + "\nimport kfp\nfrom kfp.v2 import dsl\nfrom kfp.v2.dsl import *\nfrom typing import *\n\ndef train(\n # Use InputPath to get a locally accessible path for the input artifact\n # of type `Dataset`.\n dataset_one_path: InputPath('Dataset'),\n # Use Input[T] to get a metadata-rich handle to the input artifact\n # of type `Dataset`.\n dataset_two: Input[Dataset],\n # An input parameter of type string.\n message: str,\n # Use Output[T] to get a metadata-rich handle to the output artifact\n # of type `Dataset`.\n model: Output[Model],\n # An input parameter of type bool.\n input_bool: bool,\n # An input parameter of type dict.\n input_dict: Dict[str, int],\n # An input parameter of type List[str].\n input_list: List[str],\n # An input parameter of type int with a default value.\n num_steps: int = 100,\n):\n \"\"\"Dummy Training step.\"\"\"\n with open(dataset_one_path, 'r') as input_file:\n dataset_one_contents = input_file.read()\n\n with open(dataset_two.path, 'r') as input_file:\n dataset_two_contents = input_file.read()\n\n line = (f'dataset_one_contents: {dataset_one_contents} || '\n f'dataset_two_contents: {dataset_two_contents} || '\n f'message: {message} || '\n f'input_bool: {input_bool}, type {type(input_bool)} || '\n f'input_dict: {input_dict}, type {type(input_dict)} || '\n f'input_list: {input_list}, type {type(input_list)} \\n')\n\n with open(model.path, 'w') as output_file:\n for i in range(num_steps):\n output_file.write('Step {}\\n{}\\n=====\\n'.format(i, line))\n\n # model is an instance of Model artifact, which has a .metadata dictionary\n # to store arbitrary metadata for the output artifact.\n model.metadata['accuracy'] = 0.9\n\n" + ], + "image": "python:3.7" + } + } + } + }, + "pipelineInfo": { + "name": "my-test-pipeline-beta" + }, + "root": { + "dag": { + "tasks": { + "preprocess": { + "cachingOptions": { + "enableCache": true + }, + "componentRef": { + "name": "comp-preprocess" + }, + "inputs": { + "parameters": { + "input_dict_parameter": { + "componentInputParameter": "input_dict" + }, + "input_list_parameter": { + "runtimeValue": { + "constant": [ + "a", + "b", + "c" + ] + } + }, + "message": { + "componentInputParameter": "message" + } + } + }, + "taskInfo": { + "name": "preprocess" + } + }, + "train": { + "cachingOptions": { + "enableCache": true + }, + "componentRef": { + "name": "comp-train" + }, + "dependentTasks": [ + "preprocess" + ], + "inputs": { + "artifacts": { + "dataset_one_path": { + "taskOutputArtifact": { + "outputArtifactKey": "output_dataset_one", + "producerTask": "preprocess" + } + }, + "dataset_two": { + "taskOutputArtifact": { + "outputArtifactKey": "output_dataset_two_path", + "producerTask": "preprocess" + } + } + }, + "parameters": { + "input_bool": { + "taskOutputParameter": { + "outputParameterKey": "output_bool_parameter_path", + "producerTask": "preprocess" + } + }, + "input_dict": { + "taskOutputParameter": { + "outputParameterKey": "output_dict_parameter_path", + "producerTask": "preprocess" + } + }, + "input_list": { + "taskOutputParameter": { + 
"outputParameterKey": "output_list_parameter_path", + "producerTask": "preprocess" + } + }, + "message": { + "taskOutputParameter": { + "outputParameterKey": "output_parameter_path", + "producerTask": "preprocess" + } + }, + "num_steps": { + "runtimeValue": { + "constant": 100.0 + } + } + } + }, + "taskInfo": { + "name": "train" + } + } + } + }, + "inputDefinitions": { + "parameters": { + "input_dict": { + "defaultValue": { + "A": 1.0, + "B": 2.0 + }, + "parameterType": "STRUCT" + }, + "message": { + "parameterType": "STRING" + } + } + } + }, + "schemaVersion": "2.1.0", + "sdkVersion": "kfp-1.8.6" +} \ No newline at end of file diff --git a/sdk/python/kfp/v2/compiler_cli_tests/test_data/experimental_lightweight_python_functions_v2_pipeline.py b/sdk/python/kfp/v2/compiler_cli_tests/test_data/experimental_lightweight_python_functions_v2_pipeline.py new file mode 100644 index 00000000000..e4733a26be3 --- /dev/null +++ b/sdk/python/kfp/v2/compiler_cli_tests/test_data/experimental_lightweight_python_functions_v2_pipeline.py @@ -0,0 +1,138 @@ +# Copyright 2021 The Kubeflow Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Sample pipeline for passing data in KFP v2.""" +from typing import Dict, List + +import kfp.v2.dsl.experimental as dsl +from kfp.v2.compiler.experimental import compiler +from kfp.v2.dsl.experimental import (Dataset, Input, InputPath, Model, Output, + OutputPath, component) + + +@component +def preprocess( + # An input parameter of type string. + message: str, + # An input parameter of type dict. + input_dict_parameter: Dict[str, int], + # An input parameter of type list. + input_list_parameter: List[str], + # Use Output[T] to get a metadata-rich handle to the output artifact + # of type `Dataset`. + output_dataset_one: Output[Dataset], + # A locally accessible filepath for another output artifact of type + # `Dataset`. + output_dataset_two_path: OutputPath('Dataset'), + # A locally accessible filepath for an output parameter of type string. + output_parameter_path: OutputPath(str), + # A locally accessible filepath for an output parameter of type bool. + output_bool_parameter_path: OutputPath(bool), + # A locally accessible filepath for an output parameter of type dict. + output_dict_parameter_path: OutputPath(Dict[str, int]), + # A locally accessible filepath for an output parameter of type list. + output_list_parameter_path: OutputPath(List[str]), +): + """Dummy preprocessing step.""" + + # Use Dataset.path to access a local file path for writing. + # One can also use Dataset.uri to access the actual URI file path. + with open(output_dataset_one.path, 'w') as f: + f.write(message) + + # OutputPath is used to just pass the local file path of the output artifact + # to the function. + with open(output_dataset_two_path, 'w') as f: + f.write(message) + + with open(output_parameter_path, 'w') as f: + f.write(message) + + with open(output_bool_parameter_path, 'w') as f: + f.write( + str(True)) # use either `str()` or `json.dumps()` for bool values. 
+ + import json + with open(output_dict_parameter_path, 'w') as f: + f.write(json.dumps(input_dict_parameter)) + + with open(output_list_parameter_path, 'w') as f: + f.write(json.dumps(input_list_parameter)) + + +@component +def train( + # Use InputPath to get a locally accessible path for the input artifact + # of type `Dataset`. + dataset_one_path: InputPath('Dataset'), + # Use Input[T] to get a metadata-rich handle to the input artifact + # of type `Dataset`. + dataset_two: Input[Dataset], + # An input parameter of type string. + message: str, + # Use Output[T] to get a metadata-rich handle to the output artifact + # of type `Dataset`. + model: Output[Model], + # An input parameter of type bool. + input_bool: bool, + # An input parameter of type dict. + input_dict: Dict[str, int], + # An input parameter of type List[str]. + input_list: List[str], + # An input parameter of type int with a default value. + num_steps: int = 100, +): + """Dummy Training step.""" + with open(dataset_one_path, 'r') as input_file: + dataset_one_contents = input_file.read() + + with open(dataset_two.path, 'r') as input_file: + dataset_two_contents = input_file.read() + + line = (f'dataset_one_contents: {dataset_one_contents} || ' + f'dataset_two_contents: {dataset_two_contents} || ' + f'message: {message} || ' + f'input_bool: {input_bool}, type {type(input_bool)} || ' + f'input_dict: {input_dict}, type {type(input_dict)} || ' + f'input_list: {input_list}, type {type(input_list)} \n') + + with open(model.path, 'w') as output_file: + for i in range(num_steps): + output_file.write('Step {}\n{}\n=====\n'.format(i, line)) + + # model is an instance of Model artifact, which has a .metadata dictionary + # to store arbitrary metadata for the output artifact. + model.metadata['accuracy'] = 0.9 + + +@dsl.pipeline(pipeline_root='dummy_root', name='my-test-pipeline-beta') +def pipeline(message: str, input_dict: Dict[str, int] = {'A': 1, 'B': 2}): + + preprocess_task = preprocess( + message=message, + input_dict_parameter=input_dict, + input_list_parameter=['a', 'b', 'c'], + ) + train_task = train( + dataset_one_path=preprocess_task.outputs['output_dataset_one'], + dataset_two=preprocess_task.outputs['output_dataset_two_path'], + message=preprocess_task.outputs['output_parameter_path'], + input_bool=preprocess_task.outputs['output_bool_parameter_path'], + input_dict=preprocess_task.outputs['output_dict_parameter_path'], + input_list=preprocess_task.outputs['output_list_parameter_path'], + ) + + +if __name__ == '__main__': + compiler.Compiler().compile( + pipeline_func=pipeline, package_path=__file__.replace('.py', '.json')) diff --git a/sdk/python/kfp/v2/compiler_cli_tests/test_data/experimental_lightweight_python_functions_v2_with_outputs.json b/sdk/python/kfp/v2/compiler_cli_tests/test_data/experimental_lightweight_python_functions_v2_with_outputs.json new file mode 100644 index 00000000000..b0b96b8922e --- /dev/null +++ b/sdk/python/kfp/v2/compiler_cli_tests/test_data/experimental_lightweight_python_functions_v2_with_outputs.json @@ -0,0 +1,332 @@ +{ + "components": { + "comp-add-numbers": { + "executorLabel": "exec-add-numbers", + "inputDefinitions": { + "parameters": { + "first": { + "parameterType": "NUMBER_INTEGER" + }, + "second": { + "parameterType": "NUMBER_INTEGER" + } + } + }, + "outputDefinitions": { + "parameters": { + "Output": { + "parameterType": "NUMBER_INTEGER" + } + } + } + }, + "comp-concat-message": { + "executorLabel": "exec-concat-message", + "inputDefinitions": { + "parameters": { + "first": { + 
"parameterType": "STRING" + }, + "second": { + "parameterType": "STRING" + } + } + }, + "outputDefinitions": { + "parameters": { + "Output": { + "parameterType": "STRING" + } + } + } + }, + "comp-output-artifact": { + "executorLabel": "exec-output-artifact", + "inputDefinitions": { + "parameters": { + "message": { + "parameterType": "STRING" + }, + "number": { + "parameterType": "NUMBER_INTEGER" + } + } + }, + "outputDefinitions": { + "artifacts": { + "Output": { + "artifactType": { + "schemaTitle": "system.Dataset", + "schemaVersion": "0.0.1" + } + } + } + } + }, + "comp-output-named-tuple": { + "executorLabel": "exec-output-named-tuple", + "inputDefinitions": { + "artifacts": { + "artifact": { + "artifactType": { + "schemaTitle": "system.Dataset", + "schemaVersion": "0.0.1" + } + } + } + }, + "outputDefinitions": { + "artifacts": { + "metrics": { + "artifactType": { + "schemaTitle": "system.Metrics", + "schemaVersion": "0.0.1" + } + }, + "model": { + "artifactType": { + "schemaTitle": "system.Model", + "schemaVersion": "0.0.1" + } + } + }, + "parameters": { + "scalar": { + "parameterType": "STRING" + } + } + } + } + }, + "defaultPipelineRoot": "dummy_root", + "deploymentSpec": { + "executors": { + "exec-add-numbers": { + "container": { + "args": [ + "--executor_input", + "{{$}}", + "--function_to_execute", + "add_numbers" + ], + "command": [ + "sh", + "-c", + "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location 'kfp==1.8.6' && \"$0\" \"$@\"\n", + "sh", + "-ec", + "program_path=$(mktemp -d)\nprintf \"%s\" \"$0\" > \"$program_path/ephemeral_component.py\"\npython3 -m kfp.v2.components.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n", + "\nimport kfp\nfrom kfp.v2 import dsl\nfrom kfp.v2.dsl import *\nfrom typing import *\n\ndef add_numbers(first: int, second: int) -> int:\n return first + second\n\n" + ], + "image": "python:3.7" + } + }, + "exec-concat-message": { + "container": { + "args": [ + "--executor_input", + "{{$}}", + "--function_to_execute", + "concat_message" + ], + "command": [ + "sh", + "-c", + "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location 'kfp==1.8.6' && \"$0\" \"$@\"\n", + "sh", + "-ec", + "program_path=$(mktemp -d)\nprintf \"%s\" \"$0\" > \"$program_path/ephemeral_component.py\"\npython3 -m kfp.v2.components.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n", + "\nimport kfp\nfrom kfp.v2 import dsl\nfrom kfp.v2.dsl import *\nfrom typing import *\n\ndef concat_message(first: str, second: str) -> str:\n return first + second\n\n" + ], + "image": "python:3.7" + } + }, + "exec-output-artifact": { + "container": { + "args": [ + "--executor_input", + "{{$}}", + "--function_to_execute", + "output_artifact" + ], + "command": [ + "sh", + "-c", + "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location 'kfp==1.8.6' && \"$0\" \"$@\"\n", + "sh", + "-ec", + "program_path=$(mktemp -d)\nprintf \"%s\" \"$0\" > \"$program_path/ephemeral_component.py\"\npython3 -m kfp.v2.components.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n", + "\nimport kfp\nfrom kfp.v2 import dsl\nfrom kfp.v2.dsl import *\nfrom typing import *\n\ndef output_artifact(number: int, message: str) -> Dataset:\n result = [message for _ in range(number)]\n return '\\n'.join(result)\n\n" + ], + "image": "python:3.7" + } + }, + "exec-output-named-tuple": { + "container": { + "args": [ + "--executor_input", + "{{$}}", + "--function_to_execute", + "output_named_tuple" + ], + "command": [ + "sh", + "-c", + "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location 'kfp==1.8.6' && \"$0\" \"$@\"\n", + "sh", + "-ec", + "program_path=$(mktemp -d)\nprintf \"%s\" \"$0\" > \"$program_path/ephemeral_component.py\"\npython3 -m kfp.v2.components.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n", + "\nimport kfp\nfrom kfp.v2 import dsl\nfrom kfp.v2.dsl import *\nfrom typing import *\n\ndef output_named_tuple(\n artifact: Input[Dataset]\n) -> NamedTuple('Outputs', [\n ('scalar', str),\n ('metrics', Metrics),\n ('model', Model),\n]):\n scalar = \"123\"\n\n import json\n metrics = json.dumps({\n 'metrics': [{\n 'name': 'accuracy',\n 'numberValue': 0.9,\n 'format': \"PERCENTAGE\",\n }]\n })\n\n with open(artifact.path, 'r') as f:\n artifact_contents = f.read()\n model = \"Model contents: \" + artifact_contents\n\n from collections import namedtuple\n output = namedtuple('Outputs', ['scalar', 'metrics', 'model'])\n return output(scalar, metrics, model)\n\n" + ], + "image": "python:3.7" + } + } + } + }, + "pipelineInfo": { + "name": "functions-with-outputs" + }, + "root": { + "dag": { + "outputs": { + "artifacts": { + "output-named-tuple-metrics": { + "artifactSelectors": [ + { + "outputArtifactKey": "metrics", + "producerSubtask": "output-named-tuple" + } + ] + } + } + }, + "tasks": { + "add-numbers": { + "cachingOptions": { + "enableCache": true + }, + "componentRef": { + "name": "comp-add-numbers" + }, + "inputs": { + "parameters": { + "first": { + "componentInputParameter": "first_number" + }, + "second": { + "componentInputParameter": "second_number" + } + } + }, + "taskInfo": { + "name": "add-numbers" + } + }, + "concat-message": { + "cachingOptions": { + "enableCache": true + }, + "componentRef": { + "name": "comp-concat-message" + }, + "inputs": { + "parameters": { + "first": { + "componentInputParameter": "first_message" + }, + "second": { + "componentInputParameter": "second_message" + } + } + }, + "taskInfo": { + "name": "concat-message" + } + }, + "output-artifact": { + "cachingOptions": { + "enableCache": true + }, + "componentRef": { + "name": "comp-output-artifact" + }, + "dependentTasks": [ + "add-numbers", + "concat-message" + ], + "inputs": { + "parameters": { + "message": { + "taskOutputParameter": { + "outputParameterKey": "Output", + "producerTask": "concat-message" + } + }, + "number": { + "taskOutputParameter": { + "outputParameterKey": "Output", + "producerTask": 
"add-numbers" + } + } + } + }, + "taskInfo": { + "name": "output-artifact" + } + }, + "output-named-tuple": { + "cachingOptions": { + "enableCache": true + }, + "componentRef": { + "name": "comp-output-named-tuple" + }, + "dependentTasks": [ + "output-artifact" + ], + "inputs": { + "artifacts": { + "artifact": { + "taskOutputArtifact": { + "outputArtifactKey": "Output", + "producerTask": "output-artifact" + } + } + } + }, + "taskInfo": { + "name": "output-named-tuple" + } + } + } + }, + "inputDefinitions": { + "parameters": { + "first_message": { + "parameterType": "STRING" + }, + "first_number": { + "parameterType": "NUMBER_INTEGER" + }, + "second_message": { + "parameterType": "STRING" + }, + "second_number": { + "parameterType": "NUMBER_INTEGER" + } + } + }, + "outputDefinitions": { + "artifacts": { + "output-named-tuple-metrics": { + "artifactType": { + "schemaTitle": "system.Metrics", + "schemaVersion": "0.0.1" + } + } + } + } + }, + "schemaVersion": "2.1.0", + "sdkVersion": "kfp-1.8.6" +} \ No newline at end of file diff --git a/sdk/python/kfp/v2/compiler_cli_tests/test_data/experimental_lightweight_python_functions_v2_with_outputs.py b/sdk/python/kfp/v2/compiler_cli_tests/test_data/experimental_lightweight_python_functions_v2_with_outputs.py new file mode 100644 index 00000000000..658fe45c966 --- /dev/null +++ b/sdk/python/kfp/v2/compiler_cli_tests/test_data/experimental_lightweight_python_functions_v2_with_outputs.py @@ -0,0 +1,79 @@ +# Copyright 2021 The Kubeflow Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+"""Lightweight functions v2 with outputs.""" +from typing import NamedTuple + +import kfp.v2.dsl.experimental as dsl +from kfp.v2.compiler.experimental import compiler +from kfp.v2.dsl.experimental import component, Input, Dataset, Model, Metrics + + +@component +def concat_message(first: str, second: str) -> str: + return first + second + + +@component +def add_numbers(first: int, second: int) -> int: + return first + second + + +@component +def output_artifact(number: int, message: str) -> Dataset: + result = [message for _ in range(number)] + return '\n'.join(result) + + +@component +def output_named_tuple( + artifact: Input[Dataset] +) -> NamedTuple('Outputs', [ + ('scalar', str), + ('metrics', Metrics), + ('model', Model), +]): + scalar = "123" + + import json + metrics = json.dumps({ + 'metrics': [{ + 'name': 'accuracy', + 'numberValue': 0.9, + 'format': "PERCENTAGE", + }] + }) + + with open(artifact.path, 'r') as f: + artifact_contents = f.read() + model = "Model contents: " + artifact_contents + + from collections import namedtuple + output = namedtuple('Outputs', ['scalar', 'metrics', 'model']) + return output(scalar, metrics, model) + + +@dsl.pipeline(pipeline_root='dummy_root', name='functions-with-outputs') +def pipeline(first_message: str, second_message: str, first_number: int, + second_number: int): + concat_op = concat_message(first=first_message, second=second_message) + add_numbers_op = add_numbers(first=first_number, second=second_number) + output_artifact_op = output_artifact( + number=add_numbers_op.output, message=concat_op.output) + output_name_tuple_op = output_named_tuple( + artifact=output_artifact_op.output) + + +if __name__ == '__main__': + compiler.Compiler().compile( + pipeline_func=pipeline, package_path=__file__.replace('.py', '.json')) diff --git a/sdk/python/kfp/v2/components/experimental/base_component.py b/sdk/python/kfp/v2/components/experimental/base_component.py index 35430b3cbb1..76dceea9f3d 100644 --- a/sdk/python/kfp/v2/components/experimental/base_component.py +++ b/sdk/python/kfp/v2/components/experimental/base_component.py @@ -63,13 +63,14 @@ def __call__(self, *args, **kwargs) -> pipeline_task.PipelineTask: task_inputs[k] = v # Fill in default value if there was no user provided value - for name, input_spec in (self.component_spec.inputs or {}).items(): - if input_spec.default is not None and name not in task_inputs: - task_inputs[name] = input_spec.default + for input_name, input_spec in (self.component_spec.inputs or + {}).items(): + if input_spec.default is not None and input_name not in task_inputs: + task_inputs[input_name] = input_spec.default missing_arguments = [ - name for name in (self.component_spec.inputs or {}) - if name not in task_inputs + input_name for input_name in (self.component_spec.inputs or {}) + if input_name not in task_inputs ] if missing_arguments: argument_or_arguments = 'argument' if len( @@ -86,7 +87,7 @@ def __call__(self, *args, **kwargs) -> pipeline_task.PipelineTask: ) @abc.abstractmethod - def execute(self, *args, **kwargs): + def execute(self, **kwargs): """Executes the component given the required inputs. 
Subclasses of BaseComponent must override this abstract method
diff --git a/sdk/python/kfp/v2/components/experimental/component_decorator.py b/sdk/python/kfp/v2/components/experimental/component_decorator.py
new file mode 100644
index 00000000000..988cc517116
--- /dev/null
+++ b/sdk/python/kfp/v2/components/experimental/component_decorator.py
@@ -0,0 +1,111 @@
+# Copyright 2021 The Kubeflow Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import functools
+from typing import Callable, Optional, List
+
+from kfp.v2.components.experimental import component_factory
+
+
+def component(func: Optional[Callable] = None,
+              *,
+              base_image: Optional[str] = None,
+              target_image: Optional[str] = None,
+              packages_to_install: Optional[List[str]] = None,
+              output_component_file: Optional[str] = None,
+              install_kfp_package: bool = True,
+              kfp_package_path: Optional[str] = None):
+    """Decorator for Python-function based components in KFP v2.
+
+    A KFP v2 component can either be a lightweight component or a containerized
+    one.
+
+    If target_image is not specified, this function creates a lightweight
+    component. A lightweight component is a self-contained Python function that
+    includes all necessary imports and dependencies. In lightweight components,
+    packages_to_install will be used to install dependencies at runtime. The
+    parameters install_kfp_package and kfp_package_path can be used to control
+    how KFP should be installed when the lightweight component is executed.
+
+    If target_image is specified, this function creates a component definition
+    based around the target_image. The assumption is that the function in func
+    will be packaged by KFP into this target_image. Use the KFP CLI's `build`
+    command to package func into target_image.
+
+    Example usage:
+
+        from kfp.v2 import dsl
+
+        @dsl.component
+        def my_function_one(input: str, output: Output[Model]):
+            ...
+
+        @dsl.component(
+            base_image='python:3.9',
+            output_component_file='my_function.yaml'
+        )
+        def my_function_two(input: Input[Model]):
+            ...
+
+        @dsl.pipeline(pipeline_root='...',
+                      name='my-pipeline')
+        def pipeline():
+            my_function_one_task = my_function_one(input=...)
+            my_function_two_task = my_function_two(
+                input=my_function_one_task.outputs['output'])
+
+    Args:
+        func: The Python function to create a component from. The function
+            should have type annotations for all its arguments, indicating how
+            it is intended to be used (e.g. as an input/output Artifact object,
+            a plain parameter, or a path to a file).
+        base_image: The image to use when executing func. It should
+            contain a default Python interpreter that is compatible with KFP.
+        target_image: The image that a containerized component will be built
+            into; see the description above.
+        packages_to_install: An optional list of packages to install before
+            executing func. These will always be installed at component runtime.
+        output_component_file: If specified, this function will write a
+            shareable/loadable version of the component spec into this file.
+        install_kfp_package: Specifies if we should add a KFP Python package to
+            packages_to_install.
Lightweight Python functions always require + an installation of KFP in base_image to work. If you specify + a base_image that already contains KFP, you can set this to False. + This flag is ignored when target_image is specified, which implies + we're building a containerized component. Containerized components + will always install KFP as part of the build process. + kfp_package_path: Specifies the location from which to install KFP. By + default, this will try to install from PyPi using the same version + as that used when this component was created. KFP developers can + choose to override this to point to a Github pull request or + other pip-compatible location when testing changes to lightweight + Python functions. + + Returns: + A component task factory that can be used in pipeline definitions. + """ + if func is None: + return functools.partial( + component, + base_image=base_image, + target_image=target_image, + packages_to_install=packages_to_install, + output_component_file=output_component_file, + install_kfp_package=install_kfp_package, + kfp_package_path=kfp_package_path) + + return component_factory.create_component_from_func( + func, + base_image=base_image, + target_image=target_image, + packages_to_install=packages_to_install, + output_component_file=output_component_file, + install_kfp_package=install_kfp_package, + kfp_package_path=kfp_package_path) diff --git a/sdk/python/kfp/v2/components/experimental/component_factory.py b/sdk/python/kfp/v2/components/experimental/component_factory.py index abf5ed28cbf..4c65b7b2f77 100644 --- a/sdk/python/kfp/v2/components/experimental/component_factory.py +++ b/sdk/python/kfp/v2/components/experimental/component_factory.py @@ -22,10 +22,11 @@ import docstring_parser -from kfp import components as v1_components -from kfp.components import _components, _data_passing +from kfp.v2.components.experimental import placeholders +from kfp.v2.components.experimental import python_component from kfp.v2.components.experimental import structures from kfp.v2.components.types import artifact_types, type_annotations +from kfp.v2.components.types.experimental import type_utils _DEFAULT_BASE_IMAGE = 'python:3.7' @@ -114,7 +115,7 @@ def _annotation_to_type_struct(annotation): if isinstance(annotation, dict): return annotation if isinstance(annotation, type): - type_struct = _data_passing.get_canonical_type_name_for_type(annotation) + type_struct = type_utils.get_canonical_type_name_for_type(annotation) if type_struct: return type_struct if issubclass(annotation, artifact_types.Artifact @@ -132,7 +133,7 @@ def _annotation_to_type_struct(annotation): type_name = str(annotation) # It's also possible to get the converter by type name - type_struct = _data_passing.get_canonical_type_name_for_type(type_name) + type_struct = type_utils.get_canonical_type_name_for_type(type_name) if type_struct: return type_struct return type_name @@ -150,7 +151,6 @@ def _maybe_make_unique(name: str, names: List[str]): raise RuntimeError('Too many arguments with the name {}'.format(name)) -# TODO: switch to v2 structures def extract_component_interface(func: Callable) -> structures.ComponentSpec: single_output_name_const = 'Output' @@ -190,12 +190,6 @@ def extract_component_interface(func: Callable) -> structures.ComponentSpec: raise ValueError( 'Default values for Input/Output artifacts are not supported.' 
) - # elif isinstance(parameter_type, - # (v1_components.InputPath, v1_components.OutputPath)): - # raise TypeError( - # 'In v2 components, please import the Python function' - # ' annotations `InputPath` and `OutputPath` from' - # ' package `kfp.v2.dsl` instead of `kfp.dsl`.') elif isinstance( parameter_type, (type_annotations.InputPath, type_annotations.OutputPath)): @@ -217,9 +211,7 @@ def extract_component_interface(func: Callable) -> structures.ComponentSpec: output_names.add(io_name) output_spec = structures.OutputSpec( type=type_struct, description=doc_dict.get(parameter.name)) - # output_spec._passing_style = passing_style - # output_spec._parameter_name = parameter.name - outputs.append(output_spec) + outputs[io_name] = output_spec else: io_name = _maybe_make_unique(io_name, input_names) input_names.add(io_name) @@ -231,14 +223,11 @@ def extract_component_interface(func: Callable) -> structures.ComponentSpec: outer_type_name = list(type_struct.keys())[0] if isinstance( type_struct, dict) else type_struct try: - input_spec.default = _data_passing.serialize_value( - parameter.default, outer_type_name) + input_spec.default = parameter.default except Exception as ex: warnings.warn( 'Could not serialize the default value of the' ' parameter "{}". {}'.format(parameter.name, ex)) - # input_spec._passing_style = passing_style - # input_spec._parameter_name = parameter.name inputs[io_name] = input_spec #Analyzing the return type annotations. @@ -259,8 +248,6 @@ def extract_component_interface(func: Callable) -> structures.ComponentSpec: output_name = _maybe_make_unique(field_name, output_names) output_names.add(output_name) output_spec = structures.OutputSpec(type=type_struct) - # output_spec._passing_style = None - # output_spec._return_tuple_field_name = field_name outputs[output_name] = output_spec # Deprecated dict-based way of declaring multiple outputs. 
Was only used by
    # the @component decorator
@@ -281,8 +268,7 @@ def extract_component_interface(func: Callable) -> structures.ComponentSpec:
         # `def func(output_path: OutputPath()) -> str: ...`
         output_names.add(output_name)
         type_struct = _annotation_to_type_struct(signature.return_annotation)
-        output_spec = structures.OutputSpec(type=type_struct,)
-        # output_spec._passing_style = None
+        output_spec = structures.OutputSpec(type=type_struct)
         outputs[output_name] = output_spec
 
     # Component name and description are derived from the function's name and
@@ -340,7 +326,7 @@ def _get_command_and_args_for_lightweight_component(
 
     args = [
         "--executor_input",
-        structures.ExecutorInputPlaceholder(),
+        placeholders.executor_input_placeholder(),
         "--function_to_execute",
         func.__name__,
     ]
@@ -358,7 +344,7 @@ def _get_command_and_args_for_containerized_component(
 
     args = [
         "--executor_input",
-        structures.ExecutorInputPlaceholder(),
+        placeholders.executor_input_placeholder(),
        "--function_to_execute",
         function_name,
     ]
@@ -403,11 +389,11 @@ def create_component_from_func(func: Callable,
             func=func)
 
     component_spec = extract_component_interface(func)
-    component_spec.implementation = structures.ContainerImplementation(
+    component_spec.implementation = structures.Implementation(
         container=structures.ContainerSpec(
             image=component_image,
-            command=packages_to_install_command + command,
-            args=args,
+            commands=packages_to_install_command + command,
+            arguments=args,
         ))
 
     module_path = pathlib.Path(inspect.getsourcefile(func))
@@ -430,14 +416,5 @@ def create_component_from_func(func: Callable,
     if output_component_file:
         component_spec.save(output_component_file)
 
-    # TODO(KFPv2): Replace with v2 BaseComponent.
-    task_factory = _components._create_task_factory_from_component_spec(
-        component_spec)
-
-    # TODO(KFPv2): Once this returns a BaseComponent, we should check for this
-    # in the Executor, and get the appropriate callable. For now, we'll look for
-    # this special attribute to hold the Python function in the task factory
-    # during runtime.
-    setattr(task_factory, 'python_func', func)
-
-    return task_factory
+    return python_component.PythonComponent(
+        component_spec=component_spec, python_func=func)
diff --git a/sdk/python/kfp/v2/components/experimental/pipeline.py b/sdk/python/kfp/v2/components/experimental/pipeline_context.py
similarity index 76%
rename from sdk/python/kfp/v2/components/experimental/pipeline.py
rename to sdk/python/kfp/v2/components/experimental/pipeline_context.py
index 69a6d29470f..9824c3d11aa 100644
--- a/sdk/python/kfp/v2/components/experimental/pipeline.py
+++ b/sdk/python/kfp/v2/components/experimental/pipeline_context.py
@@ -13,10 +13,58 @@
 # limitations under the License.
 """Definition for Pipeline."""
 
+from typing import Callable, Optional
+
 from kfp.v2.components.experimental import pipeline_task
 from kfp.v2.components.experimental import tasks_group
 from kfp.v2.components import utils
 
+# This handler is called whenever the @pipeline decorator is applied.
+# It can be used by a command-line DSL compiler to inject code that runs for
+# every pipeline definition.
+pipeline_decorator_handler = None
+
+
+def pipeline(name: Optional[str] = None,
+             description: Optional[str] = None,
+             pipeline_root: Optional[str] = None):
+    """Decorator of pipeline functions.
+
+    Example
+      ::
+
+        @pipeline(
+          name='my-pipeline',
+          description='My ML Pipeline.',
+          pipeline_root='gs://my-bucket/my-output-path'
+        )
+        def my_pipeline(a: str, b: int):
+          ...
+
+    Args:
+      name: The pipeline name.
+        Defaults to a sanitized version of the function name.
+      description: Optional. A human-readable description of the pipeline.
+      pipeline_root: The root directory under which the pipeline's
+        input/output URIs are generated. Required if any input/output URI
+        placeholder is used in this pipeline.
+    """
+
+    def _pipeline(func: Callable):
+        if name:
+            func._component_human_name = name
+        if description:
+            func._component_description = description
+        if pipeline_root:
+            func.pipeline_root = pipeline_root
+
+        if pipeline_decorator_handler:
+            return pipeline_decorator_handler(func) or func
+        else:
+            return func
+
+    return _pipeline
+
 
 class Pipeline:
     """A pipeline contains a list of tasks.
diff --git a/sdk/python/kfp/v2/components/experimental/placeholders.py b/sdk/python/kfp/v2/components/experimental/placeholders.py
index ad24a3adb2c..cc46faa338f 100644
--- a/sdk/python/kfp/v2/components/experimental/placeholders.py
+++ b/sdk/python/kfp/v2/components/experimental/placeholders.py
@@ -39,4 +39,4 @@ def output_parameter_path_placeholder(output_key: str) -> str:
 
 
 def executor_input_placeholder() -> str:
-    return "{{{{$}}}}"
+    return "{{$}}"
diff --git a/sdk/python/kfp/v2/components/experimental/placeholders_test.py b/sdk/python/kfp/v2/components/experimental/placeholders_test.py
index 46e252ef165..fbd8c202f41 100644
--- a/sdk/python/kfp/v2/components/experimental/placeholders_test.py
+++ b/sdk/python/kfp/v2/components/experimental/placeholders_test.py
@@ -58,7 +58,7 @@ def test_output_parameter_path_placeholder(self):
 
     def test_executor_input_placeholder(self):
         self.assertEqual(
-            "{{{{$}}}}",
+            "{{$}}",
             placeholders.executor_input_placeholder(),
         )
 
diff --git a/sdk/python/kfp/v2/components/experimental/python_component.py b/sdk/python/kfp/v2/components/experimental/python_component.py
new file mode 100644
index 00000000000..17c4c04eb04
--- /dev/null
+++ b/sdk/python/kfp/v2/components/experimental/python_component.py
@@ -0,0 +1,39 @@
+# Copyright 2021 The Kubeflow Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Python function-based component."""
+
+from typing import Callable
+
+from kfp.v2.components.experimental import base_component
+from kfp.v2.components.experimental import structures
+
+
+class PythonComponent(base_component.BaseComponent):
+    """Component defined via Python function.
+
+    Attributes:
+        python_func: The Python function that becomes the implementation of
+            this component.
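+
+    Example (a sketch of the behavior; calling a component inside a pipeline
+    builds a PipelineTask, while `execute` runs the wrapped function
+    directly)::
+
+        @component
+        def add(first: int, second: int) -> int:
+            return first + second
+
+        add.execute(first=1, second=2)  # Returns 3.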
+    """
+
+    def __init__(
+        self,
+        component_spec: structures.ComponentSpec,
+        python_func: Callable,
+    ):
+        super().__init__(component_spec=component_spec)
+        self.python_func = python_func
+
+    def execute(self, **kwargs):
+        return self.python_func(**kwargs)
diff --git a/sdk/python/kfp/v2/components/experimental/structures.py b/sdk/python/kfp/v2/components/experimental/structures.py
index fe2fc07be1a..911c7a1f02a 100644
--- a/sdk/python/kfp/v2/components/experimental/structures.py
+++ b/sdk/python/kfp/v2/components/experimental/structures.py
@@ -50,8 +50,10 @@ class OutputSpec(BaseModel):
 
     Attributes:
         type: The type of the output.
+        description: Optional. The user description of the output.
     """
     type: str
+    description: Optional[str] = None
 
 
 class BasePlaceholder(BaseModel):
diff --git a/sdk/python/kfp/v2/components/experimental/tasks_group.py b/sdk/python/kfp/v2/components/experimental/tasks_group.py
index 7848fe9c113..1c142c7f2a2 100644
--- a/sdk/python/kfp/v2/components/experimental/tasks_group.py
+++ b/sdk/python/kfp/v2/components/experimental/tasks_group.py
@@ -17,7 +17,7 @@
 from typing import Optional, Union
 
 from kfp.v2.components.experimental import for_loop
-from kfp.v2.components.experimental import pipeline
+from kfp.v2.components.experimental import pipeline_context
 from kfp.v2.components.experimental import pipeline_channel
 from kfp.v2.components.experimental import pipeline_task
 
@@ -64,26 +64,26 @@ def __init__(
         self.dependencies = []
 
     def __enter__(self):
-        if not pipeline.Pipeline.get_default_pipeline():
+        if not pipeline_context.Pipeline.get_default_pipeline():
             raise ValueError('Default pipeline not defined.')
 
         self._make_name_unique()
 
-        pipeline.Pipeline.get_default_pipeline().push_tasks_group(self)
+        pipeline_context.Pipeline.get_default_pipeline().push_tasks_group(self)
         return self
 
     def __exit__(self, *unused_args):
-        pipeline.Pipeline.get_default_pipeline().pop_tasks_group()
+        pipeline_context.Pipeline.get_default_pipeline().pop_tasks_group()
 
     def _make_name_unique(self):
         """Generates a unique TasksGroup name in the pipeline."""
-        if not pipeline.Pipeline.get_default_pipeline():
+        if not pipeline_context.Pipeline.get_default_pipeline():
             raise ValueError('Default pipeline not defined.')
 
         self.name = (
             self.group_type + '-' +
-            ('' if self.name is None else self.name + '-') +
-            pipeline.Pipeline.get_default_pipeline().get_next_group_id())
+            ('' if self.name is None else self.name + '-') + pipeline_context
+            .Pipeline.get_default_pipeline().get_next_group_id())
         self.name = self.name.replace('_', '-')
 
     def remove_task_recursive(self, task: pipeline_task.PipelineTask):
@@ -130,8 +130,8 @@ def __init__(
             raise ValueError('exit_task cannot depend on any other tasks.')
 
         # Removing exit_task from any group
-        pipeline.Pipeline.get_default_pipeline().remove_task_from_groups(
-            exit_task)
+        pipeline_context.Pipeline.get_default_pipeline(
+        ).remove_task_from_groups(exit_task)
 
         # Set is_exit_handler since the compiler might be using this attribute.
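        # (The exit task then runs when the ExitHandler group finishes,
        # whether or not the tasks inside the group succeeded.)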
        exit_task.is_exit_handler = True
 
@@ -209,7 +209,7 @@ def __init__(
         else:
             self.loop_argument = for_loop.LoopArgument.from_raw_items(
                 raw_items=items,
-                name_code=pipeline.Pipeline.get_default_pipeline()
+                name_code=pipeline_context.Pipeline.get_default_pipeline()
                 .get_next_group_id(),
             )
             self.items_is_pipeline_channel = False
diff --git a/sdk/python/kfp/v2/components/experimental/yaml_component.py b/sdk/python/kfp/v2/components/experimental/yaml_component.py
index 02993f51a73..9cc855a9cd5 100644
--- a/sdk/python/kfp/v2/components/experimental/yaml_component.py
+++ b/sdk/python/kfp/v2/components/experimental/yaml_component.py
@@ -18,6 +18,7 @@
 
 
 class YamlComponent(base_component.BaseComponent):
+    """Component defined by a YAML component spec."""
 
     def execute(self, *args, **kwargs):
         pass
@@ -26,4 +27,4 @@ def execute(self, *args, **kwargs):
 def load_component_from_text(text: str) -> base_component.BaseComponent:
     """Loads component from text."""
     return YamlComponent(
-        structures.ComponentSpec.load_from_component_yaml(text))
+        component_spec=structures.ComponentSpec.load_from_component_yaml(text))
diff --git a/sdk/python/kfp/v2/components/types/experimental/type_utils.py b/sdk/python/kfp/v2/components/types/experimental/type_utils.py
index 1ebd2e68f1a..f7dcab4e30f 100644
--- a/sdk/python/kfp/v2/components/types/experimental/type_utils.py
+++ b/sdk/python/kfp/v2/components/types/experimental/type_utils.py
@@ -12,6 +12,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 """Utilities for component I/O type mapping."""
+import json
 import inspect
 import re
 import warnings
@@ -277,3 +278,28 @@ def _check_dict_types(
                   " and " + str(expected_type[type_name][type_property]))
             return False
     return True
+
+
+_TYPE_TO_TYPE_NAME = {
+    str: 'String',
+    int: 'Integer',
+    float: 'Float',
+    bool: 'Boolean',
+    list: 'List',
+    dict: 'Dict',
+}
+
+
+def get_canonical_type_name_for_type(typ: Type) -> str:
+    """Finds the canonical type name for a given type.
+
+    Args:
+        typ: The type to search for.
+
+    Returns:
+        The canonical name of the type, or None if the type has no canonical
+        name.
+    """
+    try:
+        return _TYPE_TO_TYPE_NAME.get(typ, None)
+    except TypeError:
+        # Unhashable types (e.g. parameterized generics) cannot be dict keys.
+        return None
diff --git a/sdk/python/kfp/v2/dsl/__init__.py b/sdk/python/kfp/v2/dsl/__init__.py
index 5dfc3958639..bbfa5440633 100644
--- a/sdk/python/kfp/v2/dsl/__init__.py
+++ b/sdk/python/kfp/v2/dsl/__init__.py
@@ -34,6 +34,7 @@
     OutputPath,
 )
 
+# TODO: remove once the experimental dsl merges back.
 from kfp.dsl import (
     graph_component,
     pipeline,
@@ -47,4 +48,4 @@
 PIPELINE_JOB_RESOURCE_NAME_PLACEHOLDER = '{{$.pipeline_job_resource_name}}'
 PIPELINE_JOB_ID_PLACEHOLDER = '{{$.pipeline_job_uuid}}'
 PIPELINE_TASK_NAME_PLACEHOLDER = '{{$.pipeline_task_name}}'
-PIPELINE_TASK_ID_PLACEHOLDER = '{{$.pipeline_task_uuid}}'
\ No newline at end of file
+PIPELINE_TASK_ID_PLACEHOLDER = '{{$.pipeline_task_uuid}}'
diff --git a/sdk/python/kfp/v2/dsl/experimental/__init__.py b/sdk/python/kfp/v2/dsl/experimental/__init__.py
index 1f5b3dd13c5..255f81cc5fc 100644
--- a/sdk/python/kfp/v2/dsl/experimental/__init__.py
+++ b/sdk/python/kfp/v2/dsl/experimental/__init__.py
@@ -12,12 +12,18 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
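
For reference, a quick usage sketch of the lookup helper added above (module path as introduced in this patch):

    from kfp.v2.components.types.experimental import type_utils

    type_utils.get_canonical_type_name_for_type(str)    # 'String'
    type_utils.get_canonical_type_name_for_type(bytes)  # None: not mapped
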
-from kfp.v2.components.experimental.pipeline import Pipeline - -from kfp.v2.components.importer_node import importer -from kfp.v2.dsl import ( - pipeline, - component, +from kfp.v2.components.experimental.component_decorator import component +from kfp.v2.components.experimental.pipeline_channel import ( + PipelineArtifactChannel, + PipelineChannel, + PipelineParameterChannel, +) +from kfp.v2.components.experimental.pipeline_context import pipeline +from kfp.v2.components.experimental.pipeline_task import PipelineTask +from kfp.v2.components.experimental.tasks_group import ( + Condition, + ExitHandler, + ParallelFor, ) from kfp.v2.components.types.artifact_types import ( Artifact, @@ -35,14 +41,10 @@ InputPath, OutputPath, ) -from kfp.v2.components.experimental.pipeline_channel import ( - PipelineArtifactChannel, - PipelineChannel, - PipelineParameterChannel, -) -from kfp.v2.components.experimental.pipeline_task import PipelineTask -from kfp.v2.components.experimental.tasks_group import ( - Condition, - ExitHandler, - ParallelFor, +from kfp.v2.dsl import ( + PIPELINE_JOB_NAME_PLACEHOLDER, + PIPELINE_JOB_RESOURCE_NAME_PLACEHOLDER, + PIPELINE_JOB_ID_PLACEHOLDER, + PIPELINE_TASK_NAME_PLACEHOLDER, + PIPELINE_TASK_ID_PLACEHOLDER, ) From 8c6843fdb89a8ce6c7c1d1689c523e946974c049 Mon Sep 17 00:00:00 2001 From: Chen Sun Date: Thu, 28 Oct 2021 20:40:51 -0700 Subject: [PATCH 27/31] fix(sdk.v2): Fix importer ignoring `reimport` setting, and switch to Protobuf.Value for import uri. (#6827) * fix importer * release note * remove error usage in test sample * disable importer test --- samples/test/config.yaml | 5 +- sdk/RELEASE.md | 2 + .../test_data/pipeline_with_importer.json | 194 +----------------- .../test_data/pipeline_with_importer.py | 4 - sdk/python/kfp/v2/components/importer_node.py | 38 ++-- .../kfp/v2/components/importer_node_test.py | 30 +-- 6 files changed, 49 insertions(+), 224 deletions(-) diff --git a/samples/test/config.yaml b/samples/test/config.yaml index 680ced3909f..f57be3028e3 100644 --- a/samples/test/config.yaml +++ b/samples/test/config.yaml @@ -88,8 +88,9 @@ path: samples.v2.hello_world_test - name: producer_consumer_param path: samples.v2.producer_consumer_param_test -- name: pipeline_with_importer - path: samples.v2.pipeline_with_importer_test +# TODO: Re-enable after fixing protobuf.Value support for importer +# - name: pipeline_with_importer +# path: samples.v2.pipeline_with_importer_test # TODO(Bobgy): Re-enable after figuring out V2 Engine # and protobuf.Value support. 
# - name: cache_v2 diff --git a/sdk/RELEASE.md b/sdk/RELEASE.md index afcfc8b6f51..0b2851eaaf6 100644 --- a/sdk/RELEASE.md +++ b/sdk/RELEASE.md @@ -15,6 +15,8 @@ ## Bug Fixes and Other Changes +* Fix importer ignoring reimport setting, and switch to Protobuf.Value for import uri [\#6827](https://github.com/kubeflow/pipelines/pull/6827) + ## Documentation Updates # 1.8.7 diff --git a/sdk/python/kfp/v2/compiler_cli_tests/test_data/pipeline_with_importer.json b/sdk/python/kfp/v2/compiler_cli_tests/test_data/pipeline_with_importer.json index 07622538962..8e5a9147519 100644 --- a/sdk/python/kfp/v2/compiler_cli_tests/test_data/pipeline_with_importer.json +++ b/sdk/python/kfp/v2/compiler_cli_tests/test_data/pipeline_with_importer.json @@ -99,43 +99,6 @@ } } }, - "comp-importer-3": { - "executorLabel": "exec-importer-3", - "inputDefinitions": { - "parameters": { - "uri": { - "type": "STRING" - } - } - }, - "outputDefinitions": { - "artifacts": { - "artifact": { - "artifactType": { - "schemaTitle": "system.Dataset", - "schemaVersion": "0.0.1" - } - } - } - } - }, - "comp-pass-through-op": { - "executorLabel": "exec-pass-through-op", - "inputDefinitions": { - "parameters": { - "value": { - "parameterType": "STRING" - } - } - }, - "outputDefinitions": { - "parameters": { - "Output": { - "parameterType": "STRING" - } - } - } - }, "comp-train": { "executorLabel": "exec-train", "inputDefinitions": { @@ -191,34 +154,6 @@ } } } - }, - "comp-train-3": { - "executorLabel": "exec-train-3", - "inputDefinitions": { - "artifacts": { - "dataset": { - "artifactType": { - "schemaTitle": "system.Dataset", - "schemaVersion": "0.0.1" - } - } - } - }, - "outputDefinitions": { - "artifacts": { - "model": { - "artifactType": { - "schemaTitle": "system.Model", - "schemaVersion": "0.0.1" - } - } - }, - "parameters": { - "scalar": { - "parameterType": "STRING" - } - } - } } }, "deploymentSpec": { @@ -226,9 +161,7 @@ "exec-importer": { "importer": { "artifactUri": { - "constantValue": { - "stringValue": "gs://ml-pipeline-playground/shakespeare1.txt" - } + "constant": "gs://ml-pipeline-playground/shakespeare1.txt" }, "typeSchema": { "schemaTitle": "system.Dataset", @@ -241,40 +174,13 @@ "artifactUri": { "runtimeParameter": "uri" }, + "reimport": true, "typeSchema": { "schemaTitle": "system.Dataset", "schemaVersion": "0.0.1" } } }, - "exec-importer-3": { - "importer": { - "artifactUri": { - "runtimeParameter": "uri" - }, - "typeSchema": { - "schemaTitle": "system.Dataset", - "schemaVersion": "0.0.1" - } - } - }, - "exec-pass-through-op": { - "container": { - "args": [ - "--value", - "{{$.inputs.parameters['value']}}", - "----output-paths", - "{{$.outputs.parameters['Output'].output_file}}" - ], - "command": [ - "sh", - "-ec", - "program_path=$(mktemp)\nprintf \"%s\" \"$0\" > \"$program_path\"\npython3 -u \"$program_path\" \"$@\"\n", - "def pass_through_op(value):\n return value\n\ndef _serialize_str(str_value: str) -> str:\n if not isinstance(str_value, str):\n raise TypeError('Value \"{}\" has type \"{}\" instead of str.'.format(\n str(str_value), str(type(str_value))))\n return str_value\n\nimport argparse\n_parser = argparse.ArgumentParser(prog='Pass through op', description='')\n_parser.add_argument(\"--value\", dest=\"value\", type=str, required=True, default=argparse.SUPPRESS)\n_parser.add_argument(\"----output-paths\", dest=\"_output_paths\", type=str, nargs=1)\n_parsed_args = vars(_parser.parse_args())\n_output_files = _parsed_args.pop(\"_output_paths\", [])\n\n_outputs = pass_through_op(**_parsed_args)\n\n_outputs = 
[_outputs]\n\n_output_serializers = [\n _serialize_str,\n\n]\n\nimport os\nfor idx, output_file in enumerate(_output_files):\n try:\n os.makedirs(os.path.dirname(output_file))\n except OSError:\n pass\n with open(output_file, 'w') as f:\n f.write(_output_serializers[idx](_outputs[idx]))\n" - ], - "image": "python:3.7" - } - }, "exec-train": { "container": { "args": [ @@ -286,7 +192,7 @@ "command": [ "sh", "-c", - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location 'kfp==1.8.6' && \"$0\" \"$@\"\n", + "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location 'kfp==1.8.7' && \"$0\" \"$@\"\n", "sh", "-ec", "program_path=$(mktemp -d)\nprintf \"%s\" \"$0\" > \"$program_path/ephemeral_component.py\"\npython3 -m kfp.v2.components.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n", @@ -306,27 +212,7 @@ "command": [ "sh", "-c", - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location 'kfp==1.8.6' && \"$0\" \"$@\"\n", - "sh", - "-ec", - "program_path=$(mktemp -d)\nprintf \"%s\" \"$0\" > \"$program_path/ephemeral_component.py\"\npython3 -m kfp.v2.components.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n", - "\nimport kfp\nfrom kfp.v2 import dsl\nfrom kfp.v2.dsl import *\nfrom typing import *\n\ndef train(\n dataset: Input[Dataset]\n) -> NamedTuple('Outputs', [\n ('scalar', str),\n ('model', Model),\n]):\n \"\"\"Dummy Training step.\"\"\"\n with open(dataset.path, 'r') as f:\n data = f.read()\n print('Dataset:', data)\n\n scalar = '123'\n model = 'My model trained using data: {}'.format(data)\n\n from collections import namedtuple\n output = namedtuple('Outputs', ['scalar', 'model'])\n return output(scalar, model)\n\n" - ], - "image": "python:3.7" - } - }, - "exec-train-3": { - "container": { - "args": [ - "--executor_input", - "{{$}}", - "--function_to_execute", - "train" - ], - "command": [ - "sh", - "-c", - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location 'kfp==1.8.6' && \"$0\" \"$@\"\n", + "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip || python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location 'kfp==1.8.7' && \"$0\" \"$@\"\n", "sh", "-ec", "program_path=$(mktemp -d)\nprintf \"%s\" \"$0\" > \"$program_path/ephemeral_component.py\"\npython3 -m kfp.v2.components.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n", @@ -381,9 +267,7 @@ "parameters": { "uri": { "runtimeValue": { - "constantValue": { - "stringValue": "gs://ml-pipeline-playground/shakespeare1.txt" - } + "constant": "gs://ml-pipeline-playground/shakespeare1.txt" } } } @@ -392,48 +276,6 @@ "name": "importer" } }, - "importer-3": { - "cachingOptions": { - "enableCache": true - }, - "componentRef": { - "name": "comp-importer-3" - }, - "dependentTasks": [ - "pass-through-op" - ], - "inputs": { - "parameters": { - "uri": { - "taskOutputParameter": { - "outputParameterKey": "Output", - "producerTask": "pass-through-op" - } - } - } - }, - "taskInfo": { - "name": "importer-3" - } - }, - "pass-through-op": { - "cachingOptions": { - "enableCache": true - }, - "componentRef": { - "name": "comp-pass-through-op" - }, - "inputs": { - "parameters": { - "value": { - "componentInputParameter": "dataset2" - } - } - }, - "taskInfo": { - "name": "pass-through-op" - } - }, "train": { "cachingOptions": { "enableCache": true @@ -457,30 +299,6 @@ "taskInfo": { "name": "train" } - }, - "train-3": { - "cachingOptions": { - "enableCache": true - }, - "componentRef": { - "name": "comp-train-3" - }, - "dependentTasks": [ - "importer-3" - ], - "inputs": { - "artifacts": { - "dataset": { - "taskOutputArtifact": { - "outputArtifactKey": "artifact", - "producerTask": "importer-3" - } - } - } - }, - "taskInfo": { - "name": "train-3" - } } } }, @@ -493,7 +311,7 @@ } }, "schemaVersion": "2.1.0", - "sdkVersion": "kfp-1.8.6" + "sdkVersion": "kfp-1.8.7" }, "runtimeConfig": { "gcsOutputDirectory": "dummy_root", diff --git a/sdk/python/kfp/v2/compiler_cli_tests/test_data/pipeline_with_importer.py b/sdk/python/kfp/v2/compiler_cli_tests/test_data/pipeline_with_importer.py index bd35aca6875..44035baf94d 100644 --- a/sdk/python/kfp/v2/compiler_cli_tests/test_data/pipeline_with_importer.py +++ b/sdk/python/kfp/v2/compiler_cli_tests/test_data/pipeline_with_importer.py @@ -59,10 +59,6 @@ def my_pipeline(dataset2: str = 'gs://ml-pipeline-playground/shakespeare2.txt'): artifact_uri=dataset2, artifact_class=Dataset, reimport=True) train(dataset=importer2.output) - importer3 = importer( - artifact_uri=pass_through_op(dataset2).output, artifact_class=Dataset) - train(dataset=importer3.output) - if __name__ == '__main__': compiler.Compiler().compile( diff --git a/sdk/python/kfp/v2/components/importer_node.py b/sdk/python/kfp/v2/components/importer_node.py index 38ccbc8b41d..a91fc2944c8 100644 --- a/sdk/python/kfp/v2/components/importer_node.py +++ b/sdk/python/kfp/v2/components/importer_node.py @@ -28,24 +28,26 @@ def _build_importer_spec( artifact_uri: Union[_pipeline_param.PipelineParam, str], artifact_type_schema: pipeline_spec_pb2.ArtifactTypeSchema, + reimport: bool, ) -> pipeline_spec_pb2.PipelineDeploymentConfig.ImporterSpec: """Builds an importer executor spec. Args: - artifact_uri: The artifact uri to import from. - artifact_type_schema: The user specified artifact type schema of the - artifact to be imported. - + artifact_uri: The artifact uri to import from. 
+      artifact_type_schema: The user specified artifact type schema of the
+        artifact to be imported.
+      reimport: Whether to reimport the artifact.
+
     Returns:
-      An importer spec.
+      An importer spec.
     """
-    importer_spec = pipeline_spec_pb2.PipelineDeploymentConfig.ImporterSpec()
-    importer_spec.type_schema.CopyFrom(artifact_type_schema)
+    importer_spec = pipeline_spec_pb2.PipelineDeploymentConfig.ImporterSpec(
+        type_schema=artifact_type_schema, reimport=reimport)
     if isinstance(artifact_uri, _pipeline_param.PipelineParam):
         importer_spec.artifact_uri.runtime_parameter = INPUT_KEY
     elif isinstance(artifact_uri, str):
-        importer_spec.artifact_uri.constant_value.string_value = artifact_uri
+        importer_spec.artifact_uri.constant.string_value = artifact_uri
 
     return importer_spec
 
@@ -57,11 +59,11 @@ def _build_importer_task_spec(
     """Builds an importer task spec.
 
     Args:
-        importer_base_name: The base name of the importer node.
-        artifact_uri: The artifact uri to import from.
+      importer_base_name: The base name of the importer node.
+      artifact_uri: The artifact uri to import from.
 
     Returns:
-        An importer node task spec.
+      An importer node task spec.
     """
     result = pipeline_spec_pb2.PipelineTaskSpec()
     result.component_ref.name = dsl_utils.sanitize_component_name(
@@ -80,7 +82,7 @@
                 INPUT_KEY].component_input_parameter = param.full_name
     elif isinstance(artifact_uri, str):
         result.inputs.parameters[
-            INPUT_KEY].runtime_value.constant_value.string_value = artifact_uri
+            INPUT_KEY].runtime_value.constant.string_value = artifact_uri
 
     return result
 
@@ -92,12 +94,12 @@ def _build_importer_component_spec(
     """Builds an importer component spec.
 
     Args:
-        importer_base_name: The base name of the importer node.
-        artifact_type_schema: The user specified artifact type schema of the
-            artifact to be imported.
+      importer_base_name: The base name of the importer node.
+      artifact_type_schema: The user specified artifact type schema of the
+        artifact to be imported.
 
     Returns:
-        An importer node component spec.
+      An importer node component spec.
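+
+    Example (mirroring the unit test below)::
+
+      component_spec = _build_importer_component_spec(
+          importer_base_name='importer',
+          artifact_type_schema=pipeline_spec_pb2.ArtifactTypeSchema(
+              schema_title='system.Artifact'))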
""" result = pipeline_spec_pb2.ComponentSpec() result.executor_label = dsl_utils.sanitize_executor_label( @@ -154,7 +156,9 @@ def importer(artifact_uri: Union[_pipeline_param.PipelineParam, str], artifact_type_schema = type_utils.get_artifact_type_schema(artifact_class) task.importer_spec = _build_importer_spec( - artifact_uri=artifact_uri, artifact_type_schema=artifact_type_schema) + artifact_uri=artifact_uri, + artifact_type_schema=artifact_type_schema, + reimport=reimport) task.task_spec = _build_importer_task_spec( importer_base_name=task.name, artifact_uri=artifact_uri) task.component_spec = _build_importer_component_spec( diff --git a/sdk/python/kfp/v2/components/importer_node_test.py b/sdk/python/kfp/v2/components/importer_node_test.py index 041591b2742..74bafcabaa8 100644 --- a/sdk/python/kfp/v2/components/importer_node_test.py +++ b/sdk/python/kfp/v2/components/importer_node_test.py @@ -23,6 +23,9 @@ class ImporterNodeTest(parameterized.TestCase): + def setUp(self): + self.maxDiff = None + @parameterized.parameters( { # artifact_uri is a constant value @@ -30,15 +33,16 @@ class ImporterNodeTest(parameterized.TestCase): 'gs://artifact', 'artifact_type_schema': pb.ArtifactTypeSchema(schema_title='system.Dataset'), + 'reimport': + True, 'expected_result': { 'artifactUri': { - 'constantValue': { - 'stringValue': 'gs://artifact' - } + 'constant': 'gs://artifact' }, 'typeSchema': { 'schemaTitle': 'system.Dataset' - } + }, + 'reimport': True } }, { @@ -47,23 +51,27 @@ class ImporterNodeTest(parameterized.TestCase): _pipeline_param.PipelineParam(name='uri_to_import'), 'artifact_type_schema': pb.ArtifactTypeSchema(schema_title='system.Model'), + 'reimport': + False, 'expected_result': { 'artifactUri': { 'runtimeParameter': 'uri' }, 'typeSchema': { 'schemaTitle': 'system.Model' - } + }, + 'reimport': False }, }) def test_build_importer_spec(self, input_uri, artifact_type_schema, - expected_result): + reimport, expected_result): expected_importer_spec = pb.PipelineDeploymentConfig.ImporterSpec() json_format.ParseDict(expected_result, expected_importer_spec) importer_spec = importer_node._build_importer_spec( - artifact_uri=input_uri, artifact_type_schema=artifact_type_schema) + artifact_uri=input_uri, + artifact_type_schema=artifact_type_schema, + reimport=reimport) - self.maxDiff = None self.assertEqual(expected_importer_spec, importer_spec) @parameterized.parameters( @@ -76,9 +84,7 @@ def test_build_importer_spec(self, input_uri, artifact_type_schema, 'parameters': { 'uri': { 'runtimeValue': { - 'constantValue': { - 'stringValue': 'gs://artifact' - } + 'constant': 'gs://artifact' } } } @@ -113,7 +119,6 @@ def test_build_importer_task_spec(self, importer_name, input_uri, task_spec = importer_node._build_importer_task_spec( importer_base_name=importer_name, artifact_uri=input_uri) - self.maxDiff = None self.assertEqual(expected_task_spec, task_spec) def test_build_importer_component_spec(self): @@ -144,7 +149,6 @@ def test_build_importer_component_spec(self): artifact_type_schema=pb.ArtifactTypeSchema( schema_title='system.Artifact')) - self.maxDiff = None self.assertEqual(expected_importer_comp_spec, importer_comp_spec) def test_import_with_invalid_artifact_uri_value_should_fail(self): From 2e945750cb1758eea6db8453b437e57e68152b4a Mon Sep 17 00:00:00 2001 From: capri-xiyue <52932582+capri-xiyue@users.noreply.github.com> Date: Fri, 29 Oct 2021 04:53:51 +0000 Subject: [PATCH 28/31] feat(v2): V2 create run api (#6689) * added draft of create v2 pipeline run * fixed broken UT and added UT for 
parsing template * modified run apis to support v2 IR spec * remove temporary patch * fixed dependency * fixed build failure * finished draft * finished create job and run * refactor template and fixed broken UT * updated go license * fixed build failure * fixed build * added UT * modified UT * fixed build failure * fixed license --- backend/api/go_client/pipeline_spec.pb.go | 263 +++++++++++++- .../job_model/api_pipeline_spec.go | 27 +- .../api/go_http_client/job_model/api_value.go | 49 +++ .../job_model/pipeline_spec_runtime_config.go | 83 +++++ .../run_model/api_pipeline_spec.go | 27 +- .../api/go_http_client/run_model/api_value.go | 49 +++ .../run_model/pipeline_spec_runtime_config.go | 83 +++++ backend/api/pipeline_spec.proto | 33 +- backend/api/swagger/job.swagger.json | 43 ++- .../swagger/kfp_api_single_file.swagger.json | 43 ++- backend/api/swagger/run.swagger.json | 43 ++- backend/src/apiserver/common/config.go | 1 - backend/src/apiserver/common/util.go | 23 ++ backend/src/apiserver/config/config.json | 3 +- .../src/apiserver/resource/model_converter.go | 112 ++++-- .../resource/model_converter_test.go | 341 +++++++++++++----- .../apiserver/resource/resource_manager.go | 202 +++-------- .../resource/resource_manager_test.go | 143 +++++++- .../resource/resource_manager_util.go | 150 +------- .../resource/resource_manager_util_test.go | 126 +------ backend/src/apiserver/server/api_converter.go | 3 +- .../src/apiserver/server/job_server_test.go | 4 +- .../src/apiserver/server/run_server_test.go | 6 +- backend/src/apiserver/server/util.go | 55 ++- backend/src/apiserver/server/util_test.go | 8 +- backend/src/apiserver/storage/run_store.go | 4 +- .../src/apiserver/template/argo_template.go | 203 +++++++++++ backend/src/apiserver/template/template.go | 308 ++++++++++++++++ .../src/apiserver/template/template_test.go | 336 +++++++++++++++++ backend/src/apiserver/template/v2_template.go | 158 ++++++++ .../client/api_server/pipeline_client.go | 11 +- .../client/api_server/pipeline_client_fake.go | 8 +- backend/src/common/util/template_util.go | 277 -------------- backend/src/common/util/template_util_test.go | 88 ----- backend/test/integration/pipeline_api_test.go | 5 +- .../integration/pipeline_version_api_test.go | 5 +- backend/test/integration/upgrade_test.go | 3 +- backend/third_party_licenses/apiserver.csv | 32 +- backend/third_party_licenses/cache_server.csv | 63 +--- .../persistence_agent.csv | 65 +--- backend/third_party_licenses/swf.csv | 52 +-- backend/third_party_licenses/viewer.csv | 29 +- go-licenses.yaml | 6 +- go.mod | 14 +- go.sum | 222 +++++++++++- 45 files changed, 2657 insertions(+), 1152 deletions(-) create mode 100644 backend/api/go_http_client/job_model/api_value.go create mode 100644 backend/api/go_http_client/job_model/pipeline_spec_runtime_config.go create mode 100644 backend/api/go_http_client/run_model/api_value.go create mode 100644 backend/api/go_http_client/run_model/pipeline_spec_runtime_config.go create mode 100644 backend/src/apiserver/template/argo_template.go create mode 100644 backend/src/apiserver/template/template.go create mode 100644 backend/src/apiserver/template/template_test.go create mode 100644 backend/src/apiserver/template/v2_template.go delete mode 100644 backend/src/common/util/template_util.go delete mode 100644 backend/src/common/util/template_util_test.go diff --git a/backend/api/go_client/pipeline_spec.pb.go b/backend/api/go_client/pipeline_spec.pb.go index 560d859e0ac..5eeccfd7566 100644 --- a/backend/api/go_client/pipeline_spec.pb.go 
+++ b/backend/api/go_client/pipeline_spec.pb.go @@ -51,8 +51,10 @@ type PipelineSpec struct { PipelineManifest string `protobuf:"bytes,3,opt,name=pipeline_manifest,json=pipelineManifest,proto3" json:"pipeline_manifest,omitempty"` // The parameter user provide to inject to the pipeline JSON. // If a default value of a parameter exist in the JSON, - // the value user provided here will replace. + // the value user provided here will replace. V1 only Parameters []*Parameter `protobuf:"bytes,4,rep,name=parameters,proto3" json:"parameters,omitempty"` + // Runtime config of the pipeline. V2 only + RuntimeConfig *PipelineSpec_RuntimeConfig `protobuf:"bytes,6,opt,name=runtime_config,json=runtimeConfig,proto3" json:"runtime_config,omitempty"` } func (x *PipelineSpec) Reset() { @@ -122,6 +124,173 @@ func (x *PipelineSpec) GetParameters() []*Parameter { return nil } +func (x *PipelineSpec) GetRuntimeConfig() *PipelineSpec_RuntimeConfig { + if x != nil { + return x.RuntimeConfig + } + return nil +} + +// Value is the value of the field. +type Value struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // Types that are assignable to Value: + // *Value_IntValue + // *Value_DoubleValue + // *Value_StringValue + Value isValue_Value `protobuf_oneof:"value"` +} + +func (x *Value) Reset() { + *x = Value{} + if protoimpl.UnsafeEnabled { + mi := &file_backend_api_pipeline_spec_proto_msgTypes[1] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *Value) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*Value) ProtoMessage() {} + +func (x *Value) ProtoReflect() protoreflect.Message { + mi := &file_backend_api_pipeline_spec_proto_msgTypes[1] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use Value.ProtoReflect.Descriptor instead. +func (*Value) Descriptor() ([]byte, []int) { + return file_backend_api_pipeline_spec_proto_rawDescGZIP(), []int{1} +} + +func (m *Value) GetValue() isValue_Value { + if m != nil { + return m.Value + } + return nil +} + +func (x *Value) GetIntValue() int64 { + if x, ok := x.GetValue().(*Value_IntValue); ok { + return x.IntValue + } + return 0 +} + +func (x *Value) GetDoubleValue() float64 { + if x, ok := x.GetValue().(*Value_DoubleValue); ok { + return x.DoubleValue + } + return 0 +} + +func (x *Value) GetStringValue() string { + if x, ok := x.GetValue().(*Value_StringValue); ok { + return x.StringValue + } + return "" +} + +type isValue_Value interface { + isValue_Value() +} + +type Value_IntValue struct { + // An integer value + IntValue int64 `protobuf:"varint,1,opt,name=int_value,json=intValue,proto3,oneof"` +} + +type Value_DoubleValue struct { + // A double value + DoubleValue float64 `protobuf:"fixed64,2,opt,name=double_value,json=doubleValue,proto3,oneof"` +} + +type Value_StringValue struct { + // A string value + StringValue string `protobuf:"bytes,3,opt,name=string_value,json=stringValue,proto3,oneof"` +} + +func (*Value_IntValue) isValue_Value() {} + +func (*Value_DoubleValue) isValue_Value() {} + +func (*Value_StringValue) isValue_Value() {} + +// The runtime config of a PipelineSpec. 
+type PipelineSpec_RuntimeConfig struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // The runtime parameters of the PipelineSpec. The parameters will be + // used to replace the placeholders + // at runtime. + Parameters map[string]*Value `protobuf:"bytes,1,rep,name=parameters,proto3" json:"parameters,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + // A path in a object store bucket which will be treated as the root + // output directory of the pipeline. It is used by the system to + // generate the paths of output artifacts. Ref:(https://www.kubeflow.org/docs/components/pipelines/pipeline-root/) + PipelineRoot string `protobuf:"bytes,2,opt,name=pipeline_root,json=pipelineRoot,proto3" json:"pipeline_root,omitempty"` +} + +func (x *PipelineSpec_RuntimeConfig) Reset() { + *x = PipelineSpec_RuntimeConfig{} + if protoimpl.UnsafeEnabled { + mi := &file_backend_api_pipeline_spec_proto_msgTypes[2] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *PipelineSpec_RuntimeConfig) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*PipelineSpec_RuntimeConfig) ProtoMessage() {} + +func (x *PipelineSpec_RuntimeConfig) ProtoReflect() protoreflect.Message { + mi := &file_backend_api_pipeline_spec_proto_msgTypes[2] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use PipelineSpec_RuntimeConfig.ProtoReflect.Descriptor instead. +func (*PipelineSpec_RuntimeConfig) Descriptor() ([]byte, []int) { + return file_backend_api_pipeline_spec_proto_rawDescGZIP(), []int{0, 0} +} + +func (x *PipelineSpec_RuntimeConfig) GetParameters() map[string]*Value { + if x != nil { + return x.Parameters + } + return nil +} + +func (x *PipelineSpec_RuntimeConfig) GetPipelineRoot() string { + if x != nil { + return x.PipelineRoot + } + return "" +} + var File_backend_api_pipeline_spec_proto protoreflect.FileDescriptor var file_backend_api_pipeline_spec_proto_rawDesc = []byte{ @@ -129,7 +298,7 @@ var file_backend_api_pipeline_spec_proto_rawDesc = []byte{ 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x5f, 0x73, 0x70, 0x65, 0x63, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x12, 0x03, 0x61, 0x70, 0x69, 0x1a, 0x1b, 0x62, 0x61, 0x63, 0x6b, 0x65, 0x6e, 0x64, 0x2f, 0x61, 0x70, 0x69, 0x2f, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x2e, 0x70, 0x72, - 0x6f, 0x74, 0x6f, 0x22, 0xde, 0x01, 0x0a, 0x0c, 0x50, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, + 0x6f, 0x74, 0x6f, 0x22, 0xf9, 0x03, 0x0a, 0x0c, 0x50, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x53, 0x70, 0x65, 0x63, 0x12, 0x1f, 0x0a, 0x0b, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x5f, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0a, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x49, 0x64, 0x12, 0x23, 0x0a, 0x0d, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, @@ -143,11 +312,36 @@ var file_backend_api_pipeline_spec_proto_rawDesc = []byte{ 0x66, 0x65, 0x73, 0x74, 0x12, 0x2e, 0x0a, 0x0a, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x73, 0x18, 0x04, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x0e, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x52, 0x0a, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x65, - 0x74, 0x65, 0x72, 0x73, 0x42, 0x35, 0x5a, 0x33, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, - 
0x6f, 0x6d, 0x2f, 0x6b, 0x75, 0x62, 0x65, 0x66, 0x6c, 0x6f, 0x77, 0x2f, 0x70, 0x69, 0x70, 0x65, - 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2f, 0x62, 0x61, 0x63, 0x6b, 0x65, 0x6e, 0x64, 0x2f, 0x61, 0x70, - 0x69, 0x2f, 0x67, 0x6f, 0x5f, 0x63, 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x62, 0x06, 0x70, 0x72, 0x6f, - 0x74, 0x6f, 0x33, + 0x74, 0x65, 0x72, 0x73, 0x12, 0x46, 0x0a, 0x0e, 0x72, 0x75, 0x6e, 0x74, 0x69, 0x6d, 0x65, 0x5f, + 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x18, 0x06, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1f, 0x2e, 0x61, + 0x70, 0x69, 0x2e, 0x50, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x53, 0x70, 0x65, 0x63, 0x2e, + 0x52, 0x75, 0x6e, 0x74, 0x69, 0x6d, 0x65, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x52, 0x0d, 0x72, + 0x75, 0x6e, 0x74, 0x69, 0x6d, 0x65, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x1a, 0xd0, 0x01, 0x0a, + 0x0d, 0x52, 0x75, 0x6e, 0x74, 0x69, 0x6d, 0x65, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x12, 0x4f, + 0x0a, 0x0a, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x73, 0x18, 0x01, 0x20, 0x03, + 0x28, 0x0b, 0x32, 0x2f, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x50, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, + 0x65, 0x53, 0x70, 0x65, 0x63, 0x2e, 0x52, 0x75, 0x6e, 0x74, 0x69, 0x6d, 0x65, 0x43, 0x6f, 0x6e, + 0x66, 0x69, 0x67, 0x2e, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x73, 0x45, 0x6e, + 0x74, 0x72, 0x79, 0x52, 0x0a, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x73, 0x12, + 0x23, 0x0a, 0x0d, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x5f, 0x72, 0x6f, 0x6f, 0x74, + 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0c, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, + 0x52, 0x6f, 0x6f, 0x74, 0x1a, 0x49, 0x0a, 0x0f, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, + 0x72, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, + 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x20, 0x0a, 0x05, 0x76, 0x61, 0x6c, + 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x0a, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x56, + 0x61, 0x6c, 0x75, 0x65, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x22, + 0x79, 0x0a, 0x05, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x12, 0x1d, 0x0a, 0x09, 0x69, 0x6e, 0x74, 0x5f, + 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x03, 0x48, 0x00, 0x52, 0x08, 0x69, + 0x6e, 0x74, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x12, 0x23, 0x0a, 0x0c, 0x64, 0x6f, 0x75, 0x62, 0x6c, + 0x65, 0x5f, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x01, 0x48, 0x00, 0x52, + 0x0b, 0x64, 0x6f, 0x75, 0x62, 0x6c, 0x65, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x12, 0x23, 0x0a, 0x0c, + 0x73, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x5f, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x03, 0x20, 0x01, + 0x28, 0x09, 0x48, 0x00, 0x52, 0x0b, 0x73, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x56, 0x61, 0x6c, 0x75, + 0x65, 0x42, 0x07, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x42, 0x35, 0x5a, 0x33, 0x67, 0x69, + 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x6b, 0x75, 0x62, 0x65, 0x66, 0x6c, 0x6f, + 0x77, 0x2f, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2f, 0x62, 0x61, 0x63, 0x6b, + 0x65, 0x6e, 0x64, 0x2f, 0x61, 0x70, 0x69, 0x2f, 0x67, 0x6f, 0x5f, 0x63, 0x6c, 0x69, 0x65, 0x6e, + 0x74, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, } var ( @@ -162,18 +356,24 @@ func file_backend_api_pipeline_spec_proto_rawDescGZIP() []byte { return file_backend_api_pipeline_spec_proto_rawDescData } -var file_backend_api_pipeline_spec_proto_msgTypes = make([]protoimpl.MessageInfo, 1) +var file_backend_api_pipeline_spec_proto_msgTypes = make([]protoimpl.MessageInfo, 4) var 
file_backend_api_pipeline_spec_proto_goTypes = []interface{}{ - (*PipelineSpec)(nil), // 0: api.PipelineSpec - (*Parameter)(nil), // 1: api.Parameter + (*PipelineSpec)(nil), // 0: api.PipelineSpec + (*Value)(nil), // 1: api.Value + (*PipelineSpec_RuntimeConfig)(nil), // 2: api.PipelineSpec.RuntimeConfig + nil, // 3: api.PipelineSpec.RuntimeConfig.ParametersEntry + (*Parameter)(nil), // 4: api.Parameter } var file_backend_api_pipeline_spec_proto_depIdxs = []int32{ - 1, // 0: api.PipelineSpec.parameters:type_name -> api.Parameter - 1, // [1:1] is the sub-list for method output_type - 1, // [1:1] is the sub-list for method input_type - 1, // [1:1] is the sub-list for extension type_name - 1, // [1:1] is the sub-list for extension extendee - 0, // [0:1] is the sub-list for field type_name + 4, // 0: api.PipelineSpec.parameters:type_name -> api.Parameter + 2, // 1: api.PipelineSpec.runtime_config:type_name -> api.PipelineSpec.RuntimeConfig + 3, // 2: api.PipelineSpec.RuntimeConfig.parameters:type_name -> api.PipelineSpec.RuntimeConfig.ParametersEntry + 1, // 3: api.PipelineSpec.RuntimeConfig.ParametersEntry.value:type_name -> api.Value + 4, // [4:4] is the sub-list for method output_type + 4, // [4:4] is the sub-list for method input_type + 4, // [4:4] is the sub-list for extension type_name + 4, // [4:4] is the sub-list for extension extendee + 0, // [0:4] is the sub-list for field type_name } func init() { file_backend_api_pipeline_spec_proto_init() } @@ -195,6 +395,35 @@ func file_backend_api_pipeline_spec_proto_init() { return nil } } + file_backend_api_pipeline_spec_proto_msgTypes[1].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*Value); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_backend_api_pipeline_spec_proto_msgTypes[2].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*PipelineSpec_RuntimeConfig); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + } + file_backend_api_pipeline_spec_proto_msgTypes[1].OneofWrappers = []interface{}{ + (*Value_IntValue)(nil), + (*Value_DoubleValue)(nil), + (*Value_StringValue)(nil), } type x struct{} out := protoimpl.TypeBuilder{ @@ -202,7 +431,7 @@ func file_backend_api_pipeline_spec_proto_init() { GoPackagePath: reflect.TypeOf(x{}).PkgPath(), RawDescriptor: file_backend_api_pipeline_spec_proto_rawDesc, NumEnums: 0, - NumMessages: 1, + NumMessages: 4, NumExtensions: 0, NumServices: 0, }, diff --git a/backend/api/go_http_client/job_model/api_pipeline_spec.go b/backend/api/go_http_client/job_model/api_pipeline_spec.go index 66b66c9e298..d6341c7dec1 100644 --- a/backend/api/go_http_client/job_model/api_pipeline_spec.go +++ b/backend/api/go_http_client/job_model/api_pipeline_spec.go @@ -20,7 +20,7 @@ type APIPipelineSpec struct { // The parameter user provide to inject to the pipeline JSON. // If a default value of a parameter exist in the JSON, - // the value user provided here will replace. + // the value user provided here will replace. V1 only Parameters []*APIParameter `json:"parameters"` // Optional input field. The ID of the pipeline user uploaded before. @@ -33,6 +33,9 @@ type APIPipelineSpec struct { // Not empty if the pipeline id is not empty. PipelineName string `json:"pipeline_name,omitempty"` + // Runtime config of the pipeline. 
V2 only + RuntimeConfig *PipelineSpecRuntimeConfig `json:"runtime_config,omitempty"` + // Optional input field. The marshalled raw argo JSON workflow. // This will be deprecated when pipeline_manifest is in use. WorkflowManifest string `json:"workflow_manifest,omitempty"` @@ -46,6 +49,10 @@ func (m *APIPipelineSpec) Validate(formats strfmt.Registry) error { res = append(res, err) } + if err := m.validateRuntimeConfig(formats); err != nil { + res = append(res, err) + } + if len(res) > 0 { return errors.CompositeValidationError(res...) } @@ -77,6 +84,24 @@ func (m *APIPipelineSpec) validateParameters(formats strfmt.Registry) error { return nil } +func (m *APIPipelineSpec) validateRuntimeConfig(formats strfmt.Registry) error { + + if swag.IsZero(m.RuntimeConfig) { // not required + return nil + } + + if m.RuntimeConfig != nil { + if err := m.RuntimeConfig.Validate(formats); err != nil { + if ve, ok := err.(*errors.Validation); ok { + return ve.ValidateName("runtime_config") + } + return err + } + } + + return nil +} + // MarshalBinary interface implementation func (m *APIPipelineSpec) MarshalBinary() ([]byte, error) { if m == nil { diff --git a/backend/api/go_http_client/job_model/api_value.go b/backend/api/go_http_client/job_model/api_value.go new file mode 100644 index 00000000000..62e690fa971 --- /dev/null +++ b/backend/api/go_http_client/job_model/api_value.go @@ -0,0 +1,49 @@ +// Code generated by go-swagger; DO NOT EDIT. + +package job_model + +// This file was generated by the swagger tool. +// Editing this file might prove futile when you re-run the swagger generate command + +import ( + strfmt "github.com/go-openapi/strfmt" + + "github.com/go-openapi/swag" +) + +// APIValue Value is the value of the field. +// swagger:model apiValue +type APIValue struct { + + // A double value + DoubleValue float64 `json:"double_value,omitempty"` + + // An integer value + IntValue string `json:"int_value,omitempty"` + + // A string value + StringValue string `json:"string_value,omitempty"` +} + +// Validate validates this api value +func (m *APIValue) Validate(formats strfmt.Registry) error { + return nil +} + +// MarshalBinary interface implementation +func (m *APIValue) MarshalBinary() ([]byte, error) { + if m == nil { + return nil, nil + } + return swag.WriteJSON(m) +} + +// UnmarshalBinary interface implementation +func (m *APIValue) UnmarshalBinary(b []byte) error { + var res APIValue + if err := swag.ReadJSON(b, &res); err != nil { + return err + } + *m = res + return nil +} diff --git a/backend/api/go_http_client/job_model/pipeline_spec_runtime_config.go b/backend/api/go_http_client/job_model/pipeline_spec_runtime_config.go new file mode 100644 index 00000000000..3672eb05918 --- /dev/null +++ b/backend/api/go_http_client/job_model/pipeline_spec_runtime_config.go @@ -0,0 +1,83 @@ +// Code generated by go-swagger; DO NOT EDIT. + +package job_model + +// This file was generated by the swagger tool. +// Editing this file might prove futile when you re-run the swagger generate command + +import ( + strfmt "github.com/go-openapi/strfmt" + + "github.com/go-openapi/errors" + "github.com/go-openapi/swag" + "github.com/go-openapi/validate" +) + +// PipelineSpecRuntimeConfig The runtime config of a PipelineSpec. +// swagger:model PipelineSpecRuntimeConfig +type PipelineSpecRuntimeConfig struct { + + // The runtime parameters of the PipelineSpec. The parameters will be + // used to replace the placeholders + // at runtime. 
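// A hedged usage sketch (editorial, not generated code): how a caller might
// fill this map, with APIValue field names taken from api_value.go above.
// Note that int64 values travel as JSON strings in the swagger model, which
// is why IntValue is a string below:
//
//   cfg := PipelineSpecRuntimeConfig{
//       Parameters: map[string]APIValue{
//           "learning_rate": {DoubleValue: 0.01},
//           "epochs":        {IntValue: "10"},
//       },
//       PipelineRoot: "gs://example-bucket/pipeline-root", // illustrative path
//   }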
+ Parameters map[string]APIValue `json:"parameters,omitempty"` + + // A path in a object store bucket which will be treated as the root + // output directory of the pipeline. It is used by the system to + // generate the paths of output artifacts. Ref:(https://www.kubeflow.org/docs/components/pipelines/pipeline-root/) + PipelineRoot string `json:"pipeline_root,omitempty"` +} + +// Validate validates this pipeline spec runtime config +func (m *PipelineSpecRuntimeConfig) Validate(formats strfmt.Registry) error { + var res []error + + if err := m.validateParameters(formats); err != nil { + res = append(res, err) + } + + if len(res) > 0 { + return errors.CompositeValidationError(res...) + } + return nil +} + +func (m *PipelineSpecRuntimeConfig) validateParameters(formats strfmt.Registry) error { + + if swag.IsZero(m.Parameters) { // not required + return nil + } + + for k := range m.Parameters { + + if err := validate.Required("parameters"+"."+k, "body", m.Parameters[k]); err != nil { + return err + } + if val, ok := m.Parameters[k]; ok { + if err := val.Validate(formats); err != nil { + return err + } + } + + } + + return nil +} + +// MarshalBinary interface implementation +func (m *PipelineSpecRuntimeConfig) MarshalBinary() ([]byte, error) { + if m == nil { + return nil, nil + } + return swag.WriteJSON(m) +} + +// UnmarshalBinary interface implementation +func (m *PipelineSpecRuntimeConfig) UnmarshalBinary(b []byte) error { + var res PipelineSpecRuntimeConfig + if err := swag.ReadJSON(b, &res); err != nil { + return err + } + *m = res + return nil +} diff --git a/backend/api/go_http_client/run_model/api_pipeline_spec.go b/backend/api/go_http_client/run_model/api_pipeline_spec.go index ba829c2291a..49edc9a4d80 100644 --- a/backend/api/go_http_client/run_model/api_pipeline_spec.go +++ b/backend/api/go_http_client/run_model/api_pipeline_spec.go @@ -20,7 +20,7 @@ type APIPipelineSpec struct { // The parameter user provide to inject to the pipeline JSON. // If a default value of a parameter exist in the JSON, - // the value user provided here will replace. + // the value user provided here will replace. V1 only Parameters []*APIParameter `json:"parameters"` // Optional input field. The ID of the pipeline user uploaded before. @@ -33,6 +33,9 @@ type APIPipelineSpec struct { // Not empty if the pipeline id is not empty. PipelineName string `json:"pipeline_name,omitempty"` + // Runtime config of the pipeline. V2 only + RuntimeConfig *PipelineSpecRuntimeConfig `json:"runtime_config,omitempty"` + // Optional input field. The marshalled raw argo JSON workflow. // This will be deprecated when pipeline_manifest is in use. WorkflowManifest string `json:"workflow_manifest,omitempty"` @@ -46,6 +49,10 @@ func (m *APIPipelineSpec) Validate(formats strfmt.Registry) error { res = append(res, err) } + if err := m.validateRuntimeConfig(formats); err != nil { + res = append(res, err) + } + if len(res) > 0 { return errors.CompositeValidationError(res...) 
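// Note: Validate collects every field error into res and returns one
// CompositeValidationError rather than failing fast, so a caller can
// surface all invalid fields of the spec at once.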
} @@ -77,6 +84,24 @@ func (m *APIPipelineSpec) validateParameters(formats strfmt.Registry) error { return nil } +func (m *APIPipelineSpec) validateRuntimeConfig(formats strfmt.Registry) error { + + if swag.IsZero(m.RuntimeConfig) { // not required + return nil + } + + if m.RuntimeConfig != nil { + if err := m.RuntimeConfig.Validate(formats); err != nil { + if ve, ok := err.(*errors.Validation); ok { + return ve.ValidateName("runtime_config") + } + return err + } + } + + return nil +} + // MarshalBinary interface implementation func (m *APIPipelineSpec) MarshalBinary() ([]byte, error) { if m == nil { diff --git a/backend/api/go_http_client/run_model/api_value.go b/backend/api/go_http_client/run_model/api_value.go new file mode 100644 index 00000000000..9cc2c14d709 --- /dev/null +++ b/backend/api/go_http_client/run_model/api_value.go @@ -0,0 +1,49 @@ +// Code generated by go-swagger; DO NOT EDIT. + +package run_model + +// This file was generated by the swagger tool. +// Editing this file might prove futile when you re-run the swagger generate command + +import ( + strfmt "github.com/go-openapi/strfmt" + + "github.com/go-openapi/swag" +) + +// APIValue Value is the value of the field. +// swagger:model apiValue +type APIValue struct { + + // A double value + DoubleValue float64 `json:"double_value,omitempty"` + + // An integer value + IntValue string `json:"int_value,omitempty"` + + // A string value + StringValue string `json:"string_value,omitempty"` +} + +// Validate validates this api value +func (m *APIValue) Validate(formats strfmt.Registry) error { + return nil +} + +// MarshalBinary interface implementation +func (m *APIValue) MarshalBinary() ([]byte, error) { + if m == nil { + return nil, nil + } + return swag.WriteJSON(m) +} + +// UnmarshalBinary interface implementation +func (m *APIValue) UnmarshalBinary(b []byte) error { + var res APIValue + if err := swag.ReadJSON(b, &res); err != nil { + return err + } + *m = res + return nil +} diff --git a/backend/api/go_http_client/run_model/pipeline_spec_runtime_config.go b/backend/api/go_http_client/run_model/pipeline_spec_runtime_config.go new file mode 100644 index 00000000000..eaa49886074 --- /dev/null +++ b/backend/api/go_http_client/run_model/pipeline_spec_runtime_config.go @@ -0,0 +1,83 @@ +// Code generated by go-swagger; DO NOT EDIT. + +package run_model + +// This file was generated by the swagger tool. +// Editing this file might prove futile when you re-run the swagger generate command + +import ( + strfmt "github.com/go-openapi/strfmt" + + "github.com/go-openapi/errors" + "github.com/go-openapi/swag" + "github.com/go-openapi/validate" +) + +// PipelineSpecRuntimeConfig The runtime config of a PipelineSpec. +// swagger:model PipelineSpecRuntimeConfig +type PipelineSpecRuntimeConfig struct { + + // The runtime parameters of the PipelineSpec. The parameters will be + // used to replace the placeholders + // at runtime. + Parameters map[string]APIValue `json:"parameters,omitempty"` + + // A path in a object store bucket which will be treated as the root + // output directory of the pipeline. It is used by the system to + // generate the paths of output artifacts. 
Ref:(https://www.kubeflow.org/docs/components/pipelines/pipeline-root/) + PipelineRoot string `json:"pipeline_root,omitempty"` +} + +// Validate validates this pipeline spec runtime config +func (m *PipelineSpecRuntimeConfig) Validate(formats strfmt.Registry) error { + var res []error + + if err := m.validateParameters(formats); err != nil { + res = append(res, err) + } + + if len(res) > 0 { + return errors.CompositeValidationError(res...) + } + return nil +} + +func (m *PipelineSpecRuntimeConfig) validateParameters(formats strfmt.Registry) error { + + if swag.IsZero(m.Parameters) { // not required + return nil + } + + for k := range m.Parameters { + + if err := validate.Required("parameters"+"."+k, "body", m.Parameters[k]); err != nil { + return err + } + if val, ok := m.Parameters[k]; ok { + if err := val.Validate(formats); err != nil { + return err + } + } + + } + + return nil +} + +// MarshalBinary interface implementation +func (m *PipelineSpecRuntimeConfig) MarshalBinary() ([]byte, error) { + if m == nil { + return nil, nil + } + return swag.WriteJSON(m) +} + +// UnmarshalBinary interface implementation +func (m *PipelineSpecRuntimeConfig) UnmarshalBinary(b []byte) error { + var res PipelineSpecRuntimeConfig + if err := swag.ReadJSON(b, &res); err != nil { + return err + } + *m = res + return nil +} diff --git a/backend/api/pipeline_spec.proto b/backend/api/pipeline_spec.proto index e618031f5dd..e32c5adeba5 100644 --- a/backend/api/pipeline_spec.proto +++ b/backend/api/pipeline_spec.proto @@ -36,6 +36,37 @@ message PipelineSpec { // The parameter user provide to inject to the pipeline JSON. // If a default value of a parameter exist in the JSON, - // the value user provided here will replace. + // the value user provided here will replace. V1 only repeated Parameter parameters = 4; + + // The runtime config of a PipelineSpec. + message RuntimeConfig { + // The runtime parameters of the PipelineSpec. The parameters will be + // used to replace the placeholders + // at runtime. + map parameters = 1; + + // A path in a object store bucket which will be treated as the root + // output directory of the pipeline. It is used by the system to + // generate the paths of output artifacts. Ref:(https://www.kubeflow.org/docs/components/pipelines/pipeline-root/) + string pipeline_root = 2; + + } + + // Runtime config of the pipeline. V2 only + RuntimeConfig runtime_config = 6; } + +// Value is the value of the field. +message Value { + oneof value { + // An integer value + int64 int_value = 1; + // A double value + double double_value = 2; + // A string value + string string_value = 3; + } +} + + diff --git a/backend/api/swagger/job.swagger.json b/backend/api/swagger/job.swagger.json index e4c90db0add..731301489ce 100644 --- a/backend/api/swagger/job.swagger.json +++ b/backend/api/swagger/job.swagger.json @@ -262,6 +262,23 @@ "default": "UNKNOWN_MODE", "description": "Required input.\n\n - DISABLED: The job won't schedule any run if disabled." }, + "PipelineSpecRuntimeConfig": { + "type": "object", + "properties": { + "parameters": { + "type": "object", + "additionalProperties": { + "$ref": "#/definitions/apiValue" + }, + "description": "The runtime parameters of the PipelineSpec. The parameters will be\nused to replace the placeholders\nat runtime." + }, + "pipeline_root": { + "type": "string", + "title": "A path in a object store bucket which will be treated as the root\noutput directory of the pipeline. It is used by the system to\ngenerate the paths of output artifacts. 
Ref:(https://www.kubeflow.org/docs/components/pipelines/pipeline-root/)" + } + }, + "description": "The runtime config of a PipelineSpec." + }, "apiCronSchedule": { "type": "object", "properties": { @@ -431,7 +448,11 @@ "items": { "$ref": "#/definitions/apiParameter" }, - "description": "The parameter user provide to inject to the pipeline JSON.\nIf a default value of a parameter exist in the JSON,\nthe value user provided here will replace." + "title": "The parameter user provide to inject to the pipeline JSON.\nIf a default value of a parameter exist in the JSON,\nthe value user provided here will replace. V1 only" + }, + "runtime_config": { + "$ref": "#/definitions/PipelineSpecRuntimeConfig", + "title": "Runtime config of the pipeline. V2 only" } } }, @@ -515,6 +536,26 @@ }, "description": "Trigger defines what starts a pipeline run." }, + "apiValue": { + "type": "object", + "properties": { + "int_value": { + "type": "string", + "format": "int64", + "title": "An integer value" + }, + "double_value": { + "type": "number", + "format": "double", + "title": "A double value" + }, + "string_value": { + "type": "string", + "title": "A string value" + } + }, + "description": "Value is the value of the field." + }, "protobufAny": { "type": "object", "properties": { diff --git a/backend/api/swagger/kfp_api_single_file.swagger.json b/backend/api/swagger/kfp_api_single_file.swagger.json index f5a91348d56..a0cb82eefb3 100644 --- a/backend/api/swagger/kfp_api_single_file.swagger.json +++ b/backend/api/swagger/kfp_api_single_file.swagger.json @@ -1454,6 +1454,23 @@ } }, "definitions": { + "PipelineSpecRuntimeConfig": { + "type": "object", + "properties": { + "parameters": { + "type": "object", + "additionalProperties": { + "$ref": "#/definitions/apiValue" + }, + "description": "The runtime parameters of the PipelineSpec. The parameters will be\nused to replace the placeholders\nat runtime." + }, + "pipeline_root": { + "type": "string", + "title": "A path in a object store bucket which will be treated as the root\noutput directory of the pipeline. It is used by the system to\ngenerate the paths of output artifacts. Ref:(https://www.kubeflow.org/docs/components/pipelines/pipeline-root/)" + } + }, + "description": "The runtime config of a PipelineSpec." + }, "ReportRunMetricsResponseReportRunMetricResult": { "type": "object", "properties": { @@ -1565,7 +1582,11 @@ "items": { "$ref": "#/definitions/apiParameter" }, - "description": "The parameter user provide to inject to the pipeline JSON.\nIf a default value of a parameter exist in the JSON,\nthe value user provided here will replace." + "title": "The parameter user provide to inject to the pipeline JSON.\nIf a default value of a parameter exist in the JSON,\nthe value user provided here will replace. V1 only" + }, + "runtime_config": { + "$ref": "#/definitions/PipelineSpecRuntimeConfig", + "title": "Runtime config of the pipeline. V2 only" } } }, @@ -1781,6 +1802,26 @@ } } }, + "apiValue": { + "type": "object", + "properties": { + "int_value": { + "type": "string", + "format": "int64", + "title": "An integer value" + }, + "double_value": { + "type": "number", + "format": "double", + "title": "A double value" + }, + "string_value": { + "type": "string", + "title": "A string value" + } + }, + "description": "Value is the value of the field." 
+ }, "protobufAny": { "type": "object", "properties": { diff --git a/backend/api/swagger/run.swagger.json b/backend/api/swagger/run.swagger.json index 1fbf2fdb518..754f2cd183d 100644 --- a/backend/api/swagger/run.swagger.json +++ b/backend/api/swagger/run.swagger.json @@ -403,6 +403,23 @@ } }, "definitions": { + "PipelineSpecRuntimeConfig": { + "type": "object", + "properties": { + "parameters": { + "type": "object", + "additionalProperties": { + "$ref": "#/definitions/apiValue" + }, + "description": "The runtime parameters of the PipelineSpec. The parameters will be\nused to replace the placeholders\nat runtime." + }, + "pipeline_root": { + "type": "string", + "title": "A path in a object store bucket which will be treated as the root\noutput directory of the pipeline. It is used by the system to\ngenerate the paths of output artifacts. Ref:(https://www.kubeflow.org/docs/components/pipelines/pipeline-root/)" + } + }, + "description": "The runtime config of a PipelineSpec." + }, "ReportRunMetricsResponseReportRunMetricResult": { "type": "object", "properties": { @@ -514,7 +531,11 @@ "items": { "$ref": "#/definitions/apiParameter" }, - "description": "The parameter user provide to inject to the pipeline JSON.\nIf a default value of a parameter exist in the JSON,\nthe value user provided here will replace." + "title": "The parameter user provide to inject to the pipeline JSON.\nIf a default value of a parameter exist in the JSON,\nthe value user provided here will replace. V1 only" + }, + "runtime_config": { + "$ref": "#/definitions/PipelineSpecRuntimeConfig", + "title": "Runtime config of the pipeline. V2 only" } } }, @@ -730,6 +751,26 @@ } } }, + "apiValue": { + "type": "object", + "properties": { + "int_value": { + "type": "string", + "format": "int64", + "title": "An integer value" + }, + "double_value": { + "type": "number", + "format": "double", + "title": "A double value" + }, + "string_value": { + "type": "string", + "title": "A string value" + } + }, + "description": "Value is the value of the field." 
+ }, "protobufAny": { "type": "object", "properties": { diff --git a/backend/src/apiserver/common/config.go b/backend/src/apiserver/common/config.go index b35243cdbb4..3bd508810d4 100644 --- a/backend/src/apiserver/common/config.go +++ b/backend/src/apiserver/common/config.go @@ -27,7 +27,6 @@ const ( MultiUserModeSharedReadAccess string = "MULTIUSER_SHARED_READ" PodNamespace string = "POD_NAMESPACE" CacheEnabled string = "CacheEnabled" - DefaultPipelineRunnerServiceAccount string = "DefaultPipelineRunnerServiceAccount" KubeflowUserIDHeader string = "KUBEFLOW_USERID_HEADER" KubeflowUserIDPrefix string = "KUBEFLOW_USERID_PREFIX" UpdatePipelineVersionByDefault string = "AUTO_UPDATE_PIPELINE_DEFAULT_VERSION" diff --git a/backend/src/apiserver/common/util.go b/backend/src/apiserver/common/util.go index aad08416f10..ad6a54b4bfc 100644 --- a/backend/src/apiserver/common/util.go +++ b/backend/src/apiserver/common/util.go @@ -16,7 +16,14 @@ package common import ( api "github.com/kubeflow/pipelines/backend/api/go_client" + "strings" ) +const ( + DefaultPipelineRunnerServiceAccount = "pipeline-runner" + HasDefaultBucketEnvVar = "HAS_DEFAULT_BUCKET" + DefaultBucketNameEnvVar = "BUCKET_NAME" + ProjectIDEnvVar = "PROJECT_ID" + ) func GetNamespaceFromAPIResourceReferences(resourceRefs []*api.ResourceReference) string { namespace := "" @@ -39,3 +46,19 @@ func GetExperimentIDFromAPIResourceReferences(resourceRefs []*api.ResourceRefere } return experimentID } + +// Mutate default values of specified pipeline spec. +// Args: +// text: (part of) pipeline file in string. +func PatchPipelineDefaultParameter(text string) (string, error) { + defaultBucket := GetStringConfig(DefaultBucketNameEnvVar) + projectId := GetStringConfig(ProjectIDEnvVar) + toPatch := map[string]string{ + "{{kfp-default-bucket}}": defaultBucket, + "{{kfp-project-id}}": projectId, + } + for key, value := range toPatch { + text = strings.Replace(text, key, value, -1) + } + return text, nil +} diff --git a/backend/src/apiserver/config/config.json b/backend/src/apiserver/config/config.json index 3897872143f..1fa703b54a1 100644 --- a/backend/src/apiserver/config/config.json +++ b/backend/src/apiserver/config/config.json @@ -18,7 +18,6 @@ "DefaultPipelineRunnerServiceAccount": "pipeline-runner", "CacheEnabled": "true", "CRON_SCHEDULE_TIMEZONE": "UTC", - "CACHE_IMAGE": "gcr.io/google-containers/busybox", + "CACHE_IMAGE": "gcr.io/google-containers/busybox", "CACHE_NODE_RESTRICTIONS": "false" - } diff --git a/backend/src/apiserver/resource/model_converter.go b/backend/src/apiserver/resource/model_converter.go index d16564ae1d9..c246ecf4512 100644 --- a/backend/src/apiserver/resource/model_converter.go +++ b/backend/src/apiserver/resource/model_converter.go @@ -16,6 +16,8 @@ package resource import ( "encoding/json" + "fmt" + "github.com/kubeflow/pipelines/backend/src/apiserver/template" "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1" api "github.com/kubeflow/pipelines/backend/api/go_client" @@ -53,13 +55,9 @@ func (r *ResourceManager) ToModelRunMetric(metric *api.RunMetric, runUUID string } } -// The input run might not contain workflowSpecManifest, but instead a pipeline ID. -// The caller would retrieve workflowSpecManifest and pass in. 
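// The replacement below also changes how parameters arrive: V2 runs carry
// them in a RuntimeConfig whose values use the Value oneof. A minimal
// construction sketch, mirroring the tests later in this patch ("param2"
// and the bucket path are illustrative values only):
//
//   spec := &api.PipelineSpec{
//       RuntimeConfig: &api.PipelineSpec_RuntimeConfig{
//           Parameters: map[string]*api.Value{
//               "param2": {Value: &api.Value_StringValue{StringValue: "world"}},
//           },
//           PipelineRoot: "gs://example-bucket/root",
//       },
//   }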
-func (r *ResourceManager) ToModelRunDetail(run *api.Run, runId string, workflow *util.Workflow, workflowSpecManifest string) (*model.RunDetail, error) { - params, err := toModelParameters(run.GetPipelineSpec().GetParameters()) - if err != nil { - return nil, util.Wrap(err, "Unable to parse the parameter.") - } +// The input run might not contain workflowSpecManifest and pipelineSpecManifest, but instead a pipeline ID. +// The caller would retrieve manifest and pass in. +func (r *ResourceManager) ToModelRunDetail(run *api.Run, runId string, workflow *util.Workflow, manifest string, templateType template.TemplateType) (*model.RunDetail, error) { resourceReferences, err := r.toModelResourceReferences(runId, common.Run, run.GetResourceReferences()) if err != nil { return nil, util.Wrap(err, "Unable to convert resource references.") @@ -77,7 +75,7 @@ func (r *ResourceManager) ToModelRunDetail(run *api.Run, runId string, workflow return nil, util.Wrap(err, "Error getting the experiment UUID") } - return &model.RunDetail{ + runDetail := &model.RunDetail{ Run: model.Run{ UUID: runId, ExperimentUUID: experimentUUID, @@ -91,21 +89,35 @@ func (r *ResourceManager) ToModelRunDetail(run *api.Run, runId string, workflow PipelineSpec: model.PipelineSpec{ PipelineId: run.GetPipelineSpec().GetPipelineId(), PipelineName: pipelineName, - WorkflowSpecManifest: workflowSpecManifest, - Parameters: params, }, }, - PipelineRuntime: model.PipelineRuntime{ - WorkflowRuntimeManifest: workflow.ToStringForStore(), - }, - }, nil -} + } -func (r *ResourceManager) ToModelJob(job *api.Job, swf *util.ScheduledWorkflow, workflowSpecManifest string) (*model.Job, error) { - params, err := toModelParameters(job.GetPipelineSpec().GetParameters()) - if err != nil { - return nil, util.Wrap(err, "Error parsing the input job.") + if templateType == template.V1 { + params, err := apiParametersToModelParameters(run.GetPipelineSpec().GetParameters()) + if err != nil { + return nil, util.Wrap(err, "Unable to parse the parameter.") + } + runDetail.Parameters = params + runDetail.WorkflowSpecManifest = manifest + runDetail.WorkflowRuntimeManifest = workflow.ToStringForStore() + return runDetail, nil + + } else if templateType == template.V2 { + params, err := runtimeConfigToModelParameters(run.GetPipelineSpec().GetRuntimeConfig()) + if err != nil { + return nil, util.Wrap(err, "Unable to parse the parameter.") + } + runDetail.Parameters = params + runDetail.PipelineSpecManifest = manifest + return runDetail, nil + + } else { + return nil, fmt.Errorf("failed to generate RunDetail with templateType %s", templateType) } +} + +func (r *ResourceManager) ToModelJob(job *api.Job, swf *util.ScheduledWorkflow, manifest string, templateType template.TemplateType) (*model.Job, error) { resourceReferences, err := r.toModelResourceReferences(string(swf.UID), common.Job, job.GetResourceReferences()) if err != nil { return nil, util.Wrap(err, "Error to convert resource references.") @@ -121,7 +133,7 @@ func (r *ResourceManager) ToModelJob(job *api.Job, swf *util.ScheduledWorkflow, if swf.Spec.Workflow != nil { serviceAccount = swf.Spec.Workflow.Spec.ServiceAccountName } - return &model.Job{ + modelJob := &model.Job{ UUID: string(swf.UID), DisplayName: job.Name, Name: swf.Name, @@ -137,14 +149,33 @@ func (r *ResourceManager) ToModelJob(job *api.Job, swf *util.ScheduledWorkflow, PipelineSpec: model.PipelineSpec{ PipelineId: job.GetPipelineSpec().GetPipelineId(), PipelineName: pipelineName, - WorkflowSpecManifest: workflowSpecManifest, - Parameters: 
params, - }, - }, nil + }} + + if templateType == template.V1 { + params, err := apiParametersToModelParameters(job.GetPipelineSpec().GetParameters()) + if err != nil { + return nil, util.Wrap(err, "Unable to parse the parameter.") + } + modelJob.Parameters = params + modelJob.WorkflowSpecManifest = manifest + return modelJob, nil + + } else if templateType == template.V2 { + params, err := runtimeConfigToModelParameters(job.GetPipelineSpec().GetRuntimeConfig()) + if err != nil { + return nil, util.Wrap(err, "Unable to parse the parameter.") + } + modelJob.Parameters = params + modelJob.PipelineSpecManifest = manifest + return modelJob, nil + + } else { + return nil, fmt.Errorf("failed to generate ModelJob with templateType %s", templateType) + } } func (r *ResourceManager) ToModelPipelineVersion(version *api.PipelineVersion) (*model.PipelineVersion, error) { - paramStr, err := toModelParameters(version.Parameters) + paramStr, err := apiParametersToModelParameters(version.Parameters) if err != nil { return nil, err } @@ -196,7 +227,7 @@ func toModelTrigger(trigger *api.Trigger) model.Trigger { return modelTrigger } -func toModelParameters(apiParams []*api.Parameter) (string, error) { +func apiParametersToModelParameters(apiParams []*api.Parameter) (string, error) { if apiParams == nil || len(apiParams) == 0 { return "", nil } @@ -215,6 +246,35 @@ func toModelParameters(apiParams []*api.Parameter) (string, error) { return string(paramsBytes), nil } +func runtimeConfigToModelParameters(runtimeConfig *api.PipelineSpec_RuntimeConfig) (string, error) { + if runtimeConfig == nil { + return "", nil + } + var params []v1alpha1.Parameter + for k, v := range runtimeConfig.GetParameters() { + param := v1alpha1.Parameter{ + Name: k, + } + switch t := v.Value.(type) { + case *api.Value_StringValue: + param.Value = v1alpha1.AnyStringPtr(v.GetStringValue()) + case *api.Value_DoubleValue: + param.Value = v1alpha1.AnyStringPtr(v.GetDoubleValue()) + case *api.Value_IntValue: + param.Value = v1alpha1.AnyStringPtr(v.GetIntValue()) + default: + return "", fmt.Errorf("unknown property type in pipelineSpec runtimeConfig Parameters: %T", t) + } + + params = append(params, param) + } + paramsBytes, err := json.Marshal(params) + if err != nil { + return "", util.NewInternalServerError(err, "Failed to stream API parameter as string.") + } + return string(paramsBytes), nil +} + func (r *ResourceManager) toModelResourceReferences( resourceId string, resourceType model.ResourceType, apiRefs []*api.ResourceReference) ([]*model.ResourceReference, error) { var modelRefs []*model.ResourceReference diff --git a/backend/src/apiserver/resource/model_converter_test.go b/backend/src/apiserver/resource/model_converter_test.go index 0caab6b1171..6ab93429b48 100644 --- a/backend/src/apiserver/resource/model_converter_test.go +++ b/backend/src/apiserver/resource/model_converter_test.go @@ -15,10 +15,11 @@ package resource import ( + "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1" + "github.com/kubeflow/pipelines/backend/src/apiserver/template" "strings" "testing" - "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1" "github.com/golang/protobuf/ptypes/timestamp" "github.com/google/go-cmp/cmp" api "github.com/kubeflow/pipelines/backend/api/go_client" @@ -150,115 +151,265 @@ func TestToModelRunDetail(t *testing.T) { store, manager, experiment := initWithExperiment(t) defer store.Close() - apiRun := &api.Run{ - Id: "run1", - Name: "name1", - Description: "this is a run", - PipelineSpec: 
&api.PipelineSpec{ - Parameters: []*api.Parameter{{Name: "param2", Value: "world"}}, - }, - ResourceReferences: []*api.ResourceReference{ - {Key: &api.ResourceKey{Type: api.ResourceType_EXPERIMENT, Id: experiment.UUID}, Relationship: api.Relationship_OWNER}, - }, - } - workflow := util.NewWorkflow(&v1alpha1.Workflow{ - ObjectMeta: v1.ObjectMeta{Name: "workflow-name", UID: "123"}, - Status: v1alpha1.WorkflowStatus{Phase: "running"}, - }) - modelRunDetail, err := manager.ToModelRunDetail(apiRun, "123", workflow, "workflow spec") - assert.Nil(t, err) - - expectedModelRunDetail := &model.RunDetail{ - Run: model.Run{ - UUID: "123", - ExperimentUUID: experiment.UUID, - DisplayName: "name1", - Name: "workflow-name", - Conditions: "running", - Description: "this is a run", - PipelineSpec: model.PipelineSpec{ - WorkflowSpecManifest: "workflow spec", - Parameters: `[{"name":"param2","value":"world"}]`, + tests := []struct { + name string + apiRun *api.Run + workflow *util.Workflow + manifest string + templateType template.TemplateType + expectedModelRunDetail *model.RunDetail + }{ + { name : "v1", + apiRun: &api.Run{ + Id: "run1", + Name: "name1", + Description: "this is a run", + PipelineSpec: &api.PipelineSpec{ + Parameters: []*api.Parameter{{Name: "param2", Value: "world"}}, + }, + ResourceReferences: []*api.ResourceReference{ + { + Key: &api.ResourceKey{Type: api.ResourceType_EXPERIMENT, Id: experiment.UUID}, + Relationship: api.Relationship_OWNER}}, }, - ResourceReferences: []*model.ResourceReference{ - { - ResourceUUID: "123", - ResourceType: common.Run, - ReferenceUUID: experiment.UUID, - ReferenceName: experiment.Name, - ReferenceType: common.Experiment, - Relationship: common.Owner}, + workflow: util.NewWorkflow(&v1alpha1.Workflow{ + ObjectMeta: v1.ObjectMeta{Name: "workflow-name", UID: "123"}, + Status: v1alpha1.WorkflowStatus{Phase: "running"}, + }), + manifest: "workflow spec", + templateType: template.V1, + expectedModelRunDetail: &model.RunDetail{ + Run: model.Run{ + UUID: "123", + ExperimentUUID: experiment.UUID, + DisplayName: "name1", + Name: "workflow-name", + Conditions: "running", + Description: "this is a run", + PipelineSpec: model.PipelineSpec{ + WorkflowSpecManifest: "workflow spec", + Parameters: `[{"name":"param2","value":"world"}]`, + }, + ResourceReferences: []*model.ResourceReference{ + { + ResourceUUID: "123", + ResourceType: common.Run, + ReferenceUUID: experiment.UUID, + ReferenceName: experiment.Name, + ReferenceType: common.Experiment, + Relationship: common.Owner}, + }, + }, + PipelineRuntime: model.PipelineRuntime{ + WorkflowRuntimeManifest: util.NewWorkflow(&v1alpha1.Workflow{ + ObjectMeta: v1.ObjectMeta{Name: "workflow-name", UID: "123"}, + Status: v1alpha1.WorkflowStatus{Phase: "running"}, + }).ToStringForStore(), + }, }, }, - PipelineRuntime: model.PipelineRuntime{ - WorkflowRuntimeManifest: workflow.ToStringForStore(), + { name : "v2", + apiRun: &api.Run{ + Id: "run1", + Name: "name1", + Description: "this is a run", + PipelineSpec: &api.PipelineSpec{RuntimeConfig: &api.PipelineSpec_RuntimeConfig{Parameters: map[string]*api.Value{ + "param2": { + Value: &api.Value_StringValue{ + StringValue: "world", + }, + }, + }}}, + ResourceReferences: []*api.ResourceReference{ + { + Key: &api.ResourceKey{Type: api.ResourceType_EXPERIMENT, Id: experiment.UUID}, + Relationship: api.Relationship_OWNER}}, + }, + workflow: util.NewWorkflow(&v1alpha1.Workflow{ + ObjectMeta: v1.ObjectMeta{Name: "workflow-name", UID: "123"}, + Status: v1alpha1.WorkflowStatus{Phase: "running"}, + 
}), + manifest: "pipeline spec", + templateType: template.V2, + expectedModelRunDetail: &model.RunDetail{ + Run: model.Run{ + UUID: "123", + ExperimentUUID: experiment.UUID, + DisplayName: "name1", + Name: "workflow-name", + Conditions: "running", + Description: "this is a run", + PipelineSpec: model.PipelineSpec{ + PipelineSpecManifest: "pipeline spec", + Parameters: `[{"name":"param2","value":"world"}]`, + }, + ResourceReferences: []*model.ResourceReference{ + { + ResourceUUID: "123", + ResourceType: common.Run, + ReferenceUUID: experiment.UUID, + ReferenceName: experiment.Name, + ReferenceType: common.Experiment, + Relationship: common.Owner}, + }, + }, + }, }, } - assert.Equal(t, expectedModelRunDetail, modelRunDetail) + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + modelRunDetail, err := manager.ToModelRunDetail(tt.apiRun, "123", tt.workflow, tt.manifest, tt.templateType) + assert.Nil(t, err) + assert.Equal(t, tt.expectedModelRunDetail, modelRunDetail) + }) + } + } func TestToModelJob(t *testing.T) { store, manager, experiment, pipeline := initWithExperimentAndPipeline(t) defer store.Close() - apiJob := &api.Job{ - Name: "name1", - Enabled: true, - MaxConcurrency: 1, - NoCatchup: true, - Trigger: &api.Trigger{ - Trigger: &api.Trigger_CronSchedule{CronSchedule: &api.CronSchedule{ - StartTime: ×tamp.Timestamp{Seconds: 1}, - Cron: "1 * * * *", - }}}, - ResourceReferences: []*api.ResourceReference{ - {Key: &api.ResourceKey{Type: api.ResourceType_EXPERIMENT, Id: experiment.UUID}, Relationship: api.Relationship_OWNER}, - }, - PipelineSpec: &api.PipelineSpec{PipelineId: pipeline.UUID, Parameters: []*api.Parameter{{Name: "param2", Value: "world"}}}, - } - swf := util.NewScheduledWorkflow(&swfapi.ScheduledWorkflow{ - ObjectMeta: v1.ObjectMeta{ - Name: "swf_name", - Namespace: "swf_namespace", - UID: "swf_123", - }, - Status: swfapi.ScheduledWorkflowStatus{ - Conditions: []swfapi.ScheduledWorkflowCondition{{Type: swfapi.ScheduledWorkflowEnabled}}}, - }) - modelJob, err := manager.ToModelJob(apiJob, swf, "workflow spec") - assert.Nil(t, err) - expectedModelJob := &model.Job{ - UUID: "swf_123", - Name: "swf_name", - Namespace: "swf_namespace", - Conditions: "Enabled", - DisplayName: "name1", - Enabled: true, - Trigger: model.Trigger{ - CronSchedule: model.CronSchedule{ - CronScheduleStartTimeInSec: util.Int64Pointer(1), - Cron: util.StringPointer("1 * * * *"), + tests := []struct { + name string + apiJob *api.Job + swf *util.ScheduledWorkflow + manifest string + templateType template.TemplateType + expectedModelJob *model.Job + }{ + {name: "v1", + apiJob: &api.Job{ + Name: "name1", + Enabled: true, + MaxConcurrency: 1, + NoCatchup: true, + Trigger: &api.Trigger{ + Trigger: &api.Trigger_CronSchedule{CronSchedule: &api.CronSchedule{ + StartTime: ×tamp.Timestamp{Seconds: 1}, + Cron: "1 * * * *", + }}}, + ResourceReferences: []*api.ResourceReference{ + {Key: &api.ResourceKey{Type: api.ResourceType_EXPERIMENT, Id: experiment.UUID}, Relationship: api.Relationship_OWNER}, + }, + PipelineSpec: &api.PipelineSpec{PipelineId: pipeline.UUID, Parameters: []*api.Parameter{{Name: "param2", Value: "world"}}}, + }, + swf: util.NewScheduledWorkflow(&swfapi.ScheduledWorkflow{ + ObjectMeta: v1.ObjectMeta{ + Name: "swf_name", + Namespace: "swf_namespace", + UID: "swf_123", + }, + Status: swfapi.ScheduledWorkflowStatus{ + Conditions: []swfapi.ScheduledWorkflowCondition{{Type: swfapi.ScheduledWorkflowEnabled}}}, + }), + manifest: "workflow spec", + templateType: template.V1, + expectedModelJob: 
&model.Job{ + UUID: "swf_123", + Name: "swf_name", + Namespace: "swf_namespace", + Conditions: "Enabled", + DisplayName: "name1", + Enabled: true, + Trigger: model.Trigger{ + CronSchedule: model.CronSchedule{ + CronScheduleStartTimeInSec: util.Int64Pointer(1), + Cron: util.StringPointer("1 * * * *"), + }, + }, + MaxConcurrency: 1, + NoCatchup: true, + PipelineSpec: model.PipelineSpec{ + PipelineId: pipeline.UUID, + PipelineName: pipeline.Name, + WorkflowSpecManifest: "workflow spec", + Parameters: `[{"name":"param2","value":"world"}]`, + }, + ResourceReferences: []*model.ResourceReference{ + { + ResourceUUID: "swf_123", + ResourceType: common.Job, + ReferenceUUID: experiment.UUID, + ReferenceName: experiment.Name, + ReferenceType: common.Experiment, + Relationship: common.Owner}, + }, }, }, - MaxConcurrency: 1, - NoCatchup: true, - PipelineSpec: model.PipelineSpec{ - PipelineId: pipeline.UUID, - PipelineName: pipeline.Name, - WorkflowSpecManifest: "workflow spec", - Parameters: `[{"name":"param2","value":"world"}]`, - }, - ResourceReferences: []*model.ResourceReference{ - { - ResourceUUID: "swf_123", - ResourceType: common.Job, - ReferenceUUID: experiment.UUID, - ReferenceName: experiment.Name, - ReferenceType: common.Experiment, - Relationship: common.Owner}, + {name: "v2", + apiJob: &api.Job{ + Name: "name1", + Enabled: true, + MaxConcurrency: 1, + NoCatchup: true, + Trigger: &api.Trigger{ + Trigger: &api.Trigger_CronSchedule{CronSchedule: &api.CronSchedule{ + StartTime: ×tamp.Timestamp{Seconds: 1}, + Cron: "1 * * * *", + }}}, + ResourceReferences: []*api.ResourceReference{ + {Key: &api.ResourceKey{Type: api.ResourceType_EXPERIMENT, Id: experiment.UUID}, Relationship: api.Relationship_OWNER}, + }, + PipelineSpec: &api.PipelineSpec{PipelineId: pipeline.UUID, RuntimeConfig: &api.PipelineSpec_RuntimeConfig{Parameters: map[string]*api.Value{ + "param2": { + Value: &api.Value_StringValue{ + StringValue: "world", + }, + }, + }}}, + }, + swf: util.NewScheduledWorkflow(&swfapi.ScheduledWorkflow{ + ObjectMeta: v1.ObjectMeta{ + Name: "swf_name", + Namespace: "swf_namespace", + UID: "swf_123", + }, + Status: swfapi.ScheduledWorkflowStatus{ + Conditions: []swfapi.ScheduledWorkflowCondition{{Type: swfapi.ScheduledWorkflowEnabled}}}, + }), + manifest: "pipeline spec", + templateType: template.V2, + expectedModelJob: &model.Job{ + UUID: "swf_123", + Name: "swf_name", + Namespace: "swf_namespace", + Conditions: "Enabled", + DisplayName: "name1", + Enabled: true, + Trigger: model.Trigger{ + CronSchedule: model.CronSchedule{ + CronScheduleStartTimeInSec: util.Int64Pointer(1), + Cron: util.StringPointer("1 * * * *"), + }, + }, + MaxConcurrency: 1, + NoCatchup: true, + PipelineSpec: model.PipelineSpec{ + PipelineId: pipeline.UUID, + PipelineName: pipeline.Name, + PipelineSpecManifest: "pipeline spec", + Parameters: `[{"name":"param2","value":"world"}]`, + }, + ResourceReferences: []*model.ResourceReference{ + { + ResourceUUID: "swf_123", + ResourceType: common.Job, + ReferenceUUID: experiment.UUID, + ReferenceName: experiment.Name, + ReferenceType: common.Experiment, + Relationship: common.Owner}, + }, + }, }, } - assert.Equal(t, expectedModelJob, modelJob) + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + modelJob, err := manager.ToModelJob(tt.apiJob, tt.swf, tt.manifest, tt.templateType) + assert.Nil(t, err) + assert.Equal(t, tt.expectedModelJob, modelJob) + }) + } } func TestToModelResourceReferences(t *testing.T) { diff --git a/backend/src/apiserver/resource/resource_manager.go 
b/backend/src/apiserver/resource/resource_manager.go index f2dcbb0d4af..6d3200beb34 100644 --- a/backend/src/apiserver/resource/resource_manager.go +++ b/backend/src/apiserver/resource/resource_manager.go @@ -18,6 +18,7 @@ import ( "context" "encoding/json" "fmt" + "github.com/kubeflow/pipelines/backend/src/apiserver/template" "io" "strconv" @@ -36,7 +37,6 @@ import ( "github.com/kubeflow/pipelines/backend/src/apiserver/model" "github.com/kubeflow/pipelines/backend/src/apiserver/storage" "github.com/kubeflow/pipelines/backend/src/common/util" - scheduledworkflow "github.com/kubeflow/pipelines/backend/src/crd/pkg/apis/scheduledworkflow/v1beta1" scheduledworkflowclient "github.com/kubeflow/pipelines/backend/src/crd/pkg/client/clientset/versioned/typed/scheduledworkflow/v1beta1" "github.com/pkg/errors" "github.com/prometheus/client_golang/prometheus" @@ -50,13 +50,6 @@ import ( utilerrors "k8s.io/apimachinery/pkg/util/errors" ) -const ( - defaultPipelineRunnerServiceAccount = "pipeline-runner" - HasDefaultBucketEnvVar = "HAS_DEFAULT_BUCKET" - ProjectIDEnvVar = "PROJECT_ID" - DefaultBucketNameEnvVar = "BUCKET_NAME" -) - // Metric variables. Please prefix the metric names with resource_manager_. var ( // Count the removed workflows due to garbage collection. @@ -262,7 +255,7 @@ func (r *ResourceManager) UpdatePipelineDefaultVersion(pipelineId string, versio } func (r *ResourceManager) CreatePipeline(name string, description string, namespace string, pipelineFile []byte) (*model.Pipeline, error) { - tmpl, err := util.NewTemplate(pipelineFile) + tmpl, err := template.New(pipelineFile) if err != nil { return nil, util.Wrap(err, "Create pipeline failed") } @@ -338,83 +331,35 @@ func (r *ResourceManager) GetPipelineTemplate(pipelineId string) ([]byte, error) } func (r *ResourceManager) CreateRun(ctx context.Context, apiRun *api.Run) (*model.RunDetail, error) { - // Get workflow from either of the two places: - // (1) raw pipeline manifest in pipeline_spec + // Get manifest from either of the two places: + // (1) raw manifest in pipeline_spec // (2) pipeline version in resource_references - // And the latter takes priority over the former when the pipeline manifest is from pipeline_spec.pipeline_id - // workflow manifest and pipeline id/version will not exist at the same time, guaranteed by the validation phase - workflowSpecManifestBytes, err := getWorkflowSpecManifestBytes(apiRun.PipelineSpec, &apiRun.ResourceReferences, r) + // And the latter takes priority over the former when the manifest is from pipeline_spec.pipeline_id + // workflow/pipeline manifest and pipeline id/version will not exist at the same time, guaranteed by the validation phase + manifestBytes, err := getManifestBytes(apiRun.PipelineSpec, &apiRun.ResourceReferences, r) if err != nil { return nil, err } + uuid, err := r.uuid.NewRandom() if err != nil { return nil, util.NewInternalServerError(err, "Failed to generate run ID.") } runId := uuid.String() - runAt := r.time.Now().Unix() - var workflow util.Workflow - if err = json.Unmarshal(workflowSpecManifestBytes, &workflow); err != nil { - return nil, util.NewInternalServerError(err, - "Failed to unmarshal workflow spec manifest. 
Workflow bytes: %s", string(workflowSpecManifestBytes)) - } - if workflow.Workflow == nil { - return nil, util.Wrap( - util.NewResourceNotFoundError("WorkflowSpecManifest", apiRun.GetName()), - "Failed to fetch workflow spec manifest.") - } - - parameters := toParametersMap(apiRun.GetPipelineSpec().GetParameters()) - // Verify no additional parameter provided - if err = workflow.VerifyParameters(parameters); err != nil { - return nil, util.Wrap(err, "Failed to verify parameters.") - } - // Append provided parameter - workflow.OverrideParameters(parameters) - - // Replace macros - formatter := util.NewRunParameterFormatter(uuid.String(), runAt) - formattedParams := formatter.FormatWorkflowParameters(workflow.GetWorkflowParametersAsMap()) - workflow.OverrideParameters(formattedParams) - - r.setDefaultServiceAccount(&workflow, apiRun.GetServiceAccount()) - - // Disable istio sidecar injection if not specified - workflow.SetAnnotationsToAllTemplatesIfKeyNotExist(util.AnnotationKeyIstioSidecarInject, util.AnnotationValueIstioSidecarInjectDisabled) - // Add a KFP specific label for cache service filtering. The cache_enabled flag here is a global control for whether cache server will - // receive targeting pods. Since cache server only receives pods in step level, the resource manager here will set this global label flag - // on every single step/pod so the cache server can understand. - // TODO: Add run_level flag with similar logic by reading flag value from create_run api. - workflow.SetLabelsToAllTemplates(util.LabelKeyCacheEnabled, common.IsCacheEnabled()) - - err = OverrideParameterWithSystemDefault(workflow, apiRun) + tmpl, err := template.New(manifestBytes) if err != nil { return nil, err } - - // Add label to the workflow so it can be persisted by persistent agent later. - workflow.SetLabels(util.LabelKeyWorkflowRunId, runId) - // Add run name annotation to the workflow so that it can be logged by the Metadata Writer. - workflow.SetAnnotations(util.AnnotationKeyRunName, apiRun.Name) - // Replace {{workflow.uid}} with runId - err = workflow.ReplaceUID(runId) - if err != nil { - return nil, util.NewInternalServerError(err, "Failed to replace workflow ID") + runWorkflowOptions := template.RunWorkflowOptions{ + RunId: runId, + RunAt: runAt, } - workflow.SetPodMetadataLabels(util.LabelKeyWorkflowRunId, runId) - - // Marking auto-added artifacts as optional. Otherwise most older workflows will start failing after upgrade to Argo 2.3. - // TODO: Fix the components to explicitly declare the artifacts they really output. 
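// Everything deleted in this hunk now lives behind the template package.
// A condensed sketch of the new caller-side flow, using only names that
// appear in the added lines above:
//
//   tmpl, err := template.New(manifestBytes) // sniffs V1 (Argo workflow) vs V2 (IR) format
//   if err != nil {
//       return nil, err
//   }
//   workflow, err := tmpl.RunWorkflow(apiRun, template.RunWorkflowOptions{
//       RunId: runId,
//       RunAt: runAt,
//   })
//   if err != nil {
//       return nil, util.NewInternalServerError(err, "failed to generate the workflow.")
//   }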
- for templateIdx, template := range workflow.Workflow.Spec.Templates { - for artIdx, artifact := range template.Outputs.Artifacts { - if artifact.Name == "mlpipeline-ui-metadata" || artifact.Name == "mlpipeline-metrics" { - workflow.Workflow.Spec.Templates[templateIdx].Outputs.Artifacts[artIdx].Optional = true - } - } + workflow, err := tmpl.RunWorkflow(apiRun, runWorkflowOptions) + if err != nil { + return nil, util.NewInternalServerError(err, "failed to generate the workflow.") } - // Add a reference to the default experiment if run does not already have a containing experiment ref, err := r.getDefaultExperimentIfNoExperiment(apiRun.GetResourceReferences()) if err != nil { @@ -443,8 +388,19 @@ func (r *ResourceManager) CreateRun(ctx context.Context, apiRun *api.Run) (*mode return nil, util.NewInternalServerError(err, "Failed to create a workflow for (%s)", workflow.Name) } + // Patched the default value to apiRun + if common.GetBoolConfigWithDefault(common.HasDefaultBucketEnvVar, false) { + for _, param := range apiRun.PipelineSpec.Parameters { + var err error + param.Value, err = common.PatchPipelineDefaultParameter(param.Value) + if err != nil { + return nil, fmt.Errorf("failed to patch default value to pipeline. Error: %v", err) + } + } + } + // Store run metadata into database - runDetail, err := r.ToModelRunDetail(apiRun, runId, util.NewWorkflow(newWorkflow), string(workflowSpecManifestBytes)) + runDetail, err := r.ToModelRunDetail(apiRun, runId, util.NewWorkflow(newWorkflow), string(manifestBytes), tmpl.GetTemplateType()) if err != nil { return nil, util.Wrap(err, "Failed to convert run model") } @@ -577,9 +533,12 @@ func (r *ResourceManager) RetryRun(ctx context.Context, runId string) error { return util.Wrap(err, "Retry run failed") } - if runDetail.WorkflowRuntimeManifest == "" { + if runDetail.WorkflowSpecManifest != "" && runDetail.WorkflowRuntimeManifest == "" { return util.NewBadRequestError(errors.New("workflow cannot be retried"), "Workflow must be Failed/Error to retry") } + if runDetail.PipelineSpecManifest != "" { + return util.NewBadRequestError(errors.New("workflow cannot be retried"), "Workflow must be with v1 mode to retry") + } var workflow util.Workflow if err := json.Unmarshal([]byte(runDetail.WorkflowRuntimeManifest), &workflow); err != nil { return util.NewInternalServerError(err, "Failed to retrieve the runtime pipeline spec from the run") @@ -712,62 +671,20 @@ func (r *ResourceManager) CreateJob(ctx context.Context, apiJob *api.Job) (*mode // (2) pipeline version in resource_references // And the latter takes priority over the former when the pipeline manifest is from pipeline_spec.pipeline_id // workflow manifest and pipeline id/version will not exist at the same time, guaranteed by the validation phase - workflowSpecManifestBytes, err := getWorkflowSpecManifestBytes(apiJob.PipelineSpec, &apiJob.ResourceReferences, r) + manifestBytes, err := getManifestBytes(apiJob.PipelineSpec, &apiJob.ResourceReferences, r) if err != nil { return nil, err } - var workflow util.Workflow - err = json.Unmarshal(workflowSpecManifestBytes, &workflow) + tmpl, err := template.New(manifestBytes) if err != nil { - return nil, util.NewInternalServerError(err, - "Failed to unmarshal workflow spec manifest. 
Workflow bytes: %s", string(workflowSpecManifestBytes)) - } - if workflow.Workflow == nil { - return nil, util.Wrap( - util.NewResourceNotFoundError("WorkflowSpecManifest", apiJob.GetName()), - "Failed to fetch workflow spec manifest.") - } - - // Verify no additional parameter provided - err = workflow.VerifyParameters(toParametersMap(apiJob.GetPipelineSpec().GetParameters())) - if err != nil { - return nil, util.Wrap(err, "Create job failed") + return nil, err } - r.setDefaultServiceAccount(&workflow, apiJob.GetServiceAccount()) - - // Disable istio sidecar injection if not specified - workflow.SetAnnotationsToAllTemplatesIfKeyNotExist(util.AnnotationKeyIstioSidecarInject, util.AnnotationValueIstioSidecarInjectDisabled) - - swfGeneratedName, err := toSWFCRDResourceGeneratedName(apiJob.Name) + scheduledWorkflow, err := tmpl.ScheduledWorkflow(apiJob) if err != nil { - return nil, util.Wrap(err, "Create job failed") - } - scheduledWorkflow := &scheduledworkflow.ScheduledWorkflow{ - ObjectMeta: v1.ObjectMeta{GenerateName: swfGeneratedName}, - Spec: scheduledworkflow.ScheduledWorkflowSpec{ - Enabled: apiJob.Enabled, - MaxConcurrency: &apiJob.MaxConcurrency, - Trigger: *toCRDTrigger(apiJob.Trigger), - Workflow: &scheduledworkflow.WorkflowResource{ - Parameters: toCRDParameter(apiJob.GetPipelineSpec().GetParameters()), - Spec: workflow.Spec, - }, - NoCatchup: util.BoolPointer(apiJob.NoCatchup), - }, + return nil, util.Wrap(err, "failed to generate the scheduledWorkflow.") } - - // Marking auto-added artifacts as optional. Otherwise most older workflows will start failing after upgrade to Argo 2.3. - // TODO: Fix the components to explicitly declare the artifacts they really output. - for templateIdx, template := range scheduledWorkflow.Spec.Workflow.Spec.Templates { - for artIdx, artifact := range template.Outputs.Artifacts { - if artifact.Name == "mlpipeline-ui-metadata" || artifact.Name == "mlpipeline-metrics" { - scheduledWorkflow.Spec.Workflow.Spec.Templates[templateIdx].Outputs.Artifacts[artIdx].Optional = true - } - } - } - // Add a reference to the default experiment if run does not already have a containing experiment ref, err := r.getDefaultExperimentIfNoExperiment(apiJob.GetResourceReferences()) if err != nil { @@ -787,7 +704,7 @@ func (r *ResourceManager) CreateJob(ctx context.Context, apiJob *api.Job) (*mode return nil, util.NewInternalServerError(err, "Failed to create a scheduled workflow for (%s)", scheduledWorkflow.Name) } - job, err := r.ToModelJob(apiJob, util.NewScheduledWorkflow(newScheduledWorkflow), string(workflowSpecManifestBytes)) + job, err := r.ToModelJob(apiJob, util.NewScheduledWorkflow(newScheduledWorkflow), string(manifestBytes), tmpl.GetTemplateType()) if err != nil { return nil, util.Wrap(err, "Create job failed") } @@ -1055,7 +972,7 @@ func (r *ResourceManager) getWorkflowSpecBytesFromPipelineSpec(spec *api.Pipelin return nil, util.NewInvalidInputError("Please provide a valid pipeline spec") } -func (r *ResourceManager) getWorkflowSpecBytesFromPipelineVersion(references []*api.ResourceReference) ([]byte, error) { +func (r *ResourceManager) getManifestBytesFromPipelineVersion(references []*api.ResourceReference) ([]byte, error) { var pipelineVersionId = "" for _, reference := range references { if reference.Key.Type == api.ResourceType_PIPELINE_VERSION && reference.Relationship == api.Relationship_CREATOR { @@ -1065,30 +982,31 @@ func (r *ResourceManager) getWorkflowSpecBytesFromPipelineVersion(references []* if len(pipelineVersionId) == 0 { return nil, 
util.NewInvalidInputError("No pipeline version.") } - var workflow util.Workflow - err := r.objectStore.GetFromYamlFile(&workflow, r.objectStore.GetPipelineKey(pipelineVersionId)) + manifestBytes, err := r.objectStore.GetFile(r.objectStore.GetPipelineKey(pipelineVersionId)) if err != nil { - return nil, util.Wrap(err, "Get pipeline YAML failed.") + return nil, util.Wrap(err, "Get manifest bytes from PipelineVersion failed.") } - return []byte(workflow.ToStringForStore()), nil + return manifestBytes, nil } -func getWorkflowSpecManifestBytes(pipelineSpec *api.PipelineSpec, resourceReferences *[]*api.ResourceReference, r *ResourceManager) ([]byte, error) { - var workflowSpecManifestBytes []byte +func getManifestBytes(pipelineSpec *api.PipelineSpec, resourceReferences *[]*api.ResourceReference, r *ResourceManager) ([]byte, error) { + var manifestBytes []byte if pipelineSpec.GetWorkflowManifest() != "" { - workflowSpecManifestBytes = []byte(pipelineSpec.GetWorkflowManifest()) + manifestBytes = []byte(pipelineSpec.GetWorkflowManifest()) + } else if pipelineSpec.GetPipelineManifest() != "" { + manifestBytes = []byte(pipelineSpec.GetPipelineManifest()) } else { err := convertPipelineIdToDefaultPipelineVersion(pipelineSpec, resourceReferences, r) if err != nil { return nil, util.Wrap(err, "Failed to find default version to create run with pipeline id.") } - workflowSpecManifestBytes, err = r.getWorkflowSpecBytesFromPipelineVersion(*resourceReferences) + manifestBytes, err = r.getManifestBytesFromPipelineVersion(*resourceReferences) if err != nil { - return nil, util.Wrap(err, "Failed to fetch workflow spec.") + return nil, util.Wrap(err, "Failed to fetch manifest bytes.") } } - return workflowSpecManifestBytes, nil + return manifestBytes, nil } // Used to initialize the Experiment database with a default to be used for runs @@ -1174,6 +1092,9 @@ func (r *ResourceManager) ReadArtifact(runID string, nodeID string, artifactName if err != nil { return nil, err } + if run.WorkflowRuntimeManifest == "" { + return nil, util.NewInvalidInputError("read artifact from run with v2 IR spec is not supported") + } var storageWorkflow workflowapi.Workflow err = json.Unmarshal([]byte(run.WorkflowRuntimeManifest), &storageWorkflow) if err != nil { @@ -1206,10 +1127,6 @@ func (r *ResourceManager) MarkSampleLoaded() error { return r.dBStatusStore.MarkSampleLoaded() } -func (r *ResourceManager) getDefaultSA() string { - return common.GetStringConfigWithDefault(common.DefaultPipelineRunnerServiceAccount, defaultPipelineRunnerServiceAccount) -} - func (r *ResourceManager) CreatePipelineVersion(apiVersion *api.PipelineVersion, pipelineFile []byte, updateDefaultVersion bool) (*model.PipelineVersion, error) { // Extract pipeline id var pipelineId = "" @@ -1221,7 +1138,7 @@ func (r *ResourceManager) CreatePipelineVersion(apiVersion *api.PipelineVersion, if len(pipelineId) == 0 { return nil, util.NewInvalidInputError("Create pipeline version failed due to missing pipeline id") } - tmpl, err := util.NewTemplate(pipelineFile) + tmpl, err := template.New(pipelineFile) if err != nil { return nil, util.Wrap(err, "Create pipeline version failed") } @@ -1405,19 +1322,6 @@ func (r *ResourceManager) GetNamespaceFromPipelineVersion(versionId string) (str return r.GetNamespaceFromPipelineID(pipelineVersion.PipelineId) } -func (r *ResourceManager) setDefaultServiceAccount(workflow *util.Workflow, serviceAccount string) { - if len(serviceAccount) > 0 { - workflow.SetServiceAccount(serviceAccount) - return - } - workflowServiceAccount := 
workflow.Spec.ServiceAccountName - if len(workflowServiceAccount) == 0 || workflowServiceAccount == defaultPipelineRunnerServiceAccount { - // To reserve SDK backward compatibility, the backend only replaces - // serviceaccount when it is empty or equal to default value set by SDK. - workflow.SetServiceAccount(r.getDefaultSA()) - } -} - func (r *ResourceManager) getNamespaceFromExperiment(references []*api.ResourceReference) (string, error) { experimentID := common.GetExperimentIDFromAPIResourceReferences(references) experiment, err := r.GetExperiment(experimentID) diff --git a/backend/src/apiserver/resource/resource_manager_test.go b/backend/src/apiserver/resource/resource_manager_test.go index 077e3c5b03e..b7497dbe3e1 100644 --- a/backend/src/apiserver/resource/resource_manager_test.go +++ b/backend/src/apiserver/resource/resource_manager_test.go @@ -18,6 +18,7 @@ import ( "context" "encoding/json" "fmt" + "github.com/kubeflow/pipelines/backend/src/apiserver/template" "strings" "testing" "time" @@ -183,6 +184,26 @@ func initWithJob(t *testing.T) (*FakeClientManager, *ResourceManager, *model.Job return store, manager, j } +// Util function to create an initial state with pipeline uploaded +func initWithJobV2(t *testing.T) (*FakeClientManager, *ResourceManager, *model.Job) { + store, manager, exp := initWithExperiment(t) + job := &api.Job{ + Name: "j1", + Enabled: true, + PipelineSpec: &api.PipelineSpec{PipelineManifest: v2SpecHelloWorld}, + ResourceReferences: []*api.ResourceReference{ + { + Key: &api.ResourceKey{Type: api.ResourceType_EXPERIMENT, Id: exp.UUID}, + Relationship: api.Relationship_OWNER, + }, + }, + } + j, err := manager.CreateJob(context.Background(), job) + assert.Nil(t, err) + + return store, manager, j +} + func initWithOneTimeRun(t *testing.T) (*FakeClientManager, *ResourceManager, *model.RunDetail) { store, manager, exp := initWithExperiment(t) apiRun := &api.Run{ @@ -205,6 +226,25 @@ func initWithOneTimeRun(t *testing.T) (*FakeClientManager, *ResourceManager, *mo return store, manager, runDetail } +func initWithOneTimeRunV2(t *testing.T) (*FakeClientManager, *ResourceManager, *model.RunDetail) { + store, manager, exp := initWithExperiment(t) + apiRun := &api.Run{ + Name: "run1", + PipelineSpec: &api.PipelineSpec{ + PipelineManifest: v2SpecHelloWorld, + }, + ResourceReferences: []*api.ResourceReference{ + { + Key: &api.ResourceKey{Type: api.ResourceType_EXPERIMENT, Id: exp.UUID}, + Relationship: api.Relationship_OWNER, + }, + }, + } + runDetail, err := manager.CreateRun(context.Background(), apiRun) + assert.Nil(t, err) + return store, manager, runDetail +} + func initWithPatchedRun(t *testing.T) (*FakeClientManager, *ResourceManager, *model.RunDetail) { store, manager, exp := initWithExperiment(t) apiRun := &api.Run{ @@ -384,7 +424,7 @@ func TestCreatePipeline(t *testing.T) { msg: "InvalidTemplate", template: "I am invalid yaml", errorCode: codes.InvalidArgument, - errorIs: util.ErrorInvalidPipelineSpec, + errorIs: template.ErrorInvalidPipelineSpec, }, { msg: "BadDB", @@ -489,6 +529,7 @@ func TestGetPipelineTemplate_PipelineFileNotFound(t *testing.T) { assert.Contains(t, err.Error(), "object not found") } +// TODO: use table driven test to test CreateRun api func TestCreateRun_ThroughPipelineID(t *testing.T) { store, manager, p := initWithPipeline(t) defer store.Close() @@ -539,7 +580,7 @@ func TestCreateRun_ThroughPipelineID(t *testing.T) { expectedRuntimeWorkflow.Labels = map[string]string{util.LabelKeyWorkflowRunId: "123e4567-e89b-12d3-a456-426655440000"} 
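// The run-ID label (above) is what the persistence agent keys on, and the
// run-name annotation (below) is read by the Metadata Writer, per the
// comments removed from resource_manager.go earlier in this patch; these
// expectations pin down that both still get set now that the logic lives
// in template.RunWorkflow.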
expectedRuntimeWorkflow.Annotations = map[string]string{util.AnnotationKeyRunName: "run1"} expectedRuntimeWorkflow.Spec.Arguments.Parameters = []v1alpha1.Parameter{{Name: "param1", Value: v1alpha1.AnyStringPtr("world")}} - expectedRuntimeWorkflow.Spec.ServiceAccountName = defaultPipelineRunnerServiceAccount + expectedRuntimeWorkflow.Spec.ServiceAccountName = common.DefaultPipelineRunnerServiceAccount expectedRuntimeWorkflow.Spec.PodMetadata = &v1alpha1.Metadata{ Labels: map[string]string{ util.LabelKeyWorkflowRunId: DefaultFakeUUID, @@ -592,6 +633,42 @@ func TestCreateRun_ThroughPipelineID(t *testing.T) { assert.Equal(t, expectedRunDetail, runDetail, "CreateRun stored invalid data in database") } +func TestCreateRun_ThroughWorkflowSpecV2(t *testing.T) { + store, manager, runDetail := initWithOneTimeRunV2(t) + expectedExperimentUUID := runDetail.ExperimentUUID + + expectedRunDetail := &model.RunDetail{ + Run: model.Run{ + UUID: "123e4567-e89b-12d3-a456-426655440000", + ExperimentUUID: expectedExperimentUUID, + DisplayName: "run1", + Name: "hello-world-0", + ServiceAccount: "pipeline-runner", + StorageState: api.Run_STORAGESTATE_AVAILABLE.String(), + CreatedAtInSec: 2, + PipelineSpec: model.PipelineSpec{ + PipelineSpecManifest: v2SpecHelloWorld, + }, + ResourceReferences: []*model.ResourceReference{ + { + ResourceUUID: "123e4567-e89b-12d3-a456-426655440000", + ResourceType: common.Run, + ReferenceUUID: DefaultFakeUUID, + ReferenceName: "e1", + ReferenceType: common.Experiment, + Relationship: common.Owner, + }, + }, + }, + } + assert.Equal(t, expectedRunDetail, runDetail, "The CreateRun return has unexpected value.") + assert.Equal(t, 1, store.ArgoClientFake.GetWorkflowCount(), "Workflow CRD is not created.") + runDetail, err := manager.GetRun(runDetail.UUID) + assert.Nil(t, err) + assert.Equal(t, expectedRunDetail, runDetail, "CreateRun stored invalid data in database") +} + + func TestCreateRun_ThroughWorkflowSpec(t *testing.T) { store, manager, runDetail := initWithOneTimeRun(t) expectedExperimentUUID := runDetail.ExperimentUUID @@ -600,7 +677,7 @@ func TestCreateRun_ThroughWorkflowSpec(t *testing.T) { expectedRuntimeWorkflow.Labels = map[string]string{util.LabelKeyWorkflowRunId: "123e4567-e89b-12d3-a456-426655440000"} expectedRuntimeWorkflow.Annotations = map[string]string{util.AnnotationKeyRunName: "run1"} expectedRuntimeWorkflow.Spec.Arguments.Parameters = []v1alpha1.Parameter{{Name: "param1", Value: v1alpha1.AnyStringPtr("world")}} - expectedRuntimeWorkflow.Spec.ServiceAccountName = defaultPipelineRunnerServiceAccount + expectedRuntimeWorkflow.Spec.ServiceAccountName = common.DefaultPipelineRunnerServiceAccount expectedRuntimeWorkflow.Spec.PodMetadata = &v1alpha1.Metadata{ Labels: map[string]string{ util.LabelKeyWorkflowRunId: DefaultFakeUUID, @@ -645,9 +722,9 @@ func TestCreateRun_ThroughWorkflowSpec(t *testing.T) { } func TestCreateRun_ThroughWorkflowSpecWithPatch(t *testing.T) { - viper.Set(HasDefaultBucketEnvVar, "true") - viper.Set(ProjectIDEnvVar, "test-project-id") - viper.Set(DefaultBucketNameEnvVar, "test-default-bucket") + viper.Set(common.HasDefaultBucketEnvVar, "true") + viper.Set(common.ProjectIDEnvVar, "test-project-id") + viper.Set(common.DefaultBucketNameEnvVar, "test-default-bucket") store, manager, runDetail := initWithPatchedRun(t) expectedExperimentUUID := runDetail.ExperimentUUID expectedRuntimeWorkflow := testWorkflow.DeepCopy() @@ -655,7 +732,7 @@ func TestCreateRun_ThroughWorkflowSpecWithPatch(t *testing.T) { expectedRuntimeWorkflow.Labels = 
map[string]string{util.LabelKeyWorkflowRunId: "123e4567-e89b-12d3-a456-426655440000"} expectedRuntimeWorkflow.Annotations = map[string]string{util.AnnotationKeyRunName: "run1"} expectedRuntimeWorkflow.Spec.Arguments.Parameters = []v1alpha1.Parameter{{Name: "param1", Value: v1alpha1.AnyStringPtr("test-default-bucket")}} - expectedRuntimeWorkflow.Spec.ServiceAccountName = defaultPipelineRunnerServiceAccount + expectedRuntimeWorkflow.Spec.ServiceAccountName = common.DefaultPipelineRunnerServiceAccount expectedRuntimeWorkflow.Spec.PodMetadata = &v1alpha1.Metadata{ Labels: map[string]string{ util.LabelKeyWorkflowRunId: DefaultFakeUUID, @@ -952,7 +1029,7 @@ func TestCreateRun_EmptyPipelineSpec(t *testing.T) { } _, err := manager.CreateRun(context.Background(), apiRun) assert.NotNil(t, err) - assert.Contains(t, err.Error(), "Failed to fetch workflow spec") + assert.Contains(t, err.Error(), "Failed to fetch manifest bytes") } func TestCreateRun_InvalidWorkflowSpec(t *testing.T) { @@ -970,7 +1047,7 @@ func TestCreateRun_InvalidWorkflowSpec(t *testing.T) { } _, err := manager.CreateRun(context.Background(), apiRun) assert.NotNil(t, err) - assert.Contains(t, err.Error(), "Failed to unmarshal workflow spec manifest") + assert.Contains(t, err.Error(), "unknown template format") } func TestCreateRun_NullWorkflowSpec(t *testing.T) { @@ -988,7 +1065,7 @@ func TestCreateRun_NullWorkflowSpec(t *testing.T) { } _, err := manager.CreateRun(context.Background(), apiRun) assert.NotNil(t, err) - assert.Contains(t, err.Error(), "Failed to fetch workflow spec manifest.: ResourceNotFoundError: WorkflowSpecManifest run1 not found.") + assert.Contains(t, err.Error(), "unknown template format") } func TestCreateRun_OverrideParametersError(t *testing.T) { @@ -1252,6 +1329,7 @@ func TestRetryRun_UpdateAndCreateFailed(t *testing.T) { assert.Contains(t, err.Error(), "Failed to create or update the run") } +// TODO Use table driven to write UT to test CreateJob func TestCreateJob_ThroughWorkflowSpec(t *testing.T) { store, _, job := initWithJob(t) defer store.Close() @@ -1282,6 +1360,39 @@ func TestCreateJob_ThroughWorkflowSpec(t *testing.T) { assert.Equal(t, expectedJob, job) } +func TestCreateJob_ThroughWorkflowSpecV2(t *testing.T) { + store, manager, job := initWithJobV2(t) + defer store.Close() + expectedJob := &model.Job{ + UUID: "123e4567-e89b-12d3-a456-426655440000", + DisplayName: "j1", + Name: "j1", + Namespace: "ns1", + ServiceAccount: "pipeline-runner", + Enabled: true, + CreatedAtInSec: 2, + UpdatedAtInSec: 2, + Conditions: "NO_STATUS", + PipelineSpec: model.PipelineSpec{ + PipelineSpecManifest: v2SpecHelloWorld, + }, + ResourceReferences: []*model.ResourceReference{ + { + ResourceUUID: "123e4567-e89b-12d3-a456-426655440000", + ResourceType: common.Job, + ReferenceUUID: DefaultFakeUUID, + ReferenceName: "e1", + ReferenceType: common.Experiment, + Relationship: common.Owner, + }, + }, + } + assert.Equal(t, expectedJob, job) + fetchedJob, err := manager.GetJob(job.UUID) + assert.Nil(t, err) + assert.Equal(t, expectedJob, fetchedJob, "CreateJob stored invalid data in database") +} + func TestCreateJob_ThroughPipelineID(t *testing.T) { store, manager, pipeline := initWithPipeline(t) defer store.Close() @@ -1538,7 +1649,7 @@ func TestCreateJob_EmptyPipelineSpec(t *testing.T) { } _, err := manager.CreateJob(context.Background(), job) assert.NotNil(t, err) - assert.Contains(t, err.Error(), "Failed to fetch workflow spec") + assert.Contains(t, err.Error(), "Failed to fetch manifest bytes") } func 
TestCreateJob_InvalidWorkflowSpec(t *testing.T) { @@ -1557,7 +1668,7 @@ func TestCreateJob_InvalidWorkflowSpec(t *testing.T) { } _, err := manager.CreateJob(context.Background(), job) assert.NotNil(t, err) - assert.Contains(t, err.Error(), "Failed to unmarshal workflow spec manifest") + assert.Contains(t, err.Error(), "unknown template format") } func TestCreateJob_NullWorkflowSpec(t *testing.T) { @@ -1576,7 +1687,7 @@ func TestCreateJob_NullWorkflowSpec(t *testing.T) { } _, err := manager.CreateJob(context.Background(), job) assert.NotNil(t, err) - assert.Contains(t, err.Error(), "Failed to fetch workflow spec manifest.: ResourceNotFoundError: WorkflowSpecManifest pp 1 not found.") + assert.Contains(t, err.Error(), "unknown template format") } func TestCreateJob_ExtraInputParameterError(t *testing.T) { @@ -2942,7 +3053,7 @@ func TestCreatePipelineVersion(t *testing.T) { msg: "InvalidTemplate", template: "I am invalid yaml", errorCode: codes.InvalidArgument, - errorIs: util.ErrorInvalidPipelineSpec, + errorIs: template.ErrorInvalidPipelineSpec, }, { msg: "BadDB", @@ -3055,7 +3166,7 @@ func TestCreatePipelineOrVersion_V2PipelineName(t *testing.T) { require.Nil(t, err) bytes, err := manager.GetPipelineTemplate(createdPipeline.UUID) require.Nil(t, err) - tmpl, err := util.NewTemplate(bytes) + tmpl, err := template.New(bytes) require.Nil(t, err) assert.Equal(t, test.pipelineName, tmpl.V2PipelineName()) @@ -3075,7 +3186,7 @@ func TestCreatePipelineOrVersion_V2PipelineName(t *testing.T) { require.Nil(t, err) bytes, err = manager.GetPipelineVersionTemplate(version.UUID) require.Nil(t, err) - tmpl, err = util.NewTemplate(bytes) + tmpl, err = template.New(bytes) require.Nil(t, err) assert.Equal(t, test.pipelineName, tmpl.V2PipelineName()) }) diff --git a/backend/src/apiserver/resource/resource_manager_util.go b/backend/src/apiserver/resource/resource_manager_util.go index 102a373bfde..859efa02da2 100644 --- a/backend/src/apiserver/resource/resource_manager_util.go +++ b/backend/src/apiserver/resource/resource_manager_util.go @@ -17,109 +17,17 @@ package resource import ( "context" "errors" - "fmt" - "regexp" - "strings" - "time" - wfv1 "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1" "github.com/argoproj/argo-workflows/v3/workflow/common" api "github.com/kubeflow/pipelines/backend/api/go_client" "github.com/kubeflow/pipelines/backend/src/apiserver/client" - servercommon "github.com/kubeflow/pipelines/backend/src/apiserver/common" "github.com/kubeflow/pipelines/backend/src/apiserver/model" "github.com/kubeflow/pipelines/backend/src/common/util" - scheduledworkflow "github.com/kubeflow/pipelines/backend/src/crd/pkg/apis/scheduledworkflow/v1beta1" apierr "k8s.io/apimachinery/pkg/api/errors" metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" - v1 "k8s.io/apimachinery/pkg/apis/meta/v1" + "strings" ) -func toCRDTrigger(apiTrigger *api.Trigger) *scheduledworkflow.Trigger { - var crdTrigger scheduledworkflow.Trigger - if apiTrigger.GetCronSchedule() != nil { - crdTrigger.CronSchedule = toCRDCronSchedule(apiTrigger.GetCronSchedule()) - } - if apiTrigger.GetPeriodicSchedule() != nil { - crdTrigger.PeriodicSchedule = toCRDPeriodicSchedule(apiTrigger.GetPeriodicSchedule()) - } - return &crdTrigger -} - -func toCRDCronSchedule(cronSchedule *api.CronSchedule) *scheduledworkflow.CronSchedule { - if cronSchedule == nil || cronSchedule.Cron == "" { - return nil - } - crdCronSchedule := scheduledworkflow.CronSchedule{} - crdCronSchedule.Cron = cronSchedule.Cron - - if cronSchedule.StartTime != nil { - 
startTime := v1.NewTime(time.Unix(cronSchedule.StartTime.Seconds, 0)) - crdCronSchedule.StartTime = &startTime - } - if cronSchedule.EndTime != nil { - endTime := v1.NewTime(time.Unix(cronSchedule.EndTime.Seconds, 0)) - crdCronSchedule.EndTime = &endTime - } - return &crdCronSchedule -} - -func toCRDPeriodicSchedule(periodicSchedule *api.PeriodicSchedule) *scheduledworkflow.PeriodicSchedule { - if periodicSchedule == nil || periodicSchedule.IntervalSecond == 0 { - return nil - } - crdPeriodicSchedule := scheduledworkflow.PeriodicSchedule{} - crdPeriodicSchedule.IntervalSecond = periodicSchedule.IntervalSecond - if periodicSchedule.StartTime != nil { - startTime := v1.NewTime(time.Unix(periodicSchedule.StartTime.Seconds, 0)) - crdPeriodicSchedule.StartTime = &startTime - } - if periodicSchedule.EndTime != nil { - endTime := v1.NewTime(time.Unix(periodicSchedule.EndTime.Seconds, 0)) - crdPeriodicSchedule.EndTime = &endTime - } - return &crdPeriodicSchedule -} - -func toCRDParameter(apiParams []*api.Parameter) []scheduledworkflow.Parameter { - var swParams []scheduledworkflow.Parameter - for _, apiParam := range apiParams { - swParam := scheduledworkflow.Parameter{ - Name: apiParam.Name, - Value: apiParam.Value, - } - swParams = append(swParams, swParam) - } - return swParams -} - -// Process the job name to remove special char, prepend with "job-" prefix if empty, and -// truncate size to <=25 -func toSWFCRDResourceGeneratedName(displayName string) (string, error) { - const ( - // K8s resource name only allow lower case alphabetic char, number and - - swfCompatibleNameRegx = "[^a-z0-9-]+" - ) - reg, err := regexp.Compile(swfCompatibleNameRegx) - if err != nil { - return "", util.NewInternalServerError(err, "Failed to compile ScheduledWorkflow name replacer Regex.") - } - processedName := reg.ReplaceAllString(strings.ToLower(displayName), "") - if processedName == "" { - processedName = "job-" - } - return util.Truncate(processedName, 25), nil -} - -func toParametersMap(apiParams []*api.Parameter) map[string]string { - // Preprocess workflow by appending parameter and add pipeline specific labels - desiredParamsMap := make(map[string]string) - for _, param := range apiParams { - desiredParamsMap[param.Name] = param.Value - } - return desiredParamsMap -} - func formulateRetryWorkflow(wf *util.Workflow) (*util.Workflow, []string, error) { switch wf.Status.Phase { case wfv1.WorkflowFailed, wfv1.WorkflowError: @@ -186,62 +94,6 @@ func deletePods(ctx context.Context, k8sCoreClient client.KubernetesCoreInterfac return nil } -// Mutate default values of specified pipeline spec. -// Args: -// text: (part of) pipeline file in string. -func PatchPipelineDefaultParameter(text string) (string, error) { - defaultBucket := servercommon.GetStringConfig(DefaultBucketNameEnvVar) - projectId := servercommon.GetStringConfig(ProjectIDEnvVar) - toPatch := map[string]string{ - "{{kfp-default-bucket}}": defaultBucket, - "{{kfp-project-id}}": projectId, - } - for key, value := range toPatch { - text = strings.Replace(text, key, value, -1) - } - return text, nil -} - -// Patch the system-specified default parameters if available. -func OverrideParameterWithSystemDefault(workflow util.Workflow, apiRun *api.Run) error { - // Patch the default value to workflow spec. 
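
Note that the helpers deleted in this hunk are relocated rather than dropped: the CRD trigger/schedule converters and OverrideParameterWithSystemDefault resurface in the new template package later in this patch, and the default-parameter patching is invoked there as common.PatchPipelineDefaultParameter. The substitution itself is plain string patching, roughly:

    package main

    import (
        "fmt"
        "strings"
    )

    // patchDefaults sketches the removed PatchPipelineDefaultParameter with
    // the server config lookups stubbed out as arguments (illustrative only).
    func patchDefaults(text, defaultBucket, projectID string) string {
        for key, value := range map[string]string{
            "{{kfp-default-bucket}}": defaultBucket,
            "{{kfp-project-id}}":     projectID,
        } {
            text = strings.ReplaceAll(text, key, value)
        }
        return text
    }

    func main() {
        fmt.Println(patchDefaults("gs://{{kfp-default-bucket}}/artifacts", "test-default-bucket", "test-project-id"))
        // Output: gs://test-default-bucket/artifacts
    }
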
- if servercommon.GetBoolConfigWithDefault(HasDefaultBucketEnvVar, false) { - patchedSlice := make([]wfv1.Parameter, 0) - for _, currentParam := range workflow.Spec.Arguments.Parameters { - if currentParam.Value != nil { - desiredValue, err := PatchPipelineDefaultParameter(currentParam.Value.String()) - if err != nil { - return fmt.Errorf("failed to patch default value to pipeline. Error: %v", err) - } - patchedSlice = append(patchedSlice, wfv1.Parameter{ - Name: currentParam.Name, - Value: wfv1.AnyStringPtr(desiredValue), - }) - } else if currentParam.Default != nil { - desiredValue, err := PatchPipelineDefaultParameter(currentParam.Default.String()) - if err != nil { - return fmt.Errorf("failed to patch default value to pipeline. Error: %v", err) - } - patchedSlice = append(patchedSlice, wfv1.Parameter{ - Name: currentParam.Name, - Value: wfv1.AnyStringPtr(desiredValue), - }) - } - } - workflow.Spec.Arguments.Parameters = patchedSlice - - // Patched the default value to apiRun - for _, param := range apiRun.PipelineSpec.Parameters { - var err error - param.Value, err = PatchPipelineDefaultParameter(param.Value) - if err != nil { - return fmt.Errorf("failed to patch default value to pipeline. Error: %v", err) - } - } - } - return nil -} - // Convert PipelineId in PipelineSpec to the pipeline's default pipeline version. // This is for legacy usage of pipeline id to create run. The standard way to // create run is by specifying the pipeline version. diff --git a/backend/src/apiserver/resource/resource_manager_util_test.go b/backend/src/apiserver/resource/resource_manager_util_test.go index f68d606f012..188e3aa066a 100644 --- a/backend/src/apiserver/resource/resource_manager_util_test.go +++ b/backend/src/apiserver/resource/resource_manager_util_test.go @@ -15,137 +15,15 @@ package resource import ( - "testing" - "time" - "github.com/ghodss/yaml" + api "github.com/kubeflow/pipelines/backend/api/go_client" "github.com/kubeflow/pipelines/backend/src/apiserver/storage" "github.com/kubeflow/pipelines/backend/src/common/util" - v1 "k8s.io/apimachinery/pkg/apis/meta/v1" - - "github.com/golang/protobuf/ptypes/timestamp" - api "github.com/kubeflow/pipelines/backend/api/go_client" - scheduledworkflow "github.com/kubeflow/pipelines/backend/src/crd/pkg/apis/scheduledworkflow/v1beta1" "github.com/stretchr/testify/assert" + "testing" ) -func TestToSwfCRDResourceGeneratedName_SpecialCharsAndSpace(t *testing.T) { - name, err := toSWFCRDResourceGeneratedName("! 
HaVe ä £unky name") - assert.Nil(t, err) - assert.Equal(t, name, "haveunkyname") -} - -func TestToSwfCRDResourceGeneratedName_TruncateLongName(t *testing.T) { - name, err := toSWFCRDResourceGeneratedName("AloooooooooooooooooongName") - assert.Nil(t, err) - assert.Equal(t, name, "aloooooooooooooooooongnam") -} - -func TestToSwfCRDResourceGeneratedName_EmptyName(t *testing.T) { - name, err := toSWFCRDResourceGeneratedName("") - assert.Nil(t, err) - assert.Equal(t, name, "job-") -} - -func TestToCrdParameter(t *testing.T) { - assert.Equal(t, - toCRDParameter([]*api.Parameter{{Name: "param2", Value: "world"}, {Name: "param1", Value: "hello"}}), - []scheduledworkflow.Parameter{{Name: "param2", Value: "world"}, {Name: "param1", Value: "hello"}}) -} -func TestToCrdCronSchedule(t *testing.T) { - actualCronSchedule := toCRDCronSchedule(&api.CronSchedule{ - Cron: "123", - StartTime: ×tamp.Timestamp{Seconds: 123}, - EndTime: ×tamp.Timestamp{Seconds: 456}, - }) - startTime := v1.NewTime(time.Unix(123, 0)) - endTime := v1.NewTime(time.Unix(456, 0)) - assert.Equal(t, actualCronSchedule, &scheduledworkflow.CronSchedule{ - Cron: "123", - StartTime: &startTime, - EndTime: &endTime, - }) -} - -func TestToCrdCronSchedule_NilCron(t *testing.T) { - actualCronSchedule := toCRDCronSchedule(&api.CronSchedule{ - StartTime: ×tamp.Timestamp{Seconds: 123}, - EndTime: ×tamp.Timestamp{Seconds: 456}, - }) - assert.Nil(t, actualCronSchedule) -} - -func TestToCrdCronSchedule_NilStartTime(t *testing.T) { - actualCronSchedule := toCRDCronSchedule(&api.CronSchedule{ - Cron: "123", - EndTime: ×tamp.Timestamp{Seconds: 456}, - }) - endTime := v1.NewTime(time.Unix(456, 0)) - assert.Equal(t, actualCronSchedule, &scheduledworkflow.CronSchedule{ - Cron: "123", - EndTime: &endTime, - }) -} - -func TestToCrdCronSchedule_NilEndTime(t *testing.T) { - actualCronSchedule := toCRDCronSchedule(&api.CronSchedule{ - Cron: "123", - StartTime: ×tamp.Timestamp{Seconds: 123}, - }) - startTime := v1.NewTime(time.Unix(123, 0)) - assert.Equal(t, actualCronSchedule, &scheduledworkflow.CronSchedule{ - Cron: "123", - StartTime: &startTime, - }) -} - -func TestToCrdPeriodicSchedule(t *testing.T) { - actualPeriodicSchedule := toCRDPeriodicSchedule(&api.PeriodicSchedule{ - IntervalSecond: 123, - StartTime: ×tamp.Timestamp{Seconds: 1}, - EndTime: ×tamp.Timestamp{Seconds: 2}, - }) - startTime := v1.NewTime(time.Unix(1, 0)) - endTime := v1.NewTime(time.Unix(2, 0)) - assert.Equal(t, actualPeriodicSchedule, &scheduledworkflow.PeriodicSchedule{ - IntervalSecond: 123, - StartTime: &startTime, - EndTime: &endTime, - }) -} - -func TestToCrdPeriodicSchedule_NilInterval(t *testing.T) { - actualPeriodicSchedule := toCRDPeriodicSchedule(&api.PeriodicSchedule{ - StartTime: ×tamp.Timestamp{Seconds: 1}, - EndTime: ×tamp.Timestamp{Seconds: 2}, - }) - assert.Nil(t, actualPeriodicSchedule) -} - -func TestToCrdPeriodicSchedule_NilStartTime(t *testing.T) { - actualPeriodicSchedule := toCRDPeriodicSchedule(&api.PeriodicSchedule{ - IntervalSecond: 123, - EndTime: ×tamp.Timestamp{Seconds: 2}, - }) - endTime := v1.NewTime(time.Unix(2, 0)) - assert.Equal(t, actualPeriodicSchedule, &scheduledworkflow.PeriodicSchedule{ - IntervalSecond: 123, - EndTime: &endTime, - }) -} - -func TestToCrdPeriodicSchedule_NilEndTime(t *testing.T) { - actualPeriodicSchedule := toCRDPeriodicSchedule(&api.PeriodicSchedule{ - IntervalSecond: 123, - StartTime: ×tamp.Timestamp{Seconds: 1}, - }) - startTime := v1.NewTime(time.Unix(1, 0)) - assert.Equal(t, actualPeriodicSchedule, 
&scheduledworkflow.PeriodicSchedule{ - IntervalSecond: 123, - StartTime: &startTime, - }) -} func TestRetryWorkflowWith(t *testing.T) { wf := ` diff --git a/backend/src/apiserver/server/api_converter.go b/backend/src/apiserver/server/api_converter.go index ee4e87ac2d8..02e51781ed5 100644 --- a/backend/src/apiserver/server/api_converter.go +++ b/backend/src/apiserver/server/api_converter.go @@ -19,6 +19,7 @@ import ( api "github.com/kubeflow/pipelines/backend/api/go_client" "github.com/kubeflow/pipelines/backend/src/apiserver/common" "github.com/kubeflow/pipelines/backend/src/apiserver/model" + "github.com/kubeflow/pipelines/backend/src/apiserver/template" "github.com/kubeflow/pipelines/backend/src/common/util" ) @@ -129,7 +130,7 @@ func toApiParameters(paramsString string) ([]*api.Parameter, error) { if paramsString == "" { return nil, nil } - params, err := util.UnmarshalParameters(paramsString) + params, err := template.UnmarshalParameters(paramsString) if err != nil { return nil, util.NewInternalServerError(err, "Parameter with wrong format is stored") } diff --git a/backend/src/apiserver/server/job_server_test.go b/backend/src/apiserver/server/job_server_test.go index c881ed3baa0..26de4dd0a62 100644 --- a/backend/src/apiserver/server/job_server_test.go +++ b/backend/src/apiserver/server/job_server_test.go @@ -160,7 +160,7 @@ func TestValidateApiJob_NoValidPipelineSpecOrPipelineVersion(t *testing.T) { err := server.validateCreateJobRequest(&api.CreateJobRequest{Job: apiJob}) assert.Equal(t, codes.InvalidArgument, err.(*util.UserError).ExternalStatusCode()) assert.NotNil(t, err) - assert.Contains(t, err.Error(), "Please specify a pipeline by providing a (workflow manifest) or (pipeline id or/and pipeline version).") + assert.Contains(t, err.Error(), "Please specify a pipeline by providing a (workflow manifest or pipeline manifest) or (pipeline id or/and pipeline version).") } func TestValidateApiJob_WorkflowManifestAndPipelineVersion(t *testing.T) { @@ -185,7 +185,7 @@ func TestValidateApiJob_WorkflowManifestAndPipelineVersion(t *testing.T) { err := server.validateCreateJobRequest(&api.CreateJobRequest{Job: apiJob}) assert.Equal(t, codes.InvalidArgument, err.(*util.UserError).ExternalStatusCode()) assert.NotNil(t, err) - assert.Contains(t, err.Error(), "Please don't specify a pipeline version or pipeline ID when you specify a workflow manifest.") + assert.Contains(t, err.Error(), "Please don't specify a pipeline version or pipeline ID when you specify a workflow manifest or pipeline manifest.") } func TestValidateApiJob_ValidatePipelineSpecFailed(t *testing.T) { diff --git a/backend/src/apiserver/server/run_server_test.go b/backend/src/apiserver/server/run_server_test.go index 53b79394e5b..5079180c00e 100644 --- a/backend/src/apiserver/server/run_server_test.go +++ b/backend/src/apiserver/server/run_server_test.go @@ -531,7 +531,7 @@ func TestValidateCreateRunRequest_NilPipelineSpecAndEmptyPipelineVersion(t *test } err := server.validateCreateRunRequest(&api.CreateRunRequest{Run: run}) assert.NotNil(t, err) - assert.Contains(t, err.Error(), "Please specify a pipeline by providing a (workflow manifest) or (pipeline id or/and pipeline version).") + assert.Contains(t, err.Error(), "Please specify a pipeline by providing a (workflow manifest or pipeline manifest) or (pipeline id or/and pipeline version).") } func TestValidateCreateRunRequest_WorkflowManifestAndPipelineVersion(t *testing.T) { @@ -548,7 +548,7 @@ func TestValidateCreateRunRequest_WorkflowManifestAndPipelineVersion(t *testing. 
} err := server.validateCreateRunRequest(&api.CreateRunRequest{Run: run}) assert.NotNil(t, err) - assert.Contains(t, err.Error(), "Please don't specify a pipeline version or pipeline ID when you specify a workflow manifest.") + assert.Contains(t, err.Error(), "Please don't specify a pipeline version or pipeline ID when you specify a workflow manifest or pipeline manifest.") } func TestValidateCreateRunRequest_InvalidPipelineSpec(t *testing.T) { @@ -566,7 +566,7 @@ func TestValidateCreateRunRequest_InvalidPipelineSpec(t *testing.T) { } err := server.validateCreateRunRequest(&api.CreateRunRequest{Run: run}) assert.NotNil(t, err) - assert.Contains(t, err.Error(), "Please don't specify a pipeline version or pipeline ID when you specify a workflow manifest.") + assert.Contains(t, err.Error(), "Please don't specify a pipeline version or pipeline ID when you specify a workflow manifest or pipeline manifest.") } func TestValidateCreateRunRequest_TooMuchParameters(t *testing.T) { diff --git a/backend/src/apiserver/server/util.go b/backend/src/apiserver/server/util.go index ba4232a6ace..52a72984073 100644 --- a/backend/src/apiserver/server/util.go +++ b/backend/src/apiserver/server/util.go @@ -8,6 +8,8 @@ import ( "compress/gzip" "context" "encoding/json" + "github.com/golang/protobuf/jsonpb" + "github.com/kubeflow/pipelines/api/v2alpha1/go/pipelinespec" "io" "io/ioutil" "net/url" @@ -219,18 +221,25 @@ func ValidateExperimentResourceReference(resourceManager *resource.ResourceManag func ValidatePipelineSpecAndResourceReferences(resourceManager *resource.ResourceManager, spec *api.PipelineSpec, resourceReferences []*api.ResourceReference) error { pipelineId := spec.GetPipelineId() workflowManifest := spec.GetWorkflowManifest() + pipelineManifest := spec.GetPipelineManifest() pipelineVersionId := getPipelineVersionIdFromResourceReferences(resourceManager, resourceReferences) - if workflowManifest != "" { + if workflowManifest != "" || pipelineManifest != ""{ + if workflowManifest != "" && pipelineManifest != "" { + return util.NewInvalidInputError("Please don't specify both workflow manifest and pipeline manifest.") + } if pipelineId != "" || pipelineVersionId != "" { - return util.NewInvalidInputError("Please don't specify a pipeline version or pipeline ID when you specify a workflow manifest.") + return util.NewInvalidInputError("Please don't specify a pipeline version or pipeline ID when you specify a workflow manifest or pipeline manifest.") + } + if err := validateWorkflowManifest(workflowManifest); err != nil { + return err } - if err := validateWorkflowManifest(spec.GetWorkflowManifest()); err != nil { + if err := validatePipelineManifest(pipelineManifest); err != nil { return err } } else { if pipelineId == "" && pipelineVersionId == "" { - return util.NewInvalidInputError("Please specify a pipeline by providing a (workflow manifest) or (pipeline id or/and pipeline version).") + return util.NewInvalidInputError("Please specify a pipeline by providing a (workflow manifest or pipeline manifest) or (pipeline id or/and pipeline version).") } if err := validatePipelineId(resourceManager, pipelineId); err != nil { return err @@ -247,7 +256,16 @@ func ValidatePipelineSpecAndResourceReferences(resourceManager *resource.Resourc } } } - return validateParameters(spec.GetParameters()) + if spec.GetParameters() != nil && spec.GetRuntimeConfig() != nil { + return util.NewInvalidInputError("Please don't specify both parameters and runtime config.") + } + if err := validateParameters(spec.GetParameters()); 
err != nil { + return err + } + if err := validateRuntimeConfig(spec.GetRuntimeConfig()); err != nil { + return err + } + return nil } func validateParameters(parameters []*api.Parameter) error { if parameters != nil { @@ -264,6 +282,21 @@ func validateParameters(parameters []*api.Parameter) error { return nil } +func validateRuntimeConfig(runtimeConfig *api.PipelineSpec_RuntimeConfig) error { + if runtimeConfig.GetParameters() != nil { + paramsBytes, err := json.Marshal(runtimeConfig.GetParameters()) + if err != nil { + return util.NewInternalServerError(err, + "Failed to Marshall the runtime config parameters into bytes.") + } + if len(paramsBytes) > util.MaxParameterBytes { + return util.NewInvalidInputError("The input parameter length exceed maximum size of %v.", util.MaxParameterBytes) + } + } + return nil +} + + func validatePipelineId(resourceManager *resource.ResourceManager, pipelineId string) error { if pipelineId != "" { // Verify pipeline exist @@ -286,6 +319,18 @@ func validateWorkflowManifest(workflowManifest string) error { return nil } +func validatePipelineManifest(pipelineManifest string) error { + if pipelineManifest != "" { + // Verify valid IR spec + spec := &pipelinespec.PipelineSpec{} + if err := jsonpb.UnmarshalString(pipelineManifest, spec); err != nil { + return util.NewInvalidInputErrorWithDetails(err, + "Invalid IR spec format.") + } + } + return nil +} + func getPipelineVersionIdFromResourceReferences(resourceManager *resource.ResourceManager, resourceReferences []*api.ResourceReference) string { var pipelineVersionId = "" for _, resourceReference := range resourceReferences { diff --git a/backend/src/apiserver/server/util_test.go b/backend/src/apiserver/server/util_test.go index 0249eaccf78..5ba0961768c 100644 --- a/backend/src/apiserver/server/util_test.go +++ b/backend/src/apiserver/server/util_test.go @@ -284,7 +284,7 @@ func TestValidatePipelineSpecAndResourceReferences_WorkflowManifestAndPipelineVe WorkflowManifest: testWorkflow.ToStringForStore()} err := ValidatePipelineSpecAndResourceReferences(manager, spec, validReferencesOfExperimentAndPipelineVersion) assert.NotNil(t, err) - assert.Contains(t, err.Error(), "Please don't specify a pipeline version or pipeline ID when you specify a workflow manifest.") + assert.Contains(t, err.Error(), "Please don't specify a pipeline version or pipeline ID when you specify a workflow manifest or pipeline manifest.") } func TestValidatePipelineSpecAndResourceReferences_WorkflowManifestAndPipelineID(t *testing.T) { @@ -295,7 +295,7 @@ func TestValidatePipelineSpecAndResourceReferences_WorkflowManifestAndPipelineID WorkflowManifest: testWorkflow.ToStringForStore()} err := ValidatePipelineSpecAndResourceReferences(manager, spec, validReference) assert.NotNil(t, err) - assert.Contains(t, err.Error(), "Please don't specify a pipeline version or pipeline ID when you specify a workflow manifest.") + assert.Contains(t, err.Error(), "Please don't specify a pipeline version or pipeline ID when you specify a workflow manifest or pipeline manifest.") } func TestValidatePipelineSpecAndResourceReferences_InvalidWorkflowManifest(t *testing.T) { @@ -312,7 +312,7 @@ func TestValidatePipelineSpecAndResourceReferences_NilPipelineSpecAndEmptyPipeli defer clients.Close() err := ValidatePipelineSpecAndResourceReferences(manager, nil, validReference) assert.NotNil(t, err) - assert.Contains(t, err.Error(), "Please specify a pipeline by providing a (workflow manifest) or (pipeline id or/and pipeline version).") + assert.Contains(t, 
err.Error(), "Please specify a pipeline by providing a (workflow manifest or pipeline manifest) or (pipeline id or/and pipeline version).") } func TestValidatePipelineSpecAndResourceReferences_EmptyPipelineSpecAndEmptyPipelineVersion(t *testing.T) { @@ -321,7 +321,7 @@ func TestValidatePipelineSpecAndResourceReferences_EmptyPipelineSpecAndEmptyPipe spec := &api.PipelineSpec{} err := ValidatePipelineSpecAndResourceReferences(manager, spec, validReference) assert.NotNil(t, err) - assert.Contains(t, err.Error(), "Please specify a pipeline by providing a (workflow manifest) or (pipeline id or/and pipeline version).") + assert.Contains(t, err.Error(), "Please specify a pipeline by providing a (workflow manifest or pipeline manifest) or (pipeline id or/and pipeline version).") } func TestValidatePipelineSpecAndResourceReferences_InvalidPipelineId(t *testing.T) { diff --git a/backend/src/apiserver/storage/run_store.go b/backend/src/apiserver/storage/run_store.go index c65cc753c25..b2abb39bc3a 100644 --- a/backend/src/apiserver/storage/run_store.go +++ b/backend/src/apiserver/storage/run_store.go @@ -207,7 +207,7 @@ func (s *RunStore) GetRun(runId string) (*model.RunDetail, error) { if len(runs) == 0 { return nil, util.NewResourceNotFoundError("Run", fmt.Sprint(runId)) } - if runs[0].WorkflowRuntimeManifest == "" { + if runs[0].WorkflowRuntimeManifest == "" && runs[0].WorkflowSpecManifest != ""{ // This can only happen when workflow reporting is failed. return nil, util.NewResourceNotFoundError("Failed to get run: %s", runId) } @@ -316,7 +316,7 @@ func (s *RunStore) scanRowsToRunDetails(rows *sql.Rows) ([]*model.RunDetail, err PipelineSpec: model.PipelineSpec{ PipelineId: pipelineId, PipelineName: pipelineName, - PipelineSpecManifest: pipelineRuntimeManifest, + PipelineSpecManifest: pipelineSpecManifest, WorkflowSpecManifest: workflowSpecManifest, Parameters: parameters, }, diff --git a/backend/src/apiserver/template/argo_template.go b/backend/src/apiserver/template/argo_template.go new file mode 100644 index 00000000000..90c1954faaf --- /dev/null +++ b/backend/src/apiserver/template/argo_template.go @@ -0,0 +1,203 @@ +package template + +import ( + "fmt" + workflowapi "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1" + "github.com/argoproj/argo-workflows/v3/workflow/validate" + "github.com/ghodss/yaml" + + api "github.com/kubeflow/pipelines/backend/api/go_client" + "github.com/kubeflow/pipelines/backend/src/apiserver/common" + "github.com/kubeflow/pipelines/backend/src/common/util" + scheduledworkflow "github.com/kubeflow/pipelines/backend/src/crd/pkg/apis/scheduledworkflow/v1beta1" + metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" +) + +func (t *Argo) RunWorkflow(apiRun *api.Run, options RunWorkflowOptions) (*util.Workflow, error) { + workflow := util.NewWorkflow(t.wf.Workflow.DeepCopy()) + + // Add a KFP specific label for cache service filtering. The cache_enabled flag here is a global control for whether cache server will + // receive targeting pods. Since cache server only receives pods in step level, the resource manager here will set this global label flag + // on every single step/pod so the cache server can understand. + // TODO: Add run_level flag with similar logic by reading flag value from create_run api. 
+ workflow.SetLabelsToAllTemplates(util.LabelKeyCacheEnabled, common.IsCacheEnabled()) + parameters := toParametersMap(apiRun.GetPipelineSpec().GetParameters()) + // Verify no additional parameter provided + if err := workflow.VerifyParameters(parameters); err != nil { + return nil, util.Wrap(err, "Failed to verify parameters.") + } + // Append provided parameter + workflow.OverrideParameters(parameters) + + // Replace macros + formatter := util.NewRunParameterFormatter(options.RunId, options.RunAt) + formattedParams := formatter.FormatWorkflowParameters(workflow.GetWorkflowParametersAsMap()) + workflow.OverrideParameters(formattedParams) + + setDefaultServiceAccount(workflow, apiRun.GetServiceAccount()) + + // Disable istio sidecar injection if not specified + workflow.SetAnnotationsToAllTemplatesIfKeyNotExist(util.AnnotationKeyIstioSidecarInject, util.AnnotationValueIstioSidecarInjectDisabled) + + err := OverrideParameterWithSystemDefault(workflow) + if err != nil { + return nil, err + } + + // Add label to the workflow so it can be persisted by persistent agent later. + workflow.SetLabels(util.LabelKeyWorkflowRunId, options.RunId) + // Add run name annotation to the workflow so that it can be logged by the Metadata Writer. + workflow.SetAnnotations(util.AnnotationKeyRunName, apiRun.Name) + // Replace {{workflow.uid}} with runId + err = workflow.ReplaceUID(options.RunId) + if err != nil { + return nil, util.NewInternalServerError(err, "Failed to replace workflow ID") + } + workflow.SetPodMetadataLabels(util.LabelKeyWorkflowRunId, options.RunId) + + // Marking auto-added artifacts as optional. Otherwise most older workflows will start failing after upgrade to Argo 2.3. + // TODO: Fix the components to explicitly declare the artifacts they really output. 
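
The verify-then-override sequence above (VerifyParameters rejects undeclared parameters, OverrideParameters overwrites declared ones) reduces to the following self-contained sketch, under assumed semantics:

    package main

    import "fmt"

    // overrideParams sketches the assumed semantics of VerifyParameters plus
    // OverrideParameters: reject parameters the spec does not declare, then
    // overwrite the declared ones with the run's values.
    func overrideParams(declared, provided map[string]string) (map[string]string, error) {
        merged := make(map[string]string, len(declared))
        for k, v := range declared {
            merged[k] = v
        }
        for k, v := range provided {
            if _, ok := declared[k]; !ok {
                return nil, fmt.Errorf("unknown parameter %q", k) // VerifyParameters fails here
            }
            merged[k] = v
        }
        return merged, nil
    }

    func main() {
        merged, err := overrideParams(
            map[string]string{"param1": "hello"},
            map[string]string{"param1": "world"},
        )
        fmt.Println(merged, err) // map[param1:world] <nil>
    }
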
+ for templateIdx, template := range workflow.Workflow.Spec.Templates { + for artIdx, artifact := range template.Outputs.Artifacts { + if artifact.Name == "mlpipeline-ui-metadata" || artifact.Name == "mlpipeline-metrics" { + workflow.Workflow.Spec.Templates[templateIdx].Outputs.Artifacts[artIdx].Optional = true + } + } + } + return workflow, nil + +} + +type Argo struct { + wf *util.Workflow +} + +func (t *Argo) ScheduledWorkflow(apiJob *api.Job) (*scheduledworkflow.ScheduledWorkflow, error) { + workflow := util.NewWorkflow(t.wf.Workflow.DeepCopy()) + + parameters := toParametersMap(apiJob.GetPipelineSpec().GetParameters()) + // Verify no additional parameter provided + if err := workflow.VerifyParameters(parameters); err != nil { + return nil, util.Wrap(err, "Failed to verify parameters.") + } + // Append provided parameter + workflow.OverrideParameters(parameters) + setDefaultServiceAccount(workflow, apiJob.GetServiceAccount()) + // Disable istio sidecar injection if not specified + workflow.SetAnnotationsToAllTemplatesIfKeyNotExist(util.AnnotationKeyIstioSidecarInject, util.AnnotationValueIstioSidecarInjectDisabled) + swfGeneratedName, err := toSWFCRDResourceGeneratedName(apiJob.Name) + if err != nil { + return nil, util.Wrap(err, "Create job failed") + } + scheduledWorkflow := &scheduledworkflow.ScheduledWorkflow{ + ObjectMeta: metav1.ObjectMeta{GenerateName: swfGeneratedName}, + Spec: scheduledworkflow.ScheduledWorkflowSpec{ + Enabled: apiJob.Enabled, + MaxConcurrency: &apiJob.MaxConcurrency, + Trigger: *toCRDTrigger(apiJob.Trigger), + Workflow: &scheduledworkflow.WorkflowResource{ + Parameters: toCRDParameter(apiJob.GetPipelineSpec().GetParameters()), + Spec: workflow.Spec, + }, + NoCatchup: util.BoolPointer(apiJob.NoCatchup), + }, + } + + // Marking auto-added artifacts as optional. Otherwise most older workflows will start failing after upgrade to Argo 2.3. + // TODO: Fix the components to explicitly declare the artifacts they really output. 
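
The optional-artifact loop that follows is the same one RunWorkflow applies above; a hypothetical factoring, purely for illustration and not part of this patch:

    package workflowutil

    import (
        wfv1 "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1"
    )

    // markAutoAddedArtifactsOptional marks the auto-added KFP artifacts
    // optional so older workflows keep running after the Argo 2.3 upgrade.
    func markAutoAddedArtifactsOptional(spec *wfv1.WorkflowSpec) {
        for ti := range spec.Templates {
            arts := spec.Templates[ti].Outputs.Artifacts
            for ai := range arts {
                if arts[ai].Name == "mlpipeline-ui-metadata" || arts[ai].Name == "mlpipeline-metrics" {
                    arts[ai].Optional = true
                }
            }
        }
    }
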
+ for templateIdx, template := range scheduledWorkflow.Spec.Workflow.Spec.Templates { + for artIdx, artifact := range template.Outputs.Artifacts { + if artifact.Name == "mlpipeline-ui-metadata" || artifact.Name == "mlpipeline-metrics" { + scheduledWorkflow.Spec.Workflow.Spec.Templates[templateIdx].Outputs.Artifacts[artIdx].Optional = true + } + } + } + return scheduledWorkflow, nil +} + +func (t *Argo) GetTemplateType() TemplateType { + return V1 +} + +func NewArgoTemplate(bytes []byte) (*Argo, error) { + wf, err := ValidateWorkflow(bytes) + if err != nil { + return nil, err + } + return &Argo{wf}, nil +} + +func (t *Argo) Bytes() []byte { + if t == nil { + return nil + } + return []byte(t.wf.ToStringForStore()) +} + +func (t *Argo) IsV2() bool { + if t == nil { + return false + } + return t.wf.IsV2Compatible() +} + +const ( + paramV2compatPipelineName = "pipeline-name" +) + +func (t *Argo) V2PipelineName() string { + if t == nil { + return "" + } + return t.wf.GetWorkflowParametersAsMap()[paramV2compatPipelineName] +} + +func (t *Argo) OverrideV2PipelineName(name, namespace string) { + if t == nil || !t.wf.IsV2Compatible() { + return + } + var pipelineRef string + if namespace != "" { + pipelineRef = fmt.Sprintf("namespace/%s/pipeline/%s", namespace, name) + } else { + pipelineRef = fmt.Sprintf("pipeline/%s", name) + } + overrides := make(map[string]string) + overrides[paramV2compatPipelineName] = pipelineRef + t.wf.OverrideParameters(overrides) +} + +func (t *Argo) ParametersJSON() (string, error) { + if t == nil { + return "", nil + } + return MarshalParameters(t.wf.Spec.Arguments.Parameters) +} + +func NewArgoTemplateFromWorkflow(wf *workflowapi.Workflow) (*Argo, error) { + return &Argo{wf: &util.Workflow{Workflow: wf}}, nil +} + +func ValidateWorkflow(template []byte) (*util.Workflow, error) { + var wf workflowapi.Workflow + err := yaml.Unmarshal(template, &wf) + if err != nil { + return nil, util.NewInvalidInputErrorWithDetails(err, "Failed to parse the workflow template.") + } + if wf.APIVersion != argoVersion { + return nil, util.NewInvalidInputError("Unsupported argo version. Expected: %v. Received: %v", argoVersion, wf.APIVersion) + } + if wf.Kind != argoK8sResource { + return nil, util.NewInvalidInputError("Unexpected resource type. Expected: %v. Received: %v", argoK8sResource, wf.Kind) + } + _, err = validate.ValidateWorkflow(nil, nil, &wf, validate.ValidateOpts{ + Lint: true, + IgnoreEntrypoint: true, + WorkflowTemplateValidation: false, // not used by kubeflow + }) + if err != nil { + return nil, err + } + return util.NewWorkflow(&wf), nil +} + diff --git a/backend/src/apiserver/template/template.go b/backend/src/apiserver/template/template.go new file mode 100644 index 00000000000..23b6cb0fc1d --- /dev/null +++ b/backend/src/apiserver/template/template.go @@ -0,0 +1,308 @@ +// Copyright 2018 The Kubeflow Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
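
One detail worth calling out from argo_template.go above: OverrideV2PipelineName derives the ML Metadata pipeline context reference with a fixed scheme, which the v2 template later in this patch shares. In isolation (the "kubeflow" namespace below is illustrative):

    package main

    import "fmt"

    // pipelineRef reproduces the naming scheme from OverrideV2PipelineName:
    // namespaced pipelines get "namespace/<ns>/pipeline/<name>", shared
    // pipelines just "pipeline/<name>".
    func pipelineRef(name, namespace string) string {
        if namespace != "" {
            return fmt.Sprintf("namespace/%s/pipeline/%s", namespace, name)
        }
        return fmt.Sprintf("pipeline/%s", name)
    }

    func main() {
        fmt.Println(pipelineRef("hello-world", ""))         // pipeline/hello-world
        fmt.Println(pipelineRef("hello-world", "kubeflow")) // namespace/kubeflow/pipeline/hello-world
    }
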
+ +package template + +import ( + "encoding/json" + "fmt" + "github.com/kubeflow/pipelines/api/v2alpha1/go/pipelinespec" + api "github.com/kubeflow/pipelines/backend/api/go_client" + "regexp" + "strings" + "time" + + "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1" + wfv1 "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1" + "github.com/ghodss/yaml" + + "github.com/kubeflow/pipelines/backend/src/apiserver/common" + "github.com/kubeflow/pipelines/backend/src/common/util" + scheduledworkflow "github.com/kubeflow/pipelines/backend/src/crd/pkg/apis/scheduledworkflow/v1beta1" + "google.golang.org/protobuf/encoding/protojson" + metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" +) + +type TemplateType string + +const ( + V1 TemplateType = "v1Argo" + V2 TemplateType = "v2" + Unknown TemplateType = "Unknown" + + argoGroup = "argoproj.io/" + argoVersion = "argoproj.io/v1alpha1" + argoK8sResource = "Workflow" + +) + +// Unmarshal parameters from JSON encoded string. +func UnmarshalParameters(paramsString string) ([]v1alpha1.Parameter, error) { + if paramsString == "" { + return nil, nil + } + var params []v1alpha1.Parameter + err := json.Unmarshal([]byte(paramsString), ¶ms) + if err != nil { + return nil, util.NewInternalServerError(err, "Parameters have wrong format") + } + return params, nil +} + +// Marshal parameters to JSON encoded string. +// This also checks result is not longer than a limit. +func MarshalParameters(params []v1alpha1.Parameter) (string, error) { + if params == nil { + return "[]", nil + } + paramBytes, err := json.Marshal(params) + if err != nil { + return "", util.NewInvalidInputErrorWithDetails(err, "Failed to marshal the parameter.") + } + if len(paramBytes) > util.MaxParameterBytes { + return "", util.NewInvalidInputError("The input parameter length exceed maximum size of %v.", util.MaxParameterBytes) + } + return string(paramBytes), nil +} + +var ErrorInvalidPipelineSpec = fmt.Errorf("pipeline spec is invalid") + +// inferTemplateFormat infers format from pipeline template. +// There is no guarantee that the template is valid in inferred format, so validation +// is still needed. +func inferTemplateFormat(template []byte) TemplateType { + switch { + case len(template) == 0: + return Unknown + case isArgoWorkflow(template): + return V1 + case isPipelineSpec(template): + return V2 + default: + return Unknown + } +} + +// isArgoWorkflow returns whether template is in argo workflow spec format. +func isArgoWorkflow(template []byte) bool { + var meta metav1.TypeMeta + err := yaml.Unmarshal(template, &meta) + if err != nil { + return false + } + return strings.HasPrefix(meta.APIVersion, argoGroup) && meta.Kind == argoK8sResource +} + +// isPipelineSpec returns whether template is in KFP api/v2alpha1/PipelineSpec format. +func isPipelineSpec(template []byte) bool { + var spec pipelinespec.PipelineSpec + err := protojson.Unmarshal(template, &spec) + return err == nil && spec.GetPipelineInfo().GetName() != "" && spec.GetRoot() != nil +} + +// Pipeline template +type Template interface { + IsV2() bool + // Gets v2 pipeline name. + V2PipelineName() string + // Overrides v2 pipeline name to distinguish shared/namespaced pipelines. + // The name is used as ML Metadata pipeline context name. + OverrideV2PipelineName(name, namespace string) + // Gets parameters in JSON format. + ParametersJSON() (string, error) + // Get bytes content. 
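
Callers are expected to go through the package-level constructor New (defined just below) rather than picking an implementation themselves; a usage sketch, where the "unknown template format" message is the one the resource manager tests earlier in this patch assert:

    package main

    import (
        "fmt"

        "github.com/kubeflow/pipelines/backend/src/apiserver/template"
    )

    func main() {
        // Unrecognized input surfaces ErrorInvalidPipelineSpec.
        if _, err := template.New([]byte("I am invalid yaml")); err != nil {
            fmt.Println(err) // mentions "unknown template format"
        }

        // A well-formed Argo manifest comes back as a V1 template
        // (this one is expected to pass Argo lint; illustrative).
        tmpl, err := template.New([]byte(`
    apiVersion: argoproj.io/v1alpha1
    kind: Workflow
    metadata:
      generateName: hello-world-
    spec:
      entrypoint: whalesay
      templates:
      - name: whalesay
        container:
          image: docker/whalesay:latest`))
        if err != nil {
            panic(err)
        }
        fmt.Println(tmpl.GetTemplateType(), tmpl.IsV2()) // v1Argo false
    }
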
+ Bytes() []byte + GetTemplateType() TemplateType + + //Get workflow + RunWorkflow(apiRun *api.Run, options RunWorkflowOptions) (*util.Workflow, error) + + ScheduledWorkflow(apiJob *api.Job) (*scheduledworkflow.ScheduledWorkflow, error) +} + +type RunWorkflowOptions struct { + RunId string + RunAt int64 +} + +func New(bytes []byte) (Template, error) { + format := inferTemplateFormat(bytes) + switch format { + case V1: + return NewArgoTemplate(bytes) + case V2: + return NewV2SpecTemplate(bytes) + default: + return nil, util.NewInvalidInputErrorWithDetails(ErrorInvalidPipelineSpec, "unknown template format") + } +} + + + + + +func toParametersMap(apiParams []*api.Parameter) map[string]string { + // Preprocess workflow by appending parameter and add pipeline specific labels + desiredParamsMap := make(map[string]string) + for _, param := range apiParams { + desiredParamsMap[param.Name] = param.Value + } + return desiredParamsMap +} + +// Patch the system-specified default parameters if available. +func OverrideParameterWithSystemDefault(workflow *util.Workflow) error { + // Patch the default value to workflow spec. + if common.GetBoolConfigWithDefault(common.HasDefaultBucketEnvVar, false) { + patchedSlice := make([]wfv1.Parameter, 0) + for _, currentParam := range workflow.Spec.Arguments.Parameters { + if currentParam.Value != nil { + desiredValue, err := common.PatchPipelineDefaultParameter(currentParam.Value.String()) + if err != nil { + return fmt.Errorf("failed to patch default value to pipeline. Error: %v", err) + } + patchedSlice = append(patchedSlice, wfv1.Parameter{ + Name: currentParam.Name, + Value: wfv1.AnyStringPtr(desiredValue), + }) + } else if currentParam.Default != nil { + desiredValue, err := common.PatchPipelineDefaultParameter(currentParam.Default.String()) + if err != nil { + return fmt.Errorf("failed to patch default value to pipeline. Error: %v", err) + } + patchedSlice = append(patchedSlice, wfv1.Parameter{ + Name: currentParam.Name, + Value: wfv1.AnyStringPtr(desiredValue), + }) + } + } + workflow.Spec.Arguments.Parameters = patchedSlice + } + return nil +} + +func setDefaultServiceAccount(workflow *util.Workflow, serviceAccount string) { + if len(serviceAccount) > 0 { + workflow.SetServiceAccount(serviceAccount) + return + } + workflowServiceAccount := workflow.Spec.ServiceAccountName + if len(workflowServiceAccount) == 0 || workflowServiceAccount == common.DefaultPipelineRunnerServiceAccount { + // To reserve SDK backward compatibility, the backend only replaces + // serviceaccount when it is empty or equal to default value set by SDK. 
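
Condensed, the replacement rule this comment describes is the following (service account names here are illustrative):

    package main

    import "fmt"

    // resolveSA sketches setDefaultServiceAccount: an SA given on the run or
    // job wins; otherwise the spec's SA is kept unless it is empty or still
    // the SDK default, in which case the server-configured default applies.
    func resolveSA(requested, specSA, sdkDefault, serverDefault string) string {
        if requested != "" {
            return requested
        }
        if specSA == "" || specSA == sdkDefault {
            return serverDefault
        }
        return specSA
    }

    func main() {
        fmt.Println(resolveSA("", "pipeline-runner", "pipeline-runner", "ns-default-sa")) // ns-default-sa
        fmt.Println(resolveSA("", "custom-sa", "pipeline-runner", "ns-default-sa"))       // custom-sa
    }
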
+ workflow.SetServiceAccount(common.GetStringConfigWithDefault(common.DefaultPipelineRunnerServiceAccount, common.DefaultPipelineRunnerServiceAccount)) + } +} + +// Process the job name to remove special char, prepend with "job-" prefix if empty, and +// truncate size to <=25 +func toSWFCRDResourceGeneratedName(displayName string) (string, error) { + const ( + // K8s resource name only allow lower case alphabetic char, number and - + swfCompatibleNameRegx = "[^a-z0-9-]+" + ) + reg, err := regexp.Compile(swfCompatibleNameRegx) + if err != nil { + return "", util.NewInternalServerError(err, "Failed to compile ScheduledWorkflow name replacer Regex.") + } + processedName := reg.ReplaceAllString(strings.ToLower(displayName), "") + if processedName == "" { + processedName = "job-" + } + return util.Truncate(processedName, 25), nil +} + +func toCRDTrigger(apiTrigger *api.Trigger) *scheduledworkflow.Trigger { + var crdTrigger scheduledworkflow.Trigger + if apiTrigger.GetCronSchedule() != nil { + crdTrigger.CronSchedule = toCRDCronSchedule(apiTrigger.GetCronSchedule()) + } + if apiTrigger.GetPeriodicSchedule() != nil { + crdTrigger.PeriodicSchedule = toCRDPeriodicSchedule(apiTrigger.GetPeriodicSchedule()) + } + return &crdTrigger +} + +func toCRDCronSchedule(cronSchedule *api.CronSchedule) *scheduledworkflow.CronSchedule { + if cronSchedule == nil || cronSchedule.Cron == "" { + return nil + } + crdCronSchedule := scheduledworkflow.CronSchedule{} + crdCronSchedule.Cron = cronSchedule.Cron + + if cronSchedule.StartTime != nil { + startTime := metav1.NewTime(time.Unix(cronSchedule.StartTime.Seconds, 0)) + crdCronSchedule.StartTime = &startTime + } + if cronSchedule.EndTime != nil { + endTime := metav1.NewTime(time.Unix(cronSchedule.EndTime.Seconds, 0)) + crdCronSchedule.EndTime = &endTime + } + return &crdCronSchedule +} + +func toCRDPeriodicSchedule(periodicSchedule *api.PeriodicSchedule) *scheduledworkflow.PeriodicSchedule { + if periodicSchedule == nil || periodicSchedule.IntervalSecond == 0 { + return nil + } + crdPeriodicSchedule := scheduledworkflow.PeriodicSchedule{} + crdPeriodicSchedule.IntervalSecond = periodicSchedule.IntervalSecond + if periodicSchedule.StartTime != nil { + startTime := metav1.NewTime(time.Unix(periodicSchedule.StartTime.Seconds, 0)) + crdPeriodicSchedule.StartTime = &startTime + } + if periodicSchedule.EndTime != nil { + endTime := metav1.NewTime(time.Unix(periodicSchedule.EndTime.Seconds, 0)) + crdPeriodicSchedule.EndTime = &endTime + } + return &crdPeriodicSchedule +} + +func toCRDParameter(apiParams []*api.Parameter) []scheduledworkflow.Parameter { + var swParams []scheduledworkflow.Parameter + for _, apiParam := range apiParams { + swParam := scheduledworkflow.Parameter{ + Name: apiParam.Name, + Value: apiParam.Value, + } + swParams = append(swParams, swParam) + } + return swParams +} + +func toPipelineJobRuntimeConfig(apiRuntimeConfig *api.PipelineSpec_RuntimeConfig) (*pipelinespec.PipelineJob_RuntimeConfig, error) { + if apiRuntimeConfig == nil { + return nil, nil + } + runTimeConfig := &pipelinespec.PipelineJob_RuntimeConfig{} + runTimeConfig.Parameters = make(map[string]*pipelinespec.Value) + for k, v := range apiRuntimeConfig.GetParameters() { + value := &pipelinespec.Value{} + switch t := v.Value.(type) { + case *api.Value_StringValue: + value.Value = &pipelinespec.Value_StringValue{StringValue: v.GetStringValue()} + case *api.Value_DoubleValue: + value.Value = &pipelinespec.Value_DoubleValue{DoubleValue: v.GetDoubleValue()} + case *api.Value_IntValue: + 
value.Value = &pipelinespec.Value_IntValue{IntValue: v.GetIntValue()} + default: + return nil, fmt.Errorf("unknown property type in pipelineSpec runtimeConfig Parameters: %T", t) + } + runTimeConfig.Parameters[k] = value + } + if apiRuntimeConfig.GetPipelineRoot() != "" { + runTimeConfig.GcsOutputDirectory = apiRuntimeConfig.GetPipelineRoot() + } + return runTimeConfig, nil +} diff --git a/backend/src/apiserver/template/template_test.go b/backend/src/apiserver/template/template_test.go new file mode 100644 index 00000000000..582f573febc --- /dev/null +++ b/backend/src/apiserver/template/template_test.go @@ -0,0 +1,336 @@ +// Copyright 2018 The Kubeflow Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package template + +import ( + "github.com/golang/protobuf/ptypes/timestamp" + api "github.com/kubeflow/pipelines/backend/api/go_client" + scheduledworkflow "github.com/kubeflow/pipelines/backend/src/crd/pkg/apis/scheduledworkflow/v1beta1" + metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" + "testing" + "time" + + "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1" + "github.com/ghodss/yaml" + commonutil "github.com/kubeflow/pipelines/backend/src/common/util" + "github.com/stretchr/testify/assert" + "google.golang.org/grpc/codes" +) + +func TestFailValidation(t *testing.T) { + wf := unmarshalWf(emptyName) + wf.Spec.Arguments.Parameters = []v1alpha1.Parameter{{Name: "dup", Value: v1alpha1.AnyStringPtr("value1")}} + templateBytes, _ := yaml.Marshal(wf) + _, err := ValidateWorkflow([]byte(templateBytes)) + if assert.NotNil(t, err) { + assert.Contains(t, err.Error(), "name is required") + } +} + +func TestValidateWorkflow_ParametersTooLong(t *testing.T) { + var params []v1alpha1.Parameter + // Create a long enough parameter string so it exceed the length limit of parameter. 
+ for i := 0; i < 10000; i++ { + params = append(params, v1alpha1.Parameter{Name: "name1", Value: v1alpha1.AnyStringPtr("value1")}) + } + template := v1alpha1.Workflow{Spec: v1alpha1.WorkflowSpec{Arguments: v1alpha1.Arguments{ + Parameters: params}}} + templateBytes, _ := yaml.Marshal(template) + _, err := ValidateWorkflow(templateBytes) + assert.Equal(t, codes.InvalidArgument, err.(*commonutil.UserError).ExternalStatusCode()) +} + +func TestParseSpecFormat(t *testing.T) { + tt := []struct { + template string + templateType TemplateType + }{{ + // standard match + template: ` +apiVersion: argoproj.io/v1alpha1 +kind: Workflow`, + templateType: V1, + }, { // template contains content too + template: template, + templateType: V1, + }, { + // version does not matter + template: ` +apiVersion: argoproj.io/v1alpha2 +kind: Workflow`, + templateType: V1, + }, { + template: v2SpecHelloWorld, + templateType: V2, + }, { + template: "", + templateType: Unknown, + }, { + template: "{}", + templateType: Unknown, + }, { + // group incorrect + template: ` +apiVersion: pipelines.kubeflow.org/v1alpha1 +kind: Workflow`, + templateType: Unknown, + }, { + // kind incorrect + template: ` +apiVersion: argoproj.io/v1alpha1 +kind: CronWorkflow`, + templateType: Unknown, + }, { + template: `{"abc": "def", "b": {"key": 3}}`, + templateType: Unknown, + }} + for _, test := range tt { + format := inferTemplateFormat([]byte(test.template)) + if format != test.templateType { + t.Errorf("InferSpecFormat(%s)=%q, expect %q", test.template, format, test.templateType) + } + } +} + +func unmarshalWf(yamlStr string) *v1alpha1.Workflow { + var wf v1alpha1.Workflow + err := yaml.Unmarshal([]byte(yamlStr), &wf) + if err != nil { + panic(err) + } + return &wf +} + +var template = ` +apiVersion: argoproj.io/v1alpha1 +kind: Workflow +metadata: + generateName: hello-world- +spec: + entrypoint: whalesay + templates: + - name: whalesay + inputs: + parameters: + - name: dup + value: "value1" + container: + image: docker/whalesay:latest` + +var emptyName = ` +apiVersion: argoproj.io/v1alpha1 +kind: Workflow +metadata: + generateName: hello-world- +spec: + entrypoint: whalesay + templates: + - name: whalesay + inputs: + parameters: + - name: "" + value: "value1" + container: + image: docker/whalesay:latest` + +var v2SpecHelloWorld = ` +{ + "components": { + "comp-hello-world": { + "executorLabel": "exec-hello-world", + "inputDefinitions": { + "parameters": { + "text": { + "type": "STRING" + } + } + } + } + }, + "deploymentSpec": { + "executors": { + "exec-hello-world": { + "container": { + "args": [ + "--text", + "{{$.inputs.parameters['text']}}" + ], + "command": [ + "sh", + "-ec", + "program_path=$(mktemp)\nprintf \"%s\" \"$0\" > \"$program_path\"\npython3 -u \"$program_path\" \"$@\"\n", + "def hello_world(text):\n print(text)\n return text\n\nimport argparse\n_parser = argparse.ArgumentParser(prog='Hello world', description='')\n_parser.add_argument(\"--text\", dest=\"text\", type=str, required=True, default=argparse.SUPPRESS)\n_parsed_args = vars(_parser.parse_args())\n\n_outputs = hello_world(**_parsed_args)\n" + ], + "image": "python:3.7" + } + } + } + }, + "pipelineInfo": { + "name": "hello-world" + }, + "root": { + "dag": { + "tasks": { + "hello-world": { + "cachingOptions": { + "enableCache": true + }, + "componentRef": { + "name": "comp-hello-world" + }, + "inputs": { + "parameters": { + "text": { + "componentInputParameter": "text" + } + } + }, + "taskInfo": { + "name": "hello-world" + } + } + } + }, + "inputDefinitions": { + 
"parameters": { + "text": { + "type": "STRING" + } + } + } + }, + "schemaVersion": "2.0.0", + "sdkVersion": "kfp-1.6.5" +} +` + +func TestToSwfCRDResourceGeneratedName_SpecialCharsAndSpace(t *testing.T) { + name, err := toSWFCRDResourceGeneratedName("! HaVe ä £unky name") + assert.Nil(t, err) + assert.Equal(t, name, "haveunkyname") +} + +func TestToSwfCRDResourceGeneratedName_TruncateLongName(t *testing.T) { + name, err := toSWFCRDResourceGeneratedName("AloooooooooooooooooongName") + assert.Nil(t, err) + assert.Equal(t, name, "aloooooooooooooooooongnam") +} + +func TestToSwfCRDResourceGeneratedName_EmptyName(t *testing.T) { + name, err := toSWFCRDResourceGeneratedName("") + assert.Nil(t, err) + assert.Equal(t, name, "job-") +} + +func TestToCrdParameter(t *testing.T) { + assert.Equal(t, + toCRDParameter([]*api.Parameter{{Name: "param2", Value: "world"}, {Name: "param1", Value: "hello"}}), + []scheduledworkflow.Parameter{{Name: "param2", Value: "world"}, {Name: "param1", Value: "hello"}}) +} + +func TestToCrdCronSchedule(t *testing.T) { + actualCronSchedule := toCRDCronSchedule(&api.CronSchedule{ + Cron: "123", + StartTime: ×tamp.Timestamp{Seconds: 123}, + EndTime: ×tamp.Timestamp{Seconds: 456}, + }) + startTime := metav1.NewTime(time.Unix(123, 0)) + endTime := metav1.NewTime(time.Unix(456, 0)) + assert.Equal(t, actualCronSchedule, &scheduledworkflow.CronSchedule{ + Cron: "123", + StartTime: &startTime, + EndTime: &endTime, + }) +} + +func TestToCrdCronSchedule_NilCron(t *testing.T) { + actualCronSchedule := toCRDCronSchedule(&api.CronSchedule{ + StartTime: ×tamp.Timestamp{Seconds: 123}, + EndTime: ×tamp.Timestamp{Seconds: 456}, + }) + assert.Nil(t, actualCronSchedule) +} + +func TestToCrdCronSchedule_NilStartTime(t *testing.T) { + actualCronSchedule := toCRDCronSchedule(&api.CronSchedule{ + Cron: "123", + EndTime: ×tamp.Timestamp{Seconds: 456}, + }) + endTime := metav1.NewTime(time.Unix(456, 0)) + assert.Equal(t, actualCronSchedule, &scheduledworkflow.CronSchedule{ + Cron: "123", + EndTime: &endTime, + }) +} + +func TestToCrdCronSchedule_NilEndTime(t *testing.T) { + actualCronSchedule := toCRDCronSchedule(&api.CronSchedule{ + Cron: "123", + StartTime: ×tamp.Timestamp{Seconds: 123}, + }) + startTime := metav1.NewTime(time.Unix(123, 0)) + assert.Equal(t, actualCronSchedule, &scheduledworkflow.CronSchedule{ + Cron: "123", + StartTime: &startTime, + }) +} + +func TestToCrdPeriodicSchedule(t *testing.T) { + actualPeriodicSchedule := toCRDPeriodicSchedule(&api.PeriodicSchedule{ + IntervalSecond: 123, + StartTime: ×tamp.Timestamp{Seconds: 1}, + EndTime: ×tamp.Timestamp{Seconds: 2}, + }) + startTime := metav1.NewTime(time.Unix(1, 0)) + endTime := metav1.NewTime(time.Unix(2, 0)) + assert.Equal(t, actualPeriodicSchedule, &scheduledworkflow.PeriodicSchedule{ + IntervalSecond: 123, + StartTime: &startTime, + EndTime: &endTime, + }) +} + +func TestToCrdPeriodicSchedule_NilInterval(t *testing.T) { + actualPeriodicSchedule := toCRDPeriodicSchedule(&api.PeriodicSchedule{ + StartTime: ×tamp.Timestamp{Seconds: 1}, + EndTime: ×tamp.Timestamp{Seconds: 2}, + }) + assert.Nil(t, actualPeriodicSchedule) +} + +func TestToCrdPeriodicSchedule_NilStartTime(t *testing.T) { + actualPeriodicSchedule := toCRDPeriodicSchedule(&api.PeriodicSchedule{ + IntervalSecond: 123, + EndTime: ×tamp.Timestamp{Seconds: 2}, + }) + endTime := metav1.NewTime(time.Unix(2, 0)) + assert.Equal(t, actualPeriodicSchedule, &scheduledworkflow.PeriodicSchedule{ + IntervalSecond: 123, + EndTime: &endTime, + }) +} + +func 
+func TestToCrdPeriodicSchedule_NilEndTime(t *testing.T) {
+	actualPeriodicSchedule := toCRDPeriodicSchedule(&api.PeriodicSchedule{
+		IntervalSecond: 123,
+		StartTime:      &timestamp.Timestamp{Seconds: 1},
+	})
+	startTime := metav1.NewTime(time.Unix(1, 0))
+	assert.Equal(t, actualPeriodicSchedule, &scheduledworkflow.PeriodicSchedule{
+		IntervalSecond: 123,
+		StartTime:      &startTime,
+	})
+}
diff --git a/backend/src/apiserver/template/v2_template.go b/backend/src/apiserver/template/v2_template.go
new file mode 100644
index 00000000000..4e0597200c0
--- /dev/null
+++ b/backend/src/apiserver/template/v2_template.go
@@ -0,0 +1,158 @@
+package template
+
+import (
+	"fmt"
+	structpb "github.com/golang/protobuf/ptypes/struct"
+
+	"github.com/kubeflow/pipelines/api/v2alpha1/go/pipelinespec"
+	api "github.com/kubeflow/pipelines/backend/api/go_client"
+	"github.com/kubeflow/pipelines/backend/src/common/util"
+	scheduledworkflow "github.com/kubeflow/pipelines/backend/src/crd/pkg/apis/scheduledworkflow/v1beta1"
+	"github.com/kubeflow/pipelines/v2/compiler"
+	"google.golang.org/protobuf/encoding/protojson"
+	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
+)
+
+type V2Spec struct {
+	spec *pipelinespec.PipelineSpec
+}
+
+func (t *V2Spec) ScheduledWorkflow(apiJob *api.Job) (*scheduledworkflow.ScheduledWorkflow, error) {
+	bytes, err := protojson.Marshal(t.spec)
+	if err != nil {
+		return nil, util.Wrap(err, "Failed to marshal pipeline spec to json")
+	}
+	spec := &structpb.Struct{}
+	if err := protojson.Unmarshal(bytes, spec); err != nil {
+		return nil, util.Wrap(err, "Failed to parse pipeline spec")
+	}
+	job := &pipelinespec.PipelineJob{PipelineSpec: spec}
+	jobRuntimeConfig, err := toPipelineJobRuntimeConfig(apiJob.GetPipelineSpec().GetRuntimeConfig())
+	if err != nil {
+		return nil, util.Wrap(err, "Failed to convert to PipelineJob RuntimeConfig")
+	}
+	job.RuntimeConfig = jobRuntimeConfig
+	wf, err := compiler.Compile(job, nil)
+	if err != nil {
+		return nil, util.Wrap(err, "Failed to compile job")
+	}
+	workflow := util.NewWorkflow(wf)
+	setDefaultServiceAccount(workflow, apiJob.GetServiceAccount())
+	// Disable istio sidecar injection if not specified
+	workflow.SetAnnotationsToAllTemplatesIfKeyNotExist(util.AnnotationKeyIstioSidecarInject, util.AnnotationValueIstioSidecarInjectDisabled)
+	swfGeneratedName, err := toSWFCRDResourceGeneratedName(apiJob.Name)
+	if err != nil {
+		return nil, util.Wrap(err, "Create job failed")
+	}
+	scheduledWorkflow := &scheduledworkflow.ScheduledWorkflow{
+		ObjectMeta: metav1.ObjectMeta{GenerateName: swfGeneratedName},
+		Spec: scheduledworkflow.ScheduledWorkflowSpec{
+			Enabled:        apiJob.Enabled,
+			MaxConcurrency: &apiJob.MaxConcurrency,
+			Trigger:        *toCRDTrigger(apiJob.Trigger),
+			Workflow: &scheduledworkflow.WorkflowResource{
+				Parameters: toCRDParameter(apiJob.GetPipelineSpec().GetParameters()),
+				Spec:       workflow.Spec,
+			},
+			NoCatchup: util.BoolPointer(apiJob.NoCatchup),
+		},
+	}
+	return scheduledWorkflow, nil
+}
+
+func (t *V2Spec) GetTemplateType() TemplateType {
+	return V2
+}
+
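+// NewV2SpecTemplate parses the given bytes as a v2 PipelineSpec in protojson
+// form, and rejects specs whose pipeline name or root component is missing.
+//
+// A minimal usage sketch (assuming specBytes holds a valid v2 spec; RunId is
+// the only RunWorkflowOptions field exercised by RunWorkflow below):
+//
+//	tmpl, err := NewV2SpecTemplate(specBytes)
+//	if err != nil {
+//		return nil, err
+//	}
+//	wf, err := tmpl.RunWorkflow(apiRun, RunWorkflowOptions{RunId: runId})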
+func NewV2SpecTemplate(template []byte) (*V2Spec, error) {
+	var spec pipelinespec.PipelineSpec
+	err := protojson.Unmarshal(template, &spec)
+	if err != nil {
+		return nil, util.NewInvalidInputErrorWithDetails(ErrorInvalidPipelineSpec, fmt.Sprintf("invalid v2 pipeline spec: %s", err.Error()))
+	}
+	if spec.GetPipelineInfo().GetName() == "" {
+		return nil, util.NewInvalidInputErrorWithDetails(ErrorInvalidPipelineSpec, "invalid v2 pipeline spec: name is empty")
+	}
+	if spec.GetRoot() == nil {
+		return nil, util.NewInvalidInputErrorWithDetails(ErrorInvalidPipelineSpec, "invalid v2 pipeline spec: root component is empty")
+	}
+	return &V2Spec{spec: &spec}, nil
+}
+
+func (t *V2Spec) Bytes() []byte {
+	if t == nil {
+		return nil
+	}
+	bytes, err := protojson.Marshal(t.spec)
+	if err != nil {
+		// this is unexpected
+		return nil
+	}
+	return bytes
+}
+
+func (t *V2Spec) IsV2() bool {
+	return true
+}
+
+func (t *V2Spec) V2PipelineName() string {
+	if t == nil {
+		return ""
+	}
+	return t.spec.GetPipelineInfo().GetName()
+}
+
+func (t *V2Spec) OverrideV2PipelineName(name, namespace string) {
+	if t == nil {
+		return
+	}
+	var pipelineRef string
+	if namespace != "" {
+		pipelineRef = fmt.Sprintf("namespace/%s/pipeline/%s", namespace, name)
+	} else {
+		pipelineRef = fmt.Sprintf("pipeline/%s", name)
+	}
+	t.spec.PipelineInfo.Name = pipelineRef
+}
+
+func (t *V2Spec) ParametersJSON() (string, error) {
+	// TODO(v2): implement this after pipeline spec can contain parameter defaults
+	return "[]", nil
+}
+
+func (t *V2Spec) RunWorkflow(apiRun *api.Run, options RunWorkflowOptions) (*util.Workflow, error) {
+	bytes, err := protojson.Marshal(t.spec)
+	if err != nil {
+		return nil, util.Wrap(err, "Failed to marshal pipeline spec to json")
+	}
+	spec := &structpb.Struct{}
+	if err := protojson.Unmarshal(bytes, spec); err != nil {
+		return nil, util.Wrap(err, "Failed to parse pipeline spec")
+	}
+	job := &pipelinespec.PipelineJob{PipelineSpec: spec}
+	jobRuntimeConfig, err := toPipelineJobRuntimeConfig(apiRun.GetPipelineSpec().GetRuntimeConfig())
+	if err != nil {
+		return nil, util.Wrap(err, "Failed to convert to PipelineJob RuntimeConfig")
+	}
+	job.RuntimeConfig = jobRuntimeConfig
+	wf, err := compiler.Compile(job, nil)
+	if err != nil {
+		return nil, util.Wrap(err, "Failed to compile job")
+	}
+	workflow := util.NewWorkflow(wf)
+	setDefaultServiceAccount(workflow, apiRun.GetServiceAccount())
+	// Disable istio sidecar injection if not specified
+	workflow.SetAnnotationsToAllTemplatesIfKeyNotExist(util.AnnotationKeyIstioSidecarInject, util.AnnotationValueIstioSidecarInjectDisabled)
+	// Add label to the workflow so it can be persisted by the persistence agent later.
+	workflow.SetLabels(util.LabelKeyWorkflowRunId, options.RunId)
+	// Add run name annotation to the workflow so that it can be logged by the Metadata Writer.
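+	// (ReplaceUID and SetPodMetadataLabels below likewise propagate the run ID
+	// down to the compiled workflow and its pods.)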
+ workflow.SetAnnotations(util.AnnotationKeyRunName, apiRun.Name) + // Replace {{workflow.uid}} with runId + err = workflow.ReplaceUID(options.RunId) + if err != nil { + return nil, util.NewInternalServerError(err, "Failed to replace workflow ID") + } + workflow.SetPodMetadataLabels(util.LabelKeyWorkflowRunId, options.RunId) + return workflow, nil + +} diff --git a/backend/src/common/client/api_server/pipeline_client.go b/backend/src/common/client/api_server/pipeline_client.go index ca12f8e86fa..56e87301d0e 100644 --- a/backend/src/common/client/api_server/pipeline_client.go +++ b/backend/src/common/client/api_server/pipeline_client.go @@ -7,6 +7,7 @@ import ( apiclient "github.com/kubeflow/pipelines/backend/api/go_http_client/pipeline_client" params "github.com/kubeflow/pipelines/backend/api/go_http_client/pipeline_client/pipeline_service" model "github.com/kubeflow/pipelines/backend/api/go_http_client/pipeline_model" + "github.com/kubeflow/pipelines/backend/src/apiserver/template" "github.com/kubeflow/pipelines/backend/src/common/util" "golang.org/x/net/context" _ "k8s.io/client-go/plugin/pkg/client/auth/gcp" @@ -17,7 +18,7 @@ type PipelineInterface interface { Create(params *params.CreatePipelineParams) (*model.APIPipeline, error) Get(params *params.GetPipelineParams) (*model.APIPipeline, error) Delete(params *params.DeletePipelineParams) error - GetTemplate(params *params.GetTemplateParams) (util.Template, error) + GetTemplate(params *params.GetTemplateParams) (template.Template, error) List(params *params.ListPipelinesParams) ([]*model.APIPipeline, int, string, error) ListAll(params *params.ListPipelinesParams, maxResultSize int) ( []*model.APIPipeline, error) @@ -136,7 +137,7 @@ func (c *PipelineClient) Delete(parameters *params.DeletePipelineParams) error { return nil } -func (c *PipelineClient) GetTemplate(parameters *params.GetTemplateParams) (util.Template, error) { +func (c *PipelineClient) GetTemplate(parameters *params.GetTemplateParams) (template.Template, error) { // Create context with timeout ctx, cancel := context.WithTimeout(context.Background(), apiServerDefaultTimeout) defer cancel() @@ -157,7 +158,7 @@ func (c *PipelineClient) GetTemplate(parameters *params.GetTemplateParams) (util } // Unmarshal response - return util.NewTemplate([]byte(response.Payload.Template)) + return template.New([]byte(response.Payload.Template)) } func (c *PipelineClient) List(parameters *params.ListPipelinesParams) ( @@ -286,7 +287,7 @@ func (c *PipelineClient) GetPipelineVersion(parameters *params.GetPipelineVersio } func (c *PipelineClient) GetPipelineVersionTemplate(parameters *params.GetPipelineVersionTemplateParams) ( - util.Template, error) { + template.Template, error) { // Create context with timeout ctx, cancel := context.WithTimeout(context.Background(), apiServerDefaultTimeout) defer cancel() @@ -307,5 +308,5 @@ func (c *PipelineClient) GetPipelineVersionTemplate(parameters *params.GetPipeli } // Unmarshal response - return util.NewTemplate([]byte(response.Payload.Template)) + return template.New([]byte(response.Payload.Template)) } diff --git a/backend/src/common/client/api_server/pipeline_client_fake.go b/backend/src/common/client/api_server/pipeline_client_fake.go index e55e2d427b0..5356310aedc 100644 --- a/backend/src/common/client/api_server/pipeline_client_fake.go +++ b/backend/src/common/client/api_server/pipeline_client_fake.go @@ -2,6 +2,7 @@ package api_server import ( "fmt" + "github.com/kubeflow/pipelines/backend/src/apiserver/template" "path" @@ -10,7 +11,6 @@ import 
(
 	params "github.com/kubeflow/pipelines/backend/api/go_http_client/pipeline_client/pipeline_service"
 	pipelineparams "github.com/kubeflow/pipelines/backend/api/go_http_client/pipeline_client/pipeline_service"
 	pipelinemodel "github.com/kubeflow/pipelines/backend/api/go_http_client/pipeline_model"
-	"github.com/kubeflow/pipelines/backend/src/common/util"
 	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
 )
@@ -42,8 +42,8 @@ func getDefaultWorkflow() *workflowapi.Workflow {
 	}}
 }
 
-func getDefaultTemplate() util.Template {
-	tmpl, _ := util.NewArgoTemplateFromWorkflow(&workflowapi.Workflow{
+func getDefaultTemplate() template.Template {
+	tmpl, _ := template.NewArgoTemplateFromWorkflow(&workflowapi.Workflow{
 		ObjectMeta: metav1.ObjectMeta{
 			Namespace: "MY_NAMESPACE",
 			Name:      "MY_NAME",
@@ -92,7 +92,7 @@ func (c *PipelineClientFake) Delete(params *pipelineparams.DeletePipelineParams)
 }
 
 func (c *PipelineClientFake) GetTemplate(params *pipelineparams.GetTemplateParams) (
-	util.Template, error) {
+	template.Template, error) {
 	switch params.ID {
 	case PipelineForClientErrorTest:
 		return nil, fmt.Errorf(ClientErrorString)
diff --git a/backend/src/common/util/template_util.go b/backend/src/common/util/template_util.go
deleted file mode 100644
index 19f7af96f3d..00000000000
--- a/backend/src/common/util/template_util.go
+++ /dev/null
@@ -1,277 +0,0 @@
-// Copyright 2018 The Kubeflow Authors
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-//      https://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package util
-
-import (
-	"encoding/json"
-	"fmt"
-	"strings"
-
-	"github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1"
-	workflowapi "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1"
-	"github.com/argoproj/argo-workflows/v3/workflow/validate"
-	"github.com/ghodss/yaml"
-	"github.com/kubeflow/pipelines/api/v2alpha1/go/pipelinespec"
-	"google.golang.org/protobuf/encoding/protojson"
-	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
-)
-
-type TemplateType string
-
-const (
-	V1      TemplateType = "v1"
-	V2      TemplateType = "v2"
-	Unknown TemplateType = "Unknown"
-
-	argoGroup       = "argoproj.io/"
-	argoVersion     = "argoproj.io/v1alpha1"
-	argoK8sResource = "Workflow"
-)
-
-// Unmarshal parameters from JSON encoded string.
-func UnmarshalParameters(paramsString string) ([]v1alpha1.Parameter, error) {
-	if paramsString == "" {
-		return nil, nil
-	}
-	var params []v1alpha1.Parameter
-	err := json.Unmarshal([]byte(paramsString), &params)
-	if err != nil {
-		return nil, NewInternalServerError(err, "Parameters have wrong format")
-	}
-	return params, nil
-}
-
-// Marshal parameters to JSON encoded string.
-// This also checks result is not longer than a limit.
-func MarshalParameters(params []v1alpha1.Parameter) (string, error) { - if params == nil { - return "[]", nil - } - paramBytes, err := json.Marshal(params) - if err != nil { - return "", NewInvalidInputErrorWithDetails(err, "Failed to marshal the parameter.") - } - if len(paramBytes) > MaxParameterBytes { - return "", NewInvalidInputError("The input parameter length exceed maximum size of %v.", MaxParameterBytes) - } - return string(paramBytes), nil -} - -func ValidateWorkflow(template []byte) (*Workflow, error) { - var wf v1alpha1.Workflow - err := yaml.Unmarshal(template, &wf) - if err != nil { - return nil, NewInvalidInputErrorWithDetails(err, "Failed to parse the workflow template.") - } - if wf.APIVersion != argoVersion { - return nil, NewInvalidInputError("Unsupported argo version. Expected: %v. Received: %v", argoVersion, wf.APIVersion) - } - if wf.Kind != argoK8sResource { - return nil, NewInvalidInputError("Unexpected resource type. Expected: %v. Received: %v", argoK8sResource, wf.Kind) - } - _, err = validate.ValidateWorkflow(nil, nil, &wf, validate.ValidateOpts{ - Lint: true, - IgnoreEntrypoint: true, - WorkflowTemplateValidation: false, // not used by kubeflow - }) - if err != nil { - return nil, err - } - return NewWorkflow(&wf), nil -} - -var ErrorInvalidPipelineSpec = fmt.Errorf("pipeline spec is invalid") - -// InferTemplateFormat infers format from pipeline template. -// There is no guarantee that the template is valid in inferred format, so validation -// is still needed. -func InferTemplateFormat(template []byte) TemplateType { - switch { - case len(template) == 0: - return Unknown - case isArgoWorkflow(template): - return V1 - case isPipelineSpec(template): - return V2 - default: - return Unknown - } -} - -// isArgoWorkflow returns whether template is in argo workflow spec format. -func isArgoWorkflow(template []byte) bool { - var meta metav1.TypeMeta - err := yaml.Unmarshal(template, &meta) - if err != nil { - return false - } - return strings.HasPrefix(meta.APIVersion, argoGroup) && meta.Kind == argoK8sResource -} - -// isPipelineSpec returns whether template is in KFP api/v2alpha1/PipelineSpec format. -func isPipelineSpec(template []byte) bool { - var spec pipelinespec.PipelineSpec - err := protojson.Unmarshal(template, &spec) - return err == nil && spec.GetPipelineInfo().GetName() != "" && spec.GetRoot() != nil -} - -// Pipeline template -type Template interface { - IsV2() bool - // Gets v2 pipeline name. - V2PipelineName() string - // Overrides v2 pipeline name to distinguish shared/namespaced pipelines. - // The name is used as ML Metadata pipeline context name. - OverrideV2PipelineName(name, namespace string) - // Gets parameters in JSON format. - ParametersJSON() (string, error) - // Get bytes content. 
- Bytes() []byte -} - -func NewTemplate(bytes []byte) (Template, error) { - format := InferTemplateFormat(bytes) - switch format { - case V1: - return NewArgoTemplate(bytes) - case V2: - return NewV2SpecTemplate(bytes) - default: - return nil, NewInvalidInputErrorWithDetails(ErrorInvalidPipelineSpec, "unknown template format") - } -} - -type ArgoTemplate struct { - wf *Workflow -} - -func NewArgoTemplate(bytes []byte) (*ArgoTemplate, error) { - wf, err := ValidateWorkflow(bytes) - if err != nil { - return nil, err - } - return &ArgoTemplate{wf}, nil -} - -func NewArgoTemplateFromWorkflow(wf *workflowapi.Workflow) (*ArgoTemplate, error) { - return &ArgoTemplate{wf: &Workflow{wf}}, nil -} - -func (t *ArgoTemplate) Bytes() []byte { - if t == nil { - return nil - } - return []byte(t.wf.ToStringForStore()) -} - -func (t *ArgoTemplate) IsV2() bool { - if t == nil { - return false - } - return t.wf.IsV2Compatible() -} - -const ( - paramV2compatPipelineName = "pipeline-name" -) - -func (t *ArgoTemplate) V2PipelineName() string { - if t == nil { - return "" - } - return t.wf.GetWorkflowParametersAsMap()[paramV2compatPipelineName] -} - -func (t *ArgoTemplate) OverrideV2PipelineName(name, namespace string) { - if t == nil || !t.wf.IsV2Compatible() { - return - } - var pipelineRef string - if namespace != "" { - pipelineRef = fmt.Sprintf("namespace/%s/pipeline/%s", namespace, name) - } else { - pipelineRef = fmt.Sprintf("pipeline/%s", name) - } - overrides := make(map[string]string) - overrides[paramV2compatPipelineName] = pipelineRef - t.wf.OverrideParameters(overrides) -} - -func (t *ArgoTemplate) ParametersJSON() (string, error) { - if t == nil { - return "", nil - } - return MarshalParameters(t.wf.Spec.Arguments.Parameters) -} - -type V2SpecTemplate struct { - spec *pipelinespec.PipelineSpec -} - -func NewV2SpecTemplate(template []byte) (*V2SpecTemplate, error) { - var spec pipelinespec.PipelineSpec - err := protojson.Unmarshal(template, &spec) - if err != nil { - return nil, NewInvalidInputErrorWithDetails(ErrorInvalidPipelineSpec, fmt.Sprintf("invalid v2 pipeline spec: %s", err.Error())) - } - if spec.GetPipelineInfo().GetName() == "" { - return nil, NewInvalidInputErrorWithDetails(ErrorInvalidPipelineSpec, "invalid v2 pipeline spec: name is empty") - } - if spec.GetRoot() == nil { - return nil, NewInvalidInputErrorWithDetails(ErrorInvalidPipelineSpec, "invalid v2 pipeline spec: root component is empty") - } - return &V2SpecTemplate{spec: &spec}, nil -} - -func (t *V2SpecTemplate) Bytes() []byte { - if t == nil { - return nil - } - bytes, err := protojson.Marshal(t.spec) - if err != nil { - // this is unexpected - return nil - } - return bytes -} - -func (t *V2SpecTemplate) IsV2() bool { - return true -} - -func (t *V2SpecTemplate) V2PipelineName() string { - if t == nil { - return "" - } - return t.spec.GetPipelineInfo().GetName() -} - -func (t *V2SpecTemplate) OverrideV2PipelineName(name, namespace string) { - if t == nil { - return - } - var pipelineRef string - if namespace != "" { - pipelineRef = fmt.Sprintf("namespace/%s/pipeline/%s", namespace, name) - } else { - pipelineRef = fmt.Sprintf("pipeline/%s", name) - } - t.spec.PipelineInfo.Name = pipelineRef -} - -func (t *V2SpecTemplate) ParametersJSON() (string, error) { - // TODO(v2): implement this after pipeline spec can contain parameter defaults - return "[]", nil -} diff --git a/backend/src/common/util/template_util_test.go b/backend/src/common/util/template_util_test.go deleted file mode 100644 index c302da714aa..00000000000 --- 
a/backend/src/common/util/template_util_test.go +++ /dev/null @@ -1,88 +0,0 @@ -// Copyright 2018 The Kubeflow Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package util - -import ( - "testing" - - "github.com/argoproj/argo-workflows/v3/pkg/apis/workflow/v1alpha1" - "github.com/ghodss/yaml" - "github.com/stretchr/testify/assert" - "google.golang.org/grpc/codes" -) - -func TestFailValidation(t *testing.T) { - wf := unmarshalWf(emptyName) - wf.Spec.Arguments.Parameters = []v1alpha1.Parameter{{Name: "dup", Value: v1alpha1.AnyStringPtr("value1")}} - templateBytes, _ := yaml.Marshal(wf) - _, err := ValidateWorkflow([]byte(templateBytes)) - if assert.NotNil(t, err) { - assert.Contains(t, err.Error(), "name is required") - } -} - -func TestValidateWorkflow_ParametersTooLong(t *testing.T) { - var params []v1alpha1.Parameter - // Create a long enough parameter string so it exceed the length limit of parameter. - for i := 0; i < 10000; i++ { - params = append(params, v1alpha1.Parameter{Name: "name1", Value: v1alpha1.AnyStringPtr("value1")}) - } - template := v1alpha1.Workflow{Spec: v1alpha1.WorkflowSpec{Arguments: v1alpha1.Arguments{ - Parameters: params}}} - templateBytes, _ := yaml.Marshal(template) - _, err := ValidateWorkflow(templateBytes) - assert.Equal(t, codes.InvalidArgument, err.(*UserError).ExternalStatusCode()) -} - -func unmarshalWf(yamlStr string) *v1alpha1.Workflow { - var wf v1alpha1.Workflow - err := yaml.Unmarshal([]byte(yamlStr), &wf) - if err != nil { - panic(err) - } - return &wf -} - -var template = ` -apiVersion: argoproj.io/v1alpha1 -kind: Workflow -metadata: - generateName: hello-world- -spec: - entrypoint: whalesay - templates: - - name: whalesay - inputs: - parameters: - - name: dup - value: "value1" - container: - image: docker/whalesay:latest` - -var emptyName = ` -apiVersion: argoproj.io/v1alpha1 -kind: Workflow -metadata: - generateName: hello-world- -spec: - entrypoint: whalesay - templates: - - name: whalesay - inputs: - parameters: - - name: "" - value: "value1" - container: - image: docker/whalesay:latest` diff --git a/backend/test/integration/pipeline_api_test.go b/backend/test/integration/pipeline_api_test.go index 3e16b021103..dbe8f328c90 100644 --- a/backend/test/integration/pipeline_api_test.go +++ b/backend/test/integration/pipeline_api_test.go @@ -10,6 +10,7 @@ import ( "github.com/golang/glog" params "github.com/kubeflow/pipelines/backend/api/go_http_client/pipeline_client/pipeline_service" model "github.com/kubeflow/pipelines/backend/api/go_http_client/pipeline_model" + pipelinetemplate "github.com/kubeflow/pipelines/backend/src/apiserver/template" uploadParams "github.com/kubeflow/pipelines/backend/api/go_http_client/pipeline_upload_client/pipeline_upload_service" "github.com/kubeflow/pipelines/backend/src/common/client/api_server" @@ -191,14 +192,14 @@ func (s *PipelineApiTest) TestPipelineAPI() { require.Nil(t, err) bytes, err := ioutil.ReadFile("../resources/arguments-parameters.yaml") require.Nil(t, err) - expected, err := 
util.NewTemplate(bytes) + expected, err := pipelinetemplate.New(bytes) assert.Equal(t, expected, template) template, err = s.pipelineClient.GetTemplate(¶ms.GetTemplateParams{ID: v2HelloPipeline.ID}) require.Nil(t, err) bytes, err = ioutil.ReadFile("../resources/v2-hello-world.json") require.Nil(t, err) - expected, err = util.NewTemplate(bytes) + expected, err = pipelinetemplate.New(bytes) expected.OverrideV2PipelineName("v2-hello-world.json", "") assert.Equal(t, expected, template) } diff --git a/backend/test/integration/pipeline_version_api_test.go b/backend/test/integration/pipeline_version_api_test.go index 4a6c8d6d552..dd312d2fd81 100644 --- a/backend/test/integration/pipeline_version_api_test.go +++ b/backend/test/integration/pipeline_version_api_test.go @@ -9,6 +9,7 @@ import ( params "github.com/kubeflow/pipelines/backend/api/go_http_client/pipeline_client/pipeline_service" "github.com/kubeflow/pipelines/backend/api/go_http_client/pipeline_model" uploadParams "github.com/kubeflow/pipelines/backend/api/go_http_client/pipeline_upload_client/pipeline_upload_service" + pipelinetemplate "github.com/kubeflow/pipelines/backend/src/apiserver/template" "github.com/kubeflow/pipelines/backend/src/common/client/api_server" "github.com/kubeflow/pipelines/backend/src/common/util" "github.com/kubeflow/pipelines/backend/test" @@ -290,7 +291,7 @@ func (s *PipelineVersionApiTest) TestArgoSpec() { require.Nil(t, err) bytes, err := ioutil.ReadFile("../resources/arguments-parameters.yaml") require.Nil(t, err) - expected, err := util.NewTemplate(bytes) + expected, err := pipelinetemplate.New(bytes) require.Nil(t, err) assert.Equal(t, expected, template) } @@ -323,7 +324,7 @@ func (s *PipelineVersionApiTest) TestV2Spec() { require.Nil(t, err) bytes, err := ioutil.ReadFile("../resources/v2-hello-world.json") require.Nil(t, err) - expected, err := util.NewTemplate(bytes) + expected, err := pipelinetemplate.New(bytes) require.Nil(t, err) expected.OverrideV2PipelineName("test_v2_pipeline", "") assert.Equal(t, expected, template) diff --git a/backend/test/integration/upgrade_test.go b/backend/test/integration/upgrade_test.go index 98245546c20..7cf3d15acc8 100644 --- a/backend/test/integration/upgrade_test.go +++ b/backend/test/integration/upgrade_test.go @@ -20,6 +20,7 @@ import ( uploadParams "github.com/kubeflow/pipelines/backend/api/go_http_client/pipeline_upload_client/pipeline_upload_service" runParams "github.com/kubeflow/pipelines/backend/api/go_http_client/run_client/run_service" "github.com/kubeflow/pipelines/backend/api/go_http_client/run_model" + pipelinetemplate "github.com/kubeflow/pipelines/backend/src/apiserver/template" "github.com/kubeflow/pipelines/backend/src/common/client/api_server" "github.com/kubeflow/pipelines/backend/src/common/util" "github.com/kubeflow/pipelines/backend/test" @@ -232,7 +233,7 @@ func (s *UpgradeTests) VerifyPipelines() { require.Nil(t, err) bytes, err := ioutil.ReadFile("../resources/arguments-parameters.yaml") require.Nil(t, err) - expected, err := util.NewTemplate(bytes) + expected, err := pipelinetemplate.New(bytes) require.Nil(t, err) assert.Equal(t, expected, template) } diff --git a/backend/third_party_licenses/apiserver.csv b/backend/third_party_licenses/apiserver.csv index 492f5f67004..b63e81c8e1c 100644 --- a/backend/third_party_licenses/apiserver.csv +++ b/backend/third_party_licenses/apiserver.csv @@ -17,7 +17,6 @@ github.com/cenkalti/backoff, https://github.com/cenkalti/backoff/blob/v2.2.1/LIC github.com/cespare/xxhash/v2, 
https://github.com/cespare/xxhash/blob/v2.1.1/LICENSE.txt, MIT github.com/colinmarc/hdfs, https://github.com/colinmarc/hdfs/blob/9746310a4d31/LICENSE.txt, MIT github.com/davecgh/go-spew, https://github.com/davecgh/go-spew/blob/v1.1.1/LICENSE, ISC -github.com/docker/spdystream, https://github.com/docker/spdystream/blob/6480d4af844c/LICENSE, Apache-2.0 github.com/doublerebel/bellows, https://github.com/doublerebel/bellows/blob/f177d92a03d3/LICENSE, MIT github.com/emicklei/go-restful, https://github.com/emicklei/go-restful/blob/v2.15.0/LICENSE, MIT github.com/fsnotify/fsnotify, https://github.com/fsnotify/fsnotify/blob/v1.4.9/LICENSE, BSD-3-Clause @@ -37,6 +36,7 @@ github.com/go-stack/stack, https://github.com/go-stack/stack/blob/v1.8.0/LICENSE github.com/gogo/protobuf, https://github.com/gogo/protobuf/blob/v1.3.2/LICENSE, BSD-3-Clause / BSD-2-Clause github.com/golang/glog, https://github.com/golang/glog/blob/23def4e6c14b/LICENSE, Apache-2.0 github.com/golang/protobuf, https://github.com/golang/protobuf/blob/v1.5.0/LICENSE, BSD-3-Clause +github.com/google/go-cmp, https://github.com/google/go-cmp/blob/v0.5.5/LICENSE, BSD-3-Clause github.com/google/gofuzz, https://github.com/google/gofuzz/blob/v1.1.0/LICENSE, Apache-2.0 github.com/google/uuid, https://github.com/google/uuid/blob/v1.1.2/LICENSE, BSD-3-Clause github.com/googleapis/gnostic, https://github.com/googleapis/gnostic/blob/v0.5.1/LICENSE, Apache-2.0 @@ -62,6 +62,7 @@ github.com/klauspost/compress, https://github.com/klauspost/compress/blob/v1.11. github.com/klauspost/compress, https://github.com/klauspost/compress/blob/v1.11.9/zstd/internal/xxhash/LICENSE.txt, MIT github.com/klauspost/pgzip, https://github.com/klauspost/pgzip/blob/v1.2.5/GO_LICENSE, BSD-3-Clause github.com/klauspost/pgzip, https://github.com/klauspost/pgzip/blob/v1.2.5/LICENSE, MIT +github.com/kubeflow/pipelines/v2, https://github.com/kubeflow/pipelines/blob/2e3fb5efff56/LICENSE, Apache-2.0 github.com/lann/builder, https://github.com/lann/builder/blob/47ae307949d0/LICENSE, MIT github.com/lann/ps, https://github.com/lann/ps/blob/62de8c46ede0/LICENSE, MIT github.com/lestrrat-go/strftime, https://github.com/lestrrat-go/strftime/blob/v1.0.4/LICENSE, MIT @@ -76,6 +77,7 @@ github.com/mitchellh/copystructure, https://github.com/mitchellh/copystructure/b github.com/mitchellh/go-homedir, https://github.com/mitchellh/go-homedir/blob/v1.1.0/LICENSE, MIT github.com/mitchellh/mapstructure, https://github.com/mitchellh/mapstructure/blob/v1.4.1/LICENSE, MIT github.com/mitchellh/reflectwalk, https://github.com/mitchellh/reflectwalk/blob/v1.0.1/LICENSE, MIT +github.com/moby/spdystream, https://github.com/moby/spdystream/blob/v0.2.0/LICENSE, Apache-2.0 github.com/modern-go/concurrent, https://github.com/modern-go/concurrent/blob/bacd9c7ef1dd/LICENSE, Apache-2.0 github.com/modern-go/reflect2, https://github.com/modern-go/reflect2/blob/v1.0.1/LICENSE, Apache-2.0 github.com/oliveagle/jsonpath, https://github.com/oliveagle/jsonpath/blob/2e52cf6e6852/LICENSE, MIT @@ -105,13 +107,13 @@ go.mongodb.org/mongo-driver/., https://github.com/mongodb/mongo-go-driver/blob/v go.mongodb.org/mongo-driver/., https://github.com/mongodb/mongo-go-driver/blob/v1.4.4/THIRD-PARTY-NOTICES#L31-L61, BSD-3-Clause golang.org/x/crypto, https://github.com/golang/crypto/blob/0c34fe9e7dc2/LICENSE, BSD-3-Clause golang.org/x/net, https://github.com/golang/net/blob/6b1517762897/LICENSE, BSD-3-Clause -golang.org/x/oauth2, https://github.com/golang/oauth2/blob/bf48bf16ab8d/LICENSE, BSD-3-Clause -golang.org/x/sys, 
https://github.com/golang/sys/blob/47abb6519492/LICENSE, BSD-3-Clause +golang.org/x/oauth2, https://github.com/golang/oauth2/blob/0b49973bad19/LICENSE, BSD-3-Clause +golang.org/x/sys, https://github.com/golang/sys/blob/d19ff857e887/LICENSE, BSD-3-Clause golang.org/x/term, https://github.com/golang/term/blob/7de9c90e9dd1/LICENSE, BSD-3-Clause golang.org/x/text, https://github.com/golang/text/blob/v0.3.5/LICENSE, BSD-3-Clause golang.org/x/time, https://github.com/golang/time/blob/3af7569d3a1e/LICENSE, BSD-3-Clause -google.golang.org/genproto, https://github.com/googleapis/go-genproto/blob/86f49bd18e98/LICENSE, Apache-2.0 -google.golang.org/grpc, https://github.com/grpc/grpc-go/blob/v1.34.0/LICENSE, Apache-2.0 +google.golang.org/genproto, https://github.com/googleapis/go-genproto/blob/6486ece9c497/LICENSE, Apache-2.0 +google.golang.org/grpc, https://github.com/grpc/grpc-go/blob/v1.36.0/LICENSE, Apache-2.0 google.golang.org/protobuf, https://github.com/protocolbuffers/protobuf-go/blob/v1.27.1/LICENSE, BSD-3-Clause gopkg.in/inf.v0, https://github.com/go-inf/inf/blob/v0.9.1/LICENSE, BSD-3-Clause gopkg.in/ini.v1, https://github.com/go-ini/ini/blob/v1.57.0/LICENSE, Apache-2.0 @@ -121,15 +123,15 @@ gopkg.in/jcmturner/gokrb5.v5, https://github.com/jcmturner/gokrb5/blob/v5.3.0/LI gopkg.in/jcmturner/rpc.v0, https://github.com/jcmturner/rpc/blob/v0.0.2/LICENSE, Apache-2.0 gopkg.in/yaml.v2, https://github.com/go-yaml/yaml/blob/v2.4.0/LICENSE, Apache-2.0 / MIT gopkg.in/yaml.v3, https://github.com/go-yaml/yaml/blob/496545a6307b/LICENSE, MIT -k8s.io/api, https://github.com/kubernetes/api/blob/v0.19.6/LICENSE, Apache-2.0 -k8s.io/apimachinery, https://github.com/kubernetes/apimachinery/blob/v0.19.6/LICENSE, Apache-2.0 -k8s.io/client-go, https://github.com/kubernetes/client-go/blob/v0.19.6/LICENSE, Apache-2.0 -k8s.io/klog/v2, https://github.com/kubernetes/klog/blob/v2.5.0/LICENSE, Apache-2.0 -k8s.io/kube-openapi, https://github.com/kubernetes/kube-openapi/blob/d219536bb9fd/LICENSE, Apache-2.0 -k8s.io/kube-openapi, https://github.com/kubernetes/kube-openapi/blob/d219536bb9fd/pkg/validation/errors/LICENSE, Apache-2.0 -k8s.io/kube-openapi, https://github.com/kubernetes/kube-openapi/blob/d219536bb9fd/pkg/validation/spec/LICENSE, Apache-2.0 -k8s.io/kube-openapi, https://github.com/kubernetes/kube-openapi/blob/d219536bb9fd/pkg/validation/strfmt/LICENSE, Apache-2.0 -k8s.io/kube-openapi, https://github.com/kubernetes/kube-openapi/blob/d219536bb9fd/pkg/validation/validate/LICENSE, Apache-2.0 +k8s.io/api, https://github.com/kubernetes/api/blob/v0.20.4/LICENSE, Apache-2.0 +k8s.io/apimachinery, https://github.com/kubernetes/apimachinery/blob/v0.21.2/LICENSE, Apache-2.0 +k8s.io/client-go, https://github.com/kubernetes/client-go/blob/v0.20.4/LICENSE, Apache-2.0 +k8s.io/klog/v2, https://github.com/kubernetes/klog/blob/v2.8.0/LICENSE, Apache-2.0 +k8s.io/kube-openapi, https://github.com/kubernetes/kube-openapi/blob/591a79e4bda7/LICENSE, Apache-2.0 +k8s.io/kube-openapi, https://github.com/kubernetes/kube-openapi/blob/591a79e4bda7/pkg/validation/errors/LICENSE, Apache-2.0 +k8s.io/kube-openapi, https://github.com/kubernetes/kube-openapi/blob/591a79e4bda7/pkg/validation/spec/LICENSE, Apache-2.0 +k8s.io/kube-openapi, https://github.com/kubernetes/kube-openapi/blob/591a79e4bda7/pkg/validation/strfmt/LICENSE, Apache-2.0 +k8s.io/kube-openapi, https://github.com/kubernetes/kube-openapi/blob/591a79e4bda7/pkg/validation/validate/LICENSE, Apache-2.0 k8s.io/kubernetes, https://github.com/kubernetes/kubernetes/blob/v1.11.1/LICENSE, 
Apache-2.0 k8s.io/kubernetes, https://github.com/kubernetes/kubernetes/blob/v1.11.1/pkg/credentialprovider/azure/azure_acr_helper.go, MIT k8s.io/kubernetes, https://github.com/kubernetes/kubernetes/blob/v1.11.1/third_party/forked/golang/LICENSE, BSD-3-Clause @@ -140,5 +142,5 @@ k8s.io/kubernetes, https://github.com/kubernetes/kubernetes/blob/v1.11.1/third_p k8s.io/utils, https://github.com/kubernetes/utils/blob/67b214c5f920/LICENSE, Apache-2.0 k8s.io/utils, https://github.com/kubernetes/utils/blob/67b214c5f920/inotify/LICENSE, BSD-3-Clause k8s.io/utils, https://github.com/kubernetes/utils/blob/67b214c5f920/third_party/forked/golang/LICENSE, BSD-3-Clause -sigs.k8s.io/structured-merge-diff/v4, https://github.com/kubernetes-sigs/structured-merge-diff/blob/v4.0.2/LICENSE, Apache-2.0 +sigs.k8s.io/structured-merge-diff/v4, https://github.com/kubernetes-sigs/structured-merge-diff/blob/v4.1.0/LICENSE, Apache-2.0 sigs.k8s.io/yaml, https://github.com/kubernetes-sigs/yaml/blob/v1.2.0/LICENSE, MIT / BSD-3-Clause diff --git a/backend/third_party_licenses/cache_server.csv b/backend/third_party_licenses/cache_server.csv index f9b11ebbae8..58b802849b1 100644 --- a/backend/third_party_licenses/cache_server.csv +++ b/backend/third_party_licenses/cache_server.csv @@ -1,24 +1,13 @@ # Generated by https://github.com/google/go-licenses/v2. DO NOT EDIT. github.com/kubeflow/pipelines, https://github.com/kubeflow/pipelines/blob/master/LICENSE, Apache-2.0 github.com/kubeflow/pipelines, https://github.com/kubeflow/pipelines/blob/master/backend/src/apiserver/archive/log.go, Apache-2.0 -github.com/Masterminds/goutils, https://github.com/Masterminds/goutils/blob/v1.1.0/LICENSE.txt, Apache-2.0 -github.com/Masterminds/semver, https://github.com/Masterminds/semver/blob/v1.5.0/LICENSE.txt, MIT -github.com/Masterminds/sprig, https://github.com/Masterminds/sprig/blob/v2.22.0/LICENSE.txt, MIT github.com/PuerkitoBio/purell, https://github.com/PuerkitoBio/purell/blob/v1.1.1/LICENSE, BSD-3-Clause github.com/PuerkitoBio/urlesc, https://github.com/PuerkitoBio/urlesc/blob/de5bf2ad4578/LICENSE, BSD-3-Clause -github.com/antonmedv/expr, https://github.com/antonmedv/expr/blob/v1.8.8/LICENSE, MIT github.com/argoproj/argo-workflows/v3, https://github.com/argoproj/argo-workflows/blob/v3.1.14/LICENSE, Apache-2.0 -github.com/argoproj/pkg, https://github.com/argoproj/pkg/blob/v0.10.1/LICENSE, Apache-2.0 github.com/asaskevich/govalidator, https://github.com/asaskevich/govalidator/blob/7a23bdc65eef/LICENSE, MIT -github.com/beorn7/perks, https://github.com/beorn7/perks/blob/v1.0.1/LICENSE, MIT github.com/cenkalti/backoff, https://github.com/cenkalti/backoff/blob/v2.2.1/LICENSE, MIT -github.com/cespare/xxhash/v2, https://github.com/cespare/xxhash/blob/v2.1.1/LICENSE.txt, MIT -github.com/colinmarc/hdfs, https://github.com/colinmarc/hdfs/blob/9746310a4d31/LICENSE.txt, MIT github.com/davecgh/go-spew, https://github.com/davecgh/go-spew/blob/v1.1.1/LICENSE, ISC -github.com/docker/spdystream, https://github.com/docker/spdystream/blob/6480d4af844c/LICENSE, Apache-2.0 -github.com/doublerebel/bellows, https://github.com/doublerebel/bellows/blob/f177d92a03d3/LICENSE, MIT github.com/emicklei/go-restful, https://github.com/emicklei/go-restful/blob/v2.15.0/LICENSE, MIT -github.com/ghodss/yaml, https://github.com/ghodss/yaml/blob/25d852aebe32/LICENSE, MIT / BSD-3-Clause github.com/go-logr/logr, https://github.com/go-logr/logr/blob/v0.4.0/LICENSE, Apache-2.0 github.com/go-openapi/errors, https://github.com/go-openapi/errors/blob/v0.19.9/LICENSE, Apache-2.0 
github.com/go-openapi/jsonpointer, https://github.com/go-openapi/jsonpointer/blob/v0.19.5/LICENSE, Apache-2.0 @@ -33,18 +22,14 @@ github.com/go-stack/stack, https://github.com/go-stack/stack/blob/v1.8.0/LICENSE github.com/gogo/protobuf, https://github.com/gogo/protobuf/blob/v1.3.2/LICENSE, BSD-3-Clause / BSD-2-Clause github.com/golang/glog, https://github.com/golang/glog/blob/23def4e6c14b/LICENSE, Apache-2.0 github.com/golang/protobuf, https://github.com/golang/protobuf/blob/v1.5.0/LICENSE, BSD-3-Clause +github.com/google/go-cmp, https://github.com/google/go-cmp/blob/v0.5.5/LICENSE, BSD-3-Clause github.com/google/gofuzz, https://github.com/google/gofuzz/blob/v1.1.0/LICENSE, Apache-2.0 github.com/google/uuid, https://github.com/google/uuid/blob/v1.1.2/LICENSE, BSD-3-Clause github.com/googleapis/gnostic, https://github.com/googleapis/gnostic/blob/v0.5.1/LICENSE, Apache-2.0 -github.com/gorilla/websocket, https://github.com/gorilla/websocket/blob/v1.4.2/LICENSE, BSD-2-Clause -github.com/gorilla/websocket, https://github.com/gorilla/websocket/blob/v1.4.2/examples/autobahn/server.go, MIT github.com/grpc-ecosystem/grpc-gateway, https://github.com/grpc-ecosystem/grpc-gateway/blob/v1.16.0/LICENSE.txt, BSD-3-Clause github.com/grpc-ecosystem/grpc-gateway, https://github.com/grpc-ecosystem/grpc-gateway/blob/v1.16.0/internal/casing/LICENSE.md, BSD-3-Clause github.com/grpc-ecosystem/grpc-gateway, https://github.com/grpc-ecosystem/grpc-gateway/blob/v1.16.0/third_party/googleapis/LICENSE, Apache-2.0 -github.com/hashicorp/go-uuid, https://github.com/hashicorp/go-uuid/blob/v1.0.2/LICENSE, MPL-2.0 -github.com/huandu/xstrings, https://github.com/huandu/xstrings/blob/v1.3.1/LICENSE, MIT github.com/imdario/mergo, https://github.com/imdario/mergo/blob/v0.3.12/LICENSE, BSD-3-Clause -github.com/jcmturner/gofork, https://github.com/jcmturner/gofork/blob/v1.0.0/LICENSE, BSD-3-Clause github.com/jinzhu/gorm, https://github.com/jinzhu/gorm/blob/v1.9.1/License, MIT github.com/jinzhu/inflection, https://github.com/jinzhu/inflection/blob/04140366298a/LICENSE, MIT github.com/josharian/intern, https://github.com/josharian/intern/blob/v1.0.0/license.md, MIT @@ -54,55 +39,37 @@ github.com/mailru/easyjson, https://github.com/mailru/easyjson/blob/v0.7.6/LICEN github.com/mailru/easyjson, https://github.com/mailru/easyjson/blob/v0.7.6/parser/modulepath.go, BSD-3-Clause github.com/mattn/go-sqlite3, https://github.com/mattn/go-sqlite3/blob/v1.9.0/LICENSE, MIT github.com/mattn/go-sqlite3/., https://github.com/mattn/go-sqlite3/blob/v1.9.0/sqlite3-binding.h#L2-L11, blessing -github.com/matttproud/golang_protobuf_extensions, https://github.com/matttproud/golang_protobuf_extensions/blob/c182affec369/LICENSE, Apache-2.0 -github.com/mitchellh/copystructure, https://github.com/mitchellh/copystructure/blob/v1.0.0/LICENSE, MIT github.com/mitchellh/mapstructure, https://github.com/mitchellh/mapstructure/blob/v1.4.1/LICENSE, MIT -github.com/mitchellh/reflectwalk, https://github.com/mitchellh/reflectwalk/blob/v1.0.1/LICENSE, MIT github.com/modern-go/concurrent, https://github.com/modern-go/concurrent/blob/bacd9c7ef1dd/LICENSE, Apache-2.0 github.com/modern-go/reflect2, https://github.com/modern-go/reflect2/blob/v1.0.1/LICENSE, Apache-2.0 -github.com/oliveagle/jsonpath, https://github.com/oliveagle/jsonpath/blob/2e52cf6e6852/LICENSE, MIT github.com/peterhellberg/duration, https://github.com/peterhellberg/duration/blob/ec6baeebcd10/LICENSE, MIT github.com/pkg/errors, https://github.com/pkg/errors/blob/v0.9.1/LICENSE, BSD-2-Clause 
-github.com/prometheus/client_golang, https://github.com/prometheus/client_golang/blob/v1.9.0/LICENSE, Apache-2.0 -github.com/prometheus/client_model, https://github.com/prometheus/client_model/blob/v0.2.0/LICENSE, Apache-2.0 -github.com/prometheus/common, https://github.com/prometheus/common/blob/v0.15.0/LICENSE, Apache-2.0 -github.com/prometheus/common/internal/bitbucket.org/ww/goautoneg, https://github.com/prometheus/common/blob/v0.15.0/internal/bitbucket.org/ww/goautoneg/README.txt, BSD-3-Clause -github.com/prometheus/procfs, https://github.com/prometheus/procfs/blob/v0.2.0/LICENSE, Apache-2.0 -github.com/robfig/cron/v3, https://github.com/robfig/cron/blob/v3.0.1/LICENSE, MIT -github.com/sirupsen/logrus, https://github.com/sirupsen/logrus/blob/v1.6.0/LICENSE, MIT -github.com/sirupsen/logrus, https://github.com/sirupsen/logrus/blob/v1.6.0/alt_exit.go, MIT github.com/spf13/pflag, https://github.com/spf13/pflag/blob/v1.0.5/LICENSE, BSD-3-Clause -github.com/valyala/bytebufferpool, https://github.com/valyala/bytebufferpool/blob/v1.0.0/LICENSE, MIT -github.com/valyala/fasttemplate, https://github.com/valyala/fasttemplate/blob/v1.1.0/LICENSE, MIT go.mongodb.org/mongo-driver, https://github.com/mongodb/mongo-go-driver/blob/v1.4.4/LICENSE, Apache-2.0 go.mongodb.org/mongo-driver/., https://github.com/mongodb/mongo-go-driver/blob/v1.4.4/THIRD-PARTY-NOTICES#L1-L29, BSD-2-Clause go.mongodb.org/mongo-driver/., https://github.com/mongodb/mongo-go-driver/blob/v1.4.4/THIRD-PARTY-NOTICES#L31-L61, BSD-3-Clause golang.org/x/crypto, https://github.com/golang/crypto/blob/0c34fe9e7dc2/LICENSE, BSD-3-Clause golang.org/x/net, https://github.com/golang/net/blob/6b1517762897/LICENSE, BSD-3-Clause -golang.org/x/oauth2, https://github.com/golang/oauth2/blob/bf48bf16ab8d/LICENSE, BSD-3-Clause -golang.org/x/sys, https://github.com/golang/sys/blob/47abb6519492/LICENSE, BSD-3-Clause +golang.org/x/oauth2, https://github.com/golang/oauth2/blob/0b49973bad19/LICENSE, BSD-3-Clause +golang.org/x/sys, https://github.com/golang/sys/blob/d19ff857e887/LICENSE, BSD-3-Clause golang.org/x/term, https://github.com/golang/term/blob/7de9c90e9dd1/LICENSE, BSD-3-Clause golang.org/x/text, https://github.com/golang/text/blob/v0.3.5/LICENSE, BSD-3-Clause golang.org/x/time, https://github.com/golang/time/blob/3af7569d3a1e/LICENSE, BSD-3-Clause -google.golang.org/genproto, https://github.com/googleapis/go-genproto/blob/86f49bd18e98/LICENSE, Apache-2.0 -google.golang.org/grpc, https://github.com/grpc/grpc-go/blob/v1.34.0/LICENSE, Apache-2.0 +google.golang.org/genproto, https://github.com/googleapis/go-genproto/blob/6486ece9c497/LICENSE, Apache-2.0 +google.golang.org/grpc, https://github.com/grpc/grpc-go/blob/v1.36.0/LICENSE, Apache-2.0 google.golang.org/protobuf, https://github.com/protocolbuffers/protobuf-go/blob/v1.27.1/LICENSE, BSD-3-Clause gopkg.in/inf.v0, https://github.com/go-inf/inf/blob/v0.9.1/LICENSE, BSD-3-Clause -gopkg.in/jcmturner/aescts.v1, https://github.com/jcmturner/aescts/blob/v1.0.1/LICENSE, Apache-2.0 -gopkg.in/jcmturner/dnsutils.v1, https://github.com/jcmturner/dnsutils/blob/v1.0.1/LICENSE, Apache-2.0 -gopkg.in/jcmturner/gokrb5.v5, https://github.com/jcmturner/gokrb5/blob/v5.3.0/LICENSE, Apache-2.0 -gopkg.in/jcmturner/rpc.v0, https://github.com/jcmturner/rpc/blob/v0.0.2/LICENSE, Apache-2.0 gopkg.in/yaml.v2, https://github.com/go-yaml/yaml/blob/v2.4.0/LICENSE, Apache-2.0 / MIT gopkg.in/yaml.v3, https://github.com/go-yaml/yaml/blob/496545a6307b/LICENSE, MIT -k8s.io/api, 
https://github.com/kubernetes/api/blob/v0.19.6/LICENSE, Apache-2.0 -k8s.io/apimachinery, https://github.com/kubernetes/apimachinery/blob/v0.19.6/LICENSE, Apache-2.0 -k8s.io/client-go, https://github.com/kubernetes/client-go/blob/v0.19.6/LICENSE, Apache-2.0 -k8s.io/klog/v2, https://github.com/kubernetes/klog/blob/v2.5.0/LICENSE, Apache-2.0 -k8s.io/kube-openapi, https://github.com/kubernetes/kube-openapi/blob/d219536bb9fd/LICENSE, Apache-2.0 -k8s.io/kube-openapi, https://github.com/kubernetes/kube-openapi/blob/d219536bb9fd/pkg/validation/errors/LICENSE, Apache-2.0 -k8s.io/kube-openapi, https://github.com/kubernetes/kube-openapi/blob/d219536bb9fd/pkg/validation/spec/LICENSE, Apache-2.0 -k8s.io/kube-openapi, https://github.com/kubernetes/kube-openapi/blob/d219536bb9fd/pkg/validation/strfmt/LICENSE, Apache-2.0 -k8s.io/kube-openapi, https://github.com/kubernetes/kube-openapi/blob/d219536bb9fd/pkg/validation/validate/LICENSE, Apache-2.0 +k8s.io/api, https://github.com/kubernetes/api/blob/v0.20.4/LICENSE, Apache-2.0 +k8s.io/apimachinery, https://github.com/kubernetes/apimachinery/blob/v0.21.2/LICENSE, Apache-2.0 +k8s.io/client-go, https://github.com/kubernetes/client-go/blob/v0.20.4/LICENSE, Apache-2.0 +k8s.io/klog/v2, https://github.com/kubernetes/klog/blob/v2.8.0/LICENSE, Apache-2.0 +k8s.io/kube-openapi, https://github.com/kubernetes/kube-openapi/blob/591a79e4bda7/LICENSE, Apache-2.0 +k8s.io/kube-openapi, https://github.com/kubernetes/kube-openapi/blob/591a79e4bda7/pkg/validation/errors/LICENSE, Apache-2.0 +k8s.io/kube-openapi, https://github.com/kubernetes/kube-openapi/blob/591a79e4bda7/pkg/validation/spec/LICENSE, Apache-2.0 +k8s.io/kube-openapi, https://github.com/kubernetes/kube-openapi/blob/591a79e4bda7/pkg/validation/strfmt/LICENSE, Apache-2.0 +k8s.io/kube-openapi, https://github.com/kubernetes/kube-openapi/blob/591a79e4bda7/pkg/validation/validate/LICENSE, Apache-2.0 k8s.io/kubernetes, https://github.com/kubernetes/kubernetes/blob/v1.11.1/LICENSE, Apache-2.0 k8s.io/kubernetes, https://github.com/kubernetes/kubernetes/blob/v1.11.1/pkg/credentialprovider/azure/azure_acr_helper.go, MIT k8s.io/kubernetes, https://github.com/kubernetes/kubernetes/blob/v1.11.1/third_party/forked/golang/LICENSE, BSD-3-Clause @@ -113,5 +80,5 @@ k8s.io/kubernetes, https://github.com/kubernetes/kubernetes/blob/v1.11.1/third_p k8s.io/utils, https://github.com/kubernetes/utils/blob/67b214c5f920/LICENSE, Apache-2.0 k8s.io/utils, https://github.com/kubernetes/utils/blob/67b214c5f920/inotify/LICENSE, BSD-3-Clause k8s.io/utils, https://github.com/kubernetes/utils/blob/67b214c5f920/third_party/forked/golang/LICENSE, BSD-3-Clause -sigs.k8s.io/structured-merge-diff/v4, https://github.com/kubernetes-sigs/structured-merge-diff/blob/v4.0.2/LICENSE, Apache-2.0 +sigs.k8s.io/structured-merge-diff/v4, https://github.com/kubernetes-sigs/structured-merge-diff/blob/v4.1.0/LICENSE, Apache-2.0 sigs.k8s.io/yaml, https://github.com/kubernetes-sigs/yaml/blob/v1.2.0/LICENSE, MIT / BSD-3-Clause diff --git a/backend/third_party_licenses/persistence_agent.csv b/backend/third_party_licenses/persistence_agent.csv index a9dbcae0b54..90fee8e4c60 100644 --- a/backend/third_party_licenses/persistence_agent.csv +++ b/backend/third_party_licenses/persistence_agent.csv @@ -1,26 +1,16 @@ # Generated by https://github.com/google/go-licenses/v2. DO NOT EDIT. 
github.com/kubeflow/pipelines, https://github.com/kubeflow/pipelines/blob/master/LICENSE, Apache-2.0 github.com/kubeflow/pipelines, https://github.com/kubeflow/pipelines/blob/master/backend/src/apiserver/archive/log.go, Apache-2.0 -cloud.google.com/go, https://github.com/googleapis/google-cloud-go/blob/v0.55.0/LICENSE, Apache-2.0 -cloud.google.com/go/cmd/go-cloud-debug-agent/internal/debug/elf, https://github.com/googleapis/google-cloud-go/blob/v0.55.0/cmd/go-cloud-debug-agent/internal/debug/elf/elf.go#L1-L43, BSD-2-Clause -github.com/Masterminds/goutils, https://github.com/Masterminds/goutils/blob/v1.1.0/LICENSE.txt, Apache-2.0 -github.com/Masterminds/semver, https://github.com/Masterminds/semver/blob/v1.5.0/LICENSE.txt, MIT -github.com/Masterminds/sprig, https://github.com/Masterminds/sprig/blob/v2.22.0/LICENSE.txt, MIT +cloud.google.com/go, https://github.com/googleapis/google-cloud-go/blob/v0.72.0/LICENSE, Apache-2.0 +cloud.google.com/go/cmd/go-cloud-debug-agent/internal/debug/elf, https://github.com/googleapis/google-cloud-go/blob/v0.72.0/cmd/go-cloud-debug-agent/internal/debug/elf/elf.go#L1-L43, BSD-2-Clause +cloud.google.com/go/third_party/pkgsite, https://github.com/googleapis/google-cloud-go/blob/v0.72.0/third_party/pkgsite/LICENSE, BSD-3-Clause github.com/PuerkitoBio/purell, https://github.com/PuerkitoBio/purell/blob/v1.1.1/LICENSE, BSD-3-Clause github.com/PuerkitoBio/urlesc, https://github.com/PuerkitoBio/urlesc/blob/de5bf2ad4578/LICENSE, BSD-3-Clause -github.com/antonmedv/expr, https://github.com/antonmedv/expr/blob/v1.8.8/LICENSE, MIT github.com/argoproj/argo-workflows/v3, https://github.com/argoproj/argo-workflows/blob/v3.1.14/LICENSE, Apache-2.0 -github.com/argoproj/pkg, https://github.com/argoproj/pkg/blob/v0.10.1/LICENSE, Apache-2.0 github.com/asaskevich/govalidator, https://github.com/asaskevich/govalidator/blob/7a23bdc65eef/LICENSE, MIT -github.com/beorn7/perks, https://github.com/beorn7/perks/blob/v1.0.1/LICENSE, MIT github.com/cenkalti/backoff, https://github.com/cenkalti/backoff/blob/v2.2.1/LICENSE, MIT -github.com/cespare/xxhash/v2, https://github.com/cespare/xxhash/blob/v2.1.1/LICENSE.txt, MIT -github.com/colinmarc/hdfs, https://github.com/colinmarc/hdfs/blob/9746310a4d31/LICENSE.txt, MIT github.com/davecgh/go-spew, https://github.com/davecgh/go-spew/blob/v1.1.1/LICENSE, ISC -github.com/docker/spdystream, https://github.com/docker/spdystream/blob/6480d4af844c/LICENSE, Apache-2.0 -github.com/doublerebel/bellows, https://github.com/doublerebel/bellows/blob/f177d92a03d3/LICENSE, MIT github.com/emicklei/go-restful, https://github.com/emicklei/go-restful/blob/v2.15.0/LICENSE, MIT -github.com/ghodss/yaml, https://github.com/ghodss/yaml/blob/25d852aebe32/LICENSE, MIT / BSD-3-Clause github.com/go-logr/logr, https://github.com/go-logr/logr/blob/v0.4.0/LICENSE, Apache-2.0 github.com/go-openapi/errors, https://github.com/go-openapi/errors/blob/v0.19.9/LICENSE, Apache-2.0 github.com/go-openapi/jsonpointer, https://github.com/go-openapi/jsonpointer/blob/v0.19.5/LICENSE, Apache-2.0 @@ -38,69 +28,48 @@ github.com/google/go-cmp, https://github.com/google/go-cmp/blob/v0.5.5/LICENSE, github.com/google/gofuzz, https://github.com/google/gofuzz/blob/v1.1.0/LICENSE, Apache-2.0 github.com/google/uuid, https://github.com/google/uuid/blob/v1.1.2/LICENSE, BSD-3-Clause github.com/googleapis/gnostic, https://github.com/googleapis/gnostic/blob/v0.5.1/LICENSE, Apache-2.0 -github.com/gorilla/websocket, https://github.com/gorilla/websocket/blob/v1.4.2/LICENSE, BSD-2-Clause 
-github.com/gorilla/websocket, https://github.com/gorilla/websocket/blob/v1.4.2/examples/autobahn/server.go, MIT github.com/grpc-ecosystem/grpc-gateway, https://github.com/grpc-ecosystem/grpc-gateway/blob/v1.16.0/LICENSE.txt, BSD-3-Clause github.com/grpc-ecosystem/grpc-gateway, https://github.com/grpc-ecosystem/grpc-gateway/blob/v1.16.0/internal/casing/LICENSE.md, BSD-3-Clause github.com/grpc-ecosystem/grpc-gateway, https://github.com/grpc-ecosystem/grpc-gateway/blob/v1.16.0/third_party/googleapis/LICENSE, Apache-2.0 -github.com/hashicorp/go-uuid, https://github.com/hashicorp/go-uuid/blob/v1.0.2/LICENSE, MPL-2.0 github.com/hashicorp/golang-lru, https://github.com/hashicorp/golang-lru/blob/v0.5.4/LICENSE, MPL-2.0 -github.com/huandu/xstrings, https://github.com/huandu/xstrings/blob/v1.3.1/LICENSE, MIT github.com/imdario/mergo, https://github.com/imdario/mergo/blob/v0.3.12/LICENSE, BSD-3-Clause -github.com/jcmturner/gofork, https://github.com/jcmturner/gofork/blob/v1.0.0/LICENSE, BSD-3-Clause github.com/josharian/intern, https://github.com/josharian/intern/blob/v1.0.0/license.md, MIT github.com/json-iterator/go, https://github.com/json-iterator/go/blob/v1.1.10/LICENSE, MIT github.com/lestrrat-go/strftime, https://github.com/lestrrat-go/strftime/blob/v1.0.4/LICENSE, MIT github.com/mailru/easyjson, https://github.com/mailru/easyjson/blob/v0.7.6/LICENSE, MIT github.com/mailru/easyjson, https://github.com/mailru/easyjson/blob/v0.7.6/parser/modulepath.go, BSD-3-Clause -github.com/matttproud/golang_protobuf_extensions, https://github.com/matttproud/golang_protobuf_extensions/blob/c182affec369/LICENSE, Apache-2.0 -github.com/mitchellh/copystructure, https://github.com/mitchellh/copystructure/blob/v1.0.0/LICENSE, MIT github.com/mitchellh/mapstructure, https://github.com/mitchellh/mapstructure/blob/v1.4.1/LICENSE, MIT -github.com/mitchellh/reflectwalk, https://github.com/mitchellh/reflectwalk/blob/v1.0.1/LICENSE, MIT github.com/modern-go/concurrent, https://github.com/modern-go/concurrent/blob/bacd9c7ef1dd/LICENSE, Apache-2.0 github.com/modern-go/reflect2, https://github.com/modern-go/reflect2/blob/v1.0.1/LICENSE, Apache-2.0 -github.com/oliveagle/jsonpath, https://github.com/oliveagle/jsonpath/blob/2e52cf6e6852/LICENSE, MIT github.com/pkg/errors, https://github.com/pkg/errors/blob/v0.9.1/LICENSE, BSD-2-Clause -github.com/prometheus/client_golang, https://github.com/prometheus/client_golang/blob/v1.9.0/LICENSE, Apache-2.0 -github.com/prometheus/client_model, https://github.com/prometheus/client_model/blob/v0.2.0/LICENSE, Apache-2.0 -github.com/prometheus/common, https://github.com/prometheus/common/blob/v0.15.0/LICENSE, Apache-2.0 -github.com/prometheus/common/internal/bitbucket.org/ww/goautoneg, https://github.com/prometheus/common/blob/v0.15.0/internal/bitbucket.org/ww/goautoneg/README.txt, BSD-3-Clause -github.com/prometheus/procfs, https://github.com/prometheus/procfs/blob/v0.2.0/LICENSE, Apache-2.0 -github.com/robfig/cron/v3, https://github.com/robfig/cron/blob/v3.0.1/LICENSE, MIT github.com/sirupsen/logrus, https://github.com/sirupsen/logrus/blob/v1.6.0/LICENSE, MIT github.com/sirupsen/logrus, https://github.com/sirupsen/logrus/blob/v1.6.0/alt_exit.go, MIT github.com/spf13/pflag, https://github.com/spf13/pflag/blob/v1.0.5/LICENSE, BSD-3-Clause -github.com/valyala/bytebufferpool, https://github.com/valyala/bytebufferpool/blob/v1.0.0/LICENSE, MIT -github.com/valyala/fasttemplate, https://github.com/valyala/fasttemplate/blob/v1.1.0/LICENSE, MIT go.mongodb.org/mongo-driver, 
https://github.com/mongodb/mongo-go-driver/blob/v1.4.4/LICENSE, Apache-2.0 go.mongodb.org/mongo-driver/., https://github.com/mongodb/mongo-go-driver/blob/v1.4.4/THIRD-PARTY-NOTICES#L1-L29, BSD-2-Clause go.mongodb.org/mongo-driver/., https://github.com/mongodb/mongo-go-driver/blob/v1.4.4/THIRD-PARTY-NOTICES#L31-L61, BSD-3-Clause golang.org/x/crypto, https://github.com/golang/crypto/blob/0c34fe9e7dc2/LICENSE, BSD-3-Clause golang.org/x/net, https://github.com/golang/net/blob/6b1517762897/LICENSE, BSD-3-Clause -golang.org/x/oauth2, https://github.com/golang/oauth2/blob/bf48bf16ab8d/LICENSE, BSD-3-Clause -golang.org/x/sys, https://github.com/golang/sys/blob/47abb6519492/LICENSE, BSD-3-Clause +golang.org/x/oauth2, https://github.com/golang/oauth2/blob/0b49973bad19/LICENSE, BSD-3-Clause +golang.org/x/sys, https://github.com/golang/sys/blob/d19ff857e887/LICENSE, BSD-3-Clause golang.org/x/term, https://github.com/golang/term/blob/7de9c90e9dd1/LICENSE, BSD-3-Clause golang.org/x/text, https://github.com/golang/text/blob/v0.3.5/LICENSE, BSD-3-Clause golang.org/x/time, https://github.com/golang/time/blob/3af7569d3a1e/LICENSE, BSD-3-Clause -google.golang.org/genproto, https://github.com/googleapis/go-genproto/blob/86f49bd18e98/LICENSE, Apache-2.0 -google.golang.org/grpc, https://github.com/grpc/grpc-go/blob/v1.34.0/LICENSE, Apache-2.0 +google.golang.org/genproto, https://github.com/googleapis/go-genproto/blob/6486ece9c497/LICENSE, Apache-2.0 +google.golang.org/grpc, https://github.com/grpc/grpc-go/blob/v1.36.0/LICENSE, Apache-2.0 google.golang.org/protobuf, https://github.com/protocolbuffers/protobuf-go/blob/v1.27.1/LICENSE, BSD-3-Clause gopkg.in/inf.v0, https://github.com/go-inf/inf/blob/v0.9.1/LICENSE, BSD-3-Clause -gopkg.in/jcmturner/aescts.v1, https://github.com/jcmturner/aescts/blob/v1.0.1/LICENSE, Apache-2.0 -gopkg.in/jcmturner/dnsutils.v1, https://github.com/jcmturner/dnsutils/blob/v1.0.1/LICENSE, Apache-2.0 -gopkg.in/jcmturner/gokrb5.v5, https://github.com/jcmturner/gokrb5/blob/v5.3.0/LICENSE, Apache-2.0 -gopkg.in/jcmturner/rpc.v0, https://github.com/jcmturner/rpc/blob/v0.0.2/LICENSE, Apache-2.0 gopkg.in/yaml.v2, https://github.com/go-yaml/yaml/blob/v2.4.0/LICENSE, Apache-2.0 / MIT gopkg.in/yaml.v3, https://github.com/go-yaml/yaml/blob/496545a6307b/LICENSE, MIT -k8s.io/api, https://github.com/kubernetes/api/blob/v0.19.6/LICENSE, Apache-2.0 -k8s.io/apimachinery, https://github.com/kubernetes/apimachinery/blob/v0.19.6/LICENSE, Apache-2.0 -k8s.io/client-go, https://github.com/kubernetes/client-go/blob/v0.19.6/LICENSE, Apache-2.0 -k8s.io/klog/v2, https://github.com/kubernetes/klog/blob/v2.5.0/LICENSE, Apache-2.0 -k8s.io/kube-openapi, https://github.com/kubernetes/kube-openapi/blob/d219536bb9fd/LICENSE, Apache-2.0 -k8s.io/kube-openapi, https://github.com/kubernetes/kube-openapi/blob/d219536bb9fd/pkg/validation/errors/LICENSE, Apache-2.0 -k8s.io/kube-openapi, https://github.com/kubernetes/kube-openapi/blob/d219536bb9fd/pkg/validation/spec/LICENSE, Apache-2.0 -k8s.io/kube-openapi, https://github.com/kubernetes/kube-openapi/blob/d219536bb9fd/pkg/validation/strfmt/LICENSE, Apache-2.0 -k8s.io/kube-openapi, https://github.com/kubernetes/kube-openapi/blob/d219536bb9fd/pkg/validation/validate/LICENSE, Apache-2.0 +k8s.io/api, https://github.com/kubernetes/api/blob/v0.20.4/LICENSE, Apache-2.0 +k8s.io/apimachinery, https://github.com/kubernetes/apimachinery/blob/v0.21.2/LICENSE, Apache-2.0 +k8s.io/client-go, https://github.com/kubernetes/client-go/blob/v0.20.4/LICENSE, Apache-2.0 +k8s.io/klog/v2, 
https://github.com/kubernetes/klog/blob/v2.8.0/LICENSE, Apache-2.0 +k8s.io/kube-openapi, https://github.com/kubernetes/kube-openapi/blob/591a79e4bda7/LICENSE, Apache-2.0 +k8s.io/kube-openapi, https://github.com/kubernetes/kube-openapi/blob/591a79e4bda7/pkg/validation/errors/LICENSE, Apache-2.0 +k8s.io/kube-openapi, https://github.com/kubernetes/kube-openapi/blob/591a79e4bda7/pkg/validation/spec/LICENSE, Apache-2.0 +k8s.io/kube-openapi, https://github.com/kubernetes/kube-openapi/blob/591a79e4bda7/pkg/validation/strfmt/LICENSE, Apache-2.0 +k8s.io/kube-openapi, https://github.com/kubernetes/kube-openapi/blob/591a79e4bda7/pkg/validation/validate/LICENSE, Apache-2.0 k8s.io/kubernetes, https://github.com/kubernetes/kubernetes/blob/v1.11.1/LICENSE, Apache-2.0 k8s.io/kubernetes, https://github.com/kubernetes/kubernetes/blob/v1.11.1/pkg/credentialprovider/azure/azure_acr_helper.go, MIT k8s.io/kubernetes, https://github.com/kubernetes/kubernetes/blob/v1.11.1/third_party/forked/golang/LICENSE, BSD-3-Clause @@ -111,5 +80,5 @@ k8s.io/kubernetes, https://github.com/kubernetes/kubernetes/blob/v1.11.1/third_p k8s.io/utils, https://github.com/kubernetes/utils/blob/67b214c5f920/LICENSE, Apache-2.0 k8s.io/utils, https://github.com/kubernetes/utils/blob/67b214c5f920/inotify/LICENSE, BSD-3-Clause k8s.io/utils, https://github.com/kubernetes/utils/blob/67b214c5f920/third_party/forked/golang/LICENSE, BSD-3-Clause -sigs.k8s.io/structured-merge-diff/v4, https://github.com/kubernetes-sigs/structured-merge-diff/blob/v4.0.2/LICENSE, Apache-2.0 +sigs.k8s.io/structured-merge-diff/v4, https://github.com/kubernetes-sigs/structured-merge-diff/blob/v4.1.0/LICENSE, Apache-2.0 sigs.k8s.io/yaml, https://github.com/kubernetes-sigs/yaml/blob/v1.2.0/LICENSE, MIT / BSD-3-Clause diff --git a/backend/third_party_licenses/swf.csv b/backend/third_party_licenses/swf.csv index 9a2b34bfbfc..a1bac36a78d 100644 --- a/backend/third_party_licenses/swf.csv +++ b/backend/third_party_licenses/swf.csv @@ -1,8 +1,9 @@ # Generated by https://github.com/google/go-licenses/v2. DO NOT EDIT. 
github.com/kubeflow/pipelines, https://github.com/kubeflow/pipelines/blob/master/LICENSE, Apache-2.0 github.com/kubeflow/pipelines, https://github.com/kubeflow/pipelines/blob/master/backend/src/apiserver/archive/log.go, Apache-2.0 -cloud.google.com/go, https://github.com/googleapis/google-cloud-go/blob/v0.55.0/LICENSE, Apache-2.0 -cloud.google.com/go/cmd/go-cloud-debug-agent/internal/debug/elf, https://github.com/googleapis/google-cloud-go/blob/v0.55.0/cmd/go-cloud-debug-agent/internal/debug/elf/elf.go#L1-L43, BSD-2-Clause +cloud.google.com/go, https://github.com/googleapis/google-cloud-go/blob/v0.72.0/LICENSE, Apache-2.0 +cloud.google.com/go/cmd/go-cloud-debug-agent/internal/debug/elf, https://github.com/googleapis/google-cloud-go/blob/v0.72.0/cmd/go-cloud-debug-agent/internal/debug/elf/elf.go#L1-L43, BSD-2-Clause +cloud.google.com/go/third_party/pkgsite, https://github.com/googleapis/google-cloud-go/blob/v0.72.0/third_party/pkgsite/LICENSE, BSD-3-Clause github.com/Masterminds/goutils, https://github.com/Masterminds/goutils/blob/v1.1.0/LICENSE.txt, Apache-2.0 github.com/Masterminds/semver, https://github.com/Masterminds/semver/blob/v1.5.0/LICENSE.txt, MIT github.com/Masterminds/sprig, https://github.com/Masterminds/sprig/blob/v2.22.0/LICENSE.txt, MIT @@ -12,16 +13,11 @@ github.com/antonmedv/expr, https://github.com/antonmedv/expr/blob/v1.8.8/LICENSE github.com/argoproj/argo-workflows/v3, https://github.com/argoproj/argo-workflows/blob/v3.1.14/LICENSE, Apache-2.0 github.com/argoproj/pkg, https://github.com/argoproj/pkg/blob/v0.10.1/LICENSE, Apache-2.0 github.com/asaskevich/govalidator, https://github.com/asaskevich/govalidator/blob/7a23bdc65eef/LICENSE, MIT -github.com/beorn7/perks, https://github.com/beorn7/perks/blob/v1.0.1/LICENSE, MIT github.com/cenkalti/backoff, https://github.com/cenkalti/backoff/blob/v2.2.1/LICENSE, MIT -github.com/cespare/xxhash/v2, https://github.com/cespare/xxhash/blob/v2.1.1/LICENSE.txt, MIT -github.com/colinmarc/hdfs, https://github.com/colinmarc/hdfs/blob/9746310a4d31/LICENSE.txt, MIT github.com/davecgh/go-spew, https://github.com/davecgh/go-spew/blob/v1.1.1/LICENSE, ISC -github.com/docker/spdystream, https://github.com/docker/spdystream/blob/6480d4af844c/LICENSE, Apache-2.0 github.com/doublerebel/bellows, https://github.com/doublerebel/bellows/blob/f177d92a03d3/LICENSE, MIT github.com/emicklei/go-restful, https://github.com/emicklei/go-restful/blob/v2.15.0/LICENSE, MIT github.com/fsnotify/fsnotify, https://github.com/fsnotify/fsnotify/blob/v1.4.9/LICENSE, BSD-3-Clause -github.com/ghodss/yaml, https://github.com/ghodss/yaml/blob/25d852aebe32/LICENSE, MIT / BSD-3-Clause github.com/go-logr/logr, https://github.com/go-logr/logr/blob/v0.4.0/LICENSE, Apache-2.0 github.com/go-openapi/errors, https://github.com/go-openapi/errors/blob/v0.19.9/LICENSE, Apache-2.0 github.com/go-openapi/jsonpointer, https://github.com/go-openapi/jsonpointer/blob/v0.19.5/LICENSE, Apache-2.0 @@ -45,34 +41,26 @@ github.com/gorilla/websocket, https://github.com/gorilla/websocket/blob/v1.4.2/e github.com/grpc-ecosystem/grpc-gateway, https://github.com/grpc-ecosystem/grpc-gateway/blob/v1.16.0/LICENSE.txt, BSD-3-Clause github.com/grpc-ecosystem/grpc-gateway, https://github.com/grpc-ecosystem/grpc-gateway/blob/v1.16.0/internal/casing/LICENSE.md, BSD-3-Clause github.com/grpc-ecosystem/grpc-gateway, https://github.com/grpc-ecosystem/grpc-gateway/blob/v1.16.0/third_party/googleapis/LICENSE, Apache-2.0 -github.com/hashicorp/go-uuid, https://github.com/hashicorp/go-uuid/blob/v1.0.2/LICENSE, MPL-2.0 
github.com/hashicorp/golang-lru, https://github.com/hashicorp/golang-lru/blob/v0.5.4/LICENSE, MPL-2.0 github.com/hashicorp/hcl, https://github.com/hashicorp/hcl/blob/v1.0.0/LICENSE, MPL-2.0 github.com/huandu/xstrings, https://github.com/huandu/xstrings/blob/v1.3.1/LICENSE, MIT github.com/imdario/mergo, https://github.com/imdario/mergo/blob/v0.3.12/LICENSE, BSD-3-Clause -github.com/jcmturner/gofork, https://github.com/jcmturner/gofork/blob/v1.0.0/LICENSE, BSD-3-Clause github.com/josharian/intern, https://github.com/josharian/intern/blob/v1.0.0/license.md, MIT github.com/json-iterator/go, https://github.com/json-iterator/go/blob/v1.1.10/LICENSE, MIT github.com/lestrrat-go/strftime, https://github.com/lestrrat-go/strftime/blob/v1.0.4/LICENSE, MIT github.com/magiconair/properties, https://github.com/magiconair/properties/blob/v1.8.1/LICENSE, BSD-2-Clause github.com/mailru/easyjson, https://github.com/mailru/easyjson/blob/v0.7.6/LICENSE, MIT github.com/mailru/easyjson, https://github.com/mailru/easyjson/blob/v0.7.6/parser/modulepath.go, BSD-3-Clause -github.com/matttproud/golang_protobuf_extensions, https://github.com/matttproud/golang_protobuf_extensions/blob/c182affec369/LICENSE, Apache-2.0 github.com/mitchellh/copystructure, https://github.com/mitchellh/copystructure/blob/v1.0.0/LICENSE, MIT github.com/mitchellh/mapstructure, https://github.com/mitchellh/mapstructure/blob/v1.4.1/LICENSE, MIT github.com/mitchellh/reflectwalk, https://github.com/mitchellh/reflectwalk/blob/v1.0.1/LICENSE, MIT +github.com/moby/spdystream, https://github.com/moby/spdystream/blob/v0.2.0/LICENSE, Apache-2.0 github.com/modern-go/concurrent, https://github.com/modern-go/concurrent/blob/bacd9c7ef1dd/LICENSE, Apache-2.0 github.com/modern-go/reflect2, https://github.com/modern-go/reflect2/blob/v1.0.1/LICENSE, Apache-2.0 github.com/oliveagle/jsonpath, https://github.com/oliveagle/jsonpath/blob/2e52cf6e6852/LICENSE, MIT github.com/pelletier/go-toml, https://github.com/pelletier/go-toml/blob/v1.8.0/LICENSE, MIT github.com/pkg/errors, https://github.com/pkg/errors/blob/v0.9.1/LICENSE, BSD-2-Clause -github.com/prometheus/client_golang, https://github.com/prometheus/client_golang/blob/v1.9.0/LICENSE, Apache-2.0 -github.com/prometheus/client_model, https://github.com/prometheus/client_model/blob/v0.2.0/LICENSE, Apache-2.0 -github.com/prometheus/common, https://github.com/prometheus/common/blob/v0.15.0/LICENSE, Apache-2.0 -github.com/prometheus/common/internal/bitbucket.org/ww/goautoneg, https://github.com/prometheus/common/blob/v0.15.0/internal/bitbucket.org/ww/goautoneg/README.txt, BSD-3-Clause -github.com/prometheus/procfs, https://github.com/prometheus/procfs/blob/v0.2.0/LICENSE, Apache-2.0 github.com/robfig/cron, https://github.com/robfig/cron/blob/v1.2.0/LICENSE, MIT -github.com/robfig/cron/v3, https://github.com/robfig/cron/blob/v3.0.1/LICENSE, MIT github.com/sirupsen/logrus, https://github.com/sirupsen/logrus/blob/v1.6.0/LICENSE, MIT github.com/sirupsen/logrus, https://github.com/sirupsen/logrus/blob/v1.6.0/alt_exit.go, MIT github.com/spf13/afero, https://github.com/spf13/afero/blob/v1.3.2/LICENSE.txt, Apache-2.0 @@ -88,31 +76,27 @@ go.mongodb.org/mongo-driver/., https://github.com/mongodb/mongo-go-driver/blob/v go.mongodb.org/mongo-driver/., https://github.com/mongodb/mongo-go-driver/blob/v1.4.4/THIRD-PARTY-NOTICES#L31-L61, BSD-3-Clause golang.org/x/crypto, https://github.com/golang/crypto/blob/0c34fe9e7dc2/LICENSE, BSD-3-Clause golang.org/x/net, https://github.com/golang/net/blob/6b1517762897/LICENSE, BSD-3-Clause 
-golang.org/x/oauth2, https://github.com/golang/oauth2/blob/bf48bf16ab8d/LICENSE, BSD-3-Clause -golang.org/x/sys, https://github.com/golang/sys/blob/47abb6519492/LICENSE, BSD-3-Clause +golang.org/x/oauth2, https://github.com/golang/oauth2/blob/0b49973bad19/LICENSE, BSD-3-Clause +golang.org/x/sys, https://github.com/golang/sys/blob/d19ff857e887/LICENSE, BSD-3-Clause golang.org/x/term, https://github.com/golang/term/blob/7de9c90e9dd1/LICENSE, BSD-3-Clause golang.org/x/text, https://github.com/golang/text/blob/v0.3.5/LICENSE, BSD-3-Clause golang.org/x/time, https://github.com/golang/time/blob/3af7569d3a1e/LICENSE, BSD-3-Clause -google.golang.org/genproto, https://github.com/googleapis/go-genproto/blob/86f49bd18e98/LICENSE, Apache-2.0 -google.golang.org/grpc, https://github.com/grpc/grpc-go/blob/v1.34.0/LICENSE, Apache-2.0 +google.golang.org/genproto, https://github.com/googleapis/go-genproto/blob/6486ece9c497/LICENSE, Apache-2.0 +google.golang.org/grpc, https://github.com/grpc/grpc-go/blob/v1.36.0/LICENSE, Apache-2.0 google.golang.org/protobuf, https://github.com/protocolbuffers/protobuf-go/blob/v1.27.1/LICENSE, BSD-3-Clause gopkg.in/inf.v0, https://github.com/go-inf/inf/blob/v0.9.1/LICENSE, BSD-3-Clause gopkg.in/ini.v1, https://github.com/go-ini/ini/blob/v1.57.0/LICENSE, Apache-2.0 -gopkg.in/jcmturner/aescts.v1, https://github.com/jcmturner/aescts/blob/v1.0.1/LICENSE, Apache-2.0 -gopkg.in/jcmturner/dnsutils.v1, https://github.com/jcmturner/dnsutils/blob/v1.0.1/LICENSE, Apache-2.0 -gopkg.in/jcmturner/gokrb5.v5, https://github.com/jcmturner/gokrb5/blob/v5.3.0/LICENSE, Apache-2.0 -gopkg.in/jcmturner/rpc.v0, https://github.com/jcmturner/rpc/blob/v0.0.2/LICENSE, Apache-2.0 gopkg.in/yaml.v2, https://github.com/go-yaml/yaml/blob/v2.4.0/LICENSE, Apache-2.0 / MIT gopkg.in/yaml.v3, https://github.com/go-yaml/yaml/blob/496545a6307b/LICENSE, MIT -k8s.io/api, https://github.com/kubernetes/api/blob/v0.19.6/LICENSE, Apache-2.0 -k8s.io/apimachinery, https://github.com/kubernetes/apimachinery/blob/v0.19.6/LICENSE, Apache-2.0 -k8s.io/client-go, https://github.com/kubernetes/client-go/blob/v0.19.6/LICENSE, Apache-2.0 -k8s.io/klog/v2, https://github.com/kubernetes/klog/blob/v2.5.0/LICENSE, Apache-2.0 -k8s.io/kube-openapi, https://github.com/kubernetes/kube-openapi/blob/d219536bb9fd/LICENSE, Apache-2.0 -k8s.io/kube-openapi, https://github.com/kubernetes/kube-openapi/blob/d219536bb9fd/pkg/validation/errors/LICENSE, Apache-2.0 -k8s.io/kube-openapi, https://github.com/kubernetes/kube-openapi/blob/d219536bb9fd/pkg/validation/spec/LICENSE, Apache-2.0 -k8s.io/kube-openapi, https://github.com/kubernetes/kube-openapi/blob/d219536bb9fd/pkg/validation/strfmt/LICENSE, Apache-2.0 -k8s.io/kube-openapi, https://github.com/kubernetes/kube-openapi/blob/d219536bb9fd/pkg/validation/validate/LICENSE, Apache-2.0 +k8s.io/api, https://github.com/kubernetes/api/blob/v0.20.4/LICENSE, Apache-2.0 +k8s.io/apimachinery, https://github.com/kubernetes/apimachinery/blob/v0.21.2/LICENSE, Apache-2.0 +k8s.io/client-go, https://github.com/kubernetes/client-go/blob/v0.20.4/LICENSE, Apache-2.0 +k8s.io/klog/v2, https://github.com/kubernetes/klog/blob/v2.8.0/LICENSE, Apache-2.0 +k8s.io/kube-openapi, https://github.com/kubernetes/kube-openapi/blob/591a79e4bda7/LICENSE, Apache-2.0 +k8s.io/kube-openapi, https://github.com/kubernetes/kube-openapi/blob/591a79e4bda7/pkg/validation/errors/LICENSE, Apache-2.0 +k8s.io/kube-openapi, https://github.com/kubernetes/kube-openapi/blob/591a79e4bda7/pkg/validation/spec/LICENSE, Apache-2.0 +k8s.io/kube-openapi, 
https://github.com/kubernetes/kube-openapi/blob/591a79e4bda7/pkg/validation/strfmt/LICENSE, Apache-2.0 +k8s.io/kube-openapi, https://github.com/kubernetes/kube-openapi/blob/591a79e4bda7/pkg/validation/validate/LICENSE, Apache-2.0 k8s.io/kubernetes, https://github.com/kubernetes/kubernetes/blob/v1.11.1/LICENSE, Apache-2.0 k8s.io/kubernetes, https://github.com/kubernetes/kubernetes/blob/v1.11.1/pkg/credentialprovider/azure/azure_acr_helper.go, MIT k8s.io/kubernetes, https://github.com/kubernetes/kubernetes/blob/v1.11.1/third_party/forked/golang/LICENSE, BSD-3-Clause @@ -123,5 +107,5 @@ k8s.io/kubernetes, https://github.com/kubernetes/kubernetes/blob/v1.11.1/third_p k8s.io/utils, https://github.com/kubernetes/utils/blob/67b214c5f920/LICENSE, Apache-2.0 k8s.io/utils, https://github.com/kubernetes/utils/blob/67b214c5f920/inotify/LICENSE, BSD-3-Clause k8s.io/utils, https://github.com/kubernetes/utils/blob/67b214c5f920/third_party/forked/golang/LICENSE, BSD-3-Clause -sigs.k8s.io/structured-merge-diff/v4, https://github.com/kubernetes-sigs/structured-merge-diff/blob/v4.0.2/LICENSE, Apache-2.0 +sigs.k8s.io/structured-merge-diff/v4, https://github.com/kubernetes-sigs/structured-merge-diff/blob/v4.1.0/LICENSE, Apache-2.0 sigs.k8s.io/yaml, https://github.com/kubernetes-sigs/yaml/blob/v1.2.0/LICENSE, MIT / BSD-3-Clause diff --git a/backend/third_party_licenses/viewer.csv b/backend/third_party_licenses/viewer.csv index ca922b9c2b4..f209cba213f 100644 --- a/backend/third_party_licenses/viewer.csv +++ b/backend/third_party_licenses/viewer.csv @@ -1,8 +1,9 @@ # Generated by https://github.com/google/go-licenses/v2. DO NOT EDIT. github.com/kubeflow/pipelines, https://github.com/kubeflow/pipelines/blob/master/LICENSE, Apache-2.0 github.com/kubeflow/pipelines, https://github.com/kubeflow/pipelines/blob/master/backend/src/apiserver/archive/log.go, Apache-2.0 -cloud.google.com/go, https://github.com/googleapis/google-cloud-go/blob/v0.55.0/LICENSE, Apache-2.0 -cloud.google.com/go/cmd/go-cloud-debug-agent/internal/debug/elf, https://github.com/googleapis/google-cloud-go/blob/v0.55.0/cmd/go-cloud-debug-agent/internal/debug/elf/elf.go#L1-L43, BSD-2-Clause +cloud.google.com/go, https://github.com/googleapis/google-cloud-go/blob/v0.72.0/LICENSE, Apache-2.0 +cloud.google.com/go/cmd/go-cloud-debug-agent/internal/debug/elf, https://github.com/googleapis/google-cloud-go/blob/v0.72.0/cmd/go-cloud-debug-agent/internal/debug/elf/elf.go#L1-L43, BSD-2-Clause +cloud.google.com/go/third_party/pkgsite, https://github.com/googleapis/google-cloud-go/blob/v0.72.0/third_party/pkgsite/LICENSE, BSD-3-Clause github.com/beorn7/perks, https://github.com/beorn7/perks/blob/v1.0.1/LICENSE, MIT github.com/cespare/xxhash/v2, https://github.com/cespare/xxhash/blob/v2.1.1/LICENSE.txt, MIT github.com/davecgh/go-spew, https://github.com/davecgh/go-spew/blob/v1.1.1/LICENSE, ISC @@ -32,8 +33,8 @@ github.com/prometheus/procfs, https://github.com/prometheus/procfs/blob/v0.2.0/L github.com/spf13/pflag, https://github.com/spf13/pflag/blob/v1.0.5/LICENSE, BSD-3-Clause golang.org/x/crypto, https://github.com/golang/crypto/blob/0c34fe9e7dc2/LICENSE, BSD-3-Clause golang.org/x/net, https://github.com/golang/net/blob/6b1517762897/LICENSE, BSD-3-Clause -golang.org/x/oauth2, https://github.com/golang/oauth2/blob/bf48bf16ab8d/LICENSE, BSD-3-Clause -golang.org/x/sys, https://github.com/golang/sys/blob/47abb6519492/LICENSE, BSD-3-Clause +golang.org/x/oauth2, https://github.com/golang/oauth2/blob/0b49973bad19/LICENSE, BSD-3-Clause +golang.org/x/sys, 
https://github.com/golang/sys/blob/d19ff857e887/LICENSE, BSD-3-Clause golang.org/x/term, https://github.com/golang/term/blob/7de9c90e9dd1/LICENSE, BSD-3-Clause golang.org/x/text, https://github.com/golang/text/blob/v0.3.5/LICENSE, BSD-3-Clause golang.org/x/time, https://github.com/golang/time/blob/3af7569d3a1e/LICENSE, BSD-3-Clause @@ -42,20 +43,20 @@ google.golang.org/protobuf, https://github.com/protocolbuffers/protobuf-go/blob/ gopkg.in/inf.v0, https://github.com/go-inf/inf/blob/v0.9.1/LICENSE, BSD-3-Clause gopkg.in/yaml.v2, https://github.com/go-yaml/yaml/blob/v2.4.0/LICENSE, Apache-2.0 / MIT gopkg.in/yaml.v3, https://github.com/go-yaml/yaml/blob/496545a6307b/LICENSE, MIT -k8s.io/api, https://github.com/kubernetes/api/blob/v0.19.6/LICENSE, Apache-2.0 +k8s.io/api, https://github.com/kubernetes/api/blob/v0.20.4/LICENSE, Apache-2.0 k8s.io/apiextensions-apiserver, https://github.com/kubernetes/apiextensions-apiserver/blob/v0.19.2/LICENSE, Apache-2.0 -k8s.io/apimachinery, https://github.com/kubernetes/apimachinery/blob/v0.19.6/LICENSE, Apache-2.0 -k8s.io/client-go, https://github.com/kubernetes/client-go/blob/v0.19.6/LICENSE, Apache-2.0 +k8s.io/apimachinery, https://github.com/kubernetes/apimachinery/blob/v0.21.2/LICENSE, Apache-2.0 +k8s.io/client-go, https://github.com/kubernetes/client-go/blob/v0.20.4/LICENSE, Apache-2.0 k8s.io/component-base, https://github.com/kubernetes/component-base/blob/v0.19.2/LICENSE, Apache-2.0 -k8s.io/klog/v2, https://github.com/kubernetes/klog/blob/v2.5.0/LICENSE, Apache-2.0 -k8s.io/kube-openapi, https://github.com/kubernetes/kube-openapi/blob/d219536bb9fd/LICENSE, Apache-2.0 -k8s.io/kube-openapi, https://github.com/kubernetes/kube-openapi/blob/d219536bb9fd/pkg/validation/errors/LICENSE, Apache-2.0 -k8s.io/kube-openapi, https://github.com/kubernetes/kube-openapi/blob/d219536bb9fd/pkg/validation/spec/LICENSE, Apache-2.0 -k8s.io/kube-openapi, https://github.com/kubernetes/kube-openapi/blob/d219536bb9fd/pkg/validation/strfmt/LICENSE, Apache-2.0 -k8s.io/kube-openapi, https://github.com/kubernetes/kube-openapi/blob/d219536bb9fd/pkg/validation/validate/LICENSE, Apache-2.0 +k8s.io/klog/v2, https://github.com/kubernetes/klog/blob/v2.8.0/LICENSE, Apache-2.0 +k8s.io/kube-openapi, https://github.com/kubernetes/kube-openapi/blob/591a79e4bda7/LICENSE, Apache-2.0 +k8s.io/kube-openapi, https://github.com/kubernetes/kube-openapi/blob/591a79e4bda7/pkg/validation/errors/LICENSE, Apache-2.0 +k8s.io/kube-openapi, https://github.com/kubernetes/kube-openapi/blob/591a79e4bda7/pkg/validation/spec/LICENSE, Apache-2.0 +k8s.io/kube-openapi, https://github.com/kubernetes/kube-openapi/blob/591a79e4bda7/pkg/validation/strfmt/LICENSE, Apache-2.0 +k8s.io/kube-openapi, https://github.com/kubernetes/kube-openapi/blob/591a79e4bda7/pkg/validation/validate/LICENSE, Apache-2.0 k8s.io/utils, https://github.com/kubernetes/utils/blob/67b214c5f920/LICENSE, Apache-2.0 k8s.io/utils, https://github.com/kubernetes/utils/blob/67b214c5f920/inotify/LICENSE, BSD-3-Clause k8s.io/utils, https://github.com/kubernetes/utils/blob/67b214c5f920/third_party/forked/golang/LICENSE, BSD-3-Clause sigs.k8s.io/controller-runtime, https://github.com/kubernetes-sigs/controller-runtime/blob/v0.7.0/LICENSE, Apache-2.0 -sigs.k8s.io/structured-merge-diff/v4, https://github.com/kubernetes-sigs/structured-merge-diff/blob/v4.0.2/LICENSE, Apache-2.0 +sigs.k8s.io/structured-merge-diff/v4, https://github.com/kubernetes-sigs/structured-merge-diff/blob/v4.1.0/LICENSE, Apache-2.0 sigs.k8s.io/yaml, 
https://github.com/kubernetes-sigs/yaml/blob/v1.2.0/LICENSE, MIT / BSD-3-Clause diff --git a/go-licenses.yaml b/go-licenses.yaml index 9d0a8b042dd..dd3127a57f2 100644 --- a/go-licenses.yaml +++ b/go-licenses.yaml @@ -102,7 +102,7 @@ module: # We do not use UI code here. - third_party/swagger-ui/lib - name: cloud.google.com/go - version: v0.55.0 + version: v0.72.0 license: path: LICENSE spdxId: Apache-2.0 @@ -113,6 +113,10 @@ module: spdxId: BSD-2-Clause lineStart: 1 lineEnd: 43 + - path: third_party/pkgsite + license: + path: LICENSE + spdxId: BSD-3-Clause - name: cloud.google.com/go/storage version: v1.12.0 license: diff --git a/go.mod b/go.mod index 69e68931501..4b4021aaa1a 100644 --- a/go.mod +++ b/go.mod @@ -5,7 +5,6 @@ require ( github.com/VividCortex/mysqlerr v0.0.0-20170204212430-6c6b55f8796f github.com/argoproj/argo-workflows/v3 v3.1.14 github.com/cenkalti/backoff v2.2.1+incompatible - github.com/denisenkom/go-mssqldb v0.0.0-20181014144952-4e0d7dc8888f // indirect github.com/eapache/go-resiliency v1.2.0 github.com/elazarl/goproxy v0.0.0-20181111060418-2ce16c963a8a // indirect github.com/erikstmartin/go-testdb v0.0.0-20160219214506-8d10e4a1bae5 // indirect @@ -28,7 +27,8 @@ require ( github.com/jinzhu/gorm v1.9.1 github.com/jinzhu/inflection v0.0.0-20180308033659-04140366298a // indirect github.com/jinzhu/now v0.0.0-20181116074157-8ec929ed50c3 // indirect - github.com/kubeflow/pipelines/api v0.0.0-20211013231727-1e2af8379f62 + github.com/kubeflow/pipelines/api v0.0.0-20211026071850-2e3fb5efff56 + github.com/kubeflow/pipelines/v2 v2.0.0-20211026071850-2e3fb5efff56 github.com/lestrrat-go/strftime v1.0.4 github.com/mattn/go-sqlite3 v1.9.0 github.com/minio/minio-go v6.0.14+incompatible @@ -40,14 +40,14 @@ require ( github.com/spf13/viper v1.7.0 github.com/stretchr/testify v1.7.0 golang.org/x/net v0.0.0-20210326060303-6b1517762897 - google.golang.org/genproto v0.0.0-20200806141610-86f49bd18e98 - google.golang.org/grpc v1.34.0 + google.golang.org/genproto v0.0.0-20201203001206-6486ece9c497 + google.golang.org/grpc v1.36.0 google.golang.org/grpc/cmd/protoc-gen-go-grpc v1.1.0 google.golang.org/protobuf v1.27.1 honnef.co/go/tools v0.2.0 // indirect - k8s.io/api v0.19.6 - k8s.io/apimachinery v0.19.6 - k8s.io/client-go v0.19.6 + k8s.io/api v0.20.4 + k8s.io/apimachinery v0.21.2 + k8s.io/client-go v0.20.4 k8s.io/code-generator v0.19.6 k8s.io/kubernetes v0.17.9 sigs.k8s.io/controller-runtime v0.7.0 diff --git a/go.sum b/go.sum index 386336ab954..ef61c586c7a 100644 --- a/go.sum +++ b/go.sum @@ -1,6 +1,8 @@ +bazil.org/fuse v0.0.0-20180421153158-65cc252bf669/go.mod h1:Xbm+BRKSBEpa4q4hTSxohYNQpsxXPbPry4JJWOB3LB8= cloud.google.com/go v0.26.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw= cloud.google.com/go v0.34.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw= cloud.google.com/go v0.38.0/go.mod h1:990N+gfupTy94rShfmMCWGDn0LpTmnzTp2qbd1dvSRU= +cloud.google.com/go v0.39.0/go.mod h1:rVLT6fkc8chs9sfPtFc1SBH6em7n+ZoXaG+87tDISts= cloud.google.com/go v0.44.1/go.mod h1:iSa0KzasP4Uvy3f1mN/7PiObzGgflwredwwASm/v6AU= cloud.google.com/go v0.44.2/go.mod h1:60680Gw3Yr4ikxnPRS/oxxkBccT6SA1yMk63TGekxKY= cloud.google.com/go v0.45.1/go.mod h1:RpBamKRgapWJb87xiFSdk4g1CME7QZg3uwTez+TSTjc= @@ -9,30 +11,58 @@ cloud.google.com/go v0.50.0/go.mod h1:r9sluTvynVuxRIOHXQEHMFffphuXHOMZMycpNR5e6T cloud.google.com/go v0.51.0/go.mod h1:hWtGJ6gnXH+KgDv+V0zFGDvpi07n3z8ZNj3T1RW0Gcw= cloud.google.com/go v0.52.0/go.mod h1:pXajvRH/6o3+F9jDHZWQ5PbGhn+o8w9qiu/CffaVdO4= cloud.google.com/go v0.53.0/go.mod 
h1:fp/UouUEsRkN6ryDKNW/Upv/JBKnv6WDthjR6+vze6M= +cloud.google.com/go v0.54.0/go.mod h1:1rq2OEkV3YMf6n/9ZvGWI3GWw0VoqH/1x2nd8Is/bPc= cloud.google.com/go v0.55.0 h1:eoz/lYxKSL4CNAiaUJ0ZfD1J3bfMYbU5B3rwM1C1EIU= cloud.google.com/go v0.55.0/go.mod h1:ZHmoY+/lIMNkN2+fBmuTiqZ4inFhvQad8ft7MT8IV5Y= +cloud.google.com/go v0.56.0/go.mod h1:jr7tqZxxKOVYizybht9+26Z/gUq7tiRzu+ACVAMbKVk= +cloud.google.com/go v0.57.0/go.mod h1:oXiQ6Rzq3RAkkY7N6t3TcE6jE+CIBBbA36lwQ1JyzZs= +cloud.google.com/go v0.62.0/go.mod h1:jmCYTdRCQuc1PHIIJ/maLInMho30T/Y0M4hTdTShOYc= +cloud.google.com/go v0.65.0/go.mod h1:O5N8zS7uWy9vkA9vayVHs65eM1ubvY4h553ofrNHObY= +cloud.google.com/go v0.66.0/go.mod h1:dgqGAjKCDxyhGTtC9dAREQGUJpkceNm1yt590Qno0Ko= +cloud.google.com/go v0.72.0 h1:eWRCuwubtDrCJG0oSUMgnsbD4CmPFQF2ei4OFbXvwww= +cloud.google.com/go v0.72.0/go.mod h1:M+5Vjvlc2wnp6tjzE102Dw08nGShTscUx2nZMufOKPI= cloud.google.com/go/bigquery v1.0.1/go.mod h1:i/xbL2UlR5RvWAURpBYZTtm/cXjCha9lbfbpx4poX+o= cloud.google.com/go/bigquery v1.3.0/go.mod h1:PjpwJnslEMmckchkHFfq+HTD2DmtT67aNFKH1/VBDHE= cloud.google.com/go/bigquery v1.4.0/go.mod h1:S8dzgnTigyfTmLBfrtrhyYhwRxG72rYxvftPBK2Dvzc= +cloud.google.com/go/bigquery v1.5.0/go.mod h1:snEHRnqQbz117VIFhE8bmtwIDY80NLUZUMb4Nv6dBIg= +cloud.google.com/go/bigquery v1.7.0/go.mod h1://okPTzCYNXSlb24MZs83e2Do+h+VXtc4gLoIoXIAPc= +cloud.google.com/go/bigquery v1.8.0/go.mod h1:J5hqkt3O0uAFnINi6JXValWIb1v0goeZM77hZzJN/fQ= cloud.google.com/go/datastore v1.0.0/go.mod h1:LXYbyblFSglQ5pkeyhO+Qmw7ukd3C+pD7TKLgZqpHYE= cloud.google.com/go/datastore v1.1.0/go.mod h1:umbIZjpQpHh4hmRpGhH4tLFup+FVzqBi1b3c64qFpCk= cloud.google.com/go/firestore v1.1.0/go.mod h1:ulACoGHTpvq5r8rxGJ4ddJZBZqakUQqClKRT5SZwBmk= +cloud.google.com/go/firestore v1.4.0/go.mod h1:NjjGEnxCS3CAKYp+vmALu20QzcqasGodQp48WxJGAYc= cloud.google.com/go/pubsub v1.0.1/go.mod h1:R0Gpsv3s54REJCy4fxDixWD93lHJMoZTyQ2kNxGRt3I= cloud.google.com/go/pubsub v1.1.0/go.mod h1:EwwdRX2sKPjnvnqCa270oGRyludottCI76h+R3AArQw= cloud.google.com/go/pubsub v1.2.0/go.mod h1:jhfEVHT8odbXTkndysNHCcx0awwzvfOlguIAii9o8iA= +cloud.google.com/go/pubsub v1.3.1/go.mod h1:i+ucay31+CNRpDW4Lu78I4xXG+O1r/MAHgjpRVR+TSU= +cloud.google.com/go/pubsub v1.9.0/go.mod h1:G3o6/kJvEMIEAN5urdkaP4be49WQsjNiykBIto9LFtY= cloud.google.com/go/storage v1.0.0/go.mod h1:IhtSnM/ZTZV8YYJWCY8RULGVqBDmpoyjwiyrjsg+URw= cloud.google.com/go/storage v1.5.0/go.mod h1:tpKbwo567HUNpVclU5sGELwQWBDZ8gh0ZeosJ0Rtdos= cloud.google.com/go/storage v1.6.0/go.mod h1:N7U0C8pVQ/+NIKOBQyamJIeKQKkZ+mxpohlUTyfDhBk= +cloud.google.com/go/storage v1.8.0/go.mod h1:Wv1Oy7z6Yz3DshWRJFhqM/UCfaWIRTdp0RXyy7KQOVs= +cloud.google.com/go/storage v1.10.0/go.mod h1:FLPqc6j+Ki4BU591ie1oL6qBQGu2Bl/tZ9ullr3+Kg0= +cloud.google.com/go/storage v1.12.0/go.mod h1:fFLk2dp2oAhDz8QFKwqrjdJvxSp/W2g7nillojlL5Ho= +contrib.go.opencensus.io/exporter/aws v0.0.0-20200617204711-c478e41e60e9/go.mod h1:uu1P0UCM/6RbsMrgPa98ll8ZcHM858i/AD06a9aLRCA= +contrib.go.opencensus.io/exporter/stackdriver v0.13.4/go.mod h1:aXENhDJ1Y4lIg4EUaVTwzvYETVNZk10Pu26tevFKLUc= +contrib.go.opencensus.io/integrations/ocsql v0.1.7/go.mod h1:8DsSdjz3F+APR+0z0WkU1aRorQCFfRxvqjUUPMbF3fE= dmitri.shuralyov.com/gpu/mtl v0.0.0-20190408044501-666a987793e9/go.mod h1:H6x//7gZCb22OMCxBHrMx7a5I7Hp++hsVxbQ4BYO7hU= +github.com/Azure/azure-amqp-common-go/v3 v3.0.0/go.mod h1:SY08giD/XbhTz07tJdpw1SoxQXHPN30+DI3Z04SYqyg= github.com/Azure/azure-amqp-common-go/v3 v3.0.1/go.mod h1:PBIGdzcO1teYoufTKMcGibdKaYZv4avS+O6LNIp8bq0= github.com/Azure/azure-amqp-common-go/v3 v3.1.0/go.mod 
h1:PBIGdzcO1teYoufTKMcGibdKaYZv4avS+O6LNIp8bq0= +github.com/Azure/azure-event-hubs-go/v3 v3.3.0/go.mod h1:LSZw8Q6j0iylRjGk4g9BPd+FzS35+Eff5gvs+t37iOM= github.com/Azure/azure-event-hubs-go/v3 v3.3.7/go.mod h1:sszMsQpFy8Au2s2NColbnJY8lRVm1koW0XxBJ3rN5TY= github.com/Azure/azure-pipeline-go v0.1.8/go.mod h1:XA1kFWRVhSK+KNFiOhfv83Fv8L9achrP7OxIzeTn1Yg= github.com/Azure/azure-pipeline-go v0.1.9/go.mod h1:XA1kFWRVhSK+KNFiOhfv83Fv8L9achrP7OxIzeTn1Yg= +github.com/Azure/azure-pipeline-go v0.2.3/go.mod h1:x841ezTBIMG6O3lAcl8ATHnsOPVl2bqk7S3ta6S6u4k= github.com/Azure/azure-sdk-for-go v37.1.0+incompatible/go.mod h1:9XXNKU+eRnpl9moKnB4QOLf1HestfXbmab5FXxiDBjc= +github.com/Azure/azure-sdk-for-go v43.0.0+incompatible/go.mod h1:9XXNKU+eRnpl9moKnB4QOLf1HestfXbmab5FXxiDBjc= +github.com/Azure/azure-sdk-for-go v49.0.0+incompatible/go.mod h1:9XXNKU+eRnpl9moKnB4QOLf1HestfXbmab5FXxiDBjc= github.com/Azure/azure-sdk-for-go v51.1.0+incompatible/go.mod h1:9XXNKU+eRnpl9moKnB4QOLf1HestfXbmab5FXxiDBjc= github.com/Azure/azure-sdk-for-go v52.6.0+incompatible/go.mod h1:9XXNKU+eRnpl9moKnB4QOLf1HestfXbmab5FXxiDBjc= +github.com/Azure/azure-service-bus-go v0.10.7/go.mod h1:o5z/3lDG1iT/T/G7vgIwIqVDTx9Qa2wndf5OdzSzpF8= github.com/Azure/azure-storage-blob-go v0.6.0/go.mod h1:oGfmITT1V6x//CswqY2gtAHND+xIP64/qL7a5QJix0Y= +github.com/Azure/azure-storage-blob-go v0.13.0/go.mod h1:pA9kNqtjUeQF2zOSu4s//nUdBD+e64lEuc4sVnuOfNs= +github.com/Azure/go-amqp v0.12.6/go.mod h1:qApuH6OFTSKZFmCOxccvAv5rLizBQf4v8pRmG138DPo= github.com/Azure/go-amqp v0.13.0/go.mod h1:qj+o8xPCz9tMSbQ83Vp8boHahuRDl5mkNHyt1xlxUTs= github.com/Azure/go-amqp v0.13.1/go.mod h1:qj+o8xPCz9tMSbQ83Vp8boHahuRDl5mkNHyt1xlxUTs= github.com/Azure/go-amqp v0.13.6/go.mod h1:wbpCKA8tR5MLgRyIu+bb+S6ECdIDdYJ0NlpFE9xsBPI= @@ -41,16 +71,26 @@ github.com/Azure/go-autorest v14.2.0+incompatible/go.mod h1:r+4oMnoxhatjLLJ6zxSW github.com/Azure/go-autorest/autorest v0.9.0/go.mod h1:xyHB1BMZT0cuDHU7I0+g046+BFDTQ8rEZB0s4Yfa6bI= github.com/Azure/go-autorest/autorest v0.9.3/go.mod h1:GsRuLYvwzLjjjRoWEIyMUaYq8GNUx2nRB378IPt/1p0= github.com/Azure/go-autorest/autorest v0.9.6/go.mod h1:/FALq9T/kS7b5J5qsQ+RSTUdAmGFqi0vUdVNNx8q630= +github.com/Azure/go-autorest/autorest v0.11.1/go.mod h1:JFgpikqFJ/MleTTxwepExTKnFUKKszPS8UavbQYUMuw= github.com/Azure/go-autorest/autorest v0.11.3/go.mod h1:JFgpikqFJ/MleTTxwepExTKnFUKKszPS8UavbQYUMuw= +github.com/Azure/go-autorest/autorest v0.11.7/go.mod h1:V6p3pKZx1KKkJubbxnDWrzNhEIfOy/pTGasLqzHIPHs= +github.com/Azure/go-autorest/autorest v0.11.9/go.mod h1:eipySxLmqSyC5s5k1CLupqet0PSENBEDP93LQ9a8QYw= +github.com/Azure/go-autorest/autorest v0.11.12/go.mod h1:eipySxLmqSyC5s5k1CLupqet0PSENBEDP93LQ9a8QYw= github.com/Azure/go-autorest/autorest v0.11.18/go.mod h1:dSiJPy22c3u0OtOKDNttNgqpNFY/GeWa7GH/Pz56QRA= github.com/Azure/go-autorest/autorest/adal v0.5.0/go.mod h1:8Z9fGy2MpX0PvDjB1pEgQTmVqjGhiHBW7RJJEciWzS0= github.com/Azure/go-autorest/autorest/adal v0.8.0/go.mod h1:Z6vX6WXXuyieHAXwMj0S6HY6e6wcHn37qQMBQlvY3lc= github.com/Azure/go-autorest/autorest/adal v0.8.1/go.mod h1:ZjhuQClTqx435SRJ2iMlOxPYt3d2C/T/7TiQCVZSn3Q= github.com/Azure/go-autorest/autorest/adal v0.8.2/go.mod h1:ZjhuQClTqx435SRJ2iMlOxPYt3d2C/T/7TiQCVZSn3Q= github.com/Azure/go-autorest/autorest/adal v0.9.0/go.mod h1:/c022QCutn2P7uY+/oQWWNcK9YU+MH96NgK+jErpbcg= +github.com/Azure/go-autorest/autorest/adal v0.9.2/go.mod h1:/3SMAM86bP6wC9Ev35peQDUeqFZBMH07vvUOmg4z/fE= +github.com/Azure/go-autorest/autorest/adal v0.9.4/go.mod h1:/3SMAM86bP6wC9Ev35peQDUeqFZBMH07vvUOmg4z/fE= +github.com/Azure/go-autorest/autorest/adal 
v0.9.5/go.mod h1:B7KF7jKIeC9Mct5spmyCB/A8CG/sEz1vwIRGv/bbw7A= +github.com/Azure/go-autorest/autorest/adal v0.9.6/go.mod h1:B7KF7jKIeC9Mct5spmyCB/A8CG/sEz1vwIRGv/bbw7A= github.com/Azure/go-autorest/autorest/adal v0.9.13/go.mod h1:W/MM4U6nLxnIskrw4UwWzlHfGjwUS50aOsc/I3yuU8M= github.com/Azure/go-autorest/autorest/azure/auth v0.4.2/go.mod h1:90gmfKdlmKgfjUpnCEpOJzsUEjrWDSLwHIG73tSXddM= +github.com/Azure/go-autorest/autorest/azure/auth v0.5.3/go.mod h1:4bJZhUhcq8LB20TruwHbAQsmUs2Xh+QR7utuJpLXX3A= github.com/Azure/go-autorest/autorest/azure/cli v0.3.1/go.mod h1:ZG5p860J94/0kI9mNJVoIoLgXcirM2gF5i2kWloofxw= +github.com/Azure/go-autorest/autorest/azure/cli v0.4.2/go.mod h1:7qkJkT+j6b+hIpzMOwPChJhTqS8VbsqqgULzMNRugoM= github.com/Azure/go-autorest/autorest/date v0.1.0/go.mod h1:plvfp3oPSKwf2DNjlBjWF/7vwR+cUD/ELuzDCXwHUVA= github.com/Azure/go-autorest/autorest/date v0.2.0/go.mod h1:vcORJHLJEh643/Ioh9+vPmf1Ij9AEBM5FuBIXLmIy0g= github.com/Azure/go-autorest/autorest/date v0.3.0/go.mod h1:BI0uouVdmngYNUzGWeSYnokU+TrmwEsOqdt8Y6sso74= @@ -62,6 +102,7 @@ github.com/Azure/go-autorest/autorest/mocks v0.4.1/go.mod h1:LTp+uSrOhSkaKrUy935 github.com/Azure/go-autorest/autorest/to v0.3.0/go.mod h1:MgwOyqaIuKdG4TL/2ywSsIWKAfJfgHDo8ObuUk3t5sA= github.com/Azure/go-autorest/autorest/to v0.4.0/go.mod h1:fE8iZBn7LQR7zH/9XU2NcPR4o9jEImooCeWJcYV/zLE= github.com/Azure/go-autorest/autorest/validation v0.2.0/go.mod h1:3EEqHnBxQGHXRYq3HT1WyXAvT7LLY3tl70hw6tQIbjI= +github.com/Azure/go-autorest/autorest/validation v0.3.0/go.mod h1:yhLgjC0Wda5DYXl6JAsWyUe4KVNffhoDhG0zVzUMo3E= github.com/Azure/go-autorest/autorest/validation v0.3.1/go.mod h1:yhLgjC0Wda5DYXl6JAsWyUe4KVNffhoDhG0zVzUMo3E= github.com/Azure/go-autorest/logger v0.1.0/go.mod h1:oExouG+K6PryycPJfVSxi/koC6LSNgds39diKLz7Vrc= github.com/Azure/go-autorest/logger v0.2.0/go.mod h1:T9E3cAhj2VqvPOtCYAvby9aBXkZmbF5NWuPV8+WeEW8= @@ -73,6 +114,7 @@ github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03 github.com/BurntSushi/xgb v0.0.0-20160522181843-27f122750802/go.mod h1:IVnqGOEym/WlBOVXweHU+Q+/VP0lqqI8lqeDx9IjBqo= github.com/DATA-DOG/go-sqlmock v1.3.3/go.mod h1:f/Ixk793poVmq4qj/V1dPUg2JEAKC73Q5eFN3EC/SaM= github.com/DataDog/datadog-go v2.2.0+incompatible/go.mod h1:LButxg5PwREeZtORoXG3tL4fMGNddJ+vMq1mwgfaqoQ= +github.com/GoogleCloudPlatform/cloudsql-proxy v1.19.1/go.mod h1:+yYmuKqcBVkgRePGpUhTA9OEg0XsnFE96eZ6nJ2yCQM= github.com/Knetic/govaluate v3.0.1-0.20171022003610-9aa49832a739+incompatible/go.mod h1:r7JcOSlj0wfOMncg0iLm8Leh48TZaKVeNIfJntJ2wa0= github.com/Masterminds/goutils v1.1.0 h1:zukEsf/1JZwCMgHiK3GZftabmxiCw4apj3a28RPBiVg= github.com/Masterminds/goutils v1.1.0/go.mod h1:8cTjp+g8YejhMuvIA5y2vz3BpJxksy863GQaJW2MFNU= @@ -126,9 +168,12 @@ github.com/apache/thrift v0.12.0/go.mod h1:cp2SuWMxlEZw2r+iP2GNCdIi4C1qmUzdZFSVb github.com/apache/thrift v0.13.0/go.mod h1:cp2SuWMxlEZw2r+iP2GNCdIi4C1qmUzdZFSVb+bacwQ= github.com/ardielle/ardielle-go v1.5.2/go.mod h1:I4hy1n795cUhaVt/ojz83SNVCYIGsAFAONtv2Dr7HUI= github.com/ardielle/ardielle-tools v1.5.4/go.mod h1:oZN+JRMnqGiIhrzkRN9l26Cej9dEx4jeNG6A+AdkShk= +github.com/argoproj/argo-events v1.2.0/go.mod h1:eY+egQNBLXAz/AF4mqgHsMMa4Aur7frHjUfBg+RpX04= github.com/argoproj/argo-events v1.4.0/go.mod h1:wI5A0U3Wj9ZvfPn3ioL18Dz29+7aibtlyU9pS0Ry+bg= +github.com/argoproj/argo-workflows/v3 v3.1.1/go.mod h1:Z8Wc7uDOGw8TRdhqqREHLFE5SAgS0ENqqwaLakv56MU= github.com/argoproj/argo-workflows/v3 v3.1.14 h1:JTcCK2el7sTWfvbDJw+hcZ/1sCa5igPq6AxIodv7egw= github.com/argoproj/argo-workflows/v3 v3.1.14/go.mod 
h1:AOj9yCLSNPCCxEF/PT+0dMZCDBDWIGX6EL6PPvqTyMc= +github.com/argoproj/pkg v0.8.1/go.mod h1:ra+bQPmbVAoEL+gYSKesuigt4m49i3Qa3mE/xQcjCiA= github.com/argoproj/pkg v0.9.0/go.mod h1:ra+bQPmbVAoEL+gYSKesuigt4m49i3Qa3mE/xQcjCiA= github.com/argoproj/pkg v0.10.1 h1:B7y7IqEFKNaNGg82U0COeVe/V5uj4Dum027yFe5DxRU= github.com/argoproj/pkg v0.10.1/go.mod h1:ra+bQPmbVAoEL+gYSKesuigt4m49i3Qa3mE/xQcjCiA= @@ -147,9 +192,13 @@ github.com/asaskevich/govalidator v0.0.0-20200907205600-7a23bdc65eef h1:46PFijGL github.com/asaskevich/govalidator v0.0.0-20200907205600-7a23bdc65eef/go.mod h1:WaHUgvxTVq04UNunO+XhnAqY/wQc+bxr74GqbsZ/Jqw= github.com/awalterschulze/gographviz v0.0.0-20200901124122-0eecad45bd71/go.mod h1:/ynarkO/43wP/JM2Okn61e8WFMtdbtA8he7GJxW+SFM= github.com/aws/aws-lambda-go v1.13.3/go.mod h1:4UKl9IzQMoD+QF79YdCuzCwp8VbmG4VAQwij/eHl5CU= +github.com/aws/aws-sdk-go v1.15.27/go.mod h1:mFuSZ37Z9YOHbQEwBWztmVzqXrEkub65tZoCYDt7FT0= +github.com/aws/aws-sdk-go v1.23.20/go.mod h1:KmX6BPdI08NWTb3/sm4ZGu5ShLoqVDhKgpiN924inxo= github.com/aws/aws-sdk-go v1.27.0/go.mod h1:KmX6BPdI08NWTb3/sm4ZGu5ShLoqVDhKgpiN924inxo= +github.com/aws/aws-sdk-go v1.30.7/go.mod h1:5zCpMtNQVjRREroY7sYe8lOMRSxkhG6MZveU8YkpAk0= github.com/aws/aws-sdk-go v1.33.16/go.mod h1:5zCpMtNQVjRREroY7sYe8lOMRSxkhG6MZveU8YkpAk0= github.com/aws/aws-sdk-go v1.34.28/go.mod h1:H7NKnBqNVzoTJpGfLrQkkD+ytBA93eiDYi/+8rV9s48= +github.com/aws/aws-sdk-go v1.36.1/go.mod h1:hcU610XS61/+aQV88ixoOzUoG7v3b31pl2zKMmprdro= github.com/aws/aws-sdk-go-v2 v0.18.0/go.mod h1:JWVYvqSMppoMJC0x5wdwiImzgXTI9FuZwxzkQq9wy+g= github.com/baiyubin/aliyun-sts-go-sdk v0.0.0-20180326062324-cfa1a18b161f/go.mod h1:AuiFmCCPBSrqvVMvuqFuk0qogytodnVFVSN5CeJB8Gc= github.com/beefsack/go-rate v0.0.0-20180408011153-efa7637bb9b6/go.mod h1:6YNgTHLutezwnBvyneBbwvB8C82y3dcoOj5EQJIdGXA= @@ -169,6 +218,7 @@ github.com/casbin/casbin/v2 v2.1.2/go.mod h1:YcPU1XXisHhLzuxH9coDNf2FbKpjGlbCg3n github.com/cenkalti/backoff v2.2.1+incompatible h1:tNowT99t7UNflLxfYYSlKYsBpXdEet03Pg2g16Swow4= github.com/cenkalti/backoff v2.2.1+incompatible/go.mod h1:90ReRw6GdpyfrHakVjL/QHaoyV4aDUVVkXQJJJ3NXXM= github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU= +github.com/census-instrumentation/opencensus-proto v0.3.0/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU= github.com/cespare/xxhash v1.1.0 h1:a6HrQnmkObjyL+Gs60czilIUGqrzKutQD6XZog3p+ko= github.com/cespare/xxhash v1.1.0/go.mod h1:XrSqR1VqqWfGrhpAt58auRo0WTKS1nRRg3ghfAqPWnc= github.com/cespare/xxhash/v2 v2.1.1 h1:6MnRN8NT7+YBpUIWxHtefFZOKTAPgGjpQSxqLNn0+qY= @@ -184,6 +234,7 @@ github.com/cloudevents/sdk-go/v2 v2.1.0/go.mod h1:3CTrpB4+u7Iaj6fd7E2Xvm5IxMdRoa github.com/cloudfoundry/jibber_jabber v0.0.0-20151120183258-bcc4c8345a21/go.mod h1:po7NpZ/QiTKzBKyrsEAxwnTamCoh8uDk/egRpQ7siIc= github.com/cncf/udpa/go v0.0.0-20191209042840-269d4d468f6f/go.mod h1:M8M6+tZqaGXZJjfX53e64911xZQV5JYwmTeXPW+k8Sc= github.com/cncf/udpa/go v0.0.0-20200629203442-efcf912fb354/go.mod h1:WmhPx2Nbnhtbo57+VJT5O0JRkEi1Wbu0z5j0R8u5Hbk= +github.com/cncf/udpa/go v0.0.0-20201120205902-5459f2c99403/go.mod h1:WmhPx2Nbnhtbo57+VJT5O0JRkEi1Wbu0z5j0R8u5Hbk= github.com/cockroachdb/datadriven v0.0.0-20190809214429-80d97fb3cbaa/go.mod h1:zn76sxSg3SzpJ0PPJaLDCu+Bu0Lg3sKTORVIj19EIF8= github.com/codahale/hdrhistogram v0.0.0-20161010025455-3a0bb77429bd/go.mod h1:sE/e/2PUdi/liOCUjSTXgM1o87ZssimdTWN964YiIeI= github.com/colinmarc/hdfs v1.1.4-0.20180802165501-48eb8d6c34a9/go.mod h1:0DumPviB681UcSuJErAbDIOx6SIaJWj463TymfZG02I= @@ -213,12 +264,13 @@ 
github.com/davecgh/go-spew v0.0.0-20161028175848-04cdfd42973b/go.mod h1:J7Y8YcW2 github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= -github.com/denisenkom/go-mssqldb v0.0.0-20181014144952-4e0d7dc8888f h1:WH0w/R4Yoey+04HhFxqZ6VX6I0d7RMyw5aXQ9UTvQPs= -github.com/denisenkom/go-mssqldb v0.0.0-20181014144952-4e0d7dc8888f/go.mod h1:xN/JuLBIz4bjkxNmByTiV1IbhfnYb6oo99phBn4Eqhc= +github.com/denisenkom/go-mssqldb v0.9.0 h1:RSohk2RsiZqLZ0zCjtfn3S4Gp4exhpBWHyQ7D0yGjAk= +github.com/denisenkom/go-mssqldb v0.9.0/go.mod h1:xbL0rPBG9cCiLr28tMa8zpbdarY27NDyej4t/EjAShU= github.com/devigned/tab v0.1.1/go.mod h1:XG9mPq0dFghrYvoBF3xdRrJzSTX1b7IQrvaL9mzjeJY= github.com/dgrijalva/jwt-go v3.2.0+incompatible/go.mod h1:E3ru+11k8xSBh+hMPgOLZmtrrCbhqsmaPHjLKYnJCaQ= github.com/dgryski/go-sip13 v0.0.0-20181026042036-e10d5fee7954/go.mod h1:vAd38F8PWV+bWy6jNmig1y/TA+kYO4g3RSRF0IAv0no= github.com/dimchansky/utfbom v1.1.0/go.mod h1:rO41eb7gLfo8SF1jd9F8HplJm1Fewwi4mQvIirEdv+8= +github.com/dimchansky/utfbom v1.1.1/go.mod h1:SxdoEBH5qIqFocHMyGOXVAybYJdr71b1Q/j0mACtrfE= github.com/dimfeld/httptreemux v5.0.1+incompatible/go.mod h1:rbUlSV+CCpv/SuqUTP/8Bk2O3LyUV436/yaRGkhP6Z0= github.com/docker/docker v0.7.3-0.20190327010347-be7ac8be2ae0/go.mod h1:eEKB0N0r5NX/I1kEveEz05bcu8tLC/8azJZsviup8Sk= github.com/docker/go-units v0.3.3/go.mod h1:fgPhTUdO+D/Jk86RDLlptpiXQzgHJF7gydDDbaIK4Dk= @@ -254,6 +306,7 @@ github.com/envoyproxy/go-control-plane v0.9.0/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymF github.com/envoyproxy/go-control-plane v0.9.1-0.20191026205805-5f8ba28d4473/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4= github.com/envoyproxy/go-control-plane v0.9.4/go.mod h1:6rpuAdCZL397s3pYoYcLgu1mIlRU8Am5FuJP05cCM98= github.com/envoyproxy/go-control-plane v0.9.7/go.mod h1:cwu0lG7PUMfa9snN8LXBig5ynNVH9qI8YYLbd1fK2po= +github.com/envoyproxy/go-control-plane v0.9.9-0.20201210154907-fd9021fe5dad/go.mod h1:cXg6YxExXjJnVBQHBLXeUAgxn2UodCpnH306RInaBQk= github.com/envoyproxy/protoc-gen-validate v0.1.0/go.mod h1:iSmxcyjqTsJpI2R4NaDN7+kN2VEUnK/pcBlmesArF7c= github.com/erikstmartin/go-testdb v0.0.0-20160219214506-8d10e4a1bae5 h1:Yzb9+7DPaBjB8zlTR87/ElzFsnQfuHnVUVqpZZIcV5Y= github.com/erikstmartin/go-testdb v0.0.0-20160219214506-8d10e4a1bae5/go.mod h1:a2zkGnVExMxdzMo3M0Hi/3sEU+cWnZpSni0O6/Yb/P0= @@ -283,6 +336,8 @@ github.com/ghodss/yaml v0.0.0-20150909031657-73d445a93680/go.mod h1:4dBDuWmgqj2H github.com/ghodss/yaml v1.0.0/go.mod h1:4dBDuWmgqj2HViK6kFavaiC9ZROes6MMH2rRYeMEF04= github.com/ghodss/yaml v1.0.1-0.20190212211648-25d852aebe32 h1:Mn26/9ZMNWSw9C9ERFA1PUxfmGpolnw2v0bKOREu5ew= github.com/ghodss/yaml v1.0.1-0.20190212211648-25d852aebe32/go.mod h1:GIjDIg/heH5DOkXY3YJ/wNhfHsQHoXGjl8G8amsYQ1I= +github.com/gin-contrib/sse v0.1.0/go.mod h1:RHrZQHXnP2xjPF+u1gW/2HnVO7nvIa9PG3Gm+fLHvGI= +github.com/gin-gonic/gin v1.6.3/go.mod h1:75u5sXoLsGZoRN5Sgbi1eraJ4GU3++wFwWzhwvtwp4M= github.com/gizak/termui/v3 v3.1.0/go.mod h1:bXQEBkJpzxUAKf0+xq9MSWAvWZlE7c+aidmyFlkYTrY= github.com/gliderlabs/ssh v0.2.2/go.mod h1:U7qILu1NlMHj9FlMhZLlkCdDnU1DBEAqr0aevW3Awn0= github.com/globalsign/mgo v0.0.0-20180905125535-1ca0a4f7cbcb/go.mod h1:xkRDCp4j0OGD1HRkm4kmhM+pmpv3AKq5SU7GMg4oO/Q= @@ -295,6 +350,7 @@ github.com/go-git/go-git/v5 v5.3.0/go.mod h1:xdX4bWJ48aOrdhnl2XqHYstHbbp6+LFS4r4 github.com/go-gl/glfw v0.0.0-20190409004039-e6da0acd62b1/go.mod 
h1:vR7hzQXu2zJy9AVAgeJqvqgH9Q5CA+iKCZ2gyEVpxRU= github.com/go-gl/glfw/v3.3/glfw v0.0.0-20191125211704-12ad95a8df72/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8= github.com/go-gl/glfw/v3.3/glfw v0.0.0-20200222043503-6f7a984d4dc4/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8= +github.com/go-ini/ini v1.25.4/go.mod h1:ByCAeIL28uOIIG0E3PJtZPDL8WnHpFKFOtgjp+3Ies8= github.com/go-ini/ini v1.51.1 h1:/QG3cj23k5V8mOl4JnNzUNhc1kr/jzMiNsNuWKcx8gM= github.com/go-ini/ini v1.51.1/go.mod h1:ByCAeIL28uOIIG0E3PJtZPDL8WnHpFKFOtgjp+3Ies8= github.com/go-kit/kit v0.8.0/go.mod h1:xBxKIO96dXMWWy0MnWVtmwkA9/13aqxPnvrjFYMA2as= @@ -402,6 +458,10 @@ github.com/go-openapi/validate v0.19.12/go.mod h1:Rzou8hA/CBw8donlS6WNEUQupNvUZ0 github.com/go-openapi/validate v0.19.15/go.mod h1:tbn/fdOwYHgrhPBzidZfJC2MIVvs9GA7monOmWBbeCI= github.com/go-openapi/validate v0.20.1 h1:QGQ5CvK74E28t3DkegGweKR+auemUi5IdpMc4x3UW6s= github.com/go-openapi/validate v0.20.1/go.mod h1:b60iJT+xNNLfaQJUqLI7946tYiFEOuE9E4k54HpKcJ0= +github.com/go-playground/assert/v2 v2.0.1/go.mod h1:VDjEfimB/XKnb+ZQfWdccd7VUvScMdVu0Titje2rxJ4= +github.com/go-playground/locales v0.13.0/go.mod h1:taPMhCMXrRLJO55olJkUXHZBHCxTMfnGwq/HNwmWNS8= +github.com/go-playground/universal-translator v0.17.0/go.mod h1:UkSxE5sNxxRwHyU+Scu5vgOQjsIJAF8j9muTVoKLVtA= +github.com/go-playground/validator/v10 v10.2.0/go.mod h1:uOYAAleCW8F/7oMFd6aG0GOhaH6EGOAJShg8Id5JGkI= github.com/go-python/gpython v0.0.3/go.mod h1:bmk0l57W/7Cs67MMnz4U28SoYyvz5NTMYyJvUqytJhs= github.com/go-redis/redis v6.15.8+incompatible/go.mod h1:NAIEuMOZ/fxfXJIrKDQDz8wamY7mA7PouImQ2Jvg6kA= github.com/go-resty/resty/v2 v2.3.0/go.mod h1:UpN9CgLZNsv4e9XG50UU8xdI0F43UQ4HmxLBDwaroHU= @@ -439,6 +499,9 @@ github.com/gobuffalo/packr/v2 v2.0.9/go.mod h1:emmyGweYTm6Kdper+iywB6YK5YzuKchGt github.com/gobuffalo/packr/v2 v2.2.0/go.mod h1:CaAwI0GPIAv+5wKLtv8Afwl+Cm78K/I/VCm/3ptBN+0= github.com/gobuffalo/syncx v0.0.0-20190224160051-33c29581e754/go.mod h1:HhnNqWY95UYwwW3uSASeV7vtgYkT2t16hJgV3AEPUpw= github.com/gobwas/glob v0.2.4-0.20181002190808-e7a84e9525fe/go.mod h1:d3Ez4x06l9bZtSvzIay5+Yzi0fmZzPgnTbPcKjJAkT8= +github.com/gobwas/httphead v0.0.0-20180130184737-2c6c146eadee/go.mod h1:L0fX3K22YWvt/FAX9NnzrNzcI4wNYi9Yku4O0LKYflo= +github.com/gobwas/pool v0.2.0/go.mod h1:q8bcK0KcYlCgd9e7WYLm9LpyS+YeLd8JVDW6WezmKEw= +github.com/gobwas/ws v1.0.2/go.mod h1:szmBTxLgaFppYjEmNtny/v3w89xOydFnnZMcgRRu/EM= github.com/gogo/googleapis v1.1.0/go.mod h1:gf4bu3Q80BeJ6H1S1vYPm8/ELATdvryBaNFGgqEef3s= github.com/gogo/protobuf v1.1.1/go.mod h1:r8qH/GZQm5c6nD/R0oafs1akxWv10x8SbQlK7atdtwQ= github.com/gogo/protobuf v1.2.0/go.mod h1:r8qH/GZQm5c6nD/R0oafs1akxWv10x8SbQlK7atdtwQ= @@ -447,6 +510,8 @@ github.com/gogo/protobuf v1.2.2-0.20190723190241-65acae22fc9d/go.mod h1:SlYgWuQ5 github.com/gogo/protobuf v1.3.1/go.mod h1:SlYgWuQ5SjCEi6WLHjHCa1yvBfUnHcTbrrZtXPKa29o= github.com/gogo/protobuf v1.3.2 h1:Ov1cvc58UF3b5XjBnZv7+opcTcQFZebYjWzi34vdm4Q= github.com/gogo/protobuf v1.3.2/go.mod h1:P1XiOD3dCwIKUDQYPy72D8LYyHL2YPYrpS2s69NZV8Q= +github.com/golang-sql/civil v0.0.0-20190719163853-cb61b32ac6fe h1:lXe2qZdvpiX5WZkZR4hgp4KJVfY3nMkvmwbVkpv1rVY= +github.com/golang-sql/civil v0.0.0-20190719163853-cb61b32ac6fe/go.mod h1:8vg3r2VgvsThLBIFL93Qb5yWzgyZWhEmBwUJWevAkK0= github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b h1:VKtxabqXZkF25pY9ekfRL6a582T4P37/31XEstQ5p58= github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b/go.mod h1:SBH7ygxi8pfUlaOkMMuAQtPIUF8ecWP5IEl/CR7VP2Q= github.com/golang/groupcache v0.0.0-20160516000752-02826c3e7903/go.mod 
h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= @@ -459,13 +524,16 @@ github.com/golang/mock v1.1.1/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfb github.com/golang/mock v1.2.0/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A= github.com/golang/mock v1.3.1/go.mod h1:sBzyDLLjw3U8JLTeZvSv8jJB+tU5PVekmnlKIyFUx0Y= github.com/golang/mock v1.4.0/go.mod h1:UOMv5ysSaYNkG+OFQykRIcU/QvvxJf3p21QfJ2Bt3cw= +github.com/golang/mock v1.4.1/go.mod h1:UOMv5ysSaYNkG+OFQykRIcU/QvvxJf3p21QfJ2Bt3cw= github.com/golang/mock v1.4.3/go.mod h1:UOMv5ysSaYNkG+OFQykRIcU/QvvxJf3p21QfJ2Bt3cw= +github.com/golang/mock v1.4.4/go.mod h1:l3mdAwkq5BuhzHwde/uurv3sEJeZMXNpwsxVWU71h+4= github.com/golang/protobuf v0.0.0-20161109072736-4bd1920723d7/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= github.com/golang/protobuf v1.1.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= github.com/golang/protobuf v1.2.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= github.com/golang/protobuf v1.3.1/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= github.com/golang/protobuf v1.3.2/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= github.com/golang/protobuf v1.3.3/go.mod h1:vzj43D7+SQXF/4pzW/hwtAqwc6iTitCiVSaWz5lYuqw= +github.com/golang/protobuf v1.3.4/go.mod h1:vzj43D7+SQXF/4pzW/hwtAqwc6iTitCiVSaWz5lYuqw= github.com/golang/protobuf v1.3.5/go.mod h1:6O5/vntMXwX2lRkT1hjjk0nAC1IDOTvTlVgjlRvqsdk= github.com/golang/protobuf v1.4.0-rc.1/go.mod h1:ceaxUfeHdC40wWswd/P6IGgMaK3YpKi5j83Wpe3EHw8= github.com/golang/protobuf v1.4.0-rc.1.0.20200221234624-67d41d38c208/go.mod h1:xKAWHe0F5eneWXFV3EuXVDTCmh+JuBKY0li0aMyXATA= @@ -487,28 +555,41 @@ github.com/google/go-cmp v0.2.0/go.mod h1:oXzfMopK8JAjlY9xF4vHSVASa0yLyX7SntLO5a github.com/google/go-cmp v0.3.0/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU= github.com/google/go-cmp v0.3.1/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU= github.com/google/go-cmp v0.4.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.4.1/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.5.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.5.1/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.5.2/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.5.4/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.5.5 h1:Khx7svrCpmxxtHBq5j2mp/xVjsi8hQMfNLvJFAlrGgU= github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-github/v31 v31.0.0/go.mod h1:NQPZol8/1sMoWYGN2yaALIBytu17gAWfhbweiEed3pM= github.com/google/go-querystring v1.0.0/go.mod h1:odCYkC5MyYFN7vkCjXpyrEuKhc/BUO6wN/zVPAxq5ck= +github.com/google/go-replayers/grpcreplay v1.0.0/go.mod h1:8Ig2Idjpr6gifRd6pNVggX6TC1Zw6Jx74AKp7QNH2QE= +github.com/google/go-replayers/httpreplay v0.1.2/go.mod h1:YKZViNhiGgqdBlUbI2MwGpq4pXxNmhJLPHQ7cv2b5no= github.com/google/gofuzz v0.0.0-20161122191042-44d81051d367/go.mod h1:HP5RmnzzSNb993RKQDq4+1A4ia9nllfqcQFTQJedwGI= github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg= github.com/google/gofuzz v1.1.0 h1:Hsa8mG0dQ46ij8Sl2AYJDUv1oA9/d6Vk+3LG99Oe02g= github.com/google/gofuzz v1.1.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg= github.com/google/martian v2.1.0+incompatible/go.mod h1:9I4somxYTbIHy5NJKHRl3wXiIaQGbYVAs8BPL6v8lEs= +github.com/google/martian v2.1.1-0.20190517191504-25dcb96d9e51+incompatible/go.mod 
h1:9I4somxYTbIHy5NJKHRl3wXiIaQGbYVAs8BPL6v8lEs= +github.com/google/martian/v3 v3.0.0/go.mod h1:y5Zk1BBys9G+gd6Jrk0W3cC1+ELVxBWuIGO+w/tUAp0= +github.com/google/martian/v3 v3.1.0/go.mod h1:y5Zk1BBys9G+gd6Jrk0W3cC1+ELVxBWuIGO+w/tUAp0= github.com/google/pprof v0.0.0-20181206194817-3ea8567a2e57/go.mod h1:zfwlbNMJ+OItoe0UupaVj+oy1omPYYDuagoSzA8v9mc= github.com/google/pprof v0.0.0-20190515194954-54271f7e092f/go.mod h1:zfwlbNMJ+OItoe0UupaVj+oy1omPYYDuagoSzA8v9mc= github.com/google/pprof v0.0.0-20191218002539-d4f498aebedc/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= github.com/google/pprof v0.0.0-20200212024743-f11f1df84d12/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= github.com/google/pprof v0.0.0-20200229191704-1ebb73c60ed3/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= +github.com/google/pprof v0.0.0-20200430221834-fc25d7d30c6d/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= +github.com/google/pprof v0.0.0-20200708004538-1a94d8640e99/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= +github.com/google/pprof v0.0.0-20200905233945-acf8798be1f7/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= +github.com/google/pprof v0.0.0-20201023163331-3e6fc7fc9c4c/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= github.com/google/renameio v0.1.0/go.mod h1:KWCgfxg9yswjAJkECMjeO8J8rahYeXnNhOm40UhjYkI= +github.com/google/subcommands v1.0.1/go.mod h1:ZjhPrFU+Olkh9WazFPsl27BQ4UPiG37m3yTrtFlrHVk= github.com/google/uuid v1.0.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= github.com/google/uuid v1.1.1/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= github.com/google/uuid v1.1.2 h1:EVhdT+1Kseyi1/pUmXKaFxYsDNy9RQYkMWRH68J/W7Y= github.com/google/uuid v1.1.2/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= +github.com/google/wire v0.4.0/go.mod h1:ngWDr9Qvq3yZA10YrxfyGELY/AFWGVpy9c1LTRi1EoU= github.com/googleapis/gax-go/v2 v2.0.4/go.mod h1:0Wqv26UfaUD9n4G6kQubkQ+KchISgw+vpHVxEJEs9eg= github.com/googleapis/gax-go/v2 v2.0.5/go.mod h1:DWXyrwAJ9X0FpwwEdw+IPEYBICEFu5mhpdKc/us6bOk= github.com/googleapis/gnostic v0.0.0-20170729233727-0c5108395e2d/go.mod h1:sJBsCZ4ayReDTBIg8b9dl28c5xFWyhBTVRp3pOg5EKY= @@ -531,12 +612,14 @@ github.com/gorilla/mux v1.8.0/go.mod h1:DVbg23sWSpFRCP0SfiEN6jmj59UnW/n46BH5rLB7 github.com/gorilla/websocket v0.0.0-20170926233335-4201258b820c/go.mod h1:E7qHFY5m1UJ88s3WnNqhKjPHQ0heANvMoAMk2YaljkQ= github.com/gorilla/websocket v1.0.0/go.mod h1:E7qHFY5m1UJ88s3WnNqhKjPHQ0heANvMoAMk2YaljkQ= github.com/gorilla/websocket v1.4.0/go.mod h1:E7qHFY5m1UJ88s3WnNqhKjPHQ0heANvMoAMk2YaljkQ= +github.com/gorilla/websocket v1.4.1/go.mod h1:YR8l580nyteQvAITg2hZ9XVh4b55+EU/adAjf1fMHhE= github.com/gorilla/websocket v1.4.2 h1:+/TMaTYc4QFitKJxsQ7Yye35DkWvkdLcvGKqM+x0Ufc= github.com/gorilla/websocket v1.4.2/go.mod h1:YR8l580nyteQvAITg2hZ9XVh4b55+EU/adAjf1fMHhE= github.com/gregjones/httpcache v0.0.0-20180305231024-9cad4c3443a7/go.mod h1:FecbI9+v66THATjSRHfNgh1IVFe/9kFxbXtjV0ctIMA= github.com/grpc-ecosystem/go-grpc-middleware v1.0.0/go.mod h1:FiyG127CGDf3tlThmgyCl78X/SZQqEOJBCDaAfeWzPs= github.com/grpc-ecosystem/go-grpc-middleware v1.0.1-0.20190118093823-f849b5445de4/go.mod h1:FiyG127CGDf3tlThmgyCl78X/SZQqEOJBCDaAfeWzPs= github.com/grpc-ecosystem/go-grpc-middleware v1.1.0/go.mod h1:f5nM7jw/oeRSadq3xCzHAvxcr8HZnzsqU6ILg/0NiiE= +github.com/grpc-ecosystem/go-grpc-middleware v1.3.0/go.mod h1:z0ButlSOZa5vEBq9m2m2hlwIgKw+rp3sdCBRoJY+30Y= github.com/grpc-ecosystem/go-grpc-prometheus v1.2.0/go.mod h1:8NvIoxWQoOIhqOTXgfV/d3M/q6VIi02HzZEHgUlZvzk= 
github.com/grpc-ecosystem/grpc-gateway v1.9.0/go.mod h1:vNeuVxBJEsws4ogUvrchl83t/GYV9WGTSLVdBhOQFDY= github.com/grpc-ecosystem/grpc-gateway v1.9.5/go.mod h1:vNeuVxBJEsws4ogUvrchl83t/GYV9WGTSLVdBhOQFDY= @@ -583,11 +666,13 @@ github.com/hashicorp/raft-boltdb v0.0.0-20171010151810-6e5ba93211ea/go.mod h1:pN github.com/hashicorp/serf v0.8.2/go.mod h1:6hOLApaqBFA1NXqRQAsxw9QxuDEvNxSQRwA/JwenrHc= github.com/hokaccha/go-prettyjson v0.0.0-20190818114111-108c894c2c0e/go.mod h1:pFlLw2CfqZiIBOx6BuCeRLCrfxBJipTY0nIOF/VbGcI= github.com/hpcloud/tail v1.0.0/go.mod h1:ab1qPbhIpdTxEkNHXyeSf5vhxWSCs/tWer42PpOxQnU= +github.com/huandu/xstrings v1.3.0/go.mod h1:y5/lhBue+AyNmUVz9RLU9xbLR0o4KIIExikq4ovT0aE= github.com/huandu/xstrings v1.3.1 h1:4jgBlKK6tLKFvO8u5pmYjG91cqytmDCDvGh7ECVFfFs= github.com/huandu/xstrings v1.3.1/go.mod h1:y5/lhBue+AyNmUVz9RLU9xbLR0o4KIIExikq4ovT0aE= github.com/hudl/fargo v1.3.0/go.mod h1:y3CKSmjA+wD2gak7sUSXTAoopbhU08POFhmITJgmKTg= github.com/iancoleman/strcase v0.1.1/go.mod h1:SK73tn/9oHe+/Y0h39VT4UCxmurVJkR5NA7kMEAOgSE= github.com/ianlancetaylor/demangle v0.0.0-20181102032728-5e5cf60278f6/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc= +github.com/ianlancetaylor/demangle v0.0.0-20200824232613-28f6c0f3b639/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc= github.com/imdario/mergo v0.3.5/go.mod h1:2EnlNZ0deacrJVfApfmtdGgDfMuh/nq6Ok1EcJh5FfA= github.com/imdario/mergo v0.3.10/go.mod h1:jmQim1M+e3UYxmgPu/WyfjB3N3VflVyUjjjwH0dnCYA= github.com/imdario/mergo v0.3.11/go.mod h1:jmQim1M+e3UYxmgPu/WyfjB3N3VflVyUjjjwH0dnCYA= @@ -611,6 +696,7 @@ github.com/jinzhu/inflection v0.0.0-20180308033659-04140366298a h1:eeaG9XMUvRBYX github.com/jinzhu/inflection v0.0.0-20180308033659-04140366298a/go.mod h1:h+uFLlag+Qp1Va5pdKtLDYj+kHp5pxUVkryuEj+Srlc= github.com/jinzhu/now v0.0.0-20181116074157-8ec929ed50c3 h1:xvj06l8iSwiWpYgm8MbPp+naBg+pwfqmdXabzqPCn/8= github.com/jinzhu/now v0.0.0-20181116074157-8ec929ed50c3/go.mod h1:oHTiXerJ20+SfYcrdlBO7rzZRJWGwSTQ0iUY2jI6Gfc= +github.com/jmespath/go-jmespath v0.0.0-20160202185014-0b12d6b521d8/go.mod h1:Nht3zPeWKUH0NzdCt2Blrr5ys8VGpn0CEB0cQHVjt7k= github.com/jmespath/go-jmespath v0.0.0-20180206201540-c2b33e8439af/go.mod h1:Nht3zPeWKUH0NzdCt2Blrr5ys8VGpn0CEB0cQHVjt7k= github.com/jmespath/go-jmespath v0.3.0/go.mod h1:9QtRXoHjLGCJ5IBSaohpXITPlowMeeYCZ7fLUTSywik= github.com/jmespath/go-jmespath v0.4.0/go.mod h1:T8mJZnbsbmF+m6zOOFylbeCJqk5+pHWvzYPziyZiYoo= @@ -626,6 +712,7 @@ github.com/json-iterator/go v0.0.0-20180612202835-f2b4162afba3/go.mod h1:+SdeFBv github.com/json-iterator/go v1.1.6/go.mod h1:+SdeFBvtyEkXs7REEP0seUULqWtbJapLOCVDaaPEHmU= github.com/json-iterator/go v1.1.7/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4= github.com/json-iterator/go v1.1.8/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4= +github.com/json-iterator/go v1.1.9/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4= github.com/json-iterator/go v1.1.10 h1:Kz6Cvnvv2wGdaG/V8yMvfkmNiXq9Ya2KUv4rouJJr68= github.com/json-iterator/go v1.1.10/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4= github.com/jstemmer/go-junit-report v0.0.0-20190106144839-af01ea7f8024/go.mod h1:6v2b51hI/fHJwM22ozAgKL4VKDeJcHhJFhtBdhmNjmU= @@ -648,7 +735,9 @@ github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+o github.com/klauspost/compress v1.8.2/go.mod h1:RyIbtBH6LamlWaDj8nUwkbUhJ87Yi3uG0guNDohfE1A= github.com/klauspost/compress v1.9.5/go.mod h1:RyIbtBH6LamlWaDj8nUwkbUhJ87Yi3uG0guNDohfE1A= github.com/klauspost/compress v1.9.8/go.mod 
h1:RyIbtBH6LamlWaDj8nUwkbUhJ87Yi3uG0guNDohfE1A= +github.com/klauspost/compress v1.10.3/go.mod h1:aoV0uJVorq1K+umq18yTdKaF57EivdYsUV+/s2qKfXs= github.com/klauspost/compress v1.10.8/go.mod h1:aoV0uJVorq1K+umq18yTdKaF57EivdYsUV+/s2qKfXs= +github.com/klauspost/compress v1.11.3/go.mod h1:aoV0uJVorq1K+umq18yTdKaF57EivdYsUV+/s2qKfXs= github.com/klauspost/compress v1.11.8/go.mod h1:aoV0uJVorq1K+umq18yTdKaF57EivdYsUV+/s2qKfXs= github.com/klauspost/compress v1.11.9 h1:5OCMOdde1TCT2sookEuVeEZzA8bmRSFV3AwPDZAG8AA= github.com/klauspost/compress v1.11.9/go.mod h1:aoV0uJVorq1K+umq18yTdKaF57EivdYsUV+/s2qKfXs= @@ -672,20 +761,26 @@ github.com/kr/pty v1.1.8/go.mod h1:O1sed60cT9XZ5uDucP5qwvh+TE3NnUj51EiZO/lmSfw= github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI= github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY= github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE= -github.com/kubeflow/pipelines/api v0.0.0-20211013231727-1e2af8379f62 h1:7teFwqme7fjyWYltBEM38vPQ8PNBhqoaNLG/B7qE4nA= -github.com/kubeflow/pipelines/api v0.0.0-20211013231727-1e2af8379f62/go.mod h1:ItI8RjFTt0RY6X0g6B3VocSaphuE+DNuNTzAY9NF8EY= +github.com/kubeflow/pipelines/api v0.0.0-20211020193552-20f28631517d/go.mod h1:ItI8RjFTt0RY6X0g6B3VocSaphuE+DNuNTzAY9NF8EY= +github.com/kubeflow/pipelines/api v0.0.0-20211026071850-2e3fb5efff56 h1:hzWxrPYGY6MHKkJKjpBFOAgBO1gjQAoM5C583D/wWxQ= +github.com/kubeflow/pipelines/api v0.0.0-20211026071850-2e3fb5efff56/go.mod h1:ItI8RjFTt0RY6X0g6B3VocSaphuE+DNuNTzAY9NF8EY= +github.com/kubeflow/pipelines/v2 v2.0.0-20211026071850-2e3fb5efff56 h1:e2cnrbByKrPQBcRFwy8qzRALSOqbPksgary85Bx0Tqc= +github.com/kubeflow/pipelines/v2 v2.0.0-20211026071850-2e3fb5efff56/go.mod h1:ytz5OLX4hTyYxBkd/ejS4H3ELqGgXAoJNjyPFEqOlE4= github.com/labstack/echo v3.2.1+incompatible/go.mod h1:0INS7j/VjnFxD4E2wkz67b8cVwCLbBmJyDaka6Cmk1s= github.com/labstack/gommon v0.2.7/go.mod h1:/tj9csK2iPSBvn+3NLM9e52usepMtrd5ilFYA+wQNJ4= github.com/lann/builder v0.0.0-20180802200727-47ae307949d0 h1:SOEGU9fKiNWd/HOJuq6+3iTQz8KNCLtVX6idSoTLdUw= github.com/lann/builder v0.0.0-20180802200727-47ae307949d0/go.mod h1:dXGbAdH5GtBTC4WfIxhKZfyBF/HBFgRZSWwZ9g/He9o= github.com/lann/ps v0.0.0-20150810152359-62de8c46ede0 h1:P6pPBnrTSX3DEVR4fDembhRWSsG5rVo6hYhAB/ADZrk= github.com/lann/ps v0.0.0-20150810152359-62de8c46ede0/go.mod h1:vmVJ0l/dxyfGW6FmdpVm2joNMFikkuWg0EoCKLGUMNw= +github.com/leodido/go-urn v1.2.0/go.mod h1:+8+nEpDfqqsY+g338gtMEUOtuK+4dEMhiQEgxpxOKII= github.com/lestrrat-go/envload v0.0.0-20180220234015-a3eb8ddeffcc h1:RKf14vYWi2ttpEmkA4aQ3j4u9dStX2t4M8UM6qqNsG8= github.com/lestrrat-go/envload v0.0.0-20180220234015-a3eb8ddeffcc/go.mod h1:kopuH9ugFRkIXf3YoqHKyrJ9YfUFsckUU9S7B+XP+is= github.com/lestrrat-go/strftime v1.0.4 h1:T1Rb9EPkAhgxKqbcMIPguPq8glqXTA1koF8n9BHElA8= github.com/lestrrat-go/strftime v1.0.4/go.mod h1:E1nN3pCbtMSu1yjSVeyuRFVm/U0xoR76fd03sz+Qz4g= github.com/lib/pq v1.3.0 h1:/qkRGz8zljWiDcFvgpwUpwIAPu3r07TDvs3Rws+o/pU= github.com/lib/pq v1.3.0/go.mod h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo= +github.com/lib/pq v1.9.0 h1:L8nSXQQzAYByakOFMTwpjRoHsMJklur4Gi59b6VivR8= +github.com/lib/pq v1.9.0/go.mod h1:AlVN5x4E4T544tWzH6hKfbfQvm3HdbOxrmggDNAPY9o= github.com/lightstep/lightstep-tracer-common/golang/gogo v0.0.0-20190605223551-bc2310a04743/go.mod h1:qklhhLq1aX+mtWk9cPHPzaBjWImj5ULL6C7HFJtXQMM= github.com/lightstep/lightstep-tracer-go v0.18.1/go.mod h1:jlF1pusYV4pidLvZ+XD0UBX0ZE6WURAspgAczcDHrL4= github.com/lightstep/tracecontext.go v0.0.0-20181129014701-1757c391b1ac/go.mod 
h1:Frd2bnT3w5FB5q49ENTfVlztJES+1k/7lyWX2+9gq/M= @@ -710,6 +805,7 @@ github.com/mattn/go-colorable v0.0.9/go.mod h1:9vuHe8Xs5qXnSaW/c/ABM9alt+Vo+STaO github.com/mattn/go-colorable v0.1.2/go.mod h1:U0ppj6V5qS13XJ6of8GYAs25YV2eR4EVcfRqFIhoBtE= github.com/mattn/go-colorable v0.1.4/go.mod h1:U0ppj6V5qS13XJ6of8GYAs25YV2eR4EVcfRqFIhoBtE= github.com/mattn/go-colorable v0.1.6/go.mod h1:u6P/XSegPjTcexA+o6vUJrdnUu04hMope9wVRipJSqc= +github.com/mattn/go-ieproxy v0.0.1/go.mod h1:pYabZ6IHcRpFh7vIaLfK7rdcWgFEb3SFJ6/gNWuh88E= github.com/mattn/go-isatty v0.0.3/go.mod h1:M+lRXTBqGeGNdLjl/ufCoiOlB5xdOkqRJdNxMWT7Zi4= github.com/mattn/go-isatty v0.0.4/go.mod h1:M+lRXTBqGeGNdLjl/ufCoiOlB5xdOkqRJdNxMWT7Zi4= github.com/mattn/go-isatty v0.0.8/go.mod h1:Iq45c/XA43vh69/j3iqttzPXn0bhXyGjM0Hdxcsrc5s= @@ -753,6 +849,8 @@ github.com/mitchellh/mapstructure v1.4.1/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RR github.com/mitchellh/reflectwalk v1.0.0/go.mod h1:mSTlrgnPZtwu0c4WaC2kGObEpuNDbx0jmZXqmk4esnw= github.com/mitchellh/reflectwalk v1.0.1 h1:FVzMWA5RllMAKIdUSC8mdWo3XtwoecrH79BY70sEEpE= github.com/mitchellh/reflectwalk v1.0.1/go.mod h1:mSTlrgnPZtwu0c4WaC2kGObEpuNDbx0jmZXqmk4esnw= +github.com/moby/spdystream v0.2.0 h1:cjW1zVyyoiM0T7b6UoySUFqzXMoqRckQtXwGPiBhOM8= +github.com/moby/spdystream v0.2.0/go.mod h1:f7i0iNDQJ059oMTcWxx8MA/zKFIuD/lY+0GqbN2Wy8c= github.com/moby/term v0.0.0-20200312100748-672ec06f55cd/go.mod h1:DdlQx2hp0Ss5/fLikoLlEeIYiATotOjgB//nb973jeo= github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd h1:TRLaZ9cD/w8PVh93nsPXa1VrQ6jlwL5oN8l14QlcNfg= @@ -980,11 +1078,13 @@ github.com/stripe/stripe-go v70.15.0+incompatible/go.mod h1:A1dQZmO/QypXmsL0T8ax github.com/subosito/gotenv v1.2.0 h1:Slr1R9HxAlEKefgq5jn9U+DnETlIUa6HfgEzj0g5d7s= github.com/subosito/gotenv v1.2.0/go.mod h1:N0PQaV/YGNqwC0u51sEeR/aUtSLEXKX9iv69rRypqCw= github.com/tidwall/gjson v1.6.0/go.mod h1:P256ACg0Mn+j1RXIDXoss50DeIABTYK1PULOJHhxOls= +github.com/tidwall/gjson v1.6.8/go.mod h1:zeFuBCIqD4sN/gmqBzZ4j7Jd6UcA2Fc56x7QFsv+8fI= github.com/tidwall/gjson v1.7.5/go.mod h1:5/xDoumyyDNerp2U36lyolv46b3uF/9Bu6OfyQ9GImk= github.com/tidwall/match v1.0.1/go.mod h1:LujAq0jyVjBy028G1WhWfIzbpQfMO8bBZ6Tyb0+pL9E= github.com/tidwall/match v1.0.3/go.mod h1:eRSPERbgtNPcGhD8UCthc6PmLEQXEWd3PRB5JTxsfmM= github.com/tidwall/pretty v1.0.0/go.mod h1:XNkn88O1ChpSDQmQeStsy+sBenx6DDtFZJxhVysOjyk= github.com/tidwall/pretty v1.0.1/go.mod h1:XNkn88O1ChpSDQmQeStsy+sBenx6DDtFZJxhVysOjyk= +github.com/tidwall/pretty v1.0.2/go.mod h1:XNkn88O1ChpSDQmQeStsy+sBenx6DDtFZJxhVysOjyk= github.com/tidwall/pretty v1.1.0 h1:K3hMW5epkdAVwibsQEfR/7Zj0Qgt4DxtNumTq/VloO8= github.com/tidwall/pretty v1.1.0/go.mod h1:XNkn88O1ChpSDQmQeStsy+sBenx6DDtFZJxhVysOjyk= github.com/tidwall/sjson v1.1.1/go.mod h1:yvVuSnpEQv5cYIrO+AT6kw4QVfd5SDZoGIS7/5+fZFs= @@ -993,7 +1093,9 @@ github.com/tmc/grpc-websocket-proxy v0.0.0-20190109142713-0ad062ec5ee5/go.mod h1 github.com/toqueteos/webbrowser v1.2.0/go.mod h1:XWoZq4cyp9WeUeak7w7LXRUQf1F1ATJMir8RTqb4ayM= github.com/tv42/httpunix v0.0.0-20150427012821-b75d8614f926/go.mod h1:9ESjWnEqriFuLhtthL60Sar/7RFoluCcXsuvEwTV5KM= github.com/ugorji/go v1.1.4/go.mod h1:uQMGLiO92mf5W77hV/PUCpI3pbzQx3CRekS0kk+RGrc= +github.com/ugorji/go v1.1.7/go.mod h1:kZn38zHttfInRq0xu/PH0az30d+z6vm202qpg1oXVMw= github.com/ugorji/go/codec v0.0.0-20181204163529-d75b2dcb6bc8/go.mod h1:VFNgLljTbGfSG7qAOspJ7OScBnGdDN/yBr0sguwnwf0= +github.com/ugorji/go/codec v1.1.7/go.mod 
h1:Ax+UKWsSmolVDwsd+7N3ZtXu+yMGCf907BLYF3GoBXY= github.com/urfave/cli v1.20.0/go.mod h1:70zkFmudgCuE/ngEzBv17Jvp/497gISqfk5gWijbERA= github.com/urfave/cli v1.22.1/go.mod h1:Gos4lmkARVdJ6EkW0WaNv/tZAAMe9V7XWyB60NtXRu0= github.com/valyala/bytebufferpool v1.0.0 h1:GqA5TC/0021Y/b9FG4Oi9Mr3q7XYx6KllzawFIhcdPw= @@ -1042,12 +1144,15 @@ go.mongodb.org/mongo-driver v1.3.5/go.mod h1:Ual6Gkco7ZGQw8wE1t4tLnvBsf6yVSM60qW go.mongodb.org/mongo-driver v1.4.3/go.mod h1:WcMNYLx/IlOxLe6JRJiv2uXuCz6zBLndR4SoGjYphSc= go.mongodb.org/mongo-driver v1.4.4 h1:bsPHfODES+/yx2PCWzUYMH8xj6PVniPI8DQrsJuSXSs= go.mongodb.org/mongo-driver v1.4.4/go.mod h1:WcMNYLx/IlOxLe6JRJiv2uXuCz6zBLndR4SoGjYphSc= +go.opencensus.io v0.15.0/go.mod h1:UffZAU+4sDEINUGP/B7UfBBkq4fqLu9zXAX7ke6CHW0= go.opencensus.io v0.20.1/go.mod h1:6WKK9ahsWS3RSO+PY9ZHZUfv2irvY6gN279GOPZjmmk= go.opencensus.io v0.20.2/go.mod h1:6WKK9ahsWS3RSO+PY9ZHZUfv2irvY6gN279GOPZjmmk= go.opencensus.io v0.21.0/go.mod h1:mSImk1erAIZhrmZN+AvHh14ztQfjbGwt4TtuofqLduU= go.opencensus.io v0.22.0/go.mod h1:+kGneAE2xo2IficOXnaByMWTGM9T73dGwxeWcUqIpI8= go.opencensus.io v0.22.2/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= go.opencensus.io v0.22.3/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= +go.opencensus.io v0.22.4/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= +go.opencensus.io v0.22.5/go.mod h1:5pWMHQbX5EPX2/62yrJeAkowc+lfs/XD7Uxpq3pI6kk= go.uber.org/atomic v1.3.2/go.mod h1:gD2HeocX3+yG+ygLZcrzQJaqmWj9AIm7n08wl/qW/PE= go.uber.org/atomic v1.4.0/go.mod h1:gD2HeocX3+yG+ygLZcrzQJaqmWj9AIm7n08wl/qW/PE= go.uber.org/atomic v1.5.0/go.mod h1:sABNBOSYdrvTF6hTgEIbc7YasKWGhgEQZyfxyTvoXHQ= @@ -1066,6 +1171,7 @@ go.uber.org/zap v1.10.0/go.mod h1:vwi/ZaCAaUcBkycHslxD9B2zi4UTXhF60s6SWpuDF0Q= go.uber.org/zap v1.13.0/go.mod h1:zwrFLgMcdUuIBviXEYEH1YKNaOBnKXsx2IPda5bBwHM= go.uber.org/zap v1.15.0 h1:ZZCA22JRF2gQE5FoNmhmrf7jeJJ2uhqDUNRYKm8dvmM= go.uber.org/zap v1.15.0/go.mod h1:Mb2vm2krFEG5DV0W9qcHBYFtp/Wku1cvYaqPsS/WYfc= +gocloud.dev v0.22.0/go.mod h1:z3jKIQ0Es9LALVZFQ3wOvwqAsSLq1R5c/2RdmghDucw= golang.org/x/crypto v0.0.0-20180723164146-c126467f60eb/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4= golang.org/x/crypto v0.0.0-20180904163835-0709b304e793/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4= golang.org/x/crypto v0.0.0-20180910181607-0e37d006457b/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4= @@ -1076,6 +1182,7 @@ golang.org/x/crypto v0.0.0-20190211182817-74369b46fc67/go.mod h1:6SG95UA2DQfeDnf golang.org/x/crypto v0.0.0-20190219172222-a4c6cb3142f2/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4= golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= golang.org/x/crypto v0.0.0-20190320223903-b7391e95e576/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= +golang.org/x/crypto v0.0.0-20190325154230-a5d413f7728c/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= golang.org/x/crypto v0.0.0-20190422162423-af44ce270edf/go.mod h1:WFFai1msRO1wXaEeE5yQxYXgSfI8pQAWXbQop6sCtWE= golang.org/x/crypto v0.0.0-20190510104115-cbcb75029529/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= golang.org/x/crypto v0.0.0-20190513172903-22d7a77e9e5f/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= @@ -1096,6 +1203,7 @@ golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPh golang.org/x/crypto v0.0.0-20200709230013-948cd5f35899/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= golang.org/x/crypto v0.0.0-20200728195943-123391ffb6de/go.mod 
h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= golang.org/x/crypto v0.0.0-20201002170205-7f63de1d35b0/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= +golang.org/x/crypto v0.0.0-20201203163018-be400aefbc4c/go.mod h1:jdWPYTVW3xRLrWPugEBEK3UY2ZEsg3UU495nc5E+M+I= golang.org/x/crypto v0.0.0-20210220033148-5ea612d1eb83/go.mod h1:jdWPYTVW3xRLrWPugEBEK3UY2ZEsg3UU495nc5E+M+I= golang.org/x/crypto v0.0.0-20210322153248-0c34fe9e7dc2 h1:It14KIkyBFYkHkwZ7k45minvA9aorojkyjGk9KJ5B/w= golang.org/x/crypto v0.0.0-20210322153248-0c34fe9e7dc2/go.mod h1:T9bdIzuCu7OtxOm1hfPfRQxPLYneinmdGuTeoZ9dtd4= @@ -1159,11 +1267,14 @@ golang.org/x/net v0.0.0-20190522155817-f3200d17e092/go.mod h1:HSz+uSET+XFnRR8LxR golang.org/x/net v0.0.0-20190603091049-60506f45cf65/go.mod h1:HSz+uSET+XFnRR8LxR5pz3Of3rY3CfYBVs4xY44aLks= golang.org/x/net v0.0.0-20190613194153-d28f0bde5980/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20190628185345-da137c7871d7/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20190724013045-ca1201d0de80/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20190813141303-74dc4d7220e7/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20190827160401-ba9fcec4b297/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20190923162816-aa69164e4478/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20191002035440-2ec189313ef0/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20191004110552-13f9640d40b9/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20191112182307-2180aed22343/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20191209160850-c0dbc17a3553/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20200114155413-6afb5195e5aa/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20200202094626-16171245cfb2/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= @@ -1171,16 +1282,22 @@ golang.org/x/net v0.0.0-20200222125558-5a598a2470a0/go.mod h1:z5CRVTTTmAJ677TzLL golang.org/x/net v0.0.0-20200226121028-0de0cce0169b/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20200301022130-244492dfa37a/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20200324143707-d3edc9973b7e/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= +golang.org/x/net v0.0.0-20200501053045-e0ff5e5a1de5/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= +golang.org/x/net v0.0.0-20200506145744-7e3656a0809f/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= golang.org/x/net v0.0.0-20200513185701-a91f0712d120/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= golang.org/x/net v0.0.0-20200520004742-59133d7f0dd7/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= +golang.org/x/net v0.0.0-20200520182314-0ba52f642ac2/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= golang.org/x/net v0.0.0-20200602114024-627f9648deb9/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= golang.org/x/net v0.0.0-20200625001655-4c5254603344/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA= golang.org/x/net v0.0.0-20200707034311-ab3426394381/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA= golang.org/x/net v0.0.0-20200822124328-c89045814202/go.mod 
h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA= +golang.org/x/net v0.0.0-20200904194848-62affa334b73/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA= golang.org/x/net v0.0.0-20201021035429-f5854403a974/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= +golang.org/x/net v0.0.0-20201031054903-ff519b6c9102/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= golang.org/x/net v0.0.0-20201110031124-69a78807bb2b/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= golang.org/x/net v0.0.0-20201202161906-c7110b5ffcbb/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= golang.org/x/net v0.0.0-20210119194325-5f4716e94777/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= +golang.org/x/net v0.0.0-20210224082022-3d97a244fca7/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= golang.org/x/net v0.0.0-20210226101413-39120d07d75e/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= golang.org/x/net v0.0.0-20210326060303-6b1517762897 h1:KrsHThm5nFk34YtATK1LsThyGhGbGe1olrte/HInHvs= @@ -1192,6 +1309,10 @@ golang.org/x/oauth2 v0.0.0-20190604053449-0f29369cfe45/go.mod h1:gOpvHmFTYa4Iltr golang.org/x/oauth2 v0.0.0-20191202225959-858c2ad4c8b6/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= golang.org/x/oauth2 v0.0.0-20200107190931-bf48bf16ab8d h1:TzXSXBo42m9gQenoE3b9BGiEpg5IG2JkU5FkPIawgtw= golang.org/x/oauth2 v0.0.0-20200107190931-bf48bf16ab8d/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= +golang.org/x/oauth2 v0.0.0-20200902213428-5d25da1a8d43/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= +golang.org/x/oauth2 v0.0.0-20201109201403-9fd604954f58/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= +golang.org/x/oauth2 v0.0.0-20201203001011-0b49973bad19 h1:ZD+2Sd/BnevwJp8PSli8WgGAGzb9IZtxBsv1iZMYeEA= +golang.org/x/oauth2 v0.0.0-20201203001011-0b49973bad19/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= @@ -1241,6 +1362,7 @@ golang.org/x/sys v0.0.0-20190916202348-b4ddaad3f8a3/go.mod h1:h1NjWce9XRLGQEsW7w golang.org/x/sys v0.0.0-20191001151750-bb3f8db39f24/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20191005200804-aed5e4c7ecf9/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20191026070338-33540a1f6037/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20191112214154-59a1497f0cea/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20191120155948-bd437916bb0e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20191204072324-ce4227a45e2e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20191220142924-d4481acd189f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= @@ -1255,19 +1377,31 @@ golang.org/x/sys v0.0.0-20200223170610-d5e6a3e2c0ae/go.mod h1:h1NjWce9XRLGQEsW7w golang.org/x/sys v0.0.0-20200302150141-5c8b2ff67527/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200317113312-5766fd39f98d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200323222414-85ca7c5b95cd/go.mod 
h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200331124033-c3d80250170d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200501052902-10377860bb8e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200511232937-7e40ca221e25/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200515095857-1151b9dac4a9/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200519105757-fe76b779f299/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200523222454-059865788121/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200615200032-f1bc736245b1/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200622214017-ed371f2e16b4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200625212154-ddb9806d33ae/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200803210538-64077c9b5642/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200828194041-157a740278f4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200905004654-be1d3432aa8f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20201112073958-5cba982894dd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20201201145000-ef89a241ccb3/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20201202213521-69691e467435/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20201214210602-f9fddec55a1e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210119212857-b64e53b001e4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210225134936-a50acf3fe073/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210320140829-1e4c9ba3b0c4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210324051608-47abb6519492 h1:Paq34FxTluEPvVyayQqMPgHm+vTOrIifmcYxFBx9TLg= golang.org/x/sys v0.0.0-20210324051608-47abb6519492/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210426230700-d19ff857e887 h1:dXfMednGJh/SUUFjTLsWJz3P+TQt9qnR11GgeI3vWKs= +golang.org/x/sys v0.0.0-20210426230700-d19ff857e887/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/term v0.0.0-20201117132131-f5c789dd3221/go.mod h1:Nr5EML6q2oocZ2LXRh80K7BxOlk5/8JxuGnuhpl+muw= golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1 h1:v+OssWQX+hTHEmOBgwxdZxK4zHq3yOs8F9J7mk0PY8E= golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= @@ -1303,6 +1437,7 @@ golang.org/x/tools v0.0.0-20190328211700-ab21143f2384/go.mod h1:LCzVGOaR6xXOjkQ3 golang.org/x/tools v0.0.0-20190329151228-23e29df326fe/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= golang.org/x/tools v0.0.0-20190416151739-9c9e1878f421/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= golang.org/x/tools v0.0.0-20190420181800-aa740d480789/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= +golang.org/x/tools v0.0.0-20190422233926-fe54fb35175b/go.mod 
h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= golang.org/x/tools v0.0.0-20190425150028-36563e24a262/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= golang.org/x/tools v0.0.0-20190506145303-2d16b83fe98c/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= golang.org/x/tools v0.0.0-20190524140312-2c0ae7006135/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= @@ -1318,6 +1453,7 @@ golang.org/x/tools v0.0.0-20190808195139-e713427fea3f/go.mod h1:b+2E5dAYhXwXZwtn golang.org/x/tools v0.0.0-20190816200558-6889da9d5479/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20190911174233-4f2ddba30aff/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20190920225731-5eefd052ad72/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191010075000-0337d82405ff/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20191012152004-8de300cfc20a/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20191029041327-9cc4af7d6b2c/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20191029190741-b9c20aec41a5/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= @@ -1338,11 +1474,30 @@ golang.org/x/tools v0.0.0-20200204074204-1cc6d1ef6c74/go.mod h1:TB2adYChydJhpapK golang.org/x/tools v0.0.0-20200207183749-b753a1ba74fa/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= golang.org/x/tools v0.0.0-20200212150539-ea181f53ac56/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= golang.org/x/tools v0.0.0-20200224181240-023911ca70b2/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200227222343-706bc42d1f0d/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200304193943-95d2e580d8eb/go.mod h1:o4KQGtdN14AW+yjsvvwRTJJuXz8XRtIHtEnmAXLyFUw= +golang.org/x/tools v0.0.0-20200312045724-11d5b4c81c7d/go.mod h1:o4KQGtdN14AW+yjsvvwRTJJuXz8XRtIHtEnmAXLyFUw= golang.org/x/tools v0.0.0-20200317043434-63da46f3035e/go.mod h1:Sl4aGygMT6LrqrWclx+PTx3U+LnKx/seiNR+3G19Ar8= +golang.org/x/tools v0.0.0-20200331025713-a30bf2db82d4/go.mod h1:Sl4aGygMT6LrqrWclx+PTx3U+LnKx/seiNR+3G19Ar8= +golang.org/x/tools v0.0.0-20200501065659-ab2804fb9c9d/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= golang.org/x/tools v0.0.0-20200505023115-26f46d2f7ef8/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= +golang.org/x/tools v0.0.0-20200512131952-2bc93b1c0c88/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= +golang.org/x/tools v0.0.0-20200515010526-7d3b6ebf133d/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= golang.org/x/tools v0.0.0-20200616133436-c1934b75d054/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= +golang.org/x/tools v0.0.0-20200618134242-20370b0cb4b2/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= golang.org/x/tools v0.0.0-20200619180055-7c47624df98f/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= golang.org/x/tools v0.0.0-20200717024301-6ddee64345a6/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= +golang.org/x/tools v0.0.0-20200729194436-6467de6f59a7/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= +golang.org/x/tools v0.0.0-20200804011535-6c149bb5ef0d/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= +golang.org/x/tools v0.0.0-20200825202427-b303f430e36d/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= +golang.org/x/tools v0.0.0-20200828161849-5deb26317202/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= 
+golang.org/x/tools v0.0.0-20200904185747-39188db58858/go.mod h1:Cj7w3i3Rnn0Xh82ur9kSqwfTHTeVxaDqrfMjpcNT6bE= +golang.org/x/tools v0.0.0-20200915173823-2db8f0ff891c/go.mod h1:z6u4i615ZeAfBE4XtMziQW1fSVJXACjjbWkB/mvPzlU= +golang.org/x/tools v0.0.0-20200918232735-d647fc253266/go.mod h1:z6u4i615ZeAfBE4XtMziQW1fSVJXACjjbWkB/mvPzlU= +golang.org/x/tools v0.0.0-20201110124207-079ba7bd75cd/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= +golang.org/x/tools v0.0.0-20201201161351-ac6f37ff4c2a/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= +golang.org/x/tools v0.0.0-20201202200335-bef1c476418a/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= +golang.org/x/tools v0.0.0-20201203202102-a1a1cbeaa516/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= golang.org/x/tools v0.0.0-20210106214847-113979e3529a/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= golang.org/x/tools v0.1.0 h1:po9/4sTYwZU9lPhi1tOrb4hCv3qrhiQ77LZfGa2OjwY= golang.org/x/tools v0.1.0/go.mod h1:xkSsbof2nBLbhDlRMhhhyNLN/zl3eTqcnHD5viDpcZ0= @@ -1358,30 +1513,46 @@ gonum.org/v1/netlib v0.0.0-20190313105609-8cb42192e0e0/go.mod h1:wa6Ws7BG/ESfp6d gonum.org/v1/netlib v0.0.0-20190331212654-76723241ea4e/go.mod h1:kS+toOQn6AQKjmKJ7gzohV1XkqsFehRA2FbsbkopSuQ= google.golang.org/api v0.3.1/go.mod h1:6wY9I6uQWHQ8EM57III9mq/AjF+i8G65rmVagqKMtkk= google.golang.org/api v0.4.0/go.mod h1:8k5glujaEP+g9n7WNsDg8QP6cUVNI86fCNMcbazEtwE= +google.golang.org/api v0.5.0/go.mod h1:8k5glujaEP+g9n7WNsDg8QP6cUVNI86fCNMcbazEtwE= google.golang.org/api v0.7.0/go.mod h1:WtwebWUNSVBH/HAw79HIFXZNqEvBhG+Ra+ax0hx3E3M= google.golang.org/api v0.8.0/go.mod h1:o4eAsZoiT+ibD93RtjEohWalFOjRDx6CVaqeizhEnKg= google.golang.org/api v0.9.0/go.mod h1:o4eAsZoiT+ibD93RtjEohWalFOjRDx6CVaqeizhEnKg= +google.golang.org/api v0.10.0/go.mod h1:o4eAsZoiT+ibD93RtjEohWalFOjRDx6CVaqeizhEnKg= google.golang.org/api v0.13.0/go.mod h1:iLdEw5Ide6rF15KTC1Kkl0iskquN2gFfn9o9XIsbkAI= google.golang.org/api v0.14.0/go.mod h1:iLdEw5Ide6rF15KTC1Kkl0iskquN2gFfn9o9XIsbkAI= google.golang.org/api v0.15.0/go.mod h1:iLdEw5Ide6rF15KTC1Kkl0iskquN2gFfn9o9XIsbkAI= google.golang.org/api v0.15.1/go.mod h1:iLdEw5Ide6rF15KTC1Kkl0iskquN2gFfn9o9XIsbkAI= google.golang.org/api v0.17.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE= google.golang.org/api v0.18.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE= +google.golang.org/api v0.19.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE= google.golang.org/api v0.20.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE= +google.golang.org/api v0.22.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE= +google.golang.org/api v0.24.0/go.mod h1:lIXQywCXRcnZPGlsd8NbLnOjtAoL6em04bJ9+z0MncE= +google.golang.org/api v0.28.0/go.mod h1:lIXQywCXRcnZPGlsd8NbLnOjtAoL6em04bJ9+z0MncE= +google.golang.org/api v0.29.0/go.mod h1:Lcubydp8VUV7KeIHD9z2Bys/sm/vGKnG1UHuDBSrHWM= +google.golang.org/api v0.30.0/go.mod h1:QGmEvQ87FHZNiUVJkT14jQNYJ4ZJjdRF23ZXz5138Fc= +google.golang.org/api v0.31.0/go.mod h1:CL+9IBCa2WWU6gRuBWaKqGWLFFwbEUXkfeMkHLQWYWo= +google.golang.org/api v0.32.0/go.mod h1:/XrVsuzM0rZmrsbjJutiuftIzeuTQcEeaYcSk/mQ1dg= +google.golang.org/api v0.35.0/go.mod h1:/XrVsuzM0rZmrsbjJutiuftIzeuTQcEeaYcSk/mQ1dg= +google.golang.org/api v0.36.0/go.mod h1:+z5ficQTmoYpPn8LCUNVpK5I7hwkpjbcgqA7I34qYtE= google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM= google.golang.org/appengine v1.2.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= google.golang.org/appengine v1.3.0/go.mod 
h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= google.golang.org/appengine v1.4.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= google.golang.org/appengine v1.5.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= google.golang.org/appengine v1.6.1/go.mod h1:i06prIuMbXzDqacNJfV5OdTW448YApPu5ww/cMBSeb0= +google.golang.org/appengine v1.6.2/go.mod h1:i06prIuMbXzDqacNJfV5OdTW448YApPu5ww/cMBSeb0= google.golang.org/appengine v1.6.5/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc= google.golang.org/appengine v1.6.6 h1:lMO5rYAqUxkmaj76jAkRUvt5JZgFymx/+Q5Mzfivuhc= google.golang.org/appengine v1.6.6/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc= +google.golang.org/appengine v1.6.7 h1:FZR1q0exgwxzPzp/aF+VccGrSfxfPpkBqjIIEq3ru6c= +google.golang.org/appengine v1.6.7/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc= google.golang.org/genproto v0.0.0-20180817151627-c66870c02cf8/go.mod h1:JiN7NxoALGmiZfu7CAH4rXhgtRTLTxftemlI0sWmxmc= google.golang.org/genproto v0.0.0-20190307195333-5fe7a883aa19/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= google.golang.org/genproto v0.0.0-20190418145605-e7d98fc518a7/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= google.golang.org/genproto v0.0.0-20190425155659-357c62f0e4bb/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= google.golang.org/genproto v0.0.0-20190502173448-54afdca5d873/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= +google.golang.org/genproto v0.0.0-20190508193815-b515fa19cec8/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= google.golang.org/genproto v0.0.0-20190530194941-fb225487d101/go.mod h1:z3L6/3dTEVtUr6QSP8miRzeRqwQOioJ9I66odjN4I7s= google.golang.org/genproto v0.0.0-20190801165951-fa694d86fc64/go.mod h1:DMBHOl98Agz4BDEuKkezgsaosCRResVns1a3J2ZsMNc= google.golang.org/genproto v0.0.0-20190819201941-24fa4b261c55/go.mod h1:DMBHOl98Agz4BDEuKkezgsaosCRResVns1a3J2ZsMNc= @@ -1395,11 +1566,31 @@ google.golang.org/genproto v0.0.0-20200122232147-0452cf42e150/go.mod h1:n3cpQtvx google.golang.org/genproto v0.0.0-20200204135345-fa8e72b47b90/go.mod h1:GmwEX6Z4W5gMy59cAlVYjN9JhxgbQH6Gn+gFDQe2lzA= google.golang.org/genproto v0.0.0-20200212174721-66ed5ce911ce/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= google.golang.org/genproto v0.0.0-20200224152610-e50cd9704f63/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200228133532-8c2c7df3a383/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200305110556-506484158171/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200312145019-da6875a35672/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= google.golang.org/genproto v0.0.0-20200317114155-1f3552e48f24/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200331122359-1ee6d9798940/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200423170343-7949de9c1215/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200430143042-b979b6f78d84/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200511104702-f5ebc3bea380/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= google.golang.org/genproto v0.0.0-20200513103714-09dca8ec2884/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200515170657-fc4c6c6a6587/go.mod h1:YsZOwe1myG/8QRHRsmBRE1LrgQY60beZKjly0O1fX9U= 
google.golang.org/genproto v0.0.0-20200526211855-cb27e3aa2013/go.mod h1:NbSheEEYHJ7i3ixzK3sjbqSGDJWnxyFXZblF3eUsNvo= +google.golang.org/genproto v0.0.0-20200618031413-b414f8b61790/go.mod h1:jDfRM7FcilCzHH/e9qn6dsT145K34l5v+OpcnNgKAAA= +google.golang.org/genproto v0.0.0-20200729003335-053ba62fc06f/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20200804131852-c06518451d9c/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= google.golang.org/genproto v0.0.0-20200806141610-86f49bd18e98 h1:LCO0fg4kb6WwkXQXRQQgUYsFeFb5taTX5WAx5O/Vt28= google.golang.org/genproto v0.0.0-20200806141610-86f49bd18e98/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20200825200019-8632dd797987/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20200831141814-d751682dd103/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20200904004341-0bd0a958aa1d/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20200914193844-75d14daec038/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20200921151605-7abf4a1a14d5/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20201109203340-2640f1f9cdfb/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20201201144952-b05cb90ed32e/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20201203001206-6486ece9c497 h1:jDYzwXmX9tLnuG4sL85HPmE1ruErXOopALp2i/0AHnI= +google.golang.org/genproto v0.0.0-20201203001206-6486ece9c497/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= google.golang.org/grpc v1.17.0/go.mod h1:6QZJwpn2B+Zp71q/5VxRsJ6NXXVCE5NRUHRo+f3cWCs= google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c= google.golang.org/grpc v1.20.0/go.mod h1:chYK+tFQF0nDUGJgXMSgLCQk3phJEuONr2DCgLDdAQM= @@ -1414,11 +1605,18 @@ google.golang.org/grpc v1.26.0/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8 google.golang.org/grpc v1.27.0/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk= google.golang.org/grpc v1.27.1/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk= google.golang.org/grpc v1.28.0/go.mod h1:rpkK4SK4GF4Ach/+MFLZUBavHOvF2JJB5uozKKal+60= +google.golang.org/grpc v1.28.1/go.mod h1:rpkK4SK4GF4Ach/+MFLZUBavHOvF2JJB5uozKKal+60= google.golang.org/grpc v1.29.1/go.mod h1:itym6AZVZYACWQqET3MqgPpjcuV5QH3BxFS3IjizoKk= +google.golang.org/grpc v1.30.0/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak= google.golang.org/grpc v1.31.0/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak= +google.golang.org/grpc v1.31.1/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak= +google.golang.org/grpc v1.32.0/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak= google.golang.org/grpc v1.33.1/go.mod h1:fr5YgcSWrqhRRxogOsw7RzIpsmvOZ6IcH4kBYTpR3n0= +google.golang.org/grpc v1.33.2/go.mod h1:JMHMWHQWaTccqQQlmk3MJZS+GWXOdAesneDmEnv2fbc= google.golang.org/grpc v1.34.0 h1:raiipEjMOIC/TO2AvyTxP25XFdLxNIBwzDh3FM3XztI= google.golang.org/grpc v1.34.0/go.mod h1:WotjhfgOW/POjDeRt8vscBtXq+2VjORFy659qA51WJ8= +google.golang.org/grpc v1.36.0 h1:o1bcQ6imQMIOpdrO3SWf2z5RV72WbDwdXuK0MDlc8As= +google.golang.org/grpc v1.36.0/go.mod h1:qjiiYl8FncCW8feJPdyg3v6XW24KsRHe+dy9BAGRRjU= google.golang.org/grpc/cmd/protoc-gen-go-grpc v1.1.0 h1:M1YKkFIboKNieVO5DLUEVzQfGwJD30Nv2jfUgzb5UcE= google.golang.org/grpc/cmd/protoc-gen-go-grpc 
v1.1.0/go.mod h1:6Kw0yEErY5E/yWrBtf03jp27GLLJujG4z/JK95pnjjw= google.golang.org/grpc/examples v0.0.0-20201226181154-53788aa5dcb4/go.mod h1:Ly7ZA/ARzg8fnPU9TyZIxoz33sEUuWX7txiqs8lPTgE= @@ -1505,6 +1703,7 @@ honnef.co/go/tools v0.0.0-20190418001031-e561f6794a2a/go.mod h1:rf3lG4BRIbNafJWh honnef.co/go/tools v0.0.0-20190523083050-ea95bdfd59fc/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= honnef.co/go/tools v0.0.1-2019.2.3/go.mod h1:a3bituU0lyd329TUQxRnasdCoJDkEUEAqEt0JzvZhAg= honnef.co/go/tools v0.0.1-2020.1.3/go.mod h1:X/FiERA/W4tHapMX5mGpAtMSVEeEUOyHaw9vFzvIQ3k= +honnef.co/go/tools v0.0.1-2020.1.4/go.mod h1:X/FiERA/W4tHapMX5mGpAtMSVEeEUOyHaw9vFzvIQ3k= honnef.co/go/tools v0.2.0 h1:ws8AfbgTX3oIczLPNPCu5166oBg9ST2vNs0rcht+mDE= honnef.co/go/tools v0.2.0/go.mod h1:lPVVZ2BS5TfnjLyizF7o7hv7j9/L+8cZY2hLyjP9cGY= k8s.io/api v0.17.0/go.mod h1:npsyOePkeP0CPwyGfXDHxvypiYMJxBWAMpQxCaJ4ZxI= @@ -1512,6 +1711,8 @@ k8s.io/api v0.17.8/go.mod h1:N++Llhs8kCixMUoCaXXAyMMPbo8dDVnh+IQ36xZV2/0= k8s.io/api v0.19.2/go.mod h1:IQpK0zFQ1xc5iNIQPqzgoOwuFugaYHK4iCknlAQP9nI= k8s.io/api v0.19.6 h1:F3lfwgpKcKms6F1mMqkQXFzXmme8QqHTJBtBkev3TOg= k8s.io/api v0.19.6/go.mod h1:Plxx44Nh4zVblkJrIgxVPgPre1mvng6tXf1Sj3bs0fU= +k8s.io/api v0.20.4 h1:xZjKidCirayzX6tHONRQyTNDVIR55TYVqgATqo6ZULY= +k8s.io/api v0.20.4/go.mod h1:++lNL1AJMkDymriNniQsWRkMDzRaX2Y/POTUi8yvqYQ= k8s.io/apiextensions-apiserver v0.17.0/go.mod h1:XiIFUakZywkUl54fVXa7QTEHcqQz9HG55nHd1DCoHj8= k8s.io/apiextensions-apiserver v0.19.2 h1:oG84UwiDsVDu7dlsGQs5GySmQHCzMhknfhFExJMz9tA= k8s.io/apiextensions-apiserver v0.19.2/go.mod h1:EYNjpqIAvNZe+svXVx9j4uBaVhTB4C94HkY3w058qcg= @@ -1520,6 +1721,9 @@ k8s.io/apimachinery v0.17.8/go.mod h1:Lg8zZ5iC/O8UjCqW6DNhcQG2m4TdjF9kwG3891OWbb k8s.io/apimachinery v0.19.2/go.mod h1:DnPGDnARWFvYa3pMHgSxtbZb7gpzzAZ1pTfaUNDVlmA= k8s.io/apimachinery v0.19.6 h1:kBLzSGuDdY1NdSV2uFzI+FwZ9wtkmG+X3ZVcWXSqNgA= k8s.io/apimachinery v0.19.6/go.mod h1:6sRbGRAVY5DOCuZwB5XkqguBqpqLU6q/kOaOdk29z6Q= +k8s.io/apimachinery v0.20.4/go.mod h1:WlLqWAHZGg07AeltaI0MV5uk1Omp8xaN0JGLY6gkRpU= +k8s.io/apimachinery v0.21.2 h1:vezUc/BHqWlQDnZ+XkrpXSmnANSLbpnlpwo0Lhk0gpc= +k8s.io/apimachinery v0.21.2/go.mod h1:CdTY8fU/BlvAbJ2z/8kBwimGki5Zp8/fbVuLY8gJumM= k8s.io/apiserver v0.17.0/go.mod h1:ABM+9x/prjINN6iiffRVNCBR2Wk7uY4z+EtEGZD48cg= k8s.io/apiserver v0.19.2/go.mod h1:FreAq0bJ2vtZFj9Ago/X0oNGC51GfubKK/ViOKfVAOA= k8s.io/client-go v0.17.0/go.mod h1:TYgR6EUHs6k45hb6KWjVD6jFZvJV4gHDikv/It0xz+k= @@ -1527,6 +1731,8 @@ k8s.io/client-go v0.17.8/go.mod h1:SJsDS64AAtt9VZyeaQMb4Ck5etCitZ/FwajWdzua5eY= k8s.io/client-go v0.19.2/go.mod h1:S5wPhCqyDNAlzM9CnEdgTGV4OqhsW3jGO1UM1epwfJA= k8s.io/client-go v0.19.6 h1:vtPb33nP8DBMW+/CyuJ8fiie36c3CM1Ts6L4Tsr+PtU= k8s.io/client-go v0.19.6/go.mod h1:gEiS+efRlXYUEQ9Oz4lmNXlxAl5JZ8y2zbTDGhvXXnk= +k8s.io/client-go v0.20.4 h1:85crgh1IotNkLpKYKZHVNI1JT86nr/iDCvq2iWKsql4= +k8s.io/client-go v0.20.4/go.mod h1:LiMv25ND1gLUdBeYxBIwKpkSC5IsozMMmOOeSJboP+k= k8s.io/code-generator v0.17.0/go.mod h1:DVmfPQgxQENqDIzVR2ddLXMH34qeszkKSdH/N+s+38s= k8s.io/code-generator v0.19.2/go.mod h1:moqLn7w0t9cMs4+5CQyxnfA/HV8MF6aAVENF+WZZhgk= k8s.io/code-generator v0.19.6 h1:N7PlZyX25j5Jl9oIBphWN2qp1AKZOwXdDVfj2Z0V0p8= @@ -1547,13 +1753,18 @@ k8s.io/klog v1.0.0 h1:Pt+yjF5aB1xDSVbau4VsWe+dQNzA0qv1LlXdC2dF6Q8= k8s.io/klog v1.0.0/go.mod h1:4Bi6QPql/J/LkTDqv7R/cd3hPo4k2DG6Ptcz060Ez5I= k8s.io/klog/v2 v2.0.0/go.mod h1:PBfzABfn139FHAV07az/IF9Wp1bkk3vpT2XSJ76fSDE= k8s.io/klog/v2 v2.2.0/go.mod h1:Od+F08eJP+W3HUb4pSrPpgp9DGU4GzlpG/TmITuYh/Y= +k8s.io/klog/v2 v2.4.0/go.mod 
h1:Od+F08eJP+W3HUb4pSrPpgp9DGU4GzlpG/TmITuYh/Y= k8s.io/klog/v2 v2.5.0 h1:8mOnjf1RmUPW6KRqQCfYSZq/K20Unmp3IhuZUhxl8KI= k8s.io/klog/v2 v2.5.0/go.mod h1:hy9LJ/NvuK+iVyP4Ehqva4HxZG/oXyIS3n3Jmire4Ec= +k8s.io/klog/v2 v2.8.0 h1:Q3gmuM9hKEjefWFFYF0Mat+YyFJvsUyYuwyNNJ5C9Ts= +k8s.io/klog/v2 v2.8.0/go.mod h1:hy9LJ/NvuK+iVyP4Ehqva4HxZG/oXyIS3n3Jmire4Ec= k8s.io/kube-openapi v0.0.0-20191107075043-30be4d16710a/go.mod h1:1TqjTSzOxsLGIKfj0lK8EeCP7K1iUG65v09OM0/WG5E= k8s.io/kube-openapi v0.0.0-20200410145947-bcb3869e6f29/go.mod h1:F+5wygcW0wmRTnM3cOgIqGivxkwSWIWT5YdsDbeAOaU= k8s.io/kube-openapi v0.0.0-20200805222855-6aeccd4b50c6/go.mod h1:UuqjUnNftUyPE5H64/qeyjQoUZhGpeFDVdxjTeEVN2o= k8s.io/kube-openapi v0.0.0-20201113171705-d219536bb9fd h1:sOHNzJIkytDF6qadMNKhhDRpc6ODik8lVC6nOur7B2c= k8s.io/kube-openapi v0.0.0-20201113171705-d219536bb9fd/go.mod h1:WOJ3KddDSol4tAGcJo0Tvi+dK12EcqSLqcWsryKMpfM= +k8s.io/kube-openapi v0.0.0-20210305001622-591a79e4bda7 h1:vEx13qjvaZ4yfObSSXW7BrMc/KQBBT/Jyee8XtLf4x0= +k8s.io/kube-openapi v0.0.0-20210305001622-591a79e4bda7/go.mod h1:wXW5VT87nVfh/iLV8FpR2uDvrFyomxbtb1KivDbvPTE= k8s.io/kubernetes v1.11.1 h1:wHOPX+teuYaSlUWfL/b24jMH0n7HECbj4Xt8i7kSZIw= k8s.io/kubernetes v1.11.1/go.mod h1:ocZa8+6APFNC2tX1DZASIbocyYT5jHzqFVsY5aoB7Jk= k8s.io/utils v0.0.0-20191114184206-e782cd3c129f/go.mod h1:sZAwmy6armz5eXlNoLmJcl4F1QuKu7sr+mFQ0byX7Ew= @@ -1567,6 +1778,7 @@ modernc.org/mathutil v1.0.0/go.mod h1:wU0vUrJsVWBZ4P6e7xtFJEhFSNsfRLJ8H458uRjg03 modernc.org/strutil v1.0.0/go.mod h1:lstksw84oURvj9y3tn8lGvRxyRC1S2+g5uuIzNfIOBs= modernc.org/xc v1.0.0/go.mod h1:mRNCo0bvLjGhHO9WsyuKVU4q0ceiDDDoEeWDJHrNx8I= moul.io/http2curl v1.0.1-0.20190925090545-5cd742060b0e/go.mod h1:nejbQVfXh96n9dSF6cH3Jsk/QI1Z2oEL7sSI2ifXFNA= +nhooyr.io/websocket v1.8.6/go.mod h1:B70DZP8IakI65RVQ51MsWP/8jndNma26DVA/nFSCgW0= rsc.io/binaryregexp v0.2.0/go.mod h1:qTv7/COck+e2FymRvadv62gMdZztPaShugOCi3I+8D8= rsc.io/quote/v3 v3.1.0/go.mod h1:yEA65RcK8LyAZtP9Kv3t0HmxON59tX3rD+tICJqUlj0= rsc.io/sampler v1.3.0/go.mod h1:T1hPZKmBbMNahiBKFy5HrXp6adAjACjK9JXDnKaTXpA= @@ -1581,6 +1793,8 @@ sigs.k8s.io/structured-merge-diff/v2 v2.0.1/go.mod h1:Wb7vfKAodbKgf6tn1Kl0VvGj7m sigs.k8s.io/structured-merge-diff/v4 v4.0.1/go.mod h1:bJZC9H9iH24zzfZ/41RGcq60oK1F7G282QMXDPYydCw= sigs.k8s.io/structured-merge-diff/v4 v4.0.2 h1:YHQV7Dajm86OuqnIR6zAelnDWBRjo+YhYV9PmGrh1s8= sigs.k8s.io/structured-merge-diff/v4 v4.0.2/go.mod h1:bJZC9H9iH24zzfZ/41RGcq60oK1F7G282QMXDPYydCw= +sigs.k8s.io/structured-merge-diff/v4 v4.1.0 h1:C4r9BgJ98vrKnnVCjwCSXcWjWe0NKcUQkmzDXZXGwH8= +sigs.k8s.io/structured-merge-diff/v4 v4.1.0/go.mod h1:bJZC9H9iH24zzfZ/41RGcq60oK1F7G282QMXDPYydCw= sigs.k8s.io/yaml v1.1.0/go.mod h1:UJmg0vDUVViEyp3mgSv9WPwZCDxu4rQW1olrI1uml+o= sigs.k8s.io/yaml v1.2.0 h1:kr/MCeFWJWTwyaHoR9c8EjH9OumOmoF9YGiZd7lFm/Q= sigs.k8s.io/yaml v1.2.0/go.mod h1:yfXDCHCao9+ENCvLSE62v9VSji2MKu5jeNfTrofGhJc= From f1bb852852b16ccf0e316037834274ab5b60aab3 Mon Sep 17 00:00:00 2001 From: Yaqi Ji Date: Thu, 28 Oct 2021 22:30:34 -0700 Subject: [PATCH 29/31] feat(sdk): add load_component_from_* (#6822) * feat(sdk): add load_component_from_* * address comments' : * update release notes --- sdk/RELEASE.md | 1 + .../experimental/test_data/simple_yaml.yaml | 16 ++++ .../components/experimental/yaml_component.py | 38 +++++++- .../experimental/yaml_component_test.py | 86 +++++++++++++++++++ 4 files changed, 140 insertions(+), 1 deletion(-) create mode 100644 sdk/python/kfp/v2/components/experimental/test_data/simple_yaml.yaml create mode 100644 
sdk/python/kfp/v2/components/experimental/yaml_component_test.py diff --git a/sdk/RELEASE.md b/sdk/RELEASE.md index 0b2851eaaf6..231858833c2 100644 --- a/sdk/RELEASE.md +++ b/sdk/RELEASE.md @@ -4,6 +4,7 @@ * Support passing parameters in v2 using google.protobuf.Value [\#6804](https://github.com/kubeflow/pipelines/pull/6804). * Implement experimental v2 `@component` component [\#6825](https://github.com/kubeflow/pipelines/pull/6825) +* Add load_component_from_* for v2 [\#6822](https://github.com/kubeflow/pipelines/pull/6822) ## Breaking Changes diff --git a/sdk/python/kfp/v2/components/experimental/test_data/simple_yaml.yaml b/sdk/python/kfp/v2/components/experimental/test_data/simple_yaml.yaml new file mode 100644 index 00000000000..7035bcd07aa --- /dev/null +++ b/sdk/python/kfp/v2/components/experimental/test_data/simple_yaml.yaml @@ -0,0 +1,16 @@ +name: component_1 +inputs: + input1: {type: String} +outputs: + output1: {type: String} +implementation: + container: + image: alpine + commands: + - sh + - -c + - 'set -ex + + echo "$0" > "$1"' + - {inputValue: input1} + - {outputPath: output1} \ No newline at end of file diff --git a/sdk/python/kfp/v2/components/experimental/yaml_component.py b/sdk/python/kfp/v2/components/experimental/yaml_component.py index 9cc855a9cd5..2e38e02cd4a 100644 --- a/sdk/python/kfp/v2/components/experimental/yaml_component.py +++ b/sdk/python/kfp/v2/components/experimental/yaml_component.py @@ -13,6 +13,12 @@ # limitations under the License. """Functions for loading component from yaml.""" +__all__ = [ + 'load_component_from_text', + 'load_component_from_url', + 'load_component_from_file', +] + from kfp.v2.components.experimental import base_component from kfp.v2.components.experimental import structures @@ -27,4 +33,34 @@ def execute(self, *args, **kwargs): def load_component_from_text(text: str) -> base_component.BaseComponent: """Loads component from text.""" return YamlComponent( - component_spec=structures.ComponentSpec.load_from_component_yaml(text)) + structures.ComponentSpec.load_from_component_yaml(text)) + +def load_component_from_file(file_path: str) -> base_component.BaseComponent: + """Loads a component from a file. + + Args: + file_path: A string containing the path to the YAML file. + """ + with open(file_path, 'rb') as component_stream: + return load_component_from_text(component_stream.read()) + +def load_component_from_url(url: str, auth=None) -> base_component.BaseComponent: + """Loads a component from a URL. + + Args: + url: A string containing the URL of the YAML file. + auth: The authentication credentials necessary for URL access. + """ + + if url is None: + raise TypeError('url must not be None.') + + if url.startswith('gs://'): + # Replace the gs:// URI with an https:// URI (works for public objects). + url = 'https://storage.googleapis.com/' + url[len('gs://'):] + + import requests + resp = requests.get(url, auth=auth) + resp.raise_for_status() + + return load_component_from_text(resp.content) diff --git a/sdk/python/kfp/v2/components/experimental/yaml_component_test.py b/sdk/python/kfp/v2/components/experimental/yaml_component_test.py new file mode 100644 index 00000000000..10668890f9e --- /dev/null +++ b/sdk/python/kfp/v2/components/experimental/yaml_component_test.py @@ -0,0 +1,86 @@ +# Copyright 2021 The Kubeflow Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Tests for kfp.v2.components.experimental.yaml_component.""" + +import requests +import unittest +import textwrap + +from pathlib import Path +from unittest import mock + +from kfp.v2.components.experimental import yaml_component +from kfp.v2.components.experimental import structures + +SAMPLE_YAML = textwrap.dedent("""\ + name: component_1 + inputs: + input1: {type: String} + outputs: + output1: {type: String} + implementation: + container: + image: alpine + commands: + - sh + - -c + - 'set -ex + + echo "$0" > "$1"' + - {inputValue: input1} + - {outputPath: output1} + """) + +class YamlComponentTest(unittest.TestCase): + + def test_load_component_from_text(self): + component = yaml_component.load_component_from_text(SAMPLE_YAML) + self.assertEqual(component.component_spec.name, 'component_1') + self.assertEqual(component.component_spec.outputs, {'output1': structures.OutputSpec(type='String')}) + self.assertEqual(component._component_inputs, {'input1'}) + self.assertEqual(component.name, 'component_1') + self.assertEqual(component.component_spec.implementation.container.image, 'alpine') + + def test_load_component_from_file(self): + component_path = Path( + __file__).parent/'test_data'/'simple_yaml.yaml' + component = yaml_component.load_component_from_file(component_path) + self.assertEqual(component.component_spec.name, 'component_1') + self.assertEqual(component.component_spec.outputs, {'output1': structures.OutputSpec(type='String')}) + self.assertEqual(component._component_inputs, {'input1'}) + self.assertEqual(component.name, 'component_1') + self.assertEqual(component.component_spec.implementation.container.image, 'alpine') + + def test_load_component_from_url(self): + component_url = 'https://raw.githubusercontent.com/some/repo/components/component_group/component.yaml' + + def mock_response_factory(url, params=None, **kwargs): + if url == component_url: + response = requests.Response() + response.url = component_url + response.status_code = 200 + response._content = SAMPLE_YAML + return response + raise RuntimeError('Unexpected URL "{}"'.format(url)) + + with mock.patch('requests.get', mock_response_factory): + component = yaml_component.load_component_from_url(component_url) + self.assertEqual(component.component_spec.name, 'component_1') + self.assertEqual(component.component_spec.outputs, {'output1': structures.OutputSpec(type='String')}) + self.assertEqual(component._component_inputs, {'input1'}) + self.assertEqual(component.name, 'component_1') + self.assertEqual(component.component_spec.implementation.container.image, 'alpine') + +if __name__ == '__main__': + unittest.main() From eeb0b9cabb1e320b6ef1fa60dab35d064e13cdb2 Mon Sep 17 00:00:00 2001 From: Yang Pan Date: Thu, 28 Oct 2021 22:58:48 -0700 Subject: [PATCH 30/31] feat(components): do not wait for dataflow job to finish in python component PiperOrigin-RevId: 406292369 --- .../dataflow/dataflow_python_job_remote_runner.py | 10 ---------- .../dataflow/test_dataflow_python_job_remote_runner.py | 3 --- 2 files changed, 13 deletions(-) diff --git
a/components/google-cloud/google_cloud_pipeline_components/container/experimental/dataflow/dataflow_python_job_remote_runner.py b/components/google-cloud/google_cloud_pipeline_components/container/experimental/dataflow/dataflow_python_job_remote_runner.py index 958c09d32ee..f85480657dc 100644 --- a/components/google-cloud/google_cloud_pipeline_components/container/experimental/dataflow/dataflow_python_job_remote_runner.py +++ b/components/google-cloud/google_cloud_pipeline_components/container/experimental/dataflow/dataflow_python_job_remote_runner.py @@ -77,7 +77,6 @@ def create_python_job(python_module_path: str, with open(gcp_resources, 'w') as f: f.write(json_format.MessageToJson(job_resources)) break - sub_process.wait_and_check() if not job_id: raise RuntimeError( 'No dataflow job was found when running the python file.') @@ -178,12 +177,3 @@ def read_lines(self): for line in iter(self.process.stdout.readline, b''): logging.info('subprocess: %s', line) yield line - - def wait_and_check(self): - for _ in self.read_lines(): - pass - self.process.stdout.close() - return_code = self.process.wait() - logging.info('Subprocess exit with code %s.', return_code) - if return_code: - raise subprocess.CalledProcessError(return_code, self._cmd) diff --git a/components/google-cloud/tests/container/experimental/dataflow/test_dataflow_python_job_remote_runner.py b/components/google-cloud/tests/container/experimental/dataflow/test_dataflow_python_job_remote_runner.py index 2ea97ff5384..5ed5da851d7 100644 --- a/components/google-cloud/tests/container/experimental/dataflow/test_dataflow_python_job_remote_runner.py +++ b/components/google-cloud/tests/container/experimental/dataflow/test_dataflow_python_job_remote_runner.py @@ -148,7 +148,6 @@ def test_create_python_job_raises_error_on_no_job_id( gcp_resources=self._gcp_resources, location=self._location, temp_location=self._gcs_temp_path) - mock_process_client.wait_and_check.assert_called_once_with() @mock.patch.object( dataflow_python_job_remote_runner, 'stage_file', autospec=True) @@ -177,7 +176,6 @@ def test_create_python_job_parses_with_emtpy_args_list_parses_correctly( temp_location=self._gcs_temp_path) mock_prepare_cmd.assert_called_once_with(self._project, self._location, mock.ANY, [], self._gcs_temp_path) - mock_process_client.wait_and_check.assert_called_once_with() @mock.patch.object( dataflow_python_job_remote_runner, 'stage_file', autospec=True) @@ -208,7 +206,6 @@ def test_create_python_job_parses_with_json_array_args_list_parses_correctly( mock_prepare_cmd.assert_called_once_with(self._project, self._location, mock.ANY, self._args, self._gcs_temp_path) - mock_process_client.wait_and_check.assert_called_once_with() @mock.patch.object( dataflow_python_job_remote_runner, 'stage_file', autospec=True) From 0f2cab9b390c22da8855c3d984ba1eca53cf117a Mon Sep 17 00:00:00 2001 From: Chen Sun Date: Fri, 29 Oct 2021 00:30:51 -0700 Subject: [PATCH 31/31] fix(sdk.v2): Fix display name support for groups (#6832) * fix display name for experimental tasks_group * release note --- sdk/RELEASE.md | 1 + .../compiler/experimental/pipeline_spec_builder.py | 2 +- .../experimental_pipeline_with_exit_handler.json | 6 +++--- .../experimental_pipeline_with_exit_handler.py | 5 +++-- .../test_data/experimental_pipeline_with_loops.json | 4 ++-- .../test_data/experimental_pipeline_with_loops.py | 2 +- ...mental_pipeline_with_nested_conditions_yaml.json | 6 +++--- ...rimental_pipeline_with_nested_conditions_yaml.py | 4 ++-- 
.../kfp/v2/components/experimental/tasks_group.py | 13 ++++++------- 9 files changed, 22 insertions(+), 21 deletions(-) diff --git a/sdk/RELEASE.md b/sdk/RELEASE.md index 231858833c2..276a09c7f77 100644 --- a/sdk/RELEASE.md +++ b/sdk/RELEASE.md @@ -17,6 +17,7 @@ ## Bug Fixes and Other Changes * Fix importer ignoring reimport setting, and switch to Protobuf.Value for import uri [\#6827](https://github.com/kubeflow/pipelines/pull/6827) +* Fix display name support for groups [\#6832](https://github.com/kubeflow/pipelines/pull/6832) ## Documentation Updates diff --git a/sdk/python/kfp/v2/compiler/experimental/pipeline_spec_builder.py b/sdk/python/kfp/v2/compiler/experimental/pipeline_spec_builder.py index b2339c57584..688f69fceee 100644 --- a/sdk/python/kfp/v2/compiler/experimental/pipeline_spec_builder.py +++ b/sdk/python/kfp/v2/compiler/experimental/pipeline_spec_builder.py @@ -729,7 +729,7 @@ def build_task_spec_for_group( A PipelineTaskSpec object representing the group. """ pipeline_task_spec = pipeline_spec_pb2.PipelineTaskSpec() - pipeline_task_spec.task_info.name = group.name + pipeline_task_spec.task_info.name = group.display_name or group.name pipeline_task_spec.component_ref.name = ( component_utils.sanitize_component_name(group.name)) diff --git a/sdk/python/kfp/v2/compiler_cli_tests/test_data/experimental_pipeline_with_exit_handler.json b/sdk/python/kfp/v2/compiler_cli_tests/test_data/experimental_pipeline_with_exit_handler.json index 28249e636d0..1de61289bda 100644 --- a/sdk/python/kfp/v2/compiler_cli_tests/test_data/experimental_pipeline_with_exit_handler.json +++ b/sdk/python/kfp/v2/compiler_cli_tests/test_data/experimental_pipeline_with_exit_handler.json @@ -137,7 +137,7 @@ } }, "taskInfo": { - "name": "exit-handler-1" + "name": "Pipeline with exit handler" } }, "print-op": { @@ -160,7 +160,7 @@ } }, "taskInfo": { - "name": "print-op" + "name": "my exit handler" }, "triggerPolicy": { "strategy": "ALL_UPSTREAM_TASKS_COMPLETED" @@ -178,5 +178,5 @@ } }, "schemaVersion": "2.1.0", - "sdkVersion": "kfp-1.8.6" + "sdkVersion": "kfp-1.8.7" } \ No newline at end of file diff --git a/sdk/python/kfp/v2/compiler_cli_tests/test_data/experimental_pipeline_with_exit_handler.py b/sdk/python/kfp/v2/compiler_cli_tests/test_data/experimental_pipeline_with_exit_handler.py index fb88b5252f9..a0c1b496770 100644 --- a/sdk/python/kfp/v2/compiler_cli_tests/test_data/experimental_pipeline_with_exit_handler.py +++ b/sdk/python/kfp/v2/compiler_cli_tests/test_data/experimental_pipeline_with_exit_handler.py @@ -54,9 +54,10 @@ @dsl.pipeline(name='pipeline-with-exit-handler') def my_pipeline(message: str = 'Hello World!'): - exit_task = print_op(msg='Exit handler has worked!') + exit_task = print_op( + msg='Exit handler has worked!').set_display_name('my exit handler') - with dsl.ExitHandler(exit_task): + with dsl.ExitHandler(exit_task, name='Pipeline with exit handler'): print_op(msg=message) fail_op(msg='Task failed.') diff --git a/sdk/python/kfp/v2/compiler_cli_tests/test_data/experimental_pipeline_with_loops.json b/sdk/python/kfp/v2/compiler_cli_tests/test_data/experimental_pipeline_with_loops.json index 9a20d3dde4d..8ed0bb61700 100644 --- a/sdk/python/kfp/v2/compiler_cli_tests/test_data/experimental_pipeline_with_loops.json +++ b/sdk/python/kfp/v2/compiler_cli_tests/test_data/experimental_pipeline_with_loops.json @@ -383,7 +383,7 @@ } }, "taskInfo": { - "name": "for-loop-1" + "name": "loop through a list" } }, "for-loop-2": { @@ -438,5 +438,5 @@ } }, "schemaVersion": "2.1.0", - "sdkVersion": "kfp-1.8.6" 
+ "sdkVersion": "kfp-1.8.7" } \ No newline at end of file diff --git a/sdk/python/kfp/v2/compiler_cli_tests/test_data/experimental_pipeline_with_loops.py b/sdk/python/kfp/v2/compiler_cli_tests/test_data/experimental_pipeline_with_loops.py index 522b97a67cb..7567535cdca 100644 --- a/sdk/python/kfp/v2/compiler_cli_tests/test_data/experimental_pipeline_with_loops.py +++ b/sdk/python/kfp/v2/compiler_cli_tests/test_data/experimental_pipeline_with_loops.py @@ -79,7 +79,7 @@ def my_pipeline(loop_parameter: List[str]): # Loop argument is from a pipeline input - with dsl.ParallelFor(loop_parameter) as item: + with dsl.ParallelFor(loop_parameter, name='loop through a list') as item: print_op(msg=item) # Loop argument is from a component output diff --git a/sdk/python/kfp/v2/compiler_cli_tests/test_data/experimental_pipeline_with_nested_conditions_yaml.json b/sdk/python/kfp/v2/compiler_cli_tests/test_data/experimental_pipeline_with_nested_conditions_yaml.json index 59c34e2b35a..b4c4050f0ba 100644 --- a/sdk/python/kfp/v2/compiler_cli_tests/test_data/experimental_pipeline_with_nested_conditions_yaml.json +++ b/sdk/python/kfp/v2/compiler_cli_tests/test_data/experimental_pipeline_with_nested_conditions_yaml.json @@ -490,7 +490,7 @@ } }, "taskInfo": { - "name": "condition-1" + "name": "it was heads!" }, "triggerPolicy": { "condition": "inputs.parameter_values['pipelinechannel--flip-coin-output'] == 'heads'" @@ -514,7 +514,7 @@ } }, "taskInfo": { - "name": "condition-4" + "name": "it was tails!" }, "triggerPolicy": { "condition": "inputs.parameter_values['pipelinechannel--flip-coin-output'] == 'tails'" @@ -535,5 +535,5 @@ } }, "schemaVersion": "2.1.0", - "sdkVersion": "kfp-1.8.6" + "sdkVersion": "kfp-1.8.7" } \ No newline at end of file diff --git a/sdk/python/kfp/v2/compiler_cli_tests/test_data/experimental_pipeline_with_nested_conditions_yaml.py b/sdk/python/kfp/v2/compiler_cli_tests/test_data/experimental_pipeline_with_nested_conditions_yaml.py index ea9c84db652..07cbb023545 100644 --- a/sdk/python/kfp/v2/compiler_cli_tests/test_data/experimental_pipeline_with_nested_conditions_yaml.py +++ b/sdk/python/kfp/v2/compiler_cli_tests/test_data/experimental_pipeline_with_nested_conditions_yaml.py @@ -70,14 +70,14 @@ def random_num_op(low, high): description='Shows how to use dsl.Condition().') def my_pipeline(): flip = flip_coin_op() - with dsl.Condition(flip.output == 'heads'): + with dsl.Condition(flip.output == 'heads', name='it was heads!'): random_num_head = random_num_op(0, 9)() with dsl.Condition(random_num_head.output > 5): print_op(msg='heads and %s > 5!' % random_num_head.output) with dsl.Condition(random_num_head.output <= 5): print_op(msg='heads and %s <= 5!' % random_num_head.output) - with dsl.Condition(flip.output == 'tails'): + with dsl.Condition(flip.output == 'tails', name='it was tails!'): random_num_tail = random_num_op(10, 19)() with dsl.Condition(random_num_tail.output > 15): print_op(msg='tails and %s > 15!' % random_num_tail.output) diff --git a/sdk/python/kfp/v2/components/experimental/tasks_group.py b/sdk/python/kfp/v2/components/experimental/tasks_group.py index 1c142c7f2a2..244a48da86d 100644 --- a/sdk/python/kfp/v2/components/experimental/tasks_group.py +++ b/sdk/python/kfp/v2/components/experimental/tasks_group.py @@ -42,7 +42,7 @@ class TasksGroup: group_type: The type of the TasksGroup. tasks: A list of all PipelineTasks in this group. groups: A list of TasksGroups in this group. - name: The optional user given name of the group. 
+ display_name: The optional user-given name of the group. dependencies: A list of tasks or groups this group depends on. """ @@ -55,12 +55,12 @@ def __init__( Args: group_type: The type of the group. - name: Optional; the name of the group. + name: Optional; the name of the group, used as its display name in the UI. """ self.group_type = group_type self.tasks = list() self.groups = list() - self.name = name + self.display_name = name self.dependencies = [] def __enter__(self): @@ -80,10 +80,9 @@ def _make_name_unique(self): if not pipeline_context.Pipeline.get_default_pipeline(): raise ValueError('Default pipeline not defined.') - self.name = ( - self.group_type + '-' + - ('' if self.name is None else self.name + '-') + pipeline_context - .Pipeline.get_default_pipeline().get_next_group_id()) + group_id = pipeline_context.Pipeline.get_default_pipeline( + ).get_next_group_id() + self.name = f'{self.group_type}-{group_id}' self.name = self.name.replace('_', '-') def remove_task_recursive(self, task: pipeline_task.PipelineTask):
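
A quick usage sketch of the loaders introduced in PATCH 29. The inline YAML mirrors the simple_yaml.yaml test data above; 'my_component.yaml' and the URL below are hypothetical placeholders rather than artifacts that ship with the SDK. Note that load_component_from_url also accepts gs:// URIs, rewriting them to their public https://storage.googleapis.com form before fetching.

import textwrap

from kfp.v2.components.experimental import yaml_component

# Inline YAML, same schema as the simple_yaml.yaml test data.
SAMPLE_YAML = textwrap.dedent('''\
    name: component_1
    inputs:
      input1: {type: String}
    outputs:
      output1: {type: String}
    implementation:
      container:
        image: alpine
        commands:
        - sh
        - -c
        - echo "$0" > "$1"
        - {inputValue: input1}
        - {outputPath: output1}
    ''')

# Parse the YAML into a component without touching disk or network.
component = yaml_component.load_component_from_text(SAMPLE_YAML)
assert component.name == 'component_1'

# The same definition can be loaded from a local file or from a URL;
# both locations here are placeholders.
component = yaml_component.load_component_from_file('my_component.yaml')
component = yaml_component.load_component_from_url(
    'https://raw.githubusercontent.com/some/repo/components/component.yaml')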
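
Likewise, a short sketch of the group display-name support fixed in PATCH 31, mirroring the updated exit-handler test pipeline. It assumes the experimental v2 DSL surface built in this patch series (the stable kfp.v2.dsl does not yet accept name= on groups), and print_op is a stand-in for the echo component defined in the test data:

from kfp import components
from kfp.v2 import dsl

# Stand-in for the test data's print_op; any one-input echo component
# will do.
print_op = components.load_component_from_text('''
name: print-op
inputs:
- {name: msg, type: String}
implementation:
  container:
    image: alpine
    command: [echo, {inputValue: msg}]
''')

@dsl.pipeline(name='pipeline-with-exit-handler')
def my_pipeline(message: str = 'Hello World!'):
    # set_display_name() renames an individual task in the UI.
    exit_task = print_op(
        msg='Exit handler has worked!').set_display_name('my exit handler')

    # With the fix, the optional name= argument becomes the group's
    # display name ('Pipeline with exit handler'), while the sanitized
    # internal group ID ('exit-handler-1') remains unique.
    with dsl.ExitHandler(exit_task, name='Pipeline with exit handler'):
        print_op(msg=message)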