diff --git a/.gitignore b/.gitignore index 588872d177cc..12dc7cd609ff 100644 --- a/.gitignore +++ b/.gitignore @@ -39,6 +39,7 @@ frontend/test/ui/visual-regression/screenshots/screen *.pyc .DS_Store +build .ipynb_checkpoints *.egg-info @@ -46,6 +47,9 @@ frontend/test/ui/visual-regression/screenshots/screen # go vendor vendor +# Go module cache +backend/pkg/mod/cache + # Bazel output artifacts bazel-* @@ -61,3 +65,4 @@ _artifacts # Generated Python SDK documentation docs/_build + diff --git a/.pylintrc b/.pylintrc new file mode 100644 index 000000000000..c915d10042a2 --- /dev/null +++ b/.pylintrc @@ -0,0 +1,410 @@ +[MASTER] + +# Specify a configuration file. +#rcfile= + +# Python code to execute, usually for sys.path manipulation such as +# pygtk.require(). +#init-hook= + +# Add files or directories to the blacklist. They should be base names, not +# paths. +ignore=CVS + +# Add files or directories matching the regex patterns to the blacklist. The +# regex matches against base names, not paths. +ignore-patterns= + +# Pickle collected data for later comparisons. +persistent=yes + +# List of plugins (as comma separated values of python modules names) to load, +# usually to register additional checkers. +load-plugins= + +# Use multiple processes to speed up Pylint. +jobs=1 + +# Allow loading of arbitrary C extensions. Extensions are imported into the +# active Python interpreter and may run arbitrary code. +unsafe-load-any-extension=no + +# A comma-separated list of package or module names from where C extensions may +# be loaded. Extensions are loading into the active Python interpreter and may +# run arbitrary code +extension-pkg-whitelist=numpy + +# Allow optimization of some AST trees. This will activate a peephole AST +# optimizer, which will apply various small optimizations. For instance, it can +# be used to obtain the result of joining multiple strings with the addition +# operator. Joining a lot of strings can lead to a maximum recursion error in +# Pylint and this flag can prevent that. It has one side effect, the resulting +# AST will be different than the one from reality. This option is deprecated +# and it will be removed in Pylint 2.0. +optimize-ast=no + + +[MESSAGES CONTROL] + +# Only show warnings with the listed confidence levels. Leave empty to show +# all. Valid levels: HIGH, INFERENCE, INFERENCE_FAILURE, UNDEFINED +confidence= + +# Enable the message, report, category or checker with the given id(s). You can +# either give multiple identifier separated by comma (,) or put this option +# multiple time (only on the command line, not in the configuration file where +# it should appear only once). See also the "--disable" option for examples. +#enable= + +# Disable the message, report, category or checker with the given id(s). You +# can either give multiple identifiers separated by comma (,) or put this +# option multiple times (only on the command line, not in the configuration +# file where it should appear only once).You can also use "--disable=all" to +# disable everything first and then reenable specific checks. For example, if +# you want to run only the similarities checker, you can use "--disable=all +# --enable=similarities". If you want to run only the classes checker, but have +# no Warning level messages displayed, use"--disable=all --enable=classes +# --disable=W" +# TODO(numerology): enable missing-module-docstring after finish the effort. +disable=missing-module-docstring + + +[REPORTS] + +# Set the output format. 
Available formats are text, parseable, colorized, msvs +# (visual studio) and html. You can also give a reporter class, eg +# mypackage.mymodule.MyReporterClass. +output-format=text + +# Put messages in a separate file for each module / package specified on the +# command line instead of printing them on stdout. Reports (if any) will be +# written in a file name "pylint_global.[txt|html]". This option is deprecated +# and it will be removed in Pylint 2.0. +files-output=no + +# Tells whether to display a full report or only the messages +reports=yes + +# Python expression which should return a note less than 10 (10 is the highest +# note). You have access to the variables errors warning, statement which +# respectively contain the number of errors / warnings messages and the total +# number of statements analyzed. This is used by the global evaluation report +# (RP0004). +evaluation=10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10) + +# Template used to display messages. This is a python new-style format string +# used to format the message information. See doc for all details +#msg-template= + + +[BASIC] + +# Good variable names which should always be accepted, separated by a comma +# s3 is whitelisted for its special meaning. +good-names=i,j,k,ex,Run,_,s3 + +# Bad variable names which should always be refused, separated by a comma +bad-names=foo,bar,baz,toto,tutu,tata + +# Colon-delimited sets of names that determine each other's naming style when +# the name regexes allow several styles. +name-group= + +# Include a hint for the correct naming format with invalid-name +include-naming-hint=no + +# List of decorators that produce properties, such as abc.abstractproperty. Add +# to this list to register other decorators that produce valid properties. 
+property-classes=abc.abstractproperty + +# Regular expression matching correct variable names +variable-rgx=[a-z_][a-z0-9_]{2,30}$ + +# Naming hint for variable names +variable-name-hint=[a-z_][a-z0-9_]{2,30}$ + +# Regular expression matching correct class attribute names +class-attribute-rgx=([A-Za-z_][A-Za-z0-9_]{2,30}|(__.*__))$ + +# Naming hint for class attribute names +class-attribute-name-hint=([A-Za-z_][A-Za-z0-9_]{2,30}|(__.*__))$ + +# Regular expression matching correct argument names +argument-rgx=[a-z_][a-z0-9_]{2,30}$ + +# Naming hint for argument names +argument-name-hint=[a-z_][a-z0-9_]{2,30}$ + +# Regular expression matching correct module names +module-rgx=(([a-z_][a-z0-9_]*)|([A-Z][a-zA-Z0-9]+))$ + +# Naming hint for module names +module-name-hint=(([a-z_][a-z0-9_]*)|([A-Z][a-zA-Z0-9]+))$ + +# Regular expression matching correct constant names +const-rgx=(([A-Z_][A-Z0-9_]*)|(__.*__))$ + +# Naming hint for constant names +const-name-hint=(([A-Z_][A-Z0-9_]*)|(__.*__))$ + +# Regular expression matching correct inline iteration names +inlinevar-rgx=[A-Za-z_][A-Za-z0-9_]*$ + +# Naming hint for inline iteration names +inlinevar-name-hint=[A-Za-z_][A-Za-z0-9_]*$ + +# Regular expression matching correct method names +method-rgx=[a-z_][a-z0-9_]{2,30}$ + +# Naming hint for method names +method-name-hint=[a-z_][a-z0-9_]{2,30}$ + +# Regular expression matching correct function names +function-rgx=[a-z_][a-z0-9_]{2,30}$ + +# Naming hint for function names +function-name-hint=[a-z_][a-z0-9_]{2,30}$ + +# Regular expression matching correct attribute names +attr-rgx=[a-z_][a-z0-9_]{2,30}$ + +# Naming hint for attribute names +attr-name-hint=[a-z_][a-z0-9_]{2,30}$ + +# Regular expression matching correct class names +class-rgx=[A-Z_][a-zA-Z0-9]+$ + +# Naming hint for class names +class-name-hint=[A-Z_][a-zA-Z0-9]+$ + +# Regular expression which should only match function or class names that do +# not require a docstring. +no-docstring-rgx=^test_ + +# Minimum line length for functions/classes that require docstrings, shorter +# ones are exempt. +docstring-min-length=-1 + + +[ELIF] + +# Maximum number of nested blocks for function / method body +max-nested-blocks=5 + + +[FORMAT] + +# Maximum number of characters on a single line. +max-line-length=80 + +# Regexp for a line that is allowed to be longer than the limit. +# Ignoring 1) links in comment, 2) component yaml specs and 3) import statements. +ignore-long-lines=(^\s*(# )??$|^.*'https://raw\.githubusercontent\.com/kubeflow/pipelines\S*$|^\s*(import|from).*$) + +# Allow the body of an if to be on the same line as the test if there is no +# else. +single-line-if-stmt=y + +# List of optional constructs for which whitespace checking is disabled. `dict- +# separator` is used to allow tabulation in dicts, etc.: {1 : 1,\n222: 2}. +# `trailing-comma` allows a space between comma and closing bracket: (a, ). +# `empty-line` allows space-only lines. +no-space-check=trailing-comma,dict-separator + +# Maximum number of lines in a module +max-module-lines=1000 + +# String used as indentation unit. This is usually " " (4 spaces) or "\t" (1 +# tab). +indent-string=' ' + +# Number of spaces of indent required inside a hanging or continued line. +indent-after-paren=4 + +# Expected format of line ending, e.g. empty (any line ending), LF or CRLF. 
+expected-line-ending-format= + + +[LOGGING] + +# Logging modules to check that the string format arguments are in logging +# function parameter format +logging-modules=logging + + +[MISCELLANEOUS] + +# List of note tags to take in consideration, separated by a comma. +notes=FIXME,XXX,TODO + + +[SIMILARITIES] + +# Minimum lines number of a similarity. +min-similarity-lines=10 + +# Ignore comments when computing similarities. +ignore-comments=yes + +# Ignore docstrings when computing similarities. +ignore-docstrings=yes + +# Ignore imports when computing similarities. +ignore-imports=no + + +[SPELLING] + +# Spelling dictionary name. Available dictionaries: none. To make it working +# install python-enchant package. +spelling-dict= + +# List of comma separated words that should not be checked. +spelling-ignore-words= + +# A path to a file that contains private dictionary; one word per line. +spelling-private-dict-file= + +# Tells whether to store unknown words to indicated private dictionary in +# --spelling-private-dict-file option instead of raising a message. +spelling-store-unknown-words=no + + +[TYPECHECK] + +# Tells whether missing members accessed in mixin class should be ignored. A +# mixin class is detected if its name ends with "mixin" (case insensitive). +ignore-mixin-members=yes + +# List of module names for which member attributes should not be checked +# (useful for modules/projects where namespaces are manipulated during runtime +# and thus existing member attributes cannot be deduced by static analysis. It +# supports qualified module names, as well as Unix pattern matching. +ignored-modules= + +# List of class names for which member attributes should not be checked (useful +# for classes with dynamically set attributes). This supports the use of +# qualified names. +ignored-classes=optparse.Values,thread._local,_thread._local,matplotlib.cm,tensorflow.python,tensorflow,tensorflow.train.Example,RunOptions + +# List of members which are set dynamically and missed by pylint inference +# system, and so shouldn't trigger E1101 when accessed. Python regular +# expressions are accepted. +generated-members=set_shape,np.float32 + +# List of decorators that produce context managers, such as +# contextlib.contextmanager. Add to this list to register other decorators that +# produce valid context managers. +contextmanager-decorators=contextlib.contextmanager + + +[VARIABLES] + +# Tells whether we should check for unused import in __init__ files. +init-import=no + +# A regular expression matching the name of dummy variables (i.e. expectedly +# not used). +dummy-variables-rgx=(_+[a-zA-Z0-9_]*?$)|dummy + +# List of additional names supposed to be defined in builtins. Remember that +# you should avoid to define new builtins when possible. +additional-builtins= + +# List of strings which can identify a callback function by name. A callback +# name must start or end with one of those strings. +callbacks=cb_,_cb + +# List of qualified module names which can have objects that can redefine +# builtins. +redefining-builtins-modules=six.moves,future.builtins + + +[CLASSES] + +# List of method names used to declare (i.e. assign) instance attributes. +defining-attr-methods=__init__,__new__,setUp + +# List of valid names for the first argument in a class method. +valid-classmethod-first-arg=cls + +# List of valid names for the first argument in a metaclass class method. +valid-metaclass-classmethod-first-arg=mcs + +# List of member names, which should be excluded from the protected access +# warning. 
+exclude-protected=_asdict,_fields,_replace,_source,_make
+
+
+[DESIGN]
+
+# Maximum number of arguments for function / method
+max-args=10
+
+# Argument names that match this expression will be ignored. Defaults to names
+# with a leading underscore.
+ignored-argument-names=_.*
+
+# Maximum number of locals for function / method body
+max-locals=30
+
+# Maximum number of return / yield for function / method body
+max-returns=6
+
+# Maximum number of branches for function / method body
+max-branches=12
+
+# Maximum number of statements in function / method body
+max-statements=100
+
+# Maximum number of parents for a class (see R0901).
+max-parents=7
+
+# Maximum number of attributes for a class (see R0902).
+max-attributes=10
+
+# Minimum number of public methods for a class (see R0903).
+min-public-methods=0
+
+# Maximum number of public methods for a class (see R0904).
+max-public-methods=20
+
+# Maximum number of boolean expressions in an if statement
+max-bool-expr=5
+
+
+[IMPORTS]
+
+# Deprecated modules which should not be used, separated by a comma
+deprecated-modules=optparse
+
+# Create a graph of all (i.e. internal and external) dependencies in the
+# given file (report RP0402 must not be disabled)
+import-graph=
+
+# Create a graph of external dependencies in the given file (report RP0402 must
+# not be disabled)
+ext-import-graph=
+
+# Create a graph of internal dependencies in the given file (report RP0402 must
+# not be disabled)
+int-import-graph=
+
+# Force import order to recognize a module as part of the standard
+# compatibility libraries.
+known-standard-library=
+
+# Force import order to recognize a module as part of a third party library.
+known-third-party=enchant
+
+# Analyse import fallback blocks. This can be used to support both Python 2 and
+# 3 compatible code, which means that the block might have code that exists
+# only in one or another interpreter, leading to false positives when analysed.
+analyse-fallback-blocks=no
+
+
+[EXCEPTIONS]
+
+# Exceptions that will emit a warning when being caught. Defaults to
+# "Exception"
+overgeneral-exceptions=Exception
\ No newline at end of file
diff --git a/.style.yapf b/.style.yapf
new file mode 100644
index 000000000000..74248ecfe6e8
--- /dev/null
+++ b/.style.yapf
@@ -0,0 +1,9 @@
+[style]
+based_on_style = google
+blank_line_before_nested_class_or_def = true
+column_limit = 80
+continuation_indent_width = 4
+dedent_closing_brackets = true
+coalesce_brackets = true
+indent_width = 2
+split_before_first_argument = true
\ No newline at end of file
diff --git a/.travis.yml b/.travis.yml
index 6962de1e8297..ed73a18046bf 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -23,9 +23,9 @@ matrix:
       # Frontend tests
       - cd $TRAVIS_BUILD_DIR/frontend
       - node -v
-      - npm i
+      - npm ci
+      # Comment out the next line to unblock CI if Coveralls has an ongoing outage.
- - npm run test:coveralls + - npm run test:ci - language: generic env: - BAZEL_URL="https://github.com/bazelbuild/bazel/releases/download/0.23.0/bazel-0.23.0-installer-linux-x86_64.sh" @@ -64,26 +64,54 @@ matrix: - language: python python: "3.5" env: TOXENV=py35 + before_install: + - export PYTHONPATH=$PYTHONPATH:/home/travis/.local/lib/python3.5/site-packages/ install: &0 + - python3 -m pip install -r $TRAVIS_BUILD_DIR/sdk/python/requirements.txt # Additional dependencies - - pip3 install coverage coveralls jsonschema==3.0.1 + - pip3 install coverage coveralls # Sample test infra dependencies - pip3 install minio - pip3 install junit_xml # Visualization test dependencies - cd $TRAVIS_BUILD_DIR/backend/src/apiserver/visualization - pip3 install -r requirements-test.txt - script: &1 - - # DSL tests + script: &1 # DSL tests - cd $TRAVIS_BUILD_DIR/sdk/python - - python3 setup.py develop + - python3 -m pip install -e . - cd $TRAVIS_BUILD_DIR # Changing the current directory to the repo root for correct coverall paths - coverage run --source=kfp --append sdk/python/tests/dsl/main.py - coverage run --source=kfp --append sdk/python/tests/compiler/main.py - coverage run --source=kfp --append -m unittest discover --verbose --start-dir sdk/python/tests --top-level-directory=sdk/python - coveralls + # Test against TFX + # Compile and setup protobuf + - PROTOC_ZIP=protoc-3.7.1-linux-x86_64.zip + - curl -OL https://github.com/protocolbuffers/protobuf/releases/download/v3.7.1/$PROTOC_ZIP + - sudo unzip -o $PROTOC_ZIP -d /usr/local bin/protoc + - sudo unzip -o $PROTOC_ZIP -d /usr/local 'include/*' + - rm -f $PROTOC_ZIP + # Install TFX from head + - cd $TRAVIS_BUILD_DIR + - git clone https://github.com/tensorflow/tfx.git + - cd $TRAVIS_BUILD_DIR/tfx + - pip3 install --upgrade pip + - python3 -m pip install "tensorflow>=1.14,<2" + - set -x + - set -e + - python3 setup.py bdist_wheel + - WHEEL_PATH=$(find dist -name "tfx-*.whl") + - python3 -m pip install "${WHEEL_PATH}" --upgrade + - set +e + - set +x + # Two KFP-related unittests + - cd $TRAVIS_BUILD_DIR/tfx/tfx/orchestration/kubeflow + - python3 kubeflow_dag_runner_test.py + - cd $TRAVIS_BUILD_DIR/tfx/tfx/examples/chicago_taxi_pipeline + - python3 taxi_pipeline_kubeflow_gcp_test.py + - python3 taxi_pipeline_kubeflow_local_test.py + # Visualization test - cd $TRAVIS_BUILD_DIR/backend/src/apiserver/visualization - python3 test_exporter.py @@ -102,11 +130,15 @@ matrix: - language: python python: "3.6" env: TOXENV=py36 + before_install: + - export PYTHONPATH=$PYTHONPATH:/home/travis/.local/lib/python3.6/site-packages/ install: *0 script: *1 - language: python python: "3.7" env: TOXENV=py37 + before_install: + - export PYTHONPATH=$PYTHONPATH:/home/travis/.local/lib/python3.7/site-packages/ install: *0 script: *1 - name: "Lint Python code with flake8" diff --git a/CHANGELOG.md b/CHANGELOG.md index 4295c1474c57..c942317a1e3f 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,190 @@ # Change Log +## [0.1.33](https://github.com/kubeflow/pipelines/tree/0.1.33) (2019-11-02) + +[Full Changelog](https://github.com/kubeflow/pipelines/compare/0.1.32...0.1.33) + +**Merged pull requests:** + +- Update kustomization.yaml [\#2530](https://github.com/kubeflow/pipelines/pull/2530) ([IronPan](https://github.com/IronPan)) +- Update setup.py [\#2528](https://github.com/kubeflow/pipelines/pull/2528) ([IronPan](https://github.com/IronPan)) +- Update \_\_init\_\_.py [\#2527](https://github.com/kubeflow/pipelines/pull/2527) ([IronPan](https://github.com/IronPan)) +- 
\[Sample\] Align preload TFX sample with TFX head [\#2526](https://github.com/kubeflow/pipelines/pull/2526) ([numerology](https://github.com/numerology)) +- SDK - Compiler - Fixed failures on Jinja placeholders [\#2522](https://github.com/kubeflow/pipelines/pull/2522) ([Ark-kun](https://github.com/Ark-kun)) +- \[Test\] Fix TFX related Travis tests [\#2521](https://github.com/kubeflow/pipelines/pull/2521) ([numerology](https://github.com/numerology)) +- update location for logo [\#2520](https://github.com/kubeflow/pipelines/pull/2520) ([IronPan](https://github.com/IronPan)) +- \[MKP\] Reduce the logo image size. [\#2519](https://github.com/kubeflow/pipelines/pull/2519) ([numerology](https://github.com/numerology)) +- Frontend - Added support for https artifact links [\#2517](https://github.com/kubeflow/pipelines/pull/2517) ([Ark-kun](https://github.com/Ark-kun)) +- Pin tensorboard version to 1.13.2 [\#2513](https://github.com/kubeflow/pipelines/pull/2513) ([IronPan](https://github.com/IronPan)) +- clean up viewer crd spec [\#2511](https://github.com/kubeflow/pipelines/pull/2511) ([IronPan](https://github.com/IronPan)) +- remove unnecessary namespace in UI rolebinding spec [\#2510](https://github.com/kubeflow/pipelines/pull/2510) ([IronPan](https://github.com/IronPan)) +- \[UI\] Fix metadata tabs loading state [\#2508](https://github.com/kubeflow/pipelines/pull/2508) ([Bobgy](https://github.com/Bobgy)) +- Regenerate api since https://github.com/kubeflow/pipelines/pull/2445 changed api proto [\#2506](https://github.com/kubeflow/pipelines/pull/2506) ([jingzhang36](https://github.com/jingzhang36)) +- \[Sample\] Replace deprecated KubeflowRunner in TFX sample [\#2499](https://github.com/kubeflow/pipelines/pull/2499) ([numerology](https://github.com/numerology)) +- Samples - Renamed component build to container build [\#2496](https://github.com/kubeflow/pipelines/pull/2496) ([Ark-kun](https://github.com/Ark-kun)) +- \[Frontend\] Enlarge choose pipeline dialog to show more description [\#2494](https://github.com/kubeflow/pipelines/pull/2494) ([Bobgy](https://github.com/Bobgy)) +- update inverse proxy custom permission setting and role setting [\#2493](https://github.com/kubeflow/pipelines/pull/2493) ([rmgogogo](https://github.com/rmgogogo)) +- Fix pipeline description 255 characters length limit [\#2492](https://github.com/kubeflow/pipelines/pull/2492) ([Bobgy](https://github.com/Bobgy)) +- \[MKP\] Fix metadata DB configmap [\#2491](https://github.com/kubeflow/pipelines/pull/2491) ([numerology](https://github.com/numerology)) +- \[Sample\] Add the run ID place holder to TFX sample, fix metadb config in preload sample as well [\#2487](https://github.com/kubeflow/pipelines/pull/2487) ([numerology](https://github.com/numerology)) +- Fix inverse proxy matching regex [\#2486](https://github.com/kubeflow/pipelines/pull/2486) ([IronPan](https://github.com/IronPan)) +- update base image to fix the GKE GC issue for marketplace deployment [\#2484](https://github.com/kubeflow/pipelines/pull/2484) ([IronPan](https://github.com/IronPan)) +- better doc for MKP-KFP deployment [\#2481](https://github.com/kubeflow/pipelines/pull/2481) ([rmgogogo](https://github.com/rmgogogo)) +- \[Frontend\] Fix log viewer cannot scroll horizontally + other minor issues [\#2480](https://github.com/kubeflow/pipelines/pull/2480) ([Bobgy](https://github.com/Bobgy)) +- \[Sample\] Update pre-load TFX::OSS sample [\#2476](https://github.com/kubeflow/pipelines/pull/2476) ([numerology](https://github.com/numerology)) +- SDK - Python 
components - Fixed bug when mixing file outputs with return value outputs [\#2473](https://github.com/kubeflow/pipelines/pull/2473) ([Ark-kun](https://github.com/Ark-kun)) +- Update samples/core/ai\_platform pipeline to follow data dependency [\#2472](https://github.com/kubeflow/pipelines/pull/2472) ([ucdmkt](https://github.com/ucdmkt)) +- Add option to hide tensorboard artifact [\#2466](https://github.com/kubeflow/pipelines/pull/2466) ([hlu09](https://github.com/hlu09)) +- Release notes for 0.1.32 [\#2465](https://github.com/kubeflow/pipelines/pull/2465) ([hongye-sun](https://github.com/hongye-sun)) +- \[Frontend\] Update CONTRIBUTING.md with frontend code style info [\#2464](https://github.com/kubeflow/pipelines/pull/2464) ([Bobgy](https://github.com/Bobgy)) +- \[Frontend\] Check format in travis CI [\#2463](https://github.com/kubeflow/pipelines/pull/2463) ([Bobgy](https://github.com/Bobgy)) +- Format all source files under frontend/src using prettier [\#2462](https://github.com/kubeflow/pipelines/pull/2462) ([Bobgy](https://github.com/Bobgy)) +- \[Frontend\] UI shows specified task display name in PipelineDetail page [\#2459](https://github.com/kubeflow/pipelines/pull/2459) ([Bobgy](https://github.com/Bobgy)) +- clusterrole for pipeline-runner with seldondeployments [\#2458](https://github.com/kubeflow/pipelines/pull/2458) ([MingfeiPan](https://github.com/MingfeiPan)) +- Use string literals for reference resource relation and for reference resource type in frontend [\#2453](https://github.com/kubeflow/pipelines/pull/2453) ([jingzhang36](https://github.com/jingzhang36)) +- SDK - Components - Added type to graph input references [\#2451](https://github.com/kubeflow/pipelines/pull/2451) ([Ark-kun](https://github.com/Ark-kun)) +- Fix documentation for filter.proto [\#2447](https://github.com/kubeflow/pipelines/pull/2447) ([neuromage](https://github.com/neuromage)) +- \[Request for comments\] Add config for yapf and pylintrc [\#2446](https://github.com/kubeflow/pipelines/pull/2446) ([numerology](https://github.com/numerology)) +- Runs and jobs can be created from pipeline version [\#2445](https://github.com/kubeflow/pipelines/pull/2445) ([jingzhang36](https://github.com/jingzhang36)) +- Fix CustomTable.tsx layout problems [\#2444](https://github.com/kubeflow/pipelines/pull/2444) ([Bobgy](https://github.com/Bobgy)) +- Add --bind\_all option for tensorboard [\#2441](https://github.com/kubeflow/pipelines/pull/2441) ([daikeshi](https://github.com/daikeshi)) +- \[Test\] Fix post-submit test [\#2439](https://github.com/kubeflow/pipelines/pull/2439) ([numerology](https://github.com/numerology)) +- SDK - Client - Makes the create\_run output nicer [\#2438](https://github.com/kubeflow/pipelines/pull/2438) ([Ark-kun](https://github.com/Ark-kun)) +- SDK - Setup - Added cloudpickle to requirements [\#2437](https://github.com/kubeflow/pipelines/pull/2437) ([Ark-kun](https://github.com/Ark-kun)) +- Add owner files for marketplace deployment [\#2436](https://github.com/kubeflow/pipelines/pull/2436) ([IronPan](https://github.com/IronPan)) +- Update backend OWNERS [\#2435](https://github.com/kubeflow/pipelines/pull/2435) ([IronPan](https://github.com/IronPan)) +- Diagnose me dev env [\#2425](https://github.com/kubeflow/pipelines/pull/2425) ([SinaChavoshi](https://github.com/SinaChavoshi)) +- \[Doc\] Update permission requirement in README [\#2422](https://github.com/kubeflow/pipelines/pull/2422) ([numerology](https://github.com/numerology)) +- Adding the core libraries for diagnose\_me tool. 
[\#2417](https://github.com/kubeflow/pipelines/pull/2417) ([SinaChavoshi](https://github.com/SinaChavoshi)) +- \[Doc\] Minor doc update [\#2394](https://github.com/kubeflow/pipelines/pull/2394) ([numerology](https://github.com/numerology)) +- License crawler for third party golang libraries [\#2393](https://github.com/kubeflow/pipelines/pull/2393) ([Bobgy](https://github.com/Bobgy)) +- update link so that user can easily jump to CAIP after deployment [\#2377](https://github.com/kubeflow/pipelines/pull/2377) ([rmgogogo](https://github.com/rmgogogo)) +- \[Frontend\] Make links in pipeline description clickable [\#2376](https://github.com/kubeflow/pipelines/pull/2376) ([Bobgy](https://github.com/Bobgy)) +- SDK/CLI: Implement kfp pipeline group [\#2340](https://github.com/kubeflow/pipelines/pull/2340) ([elikatsis](https://github.com/elikatsis)) +- SDK - Tests - Fixed most of the test warnings [\#2336](https://github.com/kubeflow/pipelines/pull/2336) ([Ark-kun](https://github.com/Ark-kun)) +- SDK - Refactoring - Split the K8sHelper class [\#2333](https://github.com/kubeflow/pipelines/pull/2333) ([Ark-kun](https://github.com/Ark-kun)) +- tech writer edits [\#2332](https://github.com/kubeflow/pipelines/pull/2332) ([jay-saldanha](https://github.com/jay-saldanha)) +- Tests - Use base image for frontend tests [\#190](https://github.com/kubeflow/pipelines/pull/190) ([Ark-kun](https://github.com/Ark-kun)) + + +## [0.1.32](https://github.com/kubeflow/pipelines/tree/0.1.32) (2019-10-18) +[Full Changelog](https://github.com/kubeflow/pipelines/compare/0.1.31...0.1.32) + +**Merged pull requests:** + +- Update sdk release version [\#2434](https://github.com/kubeflow/pipelines/pull/2434) ([hongye-sun](https://github.com/hongye-sun)) +- Release e9b96de317989a9673ef88d88fb9dab9dac3005f [\#2433](https://github.com/kubeflow/pipelines/pull/2433) ([hongye-sun](https://github.com/hongye-sun)) +- \[Frontend\] Configure tslint for better DX [\#2431](https://github.com/kubeflow/pipelines/pull/2431) ([Bobgy](https://github.com/Bobgy)) +- \[Frontend\] Upgrade typescript to 3.6 [\#2428](https://github.com/kubeflow/pipelines/pull/2428) ([Bobgy](https://github.com/Bobgy)) +- SDK - DSL - Make is\_exit\_handler unnecessary in ContainerOp [\#2411](https://github.com/kubeflow/pipelines/pull/2411) ([Ark-kun](https://github.com/Ark-kun)) +- \[Frontend\] Prettier config to be consistent with existing code style [\#2409](https://github.com/kubeflow/pipelines/pull/2409) ([Bobgy](https://github.com/Bobgy)) +- tech writer edits [\#2403](https://github.com/kubeflow/pipelines/pull/2403) ([jay-saldanha](https://github.com/jay-saldanha)) +- \[Test/Sample test\] Fix model version in AI platform sample [\#2400](https://github.com/kubeflow/pipelines/pull/2400) ([numerology](https://github.com/numerology)) +- Update Watson ML default framework version [\#2398](https://github.com/kubeflow/pipelines/pull/2398) ([Tomcli](https://github.com/Tomcli)) +- Add Tomcli as kfs component reviewer [\#2396](https://github.com/kubeflow/pipelines/pull/2396) ([Tomcli](https://github.com/Tomcli)) +- quick custom spec fix [\#2390](https://github.com/kubeflow/pipelines/pull/2390) ([animeshsingh](https://github.com/animeshsingh)) +- add test config comment [\#2389](https://github.com/kubeflow/pipelines/pull/2389) ([gaoning777](https://github.com/gaoning777)) +- Remove jingzhang36 and rmgogogo from frontend reviewer list [\#2388](https://github.com/kubeflow/pipelines/pull/2388) ([Bobgy](https://github.com/Bobgy)) +- enable the check for dataflow 
[\#2387](https://github.com/kubeflow/pipelines/pull/2387) ([gaoning777](https://github.com/gaoning777)) +- Update samples/core/tfx-oss to tfx==0.14.0 and kfp=0.1.31 [\#2385](https://github.com/kubeflow/pipelines/pull/2385) ([ucdmkt](https://github.com/ucdmkt)) +- \[Sample\] Add back visualization in XGBoost sample [\#2384](https://github.com/kubeflow/pipelines/pull/2384) ([numerology](https://github.com/numerology)) +- move favicon path, root is override by inverse proxy [\#2382](https://github.com/kubeflow/pipelines/pull/2382) ([rmgogogo](https://github.com/rmgogogo)) +- \[Frontend\] Script and documentation to start a frontend dev env that works with all API endpoints [\#2381](https://github.com/kubeflow/pipelines/pull/2381) ([Bobgy](https://github.com/Bobgy)) +- add animesh to the approvers of the kfserving [\#2380](https://github.com/kubeflow/pipelines/pull/2380) ([gaoning777](https://github.com/gaoning777)) +- SDK - Added version [\#2374](https://github.com/kubeflow/pipelines/pull/2374) ([Ark-kun](https://github.com/Ark-kun)) +- tech writer edits [\#2373](https://github.com/kubeflow/pipelines/pull/2373) ([jay-saldanha](https://github.com/jay-saldanha)) +- \[Samples\] Add numerology as samples/OWNERS [\#2371](https://github.com/kubeflow/pipelines/pull/2371) ([numerology](https://github.com/numerology)) +- \[Frontend\] Fix cannot copy logs in LogViewer when scrolling [\#2370](https://github.com/kubeflow/pipelines/pull/2370) ([Bobgy](https://github.com/Bobgy)) +- KFServing move to v1alpha2 [\#2369](https://github.com/kubeflow/pipelines/pull/2369) ([animeshsingh](https://github.com/animeshsingh)) +- Components - Updated the gcp dataproc create\_cluster component image [\#2366](https://github.com/kubeflow/pipelines/pull/2366) ([Ark-kun](https://github.com/Ark-kun)) +- \[Doc\] Fix some description of preload samples [\#2361](https://github.com/kubeflow/pipelines/pull/2361) ([numerology](https://github.com/numerology)) +- \[Sample\] Improve tfx oss sample [\#2360](https://github.com/kubeflow/pipelines/pull/2360) ([numerology](https://github.com/numerology)) +- add cloud-platform scope in the test to reclaim the ai platform sample models [\#2355](https://github.com/kubeflow/pipelines/pull/2355) ([gaoning777](https://github.com/gaoning777)) +- Fix potential issue of the ai platform sample when running it in the ai platform notebook [\#2349](https://github.com/kubeflow/pipelines/pull/2349) ([gaoning777](https://github.com/gaoning777)) +- Typo in Data passing in python components.ipynb [\#2347](https://github.com/kubeflow/pipelines/pull/2347) ([pingsutw](https://github.com/pingsutw)) +- \[Test\] Add unittest against TFX [\#2346](https://github.com/kubeflow/pipelines/pull/2346) ([numerology](https://github.com/numerology)) +- SDK - Python components - Fixed handling multiline decorators [\#2345](https://github.com/kubeflow/pipelines/pull/2345) ([Ark-kun](https://github.com/Ark-kun)) +- \[License\] Fix third-party license [\#2344](https://github.com/kubeflow/pipelines/pull/2344) ([numerology](https://github.com/numerology)) +- \[Doc\] Fix a typo in MKP guide [\#2342](https://github.com/kubeflow/pipelines/pull/2342) ([numerology](https://github.com/numerology)) +- Fix pipeline cannot run bug when using marketplace managed storage [\#2341](https://github.com/kubeflow/pipelines/pull/2341) ([Bobgy](https://github.com/Bobgy)) +- enlarge MKP cluster constraint [\#2339](https://github.com/kubeflow/pipelines/pull/2339) ([rmgogogo](https://github.com/rmgogogo)) +- Add pipeline version api methods 
[\#2338](https://github.com/kubeflow/pipelines/pull/2338) ([jingzhang36](https://github.com/jingzhang36)) +- tech writer edits [\#2331](https://github.com/kubeflow/pipelines/pull/2331) ([jay-saldanha](https://github.com/jay-saldanha)) +- Add sample test for multiple output [\#2328](https://github.com/kubeflow/pipelines/pull/2328) ([gaoning777](https://github.com/gaoning777)) +- add ai\_platform test [\#2327](https://github.com/kubeflow/pipelines/pull/2327) ([gaoning777](https://github.com/gaoning777)) +- Tests - When testing SDK install it using pip [\#2325](https://github.com/kubeflow/pipelines/pull/2325) ([Ark-kun](https://github.com/Ark-kun)) +- tech writer edits [\#2324](https://github.com/kubeflow/pipelines/pull/2324) ([jay-saldanha](https://github.com/jay-saldanha)) +- SDK - Compiler - Added the component spec annotations to the compiled workflow [\#2323](https://github.com/kubeflow/pipelines/pull/2323) ([Ark-kun](https://github.com/Ark-kun)) +- \[SDK/Compiler\] Add \_create\_and\_write\_workflow method [\#2321](https://github.com/kubeflow/pipelines/pull/2321) ([numerology](https://github.com/numerology)) +- \[Sample\] Add new TFX::OSS sample [\#2319](https://github.com/kubeflow/pipelines/pull/2319) ([numerology](https://github.com/numerology)) +- SDK - Containers - Made python package installation more robust [\#2316](https://github.com/kubeflow/pipelines/pull/2316) ([Ark-kun](https://github.com/Ark-kun)) +- Extend KFServing component with autoscaling and server mode [\#2315](https://github.com/kubeflow/pipelines/pull/2315) ([Tomcli](https://github.com/Tomcli)) +- SDK - Tests - Test creating component from the real AutoML pipeline [\#2314](https://github.com/kubeflow/pipelines/pull/2314) ([Ark-kun](https://github.com/Ark-kun)) +- tech writer edits [\#2313](https://github.com/kubeflow/pipelines/pull/2313) ([jay-saldanha](https://github.com/jay-saldanha)) +- \[MKP/doc\] Update doc for changes of service account credential. [\#2309](https://github.com/kubeflow/pipelines/pull/2309) ([numerology](https://github.com/numerology)) +- \[MKP\] Remove service account credential from deployment page. 
[\#2308](https://github.com/kubeflow/pipelines/pull/2308) ([numerology](https://github.com/numerology)) +- SDK/DSL: ContainerOp.add\_pvolume - Fix volume passed in add\_volume [\#2306](https://github.com/kubeflow/pipelines/pull/2306) ([elikatsis](https://github.com/elikatsis)) +- \[Frontend\] Node detail view now can show workflow input/output artifacts [\#2305](https://github.com/kubeflow/pipelines/pull/2305) ([eterna2](https://github.com/eterna2)) +- SDK - Compiler - Fixed deprecation warning when calling compile [\#2303](https://github.com/kubeflow/pipelines/pull/2303) ([Ark-kun](https://github.com/Ark-kun)) +- tech writer edits [\#2301](https://github.com/kubeflow/pipelines/pull/2301) ([jay-saldanha](https://github.com/jay-saldanha)) +- \[Component\] Add VPC Interface Endpoint Support for SageMaker [\#2299](https://github.com/kubeflow/pipelines/pull/2299) ([RedbackThomson](https://github.com/RedbackThomson)) +- SDK - Compiler - Fix bugs in the data passing rewriter [\#2297](https://github.com/kubeflow/pipelines/pull/2297) ([deepio-oc](https://github.com/deepio-oc)) +- Add CMLE deploy comment missing parameters [\#2296](https://github.com/kubeflow/pipelines/pull/2296) ([hongye-sun](https://github.com/hongye-sun)) +- Samples - Simplified pipeline submission code in samples [\#2293](https://github.com/kubeflow/pipelines/pull/2293) ([Ark-kun](https://github.com/Ark-kun)) +- SDK - Client - Added a way to set experiment name using environment variables [\#2292](https://github.com/kubeflow/pipelines/pull/2292) ([Ark-kun](https://github.com/Ark-kun)) +- tech writer edits [\#2291](https://github.com/kubeflow/pipelines/pull/2291) ([jay-saldanha](https://github.com/jay-saldanha)) +- \[MKP\] Fix gcr paths in values.yaml [\#2289](https://github.com/kubeflow/pipelines/pull/2289) ([numerology](https://github.com/numerology)) +- fix for MKP [\#2288](https://github.com/kubeflow/pipelines/pull/2288) ([rmgogogo](https://github.com/rmgogogo)) +- tech writer edits [\#2285](https://github.com/kubeflow/pipelines/pull/2285) ([jay-saldanha](https://github.com/jay-saldanha)) +- Disable cloudsql and update to v0.1.31 [\#2284](https://github.com/kubeflow/pipelines/pull/2284) ([rmgogogo](https://github.com/rmgogogo)) +- tech writer edits [\#2282](https://github.com/kubeflow/pipelines/pull/2282) ([jay-saldanha](https://github.com/jay-saldanha)) +- Remove usage of deprecated ContainerOp methods in use\_gcp\_secret [\#2280](https://github.com/kubeflow/pipelines/pull/2280) ([andrewsmartin](https://github.com/andrewsmartin)) +- Samples - Switched the build\_component sample to the new container API [\#2279](https://github.com/kubeflow/pipelines/pull/2279) ([Ark-kun](https://github.com/Ark-kun)) +- Components - Added the "AutoML Export data to GCS" component [\#2278](https://github.com/kubeflow/pipelines/pull/2278) ([Ark-kun](https://github.com/Ark-kun)) +- SDK - Components - Deprecated build\_python\_component [\#2277](https://github.com/kubeflow/pipelines/pull/2277) ([Ark-kun](https://github.com/Ark-kun)) +- SDK - Containers - Deprecated build\_docker\_image [\#2276](https://github.com/kubeflow/pipelines/pull/2276) ([Ark-kun](https://github.com/Ark-kun)) +- Refactor resource op sample for sample test coverage [\#2274](https://github.com/kubeflow/pipelines/pull/2274) ([numerology](https://github.com/numerology)) +- SDK - Components - Creating graph components from python pipeline function [\#2273](https://github.com/kubeflow/pipelines/pull/2273) ([Ark-kun](https://github.com/Ark-kun)) +- SDK - Components - Verify the 
object type when serializing primitive arguments [\#2272](https://github.com/kubeflow/pipelines/pull/2272) ([Ark-kun](https://github.com/Ark-kun)) +- SDK - Compiler - Make it possible to create more portable pipelines [\#2271](https://github.com/kubeflow/pipelines/pull/2271) ([Ark-kun](https://github.com/Ark-kun)) +- SDK - Components - Reorganized TaskSpec execution options [\#2270](https://github.com/kubeflow/pipelines/pull/2270) ([Ark-kun](https://github.com/Ark-kun)) +- SDK - Components - Fixed small bugs in graph component resolving [\#2269](https://github.com/kubeflow/pipelines/pull/2269) ([Ark-kun](https://github.com/Ark-kun)) +- SDK - Notebooks - Deprecated the docker magic [\#2266](https://github.com/kubeflow/pipelines/pull/2266) ([Ark-kun](https://github.com/Ark-kun)) +- SDK - Components - component\_ref.name should only be set when component was loaded by name [\#2265](https://github.com/kubeflow/pipelines/pull/2265) ([Ark-kun](https://github.com/Ark-kun)) +- SDK - Components - Added the ComponentSpec.save method [\#2264](https://github.com/kubeflow/pipelines/pull/2264) ([Ark-kun](https://github.com/Ark-kun)) +- Components - Removed trailing whitespace from AutoML components code [\#2263](https://github.com/kubeflow/pipelines/pull/2263) ([Ark-kun](https://github.com/Ark-kun)) +- SDK - Components - Restored attribute order when generating component.yaml files [\#2262](https://github.com/kubeflow/pipelines/pull/2262) ([Ark-kun](https://github.com/Ark-kun)) +- SDK - Lightweight - Convert the names of file inputs and outputs [\#2260](https://github.com/kubeflow/pipelines/pull/2260) ([Ark-kun](https://github.com/Ark-kun)) +- SDK - Compiler - Fixed small bug in data passing rewriter [\#2259](https://github.com/kubeflow/pipelines/pull/2259) ([Ark-kun](https://github.com/Ark-kun)) +- Samples - Added the data passing tutorial [\#2258](https://github.com/kubeflow/pipelines/pull/2258) ([Ark-kun](https://github.com/Ark-kun)) +- SDK - Components - Deprecate the get and set methods for default image in favor of plain variable [\#2257](https://github.com/kubeflow/pipelines/pull/2257) ([Ark-kun](https://github.com/Ark-kun)) +- SDK - Containers - Getting namespace lazily [\#2256](https://github.com/kubeflow/pipelines/pull/2256) ([Ark-kun](https://github.com/Ark-kun)) +- remove default namespace [\#2250](https://github.com/kubeflow/pipelines/pull/2250) ([SinaChavoshi](https://github.com/SinaChavoshi)) +- SDK - Lightweight - Added package installation support to func\_to\_container\_op [\#2245](https://github.com/kubeflow/pipelines/pull/2245) ([Ark-kun](https://github.com/Ark-kun)) +- SDK: fix label check for ContainerOP entities [\#2243](https://github.com/kubeflow/pipelines/pull/2243) ([solovyevt](https://github.com/solovyevt)) +- Update doc for MKP release [\#2242](https://github.com/kubeflow/pipelines/pull/2242) ([rmgogogo](https://github.com/rmgogogo)) +- Update changelog for release 0.1.31. 
\(And also for 0.1.30 and 0.1.29, whose are not added before\) [\#2232](https://github.com/kubeflow/pipelines/pull/2232) ([jingzhang36](https://github.com/jingzhang36)) +- SDK - Compiler - Move Argo volume specifications to templates [\#2229](https://github.com/kubeflow/pipelines/pull/2229) ([Ark-kun](https://github.com/Ark-kun)) +- Updated README Swagger CodeGen version [\#2228](https://github.com/kubeflow/pipelines/pull/2228) ([RedbackThomson](https://github.com/RedbackThomson)) +- SDK - Components - Fix - Stop serializing string values [\#2227](https://github.com/kubeflow/pipelines/pull/2227) ([Ark-kun](https://github.com/Ark-kun)) +- third\_party/metadata\_envoy: Modify license file [\#2224](https://github.com/kubeflow/pipelines/pull/2224) ([dushyanthsc](https://github.com/dushyanthsc)) +- \[SDK/Client\] Improve the url format check for kfp.Client [\#2222](https://github.com/kubeflow/pipelines/pull/2222) ([numerology](https://github.com/numerology)) +- \[Sample\] update XGBoost sample [\#2220](https://github.com/kubeflow/pipelines/pull/2220) ([numerology](https://github.com/numerology)) +- \[Component\] Add Managed Spot Training Support for SageMaker [\#2219](https://github.com/kubeflow/pipelines/pull/2219) ([RedbackThomson](https://github.com/RedbackThomson)) +- SDK - Containers - Added support for container image cache [\#2216](https://github.com/kubeflow/pipelines/pull/2216) ([Ark-kun](https://github.com/Ark-kun)) +- Add third party license + source code to argo and minio images to comply with their license [\#2201](https://github.com/kubeflow/pipelines/pull/2201) ([Bobgy](https://github.com/Bobgy)) +- SDK - Moved the \_container\_builder from kfp.compiler to kfp.containers [\#2192](https://github.com/kubeflow/pipelines/pull/2192) ([Ark-kun](https://github.com/Ark-kun)) +- Added the backend Go module cache to .gitignote [\#2190](https://github.com/kubeflow/pipelines/pull/2190) ([Ark-kun](https://github.com/Ark-kun)) +- Docs - Added the direct kfp module members to documentation [\#2183](https://github.com/kubeflow/pipelines/pull/2183) ([Ark-kun](https://github.com/Ark-kun)) +- Components - Added AutoML Tables components and tests [\#2174](https://github.com/kubeflow/pipelines/pull/2174) ([Ark-kun](https://github.com/Ark-kun)) +- GUI: should pop-up a correct info when delete more than one pipeline [\#2156](https://github.com/kubeflow/pipelines/pull/2156) ([QxiaoQ](https://github.com/QxiaoQ)) +- \[SDK-compiler\] Refactor Compiler to expose an API to write out yaml spec of pipeline. [\#2146](https://github.com/kubeflow/pipelines/pull/2146) ([numerology](https://github.com/numerology)) +- Add necessary data types to api and database to support pipeline version. [\#1873](https://github.com/kubeflow/pipelines/pull/1873) ([jingzhang36](https://github.com/jingzhang36)) +- SDK - Hiding Argo's workflow.uid placeholder behind DSL [\#1683](https://github.com/kubeflow/pipelines/pull/1683) ([Ark-kun](https://github.com/Ark-kun)) +- Allows uploading a pipeline in new run form [\#1643](https://github.com/kubeflow/pipelines/pull/1643) ([rileyjbauer](https://github.com/rileyjbauer)) +- SDK - Improve errors when ContainerOp.output is unavailable [\#1578](https://github.com/kubeflow/pipelines/pull/1578) ([Ark-kun](https://github.com/Ark-kun)) +- Use Remote Build Execution for Bazel builds. 
[\#1031](https://github.com/kubeflow/pipelines/pull/1031) ([neuromage](https://github.com/neuromage))
+
 ## [0.1.31](https://github.com/kubeflow/pipelines/tree/0.1.31) (2019-09-25)
 [Full Changelog](https://github.com/kubeflow/pipelines/compare/0.1.30...0.1.31)
@@ -1765,4 +1950,4 @@
 
-\* *This Change Log was automatically generated by [github_changelog_generator](https://github.com/skywinder/Github-Changelog-Generator)*
\ No newline at end of file
+\* *This Change Log was automatically generated by [github_changelog_generator](https://github.com/skywinder/Github-Changelog-Generator)*
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
index ebbb59e5310f..5bff97828bd4 100644
--- a/CONTRIBUTING.md
+++ b/CONTRIBUTING.md
@@ -15,6 +15,12 @@ You generally only need to submit a CLA once, so if you've already
 submitted one (even if it was for a different project), you probably don't
 need to do it again.
 
+## Coding style
+
+The Python part of the project follows the [Google Python style guide](http://google.github.io/styleguide/pyguide.html). We provide a [yapf](https://github.com/google/yapf) configuration file to help contributors auto-format their code to the Google Python style; a usage sketch appears at the end of this diff. Contributors are also encouraged to lint Python docstrings with [docformatter](https://github.com/myint/docformatter).
+
+The frontend part of the project uses [prettier](https://prettier.io/) for formatting; read [frontend/README.md#code-style](frontend/README.md#code-style) for more details.
+
 ## Code reviews
 
 All submissions, including submissions by project members, require review. We
@@ -25,4 +31,4 @@ information on using pull requests.
 ## Community Guidelines
 
 This project follows
-[Google's Open Source Community Guidelines](https://opensource.google.com/conduct/).
\ No newline at end of file
+[Google's Open Source Community Guidelines](https://opensource.google.com/conduct/).
diff --git a/backend/OWNERS b/backend/OWNERS
index dc4d9f39b852..a0ff390f99e1 100644
--- a/backend/OWNERS
+++ b/backend/OWNERS
@@ -1,6 +1,8 @@
 approvers:
   - IronPan
+  - jingzhang36
   - neuromage
 reviewers:
   - IronPan
+  - jingzhang36
   - neuromage
diff --git a/backend/api/experiment.proto b/backend/api/experiment.proto
index ac925792fb32..841f90d27cb7 100644
--- a/backend/api/experiment.proto
+++ b/backend/api/experiment.proto
@@ -102,8 +102,9 @@ message ListExperimentsRequest {
   // Ascending by default.
   string sort_by = 3;
 
-  // A base-64 encoded, JSON-serialized Filter protocol buffer (see
-  // filter.proto).
+  // A URL-encoded, JSON-serialized Filter protocol buffer (see
+  // [filter.proto](https://github.com/kubeflow/pipelines/
+  // blob/master/backend/api/filter.proto)).
   string filter = 4;
 }
 
@@ -118,7 +119,7 @@ message ListExperimentsResponse {
   string next_page_token = 2;
 }
 
-message DeleteExperimentRequest{
+message DeleteExperimentRequest {
   // The ID of the experiment to be deleted.
   string id = 1;
 }
@@ -131,7 +132,7 @@ message Experiment {
   string name = 2;
 
   // Optional input field. Describing the purpose of the experiment
-  string description =3;
+  string description = 3;
 
   // Output. The time that the experiment created.
google.protobuf.Timestamp created_at = 4; diff --git a/backend/api/go_client/pipeline.pb.go b/backend/api/go_client/pipeline.pb.go index 6962caad9544..7a069f7b3d56 100755 --- a/backend/api/go_client/pipeline.pb.go +++ b/backend/api/go_client/pipeline.pb.go @@ -52,7 +52,7 @@ func (m *Url) Reset() { *m = Url{} } func (m *Url) String() string { return proto.CompactTextString(m) } func (*Url) ProtoMessage() {} func (*Url) Descriptor() ([]byte, []int) { - return fileDescriptor_pipeline_a461ac7e0daee5b3, []int{0} + return fileDescriptor_pipeline_e46b3924128d1ae3, []int{0} } func (m *Url) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_Url.Unmarshal(m, b) @@ -90,7 +90,7 @@ func (m *CreatePipelineRequest) Reset() { *m = CreatePipelineRequest{} } func (m *CreatePipelineRequest) String() string { return proto.CompactTextString(m) } func (*CreatePipelineRequest) ProtoMessage() {} func (*CreatePipelineRequest) Descriptor() ([]byte, []int) { - return fileDescriptor_pipeline_a461ac7e0daee5b3, []int{1} + return fileDescriptor_pipeline_e46b3924128d1ae3, []int{1} } func (m *CreatePipelineRequest) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_CreatePipelineRequest.Unmarshal(m, b) @@ -128,7 +128,7 @@ func (m *GetPipelineRequest) Reset() { *m = GetPipelineRequest{} } func (m *GetPipelineRequest) String() string { return proto.CompactTextString(m) } func (*GetPipelineRequest) ProtoMessage() {} func (*GetPipelineRequest) Descriptor() ([]byte, []int) { - return fileDescriptor_pipeline_a461ac7e0daee5b3, []int{2} + return fileDescriptor_pipeline_e46b3924128d1ae3, []int{2} } func (m *GetPipelineRequest) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_GetPipelineRequest.Unmarshal(m, b) @@ -169,7 +169,7 @@ func (m *ListPipelinesRequest) Reset() { *m = ListPipelinesRequest{} } func (m *ListPipelinesRequest) String() string { return proto.CompactTextString(m) } func (*ListPipelinesRequest) ProtoMessage() {} func (*ListPipelinesRequest) Descriptor() ([]byte, []int) { - return fileDescriptor_pipeline_a461ac7e0daee5b3, []int{3} + return fileDescriptor_pipeline_e46b3924128d1ae3, []int{3} } func (m *ListPipelinesRequest) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_ListPipelinesRequest.Unmarshal(m, b) @@ -230,7 +230,7 @@ func (m *ListPipelinesResponse) Reset() { *m = ListPipelinesResponse{} } func (m *ListPipelinesResponse) String() string { return proto.CompactTextString(m) } func (*ListPipelinesResponse) ProtoMessage() {} func (*ListPipelinesResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_pipeline_a461ac7e0daee5b3, []int{4} + return fileDescriptor_pipeline_e46b3924128d1ae3, []int{4} } func (m *ListPipelinesResponse) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_ListPipelinesResponse.Unmarshal(m, b) @@ -282,7 +282,7 @@ func (m *DeletePipelineRequest) Reset() { *m = DeletePipelineRequest{} } func (m *DeletePipelineRequest) String() string { return proto.CompactTextString(m) } func (*DeletePipelineRequest) ProtoMessage() {} func (*DeletePipelineRequest) Descriptor() ([]byte, []int) { - return fileDescriptor_pipeline_a461ac7e0daee5b3, []int{5} + return fileDescriptor_pipeline_e46b3924128d1ae3, []int{5} } func (m *DeletePipelineRequest) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_DeletePipelineRequest.Unmarshal(m, b) @@ -320,7 +320,7 @@ func (m *GetTemplateRequest) Reset() { *m = GetTemplateRequest{} } func (m *GetTemplateRequest) String() string { return proto.CompactTextString(m) } func (*GetTemplateRequest) ProtoMessage() {} func (*GetTemplateRequest) 
Descriptor() ([]byte, []int) { - return fileDescriptor_pipeline_a461ac7e0daee5b3, []int{6} + return fileDescriptor_pipeline_e46b3924128d1ae3, []int{6} } func (m *GetTemplateRequest) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_GetTemplateRequest.Unmarshal(m, b) @@ -358,7 +358,7 @@ func (m *GetTemplateResponse) Reset() { *m = GetTemplateResponse{} } func (m *GetTemplateResponse) String() string { return proto.CompactTextString(m) } func (*GetTemplateResponse) ProtoMessage() {} func (*GetTemplateResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_pipeline_a461ac7e0daee5b3, []int{7} + return fileDescriptor_pipeline_e46b3924128d1ae3, []int{7} } func (m *GetTemplateResponse) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_GetTemplateResponse.Unmarshal(m, b) @@ -385,6 +385,282 @@ func (m *GetTemplateResponse) GetTemplate() string { return "" } +type GetPipelineVersionTemplateRequest struct { + VersionId string `protobuf:"bytes,1,opt,name=version_id,json=versionId,proto3" json:"version_id,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetPipelineVersionTemplateRequest) Reset() { *m = GetPipelineVersionTemplateRequest{} } +func (m *GetPipelineVersionTemplateRequest) String() string { return proto.CompactTextString(m) } +func (*GetPipelineVersionTemplateRequest) ProtoMessage() {} +func (*GetPipelineVersionTemplateRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_pipeline_e46b3924128d1ae3, []int{8} +} +func (m *GetPipelineVersionTemplateRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetPipelineVersionTemplateRequest.Unmarshal(m, b) +} +func (m *GetPipelineVersionTemplateRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetPipelineVersionTemplateRequest.Marshal(b, m, deterministic) +} +func (dst *GetPipelineVersionTemplateRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetPipelineVersionTemplateRequest.Merge(dst, src) +} +func (m *GetPipelineVersionTemplateRequest) XXX_Size() int { + return xxx_messageInfo_GetPipelineVersionTemplateRequest.Size(m) +} +func (m *GetPipelineVersionTemplateRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetPipelineVersionTemplateRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetPipelineVersionTemplateRequest proto.InternalMessageInfo + +func (m *GetPipelineVersionTemplateRequest) GetVersionId() string { + if m != nil { + return m.VersionId + } + return "" +} + +type CreatePipelineVersionRequest struct { + Version *PipelineVersion `protobuf:"bytes,1,opt,name=version,proto3" json:"version,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CreatePipelineVersionRequest) Reset() { *m = CreatePipelineVersionRequest{} } +func (m *CreatePipelineVersionRequest) String() string { return proto.CompactTextString(m) } +func (*CreatePipelineVersionRequest) ProtoMessage() {} +func (*CreatePipelineVersionRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_pipeline_e46b3924128d1ae3, []int{9} +} +func (m *CreatePipelineVersionRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CreatePipelineVersionRequest.Unmarshal(m, b) +} +func (m *CreatePipelineVersionRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CreatePipelineVersionRequest.Marshal(b, m, deterministic) +} +func (dst *CreatePipelineVersionRequest) XXX_Merge(src 
proto.Message) { + xxx_messageInfo_CreatePipelineVersionRequest.Merge(dst, src) +} +func (m *CreatePipelineVersionRequest) XXX_Size() int { + return xxx_messageInfo_CreatePipelineVersionRequest.Size(m) +} +func (m *CreatePipelineVersionRequest) XXX_DiscardUnknown() { + xxx_messageInfo_CreatePipelineVersionRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_CreatePipelineVersionRequest proto.InternalMessageInfo + +func (m *CreatePipelineVersionRequest) GetVersion() *PipelineVersion { + if m != nil { + return m.Version + } + return nil +} + +type GetPipelineVersionRequest struct { + VersionId string `protobuf:"bytes,1,opt,name=version_id,json=versionId,proto3" json:"version_id,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetPipelineVersionRequest) Reset() { *m = GetPipelineVersionRequest{} } +func (m *GetPipelineVersionRequest) String() string { return proto.CompactTextString(m) } +func (*GetPipelineVersionRequest) ProtoMessage() {} +func (*GetPipelineVersionRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_pipeline_e46b3924128d1ae3, []int{10} +} +func (m *GetPipelineVersionRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetPipelineVersionRequest.Unmarshal(m, b) +} +func (m *GetPipelineVersionRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetPipelineVersionRequest.Marshal(b, m, deterministic) +} +func (dst *GetPipelineVersionRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetPipelineVersionRequest.Merge(dst, src) +} +func (m *GetPipelineVersionRequest) XXX_Size() int { + return xxx_messageInfo_GetPipelineVersionRequest.Size(m) +} +func (m *GetPipelineVersionRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetPipelineVersionRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetPipelineVersionRequest proto.InternalMessageInfo + +func (m *GetPipelineVersionRequest) GetVersionId() string { + if m != nil { + return m.VersionId + } + return "" +} + +type ListPipelineVersionsRequest struct { + ResourceKey *ResourceKey `protobuf:"bytes,1,opt,name=resource_key,json=resourceKey,proto3" json:"resource_key,omitempty"` + PageSize int32 `protobuf:"varint,2,opt,name=page_size,json=pageSize,proto3" json:"page_size,omitempty"` + PageToken string `protobuf:"bytes,3,opt,name=page_token,json=pageToken,proto3" json:"page_token,omitempty"` + SortBy string `protobuf:"bytes,4,opt,name=sort_by,json=sortBy,proto3" json:"sort_by,omitempty"` + Filter string `protobuf:"bytes,5,opt,name=filter,proto3" json:"filter,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListPipelineVersionsRequest) Reset() { *m = ListPipelineVersionsRequest{} } +func (m *ListPipelineVersionsRequest) String() string { return proto.CompactTextString(m) } +func (*ListPipelineVersionsRequest) ProtoMessage() {} +func (*ListPipelineVersionsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_pipeline_e46b3924128d1ae3, []int{11} +} +func (m *ListPipelineVersionsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListPipelineVersionsRequest.Unmarshal(m, b) +} +func (m *ListPipelineVersionsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListPipelineVersionsRequest.Marshal(b, m, deterministic) +} +func (dst *ListPipelineVersionsRequest) XXX_Merge(src proto.Message) { + 
xxx_messageInfo_ListPipelineVersionsRequest.Merge(dst, src) +} +func (m *ListPipelineVersionsRequest) XXX_Size() int { + return xxx_messageInfo_ListPipelineVersionsRequest.Size(m) +} +func (m *ListPipelineVersionsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ListPipelineVersionsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ListPipelineVersionsRequest proto.InternalMessageInfo + +func (m *ListPipelineVersionsRequest) GetResourceKey() *ResourceKey { + if m != nil { + return m.ResourceKey + } + return nil +} + +func (m *ListPipelineVersionsRequest) GetPageSize() int32 { + if m != nil { + return m.PageSize + } + return 0 +} + +func (m *ListPipelineVersionsRequest) GetPageToken() string { + if m != nil { + return m.PageToken + } + return "" +} + +func (m *ListPipelineVersionsRequest) GetSortBy() string { + if m != nil { + return m.SortBy + } + return "" +} + +func (m *ListPipelineVersionsRequest) GetFilter() string { + if m != nil { + return m.Filter + } + return "" +} + +type ListPipelineVersionsResponse struct { + Versions []*PipelineVersion `protobuf:"bytes,1,rep,name=versions,proto3" json:"versions,omitempty"` + NextPageToken string `protobuf:"bytes,2,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"` + TotalSize int32 `protobuf:"varint,3,opt,name=total_size,json=totalSize,proto3" json:"total_size,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListPipelineVersionsResponse) Reset() { *m = ListPipelineVersionsResponse{} } +func (m *ListPipelineVersionsResponse) String() string { return proto.CompactTextString(m) } +func (*ListPipelineVersionsResponse) ProtoMessage() {} +func (*ListPipelineVersionsResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_pipeline_e46b3924128d1ae3, []int{12} +} +func (m *ListPipelineVersionsResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListPipelineVersionsResponse.Unmarshal(m, b) +} +func (m *ListPipelineVersionsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListPipelineVersionsResponse.Marshal(b, m, deterministic) +} +func (dst *ListPipelineVersionsResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListPipelineVersionsResponse.Merge(dst, src) +} +func (m *ListPipelineVersionsResponse) XXX_Size() int { + return xxx_messageInfo_ListPipelineVersionsResponse.Size(m) +} +func (m *ListPipelineVersionsResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ListPipelineVersionsResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ListPipelineVersionsResponse proto.InternalMessageInfo + +func (m *ListPipelineVersionsResponse) GetVersions() []*PipelineVersion { + if m != nil { + return m.Versions + } + return nil +} + +func (m *ListPipelineVersionsResponse) GetNextPageToken() string { + if m != nil { + return m.NextPageToken + } + return "" +} + +func (m *ListPipelineVersionsResponse) GetTotalSize() int32 { + if m != nil { + return m.TotalSize + } + return 0 +} + +type DeletePipelineVersionRequest struct { + VersionId string `protobuf:"bytes,1,opt,name=version_id,json=versionId,proto3" json:"version_id,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DeletePipelineVersionRequest) Reset() { *m = DeletePipelineVersionRequest{} } +func (m *DeletePipelineVersionRequest) String() string { return proto.CompactTextString(m) } +func (*DeletePipelineVersionRequest) 
ProtoMessage() {} +func (*DeletePipelineVersionRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_pipeline_e46b3924128d1ae3, []int{13} +} +func (m *DeletePipelineVersionRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DeletePipelineVersionRequest.Unmarshal(m, b) +} +func (m *DeletePipelineVersionRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DeletePipelineVersionRequest.Marshal(b, m, deterministic) +} +func (dst *DeletePipelineVersionRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_DeletePipelineVersionRequest.Merge(dst, src) +} +func (m *DeletePipelineVersionRequest) XXX_Size() int { + return xxx_messageInfo_DeletePipelineVersionRequest.Size(m) +} +func (m *DeletePipelineVersionRequest) XXX_DiscardUnknown() { + xxx_messageInfo_DeletePipelineVersionRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_DeletePipelineVersionRequest proto.InternalMessageInfo + +func (m *DeletePipelineVersionRequest) GetVersionId() string { + if m != nil { + return m.VersionId + } + return "" +} + type Pipeline struct { Id string `protobuf:"bytes,1,opt,name=id,proto3" json:"id,omitempty"` CreatedAt *timestamp.Timestamp `protobuf:"bytes,2,opt,name=created_at,json=createdAt,proto3" json:"created_at,omitempty"` @@ -393,6 +669,7 @@ type Pipeline struct { Parameters []*Parameter `protobuf:"bytes,5,rep,name=parameters,proto3" json:"parameters,omitempty"` Url *Url `protobuf:"bytes,7,opt,name=url,proto3" json:"url,omitempty"` Error string `protobuf:"bytes,6,opt,name=error,proto3" json:"error,omitempty"` + DefaultVersion *PipelineVersion `protobuf:"bytes,8,opt,name=default_version,json=defaultVersion,proto3" json:"default_version,omitempty"` XXX_NoUnkeyedLiteral struct{} `json:"-"` XXX_unrecognized []byte `json:"-"` XXX_sizecache int32 `json:"-"` @@ -402,7 +679,7 @@ func (m *Pipeline) Reset() { *m = Pipeline{} } func (m *Pipeline) String() string { return proto.CompactTextString(m) } func (*Pipeline) ProtoMessage() {} func (*Pipeline) Descriptor() ([]byte, []int) { - return fileDescriptor_pipeline_a461ac7e0daee5b3, []int{8} + return fileDescriptor_pipeline_e46b3924128d1ae3, []int{14} } func (m *Pipeline) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_Pipeline.Unmarshal(m, b) @@ -471,6 +748,99 @@ func (m *Pipeline) GetError() string { return "" } +func (m *Pipeline) GetDefaultVersion() *PipelineVersion { + if m != nil { + return m.DefaultVersion + } + return nil +} + +type PipelineVersion struct { + Id string `protobuf:"bytes,1,opt,name=id,proto3" json:"id,omitempty"` + Name string `protobuf:"bytes,2,opt,name=name,proto3" json:"name,omitempty"` + CreatedAt *timestamp.Timestamp `protobuf:"bytes,3,opt,name=created_at,json=createdAt,proto3" json:"created_at,omitempty"` + Parameters []*Parameter `protobuf:"bytes,4,rep,name=parameters,proto3" json:"parameters,omitempty"` + CodeSourceUrl string `protobuf:"bytes,5,opt,name=code_source_url,json=codeSourceUrl,proto3" json:"code_source_url,omitempty"` + PackageUrl *Url `protobuf:"bytes,6,opt,name=package_url,json=packageUrl,proto3" json:"package_url,omitempty"` + ResourceReferences []*ResourceReference `protobuf:"bytes,7,rep,name=resource_references,json=resourceReferences,proto3" json:"resource_references,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *PipelineVersion) Reset() { *m = PipelineVersion{} } +func (m *PipelineVersion) String() string { return proto.CompactTextString(m) } +func 
(*PipelineVersion) ProtoMessage() {} +func (*PipelineVersion) Descriptor() ([]byte, []int) { + return fileDescriptor_pipeline_e46b3924128d1ae3, []int{15} +} +func (m *PipelineVersion) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_PipelineVersion.Unmarshal(m, b) +} +func (m *PipelineVersion) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_PipelineVersion.Marshal(b, m, deterministic) +} +func (dst *PipelineVersion) XXX_Merge(src proto.Message) { + xxx_messageInfo_PipelineVersion.Merge(dst, src) +} +func (m *PipelineVersion) XXX_Size() int { + return xxx_messageInfo_PipelineVersion.Size(m) +} +func (m *PipelineVersion) XXX_DiscardUnknown() { + xxx_messageInfo_PipelineVersion.DiscardUnknown(m) +} + +var xxx_messageInfo_PipelineVersion proto.InternalMessageInfo + +func (m *PipelineVersion) GetId() string { + if m != nil { + return m.Id + } + return "" +} + +func (m *PipelineVersion) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *PipelineVersion) GetCreatedAt() *timestamp.Timestamp { + if m != nil { + return m.CreatedAt + } + return nil +} + +func (m *PipelineVersion) GetParameters() []*Parameter { + if m != nil { + return m.Parameters + } + return nil +} + +func (m *PipelineVersion) GetCodeSourceUrl() string { + if m != nil { + return m.CodeSourceUrl + } + return "" +} + +func (m *PipelineVersion) GetPackageUrl() *Url { + if m != nil { + return m.PackageUrl + } + return nil +} + +func (m *PipelineVersion) GetResourceReferences() []*ResourceReference { + if m != nil { + return m.ResourceReferences + } + return nil +} + func init() { proto.RegisterType((*Url)(nil), "api.Url") proto.RegisterType((*CreatePipelineRequest)(nil), "api.CreatePipelineRequest") @@ -480,7 +850,14 @@ func init() { proto.RegisterType((*DeletePipelineRequest)(nil), "api.DeletePipelineRequest") proto.RegisterType((*GetTemplateRequest)(nil), "api.GetTemplateRequest") proto.RegisterType((*GetTemplateResponse)(nil), "api.GetTemplateResponse") + proto.RegisterType((*GetPipelineVersionTemplateRequest)(nil), "api.GetPipelineVersionTemplateRequest") + proto.RegisterType((*CreatePipelineVersionRequest)(nil), "api.CreatePipelineVersionRequest") + proto.RegisterType((*GetPipelineVersionRequest)(nil), "api.GetPipelineVersionRequest") + proto.RegisterType((*ListPipelineVersionsRequest)(nil), "api.ListPipelineVersionsRequest") + proto.RegisterType((*ListPipelineVersionsResponse)(nil), "api.ListPipelineVersionsResponse") + proto.RegisterType((*DeletePipelineVersionRequest)(nil), "api.DeletePipelineVersionRequest") proto.RegisterType((*Pipeline)(nil), "api.Pipeline") + proto.RegisterType((*PipelineVersion)(nil), "api.PipelineVersion") } // Reference imports to suppress errors if they are not otherwise used. 
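The hunks above add six request/response messages plus the `PipelineVersion` message itself and register them all with the proto type registry. As a rough illustration of how these messages compose (this block is not part of the generated file; the `go_client` import path, the `PipelineUrl` field name on `Url`, and the pipeline ID are assumptions inferred from the surrounding generated code):

```go
package main

import (
	"fmt"

	api "github.com/kubeflow/pipelines/backend/api/go_client"
)

func main() {
	// Build a CreatePipelineVersionRequest that attaches a new version to an
	// existing (hypothetical) pipeline via a PIPELINE-typed resource reference.
	req := &api.CreatePipelineVersionRequest{
		Version: &api.PipelineVersion{
			Name: "v2",
			// PackageUrl points at the pipeline package to import; the field
			// name assumes the Url message shape used elsewhere in this file.
			PackageUrl: &api.Url{PipelineUrl: "https://example.com/pipeline.tar.gz"},
			ResourceReferences: []*api.ResourceReference{{
				Key: &api.ResourceKey{
					Type: api.ResourceType_PIPELINE,
					Id:   "hypothetical-pipeline-id",
				},
				Relationship: api.Relationship_OWNER,
			}},
		},
	}
	fmt.Println(req.GetVersion().GetName()) // "v2"
}
```

The `ResourceReferences` field is what lets the server resolve which pipeline owns the new version, mirroring how runs and jobs already reference experiments.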
@@ -500,6 +877,11 @@ type PipelineServiceClient interface { ListPipelines(ctx context.Context, in *ListPipelinesRequest, opts ...grpc.CallOption) (*ListPipelinesResponse, error) DeletePipeline(ctx context.Context, in *DeletePipelineRequest, opts ...grpc.CallOption) (*empty.Empty, error) GetTemplate(ctx context.Context, in *GetTemplateRequest, opts ...grpc.CallOption) (*GetTemplateResponse, error) + CreatePipelineVersion(ctx context.Context, in *CreatePipelineVersionRequest, opts ...grpc.CallOption) (*PipelineVersion, error) + GetPipelineVersion(ctx context.Context, in *GetPipelineVersionRequest, opts ...grpc.CallOption) (*PipelineVersion, error) + ListPipelineVersions(ctx context.Context, in *ListPipelineVersionsRequest, opts ...grpc.CallOption) (*ListPipelineVersionsResponse, error) + DeletePipelineVersion(ctx context.Context, in *DeletePipelineVersionRequest, opts ...grpc.CallOption) (*empty.Empty, error) + GetPipelineVersionTemplate(ctx context.Context, in *GetPipelineVersionTemplateRequest, opts ...grpc.CallOption) (*GetTemplateResponse, error) } type pipelineServiceClient struct { @@ -555,6 +937,51 @@ func (c *pipelineServiceClient) GetTemplate(ctx context.Context, in *GetTemplate return out, nil } +func (c *pipelineServiceClient) CreatePipelineVersion(ctx context.Context, in *CreatePipelineVersionRequest, opts ...grpc.CallOption) (*PipelineVersion, error) { + out := new(PipelineVersion) + err := c.cc.Invoke(ctx, "/api.PipelineService/CreatePipelineVersion", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *pipelineServiceClient) GetPipelineVersion(ctx context.Context, in *GetPipelineVersionRequest, opts ...grpc.CallOption) (*PipelineVersion, error) { + out := new(PipelineVersion) + err := c.cc.Invoke(ctx, "/api.PipelineService/GetPipelineVersion", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *pipelineServiceClient) ListPipelineVersions(ctx context.Context, in *ListPipelineVersionsRequest, opts ...grpc.CallOption) (*ListPipelineVersionsResponse, error) { + out := new(ListPipelineVersionsResponse) + err := c.cc.Invoke(ctx, "/api.PipelineService/ListPipelineVersions", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *pipelineServiceClient) DeletePipelineVersion(ctx context.Context, in *DeletePipelineVersionRequest, opts ...grpc.CallOption) (*empty.Empty, error) { + out := new(empty.Empty) + err := c.cc.Invoke(ctx, "/api.PipelineService/DeletePipelineVersion", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *pipelineServiceClient) GetPipelineVersionTemplate(ctx context.Context, in *GetPipelineVersionTemplateRequest, opts ...grpc.CallOption) (*GetTemplateResponse, error) { + out := new(GetTemplateResponse) + err := c.cc.Invoke(ctx, "/api.PipelineService/GetPipelineVersionTemplate", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + // PipelineServiceServer is the server API for PipelineService service. 
type PipelineServiceServer interface { CreatePipeline(context.Context, *CreatePipelineRequest) (*Pipeline, error) @@ -562,6 +989,11 @@ type PipelineServiceServer interface { ListPipelines(context.Context, *ListPipelinesRequest) (*ListPipelinesResponse, error) DeletePipeline(context.Context, *DeletePipelineRequest) (*empty.Empty, error) GetTemplate(context.Context, *GetTemplateRequest) (*GetTemplateResponse, error) + CreatePipelineVersion(context.Context, *CreatePipelineVersionRequest) (*PipelineVersion, error) + GetPipelineVersion(context.Context, *GetPipelineVersionRequest) (*PipelineVersion, error) + ListPipelineVersions(context.Context, *ListPipelineVersionsRequest) (*ListPipelineVersionsResponse, error) + DeletePipelineVersion(context.Context, *DeletePipelineVersionRequest) (*empty.Empty, error) + GetPipelineVersionTemplate(context.Context, *GetPipelineVersionTemplateRequest) (*GetTemplateResponse, error) } func RegisterPipelineServiceServer(s *grpc.Server, srv PipelineServiceServer) { @@ -658,6 +1090,96 @@ func _PipelineService_GetTemplate_Handler(srv interface{}, ctx context.Context, return interceptor(ctx, in, info, handler) } +func _PipelineService_CreatePipelineVersion_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(CreatePipelineVersionRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(PipelineServiceServer).CreatePipelineVersion(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/api.PipelineService/CreatePipelineVersion", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(PipelineServiceServer).CreatePipelineVersion(ctx, req.(*CreatePipelineVersionRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _PipelineService_GetPipelineVersion_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetPipelineVersionRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(PipelineServiceServer).GetPipelineVersion(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/api.PipelineService/GetPipelineVersion", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(PipelineServiceServer).GetPipelineVersion(ctx, req.(*GetPipelineVersionRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _PipelineService_ListPipelineVersions_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ListPipelineVersionsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(PipelineServiceServer).ListPipelineVersions(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/api.PipelineService/ListPipelineVersions", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(PipelineServiceServer).ListPipelineVersions(ctx, req.(*ListPipelineVersionsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _PipelineService_DeletePipelineVersion_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(DeletePipelineVersionRequest) + if err := dec(in); 
err != nil { + return nil, err + } + if interceptor == nil { + return srv.(PipelineServiceServer).DeletePipelineVersion(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/api.PipelineService/DeletePipelineVersion", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(PipelineServiceServer).DeletePipelineVersion(ctx, req.(*DeletePipelineVersionRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _PipelineService_GetPipelineVersionTemplate_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetPipelineVersionTemplateRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(PipelineServiceServer).GetPipelineVersionTemplate(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/api.PipelineService/GetPipelineVersionTemplate", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(PipelineServiceServer).GetPipelineVersionTemplate(ctx, req.(*GetPipelineVersionTemplateRequest)) + } + return interceptor(ctx, in, info, handler) +} + var _PipelineService_serviceDesc = grpc.ServiceDesc{ ServiceName: "api.PipelineService", HandlerType: (*PipelineServiceServer)(nil), @@ -682,64 +1204,106 @@ var _PipelineService_serviceDesc = grpc.ServiceDesc{ MethodName: "GetTemplate", Handler: _PipelineService_GetTemplate_Handler, }, + { + MethodName: "CreatePipelineVersion", + Handler: _PipelineService_CreatePipelineVersion_Handler, + }, + { + MethodName: "GetPipelineVersion", + Handler: _PipelineService_GetPipelineVersion_Handler, + }, + { + MethodName: "ListPipelineVersions", + Handler: _PipelineService_ListPipelineVersions_Handler, + }, + { + MethodName: "DeletePipelineVersion", + Handler: _PipelineService_DeletePipelineVersion_Handler, + }, + { + MethodName: "GetPipelineVersionTemplate", + Handler: _PipelineService_GetPipelineVersionTemplate_Handler, + }, }, Streams: []grpc.StreamDesc{}, Metadata: "backend/api/pipeline.proto", } func init() { - proto.RegisterFile("backend/api/pipeline.proto", fileDescriptor_pipeline_a461ac7e0daee5b3) -} - -var fileDescriptor_pipeline_a461ac7e0daee5b3 = []byte{ - // 771 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x54, 0x4f, 0x53, 0x3b, 0x45, - 0x10, 0x75, 0x13, 0x08, 0x49, 0xe7, 0x97, 0x50, 0x0e, 0x7f, 0xb2, 0x2c, 0x20, 0x71, 0xa5, 0x30, - 0xa8, 0xec, 0x16, 0x70, 0xd2, 0x1b, 0x51, 0xcb, 0x8b, 0x56, 0x51, 0x01, 0x2e, 0x78, 0x48, 0xcd, - 0x26, 0x9d, 0x65, 0x64, 0xb3, 0xb3, 0xce, 0xcc, 0x82, 0x60, 0x79, 0xd1, 0xf2, 0xe4, 0x4d, 0xcf, - 0x7e, 0x2a, 0xbf, 0x82, 0x57, 0xbf, 0x83, 0xb5, 0xb3, 0x3b, 0x21, 0xff, 0xe0, 0x94, 0xf4, 0xeb, - 0xb7, 0xd3, 0xfd, 0x7a, 0x5e, 0x0f, 0x38, 0x01, 0x1d, 0xdc, 0x63, 0x3c, 0xf4, 0x69, 0xc2, 0xfc, - 0x84, 0x25, 0x18, 0xb1, 0x18, 0xbd, 0x44, 0x70, 0xc5, 0x49, 0x99, 0x26, 0xcc, 0xd9, 0x0b, 0x39, - 0x0f, 0x23, 0xd4, 0x79, 0x1a, 0xc7, 0x5c, 0x51, 0xc5, 0x78, 0x2c, 0x73, 0x8a, 0x73, 0x50, 0x64, - 0x75, 0x14, 0xa4, 0x23, 0x5f, 0xb1, 0x31, 0x4a, 0x45, 0xc7, 0x49, 0x41, 0xd8, 0x9d, 0x27, 0xe0, - 0x38, 0x51, 0x4f, 0x26, 0x39, 0x53, 0x9c, 0x0a, 0x3a, 0x46, 0x85, 0xa2, 0x48, 0xb6, 0xa6, 0x93, - 0x28, 0x04, 0x37, 0x89, 0xcf, 0xf4, 0xcf, 0xe0, 0x24, 0xc4, 0xf8, 0x44, 0x3e, 0xd2, 0x30, 0x44, - 0xe1, 0xf3, 0x44, 0x77, 0xb5, 0xd8, 0xa1, 0xdb, 0x81, 0xf2, 0x8d, 0x88, 0xc8, 0x87, 0xf0, 0xce, - 0xa8, 0xeb, 0xa7, 0x22, 
0xb2, 0xad, 0xb6, 0xd5, 0xa9, 0xf5, 0xea, 0x06, 0xbb, 0x11, 0x91, 0xdb, - 0x85, 0xad, 0x2f, 0x05, 0x52, 0x85, 0x97, 0x05, 0xd8, 0xc3, 0x1f, 0x53, 0x94, 0x8a, 0x1c, 0x43, - 0xd5, 0xf0, 0xf4, 0x77, 0xf5, 0xb3, 0x86, 0x47, 0x13, 0xe6, 0x4d, 0x78, 0x93, 0xb4, 0x7b, 0x08, - 0xe4, 0x1b, 0x54, 0xf3, 0x07, 0x34, 0xa1, 0xc4, 0x86, 0x45, 0xc9, 0x12, 0x1b, 0xba, 0xbf, 0x59, - 0xb0, 0xf9, 0x2d, 0x93, 0x13, 0x9e, 0x34, 0xc4, 0x7d, 0x80, 0x84, 0x86, 0xd8, 0x57, 0xfc, 0x1e, - 0xe3, 0xe2, 0x83, 0x5a, 0x86, 0x5c, 0x67, 0x00, 0xd9, 0x05, 0x1d, 0xf4, 0x25, 0x7b, 0x46, 0xbb, - 0xd4, 0xb6, 0x3a, 0xab, 0xbd, 0x6a, 0x06, 0x5c, 0xb1, 0x67, 0x24, 0x2d, 0x58, 0x93, 0x5c, 0xa8, - 0x7e, 0xf0, 0x64, 0x97, 0xf5, 0x87, 0x95, 0x2c, 0xec, 0x3e, 0x91, 0x6d, 0xa8, 0x8c, 0x58, 0xa4, - 0x50, 0xd8, 0x2b, 0x39, 0x9e, 0x47, 0xee, 0x1f, 0x16, 0x6c, 0xcd, 0x75, 0x21, 0x13, 0x1e, 0x4b, - 0x24, 0x9f, 0x42, 0xcd, 0x28, 0x92, 0xb6, 0xd5, 0x2e, 0x2f, 0x2a, 0x7e, 0xc9, 0x67, 0x3d, 0x2b, - 0xae, 0x68, 0x94, 0x77, 0x55, 0xd6, 0x5d, 0xd5, 0x34, 0xa2, 0xdb, 0x3a, 0x82, 0xf5, 0x18, 0x7f, - 0x52, 0xfd, 0x29, 0x5d, 0x25, 0xdd, 0x46, 0x23, 0x83, 0x2f, 0x8d, 0x36, 0xf7, 0x63, 0xd8, 0xfa, - 0x0a, 0x23, 0x5c, 0x9c, 0xfe, 0xfc, 0xf0, 0xf2, 0x11, 0x5f, 0xe3, 0x38, 0x89, 0xa8, 0x7a, 0x95, - 0x75, 0x0a, 0x1b, 0x33, 0xac, 0x42, 0x99, 0x03, 0x55, 0x55, 0x60, 0x05, 0x79, 0x12, 0xbb, 0xff, - 0x59, 0x50, 0x35, 0xc5, 0xe7, 0xcf, 0x23, 0x9f, 0x03, 0x0c, 0xb4, 0x39, 0x86, 0x7d, 0xaa, 0xb4, - 0x82, 0xfa, 0x99, 0xe3, 0xe5, 0xe6, 0xf6, 0x8c, 0xb9, 0xbd, 0x6b, 0xe3, 0xfe, 0x5e, 0xad, 0x60, - 0x5f, 0x28, 0x42, 0x60, 0x25, 0xa6, 0x63, 0x2c, 0x6e, 0x45, 0xff, 0x27, 0x6d, 0xa8, 0x0f, 0x51, - 0x0e, 0x04, 0xd3, 0xbe, 0x2d, 0x2e, 0x66, 0x1a, 0x22, 0x5e, 0x66, 0x85, 0x62, 0x23, 0xa4, 0xbd, - 0xaa, 0x2f, 0xa1, 0x99, 0x5f, 0x82, 0x81, 0x7b, 0x53, 0x0c, 0xe2, 0x40, 0x39, 0xf3, 0xf5, 0x9a, - 0xee, 0xac, 0xaa, 0x89, 0x37, 0x22, 0xea, 0x65, 0x20, 0xd9, 0x84, 0x55, 0xbd, 0x40, 0x76, 0x45, - 0xd7, 0xc9, 0x83, 0xb3, 0xbf, 0x57, 0x60, 0xdd, 0xe8, 0xbd, 0x42, 0xf1, 0xc0, 0x06, 0x48, 0x46, - 0xd0, 0x9c, 0xdd, 0x01, 0xe2, 0xe8, 0xa3, 0x96, 0x2e, 0x86, 0x33, 0x6b, 0x0a, 0xf7, 0xf8, 0xd7, - 0x7f, 0xfe, 0xfd, 0xab, 0xf4, 0x91, 0xdb, 0xca, 0x56, 0x56, 0xfa, 0x0f, 0xa7, 0x01, 0x2a, 0x7a, - 0x3a, 0x79, 0x55, 0xe4, 0x17, 0x93, 0x3d, 0x21, 0xdf, 0x43, 0x7d, 0x6a, 0x4f, 0x48, 0x4b, 0x1f, - 0xb4, 0xb8, 0x39, 0xf3, 0x15, 0x0e, 0x75, 0x85, 0x0f, 0xc8, 0xde, 0x2b, 0x15, 0xfc, 0x9f, 0xd9, - 0xf0, 0x17, 0x12, 0x42, 0x63, 0xc6, 0xd7, 0x64, 0x47, 0x9f, 0xb2, 0x6c, 0xe3, 0x1c, 0x67, 0x59, - 0x2a, 0x37, 0x8b, 0x7b, 0xa0, 0xab, 0xed, 0x90, 0xd7, 0xf4, 0x90, 0x1f, 0xa0, 0x39, 0xeb, 0xd9, - 0x62, 0x5a, 0x4b, 0x8d, 0xec, 0x6c, 0x2f, 0xd8, 0xe5, 0xeb, 0xec, 0x2d, 0x34, 0xa2, 0x3e, 0x79, - 0x5b, 0x54, 0xa2, 0x27, 0x66, 0x0c, 0xfd, 0x32, 0xb1, 0xb9, 0x45, 0x70, 0xec, 0xc5, 0x44, 0x21, - 0xc7, 0xd3, 0x75, 0x3a, 0xe4, 0xe8, 0xad, 0x3a, 0xbe, 0x59, 0x07, 0xd9, 0xfd, 0xdd, 0xfa, 0xf3, - 0xe2, 0xbb, 0xde, 0x1e, 0xac, 0x0d, 0x71, 0x44, 0xd3, 0x48, 0x91, 0xf7, 0xc9, 0x3a, 0x34, 0x9c, - 0xba, 0x2e, 0x70, 0xa5, 0xa8, 0x4a, 0xe5, 0xed, 0x01, 0xec, 0x43, 0xa5, 0x8b, 0x54, 0xa0, 0x20, - 0x1b, 0xd5, 0x92, 0xd3, 0xa0, 0xa9, 0xba, 0xe3, 0x82, 0x3d, 0xeb, 0x37, 0xb8, 0x5d, 0x0a, 0xde, - 0x01, 0x4c, 0x08, 0xef, 0xdd, 0x9e, 0x87, 0x4c, 0xdd, 0xa5, 0x81, 0x37, 0xe0, 0x63, 0xff, 0x3e, - 0x0d, 0x70, 0x14, 0xf1, 0xc7, 0xa9, 0x06, 0xa6, 0x5f, 0xfc, 0x90, 0xf7, 0x07, 0x11, 0xc3, 0x58, - 0x05, 0x15, 0x3d, 0xaf, 0xf3, 0xff, 0x03, 0x00, 0x00, 0xff, 0xff, 0x8f, 0x62, 0x5e, 0x5b, 0xab, - 0x06, 0x00, 0x00, + 
proto.RegisterFile("backend/api/pipeline.proto", fileDescriptor_pipeline_e46b3924128d1ae3) +} + +var fileDescriptor_pipeline_e46b3924128d1ae3 = []byte{ + // 1127 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x94, 0x56, 0xdd, 0x6e, 0xe3, 0xc4, + 0x17, 0x6f, 0x92, 0x36, 0x1f, 0x27, 0x9b, 0xf6, 0xff, 0x9f, 0xed, 0x47, 0xea, 0xa6, 0xdb, 0xd4, + 0x54, 0xa5, 0x05, 0x36, 0xa1, 0xed, 0x15, 0x2b, 0xed, 0xc5, 0x16, 0xd0, 0x0a, 0xb1, 0xa0, 0x95, + 0xdb, 0x72, 0xb1, 0x08, 0x99, 0x89, 0x73, 0x92, 0x35, 0x75, 0x6c, 0x33, 0x33, 0xe9, 0xd2, 0x22, + 0x6e, 0x40, 0x5c, 0x71, 0x07, 0x17, 0xbc, 0x04, 0x8f, 0x80, 0xc4, 0x43, 0x20, 0xf1, 0x04, 0x3c, + 0x08, 0xf2, 0x78, 0xc6, 0xb5, 0x9d, 0x8f, 0xed, 0x5e, 0x25, 0x73, 0xce, 0x6f, 0xe6, 0x9c, 0xf3, + 0x9b, 0x9f, 0xcf, 0x19, 0x30, 0x7a, 0xd4, 0xb9, 0x44, 0xbf, 0xdf, 0xa5, 0xa1, 0xdb, 0x0d, 0xdd, + 0x10, 0x3d, 0xd7, 0xc7, 0x4e, 0xc8, 0x02, 0x11, 0x90, 0x12, 0x0d, 0x5d, 0xa3, 0x35, 0x0c, 0x82, + 0xa1, 0x87, 0xd2, 0x4f, 0x7d, 0x3f, 0x10, 0x54, 0xb8, 0x81, 0xcf, 0x63, 0x88, 0xb1, 0xa3, 0xbc, + 0x72, 0xd5, 0x1b, 0x0f, 0xba, 0xc2, 0x1d, 0x21, 0x17, 0x74, 0x14, 0x2a, 0xc0, 0x56, 0x1e, 0x80, + 0xa3, 0x50, 0x5c, 0x2b, 0xe7, 0x46, 0x3a, 0x38, 0x32, 0x16, 0x30, 0xbd, 0x2b, 0x93, 0x15, 0x65, + 0x74, 0x84, 0x02, 0xb5, 0x73, 0x67, 0x5a, 0xca, 0x36, 0x0f, 0xd1, 0x51, 0x80, 0xbd, 0x34, 0x80, + 0x21, 0x0f, 0xc6, 0xcc, 0x41, 0x9b, 0xe1, 0x00, 0x19, 0xfa, 0x8e, 0xaa, 0xce, 0x78, 0x4f, 0xfe, + 0x38, 0x0f, 0x87, 0xe8, 0x3f, 0xe4, 0xaf, 0xe8, 0x70, 0x88, 0xac, 0x1b, 0x84, 0xb2, 0xb8, 0xc9, + 0x42, 0xcd, 0x03, 0x28, 0x5d, 0x30, 0x8f, 0xec, 0xc2, 0xbd, 0x24, 0xe2, 0x98, 0x79, 0xcd, 0x42, + 0xbb, 0x70, 0x50, 0xb3, 0xea, 0xda, 0x76, 0xc1, 0x3c, 0xf3, 0x14, 0xd6, 0x3e, 0x64, 0x48, 0x05, + 0x3e, 0x57, 0x46, 0x0b, 0xbf, 0x1d, 0x23, 0x17, 0xe4, 0x10, 0xaa, 0x1a, 0x27, 0xf7, 0xd5, 0x8f, + 0x1b, 0x1d, 0x1a, 0xba, 0x9d, 0x04, 0x97, 0xb8, 0xcd, 0x3d, 0x20, 0x4f, 0x51, 0xe4, 0x0f, 0x58, + 0x86, 0xa2, 0xdb, 0x57, 0x21, 0x8b, 0x6e, 0xdf, 0xfc, 0xa9, 0x00, 0xab, 0xcf, 0x5c, 0x9e, 0xe0, + 0xb8, 0x06, 0x6e, 0x03, 0x84, 0x74, 0x88, 0xb6, 0x08, 0x2e, 0xd1, 0x57, 0x1b, 0x6a, 0x91, 0xe5, + 0x3c, 0x32, 0x90, 0x2d, 0x90, 0x0b, 0x9b, 0xbb, 0x37, 0xd8, 0x2c, 0xb6, 0x0b, 0x07, 0x4b, 0x56, + 0x35, 0x32, 0x9c, 0xb9, 0x37, 0x48, 0x36, 0xa0, 0xc2, 0x03, 0x26, 0xec, 0xde, 0x75, 0xb3, 0x24, + 0x37, 0x96, 0xa3, 0xe5, 0xe9, 0x35, 0x59, 0x87, 0xf2, 0xc0, 0xf5, 0x04, 0xb2, 0xe6, 0x62, 0x6c, + 0x8f, 0x57, 0xe6, 0x2f, 0x05, 0x58, 0xcb, 0x65, 0xc1, 0xc3, 0xc0, 0xe7, 0x48, 0xde, 0x85, 0x9a, + 0xae, 0x88, 0x37, 0x0b, 0xed, 0xd2, 0x64, 0xc5, 0xb7, 0xfe, 0x28, 0x67, 0x11, 0x08, 0xea, 0xc5, + 0x59, 0x95, 0x64, 0x56, 0x35, 0x69, 0x91, 0x69, 0xed, 0xc3, 0x8a, 0x8f, 0xdf, 0x09, 0x3b, 0x55, + 0x57, 0x51, 0xa6, 0xd1, 0x88, 0xcc, 0xcf, 0x75, 0x6d, 0xe6, 0xdb, 0xb0, 0xf6, 0x11, 0x7a, 0x38, + 0xc9, 0x7e, 0x9e, 0xbc, 0x98, 0xe2, 0x73, 0x1c, 0x85, 0x1e, 0x15, 0x33, 0x51, 0x47, 0x70, 0x3f, + 0x83, 0x52, 0x95, 0x19, 0x50, 0x15, 0xca, 0xa6, 0xc0, 0xc9, 0xda, 0x3c, 0x85, 0xdd, 0xd4, 0xdd, + 0x7d, 0x81, 0x8c, 0xbb, 0x81, 0x9f, 0x8f, 0xb3, 0x0d, 0x70, 0x15, 0x7b, 0xec, 0x24, 0x5e, 0x4d, + 0x59, 0x3e, 0xe9, 0x9b, 0x9f, 0x43, 0x2b, 0xab, 0x21, 0x75, 0x8c, 0xde, 0xde, 0x81, 0x8a, 0x02, + 0x2b, 0x25, 0xad, 0x66, 0x78, 0xd5, 0x68, 0x0d, 0x32, 0x1f, 0xc1, 0xe6, 0x64, 0x4e, 0x77, 0xcc, + 0xe5, 0xaf, 0x02, 0x6c, 0xa5, 0xef, 0x57, 0xed, 0x4e, 0xc4, 0x76, 0x02, 0xf7, 0x92, 0x6f, 0xec, + 0x12, 0xaf, 0x55, 0x42, 0xff, 0x93, 0x09, 0x59, 0xca, 0xf1, 0x29, 0x5e, 0x5b, 0x75, 0x76, 0xbb, + 0x98, 0x2f, 0xc1, 
0xac, 0x7c, 0x4b, 0x79, 0xf9, 0xa6, 0x14, 0xba, 0x38, 0x43, 0xa1, 0x4b, 0x19, + 0x85, 0xfe, 0x5e, 0x80, 0xd6, 0xf4, 0x0a, 0xd4, 0x75, 0xbe, 0x0f, 0x55, 0x55, 0xaf, 0xd6, 0xe9, + 0x74, 0x3e, 0x13, 0xd4, 0x5d, 0xe5, 0xf8, 0x1a, 0x55, 0x9b, 0x8f, 0xa1, 0x95, 0x55, 0xeb, 0x9b, + 0x5d, 0xcd, 0x1f, 0x45, 0xa8, 0xea, 0x9d, 0x79, 0xe9, 0x92, 0x0f, 0x00, 0x1c, 0xa9, 0xa1, 0xbe, + 0x4d, 0x85, 0xcc, 0xae, 0x7e, 0x6c, 0x74, 0xe2, 0x76, 0xdc, 0xd1, 0xed, 0xb8, 0x73, 0xae, 0xfb, + 0xb5, 0x55, 0x53, 0xe8, 0x27, 0x82, 0x10, 0x58, 0xf4, 0xe9, 0x08, 0x15, 0xf5, 0xf2, 0x3f, 0x69, + 0x43, 0xbd, 0x8f, 0xdc, 0x61, 0xae, 0x6c, 0x91, 0x8a, 0xf9, 0xb4, 0x89, 0x74, 0xa2, 0x6b, 0x53, + 0xad, 0x9a, 0x37, 0x97, 0x24, 0x8f, 0xcb, 0x31, 0x8f, 0xda, 0x6c, 0xa5, 0x10, 0xc4, 0x80, 0x52, + 0xd4, 0x42, 0x2b, 0x32, 0xb3, 0xaa, 0x04, 0x5e, 0x30, 0xcf, 0x8a, 0x8c, 0x64, 0x15, 0x96, 0xe4, + 0x3c, 0x68, 0x96, 0x65, 0x9c, 0x78, 0x41, 0x1e, 0xc3, 0x4a, 0x1f, 0x07, 0x74, 0xec, 0x09, 0x5b, + 0xcb, 0xbf, 0x3a, 0x47, 0xfe, 0xcb, 0x0a, 0xac, 0xd6, 0xe6, 0x9f, 0x45, 0x58, 0xc9, 0x61, 0x26, + 0x58, 0xd3, 0xa5, 0x17, 0x53, 0xa5, 0x67, 0x99, 0x2c, 0xbd, 0x09, 0x93, 0x59, 0x4e, 0x16, 0x5f, + 0xcb, 0xc9, 0x3e, 0xac, 0x38, 0x41, 0x1f, 0x6d, 0xf5, 0x3d, 0x45, 0xfc, 0xc4, 0x5a, 0x6e, 0x44, + 0xe6, 0x33, 0x69, 0x8d, 0xe6, 0xd0, 0x21, 0xd4, 0x43, 0xea, 0x5c, 0x46, 0xea, 0x8b, 0x30, 0xe5, + 0x1c, 0x87, 0xa0, 0x9c, 0x11, 0xf4, 0x29, 0xdc, 0x9f, 0x9c, 0x81, 0xbc, 0x59, 0x91, 0xb9, 0xac, + 0x67, 0x3e, 0x53, 0x4b, 0xbb, 0x2d, 0xc2, 0xf2, 0x26, 0x7e, 0xfc, 0x4f, 0xe5, 0x96, 0xbe, 0x33, + 0x64, 0x57, 0xae, 0x83, 0x64, 0x00, 0xcb, 0xd9, 0x46, 0x45, 0x0c, 0x79, 0xe2, 0xd4, 0x09, 0x68, + 0x64, 0xbb, 0xbf, 0x79, 0xf8, 0xe3, 0xdf, 0xff, 0xfe, 0x56, 0x7c, 0xcb, 0xdc, 0x88, 0x06, 0x35, + 0xef, 0x5e, 0x1d, 0xf5, 0x50, 0xd0, 0xa3, 0x64, 0xa4, 0xf3, 0x47, 0xc9, 0x40, 0x24, 0x5f, 0x42, + 0x3d, 0xd5, 0xc0, 0xc8, 0x86, 0x3c, 0x68, 0x72, 0x44, 0xe6, 0x23, 0xec, 0xc9, 0x08, 0x0f, 0x48, + 0x6b, 0x46, 0x84, 0xee, 0xf7, 0x6e, 0xff, 0x07, 0x32, 0x84, 0x46, 0x66, 0x80, 0x91, 0x4d, 0x79, + 0xca, 0xb4, 0xd1, 0x6a, 0x18, 0xd3, 0x5c, 0x71, 0x1b, 0x31, 0x77, 0x64, 0xb4, 0x4d, 0x32, 0xab, + 0x1e, 0xf2, 0x0d, 0x2c, 0x67, 0x3f, 0x77, 0xc5, 0xd6, 0xd4, 0x89, 0x65, 0xac, 0x4f, 0x48, 0xec, + 0xe3, 0xe8, 0xed, 0xa4, 0x8b, 0x7a, 0x67, 0x7e, 0x51, 0xa1, 0x64, 0x4c, 0xcf, 0x9d, 0x5b, 0xc6, + 0x72, 0x93, 0xc8, 0x68, 0x4e, 0x3a, 0x54, 0x39, 0x1d, 0x19, 0xe7, 0x80, 0xec, 0xcf, 0x8b, 0xd3, + 0xd5, 0x73, 0x8f, 0x13, 0x2b, 0xff, 0xf0, 0xd1, 0xdf, 0xd8, 0xee, 0x14, 0x49, 0x64, 0x1b, 0x9d, + 0x31, 0xf5, 0x03, 0x36, 0x17, 0xc8, 0xb3, 0xcc, 0x43, 0x48, 0x1f, 0xf8, 0x20, 0x7f, 0xfd, 0x77, + 0x3c, 0xed, 0xab, 0xec, 0x7b, 0x49, 0xcf, 0x01, 0xd2, 0x9e, 0xb8, 0xd4, 0xdc, 0x90, 0x33, 0x76, + 0xe7, 0x20, 0x14, 0x5d, 0x0b, 0xe4, 0x3c, 0xff, 0xf6, 0xc8, 0x12, 0x30, 0xaf, 0xd3, 0xcf, 0xbc, + 0xec, 0x05, 0xf2, 0x35, 0x18, 0xb3, 0xdf, 0x13, 0x64, 0x7f, 0x06, 0x15, 0x77, 0xbf, 0xe6, 0x85, + 0xd3, 0x9f, 0x0b, 0xbf, 0x3e, 0xf9, 0xcc, 0x6a, 0x41, 0x45, 0xf5, 0x4b, 0xf2, 0x7f, 0xb2, 0x02, + 0x0d, 0xa3, 0x2e, 0xb7, 0x9c, 0x09, 0x2a, 0xc6, 0xfc, 0xc5, 0x0e, 0x6c, 0x43, 0xf9, 0x14, 0x29, + 0x43, 0x46, 0xee, 0x57, 0x8b, 0x46, 0x83, 0x8e, 0xc5, 0xcb, 0x80, 0xb9, 0x37, 0xf2, 0x95, 0xdc, + 0x2e, 0xf6, 0xee, 0x01, 0x24, 0x80, 0x85, 0x17, 0x27, 0x43, 0x57, 0xbc, 0x1c, 0xf7, 0x3a, 0x4e, + 0x30, 0xea, 0x5e, 0x8e, 0x7b, 0x38, 0xf0, 0x82, 0x57, 0x29, 0xe5, 0xa4, 0x1f, 0xe8, 0xc3, 0xc0, + 0x76, 0x3c, 0x17, 0x7d, 0xd1, 0x2b, 0xcb, 0xda, 0x4f, 0xfe, 0x0b, 0x00, 0x00, 0xff, 0xff, 0xca, + 0x2f, 0xa0, 0x4e, 0x94, 0x0c, 0x00, 0x00, } 
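That completes the regenerated pipeline.pb.go: the five new RPCs are threaded through the client implementation, the server interface, the unary handlers, and the service descriptor. A hedged sketch of driving the generated client follows (the server address, pipeline ID, and sort string are placeholders; `NewPipelineServiceClient` is the standard constructor protoc-gen-go emits for this service, though it is outside the diff context shown):

```go
package main

import (
	"context"
	"log"

	api "github.com/kubeflow/pipelines/backend/api/go_client"
	"google.golang.org/grpc"
)

func main() {
	// Dial the API server (placeholder address; plaintext for brevity).
	conn, err := grpc.Dial("localhost:8887", grpc.WithInsecure())
	if err != nil {
		log.Fatalf("dial: %v", err)
	}
	defer conn.Close()

	client := api.NewPipelineServiceClient(conn)

	// List the versions of a hypothetical pipeline, newest first.
	resp, err := client.ListPipelineVersions(context.Background(), &api.ListPipelineVersionsRequest{
		ResourceKey: &api.ResourceKey{Type: api.ResourceType_PIPELINE, Id: "hypothetical-pipeline-id"},
		PageSize:    10,
		SortBy:      "created_at desc",
	})
	if err != nil {
		log.Fatalf("ListPipelineVersions: %v", err)
	}
	for _, v := range resp.GetVersions() {
		log.Printf("version %s (%s)", v.GetName(), v.GetId())
	}
}
```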
diff --git a/backend/api/go_client/resource_reference.pb.go b/backend/api/go_client/resource_reference.pb.go index 45fe30247b5c..2c3b693a6002 100755 --- a/backend/api/go_client/resource_reference.pb.go +++ b/backend/api/go_client/resource_reference.pb.go @@ -38,24 +38,30 @@ const ( ResourceType_UNKNOWN_RESOURCE_TYPE ResourceType = 0 ResourceType_EXPERIMENT ResourceType = 1 ResourceType_JOB ResourceType = 2 + ResourceType_PIPELINE ResourceType = 3 + ResourceType_PIPELINE_VERSION ResourceType = 4 ) var ResourceType_name = map[int32]string{ 0: "UNKNOWN_RESOURCE_TYPE", 1: "EXPERIMENT", 2: "JOB", + 3: "PIPELINE", + 4: "PIPELINE_VERSION", } var ResourceType_value = map[string]int32{ "UNKNOWN_RESOURCE_TYPE": 0, "EXPERIMENT": 1, "JOB": 2, + "PIPELINE": 3, + "PIPELINE_VERSION": 4, } func (x ResourceType) String() string { return proto.EnumName(ResourceType_name, int32(x)) } func (ResourceType) EnumDescriptor() ([]byte, []int) { - return fileDescriptor_resource_reference_05b185b0f444334d, []int{0} + return fileDescriptor_resource_reference_81a9849386131b93, []int{0} } type Relationship int32 @@ -81,7 +87,7 @@ func (x Relationship) String() string { return proto.EnumName(Relationship_name, int32(x)) } func (Relationship) EnumDescriptor() ([]byte, []int) { - return fileDescriptor_resource_reference_05b185b0f444334d, []int{1} + return fileDescriptor_resource_reference_81a9849386131b93, []int{1} } type ResourceKey struct { @@ -96,7 +102,7 @@ func (m *ResourceKey) Reset() { *m = ResourceKey{} } func (m *ResourceKey) String() string { return proto.CompactTextString(m) } func (*ResourceKey) ProtoMessage() {} func (*ResourceKey) Descriptor() ([]byte, []int) { - return fileDescriptor_resource_reference_05b185b0f444334d, []int{0} + return fileDescriptor_resource_reference_81a9849386131b93, []int{0} } func (m *ResourceKey) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_ResourceKey.Unmarshal(m, b) @@ -143,7 +149,7 @@ func (m *ResourceReference) Reset() { *m = ResourceReference{} } func (m *ResourceReference) String() string { return proto.CompactTextString(m) } func (*ResourceReference) ProtoMessage() {} func (*ResourceReference) Descriptor() ([]byte, []int) { - return fileDescriptor_resource_reference_05b185b0f444334d, []int{1} + return fileDescriptor_resource_reference_81a9849386131b93, []int{1} } func (m *ResourceReference) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_ResourceReference.Unmarshal(m, b) @@ -192,30 +198,31 @@ func init() { } func init() { - proto.RegisterFile("backend/api/resource_reference.proto", fileDescriptor_resource_reference_05b185b0f444334d) -} - -var fileDescriptor_resource_reference_05b185b0f444334d = []byte{ - // 325 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x54, 0x91, 0x41, 0x6b, 0xc2, 0x40, - 0x10, 0x85, 0x4d, 0x62, 0x2b, 0x8e, 0x22, 0x71, 0x69, 0x21, 0xbd, 0x89, 0xb4, 0x20, 0x1e, 0x12, - 0x50, 0xbc, 0x57, 0xed, 0x42, 0xad, 0x6d, 0x22, 0x6b, 0xc4, 0xb6, 0x97, 0x90, 0xc4, 0x51, 0x17, - 0x63, 0x76, 0x89, 0x91, 0x92, 0x6b, 0x7f, 0x79, 0x31, 0x54, 0xa2, 0xb7, 0x81, 0xef, 0xf1, 0xcd, - 0x83, 0x07, 0x8f, 0x81, 0x1f, 0xee, 0x30, 0x5e, 0x59, 0xbe, 0xe4, 0x56, 0x82, 0x07, 0x71, 0x4c, - 0x42, 0xf4, 0x12, 0x5c, 0x63, 0x82, 0x71, 0x88, 0xa6, 0x4c, 0x44, 0x2a, 0x88, 0xe6, 0x4b, 0xde, - 0x7e, 0x81, 0x1a, 0xfb, 0x0f, 0x4c, 0x31, 0x23, 0x4f, 0x50, 0x4e, 0x33, 0x89, 0x86, 0xd2, 0x52, - 0x3a, 0x8d, 0x5e, 0xd3, 0xf4, 0x25, 0x37, 0xcf, 0xdc, 0xcd, 0x24, 0xb2, 0x1c, 0x93, 0x06, 0xa8, - 0x7c, 0x65, 0xa8, 0x2d, 0xa5, 0x53, 
0x65, 0x2a, 0x5f, 0xb5, 0x7f, 0x15, 0x68, 0x9e, 0x63, 0xec, - 0xfc, 0x86, 0xb4, 0x41, 0xdb, 0x61, 0x96, 0xbb, 0x6a, 0x3d, 0xfd, 0xca, 0x35, 0xc5, 0x8c, 0x9d, - 0x20, 0x21, 0x50, 0x8e, 0xfd, 0x3d, 0x1a, 0x5a, 0xee, 0xca, 0x6f, 0x32, 0x80, 0x7a, 0x82, 0x91, - 0x9f, 0x72, 0x11, 0x1f, 0xb6, 0x5c, 0xe6, 0x7f, 0x8a, 0x32, 0x05, 0x60, 0x57, 0xb1, 0xee, 0x08, - 0xea, 0x97, 0x55, 0xc9, 0x03, 0xdc, 0x2f, 0xec, 0xa9, 0xed, 0x2c, 0x6d, 0x8f, 0xd1, 0xb9, 0xb3, - 0x60, 0x63, 0xea, 0xb9, 0x5f, 0x33, 0xaa, 0x97, 0x48, 0x03, 0x80, 0x7e, 0xce, 0x28, 0x9b, 0x7c, - 0x50, 0xdb, 0xd5, 0x15, 0x52, 0x01, 0xed, 0xcd, 0x19, 0xe9, 0x6a, 0xf7, 0xf9, 0xe4, 0x28, 0x9c, - 0xc4, 0x80, 0xbb, 0xc2, 0xf1, 0x3e, 0x74, 0x27, 0x8e, 0x3d, 0x7f, 0x9d, 0xcc, 0xf4, 0x12, 0xa9, - 0xc2, 0x8d, 0xb3, 0xb4, 0x29, 0xd3, 0x15, 0x52, 0x83, 0xca, 0x98, 0xd1, 0xa1, 0xeb, 0x30, 0x5d, - 0x1d, 0x0d, 0xbe, 0xfb, 0x1b, 0x9e, 0x6e, 0x8f, 0x81, 0x19, 0x8a, 0xbd, 0xb5, 0x3b, 0x06, 0xb8, - 0x8e, 0xc4, 0x8f, 0x25, 0xb9, 0xc4, 0x88, 0xc7, 0x78, 0xb0, 0x2e, 0xb7, 0xd9, 0x08, 0x2f, 0x8c, - 0x38, 0xc6, 0x69, 0x70, 0x9b, 0x6f, 0xd2, 0xff, 0x0b, 0x00, 0x00, 0xff, 0xff, 0xa6, 0xdd, 0xe5, - 0xee, 0xbb, 0x01, 0x00, 0x00, + proto.RegisterFile("backend/api/resource_reference.proto", fileDescriptor_resource_reference_81a9849386131b93) +} + +var fileDescriptor_resource_reference_81a9849386131b93 = []byte{ + // 351 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x54, 0x91, 0x41, 0xab, 0xda, 0x40, + 0x14, 0x85, 0x5f, 0x12, 0xdb, 0xd7, 0x77, 0x15, 0x99, 0x37, 0x58, 0x48, 0x77, 0x22, 0x2d, 0x88, + 0x8b, 0x04, 0x14, 0xf7, 0x55, 0x3b, 0xd0, 0x54, 0x3b, 0x09, 0x63, 0xac, 0x6d, 0x37, 0x21, 0x89, + 0x57, 0x1d, 0x8c, 0xc9, 0x10, 0x23, 0x25, 0xdb, 0xfe, 0xf2, 0x62, 0x68, 0x88, 0xee, 0x66, 0xf8, + 0x0e, 0xe7, 0xbb, 0x70, 0xe0, 0x63, 0x14, 0xc6, 0x27, 0x4c, 0x77, 0x76, 0xa8, 0xa4, 0x9d, 0xe3, + 0x25, 0xbb, 0xe6, 0x31, 0x06, 0x39, 0xee, 0x31, 0xc7, 0x34, 0x46, 0x4b, 0xe5, 0x59, 0x91, 0x51, + 0x23, 0x54, 0x72, 0xf0, 0x05, 0xda, 0xe2, 0x7f, 0x60, 0x89, 0x25, 0xfd, 0x04, 0xad, 0xa2, 0x54, + 0x68, 0x6a, 0x7d, 0x6d, 0xd8, 0x1d, 0xbf, 0x5a, 0xa1, 0x92, 0x56, 0xcd, 0xfd, 0x52, 0xa1, 0xa8, + 0x30, 0xed, 0x82, 0x2e, 0x77, 0xa6, 0xde, 0xd7, 0x86, 0x2f, 0x42, 0x97, 0xbb, 0xc1, 0x5f, 0x0d, + 0x5e, 0xeb, 0x98, 0xa8, 0x35, 0x74, 0x00, 0xc6, 0x09, 0xcb, 0xaa, 0xab, 0x3d, 0x26, 0x0f, 0x5d, + 0x4b, 0x2c, 0xc5, 0x0d, 0x52, 0x0a, 0xad, 0x34, 0x3c, 0xa3, 0x69, 0x54, 0x5d, 0xd5, 0x9b, 0x4e, + 0xa1, 0x93, 0x63, 0x12, 0x16, 0x32, 0x4b, 0x2f, 0x47, 0xa9, 0x2a, 0x4f, 0x73, 0x4c, 0x03, 0xc4, + 0x43, 0x6c, 0xb4, 0x87, 0xce, 0xfd, 0xa9, 0xf4, 0x03, 0xbc, 0xdf, 0xf0, 0x25, 0x77, 0xb7, 0x3c, + 0x10, 0x6c, 0xed, 0x6e, 0xc4, 0x82, 0x05, 0xfe, 0x2f, 0x8f, 0x91, 0x27, 0xda, 0x05, 0x60, 0x3f, + 0x3d, 0x26, 0x9c, 0xef, 0x8c, 0xfb, 0x44, 0xa3, 0xcf, 0x60, 0x7c, 0x73, 0xe7, 0x44, 0xa7, 0x1d, + 0x78, 0xe7, 0x39, 0x1e, 0x5b, 0x39, 0x9c, 0x11, 0x83, 0xf6, 0x80, 0xd4, 0xbf, 0xe0, 0x07, 0x13, + 0x6b, 0xc7, 0xe5, 0xa4, 0x35, 0xfa, 0x7c, 0xf3, 0x34, 0x5e, 0x6a, 0x42, 0xaf, 0xf1, 0xac, 0x66, + 0xbe, 0xe3, 0xf2, 0xf5, 0x57, 0xc7, 0x23, 0x4f, 0xf4, 0x05, 0xde, 0xb8, 0x5b, 0xce, 0x04, 0xd1, + 0x68, 0x1b, 0x9e, 0x17, 0x82, 0xcd, 0x7c, 0x57, 0x10, 0x7d, 0x3e, 0xfd, 0x3d, 0x39, 0xc8, 0xe2, + 0x78, 0x8d, 0xac, 0x38, 0x3b, 0xdb, 0xa7, 0x6b, 0x84, 0xfb, 0x24, 0xfb, 0x63, 0x2b, 0xa9, 0x30, + 0x91, 0x29, 0x5e, 0xec, 0xfb, 0xfd, 0x0e, 0x59, 0x10, 0x27, 0x12, 0xd3, 0x22, 0x7a, 0x5b, 0xed, + 0x36, 0xf9, 0x17, 0x00, 0x00, 0xff, 0xff, 0xa7, 0x32, 0x3a, 0x06, 0xdf, 0x01, 0x00, 0x00, } 
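The `ResourceType` enum gains `PIPELINE` (3) and `PIPELINE_VERSION` (4), and both the name and value maps are extended in step, so `String()` and map lookups stay consistent. A quick sanity sketch (same assumed import path as above):

```go
package main

import (
	"fmt"

	api "github.com/kubeflow/pipelines/backend/api/go_client"
)

func main() {
	rt := api.ResourceType_PIPELINE_VERSION
	fmt.Println(rt.String())                        // PIPELINE_VERSION
	fmt.Println(api.ResourceType_value["PIPELINE"]) // 3
	fmt.Println(api.ResourceType(4) == rt)          // true
}
```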
diff --git a/backend/api/go_http_client/experiment_client/experiment_service/list_experiment_parameters.go b/backend/api/go_http_client/experiment_client/experiment_service/list_experiment_parameters.go index 944ca118fa3a..18a39e3f3b15 100644 --- a/backend/api/go_http_client/experiment_client/experiment_service/list_experiment_parameters.go +++ b/backend/api/go_http_client/experiment_client/experiment_service/list_experiment_parameters.go @@ -77,7 +77,7 @@ for the list experiment operation typically these are written to a http.Request type ListExperimentParams struct { /*Filter - A base-64 encoded, JSON-serialized Filter protocol buffer (see + A url-encoded, JSON-serialized Filter protocol buffer (see filter.proto). */ diff --git a/backend/api/go_http_client/job_client/job_service/list_jobs_parameters.go b/backend/api/go_http_client/job_client/job_service/list_jobs_parameters.go index 8b1e0acfb76b..9bde0a8072cd 100644 --- a/backend/api/go_http_client/job_client/job_service/list_jobs_parameters.go +++ b/backend/api/go_http_client/job_client/job_service/list_jobs_parameters.go @@ -89,7 +89,7 @@ for the list jobs operation typically these are written to a http.Request type ListJobsParams struct { /*Filter - A base-64 encoded, JSON-serialized Filter protocol buffer (see + A url-encoded, JSON-serialized Filter protocol buffer (see filter.proto). */ diff --git a/backend/api/go_http_client/job_model/api_cron_schedule.go b/backend/api/go_http_client/job_model/api_cron_schedule.go index 2782d88bf82a..9d69899ea829 100644 --- a/backend/api/go_http_client/job_model/api_cron_schedule.go +++ b/backend/api/go_http_client/job_model/api_cron_schedule.go @@ -31,7 +31,8 @@ import ( // swagger:model apiCronSchedule type APICronSchedule struct { - // The cron string. For details how to compose a cron, visit ttps://en.wikipedia.org/wiki/Cron + // The cron string. 
For details how to compose a cron, visit + // https://en.wikipedia.org/wiki/Cron Cron string `json:"cron,omitempty"` // The end time of the cron job diff --git a/backend/api/go_http_client/job_model/api_resource_type.go b/backend/api/go_http_client/job_model/api_resource_type.go index a3f511527986..cc2493d0627b 100644 --- a/backend/api/go_http_client/job_model/api_resource_type.go +++ b/backend/api/go_http_client/job_model/api_resource_type.go @@ -42,6 +42,12 @@ const ( // APIResourceTypeJOB captures enum value "JOB" APIResourceTypeJOB APIResourceType = "JOB" + + // APIResourceTypePIPELINE captures enum value "PIPELINE" + APIResourceTypePIPELINE APIResourceType = "PIPELINE" + + // APIResourceTypePIPELINEVERSION captures enum value "PIPELINE_VERSION" + APIResourceTypePIPELINEVERSION APIResourceType = "PIPELINE_VERSION" ) // for schema @@ -49,7 +55,7 @@ var apiResourceTypeEnum []interface{} func init() { var res []APIResourceType - if err := json.Unmarshal([]byte(`["UNKNOWN_RESOURCE_TYPE","EXPERIMENT","JOB"]`), &res); err != nil { + if err := json.Unmarshal([]byte(`["UNKNOWN_RESOURCE_TYPE","EXPERIMENT","JOB","PIPELINE","PIPELINE_VERSION"]`), &res); err != nil { panic(err) } for _, v := range res { diff --git a/backend/api/go_http_client/pipeline_client/pipeline_service/list_pipelines_parameters.go b/backend/api/go_http_client/pipeline_client/pipeline_service/list_pipelines_parameters.go index 6375cecd1909..0336da5df203 100644 --- a/backend/api/go_http_client/pipeline_client/pipeline_service/list_pipelines_parameters.go +++ b/backend/api/go_http_client/pipeline_client/pipeline_service/list_pipelines_parameters.go @@ -77,7 +77,7 @@ for the list pipelines operation typically these are written to a http.Request type ListPipelinesParams struct { /*Filter - A base-64 encoded, JSON-serialized Filter protocol buffer (see + A url-encoded, JSON-serialized Filter protocol buffer (see filter.proto). */ diff --git a/backend/api/go_http_client/pipeline_model/BUILD.bazel b/backend/api/go_http_client/pipeline_model/BUILD.bazel index a8aaa0c70009..a35b4bfea07c 100644 --- a/backend/api/go_http_client/pipeline_model/BUILD.bazel +++ b/backend/api/go_http_client/pipeline_model/BUILD.bazel @@ -4,9 +4,15 @@ go_library( name = "go_default_library", srcs = [ "api_get_template_response.go", + "api_list_pipeline_versions_response.go", "api_list_pipelines_response.go", "api_parameter.go", "api_pipeline.go", + "api_pipeline_version.go", + "api_relationship.go", + "api_resource_key.go", + "api_resource_reference.go", + "api_resource_type.go", "api_status.go", "api_url.go", "protobuf_any.go", diff --git a/backend/api/go_http_client/pipeline_model/api_list_pipeline_versions_response.go b/backend/api/go_http_client/pipeline_model/api_list_pipeline_versions_response.go new file mode 100644 index 000000000000..14225bdbae98 --- /dev/null +++ b/backend/api/go_http_client/pipeline_model/api_list_pipeline_versions_response.go @@ -0,0 +1,100 @@ +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License.
+ +// Code generated by go-swagger; DO NOT EDIT. + +package pipeline_model + +// This file was generated by the swagger tool. +// Editing this file might prove futile when you re-run the swagger generate command + +import ( + "strconv" + + strfmt "github.com/go-openapi/strfmt" + + "github.com/go-openapi/errors" + "github.com/go-openapi/swag" +) + +// APIListPipelineVersionsResponse api list pipeline versions response +// swagger:model apiListPipelineVersionsResponse +type APIListPipelineVersionsResponse struct { + + // next page token + NextPageToken string `json:"next_page_token,omitempty"` + + // total size + TotalSize int32 `json:"total_size,omitempty"` + + // versions + Versions []*APIPipelineVersion `json:"versions"` +} + +// Validate validates this api list pipeline versions response +func (m *APIListPipelineVersionsResponse) Validate(formats strfmt.Registry) error { + var res []error + + if err := m.validateVersions(formats); err != nil { + res = append(res, err) + } + + if len(res) > 0 { + return errors.CompositeValidationError(res...) + } + return nil +} + +func (m *APIListPipelineVersionsResponse) validateVersions(formats strfmt.Registry) error { + + if swag.IsZero(m.Versions) { // not required + return nil + } + + for i := 0; i < len(m.Versions); i++ { + if swag.IsZero(m.Versions[i]) { // not required + continue + } + + if m.Versions[i] != nil { + if err := m.Versions[i].Validate(formats); err != nil { + if ve, ok := err.(*errors.Validation); ok { + return ve.ValidateName("versions" + "." + strconv.Itoa(i)) + } + return err + } + } + + } + + return nil +} + +// MarshalBinary interface implementation +func (m *APIListPipelineVersionsResponse) MarshalBinary() ([]byte, error) { + if m == nil { + return nil, nil + } + return swag.WriteJSON(m) +} + +// UnmarshalBinary interface implementation +func (m *APIListPipelineVersionsResponse) UnmarshalBinary(b []byte) error { + var res APIListPipelineVersionsResponse + if err := swag.ReadJSON(b, &res); err != nil { + return err + } + *m = res + return nil +} diff --git a/backend/api/go_http_client/pipeline_model/api_pipeline.go b/backend/api/go_http_client/pipeline_model/api_pipeline.go index d8a8f8f5849c..4b40d15542b7 100644 --- a/backend/api/go_http_client/pipeline_model/api_pipeline.go +++ b/backend/api/go_http_client/pipeline_model/api_pipeline.go @@ -37,6 +37,12 @@ type APIPipeline struct { // Format: date-time CreatedAt strfmt.DateTime `json:"created_at,omitempty"` + // Output only. The default version of the pipeline. As of now, the latest + // version is used as default. (In the future, if desired by customers, we + // can allow them to set default version.) + // Read Only: true + DefaultVersion *APIPipelineVersion `json:"default_version,omitempty"` + // Optional input field. Describing the purpose of the job. Description string `json:"description,omitempty"` @@ -53,10 +59,16 @@ type APIPipeline struct { Name string `json:"name,omitempty"` // Output. The input parameters for this pipeline. + // TODO(jingzhang36): replace this parameters field with the parameters field + // inside PipelineVersion when all usage of the former has been changed to use + // the latter. Parameters []*APIParameter `json:"parameters"` // The URL to the source of the pipeline. This is required when creating the // pipeline through CreatePipeline API. + // TODO(jingzhang36): replace this url field with the code_source_urls field + // inside PipelineVersion when all usage of the former has been changed to use + // the latter.
URL *APIURL `json:"url,omitempty"` } @@ -68,6 +80,10 @@ func (m *APIPipeline) Validate(formats strfmt.Registry) error { res = append(res, err) } + if err := m.validateDefaultVersion(formats); err != nil { + res = append(res, err) + } + if err := m.validateParameters(formats); err != nil { res = append(res, err) } @@ -95,6 +111,24 @@ func (m *APIPipeline) validateCreatedAt(formats strfmt.Registry) error { return nil } +func (m *APIPipeline) validateDefaultVersion(formats strfmt.Registry) error { + + if swag.IsZero(m.DefaultVersion) { // not required + return nil + } + + if m.DefaultVersion != nil { + if err := m.DefaultVersion.Validate(formats); err != nil { + if ve, ok := err.(*errors.Validation); ok { + return ve.ValidateName("default_version") + } + return err + } + } + + return nil +} + func (m *APIPipeline) validateParameters(formats strfmt.Registry) error { if swag.IsZero(m.Parameters) { // not required diff --git a/backend/api/go_http_client/pipeline_model/api_pipeline_version.go b/backend/api/go_http_client/pipeline_model/api_pipeline_version.go new file mode 100644 index 000000000000..f071b3ab9a9c --- /dev/null +++ b/backend/api/go_http_client/pipeline_model/api_pipeline_version.go @@ -0,0 +1,185 @@ +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Code generated by go-swagger; DO NOT EDIT. + +package pipeline_model + +// This file was generated by the swagger tool. +// Editing this file might prove futile when you re-run the swagger generate command + +import ( + "strconv" + + strfmt "github.com/go-openapi/strfmt" + + "github.com/go-openapi/errors" + "github.com/go-openapi/swag" + "github.com/go-openapi/validate" +) + +// APIPipelineVersion api pipeline version +// swagger:model apiPipelineVersion
type APIPipelineVersion struct { + + // Input. Optional. Pipeline version code source. + CodeSourceURL string `json:"code_source_url,omitempty"` + + // Output. The time this pipeline version is created. + // Format: date-time + CreatedAt strfmt.DateTime `json:"created_at,omitempty"` + + // Output. Unique version ID. Generated by API server. + ID string `json:"id,omitempty"` + + // Optional input field. Version name provided by user. + Name string `json:"name,omitempty"` + + // Input. Required. Pipeline version package url. + // When calling the CreatePipelineVersion API method, a package file + // location must be provided. + PackageURL *APIURL `json:"package_url,omitempty"` + + // Output. The input parameters for this pipeline. + Parameters []*APIParameter `json:"parameters"` + + // Input. Required. E.g., specify which pipeline this pipeline version belongs + // to.
+ ResourceReferences []*APIResourceReference `json:"resource_references"` +} + +// Validate validates this api pipeline version +func (m *APIPipelineVersion) Validate(formats strfmt.Registry) error { + var res []error + + if err := m.validateCreatedAt(formats); err != nil { + res = append(res, err) + } + + if err := m.validatePackageURL(formats); err != nil { + res = append(res, err) + } + + if err := m.validateParameters(formats); err != nil { + res = append(res, err) + } + + if err := m.validateResourceReferences(formats); err != nil { + res = append(res, err) + } + + if len(res) > 0 { + return errors.CompositeValidationError(res...) + } + return nil +} + +func (m *APIPipelineVersion) validateCreatedAt(formats strfmt.Registry) error { + + if swag.IsZero(m.CreatedAt) { // not required + return nil + } + + if err := validate.FormatOf("created_at", "body", "date-time", m.CreatedAt.String(), formats); err != nil { + return err + } + + return nil +} + +func (m *APIPipelineVersion) validatePackageURL(formats strfmt.Registry) error { + + if swag.IsZero(m.PackageURL) { // not required + return nil + } + + if m.PackageURL != nil { + if err := m.PackageURL.Validate(formats); err != nil { + if ve, ok := err.(*errors.Validation); ok { + return ve.ValidateName("package_url") + } + return err + } + } + + return nil +} + +func (m *APIPipelineVersion) validateParameters(formats strfmt.Registry) error { + + if swag.IsZero(m.Parameters) { // not required + return nil + } + + for i := 0; i < len(m.Parameters); i++ { + if swag.IsZero(m.Parameters[i]) { // not required + continue + } + + if m.Parameters[i] != nil { + if err := m.Parameters[i].Validate(formats); err != nil { + if ve, ok := err.(*errors.Validation); ok { + return ve.ValidateName("parameters" + "." + strconv.Itoa(i)) + } + return err + } + } + + } + + return nil +} + +func (m *APIPipelineVersion) validateResourceReferences(formats strfmt.Registry) error { + + if swag.IsZero(m.ResourceReferences) { // not required + return nil + } + + for i := 0; i < len(m.ResourceReferences); i++ { + if swag.IsZero(m.ResourceReferences[i]) { // not required + continue + } + + if m.ResourceReferences[i] != nil { + if err := m.ResourceReferences[i].Validate(formats); err != nil { + if ve, ok := err.(*errors.Validation); ok { + return ve.ValidateName("resource_references" + "." + strconv.Itoa(i)) + } + return err + } + } + + } + + return nil +} + +// MarshalBinary interface implementation +func (m *APIPipelineVersion) MarshalBinary() ([]byte, error) { + if m == nil { + return nil, nil + } + return swag.WriteJSON(m) +} + +// UnmarshalBinary interface implementation +func (m *APIPipelineVersion) UnmarshalBinary(b []byte) error { + var res APIPipelineVersion + if err := swag.ReadJSON(b, &res); err != nil { + return err + } + *m = res + return nil +} diff --git a/backend/api/go_http_client/pipeline_model/api_relationship.go b/backend/api/go_http_client/pipeline_model/api_relationship.go new file mode 100644 index 000000000000..14793853f975 --- /dev/null +++ b/backend/api/go_http_client/pipeline_model/api_relationship.go @@ -0,0 +1,80 @@ +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Code generated by go-swagger; DO NOT EDIT. + +package pipeline_model + +// This file was generated by the swagger tool. +// Editing this file might prove futile when you re-run the swagger generate command + +import ( + "encoding/json" + + strfmt "github.com/go-openapi/strfmt" + + "github.com/go-openapi/errors" + "github.com/go-openapi/validate" +) + +// APIRelationship api relationship +// swagger:model apiRelationship +type APIRelationship string + +const ( + + // APIRelationshipUNKNOWNRELATIONSHIP captures enum value "UNKNOWN_RELATIONSHIP" + APIRelationshipUNKNOWNRELATIONSHIP APIRelationship = "UNKNOWN_RELATIONSHIP" + + // APIRelationshipOWNER captures enum value "OWNER" + APIRelationshipOWNER APIRelationship = "OWNER" + + // APIRelationshipCREATOR captures enum value "CREATOR" + APIRelationshipCREATOR APIRelationship = "CREATOR" +) + +// for schema +var apiRelationshipEnum []interface{} + +func init() { + var res []APIRelationship + if err := json.Unmarshal([]byte(`["UNKNOWN_RELATIONSHIP","OWNER","CREATOR"]`), &res); err != nil { + panic(err) + } + for _, v := range res { + apiRelationshipEnum = append(apiRelationshipEnum, v) + } +} + +func (m APIRelationship) validateAPIRelationshipEnum(path, location string, value APIRelationship) error { + if err := validate.Enum(path, location, value, apiRelationshipEnum); err != nil { + return err + } + return nil +} + +// Validate validates this api relationship +func (m APIRelationship) Validate(formats strfmt.Registry) error { + var res []error + + // value enum + if err := m.validateAPIRelationshipEnum("", "body", m); err != nil { + return err + } + + if len(res) > 0 { + return errors.CompositeValidationError(res...) + } + return nil +} diff --git a/backend/api/go_http_client/pipeline_model/api_resource_key.go b/backend/api/go_http_client/pipeline_model/api_resource_key.go new file mode 100644 index 000000000000..b1dd0f9d6334 --- /dev/null +++ b/backend/api/go_http_client/pipeline_model/api_resource_key.go @@ -0,0 +1,86 @@ +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Code generated by go-swagger; DO NOT EDIT. + +package pipeline_model + +// This file was generated by the swagger tool. +// Editing this file might prove futile when you re-run the swagger generate command + +import ( + strfmt "github.com/go-openapi/strfmt" + + "github.com/go-openapi/errors" + "github.com/go-openapi/swag" +) + +// APIResourceKey api resource key +// swagger:model apiResourceKey +type APIResourceKey struct { + + // The ID of the resource that is referred to.
ID string `json:"id,omitempty"` + + // The type of the resource that is referred to. + Type APIResourceType `json:"type,omitempty"` +} + +// Validate validates this api resource key +func (m *APIResourceKey) Validate(formats strfmt.Registry) error { + var res []error + + if err := m.validateType(formats); err != nil { + res = append(res, err) + } + + if len(res) > 0 { + return errors.CompositeValidationError(res...) + } + return nil +} + +func (m *APIResourceKey) validateType(formats strfmt.Registry) error { + + if swag.IsZero(m.Type) { // not required + return nil + } + + if err := m.Type.Validate(formats); err != nil { + if ve, ok := err.(*errors.Validation); ok { + return ve.ValidateName("type") + } + return err + } + + return nil +} + +// MarshalBinary interface implementation +func (m *APIResourceKey) MarshalBinary() ([]byte, error) { + if m == nil { + return nil, nil + } + return swag.WriteJSON(m) +} + +// UnmarshalBinary interface implementation +func (m *APIResourceKey) UnmarshalBinary(b []byte) error { + var res APIResourceKey + if err := swag.ReadJSON(b, &res); err != nil { + return err + } + *m = res + return nil +} diff --git a/backend/api/go_http_client/pipeline_model/api_resource_reference.go b/backend/api/go_http_client/pipeline_model/api_resource_reference.go new file mode 100644 index 000000000000..1af1cedc6c02 --- /dev/null +++ b/backend/api/go_http_client/pipeline_model/api_resource_reference.go @@ -0,0 +1,111 @@ +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Code generated by go-swagger; DO NOT EDIT. + +package pipeline_model + +// This file was generated by the swagger tool. +// Editing this file might prove futile when you re-run the swagger generate command + +import ( + strfmt "github.com/go-openapi/strfmt" + + "github.com/go-openapi/errors" + "github.com/go-openapi/swag" +) + +// APIResourceReference api resource reference +// swagger:model apiResourceReference +type APIResourceReference struct { + + // key + Key *APIResourceKey `json:"key,omitempty"` + + // The name of the resource that is referred to. + Name string `json:"name,omitempty"` + + // Required field. The relationship from the referred resource to the object. + Relationship APIRelationship `json:"relationship,omitempty"` +} + +// Validate validates this api resource reference +func (m *APIResourceReference) Validate(formats strfmt.Registry) error { + var res []error + + if err := m.validateKey(formats); err != nil { + res = append(res, err) + } + + if err := m.validateRelationship(formats); err != nil { + res = append(res, err) + } + + if len(res) > 0 { + return errors.CompositeValidationError(res...)
+ } + return nil +} + +func (m *APIResourceReference) validateKey(formats strfmt.Registry) error { + + if swag.IsZero(m.Key) { // not required + return nil + } + + if m.Key != nil { + if err := m.Key.Validate(formats); err != nil { + if ve, ok := err.(*errors.Validation); ok { + return ve.ValidateName("key") + } + return err + } + } + + return nil +} + +func (m *APIResourceReference) validateRelationship(formats strfmt.Registry) error { + + if swag.IsZero(m.Relationship) { // not required + return nil + } + + if err := m.Relationship.Validate(formats); err != nil { + if ve, ok := err.(*errors.Validation); ok { + return ve.ValidateName("relationship") + } + return err + } + + return nil +} + +// MarshalBinary interface implementation +func (m *APIResourceReference) MarshalBinary() ([]byte, error) { + if m == nil { + return nil, nil + } + return swag.WriteJSON(m) +} + +// UnmarshalBinary interface implementation +func (m *APIResourceReference) UnmarshalBinary(b []byte) error { + var res APIResourceReference + if err := swag.ReadJSON(b, &res); err != nil { + return err + } + *m = res + return nil +} diff --git a/backend/api/go_http_client/pipeline_model/api_resource_type.go b/backend/api/go_http_client/pipeline_model/api_resource_type.go new file mode 100644 index 000000000000..363e3e9b21d2 --- /dev/null +++ b/backend/api/go_http_client/pipeline_model/api_resource_type.go @@ -0,0 +1,86 @@ +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Code generated by go-swagger; DO NOT EDIT. + +package pipeline_model + +// This file was generated by the swagger tool. 
+// Editing this file might prove futile when you re-run the swagger generate command + +import ( + "encoding/json" + + strfmt "github.com/go-openapi/strfmt" + + "github.com/go-openapi/errors" + "github.com/go-openapi/validate" +) + +// APIResourceType api resource type +// swagger:model apiResourceType +type APIResourceType string + +const ( + + // APIResourceTypeUNKNOWNRESOURCETYPE captures enum value "UNKNOWN_RESOURCE_TYPE" + APIResourceTypeUNKNOWNRESOURCETYPE APIResourceType = "UNKNOWN_RESOURCE_TYPE" + + // APIResourceTypeEXPERIMENT captures enum value "EXPERIMENT" + APIResourceTypeEXPERIMENT APIResourceType = "EXPERIMENT" + + // APIResourceTypeJOB captures enum value "JOB" + APIResourceTypeJOB APIResourceType = "JOB" + + // APIResourceTypePIPELINE captures enum value "PIPELINE" + APIResourceTypePIPELINE APIResourceType = "PIPELINE" + + // APIResourceTypePIPELINEVERSION captures enum value "PIPELINE_VERSION" + APIResourceTypePIPELINEVERSION APIResourceType = "PIPELINE_VERSION" +) + +// for schema +var apiResourceTypeEnum []interface{} + +func init() { + var res []APIResourceType + if err := json.Unmarshal([]byte(`["UNKNOWN_RESOURCE_TYPE","EXPERIMENT","JOB","PIPELINE","PIPELINE_VERSION"]`), &res); err != nil { + panic(err) + } + for _, v := range res { + apiResourceTypeEnum = append(apiResourceTypeEnum, v) + } +} + +func (m APIResourceType) validateAPIResourceTypeEnum(path, location string, value APIResourceType) error { + if err := validate.Enum(path, location, value, apiResourceTypeEnum); err != nil { + return err + } + return nil +} + +// Validate validates this api resource type +func (m APIResourceType) Validate(formats strfmt.Registry) error { + var res []error + + // value enum + if err := m.validateAPIResourceTypeEnum("", "body", m); err != nil { + return err + } + + if len(res) > 0 { + return errors.CompositeValidationError(res...) + } + return nil +} diff --git a/backend/api/go_http_client/run_client/run_service/list_runs_parameters.go b/backend/api/go_http_client/run_client/run_service/list_runs_parameters.go index a1ff43897854..213a42156030 100644 --- a/backend/api/go_http_client/run_client/run_service/list_runs_parameters.go +++ b/backend/api/go_http_client/run_client/run_service/list_runs_parameters.go @@ -89,7 +89,7 @@ for the list runs operation typically these are written to a http.Request type ListRunsParams struct { /*Filter - A base-64 encoded, JSON-serialized Filter protocol buffer (see + A url-encoded, JSON-serialized Filter protocol buffer (see filter.proto). 
*/
diff --git a/backend/api/go_http_client/run_model/api_resource_type.go b/backend/api/go_http_client/run_model/api_resource_type.go
index d0fc5c2d1714..7e9c02d7db74 100644
--- a/backend/api/go_http_client/run_model/api_resource_type.go
+++ b/backend/api/go_http_client/run_model/api_resource_type.go
@@ -42,6 +42,12 @@ const (

 	// APIResourceTypeJOB captures enum value "JOB"
 	APIResourceTypeJOB APIResourceType = "JOB"
+
+	// APIResourceTypePIPELINE captures enum value "PIPELINE"
+	APIResourceTypePIPELINE APIResourceType = "PIPELINE"
+
+	// APIResourceTypePIPELINEVERSION captures enum value "PIPELINE_VERSION"
+	APIResourceTypePIPELINEVERSION APIResourceType = "PIPELINE_VERSION"
 )

 // for schema
@@ -49,7 +55,7 @@ var apiResourceTypeEnum []interface{}

 func init() {
 	var res []APIResourceType
-	if err := json.Unmarshal([]byte(`["UNKNOWN_RESOURCE_TYPE","EXPERIMENT","JOB"]`), &res); err != nil {
+	if err := json.Unmarshal([]byte(`["UNKNOWN_RESOURCE_TYPE","EXPERIMENT","JOB","PIPELINE","PIPELINE_VERSION"]`), &res); err != nil {
 		panic(err)
 	}
 	for _, v := range res {
diff --git a/backend/api/go_http_client/run_model/api_run_metric.go b/backend/api/go_http_client/run_model/api_run_metric.go
index fc3eddc2c9ce..2c5b92ec495c 100644
--- a/backend/api/go_http_client/run_model/api_run_metric.go
+++ b/backend/api/go_http_client/run_model/api_run_metric.go
@@ -33,14 +33,15 @@ type APIRunMetric struct {
 	// The display format of metric.
 	Format RunMetricFormat `json:"format,omitempty"`

-	// Required. The user defined name of the metric. It must between 1 and 63 characters
-	// long and must conform to the following regular expression:
+	// Required. The user-defined name of the metric. It must be between 1 and 63
+	// characters long and must conform to the following regular expression:
 	// `[a-z]([-a-z0-9]*[a-z0-9])?`.
 	Name string `json:"name,omitempty"`

-	// Required. The runtime node ID which reports the metric. The node ID can be found in
-	// the RunDetail.workflow.Status. Metric with same (node_id, name)
-	// are considerd as duplicate. Only the first reporting will be recorded. Max length is 128.
+	// Required. The runtime node ID which reports the metric. The node ID can be
+	// found in the RunDetail.workflow.Status. Metrics with the same (node_id,
+	// name) are considered duplicates. Only the first reporting will be recorded.
+	// Max length is 128.
 	NodeID string `json:"node_id,omitempty"`

 	// The number value of the metric.
diff --git a/backend/api/job.proto b/backend/api/job.proto
index 42829b82c7cb..c1dc076d5935 100644
--- a/backend/api/job.proto
+++ b/backend/api/job.proto
@@ -123,8 +123,9 @@ message ListJobsRequest {
   // resource_reference_key.type=EXPERIMENT&resource_reference_key.id=123
   ResourceKey resource_reference_key = 4;

-  // A base-64 encoded, JSON-serialized Filter protocol buffer (see
-  // filter.proto).
+  // A url-encoded, JSON-serialized Filter protocol buffer (see
+  // [filter.proto](https://github.com/kubeflow/pipelines/
+  // blob/master/backend/api/filter.proto)).
   string filter = 5;
 }

@@ -158,7 +159,8 @@ message CronSchedule {
   // The end time of the cron job
   google.protobuf.Timestamp end_time = 2;

-  // The cron string. For details how to compose a cron, visit ttps://en.wikipedia.org/wiki/Cron
+  // The cron string. For details on how to compose a cron, visit
+  // https://en.wikipedia.org/wiki/Cron
   string cron = 3;
 }

diff --git a/backend/api/pipeline.proto b/backend/api/pipeline.proto
index fa775bed4f8e..d47385ee53c8 100644
--- a/backend/api/pipeline.proto
+++ b/backend/api/pipeline.proto
@@ -91,6 +91,45 @@ service PipelineService {
       get: "/apis/v1beta1/pipelines/{id}/templates"
     };
   }
+
+  rpc CreatePipelineVersion(CreatePipelineVersionRequest)
+      returns (PipelineVersion) {
+    // TODO(jingzhang36): uncomment when exposing this API method.
+    // option (google.api.http) = {
+    //   post: "/apis/v1beta1/pipeline_versions"
+    //   body: "version"
+    // };
+  }
+
+  rpc GetPipelineVersion(GetPipelineVersionRequest) returns (PipelineVersion) {
+    // TODO(jingzhang36): uncomment when exposing this API method.
+    // option (google.api.http) = {
+    //   get: "/apis/v1beta1/pipeline_versions/{version_id}"
+    // };
+  }
+
+  rpc ListPipelineVersions(ListPipelineVersionsRequest)
+      returns (ListPipelineVersionsResponse) {
+    // TODO(jingzhang36): uncomment when exposing this API method.
+    // option (google.api.http) = {
+    //   get: "/apis/v1beta1/pipeline_versions"
+    // };
+  }
+
+  rpc DeletePipelineVersion(DeletePipelineVersionRequest)
+      returns (google.protobuf.Empty) {
+    // TODO(jingzhang36): uncomment when exposing this API method.
+    // option (google.api.http) = {
+    //   delete: "/apis/v1beta1/pipeline_versions/{version_id}"
+    // };
+  }
+
+  rpc GetPipelineVersionTemplate(GetPipelineVersionTemplateRequest) returns (GetTemplateResponse) {
+    // TODO(jingzhang36): uncomment when exposing this API method.
+    // option (google.api.http) = {
+    //   get: "/apis/v1beta1/pipeline_versions/{version_id}/templates"
+    // };
+  }
 }

 message Url {
@@ -115,8 +154,9 @@ message ListPipelinesRequest {
   // Ascending by default.
   string sort_by = 3;

-  // A base-64 encoded, JSON-serialized Filter protocol buffer (see
-  // filter.proto).
+  // A url-encoded, JSON-serialized Filter protocol buffer (see
+  // [filter.proto](https://github.com/kubeflow/pipelines/
+  // blob/master/backend/api/filter.proto)).
   string filter = 4;
 }

@@ -138,7 +178,7 @@ message GetTemplateResponse {
   string template = 1;
 }

-message GetPipelineVersionTemplateRequest{
+message GetPipelineVersionTemplateRequest {
   string version_id = 1;
 }

@@ -157,11 +197,23 @@ message ListPipelineVersionsRequest {
   ResourceKey resource_key = 1;
   int32 page_size = 2;
   string page_token = 3;
+
+  // Can be format of "field_name", "field_name asc" or "field_name desc".
+  // Ascending by default.
+  string sort_by = 4;
+  // A url-encoded, JSON-serialized Filter protocol buffer (see
+  // filter.proto).
+  string filter = 5;
 }

 message ListPipelineVersionsResponse {
   repeated PipelineVersion versions = 1;
   string next_page_token = 2;
+  int32 total_size = 3;
+}
+
+message DeletePipelineVersionRequest {
+  string version_id = 1;
 }

 message Pipeline {
@@ -199,8 +251,7 @@ message Pipeline {
   // Output only. The default version of the pipeline. As of now, the latest
   // version is used as default. (In the future, if desired by customers, we
   // can allow them to set default version.)
-  // TODO(jingzhang36): expose this in API pipeline definition with FE changes.
-  // PipelineVersion default_version = 8;
+  PipelineVersion default_version = 8;
 }

 message PipelineVersion {
@@ -228,4 +279,3 @@ message PipelineVersion {
   // to.
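  // (For example, a version owned by a pipeline would carry
  // {key: {type: PIPELINE, id: "<pipeline id>"}, relationship: OWNER};
  // that OWNER reference is what the CreatePipelineVersion implementation
  // uses to locate the parent pipeline. Illustrative only.)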
   repeated ResourceReference resource_references = 7;
 }
-
diff --git a/backend/api/resource_reference.proto b/backend/api/resource_reference.proto
index d866843d0631..f1ba7d70b420 100644
--- a/backend/api/resource_reference.proto
+++ b/backend/api/resource_reference.proto
@@ -22,6 +22,7 @@ enum ResourceType {
   EXPERIMENT = 1;
   JOB = 2;
   PIPELINE = 3;
+  PIPELINE_VERSION = 4;
 }

 enum Relationship {
diff --git a/backend/api/run.proto b/backend/api/run.proto
index bb1728f04084..2f01172b5c27 100644
--- a/backend/api/run.proto
+++ b/backend/api/run.proto
@@ -59,7 +59,9 @@ option (grpc.gateway.protoc_gen_swagger.options.openapiv2_swagger) = {
   }
 };

+// Service for creating and managing runs.
 service RunService {
+  // Creates a new run.
   rpc CreateRun(CreateRunRequest) returns (RunDetail) {
     option (google.api.http) = {
       post: "/apis/v1beta1/runs"
@@ -67,30 +69,35 @@ service RunService {
     };
   }

+  // Finds a specific run by ID.
   rpc GetRun(GetRunRequest) returns (RunDetail) {
     option (google.api.http) = {
       get: "/apis/v1beta1/runs/{run_id}"
     };
   }

+  // Finds all runs.
   rpc ListRuns(ListRunsRequest) returns (ListRunsResponse) {
     option (google.api.http) = {
       get: "/apis/v1beta1/runs"
     };
   }

+  // Archives a run.
   rpc ArchiveRun(ArchiveRunRequest) returns (google.protobuf.Empty) {
     option (google.api.http) = {
       post: "/apis/v1beta1/runs/{id}:archive"
     };
   }

+  // Restores an archived run.
   rpc UnarchiveRun(UnarchiveRunRequest) returns (google.protobuf.Empty) {
     option (google.api.http) = {
       post: "/apis/v1beta1/runs/{id}:unarchive"
     };
   }

+  // Deletes a run.
   rpc DeleteRun(DeleteRunRequest) returns (google.protobuf.Empty) {
     option (google.api.http) = {
       delete: "/apis/v1beta1/runs/{id}"
@@ -98,28 +105,32 @@ service RunService {
   }

   // ReportRunMetrics reports metrics of a run. Each metric is reported in its
-  // own transaction, so this API accepts partial failures. Metric can be uniquely
-  // identified by (run_id, node_id, name). Duplicate reporting will be
+  // own transaction, so this API accepts partial failures. Metric can be
+  // uniquely identified by (run_id, node_id, name). Duplicate reporting will be
   // ignored by the API. First reporting wins.
-  rpc ReportRunMetrics(ReportRunMetricsRequest) returns (ReportRunMetricsResponse) {
+  rpc ReportRunMetrics(ReportRunMetricsRequest)
+      returns (ReportRunMetricsResponse) {
     option (google.api.http) = {
       post: "/apis/v1beta1/runs/{run_id}:reportMetrics"
       body: "*"
     };
   }

+  // Finds a run's artifact data.
   rpc ReadArtifact(ReadArtifactRequest) returns (ReadArtifactResponse) {
     option (google.api.http) = {
       get: "/apis/v1beta1/runs/{run_id}/nodes/{node_id}/artifacts/{artifact_name}:read"
     };
   }

+  // Terminates an active run.
   rpc TerminateRun(TerminateRunRequest) returns (google.protobuf.Empty) {
     option (google.api.http) = {
       post: "/apis/v1beta1/runs/{run_id}/terminate"
     };
   }

+  // Re-initiates a failed or terminated run.
   rpc RetryRun(RetryRunRequest) returns (google.protobuf.Empty) {
     option (google.api.http) = {
       post: "/apis/v1beta1/runs/{run_id}/retry"
@@ -139,7 +150,7 @@ message ListRunsRequest {
   string page_token = 1;
   int32 page_size = 2;
   // Can be format of "field_name", "field_name asc" or "field_name desc"
-  // Ascending by default.
+  // (for example, "name asc" or "id desc"). Ascending by default.
   string sort_by = 3;

   // What resource reference to filter on.
@@ -147,8 +158,9 @@ message ListRunsRequest {
   // resource_reference_key.type=EXPERIMENT&resource_reference_key.id=123
   ResourceKey resource_reference_key = 4;

-  // A base-64 encoded, JSON-serialized Filter protocol buffer (see
-  // filter.proto).
+  // A url-encoded, JSON-serialized Filter protocol buffer (see
+  // [filter.proto](https://github.com/kubeflow/pipelines/
+  // blob/master/backend/api/filter.proto)).
   string filter = 5;
 }

@@ -245,14 +257,15 @@ message RunDetail {
 }

 message RunMetric {
-  // Required. The user defined name of the metric. It must between 1 and 63 characters
-  // long and must conform to the following regular expression:
+  // Required. The user-defined name of the metric. It must be between 1 and 63
+  // characters long and must conform to the following regular expression:
   // `[a-z]([-a-z0-9]*[a-z0-9])?`.
   string name = 1;

-  // Required. The runtime node ID which reports the metric. The node ID can be found in
-  // the RunDetail.workflow.Status. Metric with same (node_id, name)
-  // are considerd as duplicate. Only the first reporting will be recorded. Max length is 128.
+  // Required. The runtime node ID which reports the metric. The node ID can be
+  // found in the RunDetail.workflow.Status. Metrics with the same (node_id,
+  // name) are considered duplicates. Only the first reporting will be recorded.
+  // Max length is 128.
   string node_id = 2;

   oneof value {
diff --git a/backend/api/swagger/experiment.swagger.json b/backend/api/swagger/experiment.swagger.json
index 231960c43c7d..2c1e31bff018 100644
--- a/backend/api/swagger/experiment.swagger.json
+++ b/backend/api/swagger/experiment.swagger.json
@@ -55,7 +55,7 @@
         },
         {
           "name": "filter",
-          "description": "A base-64 encoded, JSON-serialized Filter protocol buffer (see\nfilter.proto).",
+          "description": "A url-encoded, JSON-serialized Filter protocol buffer (see\nfilter.proto).",
          "in": "query",
           "required": false,
           "type": "string"
diff --git a/backend/api/swagger/job.swagger.json b/backend/api/swagger/job.swagger.json
index 7e4ffb27192f..adfd13f0f2f7 100644
--- a/backend/api/swagger/job.swagger.json
+++ b/backend/api/swagger/job.swagger.json
@@ -62,7 +62,9 @@
             "enum": [
               "UNKNOWN_RESOURCE_TYPE",
               "EXPERIMENT",
-              "JOB"
+              "JOB",
+              "PIPELINE",
+              "PIPELINE_VERSION"
             ],
             "default": "UNKNOWN_RESOURCE_TYPE"
           },
@@ -75,7 +77,7 @@
         },
         {
           "name": "filter",
-          "description": "A base-64 encoded, JSON-serialized Filter protocol buffer (see\nfilter.proto).",
+          "description": "A url-encoded, JSON-serialized Filter protocol buffer (see\nfilter.proto).",
           "in": "query",
           "required": false,
           "type": "string"
@@ -266,7 +268,7 @@
         },
         "cron": {
           "type": "string",
-          "title": "The cron string. 
For details how to compose a cron, visit\nttps://en.wikipedia.org/wiki/Cron" } }, "title": "CronSchedule allow scheduling the job with unix-like cron" @@ -456,7 +458,9 @@ "enum": [ "UNKNOWN_RESOURCE_TYPE", "EXPERIMENT", - "JOB" + "JOB", + "PIPELINE", + "PIPELINE_VERSION" ], "default": "UNKNOWN_RESOURCE_TYPE" }, diff --git a/backend/api/swagger/kfp_api_single_file.swagger.json b/backend/api/swagger/kfp_api_single_file.swagger.json index 88c96c352cf2..124fee5c6aef 100644 --- a/backend/api/swagger/kfp_api_single_file.swagger.json +++ b/backend/api/swagger/kfp_api_single_file.swagger.json @@ -63,7 +63,9 @@ "enum": [ "UNKNOWN_RESOURCE_TYPE", "EXPERIMENT", - "JOB" + "JOB", + "PIPELINE", + "PIPELINE_VERSION" ], "default": "UNKNOWN_RESOURCE_TYPE" }, @@ -76,7 +78,7 @@ }, { "name": "filter", - "description": "A base-64 encoded, JSON-serialized Filter protocol buffer (see\nfilter.proto).", + "description": "A url-encoded, JSON-serialized Filter protocol buffer (see\nfilter.proto).", "in": "query", "required": false, "type": "string" @@ -344,7 +346,7 @@ }, "/apis/v1beta1/runs/{run_id}:reportMetrics": { "post": { - "summary": "ReportRunMetrics reports metrics of a run. Each metric is reported in its\nown transaction, so this API accepts partial failures. Metric can be uniquely\nidentified by (run_id, node_id, name). Duplicate reporting will be\nignored by the API. First reporting wins.", + "summary": "ReportRunMetrics reports metrics of a run. Each metric is reported in its\nown transaction, so this API accepts partial failures. Metric can be\nuniquely identified by (run_id, node_id, name). Duplicate reporting will be\nignored by the API. First reporting wins.", "operationId": "ReportRunMetrics", "responses": { "200": { @@ -429,7 +431,9 @@ "enum": [ "UNKNOWN_RESOURCE_TYPE", "EXPERIMENT", - "JOB" + "JOB", + "PIPELINE", + "PIPELINE_VERSION" ], "default": "UNKNOWN_RESOURCE_TYPE" }, @@ -442,7 +446,7 @@ }, { "name": "filter", - "description": "A base-64 encoded, JSON-serialized Filter protocol buffer (see\nfilter.proto).", + "description": "A url-encoded, JSON-serialized Filter protocol buffer (see\nfilter.proto).", "in": "query", "required": false, "type": "string" @@ -646,7 +650,7 @@ }, { "name": "filter", - "description": "A base-64 encoded, JSON-serialized Filter protocol buffer (see\nfilter.proto).", + "description": "A url-encoded, JSON-serialized Filter protocol buffer (see\nfilter.proto).", "in": "query", "required": false, "type": "string" @@ -815,7 +819,7 @@ }, { "name": "filter", - "description": "A base-64 encoded, JSON-serialized Filter protocol buffer (see\nfilter.proto).", + "description": "A url-encoded, JSON-serialized Filter protocol buffer (see\nfilter.proto).", "in": "query", "required": false, "type": "string" @@ -1163,7 +1167,9 @@ "enum": [ "UNKNOWN_RESOURCE_TYPE", "EXPERIMENT", - "JOB" + "JOB", + "PIPELINE", + "PIPELINE_VERSION" ], "default": "UNKNOWN_RESOURCE_TYPE" }, @@ -1244,11 +1250,11 @@ "properties": { "name": { "type": "string", - "description": "Required. The user defined name of the metric. It must between 1 and 63 characters\nlong and must conform to the following regular expression:\n`[a-z]([-a-z0-9]*[a-z0-9])?`." + "description": "Required. The user defined name of the metric. It must between 1 and 63\ncharacters long and must conform to the following regular expression:\n`[a-z]([-a-z0-9]*[a-z0-9])?`." }, "node_id": { "type": "string", - "description": "Required. The runtime node ID which reports the metric. The node ID can be found in\nthe RunDetail.workflow.Status. 
Metric with same (node_id, name)\nare considerd as duplicate. Only the first reporting will be recorded. Max length is 128." + "description": "Required. The runtime node ID which reports the metric. The node ID can be\nfound in the RunDetail.workflow.Status. Metric with same (node_id, name)\nare considerd as duplicate. Only the first reporting will be recorded. Max\nlength is 128." }, "number_value": { "type": "number", @@ -1319,7 +1325,7 @@ }, "cron": { "type": "string", - "title": "The cron string. For details how to compose a cron, visit ttps://en.wikipedia.org/wiki/Cron" + "title": "The cron string. For details how to compose a cron, visit\nttps://en.wikipedia.org/wiki/Cron" } }, "title": "CronSchedule allow scheduling the job with unix-like cron" @@ -1447,6 +1453,24 @@ } } }, + "apiListPipelineVersionsResponse": { + "type": "object", + "properties": { + "versions": { + "type": "array", + "items": { + "$ref": "#/definitions/apiPipelineVersion" + } + }, + "next_page_token": { + "type": "string" + }, + "total_size": { + "type": "integer", + "format": "int32" + } + } + }, "apiListPipelinesResponse": { "type": "object", "properties": { @@ -1490,15 +1514,60 @@ "items": { "$ref": "#/definitions/apiParameter" }, - "description": "Output. The input parameters for this pipeline." + "description": "Output. The input parameters for this pipeline.\nTODO(jingzhang36): replace this parameters field with the parameters field\ninside PipelineVersion when all usage of the former has been changed to use\nthe latter." }, "url": { "$ref": "#/definitions/apiUrl", - "description": "The URL to the source of the pipeline. This is required when creating the\npipeine through CreatePipeline API." + "description": "The URL to the source of the pipeline. This is required when creating the\npipeine through CreatePipeline API.\nTODO(jingzhang36): replace this url field with the code_source_urls field\ninside PipelineVersion when all usage of the former has been changed to use\nthe latter." }, "error": { "type": "string", "description": "In case any error happens retrieving a pipeline field, only pipeline ID\nand the error message is returned. Client has the flexibility of choosing\nhow to handle error. This is especially useful during listing call." + }, + "default_version": { + "$ref": "#/definitions/apiPipelineVersion", + "title": "Output only. The default version of the pipeline. As of now, the latest\nversion is used as default. (In the future, if desired by customers, we\ncan allow them to set default version.)", + "readOnly": true + } + } + }, + "apiPipelineVersion": { + "type": "object", + "properties": { + "id": { + "type": "string", + "description": "Output. Unique version ID. Generated by API server." + }, + "name": { + "type": "string", + "description": "Optional input field. Version name provided by user." + }, + "created_at": { + "type": "string", + "format": "date-time", + "description": "Output. The time this pipeline version is created." + }, + "parameters": { + "type": "array", + "items": { + "$ref": "#/definitions/apiParameter" + }, + "description": "Output. The input parameters for this pipeline." + }, + "code_source_url": { + "type": "string", + "description": "Input. Optional. Pipeline version code source." + }, + "package_url": { + "$ref": "#/definitions/apiUrl", + "description": "Input. Required. Pipeline version package url.\nWhe calling CreatePipelineVersion API method, need to provide one package\nfile location." 
+ }, + "resource_references": { + "type": "array", + "items": { + "$ref": "#/definitions/apiResourceReference" + }, + "description": "Input. Required. E.g., specify which pipeline this pipeline version belongs\nto." } } }, diff --git a/backend/api/swagger/pipeline.swagger.json b/backend/api/swagger/pipeline.swagger.json index 5f77efa2d1ea..4b075c056021 100644 --- a/backend/api/swagger/pipeline.swagger.json +++ b/backend/api/swagger/pipeline.swagger.json @@ -55,7 +55,7 @@ }, { "name": "filter", - "description": "A base-64 encoded, JSON-serialized Filter protocol buffer (see\nfilter.proto).", + "description": "A url-encoded, JSON-serialized Filter protocol buffer (see\nfilter.proto).", "in": "query", "required": false, "type": "string" @@ -194,6 +194,24 @@ } } }, + "apiListPipelineVersionsResponse": { + "type": "object", + "properties": { + "versions": { + "type": "array", + "items": { + "$ref": "#/definitions/apiPipelineVersion" + } + }, + "next_page_token": { + "type": "string" + }, + "total_size": { + "type": "integer", + "format": "int32" + } + } + }, "apiListPipelinesResponse": { "type": "object", "properties": { @@ -248,18 +266,112 @@ "items": { "$ref": "#/definitions/apiParameter" }, - "description": "Output. The input parameters for this pipeline." + "description": "Output. The input parameters for this pipeline.\nTODO(jingzhang36): replace this parameters field with the parameters field\ninside PipelineVersion when all usage of the former has been changed to use\nthe latter." }, "url": { "$ref": "#/definitions/apiUrl", - "description": "The URL to the source of the pipeline. This is required when creating the\npipeine through CreatePipeline API." + "description": "The URL to the source of the pipeline. This is required when creating the\npipeine through CreatePipeline API.\nTODO(jingzhang36): replace this url field with the code_source_urls field\ninside PipelineVersion when all usage of the former has been changed to use\nthe latter." }, "error": { "type": "string", "description": "In case any error happens retrieving a pipeline field, only pipeline ID\nand the error message is returned. Client has the flexibility of choosing\nhow to handle error. This is especially useful during listing call." + }, + "default_version": { + "$ref": "#/definitions/apiPipelineVersion", + "title": "Output only. The default version of the pipeline. As of now, the latest\nversion is used as default. (In the future, if desired by customers, we\ncan allow them to set default version.)", + "readOnly": true } } }, + "apiPipelineVersion": { + "type": "object", + "properties": { + "id": { + "type": "string", + "description": "Output. Unique version ID. Generated by API server." + }, + "name": { + "type": "string", + "description": "Optional input field. Version name provided by user." + }, + "created_at": { + "type": "string", + "format": "date-time", + "description": "Output. The time this pipeline version is created." + }, + "parameters": { + "type": "array", + "items": { + "$ref": "#/definitions/apiParameter" + }, + "description": "Output. The input parameters for this pipeline." + }, + "code_source_url": { + "type": "string", + "description": "Input. Optional. Pipeline version code source." + }, + "package_url": { + "$ref": "#/definitions/apiUrl", + "description": "Input. Required. Pipeline version package url.\nWhe calling CreatePipelineVersion API method, need to provide one package\nfile location." 
+ }, + "resource_references": { + "type": "array", + "items": { + "$ref": "#/definitions/apiResourceReference" + }, + "description": "Input. Required. E.g., specify which pipeline this pipeline version belongs\nto." + } + } + }, + "apiRelationship": { + "type": "string", + "enum": [ + "UNKNOWN_RELATIONSHIP", + "OWNER", + "CREATOR" + ], + "default": "UNKNOWN_RELATIONSHIP" + }, + "apiResourceKey": { + "type": "object", + "properties": { + "type": { + "$ref": "#/definitions/apiResourceType", + "description": "The type of the resource that referred to." + }, + "id": { + "type": "string", + "description": "The ID of the resource that referred to." + } + } + }, + "apiResourceReference": { + "type": "object", + "properties": { + "key": { + "$ref": "#/definitions/apiResourceKey" + }, + "name": { + "type": "string", + "description": "The name of the resource that referred to." + }, + "relationship": { + "$ref": "#/definitions/apiRelationship", + "description": "Required field. The relationship from referred resource to the object." + } + } + }, + "apiResourceType": { + "type": "string", + "enum": [ + "UNKNOWN_RESOURCE_TYPE", + "EXPERIMENT", + "JOB", + "PIPELINE", + "PIPELINE_VERSION" + ], + "default": "UNKNOWN_RESOURCE_TYPE" + }, "apiStatus": { "type": "object", "properties": { diff --git a/backend/api/swagger/run.swagger.json b/backend/api/swagger/run.swagger.json index d944bfa54e95..863278a88a8b 100644 --- a/backend/api/swagger/run.swagger.json +++ b/backend/api/swagger/run.swagger.json @@ -62,7 +62,9 @@ "enum": [ "UNKNOWN_RESOURCE_TYPE", "EXPERIMENT", - "JOB" + "JOB", + "PIPELINE", + "PIPELINE_VERSION" ], "default": "UNKNOWN_RESOURCE_TYPE" }, @@ -75,7 +77,7 @@ }, { "name": "filter", - "description": "A base-64 encoded, JSON-serialized Filter protocol buffer (see\nfilter.proto).", + "description": "A url-encoded, JSON-serialized Filter protocol buffer (see\nfilter.proto).", "in": "query", "required": false, "type": "string" @@ -343,7 +345,7 @@ }, "/apis/v1beta1/runs/{run_id}:reportMetrics": { "post": { - "summary": "ReportRunMetrics reports metrics of a run. Each metric is reported in its\nown transaction, so this API accepts partial failures. Metric can be uniquely\nidentified by (run_id, node_id, name). Duplicate reporting will be\nignored by the API. First reporting wins.", + "summary": "ReportRunMetrics reports metrics of a run. Each metric is reported in its\nown transaction, so this API accepts partial failures. Metric can be\nuniquely identified by (run_id, node_id, name). Duplicate reporting will be\nignored by the API. First reporting wins.", "operationId": "ReportRunMetrics", "responses": { "200": { @@ -584,7 +586,9 @@ "enum": [ "UNKNOWN_RESOURCE_TYPE", "EXPERIMENT", - "JOB" + "JOB", + "PIPELINE", + "PIPELINE_VERSION" ], "default": "UNKNOWN_RESOURCE_TYPE" }, @@ -665,11 +669,11 @@ "properties": { "name": { "type": "string", - "description": "Required. The user defined name of the metric. It must between 1 and 63 characters\nlong and must conform to the following regular expression:\n`[a-z]([-a-z0-9]*[a-z0-9])?`." + "description": "Required. The user defined name of the metric. It must between 1 and 63\ncharacters long and must conform to the following regular expression:\n`[a-z]([-a-z0-9]*[a-z0-9])?`." }, "node_id": { "type": "string", - "description": "Required. The runtime node ID which reports the metric. The node ID can be found in\nthe RunDetail.workflow.Status. Metric with same (node_id, name)\nare considerd as duplicate. Only the first reporting will be recorded. 
Max length is 128." + "description": "Required. The runtime node ID which reports the metric. The node ID can be\nfound in the RunDetail.workflow.Status. Metric with same (node_id, name)\nare considerd as duplicate. Only the first reporting will be recorded. Max\nlength is 128." }, "number_value": { "type": "number", diff --git a/backend/src/apiserver/BUILD.bazel b/backend/src/apiserver/BUILD.bazel index 8489d944f59c..eaa254f7a64c 100644 --- a/backend/src/apiserver/BUILD.bazel +++ b/backend/src/apiserver/BUILD.bazel @@ -26,7 +26,6 @@ go_library( "@com_github_grpc_ecosystem_grpc_gateway//runtime:go_default_library", "@com_github_jinzhu_gorm//:go_default_library", "@com_github_jinzhu_gorm//dialects/sqlite:go_default_library", - "@com_github_masterminds_squirrel//:go_default_library", "@com_github_minio_minio_go//:go_default_library", "@com_github_spf13_viper//:go_default_library", "@io_k8s_client_go//kubernetes/typed/core/v1:go_default_library", diff --git a/backend/src/apiserver/client/sql.go b/backend/src/apiserver/client/sql.go index 949de2164910..a7a93bce7bf0 100644 --- a/backend/src/apiserver/client/sql.go +++ b/backend/src/apiserver/client/sql.go @@ -21,13 +21,18 @@ import ( ) func CreateMySQLConfig(user, password string, mysqlServiceHost string, - mysqlServicePort string, dbName string) *mysql.Config { + mysqlServicePort string, dbName string, mysqlGroupConcatMaxLen string) *mysql.Config { return &mysql.Config{ User: user, Passwd: password, Net: "tcp", Addr: fmt.Sprintf("%s:%s", mysqlServiceHost, mysqlServicePort), - Params: map[string]string{"charset": "utf8", "parseTime": "True", "loc": "Local"}, + Params: map[string]string{ + "charset": "utf8", + "parseTime": "True", + "loc": "Local", + "group_concat_max_len": mysqlGroupConcatMaxLen, + }, DBName: dbName, AllowNativePasswords: true, } diff --git a/backend/src/apiserver/client_manager.go b/backend/src/apiserver/client_manager.go index 4a81fbf6e784..179e9e728a51 100644 --- a/backend/src/apiserver/client_manager.go +++ b/backend/src/apiserver/client_manager.go @@ -44,6 +44,7 @@ const ( mysqlUser = "DBConfig.User" mysqlPassword = "DBConfig.Password" mysqlDBName = "DBConfig.DBName" + mysqlGroupConcatMaxLen = "DBConfig.GroupConcatMaxLen" visualizationServiceHost = "ML_PIPELINE_VISUALIZATIONSERVER_SERVICE_HOST" visualizationServicePort = "ML_PIPELINE_VISUALIZATIONSERVER_SERVICE_PORT" @@ -204,7 +205,7 @@ func initDBClient(initConnectionTimeout time.Duration) *storage.DB { glog.Fatalf("Failed to initialize the databases.") } - response = db.Model(&model.ResourceReference{}).ModifyColumn("Payload", "longtext") + response = db.Model(&model.ResourceReference{}).ModifyColumn("Payload", "longtext not null") if response.Error != nil { glog.Fatalf("Failed to update the resource reference payload type. Error: %s", response.Error) } @@ -226,6 +227,11 @@ func initDBClient(initConnectionTimeout time.Duration) *storage.DB { initPipelineVersionsFromPipelines(db) } + response = db.Model(&model.Pipeline{}).ModifyColumn("Description", "longtext not null") + if response.Error != nil { + glog.Fatalf("Failed to update pipeline description type. 
Error: %s", response.Error) + } + return storage.NewDB(db.DB(), storage.NewMySQLDialect()) } @@ -237,7 +243,9 @@ func initMysql(driverName string, initConnectionTimeout time.Duration) string { common.GetStringConfigWithDefault(mysqlPassword, ""), common.GetStringConfigWithDefault(mysqlServiceHost, "mysql"), common.GetStringConfigWithDefault(mysqlServicePort, "3306"), - "") + "", + common.GetStringConfigWithDefault(mysqlGroupConcatMaxLen, "1024"), + ) var db *sql.DB var err error diff --git a/backend/src/apiserver/common/const.go b/backend/src/apiserver/common/const.go index a1fcd849b5e2..5a27fa7ed759 100644 --- a/backend/src/apiserver/common/const.go +++ b/backend/src/apiserver/common/const.go @@ -23,10 +23,11 @@ type ResourceType string type Relationship string const ( - Experiment ResourceType = "Experiment" - Job ResourceType = "Job" - Run ResourceType = "Run" - Pipeline ResourceType = "pipeline" + Experiment ResourceType = "Experiment" + Job ResourceType = "Job" + Run ResourceType = "Run" + Pipeline ResourceType = "pipeline" + PipelineVersion ResourceType = "PipelineVersion" ) const ( @@ -40,6 +41,8 @@ func ToModelResourceType(apiType api.ResourceType) (ResourceType, error) { return Experiment, nil case api.ResourceType_JOB: return Job, nil + case api.ResourceType_PIPELINE_VERSION: + return PipelineVersion, nil default: return "", util.NewInvalidInputError("Unsupported resource type: %s", api.ResourceType_name[int32(apiType)]) } diff --git a/backend/src/apiserver/config/config.json b/backend/src/apiserver/config/config.json index 15ce69a2980b..7cf3bff7576b 100644 --- a/backend/src/apiserver/config/config.json +++ b/backend/src/apiserver/config/config.json @@ -2,7 +2,8 @@ "DBConfig": { "DriverName": "mysql", "DataSourceName": "", - "DBName": "mlpipeline" + "DBName": "mlpipeline", + "GroupConcatMaxLen": "4194304" }, "ObjectStoreConfig":{ "AccessKey": "minio", diff --git a/backend/src/apiserver/config/sample_config.json b/backend/src/apiserver/config/sample_config.json index e61cf04dc95d..57077ccff8b6 100644 --- a/backend/src/apiserver/config/sample_config.json +++ b/backend/src/apiserver/config/sample_config.json @@ -1,27 +1,32 @@ [ { - "name":"[Sample] ML - XGBoost - Training with Confusion Matrix", - "description":"A trainer that does end-to-end distributed training for XGBoost models. For source code, refer to https://github.com/kubeflow/pipelines/tree/master/samples/core/xgboost-spark", - "file":"/samples/core/xgboost_training_cm/xgboost_training_cm.py.tar.gz" + "name": "[Sample] ML - XGBoost - Training with Confusion Matrix", + "description": "[GCP Permission requirements](https://github.com/kubeflow/pipelines/tree/master/samples/core/xgboost_training_cm#requirements). A trainer that does end-to-end distributed training for XGBoost models. [source code](https://github.com/kubeflow/pipelines/tree/master/samples/core/xgboost_training_cm)", + "file": "/samples/core/xgboost_training_cm/xgboost_training_cm.py.tar.gz" }, { - "name":"[Sample] Basic - Sequential execution", - "description":"A pipeline with two sequential steps. For source code, refer to https://github.com/kubeflow/pipelines/blob/master/samples/core/sequential/sequential.py", - "file":"/samples/core/sequential/sequential.py.tar.gz" + "name": "[Sample] Unified DSL - Taxi Tip Prediction Model Trainer", + "description": "[GCP Permission requirements](https://github.com/kubeflow/pipelines/blob/master/samples/contrib/parameterized_tfx_oss#permission). 
Example pipeline that does classification with model analysis based on a public taxi cab BigQuery dataset. [source code](https://github.com/kubeflow/pipelines/tree/master/samples/contrib/parameterized_tfx_oss)",
+    "file": "/samples/contrib/parameterized_tfx_oss/parameterized_tfx_oss.tar.gz"
   },
   {
-    "name":"[Sample] Basic - Parallel execution",
-    "description":"A pipeline that downloads two messages in parallel and prints the concatenated result. For source code, refer to https://github.com/kubeflow/pipelines/blob/master/samples/core/parallel_join/parallel_join.py",
-    "file":"/samples/core/parallel_join/parallel_join.py.tar.gz"
+    "name": "[Sample] Basic - Sequential execution",
+    "description": "A pipeline with two sequential steps. [source code](https://github.com/kubeflow/pipelines/blob/master/samples/core/sequential/sequential.py)",
+    "file": "/samples/core/sequential/sequential.py.tar.gz"
   },
   {
-    "name":"[Sample] Basic - Conditional execution",
-    "description":"A pipeline shows how to use dsl.Condition. For source code, refer to https://github.com/kubeflow/pipelines/blob/master/samples/core/condition/condition.py",
-    "file":"/samples/core/condition/condition.py.tar.gz"
+    "name": "[Sample] Basic - Parallel execution",
+    "description": "A pipeline that downloads two messages in parallel and prints the concatenated result. [source code](https://github.com/kubeflow/pipelines/blob/master/samples/core/parallel_join/parallel_join.py)",
+    "file": "/samples/core/parallel_join/parallel_join.py.tar.gz"
   },
   {
-    "name":"[Sample] Basic - Exit Handler",
-    "description":"A pipeline that downloads a message and prints it out. Exit Handler will run at the end. For source code, refer to https://github.com/kubeflow/pipelines/blob/master/samples/core/exit_handler/exit_handler.py",
-    "file":"/samples/core/exit_handler/exit_handler.py.tar.gz"
+    "name": "[Sample] Basic - Conditional execution",
+    "description": "A pipeline that shows how to use dsl.Condition. [source code](https://github.com/kubeflow/pipelines/blob/master/samples/core/condition/condition.py)",
+    "file": "/samples/core/condition/condition.py.tar.gz"
+  },
+  {
+    "name": "[Sample] Basic - Exit Handler",
+    "description": "A pipeline that downloads a message and prints it out. Exit Handler will run at the end. 
[source code](https://github.com/kubeflow/pipelines/blob/master/samples/core/exit_handler/exit_handler.py)", + "file": "/samples/core/exit_handler/exit_handler.py.tar.gz" } ] diff --git a/backend/src/apiserver/model/BUILD.bazel b/backend/src/apiserver/model/BUILD.bazel index e6a15d7878f5..366879d57580 100644 --- a/backend/src/apiserver/model/BUILD.bazel +++ b/backend/src/apiserver/model/BUILD.bazel @@ -21,18 +21,13 @@ go_library( go_test( name = "go_default_test", - srcs = [ - "pipeline_version_test.go", - ], + srcs = ["pipeline_version_test.go"], + embed = [":go_default_library"], importpath = "github.com/kubeflow/pipelines/backend/src/apiserver/model", visibility = ["//visibility:public"], - embed = [":go_default_library"], deps = [ "//backend/api:go_default_library", - "//backend/src/apiserver/filter:go_default_library", "//backend/src/apiserver/list:go_default_library", - "@com_github_google_go_cmp//cmp:go_default_library", - "@com_github_google_go_cmp//cmp/cmpopts:go_default_library", "@com_github_masterminds_squirrel//:go_default_library", "@com_github_stretchr_testify//assert:go_default_library", ], diff --git a/backend/src/apiserver/model/pipeline.go b/backend/src/apiserver/model/pipeline.go index 5baec4caefce..08100e49ce19 100644 --- a/backend/src/apiserver/model/pipeline.go +++ b/backend/src/apiserver/model/pipeline.go @@ -30,7 +30,7 @@ type Pipeline struct { UUID string `gorm:"column:UUID; not null; primary_key"` CreatedAtInSec int64 `gorm:"column:CreatedAtInSec; not null"` Name string `gorm:"column:Name; not null; unique"` - Description string `gorm:"column:Description; not null"` + Description string `gorm:"column:Description; not null; size:65535"` // Same as below, set size to large number so it will be stored as longtext // TODO(jingzhang36): remove Parameters when no code is accessing this // field. Should use PipelineVersion.Parameters instead. /* Set size to 65535 so it will be stored as longtext. https://dev.mysql.com/doc/refman/8.0/en/column-count-limit.html */ diff --git a/backend/src/apiserver/resource/client_manager_fake.go b/backend/src/apiserver/resource/client_manager_fake.go index a0e62ce9add6..2cbea55c72e2 100644 --- a/backend/src/apiserver/resource/client_manager_fake.go +++ b/backend/src/apiserver/resource/client_manager_fake.go @@ -25,6 +25,7 @@ import ( const ( DefaultFakeUUID = "123e4567-e89b-12d3-a456-426655440000" + FakeUUIDOne = "123e4567-e89b-12d3-a456-426655440001" ) type FakeClientManager struct { @@ -45,7 +46,7 @@ type FakeClientManager struct { } func NewFakeClientManager(time util.TimeInterface, uuid util.UUIDGeneratorInterface) ( - *FakeClientManager, error) { + *FakeClientManager, error) { if time == nil { glog.Fatalf("The time parameter must not be null.") // Must never happen diff --git a/backend/src/apiserver/resource/model_converter.go b/backend/src/apiserver/resource/model_converter.go index 82f6d0b9e4e9..8f52b8fccfa1 100644 --- a/backend/src/apiserver/resource/model_converter.go +++ b/backend/src/apiserver/resource/model_converter.go @@ -37,17 +37,17 @@ func (r *ResourceManager) ToModelRunMetric(metric *api.RunMetric, runUUID string // The input run might not contain workflowSpecManifest, but instead a pipeline ID. // The caller would retrieve workflowSpecManifest and pass in. 
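// Note: the conversions below read fields through the generated getters
// (e.g. run.GetPipelineSpec().GetParameters()) instead of direct field access.
// Proto getters are nil-safe, which presumably matters now that a run created
// from a pipeline version may carry no inline PipelineSpec at all.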
func (r *ResourceManager) ToModelRunDetail(run *api.Run, runId string, workflow *util.Workflow, workflowSpecManifest string) (*model.RunDetail, error) { - params, err := toModelParameters(run.PipelineSpec.Parameters) + params, err := toModelParameters(run.GetPipelineSpec().GetParameters()) if err != nil { return nil, util.Wrap(err, "Unable to parse the parameter.") } - resourceReferences, err := r.toModelResourceReferences(runId, common.Run, run.ResourceReferences) + resourceReferences, err := r.toModelResourceReferences(runId, common.Run, run.GetResourceReferences()) if err != nil { return nil, util.Wrap(err, "Unable to convert resource references.") } var pipelineName string - if run.PipelineSpec.GetPipelineId() != "" { - pipelineName, err = r.getResourceName(common.Pipeline, run.PipelineSpec.GetPipelineId()) + if run.GetPipelineSpec().GetPipelineId() != "" { + pipelineName, err = r.getResourceName(common.Pipeline, run.GetPipelineSpec().GetPipelineId()) if err != nil { return nil, util.Wrap(err, "Error getting the pipeline name") } @@ -63,7 +63,7 @@ func (r *ResourceManager) ToModelRunDetail(run *api.Run, runId string, workflow Description: run.Description, ResourceReferences: resourceReferences, PipelineSpec: model.PipelineSpec{ - PipelineId: run.PipelineSpec.GetPipelineId(), + PipelineId: run.GetPipelineSpec().GetPipelineId(), PipelineName: pipelineName, WorkflowSpecManifest: workflowSpecManifest, Parameters: params, @@ -76,17 +76,17 @@ func (r *ResourceManager) ToModelRunDetail(run *api.Run, runId string, workflow } func (r *ResourceManager) ToModelJob(job *api.Job, swf *util.ScheduledWorkflow, workflowSpecManifest string) (*model.Job, error) { - params, err := toModelParameters(job.PipelineSpec.Parameters) + params, err := toModelParameters(job.GetPipelineSpec().GetParameters()) if err != nil { return nil, util.Wrap(err, "Error parsing the input job.") } - resourceReferences, err := r.toModelResourceReferences(string(swf.UID), common.Job, job.ResourceReferences) + resourceReferences, err := r.toModelResourceReferences(string(swf.UID), common.Job, job.GetResourceReferences()) if err != nil { return nil, util.Wrap(err, "Error to convert resource references.") } var pipelineName string - if job.PipelineSpec.GetPipelineId() != "" { - pipelineName, err = r.getResourceName(common.Pipeline, job.PipelineSpec.GetPipelineId()) + if job.GetPipelineSpec().GetPipelineId() != "" { + pipelineName, err = r.getResourceName(common.Pipeline, job.GetPipelineSpec().GetPipelineId()) if err != nil { return nil, util.Wrap(err, "Error getting the pipeline name") } @@ -103,7 +103,7 @@ func (r *ResourceManager) ToModelJob(job *api.Job, swf *util.ScheduledWorkflow, MaxConcurrency: job.MaxConcurrency, ResourceReferences: resourceReferences, PipelineSpec: model.PipelineSpec{ - PipelineId: job.PipelineSpec.GetPipelineId(), + PipelineId: job.GetPipelineSpec().GetPipelineId(), PipelineName: pipelineName, WorkflowSpecManifest: workflowSpecManifest, Parameters: params, @@ -238,6 +238,12 @@ func (r *ResourceManager) getResourceName(resourceType common.ResourceType, reso return "", util.Wrap(err, "Referred run not found.") } return run.DisplayName, nil + case common.PipelineVersion: + version, err := r.GetPipelineVersion(resourceId) + if err != nil { + return "", util.Wrap(err, "Referred pipeline version not found.") + } + return version.Name, nil default: return "", util.NewInvalidInputError("Unsupported resource type: %s", string(resourceType)) } diff --git a/backend/src/apiserver/resource/resource_manager.go 
b/backend/src/apiserver/resource/resource_manager.go index 57e617d1b8c4..b0f43f213621 100644 --- a/backend/src/apiserver/resource/resource_manager.go +++ b/backend/src/apiserver/resource/resource_manager.go @@ -230,10 +230,16 @@ func (r *ResourceManager) GetPipelineTemplate(pipelineId string) ([]byte, error) } func (r *ResourceManager) CreateRun(apiRun *api.Run) (*model.RunDetail, error) { - // Get workflow from pipeline spec, which might be pipeline ID or an argo workflow + // Get workflow from either of the two places: + // (1) raw pipeline manifest in pipeline_spec + // (2) pipeline version in resource_references + var workflowSpecManifestBytes []byte workflowSpecManifestBytes, err := r.getWorkflowSpecBytes(apiRun.GetPipelineSpec()) if err != nil { - return nil, util.Wrap(err, "Failed to fetch workflow spec.") + workflowSpecManifestBytes, err = r.getWorkflowSpecBytesFromPipelineVersion(apiRun.GetResourceReferences()) + if err != nil { + return nil, util.Wrap(err, "Failed to fetch workflow spec.") + } } uuid, err := r.uuid.NewRandom() if err != nil { @@ -282,7 +288,7 @@ func (r *ResourceManager) CreateRun(apiRun *api.Run) (*model.RunDetail, error) { } // Add a reference to the default experiment if run does not already have a containing experiment - ref, err := r.getDefaultExperimentIfNoExperiment(apiRun.ResourceReferences) + ref, err := r.getDefaultExperimentIfNoExperiment(apiRun.GetResourceReferences()) if err != nil { return nil, err } @@ -441,11 +447,18 @@ func (r *ResourceManager) GetJob(id string) (*model.Job, error) { } func (r *ResourceManager) CreateJob(apiJob *api.Job) (*model.Job, error) { - // Get workflow from pipeline spec, which might be pipeline ID or an argo workflow + // Get workflow from either of the two places: + // (1) raw pipeline manifest in pipeline_spec + // (2) pipeline version in resource_references + var workflowSpecManifestBytes []byte workflowSpecManifestBytes, err := r.getWorkflowSpecBytes(apiJob.GetPipelineSpec()) if err != nil { - return nil, util.Wrap(err, "Failed to fetch workflow spec.") + workflowSpecManifestBytes, err = r.getWorkflowSpecBytesFromPipelineVersion(apiJob.GetResourceReferences()) + if err != nil { + return nil, util.Wrap(err, "Failed to fetch workflow spec.") + } } + var workflow util.Workflow err = json.Unmarshal(workflowSpecManifestBytes, &workflow) if err != nil { @@ -454,7 +467,7 @@ func (r *ResourceManager) CreateJob(apiJob *api.Job) (*model.Job, error) { } // Verify no additional parameter provided - err = workflow.VerifyParameters(toParametersMap(apiJob.PipelineSpec.Parameters)) + err = workflow.VerifyParameters(toParametersMap(apiJob.GetPipelineSpec().GetParameters())) if err != nil { return nil, util.Wrap(err, "Create job failed") } @@ -473,7 +486,7 @@ func (r *ResourceManager) CreateJob(apiJob *api.Job) (*model.Job, error) { MaxConcurrency: &apiJob.MaxConcurrency, Trigger: *toCRDTrigger(apiJob.Trigger), Workflow: &scheduledworkflow.WorkflowResource{ - Parameters: toCRDParameter(apiJob.PipelineSpec.Parameters), + Parameters: toCRDParameter(apiJob.GetPipelineSpec().GetParameters()), Spec: workflow.Spec, }, }, @@ -484,12 +497,12 @@ func (r *ResourceManager) CreateJob(apiJob *api.Job) (*model.Job, error) { } // Add a reference to the default experiment if run does not already have a containing experiment - ref, err := r.getDefaultExperimentIfNoExperiment(apiJob.ResourceReferences) + ref, err := r.getDefaultExperimentIfNoExperiment(apiJob.GetResourceReferences()) if err != nil { return nil, err } if ref != nil { - 
apiJob.ResourceReferences = append(apiJob.ResourceReferences, ref) + apiJob.ResourceReferences = append(apiJob.GetResourceReferences(), ref) } job, err := r.ToModelJob(apiJob, util.NewScheduledWorkflow(newScheduledWorkflow), string(workflowSpecManifestBytes)) @@ -688,6 +701,8 @@ func (r *ResourceManager) checkRunExist(runID string) (*model.RunDetail, error) } func (r *ResourceManager) getWorkflowSpecBytes(spec *api.PipelineSpec) ([]byte, error) { + // TODO(jingzhang36): after FE is enabled to use pipeline version to create + // run, we'll only check for the raw manifest in pipeline_spec. if spec.GetPipelineId() != "" { var workflow util.Workflow err := r.objectStore.GetFromYamlFile(&workflow, storage.CreatePipelinePath(spec.GetPipelineId())) @@ -702,6 +717,25 @@ func (r *ResourceManager) getWorkflowSpecBytes(spec *api.PipelineSpec) ([]byte, return nil, util.NewInvalidInputError("Please provide a valid pipeline spec") } +func (r *ResourceManager) getWorkflowSpecBytesFromPipelineVersion(references []*api.ResourceReference) ([]byte, error) { + var pipelineVersionId = "" + for _, reference := range references { + if reference.Key.Type == api.ResourceType_PIPELINE_VERSION && reference.Relationship == api.Relationship_CREATOR { + pipelineVersionId = reference.Key.Id + } + } + if len(pipelineVersionId) == 0 { + return nil, util.NewInvalidInputError("No pipeline version.") + } + var workflow util.Workflow + err := r.objectStore.GetFromYamlFile(&workflow, storage.CreatePipelinePath(pipelineVersionId)) + if err != nil { + return nil, util.Wrap(err, "Get pipeline YAML failed.") + } + + return []byte(workflow.ToStringForStore()), nil +} + // Used to initialize the Experiment database with a default to be used for runs func (r *ResourceManager) CreateDefaultExperiment() (string, error) { // First check that we don't already have a default experiment ID in the DB. 
@@ -814,3 +848,100 @@ func (r *ResourceManager) MarkSampleLoaded() error {
 func (r *ResourceManager) getDefaultSA() string {
 	return common.GetStringConfigWithDefault(defaultPipelineRunnerServiceAccountEnvVar, defaultPipelineRunnerServiceAccount)
 }
+
+func (r *ResourceManager) CreatePipelineVersion(apiVersion *api.PipelineVersion, pipelineFile []byte) (*model.PipelineVersion, error) {
+	// Extract the parameters from the pipeline file.
+	params, err := util.GetParameters(pipelineFile)
+	if err != nil {
+		return nil, util.Wrap(err, "Create pipeline version failed")
+	}
+
+	// Extract the parent pipeline id from the OWNER resource reference.
+	var pipelineId = ""
+	for _, resourceReference := range apiVersion.ResourceReferences {
+		if resourceReference.Key.Type == api.ResourceType_PIPELINE && resourceReference.Relationship == api.Relationship_OWNER {
+			pipelineId = resourceReference.Key.Id
+		}
+	}
+	if len(pipelineId) == 0 {
+		// err is nil here, so wrapping it would swallow the failure; report an
+		// invalid-input error instead.
+		return nil, util.NewInvalidInputError("Create pipeline version failed due to missing pipeline id")
+	}
+
+	// Construct the model.PipelineVersion.
+	version := &model.PipelineVersion{
+		Name:          apiVersion.Name,
+		PipelineId:    pipelineId,
+		Status:        model.PipelineVersionCreating,
+		Parameters:    params,
+		CodeSourceUrl: apiVersion.CodeSourceUrl,
+	}
+	version, err = r.pipelineStore.CreatePipelineVersion(version)
+	if err != nil {
+		return nil, util.Wrap(err, "Create pipeline version failed")
+	}
+
+	// Store the pipeline file.
+	err = r.objectStore.AddFile(pipelineFile, storage.CreatePipelinePath(fmt.Sprint(version.UUID)))
+	if err != nil {
+		return nil, util.Wrap(err, "Create pipeline version failed")
+	}
+
+	// After the pipeline version is created in the DB and the pipeline file is
+	// saved in the MinIO server, set this pipeline version's status to ready.
+	version.Status = model.PipelineVersionReady
+	err = r.pipelineStore.UpdatePipelineVersionStatus(version.UUID, version.Status)
+	if err != nil {
+		return nil, util.Wrap(err, "Create pipeline version failed")
+	}
+
+	return version, nil
+}
+
+func (r *ResourceManager) GetPipelineVersion(versionId string) (*model.PipelineVersion, error) {
+	return r.pipelineStore.GetPipelineVersion(versionId)
+}
+
+func (r *ResourceManager) ListPipelineVersions(pipelineId string, opts *list.Options) (pipelines []*model.PipelineVersion, totalSize int, nextPageToken string, err error) {
+	return r.pipelineStore.ListPipelineVersions(pipelineId, opts)
+}
+
+func (r *ResourceManager) DeletePipelineVersion(pipelineVersionId string) error {
+	_, err := r.pipelineStore.GetPipelineVersion(pipelineVersionId)
+	if err != nil {
+		return util.Wrap(err, "Delete pipeline version failed")
+	}
+
+	// Mark the pipeline version as deleting so it's not visible to the user.
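+	// The ordering below is deliberate: flip the status first so the version
+	// stops showing up in list/get calls, then delete the pipeline file from
+	// object storage, and only then drop the DB row. If a later step fails,
+	// the row is left in the deleting state rather than pointing at a missing file.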
+	err = r.pipelineStore.UpdatePipelineVersionStatus(pipelineVersionId, model.PipelineVersionDeleting)
+	if err != nil {
+		return util.Wrap(err, "Delete pipeline version failed")
+	}
+
+	err = r.objectStore.DeleteFile(storage.CreatePipelinePath(fmt.Sprint(pipelineVersionId)))
+	if err != nil {
+		glog.Errorf("%v", errors.Wrapf(err, "Failed to delete pipeline file for pipeline version %v", pipelineVersionId))
+		return util.Wrap(err, "Delete pipeline version failed")
+	}
+	err = r.pipelineStore.DeletePipelineVersion(pipelineVersionId)
+	if err != nil {
+		glog.Errorf("%v", errors.Wrapf(err, "Failed to delete the DB entry for pipeline version %v", pipelineVersionId))
+		return util.Wrap(err, "Delete pipeline version failed")
+	}
+
+	return nil
+}
+
+func (r *ResourceManager) GetPipelineVersionTemplate(versionId string) ([]byte, error) {
+	// Verify the pipeline version exists.
+	_, err := r.pipelineStore.GetPipelineVersion(versionId)
+	if err != nil {
+		return nil, util.Wrap(err, "Get pipeline version template failed")
+	}
+
+	template, err := r.objectStore.GetFile(storage.CreatePipelinePath(fmt.Sprint(versionId)))
+	if err != nil {
+		return nil, util.Wrap(err, "Get pipeline version template failed")
+	}
+
+	return template, nil
+}
diff --git a/backend/src/apiserver/resource/resource_manager_test.go b/backend/src/apiserver/resource/resource_manager_test.go
index 5416770adc71..716d48df8ce7 100644
--- a/backend/src/apiserver/resource/resource_manager_test.go
+++ b/backend/src/apiserver/resource/resource_manager_test.go
@@ -376,6 +376,96 @@ func TestCreateRun_ThroughWorkflowSpec(t *testing.T) {
 	assert.Equal(t, expectedRunDetail, runDetail, "CreateRun stored invalid data in database")
 }

+func TestCreateRun_ThroughPipelineVersion(t *testing.T) {
+	// Create experiment, pipeline, and pipeline version.
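+	// Swapping the pipeline store's UUID generator to FakeUUIDOne before the
+	// version is created gives the version an ID distinct from the other
+	// fixtures (presumably created under DefaultFakeUUID), so the reference
+	// assertions below can tell the resources apart.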
+	store, manager, experiment, pipeline := initWithExperimentAndPipeline(t)
+	defer store.Close()
+	pipelineStore, ok := store.pipelineStore.(*storage.PipelineStore)
+	assert.True(t, ok)
+	pipelineStore.SetUUIDGenerator(util.NewFakeUUIDGeneratorOrFatal(FakeUUIDOne, nil))
+	version, err := manager.CreatePipelineVersion(&api.PipelineVersion{
+		Name: "version_for_run",
+		ResourceReferences: []*api.ResourceReference{
+			&api.ResourceReference{
+				Key: &api.ResourceKey{
+					Id: pipeline.UUID,
+					Type: api.ResourceType_PIPELINE,
+				},
+				Relationship: api.Relationship_OWNER,
+			},
+		},
+	}, []byte(testWorkflow.ToStringForStore()))
+	assert.Nil(t, err)
+
+	apiRun := &api.Run{
+		Name: "run1",
+		PipelineSpec: &api.PipelineSpec{
+			Parameters: []*api.Parameter{
+				{Name: "param1", Value: "world"},
+			},
+		},
+		ResourceReferences: []*api.ResourceReference{
+			{
+				Key: &api.ResourceKey{Type: api.ResourceType_EXPERIMENT, Id: experiment.UUID},
+				Relationship: api.Relationship_OWNER,
+			},
+			{
+				Key: &api.ResourceKey{Type: api.ResourceType_PIPELINE_VERSION, Id: version.UUID},
+				Relationship: api.Relationship_CREATOR,
+			},
+		},
+	}
+	runDetail, err := manager.CreateRun(apiRun)
+	assert.Nil(t, err)
+
+	expectedRuntimeWorkflow := testWorkflow.DeepCopy()
+	expectedRuntimeWorkflow.Spec.Arguments.Parameters = []v1alpha1.Parameter{
+		{Name: "param1", Value: util.StringPointer("world")}}
+	expectedRuntimeWorkflow.Labels = map[string]string{util.LabelKeyWorkflowRunId: "123e4567-e89b-12d3-a456-426655440000"}
+	expectedRuntimeWorkflow.Spec.ServiceAccountName = defaultPipelineRunnerServiceAccount
+
+	expectedRunDetail := &model.RunDetail{
+		Run: model.Run{
+			UUID: "123e4567-e89b-12d3-a456-426655440000",
+			DisplayName: "run1",
+			Name: "workflow-name",
+			StorageState: api.Run_STORAGESTATE_AVAILABLE.String(),
+			CreatedAtInSec: 4,
+			Conditions: "Running",
+			PipelineSpec: model.PipelineSpec{
+				WorkflowSpecManifest: testWorkflow.ToStringForStore(),
+				Parameters: "[{\"name\":\"param1\",\"value\":\"world\"}]",
+			},
+			ResourceReferences: []*model.ResourceReference{
+				{
+					ResourceUUID: "123e4567-e89b-12d3-a456-426655440000",
+					ResourceType: common.Run,
+					ReferenceUUID: experiment.UUID,
+					ReferenceName: "e1",
+					ReferenceType: common.Experiment,
+					Relationship: common.Owner,
+				},
+				{
+					ResourceUUID: "123e4567-e89b-12d3-a456-426655440000",
+					ResourceType: common.Run,
+					ReferenceUUID: version.UUID,
+					ReferenceName: "version_for_run",
+					ReferenceType: common.PipelineVersion,
+					Relationship: common.Creator,
+				},
+			},
+		},
+		PipelineRuntime: model.PipelineRuntime{
+			WorkflowRuntimeManifest: util.NewWorkflow(expectedRuntimeWorkflow).ToStringForStore(),
+		},
+	}
+	assert.Equal(t, expectedRunDetail, runDetail, "CreateRun returned an unexpected value.")
+	assert.Equal(t, 1, store.workflowClientFake.GetWorkflowCount(), "Workflow CRD is not created.")
+	runDetail, err = manager.GetRun(runDetail.UUID)
+	assert.Nil(t, err)
+	assert.Equal(t, expectedRunDetail, runDetail, "CreateRun stored invalid data in database")
+}
+
 func TestCreateRun_NoExperiment(t *testing.T) {
 	store := NewFakeClientManagerOrFatal(util.NewFakeTimeForEpoch())
 	manager := NewResourceManager(store)
@@ -764,6 +854,83 @@ func TestCreateJob_ThroughPipelineID(t *testing.T) {
 	assert.Equal(t, expectedJob, newJob)
 }
 
+func TestCreateJob_ThroughPipelineVersion(t *testing.T) {
+	// Create experiment, pipeline, and pipeline version.
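+	// The job below carries no pipeline id or inline manifest; the
+	// PIPELINE_VERSION reference with a CREATOR relationship is what points
+	// the server at the workflow spec to schedule.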
+ store, manager, experiment, pipeline := initWithExperimentAndPipeline(t) + defer store.Close() + pipelineStore, ok := store.pipelineStore.(*storage.PipelineStore) + assert.True(t, ok) + pipelineStore.SetUUIDGenerator(util.NewFakeUUIDGeneratorOrFatal(FakeUUIDOne, nil)) + version, err := manager.CreatePipelineVersion(&api.PipelineVersion{ + Name: "version_for_job", + ResourceReferences: []*api.ResourceReference{ + &api.ResourceReference{ + Key: &api.ResourceKey{ + Id: pipeline.UUID, + Type: api.ResourceType_PIPELINE, + }, + Relationship: api.Relationship_OWNER, + }, + }, + }, []byte(testWorkflow.ToStringForStore())) + assert.Nil(t, err) + + job := &api.Job{ + Name: "j1", + Enabled: true, + PipelineSpec: &api.PipelineSpec{ + Parameters: []*api.Parameter{ + {Name: "param1", Value: "world"}, + }, + }, + ResourceReferences: []*api.ResourceReference{ + { + Key: &api.ResourceKey{Type: api.ResourceType_EXPERIMENT, Id: experiment.UUID}, + Relationship: api.Relationship_OWNER, + }, + { + Key: &api.ResourceKey{Type: api.ResourceType_PIPELINE_VERSION, Id: version.UUID}, + Relationship: api.Relationship_CREATOR, + }, + }, + } + newJob, err := manager.CreateJob(job) + expectedJob := &model.Job{ + UUID: "123", + DisplayName: "j1", + Name: "j1", + Namespace: "default", + Enabled: true, + CreatedAtInSec: 4, + UpdatedAtInSec: 4, + Conditions: "NO_STATUS", + PipelineSpec: model.PipelineSpec{ + WorkflowSpecManifest: testWorkflow.ToStringForStore(), + Parameters: "[{\"name\":\"param1\",\"value\":\"world\"}]", + }, + ResourceReferences: []*model.ResourceReference{ + { + ResourceUUID: "123", + ResourceType: common.Job, + ReferenceUUID: experiment.UUID, + ReferenceName: "e1", + ReferenceType: common.Experiment, + Relationship: common.Owner, + }, + { + ResourceUUID: "123", + ResourceType: common.Job, + ReferenceUUID: version.UUID, + ReferenceName: "version_for_job", + ReferenceType: common.PipelineVersion, + Relationship: common.Creator, + }, + }, + } + assert.Nil(t, err) + assert.Equal(t, expectedJob, newJob) +} + func TestCreateJob_EmptyPipelineSpec(t *testing.T) { store := NewFakeClientManagerOrFatal(util.NewFakeTimeForEpoch()) defer store.Close() @@ -1759,3 +1926,266 @@ spec: valueFrom: path: /output.txt` ) + +func TestCreatePipelineVersion(t *testing.T) { + store := NewFakeClientManagerOrFatal(util.NewFakeTimeForEpoch()) + manager := NewResourceManager(store) + + // Create a pipeline before versions. + _, err := manager.CreatePipeline("p", "", []byte(testWorkflow.ToStringForStore())) + assert.Nil(t, err) + + // Create a version under the above pipeline. 
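+	// The fake clock ticks one second per Now() call: the pipeline above
+	// takes second 1, so the version created below is expected to have
+	// CreatedAtInSec 2.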
+ pipelineStore, ok := store.pipelineStore.(*storage.PipelineStore) + assert.True(t, ok) + pipelineStore.SetUUIDGenerator(util.NewFakeUUIDGeneratorOrFatal(FakeUUIDOne, nil)) + version, err := manager.CreatePipelineVersion( + &api.PipelineVersion{ + Name: "p_v", + ResourceReferences: []*api.ResourceReference{ + &api.ResourceReference{ + Key: &api.ResourceKey{ + Id: DefaultFakeUUID, + Type: api.ResourceType_PIPELINE, + }, + Relationship: api.Relationship_OWNER, + }, + }, + }, + []byte(testWorkflow.ToStringForStore())) + assert.Nil(t, err) + + defer store.Close() + pipelineVersionExpected := &model.PipelineVersion{ + UUID: FakeUUIDOne, + CreatedAtInSec: 2, + Name: "p_v", + Parameters: "[{\"name\":\"param1\"}]", + Status: model.PipelineVersionReady, + PipelineId: DefaultFakeUUID, + } + assert.Equal(t, pipelineVersionExpected, version) +} + +func TestCreatePipelineVersion_ComplexPipelineVersion(t *testing.T) { + store := NewFakeClientManagerOrFatal(util.NewFakeTimeForEpoch()) + defer store.Close() + manager := NewResourceManager(store) + + // Create a pipeline. + createdPipeline, err := manager.CreatePipeline("pipeline", "", []byte(strings.TrimSpace(complexPipeline))) + assert.Nil(t, err) + + // Create a version under the above pipeline. + pipelineStore, ok := store.pipelineStore.(*storage.PipelineStore) + assert.True(t, ok) + pipelineStore.SetUUIDGenerator(util.NewFakeUUIDGeneratorOrFatal(FakeUUIDOne, nil)) + version, err := manager.CreatePipelineVersion( + &api.PipelineVersion{ + Name: "pipeline_version", + ResourceReferences: []*api.ResourceReference{ + &api.ResourceReference{ + Key: &api.ResourceKey{ + Id: DefaultFakeUUID, + Type: api.ResourceType_PIPELINE, + }, + Relationship: api.Relationship_OWNER, + }, + }, + }, + []byte(strings.TrimSpace(complexPipeline))) + assert.Nil(t, err) + + _, err = manager.GetPipeline(createdPipeline.UUID) + assert.Nil(t, err) + + _, err = manager.GetPipelineVersion(version.UUID) + assert.Nil(t, err) +} + +func TestCreatePipelineVersion_CreatePipelineVersionFileError(t *testing.T) { + store := NewFakeClientManagerOrFatal(util.NewFakeTimeForEpoch()) + defer store.Close() + manager := NewResourceManager(store) + + // Create a pipeline. + _, err := manager.CreatePipeline("pipeline", "", []byte(strings.TrimSpace(complexPipeline))) + assert.Nil(t, err) + + // Switch to a bad object store + manager.objectStore = &FakeBadObjectStore{} + + // Create a version under the above pipeline. + pipelineStore, ok := store.pipelineStore.(*storage.PipelineStore) + assert.True(t, ok) + pipelineStore.SetUUIDGenerator(util.NewFakeUUIDGeneratorOrFatal(FakeUUIDOne, nil)) + _, err = manager.CreatePipelineVersion( + &api.PipelineVersion{ + Name: "pipeline_version", + ResourceReferences: []*api.ResourceReference{ + &api.ResourceReference{ + Key: &api.ResourceKey{ + Id: DefaultFakeUUID, + Type: api.ResourceType_PIPELINE, + }, + Relationship: api.Relationship_OWNER, + }, + }, + }, + []byte("apiVersion: argoproj.io/v1alpha1\nkind: Workflow")) + assert.Equal(t, codes.Internal, err.(*util.UserError).ExternalStatusCode()) + assert.Contains(t, err.Error(), "bad object store") + + // Verify the pipeline version in DB is in status PipelineVersionCreating. 
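+	// GetPipelineVersion only surfaces versions in ready status, so this
+	// status-qualified lookup is needed to observe the half-created row.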
+ version, err := manager.pipelineStore.GetPipelineVersionWithStatus(FakeUUIDOne, model.PipelineVersionCreating) + assert.Nil(t, err) + assert.NotNil(t, version) +} + +func TestCreatePipelineVersion_GetParametersError(t *testing.T) { + store := NewFakeClientManagerOrFatal(util.NewFakeTimeForEpoch()) + defer store.Close() + manager := NewResourceManager(store) + + // Create a pipeline. + _, err := manager.CreatePipeline("pipeline", "", []byte(testWorkflow.ToStringForStore())) + assert.Nil(t, err) + + // Create a version under the above pipeline. + pipelineStore, ok := store.pipelineStore.(*storage.PipelineStore) + assert.True(t, ok) + pipelineStore.SetUUIDGenerator(util.NewFakeUUIDGeneratorOrFatal(FakeUUIDOne, nil)) + _, err = manager.CreatePipelineVersion( + &api.PipelineVersion{ + Name: "pipeline_version", + ResourceReferences: []*api.ResourceReference{ + &api.ResourceReference{ + Key: &api.ResourceKey{ + Id: DefaultFakeUUID, + Type: api.ResourceType_PIPELINE, + }, + Relationship: api.Relationship_OWNER, + }, + }, + }, + []byte("I am invalid yaml")) + assert.Equal(t, codes.InvalidArgument, err.(*util.UserError).ExternalStatusCode()) + assert.Contains(t, err.Error(), "Failed to parse the parameter") +} + +func TestCreatePipelineVersion_StorePipelineVersionMetadataError(t *testing.T) { + store := NewFakeClientManagerOrFatal(util.NewFakeTimeForEpoch()) + defer store.Close() + manager := NewResourceManager(store) + + // Create a pipeline. + _, err := manager.CreatePipeline( + "pipeline", + "", + []byte("apiVersion: argoproj.io/v1alpha1\nkind: Workflow")) + assert.Nil(t, err) + + // Close db. + store.DB().Close() + + // Create a version under the above pipeline, resulting in error because of + // closed db. + pipelineStore, ok := store.pipelineStore.(*storage.PipelineStore) + assert.True(t, ok) + pipelineStore.SetUUIDGenerator(util.NewFakeUUIDGeneratorOrFatal( + FakeUUIDOne, nil)) + _, err = manager.CreatePipelineVersion( + &api.PipelineVersion{ + Name: "pipeline_version", + ResourceReferences: []*api.ResourceReference{ + &api.ResourceReference{ + Key: &api.ResourceKey{ + Id: DefaultFakeUUID, + Type: api.ResourceType_PIPELINE, + }, + Relationship: api.Relationship_OWNER, + }, + }, + }, + []byte("apiVersion: argoproj.io/v1alpha1\nkind: Workflow")) + assert.Equal(t, codes.Internal, err.(*util.UserError).ExternalStatusCode()) + assert.Contains(t, err.Error(), "database is closed") +} + +func TestDeletePipelineVersion(t *testing.T) { + store := NewFakeClientManagerOrFatal(util.NewFakeTimeForEpoch()) + defer store.Close() + manager := NewResourceManager(store) + + // Create a pipeline. + _, err := manager.CreatePipeline("pipeline", "", []byte("apiVersion: argoproj.io/v1alpha1\nkind: Workflow")) + assert.Nil(t, err) + + // Create a version under the above pipeline. + pipelineStore, ok := store.pipelineStore.(*storage.PipelineStore) + assert.True(t, ok) + pipelineStore.SetUUIDGenerator(util.NewFakeUUIDGeneratorOrFatal(FakeUUIDOne, nil)) + _, err = manager.CreatePipelineVersion( + &api.PipelineVersion{ + Name: "pipeline_version", + ResourceReferences: []*api.ResourceReference{ + &api.ResourceReference{ + Key: &api.ResourceKey{ + Id: DefaultFakeUUID, + Type: api.ResourceType_PIPELINE, + }, + Relationship: api.Relationship_OWNER, + }, + }, + }, + []byte("apiVersion: argoproj.io/v1alpha1\nkind: Workflow")) + + // Delete the above pipeline_version. + err = manager.DeletePipelineVersion(FakeUUIDOne) + assert.Nil(t, err) + + // Verify the version doesn't exist. 
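+	// Unlike the creating/deleting soft states, a completed delete removes
+	// the row entirely, so the plain lookup must return NotFound.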
+ _, err = manager.GetPipelineVersion(FakeUUIDOne) + assert.Equal(t, codes.NotFound, err.(*util.UserError).ExternalStatusCode()) +} + +func TestDeletePipelineVersion_FileError(t *testing.T) { + store := NewFakeClientManagerOrFatal(util.NewFakeTimeForEpoch()) + defer store.Close() + manager := NewResourceManager(store) + + // Create a pipeline. + _, err := manager.CreatePipeline("pipeline", "", []byte("apiVersion: argoproj.io/v1alpha1\nkind: Workflow")) + assert.Nil(t, err) + + // Create a version under the above pipeline. + pipelineStore, ok := store.pipelineStore.(*storage.PipelineStore) + assert.True(t, ok) + pipelineStore.SetUUIDGenerator(util.NewFakeUUIDGeneratorOrFatal(FakeUUIDOne, nil)) + _, err = manager.CreatePipelineVersion( + &api.PipelineVersion{ + Name: "pipeline_version", + ResourceReferences: []*api.ResourceReference{ + &api.ResourceReference{ + Key: &api.ResourceKey{ + Id: DefaultFakeUUID, + Type: api.ResourceType_PIPELINE, + }, + Relationship: api.Relationship_OWNER, + }, + }, + }, + []byte("apiVersion: argoproj.io/v1alpha1\nkind: Workflow")) + + // Switch to a bad object store + manager.objectStore = &FakeBadObjectStore{} + + // Delete the above pipeline_version. + err = manager.DeletePipelineVersion(FakeUUIDOne) + assert.NotNil(t, err) + + // Verify the version in deleting status. + version, err := manager.pipelineStore.GetPipelineVersionWithStatus(FakeUUIDOne, model.PipelineVersionDeleting) + assert.Nil(t, err) + assert.NotNil(t, version) +} diff --git a/backend/src/apiserver/server/BUILD.bazel b/backend/src/apiserver/server/BUILD.bazel index 3ffdcea74834..83c307b0b8a4 100644 --- a/backend/src/apiserver/server/BUILD.bazel +++ b/backend/src/apiserver/server/BUILD.bazel @@ -27,7 +27,6 @@ go_library( "//backend/src/common/util:go_default_library", "//backend/src/crd/pkg/apis/scheduledworkflow/v1beta1:go_default_library", "@com_github_argoproj_argo//pkg/apis/workflow/v1alpha1:go_default_library", - "@com_github_cenkalti_backoff//:go_default_library", "@com_github_golang_glog//:go_default_library", "@com_github_golang_protobuf//jsonpb:go_default_library_gen", "@com_github_robfig_cron//:go_default_library", diff --git a/backend/src/apiserver/server/api_converter.go b/backend/src/apiserver/server/api_converter.go index 20da9ee97d31..9bc42b010c47 100644 --- a/backend/src/apiserver/server/api_converter.go +++ b/backend/src/apiserver/server/api_converter.go @@ -58,22 +58,21 @@ func ToApiPipeline(pipeline *model.Pipeline) *api.Pipeline { } } - // TODO(jingzhang36): uncomment when exposing versions to API. 
-	// defaultVersion, err := ToApiPipelineVersion(pipeline.DefaultVersion)
-	// if err != nil {
-	// 	return &api.Pipeline{
-	// 		Id: pipeline.UUID,
-	// 		Error: err.Error(),
-	// 	}
-	// }
+	defaultVersion, err := ToApiPipelineVersion(pipeline.DefaultVersion)
+	if err != nil {
+		return &api.Pipeline{
+			Id: pipeline.UUID,
+			Error: err.Error(),
+		}
+	}
 
 	return &api.Pipeline{
-		Id: pipeline.UUID,
-		CreatedAt: &timestamp.Timestamp{Seconds: pipeline.CreatedAtInSec},
-		Name: pipeline.Name,
-		Description: pipeline.Description,
-		Parameters: params,
-		// DefaultVersion: defaultVersion,
+		Id: pipeline.UUID,
+		CreatedAt: &timestamp.Timestamp{Seconds: pipeline.CreatedAtInSec},
+		Name: pipeline.Name,
+		Description: pipeline.Description,
+		Parameters: params,
+		DefaultVersion: defaultVersion,
 	}
 }
diff --git a/backend/src/apiserver/server/api_converter_test.go b/backend/src/apiserver/server/api_converter_test.go
index e123a2ccf9e3..940589a77865 100644
--- a/backend/src/apiserver/server/api_converter_test.go
+++ b/backend/src/apiserver/server/api_converter_test.go
@@ -30,36 +30,34 @@ func TestToApiPipeline(t *testing.T) {
 		UUID: "pipeline1",
 		CreatedAtInSec: 1,
 		Parameters: "[]",
-		// TODO(jingzhang36): uncomment when exposing versions to API.
-		// DefaultVersion: &model.PipelineVersion{
-		// 	UUID: "pipelineversion1",
-		// 	CreatedAtInSec: 1,
-		// 	Parameters: "[]",
-		// 	PipelineId: "pipeline1",
-		// 	CodeSourceUrl: "http://repo/22222",
-		// },
+		DefaultVersion: &model.PipelineVersion{
+			UUID: "pipelineversion1",
+			CreatedAtInSec: 1,
+			Parameters: "[]",
+			PipelineId: "pipeline1",
+			CodeSourceUrl: "http://repo/22222",
+		},
 	}
 	apiPipeline := ToApiPipeline(modelPipeline)
 	expectedApiPipeline := &api.Pipeline{
 		Id: "pipeline1",
 		CreatedAt: &timestamp.Timestamp{Seconds: 1},
 		Parameters: []*api.Parameter{},
-		// TODO(jingzhang36): uncomment when exposing versions to API.
-		// DefaultVersion: &api.PipelineVersion{
-		// 	Id: "pipelineversion1",
-		// 	CreatedAt: &timestamp.Timestamp{Seconds: 1},
-		// 	Parameters: []*api.Parameter{},
-		// 	CodeSourceUrl: "http://repo/22222",
-		// 	ResourceReferences: []*api.ResourceReference{
-		// 		&api.ResourceReference{
-		// 			Key: &api.ResourceKey{
-		// 				Id: "pipeline1",
-		// 				Type: api.ResourceType_PIPELINE,
-		// 			},
-		// 			Relationship: api.Relationship_OWNER,
-		// 		},
-		// 	},
-		// },
+		DefaultVersion: &api.PipelineVersion{
+			Id: "pipelineversion1",
+			CreatedAt: &timestamp.Timestamp{Seconds: 1},
+			Parameters: []*api.Parameter{},
+			CodeSourceUrl: "http://repo/22222",
+			ResourceReferences: []*api.ResourceReference{
+				&api.ResourceReference{
+					Key: &api.ResourceKey{
+						Id: "pipeline1",
+						Type: api.ResourceType_PIPELINE,
+					},
+					Relationship: api.Relationship_OWNER,
+				},
+			},
+		},
 	}
 	assert.Equal(t, expectedApiPipeline, apiPipeline)
 }
@@ -69,8 +67,7 @@ func TestToApiPipeline_ErrorParsingField(t *testing.T) {
 		UUID: "pipeline1",
 		CreatedAtInSec: 1,
 		Parameters: "[invalid parameter",
-		// TODO(jingzhang36): uncomment when exposing versions to API.
-		// DefaultVersion: &model.PipelineVersion{},
+		DefaultVersion: &model.PipelineVersion{},
 	}
 	apiPipeline := ToApiPipeline(modelPipeline)
 	expectedApiPipeline := &api.Pipeline{
diff --git a/backend/src/apiserver/server/job_server.go b/backend/src/apiserver/server/job_server.go
index 40858ede950a..759826c80c1f 100644
--- a/backend/src/apiserver/server/job_server.go
+++ b/backend/src/apiserver/server/job_server.go
@@ -87,7 +87,9 @@ func (s *JobServer) validateCreateJobRequest(request *api.CreateJobRequest) erro
 	job := request.Job
 	if err := ValidatePipelineSpec(s.resourceManager, job.PipelineSpec); err != nil {
-		return util.Wrap(err, "The pipeline spec is invalid.")
+		if _, errResourceReference := CheckPipelineVersionReference(s.resourceManager, job.ResourceReferences); errResourceReference != nil {
+			return util.Wrap(err, "Neither pipeline spec nor pipeline version is valid. "+errResourceReference.Error())
+		}
 	}
 
 	if job.MaxConcurrency > 10 || job.MaxConcurrency < 1 {
diff --git a/backend/src/apiserver/server/job_server_test.go b/backend/src/apiserver/server/job_server_test.go
index 125a15775184..e40e7f7e4ff9 100644
--- a/backend/src/apiserver/server/job_server_test.go
+++ b/backend/src/apiserver/server/job_server_test.go
@@ -36,6 +36,26 @@ func TestValidateApiJob(t *testing.T) {
 	assert.Nil(t, err)
 }
 
+func TestValidateApiJob_WithPipelineVersion(t *testing.T) {
+	clients, manager, _ := initWithExperimentAndPipelineVersion(t)
+	defer clients.Close()
+	server := NewJobServer(manager)
+	apiJob := &api.Job{
+		Id: "job1",
+		Name: "name1",
+		Enabled: true,
+		MaxConcurrency: 1,
+		Trigger: &api.Trigger{
+			Trigger: &api.Trigger_CronSchedule{CronSchedule: &api.CronSchedule{
+				StartTime: &timestamp.Timestamp{Seconds: 1},
+				Cron: "1 * * * *",
+			}}},
+		ResourceReferences: validReferencesOfExperimentAndPipelineVersion,
+	}
+	err := server.validateCreateJobRequest(&api.CreateJobRequest{Job: apiJob})
+	assert.Nil(t, err)
+}
+
 func TestValidateApiJob_ValidateNoExperimentResourceReferenceSucceeds(t *testing.T) {
 	clients, manager, _ := initWithExperiment(t)
 	defer clients.Close()
@@ -87,6 +107,27 @@ func TestValidateApiJob_ValidatePipelineSpecFailed(t *testing.T) {
 	assert.Contains(t, err.Error(), "Pipeline not_exist_pipeline not found")
 }
 
+func TestValidateApiJob_NoValidPipelineSpecOrPipelineVersion(t *testing.T) {
+	clients, manager, _ := initWithExperimentAndPipelineVersion(t)
+	defer clients.Close()
+	server := NewJobServer(manager)
+	apiJob := &api.Job{
+		Id: "job1",
+		Name: "name1",
+		Enabled: true,
+		MaxConcurrency: 1,
+		Trigger: &api.Trigger{
+			Trigger: &api.Trigger_CronSchedule{CronSchedule: &api.CronSchedule{
+				StartTime: &timestamp.Timestamp{Seconds: 1},
+				Cron: "1 * * * *",
+			}}},
+		ResourceReferences: validReference,
+	}
+	err := server.validateCreateJobRequest(&api.CreateJobRequest{Job: apiJob})
+	assert.Equal(t, codes.InvalidArgument, err.(*util.UserError).ExternalStatusCode())
+	assert.Contains(t, err.Error(), "Neither pipeline spec nor pipeline version is valid")
+}
+
 func TestValidateApiJob_InvalidCron(t *testing.T) {
 	clients, manager, experiment := initWithExperiment(t)
 	defer clients.Close()
diff --git a/backend/src/apiserver/server/pipeline_server.go b/backend/src/apiserver/server/pipeline_server.go
index 8c3641440e60..635b5e0b35a7 100644
--- a/backend/src/apiserver/server/pipeline_server.go
+++ b/backend/src/apiserver/server/pipeline_server.go
@@ -118,3 +118,81 @@ func ValidateCreatePipelineRequest(request *api.CreatePipelineRequest) error {
 func NewPipelineServer(resourceManager *resource.ResourceManager)
*PipelineServer { return &PipelineServer{resourceManager: resourceManager, httpClient: http.DefaultClient} } + +func (s *PipelineServer) CreatePipelineVersion(ctx context.Context, request *api.CreatePipelineVersionRequest) (*api.PipelineVersion, error) { + // Read pipeline file. + if request.Version == nil || request.Version.PackageUrl == nil || + len(request.Version.PackageUrl.PipelineUrl) == 0 { + return nil, util.NewInvalidInputError("Pipeline URL is empty. Please specify a valid URL.") + } + pipelineUrl := request.Version.PackageUrl.PipelineUrl + if _, err := url.ParseRequestURI(request.Version.PackageUrl.PipelineUrl); err != nil { + return nil, util.NewInvalidInputError("Invalid Pipeline URL %v. Please specify a valid URL", request.Version.PackageUrl.PipelineUrl) + } + resp, err := s.httpClient.Get(pipelineUrl) + if err != nil || resp.StatusCode != http.StatusOK { + return nil, util.NewInternalServerError(err, "Failed to download the pipeline from %v. Please double check the URL is valid and can be accessed by the pipeline system.", pipelineUrl) + } + pipelineFileName := path.Base(pipelineUrl) + pipelineFile, err := ReadPipelineFile(pipelineFileName, resp.Body, MaxFileLength) + if err != nil { + return nil, util.Wrap(err, "The URL is valid but pipeline system failed to read the file.") + } + + version, err := s.resourceManager.CreatePipelineVersion(request.Version, pipelineFile) + if err != nil { + return nil, util.Wrap(err, "Failed to create a version.") + } + return ToApiPipelineVersion(version) +} + +func (s *PipelineServer) GetPipelineVersion(ctx context.Context, request *api.GetPipelineVersionRequest) (*api.PipelineVersion, error) { + version, err := s.resourceManager.GetPipelineVersion(request.VersionId) + if err != nil { + return nil, util.Wrap(err, "Get pipeline version failed.") + } + return ToApiPipelineVersion(version) +} + +func (s *PipelineServer) ListPipelineVersions(ctx context.Context, request *api.ListPipelineVersionsRequest) (*api.ListPipelineVersionsResponse, error) { + opts, err := validatedListOptions( + &model.PipelineVersion{}, + request.PageToken, + int(request.PageSize), + request.SortBy, + request.Filter) + + if err != nil { + return nil, util.Wrap(err, "Failed to create list options") + } + + pipelineVersions, total_size, nextPageToken, err := + s.resourceManager.ListPipelineVersions(request.ResourceKey.Id, opts) + if err != nil { + return nil, util.Wrap(err, "List pipeline versions failed.") + } + apiPipelineVersions, _ := ToApiPipelineVersions(pipelineVersions) + + return &api.ListPipelineVersionsResponse{ + Versions: apiPipelineVersions, + NextPageToken: nextPageToken, + TotalSize: int32(total_size)}, nil +} + +func (s *PipelineServer) DeletePipelineVersion(ctx context.Context, request *api.DeletePipelineVersionRequest) (*empty.Empty, error) { + err := s.resourceManager.DeletePipelineVersion(request.VersionId) + if err != nil { + return nil, util.Wrap(err, "Delete pipeline versions failed.") + } + + return &empty.Empty{}, nil +} + +func (s *PipelineServer) GetPipelineVersionTemplate(ctx context.Context, request *api.GetPipelineVersionTemplateRequest) (*api.GetTemplateResponse, error) { + template, err := s.resourceManager.GetPipelineVersionTemplate(request.VersionId) + if err != nil { + return nil, util.Wrap(err, "Get pipeline template failed.") + } + + return &api.GetTemplateResponse{Template: string(template)}, nil +} diff --git a/backend/src/apiserver/server/pipeline_server_test.go b/backend/src/apiserver/server/pipeline_server_test.go index 
300284463164..62679969f89c 100644 --- a/backend/src/apiserver/server/pipeline_server_test.go +++ b/backend/src/apiserver/server/pipeline_server_test.go @@ -27,8 +27,8 @@ func TestCreatePipeline_YAML(t *testing.T) { pipelineServer := PipelineServer{resourceManager: resourceManager, httpClient: httpServer.Client()} pipeline, err := pipelineServer.CreatePipeline(context.Background(), &api.CreatePipelineRequest{ Pipeline: &api.Pipeline{ - Url:&api.Url{PipelineUrl: httpServer.URL + "/arguments-parameters.yaml"}, - Name:"argument-parameters", + Url: &api.Url{PipelineUrl: httpServer.URL + "/arguments-parameters.yaml"}, + Name: "argument-parameters", }}) assert.Nil(t, err) @@ -54,8 +54,8 @@ func TestCreatePipeline_Tarball(t *testing.T) { pipelineServer := PipelineServer{resourceManager: resourceManager, httpClient: httpServer.Client()} pipeline, err := pipelineServer.CreatePipeline(context.Background(), &api.CreatePipelineRequest{ Pipeline: &api.Pipeline{ - Url:&api.Url{PipelineUrl: httpServer.URL + "/arguments_tarball/arguments.tar.gz"}, - Name:"argument-parameters", + Url: &api.Url{PipelineUrl: httpServer.URL + "/arguments_tarball/arguments.tar.gz"}, + Name: "argument-parameters", }}) assert.Nil(t, err) @@ -81,8 +81,8 @@ func TestCreatePipeline_InvalidYAML(t *testing.T) { pipelineServer := PipelineServer{resourceManager: resourceManager, httpClient: httpServer.Client()} _, err := pipelineServer.CreatePipeline(context.Background(), &api.CreatePipelineRequest{ Pipeline: &api.Pipeline{ - Url:&api.Url{PipelineUrl: httpServer.URL + "/invalid-workflow.yaml"}, - Name:"argument-parameters", + Url: &api.Url{PipelineUrl: httpServer.URL + "/invalid-workflow.yaml"}, + Name: "argument-parameters", }}) assert.NotNil(t, err) @@ -101,12 +101,145 @@ func TestCreatePipeline_InvalidURL(t *testing.T) { pipelineServer := PipelineServer{resourceManager: resourceManager, httpClient: httpServer.Client()} _, err := pipelineServer.CreatePipeline(context.Background(), &api.CreatePipelineRequest{ Pipeline: &api.Pipeline{ - Url:&api.Url{PipelineUrl: httpServer.URL + "/invalid-workflow.yaml"}, - Name:"argument-parameters", + Url: &api.Url{PipelineUrl: httpServer.URL + "/invalid-workflow.yaml"}, + Name: "argument-parameters", }}) assert.Equal(t, codes.Internal, err.(*util.UserError).ExternalStatusCode()) } +func TestCreatePipelineVersion_YAML(t *testing.T) { + httpServer := getMockServer(t) + // Close the server when test finishes + defer httpServer.Close() + + clientManager := resource.NewFakeClientManagerOrFatal( + util.NewFakeTimeForEpoch()) + resourceManager := resource.NewResourceManager(clientManager) + + pipelineServer := PipelineServer{ + resourceManager: resourceManager, httpClient: httpServer.Client()} + pipelineVersion, err := pipelineServer.CreatePipelineVersion( + context.Background(), &api.CreatePipelineVersionRequest{ + Version: &api.PipelineVersion{ + PackageUrl: &api.Url{ + PipelineUrl: httpServer.URL + "/arguments-parameters.yaml"}, + Name: "argument-parameters", + ResourceReferences: []*api.ResourceReference{ + &api.ResourceReference{ + Key: &api.ResourceKey{ + Id: "pipeline", + Type: api.ResourceType_PIPELINE, + }, + Relationship: api.Relationship_OWNER, + }}}}) + + assert.Nil(t, err) + assert.NotNil(t, pipelineVersion) + assert.Equal(t, "argument-parameters", pipelineVersion.Name) + newPipelineVersion, err := resourceManager.GetPipelineVersion( + pipelineVersion.Id) + assert.Nil(t, err) + assert.NotNil(t, newPipelineVersion) + var params []api.Parameter + err = 
json.Unmarshal([]byte(newPipelineVersion.Parameters), &params)
+	assert.Nil(t, err)
+	assert.Equal(t, []api.Parameter{
+		{Name: "param1", Value: "hello"}, {Name: "param2"}}, params)
+}
+
+func TestCreatePipelineVersion_InvalidYAML(t *testing.T) {
+	httpServer := getMockServer(t)
+	// Close the server when test finishes
+	defer httpServer.Close()
+
+	clientManager := resource.NewFakeClientManagerOrFatal(util.NewFakeTimeForEpoch())
+	resourceManager := resource.NewResourceManager(clientManager)
+
+	pipelineServer := PipelineServer{resourceManager: resourceManager, httpClient: httpServer.Client()}
+	_, err := pipelineServer.CreatePipelineVersion(
+		context.Background(), &api.CreatePipelineVersionRequest{
+			Version: &api.PipelineVersion{
+				PackageUrl: &api.Url{
+					PipelineUrl: httpServer.URL + "/invalid-workflow.yaml"},
+				Name: "argument-parameters",
+				ResourceReferences: []*api.ResourceReference{
+					&api.ResourceReference{
+						Key: &api.ResourceKey{
+							Id: "pipeline",
+							Type: api.ResourceType_PIPELINE,
+						},
+						Relationship: api.Relationship_OWNER,
+					}}}})
+
+	assert.NotNil(t, err)
+	assert.Equal(t, codes.InvalidArgument, err.(*util.UserError).ExternalStatusCode())
+	assert.Contains(t, err.Error(), "Unexpected resource type")
+}
+
+func TestCreatePipelineVersion_Tarball(t *testing.T) {
+	httpServer := getMockServer(t)
+	// Close the server when test finishes
+	defer httpServer.Close()
+
+	clientManager := resource.NewFakeClientManagerOrFatal(util.NewFakeTimeForEpoch())
+	resourceManager := resource.NewResourceManager(clientManager)
+
+	pipelineServer := PipelineServer{resourceManager: resourceManager, httpClient: httpServer.Client()}
+	pipelineVersion, err := pipelineServer.CreatePipelineVersion(
+		context.Background(), &api.CreatePipelineVersionRequest{
+			Version: &api.PipelineVersion{
+				PackageUrl: &api.Url{
+					PipelineUrl: httpServer.URL +
+						"/arguments_tarball/arguments.tar.gz"},
+				Name: "argument-parameters",
+				ResourceReferences: []*api.ResourceReference{
+					&api.ResourceReference{
+						Key: &api.ResourceKey{
+							Id: "pipeline",
+							Type: api.ResourceType_PIPELINE,
+						},
+						Relationship: api.Relationship_OWNER,
+					}}}})
+
+	assert.Nil(t, err)
+	assert.NotNil(t, pipelineVersion)
+	assert.Equal(t, "argument-parameters", pipelineVersion.Name)
+	newPipelineVersion, err := resourceManager.GetPipelineVersion(pipelineVersion.Id)
+	assert.Nil(t, err)
+	assert.NotNil(t, newPipelineVersion)
+	var params []api.Parameter
+	err = json.Unmarshal([]byte(newPipelineVersion.Parameters), &params)
+	assert.Nil(t, err)
+	assert.Equal(t, []api.Parameter{{Name: "param1", Value: "hello"}, {Name: "param2"}}, params)
+}
+
+func TestCreatePipelineVersion_InvalidURL(t *testing.T) {
+	// Use a bad mock server
+	httpServer := getBadMockServer()
+	// Close the server when test finishes
+	defer httpServer.Close()
+
+	clientManager := resource.NewFakeClientManagerOrFatal(util.NewFakeTimeForEpoch())
+	resourceManager := resource.NewResourceManager(clientManager)
+
+	pipelineServer := PipelineServer{resourceManager: resourceManager, httpClient: httpServer.Client()}
+	_, err := pipelineServer.CreatePipelineVersion(context.Background(), &api.CreatePipelineVersionRequest{
+		Version: &api.PipelineVersion{
+			PackageUrl: &api.Url{
+				PipelineUrl: httpServer.URL + "/invalid-workflow.yaml"},
+			Name: "argument-parameters",
+			ResourceReferences: []*api.ResourceReference{
+				&api.ResourceReference{
+					Key: &api.ResourceKey{
+						Id: "pipeline",
+						Type: api.ResourceType_PIPELINE,
+					},
+					Relationship: api.Relationship_OWNER,
+				}}}})
+
+	assert.Equal(t,
codes.Internal, err.(*util.UserError).ExternalStatusCode()) +} + func getMockServer(t *testing.T) *httptest.Server { httpServer := httptest.NewServer(http.HandlerFunc(func(rw http.ResponseWriter, req *http.Request) { // Send response to be tested diff --git a/backend/src/apiserver/server/run_server.go b/backend/src/apiserver/server/run_server.go index e39b780485d3..9d09c23dbdb8 100644 --- a/backend/src/apiserver/server/run_server.go +++ b/backend/src/apiserver/server/run_server.go @@ -16,6 +16,7 @@ package server import ( "context" + "github.com/golang/protobuf/ptypes/empty" api "github.com/kubeflow/pipelines/backend/api/go_client" "github.com/kubeflow/pipelines/backend/src/apiserver/model" @@ -128,7 +129,10 @@ func (s *RunServer) validateCreateRunRequest(request *api.CreateRunRequest) erro } if err := ValidatePipelineSpec(s.resourceManager, run.PipelineSpec); err != nil { - return util.Wrap(err, "The pipeline spec is invalid.") + if _, errResourceReference := CheckPipelineVersionReference(s.resourceManager, run.ResourceReferences); errResourceReference != nil { + return util.Wrap(err, "Neither pipeline spec nor pipeline version is valid. "+errResourceReference.Error()) + } + return nil } return nil } diff --git a/backend/src/apiserver/server/run_server_test.go b/backend/src/apiserver/server/run_server_test.go index 2dd81bd09bdb..44dac875b13d 100644 --- a/backend/src/apiserver/server/run_server_test.go +++ b/backend/src/apiserver/server/run_server_test.go @@ -112,6 +112,18 @@ func TestValidateCreateRunRequest(t *testing.T) { assert.Nil(t, err) } +func TestValidateCreateRunRequest_WithPipelineVersionReference(t *testing.T) { + clients, manager, _ := initWithExperimentAndPipelineVersion(t) + defer clients.Close() + server := NewRunServer(manager) + run := &api.Run{ + Name: "123", + ResourceReferences: validReferencesOfExperimentAndPipelineVersion, + } + err := server.validateCreateRunRequest(&api.CreateRunRequest{Run: run}) + assert.Nil(t, err) +} + func TestValidateCreateRunRequest_EmptyName(t *testing.T) { clients, manager, _ := initWithExperiment(t) defer clients.Close() @@ -144,7 +156,7 @@ func TestValidateCreateRunRequest_NoExperiment(t *testing.T) { assert.Nil(t, err) } -func TestValidateCreateRunRequest_EmptyPipelineSpec(t *testing.T) { +func TestValidateCreateRunRequest_EmptyPipelineSpecAndEmptyPipelineVersion(t *testing.T) { clients, manager, _ := initWithExperiment(t) defer clients.Close() server := NewRunServer(manager) @@ -154,7 +166,7 @@ func TestValidateCreateRunRequest_EmptyPipelineSpec(t *testing.T) { } err := server.validateCreateRunRequest(&api.CreateRunRequest{Run: run}) assert.NotNil(t, err) - assert.Contains(t, err.Error(), "Please specify a pipeline by providing a pipeline ID or workflow manifest") + assert.Contains(t, err.Error(), "Neither pipeline spec nor pipeline version is valid") } func TestValidateCreateRunRequest_TooMuchParameters(t *testing.T) { diff --git a/backend/src/apiserver/server/test_util.go b/backend/src/apiserver/server/test_util.go index 1712f5719910..890e992ce971 100644 --- a/backend/src/apiserver/server/test_util.go +++ b/backend/src/apiserver/server/test_util.go @@ -47,6 +47,23 @@ var validReference = []*api.ResourceReference{ }, } +var validReferencesOfExperimentAndPipelineVersion = []*api.ResourceReference{ + { + Key: &api.ResourceKey{ + Type: api.ResourceType_EXPERIMENT, + Id: resource.DefaultFakeUUID, + }, + Relationship: api.Relationship_OWNER, + }, + { + Key: &api.ResourceKey{ + Type: api.ResourceType_PIPELINE_VERSION, + Id: 
resource.DefaultFakeUUID,
+		},
+		Relationship: api.Relationship_CREATOR,
+	},
+}
+
 func initWithExperiment(t *testing.T) (*resource.FakeClientManager, *resource.ResourceManager, *model.Experiment) {
 	clientManager := resource.NewFakeClientManagerOrFatal(util.NewFakeTimeForEpoch())
 	resourceManager := resource.NewResourceManager(clientManager)
@@ -56,6 +73,35 @@ func initWithExperiment(t *testing.T) (*resource.FakeClientManager, *resource.Re
 	return clientManager, resourceManager, experiment
 }
 
+func initWithExperimentAndPipelineVersion(t *testing.T) (*resource.FakeClientManager, *resource.ResourceManager, *model.Experiment) {
+	clientManager := resource.NewFakeClientManagerOrFatal(util.NewFakeTimeForEpoch())
+	resourceManager := resource.NewResourceManager(clientManager)
+
+	// Create an experiment.
+	experiment := &model.Experiment{Name: "123"}
+	experiment, err := resourceManager.CreateExperiment(experiment)
+	assert.Nil(t, err)
+
+	// Create a pipeline and then a pipeline version.
+	_, err = resourceManager.CreatePipeline("pipeline", "", []byte("apiVersion: argoproj.io/v1alpha1\nkind: Workflow"))
+	assert.Nil(t, err)
+	_, err = resourceManager.CreatePipelineVersion(&api.PipelineVersion{
+		Name: "pipeline_version",
+		ResourceReferences: []*api.ResourceReference{
+			&api.ResourceReference{
+				Key: &api.ResourceKey{
+					Id: resource.DefaultFakeUUID,
+					Type: api.ResourceType_PIPELINE,
+				},
+				Relationship: api.Relationship_OWNER,
+			},
+		},
+	},
+		[]byte("apiVersion: argoproj.io/v1alpha1\nkind: Workflow"))
+	assert.Nil(t, err)
+
+	return clientManager, resourceManager, experiment
+}
+
 func initWithOneTimeRun(t *testing.T) (*resource.FakeClientManager, *resource.ResourceManager, *model.RunDetail) {
 	clientManager, manager, exp := initWithExperiment(t)
 	apiRun := &api.Run{
diff --git a/backend/src/apiserver/server/util.go b/backend/src/apiserver/server/util.go
index bd274c18e501..bffd1d52960a 100644
--- a/backend/src/apiserver/server/util.go
+++ b/backend/src/apiserver/server/util.go
@@ -7,6 +7,7 @@ import (
 	"bytes"
 	"compress/gzip"
 	"encoding/json"
+
 	api "github.com/kubeflow/pipelines/backend/api/go_client"
 	"github.com/kubeflow/pipelines/backend/src/apiserver/resource"
 	"github.com/kubeflow/pipelines/backend/src/common/util"
@@ -239,4 +240,31 @@ func ValidatePipelineSpec(resourceManager *resource.ResourceManager, spec *api.P
 		return util.NewInvalidInputError("The input parameter length exceed maximum size of %v.", util.MaxParameterBytes)
 	}
 	return nil
-}
\ No newline at end of file
+}
+
+// Verify that
+// (1) a pipeline version is specified in references as a creator, and
+// (2) the above pipeline version exists in the pipeline version store and is
+// in ready status.
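+// On success, the id of the referenced pipeline version is returned, so a
+// caller can, for example, fetch the corresponding manifest:
+//
+//   if versionId, err := CheckPipelineVersionReference(rm, run.ResourceReferences); err == nil {
+//       template, _ := rm.GetPipelineVersionTemplate(*versionId)
+//   }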
+func CheckPipelineVersionReference(resourceManager *resource.ResourceManager, references []*api.ResourceReference) (*string, error) { + if references == nil { + return nil, util.NewInvalidInputError("Please specify a pipeline version in Run's resource references") + } + + var pipelineVersionId = "" + for _, reference := range references { + if reference.Key.Type == api.ResourceType_PIPELINE_VERSION && reference.Relationship == api.Relationship_CREATOR { + pipelineVersionId = reference.Key.Id + } + } + if len(pipelineVersionId) == 0 { + return nil, util.NewInvalidInputError("Please specify a pipeline version in Run's resource references") + } + + // Verify pipeline version exists + if _, err := resourceManager.GetPipelineVersion(pipelineVersionId); err != nil { + return nil, util.Wrap(err, "Please specify a valid pipeline version in Run's resource references.") + } + + return &pipelineVersionId, nil +} diff --git a/backend/src/apiserver/storage/pipeline_store.go b/backend/src/apiserver/storage/pipeline_store.go index 02dce6aaf74f..0b70051afc9e 100644 --- a/backend/src/apiserver/storage/pipeline_store.go +++ b/backend/src/apiserver/storage/pipeline_store.go @@ -43,6 +43,16 @@ var pipelineColumns = []string{ "pipeline_versions.CodeSourceUrl", } +var pipelineVersionColumns = []string{ + "pipeline_versions.UUID", + "pipeline_versions.CreatedAtInSec", + "pipeline_versions.Name", + "pipeline_versions.Parameters", + "pipeline_versions.PipelineId", + "pipeline_versions.Status", + "pipeline_versions.CodeSourceUrl", +} + type PipelineStoreInterface interface { ListPipelines(opts *list.Options) ([]*model.Pipeline, int, string, error) GetPipeline(pipelineId string) (*model.Pipeline, error) @@ -50,7 +60,13 @@ type PipelineStoreInterface interface { DeletePipeline(pipelineId string) error CreatePipeline(*model.Pipeline) (*model.Pipeline, error) UpdatePipelineStatus(string, model.PipelineStatus) error + UpdatePipelineDefaultVersion(string, string) error + CreatePipelineVersion(*model.PipelineVersion) (*model.PipelineVersion, error) + GetPipelineVersion(versionId string) (*model.PipelineVersion, error) + GetPipelineVersionWithStatus(versionId string, status model.PipelineVersionStatus) (*model.PipelineVersion, error) + ListPipelineVersions(pipelineId string, opts *list.Options) ([]*model.PipelineVersion, int, string, error) + DeletePipelineVersion(pipelineVersionId string) error // Change status of a particular version. UpdatePipelineVersionStatus(pipelineVersionId string, status model.PipelineVersionStatus) error // TODO(jingzhang36): remove this temporary method after resource manager's @@ -206,8 +222,7 @@ func (s *PipelineStore) GetPipelineWithStatus(id string, status model.PipelineSt Select(pipelineColumns...). From("pipelines"). LeftJoin("pipeline_versions on pipelines.DefaultVersionId = pipeline_versions.UUID"). - Where(sq.Eq{"pipelines.uuid": id}). - Where(sq.Eq{"pipelines.Status": status}). + Where(sq.And{sq.Eq{"pipelines.uuid": id}, sq.Eq{"pipelines.Status": status}}). 
 		Limit(1).ToSql()
 	if err != nil {
 		return nil, util.NewInternalServerError(err, "Failed to create query to get pipeline: %v", err.Error())
@@ -381,6 +396,12 @@ func (s *PipelineStore) UpdatePipelineVersionStatus(id string, status model.Pipe
 func (s *PipelineStore) UpdatePipelineAndVersionsStatus(id string, status model.PipelineStatus, pipelineVersionId string, pipelineVersionStatus model.PipelineVersionStatus) error {
 	tx, err := s.db.Begin()
+	if err != nil {
+		return util.NewInternalServerError(
+			err,
+			"Failed to start a transaction: %s",
+			err.Error())
+	}
 
 	sql, args, err := sq.
 		Update("pipelines").
@@ -425,3 +446,348 @@ func (s *PipelineStore) UpdatePipelineAndVersionsStatus(id string, status model.
 func NewPipelineStore(db *DB, time util.TimeInterface, uuid util.UUIDGeneratorInterface) *PipelineStore {
 	return &PipelineStore{db: db, time: time, uuid: uuid}
 }
+
+func (s *PipelineStore) CreatePipelineVersion(v *model.PipelineVersion) (*model.PipelineVersion, error) {
+	newPipelineVersion := *v
+	newPipelineVersion.CreatedAtInSec = s.time.Now().Unix()
+	id, err := s.uuid.NewRandom()
+	if err != nil {
+		return nil, util.NewInternalServerError(err, "Failed to create a pipeline version id.")
+	}
+	newPipelineVersion.UUID = id.String()
+
+	// Prepare queries for inserting the new version and updating the default version.
+	versionSql, versionArgs, versionErr := sq.
+		Insert("pipeline_versions").
+		SetMap(
+			sq.Eq{
+				"UUID": newPipelineVersion.UUID,
+				"CreatedAtInSec": newPipelineVersion.CreatedAtInSec,
+				"Name": newPipelineVersion.Name,
+				"Parameters": newPipelineVersion.Parameters,
+				"PipelineId": newPipelineVersion.PipelineId,
+				"Status": string(newPipelineVersion.Status),
+				"CodeSourceUrl": newPipelineVersion.CodeSourceUrl}).
+		ToSql()
+	if versionErr != nil {
+		return nil, util.NewInternalServerError(
+			versionErr,
+			"Failed to create query to insert version to pipeline version table: %v",
+			versionErr.Error())
+	}
+	pipelineSql, pipelineArgs, pipelineErr := sq.
+		Update("pipelines").
+		SetMap(sq.Eq{"DefaultVersionId": newPipelineVersion.UUID}).
+		Where(sq.Eq{"UUID": newPipelineVersion.PipelineId}).
+		ToSql()
+	if pipelineErr != nil {
+		return nil, util.NewInternalServerError(
+			pipelineErr,
+			"Failed to create query to update pipeline default version id: %v",
+			pipelineErr.Error())
+	}
+
+	// In a single transaction, insert new version and update default version.
+	tx, err := s.db.Begin()
+	if err != nil {
+		return nil, util.NewInternalServerError(
+			err,
+			"Failed to start a transaction: %v",
+			err.Error())
+	}
+
+	_, err = tx.Exec(versionSql, versionArgs...)
+	if err != nil {
+		tx.Rollback()
+		if s.db.IsDuplicateError(err) {
+			return nil, util.NewInvalidInputError(
+				"Failed to create a new pipeline version. The name %v already exists. Please specify a new name.", v.Name)
+		}
+		return nil, util.NewInternalServerError(err, "Failed to add version to pipeline version table: %v",
+			err.Error())
+	}
+	_, err = tx.Exec(pipelineSql, pipelineArgs...)
+	if err != nil {
+		tx.Rollback()
+		return nil, util.NewInternalServerError(err, "Failed to update pipeline default version id: %v",
+			err.Error())
+	}
+	if err := tx.Commit(); err != nil {
+		return nil, util.NewInternalServerError(err, "Failed to create new pipeline version: %v",
+			err.Error())
+	}
+
+	return &newPipelineVersion, nil
+}
+
+func (s *PipelineStore) UpdatePipelineDefaultVersion(pipelineId string, versionId string) error {
+	sql, args, err := sq.
+		Update("pipelines").
+		SetMap(sq.Eq{"DefaultVersionId": versionId}).
+ Where(sq.Eq{"UUID": pipelineId}). + ToSql() + if err != nil { + return util.NewInternalServerError(err, "Failed to create query to update the pipeline default version: %s", err.Error()) + } + _, err = s.db.Exec(sql, args...) + if err != nil { + return util.NewInternalServerError(err, "Failed to update the pipeline default version: %s", err.Error()) + } + + return nil +} + +func (s *PipelineStore) GetPipelineVersion(versionId string) (*model.PipelineVersion, error) { + return s.GetPipelineVersionWithStatus(versionId, model.PipelineVersionReady) +} + +func (s *PipelineStore) GetPipelineVersionWithStatus(versionId string, status model.PipelineVersionStatus) (*model.PipelineVersion, error) { + sql, args, err := sq. + Select(pipelineVersionColumns...). + From("pipeline_versions"). + Where(sq.And{sq.Eq{"UUID": versionId}, sq.Eq{"Status": status}}). + Limit(1). + ToSql() + if err != nil { + return nil, util.NewInternalServerError(err, "Failed to create query to get pipeline version: %v", err.Error()) + } + r, err := s.db.Query(sql, args...) + if err != nil { + return nil, util.NewInternalServerError(err, "Failed to get pipeline version: %v", err.Error()) + } + defer r.Close() + versions, err := s.scanPipelineVersionRows(r) + + if err != nil || len(versions) > 1 { + return nil, util.NewInternalServerError(err, "Failed to get pipeline version: %v", err.Error()) + } + if len(versions) == 0 { + return nil, util.NewResourceNotFoundError("Version", fmt.Sprint(versionId)) + } + return versions[0], nil +} + +func (s *PipelineStore) scanPipelineVersionRows(rows *sql.Rows) ([]*model.PipelineVersion, error) { + var pipelineVersions []*model.PipelineVersion + for rows.Next() { + var uuid, name, parameters, pipelineId, codeSourceUrl, status sql.NullString + var createdAtInSec sql.NullInt64 + if err := rows.Scan( + &uuid, + &createdAtInSec, + &name, + ¶meters, + &pipelineId, + &status, + &codeSourceUrl, + ); err != nil { + return nil, err + } + if uuid.Valid { + pipelineVersions = append(pipelineVersions, &model.PipelineVersion{ + UUID: uuid.String, + CreatedAtInSec: createdAtInSec.Int64, + Name: name.String, + Parameters: parameters.String, + PipelineId: pipelineId.String, + CodeSourceUrl: codeSourceUrl.String, + Status: model.PipelineVersionStatus(status.String)}) + } + } + return pipelineVersions, nil +} + +func (s *PipelineStore) ListPipelineVersions(pipelineId string, opts *list.Options) ([]*model.PipelineVersion, int, string, error) { + errorF := func(err error) ([]*model.PipelineVersion, int, string, error) { + return nil, 0, "", util.NewInternalServerError(err, "Failed to list pipeline versions: %v", err) + } + + buildQuery := func(sqlBuilder sq.SelectBuilder) sq.SelectBuilder { + return sqlBuilder. + From("pipeline_versions"). + Where(sq.And{sq.Eq{"PipelineId": pipelineId}, sq.Eq{"status": model.PipelineVersionReady}}) + } + + // SQL for pipeline version list + rowsSql, rowsArgs, err := opts.AddPaginationToSelect( + buildQuery(sq.Select(pipelineVersionColumns...))).ToSql() + if err != nil { + return errorF(err) + } + + // SQL for getting total size of pipeline versions. + sizeSql, sizeArgs, err := buildQuery(sq.Select("count(*)")).ToSql() + if err != nil { + return errorF(err) + } + + // Use a transaction to make sure we're returning the total_size of the same + // rows queried. + tx, err := s.db.Begin() + if err != nil { + glog.Errorf("Failed to start transaction to list pipelines") + return errorF(err) + } + + rows, err := tx.Query(rowsSql, rowsArgs...) 
+	if err != nil {
+		tx.Rollback()
+		return errorF(err)
+	}
+	pipelineVersions, err := s.scanPipelineVersionRows(rows)
+	if err != nil {
+		tx.Rollback()
+		return errorF(err)
+	}
+	rows.Close()
+
+	sizeRow, err := tx.Query(sizeSql, sizeArgs...)
+	if err != nil {
+		tx.Rollback()
+		return errorF(err)
+	}
+	total_size, err := list.ScanRowToTotalSize(sizeRow)
+	if err != nil {
+		tx.Rollback()
+		return errorF(err)
+	}
+	sizeRow.Close()
+
+	err = tx.Commit()
+	if err != nil {
+		glog.Errorf("Failed to commit transaction to list pipeline versions")
+		return errorF(err)
+	}
+
+	if len(pipelineVersions) <= opts.PageSize {
+		return pipelineVersions, total_size, "", nil
+	}
+
+	npt, err := opts.NextPageToken(pipelineVersions[opts.PageSize])
+	return pipelineVersions[:opts.PageSize], total_size, npt, err
+}
+
+func (s *PipelineStore) DeletePipelineVersion(versionId string) error {
+	// If this version is used as default version for a pipeline, we have to
+	// find a new default version for that pipeline, which is usually the latest
+	// version of that pipeline. Then we'll have 3 operations in a single
+	// transaction: (1) delete version (2) get new default version id (3) use
+	// new default version id to update pipeline.
+	tx, err := s.db.Begin()
+	if err != nil {
+		return util.NewInternalServerError(
+			err,
+			"Failed to start a transaction while trying to delete pipeline version: %v",
+			err.Error())
+	}
+
+	// (1) delete version.
+	_, err = tx.Exec(
+		"delete from pipeline_versions where UUID = ?",
+		versionId)
+	if err != nil {
+		tx.Rollback()
+		return util.NewInternalServerError(
+			err,
+			"Failed to delete pipeline version: %v",
+			err.Error())
+	}
+
+	// (2) check whether this version is used as default version.
+	r, err := tx.Query(
+		"select UUID from pipelines where DefaultVersionId = ?",
+		versionId)
+	if err != nil {
+		tx.Rollback()
+		return util.NewInternalServerError(
+			err,
+			`Failed to query pipelines table while deleting pipeline version:
+			%v`,
+			err.Error())
+	}
+	var pipelineId = ""
+	if r.Next() {
+		if err := r.Scan(&pipelineId); err != nil {
+			tx.Rollback()
+			return util.NewInternalServerError(
+				err,
+				"Failed to get pipeline id for version id: %v",
+				err.Error())
+		}
+	}
+	if len(pipelineId) == 0 {
+		// The deleted version is not used as a default version. So no extra
+		// work is needed. We commit the deletion now.
+		if err := tx.Commit(); err != nil {
+			return util.NewInternalServerError(
+				err,
+				"Failed to delete pipeline version: %v",
+				err.Error())
+		}
+		// Return here; otherwise the code below would keep using the
+		// already-committed transaction.
+		return nil
+	}
+
+	// (3) find a new default version.
+	r, err = tx.Query(
+		`select UUID from pipeline_versions
+		where PipelineId = ? and Status = ?
+		order by CreatedAtInSec DESC
+		limit 1`,
+		pipelineId,
+		model.PipelineVersionReady)
+	if err != nil {
+		tx.Rollback()
+		return util.NewInternalServerError(
+			err,
+			"Failed to get a new default version id: %v",
+			err.Error())
+	}
+	var newDefaultVersionId = ""
+	if r.Next() {
+		if err := r.Scan(&newDefaultVersionId); err != nil {
+			tx.Rollback()
+			return util.NewInternalServerError(
+				err,
+				"Failed to get a new default version id: %v",
+				err.Error())
+		}
+	}
+	if len(newDefaultVersionId) == 0 {
+		// No new default version. The pipeline's default version id will be
+		// null.
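+		// The pipeline then keeps a NULL DefaultVersionId until a later
+		// CreatePipelineVersion call points it at a newly created version.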
+		_, err = tx.Exec(
+			"update pipelines set DefaultVersionId = null where UUID = ?",
+			pipelineId)
+		if err != nil {
+			tx.Rollback()
+			return util.NewInternalServerError(
+				err,
+				"Failed to update pipeline's default version id: %v",
+				err.Error())
+		}
+	} else {
+		_, err = tx.Exec(
+			"update pipelines set DefaultVersionId = ? where UUID = ?",
+			newDefaultVersionId, pipelineId)
+		if err != nil {
+			tx.Rollback()
+			return util.NewInternalServerError(
+				err,
+				"Failed to update pipeline's default version id: %v",
+				err.Error())
+		}
+	}
+
+	if err := tx.Commit(); err != nil {
+		return util.NewInternalServerError(
+			err,
+			"Failed to delete pipeline version: %v",
+			err.Error())
+	}
+	return nil
+}
+
+// SetUUIDGenerator is for unit tests in other packages that need to set uuid,
+// since uuid is not exported.
+func (s *PipelineStore) SetUUIDGenerator(new_uuid util.UUIDGeneratorInterface) {
+	s.uuid = new_uuid
+}
diff --git a/backend/src/apiserver/storage/pipeline_store_test.go b/backend/src/apiserver/storage/pipeline_store_test.go
index 980acff52c3a..cc8ddbfcb4b3 100644
--- a/backend/src/apiserver/storage/pipeline_store_test.go
+++ b/backend/src/apiserver/storage/pipeline_store_test.go
@@ -29,6 +29,7 @@ const (
 	fakeUUIDTwo   = "123e4567-e89b-12d3-a456-426655440001"
 	fakeUUIDThree = "123e4567-e89b-12d3-a456-426655440002"
 	fakeUUIDFour  = "123e4567-e89b-12d3-a456-426655440003"
+	fakeUUIDFive  = "123e4567-e89b-12d3-a456-426655440004"
 )
 
 func createPipeline(name string) *model.Pipeline {
@@ -505,3 +506,721 @@ func TestUpdatePipelineStatusError(t *testing.T) {
 	err := pipelineStore.UpdatePipelineStatus(fakeUUID, model.PipelineDeleting)
 	assert.Equal(t, codes.Internal, err.(*util.UserError).ExternalStatusCode())
 }
+
+func TestCreatePipelineVersion(t *testing.T) {
+	db := NewFakeDbOrFatal()
+	defer db.Close()
+	pipelineStore := NewPipelineStore(
+		db,
+		util.NewFakeTimeForEpoch(),
+		util.NewFakeUUIDGeneratorOrFatal(fakeUUID, nil))
+
+	// Create a pipeline first.
+	pipelineStore.CreatePipeline(
+		&model.Pipeline{
+			Name: "pipeline_1",
+			Parameters: `[{"Name": "param1"}]`,
+			Status: model.PipelineReady,
+		})
+
+	// Create a version under the above pipeline.
+	pipelineStore.uuid = util.NewFakeUUIDGeneratorOrFatal(fakeUUIDTwo, nil)
+	pipelineVersion := &model.PipelineVersion{
+		Name: "pipeline_version_1",
+		Parameters: `[{"Name": "param1"}]`,
+		PipelineId: fakeUUID,
+		Status: model.PipelineVersionCreating,
+		CodeSourceUrl: "code_source_url",
+	}
+	pipelineVersionCreated, err := pipelineStore.CreatePipelineVersion(
+		pipelineVersion)
+
+	// Check whether created pipeline version is as expected.
+	pipelineVersionExpected := model.PipelineVersion{
+		UUID: fakeUUIDTwo,
+		CreatedAtInSec: 2,
+		Name: "pipeline_version_1",
+		Parameters: `[{"Name": "param1"}]`,
+		Status: model.PipelineVersionCreating,
+		PipelineId: fakeUUID,
+		CodeSourceUrl: "code_source_url",
+	}
+	assert.Nil(t, err)
+	assert.Equal(
+		t,
+		pipelineVersionExpected,
+		*pipelineVersionCreated,
+		"Got unexpected pipeline version.")
+
+	// Check whether pipeline has updated default version id.
+	pipeline, err := pipelineStore.GetPipeline(fakeUUID)
+	assert.Nil(t, err)
+	assert.Equal(t, pipeline.DefaultVersionId, fakeUUIDTwo, "Got unexpected default version id.")
+}
+
+func TestCreatePipelineVersion_DuplicateKey(t *testing.T) {
+	db := NewFakeDbOrFatal()
+	defer db.Close()
+	pipelineStore := NewPipelineStore(
+		db,
+		util.NewFakeTimeForEpoch(),
+		util.NewFakeUUIDGeneratorOrFatal(fakeUUID, nil))
+
+	// Create a pipeline.
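+	// The store's UUID generator still yields fakeUUID here, which becomes
+	// the pipeline id that both duplicate-named versions below reference.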
+ pipelineStore.CreatePipeline( + &model.Pipeline{ + Name: "pipeline_1", + Parameters: `[{"Name": "param1"}]`, + Status: model.PipelineReady, + }) + + // Create a version under the above pipeline. + pipelineStore.uuid = util.NewFakeUUIDGeneratorOrFatal(fakeUUIDTwo, nil) + pipelineStore.CreatePipelineVersion( + &model.PipelineVersion{ + Name: "pipeline_version_1", + Parameters: `[{"Name": "param1"}]`, + PipelineId: fakeUUID, + Status: model.PipelineVersionCreating, + }) + + // Create another new version with same name. + _, err := pipelineStore.CreatePipelineVersion( + &model.PipelineVersion{ + Name: "pipeline_version_1", + Parameters: `[{"Name": "param2"}]`, + PipelineId: fakeUUID, + Status: model.PipelineVersionCreating, + }) + assert.NotNil(t, err) + assert.Contains(t, err.Error(), "The name pipeline_version_1 already exist") +} + +func TestCreatePipelineVersion_InternalServerError_DBClosed(t *testing.T) { + db := NewFakeDbOrFatal() + defer db.Close() + pipelineStore := NewPipelineStore( + db, + util.NewFakeTimeForEpoch(), + util.NewFakeUUIDGeneratorOrFatal(fakeUUID, nil)) + + db.Close() + // Try to create a new version but db is closed. + _, err := pipelineStore.CreatePipelineVersion( + &model.PipelineVersion{ + Name: "pipeline_version_1", + Parameters: `[{"Name": "param1"}]`, + PipelineId: fakeUUID, + }) + assert.Equal(t, codes.Internal, err.(*util.UserError).ExternalStatusCode(), + "Expected create pipeline version to return error") +} + +func TestDeletePipelineVersion(t *testing.T) { + db := NewFakeDbOrFatal() + defer db.Close() + pipelineStore := NewPipelineStore( + db, + util.NewFakeTimeForEpoch(), + util.NewFakeUUIDGeneratorOrFatal(fakeUUID, nil)) + + // Create a pipeline. + pipelineStore.CreatePipeline( + &model.Pipeline{ + Name: "pipeline_1", + Parameters: `[{"Name": "param1"}]`, + Status: model.PipelineReady, + }) + + // Create a version under the above pipeline. + pipelineStore.uuid = util.NewFakeUUIDGeneratorOrFatal(fakeUUIDTwo, nil) + pipelineStore.CreatePipelineVersion( + &model.PipelineVersion{ + Name: "pipeline_version_1", + Parameters: `[{"Name": "param1"}]`, + PipelineId: fakeUUID, + Status: model.PipelineVersionReady, + }) + + // Create a second version, which will become the default version. + pipelineStore.uuid = util.NewFakeUUIDGeneratorOrFatal(fakeUUIDThree, nil) + pipelineStore.CreatePipelineVersion( + &model.PipelineVersion{ + Name: "pipeline_version_2", + Parameters: `[{"Name": "param1"}]`, + PipelineId: fakeUUID, + Status: model.PipelineVersionReady, + }) + + // Delete version with id being fakeUUIDThree. + err := pipelineStore.DeletePipelineVersion(fakeUUIDThree) + assert.Nil(t, err) + + // Check version removed. + _, err = pipelineStore.GetPipelineVersion(fakeUUIDThree) + assert.Equal(t, codes.NotFound, err.(*util.UserError).ExternalStatusCode()) + + // Check new default version is version with id being fakeUUIDTwo. + pipeline, err := pipelineStore.GetPipeline(fakeUUID) + assert.Nil(t, err) + assert.Equal(t, pipeline.DefaultVersionId, fakeUUIDTwo) +} + +func TestDeletePipelineVersionError(t *testing.T) { + db := NewFakeDbOrFatal() + defer db.Close() + pipelineStore := NewPipelineStore( + db, + util.NewFakeTimeForEpoch(), + util.NewFakeUUIDGeneratorOrFatal(fakeUUID, nil)) + + // Create a pipeline. + pipelineStore.CreatePipeline( + &model.Pipeline{ + Name: "pipeline_1", + Parameters: `[{"Name": "param1"}]`, + Status: model.PipelineReady, + }) + + // Create a version under the above pipeline. 
+ pipelineStore.uuid = util.NewFakeUUIDGeneratorOrFatal(fakeUUIDTwo, nil)
+ pipelineStore.CreatePipelineVersion(
+ &model.PipelineVersion{
+ Name: "pipeline_version_1",
+ Parameters: `[{"Name": "param1"}]`,
+ PipelineId: fakeUUID,
+ Status: model.PipelineVersionReady,
+ })
+
+ db.Close()
+ // On a closed db, deleting a pipeline version ends in an internal error.
+ err := pipelineStore.DeletePipelineVersion(fakeUUIDTwo)
+ assert.Equal(t, codes.Internal, err.(*util.UserError).ExternalStatusCode())
+}
+
+func TestGetPipelineVersion(t *testing.T) {
+ db := NewFakeDbOrFatal()
+ defer db.Close()
+ pipelineStore := NewPipelineStore(
+ db,
+ util.NewFakeTimeForEpoch(),
+ util.NewFakeUUIDGeneratorOrFatal(fakeUUID, nil))
+
+ // Create a pipeline.
+ pipelineStore.CreatePipeline(
+ &model.Pipeline{
+ Name: "pipeline_1",
+ Parameters: `[{"Name": "param1"}]`,
+ Status: model.PipelineReady,
+ })
+
+ // Create a version under the above pipeline.
+ pipelineStore.uuid = util.NewFakeUUIDGeneratorOrFatal(fakeUUIDTwo, nil)
+ pipelineStore.CreatePipelineVersion(
+ &model.PipelineVersion{
+ Name: "pipeline_version_1",
+ Parameters: `[{"Name": "param1"}]`,
+ PipelineId: fakeUUID,
+ Status: model.PipelineVersionReady,
+ })
+
+ // Get pipeline version.
+ pipelineVersion, err := pipelineStore.GetPipelineVersion(fakeUUIDTwo)
+ assert.Nil(t, err)
+ assert.Equal(
+ t,
+ model.PipelineVersion{
+ UUID: fakeUUIDTwo,
+ Name: "pipeline_version_1",
+ CreatedAtInSec: 2,
+ Parameters: `[{"Name": "param1"}]`,
+ PipelineId: fakeUUID,
+ Status: model.PipelineVersionReady,
+ },
+ *pipelineVersion, "Got unexpected pipeline version.")
+}
+
+func TestGetPipelineVersion_InternalError(t *testing.T) {
+ db := NewFakeDbOrFatal()
+ defer db.Close()
+ pipelineStore := NewPipelineStore(
+ db,
+ util.NewFakeTimeForEpoch(),
+ util.NewFakeUUIDGeneratorOrFatal(fakeUUID, nil))
+
+ db.Close()
+ // Internal error because of closed DB.
+ _, err := pipelineStore.GetPipelineVersion("123")
+ assert.Equal(t, codes.Internal, err.(*util.UserError).ExternalStatusCode(),
+ "Expected get pipeline version to return internal error")
+}
+
+func TestGetPipelineVersion_NotFound_VersionStatusCreating(t *testing.T) {
+ db := NewFakeDbOrFatal()
+ defer db.Close()
+ pipelineStore := NewPipelineStore(
+ db,
+ util.NewFakeTimeForEpoch(),
+ util.NewFakeUUIDGeneratorOrFatal(fakeUUID, nil))
+
+ // Create a pipeline.
+ pipelineStore.CreatePipeline(
+ &model.Pipeline{
+ Name: "pipeline_1",
+ Parameters: `[{"Name": "param1"}]`,
+ Status: model.PipelineReady,
+ })
+
+ // Create a version under the above pipeline.
+ pipelineStore.uuid = util.NewFakeUUIDGeneratorOrFatal(fakeUUIDTwo, nil)
+ pipelineStore.CreatePipelineVersion(
+ &model.PipelineVersion{
+ Name: "pipeline_version_1",
+ Parameters: `[{"Name": "param1"}]`,
+ PipelineId: fakeUUID,
+ Status: model.PipelineVersionCreating,
+ })
+
+ _, err := pipelineStore.GetPipelineVersion(fakeUUIDTwo)
+ assert.Equal(t, codes.NotFound, err.(*util.UserError).ExternalStatusCode(),
+ "Expected get pipeline version to return not found")
+}
+
+func TestGetPipelineVersion_NotFoundError(t *testing.T) {
+ db := NewFakeDbOrFatal()
+ defer db.Close()
+ pipelineStore := NewPipelineStore(
+ db,
+ util.NewFakeTimeForEpoch(),
+ util.NewFakeUUIDGeneratorOrFatal(fakeUUID, nil))
+
+ _, err := pipelineStore.GetPipelineVersion(fakeUUID)
+ assert.Equal(t, codes.NotFound, err.(*util.UserError).ExternalStatusCode(),
+ "Expected get pipeline version to return not found")
+}
+
+func TestListPipelineVersion_FilterOutNotReady(t *testing.T) {
+ db := NewFakeDbOrFatal()
+ defer db.Close()
+ pipelineStore := NewPipelineStore(
+ db,
+ util.NewFakeTimeForEpoch(),
+ util.NewFakeUUIDGeneratorOrFatal(fakeUUID, nil))
+
+ // Create a pipeline.
+ pipelineStore.CreatePipeline(
+ &model.Pipeline{
+ Name: "pipeline_1",
+ Parameters: `[{"Name": "param1"}]`,
+ Status: model.PipelineReady,
+ })
+
+ // Create a first version with status ready.
+ pipelineStore.uuid = util.NewFakeUUIDGeneratorOrFatal(fakeUUIDTwo, nil)
+ pipelineStore.CreatePipelineVersion(
+ &model.PipelineVersion{
+ Name: "pipeline_version_1",
+ Parameters: `[{"Name": "param1"}]`,
+ PipelineId: fakeUUID,
+ Status: model.PipelineVersionReady,
+ })
+
+ // Create a second version with status ready.
+ pipelineStore.uuid = util.NewFakeUUIDGeneratorOrFatal(fakeUUIDThree, nil)
+ pipelineStore.CreatePipelineVersion(
+ &model.PipelineVersion{
+ Name: "pipeline_version_2",
+ Parameters: `[{"Name": "param1"}]`,
+ PipelineId: fakeUUID,
+ Status: model.PipelineVersionReady,
+ })
+
+ // Create a third version with status creating.
+ pipelineStore.uuid = util.NewFakeUUIDGeneratorOrFatal(fakeUUIDFour, nil)
+ pipelineStore.CreatePipelineVersion(
+ &model.PipelineVersion{
+ Name: "pipeline_version_3",
+ Parameters: `[{"Name": "param1"}]`,
+ PipelineId: fakeUUID,
+ Status: model.PipelineVersionCreating,
+ })
+
+ pipelineVersionsExpected := []*model.PipelineVersion{
+ &model.PipelineVersion{
+ UUID: fakeUUIDTwo,
+ CreatedAtInSec: 2,
+ Name: "pipeline_version_1",
+ Parameters: `[{"Name": "param1"}]`,
+ PipelineId: fakeUUID,
+ Status: model.PipelineVersionReady},
+ &model.PipelineVersion{
+ UUID: fakeUUIDThree,
+ CreatedAtInSec: 3,
+ Name: "pipeline_version_2",
+ Parameters: `[{"Name": "param1"}]`,
+ PipelineId: fakeUUID,
+ Status: model.PipelineVersionReady}}
+
+ opts, err := list.NewOptions(&model.PipelineVersion{}, 10, "id", nil)
+ assert.Nil(t, err)
+
+ pipelineVersions, total_size, nextPageToken, err :=
+ pipelineStore.ListPipelineVersions(fakeUUID, opts)
+
+ assert.Nil(t, err)
+ assert.Equal(t, "", nextPageToken)
+ assert.Equal(t, 2, total_size)
+ assert.Equal(t, pipelineVersionsExpected, pipelineVersions)
+}
+
+func TestListPipelineVersions_Pagination(t *testing.T) {
+ db := NewFakeDbOrFatal()
+ defer db.Close()
+ pipelineStore := NewPipelineStore(
+ db,
+ util.NewFakeTimeForEpoch(),
+ util.NewFakeUUIDGeneratorOrFatal(fakeUUID, nil))
+
+ // Create a pipeline.
+ pipelineStore.CreatePipeline(
+ &model.Pipeline{
+ Name: "pipeline_1",
+ Parameters: `[{"Name": "param1"}]`,
+ Status: model.PipelineReady,
+ })
+
+ // Create "version_1" with fakeUUIDTwo.
+ pipelineStore.uuid = util.NewFakeUUIDGeneratorOrFatal(fakeUUIDTwo, nil)
+ pipelineStore.CreatePipelineVersion(
+ &model.PipelineVersion{
+ Name: "pipeline_version_1",
+ Parameters: `[{"Name": "param1"}]`,
+ PipelineId: fakeUUID,
+ Status: model.PipelineVersionReady,
+ })
+
+ // Create "version_3" with fakeUUIDThree.
+ pipelineStore.uuid = util.NewFakeUUIDGeneratorOrFatal(fakeUUIDThree, nil)
+ pipelineStore.CreatePipelineVersion(
+ &model.PipelineVersion{
+ Name: "pipeline_version_3",
+ Parameters: `[{"Name": "param1"}]`,
+ PipelineId: fakeUUID,
+ Status: model.PipelineVersionReady,
+ })
+
+ // Create "version_2" with fakeUUIDFour.
+ pipelineStore.uuid = util.NewFakeUUIDGeneratorOrFatal(fakeUUIDFour, nil)
+ pipelineStore.CreatePipelineVersion(
+ &model.PipelineVersion{
+ Name: "pipeline_version_2",
+ Parameters: `[{"Name": "param1"}]`,
+ PipelineId: fakeUUID,
+ Status: model.PipelineVersionReady,
+ })
+
+ // Create "version_4" with fakeUUIDFive.
+ pipelineStore.uuid = util.NewFakeUUIDGeneratorOrFatal(fakeUUIDFive, nil)
+ pipelineStore.CreatePipelineVersion(
+ &model.PipelineVersion{
+ Name: "pipeline_version_4",
+ Parameters: `[{"Name": "param1"}]`,
+ PipelineId: fakeUUID,
+ Status: model.PipelineVersionReady,
+ })
+
+ // List results in 2 pages: the first page containing version_1 and version_2,
+ // and the second page containing version_3 and version_4.
+ opts, err := list.NewOptions(&model.PipelineVersion{}, 2, "name", nil)
+ assert.Nil(t, err)
+ pipelineVersions, total_size, nextPageToken, err :=
+ pipelineStore.ListPipelineVersions(fakeUUID, opts)
+ assert.Nil(t, err)
+ assert.NotEmpty(t, nextPageToken)
+ assert.Equal(t, 4, total_size)
+
+ // First page.
+ assert.Equal(t, pipelineVersions, []*model.PipelineVersion{
+ &model.PipelineVersion{
+ UUID: fakeUUIDTwo,
+ CreatedAtInSec: 2,
+ Name: "pipeline_version_1",
+ Parameters: `[{"Name": "param1"}]`,
+ PipelineId: fakeUUID,
+ Status: model.PipelineVersionReady,
+ },
+ &model.PipelineVersion{
+ UUID: fakeUUIDFour,
+ CreatedAtInSec: 4,
+ Name: "pipeline_version_2",
+ Parameters: `[{"Name": "param1"}]`,
+ PipelineId: fakeUUID,
+ Status: model.PipelineVersionReady,
+ },
+ })
+
+ opts, err = list.NewOptionsFromToken(nextPageToken, 2)
+ assert.Nil(t, err)
+ pipelineVersions, total_size, nextPageToken, err =
+ pipelineStore.ListPipelineVersions(fakeUUID, opts)
+ assert.Nil(t, err)
+
+ // Second page.
+ assert.Empty(t, nextPageToken)
+ assert.Equal(t, 4, total_size)
+ assert.Equal(t, pipelineVersions, []*model.PipelineVersion{
+ &model.PipelineVersion{
+ UUID: fakeUUIDThree,
+ CreatedAtInSec: 3,
+ Name: "pipeline_version_3",
+ Parameters: `[{"Name": "param1"}]`,
+ PipelineId: fakeUUID,
+ Status: model.PipelineVersionReady,
+ },
+ &model.PipelineVersion{
+ UUID: fakeUUIDFive,
+ CreatedAtInSec: 5,
+ Name: "pipeline_version_4",
+ Parameters: `[{"Name": "param1"}]`,
+ PipelineId: fakeUUID,
+ Status: model.PipelineVersionReady,
+ },
+ })
+}
+
+func TestListPipelineVersions_Pagination_Descend(t *testing.T) {
+ db := NewFakeDbOrFatal()
+ defer db.Close()
+ pipelineStore := NewPipelineStore(
+ db,
+ util.NewFakeTimeForEpoch(),
+ util.NewFakeUUIDGeneratorOrFatal(fakeUUID, nil))
+
+ // Create a pipeline.
+ pipelineStore.CreatePipeline(
+ &model.Pipeline{
+ Name: "pipeline_1",
+ Parameters: `[{"Name": "param1"}]`,
+ Status: model.PipelineReady,
+ })
+
+ // Create "version_1" with fakeUUIDTwo.
+ pipelineStore.uuid = util.NewFakeUUIDGeneratorOrFatal(fakeUUIDTwo, nil)
+ pipelineStore.CreatePipelineVersion(
+ &model.PipelineVersion{
+ Name: "pipeline_version_1",
+ Parameters: `[{"Name": "param1"}]`,
+ PipelineId: fakeUUID,
+ Status: model.PipelineVersionReady,
+ })
+
+ // Create "version_3" with fakeUUIDThree.
+ pipelineStore.uuid = util.NewFakeUUIDGeneratorOrFatal(fakeUUIDThree, nil)
+ pipelineStore.CreatePipelineVersion(
+ &model.PipelineVersion{
+ Name: "pipeline_version_3",
+ Parameters: `[{"Name": "param1"}]`,
+ PipelineId: fakeUUID,
+ Status: model.PipelineVersionReady,
+ })
+
+ // Create "version_2" with fakeUUIDFour.
+ pipelineStore.uuid = util.NewFakeUUIDGeneratorOrFatal(fakeUUIDFour, nil)
+ pipelineStore.CreatePipelineVersion(
+ &model.PipelineVersion{
+ Name: "pipeline_version_2",
+ Parameters: `[{"Name": "param1"}]`,
+ PipelineId: fakeUUID,
+ Status: model.PipelineVersionReady,
+ })
+
+ // Create "version_4" with fakeUUIDFive.
+ pipelineStore.uuid = util.NewFakeUUIDGeneratorOrFatal(fakeUUIDFive, nil)
+ pipelineStore.CreatePipelineVersion(
+ &model.PipelineVersion{
+ Name: "pipeline_version_4",
+ Parameters: `[{"Name": "param1"}]`,
+ PipelineId: fakeUUID,
+ Status: model.PipelineVersionReady,
+ })
+
+ // List results in 2 pages: the first page contains "version_4" and "version_3";
+ // the second page contains "version_2" and "version_1".
+ opts, err := list.NewOptions(&model.PipelineVersion{}, 2, "name desc", nil)
+ assert.Nil(t, err)
+ pipelineVersions, total_size, nextPageToken, err :=
+ pipelineStore.ListPipelineVersions(fakeUUID, opts)
+ assert.Nil(t, err)
+ assert.NotEmpty(t, nextPageToken)
+ assert.Equal(t, 4, total_size)
+
+ // First page.
+ assert.Equal(t, pipelineVersions, []*model.PipelineVersion{
+ &model.PipelineVersion{
+ UUID: fakeUUIDFive,
+ CreatedAtInSec: 5,
+ Name: "pipeline_version_4",
+ Parameters: `[{"Name": "param1"}]`,
+ PipelineId: fakeUUID,
+ Status: model.PipelineVersionReady,
+ },
+ &model.PipelineVersion{
+ UUID: fakeUUIDThree,
+ CreatedAtInSec: 3,
+ Name: "pipeline_version_3",
+ Parameters: `[{"Name": "param1"}]`,
+ PipelineId: fakeUUID,
+ Status: model.PipelineVersionReady,
+ },
+ })
+
+ opts, err = list.NewOptionsFromToken(nextPageToken, 2)
+ assert.Nil(t, err)
+ pipelineVersions, total_size, nextPageToken, err =
+ pipelineStore.ListPipelineVersions(fakeUUID, opts)
+ assert.Nil(t, err)
+ assert.Empty(t, nextPageToken)
+ assert.Equal(t, 4, total_size)
+
+ // Second page.
+ assert.Equal(t, pipelineVersions, []*model.PipelineVersion{
+ &model.PipelineVersion{
+ UUID: fakeUUIDFour,
+ CreatedAtInSec: 4,
+ Name: "pipeline_version_2",
+ Parameters: `[{"Name": "param1"}]`,
+ PipelineId: fakeUUID,
+ Status: model.PipelineVersionReady,
+ },
+ &model.PipelineVersion{
+ UUID: fakeUUIDTwo,
+ CreatedAtInSec: 2,
+ Name: "pipeline_version_1",
+ Parameters: `[{"Name": "param1"}]`,
+ PipelineId: fakeUUID,
+ Status: model.PipelineVersionReady,
+ },
+ })
+}
+
+func TestListPipelineVersions_Pagination_LessThanPageSize(t *testing.T) {
+ db := NewFakeDbOrFatal()
+ defer db.Close()
+ pipelineStore := NewPipelineStore(
+ db,
+ util.NewFakeTimeForEpoch(),
+ util.NewFakeUUIDGeneratorOrFatal(fakeUUID, nil))
+
+ // Create a pipeline.
+ pipelineStore.CreatePipeline(
+ &model.Pipeline{
+ Name: "pipeline_1",
+ Parameters: `[{"Name": "param1"}]`,
+ Status: model.PipelineReady,
+ })
+
+ // Create a version under the above pipeline.
+ pipelineStore.uuid = util.NewFakeUUIDGeneratorOrFatal(fakeUUIDTwo, nil) + pipelineStore.CreatePipelineVersion( + &model.PipelineVersion{ + Name: "pipeline_version_1", + Parameters: `[{"Name": "param1"}]`, + PipelineId: fakeUUID, + Status: model.PipelineVersionReady, + }) + + opts, err := list.NewOptions(&model.PipelineVersion{}, 2, "", nil) + assert.Nil(t, err) + pipelineVersions, total_size, nextPageToken, err := + pipelineStore.ListPipelineVersions(fakeUUID, opts) + assert.Nil(t, err) + assert.Equal(t, "", nextPageToken) + assert.Equal(t, 1, total_size) + assert.Equal(t, pipelineVersions, []*model.PipelineVersion{ + &model.PipelineVersion{ + UUID: fakeUUIDTwo, + Name: "pipeline_version_1", + CreatedAtInSec: 2, + Parameters: `[{"Name": "param1"}]`, + PipelineId: fakeUUID, + Status: model.PipelineVersionReady, + }, + }) +} + +func TestListPipelineVersionsError(t *testing.T) { + db := NewFakeDbOrFatal() + defer db.Close() + pipelineStore := NewPipelineStore( + db, + util.NewFakeTimeForEpoch(), + util.NewFakeUUIDGeneratorOrFatal(fakeUUID, nil)) + + db.Close() + // Internal error because of closed DB. + opts, err := list.NewOptions(&model.PipelineVersion{}, 2, "", nil) + assert.Nil(t, err) + _, _, _, err = pipelineStore.ListPipelineVersions(fakeUUID, opts) + assert.Equal(t, codes.Internal, err.(*util.UserError).ExternalStatusCode()) +} + +func TestUpdatePipelineVersionStatus(t *testing.T) { + db := NewFakeDbOrFatal() + defer db.Close() + pipelineStore := NewPipelineStore( + db, + util.NewFakeTimeForEpoch(), + util.NewFakeUUIDGeneratorOrFatal(fakeUUID, nil)) + + // Create a pipeline. + pipelineStore.CreatePipeline( + &model.Pipeline{ + Name: "pipeline_1", + Parameters: `[{"Name": "param1"}]`, + Status: model.PipelineReady, + }) + + // Create a version under the above pipeline. + pipelineStore.uuid = util.NewFakeUUIDGeneratorOrFatal(fakeUUIDTwo, nil) + pipelineVersion, _ := pipelineStore.CreatePipelineVersion( + &model.PipelineVersion{ + Name: "pipeline_version_1", + Parameters: `[{"Name": "param1"}]`, + PipelineId: fakeUUID, + Status: model.PipelineVersionReady, + }) + + // Change version to deleting status + err := pipelineStore.UpdatePipelineVersionStatus( + pipelineVersion.UUID, model.PipelineVersionDeleting) + assert.Nil(t, err) + + // Check the new status by retrieving this pipeline version. + retrievedPipelineVersion, err := + pipelineStore.GetPipelineVersionWithStatus( + pipelineVersion.UUID, model.PipelineVersionDeleting) + assert.Nil(t, err) + assert.Equal(t, *retrievedPipelineVersion, model.PipelineVersion{ + UUID: fakeUUIDTwo, + Name: "pipeline_version_1", + CreatedAtInSec: 2, + Parameters: `[{"Name": "param1"}]`, + PipelineId: fakeUUID, + Status: model.PipelineVersionDeleting, + }) +} + +func TestUpdatePipelineVersionStatusError(t *testing.T) { + db := NewFakeDbOrFatal() + defer db.Close() + pipelineStore := NewPipelineStore( + db, + util.NewFakeTimeForEpoch(), + util.NewFakeUUIDGeneratorOrFatal(fakeUUID, nil)) + + db.Close() + // Internal error because of closed DB. 
+ err := pipelineStore.UpdatePipelineVersionStatus( + fakeUUID, model.PipelineVersionDeleting) + assert.Equal(t, codes.Internal, err.(*util.UserError).ExternalStatusCode()) +} diff --git a/backend/src/apiserver/storage/resource_reference_store.go b/backend/src/apiserver/storage/resource_reference_store.go index b0dab45cf53b..3a43dbfa32ee 100644 --- a/backend/src/apiserver/storage/resource_reference_store.go +++ b/backend/src/apiserver/storage/resource_reference_store.go @@ -18,7 +18,7 @@ var resourceReferenceColumns = []string{"ResourceUUID", "ResourceType", "Referen type ResourceReferenceStoreInterface interface { // Retrieve the resource reference for a given resource id, type and a reference type. GetResourceReference(resourceId string, resourceType common.ResourceType, - referenceType common.ResourceType) (*model.ResourceReference, error) + referenceType common.ResourceType) (*model.ResourceReference, error) } type ResourceReferenceStore struct { @@ -62,6 +62,8 @@ func (s *ResourceReferenceStore) checkReferenceExist(tx *sql.Tx, referenceId str selectBuilder = sq.Select("1").From("jobs").Where(sq.Eq{"uuid": referenceId}) case common.Experiment: selectBuilder = sq.Select("1").From("experiments").Where(sq.Eq{"uuid": referenceId}) + case common.PipelineVersion: + selectBuilder = sq.Select("1").From("pipeline_versions").Where(sq.Eq{"uuid": referenceId}) default: return false } @@ -82,9 +84,9 @@ func (s *ResourceReferenceStore) checkReferenceExist(tx *sql.Tx, referenceId str func (s *ResourceReferenceStore) DeleteResourceReferences(tx *sql.Tx, id string, resourceType common.ResourceType) error { refSql, refArgs, err := sq. Delete("resource_references"). - Where(sq.Or{ - sq.Eq{"ResourceUUID": id, "ResourceType": resourceType}, - sq.Eq{"ReferenceUUID": id, "ReferenceType": resourceType}}). + Where(sq.Or{ + sq.Eq{"ResourceUUID": id, "ResourceType": resourceType}, + sq.Eq{"ReferenceUUID": id, "ReferenceType": resourceType}}). ToSql() _, err = tx.Exec(refSql, refArgs...) if err != nil { @@ -94,24 +96,24 @@ func (s *ResourceReferenceStore) DeleteResourceReferences(tx *sql.Tx, id string, } func (s *ResourceReferenceStore) GetResourceReference(resourceId string, resourceType common.ResourceType, - referenceType common.ResourceType) (*model.ResourceReference, error) { + referenceType common.ResourceType) (*model.ResourceReference, error) { sql, args, err := sq.Select(resourceReferenceColumns...). From("resource_references"). - Where(sq.Eq{ - "ResourceUUID": resourceId, - "ResourceType": resourceType, - "ReferenceType": referenceType}). + Where(sq.Eq{ + "ResourceUUID": resourceId, + "ResourceType": resourceType, + "ReferenceType": referenceType}). Limit(1).ToSql() if err != nil { return nil, util.NewInternalServerError(err, "Failed to create query to get resource reference. "+ - "Resource ID: %s. Resource Type: %s. Reference Type: %s", resourceId, resourceType, referenceType) + "Resource ID: %s. Resource Type: %s. Reference Type: %s", resourceId, resourceType, referenceType) } row, err := s.db.Query(sql, args...) if err != nil { return nil, util.NewInternalServerError(err, "Failed to get resource reference. "+ - "Resource ID: %s. Resource Type: %s. Reference Type: %s", resourceId, resourceType, referenceType) + "Resource ID: %s. Resource Type: %s. 
Reference Type: %s", resourceId, resourceType, referenceType) } defer row.Close() reference, err := s.scanRows(row) diff --git a/backend/src/apiserver/visualization/third_party_licenses.csv b/backend/src/apiserver/visualization/third_party_licenses.csv index b91b98f5e219..b213938a29dd 100644 --- a/backend/src/apiserver/visualization/third_party_licenses.csv +++ b/backend/src/apiserver/visualization/third_party_licenses.csv @@ -49,6 +49,7 @@ h5py,https://github.com/h5py/h5py/blob/master/LICENSE,BSD-3 hdfs,https://github.com/mtth/hdfs/blob/master/LICENSE,MIT httplib2,https://github.com/httplib2/httplib2/blob/master/LICENSE,MIT idna,https://github.com/kjd/idna/blob/master/LICENSE.rst,BSD-3 +importlib-metadata,https://gitlab.com/python-devs/importlib_metadata/blob/master/LICENSE,Apache 2.0 ipykernel,https://github.com/ipython/ipykernel/blob/master/COPYING.md,BSD-3 ipython,https://github.com/ipython/ipython/blob/master/LICENSE,BSD-3 ipython-genutils,https://github.com/ipython/ipython_genutils/blob/master/COPYING.md,BSD-3 @@ -56,6 +57,7 @@ ipywidgets,https://github.com/jupyter-widgets/ipywidgets/blob/master/LICENSE,BSD itables,https://github.com/mwouts/itables/blob/master/LICENSE,MIT jedi,https://github.com/davidhalter/jedi/blob/master/LICENSE.txt,MIT joblib,https://github.com/joblib/joblib/blob/master/LICENSE.txt,BSD-3 +js-regex,https://github.com/Zac-HD/js-regex/blob/master/LICENSE,Mozilla 2.0 jsonschema,https://github.com/Julian/jsonschema/blob/master/COPYING,MIT jupyter,https://github.com/jupyter/jupyter/blob/master/LICENSE,BSD-3 jupyter-client,https://github.com/jupyter/jupyter_client/blob/master/COPYING.md,BSD-3 @@ -64,9 +66,10 @@ jupyter-core,https://github.com/jupyter/jupyter_core/blob/master/COPYING.md,BSD- mistune,https://github.com/lepture/mistune/blob/master/LICENSE,BSD-3 mock,https://github.com/testing-cabal/mock/blob/master/LICENSE.txt,BSD-2 monotonic,https://github.com/atdt/monotonic/blob/master/LICENSE,Apache 2.0 +more-itertools,https://github.com/erikrose/more-itertools/blob/master/LICENSE,MIT nbconvert,https://github.com/jupyter/nbconvert/blob/master/LICENSE,BSD-3 nbformat,https://github.com/jupyter/nbformat/blob/master/COPYING.md,BSD-3 -notebook,https://github.com/jupyter/notebook/blob/master/COPYING.md,BSD-3 +notebook,https://github.com/jupyter/notebook/blob/master/LICENSE,BSD-3 numpy,https://github.com/numpy/numpy/blob/master/LICENSE.txt,BSD-3 oauth2client,https://github.com/googleapis/oauth2client/blob/master/LICENSE,Apache 2.0 oauthlib,https://github.com/oauthlib/oauthlib/blob/master/LICENSE,BSD-3 @@ -116,3 +119,4 @@ urllib3,https://github.com/urllib3/urllib3/blob/master/LICENSE.txt,MIT wcwidth,https://github.com/jquast/wcwidth/blob/master/LICENSE.txt,MIT webencodings,https://github.com/gsnedders/python-webencodings/blob/master/LICENSE,BSD-3 widgetsnbextension,https://github.com/jupyter-widgets/ipywidgets/blob/master/widgetsnbextension/LICENSE,BSD-3 +zipp,https://github.com/jaraco/zipp/blob/master/LICENSE,MIT diff --git a/backend/src/crd/controller/viewer/reconciler/reconciler.go b/backend/src/crd/controller/viewer/reconciler/reconciler.go index a6287e651bc1..1792f47dd5f2 100644 --- a/backend/src/crd/controller/viewer/reconciler/reconciler.go +++ b/backend/src/crd/controller/viewer/reconciler/reconciler.go @@ -165,11 +165,14 @@ func setPodSpecForTensorboard(view *viewerV1beta1.Viewer, s *corev1.PodSpec) { c := &s.Containers[0] c.Name = view.Name + "-pod" - c.Image = "tensorflow/tensorflow" + c.Image = "tensorflow/tensorflow:1.13.2" c.Args = []string{ "tensorboard", 
fmt.Sprintf("--logdir=%s", view.Spec.TensorboardSpec.LogDir), fmt.Sprintf("--path_prefix=/tensorboard/%s/", view.Name), + // This is needed for tf 2.0. We need to optionally add it + // when https://github.com/kubeflow/pipelines/issues/2514 is done + // "--bind_all", } c.Ports = []corev1.ContainerPort{ corev1.ContainerPort{ContainerPort: viewerTargetPort}, diff --git a/backend/src/crd/controller/viewer/reconciler/reconciler_test.go b/backend/src/crd/controller/viewer/reconciler/reconciler_test.go index 19df04a4254e..c6a996eb67df 100644 --- a/backend/src/crd/controller/viewer/reconciler/reconciler_test.go +++ b/backend/src/crd/controller/viewer/reconciler/reconciler_test.go @@ -174,7 +174,7 @@ func TestReconcile_EachViewerCreatesADeployment(t *testing.T) { Spec: corev1.PodSpec{ Containers: []corev1.Container{{ Name: "viewer-123-pod", - Image: "tensorflow/tensorflow", + Image: "tensorflow/tensorflow:1.13.2", Args: []string{ "tensorboard", "--logdir=gs://tensorboard/logdir", @@ -271,7 +271,7 @@ func TestReconcile_ViewerUsesSpecifiedVolumeMountsForDeployment(t *testing.T) { Spec: corev1.PodSpec{ Containers: []corev1.Container{{ Name: "viewer-123-pod", - Image: "tensorflow/tensorflow", + Image: "tensorflow/tensorflow:1.13.2", Args: []string{ "tensorboard", "--logdir=gs://tensorboard/logdir", diff --git a/backend/test/integration/pipeline_api_test.go b/backend/test/integration/pipeline_api_test.go index 4ad99ca07d19..f0f427413d6d 100644 --- a/backend/test/integration/pipeline_api_test.go +++ b/backend/test/integration/pipeline_api_test.go @@ -195,6 +195,19 @@ func verifyPipeline(t *testing.T, pipeline *model.APIPipeline) { {Name: "param1", Value: "hello"}, // Default value in the pipeline template {Name: "param2"}, // No default value in the pipeline }, + // TODO(jingzhang36): after version API launch, remove the following field. + // This is because after the version API launch, we won't have defautl + // version produced automatically when creating pipeline. + DefaultVersion: &model.APIPipelineVersion{ + CreatedAt: pipeline.CreatedAt, + ID: pipeline.ID, + Name: "arguments-parameters.yaml", + Parameters: []*model.APIParameter{ + {Name: "param1", Value: "hello"}, + {Name: "param2"}}, + ResourceReferences: []*model.APIResourceReference{{ + Key: &model.APIResourceKey{ID: pipeline.ID, Type: model.APIResourceTypePIPELINE}, + Relationship: model.APIRelationshipOWNER}}}, } assert.Equal(t, expected, *pipeline) } diff --git a/components/gcp/automl/export_data_to_gcs/component.py b/components/gcp/automl/export_data_to_gcs/component.py new file mode 100644 index 000000000000..499129f7635d --- /dev/null +++ b/components/gcp/automl/export_data_to_gcs/component.py @@ -0,0 +1,53 @@ +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from typing import NamedTuple + + +def automl_export_data_to_gcs( + dataset_path: str, + gcs_output_uri_prefix: str = None, + #retry=None, #=google.api_core.gapic_v1.method.DEFAULT, + timeout: float = None, #=google.api_core.gapic_v1.method.DEFAULT, + metadata: dict = {}, +) -> NamedTuple('Outputs', [('gcs_output_uri_prefix', str)]): + """Exports dataset data to GCS.""" + import sys + import subprocess + subprocess.run([sys.executable, "-m", "pip", "install", "google-cloud-automl==0.4.0", "--quiet", "--no-warn-script-location"], env={"PIP_DISABLE_PIP_VERSION_CHECK": "1"}, check=True) + + import google + from google.cloud import automl + client = automl.AutoMlClient() + + output_config = {"gcs_destination": {"output_uri_prefix": gcs_output_uri_prefix}} + + response = client.export_data( + name=dataset_path, + output_config=output_config, + #retry=retry or google.api_core.gapic_v1.method.DEFAULT + timeout=timeout or google.api_core.gapic_v1.method.DEFAULT, + metadata=metadata, + ) + print('Operation started:') + print(response.operation) + result = response.result() + metadata = response.metadata + print('Operation finished:') + print(metadata) + return (gcs_output_uri_prefix, ) + +if __name__ == '__main__': + import kfp + kfp.components.func_to_container_op(automl_export_data_to_gcs, output_component_file='component.yaml', base_image='python:3.7') diff --git a/components/gcp/automl/export_data_to_gcs/component.yaml b/components/gcp/automl/export_data_to_gcs/component.yaml new file mode 100644 index 000000000000..2a7778eb0d72 --- /dev/null +++ b/components/gcp/automl/export_data_to_gcs/component.yaml @@ -0,0 +1,113 @@ +name: Automl export data to gcs +description: | + Exports dataset data to GCS. +inputs: +- name: dataset_path + type: String +- name: gcs_output_uri_prefix + optional: true + type: String +- name: timeout + optional: true + type: Float +- default: '{}' + name: metadata + optional: true + type: JsonObject +outputs: +- name: gcs_output_uri_prefix + type: String +implementation: + container: + image: python:3.7 + command: + - python3 + - -u + - -c + - | + from typing import NamedTuple + + def automl_export_data_to_gcs( + dataset_path: str, + gcs_output_uri_prefix: str = None, + #retry=None, #=google.api_core.gapic_v1.method.DEFAULT, + timeout: float = None, #=google.api_core.gapic_v1.method.DEFAULT, + metadata: dict = {}, + ) -> NamedTuple('Outputs', [('gcs_output_uri_prefix', str)]): + """Exports dataset data to GCS.""" + import sys + import subprocess + subprocess.run([sys.executable, "-m", "pip", "install", "google-cloud-automl==0.4.0", "--quiet", "--no-warn-script-location"], env={"PIP_DISABLE_PIP_VERSION_CHECK": "1"}, check=True) + + import google + from google.cloud import automl + client = automl.AutoMlClient() + + output_config = {"gcs_destination": {"output_uri_prefix": gcs_output_uri_prefix}} + + response = client.export_data( + name=dataset_path, + output_config=output_config, + #retry=retry or google.api_core.gapic_v1.method.DEFAULT + timeout=timeout or google.api_core.gapic_v1.method.DEFAULT, + metadata=metadata, + ) + print('Operation started:') + print(response.operation) + result = response.result() + metadata = response.metadata + print('Operation finished:') + print(metadata) + return (gcs_output_uri_prefix, ) + + import json + import argparse + _parser = argparse.ArgumentParser(prog='Automl export data to gcs', description='Exports dataset data to GCS.\n') + _parser.add_argument("--dataset-path", dest="dataset_path", type=str, required=True, 
default=argparse.SUPPRESS) + _parser.add_argument("--gcs-output-uri-prefix", dest="gcs_output_uri_prefix", type=str, required=False, default=argparse.SUPPRESS) + _parser.add_argument("--timeout", dest="timeout", type=float, required=False, default=argparse.SUPPRESS) + _parser.add_argument("--metadata", dest="metadata", type=json.loads, required=False, default=argparse.SUPPRESS) + _parser.add_argument("----output-paths", dest="_output_paths", type=str, nargs=1) + _parsed_args = vars(_parser.parse_args()) + _output_files = _parsed_args.pop("_output_paths", []) + + _outputs = automl_export_data_to_gcs(**_parsed_args) + + if not hasattr(_outputs, '__getitem__') or isinstance(_outputs, str): + _outputs = [_outputs] + + _output_serializers = [ + str + ] + + import os + for idx, output_file in enumerate(_output_files): + try: + os.makedirs(os.path.dirname(output_file)) + except OSError: + pass + with open(output_file, 'w') as f: + f.write(_output_serializers[idx](_outputs[idx])) + args: + - --dataset-path + - inputValue: dataset_path + - if: + cond: + isPresent: gcs_output_uri_prefix + then: + - --gcs-output-uri-prefix + - inputValue: gcs_output_uri_prefix + - if: + cond: + isPresent: timeout + then: + - --timeout + - inputValue: timeout + - if: + cond: + isPresent: metadata + then: + - --metadata + - inputValue: metadata + - '----output-paths' + - outputPath: gcs_output_uri_prefix diff --git a/components/gcp/bigquery/query/README.md b/components/gcp/bigquery/query/README.md index 4e1f738b2fbb..dcfff192646f 100644 --- a/components/gcp/bigquery/query/README.md +++ b/components/gcp/bigquery/query/README.md @@ -89,7 +89,7 @@ KFP_PACKAGE = 'https://storage.googleapis.com/ml-pipeline/release/0.1.14/kfp.tar import kfp.components as comp bigquery_query_op = comp.load_component_from_url( - 'https://raw.githubusercontent.com/kubeflow/pipelines/e598176c02f45371336ccaa819409e8ec83743df/components/gcp/bigquery/query/component.yaml') + 'https://raw.githubusercontent.com/kubeflow/pipelines/02c991dd265054b040265b3dfa1903d5b49df859/components/gcp/bigquery/query/component.yaml') help(bigquery_query_op) ``` diff --git a/components/gcp/bigquery/query/component.yaml b/components/gcp/bigquery/query/component.yaml index da9cd3f334ed..28b1374ebf48 100644 --- a/components/gcp/bigquery/query/component.yaml +++ b/components/gcp/bigquery/query/component.yaml @@ -59,7 +59,7 @@ outputs: type: UI metadata implementation: container: - image: gcr.io/ml-pipeline/ml-pipeline-gcp:57d9f7f1cfd458e945d297957621716062d89a49 + image: gcr.io/ml-pipeline/ml-pipeline-gcp:d6d9d8da19f7110fff3a5ba713710402edaeee65 args: [ kfp_component.google.bigquery, query, --query, {inputValue: query}, diff --git a/components/gcp/bigquery/query/sample.ipynb b/components/gcp/bigquery/query/sample.ipynb index 02a12e7be451..40548462536c 100644 --- a/components/gcp/bigquery/query/sample.ipynb +++ b/components/gcp/bigquery/query/sample.ipynb @@ -108,7 +108,7 @@ "import kfp.components as comp\n", "\n", "bigquery_query_op = comp.load_component_from_url(\n", - " 'https://raw.githubusercontent.com/kubeflow/pipelines/e598176c02f45371336ccaa819409e8ec83743df/components/gcp/bigquery/query/component.yaml')\n", + " 'https://raw.githubusercontent.com/kubeflow/pipelines/02c991dd265054b040265b3dfa1903d5b49df859/components/gcp/bigquery/query/component.yaml')\n", "help(bigquery_query_op)" ] }, diff --git a/components/gcp/container/component_sdk/python/kfp_component/google/ml_engine/_common_ops.py 
b/components/gcp/container/component_sdk/python/kfp_component/google/ml_engine/_common_ops.py index fd3949fddbdf..d60a30e173ae 100644 --- a/components/gcp/container/component_sdk/python/kfp_component/google/ml_engine/_common_ops.py +++ b/components/gcp/container/component_sdk/python/kfp_component/google/ml_engine/_common_ops.py @@ -68,7 +68,7 @@ def wait_for_operation_done(ml_client, operation_name, action, wait_interval): )) return operation -def wait_for_job_done(ml_client, project_id, job_id, wait_interval): +def wait_for_job_done(ml_client, project_id, job_id, wait_interval, show_tensorboard=True): """Waits for a CMLE job done. Args: @@ -76,6 +76,7 @@ def wait_for_job_done(ml_client, project_id, job_id, wait_interval): project_id: the ID of the project which has the job job_id: the ID of the job to wait wait_interval: the interval in seconds to wait between polls. + show_tensorboard: True to dump Tensorboard metadata. Returns: The completed job. @@ -88,7 +89,7 @@ def wait_for_job_done(ml_client, project_id, job_id, wait_interval): job = ml_client.get_job(project_id, job_id) print(job) if not metadata_dumped: - _dump_job_metadata(project_id, job_id, job) + _dump_job_metadata(project_id, job_id, job, show_tensorboard=show_tensorboard) metadata_dumped = True if job.get('state', None) in ['SUCCEEDED', 'FAILED', 'CANCELLED']: break @@ -104,7 +105,7 @@ def wait_for_job_done(ml_client, project_id, job_id, wait_interval): job['state'], job.get('errorMessage', ''))) return job -def _dump_job_metadata(project_id, job_id, job): +def _dump_job_metadata(project_id, job_id, job, show_tensorboard=True): display.display(display.Link( 'https://console.cloud.google.com/mlengine/jobs/{}?project={}'.format( job_id, project_id), @@ -115,7 +116,7 @@ def _dump_job_metadata(project_id, job_id, job): project_id, job_id), 'Logs' )) - if 'trainingInput' in job and 'jobDir' in job['trainingInput']: + if show_tensorboard and 'trainingInput' in job and 'jobDir' in job['trainingInput']: display.display(display.Tensorboard( job['trainingInput']['jobDir'])) diff --git a/components/gcp/container/component_sdk/python/kfp_component/google/ml_engine/_wait_job.py b/components/gcp/container/component_sdk/python/kfp_component/google/ml_engine/_wait_job.py index 472ba2c14c50..b6d9c2e53910 100644 --- a/components/gcp/container/component_sdk/python/kfp_component/google/ml_engine/_wait_job.py +++ b/components/gcp/container/component_sdk/python/kfp_component/google/ml_engine/_wait_job.py @@ -18,7 +18,7 @@ from ._client import MLEngineClient from .. import common as gcp_common -def wait_job(project_id, job_id, wait_interval=30): +def wait_job(project_id, job_id, wait_interval=30, show_tensorboard=True): """Waits a MLEngine job. Args: @@ -26,6 +26,7 @@ def wait_job(project_id, job_id, wait_interval=30): job_id (str): Required. The ID of the job to wait. wait_interval (int): optional wait interval between calls to get job status. Defaults to 30. + show_tensorboard (bool): optional. True to dump Tensorboard metadata. Outputs: /tmp/kfp/output/ml_engine/job.json: The json payload of the waiting job. 
@@ -34,4 +35,4 @@ def wait_job(project_id, job_id, wait_interval=30): """ ml_client = MLEngineClient() with KfpExecutionContext(on_cancel=lambda: cancel_job(ml_client, project_id, job_id)): - return wait_for_job_done(ml_client, project_id, job_id, wait_interval) \ No newline at end of file + return wait_for_job_done(ml_client, project_id, job_id, wait_interval, show_tensorboard = show_tensorboard) \ No newline at end of file diff --git a/components/gcp/container/component_sdk/python/setup.py b/components/gcp/container/component_sdk/python/setup.py index 31367f1000ca..6c52292a3cc5 100644 --- a/components/gcp/container/component_sdk/python/setup.py +++ b/components/gcp/container/component_sdk/python/setup.py @@ -15,7 +15,7 @@ from setuptools import setup PACKAGE_NAME = 'kfp-component' -VERSION = '0.1.31' +VERSION = '0.1.34' setup( name=PACKAGE_NAME, diff --git a/components/gcp/dataflow/launch_python/README.md b/components/gcp/dataflow/launch_python/README.md index be57d9e729be..28ef1e287d13 100644 --- a/components/gcp/dataflow/launch_python/README.md +++ b/components/gcp/dataflow/launch_python/README.md @@ -1,12 +1,34 @@ # Name -Data preparation by executing an Apache Beam job in Cloud Dataflow +Component: Data preparation by executing an Apache Beam job in Cloud Dataflow # Labels -GCP, Cloud Dataflow, Apache Beam, Python, Kubeflow +Cloud Dataflow, Apache Beam, Kubeflow # Summary -A Kubeflow Pipeline component that prepares data by submitting an Apache Beam job (authored in Python) to Cloud Dataflow for execution. The Python Beam code is run with Cloud Dataflow Runner. +A Kubeflow pipeline component that prepares data by submitting an Apache Beam job (authored in Python) to Cloud Dataflow for execution. The Python Beam code is run with Cloud Dataflow Runner. + +# Facets + +Use case: +Other + +Technique: +Other + +Input data type: +Tabular + +ML workflow: +Data preparation # Details ## Intended use @@ -16,12 +38,12 @@ Use this component to run a Python Beam code to submit a Cloud Dataflow job as a ## Runtime arguments Name | Description | Optional | Data type| Accepted values | Default | :--- | :----------| :----------| :----------| :----------| :---------- | -python_file_path | The path to the Cloud Storage bucket or local directory containing the Python file to be run. | | GCSPath | | | -project_id | The ID of the Google Cloud Platform (GCP) project containing the Cloud Dataflow job.| | GCPProjectID | | | -staging_dir | The path to the Cloud Storage directory where the staging files are stored. A random subdirectory will be created under the staging directory to keep the job information.This is done so that you can resume the job in case of failure. `staging_dir` is passed as the command line arguments (`staging_location` and `temp_location`) of the Beam code. | Yes | GCSPath | | None | -requirements_file_path | The path to the Cloud Storage bucket or local directory containing the pip requirements file. | Yes | GCSPath | | None | +python_file_path | The path to the Cloud Storage bucket or local directory containing the Python file to be run. | - | GCSPath | - | - | +project_id | The ID of the Google Cloud Platform (GCP) project containing the Cloud Dataflow job.| -| GCPProjectID | -| -| +staging_dir | The path to the Cloud Storage directory where the staging files are stored. A random subdirectory will be created under the staging directory to keep the job information.This is done so that you can resume the job in case of failure. 
The command line arguments, `staging_location` and `temp_location`, of the Beam code are passed through `staging_dir`. | Yes | GCSPath | - | None | +requirements_file_path | The path to the Cloud Storage bucket or local directory containing the pip requirements file. | Yes | GCSPath | - | None | args | The list of arguments to pass to the Python file. | No | List | A list of string arguments | None | -wait_interval | The number of seconds to wait between calls to get the status of the job. | Yes | Integer | | 30 | +wait_interval | The number of seconds to wait between calls to get the status of the job. | Yes | Integer | - | 30 | ## Input data schema @@ -31,21 +53,20 @@ Before you use the component, the following files must be ready in a Cloud Stora The Beam Python code should follow the [Beam programming guide](https://beam.apache.org/documentation/programming-guide/) as well as the following additional requirements to be compatible with this component: - It accepts the command line arguments `--project`, `--temp_location`, `--staging_location`, which are [standard Dataflow Runner options](https://cloud.google.com/dataflow/docs/guides/specifying-exec-params#setting-other-cloud-pipeline-options). -- It enables `info logging` before the start of a Cloud Dataflow job in the Python code. This is important to allow the component to track the status and ID of the job that is created. For example, calling `logging.getLogger().setLevel(logging.INFO)` before any other code. - +- It enables `info logging` before the start of a Cloud Dataflow job in the Python code. This allows the component to track the status and ID of the job that is created. For example, calling `logging.getLogger().setLevel(logging.INFO)` before any other code. ## Output Name | Description :--- | :---------- -job_id | The id of the Cloud Dataflow job that is created. +job_id | The ID of the Cloud Dataflow job that is created. ## Cautions & requirements To use the components, the following requirements must be met: - Cloud Dataflow API is enabled. -- The component is running under a secret Kubeflow user service account in a Kubeflow Pipeline cluster. For example: -``` -component_op(...).apply(gcp.use_gcp_secret('user-gcp-sa')) -``` +- The component is running under a secret Kubeflow user service account in a Kubeflow Pipelines cluster. For example: + ``` + component_op(...).apply(gcp.use_gcp_secret('user-gcp-sa')) + ``` The Kubeflow user service account is a member of: - `roles/dataflow.developer` role of the project. - `roles/storage.objectViewer` role of the Cloud Storage Objects `python_file_path` and `requirements_file_path`. @@ -59,83 +80,49 @@ The component does several things during the execution: - Stores the Cloud Dataflow job information in `staging_dir` so the job can be resumed in case of failure. - Waits for the job to finish. The steps to use the component in a pipeline are: -1. Install the Kubeflow Pipelines SDK: +1. Install the Kubeflow pipeline's SDK: + ```python + %%capture --no-stderr + KFP_PACKAGE = 'https://storage.googleapis.com/ml-pipeline/release/0.1.14/kfp.tar.gz' + !pip3 install $KFP_PACKAGE --upgrade + ``` -```python -%%capture --no-stderr - -KFP_PACKAGE = 'https://storage.googleapis.com/ml-pipeline/release/0.1.14/kfp.tar.gz' -!pip3 install $KFP_PACKAGE --upgrade -``` - -2. Load the component using KFP SDK +2. 
Load the component using the Kubeflow pipeline's SDK: + ```python + import kfp.components as comp -```python -import kfp.components as comp - -dataflow_python_op = comp.load_component_from_url( - 'https://raw.githubusercontent.com/kubeflow/pipelines/e598176c02f45371336ccaa819409e8ec83743df/components/gcp/dataflow/launch_python/component.yaml') -help(dataflow_python_op) -``` + dataflow_python_op = comp.load_component_from_url('https://raw.githubusercontent.com/kubeflow/pipelines/02c991dd265054b040265b3dfa1903d5b49df859/components/gcp/dataflow/launch_python/component.yaml') + help(dataflow_python_op) + ``` ### Sample -Note: The following sample code works in an IPython notebook or directly in Python code. See the sample code below to learn how to execute the template. -In this sample, we run a wordcount sample code in a Kubeflow Pipeline. The output will be stored in a Cloud Storage bucket. Here is the sample code: - +The following sample code works in an IPython notebook or directly in Python code. See the sample code below to learn how to execute the template. +In this sample, we run a wordcount sample code in a Kubeflow pipeline. The output will be stored in a Cloud Storage bucket. Here is the sample code: ```python !gsutil cat gs://ml-pipeline-playground/samples/dataflow/wc/wc.py ``` - - # - # Licensed to the Apache Software Foundation (ASF) under one or more - # contributor license agreements. See the NOTICE file distributed with - # this work for additional information regarding copyright ownership. - # The ASF licenses this file to You under the Apache License, Version 2.0 - # (the "License"); you may not use this file except in compliance with - # the License. You may obtain a copy of the License at - # - # http://www.apache.org/licenses/LICENSE-2.0 - # - # Unless required by applicable law or agreed to in writing, software - # distributed under the License is distributed on an "AS IS" BASIS, - # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - # See the License for the specific language governing permissions and - # limitations under the License. - # - - """A minimalist word-counting workflow that counts words in Shakespeare. - - This is the first in a series of successively more detailed 'word count' - examples. - - Next, see the wordcount pipeline, then the wordcount_debugging pipeline, for - more detailed examples that introduce additional concepts. - Concepts: +Concepts: - 1. Reading data from text files - 2. Specifying 'inline' transforms - 3. Counting a PCollection - 4. Writing data to Cloud Storage as text files - - To execute this pipeline locally, first edit the code to specify the output - location. Output location could be a local file path or an output prefix - on GCS. (Only update the output location marked with the first CHANGE comment.) +1. Reading data from text files. +2. Specifying inline transforms. +3. Counting a PCollection. +4. Writing data to Cloud Storage as text files. + +Notes: + +To execute this pipeline locally, first edit the code to specify the output location. Output location could be a local file path or an output prefix on Cloud Storage. (Only update the output location marked with the first CHANGE comment in the following code.) - To execute this pipeline remotely, first edit the code to set your project ID, - runner type, the staging location, the temp location, and the output location. - The specified GCS bucket(s) must already exist. (Update all the places marked - with a CHANGE comment.) 
+To execute this pipeline remotely, first edit the code to set your project ID, runner type, the staging location, the temp location, and the output location. +The specified Cloud Storage bucket(s) must already exist. (Update all the places marked with a CHANGE comment in the following code.) - Then, run the pipeline as described in the README. It will be deployed and run - using the Google Cloud Dataflow Service. No args are required to run the - pipeline. You can see the results in your output bucket in the GCS browser. - """ +Then, run the pipeline as described in the README. It will be deployed and run using the Cloud Dataflow service. No arguments are required to run the pipeline. You can see the results in your output bucket in the Cloud Storage browser. +```python from __future__ import absolute_import import argparse @@ -161,22 +148,22 @@ In this sample, we run a wordcount sample code in a Kubeflow Pipeline. The outpu help='Input file to process.') parser.add_argument('--output', dest='output', - # CHANGE 1/5: The Google Cloud Storage path is required - # for outputting the results. + # CHANGE 1/5: The Cloud Storage path is required + # to output the results. default='gs://YOUR_OUTPUT_BUCKET/AND_OUTPUT_PREFIX', help='Output file to write results to.') known_args, pipeline_args = parser.parse_known_args(argv) # pipeline_args.extend([ # # CHANGE 2/5: (OPTIONAL) Change this to DataflowRunner to - # # run your pipeline on the Google Cloud Dataflow Service. + # # run your pipeline on the Cloud Dataflow Service. # '--runner=DirectRunner', # # CHANGE 3/5: Your project ID is required in order to run your pipeline on - # # the Google Cloud Dataflow Service. + # # the Cloud Dataflow Service. # '--project=SET_YOUR_PROJECT_ID_HERE', - # # CHANGE 4/5: Your Google Cloud Storage path is required for staging local + # # CHANGE 4/5: Your Cloud Storage path is required for staging local # # files. # '--staging_location=gs://YOUR_BUCKET_NAME/AND_STAGING_DIRECTORY', - # # CHANGE 5/5: Your Google Cloud Storage path is required for temporary + # # CHANGE 5/5: Your Cloud Storage path is required for temporary # # files. # '--temp_location=gs://YOUR_BUCKET_NAME/AND_TEMP_DIRECTORY', # '--job_name=your-wordcount-job', @@ -214,27 +201,24 @@ In this sample, we run a wordcount sample code in a Kubeflow Pipeline. 
The outpu if __name__ == '__main__': logging.getLogger().setLevel(logging.INFO) run() - +``` #### Set sample parameters - ```python -# Required Parameters -PROJECT_ID = '' -GCS_STAGING_DIR = 'gs://' # No ending slash +# Required parameters +PROJECT_ID = '' +GCS_STAGING_DIR = 'gs://' # No ending slash ``` - ```python -# Optional Parameters +# Optional parameters EXPERIMENT_NAME = 'Dataflow - Launch Python' OUTPUT_FILE = '{}/wc/wordcount.out'.format(GCS_STAGING_DIR) ``` #### Example pipeline that uses the component - ```python import kfp.dsl as dsl import kfp.gcp as gcp @@ -264,7 +248,6 @@ def pipeline( #### Compile the pipeline - ```python pipeline_func = pipeline pipeline_filename = pipeline_func.__name__ + '.zip' @@ -274,12 +257,11 @@ compiler.Compiler().compile(pipeline_func, pipeline_filename) #### Submit the pipeline for execution - ```python -#Specify pipeline argument values +#Specify values for the pipeline's arguments arguments = {} -#Get or create an experiment and submit a pipeline run +#Get or create an experiment import kfp client = kfp.Client() experiment = client.create_experiment(EXPERIMENT_NAME) @@ -291,14 +273,13 @@ run_result = client.run_pipeline(experiment.id, run_name, pipeline_filename, arg #### Inspect the output - ```python !gsutil cat $OUTPUT_FILE ``` ## References -* [Component python code](https://github.com/kubeflow/pipelines/blob/master/components/gcp/container/component_sdk/python/kfp_component/google/dataflow/_launch_python.py) -* [Component docker file](https://github.com/kubeflow/pipelines/blob/master/components/gcp/container/Dockerfile) +* [Component Python code](https://github.com/kubeflow/pipelines/blob/master/components/gcp/container/component_sdk/python/kfp_component/google/dataflow/_launch_python.py) +* [Component Docker file](https://github.com/kubeflow/pipelines/blob/master/components/gcp/container/Dockerfile) * [Sample notebook](https://github.com/kubeflow/pipelines/blob/master/components/gcp/dataflow/launch_python/sample.ipynb) * [Dataflow Python Quickstart](https://cloud.google.com/dataflow/docs/quickstarts/quickstart-python) diff --git a/components/gcp/dataflow/launch_python/component.yaml b/components/gcp/dataflow/launch_python/component.yaml index e3f1978ef143..fedee29d94f0 100644 --- a/components/gcp/dataflow/launch_python/component.yaml +++ b/components/gcp/dataflow/launch_python/component.yaml @@ -53,7 +53,7 @@ outputs: type: UI metadata implementation: container: - image: gcr.io/ml-pipeline/ml-pipeline-gcp:57d9f7f1cfd458e945d297957621716062d89a49 + image: gcr.io/ml-pipeline/ml-pipeline-gcp:d6d9d8da19f7110fff3a5ba713710402edaeee65 args: [ kfp_component.google.dataflow, launch_python, --python_file_path, {inputValue: python_file_path}, diff --git a/components/gcp/dataflow/launch_python/sample.ipynb b/components/gcp/dataflow/launch_python/sample.ipynb index 80adefc4f370..0a216fa92955 100644 --- a/components/gcp/dataflow/launch_python/sample.ipynb +++ b/components/gcp/dataflow/launch_python/sample.ipynb @@ -95,7 +95,7 @@ "import kfp.components as comp\n", "\n", "dataflow_python_op = comp.load_component_from_url(\n", - " 'https://raw.githubusercontent.com/kubeflow/pipelines/e598176c02f45371336ccaa819409e8ec83743df/components/gcp/dataflow/launch_python/component.yaml')\n", + " 'https://raw.githubusercontent.com/kubeflow/pipelines/02c991dd265054b040265b3dfa1903d5b49df859/components/gcp/dataflow/launch_python/component.yaml')\n", "help(dataflow_python_op)" ] }, diff --git a/components/gcp/dataflow/launch_template/README.md 
b/components/gcp/dataflow/launch_template/README.md index 32773fcfa6c3..5a2f6f4c05ad 100644 --- a/components/gcp/dataflow/launch_template/README.md +++ b/components/gcp/dataflow/launch_template/README.md @@ -67,7 +67,7 @@ KFP_PACKAGE = 'https://storage.googleapis.com/ml-pipeline/release/0.1.14/kfp.tar import kfp.components as comp dataflow_template_op = comp.load_component_from_url( - 'https://raw.githubusercontent.com/kubeflow/pipelines/e598176c02f45371336ccaa819409e8ec83743df/components/gcp/dataflow/launch_template/component.yaml') + 'https://raw.githubusercontent.com/kubeflow/pipelines/02c991dd265054b040265b3dfa1903d5b49df859/components/gcp/dataflow/launch_template/component.yaml') help(dataflow_template_op) ``` diff --git a/components/gcp/dataflow/launch_template/component.yaml b/components/gcp/dataflow/launch_template/component.yaml index 071033f0961f..baa7b8960544 100644 --- a/components/gcp/dataflow/launch_template/component.yaml +++ b/components/gcp/dataflow/launch_template/component.yaml @@ -63,7 +63,7 @@ outputs: type: UI metadata implementation: container: - image: gcr.io/ml-pipeline/ml-pipeline-gcp:57d9f7f1cfd458e945d297957621716062d89a49 + image: gcr.io/ml-pipeline/ml-pipeline-gcp:d6d9d8da19f7110fff3a5ba713710402edaeee65 args: [ kfp_component.google.dataflow, launch_template, --project_id, {inputValue: project_id}, diff --git a/components/gcp/dataflow/launch_template/sample.ipynb b/components/gcp/dataflow/launch_template/sample.ipynb index 8e8ab9605708..767f7e5d8273 100644 --- a/components/gcp/dataflow/launch_template/sample.ipynb +++ b/components/gcp/dataflow/launch_template/sample.ipynb @@ -85,7 +85,7 @@ "import kfp.components as comp\n", "\n", "dataflow_template_op = comp.load_component_from_url(\n", - " 'https://raw.githubusercontent.com/kubeflow/pipelines/e598176c02f45371336ccaa819409e8ec83743df/components/gcp/dataflow/launch_template/component.yaml')\n", + " 'https://raw.githubusercontent.com/kubeflow/pipelines/02c991dd265054b040265b3dfa1903d5b49df859/components/gcp/dataflow/launch_template/component.yaml')\n", "help(dataflow_template_op)" ] }, diff --git a/components/gcp/dataproc/create_cluster/README.md b/components/gcp/dataproc/create_cluster/README.md index 743e1159b77f..7c40a5185d95 100644 --- a/components/gcp/dataproc/create_cluster/README.md +++ b/components/gcp/dataproc/create_cluster/README.md @@ -92,7 +92,7 @@ Follow these steps to use the component in a pipeline: ```python import kfp.components as comp - dataproc_create_cluster_op = comp.load_component_from_url('https://raw.githubusercontent.com/kubeflow/pipelines/e598176c02f45371336ccaa819409e8ec83743df/components/gcp/dataproc/create_cluster/component.yaml') + dataproc_create_cluster_op = comp.load_component_from_url('https://raw.githubusercontent.com/kubeflow/pipelines/02c991dd265054b040265b3dfa1903d5b49df859/components/gcp/dataproc/create_cluster/component.yaml') help(dataproc_create_cluster_op) ``` diff --git a/components/gcp/dataproc/create_cluster/component.yaml b/components/gcp/dataproc/create_cluster/component.yaml index 555c9f394a92..f251d3b23c5e 100644 --- a/components/gcp/dataproc/create_cluster/component.yaml +++ b/components/gcp/dataproc/create_cluster/component.yaml @@ -70,7 +70,7 @@ outputs: type: UI metadata implementation: container: - image: gcr.io/ml-pipeline/ml-pipeline-gcp:57d9f7f1cfd458e945d297957621716062d89a49 + image: gcr.io/ml-pipeline/ml-pipeline-gcp:d6d9d8da19f7110fff3a5ba713710402edaeee65 args: [ kfp_component.google.dataproc, create_cluster, --project_id, {inputValue: project_id}, 
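

The hunks in this change re-pin each GCP component in two places at once: the runtime image in `component.yaml` moves from tag `57d9f7f1cfd458e945d297957621716062d89a49` to `d6d9d8da19f7110fff3a5ba713710402edaeee65`, and every documented `load_component_from_url` call moves from commit `e598176c02f45371336ccaa819409e8ec83743df` to `02c991dd265054b040265b3dfa1903d5b49df859`. As a minimal sketch of how a pipeline author picks up such a pin (the `COMMIT` constant is our own naming; the URL layout and loader call come from the READMEs in this diff):

```python
import kfp.components as comp

# Loading the component definition from a commit-pinned raw URL keeps the
# resolved gcr.io/ml-pipeline/ml-pipeline-gcp image reproducible across runs.
COMMIT = '02c991dd265054b040265b3dfa1903d5b49df859'
dataproc_create_cluster_op = comp.load_component_from_url(
    'https://raw.githubusercontent.com/kubeflow/pipelines/' + COMMIT +
    '/components/gcp/dataproc/create_cluster/component.yaml')

# help() prints the inputs and outputs declared in component.yaml.
help(dataproc_create_cluster_op)
```

Bumping the commit and the image tag together is the design point: the YAML at the pinned commit names the matching image, so neither can drift independently.
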
diff --git a/components/gcp/dataproc/create_cluster/sample.ipynb b/components/gcp/dataproc/create_cluster/sample.ipynb index d5295ce6bc75..b7ee5167fa5b 100644 --- a/components/gcp/dataproc/create_cluster/sample.ipynb +++ b/components/gcp/dataproc/create_cluster/sample.ipynb @@ -92,7 +92,7 @@ "import kfp.components as comp\n", "\n", "dataproc_create_cluster_op = comp.load_component_from_url(\n", - " 'https://raw.githubusercontent.com/kubeflow/pipelines/e598176c02f45371336ccaa819409e8ec83743df/components/gcp/dataproc/create_cluster/component.yaml')\n", + " 'https://raw.githubusercontent.com/kubeflow/pipelines/02c991dd265054b040265b3dfa1903d5b49df859/components/gcp/dataproc/create_cluster/component.yaml')\n", "help(dataproc_create_cluster_op)" ] }, diff --git a/components/gcp/dataproc/delete_cluster/README.md b/components/gcp/dataproc/delete_cluster/README.md index 56f409789a02..62fb340ceafa 100644 --- a/components/gcp/dataproc/delete_cluster/README.md +++ b/components/gcp/dataproc/delete_cluster/README.md @@ -70,7 +70,7 @@ Follow these steps to use the component in a pipeline: ```python import kfp.components as comp - dataproc_delete_cluster_op = comp.load_component_from_url('https://raw.githubusercontent.com/kubeflow/pipelines/e598176c02f45371336ccaa819409e8ec83743df/components/gcp/dataproc/delete_cluster/component.yaml') + dataproc_delete_cluster_op = comp.load_component_from_url('https://raw.githubusercontent.com/kubeflow/pipelines/02c991dd265054b040265b3dfa1903d5b49df859/components/gcp/dataproc/delete_cluster/component.yaml') help(dataproc_delete_cluster_op) ``` diff --git a/components/gcp/dataproc/delete_cluster/component.yaml b/components/gcp/dataproc/delete_cluster/component.yaml index 6f97e1c46a3f..408f67bb0e94 100644 --- a/components/gcp/dataproc/delete_cluster/component.yaml +++ b/components/gcp/dataproc/delete_cluster/component.yaml @@ -36,7 +36,7 @@ inputs: type: Integer implementation: container: - image: gcr.io/ml-pipeline/ml-pipeline-gcp:57d9f7f1cfd458e945d297957621716062d89a49 + image: gcr.io/ml-pipeline/ml-pipeline-gcp:d6d9d8da19f7110fff3a5ba713710402edaeee65 args: [ kfp_component.google.dataproc, delete_cluster, --project_id, {inputValue: project_id}, diff --git a/components/gcp/dataproc/delete_cluster/sample.ipynb b/components/gcp/dataproc/delete_cluster/sample.ipynb index feda7414e002..debc67411185 100644 --- a/components/gcp/dataproc/delete_cluster/sample.ipynb +++ b/components/gcp/dataproc/delete_cluster/sample.ipynb @@ -75,7 +75,7 @@ "import kfp.components as comp\n", "\n", "dataproc_delete_cluster_op = comp.load_component_from_url(\n", - " 'https://raw.githubusercontent.com/kubeflow/pipelines/e598176c02f45371336ccaa819409e8ec83743df/components/gcp/dataproc/delete_cluster/component.yaml')\n", + " 'https://raw.githubusercontent.com/kubeflow/pipelines/02c991dd265054b040265b3dfa1903d5b49df859/components/gcp/dataproc/delete_cluster/component.yaml')\n", "help(dataproc_delete_cluster_op)" ] }, diff --git a/components/gcp/dataproc/submit_hadoop_job/README.md b/components/gcp/dataproc/submit_hadoop_job/README.md index e9e784efbcaa..038a1c23721f 100644 --- a/components/gcp/dataproc/submit_hadoop_job/README.md +++ b/components/gcp/dataproc/submit_hadoop_job/README.md @@ -1,33 +1,52 @@ # Name -Data preparation using Hadoop MapReduce on YARN with Cloud Dataproc +Component: Data preparation using Hadoop MapReduce on YARN with Cloud Dataproc -# Label -Cloud Dataproc, GCP, Cloud Storage, Hadoop, YARN, Apache, MapReduce +# Labels +Cloud Dataproc, Hadoop, YARN, Apache, MapReduce # 
Summary -A Kubeflow Pipeline component to prepare data by submitting an Apache Hadoop MapReduce job on Apache Hadoop YARN to Cloud Dataproc. +A Kubeflow pipeline component to prepare data by submitting an Apache Hadoop MapReduce job on Apache Hadoop YARN to Cloud Dataproc. + +# Facets + +Use case: + +Technique: + +Input data type: + +ML workflow: # Details ## Intended use -Use the component to run an Apache Hadoop MapReduce job as one preprocessing step in a Kubeflow Pipeline. +Use the component to run an Apache Hadoop MapReduce job as one preprocessing step in a Kubeflow pipeline. ## Runtime arguments | Argument | Description | Optional | Data type | Accepted values | Default | |----------|-------------|----------|-----------|-----------------|---------| -| project_id | The Google Cloud Platform (GCP) project ID that the cluster belongs to. | No | GCPProjectID | | | -| region | The Dataproc region to handle the request. | No | GCPRegion | | | -| cluster_name | The name of the cluster to run the job. | No | String | | | -| main_jar_file_uri | The Hadoop Compatible Filesystem (HCFS) URI of the JAR file containing the main class to execute. | No | List | | | -| main_class | The name of the driver's main class. The JAR file that contains the class must be either in the default CLASSPATH or specified in `hadoop_job.jarFileUris`. | No | String | | | -| args | The arguments to pass to the driver. Do not include arguments, such as -libjars or -Dfoo=bar, that can be set as job properties, since a collision may occur that causes an incorrect job submission. | Yes | List | | None | -| hadoop_job | The payload of a [HadoopJob](https://cloud.google.com/dataproc/docs/reference/rest/v1/HadoopJob). | Yes | Dict | | None | -| job | The payload of a [Dataproc job](https://cloud.google.com/dataproc/docs/reference/rest/v1/projects.regions.jobs). | Yes | Dict | | None | -| wait_interval | The number of seconds to pause between polling the operation. | Yes | Integer | | 30 | - -Note: -`main_jar_file_uri`: The examples for the files are : +| project_id | The Google Cloud Platform (GCP) project ID that the cluster belongs to. | No | GCPProjectID | - | - | +| region | The Dataproc region to handle the request. | No | GCPRegion | - | - | +| cluster_name | The name of the cluster to run the job. | No | String | - | - | +| main_jar_file_uri | The Hadoop Compatible Filesystem (HCFS) URI of the JAR file containing the main class to execute. | No | List |- |- | +| main_class | The name of the driver's main class. The JAR file that contains the class must be either in the default CLASSPATH or specified in `hadoop_job.jarFileUris`. | No | String |- | - | +| args | The arguments to pass to the driver. Do not include arguments, such as -libjars or -Dfoo=bar, that can be set as job properties, since a collision may occur that causes an incorrect job submission. | Yes | List | - | None | +| hadoop_job | The payload of a [HadoopJob](https://cloud.google.com/dataproc/docs/reference/rest/v1/HadoopJob). | Yes | Dict | - | None | +| job | The payload of a [Dataproc job](https://cloud.google.com/dataproc/docs/reference/rest/v1/projects.regions.jobs). | Yes | Dict | -| None | +| wait_interval | The number of seconds to pause between polling the operation. 
| Yes | Integer | - | 30 | + +Note: + +`main_jar_file_uri`: The examples for the files are: - `gs://foo-bucket/analytics-binaries/extract-useful-metrics-mr.jar` - `hdfs:/tmp/test-samples/custom-wordcount.jarfile:///home/usr/lib/hadoop-mapreduce/hadoop-mapreduce-examples.jar` @@ -46,78 +65,67 @@ To use the component, you must: ```python component_op(...).apply(gcp.use_gcp_secret('user-gcp-sa')) ``` -* Grant the Kubeflow user service account the role `roles/dataproc.editor` on the project. +* Grant the Kubeflow user service account the role, `roles/dataproc.editor`, on the project. ## Detailed description -This component creates a Hadoop job from [Dataproc submit job REST API](https://cloud.google.com/dataproc/docs/reference/rest/v1/projects.regions.jobs/submit). +This component creates a Hadoop job from the [Dataproc submit job REST API](https://cloud.google.com/dataproc/docs/reference/rest/v1/projects.regions.jobs/submit). Follow these steps to use the component in a pipeline: -1. Install the Kubeflow Pipeline SDK: - +1. Install the Kubeflow pipeline's SDK: + ```python + %%capture --no-stderr -```python -%%capture --no-stderr - -KFP_PACKAGE = 'https://storage.googleapis.com/ml-pipeline/release/0.1.14/kfp.tar.gz' -!pip3 install $KFP_PACKAGE --upgrade -``` - -2. Load the component using KFP SDK - + KFP_PACKAGE = 'https://storage.googleapis.com/ml-pipeline/release/0.1.14/kfp.tar.gz' + !pip3 install $KFP_PACKAGE --upgrade + ``` -```python -import kfp.components as comp +2. Load the component using the Kubeflow pipeline's SDK: -dataproc_submit_hadoop_job_op = comp.load_component_from_url( - 'https://raw.githubusercontent.com/kubeflow/pipelines/e598176c02f45371336ccaa819409e8ec83743df/components/gcp/dataproc/submit_hadoop_job/component.yaml') -help(dataproc_submit_hadoop_job_op) -``` + ```python + import kfp.components as comp -## Sample -Note: The following sample code works in an IPython notebook or directly in Python code. See the sample code below to learn how to execute the template. + dataproc_submit_hadoop_job_op = comp.load_component_from_url('https://raw.githubusercontent.com/kubeflow/pipelines/02c991dd265054b040265b3dfa1903d5b49df859/components/gcp/dataproc/submit_hadoop_job/component.yaml') + help(dataproc_submit_hadoop_job_op) + ``` +### Sample +The following sample code works in an IPython notebook or directly in Python code. See the sample code below to learn how to execute the template. -### Setup a Dataproc cluster +#### Setup a Dataproc cluster [Create a new Dataproc cluster](https://cloud.google.com/dataproc/docs/guides/create-cluster) (or reuse an existing one) before running the sample code. ### Prepare a Hadoop job -Upload your Hadoop JAR file to a Cloud Storage bucket. In the sample, we will use a JAR file that is preinstalled in the main cluster, so there is no need to provide `main_jar_file_uri`. - -Here is the [WordCount example source code](https://github.com/apache/hadoop/blob/trunk/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/WordCount.java). +Upload your Hadoop JAR file to a Cloud Storage bucket. In the sample, we will use a JAR file that is preinstalled in the main cluster, so you don't have to provide the argument, `main_jar_file_uri`. -To package a self-contained Hadoop MapReduce application from the source code, follow the [MapReduce Tutorial](https://hadoop.apache.org/docs/current/hadoop-mapreduce-client/hadoop-mapreduce-client-core/MapReduceTutorial.html). 
- - -### Set sample parameters +To package a self-contained Hadoop MapReduce application from the [WordCount example source code](https://github.com/apache/hadoop/blob/trunk/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/WordCount.java), follow the [MapReduce Tutorial](https://hadoop.apache.org/docs/current/hadoop-mapreduce-client/hadoop-mapreduce-client-core/MapReduceTutorial.html). +#### Set sample parameters ```python -PROJECT_ID = '' -CLUSTER_NAME = '' -OUTPUT_GCS_PATH = '' +PROJECT_ID = '' +CLUSTER_NAME = '' +OUTPUT_GCS_PATH = '' REGION = 'us-central1' MAIN_CLASS = 'org.apache.hadoop.examples.WordCount' INTPUT_GCS_PATH = 'gs://ml-pipeline-playground/shakespeare1.txt' EXPERIMENT_NAME = 'Dataproc - Submit Hadoop Job' ``` -#### Insepct Input Data +#### Inspect the input data The input file is a simple text file: - ```python !gsutil cat $INTPUT_GCS_PATH ``` -### Clean up the existing output files (optional) -This is needed because the sample code requires the output folder to be a clean folder. To continue to run the sample, make sure that the service account of the notebook server has access to the `OUTPUT_GCS_PATH`. - -CAUTION: This will remove all blob files under `OUTPUT_GCS_PATH`. +#### Clean up the existing output files (optional) +This is needed because the sample code requires the output folder to be a clean folder. To continue to run the sample, make sure that the service account of the notebook server has access to `OUTPUT_GCS_PATH`. +Caution: This will remove all blob files under `OUTPUT_GCS_PATH`. ```python !gsutil rm $OUTPUT_GCS_PATH/** @@ -125,7 +133,6 @@ CAUTION: This will remove all blob files under `OUTPUT_GCS_PATH`. #### Example pipeline that uses the component - ```python import kfp.dsl as dsl import kfp.gcp as gcp @@ -174,10 +181,10 @@ compiler.Compiler().compile(pipeline_func, pipeline_filename) ```python -#Specify pipeline argument values +#Specify values for the pipeline's arguments arguments = {} -#Get or create an experiment and submit a pipeline run +#Get or create an experiment import kfp client = kfp.Client() experiment = client.create_experiment(EXPERIMENT_NAME) @@ -187,10 +194,9 @@ run_name = pipeline_func.__name__ + ' run' run_result = client.run_pipeline(experiment.id, run_name, pipeline_filename, arguments) ``` -### Inspect the output +#### Inspect the output The sample in the notebook will count the words in the input text and save them in sharded files. The command to inspect the output is: - ```python !gsutil cat $OUTPUT_GCS_PATH/* ``` @@ -201,5 +207,5 @@ The sample in the notebook will count the words in the input text and save them * [Sample notebook](https://github.com/kubeflow/pipelines/blob/master/components/gcp/dataproc/submit_hadoop_job/sample.ipynb) * [Dataproc HadoopJob](https://cloud.google.com/dataproc/docs/reference/rest/v1/HadoopJob) -## License +# License By deploying or using this software you agree to comply with the [AI Hub Terms of Service](https://aihub.cloud.google.com/u/0/aihub-tos) and the [Google APIs Terms of Service](https://developers.google.com/terms/). To the extent of a direct conflict of terms, the AI Hub Terms of Service will control. 
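Putting the pieces of this README together, a minimal sketch of the full pipeline could look like the following. It assumes the sample parameters defined above (`PROJECT_ID`, `REGION`, `CLUSTER_NAME`, `MAIN_CLASS`, `INTPUT_GCS_PATH`, `OUTPUT_GCS_PATH`) and the `dataproc_submit_hadoop_job_op` loaded earlier; it is illustrative rather than a verbatim copy of the sample notebook:

```python
import json

import kfp.dsl as dsl
import kfp.gcp as gcp

@dsl.pipeline(
    name='Dataproc submit Hadoop job pipeline',
    description='Runs the prebuilt WordCount example on an existing cluster.'
)
def dataproc_submit_hadoop_job_pipeline(
    project_id=PROJECT_ID,
    region=REGION,
    cluster_name=CLUSTER_NAME,
    main_class=MAIN_CLASS,
    # List-typed component inputs are passed as JSON-serialized strings.
    args=json.dumps([INTPUT_GCS_PATH, OUTPUT_GCS_PATH]),
):
    dataproc_submit_hadoop_job_op(
        project_id=project_id,
        region=region,
        cluster_name=cluster_name,
        main_jar_file_uri='',  # empty because WordCount ships with the cluster
        main_class=main_class,
        args=args,
        hadoop_job='',
        job='{}',
        wait_interval='30').apply(gcp.use_gcp_secret('user-gcp-sa'))
```

Compiling and submitting this function then follows the same `compiler.Compiler().compile(...)` and `client.run_pipeline(...)` steps shown in the README.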
diff --git a/components/gcp/dataproc/submit_hadoop_job/component.yaml b/components/gcp/dataproc/submit_hadoop_job/component.yaml index f18d11ab9a21..4b39fd921f36 100644 --- a/components/gcp/dataproc/submit_hadoop_job/component.yaml +++ b/components/gcp/dataproc/submit_hadoop_job/component.yaml @@ -80,7 +80,7 @@ outputs: type: UI metadata implementation: container: - image: gcr.io/ml-pipeline/ml-pipeline-gcp:57d9f7f1cfd458e945d297957621716062d89a49 + image: gcr.io/ml-pipeline/ml-pipeline-gcp:d6d9d8da19f7110fff3a5ba713710402edaeee65 args: [ kfp_component.google.dataproc, submit_hadoop_job, --project_id, {inputValue: project_id}, diff --git a/components/gcp/dataproc/submit_hadoop_job/sample.ipynb b/components/gcp/dataproc/submit_hadoop_job/sample.ipynb index 653b1e06d52e..14da69202c4a 100644 --- a/components/gcp/dataproc/submit_hadoop_job/sample.ipynb +++ b/components/gcp/dataproc/submit_hadoop_job/sample.ipynb @@ -90,7 +90,7 @@ "import kfp.components as comp\n", "\n", "dataproc_submit_hadoop_job_op = comp.load_component_from_url(\n", - " 'https://raw.githubusercontent.com/kubeflow/pipelines/e598176c02f45371336ccaa819409e8ec83743df/components/gcp/dataproc/submit_hadoop_job/component.yaml')\n", + " 'https://raw.githubusercontent.com/kubeflow/pipelines/02c991dd265054b040265b3dfa1903d5b49df859/components/gcp/dataproc/submit_hadoop_job/component.yaml')\n", "help(dataproc_submit_hadoop_job_op)" ] }, diff --git a/components/gcp/dataproc/submit_hive_job/README.md b/components/gcp/dataproc/submit_hive_job/README.md index 9c15b6e83f28..910c24cb12aa 100644 --- a/components/gcp/dataproc/submit_hive_job/README.md +++ b/components/gcp/dataproc/submit_hive_job/README.md @@ -1,16 +1,34 @@ # Name -Data preparation using Apache Hive on YARN with Cloud Dataproc +Component: Data preparation using Apache Hive on YARN with Cloud Dataproc # Label -Cloud Dataproc, GCP, Cloud Storage, YARN, Hive, Apache +Cloud Dataproc, YARN, Apache Hive # Summary -A Kubeflow Pipeline component to prepare data by submitting an Apache Hive job on YARN to Cloud Dataproc. +A Kubeflow pipeline component to prepare data by submitting an Apache Hive job on YARN to Cloud Dataproc. + +# Facets + +Use case: + +Technique: + +Input data type: + +ML workflow: # Details ## Intended use -Use the component to run an Apache Hive job as one preprocessing step in a Kubeflow Pipeline. +Use the component to run an Apache Hive job as one preprocessing step in a Kubeflow pipeline. ## Runtime arguments | Argument | Description | Optional | Data type | Accepted values | Default | @@ -19,9 +37,9 @@ Use the component to run an Apache Hive job as one preprocessing step in a Kubef | region | The Cloud Dataproc region to handle the request. | No | GCPRegion | | | | cluster_name | The name of the cluster to run the job. | No | String | | | | queries | The queries to execute the Hive job. Specify multiple queries in one string by separating them with semicolons. You do not need to terminate queries with semicolons. | Yes | List | | None | -| query_file_uri | The HCFS URI of the script that contains the Hive queries. | Yes | GCSPath | | None | +| query_file_uri | The Hadoop Compatible Filesystem (HCFS) URI of the script that contains the Hive queries. | Yes | GCSPath | | None | | script_variables | Mapping of the query’s variable names to their values (equivalent to the Hive command: SET name="value";). 
| Yes | Dict | | None | -| hive_job | The payload of a [HiveJob](https://cloud.google.com/dataproc/docs/reference/rest/v1/HiveJob) | Yes | Dict | | None | +| hive_job | The payload of a [Hive job](https://cloud.google.com/dataproc/docs/reference/rest/v1/HiveJob) | Yes | Dict | | None | | job | The payload of a [Dataproc job](https://cloud.google.com/dataproc/docs/reference/rest/v1/projects.regions.jobs). | Yes | Dict | | None | | wait_interval | The number of seconds to pause between polling the operation. | Yes | Integer | | 30 | @@ -42,34 +60,30 @@ To use the component, you must: * Grant the Kubeflow user service account the role `roles/dataproc.editor` on the project. ## Detailed description -This component creates a Hive job from [Dataproc submit job REST API](https://cloud.google.com/dataproc/docs/reference/rest/v1/projects.regions.jobs/submit). +This component creates a Hive job from the [Dataproc submit job REST API](https://cloud.google.com/dataproc/docs/reference/rest/v1/projects.regions.jobs/submit). Follow these steps to use the component in a pipeline: -1. Install the Kubeflow Pipeline SDK: - - - -```python -%%capture --no-stderr +1. Install the Kubeflow pipeline's SDK: -KFP_PACKAGE = 'https://storage.googleapis.com/ml-pipeline/release/0.1.14/kfp.tar.gz' -!pip3 install $KFP_PACKAGE --upgrade -``` + ```python + %%capture --no-stderr -2. Load the component using KFP SDK + KFP_PACKAGE = 'https://storage.googleapis.com/ml-pipeline/release/0.1.14/kfp.tar.gz' + !pip3 install $KFP_PACKAGE --upgrade + ``` +2. Load the component using the Kubeflow pipeline's SDK: -```python -import kfp.components as comp + ```python + import kfp.components as comp -dataproc_submit_hive_job_op = comp.load_component_from_url( - 'https://raw.githubusercontent.com/kubeflow/pipelines/e598176c02f45371336ccaa819409e8ec83743df/components/gcp/dataproc/submit_hive_job/component.yaml') -help(dataproc_submit_hive_job_op) -``` + dataproc_submit_hive_job_op = comp.load_component_from_url('https://raw.githubusercontent.com/kubeflow/pipelines/02c991dd265054b040265b3dfa1903d5b49df859/components/gcp/dataproc/submit_hive_job/component.yaml') + help(dataproc_submit_hive_job_op) + ``` ### Sample -Note: The following sample code works in an IPython notebook or directly in Python code. See the sample code below to learn how to execute the template. +The following sample code works in an IPython notebook or directly in Python code. See the sample code below to learn how to execute the template. #### Setup a Dataproc cluster @@ -78,17 +92,15 @@ Note: The following sample code works in an IPython notebook or directly in Pyth #### Prepare a Hive query -Put your Hive queries in the queries list, or upload your Hive queries into a file saved in a Cloud Storage bucket and then enter the Cloud Storage bucket’s path in `query_file_uri.` In this sample, we will use a hard coded query in the queries list to select data from a public CSV file from Cloud Storage. +You can put your Hive queries in the `queries` list, or you can use `query_file_uri`. In this sample, we will use a hard coded query in the `queries` list to select data from a public CSV file in Cloud Storage. 
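As a minimal illustration of the two options (the bucket path is a placeholder, and the JSON serialization mirrors how the sample notebooks pass List-typed inputs):

```python
import json

# Option 1 (used in this sample): a hard-coded query, JSON-serialized
# because the component's `queries` input is List-typed.
queries = json.dumps(['SHOW DATABASES;'])

# Option 2 (hypothetical): upload a script containing the queries to a
# bucket you own and pass its URI instead; the path is a placeholder.
query_file_uri = 'gs://your-bucket/hive/queries.q'
```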
For more details, see the [Hive language manual.](https://cwiki.apache.org/confluence/display/Hive/LanguageManual) - #### Set sample parameters - ```python -PROJECT_ID = '' -CLUSTER_NAME = '' +PROJECT_ID = '' +CLUSTER_NAME = '' REGION = 'us-central1' QUERY = ''' DROP TABLE IF EXISTS natality_csv; @@ -160,10 +172,10 @@ compiler.Compiler().compile(pipeline_func, pipeline_filename) ```python -#Specify pipeline argument values +#Specify values for the pipeline's arguments arguments = {} -#Get or create an experiment and submit a pipeline run +#Get or create an experiment import kfp client = kfp.Client() experiment = client.create_experiment(EXPERIMENT_NAME) @@ -174,8 +186,8 @@ run_result = client.run_pipeline(experiment.id, run_name, pipeline_filename, arg ``` ## References -* [Component python code](https://github.com/kubeflow/pipelines/blob/master/components/gcp/container/component_sdk/python/kfp_component/google/dataproc/_submit_hive_job.py) -* [Component docker file](https://github.com/kubeflow/pipelines/blob/master/components/gcp/container/Dockerfile) +* [Component Python code](https://github.com/kubeflow/pipelines/blob/master/components/gcp/container/component_sdk/python/kfp_component/google/dataproc/_submit_hive_job.py) +* [Component Docker file](https://github.com/kubeflow/pipelines/blob/master/components/gcp/container/Dockerfile) * [Sample notebook](https://github.com/kubeflow/pipelines/blob/master/components/gcp/dataproc/submit_hive_job/sample.ipynb) * [Dataproc HiveJob](https://cloud.google.com/dataproc/docs/reference/rest/v1/HiveJob) diff --git a/components/gcp/dataproc/submit_hive_job/component.yaml b/components/gcp/dataproc/submit_hive_job/component.yaml index 5120514f0582..5245ec6a7945 100644 --- a/components/gcp/dataproc/submit_hive_job/component.yaml +++ b/components/gcp/dataproc/submit_hive_job/component.yaml @@ -75,7 +75,7 @@ outputs: type: UI metadata implementation: container: - image: gcr.io/ml-pipeline/ml-pipeline-gcp:57d9f7f1cfd458e945d297957621716062d89a49 + image: gcr.io/ml-pipeline/ml-pipeline-gcp:d6d9d8da19f7110fff3a5ba713710402edaeee65 args: [ kfp_component.google.dataproc, submit_hive_job, --project_id, {inputValue: project_id}, diff --git a/components/gcp/dataproc/submit_hive_job/sample.ipynb b/components/gcp/dataproc/submit_hive_job/sample.ipynb index 27e1e4f4eb62..1523cef3ec31 100644 --- a/components/gcp/dataproc/submit_hive_job/sample.ipynb +++ b/components/gcp/dataproc/submit_hive_job/sample.ipynb @@ -81,7 +81,7 @@ "import kfp.components as comp\n", "\n", "dataproc_submit_hive_job_op = comp.load_component_from_url(\n", - " 'https://raw.githubusercontent.com/kubeflow/pipelines/e598176c02f45371336ccaa819409e8ec83743df/components/gcp/dataproc/submit_hive_job/component.yaml')\n", + " 'https://raw.githubusercontent.com/kubeflow/pipelines/02c991dd265054b040265b3dfa1903d5b49df859/components/gcp/dataproc/submit_hive_job/component.yaml')\n", "help(dataproc_submit_hive_job_op)" ] }, diff --git a/components/gcp/dataproc/submit_pig_job/README.md b/components/gcp/dataproc/submit_pig_job/README.md index 921a6efc1713..2b9b81275fb5 100644 --- a/components/gcp/dataproc/submit_pig_job/README.md +++ b/components/gcp/dataproc/submit_pig_job/README.md @@ -86,7 +86,7 @@ Follow these steps to use the component in a pipeline: ```python import kfp.components as comp - dataproc_submit_pig_job_op = comp.load_component_from_url('https://raw.githubusercontent.com/kubeflow/pipelines/e598176c02f45371336ccaa819409e8ec83743df/components/gcp/dataproc/submit_pig_job/component.yaml') + 
dataproc_submit_pig_job_op = comp.load_component_from_url('https://raw.githubusercontent.com/kubeflow/pipelines/02c991dd265054b040265b3dfa1903d5b49df859/components/gcp/dataproc/submit_pig_job/component.yaml') help(dataproc_submit_pig_job_op) ``` diff --git a/components/gcp/dataproc/submit_pig_job/component.yaml b/components/gcp/dataproc/submit_pig_job/component.yaml index f7df48896b4e..a5ed8de7029c 100644 --- a/components/gcp/dataproc/submit_pig_job/component.yaml +++ b/components/gcp/dataproc/submit_pig_job/component.yaml @@ -75,7 +75,7 @@ outputs: type: UI metadata implementation: container: - image: gcr.io/ml-pipeline/ml-pipeline-gcp:57d9f7f1cfd458e945d297957621716062d89a49 + image: gcr.io/ml-pipeline/ml-pipeline-gcp:d6d9d8da19f7110fff3a5ba713710402edaeee65 args: [ kfp_component.google.dataproc, submit_pig_job, --project_id, {inputValue: project_id}, diff --git a/components/gcp/dataproc/submit_pig_job/sample.ipynb b/components/gcp/dataproc/submit_pig_job/sample.ipynb index e3a518b5e899..1f672c2ce567 100644 --- a/components/gcp/dataproc/submit_pig_job/sample.ipynb +++ b/components/gcp/dataproc/submit_pig_job/sample.ipynb @@ -84,7 +84,7 @@ "import kfp.components as comp\n", "\n", "dataproc_submit_pig_job_op = comp.load_component_from_url(\n", - " 'https://raw.githubusercontent.com/kubeflow/pipelines/e598176c02f45371336ccaa819409e8ec83743df/components/gcp/dataproc/submit_pig_job/component.yaml')\n", + " 'https://raw.githubusercontent.com/kubeflow/pipelines/02c991dd265054b040265b3dfa1903d5b49df859/components/gcp/dataproc/submit_pig_job/component.yaml')\n", "help(dataproc_submit_pig_job_op)" ] }, diff --git a/components/gcp/dataproc/submit_pyspark_job/README.md b/components/gcp/dataproc/submit_pyspark_job/README.md index e2f1340f5729..b5a430826e66 100644 --- a/components/gcp/dataproc/submit_pyspark_job/README.md +++ b/components/gcp/dataproc/submit_pyspark_job/README.md @@ -83,7 +83,7 @@ Follow these steps to use the component in a pipeline: ```python import kfp.components as comp - dataproc_submit_pyspark_job_op = comp.load_component_from_url('https://raw.githubusercontent.com/kubeflow/pipelines/e598176c02f45371336ccaa819409e8ec83743df/components/gcp/dataproc/submit_pyspark_job/component.yaml') + dataproc_submit_pyspark_job_op = comp.load_component_from_url('https://raw.githubusercontent.com/kubeflow/pipelines/02c991dd265054b040265b3dfa1903d5b49df859/components/gcp/dataproc/submit_pyspark_job/component.yaml') help(dataproc_submit_pyspark_job_op) ``` diff --git a/components/gcp/dataproc/submit_pyspark_job/component.yaml b/components/gcp/dataproc/submit_pyspark_job/component.yaml index 526b0bf71581..87ab605b4f12 100644 --- a/components/gcp/dataproc/submit_pyspark_job/component.yaml +++ b/components/gcp/dataproc/submit_pyspark_job/component.yaml @@ -69,7 +69,7 @@ outputs: type: UI metadata implementation: container: - image: gcr.io/ml-pipeline/ml-pipeline-gcp:57d9f7f1cfd458e945d297957621716062d89a49 + image: gcr.io/ml-pipeline/ml-pipeline-gcp:d6d9d8da19f7110fff3a5ba713710402edaeee65 args: [ kfp_component.google.dataproc, submit_pyspark_job, --project_id, {inputValue: project_id}, diff --git a/components/gcp/dataproc/submit_pyspark_job/sample.ipynb b/components/gcp/dataproc/submit_pyspark_job/sample.ipynb index c79de4ae1ec7..a5c7b050d17b 100644 --- a/components/gcp/dataproc/submit_pyspark_job/sample.ipynb +++ b/components/gcp/dataproc/submit_pyspark_job/sample.ipynb @@ -86,7 +86,7 @@ "import kfp.components as comp\n", "\n", "dataproc_submit_pyspark_job_op = comp.load_component_from_url(\n", - " 
'https://raw.githubusercontent.com/kubeflow/pipelines/e598176c02f45371336ccaa819409e8ec83743df/components/gcp/dataproc/submit_pyspark_job/component.yaml')\n", + " 'https://raw.githubusercontent.com/kubeflow/pipelines/02c991dd265054b040265b3dfa1903d5b49df859/components/gcp/dataproc/submit_pyspark_job/component.yaml')\n", "help(dataproc_submit_pyspark_job_op)" ] }, diff --git a/components/gcp/dataproc/submit_spark_job/README.md b/components/gcp/dataproc/submit_spark_job/README.md index 7b0fbd23bf79..cca22ec06a81 100644 --- a/components/gcp/dataproc/submit_spark_job/README.md +++ b/components/gcp/dataproc/submit_spark_job/README.md @@ -101,7 +101,7 @@ Follow these steps to use the component in a pipeline: import kfp.components as comp dataproc_submit_spark_job_op = comp.load_component_from_url( - 'https://raw.githubusercontent.com/kubeflow/pipelines/e598176c02f45371336ccaa819409e8ec83743df/components/gcp/dataproc/submit_spark_job/component.yaml') + 'https://raw.githubusercontent.com/kubeflow/pipelines/02c991dd265054b040265b3dfa1903d5b49df859/components/gcp/dataproc/submit_spark_job/component.yaml') help(dataproc_submit_spark_job_op) ``` diff --git a/components/gcp/dataproc/submit_spark_job/component.yaml b/components/gcp/dataproc/submit_spark_job/component.yaml index 1a8f2b052108..267695312db6 100644 --- a/components/gcp/dataproc/submit_spark_job/component.yaml +++ b/components/gcp/dataproc/submit_spark_job/component.yaml @@ -76,7 +76,7 @@ outputs: type: UI metadata implementation: container: - image: gcr.io/ml-pipeline/ml-pipeline-gcp:57d9f7f1cfd458e945d297957621716062d89a49 + image: gcr.io/ml-pipeline/ml-pipeline-gcp:d6d9d8da19f7110fff3a5ba713710402edaeee65 args: [ kfp_component.google.dataproc, submit_spark_job, --project_id, {inputValue: project_id}, diff --git a/components/gcp/dataproc/submit_spark_job/sample.ipynb b/components/gcp/dataproc/submit_spark_job/sample.ipynb index 907c2ba8ca51..e11c83e64389 100644 --- a/components/gcp/dataproc/submit_spark_job/sample.ipynb +++ b/components/gcp/dataproc/submit_spark_job/sample.ipynb @@ -99,7 +99,7 @@ "import kfp.components as comp\n", "\n", "dataproc_submit_spark_job_op = comp.load_component_from_url(\n", - " 'https://raw.githubusercontent.com/kubeflow/pipelines/e598176c02f45371336ccaa819409e8ec83743df/components/gcp/dataproc/submit_spark_job/component.yaml')\n", + " 'https://raw.githubusercontent.com/kubeflow/pipelines/02c991dd265054b040265b3dfa1903d5b49df859/components/gcp/dataproc/submit_spark_job/component.yaml')\n", "help(dataproc_submit_spark_job_op)" ] }, diff --git a/components/gcp/dataproc/submit_sparksql_job/README.md b/components/gcp/dataproc/submit_sparksql_job/README.md index 2d1256a357b1..ad7ba209f57e 100644 --- a/components/gcp/dataproc/submit_sparksql_job/README.md +++ b/components/gcp/dataproc/submit_sparksql_job/README.md @@ -1,30 +1,48 @@ # Name -Data preparation using SparkSQL on YARN with Cloud Dataproc +Component: Data preparation using SparkSQL on YARN with Cloud Dataproc # Label -Cloud Dataproc, GCP, Cloud Storage, YARN, SparkSQL, Kubeflow, pipelines, components +Cloud Dataproc, YARN, SparkSQL, Kubeflow # Summary -A Kubeflow Pipeline component to prepare data by submitting a SparkSql job on YARN to Cloud Dataproc. +A Kubeflow pipeline component to prepare data by submitting a SparkSql job on YARN to Cloud Dataproc. + +# Facets + +Use case: + +Technique: + +Input data type: + +ML workflow: # Details ## Intended use -Use the component to run an Apache SparkSql job as one preprocessing step in a Kubeflow Pipeline. 
+Use the component to run an Apache SparkSql job as one preprocessing step in a Kubeflow pipeline.
## Runtime arguments
Argument| Description | Optional | Data type| Accepted values| Default |
:--- | :---------- | :--- | :------- | :------ | :------
-project_id | The ID of the Google Cloud Platform (GCP) project that the cluster belongs to. | No| GCPProjectID | | |
-region | The Cloud Dataproc region to handle the request. | No | GCPRegion|
-cluster_name | The name of the cluster to run the job. | No | String| | |
-queries | The queries to execute the SparkSQL job. Specify multiple queries in one string by separating them with semicolons. You do not need to terminate queries with semicolons. | Yes | List | | None |
-query_file_uri | The HCFS URI of the script that contains the SparkSQL queries.| Yes | GCSPath | | None |
-script_variables | Mapping of the query’s variable names to their values (equivalent to the SparkSQL command: SET name="value";).| Yes| Dict | | None |
-sparksql_job | The payload of a [SparkSqlJob](https://cloud.google.com/dataproc/docs/reference/rest/v1/SparkSqlJob). | Yes | Dict | | None |
-job | The payload of a [Dataproc job](https://cloud.google.com/dataproc/docs/reference/rest/v1/projects.regions.jobs). | Yes | Dict | | None |
-wait_interval | The number of seconds to pause between polling the operation. | Yes |Integer | | 30 |
+project_id | The ID of the Google Cloud Platform (GCP) project that the cluster belongs to. | No | GCPProjectID | - | - |
+region | The Cloud Dataproc region to handle the request. | No | GCPRegion | - | - |
+cluster_name | The name of the cluster to run the job. | No | String | - | - |
+queries | The queries to execute the SparkSQL job. Specify multiple queries in one string by separating them with semicolons. You do not need to terminate queries with semicolons. | Yes | List | - | None |
+query_file_uri | The Hadoop Compatible Filesystem (HCFS) URI of the script that contains the SparkSQL queries. The script is typically stored in a Cloud Storage bucket. | Yes | GCSPath | - | None |
+script_variables | Mapping of the query’s variable names to their values (equivalent to the SparkSQL command: SET name="value";). | Yes | Dict | - | None |
+sparksql_job | The payload of a [SparkSql job](https://cloud.google.com/dataproc/docs/reference/rest/v1/SparkSqlJob). | Yes | Dict | - | None |
+job | The payload of a [Dataproc job](https://cloud.google.com/dataproc/docs/reference/rest/v1/projects.regions.jobs). | Yes | Dict | - | None |
+wait_interval | The number of seconds to pause between polling the operation. | Yes | Integer | - | 30 |
## Output
Name | Description | Type
@@ -36,54 +54,50 @@ To use the component, you must:
* Set up a GCP project by following this [guide](https://cloud.google.com/dataproc/docs/guides/setup-project).
* [Create a new cluster](https://cloud.google.com/dataproc/docs/guides/create-cluster).
* Run the component under a secret [Kubeflow user service account](https://www.kubeflow.org/docs/started/getting-started-gke/#gcp-service-accounts) in a Kubeflow cluster. For example:
-```
-component_op(...).apply(gcp.use_gcp_secret('user-gcp-sa'))
-```
-* Grant the Kubeflow user service account the role `roles/dataproc.editor` on the project.
+ ```
+ component_op(...).apply(gcp.use_gcp_secret('user-gcp-sa'))
+ ```
+* Grant the Kubeflow user service account the role, `roles/dataproc.editor`, on the project.
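To make the argument table and the secret requirement concrete, here is a minimal sketch of one pipeline step. Every value is an illustrative placeholder, and `dataproc_submit_sparksql_job_op` is assumed to have been loaded as shown in the next section:

```python
import json

import kfp.dsl as dsl
import kfp.gcp as gcp

@dsl.pipeline(name='SparkSQL job sketch')
def sparksql_sketch():
    # All values are illustrative placeholders; List- and Dict-typed
    # inputs are passed as JSON-serialized strings.
    dataproc_submit_sparksql_job_op(
        project_id='my-project',
        region='us-central1',
        cluster_name='my-cluster',
        queries=json.dumps(['SHOW DATABASES;']),
        wait_interval='30'
    ).apply(gcp.use_gcp_secret('user-gcp-sa'))  # mounts the secret required above
```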
## Detailed Description -This component creates a Pig job from [Dataproc submit job REST API](https://cloud.google.com/dataproc/docs/reference/rest/v1/projects.regions.jobs/submit). +This component creates a SparkSql job from the [Dataproc submit job REST API](https://cloud.google.com/dataproc/docs/reference/rest/v1/projects.regions.jobs/submit). Follow these steps to use the component in a pipeline: -1. Install the Kubeflow Pipeline SDK: +1. Install the Kubeflow pipeline's SDK: + ```python + %%capture --no-stderr -```python -%%capture --no-stderr + KFP_PACKAGE = 'https://storage.googleapis.com/ml-pipeline/release/0.1.14/kfp.tar.gz' + !pip3 install $KFP_PACKAGE --upgrade + ``` -KFP_PACKAGE = 'https://storage.googleapis.com/ml-pipeline/release/0.1.14/kfp.tar.gz' -!pip3 install $KFP_PACKAGE --upgrade -``` +2. Load the component using the Kubeflow pipeline's SDK: -2. Load the component using KFP SDK + ```python + import kfp.components as comp - -```python -import kfp.components as comp - -dataproc_submit_sparksql_job_op = comp.load_component_from_url( - 'https://raw.githubusercontent.com/kubeflow/pipelines/e598176c02f45371336ccaa819409e8ec83743df/components/gcp/dataproc/submit_sparksql_job/component.yaml') -help(dataproc_submit_sparksql_job_op) -``` + dataproc_submit_sparksql_job_op = comp.load_component_from_url('https://raw.githubusercontent.com/kubeflow/pipelines/02c991dd265054b040265b3dfa1903d5b49df859/components/gcp/dataproc/submit_sparksql_job/component.yaml') + help(dataproc_submit_sparksql_job_op) + ``` ### Sample -Note: The following sample code works in an IPython notebook or directly in Python code. See the sample code below to learn how to execute the template. +The following sample code works in an IPython notebook or directly in Python code. See the sample code below to learn how to execute the template. #### Setup a Dataproc cluster [Create a new Dataproc cluster](https://cloud.google.com/dataproc/docs/guides/create-cluster) (or reuse an existing one) before running the sample code. #### Prepare a SparkSQL job -Either put your SparkSQL queries in the `queires` list, or upload your SparkSQL queries into a file to a Cloud Storage bucket and then enter the Cloud Storage bucket’s path in `query_file_uri`. In this sample, we will use a hard coded query in the `queries` list to select data from a public CSV file from Cloud Storage. +You can put your SparkSQL queries in the `queries` list, or you can use `query_file_uri`. In this sample, we will use a hard coded query in the `queries` list to select data from a public CSV file in Cloud Storage. -For more details about Spark SQL, see [Spark SQL, DataFrames and Datasets Guide](https://spark.apache.org/docs/latest/sql-programming-guide.html) +For more details about Spark SQL, see [Spark SQL, DataFrames and Datasets Guide](https://spark.apache.org/docs/latest/sql-programming-guide.html). 
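If you prefer the file-based route over the inline list, the idea looks roughly like the following notebook cell; the script name and bucket path are placeholders:

```python
# Hypothetical file-based alternative; the file name and bucket path are
# placeholders. Separate multiple queries with semicolons.
with open('queries.sql', 'w') as f:
    f.write('DROP TABLE IF EXISTS natality_csv; SHOW TABLES')

# Upload the script and pass the resulting URI as `query_file_uri`.
!gsutil cp queries.sql gs://your-bucket/sparksql/queries.sql
QUERY_FILE_URI = 'gs://your-bucket/sparksql/queries.sql'
```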
#### Set sample parameters - ```python -PROJECT_ID = '' -CLUSTER_NAME = '' +PROJECT_ID = '' +CLUSTER_NAME = '' REGION = 'us-central1' QUERY = ''' DROP TABLE IF EXISTS natality_csv; @@ -108,7 +122,6 @@ EXPERIMENT_NAME = 'Dataproc - Submit SparkSQL Job' #### Example pipeline that uses the component - ```python import kfp.dsl as dsl import kfp.gcp as gcp @@ -143,7 +156,6 @@ def dataproc_submit_sparksql_job_pipeline( #### Compile the pipeline - ```python pipeline_func = dataproc_submit_sparksql_job_pipeline pipeline_filename = pipeline_func.__name__ + '.zip' @@ -153,12 +165,11 @@ compiler.Compiler().compile(pipeline_func, pipeline_filename) #### Submit the pipeline for execution - ```python -#Specify pipeline argument values +#Specify values for the pipeline's arguments arguments = {} -#Get or create an experiment and submit a pipeline run +#Get or create an experiment import kfp client = kfp.Client() experiment = client.create_experiment(EXPERIMENT_NAME) diff --git a/components/gcp/dataproc/submit_sparksql_job/component.yaml b/components/gcp/dataproc/submit_sparksql_job/component.yaml index f53f6859ab7c..b909bc996971 100644 --- a/components/gcp/dataproc/submit_sparksql_job/component.yaml +++ b/components/gcp/dataproc/submit_sparksql_job/component.yaml @@ -75,7 +75,7 @@ outputs: type: UI metadata implementation: container: - image: gcr.io/ml-pipeline/ml-pipeline-gcp:57d9f7f1cfd458e945d297957621716062d89a49 + image: gcr.io/ml-pipeline/ml-pipeline-gcp:d6d9d8da19f7110fff3a5ba713710402edaeee65 args: [ kfp_component.google.dataproc, submit_sparksql_job, --project_id, {inputValue: project_id}, diff --git a/components/gcp/dataproc/submit_sparksql_job/sample.ipynb b/components/gcp/dataproc/submit_sparksql_job/sample.ipynb index b037c2cf0e83..06f5362f6bb3 100644 --- a/components/gcp/dataproc/submit_sparksql_job/sample.ipynb +++ b/components/gcp/dataproc/submit_sparksql_job/sample.ipynb @@ -81,7 +81,7 @@ "import kfp.components as comp\n", "\n", "dataproc_submit_sparksql_job_op = comp.load_component_from_url(\n", - " 'https://raw.githubusercontent.com/kubeflow/pipelines/e598176c02f45371336ccaa819409e8ec83743df/components/gcp/dataproc/submit_sparksql_job/component.yaml')\n", + " 'https://raw.githubusercontent.com/kubeflow/pipelines/02c991dd265054b040265b3dfa1903d5b49df859/components/gcp/dataproc/submit_sparksql_job/component.yaml')\n", "help(dataproc_submit_sparksql_job_op)" ] }, diff --git a/components/gcp/ml_engine/batch_predict/README.md b/components/gcp/ml_engine/batch_predict/README.md index a3f2d21a0caa..d5a82198b80d 100644 --- a/components/gcp/ml_engine/batch_predict/README.md +++ b/components/gcp/ml_engine/batch_predict/README.md @@ -94,7 +94,7 @@ KFP_PACKAGE = 'https://storage.googleapis.com/ml-pipeline/release/0.1.14/kfp.tar import kfp.components as comp mlengine_batch_predict_op = comp.load_component_from_url( - 'https://raw.githubusercontent.com/kubeflow/pipelines/e598176c02f45371336ccaa819409e8ec83743df/components/gcp/ml_engine/batch_predict/component.yaml') + 'https://raw.githubusercontent.com/kubeflow/pipelines/02c991dd265054b040265b3dfa1903d5b49df859/components/gcp/ml_engine/batch_predict/component.yaml') help(mlengine_batch_predict_op) ``` diff --git a/components/gcp/ml_engine/batch_predict/component.yaml b/components/gcp/ml_engine/batch_predict/component.yaml index 06afc0f9b300..4e1cf51c4378 100644 --- a/components/gcp/ml_engine/batch_predict/component.yaml +++ b/components/gcp/ml_engine/batch_predict/component.yaml @@ -69,7 +69,7 @@ outputs: type: UI metadata implementation: container: 
- image: gcr.io/ml-pipeline/ml-pipeline-gcp:57d9f7f1cfd458e945d297957621716062d89a49 + image: gcr.io/ml-pipeline/ml-pipeline-gcp:d6d9d8da19f7110fff3a5ba713710402edaeee65 args: [ kfp_component.google.ml_engine, batch_predict, --project_id, {inputValue: project_id}, diff --git a/components/gcp/ml_engine/batch_predict/sample.ipynb b/components/gcp/ml_engine/batch_predict/sample.ipynb index 99980c832bc5..f08bd38c4c8b 100644 --- a/components/gcp/ml_engine/batch_predict/sample.ipynb +++ b/components/gcp/ml_engine/batch_predict/sample.ipynb @@ -112,7 +112,7 @@ "import kfp.components as comp\n", "\n", "mlengine_batch_predict_op = comp.load_component_from_url(\n", - " 'https://raw.githubusercontent.com/kubeflow/pipelines/e598176c02f45371336ccaa819409e8ec83743df/components/gcp/ml_engine/batch_predict/component.yaml')\n", + " 'https://raw.githubusercontent.com/kubeflow/pipelines/02c991dd265054b040265b3dfa1903d5b49df859/components/gcp/ml_engine/batch_predict/component.yaml')\n", "help(mlengine_batch_predict_op)" ] }, diff --git a/components/gcp/ml_engine/deploy/README.md b/components/gcp/ml_engine/deploy/README.md index 25f038acaf90..ead3f346dd40 100644 --- a/components/gcp/ml_engine/deploy/README.md +++ b/components/gcp/ml_engine/deploy/README.md @@ -110,7 +110,7 @@ KFP_PACKAGE = 'https://storage.googleapis.com/ml-pipeline/release/0.1.14/kfp.tar import kfp.components as comp mlengine_deploy_op = comp.load_component_from_url( - 'https://raw.githubusercontent.com/kubeflow/pipelines/e598176c02f45371336ccaa819409e8ec83743df/components/gcp/ml_engine/deploy/component.yaml') + 'https://raw.githubusercontent.com/kubeflow/pipelines/02c991dd265054b040265b3dfa1903d5b49df859/components/gcp/ml_engine/deploy/component.yaml') help(mlengine_deploy_op) ``` diff --git a/components/gcp/ml_engine/deploy/component.yaml b/components/gcp/ml_engine/deploy/component.yaml index 216b7d5f3470..6ef1d8c43aa3 100644 --- a/components/gcp/ml_engine/deploy/component.yaml +++ b/components/gcp/ml_engine/deploy/component.yaml @@ -95,7 +95,7 @@ outputs: type: UI metadata implementation: container: - image: gcr.io/ml-pipeline/ml-pipeline-gcp:57d9f7f1cfd458e945d297957621716062d89a49 + image: gcr.io/ml-pipeline/ml-pipeline-gcp:d6d9d8da19f7110fff3a5ba713710402edaeee65 args: [ kfp_component.google.ml_engine, deploy, --model_uri, {inputValue: model_uri}, diff --git a/components/gcp/ml_engine/deploy/sample.ipynb b/components/gcp/ml_engine/deploy/sample.ipynb index 43858ff6e93a..e5831a244c48 100644 --- a/components/gcp/ml_engine/deploy/sample.ipynb +++ b/components/gcp/ml_engine/deploy/sample.ipynb @@ -128,7 +128,7 @@ "import kfp.components as comp\n", "\n", "mlengine_deploy_op = comp.load_component_from_url(\n", - " 'https://raw.githubusercontent.com/kubeflow/pipelines/e598176c02f45371336ccaa819409e8ec83743df/components/gcp/ml_engine/deploy/component.yaml')\n", + " 'https://raw.githubusercontent.com/kubeflow/pipelines/02c991dd265054b040265b3dfa1903d5b49df859/components/gcp/ml_engine/deploy/component.yaml')\n", "help(mlengine_deploy_op)" ] }, diff --git a/components/gcp/ml_engine/train/README.md b/components/gcp/ml_engine/train/README.md index 81c11e0f3aa4..97679c073efd 100644 --- a/components/gcp/ml_engine/train/README.md +++ b/components/gcp/ml_engine/train/README.md @@ -1,33 +1,55 @@ # Name -Submitting a Cloud Machine Learning Engine training job as a pipeline step +Component: Submitting an AI Platform training job as a pipeline step # Label -GCP, Cloud ML Engine, Machine Learning, pipeline, component, Kubeflow, Kubeflow Pipeline + AI 
Platform, Kubeflow # Summary -A Kubeflow Pipeline component to submit a Cloud ML Engine training job as a step in a pipeline. +A Kubeflow pipeline component to submit an AI Platform training job as a step in a pipeline. + +# Facets + +Use case: +Other + +Technique: +Other + +Input data type: +Tabular + +ML workflow: +Training # Details ## Intended use -Use this component to submit a training job to Cloud ML Engine from a Kubeflow Pipeline. +Use this component to submit a training job to AI Platform from a Kubeflow pipeline. ## Runtime arguments | Argument | Description | Optional | Data type | Accepted values | Default | |:------------------|:------------------|:----------|:--------------|:-----------------|:-------------| -| project_id | The ID of the Google Cloud Platform (GCP) project of the job. | No | GCPProjectID | | | -| python_module | The name of the Python module to run after installing the training program. | Yes | String | | None | -| package_uris | The Cloud Storage location of the packages that contain the training program and any additional dependencies. The maximum number of package URIs is 100. | Yes | List | | None | -| region | The Compute Engine region in which the training job is run. | Yes | GCPRegion | | us-central1 | -| args | The command line arguments to pass to the training program. | Yes | List | | None | -| job_dir | A Cloud Storage path in which to store the training outputs and other data needed for training. This path is passed to your TensorFlow program as the `job-dir` command-line argument. The benefit of specifying this field is that Cloud ML validates the path for use in training. | Yes | GCSPath | | None | -| python_version | The version of Python used in training. If it is not set, the default version is 2.7. Python 3.5 is available when the runtime version is set to 1.4 and above. | Yes | String | | None | -| runtime_version | The runtime version of Cloud ML Engine to use for training. If it is not set, Cloud ML Engine uses the default. | Yes | String | | 1 | -| master_image_uri | The Docker image to run on the master replica. This image must be in Container Registry. | Yes | GCRPath | | None | -| worker_image_uri | The Docker image to run on the worker replica. This image must be in Container Registry. | Yes | GCRPath | | None | +| project_id | The Google Cloud Platform (GCP) project ID of the job. | No | GCPProjectID | - | - | +| python_module | The name of the Python module to run after installing the training program. | Yes | String | - | None | +| package_uris | The Cloud Storage location of the packages that contain the training program and any additional dependencies. The maximum number of package URIs is 100. | Yes | List | -| None | +| region | The Compute Engine region in which the training job is run. | Yes | GCPRegion | -| us-central1 | +| args | The command line arguments to pass to the training program. | Yes | List | - | None | +| job_dir | A Cloud Storage path in which to store the training outputs and other data needed for training. This path is passed to your TensorFlow program as the command-line argument, `job-dir`. The benefit of specifying this field is that Cloud ML validates the path for use in training. | Yes | GCSPath | - | None | +| python_version | The version of Python used in training. If it is not set, the default version is 2.7. Python 3.5 is available when the runtime version is set to 1.4 and above. | Yes | String | - | None | +| runtime_version | The runtime version of AI Platform to use for training. 
If it is not set, AI Platform uses the default. | Yes | String | - | 1 | +| master_image_uri | The Docker image to run on the master replica. This image must be in Container Registry. | Yes | GCRPath | - | None | +| worker_image_uri | The Docker image to run on the worker replica. This image must be in Container Registry. | Yes | GCRPath |- | None | | training_input | The input parameters to create a training job. | Yes | Dict | [TrainingInput](https://cloud.google.com/ml-engine/reference/rest/v1/projects.jobs#TrainingInput) | None | -| job_id_prefix | The prefix of the job ID that is generated. | Yes | String | | None | -| wait_interval | The number of seconds to wait between API calls to get the status of the job. | Yes | Integer | | 30 | +| job_id_prefix | The prefix of the job ID that is generated. | Yes | String | - | None | +| wait_interval | The number of seconds to wait between API calls to get the status of the job. | Yes | Integer | - | 30 | @@ -43,7 +65,7 @@ The component accepts two types of inputs: | Name | Description | Type | |:------- |:---- | :--- | | job_id | The ID of the created job. | String | -| job_dir | The Cloud Storage path that contains the trained model output files. | GCSPath | +| job_dir | The Cloud Storage path that contains the output files with the trained model. | GCSPath | ## Cautions & requirements @@ -63,51 +85,45 @@ To use the component, you must: ## Detailed description -The component builds the [TrainingInput](https://cloud.google.com/ml-engine/reference/rest/v1/projects.jobs#TrainingInput) payload and submits a job via the [Cloud ML Engine REST API](https://cloud.google.com/ml-engine/reference/rest/v1/projects.jobs). +The component builds the [TrainingInput](https://cloud.google.com/ml-engine/reference/rest/v1/projects.jobs#TrainingInput) payload and submits a job via the [AI Platform REST API](https://cloud.google.com/ml-engine/reference/rest/v1/projects.jobs). The steps to use the component in a pipeline are: -1. Install the Kubeflow Pipeline SDK: - +1. Install the Kubeflow pipeline's SDK: + ```python + %%capture --no-stderr -```python -%%capture --no-stderr - -KFP_PACKAGE = 'https://storage.googleapis.com/ml-pipeline/release/0.1.14/kfp.tar.gz' -!pip3 install $KFP_PACKAGE --upgrade -``` - -2. Load the component using KFP SDK - + KFP_PACKAGE = 'https://storage.googleapis.com/ml-pipeline/release/0.1.14/kfp.tar.gz' + !pip3 install $KFP_PACKAGE --upgrade + ``` -```python -import kfp.components as comp +2. Load the component using the Kubeflow pipeline's SDK: -mlengine_train_op = comp.load_component_from_url( - 'https://raw.githubusercontent.com/kubeflow/pipelines/e598176c02f45371336ccaa819409e8ec83743df/components/gcp/ml_engine/train/component.yaml') -help(mlengine_train_op) -``` + ```python + import kfp.components as comp + mlengine_train_op = comp.load_component_from_url('https://raw.githubusercontent.com/kubeflow/pipelines/02c991dd265054b040265b3dfa1903d5b49df859/components/gcp/ml_engine/train/component.yaml') + help(mlengine_train_op) + ``` ### Sample -Note: The following sample code works in an IPython notebook or directly in Python code. +The following sample code works in an IPython notebook or directly in Python code. -In this sample, you use the code from the [census estimator sample](https://github.com/GoogleCloudPlatform/cloudml-samples/tree/master/census/estimator) to train a model in Cloud ML Engine. To upload the code to Cloud ML Engine, package the Python code and upload it to a Cloud Storage bucket. 
+In this sample, you use the code from the [census estimator sample](https://github.com/GoogleCloudPlatform/cloudml-samples/tree/master/census/estimator) to train a model on AI Platform. To upload the code to AI Platform, package the Python code and upload it to a Cloud Storage bucket. Note: You must have read and write permissions on the bucket that you use as the working directory. -#### Set sample parameters +#### Set sample parameters ```python -# Required Parameters -PROJECT_ID = '' -GCS_WORKING_DIR = 'gs://' # No ending slash +# Required parameters +PROJECT_ID = '' +GCS_WORKING_DIR = 'gs://' # No ending slash ``` - ```python -# Optional Parameters +# Optional parameters EXPERIMENT_NAME = 'CLOUDML - Train' TRAINER_GCS_PATH = GCS_WORKING_DIR + '/train/trainer.tar.gz' OUTPUT_GCS_PATH = GCS_WORKING_DIR + '/train/output/' @@ -115,14 +131,12 @@ OUTPUT_GCS_PATH = GCS_WORKING_DIR + '/train/output/' #### Clean up the working directory - ```python %%capture --no-stderr !gsutil rm -r $GCS_WORKING_DIR ``` -#### Download the sample trainer code to local - +#### Download the sample trainer code to a local directory ```python %%capture --no-stderr @@ -132,7 +146,6 @@ OUTPUT_GCS_PATH = GCS_WORKING_DIR + '/train/output/' #### Package code and upload the package to Cloud Storage - ```python %%capture --no-stderr %%bash -s "$TRAINER_GCS_PATH" @@ -145,7 +158,6 @@ rm -fr ./cloudml-samples-master/ ./master.zip ./dist #### Example pipeline that uses the component - ```python import kfp.dsl as dsl import kfp.gcp as gcp @@ -192,7 +204,6 @@ def pipeline( #### Compile the pipeline - ```python pipeline_func = pipeline pipeline_filename = pipeline_func.__name__ + '.zip' @@ -202,12 +213,11 @@ compiler.Compiler().compile(pipeline_func, pipeline_filename) #### Submit the pipeline for execution - ```python -#Specify pipeline argument values +#Specify values for the pipeline's arguments arguments = {} -#Get or create an experiment and submit a pipeline run +#Get or create an experiment import kfp client = kfp.Client() experiment = client.create_experiment(EXPERIMENT_NAME) @@ -221,16 +231,15 @@ run_result = client.run_pipeline(experiment.id, run_name, pipeline_filename, arg Use the following command to inspect the contents in the output directory: - ```python !gsutil ls $OUTPUT_GCS_PATH ``` ## References -* [Component python code](https://github.com/kubeflow/pipelines/blob/master/components/gcp/container/component_sdk/python/kfp_component/google/ml_engine/_train.py) -* [Component docker file](https://github.com/kubeflow/pipelines/blob/master/components/gcp/container/Dockerfile) +* [Component Python code](https://github.com/kubeflow/pipelines/blob/master/components/gcp/container/component_sdk/python/kfp_component/google/ml_engine/_train.py) +* [Component Docker file](https://github.com/kubeflow/pipelines/blob/master/components/gcp/container/Dockerfile) * [Sample notebook](https://github.com/kubeflow/pipelines/blob/master/components/gcp/ml_engine/train/sample.ipynb) -* [Cloud Machine Learning Engine job REST API](https://cloud.google.com/ml-engine/reference/rest/v1/projects.jobs) +* [AI Platform REST API - Resource: Job](https://cloud.google.com/ml-engine/reference/rest/v1/projects.jobs) ## License By deploying or using this software you agree to comply with the [AI Hub Terms of Service](https://aihub.cloud.google.com/u/0/aihub-tos) and the [Google APIs Terms of Service](https://developers.google.com/terms/). To the extent of a direct conflict of terms, the AI Hub Terms of Service will control. 
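Condensing the steps above, a single training step could be sketched as follows. The trainer flags, data paths, and runtime version are illustrative assumptions, not a verbatim copy of the sample notebook; `PROJECT_ID`, `TRAINER_GCS_PATH`, `OUTPUT_GCS_PATH`, and `mlengine_train_op` come from the earlier sections:

```python
import json

import kfp.dsl as dsl
import kfp.gcp as gcp

@dsl.pipeline(name='CloudML training sketch')
def train_sketch(
    project_id=PROJECT_ID,
    package_uris=json.dumps([TRAINER_GCS_PATH]),  # the package uploaded above
    job_dir=OUTPUT_GCS_PATH,
):
    mlengine_train_op(
        project_id=project_id,
        python_module='trainer.task',  # entry point of the census trainer
        package_uris=package_uris,
        region='us-central1',
        # Illustrative trainer flags; the data paths are placeholders.
        args=json.dumps([
            '--train-files', 'gs://your-bucket/census/adult.data.csv',
            '--eval-files', 'gs://your-bucket/census/adult.test.csv',
            '--train-steps', '1000',
        ]),
        job_dir=job_dir,
        runtime_version='1.10',  # assumed TensorFlow runtime; pick your own
        wait_interval='30'
    ).apply(gcp.use_gcp_secret('user-gcp-sa'))
```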
diff --git a/components/gcp/ml_engine/train/component.yaml b/components/gcp/ml_engine/train/component.yaml index e7d6d3842456..06ad40e9a4ea 100644 --- a/components/gcp/ml_engine/train/component.yaml +++ b/components/gcp/ml_engine/train/component.yaml @@ -103,7 +103,7 @@ outputs: type: UI metadata implementation: container: - image: gcr.io/ml-pipeline/ml-pipeline-gcp:57d9f7f1cfd458e945d297957621716062d89a49 + image: gcr.io/ml-pipeline/ml-pipeline-gcp:d6d9d8da19f7110fff3a5ba713710402edaeee65 args: [ kfp_component.google.ml_engine, train, --project_id, {inputValue: project_id}, diff --git a/components/gcp/ml_engine/train/sample.ipynb b/components/gcp/ml_engine/train/sample.ipynb index 319e7abce166..fe74118995c0 100644 --- a/components/gcp/ml_engine/train/sample.ipynb +++ b/components/gcp/ml_engine/train/sample.ipynb @@ -104,7 +104,7 @@ "import kfp.components as comp\n", "\n", "mlengine_train_op = comp.load_component_from_url(\n", - " 'https://raw.githubusercontent.com/kubeflow/pipelines/e598176c02f45371336ccaa819409e8ec83743df/components/gcp/ml_engine/train/component.yaml')\n", + " 'https://raw.githubusercontent.com/kubeflow/pipelines/02c991dd265054b040265b3dfa1903d5b49df859/components/gcp/ml_engine/train/component.yaml')\n", "help(mlengine_train_op)" ] }, diff --git a/components/ibm-components/watson/train/component.yaml b/components/ibm-components/watson/train/component.yaml index 97efa4a4c4f3..7344ed1189c3 100644 --- a/components/ibm-components/watson/train/component.yaml +++ b/components/ibm-components/watson/train/component.yaml @@ -20,9 +20,9 @@ inputs: - {name: execution_command, description: 'Required. Execution command to start the model training.'} - {name: config, description: 'Credential configfile is properly created.', default: 'secret_name'} - {name: framework, description: 'ML/DL Model Framework', default: 'tensorflow'} - - {name: framework_version, description: 'Model Framework version', default: '1.5'} + - {name: framework_version, description: 'Model Framework version', default: '1.13'} - {name: runtime, description: 'Model Code runtime language', default: 'python'} - - {name: runtime_version, description: 'Model Code runtime version', default: '3.5'} + - {name: runtime_version, description: 'Model Code runtime version', default: '3.6'} - {name: run_definition, description: 'Name for the Watson Machine Learning training definition', default: 'python-tensorflow-definition'} - {name: run_name, description: 'Name for the Watson Machine Learning training-runs', default: 'python-tensorflow-run'} - {name: author_name, description: 'Name of this training job author', default: 'default-author'} diff --git a/components/ibm-components/watson/train/src/wml-train.py b/components/ibm-components/watson/train/src/wml-train.py index 018f66ca2d88..f0eb1703cc11 100644 --- a/components/ibm-components/watson/train/src/wml-train.py +++ b/components/ibm-components/watson/train/src/wml-train.py @@ -27,9 +27,9 @@ def train(args): wml_train_code = args.train_code wml_execution_command = args.execution_command.strip('\'') wml_framework_name = args.framework if args.framework else 'tensorflow' - wml_framework_version = args.framework_version if args.framework_version else '1.5' + wml_framework_version = args.framework_version if args.framework_version else '1.13' wml_runtime_name = args.runtime if args.runtime else 'python' - wml_runtime_version = args.runtime_version if args.runtime_version else '3.5' + wml_runtime_version = args.runtime_version if args.runtime_version else '3.6' wml_run_definition 
= args.run_definition if args.run_definition else 'python-tensorflow-definition' wml_run_name = args.run_name if args.run_name else 'python-tensorflow-run' wml_author_name = args.author_name if args.author_name else 'default-author' diff --git a/components/kubeflow/deployer/component.yaml b/components/kubeflow/deployer/component.yaml index 2386f046ccd7..2ea0176f622f 100644 --- a/components/kubeflow/deployer/component.yaml +++ b/components/kubeflow/deployer/component.yaml @@ -11,7 +11,7 @@ inputs: # - {name: Endppoint URI, type: Serving URI, description: 'URI of the deployed prediction service..'} implementation: container: - image: gcr.io/ml-pipeline/ml-pipeline-kubeflow-deployer:57d9f7f1cfd458e945d297957621716062d89a49 + image: gcr.io/ml-pipeline/ml-pipeline-kubeflow-deployer:d6d9d8da19f7110fff3a5ba713710402edaeee65 command: [/bin/deploy.sh] args: [ --model-export-path, {inputValue: Model dir}, diff --git a/components/kubeflow/dnntrainer/component.yaml b/components/kubeflow/dnntrainer/component.yaml index 927ff2cabcc3..4ca9d46f057b 100644 --- a/components/kubeflow/dnntrainer/component.yaml +++ b/components/kubeflow/dnntrainer/component.yaml @@ -16,7 +16,7 @@ outputs: - {name: MLPipeline UI metadata, type: UI metadata} implementation: container: - image: gcr.io/ml-pipeline/ml-pipeline-kubeflow-tf-trainer:57d9f7f1cfd458e945d297957621716062d89a49 + image: gcr.io/ml-pipeline/ml-pipeline-kubeflow-tf-trainer:d6d9d8da19f7110fff3a5ba713710402edaeee65 command: [python2, -m, trainer.task] args: [ --transformed-data-dir, {inputValue: Transformed data dir}, diff --git a/components/kubeflow/kfserving/Dockerfile b/components/kubeflow/kfserving/Dockerfile index 1b66dcb2a714..82f655f0a84c 100644 --- a/components/kubeflow/kfserving/Dockerfile +++ b/components/kubeflow/kfserving/Dockerfile @@ -1,6 +1,6 @@ FROM python:3.6-slim -RUN pip3 install kubernetes==9.0.0 kfserving==0.1.1 requests==2.22.0 Flask==1.1.1 flask-cors==3.0.8 +RUN pip3 install kubernetes==10.0.1 kfserving==0.2.1 requests==2.22.0 Flask==1.1.1 flask-cors==3.0.8 ENV APP_HOME /app COPY src $APP_HOME diff --git a/components/kubeflow/kfserving/OWNERS b/components/kubeflow/kfserving/OWNERS new file mode 100644 index 000000000000..78d93353d16d --- /dev/null +++ b/components/kubeflow/kfserving/OWNERS @@ -0,0 +1,5 @@ +approvers: + - animeshsingh +reviewers: + - animeshsingh + - Tomcli diff --git a/components/kubeflow/kfserving/component.yaml b/components/kubeflow/kfserving/component.yaml index a3eded2dbd20..ea59664ae871 100644 --- a/components/kubeflow/kfserving/component.yaml +++ b/components/kubeflow/kfserving/component.yaml @@ -13,10 +13,10 @@ inputs: - {name: Autoscaling Target, type: String, default: '0', description: 'Autoscaling Target Number'} - {name: KFServing Endpoint, type: String, default: '', description: 'KFServing remote deployer API endpoint'} outputs: - - {name: Endpoint URI, type: String, description: 'URI of the deployed prediction service..'} + - {name: Service Endpoint URI, type: String, description: 'URI of the deployed prediction service..'} implementation: container: - image: aipipeline/kfserving-component:v0.1.0 + image: aipipeline/kfserving-component:v0.2.1 command: ['python'] args: [ -u, kfservingdeployer.py, @@ -29,7 +29,7 @@ implementation: --framework, {inputValue: Framework}, --default-custom-model-spec,{inputValue: Default Custom Model Spec}, --canary-custom-model-spec, {inputValue: Canary Custom Model Spec}, - --kfserving-deployer-api, {inputValue: KFServing Endpoint}, + --kfserving-endpoint, {inputValue: KFServing 
Endpoint}, --autoscaling-target, {inputValue: Autoscaling Target}, - --output_path, {outputPath: Endpoint URI} + --output_path, {outputPath: Service Endpoint URI} ] diff --git a/components/kubeflow/kfserving/sample-pipeline.py b/components/kubeflow/kfserving/sample-pipeline.py deleted file mode 100644 index a3b1e14c2883..000000000000 --- a/components/kubeflow/kfserving/sample-pipeline.py +++ /dev/null @@ -1,53 +0,0 @@ -# Copyright 2019 IBM Corporation -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -import kfp.dsl as dsl -from kfp import components -import json - -kfserving_op = components.load_component_from_file('component.yaml') - -@dsl.pipeline( - name='kfserving pipeline', - description='A pipeline for kfserving.' -) -def kfservingPipeline( - action = 'create', - model_name='tensorflow-sample', - default_model_uri='gs://kfserving-samples/models/tensorflow/flowers', - canary_model_uri='gs://kfserving-samples/models/tensorflow/flowers-2', - canary_model_traffic_percentage='10', - namespace='kubeflow', - framework='tensorflow', - default_custom_model_spec='{}', - canary_custom_model_spec='{}', - autoscaling_target=0, - kfserving_endpoint='' -): - - # define workflow - kfserving = kfserving_op(action = action, - model_name=model_name, - default_model_uri=default_model_uri, - canary_model_uri=canary_model_uri, - canary_model_traffic_percentage=canary_model_traffic_percentage, - namespace=namespace, - framework=framework, - default_custom_model_spec=default_custom_model_spec, - canary_custom_model_spec=canary_custom_model_spec, - autoscaling_target=autoscaling_target, - kfserving_endpoint=kfserving_endpoint) - -if __name__ == '__main__': - import kfp.compiler as compiler - compiler.Compiler().compile(kfservingPipeline, __file__ + '.tar.gz') diff --git a/components/kubeflow/kfserving/src/app.py b/components/kubeflow/kfserving/src/app.py index 1423c8fcad8e..21aa7f0d6874 100644 --- a/components/kubeflow/kfserving/src/app.py +++ b/components/kubeflow/kfserving/src/app.py @@ -1,3 +1,17 @@ +# Copyright 2019 kubeflow.org. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
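Since sample-pipeline.py is removed in this change, a condensed sketch of how the updated component can still be wired into a KFP pipeline is shown below. It mirrors the deleted sample, assumes `component.yaml` is available in the working directory, and reflects the output renamed to `Service Endpoint URI`:

```python
# Condensed from the deleted sample-pipeline.py; assumes component.yaml
# (the updated definition above) sits in the working directory.
import kfp.dsl as dsl
from kfp import components

kfserving_op = components.load_component_from_file('component.yaml')

@dsl.pipeline(name='kfserving pipeline', description='A pipeline for kfserving.')
def kfserving_pipeline(
    action='create',
    model_name='tensorflow-sample',
    default_model_uri='gs://kfserving-samples/models/tensorflow/flowers',
    canary_model_uri='gs://kfserving-samples/models/tensorflow/flowers-2',
    canary_model_traffic_percentage='10',
    namespace='kubeflow',
    framework='tensorflow',
    default_custom_model_spec='{}',
    canary_custom_model_spec='{}',
    autoscaling_target=0,
    kfserving_endpoint='',
):
    # The op's result is now exposed under the renamed 'Service Endpoint URI' output.
    kfserving_op(
        action=action,
        model_name=model_name,
        default_model_uri=default_model_uri,
        canary_model_uri=canary_model_uri,
        canary_model_traffic_percentage=canary_model_traffic_percentage,
        namespace=namespace,
        framework=framework,
        default_custom_model_spec=default_custom_model_spec,
        canary_custom_model_spec=canary_custom_model_spec,
        autoscaling_target=autoscaling_target,
        kfserving_endpoint=kfserving_endpoint,
    )

if __name__ == '__main__':
    import kfp.compiler as compiler
    compiler.Compiler().compile(kfserving_pipeline, __file__ + '.tar.gz')
```

Compiling this module produces a `.tar.gz` pipeline package that can be uploaded through the Pipelines UI; the run's output then carries the deployed service endpoint.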
+
 from flask import Flask, request, abort
 from flask_cors import CORS
 import json
diff --git a/components/kubeflow/kfserving/src/kfservingdeployer.py b/components/kubeflow/kfserving/src/kfservingdeployer.py
index 6ae807bfcaa5..4803c85b8966 100644
--- a/components/kubeflow/kfserving/src/kfservingdeployer.py
+++ b/components/kubeflow/kfserving/src/kfservingdeployer.py
@@ -1,16 +1,17 @@
-# Copyright 2019 IBM Corporation
+# Copyright 2019 kubeflow.org.
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
 # You may obtain a copy of the License at
 #
-# http://www.apache.org/licenses/LICENSE-2.0
+# http://www.apache.org/licenses/LICENSE-2.0
 #
 # Unless required by applicable law or agreed to in writing, software
 # distributed under the License is distributed on an "AS IS" BASIS,
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
+
 import json
 import argparse
 import os
@@ -21,33 +22,36 @@ from kfserving import KFServingClient
 from kfserving import constants
-from kfserving import V1alpha1ModelSpec
-from kfserving import V1alpha1TensorflowSpec
-from kfserving import V1alpha1PyTorchSpec
-from kfserving import V1alpha1SKLearnSpec
-from kfserving import V1alpha1XGBoostSpec
-from kfserving import V1alpha1TensorRTSpec
-from kfserving import V1alpha1CustomSpec
-from kfserving import V1alpha1KFServiceSpec
-from kfserving import V1alpha1KFService
-
-
-def ModelSpec(framework, model_uri):
+from kfserving import V1alpha2EndpointSpec
+from kfserving import V1alpha2PredictorSpec
+from kfserving import V1alpha2TensorflowSpec
+from kfserving import V1alpha2PyTorchSpec
+from kfserving import V1alpha2SKLearnSpec
+from kfserving import V1alpha2XGBoostSpec
+from kfserving.models.v1alpha2_onnx_spec import V1alpha2ONNXSpec
+from kfserving import V1alpha2TensorRTSpec
+from kfserving import V1alpha2CustomSpec
+from kfserving import V1alpha2InferenceServiceSpec
+from kfserving import V1alpha2InferenceService
+
+def EndpointSpec(framework, storage_uri):
     if framework == 'tensorflow':
-        return V1alpha1ModelSpec(tensorflow=V1alpha1TensorflowSpec(model_uri=model_uri))
+        return V1alpha2EndpointSpec(predictor=V1alpha2PredictorSpec(tensorflow=V1alpha2TensorflowSpec(storage_uri=storage_uri)))
     elif framework == 'pytorch':
-        return V1alpha1ModelSpec(pytorch=V1alpha1PyTorchSpec(model_uri=model_uri))
+        return V1alpha2EndpointSpec(predictor=V1alpha2PredictorSpec(pytorch=V1alpha2PyTorchSpec(storage_uri=storage_uri)))
     elif framework == 'sklearn':
-        return V1alpha1ModelSpec(sklearn=V1alpha1SKLearnSpec(model_uri=model_uri))
+        return V1alpha2EndpointSpec(predictor=V1alpha2PredictorSpec(sklearn=V1alpha2SKLearnSpec(storage_uri=storage_uri)))
     elif framework == 'xgboost':
-        return V1alpha1ModelSpec(xgboost=V1alpha1XGBoostSpec(model_uri=model_uri))
+        return V1alpha2EndpointSpec(predictor=V1alpha2PredictorSpec(xgboost=V1alpha2XGBoostSpec(storage_uri=storage_uri)))
+    elif framework == 'onnx':
+        return V1alpha2EndpointSpec(predictor=V1alpha2PredictorSpec(onnx=V1alpha2ONNXSpec(storage_uri=storage_uri)))
     elif framework == 'tensorrt':
-        return V1alpha1ModelSpec(tensorrt=V1alpha1TensorRTSpec(model_uri=model_uri))
+        return V1alpha2EndpointSpec(predictor=V1alpha2PredictorSpec(tensorrt=V1alpha2TensorRTSpec(storage_uri=storage_uri)))
     else:
         raise Exception("Error: No matching framework: " + framework)
 
-def customModelSpec(custom_model_spec):
+def customEndpointSpec(custom_model_spec):
     env = [client.V1EnvVar(name=i['name'], value=i['value']) for i in custom_model_spec['env']] if custom_model_spec.get('env', '') else None
     ports = [client.V1ContainerPort(container_port=int(custom_model_spec.get('port', '')))] if custom_model_spec.get('port', '') else None
     containerSpec = client.V1Container(
@@ -60,16 +64,16 @@ def customModelSpec(custom_model_spec):
         image_pull_policy=custom_model_spec.get('image_pull_policy', None),
         working_dir=custom_model_spec.get('working_dir', None)
     )
-    return V1alpha1ModelSpec(custom=V1alpha1CustomSpec(container=containerSpec))
+    return V1alpha2EndpointSpec(predictor=V1alpha2PredictorSpec(custom=V1alpha2CustomSpec(container=containerSpec)))
 
-def kfserving_deployment(metadata, default_model_spec, canary_model_spec=None, canary_model_traffic=None):
-    return V1alpha1KFService(api_version=constants.KFSERVING_GROUP + '/' + constants.KFSERVING_VERSION,
+def InferenceService(metadata, default_model_spec, canary_model_spec=None, canary_model_traffic=None):
+    return V1alpha2InferenceService(api_version=constants.KFSERVING_GROUP + '/' + constants.KFSERVING_VERSION,
                          kind=constants.KFSERVING_KIND,
                          metadata=metadata,
-                         spec=V1alpha1KFServiceSpec(default=default_model_spec,
-                                                    canary=canary_model_spec,
-                                                    canary_traffic_percent=canary_model_traffic))
+                         spec=V1alpha2InferenceServiceSpec(default=default_model_spec,
+                                                           canary=canary_model_spec,
+                                                           canary_traffic_percent=canary_model_traffic))
 
 def deploy_model(action, model_name, default_model_uri, canary_model_uri, canary_model_traffic, namespace, framework, default_custom_model_spec, canary_custom_model_spec, autoscaling_target=0):
@@ -78,26 +82,33 @@ def deploy_model(action, model_name, default_model_uri, canary_model_uri, canary
     else:
         annotations = None
     metadata = client.V1ObjectMeta(name=model_name, namespace=namespace, annotations=annotations)
-    if framework != 'custom':
-        default_model_spec = ModelSpec(framework, default_model_uri)
-    else:
-        default_model_spec = customModelSpec(default_custom_model_spec)
+
+    # Create Default deployment if default model uri is provided.
+    if framework != 'custom' and default_model_uri:
+        default_model_spec = EndpointSpec(framework, default_model_uri)
+    elif framework == 'custom' and default_custom_model_spec:
+        default_model_spec = customEndpointSpec(default_custom_model_spec)
+
+    # Create Canary deployment if canary model uri is provided.
     if framework != 'custom' and canary_model_uri:
-        canary_model_spec = ModelSpec(framework, canary_model_uri)
-        kfsvc = kfserving_deployment(metadata, default_model_spec, canary_model_spec, canary_model_traffic)
+        canary_model_spec = EndpointSpec(framework, canary_model_uri)
+        kfsvc = InferenceService(metadata, default_model_spec, canary_model_spec, canary_model_traffic)
     elif framework == 'custom' and canary_custom_model_spec:
-        canary_model_spec = customModelSpec(canary_custom_model_spec)
-        kfsvc = kfserving_deployment(metadata, default_model_spec, canary_model_spec, canary_model_traffic)
+        canary_model_spec = customEndpointSpec(canary_custom_model_spec)
+        kfsvc = InferenceService(metadata, default_model_spec, canary_model_spec, canary_model_traffic)
     else:
-        kfsvc = kfserving_deployment(metadata, default_model_spec)
+        kfsvc = InferenceService(metadata, default_model_spec)
 
     KFServing = KFServingClient()
     if action == 'create':
-        KFServing.create(kfsvc)
+        KFServing.create(kfsvc, watch=True, timeout_seconds=120)
     elif action == 'update':
         KFServing.patch(model_name, kfsvc)
+    # 'rollout' shifts traffic to the canary spec; 'promote' makes the canary the new default.
+    elif action == 'rollout':
+        KFServing.rollout_canary(model_name, canary=canary_model_spec, percent=canary_model_traffic, namespace=namespace, watch=True, timeout_seconds=120)
+    elif action == 'promote':
+        KFServing.promote(model_name, namespace=namespace, watch=True, timeout_seconds=120)
     elif action == 'delete':
         KFServing.delete(model_name, namespace=namespace)
     else:
@@ -117,7 +128,7 @@ def deploy_model(action, model_name, default_model_uri, canary_model_uri, canary
     parser.add_argument('--framework', type=str, help='Model Serving Framework', default='tensorflow')
     parser.add_argument('--default-custom-model-spec', type=json.loads, help='Custom runtime default custom model container spec', default={})
     parser.add_argument('--canary-custom-model-spec', type=json.loads, help='Custom runtime canary custom model container spec', default={})
-    parser.add_argument('--kfserving-deployer-api', type=str, help='kfserving remote deployer api endpoint', default='')
+    parser.add_argument('--kfserving-endpoint', type=str, help='kfserving remote deployer api endpoint', default='')
     parser.add_argument('--autoscaling-target', type=str, help='Autoscaling target number', default='0')
     parser.add_argument('--output_path', type=str, help='Path to store URI output')
     args = parser.parse_args()
@@ -134,10 +145,10 @@ def deploy_model(action, model_name, default_model_uri, canary_model_uri, canary
     output_path = args.output_path
     default_custom_model_spec = args.default_custom_model_spec
     canary_custom_model_spec = args.canary_custom_model_spec
-    kfserving_deployer_api = url.sub('', args.kfserving_deployer_api)
+    kfserving_endpoint = url.sub('', args.kfserving_endpoint)
     autoscaling_target = int(args.autoscaling_target)
 
-    if kfserving_deployer_api:
+    if kfserving_endpoint:
         formData = {
             "action": action,
             "model_name": model_name,
@@ -150,7 +161,7 @@ def deploy_model(action, model_name, default_model_uri, canary_model_uri, canary
             "canary_custom_model_spec": canary_custom_model_spec,
             "autoscaling_target": autoscaling_target
         }
-        response = requests.post("http://" + kfserving_deployer_api + "/deploy-model", json=formData)
+        response = requests.post("http://" + kfserving_endpoint + "/deploy-model", json=formData)
         model_status = response.json()
     else:
         model_status = deploy_model(
@@ -171,7 +182,9 @@ def deploy_model(action, model_name, default_model_uri, canary_model_uri, canary
         print('Sample test commands: ')
         print('# Note: If Istio Ingress gateway is not served with LoadBalancer, use $CLUSTER_NODE_IP:31380 as the ISTIO_INGRESS_ENDPOINT')
         print('ISTIO_INGRESS_ENDPOINT=$(kubectl -n istio-system get service istio-ingressgateway -o jsonpath=\'{.status.loadBalancer.ingress[0].ip}\')')
-        print('curl -X GET -H "Host: ' + url.sub('', model_status['status']['url']) + '" $ISTIO_INGRESS_ENDPOINT')
+        # model_status['status']['url'] is like http://flowers-sample.kubeflow.example.com/v1/models/flowers-sample
+        host, path = url.sub('', model_status['status']['url']).split("/", 1)
+        print('curl -X GET -H "Host: ' + host + '" http://$ISTIO_INGRESS_ENDPOINT/' + path)
     except:
         print('Model is not ready, check the logs for the Knative URL status.')
     if not os.path.exists(os.path.dirname(output_path)):
diff --git a/components/kubeflow/launcher/kubeflow_tfjob_launcher_op.py b/components/kubeflow/launcher/kubeflow_tfjob_launcher_op.py
index b5a7c7fd357e..afaa3bddbc0d 100644
--- a/components/kubeflow/launcher/kubeflow_tfjob_launcher_op.py
+++ b/components/kubeflow/launcher/kubeflow_tfjob_launcher_op.py
@@ -17,7 +17,7 @@ def kubeflow_tfjob_launcher_op(container_image, command, number_of_workers: int, number_of_parameter_servers: int, tfjob_timeout_minutes: int, output_dir=None, step_name='TFJob-launcher'):
   return dsl.ContainerOp(
     name = step_name,
-    image = 'gcr.io/ml-pipeline/ml-pipeline-kubeflow-tf:57d9f7f1cfd458e945d297957621716062d89a49',
+    image = 'gcr.io/ml-pipeline/ml-pipeline-kubeflow-tf:d6d9d8da19f7110fff3a5ba713710402edaeee65',
     arguments = [
       '--workers', number_of_workers,
       '--pss', number_of_parameter_servers,
diff --git a/components/kubeflow/launcher/src/train.template.yaml b/components/kubeflow/launcher/src/train.template.yaml
index 72e04d63bd80..42cecf8e30bc 100644
--- a/components/kubeflow/launcher/src/train.template.yaml
+++ b/components/kubeflow/launcher/src/train.template.yaml
@@ -26,7 +26,7 @@ spec:
     spec:
       containers:
       - name: tensorflow
-        image: gcr.io/ml-pipeline/ml-pipeline-kubeflow-tf-trainer:57d9f7f1cfd458e945d297957621716062d89a49
+        image: gcr.io/ml-pipeline/ml-pipeline-kubeflow-tf-trainer:d6d9d8da19f7110fff3a5ba713710402edaeee65
         command:
         - python
         - -m
@@ -49,7 +49,7 @@ spec:
     spec:
       containers:
       - name: tensorflow
-        image: gcr.io/ml-pipeline/ml-pipeline-kubeflow-tf-trainer:57d9f7f1cfd458e945d297957621716062d89a49
+        image: gcr.io/ml-pipeline/ml-pipeline-kubeflow-tf-trainer:d6d9d8da19f7110fff3a5ba713710402edaeee65
         command:
         - python
         - -m
@@ -72,7 +72,7 @@ spec:
     spec:
      containers:
       - name: tensorflow
-        image: gcr.io/ml-pipeline/ml-pipeline-kubeflow-tf-trainer:57d9f7f1cfd458e945d297957621716062d89a49
+        image: gcr.io/ml-pipeline/ml-pipeline-kubeflow-tf-trainer:d6d9d8da19f7110fff3a5ba713710402edaeee65
         command:
         - python
         - -m
diff --git a/components/local/confusion_matrix/component.yaml b/components/local/confusion_matrix/component.yaml
index 025d4818ec11..fbb965879652 100644
--- a/components/local/confusion_matrix/component.yaml
+++ b/components/local/confusion_matrix/component.yaml
@@ -9,7 +9,7 @@ outputs:
 - {name: MLPipeline Metrics, type: Metrics}
 implementation:
   container:
-    image: gcr.io/ml-pipeline/ml-pipeline-local-confusion-matrix:57d9f7f1cfd458e945d297957621716062d89a49
+    image: gcr.io/ml-pipeline/ml-pipeline-local-confusion-matrix:d6d9d8da19f7110fff3a5ba713710402edaeee65
     command: [python2, /ml/confusion_matrix.py]
     args: [
       --predictions, {inputValue: Predictions},
diff --git a/components/local/roc/component.yaml b/components/local/roc/component.yaml
index 9e46900f7b34..e9d67ee65de6 100644
--- a/components/local/roc/component.yaml
+++ b/components/local/roc/component.yaml
@@ -11,7 +11,7 @@ outputs:
 - {name: MLPipeline Metrics, type: Metrics}
 implementation:
   container:
-    image: gcr.io/ml-pipeline/ml-pipeline-local-confusion-matrix:57d9f7f1cfd458e945d297957621716062d89a49
+    image: gcr.io/ml-pipeline/ml-pipeline-local-confusion-matrix:d6d9d8da19f7110fff3a5ba713710402edaeee65
     command: [python2, /ml/roc.py]
     args: [
       --predictions, {inputValue: Predictions dir},
diff --git a/components/third_party_licenses.csv b/components/third_party_licenses.csv
index a86a2338b803..693ffdcb7836 100644
--- a/components/third_party_licenses.csv
+++ b/components/third_party_licenses.csv
@@ -137,8 +137,8 @@ ipython-genutils,https://raw.githubusercontent.com/ipython/ipython_genutils/mast
 ipywidgets,https://raw.githubusercontent.com/jupyter-widgets/ipywidgets/master/LICENSE,3-Clause BSD
 Jinja2,https://raw.githubusercontent.com/pallets/jinja/master/LICENSE.rst,BSD
 jsonschema,https://raw.githubusercontent.com/Julian/jsonschema/master/COPYING,MIT
-jupyter,https://raw.githubusercontent.com/jupyter/notebook/master/COPYING.md,BSD
-notebook,https://raw.githubusercontent.com/jupyter/notebook/master/COPYING.md,BSD
+jupyter,https://raw.githubusercontent.com/jupyter/notebook/master/LICENSE,BSD
+notebook,https://raw.githubusercontent.com/jupyter/notebook/master/LICENSE,BSD
 jupyter-client,https://raw.githubusercontent.com/jupyter/jupyter_client/master/COPYING.md,BSD
 jupyter-console,https://raw.githubusercontent.com/jupyter/jupyter_console/master/COPYING.md,BSD
 jupyter-core,https://raw.githubusercontent.com/jupyter/jupyter_core/master/COPYING.md,BSD
diff --git a/frontend/.prettierignore b/frontend/.prettierignore
new file mode 100644
index 000000000000..650570754b35
--- /dev/null
+++ b/frontend/.prettierignore
@@ -0,0 +1,2 @@
+src/generated
+server
diff --git a/frontend/.prettierrc.yaml b/frontend/.prettierrc.yaml
new file mode 100644
index 000000000000..960e71e6ab5a
--- /dev/null
+++ b/frontend/.prettierrc.yaml
@@ -0,0 +1,4 @@
+trailingComma: all
+singleQuote: true
+jsxSingleQuote: true
+printWidth: 100
diff --git a/frontend/OWNERS b/frontend/OWNERS
index 98a0efa00141..e4115faf230c 100644
--- a/frontend/OWNERS
+++ b/frontend/OWNERS
@@ -1,10 +1,6 @@
 approvers:
-  - rileyjbauer
   - bobgy
   - jingzhang36
   - rmgogogo
 reviewers:
-  - rileyjbauer
   - bobgy
-  - jingzhang36
-  - rmgogogo
diff --git a/frontend/README.md b/frontend/README.md
index 1a2f816d606d..edf9b769791d 100644
--- a/frontend/README.md
+++ b/frontend/README.md
@@ -1,6 +1,7 @@
 # Kubeflow Pipelines Management Frontend
 
-**Develop:**
+## Develop
+
 You need `npm`, install dependencies using `npm install`.
 
 If you made any changes to protos (see backend/README), you'll need to
@@ -23,9 +24,19 @@ database.
 
 ### Using a real cluster as backend
 
-1. First configure your `kubectl` to talk to your kfp lite cluster.
-2. `npm run start:proxies` to start proxy servers that port forwards to your cluster.
-3. `npm start` to start a webpack dev server, it has already been configured to talk to aforementioned proxies.
+#### Common steps
+
+1. First configure your `kubectl` to talk to your KFP standalone cluster.
+2. `npm start` to start a webpack dev server; it is configured to proxy api requests to localhost:3001, where the proxy you start in the next step will handle them.
+
+#### Special steps that depend on what you want to do
+
+| What to develop? | Who handles API requests? | Script to run | Extra notes |
+| ----------------------- | ------------------------- | -------------------------------------------------------------- | ------------------------------------------------------------------ |
+| Client UI | standalone deployment | `NAMESPACE=kubeflow npm run start:proxy-standalone` | |
+| Client UI + Node server | standalone deployment | `NAMESPACE=kubeflow npm run start:proxy-standalone-and-server` | You need to rerun the script every time you edit node server code. |
+
+TODO: figure out and document how to use a Kubeflow deployment to develop UI.
 
 **Production Build:**
 You can do `npm run build` to build the frontend code for production, which
@@ -45,3 +56,34 @@ this image, you'll need to port forward 3000, and pass the environment
 variables `ML_PIPELINE_SERVICE_HOST` and `ML_PIPELINE_SERVICE_PORT` with
 the details of the API server, which you can run using `npm run api`
 separately.
+
+## Code Style
+
+We use [prettier](https://prettier.io/) for code formatting; our prettier config
+is [here](https://github.com/kubeflow/pipelines/blob/master/frontend/.prettierrc.yaml).
+
+To learn more about what prettier is, see [What is Prettier](https://prettier.io/docs/en/index.html).
+
+### IDE Integration
+
+- For vscode, install the plugin "Prettier - Code formatter" and it will pick
+  up this project's config automatically.
+  We recommend the following settings so vscode autoformats and organizes imports on save.
+  ```
+  "[typescript]": {
+    "editor.codeActionsOnSave": {
+      "source.organizeImports": true,
+    },
+    "editor.formatOnSave": true,
+  },
+  ```
+- For others, refer to https://prettier.io/docs/en/editors.html
+
+### Format Code Manually
+
+Run `npm run format`.
+
+### Escape hatch
+
+If there is some code you don't want prettier to format, follow the
+guide [here](https://prettier.io/docs/en/ignore.html). (Most likely you won't need this.)
diff --git a/frontend/mock-backend/fixed-data.ts b/frontend/mock-backend/fixed-data.ts
index 8f5e273091d0..415afb2512ff 100644
--- a/frontend/mock-backend/fixed-data.ts
+++ b/frontend/mock-backend/fixed-data.ts
@@ -12,16 +12,16 @@
 // See the License for the specific language governing permissions and
 // limitations under the License.
+import { ApiExperiment } from '../src/apis/experiment'; +import { ApiJob } from '../src/apis/job'; +import { ApiPipeline } from '../src/apis/pipeline'; +import { ApiRelationship, ApiResourceType, ApiRunDetail, RunMetricFormat } from '../src/apis/run'; import helloWorldRun from './hello-world-runtime'; import helloWorldWithStepsRun from './hello-world-with-steps-runtime'; +import jsonRun from './json-runtime'; import coinflipRun from './mock-coinflip-runtime'; import errorRun from './mock-error-runtime'; import xgboostRun from './mock-xgboost-runtime'; -import jsonRun from './json-runtime'; -import { ApiExperiment } from '../src/apis/experiment'; -import { ApiJob } from '../src/apis/job'; -import { ApiPipeline } from '../src/apis/pipeline'; -import { ApiRunDetail, ApiResourceType, ApiRelationship, RunMetricFormat } from '../src/apis/run'; function padStartTwoZeroes(str: string): string { let padded = str || ''; @@ -51,8 +51,8 @@ const pipelines: ApiPipeline[] = [ }, { name: 'output', - } - ] + }, + ], }, { created_at: new Date('2018-04-02T20:59:29.000Z'), @@ -71,8 +71,8 @@ const pipelines: ApiPipeline[] = [ }, { name: 'output', - } - ] + }, + ], }, { created_at: new Date('2018-04-03T20:58:23.000Z'), @@ -102,8 +102,16 @@ const pipelines: ApiPipeline[] = [ }, { name: 'output', - } - ] + }, + ], + }, + { + created_at: new Date('2019-10-25T20:59:23.000Z'), + description: + 'A pipeline using [markdown](https://en.wikipedia.org/wiki/Markdown) for description.', + id: '8fbe3bd6-a01f-11e8-98d0-529269fb1461', + name: 'Markdown description', + parameters: [], }, ]; @@ -134,25 +142,27 @@ const jobs: ApiJob[] = [ { name: 'output', value: 'gs://path-to-my-project', - } + }, ], pipeline_id: pipelines[0].id, pipeline_name: pipelines[0].name, }, - resource_references: [{ - key: { - id: '275ea11d-ac63-4ce3-bc33-ec81981ed56a', - type: ApiResourceType.EXPERIMENT, + resource_references: [ + { + key: { + id: '275ea11d-ac63-4ce3-bc33-ec81981ed56a', + type: ApiResourceType.EXPERIMENT, + }, + relationship: ApiRelationship.OWNER, }, - relationship: ApiRelationship.OWNER, - }], + ], status: 'Failed:Succeeded', trigger: { cron_schedule: { cron: '30 1 * * * ?', end_time: new Date('2018-04-01T21:58:23.000Z'), start_time: new Date('2018-03-01T21:58:23.000Z'), - } + }, }, updated_at: new Date('2018-03-01T21:58:23.000Z'), }, @@ -176,18 +186,20 @@ const jobs: ApiJob[] = [ { name: 'output', value: 'some-output-path', - } + }, ], pipeline_id: pipelines[1].id, pipeline_name: pipelines[1].name, }, - resource_references: [{ - key: { - id: '275ea11d-ac63-4ce3-bc33-ec81981ed56a', - type: ApiResourceType.EXPERIMENT, + resource_references: [ + { + key: { + id: '275ea11d-ac63-4ce3-bc33-ec81981ed56a', + type: ApiResourceType.EXPERIMENT, + }, + relationship: ApiRelationship.OWNER, }, - relationship: ApiRelationship.OWNER, - }], + ], status: 'Succeeded', trigger: { cron_schedule: { @@ -221,24 +233,26 @@ const jobs: ApiJob[] = [ { name: 'output', value: 'gs://path-to-my-other-project', - } + }, ], pipeline_id: pipelines[2].id, pipeline_name: pipelines[2].name, }, - resource_references: [{ - key: { - id: '275ea11d-ac63-4ce3-bc33-ec81981ed56a', - type: ApiResourceType.EXPERIMENT, + resource_references: [ + { + key: { + id: '275ea11d-ac63-4ce3-bc33-ec81981ed56a', + type: ApiResourceType.EXPERIMENT, + }, + relationship: ApiRelationship.OWNER, }, - relationship: ApiRelationship.OWNER, - }], + ], status: 'Succeeded', trigger: { periodic_schedule: { end_time: new Date('2018-03-03T23:58:23.000Z'), interval_second: '439652', - } + }, }, 
updated_at: new Date('2018-03-03T23:58:23.000Z'), }, @@ -258,7 +272,8 @@ const experiments: ApiExperiment[] = [ name: 'Kubeflow Pipelines Experiment', }, { - description: 'A different Pipeline experiment used to group runs. ' + + description: + 'A different Pipeline experiment used to group runs. ' + 'This experiment also has a very long description, which should overflow the container card.', id: 'a4d4f8c6-ce9c-4200-a92e-c48ec759b733', name: 'Experiment Number 2', @@ -287,7 +302,7 @@ const runs: ApiRunDetail[] = [ name: 'log_loss', node_id: 'coinflip-recursive-q7dqb', number_value: -0.573, - } + }, ], name: 'coinflip-recursive-run-lknlfs3', pipeline_spec: { @@ -298,13 +313,15 @@ const runs: ApiRunDetail[] = [ pipeline_id: pipelines[0].id, pipeline_name: pipelines[0].name, }, - resource_references: [{ - key: { - id: '275ea11d-ac63-4ce3-bc33-ec81981ed56a', - type: ApiResourceType.EXPERIMENT, + resource_references: [ + { + key: { + id: '275ea11d-ac63-4ce3-bc33-ec81981ed56a', + type: ApiResourceType.EXPERIMENT, + }, + relationship: ApiRelationship.OWNER, }, - relationship: ApiRelationship.OWNER, - }], + ], scheduled_at: new Date('2018-03-17T20:58:23.000Z'), status: 'Failed:Succeeded', }, @@ -337,13 +354,15 @@ const runs: ApiRunDetail[] = [ pipeline_id: pipelines[0].id, pipeline_name: pipelines[0].name, }, - resource_references: [{ - key: { - id: '275ea11d-ac63-4ce3-bc33-ec81981ed56a', - type: ApiResourceType.EXPERIMENT, + resource_references: [ + { + key: { + id: '275ea11d-ac63-4ce3-bc33-ec81981ed56a', + type: ApiResourceType.EXPERIMENT, + }, + relationship: ApiRelationship.OWNER, }, - relationship: ApiRelationship.OWNER, - }], + ], scheduled_at: new Date('2018-04-17T21:00:00.000Z'), status: 'Error', }, @@ -356,12 +375,14 @@ const runs: ApiRunDetail[] = [ created_at: new Date('2018-05-17T21:58:23.000Z'), description: 'A simple run with json input', id: '183ac01f-dc26-4ebf-b817-7b3f96fdc3ac', - metrics: [{ - format: RunMetricFormat.PERCENTAGE, - name: 'accuracy', - node_id: 'json-12abc', - number_value: 0.5423, - }], + metrics: [ + { + format: RunMetricFormat.PERCENTAGE, + name: 'accuracy', + node_id: 'json-12abc', + number_value: 0.5423, + }, + ], name: 'json-12abc', pipeline_spec: { parameters: [ @@ -371,16 +392,18 @@ const runs: ApiRunDetail[] = [ pipeline_id: pipelines[2].id, pipeline_name: pipelines[2].name, }, - resource_references: [{ - key: { - id: 'a4d4f8c6-ce9c-4200-a92e-c48ec759b733', - type: ApiResourceType.EXPERIMENT, + resource_references: [ + { + key: { + id: 'a4d4f8c6-ce9c-4200-a92e-c48ec759b733', + type: ApiResourceType.EXPERIMENT, + }, + relationship: ApiRelationship.OWNER, }, - relationship: ApiRelationship.OWNER, - }], + ], scheduled_at: new Date('2018-05-17T21:58:23.000Z'), status: 'Running', - } + }, }, { pipeline_runtime: { @@ -390,12 +413,14 @@ const runs: ApiRunDetail[] = [ created_at: new Date('2018-05-17T21:58:23.000Z'), description: 'A simple hello world run', id: 'fa5d897e-88d3-4dfc-b189-9dea6947c9bc', - metrics: [{ - format: RunMetricFormat.PERCENTAGE, - name: 'accuracy', - node_id: 'hello-world-7sm94', - number_value: 0.5423, - }], + metrics: [ + { + format: RunMetricFormat.PERCENTAGE, + name: 'accuracy', + node_id: 'hello-world-7sm94', + number_value: 0.5423, + }, + ], name: 'hello-world-7sm94', pipeline_spec: { parameters: [ @@ -405,13 +430,15 @@ const runs: ApiRunDetail[] = [ pipeline_id: pipelines[2].id, pipeline_name: pipelines[2].name, }, - resource_references: [{ - key: { - id: 'a4d4f8c6-ce9c-4200-a92e-c48ec759b733', - type: ApiResourceType.EXPERIMENT, + 
resource_references: [ + { + key: { + id: 'a4d4f8c6-ce9c-4200-a92e-c48ec759b733', + type: ApiResourceType.EXPERIMENT, + }, + relationship: ApiRelationship.OWNER, }, - relationship: ApiRelationship.OWNER, - }], + ], scheduled_at: new Date('2018-05-17T21:58:23.000Z'), status: 'Running', }, @@ -425,12 +452,14 @@ const runs: ApiRunDetail[] = [ description: 'A simple hello world run, but with steps. Not attached to any experiment', finished_at: new Date('2018-06-18T21:00:33.000Z'), id: '21afb688-7597-47e9-b6c3-35d3145fe5e1', - metrics: [{ - format: RunMetricFormat.PERCENTAGE, - name: 'accuracy', - node_id: 'hello-world-61985dbf-4299-458b-a183-1f2c2436c21c', - number_value: 0.43, - }], + metrics: [ + { + format: RunMetricFormat.PERCENTAGE, + name: 'accuracy', + node_id: 'hello-world-61985dbf-4299-458b-a183-1f2c2436c21c', + number_value: 0.43, + }, + ], name: 'hello-world-with-steps-kajnkv4', pipeline_spec: { parameters: [ @@ -487,13 +516,15 @@ const runs: ApiRunDetail[] = [ pipeline_id: pipelines[1].id, pipeline_name: pipelines[1].name, }, - resource_references: [{ - key: { - id: '275ea11d-ac63-4ce3-bc33-ec81981ed56a', - type: ApiResourceType.EXPERIMENT, + resource_references: [ + { + key: { + id: '275ea11d-ac63-4ce3-bc33-ec81981ed56a', + type: ApiResourceType.EXPERIMENT, + }, + relationship: ApiRelationship.OWNER, }, - relationship: ApiRelationship.OWNER, - }], + ], scheduled_at: new Date('2018-07-17T23:58:23.000Z'), status: 'Pending', }, @@ -504,14 +535,15 @@ const runs: ApiRunDetail[] = [ }, run: { created_at: new Date('2018-08-18T20:58:23.000Z'), - description: 'An xgboost evaluation run with a very long description that includes:' - + ' Lorem ipsum dolor sit amet, consectetur adipiscing elit. Praesent fermentum commodo' - + ' libero, a imperdiet ipsum cursus id. Nullam odio sem, ornare id sollicitudin ac,' - + ' rutrum in dolor. Integer interdum lacus in ex rutrum elementum. Mauris gravida feugiat' - + ' enim, ac dapibus augue rhoncus in. Integer vel tempus nulla. Cras sed ultrices dolor.' - + ' Ut nec dapibus eros, vitae iaculis nunc. In aliquet accumsan rhoncus. Donec vitae' - + ' ipsum a tellus fermentum pharetra in in neque. Pellentesque consequat felis non est' - + ' vulputate pellentesque. Aliquam eget cursus enim.', + description: + 'An xgboost evaluation run with a very long description that includes:' + + ' Lorem ipsum dolor sit amet, consectetur adipiscing elit. Praesent fermentum commodo' + + ' libero, a imperdiet ipsum cursus id. Nullam odio sem, ornare id sollicitudin ac,' + + ' rutrum in dolor. Integer interdum lacus in ex rutrum elementum. Mauris gravida feugiat' + + ' enim, ac dapibus augue rhoncus in. Integer vel tempus nulla. Cras sed ultrices dolor.' + + ' Ut nec dapibus eros, vitae iaculis nunc. In aliquet accumsan rhoncus. Donec vitae' + + ' ipsum a tellus fermentum pharetra in in neque. Pellentesque consequat felis non est' + + ' vulputate pellentesque. 
Aliquam eget cursus enim.', finished_at: new Date('2018-08-20T21:01:23.000Z'), id: '7fc01714-4a13-4c05-8044-a8a72c14253b', metrics: [ @@ -526,9 +558,10 @@ const runs: ApiRunDetail[] = [ name: 'log_loss', node_id: 'xgboost-training-gzkm9-2365787662', number_value: -0.123, - } + }, ], - name: 'xgboost-run-with-a-veeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeery-' + + name: + 'xgboost-run-with-a-veeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeery-' + 'loooooooooooooooooooooooooooong-name-aifk298', pipeline_spec: { parameters: [ @@ -538,13 +571,15 @@ const runs: ApiRunDetail[] = [ pipeline_id: pipelines[1].id, pipeline_name: pipelines[1].name, }, - resource_references: [{ - key: { - id: 'a4d4f8c6-ce9c-4200-a92e-c48ec759b733', - type: ApiResourceType.EXPERIMENT, + resource_references: [ + { + key: { + id: 'a4d4f8c6-ce9c-4200-a92e-c48ec759b733', + type: ApiResourceType.EXPERIMENT, + }, + relationship: ApiRelationship.OWNER, }, - relationship: ApiRelationship.OWNER, - }], + ], scheduled_at: new Date('2018-08-18T20:58:23.000Z'), status: 'Succeeded', }, @@ -570,7 +605,7 @@ const runs: ApiRunDetail[] = [ name: 'log_loss', node_id: 'hello-world-7sm94', number_value: -0.223, - } + }, ], name: 'hello-world-with-pipeline', pipeline_spec: { @@ -580,13 +615,15 @@ const runs: ApiRunDetail[] = [ ], workflow_manifest: JSON.stringify(helloWorldRun), }, - resource_references: [{ - key: { - id: 'a4d4f8c6-ce9c-4200-a92e-c48ec759b733', - type: ApiResourceType.EXPERIMENT, + resource_references: [ + { + key: { + id: 'a4d4f8c6-ce9c-4200-a92e-c48ec759b733', + type: ApiResourceType.EXPERIMENT, + }, + relationship: ApiRelationship.OWNER, }, - relationship: ApiRelationship.OWNER, - }], + ], scheduled_at: new Date('2018-08-18T20:58:23.000Z'), status: 'Succeeded', }, @@ -624,7 +661,9 @@ function generateNRuns(): ApiRunDetail[] { run: { created_at: new Date('2018-02-12T20:' + padStartTwoZeroes(i.toString()) + ':23.000Z'), description: 'The description of a dummy run', - finished_at: new Date('2018-02-12T20:' + padStartTwoZeroes(((2 * i) % 60).toString()) + ':25.000Z'), + finished_at: new Date( + '2018-02-12T20:' + padStartTwoZeroes(((2 * i) % 60).toString()) + ':25.000Z', + ), id: 'Some-run-id-' + i, metrics: [ { @@ -655,13 +694,15 @@ function generateNRuns(): ApiRunDetail[] { pipeline_id: 'Some-pipeline-id-' + i, pipeline_name: 'Kubeflow Pipeline number ' + i, }, - resource_references: [{ - key: { - id: '275ea11d-ac63-4ce3-bc33-ec81981ed56a', - type: ApiResourceType.EXPERIMENT, + resource_references: [ + { + key: { + id: '275ea11d-ac63-4ce3-bc33-ec81981ed56a', + type: ApiResourceType.EXPERIMENT, + }, + relationship: ApiRelationship.OWNER, }, - relationship: ApiRelationship.OWNER, - }], + ], scheduled_at: new Date('2018-02-12T20:' + padStartTwoZeroes(i.toString()) + ':23.000Z'), status: 'Succeeded', }, @@ -697,17 +738,19 @@ function generateNJobs(): ApiJob[] { { name: 'output', value: 'gs://path-to-my-project', - } + }, ], pipeline_id: pipelines[i % pipelines.length].id, }, - resource_references: [{ - key: { - id: '7fc01714-4a13-4c05-5902-a8a72c14253b', - type: ApiResourceType.EXPERIMENT, + resource_references: [ + { + key: { + id: '7fc01714-4a13-4c05-5902-a8a72c14253b', + type: ApiResourceType.EXPERIMENT, + }, + relationship: ApiRelationship.OWNER, }, - relationship: ApiRelationship.OWNER, - }], + ], status: 'Succeeded', trigger: undefined, updated_at: new Date('2018-02-01T20:' + padStartTwoZeroes(i.toString()) + ':23.000Z'), diff --git a/frontend/mock-backend/mock-api-server.ts b/frontend/mock-backend/mock-api-server.ts index 
acc3e714867e..82a7377ecca4 100644 --- a/frontend/mock-backend/mock-api-server.ts +++ b/frontend/mock-backend/mock-api-server.ts @@ -18,6 +18,9 @@ import mockApiMiddleware from './mock-api-middleware'; const app = express(); const port = process.argv[2] || 3001; +// Uncomment the following line to get 1000ms delay to all requests +// app.use((req, res, next) => { setTimeout(next, 1000); }); + app.use((_, res, next) => { res.header('Access-Control-Allow-Origin', '*'); res.header('Access-Control-Allow-Headers', 'X-Requested-With, content-type'); diff --git a/frontend/package-lock.json b/frontend/package-lock.json index d939c24ca742..8f5ca589faf6 100644 --- a/frontend/package-lock.json +++ b/frontend/package-lock.json @@ -63,6 +63,24 @@ "regenerator-runtime": "^0.12.0" } }, + "@grpc/proto-loader": { + "version": "0.3.0", + "resolved": "https://registry.npmjs.org/@grpc/proto-loader/-/proto-loader-0.3.0.tgz", + "integrity": "sha512-9b8S/V+3W4Gv7G/JKSZ48zApgyYbfIR7mAC9XNnaSWme3zj57MIESu0ELzm9j5oxNIpFG8DgO00iJMIUZ5luqw==", + "requires": { + "@types/lodash": "^4.14.104", + "@types/node": "^9.4.6", + "lodash": "^4.17.5", + "protobufjs": "^6.8.6" + }, + "dependencies": { + "@types/node": { + "version": "9.6.52", + "resolved": "https://registry.npmjs.org/@types/node/-/node-9.6.52.tgz", + "integrity": "sha512-d6UdHtc8HKe3NTruj9mHk2B8EiHZyuG/00aYbUedHvy9sBhtLAX1gaxSNgvcheOvIZavvmpJYlwfHjjxlU/Few==" + } + } + }, "@material-ui/core": { "version": "3.7.1", "resolved": "https://registry.npmjs.org/@material-ui/core/-/core-3.7.1.tgz", @@ -144,6 +162,60 @@ "react-is": "^16.6.3" } }, + "@protobufjs/aspromise": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/@protobufjs/aspromise/-/aspromise-1.1.2.tgz", + "integrity": "sha1-m4sMxmPWaafY9vXQiToU00jzD78=" + }, + "@protobufjs/base64": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/@protobufjs/base64/-/base64-1.1.2.tgz", + "integrity": "sha512-AZkcAA5vnN/v4PDqKyMR5lx7hZttPDgClv83E//FMNhR2TMcLUhfRUBHCmSl0oi9zMgDDqRUJkSxO3wm85+XLg==" + }, + "@protobufjs/codegen": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/@protobufjs/codegen/-/codegen-2.0.4.tgz", + "integrity": "sha512-YyFaikqM5sH0ziFZCN3xDC7zeGaB/d0IUb9CATugHWbd1FRFwWwt4ld4OYMPWu5a3Xe01mGAULCdqhMlPl29Jg==" + }, + "@protobufjs/eventemitter": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/@protobufjs/eventemitter/-/eventemitter-1.1.0.tgz", + "integrity": "sha1-NVy8mLr61ZePntCV85diHx0Ga3A=" + }, + "@protobufjs/fetch": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/@protobufjs/fetch/-/fetch-1.1.0.tgz", + "integrity": "sha1-upn7WYYUr2VwDBYZ/wbUVLDYTEU=", + "requires": { + "@protobufjs/aspromise": "^1.1.1", + "@protobufjs/inquire": "^1.1.0" + } + }, + "@protobufjs/float": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/@protobufjs/float/-/float-1.0.2.tgz", + "integrity": "sha1-Xp4avctz/Ap8uLKR33jIy9l7h9E=" + }, + "@protobufjs/inquire": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/@protobufjs/inquire/-/inquire-1.1.0.tgz", + "integrity": "sha1-/yAOPnzyQp4tyvwRQIKOjMY48Ik=" + }, + "@protobufjs/path": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/@protobufjs/path/-/path-1.1.2.tgz", + "integrity": "sha1-bMKyDFya1q0NzP0hynZz2Nf79o0=" + }, + "@protobufjs/pool": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/@protobufjs/pool/-/pool-1.1.0.tgz", + "integrity": "sha1-Cf0V8tbTq/qbZbw2ZQbWrXhG/1Q=" + }, + "@protobufjs/utf8": { + "version": "1.1.0", + "resolved": 
"https://registry.npmjs.org/@protobufjs/utf8/-/utf8-1.1.0.tgz", + "integrity": "sha1-p3c2C1s5oaLlEG+OhY8v0tBgxXA=" + }, "@types/body-parser": { "version": "1.17.0", "resolved": "https://registry.npmjs.org/@types/body-parser/-/body-parser-1.17.0.tgz", @@ -155,10 +227,13 @@ } }, "@types/cheerio": { - "version": "0.22.10", - "resolved": "https://registry.npmjs.org/@types/cheerio/-/cheerio-0.22.10.tgz", - "integrity": "sha512-fOM/Jhv51iyugY7KOBZz2ThfT1gwvsGCfWxpLpZDgkGjpEO4Le9cld07OdskikLjDUQJ43dzDaVRSFwQlpdqVg==", - "dev": true + "version": "0.22.13", + "resolved": "https://registry.npmjs.org/@types/cheerio/-/cheerio-0.22.13.tgz", + "integrity": "sha512-OZd7dCUOUkiTorf97vJKwZnSja/DmHfuBAroe1kREZZTCf/tlFecwHhsOos3uVHxeKGZDwzolIrCUApClkdLuA==", + "dev": true, + "requires": { + "@types/node": "*" + } }, "@types/connect": { "version": "3.4.32", @@ -442,9 +517,9 @@ "dev": true }, "@types/enzyme": { - "version": "3.1.15", - "resolved": "https://registry.npmjs.org/@types/enzyme/-/enzyme-3.1.15.tgz", - "integrity": "sha512-6b4JWgV+FNec1c4+8HauGbXg5gRc1oQK93t2+4W+bHjG/PzO+iPvagY6d6bXAZ+t+ps51Zb2F9LQ4vl0S0Epog==", + "version": "3.10.3", + "resolved": "https://registry.npmjs.org/@types/enzyme/-/enzyme-3.10.3.tgz", + "integrity": "sha512-f/Kcb84sZOSZiBPCkr4He9/cpuSLcKRyQaEE20Q30Prx0Dn6wcyMAWI0yofL6yvd9Ht9G7EVkQeRqK0n5w8ILw==", "dev": true, "requires": { "@types/cheerio": "*", @@ -452,9 +527,9 @@ } }, "@types/enzyme-adapter-react-16": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/@types/enzyme-adapter-react-16/-/enzyme-adapter-react-16-1.0.3.tgz", - "integrity": "sha512-9eRLBsC/Djkys05BdTWgav8v6fSCjyzjNuLwG2sfa2b2g/VAN10luP0zB0VwtOWFQ0LGjIboJJvIsVdU5gqRmg==", + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/@types/enzyme-adapter-react-16/-/enzyme-adapter-react-16-1.0.5.tgz", + "integrity": "sha512-K7HLFTkBDN5RyRmU90JuYt8OWEY2iKUn43SDWEoBOXd/PowUWjLZ3Q6qMBiQuZeFYK/TOstaZxsnI0fXoAfLpg==", "dev": true, "requires": { "@types/enzyme": "*" @@ -549,8 +624,21 @@ "@types/lodash": { "version": "4.14.119", "resolved": "https://registry.npmjs.org/@types/lodash/-/lodash-4.14.119.tgz", - "integrity": "sha512-Z3TNyBL8Vd/M9D9Ms2S3LmFq2sSMzahodD6rCS9V2N44HUMINb75jNkSuwAx7eo2ufqTdfOdtGQpNbieUjPQmw==", - "dev": true + "integrity": "sha512-Z3TNyBL8Vd/M9D9Ms2S3LmFq2sSMzahodD6rCS9V2N44HUMINb75jNkSuwAx7eo2ufqTdfOdtGQpNbieUjPQmw==" + }, + "@types/long": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/@types/long/-/long-4.0.0.tgz", + "integrity": "sha512-1w52Nyx4Gq47uuu0EVcsHBxZFJgurQ+rTKS3qMHxR1GY2T8c2AJYd6vZoZ9q1rupaDjU0yT+Jc2XTyXkjeMA+Q==" + }, + "@types/markdown-to-jsx": { + "version": "6.9.0", + "resolved": "https://registry.npmjs.org/@types/markdown-to-jsx/-/markdown-to-jsx-6.9.0.tgz", + "integrity": "sha512-LO/oxz+ZfwBDciiVGqLhhdyeWt196kgICe0QS88K1a2u/FgUF1QkeMAm4zdnAo1kNgo2KgFP1Uqy2IiPJLWppA==", + "dev": true, + "requires": { + "@types/react": "*" + } }, "@types/mime": { "version": "2.0.0", @@ -567,8 +655,7 @@ "@types/node": { "version": "10.12.18", "resolved": "https://registry.npmjs.org/@types/node/-/node-10.12.18.tgz", - "integrity": "sha512-fh+pAqt4xRzPfqA6eh3Z2y6fyZavRIumvjhaCL753+TVkGKGhpPeyrJG2JftD0T9q4GF00KjefsQ+PQNDdWQaQ==", - "dev": true + "integrity": "sha512-fh+pAqt4xRzPfqA6eh3Z2y6fyZavRIumvjhaCL753+TVkGKGhpPeyrJG2JftD0T9q4GF00KjefsQ+PQNDdWQaQ==" }, "@types/prop-types": { "version": "15.5.8", @@ -744,6 +831,43 @@ "es6-promisify": "^5.0.0" } }, + "airbnb-prop-types": { + "version": "2.15.0", + "resolved": 
"https://registry.npmjs.org/airbnb-prop-types/-/airbnb-prop-types-2.15.0.tgz", + "integrity": "sha512-jUh2/hfKsRjNFC4XONQrxo/n/3GG4Tn6Hl0WlFQN5PY9OMC9loSCoAYKnZsWaP8wEfd5xcrPloK0Zg6iS1xwVA==", + "dev": true, + "requires": { + "array.prototype.find": "^2.1.0", + "function.prototype.name": "^1.1.1", + "has": "^1.0.3", + "is-regex": "^1.0.4", + "object-is": "^1.0.1", + "object.assign": "^4.1.0", + "object.entries": "^1.1.0", + "prop-types": "^15.7.2", + "prop-types-exact": "^1.2.0", + "react-is": "^16.9.0" + }, + "dependencies": { + "prop-types": { + "version": "15.7.2", + "resolved": "https://registry.npmjs.org/prop-types/-/prop-types-15.7.2.tgz", + "integrity": "sha512-8QQikdH7//R2vurIJSutZ1smHYTcLpRWEOlHnzcWHmBYrOGUysKwSsrC89BCiFj3CbrfJ/nXFdJepOVrY1GCHQ==", + "dev": true, + "requires": { + "loose-envify": "^1.4.0", + "object-assign": "^4.1.1", + "react-is": "^16.8.1" + } + }, + "react-is": { + "version": "16.10.2", + "resolved": "https://registry.npmjs.org/react-is/-/react-is-16.10.2.tgz", + "integrity": "sha512-INBT1QEgtcCCgvccr5/86CfD71fw9EPmDxgiJX4I2Ddr6ZsV6iFXsuby+qWJPtmNuMY0zByTsG4468P7nHuNWA==", + "dev": true + } + } + }, "ajv": { "version": "5.5.2", "resolved": "https://registry.npmjs.org/ajv/-/ajv-5.5.2.tgz", @@ -1006,15 +1130,51 @@ "resolved": "https://registry.npmjs.org/array-unique/-/array-unique-0.3.2.tgz", "integrity": "sha1-qJS3XUvE9s1nnvMkSp/Y9Gri1Cg=" }, + "array.prototype.find": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/array.prototype.find/-/array.prototype.find-2.1.0.tgz", + "integrity": "sha512-Wn41+K1yuO5p7wRZDl7890c3xvv5UBrfVXTVIe28rSQb6LS0fZMDrQB6PAcxQFRFy6vJTLDc3A2+3CjQdzVKRg==", + "dev": true, + "requires": { + "define-properties": "^1.1.3", + "es-abstract": "^1.13.0" + } + }, "array.prototype.flat": { - "version": "1.2.1", - "resolved": "https://registry.npmjs.org/array.prototype.flat/-/array.prototype.flat-1.2.1.tgz", - "integrity": "sha512-rVqIs330nLJvfC7JqYvEWwqVr5QjYF1ib02i3YJtR/fICO6527Tjpc/e4Mvmxh3GIePPreRXMdaGyC99YphWEw==", + "version": "1.2.2", + "resolved": "https://registry.npmjs.org/array.prototype.flat/-/array.prototype.flat-1.2.2.tgz", + "integrity": "sha512-VXjh7lAL4KXKF2hY4FnEW9eRW6IhdvFW1sN/JwLbmECbCgACCnBHNyP3lFiYuttr0jxRN9Bsc5+G27dMseSWqQ==", "dev": true, "requires": { - "define-properties": "^1.1.2", - "es-abstract": "^1.10.0", + "define-properties": "^1.1.3", + "es-abstract": "^1.15.0", "function-bind": "^1.1.1" + }, + "dependencies": { + "es-abstract": { + "version": "1.15.0", + "resolved": "https://registry.npmjs.org/es-abstract/-/es-abstract-1.15.0.tgz", + "integrity": "sha512-bhkEqWJ2t2lMeaJDuk7okMkJWI/yqgH/EoGwpcvv0XW9RWQsRspI4wt6xuyuvMvvQE3gg/D9HXppgk21w78GyQ==", + "dev": true, + "requires": { + "es-to-primitive": "^1.2.0", + "function-bind": "^1.1.1", + "has": "^1.0.3", + "has-symbols": "^1.0.0", + "is-callable": "^1.1.4", + "is-regex": "^1.0.4", + "object-inspect": "^1.6.0", + "object-keys": "^1.1.1", + "string.prototype.trimleft": "^2.1.0", + "string.prototype.trimright": "^2.1.0" + } + }, + "object-keys": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/object-keys/-/object-keys-1.1.1.tgz", + "integrity": "sha512-NuAESUOUMrlIXOfHKzD6bpPu3tYt3xvjNdRIQ+FeT0lNb4K8WR70CaDxhuNguS2XG+GjkyMwOzsN5ZktImfhLA==", + "dev": true + } } }, "arrify": { @@ -1027,6 +1187,15 @@ "resolved": "https://registry.npmjs.org/asap/-/asap-2.0.6.tgz", "integrity": "sha1-5QNHYR1+aQlDIIu9r+vLwvuGbUY=" }, + "ascli": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/ascli/-/ascli-1.0.1.tgz", + "integrity": 
"sha1-vPpZdKYvGOgcq660lzKrSoj5Brw=", + "requires": { + "colour": "~0.7.1", + "optjs": "~3.2.2" + } + }, "asn1": { "version": "0.2.4", "resolved": "https://registry.npmjs.org/asn1/-/asn1-0.2.4.tgz", @@ -2148,15 +2317,23 @@ } }, "bfj": { - "version": "6.1.1", - "resolved": "https://registry.npmjs.org/bfj/-/bfj-6.1.1.tgz", - "integrity": "sha512-+GUNvzHR4nRyGybQc2WpNJL4MJazMuvf92ueIyA0bIkPRwhhQu3IfZQ2PSoVPpCBJfmoSdOxu5rnotfFLlvYRQ==", + "version": "6.1.2", + "resolved": "https://registry.npmjs.org/bfj/-/bfj-6.1.2.tgz", + "integrity": "sha512-BmBJa4Lip6BPRINSZ0BPEIfB1wUY/9rwbwvIHQA1KjX9om29B6id0wnWXq7m3bn5JrUVjeOTnVuhPT1FiHwPGw==", "dev": true, "requires": { - "bluebird": "^3.5.1", - "check-types": "^7.3.0", - "hoopy": "^0.1.2", - "tryer": "^1.0.0" + "bluebird": "^3.5.5", + "check-types": "^8.0.3", + "hoopy": "^0.1.4", + "tryer": "^1.0.1" + }, + "dependencies": { + "bluebird": { + "version": "3.7.1", + "resolved": "https://registry.npmjs.org/bluebird/-/bluebird-3.7.1.tgz", + "integrity": "sha512-DdmyoGCleJnkbp3nkbxTLJ18rjDsE4yCggEwKNXkeV123sPNfOCYeDoeuOY+F2FrSjO1YXcTU+dsy96KMy+gcg==", + "dev": true + } } }, "big.js": { @@ -2471,6 +2648,21 @@ "resolved": "https://registry.npmjs.org/builtin-status-codes/-/builtin-status-codes-3.0.0.tgz", "integrity": "sha1-hZgoeOIbmOHGZCXgPQF0eI9Wnug=" }, + "bytebuffer": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/bytebuffer/-/bytebuffer-5.0.1.tgz", + "integrity": "sha1-WC7qSxqHO20CCkjVjfhfC7ps/d0=", + "requires": { + "long": "~3" + }, + "dependencies": { + "long": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/long/-/long-3.2.0.tgz", + "integrity": "sha1-2CG3E4yhy1gcFymQ7xTbIAtcR0s=" + } + } + }, "bytes": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/bytes/-/bytes-3.0.0.tgz", @@ -2695,25 +2887,35 @@ "integrity": "sha1-tUc7M9yXxCTl2Y3IfVXU2KKci/I=" }, "check-types": { - "version": "7.4.0", - "resolved": "https://registry.npmjs.org/check-types/-/check-types-7.4.0.tgz", - "integrity": "sha512-YbulWHdfP99UfZ73NcUDlNJhEIDgm9Doq9GhpyXbF+7Aegi3CVV7qqMCKTTqJxlvEvnQBp9IA+dxsGN6xK/nSg==", + "version": "8.0.3", + "resolved": "https://registry.npmjs.org/check-types/-/check-types-8.0.3.tgz", + "integrity": "sha512-YpeKZngUmG65rLudJ4taU7VLkOCTMhNl/u4ctNC56LQS/zJTyNH0Lrtwm1tfTsbLlwvlfsA2d1c8vCf/Kh2KwQ==", "dev": true }, "cheerio": { - "version": "1.0.0-rc.2", - "resolved": "https://registry.npmjs.org/cheerio/-/cheerio-1.0.0-rc.2.tgz", - "integrity": "sha1-S59TqBsn5NXawxwP/Qz6A8xoMNs=", + "version": "1.0.0-rc.3", + "resolved": "https://registry.npmjs.org/cheerio/-/cheerio-1.0.0-rc.3.tgz", + "integrity": "sha512-0td5ijfUPuubwLUu0OBoe98gZj8C/AA+RW3v67GPlGOrvxWjZmBXiBCRU+I8VEiNyJzjth40POfHiz2RB3gImA==", "dev": true, "requires": { "css-select": "~1.2.0", - "dom-serializer": "~0.1.0", + "dom-serializer": "~0.1.1", "entities": "~1.1.1", "htmlparser2": "^3.9.1", "lodash": "^4.15.0", "parse5": "^3.0.1" }, "dependencies": { + "dom-serializer": { + "version": "0.1.1", + "resolved": "https://registry.npmjs.org/dom-serializer/-/dom-serializer-0.1.1.tgz", + "integrity": "sha512-l0IU0pPzLWSHBcieZbpOKgkIn3ts3vAh7ZuFyXNwJxJXk/c4Gwj9xaTJwIDVQCXawWD0qb3IzMGH5rglQaO0XA==", + "dev": true, + "requires": { + "domelementtype": "^1.3.0", + "entities": "^1.1.1" + } + }, "domhandler": { "version": "2.4.2", "resolved": "https://registry.npmjs.org/domhandler/-/domhandler-2.4.2.tgz", @@ -2724,17 +2926,17 @@ } }, "htmlparser2": { - "version": "3.10.0", - "resolved": "https://registry.npmjs.org/htmlparser2/-/htmlparser2-3.10.0.tgz", - "integrity": 
"sha512-J1nEUGv+MkXS0weHNWVKJJ+UrLfePxRWpN3C9bEi9fLxL2+ggW94DQvgYVXsaT30PGwYRIZKNZXuyMhp3Di4bQ==", + "version": "3.10.1", + "resolved": "https://registry.npmjs.org/htmlparser2/-/htmlparser2-3.10.1.tgz", + "integrity": "sha512-IgieNijUMbkDovyoKObU1DUhm1iwNYE/fuifEoEHfd1oZKZDaONBSkal7Y01shxsM49R4XaMdGez3WnF9UfiCQ==", "dev": true, "requires": { - "domelementtype": "^1.3.0", + "domelementtype": "^1.3.1", "domhandler": "^2.3.0", "domutils": "^1.5.1", "entities": "^1.1.1", "inherits": "^2.0.1", - "readable-stream": "^3.0.6" + "readable-stream": "^3.1.1" } }, "parse5": { @@ -2747,9 +2949,9 @@ } }, "readable-stream": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.1.1.tgz", - "integrity": "sha512-DkN66hPyqDhnIQ6Jcsvx9bFjhw214O4poMBcIMgPVpQvNy9a0e0Uhg5SqySyDKAmUlwt8LonTBz1ezOnM8pUdA==", + "version": "3.4.0", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.4.0.tgz", + "integrity": "sha512-jItXPLmrSR8jmTRmRWJXCnGJsfy85mB3Wd/uINMXA65yrnFo0cPClFIUWzo2najVNSl+mx7/4W8ttlLWJe99pQ==", "dev": true, "requires": { "inherits": "^2.0.3", @@ -3073,6 +3275,11 @@ "resolved": "https://registry.npmjs.org/colors/-/colors-1.1.2.tgz", "integrity": "sha1-FopHAXVran9RoSzgyXv6KMCE7WM=" }, + "colour": { + "version": "0.7.1", + "resolved": "https://registry.npmjs.org/colour/-/colour-0.7.1.tgz", + "integrity": "sha1-nLFpkX7F0SwHNtPoaFdG3xyt93g=" + }, "combined-stream": { "version": "1.0.7", "resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.7.tgz", @@ -4327,9 +4534,9 @@ "integrity": "sha1-WQxhFWsK4vTwJVcyoViyZrxWsh0=" }, "ejs": { - "version": "2.6.1", - "resolved": "https://registry.npmjs.org/ejs/-/ejs-2.6.1.tgz", - "integrity": "sha512-0xy4A/twfrRCnkhfk8ErDi5DqdAsAqeGxht4xkCUrsvhhbQNs7E+4jV0CN7+NKIY0aHE72+XvqtBIXzD31ZbXQ==", + "version": "2.7.1", + "resolved": "https://registry.npmjs.org/ejs/-/ejs-2.7.1.tgz", + "integrity": "sha512-kS/gEPzZs3Y1rRsbGX4UOSjtP/CeJP0CxSNZHYxGfVM/VgLcv0ZqM7C45YyTj2DI2g7+P9Dd24C+IMIg6D0nYQ==", "dev": true }, "electron-to-chromium": { @@ -4394,18 +4601,20 @@ "integrity": "sha512-f2LZMYl1Fzu7YSBKg+RoROelpOaNrcGmE9AZubeDfrCEia483oW4MI4VyFd5VNHIgQ/7qm1I0wUHK1eJnn2y2w==" }, "enzyme": { - "version": "3.8.0", - "resolved": "https://registry.npmjs.org/enzyme/-/enzyme-3.8.0.tgz", - "integrity": "sha512-bfsWo5nHyZm1O1vnIsbwdfhU989jk+squU9NKvB+Puwo5j6/Wg9pN5CO0YJelm98Dao3NPjkDZk+vvgwpMwYxw==", + "version": "3.10.0", + "resolved": "https://registry.npmjs.org/enzyme/-/enzyme-3.10.0.tgz", + "integrity": "sha512-p2yy9Y7t/PFbPoTvrWde7JIYB2ZyGC+NgTNbVEGvZ5/EyoYSr9aG/2rSbVvyNvMHEhw9/dmGUJHWtfQIEiX9pg==", "dev": true, "requires": { "array.prototype.flat": "^1.2.1", "cheerio": "^1.0.0-rc.2", "function.prototype.name": "^1.1.0", "has": "^1.0.3", + "html-element-map": "^1.0.0", "is-boolean-object": "^1.0.0", "is-callable": "^1.1.4", "is-number-object": "^1.0.3", + "is-regex": "^1.0.4", "is-string": "^1.0.4", "is-subset": "^0.1.1", "lodash.escape": "^4.0.1", @@ -4421,30 +4630,94 @@ } }, "enzyme-adapter-react-16": { - "version": "1.7.1", - "resolved": "https://registry.npmjs.org/enzyme-adapter-react-16/-/enzyme-adapter-react-16-1.7.1.tgz", - "integrity": "sha512-OQXKgfHWyHN3sFu2nKj3mhgRcqIPIJX6aOzq5AHVFES4R9Dw/vCBZFMPyaG81g2AZ5DogVh39P3MMNUbqNLTcw==", + "version": "1.15.1", + "resolved": "https://registry.npmjs.org/enzyme-adapter-react-16/-/enzyme-adapter-react-16-1.15.1.tgz", + "integrity": "sha512-yMPxrP3vjJP+4wL/qqfkT6JAIctcwKF+zXO6utlGPgUJT2l4tzrdjMDWGd/Pp1BjHBcljhN24OzNEGRteibJhA==", "dev": true, 
"requires": { - "enzyme-adapter-utils": "^1.9.0", - "function.prototype.name": "^1.1.0", + "enzyme-adapter-utils": "^1.12.1", + "enzyme-shallow-equal": "^1.0.0", + "has": "^1.0.3", "object.assign": "^4.1.0", - "object.values": "^1.0.4", - "prop-types": "^15.6.2", - "react-is": "^16.6.1", - "react-test-renderer": "^16.0.0-0" + "object.values": "^1.1.0", + "prop-types": "^15.7.2", + "react-is": "^16.10.2", + "react-test-renderer": "^16.0.0-0", + "semver": "^5.7.0" + }, + "dependencies": { + "prop-types": { + "version": "15.7.2", + "resolved": "https://registry.npmjs.org/prop-types/-/prop-types-15.7.2.tgz", + "integrity": "sha512-8QQikdH7//R2vurIJSutZ1smHYTcLpRWEOlHnzcWHmBYrOGUysKwSsrC89BCiFj3CbrfJ/nXFdJepOVrY1GCHQ==", + "dev": true, + "requires": { + "loose-envify": "^1.4.0", + "object-assign": "^4.1.1", + "react-is": "^16.8.1" + } + }, + "react-is": { + "version": "16.10.2", + "resolved": "https://registry.npmjs.org/react-is/-/react-is-16.10.2.tgz", + "integrity": "sha512-INBT1QEgtcCCgvccr5/86CfD71fw9EPmDxgiJX4I2Ddr6ZsV6iFXsuby+qWJPtmNuMY0zByTsG4468P7nHuNWA==", + "dev": true + }, + "semver": { + "version": "5.7.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.1.tgz", + "integrity": "sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==", + "dev": true + } } }, "enzyme-adapter-utils": { - "version": "1.9.1", - "resolved": "https://registry.npmjs.org/enzyme-adapter-utils/-/enzyme-adapter-utils-1.9.1.tgz", - "integrity": "sha512-LWc88BbKztLXlpRf5Ba/pSMJRaNezAwZBvis3N/IuB65ltZEh2E2obWU9B36pAbw7rORYeBUuqc79OL17ZzN1A==", + "version": "1.12.1", + "resolved": "https://registry.npmjs.org/enzyme-adapter-utils/-/enzyme-adapter-utils-1.12.1.tgz", + "integrity": "sha512-KWiHzSjZaLEoDCOxY8Z1RAbUResbqKN5bZvenPbfKtWorJFVETUw754ebkuCQ3JKm0adx1kF8JaiR+PHPiP47g==", "dev": true, "requires": { - "function.prototype.name": "^1.1.0", + "airbnb-prop-types": "^2.15.0", + "function.prototype.name": "^1.1.1", "object.assign": "^4.1.0", - "prop-types": "^15.6.2", - "semver": "^5.6.0" + "object.fromentries": "^2.0.1", + "prop-types": "^15.7.2", + "semver": "^5.7.0" + }, + "dependencies": { + "prop-types": { + "version": "15.7.2", + "resolved": "https://registry.npmjs.org/prop-types/-/prop-types-15.7.2.tgz", + "integrity": "sha512-8QQikdH7//R2vurIJSutZ1smHYTcLpRWEOlHnzcWHmBYrOGUysKwSsrC89BCiFj3CbrfJ/nXFdJepOVrY1GCHQ==", + "dev": true, + "requires": { + "loose-envify": "^1.4.0", + "object-assign": "^4.1.1", + "react-is": "^16.8.1" + } + }, + "react-is": { + "version": "16.10.2", + "resolved": "https://registry.npmjs.org/react-is/-/react-is-16.10.2.tgz", + "integrity": "sha512-INBT1QEgtcCCgvccr5/86CfD71fw9EPmDxgiJX4I2Ddr6ZsV6iFXsuby+qWJPtmNuMY0zByTsG4468P7nHuNWA==", + "dev": true + }, + "semver": { + "version": "5.7.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.1.tgz", + "integrity": "sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==", + "dev": true + } + } + }, + "enzyme-shallow-equal": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/enzyme-shallow-equal/-/enzyme-shallow-equal-1.0.0.tgz", + "integrity": "sha512-VUf+q5o1EIv2ZaloNQQtWCJM9gpeux6vudGVH6vLmfPXFLRuxl5+Aq3U260wof9nn0b0i+P5OEUXm1vnxkRpXQ==", + "dev": true, + "requires": { + "has": "^1.0.3", + "object-is": "^1.0.1" } }, "enzyme-to-json": { @@ -5323,13 +5596,13 @@ "integrity": "sha1-FQStJSMVjKpA20onh8sBQRmU6k8=" }, "fsevents": { - "version": "1.2.4", - "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-1.2.4.tgz", 
- "integrity": "sha512-z8H8/diyk76B7q5wg+Ud0+CqzcAF3mBBI/bA5ne5zrRUUIvNkJY//D3BqyH571KuAC4Nr7Rw7CjWX4r0y9DvNg==", + "version": "1.2.9", + "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-1.2.9.tgz", + "integrity": "sha512-oeyj2H3EjjonWcFjD5NvZNE9Rqe4UW+nQBU2HNeKw0koVLEFIhtyETyAakeAM3de7Z/SW5kcA+fZUait9EApnw==", "optional": true, "requires": { - "nan": "^2.9.2", - "node-pre-gyp": "^0.10.0" + "nan": "^2.12.1", + "node-pre-gyp": "^0.12.0" }, "dependencies": { "abbrev": { @@ -5347,7 +5620,7 @@ "optional": true }, "are-we-there-yet": { - "version": "1.1.4", + "version": "1.1.5", "bundled": true, "optional": true, "requires": { @@ -5368,7 +5641,7 @@ } }, "chownr": { - "version": "1.0.1", + "version": "1.1.1", "bundled": true, "optional": true }, @@ -5390,15 +5663,15 @@ "optional": true }, "debug": { - "version": "2.6.9", + "version": "4.1.1", "bundled": true, "optional": true, "requires": { - "ms": "2.0.0" + "ms": "^2.1.1" } }, "deep-extend": { - "version": "0.5.1", + "version": "0.6.0", "bundled": true, "optional": true }, @@ -5441,7 +5714,7 @@ } }, "glob": { - "version": "7.1.2", + "version": "7.1.3", "bundled": true, "optional": true, "requires": { @@ -5459,11 +5732,11 @@ "optional": true }, "iconv-lite": { - "version": "0.4.21", + "version": "0.4.24", "bundled": true, "optional": true, "requires": { - "safer-buffer": "^2.1.0" + "safer-buffer": ">= 2.1.2 < 3" } }, "ignore-walk": { @@ -5516,15 +5789,15 @@ "bundled": true }, "minipass": { - "version": "2.2.4", + "version": "2.3.5", "bundled": true, "requires": { - "safe-buffer": "^5.1.1", + "safe-buffer": "^5.1.2", "yallist": "^3.0.0" } }, "minizlib": { - "version": "1.1.0", + "version": "1.2.1", "bundled": true, "optional": true, "requires": { @@ -5539,32 +5812,32 @@ } }, "ms": { - "version": "2.0.0", + "version": "2.1.1", "bundled": true, "optional": true }, "needle": { - "version": "2.2.0", + "version": "2.3.0", "bundled": true, "optional": true, "requires": { - "debug": "^2.1.2", + "debug": "^4.1.0", "iconv-lite": "^0.4.4", "sax": "^1.2.4" } }, "node-pre-gyp": { - "version": "0.10.0", + "version": "0.12.0", "bundled": true, "optional": true, "requires": { "detect-libc": "^1.0.2", "mkdirp": "^0.5.1", - "needle": "^2.2.0", + "needle": "^2.2.1", "nopt": "^4.0.1", "npm-packlist": "^1.1.6", "npmlog": "^4.0.2", - "rc": "^1.1.7", + "rc": "^1.2.7", "rimraf": "^2.6.1", "semver": "^5.3.0", "tar": "^4" @@ -5580,12 +5853,12 @@ } }, "npm-bundled": { - "version": "1.0.3", + "version": "1.0.6", "bundled": true, "optional": true }, "npm-packlist": { - "version": "1.1.10", + "version": "1.4.1", "bundled": true, "optional": true, "requires": { @@ -5650,11 +5923,11 @@ "optional": true }, "rc": { - "version": "1.2.7", + "version": "1.2.8", "bundled": true, "optional": true, "requires": { - "deep-extend": "^0.5.1", + "deep-extend": "^0.6.0", "ini": "~1.3.0", "minimist": "^1.2.0", "strip-json-comments": "~2.0.1" @@ -5682,15 +5955,15 @@ } }, "rimraf": { - "version": "2.6.2", + "version": "2.6.3", "bundled": true, "optional": true, "requires": { - "glob": "^7.0.5" + "glob": "^7.1.3" } }, "safe-buffer": { - "version": "5.1.1", + "version": "5.1.2", "bundled": true }, "safer-buffer": { @@ -5704,7 +5977,7 @@ "optional": true }, "semver": { - "version": "5.5.0", + "version": "5.7.0", "bundled": true, "optional": true }, @@ -5748,16 +6021,16 @@ "optional": true }, "tar": { - "version": "4.4.1", + "version": "4.4.8", "bundled": true, "optional": true, "requires": { - "chownr": "^1.0.1", + "chownr": "^1.1.1", "fs-minipass": "^1.2.5", - "minipass": "^2.2.4", - 
"minizlib": "^1.1.0", + "minipass": "^2.3.4", + "minizlib": "^1.1.1", "mkdirp": "^0.5.0", - "safe-buffer": "^5.1.1", + "safe-buffer": "^5.1.2", "yallist": "^3.0.2" } }, @@ -5767,11 +6040,11 @@ "optional": true }, "wide-align": { - "version": "1.1.2", + "version": "1.1.3", "bundled": true, "optional": true, "requires": { - "string-width": "^1.0.2" + "string-width": "^1.0.2 || 2" } }, "wrappy": { @@ -5779,7 +6052,7 @@ "bundled": true }, "yallist": { - "version": "3.0.2", + "version": "3.0.3", "bundled": true } } @@ -5790,16 +6063,23 @@ "integrity": "sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A==" }, "function.prototype.name": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/function.prototype.name/-/function.prototype.name-1.1.0.tgz", - "integrity": "sha512-Bs0VRrTz4ghD8pTmbJQD1mZ8A/mN0ur/jGz+A6FBxPDUPkm1tNfF6bhTYPA7i7aF4lZJVr+OXTNNrnnIl58Wfg==", + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/function.prototype.name/-/function.prototype.name-1.1.1.tgz", + "integrity": "sha512-e1NzkiJuw6xqVH7YSdiW/qDHebcmMhPNe6w+4ZYYEg0VA+LaLzx37RimbPLuonHhYGFGPx1ME2nSi74JiaCr/Q==", "dev": true, "requires": { - "define-properties": "^1.1.2", + "define-properties": "^1.1.3", "function-bind": "^1.1.1", - "is-callable": "^1.1.3" + "functions-have-names": "^1.1.1", + "is-callable": "^1.1.4" } }, + "functions-have-names": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/functions-have-names/-/functions-have-names-1.1.1.tgz", + "integrity": "sha512-U0kNHUoxwPNPWOJaMG7Z00d4a/qZVrFtzWJRaK8V9goaVOCXBSQSJpt3MYGNtkScKEBKovxLjnNdC9MlXwo5Pw==", + "dev": true + }, "get-caller-file": { "version": "1.0.3", "resolved": "https://registry.npmjs.org/get-caller-file/-/get-caller-file-1.0.3.tgz", @@ -5912,95 +6192,580 @@ } } }, - "global-dirs": { - "version": "0.1.1", - "resolved": "https://registry.npmjs.org/global-dirs/-/global-dirs-0.1.1.tgz", - "integrity": "sha1-sxnA3UYH81PzvpzKTHL8FIxJ9EU=", - "requires": { - "ini": "^1.3.4" - } - }, - "global-modules": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/global-modules/-/global-modules-1.0.0.tgz", - "integrity": "sha512-sKzpEkf11GpOFuw0Zzjzmt4B4UZwjOcG757PPvrfhxcLFbq0wpsgpOqxpxtxFiCG4DtG93M6XRVbF2oGdev7bg==", - "requires": { - "global-prefix": "^1.0.1", - "is-windows": "^1.0.1", - "resolve-dir": "^1.0.0" - } - }, - "global-prefix": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/global-prefix/-/global-prefix-1.0.2.tgz", - "integrity": "sha1-2/dDxsFJklk8ZVVoy2btMsASLr4=", - "requires": { - "expand-tilde": "^2.0.2", - "homedir-polyfill": "^1.0.1", - "ini": "^1.3.4", - "is-windows": "^1.0.1", - "which": "^1.2.14" - } - }, - "globals": { - "version": "9.18.0", - "resolved": "https://registry.npmjs.org/globals/-/globals-9.18.0.tgz", - "integrity": "sha512-S0nG3CLEQiY/ILxqtztTWH/3iRRdyBLw6KMDxnKMchrtbj2OFmehVh0WUCfW3DUrIgx/qFrJPICrq4Z4sTR9UQ==" - }, - "globby": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/globby/-/globby-5.0.0.tgz", - "integrity": "sha1-69hGZ8oNuzMLmbz8aOrCvFQ3Dg0=", - "requires": { - "array-union": "^1.0.1", - "arrify": "^1.0.0", - "glob": "^7.0.3", - "object-assign": "^4.0.1", - "pify": "^2.0.0", - "pinkie-promise": "^2.0.0" - } - }, - "got": { - "version": "6.7.1", - "resolved": "http://registry.npmjs.org/got/-/got-6.7.1.tgz", - "integrity": "sha1-JAzQV4WpoY5WHcG0S0HHY+8ejbA=", - "requires": { - "create-error-class": "^3.0.0", - "duplexer3": "^0.1.4", - "get-stream": "^3.0.0", - "is-redirect": "^1.0.0", - 
"is-retry-allowed": "^1.0.0", - "is-stream": "^1.0.0", - "lowercase-keys": "^1.0.0", - "safe-buffer": "^5.0.1", - "timed-out": "^4.0.0", - "unzip-response": "^2.0.1", - "url-parse-lax": "^1.0.0" - } - }, - "graceful-fs": { - "version": "4.1.15", - "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.1.15.tgz", - "integrity": "sha512-6uHUhOPEBgQ24HM+r6b/QwWfZq+yiFcipKFrOFiBEnWdy5sdzYoi+pJeQaPI5qOLRFqWmAXUPQNsielzdLoecA==" - }, - "graphlib": { - "version": "2.1.7", - "resolved": "https://registry.npmjs.org/graphlib/-/graphlib-2.1.7.tgz", - "integrity": "sha512-TyI9jIy2J4j0qgPmOOrHTCtpPqJGN/aurBwc6ZT+bRii+di1I+Wv3obRhVrmBEXet+qkMaEX67dXrwsd3QQM6w==", - "requires": { - "lodash": "^4.17.5" - } - }, - "growl": { - "version": "1.10.5", - "resolved": "https://registry.npmjs.org/growl/-/growl-1.10.5.tgz", - "integrity": "sha512-qBr4OuELkhPenW6goKVXiv47US3clb3/IbuWF9KNKEijAy9oeHxU9IgzjvJhHkUzhaj7rOUD7+YGWqUjLp5oSA==", - "dev": true - }, - "growly": { - "version": "1.3.0", - "resolved": "https://registry.npmjs.org/growly/-/growly-1.3.0.tgz", - "integrity": "sha1-8QdIy+dq+WS3yWyTxrzCivEgwIE=" + "global-dirs": { + "version": "0.1.1", + "resolved": "https://registry.npmjs.org/global-dirs/-/global-dirs-0.1.1.tgz", + "integrity": "sha1-sxnA3UYH81PzvpzKTHL8FIxJ9EU=", + "requires": { + "ini": "^1.3.4" + } + }, + "global-modules": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/global-modules/-/global-modules-1.0.0.tgz", + "integrity": "sha512-sKzpEkf11GpOFuw0Zzjzmt4B4UZwjOcG757PPvrfhxcLFbq0wpsgpOqxpxtxFiCG4DtG93M6XRVbF2oGdev7bg==", + "requires": { + "global-prefix": "^1.0.1", + "is-windows": "^1.0.1", + "resolve-dir": "^1.0.0" + } + }, + "global-prefix": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/global-prefix/-/global-prefix-1.0.2.tgz", + "integrity": "sha1-2/dDxsFJklk8ZVVoy2btMsASLr4=", + "requires": { + "expand-tilde": "^2.0.2", + "homedir-polyfill": "^1.0.1", + "ini": "^1.3.4", + "is-windows": "^1.0.1", + "which": "^1.2.14" + } + }, + "globals": { + "version": "9.18.0", + "resolved": "https://registry.npmjs.org/globals/-/globals-9.18.0.tgz", + "integrity": "sha512-S0nG3CLEQiY/ILxqtztTWH/3iRRdyBLw6KMDxnKMchrtbj2OFmehVh0WUCfW3DUrIgx/qFrJPICrq4Z4sTR9UQ==" + }, + "globby": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/globby/-/globby-5.0.0.tgz", + "integrity": "sha1-69hGZ8oNuzMLmbz8aOrCvFQ3Dg0=", + "requires": { + "array-union": "^1.0.1", + "arrify": "^1.0.0", + "glob": "^7.0.3", + "object-assign": "^4.0.1", + "pify": "^2.0.0", + "pinkie-promise": "^2.0.0" + } + }, + "google-protobuf": { + "version": "3.10.0", + "resolved": "https://registry.npmjs.org/google-protobuf/-/google-protobuf-3.10.0.tgz", + "integrity": "sha512-d0cMO8TJ6xtB/WrVHCv5U81L2ulX+aCD58IljyAN6mHwdHHJ2jbcauX5glvivi3s3hx7EYEo7eUA9WftzamMnw==" + }, + "got": { + "version": "6.7.1", + "resolved": "http://registry.npmjs.org/got/-/got-6.7.1.tgz", + "integrity": "sha1-JAzQV4WpoY5WHcG0S0HHY+8ejbA=", + "requires": { + "create-error-class": "^3.0.0", + "duplexer3": "^0.1.4", + "get-stream": "^3.0.0", + "is-redirect": "^1.0.0", + "is-retry-allowed": "^1.0.0", + "is-stream": "^1.0.0", + "lowercase-keys": "^1.0.0", + "safe-buffer": "^5.0.1", + "timed-out": "^4.0.0", + "unzip-response": "^2.0.1", + "url-parse-lax": "^1.0.0" + } + }, + "graceful-fs": { + "version": "4.1.15", + "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.1.15.tgz", + "integrity": "sha512-6uHUhOPEBgQ24HM+r6b/QwWfZq+yiFcipKFrOFiBEnWdy5sdzYoi+pJeQaPI5qOLRFqWmAXUPQNsielzdLoecA==" + }, + 
"graphlib": { + "version": "2.1.7", + "resolved": "https://registry.npmjs.org/graphlib/-/graphlib-2.1.7.tgz", + "integrity": "sha512-TyI9jIy2J4j0qgPmOOrHTCtpPqJGN/aurBwc6ZT+bRii+di1I+Wv3obRhVrmBEXet+qkMaEX67dXrwsd3QQM6w==", + "requires": { + "lodash": "^4.17.5" + } + }, + "growl": { + "version": "1.10.5", + "resolved": "https://registry.npmjs.org/growl/-/growl-1.10.5.tgz", + "integrity": "sha512-qBr4OuELkhPenW6goKVXiv47US3clb3/IbuWF9KNKEijAy9oeHxU9IgzjvJhHkUzhaj7rOUD7+YGWqUjLp5oSA==", + "dev": true + }, + "growly": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/growly/-/growly-1.3.0.tgz", + "integrity": "sha1-8QdIy+dq+WS3yWyTxrzCivEgwIE=" + }, + "grpc": { + "version": "1.24.0", + "resolved": "https://registry.npmjs.org/grpc/-/grpc-1.24.0.tgz", + "integrity": "sha512-zq1rUh2uzfMqSfQ3bZvlQuX5yKfd/2vob+l9sK5Qma6P33m7UvyMCVW70+Wz0WTzy9W2A94eQD5XIOxKnZhsYQ==", + "requires": { + "lodash.camelcase": "^4.3.0", + "lodash.clone": "^4.5.0", + "nan": "^2.13.2", + "node-pre-gyp": "^0.13.0", + "protobufjs": "^5.0.3" + }, + "dependencies": { + "abbrev": { + "version": "1.1.1", + "bundled": true + }, + "ansi-regex": { + "version": "2.1.1", + "bundled": true + }, + "aproba": { + "version": "1.2.0", + "bundled": true + }, + "are-we-there-yet": { + "version": "1.1.5", + "bundled": true, + "requires": { + "delegates": "^1.0.0", + "readable-stream": "^2.0.6" + } + }, + "balanced-match": { + "version": "1.0.0", + "bundled": true + }, + "brace-expansion": { + "version": "1.1.11", + "bundled": true, + "requires": { + "balanced-match": "^1.0.0", + "concat-map": "0.0.1" + } + }, + "camelcase": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/camelcase/-/camelcase-2.1.1.tgz", + "integrity": "sha1-fB0W1nmhu+WcoCys7PsBHiAfWh8=" + }, + "chownr": { + "version": "1.1.2", + "bundled": true + }, + "cliui": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/cliui/-/cliui-3.2.0.tgz", + "integrity": "sha1-EgYBU3qRbSmUD5NNo7SNWFo5IT0=", + "requires": { + "string-width": "^1.0.1", + "strip-ansi": "^3.0.1", + "wrap-ansi": "^2.0.0" + } + }, + "code-point-at": { + "version": "1.1.0", + "bundled": true + }, + "concat-map": { + "version": "0.0.1", + "bundled": true + }, + "console-control-strings": { + "version": "1.1.0", + "bundled": true + }, + "core-util-is": { + "version": "1.0.2", + "bundled": true + }, + "debug": { + "version": "3.2.6", + "bundled": true, + "requires": { + "ms": "^2.1.1" + } + }, + "deep-extend": { + "version": "0.6.0", + "bundled": true + }, + "delegates": { + "version": "1.0.0", + "bundled": true + }, + "detect-libc": { + "version": "1.0.3", + "bundled": true + }, + "fs-minipass": { + "version": "1.2.6", + "bundled": true, + "requires": { + "minipass": "^2.2.1" + } + }, + "fs.realpath": { + "version": "1.0.0", + "bundled": true + }, + "gauge": { + "version": "2.7.4", + "bundled": true, + "requires": { + "aproba": "^1.0.3", + "console-control-strings": "^1.0.0", + "has-unicode": "^2.0.0", + "object-assign": "^4.1.0", + "signal-exit": "^3.0.0", + "string-width": "^1.0.1", + "strip-ansi": "^3.0.1", + "wide-align": "^1.1.0" + } + }, + "glob": { + "version": "7.1.4", + "bundled": true, + "requires": { + "fs.realpath": "^1.0.0", + "inflight": "^1.0.4", + "inherits": "2", + "minimatch": "^3.0.4", + "once": "^1.3.0", + "path-is-absolute": "^1.0.0" + } + }, + "has-unicode": { + "version": "2.0.1", + "bundled": true + }, + "iconv-lite": { + "version": "0.4.24", + "bundled": true, + "requires": { + "safer-buffer": ">= 2.1.2 < 3" + } + }, + "ignore-walk": { + "version": 
"3.0.1", + "bundled": true, + "requires": { + "minimatch": "^3.0.4" + } + }, + "inflight": { + "version": "1.0.6", + "bundled": true, + "requires": { + "once": "^1.3.0", + "wrappy": "1" + } + }, + "inherits": { + "version": "2.0.4", + "bundled": true + }, + "ini": { + "version": "1.3.5", + "bundled": true + }, + "is-fullwidth-code-point": { + "version": "1.0.0", + "bundled": true, + "requires": { + "number-is-nan": "^1.0.0" + } + }, + "isarray": { + "version": "1.0.0", + "bundled": true + }, + "minimatch": { + "version": "3.0.4", + "bundled": true, + "requires": { + "brace-expansion": "^1.1.7" + } + }, + "minimist": { + "version": "1.2.0", + "bundled": true + }, + "minipass": { + "version": "2.3.5", + "bundled": true, + "requires": { + "safe-buffer": "^5.1.2", + "yallist": "^3.0.0" + } + }, + "minizlib": { + "version": "1.2.1", + "bundled": true, + "requires": { + "minipass": "^2.2.1" + } + }, + "mkdirp": { + "version": "0.5.1", + "bundled": true, + "requires": { + "minimist": "0.0.8" + }, + "dependencies": { + "minimist": { + "version": "0.0.8", + "bundled": true + } + } + }, + "ms": { + "version": "2.1.2", + "bundled": true + }, + "nan": { + "version": "2.14.0", + "resolved": "https://registry.npmjs.org/nan/-/nan-2.14.0.tgz", + "integrity": "sha512-INOFj37C7k3AfaNTtX8RhsTw7qRy7eLET14cROi9+5HAVbbHuIWUHEauBv5qT4Av2tWasiTY1Jw6puUNqRJXQg==" + }, + "needle": { + "version": "2.4.0", + "bundled": true, + "requires": { + "debug": "^3.2.6", + "iconv-lite": "^0.4.4", + "sax": "^1.2.4" + } + }, + "node-pre-gyp": { + "version": "0.13.0", + "bundled": true, + "requires": { + "detect-libc": "^1.0.2", + "mkdirp": "^0.5.1", + "needle": "^2.2.1", + "nopt": "^4.0.1", + "npm-packlist": "^1.1.6", + "npmlog": "^4.0.2", + "rc": "^1.2.7", + "rimraf": "^2.6.1", + "semver": "^5.3.0", + "tar": "^4" + } + }, + "nopt": { + "version": "4.0.1", + "bundled": true, + "requires": { + "abbrev": "1", + "osenv": "^0.1.4" + } + }, + "npm-bundled": { + "version": "1.0.6", + "bundled": true + }, + "npm-packlist": { + "version": "1.4.4", + "bundled": true, + "requires": { + "ignore-walk": "^3.0.1", + "npm-bundled": "^1.0.1" + } + }, + "npmlog": { + "version": "4.1.2", + "bundled": true, + "requires": { + "are-we-there-yet": "~1.1.2", + "console-control-strings": "~1.1.0", + "gauge": "~2.7.3", + "set-blocking": "~2.0.0" + } + }, + "number-is-nan": { + "version": "1.0.1", + "bundled": true + }, + "object-assign": { + "version": "4.1.1", + "bundled": true + }, + "once": { + "version": "1.4.0", + "bundled": true, + "requires": { + "wrappy": "1" + } + }, + "os-homedir": { + "version": "1.0.2", + "bundled": true + }, + "os-locale": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/os-locale/-/os-locale-1.4.0.tgz", + "integrity": "sha1-IPnxeuKe00XoveWDsT0gCYA8FNk=", + "requires": { + "lcid": "^1.0.0" + } + }, + "os-tmpdir": { + "version": "1.0.2", + "bundled": true + }, + "osenv": { + "version": "0.1.5", + "bundled": true, + "requires": { + "os-homedir": "^1.0.0", + "os-tmpdir": "^1.0.0" + } + }, + "path-is-absolute": { + "version": "1.0.1", + "bundled": true + }, + "process-nextick-args": { + "version": "2.0.1", + "bundled": true + }, + "protobufjs": { + "version": "5.0.3", + "resolved": "https://registry.npmjs.org/protobufjs/-/protobufjs-5.0.3.tgz", + "integrity": "sha512-55Kcx1MhPZX0zTbVosMQEO5R6/rikNXd9b6RQK4KSPcrSIIwoXTtebIczUrXlwaSrbz4x8XUVThGPob1n8I4QA==", + "requires": { + "ascli": "~1", + "bytebuffer": "~5", + "glob": "^7.0.5", + "yargs": "^3.10.0" + } + }, + "rc": { + "version": "1.2.8", + "bundled": true, + 
"requires": { + "deep-extend": "^0.6.0", + "ini": "~1.3.0", + "minimist": "^1.2.0", + "strip-json-comments": "~2.0.1" + } + }, + "readable-stream": { + "version": "2.3.6", + "bundled": true, + "requires": { + "core-util-is": "~1.0.0", + "inherits": "~2.0.3", + "isarray": "~1.0.0", + "process-nextick-args": "~2.0.0", + "safe-buffer": "~5.1.1", + "string_decoder": "~1.1.1", + "util-deprecate": "~1.0.1" + } + }, + "rimraf": { + "version": "2.7.1", + "bundled": true, + "requires": { + "glob": "^7.1.3" + } + }, + "safe-buffer": { + "version": "5.1.2", + "bundled": true + }, + "safer-buffer": { + "version": "2.1.2", + "bundled": true + }, + "sax": { + "version": "1.2.4", + "bundled": true + }, + "semver": { + "version": "5.7.1", + "bundled": true + }, + "set-blocking": { + "version": "2.0.0", + "bundled": true + }, + "signal-exit": { + "version": "3.0.2", + "bundled": true + }, + "string-width": { + "version": "1.0.2", + "bundled": true, + "requires": { + "code-point-at": "^1.0.0", + "is-fullwidth-code-point": "^1.0.0", + "strip-ansi": "^3.0.0" + } + }, + "string_decoder": { + "version": "1.1.1", + "bundled": true, + "requires": { + "safe-buffer": "~5.1.0" + } + }, + "strip-ansi": { + "version": "3.0.1", + "bundled": true, + "requires": { + "ansi-regex": "^2.0.0" + } + }, + "strip-json-comments": { + "version": "2.0.1", + "bundled": true + }, + "tar": { + "version": "4.4.10", + "bundled": true, + "requires": { + "chownr": "^1.1.1", + "fs-minipass": "^1.2.5", + "minipass": "^2.3.5", + "minizlib": "^1.2.1", + "mkdirp": "^0.5.0", + "safe-buffer": "^5.1.2", + "yallist": "^3.0.3" + } + }, + "util-deprecate": { + "version": "1.0.2", + "bundled": true + }, + "wide-align": { + "version": "1.1.3", + "bundled": true, + "requires": { + "string-width": "^1.0.2 || 2" + } + }, + "window-size": { + "version": "0.1.4", + "resolved": "https://registry.npmjs.org/window-size/-/window-size-0.1.4.tgz", + "integrity": "sha1-+OGqHuWlPsW/FR/6CXQqatdpeHY=" + }, + "wrappy": { + "version": "1.0.2", + "bundled": true + }, + "yallist": { + "version": "3.0.3", + "bundled": true + }, + "yargs": { + "version": "3.32.0", + "resolved": "https://registry.npmjs.org/yargs/-/yargs-3.32.0.tgz", + "integrity": "sha1-AwiOnr+edWtpdRYR0qXvWRSCyZU=", + "requires": { + "camelcase": "^2.0.1", + "cliui": "^3.0.3", + "decamelize": "^1.1.1", + "os-locale": "^1.4.0", + "string-width": "^1.0.1", + "window-size": "^0.1.4", + "y18n": "^3.2.0" + } + } + } + }, + "grpc-web": { + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/grpc-web/-/grpc-web-1.0.6.tgz", + "integrity": "sha512-iwqchQXNsA8bcJwuYEv/A8ScEaf5nA5+vtptEv2KN646KW94tsq7O+Q7hy5+gADOg6XhmJqPhSlvTMdiHlVfcw==" }, "gzip-size": { "version": "3.0.0", @@ -6016,11 +6781,11 @@ "integrity": "sha1-/Xqtcmvxpf0W38KbL3pmAdJxOcQ=" }, "handlebars": { - "version": "4.0.12", - "resolved": "https://registry.npmjs.org/handlebars/-/handlebars-4.0.12.tgz", - "integrity": "sha512-RhmTekP+FZL+XNhwS1Wf+bTTZpdLougwt5pcgA1tuz6Jcx0fpH/7z0qd71RKnZHBCxIRBHfBOnio4gViPemNzA==", + "version": "4.4.3", + "resolved": "https://registry.npmjs.org/handlebars/-/handlebars-4.4.3.tgz", + "integrity": "sha512-B0W4A2U1ww3q7VVthTKfh+epHx+q4mCt6iK+zEAzbMBpWQAwxCeKxEGpj/1oQTpzPXDNSOG7hmG14TsISH50yw==", "requires": { - "async": "^2.5.0", + "neo-async": "^2.6.0", "optimist": "^0.6.1", "source-map": "^0.6.1", "uglify-js": "^3.1.4" @@ -6257,6 +7022,23 @@ "resolved": "https://registry.npmjs.org/html-comment-regex/-/html-comment-regex-1.1.2.tgz", "integrity": 
"sha512-P+M65QY2JQ5Y0G9KKdlDpo0zK+/OHptU5AaBwUfAIDJZk1MYf32Frm84EcOytfJE0t5JvkAnKlmjsXDnWzCJmQ==" }, + "html-element-map": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/html-element-map/-/html-element-map-1.1.0.tgz", + "integrity": "sha512-iqiG3dTZmy+uUaTmHarTL+3/A2VW9ox/9uasKEZC+R/wAtUrTcRlXPSaPqsnWPfIu8wqn09jQNwMRqzL54jSYA==", + "dev": true, + "requires": { + "array-filter": "^1.0.0" + }, + "dependencies": { + "array-filter": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/array-filter/-/array-filter-1.0.0.tgz", + "integrity": "sha1-uveeYubvTCpMC4MSMtr/7CUfnYM=", + "dev": true + } + } + }, "html-encoding-sniffer": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/html-encoding-sniffer/-/html-encoding-sniffer-1.0.2.tgz", @@ -7939,9 +8721,9 @@ "integrity": "sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==" }, "js-yaml": { - "version": "3.12.0", - "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-3.12.0.tgz", - "integrity": "sha512-PIt2cnwmPfL4hKNwqeiuz4bKfnzHTBv6HyVgjahA6mPLwPDzjDWrplJBMjHUFxku/N3FlmrbyPclad+I+4mJ3A==", + "version": "3.13.1", + "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-3.13.1.tgz", + "integrity": "sha512-YfbcO7jXDdyj0DGxYVSlSeQNHbD7XPWvrVWeVUujrQEoZzWJIRrCPoyk6kL6IAjAG2IolMK4T0hNUe0HOUs5Jw==", "requires": { "argparse": "^1.0.7", "esprima": "^4.0.0" @@ -8246,14 +9028,6 @@ } } }, - "linkify-it": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/linkify-it/-/linkify-it-2.1.0.tgz", - "integrity": "sha512-4REs8/062kV2DSHxNfq5183zrqXMl7WP0WzABH9IeJI+NLm429FgE1PDecltYfnOoFDFlZGh2T8PfZn0r+GTRg==", - "requires": { - "uc.micro": "^1.0.1" - } - }, "load-bmfont": { "version": "1.4.0", "resolved": "https://registry.npmjs.org/load-bmfont/-/load-bmfont-1.4.0.tgz", @@ -8307,9 +9081,9 @@ } }, "lodash": { - "version": "4.17.11", - "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.11.tgz", - "integrity": "sha512-cQKh8igo5QUhZ7lg38DYWAxMvjSAKG0A8wGSVimP07SIUEK2UO+arSRKbRZWtelMtN5V0Hkwh5ryOto/SshYIg==" + "version": "4.17.15", + "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.15.tgz", + "integrity": "sha512-8xOcRHvCjnocdS5cpwXQXVzmmh5e5+saE2QGoeQmbKmRS6J3VQppPOIt0MnmE+4xlZoumy0GPG0D0MVIQbNA1A==" }, "lodash._arraycopy": { "version": "3.0.0", @@ -8376,17 +9150,16 @@ "resolved": "https://registry.npmjs.org/lodash._reinterpolate/-/lodash._reinterpolate-3.0.0.tgz", "integrity": "sha1-DM8tiRZq8Ds2Y8eWU4t1rG4RTZ0=" }, - "lodash.assign": { - "version": "4.2.0", - "resolved": "https://registry.npmjs.org/lodash.assign/-/lodash.assign-4.2.0.tgz", - "integrity": "sha1-DZnzzNem0mHRm9rrkkUAXShYCOc=", - "dev": true - }, "lodash.camelcase": { "version": "4.3.0", "resolved": "https://registry.npmjs.org/lodash.camelcase/-/lodash.camelcase-4.3.0.tgz", "integrity": "sha1-soqmKIorn8ZRA1x3EfZathkDMaY=" }, + "lodash.clone": { + "version": "4.5.0", + "resolved": "https://registry.npmjs.org/lodash.clone/-/lodash.clone-4.5.0.tgz", + "integrity": "sha1-GVhwRQ9aExkkeN9Lw9I9LeoZB7Y=" + }, "lodash.clonedeep": { "version": "3.0.2", "resolved": "https://registry.npmjs.org/lodash.clonedeep/-/lodash.clonedeep-3.0.2.tgz", @@ -8397,11 +9170,6 @@ "lodash._bindcallback": "^3.0.0" } }, - "lodash.debounce": { - "version": "4.0.8", - "resolved": "https://registry.npmjs.org/lodash.debounce/-/lodash.debounce-4.0.8.tgz", - "integrity": "sha1-gteb/zCmfEAF/9XiUVMArZyk168=" - }, "lodash.defaults": { "version": "4.2.0", "resolved": 
"https://registry.npmjs.org/lodash.defaults/-/lodash.defaults-4.2.0.tgz", @@ -8521,6 +9289,11 @@ "resolved": "https://registry.npmjs.org/loglevel/-/loglevel-1.6.1.tgz", "integrity": "sha1-4PyVEztu8nbNyIh82vJKpvFW+Po=" }, + "long": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/long/-/long-4.0.0.tgz", + "integrity": "sha512-XsP+KhQif4bjX1kbuSiySJFNAehNxgLb6hPRGJ9QsUr8ajHkuXGdrHmFUTUUXhDwVX2R5bY4JNZEwbUiMhV+MA==" + }, "longest": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/longest/-/longest-1.0.1.tgz", @@ -8618,16 +9391,13 @@ "object-visit": "^1.0.0" } }, - "markdown-it": { - "version": "8.4.2", - "resolved": "https://registry.npmjs.org/markdown-it/-/markdown-it-8.4.2.tgz", - "integrity": "sha512-GcRz3AWTqSUphY3vsUqQSFMbgR38a4Lh3GWlHRh/7MRwz8mcu9n2IO7HOh+bXHrR9kOPDl5RNCaEsrneb+xhHQ==", + "markdown-to-jsx": { + "version": "6.10.3", + "resolved": "https://registry.npmjs.org/markdown-to-jsx/-/markdown-to-jsx-6.10.3.tgz", + "integrity": "sha512-PSoUyLnW/xoW6RsxZrquSSz5eGEOTwa15H5eqp3enmrp8esmgDJmhzd6zmQ9tgAA9TxJzx1Hmf3incYU/IamoQ==", "requires": { - "argparse": "^1.0.7", - "entities": "~1.1.1", - "linkify-it": "^2.0.0", - "mdurl": "^1.0.1", - "uc.micro": "^1.0.5" + "prop-types": "^15.6.2", + "unquote": "^1.1.0" } }, "marked": { @@ -8637,22 +9407,23 @@ "dev": true }, "marked-terminal": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/marked-terminal/-/marked-terminal-3.1.1.tgz", - "integrity": "sha512-7UBFww1rdx0w9HehLMCVYa8/AxXaiDigDfMsJcj82/wgLQG9cj+oiMAVlJpeWD57VFJY2OYY+bKeEVIjIlxi+w==", + "version": "3.3.0", + "resolved": "https://registry.npmjs.org/marked-terminal/-/marked-terminal-3.3.0.tgz", + "integrity": "sha512-+IUQJ5VlZoAFsM5MHNT7g3RHSkA3eETqhRCdXv4niUMAKHQ7lb1yvAcuGPmm4soxhmtX13u4Li6ZToXtvSEH+A==", "dev": true, "requires": { + "ansi-escapes": "^3.1.0", "cardinal": "^2.1.1", "chalk": "^2.4.1", "cli-table": "^0.3.1", - "lodash.assign": "^4.2.0", - "node-emoji": "^1.4.1" + "node-emoji": "^1.4.1", + "supports-hyperlinks": "^1.0.1" }, "dependencies": { "chalk": { - "version": "2.4.1", - "resolved": "https://registry.npmjs.org/chalk/-/chalk-2.4.1.tgz", - "integrity": "sha512-ObN6h1v2fTJSmUXoS3nMQ92LbDK9be4TV+6G+omQlGJFdcUX5heKi1LZ1YnRMIgwTLEj3E24bT6tYni50rlCfQ==", + "version": "2.4.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-2.4.2.tgz", + "integrity": "sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ==", "dev": true, "requires": { "ansi-styles": "^3.2.1", @@ -8688,11 +9459,6 @@ "safe-buffer": "^5.1.2" } }, - "mdurl": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/mdurl/-/mdurl-1.0.1.tgz", - "integrity": "sha1-/oWy7HWlkDfyrf7BAP1sYBdhFS4=" - }, "media-typer": { "version": "0.3.0", "resolved": "https://registry.npmjs.org/media-typer/-/media-typer-0.3.0.tgz", @@ -8856,9 +9622,9 @@ } }, "mixin-deep": { - "version": "1.3.1", - "resolved": "https://registry.npmjs.org/mixin-deep/-/mixin-deep-1.3.1.tgz", - "integrity": "sha512-8ZItLHeEgaqEvd5lYBXfm4EZSFCX29Jb9K+lAHhDKzReKBQKj3R+7NOF6tjqYi9t4oI8VUfaWITJQm86wnXGNQ==", + "version": "1.3.2", + "resolved": "https://registry.npmjs.org/mixin-deep/-/mixin-deep-1.3.2.tgz", + "integrity": "sha512-WRoDn//mXBiJ1H40rqa3vH0toePwSsGb45iInWlTySa+Uu4k3tYUSxa2v1KqAiLtvlrSzaExqS1gtk96A9zvEA==", "requires": { "for-in": "^1.0.2", "is-extendable": "^1.0.1" @@ -8962,9 +9728,9 @@ "integrity": "sha1-Sr6/7tdUHywnrPspvbvRXI1bpPc=" }, "nearley": { - "version": "2.16.0", - "resolved": "https://registry.npmjs.org/nearley/-/nearley-2.16.0.tgz", - 
"integrity": "sha512-Tr9XD3Vt/EujXbZBv6UAHYoLUSMQAxSsTnm9K3koXzjzNWY195NqALeyrzLZBKzAkL3gl92BcSogqrHjD8QuUg==", + "version": "2.19.0", + "resolved": "https://registry.npmjs.org/nearley/-/nearley-2.19.0.tgz", + "integrity": "sha512-2v52FTw7RPqieZr3Gth1luAXZR7Je6q3KaDHY5bjl/paDUdMu35fZ8ICNgiYJRr3tf3NMvIQQR1r27AvEr9CRA==", "dev": true, "requires": { "commander": "^2.19.0", @@ -8998,9 +9764,9 @@ } }, "node-emoji": { - "version": "1.8.1", - "resolved": "https://registry.npmjs.org/node-emoji/-/node-emoji-1.8.1.tgz", - "integrity": "sha512-+ktMAh1Jwas+TnGodfCfjUbJKoANqPaJFN0z0iqh41eqD8dvguNzcitVSBSVK1pidz0AqGbLKcoVuVLRVZ/aVg==", + "version": "1.10.0", + "resolved": "https://registry.npmjs.org/node-emoji/-/node-emoji-1.10.0.tgz", + "integrity": "sha512-Yt3384If5H6BYGVHiHwTL+99OzJKHhgp82S8/dktEK73T26BazdgZ4JZh92xSVtGNJvz9UbXdNAc5hcrXV42vw==", "dev": true, "requires": { "lodash.toarray": "^4.4.0" @@ -9255,6 +10021,44 @@ "has": "^1.0.3" } }, + "object.fromentries": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/object.fromentries/-/object.fromentries-2.0.1.tgz", + "integrity": "sha512-PUQv8Hbg3j2QX0IQYv3iAGCbGcu4yY4KQ92/dhA4sFSixBmSmp13UpDLs6jGK8rBtbmhNNIK99LD2k293jpiGA==", + "dev": true, + "requires": { + "define-properties": "^1.1.3", + "es-abstract": "^1.15.0", + "function-bind": "^1.1.1", + "has": "^1.0.3" + }, + "dependencies": { + "es-abstract": { + "version": "1.15.0", + "resolved": "https://registry.npmjs.org/es-abstract/-/es-abstract-1.15.0.tgz", + "integrity": "sha512-bhkEqWJ2t2lMeaJDuk7okMkJWI/yqgH/EoGwpcvv0XW9RWQsRspI4wt6xuyuvMvvQE3gg/D9HXppgk21w78GyQ==", + "dev": true, + "requires": { + "es-to-primitive": "^1.2.0", + "function-bind": "^1.1.1", + "has": "^1.0.3", + "has-symbols": "^1.0.0", + "is-callable": "^1.1.4", + "is-regex": "^1.0.4", + "object-inspect": "^1.6.0", + "object-keys": "^1.1.1", + "string.prototype.trimleft": "^2.1.0", + "string.prototype.trimright": "^2.1.0" + } + }, + "object-keys": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/object-keys/-/object-keys-1.1.1.tgz", + "integrity": "sha512-NuAESUOUMrlIXOfHKzD6bpPu3tYt3xvjNdRIQ+FeT0lNb4K8WR70CaDxhuNguS2XG+GjkyMwOzsN5ZktImfhLA==", + "dev": true + } + } + }, "object.getownpropertydescriptors": { "version": "2.0.3", "resolved": "https://registry.npmjs.org/object.getownpropertydescriptors/-/object.getownpropertydescriptors-2.0.3.tgz", @@ -9377,6 +10181,11 @@ } } }, + "optjs": { + "version": "3.2.2", + "resolved": "https://registry.npmjs.org/optjs/-/optjs-3.2.2.tgz", + "integrity": "sha1-aabOicRCpEQDFBrS+bNwvVu29O4=" + }, "original": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/original/-/original-1.0.2.tgz", @@ -10865,6 +11674,12 @@ "resolved": "https://registry.npmjs.org/preserve/-/preserve-0.2.0.tgz", "integrity": "sha1-gV7R9uvGWSb4ZbMQwHE7yzMVzks=" }, + "prettier": { + "version": "1.18.2", + "resolved": "https://registry.npmjs.org/prettier/-/prettier-1.18.2.tgz", + "integrity": "sha512-OeHeMc0JhFE9idD4ZdtNibzY0+TPHSpSSb9h8FqtP+YnoZZ1sl8Vc9b1sasjfymH3SonAF4QcA2+mzHPhMvIiw==", + "dev": true + }, "pretty-bytes": { "version": "4.0.2", "resolved": "https://registry.npmjs.org/pretty-bytes/-/pretty-bytes-4.0.2.tgz", @@ -10938,6 +11753,37 @@ "object-assign": "^4.1.1" } }, + "prop-types-exact": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/prop-types-exact/-/prop-types-exact-1.2.0.tgz", + "integrity": "sha512-K+Tk3Kd9V0odiXFP9fwDHUYRyvK3Nun3GVyPapSIs5OBkITAm15W0CPFD/YKTkMUAbc0b9CUwRQp2ybiBIq+eA==", + "dev": true, + "requires": { + "has": "^1.0.3", + 
"object.assign": "^4.1.0", + "reflect.ownkeys": "^0.2.0" + } + }, + "protobufjs": { + "version": "6.8.8", + "resolved": "https://registry.npmjs.org/protobufjs/-/protobufjs-6.8.8.tgz", + "integrity": "sha512-AAmHtD5pXgZfi7GMpllpO3q1Xw1OYldr+dMUlAnffGTAhqkg72WdmSY71uKBF/JuyiKs8psYbtKrhi0ASCD8qw==", + "requires": { + "@protobufjs/aspromise": "^1.1.2", + "@protobufjs/base64": "^1.1.2", + "@protobufjs/codegen": "^2.0.4", + "@protobufjs/eventemitter": "^1.1.0", + "@protobufjs/fetch": "^1.1.0", + "@protobufjs/float": "^1.0.2", + "@protobufjs/inquire": "^1.1.0", + "@protobufjs/path": "^1.1.2", + "@protobufjs/pool": "^1.1.0", + "@protobufjs/utf8": "^1.1.0", + "@types/long": "^4.0.0", + "@types/node": "^10.1.0", + "long": "^4.0.0" + } + }, "proxy-addr": { "version": "2.0.4", "resolved": "https://registry.npmjs.org/proxy-addr/-/proxy-addr-2.0.4.tgz", @@ -11006,9 +11852,9 @@ "integrity": "sha512-XRsRjdf+j5ml+y/6GKHPZbrF/8p2Yga0JPtdqTIY2Xe5ohJPD9saDJJLPvp9+NSBprVvevdXZybnj2cv8OEd0A==" }, "puppeteer": { - "version": "1.11.0", - "resolved": "https://registry.npmjs.org/puppeteer/-/puppeteer-1.11.0.tgz", - "integrity": "sha512-iG4iMOHixc2EpzqRV+pv7o3GgmU2dNYEMkvKwSaQO/vMZURakwSOn/EYJ6OIRFYOque1qorzIBvrytPIQB3YzQ==", + "version": "1.20.0", + "resolved": "https://registry.npmjs.org/puppeteer/-/puppeteer-1.20.0.tgz", + "integrity": "sha512-bt48RDBy2eIwZPrkgbcwHtb51mj2nKvHOPMaSH2IsWiv7lOG9k9zhaRzpDZafrk05ajMc3cu+lSQYYOfH2DkVQ==", "dev": true, "requires": { "debug": "^4.1.0", @@ -11031,15 +11877,15 @@ } }, "mime": { - "version": "2.4.0", - "resolved": "https://registry.npmjs.org/mime/-/mime-2.4.0.tgz", - "integrity": "sha512-ikBcWwyqXQSHKtciCcctu9YfPbFYZ4+gbHEmE0Q8jzcTYQg5dHCr3g2wwAZjPoJfQVXZq6KXAjpXOTf5/cjT7w==", + "version": "2.4.4", + "resolved": "https://registry.npmjs.org/mime/-/mime-2.4.4.tgz", + "integrity": "sha512-LRxmNwziLPT828z+4YkNzloCFC2YM4wrB99k+AV5ZbEyfGNWfG8SO1FUXLmLDBSo89NrJZ4DIWeLjy1CHGhMGA==", "dev": true }, "ms": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.1.tgz", - "integrity": "sha512-tgp+dl5cGk28utYktBsrFqA7HKgrhgPsg6Z/EfhWI4gl1Hwq8B/GmY/0oXZ6nF8hDVesS/FpnYaD/kOWhYQvyg==", + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", + "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==", "dev": true }, "progress": { @@ -11049,9 +11895,9 @@ "dev": true }, "ws": { - "version": "6.1.2", - "resolved": "https://registry.npmjs.org/ws/-/ws-6.1.2.tgz", - "integrity": "sha512-rfUqzvz0WxmSXtJpPMX2EeASXabOrSMk1ruMOV3JBTBjo4ac2lDjGGsbQSyxj8Odhw5fBib8ZKEjDNvgouNKYw==", + "version": "6.2.1", + "resolved": "https://registry.npmjs.org/ws/-/ws-6.2.1.tgz", + "integrity": "sha512-GIyAXC2cB7LjvpgMt9EKS2ldqr0MTrORaleiOno6TweZ6r3TKtoFQWay/2PceJ3RuBasOHzXNn5Lrw1X0bEjqA==", "dev": true, "requires": { "async-limiter": "~1.0.0" @@ -11648,6 +12494,12 @@ "balanced-match": "^0.4.2" } }, + "reflect.ownkeys": { + "version": "0.2.0", + "resolved": "https://registry.npmjs.org/reflect.ownkeys/-/reflect.ownkeys-0.2.0.tgz", + "integrity": "sha1-dJrO7H8/34tj+SegSAnpDFwLNGA=", + "dev": true + }, "regenerate": { "version": "1.4.0", "resolved": "https://registry.npmjs.org/regenerate/-/regenerate-1.4.0.tgz", @@ -12179,9 +13031,9 @@ "integrity": "sha1-BF+XgtARrppoA93TgrJDkrPYkPc=" }, "set-value": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/set-value/-/set-value-2.0.0.tgz", - "integrity": "sha512-hw0yxk9GT/Hr5yJEYnHNKYXkIA8mVJgd9ditYZCe16ZczcaELYYcfvaXesNACk2O8O0nTiPQcQhGUQj8JLzeeg==", + 
"version": "2.0.1", + "resolved": "https://registry.npmjs.org/set-value/-/set-value-2.0.1.tgz", + "integrity": "sha512-JxHc1weCN68wRY0fhCoXpyK55m/XPHafOmK4UWD7m2CI14GMcFypt4w/0+NV5f/ZMby2F6S2wwA7fgynh9gWSw==", "requires": { "extend-shallow": "^2.0.1", "is-extendable": "^0.1.1", @@ -12728,14 +13580,34 @@ } }, "string.prototype.trim": { - "version": "1.1.2", - "resolved": "https://registry.npmjs.org/string.prototype.trim/-/string.prototype.trim-1.1.2.tgz", - "integrity": "sha1-0E3iyJ4Tf019IG8Ia17S+ua+jOo=", + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/string.prototype.trim/-/string.prototype.trim-1.2.0.tgz", + "integrity": "sha512-9EIjYD/WdlvLpn987+ctkLf0FfvBefOCuiEr2henD8X+7jfwPnyvTdmW8OJhj5p+M0/96mBdynLWkxUr+rHlpg==", "dev": true, "requires": { - "define-properties": "^1.1.2", - "es-abstract": "^1.5.0", - "function-bind": "^1.0.2" + "define-properties": "^1.1.3", + "es-abstract": "^1.13.0", + "function-bind": "^1.1.1" + } + }, + "string.prototype.trimleft": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/string.prototype.trimleft/-/string.prototype.trimleft-2.1.0.tgz", + "integrity": "sha512-FJ6b7EgdKxxbDxc79cOlok6Afd++TTs5szo+zJTUyow3ycrRfJVE2pq3vcN53XexvKZu/DJMDfeI/qMiZTrjTw==", + "dev": true, + "requires": { + "define-properties": "^1.1.3", + "function-bind": "^1.1.1" + } + }, + "string.prototype.trimright": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/string.prototype.trimright/-/string.prototype.trimright-2.1.0.tgz", + "integrity": "sha512-fXZTSV55dNBwv16uw+hh5jkghxSnc5oHq+5K/gXgizHwAvMetdAJlHqqoFC1FSDVPYWLkAKl2cxpUT41sV7nSg==", + "dev": true, + "requires": { + "define-properties": "^1.1.3", + "function-bind": "^1.1.1" } }, "string_decoder": { @@ -12814,6 +13686,24 @@ "has-flag": "^3.0.0" } }, + "supports-hyperlinks": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/supports-hyperlinks/-/supports-hyperlinks-1.0.1.tgz", + "integrity": "sha512-HHi5kVSefKaJkGYXbDuKbUGRVxqnWGn3J2e39CYcNJEfWciGq2zYtOhXLTlvrOZW1QU7VX67w7fMmWafHX9Pfw==", + "dev": true, + "requires": { + "has-flag": "^2.0.0", + "supports-color": "^5.0.0" + }, + "dependencies": { + "has-flag": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-2.0.0.tgz", + "integrity": "sha1-6CB68cx7MNRGzHC3NLXovhj4jVE=", + "dev": true + } + } + }, "svgo": { "version": "0.7.2", "resolved": "https://registry.npmjs.org/svgo/-/svgo-0.7.2.tgz", @@ -13448,9 +14338,9 @@ } }, "tslint-config-prettier": { - "version": "1.17.0", - "resolved": "https://registry.npmjs.org/tslint-config-prettier/-/tslint-config-prettier-1.17.0.tgz", - "integrity": "sha512-NKWNkThwqE4Snn4Cm6SZB7lV5RMDDFsBwz6fWUkTxOKGjMx8ycOHnjIbhn7dZd5XmssW3CwqUjlANR6EhP9YQw==" + "version": "1.18.0", + "resolved": "https://registry.npmjs.org/tslint-config-prettier/-/tslint-config-prettier-1.18.0.tgz", + "integrity": "sha512-xPw9PgNPLG3iKRxmK7DWr+Ea/SzrvfHtjFt5LBl61gk2UBG/DB9kCXRjv+xyIU1rUtnayLeMUVJBcMX8Z17nDg==" }, "tslint-react": { "version": "3.6.0", @@ -13509,9 +14399,9 @@ "integrity": "sha1-hnrHTjhkGHsdPUfZlqeOxciDB3c=" }, "typescript": { - "version": "3.3.1", - "resolved": "https://registry.npmjs.org/typescript/-/typescript-3.3.1.tgz", - "integrity": "sha512-cTmIDFW7O0IHbn1DPYjkiebHxwtCMU+eTy30ZtJNBPF9j2O1ITu5XH2YnBeVRKWHqF+3JQwWJv0Q0aUgX8W7IA==", + "version": "3.6.4", + "resolved": "https://registry.npmjs.org/typescript/-/typescript-3.6.4.tgz", + "integrity": "sha512-unoCll1+l+YK4i4F8f22TaNVPRHcD9PA3yCuZ8g5e0qGqlVlJ/8FSateOLLSagn+Yg5+ZwuPkL8LFUc0Jcvksg==", "dev": true }, 
"typestyle": { @@ -13528,11 +14418,6 @@ "resolved": "https://registry.npmjs.org/ua-parser-js/-/ua-parser-js-0.7.19.tgz", "integrity": "sha512-T3PVJ6uz8i0HzPxOF9SWzWAlfN/DavlpQqepn22xgve/5QecC+XMCAtmUNnY7C9StehaV6exjUCI801lOI7QlQ==" }, - "uc.micro": { - "version": "1.0.6", - "resolved": "https://registry.npmjs.org/uc.micro/-/uc.micro-1.0.6.tgz", - "integrity": "sha512-8Y75pvTYkLJW2hWQHXxoqRgV7qb9B+9vFEtidML+7koHUFapnVJAZ6cKs+Qjz5Aw3aZWHMC6u0wJE3At+nSGwA==" - }, "uglify-js": { "version": "3.4.9", "resolved": "https://registry.npmjs.org/uglify-js/-/uglify-js-3.4.9.tgz", @@ -13633,35 +14518,14 @@ "dev": true }, "union-value": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/union-value/-/union-value-1.0.0.tgz", - "integrity": "sha1-XHHDTLW61dzr4+oM0IIHulqhrqQ=", + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/union-value/-/union-value-1.0.1.tgz", + "integrity": "sha512-tJfXmxMeWYnczCVs7XAEvIV7ieppALdyepWMkHkwciRpZraG/xwT+s2JN8+pr1+8jCRf80FFzvr+MpQeeoF4Xg==", "requires": { "arr-union": "^3.1.0", "get-value": "^2.0.6", "is-extendable": "^0.1.1", - "set-value": "^0.4.3" - }, - "dependencies": { - "extend-shallow": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/extend-shallow/-/extend-shallow-2.0.1.tgz", - "integrity": "sha1-Ua99YUrZqfYQ6huvu5idaxxWiQ8=", - "requires": { - "is-extendable": "^0.1.0" - } - }, - "set-value": { - "version": "0.4.3", - "resolved": "https://registry.npmjs.org/set-value/-/set-value-0.4.3.tgz", - "integrity": "sha1-fbCPnT0i3H945Trzw79GZuzfzPE=", - "requires": { - "extend-shallow": "^2.0.1", - "is-extendable": "^0.1.1", - "is-plain-object": "^2.0.1", - "to-object-path": "^0.3.0" - } - } + "set-value": "^2.0.1" } }, "uniq": { @@ -13708,6 +14572,11 @@ "resolved": "https://registry.npmjs.org/unpipe/-/unpipe-1.0.0.tgz", "integrity": "sha1-sr9O6FFKrmFltIF4KdIbLvSZBOw=" }, + "unquote": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/unquote/-/unquote-1.1.1.tgz", + "integrity": "sha1-j97XMk7G6IoP+LkF58CYzcCG1UQ=" + }, "unset-value": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/unset-value/-/unset-value-1.0.0.tgz", @@ -13749,11 +14618,6 @@ "resolved": "https://registry.npmjs.org/unzip-response/-/unzip-response-2.0.1.tgz", "integrity": "sha1-0vD3N9FrBhXnKmk17QQhRXLVb5c=" }, - "upath": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/upath/-/upath-1.1.0.tgz", - "integrity": "sha512-bzpH/oBhoS/QI/YtbkqCg6VEiPYjSZtrHQM6/QnJS6OL9pKUFLqb3aFh4Scvwm45+7iAgiMkLhSbaZxUqmrprw==" - }, "update-notifier": { "version": "2.5.0", "resolved": "https://registry.npmjs.org/update-notifier/-/update-notifier-2.5.0.tgz", @@ -14004,23 +14868,34 @@ } }, "chokidar": { - "version": "2.0.4", - "resolved": "https://registry.npmjs.org/chokidar/-/chokidar-2.0.4.tgz", - "integrity": "sha512-z9n7yt9rOvIJrMhvDtDictKrkFHeihkNl6uWMmZlmL6tJtX9Cs+87oK+teBx+JIgzvbX3yZHT3eF8vpbDxHJXQ==", + "version": "2.1.8", + "resolved": "https://registry.npmjs.org/chokidar/-/chokidar-2.1.8.tgz", + "integrity": "sha512-ZmZUazfOzf0Nve7duiCKD23PFSCs4JPoYyccjUFF3aQkQadqBhfzhjkwBH2mNOG9cTBwhamM37EIsIkZw3nRgg==", "requires": { "anymatch": "^2.0.0", - "async-each": "^1.0.0", - "braces": "^2.3.0", - "fsevents": "^1.2.2", + "async-each": "^1.0.1", + "braces": "^2.3.2", + "fsevents": "^1.2.7", "glob-parent": "^3.1.0", - "inherits": "^2.0.1", + "inherits": "^2.0.3", "is-binary-path": "^1.0.0", "is-glob": "^4.0.0", - "lodash.debounce": "^4.0.8", - "normalize-path": "^2.1.1", + "normalize-path": "^3.0.0", "path-is-absolute": "^1.0.0", - 
"readdirp": "^2.0.0", - "upath": "^1.0.5" + "readdirp": "^2.2.1", + "upath": "^1.1.1" + }, + "dependencies": { + "normalize-path": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/normalize-path/-/normalize-path-3.0.0.tgz", + "integrity": "sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA==" + }, + "upath": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/upath/-/upath-1.2.0.tgz", + "integrity": "sha512-aZwGpamFO61g3OlfT7OQCHqhGnW43ieH9WZeP7QxN/G/jS4jfqUkZxoryvJgVPEcrl5NL/ggHsSmLMHuH64Lhg==" + } } }, "glob-parent": { @@ -14280,12 +15155,13 @@ } }, "webpack-bundle-analyzer": { - "version": "3.0.3", - "resolved": "https://registry.npmjs.org/webpack-bundle-analyzer/-/webpack-bundle-analyzer-3.0.3.tgz", - "integrity": "sha512-naLWiRfmtH4UJgtUktRTLw6FdoZJ2RvCR9ePbwM9aRMsS/KjFerkPZG9epEvXRAw5d5oPdrs9+3p+afNjxW8Xw==", + "version": "3.6.0", + "resolved": "https://registry.npmjs.org/webpack-bundle-analyzer/-/webpack-bundle-analyzer-3.6.0.tgz", + "integrity": "sha512-orUfvVYEfBMDXgEKAKVvab5iQ2wXneIEorGNsyuOyVYpjYrI7CUOhhXNDd3huMwQ3vNNWWlGP+hzflMFYNzi2g==", "dev": true, "requires": { - "acorn": "^5.7.3", + "acorn": "^6.0.7", + "acorn-walk": "^6.1.1", "bfj": "^6.1.1", "chalk": "^2.4.1", "commander": "^2.18.0", @@ -14293,16 +15169,22 @@ "express": "^4.16.3", "filesize": "^3.6.1", "gzip-size": "^5.0.0", - "lodash": "^4.17.10", + "lodash": "^4.17.15", "mkdirp": "^0.5.1", "opener": "^1.5.1", "ws": "^6.0.0" }, "dependencies": { + "acorn": { + "version": "6.3.0", + "resolved": "https://registry.npmjs.org/acorn/-/acorn-6.3.0.tgz", + "integrity": "sha512-/czfa8BwS88b9gWQVhc8eknunSA2DoJpJyTQkhheIf5E48u1N0R4q/YxxsAeqRrmK9TQ/uYfgLDfZo91UlANIA==", + "dev": true + }, "chalk": { - "version": "2.4.1", - "resolved": "https://registry.npmjs.org/chalk/-/chalk-2.4.1.tgz", - "integrity": "sha512-ObN6h1v2fTJSmUXoS3nMQ92LbDK9be4TV+6G+omQlGJFdcUX5heKi1LZ1YnRMIgwTLEj3E24bT6tYni50rlCfQ==", + "version": "2.4.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-2.4.2.tgz", + "integrity": "sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ==", "dev": true, "requires": { "ansi-styles": "^3.2.1", @@ -14317,25 +15199,25 @@ "dev": true }, "gzip-size": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/gzip-size/-/gzip-size-5.0.0.tgz", - "integrity": "sha512-5iI7omclyqrnWw4XbXAmGhPsABkSIDQonv2K0h61lybgofWa6iZyvrI3r2zsJH4P8Nb64fFVzlvfhs0g7BBxAA==", + "version": "5.1.1", + "resolved": "https://registry.npmjs.org/gzip-size/-/gzip-size-5.1.1.tgz", + "integrity": "sha512-FNHi6mmoHvs1mxZAds4PpdCS6QG8B4C1krxJsMutgxl5t3+GlRTzzI3NEkifXx2pVsOvJdOGSmIgDhQ55FwdPA==", "dev": true, "requires": { "duplexer": "^0.1.1", - "pify": "^3.0.0" + "pify": "^4.0.1" } }, "pify": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/pify/-/pify-3.0.0.tgz", - "integrity": "sha1-5aSs0sEB/fPZpNB/DbxNtJ3SgXY=", + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/pify/-/pify-4.0.1.tgz", + "integrity": "sha512-uB80kBFb/tfd68bVleG9T5GGsGPjJrLAUpR5PZIrhBnIaRTQRjqdJSsIKkOP6OAIFbj7GOrcudc5pNjZ+geV2g==", "dev": true }, "ws": { - "version": "6.1.2", - "resolved": "https://registry.npmjs.org/ws/-/ws-6.1.2.tgz", - "integrity": "sha512-rfUqzvz0WxmSXtJpPMX2EeASXabOrSMk1ruMOV3JBTBjo4ac2lDjGGsbQSyxj8Odhw5fBib8ZKEjDNvgouNKYw==", + "version": "6.2.1", + "resolved": "https://registry.npmjs.org/ws/-/ws-6.2.1.tgz", + "integrity": 
"sha512-GIyAXC2cB7LjvpgMt9EKS2ldqr0MTrORaleiOno6TweZ6r3TKtoFQWay/2PceJ3RuBasOHzXNn5Lrw1X0bEjqA==", "dev": true, "requires": { "async-limiter": "~1.0.0" diff --git a/frontend/package.json b/frontend/package.json index 1756ea907a19..60c4088c8cd5 100644 --- a/frontend/package.json +++ b/frontend/package.json @@ -16,9 +16,9 @@ "grpc-web": "^1.0.0", "http-proxy-middleware": "^0.19.0", "immer": "^1.7.4", - "js-yaml": "^3.12.0", - "lodash": ">=4.17.11", - "markdown-it": "^8.4.2", + "js-yaml": "^3.13.1", + "lodash": "^4.17.15", + "markdown-to-jsx": "^6.10.3", "portable-fetch": "^3.0.0", "re-resizable": "^4.9.0", "react": "^16.7.0", @@ -41,18 +41,23 @@ "apis:run": "java -jar swagger-codegen-cli.jar generate -i ../backend/api/swagger/run.swagger.json -l typescript-fetch -o ./src/apis/run -c ./swagger-config.json", "apis:filter": "java -jar swagger-codegen-cli.jar generate -i ../backend/api/swagger/filter.swagger.json -l typescript-fetch -o ./src/apis/filter -c ./swagger-config.json", "apis:visualization": "java -jar swagger-codegen-cli.jar generate -i ../backend/api/swagger/visualization.swagger.json -l typescript-fetch -o ./src/apis/visualization -c ./swagger-config.json", - "build": "react-scripts-ts build", + "build": "npm run lint && react-scripts-ts build", "docker": "COMMIT_HASH=`git rev-parse HEAD`; docker build -q -t ml-pipelines-frontend:${COMMIT_HASH} --build-arg COMMIT_HASH=${COMMIT_HASH} --build-arg DATE=\"`date -u`\" -f Dockerfile ..", "eject": "react-scripts-ts eject", + "format": "prettier --write 'src/**/*.{ts,tsx}'", + "format:check": "prettier --check 'src/**/*.{ts,tsx}' || node ./scripts/check-format-error-info.js", "java": "java -version", + "lint": "tslint -c ./tslint.prod.json -p .", "mock:api": "ts-node-dev -O '{\"module\": \"commonjs\"}' mock-backend/mock-api-server.ts 3001", "mock:server": "node server/dist/server.js build", "postinstall": "cd ./server && npm i && cd ../mock-backend && npm i && cd ../src/generated/src/apis/metadata && npm i", - "start:proxies": "./start-proxies.sh", + "start:proxy-standalone": "./start-proxy-standalone.sh", + "start:proxy-standalone-and-server": "./start-proxy-standalone-and-server.sh", "start": "react-scripts-ts start", "test": "react-scripts-ts test --env=jsdom", "test:coverage": "npm test -- --env=jsdom --coverage", "test:coveralls": "npm run test:coverage && cat ./coverage/lcov.info | ./node_modules/coveralls/bin/coveralls.js", + "test:ci": "npm run format:check && npm run lint && npm run test:coveralls", "vr-approve": "backstop approve", "vr-test": "ts-node -O '{\"module\": \"commonjs\"}' backstop.ts" }, @@ -60,13 +65,14 @@ "@types/d3": "^5.0.0", "@types/d3-dsv": "^1.0.33", "@types/dagre": "^0.7.40", - "@types/enzyme": "^3.1.15", - "@types/enzyme-adapter-react-16": "^1.0.3", + "@types/enzyme": "^3.10.3", + "@types/enzyme-adapter-react-16": "^1.0.5", "@types/express": "^4.16.0", "@types/http-proxy-middleware": "^0.17.5", "@types/jest": "^23.3.2", "@types/js-yaml": "^3.11.2", "@types/lodash": ">=4.14.117", + "@types/markdown-to-jsx": "^6.9.0", "@types/node": "^10.10.1", "@types/react": "^16.7.18", "@types/react-dom": "^16.0.7", @@ -75,16 +81,18 @@ "@types/react-virtualized": "^9.18.7", "backstopjs": "^3.5.16", "coveralls": "^3.0.2", - "enzyme": "^3.7.0", - "enzyme-adapter-react-16": "^1.5.0", + "enzyme": "^3.10.0", + "enzyme-adapter-react-16": "^1.15.1", "enzyme-to-json": "^3.3.4", + "prettier": "1.18.2", "react-router-test-context": "^0.1.0", "react-test-renderer": "^16.5.2", "swagger-ts-client": "^0.9.6", "ts-node": "^7.0.1", 
"ts-node-dev": "^1.0.0-pre.30", - "typescript": "^3.3.1", - "webpack-bundle-analyzer": "^3.0.2" + "tslint-config-prettier": "^1.18.0", + "typescript": "^3.6.4", + "webpack-bundle-analyzer": "^3.6.0" }, "homepage": "./", "jest": { diff --git a/frontend/public/index.html b/frontend/public/index.html index 5fbffbcb1fa9..7c22b06dff7c 100644 --- a/frontend/public/index.html +++ b/frontend/public/index.html @@ -20,8 +20,8 @@ - - + + Kubeflow Pipelines diff --git a/frontend/public/favicon.ico b/frontend/public/static/favicon.ico similarity index 100% rename from frontend/public/favicon.ico rename to frontend/public/static/favicon.ico diff --git a/frontend/public/manifest.json b/frontend/public/static/manifest.json similarity index 100% rename from frontend/public/manifest.json rename to frontend/public/static/manifest.json diff --git a/frontend/scripts/check-format-error-info.js b/frontend/scripts/check-format-error-info.js new file mode 100644 index 000000000000..8b644f7a1cca --- /dev/null +++ b/frontend/scripts/check-format-error-info.js @@ -0,0 +1,24 @@ +/* + * Copyright 2019 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +console.error(); +console.error('================='); +console.error('Please use `npm run format` to format your code.'); +console.error( + 'Refer to https://github.com/kubeflow/pipelines/tree/master/frontend#code-style for our code style guidance.', +); +console.error(); +process.exit(1); diff --git a/frontend/src/Css.tsx b/frontend/src/Css.tsx index 6e21e619ab7a..5fae050dc319 100644 --- a/frontend/src/Css.tsx +++ b/frontend/src/Css.tsx @@ -122,7 +122,7 @@ export const theme = createMuiTheme({ }, color: color.theme, marginRight: 10, - padding: '0 8px' + padding: '0 8px', }, }, MuiDialogActions: { @@ -162,7 +162,7 @@ export const theme = createMuiTheme({ }, MuiInput: { input: { padding: 0 }, - root: { padding: 0 } + root: { padding: 0 }, }, MuiInputAdornment: { positionEnd: { @@ -175,13 +175,13 @@ export const theme = createMuiTheme({ backgroundColor: '#666', color: '#f1f1f1', fontSize: 12, - } + }, }, }, palette, typography: { fontFamily: fonts.main, - fontSize: fontsize.base + ' !important' as any, + fontSize: (fontsize.base + ' !important') as any, useNextVariants: true, }, }); @@ -247,7 +247,7 @@ export const commonCss = stylesheet({ infoIcon: { color: color.lowContrast, height: 16, - width: 16 + width: 16, }, link: { $nest: { @@ -280,8 +280,7 @@ export const commonCss = stylesheet({ whiteSpace: 'pre-wrap', }, scrollContainer: { - background: - `linear-gradient(white 30%, rgba(255,255,255,0)), + background: `linear-gradient(white 30%, rgba(255,255,255,0)), linear-gradient(rgba(255,255,255,0), white 70%) 0 100%, radial-gradient(farthest-corner at 50% 0, rgba(0,0,0,.2), rgba(0,0,0,0)), radial-gradient(farthest-corner at 50% 100%, rgba(0,0,0,.2), rgba(0,0,0,0)) 0 100%`, diff --git a/frontend/src/TestUtils.tsx b/frontend/src/TestUtils.tsx index 4b8c9611e4c7..7aa594f0afcd 100644 --- a/frontend/src/TestUtils.tsx +++ b/frontend/src/TestUtils.tsx @@ 
-50,7 +50,7 @@ export default class TestUtils { * Adds a one-time mock implementation to the provided spy that mimics an error * network response */ - public static makeErrorResponseOnce(spy: jest.MockInstance<{}>, message: string): void { + public static makeErrorResponseOnce(spy: jest.MockInstance, message: string): void { spy.mockImplementationOnce(() => { throw { text: () => Promise.resolve(message), @@ -64,11 +64,16 @@ export default class TestUtils { * to be set after component initialization. */ // tslint:disable-next-line:variable-name - public static generatePageProps(PageElement: new (_: PageProps) => Page, - location: Location, matchValue: match, - historyPushSpy: jest.SpyInstance | null, updateBannerSpy: jest.SpyInstance | null, - updateDialogSpy: jest.SpyInstance | null, updateToolbarSpy: jest.SpyInstance | null, - updateSnackbarSpy: jest.SpyInstance | null): PageProps { + public static generatePageProps( + PageElement: new (_: PageProps) => Page, + location: Location, + matchValue: match, + historyPushSpy: jest.SpyInstance | null, + updateBannerSpy: jest.SpyInstance | null, + updateDialogSpy: jest.SpyInstance | null, + updateToolbarSpy: jest.SpyInstance | null, + updateSnackbarSpy: jest.SpyInstance | null, + ): PageProps { const pageProps = { history: { push: historyPushSpy } as any, location: location as any, @@ -88,7 +93,10 @@ export default class TestUtils { return pageProps; } - public static getToolbarButton(updateToolbarSpy: jest.SpyInstance, buttonKey: string): ToolbarActionConfig { + public static getToolbarButton( + updateToolbarSpy: jest.SpyInstance, + buttonKey: string, + ): ToolbarActionConfig { const lastCallIdx = updateToolbarSpy.mock.calls.length - 1; const lastCall = updateToolbarSpy.mock.calls[lastCallIdx][0]; return lastCall.actions[buttonKey]; diff --git a/frontend/src/apis/experiment/.swagger-codegen/VERSION b/frontend/src/apis/experiment/.swagger-codegen/VERSION index a6254504e401..48a6b508dc9f 100644 --- a/frontend/src/apis/experiment/.swagger-codegen/VERSION +++ b/frontend/src/apis/experiment/.swagger-codegen/VERSION @@ -1 +1 @@ -2.3.1 \ No newline at end of file +2.4.7 \ No newline at end of file diff --git a/frontend/src/apis/experiment/api.ts b/frontend/src/apis/experiment/api.ts index 7e85bbca7324..a168703c6571 100644 --- a/frontend/src/apis/experiment/api.ts +++ b/frontend/src/apis/experiment/api.ts @@ -5,29 +5,28 @@ * No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen) * * OpenAPI spec version: version not set - * + * * * NOTE: This class is auto generated by the swagger code generator program. * https://github.com/swagger-api/swagger-codegen.git * Do not edit the class manually. 
*/ +import * as url from 'url'; +import * as portableFetch from 'portable-fetch'; +import { Configuration } from './configuration'; -import * as url from "url"; -import * as portableFetch from "portable-fetch"; -import { Configuration } from "./configuration"; - -const BASE_PATH = "http://localhost".replace(/\/+$/, ""); +const BASE_PATH = 'http://localhost'.replace(/\/+$/, ''); /** * * @export */ export const COLLECTION_FORMATS = { - csv: ",", - ssv: " ", - tsv: "\t", - pipes: "|", + csv: ',', + ssv: ' ', + tsv: '\t', + pipes: '|', }; /** @@ -36,130 +35,134 @@ export const COLLECTION_FORMATS = { * @interface FetchAPI */ export interface FetchAPI { - (url: string, init?: any): Promise; + (url: string, init?: any): Promise; } /** - * + * * @export * @interface FetchArgs */ export interface FetchArgs { - url: string; - options: any; + url: string; + options: any; } /** - * + * * @export * @class BaseAPI */ export class BaseAPI { - protected configuration: Configuration; - - constructor(configuration?: Configuration, protected basePath: string = BASE_PATH, protected fetch: FetchAPI = portableFetch) { - if (configuration) { - this.configuration = configuration; - this.basePath = configuration.basePath || this.basePath; - } + protected configuration: Configuration; + + constructor( + configuration?: Configuration, + protected basePath: string = BASE_PATH, + protected fetch: FetchAPI = portableFetch, + ) { + if (configuration) { + this.configuration = configuration; + this.basePath = configuration.basePath || this.basePath; } -}; + } +} /** - * + * * @export * @class RequiredError * @extends {Error} */ export class RequiredError extends Error { - name: "RequiredError" - constructor(public field: string, msg?: string) { - super(msg); - } + name: 'RequiredError'; + constructor(public field: string, msg?: string) { + super(msg); + } } /** - * + * * @export * @interface ApiExperiment */ export interface ApiExperiment { - /** - * Output. Unique experiment ID. Generated by API server. - * @type {string} - * @memberof ApiExperiment - */ - id?: string; - /** - * Required input field. Unique experiment name provided by user. - * @type {string} - * @memberof ApiExperiment - */ - name?: string; - /** - * - * @type {string} - * @memberof ApiExperiment - */ - description?: string; - /** - * Output. The time that the experiment created. - * @type {Date} - * @memberof ApiExperiment - */ - created_at?: Date; + /** + * Output. Unique experiment ID. Generated by API server. + * @type {string} + * @memberof ApiExperiment + */ + id?: string; + /** + * Required input field. Unique experiment name provided by user. + * @type {string} + * @memberof ApiExperiment + */ + name?: string; + /** + * + * @type {string} + * @memberof ApiExperiment + */ + description?: string; + /** + * Output. The time that the experiment created. + * @type {Date} + * @memberof ApiExperiment + */ + created_at?: Date; } /** - * + * * @export * @interface ApiListExperimentsResponse */ export interface ApiListExperimentsResponse { - /** - * A list of experiments returned. - * @type {Array<ApiExperiment>} - * @memberof ApiListExperimentsResponse - */ - experiments?: Array; - /** - * The total number of experiments for the given query. - * @type {number} - * @memberof ApiListExperimentsResponse - */ - total_size?: number; - /** - * The token to list the next page of experiments. - * @type {string} - * @memberof ApiListExperimentsResponse - */ - next_page_token?: string; + /** + * A list of experiments returned. 
+ * @type {Array} + * @memberof ApiListExperimentsResponse + */ + experiments?: Array; + /** + * The total number of experiments for the given query. + * @type {number} + * @memberof ApiListExperimentsResponse + */ + total_size?: number; + /** + * The token to list the next page of experiments. + * @type {string} + * @memberof ApiListExperimentsResponse + */ + next_page_token?: string; } /** - * + * * @export * @interface ApiStatus */ export interface ApiStatus { - /** - * - * @type {string} - * @memberof ApiStatus - */ - error?: string; - /** - * - * @type {number} - * @memberof ApiStatus - */ - code?: number; - /** - * - * @type {Array<ProtobufAny>} - * @memberof ApiStatus - */ - details?: Array; + /** + * + * @type {string} + * @memberof ApiStatus + */ + error?: string; + /** + * + * @type {number} + * @memberof ApiStatus + */ + code?: number; + /** + * + * @type {Array} + * @memberof ApiStatus + */ + details?: Array; } /** @@ -168,189 +171,233 @@ export interface ApiStatus { * @interface ProtobufAny */ export interface ProtobufAny { - /** - * A URL/resource name that uniquely identifies the type of the serialized protocol buffer message. The last segment of the URL's path must represent the fully qualified name of the type (as in `path/google.protobuf.Duration`). The name should be in a canonical form (e.g., leading \".\" is not accepted). In practice, teams usually precompile into the binary all types that they expect it to use in the context of Any. However, for URLs which use the scheme `http`, `https`, or no scheme, one can optionally set up a type server that maps type URLs to message definitions as follows: * If no scheme is provided, `https` is assumed. * An HTTP GET on the URL must yield a [google.protobuf.Type][] value in binary format, or produce an error. * Applications are allowed to cache lookup results based on the URL, or have them precompiled into a binary to avoid any lookup. Therefore, binary compatibility needs to be preserved on changes to types. (Use versioned type names to manage breaking changes.) Note: this functionality is not currently available in the official protobuf release, and it is not used for type URLs beginning with type.googleapis.com. Schemes other than `http`, `https` (or the empty scheme) might be used with implementation specific semantics. - * @type {string} - * @memberof ProtobufAny - */ - type_url?: string; - /** - * Must be a valid serialized protocol buffer of the above specified type. - * @type {string} - * @memberof ProtobufAny - */ - value?: string; + /** + * A URL/resource name that uniquely identifies the type of the serialized protocol buffer message. The last segment of the URL's path must represent the fully qualified name of the type (as in `path/google.protobuf.Duration`). The name should be in a canonical form (e.g., leading \".\" is not accepted). In practice, teams usually precompile into the binary all types that they expect it to use in the context of Any. However, for URLs which use the scheme `http`, `https`, or no scheme, one can optionally set up a type server that maps type URLs to message definitions as follows: * If no scheme is provided, `https` is assumed. * An HTTP GET on the URL must yield a [google.protobuf.Type][] value in binary format, or produce an error. * Applications are allowed to cache lookup results based on the URL, or have them precompiled into a binary to avoid any lookup. Therefore, binary compatibility needs to be preserved on changes to types. (Use versioned type names to manage breaking changes.) 
Note: this functionality is not currently available in the official protobuf release, and it is not used for type URLs beginning with type.googleapis.com. Schemes other than `http`, `https` (or the empty scheme) might be used with implementation specific semantics. + * @type {string} + * @memberof ProtobufAny + */ + type_url?: string; + /** + * Must be a valid serialized protocol buffer of the above specified type. + * @type {string} + * @memberof ProtobufAny + */ + value?: string; } - /** * ExperimentServiceApi - fetch parameter creator * @export */ -export const ExperimentServiceApiFetchParamCreator = function (configuration?: Configuration) { - return { - /** - * - * @param {ApiExperiment} body The experiment to be created - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - createExperiment(body: ApiExperiment, options: any = {}): FetchArgs { - // verify required parameter 'body' is not null or undefined - if (body === null || body === undefined) { - throw new RequiredError('body','Required parameter body was null or undefined when calling createExperiment.'); - } - const localVarPath = `/apis/v1beta1/experiments`; - const localVarUrlObj = url.parse(localVarPath, true); - const localVarRequestOptions = Object.assign({ method: 'POST' }, options); - const localVarHeaderParameter = {} as any; - const localVarQueryParameter = {} as any; - - // authentication Bearer required - if (configuration && configuration.apiKey) { - const localVarApiKeyValue = typeof configuration.apiKey === 'function' - ? configuration.apiKey("authorization") - : configuration.apiKey; - localVarHeaderParameter["authorization"] = localVarApiKeyValue; - } - - localVarHeaderParameter['Content-Type'] = 'application/json'; - - localVarUrlObj.query = Object.assign({}, localVarUrlObj.query, localVarQueryParameter, options.query); - // fix override query string Detail: https://stackoverflow.com/a/7517673/1077943 - delete localVarUrlObj.search; - localVarRequestOptions.headers = Object.assign({}, localVarHeaderParameter, options.headers); - const needsSerialization = ("ApiExperiment" !== "string") || localVarRequestOptions.headers['Content-Type'] === 'application/json'; - localVarRequestOptions.body = needsSerialization ? JSON.stringify(body || {}) : (body || ""); - - return { - url: url.format(localVarUrlObj), - options: localVarRequestOptions, - }; - }, - /** - * - * @param {string} id The ID of the experiment to be deleted. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - deleteExperiment(id: string, options: any = {}): FetchArgs { - // verify required parameter 'id' is not null or undefined - if (id === null || id === undefined) { - throw new RequiredError('id','Required parameter id was null or undefined when calling deleteExperiment.'); - } - const localVarPath = `/apis/v1beta1/experiments/{id}` - .replace(`{${"id"}}`, encodeURIComponent(String(id))); - const localVarUrlObj = url.parse(localVarPath, true); - const localVarRequestOptions = Object.assign({ method: 'DELETE' }, options); - const localVarHeaderParameter = {} as any; - const localVarQueryParameter = {} as any; - - // authentication Bearer required - if (configuration && configuration.apiKey) { - const localVarApiKeyValue = typeof configuration.apiKey === 'function' - ? 
configuration.apiKey("authorization") - : configuration.apiKey; - localVarHeaderParameter["authorization"] = localVarApiKeyValue; - } - - localVarUrlObj.query = Object.assign({}, localVarUrlObj.query, localVarQueryParameter, options.query); - // fix override query string Detail: https://stackoverflow.com/a/7517673/1077943 - delete localVarUrlObj.search; - localVarRequestOptions.headers = Object.assign({}, localVarHeaderParameter, options.headers); - - return { - url: url.format(localVarUrlObj), - options: localVarRequestOptions, - }; - }, - /** - * - * @param {string} id The ID of the experiment to be retrieved - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - getExperiment(id: string, options: any = {}): FetchArgs { - // verify required parameter 'id' is not null or undefined - if (id === null || id === undefined) { - throw new RequiredError('id','Required parameter id was null or undefined when calling getExperiment.'); - } - const localVarPath = `/apis/v1beta1/experiments/{id}` - .replace(`{${"id"}}`, encodeURIComponent(String(id))); - const localVarUrlObj = url.parse(localVarPath, true); - const localVarRequestOptions = Object.assign({ method: 'GET' }, options); - const localVarHeaderParameter = {} as any; - const localVarQueryParameter = {} as any; - - // authentication Bearer required - if (configuration && configuration.apiKey) { - const localVarApiKeyValue = typeof configuration.apiKey === 'function' - ? configuration.apiKey("authorization") - : configuration.apiKey; - localVarHeaderParameter["authorization"] = localVarApiKeyValue; - } - - localVarUrlObj.query = Object.assign({}, localVarUrlObj.query, localVarQueryParameter, options.query); - // fix override query string Detail: https://stackoverflow.com/a/7517673/1077943 - delete localVarUrlObj.search; - localVarRequestOptions.headers = Object.assign({}, localVarHeaderParameter, options.headers); - - return { - url: url.format(localVarUrlObj), - options: localVarRequestOptions, - }; - }, - /** - * - * @param {string} [page_token] - * @param {number} [page_size] - * @param {string} [sort_by] Can be format of \"field_name\", \"field_name asc\" or \"field_name des\" Ascending by default. - * @param {string} [filter] A base-64 encoded, JSON-serialized Filter protocol buffer (see filter.proto). - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - listExperiment(page_token?: string, page_size?: number, sort_by?: string, filter?: string, options: any = {}): FetchArgs { - const localVarPath = `/apis/v1beta1/experiments`; - const localVarUrlObj = url.parse(localVarPath, true); - const localVarRequestOptions = Object.assign({ method: 'GET' }, options); - const localVarHeaderParameter = {} as any; - const localVarQueryParameter = {} as any; - - // authentication Bearer required - if (configuration && configuration.apiKey) { - const localVarApiKeyValue = typeof configuration.apiKey === 'function' - ? 
configuration.apiKey("authorization") - : configuration.apiKey; - localVarHeaderParameter["authorization"] = localVarApiKeyValue; - } - - if (page_token !== undefined) { - localVarQueryParameter['page_token'] = page_token; - } - - if (page_size !== undefined) { - localVarQueryParameter['page_size'] = page_size; - } - - if (sort_by !== undefined) { - localVarQueryParameter['sort_by'] = sort_by; - } - - if (filter !== undefined) { - localVarQueryParameter['filter'] = filter; - } - - localVarUrlObj.query = Object.assign({}, localVarUrlObj.query, localVarQueryParameter, options.query); - // fix override query string Detail: https://stackoverflow.com/a/7517673/1077943 - delete localVarUrlObj.search; - localVarRequestOptions.headers = Object.assign({}, localVarHeaderParameter, options.headers); - - return { - url: url.format(localVarUrlObj), - options: localVarRequestOptions, - }; - }, - } +export const ExperimentServiceApiFetchParamCreator = function(configuration?: Configuration) { + return { + /** + * + * @param {ApiExperiment} body The experiment to be created + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + createExperiment(body: ApiExperiment, options: any = {}): FetchArgs { + // verify required parameter 'body' is not null or undefined + if (body === null || body === undefined) { + throw new RequiredError( + 'body', + 'Required parameter body was null or undefined when calling createExperiment.', + ); + } + const localVarPath = `/apis/v1beta1/experiments`; + const localVarUrlObj = url.parse(localVarPath, true); + const localVarRequestOptions = Object.assign({ method: 'POST' }, options); + const localVarHeaderParameter = {} as any; + const localVarQueryParameter = {} as any; + + // authentication Bearer required + if (configuration && configuration.apiKey) { + const localVarApiKeyValue = + typeof configuration.apiKey === 'function' + ? configuration.apiKey('authorization') + : configuration.apiKey; + localVarHeaderParameter['authorization'] = localVarApiKeyValue; + } + + localVarHeaderParameter['Content-Type'] = 'application/json'; + + localVarUrlObj.query = Object.assign( + {}, + localVarUrlObj.query, + localVarQueryParameter, + options.query, + ); + // fix override query string Detail: https://stackoverflow.com/a/7517673/1077943 + delete localVarUrlObj.search; + localVarRequestOptions.headers = Object.assign({}, localVarHeaderParameter, options.headers); + const needsSerialization = + 'ApiExperiment' !== 'string' || + localVarRequestOptions.headers['Content-Type'] === 'application/json'; + localVarRequestOptions.body = needsSerialization ? JSON.stringify(body || {}) : body || ''; + + return { + url: url.format(localVarUrlObj), + options: localVarRequestOptions, + }; + }, + /** + * + * @param {string} id The ID of the experiment to be deleted. + * @param {*} [options] Override http request option. 
+ * @throws {RequiredError} + */ + deleteExperiment(id: string, options: any = {}): FetchArgs { + // verify required parameter 'id' is not null or undefined + if (id === null || id === undefined) { + throw new RequiredError( + 'id', + 'Required parameter id was null or undefined when calling deleteExperiment.', + ); + } + const localVarPath = `/apis/v1beta1/experiments/{id}`.replace( + `{${'id'}}`, + encodeURIComponent(String(id)), + ); + const localVarUrlObj = url.parse(localVarPath, true); + const localVarRequestOptions = Object.assign({ method: 'DELETE' }, options); + const localVarHeaderParameter = {} as any; + const localVarQueryParameter = {} as any; + + // authentication Bearer required + if (configuration && configuration.apiKey) { + const localVarApiKeyValue = + typeof configuration.apiKey === 'function' + ? configuration.apiKey('authorization') + : configuration.apiKey; + localVarHeaderParameter['authorization'] = localVarApiKeyValue; + } + + localVarUrlObj.query = Object.assign( + {}, + localVarUrlObj.query, + localVarQueryParameter, + options.query, + ); + // fix override query string Detail: https://stackoverflow.com/a/7517673/1077943 + delete localVarUrlObj.search; + localVarRequestOptions.headers = Object.assign({}, localVarHeaderParameter, options.headers); + + return { + url: url.format(localVarUrlObj), + options: localVarRequestOptions, + }; + }, + /** + * + * @param {string} id The ID of the experiment to be retrieved + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + getExperiment(id: string, options: any = {}): FetchArgs { + // verify required parameter 'id' is not null or undefined + if (id === null || id === undefined) { + throw new RequiredError( + 'id', + 'Required parameter id was null or undefined when calling getExperiment.', + ); + } + const localVarPath = `/apis/v1beta1/experiments/{id}`.replace( + `{${'id'}}`, + encodeURIComponent(String(id)), + ); + const localVarUrlObj = url.parse(localVarPath, true); + const localVarRequestOptions = Object.assign({ method: 'GET' }, options); + const localVarHeaderParameter = {} as any; + const localVarQueryParameter = {} as any; + + // authentication Bearer required + if (configuration && configuration.apiKey) { + const localVarApiKeyValue = + typeof configuration.apiKey === 'function' + ? configuration.apiKey('authorization') + : configuration.apiKey; + localVarHeaderParameter['authorization'] = localVarApiKeyValue; + } + + localVarUrlObj.query = Object.assign( + {}, + localVarUrlObj.query, + localVarQueryParameter, + options.query, + ); + // fix override query string Detail: https://stackoverflow.com/a/7517673/1077943 + delete localVarUrlObj.search; + localVarRequestOptions.headers = Object.assign({}, localVarHeaderParameter, options.headers); + + return { + url: url.format(localVarUrlObj), + options: localVarRequestOptions, + }; + }, + /** + * + * @param {string} [page_token] + * @param {number} [page_size] + * @param {string} [sort_by] Can be format of \"field_name\", \"field_name asc\" or \"field_name des\" Ascending by default. + * @param {string} [filter] A base-64 encoded, JSON-serialized Filter protocol buffer (see filter.proto). + * @param {*} [options] Override http request option. 
+ * @throws {RequiredError} + */ + listExperiment( + page_token?: string, + page_size?: number, + sort_by?: string, + filter?: string, + options: any = {}, + ): FetchArgs { + const localVarPath = `/apis/v1beta1/experiments`; + const localVarUrlObj = url.parse(localVarPath, true); + const localVarRequestOptions = Object.assign({ method: 'GET' }, options); + const localVarHeaderParameter = {} as any; + const localVarQueryParameter = {} as any; + + // authentication Bearer required + if (configuration && configuration.apiKey) { + const localVarApiKeyValue = + typeof configuration.apiKey === 'function' + ? configuration.apiKey('authorization') + : configuration.apiKey; + localVarHeaderParameter['authorization'] = localVarApiKeyValue; + } + + if (page_token !== undefined) { + localVarQueryParameter['page_token'] = page_token; + } + + if (page_size !== undefined) { + localVarQueryParameter['page_size'] = page_size; + } + + if (sort_by !== undefined) { + localVarQueryParameter['sort_by'] = sort_by; + } + + if (filter !== undefined) { + localVarQueryParameter['filter'] = filter; + } + + localVarUrlObj.query = Object.assign( + {}, + localVarUrlObj.query, + localVarQueryParameter, + options.query, + ); + // fix override query string Detail: https://stackoverflow.com/a/7517673/1077943 + delete localVarUrlObj.search; + localVarRequestOptions.headers = Object.assign({}, localVarHeaderParameter, options.headers); + + return { + url: url.format(localVarUrlObj), + options: localVarRequestOptions, + }; + }, + }; }; /** @@ -358,186 +405,249 @@ export const ExperimentServiceApiFetchParamCreator = function (configuration?: C * @export */ export const ExperimentServiceApiFp = function(configuration?: Configuration) { - return { - /** - * - * @param {ApiExperiment} body The experiment to be created - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - createExperiment(body: ApiExperiment, options?: any): (fetch?: FetchAPI, basePath?: string) => Promise { - const localVarFetchArgs = ExperimentServiceApiFetchParamCreator(configuration).createExperiment(body, options); - return (fetch: FetchAPI = portableFetch, basePath: string = BASE_PATH) => { - return fetch(basePath + localVarFetchArgs.url, localVarFetchArgs.options).then((response) => { - if (response.status >= 200 && response.status < 300) { - return response.json(); - } else { - throw response; - } - }); - }; - }, - /** - * - * @param {string} id The ID of the experiment to be deleted. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - deleteExperiment(id: string, options?: any): (fetch?: FetchAPI, basePath?: string) => Promise { - const localVarFetchArgs = ExperimentServiceApiFetchParamCreator(configuration).deleteExperiment(id, options); - return (fetch: FetchAPI = portableFetch, basePath: string = BASE_PATH) => { - return fetch(basePath + localVarFetchArgs.url, localVarFetchArgs.options).then((response) => { - if (response.status >= 200 && response.status < 300) { - return response.json(); - } else { - throw response; - } - }); - }; - }, - /** - * - * @param {string} id The ID of the experiment to be retrieved - * @param {*} [options] Override http request option. 
- * @throws {RequiredError} - */ - getExperiment(id: string, options?: any): (fetch?: FetchAPI, basePath?: string) => Promise<ApiExperiment> { - const localVarFetchArgs = ExperimentServiceApiFetchParamCreator(configuration).getExperiment(id, options); - return (fetch: FetchAPI = portableFetch, basePath: string = BASE_PATH) => { - return fetch(basePath + localVarFetchArgs.url, localVarFetchArgs.options).then((response) => { - if (response.status >= 200 && response.status < 300) { - return response.json(); - } else { - throw response; - } - }); - }; - }, - /** - * - * @param {string} [page_token] - * @param {number} [page_size] - * @param {string} [sort_by] Can be format of \"field_name\", \"field_name asc\" or \"field_name des\" Ascending by default. - * @param {string} [filter] A base-64 encoded, JSON-serialized Filter protocol buffer (see filter.proto). - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - listExperiment(page_token?: string, page_size?: number, sort_by?: string, filter?: string, options?: any): (fetch?: FetchAPI, basePath?: string) => Promise<ApiListExperimentsResponse> { - const localVarFetchArgs = ExperimentServiceApiFetchParamCreator(configuration).listExperiment(page_token, page_size, sort_by, filter, options); - return (fetch: FetchAPI = portableFetch, basePath: string = BASE_PATH) => { - return fetch(basePath + localVarFetchArgs.url, localVarFetchArgs.options).then((response) => { - if (response.status >= 200 && response.status < 300) { - return response.json(); - } else { - throw response; - } - }); - }; - }, - } + return { + /** + * + * @param {ApiExperiment} body The experiment to be created + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + createExperiment( + body: ApiExperiment, + options?: any, + ): (fetch?: FetchAPI, basePath?: string) => Promise<ApiExperiment> { + const localVarFetchArgs = ExperimentServiceApiFetchParamCreator( + configuration, + ).createExperiment(body, options); + return (fetch: FetchAPI = portableFetch, basePath: string = BASE_PATH) => { + return fetch(basePath + localVarFetchArgs.url, localVarFetchArgs.options).then(response => { + if (response.status >= 200 && response.status < 300) { + return response.json(); + } else { + throw response; + } + }); + }; + }, + /** + * + * @param {string} id The ID of the experiment to be deleted. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + deleteExperiment( + id: string, + options?: any, + ): (fetch?: FetchAPI, basePath?: string) => Promise<any> { + const localVarFetchArgs = ExperimentServiceApiFetchParamCreator( + configuration, + ).deleteExperiment(id, options); + return (fetch: FetchAPI = portableFetch, basePath: string = BASE_PATH) => { + return fetch(basePath + localVarFetchArgs.url, localVarFetchArgs.options).then(response => { + if (response.status >= 200 && response.status < 300) { + return response.json(); + } else { + throw response; + } + }); + }; + }, + /** + * + * @param {string} id The ID of the experiment to be retrieved + * @param {*} [options] Override http request option.
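// NOTE (editor's illustration, not part of the generated diff): the Fp-layer
// methods above throw the raw fetch Response on any non-2xx status, so callers
// are expected to read the ApiStatus error body themselves. A minimal sketch,
// assuming the frontend's import path and that ApiExperiment exposes an
// optional `name` field:
import { ApiStatus, ExperimentServiceApi } from 'src/apis/experiment';

async function createOrReport(api: ExperimentServiceApi): Promise<void> {
  try {
    await api.createExperiment({ name: 'my-experiment' });
  } catch (err) {
    // The generated code throws the Response object itself, not an Error.
    const status: ApiStatus = await (err as Response).json();
    console.error(`createExperiment failed: code=${status.code}, error=${status.error}`);
  }
}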
+ * @throws {RequiredError} + */ + getExperiment( + id: string, + options?: any, + ): (fetch?: FetchAPI, basePath?: string) => Promise<ApiExperiment> { + const localVarFetchArgs = ExperimentServiceApiFetchParamCreator(configuration).getExperiment( + id, + options, + ); + return (fetch: FetchAPI = portableFetch, basePath: string = BASE_PATH) => { + return fetch(basePath + localVarFetchArgs.url, localVarFetchArgs.options).then(response => { + if (response.status >= 200 && response.status < 300) { + return response.json(); + } else { + throw response; + } + }); + }; + }, + /** + * + * @param {string} [page_token] + * @param {number} [page_size] + * @param {string} [sort_by] Can be format of \"field_name\", \"field_name asc\" or \"field_name des\" Ascending by default. + * @param {string} [filter] A base-64 encoded, JSON-serialized Filter protocol buffer (see filter.proto). + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + listExperiment( + page_token?: string, + page_size?: number, + sort_by?: string, + filter?: string, + options?: any, + ): (fetch?: FetchAPI, basePath?: string) => Promise<ApiListExperimentsResponse> { + const localVarFetchArgs = ExperimentServiceApiFetchParamCreator(configuration).listExperiment( + page_token, + page_size, + sort_by, + filter, + options, + ); + return (fetch: FetchAPI = portableFetch, basePath: string = BASE_PATH) => { + return fetch(basePath + localVarFetchArgs.url, localVarFetchArgs.options).then(response => { + if (response.status >= 200 && response.status < 300) { + return response.json(); + } else { + throw response; + } + }); + }; + }, + }; }; /** * ExperimentServiceApi - factory interface * @export */ -export const ExperimentServiceApiFactory = function (configuration?: Configuration, fetch?: FetchAPI, basePath?: string) { - return { - /** - * - * @param {ApiExperiment} body The experiment to be created - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - createExperiment(body: ApiExperiment, options?: any) { - return ExperimentServiceApiFp(configuration).createExperiment(body, options)(fetch, basePath); - }, - /** - * - * @param {string} id The ID of the experiment to be deleted. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - deleteExperiment(id: string, options?: any) { - return ExperimentServiceApiFp(configuration).deleteExperiment(id, options)(fetch, basePath); - }, - /** - * - * @param {string} id The ID of the experiment to be retrieved - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - getExperiment(id: string, options?: any) { - return ExperimentServiceApiFp(configuration).getExperiment(id, options)(fetch, basePath); - }, - /** - * - * @param {string} [page_token] - * @param {number} [page_size] - * @param {string} [sort_by] Can be format of \"field_name\", \"field_name asc\" or \"field_name des\" Ascending by default. - * @param {string} [filter] A base-64 encoded, JSON-serialized Filter protocol buffer (see filter.proto). - * @param {*} [options] Override http request option.
- * @throws {RequiredError} - */ - listExperiment(page_token?: string, page_size?: number, sort_by?: string, filter?: string, options?: any) { - return ExperimentServiceApiFp(configuration).listExperiment(page_token, page_size, sort_by, filter, options)(fetch, basePath); - }, - }; -}; - -/** - * ExperimentServiceApi - object-oriented interface - * @export - * @class ExperimentServiceApi - * @extends {BaseAPI} - */ -export class ExperimentServiceApi extends BaseAPI { +export const ExperimentServiceApiFactory = function( + configuration?: Configuration, + fetch?: FetchAPI, + basePath?: string, +) { + return { /** - * - * @param {} body The experiment to be created + * + * @param {ApiExperiment} body The experiment to be created * @param {*} [options] Override http request option. * @throws {RequiredError} - * @memberof ExperimentServiceApi */ - public createExperiment(body: ApiExperiment, options?: any) { - return ExperimentServiceApiFp(this.configuration).createExperiment(body, options)(this.fetch, this.basePath); - } - + createExperiment(body: ApiExperiment, options?: any) { + return ExperimentServiceApiFp(configuration).createExperiment(body, options)(fetch, basePath); + }, /** - * - * @param {} id The ID of the experiment to be deleted. + * + * @param {string} id The ID of the experiment to be deleted. * @param {*} [options] Override http request option. * @throws {RequiredError} - * @memberof ExperimentServiceApi */ - public deleteExperiment(id: string, options?: any) { - return ExperimentServiceApiFp(this.configuration).deleteExperiment(id, options)(this.fetch, this.basePath); - } - + deleteExperiment(id: string, options?: any) { + return ExperimentServiceApiFp(configuration).deleteExperiment(id, options)(fetch, basePath); + }, /** - * - * @param {} id The ID of the experiment to be retrieved + * + * @param {string} id The ID of the experiment to be retrieved * @param {*} [options] Override http request option. * @throws {RequiredError} - * @memberof ExperimentServiceApi */ - public getExperiment(id: string, options?: any) { - return ExperimentServiceApiFp(this.configuration).getExperiment(id, options)(this.fetch, this.basePath); - } - + getExperiment(id: string, options?: any) { + return ExperimentServiceApiFp(configuration).getExperiment(id, options)(fetch, basePath); + }, /** - * - * @param {} [page_token] - * @param {} [page_size] - * @param {} [sort_by] Can be format of \"field_name\", \"field_name asc\" or \"field_name des\" Ascending by default. - * @param {} [filter] A base-64 encoded, JSON-serialized Filter protocol buffer (see filter.proto). + * + * @param {string} [page_token] + * @param {number} [page_size] + * @param {string} [sort_by] Can be format of \"field_name\", \"field_name asc\" or \"field_name des\" Ascending by default. + * @param {string} [filter] A base-64 encoded, JSON-serialized Filter protocol buffer (see filter.proto). * @param {*} [options] Override http request option. 
* @throws {RequiredError} - * @memberof ExperimentServiceApi */ - public listExperiment(page_token?: string, page_size?: number, sort_by?: string, filter?: string, options?: any) { - return ExperimentServiceApiFp(this.configuration).listExperiment(page_token, page_size, sort_by, filter, options)(this.fetch, this.basePath); - } + listExperiment( + page_token?: string, + page_size?: number, + sort_by?: string, + filter?: string, + options?: any, + ) { + return ExperimentServiceApiFp(configuration).listExperiment( + page_token, + page_size, + sort_by, + filter, + options, + )(fetch, basePath); + }, + }; +}; +/** + * ExperimentServiceApi - object-oriented interface + * @export + * @class ExperimentServiceApi + * @extends {BaseAPI} + */ +export class ExperimentServiceApi extends BaseAPI { + /** + * + * @param {ApiExperiment} body The experiment to be created + * @param {*} [options] Override http request option. + * @throws {RequiredError} + * @memberof ExperimentServiceApi + */ + public createExperiment(body: ApiExperiment, options?: any) { + return ExperimentServiceApiFp(this.configuration).createExperiment(body, options)( + this.fetch, + this.basePath, + ); + } + + /** + * + * @param {string} id The ID of the experiment to be deleted. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + * @memberof ExperimentServiceApi + */ + public deleteExperiment(id: string, options?: any) { + return ExperimentServiceApiFp(this.configuration).deleteExperiment(id, options)( + this.fetch, + this.basePath, + ); + } + + /** + * + * @param {string} id The ID of the experiment to be retrieved + * @param {*} [options] Override http request option. + * @throws {RequiredError} + * @memberof ExperimentServiceApi + */ + public getExperiment(id: string, options?: any) { + return ExperimentServiceApiFp(this.configuration).getExperiment(id, options)( + this.fetch, + this.basePath, + ); + } + + /** + * + * @param {string} [page_token] + * @param {number} [page_size] + * @param {string} [sort_by] Can be format of \"field_name\", \"field_name asc\" or \"field_name des\" Ascending by default. + * @param {string} [filter] A base-64 encoded, JSON-serialized Filter protocol buffer (see filter.proto). + * @param {*} [options] Override http request option. + * @throws {RequiredError} + * @memberof ExperimentServiceApi + */ + public listExperiment( + page_token?: string, + page_size?: number, + sort_by?: string, + filter?: string, + options?: any, + ) { + return ExperimentServiceApiFp(this.configuration).listExperiment( + page_token, + page_size, + sort_by, + filter, + options, + )(this.fetch, this.basePath); + } } - diff --git a/frontend/src/apis/experiment/configuration.ts b/frontend/src/apis/experiment/configuration.ts index c3a35f9d5756..5a5190e39a09 100644 --- a/frontend/src/apis/experiment/configuration.ts +++ b/frontend/src/apis/experiment/configuration.ts @@ -4,63 +4,62 @@ * No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen) * * OpenAPI spec version: version not set - * + * * * NOTE: This class is auto generated by the swagger code generator program. * https://github.com/swagger-api/swagger-codegen.git * Do not edit the class manually. 
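// NOTE (editor's illustration, not part of the generated diff): the generated
// client is layered — the FetchParamCreator builds { url, options } pairs, the
// ...Fp functions wrap them into (fetch, basePath) => Promise thunks, and the
// ExperimentServiceApi class above binds both to a Configuration. A usage
// sketch; the import paths, basePath value, and the experiment `name` field
// are assumptions:
import { ExperimentServiceApi } from 'src/apis/experiment';
import { Configuration } from 'src/apis/experiment/configuration';

const experimentApi = new ExperimentServiceApi(new Configuration({ basePath: '/pipeline' }));

// Page through experiments ten at a time using next_page_token.
async function listAllExperiments(): Promise<void> {
  let token: string | undefined;
  do {
    const page = await experimentApi.listExperiment(token, 10);
    (page.experiments || []).forEach(e => console.log(e.name));
    token = page.next_page_token || undefined;
  } while (token);
}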
*/ - export interface ConfigurationParameters { - apiKey?: string | ((name: string) => string); - username?: string; - password?: string; - accessToken?: string | ((name: string, scopes?: string[]) => string); - basePath?: string; + apiKey?: string | ((name: string) => string); + username?: string; + password?: string; + accessToken?: string | ((name: string, scopes?: string[]) => string); + basePath?: string; } export class Configuration { - /** - * parameter for apiKey security - * @param name security name - * @memberof Configuration - */ - apiKey?: string | ((name: string) => string); - /** - * parameter for basic security - * - * @type {string} - * @memberof Configuration - */ - username?: string; - /** - * parameter for basic security - * - * @type {string} - * @memberof Configuration - */ - password?: string; - /** - * parameter for oauth2 security - * @param name security name - * @param scopes oauth2 scope - * @memberof Configuration - */ - accessToken?: string | ((name: string, scopes?: string[]) => string); - /** - * override base path - * - * @type {string} - * @memberof Configuration - */ - basePath?: string; + /** + * parameter for apiKey security + * @param name security name + * @memberof Configuration + */ + apiKey?: string | ((name: string) => string); + /** + * parameter for basic security + * + * @type {string} + * @memberof Configuration + */ + username?: string; + /** + * parameter for basic security + * + * @type {string} + * @memberof Configuration + */ + password?: string; + /** + * parameter for oauth2 security + * @param name security name + * @param scopes oauth2 scope + * @memberof Configuration + */ + accessToken?: string | ((name: string, scopes?: string[]) => string); + /** + * override base path + * + * @type {string} + * @memberof Configuration + */ + basePath?: string; - constructor(param: ConfigurationParameters = {}) { - this.apiKey = param.apiKey; - this.username = param.username; - this.password = param.password; - this.accessToken = param.accessToken; - this.basePath = param.basePath; - } + constructor(param: ConfigurationParameters = {}) { + this.apiKey = param.apiKey; + this.username = param.username; + this.password = param.password; + this.accessToken = param.accessToken; + this.basePath = param.basePath; + } } diff --git a/frontend/src/apis/experiment/custom.d.ts b/frontend/src/apis/experiment/custom.d.ts index 02f969575e37..4c611cc3216e 100644 --- a/frontend/src/apis/experiment/custom.d.ts +++ b/frontend/src/apis/experiment/custom.d.ts @@ -1 +1,2 @@ -declare module 'portable-fetch'; \ No newline at end of file +declare module 'portable-fetch'; +declare module 'url'; diff --git a/frontend/src/apis/experiment/index.ts b/frontend/src/apis/experiment/index.ts index e6234477c372..bc15ccfbc1d6 100644 --- a/frontend/src/apis/experiment/index.ts +++ b/frontend/src/apis/experiment/index.ts @@ -4,13 +4,12 @@ * No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen) * * OpenAPI spec version: version not set - * + * * * NOTE: This class is auto generated by the swagger code generator program. * https://github.com/swagger-api/swagger-codegen.git * Do not edit the class manually. 
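// NOTE (editor's illustration, not part of the generated diff): apiKey in the
// Configuration class above may be a plain string or a callback; the generated
// api.ts invokes it with the header name ('authorization') when assembling each
// request. The token storage below is an assumption for illustration only.
import { Configuration } from 'src/apis/experiment/configuration';

const authedConfig = new Configuration({
  apiKey: (name: string) =>
    name === 'authorization' ? `Bearer ${window.localStorage.getItem('token') || ''}` : '',
});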
*/ - -export * from "./api"; -export * from "./configuration"; +export * from './api'; +export * from './configuration'; diff --git a/frontend/src/apis/filter/.swagger-codegen/VERSION b/frontend/src/apis/filter/.swagger-codegen/VERSION index a6254504e401..48a6b508dc9f 100644 --- a/frontend/src/apis/filter/.swagger-codegen/VERSION +++ b/frontend/src/apis/filter/.swagger-codegen/VERSION @@ -1 +1 @@ -2.3.1 \ No newline at end of file +2.4.7 \ No newline at end of file diff --git a/frontend/src/apis/filter/api.ts b/frontend/src/apis/filter/api.ts index 65f3eee5cea4..c35fc87ad68e 100644 --- a/frontend/src/apis/filter/api.ts +++ b/frontend/src/apis/filter/api.ts @@ -5,28 +5,28 @@ * No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen) * * OpenAPI spec version: version not set - * + * * * NOTE: This class is auto generated by the swagger code generator program. * https://github.com/swagger-api/swagger-codegen.git * Do not edit the class manually. */ +import * as url from 'url'; +import * as portableFetch from 'portable-fetch'; +import { Configuration } from './configuration'; -import * as portableFetch from "portable-fetch"; -import { Configuration } from "./configuration"; - -const BASE_PATH = "http://localhost".replace(/\/+$/, ""); +const BASE_PATH = 'http://localhost'.replace(/\/+$/, ''); /** * * @export */ export const COLLECTION_FORMATS = { - csv: ",", - ssv: " ", - tsv: "\t", - pipes: "|", + csv: ',', + ssv: ' ', + tsv: '\t', + pipes: '|', }; /** @@ -35,46 +35,50 @@ export const COLLECTION_FORMATS = { * @interface FetchAPI */ export interface FetchAPI { - (url: string, init?: any): Promise; + (url: string, init?: any): Promise; } /** - * + * * @export * @interface FetchArgs */ export interface FetchArgs { - url: string; - options: any; + url: string; + options: any; } /** - * + * * @export * @class BaseAPI */ export class BaseAPI { - protected configuration: Configuration; - - constructor(configuration?: Configuration, protected basePath: string = BASE_PATH, protected fetch: FetchAPI = portableFetch) { - if (configuration) { - this.configuration = configuration; - this.basePath = configuration.basePath || this.basePath; - } + protected configuration: Configuration; + + constructor( + configuration?: Configuration, + protected basePath: string = BASE_PATH, + protected fetch: FetchAPI = portableFetch, + ) { + if (configuration) { + this.configuration = configuration; + this.basePath = configuration.basePath || this.basePath; } -}; + } +} /** - * + * * @export * @class RequiredError * @extends {Error} */ export class RequiredError extends Error { - name: "RequiredError" - constructor(public field: string, msg?: string) { - super(msg); - } + name: 'RequiredError'; + constructor(public field: string, msg?: string) { + super(msg); + } } /** @@ -83,40 +87,40 @@ export class RequiredError extends Error { * @interface ApiFilter */ export interface ApiFilter { - /** - * All predicates are AND-ed when this filter is applied. - * @type {Array<ApiPredicate>} - * @memberof ApiFilter - */ - predicates?: Array; + /** + * All predicates are AND-ed when this filter is applied. 
+ * @type {Array<ApiPredicate>} + * @memberof ApiFilter + */ + predicates?: Array<ApiPredicate>; } /** - * + * * @export * @interface ApiIntValues */ export interface ApiIntValues { - /** - * - * @type {Array<number>} - * @memberof ApiIntValues - */ - values?: Array<number>; + /** + * + * @type {Array<number>} + * @memberof ApiIntValues + */ + values?: Array<number>; } /** - * + * * @export * @interface ApiLongValues */ export interface ApiLongValues { - /** - * - * @type {Array<string>} - * @memberof ApiLongValues - */ - values?: Array<string>; + /** + * + * @type {Array<string>} + * @memberof ApiLongValues + */ + values?: Array<string>; } /** @@ -125,74 +129,74 @@ export interface ApiLongValues { * @interface ApiPredicate */ export interface ApiPredicate { - /** - * - * @type {PredicateOp} - * @memberof ApiPredicate - */ - op?: PredicateOp; - /** - * - * @type {string} - * @memberof ApiPredicate - */ - key?: string; - /** - * - * @type {number} - * @memberof ApiPredicate - */ - int_value?: number; - /** - * - * @type {string} - * @memberof ApiPredicate - */ - long_value?: string; - /** - * - * @type {string} - * @memberof ApiPredicate - */ - string_value?: string; - /** - * Timestamp values will be converted to Unix time (seconds since the epoch) prior to being used in a filtering operation. - * @type {Date} - * @memberof ApiPredicate - */ - timestamp_value?: Date; - /** - * Array values below are only meant to be used by the IN operator. - * @type {ApiIntValues} - * @memberof ApiPredicate - */ - int_values?: ApiIntValues; - /** - * - * @type {ApiLongValues} - * @memberof ApiPredicate - */ - long_values?: ApiLongValues; - /** - * - * @type {ApiStringValues} - * @memberof ApiPredicate - */ - string_values?: ApiStringValues; + /** + * + * @type {PredicateOp} + * @memberof ApiPredicate + */ + op?: PredicateOp; + /** + * + * @type {string} + * @memberof ApiPredicate + */ + key?: string; + /** + * + * @type {number} + * @memberof ApiPredicate + */ + int_value?: number; + /** + * + * @type {string} + * @memberof ApiPredicate + */ + long_value?: string; + /** + * + * @type {string} + * @memberof ApiPredicate + */ + string_value?: string; + /** + * Timestamp values will be converted to Unix time (seconds since the epoch) prior to being used in a filtering operation. + * @type {Date} + * @memberof ApiPredicate + */ + timestamp_value?: Date; + /** + * Array values below are only meant to be used by the IN operator.
+ * @type {ApiIntValues} + * @memberof ApiPredicate + */ + int_values?: ApiIntValues; + /** + * + * @type {ApiLongValues} + * @memberof ApiPredicate + */ + long_values?: ApiLongValues; + /** + * + * @type {ApiStringValues} + * @memberof ApiPredicate + */ + string_values?: ApiStringValues; } /** - * + * * @export * @interface ApiStringValues */ export interface ApiStringValues { - /** - * - * @type {Array<string>} - * @memberof ApiStringValues - */ - values?: Array<string>; + /** + * + * @type {Array<string>} + * @memberof ApiStringValues + */ + values?: Array<string>; } /** @@ -201,15 +205,13 @@ export interface ApiStringValues { * @enum {string} */ export enum PredicateOp { - UNKNOWN = 'UNKNOWN', - EQUALS = 'EQUALS', - NOTEQUALS = 'NOT_EQUALS', - GREATERTHAN = 'GREATER_THAN', - GREATERTHANEQUALS = 'GREATER_THAN_EQUALS', - LESSTHAN = 'LESS_THAN', - LESSTHANEQUALS = 'LESS_THAN_EQUALS', - IN = 'IN', - ISSUBSTRING = 'IS_SUBSTRING' + UNKNOWN = 'UNKNOWN', + EQUALS = 'EQUALS', + NOTEQUALS = 'NOT_EQUALS', + GREATERTHAN = 'GREATER_THAN', + GREATERTHANEQUALS = 'GREATER_THAN_EQUALS', + LESSTHAN = 'LESS_THAN', + LESSTHANEQUALS = 'LESS_THAN_EQUALS', + IN = 'IN', + ISSUBSTRING = 'IS_SUBSTRING', } - - diff --git a/frontend/src/apis/filter/configuration.ts b/frontend/src/apis/filter/configuration.ts index 841b58ae17a5..78b88fb36946 100644 --- a/frontend/src/apis/filter/configuration.ts +++ b/frontend/src/apis/filter/configuration.ts @@ -4,63 +4,62 @@ * No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen) * * OpenAPI spec version: version not set - * + * * * NOTE: This class is auto generated by the swagger code generator program. * https://github.com/swagger-api/swagger-codegen.git * Do not edit the class manually. */ - export interface ConfigurationParameters { - apiKey?: string | ((name: string) => string); - username?: string; - password?: string; - accessToken?: string | ((name: string, scopes?: string[]) => string); - basePath?: string; + apiKey?: string | ((name: string) => string); + username?: string; + password?: string; + accessToken?: string | ((name: string, scopes?: string[]) => string); + basePath?: string; } export class Configuration { - /** - * parameter for apiKey security - * @param name security name - * @memberof Configuration - */ - apiKey?: string | ((name: string) => string); - /** - * parameter for basic security - * - * @type {string} - * @memberof Configuration - */ - username?: string; - /** - * parameter for basic security - * - * @type {string} - * @memberof Configuration - */ - password?: string; - /** - * parameter for oauth2 security - * @param name security name - * @param scopes oauth2 scope - * @memberof Configuration - */ - accessToken?: string | ((name: string, scopes?: string[]) => string); - /** - * override base path - * - * @type {string} - * @memberof Configuration - */ - basePath?: string; + /** + * parameter for apiKey security + * @param name security name + * @memberof Configuration + */ + apiKey?: string | ((name: string) => string); + /** + * parameter for basic security + * + * @type {string} + * @memberof Configuration + */ + username?: string; + /** + * parameter for basic security + * + * @type {string} + * @memberof Configuration + */ + password?: string; + /** + * parameter for oauth2 security + * @param name security name + * @param scopes oauth2 scope + * @memberof Configuration + */ + accessToken?: string | ((name: string, scopes?: string[]) => string); + /** + * override base path + * + * @type {string} + * @memberof
Configuration + */ + basePath?: string; - constructor(param: ConfigurationParameters = {}) { - this.apiKey = param.apiKey; - this.username = param.username; - this.password = param.password; - this.accessToken = param.accessToken; - this.basePath = param.basePath; - } + constructor(param: ConfigurationParameters = {}) { + this.apiKey = param.apiKey; + this.username = param.username; + this.password = param.password; + this.accessToken = param.accessToken; + this.basePath = param.basePath; + } } diff --git a/frontend/src/apis/filter/custom.d.ts b/frontend/src/apis/filter/custom.d.ts index 02f969575e37..4c611cc3216e 100644 --- a/frontend/src/apis/filter/custom.d.ts +++ b/frontend/src/apis/filter/custom.d.ts @@ -1 +1,2 @@ -declare module 'portable-fetch'; \ No newline at end of file +declare module 'portable-fetch'; +declare module 'url'; diff --git a/frontend/src/apis/filter/index.ts b/frontend/src/apis/filter/index.ts index 693743ee8e82..2d6272712dbf 100644 --- a/frontend/src/apis/filter/index.ts +++ b/frontend/src/apis/filter/index.ts @@ -4,13 +4,12 @@ * No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen) * * OpenAPI spec version: version not set - * + * * * NOTE: This class is auto generated by the swagger code generator program. * https://github.com/swagger-api/swagger-codegen.git * Do not edit the class manually. */ - -export * from "./api"; -export * from "./configuration"; +export * from './api'; +export * from './configuration'; diff --git a/frontend/src/apis/job/.swagger-codegen/VERSION b/frontend/src/apis/job/.swagger-codegen/VERSION index a6254504e401..48a6b508dc9f 100644 --- a/frontend/src/apis/job/.swagger-codegen/VERSION +++ b/frontend/src/apis/job/.swagger-codegen/VERSION @@ -1 +1 @@ -2.3.1 \ No newline at end of file +2.4.7 \ No newline at end of file diff --git a/frontend/src/apis/job/api.ts b/frontend/src/apis/job/api.ts index f278e2b7db59..399181f7df36 100644 --- a/frontend/src/apis/job/api.ts +++ b/frontend/src/apis/job/api.ts @@ -5,29 +5,28 @@ * No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen) * * OpenAPI spec version: version not set - * + * * * NOTE: This class is auto generated by the swagger code generator program. * https://github.com/swagger-api/swagger-codegen.git * Do not edit the class manually. 
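// NOTE (editor's illustration, not part of the generated diff): the list
// endpoints accept `filter` as a base-64 encoded, JSON-serialized ApiFilter,
// and all predicates are AND-ed. Scalar operators pair with the scalar value
// fields, while PredicateOp.IN pairs with the plural *_values fields. A sketch;
// the import path and the field names/values are assumptions, and btoa is the
// browser's base-64 encoder:
import { ApiFilter, PredicateOp } from 'src/apis/filter';

const filter: ApiFilter = {
  predicates: [
    { key: 'name', op: PredicateOp.ISSUBSTRING, string_value: 'mnist' },
    { key: 'status', op: PredicateOp.IN, string_values: { values: ['Enabled', 'Disabled'] } },
  ],
};
const encodedFilter = btoa(JSON.stringify(filter)); // pass as the `filter` query param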
*/ +import * as url from 'url'; +import * as portableFetch from 'portable-fetch'; +import { Configuration } from './configuration'; -import * as url from "url"; -import * as portableFetch from "portable-fetch"; -import { Configuration } from "./configuration"; - -const BASE_PATH = "http://localhost".replace(/\/+$/, ""); +const BASE_PATH = 'http://localhost'.replace(/\/+$/, ''); /** * * @export */ export const COLLECTION_FORMATS = { - csv: ",", - ssv: " ", - tsv: "\t", - pipes: "|", + csv: ',', + ssv: ' ', + tsv: '\t', + pipes: '|', }; /** @@ -36,362 +35,366 @@ export const COLLECTION_FORMATS = { * @interface FetchAPI */ export interface FetchAPI { - (url: string, init?: any): Promise; + (url: string, init?: any): Promise; } /** - * + * * @export * @interface FetchArgs */ export interface FetchArgs { - url: string; - options: any; + url: string; + options: any; } /** - * + * * @export * @class BaseAPI */ export class BaseAPI { - protected configuration: Configuration; + protected configuration: Configuration; - constructor(configuration?: Configuration, protected basePath: string = BASE_PATH, protected fetch: FetchAPI = portableFetch) { - if (configuration) { - this.configuration = configuration; - this.basePath = configuration.basePath || this.basePath; - } + constructor( + configuration?: Configuration, + protected basePath: string = BASE_PATH, + protected fetch: FetchAPI = portableFetch, + ) { + if (configuration) { + this.configuration = configuration; + this.basePath = configuration.basePath || this.basePath; } -}; + } +} /** - * + * * @export * @class RequiredError * @extends {Error} */ export class RequiredError extends Error { - name: "RequiredError" - constructor(public field: string, msg?: string) { - super(msg); - } + name: 'RequiredError'; + constructor(public field: string, msg?: string) { + super(msg); + } } /** - * + * * @export * @interface ApiCronSchedule */ export interface ApiCronSchedule { - /** - * - * @type {Date} - * @memberof ApiCronSchedule - */ - start_time?: Date; - /** - * - * @type {Date} - * @memberof ApiCronSchedule - */ - end_time?: Date; - /** - * - * @type {string} - * @memberof ApiCronSchedule - */ - cron?: string; + /** + * + * @type {Date} + * @memberof ApiCronSchedule + */ + start_time?: Date; + /** + * + * @type {Date} + * @memberof ApiCronSchedule + */ + end_time?: Date; + /** + * + * @type {string} + * @memberof ApiCronSchedule + */ + cron?: string; } /** - * + * * @export * @interface ApiJob */ export interface ApiJob { - /** - * Output. Unique run ID. Generated by API server. - * @type {string} - * @memberof ApiJob - */ - id?: string; - /** - * Required input field. Job name provided by user. Not unique. - * @type {string} - * @memberof ApiJob - */ - name?: string; - /** - * - * @type {string} - * @memberof ApiJob - */ - description?: string; - /** - * Required input field. Describing what the pipeline manifest and parameters to use for the scheduled job. - * @type {ApiPipelineSpec} - * @memberof ApiJob - */ - pipeline_spec?: ApiPipelineSpec; - /** - * Optional input field. Specify which resource this run belongs to. - * @type {Array<ApiResourceReference>} - * @memberof ApiJob - */ - resource_references?: Array; - /** - * - * @type {string} - * @memberof ApiJob - */ - max_concurrency?: string; - /** - * Required input field. Specify how a run is triggered. Support cron mode or periodic mode. 
- * @type {ApiTrigger} - * @memberof ApiJob - */ - trigger?: ApiTrigger; - /** - * - * @type {JobMode} - * @memberof ApiJob - */ - mode?: JobMode; - /** - * Output. The time this job is created. - * @type {Date} - * @memberof ApiJob - */ - created_at?: Date; - /** - * Output. The last time this job is updated. - * @type {Date} - * @memberof ApiJob - */ - updated_at?: Date; - /** - * - * @type {string} - * @memberof ApiJob - */ - status?: string; - /** - * In case any error happens retrieving a job field, only job ID and the error message is returned. Client has the flexibility of choosing how to handle error. This is especially useful during listing call. - * @type {string} - * @memberof ApiJob - */ - error?: string; - /** - * Input. Whether the job is enabled or not. - * @type {boolean} - * @memberof ApiJob - */ - enabled?: boolean; + /** + * Output. Unique run ID. Generated by API server. + * @type {string} + * @memberof ApiJob + */ + id?: string; + /** + * Required input field. Job name provided by user. Not unique. + * @type {string} + * @memberof ApiJob + */ + name?: string; + /** + * + * @type {string} + * @memberof ApiJob + */ + description?: string; + /** + * Required input field. Describing what the pipeline manifest and parameters to use for the scheduled job. + * @type {ApiPipelineSpec} + * @memberof ApiJob + */ + pipeline_spec?: ApiPipelineSpec; + /** + * Optional input field. Specify which resource this run belongs to. + * @type {Array<ApiResourceReference>} + * @memberof ApiJob + */ + resource_references?: Array<ApiResourceReference>; + /** + * + * @type {string} + * @memberof ApiJob + */ + max_concurrency?: string; + /** + * Required input field. Specify how a run is triggered. Support cron mode or periodic mode. + * @type {ApiTrigger} + * @memberof ApiJob + */ + trigger?: ApiTrigger; + /** + * + * @type {JobMode} + * @memberof ApiJob + */ + mode?: JobMode; + /** + * Output. The time this job is created. + * @type {Date} + * @memberof ApiJob + */ + created_at?: Date; + /** + * Output. The last time this job is updated. + * @type {Date} + * @memberof ApiJob + */ + updated_at?: Date; + /** + * + * @type {string} + * @memberof ApiJob + */ + status?: string; + /** + * In case any error happens retrieving a job field, only job ID and the error message is returned. Client has the flexibility of choosing how to handle error. This is especially useful during listing call. + * @type {string} + * @memberof ApiJob + */ + error?: string; + /** + * Input. Whether the job is enabled or not. + * @type {boolean} + * @memberof ApiJob + */ + enabled?: boolean; } /** - * + * * @export * @interface ApiListJobsResponse */ export interface ApiListJobsResponse { - /** - * A list of jobs returned. - * @type {Array<ApiJob>} - * @memberof ApiListJobsResponse - */ - jobs?: Array<ApiJob>; - /** - * - * @type {number} - * @memberof ApiListJobsResponse - */ - total_size?: number; - /** - * - * @type {string} - * @memberof ApiListJobsResponse - */ - next_page_token?: string; + /** + * A list of jobs returned.
+ * @type {Array<ApiJob>} + * @memberof ApiListJobsResponse + */ + jobs?: Array<ApiJob>; + /** + * + * @type {number} + * @memberof ApiListJobsResponse + */ + total_size?: number; + /** + * + * @type {string} + * @memberof ApiListJobsResponse + */ + next_page_token?: string; } /** - * + * * @export * @interface ApiParameter */ export interface ApiParameter { - /** - * - * @type {string} - * @memberof ApiParameter - */ - name?: string; - /** - * - * @type {string} - * @memberof ApiParameter - */ - value?: string; + /** + * + * @type {string} + * @memberof ApiParameter + */ + name?: string; + /** + * + * @type {string} + * @memberof ApiParameter + */ + value?: string; } /** - * + * * @export * @interface ApiPeriodicSchedule */ export interface ApiPeriodicSchedule { - /** - * - * @type {Date} - * @memberof ApiPeriodicSchedule - */ - start_time?: Date; - /** - * - * @type {Date} - * @memberof ApiPeriodicSchedule - */ - end_time?: Date; - /** - * - * @type {string} - * @memberof ApiPeriodicSchedule - */ - interval_second?: string; + /** + * + * @type {Date} + * @memberof ApiPeriodicSchedule + */ + start_time?: Date; + /** + * + * @type {Date} + * @memberof ApiPeriodicSchedule + */ + end_time?: Date; + /** + * + * @type {string} + * @memberof ApiPeriodicSchedule + */ + interval_second?: string; } /** - * + * * @export * @interface ApiPipelineSpec */ export interface ApiPipelineSpec { - /** - * Optional input field. The ID of the pipeline user uploaded before. - * @type {string} - * @memberof ApiPipelineSpec - */ - pipeline_id?: string; - /** - * Optional output field. The name of the pipeline. Not empty if the pipeline id is not empty. - * @type {string} - * @memberof ApiPipelineSpec - */ - pipeline_name?: string; - /** - * Optional input field. The marshalled raw argo JSON workflow. This will be deprecated when pipeline_manifest is in use. - * @type {string} - * @memberof ApiPipelineSpec - */ - workflow_manifest?: string; - /** - * Optional input field. The raw pipeline JSON spec. - * @type {string} - * @memberof ApiPipelineSpec - */ - pipeline_manifest?: string; - /** - * The parameter user provide to inject to the pipeline JSON. If a default value of a parameter exist in the JSON, the value user provided here will replace. - * @type {Array<ApiParameter>} - * @memberof ApiPipelineSpec - */ - parameters?: Array<ApiParameter>; + /** + * Optional input field. The ID of the pipeline user uploaded before. + * @type {string} + * @memberof ApiPipelineSpec + */ + pipeline_id?: string; + /** + * Optional output field. The name of the pipeline. Not empty if the pipeline id is not empty. + * @type {string} + * @memberof ApiPipelineSpec + */ + pipeline_name?: string; + /** + * Optional input field. The marshalled raw argo JSON workflow. This will be deprecated when pipeline_manifest is in use. + * @type {string} + * @memberof ApiPipelineSpec + */ + workflow_manifest?: string; + /** + * Optional input field. The raw pipeline JSON spec. + * @type {string} + * @memberof ApiPipelineSpec + */ + pipeline_manifest?: string; + /** + * The parameter user provide to inject to the pipeline JSON. If a default value of a parameter exist in the JSON, the value user provided here will replace.
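// NOTE (editor's illustration, not part of the generated diff): a minimal
// ApiJob literal exercising the shapes above — a cron trigger (Argo-style
// six-field cron string, an assumption), max_concurrency serialized as a string
// per the proto int64 convention, and a parameter override that replaces the
// matching default in the pipeline JSON. The IDs and import path are
// assumptions:
import { ApiJob } from 'src/apis/job';

const hourlyJob: ApiJob = {
  name: 'hourly-training',
  enabled: true,
  max_concurrency: '1',
  trigger: { cron_schedule: { cron: '0 0 * * * *' } },
  pipeline_spec: {
    pipeline_id: 'example-pipeline-id', // assumed ID of a previously uploaded pipeline
    parameters: [{ name: 'learning_rate', value: '0.01' }],
  },
};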
+ * @type {Array<ApiParameter>} + * @memberof ApiPipelineSpec + */ + parameters?: Array<ApiParameter>; } /** - * + * * @export * @enum {string} */ export enum ApiRelationship { - UNKNOWNRELATIONSHIP = 'UNKNOWN_RELATIONSHIP', - OWNER = 'OWNER', - CREATOR = 'CREATOR' + UNKNOWNRELATIONSHIP = 'UNKNOWN_RELATIONSHIP', + OWNER = 'OWNER', + CREATOR = 'CREATOR', } /** - * + * * @export * @interface ApiResourceKey */ export interface ApiResourceKey { - /** - * The type of the resource that referred to. - * @type {ApiResourceType} - * @memberof ApiResourceKey - */ - type?: ApiResourceType; - /** - * The ID of the resource that referred to. - * @type {string} - * @memberof ApiResourceKey - */ - id?: string; + /** + * The type of the resource that referred to. + * @type {ApiResourceType} + * @memberof ApiResourceKey + */ + type?: ApiResourceType; + /** + * The ID of the resource that referred to. + * @type {string} + * @memberof ApiResourceKey + */ + id?: string; } /** - * + * * @export * @interface ApiResourceReference */ export interface ApiResourceReference { - /** - * - * @type {ApiResourceKey} - * @memberof ApiResourceReference - */ - key?: ApiResourceKey; - /** - * The name of the resource that referred to. - * @type {string} - * @memberof ApiResourceReference - */ - name?: string; - /** - * Required field. The relationship from referred resource to the object. - * @type {ApiRelationship} - * @memberof ApiResourceReference - */ - relationship?: ApiRelationship; + /** + * + * @type {ApiResourceKey} + * @memberof ApiResourceReference + */ + key?: ApiResourceKey; + /** + * The name of the resource that referred to. + * @type {string} + * @memberof ApiResourceReference + */ + name?: string; + /** + * Required field. The relationship from referred resource to the object. + * @type {ApiRelationship} + * @memberof ApiResourceReference + */ + relationship?: ApiRelationship; } /** - * + * * @export * @enum {string} */ export enum ApiResourceType { - UNKNOWNRESOURCETYPE = 'UNKNOWN_RESOURCE_TYPE', - EXPERIMENT = 'EXPERIMENT', - JOB = 'JOB' + UNKNOWNRESOURCETYPE = 'UNKNOWN_RESOURCE_TYPE', + EXPERIMENT = 'EXPERIMENT', + JOB = 'JOB', } /** - * + * * @export * @interface ApiStatus */ export interface ApiStatus { - /** - * - * @type {string} - * @memberof ApiStatus - */ - error?: string; - /** - * - * @type {number} - * @memberof ApiStatus - */ - code?: number; - /** - * - * @type {Array<ProtobufAny>} - * @memberof ApiStatus - */ - details?: Array<ProtobufAny>; + /** + * + * @type {string} + * @memberof ApiStatus + */ + error?: string; + /** + * + * @type {number} + * @memberof ApiStatus + */ + code?: number; + /** + * + * @type {Array<ProtobufAny>} + * @memberof ApiStatus + */ + details?: Array<ProtobufAny>; } /** @@ -400,18 +403,18 @@ export interface ApiStatus { * @interface ApiTrigger */ export interface ApiTrigger { - /** - * - * @type {ApiCronSchedule} - * @memberof ApiTrigger - */ - cron_schedule?: ApiCronSchedule; - /** - * - * @type {ApiPeriodicSchedule} - * @memberof ApiTrigger - */ - periodic_schedule?: ApiPeriodicSchedule; + /** + * + * @type {ApiCronSchedule} + * @memberof ApiTrigger + */ + cron_schedule?: ApiCronSchedule; + /** + * + * @type {ApiPeriodicSchedule} + * @memberof ApiTrigger + */ + periodic_schedule?: ApiPeriodicSchedule; } /** @@ -420,9 +423,9 @@ export interface ApiTrigger { * @enum {string} */ export enum JobMode { - UNKNOWNMODE = 'UNKNOWN_MODE', - ENABLED = 'ENABLED', - DISABLED = 'DISABLED' + UNKNOWNMODE = 'UNKNOWN_MODE', + ENABLED = 'ENABLED', + DISABLED = 'DISABLED', } /** @@ -431,271 +434,339 @@ export enum JobMode { * @interface ProtobufAny
*/ export interface ProtobufAny { - /** - * A URL/resource name that uniquely identifies the type of the serialized protocol buffer message. The last segment of the URL's path must represent the fully qualified name of the type (as in `path/google.protobuf.Duration`). The name should be in a canonical form (e.g., leading \".\" is not accepted). In practice, teams usually precompile into the binary all types that they expect it to use in the context of Any. However, for URLs which use the scheme `http`, `https`, or no scheme, one can optionally set up a type server that maps type URLs to message definitions as follows: * If no scheme is provided, `https` is assumed. * An HTTP GET on the URL must yield a [google.protobuf.Type][] value in binary format, or produce an error. * Applications are allowed to cache lookup results based on the URL, or have them precompiled into a binary to avoid any lookup. Therefore, binary compatibility needs to be preserved on changes to types. (Use versioned type names to manage breaking changes.) Note: this functionality is not currently available in the official protobuf release, and it is not used for type URLs beginning with type.googleapis.com. Schemes other than `http`, `https` (or the empty scheme) might be used with implementation specific semantics. - * @type {string} - * @memberof ProtobufAny - */ - type_url?: string; - /** - * Must be a valid serialized protocol buffer of the above specified type. - * @type {string} - * @memberof ProtobufAny - */ - value?: string; + /** + * A URL/resource name that uniquely identifies the type of the serialized protocol buffer message. The last segment of the URL's path must represent the fully qualified name of the type (as in `path/google.protobuf.Duration`). The name should be in a canonical form (e.g., leading \".\" is not accepted). In practice, teams usually precompile into the binary all types that they expect it to use in the context of Any. However, for URLs which use the scheme `http`, `https`, or no scheme, one can optionally set up a type server that maps type URLs to message definitions as follows: * If no scheme is provided, `https` is assumed. * An HTTP GET on the URL must yield a [google.protobuf.Type][] value in binary format, or produce an error. * Applications are allowed to cache lookup results based on the URL, or have them precompiled into a binary to avoid any lookup. Therefore, binary compatibility needs to be preserved on changes to types. (Use versioned type names to manage breaking changes.) Note: this functionality is not currently available in the official protobuf release, and it is not used for type URLs beginning with type.googleapis.com. Schemes other than `http`, `https` (or the empty scheme) might be used with implementation specific semantics. + * @type {string} + * @memberof ProtobufAny + */ + type_url?: string; + /** + * Must be a valid serialized protocol buffer of the above specified type. + * @type {string} + * @memberof ProtobufAny + */ + value?: string; } - /** * JobServiceApi - fetch parameter creator * @export */ -export const JobServiceApiFetchParamCreator = function (configuration?: Configuration) { - return { - /** - * - * @param {ApiJob} body The job to be created - * @param {*} [options] Override http request option. 
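// NOTE (editor's illustration, not part of the generated diff): per the enums
// above, a job is attached to its owning experiment through resource_references —
// an EXPERIMENT-typed key with an OWNER relationship. The experiment ID and
// import path are assumptions:
import { ApiRelationship, ApiResourceReference, ApiResourceType } from 'src/apis/job';

const ownerRef: ApiResourceReference = {
  key: { type: ApiResourceType.EXPERIMENT, id: 'example-experiment-id' },
  relationship: ApiRelationship.OWNER,
};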
- * @throws {RequiredError} - */ - createJob(body: ApiJob, options: any = {}): FetchArgs { - // verify required parameter 'body' is not null or undefined - if (body === null || body === undefined) { - throw new RequiredError('body','Required parameter body was null or undefined when calling createJob.'); - } - const localVarPath = `/apis/v1beta1/jobs`; - const localVarUrlObj = url.parse(localVarPath, true); - const localVarRequestOptions = Object.assign({ method: 'POST' }, options); - const localVarHeaderParameter = {} as any; - const localVarQueryParameter = {} as any; - - // authentication Bearer required - if (configuration && configuration.apiKey) { - const localVarApiKeyValue = typeof configuration.apiKey === 'function' - ? configuration.apiKey("authorization") - : configuration.apiKey; - localVarHeaderParameter["authorization"] = localVarApiKeyValue; - } - - localVarHeaderParameter['Content-Type'] = 'application/json'; - - localVarUrlObj.query = Object.assign({}, localVarUrlObj.query, localVarQueryParameter, options.query); - // fix override query string Detail: https://stackoverflow.com/a/7517673/1077943 - delete localVarUrlObj.search; - localVarRequestOptions.headers = Object.assign({}, localVarHeaderParameter, options.headers); - const needsSerialization = ("ApiJob" !== "string") || localVarRequestOptions.headers['Content-Type'] === 'application/json'; - localVarRequestOptions.body = needsSerialization ? JSON.stringify(body || {}) : (body || ""); - - return { - url: url.format(localVarUrlObj), - options: localVarRequestOptions, - }; - }, - /** - * - * @param {string} id The ID of the job to be deleted - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - deleteJob(id: string, options: any = {}): FetchArgs { - // verify required parameter 'id' is not null or undefined - if (id === null || id === undefined) { - throw new RequiredError('id','Required parameter id was null or undefined when calling deleteJob.'); - } - const localVarPath = `/apis/v1beta1/jobs/{id}` - .replace(`{${"id"}}`, encodeURIComponent(String(id))); - const localVarUrlObj = url.parse(localVarPath, true); - const localVarRequestOptions = Object.assign({ method: 'DELETE' }, options); - const localVarHeaderParameter = {} as any; - const localVarQueryParameter = {} as any; - - // authentication Bearer required - if (configuration && configuration.apiKey) { - const localVarApiKeyValue = typeof configuration.apiKey === 'function' - ? configuration.apiKey("authorization") - : configuration.apiKey; - localVarHeaderParameter["authorization"] = localVarApiKeyValue; - } - - localVarUrlObj.query = Object.assign({}, localVarUrlObj.query, localVarQueryParameter, options.query); - // fix override query string Detail: https://stackoverflow.com/a/7517673/1077943 - delete localVarUrlObj.search; - localVarRequestOptions.headers = Object.assign({}, localVarHeaderParameter, options.headers); - - return { - url: url.format(localVarUrlObj), - options: localVarRequestOptions, - }; - }, - /** - * - * @param {string} id The ID of the job to be disabled - * @param {*} [options] Override http request option. 
- * @throws {RequiredError} - */ - disableJob(id: string, options: any = {}): FetchArgs { - // verify required parameter 'id' is not null or undefined - if (id === null || id === undefined) { - throw new RequiredError('id','Required parameter id was null or undefined when calling disableJob.'); - } - const localVarPath = `/apis/v1beta1/jobs/{id}/disable` - .replace(`{${"id"}}`, encodeURIComponent(String(id))); - const localVarUrlObj = url.parse(localVarPath, true); - const localVarRequestOptions = Object.assign({ method: 'POST' }, options); - const localVarHeaderParameter = {} as any; - const localVarQueryParameter = {} as any; - - // authentication Bearer required - if (configuration && configuration.apiKey) { - const localVarApiKeyValue = typeof configuration.apiKey === 'function' - ? configuration.apiKey("authorization") - : configuration.apiKey; - localVarHeaderParameter["authorization"] = localVarApiKeyValue; - } - - localVarUrlObj.query = Object.assign({}, localVarUrlObj.query, localVarQueryParameter, options.query); - // fix override query string Detail: https://stackoverflow.com/a/7517673/1077943 - delete localVarUrlObj.search; - localVarRequestOptions.headers = Object.assign({}, localVarHeaderParameter, options.headers); - - return { - url: url.format(localVarUrlObj), - options: localVarRequestOptions, - }; - }, - /** - * - * @param {string} id The ID of the job to be enabled - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - enableJob(id: string, options: any = {}): FetchArgs { - // verify required parameter 'id' is not null or undefined - if (id === null || id === undefined) { - throw new RequiredError('id','Required parameter id was null or undefined when calling enableJob.'); - } - const localVarPath = `/apis/v1beta1/jobs/{id}/enable` - .replace(`{${"id"}}`, encodeURIComponent(String(id))); - const localVarUrlObj = url.parse(localVarPath, true); - const localVarRequestOptions = Object.assign({ method: 'POST' }, options); - const localVarHeaderParameter = {} as any; - const localVarQueryParameter = {} as any; - - // authentication Bearer required - if (configuration && configuration.apiKey) { - const localVarApiKeyValue = typeof configuration.apiKey === 'function' - ? configuration.apiKey("authorization") - : configuration.apiKey; - localVarHeaderParameter["authorization"] = localVarApiKeyValue; - } - - localVarUrlObj.query = Object.assign({}, localVarUrlObj.query, localVarQueryParameter, options.query); - // fix override query string Detail: https://stackoverflow.com/a/7517673/1077943 - delete localVarUrlObj.search; - localVarRequestOptions.headers = Object.assign({}, localVarHeaderParameter, options.headers); - - return { - url: url.format(localVarUrlObj), - options: localVarRequestOptions, - }; - }, - /** - * - * @param {string} id The ID of the job to be retrieved - * @param {*} [options] Override http request option. 
- * @throws {RequiredError} - */ - getJob(id: string, options: any = {}): FetchArgs { - // verify required parameter 'id' is not null or undefined - if (id === null || id === undefined) { - throw new RequiredError('id','Required parameter id was null or undefined when calling getJob.'); - } - const localVarPath = `/apis/v1beta1/jobs/{id}` - .replace(`{${"id"}}`, encodeURIComponent(String(id))); - const localVarUrlObj = url.parse(localVarPath, true); - const localVarRequestOptions = Object.assign({ method: 'GET' }, options); - const localVarHeaderParameter = {} as any; - const localVarQueryParameter = {} as any; - - // authentication Bearer required - if (configuration && configuration.apiKey) { - const localVarApiKeyValue = typeof configuration.apiKey === 'function' - ? configuration.apiKey("authorization") - : configuration.apiKey; - localVarHeaderParameter["authorization"] = localVarApiKeyValue; - } - - localVarUrlObj.query = Object.assign({}, localVarUrlObj.query, localVarQueryParameter, options.query); - // fix override query string Detail: https://stackoverflow.com/a/7517673/1077943 - delete localVarUrlObj.search; - localVarRequestOptions.headers = Object.assign({}, localVarHeaderParameter, options.headers); - - return { - url: url.format(localVarUrlObj), - options: localVarRequestOptions, - }; - }, - /** - * - * @param {string} [page_token] - * @param {number} [page_size] - * @param {string} [sort_by] Can be format of \"field_name\", \"field_name asc\" or \"field_name des\" Ascending by default. - * @param {string} [resource_reference_key_type] The type of the resource that referred to. - * @param {string} [resource_reference_key_id] The ID of the resource that referred to. - * @param {string} [filter] A base-64 encoded, JSON-serialized Filter protocol buffer (see filter.proto). - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - listJobs(page_token?: string, page_size?: number, sort_by?: string, resource_reference_key_type?: string, resource_reference_key_id?: string, filter?: string, options: any = {}): FetchArgs { - const localVarPath = `/apis/v1beta1/jobs`; - const localVarUrlObj = url.parse(localVarPath, true); - const localVarRequestOptions = Object.assign({ method: 'GET' }, options); - const localVarHeaderParameter = {} as any; - const localVarQueryParameter = {} as any; - - // authentication Bearer required - if (configuration && configuration.apiKey) { - const localVarApiKeyValue = typeof configuration.apiKey === 'function' - ? 
configuration.apiKey("authorization") - : configuration.apiKey; - localVarHeaderParameter["authorization"] = localVarApiKeyValue; - } - - if (page_token !== undefined) { - localVarQueryParameter['page_token'] = page_token; - } - - if (page_size !== undefined) { - localVarQueryParameter['page_size'] = page_size; - } - - if (sort_by !== undefined) { - localVarQueryParameter['sort_by'] = sort_by; - } - - if (resource_reference_key_type !== undefined) { - localVarQueryParameter['resource_reference_key.type'] = resource_reference_key_type; - } - - if (resource_reference_key_id !== undefined) { - localVarQueryParameter['resource_reference_key.id'] = resource_reference_key_id; - } - - if (filter !== undefined) { - localVarQueryParameter['filter'] = filter; - } - - localVarUrlObj.query = Object.assign({}, localVarUrlObj.query, localVarQueryParameter, options.query); - // fix override query string Detail: https://stackoverflow.com/a/7517673/1077943 - delete localVarUrlObj.search; - localVarRequestOptions.headers = Object.assign({}, localVarHeaderParameter, options.headers); - - return { - url: url.format(localVarUrlObj), - options: localVarRequestOptions, - }; - }, - } +export const JobServiceApiFetchParamCreator = function(configuration?: Configuration) { + return { + /** + * + * @param {ApiJob} body The job to be created + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + createJob(body: ApiJob, options: any = {}): FetchArgs { + // verify required parameter 'body' is not null or undefined + if (body === null || body === undefined) { + throw new RequiredError( + 'body', + 'Required parameter body was null or undefined when calling createJob.', + ); + } + const localVarPath = `/apis/v1beta1/jobs`; + const localVarUrlObj = url.parse(localVarPath, true); + const localVarRequestOptions = Object.assign({ method: 'POST' }, options); + const localVarHeaderParameter = {} as any; + const localVarQueryParameter = {} as any; + + // authentication Bearer required + if (configuration && configuration.apiKey) { + const localVarApiKeyValue = + typeof configuration.apiKey === 'function' + ? configuration.apiKey('authorization') + : configuration.apiKey; + localVarHeaderParameter['authorization'] = localVarApiKeyValue; + } + + localVarHeaderParameter['Content-Type'] = 'application/json'; + + localVarUrlObj.query = Object.assign( + {}, + localVarUrlObj.query, + localVarQueryParameter, + options.query, + ); + // fix override query string Detail: https://stackoverflow.com/a/7517673/1077943 + delete localVarUrlObj.search; + localVarRequestOptions.headers = Object.assign({}, localVarHeaderParameter, options.headers); + const needsSerialization = + 'ApiJob' !== 'string' || + localVarRequestOptions.headers['Content-Type'] === 'application/json'; + localVarRequestOptions.body = needsSerialization ? JSON.stringify(body || {}) : body || ''; + + return { + url: url.format(localVarUrlObj), + options: localVarRequestOptions, + }; + }, + /** + * + * @param {string} id The ID of the job to be deleted + * @param {*} [options] Override http request option. 
+ * @throws {RequiredError} + */ + deleteJob(id: string, options: any = {}): FetchArgs { + // verify required parameter 'id' is not null or undefined + if (id === null || id === undefined) { + throw new RequiredError( + 'id', + 'Required parameter id was null or undefined when calling deleteJob.', + ); + } + const localVarPath = `/apis/v1beta1/jobs/{id}`.replace( + `{${'id'}}`, + encodeURIComponent(String(id)), + ); + const localVarUrlObj = url.parse(localVarPath, true); + const localVarRequestOptions = Object.assign({ method: 'DELETE' }, options); + const localVarHeaderParameter = {} as any; + const localVarQueryParameter = {} as any; + + // authentication Bearer required + if (configuration && configuration.apiKey) { + const localVarApiKeyValue = + typeof configuration.apiKey === 'function' + ? configuration.apiKey('authorization') + : configuration.apiKey; + localVarHeaderParameter['authorization'] = localVarApiKeyValue; + } + + localVarUrlObj.query = Object.assign( + {}, + localVarUrlObj.query, + localVarQueryParameter, + options.query, + ); + // fix override query string Detail: https://stackoverflow.com/a/7517673/1077943 + delete localVarUrlObj.search; + localVarRequestOptions.headers = Object.assign({}, localVarHeaderParameter, options.headers); + + return { + url: url.format(localVarUrlObj), + options: localVarRequestOptions, + }; + }, + /** + * + * @param {string} id The ID of the job to be disabled + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + disableJob(id: string, options: any = {}): FetchArgs { + // verify required parameter 'id' is not null or undefined + if (id === null || id === undefined) { + throw new RequiredError( + 'id', + 'Required parameter id was null or undefined when calling disableJob.', + ); + } + const localVarPath = `/apis/v1beta1/jobs/{id}/disable`.replace( + `{${'id'}}`, + encodeURIComponent(String(id)), + ); + const localVarUrlObj = url.parse(localVarPath, true); + const localVarRequestOptions = Object.assign({ method: 'POST' }, options); + const localVarHeaderParameter = {} as any; + const localVarQueryParameter = {} as any; + + // authentication Bearer required + if (configuration && configuration.apiKey) { + const localVarApiKeyValue = + typeof configuration.apiKey === 'function' + ? configuration.apiKey('authorization') + : configuration.apiKey; + localVarHeaderParameter['authorization'] = localVarApiKeyValue; + } + + localVarUrlObj.query = Object.assign( + {}, + localVarUrlObj.query, + localVarQueryParameter, + options.query, + ); + // fix override query string Detail: https://stackoverflow.com/a/7517673/1077943 + delete localVarUrlObj.search; + localVarRequestOptions.headers = Object.assign({}, localVarHeaderParameter, options.headers); + + return { + url: url.format(localVarUrlObj), + options: localVarRequestOptions, + }; + }, + /** + * + * @param {string} id The ID of the job to be enabled + * @param {*} [options] Override http request option. 
+ * @throws {RequiredError} + */ + enableJob(id: string, options: any = {}): FetchArgs { + // verify required parameter 'id' is not null or undefined + if (id === null || id === undefined) { + throw new RequiredError( + 'id', + 'Required parameter id was null or undefined when calling enableJob.', + ); + } + const localVarPath = `/apis/v1beta1/jobs/{id}/enable`.replace( + `{${'id'}}`, + encodeURIComponent(String(id)), + ); + const localVarUrlObj = url.parse(localVarPath, true); + const localVarRequestOptions = Object.assign({ method: 'POST' }, options); + const localVarHeaderParameter = {} as any; + const localVarQueryParameter = {} as any; + + // authentication Bearer required + if (configuration && configuration.apiKey) { + const localVarApiKeyValue = + typeof configuration.apiKey === 'function' + ? configuration.apiKey('authorization') + : configuration.apiKey; + localVarHeaderParameter['authorization'] = localVarApiKeyValue; + } + + localVarUrlObj.query = Object.assign( + {}, + localVarUrlObj.query, + localVarQueryParameter, + options.query, + ); + // fix override query string Detail: https://stackoverflow.com/a/7517673/1077943 + delete localVarUrlObj.search; + localVarRequestOptions.headers = Object.assign({}, localVarHeaderParameter, options.headers); + + return { + url: url.format(localVarUrlObj), + options: localVarRequestOptions, + }; + }, + /** + * + * @param {string} id The ID of the job to be retrieved + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + getJob(id: string, options: any = {}): FetchArgs { + // verify required parameter 'id' is not null or undefined + if (id === null || id === undefined) { + throw new RequiredError( + 'id', + 'Required parameter id was null or undefined when calling getJob.', + ); + } + const localVarPath = `/apis/v1beta1/jobs/{id}`.replace( + `{${'id'}}`, + encodeURIComponent(String(id)), + ); + const localVarUrlObj = url.parse(localVarPath, true); + const localVarRequestOptions = Object.assign({ method: 'GET' }, options); + const localVarHeaderParameter = {} as any; + const localVarQueryParameter = {} as any; + + // authentication Bearer required + if (configuration && configuration.apiKey) { + const localVarApiKeyValue = + typeof configuration.apiKey === 'function' + ? configuration.apiKey('authorization') + : configuration.apiKey; + localVarHeaderParameter['authorization'] = localVarApiKeyValue; + } + + localVarUrlObj.query = Object.assign( + {}, + localVarUrlObj.query, + localVarQueryParameter, + options.query, + ); + // fix override query string Detail: https://stackoverflow.com/a/7517673/1077943 + delete localVarUrlObj.search; + localVarRequestOptions.headers = Object.assign({}, localVarHeaderParameter, options.headers); + + return { + url: url.format(localVarUrlObj), + options: localVarRequestOptions, + }; + }, + /** + * + * @param {string} [page_token] + * @param {number} [page_size] + * @param {string} [sort_by] Can be format of \"field_name\", \"field_name asc\" or \"field_name des\" Ascending by default. + * @param {'UNKNOWN_RESOURCE_TYPE' | 'EXPERIMENT' | 'JOB'} [resource_reference_key_type] The type of the resource that referred to. + * @param {string} [resource_reference_key_id] The ID of the resource that referred to. + * @param {string} [filter] A base-64 encoded, JSON-serialized Filter protocol buffer (see filter.proto). + * @param {*} [options] Override http request option. 
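Note that the path-templated operations URL-encode the id before substituting it into the route, so ids containing reserved characters remain safe. A small sketch (the id is contrived):

// encodeURIComponent escapes '/' and ' ' before the template substitution.
const args = JobServiceApiFetchParamCreator().getJob('a/b c');
// args.url -> '/apis/v1beta1/jobs/a%2Fb%20c'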
+ * @throws {RequiredError} + */ + listJobs( + page_token?: string, + page_size?: number, + sort_by?: string, + resource_reference_key_type?: 'UNKNOWN_RESOURCE_TYPE' | 'EXPERIMENT' | 'JOB', + resource_reference_key_id?: string, + filter?: string, + options: any = {}, + ): FetchArgs { + const localVarPath = `/apis/v1beta1/jobs`; + const localVarUrlObj = url.parse(localVarPath, true); + const localVarRequestOptions = Object.assign({ method: 'GET' }, options); + const localVarHeaderParameter = {} as any; + const localVarQueryParameter = {} as any; + + // authentication Bearer required + if (configuration && configuration.apiKey) { + const localVarApiKeyValue = + typeof configuration.apiKey === 'function' + ? configuration.apiKey('authorization') + : configuration.apiKey; + localVarHeaderParameter['authorization'] = localVarApiKeyValue; + } + + if (page_token !== undefined) { + localVarQueryParameter['page_token'] = page_token; + } + + if (page_size !== undefined) { + localVarQueryParameter['page_size'] = page_size; + } + + if (sort_by !== undefined) { + localVarQueryParameter['sort_by'] = sort_by; + } + + if (resource_reference_key_type !== undefined) { + localVarQueryParameter['resource_reference_key.type'] = resource_reference_key_type; + } + + if (resource_reference_key_id !== undefined) { + localVarQueryParameter['resource_reference_key.id'] = resource_reference_key_id; + } + + if (filter !== undefined) { + localVarQueryParameter['filter'] = filter; + } + + localVarUrlObj.query = Object.assign( + {}, + localVarUrlObj.query, + localVarQueryParameter, + options.query, + ); + // fix override query string Detail: https://stackoverflow.com/a/7517673/1077943 + delete localVarUrlObj.search; + localVarRequestOptions.headers = Object.assign({}, localVarHeaderParameter, options.headers); + + return { + url: url.format(localVarUrlObj), + options: localVarRequestOptions, + }; + }, + }; }; /** @@ -703,268 +774,333 @@ export const JobServiceApiFetchParamCreator = function (configuration?: Configur * @export */ export const JobServiceApiFp = function(configuration?: Configuration) { - return { - /** - * - * @param {ApiJob} body The job to be created - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - createJob(body: ApiJob, options?: any): (fetch?: FetchAPI, basePath?: string) => Promise { - const localVarFetchArgs = JobServiceApiFetchParamCreator(configuration).createJob(body, options); - return (fetch: FetchAPI = portableFetch, basePath: string = BASE_PATH) => { - return fetch(basePath + localVarFetchArgs.url, localVarFetchArgs.options).then((response) => { - if (response.status >= 200 && response.status < 300) { - return response.json(); - } else { - throw response; - } - }); - }; - }, - /** - * - * @param {string} id The ID of the job to be deleted - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - deleteJob(id: string, options?: any): (fetch?: FetchAPI, basePath?: string) => Promise { - const localVarFetchArgs = JobServiceApiFetchParamCreator(configuration).deleteJob(id, options); - return (fetch: FetchAPI = portableFetch, basePath: string = BASE_PATH) => { - return fetch(basePath + localVarFetchArgs.url, localVarFetchArgs.options).then((response) => { - if (response.status >= 200 && response.status < 300) { - return response.json(); - } else { - throw response; - } - }); - }; - }, - /** - * - * @param {string} id The ID of the job to be disabled - * @param {*} [options] Override http request option. 
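The list endpoint folds every defined argument into the query string via Node's legacy url module, and simply omits arguments left undefined. A sketch of the resulting URL (all values assumed):

const listArgs = JobServiceApiFetchParamCreator().listJobs(
  undefined,         // page_token omitted from the query string
  10,                // page_size
  'created_at desc', // sort_by
  'EXPERIMENT',      // resource_reference_key.type
  'exp-123',         // resource_reference_key.id
);
// listArgs.url -> '/apis/v1beta1/jobs?page_size=10&sort_by=created_at%20desc'
//                 + '&resource_reference_key.type=EXPERIMENT&resource_reference_key.id=exp-123'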
- * @throws {RequiredError} - */ - disableJob(id: string, options?: any): (fetch?: FetchAPI, basePath?: string) => Promise { - const localVarFetchArgs = JobServiceApiFetchParamCreator(configuration).disableJob(id, options); - return (fetch: FetchAPI = portableFetch, basePath: string = BASE_PATH) => { - return fetch(basePath + localVarFetchArgs.url, localVarFetchArgs.options).then((response) => { - if (response.status >= 200 && response.status < 300) { - return response.json(); - } else { - throw response; - } - }); - }; - }, - /** - * - * @param {string} id The ID of the job to be enabled - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - enableJob(id: string, options?: any): (fetch?: FetchAPI, basePath?: string) => Promise { - const localVarFetchArgs = JobServiceApiFetchParamCreator(configuration).enableJob(id, options); - return (fetch: FetchAPI = portableFetch, basePath: string = BASE_PATH) => { - return fetch(basePath + localVarFetchArgs.url, localVarFetchArgs.options).then((response) => { - if (response.status >= 200 && response.status < 300) { - return response.json(); - } else { - throw response; - } - }); - }; - }, - /** - * - * @param {string} id The ID of the job to be retrieved - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - getJob(id: string, options?: any): (fetch?: FetchAPI, basePath?: string) => Promise { - const localVarFetchArgs = JobServiceApiFetchParamCreator(configuration).getJob(id, options); - return (fetch: FetchAPI = portableFetch, basePath: string = BASE_PATH) => { - return fetch(basePath + localVarFetchArgs.url, localVarFetchArgs.options).then((response) => { - if (response.status >= 200 && response.status < 300) { - return response.json(); - } else { - throw response; - } - }); - }; - }, - /** - * - * @param {string} [page_token] - * @param {number} [page_size] - * @param {string} [sort_by] Can be format of \"field_name\", \"field_name asc\" or \"field_name des\" Ascending by default. - * @param {string} [resource_reference_key_type] The type of the resource that referred to. - * @param {string} [resource_reference_key_id] The ID of the resource that referred to. - * @param {string} [filter] A base-64 encoded, JSON-serialized Filter protocol buffer (see filter.proto). - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - listJobs(page_token?: string, page_size?: number, sort_by?: string, resource_reference_key_type?: string, resource_reference_key_id?: string, filter?: string, options?: any): (fetch?: FetchAPI, basePath?: string) => Promise { - const localVarFetchArgs = JobServiceApiFetchParamCreator(configuration).listJobs(page_token, page_size, sort_by, resource_reference_key_type, resource_reference_key_id, filter, options); - return (fetch: FetchAPI = portableFetch, basePath: string = BASE_PATH) => { - return fetch(basePath + localVarFetchArgs.url, localVarFetchArgs.options).then((response) => { - if (response.status >= 200 && response.status < 300) { - return response.json(); - } else { - throw response; - } - }); - }; - }, - } + return { + /** + * + * @param {ApiJob} body The job to be created + * @param {*} [options] Override http request option. 
+ * @throws {RequiredError} + */ + createJob( + body: ApiJob, + options?: any, + ): (fetch?: FetchAPI, basePath?: string) => Promise { + const localVarFetchArgs = JobServiceApiFetchParamCreator(configuration).createJob( + body, + options, + ); + return (fetch: FetchAPI = portableFetch, basePath: string = BASE_PATH) => { + return fetch(basePath + localVarFetchArgs.url, localVarFetchArgs.options).then(response => { + if (response.status >= 200 && response.status < 300) { + return response.json(); + } else { + throw response; + } + }); + }; + }, + /** + * + * @param {string} id The ID of the job to be deleted + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + deleteJob(id: string, options?: any): (fetch?: FetchAPI, basePath?: string) => Promise { + const localVarFetchArgs = JobServiceApiFetchParamCreator(configuration).deleteJob( + id, + options, + ); + return (fetch: FetchAPI = portableFetch, basePath: string = BASE_PATH) => { + return fetch(basePath + localVarFetchArgs.url, localVarFetchArgs.options).then(response => { + if (response.status >= 200 && response.status < 300) { + return response.json(); + } else { + throw response; + } + }); + }; + }, + /** + * + * @param {string} id The ID of the job to be disabled + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + disableJob(id: string, options?: any): (fetch?: FetchAPI, basePath?: string) => Promise { + const localVarFetchArgs = JobServiceApiFetchParamCreator(configuration).disableJob( + id, + options, + ); + return (fetch: FetchAPI = portableFetch, basePath: string = BASE_PATH) => { + return fetch(basePath + localVarFetchArgs.url, localVarFetchArgs.options).then(response => { + if (response.status >= 200 && response.status < 300) { + return response.json(); + } else { + throw response; + } + }); + }; + }, + /** + * + * @param {string} id The ID of the job to be enabled + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + enableJob(id: string, options?: any): (fetch?: FetchAPI, basePath?: string) => Promise { + const localVarFetchArgs = JobServiceApiFetchParamCreator(configuration).enableJob( + id, + options, + ); + return (fetch: FetchAPI = portableFetch, basePath: string = BASE_PATH) => { + return fetch(basePath + localVarFetchArgs.url, localVarFetchArgs.options).then(response => { + if (response.status >= 200 && response.status < 300) { + return response.json(); + } else { + throw response; + } + }); + }; + }, + /** + * + * @param {string} id The ID of the job to be retrieved + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + getJob(id: string, options?: any): (fetch?: FetchAPI, basePath?: string) => Promise { + const localVarFetchArgs = JobServiceApiFetchParamCreator(configuration).getJob(id, options); + return (fetch: FetchAPI = portableFetch, basePath: string = BASE_PATH) => { + return fetch(basePath + localVarFetchArgs.url, localVarFetchArgs.options).then(response => { + if (response.status >= 200 && response.status < 300) { + return response.json(); + } else { + throw response; + } + }); + }; + }, + /** + * + * @param {string} [page_token] + * @param {number} [page_size] + * @param {string} [sort_by] Can be format of \"field_name\", \"field_name asc\" or \"field_name des\" Ascending by default. + * @param {'UNKNOWN_RESOURCE_TYPE' | 'EXPERIMENT' | 'JOB'} [resource_reference_key_type] The type of the resource that referred to. 
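The Fp layer wraps each param creator in a thunk that accepts a fetch implementation and base path, which makes it straightforward to stub out the network in tests. A test-oriented sketch (the stubbed fetch and the response shape are assumptions):

// A stand-in for portable-fetch that returns a canned 200 response.
const stubFetch: any = () =>
  Promise.resolve({ status: 200, json: () => Promise.resolve({ id: 'job-1' }) });

async function demo() {
  // Non-2xx responses are thrown as-is by the generated code.
  const job = await JobServiceApiFp().getJob('job-1')(stubFetch, 'http://api.example.test');
  console.log(job.id); // 'job-1'
}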
+ * @param {string} [resource_reference_key_id] The ID of the resource that referred to. + * @param {string} [filter] A base-64 encoded, JSON-serialized Filter protocol buffer (see filter.proto). + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + listJobs( + page_token?: string, + page_size?: number, + sort_by?: string, + resource_reference_key_type?: 'UNKNOWN_RESOURCE_TYPE' | 'EXPERIMENT' | 'JOB', + resource_reference_key_id?: string, + filter?: string, + options?: any, + ): (fetch?: FetchAPI, basePath?: string) => Promise { + const localVarFetchArgs = JobServiceApiFetchParamCreator(configuration).listJobs( + page_token, + page_size, + sort_by, + resource_reference_key_type, + resource_reference_key_id, + filter, + options, + ); + return (fetch: FetchAPI = portableFetch, basePath: string = BASE_PATH) => { + return fetch(basePath + localVarFetchArgs.url, localVarFetchArgs.options).then(response => { + if (response.status >= 200 && response.status < 300) { + return response.json(); + } else { + throw response; + } + }); + }; + }, + }; }; /** * JobServiceApi - factory interface * @export */ -export const JobServiceApiFactory = function (configuration?: Configuration, fetch?: FetchAPI, basePath?: string) { - return { - /** - * - * @param {ApiJob} body The job to be created - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - createJob(body: ApiJob, options?: any) { - return JobServiceApiFp(configuration).createJob(body, options)(fetch, basePath); - }, - /** - * - * @param {string} id The ID of the job to be deleted - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - deleteJob(id: string, options?: any) { - return JobServiceApiFp(configuration).deleteJob(id, options)(fetch, basePath); - }, - /** - * - * @param {string} id The ID of the job to be disabled - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - disableJob(id: string, options?: any) { - return JobServiceApiFp(configuration).disableJob(id, options)(fetch, basePath); - }, - /** - * - * @param {string} id The ID of the job to be enabled - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - enableJob(id: string, options?: any) { - return JobServiceApiFp(configuration).enableJob(id, options)(fetch, basePath); - }, - /** - * - * @param {string} id The ID of the job to be retrieved - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - getJob(id: string, options?: any) { - return JobServiceApiFp(configuration).getJob(id, options)(fetch, basePath); - }, - /** - * - * @param {string} [page_token] - * @param {number} [page_size] - * @param {string} [sort_by] Can be format of \"field_name\", \"field_name asc\" or \"field_name des\" Ascending by default. - * @param {string} [resource_reference_key_type] The type of the resource that referred to. - * @param {string} [resource_reference_key_id] The ID of the resource that referred to. - * @param {string} [filter] A base-64 encoded, JSON-serialized Filter protocol buffer (see filter.proto). - * @param {*} [options] Override http request option. 
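The factory flavor pre-binds configuration, fetch, and base path once and then exposes plain promise-returning methods. A sketch (endpoint and response shape are assumed):

import * as portableFetch from 'portable-fetch';

// fetch and basePath are captured here and reused by every call.
const jobs = JobServiceApiFactory(undefined, portableFetch, 'http://localhost:8888');
jobs.listJobs(undefined, 20).then(page => console.log(page));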
- * @throws {RequiredError} - */ - listJobs(page_token?: string, page_size?: number, sort_by?: string, resource_reference_key_type?: string, resource_reference_key_id?: string, filter?: string, options?: any) { - return JobServiceApiFp(configuration).listJobs(page_token, page_size, sort_by, resource_reference_key_type, resource_reference_key_id, filter, options)(fetch, basePath); - }, - }; -}; - -/** - * JobServiceApi - object-oriented interface - * @export - * @class JobServiceApi - * @extends {BaseAPI} - */ -export class JobServiceApi extends BaseAPI { +export const JobServiceApiFactory = function( + configuration?: Configuration, + fetch?: FetchAPI, + basePath?: string, +) { + return { /** - * - * @param {} body The job to be created + * + * @param {ApiJob} body The job to be created * @param {*} [options] Override http request option. * @throws {RequiredError} - * @memberof JobServiceApi */ - public createJob(body: ApiJob, options?: any) { - return JobServiceApiFp(this.configuration).createJob(body, options)(this.fetch, this.basePath); - } - + createJob(body: ApiJob, options?: any) { + return JobServiceApiFp(configuration).createJob(body, options)(fetch, basePath); + }, /** - * - * @param {} id The ID of the job to be deleted + * + * @param {string} id The ID of the job to be deleted * @param {*} [options] Override http request option. * @throws {RequiredError} - * @memberof JobServiceApi */ - public deleteJob(id: string, options?: any) { - return JobServiceApiFp(this.configuration).deleteJob(id, options)(this.fetch, this.basePath); - } - + deleteJob(id: string, options?: any) { + return JobServiceApiFp(configuration).deleteJob(id, options)(fetch, basePath); + }, /** - * - * @param {} id The ID of the job to be disabled + * + * @param {string} id The ID of the job to be disabled * @param {*} [options] Override http request option. * @throws {RequiredError} - * @memberof JobServiceApi */ - public disableJob(id: string, options?: any) { - return JobServiceApiFp(this.configuration).disableJob(id, options)(this.fetch, this.basePath); - } - + disableJob(id: string, options?: any) { + return JobServiceApiFp(configuration).disableJob(id, options)(fetch, basePath); + }, /** - * - * @param {} id The ID of the job to be enabled + * + * @param {string} id The ID of the job to be enabled * @param {*} [options] Override http request option. * @throws {RequiredError} - * @memberof JobServiceApi */ - public enableJob(id: string, options?: any) { - return JobServiceApiFp(this.configuration).enableJob(id, options)(this.fetch, this.basePath); - } - + enableJob(id: string, options?: any) { + return JobServiceApiFp(configuration).enableJob(id, options)(fetch, basePath); + }, /** - * - * @param {} id The ID of the job to be retrieved + * + * @param {string} id The ID of the job to be retrieved * @param {*} [options] Override http request option. * @throws {RequiredError} - * @memberof JobServiceApi */ - public getJob(id: string, options?: any) { - return JobServiceApiFp(this.configuration).getJob(id, options)(this.fetch, this.basePath); - } - + getJob(id: string, options?: any) { + return JobServiceApiFp(configuration).getJob(id, options)(fetch, basePath); + }, /** - * - * @param {} [page_token] - * @param {} [page_size] - * @param {} [sort_by] Can be format of \"field_name\", \"field_name asc\" or \"field_name des\" Ascending by default. - * @param {} [resource_reference_key_type] The type of the resource that referred to. - * @param {} [resource_reference_key_id] The ID of the resource that referred to. 
- * @param {} [filter] A base-64 encoded, JSON-serialized Filter protocol buffer (see filter.proto). + * + * @param {string} [page_token] + * @param {number} [page_size] + * @param {string} [sort_by] Can be format of \"field_name\", \"field_name asc\" or \"field_name des\" Ascending by default. + * @param {'UNKNOWN_RESOURCE_TYPE' | 'EXPERIMENT' | 'JOB'} [resource_reference_key_type] The type of the resource that referred to. + * @param {string} [resource_reference_key_id] The ID of the resource that referred to. + * @param {string} [filter] A base-64 encoded, JSON-serialized Filter protocol buffer (see filter.proto). * @param {*} [options] Override http request option. * @throws {RequiredError} - * @memberof JobServiceApi */ - public listJobs(page_token?: string, page_size?: number, sort_by?: string, resource_reference_key_type?: string, resource_reference_key_id?: string, filter?: string, options?: any) { - return JobServiceApiFp(this.configuration).listJobs(page_token, page_size, sort_by, resource_reference_key_type, resource_reference_key_id, filter, options)(this.fetch, this.basePath); - } + listJobs( + page_token?: string, + page_size?: number, + sort_by?: string, + resource_reference_key_type?: 'UNKNOWN_RESOURCE_TYPE' | 'EXPERIMENT' | 'JOB', + resource_reference_key_id?: string, + filter?: string, + options?: any, + ) { + return JobServiceApiFp(configuration).listJobs( + page_token, + page_size, + sort_by, + resource_reference_key_type, + resource_reference_key_id, + filter, + options, + )(fetch, basePath); + }, + }; +}; -} +/** + * JobServiceApi - object-oriented interface + * @export + * @class JobServiceApi + * @extends {BaseAPI} + */ +export class JobServiceApi extends BaseAPI { + /** + * + * @param {ApiJob} body The job to be created + * @param {*} [options] Override http request option. + * @throws {RequiredError} + * @memberof JobServiceApi + */ + public createJob(body: ApiJob, options?: any) { + return JobServiceApiFp(this.configuration).createJob(body, options)(this.fetch, this.basePath); + } + + /** + * + * @param {string} id The ID of the job to be deleted + * @param {*} [options] Override http request option. + * @throws {RequiredError} + * @memberof JobServiceApi + */ + public deleteJob(id: string, options?: any) { + return JobServiceApiFp(this.configuration).deleteJob(id, options)(this.fetch, this.basePath); + } + + /** + * + * @param {string} id The ID of the job to be disabled + * @param {*} [options] Override http request option. + * @throws {RequiredError} + * @memberof JobServiceApi + */ + public disableJob(id: string, options?: any) { + return JobServiceApiFp(this.configuration).disableJob(id, options)(this.fetch, this.basePath); + } + /** + * + * @param {string} id The ID of the job to be enabled + * @param {*} [options] Override http request option. + * @throws {RequiredError} + * @memberof JobServiceApi + */ + public enableJob(id: string, options?: any) { + return JobServiceApiFp(this.configuration).enableJob(id, options)(this.fetch, this.basePath); + } + + /** + * + * @param {string} id The ID of the job to be retrieved + * @param {*} [options] Override http request option. 
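At the top of the stack, the class flavor carries Configuration, fetch, and basePath as instance state (BaseAPI picks up Configuration.basePath when one is set). A minimal sketch of authenticated use; the token and endpoint are placeholders:

const client = new JobServiceApi(
  new Configuration({
    // The generated code calls apiKey('authorization') and copies the result
    // into the 'authorization' request header.
    apiKey: (name: string) => 'Bearer <token>',
    basePath: 'http://localhost:8888',
  }),
);

async function restart(jobId: string) {
  await client.disableJob(jobId);
  await client.enableJob(jobId);
}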
+ * @throws {RequiredError} + * @memberof JobServiceApi + */ + public getJob(id: string, options?: any) { + return JobServiceApiFp(this.configuration).getJob(id, options)(this.fetch, this.basePath); + } + + /** + * + * @param {string} [page_token] + * @param {number} [page_size] + * @param {string} [sort_by] Can be format of \"field_name\", \"field_name asc\" or \"field_name des\" Ascending by default. + * @param {'UNKNOWN_RESOURCE_TYPE' | 'EXPERIMENT' | 'JOB'} [resource_reference_key_type] The type of the resource that referred to. + * @param {string} [resource_reference_key_id] The ID of the resource that referred to. + * @param {string} [filter] A base-64 encoded, JSON-serialized Filter protocol buffer (see filter.proto). + * @param {*} [options] Override http request option. + * @throws {RequiredError} + * @memberof JobServiceApi + */ + public listJobs( + page_token?: string, + page_size?: number, + sort_by?: string, + resource_reference_key_type?: 'UNKNOWN_RESOURCE_TYPE' | 'EXPERIMENT' | 'JOB', + resource_reference_key_id?: string, + filter?: string, + options?: any, + ) { + return JobServiceApiFp(this.configuration).listJobs( + page_token, + page_size, + sort_by, + resource_reference_key_type, + resource_reference_key_id, + filter, + options, + )(this.fetch, this.basePath); + } +} diff --git a/frontend/src/apis/job/configuration.ts b/frontend/src/apis/job/configuration.ts index 8624aa60db0e..09fcdb80ac7f 100644 --- a/frontend/src/apis/job/configuration.ts +++ b/frontend/src/apis/job/configuration.ts @@ -4,63 +4,62 @@ * No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen) * * OpenAPI spec version: version not set - * + * * * NOTE: This class is auto generated by the swagger code generator program. * https://github.com/swagger-api/swagger-codegen.git * Do not edit the class manually. 
*/ - export interface ConfigurationParameters { - apiKey?: string | ((name: string) => string); - username?: string; - password?: string; - accessToken?: string | ((name: string, scopes?: string[]) => string); - basePath?: string; + apiKey?: string | ((name: string) => string); + username?: string; + password?: string; + accessToken?: string | ((name: string, scopes?: string[]) => string); + basePath?: string; } export class Configuration { - /** - * parameter for apiKey security - * @param name security name - * @memberof Configuration - */ - apiKey?: string | ((name: string) => string); - /** - * parameter for basic security - * - * @type {string} - * @memberof Configuration - */ - username?: string; - /** - * parameter for basic security - * - * @type {string} - * @memberof Configuration - */ - password?: string; - /** - * parameter for oauth2 security - * @param name security name - * @param scopes oauth2 scope - * @memberof Configuration - */ - accessToken?: string | ((name: string, scopes?: string[]) => string); - /** - * override base path - * - * @type {string} - * @memberof Configuration - */ - basePath?: string; + /** + * parameter for apiKey security + * @param name security name + * @memberof Configuration + */ + apiKey?: string | ((name: string) => string); + /** + * parameter for basic security + * + * @type {string} + * @memberof Configuration + */ + username?: string; + /** + * parameter for basic security + * + * @type {string} + * @memberof Configuration + */ + password?: string; + /** + * parameter for oauth2 security + * @param name security name + * @param scopes oauth2 scope + * @memberof Configuration + */ + accessToken?: string | ((name: string, scopes?: string[]) => string); + /** + * override base path + * + * @type {string} + * @memberof Configuration + */ + basePath?: string; - constructor(param: ConfigurationParameters = {}) { - this.apiKey = param.apiKey; - this.username = param.username; - this.password = param.password; - this.accessToken = param.accessToken; - this.basePath = param.basePath; - } + constructor(param: ConfigurationParameters = {}) { + this.apiKey = param.apiKey; + this.username = param.username; + this.password = param.password; + this.accessToken = param.accessToken; + this.basePath = param.basePath; + } } diff --git a/frontend/src/apis/job/custom.d.ts b/frontend/src/apis/job/custom.d.ts index 02f969575e37..4c611cc3216e 100644 --- a/frontend/src/apis/job/custom.d.ts +++ b/frontend/src/apis/job/custom.d.ts @@ -1 +1,2 @@ -declare module 'portable-fetch'; \ No newline at end of file +declare module 'portable-fetch'; +declare module 'url'; diff --git a/frontend/src/apis/job/index.ts b/frontend/src/apis/job/index.ts index 2e9b45facf04..f8a1f06048ab 100644 --- a/frontend/src/apis/job/index.ts +++ b/frontend/src/apis/job/index.ts @@ -4,13 +4,12 @@ * No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen) * * OpenAPI spec version: version not set - * + * * * NOTE: This class is auto generated by the swagger code generator program. * https://github.com/swagger-api/swagger-codegen.git * Do not edit the class manually. 
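Because apiKey may be a function, tokens can be resolved lazily on each request rather than frozen at construction time. A browser-oriented sketch, assuming a hypothetical 'kfp-token' storage key:

const config = new Configuration({
  apiKey: (name: string) =>
    name === 'authorization'
      ? `Bearer ${window.localStorage.getItem('kfp-token') || ''}`
      : '',
});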
*/ - -export * from "./api"; -export * from "./configuration"; +export * from './api'; +export * from './configuration'; diff --git a/frontend/src/apis/metadata/BUILD.bazel b/frontend/src/apis/metadata/BUILD.bazel new file mode 100644 index 000000000000..b53786f08e34 --- /dev/null +++ b/frontend/src/apis/metadata/BUILD.bazel @@ -0,0 +1,28 @@ +load("@io_bazel_rules_go//go:def.bzl", "go_library") +load("@io_bazel_rules_go//proto:def.bzl", "go_proto_library") + +proto_library( + name = "ml_metadata_proto", + srcs = [ + "metadata_store.proto", + "metadata_store_service.proto", + ], + visibility = ["//visibility:public"], + deps = ["//src/apis/metadata:metadata_proto"], +) + +go_proto_library( + name = "ml_metadata_go_proto", + compilers = ["@io_bazel_rules_go//proto:go_grpc"], + importpath = "github.com/kubeflow/pipelines/frontend/src/apis/metadata", + proto = ":ml_metadata_proto", + visibility = ["//visibility:public"], + deps = ["//src/apis/metadata:go_default_library"], +) + +go_library( + name = "go_default_library", + embed = [":ml_metadata_go_proto"], + importpath = "github.com/kubeflow/pipelines/frontend/src/apis/metadata", + visibility = ["//visibility:public"], +) diff --git a/frontend/src/apis/pipeline/.swagger-codegen/VERSION b/frontend/src/apis/pipeline/.swagger-codegen/VERSION index a6254504e401..48a6b508dc9f 100644 --- a/frontend/src/apis/pipeline/.swagger-codegen/VERSION +++ b/frontend/src/apis/pipeline/.swagger-codegen/VERSION @@ -1 +1 @@ -2.3.1 \ No newline at end of file +2.4.7 \ No newline at end of file diff --git a/frontend/src/apis/pipeline/api.ts b/frontend/src/apis/pipeline/api.ts index a11484a22723..c82cd9fca383 100644 --- a/frontend/src/apis/pipeline/api.ts +++ b/frontend/src/apis/pipeline/api.ts @@ -5,29 +5,28 @@ * No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen) * * OpenAPI spec version: version not set - * + * * * NOTE: This class is auto generated by the swagger code generator program. * https://github.com/swagger-api/swagger-codegen.git * Do not edit the class manually. 
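With the barrel re-exports above, consumers can import the client and its models from a single path instead of reaching into individual generated files, e.g. (the relative import path is an assumption):

import { ApiJob, Configuration, JobServiceApi } from '../apis/job';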
*/ +import * as url from 'url'; +import * as portableFetch from 'portable-fetch'; +import { Configuration } from './configuration'; -import * as url from "url"; -import * as portableFetch from "portable-fetch"; -import { Configuration } from "./configuration"; - -const BASE_PATH = "http://localhost".replace(/\/+$/, ""); +const BASE_PATH = 'http://localhost'.replace(/\/+$/, ''); /** * * @export */ export const COLLECTION_FORMATS = { - csv: ",", - ssv: " ", - tsv: "\t", - pipes: "|", + csv: ',', + ssv: ' ', + tsv: '\t', + pipes: '|', }; /** @@ -36,196 +35,200 @@ export const COLLECTION_FORMATS = { * @interface FetchAPI */ export interface FetchAPI { - (url: string, init?: any): Promise; + (url: string, init?: any): Promise; } /** - * + * * @export * @interface FetchArgs */ export interface FetchArgs { - url: string; - options: any; + url: string; + options: any; } /** - * + * * @export * @class BaseAPI */ export class BaseAPI { - protected configuration: Configuration; - - constructor(configuration?: Configuration, protected basePath: string = BASE_PATH, protected fetch: FetchAPI = portableFetch) { - if (configuration) { - this.configuration = configuration; - this.basePath = configuration.basePath || this.basePath; - } + protected configuration: Configuration; + + constructor( + configuration?: Configuration, + protected basePath: string = BASE_PATH, + protected fetch: FetchAPI = portableFetch, + ) { + if (configuration) { + this.configuration = configuration; + this.basePath = configuration.basePath || this.basePath; } -}; + } +} /** - * + * * @export * @class RequiredError * @extends {Error} */ export class RequiredError extends Error { - name: "RequiredError" - constructor(public field: string, msg?: string) { - super(msg); - } + name: 'RequiredError'; + constructor(public field: string, msg?: string) { + super(msg); + } } /** - * + * * @export * @interface ApiGetTemplateResponse */ export interface ApiGetTemplateResponse { - /** - * - * @type {string} - * @memberof ApiGetTemplateResponse - */ - template?: string; + /** + * + * @type {string} + * @memberof ApiGetTemplateResponse + */ + template?: string; } /** - * + * * @export * @interface ApiListPipelinesResponse */ export interface ApiListPipelinesResponse { - /** - * - * @type {Array<ApiPipeline>} - * @memberof ApiListPipelinesResponse - */ - pipelines?: Array; - /** - * - * @type {number} - * @memberof ApiListPipelinesResponse - */ - total_size?: number; - /** - * - * @type {string} - * @memberof ApiListPipelinesResponse - */ - next_page_token?: string; + /** + * + * @type {Array} + * @memberof ApiListPipelinesResponse + */ + pipelines?: Array; + /** + * + * @type {number} + * @memberof ApiListPipelinesResponse + */ + total_size?: number; + /** + * + * @type {string} + * @memberof ApiListPipelinesResponse + */ + next_page_token?: string; } /** - * + * * @export * @interface ApiParameter */ export interface ApiParameter { - /** - * - * @type {string} - * @memberof ApiParameter - */ - name?: string; - /** - * - * @type {string} - * @memberof ApiParameter - */ - value?: string; + /** + * + * @type {string} + * @memberof ApiParameter + */ + name?: string; + /** + * + * @type {string} + * @memberof ApiParameter + */ + value?: string; } /** - * + * * @export * @interface ApiPipeline */ export interface ApiPipeline { - /** - * Output. Unique pipeline ID. Generated by API server. - * @type {string} - * @memberof ApiPipeline - */ - id?: string; - /** - * Output. The time this pipeline is created. 
- * @type {Date} - * @memberof ApiPipeline - */ - created_at?: Date; - /** - * Optional input field. Pipeline name provided by user. If not specified, file name is used as pipeline name. - * @type {string} - * @memberof ApiPipeline - */ - name?: string; - /** - * Optional input field. Describing the purpose of the job. - * @type {string} - * @memberof ApiPipeline - */ - description?: string; - /** - * Output. The input parameters for this pipeline. - * @type {Array<ApiParameter>} - * @memberof ApiPipeline - */ - parameters?: Array; - /** - * The URL to the source of the pipeline. This is required when creating the pipeine through CreatePipeline API. - * @type {ApiUrl} - * @memberof ApiPipeline - */ - url?: ApiUrl; - /** - * In case any error happens retrieving a pipeline field, only pipeline ID and the error message is returned. Client has the flexibility of choosing how to handle error. This is especially useful during listing call. - * @type {string} - * @memberof ApiPipeline - */ - error?: string; + /** + * Output. Unique pipeline ID. Generated by API server. + * @type {string} + * @memberof ApiPipeline + */ + id?: string; + /** + * Output. The time this pipeline is created. + * @type {Date} + * @memberof ApiPipeline + */ + created_at?: Date; + /** + * Optional input field. Pipeline name provided by user. If not specified, file name is used as pipeline name. + * @type {string} + * @memberof ApiPipeline + */ + name?: string; + /** + * Optional input field. Describing the purpose of the job. + * @type {string} + * @memberof ApiPipeline + */ + description?: string; + /** + * Output. The input parameters for this pipeline. + * @type {Array} + * @memberof ApiPipeline + */ + parameters?: Array; + /** + * The URL to the source of the pipeline. This is required when creating the pipeine through CreatePipeline API. + * @type {ApiUrl} + * @memberof ApiPipeline + */ + url?: ApiUrl; + /** + * In case any error happens retrieving a pipeline field, only pipeline ID and the error message is returned. Client has the flexibility of choosing how to handle error. This is especially useful during listing call. + * @type {string} + * @memberof ApiPipeline + */ + error?: string; } /** - * + * * @export * @interface ApiStatus */ export interface ApiStatus { - /** - * - * @type {string} - * @memberof ApiStatus - */ - error?: string; - /** - * - * @type {number} - * @memberof ApiStatus - */ - code?: number; - /** - * - * @type {Array<ProtobufAny>} - * @memberof ApiStatus - */ - details?: Array; + /** + * + * @type {string} + * @memberof ApiStatus + */ + error?: string; + /** + * + * @type {number} + * @memberof ApiStatus + */ + code?: number; + /** + * + * @type {Array} + * @memberof ApiStatus + */ + details?: Array; } /** - * + * * @export * @interface ApiUrl */ export interface ApiUrl { - /** - * - * @type {string} - * @memberof ApiUrl - */ - pipeline_url?: string; + /** + * + * @type {string} + * @memberof ApiUrl + */ + pipeline_url?: string; } /** @@ -234,225 +237,280 @@ export interface ApiUrl { * @interface ProtobufAny */ export interface ProtobufAny { - /** - * A URL/resource name that uniquely identifies the type of the serialized protocol buffer message. The last segment of the URL's path must represent the fully qualified name of the type (as in `path/google.protobuf.Duration`). The name should be in a canonical form (e.g., leading \".\" is not accepted). In practice, teams usually precompile into the binary all types that they expect it to use in the context of Any. 
However, for URLs which use the scheme `http`, `https`, or no scheme, one can optionally set up a type server that maps type URLs to message definitions as follows: * If no scheme is provided, `https` is assumed. * An HTTP GET on the URL must yield a [google.protobuf.Type][] value in binary format, or produce an error. * Applications are allowed to cache lookup results based on the URL, or have them precompiled into a binary to avoid any lookup. Therefore, binary compatibility needs to be preserved on changes to types. (Use versioned type names to manage breaking changes.) Note: this functionality is not currently available in the official protobuf release, and it is not used for type URLs beginning with type.googleapis.com. Schemes other than `http`, `https` (or the empty scheme) might be used with implementation specific semantics. - * @type {string} - * @memberof ProtobufAny - */ - type_url?: string; - /** - * Must be a valid serialized protocol buffer of the above specified type. - * @type {string} - * @memberof ProtobufAny - */ - value?: string; + /** + * A URL/resource name that uniquely identifies the type of the serialized protocol buffer message. The last segment of the URL's path must represent the fully qualified name of the type (as in `path/google.protobuf.Duration`). The name should be in a canonical form (e.g., leading \".\" is not accepted). In practice, teams usually precompile into the binary all types that they expect it to use in the context of Any. However, for URLs which use the scheme `http`, `https`, or no scheme, one can optionally set up a type server that maps type URLs to message definitions as follows: * If no scheme is provided, `https` is assumed. * An HTTP GET on the URL must yield a [google.protobuf.Type][] value in binary format, or produce an error. * Applications are allowed to cache lookup results based on the URL, or have them precompiled into a binary to avoid any lookup. Therefore, binary compatibility needs to be preserved on changes to types. (Use versioned type names to manage breaking changes.) Note: this functionality is not currently available in the official protobuf release, and it is not used for type URLs beginning with type.googleapis.com. Schemes other than `http`, `https` (or the empty scheme) might be used with implementation specific semantics. + * @type {string} + * @memberof ProtobufAny + */ + type_url?: string; + /** + * Must be a valid serialized protocol buffer of the above specified type. + * @type {string} + * @memberof ProtobufAny + */ + value?: string; } - /** * PipelineServiceApi - fetch parameter creator * @export */ -export const PipelineServiceApiFetchParamCreator = function (configuration?: Configuration) { - return { - /** - * - * @param {ApiPipeline} body - * @param {*} [options] Override http request option. 
- * @throws {RequiredError} - */ - createPipeline(body: ApiPipeline, options: any = {}): FetchArgs { - // verify required parameter 'body' is not null or undefined - if (body === null || body === undefined) { - throw new RequiredError('body','Required parameter body was null or undefined when calling createPipeline.'); - } - const localVarPath = `/apis/v1beta1/pipelines`; - const localVarUrlObj = url.parse(localVarPath, true); - const localVarRequestOptions = Object.assign({ method: 'POST' }, options); - const localVarHeaderParameter = {} as any; - const localVarQueryParameter = {} as any; - - // authentication Bearer required - if (configuration && configuration.apiKey) { - const localVarApiKeyValue = typeof configuration.apiKey === 'function' - ? configuration.apiKey("authorization") - : configuration.apiKey; - localVarHeaderParameter["authorization"] = localVarApiKeyValue; - } - - localVarHeaderParameter['Content-Type'] = 'application/json'; - - localVarUrlObj.query = Object.assign({}, localVarUrlObj.query, localVarQueryParameter, options.query); - // fix override query string Detail: https://stackoverflow.com/a/7517673/1077943 - delete localVarUrlObj.search; - localVarRequestOptions.headers = Object.assign({}, localVarHeaderParameter, options.headers); - const needsSerialization = ("ApiPipeline" !== "string") || localVarRequestOptions.headers['Content-Type'] === 'application/json'; - localVarRequestOptions.body = needsSerialization ? JSON.stringify(body || {}) : (body || ""); - - return { - url: url.format(localVarUrlObj), - options: localVarRequestOptions, - }; - }, - /** - * - * @param {string} id - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - deletePipeline(id: string, options: any = {}): FetchArgs { - // verify required parameter 'id' is not null or undefined - if (id === null || id === undefined) { - throw new RequiredError('id','Required parameter id was null or undefined when calling deletePipeline.'); - } - const localVarPath = `/apis/v1beta1/pipelines/{id}` - .replace(`{${"id"}}`, encodeURIComponent(String(id))); - const localVarUrlObj = url.parse(localVarPath, true); - const localVarRequestOptions = Object.assign({ method: 'DELETE' }, options); - const localVarHeaderParameter = {} as any; - const localVarQueryParameter = {} as any; - - // authentication Bearer required - if (configuration && configuration.apiKey) { - const localVarApiKeyValue = typeof configuration.apiKey === 'function' - ? configuration.apiKey("authorization") - : configuration.apiKey; - localVarHeaderParameter["authorization"] = localVarApiKeyValue; - } - - localVarUrlObj.query = Object.assign({}, localVarUrlObj.query, localVarQueryParameter, options.query); - // fix override query string Detail: https://stackoverflow.com/a/7517673/1077943 - delete localVarUrlObj.search; - localVarRequestOptions.headers = Object.assign({}, localVarHeaderParameter, options.headers); - - return { - url: url.format(localVarUrlObj), - options: localVarRequestOptions, - }; - }, - /** - * - * @param {string} id - * @param {*} [options] Override http request option. 
- * @throws {RequiredError} - */ - getPipeline(id: string, options: any = {}): FetchArgs { - // verify required parameter 'id' is not null or undefined - if (id === null || id === undefined) { - throw new RequiredError('id','Required parameter id was null or undefined when calling getPipeline.'); - } - const localVarPath = `/apis/v1beta1/pipelines/{id}` - .replace(`{${"id"}}`, encodeURIComponent(String(id))); - const localVarUrlObj = url.parse(localVarPath, true); - const localVarRequestOptions = Object.assign({ method: 'GET' }, options); - const localVarHeaderParameter = {} as any; - const localVarQueryParameter = {} as any; - - // authentication Bearer required - if (configuration && configuration.apiKey) { - const localVarApiKeyValue = typeof configuration.apiKey === 'function' - ? configuration.apiKey("authorization") - : configuration.apiKey; - localVarHeaderParameter["authorization"] = localVarApiKeyValue; - } - - localVarUrlObj.query = Object.assign({}, localVarUrlObj.query, localVarQueryParameter, options.query); - // fix override query string Detail: https://stackoverflow.com/a/7517673/1077943 - delete localVarUrlObj.search; - localVarRequestOptions.headers = Object.assign({}, localVarHeaderParameter, options.headers); - - return { - url: url.format(localVarUrlObj), - options: localVarRequestOptions, - }; - }, - /** - * - * @param {string} id - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - getTemplate(id: string, options: any = {}): FetchArgs { - // verify required parameter 'id' is not null or undefined - if (id === null || id === undefined) { - throw new RequiredError('id','Required parameter id was null or undefined when calling getTemplate.'); - } - const localVarPath = `/apis/v1beta1/pipelines/{id}/templates` - .replace(`{${"id"}}`, encodeURIComponent(String(id))); - const localVarUrlObj = url.parse(localVarPath, true); - const localVarRequestOptions = Object.assign({ method: 'GET' }, options); - const localVarHeaderParameter = {} as any; - const localVarQueryParameter = {} as any; - - // authentication Bearer required - if (configuration && configuration.apiKey) { - const localVarApiKeyValue = typeof configuration.apiKey === 'function' - ? configuration.apiKey("authorization") - : configuration.apiKey; - localVarHeaderParameter["authorization"] = localVarApiKeyValue; - } - - localVarUrlObj.query = Object.assign({}, localVarUrlObj.query, localVarQueryParameter, options.query); - // fix override query string Detail: https://stackoverflow.com/a/7517673/1077943 - delete localVarUrlObj.search; - localVarRequestOptions.headers = Object.assign({}, localVarHeaderParameter, options.headers); - - return { - url: url.format(localVarUrlObj), - options: localVarRequestOptions, - }; - }, - /** - * - * @param {string} [page_token] - * @param {number} [page_size] - * @param {string} [sort_by] Can be format of \"field_name\", \"field_name asc\" or \"field_name des\" Ascending by default. - * @param {string} [filter] A base-64 encoded, JSON-serialized Filter protocol buffer (see filter.proto). - * @param {*} [options] Override http request option. 
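// Illustrative sketch of what these creators produce: path parameters are
// substituted with encodeURIComponent, so reserved characters in an ID cannot
// break the URL, and a missing required parameter fails fast with RequiredError.
const args = PipelineServiceApiFetchParamCreator().getPipeline('a/b c');
// args.url === '/apis/v1beta1/pipelines/a%2Fb%20c'
// args.options.method === 'GET'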
- * @throws {RequiredError} - */ - listPipelines(page_token?: string, page_size?: number, sort_by?: string, filter?: string, options: any = {}): FetchArgs { - const localVarPath = `/apis/v1beta1/pipelines`; - const localVarUrlObj = url.parse(localVarPath, true); - const localVarRequestOptions = Object.assign({ method: 'GET' }, options); - const localVarHeaderParameter = {} as any; - const localVarQueryParameter = {} as any; - - // authentication Bearer required - if (configuration && configuration.apiKey) { - const localVarApiKeyValue = typeof configuration.apiKey === 'function' - ? configuration.apiKey("authorization") - : configuration.apiKey; - localVarHeaderParameter["authorization"] = localVarApiKeyValue; - } - - if (page_token !== undefined) { - localVarQueryParameter['page_token'] = page_token; - } - - if (page_size !== undefined) { - localVarQueryParameter['page_size'] = page_size; - } - - if (sort_by !== undefined) { - localVarQueryParameter['sort_by'] = sort_by; - } - - if (filter !== undefined) { - localVarQueryParameter['filter'] = filter; - } - - localVarUrlObj.query = Object.assign({}, localVarUrlObj.query, localVarQueryParameter, options.query); - // fix override query string Detail: https://stackoverflow.com/a/7517673/1077943 - delete localVarUrlObj.search; - localVarRequestOptions.headers = Object.assign({}, localVarHeaderParameter, options.headers); - - return { - url: url.format(localVarUrlObj), - options: localVarRequestOptions, - }; - }, - } +export const PipelineServiceApiFetchParamCreator = function(configuration?: Configuration) { + return { + /** + * + * @param {ApiPipeline} body + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + createPipeline(body: ApiPipeline, options: any = {}): FetchArgs { + // verify required parameter 'body' is not null or undefined + if (body === null || body === undefined) { + throw new RequiredError( + 'body', + 'Required parameter body was null or undefined when calling createPipeline.', + ); + } + const localVarPath = `/apis/v1beta1/pipelines`; + const localVarUrlObj = url.parse(localVarPath, true); + const localVarRequestOptions = Object.assign({ method: 'POST' }, options); + const localVarHeaderParameter = {} as any; + const localVarQueryParameter = {} as any; + + // authentication Bearer required + if (configuration && configuration.apiKey) { + const localVarApiKeyValue = + typeof configuration.apiKey === 'function' + ? configuration.apiKey('authorization') + : configuration.apiKey; + localVarHeaderParameter['authorization'] = localVarApiKeyValue; + } + + localVarHeaderParameter['Content-Type'] = 'application/json'; + + localVarUrlObj.query = Object.assign( + {}, + localVarUrlObj.query, + localVarQueryParameter, + options.query, + ); + // fix override query string Detail: https://stackoverflow.com/a/7517673/1077943 + delete localVarUrlObj.search; + localVarRequestOptions.headers = Object.assign({}, localVarHeaderParameter, options.headers); + const needsSerialization = + 'ApiPipeline' !== 'string' || + localVarRequestOptions.headers['Content-Type'] === 'application/json'; + localVarRequestOptions.body = needsSerialization ? JSON.stringify(body || {}) : body || ''; + + return { + url: url.format(localVarUrlObj), + options: localVarRequestOptions, + }; + }, + /** + * + * @param {string} id + * @param {*} [options] Override http request option. 
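// Illustrative sketch, assuming `config: Configuration` and `pipeline: ApiPipeline`
// are in scope: the FetchArgs produced above are transport-agnostic and can be
// dispatched with any FetchAPI-compatible function; the Fp layer further below
// does exactly this with portableFetch and BASE_PATH.
const fetchArgs = PipelineServiceApiFetchParamCreator(config).createPipeline(pipeline);
portableFetch(BASE_PATH + fetchArgs.url, fetchArgs.options).then(resp => resp.json());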
+ * @throws {RequiredError} + */ + deletePipeline(id: string, options: any = {}): FetchArgs { + // verify required parameter 'id' is not null or undefined + if (id === null || id === undefined) { + throw new RequiredError( + 'id', + 'Required parameter id was null or undefined when calling deletePipeline.', + ); + } + const localVarPath = `/apis/v1beta1/pipelines/{id}`.replace( + `{${'id'}}`, + encodeURIComponent(String(id)), + ); + const localVarUrlObj = url.parse(localVarPath, true); + const localVarRequestOptions = Object.assign({ method: 'DELETE' }, options); + const localVarHeaderParameter = {} as any; + const localVarQueryParameter = {} as any; + + // authentication Bearer required + if (configuration && configuration.apiKey) { + const localVarApiKeyValue = + typeof configuration.apiKey === 'function' + ? configuration.apiKey('authorization') + : configuration.apiKey; + localVarHeaderParameter['authorization'] = localVarApiKeyValue; + } + + localVarUrlObj.query = Object.assign( + {}, + localVarUrlObj.query, + localVarQueryParameter, + options.query, + ); + // fix override query string Detail: https://stackoverflow.com/a/7517673/1077943 + delete localVarUrlObj.search; + localVarRequestOptions.headers = Object.assign({}, localVarHeaderParameter, options.headers); + + return { + url: url.format(localVarUrlObj), + options: localVarRequestOptions, + }; + }, + /** + * + * @param {string} id + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + getPipeline(id: string, options: any = {}): FetchArgs { + // verify required parameter 'id' is not null or undefined + if (id === null || id === undefined) { + throw new RequiredError( + 'id', + 'Required parameter id was null or undefined when calling getPipeline.', + ); + } + const localVarPath = `/apis/v1beta1/pipelines/{id}`.replace( + `{${'id'}}`, + encodeURIComponent(String(id)), + ); + const localVarUrlObj = url.parse(localVarPath, true); + const localVarRequestOptions = Object.assign({ method: 'GET' }, options); + const localVarHeaderParameter = {} as any; + const localVarQueryParameter = {} as any; + + // authentication Bearer required + if (configuration && configuration.apiKey) { + const localVarApiKeyValue = + typeof configuration.apiKey === 'function' + ? configuration.apiKey('authorization') + : configuration.apiKey; + localVarHeaderParameter['authorization'] = localVarApiKeyValue; + } + + localVarUrlObj.query = Object.assign( + {}, + localVarUrlObj.query, + localVarQueryParameter, + options.query, + ); + // fix override query string Detail: https://stackoverflow.com/a/7517673/1077943 + delete localVarUrlObj.search; + localVarRequestOptions.headers = Object.assign({}, localVarHeaderParameter, options.headers); + + return { + url: url.format(localVarUrlObj), + options: localVarRequestOptions, + }; + }, + /** + * + * @param {string} id + * @param {*} [options] Override http request option. 
+ * @throws {RequiredError} + */ + getTemplate(id: string, options: any = {}): FetchArgs { + // verify required parameter 'id' is not null or undefined + if (id === null || id === undefined) { + throw new RequiredError( + 'id', + 'Required parameter id was null or undefined when calling getTemplate.', + ); + } + const localVarPath = `/apis/v1beta1/pipelines/{id}/templates`.replace( + `{${'id'}}`, + encodeURIComponent(String(id)), + ); + const localVarUrlObj = url.parse(localVarPath, true); + const localVarRequestOptions = Object.assign({ method: 'GET' }, options); + const localVarHeaderParameter = {} as any; + const localVarQueryParameter = {} as any; + + // authentication Bearer required + if (configuration && configuration.apiKey) { + const localVarApiKeyValue = + typeof configuration.apiKey === 'function' + ? configuration.apiKey('authorization') + : configuration.apiKey; + localVarHeaderParameter['authorization'] = localVarApiKeyValue; + } + + localVarUrlObj.query = Object.assign( + {}, + localVarUrlObj.query, + localVarQueryParameter, + options.query, + ); + // fix override query string Detail: https://stackoverflow.com/a/7517673/1077943 + delete localVarUrlObj.search; + localVarRequestOptions.headers = Object.assign({}, localVarHeaderParameter, options.headers); + + return { + url: url.format(localVarUrlObj), + options: localVarRequestOptions, + }; + }, + /** + * + * @param {string} [page_token] + * @param {number} [page_size] + * @param {string} [sort_by] Can be format of \"field_name\", \"field_name asc\" or \"field_name des\" Ascending by default. + * @param {string} [filter] A base-64 encoded, JSON-serialized Filter protocol buffer (see filter.proto). + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + listPipelines( + page_token?: string, + page_size?: number, + sort_by?: string, + filter?: string, + options: any = {}, + ): FetchArgs { + const localVarPath = `/apis/v1beta1/pipelines`; + const localVarUrlObj = url.parse(localVarPath, true); + const localVarRequestOptions = Object.assign({ method: 'GET' }, options); + const localVarHeaderParameter = {} as any; + const localVarQueryParameter = {} as any; + + // authentication Bearer required + if (configuration && configuration.apiKey) { + const localVarApiKeyValue = + typeof configuration.apiKey === 'function' + ? 
configuration.apiKey('authorization') + : configuration.apiKey; + localVarHeaderParameter['authorization'] = localVarApiKeyValue; + } + + if (page_token !== undefined) { + localVarQueryParameter['page_token'] = page_token; + } + + if (page_size !== undefined) { + localVarQueryParameter['page_size'] = page_size; + } + + if (sort_by !== undefined) { + localVarQueryParameter['sort_by'] = sort_by; + } + + if (filter !== undefined) { + localVarQueryParameter['filter'] = filter; + } + + localVarUrlObj.query = Object.assign( + {}, + localVarUrlObj.query, + localVarQueryParameter, + options.query, + ); + // fix override query string Detail: https://stackoverflow.com/a/7517673/1077943 + delete localVarUrlObj.search; + localVarRequestOptions.headers = Object.assign({}, localVarHeaderParameter, options.headers); + + return { + url: url.format(localVarUrlObj), + options: localVarRequestOptions, + }; + }, + }; }; /** @@ -460,224 +518,298 @@ export const PipelineServiceApiFetchParamCreator = function (configuration?: Con * @export */ export const PipelineServiceApiFp = function(configuration?: Configuration) { - return { - /** - * - * @param {ApiPipeline} body - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - createPipeline(body: ApiPipeline, options?: any): (fetch?: FetchAPI, basePath?: string) => Promise { - const localVarFetchArgs = PipelineServiceApiFetchParamCreator(configuration).createPipeline(body, options); - return (fetch: FetchAPI = portableFetch, basePath: string = BASE_PATH) => { - return fetch(basePath + localVarFetchArgs.url, localVarFetchArgs.options).then((response) => { - if (response.status >= 200 && response.status < 300) { - return response.json(); - } else { - throw response; - } - }); - }; - }, - /** - * - * @param {string} id - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - deletePipeline(id: string, options?: any): (fetch?: FetchAPI, basePath?: string) => Promise { - const localVarFetchArgs = PipelineServiceApiFetchParamCreator(configuration).deletePipeline(id, options); - return (fetch: FetchAPI = portableFetch, basePath: string = BASE_PATH) => { - return fetch(basePath + localVarFetchArgs.url, localVarFetchArgs.options).then((response) => { - if (response.status >= 200 && response.status < 300) { - return response.json(); - } else { - throw response; - } - }); - }; - }, - /** - * - * @param {string} id - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - getPipeline(id: string, options?: any): (fetch?: FetchAPI, basePath?: string) => Promise { - const localVarFetchArgs = PipelineServiceApiFetchParamCreator(configuration).getPipeline(id, options); - return (fetch: FetchAPI = portableFetch, basePath: string = BASE_PATH) => { - return fetch(basePath + localVarFetchArgs.url, localVarFetchArgs.options).then((response) => { - if (response.status >= 200 && response.status < 300) { - return response.json(); - } else { - throw response; - } - }); - }; - }, - /** - * - * @param {string} id - * @param {*} [options] Override http request option. 
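// Illustrative sketch of paging with the query parameters above, assuming the
// generated ApiListPipelinesResponse exposes a next_page_token field (mirroring
// ApiListRunsResponse in the run client below):
async function listAllPipelines(api: PipelineServiceApi): Promise<void> {
  let token: string | undefined = undefined;
  do {
    const page = await api.listPipelines(token, 100, 'created_at desc');
    token = page.next_page_token;
  } while (token);
}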
- * @throws {RequiredError} - */ - getTemplate(id: string, options?: any): (fetch?: FetchAPI, basePath?: string) => Promise { - const localVarFetchArgs = PipelineServiceApiFetchParamCreator(configuration).getTemplate(id, options); - return (fetch: FetchAPI = portableFetch, basePath: string = BASE_PATH) => { - return fetch(basePath + localVarFetchArgs.url, localVarFetchArgs.options).then((response) => { - if (response.status >= 200 && response.status < 300) { - return response.json(); - } else { - throw response; - } - }); - }; - }, - /** - * - * @param {string} [page_token] - * @param {number} [page_size] - * @param {string} [sort_by] Can be format of \"field_name\", \"field_name asc\" or \"field_name des\" Ascending by default. - * @param {string} [filter] A base-64 encoded, JSON-serialized Filter protocol buffer (see filter.proto). - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - listPipelines(page_token?: string, page_size?: number, sort_by?: string, filter?: string, options?: any): (fetch?: FetchAPI, basePath?: string) => Promise { - const localVarFetchArgs = PipelineServiceApiFetchParamCreator(configuration).listPipelines(page_token, page_size, sort_by, filter, options); - return (fetch: FetchAPI = portableFetch, basePath: string = BASE_PATH) => { - return fetch(basePath + localVarFetchArgs.url, localVarFetchArgs.options).then((response) => { - if (response.status >= 200 && response.status < 300) { - return response.json(); - } else { - throw response; - } - }); - }; - }, - } + return { + /** + * + * @param {ApiPipeline} body + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + createPipeline( + body: ApiPipeline, + options?: any, + ): (fetch?: FetchAPI, basePath?: string) => Promise { + const localVarFetchArgs = PipelineServiceApiFetchParamCreator(configuration).createPipeline( + body, + options, + ); + return (fetch: FetchAPI = portableFetch, basePath: string = BASE_PATH) => { + return fetch(basePath + localVarFetchArgs.url, localVarFetchArgs.options).then(response => { + if (response.status >= 200 && response.status < 300) { + return response.json(); + } else { + throw response; + } + }); + }; + }, + /** + * + * @param {string} id + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + deletePipeline( + id: string, + options?: any, + ): (fetch?: FetchAPI, basePath?: string) => Promise { + const localVarFetchArgs = PipelineServiceApiFetchParamCreator(configuration).deletePipeline( + id, + options, + ); + return (fetch: FetchAPI = portableFetch, basePath: string = BASE_PATH) => { + return fetch(basePath + localVarFetchArgs.url, localVarFetchArgs.options).then(response => { + if (response.status >= 200 && response.status < 300) { + return response.json(); + } else { + throw response; + } + }); + }; + }, + /** + * + * @param {string} id + * @param {*} [options] Override http request option. 
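// Illustrative sketch, assuming `config: Configuration` is in scope: each Fp
// method binds its parameters and returns a thunk over (fetch, basePath), both
// optional. On a 2xx response the thunk resolves the parsed JSON body;
// otherwise it throws the raw Response. The ID and base path are hypothetical.
const send = PipelineServiceApiFp(config).deletePipeline('pipeline-id');
send(portableFetch, 'http://localhost:3001')
  .then(() => console.log('deleted'))
  .catch(response => console.error('HTTP error', response.status));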
+ * @throws {RequiredError} + */ + getPipeline( + id: string, + options?: any, + ): (fetch?: FetchAPI, basePath?: string) => Promise { + const localVarFetchArgs = PipelineServiceApiFetchParamCreator(configuration).getPipeline( + id, + options, + ); + return (fetch: FetchAPI = portableFetch, basePath: string = BASE_PATH) => { + return fetch(basePath + localVarFetchArgs.url, localVarFetchArgs.options).then(response => { + if (response.status >= 200 && response.status < 300) { + return response.json(); + } else { + throw response; + } + }); + }; + }, + /** + * + * @param {string} id + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + getTemplate( + id: string, + options?: any, + ): (fetch?: FetchAPI, basePath?: string) => Promise { + const localVarFetchArgs = PipelineServiceApiFetchParamCreator(configuration).getTemplate( + id, + options, + ); + return (fetch: FetchAPI = portableFetch, basePath: string = BASE_PATH) => { + return fetch(basePath + localVarFetchArgs.url, localVarFetchArgs.options).then(response => { + if (response.status >= 200 && response.status < 300) { + return response.json(); + } else { + throw response; + } + }); + }; + }, + /** + * + * @param {string} [page_token] + * @param {number} [page_size] + * @param {string} [sort_by] Can be format of \"field_name\", \"field_name asc\" or \"field_name des\" Ascending by default. + * @param {string} [filter] A base-64 encoded, JSON-serialized Filter protocol buffer (see filter.proto). + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + listPipelines( + page_token?: string, + page_size?: number, + sort_by?: string, + filter?: string, + options?: any, + ): (fetch?: FetchAPI, basePath?: string) => Promise { + const localVarFetchArgs = PipelineServiceApiFetchParamCreator(configuration).listPipelines( + page_token, + page_size, + sort_by, + filter, + options, + ); + return (fetch: FetchAPI = portableFetch, basePath: string = BASE_PATH) => { + return fetch(basePath + localVarFetchArgs.url, localVarFetchArgs.options).then(response => { + if (response.status >= 200 && response.status < 300) { + return response.json(); + } else { + throw response; + } + }); + }; + }, + }; }; /** * PipelineServiceApi - factory interface * @export */ -export const PipelineServiceApiFactory = function (configuration?: Configuration, fetch?: FetchAPI, basePath?: string) { - return { - /** - * - * @param {ApiPipeline} body - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - createPipeline(body: ApiPipeline, options?: any) { - return PipelineServiceApiFp(configuration).createPipeline(body, options)(fetch, basePath); - }, - /** - * - * @param {string} id - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - deletePipeline(id: string, options?: any) { - return PipelineServiceApiFp(configuration).deletePipeline(id, options)(fetch, basePath); - }, - /** - * - * @param {string} id - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - getPipeline(id: string, options?: any) { - return PipelineServiceApiFp(configuration).getPipeline(id, options)(fetch, basePath); - }, - /** - * - * @param {string} id - * @param {*} [options] Override http request option. 
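// Illustrative sketch: the factory pre-binds fetch and basePath once, yielding
// plain functions for call sites that don't need the class further below. The
// base path shown is hypothetical.
const pipelineService = PipelineServiceApiFactory(new Configuration(), portableFetch, '/api');
pipelineService.getTemplate('pipeline-id').then(res => console.log(res));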
- * @throws {RequiredError} - */ - getTemplate(id: string, options?: any) { - return PipelineServiceApiFp(configuration).getTemplate(id, options)(fetch, basePath); - }, - /** - * - * @param {string} [page_token] - * @param {number} [page_size] - * @param {string} [sort_by] Can be format of \"field_name\", \"field_name asc\" or \"field_name des\" Ascending by default. - * @param {string} [filter] A base-64 encoded, JSON-serialized Filter protocol buffer (see filter.proto). - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - listPipelines(page_token?: string, page_size?: number, sort_by?: string, filter?: string, options?: any) { - return PipelineServiceApiFp(configuration).listPipelines(page_token, page_size, sort_by, filter, options)(fetch, basePath); - }, - }; -}; - -/** - * PipelineServiceApi - object-oriented interface - * @export - * @class PipelineServiceApi - * @extends {BaseAPI} - */ -export class PipelineServiceApi extends BaseAPI { +export const PipelineServiceApiFactory = function( + configuration?: Configuration, + fetch?: FetchAPI, + basePath?: string, +) { + return { /** - * - * @param {} body + * + * @param {ApiPipeline} body * @param {*} [options] Override http request option. * @throws {RequiredError} - * @memberof PipelineServiceApi */ - public createPipeline(body: ApiPipeline, options?: any) { - return PipelineServiceApiFp(this.configuration).createPipeline(body, options)(this.fetch, this.basePath); - } - + createPipeline(body: ApiPipeline, options?: any) { + return PipelineServiceApiFp(configuration).createPipeline(body, options)(fetch, basePath); + }, /** - * - * @param {} id + * + * @param {string} id * @param {*} [options] Override http request option. * @throws {RequiredError} - * @memberof PipelineServiceApi */ - public deletePipeline(id: string, options?: any) { - return PipelineServiceApiFp(this.configuration).deletePipeline(id, options)(this.fetch, this.basePath); - } - + deletePipeline(id: string, options?: any) { + return PipelineServiceApiFp(configuration).deletePipeline(id, options)(fetch, basePath); + }, /** - * - * @param {} id + * + * @param {string} id * @param {*} [options] Override http request option. * @throws {RequiredError} - * @memberof PipelineServiceApi */ - public getPipeline(id: string, options?: any) { - return PipelineServiceApiFp(this.configuration).getPipeline(id, options)(this.fetch, this.basePath); - } - + getPipeline(id: string, options?: any) { + return PipelineServiceApiFp(configuration).getPipeline(id, options)(fetch, basePath); + }, /** - * - * @param {} id + * + * @param {string} id * @param {*} [options] Override http request option. * @throws {RequiredError} - * @memberof PipelineServiceApi */ - public getTemplate(id: string, options?: any) { - return PipelineServiceApiFp(this.configuration).getTemplate(id, options)(this.fetch, this.basePath); - } - + getTemplate(id: string, options?: any) { + return PipelineServiceApiFp(configuration).getTemplate(id, options)(fetch, basePath); + }, /** - * - * @param {} [page_token] - * @param {} [page_size] - * @param {} [sort_by] Can be format of \"field_name\", \"field_name asc\" or \"field_name des\" Ascending by default. - * @param {} [filter] A base-64 encoded, JSON-serialized Filter protocol buffer (see filter.proto). + * + * @param {string} [page_token] + * @param {number} [page_size] + * @param {string} [sort_by] Can be format of \"field_name\", \"field_name asc\" or \"field_name des\" Ascending by default. 
+ * @param {string} [filter] A base-64 encoded, JSON-serialized Filter protocol buffer (see filter.proto). * @param {*} [options] Override http request option. * @throws {RequiredError} - * @memberof PipelineServiceApi */ - public listPipelines(page_token?: string, page_size?: number, sort_by?: string, filter?: string, options?: any) { - return PipelineServiceApiFp(this.configuration).listPipelines(page_token, page_size, sort_by, filter, options)(this.fetch, this.basePath); - } + listPipelines( + page_token?: string, + page_size?: number, + sort_by?: string, + filter?: string, + options?: any, + ) { + return PipelineServiceApiFp(configuration).listPipelines( + page_token, + page_size, + sort_by, + filter, + options, + )(fetch, basePath); + }, + }; +}; +/** + * PipelineServiceApi - object-oriented interface + * @export + * @class PipelineServiceApi + * @extends {BaseAPI} + */ +export class PipelineServiceApi extends BaseAPI { + /** + * + * @param {ApiPipeline} body + * @param {*} [options] Override http request option. + * @throws {RequiredError} + * @memberof PipelineServiceApi + */ + public createPipeline(body: ApiPipeline, options?: any) { + return PipelineServiceApiFp(this.configuration).createPipeline(body, options)( + this.fetch, + this.basePath, + ); + } + + /** + * + * @param {string} id + * @param {*} [options] Override http request option. + * @throws {RequiredError} + * @memberof PipelineServiceApi + */ + public deletePipeline(id: string, options?: any) { + return PipelineServiceApiFp(this.configuration).deletePipeline(id, options)( + this.fetch, + this.basePath, + ); + } + + /** + * + * @param {string} id + * @param {*} [options] Override http request option. + * @throws {RequiredError} + * @memberof PipelineServiceApi + */ + public getPipeline(id: string, options?: any) { + return PipelineServiceApiFp(this.configuration).getPipeline(id, options)( + this.fetch, + this.basePath, + ); + } + + /** + * + * @param {string} id + * @param {*} [options] Override http request option. + * @throws {RequiredError} + * @memberof PipelineServiceApi + */ + public getTemplate(id: string, options?: any) { + return PipelineServiceApiFp(this.configuration).getTemplate(id, options)( + this.fetch, + this.basePath, + ); + } + + /** + * + * @param {string} [page_token] + * @param {number} [page_size] + * @param {string} [sort_by] Can be format of \"field_name\", \"field_name asc\" or \"field_name des\" Ascending by default. + * @param {string} [filter] A base-64 encoded, JSON-serialized Filter protocol buffer (see filter.proto). + * @param {*} [options] Override http request option. + * @throws {RequiredError} + * @memberof PipelineServiceApi + */ + public listPipelines( + page_token?: string, + page_size?: number, + sort_by?: string, + filter?: string, + options?: any, + ) { + return PipelineServiceApiFp(this.configuration).listPipelines( + page_token, + page_size, + sort_by, + filter, + options, + )(this.fetch, this.basePath); + } } - diff --git a/frontend/src/apis/pipeline/configuration.ts b/frontend/src/apis/pipeline/configuration.ts index 575aa7f40e93..c0ded69e6793 100644 --- a/frontend/src/apis/pipeline/configuration.ts +++ b/frontend/src/apis/pipeline/configuration.ts @@ -4,63 +4,62 @@ * No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen) * * OpenAPI spec version: version not set - * + * * * NOTE: This class is auto generated by the swagger code generator program. 
* https://github.com/swagger-api/swagger-codegen.git * Do not edit the class manually. */ - export interface ConfigurationParameters { - apiKey?: string | ((name: string) => string); - username?: string; - password?: string; - accessToken?: string | ((name: string, scopes?: string[]) => string); - basePath?: string; + apiKey?: string | ((name: string) => string); + username?: string; + password?: string; + accessToken?: string | ((name: string, scopes?: string[]) => string); + basePath?: string; } export class Configuration { - /** - * parameter for apiKey security - * @param name security name - * @memberof Configuration - */ - apiKey?: string | ((name: string) => string); - /** - * parameter for basic security - * - * @type {string} - * @memberof Configuration - */ - username?: string; - /** - * parameter for basic security - * - * @type {string} - * @memberof Configuration - */ - password?: string; - /** - * parameter for oauth2 security - * @param name security name - * @param scopes oauth2 scope - * @memberof Configuration - */ - accessToken?: string | ((name: string, scopes?: string[]) => string); - /** - * override base path - * - * @type {string} - * @memberof Configuration - */ - basePath?: string; + /** + * parameter for apiKey security + * @param name security name + * @memberof Configuration + */ + apiKey?: string | ((name: string) => string); + /** + * parameter for basic security + * + * @type {string} + * @memberof Configuration + */ + username?: string; + /** + * parameter for basic security + * + * @type {string} + * @memberof Configuration + */ + password?: string; + /** + * parameter for oauth2 security + * @param name security name + * @param scopes oauth2 scope + * @memberof Configuration + */ + accessToken?: string | ((name: string, scopes?: string[]) => string); + /** + * override base path + * + * @type {string} + * @memberof Configuration + */ + basePath?: string; - constructor(param: ConfigurationParameters = {}) { - this.apiKey = param.apiKey; - this.username = param.username; - this.password = param.password; - this.accessToken = param.accessToken; - this.basePath = param.basePath; - } + constructor(param: ConfigurationParameters = {}) { + this.apiKey = param.apiKey; + this.username = param.username; + this.password = param.password; + this.accessToken = param.accessToken; + this.basePath = param.basePath; + } } diff --git a/frontend/src/apis/pipeline/custom.d.ts b/frontend/src/apis/pipeline/custom.d.ts index 02f969575e37..4c611cc3216e 100644 --- a/frontend/src/apis/pipeline/custom.d.ts +++ b/frontend/src/apis/pipeline/custom.d.ts @@ -1 +1,2 @@ -declare module 'portable-fetch'; \ No newline at end of file +declare module 'portable-fetch'; +declare module 'url'; diff --git a/frontend/src/apis/pipeline/index.ts b/frontend/src/apis/pipeline/index.ts index 8494a2d56fdb..b51112ddb2e3 100644 --- a/frontend/src/apis/pipeline/index.ts +++ b/frontend/src/apis/pipeline/index.ts @@ -4,13 +4,12 @@ * No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen) * * OpenAPI spec version: version not set - * + * * * NOTE: This class is auto generated by the swagger code generator program. * https://github.com/swagger-api/swagger-codegen.git * Do not edit the class manually. 
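// Illustrative sketch: apiKey accepts either a literal header value or a
// resolver keyed by header name. The generated creators call it with
// 'authorization', so a function value can mint a fresh bearer token per
// request (getToken below is a hypothetical token source).
const configuration = new Configuration({
  apiKey: (name: string) => (name === 'authorization' ? `Bearer ${getToken()}` : ''),
});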
*/ - -export * from "./api"; -export * from "./configuration"; +export * from './api'; +export * from './configuration'; diff --git a/frontend/src/apis/run/.swagger-codegen/VERSION b/frontend/src/apis/run/.swagger-codegen/VERSION index a6254504e401..48a6b508dc9f 100644 --- a/frontend/src/apis/run/.swagger-codegen/VERSION +++ b/frontend/src/apis/run/.swagger-codegen/VERSION @@ -1 +1 @@ -2.3.1 \ No newline at end of file +2.4.7 \ No newline at end of file diff --git a/frontend/src/apis/run/api.ts b/frontend/src/apis/run/api.ts index e75829739e90..fbd58d4f297e 100644 --- a/frontend/src/apis/run/api.ts +++ b/frontend/src/apis/run/api.ts @@ -5,29 +5,28 @@ * No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen) * * OpenAPI spec version: version not set - * + * * * NOTE: This class is auto generated by the swagger code generator program. * https://github.com/swagger-api/swagger-codegen.git * Do not edit the class manually. */ +import * as url from 'url'; +import * as portableFetch from 'portable-fetch'; +import { Configuration } from './configuration'; -import * as url from "url"; -import * as portableFetch from "portable-fetch"; -import { Configuration } from "./configuration"; - -const BASE_PATH = "http://localhost".replace(/\/+$/, ""); +const BASE_PATH = 'http://localhost'.replace(/\/+$/, ''); /** * * @export */ export const COLLECTION_FORMATS = { - csv: ",", - ssv: " ", - tsv: "\t", - pipes: "|", + csv: ',', + ssv: ' ', + tsv: '\t', + pipes: '|', }; /** @@ -36,424 +35,428 @@ export const COLLECTION_FORMATS = { * @interface FetchAPI */ export interface FetchAPI { - (url: string, init?: any): Promise; + (url: string, init?: any): Promise; } /** - * + * * @export * @interface FetchArgs */ export interface FetchArgs { - url: string; - options: any; + url: string; + options: any; } /** - * + * * @export * @class BaseAPI */ export class BaseAPI { - protected configuration: Configuration; - - constructor(configuration?: Configuration, protected basePath: string = BASE_PATH, protected fetch: FetchAPI = portableFetch) { - if (configuration) { - this.configuration = configuration; - this.basePath = configuration.basePath || this.basePath; - } + protected configuration: Configuration; + + constructor( + configuration?: Configuration, + protected basePath: string = BASE_PATH, + protected fetch: FetchAPI = portableFetch, + ) { + if (configuration) { + this.configuration = configuration; + this.basePath = configuration.basePath || this.basePath; } -}; + } +} /** - * + * * @export * @class RequiredError * @extends {Error} */ export class RequiredError extends Error { - name: "RequiredError" - constructor(public field: string, msg?: string) { - super(msg); - } + name: 'RequiredError'; + constructor(public field: string, msg?: string) { + super(msg); + } } /** - * + * * @export * @interface ApiListRunsResponse */ export interface ApiListRunsResponse { - /** - * - * @type {Array<ApiRun>} - * @memberof ApiListRunsResponse - */ - runs?: Array; - /** - * - * @type {number} - * @memberof ApiListRunsResponse - */ - total_size?: number; - /** - * - * @type {string} - * @memberof ApiListRunsResponse - */ - next_page_token?: string; + /** + * + * @type {Array} + * @memberof ApiListRunsResponse + */ + runs?: Array; + /** + * + * @type {number} + * @memberof ApiListRunsResponse + */ + total_size?: number; + /** + * + * @type {string} + * @memberof ApiListRunsResponse + */ + next_page_token?: string; } /** - * + * * @export * @interface ApiParameter */ export interface 
ApiParameter { - /** - * - * @type {string} - * @memberof ApiParameter - */ - name?: string; - /** - * - * @type {string} - * @memberof ApiParameter - */ - value?: string; + /** + * + * @type {string} + * @memberof ApiParameter + */ + name?: string; + /** + * + * @type {string} + * @memberof ApiParameter + */ + value?: string; } /** - * + * * @export * @interface ApiPipelineRuntime */ export interface ApiPipelineRuntime { - /** - * Output. The runtime JSON manifest of the pipeline, including the status of pipeline steps and fields need for UI visualization etc. - * @type {string} - * @memberof ApiPipelineRuntime - */ - pipeline_manifest?: string; - /** - * Output. The runtime JSON manifest of the argo workflow. This is deprecated after pipeline_runtime_manifest is in use. - * @type {string} - * @memberof ApiPipelineRuntime - */ - workflow_manifest?: string; + /** + * Output. The runtime JSON manifest of the pipeline, including the status of pipeline steps and fields need for UI visualization etc. + * @type {string} + * @memberof ApiPipelineRuntime + */ + pipeline_manifest?: string; + /** + * Output. The runtime JSON manifest of the argo workflow. This is deprecated after pipeline_runtime_manifest is in use. + * @type {string} + * @memberof ApiPipelineRuntime + */ + workflow_manifest?: string; } /** - * + * * @export * @interface ApiPipelineSpec */ export interface ApiPipelineSpec { - /** - * Optional input field. The ID of the pipeline user uploaded before. - * @type {string} - * @memberof ApiPipelineSpec - */ - pipeline_id?: string; - /** - * Optional output field. The name of the pipeline. Not empty if the pipeline id is not empty. - * @type {string} - * @memberof ApiPipelineSpec - */ - pipeline_name?: string; - /** - * Optional input field. The marshalled raw argo JSON workflow. This will be deprecated when pipeline_manifest is in use. - * @type {string} - * @memberof ApiPipelineSpec - */ - workflow_manifest?: string; - /** - * Optional input field. The raw pipeline JSON spec. - * @type {string} - * @memberof ApiPipelineSpec - */ - pipeline_manifest?: string; - /** - * The parameter user provide to inject to the pipeline JSON. If a default value of a parameter exist in the JSON, the value user provided here will replace. - * @type {Array<ApiParameter>} - * @memberof ApiPipelineSpec - */ - parameters?: Array; + /** + * Optional input field. The ID of the pipeline user uploaded before. + * @type {string} + * @memberof ApiPipelineSpec + */ + pipeline_id?: string; + /** + * Optional output field. The name of the pipeline. Not empty if the pipeline id is not empty. + * @type {string} + * @memberof ApiPipelineSpec + */ + pipeline_name?: string; + /** + * Optional input field. The marshalled raw argo JSON workflow. This will be deprecated when pipeline_manifest is in use. + * @type {string} + * @memberof ApiPipelineSpec + */ + workflow_manifest?: string; + /** + * Optional input field. The raw pipeline JSON spec. + * @type {string} + * @memberof ApiPipelineSpec + */ + pipeline_manifest?: string; + /** + * The parameter user provide to inject to the pipeline JSON. If a default value of a parameter exist in the JSON, the value user provided here will replace. + * @type {Array} + * @memberof ApiPipelineSpec + */ + parameters?: Array; } /** - * + * * @export * @interface ApiReadArtifactResponse */ export interface ApiReadArtifactResponse { - /** - * The bytes of the artifact content. 
- * @type {string} - * @memberof ApiReadArtifactResponse - */ - data?: string; + /** + * The bytes of the artifact content. + * @type {string} + * @memberof ApiReadArtifactResponse + */ + data?: string; } /** - * + * * @export * @enum {string} */ export enum ApiRelationship { - UNKNOWNRELATIONSHIP = 'UNKNOWN_RELATIONSHIP', - OWNER = 'OWNER', - CREATOR = 'CREATOR' + UNKNOWNRELATIONSHIP = 'UNKNOWN_RELATIONSHIP', + OWNER = 'OWNER', + CREATOR = 'CREATOR', } /** - * + * * @export * @interface ApiReportRunMetricsRequest */ export interface ApiReportRunMetricsRequest { - /** - * Required. The parent run ID of the metric. - * @type {string} - * @memberof ApiReportRunMetricsRequest - */ - run_id?: string; - /** - * List of metrics to report. - * @type {Array<ApiRunMetric>} - * @memberof ApiReportRunMetricsRequest - */ - metrics?: Array; + /** + * Required. The parent run ID of the metric. + * @type {string} + * @memberof ApiReportRunMetricsRequest + */ + run_id?: string; + /** + * List of metrics to report. + * @type {Array} + * @memberof ApiReportRunMetricsRequest + */ + metrics?: Array; } /** - * + * * @export * @interface ApiReportRunMetricsResponse */ export interface ApiReportRunMetricsResponse { - /** - * - * @type {Array<ReportRunMetricsResponseReportRunMetricResult>} - * @memberof ApiReportRunMetricsResponse - */ - results?: Array; + /** + * + * @type {Array} + * @memberof ApiReportRunMetricsResponse + */ + results?: Array; } /** - * + * * @export * @interface ApiResourceKey */ export interface ApiResourceKey { - /** - * The type of the resource that referred to. - * @type {ApiResourceType} - * @memberof ApiResourceKey - */ - type?: ApiResourceType; - /** - * The ID of the resource that referred to. - * @type {string} - * @memberof ApiResourceKey - */ - id?: string; + /** + * The type of the resource that referred to. + * @type {ApiResourceType} + * @memberof ApiResourceKey + */ + type?: ApiResourceType; + /** + * The ID of the resource that referred to. + * @type {string} + * @memberof ApiResourceKey + */ + id?: string; } /** - * + * * @export * @interface ApiResourceReference */ export interface ApiResourceReference { - /** - * - * @type {ApiResourceKey} - * @memberof ApiResourceReference - */ - key?: ApiResourceKey; - /** - * The name of the resource that referred to. - * @type {string} - * @memberof ApiResourceReference - */ - name?: string; - /** - * Required field. The relationship from referred resource to the object. - * @type {ApiRelationship} - * @memberof ApiResourceReference - */ - relationship?: ApiRelationship; + /** + * + * @type {ApiResourceKey} + * @memberof ApiResourceReference + */ + key?: ApiResourceKey; + /** + * The name of the resource that referred to. + * @type {string} + * @memberof ApiResourceReference + */ + name?: string; + /** + * Required field. The relationship from referred resource to the object. + * @type {ApiRelationship} + * @memberof ApiResourceReference + */ + relationship?: ApiRelationship; } /** - * + * * @export * @enum {string} */ export enum ApiResourceType { - UNKNOWNRESOURCETYPE = 'UNKNOWN_RESOURCE_TYPE', - EXPERIMENT = 'EXPERIMENT', - JOB = 'JOB' + UNKNOWNRESOURCETYPE = 'UNKNOWN_RESOURCE_TYPE', + EXPERIMENT = 'EXPERIMENT', + JOB = 'JOB', } /** - * + * * @export * @interface ApiRun */ export interface ApiRun { - /** - * Output. Unique run ID. Generated by API server. - * @type {string} - * @memberof ApiRun - */ - id?: string; - /** - * Required input field. Name provided by user, or auto generated if run is created by scheduled job. Not unique. 
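// Illustrative sketch built from the reference types above: tying a run to the
// experiment that owns it. The experiment ID is hypothetical.
const ownerRef: ApiResourceReference = {
  key: { type: ApiResourceType.EXPERIMENT, id: 'experiment-id' },
  relationship: ApiRelationship.OWNER,
};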
- * @type {string} - * @memberof ApiRun - */ - name?: string; - /** - * - * @type {RunStorageState} - * @memberof ApiRun - */ - storage_state?: RunStorageState; - /** - * - * @type {string} - * @memberof ApiRun - */ - description?: string; - /** - * Required input field. Describing what the pipeline manifest and parameters to use for the run. - * @type {ApiPipelineSpec} - * @memberof ApiRun - */ - pipeline_spec?: ApiPipelineSpec; - /** - * Optional input field. Specify which resource this run belongs to. - * @type {Array<ApiResourceReference>} - * @memberof ApiRun - */ - resource_references?: Array<ApiResourceReference>; - /** - * Output. The time that the run created. - * @type {Date} - * @memberof ApiRun - */ - created_at?: Date; - /** - * Output. When this run is scheduled to run. This could be different from created_at. For example, if a run is from a backfilling job that was supposed to run 2 month ago, the scheduled_at is 2 month ago, v.s. created_at is the current time. - * @type {Date} - * @memberof ApiRun - */ - scheduled_at?: Date; - /** - * Output. The time this run is finished. - * @type {Date} - * @memberof ApiRun - */ - finished_at?: Date; - /** - * - * @type {string} - * @memberof ApiRun - */ - status?: string; - /** - * In case any error happens retrieving a run field, only run ID and the error message is returned. Client has the flexibility of choosing how to handle error. This is especially useful during listing call. - * @type {string} - * @memberof ApiRun - */ - error?: string; - /** - * Output. The metrics of the run. The metrics are reported by ReportMetrics API. - * @type {Array<ApiRunMetric>} - * @memberof ApiRun - */ - metrics?: Array<ApiRunMetric>; + /** + * Output. Unique run ID. Generated by API server. + * @type {string} + * @memberof ApiRun + */ + id?: string; + /** + * Required input field. Name provided by the user, or auto-generated if the run is created by a scheduled job. Not unique. + * @type {string} + * @memberof ApiRun + */ + name?: string; + /** + * + * @type {RunStorageState} + * @memberof ApiRun + */ + storage_state?: RunStorageState; + /** + * + * @type {string} + * @memberof ApiRun + */ + description?: string; + /** + * Required input field. Describes the pipeline manifest and parameters to use for the run. + * @type {ApiPipelineSpec} + * @memberof ApiRun + */ + pipeline_spec?: ApiPipelineSpec; + /** + * Optional input field. Specify which resource this run belongs to. + * @type {Array<ApiResourceReference>} + * @memberof ApiRun + */ + resource_references?: Array<ApiResourceReference>; + /** + * Output. The time that the run was created. + * @type {Date} + * @memberof ApiRun + */ + created_at?: Date; + /** + * Output. When this run is scheduled to run. This could be different from created_at. For example, if a run is from a backfilling job that was supposed to run 2 months ago, the scheduled_at is 2 months ago, vs. created_at, which is the current time. + * @type {Date} + * @memberof ApiRun + */ + scheduled_at?: Date; + /** + * Output. The time this run finished. + * @type {Date} + * @memberof ApiRun + */ + finished_at?: Date; + /** + * + * @type {string} + * @memberof ApiRun + */ + status?: string; + /** + * In case any error happens retrieving a run field, only the run ID and the error message are returned. Clients have the flexibility of choosing how to handle the error. This is especially useful during listing calls. + * @type {string} + * @memberof ApiRun + */ + error?: string; + /** + * Output. The metrics of the run. The metrics are reported by the ReportMetrics API.
+ * @type {Array<ApiRunMetric>} + * @memberof ApiRun + */ + metrics?: Array<ApiRunMetric>; } /** - * + * * @export * @interface ApiRunDetail */ export interface ApiRunDetail { - /** - * - * @type {ApiRun} - * @memberof ApiRunDetail - */ - run?: ApiRun; - /** - * - * @type {ApiPipelineRuntime} - * @memberof ApiRunDetail - */ - pipeline_runtime?: ApiPipelineRuntime; + /** + * + * @type {ApiRun} + * @memberof ApiRunDetail + */ + run?: ApiRun; + /** + * + * @type {ApiPipelineRuntime} + * @memberof ApiRunDetail + */ + pipeline_runtime?: ApiPipelineRuntime; } /** - * + * * @export * @interface ApiRunMetric */ export interface ApiRunMetric { - /** - * Required. The user defined name of the metric. It must between 1 and 63 characters long and must conform to the following regular expression: `[a-z]([-a-z0-9]*[a-z0-9])?`. - * @type {string} - * @memberof ApiRunMetric - */ - name?: string; - /** - * Required. The runtime node ID which reports the metric. The node ID can be found in the RunDetail.workflow.Status. Metric with same (node_id, name) are considerd as duplicate. Only the first reporting will be recorded. Max length is 128. - * @type {string} - * @memberof ApiRunMetric - */ - node_id?: string; - /** - * The number value of the metric. - * @type {number} - * @memberof ApiRunMetric - */ - number_value?: number; - /** - * The display format of metric. - * @type {RunMetricFormat} - * @memberof ApiRunMetric - */ - format?: RunMetricFormat; + /** + * Required. The user-defined name of the metric. It must be between 1 and 63 characters long and must conform to the following regular expression: `[a-z]([-a-z0-9]*[a-z0-9])?`. + * @type {string} + * @memberof ApiRunMetric + */ + name?: string; + /** + * Required. The runtime node ID which reports the metric. The node ID can be found in the RunDetail.workflow.Status. Metrics with the same (node_id, name) are considered duplicates. Only the first reporting will be recorded. Max length is 128. + * @type {string} + * @memberof ApiRunMetric + */ + node_id?: string; + /** + * The number value of the metric. + * @type {number} + * @memberof ApiRunMetric + */ + number_value?: number; + /** + * The display format of the metric. + * @type {RunMetricFormat} + * @memberof ApiRunMetric + */ + format?: RunMetricFormat; } /** - * + * * @export * @interface ApiStatus */ export interface ApiStatus { - /** - * - * @type {string} - * @memberof ApiStatus - */ - error?: string; - /** - * - * @type {number} - * @memberof ApiStatus - */ - code?: number; - /** - * - * @type {Array<ProtobufAny>} - * @memberof ApiStatus - */ - details?: Array<ProtobufAny>; + /** + * + * @type {string} + * @memberof ApiStatus + */ + error?: string; + /** + * + * @type {number} + * @memberof ApiStatus + */ + code?: number; + /** + * + * @type {Array<ProtobufAny>} + * @memberof ApiStatus + */ + details?: Array<ProtobufAny>; } /** @@ -462,50 +465,50 @@ export interface ApiStatus { * @interface ProtobufAny */ export interface ProtobufAny { - /** - * A URL/resource name that uniquely identifies the type of the serialized protocol buffer message. The last segment of the URL's path must represent the fully qualified name of the type (as in `path/google.protobuf.Duration`). The name should be in a canonical form (e.g., leading \".\" is not accepted). In practice, teams usually precompile into the binary all types that they expect it to use in the context of Any. However, for URLs which use the scheme `http`, `https`, or no scheme, one can optionally set up a type server that maps type URLs to message definitions as follows: * If no scheme is provided, `https` is assumed.
* An HTTP GET on the URL must yield a [google.protobuf.Type][] value in binary format, or produce an error. * Applications are allowed to cache lookup results based on the URL, or have them precompiled into a binary to avoid any lookup. Therefore, binary compatibility needs to be preserved on changes to types. (Use versioned type names to manage breaking changes.) Note: this functionality is not currently available in the official protobuf release, and it is not used for type URLs beginning with type.googleapis.com. Schemes other than `http`, `https` (or the empty scheme) might be used with implementation specific semantics. - * @type {string} - * @memberof ProtobufAny - */ - type_url?: string; - /** - * Must be a valid serialized protocol buffer of the above specified type. - * @type {string} - * @memberof ProtobufAny - */ - value?: string; + /** + * A URL/resource name that uniquely identifies the type of the serialized protocol buffer message. The last segment of the URL's path must represent the fully qualified name of the type (as in `path/google.protobuf.Duration`). The name should be in a canonical form (e.g., leading \".\" is not accepted). In practice, teams usually precompile into the binary all types that they expect it to use in the context of Any. However, for URLs which use the scheme `http`, `https`, or no scheme, one can optionally set up a type server that maps type URLs to message definitions as follows: * If no scheme is provided, `https` is assumed. * An HTTP GET on the URL must yield a [google.protobuf.Type][] value in binary format, or produce an error. * Applications are allowed to cache lookup results based on the URL, or have them precompiled into a binary to avoid any lookup. Therefore, binary compatibility needs to be preserved on changes to types. (Use versioned type names to manage breaking changes.) Note: this functionality is not currently available in the official protobuf release, and it is not used for type URLs beginning with type.googleapis.com. Schemes other than `http`, `https` (or the empty scheme) might be used with implementation specific semantics. + * @type {string} + * @memberof ProtobufAny + */ + type_url?: string; + /** + * Must be a valid serialized protocol buffer of the above specified type. + * @type {string} + * @memberof ProtobufAny + */ + value?: string; } /** - * + * * @export * @interface ReportRunMetricsResponseReportRunMetricResult */ export interface ReportRunMetricsResponseReportRunMetricResult { - /** - * Output. The name of the metric. - * @type {string} - * @memberof ReportRunMetricsResponseReportRunMetricResult - */ - metric_name?: string; - /** - * Output. The ID of the node which reports the metric. - * @type {string} - * @memberof ReportRunMetricsResponseReportRunMetricResult - */ - metric_node_id?: string; - /** - * Output. The status of the metric reporting. - * @type {ReportRunMetricsResponseReportRunMetricResultStatus} - * @memberof ReportRunMetricsResponseReportRunMetricResult - */ - status?: ReportRunMetricsResponseReportRunMetricResultStatus; - /** - * Output. The detailed message of the error of the reporting. - * @type {string} - * @memberof ReportRunMetricsResponseReportRunMetricResult - */ - message?: string; + /** + * Output. The name of the metric. + * @type {string} + * @memberof ReportRunMetricsResponseReportRunMetricResult + */ + metric_name?: string; + /** + * Output. The ID of the node which reports the metric. 
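// Illustrative sketch, assuming `response` is an ApiReportRunMetricsResponse
// returned by a ReportRunMetrics call: results are reported per metric, so
// callers check each entry's status instead of a single overall code.
function logMetricFailures(response: ApiReportRunMetricsResponse): void {
  for (const result of response.results || []) {
    if (result.status !== ReportRunMetricsResponseReportRunMetricResultStatus.OK) {
      console.warn(`metric ${result.metric_name} on node ${result.metric_node_id}: ${result.message}`);
    }
  }
}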
+ * @type {string} + * @memberof ReportRunMetricsResponseReportRunMetricResult + */ + metric_node_id?: string; + /** + * Output. The status of the metric reporting. + * @type {ReportRunMetricsResponseReportRunMetricResultStatus} + * @memberof ReportRunMetricsResponseReportRunMetricResult + */ + status?: ReportRunMetricsResponseReportRunMetricResultStatus; + /** + * Output. The detailed message of the error of the reporting. + * @type {string} + * @memberof ReportRunMetricsResponseReportRunMetricResult + */ + message?: string; } /** @@ -514,11 +517,11 @@ export interface ReportRunMetricsResponseReportRunMetricResult { * @enum {string} */ export enum ReportRunMetricsResponseReportRunMetricResultStatus { - UNSPECIFIED = 'UNSPECIFIED', - OK = 'OK', - INVALIDARGUMENT = 'INVALID_ARGUMENT', - DUPLICATEREPORTING = 'DUPLICATE_REPORTING', - INTERNALERROR = 'INTERNAL_ERROR' + UNSPECIFIED = 'UNSPECIFIED', + OK = 'OK', + INVALIDARGUMENT = 'INVALID_ARGUMENT', + DUPLICATEREPORTING = 'DUPLICATE_REPORTING', + INTERNALERROR = 'INTERNAL_ERROR', } /** @@ -527,438 +530,568 @@ export enum ReportRunMetricsResponseReportRunMetricResultStatus { * @enum {string} */ export enum RunMetricFormat { - UNSPECIFIED = 'UNSPECIFIED', - RAW = 'RAW', - PERCENTAGE = 'PERCENTAGE' + UNSPECIFIED = 'UNSPECIFIED', + RAW = 'RAW', + PERCENTAGE = 'PERCENTAGE', } /** - * + * * @export * @enum {string} */ export enum RunStorageState { - AVAILABLE = 'STORAGESTATE_AVAILABLE', - ARCHIVED = 'STORAGESTATE_ARCHIVED' + AVAILABLE = 'STORAGESTATE_AVAILABLE', + ARCHIVED = 'STORAGESTATE_ARCHIVED', } - /** * RunServiceApi - fetch parameter creator * @export */ -export const RunServiceApiFetchParamCreator = function (configuration?: Configuration) { - return { - /** - * - * @param {string} id - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - archiveRun(id: string, options: any = {}): FetchArgs { - // verify required parameter 'id' is not null or undefined - if (id === null || id === undefined) { - throw new RequiredError('id','Required parameter id was null or undefined when calling archiveRun.'); - } - const localVarPath = `/apis/v1beta1/runs/{id}:archive` - .replace(`{${"id"}}`, encodeURIComponent(String(id))); - const localVarUrlObj = url.parse(localVarPath, true); - const localVarRequestOptions = Object.assign({ method: 'POST' }, options); - const localVarHeaderParameter = {} as any; - const localVarQueryParameter = {} as any; - - // authentication Bearer required - if (configuration && configuration.apiKey) { - const localVarApiKeyValue = typeof configuration.apiKey === 'function' - ? configuration.apiKey("authorization") - : configuration.apiKey; - localVarHeaderParameter["authorization"] = localVarApiKeyValue; - } - - localVarUrlObj.query = Object.assign({}, localVarUrlObj.query, localVarQueryParameter, options.query); - // fix override query string Detail: https://stackoverflow.com/a/7517673/1077943 - delete localVarUrlObj.search; - localVarRequestOptions.headers = Object.assign({}, localVarHeaderParameter, options.headers); - - return { - url: url.format(localVarUrlObj), - options: localVarRequestOptions, - }; - }, - /** - * - * @param {ApiRun} body - * @param {*} [options] Override http request option. 
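// Illustrative sketch of a metric that satisfies the constraints documented on
// ApiRunMetric above (lowercase name matching `[a-z]([-a-z0-9]*[a-z0-9])?`,
// node_id taken from the workflow status). The values are hypothetical.
const accuracyMetric: ApiRunMetric = {
  name: 'accuracy-score',
  node_id: 'workflow-node-1',
  number_value: 0.92,
  format: RunMetricFormat.PERCENTAGE,
};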
- * @throws {RequiredError} - */ - createRun(body: ApiRun, options: any = {}): FetchArgs { - // verify required parameter 'body' is not null or undefined - if (body === null || body === undefined) { - throw new RequiredError('body','Required parameter body was null or undefined when calling createRun.'); - } - const localVarPath = `/apis/v1beta1/runs`; - const localVarUrlObj = url.parse(localVarPath, true); - const localVarRequestOptions = Object.assign({ method: 'POST' }, options); - const localVarHeaderParameter = {} as any; - const localVarQueryParameter = {} as any; - - // authentication Bearer required - if (configuration && configuration.apiKey) { - const localVarApiKeyValue = typeof configuration.apiKey === 'function' - ? configuration.apiKey("authorization") - : configuration.apiKey; - localVarHeaderParameter["authorization"] = localVarApiKeyValue; - } - - localVarHeaderParameter['Content-Type'] = 'application/json'; - - localVarUrlObj.query = Object.assign({}, localVarUrlObj.query, localVarQueryParameter, options.query); - // fix override query string Detail: https://stackoverflow.com/a/7517673/1077943 - delete localVarUrlObj.search; - localVarRequestOptions.headers = Object.assign({}, localVarHeaderParameter, options.headers); - const needsSerialization = ("ApiRun" !== "string") || localVarRequestOptions.headers['Content-Type'] === 'application/json'; - localVarRequestOptions.body = needsSerialization ? JSON.stringify(body || {}) : (body || ""); - - return { - url: url.format(localVarUrlObj), - options: localVarRequestOptions, - }; - }, - /** - * - * @param {string} id - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - deleteRun(id: string, options: any = {}): FetchArgs { - // verify required parameter 'id' is not null or undefined - if (id === null || id === undefined) { - throw new RequiredError('id','Required parameter id was null or undefined when calling deleteRun.'); - } - const localVarPath = `/apis/v1beta1/runs/{id}` - .replace(`{${"id"}}`, encodeURIComponent(String(id))); - const localVarUrlObj = url.parse(localVarPath, true); - const localVarRequestOptions = Object.assign({ method: 'DELETE' }, options); - const localVarHeaderParameter = {} as any; - const localVarQueryParameter = {} as any; - - // authentication Bearer required - if (configuration && configuration.apiKey) { - const localVarApiKeyValue = typeof configuration.apiKey === 'function' - ? configuration.apiKey("authorization") - : configuration.apiKey; - localVarHeaderParameter["authorization"] = localVarApiKeyValue; - } - - localVarUrlObj.query = Object.assign({}, localVarUrlObj.query, localVarQueryParameter, options.query); - // fix override query string Detail: https://stackoverflow.com/a/7517673/1077943 - delete localVarUrlObj.search; - localVarRequestOptions.headers = Object.assign({}, localVarHeaderParameter, options.headers); - - return { - url: url.format(localVarUrlObj), - options: localVarRequestOptions, - }; - }, - /** - * - * @param {string} run_id - * @param {*} [options] Override http request option. 
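// Illustrative sketch, assuming the RunServiceApi class generated later in this
// file (it follows the same pattern as PipelineServiceApi above). The IDs are
// hypothetical; the shapes come from the interfaces above.
const run: ApiRun = {
  name: 'my-run',
  pipeline_spec: {
    pipeline_id: 'pipeline-id',
    parameters: [{ name: 'learning-rate', value: '0.01' }],
  },
  resource_references: [
    { key: { type: ApiResourceType.EXPERIMENT, id: 'experiment-id' }, relationship: ApiRelationship.OWNER },
  ],
};
new RunServiceApi().createRun(run).then(detail => console.log(detail.run && detail.run.status));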
- * @throws {RequiredError} - */ - getRun(run_id: string, options: any = {}): FetchArgs { - // verify required parameter 'run_id' is not null or undefined - if (run_id === null || run_id === undefined) { - throw new RequiredError('run_id','Required parameter run_id was null or undefined when calling getRun.'); - } - const localVarPath = `/apis/v1beta1/runs/{run_id}` - .replace(`{${"run_id"}}`, encodeURIComponent(String(run_id))); - const localVarUrlObj = url.parse(localVarPath, true); - const localVarRequestOptions = Object.assign({ method: 'GET' }, options); - const localVarHeaderParameter = {} as any; - const localVarQueryParameter = {} as any; - - // authentication Bearer required - if (configuration && configuration.apiKey) { - const localVarApiKeyValue = typeof configuration.apiKey === 'function' - ? configuration.apiKey("authorization") - : configuration.apiKey; - localVarHeaderParameter["authorization"] = localVarApiKeyValue; - } - - localVarUrlObj.query = Object.assign({}, localVarUrlObj.query, localVarQueryParameter, options.query); - // fix override query string Detail: https://stackoverflow.com/a/7517673/1077943 - delete localVarUrlObj.search; - localVarRequestOptions.headers = Object.assign({}, localVarHeaderParameter, options.headers); - - return { - url: url.format(localVarUrlObj), - options: localVarRequestOptions, - }; - }, - /** - * - * @param {string} [page_token] - * @param {number} [page_size] - * @param {string} [sort_by] Can be format of \"field_name\", \"field_name asc\" or \"field_name des\" Ascending by default. - * @param {string} [resource_reference_key_type] The type of the resource that referred to. - * @param {string} [resource_reference_key_id] The ID of the resource that referred to. - * @param {string} [filter] A base-64 encoded, JSON-serialized Filter protocol buffer (see filter.proto). - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - listRuns(page_token?: string, page_size?: number, sort_by?: string, resource_reference_key_type?: string, resource_reference_key_id?: string, filter?: string, options: any = {}): FetchArgs { - const localVarPath = `/apis/v1beta1/runs`; - const localVarUrlObj = url.parse(localVarPath, true); - const localVarRequestOptions = Object.assign({ method: 'GET' }, options); - const localVarHeaderParameter = {} as any; - const localVarQueryParameter = {} as any; - - // authentication Bearer required - if (configuration && configuration.apiKey) { - const localVarApiKeyValue = typeof configuration.apiKey === 'function' - ? 
configuration.apiKey("authorization") - : configuration.apiKey; - localVarHeaderParameter["authorization"] = localVarApiKeyValue; - } - - if (page_token !== undefined) { - localVarQueryParameter['page_token'] = page_token; - } - - if (page_size !== undefined) { - localVarQueryParameter['page_size'] = page_size; - } - - if (sort_by !== undefined) { - localVarQueryParameter['sort_by'] = sort_by; - } - - if (resource_reference_key_type !== undefined) { - localVarQueryParameter['resource_reference_key.type'] = resource_reference_key_type; - } - - if (resource_reference_key_id !== undefined) { - localVarQueryParameter['resource_reference_key.id'] = resource_reference_key_id; - } - - if (filter !== undefined) { - localVarQueryParameter['filter'] = filter; - } - - localVarUrlObj.query = Object.assign({}, localVarUrlObj.query, localVarQueryParameter, options.query); - // fix override query string Detail: https://stackoverflow.com/a/7517673/1077943 - delete localVarUrlObj.search; - localVarRequestOptions.headers = Object.assign({}, localVarHeaderParameter, options.headers); - - return { - url: url.format(localVarUrlObj), - options: localVarRequestOptions, - }; - }, - /** - * - * @param {string} run_id The ID of the run. - * @param {string} node_id The ID of the running node. - * @param {string} artifact_name The name of the artifact. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - readArtifact(run_id: string, node_id: string, artifact_name: string, options: any = {}): FetchArgs { - // verify required parameter 'run_id' is not null or undefined - if (run_id === null || run_id === undefined) { - throw new RequiredError('run_id','Required parameter run_id was null or undefined when calling readArtifact.'); - } - // verify required parameter 'node_id' is not null or undefined - if (node_id === null || node_id === undefined) { - throw new RequiredError('node_id','Required parameter node_id was null or undefined when calling readArtifact.'); - } - // verify required parameter 'artifact_name' is not null or undefined - if (artifact_name === null || artifact_name === undefined) { - throw new RequiredError('artifact_name','Required parameter artifact_name was null or undefined when calling readArtifact.'); - } - const localVarPath = `/apis/v1beta1/runs/{run_id}/nodes/{node_id}/artifacts/{artifact_name}:read` - .replace(`{${"run_id"}}`, encodeURIComponent(String(run_id))) - .replace(`{${"node_id"}}`, encodeURIComponent(String(node_id))) - .replace(`{${"artifact_name"}}`, encodeURIComponent(String(artifact_name))); - const localVarUrlObj = url.parse(localVarPath, true); - const localVarRequestOptions = Object.assign({ method: 'GET' }, options); - const localVarHeaderParameter = {} as any; - const localVarQueryParameter = {} as any; - - // authentication Bearer required - if (configuration && configuration.apiKey) { - const localVarApiKeyValue = typeof configuration.apiKey === 'function' - ? 
configuration.apiKey("authorization") - : configuration.apiKey; - localVarHeaderParameter["authorization"] = localVarApiKeyValue; - } - - localVarUrlObj.query = Object.assign({}, localVarUrlObj.query, localVarQueryParameter, options.query); - // fix override query string Detail: https://stackoverflow.com/a/7517673/1077943 - delete localVarUrlObj.search; - localVarRequestOptions.headers = Object.assign({}, localVarHeaderParameter, options.headers); - - return { - url: url.format(localVarUrlObj), - options: localVarRequestOptions, - }; - }, - /** - * - * @summary ReportRunMetrics reports metrics of a run. Each metric is reported in its own transaction, so this API accepts partial failures. Metric can be uniquely identified by (run_id, node_id, name). Duplicate reporting will be ignored by the API. First reporting wins. - * @param {string} run_id Required. The parent run ID of the metric. - * @param {ApiReportRunMetricsRequest} body - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - reportRunMetrics(run_id: string, body: ApiReportRunMetricsRequest, options: any = {}): FetchArgs { - // verify required parameter 'run_id' is not null or undefined - if (run_id === null || run_id === undefined) { - throw new RequiredError('run_id','Required parameter run_id was null or undefined when calling reportRunMetrics.'); - } - // verify required parameter 'body' is not null or undefined - if (body === null || body === undefined) { - throw new RequiredError('body','Required parameter body was null or undefined when calling reportRunMetrics.'); - } - const localVarPath = `/apis/v1beta1/runs/{run_id}:reportMetrics` - .replace(`{${"run_id"}}`, encodeURIComponent(String(run_id))); - const localVarUrlObj = url.parse(localVarPath, true); - const localVarRequestOptions = Object.assign({ method: 'POST' }, options); - const localVarHeaderParameter = {} as any; - const localVarQueryParameter = {} as any; - - // authentication Bearer required - if (configuration && configuration.apiKey) { - const localVarApiKeyValue = typeof configuration.apiKey === 'function' - ? configuration.apiKey("authorization") - : configuration.apiKey; - localVarHeaderParameter["authorization"] = localVarApiKeyValue; - } - - localVarHeaderParameter['Content-Type'] = 'application/json'; - - localVarUrlObj.query = Object.assign({}, localVarUrlObj.query, localVarQueryParameter, options.query); - // fix override query string Detail: https://stackoverflow.com/a/7517673/1077943 - delete localVarUrlObj.search; - localVarRequestOptions.headers = Object.assign({}, localVarHeaderParameter, options.headers); - const needsSerialization = ("ApiReportRunMetricsRequest" !== "string") || localVarRequestOptions.headers['Content-Type'] === 'application/json'; - localVarRequestOptions.body = needsSerialization ? JSON.stringify(body || {}) : (body || ""); - - return { - url: url.format(localVarUrlObj), - options: localVarRequestOptions, - }; - }, - /** - * - * @param {string} run_id - * @param {*} [options] Override http request option. 
- * @throws {RequiredError} - */ - retryRun(run_id: string, options: any = {}): FetchArgs { - // verify required parameter 'run_id' is not null or undefined - if (run_id === null || run_id === undefined) { - throw new RequiredError('run_id','Required parameter run_id was null or undefined when calling retryRun.'); - } - const localVarPath = `/apis/v1beta1/runs/{run_id}/retry` - .replace(`{${"run_id"}}`, encodeURIComponent(String(run_id))); - const localVarUrlObj = url.parse(localVarPath, true); - const localVarRequestOptions = Object.assign({ method: 'POST' }, options); - const localVarHeaderParameter = {} as any; - const localVarQueryParameter = {} as any; - - // authentication Bearer required - if (configuration && configuration.apiKey) { - const localVarApiKeyValue = typeof configuration.apiKey === 'function' - ? configuration.apiKey("authorization") - : configuration.apiKey; - localVarHeaderParameter["authorization"] = localVarApiKeyValue; - } - - localVarUrlObj.query = Object.assign({}, localVarUrlObj.query, localVarQueryParameter, options.query); - // fix override query string Detail: https://stackoverflow.com/a/7517673/1077943 - delete localVarUrlObj.search; - localVarRequestOptions.headers = Object.assign({}, localVarHeaderParameter, options.headers); - - return { - url: url.format(localVarUrlObj), - options: localVarRequestOptions, - }; - }, - /** - * - * @param {string} run_id - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - terminateRun(run_id: string, options: any = {}): FetchArgs { - // verify required parameter 'run_id' is not null or undefined - if (run_id === null || run_id === undefined) { - throw new RequiredError('run_id','Required parameter run_id was null or undefined when calling terminateRun.'); - } - const localVarPath = `/apis/v1beta1/runs/{run_id}/terminate` - .replace(`{${"run_id"}}`, encodeURIComponent(String(run_id))); - const localVarUrlObj = url.parse(localVarPath, true); - const localVarRequestOptions = Object.assign({ method: 'POST' }, options); - const localVarHeaderParameter = {} as any; - const localVarQueryParameter = {} as any; - - // authentication Bearer required - if (configuration && configuration.apiKey) { - const localVarApiKeyValue = typeof configuration.apiKey === 'function' - ? configuration.apiKey("authorization") - : configuration.apiKey; - localVarHeaderParameter["authorization"] = localVarApiKeyValue; - } - - localVarUrlObj.query = Object.assign({}, localVarUrlObj.query, localVarQueryParameter, options.query); - // fix override query string Detail: https://stackoverflow.com/a/7517673/1077943 - delete localVarUrlObj.search; - localVarRequestOptions.headers = Object.assign({}, localVarHeaderParameter, options.headers); - - return { - url: url.format(localVarUrlObj), - options: localVarRequestOptions, - }; - }, - /** - * - * @param {string} id - * @param {*} [options] Override http request option. 
- * @throws {RequiredError} - */ - unarchiveRun(id: string, options: any = {}): FetchArgs { - // verify required parameter 'id' is not null or undefined - if (id === null || id === undefined) { - throw new RequiredError('id','Required parameter id was null or undefined when calling unarchiveRun.'); - } - const localVarPath = `/apis/v1beta1/runs/{id}:unarchive` - .replace(`{${"id"}}`, encodeURIComponent(String(id))); - const localVarUrlObj = url.parse(localVarPath, true); - const localVarRequestOptions = Object.assign({ method: 'POST' }, options); - const localVarHeaderParameter = {} as any; - const localVarQueryParameter = {} as any; - - // authentication Bearer required - if (configuration && configuration.apiKey) { - const localVarApiKeyValue = typeof configuration.apiKey === 'function' - ? configuration.apiKey("authorization") - : configuration.apiKey; - localVarHeaderParameter["authorization"] = localVarApiKeyValue; - } - - localVarUrlObj.query = Object.assign({}, localVarUrlObj.query, localVarQueryParameter, options.query); - // fix override query string Detail: https://stackoverflow.com/a/7517673/1077943 - delete localVarUrlObj.search; - localVarRequestOptions.headers = Object.assign({}, localVarHeaderParameter, options.headers); - - return { - url: url.format(localVarUrlObj), - options: localVarRequestOptions, - }; - }, - } +export const RunServiceApiFetchParamCreator = function(configuration?: Configuration) { + return { + /** + * + * @param {string} id + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + archiveRun(id: string, options: any = {}): FetchArgs { + // verify required parameter 'id' is not null or undefined + if (id === null || id === undefined) { + throw new RequiredError( + 'id', + 'Required parameter id was null or undefined when calling archiveRun.', + ); + } + const localVarPath = `/apis/v1beta1/runs/{id}:archive`.replace( + `{${'id'}}`, + encodeURIComponent(String(id)), + ); + const localVarUrlObj = url.parse(localVarPath, true); + const localVarRequestOptions = Object.assign({ method: 'POST' }, options); + const localVarHeaderParameter = {} as any; + const localVarQueryParameter = {} as any; + + // authentication Bearer required + if (configuration && configuration.apiKey) { + const localVarApiKeyValue = + typeof configuration.apiKey === 'function' + ? configuration.apiKey('authorization') + : configuration.apiKey; + localVarHeaderParameter['authorization'] = localVarApiKeyValue; + } + + localVarUrlObj.query = Object.assign( + {}, + localVarUrlObj.query, + localVarQueryParameter, + options.query, + ); + // fix override query string Detail: https://stackoverflow.com/a/7517673/1077943 + delete localVarUrlObj.search; + localVarRequestOptions.headers = Object.assign({}, localVarHeaderParameter, options.headers); + + return { + url: url.format(localVarUrlObj), + options: localVarRequestOptions, + }; + }, + /** + * + * @param {ApiRun} body + * @param {*} [options] Override http request option. 
+ * @throws {RequiredError} + */ + createRun(body: ApiRun, options: any = {}): FetchArgs { + // verify required parameter 'body' is not null or undefined + if (body === null || body === undefined) { + throw new RequiredError( + 'body', + 'Required parameter body was null or undefined when calling createRun.', + ); + } + const localVarPath = `/apis/v1beta1/runs`; + const localVarUrlObj = url.parse(localVarPath, true); + const localVarRequestOptions = Object.assign({ method: 'POST' }, options); + const localVarHeaderParameter = {} as any; + const localVarQueryParameter = {} as any; + + // authentication Bearer required + if (configuration && configuration.apiKey) { + const localVarApiKeyValue = + typeof configuration.apiKey === 'function' + ? configuration.apiKey('authorization') + : configuration.apiKey; + localVarHeaderParameter['authorization'] = localVarApiKeyValue; + } + + localVarHeaderParameter['Content-Type'] = 'application/json'; + + localVarUrlObj.query = Object.assign( + {}, + localVarUrlObj.query, + localVarQueryParameter, + options.query, + ); + // fix override query string Detail: https://stackoverflow.com/a/7517673/1077943 + delete localVarUrlObj.search; + localVarRequestOptions.headers = Object.assign({}, localVarHeaderParameter, options.headers); + const needsSerialization = + 'ApiRun' !== 'string' || + localVarRequestOptions.headers['Content-Type'] === 'application/json'; + localVarRequestOptions.body = needsSerialization ? JSON.stringify(body || {}) : body || ''; + + return { + url: url.format(localVarUrlObj), + options: localVarRequestOptions, + }; + }, + /** + * + * @param {string} id + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + deleteRun(id: string, options: any = {}): FetchArgs { + // verify required parameter 'id' is not null or undefined + if (id === null || id === undefined) { + throw new RequiredError( + 'id', + 'Required parameter id was null or undefined when calling deleteRun.', + ); + } + const localVarPath = `/apis/v1beta1/runs/{id}`.replace( + `{${'id'}}`, + encodeURIComponent(String(id)), + ); + const localVarUrlObj = url.parse(localVarPath, true); + const localVarRequestOptions = Object.assign({ method: 'DELETE' }, options); + const localVarHeaderParameter = {} as any; + const localVarQueryParameter = {} as any; + + // authentication Bearer required + if (configuration && configuration.apiKey) { + const localVarApiKeyValue = + typeof configuration.apiKey === 'function' + ? configuration.apiKey('authorization') + : configuration.apiKey; + localVarHeaderParameter['authorization'] = localVarApiKeyValue; + } + + localVarUrlObj.query = Object.assign( + {}, + localVarUrlObj.query, + localVarQueryParameter, + options.query, + ); + // fix override query string Detail: https://stackoverflow.com/a/7517673/1077943 + delete localVarUrlObj.search; + localVarRequestOptions.headers = Object.assign({}, localVarHeaderParameter, options.headers); + + return { + url: url.format(localVarUrlObj), + options: localVarRequestOptions, + }; + }, + /** + * + * @param {string} run_id + * @param {*} [options] Override http request option. 
+ * @throws {RequiredError} + */ + getRun(run_id: string, options: any = {}): FetchArgs { + // verify required parameter 'run_id' is not null or undefined + if (run_id === null || run_id === undefined) { + throw new RequiredError( + 'run_id', + 'Required parameter run_id was null or undefined when calling getRun.', + ); + } + const localVarPath = `/apis/v1beta1/runs/{run_id}`.replace( + `{${'run_id'}}`, + encodeURIComponent(String(run_id)), + ); + const localVarUrlObj = url.parse(localVarPath, true); + const localVarRequestOptions = Object.assign({ method: 'GET' }, options); + const localVarHeaderParameter = {} as any; + const localVarQueryParameter = {} as any; + + // authentication Bearer required + if (configuration && configuration.apiKey) { + const localVarApiKeyValue = + typeof configuration.apiKey === 'function' + ? configuration.apiKey('authorization') + : configuration.apiKey; + localVarHeaderParameter['authorization'] = localVarApiKeyValue; + } + + localVarUrlObj.query = Object.assign( + {}, + localVarUrlObj.query, + localVarQueryParameter, + options.query, + ); + // fix override query string Detail: https://stackoverflow.com/a/7517673/1077943 + delete localVarUrlObj.search; + localVarRequestOptions.headers = Object.assign({}, localVarHeaderParameter, options.headers); + + return { + url: url.format(localVarUrlObj), + options: localVarRequestOptions, + }; + }, + /** + * + * @param {string} [page_token] + * @param {number} [page_size] + * @param {string} [sort_by] Can be format of \"field_name\", \"field_name asc\" or \"field_name des\" Ascending by default. + * @param {'UNKNOWN_RESOURCE_TYPE' | 'EXPERIMENT' | 'JOB'} [resource_reference_key_type] The type of the resource that referred to. + * @param {string} [resource_reference_key_id] The ID of the resource that referred to. + * @param {string} [filter] A base-64 encoded, JSON-serialized Filter protocol buffer (see filter.proto). + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + listRuns( + page_token?: string, + page_size?: number, + sort_by?: string, + resource_reference_key_type?: 'UNKNOWN_RESOURCE_TYPE' | 'EXPERIMENT' | 'JOB', + resource_reference_key_id?: string, + filter?: string, + options: any = {}, + ): FetchArgs { + const localVarPath = `/apis/v1beta1/runs`; + const localVarUrlObj = url.parse(localVarPath, true); + const localVarRequestOptions = Object.assign({ method: 'GET' }, options); + const localVarHeaderParameter = {} as any; + const localVarQueryParameter = {} as any; + + // authentication Bearer required + if (configuration && configuration.apiKey) { + const localVarApiKeyValue = + typeof configuration.apiKey === 'function' + ? 
configuration.apiKey('authorization') + : configuration.apiKey; + localVarHeaderParameter['authorization'] = localVarApiKeyValue; + } + + if (page_token !== undefined) { + localVarQueryParameter['page_token'] = page_token; + } + + if (page_size !== undefined) { + localVarQueryParameter['page_size'] = page_size; + } + + if (sort_by !== undefined) { + localVarQueryParameter['sort_by'] = sort_by; + } + + if (resource_reference_key_type !== undefined) { + localVarQueryParameter['resource_reference_key.type'] = resource_reference_key_type; + } + + if (resource_reference_key_id !== undefined) { + localVarQueryParameter['resource_reference_key.id'] = resource_reference_key_id; + } + + if (filter !== undefined) { + localVarQueryParameter['filter'] = filter; + } + + localVarUrlObj.query = Object.assign( + {}, + localVarUrlObj.query, + localVarQueryParameter, + options.query, + ); + // fix override query string Detail: https://stackoverflow.com/a/7517673/1077943 + delete localVarUrlObj.search; + localVarRequestOptions.headers = Object.assign({}, localVarHeaderParameter, options.headers); + + return { + url: url.format(localVarUrlObj), + options: localVarRequestOptions, + }; + }, + /** + * + * @param {string} run_id The ID of the run. + * @param {string} node_id The ID of the running node. + * @param {string} artifact_name The name of the artifact. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + readArtifact( + run_id: string, + node_id: string, + artifact_name: string, + options: any = {}, + ): FetchArgs { + // verify required parameter 'run_id' is not null or undefined + if (run_id === null || run_id === undefined) { + throw new RequiredError( + 'run_id', + 'Required parameter run_id was null or undefined when calling readArtifact.', + ); + } + // verify required parameter 'node_id' is not null or undefined + if (node_id === null || node_id === undefined) { + throw new RequiredError( + 'node_id', + 'Required parameter node_id was null or undefined when calling readArtifact.', + ); + } + // verify required parameter 'artifact_name' is not null or undefined + if (artifact_name === null || artifact_name === undefined) { + throw new RequiredError( + 'artifact_name', + 'Required parameter artifact_name was null or undefined when calling readArtifact.', + ); + } + const localVarPath = `/apis/v1beta1/runs/{run_id}/nodes/{node_id}/artifacts/{artifact_name}:read` + .replace(`{${'run_id'}}`, encodeURIComponent(String(run_id))) + .replace(`{${'node_id'}}`, encodeURIComponent(String(node_id))) + .replace(`{${'artifact_name'}}`, encodeURIComponent(String(artifact_name))); + const localVarUrlObj = url.parse(localVarPath, true); + const localVarRequestOptions = Object.assign({ method: 'GET' }, options); + const localVarHeaderParameter = {} as any; + const localVarQueryParameter = {} as any; + + // authentication Bearer required + if (configuration && configuration.apiKey) { + const localVarApiKeyValue = + typeof configuration.apiKey === 'function' + ? 
configuration.apiKey('authorization') + : configuration.apiKey; + localVarHeaderParameter['authorization'] = localVarApiKeyValue; + } + + localVarUrlObj.query = Object.assign( + {}, + localVarUrlObj.query, + localVarQueryParameter, + options.query, + ); + // fix override query string Detail: https://stackoverflow.com/a/7517673/1077943 + delete localVarUrlObj.search; + localVarRequestOptions.headers = Object.assign({}, localVarHeaderParameter, options.headers); + + return { + url: url.format(localVarUrlObj), + options: localVarRequestOptions, + }; + }, + /** + * + * @summary ReportRunMetrics reports metrics of a run. Each metric is reported in its own transaction, so this API accepts partial failures. Metric can be uniquely identified by (run_id, node_id, name). Duplicate reporting will be ignored by the API. First reporting wins. + * @param {string} run_id Required. The parent run ID of the metric. + * @param {ApiReportRunMetricsRequest} body + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + reportRunMetrics( + run_id: string, + body: ApiReportRunMetricsRequest, + options: any = {}, + ): FetchArgs { + // verify required parameter 'run_id' is not null or undefined + if (run_id === null || run_id === undefined) { + throw new RequiredError( + 'run_id', + 'Required parameter run_id was null or undefined when calling reportRunMetrics.', + ); + } + // verify required parameter 'body' is not null or undefined + if (body === null || body === undefined) { + throw new RequiredError( + 'body', + 'Required parameter body was null or undefined when calling reportRunMetrics.', + ); + } + const localVarPath = `/apis/v1beta1/runs/{run_id}:reportMetrics`.replace( + `{${'run_id'}}`, + encodeURIComponent(String(run_id)), + ); + const localVarUrlObj = url.parse(localVarPath, true); + const localVarRequestOptions = Object.assign({ method: 'POST' }, options); + const localVarHeaderParameter = {} as any; + const localVarQueryParameter = {} as any; + + // authentication Bearer required + if (configuration && configuration.apiKey) { + const localVarApiKeyValue = + typeof configuration.apiKey === 'function' + ? configuration.apiKey('authorization') + : configuration.apiKey; + localVarHeaderParameter['authorization'] = localVarApiKeyValue; + } + + localVarHeaderParameter['Content-Type'] = 'application/json'; + + localVarUrlObj.query = Object.assign( + {}, + localVarUrlObj.query, + localVarQueryParameter, + options.query, + ); + // fix override query string Detail: https://stackoverflow.com/a/7517673/1077943 + delete localVarUrlObj.search; + localVarRequestOptions.headers = Object.assign({}, localVarHeaderParameter, options.headers); + const needsSerialization = + 'ApiReportRunMetricsRequest' !== 'string' || + localVarRequestOptions.headers['Content-Type'] === 'application/json'; + localVarRequestOptions.body = needsSerialization ? JSON.stringify(body || {}) : body || ''; + + return { + url: url.format(localVarUrlObj), + options: localVarRequestOptions, + }; + }, + /** + * + * @param {string} run_id + * @param {*} [options] Override http request option. 
+ * @throws {RequiredError} + */ + retryRun(run_id: string, options: any = {}): FetchArgs { + // verify required parameter 'run_id' is not null or undefined + if (run_id === null || run_id === undefined) { + throw new RequiredError( + 'run_id', + 'Required parameter run_id was null or undefined when calling retryRun.', + ); + } + const localVarPath = `/apis/v1beta1/runs/{run_id}/retry`.replace( + `{${'run_id'}}`, + encodeURIComponent(String(run_id)), + ); + const localVarUrlObj = url.parse(localVarPath, true); + const localVarRequestOptions = Object.assign({ method: 'POST' }, options); + const localVarHeaderParameter = {} as any; + const localVarQueryParameter = {} as any; + + // authentication Bearer required + if (configuration && configuration.apiKey) { + const localVarApiKeyValue = + typeof configuration.apiKey === 'function' + ? configuration.apiKey('authorization') + : configuration.apiKey; + localVarHeaderParameter['authorization'] = localVarApiKeyValue; + } + + localVarUrlObj.query = Object.assign( + {}, + localVarUrlObj.query, + localVarQueryParameter, + options.query, + ); + // fix override query string Detail: https://stackoverflow.com/a/7517673/1077943 + delete localVarUrlObj.search; + localVarRequestOptions.headers = Object.assign({}, localVarHeaderParameter, options.headers); + + return { + url: url.format(localVarUrlObj), + options: localVarRequestOptions, + }; + }, + /** + * + * @param {string} run_id + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + terminateRun(run_id: string, options: any = {}): FetchArgs { + // verify required parameter 'run_id' is not null or undefined + if (run_id === null || run_id === undefined) { + throw new RequiredError( + 'run_id', + 'Required parameter run_id was null or undefined when calling terminateRun.', + ); + } + const localVarPath = `/apis/v1beta1/runs/{run_id}/terminate`.replace( + `{${'run_id'}}`, + encodeURIComponent(String(run_id)), + ); + const localVarUrlObj = url.parse(localVarPath, true); + const localVarRequestOptions = Object.assign({ method: 'POST' }, options); + const localVarHeaderParameter = {} as any; + const localVarQueryParameter = {} as any; + + // authentication Bearer required + if (configuration && configuration.apiKey) { + const localVarApiKeyValue = + typeof configuration.apiKey === 'function' + ? configuration.apiKey('authorization') + : configuration.apiKey; + localVarHeaderParameter['authorization'] = localVarApiKeyValue; + } + + localVarUrlObj.query = Object.assign( + {}, + localVarUrlObj.query, + localVarQueryParameter, + options.query, + ); + // fix override query string Detail: https://stackoverflow.com/a/7517673/1077943 + delete localVarUrlObj.search; + localVarRequestOptions.headers = Object.assign({}, localVarHeaderParameter, options.headers); + + return { + url: url.format(localVarUrlObj), + options: localVarRequestOptions, + }; + }, + /** + * + * @param {string} id + * @param {*} [options] Override http request option. 
+ * @throws {RequiredError} + */ + unarchiveRun(id: string, options: any = {}): FetchArgs { + // verify required parameter 'id' is not null or undefined + if (id === null || id === undefined) { + throw new RequiredError( + 'id', + 'Required parameter id was null or undefined when calling unarchiveRun.', + ); + } + const localVarPath = `/apis/v1beta1/runs/{id}:unarchive`.replace( + `{${'id'}}`, + encodeURIComponent(String(id)), + ); + const localVarUrlObj = url.parse(localVarPath, true); + const localVarRequestOptions = Object.assign({ method: 'POST' }, options); + const localVarHeaderParameter = {} as any; + const localVarQueryParameter = {} as any; + + // authentication Bearer required + if (configuration && configuration.apiKey) { + const localVarApiKeyValue = + typeof configuration.apiKey === 'function' + ? configuration.apiKey('authorization') + : configuration.apiKey; + localVarHeaderParameter['authorization'] = localVarApiKeyValue; + } + + localVarUrlObj.query = Object.assign( + {}, + localVarUrlObj.query, + localVarQueryParameter, + options.query, + ); + // fix override query string Detail: https://stackoverflow.com/a/7517673/1077943 + delete localVarUrlObj.search; + localVarRequestOptions.headers = Object.assign({}, localVarHeaderParameter, options.headers); + + return { + url: url.format(localVarUrlObj), + options: localVarRequestOptions, + }; + }, + }; }; /** @@ -966,432 +1099,547 @@ export const RunServiceApiFetchParamCreator = function (configuration?: Configur * @export */ export const RunServiceApiFp = function(configuration?: Configuration) { - return { - /** - * - * @param {string} id - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - archiveRun(id: string, options?: any): (fetch?: FetchAPI, basePath?: string) => Promise { - const localVarFetchArgs = RunServiceApiFetchParamCreator(configuration).archiveRun(id, options); - return (fetch: FetchAPI = portableFetch, basePath: string = BASE_PATH) => { - return fetch(basePath + localVarFetchArgs.url, localVarFetchArgs.options).then((response) => { - if (response.status >= 200 && response.status < 300) { - return response.json(); - } else { - throw response; - } - }); - }; - }, - /** - * - * @param {ApiRun} body - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - createRun(body: ApiRun, options?: any): (fetch?: FetchAPI, basePath?: string) => Promise { - const localVarFetchArgs = RunServiceApiFetchParamCreator(configuration).createRun(body, options); - return (fetch: FetchAPI = portableFetch, basePath: string = BASE_PATH) => { - return fetch(basePath + localVarFetchArgs.url, localVarFetchArgs.options).then((response) => { - if (response.status >= 200 && response.status < 300) { - return response.json(); - } else { - throw response; - } - }); - }; - }, - /** - * - * @param {string} id - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - deleteRun(id: string, options?: any): (fetch?: FetchAPI, basePath?: string) => Promise { - const localVarFetchArgs = RunServiceApiFetchParamCreator(configuration).deleteRun(id, options); - return (fetch: FetchAPI = portableFetch, basePath: string = BASE_PATH) => { - return fetch(basePath + localVarFetchArgs.url, localVarFetchArgs.options).then((response) => { - if (response.status >= 200 && response.status < 300) { - return response.json(); - } else { - throw response; - } - }); - }; - }, - /** - * - * @param {string} run_id - * @param {*} [options] Override http request option. 
- * @throws {RequiredError} - */ - getRun(run_id: string, options?: any): (fetch?: FetchAPI, basePath?: string) => Promise { - const localVarFetchArgs = RunServiceApiFetchParamCreator(configuration).getRun(run_id, options); - return (fetch: FetchAPI = portableFetch, basePath: string = BASE_PATH) => { - return fetch(basePath + localVarFetchArgs.url, localVarFetchArgs.options).then((response) => { - if (response.status >= 200 && response.status < 300) { - return response.json(); - } else { - throw response; - } - }); - }; - }, - /** - * - * @param {string} [page_token] - * @param {number} [page_size] - * @param {string} [sort_by] Can be format of \"field_name\", \"field_name asc\" or \"field_name des\" Ascending by default. - * @param {string} [resource_reference_key_type] The type of the resource that referred to. - * @param {string} [resource_reference_key_id] The ID of the resource that referred to. - * @param {string} [filter] A base-64 encoded, JSON-serialized Filter protocol buffer (see filter.proto). - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - listRuns(page_token?: string, page_size?: number, sort_by?: string, resource_reference_key_type?: string, resource_reference_key_id?: string, filter?: string, options?: any): (fetch?: FetchAPI, basePath?: string) => Promise { - const localVarFetchArgs = RunServiceApiFetchParamCreator(configuration).listRuns(page_token, page_size, sort_by, resource_reference_key_type, resource_reference_key_id, filter, options); - return (fetch: FetchAPI = portableFetch, basePath: string = BASE_PATH) => { - return fetch(basePath + localVarFetchArgs.url, localVarFetchArgs.options).then((response) => { - if (response.status >= 200 && response.status < 300) { - return response.json(); - } else { - throw response; - } - }); - }; - }, - /** - * - * @param {string} run_id The ID of the run. - * @param {string} node_id The ID of the running node. - * @param {string} artifact_name The name of the artifact. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - readArtifact(run_id: string, node_id: string, artifact_name: string, options?: any): (fetch?: FetchAPI, basePath?: string) => Promise { - const localVarFetchArgs = RunServiceApiFetchParamCreator(configuration).readArtifact(run_id, node_id, artifact_name, options); - return (fetch: FetchAPI = portableFetch, basePath: string = BASE_PATH) => { - return fetch(basePath + localVarFetchArgs.url, localVarFetchArgs.options).then((response) => { - if (response.status >= 200 && response.status < 300) { - return response.json(); - } else { - throw response; - } - }); - }; - }, - /** - * - * @summary ReportRunMetrics reports metrics of a run. Each metric is reported in its own transaction, so this API accepts partial failures. Metric can be uniquely identified by (run_id, node_id, name). Duplicate reporting will be ignored by the API. First reporting wins. - * @param {string} run_id Required. The parent run ID of the metric. - * @param {ApiReportRunMetricsRequest} body - * @param {*} [options] Override http request option. 
- * @throws {RequiredError} - */ - reportRunMetrics(run_id: string, body: ApiReportRunMetricsRequest, options?: any): (fetch?: FetchAPI, basePath?: string) => Promise { - const localVarFetchArgs = RunServiceApiFetchParamCreator(configuration).reportRunMetrics(run_id, body, options); - return (fetch: FetchAPI = portableFetch, basePath: string = BASE_PATH) => { - return fetch(basePath + localVarFetchArgs.url, localVarFetchArgs.options).then((response) => { - if (response.status >= 200 && response.status < 300) { - return response.json(); - } else { - throw response; - } - }); - }; - }, - /** - * - * @param {string} run_id - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - retryRun(run_id: string, options?: any): (fetch?: FetchAPI, basePath?: string) => Promise { - const localVarFetchArgs = RunServiceApiFetchParamCreator(configuration).retryRun(run_id, options); - return (fetch: FetchAPI = portableFetch, basePath: string = BASE_PATH) => { - return fetch(basePath + localVarFetchArgs.url, localVarFetchArgs.options).then((response) => { - if (response.status >= 200 && response.status < 300) { - return response.json(); - } else { - throw response; - } - }); - }; - }, - /** - * - * @param {string} run_id - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - terminateRun(run_id: string, options?: any): (fetch?: FetchAPI, basePath?: string) => Promise { - const localVarFetchArgs = RunServiceApiFetchParamCreator(configuration).terminateRun(run_id, options); - return (fetch: FetchAPI = portableFetch, basePath: string = BASE_PATH) => { - return fetch(basePath + localVarFetchArgs.url, localVarFetchArgs.options).then((response) => { - if (response.status >= 200 && response.status < 300) { - return response.json(); - } else { - throw response; - } - }); - }; - }, - /** - * - * @param {string} id - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - unarchiveRun(id: string, options?: any): (fetch?: FetchAPI, basePath?: string) => Promise { - const localVarFetchArgs = RunServiceApiFetchParamCreator(configuration).unarchiveRun(id, options); - return (fetch: FetchAPI = portableFetch, basePath: string = BASE_PATH) => { - return fetch(basePath + localVarFetchArgs.url, localVarFetchArgs.options).then((response) => { - if (response.status >= 200 && response.status < 300) { - return response.json(); - } else { - throw response; - } - }); - }; - }, - } + return { + /** + * + * @param {string} id + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + archiveRun(id: string, options?: any): (fetch?: FetchAPI, basePath?: string) => Promise { + const localVarFetchArgs = RunServiceApiFetchParamCreator(configuration).archiveRun( + id, + options, + ); + return (fetch: FetchAPI = portableFetch, basePath: string = BASE_PATH) => { + return fetch(basePath + localVarFetchArgs.url, localVarFetchArgs.options).then(response => { + if (response.status >= 200 && response.status < 300) { + return response.json(); + } else { + throw response; + } + }); + }; + }, + /** + * + * @param {ApiRun} body + * @param {*} [options] Override http request option. 
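+     * @example
+     * // Illustrative sketch only; `config`, `myRun` and `myFetch` are hypothetical.
+     * // The Fp layer is curried: it builds the request arguments eagerly but lets
+     * // the caller supply a fetch implementation and base path at call time.
+     * //   const send = RunServiceApiFp(config).createRun(myRun);
+     * //   send(myFetch, 'https://my-host').then(run => console.log(run));
+     * // Called as `send()`, it falls back to portableFetch and BASE_PATH.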
+ * @throws {RequiredError} + */ + createRun( + body: ApiRun, + options?: any, + ): (fetch?: FetchAPI, basePath?: string) => Promise { + const localVarFetchArgs = RunServiceApiFetchParamCreator(configuration).createRun( + body, + options, + ); + return (fetch: FetchAPI = portableFetch, basePath: string = BASE_PATH) => { + return fetch(basePath + localVarFetchArgs.url, localVarFetchArgs.options).then(response => { + if (response.status >= 200 && response.status < 300) { + return response.json(); + } else { + throw response; + } + }); + }; + }, + /** + * + * @param {string} id + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + deleteRun(id: string, options?: any): (fetch?: FetchAPI, basePath?: string) => Promise { + const localVarFetchArgs = RunServiceApiFetchParamCreator(configuration).deleteRun( + id, + options, + ); + return (fetch: FetchAPI = portableFetch, basePath: string = BASE_PATH) => { + return fetch(basePath + localVarFetchArgs.url, localVarFetchArgs.options).then(response => { + if (response.status >= 200 && response.status < 300) { + return response.json(); + } else { + throw response; + } + }); + }; + }, + /** + * + * @param {string} run_id + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + getRun( + run_id: string, + options?: any, + ): (fetch?: FetchAPI, basePath?: string) => Promise { + const localVarFetchArgs = RunServiceApiFetchParamCreator(configuration).getRun( + run_id, + options, + ); + return (fetch: FetchAPI = portableFetch, basePath: string = BASE_PATH) => { + return fetch(basePath + localVarFetchArgs.url, localVarFetchArgs.options).then(response => { + if (response.status >= 200 && response.status < 300) { + return response.json(); + } else { + throw response; + } + }); + }; + }, + /** + * + * @param {string} [page_token] + * @param {number} [page_size] + * @param {string} [sort_by] Can be format of \"field_name\", \"field_name asc\" or \"field_name des\" Ascending by default. + * @param {'UNKNOWN_RESOURCE_TYPE' | 'EXPERIMENT' | 'JOB'} [resource_reference_key_type] The type of the resource that referred to. + * @param {string} [resource_reference_key_id] The ID of the resource that referred to. + * @param {string} [filter] A base-64 encoded, JSON-serialized Filter protocol buffer (see filter.proto). + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + listRuns( + page_token?: string, + page_size?: number, + sort_by?: string, + resource_reference_key_type?: 'UNKNOWN_RESOURCE_TYPE' | 'EXPERIMENT' | 'JOB', + resource_reference_key_id?: string, + filter?: string, + options?: any, + ): (fetch?: FetchAPI, basePath?: string) => Promise { + const localVarFetchArgs = RunServiceApiFetchParamCreator(configuration).listRuns( + page_token, + page_size, + sort_by, + resource_reference_key_type, + resource_reference_key_id, + filter, + options, + ); + return (fetch: FetchAPI = portableFetch, basePath: string = BASE_PATH) => { + return fetch(basePath + localVarFetchArgs.url, localVarFetchArgs.options).then(response => { + if (response.status >= 200 && response.status < 300) { + return response.json(); + } else { + throw response; + } + }); + }; + }, + /** + * + * @param {string} run_id The ID of the run. + * @param {string} node_id The ID of the running node. + * @param {string} artifact_name The name of the artifact. + * @param {*} [options] Override http request option. 
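+     * @example
+     * // Illustrative sketch only; the IDs and artifact name are hypothetical.
+     * //   const read = RunServiceApiFp(config).readArtifact('run-1', 'node-1', 'my-artifact');
+     * //   read().then(artifact => console.log(artifact));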
+ * @throws {RequiredError} + */ + readArtifact( + run_id: string, + node_id: string, + artifact_name: string, + options?: any, + ): (fetch?: FetchAPI, basePath?: string) => Promise { + const localVarFetchArgs = RunServiceApiFetchParamCreator(configuration).readArtifact( + run_id, + node_id, + artifact_name, + options, + ); + return (fetch: FetchAPI = portableFetch, basePath: string = BASE_PATH) => { + return fetch(basePath + localVarFetchArgs.url, localVarFetchArgs.options).then(response => { + if (response.status >= 200 && response.status < 300) { + return response.json(); + } else { + throw response; + } + }); + }; + }, + /** + * + * @summary ReportRunMetrics reports metrics of a run. Each metric is reported in its own transaction, so this API accepts partial failures. Metric can be uniquely identified by (run_id, node_id, name). Duplicate reporting will be ignored by the API. First reporting wins. + * @param {string} run_id Required. The parent run ID of the metric. + * @param {ApiReportRunMetricsRequest} body + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + reportRunMetrics( + run_id: string, + body: ApiReportRunMetricsRequest, + options?: any, + ): (fetch?: FetchAPI, basePath?: string) => Promise { + const localVarFetchArgs = RunServiceApiFetchParamCreator(configuration).reportRunMetrics( + run_id, + body, + options, + ); + return (fetch: FetchAPI = portableFetch, basePath: string = BASE_PATH) => { + return fetch(basePath + localVarFetchArgs.url, localVarFetchArgs.options).then(response => { + if (response.status >= 200 && response.status < 300) { + return response.json(); + } else { + throw response; + } + }); + }; + }, + /** + * + * @param {string} run_id + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + retryRun(run_id: string, options?: any): (fetch?: FetchAPI, basePath?: string) => Promise { + const localVarFetchArgs = RunServiceApiFetchParamCreator(configuration).retryRun( + run_id, + options, + ); + return (fetch: FetchAPI = portableFetch, basePath: string = BASE_PATH) => { + return fetch(basePath + localVarFetchArgs.url, localVarFetchArgs.options).then(response => { + if (response.status >= 200 && response.status < 300) { + return response.json(); + } else { + throw response; + } + }); + }; + }, + /** + * + * @param {string} run_id + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + terminateRun( + run_id: string, + options?: any, + ): (fetch?: FetchAPI, basePath?: string) => Promise { + const localVarFetchArgs = RunServiceApiFetchParamCreator(configuration).terminateRun( + run_id, + options, + ); + return (fetch: FetchAPI = portableFetch, basePath: string = BASE_PATH) => { + return fetch(basePath + localVarFetchArgs.url, localVarFetchArgs.options).then(response => { + if (response.status >= 200 && response.status < 300) { + return response.json(); + } else { + throw response; + } + }); + }; + }, + /** + * + * @param {string} id + * @param {*} [options] Override http request option. 
+ * @throws {RequiredError} + */ + unarchiveRun(id: string, options?: any): (fetch?: FetchAPI, basePath?: string) => Promise { + const localVarFetchArgs = RunServiceApiFetchParamCreator(configuration).unarchiveRun( + id, + options, + ); + return (fetch: FetchAPI = portableFetch, basePath: string = BASE_PATH) => { + return fetch(basePath + localVarFetchArgs.url, localVarFetchArgs.options).then(response => { + if (response.status >= 200 && response.status < 300) { + return response.json(); + } else { + throw response; + } + }); + }; + }, + }; }; /** * RunServiceApi - factory interface * @export */ -export const RunServiceApiFactory = function (configuration?: Configuration, fetch?: FetchAPI, basePath?: string) { - return { - /** - * - * @param {string} id - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - archiveRun(id: string, options?: any) { - return RunServiceApiFp(configuration).archiveRun(id, options)(fetch, basePath); - }, - /** - * - * @param {ApiRun} body - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - createRun(body: ApiRun, options?: any) { - return RunServiceApiFp(configuration).createRun(body, options)(fetch, basePath); - }, - /** - * - * @param {string} id - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - deleteRun(id: string, options?: any) { - return RunServiceApiFp(configuration).deleteRun(id, options)(fetch, basePath); - }, - /** - * - * @param {string} run_id - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - getRun(run_id: string, options?: any) { - return RunServiceApiFp(configuration).getRun(run_id, options)(fetch, basePath); - }, - /** - * - * @param {string} [page_token] - * @param {number} [page_size] - * @param {string} [sort_by] Can be format of \"field_name\", \"field_name asc\" or \"field_name des\" Ascending by default. - * @param {string} [resource_reference_key_type] The type of the resource that referred to. - * @param {string} [resource_reference_key_id] The ID of the resource that referred to. - * @param {string} [filter] A base-64 encoded, JSON-serialized Filter protocol buffer (see filter.proto). - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - listRuns(page_token?: string, page_size?: number, sort_by?: string, resource_reference_key_type?: string, resource_reference_key_id?: string, filter?: string, options?: any) { - return RunServiceApiFp(configuration).listRuns(page_token, page_size, sort_by, resource_reference_key_type, resource_reference_key_id, filter, options)(fetch, basePath); - }, - /** - * - * @param {string} run_id The ID of the run. - * @param {string} node_id The ID of the running node. - * @param {string} artifact_name The name of the artifact. - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - readArtifact(run_id: string, node_id: string, artifact_name: string, options?: any) { - return RunServiceApiFp(configuration).readArtifact(run_id, node_id, artifact_name, options)(fetch, basePath); - }, - /** - * - * @summary ReportRunMetrics reports metrics of a run. Each metric is reported in its own transaction, so this API accepts partial failures. Metric can be uniquely identified by (run_id, node_id, name). Duplicate reporting will be ignored by the API. First reporting wins. - * @param {string} run_id Required. The parent run ID of the metric. 
- * @param {ApiReportRunMetricsRequest} body - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - reportRunMetrics(run_id: string, body: ApiReportRunMetricsRequest, options?: any) { - return RunServiceApiFp(configuration).reportRunMetrics(run_id, body, options)(fetch, basePath); - }, - /** - * - * @param {string} run_id - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - retryRun(run_id: string, options?: any) { - return RunServiceApiFp(configuration).retryRun(run_id, options)(fetch, basePath); - }, - /** - * - * @param {string} run_id - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - terminateRun(run_id: string, options?: any) { - return RunServiceApiFp(configuration).terminateRun(run_id, options)(fetch, basePath); - }, - /** - * - * @param {string} id - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - unarchiveRun(id: string, options?: any) { - return RunServiceApiFp(configuration).unarchiveRun(id, options)(fetch, basePath); - }, - }; -}; - -/** - * RunServiceApi - object-oriented interface - * @export - * @class RunServiceApi - * @extends {BaseAPI} - */ -export class RunServiceApi extends BaseAPI { +export const RunServiceApiFactory = function( + configuration?: Configuration, + fetch?: FetchAPI, + basePath?: string, +) { + return { /** - * - * @param {} id + * + * @param {string} id * @param {*} [options] Override http request option. * @throws {RequiredError} - * @memberof RunServiceApi */ - public archiveRun(id: string, options?: any) { - return RunServiceApiFp(this.configuration).archiveRun(id, options)(this.fetch, this.basePath); - } - + archiveRun(id: string, options?: any) { + return RunServiceApiFp(configuration).archiveRun(id, options)(fetch, basePath); + }, /** - * - * @param {} body + * + * @param {ApiRun} body * @param {*} [options] Override http request option. * @throws {RequiredError} - * @memberof RunServiceApi */ - public createRun(body: ApiRun, options?: any) { - return RunServiceApiFp(this.configuration).createRun(body, options)(this.fetch, this.basePath); - } - + createRun(body: ApiRun, options?: any) { + return RunServiceApiFp(configuration).createRun(body, options)(fetch, basePath); + }, /** - * - * @param {} id + * + * @param {string} id * @param {*} [options] Override http request option. * @throws {RequiredError} - * @memberof RunServiceApi */ - public deleteRun(id: string, options?: any) { - return RunServiceApiFp(this.configuration).deleteRun(id, options)(this.fetch, this.basePath); - } - + deleteRun(id: string, options?: any) { + return RunServiceApiFp(configuration).deleteRun(id, options)(fetch, basePath); + }, /** - * - * @param {} run_id + * + * @param {string} run_id * @param {*} [options] Override http request option. * @throws {RequiredError} - * @memberof RunServiceApi */ - public getRun(run_id: string, options?: any) { - return RunServiceApiFp(this.configuration).getRun(run_id, options)(this.fetch, this.basePath); - } - + getRun(run_id: string, options?: any) { + return RunServiceApiFp(configuration).getRun(run_id, options)(fetch, basePath); + }, /** - * - * @param {} [page_token] - * @param {} [page_size] - * @param {} [sort_by] Can be format of \"field_name\", \"field_name asc\" or \"field_name des\" Ascending by default. - * @param {} [resource_reference_key_type] The type of the resource that referred to. - * @param {} [resource_reference_key_id] The ID of the resource that referred to. 
- * @param {} [filter] A base-64 encoded, JSON-serialized Filter protocol buffer (see filter.proto). + * + * @param {string} [page_token] + * @param {number} [page_size] + * @param {string} [sort_by] Can be format of \"field_name\", \"field_name asc\" or \"field_name des\" Ascending by default. + * @param {'UNKNOWN_RESOURCE_TYPE' | 'EXPERIMENT' | 'JOB'} [resource_reference_key_type] The type of the resource that referred to. + * @param {string} [resource_reference_key_id] The ID of the resource that referred to. + * @param {string} [filter] A base-64 encoded, JSON-serialized Filter protocol buffer (see filter.proto). * @param {*} [options] Override http request option. * @throws {RequiredError} - * @memberof RunServiceApi */ - public listRuns(page_token?: string, page_size?: number, sort_by?: string, resource_reference_key_type?: string, resource_reference_key_id?: string, filter?: string, options?: any) { - return RunServiceApiFp(this.configuration).listRuns(page_token, page_size, sort_by, resource_reference_key_type, resource_reference_key_id, filter, options)(this.fetch, this.basePath); - } - + listRuns( + page_token?: string, + page_size?: number, + sort_by?: string, + resource_reference_key_type?: 'UNKNOWN_RESOURCE_TYPE' | 'EXPERIMENT' | 'JOB', + resource_reference_key_id?: string, + filter?: string, + options?: any, + ) { + return RunServiceApiFp(configuration).listRuns( + page_token, + page_size, + sort_by, + resource_reference_key_type, + resource_reference_key_id, + filter, + options, + )(fetch, basePath); + }, /** - * - * @param {} run_id The ID of the run. - * @param {} node_id The ID of the running node. - * @param {} artifact_name The name of the artifact. + * + * @param {string} run_id The ID of the run. + * @param {string} node_id The ID of the running node. + * @param {string} artifact_name The name of the artifact. * @param {*} [options] Override http request option. * @throws {RequiredError} - * @memberof RunServiceApi */ - public readArtifact(run_id: string, node_id: string, artifact_name: string, options?: any) { - return RunServiceApiFp(this.configuration).readArtifact(run_id, node_id, artifact_name, options)(this.fetch, this.basePath); - } - + readArtifact(run_id: string, node_id: string, artifact_name: string, options?: any) { + return RunServiceApiFp(configuration).readArtifact(run_id, node_id, artifact_name, options)( + fetch, + basePath, + ); + }, /** - * + * * @summary ReportRunMetrics reports metrics of a run. Each metric is reported in its own transaction, so this API accepts partial failures. Metric can be uniquely identified by (run_id, node_id, name). Duplicate reporting will be ignored by the API. First reporting wins. - * @param {} run_id Required. The parent run ID of the metric. - * @param {} body + * @param {string} run_id Required. The parent run ID of the metric. + * @param {ApiReportRunMetricsRequest} body * @param {*} [options] Override http request option. * @throws {RequiredError} - * @memberof RunServiceApi */ - public reportRunMetrics(run_id: string, body: ApiReportRunMetricsRequest, options?: any) { - return RunServiceApiFp(this.configuration).reportRunMetrics(run_id, body, options)(this.fetch, this.basePath); - } - + reportRunMetrics(run_id: string, body: ApiReportRunMetricsRequest, options?: any) { + return RunServiceApiFp(configuration).reportRunMetrics(run_id, body, options)( + fetch, + basePath, + ); + }, /** - * - * @param {} run_id + * + * @param {string} run_id * @param {*} [options] Override http request option. 
* @throws {RequiredError} - * @memberof RunServiceApi */ - public retryRun(run_id: string, options?: any) { - return RunServiceApiFp(this.configuration).retryRun(run_id, options)(this.fetch, this.basePath); - } - + retryRun(run_id: string, options?: any) { + return RunServiceApiFp(configuration).retryRun(run_id, options)(fetch, basePath); + }, /** - * - * @param {} run_id + * + * @param {string} run_id * @param {*} [options] Override http request option. * @throws {RequiredError} - * @memberof RunServiceApi */ - public terminateRun(run_id: string, options?: any) { - return RunServiceApiFp(this.configuration).terminateRun(run_id, options)(this.fetch, this.basePath); - } - + terminateRun(run_id: string, options?: any) { + return RunServiceApiFp(configuration).terminateRun(run_id, options)(fetch, basePath); + }, /** - * - * @param {} id + * + * @param {string} id * @param {*} [options] Override http request option. * @throws {RequiredError} - * @memberof RunServiceApi */ - public unarchiveRun(id: string, options?: any) { - return RunServiceApiFp(this.configuration).unarchiveRun(id, options)(this.fetch, this.basePath); - } + unarchiveRun(id: string, options?: any) { + return RunServiceApiFp(configuration).unarchiveRun(id, options)(fetch, basePath); + }, + }; +}; +/** + * RunServiceApi - object-oriented interface + * @export + * @class RunServiceApi + * @extends {BaseAPI} + */ +export class RunServiceApi extends BaseAPI { + /** + * + * @param {string} id + * @param {*} [options] Override http request option. + * @throws {RequiredError} + * @memberof RunServiceApi + */ + public archiveRun(id: string, options?: any) { + return RunServiceApiFp(this.configuration).archiveRun(id, options)(this.fetch, this.basePath); + } + + /** + * + * @param {ApiRun} body + * @param {*} [options] Override http request option. + * @throws {RequiredError} + * @memberof RunServiceApi + */ + public createRun(body: ApiRun, options?: any) { + return RunServiceApiFp(this.configuration).createRun(body, options)(this.fetch, this.basePath); + } + + /** + * + * @param {string} id + * @param {*} [options] Override http request option. + * @throws {RequiredError} + * @memberof RunServiceApi + */ + public deleteRun(id: string, options?: any) { + return RunServiceApiFp(this.configuration).deleteRun(id, options)(this.fetch, this.basePath); + } + + /** + * + * @param {string} run_id + * @param {*} [options] Override http request option. + * @throws {RequiredError} + * @memberof RunServiceApi + */ + public getRun(run_id: string, options?: any) { + return RunServiceApiFp(this.configuration).getRun(run_id, options)(this.fetch, this.basePath); + } + + /** + * + * @param {string} [page_token] + * @param {number} [page_size] + * @param {string} [sort_by] Can be format of \"field_name\", \"field_name asc\" or \"field_name des\" Ascending by default. + * @param {'UNKNOWN_RESOURCE_TYPE' | 'EXPERIMENT' | 'JOB'} [resource_reference_key_type] The type of the resource that referred to. + * @param {string} [resource_reference_key_id] The ID of the resource that referred to. + * @param {string} [filter] A base-64 encoded, JSON-serialized Filter protocol buffer (see filter.proto). + * @param {*} [options] Override http request option. 
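+   * @example
+   * // Illustrative sketch only. The predicate fields below are an assumption
+   * // about the JSON form of filter.proto, shown purely for illustration:
+   * //   const filter = btoa(
+   * //     JSON.stringify({ predicates: [{ key: 'name', op: 'EQUALS', string_value: 'my-run' }] }),
+   * //   );
+   * //   new RunServiceApi(config).listRuns(undefined, 10, 'created_at desc', 'EXPERIMENT', experimentId, filter);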
+ * @throws {RequiredError} + * @memberof RunServiceApi + */ + public listRuns( + page_token?: string, + page_size?: number, + sort_by?: string, + resource_reference_key_type?: 'UNKNOWN_RESOURCE_TYPE' | 'EXPERIMENT' | 'JOB', + resource_reference_key_id?: string, + filter?: string, + options?: any, + ) { + return RunServiceApiFp(this.configuration).listRuns( + page_token, + page_size, + sort_by, + resource_reference_key_type, + resource_reference_key_id, + filter, + options, + )(this.fetch, this.basePath); + } + + /** + * + * @param {string} run_id The ID of the run. + * @param {string} node_id The ID of the running node. + * @param {string} artifact_name The name of the artifact. + * @param {*} [options] Override http request option. + * @throws {RequiredError} + * @memberof RunServiceApi + */ + public readArtifact(run_id: string, node_id: string, artifact_name: string, options?: any) { + return RunServiceApiFp(this.configuration).readArtifact( + run_id, + node_id, + artifact_name, + options, + )(this.fetch, this.basePath); + } + + /** + * + * @summary ReportRunMetrics reports metrics of a run. Each metric is reported in its own transaction, so this API accepts partial failures. Metric can be uniquely identified by (run_id, node_id, name). Duplicate reporting will be ignored by the API. First reporting wins. + * @param {string} run_id Required. The parent run ID of the metric. + * @param {ApiReportRunMetricsRequest} body + * @param {*} [options] Override http request option. + * @throws {RequiredError} + * @memberof RunServiceApi + */ + public reportRunMetrics(run_id: string, body: ApiReportRunMetricsRequest, options?: any) { + return RunServiceApiFp(this.configuration).reportRunMetrics(run_id, body, options)( + this.fetch, + this.basePath, + ); + } + + /** + * + * @param {string} run_id + * @param {*} [options] Override http request option. + * @throws {RequiredError} + * @memberof RunServiceApi + */ + public retryRun(run_id: string, options?: any) { + return RunServiceApiFp(this.configuration).retryRun(run_id, options)(this.fetch, this.basePath); + } + + /** + * + * @param {string} run_id + * @param {*} [options] Override http request option. + * @throws {RequiredError} + * @memberof RunServiceApi + */ + public terminateRun(run_id: string, options?: any) { + return RunServiceApiFp(this.configuration).terminateRun(run_id, options)( + this.fetch, + this.basePath, + ); + } + + /** + * + * @param {string} id + * @param {*} [options] Override http request option. + * @throws {RequiredError} + * @memberof RunServiceApi + */ + public unarchiveRun(id: string, options?: any) { + return RunServiceApiFp(this.configuration).unarchiveRun(id, options)(this.fetch, this.basePath); + } } - diff --git a/frontend/src/apis/run/configuration.ts b/frontend/src/apis/run/configuration.ts index 27904f8b5d4a..885b3ec7edc9 100644 --- a/frontend/src/apis/run/configuration.ts +++ b/frontend/src/apis/run/configuration.ts @@ -4,63 +4,62 @@ * No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen) * * OpenAPI spec version: version not set - * + * * * NOTE: This class is auto generated by the swagger code generator program. * https://github.com/swagger-api/swagger-codegen.git * Do not edit the class manually. 
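For orientation, a hedged sketch of driving the regenerated run client above. Import paths, the experiment ID, and the sort key are illustrative; the `filter` argument is URI-encoded, JSON-serialized `filter.proto` content, mirroring how the `CustomTable` tests later in this diff construct it:

```typescript
import { RunServiceApi } from 'src/apis/run'; // illustrative path
import { Configuration } from 'src/apis/run/configuration';

// Hypothetical predicate: runs whose name contains 'mnist'. The shape
// follows filter.proto, as the listRuns doc comment above references.
const filter = encodeURIComponent(
  JSON.stringify({
    predicates: [{ key: 'name', op: 'IS_SUBSTRING', string_value: 'mnist' }],
  }),
);

const api = new RunServiceApi(new Configuration({ basePath: '/pipeline' }));
api
  .listRuns(undefined, 20, 'created_at desc', 'EXPERIMENT', 'my-experiment-id', filter)
  .then(response => console.log(response.runs)); // `runs` field assumed from the list response
```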
*/ - export interface ConfigurationParameters { - apiKey?: string | ((name: string) => string); - username?: string; - password?: string; - accessToken?: string | ((name: string, scopes?: string[]) => string); - basePath?: string; + apiKey?: string | ((name: string) => string); + username?: string; + password?: string; + accessToken?: string | ((name: string, scopes?: string[]) => string); + basePath?: string; } export class Configuration { - /** - * parameter for apiKey security - * @param name security name - * @memberof Configuration - */ - apiKey?: string | ((name: string) => string); - /** - * parameter for basic security - * - * @type {string} - * @memberof Configuration - */ - username?: string; - /** - * parameter for basic security - * - * @type {string} - * @memberof Configuration - */ - password?: string; - /** - * parameter for oauth2 security - * @param name security name - * @param scopes oauth2 scope - * @memberof Configuration - */ - accessToken?: string | ((name: string, scopes?: string[]) => string); - /** - * override base path - * - * @type {string} - * @memberof Configuration - */ - basePath?: string; + /** + * parameter for apiKey security + * @param name security name + * @memberof Configuration + */ + apiKey?: string | ((name: string) => string); + /** + * parameter for basic security + * + * @type {string} + * @memberof Configuration + */ + username?: string; + /** + * parameter for basic security + * + * @type {string} + * @memberof Configuration + */ + password?: string; + /** + * parameter for oauth2 security + * @param name security name + * @param scopes oauth2 scope + * @memberof Configuration + */ + accessToken?: string | ((name: string, scopes?: string[]) => string); + /** + * override base path + * + * @type {string} + * @memberof Configuration + */ + basePath?: string; - constructor(param: ConfigurationParameters = {}) { - this.apiKey = param.apiKey; - this.username = param.username; - this.password = param.password; - this.accessToken = param.accessToken; - this.basePath = param.basePath; - } + constructor(param: ConfigurationParameters = {}) { + this.apiKey = param.apiKey; + this.username = param.username; + this.password = param.password; + this.accessToken = param.accessToken; + this.basePath = param.basePath; + } } diff --git a/frontend/src/apis/run/custom.d.ts b/frontend/src/apis/run/custom.d.ts index 02f969575e37..4c611cc3216e 100644 --- a/frontend/src/apis/run/custom.d.ts +++ b/frontend/src/apis/run/custom.d.ts @@ -1 +1,2 @@ -declare module 'portable-fetch'; \ No newline at end of file +declare module 'portable-fetch'; +declare module 'url'; diff --git a/frontend/src/apis/run/index.ts b/frontend/src/apis/run/index.ts index 04238c0fb2c8..7466271c64c1 100644 --- a/frontend/src/apis/run/index.ts +++ b/frontend/src/apis/run/index.ts @@ -4,13 +4,12 @@ * No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen) * * OpenAPI spec version: version not set - * + * * * NOTE: This class is auto generated by the swagger code generator program. * https://github.com/swagger-api/swagger-codegen.git * Do not edit the class manually. 
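One detail of the `Configuration` class above worth illustrating: `apiKey` can be a plain string or a resolver that the generated param creators invoke with the security name (`'authorization'`), as the visualization client below does when building headers. A minimal sketch with a hypothetical token store:

```typescript
import { Configuration } from 'src/apis/run/configuration'; // illustrative path

// A resolver lets the bearer token be read lazily on every request.
// The localStorage key is hypothetical.
const config = new Configuration({
  basePath: '/pipeline',
  apiKey: (name: string) =>
    name === 'authorization' ? `Bearer ${window.localStorage.getItem('kf-token') || ''}` : '',
});
```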
*/ - -export * from "./api"; -export * from "./configuration"; +export * from './api'; +export * from './configuration'; diff --git a/frontend/src/apis/visualization/.swagger-codegen/VERSION b/frontend/src/apis/visualization/.swagger-codegen/VERSION index a6254504e401..48a6b508dc9f 100644 --- a/frontend/src/apis/visualization/.swagger-codegen/VERSION +++ b/frontend/src/apis/visualization/.swagger-codegen/VERSION @@ -1 +1 @@ -2.3.1 \ No newline at end of file +2.4.7 \ No newline at end of file diff --git a/frontend/src/apis/visualization/api.ts b/frontend/src/apis/visualization/api.ts index 573f9b6f99ea..6d0c3e6b3580 100644 --- a/frontend/src/apis/visualization/api.ts +++ b/frontend/src/apis/visualization/api.ts @@ -5,29 +5,28 @@ * No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen) * * OpenAPI spec version: version not set - * + * * * NOTE: This class is auto generated by the swagger code generator program. * https://github.com/swagger-api/swagger-codegen.git * Do not edit the class manually. */ +import * as url from 'url'; +import * as portableFetch from 'portable-fetch'; +import { Configuration } from './configuration'; -import * as url from "url"; -import * as portableFetch from "portable-fetch"; -import { Configuration } from "./configuration"; - -const BASE_PATH = "http://localhost".replace(/\/+$/, ""); +const BASE_PATH = 'http://localhost'.replace(/\/+$/, ''); /** * * @export */ export const COLLECTION_FORMATS = { - csv: ",", - ssv: " ", - tsv: "\t", - pipes: "|", + csv: ',', + ssv: ' ', + tsv: '\t', + pipes: '|', }; /** @@ -36,110 +35,114 @@ export const COLLECTION_FORMATS = { * @interface FetchAPI */ export interface FetchAPI { - (url: string, init?: any): Promise; + (url: string, init?: any): Promise; } /** - * + * * @export * @interface FetchArgs */ export interface FetchArgs { - url: string; - options: any; + url: string; + options: any; } /** - * + * * @export * @class BaseAPI */ export class BaseAPI { - protected configuration: Configuration; + protected configuration: Configuration; - constructor(configuration?: Configuration, protected basePath: string = BASE_PATH, protected fetch: FetchAPI = portableFetch) { - if (configuration) { - this.configuration = configuration; - this.basePath = configuration.basePath || this.basePath; - } + constructor( + configuration?: Configuration, + protected basePath: string = BASE_PATH, + protected fetch: FetchAPI = portableFetch, + ) { + if (configuration) { + this.configuration = configuration; + this.basePath = configuration.basePath || this.basePath; } -}; + } +} /** - * + * * @export * @class RequiredError * @extends {Error} */ export class RequiredError extends Error { - name: "RequiredError" - constructor(public field: string, msg?: string) { - super(msg); - } + name: 'RequiredError'; + constructor(public field: string, msg?: string) { + super(msg); + } } /** - * + * * @export * @interface ApiStatus */ export interface ApiStatus { - /** - * - * @type {string} - * @memberof ApiStatus - */ - error?: string; - /** - * - * @type {number} - * @memberof ApiStatus - */ - code?: number; - /** - * - * @type {Array<ProtobufAny>} - * @memberof ApiStatus - */ - details?: Array; + /** + * + * @type {string} + * @memberof ApiStatus + */ + error?: string; + /** + * + * @type {number} + * @memberof ApiStatus + */ + code?: number; + /** + * + * @type {Array} + * @memberof ApiStatus + */ + details?: Array; } /** - * + * * @export * @interface ApiVisualization */ export interface ApiVisualization { - /** 
- * - * @type {ApiVisualizationType} - * @memberof ApiVisualization - */ - type?: ApiVisualizationType; - /** - * Path pattern of input data to be used during generation of visualizations. This is required when creating the pipeline through CreateVisualization API. - * @type {string} - * @memberof ApiVisualization - */ - source?: string; - /** - * Variables to be used during generation of a visualization. This should be provided as a JSON string. This is required when creating the pipeline through CreateVisualization API. - * @type {string} - * @memberof ApiVisualization - */ - arguments?: string; - /** - * Output. Generated visualization html. - * @type {string} - * @memberof ApiVisualization - */ - html?: string; - /** - * In case any error happens when generating visualizations, only visualization ID and the error message are returned. Client has the flexibility of choosing how to handle the error. - * @type {string} - * @memberof ApiVisualization - */ - error?: string; + /** + * + * @type {ApiVisualizationType} + * @memberof ApiVisualization + */ + type?: ApiVisualizationType; + /** + * Path pattern of input data to be used during generation of visualizations. This is required when creating the pipeline through CreateVisualization API. + * @type {string} + * @memberof ApiVisualization + */ + source?: string; + /** + * Variables to be used during generation of a visualization. This should be provided as a JSON string. This is required when creating the pipeline through CreateVisualization API. + * @type {string} + * @memberof ApiVisualization + */ + arguments?: string; + /** + * Output. Generated visualization html. + * @type {string} + * @memberof ApiVisualization + */ + html?: string; + /** + * In case any error happens when generating visualizations, only visualization ID and the error message are returned. Client has the flexibility of choosing how to handle the error. + * @type {string} + * @memberof ApiVisualization + */ + error?: string; } /** @@ -148,11 +151,11 @@ export interface ApiVisualization { * @enum {string} */ export enum ApiVisualizationType { - ROCCURVE = 'ROC_CURVE', - TFDV = 'TFDV', - TFMA = 'TFMA', - TABLE = 'TABLE', - CUSTOM = 'CUSTOM' + ROCCURVE = 'ROC_CURVE', + TFDV = 'TFDV', + TFMA = 'TFMA', + TABLE = 'TABLE', + CUSTOM = 'CUSTOM', } /** @@ -161,67 +164,77 @@ export enum ApiVisualizationType { * @interface ProtobufAny */ export interface ProtobufAny { - /** - * A URL/resource name that uniquely identifies the type of the serialized protocol buffer message. The last segment of the URL's path must represent the fully qualified name of the type (as in `path/google.protobuf.Duration`). The name should be in a canonical form (e.g., leading \".\" is not accepted). In practice, teams usually precompile into the binary all types that they expect it to use in the context of Any. However, for URLs which use the scheme `http`, `https`, or no scheme, one can optionally set up a type server that maps type URLs to message definitions as follows: * If no scheme is provided, `https` is assumed. * An HTTP GET on the URL must yield a [google.protobuf.Type][] value in binary format, or produce an error. * Applications are allowed to cache lookup results based on the URL, or have them precompiled into a binary to avoid any lookup. Therefore, binary compatibility needs to be preserved on changes to types. (Use versioned type names to manage breaking changes.) 
Note: this functionality is not currently available in the official protobuf release, and it is not used for type URLs beginning with type.googleapis.com. Schemes other than `http`, `https` (or the empty scheme) might be used with implementation specific semantics. - * @type {string} - * @memberof ProtobufAny - */ - type_url?: string; - /** - * Must be a valid serialized protocol buffer of the above specified type. - * @type {string} - * @memberof ProtobufAny - */ - value?: string; + /** + * A URL/resource name that uniquely identifies the type of the serialized protocol buffer message. The last segment of the URL's path must represent the fully qualified name of the type (as in `path/google.protobuf.Duration`). The name should be in a canonical form (e.g., leading \".\" is not accepted). In practice, teams usually precompile into the binary all types that they expect it to use in the context of Any. However, for URLs which use the scheme `http`, `https`, or no scheme, one can optionally set up a type server that maps type URLs to message definitions as follows: * If no scheme is provided, `https` is assumed. * An HTTP GET on the URL must yield a [google.protobuf.Type][] value in binary format, or produce an error. * Applications are allowed to cache lookup results based on the URL, or have them precompiled into a binary to avoid any lookup. Therefore, binary compatibility needs to be preserved on changes to types. (Use versioned type names to manage breaking changes.) Note: this functionality is not currently available in the official protobuf release, and it is not used for type URLs beginning with type.googleapis.com. Schemes other than `http`, `https` (or the empty scheme) might be used with implementation specific semantics. + * @type {string} + * @memberof ProtobufAny + */ + type_url?: string; + /** + * Must be a valid serialized protocol buffer of the above specified type. + * @type {string} + * @memberof ProtobufAny + */ + value?: string; } - /** * VisualizationServiceApi - fetch parameter creator * @export */ -export const VisualizationServiceApiFetchParamCreator = function (configuration?: Configuration) { - return { - /** - * - * @param {ApiVisualization} body - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - createVisualization(body: ApiVisualization, options: any = {}): FetchArgs { - // verify required parameter 'body' is not null or undefined - if (body === null || body === undefined) { - throw new RequiredError('body','Required parameter body was null or undefined when calling createVisualization.'); - } - const localVarPath = `/apis/v1beta1/visualizations`; - const localVarUrlObj = url.parse(localVarPath, true); - const localVarRequestOptions = Object.assign({ method: 'POST' }, options); - const localVarHeaderParameter = {} as any; - const localVarQueryParameter = {} as any; +export const VisualizationServiceApiFetchParamCreator = function(configuration?: Configuration) { + return { + /** + * + * @param {ApiVisualization} body + * @param {*} [options] Override http request option. 
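As a concrete, hypothetical instance of the `ProtobufAny` shape documented above, packing a `google.protobuf.Duration`:

```typescript
import { ProtobufAny } from 'src/apis/visualization'; // illustrative path

// The last path segment of type_url is the fully qualified type name, per
// the field comment above; value carries the packed bytes. 'CAE=' is the
// base64 of a Duration with seconds = 1 (field 1 varint: 0x08 0x01).
const detail: ProtobufAny = {
  type_url: 'type.googleapis.com/google.protobuf.Duration',
  value: 'CAE=',
};
```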
+ * @throws {RequiredError} + */ + createVisualization(body: ApiVisualization, options: any = {}): FetchArgs { + // verify required parameter 'body' is not null or undefined + if (body === null || body === undefined) { + throw new RequiredError( + 'body', + 'Required parameter body was null or undefined when calling createVisualization.', + ); + } + const localVarPath = `/apis/v1beta1/visualizations`; + const localVarUrlObj = url.parse(localVarPath, true); + const localVarRequestOptions = Object.assign({ method: 'POST' }, options); + const localVarHeaderParameter = {} as any; + const localVarQueryParameter = {} as any; - // authentication Bearer required - if (configuration && configuration.apiKey) { - const localVarApiKeyValue = typeof configuration.apiKey === 'function' - ? configuration.apiKey("authorization") - : configuration.apiKey; - localVarHeaderParameter["authorization"] = localVarApiKeyValue; - } + // authentication Bearer required + if (configuration && configuration.apiKey) { + const localVarApiKeyValue = + typeof configuration.apiKey === 'function' + ? configuration.apiKey('authorization') + : configuration.apiKey; + localVarHeaderParameter['authorization'] = localVarApiKeyValue; + } - localVarHeaderParameter['Content-Type'] = 'application/json'; + localVarHeaderParameter['Content-Type'] = 'application/json'; - localVarUrlObj.query = Object.assign({}, localVarUrlObj.query, localVarQueryParameter, options.query); - // fix override query string Detail: https://stackoverflow.com/a/7517673/1077943 - delete localVarUrlObj.search; - localVarRequestOptions.headers = Object.assign({}, localVarHeaderParameter, options.headers); - const needsSerialization = ("ApiVisualization" !== "string") || localVarRequestOptions.headers['Content-Type'] === 'application/json'; - localVarRequestOptions.body = needsSerialization ? JSON.stringify(body || {}) : (body || ""); + localVarUrlObj.query = Object.assign( + {}, + localVarUrlObj.query, + localVarQueryParameter, + options.query, + ); + // fix override query string Detail: https://stackoverflow.com/a/7517673/1077943 + delete localVarUrlObj.search; + localVarRequestOptions.headers = Object.assign({}, localVarHeaderParameter, options.headers); + const needsSerialization = + 'ApiVisualization' !== 'string' || + localVarRequestOptions.headers['Content-Type'] === 'application/json'; + localVarRequestOptions.body = needsSerialization ? JSON.stringify(body || {}) : body || ''; - return { - url: url.format(localVarUrlObj), - options: localVarRequestOptions, - }; - }, - } + return { + url: url.format(localVarUrlObj), + options: localVarRequestOptions, + }; + }, + }; }; /** @@ -229,44 +242,56 @@ export const VisualizationServiceApiFetchParamCreator = function (configuration? * @export */ export const VisualizationServiceApiFp = function(configuration?: Configuration) { - return { - /** - * - * @param {ApiVisualization} body - * @param {*} [options] Override http request option. 
- * @throws {RequiredError} - */ - createVisualization(body: ApiVisualization, options?: any): (fetch?: FetchAPI, basePath?: string) => Promise { - const localVarFetchArgs = VisualizationServiceApiFetchParamCreator(configuration).createVisualization(body, options); - return (fetch: FetchAPI = portableFetch, basePath: string = BASE_PATH) => { - return fetch(basePath + localVarFetchArgs.url, localVarFetchArgs.options).then((response) => { - if (response.status >= 200 && response.status < 300) { - return response.json(); - } else { - throw response; - } - }); - }; - }, - } + return { + /** + * + * @param {ApiVisualization} body + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + createVisualization( + body: ApiVisualization, + options?: any, + ): (fetch?: FetchAPI, basePath?: string) => Promise { + const localVarFetchArgs = VisualizationServiceApiFetchParamCreator( + configuration, + ).createVisualization(body, options); + return (fetch: FetchAPI = portableFetch, basePath: string = BASE_PATH) => { + return fetch(basePath + localVarFetchArgs.url, localVarFetchArgs.options).then(response => { + if (response.status >= 200 && response.status < 300) { + return response.json(); + } else { + throw response; + } + }); + }; + }, + }; }; /** * VisualizationServiceApi - factory interface * @export */ -export const VisualizationServiceApiFactory = function (configuration?: Configuration, fetch?: FetchAPI, basePath?: string) { - return { - /** - * - * @param {ApiVisualization} body - * @param {*} [options] Override http request option. - * @throws {RequiredError} - */ - createVisualization(body: ApiVisualization, options?: any) { - return VisualizationServiceApiFp(configuration).createVisualization(body, options)(fetch, basePath); - }, - }; +export const VisualizationServiceApiFactory = function( + configuration?: Configuration, + fetch?: FetchAPI, + basePath?: string, +) { + return { + /** + * + * @param {ApiVisualization} body + * @param {*} [options] Override http request option. + * @throws {RequiredError} + */ + createVisualization(body: ApiVisualization, options?: any) { + return VisualizationServiceApiFp(configuration).createVisualization(body, options)( + fetch, + basePath, + ); + }, + }; }; /** @@ -276,16 +301,17 @@ export const VisualizationServiceApiFactory = function (configuration?: Configur * @extends {BaseAPI} */ export class VisualizationServiceApi extends BaseAPI { - /** - * - * @param {} body - * @param {*} [options] Override http request option. - * @throws {RequiredError} - * @memberof VisualizationServiceApi - */ - public createVisualization(body: ApiVisualization, options?: any) { - return VisualizationServiceApiFp(this.configuration).createVisualization(body, options)(this.fetch, this.basePath); - } - + /** + * + * @param {ApiVisualization} body + * @param {*} [options] Override http request option. 
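Putting the three layers above together (param creator → functional API → factory/class), a hedged end-to-end sketch; the source pattern and arguments are hypothetical:

```typescript
import { ApiVisualizationType, VisualizationServiceApi } from 'src/apis/visualization';

// The param creator serializes the body as JSON and POSTs it to
// /apis/v1beta1/visualizations; a null/undefined body raises RequiredError.
const api = new VisualizationServiceApi();
api
  .createVisualization({
    type: ApiVisualizationType.ROCCURVE,
    source: 'gs://my-bucket/roc-data/*.csv', // hypothetical path pattern
    arguments: JSON.stringify({ is_generated: 'False' }), // hypothetical args
  })
  .then(visualization => console.log(visualization.html))
  .catch(response => console.error('visualization request failed', response));
```

Note that the functional layer resolves with `response.json()` on a 2xx status and rethrows the raw response otherwise, which is why the `catch` above receives the response object rather than an `Error`.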
+ * @throws {RequiredError} + * @memberof VisualizationServiceApi + */ + public createVisualization(body: ApiVisualization, options?: any) { + return VisualizationServiceApiFp(this.configuration).createVisualization(body, options)( + this.fetch, + this.basePath, + ); + } } - diff --git a/frontend/src/apis/visualization/configuration.ts b/frontend/src/apis/visualization/configuration.ts index d81fb1744b9e..f560abd043e3 100644 --- a/frontend/src/apis/visualization/configuration.ts +++ b/frontend/src/apis/visualization/configuration.ts @@ -4,63 +4,62 @@ * No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen) * * OpenAPI spec version: version not set - * + * * * NOTE: This class is auto generated by the swagger code generator program. * https://github.com/swagger-api/swagger-codegen.git * Do not edit the class manually. */ - export interface ConfigurationParameters { - apiKey?: string | ((name: string) => string); - username?: string; - password?: string; - accessToken?: string | ((name: string, scopes?: string[]) => string); - basePath?: string; + apiKey?: string | ((name: string) => string); + username?: string; + password?: string; + accessToken?: string | ((name: string, scopes?: string[]) => string); + basePath?: string; } export class Configuration { - /** - * parameter for apiKey security - * @param name security name - * @memberof Configuration - */ - apiKey?: string | ((name: string) => string); - /** - * parameter for basic security - * - * @type {string} - * @memberof Configuration - */ - username?: string; - /** - * parameter for basic security - * - * @type {string} - * @memberof Configuration - */ - password?: string; - /** - * parameter for oauth2 security - * @param name security name - * @param scopes oauth2 scope - * @memberof Configuration - */ - accessToken?: string | ((name: string, scopes?: string[]) => string); - /** - * override base path - * - * @type {string} - * @memberof Configuration - */ - basePath?: string; + /** + * parameter for apiKey security + * @param name security name + * @memberof Configuration + */ + apiKey?: string | ((name: string) => string); + /** + * parameter for basic security + * + * @type {string} + * @memberof Configuration + */ + username?: string; + /** + * parameter for basic security + * + * @type {string} + * @memberof Configuration + */ + password?: string; + /** + * parameter for oauth2 security + * @param name security name + * @param scopes oauth2 scope + * @memberof Configuration + */ + accessToken?: string | ((name: string, scopes?: string[]) => string); + /** + * override base path + * + * @type {string} + * @memberof Configuration + */ + basePath?: string; - constructor(param: ConfigurationParameters = {}) { - this.apiKey = param.apiKey; - this.username = param.username; - this.password = param.password; - this.accessToken = param.accessToken; - this.basePath = param.basePath; - } + constructor(param: ConfigurationParameters = {}) { + this.apiKey = param.apiKey; + this.username = param.username; + this.password = param.password; + this.accessToken = param.accessToken; + this.basePath = param.basePath; + } } diff --git a/frontend/src/apis/visualization/custom.d.ts b/frontend/src/apis/visualization/custom.d.ts index 02f969575e37..4c611cc3216e 100644 --- a/frontend/src/apis/visualization/custom.d.ts +++ b/frontend/src/apis/visualization/custom.d.ts @@ -1 +1,2 @@ -declare module 'portable-fetch'; \ No newline at end of file +declare module 'portable-fetch'; +declare module 'url'; diff 
--git a/frontend/src/apis/visualization/index.ts b/frontend/src/apis/visualization/index.ts index 349871961dd4..37241e7909f4 100644 --- a/frontend/src/apis/visualization/index.ts +++ b/frontend/src/apis/visualization/index.ts @@ -4,13 +4,12 @@ * No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen) * * OpenAPI spec version: version not set - * + * * * NOTE: This class is auto generated by the swagger code generator program. * https://github.com/swagger-api/swagger-codegen.git * Do not edit the class manually. */ - -export * from "./api"; -export * from "./configuration"; +export * from './api'; +export * from './configuration'; diff --git a/frontend/src/assets.d.ts b/frontend/src/assets.d.ts new file mode 100644 index 000000000000..8cf0c373804e --- /dev/null +++ b/frontend/src/assets.d.ts @@ -0,0 +1,3 @@ +// Needed to import png files directly. +// Reference: https://github.com/wmonk/create-react-app-typescript/tree/master/template#adding-images-fonts-and-files +declare module '*.png'; diff --git a/frontend/src/atoms/BusyButton.tsx b/frontend/src/atoms/BusyButton.tsx index bd1cc2f57257..4b39a9e4d040 100644 --- a/frontend/src/atoms/BusyButton.tsx +++ b/frontend/src/atoms/BusyButton.tsx @@ -57,17 +57,20 @@ class BusyButton extends React.Component { public render(): JSX.Element { const { title, busy, className, disabled, icon, outlined, ...rest } = this.props; - return ; + return ( + + ); } } diff --git a/frontend/src/atoms/ExternalLink.tsx b/frontend/src/atoms/ExternalLink.tsx new file mode 100644 index 000000000000..1e851c7ff185 --- /dev/null +++ b/frontend/src/atoms/ExternalLink.tsx @@ -0,0 +1,19 @@ +import React, { DetailedHTMLProps, AnchorHTMLAttributes } from 'react'; +import { stylesheet } from 'typestyle'; +import { color } from '../Css'; + +const css = stylesheet({ + link: { + $nest: { + '&:hover': { + textDecoration: 'underline', + }, + }, + color: color.theme, + textDecoration: 'none', + }, +}); + +export const ExternalLink: React.FC< + DetailedHTMLProps, HTMLAnchorElement> +> = props => ; diff --git a/frontend/src/atoms/IconWithTooltip.test.tsx b/frontend/src/atoms/IconWithTooltip.test.tsx index a59f595b1f85..6b440691dca1 100644 --- a/frontend/src/atoms/IconWithTooltip.test.tsx +++ b/frontend/src/atoms/IconWithTooltip.test.tsx @@ -22,15 +22,20 @@ import { create } from 'react-test-renderer'; describe('IconWithTooltip', () => { it('renders without height or weight', () => { const tree = create( - + , ); expect(tree).toMatchSnapshot(); }); it('renders with height and weight', () => { const tree = create( - + , ); expect(tree).toMatchSnapshot(); }); diff --git a/frontend/src/atoms/IconWithTooltip.tsx b/frontend/src/atoms/IconWithTooltip.tsx index 6c3b4c761067..3f2d9fb69bae 100644 --- a/frontend/src/atoms/IconWithTooltip.tsx +++ b/frontend/src/atoms/IconWithTooltip.tsx @@ -31,11 +31,13 @@ export default (props: IconWithTooltipProps) => { return ( - + ); }; diff --git a/frontend/src/atoms/Input.test.tsx b/frontend/src/atoms/Input.test.tsx index 256ec37ae3f6..bc4b9018d1b2 100644 --- a/frontend/src/atoms/Input.test.tsx +++ b/frontend/src/atoms/Input.test.tsx @@ -20,18 +20,27 @@ import Input from './Input'; import { shallow } from 'enzyme'; import toJson from 'enzyme-to-json'; - describe('Input', () => { const handleChange = jest.fn(); const value = 'some input value'; it('renders with the right styles by default', () => { - const tree = shallow(); + const tree = shallow( + , + ); expect(toJson(tree)).toMatchSnapshot(); }); it('accepts 
height and width as prop overrides', () => { - const tree = shallow(); + const tree = shallow( + , + ); expect(toJson(tree)).toMatchSnapshot(); }); }); diff --git a/frontend/src/atoms/Input.tsx b/frontend/src/atoms/Input.tsx index f23396c8b6f3..ff6ffe954fb3 100644 --- a/frontend/src/atoms/Input.tsx +++ b/frontend/src/atoms/Input.tsx @@ -27,12 +27,17 @@ interface InputProps extends OutlinedTextFieldProps { export default (props: InputProps) => { const { height, maxWidth, variant, width, ...rest } = props; return ( - + {props.children} ); diff --git a/frontend/src/atoms/MD2Tabs.test.tsx b/frontend/src/atoms/MD2Tabs.test.tsx index 3acc1b5193c2..0c371ebb235e 100644 --- a/frontend/src/atoms/MD2Tabs.test.tsx +++ b/frontend/src/atoms/MD2Tabs.test.tsx @@ -29,20 +29,33 @@ describe('Input', () => { it('does not try to call the onSwitch handler if it is not defined', () => { const tree = shallow(); - tree.find(buttonSelector).at(1).simulate('click'); + tree + .find(buttonSelector) + .at(1) + .simulate('click'); }); it('calls the onSwitch function if an unselected button is clicked', () => { const switchHandler = jest.fn(); - const tree = shallow(); - tree.find(buttonSelector).at(1).simulate('click'); + const tree = shallow( + , + ); + tree + .find(buttonSelector) + .at(1) + .simulate('click'); expect(switchHandler).toHaveBeenCalled(); }); it('does not the onSwitch function if the already selected button is clicked', () => { const switchHandler = jest.fn(); - const tree = shallow(); - tree.find(buttonSelector).at(1).simulate('click'); + const tree = shallow( + , + ); + tree + .find(buttonSelector) + .at(1) + .simulate('click'); expect(switchHandler).not.toHaveBeenCalled(); }); diff --git a/frontend/src/atoms/MD2Tabs.tsx b/frontend/src/atoms/MD2Tabs.tsx index 1ccb77e35ef8..8e461dd8c2cc 100644 --- a/frontend/src/atoms/MD2Tabs.tsx +++ b/frontend/src/atoms/MD2Tabs.tsx @@ -66,7 +66,6 @@ const css = stylesheet({ }); class MD2Tabs extends React.Component { - private _rootRef = React.createRef(); private _indicatorRef = React.createRef(); private _tabRefs = this.props.tabs.map(t => React.createRef()); @@ -80,8 +79,15 @@ class MD2Tabs extends React.Component {
{this.props.tabs.map((tab, i) => ( - ))} @@ -116,12 +122,13 @@ class MD2Tabs extends React.Component { if (!activeLabelElement) { return; } - const leftOffset = activeLabelElement.getBoundingClientRect().left - + const leftOffset = + activeLabelElement.getBoundingClientRect().left - this._rootRef.current.getBoundingClientRect().left; const tabIndicator = this._indicatorRef.current; - tabIndicator.style.left = (leftOffset - 5) + 'px'; - tabIndicator.style.width = (activeLabelElement.getBoundingClientRect().width + 5) + 'px'; + tabIndicator.style.left = leftOffset - 5 + 'px'; + tabIndicator.style.width = activeLabelElement.getBoundingClientRect().width + 5 + 'px'; tabIndicator.style.display = 'block'; } } diff --git a/frontend/src/atoms/Separator.tsx b/frontend/src/atoms/Separator.tsx index 85509acbfde6..70647ccb0dfe 100644 --- a/frontend/src/atoms/Separator.tsx +++ b/frontend/src/atoms/Separator.tsx @@ -23,18 +23,20 @@ interface SeparatorProps { } const style = (orientation: Orientation, units: number) => { - return orientation === 'horizontal' ? { - display: 'inline-block', - minWidth: units, - width: units, - } : { - display: 'block', - flexShrink: 0, - height: units, - minHeight: units, - }; + return orientation === 'horizontal' + ? { + display: 'inline-block', + minWidth: units, + width: units, + } + : { + display: 'block', + flexShrink: 0, + height: units, + minHeight: units, + }; }; -export default (props: SeparatorProps) => ; +export default (props: SeparatorProps) => ( + +); diff --git a/frontend/src/components/ArtifactLink.tsx b/frontend/src/components/ArtifactLink.tsx new file mode 100644 index 000000000000..b30ff99b7df7 --- /dev/null +++ b/frontend/src/components/ArtifactLink.tsx @@ -0,0 +1,30 @@ +import * as React from 'react'; +import { generateGcsConsoleUri } from '../lib/Utils'; + +/** + * A component that renders an artifact URL as clickable link if URL is correct + */ +export const ArtifactLink: React.FC<{ artifactUri?: string }> = ({ artifactUri }) => { + let clickableUrl: string | undefined; + if (artifactUri) { + if (artifactUri.startsWith('gs:')) { + const gcsConsoleUrl = generateGcsConsoleUri(artifactUri); + if (gcsConsoleUrl) { + clickableUrl = gcsConsoleUrl; + } + } else if (artifactUri.startsWith('http:') || artifactUri.startsWith('https:')) { + clickableUrl = artifactUri; + } + } + + if (clickableUrl) { + // Opens in new window safely + return ( + + {artifactUri} + + ); + } else { + return <>{artifactUri}; + } +}; diff --git a/frontend/src/components/Banner.test.tsx b/frontend/src/components/Banner.test.tsx index 3cede5c48f99..cbffb075f952 100644 --- a/frontend/src/components/Banner.test.tsx +++ b/frontend/src/components/Banner.test.tsx @@ -41,19 +41,32 @@ describe('Banner', () => { }); it('shows "Refresh" button when passed a refresh function', () => { - const tree = shallow( { /* do nothing */}} />); + const tree = shallow( + { + /* do nothing */ + }} + />, + ); expect(tree).toMatchSnapshot(); }); it('opens details dialog when button is clicked', () => { const tree = shallow(); - tree.find('WithStyles(Button)').at(0).simulate('click'); + tree + .find('WithStyles(Button)') + .at(0) + .simulate('click'); expect(tree.state()).toHaveProperty('dialogOpen', true); }); it('closes details dialog when cancel button is clicked', () => { const tree = shallow(); - tree.find('WithStyles(Button)').at(0).simulate('click'); + tree + .find('WithStyles(Button)') + .at(0) + .simulate('click'); expect(tree.state()).toHaveProperty('dialogOpen', true); 
tree.find('#dismissDialogBtn').simulate('click'); expect(tree.state()).toHaveProperty('dialogOpen', false); @@ -62,7 +75,10 @@ describe('Banner', () => { it('calls refresh callback', () => { const spy = jest.fn(); const tree = shallow(); - tree.find('.' + css.refreshButton).at(0).simulate('click'); + tree + .find('.' + css.refreshButton) + .at(0) + .simulate('click'); expect(spy).toHaveBeenCalled(); }); }); diff --git a/frontend/src/components/Banner.tsx b/frontend/src/components/Banner.tsx index 466895ae3f71..1d2c1d56d9d4 100644 --- a/frontend/src/components/Banner.tsx +++ b/frontend/src/components/Banner.tsx @@ -68,7 +68,6 @@ interface BannerState { } class Banner extends React.Component { - constructor(props: any) { super(props); @@ -78,20 +77,25 @@ class Banner extends React.Component { } public render(): JSX.Element { - // Default to error styles. - let bannerModeCss = stylesheet({ mode: { backgroundColor: color.errorBg, color: color.errorText, } }); + let bannerModeCss = stylesheet({ + mode: { backgroundColor: color.errorBg, color: color.errorText }, + }); let bannerIcon = ; let dialogTitle = 'An error occurred'; switch (this.props.mode) { case 'error': - bannerModeCss = stylesheet({ mode: { backgroundColor: color.errorBg, color: color.errorText, } }); + bannerModeCss = stylesheet({ + mode: { backgroundColor: color.errorBg, color: color.errorText }, + }); bannerIcon = ; dialogTitle = 'An error occurred'; break; case 'warning': - bannerModeCss = stylesheet({ mode: { backgroundColor: color.warningBg, color: color.warningText, } }); + bannerModeCss = stylesheet({ + mode: { backgroundColor: color.warningBg, color: color.warningText }, + }); bannerIcon = ; dialogTitle = 'Warning'; break; @@ -107,26 +111,32 @@ class Banner extends React.Component { {this.props.message}
- {this.props.additionalInfo && + {this.props.additionalInfo && ( - } - {this.props.refresh && - - } + )}
- {this.props.additionalInfo - && + {this.props.additionalInfo && ( + {dialogTitle} {this.props.additionalInfo} - + - } + + )} ); } diff --git a/frontend/src/components/CollapseButton.test.tsx b/frontend/src/components/CollapseButton.test.tsx index 3afdfba94485..f0302fc3f874 100644 --- a/frontend/src/components/CollapseButton.test.tsx +++ b/frontend/src/components/CollapseButton.test.tsx @@ -26,39 +26,57 @@ describe('CollapseButton', () => { }, } as any; - afterEach(() => compareComponent.state.collapseSections = {}); + afterEach(() => (compareComponent.state.collapseSections = {})); it('initial render', () => { const tree = shallow( - ); + , + ); expect(tree).toMatchSnapshot(); }); it('renders the button collapsed if in collapsedSections', () => { compareComponent.state.collapseSections.testSection = true; const tree = shallow( - ); + , + ); expect(tree).toMatchSnapshot(); }); it('collapses given section when clicked', () => { const tree = shallow( - ); + , + ); tree.find('WithStyles(Button)').simulate('click'); - expect(compareComponent.setState).toHaveBeenCalledWith( - { collapseSections: { testSection: true } }); + expect(compareComponent.setState).toHaveBeenCalledWith({ + collapseSections: { testSection: true }, + }); }); it('expands given section when clicked if it is collapsed', () => { compareComponent.state.collapseSections.testSection = true; const tree = shallow( - ); + , + ); tree.find('WithStyles(Button)').simulate('click'); - expect(compareComponent.setState).toHaveBeenCalledWith( - { collapseSections: { testSection: false } }); + expect(compareComponent.setState).toHaveBeenCalledWith({ + collapseSections: { testSection: false }, + }); }); }); diff --git a/frontend/src/components/CollapseButton.tsx b/frontend/src/components/CollapseButton.tsx index adddab6bc644..e0daede3fa0e 100644 --- a/frontend/src/components/CollapseButton.tsx +++ b/frontend/src/components/CollapseButton.tsx @@ -44,22 +44,26 @@ interface CollapseButtonProps { } class CollapseButton extends React.Component { - public render(): JSX.Element { const { collapseSections, compareSetState } = this.props; const sectionName = this.props.sectionName; - return
- -
; + return ( +
+ +
+ ); } } diff --git a/frontend/src/components/CompareTable.test.tsx b/frontend/src/components/CompareTable.test.tsx index e004b65a3334..63f76b39ce20 100644 --- a/frontend/src/components/CompareTable.test.tsx +++ b/frontend/src/components/CompareTable.test.tsx @@ -37,18 +37,15 @@ describe('CompareTable', () => { expect(tree).toMatchSnapshot(); }); - const rows = [ - ['1', '2', '3'], - ['4', '5', '6'], - ['cell7', 'cell8', 'cell9'], - ]; + const rows = [['1', '2', '3'], ['4', '5', '6'], ['cell7', 'cell8', 'cell9']]; const xLabels = ['col1', 'col2', 'col3']; const yLabels = ['row1', 'row2', 'row3']; it('logs error if ylabels and rows have different lengths', () => { shallow(); expect(consoleSpy).toHaveBeenCalledWith( - 'Number of rows (2) should match the number of Y labels (3).'); + 'Number of rows (2) should match the number of Y labels (3).', + ); }); it('renders one row with three columns', () => { diff --git a/frontend/src/components/CompareTable.tsx b/frontend/src/components/CompareTable.tsx index 26edb18b8f1f..fda6a2ee6010 100644 --- a/frontend/src/components/CompareTable.tsx +++ b/frontend/src/components/CompareTable.tsx @@ -60,7 +60,8 @@ class CompareTable extends React.PureComponent { const { rows, xLabels, yLabels } = this.props; if (rows.length !== yLabels.length) { logger.error( - `Number of rows (${rows.length}) should match the number of Y labels (${yLabels.length}).`); + `Number of rows (${rows.length}) should match the number of Y labels (${yLabels.length}).`, + ); } if (!rows || rows.length === 0) { return null; @@ -73,16 +74,24 @@ class CompareTable extends React.PureComponent { {/* X labels row */} {xLabels.map((label, i) => ( - {label} + + {label} + ))} {rows.map((row, i) => ( {/* Y label */} - {yLabels[i]} + + {yLabels[i]} + {/* Row cells */} - {row.map((cell, j) => {cell})} + {row.map((cell, j) => ( + + {cell} + + ))} ))} diff --git a/frontend/src/components/CustomTable.test.tsx b/frontend/src/components/CustomTable.test.tsx index 0daec384f52c..53f9f8a80100 100644 --- a/frontend/src/components/CustomTable.test.tsx +++ b/frontend/src/components/CustomTable.test.tsx @@ -102,33 +102,44 @@ describe('CustomTable', () => { }); it('renders some columns with equal widths without rows', async () => { - const tree = shallow(); + const tree = shallow( + , + ); await TestUtils.flushPromises(); expect(tree).toMatchSnapshot(); }); it('renders without the checkboxes if disableSelection is true', async () => { - const tree = shallow(); + const tree = shallow( + , + ); await TestUtils.flushPromises(); expect(tree).toMatchSnapshot(); }); it('renders some columns with descending sort order on first column', async () => { - const tree = shallow(); + const tree = shallow( + , + ); await TestUtils.flushPromises(); expect(tree).toMatchSnapshot(); }); it('renders columns with specified widths', async () => { - const testcolumns = [{ - flex: 3, - label: 'col1', - }, { - flex: 1, - label: 'col2', - }]; + const testcolumns = [ + { + flex: 3, + label: 'col1', + }, + { + flex: 1, + label: 'col2', + }, + ]; const tree = shallow(); await TestUtils.flushPromises(); expect(tree).toMatchSnapshot(); @@ -147,15 +158,18 @@ describe('CustomTable', () => { }); it('calls reload function with sort key of clicked column, while keeping same page', () => { - const testcolumns = [{ - flex: 3, - label: 'col1', - sortKey: 'col1sortkey', - }, { - flex: 1, - label: 'col2', - sortKey: 'col2sortkey', - }]; + const testcolumns = [ + { + flex: 3, + label: 'col1', + sortKey: 'col1sortkey', + }, + { + flex: 1, + label: 
'col2', + sortKey: 'col2sortkey', + }, + ]; const reload = jest.fn(); const tree = shallow(); expect(reload).toHaveBeenLastCalledWith({ @@ -166,7 +180,10 @@ describe('CustomTable', () => { sortBy: 'col1sortkey desc', }); - tree.find('WithStyles(TableSortLabel)').at(1).simulate('click'); + tree + .find('WithStyles(TableSortLabel)') + .at(1) + .simulate('click'); expect(reload).toHaveBeenLastCalledWith({ filter: '', orderAscending: true, @@ -177,15 +194,18 @@ describe('CustomTable', () => { }); it('calls reload function with same sort key in reverse order if same column is clicked twice', () => { - const testcolumns = [{ - flex: 3, - label: 'col1', - sortKey: 'col1sortkey', - }, { - flex: 1, - label: 'col2', - sortKey: 'col2sortkey', - }]; + const testcolumns = [ + { + flex: 3, + label: 'col1', + sortKey: 'col1sortkey', + }, + { + flex: 1, + label: 'col2', + sortKey: 'col2sortkey', + }, + ]; const reload = jest.fn(); const tree = shallow(); expect(reload).toHaveBeenLastCalledWith({ @@ -196,7 +216,10 @@ describe('CustomTable', () => { sortBy: 'col1sortkey desc', }); - tree.find('WithStyles(TableSortLabel)').at(1).simulate('click'); + tree + .find('WithStyles(TableSortLabel)') + .at(1) + .simulate('click'); expect(reload).toHaveBeenLastCalledWith({ filter: '', orderAscending: true, @@ -205,7 +228,10 @@ describe('CustomTable', () => { sortBy: 'col2sortkey', }); tree.setProps({ sortBy: 'col1sortkey' }); - tree.find('WithStyles(TableSortLabel)').at(1).simulate('click'); + tree + .find('WithStyles(TableSortLabel)') + .at(1) + .simulate('click'); expect(reload).toHaveBeenLastCalledWith({ filter: '', orderAscending: false, @@ -216,13 +242,16 @@ describe('CustomTable', () => { }); it('does not call reload if clicked column has no sort key', () => { - const testcolumns = [{ - flex: 3, - label: 'col1', - }, { - flex: 1, - label: 'col2', - }]; + const testcolumns = [ + { + flex: 3, + label: 'col1', + }, + { + flex: 1, + label: 'col2', + }, + ]; const reload = jest.fn(); const tree = shallow(); expect(reload).toHaveBeenLastCalledWith({ @@ -233,7 +262,10 @@ describe('CustomTable', () => { sortBy: '', }); - tree.find('WithStyles(TableSortLabel)').at(0).simulate('click'); + tree + .find('WithStyles(TableSortLabel)') + .at(0) + .simulate('click'); expect(reload).toHaveBeenLastCalledWith({ filter: '', orderAscending: false, @@ -246,14 +278,16 @@ describe('CustomTable', () => { it('logs error if row has more cells than columns', () => { shallow(); expect(consoleSpy).toHaveBeenLastCalledWith( - 'Rows must have the same number of cells defined in columns'); + 'Rows must have the same number of cells defined in columns', + ); }); it('logs error if row has fewer cells than columns', () => { const testcolumns = [{ label: 'col1' }, { label: 'col2' }, { label: 'col3' }]; shallow(); expect(consoleSpy).toHaveBeenLastCalledWith( - 'Rows must have the same number of cells defined in columns'); + 'Rows must have the same number of cells defined in columns', + ); }); it('renders some rows', async () => { @@ -270,17 +304,30 @@ describe('CustomTable', () => { it('calls update selection callback when items are selected', () => { const spy = jest.fn(); - const tree = shallow(); - tree.find('.row').at(0).simulate('click', { stopPropagation: () => null }); + const tree = shallow( + , + ); + tree + .find('.row') + .at(0) + .simulate('click', { stopPropagation: () => null }); expect(spy).toHaveBeenLastCalledWith(['row1']); }); it('does not add items to selection when multiple rows are clicked', () => { // Keeping track of 
selection is the parent's job. const spy = jest.fn(); - const tree = shallow(); - tree.find('.row').at(0).simulate('click', { stopPropagation: () => null }); - tree.find('.row').at(1).simulate('click', { stopPropagation: () => null }); + const tree = shallow( + , + ); + tree + .find('.row') + .at(0) + .simulate('click', { stopPropagation: () => null }); + tree + .find('.row') + .at(1) + .simulate('click', { stopPropagation: () => null }); expect(spy).toHaveBeenLastCalledWith(['row2']); }); @@ -288,57 +335,108 @@ describe('CustomTable', () => { // Keeping track of selection is the parent's job. const selectedIds = ['previouslySelectedRow']; const spy = jest.fn(); - const tree = shallow(); - tree.find('.row').at(0).simulate('click', { stopPropagation: () => null }); + const tree = shallow( + , + ); + tree + .find('.row') + .at(0) + .simulate('click', { stopPropagation: () => null }); expect(spy).toHaveBeenLastCalledWith(['previouslySelectedRow', 'row1']); }); it('does not call selectionCallback if disableSelection is true', () => { const spy = jest.fn(); - const tree = shallow(); - tree.find('.row').at(0).simulate('click', { stopPropagation: () => null }); - tree.find('.row').at(1).simulate('click', { stopPropagation: () => null }); + const tree = shallow( + , + ); + tree + .find('.row') + .at(0) + .simulate('click', { stopPropagation: () => null }); + tree + .find('.row') + .at(1) + .simulate('click', { stopPropagation: () => null }); expect(spy).not.toHaveBeenCalled(); }); it('handles no updateSelection method being passed', () => { const tree = shallow(); - tree.find('.row').at(0).simulate('click', { stopPropagation: () => null }); - tree.find('.columnName WithStyles(Checkbox)').at(0).simulate('change', { - target: { checked: true }, - }); + tree + .find('.row') + .at(0) + .simulate('click', { stopPropagation: () => null }); + tree + .find('.columnName WithStyles(Checkbox)') + .at(0) + .simulate('change', { + target: { checked: true }, + }); }); it('selects all items when head checkbox is clicked', () => { const spy = jest.fn(); - const tree = shallow(); - tree.find('.columnName WithStyles(Checkbox)').at(0).simulate('change', { - target: { checked: true }, - }); + const tree = shallow( + , + ); + tree + .find('.columnName WithStyles(Checkbox)') + .at(0) + .simulate('change', { + target: { checked: true }, + }); expect(spy).toHaveBeenLastCalledWith(['row1', 'row2']); }); it('unselects all items when head checkbox is clicked and all items are selected', () => { const spy = jest.fn(); - const tree = shallow(); - tree.find('.columnName WithStyles(Checkbox)').at(0).simulate('change', { - target: { checked: true }, - }); + const tree = shallow( + , + ); + tree + .find('.columnName WithStyles(Checkbox)') + .at(0) + .simulate('change', { + target: { checked: true }, + }); expect(spy).toHaveBeenLastCalledWith(['row1', 'row2']); - tree.find('.columnName WithStyles(Checkbox)').at(0).simulate('change', { - target: { checked: false }, - }); + tree + .find('.columnName WithStyles(Checkbox)') + .at(0) + .simulate('change', { + target: { checked: false }, + }); expect(spy).toHaveBeenLastCalledWith([]); }); it('selects all items if one item was checked then the head checkbox is clicked', () => { const spy = jest.fn(); - const tree = shallow(); - tree.find('.row').at(0).simulate('click', { stopPropagation: () => null }); - tree.find('.columnName WithStyles(Checkbox)').at(0).simulate('change', { - target: { checked: true }, - }); + const tree = shallow( + , + ); + tree + .find('.row') + .at(0) + 
.simulate('click', { stopPropagation: () => null }); + tree + .find('.columnName WithStyles(Checkbox)') + .at(0) + .simulate('change', { + target: { checked: true }, + }); expect(spy).toHaveBeenLastCalledWith(['row1', 'row2']); }); @@ -347,8 +445,20 @@ describe('CustomTable', () => { // work here because the parent is where the selectedIds state is kept const selectedIds = ['previouslySelectedRow']; const spy = jest.fn(); - const tree = shallow(); - tree.find('.row').at(0).simulate('click', { stopPropagation: () => null }); + const tree = shallow( + , + ); + tree + .find('.row') + .at(0) + .simulate('click', { stopPropagation: () => null }); expect(spy).toHaveBeenLastCalledWith(['row1']); }); @@ -358,8 +468,18 @@ describe('CustomTable', () => { const tree = shallow(); await TestUtils.flushPromises(); expect(tree.state()).toHaveProperty('maxPageIndex', 0); - expect(tree.find('WithStyles(IconButton)').at(0).prop('disabled')).toBeTruthy(); - expect(tree.find('WithStyles(IconButton)').at(1).prop('disabled')).toBeTruthy(); + expect( + tree + .find('WithStyles(IconButton)') + .at(0) + .prop('disabled'), + ).toBeTruthy(); + expect( + tree + .find('WithStyles(IconButton)') + .at(1) + .prop('disabled'), + ).toBeTruthy(); }); it('enables next page button if next page token is given', async () => { @@ -368,8 +488,18 @@ describe('CustomTable', () => { const tree = shallow(); await reloadResult; expect(tree.state()).toHaveProperty('maxPageIndex', Number.MAX_SAFE_INTEGER); - expect(tree.find('WithStyles(IconButton)').at(0).prop('disabled')).toBeTruthy(); - expect(tree.find('WithStyles(IconButton)').at(1).prop('disabled')).not.toBeTruthy(); + expect( + tree + .find('WithStyles(IconButton)') + .at(0) + .prop('disabled'), + ).toBeTruthy(); + expect( + tree + .find('WithStyles(IconButton)') + .at(1) + .prop('disabled'), + ).not.toBeTruthy(); }); it('calls reload with next page token when next page button is clicked', async () => { @@ -378,7 +508,10 @@ describe('CustomTable', () => { const tree = shallow(); await TestUtils.flushPromises(); - tree.find('WithStyles(IconButton)').at(1).simulate('click'); + tree + .find('WithStyles(IconButton)') + .at(1) + .simulate('click'); expect(spy).toHaveBeenLastCalledWith({ filter: '', orderAscending: false, @@ -394,7 +527,10 @@ describe('CustomTable', () => { const tree = shallow(); await TestUtils.flushPromises(); - tree.find('WithStyles(IconButton)').at(1).simulate('click'); + tree + .find('WithStyles(IconButton)') + .at(1) + .simulate('click'); await TestUtils.flushPromises(); expect(spy).toHaveBeenLastCalledWith({ filter: '', @@ -406,7 +542,12 @@ describe('CustomTable', () => { expect(tree.state()).toHaveProperty('currentPage', 1); tree.setProps({ rows: [rows[1]] }); expect(tree).toMatchSnapshot(); - expect(tree.find('WithStyles(IconButton)').at(0).prop('disabled')).not.toBeTruthy(); + expect( + tree + .find('WithStyles(IconButton)') + .at(0) + .prop('disabled'), + ).not.toBeTruthy(); }); it('renders new rows after clicking previous page, and enables next page button', async () => { @@ -415,10 +556,16 @@ describe('CustomTable', () => { const tree = shallow(); await reloadResult; - tree.find('WithStyles(IconButton)').at(1).simulate('click'); + tree + .find('WithStyles(IconButton)') + .at(1) + .simulate('click'); await reloadResult; - tree.find('WithStyles(IconButton)').at(0).simulate('click'); + tree + .find('WithStyles(IconButton)') + .at(0) + .simulate('click'); await TestUtils.flushPromises(); expect(spy).toHaveBeenLastCalledWith({ filter: '', @@ -429,7 +576,12 @@ 
describe('CustomTable', () => { }); tree.setProps({ rows }); - expect(tree.find('WithStyles(IconButton)').at(0).prop('disabled')).toBeTruthy(); + expect( + tree + .find('WithStyles(IconButton)') + .at(0) + .prop('disabled'), + ).toBeTruthy(); await TestUtils.flushPromises(); expect(tree).toMatchSnapshot(); }); @@ -471,8 +623,9 @@ describe('CustomTable', () => { it('renders a collapsed row', async () => { const row = { ...rows[0] }; row.expandState = ExpandState.COLLAPSED; - const tree = shallow( null} />); + const tree = shallow( + null} />, + ); await TestUtils.flushPromises(); expect(tree).toMatchSnapshot(); }); @@ -480,8 +633,15 @@ describe('CustomTable', () => { it('renders a collapsed row when selection is disabled', async () => { const row = { ...rows[0] }; row.expandState = ExpandState.COLLAPSED; - const tree = shallow( null} disableSelection={true} />); + const tree = shallow( + null} + disableSelection={true} + />, + ); await TestUtils.flushPromises(); expect(tree).toMatchSnapshot(); }); @@ -497,8 +657,14 @@ describe('CustomTable', () => { it('renders an expanded row with expanded component below it', async () => { const row = { ...rows[0] }; row.expandState = ExpandState.EXPANDED; - const tree = shallow( Hello World} />); + const tree = shallow( + Hello World} + />, + ); await TestUtils.flushPromises(); expect(tree).toMatchSnapshot(); }); @@ -508,15 +674,27 @@ describe('CustomTable', () => { const toggleSpy = jest.fn(); const stopPropagationSpy = jest.fn(); row.expandState = ExpandState.EXPANDED; - const tree = shallow( Hello World} toggleExpansion={toggleSpy} />); - tree.find('.' + css.expandButton).at(1).simulate('click', { stopPropagation: stopPropagationSpy }); + const tree = shallow( + Hello World} + toggleExpansion={toggleSpy} + />, + ); + tree + .find('.' + css.expandButton) + .at(1) + .simulate('click', { stopPropagation: stopPropagationSpy }); expect(toggleSpy).toHaveBeenCalledWith(1); expect(stopPropagationSpy).toHaveBeenCalledWith(); }); it('renders a table with sorting disabled', async () => { - const tree = shallow(); + const tree = shallow( + , + ); await TestUtils.flushPromises(); expect(tree).toMatchSnapshot(); }); @@ -531,17 +709,23 @@ describe('CustomTable', () => { it('reloads the table with the encoded filter object', async () => { const reload = jest.fn(); - const tree = shallow(); + const tree = shallow( + , + ); // lodash's debounce function doesn't play nice with Jest, so we skip the handleChange function // and call _requestFilter directly. 
(tree.instance() as CustomTableTest)._requestFilter('test filter'); - const expectedEncodedFilter = encodeURIComponent(JSON.stringify({ - predicates: [{ - key: 'name', - op: PredicateOp.ISSUBSTRING, - string_value: 'test filter', - }] - })); + const expectedEncodedFilter = encodeURIComponent( + JSON.stringify({ + predicates: [ + { + key: 'name', + op: PredicateOp.ISSUBSTRING, + string_value: 'test filter', + }, + ], + }), + ); expect(tree.state('filterStringEncoded')).toEqual(expectedEncodedFilter); expect(reload).toHaveBeenLastCalledWith({ filter: expectedEncodedFilter, diff --git a/frontend/src/components/CustomTable.tsx b/frontend/src/components/CustomTable.tsx index 804f0411d5df..cf4252c306af 100644 --- a/frontend/src/components/CustomTable.tsx +++ b/frontend/src/components/CustomTable.tsx @@ -89,6 +89,9 @@ export const css = stylesheet({ fontWeight: 'bold', letterSpacing: 0.25, marginRight: 20, + overflow: 'hidden', + textOverflow: 'ellipsis', + whiteSpace: 'nowrap', }, emptyMessage: { padding: 20, @@ -165,7 +168,7 @@ export const css = stylesheet({ }, selectionToggle: { marginRight: 12, - minWidth: 32, + overflow: 'initial', // Resets overflow from 'hidden' }, verticalAlignInitial: { verticalAlign: 'initial', @@ -206,8 +209,10 @@ interface CustomTableState { export default class CustomTable extends React.Component { private _isMounted = true; - private _debouncedFilterRequest = - debounce((filterString: string) => this._requestFilter(filterString), 300); + private _debouncedFilterRequest = debounce( + (filterString: string) => this._requestFilter(filterString), + 300, + ); constructor(props: CustomTableProps) { super(props); @@ -219,8 +224,8 @@ export default class CustomTable extends React.Component v.id) : []; + const selectedIds = (event.target as CheckboxProps).checked + ? this.props.rows.map(v => v.id) + : []; if (this.props.updateSelection) { this.props.updateSelection(selectedIds); } @@ -249,9 +255,10 @@ export default class CustomTable extends React.Component total += (c.flex || 1), 0); - const widths = this.props.columns.map(c => (c.flex || 1) / totalFlex * 100); + const totalFlex = this.props.columns.reduce((total, c) => (total += c.flex || 1), 0); + const widths = this.props.columns.map(c => ((c.flex || 1) / totalFlex) * 100); return (
- {/* Filter/Search bar */} {!this.props.noFilterBox && (
- - ) - }} /> + ), + }} + />
)} {/* Header */} -
- {(this.props.disableSelection !== true && this.props.useRadioButtons !== true) && ( -
- -
- )} - {/* Shift cells to account for expand button */} - {!!this.props.getExpandComponent && ( - - )} +
+ {// Called as function to avoid breaking shallow rendering tests. + HeaderRowSelectionSection({ + disableSelection: this.props.disableSelection, + indeterminate: !!numSelected && numSelected < this.props.rows.length, + isSelected: !!numSelected && numSelected === this.props.rows.length, + onSelectAll: this.handleSelectAllClick.bind(this), + showExpandButton: !!this.props.getExpandComponent, + useRadioButtons: this.props.useRadioButtons, + })} {this.props.columns.map((col, i) => { const isColumnSortable = !!this.props.columns[i].sortKey; const isCurrentSortColumn = sortBy === this.props.columns[i].sortKey; return ( -
- {this.props.disableSorting === true &&
{col.label}
} +
+ {this.props.disableSorting === true && col.label} {!this.props.disableSorting && ( - - + this._requestSort(this.props.columns[i].sortKey)}> + onClick={() => this._requestSort(this.props.columns[i].sortKey)} + > {col.label} @@ -343,11 +366,16 @@ export default class CustomTable extends React.Component {/* Busy experience */} - {this.state.isBusy && ( -
- - )} + {this.state.isBusy && ( + +
+ + + )} {/* Empty experience */} {this.props.rows.length === 0 && !!this.props.emptyMessage && !this.state.isBusy && ( @@ -358,49 +386,42 @@ export default class CustomTable extends React.Component -
this.handleClick(e, row.id)}> - {/* Expansion toggle button */} - {((this.props.disableSelection !== true || !!this.props.getExpandComponent) && row.expandState !== ExpandState.NONE) && ( -
- {/* If using checkboxes */} - {(this.props.disableSelection !== true && this.props.useRadioButtons !== true) && ( - )} - {/* If using radio buttons */} - {(this.props.disableSelection !== true && this.props.useRadioButtons) && ( - )} - {!!this.props.getExpandComponent && ( - this._expandButtonToggled(e, i)}> - - - )} -
+ return ( +
+ key={i} + > +
this.handleClick(e, row.id)} + > + {// Called as function to avoid breaking shallow rendering tests. + BodyRowSelectionSection({ + disableSelection: this.props.disableSelection, + expandState: row.expandState, + isSelected: this.isSelected(row.id), + onExpand: e => this._expandButtonToggled(e, i), + showExpandButton: !!this.props.getExpandComponent, + useRadioButtons: this.props.useRadioButtons, + })} + +
+ {row.expandState === ExpandState.EXPANDED && this.props.getExpandComponent && ( +
{this.props.getExpandComponent(i)}
)} - - {}
- {row.expandState === ExpandState.EXPANDED && this.props.getExpandComponent && ( -
- {this.props.getExpandComponent(i)} -
- )} -
); + ); })}
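Both selection sections in the hunk above are invoked as plain functions (HeaderRowSelectionSection({...}) and BodyRowSelectionSection({...})) rather than as JSX elements, per the "Called as function to avoid breaking shallow rendering tests" comments. A minimal sketch of why that matters for enzyme's shallow(); the Cell component here is illustrative, not from this diff:

    import * as React from 'react';

    const Cell: React.FC<{ label: string }> = ({ label }) => <td>{label}</td>;

    // As a JSX element, shallow() stops at the child component boundary, so a
    // parent snapshot records '<Cell label="a" />' instead of the <td> markup:
    const asElement = <tr><Cell label='a' /></tr>;

    // Called as a plain function, the returned <td> is inlined into the
    // parent's own shallow output, so pre-refactoring snapshots keep matching:
    const asCall = <tr>{Cell({ label: 'a' })}</tr>;

The trade-off is that a helper invoked this way is not a component instance of its own, so it must stay stateless and hook-free, which both helpers in this hunk are.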
@@ -408,20 +429,29 @@ export default class CustomTable extends React.Component Rows per page: - + InputProps={{ disableUnderline: true }} + onChange={this._requestRowsPerPage.bind(this)} + value={pageSize} + > {[10, 20, 50, 100].map((size, i) => ( - {size} + + {size} + ))} this._pageChanged(-1)} disabled={!this.state.currentPage}> - this._pageChanged(1)} - disabled={this.state.currentPage >= this.state.maxPageIndex}> + this._pageChanged(1)} + disabled={this.state.currentPage >= this.state.maxPageIndex} + >
@@ -432,13 +462,16 @@ export default class CustomTable extends React.Component { // Override the current state with incoming request - const request: ListRequest = Object.assign({ - filter: this.state.filterStringEncoded, - orderAscending: this.state.sortOrder === 'asc', - pageSize: this.state.pageSize, - pageToken: this.state.tokenList[this.state.currentPage], - sortBy: this.state.sortBy, - }, loadRequest); + const request: ListRequest = Object.assign( + { + filter: this.state.filterStringEncoded, + orderAscending: this.state.sortOrder === 'asc', + pageSize: this.state.pageSize, + pageToken: this.state.tokenList[this.state.currentPage], + sortBy: this.state.sortBy, + }, + loadRequest, + ); let result = ''; try { @@ -466,9 +499,9 @@ export default class CustomTable extends React.Component await this._debouncedFilterRequest(value as string) + async () => await this._debouncedFilterRequest(value as string), ); - } + }; // Exposed for testing protected async _requestFilter(filterString?: string): Promise { @@ -479,12 +512,14 @@ export default class CustomTable extends React.Component { this._resetToFirstPage( - await this.reload({ pageToken: '', orderAscending: sortOrder === 'asc', sortBy })); + await this.reload({ pageToken: '', orderAscending: sortOrder === 'asc', sortBy }), + ); }); } } @@ -563,3 +597,91 @@ export default class CustomTable extends React.Component = ({ + disableSelection, + indeterminate, + isSelected, + onSelectAll, + showExpandButton, + useRadioButtons, +}) => { + const nonEmpty = disableSelection !== true || showExpandButton; + if (!nonEmpty) { + return null; + } + + return ( +
+ {/* If using checkboxes */} + {disableSelection !== true && useRadioButtons !== true && ( + + )} + {/* If using radio buttons */} + {disableSelection !== true && useRadioButtons && ( + // Placeholder for radio button horizontal space. + + )} + {showExpandButton && } +
+ ); +}; + +interface BodyRowSelectionSectionProps extends SelectionSectionCommonProps { + expandState?: ExpandState; + onExpand: React.MouseEventHandler; +} +const BodyRowSelectionSection: React.FC = ({ + disableSelection, + expandState, + isSelected, + onExpand, + showExpandButton, + useRadioButtons, +}) => ( + <> + {/* Expansion toggle button */} + {(disableSelection !== true || showExpandButton) && expandState !== ExpandState.NONE && ( +
+ {/* If using checkboxes */} + {disableSelection !== true && useRadioButtons !== true && ( + + )} + {/* If using radio buttons */} + {disableSelection !== true && useRadioButtons && ( + + )} + {showExpandButton && ( + + + + )} +
+ )} + + {/* Placeholder for non-expandable rows */} + {expandState === ExpandState.NONE &&
} + +); diff --git a/frontend/src/components/CustomTableRow.test.tsx b/frontend/src/components/CustomTableRow.test.tsx index 88cdf08b00eb..a1318907db17 100644 --- a/frontend/src/components/CustomTableRow.test.tsx +++ b/frontend/src/components/CustomTableRow.test.tsx @@ -43,7 +43,7 @@ describe('CustomTable', () => { }; it('renders some rows using a custom renderer', async () => { - columns[0].customRenderer = () => (this is custom output) as any; + columns[0].customRenderer = () => this is custom output as any; const tree = shallow(); await TestUtils.flushPromises(); expect(tree).toMatchSnapshot(); diff --git a/frontend/src/components/CustomTableRow.tsx b/frontend/src/components/CustomTableRow.tsx index 4af1e1852833..773ee0315971 100644 --- a/frontend/src/components/CustomTableRow.tsx +++ b/frontend/src/components/CustomTableRow.tsx @@ -66,8 +66,8 @@ interface CustomTableRowProps { } function calculateColumnWidths(columns: Column[]): number[] { - const totalFlex = columns.reduce((total, c) => total += (c.flex || 1), 0); - return columns.map(c => (c.flex || 1) / totalFlex * 100); + const totalFlex = columns.reduce((total, c) => (total += c.flex || 1), 0); + return columns.map(c => ((c.flex || 1) / totalFlex) * 100); } // tslint:disable-next-line:variable-name @@ -76,17 +76,18 @@ export const CustomTableRow: React.FC = (props: CustomTable const widths = calculateColumnWidths(columns); return ( - { - row.otherFields.map((cell, i) => ( -
- {i === 0 && row.error && ( - - )} - {columns[i].customRenderer ? - columns[i].customRenderer!({ value: cell, id: row.id }) : cell} -
- )) - } + {row.otherFields.map((cell, i) => ( +
+ {i === 0 && row.error && ( + + + + )} + {columns[i].customRenderer + ? columns[i].customRenderer!({ value: cell, id: row.id }) + : cell} +
+ ))}
); -}; \ No newline at end of file +}; diff --git a/frontend/src/components/Description.test.tsx b/frontend/src/components/Description.test.tsx new file mode 100644 index 000000000000..a70239cf2785 --- /dev/null +++ b/frontend/src/components/Description.test.tsx @@ -0,0 +1,99 @@ +/* + * Copyright 2019 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +import * as React from 'react'; + +import { mount } from 'enzyme'; +import { Description } from './Description'; + +describe('Description', () => { + describe('When in normal mode', () => { + it('renders empty string', () => { + const tree = mount().getDOMNode(); + expect(tree).toMatchSnapshot(); + }); + + it('renders pure text', () => { + const tree = mount().getDOMNode(); + expect(tree).toMatchSnapshot(); + }); + + it('renders raw link', () => { + const description = 'https://www.google.com'; + const tree = mount().getDOMNode(); + expect(tree).toMatchSnapshot(); + }); + + it('renders markdown link', () => { + const description = '[google](https://www.google.com)'; + const tree = mount().getDOMNode(); + expect(tree).toMatchSnapshot(); + }); + + it('renders paragraphs', () => { + const description = 'Paragraph 1\n' + '\n' + 'Paragraph 2'; + const tree = mount().getDOMNode(); + expect(tree).toMatchSnapshot(); + }); + + it('renders markdown list as list', () => { + const description = ` +* abc +* def`; + const tree = mount().getDOMNode(); + expect(tree).toMatchSnapshot(); + }); + }); + + describe('When in inline mode', () => { + it('renders paragraphs separated by space', () => { + const description = ` +Paragraph 1 + +Paragraph 2 +`; + const tree = mount().getDOMNode(); + expect(tree).toMatchSnapshot(); + }); + + it('renders pure text', () => { + const tree = mount( + , + ).getDOMNode(); + expect(tree).toMatchSnapshot(); + }); + + it('renders raw link', () => { + const description = 'https://www.google.com'; + const tree = mount().getDOMNode(); + expect(tree).toMatchSnapshot(); + }); + + it('renders markdown link', () => { + const description = '[google](https://www.google.com)'; + const tree = mount().getDOMNode(); + expect(tree).toMatchSnapshot(); + }); + + it('renders markdown list as pure text', () => { + const description = ` +* abc +* def`; + const tree = mount().getDOMNode(); + expect(tree).toMatchSnapshot(); + }); + }); +}); diff --git a/frontend/src/components/Description.tsx b/frontend/src/components/Description.tsx new file mode 100644 index 000000000000..d9abafc1b8b3 --- /dev/null +++ b/frontend/src/components/Description.tsx @@ -0,0 +1,43 @@ +/* + * Copyright 2019 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +import React from 'react'; +import Markdown from 'markdown-to-jsx'; +import { ExternalLink } from '../atoms/ExternalLink'; + +function preventEventBubbling(e: React.MouseEvent): void { + e.stopPropagation(); +} + +const renderExternalLink = (props: {}) => ( + +); + +const options = { + overrides: { a: { component: renderExternalLink } }, +}; + +const optionsForceInline = { + ...options, + forceInline: true, +}; + +export const Description: React.FC<{ description: string; forceInline?: boolean }> = ({ + description, + forceInline, +}) => { + return {description}; +}; diff --git a/frontend/src/components/DetailsTable.test.tsx b/frontend/src/components/DetailsTable.test.tsx index cb91e1de7639..dae438e77da0 100644 --- a/frontend/src/components/DetailsTable.test.tsx +++ b/frontend/src/components/DetailsTable.test.tsx @@ -36,8 +36,12 @@ describe('DetailsTable', () => { }); it('shows key and value for large values', () => { - const tree = shallow( { essentially unchanged. It was popularised in the 1960s with the release of Letraset sheets containing Lorem Ipsum passages, and more recently with desktop publishing software like Aldus PageMaker including versions - of Lorem Ipsum.`] - ]} />); + of Lorem Ipsum.`, + ], + ]} + />, + ); expect(tree).toMatchSnapshot(); }); @@ -56,7 +63,9 @@ describe('DetailsTable', () => { }); it('shows key and JSON value in row', () => { - const tree = shallow(); + const tree = shallow( + , + ); expect(tree).toMatchSnapshot(); }); @@ -86,24 +95,32 @@ describe('DetailsTable', () => { }); it('does not render booleans as JSON', () => { - const tree = shallow(); + const tree = shallow(); expect(tree).toMatchSnapshot(); }); it('shows keys and values for multiple rows', () => { - const tree = shallow(); + const tree = shallow( + , + ); expect(tree).toMatchSnapshot(); }); + it('does render values with the provided valueComponent', () => { + const valueComponent: React.FC = ({ key }) => {key}; + const tree = shallow( + , + ); + expect(tree).toMatchSnapshot(); + }); }); diff --git a/frontend/src/components/DetailsTable.tsx b/frontend/src/components/DetailsTable.tsx index 3f43b955c5b5..b2a56025c4c5 100644 --- a/frontend/src/components/DetailsTable.tsx +++ b/frontend/src/components/DetailsTable.tsx @@ -17,11 +17,13 @@ import * as React from 'react'; import { stylesheet } from 'typestyle'; import { color, spacing, commonCss } from '../Css'; +import { KeyValue } from '../lib/StaticGraphParser'; import Editor from './Editor'; import 'brace'; import 'brace/ext/language_tools'; import 'brace/mode/json'; import 'brace/theme/github'; +import { S3Artifact } from 'third_party/argo-ui/argo_template'; export const css = stylesheet({ key: { @@ -40,45 +42,73 @@ export const css = stylesheet({ }, valueText: { maxWidth: 400, + overflow: 'hidden', + textOverflow: 'ellipsis', + whiteSpace: 'nowrap', }, }); interface DetailsTableProps { - fields: string[][]; + fields: Array>; title?: string; + valueComponent?: React.FC; +} + +function isString(x: any): x is string { + return typeof x === 'string'; } export default (props: DetailsTableProps) => { - return ( - {!!props.title 
&&
{props.title}
} -
- {props.fields.map((f, i) => { - try { - const parsedJson = JSON.parse(f[1]); - // Nulls, booleans, strings, and numbers can all be parsed as JSON, but we don't care - // about rendering. Note that `typeOf null` returns 'object' - if (parsedJson === null || typeof parsedJson !== 'object') { - throw new Error('Parsed JSON was neither an array nor an object. Using default renderer'); + return ( + + {!!props.title &&
{props.title}
} +
+ {props.fields.map((f, i) => { + const [key, value] = f; + + // only try to parse json if value is a string + if (isString(value)) { + try { + const parsedJson = JSON.parse(value); + // Nulls, booleans, strings, and numbers can all be parsed as JSON, but we don't care + // about rendering. Note that `typeOf null` returns 'object' + if (parsedJson === null || typeof parsedJson !== 'object') { + throw new Error( + 'Parsed JSON was neither an array nor an object. Using default renderer', + ); + } + return ( +
+ {key} + +
+ ); + } catch (err) { + // do nothing + } } - return ( -
- {f[0]} - -
- ); - } catch (err) { // If the value isn't a JSON object, just display it as is return (
- {f[0]} - {f[1]} + {key} + + {props.valueComponent && !!value && !isString(value) + ? props.valueComponent(value) + : value} +
); - } - })} -
-
+ })} +
+
); }; diff --git a/frontend/src/components/Editor.test.tsx b/frontend/src/components/Editor.test.tsx index 2245fb6ba66b..30afeb2609ec 100644 --- a/frontend/src/components/Editor.test.tsx +++ b/frontend/src/components/Editor.test.tsx @@ -36,7 +36,7 @@ describe('Editor', () => { expect(tree.html()).toMatchSnapshot(); }); - it ('renders a placeholder that contains HTML', () => { + it('renders a placeholder that contains HTML', () => { const placeholder = 'I am a placeholder with HTML.'; const tree = mount(); expect(tree.html()).toMatchSnapshot(); @@ -49,4 +49,4 @@ describe('Editor', () => { const editor = (tree.instance() as any).editor; expect(editor.getValue()).toBe(value); }); -}); \ No newline at end of file +}); diff --git a/frontend/src/components/Editor.tsx b/frontend/src/components/Editor.tsx index 11dec1c990d9..ee73e46ac902 100644 --- a/frontend/src/components/Editor.tsx +++ b/frontend/src/components/Editor.tsx @@ -42,4 +42,4 @@ class Editor extends AceEditor { } } -export default Editor; \ No newline at end of file +export default Editor; diff --git a/frontend/src/components/GcsLink.tsx b/frontend/src/components/GcsLink.tsx deleted file mode 100644 index f0f7635141bc..000000000000 --- a/frontend/src/components/GcsLink.tsx +++ /dev/null @@ -1,16 +0,0 @@ -import * as React from 'react'; -import { generateGcsConsoleUri } from '../lib/Utils'; - -/** - * A component that renders a gcs console link when gcsUri is gs:// and pure - * text if it is not a valid gs:// uri. - */ -export const GcsLink: React.FC<{ gcsUri?: string }> = ({ gcsUri }) => { - const gcsConsoleUri = gcsUri ? generateGcsConsoleUri(gcsUri) : undefined; - if (gcsConsoleUri) { - // Opens in new window safely - return {gcsUri}; - } else { - return <>{gcsUri}; - } -}; diff --git a/frontend/src/components/Graph.test.tsx b/frontend/src/components/Graph.test.tsx index 4de7c1baefd1..96340be29cbf 100644 --- a/frontend/src/components/Graph.test.tsx +++ b/frontend/src/components/Graph.test.tsx @@ -127,7 +127,10 @@ describe('Graph', () => { graph.setEdge('node2', 'node1'); const spy = jest.fn(); const tree = shallow(); - tree.find('.node').at(0).simulate('click'); + tree + .find('.node') + .at(0) + .simulate('click'); expect(spy).toHaveBeenCalledWith('node1'); }); diff --git a/frontend/src/components/Graph.tsx b/frontend/src/components/Graph.tsx index 07ab344932f6..ed8ebe51aeb7 100644 --- a/frontend/src/components/Graph.tsx +++ b/frontend/src/components/Graph.tsx @@ -89,7 +89,7 @@ const css = stylesheet({ margin: 10, position: 'absolute', // TODO: can this be calculated? - transform: 'translate(71px, 14px)' + transform: 'translate(71px, 14px)', }, root: { backgroundColor: color.graphBg, @@ -133,13 +133,12 @@ export default class Graph extends React.Component { const displayEdges: Edge[] = []; // Creates the lines that constitute the edges connecting the graph. - graph.edges().forEach((edgeInfo) => { + graph.edges().forEach(edgeInfo => { const edge = graph.edge(edgeInfo); const segments: Segment[] = []; if (edge.points.length > 1) { for (let i = 1; i < edge.points.length; i++) { - let xStart = edge.points[i - 1].x; let yStart = edge.points[i - 1].y; let xEnd = edge.points[i].x; @@ -157,8 +156,8 @@ export default class Graph extends React.Component { // Set the edge's first segment to start at the bottom or top of the source node. yStart = downwardPointingSegment - ? sourceNode.y + (sourceNode.height / 2) - 3 - : sourceNode.y - (sourceNode.height / 2); + ? 
sourceNode.y + sourceNode.height / 2 - 3 + : sourceNode.y - sourceNode.height / 2; xStart = this._ensureXIsWithinNode(sourceNode, xStart); } @@ -184,7 +183,7 @@ export default class Graph extends React.Component { // node. yEnd = downwardPointingSegment ? destinationNode.y - this.TOP_OFFSET + 5 - : destinationNode.y + (destinationNode.height / 2) + 3; + : destinationNode.y + destinationNode.height / 2 + 3; xEnd = this._ensureXIsWithinNode(destinationNode, xEnd); } @@ -226,7 +225,7 @@ export default class Graph extends React.Component { from: edgeInfo.v, isPlaceholder: edge.isPlaceholder, segments, - to: edgeInfo.w + to: edgeInfo.w, }); }); @@ -235,36 +234,50 @@ export default class Graph extends React.Component { return (
- {graph.nodes().map(id => Object.assign(graph.node(id), { id })).map((node, i) => ( -
{ - if (!this.props.selectedNodeId) { - this.setState({ hoveredNode: node.id }); - } - }} - onMouseLeave={() => { - if (this.state.hoveredNode === node.id) { - this.setState({ hoveredNode: undefined }); + {graph + .nodes() + .map(id => Object.assign(graph.node(id), { id })) + .map((node, i) => ( +
{ + if (!this.props.selectedNodeId) { + this.setState({ hoveredNode: node.id }); + } + }} + onMouseLeave={() => { + if (this.state.hoveredNode === node.id) { + this.setState({ hoveredNode: undefined }); + } + }} + onClick={() => + !node.isPlaceholder && this.props.onClick && this.props.onClick(node.id) } - }} - onClick={() => (!node.isPlaceholder && this.props.onClick) && this.props.onClick(node.id)} - style={{ - backgroundColor: node.bgColor, left: node.x, - maxHeight: node.height, - minHeight: node.height, - top: node.y, - transition: 'left 0.5s, top 0.5s', - width: node.width, - }}> - {!node.isPlaceholder && ( - -
{node.label}
-
- )} -
{node.icon}
-
- ))} + style={{ + backgroundColor: node.bgColor, + left: node.x, + maxHeight: node.height, + minHeight: node.height, + top: node.y, + transition: 'left 0.5s, top 0.5s', + width: node.width, + }} + > + {!node.isPlaceholder && ( + +
{node.label}
+
+ )} +
+ {node.icon} +
+
+ ))} {displayEdges.map((edge, i) => { const edgeColor = this._getEdgeColor(edge, highlightNode); @@ -272,8 +285,10 @@ export default class Graph extends React.Component { return (
{edge.segments.map((segment, l) => ( -
{ transform: `rotate(${segment.angle}deg)`, transition: 'left 0.5s, top 0.5s', width: segment.length, - }} /> + }} + /> ))} {/* Arrowhead */} {!edge.isPlaceholder && lastSegment.x2 !== undefined && lastSegment.y2 !== undefined && ( -
+
)}
); @@ -300,17 +319,18 @@ export default class Graph extends React.Component { } private _addDiagonalSegment( - segments: Segment[], - xStart: number, - yStart: number, - xEnd: number, - yEnd: number): void { + segments: Segment[], + xStart: number, + yStart: number, + xEnd: number, + yEnd: number, + ): void { const xMid = (xStart + xEnd) / 2; // The + 0.5 at the end of 'length' helps fill out the elbows of the edges. const length = Math.sqrt(Math.pow(xStart - xEnd, 2) + Math.pow(yStart - yEnd, 2)) + 0.5; - const x1 = xMid - (length / 2); + const x1 = xMid - length / 2; const y1 = (yStart + yEnd) / 2; - const angle = Math.atan2(yStart - yEnd, xStart - xEnd) * 180 / Math.PI; + const angle = (Math.atan2(yStart - yEnd, xStart - xEnd) * 180) / Math.PI; segments.push({ angle, length, diff --git a/frontend/src/components/LogViewer.test.tsx b/frontend/src/components/LogViewer.test.tsx index eaadb2317df1..262b54caed84 100644 --- a/frontend/src/components/LogViewer.test.tsx +++ b/frontend/src/components/LogViewer.test.tsx @@ -26,19 +26,20 @@ describe('LogViewer', () => { it('renders one log line', () => { const logLines = ['first line']; const logViewer = new LogViewer({ logLines }); - const tree = shallow((logViewer as any)._rowRenderer({ index: 0 })); + const tree = mount((logViewer as any)._rowRenderer({ index: 0 })).getDOMNode(); expect(tree).toMatchSnapshot(); }); it('renders two log lines', () => { const logLines = ['first line', 'second line']; const logViewer = new LogViewer({ logLines }); - const tree = shallow((logViewer as any)._rowRenderer({ index: 0 })); + const tree = mount((logViewer as any)._rowRenderer({ index: 0 })).getDOMNode(); expect(tree).toMatchSnapshot(); }); it('renders one long line without breaking', () => { - const line = `Lorem Ipsum is simply dummy text of the printing and typesetting` + + const line = + `Lorem Ipsum is simply dummy text of the printing and typesetting` + `industry. Lorem Ipsum has been the industry's standard dummy text ever` + `since the 1500s, when an unknown printer took a galley of type and` + `scrambled it to make a type specimen book. 
It has survived not only five` + @@ -48,7 +49,7 @@ describe('LogViewer', () => { `with desktop publishing software like Aldus PageMaker including versions` + `of Lorem Ipsum.`; const logViewer = new LogViewer({ logLines: [line] }); - const tree = shallow((logViewer as any)._rowRenderer({ index: 0 })); + const tree = mount((logViewer as any)._rowRenderer({ index: 0 })).getDOMNode(); expect(tree).toMatchSnapshot(); }); @@ -63,35 +64,35 @@ describe('LogViewer', () => { with desktop publishing software like Aldus PageMaker including versions of Lorem Ipsum.`; const logViewer = new LogViewer({ logLines: line.split('\n') }); - const tree = shallow((logViewer as any)._rowRenderer({ index: 0 })); + const tree = mount((logViewer as any)._rowRenderer({ index: 0 })).getDOMNode(); expect(tree).toMatchSnapshot(); }); it('linkifies standalone urls', () => { const logLines = ['this string: http://path.com is a url']; const logViewer = new LogViewer({ logLines }); - const tree = shallow((logViewer as any)._rowRenderer({ index: 0 })); + const tree = mount((logViewer as any)._rowRenderer({ index: 0 })).getDOMNode(); expect(tree).toMatchSnapshot(); }); it('linkifies standalone https urls', () => { const logLines = ['this string: https://path.com is a url']; const logViewer = new LogViewer({ logLines }); - const tree = shallow((logViewer as any)._rowRenderer({ index: 0 })); + const tree = mount((logViewer as any)._rowRenderer({ index: 0 })).getDOMNode(); expect(tree).toMatchSnapshot(); }); it('linkifies substring urls', () => { const logLines = ['this string:http://path.com is a url']; const logViewer = new LogViewer({ logLines }); - const tree = shallow((logViewer as any)._rowRenderer({ index: 0 })); + const tree = mount((logViewer as any)._rowRenderer({ index: 0 })).getDOMNode(); expect(tree).toMatchSnapshot(); }); it('does not linkify non http/https urls', () => { const logLines = ['this string: gs://path is a GCS path']; const logViewer = new LogViewer({ logLines }); - const tree = shallow((logViewer as any)._rowRenderer({ index: 0 })); + const tree = mount((logViewer as any)._rowRenderer({ index: 0 })).getDOMNode(); expect(tree).toMatchSnapshot(); }); @@ -105,49 +106,49 @@ describe('LogViewer', () => { it('renders a row with given index as line number', () => { const logViewer = new LogViewer({ logLines: ['line1', 'line2'] }); - const tree = shallow((logViewer as any)._rowRenderer({ index: 0 })); + const tree = mount((logViewer as any)._rowRenderer({ index: 0 })).getDOMNode(); expect(tree).toMatchSnapshot(); }); it('renders a row with error', () => { const logViewer = new LogViewer({ logLines: ['line1 with error', 'line2'] }); - const tree = shallow((logViewer as any)._rowRenderer({ index: 0 })); + const tree = mount((logViewer as any)._rowRenderer({ index: 0 })).getDOMNode(); expect(tree).toMatchSnapshot(); }); it('renders a row with upper case error', () => { const logViewer = new LogViewer({ logLines: ['line1 with ERROR', 'line2'] }); - const tree = shallow((logViewer as any)._rowRenderer({ index: 0 })); + const tree = mount((logViewer as any)._rowRenderer({ index: 0 })).getDOMNode(); expect(tree).toMatchSnapshot(); }); it('renders a row with error word as substring', () => { const logViewer = new LogViewer({ logLines: ['line1 with errorWord', 'line2'] }); - const tree = shallow((logViewer as any)._rowRenderer({ index: 0 })); + const tree = mount((logViewer as any)._rowRenderer({ index: 0 })).getDOMNode(); expect(tree).toMatchSnapshot(); }); it('renders a row with warning', () => { const logViewer = 
new LogViewer({ logLines: ['line1 with warning', 'line2'] });
-    const tree = shallow((logViewer as any)._rowRenderer({ index: 0 }));
+    const tree = mount((logViewer as any)._rowRenderer({ index: 0 })).getDOMNode();
     expect(tree).toMatchSnapshot();
   });

   it('renders a row with warn', () => {
     const logViewer = new LogViewer({ logLines: ['line1 with warn', 'line2'] });
-    const tree = shallow((logViewer as any)._rowRenderer({ index: 0 }));
+    const tree = mount((logViewer as any)._rowRenderer({ index: 0 })).getDOMNode();
     expect(tree).toMatchSnapshot();
   });

   it('renders a row with upper case warning', () => {
     const logViewer = new LogViewer({ logLines: ['line1 with WARNING', 'line2'] });
-    const tree = shallow((logViewer as any)._rowRenderer({ index: 0 }));
+    const tree = mount((logViewer as any)._rowRenderer({ index: 0 })).getDOMNode();
     expect(tree).toMatchSnapshot();
   });

   it('renders a row with warning word as substring', () => {
     const logViewer = new LogViewer({ logLines: ['line1 with warning:something', 'line2'] });
-    const tree = shallow((logViewer as any)._rowRenderer({ index: 0 }));
+    const tree = mount((logViewer as any)._rowRenderer({ index: 0 })).getDOMNode();
     expect(tree).toMatchSnapshot();
   });
 });
diff --git a/frontend/src/components/LogViewer.tsx b/frontend/src/components/LogViewer.tsx
index 4030dc2bb151..235d0ff9f829 100644
--- a/frontend/src/components/LogViewer.tsx
+++ b/frontend/src/components/LogViewer.tsx
@@ -18,6 +18,7 @@ import * as React from 'react';
 import { List, AutoSizer, ListRowProps } from 'react-virtualized';
 import { fontsize, fonts } from '../Css';
 import { stylesheet } from 'typestyle';
+import { OverscanIndicesGetter } from 'react-virtualized/dist/es/Grid';

 const css = stylesheet({
   a: {
@@ -47,46 +48,118 @@ const css = stylesheet({
     userSelect: 'none',
   },
   root: {
-    $nest: {
-      '& .ReactVirtualized__Grid__innerScrollContainer': {
-        overflow: 'auto !important',
-      },
-    },
+    // We cannot easily add padding here without breaking react-virtualized's size calculation;
+    // for details see https://github.com/bvaughn/react-virtualized/issues/992
+    // Specifically, a complex solution was proposed in https://github.com/bvaughn/react-virtualized/issues/992#issuecomment-371145943.
+    // We may consider that later.
     backgroundColor: '#222',
     color: '#fff',
     fontFamily: fonts.code,
     fontSize: fontsize.small,
-    padding: '10px 0',
+    // This override and listContainerStyleOverride are workarounds to allow horizontal scroll.
+    // Reference: https://github.com/bvaughn/react-virtualized/issues/1248
+    overflow: 'auto !important',
     whiteSpace: 'pre',
   },
 });

+const listContainerStyleOverride = {
+  overflow: 'visible',
+};
+
 interface LogViewerProps {
-  classes?: string;
   logLines: string[];
 }

-class LogViewer extends React.Component {
+// Use the same amount of overscan above and below the visible rows.
+//
+// Why:
+// * The default behavior is that when we scroll in one direction, content off
+// screen in the other direction is unmounted from the browser immediately.
+// This caused a bug when selecting lines while scrolling.
+// * With the new behavior implemented below, we overscan in both directions
+// regardless of which direction the user is scrolling, so lines within
+// overscanRowCount rows of the viewport stay mounted and remain selectable.
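The getter these comments describe follows immediately below. Separately, since the render() JSX of this file did not survive in this copy of the diff, here is a sketch of how the new pieces would plug into react-virtualized's List; the prop names are the library's public API, while the row height and overscan budget are assumptions:

    <AutoSizer>
      {({ height, width }) => (
        <List
          className={css.root}
          containerStyle={listContainerStyleOverride} // horizontal-scroll workaround
          width={width}
          height={height}
          rowCount={this.props.logLines.length}
          rowHeight={15} // assumed fixed row height
          rowRenderer={this._rowRenderer.bind(this)}
          overscanIndicesGetter={overscanOnBothDirections}
          overscanRowCount={400} // assumed overscan budget
          onScroll={this.handleScroll}
        />
      )}
    </AutoSizer>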
+const overscanOnBothDirections: OverscanIndicesGetter = ({ + direction, // One of "horizontal" or "vertical" + cellCount, // Number of rows or columns in the current axis + scrollDirection, // 1 (forwards) or -1 (backwards) + overscanCellsCount, // Maximum number of cells to over-render in either direction + startIndex, // Begin of range of visible cells + stopIndex, // End of range of visible cells +}) => { + return { + overscanStartIndex: Math.max(0, startIndex - overscanCellsCount), + overscanStopIndex: Math.min(cellCount - 1, stopIndex + overscanCellsCount), + }; +}; + +interface LogViewerState { + followNewLogs: boolean; +} + +class LogViewer extends React.Component { + public state = { + followNewLogs: true, + }; + private _rootRef = React.createRef(); public componentDidMount(): void { - this._scrollToEnd(); + // Wait until the next frame to scroll to bottom, because doms haven't been + // rendered when running this. + setTimeout(() => { + this._scrollToEnd(); + }); } public componentDidUpdate(): void { - this._scrollToEnd(); + if (this.state.followNewLogs) { + this._scrollToEnd(); + } } public render(): JSX.Element { - return - {({ height, width }) => ( - - )} - ; + return ( + + {({ height, width }) => ( + + )} + + ); } + private handleScroll = (info: { + clientHeight: number; + scrollHeight: number; + scrollTop: number; + }) => { + const offsetTolerance = 20; // pixels + const isScrolledToBottom = + info.scrollHeight - info.scrollTop - info.clientHeight <= offsetTolerance; + if (isScrolledToBottom !== this.state.followNewLogs) { + this.setState({ + followNewLogs: isScrolledToBottom, + }); + } + }; + private _scrollToEnd(): void { const root = this._rootRef.current; if (root) { @@ -96,56 +169,69 @@ class LogViewer extends React.Component { private _rowRenderer(props: ListRowProps): React.ReactNode { const { style, key, index } = props; + const line = this.props.logLines[index]; return (
- {index + 1} - - - {this._parseLine(this.props.logLines[index]).map((piece, p) => ( - {piece} - ))} - +
); } +} - private _getLineStyle(index: number): React.CSSProperties { - const line = this.props.logLines[index]; - const lineLowerCase = line.toLowerCase(); - if (lineLowerCase.indexOf('error') > -1 || lineLowerCase.indexOf('fail') > -1) { - return { - backgroundColor: '#700000', - color: 'white', - }; - } else if (lineLowerCase.indexOf('warn') > -1) { - return { - backgroundColor: '#545400', - color: 'white', - }; - } else { - return {}; - } +const LogLine: React.FC<{ index: number; line: string }> = ({ index, line }) => ( + <> + + {index + 1} + + + {parseLine(line).map((piece, p) => ( + {piece} + ))} + + +); +// improve performance when rerendering, because we render a lot of logs +const MemoedLogLine = React.memo(LogLine); + +function getLineStyle(line: string): React.CSSProperties { + const lineLowerCase = line.toLowerCase(); + if (lineLowerCase.indexOf('error') > -1 || lineLowerCase.indexOf('fail') > -1) { + return { + backgroundColor: '#700000', + color: 'white', + }; + } else if (lineLowerCase.indexOf('warn') > -1) { + return { + backgroundColor: '#545400', + color: 'white', + }; + } else { + return {}; } +} - private _parseLine(line: string): React.ReactNode[] { - // Linkify URLs starting with http:// or https:// - const urlPattern = /(\b(https?):\/\/[-A-Z0-9+&@#\/%?=~_|!:,.;]*[-A-Z0-9+&@#\/%=~_|])/gim; - let lastMatch = 0; - let match = urlPattern.exec(line); - const nodes = []; - while (match) { - // Append all text before URL match - nodes.push({line.substr(lastMatch, match.index)}); - // Append URL via an anchor element - nodes.push({match[0]}); - - lastMatch = match.index + match[0].length; - match = urlPattern.exec(line); - } - // Append all text after final URL - nodes.push({line.substr(lastMatch)}); - return nodes; +function parseLine(line: string): React.ReactNode[] { + // Linkify URLs starting with http:// or https:// + const urlPattern = /(\b(https?):\/\/[-A-Z0-9+&@#\/%?=~_|!:,.;]*[-A-Z0-9+&@#\/%=~_|])/gim; + let lastMatch = 0; + let match = urlPattern.exec(line); + const nodes = []; + while (match) { + // Append all text before URL match + nodes.push({line.substr(lastMatch, match.index)}); + // Append URL via an anchor element + nodes.push( + + {match[0]} + , + ); + + lastMatch = match.index + match[0].length; + match = urlPattern.exec(line); } + // Append all text after final URL + nodes.push({line.substr(lastMatch)}); + return nodes; } export default LogViewer; diff --git a/frontend/src/components/Metric.test.tsx b/frontend/src/components/Metric.test.tsx index 5ba52b77c180..958bbeb7f535 100644 --- a/frontend/src/components/Metric.test.tsx +++ b/frontend/src/components/Metric.test.tsx @@ -67,7 +67,8 @@ describe('Metric', () => { ); + />, + ); expect(tree).toMatchSnapshot(); }); @@ -77,7 +78,8 @@ describe('Metric', () => { ); + />, + ); expect(consoleSpy).toHaveBeenCalledTimes(0); expect(tree).toMatchSnapshot(); }); @@ -88,7 +90,8 @@ describe('Metric', () => { ); + />, + ); expect(consoleSpy).toHaveBeenCalled(); expect(tree).toMatchSnapshot(); }); @@ -99,7 +102,8 @@ describe('Metric', () => { ); + />, + ); expect(consoleSpy).toHaveBeenCalled(); expect(tree).toMatchSnapshot(); }); diff --git a/frontend/src/components/Metric.tsx b/frontend/src/components/Metric.tsx index f3bd85269c47..a64f5fe15acf 100644 --- a/frontend/src/components/Metric.tsx +++ b/frontend/src/components/Metric.tsx @@ -43,7 +43,6 @@ interface MetricProps { } class Metric extends React.PureComponent { - public render(): JSX.Element { const { metric, metadata } = this.props; if (!metric || 
metric.number_value === undefined) { @@ -68,23 +67,25 @@ class Metric extends React.PureComponent { } if (metric.number_value - metadata.minValue < 0) { - logger.error(`Metric ${metadata.name}'s value:` - + ` (${metric.number_value}) was lower than the supposed minimum of` - + ` (${metadata.minValue})`); + logger.error( + `Metric ${metadata.name}'s value:` + + ` (${metric.number_value}) was lower than the supposed minimum of` + + ` (${metadata.minValue})`, + ); return
{displayString}
; } if (metadata.maxValue - metric.number_value < 0) { - logger.error(`Metric ${metadata.name}'s value:` - + ` (${metric.number_value}) was greater than the supposed maximum of` - + ` (${metadata.maxValue})`); + logger.error( + `Metric ${metadata.name}'s value:` + + ` (${metric.number_value}) was greater than the supposed maximum of` + + ` (${metadata.maxValue})`, + ); return
{displayString}
; } const barWidth = - (metric.number_value - metadata.minValue) - / (metadata.maxValue - metadata.minValue) - * 100; + ((metric.number_value - metadata.minValue) / (metadata.maxValue - metadata.minValue)) * 100; width = `calc(${barWidth}%)`; } diff --git a/frontend/src/components/MinioArtifactLink.test.tsx b/frontend/src/components/MinioArtifactLink.test.tsx new file mode 100644 index 000000000000..7ecbd9ceb76a --- /dev/null +++ b/frontend/src/components/MinioArtifactLink.test.tsx @@ -0,0 +1,75 @@ +/* + * Copyright 2019 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the 'License'); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an 'AS IS' BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +import MinioArtifactLink from './MinioArtifactLink'; + +describe('MinioArtifactLink', () => { + it('handles undefined artifact', () => { + expect(MinioArtifactLink(undefined as any)).toMatchSnapshot(); + }); + + it('handles null artifact', () => { + expect(MinioArtifactLink(null as any)).toMatchSnapshot(); + }); + + it('handles empty artifact', () => { + expect(MinioArtifactLink({} as any)).toMatchSnapshot(); + }); + + it('handles invalid artifact: no bucket', () => { + const s3artifact = { + accessKeySecret: { key: 'accesskey', optional: false, name: 'minio' }, + bucket: '', + endpoint: 'minio.kubeflow', + key: 'bar', + secretKeySecret: { key: 'secretkey', optional: false, name: 'minio' }, + }; + expect(MinioArtifactLink(s3artifact)).toMatchSnapshot(); + }); + + it('handles invalid artifact: no key', () => { + const s3artifact = { + accessKeySecret: { key: 'accesskey', optional: false, name: 'minio' }, + bucket: 'foo', + endpoint: 'minio.kubeflow', + key: '', + secretKeySecret: { key: 'secretkey', optional: false, name: 'minio' }, + }; + expect(MinioArtifactLink(s3artifact)).toMatchSnapshot(); + }); + + it('handles s3 artifact', () => { + const s3artifact = { + accessKeySecret: { key: 'accesskey', optional: false, name: 'minio' }, + bucket: 'foo', + endpoint: 's3.amazonaws.com', + key: 'bar', + secretKeySecret: { key: 'secretkey', optional: false, name: 'minio' }, + }; + expect(MinioArtifactLink(s3artifact)).toMatchSnapshot(); + }); + + it('handles minio artifact', () => { + const minioartifact = { + accessKeySecret: { key: 'accesskey', optional: false, name: 'minio' }, + bucket: 'foo', + endpoint: 'minio.kubeflow', + key: 'bar', + secretKeySecret: { key: 'secretkey', optional: false, name: 'minio' }, + }; + expect(MinioArtifactLink(minioartifact)).toMatchSnapshot(); + }); +}); diff --git a/frontend/src/components/MinioArtifactLink.tsx b/frontend/src/components/MinioArtifactLink.tsx new file mode 100644 index 000000000000..a1682b5d9596 --- /dev/null +++ b/frontend/src/components/MinioArtifactLink.tsx @@ -0,0 +1,32 @@ +import * as React from 'react'; +import { StoragePath, StorageService } from '../lib/WorkflowParser'; +import { S3Artifact } from '../../third_party/argo-ui/argo_template'; + +const artifactApiUri = ({ source, bucket, key }: StoragePath) => + 'artifacts/get' + `?source=${source}&bucket=${bucket}&key=${encodeURIComponent(key)}`; + +/** + * A component that renders an artifact link. 
+ */ +const MinioArtifactLink: React.FC = s3artifact => { + if (!s3artifact || !s3artifact.key || !s3artifact.bucket) { + return null; + } + + const { key, bucket, endpoint } = s3artifact; + const source = endpoint === 's3.amazonaws.com' ? StorageService.S3 : StorageService.MINIO; + const linkText = `${source.toString()}://${bucket}/${key}`; + // Opens in new window safely + return ( + + {linkText} + + ); +}; + +export default MinioArtifactLink; diff --git a/frontend/src/components/NewRunParameters.test.tsx b/frontend/src/components/NewRunParameters.test.tsx index 9e0e4e851223..9b552fde87ce 100644 --- a/frontend/src/components/NewRunParameters.test.tsx +++ b/frontend/src/components/NewRunParameters.test.tsx @@ -43,12 +43,14 @@ describe('NewRunParameters', () => { handleParamChange, initialParams: [ { name: 'testParam1', value: 'testVal1' }, - { name: 'testParam2', value: 'testVal2' } + { name: 'testParam2', value: 'testVal2' }, ], titleMessage: 'Specify parameters required by the pipeline', } as NewRunParametersProps; const tree = shallow(); - tree.find('#newRunPipelineParam1').simulate('change', { target: { value: 'test param value' } }); + tree + .find('#newRunPipelineParam1') + .simulate('change', { target: { value: 'test param value' } }); expect(handleParamChange).toHaveBeenCalledTimes(1); expect(handleParamChange).toHaveBeenLastCalledWith(1, 'test param value'); }); diff --git a/frontend/src/components/NewRunParameters.tsx b/frontend/src/components/NewRunParameters.tsx index 52a2c653063e..f90955f7568b 100644 --- a/frontend/src/components/NewRunParameters.tsx +++ b/frontend/src/components/NewRunParameters.tsx @@ -39,11 +39,18 @@ class NewRunParameters extends React.Component {
{titleMessage}
{!!initialParams.length && (
- {initialParams.map((param, i) => - handleParamChange(i, ev.target.value || '')} - style={{ maxWidth: 600 }} className={commonCss.textField}/>)} + {initialParams.map((param, i) => ( + handleParamChange(i, ev.target.value || '')} + style={{ maxWidth: 600 }} + className={commonCss.textField} + /> + ))}
)}
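The opening tag of the mapped input above was stripped from this copy of the diff. Judging by the visible props and the '#newRunPipelineParam1' selector in the test hunk, it is presumably a Material-UI TextField; a hypothetical reconstruction, with all props taken verbatim from the diff:

    {initialParams.map((param, i) => (
      <TextField // hypothetical tag: the element name is missing from the diff
        id={'newRunPipelineParam' + i}
        key={i}
        label={param.name}
        value={param.value}
        onChange={ev => handleParamChange(i, ev.target.value || '')}
        style={{ maxWidth: 600 }}
        className={commonCss.textField}
      />
    ))}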
diff --git a/frontend/src/components/PlotCard.tsx b/frontend/src/components/PlotCard.tsx index aa7e45e93950..e1d53ca114e5 100644 --- a/frontend/src/components/PlotCard.tsx +++ b/frontend/src/components/PlotCard.tsx @@ -100,8 +100,10 @@ class PlotCard extends React.Component { } public shouldComponentUpdate(nextProps: PlotCardProps, nextState: PlotCardState): boolean { - return JSON.stringify(nextProps) !== JSON.stringify(this.props) || - nextState.fullscreenDialogOpen !== this.state.fullscreenDialogOpen; + return ( + JSON.stringify(nextProps) !== JSON.stringify(this.props) || + nextState.fullscreenDialogOpen !== this.state.fullscreenDialogOpen + ); } public render(): JSX.Element | null { @@ -111,38 +113,50 @@ class PlotCard extends React.Component { return null; } - return
- -
-
{title}
-
- + return ( +
+ +
+
+ {title} +
+
+ +
-
- - + + - this.setState({ fullscreenDialogOpen: false })}> -
- - {componentMap[configs[0].type].prototype.getDisplayName()} - - ({title}) -
-
- -
-
-
; + this.setState({ fullscreenDialogOpen: false })} + > +
+ + {componentMap[configs[0].type].prototype.getDisplayName()} + + ({title}) +
+
+ +
+
+
+ ); } } diff --git a/frontend/src/components/ResourceInfo.tsx b/frontend/src/components/ResourceInfo.tsx index 1a7906b9bc90..91a609561f1c 100644 --- a/frontend/src/components/ResourceInfo.tsx +++ b/frontend/src/components/ResourceInfo.tsx @@ -18,7 +18,7 @@ import { stylesheet } from 'typestyle'; import { color, commonCss } from '../Css'; import { getMetadataValue } from '../lib/Utils'; import { Artifact, Execution } from '../generated/src/apis/metadata/metadata_store_pb'; -import { GcsLink } from './GcsLink'; +import { ArtifactLink } from './ArtifactLink'; export const css = stylesheet({ field: { @@ -41,7 +41,7 @@ export const css = stylesheet({ fontSize: '14px', letterSpacing: '0.2px', lineHeight: '20px', - } + }, }); export enum ResourceType { @@ -64,7 +64,6 @@ interface ExecutionProps { export type ResourceInfoProps = ArtifactProps | ExecutionProps; export class ResourceInfo extends React.Component { - public render(): JSX.Element { const { resource } = this.props; const propertyMap = resource.getPropertiesMap(); @@ -74,38 +73,43 @@ export class ResourceInfo extends React.Component {

Type: {this.props.typeName}

{(() => { if (this.props.resourceType === ResourceType.ARTIFACT) { - return <> -
URI
-
- -
- ; + return ( + <> +
URI
+
+ +
+ + ); } return null; })()}

Properties

- {propertyMap.getEntryList() + {propertyMap + .getEntryList() // TODO: __ALL_META__ is something of a hack, is redundant, and can be ignored .filter(k => k[0] !== '__ALL_META__') - .map(k => + .map(k => (
{k[0]}
-
{propertyMap && prettyPrintJsonValue(getMetadataValue(propertyMap.get(k[0])))}
+
+ {propertyMap && prettyPrintJsonValue(getMetadataValue(propertyMap.get(k[0])))} +
- ) - } + ))}

Custom Properties

- {customPropertyMap.getEntryList().map(k => + {customPropertyMap.getEntryList().map(k => (
{k[0]}
- {customPropertyMap && prettyPrintJsonValue(getMetadataValue(customPropertyMap.get(k[0])))} + {customPropertyMap && + prettyPrintJsonValue(getMetadataValue(customPropertyMap.get(k[0])))}
- )} + ))}
); diff --git a/frontend/src/components/Router.tsx b/frontend/src/components/Router.tsx index 521f5ead63ac..cb58522ede55 100644 --- a/frontend/src/components/Router.tsx +++ b/frontend/src/components/Router.tsx @@ -97,17 +97,23 @@ export const RoutePage = { RUN_DETAILS: `/runs/details/:${RouteParams.runId}`, }; -// tslint:disable-next-line:variable-name export const RoutePageFactory = { artifactDetails: (artifactType: string, artifactId: number) => { - return RoutePage.ARTIFACT_DETAILS - .replace(`:${RouteParams.ARTIFACT_TYPE}+`, artifactType) - .replace(`:${RouteParams.ID}`, '' + artifactId); - } + return RoutePage.ARTIFACT_DETAILS.replace( + `:${RouteParams.ARTIFACT_TYPE}+`, + artifactType, + ).replace(`:${RouteParams.ID}`, '' + artifactId); + }, +}; + +export const ExternalLinks = { + AI_HUB: 'https://aihub.cloud.google.com/u/0/s?category=pipeline', + DOCUMENTATION: 'https://www.kubeflow.org/docs/pipelines/', + GITHUB: 'https://github.com/kubeflow/pipelines', }; export interface DialogProps { - buttons?: Array<{ onClick?: () => any, text: string }>; + buttons?: Array<{ onClick?: () => any; text: string }>; // TODO: This should be generalized to any react component. content?: string; onClose?: () => any; @@ -123,7 +129,6 @@ interface RouteComponentState { } class Router extends React.Component<{}, RouteComponentState> { - constructor(props: any) { super(props); @@ -144,13 +149,17 @@ class Router extends React.Component<{}, RouteComponentState> { updateToolbar: this._updateToolbar.bind(this), }; - const routes: Array<{ path: string, Component: React.ComponentClass, view?: any }> = [ + const routes: Array<{ path: string; Component: React.ComponentClass; view?: any }> = [ { path: RoutePage.ARCHIVE, Component: Archive }, { path: RoutePage.ARTIFACTS, Component: ArtifactList }, { path: RoutePage.ARTIFACT_DETAILS, Component: ArtifactDetails }, { path: RoutePage.EXECUTIONS, Component: ExecutionList }, { path: RoutePage.EXECUTION_DETAILS, Component: ExecutionDetails }, - { path: RoutePage.EXPERIMENTS, Component: ExperimentsAndRuns, view: ExperimentsAndRunsTab.EXPERIMENTS }, + { + Component: ExperimentsAndRuns, + path: RoutePage.EXPERIMENTS, + view: ExperimentsAndRunsTab.EXPERIMENTS, + }, { path: RoutePage.EXPERIMENT_DETAILS, Component: ExperimentDetails }, { path: RoutePage.NEW_EXPERIMENT, Component: NewExperiment }, { path: RoutePage.NEW_RUN, Component: NewRun }, @@ -166,24 +175,39 @@ class Router extends React.Component<{}, RouteComponentState> {
- ()} /> + } + />
- ()} /> - {this.state.bannerProps.message - && } + /> + {this.state.bannerProps.message && ( + } + refresh={this.state.bannerProps.refresh} + /> + )} - ( - - )} /> + } + /> {routes.map((route, i) => { const { path, Component, ...otherProps } = { ...route }; - return ( - - )} />; + return ( + ( + + )} + /> + ); })} {/* 404 */} @@ -199,8 +223,12 @@ class Router extends React.Component<{}, RouteComponentState> {
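A usage sketch for the RoutePageFactory added in this hunk: artifactDetails() substitutes the RouteParams.ARTIFACT_TYPE token (declared with a trailing '+', so the value may contain slashes) and the RouteParams.ID token in RoutePage.ARTIFACT_DETAILS, whose pattern is defined elsewhere in this file. The argument values here are illustrative:

    // Illustrative values, producing a concrete path for react-router:
    const artifactUrl = RoutePageFactory.artifactDetails('kubeflow.org/alpha/Model', 42);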
- this._handleDialogClosed()}> + this._handleDialogClosed()} + > {this.state.dialogProps.title && ( {this.state.dialogProps.title} )} @@ -211,11 +239,16 @@ class Router extends React.Component<{}, RouteComponentState> { )} {this.state.dialogProps.buttons && ( - {this.state.dialogProps.buttons.map((b, i) => - )} + + ))} )} diff --git a/frontend/src/components/SideNav.test.tsx b/frontend/src/components/SideNav.test.tsx index 3e779895d61c..a68c5549be41 100644 --- a/frontend/src/components/SideNav.test.tsx +++ b/frontend/src/components/SideNav.test.tsx @@ -20,7 +20,7 @@ import SideNav, { css } from './SideNav'; import TestUtils from '../TestUtils'; import { Apis } from '../lib/Apis'; import { LocalStorage } from '../lib/LocalStorage'; -import { ReactWrapper, ShallowWrapper, shallow, } from 'enzyme'; +import { ReactWrapper, ShallowWrapper, shallow } from 'enzyme'; import { RoutePage } from './Router'; import { RouterProps } from 'react-router'; @@ -48,7 +48,7 @@ describe('SideNav', () => { apiServerCommitHash: 'd3c4add0a95e930c70a330466d0923827784eb9a', apiServerReady: true, buildDate: 'Wed Jan 9 19:40:24 UTC 2019', - frontendCommitHash: '8efb2fcff9f666ba5b101647e909dc9c6889cecb' + frontendCommitHash: '8efb2fcff9f666ba5b101647e909dc9c6889cecb', })); checkHubSpy.mockImplementation(() => ({ ok: true })); @@ -229,9 +229,9 @@ describe('SideNav', () => { apiServerCommitHash: '0a7b9e38f2b9bcdef4bbf3234d971e1635b50cd5', apiServerReady: true, buildDate: 'Tue Oct 23 14:23:53 UTC 2018', - frontendCommitHash: '302e93ce99099173f387c7e0635476fe1b69ea98' + frontendCommitHash: '302e93ce99099173f387c7e0635476fe1b69ea98', }; - buildInfoSpy.mockImplementationOnce(() => (buildInfo)); + buildInfoSpy.mockImplementationOnce(() => buildInfo); tree = shallow(); await TestUtils.flushPromises(); @@ -239,7 +239,8 @@ describe('SideNav', () => { expect(tree.state('displayBuildInfo')).toEqual({ commitHash: buildInfo.apiServerCommitHash.substring(0, 7), - commitUrl: 'https://www.github.com/kubeflow/pipelines/commit/' + buildInfo.apiServerCommitHash, + commitUrl: + 'https://www.github.com/kubeflow/pipelines/commit/' + buildInfo.apiServerCommitHash, date: new Date(buildInfo.buildDate).toLocaleDateString(), }); }); @@ -249,16 +250,18 @@ describe('SideNav', () => { apiServerReady: true, // No apiServerCommitHash buildDate: 'Tue Oct 23 14:23:53 UTC 2018', - frontendCommitHash: '302e93ce99099173f387c7e0635476fe1b69ea98' + frontendCommitHash: '302e93ce99099173f387c7e0635476fe1b69ea98', }; - buildInfoSpy.mockImplementationOnce(() => (buildInfo)); + buildInfoSpy.mockImplementationOnce(() => buildInfo); tree = shallow(); await TestUtils.flushPromises(); - expect(tree.state('displayBuildInfo')).toEqual(expect.objectContaining({ - commitHash: buildInfo.frontendCommitHash.substring(0, 7), - })); + expect(tree.state('displayBuildInfo')).toEqual( + expect.objectContaining({ + commitHash: buildInfo.frontendCommitHash.substring(0, 7), + }), + ); }); it('uses the frontend commit hash for the link URL if the api server hash is not returned', async () => { @@ -266,33 +269,38 @@ describe('SideNav', () => { apiServerReady: true, // No apiServerCommitHash buildDate: 'Tue Oct 23 14:23:53 UTC 2018', - frontendCommitHash: '302e93ce99099173f387c7e0635476fe1b69ea98' + frontendCommitHash: '302e93ce99099173f387c7e0635476fe1b69ea98', }; - buildInfoSpy.mockImplementationOnce(() => (buildInfo)); + buildInfoSpy.mockImplementationOnce(() => buildInfo); tree = shallow(); await TestUtils.flushPromises(); - 
expect(tree.state('displayBuildInfo')).toEqual(expect.objectContaining({ - commitUrl: 'https://www.github.com/kubeflow/pipelines/commit/' + buildInfo.frontendCommitHash, - })); + expect(tree.state('displayBuildInfo')).toEqual( + expect.objectContaining({ + commitUrl: + 'https://www.github.com/kubeflow/pipelines/commit/' + buildInfo.frontendCommitHash, + }), + ); }); - it('displays \'unknown\' if the frontend and api server commit hashes are not returned', async () => { + it("displays 'unknown' if the frontend and api server commit hashes are not returned", async () => { const buildInfo = { apiServerReady: true, // No apiServerCommitHash buildDate: 'Tue Oct 23 14:23:53 UTC 2018', // No frontendCommitHash }; - buildInfoSpy.mockImplementationOnce(() => (buildInfo)); + buildInfoSpy.mockImplementationOnce(() => buildInfo); tree = shallow(); await TestUtils.flushPromises(); - expect(tree.state('displayBuildInfo')).toEqual(expect.objectContaining({ - commitHash: 'unknown', - })); + expect(tree.state('displayBuildInfo')).toEqual( + expect.objectContaining({ + commitHash: 'unknown', + }), + ); }); it('links to the github repo root if the frontend and api server commit hashes are not returned', async () => { @@ -302,31 +310,35 @@ describe('SideNav', () => { buildDate: 'Tue Oct 23 14:23:53 UTC 2018', // No frontendCommitHash }; - buildInfoSpy.mockImplementationOnce(() => (buildInfo)); + buildInfoSpy.mockImplementationOnce(() => buildInfo); tree = shallow(); await TestUtils.flushPromises(); - expect(tree.state('displayBuildInfo')).toEqual(expect.objectContaining({ - commitUrl: 'https://www.github.com/kubeflow/pipelines', - })); + expect(tree.state('displayBuildInfo')).toEqual( + expect.objectContaining({ + commitUrl: 'https://www.github.com/kubeflow/pipelines', + }), + ); }); - it('displays \'unknown\' if the date is not returned', async () => { + it("displays 'unknown' if the date is not returned", async () => { const buildInfo = { apiServerCommitHash: '0a7b9e38f2b9bcdef4bbf3234d971e1635b50cd5', apiServerReady: true, // No buildDate - frontendCommitHash: '302e93ce99099173f387c7e0635476fe1b69ea98' + frontendCommitHash: '302e93ce99099173f387c7e0635476fe1b69ea98', }; - buildInfoSpy.mockImplementationOnce(() => (buildInfo)); + buildInfoSpy.mockImplementationOnce(() => buildInfo); tree = shallow(); await TestUtils.flushPromises(); - expect(tree.state('displayBuildInfo')).toEqual(expect.objectContaining({ - date: 'unknown', - })); + expect(tree.state('displayBuildInfo')).toEqual( + expect.objectContaining({ + date: 'unknown', + }), + ); }); it('logs an error if the call getBuildInfo fails', async () => { diff --git a/frontend/src/components/SideNav.tsx b/frontend/src/components/SideNav.tsx index 9a607e3e9092..c4eeeb3b301a 100644 --- a/frontend/src/components/SideNav.tsx +++ b/frontend/src/components/SideNav.tsx @@ -24,12 +24,14 @@ import ExperimentsIcon from '../icons/experiments'; import IconButton from '@material-ui/core/IconButton'; import JupyterhubIcon from '@material-ui/icons/Code'; import OpenInNewIcon from '@material-ui/icons/OpenInNew'; +import DescriptionIcon from '@material-ui/icons/Description'; +import GitHubIcon from '../icons/GitHub-Mark-120px-plus.png'; import PipelinesIcon from '../icons/pipelines'; import Tooltip from '@material-ui/core/Tooltip'; import { Apis } from '../lib/Apis'; import { Link } from 'react-router-dom'; import { LocalStorage, LocalStorageKey } from '../lib/LocalStorage'; -import { RoutePage, RoutePrefix } from '../components/Router'; +import { RoutePage, RoutePrefix, 
ExternalLinks } from '../components/Router'; import { RouterProps } from 'react-router'; import { classes, stylesheet } from 'typestyle'; import { fontsize, commonCss } from '../Css'; @@ -91,6 +93,12 @@ export const css = stylesheet({ collapsedChevron: { transform: 'rotate(180deg)', }, + collapsedExternalLabel: { + // Hide text when collapsing, but do it with a transition of both height and + // opacity + height: 0, + opacity: 0, + }, collapsedLabel: { // Hide text when collapsing, but do it with a transition opacity: 0, @@ -101,6 +109,13 @@ export const css = stylesheet({ collapsedSeparator: { margin: '20px !important', }, + icon: { + height: 20, + width: 20, + }, + iconImage: { + opacity: 0.6, // Images are too colorful there by default, reduce their color. + }, indicator: { borderBottom: '3px solid transparent', borderLeft: `3px solid ${sideNavColors.fgActive}`, @@ -131,7 +146,7 @@ export const css = stylesheet({ verticalAlign: 'super', }, link: { - color: '#77abda' + color: '#77abda', }, openInNewTabIcon: { height: 12, @@ -199,15 +214,15 @@ export default class SideNav extends React.Component const commitHash = buildInfo.apiServerCommitHash || buildInfo.frontendCommitHash || ''; displayBuildInfo = { commitHash: commitHash ? commitHash.substring(0, 7) : 'unknown', - commitUrl: 'https://www.github.com/kubeflow/pipelines' - + (commitHash ? `/commit/${commitHash}` : ''), + commitUrl: + 'https://www.github.com/kubeflow/pipelines' + (commitHash ? `/commit/${commitHash}` : ''), date: buildInfo.buildDate ? new Date(buildInfo.buildDate).toLocaleDateString() : 'unknown', }; } catch (err) { logger.error('Failed to retrieve build info', err); } - this.setStateSafe({ displayBuildInfo}); + this.setStateSafe({ displayBuildInfo }); } public componentWillUnmount(): void { @@ -223,118 +238,255 @@ export default class SideNav extends React.Component }; return ( -
+
-
- +
+ - -
- +
+ - -
- +
+ - -
- +
+ - {this.state.jupyterHubAvailable && ( - - - )}
-
- +
+ -
- + } + /> + ( + + )} + /> + ( + + )} + /> +
+
{displayBuildInfo && ( - + )}
-
+
); } private _highlightExperimentsButton(page: string): boolean { - return page.startsWith(RoutePage.EXPERIMENTS) - || page.startsWith(RoutePage.RUNS) - || page.startsWith(RoutePrefix.RECURRING_RUN) - || page.startsWith(RoutePage.COMPARE); + return ( + page.startsWith(RoutePage.EXPERIMENTS) || + page.startsWith(RoutePage.RUNS) || + page.startsWith(RoutePrefix.RECURRING_RUN) || + page.startsWith(RoutePage.COMPARE) + ); } private _highlightArtifactsButton(page: string): boolean { @@ -346,10 +498,13 @@ export default class SideNav extends React.Component } private _toggleNavClicked(): void { - this.setStateSafe({ - collapsed: !this.state.collapsed, - manualCollapseState: true, - }, () => LocalStorage.saveNavbarCollapsed(this.state.collapsed)); + this.setStateSafe( + { + collapsed: !this.state.collapsed, + manualCollapseState: true, + }, + () => LocalStorage.saveNavbarCollapsed(this.state.collapsed), + ); this._toggleNavCollapsed(); } @@ -371,3 +526,30 @@ export default class SideNav extends React.Component } } } + +interface ExternalUriProps { + title: string; + to: string; + collapsed: boolean; + icon: (className: string) => React.ReactNode; +} + +// tslint:disable-next-line:variable-name +const ExternalUri: React.FC = ({ title, to, collapsed, icon }) => ( + + + + + +); diff --git a/frontend/src/components/SidePanel.tsx b/frontend/src/components/SidePanel.tsx index c5991f12b079..a33be7dde5e8 100644 --- a/frontend/src/components/SidePanel.tsx +++ b/frontend/src/components/SidePanel.tsx @@ -58,38 +58,44 @@ interface SidePanelProps { class SidePanel extends React.Component { public render(): JSX.Element { const { isBusy, isOpen, onClose, title } = this.props; - return - - {isOpen && ( -
-
- -
{title}
-
+ return ( + + + {isOpen && (
- - {isBusy === true && ( - - )} - +
+ +
{title}
+
- {this.props.children} + {isBusy === true && ( + + )} + +
{this.props.children}
-
- )} -
-
; + )} + + + ); } } diff --git a/frontend/src/components/StaticNodeDetails.tsx b/frontend/src/components/StaticNodeDetails.tsx index 44be8eb901f1..3b3bb8105731 100644 --- a/frontend/src/components/StaticNodeDetails.tsx +++ b/frontend/src/components/StaticNodeDetails.tsx @@ -41,44 +41,53 @@ class StaticNodeDetails extends React.Component { public render(): JSX.Element { const nodeInfo = this.props.nodeInfo; - return
- {(nodeInfo.nodeType === 'container') && ( -
- - - - -
Arguments
- {nodeInfo.args.map((arg, i) => -
{arg}
)} - -
Command
- {nodeInfo.command.map((c, i) =>
{c}
)} - -
Image
-
{nodeInfo.image}
- - -
- )} - - {(nodeInfo.nodeType === 'resource') && ( -
- - - - - -
- )} - - {!!nodeInfo.condition && ( -
-
Condition
-
Run when: {nodeInfo.condition}
-
- )} -
; + return ( +
+ {nodeInfo.nodeType === 'container' && ( +
+ + + + +
Arguments
+ {nodeInfo.args.map((arg, i) => ( +
+ {arg} +
+ ))} + +
Command
+ {nodeInfo.command.map((c, i) => ( +
+ {c} +
+ ))} + +
Image
+
{nodeInfo.image}
+ + +
+ )} + + {nodeInfo.nodeType === 'resource' && ( +
+ + + + + +
+ )} + + {!!nodeInfo.condition && ( +
+
Condition
+
Run when: {nodeInfo.condition}
+
+ )} +
+ ); } } diff --git a/frontend/src/components/Toolbar.test.tsx b/frontend/src/components/Toolbar.test.tsx index 21377ade5207..a032a28f1032 100644 --- a/frontend/src/components/Toolbar.test.tsx +++ b/frontend/src/components/Toolbar.test.tsx @@ -24,7 +24,7 @@ import InfoIcon from '@material-ui/icons/Info'; const action1 = jest.fn(); const action2 = jest.fn(); const actions: ToolbarActionMap = { - 'action1': { + action1: { action: action1, disabledTitle: 'test disabled title', icon: HelpIcon, @@ -32,7 +32,7 @@ const actions: ToolbarActionMap = { title: 'test title', tooltip: 'test tooltip', }, - 'action2': { + action2: { action: action2, disabled: true, disabledTitle: 'test disabled title2', @@ -67,111 +67,155 @@ describe('Toolbar', () => { }); it('renders without breadcrumbs and a string page title', () => { - const tree = shallow(); + const tree = shallow( + , + ); expect(tree).toMatchSnapshot(); }); it('renders without breadcrumbs and a component page title', () => { - const tree = shallow(test page title
} />); + const tree = shallow( + test page title
} + />, + ); expect(tree).toMatchSnapshot(); }); it('renders without breadcrumbs and one action', () => { const singleAction = { - 'action1': { + action1: { action: action1, disabledTitle: 'test disabled title', icon: HelpIcon, id: 'test id', title: 'test title', tooltip: 'test tooltip', - } + }, }; - const tree = shallow(); + const tree = shallow( + , + ); expect(tree).toMatchSnapshot(); }); it('renders without actions and one breadcrumb', () => { - const tree = shallow(); + const tree = shallow( + , + ); expect(tree).toMatchSnapshot(); }); it('renders without actions, one breadcrumb, and a page name', () => { - const tree = shallow(); + const tree = shallow( + , + ); expect(tree).toMatchSnapshot(); }); it('renders without breadcrumbs and two actions', () => { - const tree = shallow(); + const tree = shallow( + , + ); expect(tree).toMatchSnapshot(); }); it('fires the right action function when button is clicked', () => { - const tree = shallow(); - tree.find('BusyButton').at(0).simulate('click'); + const tree = shallow( + , + ); + tree + .find('BusyButton') + .at(0) + .simulate('click'); expect(action1).toHaveBeenCalled(); action1.mockClear(); }); it('renders outlined action buttons', () => { const outlinedActions = { - 'action1': { + action1: { action: jest.fn(), id: 'test outlined id', outlined: true, title: 'test outlined title', tooltip: 'test outlined tooltip', - } + }, }; - const tree = shallow(); + const tree = shallow( + , + ); expect(tree).toMatchSnapshot(); }); it('renders primary action buttons', () => { const primaryActions = { - 'action1': { + action1: { action: jest.fn(), id: 'test primary id', primary: true, title: 'test primary title', tooltip: 'test primary tooltip', - } + }, }; - const tree = shallow(); + const tree = shallow( + , + ); expect(tree).toMatchSnapshot(); }); it('renders primary action buttons without outline, even if outline is true', () => { const outlinedPrimaryActions = { - 'action1': { + action1: { action: jest.fn(), id: 'test id', outlined: true, primary: true, title: 'test title', tooltip: 'test tooltip', - } + }, }; - const tree = shallow(); + const tree = shallow( + , + ); expect(tree).toMatchSnapshot(); }); it('renders with two breadcrumbs and two actions', () => { - const tree = shallow(); + const tree = shallow( + , + ); expect(tree).toMatchSnapshot(); }); @@ -179,7 +223,9 @@ describe('Toolbar', () => { // This test uses createMemoryHistory because createBrowserHistory returns a singleton, and // there is no way to clear its entries which this test requires.
const emptyHistory = createMemoryHistory(); - const tree = shallow(); + const tree = shallow( + , + ); expect(tree).toMatchSnapshot(); }); }); diff --git a/frontend/src/components/Toolbar.tsx b/frontend/src/components/Toolbar.tsx index 228eeafce744..eaf7e7d7bf63 100644 --- a/frontend/src/components/Toolbar.tsx +++ b/frontend/src/components/Toolbar.tsx @@ -26,7 +26,9 @@ import { classes, stylesheet } from 'typestyle'; import { spacing, fonts, fontsize, color, dimension, commonCss } from '../Css'; import { CSSProperties } from 'react'; -export interface ToolbarActionMap { [key: string]: ToolbarActionConfig; } +export interface ToolbarActionMap { + [key: string]: ToolbarActionConfig; +} export interface ToolbarActionConfig { action: () => void; @@ -83,7 +85,7 @@ const css = stylesheet({ $nest: { '&:hover': { background: color.lightGrey, - } + }, }, borderRadius: 3, padding: 3, @@ -117,7 +119,6 @@ export interface ToolbarProps { } class Toolbar extends React.Component { - public render(): JSX.Element | null { const { actions, breadcrumbs, pageTitle, pageTitleTooltip } = { ...this.props }; @@ -126,16 +127,19 @@ class Toolbar extends React.Component { } return ( -
+
{/* Breadcrumb */}
{breadcrumbs.map((crumb, i) => ( {i !== 0 && } - + {crumb.displayName} @@ -143,17 +147,26 @@ class Toolbar extends React.Component {
{/* Back Arrow */} - {breadcrumbs.length > 0 && + {breadcrumbs.length > 0 && ( -
{/* Div needed because we sometimes disable a button within a tooltip */} - + {' '} + {/* Div needed because we sometimes disable a button within a tooltip */} + - + onClick={this.props.history!.goBack} + > +
-
} + + )} {/* Resource Name */} {pageTitle} @@ -165,13 +178,26 @@ class Toolbar extends React.Component { {Object.keys(actions).map((buttonKey, i) => { const button = actions[buttonKey]; return ( - -
{/* Extra level needed by tooltip when child is disabled */} - +
+ {/* Extra level needed by tooltip when child is disabled */} + + className={button.primary ? commonCss.buttonAction : ''} + />
); diff --git a/frontend/src/components/Trigger.test.tsx b/frontend/src/components/Trigger.test.tsx index 6210bb6ce9cb..3d10c5b3382f 100644 --- a/frontend/src/components/Trigger.test.tsx +++ b/frontend/src/components/Trigger.test.tsx @@ -48,14 +48,18 @@ describe('Trigger', () => { it('renders week days if the trigger type is CRON and interval is weekly', () => { const tree = shallow(); (tree.instance() as Trigger).handleChange('type')({ target: { value: TriggerType.CRON } }); - (tree.instance() as Trigger).handleChange('intervalCategory')({ target: { value: PeriodicInterval.WEEK } }); + (tree.instance() as Trigger).handleChange('intervalCategory')({ + target: { value: PeriodicInterval.WEEK }, + }); expect(tree).toMatchSnapshot(); }); it('renders all week days enabled', () => { const tree = shallow(); (tree.instance() as Trigger).handleChange('type')({ target: { value: TriggerType.CRON } }); - (tree.instance() as Trigger).handleChange('intervalCategory')({ target: { value: PeriodicInterval.WEEK } }); + (tree.instance() as Trigger).handleChange('intervalCategory')({ + target: { value: PeriodicInterval.WEEK }, + }); (tree.instance() as any)._toggleCheckAllDays(); expect(tree).toMatchSnapshot(); }); @@ -63,7 +67,9 @@ describe('Trigger', () => { it('enables a single day on click', () => { const tree = shallow(); (tree.instance() as Trigger).handleChange('type')({ target: { value: TriggerType.CRON } }); - (tree.instance() as Trigger).handleChange('intervalCategory')({ target: { value: PeriodicInterval.WEEK } }); + (tree.instance() as Trigger).handleChange('intervalCategory')({ + target: { value: PeriodicInterval.WEEK }, + }); (tree.instance() as any)._toggleDay(1); (tree.instance() as any)._toggleDay(3); expect(tree).toMatchSnapshot(); @@ -73,29 +79,41 @@ describe('Trigger', () => { it('builds an every-minute trigger by default', () => { const spy = jest.fn(); const tree = shallow(); - (tree.instance() as Trigger).handleChange('type')({ target: { value: TriggerType.INTERVALED } }); + (tree.instance() as Trigger).handleChange('type')({ + target: { value: TriggerType.INTERVALED }, + }); expect(spy).toHaveBeenLastCalledWith( - { periodic_schedule: { end_time: undefined, interval_second: '60', start_time: undefined } }, - '10' + { + periodic_schedule: { end_time: undefined, interval_second: '60', start_time: undefined }, + }, + '10', ); }); it('builds trigger with a start time if the checkbox is checked', () => { const spy = jest.fn(); const tree = shallow(); - (tree.instance() as Trigger).handleChange('type')({ target: { value: TriggerType.INTERVALED } }); - (tree.instance() as Trigger).handleChange('hasStartDate')({ target: { type: 'checkbox', checked: true } }); + (tree.instance() as Trigger).handleChange('type')({ + target: { value: TriggerType.INTERVALED }, + }); + (tree.instance() as Trigger).handleChange('hasStartDate')({ + target: { type: 'checkbox', checked: true }, + }); expect(spy).toHaveBeenLastCalledWith( { periodic_schedule: { end_time: undefined, interval_second: '60', start_time: testDate } }, - '10' + '10', ); }); it('builds trigger with the entered start date/time', () => { const spy = jest.fn(); const tree = shallow(); - (tree.instance() as Trigger).handleChange('type')({ target: { value: TriggerType.INTERVALED } }); - (tree.instance() as Trigger).handleChange('hasStartDate')({ target: { type: 'checkbox', checked: true } }); + (tree.instance() as Trigger).handleChange('type')({ + target: { value: TriggerType.INTERVALED }, + }); + (tree.instance() as 
Trigger).handleChange('hasStartDate')({ + target: { type: 'checkbox', checked: true }, + }); (tree.instance() as Trigger).handleChange('startDate')({ target: { value: '2018-11-23' } }); (tree.instance() as Trigger).handleChange('endTime')({ target: { value: '08:35' } }); expect(spy).toHaveBeenLastCalledWith( @@ -103,18 +121,22 @@ describe('Trigger', () => { periodic_schedule: { end_time: undefined, interval_second: '60', - start_time: new Date(2018, 10, 23, 8, 35) - } + start_time: new Date(2018, 10, 23, 8, 35), + }, }, - '10' + '10', ); }); it('builds trigger without the entered start date if no time is entered', () => { const spy = jest.fn(); const tree = shallow(); - (tree.instance() as Trigger).handleChange('type')({ target: { value: TriggerType.INTERVALED } }); - (tree.instance() as Trigger).handleChange('hasStartDate')({ target: { type: 'checkbox', checked: true } }); + (tree.instance() as Trigger).handleChange('type')({ + target: { value: TriggerType.INTERVALED }, + }); + (tree.instance() as Trigger).handleChange('hasStartDate')({ + target: { type: 'checkbox', checked: true }, + }); (tree.instance() as Trigger).handleChange('startDate')({ target: { value: '2018-11-23' } }); (tree.instance() as Trigger).handleChange('startTime')({ target: { value: '' } }); expect(spy).toHaveBeenLastCalledWith( @@ -123,17 +145,21 @@ describe('Trigger', () => { end_time: undefined, interval_second: '60', start_time: undefined, - } + }, }, - '10' + '10', ); }); it('builds trigger without the entered start time if no date is entered', () => { const spy = jest.fn(); const tree = shallow(); - (tree.instance() as Trigger).handleChange('type')({ target: { value: TriggerType.INTERVALED } }); - (tree.instance() as Trigger).handleChange('hasStartDate')({ target: { type: 'checkbox', checked: true } }); + (tree.instance() as Trigger).handleChange('type')({ + target: { value: TriggerType.INTERVALED }, + }); + (tree.instance() as Trigger).handleChange('hasStartDate')({ + target: { type: 'checkbox', checked: true }, + }); (tree.instance() as Trigger).handleChange('startDate')({ target: { value: '' } }); (tree.instance() as Trigger).handleChange('startTime')({ target: { value: '11:33' } }); expect(spy).toHaveBeenLastCalledWith( @@ -142,88 +168,109 @@ describe('Trigger', () => { end_time: undefined, interval_second: '60', start_time: undefined, - } + }, }, - '10' + '10', ); }); it('builds trigger with a date if both start and end checkboxes are checked', () => { const spy = jest.fn(); const tree = shallow(); - (tree.instance() as Trigger).handleChange('type')({ target: { value: TriggerType.INTERVALED } }); - (tree.instance() as Trigger).handleChange('hasStartDate')({ target: { type: 'checkbox', checked: true } }); - (tree.instance() as Trigger).handleChange('hasEndDate')({ target: { type: 'checkbox', checked: true } }); + (tree.instance() as Trigger).handleChange('type')({ + target: { value: TriggerType.INTERVALED }, + }); + (tree.instance() as Trigger).handleChange('hasStartDate')({ + target: { type: 'checkbox', checked: true }, + }); + (tree.instance() as Trigger).handleChange('hasEndDate')({ + target: { type: 'checkbox', checked: true }, + }); expect(spy).toHaveBeenLastCalledWith( { periodic_schedule: { end_time: testDate, interval_second: '60', start_time: testDate } }, - '10' + '10', ); }); it('resets trigger to no start date if it is added then removed', () => { const spy = jest.fn(); const tree = shallow(); - (tree.instance() as Trigger).handleChange('type')({ target: { value: TriggerType.INTERVALED } }); - 
(tree.instance() as Trigger).handleChange('hasStartDate')({ target: { type: 'checkbox', checked: true } }); - (tree.instance() as Trigger).handleChange('hasStartDate')({ target: { type: 'checkbox', checked: false } }); + (tree.instance() as Trigger).handleChange('type')({ + target: { value: TriggerType.INTERVALED }, + }); + (tree.instance() as Trigger).handleChange('hasStartDate')({ + target: { type: 'checkbox', checked: true }, + }); + (tree.instance() as Trigger).handleChange('hasStartDate')({ + target: { type: 'checkbox', checked: false }, + }); expect(spy).toHaveBeenLastCalledWith( - { periodic_schedule: { end_time: undefined, interval_second: '60', start_time: undefined } }, - '10' + { + periodic_schedule: { end_time: undefined, interval_second: '60', start_time: undefined }, + }, + '10', ); }); it('builds trigger with a weekly interval', () => { const spy = jest.fn(); const tree = shallow(); - (tree.instance() as Trigger).handleChange('type')({ target: { value: TriggerType.INTERVALED } }); - (tree.instance() as Trigger).handleChange('intervalCategory')({ target: { value: PeriodicInterval.WEEK } }); + (tree.instance() as Trigger).handleChange('type')({ + target: { value: TriggerType.INTERVALED }, + }); + (tree.instance() as Trigger).handleChange('intervalCategory')({ + target: { value: PeriodicInterval.WEEK }, + }); expect(spy).toHaveBeenLastCalledWith( { - periodic_schedule: - { + periodic_schedule: { end_time: undefined, interval_second: (7 * 24 * 60 * 60).toString(), - start_time: undefined - } + start_time: undefined, + }, }, - '10' + '10', ); }); it('builds trigger with an every-three-months interval', () => { const spy = jest.fn(); const tree = shallow(); - (tree.instance() as Trigger).handleChange('type')({ target: { value: TriggerType.INTERVALED } }); - (tree.instance() as Trigger).handleChange('intervalCategory')({ target: { value: PeriodicInterval.MONTH } }); + (tree.instance() as Trigger).handleChange('type')({ + target: { value: TriggerType.INTERVALED }, + }); + (tree.instance() as Trigger).handleChange('intervalCategory')({ + target: { value: PeriodicInterval.MONTH }, + }); (tree.instance() as Trigger).handleChange('intervalValue')({ target: { value: 3 } }); expect(spy).toHaveBeenLastCalledWith( { - periodic_schedule: - { + periodic_schedule: { end_time: undefined, interval_second: (3 * 30 * 24 * 60 * 60).toString(), - start_time: undefined - } + start_time: undefined, + }, }, - '10' + '10', ); }); it('builds trigger with the specified max concurrency setting', () => { const spy = jest.fn(); const tree = shallow(); - (tree.instance() as Trigger).handleChange('type')({ target: { value: TriggerType.INTERVALED } }); + (tree.instance() as Trigger).handleChange('type')({ + target: { value: TriggerType.INTERVALED }, + }); (tree.instance() as Trigger).handleChange('maxConcurrentRuns')({ target: { value: '3' } }); expect(spy).toHaveBeenLastCalledWith( { - periodic_schedule: - { + periodic_schedule: { end_time: undefined, interval_second: '60', - start_time: undefined - } + start_time: undefined, + }, }, - '3' + '3', ); }); }); @@ -235,7 +282,7 @@ describe('Trigger', () => { (tree.instance() as Trigger).handleChange('type')({ target: { value: TriggerType.CRON } }); expect(spy).toHaveBeenLastCalledWith( { cron_schedule: { cron: '0 * * * * ?', end_time: undefined, start_time: undefined } }, - '10' + '10', ); }); @@ -243,11 +290,13 @@ describe('Trigger', () => { const spy = jest.fn(); const tree = shallow(); (tree.instance() as Trigger).handleChange('type')({ target: { value: 
TriggerType.CRON } }); - (tree.instance() as Trigger).handleChange('hasStartDate')({ target: { type: 'checkbox', checked: true } }); + (tree.instance() as Trigger).handleChange('hasStartDate')({ + target: { type: 'checkbox', checked: true }, + }); (tree.instance() as Trigger).handleChange('startDate')({ target: { value: '2018-03-23' } }); expect(spy).toHaveBeenLastCalledWith( { cron_schedule: { cron: '0 * * * * ?', end_time: undefined, start_time: testDate } }, - '10' + '10', ); }); @@ -255,11 +304,15 @@ describe('Trigger', () => { const spy = jest.fn(); const tree = shallow(); (tree.instance() as Trigger).handleChange('type')({ target: { value: TriggerType.CRON } }); - (tree.instance() as Trigger).handleChange('hasEndDate')({ target: { type: 'checkbox', checked: true } }); - (tree.instance() as Trigger).handleChange('intervalCategory')({ target: { value: PeriodicInterval.DAY } }); + (tree.instance() as Trigger).handleChange('hasEndDate')({ + target: { type: 'checkbox', checked: true }, + }); + (tree.instance() as Trigger).handleChange('intervalCategory')({ + target: { value: PeriodicInterval.DAY }, + }); expect(spy).toHaveBeenLastCalledWith( { cron_schedule: { cron: '0 0 0 * * ?', end_time: testDate, start_time: undefined } }, - '10' + '10', ); }); @@ -267,14 +320,16 @@ describe('Trigger', () => { const spy = jest.fn(); const tree = shallow(); (tree.instance() as Trigger).handleChange('type')({ target: { value: TriggerType.CRON } }); - (tree.instance() as Trigger).handleChange('intervalCategory')({ target: { value: PeriodicInterval.WEEK } }); + (tree.instance() as Trigger).handleChange('intervalCategory')({ + target: { value: PeriodicInterval.WEEK }, + }); (tree.instance() as any)._toggleCheckAllDays(); (tree.instance() as any)._toggleDay(1); (tree.instance() as any)._toggleDay(5); (tree.instance() as any)._toggleDay(6); expect(spy).toHaveBeenLastCalledWith( { cron_schedule: { cron: '0 0 0 ? * 1,5,6', end_time: undefined, start_time: undefined } }, - '10' + '10', ); }); @@ -282,16 +337,28 @@ describe('Trigger', () => { const spy = jest.fn(); const tree = shallow(); (tree.instance() as Trigger).handleChange('type')({ target: { value: TriggerType.CRON } }); - (tree.instance() as Trigger).handleChange('intervalCategory')({ target: { value: PeriodicInterval.WEEK } }); + (tree.instance() as Trigger).handleChange('intervalCategory')({ + target: { value: PeriodicInterval.WEEK }, + }); (tree.instance() as any)._toggleCheckAllDays(); (tree.instance() as any)._toggleDay(1); (tree.instance() as any)._toggleDay(5); (tree.instance() as any)._toggleDay(6); - (tree.instance() as Trigger).handleChange('editCron')({ target: { type: 'checkbox', checked: true } }); - (tree.instance() as Trigger).handleChange('cron')({ target: { value: 'oops this will break!' } }); + (tree.instance() as Trigger).handleChange('editCron')({ + target: { type: 'checkbox', checked: true }, + }); + (tree.instance() as Trigger).handleChange('cron')({ + target: { value: 'oops this will break!' 
}, + }); expect(spy).toHaveBeenLastCalledWith( - { cron_schedule: { cron: 'oops this will break!', end_time: undefined, start_time: undefined } }, - '10' + { + cron_schedule: { + cron: 'oops this will break!', + end_time: undefined, + start_time: undefined, + }, + }, + '10', ); }); }); diff --git a/frontend/src/components/Trigger.tsx b/frontend/src/components/Trigger.tsx index fd7e8b67ef66..485e502a5d93 100644 --- a/frontend/src/components/Trigger.tsx +++ b/frontend/src/components/Trigger.tsx @@ -24,8 +24,12 @@ import Separator from '../atoms/Separator'; import { commonCss } from '../Css'; import { dateToPickerFormat } from '../lib/TriggerUtils'; import { - PeriodicInterval, TriggerType, triggers, buildCron, - pickersToDate, buildTrigger + PeriodicInterval, + TriggerType, + triggers, + buildCron, + pickersToDate, + buildTrigger, } from '../lib/TriggerUtils'; import { ApiTrigger } from '../apis/job'; import { stylesheet } from 'typestyle'; @@ -52,7 +56,7 @@ interface TriggerState { const css = stylesheet({ noMargin: { - margin: 0 + margin: 0, }, }); @@ -61,8 +65,13 @@ export default class Trigger extends React.Component super(props); const now = new Date(); - const inAWeek = new Date(now.getFullYear(), now.getMonth(), now.getDate() + 7, - now.getHours(), now.getMinutes()); + const inAWeek = new Date( + now.getFullYear(), + now.getMonth(), + now.getDate() + 7, + now.getHours(), + now.getMinutes(), + ); const [startDate, startTime] = dateToPickerFormat(now); const [endDate, endTime] = dateToPickerFormat(inAWeek); @@ -91,114 +100,217 @@ export default class Trigger extends React.Component } public render(): JSX.Element { - const { cron, editCron, endDate, endTime, hasEndDate, hasStartDate, intervalCategory, - intervalValue, maxConcurrentRuns, selectedDays, startDate, startTime, type } = this.state; - - return
- - {Array.from(triggers.entries()).map((trigger, i) => ( - - {trigger[1].displayName} - - ))} - + const { + cron, + editCron, + endDate, + endTime, + hasEndDate, + hasStartDate, + intervalCategory, + intervalValue, + maxConcurrentRuns, + selectedDays, + startDate, + startTime, + type, + } = this.state; + return (
- + + {Array.from(triggers.entries()).map((trigger, i) => ( + + {trigger[1].displayName} + + ))} + -
- } - label='Has start date' /> - - - -
+
+ + +
+ + } + label='Has start date' + /> + + + +
+ +
+ + } + label='Has end date' + /> + + + +
-
- } - label='Has end date' /> - - - + + Run every + {type === TriggerType.INTERVALED && ( +
+ + +
+ )} + + + {Object.keys(PeriodicInterval).map((interval: PeriodicInterval, i) => ( + + {PeriodicInterval[interval] + (type === TriggerType.INTERVALED ? 's' : '')} + + ))} + +
- - Run every - {type === TriggerType.INTERVALED && ( + {type === TriggerType.CRON && ( +
+ {intervalCategory === PeriodicInterval.WEEK && ( +
+ On: + + } + label='All' + /> + + {['S', 'M', 'T', 'W', 'T', 'F', 'S'].map((day, i) => ( + + ))} +
+ )} +
- - + + } + label={ + + Allow editing cron expression. ( format is specified{' '} + + here + + ) + + } + />
- )} - - - {Object.keys(PeriodicInterval).map((interval: PeriodicInterval, i) => ( - - {PeriodicInterval[interval] + (type === TriggerType.INTERVALED ? 's' : '')} - - ))} - - -
+ - {type === TriggerType.CRON && ( -
- {intervalCategory === PeriodicInterval.WEEK && ( -
- On: - - } label='All' /> - - {['S', 'M', 'T', 'W', 'T', 'F', 'S'].map((day, i) => ( - - ))} -
- )} - -
- - } label={ - Allow editing cron expression. ( - format is specified - here - ) - } /> +
Note: Start and end dates/times are handled outside of cron.
- - - -
Note: Start and end dates/times are handled outside of cron.
-
- )} -
; + )} +
+ ); } public handleChange = (name: string) => (event: any) => { @@ -206,14 +318,29 @@ export default class Trigger extends React.Component const value = target.type === 'checkbox' ? target.checked : target.value; // Make sure the desired field is set on the state object first, then // use the state values to compute the new trigger - this.setState({ - [name]: value, - } as any, this._updateTrigger.bind(this)); - } + this.setState( + { + [name]: value, + } as any, + this._updateTrigger.bind(this), + ); + }; private _updateTrigger(): void { - const { hasStartDate, hasEndDate, startDate, startTime, endDate, endTime, editCron, - intervalCategory, intervalValue, type, cron, selectedDays } = this.state; + const { + hasStartDate, + hasEndDate, + startDate, + startTime, + endDate, + endTime, + editCron, + intervalCategory, + intervalValue, + type, + cron, + selectedDays, + } = this.state; const startDateTime = pickersToDate(hasStartDate, startDate, startTime); const endDateTime = pickersToDate(hasEndDate, endDate, endTime); @@ -221,16 +348,25 @@ export default class Trigger extends React.Component // TODO: Why build the cron string unless the TriggerType is not CRON? // Unless cron editing is enabled, calculate the new cron string, set it in state, // then use it to build new trigger object and notify the parent - this.setState({ - cron: editCron ? cron : buildCron(startDateTime, intervalCategory, selectedDays), - }, () => { - const trigger = buildTrigger( - intervalCategory, intervalValue, startDateTime, endDateTime, type, this.state.cron); + this.setState( + { + cron: editCron ? cron : buildCron(startDateTime, intervalCategory, selectedDays), + }, + () => { + const trigger = buildTrigger( + intervalCategory, + intervalValue, + startDateTime, + endDateTime, + type, + this.state.cron, + ); - if (this.props.onChange) { - this.props.onChange(trigger, trigger ? this.state.maxConcurrentRuns : undefined); - } - }); + if (this.props.onChange) { + this.props.onChange(trigger, trigger ? 
this.state.maxConcurrentRuns : undefined); + } + }, + ); } private _isAllDaysChecked(): boolean { @@ -250,12 +386,18 @@ export default class Trigger extends React.Component const newDays = this.state.selectedDays; newDays[index] = !newDays[index]; const startDate = pickersToDate( - this.state.hasStartDate, this.state.startDate, this.state.startTime); + this.state.hasStartDate, + this.state.startDate, + this.state.startTime, + ); const cron = buildCron(startDate, this.state.intervalCategory, this.state.selectedDays); - this.setState({ - cron, - selectedDays: newDays, - }, this._updateTrigger.bind(this)); + this.setState( + { + cron, + selectedDays: newDays, + }, + this._updateTrigger.bind(this), + ); } } diff --git a/frontend/src/components/UploadPipelineDialog.test.tsx b/frontend/src/components/UploadPipelineDialog.test.tsx index 7cae68c60307..a6725d60cd81 100644 --- a/frontend/src/components/UploadPipelineDialog.test.tsx +++ b/frontend/src/components/UploadPipelineDialog.test.tsx @@ -74,7 +74,9 @@ describe('UploadPipelineDialog', () => { const spy = jest.fn(); tree = shallow(); (tree.instance() as any)._dropzoneRef = { current: { open: () => null } }; - (tree.instance() as UploadPipelineDialog).handleChange('uploadPipelineName')({ target: { value: 'test name' } }); + (tree.instance() as UploadPipelineDialog).handleChange('uploadPipelineName')({ + target: { value: 'test name' }, + }); tree.find('#confirmUploadBtn').simulate('click'); expect(spy).toHaveBeenLastCalledWith(true, 'test name', null, '', ImportMethod.LOCAL, ''); }); diff --git a/frontend/src/components/UploadPipelineDialog.tsx b/frontend/src/components/UploadPipelineDialog.tsx index fed9fb2d9db5..ddbd9ec8b6d5 100644 --- a/frontend/src/components/UploadPipelineDialog.tsx +++ b/frontend/src/components/UploadPipelineDialog.tsx @@ -28,6 +28,7 @@ import Radio from '@material-ui/core/Radio'; import { TextFieldProps } from '@material-ui/core/TextField'; import { padding, commonCss, zIndex, color } from '../Css'; import { stylesheet, classes } from 'typestyle'; +import { ExternalLink } from '../atoms/ExternalLink'; const css = stylesheet({ dropOverlay: { @@ -54,7 +55,14 @@ export enum ImportMethod { interface UploadPipelineDialogProps { open: boolean; - onClose: (confirmed: boolean, name: string, file: File | null, url: string, method: ImportMethod, description?: string) => Promise; + onClose: ( + confirmed: boolean, + name: string, + file: File | null, + url: string, + method: ImportMethod, + description?: string, + ) => Promise; } interface UploadPipelineDialogState { @@ -68,7 +76,10 @@ interface UploadPipelineDialogState { uploadPipelineName: string; } -class UploadPipelineDialog extends React.Component { +class UploadPipelineDialog extends React.Component< + UploadPipelineDialogProps, + UploadPipelineDialogState +> { private _dropzoneRef = React.createRef(); constructor(props: any) { @@ -88,69 +99,110 @@ class UploadPipelineDialog extends React.Component this._uploadDialogClosed(false)} - open={this.props.open} classes={{ paper: css.root }}> + this._uploadDialogClosed(false)} + open={this.props.open} + classes={{ paper: css.root }} + > Upload and name your pipeline
- } onChange={() => this.setState({ importMethod: ImportMethod.LOCAL })} /> - } onChange={() => this.setState({ importMethod: ImportMethod.URL })} /> + } + onChange={() => this.setState({ importMethod: ImportMethod.LOCAL })} + /> + } + onChange={() => this.setState({ importMethod: ImportMethod.URL })} + />
{importMethod === ImportMethod.LOCAL && ( - - - {dropzoneActive && ( -
Drop files..
- )} + + {dropzoneActive &&
Drop files..
}
- Choose a pipeline package file from your computer, and give the pipeline a unique name. + Choose a pipeline package file from your computer, and give the pipeline a unique + name.
You can also drag and drop the file here.
- + - + ), readOnly: true, - }} /> + }} + />
)} {importMethod === ImportMethod.URL && ( -
- URL must be publicly accessible. -
- +
URL must be publicly accessible.
+ +
)} - - + required={true} + value={uploadPipelineName} + variant='outlined' + />
{/* this._uploadDialogClosed.bind(this)(false)}> Cancel - this._uploadDialogClosed.bind(this)(true)} - title='Upload' busy={busy} disabled={ - !uploadPipelineName || (importMethod === ImportMethod.LOCAL ? !file : !fileUrl)} /> + this._uploadDialogClosed.bind(this)(true)} + title='Upload' + busy={busy} + disabled={ + !uploadPipelineName || (importMethod === ImportMethod.LOCAL ? !file : !fileUrl) + } + />
); @@ -173,7 +231,7 @@ class UploadPipelineDialog extends React.Component ( +
+ For expected file format, refer to{' '} + + Compile Pipeline Documentation + + . +
+); diff --git a/frontend/src/components/__snapshots__/CustomTable.test.tsx.snap b/frontend/src/components/__snapshots__/CustomTable.test.tsx.snap index 5f0df2ad04fd..f97473f24178 100644 --- a/frontend/src/components/__snapshots__/CustomTable.test.tsx.snap +++ b/frontend/src/components/__snapshots__/CustomTable.test.tsx.snap @@ -55,11 +55,11 @@ exports[`CustomTable renders a collapsed row 1`] = ` indeterminate={false} onChange={[Function]} /> +
-
- +
+ +
-
- col1 -
+ col1
-
- col2 -
+ col2
+
-
+ google + +`; + +exports[`Description When in inline mode renders markdown list as pure text 1`] = ` + + +* abc +* def + +`; + +exports[`Description When in inline mode renders paragraphs separated by space 1`] = ` + + +Paragraph 1 + +Paragraph 2 + + +`; + +exports[`Description When in inline mode renders pure text 1`] = ` + + this is a line of pure text + +`; + +exports[`Description When in inline mode renders raw link 1`] = ` + + https://www.google.com + +`; + +exports[`Description When in normal mode renders empty string 1`] = ``; + +exports[`Description When in normal mode renders markdown link 1`] = ` + + google + +`; + +exports[`Description When in normal mode renders markdown list as list 1`] = ` +
    +
  • + abc +
  • +
  • + def +
  • +
+`; + +exports[`Description When in normal mode renders paragraphs 1`] = ` +
+

+ Paragraph 1 +

+

+ Paragraph 2 +

+
+`; + +exports[`Description When in normal mode renders pure text 1`] = ` + + this is a line of pure text + +`; + +exports[`Description When in normal mode renders raw link 1`] = ` + + https://www.google.com + +`; diff --git a/frontend/src/components/__snapshots__/DetailsTable.test.tsx.snap b/frontend/src/components/__snapshots__/DetailsTable.test.tsx.snap index fd6e86405125..725d83649e4f 100644 --- a/frontend/src/components/__snapshots__/DetailsTable.test.tsx.snap +++ b/frontend/src/components/__snapshots__/DetailsTable.test.tsx.snap @@ -215,6 +215,30 @@ exports[`DetailsTable does render arrays as JSON 2`] = ` `; +exports[`DetailsTable does render values with the provided valueComponent 1`] = ` + +
+
+ + key + + + + foobar + + +
+
+
+`; + exports[`DetailsTable shows a row with a title 1`] = `
1 - + this string: gs://path is a GCS path @@ -27,39 +23,31 @@ exports[`LogViewer does not linkify non http/https urls 1`] = ` exports[`LogViewer linkifies standalone https urls 1`] = `
1 - + this string: - + https://path.com - + is a url @@ -70,39 +58,31 @@ exports[`LogViewer linkifies standalone https urls 1`] = ` exports[`LogViewer linkifies standalone urls 1`] = `
1 - + this string: - + http://path.com - + is a url @@ -113,39 +93,31 @@ exports[`LogViewer linkifies standalone urls 1`] = ` exports[`LogViewer linkifies substring urls 1`] = `
1 - + this string: - + http://path.com - + is a url @@ -156,21 +128,17 @@ exports[`LogViewer linkifies substring urls 1`] = ` exports[`LogViewer renders a multi-line log 1`] = `
1 - + Lorem Ipsum is simply dummy text of the printing and typesetting @@ -181,31 +149,19 @@ exports[`LogViewer renders a multi-line log 1`] = ` exports[`LogViewer renders a row with error 1`] = `
1 - + line1 with error @@ -216,31 +172,19 @@ exports[`LogViewer renders a row with error 1`] = ` exports[`LogViewer renders a row with error word as substring 1`] = `
1 - + line1 with errorWord @@ -251,21 +195,17 @@ exports[`LogViewer renders a row with error word as substring 1`] = ` exports[`LogViewer renders a row with given index as line number 1`] = `
1 - + line1 @@ -276,31 +216,19 @@ exports[`LogViewer renders a row with given index as line number 1`] = ` exports[`LogViewer renders a row with upper case error 1`] = `
1 - + line1 with ERROR @@ -311,31 +239,19 @@ exports[`LogViewer renders a row with upper case error 1`] = ` exports[`LogViewer renders a row with upper case warning 1`] = `
1 - + line1 with WARNING @@ -346,31 +262,19 @@ exports[`LogViewer renders a row with upper case warning 1`] = ` exports[`LogViewer renders a row with warn 1`] = `
1 - + line1 with warn @@ -381,31 +285,19 @@ exports[`LogViewer renders a row with warn 1`] = ` exports[`LogViewer renders a row with warning 1`] = `
1 - + line1 with warning @@ -416,31 +308,19 @@ exports[`LogViewer renders a row with warning 1`] = ` exports[`LogViewer renders a row with warning word as substring 1`] = `
1 - + line1 with warning:something @@ -462,21 +342,17 @@ exports[`LogViewer renders an empty container when no logs passed 1`] = ` exports[`LogViewer renders one log line 1`] = `
1 - + first line @@ -487,21 +363,17 @@ exports[`LogViewer renders one log line 1`] = ` exports[`LogViewer renders one long line without breaking 1`] = `
1 - + Lorem Ipsum is simply dummy text of the printing and typesettingindustry. Lorem Ipsum has been the industry's standard dummy text eversince the 1500s, when an unknown printer took a galley of type andscrambled it to make a type specimen book. It has survived not only fivecenturies, but also the leap into electronic typesetting, remainingessentially unchanged. It was popularised in the 1960s with the releaseof Letraset sheets containing Lorem Ipsum passages, and more recentlywith desktop publishing software like Aldus PageMaker including versionsof Lorem Ipsum. @@ -512,21 +384,17 @@ exports[`LogViewer renders one long line without breaking 1`] = ` exports[`LogViewer renders two log lines 1`] = `
1 - + first line diff --git a/frontend/src/components/__snapshots__/MinioArtifactLink.test.tsx.snap b/frontend/src/components/__snapshots__/MinioArtifactLink.test.tsx.snap new file mode 100644 index 000000000000..fb03d0878d4e --- /dev/null +++ b/frontend/src/components/__snapshots__/MinioArtifactLink.test.tsx.snap @@ -0,0 +1,33 @@ +// Jest Snapshot v1, https://goo.gl/fbAQLP + +exports[`MinioArtifactLink handles empty artifact 1`] = `null`; + +exports[`MinioArtifactLink handles invalid artifact: no bucket 1`] = `null`; + +exports[`MinioArtifactLink handles invalid artifact: no key 1`] = `null`; + +exports[`MinioArtifactLink handles minio artifact 1`] = ` + + minio://foo/bar + +`; + +exports[`MinioArtifactLink handles null artifact 1`] = `null`; + +exports[`MinioArtifactLink handles s3 artifact 1`] = ` + + s3://foo/bar + +`; + +exports[`MinioArtifactLink handles undefined artifact 1`] = `null`; diff --git a/frontend/src/components/__snapshots__/SideNav.test.tsx.snap b/frontend/src/components/__snapshots__/SideNav.test.tsx.snap index a562766e3680..ddf3c9d3feee 100644 --- a/frontend/src/components/__snapshots__/SideNav.test.tsx.snap +++ b/frontend/src/components/__snapshots__/SideNav.test.tsx.snap @@ -174,6 +174,27 @@ exports[`SideNav populates the display build information using the response from
+ + + +
+ + + +
+ + + +
+ + + +
+ + + +
+ + + +
+ + + +
+ + + +
+ + + +
+ + + +
+ + + +
+ + + +
+ + + +
+ + + +
- Allow editing cron expression. ( format is specified + Allow editing cron expression. ( format is specified + @@ -596,7 +597,8 @@ exports[`Trigger renders all week days enabled 1`] = ` } label={ - Allow editing cron expression. ( format is specified + Allow editing cron expression. ( format is specified + @@ -1038,7 +1040,8 @@ exports[`Trigger renders periodic schedule controls if the trigger type is CRON } label={ - Allow editing cron expression. ( format is specified + Allow editing cron expression. ( format is specified + @@ -1349,7 +1352,8 @@ exports[`Trigger renders week days if the trigger type is CRON and interval is w } label={ - Allow editing cron expression. ( format is specified + Allow editing cron expression. ( format is specified + diff --git a/frontend/src/components/__snapshots__/UploadPipelineDialog.test.tsx.snap b/frontend/src/components/__snapshots__/UploadPipelineDialog.test.tsx.snap index ca88139a5d3c..8b5a590d9e88 100644 --- a/frontend/src/components/__snapshots__/UploadPipelineDialog.test.tsx.snap +++ b/frontend/src/components/__snapshots__/UploadPipelineDialog.test.tsx.snap @@ -48,6 +48,7 @@ exports[`UploadPipelineDialog renders alternate UI for uploading via URL 1`] = ` > URL must be publicly accessible.
+ You can also drag and drop the file here.
+ You can also drag and drop the file here.
+ You can also drag and drop the file here.
+ You can also drag and drop the file here.
+ { expect(tree).toMatchSnapshot(); }); - const data = [ - [0, 1, 2], - [3, 4, 5], - [6, 7, 8], - ]; + const data = [[0, 1, 2], [3, 4, 5], [6, 7, 8]]; const config: ConfusionMatrixConfig = { axes: ['test x axis', 'test y axis'], data, @@ -42,7 +38,7 @@ describe('ConfusionMatrix', () => { }); it('does not break on asymetric data', () => { - const testConfig = {...config}; + const testConfig = { ...config }; testConfig.data = data.slice(1); const tree = shallow(); expect(tree).toMatchSnapshot(); @@ -60,7 +56,10 @@ describe('ConfusionMatrix', () => { it('activates row/column on cell hover', () => { const tree = shallow(); - tree.find('td').at(2).simulate('mouseOver'); + tree + .find('td') + .at(2) + .simulate('mouseOver'); expect(tree.state()).toHaveProperty('activeCell', [0, 0]); }); diff --git a/frontend/src/components/viewers/ConfusionMatrix.tsx b/frontend/src/components/viewers/ConfusionMatrix.tsx index 7deaf331b57f..fd3ea98b039e 100644 --- a/frontend/src/components/viewers/ConfusionMatrix.tsx +++ b/frontend/src/components/viewers/ConfusionMatrix.tsx @@ -40,17 +40,20 @@ interface ConfusionMatrixState { class ConfusionMatrix extends Viewer { private _opacities: number[][] = []; private _config = this.props.configs[0]; - private _max = this._config && + private _max = + this._config && Math.max(...this._config.data.map(d => d.map(n => +n)).map(d => Math.max(...d))); private _minRegularCellDimension = 15; private _maxRegularCellDimension = 80; - private _cellDimension = this._config ? - Math.max( - Math.min( - (this.props.maxDimension || 700) / this._config.data.length, - this._maxRegularCellDimension), - this._minRegularCellDimension) - 1 : - 0; + private _cellDimension = this._config + ? Math.max( + Math.min( + (this.props.maxDimension || 700) / this._config.data.length, + this._maxRegularCellDimension, + ), + this._minRegularCellDimension, + ) - 1 + : 0; private _shrinkThreshold = 600; private _css = stylesheet({ @@ -174,53 +177,96 @@ class ConfusionMatrix extends Viewer const [xAxisLabel, yAxisLabel] = this._config.axes; const small = this._isSmall(); - return
- - - {!small && } - {this._config.data.map((row, r) => - {!small && } - {row.map((cell, c) => - )} - )} - - {/* Footer */} - {!small && - )} - - } - -
{yAxisLabel}
-
- {this._config.labels[r]} -
-
this.setState({ activeCell: [r, c] })}> -
- {cell} -
- {this._config.labels.map((label, i) => -
- {label} + return ( +
+ + + {!small && ( + + + + )} + {this._config.data.map((row, r) => ( + + {!small && ( + + )} + {row.map((cell, c) => ( + + ))} + + ))} + + {/* Footer */} + {!small && ( + + + ))} + + + )} + +
{yAxisLabel}
+
+ {this._config.labels[r]} +
+
this.setState({ activeCell: [r, c] })} + > +
+ {cell} +
+ {this._config.labels.map((label, i) => ( + +
+ {label} +
+
{xAxisLabel}
+ + {!small && ( +
+
+ {this._max} +
+ {new Array(legendNotches).fill(0).map((_, i) => ( +
+ + {Math.floor((i / legendNotches) * this._max)} +
-
{xAxisLabel}
- - {!small &&
-
- {this._max} -
- {new Array(legendNotches).fill(0).map((_, i) => -
- {Math.floor(i / legendNotches * this._max)} -
)} -
} -
; + ))} +
+ )} +
+ ); } private _isSmall(): boolean { diff --git a/frontend/src/components/viewers/HTMLViewer.tsx b/frontend/src/components/viewers/HTMLViewer.tsx index 66e652e76af8..e7b59911297c 100644 --- a/frontend/src/components/viewers/HTMLViewer.tsx +++ b/frontend/src/components/viewers/HTMLViewer.tsx @@ -64,8 +64,14 @@ class HTMLViewer extends Viewer { return null; } - return