From f92b9f464de89a38a4bde149290dede2d94c8631 Mon Sep 17 00:00:00 2001 From: Grigori Fursin Date: Wed, 10 Apr 2024 09:30:05 +0200 Subject: [PATCH] first commit --- .gitignore | 160 +++++++++++++ COPYRIGHT.txt | 1 + README-test-model.md | 117 ++++++++++ README.md | 1 + .../get-dataset-cognata-mlcommons/README.md | 187 +++++++++++++++ script/get-dataset-cognata-mlcommons/_cm.yaml | 65 ++++++ .../customize.py | 55 +++++ .../get-dataset-cognata-mlcommons/license.txt | 107 +++++++++ .../README-extra.md | 5 + .../get-ml-model-abtf-ssd-pytorch/README.md | 170 ++++++++++++++ script/get-ml-model-abtf-ssd-pytorch/_cm.yaml | 84 +++++++ .../customize.py | 36 +++ .../README.md | 218 ++++++++++++++++++ .../_cm.yaml | 89 +++++++ .../customize.py | 28 +++ .../test-ssd-resnet50-cognata-pytorch/run.bat | 6 + .../test-ssd-resnet50-cognata-pytorch/run.sh | 6 + 17 files changed, 1335 insertions(+) create mode 100644 .gitignore create mode 100644 COPYRIGHT.txt create mode 100644 README-test-model.md create mode 100644 README.md create mode 100644 script/get-dataset-cognata-mlcommons/README.md create mode 100644 script/get-dataset-cognata-mlcommons/_cm.yaml create mode 100644 script/get-dataset-cognata-mlcommons/customize.py create mode 100644 script/get-dataset-cognata-mlcommons/license.txt create mode 100644 script/get-ml-model-abtf-ssd-pytorch/README-extra.md create mode 100644 script/get-ml-model-abtf-ssd-pytorch/README.md create mode 100644 script/get-ml-model-abtf-ssd-pytorch/_cm.yaml create mode 100644 script/get-ml-model-abtf-ssd-pytorch/customize.py create mode 100644 script/test-ssd-resnet50-cognata-pytorch/README.md create mode 100644 script/test-ssd-resnet50-cognata-pytorch/_cm.yaml create mode 100644 script/test-ssd-resnet50-cognata-pytorch/customize.py create mode 100644 script/test-ssd-resnet50-cognata-pytorch/run.bat create mode 100644 script/test-ssd-resnet50-cognata-pytorch/run.sh diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..68bc17f --- /dev/null +++ b/.gitignore @@ -0,0 +1,160 @@ +# Byte-compiled / optimized / DLL files +__pycache__/ +*.py[cod] +*$py.class + +# C extensions +*.so + +# Distribution / packaging +.Python +build/ +develop-eggs/ +dist/ +downloads/ +eggs/ +.eggs/ +lib/ +lib64/ +parts/ +sdist/ +var/ +wheels/ +share/python-wheels/ +*.egg-info/ +.installed.cfg +*.egg +MANIFEST + +# PyInstaller +# Usually these files are written by a python script from a template +# before PyInstaller builds the exe, so as to inject date/other infos into it. +*.manifest +*.spec + +# Installer logs +pip-log.txt +pip-delete-this-directory.txt + +# Unit test / coverage reports +htmlcov/ +.tox/ +.nox/ +.coverage +.coverage.* +.cache +nosetests.xml +coverage.xml +*.cover +*.py,cover +.hypothesis/ +.pytest_cache/ +cover/ + +# Translations +*.mo +*.pot + +# Django stuff: +*.log +local_settings.py +db.sqlite3 +db.sqlite3-journal + +# Flask stuff: +instance/ +.webassets-cache + +# Scrapy stuff: +.scrapy + +# Sphinx documentation +docs/_build/ + +# PyBuilder +.pybuilder/ +target/ + +# Jupyter Notebook +.ipynb_checkpoints + +# IPython +profile_default/ +ipython_config.py + +# pyenv +# For a library or package, you might want to ignore these files since the code is +# intended to run in multiple environments; otherwise, check them in: +# .python-version + +# pipenv +# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. 
+# However, in case of collaboration, if having platform-specific dependencies or dependencies +# having no cross-platform support, pipenv may install dependencies that don't work, or not +# install all needed dependencies. +#Pipfile.lock + +# poetry +# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control. +# This is especially recommended for binary packages to ensure reproducibility, and is more +# commonly ignored for libraries. +# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control +#poetry.lock + +# pdm +# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control. +#pdm.lock +# pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it +# in version control. +# https://pdm.fming.dev/#use-with-ide +.pdm.toml + +# PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm +__pypackages__/ + +# Celery stuff +celerybeat-schedule +celerybeat.pid + +# SageMath parsed files +*.sage.py + +# Environments +.env +.venv +env/ +venv/ +ENV/ +env.bak/ +venv.bak/ + +# Spyder project settings +.spyderproject +.spyproject + +# Rope project settings +.ropeproject + +# mkdocs documentation +/site + +# mypy +.mypy_cache/ +.dmypy.json +dmypy.json + +# Pyre type checker +.pyre/ + +# pytype static type analyzer +.pytype/ + +# Cython debug symbols +cython_debug/ + +# PyCharm +# JetBrains specific template is maintained in a separate JetBrains.gitignore that can +# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore +# and can be added to the global gitignore or merged into this file. For a more nuclear +# option (not recommended) you can uncomment the following to ignore the entire idea folder. +#.idea/ diff --git a/COPYRIGHT.txt b/COPYRIGHT.txt new file mode 100644 index 0000000..bb27d28 --- /dev/null +++ b/COPYRIGHT.txt @@ -0,0 +1 @@ +Copyright (c) 2024 MLCommons diff --git a/README-test-model.md b/README-test-model.md new file mode 100644 index 0000000..f9b7d14 --- /dev/null +++ b/README-test-model.md @@ -0,0 +1,117 @@ +# CM automation for ABTF-MLPerf + +*Testing ABTF SSD PyTorch model via the [MLCommons CM automation meta-framework](https://github.com/mlcommons/ck).* + +## Install CM + +Follow [this online guide](https://access.cknowledge.org/playground/?action=install) to install CM for your OS. + +## Install virtual environment + +We suggest to create a virtual environment to avoid messing up your Python installation: + +### Linux + +```bash +python3 -m venv ABTF +. ABTF/bin/activate ; export CM_REPOS=$PWD/ABTF/CM +``` +### Windows + +```bash +python -m venv ABTF +call ABTF\Scripts\activate.bat & set CM_REPOS=%CD%\ABTF\CM +``` + +## Install all CM automation recipes + +Pull [main MLOps automation recipes](https://access.cknowledge.org/playground/?action=scripts) from MLCommons: + +```bash +cm pull repo mlcommons@ck --checkout=dev +``` + +Pull this CM repository with automation recipes for the MLCommons-ABTF benchmark: + +```bash +cm pull repo cknowledge@cm4abtf +``` + +## Clean CM cache + +Clean CM cache if you want to start from scratch + +```bash +cm rm cache -f +``` + + + + + + + + + +Download private test image `0000008766.png` and model `baseline_8mp.pth` to your local directory. 
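+
+Before importing the checkpoint into CM, you can optionally confirm that it deserializes cleanly. This is a minimal sketch, assuming `torch` is already installed in your virtual environment and that `baseline_8mp.pth` is a standard PyTorch checkpoint:
+
+```python
+# Optional sanity check: confirm the downloaded checkpoint loads before
+# importing it into CM (assumes a standard PyTorch .pth checkpoint).
+# With newer PyTorch releases you may need torch.load(..., weights_only=False).
+import torch
+
+checkpoint = torch.load('baseline_8mp.pth', map_location='cpu')
+
+# Show whether the file stores a raw state_dict or a wrapper dict with
+# extra metadata (epoch, optimizer state, etc.).
+if isinstance(checkpoint, dict):
+    print(list(checkpoint.keys())[:10])
+else:
+    print(type(checkpoint))
+```
+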
+ + +Import `baseline_8mp.pth` to CM: +```bash +cmr "get ml-model abtf-ssd-pytorch _local.baseline_8mp.pth" +``` + +Get Git repo with ABTF SSD-ResNet50 PyTorch model: + +```bash +cmr "get git repo _repo.https://github.com/mlcommons/abtf-ssd-pytorch" --env.CM_GIT_BRANCH=cognata-cm --extra_cache_tags=abtf,ssd,pytorch,cm-model --env.CM_GIT_CHECKOUT_PATH_ENV_NAME=CM_ABTF_SSD_PYTORCH +``` + +Make test prediction: + +```bash +cmr "test abtf ssd-resnet50 cognata pytorch" --input=0000008766.png --output=0000008766_prediction_test.jpg --config=baseline_8MP +``` + +Export PyTorch model to ONNX: +```bash +cmr "test abtf ssd-resnet50 cognata pytorch" --input=0000008766.png --output=0000008766_prediction_test.jpg --config=baseline_8MP --export_model=baseline_8mp.onnx +``` + +Test exported ONNX model with LoadGen (performance): +```bash +cm run script "python app loadgen-generic _onnxruntime" --modelpath=baseline_8mp.onnx --samples=1 --quiet +``` + + +Test different versions of PyTorch +```bash +cmr "install python-venv" --name=abtf2 +cmr "test abtf ssd-resnet50 cognata pytorch" --adr.python.name=abtf2 --adr.torch.version=1.13.1 --adr.torchvision.version=0.14.1 --input=0000008766.png --output=0000008766_prediction_test.jpg --config=baseline_8MP +``` + +## TBD + +### Main features + +* Test PyTorch model with Python LoadGen +* Test PyTorch model with [C++ loadgen](https://github.com/mlcommons/ck/tree/master/cm-mlops/script/app-mlperf-inference-mlcommons-cpp) +* Automate loading of Cognata dataset via CM +* Add Cognata dataset to loadgen +* Process PyTorch model with MLPerf inference infrastructure for SSD-ResNet50 +* Add support for MLCommons Croissant + +### Testing docker + +```bash +cm docker script --tags=test,abtf,ssd-pytorch,_cognata --docker_cm_repo=ctuning@mlcommons-ck --env.CM_GH_TOKEN={TOKEN} --input=road.jpg --output=road_ssd.jpg +``` + +```bash +cm docker script --tags=test,abtf,ssd-pytorch,_cognata --docker_cm_repo=ctuning@mlcommons-ck --docker_os=ubuntu --docker_os_version=23.04 --input=road.jpg --output=road_ssd.jpg +``` +TBD: pass file to CM docker: [meta](https://github.com/mlcommons/ck/blob/master/cm-mlops/script/build-mlperf-inference-server-nvidia/_cm.yaml#L197). + +## CM automation developers + +* [Grigori Fursin](https://cKnowledge.org/gfursin) (MLCommons Task Force on Automation and Reproducibility) diff --git a/README.md b/README.md new file mode 100644 index 0000000..b609e1c --- /dev/null +++ b/README.md @@ -0,0 +1 @@ +# Collective Mind interface and automation for ABTF diff --git a/script/get-dataset-cognata-mlcommons/README.md b/script/get-dataset-cognata-mlcommons/README.md new file mode 100644 index 0000000..606d817 --- /dev/null +++ b/script/get-dataset-cognata-mlcommons/README.md @@ -0,0 +1,187 @@ +
+Click here to see the table of contents. + +* [About](#about) +* [Summary](#summary) +* [Reuse this script in your project](#reuse-this-script-in-your-project) + * [ Install CM automation language](#install-cm-automation-language) + * [ Check CM script flags](#check-cm-script-flags) + * [ Run this script from command line](#run-this-script-from-command-line) + * [ Run this script from Python](#run-this-script-from-python) + * [ Run this script via GUI](#run-this-script-via-gui) + * [ Run this script via Docker (beta)](#run-this-script-via-docker-(beta)) +* [Customization](#customization) + * [ Variations](#variations) + * [ Default environment](#default-environment) +* [Script workflow, dependencies and native scripts](#script-workflow-dependencies-and-native-scripts) +* [Script output](#script-output) +* [New environment keys (filter)](#new-environment-keys-(filter)) +* [New environment keys auto-detected from customize](#new-environment-keys-auto-detected-from-customize) +* [Maintainers](#maintainers) + +
+ +*Note that this README is automatically generated - don't edit!* + +### About + +#### Summary + +* Category: *AI/ML datasets.* +* CM GitHub repository: *[mlcommons@ck](https://github.com/mlcommons/ck/tree/master/cm-mlops)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/ck/tree/master/cm-mlops/script/get-dataset-cognata)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* CM "database" tags to find this script: *get,dataset,cognata,object-detection,original* +* Output cached? *True* +___ +### Reuse this script in your project + +#### Install CM automation language + +* [Installation guide](https://github.com/mlcommons/ck/blob/master/docs/installation.md) +* [CM intro](https://doi.org/10.5281/zenodo.8105339) + +#### Pull CM repository with this automation + +```cm pull repo mlcommons@ck``` + + +#### Run this script from command line + +1. `cm run script --tags=get,dataset,cognata,object-detection,original[,variations] ` + +2. `cmr "get dataset cognata object-detection original[ variations]" ` + +* `variations` can be seen [here](#variations) + +#### Run this script from Python + +
+Click here to expand this section.
+
+```python
+
+import cmind
+
+r = cmind.access({'action':'run',
+                  'automation':'script',
+                  'tags':'get,dataset,cognata,object-detection,original',
+                  'out':'con',
+                  ...
+                  (other input keys for this script)
+                  ...
+                 })
+
+if r['return']>0:
+    print (r['error'])
+
+```
+
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="get,dataset,cognata,object-detection,original"``` + +Use this [online GUI](https://cKnowledge.org/cm-gui/?tags=get,dataset,cognata,object-detection,original) to generate CM CMD. + +#### Run this script via Docker (beta) + +`cm docker script "get dataset cognata object-detection original[ variations]" ` + +___ +### Customization + + +#### Variations + + * Group "**dataset-type**" +
+ Click here to expand this section. + + * `_calibration` + - Workflow: + * **`_validation`** (default) + - Environment variables: + - *CM_DATASET_CALIBRATION*: `no` + - Workflow: + +
+ + + * Group "**size**" +
+ Click here to expand this section. + + * **`_50`** (default) + - Environment variables: + - *CM_DATASET_SIZE*: `50` + - Workflow: + * `_500` + - Environment variables: + - *CM_DATASET_SIZE*: `500` + - Workflow: + * `_full` + - Environment variables: + - *CM_DATASET_SIZE*: `` + - Workflow: + * `_size.#` + - Environment variables: + - *CM_DATASET_SIZE*: `#` + - Workflow: + +
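+
+The variation tags above can be combined when the script is driven from the CM Python API: variation tags (with their leading `_`) are simply appended to the script tags. A minimal sketch, assuming the `mlcommons@ck` repository is already pulled, that requests the 500-image calibration variant:
+
+```python
+# Select the '_500' size and '_calibration' dataset-type variations by
+# appending their tags to the script tags.
+import cmind
+
+r = cmind.access({'action': 'run',
+                  'automation': 'script',
+                  'tags': 'get,dataset,cognata,object-detection,original,_500,_calibration',
+                  'out': 'con'})
+
+if r['return'] > 0:
+    print(r['error'])
+```
+
+If the run succeeds, the new environment keys listed later in this README (such as `CM_DATASET_PATH`) are returned with the result (typically under `new_env`).
+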
+ + +#### Default variations + +`_50,_validation` +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + +* CM_DATASET_CALIBRATION: `no` + +
+ +___ +### Script workflow, dependencies and native scripts + +
+Click here to expand this section.
+
+  1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/ck/tree/master/cm-mlops/script/get-dataset-cognata/_cm.json)***
+     * get,python3
+       * CM names: `--adr.['python', 'python3']...`
+       - CM script: [get-python3](https://github.com/mlcommons/ck/tree/master/cm-mlops/script/get-python3)
+  1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/ck/tree/master/cm-mlops/script/get-dataset-cognata/customize.py)***
+  1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/ck/tree/master/cm-mlops/script/get-dataset-cognata/_cm.json)
+  1. ***Run native script if exists***
+  1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/ck/tree/master/cm-mlops/script/get-dataset-cognata/_cm.json)
+  1. ***Run "postprocess" function from [customize.py](https://github.com/mlcommons/ck/tree/master/cm-mlops/script/get-dataset-cognata/customize.py)***
+  1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/ck/tree/master/cm-mlops/script/get-dataset-cognata/_cm.json)
+ +___ +### Script output +`cmr "get dataset cognata object-detection original[,variations]" -j` +#### New environment keys (filter) + +* `CM_CALIBRATION_DATASET_PATH` +* `CM_DATASET_ANNOTATIONS_DIR_PATH` +* `CM_DATASET_ANNOTATIONS_FILE_PATH` +* `CM_DATASET_PATH` +* `CM_DATASET_PATH_ROOT` +#### New environment keys auto-detected from customize + +* `CM_CALIBRATION_DATASET_PATH` +* `CM_DATASET_PATH` +* `CM_DATASET_PATH_ROOT` +___ +### Maintainers + +* [Open MLCommons taskforce on automation and reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) \ No newline at end of file diff --git a/script/get-dataset-cognata-mlcommons/_cm.yaml b/script/get-dataset-cognata-mlcommons/_cm.yaml new file mode 100644 index 0000000..b2bf5c4 --- /dev/null +++ b/script/get-dataset-cognata-mlcommons/_cm.yaml @@ -0,0 +1,65 @@ +alias: get-dataset-cognata-mlcommons +uid: 6e2b5db76833416d + +automation_alias: script +automation_uid: 5b4e0237da074764 + +private: true + +cache: true + +category: AI/ML datasets +category_sort: 8500 + +default_env: + CM_DATASET_CALIBRATION: 'no' + +deps: +- names: + - python + - python3 + tags: get,python3 + +env: + CM_DATASET: COGNATA + +new_env_keys: +- CM_DATASET_PATH +- CM_DATASET_PATH_ROOT +- CM_DATASET_ANNOTATIONS_DIR_PATH +- CM_DATASET_ANNOTATIONS_FILE_PATH +- CM_CALIBRATION_DATASET_PATH + +tags: +- get +- dataset +- mlcommons-cognata +- object-detection +- original + +variations: + '50': + default: true + env: + CM_DATASET_SIZE: '50' + group: size + '500': + env: + CM_DATASET_SIZE: '500' + group: size + calibration: + env: {} + group: dataset-type + full: + env: + CM_DATASET_SIZE: '' + group: size + size.#: + env: + CM_DATASET_SIZE: '#' + group: size + validation: + default: true + env: + CM_DATASET_CALIBRATION: 'no' + group: dataset-type diff --git a/script/get-dataset-cognata-mlcommons/customize.py b/script/get-dataset-cognata-mlcommons/customize.py new file mode 100644 index 0000000..6c55731 --- /dev/null +++ b/script/get-dataset-cognata-mlcommons/customize.py @@ -0,0 +1,55 @@ +from cmind import utils +import os +import shutil +import sys + + +def ask_user(question, default="yes"): + valid = ["yes", "y", "no", "n"] + if default is None: + prompt = " [y/n] " + elif default == "yes": + prompt = " [Y/n] " + elif default == "no": + prompt = " [y/N] " + else: + raise ValueError("invalid default answer: '%s'" % default) + + while True: + sys.stdout.write(question + prompt) + choice = input().lower() + if default is not None and choice == "": + return default.startswith('y') + elif choice in valid: + return choice.startswith('y') + else: + print("Please input y/n\n") + +def preprocess(i): + + env = i['env'] + + script_path = i['run_script_input']['path'] + + with open(os.path.join(script_path, "license.txt"), "r") as f: + print(f.read()) + + response = ask_user("Do you accept?") + + print(response) + + + return {'return': 0} + +def postprocess(i): + env = i['env'] + return {'return': -1} #todo + if env.get('CM_DATASET_CALIBRATION','') == "no": + env['CM_DATASET_PATH_ROOT'] = os.path.join(os.getcwd(), 'install') + env['CM_DATASET_PATH'] = os.path.join(os.getcwd(), 'install', 'validation', 'data') + env['CM_DATASET_CAPTIONS_DIR_PATH'] = os.path.join(os.getcwd(), 'install', 'captions') + env['CM_DATASET_LATENTS_DIR_PATH'] = os.path.join(os.getcwd(), 'install', 'latents') + else: + env['CM_CALIBRATION_DATASET_PATH'] = os.path.join(os.getcwd(), 'install', 'calibration', 'data') + + return {'return': 0} diff --git 
a/script/get-dataset-cognata-mlcommons/license.txt b/script/get-dataset-cognata-mlcommons/license.txt new file mode 100644 index 0000000..f46fa42 --- /dev/null +++ b/script/get-dataset-cognata-mlcommons/license.txt @@ -0,0 +1,107 @@ +DATA END USER LICENSE AGREEMENT + +This Data End User License Agreement (“Agreement”) is by and between MLCommons, a Delaware non- +profit corporation (“MLC”) and the undersigned MLCommons Member that is downloading or using the + +MLCommons Cognata Dataset made available by MLCommons (“Licensee”). This Agreement will be +effective upon the date last signed, as reflected in the signature block below. +1. Background. MLC, an open engineering consortium, has licensed a dataset known as MLCommons +Cognata Dataset (the “Data”) which it wishes to make available to its Members in accordance with +the terms set forth in this Agreement. The term Data refers collectively to the MLCommons Cognata +datasets and materials delivered by MLC as a whole, to subsets of such datasets and materials, as +well as to the separate images and annotations comprising such datasets and materials. MLC agrees +that, to the best of its knowledge, the Data will not include any data that is “personal data” or +“personally identifiable” according to any applicable law or regulation. +2. Data License. Subject to the License Restrictions set forth in Section 4 below, MLC grants to Licensee +a non-exclusive, non-transferable, non-sublicenseable worldwide license during the term of the +Agreement to download, install, copy, modify, distribute, publicly display, prepare derivative works, +and otherwise use the Data for the purpose of benchmarking. The rights and license granted under +this Section 2.1 shall be referred to hereinafter as the “License.” +3. Derivative Work, Applications and Output. The License includes the right to create derivative +works of the Data. Any use of the Data incorporated in these derivative works will be governed by +the provisions of this Agreement, however, MLC and its licensors will not be responsible for any +adaptations, revisions, transformations, alterations, or any other modifications of the Data. The +License includes the right to incorporate and use the Data in applications (the “Applications”), +provided that all Data incorporated or used by these Applications is and will remain governed by the +provisions of this Agreement. The term “Output” refers to any analysis, results and calculations +arrived at from the use of the Data, and to any changes to the models and networks that will be +trained using the Data. +4. License Restrictions. +a. The License will only remain in effect so long as Licensee remains an MLC member in good +standing. +b. Licensee may not provide the Data as a whole, or on a standalone basis, to third parties but may +distribute the Applications and Output in the course of Licensee’s ordinary course business. This +includes the right to distribute a subset of the Data in Licensee’s Applications to run relevant +benchmarks generated from the Output. +c. While the License includes the right to use the Data in commercial activity such as +benchmarking, marketing, customer demonstrations, bidding and requests for proposals or +information, it excludes the right to use the Data directly in connection with models used in or +items in production, including training production models. +d. Licensee acknowledges that MLC’s data licensor, Cognata Ltd, is the owner of all right, title and +interest in the Data. 
It will acknowledge and identify “Cognata” as a data provider in connection +with any distribution, display or publication of the Data, Applications, or Output. +e. Licensee will ensure that its use of the Data will be done in accordance with all applicable law. + + +5. Term. The License will continue in effect until terminated as set forth below: +a. Licensee may terminate the License by informing MLC that all Data, Output and Applications +that incorporate the Data in Licensee’s possession or control have been deleted. +b. MLC may terminate the License upon providing Licensee with at least thirty (30) days prior +notice of termination sent to the email address of the primary contact associated with +Licensee’s Membership in MLC. +c. The License will terminate immediately and automatically without any notice if Licensee ceases +to be an MLC Member or if Licensee violates any of the terms and conditions of this Agreement. +Upon termination of this Agreement, Licensee must delete all copies of the Data and Output in its +possession and cease distributing any Data, Output or Applications that incorporate the Data. +Termination will not limit any of MLC’s rights or remedies at law or in equity. Sections 8 through 11 +of this Agreement, inclusive, shall survive such termination. +6. Storage. For the duration of this Agreement, Licensee may download and store local copies of the +Data on systems under its control. +7. License. This Agreement provides certain license rights to use of the Data but does not constitute a +sale of the Data. MLC’s licensor will continue to own all right, title and interest in the Data. +8. Indemnification. Licensee agrees to indemnify and hold MLC and its licensors, and their respective +officers, directors and agents harmless from any claims, expenses, or liabilities caused by Licensee’s +use of the Data or Licensee’s publication or use of Output; except that Licensee shall have no liability +or obligation to indemnify MLC or its licensor from or against any claims, expenses, or liabilities +resulting from (a) third party claims that the Data infringes, misappropriation, or otherwise violates +third party intellectual property or other rights or (b) MLC’s negligent acts or omissions or willful +misconduct. Licensee will promptly inform MLC in writing of any claim by a third party that use of +the Data in accordance with this Agreement violates the proprietary rights of such claimant +(including any threat, warning or notice prior to such claim in which the claimant alleges that the +Data infringes his/her/its intellectual property rights), which notice will include copies of all available +material related to the claim and will indicate the estimated amount claimed by the claimant, if any. +9. Disclaimers & Limited Liability. +EXCEPT AS EXPLICITLY SET FORTH IN THIS AGREEMENT, THE DATA IS PROVIDED “AS IS,” WITHOUT +WARRANTY OR REPRESENTATION OF ANY KIND WHETHER EXPRESS, IMPLIED, STATUTORY, OR +OTHER. THIS INCLUDES, WITHOUT LIMITATION, WARRANTIES OF TITLE, MERCHANTABILITY, FITNESS +FOR A PARTICULAR PURPOSE, NON-INFRINGEMENT, ABSENCE OF LATENT OR OTHER DEFECTS, +ACCURACY, OR THE PRESENCE OR ABSENCE OF ERRORS, WHETHER OR NOT KNOWN OR +DISCOVERABLE. LICENSEES ARE RESPONSIBLE FOR ANY AND ALL CONCLUSIONS, DECISIONS AND/OR +ACTIONS TAKEN BY THEM IN RELIANCE ON THE DATA. LICENSEE ASSUMES ALL RESPONSIBILITIES +FOR ANY USE OF THE OUTPUT. IN NO EVENT WILL COGNATA LTD, MLC’S DATA LICENSOR, +RESPONSIBLE TO LICENSEE IN ANY WAY FOR THE DATA. 
+EXCEPT FOR MATTERS FOR WHICH LIABILITY CANNOT BE EXCLUDED OR LIMITED UNDER APPLICABLE +LAW: (A) IN NO EVENT SHALL EITHER PARTY BE LIABLE FOR INCIDENTAL, INDIRECT, SPECIAL, +EXEMPLARY, PUNITIVE AND/OR CONSEQUENTIAL DAMAGES (HOWEVER ARISING) AND/OR FOR +LOSS OF PROFIT, LOSS OF USE, LOSS OF DATA, LOSS OF REVENUES, LOSS OF SAVINGS, BUSINESS +INTERRUPTION, OR LOSS OF REPUTATION ARISING FROM AND/OR IN CONNECTION WITH THIS +AGREEMENT, AND/OR THE USE OF THE DATA AND (B) THE MAXIMUM AGGREGATE LIABILITY OF + + +EITHER PARTY FOR ANY AND ALL DAMAGES AND LOSSES ARISING FROM AND/OR IN CONNECTION +WITH THIS AGREEMENT, AND/OR THE USE OF THE DATA SHALL NOT EXCEED THE AMOUNTS PAID +FOR THE APPLICABLE DATA. +LICENSEE AGREES THAT IT IS NOT ENTITLED TO RECOVER ANY OTHER DAMAGES OR LOSSES EVEN IF +THE ABOVE DAMAGES REMEDY DOESN'T FULLY COMPENSATE LICENSEE FOR ANY DAMAGES OR +LOSSES OR FAILS OF ITS ESSENTIAL PURPOSE AND EVEN IF MLC KNEW OR SHOULD HAVE KNOWN +ABOUT THE POSSIBILITY OF THE DAMAGES OR LOSSES. THE ABOVE LIMITATIONS AND EXCLUSIONS +OF LIABILITY SHALL APPLY TO ALL CAUSES OF ACTION AND REGARDLESS OF THE THEORY OF +LIABILITY (CONTRACT, TORT, EQUITY, BREACH OF STATUTORY DUTY, STRICT LIABILITY OR +OTHERWISE) AND EVEN IF SUCH DAMAGES OR LOSSES WERE OR COULD HAVE BEEN FORESEEABLE. +10. Amendment. MLC may update this Agreement upon 30 days’ written notice to Licensee. +11. Dispute resolution. This Agreement will be governed by the laws of the State of Delaware, without +reference to conflict of laws principles. If unable to resolve a dispute amicably, the parties agree that +disputes arising out of or related to this Agreement will be subject to the exclusive jurisdiction of the +state and federal courts located in San Francisco, California. +IN WITNESS WHEREOF, the undersigned parties have executed this Data End User License Agreement as +of the dates set forth below their signatures. diff --git a/script/get-ml-model-abtf-ssd-pytorch/README-extra.md b/script/get-ml-model-abtf-ssd-pytorch/README-extra.md new file mode 100644 index 0000000..c1d4093 --- /dev/null +++ b/script/get-ml-model-abtf-ssd-pytorch/README-extra.md @@ -0,0 +1,5 @@ +# Example to import local model + +```bash +cmr "get ml-model abtf-ssd-pytorch _local.test_8mp.pth" +``` diff --git a/script/get-ml-model-abtf-ssd-pytorch/README.md b/script/get-ml-model-abtf-ssd-pytorch/README.md new file mode 100644 index 0000000..7fc218f --- /dev/null +++ b/script/get-ml-model-abtf-ssd-pytorch/README.md @@ -0,0 +1,170 @@ +Automatically generated README for this automation recipe: **get-ml-model-abtf-ssd-pytorch** + +Category: **AI/ML models** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=get-ml-model-abtf-ssd-pytorch,59cfc2a22f5d4f46) ] [ [Notes from the authors, contributors and users](README-extra.md) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@ck](https://github.com/mlcommons/ck/tree/dev/cm-mlops)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/ck/tree/dev/cm-mlops/script/get-ml-model-abtf-ssd-pytorch)* +* CM meta description for this script: *[_cm.json](_cm.json)* +* All CM tags to find and reuse this script (see in above meta description): *get,ml-model,abtf-ssd-pytorch* +* Output cached? 
*True*
+* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts
+
+
+---
+### Reuse this script in your project
+
+#### Install MLCommons CM automation meta-framework
+
+* [Install CM](https://access.cknowledge.org/playground/?action=install)
+* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md)
+
+#### Pull CM repository with this automation recipe (CM script)
+
+```cm pull repo mlcommons@ck```
+
+#### Print CM help from the command line
+
+````cmr "get ml-model abtf-ssd-pytorch" --help````
+
+#### Customize and run this script from the command line with different variations and flags
+
+`cm run script --tags=get,ml-model,abtf-ssd-pytorch`
+
+`cm run script --tags=get,ml-model,abtf-ssd-pytorch[,variations]`
+
+*or*
+
+`cmr "get ml-model abtf-ssd-pytorch"`
+
+`cmr "get ml-model abtf-ssd-pytorch [variations]"`
+
+
+* *See the list of `variations` [here](#variations) and check the [Getting Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.*
+
+#### Run this script from Python
+
+Click here to expand this section.
+
+```python
+
+import cmind
+
+r = cmind.access({'action':'run',
+                  'automation':'script',
+                  'tags':'get,ml-model,abtf-ssd-pytorch',
+                  'out':'con',
+                  ...
+                  (other input keys for this script)
+                  ...
+                 })
+
+if r['return']>0:
+    print (r['error'])
+
+```
+
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="get,ml-model,abtf-ssd-pytorch"``` + +Use this [online GUI](https://cKnowledge.org/cm-gui/?tags=get,ml-model,abtf-ssd-pytorch) to generate CM CMD. + +#### Run this script via Docker (beta) + +`cm docker script "get ml-model abtf-ssd-pytorch[variations]" ` + +___ +### Customization + + +#### Variations + + * Group "**epoch**" +
+ Click here to expand this section. + + * `_e01` + - Environment variables: + - *CM_ML_MODEL_CHECKSUM*: `31d177228308bbe43917c912b01c2d67` + - *CM_ML_MODEL_FILENAME*: `SSD_e1.pth` + - *CM_ML_MODEL_URL*: `https://www.dropbox.com/scl/fi/7nqt5z8gplgeaveo933eo/SSD_e1.pth?rlkey=7lyb4qs2hzg491bfprwcuvx54&dl=0` + - *CM_ML_MODEL*: `abtf-ssd-pytorch` + - *CM_ML_MODEL_DATASET*: `coco` + - *CM_ML_MODEL_IMAGE_HEIGHT*: `300` + - *CM_ML_MODEL_IMAGE_WIDTH*: `300` + - Workflow: + * **`_e65`** (default) + - Environment variables: + - *CM_ML_MODEL_CHECKSUM*: `f769eb0321ac7fc1c16f982db6131d2f` + - *CM_ML_MODEL_FILENAME*: `SSD_e65.pth` + - *CM_ML_MODEL_URL*: `https://www.dropbox.com/scl/fi/wkegl2qxvm8cefbqq00o3/SSD_e65.pth?rlkey=ez26jafjdcly665npl6pdqxl8&dl=0` + - *CM_ML_MODEL*: `abtf-ssd-pytorch` + - *CM_ML_MODEL_DATASET*: `coco` + - *CM_ML_MODEL_IMAGE_HEIGHT*: `300` + - *CM_ML_MODEL_IMAGE_WIDTH*: `300` + - Workflow: + * `_local.#` + - Environment variables: + - *CM_ML_MODEL_FILENAME*: `#` + - *CM_ML_MODEL_LOCAL*: `yes` + - *CM_SKIP_DOWNLOAD*: `yes` + - Workflow: + +
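+
+The `CM_ML_MODEL_CHECKSUM` values above appear to be MD5 digests, which CM uses to verify the downloaded weights. A small sketch for checking a locally downloaded file against the published value for the default `_e65` variation (file name taken from `CM_ML_MODEL_FILENAME`):
+
+```python
+# Verify a downloaded checkpoint against the checksum published above.
+# Assumes the published value is an MD5 digest (32 hex characters).
+import hashlib
+
+EXPECTED_MD5 = 'f769eb0321ac7fc1c16f982db6131d2f'  # _e65 variation
+
+md5 = hashlib.md5()
+with open('SSD_e65.pth', 'rb') as f:
+    for chunk in iter(lambda: f.read(1 << 20), b''):
+        md5.update(chunk)
+
+if md5.hexdigest() == EXPECTED_MD5:
+    print('Checksum OK')
+else:
+    print('Checksum mismatch:', md5.hexdigest())
+```
+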
+ + +#### Default variations + +`_e65` +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/ck/tree/dev/cm-mlops/script/get-ml-model-abtf-ssd-pytorch/_cm.json)*** + * detect,os + - CM script: [detect-os](https://github.com/mlcommons/ck/tree/master/cm-mlops/script/detect-os) + * download,file,_wget + * `if (CM_SKIP_DOWNLOAD != yes)` + * CM names: `--adr.['get-ml-model']...` + - CM script: [download-file](https://github.com/mlcommons/ck/tree/master/cm-mlops/script/download-file) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/ck/tree/dev/cm-mlops/script/get-ml-model-abtf-ssd-pytorch/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/ck/tree/dev/cm-mlops/script/get-ml-model-abtf-ssd-pytorch/_cm.json) + 1. ***Run native script if exists*** + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/ck/tree/dev/cm-mlops/script/get-ml-model-abtf-ssd-pytorch/_cm.json) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/ck/tree/dev/cm-mlops/script/get-ml-model-abtf-ssd-pytorch/customize.py)*** + 1. Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/ck/tree/dev/cm-mlops/script/get-ml-model-abtf-ssd-pytorch/_cm.json) + +___ +### Script output +`cmr "get ml-model abtf-ssd-pytorch [,variations]" -j` +#### New environment keys (filter) + +* `CM_ML_MODEL_*` +#### New environment keys auto-detected from customize + +* `CM_ML_MODEL_FILE` +* `CM_ML_MODEL_FILE_WITH_PATH` \ No newline at end of file diff --git a/script/get-ml-model-abtf-ssd-pytorch/_cm.yaml b/script/get-ml-model-abtf-ssd-pytorch/_cm.yaml new file mode 100644 index 0000000..a30d93d --- /dev/null +++ b/script/get-ml-model-abtf-ssd-pytorch/_cm.yaml @@ -0,0 +1,84 @@ +alias: get-ml-model-abtf-ssd-pytorch +uid: 59cfc2a22f5d4f46 + +automation_alias: script +automation_uid: 5b4e0237da074764 + +cache: true + +category: AI/ML models + +private: true + +deps: + +- tags: detect,os + +- tags: get,git,repo,_repo.https://github.com/mlcommons/abtf-ssd-pytorch + names: + - abtf-ssd-pytorch-git-repo + env: + CM_GIT_CHECKOUT_PATH_ENV_NAME: CM_ABTF_SSD_PYTORCH + CM_GIT_BRANCH: cognata-cm + extra_cache_tags: abtf,ssd,pytorch,cm-model + + +- tags: download,file,_wget + env: + CM_DOWNLOAD_CHECKSUM: <<>> + CM_DOWNLOAD_FINAL_ENV_NAME: CM_ML_MODEL_FILE_WITH_PATH + CM_VERIFY_SSL: 'no' + force_cache: true + names: + - get-ml-model-weights + skip_if_env: + CM_SKIP_DOWNLOAD: + - 'yes' + update_tags_from_env_with_prefix: + _url.: + - CM_ML_MODEL_URL + + + +new_env_keys: +- CM_ML_MODEL_* + +print_env_at_the_end: + CM_ML_MODEL_FILE_WITH_PATH: Path to the ML model weights + CM_ML_MODEL_CODE_WITH_PATH: Path to the ML model code + +tags: +- get +- ml-model +- abtf-ssd-pytorch + +variations: + e01: + env: + CM_ML_MODEL: abtf-ssd-pytorch + CM_ML_MODEL_CHECKSUM: 31d177228308bbe43917c912b01c2d67 + CM_ML_MODEL_DATASET: coco + CM_ML_MODEL_FILENAME: SSD_e1.pth + CM_ML_MODEL_IMAGE_HEIGHT: '300' + CM_ML_MODEL_IMAGE_WIDTH: '300' + CM_ML_MODEL_URL: https://www.dropbox.com/scl/fi/7nqt5z8gplgeaveo933eo/SSD_e1.pth?rlkey=7lyb4qs2hzg491bfprwcuvx54&dl=0 + group: epoch + + e65: + default: true + env: + CM_ML_MODEL: abtf-ssd-pytorch + CM_ML_MODEL_CHECKSUM: f769eb0321ac7fc1c16f982db6131d2f + CM_ML_MODEL_DATASET: coco + CM_ML_MODEL_FILENAME: SSD_e65.pth + CM_ML_MODEL_IMAGE_HEIGHT: '300' + CM_ML_MODEL_IMAGE_WIDTH: '300' + CM_ML_MODEL_URL: 
https://www.dropbox.com/scl/fi/wkegl2qxvm8cefbqq00o3/SSD_e65.pth?rlkey=ez26jafjdcly665npl6pdqxl8&dl=0 + group: epoch + + local.#: + env: + CM_ML_MODEL_FILENAME: '#' + CM_ML_MODEL_LOCAL: 'yes' + CM_SKIP_DOWNLOAD: 'yes' + group: epoch diff --git a/script/get-ml-model-abtf-ssd-pytorch/customize.py b/script/get-ml-model-abtf-ssd-pytorch/customize.py new file mode 100644 index 0000000..f8d8fb7 --- /dev/null +++ b/script/get-ml-model-abtf-ssd-pytorch/customize.py @@ -0,0 +1,36 @@ +from cmind import utils +import os + +def preprocess(i): + + os_info = i['os_info'] + + env = i['env'] + + if env.get('CM_ML_MODEL_LOCAL', '') == 'yes': + ml_model = env.get('CM_ML_MODEL_FILENAME', '') + if ml_model == '': + return {'return':1, 'error':'_local.{model name.pth} is not specified'} + + if not os.path.isabs(ml_model): + ml_model = os.path.join(env.get('CM_TMP_CURRENT_PATH',''), ml_model) + + if not os.path.isfile(ml_model): + return {'return':1, 'error':'ML model {} is not found'.format(ml_model)} + + env['CM_ML_MODEL_FILE_WITH_PATH'] = ml_model + + return {'return':0} + +def postprocess(i): + + env = i['env'] + + env['CM_ML_MODEL_FILE'] = os.path.basename(env['CM_ML_MODEL_FILE_WITH_PATH']) + + env['CM_ML_MODEL_CODE_WITH_PATH'] = env['CM_ABTF_SSD_PYTORCH'] + + env['CM_GET_DEPENDENT_CACHED_PATH'] = env['CM_ML_MODEL_FILE_WITH_PATH'] + + return {'return':0} + diff --git a/script/test-ssd-resnet50-cognata-pytorch/README.md b/script/test-ssd-resnet50-cognata-pytorch/README.md new file mode 100644 index 0000000..cadb4c2 --- /dev/null +++ b/script/test-ssd-resnet50-cognata-pytorch/README.md @@ -0,0 +1,218 @@ +Automatically generated README for this automation recipe: **test-abtf-ssd-pytorch** + +Category: **Tests** + +License: **Apache 2.0** + +Maintainers: [Public MLCommons Task Force on Automation and Reproducibility](https://github.com/mlcommons/ck/blob/master/docs/taskforce.md) + +--- +*[ [Online info and GUI to run this CM script](https://access.cknowledge.org/playground/?action=scripts&name=test-abtf-ssd-pytorch,91bfc4333b054c21) ]* + +--- +#### Summary + +* CM GitHub repository: *[mlcommons@ck](https://github.com/mlcommons/ck/tree/dev/cm-mlops)* +* GitHub directory for this script: *[GitHub](https://github.com/mlcommons/ck/tree/dev/cm-mlops/script/test-abtf-ssd-pytorch)* +* CM meta description for this script: *[_cm.yaml](_cm.yaml)* +* All CM tags to find and reuse this script (see in above meta description): *test,abtf,ssd,pytorch,ssd-pytorch* +* Output cached? 
*False*
+* See [pipeline of dependencies](#dependencies-on-other-cm-scripts) on other CM scripts
+
+
+---
+### Reuse this script in your project
+
+#### Install MLCommons CM automation meta-framework
+
+* [Install CM](https://access.cknowledge.org/playground/?action=install)
+* [CM Getting Started Guide](https://github.com/mlcommons/ck/blob/master/docs/getting-started.md)
+
+#### Pull CM repository with this automation recipe (CM script)
+
+```cm pull repo mlcommons@ck```
+
+#### Print CM help from the command line
+
+````cmr "test abtf ssd pytorch ssd-pytorch" --help````
+
+#### Customize and run this script from the command line with different variations and flags
+
+`cm run script --tags=test,abtf,ssd,pytorch,ssd-pytorch`
+
+`cm run script --tags=test,abtf,ssd,pytorch,ssd-pytorch[,variations] [--input_flags]`
+
+*or*
+
+`cmr "test abtf ssd pytorch ssd-pytorch"`
+
+`cmr "test abtf ssd pytorch ssd-pytorch [variations]" [--input_flags]`
+
+
+* *See the list of `variations` [here](#variations) and check the [Getting Started Guide](https://github.com/mlcommons/ck/blob/dev/docs/getting-started.md) for more details.*
+
+
+#### Input Flags
+
+* --**input**=input image (png)
+* --**output**=output image (png)
+* --**export_model**=ONNX model name to be exported from PyTorch
+
+**The above CLI flags can be used in the Python CM API as follows:**
+
+```python
+r = cm.access({..., "input": ...})
+```
+#### Run this script from Python
+
+Click here to expand this section.
+
+```python
+
+import cmind
+
+r = cmind.access({'action':'run',
+                  'automation':'script',
+                  'tags':'test,abtf,ssd,pytorch,ssd-pytorch',
+                  'out':'con',
+                  ...
+                  (other input keys for this script)
+                  ...
+                 })
+
+if r['return']>0:
+    print (r['error'])
+
+```
+
+ + +#### Run this script via GUI + +```cmr "cm gui" --script="test,abtf,ssd,pytorch,ssd-pytorch"``` + +Use this [online GUI](https://cKnowledge.org/cm-gui/?tags=test,abtf,ssd,pytorch,ssd-pytorch) to generate CM CMD. + +#### Run this script via Docker (beta) + +`cm docker script "test abtf ssd pytorch ssd-pytorch[variations]" [--input_flags]` + +___ +### Customization + + +#### Variations + + * Group "**dataset**" +
+ Click here to expand this section. + + * **`_coco`** (default) + - Environment variables: + - *CM_ABTF_DATASET*: `coco` + - *CM_ABTF_SSD_PYTORCH_BRANCH*: `main` + - Workflow: + * `_cognata` + - Environment variables: + - *CM_ABTF_DATASET*: `Cognata` + - *CM_ABTF_SSD_PYTORCH_BRANCH*: `cognata` + - *CM_ABTF_ML_MODEL_CONFIG*: `baseline_8MP` + - Workflow: + +
+ + + * Group "**device**" +
+ Click here to expand this section. + + * **`_cpu`** (default) + - Environment variables: + - *CM_DEVICE*: `cpu` + - Workflow: + * `_cuda` + - Environment variables: + - *CM_DEVICE*: `cuda` + - Workflow: + +
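+
+The dataset and device groups can be combined with the input flags documented earlier in this README when the test is driven from Python. A sketch using the tags and flags as documented here; the image and ONNX file names are illustrative placeholders:
+
+```python
+# Run the Cognata test on a CUDA device via the CM Python API, combining
+# variation tags with the documented input flags.
+import cmind
+
+r = cmind.access({'action': 'run',
+                  'automation': 'script',
+                  'tags': 'test,abtf,ssd,pytorch,ssd-pytorch,_cognata,_cuda',
+                  'input': 'road.jpg',
+                  'output': 'road_ssd.jpg',
+                  'export_model': 'baseline_8mp.onnx',
+                  'out': 'con'})
+
+if r['return'] > 0:
+    print(r['error'])
+```
+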
+ + +#### Default variations + +`_coco,_cpu` + +#### Script flags mapped to environment +
+Click here to expand this section.
+
+* `--export_model=value` → `CM_ABTF_EXPORT_MODEL_TO_ONNX=value`
+* `--input=value` → `CM_INPUT_IMAGE=value`
+* `--output=value` → `CM_OUTPUT_IMAGE=value`
+
+**The above CLI flags can be used in the Python CM API as follows:**
+
+```python
+r = cm.access({..., "export_model": ...})
+```
+
+ +#### Default environment + +
+Click here to expand this section. + +These keys can be updated via `--env.KEY=VALUE` or `env` dictionary in `@input.json` or using script flags. + + +
+ +___ +### Dependencies on other CM scripts + + + 1. ***Read "deps" on other CM scripts from [meta](https://github.com/mlcommons/ck/tree/dev/cm-mlops/script/test-abtf-ssd-pytorch/_cm.yaml)*** + * detect,os + - CM script: [detect-os](https://github.com/mlcommons/ck/tree/master/cm-mlops/script/detect-os) + * get,python3 + * CM names: `--adr.['python', 'python3']...` + - CM script: [get-python3](https://github.com/mlcommons/ck/tree/master/cm-mlops/script/get-python3) + * get,generic-python-lib,_numpy + - CM script: [get-generic-python-lib](https://github.com/mlcommons/ck/tree/master/cm-mlops/script/get-generic-python-lib) + * get,generic-python-lib,_package.Pillow + - CM script: [get-generic-python-lib](https://github.com/mlcommons/ck/tree/master/cm-mlops/script/get-generic-python-lib) + * get,generic-python-lib,_onnx + - CM script: [get-generic-python-lib](https://github.com/mlcommons/ck/tree/master/cm-mlops/script/get-generic-python-lib) + * get,generic-python-lib,_torch + * CM names: `--adr.['torch']...` + - CM script: [get-generic-python-lib](https://github.com/mlcommons/ck/tree/master/cm-mlops/script/get-generic-python-lib) + * get,generic-python-lib,_torchvision + - CM script: [get-generic-python-lib](https://github.com/mlcommons/ck/tree/master/cm-mlops/script/get-generic-python-lib) + * get,generic-python-lib,_opencv-python + - CM script: [get-generic-python-lib](https://github.com/mlcommons/ck/tree/master/cm-mlops/script/get-generic-python-lib) + * get,ml-model,abtf-ssd-pytorch + * CM names: `--adr.['ml-model']...` + - CM script: [get-ml-model-abtf-ssd-pytorch](https://github.com/mlcommons/ck/tree/master/cm-mlops/script/get-ml-model-abtf-ssd-pytorch) + * get,git,repo,_repo.https://github.com/mlcommons/abtf-ssd-pytorch + * CM names: `--adr.['abtf-ssd-pytorch-git-repo']...` + - CM script: [get-git-repo](https://github.com/mlcommons/ck/tree/master/cm-mlops/script/get-git-repo) + 1. ***Run "preprocess" function from [customize.py](https://github.com/mlcommons/ck/tree/dev/cm-mlops/script/test-abtf-ssd-pytorch/customize.py)*** + 1. Read "prehook_deps" on other CM scripts from [meta](https://github.com/mlcommons/ck/tree/dev/cm-mlops/script/test-abtf-ssd-pytorch/_cm.yaml) + 1. ***Run native script if exists*** + * [run-coco.bat](https://github.com/mlcommons/ck/tree/dev/cm-mlops/script/test-abtf-ssd-pytorch/run-coco.bat) + * [run-coco.sh](https://github.com/mlcommons/ck/tree/dev/cm-mlops/script/test-abtf-ssd-pytorch/run-coco.sh) + * [run-cognata.bat](https://github.com/mlcommons/ck/tree/dev/cm-mlops/script/test-abtf-ssd-pytorch/run-cognata.bat) + * [run-cognata.sh](https://github.com/mlcommons/ck/tree/dev/cm-mlops/script/test-abtf-ssd-pytorch/run-cognata.sh) + 1. Read "posthook_deps" on other CM scripts from [meta](https://github.com/mlcommons/ck/tree/dev/cm-mlops/script/test-abtf-ssd-pytorch/_cm.yaml) + 1. ***Run "postrocess" function from [customize.py](https://github.com/mlcommons/ck/tree/dev/cm-mlops/script/test-abtf-ssd-pytorch/customize.py)*** + 1. 
Read "post_deps" on other CM scripts from [meta](https://github.com/mlcommons/ck/tree/dev/cm-mlops/script/test-abtf-ssd-pytorch/_cm.yaml) + +___ +### Script output +`cmr "test abtf ssd pytorch ssd-pytorch [,variations]" [--input_flags] -j` +#### New environment keys (filter) + +#### New environment keys auto-detected from customize diff --git a/script/test-ssd-resnet50-cognata-pytorch/_cm.yaml b/script/test-ssd-resnet50-cognata-pytorch/_cm.yaml new file mode 100644 index 0000000..d9d4ad3 --- /dev/null +++ b/script/test-ssd-resnet50-cognata-pytorch/_cm.yaml @@ -0,0 +1,89 @@ +alias: test-ssd-resnet50-cognata-pytorch +uid: 8a063cb8582a4a96 + +automation_alias: script +automation_uid: 5b4e0237da074764 + +private: true + +category: Tests + +tags: +- test +- abtf +- ssd-resnet50 +- cognata +- pytorch + +input_mapping: + input: CM_INPUT_IMAGE + output: CM_OUTPUT_IMAGE + export_model: CM_ABTF_EXPORT_MODEL_TO_ONNX + config: CM_ABTF_ML_MODEL_CONFIG + dataset: CM_ABTF_DATASET + num_classes: CM_ABTF_NUM_CLASSES + +default_env: + CM_ABTF_ML_MODEL_CONFIG: baseline_8MP + CM_ABTF_DATASET: Cognata + +deps: + +- tags: detect,os + +- tags: get,python3 + names: + - python + - python3 + +- tags: get,generic-python-lib,_numpy +- tags: get,generic-python-lib,_package.Pillow +- tags: get,generic-python-lib,_opencv-python +- tags: get,generic-python-lib,_onnx +- tags: get,generic-python-lib,_torch + names: + - torch +- tags: get,generic-python-lib,_torchvision + + +- tags: get,ml-model,abtf-ssd-pytorch + names: + - ml-model + + + + +variations: + cpu: + group: device + default: True + env: + CM_DEVICE: cpu + + cuda: + group: + device + env: + CM_DEVICE: cuda + +input_description: + input: + desc: input image (png) + output: + desc: output image (png) + export_model: + desc: ONNX model name to be exported from PyTorch + num-classes: + desc: change number of classes (13) + +docker: + skip_run_cmd: 'no' + all_gpus: 'yes' + input_paths: + - input + - output + - export_model + skip_input_for_fake_run: + - input + - output + - export_model diff --git a/script/test-ssd-resnet50-cognata-pytorch/customize.py b/script/test-ssd-resnet50-cognata-pytorch/customize.py new file mode 100644 index 0000000..e752428 --- /dev/null +++ b/script/test-ssd-resnet50-cognata-pytorch/customize.py @@ -0,0 +1,28 @@ +from cmind import utils +import os + +def preprocess(i): + os_info = i['os_info'] + + env = i['env'] + + print ('') + print ('Current directory: {}'.format(os.getcwd())) + + print ('') + + extra = '' + if env.get('CM_ABTF_NUM_CLASSES', '')!='': + extra +=' --num-classes '+str(env['CM_ABTF_NUM_CLASSES']) + + if extra!='': + print ('') + print ('Extra command line: {}'.format(extra)) + + env['CM_ABTF_EXTRA_CMD'] = extra + + return {'return':0} + +def postprocess(i): + + return {'return':0} diff --git a/script/test-ssd-resnet50-cognata-pytorch/run.bat b/script/test-ssd-resnet50-cognata-pytorch/run.bat new file mode 100644 index 0000000..09eec39 --- /dev/null +++ b/script/test-ssd-resnet50-cognata-pytorch/run.bat @@ -0,0 +1,6 @@ +@echo off + +echo ======================================================= + +%CM_PYTHON_BIN_WITH_PATH% %CM_ML_MODEL_CODE_WITH_PATH%\test_image.py --pretrained-model "%CM_ML_MODEL_FILE_WITH_PATH%" --dataset %CM_ABTF_DATASET% --config %CM_ABTF_ML_MODEL_CONFIG% --input %CM_INPUT_IMAGE% --output %CM_OUTPUT_IMAGE% %CM_ABTF_EXTRA_CMD% +IF %ERRORLEVEL% NEQ 0 EXIT %ERRORLEVEL% diff --git a/script/test-ssd-resnet50-cognata-pytorch/run.sh b/script/test-ssd-resnet50-cognata-pytorch/run.sh new file mode 100644 index 
0000000..64f8026
--- /dev/null
+++ b/script/test-ssd-resnet50-cognata-pytorch/run.sh
@@ -0,0 +1,6 @@
+#!/bin/bash
+
+echo "======================================================="
+
+${CM_PYTHON_BIN_WITH_PATH} ${CM_ML_MODEL_CODE_WITH_PATH}/test_image.py --pretrained-model "${CM_ML_MODEL_FILE_WITH_PATH}" --dataset ${CM_ABTF_DATASET} --config ${CM_ABTF_ML_MODEL_CONFIG} --input ${CM_INPUT_IMAGE} --output ${CM_OUTPUT_IMAGE} ${CM_ABTF_EXTRA_CMD}
+rc=$?; test ${rc} -eq 0 || exit ${rc}