diff --git a/.github/workflows/code_scan.yml b/.github/workflows/code_scan.yml index d644c7d5751..7fa3c3b0923 100644 --- a/.github/workflows/code_scan.yml +++ b/.github/workflows/code_scan.yml @@ -20,7 +20,10 @@ jobs: with: python-version: "3.10" - name: Install dependencies - run: python -m pip install tox==4.21.1 + run: | + pip install --require-hashes --no-deps -r requirements/gh-actions.txt + pip-compile --generate-hashes -o /tmp/otx-dev-requirements.txt requirements/dev.txt + pip install --require-hashes --no-deps -r /tmp/otx-dev-requirements.txt - name: Trivy Scanning env: TRIVY_DOWNLOAD_URL: ${{ vars.TRIVY_DOWNLOAD_URL }} @@ -43,7 +46,11 @@ jobs: with: python-version: "3.10" - name: Install dependencies - run: python -m pip install tox==4.21.1 + run: | + pip install --require-hashes --no-deps -r requirements/gh-actions.txt + pip-compile --generate-hashes -o /tmp/otx-dev-requirements.txt requirements/dev.txt + pip install --require-hashes --no-deps -r /tmp/otx-dev-requirements.txt + rm /tmp/otx-dev-requirements.txt - name: Bandit Scanning run: tox -e bandit-scan - name: Upload Bandit artifact diff --git a/.github/workflows/docs.yml b/.github/workflows/docs.yml index d94d0b738ae..afd8064ae23 100644 --- a/.github/workflows/docs.yml +++ b/.github/workflows/docs.yml @@ -21,7 +21,11 @@ jobs: with: python-version: "3.10" - name: Install dependencies - run: python -m pip install -r requirements/dev.txt + run: | + pip install --require-hashes --no-deps -r requirements/gh-actions.txt + pip-compile --generate-hashes -o /tmp/otx-dev-requirements.txt requirements/dev.txt + pip install --require-hashes --no-deps -r /tmp/otx-dev-requirements.txt + rm /tmp/otx-dev-requirements.txt - name: Build-Docs run: tox -e build-doc - name: Create gh-pages branch diff --git a/.github/workflows/docs_stable.yml b/.github/workflows/docs_stable.yml index 1a6c5e58733..cfb98be200e 100644 --- a/.github/workflows/docs_stable.yml +++ b/.github/workflows/docs_stable.yml @@ -22,7 +22,11 @@ jobs: 
with: python-version: "3.10" - name: Install dependencies - run: python -m pip install -r requirements/dev.txt + run: | + pip install --require-hashes --no-deps -r requirements/gh-actions.txt + pip-compile --generate-hashes -o /tmp/otx-dev-requirements.txt requirements/dev.txt + pip install --require-hashes --no-deps -r /tmp/otx-dev-requirements.txt + rm /tmp/otx-dev-requirements.txt - name: Build-Docs run: tox -e build-doc - name: Create gh-pages branch diff --git a/.github/workflows/perf-accuracy.yml b/.github/workflows/perf-accuracy.yml index 1318403c3be..ef367a6f9d1 100644 --- a/.github/workflows/perf-accuracy.yml +++ b/.github/workflows/perf-accuracy.yml @@ -33,6 +33,34 @@ on: - export - optimize default: optimize + artifact-prefix: + type: string + default: perf-accuracy-benchmark + workflow_call: + inputs: + model-type: + type: string + description: Model type to run benchmark [default, all] + default: default + data-size: + type: string + description: Dataset size to run benchmark [small, medium, large, all] + default: all + num-repeat: + type: number + description: Overrides default per-data-size number of repeat setting + default: 0 + num-epoch: + type: number + description: Overrides default per-model number of epoch setting + default: 0 + eval-upto: + type: string + description: The last operation to evaluate. 'optimize' means all. [train, export, optimize] + default: optimize + artifact-prefix: + type: string + default: perf-accuracy-benchmark # Declare default permissions as read only. 
permissions: read-all @@ -73,4 +101,4 @@ jobs: task: ${{ matrix.task }} timeout-minutes: 8640 upload-artifact: true - artifact-prefix: perf-accuracy-benchmark + artifact-prefix: ${{ inputs.artifact-prefix }} diff --git a/.github/workflows/perf-speed.yml b/.github/workflows/perf-speed.yml index 3e33a782c2b..26995b0077c 100644 --- a/.github/workflows/perf-speed.yml +++ b/.github/workflows/perf-speed.yml @@ -33,6 +33,34 @@ on: - export - optimize default: optimize + artifact-prefix: + type: string + default: perf-speed-benchmark + workflow_call: + inputs: + model-type: + type: string + description: Model type to run benchmark [default, all] + default: default + data-size: + type: string + description: Dataset size to run benchmark [small, medium, large, all] + default: medium + num-repeat: + type: number + description: Overrides default per-data-size number of repeat setting + default: 1 + num-epoch: + type: number + description: Overrides default per-model number of epoch setting + default: 3 + eval-upto: + type: string + description: The last operation to evaluate. 'optimize' means all [train, export, optimize] + default: optimize + artifact-prefix: + type: string + default: perf-speed-benchmark # Declare default permissions as read only. 
permissions: read-all @@ -59,4 +87,4 @@ jobs: task: all timeout-minutes: 8640 upload-artifact: true - artifact-prefix: perf-speed-benchmark + artifact-prefix: ${{ inputs.artifact-prefix }} diff --git a/.github/workflows/pre_merge.yml b/.github/workflows/pre_merge.yml index fd2cbddbe12..bc3d01c662a 100644 --- a/.github/workflows/pre_merge.yml +++ b/.github/workflows/pre_merge.yml @@ -31,9 +31,10 @@ jobs: python-version: "3.10" - name: Install dependencies run: | - pip install pip-tools==7.3.0 + pip install --require-hashes --no-deps -r requirements/gh-actions.txt pip-compile --generate-hashes -o /tmp/otx-dev-requirements.txt requirements/dev.txt pip install --require-hashes --no-deps -r /tmp/otx-dev-requirements.txt + rm /tmp/otx-dev-requirements.txt - name: Code quality checks run: tox -vv -e pre-commit-all-py310-pt1 Unit-Test: @@ -79,9 +80,10 @@ jobs: python-version: "3.8" - name: Install dependencies run: | - pip install pip-tools==7.3.0 + pip install --require-hashes --no-deps -r requirements/gh-actions.txt pip-compile --generate-hashes -o /tmp/otx-dev-requirements.txt requirements/dev.txt pip install --require-hashes --no-deps -r /tmp/otx-dev-requirements.txt + rm /tmp/otx-dev-requirements.txt - name: Run unit test run: tox -vv -e unittest-all-py38-pt1 - name: Upload coverage artifact diff --git a/.github/workflows/publish.yml b/.github/workflows/publish.yml index 776b9507352..81f1719b431 100644 --- a/.github/workflows/publish.yml +++ b/.github/workflows/publish.yml @@ -33,9 +33,10 @@ jobs: python-version: "3.10" - name: Install pypa/build run: | - pip install pip-tools==7.3.0 + pip install --require-hashes --no-deps -r requirements/gh-actions.txt pip-compile --generate-hashes -o /tmp/otx-publish-requirements.txt requirements/publish.txt pip install --require-hashes --no-deps -r /tmp/otx-publish-requirements.txt + rm /tmp/otx-publish-requirements.txt - name: Build sdist run: python -m build --sdist - uses: 
actions/upload-artifact@a8a3f3ad30e3422c9c7b888a15615d19a852ae32 # v3.1.3 diff --git a/.github/workflows/publish_internal.yml b/.github/workflows/publish_internal.yml index 802ba7b10a5..d3574b73377 100644 --- a/.github/workflows/publish_internal.yml +++ b/.github/workflows/publish_internal.yml @@ -31,9 +31,10 @@ jobs: python-version: "3.10" - name: Install pypa/build run: | - pip install pip-tools==7.3.0 + pip install --require-hashes --no-deps -r requirements/gh-actions.txt pip-compile --generate-hashes -o /tmp/otx-publish-requirements.txt requirements/publish.txt pip install --require-hashes --no-deps -r /tmp/otx-publish-requirements.txt + rm /tmp/otx-publish-requirements.txt - name: Build sdist run: python -m build --sdist - uses: actions/upload-artifact@a8a3f3ad30e3422c9c7b888a15615d19a852ae32 # v3.1.3 @@ -56,9 +57,10 @@ jobs: python-version: "3.10" - name: Install dependencies run: | - pip install pip-tools==7.3.0 + pip install --require-hashes --no-deps -r requirements/gh-actions.txt pip-compile --generate-hashes -o /tmp/otx-publish-requirements.txt requirements/publish.txt pip install --require-hashes --no-deps -r /tmp/otx-publish-requirements.txt + rm /tmp/otx-publish-requirements.txt - name: Download artifacts uses: actions/download-artifact@9bc31d5ccc31df68ecc42ccf4149144866c47d8a # v3.0.2 with: diff --git a/.github/workflows/run_tests_in_tox.yml b/.github/workflows/run_tests_in_tox.yml index 1adc0c2c641..470040f854e 100644 --- a/.github/workflows/run_tests_in_tox.yml +++ b/.github/workflows/run_tests_in_tox.yml @@ -52,9 +52,10 @@ jobs: python-version: ${{ inputs.python-version }} - name: Install dependencies run: | - pip install pip-tools==7.3.0 + pip install --require-hashes --no-deps -r requirements/gh-actions.txt pip-compile --generate-hashes -o /tmp/otx-dev-requirements.txt requirements/dev.txt pip install --require-hashes --no-deps -r /tmp/otx-dev-requirements.txt + rm /tmp/otx-dev-requirements.txt - name: Run Tests env: MLFLOW_TRACKING_SERVER_URI: 
${{ vars.MLFLOW_TRACKING_SERVER_URI }} diff --git a/.github/workflows/run_tests_in_tox_custom.yml b/.github/workflows/run_tests_in_tox_custom.yml index 8bb28ade61a..3fb84957b6e 100644 --- a/.github/workflows/run_tests_in_tox_custom.yml +++ b/.github/workflows/run_tests_in_tox_custom.yml @@ -58,9 +58,10 @@ jobs: python-version: ${{ inputs.python-version }} - name: Install dependencies run: | - pip install pip-tools==7.3.0 + pip install --require-hashes --no-deps -r requirements/gh-actions.txt pip-compile --generate-hashes -o /tmp/otx-dev-requirements.txt requirements/dev.txt pip install --require-hashes --no-deps -r /tmp/otx-dev-requirements.txt + rm /tmp/otx-dev-requirements.txt - name: Run Tests env: MLFLOW_TRACKING_SERVER_URI: ${{ vars.MLFLOW_TRACKING_SERVER_URI }} diff --git a/.github/workflows/weekly.yml b/.github/workflows/weekly.yml index 3badd5ab79a..ceb401b21f6 100644 --- a/.github/workflows/weekly.yml +++ b/.github/workflows/weekly.yml @@ -10,41 +10,23 @@ on: permissions: read-all jobs: - Regression-Tests: - strategy: - fail-fast: false - matrix: - include: - - toxenv_task: "iseg" - test_dir: "tests/regression/instance_segmentation/test_instance_segmentation.py" - task: "instance_segmentation" - - toxenv_task: "iseg_t" - test_dir: "tests/regression/instance_segmentation/test_tiling_instance_segmentation.py" - task: "instance_segmentation" - - toxenv_task: "seg" - test_dir: "tests/regression/semantic_segmentation" - task: "segmentation" - - toxenv_task: "det" - test_dir: "tests/regression/detection" - task: "detection" - - toxenv_task: "ano" - test_dir: "tests/regression/anomaly" - task: "anomaly" - - toxenv_task: "act" - test_dir: "tests/regression/action" - task: "action" - - toxenv_task: "cls" - test_dir: "tests/regression/classification" - task: "classification" - name: Regression-Test-py310-${{ matrix.toxenv_task }} - uses: ./.github/workflows/run_tests_in_tox.yml + Performance-Speed-Tests: + name: Performance-Speed-py310 + uses: 
./.github/workflows/perf-speed.yml with: - python-version: "3.10" - toxenv-pyver: "py310" - toxenv-task: ${{ matrix.toxenv_task }} - tests-dir: ${{ matrix.test_dir }} - runs-on: "['self-hosted', 'Linux', 'X64', 'dmount']" - task: ${{ matrix.task }} - timeout-minutes: 8640 - upload-artifact: true - artifact-prefix: "weekly-test-results" + model-type: default + data-size: medium + num-repeat: 1 + num-epoch: 3 + eval-upto: optimize + artifact-prefix: weekly-perf-speed-benchmark + Performance-Accuracy-Tests: + name: Performance-Accuracy-py310 + uses: ./.github/workflows/perf-accuracy.yml + with: + model-type: default + data-size: all + num-repeat: 0 + num-epoch: 0 + eval-upto: optimize + artifact-prefix: weekly-perf-accuracy-benchmark diff --git a/requirements/gh-actions.txt b/requirements/gh-actions.txt new file mode 100644 index 00000000000..33029eb1409 --- /dev/null +++ b/requirements/gh-actions.txt @@ -0,0 +1,45 @@ +# +# This file is autogenerated by pip-compile with Python 3.10 +# by the following command: +# +# pip-compile --generate-hashes --output-file=requirements.txt requirements/gh-actions.txt +# +build==1.0.3 \ + --hash=sha256:538aab1b64f9828977f84bc63ae570b060a8ed1be419e7870b8b4fc5e6ea553b \ + --hash=sha256:589bf99a67df7c9cf07ec0ac0e5e2ea5d4b37ac63301c4986d1acb126aa83f8f + # via pip-tools +click==8.1.7 \ + --hash=sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28 \ + --hash=sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de + # via pip-tools +packaging==23.2 \ + --hash=sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5 \ + --hash=sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7 + # via build +pip-tools==7.4.0 \ + --hash=sha256:a92a6ddfa86ff389fe6ace381d463bc436e2c705bd71d52117c25af5ce867bb7 \ + --hash=sha256:b67432fd0759ed834c5367f9e0ce8c95441acecfec9c8e24b41aca166757adf0 + # via -r requirements/gh-actions.txt +pyproject-hooks==1.0.0 \ + 
--hash=sha256:283c11acd6b928d2f6a7c73fa0d01cb2bdc5f07c57a2eeb6e83d5e56b97976f8 \ + --hash=sha256:f271b298b97f5955d53fb12b72c1fb1948c22c1a6b70b315c54cedaca0264ef5 + # via + # build + # pip-tools +tomli==2.0.1 \ + --hash=sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc \ + --hash=sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f + # via + # build + # pip-tools + # pyproject-hooks +wheel==0.42.0 \ + --hash=sha256:177f9c9b0d45c47873b619f5b650346d632cdc35fb5e4d25058e09c9e581433d \ + --hash=sha256:c45be39f7882c9d34243236f2d63cbd58039e360f85d0913425fbd7ceea617a8 + # via pip-tools + +# WARNING: The following packages were not pinned, but pip requires them to be +# pinned when the requirements file includes hashes and the requirement is not +# satisfied by a package already installed. Consider using the --allow-unsafe flag. +# pip +# setuptools \ No newline at end of file diff --git a/src/otx/algorithms/classification/adapters/mmcls/models/heads/custom_vision_transformer_head.py b/src/otx/algorithms/classification/adapters/mmcls/models/heads/custom_vision_transformer_head.py index b58d0803589..38a2d704c2c 100644 --- a/src/otx/algorithms/classification/adapters/mmcls/models/heads/custom_vision_transformer_head.py +++ b/src/otx/algorithms/classification/adapters/mmcls/models/heads/custom_vision_transformer_head.py @@ -6,8 +6,6 @@ from mmcls.models.builder import HEADS from mmcls.models.heads import VisionTransformerClsHead -from otx.algorithms.common.utils import cast_bf16_to_fp32 - @HEADS.register_module() class CustomVisionTransformerClsHead(VisionTransformerClsHead): @@ -34,15 +32,6 @@ def loss(self, cls_score, gt_label, feature=None): losses["loss"] = loss return losses - def post_process(self, pred): - """Post processing.""" - pred = cast_bf16_to_fp32(pred) - return super().post_process(pred) - - def forward(self, x): - """Forward fuction of CustomVisionTransformerClsHead class.""" - return self.simple_test(x) - def 
forward_train(self, x, gt_label, **kwargs): """Forward_train fuction of CustomVisionTransformerClsHead class.""" x = self.pre_logits(x) diff --git a/tests/perf/test_classification.py b/tests/perf/test_classification.py index 820d644ae40..9397dc5413e 100644 --- a/tests/perf/test_classification.py +++ b/tests/perf/test_classification.py @@ -52,7 +52,7 @@ class TestPerfSingleLabelClassification: @pytest.mark.parametrize("fxt_model_id", MODEL_TEMPLATES, ids=MODEL_IDS, indirect=True) @pytest.mark.parametrize("fxt_benchmark", BENCHMARK_CONFIGS.items(), ids=BENCHMARK_CONFIGS.keys(), indirect=True) - def test_accuracy(self, fxt_model_id: str, fxt_benchmark: OTXBenchmark): + def test_accuracy(self, fxt_model_id: str, fxt_benchmark: OTXBenchmark, fxt_check_benchmark_result: Callable): """Benchmark accruacy metrics.""" result = fxt_benchmark.run( model_id=fxt_model_id, @@ -301,7 +301,7 @@ def test_accuracy(self, fxt_model_id: str, fxt_benchmark: OTXBenchmark, fxt_chec @pytest.mark.parametrize("fxt_model_id", MODEL_TEMPLATES, ids=MODEL_IDS, indirect=True) @pytest.mark.parametrize("fxt_benchmark", BENCHMARK_CONFIGS.items(), ids=BENCHMARK_CONFIGS.keys(), indirect=True) - def test_speed(self, fxt_model_id: str, fxt_benchmark: OTXBenchmark, fxt_check_benchmark_results: Callable): + def test_speed(self, fxt_model_id: str, fxt_benchmark: OTXBenchmark, fxt_check_benchmark_result: Callable): """Benchmark train time per iter / infer time per image.""" fxt_benchmark.track_resources = True result = fxt_benchmark.run( diff --git a/tests/run_code_checks.sh b/tests/run_code_checks.sh deleted file mode 100755 index 395d3660bd9..00000000000 --- a/tests/run_code_checks.sh +++ /dev/null @@ -1,22 +0,0 @@ -#!/bin/bash - -WORK_DIR=$(mktemp -d) -python3 -m venv "$WORK_DIR" -# shellcheck source=/dev/null -source "$WORK_DIR"/bin/activate -pip install pip --upgrade -pip install wheel -pip install ote_sdk/ -pip install ote_cli/ -pip install pre-commit -echo "" -echo "" -echo "" -echo " 
##############################################" -echo " ######## ########" -echo " ######## ./tests/run_code_checks.sh ########" -echo " ######## ########" -echo " ##############################################" -echo "" -pre-commit run --all-files - diff --git a/tests/run_model_templates_tests.py b/tests/run_model_templates_tests.py deleted file mode 100644 index 4dfd18ce7ce..00000000000 --- a/tests/run_model_templates_tests.py +++ /dev/null @@ -1,81 +0,0 @@ -""" Runs tests selectively depending on changed files. """ - -import os -import sys -from subprocess import run - -from tests.test_suite.run_test_command import collect_env_vars - -ALGO_ROOT_DIR = "external" -ALGO_DIRS = [ - os.path.join(ALGO_ROOT_DIR, d) for d in os.listdir(ALGO_ROOT_DIR) if os.path.isdir(os.path.join(ALGO_ROOT_DIR, d)) -] -IMPORTANT_DIRS = [ - "ote_cli/", - "ote_sdk/", - "tests/", -] - -wd = sys.argv[1] - - -def what_to_test(): - """ - Returns a dict containing information whether it is needed - to run tests for particular algorithm. - """ - - print(f"{sys.argv=}") - run_algo_tests = {d: True for d in ALGO_DIRS} - if len(sys.argv) > 2: - run_algo_tests = {d: False for d in ALGO_DIRS} - changed_files = sys.argv[2:] - print(f"{changed_files=}") - - for changed_file in changed_files: - if any(changed_file.startswith(d) for d in IMPORTANT_DIRS): - run_algo_tests = {d: True for d in ALGO_DIRS} - break - - for d in ALGO_DIRS: - if changed_file.startswith(d): - run_algo_tests[d] = True - - for k, v in run_algo_tests.items(): - print("run", k, v) - - return run_algo_tests - - -def test(run_algo_tests): - """ - Runs tests for algorithms and other stuff (misc). 
- """ - - passed = {} - success = True - command = ["pytest", os.path.join("tests", "ote_cli", "misc"), "-v"] - try: - res = run(command, env=collect_env_vars(wd), check=True).returncode == 0 - except: # noqa: E722 - res = False - passed["misc"] = res - success *= res - for algo_dir in ALGO_DIRS: - if run_algo_tests[algo_dir]: - command = ["pytest", os.path.join(algo_dir, "tests", "ote_cli"), "-v", "-rxXs", "--durations=10"] - try: - res = run(command, env=collect_env_vars(wd), check=True).returncode == 0 - except: # noqa: E722 - res = False - passed[algo_dir] = res - success *= res - - for k, v in passed.items(): - res = "PASSED" if v else "FAILED" - print(f"Tests for {k} {res}") - - sys.exit(1 - success) - - -test(what_to_test()) diff --git a/tests/run_model_templates_tests.sh b/tests/run_model_templates_tests.sh deleted file mode 100755 index 1569c6305c9..00000000000 --- a/tests/run_model_templates_tests.sh +++ /dev/null @@ -1,10 +0,0 @@ -#!/usr/bin/env bash - -python3 -m venv venv || exit 1 -# shellcheck source=/dev/null -. venv/bin/activate || exit 1 -pip install --upgrade pip || exit 1 -pip install -e ote_cli || exit 1 -pip install -e ote_sdk || exit 1 - -python tests/run_model_templates_tests.py "$(pwd)" "$@" || exit 1