From 8244753f253114aae106d51d5965fdc17f47927a Mon Sep 17 00:00:00 2001
From: Prokofiev Kirill
Date: Thu, 11 Apr 2024 02:39:43 +0200
Subject: [PATCH] Extend exportable code (#3284)

* added functionality to export optimized models and models already exported to IR
* fix unit test
* fix unit test
* fix pre-commit
* add functionality to export during optimization
* fix test. Remove additional parameter
* minor fix
* fix unit test
---
 MANIFEST.in | 1 +
 docs/source/guide/tutorials/base/export.rst | 17 +-
 .../base/how_to_train/classification.rst | 54 ++---
 .../tutorials/base/how_to_train/detection.rst | 20 +-
 .../how_to_train/instance_segmentation.rst | 30 +--
 .../how_to_train/semantic_segmentation.rst | 32 +--
 src/otx/core/exporter/base.py | 60 ++++--
 .../exporter/exportable_code/demo/LICENSE | 201 ++++++++++++++++++
 .../exporter/exportable_code/demo/README.md | 2 +-
 src/otx/core/model/anomaly.py | 1 +
 src/otx/core/model/base.py | 7 +
 src/otx/engine/engine.py | 52 +++--
 tests/unit/engine/test_engine.py | 21 ++
 13 files changed, 405 insertions(+), 93 deletions(-)
 create mode 100644 src/otx/core/exporter/exportable_code/demo/LICENSE

diff --git a/MANIFEST.in b/MANIFEST.in
index 7a07397ba97..f8b0efffebd 100644
--- a/MANIFEST.in
+++ b/MANIFEST.in
@@ -4,6 +4,7 @@ recursive-include src/otx *.yaml
 recursive-include src/otx *.json
 recursive-include src/otx requirements.txt
 recursive-include src/otx README.md
+recursive-include src/otx LICENSE
 recursive-exclude src/otx *.c
 graft tests
 global-exclude *.py[cod]

diff --git a/docs/source/guide/tutorials/base/export.rst b/docs/source/guide/tutorials/base/export.rst
index 724e4701a7a..0a92a38c019 100644
--- a/docs/source/guide/tutorials/base/export.rst
+++ b/docs/source/guide/tutorials/base/export.rst
@@ -66,7 +66,20 @@ using the command below:
       --export_format EXPORTABLE_CODE --work-dir outputs/deploy

-After that, you can use the resulting ``openvino.zip`` archive in other applications.
+After that, you can use the resulting ``exportable_code.zip`` archive in other applications.
+
+4. It is also possible to pass an already exported or optimized OpenVINO IR model
+to create an archive with the demo and the IR model packed inside.
+
+.. code-block:: shell
+
+    (otx) ...$ otx export -c CONFIG
+                          --checkpoint {OPENVINO_IR.XML}
+                          --work-dir outputs/deploy
+
+.. note::
+
+    You can also obtain ``exportable_code.zip`` right away during model optimization by passing the ``export_demo_package=True`` parameter to the CLI or API call.

 *************
 Demonstration
@@ -75,7 +88,7 @@ Demonstration
 Using the exported demo, we're able to run the model in the demonstration mode outside of this repository, using only the
 ported ``.zip`` archive with minimum required packages. The demo allows us to apply our model on the custom data or the online footage from a web camera and see how it will work in a real-life scenario. It is not required to install OTX or PyTorch.

-1. Unzip the ``openvino.zip``
+1. Unzip the ``exportable_code.zip``
 archive.

 .. code-block::

diff --git a/docs/source/guide/tutorials/base/how_to_train/classification.rst b/docs/source/guide/tutorials/base/how_to_train/classification.rst
index 1cdec117e28..81a42898269 100644
--- a/docs/source/guide/tutorials/base/how_to_train/classification.rst
+++ b/docs/source/guide/tutorials/base/how_to_train/classification.rst
@@ -85,10 +85,10 @@ please keep the exact same name for the train/val/test folder, to identify the d
     ├── tulips
     val
     ├── daisy
-    ├── ...
+    ├── ...
     test
     ├── daisy
-    ├── ...
+    ├── ...
********* Training @@ -108,26 +108,26 @@ The list of supported recipes for classification is available with the command l .. code-block:: shell (otx) ...$ otx find --task MULTI_CLASS_CLS - ┏━━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┓ - ┃ Task ┃ Model Name ┃ Recipe Path ┃ - ┡━━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┩ - │ MULTI_CLASS_CLS │ openvino_model │ src/otx/recipe/classification/multi_class_cls/openvino_model.yaml │ - │ MULTI_CLASS_CLS │ tv_efficientnet_b0 │ src/otx/recipe/classification/multi_class_cls/tv_efficientnet_b0.yaml │ - │ MULTI_CLASS_CLS │ tv_resnet_50 │ src/otx/recipe/classification/multi_class_cls/tv_resnet_50.yaml │ - │ MULTI_CLASS_CLS │ efficientnet_v2_light │ src/otx/recipe/classification/multi_class_cls/efficientnet_v2_light.yaml │ - │ MULTI_CLASS_CLS │ tv_efficientnet_b3 │ src/otx/recipe/classification/multi_class_cls/tv_efficientnet_b3.yaml │ - │ MULTI_CLASS_CLS │ efficientnet_b0_light │ src/otx/recipe/classification/multi_class_cls/efficientnet_b0_light.yaml │ - │ MULTI_CLASS_CLS │ tv_efficientnet_v2_l │ src/otx/recipe/classification/multi_class_cls/tv_efficientnet_v2_l.yaml │ - │ MULTI_CLASS_CLS │ tv_efficientnet_b1 │ src/otx/recipe/classification/multi_class_cls/tv_efficientnet_b1.yaml │ - │ MULTI_CLASS_CLS │ tv_mobilenet_v3_small │ src/otx/recipe/classification/multi_class_cls/tv_mobilenet_v3_small.yaml │ - │ MULTI_CLASS_CLS │ otx_mobilenet_v3_large │ src/otx/recipe/classification/multi_class_cls/otx_mobilenet_v3_large.yaml │ - │ MULTI_CLASS_CLS │ otx_deit_tiny │ src/otx/recipe/classification/multi_class_cls/otx_deit_tiny.yaml │ - │ MULTI_CLASS_CLS │ tv_efficientnet_b4 │ src/otx/recipe/classification/multi_class_cls/tv_efficientnet_b4.yaml │ - │ MULTI_CLASS_CLS │ otx_efficientnet_v2 │ src/otx/recipe/classification/multi_class_cls/otx_efficientnet_v2.yaml │ - │ MULTI_CLASS_CLS │ mobilenet_v3_large_light │ src/otx/recipe/classification/multi_class_cls/mobilenet_v3_large_light.yaml │ - │ MULTI_CLASS_CLS │ otx_efficientnet_b0 │ src/otx/recipe/classification/multi_class_cls/otx_efficientnet_b0.yaml │ - │ MULTI_CLASS_CLS │ otx_dino_v2 │ src/otx/recipe/classification/multi_class_cls/otx_dino_v2.yaml │ - │ MULTI_CLASS_CLS │ otx_dino_v2_linear_probe │ src/otx/recipe/classification/multi_class_cls/otx_dino_v2_linear_probe.yaml │ + ┏━━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┓ + ┃ Task ┃ Model Name ┃ Recipe Path ┃ + ┡━━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┩ + │ MULTI_CLASS_CLS │ openvino_model │ src/otx/recipe/classification/multi_class_cls/openvino_model.yaml │ + │ MULTI_CLASS_CLS │ tv_efficientnet_b0 │ src/otx/recipe/classification/multi_class_cls/tv_efficientnet_b0.yaml │ + │ MULTI_CLASS_CLS │ tv_resnet_50 │ src/otx/recipe/classification/multi_class_cls/tv_resnet_50.yaml │ + │ MULTI_CLASS_CLS │ efficientnet_v2_light │ src/otx/recipe/classification/multi_class_cls/efficientnet_v2_light.yaml │ + │ MULTI_CLASS_CLS │ tv_efficientnet_b3 │ src/otx/recipe/classification/multi_class_cls/tv_efficientnet_b3.yaml │ + │ MULTI_CLASS_CLS │ efficientnet_b0_light │ src/otx/recipe/classification/multi_class_cls/efficientnet_b0_light.yaml │ + │ MULTI_CLASS_CLS │ tv_efficientnet_v2_l │ src/otx/recipe/classification/multi_class_cls/tv_efficientnet_v2_l.yaml │ + 
│ MULTI_CLASS_CLS │ tv_efficientnet_b1       │ src/otx/recipe/classification/multi_class_cls/tv_efficientnet_b1.yaml         │
+│ MULTI_CLASS_CLS │ tv_mobilenet_v3_small    │ src/otx/recipe/classification/multi_class_cls/tv_mobilenet_v3_small.yaml      │
+│ MULTI_CLASS_CLS │ otx_mobilenet_v3_large   │ src/otx/recipe/classification/multi_class_cls/otx_mobilenet_v3_large.yaml     │
+│ MULTI_CLASS_CLS │ otx_deit_tiny            │ src/otx/recipe/classification/multi_class_cls/otx_deit_tiny.yaml              │
+│ MULTI_CLASS_CLS │ tv_efficientnet_b4       │ src/otx/recipe/classification/multi_class_cls/tv_efficientnet_b4.yaml         │
+│ MULTI_CLASS_CLS │ otx_efficientnet_v2      │ src/otx/recipe/classification/multi_class_cls/otx_efficientnet_v2.yaml        │
+│ MULTI_CLASS_CLS │ mobilenet_v3_large_light │ src/otx/recipe/classification/multi_class_cls/mobilenet_v3_large_light.yaml   │
+│ MULTI_CLASS_CLS │ otx_efficientnet_b0      │ src/otx/recipe/classification/multi_class_cls/otx_efficientnet_b0.yaml        │
+│ MULTI_CLASS_CLS │ otx_dino_v2              │ src/otx/recipe/classification/multi_class_cls/otx_dino_v2.yaml                │
+│ MULTI_CLASS_CLS │ otx_dino_v2_linear_probe │ src/otx/recipe/classification/multi_class_cls/otx_dino_v2_linear_probe.yaml   │
 └─────────────────┴──────────────────────────┴────────────────────────────────────────────────────────────────────────────────┘

 .. tab-item:: API

 .. code-block:: python

 from otx.engine.utils.api import list_models

- model_lists = list_models(task="MULTI_CLASS_CLS", pattern="*efficient")
+ model_lists = list_models(task="MULTI_CLASS_CLS", pattern="*efficient")
 print(model_lists)
 '''
 [
@@ -486,7 +486,7 @@ with OpenVINO™ PTQ.

 .. code-block:: shell

- (otx) ...$ otx optimize --work_dir otx-workspace \
+ (otx) ...$ otx optimize --work_dir otx-workspace \
 --checkpoint otx-workspace/20240312_052847/exported_model.xml
 ...
@@ -505,6 +505,10 @@ with OpenVINO™ PTQ.
 The optimization time highly relies on the hardware characteristics, for example on Intel(R) Core(TM) i9-10980XE it took about 9 seconds.
 Please note, that PTQ will take some time without logging to optimize the model.

+.. note::
+
+    You can also pass the ``export_demo_package=True`` parameter to obtain an ``exportable_code.zip`` archive with the packed optimized model and the demo package. Please refer to the :doc:`export tutorial <../export>`.
+
 3. Finally, we can also evaluate the optimized model by passing
 it to the ``otx test`` function.

@@ -514,7 +518,7 @@ it to the ``otx test`` function.

 .. code-block:: shell

- (otx) ...$ otx test --work_dir otx-workspace \
+ (otx) ...$ otx test --work_dir otx-workspace \
 --checkpoint otx-workspace/20240312_055042/optimized_model.xml \
 --engine.device cpu

diff --git a/docs/source/guide/tutorials/base/how_to_train/detection.rst b/docs/source/guide/tutorials/base/how_to_train/detection.rst
index cbaf7bd0e73..04459ba3963 100644
--- a/docs/source/guide/tutorials/base/how_to_train/detection.rst
+++ b/docs/source/guide/tutorials/base/how_to_train/detection.rst
@@ -129,12 +129,12 @@ The list of supported recipes for object detection is available with the command

 .. code-block:: shell
 (otx) ...$ otx find --task DETECTION --pattern atss
- ┏━━━━━━━━━━━┳━━━━━━━━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┓
- ┃ Task      ┃ Model Name            ┃ Recipe Path                                                    ┃
- ┡━━━━━━━━━━━╇━━━━━━━━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┩
- │ DETECTION │ atss_mobilenetv2_tile │ src/otx/recipe/detection/atss_mobilenetv2_tile.yaml           │
- │ DETECTION │ atss_resnext101       │ src/otx/recipe/detection/atss_resnext101.yaml                 │
- │ DETECTION │ atss_mobilenetv2      │ src/otx/recipe/detection/atss_mobilenetv2.yaml                │
+ ┏━━━━━━━━━━━┳━━━━━━━━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┓
+ ┃ Task      ┃ Model Name            ┃ Recipe Path                                                    ┃
+ ┡━━━━━━━━━━━╇━━━━━━━━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┩
+ │ DETECTION │ atss_mobilenetv2_tile │ src/otx/recipe/detection/atss_mobilenetv2_tile.yaml           │
+ │ DETECTION │ atss_resnext101       │ src/otx/recipe/detection/atss_resnext101.yaml                 │
+ │ DETECTION │ atss_mobilenetv2      │ src/otx/recipe/detection/atss_mobilenetv2.yaml                │
 └───────────┴───────────────────────┴────────────────────────────────────────────────────────────────┘

 .. tab-item:: API

@@ -520,7 +520,7 @@ with OpenVINO™ PTQ.

 .. code-block:: shell

- (otx) ...$ otx optimize --work_dir otx-workspace \
+ (otx) ...$ otx optimize --work_dir otx-workspace \
 --checkpoint otx-workspace/20240312_052847/exported_model.xml
 ...
@@ -539,6 +539,10 @@ with OpenVINO™ PTQ.
 The optimization time highly relies on the hardware characteristics, for example on Intel(R) Core(TM) i9-11900 it took about 25 seconds.
 Please note, that PTQ will take some time without logging to optimize the model.

+.. note::
+
+    You can also pass the ``export_demo_package=True`` parameter to obtain an ``exportable_code.zip`` archive with the packed optimized model and the demo package. Please refer to the :doc:`export tutorial <../export>`.
+
 3. Finally, we can also evaluate the optimized model by passing
 it to the ``otx test`` function.

@@ -548,7 +552,7 @@ it to the ``otx test`` function.

 ..
code-block:: shell - (otx) ...$ otx test --work_dir otx-workspace \ + (otx) ...$ otx test --work_dir otx-workspace \ --checkpoint otx-workspace/20240312_055042/optimized_model.xml \ --engine.device cpu diff --git a/docs/source/guide/tutorials/base/how_to_train/instance_segmentation.rst b/docs/source/guide/tutorials/base/how_to_train/instance_segmentation.rst index 113e76f49cc..44e55b4533a 100644 --- a/docs/source/guide/tutorials/base/how_to_train/instance_segmentation.rst +++ b/docs/source/guide/tutorials/base/how_to_train/instance_segmentation.rst @@ -129,17 +129,17 @@ The list of supported recipes for instance segmentation is available with the co (otx) ...$ otx find --task INSTANCE_SEGMENTATION - ┏━━━━━━━━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┓ - ┃ Task ┃ Model Name ┃ Recipe Path ┃ - ┡━━━━━━━━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┩ - │ INSTANCE_SEGMENTATION │ openvino_model │ src/otx/recipe/instance_segmentation/openvino_model.yaml │ - │ INSTANCE_SEGMENTATION │ maskrcnn_r50 │ src/otx/recipe/instance_segmentation/maskrcnn_r50.yaml │ - │ INSTANCE_SEGMENTATION │ maskrcnn_r50_tile │ src/otx/recipe/instance_segmentation/maskrcnn_r50_tile.yaml │ - │ INSTANCE_SEGMENTATION │ maskrcnn_swint │ src/otx/recipe/instance_segmentation/maskrcnn_swint.yaml │ - │ INSTANCE_SEGMENTATION │ maskrcnn_efficientnetb2b │ src/otx/recipe/instance_segmentation/maskrcnn_efficientnetb2b.yaml │ - │ INSTANCE_SEGMENTATION │ rtmdet_inst_tiny │ src/otx/recipe/instance_segmentation/rtmdet_inst_tiny.yaml │ - │ INSTANCE_SEGMENTATION │ maskrcnn_efficientnetb2b_tile │ src/otx/recipe/instance_segmentation/maskrcnn_efficientnetb2b_tile.yaml │ - │ INSTANCE_SEGMENTATION │ maskrcnn_swint_tile │ src/otx/recipe/instance_segmentation/maskrcnn_swint_tile.yaml │ + ┏━━━━━━━━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┓ + ┃ Task ┃ Model Name ┃ Recipe Path ┃ + ┡━━━━━━━━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┩ + │ INSTANCE_SEGMENTATION │ openvino_model │ src/otx/recipe/instance_segmentation/openvino_model.yaml │ + │ INSTANCE_SEGMENTATION │ maskrcnn_r50 │ src/otx/recipe/instance_segmentation/maskrcnn_r50.yaml │ + │ INSTANCE_SEGMENTATION │ maskrcnn_r50_tile │ src/otx/recipe/instance_segmentation/maskrcnn_r50_tile.yaml │ + │ INSTANCE_SEGMENTATION │ maskrcnn_swint │ src/otx/recipe/instance_segmentation/maskrcnn_swint.yaml │ + │ INSTANCE_SEGMENTATION │ maskrcnn_efficientnetb2b │ src/otx/recipe/instance_segmentation/maskrcnn_efficientnetb2b.yaml │ + │ INSTANCE_SEGMENTATION │ rtmdet_inst_tiny │ src/otx/recipe/instance_segmentation/rtmdet_inst_tiny.yaml │ + │ INSTANCE_SEGMENTATION │ maskrcnn_efficientnetb2b_tile │ src/otx/recipe/instance_segmentation/maskrcnn_efficientnetb2b_tile.yaml │ + │ INSTANCE_SEGMENTATION │ maskrcnn_swint_tile │ src/otx/recipe/instance_segmentation/maskrcnn_swint_tile.yaml │ └───────────────────────┴───────────────────────────────┴────────────────────────────────────────────────────────────────────────────────────┘ .. tab-item:: API @@ -431,7 +431,7 @@ OpenVINO™ model (.xml) with OpenVINO™ PTQ. .. 
code-block:: shell

- (otx) ...$ otx optimize --work_dir otx-workspace \
+ (otx) ...$ otx optimize --work_dir otx-workspace \
 --checkpoint otx-workspace/20240312_052847/exported_model.xml
 ...
@@ -448,6 +448,10 @@ OpenVINO™ model (.xml) with OpenVINO™ PTQ.
 Please note, that PTQ will take some time (generally less than NNCF optimization) without logging to optimize the model.

+.. note::
+
+    You can also pass the ``export_demo_package=True`` parameter to obtain an ``exportable_code.zip`` archive with the packed optimized model and the demo package. Please refer to the :doc:`export tutorial <../export>`.
+
 3. Finally, we can also evaluate the optimized model by passing
 it to the ``otx test`` function.

@@ -457,7 +461,7 @@ it to the ``otx test`` function.

 .. code-block:: shell

- (otx) ...$ otx test --work_dir otx-workspace \
+ (otx) ...$ otx test --work_dir otx-workspace \
 --checkpoint otx-workspace/20240312_055042/optimized_model.xml \
 --engine.device cpu

diff --git a/docs/source/guide/tutorials/base/how_to_train/semantic_segmentation.rst b/docs/source/guide/tutorials/base/how_to_train/semantic_segmentation.rst
index 5022820c147..05f24156c2a 100644
--- a/docs/source/guide/tutorials/base/how_to_train/semantic_segmentation.rst
+++ b/docs/source/guide/tutorials/base/how_to_train/semantic_segmentation.rst
@@ -58,17 +58,17 @@ The list of supported recipes for semantic segmentation is available with the co

 (otx) ...$ otx find --task SEMANTIC_SEGMENTATION
- ┏━━━━━━━━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┓
- ┃ Task                  ┃ Model Name                    ┃ Recipe Path                                                                        ┃
- ┡━━━━━━━━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┩
- │ SEMANTIC_SEGMENTATION │ openvino_model                │ src/otx/recipe/semantic_segmentation/openvino_model.yaml                          │
- │ SEMANTIC_SEGMENTATION │ segnext_t                     │ src/otx/recipe/semantic_segmentation/segnext_t.yaml                               │
- │ SEMANTIC_SEGMENTATION │ segnext_b                     │ src/otx/recipe/semantic_segmentation/segnext_b.yaml                               │
- │ SEMANTIC_SEGMENTATION │ dino_v2                       │ src/otx/recipe/semantic_segmentation/dino_v2.yaml                                 │
- │ SEMANTIC_SEGMENTATION │ litehrnet_18                  │ src/otx/recipe/semantic_segmentation/litehrnet_18.yaml                            │
- │ SEMANTIC_SEGMENTATION │ segnext_s                     │ src/otx/recipe/semantic_segmentation/segnext_s.yaml                               │
- │ SEMANTIC_SEGMENTATION │ litehrnet_x                   │ src/otx/recipe/semantic_segmentation/litehrnet_x.yaml                             │
- │ SEMANTIC_SEGMENTATION │ litehrnet_s                   │ src/otx/recipe/semantic_segmentation/litehrnet_s.yaml                             │
+ ┏━━━━━━━━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┓
+ ┃ Task                  ┃ Model Name                    ┃ Recipe Path                                                                        ┃
+ ┡━━━━━━━━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┩
+ │ SEMANTIC_SEGMENTATION │ openvino_model                │ src/otx/recipe/semantic_segmentation/openvino_model.yaml                          │
+ │ SEMANTIC_SEGMENTATION │ segnext_t                     │ src/otx/recipe/semantic_segmentation/segnext_t.yaml                               │
+ │ SEMANTIC_SEGMENTATION │ segnext_b                     │ src/otx/recipe/semantic_segmentation/segnext_b.yaml                               │
+ │ SEMANTIC_SEGMENTATION │ dino_v2                       │ src/otx/recipe/semantic_segmentation/dino_v2.yaml                                 │
+ │ SEMANTIC_SEGMENTATION │ litehrnet_18                  │ src/otx/recipe/semantic_segmentation/litehrnet_18.yaml                            │
+ │ SEMANTIC_SEGMENTATION │ segnext_s                     │ src/otx/recipe/semantic_segmentation/segnext_s.yaml                               │
+ │ SEMANTIC_SEGMENTATION │ litehrnet_x                   │ src/otx/recipe/semantic_segmentation/litehrnet_x.yaml                             │
+ │ SEMANTIC_SEGMENTATION │ litehrnet_s                   │ src/otx/recipe/semantic_segmentation/litehrnet_s.yaml                             │
 └───────────────────────┴───────────────────────────────┴────────────────────────────────────────────────────────────────────────────────────┘

 .. tab-item:: API

@@ -92,7 +92,7 @@ The list of supported recipes for semantic segmentation is available with the co
 ]
 '''

-1. On this step we will configure configuration
+1. In this step, we will set up the configuration
 with:

 - all necessary configs for litehrnet_18
@@ -342,7 +342,7 @@ OpenVINO™ model (.xml) with OpenVINO™ PTQ.

 .. code-block:: shell

- (otx) ...$ otx optimize --work_dir otx-workspace \
+ (otx) ...$ otx optimize --work_dir otx-workspace \
 --checkpoint otx-workspace/20240312_052847/exported_model.xml
 ...
@@ -359,6 +359,10 @@ OpenVINO™ model (.xml) with OpenVINO™ PTQ.
 Please note, that PTQ will take some time (generally less than NNCF optimization) without logging to optimize the model.

+.. note::
+
+    You can also pass the ``export_demo_package=True`` parameter to obtain an ``exportable_code.zip`` archive with the packed optimized model and the demo package. Please refer to the :doc:`export tutorial <../export>`.
+
 3. Finally, we can also evaluate the optimized model by passing
 it to the ``otx test`` function.

@@ -368,7 +372,7 @@ it to the ``otx test`` function.

 .. code-block:: shell

- (otx) ...$ otx test --work_dir otx-workspace \
+ (otx) ...$ otx test --work_dir otx-workspace \
 --checkpoint otx-workspace/20240312_055042/optimized_model.xml \
 --engine.device cpu

diff --git a/src/otx/core/exporter/base.py b/src/otx/core/exporter/base.py
index 6982afbe929..5224cf53c5e 100644
--- a/src/otx/core/exporter/base.py
+++ b/src/otx/core/exporter/base.py
@@ -13,6 +13,8 @@ from typing import TYPE_CHECKING, Any, Literal
 from zipfile import ZipFile

+from openvino.model_api.models import Model
+
 from otx.core.exporter.exportable_code import demo
 from otx.core.types.export import OTXExportFormatType
 from otx.core.types.precision import OTXPrecisionType
@@ -86,7 +88,12 @@ def export(
         if export_format == OTXExportFormatType.ONNX:
             return self.to_onnx(model, output_dir, base_model_name, precision)
         if export_format == OTXExportFormatType.EXPORTABLE_CODE:
-            return self.to_exportable_code(model, output_dir, base_model_name, precision)
+            return self.to_exportable_code(
+                model,
+                output_dir,
+                base_model_name,
+                precision,
+            )
         msg = f"Unsupported export format: {export_format}"
         raise ValueError(msg)
@@ -154,21 +161,40 @@ def to_exportable_code(
         Returns:
             Path: path to the exported model.
""" - work_dir = Path(demo.__file__).parent + work_demo_dir = Path(demo.__file__).parent parameters: dict[str, Any] = {} - if self.metadata is not None: - parameters["type_of_model"] = self.metadata.get(("model_info", "task_type"), "") - parameters["converter_type"] = self.metadata.get(("model_info", "model_type"), "") - parameters["model_parameters"] = { - "labels": self.metadata.get(("model_info", "labels"), ""), - "labels_ids": self.metadata.get(("model_info", "label_ids"), ""), - } - output_zip_path = output_dir / "exportable_code.zip" Path.mkdir(output_dir, exist_ok=True) + is_ir_model = isinstance(model, Model) + with tempfile.TemporaryDirectory() as temp_dir, ZipFile(output_zip_path, "x") as arch: # model files - path_to_model = self.to_openvino(model, Path(temp_dir), base_model_name, precision) + path_to_model = ( + self.to_openvino(model, Path(temp_dir), base_model_name, precision) + if not is_ir_model + else Path(model.inference_adapter.model_path) + ) + + if not path_to_model.exists(): + msg = f"File {path_to_model} does not exist. Check the model path." + raise RuntimeError(msg) + + if not is_ir_model and self.metadata is not None: + parameters["type_of_model"] = self.metadata.get(("model_info", "task_type"), "") + parameters["converter_type"] = self.metadata.get(("model_info", "model_type"), "") + parameters["model_parameters"] = { + "labels": self.metadata.get(("model_info", "labels"), ""), + "labels_ids": self.metadata.get(("model_info", "label_ids"), ""), + } + elif is_ir_model: + model_info = model.get_model().rt_info["model_info"] + parameters["type_of_model"] = model_info["task_type"].value if "task_type" in model_info else "" + parameters["converter_type"] = model_info["model_type"].value if "model_type" in model_info else "" + parameters["model_parameters"] = { + "labels": model_info["labels"].value if "labels" in model_info else "", + "labels_ids": model_info["label_ids"].value if "label_ids" in model_info else "", + } + arch.write(str(path_to_model), Path("model") / "model.xml") arch.write(path_to_model.with_suffix(".bin"), Path("model") / "model.bin") @@ -178,15 +204,15 @@ def to_exportable_code( ) # python files arch.write( - work_dir / "requirements.txt", + work_demo_dir / "requirements.txt", Path("python") / "requirements.txt", ) - arch.write(work_dir.parents[5] / "LICENSE", Path("python") / "LICENSE") - arch.write(work_dir / "demo.py", Path("python") / "demo.py") - arch.write(work_dir / "README.md", Path("./") / "README.md") - arch.write(work_dir / "setup.py", Path("python") / "setup.py") + arch.write(work_demo_dir / "demo.py", Path("python") / "demo.py") + arch.write(work_demo_dir / "setup.py", Path("python") / "setup.py") + arch.write(work_demo_dir / "README.md", Path("./") / "README.md") + arch.write(work_demo_dir / "LICENSE", Path("./") / "LICENSE") # write demo_package - demo_package = work_dir / "demo_package" + demo_package = work_demo_dir / "demo_package" for root, _, files in os.walk(demo_package): if root.endswith("__pycache__"): continue diff --git a/src/otx/core/exporter/exportable_code/demo/LICENSE b/src/otx/core/exporter/exportable_code/demo/LICENSE new file mode 100644 index 00000000000..24bdeff5e1b --- /dev/null +++ b/src/otx/core/exporter/exportable_code/demo/LICENSE @@ -0,0 +1,201 @@ +Apache License +Version 2.0, January 2004 +http://www.apache.org/licenses/ + +TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + +1. Definitions. 
+ +"License" shall mean the terms and conditions for use, reproduction, +and distribution as defined by Sections 1 through 9 of this document. + +"Licensor" shall mean the copyright owner or entity authorized by +the copyright owner that is granting the License. + +"Legal Entity" shall mean the union of the acting entity and all +other entities that control, are controlled by, or are under common +control with that entity. For the purposes of this definition, +"control" means (i) the power, direct or indirect, to cause the +direction or management of such entity, whether by contract or +otherwise, or (ii) ownership of fifty percent (50%) or more of the +outstanding shares, or (iii) beneficial ownership of such entity. + +"You" (or "Your") shall mean an individual or Legal Entity +exercising permissions granted by this License. + +"Source" form shall mean the preferred form for making modifications, +including but not limited to software source code, documentation +source, and configuration files. + +"Object" form shall mean any form resulting from mechanical +transformation or translation of a Source form, including but +not limited to compiled object code, generated documentation, +and conversions to other media types. + +"Work" shall mean the work of authorship, whether in Source or +Object form, made available under the License, as indicated by a +copyright notice that is included in or attached to the work +(an example is provided in the Appendix below). + +"Derivative Works" shall mean any work, whether in Source or Object +form, that is based on (or derived from) the Work and for which the +editorial revisions, annotations, elaborations, or other modifications +represent, as a whole, an original work of authorship. For the purposes +of this License, Derivative Works shall not include works that remain +separable from, or merely link (or bind by name) to the interfaces of, +the Work and Derivative Works thereof. + +"Contribution" shall mean any work of authorship, including +the original version of the Work and any modifications or additions +to that Work or Derivative Works thereof, that is intentionally +submitted to Licensor for inclusion in the Work by the copyright owner +or by an individual or Legal Entity authorized to submit on behalf of +the copyright owner. For the purposes of this definition, "submitted" +means any form of electronic, verbal, or written communication sent +to the Licensor or its representatives, including but not limited to +communication on electronic mailing lists, source code control systems, +and issue tracking systems that are managed by, or on behalf of, the +Licensor for the purpose of discussing and improving the Work, but +excluding communication that is conspicuously marked or otherwise +designated in writing by the copyright owner as "Not a Contribution." + +"Contributor" shall mean Licensor and any individual or Legal Entity +on behalf of whom a Contribution has been received by Licensor and +subsequently incorporated within the Work. + +2. Grant of Copyright License. Subject to the terms and conditions of +this License, each Contributor hereby grants to You a perpetual, +worldwide, non-exclusive, no-charge, royalty-free, irrevocable +copyright license to reproduce, prepare Derivative Works of, +publicly display, publicly perform, sublicense, and distribute the +Work and such Derivative Works in Source or Object form. + +3. Grant of Patent License. 
Subject to the terms and conditions of +this License, each Contributor hereby grants to You a perpetual, +worldwide, non-exclusive, no-charge, royalty-free, irrevocable +(except as stated in this section) patent license to make, have made, +use, offer to sell, sell, import, and otherwise transfer the Work, +where such license applies only to those patent claims licensable +by such Contributor that are necessarily infringed by their +Contribution(s) alone or by combination of their Contribution(s) +with the Work to which such Contribution(s) was submitted. If You +institute patent litigation against any entity (including a +cross-claim or counterclaim in a lawsuit) alleging that the Work +or a Contribution incorporated within the Work constitutes direct +or contributory patent infringement, then any patent licenses +granted to You under this License for that Work shall terminate +as of the date such litigation is filed. + +4. Redistribution. You may reproduce and distribute copies of the +Work or Derivative Works thereof in any medium, with or without +modifications, and in Source or Object form, provided that You +meet the following conditions: + +(a) You must give any other recipients of the Work or +Derivative Works a copy of this License; and + +(b) You must cause any modified files to carry prominent notices +stating that You changed the files; and + +(c) You must retain, in the Source form of any Derivative Works +that You distribute, all copyright, patent, trademark, and +attribution notices from the Source form of the Work, +excluding those notices that do not pertain to any part of +the Derivative Works; and + +(d) If the Work includes a "NOTICE" text file as part of its +distribution, then any Derivative Works that You distribute must +include a readable copy of the attribution notices contained +within such NOTICE file, excluding those notices that do not +pertain to any part of the Derivative Works, in at least one +of the following places: within a NOTICE text file distributed +as part of the Derivative Works; within the Source form or +documentation, if provided along with the Derivative Works; or, +within a display generated by the Derivative Works, if and +wherever such third-party notices normally appear. The contents +of the NOTICE file are for informational purposes only and +do not modify the License. You may add Your own attribution +notices within Derivative Works that You distribute, alongside +or as an addendum to the NOTICE text from the Work, provided +that such additional attribution notices cannot be construed +as modifying the License. + +You may add Your own copyright statement to Your modifications and +may provide additional or different license terms and conditions +for use, reproduction, or distribution of Your modifications, or +for any such Derivative Works as a whole, provided Your use, +reproduction, and distribution of the Work otherwise complies with +the conditions stated in this License. + +5. Submission of Contributions. Unless You explicitly state otherwise, +any Contribution intentionally submitted for inclusion in the Work +by You to the Licensor shall be under the terms and conditions of +this License, without any additional terms or conditions. +Notwithstanding the above, nothing herein shall supersede or modify +the terms of any separate license agreement you may have executed +with Licensor regarding such Contributions. + +6. Trademarks. 
This License does not grant permission to use the trade +names, trademarks, service marks, or product names of the Licensor, +except as required for reasonable and customary use in describing the +origin of the Work and reproducing the content of the NOTICE file. + +7. Disclaimer of Warranty. Unless required by applicable law or +agreed to in writing, Licensor provides the Work (and each +Contributor provides its Contributions) on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +implied, including, without limitation, any warranties or conditions +of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A +PARTICULAR PURPOSE. You are solely responsible for determining the +appropriateness of using or redistributing the Work and assume any +risks associated with Your exercise of permissions under this License. + +8. Limitation of Liability. In no event and under no legal theory, +whether in tort (including negligence), contract, or otherwise, +unless required by applicable law (such as deliberate and grossly +negligent acts) or agreed to in writing, shall any Contributor be +liable to You for damages, including any direct, indirect, special, +incidental, or consequential damages of any character arising as a +result of this License or out of the use or inability to use the +Work (including but not limited to damages for loss of goodwill, +work stoppage, computer failure or malfunction, or any and all +other commercial damages or losses), even if such Contributor +has been advised of the possibility of such damages. + +9. Accepting Warranty or Additional Liability. While redistributing +the Work or Derivative Works thereof, You may choose to offer, +and charge a fee for, acceptance of support, warranty, indemnity, +or other liability obligations and/or rights consistent with this +License. However, in accepting such obligations, You may act only +on Your own behalf and on Your sole responsibility, not on behalf +of any other Contributor, and only if You agree to indemnify, +defend, and hold each Contributor harmless for any liability +incurred by, or claims asserted against, such Contributor by reason +of your accepting any such warranty or additional liability. + +END OF TERMS AND CONDITIONS + +APPENDIX: How to apply the Apache License to your work. + +To apply the Apache License to your work, attach the following +boilerplate notice, with the fields enclosed by brackets "[]" +replaced with your own identifying information. (Don't include +the brackets!) The text should be enclosed in the appropriate +comment syntax for the file format. We also recommend that a +file or class name and description of purpose be included on the +same "printed page" as the copyright notice for easier +identification within third-party archives. + +Copyright (C) 2018-2021 Intel Corporation + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + +http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
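For reference on the exporter change above: when an IR checkpoint is packed (the ``is_ir_model`` branch of ``to_exportable_code``), the task metadata comes from the ``model_info`` section of the IR's runtime info rather than from ``self.metadata``. Below is a minimal, hedged sketch of inspecting that metadata with the OpenVINO Python API, mirroring the access pattern used in the patch; the IR path is a hypothetical placeholder.

.. code-block:: python

    import openvino as ov

    core = ov.Core()
    # hypothetical path to an IR produced by `otx export`
    ov_model = core.read_model("outputs/deploy/exported_model.xml")

    # "model_info" is the rt_info section written at export time; entries are
    # wrapped runtime values that are unwrapped via `.value`, exactly as the
    # `is_ir_model` branch of `to_exportable_code` does.
    model_info = ov_model.rt_info["model_info"]
    for key in ("task_type", "model_type", "labels", "label_ids"):
        value = model_info[key].value if key in model_info else ""
        print(f"{key}: {value}")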
diff --git a/src/otx/core/exporter/exportable_code/demo/README.md b/src/otx/core/exporter/exportable_code/demo/README.md index 12b94a05401..5ae4fd497bd 100644 --- a/src/otx/core/exporter/exportable_code/demo/README.md +++ b/src/otx/core/exporter/exportable_code/demo/README.md @@ -5,6 +5,7 @@ Exportable code is a .zip archive that contains simple demo to get and visualize ## Structure of generated zip - `README.md` +- `LICENSE` - model - `model.xml` - `model.bin` @@ -26,7 +27,6 @@ Exportable code is a .zip archive that contains simple demo to get and visualize - `__init__.py` - `visualizer.py` - `vis_utils.py` - - `LICENSE` - `demo.py` - `requirements.txt` - `setup.py` diff --git a/src/otx/core/model/anomaly.py b/src/otx/core/model/anomaly.py index fafad629406..fb1b080a42e 100644 --- a/src/otx/core/model/anomaly.py +++ b/src/otx/core/model/anomaly.py @@ -417,6 +417,7 @@ def export( base_name: (str): base name for the exported model file. Extension is defined by the target export format export_format (OTXExportFormatType): format of the output model precision (OTXExportPrecisionType): precision of the output model + Returns: Path: path to the exported model. """ diff --git a/src/otx/core/model/base.py b/src/otx/core/model/base.py index 8084d7aa85e..5b781209dea 100644 --- a/src/otx/core/model/base.py +++ b/src/otx/core/model/base.py @@ -32,6 +32,7 @@ ) from otx.core.data.entity.tile import OTXTileBatchDataEntity, T_OTXTileBatchDataEntity from otx.core.exporter.base import OTXModelExporter +from otx.core.exporter.native import OTXNativeModelExporter from otx.core.metrics import MetricInput, NullMetricCallable from otx.core.optimizer.callable import OptimizerCallableSupportHPO from otx.core.schedulers import LRSchedulerListCallable, PicklableLRSchedulerCallable @@ -595,6 +596,7 @@ def export( @property def _exporter(self) -> OTXModelExporter: + """Defines exporter of the model. Should be overridden in subclasses.""" msg = ( "To export this OTXModel, you should implement an appropriate exporter for it. " "You can try to reuse ones provided in `otx.core.exporter.*`." @@ -881,6 +883,11 @@ def _read_ptq_config_from_ir(self, ov_model: Model) -> dict[str, Any]: return argparser.instantiate_classes(initial_ptq_config).as_dict() + @property + def _exporter(self) -> OTXNativeModelExporter: + """Exporter of the OVModel for exportable code.""" + return OTXNativeModelExporter(input_size=(1, 3, self.model.h, self.model.w), **self._export_parameters) + @property def model_adapter_parameters(self) -> dict: """Model parameters for export.""" diff --git a/src/otx/engine/engine.py b/src/otx/engine/engine.py index 1694c21c6e6..f1e636c60ba 100644 --- a/src/otx/engine/engine.py +++ b/src/otx/engine/engine.py @@ -7,6 +7,7 @@ import inspect import logging +import tempfile from contextlib import contextmanager from pathlib import Path from typing import TYPE_CHECKING, Any, ClassVar, Iterable, Iterator, Literal @@ -477,7 +478,7 @@ def predict( def export( self, - checkpoint: str | Path | None = None, + checkpoint: PathLike | None = None, export_format: OTXExportFormatType = OTXExportFormatType.OPENVINO, export_precision: OTXPrecisionType = OTXPrecisionType.FP32, explain: bool = False, @@ -485,7 +486,7 @@ def export( """Export the trained model to OpenVINO Intermediate Representation (IR) or ONNX formats. Args: - checkpoint (str | Path | None, optional): Checkpoint to export. Defaults to None. + checkpoint (PathLike | None, optional): Checkpoint to export. Defaults to None. 
export_config (ExportConfig | None, optional): Config that allows to set export format and precision. Defaults to None.
 explain (bool): Whether to get "saliency_map" and "feature_vector" or not.
@@ -516,19 +517,33 @@ def export(
         ```
         """
         ckpt_path = str(checkpoint) if checkpoint is not None else self.checkpoint
-
         if ckpt_path is None:
             msg = "To make export, checkpoint must be specified."
             raise RuntimeError(msg)
+        is_ir_ckpt = Path(ckpt_path).suffix in [".xml"]

-        self.model.eval()
-        loaded_checkpoint = torch.load(ckpt_path)
-        self.model.label_info = loaded_checkpoint["state_dict"]["label_info"]
+        if is_ir_ckpt and export_format != OTXExportFormatType.EXPORTABLE_CODE:
+            msg = (
+                "Export format is automatically changed to EXPORTABLE_CODE, "
+                "since an OpenVINO IR model is passed as a checkpoint."
+            )
+            warn(msg, stacklevel=1)
+            export_format = OTXExportFormatType.EXPORTABLE_CODE

-        self.model.load_state_dict(loaded_checkpoint)
+        if is_ir_ckpt and not isinstance(self.model, OVModel):
+            # create OVModel
+            self.model = self._auto_configurator.get_ov_model(
+                model_name=str(checkpoint),
+                label_info=self.datamodule.label_info,
+            )

-        self.model.explain_mode = explain
+        if not is_ir_ckpt:
+            self.model.eval()
+            loaded_checkpoint = torch.load(ckpt_path)
+            self.model.label_info = loaded_checkpoint["state_dict"]["label_info"]
+            self.model.load_state_dict(loaded_checkpoint)
+            self.model.explain_mode = explain

         exported_model_path = self.model.export(
             output_dir=Path(self.work_dir),
             base_name=self._EXPORTED_MODEL_BASE_NAME,
@@ -544,6 +559,7 @@ def optimize(
         checkpoint: PathLike | None = None,
         datamodule: TRAIN_DATALOADERS | OTXDataModule | None = None,
         max_data_subset_size: int | None = None,
+        export_demo_package: bool = False,
     ) -> Path:
         """Applies NNCF.PTQ to the underlying models (now works only for OV models).
@@ -556,6 +572,8 @@ def optimize(
             max_data_subset_size (int | None): The maximum size of the train subset from `datamodule` that
                 would be used for model optimization. If not set, NNCF.PTQ will select subset size
                 according to it's default settings.
+            export_demo_package (bool): Whether to export a demo package with the optimized model.
+                It outputs a zip archive with a stand-alone demo package.

         Returns:
             Path: path to the optimized model.
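As documented above, ``export_demo_package=True`` makes ``optimize`` chain into ``export`` (see the next hunk). A hedged usage sketch of that flow through the public API follows; the data root, work dir, and checkpoint path are hypothetical placeholders.

.. code-block:: python

    from otx.engine import Engine

    engine = Engine(data_root="data/flower_photos", work_dir="otx-workspace")

    # Run PTQ on an exported IR and pack the optimized model, together with
    # the demo, into exportable_code.zip in a single call.
    archive_path = engine.optimize(
        checkpoint="otx-workspace/20240312_052847/exported_model.xml",
        export_demo_package=True,
    )
    print(archive_path)  # .../exportable_code.zip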
@@ -599,11 +617,19 @@ def optimize( if max_data_subset_size is not None: ptq_config["subset_size"] = max_data_subset_size - return model.optimize( - Path(self.work_dir), - optimize_datamodule, - ptq_config, - ) + if not export_demo_package: + return model.optimize( + Path(self.work_dir), + optimize_datamodule, + ptq_config, + ) + + with tempfile.TemporaryDirectory() as tmp_dir: + tmp_model_path = model.optimize(Path(tmp_dir), optimize_datamodule, ptq_config) + return self.export( + checkpoint=tmp_model_path, + export_format=OTXExportFormatType.EXPORTABLE_CODE, + ) def explain( self, diff --git a/tests/unit/engine/test_engine.py b/tests/unit/engine/test_engine.py index a43eb158ac8..e5fe19fc5b8 100644 --- a/tests/unit/engine/test_engine.py +++ b/tests/unit/engine/test_engine.py @@ -204,6 +204,22 @@ def test_exporting(self, fxt_engine, mocker) -> None: precision=OTXPrecisionType.FP32, ) + # check exportable code with IR OpenVINO model + mock_export = mocker.patch("otx.engine.engine.OVModel.export") + fxt_engine.checkpoint = "path/to/checkpoint.xml" + mock_get_ov_model = mocker.patch( + "otx.engine.engine.AutoConfigurator.get_ov_model", + return_value=OVModel(model_name="efficientnet-b0-pytorch", model_type="classification"), + ) + fxt_engine.export(export_format=OTXExportFormatType.EXPORTABLE_CODE, checkpoint="path/to/checkpoint.xml") + mock_get_ov_model.assert_called_once() + mock_export.assert_called_with( + output_dir=Path(fxt_engine.work_dir), + base_name="exported_model", + export_format=OTXExportFormatType.EXPORTABLE_CODE, + precision=OTXPrecisionType.FP32, + ) + def test_optimizing_model(self, fxt_engine, mocker) -> None: with pytest.raises(RuntimeError, match="supports only OV IR or ONNX checkpoints"): fxt_engine.optimize() @@ -221,6 +237,11 @@ def test_optimizing_model(self, fxt_engine, mocker) -> None: fxt_engine.optimize(max_data_subset_size=100) assert mock_ov_model.return_value.optimize.call_args[0][2]["subset_size"] == 100 + # Optimize and export with exportable code + mocker_export = mocker.patch.object(fxt_engine, "export") + fxt_engine.optimize(export_demo_package=True) + mocker_export.assert_called_once() + def test_explain(self, fxt_engine, mocker) -> None: mocker.patch("otx.engine.engine.OTXModel.load_state_dict") mock_process_explain = mocker.patch("otx.algo.utils.xai_utils.process_saliency_maps_in_pred_entity")
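The new unit test above covers the IR-checkpoint path of ``Engine.export`` by patching ``AutoConfigurator.get_ov_model``. For completeness, here is a hedged sketch of the same path through the public API; the data root and checkpoint path are hypothetical placeholders.

.. code-block:: python

    from otx.core.types.export import OTXExportFormatType
    from otx.engine import Engine

    engine = Engine(data_root="data/flower_photos", work_dir="otx-workspace")

    # Passing an OpenVINO IR (.xml) as the checkpoint forces the export format
    # to EXPORTABLE_CODE (a warning is emitted for other formats) and packs the
    # IR together with the demo into exportable_code.zip.
    archive_path = engine.export(
        checkpoint="otx-workspace/20240312_052847/exported_model.xml",
        export_format=OTXExportFormatType.EXPORTABLE_CODE,
    )
    print(archive_path)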