Commit 8fafd40

Add Anomaly modelAPI changes to releases/1.4.0 (#2563)

* bug fix for legacy openvino models
* Apply otx anomaly 1.5 changes
* Fix tests
* Fix compression config
* fix modelAPI imports
* update integration tests
* Edit config types
* Update keys in deployed model

---------

Co-authored-by: Ashwin Vaidya <ashwinitinvaidya@gmail.com>
Co-authored-by: Kim, Sungchul <sungchul.kim@intel.com>
3 people authored Oct 24, 2023
1 parent ac8a7dd commit 8fafd40
Showing 27 changed files with 238 additions and 378 deletions.
2 changes: 1 addition & 1 deletion requirements/openvino.txt
@@ -2,7 +2,7 @@
 # OpenVINO Requirements. #
 nncf==2.6.0
 onnx==1.13.0
-openvino-model-api==0.1.3
+openvino-model-api==0.1.6
 openvino==2023.0
 openvino-dev==2023.0
 openvino-telemetry>=2022.1.0
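The bump to 0.1.6 matters for the rest of this commit: the exported model now carries its configuration in embedded ("model_info", ...) entries instead of a separate metadata.json (see the inference.py changes below). A minimal, hypothetical smoke test of that round trip; the file paths, the sample image, and the assumption that create_model resolves the wrapper from the embedded model_type are illustrative and not taken from this commit:

# Hypothetical sketch, not part of this commit: load an exported IR with the
# bumped openvino-model-api and let the wrapper read mean_values, scale_values,
# normalization_scale, etc. from the embedded ("model_info", ...) entries.
import cv2
from openvino.model_api.models.model import Model

model = Model.create_model("openvino.xml")    # assumed local path to the exported IR
prediction = model(cv2.imread("sample.png"))  # preprocessing handled by the wrapper
print(prediction)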
(file name not shown in this view)
@@ -20,12 +20,9 @@
 import numpy as np
 from openvino.model_api.adapters import OpenvinoAdapter
+from openvino.model_api.adapters.utils import RESIZE_TYPES, InputTransform
 from openvino.model_api.models.model import Model
-from openvino.model_api.models.utils import (
-    RESIZE_TYPES,
-    Detection,
-    InputTransform,
-)
+from openvino.model_api.models.utils import Detection

 from otx.api.entities.datasets import DatasetItemEntity
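For reference, openvino-model-api 0.1.6 hosts RESIZE_TYPES and InputTransform in the adapters package, which is what this import shuffle tracks; only Detection still comes from models.utils. A small usage sketch of the relocated helpers, with argument values that are illustrative assumptions rather than something taken from this diff:

# Hypothetical sketch of the relocated preprocessing helpers.
import cv2
import numpy as np
from openvino.model_api.adapters.utils import RESIZE_TYPES, InputTransform

image = cv2.imread("sample.png")

# RESIZE_TYPES maps a resize-strategy name to a resize function.
resize = RESIZE_TYPES["standard"]
resized = resize(image, (256, 256))

# InputTransform bundles channel reversal and mean/scale normalization,
# matching the mean_values/scale_values written into the IR later in this commit.
transform = InputTransform(
    reverse_input_channels=False,
    mean_values=[123.675, 116.28, 103.53],
    scale_values=[58.395, 57.12, 57.375],
)
blob = transform(resized.astype(np.float32))
print(blob.shape)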

5 files were deleted (file names are not shown in this view).

51 changes: 32 additions & 19 deletions src/otx/algorithms/anomaly/tasks/inference.py
@@ -22,7 +22,6 @@
 import subprocess  # nosec B404
 import tempfile
 from glob import glob
-from pathlib import Path
 from typing import Any, Dict, List, Optional, Tuple, Union
 from warnings import warn

@@ -36,7 +35,6 @@
     PostProcessingConfigurationCallback,
 )
 from omegaconf import DictConfig, ListConfig
-from openvino.runtime import Core, serialize
 from pytorch_lightning import Trainer

 from otx.algorithms.anomaly.adapters.anomalib.callbacks import (
@@ -47,6 +45,8 @@
 from otx.algorithms.anomaly.adapters.anomalib.data import OTXAnomalyDataModule
 from otx.algorithms.anomaly.adapters.anomalib.logger import get_logger
 from otx.algorithms.anomaly.configs.base.configuration import BaseAnomalyConfig
+from otx.algorithms.common.utils import embed_ir_model_data
+from otx.algorithms.common.utils.utils import embed_onnx_model_data
 from otx.api.entities.datasets import DatasetEntity
 from otx.api.entities.inference_parameters import InferenceParameters
 from otx.api.entities.metrics import NullPerformance, Performance, ScoreMetric
@@ -296,6 +296,8 @@ def export(
             self._export_to_onnx(onnx_path)

             if export_type == ExportType.ONNX:
+                self._add_metadata_to_ir(onnx_path, export_type)
+
                 with open(onnx_path, "rb") as file:
                     output_model.set_data("model.onnx", file.read())
             else:
@@ -306,7 +308,7 @@ def export(
                 bin_file = glob(os.path.join(self.config.project.path, "*.bin"))[0]
                 xml_file = glob(os.path.join(self.config.project.path, "*.xml"))[0]

-                self._add_metadata_to_ir(xml_file)
+                self._add_metadata_to_ir(xml_file, export_type)

                 with open(bin_file, "rb") as file:
                     output_model.set_data("openvino.bin", file.read())
@@ -319,40 +321,51 @@ def export(
         output_model.set_data("label_schema.json", label_schema_to_bytes(self.task_environment.label_schema))
         self._set_metadata(output_model)

-    def _add_metadata_to_ir(self, xml_file: str) -> None:
-        """Adds the metadata to the model IR.
+    def _add_metadata_to_ir(self, model_file: str, export_type: ExportType) -> None:
+        """Adds the metadata to the model IR or ONNX.

         Adds the metadata to the model IR. So that it can be used with the new modelAPI.
         This is because the metadata.json is not used by the new modelAPI.
         # TODO CVS-114640
         # TODO: Step 1. Remove metadata.json when modelAPI becomes the default inference method.
-        # TODO: Step 2. Remove this function when Anomalib is upgraded as the model graph will contain the required ops
+        # TODO: Step 2. Update this function when Anomalib is upgraded as the model graph will contain the required ops
         # TODO: Step 3. Update modelAPI to remove pre/post-processing steps when Anomalib version is upgraded.
         """
         metadata = self._get_metadata_dict()
-        core = Core()
-        model = core.read_model(xml_file)
+        extra_model_data: Dict[Tuple[str, str], Any] = {}
         for key, value in metadata.items():
-            if key == "transform":
+            if key in ("transform", "min", "max"):
                 continue
-            model.set_rt_info(value, ["model_info", key])
+            extra_model_data[("model_info", key)] = value
         # Add transforms
         if "transform" in metadata:
             for transform_dict in metadata["transform"]["transform"]["transforms"]:
                 transform = transform_dict.pop("__class_fullname__")
                 if transform == "Normalize":
-                    model.set_rt_info(self._serialize_list(transform_dict["mean"]), ["model_info", "mean_values"])
-                    model.set_rt_info(self._serialize_list(transform_dict["std"]), ["model_info", "scale_values"])
+                    extra_model_data[("model_info", "mean_values")] = self._serialize_list(
+                        [x * 255.0 for x in transform_dict["mean"]]
+                    )
+                    extra_model_data[("model_info", "scale_values")] = self._serialize_list(
+                        [x * 255.0 for x in transform_dict["std"]]
+                    )
                 elif transform == "Resize":
-                    model.set_rt_info(transform_dict["height"], ["model_info", "orig_height"])
-                    model.set_rt_info(transform_dict["width"], ["model_info", "orig_width"])
+                    extra_model_data[("model_info", "orig_height")] = transform_dict["height"]
+                    extra_model_data[("model_info", "orig_width")] = transform_dict["width"]
                 else:
                     warn(f"Transform {transform} is not supported currently")
-        model.set_rt_info("AnomalyDetection", ["model_info", "model_type"])
-        tmp_xml_path = Path(Path(xml_file).parent) / "tmp.xml"
-        serialize(model, str(tmp_xml_path))
-        tmp_xml_path.rename(xml_file)
-        Path(str(tmp_xml_path.parent / tmp_xml_path.stem) + ".bin").unlink()
+        # Since we only need the diff of max and min, we fuse the min and max into one op
+        if "min" in metadata and "max" in metadata:
+            extra_model_data[("model_info", "normalization_scale")] = metadata["max"] - metadata["min"]
+
+        extra_model_data[("model_info", "reverse_input_channels")] = False
+        extra_model_data[("model_info", "model_type")] = "AnomalyDetection"
+        extra_model_data[("model_info", "labels")] = "Normal Anomaly"
+        if export_type == ExportType.OPENVINO:
+            embed_ir_model_data(model_file, extra_model_data)
+        elif export_type == ExportType.ONNX:
+            embed_onnx_model_data(model_file, extra_model_data)
+        else:
+            raise RuntimeError(f"not supported export type {export_type}")

     def _serialize_list(self, arr: Union[Tuple, List]) -> str:
         """Converts a list to space separated string."""
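To make the new mapping concrete: anomalib stores Normalize statistics in the 0-1 domain while Model API applies mean/scale in the 0-255 pixel domain, hence the multiplication by 255.0; and since score normalization only needs the span between min and max, the two values are fused into a single normalization_scale. A worked example with assumed metadata values:

# Illustrative values only; the real metadata comes from self._get_metadata_dict().
metadata = {
    "image_threshold": 0.5,
    "min": 0.0,
    "max": 2.0,
    "transform": {"transform": {"transforms": [
        {"__class_fullname__": "Resize", "height": 256, "width": 256},
        {"__class_fullname__": "Normalize",
         "mean": [0.485, 0.456, 0.406], "std": [0.229, 0.224, 0.225]},
    ]}},
}

normalize = metadata["transform"]["transform"]["transforms"][1]
mean_values = " ".join(str(x * 255.0) for x in normalize["mean"])   # ~ "123.675 116.28 103.53"
scale_values = " ".join(str(x * 255.0) for x in normalize["std"])   # ~ "58.395 57.12 57.375"
normalization_scale = metadata["max"] - metadata["min"]             # 2.0

print(mean_values, scale_values, normalization_scale)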
(The remaining changed files are not shown in this view.)

0 comments on commit 8fafd40
