Add PostInferenceHook feature initial implementation and notebook #355

Merged: 39 commits from post-inference-hooks into main on May 15, 2024.
The diff below shows changes from 21 of the 39 commits.

Commits (39):
2bd5493
Initial post inference hook implementation
ljcornel Mar 11, 2024
050ba7c
Add file system data collection and empty label trigger
ljcornel Mar 12, 2024
8b32ee3
Fix log level setting in actions
ljcornel Mar 12, 2024
5526ca3
Update notebook 012
ljcornel Mar 12, 2024
a95579b
Fix RateLimiter iteration method
ljcornel Mar 12, 2024
8bdb868
Update notebook documentation
ljcornel Mar 12, 2024
f26d8c8
Add `ObjectCountTrigger`
ljcornel Mar 13, 2024
f435367
Fix object count trigger, minor fix in job helpers
ljcornel Mar 13, 2024
f226b2b
Improve string representation of hooks, actions and triggers
ljcornel Mar 14, 2024
8bfc9fe
Add `HttpRequestAction`
ljcornel Mar 14, 2024
79bbd23
Add config serialization mechanism to save and load hooks
ljcornel Mar 18, 2024
31febba
Fix inference time benchmark
ljcornel Mar 18, 2024
65a7f7e
Remove absolute path resolution in `FileSystemDataCollection` and imp…
ljcornel Mar 19, 2024
f502a07
Update 012_post_inference_hooks.ipynb
ljcornel Mar 19, 2024
a281671
Add integration test for post inference hook
ljcornel Mar 19, 2024
c53713b
Merge branch 'main' into post-inference-hooks
ljcornel Mar 19, 2024
a72a243
Update test cassettes
ljcornel Mar 20, 2024
d78d5d3
Fix `Iterator` import for python 3.10
ljcornel Mar 20, 2024
b2ed981
Merge branch 'main' into post-inference-hooks
ljcornel Mar 28, 2024
a2a27cd
Add exception logging to actions and remove cassettes
ljcornel Mar 28, 2024
071ae1f
Update test cassettes
ljcornel Mar 28, 2024
29728cb
Improve label trigger using set logic
ljcornel Apr 2, 2024
5afb717
Improve label trigger using set logic -- initialization
ljcornel Apr 2, 2024
1769acf
Address PR comments
ljcornel Apr 2, 2024
39bf0b5
Merge branch 'post-inference-hooks' of https://github.com/openvinotoo…
ljcornel Apr 2, 2024
6b6572f
Address PR comments
ljcornel May 6, 2024
a96e979
Merge branch 'main' into post-inference-hooks
ljcornel May 6, 2024
08da432
Add option to limit queue size for post inference actions
ljcornel May 6, 2024
dbe43ba
Merge branch 'main' into post-inference-hooks
ljcornel May 6, 2024
b745772
Update inference_hook_interfaces.py
ljcornel May 7, 2024
e2b178c
Merge branch 'main' into post-inference-hooks
ljcornel May 7, 2024
9459fd4
Fix black and flake8 issues
ljcornel May 7, 2024
d4494a7
Fix label trigger, minor improvement to model load procedure
ljcornel May 10, 2024
987a0f6
Merge branch 'main' into post-inference-hooks
ljcornel May 10, 2024
7d605c4
Update model-api and openvino requirement, improve notebook
ljcornel May 14, 2024
19c9845
Fix cassette recording workflow
ljcornel May 14, 2024
e4883cf
Merge branch 'main' into post-inference-hooks
ljcornel May 14, 2024
f725086
Update DEVELOP cassettes
ljcornel May 15, 2024
6fa4095
Update LEGACY cassettes
ljcornel May 15, 2024
94 changes: 90 additions & 4 deletions geti_sdk/deployment/deployment.py
@@ -11,7 +11,7 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions
# and limitations under the License.

import datetime
import json
import logging
import os
@@ -27,6 +27,7 @@
from geti_sdk.rest_converters import ProjectRESTConverter

from .deployed_model import DeployedModel
from .inference_hook_interfaces import PostInferenceHookInterface
from .utils import OVMS_README_PATH, generate_ovms_model_name


@@ -49,6 +50,7 @@ def __attrs_post_init__(self):
self._inference_converters: Dict[str, Any] = {}
self._path_to_temp_resources: Optional[str] = None
self._requires_resource_cleanup: bool = False
self._post_inference_hooks: List[PostInferenceHookInterface] = []

@property
def is_single_task(self) -> bool:
@@ -99,6 +101,15 @@ def save(self, path_to_folder: Union[str, os.PathLike]) -> bool:
with open(project_filepath, "w") as project_file:
json.dump(project_dict, project_file, indent=4)

# Save post inference hooks, if any
if self.post_inference_hooks:
hook_config_file = os.path.join(deployment_folder, "hook_config.json")
hook_configs: List[Dict[str, Any]] = []
for hook in self.post_inference_hooks:
hook_configs.append(hook.to_dict())
with open(hook_config_file, "w") as file:
json.dump({"post_inference_hooks": hook_configs}, file)

# Clean up temp resources if needed
if self._requires_resource_cleanup:
self._remove_temporary_resources()
@@ -135,7 +146,23 @@ def from_folder(cls, path_to_folder: Union[str, os.PathLike]) -> "Deployment":
models.append(
DeployedModel.from_folder(os.path.join(deployment_folder, task_folder))
)
return cls(models=models, project=project)
deployment = cls(models=models, project=project)

# Load post inference hooks, if any
hook_config_file = os.path.join(deployment_folder, "hook_config.json")
if os.path.isfile(hook_config_file):
available_hooks = {
subcls.__name__: subcls
for subcls in PostInferenceHookInterface.__subclasses__()
}
with open(hook_config_file, "r") as file:
hook_dict = json.load(file)
for hook_data in hook_dict["post_inference_hooks"]:
for hook_name, hook_args in hook_data.items():
target_hook = available_hooks[hook_name]
hook = target_hook.from_dict(hook_args)
deployment.add_post_inference_hook(hook)
return deployment
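
For reference, the round trip above serializes each hook under its class name: save() dumps a list of hook.to_dict() results, and from_folder() resolves each top-level key against the subclasses of PostInferenceHookInterface. A hypothetical hook_config.json is sketched below; the nested trigger/action keys depend on each hook's to_dict() implementation and are assumptions here, with the class names taken from the commit messages:

{
    "post_inference_hooks": [
        {
            "PostInferenceHook": {
                "trigger": {"EmptyLabelTrigger": {}},
                "action": {"FileSystemDataCollection": {"target_folder": "data"}}
            }
        }
    ]
}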

def load_inference_models(self, device: str = "CPU"):
"""
@@ -153,13 +180,15 @@ def load_inference_models(self, device: str = "CPU"):
self._are_models_loaded = True
logging.info(f"Inference models loaded on device `{device}` successfully.")

def infer(self, image: np.ndarray) -> Prediction:
def infer(self, image: np.ndarray, name: Optional[str] = None) -> Prediction:
"""
Run inference on an image for the full model chain in the deployment.

:param image: Image to run inference on, as a numpy array containing the pixel
data. The image is expected to have dimensions [height x width x channels],
with the channels in RGB order
:param name: Optional name for the image, if specified this will be used in
any post inference hooks belonging to the deployment.
:return: inference results
"""
self._check_models_loaded()
@@ -172,9 +201,12 @@ def infer(self, image: np.ndarray) -> Prediction:
# Multi-task inference
else:
prediction = self._infer_pipeline(image=image, explain=False)
self._execute_post_inference_hooks(
image=image, prediction=prediction, name=name
)
return prediction

def explain(self, image: np.ndarray) -> Prediction:
def explain(self, image: np.ndarray, name: Optional[str] = None) -> Prediction:
"""
Run inference on an image for the full model chain in the deployment. The
resulting prediction will also contain saliency maps and the feature vector
@@ -183,6 +215,8 @@ def explain(self, image: np.ndarray) -> Prediction:
:param image: Image to run inference on, as a numpy array containing the pixel
data. The image is expected to have dimensions [height x width x channels],
with the channels in RGB order
:param name: Optional name for the image, if specified this will be used in
any post inference hooks belonging to the deployment.
:return: inference results
"""
self._check_models_loaded()
@@ -195,6 +229,9 @@ def explain(self, image: np.ndarray) -> Prediction:
# Multi-task inference
else:
prediction = self._infer_pipeline(image=image, explain=True)
self._execute_post_inference_hooks(
image=image, prediction=prediction, name=name
)
return prediction

def _check_models_loaded(self) -> None:
@@ -437,3 +474,52 @@ def generate_ovms_config(self, output_folder: Union[str, os.PathLike]) -> None:
f"file with instructions on how to launch OVMS, connect to it and run "
f"inference. Please follow the instructions outlined there to get started."
)

@property
def post_inference_hooks(self) -> List[PostInferenceHookInterface]:
"""
Return the currently active post inference hooks for the deployment

:return: list of PostInferenceHook objects
"""
return self._post_inference_hooks

def clear_inference_hooks(self) -> None:
"""
Remove all post inference hooks for the deployment
"""
n_hooks = len(self.post_inference_hooks)
self._post_inference_hooks = []
if n_hooks != 0:
logging.info(
f"Post inference hooks cleared. {n_hooks} hooks were removed "
f"successfully"
)

def add_post_inference_hook(self, hook: PostInferenceHookInterface) -> None:
"""
Add a post inference hook, which will be executed after each call to
`Deployment.infer`

:param hook: PostInferenceHook to be added to the deployment
"""
self._post_inference_hooks.append(hook)
logging.info(f"Hook `{hook}` added.")
logging.info(
f"Deployment now contains {len(self.post_inference_hooks)} "
f"post inference hooks."
)
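
Taken together with from_folder() and infer() above, a minimal usage sketch looks like the following. The hook construction is illustrative only: PostInferenceHook, EmptyLabelTrigger and FileSystemDataCollection are named in the commit messages, but their import path and constructor signatures are assumptions here.

import cv2

from geti_sdk.deployment import Deployment
# Module path assumed; the trigger/action classes are named in the commits
from geti_sdk.post_inference_hooks import (
    EmptyLabelTrigger,
    FileSystemDataCollection,
    PostInferenceHook,
)

deployment = Deployment.from_folder("deployment")  # hypothetical folder
deployment.load_inference_models(device="CPU")

# Collect images that come back without any predicted labels (assumed wiring)
hook = PostInferenceHook(
    trigger=EmptyLabelTrigger(),
    action=FileSystemDataCollection(target_folder="collected_data"),
)
deployment.add_post_inference_hook(hook)

# `name` is forwarded to the hooks, e.g. for use as a filename
image = cv2.cvtColor(cv2.imread("sample.jpg"), cv2.COLOR_BGR2RGB)
prediction = deployment.infer(image, name="sample")

Saving the deployment afterwards would then write the hook configuration to hook_config.json, as shown in save() above.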

def _execute_post_inference_hooks(
self, image: np.ndarray, prediction: Prediction, name: Optional[str] = None
) -> None:
"""
Execute all post inference hooks

:param image: Numpy image which was inferred
:param prediction: Prediction for the image
:param name: Optional name for the image
"""
timestamp = datetime.datetime.now()
for hook in self._post_inference_hooks:
hook.run(image, prediction, name, timestamp)
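
Since _execute_post_inference_hooks invokes each hook as hook.run(image, prediction, name, timestamp), that call signature is the contract a custom hook must satisfy. A minimal sketch follows, assuming PostInferenceHookInterface requires nothing beyond run; a real subclass would presumably also implement the to_dict/from_dict pair used by the save/load mechanism above.

import datetime
import logging
from typing import Optional

import numpy as np

from geti_sdk.data_models import Prediction
from geti_sdk.deployment.inference_hook_interfaces import PostInferenceHookInterface


class LoggingHook(PostInferenceHookInterface):
    """Log a one-line summary after every inference call."""

    def run(
        self,
        image: np.ndarray,
        prediction: Prediction,
        name: Optional[str] = None,
        timestamp: Optional[datetime.datetime] = None,
    ) -> None:
        label = name if name is not None else "unnamed image"
        logging.info(
            f"{timestamp}: inference on `{label}` produced "
            f"{len(prediction.annotations)} annotation(s)"
        )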