Add "properties_path" in BundleWorkflow #7542

Merged: 23 commits, merged Apr 1, 2024
Changes from 6 commits
Commits (23)
129dc88
add properties_path
KumoLiu Mar 13, 2024
5aa4f38
remove ignite based properties
KumoLiu Mar 13, 2024
b2ea289
fix unittest
KumoLiu Mar 13, 2024
cf34758
[pre-commit.ci] auto fixes from pre-commit.com hooks
pre-commit-ci[bot] Mar 13, 2024
67670a1
fix ci
KumoLiu Mar 13, 2024
cca300e
Merge branch 'bundle' of https://github.com/KumoLiu/MONAI into bundle
KumoLiu Mar 13, 2024
e941dde
Merge remote-tracking branch 'yliu/bundle' into bundle
KumoLiu Mar 26, 2024
06d9a34
Revert "Merge remote-tracking branch 'yliu/bundle' into bundle"
KumoLiu Mar 26, 2024
22735af
Revert "Revert "Merge remote-tracking branch 'yliu/bundle' into bundle""
KumoLiu Mar 26, 2024
e274826
Revert "Merge remote-tracking branch 'yliu/bundle' into bundle"
KumoLiu Mar 26, 2024
d671512
Revert "Merge branch 'bundle' of https://github.com/KumoLiu/MONAI int…
KumoLiu Mar 26, 2024
ca90a4b
Revert "remove ignite based properties"
KumoLiu Mar 26, 2024
6526f59
DCO Remediation Commit for YunLiu <55491388+KumoLiu@users.noreply.git…
KumoLiu Mar 26, 2024
cc61a92
Merge branch 'dev' into bundle
KumoLiu Mar 26, 2024
54ac375
fix flake8
KumoLiu Mar 26, 2024
40bb130
add determinism for mixup
KumoLiu Mar 26, 2024
5e6edaa
fix mypy
KumoLiu Mar 26, 2024
918cb77
add unittest
KumoLiu Mar 27, 2024
5a90086
Merge remote-tracking branch 'origin/dev' into bundle
KumoLiu Mar 27, 2024
53797a8
Merge branch 'dev' into bundle
KumoLiu Mar 27, 2024
b7a21b0
Merge remote-tracking branch 'yliu/bundle' into bundle
KumoLiu Apr 1, 2024
a717800
Merge branch 'dev' into bundle
KumoLiu Apr 1, 2024
b320035
fix ci
KumoLiu Apr 1, 2024
38 changes: 0 additions & 38 deletions monai/bundle/properties.py
@@ -43,11 +43,6 @@
         BundleProperty.REQUIRED: True,
         BundlePropertyConfig.ID: "dataset_dir",
     },
-    "trainer": {
-        BundleProperty.DESC: "training workflow engine.",
-        BundleProperty.REQUIRED: True,
-        BundlePropertyConfig.ID: f"train{ID_SEP_KEY}trainer",
-    },
     "network_def": {
         BundleProperty.DESC: "network module for the training.",
         BundleProperty.REQUIRED: False,
@@ -63,23 +58,12 @@
         BundleProperty.REQUIRED: True,
         BundlePropertyConfig.ID: f"train{ID_SEP_KEY}dataset",
     },
-    "train_inferer": {
-        BundleProperty.DESC: "MONAI Inferer object to execute the model computation in training.",
-        BundleProperty.REQUIRED: True,
-        BundlePropertyConfig.ID: f"train{ID_SEP_KEY}inferer",
-    },
     "train_dataset_data": {
         BundleProperty.DESC: "data source for the training dataset.",
         BundleProperty.REQUIRED: False,
         BundlePropertyConfig.ID: f"train{ID_SEP_KEY}dataset{ID_SEP_KEY}data",
         BundlePropertyConfig.REF_ID: None,  # no reference to this ID
     },
-    "train_handlers": {
-        BundleProperty.DESC: "event-handlers for the training logic.",
-        BundleProperty.REQUIRED: False,
-        BundlePropertyConfig.ID: f"train{ID_SEP_KEY}handlers",
-        BundlePropertyConfig.REF_ID: f"train{ID_SEP_KEY}trainer{ID_SEP_KEY}train_handlers",
-    },
     "train_preprocessing": {
         BundleProperty.DESC: "preprocessing for the training input data.",
         BundleProperty.REQUIRED: False,
@@ -98,12 +82,6 @@
         BundlePropertyConfig.ID: f"train{ID_SEP_KEY}key_metric",
         BundlePropertyConfig.REF_ID: f"train{ID_SEP_KEY}trainer{ID_SEP_KEY}key_train_metric",
     },
-    "evaluator": {
-        BundleProperty.DESC: "validation workflow engine.",
-        BundleProperty.REQUIRED: False,
-        BundlePropertyConfig.ID: f"validate{ID_SEP_KEY}evaluator",
-        BundlePropertyConfig.REF_ID: "validator",  # this REF_ID is the arg name of `ValidationHandler`
-    },
     "val_interval": {
         BundleProperty.DESC: "validation interval during the training.",
         BundleProperty.REQUIRED: False,
@@ -175,33 +153,17 @@
         BundleProperty.REQUIRED: True,
         BundlePropertyConfig.ID: "dataset",
     },
-    "evaluator": {
-        BundleProperty.DESC: "inference / evaluation workflow engine.",
-        BundleProperty.REQUIRED: True,
-        BundlePropertyConfig.ID: "evaluator",
-    },
     "network_def": {
         BundleProperty.DESC: "network module for the inference.",
         BundleProperty.REQUIRED: True,
         BundlePropertyConfig.ID: "network_def",
     },
-    "inferer": {
-        BundleProperty.DESC: "MONAI Inferer object to execute the model computation in inference.",
-        BundleProperty.REQUIRED: True,
-        BundlePropertyConfig.ID: "inferer",
-    },
     "dataset_data": {
         BundleProperty.DESC: "data source for the inference / evaluation dataset.",
         BundleProperty.REQUIRED: False,
         BundlePropertyConfig.ID: f"dataset{ID_SEP_KEY}data",
         BundlePropertyConfig.REF_ID: None,  # no reference to this ID
     },
-    "handlers": {
-        BundleProperty.DESC: "event-handlers for the inference / evaluation logic.",
-        BundleProperty.REQUIRED: False,
-        BundlePropertyConfig.ID: "handlers",
-        BundlePropertyConfig.REF_ID: f"evaluator{ID_SEP_KEY}val_handlers",
-    },
     "preprocessing": {
         BundleProperty.DESC: "preprocessing for the input data.",
         BundleProperty.REQUIRED: False,
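The entries removed above (the ignite-based "trainer", "evaluator", inferer, and handler properties) are no longer hard-coded defaults. A workflow that still relies on them can re-register them at runtime with add_property, as the updated tests further below do, or supply them through the new properties_path JSON file added in this PR. A minimal sketch of the add_property route; the config path here is a placeholder, not part of this PR:

from monai.bundle import ConfigWorkflow

# "configs/inference.json" is a hypothetical path for illustration; point it at a real bundle config.
workflow = ConfigWorkflow(config_file="configs/inference.json", workflow_type="infer")

# Re-register the formerly built-in "inferer" property so it is tracked again;
# config_id must match the id used inside the bundle config.
workflow.add_property(name="inferer", required=True, config_id="inferer")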
25 changes: 20 additions & 5 deletions monai/bundle/workflows.py
@@ -11,6 +11,7 @@
 
 from __future__ import annotations
 
+import json
 import os
 import sys
 import time
@@ -24,6 +25,7 @@
 from monai.bundle.config_parser import ConfigParser
 from monai.bundle.properties import InferProperties, MetaProperties, TrainProperties
 from monai.bundle.utils import DEFAULT_EXP_MGMT_SETTINGS, EXPR_KEY, ID_REF_KEY, ID_SEP_KEY
+from monai.config import PathLike
 from monai.utils import BundleProperty, BundlePropertyConfig, deprecated_arg, deprecated_arg_default, ensure_tuple
 
 __all__ = ["BundleWorkflow", "ConfigWorkflow"]
@@ -46,6 +48,7 @@ class BundleWorkflow(ABC):
             or "infer", "inference", "eval", "evaluation" for a inference workflow,
             other unsupported string will raise a ValueError.
             default to `None` for common workflow.
+        properties_path: the path to the JSON file of properties.
 
     """

@@ -59,16 +62,26 @@ class BundleWorkflow(ABC):
         new_name="workflow_type",
         msg_suffix="please use `workflow_type` instead.",
     )
-    def __init__(self, workflow_type: str | None = None, workflow: str | None = None):
+    def __init__(
+        self, workflow_type: str | None = None, workflow: str | None = None, properties_path: PathLike | None = None
+    ):
         workflow_type = workflow if workflow is not None else workflow_type
-        if workflow_type is None:
+        if workflow_type is None and properties_path is None:
             self.properties = copy(MetaProperties)
             self.workflow_type = None
             return
-        if workflow_type.lower() in self.supported_train_type:
+        if properties_path is not None:
+            properties_path = Path(properties_path)
+            if not properties_path.is_file():
+                raise ValueError(f"Property file {properties_path} does not exist.")
+            with open(properties_path) as json_file:
+                self.properties = json.load(json_file)
+            self.workflow_type = None
+            return
+        if workflow_type.lower() in self.supported_train_type:  # type: ignore[union-attr]
             self.properties = {**TrainProperties, **MetaProperties}
             self.workflow_type = "train"
-        elif workflow_type.lower() in self.supported_infer_type:
+        elif workflow_type.lower() in self.supported_infer_type:  # type: ignore[union-attr]
             self.properties = {**InferProperties, **MetaProperties}
             self.workflow_type = "infer"
         else:
@@ -206,6 +219,7 @@ class ConfigWorkflow(BundleWorkflow):
             or "infer", "inference", "eval", "evaluation" for a inference workflow,
             other unsupported string will raise a ValueError.
             default to `None` for common workflow.
+        properties_path: the path to the JSON file of properties.
         override: id-value pairs to override or add the corresponding config content.
             e.g. ``--net#input_chns 42``, ``--net %/data/other.json#net_arg``

@@ -230,10 +244,11 @@ def __init__(
         tracking: str | dict | None = None,
         workflow_type: str | None = None,
         workflow: str | None = None,
+        properties_path: PathLike | None = None,
         **override: Any,
     ) -> None:
         workflow_type = workflow if workflow is not None else workflow_type
-        super().__init__(workflow_type=workflow_type)
+        super().__init__(workflow_type=workflow_type, properties_path=properties_path)
         if config_file is not None:
             _config_files = ensure_tuple(config_file)
             self.config_root_path = Path(_config_files[0]).parent
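For reference, a short sketch of what the new constructor branch does when properties_path is given: the JSON file is loaded as the property table (using plain-string keys such as "required" and "id" that match the BundleProperty / BundlePropertyConfig values) and workflow_type stays None. The subclass below is a hypothetical stub, used only to make the abstract base class instantiable for this illustration:

import json
import tempfile

from monai.bundle import BundleWorkflow


class MinimalWorkflow(BundleWorkflow):
    # Hypothetical stub: implements the abstract methods as no-ops so the
    # constructor behaviour can be exercised in isolation.
    def initialize(self):
        pass

    def run(self):
        pass

    def finalize(self):
        pass

    def _get_property(self, name, property):
        return None

    def _set_property(self, name, property, value):
        pass


# Write a tiny properties file using the plain-string schema shown in
# tests/testing_data/fl_train_properties.json (description / required / id / refer_id).
props = {"bundle_root": {"description": "root path of the bundle.", "required": True, "id": "bundle_root"}}
with tempfile.NamedTemporaryFile("w", suffix=".json", delete=False) as f:
    json.dump(props, f)

wf = MinimalWorkflow(properties_path=f.name)
print(wf.properties)     # the loaded dict replaces the built-in property tables
print(wf.workflow_type)  # None, since the type is not inferred from a properties file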
10 changes: 8 additions & 2 deletions tests/test_bundle_workflow.py
@@ -33,7 +33,10 @@
 
 TEST_CASE_2 = [os.path.join(os.path.dirname(__file__), "testing_data", "inference.yaml")]
 
-TEST_CASE_3 = [os.path.join(os.path.dirname(__file__), "testing_data", "config_fl_train.json")]
+TEST_CASE_3 = [
+    os.path.join(os.path.dirname(__file__), "testing_data", "config_fl_train.json"),
+    os.path.join(os.path.dirname(__file__), "testing_data", "fl_train_properties.json"),
+]
 
 
 class TestBundleWorkflow(unittest.TestCase):
@@ -101,10 +104,11 @@ def test_inference_config(self, config_file):
             logging_file=os.path.join(os.path.dirname(__file__), "testing_data", "logging.conf"),
             **override,
         )
+        inferer.add_property(name="inferer", required=True, config_id="inferer")
         self._test_inferer(inferer)
 
     @parameterized.expand([TEST_CASE_3])
-    def test_train_config(self, config_file):
+    def test_train_config(self, config_file, properties_path):
         # test standard MONAI model-zoo config workflow
         trainer = ConfigWorkflow(
             workflow_type="train",
@@ -113,6 +117,7 @@ def test_train_config(self, config_file):
             init_id="initialize",
             run_id="run",
             final_id="finalize",
+            properties_path=properties_path,
         )
         # should initialize before parsing any bundle content
         trainer.initialize()
@@ -144,6 +149,7 @@ def test_train_config(self, config_file):
     def test_non_config(self):
         # test user defined python style workflow
         inferer = NonConfigWorkflow(self.filename, self.data_dir)
+        inferer.add_property(name="inferer", required=True)
         self._test_inferer(inferer)


126 changes: 126 additions & 0 deletions tests/testing_data/fl_train_properties.json
@@ -0,0 +1,126 @@
{
"bundle_root": {
"description": "root path of the bundle.",
"required": true,
"id": "bundle_root"
},
"device": {
"description": "target device to execute the bundle workflow.",
"required": true,
"id": "device"
},
"dataset_dir": {
"description": "directory path of the dataset.",
"required": true,
"id": "dataset_dir"
},
"trainer": {
"description": "training workflow engine.",
"required": true,
"id": "train::trainer"
},
"network_def": {
"description": "network module for the training.",
"required": false,
"id": "network_def"
},
"max_epochs": {
"description": "max number of epochs to execute the training.",
"required": true,
"id": "train::trainer::max_epochs"
},
"train_dataset": {
"description": "PyTorch dataset object for the training logic.",
"required": true,
"id": "train::dataset"
},
"train_inferer": {
"description": "MONAI Inferer object to execute the model computation in training.",
"required": true,
"id": "train::inferer"
},
"train_dataset_data": {
"description": "data source for the training dataset.",
"required": false,
"id": "train::dataset::data",
"refer_id": null
},
"train_handlers": {
"description": "event-handlers for the training logic.",
"required": false,
"id": "train::handlers",
"refer_id": "train::trainer::train_handlers"
},
"train_preprocessing": {
"description": "preprocessing for the training input data.",
"required": false,
"id": "train::preprocessing",
"refer_id": "train::dataset::transform"
},
"train_postprocessing": {
"description": "postprocessing for the training model output data.",
"required": false,
"id": "train::postprocessing",
"refer_id": "train::trainer::postprocessing"
},
"train_key_metric": {
"description": "key metric to compute on the training data.",
"required": false,
"id": "train::key_metric",
"refer_id": "train::trainer::key_train_metric"
},
"evaluator": {
"description": "validation workflow engine.",
"required": false,
"id": "validate::evaluator",
"refer_id": "validator"
},
"val_interval": {
"description": "validation interval during the training.",
"required": false,
"id": "val_interval",
"refer_id": "interval"
},
"val_handlers": {
"description": "event-handlers for the validation logic.",
"required": false,
"id": "validate::handlers",
"refer_id": "validate::evaluator::val_handlers"
},
"val_dataset": {
"description": "PyTorch dataset object for the validation logic.",
"required": false,
"id": "validate::dataset",
"refer_id": "validate::dataloader::dataset"
},
"val_dataset_data": {
"description": "data source for the validation dataset.",
"required": false,
"id": "validate::dataset::data",
"refer_id": null
},
"val_inferer": {
"description": "MONAI Inferer object to execute the model computation in validation.",
"required": false,
"id": "validate::inferer",
"refer_id": "validate::evaluator::inferer"
},
"val_preprocessing": {
"description": "preprocessing for the validation input data.",
"required": false,
"id": "validate::preprocessing",
"refer_id": "validate::dataset::transform"
},
"val_postprocessing": {
"description": "postprocessing for the validation model output data.",
"required": false,
"id": "validate::postprocessing",
"refer_id": "validate::evaluator::postprocessing"
},
"val_key_metric": {
"description": "key metric to compute on the validation data.",
"required": false,
"id": "validate::key_metric",
"refer_id": "validate::evaluator::key_val_metric"
}
}
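The keys in this file ("description", "required", "id", "refer_id") are the plain-string values of the BundleProperty / BundlePropertyConfig constants used by monai/bundle/properties.py, and "::" matches ID_SEP_KEY. A hedged usage sketch that pairs this file with the FL train config from the test above; the paths assume the sketch is run from a MONAI repository checkout, and the check_properties call simply reports required properties the config does not provide:

import os

from monai.bundle import ConfigWorkflow

# Path assumes a MONAI repository checkout containing the test data used in this PR.
testing_data = os.path.join("tests", "testing_data")

trainer = ConfigWorkflow(
    workflow_type="train",
    config_file=os.path.join(testing_data, "config_fl_train.json"),
    logging_file=os.path.join(testing_data, "logging.conf"),
    properties_path=os.path.join(testing_data, "fl_train_properties.json"),
)
trainer.initialize()
# List the required properties from fl_train_properties.json that the config does not define.
print(trainer.check_properties())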