Add @attrs decorator for base configs (#1229)
* Add @attrs decorator for base configs
* Update config.py
* Fix import

Signed-off-by: Songki Choi <songki.choi@intel.com>
Co-authored-by: Harim Kang <harim.kang@intel.com>
goodsong81 and harimkang authored Aug 29, 2022
1 parent 4a06d98 commit 8b50623
Showing 1 changed file with 8 additions and 0 deletions.
external/model-preparation-algorithm/mpa_tasks/apis/config.py: 8 additions, 0 deletions
@@ -2,6 +2,7 @@
# SPDX-License-Identifier: Apache-2.0
#

+from attr import attrs
from sys import maxsize

from ote_sdk.configuration.elements import (ParameterGroup,
@@ -31,7 +32,10 @@ class LearningRateSchedule(ConfigurableEnum):
CUSTOM = 'custom'


+@attrs
class BaseConfig(ConfigurableParameters):
+
+@attrs
class BaseLearningParameters(ParameterGroup):
batch_size = configurable_integer(
default_value=5,
@@ -93,6 +97,7 @@ class BaseLearningParameters(ParameterGroup):
affects_outcome_of=ModelLifecycle.NONE
)

+@attrs
class BasePostprocessing(ParameterGroup):
result_based_confidence_threshold = configurable_boolean(
default_value=True,
@@ -110,6 +115,7 @@ class BasePostprocessing(ParameterGroup):
affects_outcome_of=ModelLifecycle.INFERENCE
)

+@attrs
class BaseNNCFOptimization(ParameterGroup):
enable_quantization = configurable_boolean(
default_value=True,
@@ -134,6 +140,7 @@ class BaseNNCFOptimization(ParameterGroup):
affects_outcome_of=ModelLifecycle.TRAINING
)

+@attrs
class BasePOTParameter(ParameterGroup):
stat_subset_size = configurable_integer(
header="Number of data samples",
@@ -143,6 +150,7 @@ class BasePOTParameter(ParameterGroup):
max_value=maxsize
)

+@attrs
class BaseAlgoBackendParameters(ParameterGroup):
train_type = selectable(default_value=TrainType.Incremental,
header='train type',
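For context, here is a minimal sketch of what the @attrs decorator imported above does on its own. It comes from the attr package, where attrs is an alias of attr.s; the class and attribute names below are hypothetical illustrations and are not part of this commit or of the OTE SDK classes in the diff.

from attr import attrs, attrib

@attrs
class ExampleGroup:
    # attrib() declares attrs-managed attributes; the @attrs class decorator
    # then generates __init__, __repr__, __eq__, etc. from these declarations.
    batch_size = attrib(default=5)
    learning_rate = attrib(default=0.01)

group = ExampleGroup(batch_size=8)
print(group)  # ExampleGroup(batch_size=8, learning_rate=0.01)

In the diff above, the same decorator is applied to BaseConfig and to each of its nested parameter group classes.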
