feat(table-mode): integrate binding constraints manager in the table mode
laurent-laporte-pro committed Mar 3, 2024
1 parent 776a682 commit fdc82a1
Showing 4 changed files with 190 additions and 37 deletions.
111 changes: 91 additions & 20 deletions antarest/study/business/binding_constraint_management.py
@@ -1,4 +1,4 @@
from typing import Any, Dict, List, Optional, Union
import typing as t

from pydantic import BaseModel, validator

@@ -10,13 +10,15 @@
InvalidConstraintName,
MissingDataError,
NoConstraintError,
ConfigFileNotFound,
)
from antarest.matrixstore.model import MatrixData
from antarest.study.business.utils import execute_or_add_commands
from antarest.study.business.utils import execute_or_add_commands, AllOptionalMetaclass, camel_case_model
from antarest.study.model import Study
from antarest.study.storage.rawstudy.model.filesystem.config.binding_constraint import (
BindingConstraintFrequency,
BindingConstraintOperator,
BindingConstraintProperties as ConfigBCProperties,
)
from antarest.study.storage.rawstudy.model.filesystem.config.model import transform_name_to_id
from antarest.study.storage.storage_service import StudyStorageService
@@ -80,13 +82,13 @@ class ConstraintTermDTO(BaseModel):
data: the constraint term data (link or cluster), if any.
"""

id: Optional[str]
weight: Optional[float]
offset: Optional[float]
data: Optional[Union[AreaLinkDTO, AreaClusterDTO]]
id: t.Optional[str]
weight: t.Optional[float]
offset: t.Optional[float]
data: t.Optional[t.Union[AreaLinkDTO, AreaClusterDTO]]

@validator("id")
def id_to_lower(cls, v: Optional[str]) -> Optional[str]:
def id_to_lower(cls, v: t.Optional[str]) -> t.Optional[str]:
"""Ensure the ID is lower case."""
if v is None:
return None
@@ -101,7 +103,7 @@ def generate_id(self) -> str:

class UpdateBindingConstProps(BaseModel):
key: str
value: Any
value: t.Any


class BindingConstraintPropertiesWithName(BindingConstraintProperties):
@@ -114,11 +116,49 @@ class BindingConstraintDTO(BaseModel):
enabled: bool = True
time_step: BindingConstraintFrequency
operator: BindingConstraintOperator
values: Optional[Union[List[List[MatrixData]], str]] = None
comments: Optional[str] = None
filter_year_by_year: Optional[str] = None
filter_synthesis: Optional[str] = None
constraints: Optional[List[ConstraintTermDTO]]
values: t.Optional[t.Union[t.List[t.List[MatrixData]], str]] = None
comments: t.Optional[str] = None
filter_year_by_year: t.Optional[str] = None
filter_synthesis: t.Optional[str] = None
constraints: t.Optional[t.List[ConstraintTermDTO]]


# noinspection SpellCheckingInspection
_ALL_BINDING_CONSTRAINTS_PATH = "input/bindingconstraints/bindingconstraints"


class _BaseBindingConstraintDTO(
BaseModel,
extra="forbid",
validate_assignment=True,
allow_population_by_field_name=True,
):
name: str
enabled: bool = True
time_step: BindingConstraintFrequency = BindingConstraintFrequency.HOURLY
operator: BindingConstraintOperator = BindingConstraintOperator.EQUAL
comments: str = ""
filter_synthesis: str = "hourly, daily, weekly, monthly, annual"
filter_year_by_year: str = "hourly, daily, weekly, monthly, annual"


@camel_case_model
class GetBindingConstraintDTO(_BaseBindingConstraintDTO, metaclass=AllOptionalMetaclass, use_none=True):
"""
DTO object used to get the binding constraint properties.
"""

@classmethod
def create_dto(cls, bc_section: ConfigBCProperties.BindingConstraintSection) -> "GetBindingConstraintDTO":
return cls(
name=bc_section.name,
enabled=bc_section.enabled,
time_step=bc_section.time_step,
operator=bc_section.operator,
comments=bc_section.comments,
filter_synthesis=bc_section.filter_synthesis,
filter_year_by_year=bc_section.filter_year_by_year,
)


class BindingConstraintManager:
@@ -166,7 +206,7 @@ def parse_constraint(key: str, value: str, char: str, new_config: BindingConstra

@staticmethod
def process_constraint(
constraint_value: Dict[str, Any],
constraint_value: t.Dict[str, t.Any],
) -> BindingConstraintDTO:
new_config: BindingConstraintDTO = BindingConstraintDTO(
id=constraint_value["id"],
@@ -189,8 +229,8 @@ def process_constraint(
@staticmethod
def constraints_to_coeffs(
constraint: BindingConstraintDTO,
) -> Dict[str, List[float]]:
coeffs: Dict[str, List[float]] = {}
) -> t.Dict[str, t.List[float]]:
coeffs: t.Dict[str, t.List[float]] = {}
if constraint.constraints is not None:
for term in constraint.constraints:
if term.id is not None and term.weight is not None:
@@ -201,8 +241,8 @@ def constraints_to_coeffs(
return coeffs

def get_binding_constraint(
self, study: Study, constraint_id: Optional[str]
) -> Union[BindingConstraintDTO, List[BindingConstraintDTO], None]:
self, study: Study, constraint_id: t.Optional[str]
) -> t.Union[BindingConstraintDTO, t.List[BindingConstraintDTO], None]:
storage_service = self.storage_service.get_storage(study)
file_study = storage_service.get_raw(study)
config = file_study.tree.get(["input", "bindingconstraints", "bindingconstraints"])
@@ -289,7 +329,7 @@ def update_binding_constraint(
execute_or_add_commands(study, file_study, [command], self.storage_service)

@staticmethod
def find_constraint_term_id(constraints_term: List[ConstraintTermDTO], constraint_term_id: str) -> int:
def find_constraint_term_id(constraints_term: t.List[ConstraintTermDTO], constraint_term_id: str) -> int:
try:
index = [elm.id for elm in constraints_term].index(constraint_term_id)
return index
@@ -347,7 +387,7 @@ def update_constraint_term(
self,
study: Study,
binding_constraint_id: str,
term: Union[ConstraintTermDTO, str],
term: t.Union[ConstraintTermDTO, str],
) -> None:
file_study = self.storage_service.get_storage(study).get_raw(study)
constraint = self.get_binding_constraint(study, binding_constraint_id)
@@ -404,3 +444,34 @@ def remove_constraint_term(
term_id: str,
) -> None:
return self.update_constraint_term(study, binding_constraint_id, term_id)

def get_all_binding_constraints_props(
self,
study: Study,
) -> t.Mapping[str, GetBindingConstraintDTO]:
"""
Retrieve the properties of all binding constraints in the study.
Args:
study: Study from which to retrieve the binding constraints.
Returns:
A mapping of binding constraint IDs to their properties.
Raises:
ConfigFileNotFound: If the binding constraints configuration file is not found.
"""
file_study = self.storage_service.get_storage(study).get_raw(study)

path = _ALL_BINDING_CONSTRAINTS_PATH
try:
bc_config = file_study.tree.get(path.split("/"), depth=3)
except KeyError:
raise ConfigFileNotFound(path) from None

bc_props = ConfigBCProperties.parse_obj(bc_config)
bc_map = {
bc_id: GetBindingConstraintDTO.create_dto(bc)
for bc_id, bc in bc_props.constraints.items()
}
return bc_map
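
For reference, `GetBindingConstraintDTO` relies on the project helpers `AllOptionalMetaclass` and `camel_case_model`, which are not shown in this diff. The standalone pydantic v1 sketch below approximates their assumed effect (every field optional with a `None` default, camelCase aliases on serialization); the class name, field subset, and alias generator here are illustrative only.

```python
# Standalone sketch (not part of this diff) approximating the assumed behavior of
# AllOptionalMetaclass + camel_case_model: each field is optional (None by default)
# and serialized under a camelCase alias.
import typing as t

from pydantic import BaseModel


def _to_camel(snake: str) -> str:
    """Convert a snake_case field name to camelCase (illustrative alias generator)."""
    head, *tail = snake.split("_")
    return head + "".join(word.capitalize() for word in tail)


class GetBindingConstraintSketch(BaseModel):
    name: t.Optional[str] = None
    time_step: t.Optional[str] = None
    filter_synthesis: t.Optional[str] = None

    class Config:
        alias_generator = _to_camel
        allow_population_by_field_name = True


dto = GetBindingConstraintSketch(name="bc_1", time_step="hourly")
print(dto.dict(by_alias=True))
# {'name': 'bc_1', 'timeStep': 'hourly', 'filterSynthesis': None}
```

Serializing with camelCase aliases is what the table-mode code below leans on when it calls `bc.dict(by_alias=True)`.
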
5 changes: 4 additions & 1 deletion antarest/study/business/table_mode_management.py
@@ -694,7 +694,10 @@ def get_table_data(
for storage in storages
}
elif table_type == TableTemplateType.BINDING_CONSTRAINT:
pass
bc_map = self._binding_constraint_manager.get_all_binding_constraints_props(study)
data = {bc_id: bc.dict(by_alias=True) for bc_id, bc in bc_map.items()}
else: # pragma: no cover
raise NotImplementedError(f"Table type {table_type} not implemented")

df = pd.DataFrame.from_dict(data, orient="index")
if columns:
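
The binding-constraint branch above feeds `get_table_data` a mapping of per-constraint property dicts, which `pd.DataFrame.from_dict(data, orient="index")` turns into a table with one row per constraint ID. A minimal standalone sketch of that shape (the property names and the column filter are illustrative, not the exact DTO aliases):

```python
# Minimal standalone sketch of the table-mode conversion: rows keyed by constraint ID,
# columns keyed by property name, mirroring pd.DataFrame.from_dict(data, orient="index").
import pandas as pd

data = {
    "bc_1": {"enabled": True, "timeStep": "hourly", "operator": "less"},
    "bc_2": {"enabled": False, "timeStep": "daily", "operator": "greater"},
}

df = pd.DataFrame.from_dict(data, orient="index")

columns = ["enabled", "operator"]  # hypothetical column selection, as in `if columns:`
df = df.loc[:, columns]
print(df)  # one row per constraint ID, one column per requested property
```
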
antarest/study/storage/rawstudy/model/filesystem/config/binding_constraint.py
@@ -53,7 +53,7 @@ class AbstractTerm(IniProperties):
"""

weight: float = 0.0
offset: float = 0.0
offset: int = 0

def __str__(self) -> str:
"""String representation used in configuration files."""
@@ -231,7 +231,7 @@ class BindingConstraintProperties(IniProperties):
'operator': <BindingConstraintOperator.LESS: 'less'>,
'terms': {'at.at_dsr 0': {'area': 'at',
'cluster': 'at_dsr 0',
'offset': 0.0,
'offset': 0,
'weight': 6.5}},
'type': <BindingConstraintFrequency.DAILY: 'daily'>},
{'comments': '',
@@ -243,7 +243,7 @@
'operator': <BindingConstraintOperator.GREATER: 'greater'>,
'terms': {'be.be_dsr 0': {'area': 'be',
'cluster': 'be_dsr 0',
'offset': 0.0,
'offset': 0,
'weight': 8.3}},
'type': <BindingConstraintFrequency.DAILY: 'daily'>}]
"""
@@ -268,7 +268,7 @@ class BindingConstraintSection(IniProperties):
... "at.cl1": 1,
... "de.cl2": "-88.77%7",
... "at%de": -0.06,
... "at%es": "8.5%0.5",
... "at%es": "8.5%5",
... }
>>> bc = BindingConstraintProperties.BindingConstraintSection.parse_obj(obj)
@@ -282,19 +282,16 @@
'operator': <BindingConstraintOperator.LESS: 'less'>,
'terms': {'at%de': {'area1': 'at',
'area2': 'de',
'offset': 0.0,
'offset': 0,
'weight': -0.06},
'at%es': {'area1': 'at',
'area2': 'es',
'offset': 0.5,
'weight': 8.5},
'at%es': {'area1': 'at', 'area2': 'es', 'offset': 5, 'weight': 8.5},
'at.cl1': {'area': 'at',
'cluster': 'cl1',
'offset': 0.0,
'offset': 0,
'weight': 1.0},
'de.cl2': {'area': 'de',
'cluster': 'cl2',
'offset': 7.0,
'offset': 7,
'weight': -88.77}},
'type': <BindingConstraintFrequency.HOURLY: 'hourly'>}
@@ -308,10 +305,10 @@
>>> pprint(bc2.to_config())
{'at%de': '-0.06',
'at%es': '8.5%0.5',
'at%es': '8.5%5',
'at.cl1': '1.0',
'comments': '',
'de.cl2': '-88.77%7.0',
'de.cl2': '-88.77%7',
'enabled': True,
'filter-synthesis': 'hourly, annual',
'filter-year-by-year': '',
@@ -324,7 +321,7 @@
id: str
name: str
enabled: bool = True
type: BindingConstraintFrequency = BindingConstraintFrequency.HOURLY
time_step: BindingConstraintFrequency = Field(default=BindingConstraintFrequency.HOURLY, alias="type")
operator: BindingConstraintOperator = BindingConstraintOperator.EQUAL
comments: str = ""
filter_synthesis: str = Field(default="hourly, daily, weekly, monthly, annual", alias="filter-synthesis")
@@ -343,7 +340,7 @@ def _populate_section(cls, values: t.MutableMapping[str, t.Any]) -> t.MutableMap
"id": values.pop("id", None),
"name": values.pop("name", None),
"enabled": values.pop("enabled", None),
"type": values.pop("type", None),
"type": values.pop("time_step", None),
"operator": values.pop("operator", None),
"comments": values.pop("comments", None),
"filter-synthesis": values.pop("filter_synthesis", None),
Expand All @@ -352,6 +349,8 @@ def _populate_section(cls, values: t.MutableMapping[str, t.Any]) -> t.MutableMap

if new_values["id"] is None:
new_values["id"] = _generate_bc_id(new_values["name"])
if new_values["type"] is None:
new_values["type"] = values.pop("type", None)
if new_values["filter-synthesis"] is None:
new_values["filter-synthesis"] = values.pop("filter-synthesis", None)
if new_values["filter-year-by-year"] is None:
@@ -386,7 +385,7 @@ def to_config(self) -> t.Mapping[str, t.Any]:
"id": self.id,
"name": self.name,
"enabled": self.enabled,
"type": self.type,
"type": self.time_step,
"operator": self.operator,
"comments": self.comments,
"filter-synthesis": self.filter_synthesis,
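
As the doctests above show, each raw term value encodes a weight and an optional integer offset in the form "<weight>%<offset>": "8.5%5" parses to weight 8.5 and offset 5, while a bare "-0.06" leaves the offset at 0. The actual parsing lives in `AbstractTerm`/`IniProperties`, which this diff does not show; the snippet below is a standalone, illustrative re-implementation only.

```python
# Standalone re-implementation (for illustration only) of the "<weight>%<offset>"
# term format shown in the doctests; the real logic lives in AbstractTerm/IniProperties.
import typing as t


def parse_term(value: t.Union[str, float, int]) -> t.Tuple[float, int]:
    """Split a raw INI term value into (weight, offset); the offset defaults to 0."""
    weight_str, _, offset_str = str(value).partition("%")
    weight = float(weight_str)
    offset = int(offset_str) if offset_str else 0
    return weight, offset


assert parse_term("8.5%5") == (8.5, 5)
assert parse_term("-88.77%7") == (-88.77, 7)
assert parse_term(-0.06) == (-0.06, 0)
assert parse_term(1) == (1.0, 0)  # e.g. "at.cl1": 1 in the doctest input
```
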
(Diff for the fourth changed file not loaded.)
