Skip to content

Commit

Permalink
Changes in response to review
Browse files Browse the repository at this point in the history
  • Loading branch information
brynpickering committed Feb 21, 2025
1 parent f0feabc commit 77808df
Show file tree
Hide file tree
Showing 6 changed files with 62 additions and 91 deletions.
11 changes: 4 additions & 7 deletions docs/advanced/mode.md
Original file line number Diff line number Diff line change
Expand Up @@ -74,13 +74,10 @@ As an example, if you wanted to generate 10 SPORES, all of which are within 10%

```yaml
config.build.mode: spores
config.solve:
spores:
# The number of SPORES to generate:
number: 10
parameters:
# The fraction above the cost-optimal cost to set the maximum cost during SPORES:
spores_slack: 0.1
# The number of SPORES to generate:
config.solve.spores.number: 10
# The fraction above the cost-optimal cost to set the maximum cost during SPORES:
parameters.spores_slack: 0.1
```

You will now also need a `spores_score` cost class in your model.
Expand Down
19 changes: 2 additions & 17 deletions src/calliope/backend/backend_model.py
Original file line number Diff line number Diff line change
Expand Up @@ -937,33 +937,18 @@ def has_integer_or_binary_variables(self) -> bool:
"""

@abstractmethod
def _solve(
self,
solver: str,
solver_io: str | None = None,
solver_options: dict | None = None,
save_logs: str | None = None,
warmstart: bool = False,
**solve_config,
) -> xr.Dataset:
def _solve(self, solve_config: config.Solve, warmstart: bool = False) -> xr.Dataset:
"""Optimise built model.
If solution is optimal, interface objects (decision variables, global
expressions, constraints, objective) can be successfully evaluated for their
values at optimality.
Args:
solver (str): Name of solver to optimise with.
solver_io (str | None, optional): If chosen solver has a python interface, set to "python" for potential
performance gains, otherwise should be left as None. Defaults to None.
solver_options (dict | None, optional): Solver options/parameters to pass directly to solver.
See solver documentation for available parameters that can be influenced. Defaults to None.
save_logs (str | None, optional): If given, solver logs and built LP file will be saved to this filepath.
Defaults to None.
solve_config (config.Solve): Calliope Solve configuration object.
warmstart (bool, optional): If True, and the chosen solver is capable of implementing it, an existing
optimal solution will be used to warmstart the next solve run.
Defaults to False.
**solve_config: solve configuration overrides.
Returns:
xr.Dataset: Dataset of decision variable values if the solution was optimal/feasible,
Expand Down
18 changes: 5 additions & 13 deletions src/calliope/backend/gurobi_backend_model.py
Original file line number Diff line number Diff line change
Expand Up @@ -240,26 +240,18 @@ def get_global_expression( # noqa: D102, override
else:
return global_expression

def _solve(
self,
solver: str,
solver_io: str | None = None,
solver_options: dict | None = None,
save_logs: str | None = None,
warmstart: bool = False,
**solve_config,
) -> xr.Dataset:
def _solve(self, solve_config: config.Solve, warmstart: bool = False) -> xr.Dataset:
self._instance.resetParams()

if solver_options is not None:
for k, v in solver_options.items():
if solve_config.solver_options is not None:
for k, v in solve_config.solver_options.items():
self._instance.setParam(k, v)

if not warmstart:
self._instance.setParam("LPWarmStart", 0)

if save_logs is not None:
logdir = Path(save_logs)
if solve_config.save_logs is not None:
logdir = Path(solve_config.save_logs)
self._instance.setParam("LogFile", (logdir / "gurobi.log").as_posix())

self._instance.update()
Expand Down
38 changes: 15 additions & 23 deletions src/calliope/backend/pyomo_backend_model.py
Original file line number Diff line number Diff line change
Expand Up @@ -204,14 +204,12 @@ def _objective_setter(
self._add_component(name, objective_dict, _objective_setter, "objectives")

def set_objective(self, name: str) -> None: # noqa: D102, override
for obj_name, obj in self.objectives.items():
if obj.item().active and obj_name != name:
self.log("objectives", obj_name, "Objective deactivated.", level="info")
obj.item().deactivate()
if obj_name == name:
obj.item().activate()
self.log("objectives", obj_name, "Objective activated.", level="info")
self.objectives[self.objective].item().deactivate()
self.log("objectives", self.objective, "Objective deactivated.", level="info")

self.objectives[name].item().activate()
self.objective = name
self.log("objectives", name, "Objective activated.", level="info")

def get_parameter( # noqa: D102, override
self, name: str, as_backend_objs: bool = True
Expand Down Expand Up @@ -285,36 +283,30 @@ def get_global_expression( # noqa: D102, override
return global_expression

def _solve( # noqa: D102, override
self,
solver: str,
solver_io: str | None = None,
solver_options: dict | None = None,
save_logs: str | None = None,
warmstart: bool = False,
**solve_config,
self, solve_config: config.Solve, warmstart: bool = False
) -> xr.Dataset:
if solver == "cbc" and self.shadow_prices.is_active:
if solve_config.solver == "cbc" and self.shadow_prices.is_active:
model_warn(
"Switching off shadow price tracker as constraint duals cannot be accessed from the CBC solver"
)
self.shadow_prices.deactivate()
opt = SolverFactory(solver, solver_io=solver_io)
opt = SolverFactory(solve_config.solver, solver_io=solve_config.solver_io)

if solver_options:
for k, v in solver_options.items():
if solve_config.solver_options:
for k, v in solve_config.solver_options.items():
opt.options[k] = v

solve_kwargs = {}
if save_logs is not None:
if solve_config.save_logs is not None:
solve_kwargs.update({"symbolic_solver_labels": True, "keepfiles": True})
logdir = Path(save_logs)
logdir = Path(solve_config.save_logs)
logdir.mkdir(parents=True, exist_ok=True)
TempfileManager.tempdir = logdir # Sets log output dir

if warmstart and solver in ["glpk", "cbc"]:
if warmstart and solve_config.solver in ["glpk", "cbc"]:
model_warn(
f"The chosen solver, {solver}, does not support warmstart, which may "
"impact performance."
f"The chosen solver, {solve_config.solver}, does not support warmstart, "
"which may impact performance."
)
warmstart = False

Expand Down
7 changes: 7 additions & 0 deletions src/calliope/config.py
Original file line number Diff line number Diff line change
Expand Up @@ -215,6 +215,13 @@ class SolveSpores(ConfigBaseModel):
tracking_parameter: str | None = None
"""If given, an input parameter name with which to filter technologies for consideration in SPORES scoring."""

score_iteration_threshold_relative: float = Field(default=0.1, ge=0)
"""A factor to apply to flow capacities above which they will increment the SPORES score.
E.g., if the previous iteration flow capacity was `100` then, with a threshold value of 0.1,
only capacities above `10` in the current iteration will cause the SPORES score to increase for that technology at that node.
If, say, the current iteration's capacity is `8` then the SPORES score will not change for that technology at that node (as if it had no newly deployed capacity).
"""


class Solve(ConfigBaseModel):
"""Base configuration options used when solving a Calliope optimisation problem (`calliope.Model.solve`)."""
Expand Down
60 changes: 29 additions & 31 deletions src/calliope/model.py
Original file line number Diff line number Diff line change
Expand Up @@ -357,9 +357,7 @@ def solve(self, force: bool = False, warmstart: bool = False, **kwargs) -> None:
elif mode == "spores":
results = self._solve_spores(self.config.solve)
else:
results = self.backend._solve(
warmstart=warmstart, **self.config.solve.model_dump()
)
results = self.backend._solve(self.config.solve, warmstart=warmstart)

log_time(
LOGGER,
Expand Down Expand Up @@ -510,6 +508,9 @@ def _solve_operate(self, solver_config: config.Solve) -> xr.Dataset:
Optimisation is undertaken iteratively for slices of the timeseries, with
some data being passed between slices.
Args:
solver_config (config.Solve): Calliope Solve configuration object.
Returns:
xr.Dataset: Results dataset.
"""
Expand All @@ -519,9 +520,7 @@ def _solve_operate(self, solver_config: config.Solve) -> xr.Dataset:

LOGGER.info("Optimisation model | Running first time window.")

step_results = self.backend._solve(
warmstart=False, **solver_config.model_dump()
)
iteration_results = self.backend._solve(solver_config, warmstart=False)

results_list = []

Expand All @@ -531,15 +530,17 @@ def _solve_operate(self, solver_config: config.Solve) -> xr.Dataset:
f"Optimisation model | Running time window starting at {windowstep_as_string}."
)
results_list.append(
step_results.sel(timesteps=slice(None, windowstep - self._TS_OFFSET))
iteration_results.sel(
timesteps=slice(None, windowstep - self._TS_OFFSET)
)
)
previous_step_results = results_list[-1]
previous_iteration_results = results_list[-1]
horizonstep = self._model_data.horizonsteps.sel(windowsteps=windowstep)
new_inputs = self.inputs.sel(
timesteps=slice(windowstep, horizonstep)
).drop_vars(["horizonsteps", "windowsteps"], errors="ignore")

if len(new_inputs.timesteps) != len(step_results.timesteps):
if len(new_inputs.timesteps) != len(iteration_results.timesteps):
LOGGER.info(
"Optimisation model | Reaching the end of the timeseries. "
"Re-building model with shorter time horizon."
Expand All @@ -554,18 +555,16 @@ def _solve_operate(self, solver_config: config.Solve) -> xr.Dataset:
self.backend.update_parameter(param_name, param_data)
self.backend.inputs[param_name] = param_data

if "storage" in step_results:
if "storage" in iteration_results:
self.backend.update_parameter(
"storage_initial",
self._recalculate_storage_initial(previous_step_results),
self._recalculate_storage_initial(previous_iteration_results),
)

step_results = self.backend._solve(
warmstart=False, **solver_config.model_dump()
)
iteration_results = self.backend._solve(solver_config, warmstart=False)

self._start_window_idx = 0
results_list.append(step_results.sel(timesteps=slice(windowstep, None)))
results_list.append(iteration_results.sel(timesteps=slice(windowstep, None)))
results = xr.concat(results_list, dim="timesteps", combine_attrs="no_conflicts")
results.attrs["termination_condition"] = ",".join(
set(result.attrs["termination_condition"] for result in results_list)
Expand Down Expand Up @@ -599,21 +598,18 @@ def _solve_spores(self, solver_config: config.Solve) -> xr.Dataset:
xr.Dataset: Results dataset.
"""
LOGGER.info("Optimisation model | Resetting SPORES parameters.")
self.backend.update_parameter(
"spores_score", self.inputs.get("spores_score", xr.DataArray(0))
)
self.backend.update_parameter(
"spores_baseline_cost",
self.inputs.get("spores_baseline_cost", xr.DataArray(np.inf)),
)
for init_param in ["spores_score", "spores_baseline_cost"]:
default = xr.DataArray(self.inputs.attrs["defaults"][init_param])
self.backend.update_parameter(
init_param, self.inputs.get(init_param, default)
)

self.backend.set_objective(self.config.build.objective)

spores_config: config.SolveSpores = solver_config.spores
if not spores_config.skip_baseline_run:
LOGGER.info("Optimisation model | Running baseline model.")
baseline_results = self.backend._solve(
warmstart=False, **solver_config.model_dump()
)
baseline_results = self.backend._solve(solver_config, warmstart=False)
else:
LOGGER.info("Optimisation model | Using existing baseline model results.")
baseline_results = self.results.copy()
Expand All @@ -625,20 +621,19 @@ def _solve_spores(self, solver_config: config.Solve) -> xr.Dataset:
spores_config.save_per_spore_path / "baseline.nc"
)

# We store the results from each iteration in the `results_list` to later concatenate into a single dataset.
results_list: list[xr.Dataset] = [baseline_results]
spore_range = range(1, spores_config.number + 1)
for spore in spore_range:
LOGGER.info(f"Optimisation model | Running SPORE {spore}.")
self._spores_update_model(baseline_results, results_list[-1], spores_config)

step_results = self.backend._solve(
warmstart=False, **solver_config.model_dump()
)
results_list.append(step_results)
iteration_results = self.backend._solve(solver_config, warmstart=False)
results_list.append(iteration_results)

if spores_config.save_per_spore_path is not None:
LOGGER.info(f"Optimisation model | Saving SPORE {spore} to file.")
step_results.assign_coords(spores=spore).to_netcdf(
iteration_results.assign_coords(spores=spore).to_netcdf(
spores_config.save_per_spore_path / f"spore_{spore}.nc"
)

Expand Down Expand Up @@ -672,7 +667,10 @@ def _spores_update_model(
previous_cap = previous_results["flow_cap"].where(spores_techs)

# Make sure that penalties are applied only to non-negligible deployments of capacity
min_relevant_size = 0.1 * previous_cap.max(["nodes", "techs"])
min_relevant_size = (
spores_config.score_iteration_threshold_relative
* previous_cap.max(["nodes", "techs"])
)

new_score = (
# Where capacity was deployed more than the minimal relevant size, assign an integer penalty (score)
Expand Down

0 comments on commit 77808df

Please sign in to comment.