Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Add SciPyDifferentialEvolution optimiser #132

Merged
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 2 additions & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
@@ -1,6 +1,8 @@
# [Unreleased](https://github.com/pybop-team/PyBOP)

## Features

- [#131](https://github.com/pybop-team/PyBOP/issues/131) - Adds `SciPyDifferentialEvolution` optimiser, adds functionality for user-selectable maximum iteration limit to `SciPyMinimize`, `NLoptOptimize`, and `BaseOptimiser` classes.
- [#107](https://github.com/pybop-team/PyBOP/issues/107) - Adds Equivalent Circuit Model (ECM) with examples, Import/Export parameter methods `ParameterSet.import_parameter` and `ParameterSet.export_parameters`, updates default FittingProblem.signal definition to `"Voltage [V]"`, and testing infrastructure
- [#127](https://github.com/pybop-team/PyBOP/issues/127) - Adds Windows and macOS runners to the `test_on_push` action
- [#114](https://github.com/pybop-team/PyBOP/issues/114) - Adds standard plotting class `pybop.StandardPlot()` via plotly backend
Expand Down
2 changes: 1 addition & 1 deletion pybop/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -50,7 +50,7 @@
#
from .optimisers.base_optimiser import BaseOptimiser
from .optimisers.nlopt_optimize import NLoptOptimize
from .optimisers.scipy_minimize import SciPyMinimize
from .optimisers.scipy_optimisers import SciPyMinimize, SciPyDifferentialEvolution
from .optimisers.pints_optimisers import (
GradientDescent,
Adam,
Expand Down
5 changes: 4 additions & 1 deletion pybop/optimisation.py
Original file line number Diff line number Diff line change
Expand Up @@ -58,7 +58,9 @@ def __init__(
if issubclass(self.optimiser, pybop.NLoptOptimize):
self.optimiser = self.optimiser(self.n_parameters)

elif issubclass(self.optimiser, pybop.SciPyMinimize):
elif issubclass(
self.optimiser, (pybop.SciPyMinimize, pybop.SciPyDifferentialEvolution)
):
self.optimiser = self.optimiser()

else:
Expand Down Expand Up @@ -133,6 +135,7 @@ def _run_pybop(self):
cost_function=self.cost,
x0=self.x0,
bounds=self.bounds,
maxiter=self._max_iterations,
)
self.log = self.optimiser.log

Expand Down
3 changes: 2 additions & 1 deletion pybop/optimisers/base_optimiser.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,14 +8,15 @@ class BaseOptimiser:
def __init__(self):
pass

def optimise(self, cost_function, x0=None, bounds=None):
def optimise(self, cost_function, x0=None, bounds=None, maxiter=None):
"""
Optimisiation method to be overloaded by child classes.

"""
self.cost_function = cost_function
self.x0 = x0
self.bounds = bounds
self.maxiter = maxiter

# Run optimisation
result = self._runoptimise(self.cost_function, x0=self.x0, bounds=self.bounds)
Expand Down
7 changes: 6 additions & 1 deletion pybop/optimisers/nlopt_optimize.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,9 +8,10 @@ class NLoptOptimize(BaseOptimiser):
Wrapper class for the NLOpt optimiser class. Extends the BaseOptimiser class.
"""

def __init__(self, n_param, xtol=None, method=None):
def __init__(self, n_param, xtol=None, method=None, maxiter=None):
super().__init__()
self.n_param = n_param
self.maxiter = maxiter

if method is not None:
self.optim = nlopt.opt(method, self.n_param)
Expand Down Expand Up @@ -46,6 +47,10 @@ def cost_wrapper(x, grad):
self.optim.set_lower_bounds(bounds["lower"])
self.optim.set_upper_bounds(bounds["upper"])

# Set max iterations
if self.maxiter is not None:
self.optim.set_maxeval(self.maxiter)

# Run the optimser
x = self.optim.optimize(x0)

Expand Down
63 changes: 0 additions & 63 deletions pybop/optimisers/scipy_minimize.py

This file was deleted.

148 changes: 148 additions & 0 deletions pybop/optimisers/scipy_optimisers.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,148 @@
from scipy.optimize import minimize, differential_evolution
from .base_optimiser import BaseOptimiser


class SciPyMinimize(BaseOptimiser):
    """
    Wrapper class for scipy.optimize.minimize. Extends the BaseOptimiser class.

    Parameters
    ----------
    method : str, optional
        Name of the scipy minimisation method (defaults to "COBYLA").
    bounds : dict, optional
        Dictionary with "lower" and "upper" keys giving per-parameter bounds.
    maxiter : int, optional
        Maximum number of iterations, forwarded to scipy via the options dict.
    """

    def __init__(self, method=None, bounds=None, maxiter=None):
        super().__init__()
        self.method = method
        self.bounds = bounds
        self.maxiter = maxiter
        # Build the scipy options dict from the current maxiter setting.
        self.options = {"maxiter": self.maxiter} if self.maxiter is not None else {}

        if self.method is None:
            self.method = "COBYLA"  # "L-BFGS-B"

    def _runoptimise(self, cost_function, x0, bounds):
        """
        Run the SciPy optimisation method.

        Inputs
        ----------
        cost_function: function for optimising
        method: optimisation algorithm
        x0: initialisation array
        bounds: bounds array (dict with "lower"/"upper" keys) or None

        Returns
        -------
        x: best parameter values found
        final_cost: cost at x
        """
        # Rebuild the options dict here because BaseOptimiser.optimise()
        # overwrites self.maxiter after construction; building options only
        # in __init__ would silently ignore a user-selected iteration limit.
        self.options = {"maxiter": self.maxiter} if self.maxiter is not None else {}

        # Add callback storing history of parameter values
        self.log = [[x0]]

        def callback(x):
            self.log.append([x])

        # Reformat bounds into a list of (lower, upper) pairs. A list (not a
        # generator expression) is required: scipy treats bounds as a
        # sequence, and this also matches SciPyDifferentialEvolution.
        if bounds is not None:
            bounds = [
                (lower, upper) for lower, upper in zip(bounds["lower"], bounds["upper"])
            ]

        output = minimize(
            cost_function,
            x0,
            method=self.method,
            bounds=bounds,
            options=self.options,
            callback=callback,
        )

        # Get performance statistics
        x = output.x
        final_cost = output.fun

        return x, final_cost

    def needs_sensitivities(self):
        """
        Returns True if the optimiser needs sensitivities.
        """
        return False

    def name(self):
        """
        Returns the name of the optimiser.
        """
        return "SciPyMinimize"


class SciPyDifferentialEvolution(BaseOptimiser):
    """
    Wrapper class for the SciPy differential_evolution optimisation method. Extends the BaseOptimiser class.

    Parameters
    ----------
    bounds : sequence or dict, optional
        Bounds for the optimisation variables.
    strategy : str
        Differential evolution strategy (defaults to "best1bin").
    maxiter : int
        Maximum number of generations (defaults to 1000).
    popsize : int
        Population size multiplier (defaults to 15).
    """

    def __init__(self, bounds=None, strategy="best1bin", maxiter=1000, popsize=15):
        super().__init__()
        self.bounds = bounds
        self.strategy = strategy
        self.maxiter = maxiter
        self.popsize = popsize

    def _runoptimise(self, cost_function, x0=None, bounds=None):
        """
        Run the SciPy differential_evolution optimisation method.

        Inputs
        ----------
        cost_function : function
            The objective function to be minimized.
        x0 : array_like
            Initial guess. Only used to determine the dimensionality of the problem.
        bounds : sequence or `Bounds`
            Bounds for variables. There are two ways to specify the bounds:
            1. Instance of `Bounds` class.
            2. Sequence of (min, max) pairs for each element in x, defining the finite lower and upper bounds for the optimizing argument of `cost_function`.

        Returns
        -------
        x : ndarray
            The best parameter values found.
        final_cost : float
            The cost at x.
        """

        if bounds is None:
            raise ValueError("Bounds must be specified for differential_evolution.")

        if x0 is not None:
            print(
                "Ignoring x0. Initial conditions are not used for differential_evolution."
            )

        # Guard against BaseOptimiser.optimise() having overwritten
        # self.maxiter with None — scipy requires an integer here — by
        # falling back to the class default.
        maxiter = self.maxiter if self.maxiter is not None else 1000

        # Add callback storing history of parameter values
        self.log = []

        def callback(x, convergence):
            self.log.append([x])

        # Reformat bounds if necessary: accept the project's dict form and
        # convert it to the (min, max) pair sequence scipy expects.
        if isinstance(bounds, dict):
            bounds = [
                (lower, upper) for lower, upper in zip(bounds["lower"], bounds["upper"])
            ]

        output = differential_evolution(
            cost_function,
            bounds,
            strategy=self.strategy,
            maxiter=maxiter,
            popsize=self.popsize,
            callback=callback,
        )

        # Get performance statistics
        x = output.x
        final_cost = output.fun

        return x, final_cost

    def needs_sensitivities(self):
        """
        Returns False as differential_evolution does not need sensitivities.
        """
        return False

    def name(self):
        """
        Returns the name of the optimiser.
        """
        return "SciPyDifferentialEvolution"
7 changes: 6 additions & 1 deletion tests/unit/test_optimisation.py
Original file line number Diff line number Diff line change
Expand Up @@ -46,6 +46,7 @@ def cost(self, problem):
[
(pybop.NLoptOptimize, "NLoptOptimize"),
(pybop.SciPyMinimize, "SciPyMinimize"),
(pybop.SciPyDifferentialEvolution, "SciPyDifferentialEvolution"),
(pybop.GradientDescent, "Gradient descent"),
(pybop.Adam, "Adam"),
(pybop.CMAES, "Covariance Matrix Adaptation Evolution Strategy (CMA-ES)"),
Expand All @@ -63,7 +64,11 @@ def test_optimiser_classes(self, cost, optimiser_class, expected_name):
assert opt.optimiser is not None
assert opt.optimiser.name() == expected_name

if optimiser_class not in [pybop.NLoptOptimize, pybop.SciPyMinimize]:
if optimiser_class not in [
pybop.NLoptOptimize,
pybop.SciPyMinimize,
pybop.SciPyDifferentialEvolution,
]:
assert opt.optimiser.boundaries is None

if optimiser_class == pybop.NLoptOptimize:
Expand Down
1 change: 1 addition & 0 deletions tests/unit/test_parameterisations.py
Original file line number Diff line number Diff line change
Expand Up @@ -92,6 +92,7 @@ def test_spm_optimisers(self, spm_cost, x0):
optimisers = [
pybop.NLoptOptimize,
pybop.SciPyMinimize,
pybop.SciPyDifferentialEvolution,
pybop.CMAES,
pybop.Adam,
pybop.GradientDescent,
Expand Down