Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Add SciPyDifferentialEvolution optimiser #132

Merged
Merged
Show file tree
Hide file tree
Changes from 4 commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 2 additions & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@
# [Unreleased](https://github.com/pybop-team/PyBOP)

## Features
- [#131](https://github.com/pybop-team/PyBOP/issues/131) - Adds `SciPyDifferentialEvolution` optimiser
- [#127](https://github.com/pybop-team/PyBOP/issues/127) - Adds Windows and macOS runners to the `test_on_push` action
- [#114](https://github.com/pybop-team/PyBOP/issues/114) - Adds standard plotting class `pybop.StandardPlot()` via plotly backend
- [#114](https://github.com/pybop-team/PyBOP/issues/114) - Adds `quick_plot()`, `plot_convergence()`, and `plot_cost2d()` methods
Expand All @@ -9,6 +10,7 @@
- [#120](https://github.com/pybop-team/PyBOP/issues/120) - Updates the parameterisation test settings including the number of iterations

## Bug Fixes
- [#131](https://github.com/pybop-team/PyBOP/issues/131) - Increases the SciPyMinimize optimiser assertion tolerances to reduce CI/CD failures

# [v23.11](https://github.com/pybop-team/PyBOP/releases/tag/v23.11)
- Initial release
Expand Down
2 changes: 1 addition & 1 deletion pybop/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -49,7 +49,7 @@
#
from .optimisers.base_optimiser import BaseOptimiser
from .optimisers.nlopt_optimize import NLoptOptimize
from .optimisers.scipy_minimize import SciPyMinimize
from .optimisers.scipy_optimisers import SciPyMinimize, SciPyDifferentialEvolution
from .optimisers.pints_optimisers import (
GradientDescent,
Adam,
Expand Down
4 changes: 3 additions & 1 deletion pybop/optimisation.py
Original file line number Diff line number Diff line change
Expand Up @@ -58,7 +58,9 @@ def __init__(
if issubclass(self.optimiser, pybop.NLoptOptimize):
self.optimiser = self.optimiser(self.n_parameters)

elif issubclass(self.optimiser, pybop.SciPyMinimize):
elif issubclass(
self.optimiser, (pybop.SciPyMinimize, pybop.SciPyDifferentialEvolution)
):
self.optimiser = self.optimiser()

else:
Expand Down
55 changes: 0 additions & 55 deletions pybop/optimisers/scipy_minimize.py

This file was deleted.

138 changes: 138 additions & 0 deletions pybop/optimisers/scipy_optimisers.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,138 @@
from scipy.optimize import minimize, differential_evolution
from .base_optimiser import BaseOptimiser


class SciPyMinimize(BaseOptimiser):
    """
    Wrapper class for the SciPy minimize optimisation method. Extends the BaseOptimiser class.
    """

    def __init__(self, method=None, bounds=None):
        super().__init__()
        self.method = method
        self.bounds = bounds

        # Default to COBYLA, a gradient-free local method
        if self.method is None:
            self.method = "COBYLA"  # "L-BFGS-B"

    def _runoptimise(self, cost_function, x0, bounds):
        """
        Run the SciPy optimisation method.

        Inputs
        ----------
        cost_function: function for optimising
        method: optimisation algorithm
        x0: initialisation array
        bounds: bounds array (dict with "lower" and "upper" keys, or None)
        """

        # Add callback storing history of parameter values
        self.log = [[x0]]

        def callback(x):
            self.log.append([x])

        # Reformat bounds into a list of (lower, upper) tuples.
        # A list (not a generator expression) is required here: generators
        # are single-use and unsized, while SciPy may inspect the bounds
        # more than once. This also matches SciPyDifferentialEvolution.
        if bounds is not None:
            bounds = [
                (lower, upper) for lower, upper in zip(bounds["lower"], bounds["upper"])
            ]

        output = minimize(
            cost_function, x0, method=self.method, bounds=bounds, callback=callback
        )

        # Get performance statistics
        x = output.x
        final_cost = output.fun

        return x, final_cost

    def needs_sensitivities(self):
        """
        Returns False as this optimiser does not require gradient information.
        """
        return False

    def name(self):
        """
        Returns the name of the optimiser.
        """
        return "SciPyMinimize"


class SciPyDifferentialEvolution(BaseOptimiser):
    """
    Wrapper around SciPy's differential_evolution global optimisation method.

    Extends the BaseOptimiser class. Bounds are mandatory for this
    algorithm, and any initial guess supplied by the caller is ignored.
    """

    def __init__(self, bounds=None, strategy="best1bin", maxiter=1000, popsize=15):
        super().__init__()
        self.bounds = bounds
        self.strategy = strategy
        self.maxiter = maxiter
        self.popsize = popsize

    def _runoptimise(self, cost_function, x0=None, bounds=None):
        """
        Minimise ``cost_function`` using SciPy's differential_evolution.

        Parameters
        ----------
        cost_function : callable
            The objective function to be minimized.
        x0 : array_like, optional
            Not used by differential_evolution; a notice is printed if given.
        bounds : dict, sequence, or Bounds
            Variable bounds. Either an instance of the ``Bounds`` class, a
            sequence of (min, max) pairs, or a dict with "lower" and
            "upper" keys (which is converted to (min, max) pairs).

        Returns
        -------
        tuple
            Best parameter vector found and its final cost.
        """
        if bounds is None:
            raise ValueError("Bounds must be specified for differential_evolution.")

        if x0 is not None:
            print(
                "Ignoring x0. Initial conditions are not used for differential_evolution."
            )

        # Track the parameter values visited during the optimisation
        self.log = []

        def callback(x, convergence):
            self.log.append([x])

        # Convert a {"lower": ..., "upper": ...} dict into (min, max) pairs
        if isinstance(bounds, dict):
            bounds = list(zip(bounds["lower"], bounds["upper"]))

        result = differential_evolution(
            cost_function,
            bounds,
            strategy=self.strategy,
            maxiter=self.maxiter,
            popsize=self.popsize,
            callback=callback,
        )

        # Extract the optimised parameters and the achieved cost
        return result.x, result.fun

    def needs_sensitivities(self):
        """
        Returns False as differential_evolution does not need sensitivities.
        """
        return False

    def name(self):
        """
        Returns the name of the optimiser.
        """
        return "SciPyDifferentialEvolution"
7 changes: 6 additions & 1 deletion tests/unit/test_optimisation.py
Original file line number Diff line number Diff line change
Expand Up @@ -46,6 +46,7 @@ def cost(self, problem):
[
(pybop.NLoptOptimize, "NLoptOptimize"),
(pybop.SciPyMinimize, "SciPyMinimize"),
(pybop.SciPyDifferentialEvolution, "SciPyDifferentialEvolution"),
(pybop.GradientDescent, "Gradient descent"),
(pybop.Adam, "Adam"),
(pybop.CMAES, "Covariance Matrix Adaptation Evolution Strategy (CMA-ES)"),
Expand All @@ -63,7 +64,11 @@ def test_optimiser_classes(self, cost, optimiser_class, expected_name):
assert opt.optimiser is not None
assert opt.optimiser.name() == expected_name

if optimiser_class not in [pybop.NLoptOptimize, pybop.SciPyMinimize]:
if optimiser_class not in [
pybop.NLoptOptimize,
pybop.SciPyMinimize,
pybop.SciPyDifferentialEvolution,
]:
assert opt.optimiser.boundaries is None

if optimiser_class == pybop.NLoptOptimize:
Expand Down
10 changes: 8 additions & 2 deletions tests/unit/test_parameterisations.py
Original file line number Diff line number Diff line change
Expand Up @@ -92,6 +92,7 @@ def test_spm_optimisers(self, spm_cost, x0):
optimisers = [
pybop.NLoptOptimize,
pybop.SciPyMinimize,
pybop.SciPyDifferentialEvolution,
pybop.CMAES,
pybop.Adam,
pybop.GradientDescent,
Expand Down Expand Up @@ -137,8 +138,13 @@ def test_spm_optimisers(self, spm_cost, x0):
x, final_cost = parameterisation.run()

# Assertions
np.testing.assert_allclose(final_cost, 0, atol=1e-2)
np.testing.assert_allclose(x, x0, atol=1e-1)
# Note: SciPyMinimize has a different tolerance due to the local optimisation algorithms
if optimiser in [pybop.SciPyMinimize]:
np.testing.assert_allclose(final_cost, 0, atol=1e-2)
np.testing.assert_allclose(x, x0, atol=2e-1)
else:
np.testing.assert_allclose(final_cost, 0, atol=1e-2)
np.testing.assert_allclose(x, x0, atol=1e-1)

@pytest.mark.parametrize("init_soc", [0.3, 0.7])
@pytest.mark.unit
Expand Down