Merge pull request #495 from pybop-team/492-bug-fix-transformations-for-gaussianloglikelihood

Transformation fixes
martinjrobins committed Sep 14, 2024
2 parents d5c7b3a + 6569eaa commit 2062c2f
Showing 16 changed files with 295 additions and 95 deletions.
6 changes: 3 additions & 3 deletions examples/standalone/cost.py
@@ -36,14 +36,14 @@ def __init__(self, problem=None):
"""
super().__init__(problem)

self.parameters = pybop.Parameters(
self._parameters = pybop.Parameters(
pybop.Parameter(
"x",
initial_value=4.2,
bounds=[-1, 10],
),
)
self.x0 = self.parameters.initial_value()
self.x0 = self._parameters.initial_value()

def compute(
self, y: dict = None, dy: np.ndarray = None, calculate_grad: bool = False
@@ -59,4 +59,4 @@ def compute(
float
The calculated cost value for the given parameter.
"""
return self.parameters["x"].value ** 2 + 42
return self._parameters["x"].value ** 2 + 42
18 changes: 10 additions & 8 deletions pybop/costs/_likelihoods.py
@@ -111,9 +111,11 @@ def __init__(
self._dsigma_scale = dsigma_scale
self._logpi = -0.5 * self.n_data * np.log(2 * np.pi)

# Add sigma parameter, join with self.parameters, reapply transformations
self.sigma = Parameters()
self._add_sigma_parameters(sigma0)
self.parameters.join(self.sigma)
self.join_parameters(self.sigma)
self.transformation = self._parameters.construct_transformation()

def _add_sigma_parameters(self, sigma0):
sigma0 = [sigma0] if not isinstance(sigma0, list) else sigma0
@@ -235,11 +237,11 @@ def __init__(

# Store the likelihood and prior
self._log_likelihood = log_likelihood
self.parameters = self._log_likelihood.parameters
self._parameters = self._log_likelihood.parameters
self._has_separable_problem = self._log_likelihood.has_separable_problem

if log_prior is None:
self._prior = JointLogPrior(*self.parameters.priors())
self._prior = JointLogPrior(*self._parameters.priors())
else:
self._prior = log_prior

@@ -271,16 +273,16 @@ def compute(

if calculate_grad:
if isinstance(self._prior, BasePrior):
log_prior, dp = self._prior.logpdfS1(self.parameters.current_value())
log_prior, dp = self._prior.logpdfS1(self._parameters.current_value())
else:
# Compute log prior first
log_prior = self._prior.logpdf(self.parameters.current_value())
log_prior = self._prior.logpdf(self._parameters.current_value())

# Compute a finite difference approximation of the gradient of the log prior
delta = self.parameters.initial_value() * self.gradient_step
delta = self._parameters.initial_value() * self.gradient_step
dp = []

for parameter, step_size in zip(self.parameters, delta):
for parameter, step_size in zip(self._parameters, delta):
param_value = parameter.value
upper_value = param_value * (1 + step_size)
lower_value = param_value * (1 - step_size)
@@ -293,7 +295,7 @@
)
dp.append(gradient)
else:
log_prior = self._prior.logpdf(self.parameters.current_value())
log_prior = self._prior.logpdf(self._parameters.current_value())

if not np.isfinite(log_prior).any():
return (-np.inf, -self.grad_fail) if calculate_grad else -np.inf
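After the sigma parameters are joined, the likelihood now rebuilds `self.transformation` so that the search-space transformation covers the noise parameters as well as the model parameters. Below is a minimal sketch of the parameter-level mechanics, using only methods that appear in this diff (`join`, `construct_transformation`, `get_bounds`); the parameter names, values, and the `pybop.LogTransformation` class are illustrative assumptions rather than part of this change.

```python
import pybop

# One log-transformed model parameter plus a sigma parameter, joined after
# construction in the same way GaussianLogLikelihood now does.
theta = pybop.Parameters(
    pybop.Parameter(
        "R0 [Ohm]",
        initial_value=1e-2,
        bounds=[1e-4, 1e-1],
        transformation=pybop.LogTransformation(),  # assumed transformation class
    )
)
sigma = pybop.Parameters(
    pybop.Parameter("Sigma for output 1", initial_value=0.02, bounds=[1e-4, 0.1])
)

theta.join(sigma)
transformation = theta.construct_transformation()  # now spans both parameter sets
print(theta.get_bounds(apply_transform=True))      # bounds mapped into search space
```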
2 changes: 1 addition & 1 deletion pybop/costs/_weighted_cost.py
@@ -62,7 +62,7 @@ def __init__(self, *costs, weights: Optional[list[float]] = None):
super().__init__()

for cost in self.costs:
self.parameters.join(cost.parameters)
self.join_parameters(cost.parameters)

# Weighted costs do not use this functionality
self._has_separable_problem = False
25 changes: 19 additions & 6 deletions pybop/costs/base_cost.py
@@ -34,7 +34,7 @@ class BaseCost:
"""

def __init__(self, problem: Optional[BaseProblem] = None):
self.parameters = Parameters()
self._parameters = Parameters()
self.transformation = None
self.problem = problem
self.verbose = False
@@ -44,17 +44,17 @@ def __init__(self, problem: Optional[BaseProblem] = None):
self._de = 1.0
if isinstance(self.problem, BaseProblem):
self._target = self.problem.target
self.parameters.join(self.problem.parameters)
self._parameters.join(self.problem.parameters)
self.n_outputs = self.problem.n_outputs
self.signal = self.problem.signal
self.transformation = self.parameters.construct_transformation()
self.transformation = self._parameters.construct_transformation()
self._has_separable_problem = True
self.grad_fail = None
self.set_fail_gradient()

@property
def n_parameters(self):
return len(self.parameters)
return len(self._parameters)

@property
def has_separable_problem(self):
@@ -95,8 +95,8 @@ def __call__(
self.has_transform = self.transformation is not None and apply_transform
if self.has_transform:
inputs = self.transformation.to_model(inputs)
inputs = self.parameters.verify(inputs)
self.parameters.update(values=list(inputs.values()))
inputs = self._parameters.verify(inputs)
self._parameters.update(values=list(inputs.values()))

y, dy = None, None
if self._has_separable_problem:
@@ -183,3 +183,16 @@ def verify_args(self, dy: ndarray, calculate_grad: bool):
raise ValueError(
"Forward model sensitivities need to be provided alongside `calculate_grad=True` for `cost.compute`."
)

def join_parameters(self, parameters):
"""
Join new parameters to the cost. The fail gradient is reset whenever the join changes the number of parameters.
"""
original_n_params = self.n_parameters
self._parameters.join(parameters)
if original_n_params != self.n_parameters:
self.set_fail_gradient()

@property
def parameters(self):
return self._parameters
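Writes to `cost.parameters` now go through the `join_parameters` setter, with the container itself held in `self._parameters` and exposed read-only. A minimal sketch of the intended usage follows; the subclass, parameter names, and values are illustrative assumptions rather than part of this change.

```python
import pybop

class QuadraticCost(pybop.BaseCost):
    """Toy standalone cost with a single parameter (illustrative only)."""

    def __init__(self):
        super().__init__()
        self.join_parameters(
            pybop.Parameters(pybop.Parameter("x", initial_value=1.0, bounds=[-5, 5]))
        )

    def compute(self, y=None, dy=None, calculate_grad=False):
        return self.parameters["x"].value ** 2

cost = QuadraticCost()
print(cost.n_parameters)  # 1

# Joining further parameters goes through the setter, which also resets the
# fail gradient because the parameter count has changed.
cost.join_parameters(
    pybop.Parameters(pybop.Parameter("y", initial_value=0.0, bounds=[-5, 5]))
)
print(cost.n_parameters)  # 2
```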
2 changes: 1 addition & 1 deletion pybop/costs/fitting_costs.py
@@ -329,7 +329,7 @@ def compute(
float
The observer cost (negative of the log likelihood).
"""
inputs = self.parameters.as_dict()
inputs = self._parameters.as_dict()
log_likelihood = self._observer.log_likelihood(
self._target, self._observer.domain_data, inputs
)
1 change: 1 addition & 0 deletions pybop/models/base_model.py
@@ -148,6 +148,7 @@ def build(
A valid initial state, e.g. the initial state of charge or open-circuit voltage.
Defaults to None, indicating that the existing initial state of charge (for an ECM)
or initial concentrations (for an EChem model) will be used.
Accepted keys are either ``"Initial open-circuit voltage [V]"`` or ``"Initial SoC"``.
dataset : pybop.Dataset or dict, optional
The dataset to be used in the model construction.
check_model : bool, optional
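The added docstring line names the two accepted `initial_state` keys. A short sketch of passing either key to `build`; the SPM model choice and the numeric values are illustrative assumptions.

```python
import pybop

model = pybop.lithium_ion.SPM()

# Start from a given state of charge ...
model.build(initial_state={"Initial SoC": 0.7})

# ... or, equivalently, from an initial open-circuit voltage:
# model.build(initial_state={"Initial open-circuit voltage [V]": 3.9})
```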
2 changes: 1 addition & 1 deletion pybop/optimisers/base_optimiser.py
@@ -73,8 +73,8 @@ def __init__(

if isinstance(cost, BaseCost):
self.cost = cost
self.parameters = self.cost.parameters
self._transformation = self.cost.transformation
self.parameters.join(cost.parameters)
self.set_allow_infeasible_solutions()
if isinstance(cost, WeightedCost):
self.minimising = cost.minimising
18 changes: 11 additions & 7 deletions pybop/optimisers/scipy_optimisers.py
@@ -88,7 +88,7 @@ def _run(self):
x=self._transformation.to_model(result.x)
if self._transformation
else result.x,
final_cost=self.cost(result.x),
final_cost=self.cost(result.x, apply_transform=True),
n_iterations=nit,
scipy_result=result,
)
@@ -165,13 +165,13 @@ def cost_wrapper(self, x):
self.log_update(x=[x])

if not self._options["jac"]:
cost = self.cost(x) / self._cost0
cost = self.cost(x, apply_transform=True) / self._cost0
if np.isinf(cost):
self.inf_count += 1
cost = 1 + 0.9**self.inf_count # for fake finite gradient
return cost if self.minimising else -cost

L, dl = self.cost(x, calculate_grad=True)
L, dl = self.cost(x, calculate_grad=True, apply_transform=True)
return (L, dl) if self.minimising else (-L, -dl)

def _run_optimiser(self):
Expand All @@ -198,7 +198,7 @@ def base_callback(intermediate_result: Union[OptimizeResult, np.ndarray]):
cost = intermediate_result.fun
else:
x_best = intermediate_result
cost = self.cost(x_best)
cost = self.cost(x_best, apply_transform=True)

self.log_update(
x_best=x_best,
@@ -212,7 +212,7 @@ def base_callback(intermediate_result: Union[OptimizeResult, np.ndarray]):
)

# Compute the absolute initial cost and resample if required
self._cost0 = np.abs(self.cost(self.x0))
self._cost0 = np.abs(self.cost(self.x0, apply_transform=True))
if np.isinf(self._cost0):
for _i in range(1, self.num_resamples):
try:
@@ -224,7 +224,7 @@ def base_callback(intermediate_result: Union[OptimizeResult, np.ndarray]):
stacklevel=2,
)
break
self._cost0 = np.abs(self.cost(self.x0))
self._cost0 = np.abs(self.cost(self.x0, apply_transform=True))
if not np.isinf(self._cost0):
break
if np.isinf(self._cost0):
@@ -352,7 +352,11 @@ def callback(intermediate_result: OptimizeResult):

def cost_wrapper(x):
self.log_update(x=[x])
return self.cost(x) if self.minimising else -self.cost(x)
return (
self.cost(x, apply_transform=True)
if self.minimising
else -self.cost(x, apply_transform=True)
)

return differential_evolution(
cost_wrapper,
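The SciPy wrappers above now evaluate the cost with `apply_transform=True` because the optimiser proposes candidate points in the transformed search space, and the cost maps them back to model space before evaluation. A toy illustration of that split, using a plain SciPy minimiser and a hand-written log transformation rather than the pybop classes:

```python
import numpy as np
from scipy.optimize import minimize

def model_space_cost(theta):
    return (theta - 0.05) ** 2                       # minimum at theta = 0.05

def search_space_cost(q, apply_transform=True):
    # The optimiser proposes q = log(theta); map back to model space first.
    theta = np.exp(q[0]) if apply_transform else q[0]
    return model_space_cost(theta)

result = minimize(search_space_cost, x0=[np.log(0.5)], method="Nelder-Mead")
print(np.exp(result.x[0]))                           # ~0.05 once mapped back to model space
```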
16 changes: 10 additions & 6 deletions pybop/parameters/parameter.py
@@ -341,12 +341,16 @@ def get_bounds(self, apply_transform: bool = False) -> dict:
for param in self.param.values():
if param.bounds is not None:
if apply_transform and param.transformation is not None:
bounds["lower"].append(
float(param.transformation.to_search(param.bounds[0]))
)
bounds["upper"].append(
float(param.transformation.to_search(param.bounds[1]))
)
lower = float(param.transformation.to_search(param.bounds[0]))
upper = float(param.transformation.to_search(param.bounds[1]))
if np.isnan(lower) or np.isnan(upper):
raise ValueError(
"Transformed bounds resulted in NaN values.\n"
"If you have not set bounds explicitly, the defaults are derived from the prior distribution;\n"
"consider bounding the parameters to avoid this error."
)
bounds["lower"].append(lower)
bounds["upper"].append(upper)
else:
bounds["lower"].append(param.bounds[0])
bounds["upper"].append(param.bounds[1])
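The new check guards against transformed bounds that cannot be represented, which typically happens when no explicit bounds are given and the defaults inferred from the prior reach into a region the transformation cannot map. A small NumPy illustration of the failure mode for a log transformation; the bound values are illustrative.

```python
import numpy as np

# Bounds defaulted from e.g. a Gaussian prior can include negative values,
# which a log transformation cannot map into the search space.
lower, upper = -0.1, 0.3

print(np.log(upper))   # finite: maps cleanly into search space
print(np.log(lower))   # nan (plus a RuntimeWarning): this is what now raises ValueError
```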
32 changes: 0 additions & 32 deletions pybop/transformation/base_transformation.py
@@ -126,35 +126,3 @@ def verify_input(
raise ValueError(f"Transform must have {self._n_parameters} elements")

return input_array


# ---- To be implemented with Monte Carlo PR ------ #
# class TransformedLogPDF(BaseCost):
# """Transformed log-PDF class."""
# def __init__(self, log_pdf, transformation):
# self._log_pdf = log_pdf
# self._transformation = transformation

# def __call__(self, q):
# p = self._transformation.to_model(q)
# log_pdf = self._log_pdf(p)

# # Calculate the PDF using change of variable
# # Wikipedia: https://w.wiki/UsJ
# log_jacobian_det = self._transformation.log_jacobian_det(q)
# return log_pdf + log_jacobian_det

# def _evaluateS1(self, x):
# p = self._transformation.to_model(x)
# log_pdf, log_pdf_derivatives = self._log_pdf._evaluateS1(p)
# log_jacobian_det, log_jacobian_det_derivatives = self._transformation.log_jacobian_det_S1(x)
# return log_pdf + log_jacobian_det, log_pdf_derivatives + log_jacobian_det_derivatives

# class TransformedLogPrior:
# """Transformed log-prior class."""
# def __init__(self, log_prior, transformation):
# self._log_prior = log_prior
# self._transformation = transformation

# def __call__(self, q):
# return self
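The commented-out classes removed above implemented the change-of-variables correction, log p_Q(q) = log p_P(to_model(q)) + log|det J(q)|, which is slated to return with the Monte Carlo PR. A self-contained check of that identity for a log transformation, using SciPy distributions rather than the pybop classes:

```python
import numpy as np
from scipy import stats

# Model-space variable P ~ LogNormal(0, 0.5); its search-space image Q = log(P)
# is N(0, 0.5). The transformed log-pdf is the model-space log-pdf evaluated at
# to_model(q) = exp(q) plus the log-Jacobian determinant, log|d exp(q)/dq| = q.
p_dist = stats.lognorm(s=0.5)
q_dist = stats.norm(loc=0.0, scale=0.5)

q = 0.3
lhs = q_dist.logpdf(q)
rhs = p_dist.logpdf(np.exp(q)) + q
print(np.isclose(lhs, rhs))  # True
```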
1 change: 1 addition & 0 deletions tests/integration/test_thevenin_parameterisation.py
@@ -107,6 +107,7 @@ def get_data(self, model):
[
(
"Discharge at 0.5C for 2 minutes (4 seconds period)",
"Rest for 20 seconds (4 seconds period)",
"Charge at 0.5C for 2 minutes (4 seconds period)",
),
]