diff --git a/src/george/modeling.py b/src/george/modeling.py
index 6fe441b6..44a48ed5 100644
--- a/src/george/modeling.py
+++ b/src/george/modeling.py
@@ -115,15 +115,15 @@ def compute_gradient(self, *args, **kwargs):
         """
         _EPS = 1.254e-5
-        vector = self.get_parameter_vector()
+        vector = self.get_parameter_vector(include_frozen=True)
         value0 = self.get_value(*args, **kwargs)
         grad = np.empty([len(vector)] + list(value0.shape), dtype=np.float64)
         for i, v in enumerate(vector):
             vector[i] = v + _EPS
-            self.set_parameter_vector(vector)
+            self.set_parameter_vector(vector, include_frozen=True)
             value = self.get_value(*args, **kwargs)
             vector[i] = v
-            self.set_parameter_vector(vector)
+            self.set_parameter_vector(vector, include_frozen=True)
             grad[i] = (value - value0) / _EPS
         return grad
diff --git a/tests/test_modeling.py b/tests/test_modeling.py
index 050871ce..43457bbb 100644
--- a/tests/test_modeling.py
+++ b/tests/test_modeling.py
@@ -72,6 +72,13 @@ def compute_gradient(self, x):
         return dict(m=x, b=np.ones(len(x)))
 
 
+class LinearWhiteNoiseWithoutGrad(Model):
+    parameter_names = ("m", "b")
+
+    def get_value(self, x):
+        return self.m * x + self.b
+
+
 def test_gp_callable_white_noise(N=50, seed=1234):
     np.random.seed(seed)
     x = np.random.uniform(0, 5, N)
@@ -86,6 +93,20 @@ def test_gp_callable_white_noise(N=50, seed=1234):
     check_gradient(gp, y)
 
 
+def test_gp_callable_white_noise_without_grad(N=50, seed=1234):
+    np.random.seed(seed)
+    x = np.random.uniform(0, 5, N)
+    y = 5 + np.sin(x)
+    gp = GP(10. * kernels.ExpSquaredKernel(1.3), mean=5.0,
+            white_noise=LinearWhiteNoiseWithoutGrad(-6, 0.01),
+            fit_white_noise=True)
+    gp.compute(x)
+    check_gradient(gp, y)
+
+    gp.freeze_parameter("white_noise:m")
+    check_gradient(gp, y)
+
+
 def test_parameters():
     kernel = 10 * kernels.ExpSquaredKernel(1.0)
     kernel += 0.5 * kernels.RationalQuadraticKernel(log_alpha=0.1, metric=5.0)
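
The `compute_gradient` changed in the first hunk is a plain forward finite-difference fallback: each entry of the parameter vector is bumped by `_EPS`, the model is re-evaluated, and the slope is taken against the unperturbed value. The patch makes that loop walk the full vector (`include_frozen=True`), presumably so the returned gradient keeps one row per parameter even after `freeze_parameter`, matching what hand-written `compute_gradient` implementations return. The sketch below is a minimal standalone illustration of that idea, not george's actual class hierarchy; `ToyModel`, its `frozen` mask, and the stated rationale are assumptions for illustration.

```python
import numpy as np

_EPS = 1.254e-5  # same step size as the patched compute_gradient


class ToyModel:
    """Hypothetical stand-in for george's Model parameter handling."""

    def __init__(self, m, b):
        self.params = np.array([m, b], dtype=float)  # full parameter vector (m, b)
        self.frozen = np.zeros(2, dtype=bool)        # frozen-parameter mask

    def get_parameter_vector(self, include_frozen=False):
        return self.params.copy() if include_frozen else self.params[~self.frozen]

    def set_parameter_vector(self, vector, include_frozen=False):
        if include_frozen:
            self.params = np.array(vector, dtype=float)
        else:
            self.params[~self.frozen] = vector

    def get_value(self, x):
        m, b = self.params
        return m * x + b

    def compute_gradient(self, x):
        # Forward finite differences over the *full* vector, mirroring the fix:
        # perturb every parameter, frozen or not, so the result always has one
        # row per parameter (the presumed reason for the include_frozen change).
        vector = self.get_parameter_vector(include_frozen=True)
        value0 = self.get_value(x)
        grad = np.empty([len(vector)] + list(value0.shape), dtype=np.float64)
        for i, v in enumerate(vector):
            vector[i] = v + _EPS
            self.set_parameter_vector(vector, include_frozen=True)
            grad[i] = (self.get_value(x) - value0) / _EPS
            vector[i] = v
            self.set_parameter_vector(vector, include_frozen=True)
        return grad


if __name__ == "__main__":
    model = ToyModel(m=-6.0, b=0.01)
    model.frozen[0] = True                 # freeze "m", as in the new test
    x = np.linspace(0.0, 5.0, 5)
    grad = model.compute_gradient(x)
    # Analytic gradient of m*x + b is (x, 1); both rows are still present.
    print(np.allclose(grad[0], x, atol=1e-4), np.allclose(grad[1], 1.0, atol=1e-4))
```

The new test exercises the same path through the public API: `LinearWhiteNoiseWithoutGrad` defines no `compute_gradient`, so the GP's white-noise gradient falls back to this numerical estimate, and `check_gradient` must still pass after `white_noise:m` is frozen.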