Skip to content

Commit

Permalink
Re-introduces ARD kernel, allows turning on/off
Browse files Browse the repository at this point in the history
  • Loading branch information
JohnGoertz committed Feb 17, 2022
1 parent 082a38a commit 7ac2a47
Showing 1 changed file with 16 additions and 9 deletions.
25 changes: 16 additions & 9 deletions gumbi/regression/pymc3/GP.py
Original file line number Diff line number Diff line change
Expand Up @@ -301,11 +301,12 @@ def fit(self, outputs=None, linear_dims=None, continuous_dims=None, continuous_l

return self

def _make_continuous_cov(self, continuous_cov_func, D_in, idx_s, n_s, ℓ_μ, ℓ_σ, stabilize=True, eps=1e-6):
def _make_continuous_cov(self, continuous_cov_func, D_in, idx_s, n_s, ℓ_μ, ℓ_σ, ARD=True, stabilize=True, eps=1e-6):

def continuous_cov(suffix):
# ℓ = pm.InverseGamma(f'ℓ_{suffix}', mu=ℓ_μ, sigma=ℓ_σ, shape=n_s)
ℓ = pm.Gamma(f'ℓ_{suffix}', alpha=2, beta=1)
shape = n_s if ARD else 1
# ℓ = pm.InverseGamma(f'ℓ_{suffix}', mu=ℓ_μ, sigma=ℓ_σ, shape=shape)
ℓ = pm.Gamma(f'ℓ_{suffix}', alpha=2, beta=1, shape=shape)
η = pm.Gamma(f'η_{suffix}', alpha=2, beta=1)
cov = η ** 2 * continuous_cov_func(input_dim=D_in, active_dims=idx_s, ls=ℓ)
if stabilize:
Expand Down Expand Up @@ -336,7 +337,8 @@ def coreg_cov(suffix, D_out, idx):
# TODO: add full probabilistic model description to docstring
# TODO: allow dimension-specific continuous kernel specification
# TODO: allow single multi-dimensional continuous kernel rather than independent kernels per dimension
def build_model(self, seed=None, continuous_kernel='ExpQuad', heteroskedastic_inputs=False, heteroskedastic_outputs=True, sparse=False, n_u=100):
def build_model(self, seed=None, continuous_kernel='ExpQuad', heteroskedastic_inputs=False,
heteroskedastic_outputs=True, sparse=False, n_u=100, ARD=True):
r"""Compile a marginalized pymc3 model for the GP.
Each dimension in :attr:`continuous_dims` is combined in an ExpQuad kernel with a principled
Expand All @@ -361,6 +363,10 @@ def build_model(self, seed=None, continuous_kernel='ExpQuad', heteroskedastic_in
Whether to use a `sparse approximation`_ to the GP.
n_u: int, default 100
Number of inducing points to use for the sparse approximation, if required.
ARD: bool, default True
Whether to use "Automatic Relevance Determination" in the continuous kernel. If _True_, each continuous
dimension receives its own lengthscale; otherwise a single lengthscale is used for all continuous
dimensions.
Returns
-------
Expand Down Expand Up @@ -392,7 +398,7 @@ def build_model(self, seed=None, continuous_kernel='ExpQuad', heteroskedastic_in
'n_u': n_u,
}

gp_dict = self._construct_kernels(X, continuous_kernel, seed, sparse, latent=False)
gp_dict = self._construct_kernels(X, continuous_kernel, seed, sparse, latent=False, ARD=ARD)

with self.model:

Expand Down Expand Up @@ -474,7 +480,7 @@ def _prepare_lengthscales(self, X):
ℓ_μ, ℓ_σ = [stat for stat in np.array([get_ℓ_prior(dim) for dim in X_s.T]).T]
return ℓ_μ, ℓ_σ

def _construct_kernels(self, X, continuous_kernel, seed, sparse, latent, stabilize=True, eps=1e-6):
def _construct_kernels(self, X, continuous_kernel, seed, sparse, latent, ARD=True, stabilize=True, eps=1e-6):

continuous_kernels = ['ExpQuad', 'RatQuad', 'Matern32', 'Matern52', 'Exponential', 'Cosine']
assert_in('Continuous kernel', continuous_kernel, continuous_kernels)
Expand All @@ -487,7 +493,7 @@ def _construct_kernels(self, X, continuous_kernel, seed, sparse, latent, stabili
ℓ_μ, ℓ_σ = self._prepare_lengthscales(X)

continuous_cov = self._make_continuous_cov(continuous_cov_func, D_in, idxs['s'], ns['s'], ℓ_μ, ℓ_σ,
stabilize=stabilize, eps=eps)
ARD=ARD, stabilize=stabilize, eps=eps)
linear_cov = self._make_linear_cov(D_in, idxs['l'], ns['l'])
coreg_cov = self._make_coreg_cov(D_in, seed)

Expand Down Expand Up @@ -547,7 +553,7 @@ def _construct_kernels(self, X, continuous_kernel, seed, sparse, latent, stabili
self.gp_dict = gp_dict
return gp_dict

def build_latent(self, seed=None, continuous_kernel='ExpQuad', prior_name='latent_prior', eps=1e-6):
def build_latent(self, seed=None, continuous_kernel='ExpQuad', prior_name='latent_prior', ARD=True, eps=1e-6):

if self.additive:
raise NotImplementedError('Additive/latent GPs are not yet implemented')
Expand All @@ -562,7 +568,8 @@ def build_latent(self, seed=None, continuous_kernel='ExpQuad', prior_name='laten
self.sparse = False
self.latent = True

gp_dict = self._construct_kernels(X, continuous_kernel, seed, sparse=False, latent=True, stabilize=True, eps=eps)
gp_dict = self._construct_kernels(X, continuous_kernel, seed, sparse=False, latent=True, ARD=ARD,
stabilize=True, eps=eps)

with self.model:
self.prior = gp_dict['total'].prior(prior_name, X=X)
Expand Down

0 comments on commit 7ac2a47

Please sign in to comment.