Dimension-scaled covar_module (#2450)
Summary:
Pull Request resolved: #2450

Added the dimension-scaled covar_module with a lognormal lengthscale prior, and the accompanying LogNormal noise prior.

Reviewed By: dme65, saitcakmak

Differential Revision: D60079260

fbshipit-source-id: d267e37a91f3b55c8e9f864cb4c4af1201fae607
Carl Hvarfner authored and facebook-github-bot committed Jul 29, 2024
1 parent 88d9111 commit 96a71e7
Showing 2 changed files with 121 additions and 4 deletions.
78 changes: 75 additions & 3 deletions botorch/models/utils/gpytorch_modules.py
@@ -4,15 +4,31 @@
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.

from typing import Optional
r"""
Pre-packaged kernels for bayesian optimization, including a Scale/Matern
kernel that is well-suited to low-dimensional high-noise problems, and
a dimension-agnostic RBF kernel without outputscale.
References:
.. [Hvarfner2024vanilla]
C. Hvarfner, E. O. Hellsten, L. Nardi,
Vanilla Bayesian Optimization Performs Great in High Dimensions.
In International Conference on Machine Learning, 2024.
"""

from math import log, sqrt
from typing import Optional, Union

import torch
from gpytorch.constraints.constraints import GreaterThan
from gpytorch.kernels import MaternKernel, ScaleKernel
from gpytorch.kernels import MaternKernel, RBFKernel, ScaleKernel
from gpytorch.likelihoods.gaussian_likelihood import GaussianLikelihood
from gpytorch.priors.torch_priors import GammaPrior
from gpytorch.priors.torch_priors import GammaPrior, LogNormalPrior

MIN_INFERRED_NOISE_LEVEL = 1e-4
SQRT2 = sqrt(2)
SQRT3 = sqrt(3)


def get_matern_kernel_with_gamma_prior(
@@ -53,3 +69,59 @@ def get_gaussian_likelihood_with_gamma_prior(
            initial_value=noise_prior_mode,
        ),
    )


def get_gaussian_likelihood_with_lognormal_prior(
    batch_shape: Optional[torch.Size] = None,
) -> GaussianLikelihood:
    """Return a Gaussian likelihood with a LogNormal(-4.0, 1.0) noise prior.

    This prior is based on [Hvarfner2024vanilla]_.

    Args:
        batch_shape: Batch shape for the likelihood.

    Returns:
        A GaussianLikelihood with a LogNormal(-4.0, 1.0) noise prior, with the noise
        level constrained to be greater than MIN_INFERRED_NOISE_LEVEL (=1e-4).
    """
    batch_shape = torch.Size() if batch_shape is None else batch_shape
    noise_prior = LogNormalPrior(loc=-4.0, scale=1.0)
    return GaussianLikelihood(
        noise_prior=noise_prior,
        batch_shape=batch_shape,
        noise_constraint=GreaterThan(
            MIN_INFERRED_NOISE_LEVEL,
            transform=None,
            initial_value=noise_prior.mode,
        ),
    )


def get_covar_module_with_dim_scaled_prior(
    ard_num_dims: int,
    batch_shape: Optional[torch.Size] = None,
    use_rbf_kernel: bool = True,
) -> Union[MaternKernel, RBFKernel, ScaleKernel]:
    """Returns an RBF or Matern kernel with the lengthscale prior
    from [Hvarfner2024vanilla]_.

    Args:
        ard_num_dims: Number of feature dimensions for ARD.
        batch_shape: Batch shape for the covariance module.
        use_rbf_kernel: Whether to use an RBF kernel. If False, uses a Matern kernel.

    Returns:
        A kernel constructed according to the given arguments. The lengthscale is
        constrained to be larger than 0.025 for numerical stability.
    """
    base_class = RBFKernel if use_rbf_kernel else MaternKernel
    lengthscale_prior = LogNormalPrior(loc=SQRT2 + log(ard_num_dims) * 0.5, scale=SQRT3)
    base_kernel = base_class(
        ard_num_dims=ard_num_dims,
        batch_shape=batch_shape,
        lengthscale_prior=lengthscale_prior,
        lengthscale_constraint=GreaterThan(
            2.5e-2, transform=None, initial_value=lengthscale_prior.mode
        ),
    )
    return base_kernel
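
Taken together, the new helpers encode the priors from [Hvarfner2024vanilla]_: a
LogNormal(sqrt(2) + 0.5 * log(d), sqrt(3)) lengthscale prior whose location grows with
the input dimension d, and a LogNormal(-4, 1) observation-noise prior. The snippet below
is a minimal usage sketch, not part of this diff; it only assumes the module paths and
attribute names exercised by the tests that follow.

# Sketch: construct the new modules and inspect the dimension-scaled prior.
from botorch.models.utils.gpytorch_modules import (
    get_covar_module_with_dim_scaled_prior,
    get_gaussian_likelihood_with_lognormal_prior,
)

d = 16  # number of input dimensions
covar_module = get_covar_module_with_dim_scaled_prior(ard_num_dims=d)
likelihood = get_gaussian_likelihood_with_lognormal_prior()

# The lengthscale prior location is sqrt(2) + 0.5 * log(d) (~2.80 for d=16), so the
# typical prior lengthscale grows with the input dimension.
print(covar_module.lengthscale_prior.loc)
print(likelihood.noise_covar.noise_prior.loc)  # -4.0
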
47 changes: 46 additions & 1 deletion test/models/utils/test_gpytorch_modules.py
@@ -4,19 +4,25 @@
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.

import math

import torch
from botorch.models.utils.gpytorch_modules import (
    get_covar_module_with_dim_scaled_prior,
    get_gaussian_likelihood_with_gamma_prior,
    get_gaussian_likelihood_with_lognormal_prior,
    get_matern_kernel_with_gamma_prior,
    MIN_INFERRED_NOISE_LEVEL,
    SQRT2,
    SQRT3,
)
from botorch.utils.testing import BotorchTestCase
from gpytorch.constraints.constraints import GreaterThan
from gpytorch.kernels.matern_kernel import MaternKernel
from gpytorch.kernels.rbf_kernel import RBFKernel
from gpytorch.kernels.scale_kernel import ScaleKernel
from gpytorch.likelihoods.gaussian_likelihood import GaussianLikelihood
from gpytorch.priors.torch_priors import GammaPrior
from gpytorch.priors.torch_priors import GammaPrior, LogNormalPrior


class TestGPyTorchModules(BotorchTestCase):
@@ -57,3 +63,42 @@ def test_get_gaussian_likelihood_with_gamma_prior(self):
        self.assertAllClose(constraint.lower_bound.item(), MIN_INFERRED_NOISE_LEVEL)
        self.assertIsNone(constraint._transform)
        self.assertAllClose(constraint.initial_value.item(), 2.0)

    def test_get_covar_module_with_dim_scaled_prior(self):
        for batch_shape in (None, torch.Size([2])):
            kernel = get_covar_module_with_dim_scaled_prior(
                ard_num_dims=2, batch_shape=batch_shape
            )
            self.assertIsInstance(kernel, RBFKernel)
            self.assertEqual(kernel.batch_shape, batch_shape or torch.Size([]))
            prior = kernel.lengthscale_prior
            self.assertIsInstance(prior, LogNormalPrior)
            self.assertAllClose(prior.loc.item(), SQRT2 + 0.5 * math.log(2))
            self.assertAllClose(prior.scale.item(), SQRT3)
            matern_kernel = get_covar_module_with_dim_scaled_prior(
                ard_num_dims=2,
                batch_shape=batch_shape,
                use_rbf_kernel=False,
            )
            self.assertIsInstance(matern_kernel, MaternKernel)
            self.assertEqual(matern_kernel.batch_shape, batch_shape or torch.Size([]))
            self.assertEqual(matern_kernel.ard_num_dims, 2)

    def test_get_gaussian_likelihood_with_log_normal_prior(self):
        for batch_shape in (None, torch.Size([2])):
            likelihood = get_gaussian_likelihood_with_lognormal_prior(
                batch_shape=batch_shape
            )
            self.assertIsInstance(likelihood, GaussianLikelihood)
            expected_shape = (batch_shape or torch.Size([])) + (1,)
            self.assertEqual(likelihood.raw_noise.shape, expected_shape)
            prior = likelihood.noise_covar.noise_prior
            self.assertIsInstance(prior, LogNormalPrior)
            self.assertAllClose(prior.loc.item(), -4.0)
            self.assertAllClose(prior.scale.item(), 1.0)
            constraint = likelihood.noise_covar.raw_noise_constraint
            self.assertIsInstance(constraint, GreaterThan)
            self.assertAllClose(constraint.lower_bound.item(), MIN_INFERRED_NOISE_LEVEL)
            self.assertIsNone(constraint._transform)
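
In downstream use, these modules would typically be passed to a GP model as its
covariance module and likelihood. The sketch below is hypothetical and not part of this
commit; it assumes that botorch.models.SingleTaskGP accepts covar_module and likelihood
keyword arguments, which this diff does not show.

# Hypothetical end-to-end sketch (assumed SingleTaskGP signature, not shown in this diff).
import torch
from botorch.models import SingleTaskGP
from botorch.models.utils.gpytorch_modules import (
    get_covar_module_with_dim_scaled_prior,
    get_gaussian_likelihood_with_lognormal_prior,
)

d = 8
train_X = torch.rand(20, d, dtype=torch.double)
train_Y = train_X.sin().sum(dim=-1, keepdim=True)

model = SingleTaskGP(
    train_X,
    train_Y,
    covar_module=get_covar_module_with_dim_scaled_prior(ard_num_dims=d),
    likelihood=get_gaussian_likelihood_with_lognormal_prior(),
)

Fitting would then follow the usual ExactMarginalLogLikelihood / fit_gpytorch_mll
workflow.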
