Add more kernels #244

Merged: 12 commits, May 21, 2024
9 changes: 5 additions & 4 deletions CHANGELOG.md
@@ -9,17 +9,18 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0)
- `mypy` for search space and objectives
- Class hierarchy for objectives
- Deserialization is now also possible from optional class name abbreviations
- `Kernel`, `MaternKernel`, `AdditiveKernel`, `ProductKernel` and `ScaleKernel`
classes for specifying kernels
- `AdditiveKernel`, `LinearKernel`, `MaternKernel`, `PeriodicKernel`,
`PiecewisePolynomialKernel`, `PolynomialKernel`, `ProductKernel`, `RBFKernel`,
`RFFKernel`, `RQKernel`, `ScaleKernel` classes for specifying kernels
- `GammaPrior`, `HalfCauchyPrior`, `NormalPrior`, `HalfNormalPrior`, `LogNormalPrior`
and `SmoothedBoxPrior` classes for specifying priors
- `KernelFactory` protocol enabling context-dependent construction of kernels
- Preset mechanism for `GaussianProcessSurrogate`
- `hypothesis` strategies and roundtrip test for kernels, constraints, objectives,
priors and acquisition functions
- New acquisition functions: `qSR`, `qNEI`, `LogEI`, `qLogEI`, `qLogNEI`
- Serialization user guide
- Basic deserialization tests using different class type specifiers
- `GammaPrior`, `HalfCauchyPrior`, `NormalPrior`, `HalfNormalPrior`, `LogNormalPrior`
and `SmoothedBoxPrior` can now be chosen as lengthscale prior
- Environment variables user guide
- Utility for estimating memory requirements of discrete product search space

24 changes: 22 additions & 2 deletions baybe/kernels/__init__.py
@@ -1,11 +1,31 @@
"""Kernels for Gaussian process surrogate models."""
"""Kernels for Gaussian process surrogate models.

from baybe.kernels.basic import MaternKernel
The kernel classes mimic classes from GPyTorch. For details on specification and
arguments see https://docs.gpytorch.ai/en/stable/kernels.html.
"""

from baybe.kernels.basic import (
LinearKernel,
MaternKernel,
PeriodicKernel,
PiecewisePolynomialKernel,
PolynomialKernel,
RBFKernel,
RFFKernel,
RQKernel,
)
from baybe.kernels.composite import AdditiveKernel, ProductKernel, ScaleKernel

__all__ = [
"AdditiveKernel",
"LinearKernel",
"MaternKernel",
"PeriodicKernel",
"PiecewisePolynomialKernel",
"PolynomialKernel",
"ProductKernel",
"RBFKernel",
"RFFKernel",
"RQKernel",
"ScaleKernel",
]
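As an illustration (not part of the diff), the expanded public API can be used roughly as follows; the GammaPrior argument order and the ScaleKernel call signature are assumptions based on their GPyTorch counterparts:

from baybe.kernels import MaternKernel, RBFKernel, ScaleKernel
from baybe.priors import GammaPrior

# Matern kernel with an explicit smoothness, lengthscale prior, and starting value
matern = MaternKernel(
    nu=2.5,
    lengthscale_prior=GammaPrior(3.0, 6.0),  # assumed (concentration, rate) order
    lengthscale_initial_value=0.5,
)

# Wrapping a base kernel in a ScaleKernel adds a learnable output scale
scaled = ScaleKernel(RBFKernel())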
165 changes: 164 additions & 1 deletion baybe/kernels/basic.py
@@ -4,7 +4,7 @@

from attrs import define, field
from attrs.converters import optional as optional_c
from attrs.validators import in_, instance_of
from attrs.validators import ge, in_, instance_of
from attrs.validators import optional as optional_v

from baybe.kernels.base import Kernel
@@ -13,6 +13,34 @@
from baybe.utils.validation import finite_float


@define(frozen=True)
class LinearKernel(Kernel):
"""A linear kernel."""

variance_prior: Optional[Prior] = field(
default=None, validator=optional_v(instance_of(Prior))
)
"""An optional prior on the kernel variance parameter."""

variance_initial_value: Optional[float] = field(
default=None, converter=optional_c(float), validator=optional_v(finite_float)
)
"""An optional initial value for the kernel variance parameter."""

def to_gpytorch(self, *args, **kwargs): # noqa: D102
# See base class.
import torch

from baybe.utils.torch import DTypeFloatTorch

gpytorch_kernel = super().to_gpytorch(*args, **kwargs)
if (initial_value := self.variance_initial_value) is not None:
gpytorch_kernel.variance = torch.tensor(
initial_value, dtype=DTypeFloatTorch
)
return gpytorch_kernel
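The override above follows a pattern that recurs for the kernels below: super().to_gpytorch() constructs the GPyTorch kernel (including any priors), and the subclass then seeds the corresponding parameter with the configured initial value. A hedged usage sketch, assuming to_gpytorch can be called without further arguments here:

kernel = LinearKernel(variance_initial_value=2.0)
gpy_kernel = kernel.to_gpytorch()  # presumably a gpytorch.kernels.LinearKernel
# Its variance parameter now starts at 2.0 instead of the GPyTorch default.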


@define(frozen=True)
class MaternKernel(Kernel):
"""A Matern kernel using a smoothness parameter."""
@@ -34,3 +62,138 @@ class MaternKernel(Kernel):
default=None, converter=optional_c(float), validator=optional_v(finite_float)
)
"""An optional initial value for the kernel lengthscale."""


@define(frozen=True)
class PeriodicKernel(Kernel):
"""A periodic kernel."""

lengthscale_prior: Optional[Prior] = field(
default=None, validator=optional_v(instance_of(Prior))
)
"""An optional prior on the kernel lengthscale."""

lengthscale_initial_value: Optional[float] = field(
default=None, converter=optional_c(float), validator=optional_v(finite_float)
)
"""An optional initial value for the kernel lengthscale."""

period_length_prior: Optional[Prior] = field(
default=None, validator=optional_v(instance_of(Prior))
)
"""An optional prior on the kernel period length."""

period_length_initial_value: Optional[float] = field(
default=None, converter=optional_c(float), validator=optional_v(finite_float)
)
"""An optional initial value for the kernel period length."""

def to_gpytorch(self, *args, **kwargs): # noqa: D102
# See base class.
import torch

from baybe.utils.torch import DTypeFloatTorch

gpytorch_kernel = super().to_gpytorch(*args, **kwargs)
# lengthscale is handled by the base class

if (initial_value := self.period_length_initial_value) is not None:
gpytorch_kernel.period_length = torch.tensor(
initial_value, dtype=DTypeFloatTorch
)
return gpytorch_kernel
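For illustration only (not part of the diff), a periodic kernel can be configured with both hyperparameter pairs at once; the prior constructor arguments are assumptions:

from baybe.priors import GammaPrior, LogNormalPrior

periodic = PeriodicKernel(
    lengthscale_prior=GammaPrior(3.0, 6.0),        # assumed argument order
    lengthscale_initial_value=1.0,
    period_length_prior=LogNormalPrior(0.0, 1.0),  # assumed argument order
    period_length_initial_value=0.5,
)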


@define(frozen=True)
class PiecewisePolynomialKernel(Kernel):
"""A piecewise polynomial kernel."""

q: int = field(validator=in_([0, 1, 2, 3]), default=2)
"""A smoothness parameter."""

lengthscale_prior: Optional[Prior] = field(
default=None, validator=optional_v(instance_of(Prior))
)
"""An optional prior on the kernel lengthscale."""

lengthscale_initial_value: Optional[float] = field(
default=None, converter=optional_c(float), validator=optional_v(finite_float)
)
"""An optional initial value for the kernel lengthscale."""


@define(frozen=True)
class PolynomialKernel(Kernel):
"""A polynomial kernel."""

power: int = field(validator=[instance_of(int), ge(0)])
"""The power of the polynomial term."""

offset_prior: Optional[Prior] = field(
default=None, validator=optional_v(instance_of(Prior))
)
"""An optional prior on the kernel offset."""

offset_initial_value: Optional[float] = field(
default=None, converter=optional_c(float), validator=optional_v(finite_float)
)
"""An optional initial value for the kernel offset."""

def to_gpytorch(self, *args, **kwargs): # noqa: D102
# See base class.
import torch

from baybe.utils.torch import DTypeFloatTorch

gpytorch_kernel = super().to_gpytorch(*args, **kwargs)
if (initial_value := self.offset_initial_value) is not None:
gpytorch_kernel.offset = torch.tensor(initial_value, dtype=DTypeFloatTorch)
return gpytorch_kernel
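Illustrative construction examples (not part of the diff); power must be a non-negative integer, while the offset settings are optional:

quadratic = PolynomialKernel(power=2, offset_initial_value=1.0)
cubic = PolynomialKernel(power=3)  # offset prior and initial value left unset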


@define(frozen=True)
class RBFKernel(Kernel):
"""A radial basis function (RBF) kernel."""

lengthscale_prior: Optional[Prior] = field(
default=None, validator=optional_v(instance_of(Prior))
)
"""An optional prior on the kernel lengthscale."""

lengthscale_initial_value: Optional[float] = field(
default=None, converter=optional_c(float), validator=optional_v(finite_float)
)
"""An optional initial value for the kernel lengthscale."""


@define(frozen=True)
class RFFKernel(Kernel):
"""A random Fourier features (RFF) kernel."""

num_samples: int = field(validator=[instance_of(int), ge(1)])
"""The number of frequencies to draw."""

lengthscale_prior: Optional[Prior] = field(
default=None, validator=optional_v(instance_of(Prior))
)
"""An optional prior on the kernel lengthscale."""

lengthscale_initial_value: Optional[float] = field(
default=None, converter=optional_c(float), validator=optional_v(finite_float)
)
"""An optional initial value for the kernel lengthscale."""


@define(frozen=True)
class RQKernel(Kernel):
"""A rational quadratic (RQ) kernel."""

lengthscale_prior: Optional[Prior] = field(
default=None, validator=optional_v(instance_of(Prior))
)
"""An optional prior on the kernel lengthscale."""

lengthscale_initial_value: Optional[float] = field(
default=None, converter=optional_c(float), validator=optional_v(finite_float)
)
"""An optional initial value for the kernel lengthscale."""
6 changes: 3 additions & 3 deletions baybe/kernels/composite.py
@@ -1,5 +1,5 @@
"""Composite kernels (that is, kernels composed of other kernels)."""

from functools import reduce
from operator import add, mul
from typing import Optional

@@ -56,7 +56,7 @@ class AdditiveKernel(Kernel):
def to_gpytorch(self, *args, **kwargs): # noqa: D102
# See base class.

return add(*(k.to_gpytorch(*args, **kwargs) for k in self.base_kernels))
return reduce(add, (k.to_gpytorch(*args, **kwargs) for k in self.base_kernels))


@define(frozen=True)
@@ -71,4 +71,4 @@ class ProductKernel(Kernel):
def to_gpytorch(self, *args, **kwargs): # noqa: D102
# See base class.

return mul(*(k.to_gpytorch(*args, **kwargs) for k in self.base_kernels))
return reduce(mul, (k.to_gpytorch(*args, **kwargs) for k in self.base_kernels))
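The switch from add(*...)/mul(*...) to reduce matters because operator.add and operator.mul are strictly binary, so the old calls failed for anything other than exactly two base kernels. A pure-Python sketch of the difference, using stand-in integers instead of GPyTorch kernels:

from functools import reduce
from operator import add

kernels = [1, 2, 3]           # stand-ins for converted GPyTorch kernels
# add(*kernels) raises TypeError: add expects exactly two arguments
total = reduce(add, kernels)  # evaluates as (1 + 2) + 3 and works for any count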
6 changes: 5 additions & 1 deletion baybe/priors/__init__.py
@@ -1,4 +1,8 @@
"""Prior distributions."""
"""Prior distributions.
The prior classes mimic classes from GPyTorch. For details on specification and
arguments see https://docs.gpytorch.ai/en/stable/priors.html.
"""

from baybe.priors.basic import (
GammaPrior,
77 changes: 75 additions & 2 deletions tests/hypothesis_strategies/kernels.py
@@ -4,7 +4,16 @@

import hypothesis.strategies as st

from baybe.kernels.basic import MaternKernel
from baybe.kernels.basic import (
LinearKernel,
MaternKernel,
PeriodicKernel,
PiecewisePolynomialKernel,
PolynomialKernel,
RBFKernel,
RFFKernel,
RQKernel,
)
from baybe.kernels.composite import AdditiveKernel, ProductKernel, ScaleKernel

from ..hypothesis_strategies.basic import finite_floats
@@ -19,6 +28,13 @@ class KernelType(Enum):
PRODUCT = "PRODUCT"


linear_kernels = st.builds(
LinearKernel,
variance_prior=st.one_of(st.none(), priors),
variance_initial_value=st.one_of(st.none(), finite_floats()),
)
"""A strategy that generates linear kernels."""

matern_kernels = st.builds(
MaternKernel,
nu=st.sampled_from((0.5, 1.5, 2.5)),
@@ -27,8 +43,65 @@
)
"""A strategy that generates Matern kernels."""

periodic_kernels = st.builds(
PeriodicKernel,
lengthscale_prior=st.one_of(st.none(), priors),
lengthscale_initial_value=st.one_of(st.none(), finite_floats()),
period_length_prior=st.one_of(st.none(), priors),
period_length_initial_value=st.one_of(st.none(), finite_floats()),
)
"""A strategy that generates periodic kernels."""

piecewise_polynomial_kernels = st.builds(
PiecewisePolynomialKernel,
q=st.integers(min_value=0, max_value=3),
lengthscale_prior=st.one_of(st.none(), priors),
lengthscale_initial_value=st.one_of(st.none(), finite_floats()),
)
"""A strategy that generates piecewise polynomial kernels."""

polynomial_kernels = st.builds(
PolynomialKernel,
power=st.integers(min_value=0),
offset_prior=st.one_of(st.none(), priors),
offset_initial_value=st.one_of(st.none(), finite_floats()),
)
"""A strategy that generates polynomial kernels."""

base_kernels = st.one_of([matern_kernels])
rbf_kernels = st.builds(
RBFKernel,
lengthscale_prior=st.one_of(st.none(), priors),
lengthscale_initial_value=st.one_of(st.none(), finite_floats()),
)
"""A strategy that generates radial basis function (RBF) kernels."""

rff_kernels = st.builds(
RFFKernel,
num_samples=st.integers(min_value=1),
lengthscale_prior=st.one_of(st.none(), priors),
lengthscale_initial_value=st.one_of(st.none(), finite_floats()),
)
"""A strategy that generates random Fourier features (RFF) kernels."""

rq_kernels = st.builds(
RQKernel,
lengthscale_prior=st.one_of(st.none(), priors),
lengthscale_initial_value=st.one_of(st.none(), finite_floats()),
)
"""A strategy that generates rational quadratic (RQ) kernels."""

base_kernels = st.one_of(
[
matern_kernels, # on top because it is the default for many use cases
linear_kernels,
rbf_kernels,
rq_kernels,
rff_kernels,
piecewise_polynomial_kernels,
polynomial_kernels,
periodic_kernels,
]
)
"""A strategy that generates base kernels to be used within more complex kernels."""

