Add more kernels (#244)
This PR adds more kernel choices. There are some notable points to consider below:

**Bugfix: arithmetic kernels**
These used to apply operators like `mul(*a_list)`, but since the operators are
defined for exactly two inputs, this failed for a three-component composite
kernel test I added. It was fixed by rephrasing it as `reduce(mul, a_list)`
(see the sketch below).
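
A minimal sketch of the failure mode and the fix, using plain floats as stand-ins for the GPyTorch kernel objects:

```python
from functools import reduce
from operator import mul

factors = [2.0, 3.0, 4.0]  # stand-ins for three GPyTorch kernel objects

# `operator.mul` is strictly binary, so unpacking three arguments fails:
#   mul(*factors)  ->  TypeError: mul expected 2 arguments, got 3

# `reduce` applies the binary operator pairwise, so any length >= 1 works:
product = reduce(mul, factors)  # ((2.0 * 3.0) * 4.0) == 24.0
print(product)  # 24.0
```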

**Removed Prior Iteration Tests**
These have become obsolete since the kernels are now tested with many priors.
I've also reduced the priors used with the scale kernel in the iteration tests
to a single choice; otherwise the list of kernels got too large.

**RQKernel alpha**
Even though this kernel has an attribute called `alpha` (also visible in the
equation [here](https://docs.gpytorch.ai/en/stable/kernels.html#rqkernel)), it
does not seem to accept priors for it. Hence, I have ignored `alpha`, regarding
both prior and initial value, in our corresponding kernel.
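
For reference, a minimal sketch of how our `RQKernel` is specified after this PR (only the lengthscale is exposed):

```python
from baybe.kernels import RQKernel

# Only the lengthscale is configurable on our side; `alpha` stays at GPyTorch's
# default, since the GPyTorch kernel offers no prior/initial-value hook for it.
kernel = RQKernel(lengthscale_initial_value=0.5)
```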

**CosineKernel**
I was not able to find prior settings that work with this kernel in the
iteration tests. The errors I get seem purely computational (along the lines of
"could not fit any reasonable model"). I have therefore not included it in this
PR.
AdrianSosic authored May 21, 2024
2 parents 835b949 + bc1bdd5 commit ae5bad0
Showing 7 changed files with 311 additions and 26 deletions.
9 changes: 5 additions & 4 deletions CHANGELOG.md
@@ -9,17 +9,18 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
- `mypy` for search space and objectives
- Class hierarchy for objectives
- Deserialization is now also possible from optional class name abbreviations
- `Kernel`, `MaternKernel`, `AdditiveKernel`, `ProductKernel` and `ScaleKernel`
classes for specifying kernels
- `AdditiveKernel`, `LinearKernel`, `MaternKernel`, `PeriodicKernel`,
`PiecewisePolynomialKernel`, `PolynomialKernel`, `ProductKernel`, `RBFKernel`,
`RFFKernel`, `RQKernel`, `ScaleKernel` classes for specifying kernels
- `GammaPrior`, `HalfCauchyPrior`, `NormalPrior`, `HalfNormalPrior`, `LogNormalPrior`
and `SmoothedBoxPrior` classes for specifying priors
- `KernelFactory` protocol enabling context-dependent construction of kernels
- Preset mechanism for `GaussianProcessSurrogate`
- `hypothesis` strategies and roundtrip test for kernels, constraints, objectives,
priors and acquisition functions
- New acquisition functions: `qSR`, `qNEI`, `LogEI`, `qLogEI`, `qLogNEI`
- Serialization user guide
- Basic deserialization tests using different class type specifiers
- `GammaPrior`, `HalfCauchyPrior`, `NormalPrior`, `HalfNormalPrior`, `LogNormalPrior`
and `SmoothedBoxPrior` can now be chosen as lengthscale prior
- Environment variables user guide
- Utility for estimating memory requirements of discrete product search space

24 changes: 22 additions & 2 deletions baybe/kernels/__init__.py
@@ -1,11 +1,31 @@
"""Kernels for Gaussian process surrogate models."""
"""Kernels for Gaussian process surrogate models.
from baybe.kernels.basic import MaternKernel
The kernel classes mimic classes from GPyTorch. For details on specification and
arguments see https://docs.gpytorch.ai/en/stable/kernels.html.
"""

from baybe.kernels.basic import (
LinearKernel,
MaternKernel,
PeriodicKernel,
PiecewisePolynomialKernel,
PolynomialKernel,
RBFKernel,
RFFKernel,
RQKernel,
)
from baybe.kernels.composite import AdditiveKernel, ProductKernel, ScaleKernel

__all__ = [
"AdditiveKernel",
"LinearKernel",
"MaternKernel",
"PeriodicKernel",
"PiecewisePolynomialKernel",
"PolynomialKernel",
"ProductKernel",
"RBFKernel",
"RFFKernel",
"RQKernel",
"ScaleKernel",
]
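
As a usage sketch, the expanded public API lets kernels be composed directly from `baybe.kernels`. The `base_kernels` field name is taken from `composite.py` below; `ScaleKernel(base_kernel=...)` is an assumption not shown in this diff:

```python
from baybe.kernels import AdditiveKernel, MaternKernel, RBFKernel, ScaleKernel

# A sum of a Matern and an RBF kernel, wrapped with a learnable output scale.
kernel = ScaleKernel(
    base_kernel=AdditiveKernel(
        base_kernels=(MaternKernel(nu=2.5), RBFKernel()),
    )
)
```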
165 changes: 164 additions & 1 deletion baybe/kernels/basic.py
@@ -4,7 +4,7 @@

from attrs import define, field
from attrs.converters import optional as optional_c
from attrs.validators import in_, instance_of
from attrs.validators import ge, in_, instance_of
from attrs.validators import optional as optional_v

from baybe.kernels.base import Kernel
@@ -13,6 +13,34 @@
from baybe.utils.validation import finite_float


@define(frozen=True)
class LinearKernel(Kernel):
"""A linear kernel."""

variance_prior: Optional[Prior] = field(
default=None, validator=optional_v(instance_of(Prior))
)
"""An optional prior on the kernel variance parameter."""

variance_initial_value: Optional[float] = field(
default=None, converter=optional_c(float), validator=optional_v(finite_float)
)
"""An optional initial value for the kernel variance parameter."""

def to_gpytorch(self, *args, **kwargs): # noqa: D102
# See base class.
import torch

from baybe.utils.torch import DTypeFloatTorch

gpytorch_kernel = super().to_gpytorch(*args, **kwargs)
if (initial_value := self.variance_initial_value) is not None:
gpytorch_kernel.variance = torch.tensor(
initial_value, dtype=DTypeFloatTorch
)
return gpytorch_kernel


@define(frozen=True)
class MaternKernel(Kernel):
"""A Matern kernel using a smoothness parameter."""
@@ -34,3 +62,138 @@ class MaternKernel(Kernel):
default=None, converter=optional_c(float), validator=optional_v(finite_float)
)
"""An optional initial value for the kernel lengthscale."""


@define(frozen=True)
class PeriodicKernel(Kernel):
"""A periodic kernel."""

lengthscale_prior: Optional[Prior] = field(
default=None, validator=optional_v(instance_of(Prior))
)
"""An optional prior on the kernel lengthscale."""

lengthscale_initial_value: Optional[float] = field(
default=None, converter=optional_c(float), validator=optional_v(finite_float)
)
"""An optional initial value for the kernel lengthscale."""

period_length_prior: Optional[Prior] = field(
default=None, validator=optional_v(instance_of(Prior))
)
"""An optional prior on the kernel period length."""

period_length_initial_value: Optional[float] = field(
default=None, converter=optional_c(float), validator=optional_v(finite_float)
)
"""An optional initial value for the kernel period length."""

def to_gpytorch(self, *args, **kwargs): # noqa: D102
# See base class.
import torch

from baybe.utils.torch import DTypeFloatTorch

gpytorch_kernel = super().to_gpytorch(*args, **kwargs)
# lengthscale is handled by the base class

if (initial_value := self.period_length_initial_value) is not None:
gpytorch_kernel.period_length = torch.tensor(
initial_value, dtype=DTypeFloatTorch
)
return gpytorch_kernel


@define(frozen=True)
class PiecewisePolynomialKernel(Kernel):
"""A piecewise polynomial kernel."""

q: int = field(validator=in_([0, 1, 2, 3]), default=2)
"""A smoothness parameter."""

lengthscale_prior: Optional[Prior] = field(
default=None, validator=optional_v(instance_of(Prior))
)
"""An optional prior on the kernel lengthscale."""

lengthscale_initial_value: Optional[float] = field(
default=None, converter=optional_c(float), validator=optional_v(finite_float)
)
"""An optional initial value for the kernel lengthscale."""


@define(frozen=True)
class PolynomialKernel(Kernel):
"""A polynomial kernel."""

power: int = field(validator=[instance_of(int), ge(0)])
"""The power of the polynomial term."""

offset_prior: Optional[Prior] = field(
default=None, validator=optional_v(instance_of(Prior))
)
"""An optional prior on the kernel offset."""

offset_initial_value: Optional[float] = field(
default=None, converter=optional_c(float), validator=optional_v(finite_float)
)
"""An optional initial value for the kernel offset."""

def to_gpytorch(self, *args, **kwargs): # noqa: D102
# See base class.
import torch

from baybe.utils.torch import DTypeFloatTorch

gpytorch_kernel = super().to_gpytorch(*args, **kwargs)
if (initial_value := self.offset_initial_value) is not None:
gpytorch_kernel.offset = torch.tensor(initial_value, dtype=DTypeFloatTorch)
return gpytorch_kernel


@define(frozen=True)
class RBFKernel(Kernel):
"""A radial basis function (RBF) kernel."""

lengthscale_prior: Optional[Prior] = field(
default=None, validator=optional_v(instance_of(Prior))
)
"""An optional prior on the kernel lengthscale."""

lengthscale_initial_value: Optional[float] = field(
default=None, converter=optional_c(float), validator=optional_v(finite_float)
)
"""An optional initial value for the kernel lengthscale."""


@define(frozen=True)
class RFFKernel(Kernel):
"""A random Fourier features (RFF) kernel."""

num_samples: int = field(validator=[instance_of(int), ge(1)])
"""The number of frequencies to draw."""

lengthscale_prior: Optional[Prior] = field(
default=None, validator=optional_v(instance_of(Prior))
)
"""An optional prior on the kernel lengthscale."""

lengthscale_initial_value: Optional[float] = field(
default=None, converter=optional_c(float), validator=optional_v(finite_float)
)
"""An optional initial value for the kernel lengthscale."""


@define(frozen=True)
class RQKernel(Kernel):
"""A rational quadratic (RQ) kernel."""

lengthscale_prior: Optional[Prior] = field(
default=None, validator=optional_v(instance_of(Prior))
)
"""An optional prior on the kernel lengthscale."""

lengthscale_initial_value: Optional[float] = field(
default=None, converter=optional_c(float), validator=optional_v(finite_float)
)
"""An optional initial value for the kernel lengthscale."""
6 changes: 3 additions & 3 deletions baybe/kernels/composite.py
@@ -1,5 +1,5 @@
"""Composite kernels (that is, kernels composed of other kernels)."""

from functools import reduce
from operator import add, mul
from typing import Optional

@@ -56,7 +56,7 @@ class AdditiveKernel(Kernel):
def to_gpytorch(self, *args, **kwargs): # noqa: D102
# See base class.

return add(*(k.to_gpytorch(*args, **kwargs) for k in self.base_kernels))
return reduce(add, (k.to_gpytorch(*args, **kwargs) for k in self.base_kernels))


@define(frozen=True)
@@ -71,4 +71,4 @@ class ProductKernel(Kernel):
def to_gpytorch(self, *args, **kwargs): # noqa: D102
# See base class.

return mul(*(k.to_gpytorch(*args, **kwargs) for k in self.base_kernels))
return reduce(mul, (k.to_gpytorch(*args, **kwargs) for k in self.base_kernels))
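
A minimal sketch of the three-component case that the previous `mul(*...)` call could not handle; it assumes `to_gpytorch` needs no extra arguments for these kernels:

```python
from baybe.kernels import MaternKernel, ProductKernel, RBFKernel, RQKernel

# Three factors: `operator.mul` alone is binary and failed here, whereas
# `reduce(mul, ...)` multiplies the converted kernels pairwise.
kernel = ProductKernel(
    base_kernels=(MaternKernel(nu=1.5), RBFKernel(), RQKernel()),
)
gpytorch_kernel = kernel.to_gpytorch()  # a GPyTorch product of three kernels
```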
6 changes: 5 additions & 1 deletion baybe/priors/__init__.py
@@ -1,4 +1,8 @@
"""Prior distributions."""
"""Prior distributions.
The prior classes mimic classes from GPyTorch. For details on specification and
arguments see https://docs.gpytorch.ai/en/stable/priors.html.
"""

from baybe.priors.basic import (
GammaPrior,
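
A short sketch of how these priors are meant to be used, e.g. as a lengthscale prior on a kernel; the `GammaPrior` parameter values are purely illustrative:

```python
from baybe.kernels import MaternKernel
from baybe.priors import GammaPrior

# Any of the exported priors can be attached as a lengthscale prior.
kernel = MaternKernel(
    nu=2.5,
    lengthscale_prior=GammaPrior(3.0, 6.0),  # illustrative shape/rate values
    lengthscale_initial_value=0.2,
)
```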
77 changes: 75 additions & 2 deletions tests/hypothesis_strategies/kernels.py
@@ -4,7 +4,16 @@

import hypothesis.strategies as st

from baybe.kernels.basic import MaternKernel
from baybe.kernels.basic import (
LinearKernel,
MaternKernel,
PeriodicKernel,
PiecewisePolynomialKernel,
PolynomialKernel,
RBFKernel,
RFFKernel,
RQKernel,
)
from baybe.kernels.composite import AdditiveKernel, ProductKernel, ScaleKernel

from ..hypothesis_strategies.basic import finite_floats
@@ -19,6 +28,13 @@ class KernelType(Enum):
PRODUCT = "PRODUCT"


linear_kernels = st.builds(
LinearKernel,
variance_prior=st.one_of(st.none(), priors),
variance_initial_value=st.one_of(st.none(), finite_floats()),
)
"""A strategy that generates linear kernels."""

matern_kernels = st.builds(
MaternKernel,
nu=st.sampled_from((0.5, 1.5, 2.5)),
@@ -27,8 +43,65 @@
)
"""A strategy that generates Matern kernels."""

periodic_kernels = st.builds(
PeriodicKernel,
lengthscale_prior=st.one_of(st.none(), priors),
lengthscale_initial_value=st.one_of(st.none(), finite_floats()),
period_length_prior=st.one_of(st.none(), priors),
period_length_initial_value=st.one_of(st.none(), finite_floats()),
)
"""A strategy that generates periodic kernels."""

piecewise_polynomial_kernels = st.builds(
PiecewisePolynomialKernel,
q=st.integers(min_value=0, max_value=3),
lengthscale_prior=st.one_of(st.none(), priors),
lengthscale_initial_value=st.one_of(st.none(), finite_floats()),
)
"""A strategy that generates piecewise polynomial kernels."""

polynomial_kernels = st.builds(
PolynomialKernel,
power=st.integers(min_value=0),
offset_prior=st.one_of(st.none(), priors),
offset_initial_value=st.one_of(st.none(), finite_floats()),
)
"""A strategy that generates polynomial kernels."""

base_kernels = st.one_of([matern_kernels])
rbf_kernels = st.builds(
RBFKernel,
lengthscale_prior=st.one_of(st.none(), priors),
lengthscale_initial_value=st.one_of(st.none(), finite_floats()),
)
"""A strategy that generates radial basis function (RBF) kernels."""

rff_kernels = st.builds(
RFFKernel,
num_samples=st.integers(min_value=1),
lengthscale_prior=st.one_of(st.none(), priors),
lengthscale_initial_value=st.one_of(st.none(), finite_floats()),
)
"""A strategy that generates random Fourier features (RFF) kernels."""

rq_kernels = st.builds(
RQKernel,
lengthscale_prior=st.one_of(st.none(), priors),
lengthscale_initial_value=st.one_of(st.none(), finite_floats()),
)
"""A strategy that generates rational quadratic (RQ) kernels."""

base_kernels = st.one_of(
[
matern_kernels, # on top because it is the default for many use cases
linear_kernels,
rbf_kernels,
rq_kernels,
rff_kernels,
piecewise_polynomial_kernels,
polynomial_kernels,
periodic_kernels,
]
)
"""A strategy that generates base kernels to be used within more complex kernels."""

