Hypervolume Knowledge Gradient (#1950)
Summary:
Pull Request resolved: #1950

This adds the hypervolume knowledge gradient acquisition function, including support for decoupled evaluations and multi-fidelity optimization.

Differential Revision: D47809667

fbshipit-source-id: 33021e43e3b65250f3c00e4652af74a07f1836a8
sdaulton authored and facebook-github-bot committed Aug 10, 2023
1 parent 3506538 commit 372efe8
Showing 7 changed files with 1,027 additions and 31 deletions.
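For orientation, here is a minimal sketch of how the new acquisition function might be constructed. The import path matches the exports added in this commit, but the constructor keywords (ref_point, num_fantasies) are assumptions based on related hypervolume-based acquisition functions; the constructor itself is not shown in this excerpt.

import torch
from botorch.models import SingleTaskGP
from botorch.acquisition.multi_objective import qHypervolumeKnowledgeGradient

# Toy two-objective model on a 2-d design space.
train_X = torch.rand(10, 2, dtype=torch.double)
train_Y = torch.rand(10, 2, dtype=torch.double)  # two objectives
model = SingleTaskGP(train_X, train_Y)

# The keyword arguments below are assumed for illustration, not taken from this diff.
acqf = qHypervolumeKnowledgeGradient(
    model=model,
    ref_point=torch.zeros(2, dtype=torch.double),
    num_fantasies=8,
)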
31 changes: 1 addition & 30 deletions botorch/acquisition/knowledge_gradient.py
@@ -40,6 +40,7 @@
 from botorch.acquisition.cost_aware import CostAwareUtility
 from botorch.acquisition.monte_carlo import MCAcquisitionFunction, qSimpleRegret
 from botorch.acquisition.objective import MCAcquisitionObjective, PosteriorTransform
+from botorch.acquisition.projected import ProjectedAcquisitionFunction
 from botorch.exceptions.errors import UnsupportedError
 from botorch.models.model import Model
 from botorch.sampling.base import MCSampler
@@ -483,36 +484,6 @@ def forward(self, X: Tensor) -> Tensor:
         return values.mean(dim=0)
 
 
-class ProjectedAcquisitionFunction(AcquisitionFunction):
-    r"""
-    Defines a wrapper around an `AcquisitionFunction` that incorporates the project
-    operator. Typically used to handle value functions in look-ahead methods.
-    """
-
-    def __init__(
-        self,
-        base_value_function: AcquisitionFunction,
-        project: Callable[[Tensor], Tensor],
-    ) -> None:
-        r"""
-        Args:
-            base_value_function: The wrapped `AcquisitionFunction`.
-            project: A callable mapping a `batch_shape x q x d` tensor of design
-                points to a tensor with shape `batch_shape x q_term x d` projected
-                to the desired target set (e.g. the target fidelities in case of
-                multi-fidelity optimization). For the basic case, `q_term = q`.
-        """
-        super().__init__(base_value_function.model)
-        self.base_value_function = base_value_function
-        self.project = project
-        self.objective = getattr(base_value_function, "objective", None)
-        self.posterior_transform = base_value_function.posterior_transform
-        self.sampler = getattr(base_value_function, "sampler", None)
-
-    def forward(self, X: Tensor) -> Tensor:
-        return self.base_value_function(self.project(X))
-
-
 def _get_value_function(
     model: Model,
     objective: Optional[MCAcquisitionObjective] = None,
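The `project` callable described in the removed docstring maps candidate points onto the target set before the base value function is evaluated. Below is an illustrative sketch; the fidelity column index and target value are assumptions for illustration, and the wrapper class is imported from its new location per the import added above.

import torch
from torch import Tensor
from botorch.acquisition.projected import ProjectedAcquisitionFunction

TARGET_FIDELITY = 1.0  # assumed target fidelity value
FIDELITY_DIM = -1      # assume the last input column encodes fidelity

def project_to_target(X: Tensor) -> Tensor:
    # X has shape batch_shape x q x d; the projected tensor keeps q_term = q.
    X_proj = X.clone()
    X_proj[..., FIDELITY_DIM] = TARGET_FIDELITY
    return X_proj

# Hypothetical usage, following the constructor shown in the removed class:
# value_function = ...  # an AcquisitionFunction over the fantasy model
# projected_vf = ProjectedAcquisitionFunction(
#     base_value_function=value_function,
#     project=project_to_target,
# )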
6 changes: 6 additions & 0 deletions botorch/acquisition/multi_objective/__init__.py
@@ -8,6 +8,10 @@
     ExpectedHypervolumeImprovement,
     MultiObjectiveAnalyticAcquisitionFunction,
 )
+from botorch.acquisition.multi_objective.hypervolume_knowledge_gradient import (
+    qHypervolumeKnowledgeGradient,
+    qMultiFidelityHypervolumeKnowledgeGradient,
+)
 from botorch.acquisition.multi_objective.max_value_entropy_search import (
     qMultiObjectiveMaxValueEntropy,
 )
@@ -35,6 +39,8 @@
     "get_default_partitioning_alpha",
     "prune_inferior_points_multi_objective",
     "qExpectedHypervolumeImprovement",
+    "qHypervolumeKnowledgeGradient",
+    "qMultiFidelityHypervolumeKnowledgeGradient",
     "qNoisyExpectedHypervolumeImprovement",
     "MOMF",
     "qMultiObjectiveMaxValueEntropy",
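With the `__all__` additions above, both new acquisition functions are importable directly from the multi-objective subpackage namespace:

from botorch.acquisition.multi_objective import (
    qHypervolumeKnowledgeGradient,
    qMultiFidelityHypervolumeKnowledgeGradient,
)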
(Diffs for the remaining 5 changed files are not shown.)