Skip to content

Commit

Permalink
Only do checks for _optimize_acqf_sequential_q when it will be used (
Browse files Browse the repository at this point in the history
…#1598)

Summary:
X-link: facebook/Ax#1598

Pull Request resolved: #1808

`optimize_acqf` calls `_optimize_acqf_sequential_q` when `sequential=True` and `q > 1`. We had been doing input validation for `_optimize_acqf_sequential_q` even when it was not called, in the `q=1` case. This unnecessary check became a problem when `sequential=True` became a default for MBM in facebook/Ax#1585 , breaking Ax benchmarks.

This PR moves checks for sequential optimization to `_optimize_acqf_sequential_q`, so they will only happen if `_optimize_acqf_sequential_q` is called.

Reviewed By: saitcakmak

Differential Revision: D45324522

fbshipit-source-id: 1757abddcc1e3480c687800605b972c7ae603f8b
  • Loading branch information
esantorella authored and facebook-github-bot committed Apr 27, 2023
1 parent c3d100f commit 1264316
Show file tree
Hide file tree
Showing 2 changed files with 42 additions and 39 deletions.
79 changes: 41 additions & 38 deletions botorch/optim/optimize.py
Original file line number Diff line number Diff line change
Expand Up @@ -97,33 +97,6 @@ def __post_init__(self) -> None:
"bounds should be a `2 x d` tensor, current shape: "
f"{list(self.bounds.shape)}."
)
# validate that linear constraints across the q-dim and
# self.sequential are not present together
if self.inequality_constraints is not None and self.sequential is True:
for constraint in self.inequality_constraints:
if len(constraint[0].shape) > 1:
raise UnsupportedError(
"Linear inequality constraints across the q-dimension are not "
"supported for sequential optimization."
)
if self.equality_constraints is not None and self.sequential is True:
for constraint in self.equality_constraints:
if len(constraint[0].shape) > 1:
raise UnsupportedError(
"Linear equality constraints across the q-dimension are not "
"supported for sequential optimization."
)

# TODO: Validate constraints if provided:
# https://github.com/pytorch/botorch/pull/1231
if self.batch_initial_conditions is not None and self.sequential:
raise UnsupportedError(
"`batch_initial_conditions` is not supported for sequential "
"optimization. Either avoid specifying "
"`batch_initial_conditions` to use the custom initializer or "
"use the `ic_generator` kwarg to generate initial conditions "
"for the case of nonlinear inequality constraints."
)

d = self.bounds.shape[1]
if self.batch_initial_conditions is not None:
Expand Down Expand Up @@ -152,17 +125,6 @@ def __post_init__(self) -> None:
"`batch_initial_conditions` is None`."
)

if self.sequential and self.q > 1:
if not self.return_best_only:
raise NotImplementedError(
"`return_best_only=False` only supported for joint optimization."
)
if isinstance(self.acq_function, OneShotAcquisitionFunction):
raise NotImplementedError(
"sequential optimization currently not supported for one-shot "
"acquisition functions. Must have `sequential=False`."
)

def get_ic_generator(self) -> TGenInitialConditions:
if self.ic_generator is not None:
return self.ic_generator
Expand Down Expand Up @@ -211,12 +173,53 @@ def _optimize_acqf_all_features_fixed(
return X, acq_value


def _validate_sequential_inputs(opt_inputs: OptimizeAcqfInputs) -> None:
# validate that linear constraints across the q-dim and
# self.sequential are not present together
if opt_inputs.inequality_constraints is not None:
for constraint in opt_inputs.inequality_constraints:
if len(constraint[0].shape) > 1:
raise UnsupportedError(
"Linear inequality constraints across the q-dimension are not "
"supported for sequential optimization."
)
if opt_inputs.equality_constraints is not None:
for constraint in opt_inputs.equality_constraints:
if len(constraint[0].shape) > 1:
raise UnsupportedError(
"Linear equality constraints across the q-dimension are not "
"supported for sequential optimization."
)

# TODO: Validate constraints if provided:
# https://github.com/pytorch/botorch/pull/1231
if opt_inputs.batch_initial_conditions is not None:
raise UnsupportedError(
"`batch_initial_conditions` is not supported for sequential "
"optimization. Either avoid specifying "
"`batch_initial_conditions` to use the custom initializer or "
"use the `ic_generator` kwarg to generate initial conditions "
"for the case of nonlinear inequality constraints."
)

if not opt_inputs.return_best_only:
raise NotImplementedError(
"`return_best_only=False` only supported for joint optimization."
)
if isinstance(opt_inputs.acq_function, OneShotAcquisitionFunction):
raise NotImplementedError(
"sequential optimization currently not supported for one-shot "
"acquisition functions. Must have `sequential=False`."
)


def _optimize_acqf_sequential_q(
opt_inputs: OptimizeAcqfInputs, timeout_sec: Optional[float], start_time: float
) -> Tuple[Tensor, Tensor]:
"""
Helper function for `optimize_acqf` when sequential=True and q > 1.
"""
_validate_sequential_inputs(opt_inputs)
if timeout_sec is not None:
# When using sequential optimization, we allocate the total timeout
# evenly across the individual acquisition optimizations.
Expand Down
2 changes: 1 addition & 1 deletion test/optim/test_optimize.py
Original file line number Diff line number Diff line change
Expand Up @@ -358,7 +358,7 @@ def test_optimize_acqf_sequential(
q=q,
num_restarts=num_restarts,
raw_samples=raw_samples,
batch_initial_conditions=mock_gen_batch_initial_conditions,
batch_initial_conditions=torch.zeros((1, 1, 3)),
sequential=True,
)

Expand Down

0 comments on commit 1264316

Please sign in to comment.