Add timeout ability to gen_* functions (pytorch#1562)
Summary:
Pull Request resolved: pytorch#1562

Exposes a `timeout_sec` kwarg on `gen_candidates_scipy` and `gen_candidates_torch` that stops the optimization after it has run for more than `timeout_sec` seconds and returns the best solution found so far.

Differential Revision: D42024857

fbshipit-source-id: bb4f2287d19bac96881029678e8458cae646ef66
Balandat authored and facebook-github-bot committed Dec 28, 2022
1 parent 9f6320c commit 2657240
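
A minimal usage sketch of the new kwarg (the toy data, model, and acquisition function below are illustrative, not part of this commit):

import torch
from botorch.acquisition import ExpectedImprovement
from botorch.fit import fit_gpytorch_mll
from botorch.generation.gen import gen_candidates_scipy
from botorch.models import SingleTaskGP
from gpytorch.mlls import ExactMarginalLogLikelihood

# Toy problem: maximize a 1D acquisition surface on [0, 1].
train_X = torch.rand(10, 1, dtype=torch.double)
train_Y = -((train_X - 0.5) ** 2)
model = SingleTaskGP(train_X, train_Y)
fit_gpytorch_mll(ExactMarginalLogLikelihood(model.likelihood, model))
acqf = ExpectedImprovement(model, best_f=train_Y.max())

# Five restarts (`b x q x d` initial conditions). With timeout_sec set,
# the scipy optimization is cut off once it has run for more than ~5
# seconds and the best solution found so far is returned, rather than
# running all the way to convergence.
Xinit = torch.rand(5, 1, 1, dtype=torch.double)
batch_candidates, batch_acq_values = gen_candidates_scipy(
    initial_conditions=Xinit,
    acquisition_function=acqf,
    lower_bounds=0.0,
    upper_bounds=1.0,
    timeout_sec=5.0,
)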
Showing 3 changed files with 58 additions and 10 deletions.
24 changes: 22 additions & 2 deletions botorch/generation/gen.py
@@ -10,6 +10,7 @@

from __future__ import annotations

import time
import warnings
from functools import partial
from typing import Any, Callable, Dict, List, NoReturn, Optional, Tuple, Type, Union
@@ -29,7 +30,7 @@
)
from botorch.optim.stopping import ExpMAStoppingCriterion
from botorch.optim.utils import _filter_kwargs, columnwise_clamp, fix_features
from scipy.optimize import minimize
from botorch.optim.utils.timeout import minimize_with_timeout
from scipy.optimize.optimize import OptimizeResult
from torch import Tensor
from torch.optim import Optimizer
@@ -47,6 +48,7 @@ def gen_candidates_scipy(
nonlinear_inequality_constraints: Optional[List[Callable]] = None,
options: Optional[Dict[str, Any]] = None,
fixed_features: Optional[Dict[int, Optional[float]]] = None,
timeout_sec: Optional[float] = None,
) -> Tuple[Tensor, Tensor]:
r"""Generate a set of candidates using `scipy.optimize.minimize`.
@@ -80,6 +82,9 @@ def gen_candidates_scipy(
If the dictionary value is None, then that feature will just be
fixed to the clamped value and not optimized. Assumes values to be
compatible with lower_bounds and upper_bounds!
timeout_sec: Timeout (in seconds) for the `scipy.optimize.minimize` routine.
If provided, optimization will stop after this many seconds and return
the best solution found so far.
Returns:
2-element tuple containing
@@ -140,6 +145,7 @@ def gen_candidates_scipy(
equality_constraints=_no_fixed_features.equality_constraints,
options=options,
fixed_features=None,
timeout_sec=timeout_sec,
)
clamped_candidates = _no_fixed_features.acquisition_function._construct_X_full(
clamped_candidates
@@ -221,7 +227,7 @@ def f_np_wrapper(x: np.ndarray, f: Callable):
def f(x):
return -acquisition_function(x)

res = minimize(
res = minimize_with_timeout(
fun=f_np_wrapper,
args=(f,),
x0=x0,
@@ -235,6 +241,7 @@ def f(x):
for k, v in options.items()
if k not in ["method", "callback", "with_grad"]
},
timeout_sec=timeout_sec,
)
_process_scipy_result(res=res, options=options)

@@ -273,6 +280,7 @@ def gen_candidates_torch(
options: Optional[Dict[str, Union[float, str]]] = None,
callback: Optional[Callable[[int, Tensor, Tensor], NoReturn]] = None,
fixed_features: Optional[Dict[int, Optional[float]]] = None,
timeout_sec: Optional[float] = None,
) -> Tuple[Tensor, Tensor]:
r"""Generate a set of candidates using a `torch.optim` optimizer.
@@ -296,6 +304,9 @@
If the dictionary value is None, then that feature will just be
fixed to the clamped value and not optimized. Assumes values to be
compatible with lower_bounds and upper_bounds!
timeout_sec: Timeout (in seconds) for optimization. If provided,
`gen_candidates_torch` will stop after this many seconds and return
the best solution found so far.
Returns:
2-element tuple containing
@@ -316,6 +327,7 @@
upper_bounds=bounds[1],
)
"""
start_time = time.monotonic()
options = options or {}

# if there are fixed features we may optimize over a domain of lower dimension
@@ -340,6 +352,7 @@
options=options,
callback=callback,
fixed_features=None,
timeout_sec=timeout_sec - (time.monotonic() - start_time) if timeout_sec is not None else None,
)
clamped_candidates = subproblem.acquisition_function._construct_X_full(
clamped_candidates
@@ -372,6 +385,11 @@ def assign_grad():

_optimizer.step(assign_grad)
stop = stopping_criterion.evaluate(fvals=loss.detach())
if timeout_sec is not None:
runtime = time.monotonic() - start_time
if runtime > timeout_sec:
stop = True
logger.info(f"Optimization timed out after {runtime} seconds.")

clamped_candidates = _clamp(clamped_candidates)
with torch.no_grad():
@@ -435,6 +453,8 @@ def _process_scipy_result(res: OptimizeResult, options: Dict[str, Any]) -> None:
"`scipy.minimize` exited by reaching the function evaluation limit of "
f"`maxfun: {options.get('maxfun')}`."
)
elif "Optimization timed out after" in res.message:
logger.info(res.message)
else:
with warnings.catch_warnings():
warnings.simplefilter("always", category=OptimizationWarning)
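
The `minimize_with_timeout` helper imported above lives in `botorch.optim.utils.timeout` and is not shown in this diff. A rough sketch of one way such a wrapper can work (all names and details below are illustrative, not the actual implementation): scipy's per-iteration callback raises once the time budget is exhausted, and the wrapper catches that and returns a partial `OptimizeResult` whose message matches the "Optimization timed out after" check in `_process_scipy_result` above:

import time
from typing import Any, Callable, Optional

import numpy as np
from scipy.optimize import OptimizeResult, minimize


class OptimizationTimeoutError(RuntimeError):
    # Illustrative exception that carries the best iterate seen so far.
    def __init__(self, runtime: float, x: np.ndarray) -> None:
        super().__init__(f"Optimization timed out after {runtime} seconds.")
        self.runtime = runtime
        self.x = x


def minimize_with_timeout_sketch(
    fun: Callable,
    x0: np.ndarray,
    timeout_sec: Optional[float] = None,
    **kwargs: Any,
) -> OptimizeResult:
    # Without a budget, this is just scipy's minimize.
    if timeout_sec is None:
        return minimize(fun=fun, x0=x0, **kwargs)

    start = time.monotonic()

    def callback(xk: np.ndarray) -> None:
        # scipy calls this after each iteration; abort once the wall
        # time exceeds the budget, keeping the current iterate.
        runtime = time.monotonic() - start
        if runtime > timeout_sec:
            raise OptimizationTimeoutError(runtime=runtime, x=xk)

    try:
        return minimize(fun=fun, x0=x0, callback=callback, **kwargs)
    except OptimizationTimeoutError as e:
        # Surface a partial result whose message matches the
        # "Optimization timed out after" check in _process_scipy_result.
        return OptimizeResult(x=e.x, success=False, status=1, message=str(e))

A real implementation would also need to chain any user-supplied callback and handle method-specific callback signatures (e.g. `trust-constr` passes an extra state argument).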
40 changes: 33 additions & 7 deletions test/generation/test_gen.py
@@ -67,7 +67,9 @@ def _setUp(self, double=False, expand=False):


class TestGenCandidates(TestBaseCandidateGeneration):
def test_gen_candidates(self, gen_candidates=gen_candidates_scipy, options=None):
def test_gen_candidates(
self, gen_candidates=gen_candidates_scipy, options=None, timeout_sec=None
):
options = options or {}
options = {**options, "maxiter": options.get("maxiter", 5)}
for double in (True, False):
@@ -89,6 +91,7 @@ def test_gen_candidates(self, gen_candidates=gen_candidates_scipy, options=None)
"lower_bounds": 0,
"upper_bounds": 1,
"options": options or {},
"timeout_sec": timeout_sec,
}
if gen_candidates is gen_candidates_torch:
kwargs["callback"] = mock.MagicMock()
@@ -177,6 +180,11 @@ def test_gen_candidates_with_fixed_features(
self.assertTrue(-EPS <= candidates[0] <= 1 + EPS)
self.assertTrue(candidates[1].item() == 0.25)

def test_gen_candidates_torch_with_fixed_features(self):
self.test_gen_candidates_with_fixed_features(
gen_candidates=gen_candidates_torch, options={"disp": False}
)

def test_gen_candidates_scipy_with_fixed_features_inequality_constraints(self):
options = {"maxiter": 5}
for double in (True, False):
@@ -235,6 +243,29 @@ def test_gen_candidates_scipy_maxiter_behavior(self):
self.assertFalse(any(issubclass(w.category, OptimizationWarning) for w in ws))
self.assertTrue("function evaluation limit" in logs.output[-1])

def test_gen_candidates_scipy_timeout_behavior(self):
# Check that no warnings are raised and a log message is produced on hitting the timeout.
for method in ("SLSQP", "L-BFGS-B"):
with warnings.catch_warnings(record=True) as ws, self.assertLogs(
"botorch", level="INFO"
) as logs:
self.test_gen_candidates(options={"method": method}, timeout_sec=0.001)
self.assertFalse(
any(issubclass(w.category, OptimizationWarning) for w in ws)
)
self.assertTrue("Optimization timed out" in logs.output[-1])

def test_gen_candidates_torch_timeout_behavior(self):
# Check that no warnings are raised and a log message is produced on hitting the timeout.
with warnings.catch_warnings(record=True) as ws, self.assertLogs(
"botorch", level="INFO"
) as logs:
self.test_gen_candidates(
gen_candidates=gen_candidates_torch, timeout_sec=0.001
)
self.assertFalse(any(issubclass(w.category, OptimizationWarning) for w in ws))
self.assertTrue("Optimization timed out" in logs.output[-1])

def test_gen_candidates_scipy_warns_opt_no_res(self):
ckwargs = {"dtype": torch.float, "device": self.device}

@@ -244,7 +275,7 @@ def test_gen_candidates_scipy_warns_opt_no_res(self):
"status returned to `res.`"
)
with mock.patch(
"botorch.generation.gen.minimize"
"botorch.generation.gen.minimize_with_timeout"
) as mock_minimize, warnings.catch_warnings(record=True) as ws:
mock_minimize.return_value = OptimizeResult(x=test_ics.cpu().numpy())

@@ -261,11 +292,6 @@
)
self.assertTrue(expected_warning_raised)

def test_gen_candidates_torch_with_fixed_features(self):
self.test_gen_candidates_with_fixed_features(
gen_candidates=gen_candidates_torch, options={"disp": False}
)

def test_gen_candidates_scipy_nan_handling(self):
for dtype, expected_regex in [
(torch.float, "Consider using"),
4 changes: 3 additions & 1 deletion test/optim/test_optimize.py
@@ -541,7 +541,9 @@ def nlc1(x):

# Make sure we return the initial solution if SLSQP fails to return
# a feasible point.
with mock.patch("botorch.generation.gen.minimize") as mock_minimize:
with mock.patch(
"botorch.generation.gen.minimize_with_timeout"
) as mock_minimize:
# By setting "success" to True and "status" to 0, we prevent a
# warning that `minimize` failed, which isn't the behavior
# we're looking to test here.
