From 8cb3ea0366c90da42e9e76db9a1399571be70005 Mon Sep 17 00:00:00 2001
From: Max Balandat
Date: Wed, 28 Dec 2022 10:00:57 -0800
Subject: [PATCH] Add timeout ability to gen_* functions (#1562)

Summary:
Pull Request resolved: https://github.com/pytorch/botorch/pull/1562

Exposes the ability to pass a `timeout_sec` kwarg to `gen_candidates_scipy`
and `gen_candidates_torch` that allows stopping the optimization after it has
run for more than `timeout_sec` seconds.

Differential Revision: D42024857

fbshipit-source-id: 1fec382ce1dfa80e54383eaf8a5edec777726d25
---
 botorch/generation/gen.py   | 24 ++++++++++++++++++++++++--
 test/generation/test_gen.py | 40 +++++++++++++++++++++++++++++++++-------
 test/optim/test_optimize.py |  4 +++-
 3 files changed, 58 insertions(+), 10 deletions(-)

diff --git a/botorch/generation/gen.py b/botorch/generation/gen.py
index 6737906512..4b7731163c 100644
--- a/botorch/generation/gen.py
+++ b/botorch/generation/gen.py
@@ -10,6 +10,7 @@
 
 from __future__ import annotations
 
+import time
 import warnings
 from functools import partial
 from typing import Any, Callable, Dict, List, NoReturn, Optional, Tuple, Type, Union
@@ -29,7 +30,7 @@
 )
 from botorch.optim.stopping import ExpMAStoppingCriterion
 from botorch.optim.utils import _filter_kwargs, columnwise_clamp, fix_features
-from scipy.optimize import minimize
+from botorch.optim.utils.timeout import minimize_with_timeout
 from scipy.optimize.optimize import OptimizeResult
 from torch import Tensor
 from torch.optim import Optimizer
@@ -47,6 +48,7 @@ def gen_candidates_scipy(
     nonlinear_inequality_constraints: Optional[List[Callable]] = None,
     options: Optional[Dict[str, Any]] = None,
     fixed_features: Optional[Dict[int, Optional[float]]] = None,
+    timeout_sec: Optional[float] = None,
 ) -> Tuple[Tensor, Tensor]:
     r"""Generate a set of candidates using `scipy.optimize.minimize`.
 
@@ -80,6 +82,9 @@ def gen_candidates_scipy(
             If the dictionary value is None, then that feature will just be
             fixed to the clamped value and not optimized. Assumes values to be
             compatible with lower_bounds and upper_bounds!
+        timeout_sec: Timeout (in seconds) for the `scipy.optimize.minimize` routine.
+            If provided, optimization will stop after this many seconds and return
+            the best solution found so far.
 
     Returns:
         2-element tuple containing
 
@@ -140,6 +145,7 @@
             equality_constraints=_no_fixed_features.equality_constraints,
             options=options,
             fixed_features=None,
+            timeout_sec=timeout_sec,
         )
         clamped_candidates = _no_fixed_features.acquisition_function._construct_X_full(
             clamped_candidates
@@ -221,7 +227,7 @@ def f_np_wrapper(x: np.ndarray, f: Callable):
         def f(x):
             return -acquisition_function(x)
 
-    res = minimize(
+    res = minimize_with_timeout(
         fun=f_np_wrapper,
         args=(f,),
         x0=x0,
@@ -235,6 +241,7 @@ def f(x):
             for k, v in options.items()
             if k not in ["method", "callback", "with_grad"]
         },
+        timeout_sec=timeout_sec,
    )
     _process_scipy_result(res=res, options=options)
 
@@ -273,6 +280,7 @@ def gen_candidates_torch(
     options: Optional[Dict[str, Union[float, str]]] = None,
     callback: Optional[Callable[[int, Tensor, Tensor], NoReturn]] = None,
     fixed_features: Optional[Dict[int, Optional[float]]] = None,
+    timeout_sec: Optional[float] = None,
 ) -> Tuple[Tensor, Tensor]:
     r"""Generate a set of candidates using a `torch.optim` optimizer.
 
@@ -296,6 +304,9 @@ def gen_candidates_torch(
             If the dictionary value is None, then that feature will just be
             fixed to the clamped value and not optimized. Assumes values to be
             compatible with lower_bounds and upper_bounds!
+        timeout_sec: Timeout (in seconds) for optimization. If provided,
+            `gen_candidates_torch` will stop after this many seconds and return
+            the best solution found so far.
 
     Returns:
         2-element tuple containing
 
@@ -316,6 +327,7 @@ def gen_candidates_torch(
             upper_bounds=bounds[1],
         )
     """
+    start_time = time.monotonic()
     options = options or {}
 
     # if there are fixed features we may optimize over a domain of lower dimension
@@ -340,6 +352,7 @@ def gen_candidates_torch(
             options=options,
             callback=callback,
             fixed_features=None,
+            timeout_sec=timeout_sec - (time.monotonic() - start_time) if timeout_sec else None,
         )
         clamped_candidates = subproblem.acquisition_function._construct_X_full(
             clamped_candidates
@@ -372,6 +385,11 @@ def assign_grad():
 
         _optimizer.step(assign_grad)
         stop = stopping_criterion.evaluate(fvals=loss.detach())
+        if timeout_sec is not None:
+            runtime = time.monotonic() - start_time
+            if runtime > timeout_sec:
+                stop = True
+                logger.info(f"Optimization timed out after {runtime} seconds.")
 
     clamped_candidates = _clamp(clamped_candidates)
     with torch.no_grad():
@@ -435,6 +453,8 @@ def _process_scipy_result(res: OptimizeResult, options: Dict[str, Any]) -> None:
             "`scipy.minimize` exited by reaching the function evaluation limit of "
             f"`maxfun: {options.get('maxfun')}`."
         )
+    elif "Optimization timed out after" in res.message:
+        logger.info(res.message)
     else:
         with warnings.catch_warnings():
             warnings.simplefilter("always", category=OptimizationWarning)
diff --git a/test/generation/test_gen.py b/test/generation/test_gen.py
index 5ec6205e6d..1a2eeb9798 100644
--- a/test/generation/test_gen.py
+++ b/test/generation/test_gen.py
@@ -67,7 +67,9 @@ def _setUp(self, double=False, expand=False):
 
 
 class TestGenCandidates(TestBaseCandidateGeneration):
-    def test_gen_candidates(self, gen_candidates=gen_candidates_scipy, options=None):
+    def test_gen_candidates(
+        self, gen_candidates=gen_candidates_scipy, options=None, timeout_sec=None
+    ):
         options = options or {}
         options = {**options, "maxiter": options.get("maxiter", 5)}
         for double in (True, False):
@@ -89,6 +91,7 @@ def test_gen_candidates(self, gen_candidates=gen_candidates_scipy, options=None)
                 "lower_bounds": 0,
                 "upper_bounds": 1,
                 "options": options or {},
+                "timeout_sec": timeout_sec,
             }
             if gen_candidates is gen_candidates_torch:
                 kwargs["callback"] = mock.MagicMock()
@@ -177,6 +180,11 @@ def test_gen_candidates_with_fixed_features(
         self.assertTrue(-EPS <= candidates[0] <= 1 + EPS)
         self.assertTrue(candidates[1].item() == 0.25)
 
+    def test_gen_candidates_torch_with_fixed_features(self):
+        self.test_gen_candidates_with_fixed_features(
+            gen_candidates=gen_candidates_torch, options={"disp": False}
+        )
+
     def test_gen_candidates_scipy_with_fixed_features_inequality_constraints(self):
         options = {"maxiter": 5}
         for double in (True, False):
@@ -235,6 +243,29 @@ def test_gen_candidates_scipy_maxiter_behavior(self):
         self.assertFalse(any(issubclass(w.category, OptimizationWarning) for w in ws))
         self.assertTrue("function evaluation limit" in logs.output[-1])
 
+    def test_gen_candidates_scipy_timeout_behavior(self):
+        # Check that no warnings are raised and a log is produced on timeout.
+        for method in ("SLSQP", "L-BFGS-B"):
+            with warnings.catch_warnings(record=True) as ws, self.assertLogs(
+                "botorch", level="INFO"
+            ) as logs:
+                self.test_gen_candidates(options={"method": method}, timeout_sec=0.001)
+            self.assertFalse(
+                any(issubclass(w.category, OptimizationWarning) for w in ws)
+            )
+            self.assertTrue("Optimization timed out" in logs.output[-1])
+
+    def test_gen_candidates_torch_timeout_behavior(self):
+        # Check that no warnings are raised and a log is produced on timeout.
+        with warnings.catch_warnings(record=True) as ws, self.assertLogs(
+            "botorch", level="INFO"
+        ) as logs:
+            self.test_gen_candidates(
+                gen_candidates=gen_candidates_torch, timeout_sec=0.001
+            )
+        self.assertFalse(any(issubclass(w.category, OptimizationWarning) for w in ws))
+        self.assertTrue("Optimization timed out" in logs.output[-1])
+
     def test_gen_candidates_scipy_warns_opt_no_res(self):
         ckwargs = {"dtype": torch.float, "device": self.device}
 
@@ -244,7 +275,7 @@ def test_gen_candidates_scipy_warns_opt_no_res(self):
             "status returned to `res.`"
         )
         with mock.patch(
-            "botorch.generation.gen.minimize"
+            "botorch.generation.gen.minimize_with_timeout"
         ) as mock_minimize, warnings.catch_warnings(record=True) as ws:
             mock_minimize.return_value = OptimizeResult(x=test_ics.cpu().numpy())
 
@@ -261,11 +292,6 @@ def test_gen_candidates_scipy_warns_opt_no_res(self):
             )
         self.assertTrue(expected_warning_raised)
 
-    def test_gen_candidates_torch_with_fixed_features(self):
-        self.test_gen_candidates_with_fixed_features(
-            gen_candidates=gen_candidates_torch, options={"disp": False}
-        )
-
     def test_gen_candidates_scipy_nan_handling(self):
         for dtype, expected_regex in [
             (torch.float, "Consider using"),
diff --git a/test/optim/test_optimize.py b/test/optim/test_optimize.py
index 1c527899a3..5ecdf8851c 100644
--- a/test/optim/test_optimize.py
+++ b/test/optim/test_optimize.py
@@ -541,7 +541,9 @@ def nlc1(x):
 
         # Make sure we return the initial solution if SLSQP fails to return
         # a feasible point.
-        with mock.patch("botorch.generation.gen.minimize") as mock_minimize:
+        with mock.patch(
+            "botorch.generation.gen.minimize_with_timeout"
+        ) as mock_minimize:
             # By setting "success" to True and "status" to 0, we prevent a
             # warning that `minimize` failed, which isn't the behavior
             # we're looking to test here.
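
Usage sketch: a minimal example of the new `timeout_sec` kwarg with
`gen_candidates_scipy`. The toy model, random training data, and acquisition
setup below are assumptions made for illustration and are not part of this
patch; `gen_candidates_torch` accepts the same kwarg.

    import torch
    from botorch.acquisition import qExpectedImprovement
    from botorch.generation.gen import gen_candidates_scipy
    from botorch.models import SingleTaskGP

    # Toy GP model on random data (purely illustrative).
    train_X = torch.rand(10, 2, dtype=torch.double)
    train_Y = train_X.sum(dim=-1, keepdim=True)
    model = SingleTaskGP(train_X, train_Y)
    qEI = qExpectedImprovement(model=model, best_f=train_Y.max())

    # `b x q x d` batch of initial conditions in the unit square.
    Xinit = torch.rand(3, 2, 2, dtype=torch.double)

    # Stop the scipy optimization after roughly 5 seconds; rather than
    # raising, the call returns the best solution found so far.
    batch_candidates, batch_acq_values = gen_candidates_scipy(
        initial_conditions=Xinit,
        acquisition_function=qEI,
        lower_bounds=torch.zeros(2, dtype=torch.double),
        upper_bounds=torch.ones(2, dtype=torch.double),
        timeout_sec=5.0,
    )

Since the timeout acts as a soft budget rather than an exception, callers can
pass a wall-clock limit and still consume whatever candidates were produced.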