diff --git a/bayes_opt/__init__.py b/bayes_opt/__init__.py index 325143cef..7ed07ed46 100644 --- a/bayes_opt/__init__.py +++ b/bayes_opt/__init__.py @@ -9,6 +9,7 @@ from bayes_opt.constraint import ConstraintModel from bayes_opt.domain_reduction import SequentialDomainReductionTransformer from bayes_opt.logger import JSONLogger, ScreenLogger +from bayes_opt.target_space import TargetSpace __version__ = importlib.metadata.version("bayesian-optimization") @@ -16,6 +17,7 @@ __all__ = [ "acquisition", "BayesianOptimization", + "TargetSpace", "ConstraintModel", "Events", "ScreenLogger", diff --git a/bayes_opt/acquisition.py b/bayes_opt/acquisition.py index 9a8a0729e..7bc47e0b4 100644 --- a/bayes_opt/acquisition.py +++ b/bayes_opt/acquisition.py @@ -1,4 +1,22 @@ -"""Acquisition functions for Bayesian Optimization.""" +"""Acquisition functions for Bayesian Optimization. + +The acquisition functions in this module can be grouped the following way: + +- One of the base acquisition functions + (:py:class:`UpperConfidenceBound`, + :py:class:`ProbabilityOfImprovement` and + :py:class:`ExpectedImprovement`) is always dictating the basic + behavior of the suggestion step. They can be used alone or combined with a meta acquisition function. +- :py:class:`GPHedge` is a meta acquisition function that combines multiple + base acquisition functions and determines the most suitable one for a particular problem. +- :py:class:`ConstantLiar` is a meta acquisition function that can be + used for parallelized optimization and discourages sampling near a previously suggested, but not yet + evaluated, point. +- :py:class:`AcquisitionFunction` is the base class for all + acquisition functions. You can implement your own acquisition function by subclassing it. See the + `Acquisition Functions notebook <../acquisition.html>`__ to understand the many ways this class can be + modified. 
+""" from __future__ import annotations @@ -373,6 +391,11 @@ def decay_exploration(self) -> None: """Decay kappa by a constant rate. Adjust exploration/exploitation trade-off by reducing kappa. + + Note + ---- + + This method is called automatically at the end of each ``suggest()`` call. """ if self.exploration_decay is not None and ( self.exploration_decay_delay is None or self.exploration_decay_delay <= self.i @@ -495,6 +518,11 @@ def decay_exploration(self) -> None: r"""Decay xi by a constant rate. Adjust exploration/exploitation trade-off by reducing xi. + + Note + ---- + + This method is called automatically at the end of each ``suggest()`` call. """ if self.exploration_decay is not None and ( self.exploration_decay_delay is None or self.exploration_decay_delay <= self.i @@ -625,6 +653,11 @@ def decay_exploration(self) -> None: r"""Decay xi by a constant rate. Adjust exploration/exploitation trade-off by reducing xi. + + Note + ---- + + This method is called automatically at the end of each ``suggest()`` call. """ if self.exploration_decay is not None and ( self.exploration_decay_delay is None or self.exploration_decay_delay <= self.i diff --git a/bayes_opt/bayesian_optimization.py b/bayes_opt/bayesian_optimization.py index 378b3a4fd..363b31464 100644 --- a/bayes_opt/bayesian_optimization.py +++ b/bayes_opt/bayesian_optimization.py @@ -93,8 +93,9 @@ class BayesianOptimization(Observable): Dictionary with parameters names as keys and a tuple with minimum and maximum values. - constraint: A ConstraintModel. Note that the names of arguments of the - constraint function and of f need to be the same. + constraint: ConstraintModel. + Note that the names of arguments of the constraint function and of + f need to be the same. 
random_state: int or numpy.random.RandomState, optional(default=None) If the value is an integer, it is used as the seed for creating a @@ -112,19 +113,6 @@ class BayesianOptimization(Observable): This behavior may be desired in high noise situations where repeatedly probing the same point will give different answers. In other situations, the acquisition may occasionally generate a duplicate point. - - Methods - ------- - probe() - Evaluates the function on the given points. - Can be used to guide the optimizer. - - maximize() - Tries to find the parameters that yield the maximum value for the - given function. - - set_bounds() - Allows changing the lower and upper searching bounds """ def __init__( @@ -303,12 +291,20 @@ def maximize(self, init_points=5, n_iter=25): Parameters ---------- init_points : int, optional(default=5) - Number of iterations before the explorations starts the exploration - for the maximum. + Number of random points to probe before starting the optimization. n_iter: int, optional(default=25) Number of iterations where the method attempts to find the maximum value. + + Warning + ------- + The maximize loop only fits the GP when suggesting a new point to + probe based on the acquisition function. This means that the GP may + not be fitted on all points registered to the target space when the + method completes. If you intend to use the GP model after the + optimization routine, make sure to fit it manually, e.g. by calling + ``optimizer._gp.fit(optimizer.space.params, optimizer.space.target)``. """ self._prime_subscriptions() self.dispatch(Events.OPTIMIZATION_START) diff --git a/bayes_opt/constraint.py b/bayes_opt/constraint.py index 79c0a76ab..46cf3f793 100644 --- a/bayes_opt/constraint.py +++ b/bayes_opt/constraint.py @@ -33,12 +33,11 @@ class ConstraintModel: random_state : np.random.RandomState or int or None, default=None Random state to use. 
- Notes - ----- + Note + ---- In case of multiple constraints, this model assumes conditional - independence. This means that for each constraint, the probability of - fulfillment is the cdf of a univariate Gaussian. The overall probability - is a simply the product of the individual probabilities. + independence. This means that the overall probability of fulfillment is + simply the product of the individual probabilities. """ def __init__(self, fun, lb, ub, random_state=None): @@ -112,9 +111,9 @@ def fit(self, X, Y): Parameters ---------- - X : + X : np.ndarray of shape (n_samples, n_features) Parameters of the constraint function. - Y : + Y : np.ndarray of shape (n_samples, n_constraints) Values of the constraint function. @@ -146,6 +145,9 @@ def predict(self, X): :math:`c^{\text{up}}` the lower and upper bounds of the constraint respectively. + Note + ---- + In case of multiple constraints, we assume conditional independence. This means we calculate the probability of constraint fulfilment individually, with the joint probability given as their product. diff --git a/docsrc/code_docs.rst b/docsrc/code_docs.rst deleted file mode 100644 index 0088ff170..000000000 --- a/docsrc/code_docs.rst +++ /dev/null @@ -1,34 +0,0 @@ -Code Documentation -================== - -This page shows the documentation generated by sphinx automatically scanning the source code. - -Bayesian Optimization ---------------------- - -.. autoclass:: bayes_opt.BayesianOptimization - :members: - -Acquisition Functions ---------------------- - -.. automodule:: bayes_opt.acquisition - :members: - -Target Space ------------- - -.. autoclass:: bayes_opt.target_space.TargetSpace - :members: - -Domain reduction ----------------- - -.. autoclass:: bayes_opt.domain_reduction.SequentialDomainReductionTransformer - :members: - -Constraints ------------ - -.. 
autoclass:: bayes_opt.constraint.ConstraintModel - :members: diff --git a/docsrc/conf.py b/docsrc/conf.py index c5dbb4437..d6a02dc0b 100644 --- a/docsrc/conf.py +++ b/docsrc/conf.py @@ -12,6 +12,7 @@ # import os import sys +import time import shutil from glob import glob from pathlib import Path @@ -44,7 +45,8 @@ 'IPython.sphinxext.ipython_console_highlighting', 'sphinx.ext.mathjax', "sphinx.ext.napoleon", - 'sphinx_immaterial' + 'sphinx.ext.intersphinx', + 'sphinx_immaterial', ] source_suffix = { @@ -58,6 +60,16 @@ # This pattern also affects html_static_path and html_extra_path. exclude_patterns = [] +# Link types to the corresponding documentations +intersphinx_mapping = { + 'python': ('https://docs.python.org/3', None), + 'numpy': ('https://numpy.org/doc/stable/', None), + 'scipy': ('https://docs.scipy.org/doc/scipy/reference/', None), + 'sklearn': ('https://scikit-learn.org/stable/', None), +} + + +napoleon_use_rtype = False # -- Options for HTML output ------------------------------------------------- @@ -67,7 +79,7 @@ html_title = "Bayesian Optimization" html_theme = "sphinx_immaterial" -copyright = 'Fernando Nogueira and the bayesian-optimization developers' +copyright = f"{time.strftime('%Y')}, Fernando Nogueira and the bayesian-optimization developers" # material theme options (see theme.conf for more information) html_theme_options = { @@ -122,6 +134,7 @@ "version_dropdown": True, "version_json": '../versions.json', # END: version_dropdown + "scope": "/", # share preferences across subsites "toc_title_is_page_title": True, # BEGIN: social icons "social": [ diff --git a/docsrc/examples.rst b/docsrc/examples.rst deleted file mode 100644 index 0bd58ae34..000000000 --- a/docsrc/examples.rst +++ /dev/null @@ -1,14 +0,0 @@ -Examples -======== - -.. toctree:: - :maxdepth: 2 - :caption: Example Notebooks: - - Basic Tour - Advanced Tour - Constrained Bayesian Optimization - Sequential Domain Reduction - Acquisition Functions - Exploration vs. 
Exploitation - Visualization of a 1D-Optimization diff --git a/docsrc/index.rst b/docsrc/index.rst index 98afb614b..d6f9384d0 100644 --- a/docsrc/index.rst +++ b/docsrc/index.rst @@ -1,11 +1,32 @@ .. toctree:: :hidden: - :maxdepth: 3 - :caption: Contents: Quickstart - Example Notebooks - /code_docs + +.. toctree:: + :hidden: + :maxdepth: 3 + :caption: Example Notebooks: + + Basic Tour + Advanced Tour + Constrained Bayesian Optimization + Sequential Domain Reduction + Acquisition Functions + Exploration vs. Exploitation + Visualization of a 1D-Optimization + +.. toctree:: + :hidden: + :maxdepth: 2 + :caption: API reference: + + reference/bayes_opt + reference/acquisition + reference/constraint + reference/domain_reduction + reference/target_space + reference/other .. raw:: html diff --git a/docsrc/reference/acquisition.rst b/docsrc/reference/acquisition.rst new file mode 100644 index 000000000..aa3485af4 --- /dev/null +++ b/docsrc/reference/acquisition.rst @@ -0,0 +1,14 @@ +:py:mod:`bayes_opt.acquisition` +------------------------------- + +.. automodule:: bayes_opt.acquisition + :members: AcquisitionFunction + +.. toctree:: + :hidden: + + acquisition/UpperConfidenceBound + acquisition/ProbabilityOfImprovement + acquisition/ExpectedImprovement + acquisition/GPHedge + acquisition/ConstantLiar diff --git a/docsrc/reference/acquisition/ConstantLiar.rst b/docsrc/reference/acquisition/ConstantLiar.rst new file mode 100644 index 000000000..1e7e4d901 --- /dev/null +++ b/docsrc/reference/acquisition/ConstantLiar.rst @@ -0,0 +1,5 @@ +:py:class:`bayes_opt.acquisition.ConstantLiar` +---------------------------------------------- + +.. 
autoclass:: bayes_opt.acquisition.ConstantLiar + :members: diff --git a/docsrc/reference/acquisition/ExpectedImprovement.rst b/docsrc/reference/acquisition/ExpectedImprovement.rst new file mode 100644 index 000000000..ad606c8b9 --- /dev/null +++ b/docsrc/reference/acquisition/ExpectedImprovement.rst @@ -0,0 +1,5 @@ +:py:class:`bayes_opt.acquisition.ExpectedImprovement` +----------------------------------------------------- + +.. autoclass:: bayes_opt.acquisition.ExpectedImprovement + :members: diff --git a/docsrc/reference/acquisition/GPHedge.rst b/docsrc/reference/acquisition/GPHedge.rst new file mode 100644 index 000000000..8b4428d52 --- /dev/null +++ b/docsrc/reference/acquisition/GPHedge.rst @@ -0,0 +1,5 @@ +:py:class:`bayes_opt.acquisition.GPHedge` +----------------------------------------- + +.. autoclass:: bayes_opt.acquisition.GPHedge + :members: diff --git a/docsrc/reference/acquisition/ProbabilityOfImprovement.rst b/docsrc/reference/acquisition/ProbabilityOfImprovement.rst new file mode 100644 index 000000000..fe59b54ea --- /dev/null +++ b/docsrc/reference/acquisition/ProbabilityOfImprovement.rst @@ -0,0 +1,5 @@ +:py:class:`bayes_opt.acquisition.ProbabilityOfImprovement` +---------------------------------------------------------- + +.. autoclass:: bayes_opt.acquisition.ProbabilityOfImprovement + :members: diff --git a/docsrc/reference/acquisition/UpperConfidenceBound.rst b/docsrc/reference/acquisition/UpperConfidenceBound.rst new file mode 100644 index 000000000..46a08ad92 --- /dev/null +++ b/docsrc/reference/acquisition/UpperConfidenceBound.rst @@ -0,0 +1,5 @@ +:py:class:`bayes_opt.acquisition.UpperConfidenceBound` +------------------------------------------------------ + +.. 
autoclass:: bayes_opt.acquisition.UpperConfidenceBound + :members: diff --git a/docsrc/reference/bayes_opt.rst b/docsrc/reference/bayes_opt.rst new file mode 100644 index 000000000..54480284f --- /dev/null +++ b/docsrc/reference/bayes_opt.rst @@ -0,0 +1,5 @@ +:py:class:`bayes_opt.BayesianOptimization` +------------------------------------------ + +.. autoclass:: bayes_opt.BayesianOptimization + :members: diff --git a/docsrc/reference/constraint.rst b/docsrc/reference/constraint.rst new file mode 100644 index 000000000..64239fa78 --- /dev/null +++ b/docsrc/reference/constraint.rst @@ -0,0 +1,7 @@ +:py:class:`bayes_opt.ConstraintModel` +------------------------------------------------ + +See the `Constrained Optimization notebook <../constraints.html#2.-Advanced-Constrained-Optimization>`__ for a complete example. + +.. autoclass:: bayes_opt.constraint.ConstraintModel + :members: diff --git a/docsrc/reference/domain_reduction.rst b/docsrc/reference/domain_reduction.rst new file mode 100644 index 000000000..cd7524d04 --- /dev/null +++ b/docsrc/reference/domain_reduction.rst @@ -0,0 +1,7 @@ +:py:class:`bayes_opt.SequentialDomainReductionTransformer` +---------------------------------------------------------- + +See the `Sequential Domain Reduction notebook <../domain_reduction.html>`__ for a complete example. + +.. autoclass:: bayes_opt.SequentialDomainReductionTransformer + :members: \ No newline at end of file diff --git a/docsrc/reference/other.rst b/docsrc/reference/other.rst new file mode 100644 index 000000000..22836cfd6 --- /dev/null +++ b/docsrc/reference/other.rst @@ -0,0 +1,11 @@ +Other +----- + +.. autoclass:: bayes_opt.ScreenLogger + :members: + +.. autoclass:: bayes_opt.JSONLogger + :members: + +.. 
autoclass:: bayes_opt.Events + :members: diff --git a/docsrc/reference/target_space.rst b/docsrc/reference/target_space.rst new file mode 100644 index 000000000..38f654467 --- /dev/null +++ b/docsrc/reference/target_space.rst @@ -0,0 +1,5 @@ +:py:class:`bayes_opt.TargetSpace` +--------------------------------- + +.. autoclass:: bayes_opt.TargetSpace + :members: