From fb6739cb60bdb1e5f7d67490ee765ba195a77a92 Mon Sep 17 00:00:00 2001
From: Ce Gao
Date: Fri, 11 Oct 2019 03:37:39 -0500
Subject: [PATCH] feat: Support random state in random search (#873)

* feat: Support random search

Signed-off-by: Ce Gao

* feat: Update docs

Signed-off-by: Ce Gao
---
 docs/algorithm-settings.md                     |  9 +++++++--
 .../v1alpha3/hyperopt/base_hyperopt_service.py |  5 +++--
 pkg/suggestion/v1alpha3/hyperopt_service.py    | 17 ++++++++++++++++-
 3 files changed, 26 insertions(+), 5 deletions(-)

diff --git a/docs/algorithm-settings.md b/docs/algorithm-settings.md
index 942acc9249f..4448cb55516 100644
--- a/docs/algorithm-settings.md
+++ b/docs/algorithm-settings.md
@@ -35,7 +35,12 @@ Random sampling is an alternative to grid search when the number of discrete par
 
 ### [Hyperopt][]
 
-Algorithm name in katib is `random`.
+Algorithm name in katib is `random`, and there are some algorithm settings that we support:
+
+| Setting Name | Description                                                                     | Example |
+|--------------|---------------------------------------------------------------------------------|---------|
+| random_state | [int]: Set random state to something other than None for reproducible results.  | 10      |
+
 
 ## TPE
 
@@ -75,7 +80,7 @@ Algorithm name in katib is `skopt-bayesian-optimization`, and there are some alg
 | n_initial_points | [int, default=10]: Number of evaluations of `func` with initialization points before approximating it with `base_estimator`. Points provided as `x0` count as initialization points. If len(x0) < n_initial_points additional points are sampled at random. More in [skopt document](https://scikit-optimize.github.io/#skopt.Optimizer) | 10 |
 | acq_func | [string, default=`"gp_hedge"`]: Function to minimize over the posterior distribution. More in [skopt document](https://scikit-optimize.github.io/#skopt.Optimizer) | gp_hedge |
 | acq_optimizer | [string, "sampling" or "lbfgs", default="auto"]: Method to minimize the acquistion function. The fit model is updated with the optimal value obtained by optimizing acq_func with acq_optimizer. More in [skopt document](https://scikit-optimize.github.io/#skopt.Optimizer) | auto |
-| random_state | [int, RandomState instance, or None (default)]: Set random state to something other than None for reproducible results. | 10 |
+| random_state | [int]: Set random state to something other than None for reproducible results. | 10 |
 
 ## References
 
diff --git a/pkg/suggestion/v1alpha3/hyperopt/base_hyperopt_service.py b/pkg/suggestion/v1alpha3/hyperopt/base_hyperopt_service.py
index 636d86624cf..455228fea9f 100644
--- a/pkg/suggestion/v1alpha3/hyperopt/base_hyperopt_service.py
+++ b/pkg/suggestion/v1alpha3/hyperopt/base_hyperopt_service.py
@@ -8,7 +8,8 @@ logger = logging.getLogger("BaseHyperoptService")
 
 
 class BaseHyperoptService(object):
-    def __init__(self, algorithm_name="tpe"):
+    def __init__(self, algorithm_name="tpe", random_state=None):
+        self.random_state = random_state
         if algorithm_name == 'tpe':
             self.hyperopt_algorithm = hyperopt.tpe.suggest
         elif algorithm_name == 'random':
@@ -42,7 +43,7 @@ def getSuggestions(self, search_space, trials, request_number):
                 hyperopt_search_space[param.name] = hyperopt.hp.choice(
                     param.name, param.list)
         # New hyperopt variables
-        hyperopt_rstate = np.random.RandomState()
+        hyperopt_rstate = np.random.RandomState(self.random_state)
 
         hyperopt_domain = hyperopt.Domain(
             None, hyperopt_search_space, pass_expr_memo_ctrl=None)
diff --git a/pkg/suggestion/v1alpha3/hyperopt_service.py b/pkg/suggestion/v1alpha3/hyperopt_service.py
index ff00a0915f7..c29bc4624f0 100644
--- a/pkg/suggestion/v1alpha3/hyperopt_service.py
+++ b/pkg/suggestion/v1alpha3/hyperopt_service.py
@@ -17,8 +17,10 @@ def GetSuggestions(self, request, context):
         """
         Main function to provide suggestion.
         """
+        name, config = OptimizerConfiguration.convertAlgorithmSpec(
+            request.experiment.spec.algorithm)
         base_serice = BaseHyperoptService(
-            algorithm_name=request.experiment.spec.algorithm.algorithm_name)
+            algorithm_name=name, random_state=config.random_state)
         search_space = HyperParameterSearchSpace.convert(request.experiment)
         trials = Trial.convert(request.trials)
         new_assignments = base_serice.getSuggestions(
@@ -26,3 +28,16 @@ def GetSuggestions(self, request, context):
         return api_pb2.GetSuggestionsReply(
             parameter_assignments=Assignment.generate(new_assignments)
         )
+
+
+class OptimizerConfiguration(object):
+    def __init__(self, random_state=None):
+        self.random_state = random_state
+
+    @staticmethod
+    def convertAlgorithmSpec(algorithm_spec):
+        optimizer = OptimizerConfiguration()
+        for s in algorithm_spec.algorithm_setting:
+            if s.name == "random_state":
+                optimizer.random_state = int(s.value)
+        return algorithm_spec.algorithm_name, optimizer
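
Note: below is a minimal standalone sketch (not part of the patch) of the behavior this change enables. Seeding the numpy RandomState that hyperopt's random search draws from makes the suggested assignments reproducible from run to run. It assumes a hyperopt release of this era, where fmin accepts a numpy.random.RandomState via the rstate argument (newer releases expect np.random.Generator); the search space and objective are made up for illustration.

    import hyperopt
    import numpy as np

    # Toy search space standing in for an experiment's hyperparameters.
    space = {"lr": hyperopt.hp.uniform("lr", 0.01, 0.1)}


    def run(seed):
        """Run a short random search and return the suggested lr values."""
        trials = hyperopt.Trials()
        hyperopt.fmin(
            fn=lambda assignment: assignment["lr"],  # dummy objective
            space=space,
            algo=hyperopt.rand.suggest,  # the algorithm behind `random`
            max_evals=3,
            trials=trials,
            rstate=np.random.RandomState(seed),  # seeded, as in this patch
        )
        return [t["misc"]["vals"]["lr"][0] for t in trials.trials]


    # With a fixed seed, two independent runs yield identical suggestions;
    # with the old unseeded np.random.RandomState() they would differ.
    assert run(10) == run(10)

In an experiment this corresponds to setting the random_state algorithm setting to an integer such as 10, as documented in the table above; when the setting is absent, random_state stays None and the previous non-deterministic behavior is preserved.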