From e04906ee83d88a5c500215da0a1b1e432aaa6f3b Mon Sep 17 00:00:00 2001 From: Floris-Jan Willemsen Date: Thu, 17 Oct 2024 16:32:20 -0700 Subject: [PATCH] Successfully implemented hyperparameter tuning --- kernel_tuner/hyper.py | 29 +++++++++++++++++++++-------- kernel_tuner/strategies/common.py | 7 +++---- test/strategies/test_common.py | 9 +++++++-- test/test_hyper.py | 6 +++--- 4 files changed, 34 insertions(+), 17 deletions(-) diff --git a/kernel_tuner/hyper.py b/kernel_tuner/hyper.py index 34fc14bcc..289fcd270 100644 --- a/kernel_tuner/hyper.py +++ b/kernel_tuner/hyper.py @@ -42,6 +42,10 @@ def tune_hyper_params(target_strategy: str, hyper_params: dict, *args, **kwargs) # - The methodology returns the fitness metric # - The fitness metric is fed back into the meta-strategy + iterations = 1 + if "iterations" in kwargs: + iterations = kwargs['iterations'] + del kwargs['iterations'] if "cache" in kwargs: del kwargs['cache'] @@ -55,15 +59,24 @@ def put_if_not_present(target_dict, key, value): kwargs['verify'] = None arguments = [target_strategy] - return kernel_tuner.tune_kernel('hyperparamtuning', None, [], arguments, hyper_params, *args, lang='Hypertuner', - objective='score', objective_higher_is_better=True, **kwargs) + return kernel_tuner.tune_kernel('hyperparamtuning', None, [], arguments, hyper_params, *args, lang='Hypertuner', + objective='score', objective_higher_is_better=True, iterations=iterations, **kwargs) if __name__ == "__main__": # TODO remove in production + # hyperparams = { + # 'popsize': [10, 20, 30], + # 'maxiter': [50, 100, 150], + # 'w': [0.25, 0.5, 0.75], + # 'c1': [1.0, 2.0, 3.0], + # 'c2': [0.5, 1.0, 1.5] + # } hyperparams = { - 'popsize': [10, 20, 30], - 'maxiter': [50, 100, 150], - 'w': [0.25, 0.5, 0.75], - 'c1': [1.0, 2.0, 3.0], - 'c2': [0.5, 1.0, 1.5] + 'popsize': [10], + 'maxiter': [50], + 'w': [0.25, 0.5], + 'c1': [1.0], + 'c2': [0.5] } - tune_hyper_params('pso', hyperparams) + result, env = tune_hyper_params('pso', hyperparams) + 
print(result) + print(env['best_config']) diff --git a/kernel_tuner/strategies/common.py b/kernel_tuner/strategies/common.py index f4cdca53d..3420c86ea 100644 --- a/kernel_tuner/strategies/common.py +++ b/kernel_tuner/strategies/common.py @@ -55,10 +55,12 @@ def get_options(strategy_options, options): class CostFunc: def __init__(self, searchspace: Searchspace, tuning_options, runner, *, scaling=False, snap=True): self.runner = runner - self.tuning_options = tuning_options self.snap = snap self.scaling = scaling self.searchspace = searchspace + self.tuning_options = tuning_options + if isinstance(self.tuning_options, dict): + self.tuning_options['max_fevals'] = min(tuning_options['max_fevals'] if 'max_fevals' in tuning_options else np.inf, searchspace.size) self.results = [] def __call__(self, x, check_restrictions=True): @@ -104,9 +106,6 @@ def __call__(self, x, check_restrictions=True): self.tuning_options.unique_results[x_int] = result self.results.append(result) - if len(self.results) > 100: - print(result) - raise ValueError(self.results) # upon returning from this function control will be given back to the strategy, so reset the start time self.runner.last_strategy_start_time = perf_counter() diff --git a/test/strategies/test_common.py b/test/strategies/test_common.py index 29ead8615..973b93a52 100644 --- a/test/strategies/test_common.py +++ b/test/strategies/test_common.py @@ -1,10 +1,13 @@ import sys from time import perf_counter +from pytest import raises + from kernel_tuner.interface import Options from kernel_tuner.searchspace import Searchspace from kernel_tuner.strategies import common from kernel_tuner.strategies.common import CostFunc +from kernel_tuner.util import StopCriterionReached try: from mock import Mock @@ -42,8 +45,10 @@ def restrictions(_): restrictions=restrictions, strategy_options={}, verbose=True, cache={}, unique_results={}, objective="time", objective_higher_is_better=False, metrics=None) - time = CostFunc(Searchspace(tune_params, 
restrictions, 1024), tuning_options, runner)(x) - assert time == sys.float_info.max + + with raises(StopCriterionReached): + time = CostFunc(Searchspace(tune_params, restrictions, 1024), tuning_options, runner)(x) + assert time == sys.float_info.max def test_setup_method_arguments(): diff --git a/test/test_hyper.py b/test/test_hyper.py index 30a5462c8..1b7ccc559 100644 --- a/test/test_hyper.py +++ b/test/test_hyper.py @@ -15,6 +15,6 @@ def test_hyper(env): target_strategy = "genetic_algorithm" - result = tune_hyper_params(target_strategy, hyper_params, iterations=1, verbose=True, cache=cache_filename) - raise ValueError(result) - assert len(result) > 0 + result, env = tune_hyper_params(target_strategy, hyper_params, iterations=1, verbose=True, cache=cache_filename) + assert len(result) >= 2 # Look into why the hyperparamtuner returns more results than the searchspace size + assert 'best_config' in env