Skip to content

Commit

Permalink
Successfully implemented hyperparameter tuning
Browse files Browse the repository at this point in the history
  • Loading branch information
fjwillemsen committed Oct 17, 2024
1 parent fd823aa commit e04906e
Show file tree
Hide file tree
Showing 4 changed files with 34 additions and 17 deletions.
29 changes: 21 additions & 8 deletions kernel_tuner/hyper.py
Original file line number Diff line number Diff line change
Expand Up @@ -42,6 +42,10 @@ def tune_hyper_params(target_strategy: str, hyper_params: dict, *args, **kwargs)
# - The methodology returns the fitness metric
# - The fitness metric is fed back into the meta-strategy

iterations = 1
if "iterations" in kwargs:
iterations = kwargs['iterations']
del kwargs['iterations']
if "cache" in kwargs:
del kwargs['cache']

Expand All @@ -55,15 +59,24 @@ def put_if_not_present(target_dict, key, value):
kwargs['verify'] = None
arguments = [target_strategy]

return kernel_tuner.tune_kernel('hyperparamtuning', None, [], arguments, hyper_params, *args, lang='Hypertuner',
objective='score', objective_higher_is_better=True, **kwargs)
return kernel_tuner.tune_kernel('hyperparamtuning', None, [], arguments, hyper_params, *args, lang='Hypertuner',
objective='score', objective_higher_is_better=True, iterations=iterations, **kwargs)

if __name__ == "__main__": # TODO remove in production
# hyperparams = {
# 'popsize': [10, 20, 30],
# 'maxiter': [50, 100, 150],
# 'w': [0.25, 0.5, 0.75],
# 'c1': [1.0, 2.0, 3.0],
# 'c2': [0.5, 1.0, 1.5]
# }
hyperparams = {
'popsize': [10, 20, 30],
'maxiter': [50, 100, 150],
'w': [0.25, 0.5, 0.75],
'c1': [1.0, 2.0, 3.0],
'c2': [0.5, 1.0, 1.5]
'popsize': [10],
'maxiter': [50],
'w': [0.25, 0.5],
'c1': [1.0],
'c2': [0.5]
}
tune_hyper_params('pso', hyperparams)
result, env = tune_hyper_params('pso', hyperparams)
print(result)
print(env['best_config'])
7 changes: 3 additions & 4 deletions kernel_tuner/strategies/common.py
Original file line number Diff line number Diff line change
Expand Up @@ -55,10 +55,12 @@ def get_options(strategy_options, options):
class CostFunc:
def __init__(self, searchspace: Searchspace, tuning_options, runner, *, scaling=False, snap=True):
self.runner = runner
self.tuning_options = tuning_options
self.snap = snap
self.scaling = scaling
self.searchspace = searchspace
self.tuning_options = tuning_options
if isinstance(self.tuning_options, dict):
self.tuning_options['max_fevals'] = min(tuning_options['max_fevals'] if 'max_fevals' in tuning_options else np.inf, searchspace.size)
self.results = []

def __call__(self, x, check_restrictions=True):
Expand Down Expand Up @@ -104,9 +106,6 @@ def __call__(self, x, check_restrictions=True):
self.tuning_options.unique_results[x_int] = result

self.results.append(result)
if len(self.results) > 100:
print(result)
raise ValueError(self.results)

# upon returning from this function control will be given back to the strategy, so reset the start time
self.runner.last_strategy_start_time = perf_counter()
Expand Down
9 changes: 7 additions & 2 deletions test/strategies/test_common.py
Original file line number Diff line number Diff line change
@@ -1,10 +1,13 @@
import sys
from time import perf_counter

from pytest import raises

from kernel_tuner.interface import Options
from kernel_tuner.searchspace import Searchspace
from kernel_tuner.strategies import common
from kernel_tuner.strategies.common import CostFunc
from kernel_tuner.util import StopCriterionReached

try:
from mock import Mock
Expand Down Expand Up @@ -42,8 +45,10 @@ def restrictions(_):
restrictions=restrictions, strategy_options={},
verbose=True, cache={}, unique_results={},
objective="time", objective_higher_is_better=False, metrics=None)
time = CostFunc(Searchspace(tune_params, restrictions, 1024), tuning_options, runner)(x)
assert time == sys.float_info.max

with raises(StopCriterionReached):
time = CostFunc(Searchspace(tune_params, restrictions, 1024), tuning_options, runner)(x)
assert time == sys.float_info.max


def test_setup_method_arguments():
Expand Down
6 changes: 3 additions & 3 deletions test/test_hyper.py
Original file line number Diff line number Diff line change
Expand Up @@ -15,6 +15,6 @@ def test_hyper(env):

target_strategy = "genetic_algorithm"

result = tune_hyper_params(target_strategy, hyper_params, iterations=1, verbose=True, cache=cache_filename)
raise ValueError(result)
assert len(result) > 0
result, env = tune_hyper_params(target_strategy, hyper_params, iterations=1, verbose=True, cache=cache_filename)
assert len(result) >= 2  # TODO: investigate why the hyperparameter tuner returns more results than the searchspace size
assert 'best_config' in env

0 comments on commit e04906e

Please sign in to comment.