-
Notifications
You must be signed in to change notification settings - Fork 358
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
Fix unexpected recommendation #951
Changes from all commits
ee5303b
9e23adf
4fc0be7
3dc40b2
67256ca
fea0b5f
5d52f98
cfa1a84
File filter
Filter by extension
Conversations
Jump to
Diff view
Diff view
There are no files selected for viewing
Original file line number | Diff line number | Diff line change |
---|---|---|
|
@@ -120,7 +120,7 @@ def __init__( | |
] = utils.Archive() # dict like structure taking np.ndarray as keys and Value as values | ||
self.current_bests = { | ||
x: utils.MultiValue(self.parametrization, np.inf, reference=self.parametrization) | ||
for x in ["optimistic", "pessimistic", "average"] | ||
for x in ["optimistic", "pessimistic", "average", "minimum"] | ||
} | ||
# pruning function, called at each "tell" | ||
# this can be deactivated or modified by each implementation | ||
|
@@ -395,7 +395,7 @@ def _update_archive_and_bests(self, candidate: p.Parameter, loss: tp.FloatLoss) | |
# update current best records | ||
# this may have to be improved if we want to keep more kinds of best losses | ||
|
||
for name in ["optimistic", "pessimistic", "average"]: | ||
for name in self.current_bests: | ||
if mvalue is self.current_bests[name]: # reboot | ||
best = min(self.archive.values(), key=lambda mv, n=name: mv.get_estimation(n)) # type: ignore | ||
# rebuild best point may change, and which value did not track the updated value anyway | ||
|
@@ -488,7 +488,8 @@ def recommend(self) -> p.Parameter: | |
""" | ||
recom_data = self._internal_provide_recommendation() # pylint: disable=assignment-from-none | ||
if recom_data is None: | ||
return self.current_bests["pessimistic"].parameter | ||
name = "minimum" if self.parametrization.descriptors.deterministic_function else "pessimistic" | ||
return self.current_bests[name].parameter | ||
Comment on lines
+491
to
+492
There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. @teytaud this is a drastic change in my opinion, you may want to crosscheck it in the xps There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. I think it is safe but yes I'll check xps... complicated though are the absolute indicators are just new. There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. I guess we can wait a bit to merge if it is safer There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. or I provide a fix for the noisy variants |
||
return self.parametrization.spawn_child().set_standardized_data(recom_data, deterministic=True) | ||
|
||
def _internal_tell_not_asked(self, candidate: p.Parameter, loss: tp.FloatLoss) -> None: | ||
|
Original file line number | Diff line number | Diff line change |
---|---|---|
|
@@ -8,6 +8,7 @@ | |
import numpy as np | ||
import nevergrad.common.typing as tp | ||
from nevergrad.common import testing | ||
import nevergrad as ng | ||
from . import optimizerlib | ||
from . import experimentalvariants as xpvariants | ||
from . import base | ||
|
@@ -89,6 +90,8 @@ def test_tell_types(value: tp.Any, error: bool) -> None: | |
|
||
def test_base_optimizer() -> None: | ||
zeroptim = xpvariants.Zero(parametrization=2, budget=4, num_workers=1) | ||
# add descriptor to replicate old behavior, returning pessimistic best | ||
zeroptim.parametrization.descriptors.deterministic_function = False | ||
representation = repr(zeroptim) | ||
expected = "parametrization=Array{(2,)}" | ||
assert expected in representation, f"Unexpected representation: {representation}" | ||
|
@@ -152,3 +155,25 @@ def test_naming() -> None: | |
np.testing.assert_equal( | ||
repr(opt), f"Instance of BlubluOptimizer(parametrization={instru_str}, budget=4, num_workers=1)" | ||
) | ||
|
||
|
||
class MinStorageFunc:
    """Callable objective function that records the minimum loss
    observed across all of its calls.

    Attributes
    ----------
    min_loss: float
        Smallest value passed to ``__call__`` so far (``inf`` before
        the first call).
    """

    def __init__(self) -> None:
        # No score seen yet, so the running minimum starts at +infinity.
        self.min_loss = float("inf")

    def __call__(self, score: float) -> float:
        """Record *score* into the running minimum and return it unchanged.

        The annotation is ``float`` (was ``int``) since the value is
        compared against the float ``min_loss`` and returned as a loss;
        integer inputs remain valid.
        """
        self.min_loss = min(score, self.min_loss)
        return score
|
||
|
||
def test_recommendation_correct() -> None:
    """Regression test: the final recommendation must coincide with the
    best (minimum) loss actually observed during optimization.

    Debugging tip — repeat the test many times with:
    pytest nevergrad/optimization/test_base.py::test_recommendation_correct --count=20 --exitfirst
    """
    func = MinStorageFunc()
    choice_size = 20
    # Categorical parameter over 20 options; name fixed for reproducible repr.
    param = ng.p.Choice(range(choice_size)).set_name(f"Choice{choice_size}")
    optimizer = optimizerlib.OnePlusOne(parametrization=param, budget=300, num_workers=1)
    recommendation = optimizer.minimize(func)
    # The recommended value must be exactly the lowest loss the function saw.
    assert func.min_loss == recommendation.value
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
ah yes cool as a shortcut :-)