Skip to content

Commit

Permalink
[python-package] respect 'verbose' setting when using custom objective function (fixes #6014) (#6428)
Browse files Browse the repository at this point in the history
  • Loading branch information
jameslamb committed Jul 12, 2024
1 parent 525f8b4 commit 2bc3ab8
Show file tree
Hide file tree
Showing 5 changed files with 49 additions and 6 deletions.
21 changes: 18 additions & 3 deletions src/io/config.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -40,9 +40,24 @@ void GetFirstValueAsInt(const std::unordered_map<std::string, std::vector<std::s
}

void Config::SetVerbosity(const std::unordered_map<std::string, std::vector<std::string>>& params) {
int verbosity = Config().verbosity;
GetFirstValueAsInt(params, "verbose", &verbosity);
GetFirstValueAsInt(params, "verbosity", &verbosity);
int verbosity = 1;

// if "verbosity" was found in params, prefer that to any other aliases
const auto verbosity_iter = params.find("verbosity");
if (verbosity_iter != params.end()) {
GetFirstValueAsInt(params, "verbosity", &verbosity);
} else {
// if "verbose" was found in params and "verbosity" was not, use that value
const auto verbose_iter = params.find("verbose");
if (verbose_iter != params.end()) {
GetFirstValueAsInt(params, "verbose", &verbosity);
} else {
// if "verbosity" and "verbose" were both missing from params, don't modify LightGBM's log level
return;
}
}

// otherwise, update LightGBM's log level based on the passed-in value
if (verbosity < 0) {
LightGBM::Log::ResetLogLevel(LightGBM::LogLevel::Fatal);
} else if (verbosity == 0) {
Expand Down
2 changes: 1 addition & 1 deletion tests/python_package_test/test_basic.py
Original file line number Diff line number Diff line change
Expand Up @@ -383,7 +383,7 @@ def test_add_features_does_not_fail_if_initial_dataset_has_zero_informative_feat
arr_a = np.zeros((100, 1), dtype=np.float32)
arr_b = rng.uniform(size=(100, 5))

dataset_a = lgb.Dataset(arr_a).construct()
dataset_a = lgb.Dataset(arr_a, params={"verbose": 0}).construct()
expected_msg = (
"[LightGBM] [Warning] There are no meaningful features which satisfy "
"the provided configuration. Decreasing Dataset parameters min_data_in_bin "
Expand Down
17 changes: 16 additions & 1 deletion tests/python_package_test/test_engine.py
Original file line number Diff line number Diff line change
Expand Up @@ -1469,6 +1469,7 @@ def test_parameters_are_loaded_from_model_file(tmp_path, capsys, rng):
"metric": ["l2", "rmse"],
"num_leaves": 5,
"num_threads": 1,
"verbosity": 0,
}
model_file = tmp_path / "model.txt"
orig_bst = lgb.train(params, ds, num_boost_round=1, categorical_feature=[1, 2])
Expand Down Expand Up @@ -4274,11 +4275,25 @@ def test_verbosity_and_verbose(capsys):
"verbosity": 0,
}
lgb.train(params, ds, num_boost_round=1)
expected_msg = "[LightGBM] [Warning] verbosity is set=0, verbose=1 will be ignored. " "Current value: verbosity=0"
expected_msg = "[LightGBM] [Warning] verbosity is set=0, verbose=1 will be ignored. Current value: verbosity=0"
stdout = capsys.readouterr().out
assert expected_msg in stdout


def test_verbosity_is_respected_when_using_custom_objective(capsys):
    # Train with a deliberately unknown parameter ("nonsense") so LightGBM
    # has a warning it could emit; the custom objective (mse_obj) is the
    # condition under which verbosity used to be ignored (#6014).
    features, target = make_synthetic_regression()
    train_set = lgb.Dataset(features, target)
    base_params = {
        "objective": mse_obj,
        "nonsense": 123,
        "num_leaves": 3,
    }
    # verbosity=-1 must fully silence log output, even with a custom objective
    lgb.train(dict(base_params, verbosity=-1), train_set, num_boost_round=1)
    assert capsys.readouterr().out == ""
    # verbosity=0 must still surface warnings, e.g. the unknown-parameter one
    lgb.train(dict(base_params, verbosity=0), train_set, num_boost_round=1)
    assert "[LightGBM] [Warning] Unknown parameter: nonsense" in capsys.readouterr().out


@pytest.mark.parametrize("verbosity_param", lgb.basic._ConfigAliases.get("verbosity"))
@pytest.mark.parametrize("verbosity", [-1, 0])
def test_verbosity_can_suppress_alias_warnings(capsys, verbosity_param, verbosity):
Expand Down
13 changes: 13 additions & 0 deletions tests/python_package_test/test_sklearn.py
Original file line number Diff line number Diff line change
Expand Up @@ -1290,6 +1290,19 @@ def test_max_depth_warning_is_never_raised(capsys, estimator_class, max_depth):
assert "Provided parameters constrain tree depth" not in capsys.readouterr().out


def test_verbosity_is_respected_when_using_custom_objective(capsys):
    # sklearn-API counterpart of the engine test: a custom objective
    # (objective_ls) plus an unknown parameter ("nonsense") that would
    # normally trigger a warning.
    features, target = make_synthetic_regression()
    base_params = {
        "objective": objective_ls,
        "nonsense": 123,
        "num_leaves": 3,
    }
    # verbosity=-1 must fully silence log output, even with a custom objective
    reg_silent = lgb.LGBMRegressor(**base_params, verbosity=-1, n_estimators=1)
    reg_silent.fit(features, target)
    assert capsys.readouterr().out == ""
    # verbosity=0 must still surface warnings, e.g. the unknown-parameter one
    reg_warn = lgb.LGBMRegressor(**base_params, verbosity=0, n_estimators=1)
    reg_warn.fit(features, target)
    assert "[LightGBM] [Warning] Unknown parameter: nonsense" in capsys.readouterr().out


@pytest.mark.parametrize("estimator_class", [lgb.LGBMModel, lgb.LGBMClassifier, lgb.LGBMRegressor, lgb.LGBMRanker])
def test_getting_feature_names_in_np_input(estimator_class):
# input is a numpy array, which doesn't have feature names. LightGBM adds
Expand Down
2 changes: 1 addition & 1 deletion tests/python_package_test/test_utilities.py
Original file line number Diff line number Diff line change
Expand Up @@ -31,7 +31,7 @@ def dummy_metric(_, __):
eval_records = {}
callbacks = [lgb.record_evaluation(eval_records), lgb.log_evaluation(2), lgb.early_stopping(10)]
lgb.train(
{"objective": "binary", "metric": ["auc", "binary_error"]},
{"objective": "binary", "metric": ["auc", "binary_error"], "verbose": 1},
lgb_train,
num_boost_round=10,
feval=dummy_metric,
Expand Down

0 comments on commit 2bc3ab8

Please sign in to comment.