def test_get_best_booster(self) -> None:
    """`get_best_booster()` raises before any tuning, then returns the tuned CV
    boosters, and a second tuner sharing the same ``model_dir`` and study can
    reload boosters with identical parameters."""
    unexpected_value = 20  # out of scope.
    params = {"verbose": -1, "lambda_l1": unexpected_value}  # type: Dict
    dataset = lgb.Dataset(np.zeros((10, 10)))
    study = optuna.create_study()

    with TemporaryDirectory() as model_dir:
        tuner = LightGBMTunerCV(
            params, dataset, study=study, model_dir=model_dir, return_cvbooster=True
        )

        # No trial has completed yet, so there is no best booster to return.
        with pytest.raises(ValueError):
            tuner.get_best_booster()

        with mock.patch.object(_OptunaObjectiveCV, "_get_cv_scores", return_value=[1.0]):
            tuner.tune_regularization_factors()

        tuned_boosters = tuner.get_best_booster().boosters
        # Tuning must have replaced the out-of-scope seed value of lambda_l1.
        assert all(b.params["lambda_l1"] != unexpected_value for b in tuned_boosters)

        # A fresh tuner resuming the same study reloads boosters from model_dir.
        resumed_tuner = LightGBMTunerCV(
            params, dataset, study=study, model_dir=model_dir, return_cvbooster=True
        )
        reloaded_boosters = resumed_tuner.get_best_booster().boosters
        for tuned, reloaded in zip(tuned_boosters, reloaded_boosters):
            assert tuned.params == reloaded.params
def test_get_best_booster_with_error(self) -> None:
    """Every configuration in which no best booster is retrievable must raise
    ``ValueError``: before tuning, on a resumed study without in-memory
    boosters, and with a ``model_dir`` that holds no saved booster."""
    params: Dict = {"verbose": -1}
    dataset = lgb.Dataset(np.zeros((10, 10)))
    study = optuna.create_study()

    first_tuner = LightGBMTunerCV(
        params, dataset, study=study, model_dir=None, return_cvbooster=True
    )
    # No trial is completed yet.
    with pytest.raises(ValueError):
        first_tuner.get_best_booster()

    with mock.patch.object(_OptunaObjectiveCV, "_get_cv_scores", return_value=[1.0]):
        first_tuner.tune_regularization_factors()

    resumed_tuner = LightGBMTunerCV(
        params, dataset, study=study, model_dir=None, return_cvbooster=True
    )
    # Resumed the study does not have the best booster.
    with pytest.raises(ValueError):
        resumed_tuner.get_best_booster()

    with TemporaryDirectory() as empty_dir:
        dir_backed_tuner = LightGBMTunerCV(
            params, dataset, study=study, model_dir=empty_dir, return_cvbooster=True
        )
        # The booster was not saved hence not found in the `model_dir`.
        with pytest.raises(ValueError):
            dir_backed_tuner.get_best_booster()
def test_resume_run(self) -> None:
    """Resuming a study whose tuning step already completed must not add new trials."""
    params: Dict = {"verbose": -1}
    dataset = lgb.Dataset(np.zeros((10, 10)))

    study = optuna.create_study()
    tuner = LightGBMTunerCV(params, dataset, study=study)

    with mock.patch.object(_OptunaObjectiveCV, "_get_cv_scores", return_value=[1.0]):
        tuner.tune_regularization_factors()

    n_trials = len(study.trials)
    assert n_trials == len(study.trials)

    # FIX: the resumed tuner must be LightGBMTunerCV, not LightGBMTuner.
    # The original used `LightGBMTuner(params, dataset, valid_sets=dataset, ...)`
    # (copy-pasted from the non-CV test class), whose objective is not covered
    # by the `_OptunaObjectiveCV._get_cv_scores` mock below.
    tuner2 = LightGBMTunerCV(params, dataset, study=study)
    with mock.patch.object(_OptunaObjectiveCV, "_get_cv_scores", return_value=[1.0]):
        tuner2.tune_regularization_factors()
    # The resumed run reuses the completed step, so no new trials are created.
    assert n_trials == len(study.trials)