Example #1
0
    def test_when_a_step_does_not_improve_best_score(self):
        # type: () -> None
        """A tuning step with a worse score must not displace the current best.

        The tuner minimizes here (``higher_is_better()`` is False), so a step
        that reports 1.1 after a best of 0.9 must leave ``best_score`` and the
        default parameter value untouched.
        """

        params = {}  # type: Dict
        dataset = lgb.Dataset(np.zeros((10, 10)))

        # Constructing the tuner may emit a DeprecationWarning; keep the test
        # output clean by suppressing it.
        with warnings.catch_warnings():
            warnings.simplefilter("ignore", category=DeprecationWarning)
            tuner = LightGBMTuner(params, None, valid_sets=dataset)
        assert not tuner.higher_is_better()

        # First step: 0.9 becomes the best score.
        score_patch = mock.patch.object(
            BaseTuner, "_get_booster_best_score", return_value=0.9
        )
        with mock.patch("lightgbm.train"):
            with score_patch:
                tuner.tune_feature_fraction()

        assert "feature_fraction" in tuner.best_params
        assert tuner.best_score == 0.9

        # Second step: 1.1 is worse than 0.9, so nothing should change.
        score_patch = mock.patch.object(
            BaseTuner, "_get_booster_best_score", return_value=1.1
        )
        with mock.patch("lightgbm.train"):
            with score_patch:
                tuner.tune_num_leaves()

        # `num_leaves` should be same as default.
        assert tuner.best_params["num_leaves"] == 31
        assert tuner.best_score == 0.9
Example #2
0
    def test_when_a_step_does_not_improve_best_score(self):
        # type: () -> None
        """A tuning step with a worse score must not displace the current best.

        ``higher_is_better()`` is False here, so a second step that reports
        1.1 after a best of 0.9 must leave ``best_score`` and the default
        ``num_leaves`` value untouched.
        """

        params = {}  # type: Dict
        valid_data = np.zeros((10, 10))
        valid_sets = lgb.Dataset(valid_data)

        tuner = LightGBMTuner(params, None, valid_sets=valid_sets)
        assert not tuner.higher_is_better()

        # First step: 0.9 becomes the best score.
        with mock.patch("lightgbm.train"), mock.patch.object(
            BaseTuner, "_get_booster_best_score", return_value=0.9
        ):
            tuner.tune_feature_fraction()

        assert "feature_fraction" in tuner.best_params
        assert tuner.best_score == 0.9

        # Assume that tuning `num_leaves` doesn't improve the `best_score`.
        with mock.patch("lightgbm.train"), mock.patch.object(
            BaseTuner, "_get_booster_best_score", return_value=1.1
        ):
            tuner.tune_num_leaves()

        # Fix: the original test stopped here and never verified the outcome
        # of the second step.  `num_leaves` should stay at LightGBM's default
        # (31) and `best_score` must keep the value from the first step.
        assert tuner.best_params["num_leaves"] == 31
        assert tuner.best_score == 0.9
Example #3
0
    def test_when_a_step_does_not_improve_best_score(self):
        # type: () -> None
        """A tuning step with a worse score must not displace the current best.

        The objective/study/trial machinery is fully mocked: the first step
        reports a best value of 0.9, the second reports 1.1.  Since the tuner
        minimizes (``higher_is_better()`` is False), the second step must not
        update ``best_score`` or ``best_params``.
        """

        params = {}  # type: Dict
        valid_data = np.zeros((10, 10))
        valid_sets = lgb.Dataset(valid_data)
        tuner = LightGBMTuner(params, None, valid_sets=valid_sets)
        assert not tuner.higher_is_better()

        objective_class_name = 'optuna.integration.lightgbm_tuner.optimize.OptunaObjective'

        with mock.patch(objective_class_name) as objective_mock,\
                mock.patch('optuna.study.Study') as study_mock,\
                mock.patch('optuna.trial.Trial') as trial_mock:

            # Objective stub: no trial reports, no booster produced.
            fake_objective = mock.MagicMock(spec=OptunaObjective)
            fake_objective.report = []
            fake_objective.best_booster = None
            objective_mock.return_value = fake_objective

            # Study stub reporting 0.9 as the best value for this step.
            fake_study = mock.MagicMock(spec=Study)
            fake_study._storage = mock.MagicMock()
            fake_study.best_value = 0.9
            study_mock.return_value = fake_study

            fake_trial = mock.MagicMock(spec=Trial)
            fake_trial.best_params = {
                'feature_fraction': 0.2,
            }
            trial_mock.return_value = fake_trial

            tuner.tune_feature_fraction()

            fake_study.optimize.assert_called()

        # First step establishes 0.9 as the best score.
        assert 'feature_fraction' in tuner.best_params
        assert tuner.best_score == 0.9

        with mock.patch(objective_class_name) as objective_mock,\
                mock.patch('optuna.study.Study') as study_mock,\
                mock.patch('optuna.trial.Trial') as trial_mock:

            fake_objective = mock.MagicMock(spec=OptunaObjective)
            fake_objective.report = []
            fake_objective.best_booster = None
            objective_mock.return_value = fake_objective

            # Assume that tuning `num_leaves` doesn't improve the `best_score`.
            # (1.1 is worse than 0.9 when minimizing.)
            fake_study = mock.MagicMock(spec=Study)
            fake_study._storage = mock.MagicMock()
            fake_study.best_value = 1.1
            study_mock.return_value = fake_study

            fake_trial = mock.MagicMock(spec=Trial)
            fake_trial.best_params = {
                'num_leaves': 128,
            }
            trial_mock.return_value = fake_trial

            tuner.tune_num_leaves()

            fake_study.optimize.assert_called()

        # `num_leaves` should be same as default (31 is LightGBM's default;
        # the original comment here wrongly said "should NOT be same").
        assert tuner.best_params['num_leaves'] == 31
        assert tuner.best_score == 0.9