Example #1
    def test_get_best_booster(self) -> None:
        unexpected_value = 20  # Outside the tuned search space for lambda_l1.

        params = {"verbose": -1, "lambda_l1": unexpected_value}  # type: Dict
        dataset = lgb.Dataset(np.zeros((10, 10)))

        study = optuna.create_study()
        tuner = LightGBMTuner(params, dataset, valid_sets=dataset, study=study)

        # No tuning step has run yet, so no booster is available.
        with pytest.raises(ValueError):
            tuner.get_best_booster()

        # Fix the reported best score so the tuning step always completes.
        with mock.patch.object(BaseTuner, "_get_booster_best_score", return_value=0.0):
            tuner.tune_regularization_factors()

        best_booster = tuner.get_best_booster()
        assert best_booster.params["lambda_l1"] != unexpected_value

        # TODO(toshihikoyanase): Remove this check when LightGBMTuner.best_booster is removed.
        with pytest.warns(DeprecationWarning):
            tuner.best_booster

        tuner2 = LightGBMTuner(params, dataset, valid_sets=dataset, study=study)

        # A tuner resumed from the study alone (without model_dir) cannot recover the booster.
        with pytest.raises(ValueError):
            tuner2.get_best_booster()
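
A minimal end-to-end sketch of the tuner exercised above, for reference. The synthetic data and the import path are assumptions; run() and get_best_booster() are taken from the examples in this section.

import numpy as np
import lightgbm as lgb
from optuna.integration.lightgbm_tuner import LightGBMTuner  # path assumed from the docstrings below

# Synthetic regression-style data, purely for illustration.
data = np.random.rand(100, 5)
label = np.random.rand(100)
dtrain = lgb.Dataset(data, label=label)

params = {"objective": "regression", "verbose": -1}
tuner = LightGBMTuner(params, dtrain, valid_sets=dtrain)
tuner.run()  # runs the full stepwise tuning loop
booster = tuner.get_best_booster()  # valid only after at least one tuning step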
Example #2
    def test_best_booster_with_model_dir(self) -> None:
        params = {"verbose": -1}  # type: Dict
        dataset = lgb.Dataset(np.zeros((10, 10)))

        study = optuna.create_study()
        with TemporaryDirectory() as tmpdir:
            tuner = LightGBMTuner(params,
                                  dataset,
                                  valid_sets=dataset,
                                  study=study,
                                  model_dir=tmpdir)

            with mock.patch.object(BaseTuner,
                                   "_get_booster_best_score",
                                   return_value=0.0):
                tuner.tune_regularization_factors()

            best_booster = tuner.get_best_booster()

            tuner2 = LightGBMTuner(params,
                                   dataset,
                                   valid_sets=dataset,
                                   study=study,
                                   model_dir=tmpdir)
            best_booster2 = tuner2.get_best_booster()

            assert best_booster.params == best_booster2.params
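
The test relies on model_dir to share boosters between tuner instances within one process. A hedged sketch of the cross-process resumption pattern this implies, combining a persistent study with a shared model directory (the storage URL, directory name, and import path are assumptions):

import optuna
from optuna.integration.lightgbm_tuner import LightGBMTuner  # path assumed

# A study backed by RDB storage can be reloaded by a later process.
study = optuna.create_study(
    study_name="lgbm-tuning",
    storage="sqlite:///lgbm-tuning.db",  # assumed storage URL
    load_if_exists=True,
)
# Given the same study and model_dir, a fresh tuner can recover the best
# booster found so far via get_best_booster(), as the test above shows.
# params and dataset are as in the test above.
tuner = LightGBMTuner(
    params, dataset, valid_sets=dataset, study=study, model_dir="./boosters"
)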
Example #3
def train(*args: Any, **kwargs: Any) -> Any:
    """Wrapper of LightGBM Training API to tune hyperparameters.

    It tunes important hyperparameters (e.g., ``min_child_samples`` and ``feature_fraction``) in a
    stepwise manner. Arguments and keyword arguments for `lightgbm.train()
    <https://lightgbm.readthedocs.io/en/latest/pythonapi/lightgbm.train.html>`_ can be passed.
    """
    _check_lightgbm_availability()

    auto_booster = LightGBMTuner(*args, **kwargs)
    auto_booster.run()
    return auto_booster.get_best_booster()
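
A hedged call sketch for this wrapper. Per the docstring, arguments follow lightgbm.train(), so the parameter names below come from lightgbm.train(); dtrain and dval are assumed to be prepared lgb.Dataset objects.

# All arguments are forwarded to LightGBMTuner, mirroring lightgbm.train().
booster = train(
    {"objective": "regression", "metric": "l2", "verbose": -1},
    dtrain,
    valid_sets=[dval],
    num_boost_round=100,
)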
Example #4
def train(*args: Any, **kwargs: Any) -> Any:
    """Wrapper of LightGBM Training API to tune hyperparameters.

    It tunes important hyperparameters (e.g., ``min_child_samples`` and ``feature_fraction``) in a
    stepwise manner. It is a drop-in replacement for `lightgbm.train()`_. See
    `a simple example of LightGBM Tuner
    <https://github.com/optuna/optuna/blob/master/examples/lightgbm_tuner_simple.py>`_,
    which optimizes the validation log loss of cancer detection.

    :func:`~optuna.integration.lightgbm.train` is a wrapper function of
    :class:`~optuna.integration.lightgbm_tuner.LightGBMTuner`. To use Optuna features such as
    suspended/resumed optimization and/or parallelization, use
    :class:`~optuna.integration.lightgbm_tuner.LightGBMTuner` directly instead of this function.

    Arguments and keyword arguments for `lightgbm.train()`_ can be passed.

    .. _lightgbm.train(): https://lightgbm.readthedocs.io/en/latest/pythonapi/lightgbm.train.html
    """
    _imports.check()

    auto_booster = LightGBMTuner(*args, **kwargs)
    auto_booster.run()
    return auto_booster.get_best_booster()
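
The docstring's "drop-in replacement" claim amounts to swapping a single import; a hedged illustration (data setup omitted; params, dtrain, and dval assumed; the import path comes from the docstring above):

import lightgbm as lgb
from optuna.integration.lightgbm import train as tuned_train

# Plain LightGBM: hyperparameters stay as given.
booster = lgb.train(params, dtrain, valid_sets=[dval])

# Tuner version: identical call signature; important hyperparameters are
# tuned stepwise before the final model is returned.
booster = tuned_train(params, dtrain, valid_sets=[dval])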