Example #1
    def test_run_verbosity(self, verbosity: int, level: int) -> None:
        # We need to reconstruct our default handler to properly capture stderr.
        optuna.logging._reset_library_root_logger()
        optuna.logging.set_verbosity(optuna.logging.INFO)

        params: Dict = {"verbose": -1}
        dataset = lgb.Dataset(np.zeros((10, 10)))

        study = optuna.create_study()
        with warnings.catch_warnings():
            warnings.simplefilter("ignore", category=FutureWarning)
            tuner = LightGBMTuner(
                params,
                dataset,
                valid_sets=dataset,
                study=study,
                verbosity=verbosity,
                time_budget=1,
            )

        with mock.patch.object(_BaseTuner,
                               "_get_booster_best_score",
                               return_value=1.0):
            tuner.run()

        assert optuna.logging.get_verbosity() == level
        assert tuner.lgbm_params["verbose"] == -1
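The ``verbosity``/``level`` pair for this test comes from a pytest parametrization that is not shown in the snippet. A hypothetical decorator along the following lines would drive it; the concrete value pairs are illustrative assumptions, not copied from the Optuna test suite.

import optuna
import pytest

# Hypothetical parametrization: each tuple pairs the ``verbosity`` argument
# passed to LightGBMTuner with the Optuna logging level the test expects to
# observe after tuner.run() completes. The pairs are illustrative guesses.
@pytest.mark.parametrize(
    "verbosity, level",
    [
        (1, optuna.logging.INFO),
        (0, optuna.logging.WARNING),
        (-1, optuna.logging.CRITICAL),
    ],
)
def test_run_verbosity(verbosity: int, level: int) -> None:
    ...  # body as in Example #1 above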
Example #2
    def test_run_show_progress_bar(self, show_progress_bar: bool, expected: int) -> None:
        params: Dict = {"verbose": -1}
        dataset = lgb.Dataset(np.zeros((10, 10)))

        study = optuna.create_study()
        tuner = LightGBMTuner(
            params,
            dataset,
            valid_sets=dataset,
            study=study,
            time_budget=1,
            show_progress_bar=show_progress_bar,
        )

        with mock.patch.object(
            _BaseTuner, "_get_booster_best_score", return_value=1.0
        ), mock.patch("tqdm.tqdm") as mock_tqdm:
            tuner.run()

        assert mock_tqdm.call_count == expected
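As in Example #1, the ``show_progress_bar``/``expected`` pair is supplied by a pytest parametrization outside the snippet. Given the ``mock_tqdm.call_count == expected`` assertion, a plausible (assumed, not verbatim) decorator is:

import pytest

# Hypothetical parametrization: with the progress bar enabled, tqdm.tqdm is
# expected to be constructed once; with it disabled, never.
@pytest.mark.parametrize(
    "show_progress_bar, expected",
    [(True, 1), (False, 0)],
)
def test_run_show_progress_bar(show_progress_bar: bool, expected: int) -> None:
    ...  # body as in Example #2 above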
Example #3
def train(*args: Any, **kwargs: Any) -> Any:
    """Wrapper of LightGBM Training API to tune hyperparameters.

    It tunes important hyperparameters (e.g., ``min_child_samples`` and ``feature_fraction``) in a
    stepwise manner. It is a drop-in replacement for `lightgbm.train()`_. See
    `a simple example of LightGBM Tuner
    <https://github.com/optuna/optuna/blob/master/examples/lightgbm_tuner_simple.py>`_
    which optimizes the validation log loss of cancer detection.

    :func:`~optuna.integration.lightgbm.train` is a wrapper function of
    :class:`~optuna.integration.lightgbm.LightGBMTuner`. To use Optuna features such as
    suspended/resumed optimization and/or parallelization, refer to
    :class:`~optuna.integration.lightgbm.LightGBMTuner` instead of this function.

    Arguments and keyword arguments for `lightgbm.train()`_ can be passed.

    .. _lightgbm.train(): https://lightgbm.readthedocs.io/en/latest/pythonapi/lightgbm.train.html
    """
    _imports.check()

    auto_booster = LightGBMTuner(*args, **kwargs)
    auto_booster.run()
    return auto_booster.get_best_booster()
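A minimal end-to-end usage sketch of this wrapper, in the spirit of the linked ``lightgbm_tuner_simple.py`` example; the dataset choice and the fixed parameter values below are illustrative assumptions, not part of the wrapper's API.

import optuna.integration.lightgbm as lgb
from sklearn.datasets import load_breast_cancer
from sklearn.model_selection import train_test_split

data, target = load_breast_cancer(return_X_y=True)
train_x, valid_x, train_y, valid_y = train_test_split(data, target, test_size=0.25)

dtrain = lgb.Dataset(train_x, label=train_y)
dvalid = lgb.Dataset(valid_x, label=valid_y)

# Same call signature as lightgbm.train(); hyperparameters left out of
# ``params`` (e.g. feature_fraction, min_child_samples) are tuned stepwise.
params = {"objective": "binary", "metric": "binary_logloss", "verbosity": -1}
booster = lgb.train(params, dtrain, valid_sets=[dvalid])

print("Tuned params:", booster.params)

``lgb.train`` here resolves to the wrapper above, which runs :class:`~optuna.integration.lightgbm.LightGBMTuner` and returns the booster trained with the best hyperparameters found.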