def train(*args: Any, **kwargs: Any) -> Any:
    """Wrapper of LightGBM Training API to tune hyperparameters.

    It tunes important hyperparameters (e.g., `min_child_samples` and
    `feature_fraction`) in a stepwise manner. Arguments and keyword arguments
    for `lightgbm.train()
    <https://lightgbm.readthedocs.io/en/latest/pythonapi/lightgbm.train.html>`_
    can be passed.
    """
    # Fail fast if the lightgbm package is not importable.
    _check_lightgbm_availability()

    # Delegate the stepwise tuning to LightGBMTuner and hand back the
    # booster trained with the best parameters found.
    tuner = LightGBMTuner(*args, **kwargs)
    tuner.run()
    return tuner.get_best_booster()
def train(*args, **kwargs):
    # type: (Any, Any) -> Any
    # NOTE: with *args/**kwargs a PEP 484 type comment annotates the element
    # types, so the correct form is (Any, Any) -> Any -- the previous
    # (List[Any], Optional[Dict[Any, Any]]) form was invalid.
    """Wrapper function of LightGBM API: train()

    Arguments and keyword arguments for `lightgbm.train()` can be passed.

    Returns:
        The booster produced by :meth:`LightGBMTuner.run`.
    """
    auto_booster = LightGBMTuner(*args, **kwargs)
    booster = auto_booster.run()
    return booster
def train(*args: Any, **kwargs: Any) -> Any:
    """Wrapper of LightGBM Training API to tune hyperparameters.

    It tunes important hyperparameters (e.g., ``min_child_samples`` and
    ``feature_fraction``) in a stepwise manner. It is a drop-in replacement
    for `lightgbm.train()`_. See `a simple example of LightGBM Tuner
    <https://github.com/optuna/optuna/blob/master/examples/lightgbm_tuner_simple.py>`_
    which optimizes the validation log loss of cancer detection.

    :func:`~optuna.integration.lightgbm.train` is a wrapper function of
    :class:`~optuna.integration.lightgbm_tuner.LightGBMTuner`. To use feature
    in Optuna such as suspended/resumed optimization and/or parallelization,
    refer to :class:`~optuna.integration.lightgbm_tuner.LightGBMTuner` instead
    of this function.

    Arguments and keyword arguments for `lightgbm.train()`_ can be passed.

    .. _lightgbm.train():
        https://lightgbm.readthedocs.io/en/latest/pythonapi/lightgbm.train.html
    """
    # Raises if the optional lightgbm dependency is missing.
    _imports.check()

    auto_booster = LightGBMTuner(*args, **kwargs)
    auto_booster.run()
    # Return the booster trained with the best parameter set found.
    return auto_booster.get_best_booster()
def train(*args, **kwargs):
    # type: (Any, Any) -> Any
    """Wrapper of LightGBM Training API to tune hyperparameters.

    .. warning::

        This feature is experimental. The interface may be changed in the
        future.

    It tunes important hyperparameters (e.g., `min_child_samples` and
    `feature_fraction`) in a stepwise manner. Arguments and keyword arguments
    for `lightgbm.train()
    <https://lightgbm.readthedocs.io/en/latest/pythonapi/lightgbm.train.html>`_
    can be passed.
    """
    # Build the tuner from the caller's arguments and return whatever
    # run() produces (the trained booster in this version of the API).
    tuner = LightGBMTuner(*args, **kwargs)
    return tuner.run()