def test__handling_alias_parameters():
    # type: () -> None

    params = {"reg_alpha": 0.1}
    _handling_alias_parameters(params)
    assert "reg_alpha" not in params
    assert "lambda_l1" in params

def run(self):
    # type: () -> lgb.Booster
    """Perform hyperparameter tuning with the given parameters.

    Returns:
        booster : Booster
            The trained Booster model.
    """
    # Suppress log messages.
    if self.auto_options["verbosity"] == 0:
        optuna.logging.disable_default_handler()
        self.lgbm_params["verbose"] = -1
        self.lgbm_params["seed"] = 111
        self.lgbm_kwargs["verbose_eval"] = False

    # Handling aliases.
    _handling_alias_parameters(self.lgbm_params)

    # Sampling.
    self.sample_train_set()

    # Tuning.
    time_budget = self.auto_options["time_budget"]

    self.start_time = time.time()
    with _timer() as t:
        self.tune_feature_fraction()
        # Stop tuning once the elapsed time exceeds the budget.
        if time_budget is not None and time_budget < t.elapsed_secs():
            self.best_params.update(self._get_params())
            return self.best_booster

        self.tune_num_leaves()
        if time_budget is not None and time_budget < t.elapsed_secs():
            self.best_params.update(self._get_params())
            return self.best_booster

        self.tune_bagging()
        if time_budget is not None and time_budget < t.elapsed_secs():
            self.best_params.update(self._get_params())
            return self.best_booster

        self.tune_feature_fraction_stage2()
        if time_budget is not None and time_budget < t.elapsed_secs():
            self.best_params.update(self._get_params())
            return self.best_booster

        self.tune_regularization_factors()
        if time_budget is not None and time_budget < t.elapsed_secs():
            self.best_params.update(self._get_params())
            return self.best_booster

        self.tune_min_data_in_leaf()
        if time_budget is not None and time_budget < t.elapsed_secs():
            self.best_params.update(self._get_params())
            return self.best_booster

    self.best_params.update(self._get_params())
    return self.best_booster

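# A minimal usage sketch for context: optuna.integration.lightgbm.train() is
# the documented entry point that constructs a tuner like the one above and
# invokes its run() method. The data, split, and parameter values below are
# placeholders, and the pass-through kwargs are assumed to mirror
# lightgbm.train()'s interface.
import numpy as np

import optuna.integration.lightgbm as lgb

X = np.random.rand(100, 4)
y = np.random.randint(2, size=100)
dtrain = lgb.Dataset(X[:80], label=y[:80])
dval = lgb.Dataset(X[80:], label=y[80:], reference=dtrain)

params = {"objective": "binary", "metric": "binary_logloss"}

# Runs the step-wise tuning loop and returns the tuned Booster.
booster = lgb.train(
    params,
    dtrain,
    valid_sets=[dval],
    early_stopping_rounds=10,
    verbose_eval=False,
)
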
def test_handling_alias_parameter():
    # type: () -> None

    params = {
        "num_boost_round": 5,
        "early_stopping_rounds": 2,
        "min_data": 0.2,
    }
    _handling_alias_parameters(params)
    assert "min_data" not in params
    assert "min_data_in_leaf" in params
    assert params["min_data_in_leaf"] == 0.2

def test_handling_alias_parameter_with_default_value():
    # type: () -> None

    params = {
        "num_boost_round": 5,
        "early_stopping_rounds": 2,
    }
    _handling_alias_parameters(params)
    assert "eta" not in params
    assert "learning_rate" in params
    assert params["learning_rate"] == 0.1

def test_handling_alias_parameter_with_user_supplied_param():
    # type: () -> None

    params = {
        "num_boost_round": 5,
        "early_stopping_rounds": 2,
        "eta": 0.5,
    }
    _handling_alias_parameters(params)
    assert "eta" not in params
    assert "learning_rate" in params
    assert params["learning_rate"] == 0.5

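# For reference, a hedged sketch of the _handling_alias_parameters helper the
# tests above exercise. The alias table and default-value handling below are
# reconstructed from the assertions, not copied from the real module, whose
# table covers many more LightGBM aliases.
ALIAS_GROUP_LIST = [
    {"param_name": "lambda_l1", "alias_names": ["reg_alpha"], "default_value": None},
    {"param_name": "min_data_in_leaf", "alias_names": ["min_data"], "default_value": None},
    {"param_name": "learning_rate", "alias_names": ["eta"], "default_value": 0.1},
]


def _handling_alias_parameters(lgbm_params):
    # type: (dict) -> None
    """Resolve LightGBM parameter aliases in ``lgbm_params`` in-place."""
    for alias_group in ALIAS_GROUP_LIST:
        param_name = alias_group["param_name"]

        # Move any alias value over to the canonical parameter name.
        for alias_name in alias_group["alias_names"]:
            if alias_name in lgbm_params:
                lgbm_params[param_name] = lgbm_params.pop(alias_name)

        # Fall back to a default when neither the canonical name nor an
        # alias was supplied.
        default_value = alias_group["default_value"]
        if default_value is not None and param_name not in lgbm_params:
            lgbm_params[param_name] = default_value
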
def run(self) -> None:
    """Perform hyperparameter tuning with the given parameters."""
    # Suppress log messages.
    if self.auto_options["verbosity"] == 0:
        optuna.logging.disable_default_handler()
        self.lgbm_params["verbose"] = -1
        self.lgbm_params["seed"] = 111
        self.lgbm_kwargs["verbose_eval"] = False

    # Handling aliases.
    _handling_alias_parameters(self.lgbm_params)

    # Sampling.
    self.sample_train_set()

    # Tuning.
    time_budget = self.auto_options["time_budget"]

    self.start_time = time.time()
    with _timer() as t:
        self.tune_feature_fraction()
        if time_budget is not None and time_budget < t.elapsed_secs():
            return

        self.tune_num_leaves()
        if time_budget is not None and time_budget < t.elapsed_secs():
            return

        self.tune_bagging()
        if time_budget is not None and time_budget < t.elapsed_secs():
            return

        self.tune_feature_fraction_stage2()
        if time_budget is not None and time_budget < t.elapsed_secs():
            return

        self.tune_regularization_factors()
        if time_budget is not None and time_budget < t.elapsed_secs():
            return

        self.tune_min_data_in_leaf()
        if time_budget is not None and time_budget < t.elapsed_secs():
            return

def run(self) -> None:
    """Perform hyperparameter tuning with the given parameters."""
    # Suppress log messages.
    if self.auto_options["verbosity"] == 0:
        optuna.logging.disable_default_handler()
        self.lgbm_params["verbose"] = -1
        self.lgbm_params["seed"] = 111
        self.lgbm_kwargs["verbose_eval"] = False

    # Handling aliases.
    _handling_alias_parameters(self.lgbm_params)

    # Sampling.
    self.sample_train_set()

    self.tune_feature_fraction()
    self.tune_num_leaves()
    self.tune_bagging()
    self.tune_feature_fraction_stage2()
    self.tune_regularization_factors()
    self.tune_min_data_in_leaf()