def param_search(self, search_method='grid'):
    """Search for the model's best hyper-parameters and refit the model.

    @description: use a param-search technique to find the best params
    @param {type} search_method: two options, 'grid' or 'bayesian'
    @return: None (the tuned model is stored on ``self.model``)
    """
    if search_method == 'grid':
        logger.info("use grid search")
        # Delegates the whole grid search + refit to the project helper.
        self.model = Grid_Train_model(self.model, self.X_train,
                                      self.X_test, self.y_train,
                                      self.y_test)
    elif search_method == 'bayesian':
        logger.info("use bayesian optimization")
        # free_raw_data=False keeps the numpy data alive so the Dataset
        # can be reused as a validation set in lgb.train below.
        trn_data = lgb.Dataset(data=self.X_train,
                               label=self.y_train,
                               free_raw_data=False)
        tst_data = lgb.Dataset(data=self.X_test,
                               label=self.y_test,
                               free_raw_data=False)
        param = bayes_parameter_opt_lgb(trn_data)
        # Bug fix: the original `logger.info("best param", param)` passed
        # `param` as a %-format argument with no placeholder, which makes
        # the logging module raise a formatting error. Use lazy %s args.
        logger.info("best param: %s", param)
        param['objective'] = 'multiclass'
        # NOTE(review): 'auc' is a binary-classification metric; multiclass
        # LightGBM normally wants 'multi_logloss' or 'auc_mu' — confirm.
        param['metric'] = 'auc'
        self.model = lgb.train(param,
                               trn_data,
                               valid_sets=[trn_data, tst_data],
                               verbose_eval=1000,
                               early_stopping_rounds=100)
    else:
        # Previously an unknown method fell through silently; surface it.
        logger.warning("unknown search_method: %s", search_method)
def param_search(self, search_method='grid'):
    """Search for the model's best hyper-parameters.

    @description: use a param-search technique to find the best params
    @param {type} search_method: two options, 'grid' or 'bayesian'
    @return: the best-param dict when ``search_method == 'bayesian'``;
             None for 'grid' (the tuned model is stored on ``self.model``)
    """
    if search_method == 'grid':
        logger.info('use grid search ... ')
        # Delegates the grid search + refit to the project helper.
        self.model = grid_train_model(self.model, self.x_train,
                                      self.x_test, self.y_train,
                                      self.y_test)
    elif search_method == 'bayesian':
        logger.info('use bayesian optimization ... ')
        # free_raw_data=False keeps the raw numpy data attached so the
        # Dataset stays reusable after construction.
        trn_data = lgb.Dataset(data=self.x_train,
                               label=self.y_train,
                               free_raw_data=False)
        param = bayes_parameter_opt_lgb(trn_data)
        # Bug fix: the original `logger.info('best param', param)` passed
        # `param` as a %-format argument with no placeholder, triggering a
        # formatting error inside logging. Use lazy %s args instead.
        logger.info('best param: %s', param)
        return param
def param_search(self, search_method='grid'):
    """Search for the model's best hyper-parameters.

    @description: use a param-search technique to find the best params
    @param {type} search_method: two options, 'grid' or 'bayesian'
    @return: the best-param dict when ``search_method == 'bayesian'``;
             None for 'grid' (the tuned model is stored on ``self.model``)
    """
    if search_method == 'grid':
        logger.info("use grid search")
        # Delegates the grid search + refit to the project helper.
        self.model = Grid_Train_model(self.model, self.X_train,
                                      self.X_test, self.y_train,
                                      self.y_test)
    elif search_method == 'bayesian':
        logger.info("use bayesian optimization")
        # free_raw_data=False keeps the raw numpy data attached so the
        # Dataset stays reusable after construction.
        trn_data = lgb.Dataset(data=self.X_train,
                               label=self.y_train,
                               free_raw_data=False)
        param = bayes_parameter_opt_lgb(trn_data)
        # Bug fix: the original `logger.info("best param", param)` passed
        # `param` as a %-format argument with no placeholder, triggering a
        # formatting error inside logging. Use lazy %s args instead.
        logger.info("best param: %s", param)
        return param