def perform(self):
    """Grid-search an MLP classifier over alpha, learning rate and topology."""
    # Regularization strengths span 10^1 down to 10^-9.
    regularization = [10 ** -exp for exp in np.arange(-1, 9.01, 1.0)]
    topologies = [(50, ), (50, 100, 50)]
    rates = sorted([0.001, 0.005])
    grid = {
        'MLP__activation': ['relu'],
        'MLP__alpha': regularization,
        'MLP__learning_rate_init': rates,
        'MLP__hidden_layer_sizes': topologies
    }
    # Early stopping + seeding keeps the long search reproducible.
    clf = MLPClassifier(max_iter=1000, early_stopping=True,
                        random_state=self._details.seed, shuffle=True,
                        verbose=self._verbose)
    pipe = Pipeline([('MLP', clf)])
    trainer.perform_experiment(ds=self._details.ds,
                               ds_name=self._details.ds_name,
                               clf_name='ANNClassifier', params=grid,
                               pipe=pipe, seed=self._details.seed)
def perform(self):
    """Grid-search a decision tree over its maximum depth."""
    tree = DecisionTreeClassifier(random_state=self._details.seed)
    grid = {
        'DTClassifier__criterion': ['gini'],
        'DTClassifier__max_depth': [20, 40, 60]
    }
    pipe = Pipeline([('DTClassifier', tree)])
    trainer.perform_experiment(ds=self._details.ds,
                               ds_name=self._details.ds_name,
                               clf_name='DTClassifier', params=grid,
                               pipe=pipe, seed=self._details.seed)
def perform(self):
    """Run the logistic-regression experiment (l2 penalty only)."""
    model = LogisticRegression(random_state=self._details.seed)
    grid = {'LRClassifier__penalty': ['l2']}
    pipe = Pipeline([('LRClassifier', model)])
    trainer.perform_experiment(ds=self._details.ds,
                               ds_name=self._details.ds_name,
                               clf_name='LRClassifier', params=grid,
                               pipe=pipe, seed=self._details.seed)
def perform(self):
    """Grid-search a random forest over depth, estimator count and features."""
    forest = ensemble.RandomForestClassifier(
        random_state=self._details.seed)
    grid = {
        'RF__max_depth': [3, 5, 7, 10, None],
        'RF__n_estimators': [3, 5, 10, 25, 50, 150],
        'RF__max_features': [4, 7, 15, 20]
    }
    pipe = Pipeline([('RF', forest)])
    trainer.perform_experiment(ds=self._details.ds,
                               ds_name=self._details.ds_name,
                               clf_name='RF', params=grid,
                               pipe=pipe, seed=self._details.seed)
def perform(self):
    """Grid-search an XGBoost classifier over estimators, rate and depth."""
    # FIX: seed the booster like every other experiment in this module;
    # without random_state the subsampling/column sampling is not reproducible.
    booster = xgb.XGBClassifier(booster='gbtree',
                                random_state=self._details.seed)
    params = {
        'Xgb__n_estimators': [10, 20, 50, 150],
        'Xgb__learning_rate': [0.02, 0.04],
        'Xgb__max_depth': [20, 40]
    }
    pipe = Pipeline([('Xgb', booster)])
    trainer.perform_experiment(ds=self._details.ds,
                               ds_name=self._details.ds_name,
                               clf_name='Xgb', params=params,
                               pipe=pipe, seed=self._details.seed)
def perform(self):
    """Grid-search a gradient-boosting classifier over its core knobs."""
    gbm = GradientBoostingClassifier(random_state=self._details.seed)
    grid = {
        'Boost__n_estimators': [10, 20, 50, 150],
        'Boost__learning_rate': [0.02, 0.04],
        'Boost__max_depth': [20, 40]
    }
    pipe = Pipeline([('Boost', gbm)])
    trainer.perform_experiment(ds=self._details.ds,
                               ds_name=self._details.ds_name,
                               clf_name='Boost', params=grid,
                               pipe=pipe, seed=self._details.seed)
def perform(self):
    """Grid-search KNN on PCA-reduced features (components, metric, k)."""
    k_values = [10, 20, 30]
    knn = KNeighborsClassifier()
    reducer = PCA(random_state=self._details.seed)
    grid = {
        'pca__n_components': [15, 20],
        'KNN__metric': ['manhattan', 'euclidean', 'chebyshev'],
        'KNN__n_neighbors': k_values,
        'KNN__weights': ['uniform']
    }
    # Dimensionality reduction runs before the distance-based classifier.
    pipe = Pipeline([('pca', reducer), ('KNN', knn)])
    trainer.perform_experiment(ds=self._details.ds,
                               ds_name=self._details.ds_name,
                               clf_name='KNNClassifier', params=grid,
                               pipe=pipe, seed=self._details.seed)