# Hyperparameter search for a single SGD classifier with TPE.
# Imports assume the hyperopt-sklearn (hpsklearn) components API.
from hpsklearn import HyperoptEstimator, sgd
from hyperopt import tpe


def hpskForClf(train_arrays, train_labels):
    # Search only SGD hyperparameters; no preprocessing steps in the pipeline.
    estim = HyperoptEstimator(
        classifier=sgd('mySGD'),
        preprocessing=[],
        algo=tpe.suggest,
        max_evals=5,
    )
    # cross_val_score(estim, train_arrays, train_labels, cv=5, scoring='recall_macro').mean()
    estim.fit(train_arrays, train_labels)
    print(estim.best_model())
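# Minimal usage sketch (illustrative assumption, not part of the original pipeline):
# the digits dataset and the train/test split below only show how hpskForClf is called.
from sklearn.datasets import load_digits
from sklearn.model_selection import train_test_split

X, y = load_digits(return_X_y=True)
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2, random_state=0)
hpskForClf(X_train, y_train)  # prints the best SGD model found after 5 TPE evaluations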
# Search space over several candidate classifiers; the component constructors
# and hp.choice come from hpsklearn / hyperopt.
from hyperopt import hp
from hpsklearn import (ada_boost, gaussian_nb, knn, linear_discriminant_analysis,
                       random_forest, sgd, xgboost_classification)

# xgboost is optional: fall back gracefully if the package is not installed.
try:
    import xgboost
except ImportError:
    xgboost = None


def bench_classifiers(name):
    classifiers = [
        ada_boost(name + '.ada_boost'),          # boo
        gaussian_nb(name + '.gaussian_nb'),      # eey
        knn(name + '.knn', sparse_data=True),    # eey
        linear_discriminant_analysis(name + '.linear_discriminant_analysis', n_components=1),  # eey
        random_forest(name + '.random_forest'),  # boo
        sgd(name + '.sgd')                       # eey
    ]
    if xgboost:
        classifiers.append(xgboost_classification(name + '.xgboost'))  # boo
    return hp.choice('%s' % name, classifiers)
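# Sketch (an assumption, not from the original code): the hp.choice space returned by
# bench_classifiers() can be passed directly as the classifier argument of
# HyperoptEstimator, letting TPE pick both the classifier and its hyperparameters.
# (HyperoptEstimator and tpe are imported as above; search_over_benchmark is hypothetical.)
def search_over_benchmark(X_train, y_train, max_evals=25):
    estim = HyperoptEstimator(
        classifier=bench_classifiers('bench'),
        preprocessing=[],
        algo=tpe.suggest,
        max_evals=max_evals,
    )
    estim.fit(X_train, y_train)
    return estim.best_model()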
n_job = 6
select_classes = [0, 1, 2, 3, 4, 5]
val_dist = X_val_mini.shape[0] / X_train_mini.shape[0]
name = 'my_est_oVa'
tic_mod_all = time.time()

select_alg = [
    ada_boost(name + '.ada_boost'),
    gaussian_nb(name + '.gaussian_nb'),
    knn(name + '.knn', sparse_data=True),
    linear_discriminant_analysis(name + '.linear_discriminant_analysis', n_components=1),
    random_forest(name + '.random_forest'),
    sgd(name + '.sgd'),
    xgboost_classification(name + '.xgboost')
]

# fitting models
estim_one_vs_rest = dict()
# scoring models
algo_scoring = dict()
save_score_path = r'C:/Users/anden/PycharmProjects/NovelEEG/results'

for alg in [select_alg[args.index]]:
    tic_mod = time.time()
    print("~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~\n",
          "running on %s" % (alg.name + '.one_V_all'),
          "\n~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~")
    clf_method = one_vs_rest(str(alg.name + '.one_V_all'), estimator=alg,