Example 1
            'ANN__hidden_layer_sizes': [(d,) for d in [dim, dim//2]],
            'ANN__solver': ['adam'],
            'ANN__alpha': 10.0 ** -np.arange(1, 7),
            'ANN__activation': ['relu', 'tanh', 'logistic'],
        }
        clf = basicResults(pipe, x_train, y_train, x_test,
                           y_test, params, 'ANN', dataset)
    else:
        print('Using pre-solved hyperparameters for ' + dataset)
        clf = pipe.set_params(**solved_params)
        clf.fit(x_train, y_train)  # set_params only assigns hyperparameters; fit before predicting below
    # plot_learning_curve(clf, dataset + ' neural network',
    #                     x, y, cv=5, n_jobs=4, scoring=scorer)
    # plt.savefig('./graphs/' + dataset + '-ann.png')
    # print('Creating timing curve for ' + dataset)
    # plot_timing_curve(clf, x, y, 'neural network', dataset)
    # plt.savefig('./graphs/' + dataset + '-ANN-timing.png')
    # print('Creating iteration curve for ' + dataset)
    # plot_iteration_curve(clf, x_train, y_train, x_test, y_test, iter_adjust, 'neural network', dataset)
    # plt.savefig('./graphs/' + dataset + '-ANN-iteration.png')
    conf = confusion_matrix(y_test, clf.predict(x_test))
    conf = conf.astype('float') / conf.sum(axis=1)[:, np.newaxis]
    print('Confusion matrix:')
    print(conf)
    np.savetxt('./output/ANN_{}_confusion.csv'.format(dataset), conf, delimiter=',', fmt='%.2f')


if __name__ == '__main__':
    run_ann(read_wine(), 'wine')
    run_ann(read_gestures(), 'gestures')
    # plt.show()
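The normalize-then-save confusion-matrix block recurs verbatim in each of the scripts below. A minimal sketch of a shared helper that could replace it (the save_confusion name and its placement are assumptions, not part of the original code):

import numpy as np
from sklearn.metrics import confusion_matrix


def save_confusion(clf, x_test, y_test, name, dataset):
    # Hypothetical helper factoring out the repeated pattern above:
    # row-normalize the confusion matrix and write it to ./output/.
    conf = confusion_matrix(y_test, clf.predict(x_test))
    conf = conf.astype('float') / conf.sum(axis=1)[:, np.newaxis]
    print('Confusion matrix:')
    print(conf)
    np.savetxt('./output/{}_{}_confusion.csv'.format(name, dataset),
               conf, delimiter=',', fmt='%.2f')
    return conf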
Example 2
    # plt.savefig('./graphs/' + dataset + '-boost-timing.png')
    # plot_iteration_curve(clf, x_train, y_train, x_test, y_test, params, 'boosted', dataset)
    # plt.savefig('./graphs/' + dataset + '-boost-iteration.png')
    conf = confusion_matrix(y_test, clf.predict(x_test))
    conf = conf.astype('float') / conf.sum(axis=1)[:, np.newaxis]
    print('Confusion matrix:')
    print(conf)
    np.savetxt('./output/Boosted_{}_confusion.csv'.format(dataset),
               conf,
               delimiter=',',
               fmt='%.2f')


if __name__ == '__main__':
    run_boost(read_wine(),
              'wine',
              dtparams={
                  'alpha': -100,
                  'class_weight': 'balanced',
                  'criterion': 'entropy',
                  'min_samples_split': 2
              })
    run_boost(read_gestures(),
              'gestures',
              dtparams={
                  'alpha': 0.0001,
                  'class_weight': 'balanced',
                  'criterion': 'gini',
                  'min_samples_split': 4
              })
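run_boost receives plain parameter names in dtparams, while the grid-search dictionaries in the other examples key parameters by pipeline step ('DT__criterion', 'ANN__alpha', ...). A hedged sketch of the kind of key prefixing needed before such a dict can be fed to Pipeline.set_params (the prefixed helper is hypothetical; run_boost's actual handling is not shown in the excerpt):

def prefixed(step_name, params):
    # Hypothetical helper: map {'criterion': 'entropy'} to
    # {'DT__criterion': 'entropy'} so the dict can be passed to
    # Pipeline.set_params for the named step.
    return {'{}__{}'.format(step_name, k): v for k, v in params.items()}


# e.g. pipe.set_params(**prefixed('DT', dtparams))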
Example 3
    x_train, x_test, y_train, y_test = train_test_split(x, y, stratify=y)
    if solved_params is None:
        print("Doing a GridSearch for best hyperparameters")
        params = {
            'DT__criterion': ['gini', 'entropy'],
            'DT__alpha': ALPHAS,
            'DT__class_weight': ['balanced'],
            'DT__min_samples_split': [2, 3, 4, 5],
        }
        clf = basicResults(pipe, x_train, y_train, x_test,
                           y_test, params, 'DT', title)
    else:
        print("Using pre-solved hyperparameters")
        clf = pipe.set_params(**solved_params)
        clf.fit(x_train, y_train)  # set_params only assigns hyperparameters; fit before predicting below
    # print ("Plotting learning curve")
    # plot_learning_curve(clf, title + ' decision tree', x,
    #                     y, n_jobs=4, scoring=scorer, ylim=(0, 1))
    # plt.savefig('./graphs/' + title + '-dt.png')
    y_pred = clf.predict(x_test)
    conf = confusion_matrix(y_test, y_pred)
    conf = conf.astype('float') / conf.sum(axis=1)[:, np.newaxis]
    print('Confusion matrix:')
    print(conf)
    np.savetxt('./output/DT_{}_confusion.csv'.format(title), conf, delimiter=',', fmt='%.2f')


if __name__ == '__main__':
    run_dt(read_gestures(), 'gestures')
    run_dt(read_wine(), 'wine')
    # plt.show()
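basicResults is called by every script in this collection but its definition lies outside the excerpts. A minimal sketch of what such a helper might do, assuming it grid-searches the pipeline, saves the CV table, and returns the refit best estimator (the CV settings and output path are guesses, not the original implementation):

import pandas as pd
from sklearn.model_selection import GridSearchCV


def basic_results(pipe, x_train, y_train, x_test, y_test, params, name, dataset):
    # Hypothetical stand-in for basicResults: grid-search the pipeline,
    # report held-out accuracy, persist the CV table, return the best model.
    gs = GridSearchCV(pipe, param_grid=params, cv=5, n_jobs=-1, refit=True)
    gs.fit(x_train, y_train)
    print('Best params for {} on {}: {}'.format(name, dataset, gs.best_params_))
    print('Test accuracy: {:.3f}'.format(gs.score(x_test, y_test)))
    pd.DataFrame(gs.cv_results_).to_csv(
        './output/{}_{}_cv_results.csv'.format(name, dataset), index=False)
    return gs.best_estimator_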
Example 4
    x, y, pipeline = data
    pipe = Pipeline([
        *pipeline,
        ('SVM', svm.SVC(class_weight='balanced')),
    ])
    print('Splitting data SVM RBF -- ' + dataset)
    x_train, x_test, y_train, y_test = train_test_split(x, y, stratify=y)
    print('Computing hyperparameters SVM RBF -- ' + dataset)
    clf = basicResults(pipe, x_train, y_train, x_test,
                       y_test, params, 'SVM-RBF', dataset)

    # plot_timing_curve(clf, x, y, 'rbf svm', dataset)
    # plt.savefig('./graphs/' + dataset + '-svm-rbf-timing.png')
    # plot_iteration_curve(clf, x_train, y_train, x_test,
    #                      y_test, iter_adjust, 'rbf svm', dataset)
    # plt.savefig('./graphs/' + dataset + '-svm-rbf-iteration.png')
    conf = confusion_matrix(y_test, clf.predict(x_test))
    conf = conf.astype('float') / conf.sum(axis=1)[:, np.newaxis]
    print('Confusion matrix:')
    print(conf)
    np.savetxt('./output/SVM-RBF_{}_confusion.csv'.format(dataset), conf, delimiter=',', fmt='%.2f')


if __name__ == '__main__':
    wine = read_wine()
    gestures = read_gestures()
    run_svm_linear(wine, 'wine')
    run_svm_linear(gestures, 'gestures')
    run_svm_rbf(wine, 'wine')
    run_svm_rbf(gestures, 'gestures')
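The params grid passed to basicResults for the RBF SVM is defined above this excerpt and is not visible here. Purely as an illustration, a plausible grid keyed by the 'SVM' pipeline step (the values are assumptions):

import numpy as np

# Hypothetical hyperparameter grid for the 'SVM' step of the pipeline above;
# the real `params` dict lives outside the excerpt.
params = {
    'SVM__kernel': ['rbf'],
    'SVM__C': 10.0 ** np.arange(-2, 3),
    'SVM__gamma': ['scale', 0.001, 0.01, 0.1, 1.0],
}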
Example 5
from multiprocessing import Process
from dt import run_dt
from ann import run_ann
from bdt import run_boost
from knn import run_knn
from svm import run_svm_linear, run_svm_rbf
from parse_data import read_gestures, read_wine


def run(data, dataset):
    # print('running decision tree for ' + dataset)
    # run_dt(data, dataset)
    print('running neural network for ' + dataset)
    run_ann(data, dataset)
    print('running boosted dt for ' + dataset)
    run_boost(data, dataset)
    print('running knn for ' + dataset)
    run_knn(data, dataset)
    print('running svm (linear) for ' + dataset)
    run_svm_linear(data, dataset)
    print('running svm (rbf) for ' + dataset)
    run_svm_rbf(data, dataset)


if __name__ == '__main__':
    run(read_gestures(), 'gestures')
    run(read_wine(), 'wine')
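Process is imported from multiprocessing but never used in the code shown; run executes the learners sequentially. A sketch of the parallel variant that import suggests, running each dataset in its own worker process (an assumption about intent, not the original behavior):

if __name__ == '__main__':
    # Hypothetical parallel runner: one worker process per dataset.
    jobs = [
        Process(target=run, args=(read_gestures(), 'gestures')),
        Process(target=run, args=(read_wine(), 'wine')),
    ]
    for job in jobs:
        job.start()
    for job in jobs:
        job.join()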