def support_vector():
    svc = svm.SVC(kernel='linear')
    svc.fit(X_train, y_train)
    save_pck(svc, './ml_model_saves/svm')
    # svc = load('./ml_model_saves/svm.pck')
    svc_predict = svc.predict(X_val)
    print(classification_report(y_val, svc_predict))
    cnf_matrix = confusion_matrix(y_val, svc_predict)
    plt.figure()
    plot_confusion_matrix(cnf_matrix,
                          classes=EMOTIONS,
                          normalize=True,
                          title='Normalized confusion matrix SVM')
    plt.show()


def naive_bayes():
    gnb = GaussianNB()
    gnb.fit(X_train, y_train)
    save_pck(gnb, './ml_model_saves/gnb')
    # gnb = load('./ml_model_saves/gnb.pck')
    gnb_predict = gnb.predict(X_val)
    print(classification_report(y_val, gnb_predict))
    cnf_matrix = confusion_matrix(y_val, gnb_predict)
    plt.figure()
    plot_confusion_matrix(cnf_matrix,
                          classes=EMOTIONS,
                          normalize=True,
                          title='Normalized confusion matrix NB')
    plt.show()


def linear_disc():
    lda = LinearDiscriminantAnalysis()
    lda.fit(X_train, y_train)
    save_pck(lda, './ml_model_saves/lda')
    # lda = load('./ml_model_saves/lda.pck')
    lda_predict = lda.predict(X_val)
    print(classification_report(y_val, lda_predict))
    cnf_matrix = confusion_matrix(y_val, lda_predict)
    plt.figure()
    plot_confusion_matrix(cnf_matrix,
                          classes=EMOTIONS,
                          normalize=True,
                          title='Normalized confusion matrix LDA')
    plt.show()


def random_forests():
    rfc = RandomForestClassifier()
    rfc.fit(X_train, y_train)
    save_pck(rfc, './ml_model_saves/rfc')
    # rfc = load('./ml_model_saves/rfc.pck')
    rfc_predict = rfc.predict(X_val)
    print(classification_report(y_val, rfc_predict))
    cnf_matrix = confusion_matrix(y_val, rfc_predict)
    plt.figure()
    plot_confusion_matrix(cnf_matrix,
                          classes=EMOTIONS,
                          normalize=True,
                          title='Normalized confusion matrix RF')
    plt.show()


def ada_boost():
    adc = AdaBoostClassifier()
    adc.fit(X_train, y_train)
    save_pck(adc, './ml_model_saves/adc')
    # adc = load('./ml_model_saves/adc.pck')
    adc_predict = adc.predict(X_val)
    print(classification_report(y_val, adc_predict))
    cnf_matrix = confusion_matrix(y_val, adc_predict)
    plt.figure()
    plot_confusion_matrix(cnf_matrix,
                          classes=EMOTIONS,
                          normalize=True,
                          title='Normalized confusion matrix AB')
    plt.show()
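
These five functions rely on two helpers defined elsewhere in the project and not shown here: save_pck, which presumably pickles the fitted model to disk, and plot_confusion_matrix, which draws an (optionally row-normalized) confusion matrix with matplotlib. A minimal sketch of what they might look like, with signatures inferred from the calls above:

import pickle
import itertools
import numpy as np
import matplotlib.pyplot as plt


def save_pck(model, path):
    # Assumed helper: serialize a fitted estimator to <path>.pck
    with open(path + '.pck', 'wb') as handle:
        pickle.dump(model, handle)


def plot_confusion_matrix(cm, classes, normalize=False,
                          title='Confusion matrix', cmap=plt.cm.Blues):
    # Sketch of the familiar matplotlib confusion-matrix helper
    if normalize:
        cm = cm.astype('float') / cm.sum(axis=1)[:, np.newaxis]
    plt.imshow(cm, interpolation='nearest', cmap=cmap)
    plt.title(title)
    plt.colorbar()
    ticks = np.arange(len(classes))
    plt.xticks(ticks, classes, rotation=45)
    plt.yticks(ticks, classes)
    fmt = '.2f' if normalize else 'd'
    thresh = cm.max() / 2.0
    for i, j in itertools.product(range(cm.shape[0]), range(cm.shape[1])):
        plt.text(j, i, format(cm[i, j], fmt),
                 horizontalalignment='center',
                 color='white' if cm[i, j] > thresh else 'black')
    plt.ylabel('True label')
    plt.xlabel('Predicted label')
    plt.tight_layout()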
Example #6
def get_results():

    classifiers = [
        MLPClassifier(solver='sgd', early_stopping=True),
        LinearSVC(),
        LogisticRegression()
    ]
    classifiers_names = ['MLP', 'Linear SVM', 'Logistic Regression']

    print('Beginning Experiments')
    for n, classifier in enumerate(classifiers):
        X, y, O, X_t, y_t, O_t = train_test_split_observations(0.7, classifier)
        print('Classifier Accuracy on training data:')
        print(classifier.score(flatten(X), flatten(y)))
        print('Classifier Accuracy on test data:')
        print(classifier.score(flatten(X_t), flatten(y_t)))
        print('Confusion Matrix of classifier:')
        c = confusion_matrix(flatten(y_t), flatten(O_t))
        print(c)
        plt.figure()

        plot_confusion_matrix(c,
                              class_names,
                              normalize=True,
                              title=classifiers_names[n])

        h = HMM(range(5), range(5))
        h.train(O)
        print('Classifier+HMM Accuracy on training data:')
        print(accuracy_score(flatten(y), flatten(h.viterbiSequenceList(O))))
        print('Classifier+HMM Accuracy on test data:')
        print(accuracy_score(flatten(y_t),
                             flatten(h.viterbiSequenceList(O_t))))
        print('Confusion Matrix of classifier+HMM:')
        c = confusion_matrix(flatten(y_t), flatten(h.viterbiSequenceList(O_t)))

        plt.figure()
        plot_confusion_matrix(c,
                              class_names,
                              normalize=True,
                              title=classifiers_names[n] + ' HMM')

        plt.show()
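
get_results also assumes helpers not shown in the snippet, notably flatten (and a class_names list). From the way it is called, flatten appears to collapse a list of per-observation sequences into one flat list; a plausible sketch:

from itertools import chain


def flatten(sequences):
    # Collapse a list of per-observation sequences into a single flat list
    return list(chain.from_iterable(sequences))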
Example #7
def classify_experiment(plot, num, database_list, save_file, expr="IOU", neighbors=11):
    func = {
        "IOU": iou_distance,
        "BOL": label_distance,
        "IDF": label_distance
    }

    result_data = [expr]
    for db in database_list:
        result_data.append(db)
        with open(db, 'rb') as handle:
            data = pkl.load(handle)
        test_data = generate_test_data(data, num)
        result, mt, vt = process_classify_data(func[expr], data, test_data)
        result_data.append(str(mt) + "\t" + str(vt))
        result_data.append(str(result)+"\n")
        if plot:
            df, perc = process_classification_result(result)
            print(perc)
            plot_confusion_matrix(df, expr)
    with open(save_file, 'a+') as f:
        f.write("\n".join(result_data))
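
A hypothetical invocation, only to illustrate the expected argument types (the pickle paths and output file below are placeholders):

classify_experiment(plot=True,
                    num=50,
                    database_list=['db_a.pkl', 'db_b.pkl'],
                    save_file='classify_results.txt',
                    expr='IOU',
                    neighbors=11)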
Example #8
logreg.fit(X_train, Y_train)

# Apply the model on the test set
Y_pred = logreg.predict(X_test)
probs = logreg.predict_proba(X_test)

# Accuracy
print('Misclassified test samples: %d' % (Y_test != Y_pred).sum())
print('Accuracy: %.2f' % metrics.accuracy_score(Y_test, Y_pred))

# Make some plots
class_names = ['COAD', 'READ', 'STAD']

# Plot non-normalized confusion matrix
plot = plot_confusion_matrix(Y_test,
                             Y_pred,
                             classes=class_names,
                             title='Confusion matrix, without normalization')

plot.savefig(os.path.join(dirpath, "figures", "confusion.png"))

# Plot normalized confusion matrix
plot_normalized = plot_confusion_matrix(Y_test,
                                        Y_pred,
                                        classes=class_names,
                                        normalize=True,
                                        title='Normalized confusion matrix')

plot_normalized.savefig(
    os.path.join(dirpath, "figures", "confusion_normalized.png"))
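
Note that the plot_confusion_matrix used in this snippet differs from the earlier ones: it takes raw labels, computes the matrix itself, and returns an object exposing savefig. A hedged sketch consistent with those calls, assuming it returns a matplotlib Figure:

import numpy as np
import matplotlib.pyplot as plt
from sklearn import metrics


def plot_confusion_matrix(y_true, y_pred, classes,
                          normalize=False, title='Confusion matrix'):
    # Assumed variant: build the matrix from labels and return the Figure
    cm = metrics.confusion_matrix(y_true, y_pred)
    if normalize:
        cm = cm.astype(float) / cm.sum(axis=1)[:, np.newaxis]
    fig, ax = plt.subplots()
    im = ax.imshow(cm, interpolation='nearest', cmap=plt.cm.Blues)
    fig.colorbar(im, ax=ax)
    ax.set(xticks=np.arange(len(classes)),
           yticks=np.arange(len(classes)),
           xticklabels=classes, yticklabels=classes,
           xlabel='Predicted label', ylabel='True label', title=title)
    return fig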

Example #9
# Make some heatmaps
def ex1and2(distance_metric):
    ##### Exercise 1

    print("Using {} distance".format(distance_metric))

    def dist(a, b):
        adim = a.ndim
        if adim == 1:
            a = np.array([a])

        dist_mat = metrics.pairwise.pairwise_distances(a,
                                                       np.array([b]),
                                                       metric=distance_metric)

        if adim == 2:
            return dist_mat[:, 0]
        else:
            return dist_mat[0, 0]

    centers = np.array([None] * 10)
    radiuses = np.array([None] * 10)

    for d in range(10):
        train_d = train_in[train_out == d]
        c_d = np.mean(train_d, 0)
        r_c = np.amax(dist(train_d, c_d), axis=0)
        centers[d] = c_d
        radiuses[d] = r_c

    dists = np.array([[dist(ci, cj) for ci in centers] for cj in centers])

    fig = plt.figure()
    ax = fig.add_subplot(111)
    dist_mat = ax.matshow(dists)
    fig.colorbar(dist_mat)
    ax.xaxis.set_ticks(range(10))
    ax.yaxis.set_ticks(range(10))
    ax.set_xticklabels(range(10))
    ax.set_yticklabels(range(10))
    plt.savefig("out/1_{}_digit-dists.png".format(distance_metric))
    plt.close()

    ##### Exercise 2
    def classify(point):
        return np.argmin([np.mean(dist(point, c)) for c in centers], 0)

    for set_name, set_in, set_out in [("training", train_in, train_out),
                                      ("test", test_in, test_out)]:
        set_pred = [classify(point) for point in set_in]
        correct = set_pred == set_out
        print("Correctly classified in {} set: {}".format(
            set_name,
            np.sum(correct) / len(correct)))

        cnf_matrix = metrics.confusion_matrix(set_out,
                                              set_pred,
                                              labels=range(10))

        fig = plt.figure()
        plot_confusion_matrix(
            cnf_matrix,
            classes=range(10),
            title='Confusion matrix of {} set'.format(set_name),
            normalize=True)

        plt.savefig("out/2_{}_{}_confusion_matrix.png".format(
            distance_metric, set_name))
        plt.close()
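
A hypothetical driver for the two exercises, assuming standard scikit-learn metric names and an existing out/ directory:

for metric in ('euclidean', 'cityblock', 'cosine'):
    ex1and2(metric)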
Example #10
# Compute ICA
ica = FastICA(n_components=SOURCES_COUNT)
S_ = ica.fit_transform(X)  # Reconstruct signals
A_ = ica.mixing_  # Get estimated mixing matrix

# Saving mixed and recovered sources to disk
for i in range(X.shape[1]):
    save_audio(X[:, i], sr, 'output/mixed_{}.wav'.format(i))

for i in range(S_.shape[1]):
    save_audio(S_[:, i], sr, 'output/{}.wav'.format(i))

# Mutual information (MI) matrices
mi_S = mutual_info_pairwise(S, False)
print("Sources Mutual Info:")
print(mi_S)

mi_X = mutual_info_pairwise(X, False)
print("Microphones Mutual Info:")
print(mi_X)

mi_S_ = mutual_info_pairwise(S_, False)
print("Reconstructed Sources Mutual Info:")
print(mi_S_)

plot_confusion_matrix(mi_S, 'mi_S.png', title='Informacao Mutua das Fontes')
plot_confusion_matrix(mi_X, 'mi_X.png', title='Informacao Mutua das Misturas')
plot_confusion_matrix(mi_S_,
                      'mi_S_.png',
                      title='Informacao Mutua das Reconstituicoes')
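
mutual_info_pairwise is not shown in the snippet. One plausible sketch, assuming it discretizes each signal column and computes mutual information for every pair of columns, with the second (boolean) argument treated here as a hypothetical normalize flag:

import numpy as np
from sklearn.metrics import mutual_info_score


def mutual_info_pairwise(signals, normalize=False, bins=64):
    # Hypothetical sketch: pairwise mutual information between signal columns
    n_signals = signals.shape[1]
    binned = [np.digitize(signals[:, k],
                          np.histogram_bin_edges(signals[:, k], bins=bins))
              for k in range(n_signals)]
    mi = np.zeros((n_signals, n_signals))
    for i in range(n_signals):
        for j in range(n_signals):
            mi[i, j] = mutual_info_score(binned[i], binned[j])
    if normalize:
        mi = mi / mi.max()
    return mi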