# Example 1
def main():
    """Load the dataset, preview a few random images, and run exercise 2.1."""
    data = load_data()
    target1 = data['target1']
    input1 = normalize(data['input1'])
    target2 = data['target2']
    input2 = normalize(data['input2'])

    # Sanity-check the data by previewing a handful of images from dataset 2.
    plot_random_images(input2)

    # Exercise 2.1
    ex_2_1(input2, target2)
# Example 2
def main():
    """Load MNIST, preview a few random training images, and run exercise 2.1."""
    # 't10k' is the MNIST naming for the 10,000-image test split.
    images_train, labels_train = load_mnist('data', kind='train')
    images_test, labels_test = load_mnist('data', kind='t10k')

    # Sanity-check the data by previewing a handful of training images.
    plot_random_images(images_train)

    # Exercise 2.1
    ex_2_1(images_train, labels_train, images_test, labels_test)
# Example 3
def main():
    """Load the dataset, preview a few images, and run exercise 2.2."""
    data = load_data()
    target1 = data['target1']
    input1 = normalize(data['input1'])
    target2 = data['target2']
    input2 = normalize(data['input2'])

    # Sanity-check the data by previewing a handful of images from dataset 2.
    plot_random_images(input2)

    # Exercise 2.2: train on dataset 1, evaluate on dataset 2.
    train_acc, test_acc, y_pred, C = ex_2_2(input1, target1, input2, target2)
    plot_histogram_of_acc(train_acc, test_acc)
    print(C)
def ex_2_2(input1, target1, input2, target2):
    """
    Solution for exercise 2.2.

    Trains n MLP classifiers (one per random seed) on dataset 1 and evaluates
    them on dataset 2, then inspects misclassifications of the seed-1 network.

    :param input1: The input from dataset1 (training features)
    :param target1: The target from dataset1; column 0 holds the class label
    :param input2: The input from dataset2 (test features)
    :param target2: The target from dataset2; column 0 holds the class label
    :return: (train_acc, test_acc, y_pred, confmat) — per-seed train/test
             accuracies, seed-1 test predictions, and the seed-1 confusion
             matrix. (The caller unpacks four values; the original returned
             None, which raised a TypeError at the call site.)
    """
    n = 10  # number of random seeds / independent training runs

    train_acc = np.zeros(n)
    test_acc = np.zeros(n)
    # NOTE(review): 564 and 960/20 are hard-coded to this dataset's test-set
    # size, feature count, and class count — confirm against load_data().
    pred_test = np.zeros((n, 564))
    coefs = np.zeros((n, 960, 20))

    # Train one network per seed; only the seed changes between runs.
    for i in range(n):
        classifier = MLPClassifier(hidden_layer_sizes=(20, ),
                                   activation='tanh',
                                   solver='adam',
                                   max_iter=5000,
                                   random_state=i)
        classifier.fit(input1, target1[:, 0])
        pred_test[i] = classifier.predict(input2)
        coefs[i] = classifier.coefs_[0]
        train_acc[i] = classifier.score(input1, target1[:, 0])
        test_acc[i] = classifier.score(input2, target2[:, 0])

    # Print the indices of test samples the seed-1 network got wrong.
    error = pred_test[1] - target2[:, 0]
    for j in range(len(error)):
        if (error[j] != 0):
            print(j)
    # Hard-coded indices of misclassified images found by a previous run.
    plot_random_images(np.row_stack((input2[175, :], input2[184, :])))
    plot_random_images(np.row_stack((input2[210, :], input2[134, :])))
    plot_random_images(np.row_stack((input2[223, :], input2[177, :])))
    plot_random_images(np.row_stack((input2[179, :], input2[186, :])))

    plot_histogram_of_acc(train_acc, test_acc)

    # Best network with seed i=1 (chosen by inspection of the accuracies).
    confmat = confusion_matrix(target2[:, 0], pred_test[1])
    print(confmat)

    # Return what the caller unpacks: accuracies, seed-1 predictions, matrix.
    return train_acc, test_acc, pred_test[1], confmat
# Compute NN weights with the best seed
    best_seed = np.argmax(accu_list_train)
    best_nn = nn = MLPClassifier(activation='tanh', solver='adam', max_iter=1000, hidden_layer_sizes=(n_hidden_neurons,),random_state=best_seed)
    best_nn.fit(train, target_train)

# Evaluate the confusion matrix with best NN
    predictions = nn.predict(test)
    C = confusion_matrix(target_test, predictions)
    print(C)

# Plot results
    plot_histogram_of_acc(accu_list_train, accu_list_test)
    print(accu_list_test)
# Find misclassified images
    comp_array = target_test - predictions
    comp_vector2 = np.nonzero(comp_array)
<<<<<<< HEAD
    print(comp_vector2)

    plot_image(test(comp_vector2(1)))
    plot_image(test(comp_vector2[5]))
    plot_image(test(comp_vector2[8]))
    print(test(comp_vector2[1]))
=======
>>>>>>> 590cfb03b659199cfec13c3c6794c009a26452a7

# Plot misclassified image
    plot_random_images(test[comp_vector2], n_images=3)

    pass