color="white" if cm[i, j] > thresh else "black")
    fig.tight_layout()
    return ax


if __name__ == '__main__':

    # Load the Fashion-MNIST dataset.
    x_train, y_train, x_test, y_test = load_dataset()
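    # Assumption: each row of x_train / x_test is a flattened 28x28 grayscale
    # image (784 features with pixel values in [0, 255]) and y_train / y_test
    # hold integer class labels.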
    num_class = len(np.unique(y_train))
    feature_dim = len(x_train[0]) 
    num_value = 256
    class_names = np.array([
        "T-shirt/top", "Trouser", "Pullover", "Dress", "Coat", "Sandal",
        "Shirt", "Sneaker", "Bag", "Ankle boot"
    ])

    """ 
    Uses image indices collected from running the test set on the model. These indicees are used here to get
    the image pixel data of those images and stores them in mixed_imgs[] that will be used to print out images later.
    """
    width = 28
    mixed_imgs = np.zeros((num_class*2, width, width))
    # Placeholder reconstruction: the original hard-coded list of
    # 2 * num_class test-set indices was not preserved here.
    mixed_test_indices = list(range(num_class * 2))
    for i, test_idx in enumerate(mixed_test_indices):
        mixed_imgs[i] = np.reshape(x_test[test_idx], (width, width))
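
    # A minimal sketch for displaying the collected images in a 2 x num_class
    # grid; assumes matplotlib.pyplot is imported as plt (it is used further
    # below) and that mixed_imgs holds two images per class.
    fig_imgs, axes = plt.subplots(2, num_class, figsize=(2 * num_class, 4))
    for i in range(num_class * 2):
        ax_img = axes[i // num_class, i % num_class]
        ax_img.imshow(mixed_imgs[i], cmap="Greys")
        ax_img.set_title(class_names[i % num_class], fontsize=8)
        ax_img.axis("off")
    # The plt.show() call near the end of the script will display this figure.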
    # Naive Bayes section (left commented out): visualize the feature
    # likelihoods for high-intensity pixels.
    # plot_visualization(feature_likelihoods, class_names, "Greys")
    # Classify the test sets.
    # accuracy, y_pred = NB.test(x_test, y_test)
    # print(accuracy)
    # Plot confusion matrix.
    # plot_confusion_matrix(y_test, y_pred, classes=class_names, normalize=True,
    #                       title='Confusion matrix, with normalization')
    # plt.show()

    # Initialize perceptron model.
    perceptron = MultiClassPerceptron(num_class, feature_dim)
    # Train model.
    perceptron.train(x_train, y_train)
    # Visualize the learned perceptron weights.
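    # Note: w[:-1, :] is assumed to drop a trailing bias row so that only the
    # 784 per-pixel weights are reshaped into 28x28 images.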
    plot_visualization(perceptron.w[:-1, :], class_names, None)
    # Classify the test sets.
    accuracy, y_pred = perceptron.test(x_test, y_test)
    print("Accuracy: ", accuracy)
    # Plot confusion matrix.
    plot_confusion_matrix(y_test,
                          y_pred,
                          classes=class_names,
                          normalize=True,
                          title='Confusion matrix, with normalization')
    plt.show()
    # Report the test examples with the highest class posterior probabilities
    # per class (behavior assumed from the method name).
    perceptron.high_class_posterior_probs(x_test, y_test)