Example #1
import numpy as np
import matplotlib.pyplot as plt

# NOTE: load_mnist, NaiveBayesClassifier and plot_confusion_matrix are assumed
# to be project-local helpers imported elsewhere in the module (not shown here).


def main():
    """ Main function

    :return: None
    """

    # Load Data
    x_train, y_train, x_test, y_test, label_dict = load_mnist(
        which_type='fashion', threshold=0.5)

    # Get the Model
    nbc = NaiveBayesClassifier()

    # Train
    nbc.fit(x_train, y_train)

    # Test
    predictions = nbc.predict(x_test)

    # Evaluate accuracy
    accuracy = np.sum(np.uint8(predictions == y_test)) / len(y_test)
    print("Accuracy: ", accuracy)

    # Show Confusion Matrix
    plot_confusion_matrix(targets=y_test,
                          predictions=predictions,
                          classes=[label_dict[l] for l in label_dict])

    # Plot predictions
    plt.figure()
    while True:
        idx = np.random.randint(0, x_test.shape[0])
        x = x_test[idx]
        p = predictions[idx]
        y = y_test[idx]

        plt.imshow(x, cmap='gray')
        plt.title('Target: {}, Prediction: {}'.format(label_dict[int(y)],
                                                      label_dict[int(p)]))
        plt.waitforbuttonpress()
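The NaiveBayesClassifier class used above is not included in the snippet. As a point of reference only, here is a minimal sketch of the kind of Bernoulli naive Bayes it could implement for binarized images, assuming x_train is an (N, H, W) array of 0/1 pixels and y_train holds integer labels; the class name and the alpha smoothing parameter are illustrative, not the project's actual code.

import numpy as np


class BernoulliNaiveBayes:
    """Illustrative sketch of a Bernoulli naive Bayes for binary images."""

    def fit(self, x, y, alpha=1.0):
        # Flatten images to (N, D) feature vectors of 0/1 pixels
        x = x.reshape(len(x), -1).astype(np.float64)
        self.classes_ = np.unique(y)
        # Class priors P(c)
        self.log_prior_ = np.log(np.array([np.mean(y == c) for c in self.classes_]))
        # Laplace-smoothed per-pixel Bernoulli parameters P(pixel = 1 | c)
        theta = np.array([(x[y == c].sum(axis=0) + alpha) / ((y == c).sum() + 2 * alpha)
                          for c in self.classes_])
        self.log_theta_ = np.log(theta)
        self.log_one_minus_theta_ = np.log1p(-theta)
        return self

    def predict(self, x):
        x = x.reshape(len(x), -1).astype(np.float64)
        # log P(c) + sum over pixels of log P(pixel | c), then argmax over classes
        log_joint = (x @ self.log_theta_.T
                     + (1.0 - x) @ self.log_one_minus_theta_.T
                     + self.log_prior_)
        return self.classes_[np.argmax(log_joint, axis=1)]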
Example #2
def main():
    """ Main function """

    # load data
    x_train, y_train, x_test, y_test, label_dict = load_mnist(
        which_type='digits', threshold=0.5)

    # get the model
    nbc = NaiveBayesClassifier()

    # train
    nbc.fit(x_train, y_train)

    # test
    predictions = nbc.predict(x_test)

    # evaluate performance
    accuracy = np.sum(np.uint8(predictions == y_test)) / len(y_test)
    print('Accuracy: {}'.format(accuracy))

    # show confusion matrix
    plot_confusion_matrix(targets=y_test,
                          predictions=predictions,
                          classes=[label_dict[l] for l in label_dict])

    # plot predictions
    plt.figure()
    while True:
        idx = np.random.randint(0, x_test.shape[0])

        x = x_test[idx]
        p = predictions[idx]
        y = y_test[idx]

        plt.imshow(x, cmap='gray')
        plt.title('Target: {}, Prediction: {}'.format(label_dict[int(y)],
                                                      label_dict[int(p)]))
        plt.waitforbuttonpress()
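Similarly, plot_confusion_matrix is a project-local helper that the snippet does not define. A plausible stand-in using only numpy and matplotlib could look like the sketch below; the signature is taken from the calls above, the body is an assumption, and it presumes the labels are integer indices 0..n-1.

import numpy as np
import matplotlib.pyplot as plt


def plot_confusion_matrix(targets, predictions, classes):
    """Illustrative sketch: count (target, prediction) pairs and show them as a heatmap."""
    n = len(classes)
    cm = np.zeros((n, n), dtype=np.int64)
    for t, p in zip(np.asarray(targets, dtype=int), np.asarray(predictions, dtype=int)):
        cm[t, p] += 1

    plt.figure()
    plt.imshow(cm, cmap='Blues')
    plt.colorbar()
    plt.xticks(range(n), classes, rotation=45)
    plt.yticks(range(n), classes)
    plt.xlabel('Predicted')
    plt.ylabel('Target')
    plt.title('Confusion matrix')
    plt.tight_layout()
    plt.show()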
Example #3
from sklearn.datasets import load_iris, load_wine
from sklearn.metrics import classification_report
from sklearn.model_selection import train_test_split

from knn import KNN
from naive_bayes import NaiveBayesClassifier


### K-NEAREST NEIGHBORS ###
X, y = load_iris(return_X_y=True)

X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.33)

model = KNN()
model.fit(X_train, y_train)
predictions = model.predict(X_test)

print()
print(classification_report(y_test, predictions))


### NAIVE BAYES CLASSIFIER ###
X, y = load_wine(return_X_y=True)

X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.33)

model = NaiveBayesClassifier()
model.fit(X_train, y_train)
predictions = model.predict(X_test)

print()
print(classification_report(y_test, predictions))
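Neither knn.KNN nor naive_bayes.NaiveBayesClassifier is shown in this snippet; they are only expected to expose the fit/predict interface used above. As a rough guide to what the KNN class might contain, here is a minimal k-nearest-neighbours sketch under the assumption of a Euclidean-distance majority vote with a default k of 5 (both assumptions, not taken from the project).

import numpy as np


class KNN:
    """Illustrative sketch of a k-nearest-neighbours classifier (majority vote)."""

    def __init__(self, k=5):
        self.k = k

    def fit(self, X, y):
        # Lazy learner: just memorise the training data
        self.X_ = np.asarray(X, dtype=float)
        self.y_ = np.asarray(y)
        return self

    def predict(self, X):
        X = np.asarray(X, dtype=float)
        preds = np.empty(len(X), dtype=self.y_.dtype)
        for i, x in enumerate(X):
            # Euclidean distance from x to every training point
            dists = np.linalg.norm(self.X_ - x, axis=1)
            nearest = self.y_[np.argsort(dists)[:self.k]]
            # Majority vote among the k nearest labels
            values, counts = np.unique(nearest, return_counts=True)
            preds[i] = values[np.argmax(counts)]
        return preds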
Example #4
import numpy as np
from sklearn.datasets import make_classification
from sklearn.model_selection import train_test_split

from naive_bayes import NaiveBayesClassifier


def compute_accuracy(y_true, y_pred):
    return np.sum(y_true == y_pred) / len(y_true)


if __name__ == '__main__':
    X, y = make_classification(n_samples=1000, n_features=10, n_classes=2)
    X_train, X_test, Y_train, Y_test = train_test_split(X, y, test_size=0.2, shuffle=True)

    clf = NaiveBayesClassifier()
    clf.fit(X_train, Y_train)

    predictions = clf.predict(X_test)

    accuracy = compute_accuracy(Y_test, predictions)
    print("The accuracy of the model is: {}".format(accuracy))