"""Minimal Gaussian Naive Bayes example.

Fits a GNB classifier on six 2-D points from two classes, then predicts
the label of a new sample near the first cluster.
"""
import numpy as np

from scikits.learn.naive_bayes import GNB

# Training data: three points per class (class 1 in the negative
# quadrant, class 2 in the positive quadrant).
X = np.array([[-1, -1], [-2, -1], [-3, -2], [1, 1], [2, 1], [3, 2]])
Y = np.array([1, 1, 1, 2, 2, 2])

clf = GNB()
clf.fit(X, Y)
# NOTE(review): the original had a stray bare `GNB()` expression here —
# a leftover echo of `fit`'s repr from a pasted interactive session.
# It constructed and discarded an unused estimator; removed.
print(clf.predict([[-0.8, -1]]))
# NOTE(review): this chunk starts mid-way through a parameter dict whose
# opening (presumably `libsvm_parameters = { ...`) lies outside the
# visible region; the leading fragment is kept verbatim.
    'eps': 1e-3,
}
# Benchmark liblinear-backed SVM; benchmark() is defined elsewhere in the
# file and returns (error_rate, train_time, test_time).
libsvm_res = benchmark(LinearSVC(**libsvm_parameters))
libsvm_err, libsvm_train_time, libsvm_test_time = libsvm_res

######################################################################
## Train SGD model
sgd_parameters = {
    'alpha': 0.001,
    'n_iter': 2,
}
sgd_err, sgd_train_time, sgd_test_time = benchmark(SGD(**sgd_parameters))

######################################################################
## Train GNB model
gnb_err, gnb_train_time, gnb_test_time = benchmark(GNB())

######################################################################
## Print classification performance
print("")
print("Classification performance:")
print("===========================")
print("")


def print_row(clf_type, train_time, test_time, err):
    """Print one aligned results row: name, timings, and error rate."""
    print("%s %s %s %s" % (clf_type.ljust(12),
                           ("%.4fs" % train_time).center(10),
                           ("%.4fs" % test_time).center(10),
                           ("%.4f" % err).center(10)))


# Header row for the results table printed by print_row.
print("%s %s %s %s" % ("Classifier ",
                       "train-time",
                       "test-time",
                       "error-rate"))
""" ============================ Gaussian Naive Bayes ============================ A classification example using Gaussian Naive Bayes (GNB). """ ################################################################################ # import some data to play with # The IRIS dataset from scikits.learn import datasets iris = datasets.load_iris() X = iris.data y = iris.target ################################################################################ # GNB from scikits.learn.naive_bayes import GNB gnb = GNB() y_pred = gnb.fit(X, y).predict(X) print "Number of mislabeled points : %d" % (y != y_pred).sum()