# Example #1
def test_naive_bayes(scr):
    """Train and evaluate MultinomialNaiveBayes on the corpus fixture ``scr``.

    ``scr`` must expose ``train_X``/``train_y``/``test_X``/``test_y``
    (the attributes read below).  Asserts the known-good accuracies for
    this fixture and that training emits the documented warning.
    """
    mnb = mnbb.MultinomialNaiveBayes()

    # Training is documented to emit
    #   RuntimeWarning: divide by zero encountered in log
    # Record warnings (instead of silencing them) so we can verify the
    # warning was actually issued — this implements the old TODO.
    with warnings.catch_warnings(record=True) as caught:
        warnings.simplefilter("always")
        params_nb_sc = mnb.train(scr.train_X, scr.train_y)
    assert any(
        issubclass(w.category, RuntimeWarning) and "divide by zero" in str(w.message)
        for w in caught
    ), "expected 'divide by zero encountered in log' RuntimeWarning during train"

    # Accuracy on the data the model was trained on.
    y_pred_train = mnb.test(scr.train_X, params_nb_sc)
    acc_train = mnb.evaluate(scr.train_y, y_pred_train)
    assert allclose(acc_train, 0.987500, tolerance)

    # Generalization accuracy on the held-out test split.
    y_pred_test = mnb.test(scr.test_X, params_nb_sc)
    acc_test = mnb.evaluate(scr.test_y, y_pred_test)
    assert allclose(acc_test, 0.635000, tolerance)
# Example #2
# Predict labels for the test split using the trained SVM parameters
y_pred_test = svm.test(sd.test_X, params_svm_sd)
# Accuracy on the test split from predicted vs. true labels
acc_test = svm.evaluate(sd.test_y, y_pred_test)
# Overlay the SVM decision boundary on the shared dataset figure
fig, axis = sd.add_line(fig, axis, params_svm_sd, "SVM", "yellow")
# NOTE: Python 2 `print` statement syntax (not the Python 3 function)
print "SVM Online Simple Dataset Accuracy train: %f test: %f" % (acc_train,
                                                                 acc_test)
print

# End of exercise 3.1 #########

# Exercise 3.2: implement Naive Bayes for multinomial data ########

# Load the "books" sentiment corpus; exposes train_X/train_y/test_X/test_y
scr = srs.SentimentCorpus("books")

# Initialize the Naive Bayes classifier for multinomial data.
# NOTE(review): this rebinds `mnb` from the imported module to the classifier
# instance, so the module itself is unreachable from here on.  It works, but a
# distinct instance name would be safer — confirm no later code needs the module.
mnb = mnb.MultinomialNaiveBayes()

# Learn the NB parameters from the train data
params_nb_sc = mnb.train(scr.train_X, scr.train_y)

# Use the learned parameters to predict labels for the training data
y_pred_train = mnb.test(scr.train_X, params_nb_sc)

# Compute accuracy on training data from predicted labels and true labels
acc_train = mnb.evaluate(scr.train_y, y_pred_train)

# Use the learned parameters to predict labels for the test data
y_pred_test = mnb.test(scr.test_X, params_nb_sc)

# Compute accuracy on test data from predicted labels and true labels
acc_test = mnb.evaluate(scr.test_y, y_pred_test)