def test1(self):
    # Train on the first 12,478 lines and evaluate on the held-out remainder.
    classifier = nbc.Bayes_Classifier()
    classifier.train(data[:12478])
    predictions = classifier.classify(data[12478:])
    fp, fn = f_score(data[12478:], predictions)
    print(fp, fn)
    self.assertGreater(fp, 0.90)
    self.assertGreater(fn, 0.60)
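In this project f_score returns two values; given the 0.90 and 0.60 thresholds they look like per-class quality scores (for example, F-scores for the positive and negative review classes) rather than raw false-positive/false-negative counts, although the helper itself is not shown. A minimal sketch under that assumption, with actual and predicted taken as parallel sequences of '5'/'1' labels (in the test the first argument is raw review lines, so the real helper presumably extracts labels first; the name f_score_pair is hypothetical):

def f_score_pair(actual, predicted):
    # Hypothetical helper: per-class F-scores, '5' positive and '1' negative.
    def f1(target):
        tp = sum(1 for a, p in zip(actual, predicted) if a == target and p == target)
        fp = sum(1 for a, p in zip(actual, predicted) if a != target and p == target)
        fn = sum(1 for a, p in zip(actual, predicted) if a == target and p != target)
        if tp == 0:
            return 0.0
        precision = tp / (tp + fp)
        recall = tp / (tp + fn)
        return 2 * precision * recall / (precision + recall)
    return f1('5'), f1('1')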
Example #2
def main():

    # sys.argv[0] is the path of this script itself.
    source_name = sys.argv[0]
    check_imports(source_name)

    # Train, classify the A review set, and score it against the answer key.
    bayes = nbc.Bayes_Classifier()
    bayes.train('train.txt')
    predict = bayes.classify('classifyA.txt')
    fA = f_score('answersA.txt', predict)

    print('Classifier F-Scores:')
    print('  Reviews A: %.2f' % fA)
    sys.exit(0)
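All of these examples call the same nbc.Bayes_Classifier interface: train() on labeled review lines (or a file of them) and classify() returning one predicted label per line. The course implementation is not shown here; below is a minimal multinomial Naive Bayes sketch of that interface, assuming each line begins with its rating character ('5' or '1') followed by the review text (the class name SimpleBayesClassifier and the parsing are illustrative only):

import math
from collections import Counter, defaultdict

class SimpleBayesClassifier:
    def __init__(self):
        self.word_counts = defaultdict(Counter)   # class label -> word frequencies
        self.class_counts = Counter()             # class label -> document count
        self.vocab = set()

    def _lines(self, data):
        # Accept either a filename (as in Examples #2-#3) or a list of lines.
        if isinstance(data, str):
            with open(data, encoding='utf-8') as f:
                return f.readlines()
        return list(data)

    def _parse(self, line):
        # Assumed format: rating character first, review text after it.
        return line[0], line[1:].lower().split()

    def train(self, data):
        for line in self._lines(data):
            label, words = self._parse(line)
            self.class_counts[label] += 1
            self.word_counts[label].update(words)
            self.vocab.update(words)

    def classify(self, data):
        predictions = []
        total_docs = sum(self.class_counts.values())
        for line in self._lines(data):
            _, words = self._parse(line)
            best_label, best_score = None, float('-inf')
            for label, doc_count in self.class_counts.items():
                # Log prior plus add-one-smoothed log likelihoods.
                score = math.log(doc_count / total_docs)
                denom = sum(self.word_counts[label].values()) + len(self.vocab)
                for word in words:
                    score += math.log((self.word_counts[label][word] + 1) / denom)
                if score > best_score:
                    best_label, best_score = label, score
            predictions.append(best_label)
        return predictions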
Example #3
File: main.py Project: EuniceWan/AI
def main():

    source_name = sys.argv[0]
    check_imports(source_name)

    bayes = nbc.Bayes_Classifier()
    bayes.train('/home/xwl7110/EECS348_Labs/Labs/Lab5/src/train.txt')
    predict = bayes.classify(
        '/home/xwl7110/EECS348_Labs/Labs/Lab5/src/classifyA.txt')
    fA = f_score('/home/xwl7110/EECS348_Labs/Labs/Lab5/src/answersA.txt',
                 predict)

    print('Classifier F-Scores:')
    print(predict)
    print('  Reviews A: %.2f' % fA)
Example #4
    def test(self):
        sum_fp = 0
        sum_fn = 0
        # 10-fold cross-validation over contiguous blocks of 1,400 lines: each
        # pass trains on the other nine blocks and tests on the held-out block.
        for i in range(1, 11):
            k = i * 1400

            classifier = nbc.Bayes_Classifier()
            classifier.train(data[0:k - 1400] + data[k:])
            predictions = classifier.classify(data[k - 1400:k])
            fp, fn = f_score(data[k - 1400:k], predictions)
            print(fp, fn)
            self.assertGreater(fp, 0.90)
            self.assertGreater(fn, 0.60)
            sum_fp += fp
            sum_fn += fn
            print("the value of k is {}".format(k))

        print("the average FP is {} and the average FN is {}".format(
            sum_fp / 10, sum_fn / 10))
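The slicing above implements hold-one-block-out cross-validation by hand. A small sketch of the same pattern, generalized to any fold size (the function name contiguous_folds is illustrative, not part of the assignment code):

def contiguous_folds(data, folds=10):
    # Yields (train, test) pairs; assumes len(data) is a multiple of `folds`.
    fold_size = len(data) // folds
    for i in range(1, folds + 1):
        k = i * fold_size
        train = data[:k - fold_size] + data[k:]   # everything except block i
        test = data[k - fold_size:k]              # the i-th contiguous block
        yield train, test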
Example #5
            fp = fp + 1
            # print('predicted 5 but actually 1', ids[i])
        if predict[i] == '1' and actual[i] == '1':
            tn = tn + 1
        if predict[i] == '1' and actual[i] == '5':
            fn = fn + 1
            # print('predicted 1 but actually 5', ids[i])

    # Precision, recall, and F-score, with '5' treated as the positive class.
    precision = float(tp) / float(tp + fp)
    recall = float(tp) / float(tp + fn)
    f_score = 2.0 * precision * recall / (precision + recall)

    return f_score


bc1 = nbc.Bayes_Classifier()
bc1.train('train_short.txt')
predict = bc1.classify('halfA.txt')
fA = f_score('answershalfA.txt', predict)
print('less skew data f score is', fA)

# bc2 = nbc.Bayes_Classifier()
# bc2.train_improve('train_short.txt')
# predict = bc2.classify_improve('halfA.txt')
# fA = f_score('answershalfA.txt',predict)
# print('less skew improved?  f is', fA)

bc3 = nbc.Bayes_Classifier()
bc3.train('train.txt')
predict = bc3.classify('classifyA.txt')
fA = f_score('answersA.txt', predict)