from sklearn import svm
from sklearn.ensemble import RandomForestClassifier
from sklearn.naive_bayes import MultinomialNB


def run_Multinomial_Naive_Bayes(clf, alpha, count, fold, use_balanced_set,
                                use_feature_selection):
    # Get the training/validation split for this fold, then fit and score.
    training_samples, training_labels, validation_samples, validation_labels = samples_and_labels(
        count, fold, use_balanced_set, use_feature_selection, True)
    fit_and_predict(clf, alpha, count, fold, training_samples, training_labels,
                    validation_samples, validation_labels, use_balanced_set,
                    use_feature_selection)
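

# A minimal driver sketch for the cross-validation helper above: sweep a few
# alpha values over the folds. The alpha grid, count=10 and the 5-fold range
# are illustrative placeholders, not settings taken from the original project.
def sweep_multinomial_nb_alpha():
    for alpha in (0.01, 0.1, 1.0):
        for fold in range(5):
            clf = MultinomialNB(alpha=alpha)
            run_Multinomial_Naive_Bayes(clf, alpha, count=10, fold=fold,
                                        use_balanced_set=True,
                                        use_feature_selection=False)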


def run_SVM_Linear(clf, C, count, fold, use_balanced_set,
                   use_feature_selection):
    # Get the training/validation split for this fold, then fit and score.
    training_samples, training_labels, validation_samples, validation_labels = samples_and_labels(
        count, fold, use_balanced_set, use_feature_selection)
    fit_and_predict(clf, C, count, fold, training_samples, training_labels,
                    validation_samples, validation_labels, use_balanced_set,
                    use_feature_selection)


def run_Random_Forest(clf, num_tree, max_depth, count, fold, use_balanced_set,
                      use_feature_selection):
    # Get the training/validation split for this fold, then fit and score.
    training_samples, training_labels, validation_samples, validation_labels = samples_and_labels(
        count, fold, use_balanced_set, use_feature_selection)
    fit_and_predict(clf, num_tree, max_depth, count, fold, training_samples,
                    training_labels, validation_samples, validation_labels,
                    use_balanced_set, use_feature_selection)
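

# samples_and_labels is not defined in this listing. The sketch below is only
# an assumption about the kind of fold split such a helper might perform,
# shown self-contained on synthetic data with scikit-learn's KFold; the sizes
# and the name example_fold_split are made up for illustration.
import numpy as np
from sklearn.model_selection import KFold


def example_fold_split(n_folds=10, fold=0, seed=0):
    rng = np.random.RandomState(seed)
    samples = rng.rand(100, 20)           # 100 samples, 20 features (made up)
    labels = rng.randint(0, 2, size=100)  # binary labels (made up)
    splits = list(KFold(n_splits=n_folds, shuffle=True,
                        random_state=seed).split(samples))
    train_idx, val_idx = splits[fold]
    return (samples[train_idx], labels[train_idx],
            samples[val_idx], labels[val_idx])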


def test_SVM_Linear(tempC, count, use_balanced_set, use_feature_selection):
    # Fit a linear SVM on the full training split, then score the test split.
    clf = svm.LinearSVC(C=tempC)
    training_samples, training_labels, test_samples, test_labels = samples_and_labels(
        count, 0, use_balanced_set, use_feature_selection)
    clf.fit(training_samples, training_labels)
    result = clf.predict(test_samples)
    accuracy, precision, recall, specificity, f_score = calculate_precision_recall(
        test_labels, result)
    create_result_txt_for_roc_and_pr_plots('svm_linear', clf, test_samples,
                                           test_labels, use_feature_selection)
    print('Accuracy = {}\nPrecision = {}\nRecall = {}\nSpecificity = {}\n'
          'F1 Score = {}\n'.format(accuracy, precision, recall, specificity,
                                   f_score))
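

# calculate_precision_recall is not shown either. Below is one plausible
# binary-classification version, derived from the confusion matrix; treat it
# as an illustration of the metrics printed above, not the project's code.
from sklearn.metrics import confusion_matrix


def example_calculate_precision_recall(true_labels, predicted_labels):
    tn, fp, fn, tp = confusion_matrix(true_labels, predicted_labels).ravel()
    accuracy = (tp + tn) / float(tp + tn + fp + fn)
    precision = tp / float(tp + fp) if (tp + fp) else 0.0
    recall = tp / float(tp + fn) if (tp + fn) else 0.0
    specificity = tn / float(tn + fp) if (tn + fp) else 0.0
    f_score = (2 * precision * recall / (precision + recall)
               if (precision + recall) else 0.0)
    return accuracy, precision, recall, specificity, f_score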


def test_Random_Forest(num_tree, max_depth, count, use_balanced_set,
                       use_feature_selection):
    # Fit a random forest on the full training split, then score the test split.
    clf = RandomForestClassifier(n_estimators=num_tree, max_depth=max_depth)
    training_samples, training_labels, test_samples, test_labels = samples_and_labels(
        count, 0, use_balanced_set, use_feature_selection)
    clf.fit(training_samples, training_labels)
    result = clf.predict(test_samples)
    accuracy, precision, recall, specificity, f_score = calculate_precision_recall(
        test_labels, result)
    create_result_txt_for_roc_and_pr_plots('random_forest', clf, test_samples,
                                           test_labels, use_feature_selection)
    print('Accuracy = {}\nPrecision = {}\nRecall = {}\nSpecificity = {}\n'
          'F1 Score = {}\n'.format(accuracy, precision, recall, specificity,
                                   f_score))
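

# create_result_txt_for_roc_and_pr_plots is project-specific. A guess at its
# role: write per-sample scores next to the true labels so ROC and PR curves
# can be plotted later. The output file name pattern below is an assumption.
def example_dump_scores_for_roc_pr(name, clf, samples, labels):
    if hasattr(clf, 'predict_proba'):
        scores = clf.predict_proba(samples)[:, 1]  # probability of class 1
    else:
        scores = clf.decision_function(samples)    # e.g. LinearSVC margin
    with open(name + '_scores.txt', 'w') as out:
        for label, score in zip(labels, scores):
            out.write('{} {}\n'.format(label, score))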


def test_Multinomial_Naive_bayes(alpha, count, use_balanced_set,
                                 use_feature_selection):
    # Fit multinomial naive Bayes on the training split, then score the test split.
    clf = MultinomialNB(alpha=alpha)
    training_samples, training_labels, test_samples, test_labels = samples_and_labels(
        count, 0, use_balanced_set, use_feature_selection, True)
    clf.fit(training_samples, training_labels)
    result = clf.predict(test_samples)
    accuracy, precision, recall, specificity, f_score = calculate_precision_recall(
        test_labels, result)
    if not use_feature_selection:
        create_result_txt_for_roc_and_pr_plots('multinomial_naive_bayes', clf,
                                               test_samples, test_labels,
                                               use_feature_selection)
    print('Accuracy = {}\nPrecision = {}\nRecall = {}\nSpecificity = {}\n'
          'F1 Score = {}\n'.format(accuracy, precision, recall, specificity,
                                   f_score))
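

# Illustrative entry point: the hyperparameter values below are arbitrary
# placeholders, not tuned settings from the original project.
if __name__ == '__main__':
    test_Multinomial_Naive_bayes(alpha=0.1, count=10, use_balanced_set=True,
                                 use_feature_selection=False)
    test_SVM_Linear(tempC=1.0, count=10, use_balanced_set=True,
                    use_feature_selection=False)
    test_Random_Forest(num_tree=100, max_depth=10, count=10,
                       use_balanced_set=True, use_feature_selection=False)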