Code example #1
        # Accumulate this fold's TPR, interpolated onto the shared FPR grid,
        # so a mean ROC curve can be formed after all folds.
        # NOTE(review): `interp` is presumably numpy.interp (or the deprecated
        # scipy.interp) — confirm the import outside this fragment.
        mean_tpr += interp(mean_fpr, fpr, tpr)
        mean_tpr[0] = 0.0  # pin the curve to the origin
        # Fold AUC from the positive-class probability column.
        roc_auc = compute_auc(homesite.train_y[test], probas_[:, 1])
        # Hard 0/1 predictions: round the probability matrix, then take the
        # positive-class column for the confusion matrix.
        fold_cm = confusion_matrix(homesite.train_y[test], np.round(probas_)[:, 1])
        # Stack this fold's confusion matrix along a 3rd axis for later saving.
        confusion_matrix_history = np.dstack((confusion_matrix_history, fold_cm))

        # Scalar metrics for this fold; running sums feed the fold averages below.
        accuracy, precision, recall = compute_performance_metrics(fold_cm)
        mean_acc += accuracy
        mean_recall += recall
        mean_precision += precision

        # Per-fold histories kept for persistence.
        accuracy_history.append(accuracy)
        precision_history.append(precision)
        recall_history.append(recall)
        auc_history.append(roc_auc)

        # Persist intermediate results every fold (checkpoint style), keyed by
        # the current hyper-parameter value `c`.
        save_np_array("../../results/random_forests/rf_accuracy_" + str(c) + ".bin", np.array(accuracy_history))
        save_np_array("../../results/random_forests/rf_precision_" + str(c) + ".bin", np.array(precision_history))
        save_np_array("../../results/random_forests/rf_recall_" + str(c) + ".bin", np.array(recall_history))
        save_np_array("../../results/random_forests/rf_auc_" + str(c) + ".bin", np.array(auc_history))
        save_np_array("../../results/random_forests/rf_confusion_matrix_" + str(c) + ".bin", np.array(confusion_matrix_history))
        # Per-fold ROC curve; `i` is presumably the fold index — confirm in the
        # enclosing loop header (outside this fragment).
        plt.plot(fpr, tpr, lw = 1, label = 'ROC fold %d (area = %0.2f)' % (i, roc_auc))

    # Average the accumulated sums over the folds.
    # NOTE(review): assumes len(cvs) equals the number of loop iterations above
    # — verify against the loop definition outside this fragment.
    mean_acc /= len(cvs)
    mean_recall /= len(cvs)
    mean_precision /= len(cvs)
    mean_tpr /= len(cvs)
    mean_tpr[-1] = 1.0  # pin the mean curve to (1, 1)
    mean_auc = auc(mean_fpr, mean_tpr)
    plot_roc(mean_fpr, mean_tpr, mean_auc)
Code example #2
                                       np.round(probas_)[:, 1])

            # Stack this fold's confusion matrix along a 3rd axis so the whole
            # history can be saved as one array.
            confusion_matrix_history = np.dstack(
                (confusion_matrix_history, fold_cm))

            # Scalar metrics for this fold; running sums feed the averages below.
            accuracy, precision, recall = compute_performance_metrics(fold_cm)
            mean_acc += accuracy
            mean_recall += recall
            mean_precision += precision

            # Per-fold histories kept for persistence.
            accuracy_history.append(accuracy)
            precision_history.append(precision)
            recall_history.append(recall)
            auc_history.append(roc_auc)

            # Checkpoint intermediate results every fold, keyed by the current
            # hyper-parameter value `c`.
            save_np_array("results/ada_accuracy_" + str(c) + ".bin",
                          np.array(accuracy_history))
            save_np_array("results/ada_precision_" + str(c) + ".bin",
                          np.array(precision_history))
            save_np_array("results/ada_recall_" + str(c) + ".bin",
                          np.array(recall_history))
            save_np_array("results/ada_auc_" + str(c) + ".bin",
                          np.array(auc_history))

            # Per-fold ROC curve; `i` is presumably the fold index — confirm in
            # the enclosing loop header (outside this fragment).
            plt.plot(fpr,
                     tpr,
                     lw=1,
                     label='ROC fold %d (area = %0.2f)' % (i, roc_auc))

        # Average the accumulated sums over the folds.
        # NOTE(review): assumes len(cvs) equals the number of loop iterations
        # above — verify against the loop definition outside this fragment.
        mean_acc /= len(cvs)
        mean_recall /= len(cvs)
        mean_precision /= len(cvs)
Code example #3
    #    homesite.train_y = homesite.train_y[reduced_range]

    # Grid search over the SVM regularization parameter C (Python 2 script).
    C = [0.2, 0.4, 0.6, 0.8, 1]
    for c in C:
        # Creating classifier.
        clf = svm.SVC(kernel='linear', class_weight='balanced', C=c)

        # Train classifier.
        print "Training classifier."
        clf.fit(homesite.train_x, homesite.train_y)

        # Test classifier.
        print 'Testing classifier.'
        predicted_labels = clf.predict(homesite.validation_x)

        # Show final results.
        # NOTE(review): SVC.predict already returns hard class labels, so
        # np.round here is presumably a no-op for 0/1 labels — confirm.
        results = confusion_matrix(homesite.validation_y,
                                   np.round(predicted_labels))
        accuracy, precision, recall = compute_performance_metrics(results)
        # NOTE(review): this local `auc` shadows the `auc` function used in the
        # companion scripts; also, computing AUC from hard labels (rather than
        # decision_function scores) yields a degenerate ROC — verify intent.
        auc = compute_auc(homesite.validation_y, predicted_labels)

        # Append one CSV row per C value: [C, precision, recall, accuracy, auc].
        result = [c, precision, recall, accuracy, auc]
        wr.writerow(result)

        # NOTE(review): the *_history lists are never appended to inside this
        # visible fragment, so these files may be saved stale/empty — confirm
        # the updates happen outside this view.
        save_np_array("results/svm_accuracy.bin", np.array(accuracy_history))
        save_np_array("results/svm_precision.bin", np.array(precision_history))
        save_np_array("results/svm_recall.bin", np.array(recall_history))
        save_np_array("results/svm_auc.bin", np.array(auc_history))

        # Release the classifier before the next grid point to limit memory use.
        del clf
Code example #4
        # Test classifier.
        print 'Testing classifier.'
        # Positive-class probabilities on the validation set.
        predicted_labels = clf.predict_proba(homesite.validation_x)[:, 1]

        # Show final results.
        # Hard 0/1 predictions via rounding at the 0.5 threshold.
        results = confusion_matrix(homesite.validation_y,
                                   np.round(predicted_labels))
        accuracy, precision, recall = compute_performance_metrics(results)
        # NOTE(review): this local `auc` shadows any imported `auc` function
        # (as used in the companion CV scripts) — rename if both are needed.
        auc = compute_auc(homesite.validation_y, predicted_labels)

        # Per-grid-point histories kept for persistence.
        accuracy_history.append(accuracy)
        precision_history.append(precision)
        recall_history.append(recall)
        auc_history.append(auc)

        # NOTE(review): `i * 10` is presumably the grid value (e.g. number of
        # trees = i*10) — confirm against the enclosing loop header.
        print 'Saving result.', i * 10
        # Checkpoint all histories after each grid point.
        save_np_array(
            "../homesite_data/results/random_forest_grid_search_accuracy.bin",
            np.array(accuracy_history))
        save_np_array(
            "../homesite_data/results/random_forest_grid_search_precision.bin",
            np.array(precision_history))
        save_np_array(
            "../homesite_data/results/random_forest_grid_search_recall.bin",
            np.array(recall_history))
        save_np_array(
            "../homesite_data/results/random_forest_grid_search_auc.bin",
            np.array(auc_history))

        # Release the classifier before the next grid point to limit memory use.
        del clf
Code example #5
        # Fold AUC from the positive-class probability column.
        roc_auc = compute_auc(homesite.train_y[test], probas_[:, 1])
        # Hard 0/1 predictions: round the probability matrix, then take the
        # positive-class column for the confusion matrix.
        fold_cm = confusion_matrix(homesite.train_y[test],
                                   np.round(probas_)[:, 1])
        # Stack this fold's confusion matrix along a 3rd axis for later saving.
        confusion_matrix_history = np.dstack(
            (confusion_matrix_history, fold_cm))

        # Scalar metrics for this fold; running sums feed the fold averages.
        accuracy, precision, recall = compute_performance_metrics(fold_cm)
        mean_acc += accuracy
        mean_recall += recall
        mean_precision += precision
        # Per-fold histories kept for persistence.
        accuracy_history.append(accuracy)
        precision_history.append(precision)
        recall_history.append(recall)
        auc_history.append(roc_auc)

        # Checkpoint intermediate results every fold, keyed by the current
        # hyper-parameter value `c`.
        save_np_array("../../results/ANN/ann_accuracy_" + str(c) + ".bin",
                      np.array(accuracy_history))
        save_np_array("../../results/ANN/ann_precision_" + str(c) + ".bin",
                      np.array(precision_history))
        save_np_array("../../results/ANN/ann_recall_" + str(c) + ".bin",
                      np.array(recall_history))
        save_np_array("../../results/ANN/ann_auc_" + str(c) + ".bin",
                      np.array(auc_history))
        save_np_array(
            "../../results/ANN/ann_confusion_matrix_" + str(c) + ".bin",
            np.array(confusion_matrix_history))
        # Per-fold ROC curve; `i` is presumably the fold index — confirm in the
        # enclosing loop header (outside this fragment).
        plt.plot(fpr,
                 tpr,
                 lw=1,
                 label='ROC fold %d (area = %0.2f)' % (i, roc_auc))

    # Average the accumulated sums over the folds.
    # NOTE(review): assumes len(cvs) equals the number of loop iterations above.
    mean_acc /= len(cvs)