def test_pandas_confusion_binary_cm_inverse(self):
        y_true = [True, True, False, False, False, True, False, True, True,
                  False, True, False, False, False, False, False, True, False,
                  True, True, True, True, False, False, False, True, False,
                  True, False, False, False, False, True, True, False, False,
                  False, True, True, True, True, False, False, False, False,
                  True, False, False, False, False, False, False, False, False,
                  False, True, True, False, True, False, True, True, True,
                  False, False, True, False, True, False, False, True, False,
                  False, False, False, False, False, False, False, True, False,
                  True, True, True, True, False, False, True, False, True,
                  True, False, True, False, True, False, False, True, True,
                  False, False, True, True, False, False, False, False, False,
                  False, True, True, False]

        y_pred = [False, False, False, False, False, True, False, False, True,
                  False, True, False, False, False, False, False, False, False,
                  True, True, True, True, False, False, False, False, False,
                  False, False, False, False, False, True, False, False, False,
                  False, True, False, False, False, False, False, False, False,
                  True, False, False, False, False, False, False, False, False,
                  False, True, False, False, False, False, False, False, False,
                  False, False, True, False, False, False, False, True, False,
                  False, False, False, False, False, False, False, True, False,
                  False, True, False, False, False, False, True, False, True,
                  True, False, False, False, True, False, False, True, True,
                  False, False, True, True, False, False, False, False, False,
                  False, True, False, False]

        binary_cm = ConfusionMatrix(y_true, y_pred)
        assert isinstance(binary_cm, pdml.confusion_matrix.BinaryConfusionMatrix)
        bcm_sum = binary_cm.sum()

        binary_cm_r = binary_cm.inverse()  # reverse not in place
        assert bcm_sum == binary_cm_r.sum()
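The invariant exercised above (swapping the positive and negative classes of a binary confusion matrix leaves the total sample count unchanged) can be illustrated with plain scikit-learn. This is a minimal sketch of the idea, not the pandas_ml inverse() API; the short label lists are made up for the example.

# Minimal sketch, assuming scikit-learn is installed; the toy labels are illustrative only.
from sklearn.metrics import confusion_matrix

y_true = [True, False, True, True, False]
y_pred = [True, False, False, True, True]

cm = confusion_matrix(y_true, y_pred)   # rows = actual class, columns = predicted class
cm_swapped = cm[::-1, ::-1]             # exchange the roles of True and False
assert cm.sum() == cm_swapped.sum() == len(y_true)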
    def test_pandas_confusion_cm_binarize(self):
        y_true = ['rabbit', 'cat', 'rabbit', 'rabbit', 'cat', 'dog', 'dog', 'rabbit', 'rabbit', 'cat', 'dog', 'rabbit']
        y_pred = ['cat', 'cat', 'rabbit', 'dog', 'cat', 'rabbit', 'dog', 'cat', 'rabbit', 'cat', 'rabbit', 'rabbit']

        cm = ConfusionMatrix(y_true, y_pred)
        assert isinstance(cm, pdml.confusion_matrix.LabeledConfusionMatrix)

        print("Confusion matrix:\n%s" % cm)
        select = ['cat', 'dog']

        print("Binarize with %s" % select)
        binary_cm = cm.binarize(select)

        print("Binary confusion matrix:\n%s" % binary_cm)

        assert cm.sum() == binary_cm.sum()
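For reference, the binarization checked above can be sketched with scikit-learn alone: each label is mapped to whether it belongs to the selected set, and the resulting 2x2 matrix still sums to the number of samples. This only illustrates the idea behind cm.binarize(select), not the pandas_ml implementation; the short label lists are made up.

# Minimal sketch, assuming scikit-learn is installed; the toy labels are illustrative only.
from sklearn.metrics import confusion_matrix

y_true = ['rabbit', 'cat', 'dog', 'rabbit', 'cat']
y_pred = ['cat', 'cat', 'dog', 'rabbit', 'rabbit']
select = {'cat', 'dog'}

b_true = [label in select for label in y_true]   # True if the actual label is in `select`
b_pred = [label in select for label in y_pred]   # True if the predicted label is in `select`

binary_cm = confusion_matrix(b_true, b_pred)
assert binary_cm.sum() == len(y_true)            # binarizing never changes the total count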
Example #5
    # classifier.score() returns mean accuracy on the held-out data (despite the variable name)
    test_error = classifier.score(X_test, Y_test)
    #print(train_error, test_error)
    #-------------> EVALUATION

    evaluation_file.write('SVM_EVALUATION:' + title)
    evaluation_file.write('\n\nConfusion_matrix:\n')
    evaluation_file.write(str(confusion_matrix(Y_test, Y_pred)))
    evaluation_file.write('\n\nClassification report:\n')
    evaluation_file.write(str(classification_report(Y_test, Y_pred)))
    #evaluation_file.write("\nAccuracy is "+str(accuracy_score(Y_test,Y_pred)*100) + '\n')
    evaluation_file.write('\n\n')
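    # pandas_ml's ConfusionMatrix also offers a stats() summary, appended to the report as plain text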
    cm = ConfusionMatrix(Y_test, Y_pred)
    evaluation_file.write(str(cm.stats()))

    # Row-normalise the sklearn confusion matrix so each row (actual class) sums to 100%
    cm = confusion_matrix(Y_test, Y_pred)
    cm = cm.astype('float') * 100 / cm.sum(axis=1)[:, np.newaxis]
    sn.heatmap(cm, annot=True, cmap='YlGnBu')
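    # Place tick labels at the cell centres (0.5 .. 3.5) for the four classes A-D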
    plt.xticks(np.arange(0.5, 4.5, 1), ('A', 'B', 'C', 'D'))
    plt.yticks(np.arange(0.5, 4.5, 1), ('A', 'B', 'C', 'D'))
    plt.savefig(path_results + title + '/' + 'Confusion_Matrix.pdf')
    #plt.show()
    plt.close()

    print("\nAccuracy is " + str(accuracy_score(Y_test, Y_pred) * 100) + '\n')

    #X_test_plot = scaling.inverse_transform(X_test)
    value = 1.5
    width = 100

    scatter_kwargs = {'s': 0.001, 'edgecolor': None, 'alpha': 0.7}