Example 1
    def accuracy(global_predictions, local_predictions):
        # Threshold the predicted probabilities at 0.5 to obtain hard 0/1
        # labels (note that the input arrays are modified in place).
        global_predictions[global_predictions >= 0.5] = 1
        global_predictions[global_predictions < 0.5] = 0

        local_predictions[local_predictions >= 0.5] = 1
        local_predictions[local_predictions < 0.5] = 0

        # Measure the agreement between the two sets of predictions as the
        # accuracy computed from their binary confusion matrix.
        confusion_matrix = fumt.get_confusion_matrix(global_predictions,
                                                     local_predictions,
                                                     labels=[0, 1])
        accuracy = fummet.accuracy(confusion_matrix)

        return accuracy
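The helper above assumes that global_predictions and local_predictions are 1-D arrays of probabilities for the positive class, and that fumt and fummet alias fatf.utils.metrics.tools and fatf.utils.metrics.metrics respectively (inferred from the calls, not shown in the snippet). A minimal sketch of the same thresholding flow under those assumptions:

import numpy as np

import fatf.utils.metrics.tools as fumt      # assumed alias
import fatf.utils.metrics.metrics as fummet  # assumed alias

# Positive-class probabilities produced by two models.
global_predictions = np.array([0.9, 0.2, 0.7, 0.4])
local_predictions = np.array([0.8, 0.1, 0.3, 0.6])

# Threshold at 0.5 to get hard 0/1 labels, as the helper above does.
global_labels = (global_predictions >= 0.5).astype(int)
local_labels = (local_predictions >= 0.5).astype(int)

confusion_matrix = fumt.get_confusion_matrix(global_labels, local_labels,
                                             labels=[0, 1])
print(fummet.accuracy(confusion_matrix))  # 0.5 -- the models agree on 2 of 4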
Example 2
def test_get_confusion_matrix():
    """
    Tests :func:`fatf.utils.metrics.tools.get_confusion_matrix` function.
    """
    # [[1, 1, 1],
    #  [1, 2, 1],
    #  [1, 1, 1]]
    ground_truth = np.array(['a', 'b', 'b', 'b', 'a', 'a', 'b', 'c', 'c', 'c'])
    predictions = np.array(['b', 'a', 'b', 'c', 'a', 'c', 'b', 'a', 'c', 'b'])
    # [[3, 11],
    #  [7, 5 ]]
    ground_truth_bin = np.array([
        'a', 'a', 'a', 'b', 'b', 'b', 'b', 'b', 'a', 'a', 'a', 'a', 'a', 'a',
        'a', 'b', 'b', 'b', 'b', 'b', 'b', 'b', 'b', 'b', 'b', 'b'
    ])
    predictions_bin = np.array([
        'a', 'a', 'a', 'b', 'b', 'b', 'b', 'b', 'b', 'b', 'b', 'b', 'b', 'b',
        'b', 'a', 'a', 'a', 'a', 'a', 'a', 'a', 'a', 'a', 'a', 'a'
    ])

    cmx = np.array([[1, 1, 1], [1, 2, 1], [1, 1, 1]])
    cmx_bin = np.array([[3, 11], [7, 5]])
    cmx_bb = np.array([[1, 1, 0, 1], [1, 2, 0, 1], [0, 0, 0, 0], [1, 1, 0, 1]])

    # Default labeling
    cma = fumt.get_confusion_matrix(ground_truth, predictions)
    assert np.array_equal(cmx, cma)
    cma = fumt.get_confusion_matrix(ground_truth_bin, predictions_bin)
    assert np.array_equal(cmx_bin, cma)

    # Custom non-existing labeling
    with pytest.warns(UserWarning) as w:
        cma = fumt.get_confusion_matrix(ground_truth, predictions,
                                        ['a', 'b', 'bb', 'c'])
    assert len(w) == 1
    assert str(w[0].message) == USER_WARNING.format("{'bb'}")
    assert np.array_equal(cmx_bb, cma)
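A detail worth noting from the expected matrices above: the counts only line up if rows are indexed by the predicted label and columns by the ground-truth label (in the binary case, 14 samples are predicted 'a' while only 10 are truly 'a', matching the row and column sums of [[3, 11], [7, 5]]). A small sketch, under the same assumed fumt alias, that makes this convention explicit:

import numpy as np

import fatf.utils.metrics.tools as fumt  # assumed alias, as in the tests

ground_truth = np.array(['a', 'a', 'b', 'b'])
predictions = np.array(['a', 'b', 'b', 'b'])

# One true 'a' is predicted as 'b', so the off-diagonal count should land in
# the 'b' row / 'a' column if rows index predictions:
# [[1, 0],
#  [1, 2]]
print(fumt.get_confusion_matrix(ground_truth, predictions, labels=['a', 'b']))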
Example 3
    def accuracy_prob(global_predictions,
                      local_predictions,
                      global_proba=True,
                      local_proba=True):
        # When the inputs are (n_samples, n_classes) probability matrices,
        # collapse them to predicted class indices before comparing.
        if global_proba:
            global_predictions = np.argmax(global_predictions, axis=1)
        if local_proba:
            local_predictions = np.argmax(local_predictions, axis=1)

        # Agreement between the two models measured as confusion-matrix
        # accuracy over the three classes.
        confusion_matrix = fumt.get_confusion_matrix(global_predictions,
                                                     local_predictions,
                                                     labels=[0, 1, 2])
        accuracy = fummet.accuracy(confusion_matrix)

        return accuracy
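accuracy_prob above extends the same idea to probabilistic outputs: when global_proba or local_proba is True, the corresponding argument is treated as an (n_samples, n_classes) probability matrix and collapsed to class indices with np.argmax before the confusion matrix is built. A short sketch of that flow for a three-class problem, again assuming the fumt/fummet aliases:

import numpy as np

import fatf.utils.metrics.tools as fumt      # assumed alias
import fatf.utils.metrics.metrics as fummet  # assumed alias

# (n_samples, n_classes) probability matrices for a 3-class problem.
global_proba = np.array([[0.7, 0.2, 0.1],
                         [0.1, 0.8, 0.1],
                         [0.2, 0.3, 0.5]])
local_proba = np.array([[0.6, 0.3, 0.1],
                        [0.2, 0.7, 0.1],
                        [0.1, 0.2, 0.7]])

# Collapse probabilities to predicted class indices, as accuracy_prob does.
global_classes = np.argmax(global_proba, axis=1)  # [0, 1, 2]
local_classes = np.argmax(local_proba, axis=1)    # [0, 1, 2]

confusion_matrix = fumt.get_confusion_matrix(global_classes, local_classes,
                                             labels=[0, 1, 2])
print(fummet.accuracy(confusion_matrix))  # 1.0 -- the two models fully agree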
Example 4
def test_get_confusion_matrix_errors():
    """
    Tests :func:`fatf.utils.metrics.tools.get_confusion_matrix` errors.
    """
    incorrect_shape_error_gt = ('The ground truth vector has to be '
                                '1-dimensional numpy array.')
    incorrect_shape_error_pred = ('The predictions vector has to be '
                                  '1-dimensional numpy array.')
    incorrect_shape_error_gtp = ('Both the ground truth and the predictions '
                                 'vectors have to have the same length.')
    value_error_labels_empty = 'The labels list cannot be empty.'
    value_error_labels_duplicates = 'The labels list contains duplicates.'
    value_error_labels_missing = ('The following labels are present in the '
                                  'input arrays but were not given in the '
                                  'labels parameter: {}.')
    type_error_labels = 'The labels parameter has to either a list or None.'

    two_d_array = np.array([[1, 2], [3, 4]])
    one_d_array_4 = np.array([1, 2, 3, 4])
    one_d_array_5 = np.array([1, 2, 3, 4, 5])

    cma_true = np.array([[1, 0, 0, 0, 0], [0, 1, 0, 0, 0], [0, 0, 1, 0, 0],
                         [0, 0, 0, 1, 0], [0, 0, 0, 0, 0]])

    with pytest.raises(IncorrectShapeError) as exi:
        fumt.get_confusion_matrix(two_d_array, two_d_array)
    assert str(exi.value) == incorrect_shape_error_gt
    #
    with pytest.raises(IncorrectShapeError) as exi:
        fumt.get_confusion_matrix(one_d_array_4, two_d_array)
    assert str(exi.value) == incorrect_shape_error_pred
    #
    with pytest.raises(IncorrectShapeError) as exi:
        fumt.get_confusion_matrix(one_d_array_4, one_d_array_5)
    assert str(exi.value) == incorrect_shape_error_gtp

    with pytest.raises(TypeError) as exi:
        fumt.get_confusion_matrix(one_d_array_4, one_d_array_4, 'a')
    assert str(exi.value) == type_error_labels
    #
    with pytest.raises(ValueError) as exi:
        fumt.get_confusion_matrix(one_d_array_4, one_d_array_4, [])
    assert str(exi.value) == value_error_labels_empty
    #
    with pytest.raises(ValueError) as exi:
        fumt.get_confusion_matrix(one_d_array_4, one_d_array_4, [2, 3, 2])
    assert str(exi.value) == value_error_labels_duplicates
    #
    with pytest.raises(ValueError) as exi:
        fumt.get_confusion_matrix(one_d_array_4, one_d_array_4, [2, 4, 3])
    assert str(exi.value) == value_error_labels_missing.format('{1}')

    with pytest.warns(UserWarning) as w:
        cma = fumt.get_confusion_matrix(one_d_array_4, one_d_array_4,
                                        [1, 2, 3, 4, 5])
    assert len(w) == 1
    assert str(w[0].message) == USER_WARNING.format('{5}')
    assert np.array_equal(cma, cma_true)
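The error tests above exercise the validation built into get_confusion_matrix; in application code the same checks surface as exceptions and warnings. A hedged sketch of how a caller might handle them, assuming IncorrectShapeError lives in fatf.exceptions (the import is not shown in the test snippet):

import warnings

import numpy as np

import fatf.utils.metrics.tools as fumt          # assumed alias
from fatf.exceptions import IncorrectShapeError  # assumed import path

ground_truth = np.array([1, 2, 3, 4])
predictions = np.array([1, 2, 3, 4])

try:
    with warnings.catch_warnings(record=True) as caught:
        warnings.simplefilter('always')
        # Label 5 never appears in the data, so a UserWarning is expected.
        confusion_matrix = fumt.get_confusion_matrix(ground_truth, predictions,
                                                     [1, 2, 3, 4, 5])
    for warning in caught:
        print(warning.message)
except IncorrectShapeError as exc:
    # Raised when either input is not 1-D or the two lengths differ.
    print(exc)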