Example #1
def accuracy(confusion_matrix):
    # Unpack a 2x2 confusion matrix as tn, fp, fn, tp (scikit-learn's layout).
    tn, fp, fn, tp = confusion_matrix.ravel()
    PPV = tp / (tp + fp)                      # positive predictive value (= precision)
    sensitivity = tp / (tp + fn)              # true positive rate (= recall)
    specificity = tn / (tn + fp)              # true negative rate
    NPV = tn / (fn + tn)                      # negative predictive value
    pos_lr = sensitivity / (1 - specificity)  # positive likelihood ratio
    neg_lr = (1 - sensitivity) / specificity  # negative likelihood ratio
    recall = tp / (tp + fn)
    precision = tp / (tp + fp)
    f1 = (2 * precision * recall) / (precision + recall)
    print("*F1: %f  *Sensitivity: %f  *Specificity: %f  *PPV: %f  *NPV: %f  *Positive-LR: %f  *Negative-LR: %f" %
          (f1, sensitivity, specificity, PPV, NPV, pos_lr, neg_lr))
    # Accuracy: correct predictions (the diagonal) over all predictions.
    diagonal_sum = confusion_matrix.trace()
    sum_of_all_elements = confusion_matrix.sum()
    return diagonal_sum / sum_of_all_elements
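A minimal usage sketch, assuming the matrix comes from scikit-learn's confusion_matrix (the labels below are illustrative):

import numpy as np
from sklearn.metrics import confusion_matrix as sk_confusion_matrix

y_true = np.array([0, 0, 1, 1, 1, 0, 1, 0])   # illustrative ground truth
y_pred = np.array([0, 1, 1, 1, 0, 0, 1, 0])   # illustrative predictions

cm = sk_confusion_matrix(y_true, y_pred)  # 2x2: rows = true class, columns = predicted
print(accuracy(cm))                       # prints the metric line, then 0.75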
Example #2
def accuracy(confusion_matrix):
    diagonal_sum = confusion_matrix.trace()
    sum_of_all_elements = confusion_matrix.sum()
    return diagonal_sum / sum_of_all_elements
Example #3
def accuracy(confusion_matrix):
    print("acc starts")
    diagonal_sum = confusion_matrix.trace()
    sum_of_all_elements = confusion_matrix.sum()
    return diagonal_sum / sum_of_all_elements
Example #4
def accuracy(confusion_matrix):
    #It contains true positive, true negative, false positive and false negatives. Thus this matrix can be used to find accuracy, recall, etc.
    diagonal_sum = confusion_matrix.trace()
    sum_of_all_elements = confusion_matrix.sum()
    return diagonal_sum / sum_of_all_elements  #formula for accuracy
Example #5
def accuracy(confusion_matrix):
    # Accuracy: total number of correct predictions / total number of samples.
    diagonal_sum = confusion_matrix.trace()
    sum_of_all_elements = confusion_matrix.sum()
    return diagonal_sum / sum_of_all_elements
Example #6
def accuracy(confusion_matrix):
    diagonal = confusion_matrix.trace()
    elements = confusion_matrix.sum()
    return diagonal / elements
Example #7
def get_cm_metrics(confusion_matrix, labels, verbose=False):
    n = len(labels)
    cm_total = confusion_matrix.sum()
    chance_agree = 0
    agree = confusion_matrix.trace() / cm_total

    supports = [0] * n        # true instances per class (row sums)
    vp = [0] * n              # true positives per class ("vp" = vrais positifs)
    fp = [0] * n              # false positives per class
    fn = [0] * n              # false negatives per class
    precisions = [0] * n
    recalls = [0] * n
    fscores = [0] * n

    label_true_sum = [0] * n  # marginal counts of each true label
    label_pred_sum = [0] * n  # marginal counts of each predicted label

    for row in range(n):
        for column in range(n):
            value = confusion_matrix[row][column]

            supports[row] += value

            label_true_sum[row] += value
            label_pred_sum[column] += value

            if row == column:
                vp[row] += value
            else:
                fp[column] += value
                fn[row] += value

    for x in range(n):
        # Guard the divisions: a class with no true (or no predicted)
        # instances would otherwise raise ZeroDivisionError.
        recalls[x] = vp[x] / (vp[x] + fn[x]) if (vp[x] + fn[x]) else 0
        precisions[x] = vp[x] / (vp[x] + fp[x]) if (vp[x] + fp[x]) else 0

        if precisions[x] == 0 or recalls[x] == 0:
            fscores[x] = 0
        else:
            fscores[x] = 2 * (precisions[x] * recalls[x]) / \
                (precisions[x] + recalls[x])

        prob_label_true = label_true_sum[x] / cm_total
        prob_label_pred = label_pred_sum[x] / cm_total

        chance_agree += prob_label_true * prob_label_pred

    if verbose:
        print()
        print(''.rjust(30), 'precision'.rjust(10), 'recall'.rjust(
            10), 'f1-score'.rjust(10), 'support'.rjust(10))
        print()
        for x in range(n):
            print(labels[x].ljust(30),
                  '{:.2f}'.format(precisions[x]).rjust(10),
                  '{:.2f}'.format(recalls[x]).rjust(10),
                  '{:.2f}'.format(fscores[x]).rjust(10),
                  '{}'.format(supports[x]).rjust(10))
        print()
    accuracy = sum(vp) / sum(supports)
    avg_fscore = sum(fscores) / n
    kappa_score = (agree - chance_agree) / (1 - chance_agree)

    return {
        'F1_scores': fscores,
        'accuracy': accuracy,
        'macro_F1': avg_fscore,
        'supports': supports,
        'precisions': precisions,
        'recalls': recalls,
        'kappa': kappa_score,
    }
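A small usage sketch with an illustrative 3-class matrix (the label names are made up); the returned kappa can be cross-checked against sklearn.metrics.cohen_kappa_score:

import numpy as np

# Rows are true labels, columns are predictions; values are illustrative.
cm = np.array([[5, 1, 0],
               [2, 6, 1],
               [0, 1, 4]])
metrics = get_cm_metrics(cm, ['cat', 'dog', 'bird'], verbose=True)
print(metrics['accuracy'])   # 0.75 (15 correct out of 20)
print(metrics['macro_F1'], metrics['kappa'])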
Example #8
def compute_accuracy(self, confusion_matrix: np.ndarray):
    """
    Total number of correct predictions (the matrix diagonal) divided
    by the total length of X_test.
    """
    return confusion_matrix.trace() / confusion_matrix.sum()
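Since this version is a method, here is a minimal sketch of how it might sit in a class (the Evaluator wrapper is hypothetical, not from the source):

import numpy as np

class Evaluator:
    # Hypothetical host class; the source only shows the method itself.
    def compute_accuracy(self, confusion_matrix: np.ndarray):
        return confusion_matrix.trace() / confusion_matrix.sum()

print(Evaluator().compute_accuracy(np.array([[3, 1], [1, 3]])))  # 0.75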
Example #9
def accuracy(confusion_matrix):
    diagonal_sum = confusion_matrix.trace()
    print('Diagonal Sum is : ', diagonal_sum)
    sum_of_all_elements = confusion_matrix.sum()
    return diagonal_sum / sum_of_all_elements