Example #1
0
def test_count():
    """``count`` equals the sum of all confusion-matrix cells that were supplied."""
    two_cell_metrics = BinaryClassificationMetrics(cls=['a', 'b'], FN=1, TP=2)
    # FN + TP = 1 + 2
    assert two_cell_metrics.count == 3

    three_cell_metrics = BinaryClassificationMetrics(cls=['a', 'b'], FN=1, TP=2, FP=2)
    # FN + TP + FP = 1 + 2 + 2
    assert three_cell_metrics.count == 5
def test_precision_recall_curve():
    """The precision/recall curve matches the precomputed reference, to 4 decimals."""
    classification = BinaryClassificationMetrics(cls='a', FN=FN, TP=TP, FP=FP)
    detection = BinaryDetectionMetrics(classification, 10, predictions)

    observed = [
        tuple(round(coordinate, 4) for coordinate in point)
        for point in detection.precision_recall_curve
    ]
    # Reference points computed once by hand / a trusted run.
    reference = [
        (0.0667, 1.0000),
        (0.0667, 0.5000),
        (0.1333, 0.6667),
        (0.1333, 0.5000),
        (0.1333, 0.4000),
        (0.1333, 0.3333),
        (0.1333, 0.2857),
        (0.1333, 0.2500),
        (0.1333, 0.2222),
        (0.2000, 0.3000),
        (0.2000, 0.2727),
        (0.2667, 0.3333),
        (0.3333, 0.3846),
        (0.4000, 0.4286),
        (0.4000, 0.4000),
        (0.4000, 0.3750),
        (0.4000, 0.3529),
        (0.4000, 0.3333),
        (0.4000, 0.3158),
        (0.4000, 0.3000),
        (0.4000, 0.2857),
        (0.4000, 0.2727),
        (0.4667, 0.3043),
        (0.4667, 0.2917),
    ]

    assert observed == reference
def test_iou():
    """``iou`` reflects the value passed to the BinaryDetectionMetrics constructor."""
    classification = BinaryClassificationMetrics(cls='a', FN=FN, TP=TP, FP=FP)
    detection = BinaryDetectionMetrics(classification, 10, predictions)
    assert detection.iou == 10
def test_pred_count():
    """``predicted_count`` counts the supplied predictions (24 in the fixture)."""
    classification = BinaryClassificationMetrics(cls='a', FN=FN, TP=TP, FP=FP)
    detection = BinaryDetectionMetrics(classification, 0, predictions)
    assert detection.predicted_count == 24
def test_average_precision():
    """Average precision, both 11-point interpolated and all-point variants."""
    classification = BinaryClassificationMetrics(cls='a', FN=FN, TP=TP, FP=FP)
    detection = BinaryDetectionMetrics(classification, 10, predictions)

    # 11-point interpolation vs. the default (all-point) computation.
    assert round(detection.average_precision(11), 4) == .2684
    assert round(detection.average_precision(), 4) == .2457
Example #6
0
def test_f_score_zero():
    """With an all-zero confusion matrix, ``f_score`` is defined to be 1."""
    empty_metrics = BinaryClassificationMetrics(cls=['a', 'b'], FN=0, TN=0, TP=0, FP=0)
    assert empty_metrics.f_score == 1
Example #7
0
def test_precision_zero():
    """With no positive predictions (TP=FP=0), ``precision`` falls back to 1."""
    no_positive_metrics = BinaryClassificationMetrics(cls=['a', 'b'], TP=0, FP=0)
    assert no_positive_metrics.precision == 1
Example #8
0
def test_f_score():
    """F1 for TP=5, FP=6, FN=4: 2*5 / (2*5 + 6 + 4) = 0.5."""
    metrics = BinaryClassificationMetrics(cls=['a', 'b'], FN=4, TN=2, TP=5, FP=6)
    assert metrics.f_score == 0.5
Example #9
0
def test_specificity_zero():
    """With no actual negatives (FP=TN=0), ``specificity`` is NaN."""
    no_negative_metrics = BinaryClassificationMetrics(cls=['a', 'b'], FP=0, TN=0)
    assert math.isnan(no_negative_metrics.specificity)
Example #10
0
def test_precision():
    """Precision = TP / (TP + FP) = 8 / 10 = 0.8."""
    metrics = BinaryClassificationMetrics(cls=['a', 'b'], TP=8, FP=2)
    assert metrics.precision == 0.8
Example #11
0
def test_specificity():
    """Specificity = TN / (TN + FP) = 6 / 8 = 0.75."""
    metrics = BinaryClassificationMetrics(cls=['a', 'b'], FP=2, TN=6)
    assert metrics.specificity == 0.75
Example #12
0
def test_fpr():
    """False positive rate = FP / (FP + TN) = 2 / 8 = 0.25."""
    metrics = BinaryClassificationMetrics(cls=['a', 'b'], FP=2, TN=6)
    assert metrics.false_positive_rate == 0.25
Example #13
0
def test_recall_zero():
    """With no actual positives (TP=FN=0), ``recall`` is NaN."""
    no_positive_metrics = BinaryClassificationMetrics(cls=['a', 'b'], TP=0, FN=0)
    assert math.isnan(no_positive_metrics.recall)
Example #14
0
def test_recall():
    """Recall = TP / (TP + FN) = 2 / 8 = 0.25."""
    metrics = BinaryClassificationMetrics(cls=['a', 'b'], TP=2, FN=6)
    assert metrics.recall == 0.25
Example #15
0
def test_accuracy():
    """Accuracy = (TP + TN) / total = 4 / 8 = 0.5."""
    metrics = BinaryClassificationMetrics(cls=['a', 'b'], FN=2, TN=2, TP=2, FP=2)
    assert metrics.accuracy == 0.5