def test_metrics_two_predictions_one_gt_2():
    width, height = 10, 10
    bbox = BBox(0, 2, 0, 2).normalize(width, height)
    annotations = [
        Annotation.ground_truth("a", bbox),
        Annotation.prediction("a", bbox, 0.9),
        Annotation.prediction("a", bbox.move(0.5, 0.5), 0.5),
    ]
    metrics = get_metrics([annotations])
    assert metrics.per_class["a"].total_FP == 1
    assert metrics.per_class["a"].total_TP == 1
    assert metrics.mAP == 1
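

# Why mAP stays 1 above despite the duplicate FP can be checked by hand.
# The helper below is an illustrative sketch assuming all-points interpolation
# of the precision-recall curve; it is not the implementation behind
# get_metrics.
def _average_precision_sketch(ranked_hits, n_gt):
    tp = fp = 0
    recalls, precisions = [], []
    for is_tp in ranked_hits:  # predictions sorted by descending confidence
        tp += is_tp
        fp += not is_tp
        recalls.append(tp / n_gt)
        precisions.append(tp / (tp + fp))
    ap, prev_recall = 0.0, 0.0
    for i, recall in enumerate(recalls):
        # interpolate: best precision at any recall >= the current recall
        precision = max(precisions[i:])
        ap += (recall - prev_recall) * precision
        prev_recall = recall
    return ap


def test_average_precision_sketch_duplicate_fp():
    # TP at confidence 0.9, then a duplicate FP at 0.5: full recall is reached
    # before the FP, so AP (and hence mAP for the single class) is still 1.
    assert _average_precision_sketch([True, False], n_gt=1) == 1.0
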
def test_iou_threshold():
    bbox = BBox(0, 5, 0, 5)
    annotations = [
        Annotation.ground_truth("a", bbox),
        Annotation.prediction("a", bbox.move(2.5, 0), 0.9),
    ]

    metrics = get_metrics([annotations], iou_threshold=0.9)
    assert metrics.per_class["a"].total_FP == 1
    assert metrics.per_class["a"].total_TP == 0
    assert metrics.mAP == 0

    metrics = get_metrics([annotations], iou_threshold=0.2)
    assert metrics.per_class["a"].total_FP == 0
    assert metrics.per_class["a"].total_TP == 1
    assert metrics.mAP == 1
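

# The IoU behind test_iou_threshold can be verified by hand: shifting the
# 5x5 box by half its width gives IoU = 1/3, which falls between the two
# thresholds used above. The helper below is a hypothetical sketch with boxes
# given as (xmin, xmax, ymin, ymax); it is not the library's own IoU code.
def _iou_sketch(a, b):
    ix = max(0.0, min(a[1], b[1]) - max(a[0], b[0]))
    iy = max(0.0, min(a[3], b[3]) - max(a[2], b[2]))
    inter = ix * iy
    area_a = (a[1] - a[0]) * (a[3] - a[2])
    area_b = (b[1] - b[0]) * (b[3] - b[2])
    return inter / (area_a + area_b - inter)


def test_iou_sketch_of_shifted_box():
    # intersection 2.5 * 5 = 12.5, union 25 + 25 - 12.5 = 37.5, IoU = 1/3
    assert abs(_iou_sketch((0, 5, 0, 5), (2.5, 7.5, 0, 5)) - 1 / 3) < 1e-9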