    def test_zero_detection(self):
        gt = make_representation(["0 0 0 5 5; 1 10 10 20 20"], is_ground_truth=True)
        pred = make_representation(["0 30 30 40 40"], score=1)

        metric = _test_metric_wrapper(DetectionMAP, multi_class_dataset())
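        # The lone prediction does not overlap either ground-truth box, so the only
        # reported class ('dog', label 0) gets an average precision of zero.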
        assert metric(gt, pred) == [0.0]
        assert metric.meta.get('names') == ['dog']

    def test_no_warnings_in_map_calculation(self, recwarn):
        gt = make_representation(["0 0 0 5 5; 1 10 10 20 20", "1 0 0 5 5"], is_ground_truth=True)
        pred = make_representation(["0 0 0 5 5; 1 10 10 20 20", "1 0 0 5 5"], score=1)

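        # Evaluating predictions identical to the ground truth should not emit any
        # warnings; the built-in recwarn fixture records everything raised here.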
        _test_metric_wrapper(DetectionMAP, multi_class_dataset())(gt, pred)
        assert len(recwarn) == 0

    def test_no_detections_warn_user_warning(self):
        gt = make_representation(["0 0 0 5 5; 1 10 10 20 20"], is_ground_truth=True)
        pred = make_representation("", score=1)
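
        # An empty prediction set must still be handled: the metric should warn the
        # user exactly once and report a zero mAP instead of failing.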
        with pytest.warns(UserWarning) as warnings:
            map_ = _test_metric_wrapper(DetectionMAP, multi_class_dataset())(gt, pred)[0]

        assert len(warnings) == 1
        assert map_ == 0

    def test_one_false_alarm(self):
        gt = make_representation(["0 0 0 5 5", "1 0 0 5 5"], is_ground_truth=True)
        pred = make_representation(["1 10 10 20 20; 0 0 0 5 5", "1 0 0 5 5"], score=1)
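
        # 'dog' (label 0) is detected exactly where its ground truth is, giving AP 1.0;
        # 'cat' (label 1) has one correct detection plus one false alarm at the same
        # score, which halves its average precision to 0.5.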
        metric = _test_metric_wrapper(DetectionMAP, multi_class_dataset())
        values = metric(gt, pred)
        assert values == [1.0, 0.5]
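        # Mean AP averages the per-class values: (1.0 + 0.5) / 2 = 0.75.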
        map_ = np.mean(values)
        assert map_ == 0.75
        assert metric.meta.get('names') == ['dog', 'cat']