Example 1
    def match_targets_to_proposals(self, proposal, target):
        match_quality_matrix = boxlist_iou(target, proposal)
        matched_idxs = self.proposal_matcher(match_quality_matrix)
        # Fast R-CNN only needs the "labels" field for selecting the targets
        target = target.copy_quad_with_fields(["labels"])
        # get the targets corresponding GT for each proposal
        # NB: need to clamp the indices because we can have a single
        # GT in the image, and matched_idxs can be -2, which goes
        # out of bounds
        matched_targets = target[matched_idxs.clamp(min=0)]
        matched_targets.add_field("matched_idxs", matched_idxs)
        return matched_targets
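
A short, hedged aside on the clamp trick used in all of the matching helpers here: the proposal matcher follows the maskrcnn-benchmark convention of returning one matched ground-truth index per proposal, with negative codes for non-matches (-1 below the low IoU threshold, -2 between thresholds). The sketch below uses plain PyTorch only; the tensors are made-up illustration, not part of the original code.

    # Why matched_idxs.clamp(min=0) is needed before indexing: the negative
    # "no match" codes would otherwise index out of bounds (or wrap around).
    import torch

    gt_labels = torch.tensor([3, 7])             # labels of two ground-truth boxes
    matched_idxs = torch.tensor([1, -1, 0, -2])  # one matcher result per proposal

    # clamp(min=0) maps the negative codes onto GT index 0 so the gather is
    # always valid; the unclamped matched_idxs is kept alongside so that
    # background (-1) and ignored (-2) proposals can be masked out later.
    matched_labels = gt_labels[matched_idxs.clamp(min=0)]
    print(matched_labels)     # tensor([7, 3, 3, 3])
    print(matched_idxs >= 0)  # tensor([ True, False,  True, False])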
Example 2
    def match_targets_to_proposals(self, proposal, target):
        match_quality_matrix = boxlist_iou(target, proposal)
        matched_idxs = self.proposal_matcher(match_quality_matrix)
        # Hier R-CNN needs the "labels" and "hier" fields for creating the targets
        target = target.copy_with_fields(["labels", "hier"])
        # get the targets corresponding GT for each proposal
        # NB: need to clamp the indices because we can have a single
        # GT in the image, and matched_idxs can be -2, which goes
        # out of bounds
        matched_targets = target[matched_idxs.clamp(min=0)]
        matched_targets.add_field("matched_idxs", matched_idxs)
        return matched_targets
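
For context, a hedged usage sketch of how a target BoxList carrying the fields requested above might be prepared. It assumes the maskrcnn-benchmark BoxList API (BoxList, add_field, copy_with_fields, fields); the box coordinates, label values, and the shape of the "hier" data are made up for illustration.

    import torch
    from maskrcnn_benchmark.structures.bounding_box import BoxList

    boxes = torch.tensor([[10., 10., 50., 80.], [30., 20., 90., 100.]])
    target = BoxList(boxes, (128, 128), mode="xyxy")
    target.add_field("labels", torch.tensor([1, 2]))
    target.add_field("hier", torch.zeros(2, 4))  # hypothetical per-box hierarchy annotation
    target.add_field("scores", torch.ones(2))    # extra field, dropped by the copy below

    # copy_with_fields returns a new BoxList with the same boxes but only the
    # listed extra fields, so downstream code sees exactly what it asked for.
    slim = target.copy_with_fields(["labels", "hier"])
    print(slim.fields())  # ['labels', 'hier']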
Example 3
    def match_targets_to_anchors(self, anchor, target, copied_fields=[]):
        match_quality_matrix = boxlist_iou(target, anchor)
        matched_idxs = self.proposal_matcher(match_quality_matrix)
        # RPN doesn't need any fields from target for creating the labels,
        # so copy only the explicitly requested fields (none by default)
        target = target.copy_with_fields(copied_fields)
        # get the targets corresponding GT for each anchor
        # NB: need to clamp the indices because we can have a single
        # GT in the image, and matched_idxs can be -2, which goes
        # out of bounds
        matched_targets = target[matched_idxs.clamp(min=0)]
        matched_targets.add_field("matched_idxs", matched_idxs)
        return matched_targets
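
A minimal sketch, in plain PyTorch, of how the "matched_idxs" field returned by the helpers above is typically turned into per-anchor objectness labels in the RPN loss. It assumes the maskrcnn-benchmark matcher convention that -1 means below the low IoU threshold and -2 means between thresholds; the example tensor is made up.

    import torch

    matched_idxs = torch.tensor([2, -1, 0, -2, 1])

    labels = (matched_idxs >= 0).to(torch.float32)  # 1.0 = anchor matched to a GT box
    labels[matched_idxs == -1] = 0                  # below low threshold -> background
    labels[matched_idxs == -2] = -1                 # between thresholds -> ignored by the loss
    print(labels)  # tensor([ 1.,  0.,  1., -1.,  1.])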
Example 4
    def compute(self, limit, area_range):
        gt_overlaps = []
        num_pos = 0
        for image_id, prediction in enumerate(self.cocoDt):
            original_id = self.cocoGt.id_to_img_map[image_id]

            img_info = self.cocoGt.get_img_info(image_id)
            image_width = img_info["width"]
            image_height = img_info["height"]
            prediction = prediction.resize((image_width, image_height))

            # sort predictions in descending order
            inds = prediction.get_field("objectness").sort(descending=True)[1]
            prediction = prediction[inds]

            ann_ids = self.cocoGt.coco.getAnnIds(imgIds=original_id)
            anno = self.cocoGt.coco.loadAnns(ann_ids)
            gt_boxes = [obj["bbox"] for obj in anno if obj["iscrowd"] == 0]
            gt_boxes = torch.as_tensor(gt_boxes).reshape(-1, 4)  # guard against no boxes
            gt_boxes = BoxList(gt_boxes, (image_width, image_height), mode="xywh").convert("xyxy")
            gt_areas = torch.as_tensor([obj["area"] for obj in anno if obj["iscrowd"] == 0])

            if len(gt_boxes) == 0:
                continue

            valid_gt_inds = (gt_areas >= area_range[0]) & (gt_areas <= area_range[1])
            gt_boxes = gt_boxes[valid_gt_inds]

            num_pos += len(gt_boxes)

            if len(gt_boxes) == 0:
                continue

            if len(prediction) == 0:
                continue

            if limit is not None and len(prediction) > limit:
                prediction = prediction[:limit]

            overlaps = boxlist_iou(prediction, gt_boxes)
            _gt_overlaps = torch.zeros(len(gt_boxes))
            for j in range(min(len(prediction), len(gt_boxes))):
                # find which proposal box maximally covers each gt box
                # and get the iou amount of coverage for each gt box
                max_overlaps, argmax_overlaps = overlaps.max(dim=0)

                # find which gt box is 'best' covered (i.e. 'best' = most iou)
                gt_ovr, gt_ind = max_overlaps.max(dim=0)
                assert gt_ovr >= 0
                # find the proposal box that covers the best covered gt box
                box_ind = argmax_overlaps[gt_ind]
                # record the iou coverage of this gt box
                _gt_overlaps[j] = overlaps[box_ind, gt_ind]
                assert _gt_overlaps[j] == gt_ovr
                # mark the proposal box and the gt box as used
                overlaps[box_ind, :] = -1
                overlaps[:, gt_ind] = -1

            # append recorded iou coverage level
            gt_overlaps.append(_gt_overlaps)
        # guard against the case where no image contributed any matched GT boxes
        gt_overlaps = torch.cat(gt_overlaps, dim=0) if gt_overlaps else torch.zeros(0)
        gt_overlaps, _ = torch.sort(gt_overlaps)

        if self.thresholds is None:
            step = 0.05
            self.thresholds = torch.arange(0.5, 0.95 + 1e-5, step, dtype=torch.float32)
        recalls = torch.zeros_like(self.thresholds)
        # compute recall for each iou threshold
        for i, t in enumerate(self.thresholds):
            recalls[i] = (gt_overlaps >= t).float().sum() / float(num_pos)
        # ar = 2 * np.trapz(recalls, thresholds)
        ar = recalls.mean()
        return {"ar": ar, "recalls": recalls, "thresholds": self.thresholds,
                "gt_overlaps": gt_overlaps, "num_pos": num_pos}