Ejemplo n.º 1
0
    def decode(roi_locs, roi_scores, indices_and_rois, test_rois_num, configer, metas):
        """Decode per-RoI regression offsets into per-image detections.

        Args:
            roi_locs: per-RoI, per-class box offsets, shape (N, num_classes * 4).
            roi_scores: per-RoI class scores, shape (N, num_classes).
            indices_and_rois: rows of (batch_index, x1, y1, x2, y2).
            test_rois_num: 1-D tensor holding the RoI count of each image.
            configer: project configuration accessor.
            metas: per-image metadata dicts with 'border_size' and 'ori_img_size'.

        Returns:
            A list with one (K, 6) tensor [x1, y1, x2, y2, score, label] per
            image, or None where nothing survives thresholding / NMS.
        """
        # Fix: dropped the no-op self-assignment `indices_and_rois = indices_and_rois`.
        num_classes = configer.get('data', 'num_classes')
        # Undo the loc normalization applied at training time.
        mean = torch.Tensor(configer.get('roi', 'loc_normalize_mean')).repeat(num_classes)[None]
        std = torch.Tensor(configer.get('roi', 'loc_normalize_std')).repeat(num_classes)[None]
        mean = mean.to(roi_locs.device)
        std = std.to(roi_locs.device)

        roi_locs = (roi_locs * std + mean)
        roi_locs = roi_locs.contiguous().view(-1, num_classes, 4)

        # Drop the batch-index column and broadcast each RoI over all classes.
        rois = indices_and_rois[:, 1:]
        rois = rois.contiguous().view(-1, 1, 4).expand_as(roi_locs)
        # (dx, dy, dw, dh) offsets relative to the RoI -> absolute corner boxes.
        wh = torch.exp(roi_locs[:, :, 2:]) * (rois[:, :, 2:] - rois[:, :, :2])
        cxcy = roi_locs[:, :, :2] * (rois[:, :, 2:] - rois[:, :, :2]) + (rois[:, :, :2] + rois[:, :, 2:]) / 2
        dst_bbox = torch.cat([cxcy - wh / 2, cxcy + wh / 2], 2)

        if configer.get('phase') != 'debug':
            cls_prob = F.softmax(roi_scores, dim=1)
        else:
            cls_prob = roi_scores

        cls_label = torch.LongTensor([i for i in range(num_classes)])\
            .contiguous().view(1, num_classes).repeat(indices_and_rois.size(0), 1).to(roi_locs.device)

        output = [None for _ in range(test_rois_num.size(0))]
        start_index = 0
        for i in range(test_rois_num.size(0)):
            tmp_dst_bbox = dst_bbox[start_index:start_index + test_rois_num[i]]
            # Clip to the padded ('border') canvas, then rescale to the original
            # image. NOTE(review): both axes are rescaled by the x-ratio only;
            # this assumes the resize preserved aspect ratio — confirm.
            tmp_dst_bbox[:, :, 0::2] = tmp_dst_bbox[:, :, 0::2].clamp(min=0, max=metas[i]['border_size'][0] - 1)
            tmp_dst_bbox[:, :, 1::2] = tmp_dst_bbox[:, :, 1::2].clamp(min=0, max=metas[i]['border_size'][1] - 1)
            tmp_dst_bbox *= (metas[i]['ori_img_size'][0] / metas[i]['border_size'][0])

            tmp_cls_prob = cls_prob[start_index:start_index + test_rois_num[i]]
            tmp_cls_label = cls_label[start_index:start_index + test_rois_num[i]]
            start_index += test_rois_num[i]

            # Keep confident candidates whose label is non-background (> 0).
            mask = (tmp_cls_prob > configer.get('res', 'val_conf_thre')) & (tmp_cls_label > 0)

            tmp_dst_bbox = tmp_dst_bbox[mask].contiguous().view(-1, 4)
            if tmp_dst_bbox.numel() == 0:
                continue

            tmp_cls_prob = tmp_cls_prob[mask].contiguous().view(-1,).unsqueeze(1)
            tmp_cls_label = tmp_cls_label[mask].contiguous().view(-1,).unsqueeze(1)

            valid_preds = torch.cat((tmp_dst_bbox, tmp_cls_prob.float(), tmp_cls_label.float()), 1)

            valid_ind = DetHelper.cls_nms(valid_preds[:, :5],
                                          labels=valid_preds[:, 5],
                                          max_threshold=configer.get('res', 'nms')['max_threshold'],
                                          return_ind=True)

            output[i] = valid_preds[valid_ind]

        return output
Ejemplo n.º 2
0
    def decode(bbox, conf, default_boxes, configer, input_size):
        """Decode SSD-style offsets against default boxes into per-image detections."""
        if configer.get('phase') != 'debug':
            conf = F.softmax(conf, dim=-1)

        num_classes = configer.get('data', 'num_classes')
        priors = default_boxes.unsqueeze(0).repeat(bbox.size(0), 1, 1).to(bbox.device)

        variances = [0.1, 0.2]
        # Offsets -> (cx, cy, w, h) against the priors, then to corner form.
        box_wh = torch.exp(bbox[:, :, 2:] * variances[1]) * priors[:, :, 2:]
        box_cxcy = bbox[:, :, :2] * variances[0] * priors[:, :, 2:] + priors[:, :, :2]
        boxes = torch.cat([box_cxcy - box_wh / 2, box_cxcy + box_wh / 2], 2)

        batch_size, num_priors, _ = boxes.size()
        # Replicate each box once per class so rows align with flattened scores.
        boxes = boxes.unsqueeze(2).repeat(1, 1, num_classes, 1)
        boxes = boxes.contiguous().view(batch_size, -1, 4)

        # Clip to the input canvas.
        boxes[:, :, 0::2] = boxes[:, :, 0::2].clamp(min=0, max=input_size[0] - 1)
        boxes[:, :, 1::2] = boxes[:, :, 1::2].clamp(min=0, max=input_size[1] - 1)

        labels = torch.Tensor(list(range(num_classes))).to(boxes.device)
        labels = labels.view(1, 1, -1, 1).repeat(batch_size, num_priors, 1, 1)
        labels = labels.contiguous().view(batch_size, -1, 1)
        max_conf = conf.contiguous().view(batch_size, -1, 1)

        predictions = torch.cat((boxes, max_conf.float(), labels.float()), 2)
        output = [None] * len(predictions)
        for image_i, image_pred in enumerate(predictions):
            # Rows whose class label is non-zero (i.e. not background).
            ids = labels[image_i].squeeze(1).nonzero().contiguous().view(-1,)
            if ids.numel() == 0:
                continue

            candidates = image_pred[ids]
            _, order = candidates[:, 4].sort(0, descending=True)
            candidates = candidates[order[:configer.get('res', 'nms')['pre_nms']]]
            candidates = candidates[candidates[:, 4] > configer.get('res', 'val_conf_thre')]
            if candidates.numel() == 0:
                continue

            candidates = DetHelper.cls_nms(candidates[:, :6],
                                           labels=candidates[:, 5],
                                           max_threshold=configer.get('res', 'nms')['max_threshold'],
                                           cls_keep_num=configer.get('res', 'cls_keep_num'))

            _, order = candidates[:, 4].sort(0, descending=True)
            output[image_i] = candidates[order[:configer.get('res', 'max_per_image')]]

        return output
Ejemplo n.º 3
0
    def decode(batch_pred_bboxes, configer):
        """Convert (cx, cy, w, h) predictions to clipped corner boxes and run NMS."""
        half_w = batch_pred_bboxes[:, :, 2] / 2
        half_h = batch_pred_bboxes[:, :, 3] / 2

        box_corner = batch_pred_bboxes.new(batch_pred_bboxes.shape)
        box_corner[:, :, 0] = batch_pred_bboxes[:, :, 0] - half_w
        box_corner[:, :, 1] = batch_pred_bboxes[:, :, 1] - half_h
        box_corner[:, :, 2] = batch_pred_bboxes[:, :, 0] + half_w
        box_corner[:, :, 3] = batch_pred_bboxes[:, :, 1] + half_h

        # Keep boxes inside the normalized [0, 1] canvas, then write back.
        box_corner[:, :, 0::2] = box_corner[:, :, 0::2].clamp(min=0, max=1.0)
        box_corner[:, :, 1::2] = box_corner[:, :, 1::2].clamp(min=0, max=1.0)
        batch_pred_bboxes[:, :, :4] = box_corner[:, :, :4]

        output = [None] * len(batch_pred_bboxes)
        for image_i, image_pred in enumerate(batch_pred_bboxes):
            # Objectness gate: drop rows below the visualization threshold.
            conf_mask = (image_pred[:, 4] > configer.get('vis', 'obj_threshold')).squeeze()
            image_pred = image_pred[conf_mask]
            if image_pred.numel() == 0:
                continue

            # Best class score and its index per surviving row.
            num_classes = configer.get('data', 'num_classes')
            class_conf, class_pred = torch.max(image_pred[:, 5:5 + num_classes], 1, keepdim=True)
            # Detections ordered as (x1, y1, x2, y2, obj_conf, class_conf, class_pred).
            detections = torch.cat((image_pred[:, :5], class_conf.float(), class_pred.float()), 1)
            keep_index = DetHelper.cls_nms(image_pred[:, :4],
                                           scores=image_pred[:, 4],
                                           labels=class_pred.squeeze(1),
                                           nms_threshold=configer.get('nms', 'max_threshold'),
                                           iou_mode=configer.get('nms', 'mode'),
                                           nms_mode='cython_nms')

            output[image_i] = detections[keep_index]

        return output
Ejemplo n.º 4
0
    def decode(loc, conf, configer, meta):
        """Expand boxes across classes, rescale to the original image, and NMS."""
        batch_size, num_priors, _ = loc.size()
        num_classes = configer.get('data', 'num_classes')

        # One row per (prior, class) pair so boxes line up with flattened scores.
        loc = loc.unsqueeze(2).repeat(1, 1, num_classes, 1)
        loc = loc.contiguous().view(loc.size(0), -1, 4)

        labels = torch.Tensor(list(range(num_classes))).to(loc.device)
        labels = labels.view(1, 1, -1, 1).repeat(batch_size, num_priors, 1, 1)
        labels = labels.contiguous().view(batch_size, -1, 1)
        conf = conf.contiguous().view(batch_size, -1, 1)

        predictions = torch.cat((loc.float(), conf.float(), labels.float()), 2)
        output = [None] * len(predictions)
        for i, image_pred in enumerate(predictions):
            # Scale coordinates back to the original image dimensions.
            ori_w = meta[i]['ori_img_size'][0]
            ori_h = meta[i]['ori_img_size'][1]
            image_pred[:, 0] *= ori_w
            image_pred[:, 1] *= ori_h
            image_pred[:, 2] *= ori_w
            image_pred[:, 3] *= ori_h
            # Rows whose class label is non-zero (i.e. not background).
            ids = labels[i].squeeze(1).nonzero().contiguous().view(-1, )
            if ids.numel() == 0:
                continue

            candidates = image_pred[ids]
            _, order = candidates[:, 4].sort(0, descending=True)
            candidates = candidates[order[:configer.get('res', 'nms')['pre_nms']]]
            candidates = candidates[candidates[:, 4] > configer.get('res', 'val_conf_thre')]
            if candidates.numel() == 0:
                continue

            valid_ind = DetHelper.cls_nms(candidates[:, :5],
                                          labels=candidates[:, 5],
                                          max_threshold=configer.get('res', 'nms')['max_threshold'],
                                          cls_keep_num=configer.get('res', 'cls_keep_num'),
                                          return_ind=True)

            candidates = candidates[valid_ind]
            _, order = candidates[:, 4].sort(0, descending=True)
            output[i] = candidates[order[:configer.get('res', 'max_per_image')]]

        return output
Ejemplo n.º 5
0
    def decode(batch_detections, configer, meta):
        """Rescale detections to the original image, threshold, and NMS per image."""
        output = [None] * len(meta)
        for i, info in enumerate(meta):
            image_pred = batch_detections[i]
            # Normalized coordinates -> original image pixels.
            image_pred[:, 0] *= info['ori_img_size'][0]
            image_pred[:, 1] *= info['ori_img_size'][1]
            image_pred[:, 2] *= info['ori_img_size'][0]
            image_pred[:, 3] *= info['ori_img_size'][1]
            # Drop rows below the confidence threshold.
            image_pred = image_pred[image_pred[:, 4] > configer.get('res', 'val_conf_thre')]
            if image_pred.numel() == 0:
                continue

            # Best class score and its index per surviving row.
            num_classes = configer.get('data', 'num_classes')
            class_conf, class_pred = torch.max(image_pred[:, 5:5 + num_classes], 1, keepdim=True)
            # Detections ordered as (x1, y1, x2, y2, obj_conf, class_conf, class_pred).
            detections = torch.cat((image_pred[:, :5], class_conf.float(), class_pred.float()), 1)
            valid_ind = DetHelper.cls_nms(detections[:, :5], labels=class_pred.squeeze(1),
                                          max_threshold=configer.get('res', 'nms')['max_threshold'], return_ind=True)
            output[i] = detections[valid_ind]

        return output
Ejemplo n.º 6
0
    def decode(roi_locs, roi_scores, indices_and_rois, test_rois_num, configer,
               input_size):
        """Decode per-RoI regression offsets into per-image detections (CPU).

        Args:
            roi_locs: per-RoI, per-class box offsets (N, num_classes * 4).
            roi_scores: per-RoI class scores (N, num_classes).
            indices_and_rois: rows of (batch_index, x1, y1, x2, y2).
            test_rois_num: 1-D tensor holding the RoI count of each image.
            configer: project configuration accessor.
            input_size: (width, height) used to clip decoded boxes.

        Returns:
            A list with one (K, 6) tensor [x1, y1, x2, y2, score, label] per
            image, or None where nothing survives thresholding / NMS.
        """
        # Everything is moved to CPU before decoding.
        roi_locs = roi_locs.cpu()
        roi_scores = roi_scores.cpu()
        indices_and_rois = indices_and_rois.cpu()
        num_classes = configer.get('data', 'num_classes')
        # Undo the loc normalization applied at training time.
        mean = torch.Tensor(configer.get(
            'roi', 'loc_normalize_mean')).repeat(num_classes)[None]
        std = torch.Tensor(configer.get(
            'roi', 'loc_normalize_std')).repeat(num_classes)[None]
        mean = mean.to(roi_locs.device)
        std = std.to(roi_locs.device)

        roi_locs = (roi_locs * std + mean)
        roi_locs = roi_locs.contiguous().view(-1, num_classes, 4)
        # roi_locs = roi_locs[:,:, [1, 0, 3, 2]]

        # Drop the batch-index column; broadcast each RoI over all classes.
        rois = indices_and_rois[:, 1:]
        rois = rois.contiguous().view(-1, 1, 4).expand_as(roi_locs)
        # (dx, dy, dw, dh) offsets relative to the RoI -> absolute corner boxes.
        wh = torch.exp(roi_locs[:, :, 2:]) * (rois[:, :, 2:] - rois[:, :, :2])
        cxcy = roi_locs[:, :, :2] * (rois[:, :, 2:] - rois[:, :, :2]) + (
            rois[:, :, :2] + rois[:, :, 2:]) / 2
        dst_bbox = torch.cat([cxcy - wh / 2, cxcy + wh / 2], 2)  # [b, 8732,4]

        # clip bounding box to the input canvas
        dst_bbox[:, :, 0::2] = (dst_bbox[:, :,
                                         0::2]).clamp(min=0,
                                                      max=input_size[0] - 1)
        dst_bbox[:, :, 1::2] = (dst_bbox[:, :,
                                         1::2]).clamp(min=0,
                                                      max=input_size[1] - 1)

        # Scores are raw logits in debug phase, otherwise softmax probabilities.
        if configer.get('phase') != 'debug':
            cls_prob = F.softmax(roi_scores, dim=1)
        else:
            cls_prob = roi_scores

        # Per-RoI row of class indices 0..num_classes-1, aligned with cls_prob.
        cls_label = torch.LongTensor([i for i in range(num_classes)])\
            .contiguous().view(1, num_classes).repeat(indices_and_rois.size(0), 1)

        output = [None for _ in range(test_rois_num.size(0))]
        start_index = 0
        for i in range(test_rois_num.size(0)):
            # batch_index = (indices_and_rois[:, 0] == i).nonzero().contiguous().view(-1,)
            # tmp_dst_bbox = dst_bbox[batch_index]
            # tmp_cls_prob = cls_prob[batch_index]
            # tmp_cls_label = cls_label[batch_index]
            # RoIs are stored contiguously per image; slice this image's span.
            tmp_dst_bbox = dst_bbox[start_index:start_index + test_rois_num[i]]
            tmp_cls_prob = cls_prob[start_index:start_index + test_rois_num[i]]
            tmp_cls_label = cls_label[start_index:start_index +
                                      test_rois_num[i]]
            start_index += test_rois_num[i]

            # Keep confident candidates whose label is non-background (> 0).
            mask = (tmp_cls_prob > configer.get(
                'vis', 'conf_threshold')) & (tmp_cls_label > 0)

            tmp_dst_bbox = tmp_dst_bbox[mask].contiguous().view(-1, 4)
            if tmp_dst_bbox.numel() == 0:
                continue

            tmp_cls_prob = tmp_cls_prob[mask].contiguous().view(
                -1, ).unsqueeze(1)
            tmp_cls_label = tmp_cls_label[mask].contiguous().view(
                -1, ).unsqueeze(1)

            # Rows of (x1, y1, x2, y2, score, label).
            valid_preds = torch.cat(
                (tmp_dst_bbox, tmp_cls_prob.float(), tmp_cls_label.float()), 1)

            keep = DetHelper.cls_nms(valid_preds[:, :4],
                                     scores=valid_preds[:, 4],
                                     labels=valid_preds[:, 5],
                                     nms_threshold=configer.get(
                                         'nms', 'overlap_threshold'),
                                     iou_mode=configer.get('nms', 'mode'))

            output[i] = valid_preds[keep]

        return output