Example 1
    def add_gt_proposals(self, proposals, targets):
        """
        Arguments:
            proposals: BoxList3D
            targets: list[BoxList3D]
        """
        # Get the device we're operating on
        device = proposals.bbox3d.device

        gt_boxes = [target.copy_with_fields([]) for target in targets]
        # the later cat of boxlists requires every boxlist to carry the same
        # fields, so add a dummy objectness for the ground-truth boxes
        for gt_box in gt_boxes:
            gt_box.add_field("objectness",
                             torch.ones(len(gt_box), device=device))
            gt_box.add_field("is_gt", torch.ones(len(gt_box), device=device))
            gt_box.constants = proposals.constants

        proposals.add_field("is_gt", torch.zeros(len(proposals),
                                                 device=device))

        batch_size = proposals.batch_size()
        proposals = proposals.seperate_examples()
        proposals = [
            cat_boxlist_3d((proposal, gt_box), per_example=False)
            for proposal, gt_box in zip(proposals, gt_boxes)
        ]
        proposals = cat_boxlist_3d(proposals, per_example=True)

        return proposals
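
The following minimal sketch illustrates the idea behind add_gt_proposals with plain tensors instead of BoxList3D objects; the shapes and values are invented for illustration only.

import torch

proposals = torch.rand(100, 7)      # [N, 7] RPN proposals for one example
gt_boxes = torch.rand(5, 7)         # [M, 7] ground-truth boxes

# tag every row so the ground-truth boxes can be recognised after the cat,
# mirroring the "is_gt" and "objectness" fields added above
is_gt = torch.cat([torch.zeros(len(proposals)), torch.ones(len(gt_boxes))])
objectness = torch.cat([torch.rand(len(proposals)), torch.ones(len(gt_boxes))])

all_boxes = torch.cat([proposals, gt_boxes], dim=0)   # [N + M, 7]
print(all_boxes.shape, int(is_gt.sum()))              # torch.Size([105, 7]) 5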
Example 2
    def __call__(self,
                 class_logits,
                 box_regression,
                 corners_semantic,
                 targets=None):
        """
        Computes the loss for Faster R-CNN.
        This requires that the subsample method has been called beforehand.

        Arguments:
            class_logits: [n,class_num]
            box_regression: class-agnostic [n, 7]; class-specific [n, 7*num_classes]
            corners_semantic: [n, 8*2]
            targets: for debugging only, list[BoxList3D] of length batch_size

        Returns:
            classification_loss (Tensor)
            box_loss (Tensor)
        """

        #class_logits = cat(class_logits, dim=0)
        n = class_logits.shape[0]
        assert n == box_regression.shape[0]
        assert corners_semantic is None or n == corners_semantic.shape[0]
        class_num = class_logits.shape[1]

        if not hasattr(self, "_proposals"):
            raise RuntimeError("subsample needs to be called before")

        proposals = self._proposals

        proposals = cat_boxlist_3d(proposals, per_example=True)
        labels = proposals.get_field("labels")
        regression_targets = proposals.get_field("regression_targets")
        pro_bbox3ds = proposals.bbox3d

        if not self.need_seperate:
            classification_loss = F.cross_entropy(class_logits, labels)
            box_loss, corner_loss = self.box_loss(labels, box_regression,
                                                  regression_targets,
                                                  pro_bbox3ds,
                                                  corners_semantic)
        else:
            classification_loss = self.seperate_classifier.roi_cross_entropy_seperated(
                class_logits, labels, proposals)
            box_loss, corner_loss = self.seperate_classifier.roi_box_loss_seperated(
                self.box_loss,
                labels,
                box_regression,
                regression_targets,
                pro_bbox3ds=pro_bbox3ds,
                corners_semantic=corners_semantic)
            pass

        if SHOW_ROI_CLASSFICATION:
            self.show_roi_cls_regs(proposals, classification_loss, box_loss,
                                   class_logits, targets, box_regression,
                                   regression_targets)

        return classification_loss, box_loss, corner_loss
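
For reference, a toy version of the two main losses returned above, assuming the standard Faster R-CNN recipe (cross-entropy for classification, smooth L1 on positive rows for the boxes); the real self.box_loss and the separated variants may differ.

import torch
import torch.nn.functional as F

n, num_classes = 32, 4
class_logits = torch.randn(n, num_classes)
labels = torch.randint(0, num_classes, (n,))
box_regression = torch.randn(n, 7)        # class-agnostic [n, 7]
regression_targets = torch.randn(n, 7)

classification_loss = F.cross_entropy(class_logits, labels)

# box regression is usually supervised only on positive (non-background) rows
pos = labels > 0
box_loss = F.smooth_l1_loss(box_regression[pos], regression_targets[pos])
print(classification_loss.item(), box_loss.item())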
Example 3
    def post_processor(self, class_logits, box_regression, proposals,
                       post_processor_fn):
        proposals_g, sep_ids_g = self.seperate_proposals(proposals)
        #for gi in range(self.group_num):
        class_logits_g = self.seperate_pred_logits(class_logits, sep_ids_g)
        box_regression_g = self.seperate_pred_box(box_regression, sep_ids_g)

        results_g = []
        for gi in range(self.group_num):
            result_gi = post_processor_fn(
                (class_logits_g[gi], box_regression_g[gi]), proposals_g[gi])
            results_g.append(result_gi)

        batch_size = len(proposals)
        result = []
        for b in range(batch_size):
            for gi in range(self.group_num):
                sep_l = results_g[gi][b].extra_fields['labels']
                results_g[gi][b].extra_fields[
                    'labels'] = self.sep_labels_to_org_labels[gi][sep_l]
            result_b = [results_g[gi][b] for gi in range(self.group_num)]
            result_b = cat_boxlist_3d(result_b, per_example=False)
            result.append(result_b)

        #print(result[0].fields())
        return result
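
The label remapping inside the loop is plain tensor indexing with a per-group lookup table; a hypothetical example (the group/class assignment below is invented):

import torch

# hypothetical group 0: {background, wall, door} -> original labels {0, 3, 5}
sep_labels_to_org_labels = torch.tensor([0, 3, 5])   # per-group lookup table
sep_l = torch.tensor([1, 2, 0, 2])                   # labels predicted inside the group
org_l = sep_labels_to_org_labels[sep_l]
print(org_l)                                         # tensor([3, 5, 0, 5])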
Example 4
    def seperate_subsample(self, proposals, targets, subsample_fn):
        proposals_g, _ = self.seperate_proposals(proposals)
        self.targets_g = self.seperate_targets_and_update_labels(targets)

        for gi in range(self.group_num):
            proposals_g[gi] = subsample_fn(proposals_g[gi], self.targets_g[gi])
            assert self.targets_g[gi][0].get_field(
                'labels').max() < self.class_nums[gi]

        bs = len(proposals)
        proposals_out = []
        for i in range(bs):
            psi = [proposals_g[j][i] for j in range(self.group_num)]
            proposals_out.append(cat_boxlist_3d(psi, per_example=False))

        #assert self.targets_1[0].get_field('labels').max() <= self.num_classes1 - 1

        if DEBUG and False:
            # NOTE: stale debug block; proposals_0/1, proposals_0_/1_ and
            # self.targets_0/1 are not defined in this scope
            show_box_fields(proposals, 'In')
            show_box_fields(proposals_0, 'Sep0')
            show_box_fields(proposals_1, 'Sep1')
            show_box_fields(proposals_0_, 'subs0')
            show_box_fields(proposals_1_, 'subs1')
            show_box_fields(proposals_out, 'Out')

            show_box_fields(self.targets_0, 'T0')
            show_box_fields(self.targets_1, 'T1')
        return proposals_out
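
The regrouping at the end of seperate_subsample can be sketched with plain lists of tensors standing in for BoxList3D objects:

import torch

group_num, batch_size = 2, 3
# proposals_g[g][b]: subsampled proposals of group g for example b
proposals_g = [[torch.rand(4, 7) for _ in range(batch_size)]
               for _ in range(group_num)]

proposals_out = []
for b in range(batch_size):
    per_example = [proposals_g[g][b] for g in range(group_num)]
    # analogue of cat_boxlist_3d(psi, per_example=False)
    proposals_out.append(torch.cat(per_example, dim=0))
print(len(proposals_out), proposals_out[0].shape)     # 3 torch.Size([8, 7])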
Example 5
    def cat_boxlist_3d_seperated(self, bboxes_ls):
        batch_size = bboxes_ls[0].batch_size()
        m = len(bboxes_ls)
        assert m == self.group_num

        bboxes_ = [None] * m
        for gi in range(m):
            bboxes_ls[gi].add_field(
                'sep_id',
                torch.ones([len(bboxes_ls[gi])], dtype=torch.int32) * gi)
            bboxes_[gi] = bboxes_ls[gi].seperate_examples()

        bboxes_ls_new = []
        for j in range(batch_size):
            bboxes_ls_new.append(
                cat_boxlist_3d([bboxes_[i][j] for i in range(m)],
                               per_example=False))
        bboxes_ls_new_all = cat_boxlist_3d(bboxes_ls_new, per_example=True)
        return bboxes_ls_new_all
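
The sep_id bookkeeping amounts to attaching a constant group id to every row before concatenation; a standalone sketch with plain tensors:

import torch

groups = [torch.rand(3, 7), torch.rand(5, 7)]
sep_ids = [torch.full((len(g),), gi, dtype=torch.int32)
           for gi, g in enumerate(groups)]

boxes_all = torch.cat(groups, dim=0)     # [8, 7]
sep_id_all = torch.cat(sep_ids, dim=0)
print(sep_id_all)    # tensor([0, 0, 0, 1, 1, 1, 1, 1], dtype=torch.int32)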
Example 6
    def __call__(self, class_logits, box_regression, targets=None):
        """
        Computes the loss for Faster R-CNN.
        This requires that the subsample method has been called beforehand.

        Arguments:
            class_logits (list[Tensor])
            box_regression (list[Tensor])
            targets: for debugging only

        Returns:
            classification_loss (Tensor)
            box_loss (Tensor)
        """

        class_logits = cat(class_logits, dim=0)
        box_regression = cat(box_regression, dim=0)

        if not hasattr(self, "_proposals"):
            raise RuntimeError("subsample needs to be called before")

        proposals = self._proposals

        proposals = cat_boxlist_3d(proposals, per_example=True)
        labels = proposals.get_field("labels")
        regression_targets = proposals.get_field("regression_targets")
        pro_bbox3ds = proposals.bbox3d

        if not self.need_seperate:
            classification_loss = F.cross_entropy(class_logits, labels)
            box_loss = self.box_loss(labels, box_regression,
                                     regression_targets, pro_bbox3ds)
        else:
            classification_loss = self.seperate_classifier.roi_cross_entropy_seperated(
                class_logits, labels, proposals)
            box_loss = self.seperate_classifier.roi_box_loss_seperated(
                self.box_loss, labels, box_regression, regression_targets,
                pro_bbox3ds)

        if SHOW_ROI_CLASSFICATION:
            self.show_roi_cls_regs(proposals, classification_loss, box_loss,
                                   class_logits, targets, box_regression,
                                   regression_targets)

        return classification_loss, box_loss
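
A small illustration of the first step, where the per-level (or per-image) lists of logits and regressions are concatenated before the losses are computed; the sizes below are arbitrary:

import torch

class_logits_list = [torch.randn(10, 4), torch.randn(6, 4)]
box_regression_list = [torch.randn(10, 7), torch.randn(6, 7)]

class_logits = torch.cat(class_logits_list, dim=0)        # [16, 4]
box_regression = torch.cat(box_regression_list, dim=0)    # [16, 7]
print(class_logits.shape, box_regression.shape)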
Example 7
    def forward_for_single_feature_map(self,
                                       anchors,
                                       objectness,
                                       box_regression,
                                       targets=None):
        """
        Arguments:
            anchors: BoxList3D, with all examples in the batch concatenated together
            objectness: tensor of shape [N]
            box_regression: tensor of shape [N, 7]
        """
        device = objectness.device
        assert objectness.shape[0] == box_regression.shape[0] == len(anchors)

        examples_idxscope = anchors.examples_idxscope
        batch_size = anchors.batch_size()
        result = []
        for bi in range(batch_size):
            # split examples in the batch
            s, e = examples_idxscope[bi]
            if SHOW_PRO_NUMS:
                print(f'\n\nRPN input anchor num: {e-s}')
            objectness_i0 = objectness[s:e]
            box_regression_i = box_regression[s:e]

            # convert objectness logits to probabilities
            objectness_i1 = objectness_i0.sigmoid()

            # keep only the top self.fpn_pre_nms_top_n proposals for NMS
            num_anchors = e - s
            fpn_pre_nms_top_n = min(self.fpn_pre_nms_top_n, num_anchors)
            objectness_i, topk_idx = objectness_i1.topk(fpn_pre_nms_top_n,
                                                        dim=0,
                                                        sorted=True)

            #batch_idx = torch.arange(N, device=device)[:, None]
            box_regression_i = box_regression_i[topk_idx]

            if anchors.size3d is None:
                pcl_size3d = None
            else:
                pcl_size3d = anchors.size3d[bi:bi + 1]
            concat_anchors_i = anchors.bbox3d[s:e, :]
            concat_anchors_i = concat_anchors_i[topk_idx]

            # decode box_regression to get proposals
            proposals_i = self.box_coder.decode(box_regression_i,
                                                concat_anchors_i)

            #*********************************************************************
            # apply nms
            examples_idxscope_new = torch.tensor([[0, proposals_i.shape[0]]])
            boxlist = BoxList3D(proposals_i,
                                pcl_size3d,
                                mode="yx_zb",
                                examples_idxscope=examples_idxscope_new,
                                constants={'prediction': True})
            boxlist.add_field("objectness", objectness_i)
            boxlist.set_as_prediction()
            if SHOW_RPN_OUT_BEFORE_NMS:
                print(
                    f'\n\n------------------------------------\n RPN out before NMS '
                )
                boxlist.show_together(targets[bi])
                boxlist.show_by_objectness(0.8, targets[bi])

            #boxlist = boxlist.clip_to_pcl(remove_empty=False)
            #boxlist = remove_small_boxes3d(boxlist, self.min_size)
            if SHOW_PRO_NUMS:
                print(f'before nms box num: {len(boxlist)}')
            boxlist_new = boxlist_nms_3d(
                boxlist,
                self.nms_thresh,
                nms_aug_thickness=self.nms_aug_thickness,
                max_proposals=self.fpn_post_nms_top_n,
                score_field="objectness",
                flag='rpn_post',
            )
            result.append(boxlist_new)
            if SHOW_PRO_NUMS:
                print(f'RPN out, after nms box num: {len(boxlist_new)}\n\n')

            if SHOW_NMS_OUT:
                print(
                    f'\n\n------------------------------------\n RPN out after NMS '
                )
                print('inference_3d.py SHOW_NMS_OUT')
                objectness_i_new = boxlist_new.get_field('objectness')
                print(f"objectness: {objectness_i_new[0:10]}")
                boxlist_new.show_by_objectness(0.8, targets[bi])
                import pdb
                pdb.set_trace()  # XXX BREAKPOINT
                pass
        result = cat_boxlist_3d(result, per_example=True)
        return result
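
Stripped of the BoxList3D bookkeeping, the pre-NMS selection is sigmoid followed by topk; a standalone sketch with made-up sizes (the real code also slices per example, decodes boxes with box_coder.decode and then applies boxlist_nms_3d):

import torch

num_anchors, pre_nms_top_n = 10000, 2000
objectness_logits = torch.randn(num_anchors)
box_regression = torch.randn(num_anchors, 7)

scores = objectness_logits.sigmoid()
top_scores, topk_idx = scores.topk(min(pre_nms_top_n, num_anchors),
                                   dim=0, sorted=True)
top_regression = box_regression[topk_idx]
print(top_scores.shape, top_regression.shape)   # [2000] [2000, 7]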
Example 8
    def show_roi_cls_regs(self, proposals, classification_loss, box_loss,
                          class_logits, targets, box_regression,
                          regression_targets):
        '''
          From RPN NMS: FP, FN, TP
          ROI should: (1) remove all FP, (2) add all FN, (3) keep all TP
          '''
        assert proposals.batch_size() == 1
        targets = cat_boxlist_3d(targets, per_example=True)
        roi_class_pred = F.softmax(class_logits, dim=1)
        pred_logits = torch.argmax(class_logits, 1)
        labels = proposals.get_field("labels")
        metric_inds, metric_evals = proposals.metric_4areas(
            self.low_threshold, self.high_threshold)
        gt_num = len(targets)
        device = class_logits.device
        num_classes = class_logits.shape[1]

        class_err = (labels != pred_logits).sum()

        print(
            '\n-----------------------------------------\n roi classification\n'
        )
        print(f"RPN_NMS: {metric_evals}")
        print(
            f"classification_loss:{classification_loss}, box_loss: {box_loss}")

        def show_one_type(eval_type):
            indices = metric_inds[eval_type]
            if eval_type == 'TP' and self.add_gt_proposals:
                indices = indices[0:-gt_num]
            n0 = indices.shape[0]
            pro_ = proposals[indices]
            objectness_ = pro_.get_field('objectness')
            logits_ = pred_logits[indices]
            labels_ = labels[indices]

            err_ = torch.abs(logits_ - labels_)
            err_num = err_.sum()
            print(f"\n * * * * * * * * \n{eval_type} :{n0} err num: {err_num}")
            print(f"objectness_:{objectness_}\n")
            if n0 > 0:
                roi_class_pred_ = roi_class_pred[indices[:, None],
                                                 labels_[:, None]]

                #if eval_type != 'TP':
                print(f"roi_class_pred_:\n{roi_class_pred_}")

                if eval_type == 'FP':
                    pro_.show__together(targets)
                    pass

                if eval_type == 'FN' or eval_type == 'TP':
                    map_inds_ = 7 * labels_[:, None] + torch.tensor(
                        [0, 1, 2, 3, 4, 5, 6], device=device)
                    roi_box_regression_ = box_regression[indices[:, None],
                                                         map_inds_]
                    roi_box = self.box_coder.decode(roi_box_regression_,
                                                    pro_.bbox3d)
                    tar_reg = regression_targets[indices]
                    #roi_box = self.box_coder.decode(tar_reg, pro_.bbox3d)
                    print(f"target reg: \n{tar_reg[0:3]}")
                    print(f"roi_reg: \n{roi_box_regression_[0:3]}")

                    roi_box[:, 0] += 15
                    roi_boxlist_ = pro_.copy()
                    roi_boxlist_.bbox3d = roi_box

                    targets_ = targets.copy()
                    targets_.bbox3d[:, 0] += 15

                    bs_ = cat_boxlist_3d([pro_, roi_boxlist_],
                                         per_example=False)
                    tg_ = cat_boxlist_3d([targets, targets_], False)
                    bs_.show__together(tg_, twolabels=True)

                    pass
            pass

        show_one_type('FP')
        show_one_type('FN')
        show_one_type('TP')
        return
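
The map_inds_ gather used above picks, for each proposal, the 7 regression values belonging to its own class out of a [n, 7*num_classes] prediction; a standalone illustration with random values:

import torch

n, num_classes = 5, 4
box_regression = torch.randn(n, 7 * num_classes)
labels = torch.randint(0, num_classes, (n,))

map_inds = 7 * labels[:, None] + torch.arange(7)       # [n, 7] column indices
rows = torch.arange(n)[:, None]                        # [n, 1] row indices
per_class_regression = box_regression[rows, map_inds]  # [n, 7]
print(per_class_regression.shape)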