Example #1
 def get_proposal(self, lrpn_probs, ldeltas, lrotation, lanchors, num_proposals):
   with torch.no_grad():
     assert len(lrpn_probs) == len(ldeltas) == len(lrotation) == len(lanchors)
     rpn_scores = []
     rpn_boxes = []
     rpn_rotations = []
     rpn_batch_idxs = []
     rpn2anchor_maps = []
     for rpn_probs, deltas, rotation, (anchor_coords, anchor_feats) in zip(
             lrpn_probs, ldeltas, lrotation, lanchors):
       if rpn_probs is None:
         rpn2anchor_maps.append((None, None))
         continue
       assert rpn_probs.coords_key == deltas.coords_key
       assert rpn_probs.F.shape[1] / 2 == deltas.F.shape[1] / 6
       assert deltas.F.shape[-1] == anchor_feats.shape[-1]
       rpn2anchor, anchor2rpn = utils.map_coordinates(deltas, anchor_coords, check_input_map=True)
       rpn2anchor_maps.append((rpn2anchor, anchor2rpn))
       rpn_batch_idxs.append(deltas.coords[rpn2anchor][:, 0])
       rpn_scores.append(rpn_probs.F[rpn2anchor].reshape(-1, 2)[:, 1])
       rpn_bbox_std = torch.from_numpy(np.expand_dims(self.config.rpn_bbox_std, 0)).to(deltas.F)
       deltas = deltas.F[rpn2anchor].reshape(-1, 6) * rpn_bbox_std
       anchors = anchor_feats[anchor2rpn].reshape(-1, 6)
       boxes = utils.apply_box_deltas(anchors.to(deltas), deltas, self.config.normalize_bbox)
       rpn_boxes.append(boxes)
       if rotation is not None:
         num_rot_output = self.rotation_criterion.NUM_OUTPUT
         assert rpn_probs.coords_key == rotation.coords_key
         assert rpn_probs.F.shape[1] / 2 == rotation.F.shape[1] / num_rot_output
         rpn_rotations.append(
             self.rotation_criterion.pred(rotation.F[rpn2anchor].reshape(-1, num_rot_output)))
     if not rpn_scores:
       return None, None, None, rpn2anchor_maps
     all_scores = torch.cat(rpn_scores)
     all_boxes = torch.cat(rpn_boxes)
     # NOTE: assumes 7 anchors per location; the batch indices above are per
     # location, while the scores and boxes were flattened to one row per anchor.
     all_batch_idxs = torch.cat(rpn_batch_idxs).repeat_interleave(7)
     rotations = None
     if rpn_rotations:
       all_rotations = torch.cat(rpn_rotations)
       rotations = []
     boxes = []
     scores = []
     for i in range(all_batch_idxs.max().item() + 1):
       batch_mask = all_batch_idxs == i
       batch_scores = all_scores[batch_mask]
       confidence_mask = batch_scores > self.config.rpn_pre_nms_min_confidence
       pre_nms_limit = min(self.config.rpn_pre_nms_limit, int(confidence_mask.sum()))
       batch_scores, ix = torch.topk(batch_scores[confidence_mask], pre_nms_limit, sorted=True)
       scores.append(batch_scores)
       boxes.append(all_boxes[batch_mask][confidence_mask][ix])
       if rotations is not None:
         rotations.append(all_rotations[batch_mask][confidence_mask][ix])
     rpn_proposal, rotation, rpn_scores = self.batch_non_maximum_suppression(
         boxes, rotations, scores, num_proposals)
     return rpn_proposal, rotation, rpn_scores, rpn2anchor_maps
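
The decoding above relies on utils.apply_box_deltas, which is not shown in this example. Purely as a hypothetical sketch of how such a decoder is commonly implemented for axis-aligned 3D boxes (center shift plus log-size scaling against (x1, y1, z1, x2, y2, z2) anchors; the normalize_bbox flag is ignored), not the original helper:

import torch

def apply_box_deltas_sketch(anchors, deltas):
  """Illustrative only: anchors and deltas are (N, 6) tensors."""
  centers = (anchors[:, :3] + anchors[:, 3:]) / 2
  sizes = anchors[:, 3:] - anchors[:, :3]
  # Shift the center by a fraction of the anchor size; scale the size exponentially.
  new_centers = centers + deltas[:, :3] * sizes
  new_sizes = sizes * torch.exp(deltas[:, 3:])
  return torch.cat((new_centers - new_sizes / 2, new_centers + new_sizes / 2), dim=1)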
Example #2
 def visualize_groundtruth(self, datum, iteration):
   coords = datum['coords'].numpy()
   batch_size = int(coords[:, 0].max()) + 1
   output_path = pathlib.Path(self.config.visualize_path)
   output_path.mkdir(exist_ok=True)
   for i in range(batch_size):
     # Visualize ground-truth positive anchors.
     anchors_gt = datum['anchors'][torch.where(datum['rpn_match'].cpu() == 1)[1]]
     anchors_gt_ptc = pc_utils.visualize_bboxes(anchors_gt)
     anchors_gt_ply_dest = output_path / ('visualize_%04d_anchors_gt.ply' % iteration)
     pc_utils.save_point_cloud(anchors_gt_ptc, anchors_gt_ply_dest)
     # Visualize center location of all ground-truth anchors.
     anchors_all = np.unique((datum['anchors'][:, 3:] + datum['anchors'][:, :3]) / 2, axis=0)
     anchors_all_ply_dest = output_path / ('visualize_%04d_all_anchors_centers.ply' % iteration)
     pc_utils.save_point_cloud(anchors_all, anchors_all_ply_dest)
     # Visualize ground-truth bounding boxes.
     if datum.get('bboxes_rotations') is None:
       bboxes_gt = pc_utils.visualize_bboxes(datum['bboxes_coords'][i], datum['bboxes_cls'][i])
     else:
       bboxes_gt = np.hstack((datum['bboxes_coords'][i], datum['bboxes_rotations'][i][:, None]))
       bboxes_gt = pc_utils.visualize_bboxes(bboxes_gt, datum['bboxes_cls'][i],
                                             bbox_param='xyzxyzr')
     bboxes_gt_ply_dest = output_path / ('visualize_%04d_bboxes_gt.ply' % iteration)
     pc_utils.save_point_cloud(bboxes_gt, bboxes_gt_ply_dest)
     # Visualize reconstructed ground-truth rpn targets.
     rpn_bbox_anchors = datum['anchors'][(datum['rpn_match'].flatten() == 1).cpu().numpy()]
     rpn_bbox_anchors = detection_utils.normalize_boxes(rpn_bbox_anchors, self.config.max_ptc_size)
     rpn_bbox_target = datum['rpn_bbox'].reshape(-1, 6)
     rpn_bbox_mask = ~torch.all(rpn_bbox_target == 0, 1)
     rpn_bbox_target = rpn_bbox_target[rpn_bbox_mask].cpu().numpy()
     rpn_bbox_target *= np.reshape(self.config.rpn_bbox_std, (1, len(self.config.rpn_bbox_std)))
     rpn_bbox_target = detection_utils.apply_box_deltas(torch.from_numpy(rpn_bbox_anchors),
                                                        torch.from_numpy(rpn_bbox_target),
                                                        self.config.normalize_bbox)
     rpn_bbox_target = detection_utils.unnormalize_boxes(rpn_bbox_target.numpy(),
                                                         self.config.max_ptc_size)
     if datum.get('rpn_rotation') is None:
       bboxes_gt_recon = pc_utils.visualize_bboxes(rpn_bbox_target)
     else:
       rpn_rot_target = datum['rpn_rotation'][i][rpn_bbox_mask].cpu().numpy()
       bboxes_gt_recon = np.hstack((rpn_bbox_target, rpn_rot_target[:, None]))
       bboxes_gt_recon = pc_utils.visualize_bboxes(bboxes_gt_recon, bbox_param='xyzxyzr')
     bboxes_gt_recon_ply_dest = output_path / ('visualize_%04d_bboxes_gt_recon.ply' % iteration)
     pc_utils.save_point_cloud(bboxes_gt_recon, bboxes_gt_recon_ply_dest)
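
The reconstruction step moves between world coordinates and the normalized range the box regressor uses via detection_utils.normalize_boxes and unnormalize_boxes, which are not reproduced here. A minimal sketch under the assumption that they simply scale the corner coordinates by the scene's maximum extent (max_ptc_size taken as a per-axis extent or a scalar):

import numpy as np

def normalize_boxes_sketch(boxes, max_ptc_size):
  """Illustrative only: scale (N, 6) corner boxes into a unit range per axis."""
  extent = np.broadcast_to(np.asarray(max_ptc_size, dtype=np.float64), (3,))
  return boxes / np.concatenate((extent, extent))

def unnormalize_boxes_sketch(boxes, max_ptc_size):
  """Inverse of normalize_boxes_sketch: map unit-range boxes back to world units."""
  extent = np.broadcast_to(np.asarray(max_ptc_size, dtype=np.float64), (3,))
  return boxes * np.concatenate((extent, extent))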
Example #3
 def get_proposal(self, rpn_probs, deltas, rotation, anchors, num_proposals):
   assert deltas.shape[1:] == anchors.shape
   scores = rpn_probs[:, :, 1]
   rpn_bbox_std = np.reshape(self.config.rpn_bbox_std, (1, 1, len(self.config.rpn_bbox_std)))
   deltas *= torch.from_numpy(rpn_bbox_std).to(deltas)
   anchors = torch.from_numpy(np.broadcast_to(anchors, deltas.shape)).to(deltas)
   pre_nms_limit = min(self.config.rpn_pre_nms_limit, anchors.shape[1])
   scores, ix = torch.topk(scores, pre_nms_limit, sorted=True)
   keep = [s > self.config.rpn_pre_nms_min_confidence for s in scores]
   ix = [i[k] for i, k in zip(ix, keep)]
   scores = [s[k] for s, k in zip(scores, keep)]
   deltas = [torch.index_select(o, 0, i) for o, i in zip(deltas, ix)]
   anchors = [torch.index_select(o, 0, i) for o, i in zip(anchors, ix)]
   boxes = [utils.apply_box_deltas(a, d, self.config.normalize_bbox)
            for a, d in zip(anchors, deltas)]
   if rotation is not None:
     with torch.no_grad():
       rotation = [self.rotation_criterion.pred(torch.index_select(o, 0, i))
                   for o, i in zip(rotation, ix)]
   rpn_proposal, rotation, rpn_scores = self.batch_non_maximum_suppression(
       boxes, rotation, scores, num_proposals)
   return rpn_proposal, rotation, rpn_scores
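
The pre-NMS filtering in this dense variant is per batch element: keep the rpn_pre_nms_limit highest-scoring anchors, then drop those below rpn_pre_nms_min_confidence. A self-contained sketch of that pattern (names are illustrative, not from the codebase):

import torch

def prefilter_scores(scores, limit, min_confidence):
  """scores: (B, A) objectness tensor. Returns per-batch lists of kept scores and indices."""
  limit = min(limit, scores.shape[1])
  top_scores, top_idx = torch.topk(scores, limit, sorted=True)
  kept_scores, kept_idx = [], []
  for s, i in zip(top_scores, top_idx):
    mask = s > min_confidence
    kept_scores.append(s[mask])
    kept_idx.append(i[mask])
  return kept_scores, kept_idx

# e.g. kept_scores, kept_idx = prefilter_scores(torch.rand(2, 1000), 500, 0.3)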
Example #4
 def get_proposal(self, lrpn_probs, lrpn_sem, ldeltas, lrotation, num_proposals):
   with torch.no_grad():
     assert len(lrpn_probs) == len(lrpn_sem) == len(ldeltas) == len(lrotation)
     rpn_cls = []
     rpn_scores = []
     rpn_return_scores = []
     rpn_boxes = []
     rpn_rotations = []
     rpn_batch_idxs = []
     for rpn_probs, rpn_semantic, deltas, rotation, anchor_size in zip(
             lrpn_probs, lrpn_sem, ldeltas, lrotation, self.anchor_sizes):
       if rpn_probs is None:
         continue
       num_anchors = rpn_probs.F.shape[1] // 2
       assert rpn_probs.coords_key == deltas.coords_key
       assert num_anchors == deltas.F.shape[1] / 6
       rpn_batch_idxs.append(deltas.coords[:, 0])
       rpn_semantic = rpn_semantic.reshape(-1, self.num_class)
       rpn_semantic_prob, rpn_semantic_cls = rpn_semantic.max(1)
       rpn_cls.append(rpn_semantic_cls)
       rpn_prob = rpn_probs.F.reshape(-1, 2)[:, 1]
       if self.config.detection_nms_score == 'obj':
         rpn_score = rpn_prob
       elif self.config.detection_nms_score == 'sem':
         rpn_score = rpn_semantic_prob
       elif self.config.detection_nms_score == 'objsem':
          rpn_score = rpn_prob * rpn_semantic_prob
        else:
          raise ValueError('Unsupported detection_nms_score: %s' % self.config.detection_nms_score)
       if self.config.detection_ap_score == 'obj':
         ap_score = rpn_prob
       elif self.config.detection_ap_score == 'sem':
         ap_score = rpn_semantic_prob
       elif self.config.detection_ap_score == 'objsem':
          ap_score = rpn_prob * rpn_semantic_prob
        else:
          raise ValueError('Unsupported detection_ap_score: %s' % self.config.detection_ap_score)
       rpn_scores.append(rpn_score)
       rpn_return_scores.append(ap_score)
       rpn_bbox_std = torch.from_numpy(np.expand_dims(self.config.rpn_bbox_std, 0)).to(deltas.F)
       anchor_centers = deltas.coords[:, 1:] + deltas.tensor_stride[0] / 2
       anchor_center = np.tile(anchor_centers, (1, int(num_anchors)))
       anchors = np.hstack(((anchor_center - anchor_size).reshape(-1, 3),
                            (anchor_center + anchor_size).reshape(-1, 3)))
       deltas = deltas.F.reshape(-1, 6) * rpn_bbox_std
       anchors = torch.from_numpy(utils.normalize_boxes(anchors, self.config.max_ptc_size))
       rpn_boxes.append(
           utils.apply_box_deltas(anchors.to(deltas), deltas, self.config.normalize_bbox))
       if rotation is not None:
         num_rot_output = self.rotation_criterion.NUM_OUTPUT
         assert rpn_probs.coords_key == rotation.coords_key
         assert rpn_probs.F.shape[1] / 2 == rotation.F.shape[1] / num_rot_output
         rpn_rotations.append(
             self.rotation_criterion.pred(rotation.F.reshape(-1, num_rot_output)))
     if not rpn_scores:
       return None, None, None
     all_scores = torch.cat(rpn_scores)
     all_return_scores = torch.cat(rpn_return_scores)
     all_cls = torch.cat(rpn_cls)
     all_boxes = torch.cat(rpn_boxes)
     all_batch_idxs = torch.cat(rpn_batch_idxs).repeat_interleave(int(num_anchors))
     rotations = None
     if rpn_rotations:
       all_rotations = torch.cat(rpn_rotations)
       rotations = []
     boxes = []
     scores = []
     return_scores = []
     classes = []
     for i in range(all_batch_idxs.max().item() + 1):
       batch_mask = all_batch_idxs == i
       batch_scores = all_scores[batch_mask]
       confidence_mask = batch_scores > self.config.rpn_pre_nms_min_confidence
       pre_nms_limit = min(self.config.rpn_pre_nms_limit, int(confidence_mask.sum()))
       batch_scores, ix = torch.topk(batch_scores[confidence_mask], pre_nms_limit, sorted=True)
       scores.append(batch_scores)
       return_scores.append(all_return_scores[batch_mask][confidence_mask][ix])
       boxes.append(all_boxes[batch_mask][confidence_mask][ix])
       classes.append(all_cls[batch_mask][confidence_mask][ix])
       if rotations is not None:
         rotations.append(all_rotations[batch_mask][confidence_mask][ix])
     rpn_proposal, rotation, rpn_scores = self.batch_non_maximum_suppression(
         boxes, rotations, classes, scores, return_scores, num_proposals)
     return rpn_proposal, rotation, rpn_scores
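
Example #4 builds its anchors on the fly: every voxel of the sparse feature map spawns one box per anchor size, centered on the voxel. A simplified, self-contained numpy sketch of that construction (anchor sizes treated as half-extents, matching the center ± size arithmetic above; not the original utility):

import numpy as np

def build_anchors(voxel_coords, stride, anchor_sizes):
  """voxel_coords: (N, 3) integer voxel coordinates; anchor_sizes: (K, 3) half-extents.
  Returns (N * K, 6) axis-aligned boxes as (x1, y1, z1, x2, y2, z2)."""
  centers = voxel_coords.astype(np.float64) + stride / 2   # (N, 3) voxel centers
  centers = np.repeat(centers, len(anchor_sizes), axis=0)  # (N * K, 3)
  sizes = np.tile(np.asarray(anchor_sizes, dtype=np.float64), (len(voxel_coords), 1))  # (N * K, 3)
  return np.hstack((centers - sizes, centers + sizes))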
Example #5
 def detection_refinement(self, b_probs, b_rois, b_deltas, b_rots, b_rotdeltas):
   if b_probs is None:
     # Rows have 8 columns (x1 y1 z1 x2 y2 z2, class, score) without rotation and
     # 9 columns with the rotation inserted before class and score.
     num_channel = 8 if b_rots is None else 9
     return [np.zeros((0, num_channel))]
   num_batch = [rois.shape[0] for rois in b_rois]
   num_samples = sum(num_batch)
   assert num_samples == b_probs.shape[0] == b_deltas.shape[0]
   if b_rots is not None:
     assert num_samples == sum(rots.shape[0] for rots in b_rots) == b_rotdeltas.shape[0]
   batch_split = [(sum(num_batch[:i]), sum(num_batch[:(i + 1)])) for i in range(len(num_batch))]
   b_probs = [b_probs[i:j] for (i, j) in batch_split]
   b_deltas = [b_deltas[i:j] for (i, j) in batch_split]
   if b_rots is not None:
     b_rotdeltas = [b_rotdeltas[i:j] for (i, j) in batch_split]
   b_nms = []
   b_nms_rot = None if b_rots is None else []
   for i, (probs, rois, deltas) in enumerate(zip(b_probs, b_rois, b_deltas)):
     rois = rois.reshape(-1, rois.shape[-1])
     class_ids = torch.argmax(probs, dim=1)
     batch_slice = range(probs.shape[0])
     class_scores = probs[batch_slice, class_ids]
     class_deltas = deltas[batch_slice, class_ids - 1]
     class_deltas *= torch.tensor(self.config.rpn_bbox_std).to(deltas)
     refined_rois = detection_utils.apply_box_deltas(rois, class_deltas,
                                                     self.config.normalize_bbox)
     if b_rots is not None:
       class_rot_deltas = b_rotdeltas[i][batch_slice, class_ids - 1]
       class_rot_deltas = self.ref_rotation_criterion.pred(class_rot_deltas)
       refined_rots = detection_utils.normalize_rotation(b_rots[i] + class_rot_deltas)
     keep = torch.where(class_ids > 0)[0].cpu().numpy()
     if self.config.detection_min_confidence:
       conf_keep = torch.where(class_scores > self.config.detection_min_confidence)[0]
       keep = np.array(list(set(conf_keep.cpu().numpy()).intersection(keep)))
     if keep.size == 0:
       b_nms.append(np.zeros((0, 8 if b_rots is None else 9)))
       if b_rots is not None:
         b_nms_rot.append(np.zeros(0))
     else:
       pre_nms_class_ids = class_ids[keep] - 1
       pre_nms_scores = class_scores[keep]
       pre_nms_rois = refined_rois[keep]
       if b_rots is not None:
         pre_nms_rots = refined_rots[keep]
       nms_scores = []
       nms_rois = []
       nms_classes = []
       nms_rots = []
       for class_id in torch.unique(pre_nms_class_ids):
         class_nms_mask = pre_nms_class_ids == class_id
         class_nms_scores = pre_nms_scores[class_nms_mask]
         class_nms_rois = pre_nms_rois[class_nms_mask]
         pre_nms_class_rots = None
         if b_rots is not None:
           pre_nms_class_rots = pre_nms_rots[class_nms_mask]
         nms_roi, nms_rot, nms_score = detection_utils.non_maximum_suppression(
             class_nms_rois, pre_nms_class_rots, class_nms_scores,
             self.config.detection_nms_threshold, self.config.detection_max_instances,
             self.config.detection_rot_nms, self.config.detection_aggregate_overlap)
         nms_rois.append(nms_roi)
         nms_scores.append(nms_score)
         nms_classes.append(torch.ones(len(nms_score)).to(class_nms_rois) * class_id)
         if b_rots is not None:
           if self.config.normalize_rotation2:
             nms_rot = nms_rot / 2 + np.pi / 2
           nms_rots.append(nms_rot)
       nms_scores = torch.cat(nms_scores)
       nms_rois = torch.cat(nms_rois)
       nms_classes = torch.cat(nms_classes)
       detection_max_instances = min(self.config.detection_max_instances, nms_scores.shape[0])
       ix = torch.topk(nms_scores, detection_max_instances)[1]
       nms_rois_unnorm = detection_utils.unnormalize_boxes(
           nms_rois[ix].cpu().numpy(), self.config.max_ptc_size)
       nms_bboxes = np.hstack((nms_rois_unnorm, nms_classes[ix, None].cpu().numpy(),
                               nms_scores[ix, None].cpu().numpy()))
       if b_rots is not None:
         nms_rots = torch.cat(nms_rots)[ix, None].cpu().numpy()
         nms_bboxes = np.hstack((nms_bboxes[:, :6], nms_rots, nms_bboxes[:, 6:]))
       b_nms.append(nms_bboxes)
   return b_nms
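
The refinement loop defers per-class suppression to detection_utils.non_maximum_suppression, which is not reproduced here. For reference, a minimal greedy NMS over axis-aligned 3D boxes, ignoring the rotation, max-instance, and overlap-aggregation options of the real helper:

import torch

def nms_3d(boxes, scores, iou_threshold):
  """boxes: (N, 6) as (x1, y1, z1, x2, y2, z2); scores: (N,). Returns indices of kept boxes."""
  order = torch.argsort(scores, descending=True)
  keep = []
  while order.numel() > 0:
    i = order[0]
    keep.append(i.item())
    if order.numel() == 1:
      break
    rest = order[1:]
    # Intersection volume between the current box and all remaining candidates.
    lo = torch.max(boxes[i, :3], boxes[rest, :3])
    hi = torch.min(boxes[i, 3:], boxes[rest, 3:])
    inter = torch.clamp(hi - lo, min=0).prod(dim=1)
    vol_i = (boxes[i, 3:] - boxes[i, :3]).prod()
    vol_rest = (boxes[rest, 3:] - boxes[rest, :3]).prod(dim=1)
    iou = inter / (vol_i + vol_rest - inter)
    order = rest[iou <= iou_threshold]
  return keep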