# PyTorch implementation.
def forward_for_single_feature_map(self, anchors, objectness, box_regression):
    """
    Arguments:
        anchors: list[BoxList]
        objectness: tensor of size N, A, H, W
        box_regression: tensor of size N, A * 4, H, W
    """
    device = objectness.device
    N, A, H, W = objectness.shape

    # put in the same format as anchors
    objectness = permute_and_flatten(objectness, N, A, 1, H, W).view(N, -1)
    objectness = objectness.sigmoid()

    box_regression = permute_and_flatten(box_regression, N, A, 4, H, W)

    num_anchors = A * H * W
    pre_nms_top_n = min(self.pre_nms_top_n, num_anchors)
    objectness, topk_idx = objectness.topk(pre_nms_top_n, dim=1, sorted=True)

    batch_idx = torch.arange(N, device=device)[:, None]
    box_regression = box_regression[batch_idx, topk_idx]

    image_shapes = [box.size for box in anchors]
    concat_anchors = torch.cat([a.bbox for a in anchors], dim=0)
    concat_anchors = concat_anchors.reshape(N, -1, 4)[batch_idx, topk_idx]

    proposals = self.box_coder.decode(
        box_regression.view(-1, 4), concat_anchors.view(-1, 4)
    )
    proposals = proposals.view(N, -1, 4)

    result = []
    for proposal, score, im_shape in zip(proposals, objectness, image_shapes):
        boxlist = BoxList(proposal, im_shape, mode="xyxy")
        boxlist.add_field("objectness", score)
        boxlist = boxlist.clip_to_image(remove_empty=False)
        boxlist = remove_small_boxes(boxlist, self.min_size)
        boxlist = boxlist_nms(
            boxlist,
            self.nms_thresh,
            max_proposals=self.post_nms_top_n,
            score_field="objectness",
        )
        result.append(boxlist)
    return result
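# For reference, a minimal sketch of the permute_and_flatten helper used above,
# inferred from its call sites: it reorders an (N, A*C, H, W) feature map so
# each row corresponds to one (location, anchor) pair with C trailing channels.
# Treat this as a sketch consistent with the calls above, not the exact
# library code.
def permute_and_flatten(layer, N, A, C, H, W):
    layer = layer.view(N, A, C, H, W)     # split anchors from channels
    layer = layer.permute(0, 3, 4, 1, 2)  # (N, H, W, A, C)
    layer = layer.reshape(N, -1, C)       # (N, H*W*A, C)
    return layer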
# PyTorch implementation.
def filter_results(self, boxlist, num_classes):
    """Returns bounding-box detection results by thresholding on scores and
    applying non-maximum suppression (NMS).
    """
    # unwrap the boxlist to avoid additional overhead.
    # if we had multi-class NMS, we could perform this directly on the boxlist
    boxes = boxlist.bbox.reshape(-1, num_classes * 4)
    scores = boxlist.get_field("scores").reshape(-1, num_classes)

    device = scores.device
    result = []
    # Apply threshold on detection probabilities and apply NMS
    # Skip j = 0, because it's the background class
    inds_all = scores > self.score_thresh
    for j in range(1, num_classes):
        inds = inds_all[:, j].nonzero().squeeze(1)
        scores_j = scores[inds, j]
        boxes_j = boxes[inds, j * 4 : (j + 1) * 4]
        boxlist_for_class = BoxList(boxes_j, boxlist.size, mode="xyxy")
        boxlist_for_class.add_field("scores", scores_j)
        boxlist_for_class = boxlist_nms(boxlist_for_class, self.nms)
        num_labels = len(boxlist_for_class)
        boxlist_for_class.add_field(
            "labels", torch.full((num_labels,), j, dtype=torch.int64, device=device)
        )
        result.append(boxlist_for_class)

    result = cat_boxlist(result)
    number_of_detections = len(result)

    # Limit to max_per_image detections **over all classes**
    if number_of_detections > self.detections_per_img > 0:
        cls_scores = result.get_field("scores")
        image_thresh, _ = torch.kthvalue(
            cls_scores.cpu(), number_of_detections - self.detections_per_img + 1
        )
        keep = cls_scores >= image_thresh.item()
        keep = torch.nonzero(keep).squeeze(1)
        result = result[keep]
    return result
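# The kthvalue step above caps the number of detections without a full sort:
# the (n - k + 1)-th smallest score is exactly the k-th largest, so it serves
# as the cutoff. A self-contained toy illustration with hypothetical scores
# (ties at the cutoff can keep slightly more than k boxes, which the code
# above accepts):
import torch

scores = torch.tensor([0.9, 0.1, 0.8, 0.3, 0.7, 0.2])
max_per_image = 3
n = scores.numel()
thresh, _ = torch.kthvalue(scores, n - max_per_image + 1)
keep = torch.nonzero(scores >= thresh.item()).squeeze(1)
print(keep)  # tensor([0, 2, 4]) -- the three top-scoring detections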
# Jittor implementation.
def select_over_all_levels(self, boxlists):
    num_images = len(boxlists)
    results = []
    for i in range(num_images):
        scores = boxlists[i].get_field("scores")
        labels = boxlists[i].get_field("labels")
        boxes = boxlists[i].bbox
        boxlist = boxlists[i]
        result = []
        # skip the background
        for j in range(1, self.num_classes):
            inds = (labels == j).nonzero().view(-1)
            scores_j = scores[inds]
            boxes_j = boxes[inds, :].view(-1, 4)
            boxlist_for_class = BoxList(boxes_j, boxlist.size, mode="xyxy")
            boxlist_for_class.add_field("scores", scores_j)
            boxlist_for_class = boxlist_nms(
                boxlist_for_class, self.nms_thresh, score_field="scores"
            )
            num_labels = len(boxlist_for_class)
            boxlist_for_class.add_field(
                "labels", jt.full((num_labels,), j).int32()
            )
            result.append(boxlist_for_class)

        result = cat_boxlist(result)
        number_of_detections = len(result)

        # Limit to max_per_image detections **over all classes**
        if number_of_detections > self.fpn_post_nms_top_n > 0:
            cls_scores = result.get_field("scores")
            image_thresh, _ = jt.kthvalue(
                cls_scores, number_of_detections - self.fpn_post_nms_top_n + 1
            )
            keep = cls_scores >= image_thresh
            keep = jt.nonzero(keep).squeeze(1)
            result = result[keep]
        results.append(result)
    return results
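# boxlist_nms above is ordinary single-class NMS on xyxy boxes, ranked by the
# given score field. A minimal PyTorch sketch of the underlying operation,
# assuming a torchvision backend (the real implementations dispatch to their
# own optimized kernels):
import torch
from torchvision.ops import nms

def simple_boxlist_nms(boxes, scores, iou_threshold, max_proposals=-1):
    """boxes: (M, 4) xyxy; scores: (M,). Returns kept boxes, scores, indices."""
    keep = nms(boxes, scores, iou_threshold)  # indices, sorted by descending score
    if max_proposals > 0:
        keep = keep[:max_proposals]
    return boxes[keep], scores[keep], keep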
# Jittor implementation.
def filter_results(self, boxlist, num_classes):
    """Returns bounding-box detection results by thresholding on scores and
    applying non-maximum suppression (NMS).
    """
    # unwrap the boxlist to avoid additional overhead.
    # if we had multi-class NMS, we could perform this directly on the boxlist
    boxes = boxlist.bbox.reshape(-1, num_classes * 4)
    scores = boxlist.get_field("scores").reshape(-1, num_classes)

    result = []
    # Apply threshold on detection probabilities and apply NMS
    # Skip j = 0, because it's the background class
    inds_all = scores > self.score_thresh
    # Compute the per-class nonzero indices up front and sync once, so the
    # lazy ops are evaluated in a single batch instead of once per class.
    inds_nonzeros = [inds_all[:, j].nonzero() for j in range(1, num_classes)]
    jt.sync(inds_nonzeros)

    for j in range(1, num_classes):
        inds = inds_nonzeros[j - 1]
        if inds.shape[0] == 0:
            continue
        inds = inds.squeeze(1)
        scores_j = scores[inds, j]
        boxes_j = boxes[inds, j * 4 : (j + 1) * 4]
        boxlist_for_class = BoxList(boxes_j, boxlist.size, mode="xyxy")
        boxlist_for_class.add_field("scores", scores_j)
        boxlist_for_class = boxlist_nms(boxlist_for_class, self.nms)
        num_labels = len(boxlist_for_class)
        boxlist_for_class.add_field(
            "labels", jt.full((num_labels,), j).int32()
        )
        result.append(boxlist_for_class)

    result = cat_boxlist(result)
    # If no class survived the threshold, make sure the expected fields exist.
    if not result.has_field("labels"):
        result.add_field("labels", jt.empty((0,)))
    if not result.has_field("scores"):
        result.add_field("scores", jt.empty((0,)))
    number_of_detections = len(result)

    # Limit to max_per_image detections **over all classes**
    if number_of_detections > self.detections_per_img > 0:
        cls_scores = result.get_field("scores")
        image_thresh, _ = jt.kthvalue(
            cls_scores, number_of_detections - self.detections_per_img + 1
        )
        keep = cls_scores >= image_thresh
        keep = jt.nonzero(keep).squeeze(1)
        result = result[keep]
    return result
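# Both filter_results variants rely on a class-major box layout: after the
# initial reshape, row i holds one regressed box per class, with class j in
# columns j*4:(j+1)*4. A toy illustration with hypothetical values:
import numpy as np

num_classes = 3
boxes = np.arange(2 * num_classes * 4).reshape(2, num_classes * 4)
j = 1
boxes_j = boxes[:, j * 4 : (j + 1) * 4]
print(boxes_j)  # [[ 4  5  6  7]
                #  [16 17 18 19]]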
# Jittor implementation.
def forward_for_single_feature_map(self, anchors, objectness, box_regression):
    """
    Arguments:
        anchors: list[BoxList]
        objectness: tensor of size N, A, H, W
        box_regression: tensor of size N, A * 4, H, W
    """
    N, A, H, W = objectness.shape

    # put in the same format as anchors
    objectness = permute_and_flatten(objectness, N, A, 1, H, W).reshape(N, -1)
    objectness = objectness.sigmoid()

    box_regression = permute_and_flatten(box_regression, N, A, 4, H, W)

    num_anchors = A * H * W
    pre_nms_top_n = min(self.pre_nms_top_n, num_anchors)
    objectness, topk_idx = objectness.topk(pre_nms_top_n, dim=1, sorted=True)

    batch_idx = jt.arange(N).unsqueeze(1)
    box_regression = box_regression[batch_idx, topk_idx]

    image_shapes = [box.size for box in anchors]
    concat_anchors = jt.contrib.concat([a.bbox for a in anchors], dim=0)
    concat_anchors = concat_anchors.reshape(N, -1, 4)[batch_idx, topk_idx]

    proposals = self.box_coder.decode(
        box_regression.reshape(-1, 4), concat_anchors.reshape(-1, 4)
    )
    proposals = proposals.reshape(N, -1, 4)

    result = []
    for i in range(len(image_shapes)):
        proposal = proposals[i]
        score = objectness[i]
        im_shape = image_shapes[i]
        boxlist = BoxList(proposal, im_shape, mode="xyxy")
        boxlist.add_field("objectness", score)
        boxlist = boxlist.clip_to_image(remove_empty=False)
        boxlist = remove_small_boxes(boxlist, self.min_size)
        boxlist = boxlist_nms(
            boxlist,
            self.nms_thresh,
            max_proposals=self.post_nms_top_n,
            score_field="objectness",
        )
        result.append(boxlist)
    return result
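# box_coder.decode applies the standard Faster R-CNN box transform: a delta
# (dx, dy, dw, dh) shifts the anchor center and rescales its size. A minimal
# sketch assuming unit weights and xyxy anchors; the real BoxCoder also
# applies per-coordinate weights, clamps dw/dh, and uses a +1 size convention.
import torch

def decode_deltas(deltas, anchors):
    """deltas: (M, 4) as (dx, dy, dw, dh); anchors: (M, 4) xyxy."""
    widths = anchors[:, 2] - anchors[:, 0]
    heights = anchors[:, 3] - anchors[:, 1]
    ctr_x = anchors[:, 0] + 0.5 * widths
    ctr_y = anchors[:, 1] + 0.5 * heights

    dx, dy, dw, dh = deltas.unbind(dim=1)
    pred_ctr_x = dx * widths + ctr_x
    pred_ctr_y = dy * heights + ctr_y
    pred_w = torch.exp(dw) * widths
    pred_h = torch.exp(dh) * heights

    # convert centers/sizes back to xyxy corners
    return torch.stack(
        [
            pred_ctr_x - 0.5 * pred_w,
            pred_ctr_y - 0.5 * pred_h,
            pred_ctr_x + 0.5 * pred_w,
            pred_ctr_y + 0.5 * pred_h,
        ],
        dim=1,
    )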