def test_universal():
    test_obj = [{'a': 'abc', 'b': 1}, 2, 'c']
    # dump as a string
    for format in ['json', 'yaml', 'pickle']:
        cvb.dump(test_obj, format=format)
    with pytest.raises(ValueError):
        cvb.dump(test_obj)
    with pytest.raises(TypeError):
        cvb.dump(test_obj, 'tmp.txt')
    # test load/dump with a filename
    for format in ['json', 'yaml', 'pkl']:
        tmp_filename = '.cvbase_test.tmp.' + format
        cvb.dump(test_obj, tmp_filename)
        assert path.isfile(tmp_filename)
        load_obj = cvb.load(tmp_filename)
        assert load_obj == test_obj
        remove(tmp_filename)
    # test load/dump with a file object
    for format in ['json', 'yaml', 'pkl']:
        tmp_filename = '.cvbase_test.tmp.' + format
        mode = 'wb' if format == 'pkl' else 'w'
        with open(tmp_filename, mode) as f:
            cvb.dump(test_obj, f, format=format)
        assert path.isfile(tmp_filename)
        mode = 'rb' if format == 'pkl' else 'r'
        with open(tmp_filename, mode) as f:
            load_obj = cvb.load(f, format=format)
        assert load_obj == test_obj
        remove(tmp_filename)
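# A minimal usage sketch of the API exercised by the test above, assuming the
# cvbase package is importable as `cvb` (as in the surrounding snippets); the
# filename 'demo.pkl' is illustrative and not part of the original code.
import cvbase as cvb

data = [{'a': 'abc', 'b': 1}, 2, 'c']
json_str = cvb.dump(data, format='json')  # serialize to an in-memory string
cvb.dump(data, 'demo.pkl')                # format inferred from the file extension
assert cvb.load('demo.pkl') == data       # round trip back to the original object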
def test_solver(model, data_loader, output_dir):
    # load checkpoint
    load_checkpoint(model, output_dir[0])
    New2Old = cvb.load('/mnt/lustre/liushu1/mask_rcnn/coco-master/PythonAPI/Newlabel.pkl')
    result_path = os.path.join(output_dir[1], 'result.json')
    log_dir = output_dir[1]
    count = 0
    logger = solver_log(os.path.join(log_dir, 'test_' + time.strftime('%Y%m%d_%H%M%S', time.localtime()) + '.log'))
    # logger = solver_log(os.path.join(log_dir, 'test1.log'))
    results = []
    for box_feature, rank_score, box_box, box_label, box_score_origin, box_box_origin, image_id, box_keep_np in data_loader:
        # print(image_id)
        image_id = int(image_id.numpy())
        bboxes = []
        start = time.time()
        box_feature_variable = Variable(box_feature).cuda()
        box_score_variable = Variable(rank_score).cuda()
        box_label_variable = Variable(box_label).cuda()
        box_box_variable = Variable(box_box).cuda()
        output = test(box_feature_variable, box_score_variable, box_box_variable, model)
        # keep = list(np.where(output==1)[0])
        box_score_origin = box_score_origin.cpu().numpy().astype(np.float)
        box_keep_np = box_keep_np.cpu().numpy().astype(np.int)
        # final_score = box_score_origin * output
        final_score = box_score_origin * output
        # for index in keep:
        # convert each kept (box, class) pair into a COCO-style result entry
        for index in range(final_score.shape[0]):
            # cls_index = np.argmax(box_score_origin[index, :])
            cls_all_index = np.where(box_keep_np[index, :] == 1)[0]
            for cls_index in cls_all_index:
                # cls_index = np.argsort(final_score[index, :])[::-1][0]
                x1, y1, x2, y2 = box_box_origin[index, cls_index * 4:cls_index * 4 + 4]
                score = final_score[index, cls_index]
                # score = box_score_origin[index, cls_index]
                category_id = New2Old[str(cls_index + 1)][1]
                bboxes.append({
                    'bbox': [int(x1), int(y1), int(x2) - int(x1) + 1, int(y2) - int(y1) + 1],
                    'score': float(score),
                    'category_id': category_id,
                    'image_id': int(image_id)
                })
        count += 1
        end = time.time()
        print_time = float(end - start)
        results.extend(bboxes)
        logger.info('index:{}, image_id:{}, cost:{}'.format(count, image_id, print_time))
    cvb.dump(results, result_path)
end = int(unique_class_len[ii + 1])
for index in range(start, end):
    if all_class_box_label[index, 0] == 0:
        continue
    x1, y1, x2, y2 = all_class_box_origin_box[index, 0:4]
    score = all_class_box_origin_score[index, 0]
    category_id = New2Old[str(ii + 1)][1]
    bboxes.append({
        'bbox': [
            int(x1),
            int(y1),
            int(x2) - int(x1) + 1,
            int(y2) - int(y1) + 1
        ],
        'score': float(score),
        'category_id': category_id,
        'image_id': int(image_id)
    })
results.extend(bboxes)
print('{}:{}'.format(i, image_id))
cvb.dump(results, '/data/luqi/check_2.json')
# count += 1
# end = time.time()
# print_time = float(end-start)
# print(ii)
# print(np.concatenate((all_class_box_origin_score[start:end, 0].reshape(-1, 1), all_class_box_label[start:end, 0].reshape(-1, 1), all_class_box_origin_box[start:end, 0:4].reshape(-1, 4)), axis=1))
# input()
if __name__ == '__main__':
    with Manager() as manager:
        args = parse_args()
        result_dir = os.path.join(args.output_dir, 'result/')
        if not osp.exists(result_dir):
            os.makedirs(result_dir)
        result_path = os.path.join(result_dir, 'result.json')
        result = manager.list()
        p_list = []
        for i in range(args.thread_all):
            p = Process(target=run, args=(i, args.thread_all, result, args))
            p.start()
            p_list.append(p)
        for res in p_list:
            res.join()
        # print(result)
        ori_result = list(result)
        # print(ori_result)
        cvb.dump(ori_result, result_path)
        # do evaluation
        cocoGt = COCO(args.gt_path)
        cocoDt = cocoGt.loadRes(result_path)
        cocoEval = COCOeval(cocoGt, cocoDt, args.ann_type)
        cocoEval.evaluate()
        cocoEval.accumulate()
        cocoEval.summarize()
def transForm(box_feature, box_box, box_score, save_path):
    cvb.dump([box_feature, box_box, box_score], save_path)
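# Hedged companion sketch for transForm above: the list it dumps can be read
# back with cvb.load and unpacked in the same order it was saved; `save_path`
# here stands for whatever path was passed to transForm.
box_feature, box_box, box_score = cvb.load(save_path)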
end = int(unique_class_len[ii + 1])
for index in range(start, end):
    if all_class_box_label[index, 0] == 0:
        continue
    x1, y1, x2, y2 = all_class_box_origin_box[index, 0:4]
    score = all_class_box_origin_score[index, 0]
    category_id = New2Old[str(ii + 1)][1]
    bboxes.append({
        'bbox': [
            int(x1),
            int(y1),
            int(x2) - int(x1) + 1,
            int(y2) - int(y1) + 1
        ],
        'score': float(score),
        'category_id': category_id,
        'image_id': int(image_id)
    })
results.extend(bboxes)
print('{}:{}'.format(i, image_id))
cvb.dump(results, '/mnt/lustre/liushu1/qilu_ex/check_2.json')
# count += 1
# end = time.time()
# print_time = float(end-start)
# print(ii)
# print(np.concatenate((all_class_box_origin_score[start:end, 0].reshape(-1, 1), all_class_box_label[start:end, 0].reshape(-1, 1), all_class_box_origin_box[start:end, 0:4].reshape(-1, 4)), axis=1))
# input()
# gt
gts_info = pkl.load(
    open(os.path.join(gts_base_path, img_name + '.pkl'), 'rb'),
    encoding='iso-8859-1')
gts_box = np.zeros((len(gts_info), 5))
for index, gt in enumerate(gts_info):
    gts_box[index, :] = gt['bbox']
box_box = box_box.astype(np.float)
box_score = box_score.astype(np.float)
box_feature = box_feature.astype(np.float)
proposals_feature_nms, proposals_score_nms, proposals_box_nms, proposals_label = stage2_assign(
    box_feature, box_box, box_score, gts_box)
proposals_score_nms = proposals_score_nms[:, 1:]
proposals_box_nms = proposals_box_nms[:, 4:]
valid_index = list(np.where(proposals_label == 1)[0])
bboxes = []
image_id = int(img_name)
for ii in valid_index:
    cls_index = np.argmax(proposals_score_nms[ii, :])
    score = proposals_score_nms[ii, cls_index]
    x1, y1, x2, y2 = proposals_box_nms[ii, cls_index * 4:cls_index * 4 + 4]
    category_id = New2Old[str(cls_index + 1)][1]
    bboxes.append({
        'bbox': [int(x1), int(y1), int(x2) - int(x1) + 1, int(y2) - int(y1) + 1],
        'score': float(score),
        'category_id': category_id,
        'image_id': int(image_id)
    })
# nms_filter_count, nms_count = Calculate_ratio(box_box, box_score, gts_box, img_name, New2Old, iou_thr=0.3)
result.extend(bboxes)
print('{}/{}'.format(val_index, val_num))
# input()
cvb.dump(result, result_path)
# val = TrainDataset(args.base_path, args.img_list, 'msra', cls_list, phase='test')
val = TrainDataset(args.base_path, args.img_list, 'msra', cls_list,
                   phase='test', final_score_thresh=0.03)
# val_loader = torch.utils.data.DataLoader(val, batch_size=1, num_workers=1, collate_fn=unique_collate, pin_memory=False)
# model
model = Encoder_Decoder(args.hidden_size, attn_type=args.attn_type,
                        context_type=args.context_type)
if use_cuda:
    model = model.cuda()
model.eval()
thread_index = 0
thread_num = 1
thread_result = test_solver(model, val, output_dir, thread_index, thread_num)
# result.extend(thread_result)
cvb.dump(thread_result, result_path)
# do evaluation
cocoGt = COCO(args.gt_path)
cocoDt = cocoGt.loadRes(result_path)
cocoEval = COCOeval(cocoGt, cocoDt, args.ann_type)
cocoEval.evaluate()
cocoEval.accumulate()
cocoEval.summarize()
def test_solver(model, dataset, output_dir, thread_index, thread_num):
    # load checkpoint
    load_checkpoint(model, output_dir[0])
    New2Old = cvb.load(
        '/mnt/lustre/liushu1/mask_rcnn/coco-master/PythonAPI/Newlabel.pkl')
    # result_path = os.path.join(output_dir[1], 'result.json')
    np.set_printoptions(formatter={'float': '{: 0.4f}'.format})
    log_dir = output_dir[1]
    # count = 0
    logger = solver_log(
        os.path.join(
            log_dir,
            'test_' + time.strftime('%Y%m%d_%H%M%S', time.localtime()) + '.log'))
    # logger = solver_log(os.path.join(log_dir, 'test1.log'))
    results = []
    data_num = len(dataset)
    for count in range(data_num):
        if count % thread_num != thread_index:
            continue
        data_np = dataset[count]
        # input
        # all_class_box_origin_score, all_class_box_origin_box, unique_class, unique_class_len, img_id
        # box_feature, rank_score, box_box = torch.FloatTensor(data_np[0]), torch.FloatTensor(data_np[1]), torch.FloatTensor(data_np[2])
        all_class_box_feature, all_class_box_box, all_class_box_score = (
            torch.FloatTensor(data_np[0]), torch.FloatTensor(data_np[1]),
            torch.FloatTensor(data_np[2]))
        all_class_box_label = data_np[3]
        if all_class_box_label.shape[0] == 0:
            continue
        all_class_box_weight = data_np[4]
        all_class_box_origin_score, all_class_box_origin_box = (
            torch.FloatTensor(data_np[5]), data_np[6])
        unique_class, unique_class_len = (
            torch.FloatTensor(data_np[7]), torch.FloatTensor(data_np[8]))
        unique_class_np, unique_class_len_np = data_np[7], data_np[8]
        image_id = int(data_np[9])
        bboxes = []
        start = time.time()
        # all_class_box_label_variable = Variable(all_class_box_label).cuda()
        all_class_box_score_variable = Variable(all_class_box_score).cuda()
        all_class_box_box_variable = Variable(all_class_box_box).cuda()
        all_class_box_feature_variable = Variable(all_class_box_feature).cuda()
        all_class_box_origin_score_variable = Variable(
            all_class_box_origin_score).cuda()
        unique_class_cuda = unique_class.cuda()
        unique_class_len_cuda = unique_class_len.cuda()
        output = test(all_class_box_feature_variable, all_class_box_box_variable,
                      all_class_box_score_variable,
                      all_class_box_origin_score_variable, unique_class_cuda,
                      unique_class_len_cuda, model)
        box_score_origin = all_class_box_origin_score_variable.data.cpu().numpy().astype(
            np.float)[:, 0:1].reshape(-1, 1)
        # final_score = box_score_origin
        # cache the network output alongside the original scores for this image
        save_score = np.concatenate((output, box_score_origin), 1)
        save_path = ('/mnt/lustre/liushu1/qilu_ex/dataset/test_dev/panet/score/'
                     + str(image_id).zfill(12) + '.pkl')
        cvb.dump(save_score, save_path)
        # iii = cvb.load(save_path)
        # output = iii[:,0:1]
        # box_score_origin = iii[:,1:2]
        # final_score = box_score_origin * output
        # for cls_index in range(80):
        #     if unique_class_np[cls_index] == 0:
        #         continue
        #     start_ = int(unique_class_len_np[cls_index])
        #     end_ = int(unique_class_len_np[cls_index+1])
        #     # info_info = np.concatenate((box_score_origin[start_:end_, 0:1], output[start_:end_, 0:1], final_score[start_:end_,0:1], all_class_box_origin_box[start_:end_, 0:4].astype(np.int), all_class_box_label[start_:end_, 0:1]), axis=1)
        #     # qwe = DataFrame(info_info, columns=['score_origin', 'network', 'final', 'x1', 'y1', 'x2', 'y2', 'label'])
        #     # print(qwe)
        #     # print(qwe.sort_values(by='score_origin'))
        #     # input()
        #     for index in range(start_, end_):
        #         x1, y1, x2, y2 = all_class_box_origin_box[index, 0:4]
        #         score = final_score[index, 0]
        #         category_id = New2Old[str(cls_index+1)][1]
        #         bboxes.append({'bbox': [int(x1), int(y1), int(x2)-int(x1)+1, int(y2)-int(y1)+1], 'score': float(score), 'category_id':category_id, 'image_id':int(image_id)})
        # # count += 1
        end = time.time()
        print_time = float(end - start)
        # results.extend(bboxes)
        # # if count==20:
        # #     break
        logger.info('thread_index:{}, index:{}, image_id:{}, cost:{}'.format(
            thread_index, count, image_id, print_time))
    return results