def func_co(x):
    # Fuse the detections of the two cameras indexed by x, evaluate against
    # their filtered ground truth, and return 1 - mAP.
    x.sort()
    Eval = NuScenesEval('', '', args.format)
    fused_data = cu.matching_and_fusion(cam_test_list[x[0]], cam_test_list[x[1]])
    fused_gt = cu.filt_gt_labels_tuple(cam_gt_list[x[0]], cam_gt_list[x[1]])
    mAP_temp = Eval.my_evaluate(fused_data, fused_gt)
    return 1 - mAP_temp
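# Usage sketch (the pair [2, 19] is an arbitrary illustration, not taken from
# the original script): func_co returns 1 - mAP, so a lower value means a
# better camera pair.
#   cost_2_19 = func_co([2, 19])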
def fuse_constellation(x):
    # Fuse the detections of all cameras indexed by x (the camera "constellation"),
    # evaluate against their filtered ground truth, and return 1 - mAP.
    x.sort()
    size_n = x.shape[0]
    main_cam = x[0]
    gt_list = []
    gt_list.append(cam_gt_list[main_cam])
    # Fuse the remaining cameras into the main camera's detections one by one.
    fused_data = cam_test_list[main_cam]
    for i in x[1:]:
        fused_data = cu.matching_and_fusion(fused_data, cam_test_list[i])  # fuse camera i into the running result
        gt_list.append(cam_gt_list[i])
    fused_gt = cu.filt_gt_labels_tuple(*gt_list)
    Eval = NuScenesEval('', '', args.format)
    mAP_temp = Eval.my_evaluate(fused_data, fused_gt)
    return 1 - mAP_temp
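# Usage sketch: x is indexed with .shape, so an array-like index set is assumed
# here; the constellation [2, 19, 33] is illustrative only.
#   import numpy as np
#   cost = fuse_constellation(np.array([2, 19, 33]))   # 1 - mAP of the fused trio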
# Commented-out inner-loop body of an earlier three-/four-camera sweep (kept for reference):
# Eval = NuScenesEval('', '', args.format)
# fused_data = cu.matching_and_fusion(fused_data, cam_test_list[z])  # fuse
# fused_gt = cu.filt_gt_labels_tuple(cam_gt_list[i], cam_gt_list[j], cam_gt_list[k], cam_gt_list[z])
# # evaluate
# mAP_temp = Eval.my_evaluate(fused_data, fused_gt)
# if mAP_temp > max_map:
#     max_map = mAP_temp
#     max_i, max_j = i, j
#     print('temp max mAP: {}.......... time: ## i: {} j: {} k: {} z: {}'.format(max_map, i, j, k, z))
# print(mAP_temp)

# Exhaustive sweep over all camera pairs: fuse every pair, evaluate against the
# ground truth filtered over all cameras, and track the best pair by mAP.
max_map = 0
max_i, max_j = 0, 0
fused_gt = cu.filt_gt_labels_tuple(*cam_gt_list)
for i in range(34):
    for j in range(i + 1, 34):
        fused_data = cu.matching_and_fusion(cam_test_list[i], cam_test_list[j])  # fuse
        Eval = NuScenesEval('', '', args.format)
        # fused_gt = cu.filt_gt_labels_tuple(cam_gt_list[i], cam_gt_list[j], cam_gt_list[k])
        # evaluate
        mAP_temp = Eval.my_evaluate(fused_data, fused_gt)
        if mAP_temp > max_map:
            max_map = mAP_temp
            max_i, max_j = i, j
            print('temp max mAP: {}.......... time: ## i: {} j: {} '.format(max_map, i, j))
        # print(mAP_temp)

# max_map = 0
# max_i, max_j = 0, 0
# fused_gt = cu.filt_gt_labels_tuple(*cam_gt_list)
# for i in range(34):
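# A compact variant of the pairwise sweep above, written as a helper that is not
# called anywhere; the function name and use of itertools are illustrative, and
# it relies on the same globals (cu, cam_test_list, cam_gt_list, args).
def best_pair_sketch(num_cams=34):
    import itertools
    best_map, best_pair = 0, (0, 0)
    fused_gt_all = cu.filt_gt_labels_tuple(*cam_gt_list)
    for i, j in itertools.combinations(range(num_cams), 2):
        pair_data = cu.matching_and_fusion(cam_test_list[i], cam_test_list[j])
        pair_map = NuScenesEval('', '', args.format).my_evaluate(pair_data, fused_gt_all)
        if pair_map > best_map:
            best_map, best_pair = pair_map, (i, j)
    return best_map, best_pair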
        ry_world2cam1 = (rotation_y - yaw + 90) * np.pi / 180
        tv = uti.CamVehicle(p_in_cam1[1][0], -p_in_cam1[2][0], p_in_cam1[0][0],
                            car[5], car[4], car[3], ry_world2cam1)
        ret.append(tv)
    return ret


# filt_start_time = time.time()
# ret = cu.filt_gt_labels(cam_gt_list[0], cam_gt_list[1])
# filt_time = time.time() - filt_start_time
# print("filt gt for 1 iter, time: ", filt_time)

filt_start_time = time.time()
max_map = 0
max_i, max_j = 0, 0
# Fuse a single sample (index 44) from cameras 2, 19 and 33, convert the fused
# world-frame vehicles into camera 2's sensor frame, and show the result as a
# bird's-eye view.
fused_data = cu.matching_and_fusion([cam_test_list[2][44]], [cam_test_list[19][44]])  # fuse
fused_data = cu.matching_and_fusion(fused_data, [cam_test_list[33][44]])  # fuse
vehicles = vehicle_world2sensor(fused_data[0], cam_transform[2], -20)
box3d_list = [car.compute_box_3d() for car in vehicles]
# print(box3d_list)
bird_view = uti.add_bird_view(box3d_list)
cv2.imshow('bird', bird_view)
cv2.waitKey()

# max_map = 0
# max_i, max_j = 0, 0
# for i in range(34):
#     for j in range(i + 1, 34):
#         fused_data = cu.matching_and_fusion(cam_test_list[i], cam_test_list[j])  # fuse
#         for k in range(j + 1, 34):
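# Headless note (sketch): the cv2.imshow / cv2.waitKey call above needs a GUI
# session; on a machine without a display the bird's-eye view could be written
# to disk instead (the output filename is illustrative):
#   cv2.imwrite('bird_view_frame44_cams_2_19_33.png', bird_view)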