import os
import sys

import cv2
import numpy as np

# data_utils is a project-local module providing compose_pose and inverse_pose
import data_utils


def rel_pose_err(pose_src, pose_tgt):
    '''
    Computes relative pose error (RPE)

    Args:
        pose_src : numpy
            N x 3 x 4 source pose matrix relative to 0-th frame
        pose_tgt : numpy
            N x 3 x 4 target (ground truth) pose matrix relative to 0-th frame

    Returns:
        float : relative pose error
    '''

    assert pose_src.shape == pose_tgt.shape

    rpe = []
    for t in range(0, pose_src.shape[0] - 1):
        # Get relative pose from t to t+1
        pose01_tgt = data_utils.compose_pose(
            data_utils.inverse_pose(pose_tgt[t]), pose_tgt[t + 1])
        pose01_src = data_utils.compose_pose(
            data_utils.inverse_pose(pose_src[t]), pose_src[t + 1])

        # Compute change in pose
        delta_pose = data_utils.compose_pose(
            data_utils.inverse_pose(pose01_tgt), pose01_src)

        # Compute RPE (Relative Pose Error) as squared translation drift
        rpe.append(np.linalg.norm(delta_pose[:3, 3])**2)

    # Root mean squared error over all consecutive frame pairs
    return np.sqrt(np.mean(rpe))
def abs_trajectory_err(pose_src, pose_tgt):
    '''
    Computes absolute trajectory error (ATE)

    Args:
        pose_src : numpy
            N x 3 x 4 source pose matrix relative to 0-th frame
        pose_tgt : numpy
            N x 3 x 4 target (ground truth) pose matrix relative to 0-th frame

    Returns:
        float : absolute trajectory error
    '''

    assert pose_src.shape == pose_tgt.shape

    ate = []
    for t in range(pose_src.shape[0]):
        # Compute ATE (Absolute Trajectory Error): translation offset between
        # estimated and ground-truth pose at frame t
        delta_pose = data_utils.compose_pose(
            data_utils.inverse_pose(pose_tgt[t]), pose_src[t])
        ate.append(np.linalg.norm(delta_pose[:3, 3])**2)

    # Root mean squared error over all frames
    return np.sqrt(np.mean(ate))
def rel_rotation_err(pose_src, pose_tgt):
    '''
    Computes relative rotation error (RRE)

    Args:
        pose_src : numpy
            N x 3 x 4 source pose matrix relative to 0-th frame
        pose_tgt : numpy
            N x 3 x 4 target (ground truth) pose matrix relative to 0-th frame

    Returns:
        float : relative rotation error
    '''

    assert pose_src.shape == pose_tgt.shape

    rre = []
    for t in range(0, pose_src.shape[0] - 1):
        # Get relative pose from t to t+1
        pose01_tgt = data_utils.compose_pose(
            data_utils.inverse_pose(pose_tgt[t]), pose_tgt[t + 1])
        pose01_src = data_utils.compose_pose(
            data_utils.inverse_pose(pose_src[t]), pose_src[t + 1])

        # Compute change in pose
        delta_pose = data_utils.compose_pose(
            data_utils.inverse_pose(pose01_tgt), pose01_src)

        # Compute RRE (Relative Rotation Error): convert the residual rotation
        # to axis-angle, whose norm is the rotation angle (converted to degrees)
        omega, _ = cv2.Rodrigues(delta_pose[:3, :3])
        rre.append((180.0 / np.pi * np.linalg.norm(omega))**2)

    # Root mean squared error over all consecutive frame pairs
    return np.sqrt(np.mean(rre))
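# A minimal usage sketch (hypothetical, not part of the original module):
# evaluates the three metrics on a toy trajectory. It assumes poses are
# 3 x 4 [R | t] numpy arrays, as expected by the functions above, and that
# data_utils.compose_pose / inverse_pose operate on that format.
def _demo_pose_metrics():
    n_frame = 4

    # Ground-truth trajectory: translate 10 cm per frame along x, no rotation
    pose_tgt = np.tile(np.eye(3, 4), (n_frame, 1, 1))
    pose_tgt[:, 0, 3] = 0.10 * np.arange(n_frame)

    # Estimated trajectory: same motion with 1 mm of drift per frame
    pose_src = np.copy(pose_tgt)
    pose_src[:, 0, 3] += 0.001 * np.arange(n_frame)

    print('RPE={:.4f} m  ATE={:.4f} m  RRE={:.4f} deg'.format(
        rel_pose_err(pose_src, pose_tgt),
        abs_trajectory_err(pose_src, pose_tgt),
        rel_rotation_err(pose_src, pose_tgt)))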
if seq_dirpath.split(os.sep)[-1] in test_seq_dirpaths:
    start_idx = 0
    offset_idx = 0
else:
    start_idx = 30  # Skip the first 30 stationary frames
    offset_idx = 5  # Temporal window size

pool_inputs = []
for idx in range(start_idx, len(image_paths) - offset_idx - start_idx):
    # Find images with enough parallax, poses are from camera to world
    pose0tow = np.loadtxt(absolute_pose_paths[idx])
    pose1tow = np.loadtxt(absolute_pose_paths[idx - offset_idx])
    pose2tow = np.loadtxt(absolute_pose_paths[idx + offset_idx])

    posewto0 = data_utils.inverse_pose(pose0tow)
    pose1to0 = data_utils.compose_pose(posewto0, pose1tow)
    pose2to0 = data_utils.compose_pose(posewto0, pose2tow)

    parallax1to0 = np.linalg.norm(pose1to0[:3, 3])
    parallax2to0 = np.linalg.norm(pose2to0[:3, 3])

    # If translation magnitude is less than 1 centimeter, then skip
    if offset_idx > 0 and (parallax1to0 < 0.01 or parallax2to0 < 0.01):
        continue

    pool_inputs.append(
        (image_paths[idx - offset_idx],
         image_paths[idx],
         image_paths[idx + offset_idx],
         sparse_depth_paths[idx],
         validity_map_paths[idx],
         ground_truth_paths[idx]))

sys.stdout.write('Processing {} examples for sequence={}\r'.format(
    len(pool_inputs), seq_dirpath))