def get_same_timestamp_poses(poses_a, poses_b, add_orientations=True, abs_tol=0):
    trimmed_poses_a = poses.Poses(poses_a.pose_type, poses_a.topic)
    trimmed_poses_b = poses.Poses(poses_b.pose_type, poses_b.topic)
    poses_a_size = len(poses_a.times)
    poses_b_size = len(poses_b.times)
    a_index = 0
    b_index = 0
    # Increment a and b index as needed. Add same timestamped poses to trimmed poses containers
    while (a_index < poses_a_size) and (b_index < poses_b_size):
        a_time = poses_a.times[a_index]
        b_time = poses_b.times[b_index]
        if np.isclose(a_time, b_time, rtol=0, atol=abs_tol):
            trimmed_poses_a.positions.add_vector3d(poses_a.positions.get_vector3d(a_index))
            trimmed_poses_a.times.append(poses_a.times[a_index])
            trimmed_poses_b.positions.add_vector3d(poses_b.positions.get_vector3d(b_index))
            trimmed_poses_b.times.append(poses_b.times[b_index])
            if add_orientations:
                trimmed_poses_a.orientations.add_euler(poses_a.orientations.get_euler(a_index))
                trimmed_poses_b.orientations.add_euler(poses_b.orientations.get_euler(b_index))
            a_index += 1
            b_index += 1
        elif a_time < b_time:
            a_index += 1
        else:
            b_index += 1
    return trimmed_poses_a, trimmed_poses_b
def get_same_timestamp_poses(poses_a, poses_b):
    trimmed_poses_a = poses.Poses(poses_a.pose_type, poses_a.topic)
    trimmed_poses_b = poses.Poses(poses_b.pose_type, poses_b.topic)
    poses_a_size = len(poses_a.times)
    poses_b_size = len(poses_b.times)
    a_index = 0
    b_index = 0
    # Increment a and b index as needed. Add same timestamped poses to trimmed poses containers
    while (a_index < poses_a_size) and (b_index < poses_b_size):
        a_time = poses_a.times[a_index]
        b_time = poses_b.times[b_index]
        if a_time == b_time:
            trimmed_poses_a.positions.add_vector3d(poses_a.positions.get_vector3d(a_index))
            # trimmed_poses_a.orientations.add_vector3d(poses_a.orientations.get_vector3d(a_index))
            trimmed_poses_a.times.append(poses_a.times[a_index])
            trimmed_poses_b.positions.add_vector3d(poses_b.positions.get_vector3d(b_index))
            # trimmed_poses_b.orientations.add_vector3d(poses_b.orientations.get_vector3d(b_index))
            trimmed_poses_b.times.append(poses_b.times[b_index])
            a_index += 1
            b_index += 1
        elif a_time < b_time:
            a_index += 1
        else:
            b_index += 1
    return trimmed_poses_a, trimmed_poses_b
def create_plots(bagfile, output_pdf_file, output_csv_file='results.csv'):
    bag = rosbag.Bag(bagfile)
    bag_start_time = bag.get_start_time()
    has_imu_augmented_graph_localization_state = has_topic(bag, '/gnc/ekf')
    has_imu_bias_tester_poses = has_topic(bag, '/imu_bias_tester/pose')
    sparse_mapping_poses = poses.Poses('Sparse Mapping', '/sparse_mapping/pose')
    ar_tag_poses = poses.Poses('AR Tag', '/ar_tag/pose')
    imu_bias_tester_poses = poses.Poses('Imu Bias Tester', '/imu_bias_tester/pose')
    vec_of_poses = [sparse_mapping_poses, ar_tag_poses, imu_bias_tester_poses]
    load_pose_msgs(vec_of_poses, bag, bag_start_time)
    graph_localization_states = loc_states.LocStates('Graph Localization', '/graph_loc/state')
    imu_augmented_graph_localization_states = loc_states.LocStates(
        'Imu Augmented Graph Localization', '/gnc/ekf')
    vec_of_loc_states = [graph_localization_states, imu_augmented_graph_localization_states]
    load_loc_state_msgs(vec_of_loc_states, bag, bag_start_time)
    imu_bias_tester_velocities = velocities.Velocities('Imu Bias Tester', '/imu_bias_tester/velocity')
    load_velocity_msgs(imu_bias_tester_velocities, bag, bag_start_time)
    bag.close()

    with PdfPages(output_pdf_file) as pdf:
        add_graph_plots(pdf, sparse_mapping_poses, ar_tag_poses, graph_localization_states,
                        imu_augmented_graph_localization_states)
        if has_imu_bias_tester_poses:
            add_imu_bias_tester_poses(pdf, imu_bias_tester_poses, sparse_mapping_poses)
            add_imu_bias_tester_velocities(pdf, imu_bias_tester_velocities)
        if has_imu_augmented_graph_localization_state:
            add_other_loc_plots(pdf, graph_localization_states,
                                imu_augmented_graph_localization_states,
                                sparse_mapping_poses, ar_tag_poses)
        else:
            add_other_loc_plots(pdf, graph_localization_states, graph_localization_states)
        plot_loc_state_stats(pdf, graph_localization_states, sparse_mapping_poses, output_csv_file)
        plot_loc_state_stats(pdf, imu_augmented_graph_localization_states, sparse_mapping_poses,
                             output_csv_file, 'imu_augmented_', 0.01)
        if has_imu_bias_tester_poses:
            plot_loc_state_stats(pdf, imu_bias_tester_poses, sparse_mapping_poses, output_csv_file,
                                 'imu_bias_tester_', 0.01, False)
def get_same_timestamp_poses(
    poses_a,
    poses_b,
    add_orientations=True,
    abs_tol=0,
    rel_start_time=0,
    rel_end_time=-1,
):
    """Return copies of poses_a and poses_b containing only the pose pairs whose timestamps
    match within abs_tol, optionally restricted to [rel_start_time, rel_end_time]
    (rel_end_time < 0 means no end bound)."""
    trimmed_poses_a = poses.Poses(poses_a.pose_type, poses_a.topic)
    trimmed_poses_b = poses.Poses(poses_b.pose_type, poses_b.topic)
    poses_a_size = len(poses_a.times)
    poses_b_size = len(poses_b.times)
    a_index = 0
    b_index = 0
    # Increment a and b index as needed. Add same timestamped poses to trimmed poses containers
    while (a_index < poses_a_size) and (b_index < poses_b_size):
        a_time = poses_a.times[a_index]
        b_time = poses_b.times[b_index]
        # Check if times are within given start and end time bounds
        if a_time < rel_start_time:
            a_index += 1
            continue
        if b_time < rel_start_time:
            b_index += 1
            continue
        # rel_end_time less than zero indicates no bound on end time
        if rel_end_time >= 0:
            if a_time > rel_end_time or b_time > rel_end_time:
                break
        if np.isclose(a_time, b_time, rtol=0, atol=abs_tol):
            trimmed_poses_a.positions.add_vector3d(poses_a.positions.get_vector3d(a_index))
            trimmed_poses_a.times.append(poses_a.times[a_index])
            trimmed_poses_b.positions.add_vector3d(poses_b.positions.get_vector3d(b_index))
            trimmed_poses_b.times.append(poses_b.times[b_index])
            if add_orientations:
                trimmed_poses_a.orientations.add_euler(poses_a.orientations.get_euler(a_index))
                trimmed_poses_b.orientations.add_euler(poses_b.orientations.get_euler(b_index))
            a_index += 1
            b_index += 1
        elif a_time < b_time:
            a_index += 1
        else:
            b_index += 1
    return trimmed_poses_a, trimmed_poses_b
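# Simplified sketch of the same two-pointer timestamp matching on plain timestamp lists,
# useful for checking the tolerance behavior in isolation. Illustrative only; the containers
# above additionally carry positions and orientations.
import numpy as np


def match_timestamp_indices(times_a, times_b, abs_tol=0.0):
    # Walk both sorted timestamp lists, keeping index pairs whose times agree within abs_tol.
    matches = []
    i, j = 0, 0
    while i < len(times_a) and j < len(times_b):
        if np.isclose(times_a[i], times_b[j], rtol=0, atol=abs_tol):
            matches.append((i, j))
            i += 1
            j += 1
        elif times_a[i] < times_b[j]:
            i += 1
        else:
            j += 1
    return matches


# Example: only the first two stamps line up within 5 ms.
# match_timestamp_indices([0.0, 0.10, 0.20], [0.001, 0.102, 0.35], abs_tol=0.005) -> [(0, 0), (1, 1)]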
def make_poses(times, xs, ys, zs):
    new_poses = poses.Poses("", "")
    new_poses.times = times
    new_poses.positions.xs = xs
    new_poses.positions.ys = ys
    new_poses.positions.zs = zs
    return new_poses
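# Hypothetical usage of make_poses (values are illustrative): wrap parallel time and
# position lists into a Poses container for plotting or error computation.
# example_poses = make_poses([0.0, 0.5, 1.0], [1.0, 1.1, 1.2], [0.0, 0.0, 0.0], [0.0, 0.0, 0.0])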
def integrate_velocities(localization_states):
    """Integrate localization state velocities (forward Euler) to produce positions,
    anchored at the first localization state position."""
    delta_times = [
        j - i for i, j in zip(localization_states.times[:-1], localization_states.times[1:])
    ]
    # Make sure times are same length as velocities, ignore last velocity
    delta_times.append(0)
    integrated_positions = poses.Poses('Integrated Graph Velocities', '')
    # TODO(rsoussan): Integrate angular velocities?
    # TODO(rsoussan): central difference instead?
    x_increments = [
        velocity * delta_t
        for velocity, delta_t in zip(localization_states.velocities.xs, delta_times)
    ]
    cumulative_x_increments = np.cumsum(x_increments)
    integrated_positions.positions.xs = [
        localization_states.positions.xs[0] + cumulative_x_increment
        for cumulative_x_increment in cumulative_x_increments
    ]
    y_increments = [
        velocity * delta_t
        for velocity, delta_t in zip(localization_states.velocities.ys, delta_times)
    ]
    cumulative_y_increments = np.cumsum(y_increments)
    integrated_positions.positions.ys = [
        localization_states.positions.ys[0] + cumulative_y_increment
        for cumulative_y_increment in cumulative_y_increments
    ]
    z_increments = [
        velocity * delta_t
        for velocity, delta_t in zip(localization_states.velocities.zs, delta_times)
    ]
    cumulative_z_increments = np.cumsum(z_increments)
    integrated_positions.positions.zs = [
        localization_states.positions.zs[0] + cumulative_z_increment
        for cumulative_z_increment in cumulative_z_increments
    ]
    # Add start positions
    integrated_positions.positions.xs.insert(0, localization_states.positions.xs[0])
    integrated_positions.positions.ys.insert(0, localization_states.positions.ys[0])
    integrated_positions.positions.zs.insert(0, localization_states.positions.zs[0])
    # Remove last elements (no timestamp for these)
    del integrated_positions.positions.xs[-1]
    del integrated_positions.positions.ys[-1]
    del integrated_positions.positions.zs[-1]
    integrated_positions.times = localization_states.times
    return integrated_positions
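# Standalone sketch of the forward-Euler integration pattern above, reduced to one axis
# (illustrative values; the function itself operates on the localization state containers).
import numpy as np


def integrate_1d(times, velocities, start_position):
    # Delta between consecutive timestamps; pad with 0 so lengths match (last velocity unused).
    delta_times = np.append(np.diff(times), 0)
    increments = np.asarray(velocities) * delta_times
    positions = start_position + np.cumsum(increments)
    # Shift so the first sample is the starting position and the extra last value is dropped.
    return np.concatenate(([start_position], positions[:-1]))


# integrate_1d([0.0, 0.5, 1.0], [2.0, 2.0, 2.0], 10.0) -> array([10., 11., 12.])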
def make_absolute_poses_from_relative_poses(absolute_poses, relative_poses, name):
    """Chain relative poses onto the absolute pose closest in time to the first relative pose,
    producing an absolute pose history."""
    starting_relative_time = relative_poses.times[0]
    np_times = np.array(absolute_poses.times)
    closest_index = np.argmin(np.abs(np_times - starting_relative_time))
    start_pose = absolute_poses.pose(closest_index)
    new_pose = start_pose
    new_poses_list = [start_pose]
    new_poses_times = [absolute_poses.times[closest_index]]
    for index in range(len(relative_poses.times)):
        relative_pose = relative_poses.pose(index)
        new_pose = new_pose * relative_pose
        new_poses_list.append(new_pose)
        new_poses_times.append(relative_poses.times[index])
    new_poses = poses.Poses(name, "")
    new_poses.init_from_poses(new_poses_list, new_poses_times)
    return new_poses
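# Sketch of the chaining idea using 4x4 homogeneous transforms, independent of the Poses class
# (which here is assumed to support the * operator for pose composition). Illustrative only.
import numpy as np


def chain_relative_transforms(start_transform, relative_transforms):
    # Accumulate absolute transforms by right-multiplying each relative step.
    absolute = [start_transform]
    current = start_transform
    for relative in relative_transforms:
        current = current @ relative
        absolute.append(current)
    return absolute


# Example: two unit translations along x starting from the identity.
# step = np.eye(4); step[0, 3] = 1.0
# chain_relative_transforms(np.eye(4), [step, step])[-1][0, 3] -> 2.0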
def add_increments_to_absolute_pose(
    x_increments,
    y_increments,
    z_increments,
    starting_x,
    starting_y,
    starting_z,
    times,
    poses_name="Increment Poses",
):
    """Accumulate per-step x/y/z increments onto a starting position, producing absolute
    positions aligned with the given times."""
    integrated_positions = poses.Poses(poses_name, "")
    cumulative_x_increments = np.cumsum(x_increments)
    integrated_positions.positions.xs = [
        starting_x + cumulative_x_increment
        for cumulative_x_increment in cumulative_x_increments
    ]
    cumulative_y_increments = np.cumsum(y_increments)
    integrated_positions.positions.ys = [
        starting_y + cumulative_y_increment
        for cumulative_y_increment in cumulative_y_increments
    ]
    cumulative_z_increments = np.cumsum(z_increments)
    integrated_positions.positions.zs = [
        starting_z + cumulative_z_increment
        for cumulative_z_increment in cumulative_z_increments
    ]
    # Add start positions
    integrated_positions.positions.xs.insert(0, starting_x)
    integrated_positions.positions.ys.insert(0, starting_y)
    integrated_positions.positions.zs.insert(0, starting_z)
    # Remove last elements (no timestamp for these)
    del integrated_positions.positions.xs[-1]
    del integrated_positions.positions.ys[-1]
    del integrated_positions.positions.zs[-1]
    integrated_positions.times = times
    return integrated_positions
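# Hypothetical call (assumes the poses module above is available): rebuild absolute x/y/z
# positions from per-step displacements, anchored at a known start. Values are illustrative.
# increment_poses = add_increments_to_absolute_pose(
#     x_increments=[0.1, 0.1, 0.1],
#     y_increments=[0.0, 0.0, 0.0],
#     z_increments=[0.0, 0.0, 0.0],
#     starting_x=1.0,
#     starting_y=2.0,
#     starting_z=0.0,
#     times=[0.0, 0.5, 1.0],
#     poses_name="Example Increments",
# )
# increment_poses.positions.xs -> [1.0, 1.1, 1.2]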
def create_plots(
    bagfile,
    output_pdf_file,
    output_csv_file="results.csv",
    groundtruth_bagfile=None,
    rmse_rel_start_time=0,
    rmse_rel_end_time=-1,
):
    """Load localization results from the bagfile and write plots to output_pdf_file and
    statistics to output_csv_file, optionally using a separate groundtruth bagfile."""
    bag = rosbag.Bag(bagfile)
    groundtruth_bag = rosbag.Bag(groundtruth_bagfile) if groundtruth_bagfile else bag
    bag_start_time = bag.get_start_time()
    has_imu_augmented_graph_localization_state = has_topic(bag, "/gnc/ekf")
    has_imu_bias_tester_poses = has_topic(bag, "/imu_bias_tester/pose")
    sparse_mapping_poses = poses.Poses("Sparse Mapping", "/sparse_mapping/pose")
    ar_tag_poses = poses.Poses("AR Tag", "/ar_tag/pose")
    imu_bias_tester_poses = poses.Poses("Imu Bias Tester", "/imu_bias_tester/pose")
    vec_of_poses = [ar_tag_poses, imu_bias_tester_poses]
    load_pose_msgs(vec_of_poses, bag, bag_start_time)
    has_depth_odom = has_topic(bag, "/loc/depth/odom")
    depth_odom_relative_poses = poses.Poses("Depth Odom", "/loc/depth/odom")
    load_odometry_msgs([depth_odom_relative_poses], bag, bag_start_time)
    groundtruth_vec_of_poses = [sparse_mapping_poses]
    load_pose_msgs(groundtruth_vec_of_poses, groundtruth_bag, bag_start_time)
    graph_localization_states = loc_states.LocStates("Graph Localization", "/graph_loc/state")
    imu_augmented_graph_localization_states = loc_states.LocStates(
        "Imu Augmented Graph Localization", "/gnc/ekf")
    vec_of_loc_states = [
        graph_localization_states,
        imu_augmented_graph_localization_states,
    ]
    load_loc_state_msgs(vec_of_loc_states, bag, bag_start_time)
    imu_bias_tester_velocities = velocities.Velocities(
        "Imu Bias Tester", "/imu_bias_tester/velocity")
    load_velocity_msgs(imu_bias_tester_velocities, bag, bag_start_time)
    bag.close()

    with PdfPages(output_pdf_file) as pdf:
        add_graph_plots(
            pdf,
            sparse_mapping_poses,
            ar_tag_poses,
            graph_localization_states,
            imu_augmented_graph_localization_states,
        )
        if has_imu_bias_tester_poses:
            add_imu_bias_tester_poses(pdf, imu_bias_tester_poses, sparse_mapping_poses)
            add_imu_bias_tester_velocities(pdf, imu_bias_tester_velocities)
        if has_imu_augmented_graph_localization_state:
            add_other_loc_plots(
                pdf,
                graph_localization_states,
                imu_augmented_graph_localization_states,
                sparse_mapping_poses,
                ar_tag_poses,
            )
        else:
            add_other_loc_plots(pdf, graph_localization_states, graph_localization_states)
        if has_depth_odom:
            depth_odom_poses = utilities.make_absolute_poses_from_relative_poses(
                sparse_mapping_poses, depth_odom_relative_poses, "Depth Odometry")
            plot_poses(pdf, depth_odom_poses, sparse_mapping_poses, ar_tag_poses)
            # Note that for the absolute time difference tolerance, depth images and groundtruth
            # use different sensor data and therefore have less similar timestamps. This timestamp
            # difference reduces the accuracy of the depth odometry groundtruth comparison.
            plot_loc_state_stats(
                pdf,
                depth_odom_poses,
                sparse_mapping_poses,
                output_csv_file,
                "depth_odometry_",
                0.01,
                False,
                rmse_rel_start_time=rmse_rel_start_time,
                rmse_rel_end_time=rmse_rel_end_time,
            )
            plot_covariances(
                pdf,
                depth_odom_relative_poses.times,
                depth_odom_relative_poses.covariances.position,
                "Depth Odometry Position",
            )
            plot_covariances(
                pdf,
                depth_odom_relative_poses.times,
                depth_odom_relative_poses.covariances.orientation,
                "Depth Odometry Orientation",
            )
        plot_loc_state_stats(
            pdf,
            graph_localization_states,
            sparse_mapping_poses,
            output_csv_file,
            rmse_rel_start_time=rmse_rel_start_time,
            rmse_rel_end_time=rmse_rel_end_time,
        )
        plot_loc_state_stats(
            pdf,
            imu_augmented_graph_localization_states,
            sparse_mapping_poses,
            output_csv_file,
            "imu_augmented_",
            0.01,
            rmse_rel_start_time=rmse_rel_start_time,
            rmse_rel_end_time=rmse_rel_end_time,
        )
        if has_imu_bias_tester_poses:
            plot_loc_state_stats(
                pdf,
                imu_bias_tester_poses,
                sparse_mapping_poses,
                output_csv_file,
                "imu_bias_tester_",
                0.01,
                False,
                rmse_rel_start_time=rmse_rel_start_time,
                rmse_rel_end_time=rmse_rel_end_time,
            )
def create_plots(
    bagfile,
    output_pdf_file,
    output_csv_file="results.csv",
    groundtruth_bagfile=None,
    rmse_rel_start_time=0,
    rmse_rel_end_time=-1,
):
    bag = rosbag.Bag(bagfile)
    groundtruth_bag = rosbag.Bag(groundtruth_bagfile) if groundtruth_bagfile else bag
    bag_start_time = bag.get_start_time()
    has_imu_augmented_graph_localization_state = has_topic(bag, "/gnc/ekf")
    has_imu_bias_tester_poses = has_topic(bag, "/imu_bias_tester/pose")
    sparse_mapping_poses = poses.Poses("Sparse Mapping", "/sparse_mapping/pose")
    ar_tag_poses = poses.Poses("AR Tag", "/ar_tag/pose")
    imu_bias_tester_poses = poses.Poses("Imu Bias Tester", "/imu_bias_tester/pose")
    vec_of_poses = [ar_tag_poses, imu_bias_tester_poses]
    load_pose_msgs(vec_of_poses, bag, bag_start_time)
    groundtruth_vec_of_poses = [sparse_mapping_poses]
    load_pose_msgs(groundtruth_vec_of_poses, groundtruth_bag, bag_start_time)
    graph_localization_states = loc_states.LocStates("Graph Localization", "/graph_loc/state")
    imu_augmented_graph_localization_states = loc_states.LocStates(
        "Imu Augmented Graph Localization", "/gnc/ekf")
    vec_of_loc_states = [
        graph_localization_states,
        imu_augmented_graph_localization_states,
    ]
    load_loc_state_msgs(vec_of_loc_states, bag, bag_start_time)
    imu_bias_tester_velocities = velocities.Velocities(
        "Imu Bias Tester", "/imu_bias_tester/velocity")
    load_velocity_msgs(imu_bias_tester_velocities, bag, bag_start_time)
    bag.close()

    with PdfPages(output_pdf_file) as pdf:
        add_graph_plots(
            pdf,
            sparse_mapping_poses,
            ar_tag_poses,
            graph_localization_states,
            imu_augmented_graph_localization_states,
        )
        if has_imu_bias_tester_poses:
            add_imu_bias_tester_poses(pdf, imu_bias_tester_poses, sparse_mapping_poses)
            add_imu_bias_tester_velocities(pdf, imu_bias_tester_velocities)
        if has_imu_augmented_graph_localization_state:
            add_other_loc_plots(
                pdf,
                graph_localization_states,
                imu_augmented_graph_localization_states,
                sparse_mapping_poses,
                ar_tag_poses,
            )
        else:
            add_other_loc_plots(pdf, graph_localization_states, graph_localization_states)
        plot_loc_state_stats(
            pdf,
            graph_localization_states,
            sparse_mapping_poses,
            output_csv_file,
            rmse_rel_start_time=rmse_rel_start_time,
            rmse_rel_end_time=rmse_rel_end_time,
        )
        plot_loc_state_stats(
            pdf,
            imu_augmented_graph_localization_states,
            sparse_mapping_poses,
            output_csv_file,
            "imu_augmented_",
            0.01,
            rmse_rel_start_time=rmse_rel_start_time,
            rmse_rel_end_time=rmse_rel_end_time,
        )
        if has_imu_bias_tester_poses:
            plot_loc_state_stats(
                pdf,
                imu_bias_tester_poses,
                sparse_mapping_poses,
                output_csv_file,
                "imu_bias_tester_",
                0.01,
                False,
                rmse_rel_start_time=rmse_rel_start_time,
                rmse_rel_end_time=rmse_rel_end_time,
            )
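# Hypothetical driver sketch showing how create_plots might be wired to a command line.
# The argument names below are illustrative assumptions, not the tool's actual interface.
import argparse

if __name__ == "__main__":
    parser = argparse.ArgumentParser(description="Plot localization results from a bagfile.")
    parser.add_argument("bagfile", help="Input bagfile with localization topics.")
    parser.add_argument("--output-pdf", default="results.pdf")
    parser.add_argument("--output-csv", default="results.csv")
    parser.add_argument("--groundtruth-bagfile", default=None,
                        help="Optional separate bag providing sparse mapping groundtruth poses.")
    args = parser.parse_args()
    create_plots(args.bagfile, args.output_pdf, args.output_csv, args.groundtruth_bagfile)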