def evaluate_calibration(time_stamped_poses_B_H, time_stamped_poses_W_E,
                         dq_H_E, time_offset, config):
    assert len(time_stamped_poses_B_H) > 0
    assert len(time_stamped_poses_W_E) > 0
    assert isinstance(config, HandEyeConfig)

    (aligned_poses_B_H,
     aligned_poses_W_E) = compute_aligned_poses(time_stamped_poses_B_H,
                                                time_stamped_poses_W_E,
                                                time_offset)
    assert len(aligned_poses_B_H) == len(aligned_poses_W_E)

    # If we found no matching poses, the evaluation failed.
    if len(aligned_poses_B_H) == 0:
        return ((float('inf'), float('inf')), 0)

    # Convert poses to dual quaternions.
    dual_quat_B_H_vec = [
        DualQuaternion.from_pose_vector(aligned_pose_B_H)
        for aligned_pose_B_H in aligned_poses_B_H[:, 1:]
    ]
    dual_quat_W_E_vec = [
        DualQuaternion.from_pose_vector(aligned_pose_W_E)
        for aligned_pose_W_E in aligned_poses_W_E[:, 1:]
    ]

    assert len(dual_quat_B_H_vec) == len(
        dual_quat_W_E_vec
    ), "len(dual_quat_B_H_vec): {} vs len(dual_quat_W_E_vec): {}".format(
        len(dual_quat_B_H_vec), len(dual_quat_W_E_vec))

    aligned_dq_B_H = align_paths_at_index(dual_quat_B_H_vec, 0)
    aligned_dq_W_E = align_paths_at_index(dual_quat_W_E_vec, 0)

    (poses_B_H, poses_W_H) = get_aligned_poses(aligned_dq_B_H, aligned_dq_W_E,
                                               dq_H_E)

    (rmse_position, rmse_orientation,
     inlier_flags) = evaluate_alignment(poses_B_H, poses_W_H, config,
                                        config.visualize)

    if config.visualize:
        every_nth_element = config.visualize_plot_every_nth_pose
        plot_poses(
            [poses_B_H[::every_nth_element], poses_W_H[::every_nth_element]],
            True,
            title="3D Poses After Fine Alignment")

    return ((rmse_position, rmse_orientation), sum(inlier_flags))
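
Usage, sketched: the CSV file names below are hypothetical placeholders, and only the HandEyeConfig fields that evaluate_calibration actually reads are set.

# Minimal usage sketch; file names are hypothetical.
(time_stamped_poses_B_H, _, _) = read_time_stamped_poses_from_csv_file("poses_B_H.csv")
(time_stamped_poses_W_E, _, _) = read_time_stamped_poses_from_csv_file("poses_W_E.csv")

config = HandEyeConfig()
config.visualize = False
config.visualize_plot_every_nth_pose = 10

# Identity hand-eye transform as a trivial candidate; pose layout is
# [x, y, z, qx, qy, qz, qw] as in the conversion tests below.
dq_H_E = DualQuaternion.from_pose_vector([0., 0., 0., 0., 0., 0., 1.])

((rmse_position, rmse_orientation), num_inliers) = evaluate_calibration(
    time_stamped_poses_B_H, time_stamped_poses_W_E, dq_H_E, 0.0, config)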
    def test_conversions(self):
        pose = [1, 2, 3, 1., 0., 0., 0.]
        dq = DualQuaternion.from_pose_vector(pose)
        pose_out = dq.to_pose()
        matrix_out = dq.to_matrix()
        matrix_expected = np.array([[1, 0, 0, 1], [0, -1, 0, 2],
                                    [0, 0, -1, 3], [0, 0, 0, 1]])
        npt.assert_allclose(pose, pose_out)
        npt.assert_allclose(matrix_out, matrix_expected)
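
The expected matrix can be verified with the standard unit-quaternion to rotation-matrix formula; a self-contained check in plain numpy, assuming the [x, y, z, qx, qy, qz, qw] pose convention used throughout these examples:

import numpy as np

def rotation_matrix_from_quaternion(qx, qy, qz, qw):
    # Standard formula for a unit quaternion (qx, qy, qz, qw).
    return np.array([
        [1 - 2 * (qy * qy + qz * qz), 2 * (qx * qy - qz * qw), 2 * (qx * qz + qy * qw)],
        [2 * (qx * qy + qz * qw), 1 - 2 * (qx * qx + qz * qz), 2 * (qy * qz - qx * qw)],
        [2 * (qx * qz - qy * qw), 2 * (qy * qz + qx * qw), 1 - 2 * (qx * qx + qy * qy)]])

# (1, 0, 0, 0) is a 180 degree rotation about x, giving the diag(1, -1, -1)
# rotation block of matrix_expected above.
print(rotation_matrix_from_quaternion(1., 0., 0., 0.))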
  @classmethod
  def fromJson(cls, in_file, switchConvention=False):
    with open(in_file, 'r') as f:
      data = json.load(f)

    p = [float(data['translation'][name]) for name in 'xyz']

    q = np.array([float(data['rotation'][name]) for name in 'ijkw'])
    if switchConvention:
      q[:3] *= -1.0

    dq = DualQuaternion.from_pose_vector(np.hstack((p, q)))
    return ExtrinsicCalibration(float(data['delay']), dq)
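
The JSON layout fromJson expects can be read off the keys it accesses; a minimal round trip with hypothetical values:

import json

example = {
    "delay": 0.003,  # time offset; the batch scripts report offsets in seconds
    "translation": {"x": 0.1, "y": 0.0, "z": -0.05},      # read in 'xyz' order
    "rotation": {"i": 0.0, "j": 0.0, "k": 0.0, "w": 1.0}  # read in 'ijkw' order
}
with open("example_calibration.json", "w") as f:
    json.dump(example, f)

calib = ExtrinsicCalibration.fromJson("example_calibration.json")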
    def test_dq_to_matrix(self):

        pose = [1, 2, 3, 4., 5., 6., 7.]
        dq = DualQuaternion.from_pose_vector(pose)
        dq.normalize()

        matrix_out = dq.to_matrix()

        dq_from_matrix = DualQuaternion.from_transformation_matrix(matrix_out)
        matrix_out_2 = dq_from_matrix.to_matrix()

        npt.assert_allclose(dq.dq, dq_from_matrix.dq)
        npt.assert_allclose(matrix_out, matrix_out_2)
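
The rotation part of this pose, (4., 5., 6., 7.), is deliberately not a unit quaternion, which is why normalize() must run before the matrix round trip; a quick standalone check:

import numpy as np

q = np.array([4., 5., 6., 7.])
print(np.linalg.norm(q))  # ~11.22, far from 1, so the raw pose is not a rigid transform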
def apply_hand_eye_calibration(trajectory_G_I, marker_calibration_file):
    """Returns the transformed estimator trajectory.

    Computes the trajectory of the marker frame, given the trajectory if the eye
    and the hand_eye_calibration output.
    """
    assert os.path.isfile(marker_calibration_file)
    # Frames of reference: M = marker (hand), I = IMU (eye).
    dq_I_M = ExtrinsicCalibration.fromJson(
        marker_calibration_file).pose_dq.inverse()
    # Copy so the input trajectory is not mutated in place.
    trajectory_G_M = trajectory_G_I.copy()
    for idx in range(trajectory_G_M.shape[0]):
        dq_G_I = DualQuaternion.from_pose_vector(trajectory_G_I[idx, 1:8])
        dq_G_M = dq_G_I * dq_I_M
        trajectory_G_M[idx, 1:8] = dq_G_M.to_pose()
    return trajectory_G_M
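
A usage sketch with a one-row trajectory; the row layout [t, x, y, z, qx, qy, qz, qw] matches the [idx, 1:8] slicing above, and the calibration file name is a hypothetical placeholder:

trajectory_G_I = np.array([[0.0, 0., 0., 0., 0., 0., 0., 1.]])  # identity pose at t = 0
trajectory_G_M = apply_hand_eye_calibration(trajectory_G_I, "calibration.json")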
def calibrateTwo(group, a_in, b_in, a, b):
    if not os.path.exists(group):
        os.mkdir(group)
    a_aligned = group + '/' + a + "_aligned.csv"
    b_aligned = group + '/' + b + "_aligned.csv"
    time_offset_file = group + '/' + 'time_offset.csv'
    pose_file = group + '/' + 'pose.csv'

    if requiresUpdate([a_in, b_in], [a_aligned, b_aligned, time_offset_file]):
        run(
            "rosrun hand_eye_calibration compute_aligned_poses.py \
      --poses_B_H_csv_file %s  \
      --poses_W_E_csv_file %s \
      --aligned_poses_B_H_csv_file %s \
      --aligned_poses_W_E_csv_file %s \
      --time_offset_output_csv_file %s" %
            (a_in, b_in, a_aligned, b_aligned, time_offset_file), DRY_RUN)

    if requiresUpdate([a_aligned, b_aligned], [pose_file]):
        run(
            "rosrun hand_eye_calibration compute_hand_eye_calibration.py \
      --aligned_poses_B_H_csv_file %s \
      --aligned_poses_W_E_csv_file %s  \
      --extrinsics_output_csv_file %s" % (a_aligned, b_aligned, pose_file),
            DRY_RUN)

    init_guess_file = group + "_init_guess.json"
    if requiresUpdate([time_offset_file, pose_file], [init_guess_file]):
        time_offset = float(readArrayFromCsv(time_offset_file)[0, 0])
        pose = readArrayFromCsv(pose_file).reshape((7, ))
        calib = ExtrinsicCalibration(time_offset,
                                     DualQuaternion.from_pose_vector(pose))
        calib.writeJson(init_guess_file)

    calib_file = group + ".json"
    if requiresUpdate([a_in, b_in, init_guess_file], [calib_file]):
        run(
            "rosrun hand_eye_calibration_batch_estimation batch_estimator -v 1 \
      --pose1_csv=%s --pose2_csv=%s \
      --init_guess_file=%s \
      --output_file=%s" % (a_in, b_in, init_guess_file, calib_file), DRY_RUN)

    cal = ExtrinsicCalibration.fromJson(calib_file)
    cal_init = ExtrinsicCalibration.fromJson(init_guess_file)
    print(cal_init, "->\n", cal)
    return cal, cal_init
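
Called, for example, with two hypothetical pose CSV files (assuming the hand_eye_calibration ROS packages are available via rosrun):

cal, cal_init = calibrateTwo("cam_imu", "cam_poses.csv", "imu_poses.csv",
                             "cam", "imu")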
    def test_conversions(self):
        poses = [[1, 2, 3, 1.0, 0., 0., 0.],
                 [1, -2, -3, -0.5 * math.sqrt(2), 0., 0., -0.5 * math.sqrt(2)]]
        expected_matrices = [
            np.array([[1, 0, 0, 1], [0, -1, 0, 2], [0, 0, -1, 3],
                      [0, 0, 0, 1]]),
            np.array([[1, 0, 0, 1], [0, 0, -1, -2], [0, 1, 0, -3],
                      [0, 0, 0, 1]])
        ]
        for idx in range(len(poses)):
            pose = poses[idx]
            matrix_expected = expected_matrices[idx]
            dq = DualQuaternion.from_pose_vector(pose)
            pose_out = dq.to_pose()
            matrix_out = dq.to_matrix()
            np.set_printoptions(suppress=True)
            npt.assert_allclose(pose[0:3], pose_out[0:3])
            # q and -q describe the same rotation, so accept either sign.
            if not np.allclose(pose[3:7], pose_out[3:7]):
                npt.assert_allclose(pose[3:7], -pose_out[3:7])
            npt.assert_allclose(matrix_out, matrix_expected, atol=1e-6)
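
The sign check works because q and -q encode the same rotation (unit quaternions double-cover the rotation group); one way to package it, as a sketch rather than repo API:

def quaternions_equal_up_to_sign(q_a, q_b, atol=1e-8):
    # q and -q represent the same rotation, so accept either sign.
    q_a, q_b = np.asarray(q_a), np.asarray(q_b)
    return np.allclose(q_a, q_b, atol=atol) or np.allclose(q_a, -q_b, atol=atol)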
        "Provide either poses_W_E or poses_E_W!"

    if args.calibration_output_json_file is not None:
        assert args.time_offset_input_csv_file is not None, (
            "In order to compose a complete calibration result json file, you "
            +
            "need to provide the time alignment csv file as an input using " +
            "this flag: --time_offset_input_csv_file")

    if use_poses_B_H:
        with open(args.aligned_poses_B_H_csv_file, 'r') as csvfile:
            poses1_reader = csv.reader(csvfile, delimiter=',', quotechar='|')
            poses1 = np.array(list(poses1_reader))
            poses1 = poses1.astype(float)
            dual_quat_B_H_vec = [
                DualQuaternion.from_pose_vector(pose[1:]) for pose in poses1
            ]
    else:
        with open(args.aligned_poses_H_B_csv_file, 'r') as csvfile:
            poses1_reader = csv.reader(csvfile, delimiter=',', quotechar='|')
            poses1 = np.array(list(poses1_reader))
            poses1 = poses1.astype(float)
            dual_quat_B_H_vec = [
                DualQuaternion.from_pose_vector(pose[1:]).inverse()
                for pose in poses1
            ]

    if use_poses_W_E:
        with open(args.aligned_poses_W_E_csv_file, 'r') as csvfile:
            poses2_reader = csv.reader(csvfile, delimiter=',', quotechar='|')
            poses2 = np.array(list(poses2_reader))
    use_poses_H_B = (args.aligned_poses_H_B_csv_file is not None)
    use_poses_W_E = (args.aligned_poses_W_E_csv_file is not None)
    use_poses_E_W = (args.aligned_poses_E_W_csv_file is not None)

    assert use_poses_B_H != use_poses_H_B, \
        "Provide either poses_B_H or poses_H_B!"
    assert use_poses_W_E != use_poses_E_W, \
        "Provide either poses_W_E or poses_E_W!"

    if use_poses_B_H:
        with open(args.aligned_poses_B_H_csv_file, 'r') as csvfile:
            poses1_reader = csv.reader(csvfile, delimiter=',', quotechar='|')
            poses1 = np.array(list(poses1_reader))
            poses1 = poses1.astype(float)
            dual_quat_B_H_vec = [
                DualQuaternion.from_pose_vector(pose[1:]) for pose in poses1
            ]
    else:
        with open(args.aligned_poses_H_B_csv_file, 'r') as csvfile:
            poses1_reader = csv.reader(csvfile, delimiter=',', quotechar='|')
            poses1 = np.array(list(poses1_reader))
            poses1 = poses1.astype(float)
            dual_quat_B_H_vec = [
                DualQuaternion.from_pose_vector(pose[1:]).inverse()
                for pose in poses1
            ]

    if use_poses_W_E:
        with open(args.aligned_poses_W_E_csv_file, 'r') as csvfile:
            poses2_reader = csv.reader(csvfile, delimiter=',', quotechar='|')
            poses2 = np.array(list(poses2_reader))
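
Each row of the aligned pose CSVs is parsed as plain floats, [t, x, y, z, qx, qy, qz, qw], with the timestamp dropped via pose[1:] before building the dual quaternion. A minimal compatible file, values hypothetical:

csv_text = ("0.00,0.0,0.0,0.0,0.0,0.0,0.0,1.0\n"
            "0.05,0.1,0.0,0.0,0.0,0.0,0.0,1.0\n")
with open("aligned_poses_B_H.csv", "w") as f:
    f.write(csv_text)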
def compute_initial_guess_for_all_pairs(set_of_pose_pairs, algorithm_name, hand_eye_config,
                                        filtering_config, optimization_config, visualize=False):
  """
  Iterate over all pairs and compute an initial guess for the calibration (both time and
  transformation). Also store the experiment results from this computation for each pair.
  """

  set_of_dq_H_E_initial_guess = []
  set_of_time_offset_initial_guess = []

  # Initialize the result entry.
  result_entry = ResultEntry()
  result_entry.init_from_configs(algorithm_name, 0, filtering_config,
                                 hand_eye_config, optimization_config)

  for (pose_file_B_H, pose_file_W_E) in set_of_pose_pairs:
    print("\n\nCompute initial guess for calibration between \n\t{} \n and \n\t{} \n\n".format(
        pose_file_B_H, pose_file_W_E))

    (time_stamped_poses_B_H,
     times_B_H,
     quaternions_B_H
     ) = read_time_stamped_poses_from_csv_file(pose_file_B_H)
    print("Found ", time_stamped_poses_B_H.shape[0],
          " poses in file: ", pose_file_B_H)

    (time_stamped_poses_W_E,
     times_W_E,
     quaternions_W_E) = read_time_stamped_poses_from_csv_file(pose_file_W_E)
    print("Found ", time_stamped_poses_W_E.shape[0],
          " poses in file: ", pose_file_W_E)

    # No time offset.
    time_offset_initial_guess = 0.
    # Unit DualQuaternion.
    dq_H_E_initial_guess = DualQuaternion.from_vector(
        [0., 0., 0., 1.0, 0., 0., 0., 0.])

    print("Computing time offset...")
    time_offset_initial_guess = calculate_time_offset(times_B_H, quaternions_B_H,
                                                      times_W_E, quaternions_W_E,
                                                      filtering_config, visualize)

    print("Time offset: {}s".format(time_offset_initial_guess))

    print("Computing aligned poses...")
    (aligned_poses_B_H, aligned_poses_W_E) = compute_aligned_poses(
        time_stamped_poses_B_H, time_stamped_poses_W_E, time_offset_initial_guess, visualize)

    # Convert poses to dual quaternions.
    dual_quat_B_H_vec = [DualQuaternion.from_pose_vector(
        aligned_pose_B_H) for aligned_pose_B_H in aligned_poses_B_H[:, 1:]]
    dual_quat_W_E_vec = [DualQuaternion.from_pose_vector(
        aligned_pose_W_E) for aligned_pose_W_E in aligned_poses_W_E[:, 1:]]

    assert len(dual_quat_B_H_vec) == len(dual_quat_W_E_vec), ("len(dual_quat_B_H_vec): {} "
                                                              "vs len(dual_quat_W_E_vec): {}"
                                                              ).format(len(dual_quat_B_H_vec),
                                                                       len(dual_quat_W_E_vec))

    dual_quat_B_H_vec = align_paths_at_index(dual_quat_B_H_vec)
    dual_quat_W_E_vec = align_paths_at_index(dual_quat_W_E_vec)

    # Draw both paths in their Global / World frame.
    if visualize:
      poses_B_H = np.array([dual_quat_B_H_vec[0].to_pose().T])
      poses_W_E = np.array([dual_quat_W_E_vec[0].to_pose().T])
      for i in range(1, len(dual_quat_B_H_vec)):
        poses_B_H = np.append(poses_B_H, np.array(
            [dual_quat_B_H_vec[i].to_pose().T]), axis=0)
        poses_W_E = np.append(poses_W_E, np.array(
            [dual_quat_W_E_vec[i].to_pose().T]), axis=0)
      every_nth_element = args.plot_every_nth_pose
      plot_poses([poses_B_H[:: every_nth_element], poses_W_E[:: every_nth_element]],
                 True, title="3D Poses Before Alignment")

    print("Computing hand-eye calibration to obtain an initial guess...")

    if hand_eye_config.use_baseline_approach:
      (success, dq_H_E_initial_guess, rmse,
       num_inliers, num_poses_kept,
       runtime, singular_values, bad_singular_values) = compute_hand_eye_calibration_BASELINE(
          dual_quat_B_H_vec, dual_quat_W_E_vec, hand_eye_config)
    else:
      (success, dq_H_E_initial_guess, rmse,
       num_inliers, num_poses_kept,
       runtime, singular_values, bad_singular_values) = compute_hand_eye_calibration_RANSAC(
          dual_quat_B_H_vec, dual_quat_W_E_vec, hand_eye_config)

    result_entry.success.append(success)
    result_entry.num_initial_poses.append(len(dual_quat_B_H_vec))
    result_entry.num_poses_kept.append(num_poses_kept)
    result_entry.runtimes.append(runtime)
    result_entry.singular_values.append(singular_values)
    result_entry.bad_singular_value.append(bad_singular_values)
    result_entry.dataset_names.append((pose_file_B_H, pose_file_W_E))

    set_of_dq_H_E_initial_guess.append(dq_H_E_initial_guess)
    set_of_time_offset_initial_guess.append(time_offset_initial_guess)

  return (set_of_dq_H_E_initial_guess,
          set_of_time_offset_initial_guess, result_entry)
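
A usage sketch for a single pose-file pair; the file names are hypothetical, the algorithm name is just a label stored in the results, and the three config objects are assumed to be set up elsewhere with the repo's config types:

set_of_pose_pairs = [("poses_B_H.csv", "poses_W_E.csv")]
(dq_H_E_guesses, time_offset_guesses, result_entry) = \
    compute_initial_guess_for_all_pairs(set_of_pose_pairs, "baseline",
                                        hand_eye_config, filtering_config,
                                        optimization_config, visualize=False)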