Example 1
def get_rot_vector(frame):
    # The vector part of a unit quaternion is axis * sin(angle / 2),
    # so its norm recovers sin(angle / 2) and asin yields the half angle.
    qx, qy, qz, _ = real_quat_from_matrix(frame)

    axis   = vector3(qx, qy, qz)
    sin_a2 = norm(axis)
    a      = asin(sin_a2) * 2
    # Note: an identity rotation (sin_a2 == 0) makes this division degenerate.
    return axis * (a / sin_a2)
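For a quick cross-check of the math above, here is a hypothetical NumPy-only helper (not part of the library used in these examples) that recovers the same rotation vector directly from a 3x3 rotation matrix:

import numpy as np

def rot_vector_from_matrix_np(R):
    # Illustrative sketch only: expects a 3x3 rotation matrix.
    # tr(R) = 1 + 2*cos(angle)
    angle = np.arccos(np.clip((np.trace(R) - 1.0) / 2.0, -1.0, 1.0))
    if np.isclose(angle, 0.0):
        return np.zeros(3)                      # identity rotation
    # The skew-symmetric part of R encodes the rotation axis.
    # (Angles near pi would need a symmetric-part fallback, omitted here.)
    axis = np.array([R[2, 1] - R[1, 2],
                     R[0, 2] - R[2, 0],
                     R[1, 0] - R[0, 1]]) / (2.0 * np.sin(angle))
    return axis * angle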
Example 2
def encode_rotation(data):
    # real_quat_from_matrix returns the quaternion in (x, y, z, w) order;
    # map it onto the message's named fields.
    quat = real_quat_from_matrix(data)
    out = QuaternionMsg()
    out.w = quat[3]
    out.x = quat[0]
    out.y = quat[1]
    out.z = quat[2]
    return out
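The tuple order here (w last) matches how the other examples unpack `real_quat_from_matrix`. A hedged variant that re-normalizes the quaternion before packing it, assuming `QuaternionMsg` corresponds to geometry_msgs/Quaternion and `real_quat_from_matrix` is in scope, could look like this:

import numpy as np
from geometry_msgs.msg import Quaternion

def encode_rotation_normalized(data):
    # Illustrative variant: guard against a slightly non-orthonormal
    # rotation matrix by re-normalizing the quaternion.
    q = np.asarray(real_quat_from_matrix(data), dtype=float)
    q /= np.linalg.norm(q)
    return Quaternion(x=q[0], y=q[1], z=q[2], w=q[3])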
Example 3
        for x, (pose_name,
                noise_pose) in enumerate(zip(pose_names, noise_poses)):
            try:
                # Latest transform of pose_name expressed in reference_frame
                trans = tf_buffer.lookup_transform(reference_frame, pose_name,
                                                   rospy.Time(0))
                stamp = trans.header.stamp
                translation = trans.transform.translation
                rotation = trans.transform.rotation
                clean_pose = np_frame3_quaternion(translation.x, translation.y,
                                                  translation.z, rotation.x,
                                                  rotation.y, rotation.z,
                                                  rotation.w)
                # Right-multiplication applies the noise in the pose's own frame
                noisy_obs = clean_pose.dot(noise_pose)
                # trans is stored by reference, so the fields updated below
                # also end up in msg.transforms[x]
                msg.transforms[x] = trans
                quat = real_quat_from_matrix(noisy_obs)
                trans.header.stamp = rospy.Time.now()
                trans.transform.translation.x = noisy_obs[0, 3]
                trans.transform.translation.y = noisy_obs[1, 3]
                trans.transform.translation.z = noisy_obs[2, 3]
                trans.transform.rotation.x = quat[0]
                trans.transform.rotation.y = quat[1]
                trans.transform.rotation.z = quat[2]
                trans.transform.rotation.w = quat[3]
            except (tf2_ros.LookupException, tf2_ros.ConnectivityException,
                    tf2_ros.ExtrapolationException) as e:
                print(
                    f'Exception raised while looking up {pose_name} -> {reference_frame}:\n{e}'
                )
                break
        else:
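(The `else:` clause cut off above runs only if the loop completed without hitting the `break` in the exception handler.) The central step is `clean_pose.dot(noise_pose)`: right-multiplication applies the noise transform in the observed pose's own frame. A minimal NumPy/SciPy sketch of that composition, using an illustrative stand-in for `np_frame3_quaternion`, might be:

import numpy as np
from scipy.spatial.transform import Rotation

def frame3_from_quaternion(tx, ty, tz, qx, qy, qz, qw):
    # Illustrative stand-in: build a 4x4 homogeneous transform.
    T = np.eye(4)
    T[:3, :3] = Rotation.from_quat([qx, qy, qz, qw]).as_matrix()
    T[:3, 3] = (tx, ty, tz)
    return T

clean = frame3_from_quaternion(1.0, 0.0, 0.5, 0.0, 0.0, 0.0, 1.0)
noise = frame3_from_quaternion(0.01, -0.02, 0.0, 0.0, 0.0, 0.05, 0.999)
noisy = clean.dot(noise)    # noise applied in the clean pose's local frame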
Example 4
    def _generate_pose_constraints(self, str_path, model):
        # Adds soft constraints aligning the tracked pose at str_path with the
        # given model's pose, or removes them again when model is None.
        with self.lock:
            if str_path in self.tracked_poses:
                align_rotation = '{} align rotation'.format(str_path)
                align_position = '{} align position'.format(str_path)
                if model is not None:
                    m_free_symbols = cm.free_symbols(model.pose)
                    if len(m_free_symbols) > 0:
                        te = self.tracked_poses[str_path]

                        self.joints |= m_free_symbols
                        self.joint_aliases = {s: str(s) for s in self.joints}

                        r_dist = norm(
                            cm.rot_of(model.pose) - cm.rot_of(te.pose))
                        self.soft_constraints[align_rotation] = SC(
                            -r_dist, -r_dist, 1, r_dist)

                        # print(align_position)
                        # print(model.pose)
                        dist = norm(cm.pos_of(model.pose) - cm.pos_of(te.pose))
                        # print('Distance expression:\n{}'.format(dist))
                        # print('Distance expression symbol overlap:\n{}'.format(m_free_symbols.intersection(cm.free_symbols(dist))))
                        # for s in m_free_symbols:
                        #     print('Diff w.r.t {}:\n{}'.format(s, cm.diff(dist, s)))
                        self.soft_constraints[align_position] = SC(
                            -dist, -dist, 1, dist)
                        self.generate_opt_problem()

                        # Avoid crashes due to insufficient perception data. This is not fully correct.
                        if str_path in self._unintialized_poses:
                            state = self.integrator.state.copy(
                            ) if self.integrator is not None else {}
                            state.update({
                                s: 0.0
                                for s in m_free_symbols if s not in state
                            })
                            null_pose = cm.subs(model.pose, state)
                            pos = cm.pos_of(null_pose)
                            quat = real_quat_from_matrix(cm.rot_of(null_pose))
                            msg = PoseMsg()
                            msg.position.x = pos[0]
                            msg.position.y = pos[1]
                            msg.position.z = pos[2]
                            msg.orientation.x = quat[0]
                            msg.orientation.y = quat[1]
                            msg.orientation.z = quat[2]
                            msg.orientation.w = quat[3]
                            te.update_state(msg, self.integrator.state)
                else:
                    regenerate_problem = False
                    if align_position in self.soft_constraints:
                        del self.soft_constraints[align_position]
                        regenerate_problem = True

                    if align_rotation in self.soft_constraints:
                        del self.soft_constraints[align_rotation]
                        regenerate_problem = True

                    if regenerate_problem:
                        self.generate_opt_problem()
Example 5
                        random_rot_normal(len(frames), 0, angular_std))
                ]

                # Calculate forward kinematics of frames
                obs_frames = {
                    k: subs(f, joint_state).dot(n)
                    for (k, f), n in zip(frames.items(), noise)
                }

                for x, (k, f) in enumerate(obs_frames.items()):
                    update_msg.poses[x].header.frame_id = k
                    update_msg.poses[x].pose.position.x = float(f[0, 3])
                    update_msg.poses[x].pose.position.y = float(f[1, 3])
                    update_msg.poses[x].pose.position.z = float(f[2, 3])

                    qx, qy, qz, qw = real_quat_from_matrix(f)
                    update_msg.poses[x].pose.orientation.x = qx
                    update_msg.poses[x].pose.orientation.y = qy
                    update_msg.poses[x].pose.orientation.z = qz
                    update_msg.poses[x].pose.orientation.w = qw

                try:
                    tracker.cb_process_obs(update_msg)

                    time_start = Time.now()
                    tracker.cb_tick(None)

                    # Record average wall time per integrator iteration and the
                    # number of iterations needed for this observation
                    iter_times.append(
                        (Time.now() - time_start).to_sec() /
                        (tracker.integrator.current_iteration + 1))
                    n_iter.append(tracker.integrator.current_iteration + 1)
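The truncated head of this example builds a list of noise transforms with `random_rot_normal`. A purely illustrative, self-contained sketch of such a generator (random axes, normally distributed angles; not the library's implementation) might be:

import numpy as np
from scipy.spatial.transform import Rotation

def random_rot_normal_np(n, mean, std):
    # Draw n 4x4 homogeneous transforms with zero translation and rotation
    # angles sampled from N(mean, std) radians about uniformly random axes.
    out = []
    for _ in range(n):
        axis = np.random.normal(size=3)
        axis /= np.linalg.norm(axis)
        angle = np.random.normal(mean, std)
        T = np.eye(4)
        T[:3, :3] = Rotation.from_rotvec(axis * angle).as_matrix()
        out.append(T)
    return out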
Example 6
def matrix_to_transform(matrix):
    # Split a 4x4 homogeneous matrix into its rotation (as a quaternion) and
    # its translation column, and wrap them in the physics engine's types.
    quat = real_quat_from_matrix(matrix)
    pos = matrix[:3, 3]
    return pb.Transform(pb.Quaternion(*quat), pb.Vector3(*pos))
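A usage sketch, assuming `pb` is the physics engine binding imported by the module that defines matrix_to_transform:

import numpy as np

frame = np.eye(4)
frame[:3, 3] = (0.2, 0.0, 1.0)           # pure translation, identity rotation
transform = matrix_to_transform(frame)   # pb.Transform with a unit quaternion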