def quat_slerp(q1, q2):
    if isinstance(q1, PyQuaternion) and isinstance(q2, PyQuaternion):
        # PyQuaternion.slerp defaults to amount=0.5, so this returns the
        # halfway rotation between q1 and q2
        return PyQuaternion.slerp(q1, q2)
    elif isinstance(q1, Quaternion) and isinstance(q2, Quaternion):
        q3 = PyQuaternion.slerp(quat_geom_to_py(q1), quat_geom_to_py(q2))
        return quat_py_to_geom(q3)
    else:
        return None
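The quat_geom_to_py / quat_py_to_geom converters are not shown above; a minimal sketch, assuming the second branch handles ROS geometry_msgs quaternions (x, y, z, w fields) against pyquaternion's (w, x, y, z) ordering:

from geometry_msgs.msg import Quaternion
from pyquaternion import Quaternion as PyQuaternion

def quat_geom_to_py(q):
    # geometry_msgs stores (x, y, z, w); pyquaternion expects (w, x, y, z)
    return PyQuaternion(q.w, q.x, q.y, q.z)

def quat_py_to_geom(q):
    e = q.elements  # (w, x, y, z)
    return Quaternion(x=e[1], y=e[2], z=e[3], w=e[0])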
Example No. 2
    def test_slerp_extensive(self):
        for axis in [[1, 0, 0], [0, 1, 0], [0, 0, 1]]:
            q1 = Quaternion(axis=axis, angle=0.0)
            q2 = Quaternion(axis=axis, angle=pi/2.0)
            q3 = Quaternion(axis=axis, angle=pi*3.0/2.0)
            for t in np.arange(0.1, 1, 0.1):
                q4 = Quaternion.slerp(q1, q2, t)
                q5 = Quaternion.slerp(q1, q3, t)
                q6 = Quaternion(axis=axis, angle=t*pi/2)
                q7 = Quaternion(axis=axis, angle=-t*pi/2)
                # slerp takes the shortest arc, so interpolating towards
                # 3*pi/2 rotates through negative angles
                assert q4 == q6 or q4 == -q6
                assert q5 == q7 or q5 == -q7
Example No. 3
    def interpolate(self, b):
        current_model_state = self.get_model_state(model_name="piano2")
        quat = Q(current_model_state.pose.orientation.x,
                 current_model_state.pose.orientation.y,
                 current_model_state.pose.orientation.z,
                 current_model_state.pose.orientation.w)
        a = SE3(current_model_state.pose.position.x,
                current_model_state.pose.position.y,
                current_model_state.pose.position.z, quat)

        x_inc = b.X - a.X
        y_inc = b.Y - a.Y
        z_inc = b.Z - a.Z
        mag = sqrt(x_inc * x_inc + y_inc * y_inc + z_inc * z_inc)
        x_inc = x_inc / mag * inc
        y_inc = y_inc / mag * inc
        z_inc = z_inc / mag * inc

        x = a.X
        y = a.Y
        z = a.Z
        q = a.q
        cur = a
        while (SE3.euclid_dist(cur, b) > inc
               or fabs(Q.sym_distance(q, b.q)) > steering_inc):
            self.set_position(cur)
            self.set_steering_angle(cur.q)
            if SE3.euclid_dist(cur, b) > inc:
                x += x_inc
                y += y_inc
                z += z_inc
            if Q.sym_distance(q, b.q) > steering_inc:
                q = Q.slerp(q, b.q, amount=steering_inc)
            cur = SE3(x, y, z, q)
            sleep(0.05)
        self.set_state(b)
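The step sizes inc and steering_inc are module-level globals that this example does not show; illustrative values (assumptions, not from the source):

inc = 0.05           # translation step per loop iteration, assumed metres
steering_inc = 0.05  # slerp amount / angular tolerance per iteration (assumed)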
Example No. 4
def interpolate_pose_using_slerp(command_matrix):
    pos_mat = command_matrix[:, 0:3].copy()
    quat_mat = command_matrix[:, 3:7].copy()

    # normalised cumulative path length in (0, 1] drives the slerp amount
    distance_mat = numpy.linalg.norm(pos_mat[1:] - pos_mat[:-1], axis=1)
    steps = numpy.cumsum(distance_mat, axis=0)
    steps /= steps[-1]

    # only the first and last quaternions are used; flip one if they lie in
    # opposite hemispheres so slerp takes the shorter arc
    qxyzw0 = numpy.asarray(quat_mat[0]).reshape(-1)
    qxyzw1 = numpy.asarray(quat_mat[-1]).reshape(-1)

    if numpy.dot(qxyzw0, qxyzw1) < 0:
        qxyzw0 = -qxyzw0

    # pyquaternion uses (w, x, y, z) ordering
    q0 = Quaternion(
        qxyzw0[3],
        qxyzw0[0],
        qxyzw0[1],
        qxyzw0[2],
    )
    q1 = Quaternion(
        qxyzw1[3],
        qxyzw1[0],
        qxyzw1[1],
        qxyzw1[2],
    )
    interpolated_q = [q0]
    for i in steps:
        interpolated_q.append(Quaternion.slerp(q0, q1, i))
    # convert the elements back to (x, y, z, w) ordering
    interpolated_mat = [[
        i.elements[1], i.elements[2], i.elements[3], i.elements[0]
    ] for i in interpolated_q]

    command_matrix[:, 3:7] = interpolated_mat
    return command_matrix
Example No. 5
def precalculate_interpolation(p1, q1, p2, q2, duration, last_pos, start_cmd, joint_names):
    q1, q2 = [Quaternion(x) for x in [q1, q2]]
    spline = QuinticSpline(p1, p2, duration)
    num_queries = int(CONTROL_RATE * duration / INTERP_SKIP) + 1
    jas = []
    last_cmd = start_cmd
    for t in np.linspace(0., duration, num_queries):
        cart_pos = spline.get(t)[0][0]
        interp_quat = Quaternion.slerp(q1, q2, t / duration)
        interp_pose = state_to_pose(cart_pos[:3], interp_quat.elements)
        z_angle = interp_quat.yaw_pitch_roll[0] + np.pi
        try:
            interp_ja = pose_to_ja(interp_pose, last_cmd,
                                   debug_z=z_angle * 180 / np.pi, retry_on_fail=True)
            last_cmd = interp_ja
            interp_ja = np.array([interp_ja[j] for j in joint_names])
            jas.append(interp_ja)
            last_pos = interp_ja
        except EnvironmentError:
            jas.append(last_pos)
            logging.getLogger('robot_logger').error('ignoring IK failure')

    interp_ja = []
    for i in range(len(jas) - 1):
        interp_ja.append(jas[i].tolist())
        for j in range(1, INTERP_SKIP):
            t = float(j) / INTERP_SKIP
            interp_point = (1 - t) * jas[i] + t * jas[i + 1]
            interp_ja.append(interp_point.tolist())
    interp_ja.append(jas[-1].tolist())

    return interp_ja
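CONTROL_RATE and INTERP_SKIP are module-level constants not shown in this example; the code only needs the controller rate in Hz and the number of linearly filled joint-space sub-steps per IK query. Illustrative values (assumed):

CONTROL_RATE = 100  # controller command rate in Hz (assumed)
INTERP_SKIP = 5     # joint-space sub-steps filled per IK query (assumed)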
Example No. 6
    def render(self, pv_matrix: np.ndarray,
               tracked_cam_track_pos_float: float):
        camera_pos_floor = int(np.floor(tracked_cam_track_pos_float))
        assert camera_pos_floor >= 0
        camera_pos_ceil = int(np.ceil(tracked_cam_track_pos_float))
        assert camera_pos_ceil <= len(self._cam_poses) - 1
        camera_pos_fraction = tracked_cam_track_pos_float - camera_pos_floor
        assert 0 <= camera_pos_fraction <= 1

        floor_quaternion = Quaternion(
            matrix=self._cam_poses[camera_pos_floor].r_mat)
        ceil_quaternion = Quaternion(
            matrix=self._cam_poses[camera_pos_ceil].r_mat)
        rotation_matrix = Quaternion.slerp(floor_quaternion, ceil_quaternion,
                                           camera_pos_fraction).rotation_matrix

        floor_trans_vector = self._cam_poses[camera_pos_floor].t_vec
        ceil_trans_vector = self._cam_poses[camera_pos_ceil].t_vec
        translation_vector = ceil_trans_vector * camera_pos_fraction + floor_trans_vector * (
            1 - camera_pos_fraction)

        # cam_model_pose_matrix = _get_pose_matrix(self._cam_poses[camera_pos_floor])
        cam_model_pose_matrix = _get_pose_matrix(
            data3d.Pose(r_mat=rotation_matrix, t_vec=translation_vector))
        cam_mvp = pv_matrix.dot(cam_model_pose_matrix)

        self._cam_model_renderer.render(cam_mvp)
        self._cam_frustrum_renderer.render(cam_mvp)
Example No. 7
    def __getitem__(self, time):
        times = self.t
        if self.circuit:
            start = min(times)
            # wrap time back into the track interval for circuits (the
            # presumed intent of the original expression)
            time = (time - start) % len(self) + start
        x = self.__filter__(self.xfn(time))
        y = self.__filter__(self.yfn(time))
        z = self.__filter__(self.zfn(time))
        q = self.q
        pos = Position(x, y, z)

        start = min(times)
        stop = max(times)
        if time < start:
            quat = self.wps[0]['pose'].quaternion
        elif time > stop:
            quat = self.wps[-1]['pose'].quaternion
        else:
            for start_idx, pair in enumerate(pairwise(times)):
                if pair[0] <= time <= pair[1]:
                    quat1 = q[start_idx]
                    quat2 = q[start_idx + 1]
                    quat_times = (pair[0], pair[1])
                    break
            percent = (time - quat_times[0]) / (quat_times[1] - quat_times[0])
            quat = Quaternion.from_py_quaternion(
                pyQuaternion.slerp(quat1, quat2, percent))
        return Pose(pos, quat)
Example No. 8
    def get_boxes(self, sample_data_token: str) -> List[Box]:
        """
        Instantiates Boxes for all annotations of a particular sample_data record. If the sample_data is a
        keyframe, this returns the annotations for that sample. But if the sample_data is an intermediate
        sample_data, a linear interpolation is applied to estimate the location of the boxes at the time the
        sample_data was captured.
        :param sample_data_token: Unique sample_data identifier.
        """

        # Retrieve sensor & pose records
        sd_record = self.get('sample_data', sample_data_token)
        curr_sample_record = self.get('sample', sd_record['sample_token'])

        if curr_sample_record['prev'] == "" or sd_record['is_key_frame']:
            # If no previous annotations are available, or if the sample_data is a keyframe, just return the current ones.
            boxes = list(map(self.get_box, curr_sample_record['anns']))

        else:
            prev_sample_record = self.get('sample', curr_sample_record['prev'])

            curr_ann_recs = [self.get('sample_annotation', token) for token in curr_sample_record['anns']]
            prev_ann_recs = [self.get('sample_annotation', token) for token in prev_sample_record['anns']]

            # Maps instance tokens to prev_ann records
            prev_inst_map = {entry['instance_token']: entry for entry in prev_ann_recs}

            t0 = prev_sample_record['timestamp']
            t1 = curr_sample_record['timestamp']
            t = sd_record['timestamp']

            # There are rare situations where the timestamps in the DB are off so ensure that t0 < t < t1.
            t = max(t0, min(t1, t))

            boxes = []
            for curr_ann_rec in curr_ann_recs:

                if curr_ann_rec['instance_token'] in prev_inst_map:
                    # If the annotated instance existed in the previous frame, interpolate center & orientation.
                    prev_ann_rec = prev_inst_map[curr_ann_rec['instance_token']]

                    # Interpolate center.
                    center = [np.interp(t, [t0, t1], [c0, c1]) for c0, c1 in zip(prev_ann_rec['translation'],
                                                                                 curr_ann_rec['translation'])]

                    # Interpolate orientation.
                    rotation = Quaternion.slerp(q0=Quaternion(prev_ann_rec['rotation']),
                                                q1=Quaternion(curr_ann_rec['rotation']),
                                                amount=(t - t0) / (t1 - t0))

                    box = Box(center, curr_ann_rec['size'], rotation, name=curr_ann_rec['category_name'],
                              token=curr_ann_rec['token'])
                else:
                    # If not, simply grab the current annotation.
                    box = self.get_box(curr_ann_rec['token'])

                boxes.append(box)
        return boxes
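In isolation, the interpolation step above is just a weighted blend per timestamp; a minimal, self-contained sketch with made-up values:

import numpy as np
from pyquaternion import Quaternion

t0, t1, t = 0.0, 2.0, 0.5                    # made-up timestamps
c0, c1 = [0.0, 0.0, 0.0], [4.0, 0.0, 0.0]    # made-up box centers
center = [np.interp(t, [t0, t1], [a, b]) for a, b in zip(c0, c1)]  # [1.0, 0.0, 0.0]
rotation = Quaternion.slerp(q0=Quaternion(axis=[0, 0, 1], angle=0.0),
                            q1=Quaternion(axis=[0, 0, 1], angle=np.pi / 2),
                            amount=(t - t0) / (t1 - t0))  # 22.5 degrees about z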
Example No. 9
def get_interpolated_quat(interp_time, scan_time, q_array):
	q_interp_array = []
	for i in range(0, len(scan_time)-1):
		# count how many interpolation timestamps fall inside this scan interval
		n = np.sum(np.logical_and(interp_time >= scan_time[i], interp_time < scan_time[i+1]))
		for j in range(1, int(n)+1):
			q_interp_array.append(Quaternion.slerp(q_array[i], q_array[i+1], float(j)/(n+1)))
	q_interp_array.append(q_array[-1])
	return q_interp_array
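A usage sketch (pyquaternion assumed): three interpolation timestamps between two scans yield three interpolated quaternions plus the final scan orientation:

import numpy as np
from pyquaternion import Quaternion

scan_time = np.array([0.0, 1.0])
interp_time = np.array([0.25, 0.5, 0.75])
q_array = [Quaternion(axis=[0, 0, 1], angle=0.0),
           Quaternion(axis=[0, 0, 1], angle=np.pi / 2)]
qs = get_interpolated_quat(interp_time, scan_time, q_array)  # len(qs) == 4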
Example No. 10
def quat_average(input):
    """Average quaternions by running slerp.

    input: n x 4 array of quaternion elements (w, x, y, z)
    """
    if len(input) == 0:
        return Quaternion(0, 0, 0, 0)
    else:
        avg = Quaternion(input[0])
        for l1 in range(1, input.shape[0]):
            avg = Quaternion.slerp(avg, Quaternion(input[l1]), 1 / (l1 + 1))
        return avg
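The running slerp weights the k-th sample by 1/(k+1), which for nearby quaternions gives every sample roughly equal weight; a usage sketch:

import numpy as np
from pyquaternion import Quaternion

samples = np.array([Quaternion(axis=[0, 0, 1], angle=a).elements
                    for a in (0.08, 0.10, 0.12)])
avg = quat_average(samples)
print(avg.axis, avg.angle)  # ~[0, 0, 1], ~0.10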
Example No. 11
def _interpolate_cam_pose(t: float, p0: data3d.Pose, p1: data3d.Pose):
    def to_quat(m):
        # trace-based rotation-matrix-to-quaternion conversion; assumes
        # 1 + trace(m) > 0 so the scalar part qr stays well-defined
        qr = np.sqrt(1 + m[0][0] + m[1][1] + m[2][2]) / 2
        return Quaternion(
            np.array([
                qr, (m[2][1] - m[1][2]) / 4 / qr, (m[0][2] - m[2][0]) / 4 / qr,
                (m[1][0] - m[0][1]) / 4 / qr
            ]))

    return data3d.Pose(
        Quaternion.slerp(to_quat(p0.r_mat), to_quat(p1.r_mat),
                         t).rotation_matrix.astype(np.float32),
        p0.t_vec * (1 - t) + p1.t_vec * t)
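Note that pyquaternion can do this conversion directly, as other examples here do (Quaternion(matrix=p0.r_mat)); the hand-rolled to_quat above is only valid while 1 + trace(m) > 0.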
Example No. 12
    def act(self, obs):
        if self._t == 0:
            self._target = np.random.uniform(self._range[0], self._range[1])
            y = -(self._target[1] - obs[self._obs_name][1])
            x = self._target[0] - obs[self._obs_name][0]
            self._target_quat = self._calculate_quat(np.arctan2(y, x))
        
        if np.random.uniform() < self._p:
            self._gripper *= -1

        quat_t = Quaternion.slerp(self._base_quat, self._target_quat, min(1, float(self._t) / 50))
        velocities = self._get_velocities(self._target - obs[self._obs_name], quat_t)
        action = np.concatenate((velocities, [self._gripper]))
        self._t += 1
        return action
Example No. 13
def interpolate_pose(pose1, pose2, alpha):
    assert 0 <= alpha <= 1

    # interpolating rotation component
    q1 = Quaternion(matrix=pose1)
    q2 = Quaternion(matrix=pose2)
    q_interp = Quaternion.slerp(q1, q2, alpha)
    rot_interp = q_interp.transformation_matrix

    # interpolating translation component
    t1 = pose1[0:3, 3]
    t2 = pose2[0:3, 3]
    trans_interp = t1 * (1. - alpha) + t2 * alpha
    # the transformation matrix of a pure rotation has zero translation, so
    # this writes the interpolated translation into the last column
    rot_interp[0:3, 3] = rot_interp[0:3, 3] + trans_interp
    return rot_interp
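A usage sketch blending two 4x4 homogeneous poses halfway:

import numpy as np

pose_a = np.eye(4)
pose_b = np.eye(4)
pose_b[0, 3] = 2.0  # translate 2 units along x
mid = interpolate_pose(pose_a, pose_b, 0.5)  # mid[0:3, 3] ~ [1, 0, 0]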
Example No. 14
def handQuatUpd():
    # slerp a fraction (1 / 2**(1 + count)) of the way from the previous to
    # the current hand quaternion and return it as a Pose orientation
    global q_out, q_prev, count
    pose = Pose()

    Q1 = Quaternion(q_out)
    Q0 = Quaternion(q_prev)

    Q05 = Quaternion.slerp(Q0, Q1, 1 / (2**(1 + count)))
    print(Q05)

    pose.orientation.w = Q05[0]
    pose.orientation.x = Q05[1]
    pose.orientation.y = Q05[2]
    pose.orientation.z = Q05[3]

    return pose.orientation
Example No. 15
    def check_collide(a, b):
        x_inc = b.X - a.X
        y_inc = b.Y - a.Y
        z_inc = b.Z - a.Z
        mag = sqrt(x_inc * x_inc + y_inc * y_inc + z_inc * z_inc)
        x_inc = x_inc / mag * inc
        y_inc = y_inc / mag * inc
        z_inc = z_inc / mag * inc

        x = a.X
        y = a.Y
        z = a.Z
        q = a.q

        cur = SE3(x, y, z, q)
        T, R = b.get_transition_rotation()

        currlerp = 0
        if pqp_client(T, R).result:
            return False
        looping = True
        while looping:
            looping = False
            T, R = cur.get_transition_rotation()

            if pqp_client(T, R).result:
                # Collision
                return False

            if SE3.euclid_dist(cur, b) > inc:
                looping = True
                cur.X += x_inc
                cur.Y += y_inc
                cur.Z += z_inc
            if fabs(Q.sym_distance(cur.q, b.q)) > .1:
                looping = True
                cur.q = Q.slerp(a.q, b.q, amount=currlerp)
                currlerp += steering_inc

        T, R = cur.get_transition_rotation()
        if pqp_client(T, R).result:
            return False
        return True
Example No. 16
    def evaluate(self, t):
        t0, data0 = self.data_time.get(block=True)
        t1, data1 = self.data_time.get(block=True)
        q = []
        for i in range(0, len(t)):
            while t[i] > t1:
                t0 = t1
                data0 = data1
                t1, data1 = self.data_time.get(block=True)

            # here I assumed roll, pitch, yaw, but the data may arrive in a different order
            q0 = transformations.quaternion_from_euler(data0[0], data0[1],
                                                       data0[2])
            q1 = transformations.quaternion_from_euler(data1[0], data1[1],
                                                       data1[2])
            # quaternion_from_euler returns (x, y, z, w); reorder for pyquaternion's (w, x, y, z)
            q0 = Quaternion(q0[3], q0[0], q0[1], q0[2])
            q1 = Quaternion(q1[3], q1[0], q1[1], q1[2])

            w = (t[i] - t0) / (t1 - t0)
            yield Quaternion.slerp(q0, q1, w)
Example No. 17
def interpolate_tracking_boxes(left_box: TrackingBox, right_box: TrackingBox,
                               right_ratio: float) -> TrackingBox:
    """
    Linearly interpolate box parameters between two boxes.
    :param left_box: A TrackingBox.
    :param right_box: Another TrackingBox.
    :param right_ratio: Weight given to the right box.
    :return: The interpolated TrackingBox.
    """
    def interp_list(left, right, rratio):
        return tuple((1.0 - rratio) * np.array(left, dtype=float) +
                     rratio * np.array(right, dtype=float))

    def interp_float(left, right, rratio):
        return (1.0 - rratio) * float(left) + rratio * float(right)

    # Interpolate quaternion.
    rotation = Quaternion.slerp(q0=Quaternion(left_box.rotation),
                                q1=Quaternion(right_box.rotation),
                                amount=right_ratio).elements

    # Score will remain -1 for GT.
    tracking_score = interp_float(left_box.tracking_score,
                                  right_box.tracking_score, right_ratio)

    return TrackingBox(
        sample_token=right_box.sample_token,
        translation=interp_list(left_box.translation, right_box.translation,
                                right_ratio),
        size=interp_list(left_box.size, right_box.size, right_ratio),
        rotation=rotation,
        velocity=interp_list(left_box.velocity, right_box.velocity,
                             right_ratio),
        ego_translation=interp_list(left_box.ego_translation,
                                    right_box.ego_translation,
                                    right_ratio),  # May be inaccurate.
        tracking_id=right_box.tracking_id,
        tracking_name=right_box.tracking_name,
        tracking_score=tracking_score)
Example No. 18
    def act(self, obs):
        if self._t == 0:
            self._start = -1
            try:
                y = -(obs['{}_pos'.format(self._object_name)][1] - obs[self._obs_name][1])
                x = obs['{}_pos'.format(self._object_name)][0] - obs[self._obs_name][0]
            except:
                import pdb; pdb.set_trace()
            angle = np.arctan2(y, x) - np.pi/3 if 'Cereal' in self._object_name else np.arctan2(y, x)
            self._target_quat = self._calculate_quat(angle)

        if self._start < 0 and self._t < 15:
            if np.linalg.norm(obs['{}_pos'.format(self._object_name)] - obs[self._obs_name] + [0, 0, self._hover_delta]) < self._g_tol or self._t == 14:
                self._start = self._t
                self._rise_t = self._start + 8

            quat_t = Quaternion.slerp(self._base_quat, self._target_quat, min(1, float(self._t) / 5))
            eef_pose = self._get_target_pose(obs['{}_pos'.format(self._object_name)] - obs[self._obs_name] + [0, 0, self._hover_delta], obs['eef_pos'], quat_t)
            action = np.concatenate((eef_pose, [-1]))
        elif self._t < self._start + 10: 
            if self._t  < self._rise_t:
                eef_pose = self._get_target_pose(obs['{}_pos'.format(self._object_name)] - obs[self._obs_name] - [0, 0, self._clearance], obs['eef_pos'], self._target_quat)  
                action = np.concatenate((eef_pose, [-1]))
            else:
                eef_pose = self._get_target_pose(obs['{}_pos'.format(self._object_name)] - obs[self._obs_name] + [0, 0, self._hover_delta], obs['eef_pos'], self._target_quat)
                action = np.concatenate((eef_pose, [1]))

        elif np.linalg.norm(self._target_loc - obs[self._obs_name]) > self._final_thresh: 
            target = self._target_loc        
            eef_pose = self._get_target_pose(target - obs[self._obs_name], obs['eef_pos'], self._target_quat)
            action = np.concatenate((eef_pose, [1]))
        else:
            eef_pose = self._get_target_pose(np.zeros(3), obs['eef_pos'], self._target_quat)
            action = np.concatenate((eef_pose, [-1]))
        
        self._t += 1
        return action
Example No. 19
def move_straight(broker, start_pos, target_pos, **kwargs):
    '''
    move_straight moves the robot in a straight line
    interpolating the positions in between

    Kwargs:
        num_steps:      the number of steps to interpolate between
                        (default 10)
        max_stepwidth:  the maximum euclidean distance between two steps
                        (default 0.05)
        by default, the one requiring more steps will be used
        time_to_go:     the available time to go to target
                        (default 0.5)
    '''
    # check that both positions have the same size, and that it's either 3 or 7
    start_size = np.asarray(start_pos).size
    target_size = np.asarray(target_pos).size
    if start_size != target_size or start_size not in (3, 7):
        raise ValueError("The input positions need to be the same size and have "
                         "either 3 or 7 components")
    # read the kwargs, falling back to the documented defaults
    num_steps = kwargs.get('num_steps', 10)
    max_stepwidth = kwargs.get('max_stepwidth', 0.05)
    time_to_go = kwargs.get('time_to_go', 0.5)

    # calculate the offset/displacement between start and target
    displacement = target_pos - start_pos
    distance = np.linalg.norm(displacement[0:3])  # consider only position
    # calculate the number of steps by choosing whichever value needs more
    num_steps = np.amax(np.asarray([
        np.ceil(distance / max_stepwidth),
        num_steps]))
    num_steps = np.int64(num_steps)

    for step in range(num_steps):
        fraction = step / num_steps
        new_xyz = start_pos[0:3] + fraction * displacement[0:3]
        if start_pos.size == 7 and target_pos.size == 7:
            new_quat = Quaternion.slerp(
                Quaternion(start_pos[3:]),
                Quaternion(target_pos[3:]),
                fraction)
            new_pos = np.concatenate([new_xyz, np.asarray(new_quat.q)])
        else:
            new_pos = new_xyz
        set_new_pos(broker, new_pos, ctrl_mode=0, time_to_go=time_to_go)
        wait_til_ready(broker)
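The "whichever requires more steps" rule from the docstring, in isolation with made-up numbers: covering 0.72 m at the default 0.05 m step width needs ceil(14.4) = 15 steps, which beats the default of 10:

import numpy as np

distance, max_stepwidth, default_steps = 0.72, 0.05, 10
steps = np.int64(np.amax(np.asarray([np.ceil(distance / max_stepwidth),
                                     default_steps])))  # -> 15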
Example No. 20
def catmull_rom_spline(camera_poses: List[np.ndarray],
                       num_points: int) -> List[np.ndarray]:
    """
    Interpolate points on a Catmull-Rom spline
    Reading: Edwin Catmull, Raphael Rom, A CLASS OF LOCAL INTERPOLATING SPLINES,
                Computer Aided Geometric Design, Academic Press, 1974, Pages 317-326
    :param camera_poses: List of 4 camera poses. Points are interpolated on the curve segment between pose 1 and 2
    :param num_points: Number of points to interpolate between pose 1 and 2
    :return: A list of num_points points on the curve segment between pose 1 and 2
    """
    # Formulas from https://en.wikipedia.org/wiki/Centripetal_Catmull%E2%80%93Rom_spline
    assert len(
        camera_poses) == 4, "Need exactly 4 camera poses for interpolation"
    interpolated_cameras = []

    # rotations
    q1 = Quaternion(
        matrix=find_closest_orthogonal_matrix(camera_poses[1][:3, :3]))
    q2 = Quaternion(
        matrix=find_closest_orthogonal_matrix(camera_poses[2][:3, :3]))

    # translations
    translations = [pose[:3, 3] for pose in camera_poses]

    if np.linalg.norm(translations[1] - translations[2]) < 0.001:
        for i in range(num_points):
            ext = np.eye(4)
            ext[:3, :3] = Quaternion.slerp(q1, q2,
                                           float(i) /
                                           num_points).rotation_matrix
            ext[:3, 3] = translations[1]
            interpolated_cameras.append(ext)
    else:
        eps = 0.001
        alpha = 0.5  # Default for centripetal catmull rom splines

        # Calculate knot parameters
        t0 = 0.0
        t1 = np.linalg.norm(translations[1] - translations[0])**alpha + t0
        t2 = np.linalg.norm(translations[2] - translations[1])**alpha + t1
        t3 = np.linalg.norm(translations[3] - translations[2])**alpha + t2

        # Calculate points on curve segment C
        new_points = []
        for t in np.arange(t1, t2, (t2 - t1) / float(num_points)):
            A1 = (t1 - t) / max(eps, t1 - t0) * translations[0] + (
                t - t0) / max(eps, t1 - t0) * translations[1]
            A2 = (t2 - t) / max(eps, t2 - t1) * translations[1] + (
                t - t1) / max(eps, t2 - t1) * translations[2]
            A3 = (t3 - t) / max(eps, t3 - t2) * translations[2] + (
                t - t2) / max(eps, t3 - t2) * translations[3]

            B1 = (t2 - t) / max(eps, t2 - t0) * A1 + (t - t0) / max(
                eps, t2 - t0) * A2
            B2 = (t3 - t) / max(eps, t3 - t1) * A2 + (t - t1) / max(
                eps, t3 - t1) * A3

            C = (t2 - t) / max(eps, t2 - t1) * B1 + (t - t1) / max(
                eps, t2 - t1) * B2
            new_points.append(C)

        # For each point, also slerp the rotation matrix
        for idx, point in enumerate(new_points):
            ext = np.eye(4)
            ext[:3, :3] = Quaternion.slerp(q1, q2,
                                           float(idx) /
                                           len(new_points)).rotation_matrix
            ext[:3, 3] = point
            interpolated_cameras.append(ext)

    return interpolated_cameras
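A usage sketch with four identity-rotation poses spaced along x; find_closest_orthogonal_matrix is an external helper assumed to be in scope (with exactly orthogonal inputs it should be a no-op):

import numpy as np

camera_poses = []
for x in (0.0, 1.0, 2.0, 3.0):
    pose = np.eye(4)
    pose[0, 3] = x
    camera_poses.append(pose)
segment = catmull_rom_spline(camera_poses, num_points=5)  # 5 poses between x=1 and x=2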
Example No. 21
def imu_callback(data):

    # sample time
    global dt
    dt = 1 / 200.0

    # get gyro data
    w = np.array([
        data.angular_velocity.x, data.angular_velocity.y,
        data.angular_velocity.z
    ])

    # get accelerometer data: specific force in body frame
    f_b = np.array([
        data.linear_acceleration.x, data.linear_acceleration.y,
        data.linear_acceleration.z
    ])

    ### Update state ###

    # get current state
    global state

    # current position
    p = state[0]

    # current velocity
    v = state[1]

    # current orientation
    b = state[2]

    # current accel bias
    x_a = state[3]

    # current gyro bias
    x_g = state[4]

    # subtract out gyroscope bias. w_bn = (-w_nb)
    w = -1 * (w - x_g)
    w_norm = np.linalg.norm(w)

    # subtract out accelerometer bias
    f_b = f_b - x_a

    # differential rotation over dt: [w, x, y, z]
    # (w_norm == 0 would divide by zero; real gyro data is assumed never to
    # match the bias estimate exactly)
    db = np.concatenate(
        ([math.cos(w_norm * dt / 2)], math.sin(w_norm * dt / 2) * w / w_norm))

    # convert to pyquaternion format
    b_prev = Quaternion(array=b)
    db = Quaternion(array=db)

    # update orientation
    b_next = db * b_prev

    # get average quaternion by interpolation
    b_avg = Quaternion.slerp(b_prev, b_next, amount=0.5)

    # b is the nav to body transformation. we need body to nav transformation -> invert
    b_body_to_nav_avg = b_avg.inverse  # for specific force (5.9 Principles of GNSS book)

    # rotate specific force into inertial frame
    f_i = b_body_to_nav_avg.rotate(f_b)

    # gravity vector
    global g
    g_vec = np.array([0, 0, g])

    # get acceleration in inertial frame. (acceleration of body wrt inertial frame in inertial frame)
    a_i = f_i + g_vec

    # update position (5.16 Principles of GNSS book)
    p = p + v * dt + 0.5 * a_i * dt**2

    # DEBUG:
    if np.linalg.norm(v * dt + 0.5 * a_i * dt**2) > 0.1:
        print("IMU update crazy")
        print(state)
    # 	print(np.linalg.norm(v*dt + 0.5*a_i*dt**2) )
    # 	print(v)
    # 	print(a_i)
    # 	print(x_a)
    # 	print(x_g)
    # 	print(w)
    # 	print()
    # 	rospy.sleep(100)

    # update velocity
    v = v + a_i * dt

    # store in state -> this is time propagation step.
    state[0] = p
    state[1] = v
    state[2] = b_next.elements

    ### Update covariance ###

    # get rotation matrix corresponding to b_next
    b_body_to_nav = b_next.inverse
    R_body_to_nav = b_body_to_nav.rotation_matrix

    # compute F matrix: F is 15 x 15
    F = np.zeros((15, 15))

    # store relevant values in F (see writeup)
    F[:3, 3:6] = np.identity(3)
    f_i_skew = to_skew(f_i)
    F[3:6, 6:9] = -1 * f_i_skew
    F[3:6, 9:12] = -1 * R_body_to_nav
    F[6:9, 12:15] = R_body_to_nav
    F_gg = -1.0 / 1000 * np.identity(3)
    F_aa = -1.0 / 1000 * np.identity(3)
    F[9:12, 9:12] = F_aa
    F[12:15, 12:15] = F_gg

    # compute system transition matrix Phi
    Phi = expm(F * dt)

    # compute G. G is 15 x 12.
    G = np.zeros((15, 12))
    G[3:6, :3] = -R_body_to_nav
    G[6:9, 3:6] = R_body_to_nav
    G[9:12, 6:9] = np.identity(3)
    G[12:15, 9:12] = np.identity(3)

    # get noise matrix
    global Q

    # compute Qdk (discrete noise). Qdk is 15 x 15
    Qdk = G.dot(Q).dot(G.T) * dt

    # get previous covariance
    global cov

    # update covariance (15x15)
    cov = Phi.dot(cov).dot(Phi.T) + Qdk

    ### Measurement update stuff for acceleration. Helps correct roll and pitch ###

    # if 50 accelerometer readings were close enough to the gravity vector, robot is stationary
    global accel_counter, rover_stationary, g_pred_sum

    # predict gravity in navigation frame and store prediction in global variable. used in filter.
    num_g_pred = 50

    # check if current measurement is stationary
    if np.abs(
            np.linalg.norm(f_b) - g
    ) < 0.03:  # tuned. test: you should never be able to hold the imu in your hand and have an update.
        accel_counter += 1
        g_pred_sum += R_body_to_nav.dot(-1 * f_b)
    else:
        accel_counter = 0
        g_pred_sum = 0
        rover_stationary = False

    # if 50 consecutive stationary, use accel_data
    if accel_counter == num_g_pred:
        global g_pred
        # averaging
        g_pred = g_pred_sum / num_g_pred
        rover_stationary = True
        accel_counter = 0
        g_pred_sum = 0

    ### Visualization ###

    # for visualization: publish tf
    if True:
        b_vis = b_body_to_nav

        # useful for visualization
        br = tf.TransformBroadcaster()

        # send world to IMU transformation
        br.sendTransform((p[0], p[1], p[2]),
                         (b_vis[1], b_vis[2], b_vis[3], b_vis[0]),
                         rospy.Time.now(), "IMU", "ENU")  # ENU to quat
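to_skew is referenced above but not shown; a minimal sketch of the cross-product (skew-symmetric) matrix it is presumed to build:

import numpy as np

def to_skew(v):
    # [v]x such that to_skew(v).dot(u) == np.cross(v, u)
    return np.array([[0.0, -v[2], v[1]],
                     [v[2], 0.0, -v[0]],
                     [-v[1], v[0], 0.0]])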
Example No. 22
    def process(self, df, participant=None, video=None):
        print("Process LowPassFilter", participant, video)
        df = df.sort_values(by="time")

        # derive the sensor names from the column prefixes
        sensors = list({col.split("_")[0] for col in df.columns if col != "time"})

        low = max(min(self.hbias - (self.hrange / 2), 1), 0)
        high = max(min(self.hbias + (self.hrange / 2), 1), 0)
        hrangeLimited = high - low
        y = {}
        result = {}
        for sensor in sensors:
            yrow = df.iloc[[0]][[
                "%s_orientation_q_w" % sensor,
                "%s_orientation_q_x" % sensor,
                "%s_orientation_q_y" % sensor,
                "%s_orientation_q_z" % sensor
            ]]
            y[sensor] = Quaternion(yrow["%s_orientation_q_w" % sensor],
                                   yrow["%s_orientation_q_x" % sensor],
                                   yrow["%s_orientation_q_y" % sensor],
                                   yrow["%s_orientation_q_z" % sensor])
            if y[sensor].norm == 0.0:
                y[sensor] = Quaternion(1, 0, 0, 0)
            if sensor not in result:
                result[sensor] = []
            result[sensor].append(list(y[sensor]))

        for i in range(1, len(df)):
            rowresult = []
            for sensor in sensors:
                xrow = df.iloc[[i]][[
                    "%s_orientation_q_w" % sensor,
                    "%s_orientation_q_x" % sensor,
                    "%s_orientation_q_y" % sensor,
                    "%s_orientation_q_z" % sensor
                ]]
                x = Quaternion(xrow["%s_orientation_q_w" % sensor],
                               xrow["%s_orientation_q_x" % sensor],
                               xrow["%s_orientation_q_y" % sensor],
                               xrow["%s_orientation_q_z" % sensor])
                if x.norm == 0.0:
                    x = Quaternion(1, 0, 0, 0)
                # map the quaternion distance (0..pi) into the limited
                # slerp-amount range, then low-pass towards the new sample
                d = Quaternion.distance(y[sensor], x)
                hlpf = d / math.pi * hrangeLimited + low
                y[sensor] = Quaternion.slerp(y[sensor], x, amount=hlpf)
                result[sensor].append(list(y[sensor]))

        dfs = []
        dfs.append(pd.DataFrame({"time": df.time}))
        for sensor in sensors:
            newdf = pd.DataFrame(result[sensor],
                                 columns=[
                                     "%s_orientation_q_%s" % (sensor, s)
                                     for s in list("wxyz")
                                 ])
            dfs.append(newdf)
        newdf = pd.concat(dfs, axis=1, join='inner')
        return newdf
Example No. 23
    def interpolate(source, target, alpha):
        iquat = Quaternion.slerp(source.q, target.q, alpha)
        it = source.t * (1 - alpha) + target.t * alpha
        return Isometry(q=iquat, t=it)
Example No. 24
def merge_pose_to_gps_interpolate(pose_df, gps_imu_df, time_key='Timestamp'):
    gps_imu_df = gps_imu_df.sort_values(time_key, ignore_index=True)
    with tqdm(total=pose_df.shape[0]) as pbar:
        x_list = []
        y_list = []
        z_list = []
        qw_list = []
        qx_list = []
        qy_list = []
        qz_list = []
        roll_list = []
        pitch_list = []
        yaw_list = []
        time_list = []
        for index, row in pose_df.iterrows():
            index_p = index + 1
            if index_p >= pose_df.shape[0]:
                pbar.update(1)
                continue
            ref_time = pose_df[time_key][index]
            time_diff = pose_df[time_key][index_p] - pose_df[time_key][index]
            x_diff = pose_df['x'][index_p] - pose_df['x'][index]
            y_diff = pose_df['y'][index_p] - pose_df['y'][index]
            z_diff = pose_df['z'][index_p] - pose_df['z'][index]

            q_imu_0 = Quaternion(pose_df[['qw', 'qx', 'qy', 'qz']].loc[index])
            q_imu_1 = Quaternion(pose_df[['qw', 'qx', 'qy', 'qz']].loc[index_p])

            check_df = gps_imu_df[(gps_imu_df[time_key] > pose_df[time_key][index]) &
                                  (gps_imu_df[time_key] <= pose_df[time_key][index_p])]
            for idx, crow in check_df.iterrows():
                inter_time = crow[time_key] - ref_time
                ratio = inter_time / time_diff
                x = pose_df['x'][index] + x_diff * ratio
                y = pose_df['y'][index] + y_diff * ratio
                z = pose_df['z'][index] + z_diff * ratio

                q_inter = Quaternion.slerp(q_imu_0, q_imu_1, ratio)

                r_obj = sc_Rotation.from_quat([q_inter.x, q_inter.y, q_inter.z, q_inter.w])
                np_euler = r_obj.as_euler('xyz', degrees=False)

                roll = np_euler[0]
                pitch = np_euler[1]
                yaw = np_euler[2]

                qw_list.append(q_inter.w)
                qx_list.append(q_inter.x)
                qy_list.append(q_inter.y)
                qz_list.append(q_inter.z)

                x_list.append(x)
                y_list.append(y)
                z_list.append(z)
                roll_list.append(roll)
                pitch_list.append(pitch)
                yaw_list.append(yaw)
            pbar.update(1)

        if gps_imu_df.shape[0] != len(x_list):
            raise Exception('invalid data frame size', 'please check first time and last time')

        gps_imu_df['pose_x'] = x_list
        gps_imu_df['pose_y'] = y_list
        gps_imu_df['pose_z'] = z_list
        gps_imu_df['pose_roll'] = roll_list
        gps_imu_df['pose_pitch'] = pitch_list
        gps_imu_df['pose_yaw'] = yaw_list

        gps_imu_df['pose_qx'] = np.array(qx_list)
        gps_imu_df['pose_qy'] = np.array(qy_list)
        gps_imu_df['pose_qz'] = np.array(qz_list)
        gps_imu_df['pose_qw'] = np.array(qw_list)

    return gps_imu_df
Example No. 25
	def update_ori(self, roll, pitch, yaw, slerp_amount=1.0):
		new_ori = self.__convert_from_euler_angles_to_quat(roll, pitch, yaw)
		interp_ori = Quaternion.slerp(self.ori, new_ori, slerp_amount)
		print(interp_ori)
		self.ori = interp_ori
Example No. 26
    def test_slerp(self):
        q1 = Quaternion(axis=[1, 0, 0], angle=0.0)
        q2 = Quaternion(axis=[1, 0, 0], angle=pi/2)
        q3 = Quaternion.slerp(q1, q2, 0.5)
        self.assertEqual(q3, Quaternion(axis=[1, 0, 0], angle=pi/4))
Example No. 27
    def calculate(self, body):
        bone_state = {}
        for bone in body:

            if bone in MMD_TO_VAM_BONE_MAPPINGS.keys():
                bone_name = MMD_TO_VAM_BONE_MAPPINGS[bone]
                frames = self.md.boneAnimation[bone]
                bone_state[bone_name] = {}
                last_frame = -1

                bone_dep = None
                if bone_name in Body.DEPS.keys():
                    bone_dep = Body.DEPS[bone_name]
                    # Sometimes a dep won't have any info, so take the dep of the dep.
                    # E.g. Foot depends on knee, but knee has no motion info so use thigh as the dep.
                    while bone_dep not in bone_state.keys() or len(
                            bone_state[bone_dep].keys()) <= 1:
                        if bone_dep == 'hip':
                            break
                        bone_dep = Body.DEPS[bone_dep]
                frames.sort(key=lambda g: g.frame_number)

                print('Calculating motion for: ' + bone_name)
                for boneFrameKey in frames:
                    if last_frame == -1:
                        # This is the first frame
                        bone_state[bone_name][0] = {}
                        bone_state[bone_name][0]['pos'] = {
                            'x': boneFrameKey.location[0],
                            'y': boneFrameKey.location[1],
                            'z': boneFrameKey.location[2]
                        }
                        # If all the rotation data is 0 (null rotation) then set the rotation to off for this bone
                        # at frame 0.
                        if (boneFrameKey.rotation[0] == 0 and boneFrameKey.rotation[1] == 0
                                and boneFrameKey.rotation[2] == 0):
                            bone_state[bone_name][0]['rot_on'] = False
                        # Get the initial rotation
                        q = Quaternion(boneFrameKey.rotation[3],
                                       boneFrameKey.rotation[0],
                                       boneFrameKey.rotation[1],
                                       boneFrameKey.rotation[2])

                        # If bone has a parent then add the first frame rotation with the parent.
                        if bone_dep:
                            q = bone_state[bone_dep][0]['rot'] * q
                        bone_state[bone_name][0]['rot'] = q

                    else:
                        # Try to calculate the positions and rotations between frames via interpolation.
                        for current_frame in range(
                                last_frame + 1, boneFrameKey.frame_number + 1):
                            bone_state[bone_name][current_frame] = {}
                            diff = boneFrameKey.frame_number - last_frame

                            # Use simple math to calculate where the intermediate points would be.
                            factor = 1.0 / diff
                            bone_state[bone_name][current_frame]['pos'] = {
                                'x':
                                bone_state[bone_name][last_frame]['pos']['x'] *
                                (diff - (current_frame - last_frame)) / diff +
                                (boneFrameKey.location[0] *
                                 (current_frame - last_frame) / diff),
                                'y':
                                bone_state[bone_name][last_frame]['pos']['y'] *
                                (diff - (current_frame - last_frame)) / diff +
                                (boneFrameKey.location[1] *
                                 (current_frame - last_frame) / diff),
                                'z':
                                bone_state[bone_name][last_frame]['pos']['z'] *
                                (diff - (current_frame - last_frame)) / diff +
                                (boneFrameKey.location[2] *
                                 (current_frame - last_frame) / diff),
                            }

                            # This is the tricky part. The calculation for a rotation goes as follows:
                            # 1. Find the rotation of the parent bone at this frame. Should be a quaternion.
                            # 2. If not found, go to the previous frame until one is found.
                            # 3. If the parent is not found at all just start with a null rotation for the dep.
                            # 4. If it's not null then turn on rotation, put the rotation it in a new quaternion.
                            # 5. Calculate the absolute rotation of the next frame by adding the parent and the
                            #    child's rot.
                            # 6. Get the absolute rotation that was already stored previously.
                            # 7. Use the slerp function to interpolate the current rotation based on both previous and
                            #    next rotations.
                            # 8. Save the calculated rotation, and proceed to the next frame.
                            rot_next_frame_parent = None

                            if bone_name in Body.DEPS.keys():
                                fr = boneFrameKey.frame_number
                                while not rot_next_frame_parent:
                                    try:
                                        # 1
                                        rot_next_frame_parent = bone_state[
                                            bone_dep][fr]['rot']
                                    except KeyError:  # 2
                                        fr = fr - 1
                            if not rot_next_frame_parent:
                                rot_next_frame_parent = Quaternion(1, 0, 0,
                                                                   0)  # 3

                            bone_state[bone_name][current_frame][
                                'rot_on'] = True
                            # 4
                            rot_next_frame_child_relative = Quaternion(
                                boneFrameKey.rotation[3],
                                boneFrameKey.rotation[0],
                                boneFrameKey.rotation[1],
                                boneFrameKey.rotation[2])
                            # 5
                            rot_next_frame = rot_next_frame_parent * rot_next_frame_child_relative
                            rot_prev_frame = bone_state[bone_name][last_frame][
                                'rot']  # 6
                            # 7
                            rot_current_frame = \
                                Quaternion.slerp(rot_prev_frame, rot_next_frame, (current_frame - last_frame) * factor)
                            bone_state[bone_name][current_frame][
                                'rot'] = rot_current_frame  # 8
                    last_frame = boneFrameKey.frame_number
            else:
                print('Unknown body part: ' + bone)
        return bone_state