Example No. 1
def get_all_property(dataset, name):
    # Collect one scalar property per dataset sample into a flat list.
    # Assumes torch, math, euler_from_quaternion, norm_tensor and IPython's
    # embed() are imported by the surrounding module.
    all_data_point = []
    # rot_mean = torch.Tensor(dataset.data_statistics['rotation_mean'])
    pos_mean = torch.Tensor(dataset.data_statistics['position_mean'])
    pos_std = torch.Tensor(dataset.data_statistics['position_std'])
    # rot_std = torch.Tensor(dataset.data_statistics['rotation_std'])
    for idx in range(len(dataset)):
        input_dict, labels = dataset[idx]
        if name == 'before_angle':
            # Euler angles (degrees) of the pre-action orientation quaternion.
            before_angles = euler_from_quaternion(
                input_dict['norm_state_tensor'][3:7], axes='rxyz')
            all_data_point += [a / math.pi * 180 for a in before_angles]
        elif name == 'position':
            # De-normalize the position back to its original scale.
            before_pos = norm_tensor(
                norm_or_denorm=False,
                tensor=input_dict['norm_state_tensor'][:3],
                mean_tensor=pos_mean,
                std_tensor=pos_std)
            all_data_point += [before_pos[0], before_pos[1], before_pos[2]]
        elif name == 'rot_diff':
            # Change in the first Euler angle (degrees) between input and label.
            before_rot = input_dict['norm_state_tensor'][3:7]
            before_angle = euler_from_quaternion(before_rot,
                                                 axes='rxyz')[0] / math.pi * 180
            after_rot = labels['norm_state_tensor'][3:7]
            after_angle = euler_from_quaternion(after_rot,
                                                axes='rxyz')[0] / math.pi * 180
            diff_rot = after_angle - before_angle
            if abs(diff_rot) > 40:
                embed()  # drop into IPython to inspect suspiciously large jumps
            all_data_point += [diff_rot]
        elif name == 'pos_diff':
            # Change in de-normalized position between the input and the label.
            before_pos = norm_tensor(
                norm_or_denorm=False,
                tensor=input_dict['norm_state_tensor'][:3],
                mean_tensor=pos_mean,
                std_tensor=pos_std)
            after_pos = norm_tensor(
                norm_or_denorm=False,
                tensor=labels['norm_state_tensor'][:3],
                mean_tensor=pos_mean,
                std_tensor=pos_std)
            diff_pos = after_pos - before_pos
            all_data_point += [diff_pos[0], diff_pos[1], diff_pos[2]]
    return all_data_point
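A possible call site for this collector; the dataset object, its data_statistics dictionary, and the helpers it relies on are the project's own and are only assumed here.

# Hypothetical usage: summarize how much the orientation changes per sample.
import numpy as np

rot_diffs = get_all_property(dataset, 'rot_diff')
print('rotation change (deg): mean %.2f, std %.2f'
      % (np.mean(rot_diffs), np.std(rot_diffs)))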
Example No. 2
def quaternion_to_euler_angle(quaternion, degree=True):
    '''
    Transform a quaternion into Euler angles.
    :param quaternion: quaternion in [w, x, y, z] format.
    :param degree: whether the result should be expressed in degrees.
    :return: Euler angles, in rxyz order.
    '''
    if isinstance(quaternion, torch.Tensor):
        quaternion = quaternion.cpu()
    euler_angle = euler_from_quaternion(quaternion, axes='rxyz')
    euler_angle = np.array(euler_angle)
    if degree:
        euler_angle = euler_angle / math.pi * 180
    return euler_angle
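A quick sanity check of the helper; this assumes euler_from_quaternion follows the w-first quaternion convention stated in the docstring.

# Hypothetical check: a 90-degree rotation about z, written as [w, x, y, z].
import numpy as np

q = [np.cos(np.pi / 4), 0.0, 0.0, np.sin(np.pi / 4)]
print(quaternion_to_euler_angle(q))  # expected to be close to [0, 0, 90]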
Example No. 3
from __future__ import print_function

from utils.conversions import trans_rot_to_hmat, hmat_to_trans_rot
from utils.transformations import euler_from_quaternion
import rospy
import roslib
roslib.load_manifest("tf")
import tf
import numpy as np

class Globals:
    listener = None
    
if rospy.get_name() == "/unnamed":
    rospy.init_node("transform_kinect_frames")
    Globals.listener = tf.TransformListener()
    rospy.sleep(1)

ONIfromCL = trans_rot_to_hmat(*Globals.listener.lookupTransform("/camera_rgb_optical_frame","camera_link", rospy.Time(0)))
GAZfromONI = trans_rot_to_hmat([0, -.15, -.24], [0, 0, 0, 1])
HEADfromGAZ = trans_rot_to_hmat(*Globals.listener.lookupTransform("/head_plate_frame", "/wide_stereo_gazebo_l_stereo_camera_optical_frame",rospy.Time(0)))

HEADfromCL = np.dot(np.dot(HEADfromGAZ, GAZfromONI), ONIfromCL)

trans_final, quat_final = hmat_to_trans_rot(HEADfromCL)
eul_final = euler_from_quaternion(quat_final)

print "translation", trans_final
print "rotation", eul_final


print "%.4f %.4f %.4f %.4f %.4f %.4f %.4f"%(tuple(trans_final) + tuple(quat_final))
Example No. 4
def quat_to_yaw(q):
    """Return the yaw (third Euler angle, rotation about z) of quaternion q."""
    e = transformations.euler_from_quaternion(q)
    return e[2]
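A possible call site, assuming the transformations module above is tf.transformations, which takes quaternions in (x, y, z, w) order.

# Hypothetical usage with a ROS geometry_msgs/Pose.
def pose_to_yaw(pose):
    q = [pose.orientation.x, pose.orientation.y,
         pose.orientation.z, pose.orientation.w]
    return quat_to_yaw(q)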
Example No. 5
    def _hdf5_add_commands(self):
        """Derive per-chunk step and turn commands from the accelerometer and
        camera-orientation CSV logs, aligned to the hdf5 timestamps."""
        ### all
        hdf5_times = list(self._hdf5_dict.time)
        # extrapolate one extra timestamp so the final chunk has a right edge
        hdf5_times.append(hdf5_times[-1] + (hdf5_times[-1] - hdf5_times[-2]))

        ### steps
        csv = pandas.read_csv(self._csv_acc_fname)
        times = 1e-3 * np.array(csv['cts'])
        accs = np.stack([
            csv['Accelerometer (x) [m/s2]'], csv['Accelerometer (y) [m/s2]'],
            csv['Accelerometer (z) [m/s2]']
        ]).T.astype(np.float32)

        pedometer = Pedometer(accs, times)
        step_rates = pedometer.get_step_rates()

        step_rates_chunked = []
        for i in range(len(hdf5_times) - 1):
            indices = np.logical_and(times >= hdf5_times[i],
                                     times < hdf5_times[i + 1])
            step_rate_i = step_rates[indices].mean()
            step_rates_chunked.append(step_rate_i)

        ### camera orientation
        csv = pandas.read_csv(self._csv_ori_fname)
        times = np.array(1e-3 * csv['cts'])
        quats = np.array(
            [csv['1'], csv['2'], csv['3'], csv['CameraOrientation']]).T

        def smooth_quaternions(quats, frac_range=0.4, frac_bias=0.04):
            # https://www.mathworks.com/help/fusion/examples/lowpass-filter-orientation-using-quaternion-slerp.html
            smoothed_quats = [quats[0]]
            for quat in quats[1:]:
                smoothed_quat = smoothed_quats[-1]
                delta_quat = transformations.quaternion_multiply(
                    transformations.quaternion_inverse(smoothed_quat), quat)
                delta_quat /= np.linalg.norm(delta_quat)
                angle = transformations.quaternion_to_axisangle(
                    delta_quat)[0][0]
                alpha = (angle / np.pi) * frac_range + frac_bias
                smoothed_quat = transformations.quaternion_slerp(
                    smoothed_quats[-1], quat, alpha)
                smoothed_quats.append(smoothed_quat)
            smoothed_quats = np.array(smoothed_quats)
            assert np.all(np.isfinite(smoothed_quats))
            return smoothed_quats

        quats = smooth_quaternions(quats)

        quats_chunked = []
        for i in range(len(hdf5_times) - 1):
            indices = np.logical_and(times >= hdf5_times[i],
                                     times < hdf5_times[i + 1])
            quats_chunked.append(quats[indices])

        ### integrate
        dt = times[1] - times[0]
        fps = 1. / dt
        turns = []
        steps = []
        for i in range(len(hdf5_times) - 1):
            indices = np.where(
                np.logical_and(times >= hdf5_times[i],
                               times < hdf5_times[i + 1]))[0]
            assert len(indices) == (indices[-1] - indices[0] + 1)
            start_idx, stop_idx = indices[0], indices[-1]

            # step
            step_rate = step_rates_chunked[i]

            # angles
            prepend = int(fps * 1.5)
            prestart_idx = np.clip(start_idx - prepend, 0, len(times) - 1)
            poststop_idx = np.clip(stop_idx + prepend, 0, len(times) - 1)

            origin = quats[start_idx]
            origin_inv = transformations.quaternion_inverse(origin)
            quats_list_origin = [
                transformations.quaternion_multiply(origin_inv, q)
                for q in quats[prestart_idx:poststop_idx]
            ]
            angles = -np.array([
                transformations.euler_from_quaternion(q)[1]
                for q in quats_list_origin
            ])
            angles = np_utils.butter_lowpass_filter(angles,
                                                    cutoff=0.8,
                                                    fs=30.,
                                                    order=5)

            # account for a ~0.8 s delay: shift the filtered angles earlier
            delay_shift = int(0.8 * fps)
            angles = np.roll(angles, -delay_shift)

            # trim the prepended/appended context, guarding against an empty
            # slice when stop_idx is already at the end of the recording
            tail = poststop_idx - stop_idx
            angles = angles[start_idx - prestart_idx:len(angles) - tail]

            pos = np.zeros(2)
            if len(angles) > 0:
                dt = (1. / GoproToHdf5.FPS) / len(angles)
                for angle in angles:
                    pos += dt * step_rate * np.array(
                        [np.cos(angle), np.sin(angle)])

            turns.append(np.arctan2(pos[1], pos[0]))
            steps.append(np.linalg.norm(pos))

        self._hdf5_dict.commands.step = np.array(steps)
        self._hdf5_dict.commands.turn = np.array(turns)
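The slerp-based low-pass filter above relies on a project-specific quaternion_to_axisangle helper. Below is a minimal standalone sketch of the same idea, assuming tf.transformations with (x, y, z, w) quaternions and recovering the rotation angle directly from the delta quaternion's scalar part.

import numpy as np
from tf import transformations

def slerp_lowpass(quats, frac_range=0.4, frac_bias=0.04):
    smoothed = [np.asarray(quats[0], dtype=np.float64)]
    for quat in quats[1:]:
        prev = smoothed[-1]
        # rotation taking the previous smoothed estimate to the new sample
        delta = transformations.quaternion_multiply(
            transformations.quaternion_inverse(prev), quat)
        delta /= np.linalg.norm(delta)
        # rotation angle of delta, from its scalar (w) component
        angle = 2.0 * np.arccos(np.clip(abs(delta[3]), 0.0, 1.0))
        # larger angular jumps get a larger interpolation fraction
        alpha = (angle / np.pi) * frac_range + frac_bias
        smoothed.append(transformations.quaternion_slerp(prev, quat, alpha))
    return np.array(smoothed)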
Example No. 6
def euler_from_quaternion(q):
    """Convert an (x, y, z, w) quaternion, as returned by pybullet, to the
    (roll, pitch, yaw) Euler-angle convention used by transformations.py."""
    trans_quat = to_transquat(q)
    eul = trans.euler_from_quaternion(trans_quat)
    return eul
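The to_transquat helper is not shown above; a minimal sketch of what it presumably does, on the assumption that transformations.py is used here with w-first quaternions.

# Hypothetical helper: reorder pybullet's (x, y, z, w) quaternion into (w, x, y, z).
def to_transquat(pybullet_quat):
    x, y, z, w = pybullet_quat
    return [w, x, y, z]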