def test_calc_quat(self):
    # Constant rotation about the z-axis: 100 deg/s, sampled at 100 Hz
    # for 1000 samples, i.e. 1 deg per sample.
    v0 = [0., 0., 100.]
    vel = np.tile(v0, (1000, 1))
    rate = 100

    out = quat.calc_quat(np.deg2rad(vel), [0., 0., 0.], rate, 'sf')
    result = out[-1:]
    correct = [[-0.76040597, 0., 0., 0.64944805]]
    error = np.linalg.norm(result - correct)
    self.assertTrue(error < self.delta)
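Where does the hard-coded `correct` value come from? Each sample advances the rotation by 100 deg/s / 100 Hz = 1 deg about the z-axis, and the constants match a total rotation of 999 deg rather than 1000 deg, so the first output row evidently holds the initial orientation. A quick stand-alone sanity check (plain numpy, nothing library-specific):

import numpy as np

# A rotation by theta about the z-axis corresponds to the unit quaternion
# [cos(theta/2), 0, 0, sin(theta/2)].
theta = np.deg2rad(999)
print(np.cos(theta / 2), np.sin(theta / 2))   # -0.76040597..., 0.64944805...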
def test_calc_angvel(self):
    # Smooth trajectory of quaternion vector parts (scalar part implied)
    rate = 1000
    t = np.arange(0, 10, 1. / rate)
    x = 0.1 * np.sin(t)
    y = 0.2 * np.sin(t)
    z = np.zeros_like(t)
    q = np.column_stack((x, y, z))

    # Round trip: differentiate to angular velocity, then re-integrate.
    # calc_quat returns full unit quaternions, so only their vector parts
    # (columns 1:) are compared against the original q.
    vel = quat.calc_angvel(q, rate, 5, 2)
    qReturn = quat.calc_quat(vel, q[0], rate, 'sf')
    error = np.max(np.abs(q - qReturn[:, 1:]))
    self.assertTrue(error < 1e-3)
def calculate_head_orientation(angular_velocity_data, frequency):
    """
    Calculate the orientation of the head from the angular velocities it
    undergoes, as measured by a head-fixed sensor. Assumes that the head
    is perfectly aligned with the global coordinate system at the start.

    :param angular_velocity_data: angular velocities recorded by the sensor, in [rad/s]
    :param frequency: sample rate of the sensor, in [Hz]
    :return: the orientation of the head at each point in time, as quaternions
    """
    # Integrate the body-fixed ('bf') angular velocities into quaternions,
    # starting from the neutral orientation (quaternion vector [0, 0, 0]).
    head_orientation = quat.calc_quat(angular_velocity_data, [0, 0, 0],
                                      rate=frequency, CStype='bf')
    return head_orientation
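A minimal usage sketch for calculate_head_orientation (the data are synthetic and the import is an assumption; note that calc_quat expects angular velocities in rad/s, which is why the test above converts with np.deg2rad):

import numpy as np
from skinematics import quat

# Synthetic sensor data: the head turns at a constant 10 deg/s about the
# vertical (z) axis, sampled at 100 Hz for one second.
fs = 100
omega = np.tile(np.deg2rad([0., 0., 10.]), (fs, 1))

orientation = calculate_head_orientation(omega, fs)
print(orientation[-1])   # head orientation after ~1 s of rotation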
def test_view_orientation(self):
    omega = np.r_[0, 10, 10]   # [deg/s]
    duration = 2
    rate = 100
    q0 = [1, 0, 0, 0]
    out_file = 'demo_patch.mp4'
    title_text = 'Rotation Demo'

    ## Calculate the orientation, converting deg/s to the rad/s
    ## that calc_quat expects
    dt = 1. / rate
    num_rep = duration * rate
    omegas = np.tile(omega, [num_rep, 1])
    quaternion = quat.calc_quat(np.deg2rad(omegas), q0, rate, 'sf')

    view.orientation(quaternion, out_file, title_text)
import numpy as np
from scipy import constants
from scipy.integrate import cumtrapz

from skinematics import quat, rotmat, vector


def analytical(R_initialOrientation=np.eye(3),
               omega=np.zeros((5, 3)),
               initialPosition=np.zeros(3),
               initialVelocity=np.zeros(3),
               accMeasured=np.column_stack((np.zeros((5, 2)), 9.81 * np.ones(5))),
               rate=100):
    ''' Reconstruct position and orientation with an analytical solution,
    from angular velocity and linear acceleration.
    Assumes a start in a stationary position. No compensation for drift.

    Parameters
    ----------
    R_initialOrientation : ndarray(3,3)
        Rotation matrix describing the initial orientation of the sensor,
        except for a mis-orientation with respect to gravity
    omega : ndarray(N,3)
        Angular velocity, in [rad/s]
    initialPosition : ndarray(3,)
        Initial position, in [m]
    initialVelocity : ndarray(3,)
        Initial velocity, in [m/s]
    accMeasured : ndarray(N,3)
        Linear acceleration, in [m/s^2]
    rate : float
        Sampling rate, in [Hz]

    Returns
    -------
    q : ndarray(N,4)
        Orientation, expressed as quaternions
    pos : ndarray(N,3)
        Position in space [m]
    vel : ndarray(N,3)
        Velocity in space [m/s]

    Example
    -------
    >>> q1, pos1, vel1 = analytical(R_initialOrientation, omega,
    ...                             initialPosition, initialVelocity, acc, rate)
    '''

    if omega.ndim == 1:
        raise ValueError('The input to "analytical" requires matrix inputs.')

    # Transform recordings to angVel/acceleration in space ----------------

    # Orientation of \vec{g} with the sensor in the "R_initialOrientation"
    g = constants.g
    g0 = np.linalg.inv(R_initialOrientation).dot(np.r_[0, 0, g])

    # For the remaining deviation, assume the shortest rotation to there
    q0 = vector.q_shortest_rotation(accMeasured[0], g0)

    q_initial = rotmat.convert(R_initialOrientation, to='quat')

    # Combine the two, to form a reference orientation. Note that the
    # sequence is very important!
    q_ref = quat.q_mult(q_initial, q0)

    # Calculate orientation q by "integrating" omega ----------------------
    q = quat.calc_quat(omega, q_ref, rate, 'bf')

    # Acceleration, velocity, and position ---------------------------------
    # From q and the measured acceleration, get \frac{d^2x}{dt^2}
    g_v = np.r_[0, 0, g]
    accReSensor = accMeasured - vector.rotate_vector(g_v, quat.q_inv(q))
    accReSpace = vector.rotate_vector(accReSensor, q)

    # Make the first position the reference position
    q = quat.q_mult(q, quat.q_inv(q[0]))

    # compensate for drift
    #drift = np.mean(accReSpace, 0)
    #accReSpace -= drift*0.7

    # Position and velocity through integration, assuming 0-velocity at t=0
    vel = np.nan * np.ones_like(accReSpace)
    pos = np.nan * np.ones_like(accReSpace)

    for ii in range(accReSpace.shape[1]):
        vel[:, ii] = cumtrapz(accReSpace[:, ii], dx=1. / rate,
                              initial=initialVelocity[ii])
        pos[:, ii] = cumtrapz(vel[:, ii], dx=1. / rate,
                              initial=initialPosition[ii])

    return (q, pos, vel)
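A minimal sketch of calling analytical, with synthetic data and illustrative names: a sensor lying motionless (here tilted 5 deg from the vertical, so that the initial-orientation estimate is well defined) should be reconstructed with position and velocity staying near zero.

import numpy as np
from scipy import constants

rate = 100
N = 5 * rate                        # 5 s of data from a motionless sensor

# No rotation, and a constant gravity vector of magnitude g, tilted 5 deg
# away from the sensor's z-axis.
g = constants.g
tilt = np.deg2rad(5)
omega = np.zeros((N, 3))
acc = np.tile([g * np.sin(tilt), 0., g * np.cos(tilt)], (N, 1))

q, pos, vel = analytical(omega=omega, accMeasured=acc, rate=rate)
print(pos[-1], vel[-1])             # both should remain close to zero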
import numpy as np
from skinematics import view, quat

# 2D Viewer -----------------
data = np.random.randn(100, 3)
t = np.arange(0, 2 * np.pi, 0.1)
x = np.sin(t)

# Show the data
view.ts(data)

# Let the user select data from the local workspace
view.ts(locals())

# 3-D Viewer ----------------
# Set the parameters
omega = np.r_[0, 10, 10]   # [deg/s]
duration = 2
rate = 100
q0 = [1, 0, 0, 0]
out_file = 'demo_patch.mp4'
title_text = 'Rotation Demo'

## Calculate the orientation, converting deg/s to the rad/s
## that calc_quat expects
dt = 1. / rate
num_rep = duration * rate
omegas = np.tile(omega, [num_rep, 1])
q = quat.calc_quat(np.deg2rad(omegas), q0, rate, 'sf')

#orientation(q)
view.orientation(q, out_file, title_text)