    def test_analyze3dmarkers(self):
        # Test-data Generation:

        # 1) Three marker locations
        M = np.empty((3, 3))
        M[0] = np.r_[0, 0, 0]
        M[1] = np.r_[1, 0, 0]
        M[2] = np.r_[1, 1, 0]
        M -= np.mean(M, axis=0)

        # 2) a gradual translation from 0 to [1,1,0], over 10 sec
        t = np.arange(0, 10, 0.1)
        translation = (np.c_[[1, 1, 0]] * t).T

        # 3) A rotation with 100 deg/s about the z-axis
        q = np.vstack(
            (np.zeros_like(t), np.zeros_like(t), quat.deg2quat(100 * t))).T

        # Calculate the location of the three test-markers
        M0 = vector.rotate_vector(M[0], q) + translation
        M1 = vector.rotate_vector(M[1], q) + translation
        M2 = vector.rotate_vector(M[2], q) + translation

        data = np.hstack((M0, M1, M2))

        (pos, ori) = markers.analyze_3Dmarkers(data, data[0])

        self.assertAlmostEqual(np.max(np.abs(pos - translation)), 0)
        self.assertAlmostEqual(np.max(np.abs(ori - q)), 0)
Example #3
File: imu_calc.py  Project: Kojon74/PosFix
def calc_position(q, accel, initialVelocity, initialPosition, timeVector):
    g_v = np.r_[0, 0, constants.g]
    accReSensor = accel - vector.rotate_vector(g_v, quat.q_inv(q))
    accReSpace = vector.rotate_vector(accReSensor, q)

    # Position and Velocity through integration, assuming 0-velocity at t=0
    vel = np.nan * np.ones_like(accReSpace)
    pos = np.nan * np.ones_like(accReSpace)

    for ii in range(accReSpace.shape[1]):
        vel[:, ii] = cumtrapz(accReSpace[:, ii],
                              x=timeVector,
                              initial=initialVelocity[ii])
        pos[:, ii] = cumtrapz(vel[:, ii],
                              x=timeVector,
                              initial=initialPosition[ii])

    avevel = np.mean(vel, axis=0)
    aveaccel = np.mean(accel, axis=0)

    print("Average Accel: {}, Average Velocity: {}, Time taken: {}".format(
        np.sqrt(aveaccel[0]**2 + aveaccel[1]**2 + aveaccel[2]**2),
        np.sqrt(avevel[0]**2 + avevel[1]**2 + avevel[2]**2),
        timeVector[-1] * 1000))

    return pos, vel
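A minimal usage sketch, assuming the imports used above (numpy as np, scipy's constants and cumtrapz, and the vector/quat helpers) are in scope. With a level orientation and a gravity-only acceleration signal, the integrated velocity and position stay at zero:

t = np.arange(0, 1, 0.01)                          # 1 s of samples at 100 Hz
q = np.zeros((len(t), 3))                          # identity orientation (quaternion vector parts)
acc = np.tile([0., 0., constants.g], (len(t), 1))  # the sensor measures only gravity
pos, vel = calc_position(q, acc, np.zeros(3), np.zeros(3), t)
print(pos[-1], vel[-1])                            # both remain (numerically) at the origin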
Example #4
    def calc_position(self):
        '''Calculate the position, assuming that the orientation is already known.'''

        initialPosition = self.pos_init
        # Acceleration, velocity, and position ----------------------------
        # From q and the measured acceleration, get the \frac{d^2x}{dt^2}
        g = constants.g
        g_v = np.r_[0, 0, g]
        accReSensor = self.acc - vector.rotate_vector(g_v, quat.q_inv(
            self.quat))
        accReSpace = vector.rotate_vector(accReSensor, self.quat)

        # Position and Velocity through integration, assuming 0-velocity at t=0
        vel = np.nan * np.ones_like(accReSpace)
        pos = np.nan * np.ones_like(accReSpace)

        for ii in range(accReSpace.shape[1]):
            vel[:, ii] = cumtrapz(accReSpace[:, ii],
                                  dx=1. / float(self.rate),
                                  initial=0)
            pos[:, ii] = cumtrapz(vel[:, ii],
                                  dx=1. / float(self.rate),
                                  initial=initialPosition[ii])

        self.pos = pos
Example #5
    def Update(self, Gyroscope, Accelerometer, Magnetometer):
        '''Calculate the best quaternion to the given measurement values.'''
        
        q = self.Quaternion  # short name local variable for readability

        # Reference direction of Earth's magnetic field
        h = vector.rotate_vector(Magnetometer, q)
        b = np.hstack((0, np.sqrt(h[0]**2+h[1]**2), 0, h[2]))

        # Gradient descent algorithm corrective step
        F = [2*(q[1]*q[3] - q[0]*q[2])   - Accelerometer[0],
             2*(q[0]*q[1] + q[2]*q[3])   - Accelerometer[1],
             2*(0.5 - q[1]**2 - q[2]**2) - Accelerometer[2],
             2*b[1]*(0.5 - q[2]**2 - q[3]**2) + 2*b[3]*(q[1]*q[3] - q[0]*q[2])   - Magnetometer[0],
             2*b[1]*(q[1]*q[2] - q[0]*q[3])   + 2*b[3]*(q[0]*q[1] + q[2]*q[3])   - Magnetometer[1],
             2*b[1]*(q[0]*q[2] + q[1]*q[3])   + 2*b[3]*(0.5 - q[1]**2 - q[2]**2) - Magnetometer[2]]
        
        J = np.array([
            [-2*q[2],                 	2*q[3],                    -2*q[0],                         2*q[1]],
            [ 2*q[1],                 	2*q[0],                	    2*q[3],                         2*q[2]],
            [0,                        -4*q[1],                    -4*q[2],                         0],
            [-2*b[3]*q[2],              2*b[3]*q[3],               -4*b[1]*q[2]-2*b[3]*q[0],       -4*b[1]*q[3]+2*b[3]*q[1]],
            [-2*b[1]*q[3]+2*b[3]*q[1],	2*b[1]*q[2]+2*b[3]*q[0],    2*b[1]*q[1]+2*b[3]*q[3],       -2*b[1]*q[0]+2*b[3]*q[2]],
            [ 2*b[1]*q[2],              2*b[1]*q[3]-4*b[3]*q[1],    2*b[1]*q[0]-4*b[3]*q[2],        2*b[1]*q[1]]])
        
        step = J.T.dot(F)
        step = vector.normalize(step)	# normalise step magnitude

        # Compute rate of change of quaternion
        qDot = 0.5 * quat.quatmult(q, np.hstack([0, Gyroscope])) - self.Beta * step

        # Integrate to yield quaternion
        q = q + qDot * self.SamplePeriod
        self.Quaternion = vector.normalize(q).flatten()
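A hedged usage sketch for one filter step. The enclosing class is not shown in this snippet, so the constructor below (here called MadgwickAHRS) and its keyword arguments are assumptions; only the SamplePeriod, Beta and Quaternion attributes used by Update above are taken from the code itself:

# hypothetical instantiation -- class name and constructor signature are assumed
ahrs = MadgwickAHRS(SamplePeriod=1. / 256, Beta=1.0)
ahrs.Quaternion = np.r_[1., 0., 0., 0.]   # start at the identity orientation

gyr = np.r_[0.01, -0.02, 0.03]            # angular velocity [rad/s]
acc = np.r_[0.0, 0.0, 9.81]               # acceleration [m/s^2], roughly gravity
mag = np.r_[0.3, 0.0, 0.5]                # magnetic field, arbitrary units

ahrs.Update(gyr, acc, mag)                # one gradient-descent update step
print(ahrs.Quaternion)                    # updated unit quaternion (w, x, y, z)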
Example #6
    def test_rotate_vector(self):
        x = [[1, 0, 0], [0, 1, 0], [0, 0, 1]]
        result = vector.rotate_vector(x, [0, 0, sin(0.1)])
        correct = array([[0.98006658, 0.19866933, 0.],
                         [-0.19866933, 0.98006658, 0.], [0., 0., 1.]])
        error = norm(result - correct)
        self.assertTrue(error < self.delta)
Example #7
    def calc_position(self, initialPosition):
        '''Calculate the position, assuming that the orientation is already known.'''

        # Acceleration, velocity, and position ----------------------------
        # From q and the measured acceleration, get the \frac{d^2x}{dt^2}
        g = sp.constants.g
        g_v = np.r_[0, 0, g] 
        accReSensor = self.acc - vector.rotate_vector(g_v, quat.quatinv(self.quat))
        accReSpace = vector.rotate_vector(accReSensor, self.quat)

        # Position and Velocity through integration, assuming 0-velocity at t=0
        vel = np.nan*np.ones_like(accReSpace)
        pos = np.nan*np.ones_like(accReSpace)

        for ii in range(accReSpace.shape[1]):
            vel[:,ii] = cumtrapz(accReSpace[:,ii], dx=1./self.rate, initial=0)
            pos[:,ii] = cumtrapz(vel[:,ii],        dx=1./self.rate, initial=initialPosition[ii])

        self.pos = pos
Example #8
def align_sensor_data_globally(sensor_data: XSens, global_orientation):
    """
    align sensor data with a global coordinate system
    :param sensor_data: the data to be adjusted
    :param global_orientation: the orientation of the sensor in the global coordinate system
    :return: the adjusted sensor data
    """
    # rotate the data the same way the sensor is rotated with respect to the global reference coordinate system
    adjusted_data = vector.rotate_vector(sensor_data, global_orientation)

    return adjusted_data
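A small sketch of a call, assuming the sensor data behaves like an (N,3) array and the global orientation is given as the vector part of a unit quaternion (here a 90-degree rotation about the z-axis):

acc_local = np.array([[1., 0., 0.],
                      [0., 1., 0.]])
q_global = np.r_[0, 0, np.sin(np.pi / 4)]     # 90 deg about z
acc_global = align_sensor_data_globally(acc_local, q_global)
print(acc_global)                             # x maps to y, y maps to -x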
Example #9
def get_nose_from_head_orientation(head_orientation):
    """
    Get a vector describing the orientation of the nose's end, based on the orientation of the head
    :param head_orientation: the orientation of the head in the global coordinate system
    :return: the vector describing the nose orientation
    """
    # define the nose as pointing in x-direction initially
    nose_vector = np.array([1, 0, 0])
    # move the nose according to how the head is positioned
    nose_orientation = vector.rotate_vector(nose_vector, head_orientation)

    return nose_orientation
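A short sketch: with the head pitched 30 degrees about the y-axis (quaternion given by its vector part), the nose vector tilts out of the horizontal plane; the sign of the z-component depends on the rotation convention, so treat the printed values as illustrative:

head_q = np.r_[0, np.sin(np.deg2rad(30) / 2), 0]   # 30 deg pitch about y
nose = get_nose_from_head_orientation(head_q)
print(nose, np.linalg.norm(nose))                  # unit vector, roughly [0.87, 0, -0.5]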
Example #10
    def test_analyze3dmarkers(self):
        t = np.arange(0, 10, 0.1)
        translation = (np.c_[[1, 1, 0]] * t).T

        M = np.empty((3, 3))
        M[0] = np.r_[0, 0, 0]
        M[1] = np.r_[1, 0, 0]
        M[2] = np.r_[1, 1, 0]
        M -= np.mean(M, axis=0)

        q = np.vstack(
            (np.zeros_like(t), np.zeros_like(t), quat.deg2quat(100 * t))).T

        M0 = vector.rotate_vector(M[0], q) + translation
        M1 = vector.rotate_vector(M[1], q) + translation
        M2 = vector.rotate_vector(M[2], q) + translation

        data = np.hstack((M0, M1, M2))

        (pos, ori) = markers.analyze_3Dmarkers(data, data[0])

        self.assertAlmostEqual(np.max(np.abs(pos - translation)), 0)
        self.assertAlmostEqual(np.max(np.abs(ori - q)), 0)
Example #11
def findTrajectory(r0, Position, Orientation):
    '''
    Movement trajectory of a point on an object, from the position and
    orientation of a sensor, and the relative position of the point at t=0.

    Parameters
    ----------
    r0 : ndarray (3,)
        Position of point relative to center of markers, when the object is
        in the reference position.
    Position : ndarray, shape (N,3)
        x/y/z coordinates of COM, relative to the reference position
    Orientation : ndarray, shape (N,3)
        Orientation relative to reference orientation, expressed as quaternion

    Returns
    -------
    mov : ndarray, shape (N,3)
        x/y/z coordinates of the position on the object, relative to the
        reference position of the markers

    Notes
    ----- 

      .. math::

          \\vec C(t) = \\vec M(t) + \\vec r(t) = \\vec M(t) +
          {{\\bf{R}}_{mov}}(t) \\cdot \\vec r({t_0})

    Examples
    --------
    >>> t = np.arange(0,10,0.1)
    >>> translation = (np.c_[[1,1,0]]*t).T
    >>> M = np.empty((3,3))
    >>> M[0] = np.r_[0,0,0]
    >>> M[1]= np.r_[1,0,0]
    >>> M[2] = np.r_[1,1,0]
    >>> M -= np.mean(M, axis=0) 
    >>> q = np.vstack((np.zeros_like(t), np.zeros_like(t),quat.deg2quat(100*t))).T
    >>> M0 = vector.rotate_vector(M[0], q) + translation
    >>> M1 = vector.rotate_vector(M[1], q) + translation
    >>> M2 = vector.rotate_vector(M[2], q) + translation
    >>> data = np.hstack((M0,M1,M2))
    >>> (pos, ori) = signals.analyze3Dmarkers(data, data[0])
    >>> r0 = np.r_[1,2,3]
    >>> movement = findTrajectory(r0, pos, ori)

    '''

    return Position + vector.rotate_vector(r0, Orientation)
Example #13
    def Update(self, Gyroscope, Accelerometer, Magnetometer):
        '''Calculate the best quaternion to the given measurement values.'''

        q = self.Quaternion  # short name local variable for readability

        # Reference direction of Earth's magnetic field
        h = vector.rotate_vector(Magnetometer, q)
        b = np.hstack((0, np.sqrt(h[0]**2 + h[1]**2), 0, h[2]))

        # Estimated direction of gravity and magnetic field
        v = np.array([
            2 * (q[1] * q[3] - q[0] * q[2]), 2 * (q[0] * q[1] + q[2] * q[3]),
            q[0]**2 - q[1]**2 - q[2]**2 + q[3]**2
        ])

        w = np.array([
            2 * b[1] * (0.5 - q[2]**2 - q[3]**2) + 2 * b[3] *
            (q[1] * q[3] - q[0] * q[2]),
            2 * b[1] * (q[1] * q[2] - q[0] * q[3]) + 2 * b[3] *
            (q[0] * q[1] + q[2] * q[3]), 2 * b[1] *
            (q[0] * q[2] + q[1] * q[3]) + 2 * b[3] * (0.5 - q[1]**2 - q[2]**2)
        ])

        # Error is sum of cross product between estimated direction and measured direction of fields
        e = np.cross(Accelerometer, v) + np.cross(Magnetometer, w)

        if self.Ki > 0:
            self._eInt += e * self.SamplePeriod
        else:
            self._eInt = np.array([0, 0, 0], dtype=float)

        # Apply feedback terms
        Gyroscope += self.Kp * e + self.Ki * self._eInt

        # Compute rate of change of quaternion
        qDot = 0.5 * quat.quatmult(q, np.hstack([0, Gyroscope])).flatten()

        # Integrate to yield quaternion
        q += qDot * self.SamplePeriod

        self.Quaternion = vector.normalize(q)
Example #14
def get_init_on_dir_rh_scc(reid_initial_pitch_angle_deg):
    """
    get the initial orientation of the right horizontal semi-circular canal's on direction vector
    :param reid_initial_pitch_angle_deg: the angle, in degrees, by which Reid's line is pitched upwards with
    respect to the fixed-space horizontal plane
    :return: the on-direction vector of the right horizontal semi-circular canal, expressed in the global
    coordinate system
    """
    # set the vector about which rotations stimulate the right horizontal semi-circular canal respective to the
    # orientation of Reid's line given by the exercise and normalize it
    on_dir = vector.normalize(np.array([0.32269, -0.03837, -0.94573]))
    # convert the initial pitch angle to radian
    reid_initial_pitch_angle_rad = reid_initial_pitch_angle_deg / 180 * np.pi
    # set the rotation vector for the pitch of Reid's line with the defined angle
    reid_initial_pitch = np.array(
        [0, 0, np.sin(reid_initial_pitch_angle_rad / 2)])
    # adjust the on direction vector of the semi-circular canal according to the pitch of Reid's line so that it now
    # represents the orientation in the global coordinate system
    on_dir_adjusted = vector.rotate_vector(on_dir, reid_initial_pitch)

    return on_dir_adjusted
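A brief sketch of a call (15 degrees is an arbitrary illustrative pitch); since the adjustment is a pure rotation, the returned on-direction remains a unit vector:

on_dir = get_init_on_dir_rh_scc(15)
print(on_dir, np.linalg.norm(on_dir))   # rotated on-direction, still unit length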
Example #15
    def Update(self, Gyroscope, Accelerometer, Magnetometer):
        '''Calculate the best quaternion to the given measurement values.'''
        
        q = self.Quaternion  # short name local variable for readability

        # Reference direction of Earth's magnetic field
        h = vector.rotate_vector(Magnetometer, q)
        b = np.hstack((0, np.sqrt(h[0]**2+h[1]**2), 0, h[2]))

        # Estimated direction of gravity and magnetic field
        v = np.array([
             2*(q[1]*q[3] - q[0]*q[2]),
             2*(q[0]*q[1] + q[2]*q[3]),
             q[0]**2 - q[1]**2 - q[2]**2 + q[3]**2])

        w = np.array([
             2*b[1]*(0.5 - q[2]**2 - q[3]**2) + 2*b[3]*(q[1]*q[3] - q[0]*q[2]),
             2*b[1]*(q[1]*q[2] - q[0]*q[3]) + 2*b[3]*(q[0]*q[1] + q[2]*q[3]),
             2*b[1]*(q[0]*q[2] + q[1]*q[3]) + 2*b[3]*(0.5 - q[1]**2 - q[2]**2)]) 

        # Error is sum of cross product between estimated direction and measured direction of fields
        e = np.cross(Accelerometer, v) + np.cross(Magnetometer, w) 
        
        if self.Ki > 0:
            self._eInt += e * self.SamplePeriod  
        else:
            self._eInt = np.array([0, 0, 0], dtype=float)
        
        # Apply feedback terms
        Gyroscope += self.Kp * e + self.Ki * self._eInt
        
        # Compute rate of change of quaternion
        qDot = 0.5 * quat.quatmult(q, np.hstack([0, Gyroscope])).flatten()

        # Integrate to yield quaternion
        q += qDot * self.SamplePeriod

        self.Quaternion = vector.normalize(q)
Example #16
def calc_QPos(R_initialOrientation, omega, initialPosition, accMeasured, rate):
    ''' Reconstruct position and orientation, from angular velocity and linear acceleration.
    Assumes a start in a stationary position. No compensation for drift.

    Parameters
    ----------
    omega : ndarray(N,3)
        Angular velocity, in [rad/s]
    accMeasured : ndarray(N,3)
        Linear acceleration, in [m/s^2]
    initialPosition : ndarray(3,)
        initial Position, in [m]
    R_initialOrientation: ndarray(3,3)
        Rotation matrix describing the initial orientation of the sensor,
        except for a mis-orientation with respect to gravity
    rate : float
        sampling rate, in [Hz]

    Returns
    -------
    q : ndarray(N,3)
        Orientation, expressed as a quaternion vector
    pos : ndarray(N,3)
        Position in space [m]

    Example
    -------
    >>> q1, pos1 = calc_QPos(R_initialOrientation, omega, initialPosition, acc, rate)

    '''

    # Transform recordings to angVel/acceleration in space --------------

    # Orientation of \vec{g} with the sensor in the "R_initialOrientation"
    g = 9.81
    g0 = np.linalg.inv(R_initialOrientation).dot(r_[0, 0, g])

    # for the remaining deviation, assume the shortest rotation to there
    q0 = vector.qrotate(accMeasured[0], g0)
    R0 = quat.quat2rotmat(q0)

    # combine the two, to form a reference orientation. Note that the sequence
    # is very important!
    R_ref = R_initialOrientation.dot(R0)
    q_ref = quat.rotmat2quat(R_ref)

    # Calculate orientation q by "integrating" omega -----------------
    q = quat.vel2quat(omega, q_ref, rate, 'bf')

    # Acceleration, velocity, and position ----------------------------
    # From q and the measured acceleration, get the \frac{d^2x}{dt^2}
    g_v = r_[0, 0, g]
    accReSensor = accMeasured - vector.rotate_vector(g_v, quat.quatinv(q))
    accReSpace = vector.rotate_vector(accReSensor, q)

    # Make the first position the reference position
    q = quat.quatmult(q, quat.quatinv(q[0]))

    # compensate for drift
    #drift = np.mean(accReSpace, 0)
    #accReSpace -= drift*0.7

    # Position and Velocity through integration, assuming 0-velocity at t=0
    vel = np.nan * np.ones_like(accReSpace)
    pos = np.nan * np.ones_like(accReSpace)

    for ii in range(accReSpace.shape[1]):
        vel[:, ii] = cumtrapz(accReSpace[:, ii], dx=1. / rate, initial=0)
        pos[:, ii] = cumtrapz(vel[:, ii],
                              dx=1. / rate,
                              initial=initialPosition[ii])

    return (q, pos)
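A hedged sketch of a call with synthetic data for a stationary, level sensor (1 s at 100 Hz); it assumes the imports used above (numpy with r_ in the namespace, scipy's cumtrapz, and the vector/quat helpers) are available. The reconstructed position should stay at the initial position:

rate = 100
omega = np.zeros((rate, 3))               # no rotation for 1 s
acc = np.tile([0., 0., 9.81], (rate, 1))  # the sensor measures only gravity
q, pos = calc_QPos(np.eye(3), omega, np.zeros(3), acc, rate)
print(pos[-1])                            # (numerically) still at the origin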
Example #17
# Plane orientation
n = vector.plane_orientation(v0, v1, v2)
print('The plane spanned by v0, v1, and v2 is orthogonal to {0}\n'.format(n))

# Gram-Schmidt orthogonalization
gs = vector.GramSchmidt(v0, v1, v2)
print('The Gram-Schmidt orthogonalization of the points v0, v1, and v2 is {0}\n'.format(np.reshape(gs, (3,3))))

# Shortest rotation
q_shortest = vector.q_shortest_rotation(v1, v2)
print('The shortest rotation that brings v1 in alignment with v2 is described by the quaternion {0}\n'.format(q_shortest))

# Rotation of a vector by a quaternion
q = [0, 0.1, 0]
rotated = vector.rotate_vector(v1, q)
print('v1 rotated by {0} is: {1}'.format(q, rotated))

print('Done')

'''
Output
------
The length of v2 is 1.414

The angle between v1 and v2 is 45.0 degree

v2 normalized is [ 0.70710678  0.70710678  0.        ]

The projection of v1 onto v2 is [ 0.5  0.5  0. ]
Example #18
def calc_QPos(R_initialOrientation, omega, initialPosition, accMeasured, rate):
    ''' Reconstruct position and orientation, from angular velocity and linear acceleration.
    Assumes a start in a stationary position. No compensation for drift.

    Parameters
    ----------
    omega : ndarray(N,3)
        Angular velocity, in [rad/s]
    accMeasured : ndarray(N,3)
        Linear acceleration, in [m/s^2]
    initialPosition : ndarray(3,)
        initial Position, in [m]
    R_initialOrientation: ndarray(3,3)
        Rotation matrix describing the initial orientation of the sensor,
        except for a mis-orientation with respect to gravity
    rate : float
        sampling rate, in [Hz]

    Returns
    -------
    q : ndarray(N,3)
        Orientation, expressed as a quaternion vector
    pos : ndarray(N,3)
        Position in space [m]

    Example
    -------
    >>> q1, pos1 = calc_QPos(R_initialOrientation, omega, initialPosition, acc, rate)

    '''

    # Transform recordings to angVel/acceleration in space --------------

    # Orientation of \vec{g} with the sensor in the "R_initialOrientation"
    g = 9.81
    g0 = np.linalg.inv(R_initialOrientation).dot(r_[0,0,g])

    # for the remaining deviation, assume the shortest rotation to there
    q0 = vector.qrotate(accMeasured[0], g0)    
    R0 = quat.quat2rotmat(q0)

    # combine the two, to form a reference orientation. Note that the sequence
    # is very important!
    R_ref = R_initialOrientation.dot(R0)
    q_ref = quat.rotmat2quat(R_ref)

    # Calculate orientation q by "integrating" omega -----------------
    q = quat.vel2quat(omega, q_ref, rate, 'bf')

    # Acceleration, velocity, and position ----------------------------
    # From q and the measured acceleration, get the \frac{d^2x}{dt^2}
    g_v = r_[0, 0, g] 
    accReSensor = accMeasured - vector.rotate_vector(g_v, quat.quatinv(q))
    accReSpace = vector.rotate_vector(accReSensor, q)

    # Make the first position the reference position
    q = quat.quatmult(q, quat.quatinv(q[0]))

    # compensate for drift
    #drift = np.mean(accReSpace, 0)
    #accReSpace -= drift*0.7

    # Position and Velocity through integration, assuming 0-velocity at t=0
    vel = np.nan*np.ones_like(accReSpace)
    pos = np.nan*np.ones_like(accReSpace)

    for ii in range(accReSpace.shape[1]):
        vel[:,ii] = cumtrapz(accReSpace[:,ii], dx=1./rate, initial=0)
        pos[:,ii] = cumtrapz(vel[:,ii],        dx=1./rate, initial=initialPosition[ii])

    return (q, pos)
Example #19
File: imu_calc.py  Project: Kojon74/PosFix
def analytical(
        R_initialOrientation=np.eye(3),
        omega=np.zeros((5, 3)),
        initialPosition=np.zeros(3),
        initialVelocity=np.zeros(3),
        accMeasured=np.column_stack((np.zeros((5, 2)), 9.81 * np.ones(5))),
        rate=100):
    ''' Reconstruct position and orientation with an analytical solution,
    from angular velocity and linear acceleration.
    Assumes a start in a stationary position. No compensation for drift.

    Parameters
    ----------
    R_initialOrientation: ndarray(3,3)
        Rotation matrix describing the initial orientation of the sensor,
        except for a mis-orientation with respect to gravity
    omega : ndarray(N,3)
        Angular velocity, in [rad/s]
    initialPosition : ndarray(3,)
        initial Position, in [m]
    initialVelocity : ndarray(3,)
        initial Velocity, in [m/s]
    accMeasured : ndarray(N,3)
        Linear acceleration, in [m/s^2]
    rate : float
        sampling rate, in [Hz]

    Returns
    -------
    q : ndarray(N,3)
        Orientation, expressed as a quaternion vector
    pos : ndarray(N,3)
        Position in space [m]
    vel : ndarray(N,3)
        Velocity in space [m/s]

    Example
    -------
     
    >>> q1, pos1, vel1 = analytical(R_initialOrientation, omega, initialPosition, initialVelocity, acc, rate)

    '''

    if omega.ndim == 1:
        raise ValueError('The input to "analytical" requires matrix inputs.')

    # Transform recordings to angVel/acceleration in space --------------

    # Orientation of \vec{g} with the sensor in the "R_initialOrientation"
    g = constants.g
    g0 = np.linalg.inv(R_initialOrientation).dot(np.r_[0, 0, g])

    # for the remaining deviation, assume the shortest rotation to there
    q0 = vector.q_shortest_rotation(accMeasured[0], g0)

    q_initial = rotmat.convert(R_initialOrientation, to='quat')

    # combine the two, to form a reference orientation. Note that the sequence
    # is very important!
    q_ref = quat.q_mult(q_initial, q0)

    # Calculate orientation q by "integrating" omega -----------------
    q = quat.calc_quat(omega, q_ref, rate, 'bf')

    # Acceleration, velocity, and position ----------------------------
    # From q and the measured acceleration, get the \frac{d^2x}{dt^2}
    g_v = np.r_[0, 0, g]
    accReSensor = accMeasured - vector.rotate_vector(g_v, quat.q_inv(q))
    accReSpace = vector.rotate_vector(accReSensor, q)

    # Make the first position the reference position
    q = quat.q_mult(q, quat.q_inv(q[0]))

    # compensate for drift
    #drift = np.mean(accReSpace, 0)
    #accReSpace -= drift*0.7

    # Position and Velocity through integration, assuming 0-velocity at t=0
    vel = np.nan * np.ones_like(accReSpace)
    pos = np.nan * np.ones_like(accReSpace)

    for ii in range(accReSpace.shape[1]):
        vel[:, ii] = cumtrapz(accReSpace[:, ii],
                              dx=1. / rate,
                              initial=initialVelocity[ii])
        pos[:, ii] = cumtrapz(vel[:, ii],
                              dx=1. / rate,
                              initial=initialPosition[ii])

    return (q, pos, vel)
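Since every argument has a default describing a stationary, level sensor, a no-argument call is a quick sanity check; a minimal sketch:

q, pos, vel = analytical()   # defaults: no rotation, gravity-only acceleration at 100 Hz
print(pos[-1], vel[-1])      # essentially zero; the tiny residual comes from 9.81 vs constants.g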
Example #20
def orientation(quats, out_file=None, title_text=None, deltaT=100):
    '''Calculates the orientation of an arrow-patch used to visualize a quaternion.
    Uses "_update_func" for the display.
    
    Parameters
    ----------
    quats : array [(N,3) or (N,4)]
            Quaternions describing the orientation.
    out_file : string
            Path- and file-name of the animated out-file (".mp4"). [Default=None]
    title_text : string
            Name of title of animation [Default=None]
    deltaT : int
            interval between frames [msec]. Smaller numbers make faster
            animations.
    
    Example
    -------
        To visualize a rotation about the (vertical) z-axis:
        
    >>> # Set the parameters
    >>> omega = np.r_[0, 10, 10]     # [deg/s]
    >>> duration = 2
    >>> rate = 100
    >>> q0 = [1, 0, 0, 0]
    >>> out_file = 'demo_patch.mp4'
    >>> title_text = 'Rotation Demo'
    >>> 
    >>> # Calculate the orientation
    >>> dt = 1./rate
    >>> num_rep = duration*rate
    >>> omegas = np.tile(omega, [num_rep, 1])
    >>> q = skin.quat.vel2quat(omegas, q0, rate, 'sf')
    >>>     
    >>> orientation(q, out_file, 'Well done!')
            
    '''

    # Initialize the 3D-figure
    fig = plt.figure()
    ax = fig.add_subplot(111, projection='3d')

    # Define the arrow-shape and the top/bottom colors
    delta = 0.01  # "Thickness" of arrow
    corners = [[0, 0, 0.6], [0.2, -0.2, 0], [0, 0, 0]]
    colors = ['r', 'b']

    # Calculate the arrow corners
    corner_array = np.column_stack(corners)

    corner_arrays = []
    corner_arrays.append(corner_array + np.r_[0., 0., delta])
    corner_arrays.append(corner_array - np.r_[0., 0., delta])

    # Calculate the new orientations, given the quaternion orientation
    all_corners = []
    for quat in quats:
        all_corners.append([
            vector.rotate_vector(corner_arrays[0], quat),
            vector.rotate_vector(corner_arrays[1], quat)
        ])

    # Animate the whole thing, using 'update_func'
    num_frames = len(quats)
    ani = animation.FuncAnimation(fig,
                                  _update_func,
                                  num_frames,
                                  fargs=[all_corners, colors, ax, title_text],
                                  interval=deltaT)

    # If requested, save the animation to a file
    if out_file is not None:
        try:
            ani.save(out_file)
            print('Animation saved to {0}'.format(out_file))
        except ValueError:
            print('Sorry, no animation saved!')
            print(
                'You probably have to install "ffmpeg", and add it to your PATH.'
            )

    plt.show()

    return
Example #21
    print(quatmult(c, c))
    print(quatmult(c, a))
    print(quatmult(d, d))

    print('The inverse of {0} is {1}'.format(a, quatinv(a)))
    print('The inverse of {0} is {1}'.format(d, quatinv(d)))
    print('The inverse of {0} is {1}'.format(e, quatinv(e)))
    print(quatmult(e, quatinv(e)))

    print(quat2vect(a))
    print('{0} is {1} degree'.format(a, quat2deg(a)))
    print('{0} is {1} degree'.format(c, quat2deg(c)))
    print(quat2deg(0.2))
    x = np.r_[1, 0, 0]
    vNull = np.r_[0, 0, 0]
    print(rotate_vector(x, a))

    v0 = [0., 0., 100.]
    vel = np.tile(v0, (1000, 1))
    rate = 100

    out = vel2quat(vel, [0., 0., 0.], rate, 'sf')
    print(out[-1:])
    plt.plot(out[:, 1:4])
    plt.show()

    print(deg2quat(15))
    print(deg2quat(quat2deg(a)))

    q = np.array([[0, 0, np.sin(0.1)], [0, np.sin(0.01), 0]])
    rMat = quat2rotmat(q)
Example #22
def analyze3Dmarkers(MarkerPos, ReferencePos):
    '''
    Take recorded positions from 3 markers, and calculate center-of-mass (COM) and orientation
    Can be used e.g. for the analysis of Optotrak data.

    Parameters
    ----------
    MarkerPos : ndarray, shape (N,9)
        x/y/z coordinates of 3 markers

    ReferencePos : ndarray, shape (1,9)
        x/y/z coordinates of markers in the reference position

    Returns
    -------
    Position : ndarray, shape (N,3)
        x/y/z coordinates of COM, relative to the reference position
    Orientation : ndarray, shape (N,3)
        Orientation relative to reference orientation, expressed as quaternion

    Example
    -------
    >>> (PosOut, OrientOut) = analyze3Dmarkers(MarkerPos, ReferencePos)


    '''

    # Specify where the x-, y-, and z-position are located in the data
    cols = {'x': r_[(0, 3, 6)]}
    cols['y'] = cols['x'] + 1
    cols['z'] = cols['x'] + 2

    # Calculate the position
    cog = np.vstack((sum(MarkerPos[:, cols['x']],
                         axis=1), sum(MarkerPos[:, cols['y']], axis=1),
                     sum(MarkerPos[:, cols['z']], axis=1))).T / 3.

    cog_ref = np.vstack(
        (sum(ReferencePos[cols['x']]), sum(
            ReferencePos[cols['y']]), sum(ReferencePos[cols['z']]))).T / 3.

    position = cog - cog_ref

    # Calculate the orientation
    numPoints = len(MarkerPos)
    orientation = np.ones((numPoints, 3))

    refOrientation = vector.plane_orientation(ReferencePos[:3],
                                              ReferencePos[3:6],
                                              ReferencePos[6:])

    for ii in range(numPoints):
        '''The three points define a triangle. The first rotation is such
        that the orientation of the reference-triangle, defined as the
        direction perpendicular to the triangle, is rotated along the shortest
        path to the current orientation.
        In other words, this is a rotation outside the plane spanned by the three
        marker points.'''

        curOrientation = vector.plane_orientation(MarkerPos[ii, :3],
                                                  MarkerPos[ii, 3:6],
                                                  MarkerPos[ii, 6:])
        alpha = vector.angle(refOrientation, curOrientation)

        if alpha > 0:
            n1 = vector.normalize(np.cross(refOrientation, curOrientation))
            q1 = n1 * np.sin(alpha / 2)
        else:
            q1 = r_[0, 0, 0]

        # Now rotate the triangle into this orientation ...
        refPos_after_q1 = vector.rotate_vector(
            np.reshape(ReferencePos, (3, 3)), q1)
        '''Find which further rotation in the plane spanned by the three marker
        points is necessary to bring the data into the measured orientation.'''

        Marker_0 = MarkerPos[ii, :3]
        Marker_1 = MarkerPos[ii, 3:6]
        Vector10 = Marker_0 - Marker_1
        vector10_ref = refPos_after_q1[0] - refPos_after_q1[1]
        beta = vector.angle(Vector10, vector10_ref)

        q2 = curOrientation * np.sin(beta / 2)

        if np.cross(vector10_ref, Vector10).dot(curOrientation) <= 0:
            q2 = -q2
        orientation[ii, :] = quat.quatmult(q2, q1)

    return (position, orientation)
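A usage sketch mirroring the unit test at the top of this page (and assuming the same np/quat/vector imports): three markers rotate about the z-axis while translating, and the recovered position and orientation match the generated translation and quaternion:

t = np.arange(0, 10, 0.1)
translation = (np.c_[[1, 1, 0]] * t).T
M = np.array([[0., 0., 0.], [1., 0., 0.], [1., 1., 0.]])
M -= np.mean(M, axis=0)
q = np.vstack((np.zeros_like(t), np.zeros_like(t), quat.deg2quat(100 * t))).T
data = np.hstack([vector.rotate_vector(m, q) + translation for m in M])
pos, ori = analyze3Dmarkers(data, data[0])
print(np.max(np.abs(pos - translation)), np.max(np.abs(ori - q)))   # both close to zero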
Example #23
def analyze3Dmarkers(MarkerPos, ReferencePos):
    '''
    Take recorded positions from 3 markers, and calculate center-of-mass (COM) and orientation
    Can be used e.g. for the analysis of Optotrak data.

    Parameters
    ----------
    MarkerPos : ndarray, shape (N,9)
        x/y/z coordinates of 3 markers

    ReferencePos : ndarray, shape (1,9)
        x/y/z coordinates of markers in the reference position

    Returns
    -------
    Position : ndarray, shape (N,3)
        x/y/z coordinates of COM, relative to the reference position
    Orientation : ndarray, shape (N,3)
        Orientation relative to reference orientation, expressed as quaternion

    Example
    -------
    >>> (PosOut, OrientOut) = analyze3Dmarkers(MarkerPos, ReferencePos)


    '''

    # Specify where the x-, y-, and z-position are located in the data
    cols = {'x' : r_[(0,3,6)]} 
    cols['y'] = cols['x'] + 1
    cols['z'] = cols['x'] + 2    

    # Calculate the position
    cog = np.vstack(( sum(MarkerPos[:,cols['x']], axis=1),
                      sum(MarkerPos[:,cols['y']], axis=1),
                      sum(MarkerPos[:,cols['z']], axis=1) )).T/3.

    cog_ref = np.vstack(( sum(ReferencePos[cols['x']]),
                          sum(ReferencePos[cols['y']]),
                          sum(ReferencePos[cols['z']]) )).T/3.                      

    position = cog - cog_ref    

    # Calculate the orientation    
    numPoints = len(MarkerPos)
    orientation = np.ones((numPoints,3))

    refOrientation = vector.plane_orientation(ReferencePos[:3], ReferencePos[3:6], ReferencePos[6:])

    for ii in range(numPoints):
        '''The three points define a triangle. The first rotation is such
        that the orientation of the reference-triangle, defined as the
        direction perpendicular to the triangle, is rotated along the shortest
        path to the current orientation.
        In other words, this is a rotation outside the plane spanned by the three
        marker points.'''

        curOrientation = vector.plane_orientation( MarkerPos[ii,:3], MarkerPos[ii,3:6], MarkerPos[ii,6:])
        alpha = vector.angle(refOrientation, curOrientation)        

        if alpha > 0:
            n1 = vector.normalize( np.cross(refOrientation, curOrientation) )
            q1 = n1 * np.sin(alpha/2)
        else:
            q1 = r_[0,0,0]

        # Now rotate the triangle into this orientation ...
        refPos_after_q1 = vector.rotate_vector(np.reshape(ReferencePos,(3,3)), q1)

        '''Find which further rotation in the plane spanned by the three marker
        points is necessary to bring the data into the measured orientation.'''

        Marker_0 = MarkerPos[ii,:3]
        Marker_1 = MarkerPos[ii,3:6]
        Vector10 = Marker_0 - Marker_1
        vector10_ref = refPos_after_q1[0]-refPos_after_q1[1]
        beta = vector.angle(Vector10, vector10_ref)

        q2 = curOrientation * np.sin(beta/2)

        if np.cross(vector10_ref,Vector10).dot(curOrientation)<=0:
            q2 = -q2
        orientation[ii,:] = quat.quatmult(q2, q1)

    return (position, orientation)