Example #1
class BachmannCF(OrientationFilter):
    """
    Implementation of the complementary filter algorithm proposed by Bachmann.

    See E. R. Bachmann. "Inertial and Magnetic Tracking of Limb Segment
    Orientation for Inserting Humans into Synthetic Environments". PhD thesis,
    Naval Postgraduate School, California, 2000.

    @ivar vectorObservation: L{TimeSeries} of vector observation results.
    """
    @prepend_method_doc(OrientationFilter)
    def __init__(self, initialTime, initialRotation, k, **kwargs):
        """
        @param k: Filter blending coefficient (float). At each timestep, a
            correction of M{1/k} times the difference between the gyro
            integration and vector observation results will be applied.
        """
        OrientationFilter.__init__(self, initialTime, initialRotation)
        self.rotation.add(initialTime, initialRotation)
        self._k = float(k)
        self.qHat = initialRotation.copy()
        self._vectorObservation = vector_observation.FQA()
        self.vectorObservation = TimeSeries()

    def _update(self, accel, mag, gyro, dt, t):
        dotq = 0.5 * self.qHat * Quaternion(0, *gyro)
        self.qHat += dotq * dt
        qMeas = self._vectorObservation(-accel, mag)
        if self.qHat.dot(qMeas) < 0:
            qMeas.negate()
        qError = qMeas - self.qHat
        self.qHat += (1 / self._k) * dt * qError
        self.qHat.normalise()
        self.vectorObservation.add(t, qMeas)
        self.rotation.add(t, self.qHat)
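The _update above interleaves gyro integration with a 1/k correction towards the vector observation. Below is a minimal standalone sketch of the same blending step, using plain numpy arrays in [w, x, y, z] order instead of the library's Quaternion class; the helper names are illustrative only.

import numpy as np

def quat_mult(q, p):
    """Hamilton product of quaternions stored as [w, x, y, z] arrays."""
    w1, x1, y1, z1 = q
    w2, x2, y2, z2 = p
    return np.array([w1*w2 - x1*x2 - y1*y2 - z1*z2,
                     w1*x2 + x1*w2 + y1*z2 - z1*y2,
                     w1*y2 - x1*z2 + y1*w2 + z1*x2,
                     w1*z2 + x1*y2 - y1*x2 + z1*w2])

def bachmann_step(q_hat, gyro, q_meas, dt, k):
    """One blended update: gyro integration plus a 1/k correction towards q_meas.

    q_hat, q_meas: length-4 arrays; gyro: length-3 angular rate array.
    """
    dotq = 0.5 * quat_mult(q_hat, np.concatenate(([0.0], gyro)))
    q_hat = q_hat + dotq * dt                          # inertial update
    if np.dot(q_hat, q_meas) < 0:                      # resolve quaternion sign ambiguity
        q_meas = -q_meas
    q_hat = q_hat + (1.0 / k) * dt * (q_meas - q_hat)  # drift correction
    return q_hat / np.linalg.norm(q_hat)               # renormalise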
Example #2
    def __init__(self, initialTime, initialRotation, k, joint, offset,
            **kwargs):
        """
        @param joint: The L{Joint} the IMU is attached to.
        @param offset: The offset of the IMU in the joint's co-ordinate frame
            (3x1 L{np.ndarray})
        """
        OrientationFilter.__init__(self, initialTime, initialRotation)
        self.rotation.add(initialTime, initialRotation)
        self.qHat = initialRotation.copy()
        self.k = k
        self.joint = joint
        self.offset = offset

        self._vectorObservation = vector_observation.GramSchmidt()
        self.qMeas = Quaternion.nan()
        self.jointAccel = vectors.nan()

        self.gyroFIFO = collections.deque([], maxlen=3)
        self.accelFIFO = collections.deque([], maxlen=2)
        self.magFIFO = collections.deque([], maxlen=2)

        self.isRoot = not joint.hasParent
        self.children = joint.childJoints

        self.vectorObservation = TimeSeries()
        self.jointAcceleration = TimeSeries()
Example #3
    def __init__(self, initialTime, initialRotation, initialRotationalVelocity,
            initialCovariance, measurementCovariance, D, tau, **kwargs):
        """
        @param initialRotationalVelocity: Initial angular rate estimate
            (3x1 L{np.ndarray}).
        @param initialCovariance: Initial state covariance matrix (7x7).
        @param measurementCovariance: Measurement noise covariance (7x7).
        @param D: Variance of process noise model in rad^2/s^2 (float).
        @param tau: Time constant of process noise model in seconds (float).
        """
        OrientationFilter.__init__(self, initialTime, initialRotation)
        self.rotation.add(initialTime, initialRotation, initialCovariance[3:,3:])

        self.tau = float(tau)
        self.D = float(D)

        self._vectorObservation = vector_observation.FQA()

        # Process noise covariance matrix, updated on each call.
        self.Q = np.asmatrix(np.zeros((7,7)))

        # Measurement noise covariance matrix
        self.R = measurementCovariance

        # Prior covariance estimate and state vector initialisation.
        self.P_minus = initialCovariance
        self.xHat_minus = np.vstack((initialRotationalVelocity,
            np.vstack(initialRotation.components)))

        self.vectorObservation = TimeSeries()
        self.rotationalVelocity = TimeSeries()
Example #4
    def __init__(self, initialTime, initialRotation, initialRotationalVelocity,
                 initialCovariance, measurementCovariance, D, tau, **kwargs):
        """
        @param initialRotationalVelocity: Initial angular rate estimate
            (3x1 L{np.ndarray}).
        @param initialCovariance: Initial state covariance matrix (7x7).
        @param measurementCovariance: Measurement noise covariance (7x7).
        @param D: Variance of process noise model in rad^2/s^2 (float).
        @param tau: Time constant of process noise model in seconds (float).
        """
        OrientationFilter.__init__(self, initialTime, initialRotation)
        self.rotation.add(initialTime, initialRotation, initialCovariance[3:,
                                                                          3:])

        self.tau = float(tau)
        self.D = float(D)

        self._vectorObservation = vector_observation.FQA()

        # Process noise covariance matrix, updated on each call.
        self.Q = np.asmatrix(np.zeros((7, 7)))

        # Measurement noise covariance matrix
        self.R = measurementCovariance

        # Prior covariance estimate and state vector initialisation.
        self.P_minus = initialCovariance
        self.xHat_minus = np.vstack(
            (initialRotationalVelocity, np.vstack(initialRotation.components)))

        self.vectorObservation = TimeSeries()
        self.rotationalVelocity = TimeSeries()
Example #5
    def __init__(self, initialTime, initialRotation, k, joint, offset,
                 **kwargs):
        """
        @param joint: The L{Joint} the IMU is attached to.
        @param offset: The offset of the IMU in the joint's co-ordinate frame
            (3x1 L{np.ndarray})
        """
        OrientationFilter.__init__(self, initialTime, initialRotation)
        self.rotation.add(initialTime, initialRotation)
        self.qHat = initialRotation.copy()
        self.k = k
        self.joint = joint
        self.offset = offset

        self._vectorObservation = vector_observation.GramSchmidt()
        self.qMeas = Quaternion.nan()
        self.jointAccel = vectors.nan()

        self.gyroFIFO = collections.deque([], maxlen=3)
        self.accelFIFO = collections.deque([], maxlen=2)
        self.magFIFO = collections.deque([], maxlen=2)

        self.isRoot = not joint.hasParent
        self.children = joint.childJoints

        self.vectorObservation = TimeSeries()
        self.jointAcceleration = TimeSeries()
Example #6
def transform_trajectory(trajectory, R, p):
    """Create new trajectory relative the given transformation

    If R1(t) and p1(t) is the rotation and translation given by inout trajectory, then
    this function returns a new trajectory that fulfills the following.

    Let X1 = R1(t).T[X - p1(t)] be the coordinate of point X in input trajectory coordinates.
    Then X2 = R X1 + p is the same point in the coordinate frame of the new trajectory
    Since X2 = R2(t).T [X - p2(t)] then we have
    R2(t) = R1(t)R and p2(t) = p1(t) + R1(t)p
    """
    ts_q1 = trajectory.sampled.rotationKeyFrames
    q1 = ts_q1.values
    ts_p1 = trajectory.sampled.positionKeyFrames
    p1 = ts_p1.values

    # Rotation
    Q = Quaternion.fromMatrix(R)
    q2 = q1 * Q
    ts_q2 = TimeSeries(ts_q1.timestamps, q2)

    # Translation
    p2 = p1 + q1.rotateVector(p)
    ts_p2 = TimeSeries(ts_p1.timestamps, p2)

    sampled = SampledTrajectory(positionKeyFrames=ts_p2,
                                rotationKeyFrames=ts_q2)
    smoothen = False  # MUST be the same as used in Dataset class
    splined = SplinedTrajectory(sampled, smoothRotations=smoothen)
    return splined
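A small numerical check of the relations in the docstring above, using 3x3 rotation matrices and plain numpy rather than the library's Quaternion types; everything below is illustrative.

import numpy as np

rng = np.random.default_rng(0)

def random_rotation():
    """Return a random proper rotation matrix via QR decomposition."""
    Q, _ = np.linalg.qr(rng.normal(size=(3, 3)))
    return Q * np.sign(np.linalg.det(Q))

R1, R = random_rotation(), random_rotation()
p1, p = rng.normal(size=3), rng.normal(size=3)
R2, p2 = R1 @ R, p1 + R1 @ p            # the transformed trajectory's pose

X = rng.normal(size=3)                  # an arbitrary world point
X1 = R1.T @ (X - p1)                    # point in input-trajectory coordinates
X2 = R.T @ (X1 - p)                     # same point in the new trajectory's frame
assert np.allclose(X2, R2.T @ (X - p2))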
Example #7
def testSplineTrajectories():
    for i in range(100):
        t = randomTimeSequence()
        p = TimeSeries(t, randomPositionSequence(t))
        r = TimeSeries(t, randomRotationSequence(t))
        T = SplinedTrajectory(SampledTrajectory(p, r), smoothRotations=False)
        yield checkTrajectory, T, p, r
        T = SplinedTrajectory(SampledTrajectory(p, r), smoothRotations=True)
        yield checkTrajectory, T, p, TimeSeries(t, r.values.smoothed())
Example #8
    def fromArrays(time, position, rotation):
        """
        Construct a L{SampledTrajectory} from time, position and rotation arrays.

        @param time: sequence of sample times.
        @param position: sequence of position samples.
        @param rotation: sequence of rotation samples.
        """
        return SampledTrajectory(TimeSeries(time, position),
                                 TimeSeries(time, rotation))
Example #9
 def __init__(self, initialTime, initialRotation, k, **kwargs):
     """
     @param k: Filter blending coefficient (float). At each timestep, a
         correction of M{1/k} times the difference between the gyro
         integration and vector observation results will be applied.
     """
     OrientationFilter.__init__(self, initialTime, initialRotation)
     self.rotation.add(initialTime, initialRotation)
     self._k = float(k)
     self.qHat = initialRotation.copy()
     self._vectorObservation = vector_observation.FQA()
     self.vectorObservation = TimeSeries()
Example #10
 def __init__(self, initialTime, initialRotation, k, aT, **kwargs):
     """
     @param aT: Acceleration threshold (float). The correction step will
         only be performed if the condition M{abs(norm(accel) - 1) <= aT}
         is met.
     """
     OrientationFilter.__init__(self, initialTime, initialRotation)
     self.rotation.add(initialTime, initialRotation)
     self._k = float(k)
     self._aT = float(aT)
     self.qHat = initialRotation.copy()
     self._vectorObservation = vector_observation.GramSchmidt()
     self.vectorObservation = TimeSeries()
Example #11
 def load_observations(h5_file):
     framegroup = h5_file['camera']
     frames = sorted(framegroup.keys())
     ts = TimeSeries()
     for fkey in frames:
         group = framegroup[fkey]
         obs_dict = {
             lm_id: ip.reshape(2, 1)
             for lm_id, ip in zip(group['landmarks'].value,
                                  group['measurements'].value.T)
         }
         t = group['time'].value
         ts.add(t, obs_dict)
     return ts
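load_observations expects an HDF5 layout with a 'camera' group containing one group per frame, each holding 'time', 'landmarks' and 'measurements' datasets (note that .value is the older h5py API; h5py 3.x reads with [()] instead). A hedged sketch of writing a file in that layout follows; the frame naming and data values are illustrative.

import h5py
import numpy as np

with h5py.File('observations.h5', 'w') as h5_file:
    framegroup = h5_file.create_group('camera')
    for i, t in enumerate(np.arange(0.0, 1.0, 0.1)):
        group = framegroup.create_group('frame%06d' % i)
        group['time'] = t                              # frame timestamp
        group['landmarks'] = np.array([3, 7, 11])      # landmark identifiers
        group['measurements'] = np.random.rand(2, 3)   # 2xN image points, one column per landmark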
Example #12
def testOffsetTrajectories():
    for i in range(10):
        T = RandomTrajectory()
        t = T.positionKeyFrames.timestamps
        dt = t[1] - t[0]
        p = T.position(t)
        r = T.rotation(t)
        offset = randomPosition()
        rotationOffset = randomRotation()
        oT = OffsetTrajectory(T, offset, rotationOffset)
        offsetPositions = p + r.rotateVector(offset)
        offsetRotations = r * rotationOffset
        yield checkTrajectory, oT, TimeSeries(t, offsetPositions), TimeSeries(
            t, offsetRotations)
Example #13
    def __init__(self, model, initialTime=0,
            initialPosition=np.zeros((3,1)),
            initialVelocity=np.zeros((3,1)),
            accelerationVar=0.2, velocityVar=0.5,
            rejectionProbabilityThreshold=0.4,
            **kwargs):
        """
        @param initialVelocity: Initial velocity estimate (3x1 L{np.ndarray})
        @param accelerationVar: Acceleration variance (float).
        @param velocityVar: Velocity variance (float).
        @param rejectionProbabilityThreshold: Probability below which a
            velocity update will be rejected (float, 0 <= p <= 1)
        """
        PositionEstimator.__init__(self, model, initialTime, initialPosition)

        self._accelerationSigma = math.sqrt(accelerationVar)
        self._measurementVariance = np.diag([velocityVar]*3)
        self._rejectionThreshold = rejectionProbabilityThreshold

        self._points = list(model.points)
        for point in self._points:
            point.contactProbabilities = TimeSeries()

        self.kalman = KalmanFilter(
                stateTransitionMatrix = self.transitionMatrix(1),
                controlMatrix = self.controlMatrix(1),
                measurementMatrix = np.hstack((np.zeros((3,3)),np.eye(3))),
                state = np.vstack((initialPosition,initialVelocity)),
                stateCovariance = self.processCovariance(1),
                processCovariance = self.processCovariance(1),
                measurementCovariance = self._measurementVariance)
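The transitionMatrix, controlMatrix and processCovariance methods are not shown in this example. For a constant-velocity model with state [position; velocity] driven by acceleration input they would plausibly take the standard forms below; this is a sketch under that assumption, not the library's implementation.

import numpy as np

def transition_matrix(dt):
    """Constant-velocity state transition for a [position; velocity] state."""
    F = np.eye(6)
    F[0:3, 3:6] = dt * np.eye(3)
    return F

def control_matrix(dt):
    """Maps a 3x1 acceleration input onto position and velocity."""
    return np.vstack((0.5 * dt**2 * np.eye(3), dt * np.eye(3)))

def process_covariance(dt, acceleration_sigma):
    """Process noise induced by white acceleration with the given standard deviation."""
    G = control_matrix(dt)
    return acceleration_sigma**2 * (G @ G.T)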
Example #14
    def __init__(self, keyFrames=None):
        """
        Initialise trajectory.

        @param keyFrames: A L{TimeSeries} of rotation key frame samples.
        """
        self.rotationKeyFrames = TimeSeries() if keyFrames is None else keyFrames
Example #15
 def save_trajectory(trajectory, h5f):
     traj_group = h5f.create_group('trajectory')
     save_timeseries(trajectory.sampled.positionKeyFrames,
                     traj_group.create_group('position'))
     rot_ts = TimeSeries(
         trajectory.sampled.rotationKeyFrames.timestamps,
         trajectory.sampled.rotationKeyFrames.values.array.T)
     save_timeseries(rot_ts, traj_group.create_group('rotation'))
Example #16
 def load_trajectory(group):
     pos_ts = load_timeseries(group['position'])
     rot_array_ts = load_timeseries(group['rotation'])
     rot_ts = TimeSeries(rot_array_ts.timestamps,
                         QuaternionArray(rot_array_ts.values.T))
     sampled = SampledTrajectory(positionKeyFrames=pos_ts,
                                 rotationKeyFrames=rot_ts)
     splined = SplinedTrajectory(sampled, smoothRotations=False)
     return splined
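Together with save_trajectory above, this gives a round trip through HDF5. A usage sketch, assuming a SplinedTrajectory instance named trajectory is already available; the file name is illustrative.

import h5py

with h5py.File('trajectory.h5', 'w') as h5f:
    save_trajectory(trajectory, h5f)      # writes 'trajectory/position' and 'trajectory/rotation'
with h5py.File('trajectory.h5', 'r') as h5f:
    restored = load_trajectory(h5f['trajectory'])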
Example #17
    def __init__(self, initialTime, initialRotation):
        """
        Initialise orientation filter.

        @param initialTime: Initial time.
        @param initialRotation: Initial rotation (L{Quaternion}).
        """
        assert isinstance(initialRotation, Quaternion)
        self.rotation = TimeSeries()
Example #18
 def __init__(self, camera_platform, end_time):
     self.camera_platform = camera_platform
     self.end_time = end_time
     camera = self.camera_platform.camera
     camera.measurements = TimeSeries()
     # Start the sampling process
     timer = self.camera_platform.timer
     timer.callback = self._timer_callback
     period = 1. / camera.frame_rate
     timer.start(period, repeat=True)
Example #19
class OrientCF(OrientationFilter):
    """
    Implementation of the complementary filter used on the Orient IMU.

    See A. Young, M. Ling, and D. K. Arvind. "Orient-2: A Realtime Wireless
    Posture Tracking System using Local Orientation Estimation". in Proc.
    4th Workshop on Embedded Network Sensors, pp 53-57. ACM, 2007.

    @ivar vectorObservation: L{TimeSeries} of vector observation results.
    """

    @prepend_method_doc(BachmannCF)
    def __init__(self, initialTime, initialRotation, k, aT, **kwargs):
        """
        @param aT: Acceleration threshold (float). The correction step will
            only be performed if the condition M{abs(norm(accel) - 1) <= aT}
            is met.
        """
        OrientationFilter.__init__(self, initialTime, initialRotation)
        self.rotation.add(initialTime, initialRotation)
        self._k = float(k)
        self._aT = float(aT)
        self.qHat = initialRotation.copy()
        self._vectorObservation = vector_observation.GramSchmidt()
        self.vectorObservation = TimeSeries()

    def _update(self, accel, mag, gyro, dt, t):
        dotq = 0.5 * self.qHat * Quaternion(0,*gyro)
        self.qHat += dotq * dt

        if abs(vectors.norm(accel) - 1) < self._aT:
            qMeas = self._vectorObservation(-accel, mag)
            if self.qHat.dot(qMeas) < 0:
                qMeas.negate()
            qError = qMeas - self.qHat
            self.qHat += (1/self._k) * dt * qError
        else:
            qMeas = Quaternion.nan()
        self.vectorObservation.add(t, qMeas)

        self.qHat.normalise()
        self.rotation.add(t, self.qHat)
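The gate assumes accelerometer output in units of g, so a stationary sensor reads a norm of roughly 1. A short numeric illustration with hypothetical values:

import numpy as np

aT = 0.1                                     # hypothetical threshold, in g
accel = np.array([[0.02], [-0.05], [0.98]])  # hypothetical accelerometer sample, in g
apply_correction = abs(np.linalg.norm(accel) - 1) < aT
print(apply_correction)                      # True: close enough to 1 g to trust the vector observation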
Example #20
 def __init__(self, initialTime, initialRotation, k, **kwargs):
     """
     @param k: Filter blending coefficient (float). At each timestep, a
         correction of M{1/k} times the difference between the gyro
         integration and vector observation results will be applied.
     """
     OrientationFilter.__init__(self, initialTime, initialRotation)
     self.rotation.add(initialTime, initialRotation)
     self._k = float(k)
     self.qHat = initialRotation.copy()
     self._vectorObservation = vector_observation.FQA()
     self.vectorObservation = TimeSeries()
Example #21
 def __init__(self, initialTime, initialRotation, k, aT, **kwargs):
     """
     @param aT: Acceleration threshold (float). The correction step will
         only be performed if the condition M{abs(norm(accel) - 1) <= aT}
         is met.
     """
     OrientationFilter.__init__(self, initialTime, initialRotation)
     self.rotation.add(initialTime, initialRotation)
     self._k = float(k)
     self._aT = float(aT)
     self.qHat = initialRotation.copy()
     self._vectorObservation = vector_observation.GramSchmidt()
     self.vectorObservation = TimeSeries()
Example #22
    def __init__(self,
                 imu,
                 samplingPeriod,
                 calibration=None,
                 filter=None,
                 sampleCallback=None,
                 initialTime=0):
        """
        Initialise IMU behaviour.

        @param imu: L{IMU} on which the behaviour executes.
        @param samplingPeriod: Interval at which to sample and process (float).
        @param calibration: Mapping from IMU L{Sensor}s to
            L{SensorCalibration}s (dict).
        @param filter: L{OrientationFilter} to update with each set of samples.
        @param sampleCallback: Function to call after each sample. The
            behaviour object will be passed as a single argument.
        @param initialTime: Initial time for local timekeeping.
        """

        self.imu = imu
        self.samplingPeriod = samplingPeriod
        self.calibration = calibration
        self.filter = filter

        for sensor in imu.sensors:
            sensor.rawMeasurements = TimeSeries()
            if self.calibration is not None:
                sensor.calibratedMeasurements = TimeSeries()

        self.sampleCallback = sampleCallback

        self.timerMux = TimerMultiplexer(imu.timer)

        PeriodicSampler(imu.adc, imu.sensors, VirtualTimer(self.timerMux),
                        samplingPeriod, self._handleSample)

        self._time = initialTime
Example #23
class BachmannCF(OrientationFilter):
    """
    Implementation of the complementary filter algorithm proposed by Bachmann.

    See E. R. Bachmann. "Inertial and Magnetic Tracking of Limb Segment
    Orientation for Inserting Humans into Synthetic Environments". PhD thesis,
    Naval Postgraduate School, California, 2000.

    @ivar vectorObservation: L{TimeSeries} of vector observation results.
    """

    @prepend_method_doc(OrientationFilter)
    def __init__(self, initialTime, initialRotation, k, **kwargs):
        """
        @param k: Filter blending coefficient (float). At each timestep, a
            correction of M{1/k} times the difference between the gyro
            integration and vector observation results will be applied.
        """
        OrientationFilter.__init__(self, initialTime, initialRotation)
        self.rotation.add(initialTime, initialRotation)
        self._k = float(k)
        self.qHat = initialRotation.copy()
        self._vectorObservation = vector_observation.FQA()
        self.vectorObservation = TimeSeries()

    def _update(self, accel, mag, gyro, dt, t):
        dotq = 0.5 * self.qHat * Quaternion(0, *gyro)
        self.qHat += dotq * dt
        qMeas = self._vectorObservation(-accel, mag)
        if self.qHat.dot(qMeas) < 0:
            qMeas.negate()
        qError = qMeas - self.qHat
        self.qHat += (1/self._k) * dt * qError
        self.qHat.normalise()
        self.vectorObservation.add(t, qMeas)
        self.rotation.add(t, self.qHat)
Example #24
    def __init__(self, posMarker, refMarkers, refVectors=None, refTime=0):
        self.posMarker = posMarker
        self.refMarkers = refMarkers
        if refVectors is None:
            self.refVectors = [
                refMarker.position(refTime) - posMarker.position(refTime)
                for refMarker in self.refMarkers
            ]
        else:
            self.refVectors = refVectors
        assert (~np.any(np.isnan(self.refVectors)))

        self.positionKeyFrames = self.posMarker.positionKeyFrames
        t = self.positionKeyFrames.timestamps
        vectorObservation = DavenportQ(self.refVectors)
        measVectors = np.array([
            ref.position(t) - self.posMarker.position(t)
            for ref in self.refMarkers
        ])
        rotations = QuaternionArray([
            vectorObservation(*vectors).conjugate
            for vectors in np.split(measVectors, len(t), axis=2)
        ])
        self.rotationKeyFrames = TimeSeries(t, rotations)
Example #25
 def rotationKeyFrames(self):
     t = self.sampled.rotationKeyFrames.timestamps
     t = t[(t >= self.startTime) & (t <= self.endTime)]
     r = self.rotation(t)
     return TimeSeries(t, r)
Example #26
 def load_timeseries(group):
     times = group['timestamps']
     values = group['data']
     return TimeSeries(timestamps=times, values=values)
Example #27
 def load_timeseries(group):
     timestamps = group['timestamps'].value
     data = group['data'].value
     if data.shape[1] == 4:
         data = QuaternionArray(data)
     return TimeSeries(timestamps, data)
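save_timeseries itself is not shown in these examples. A counterpart consistent with the reader above might look like the sketch below, assuming TimeSeries exposes timestamps and values as array-like attributes and that quaternion values have already been converted to a plain array (as save_trajectory does).

import numpy as np

def save_timeseries(ts, group):
    """Write a TimeSeries into an HDF5 group readable by load_timeseries above."""
    group['timestamps'] = np.asarray(ts.timestamps)
    group['data'] = np.asarray(ts.values)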
Example #28
 def positionKeyFrames(self):
     t = self.sampled.positionKeyFrames.timestamps
     t = t[(t >= self.startTime) & (t <= self.endTime)]
     p = self.position(t)
     return TimeSeries(t, p)
Example #29
class YunEKF(OrientationFilter):
    """
    Implementation of the extended Kalman filter proposed by Yun et al.

    See X. Yun and E. R. Bachmann. "Design, Implementation, and Experimental
    Results of a Quaternion-Based Kalman Filter for Human Body Motion
    Tracking". IEEE Transactions on Robotics, vol 22, no 6, pp 1216-1227, 2006.

    @ivar vectorObservation: L{TimeSeries} of vector observation results.
    @ivar rotationalVelocity: L{TimeSeries} of angular rate estimates.
    """

    @prepend_method_doc(OrientationFilter)
    def __init__(self, initialTime, initialRotation, initialRotationalVelocity,
            initialCovariance, measurementCovariance, D, tau, **kwargs):
        """
        @param initialRotationalVelocity: Initial angular rate estimate
            (3x1 L{np.ndarray}).
        @param initialCovariance: Initial state covariance matrix (7x7).
        @param measurementCovariance: Measurement noise covariance (7x7).
        @param D: Variance of process noise model in rad^2/s^2 (float).
        @param tau: Time constant of process noise model in seconds (float).
        """
        OrientationFilter.__init__(self, initialTime, initialRotation)
        self.rotation.add(initialTime, initialRotation, initialCovariance[3:,3:])

        self.tau = float(tau)
        self.D = float(D)

        self._vectorObservation = vector_observation.FQA()

        # Process noise covariance matrix, updated on each call.
        self.Q = np.asmatrix(np.zeros((7,7)))

        # Measurement noise covariance matrix
        self.R = measurementCovariance

        # Prior covariance estimate and state vector initialisation.
        self.P_minus = initialCovariance
        self.xHat_minus = np.vstack((initialRotationalVelocity,
            np.vstack(initialRotation.components)))

        self.vectorObservation = TimeSeries()
        self.rotationalVelocity = TimeSeries()

    def _update(self, accel, mag, gyro, dt, t):
        # Note: Measurement matrix H is equivalent to the identity matrix
        # so is left out to simplify calculations

        # Calculate Kalman gain
        K = self.P_minus * np.linalg.inv(self.P_minus + self.R)

        # Construct measurement vector z
        self.vo = self._vectorObservation(-accel, mag)
        if self.vo.dot(Quaternion(*self.xHat_minus[3:])) < 0:
            self.vo.negate()
        z = np.vstack((gyro, np.vstack(self.vo.components)))

        # Calculate state update
        xHat = self.xHat_minus + K * (z - self.xHat_minus)
        P = (np.eye(7) - K) * self.P_minus

        # Normalise quaternion component of state vector
        qHat = Quaternion(*xHat[3:]).normalise()
        xHat[3:] = np.vstack(qHat.components)

        # Calculate prediction
        self.xHat_minus = xHat + self.dotx(xHat) * dt
        Phi = self.Phi(xHat, dt)
        self.Q[[0,1,2],[0,1,2]] = \
            (self.D / (2 * self.tau)) * (1 - math.e**((-2 * dt) / self.tau))
        self.P_minus = Phi * P * Phi.T + self.Q

        self.rotation.add(t, qHat, P[3:,3:])
        self.vectorObservation.add(t, self.vo)
        self.rotationalVelocity.add(t, xHat[:3], P[:3,:3])

    def dotx(self, xHat):
        """
        Calculate state derivative vector.
        """
        dotx = np.empty((7, 1))
        dotx[:3] = -xHat[:3] / self.tau
        dotx[3] = -0.5 * (xHat[0]*xHat[4] + xHat[1]*xHat[5] + xHat[2]*xHat[6])
        dotx[4] = 0.5 * (xHat[0]*xHat[3] - xHat[1]*xHat[6] +xHat[2]*xHat[5])
        dotx[5] = 0.5 * (xHat[0]*xHat[6] + xHat[1]*xHat[3] - xHat[2]*xHat[4])
        dotx[6] = 0.5 * (-xHat[0]*xHat[5] +xHat[1]*xHat[4] + xHat[2]*xHat[3])
        return dotx

    def Phi(self, xHat, dt):
        """
        Calculate discrete state transition matrix Phi.
        """
        # Note: Yun paper matrix element indices are one-based so subtract 1
        # from all for zero-based indices.
        Phi = np.eye(7)
        Phi[0,0] = math.e**(-dt / self.tau)
        Phi[1,1] = math.e**(-dt / self.tau)
        Phi[2,2] = math.e**(-dt / self.tau)

        Phi[3,0] = -xHat[4] * dt / 2
        Phi[3,1] = -xHat[5] * dt / 2
        Phi[3,2] = -xHat[6] * dt / 2
        Phi[3,4] = -xHat[0] * dt / 2
        Phi[3,5] = -xHat[1] * dt / 2
        Phi[3,6] = -xHat[2] * dt / 2

        Phi[4,0] = xHat[3] * dt / 2
        Phi[4,1] = -xHat[6] * dt / 2
        Phi[4,2] = xHat[5] * dt / 2
        Phi[4,3] = xHat[0] * dt / 2
        Phi[4,5] = xHat[2] * dt / 2
        Phi[4,6] = -xHat[1] * dt / 2

        Phi[5,0] = xHat[6] * dt / 2
        Phi[5,1] = xHat[3] * dt / 2
        Phi[5,2] = -xHat[4] * dt / 2
        Phi[5,3] = xHat[1] * dt / 2
        Phi[5,4] = -xHat[2] * dt / 2
        Phi[5,6] = xHat[0] * dt / 2

        Phi[6,0] = -xHat[5] * dt / 2
        Phi[6,1] = xHat[4] * dt / 2
        Phi[6,2] = xHat[3] * dt / 2
        Phi[6,3] = xHat[2] * dt / 2
        Phi[6,4] = xHat[1] * dt / 2
        Phi[6,5] = -xHat[0] * dt / 2

        return Phi
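With H equal to the identity, the gain, update and prediction steps in _update reduce to the standard Kalman equations. A compact standalone restatement with plain numpy arrays; quaternion renormalisation and the details of Phi and dotx are omitted here.

import numpy as np

def yun_ekf_step(x_minus, P_minus, z, R, Phi, Q, dotx, dt):
    """One Kalman cycle with measurement matrix H = I, as in _update above.

    dotx is a callable returning the state derivative for a given state.
    """
    K = P_minus @ np.linalg.inv(P_minus + R)    # Kalman gain
    x = x_minus + K @ (z - x_minus)             # state update
    P = (np.eye(len(x_minus)) - K) @ P_minus    # covariance update
    x_minus_next = x + dotx(x) * dt             # state prediction
    P_minus_next = Phi @ P @ Phi.T + Q          # covariance prediction
    return x, P, x_minus_next, P_minus_next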
Example #30
def testDistLinAccSimulation():
    sim = Simulation()
    samplingPeriod = 1.0/100
    calibrator = ScaleAndOffsetCalibrator(sim.environment, 1000,
            samplingPeriod, 20)
    bvhFile = path.join(path.dirname(__file__), 'walk.bvh')
    sampledModel = loadBVHFile(bvhFile, conversionFactor=0.01)
    posTimes = sampledModel.positionKeyFrames.timestamps
    sampledModel.positionKeyFrames = TimeSeries(
            posTimes, np.zeros((3,len(posTimes))))
    simModel = SplinedBodyModel(sampledModel)
    joints = list(simModel.joints)
    sim.time = simModel.startTime
    k = 128
    slotTime = 0.001
    txSlots = list(range(2, len(joints))) + [0,1]
    auxRxSlots = range(1, len(joints))
    auxTxSlots = [joints.index(j.parent) for j in joints[1:]]
    schedule = InterSlaveSchedule(slotTime, slotTime, txSlots,
            auxTxSlots, auxRxSlots)

    def setupIMU(id, joint):
        offset = randomPosition((-0.1, 0.1))
        platform = IdealIMU()
        calibration = calibrator.calibrate(platform)
        platform.simulation = sim
        platform.trajectory = OffsetTrajectory(joint, offset)
        filter = DistLinAccelCF(samplingPeriod,
                platform.trajectory.rotation(simModel.startTime),
                k, joint, offset)

        def updateChildren():
            for child in filter.children:
                childID = joints.index(child)
                childAccel = filter.childAcceleration(child.positionOffset, samplingPeriod)
                if childAccel is not None:
                    auxPacket = AuxPacket(id, childID,
                            [('linearAcceleration', childAccel)])
                    mac.queueAuxPacket(auxPacket)

        def handlePacket(packet):
            filter.handleLinearAcceleration(packet['linearAcceleration'], samplingPeriod)
            updateChildren()

        def handleSample(behaviour):
            rotation = filter.rotation.latestValue
            packet = DataPacket(id, [('rotation', rotation)])
            mac.queuePacket(packet)
            if not filter.joint.hasParent:
                updateChildren()

        behaviour = BasicIMUBehaviour(platform, samplingPeriod, calibration,
                filter, handleSample, initialTime=sim.time)
        mac = InterSlaveMAC(platform.radio, behaviour.timerMux, schedule,
                id, handlePacket)

    imus = [setupIMU(i, joint) for i, joint in enumerate(joints)]

    basestation = IdealBasestation(sim, StaticTrajectory())
    recModel = SampledBodyModel.structureCopy(simModel)
    reconstructor = BodyModelReconstructor(recModel, initialTime=sim.time)

    def handleFrame(packets):
        for packet in packets:
            packet['jointName'] = recModel.jointNames[packet.source]
        reconstructor.handleData(packets, schedule.framePeriod)

    basestation.radio.channel = schedule.dataChannel
    MasterMAC(basestation.radio, TimerMultiplexer(basestation.timer),
            schedule, handleFrame)

    sim.run(simModel.endTime)

    for simJoint, recJoint in zip(joints, recModel.joints):
        times = simJoint.rotationKeyFrames.timestamps
        assert_quaternions_correlated(simJoint.rotation(times),
            recJoint.rotation(times), 0.85)
Example #31
class DistLinAccelCF(OrientationFilter):
    """
    Implementation of the multi-IMU orientation algorithm by Young & Ling.

    See A. D. Young, M. J. Ling and D. K. Arvind. "Distributed Estimation of
    Linear Acceleration for Improved Accuracy in Wireless Inertial Motion
    Capture", in Proc. 9th ACM/IEEE International Conference on Information
    Processing in Sensor Networks, ACM, 2010, pp. 256-267.

    @ivar qMeas: Result of latest vector observation (L{Quaternion}).
    @ivar vectorObservation: L{TimeSeries} of vector observation results.
    @ivar jointAccel: Latest estimate of joint acceleration
        (3x1 L{np.ndarray}).
    @ivar jointAcceleration: L{TimeSeries} of acceleration estimates.
    """
    GRAVITY_VECTOR = vectors.vector(0, 0, STANDARD_GRAVITY)

    @prepend_method_doc(BachmannCF)
    def __init__(self, initialTime, initialRotation, k, joint, offset,
            **kwargs):
        """
        @param joint: The L{Joint} the IMU is attached to.
        @param offset: The offset of the IMU in the joint's co-ordinate frame
            (3x1 L{np.ndarray})
        """
        OrientationFilter.__init__(self, initialTime, initialRotation)
        self.rotation.add(initialTime, initialRotation)
        self.qHat = initialRotation.copy()
        self.k = k
        self.joint = joint
        self.offset = offset

        self._vectorObservation = vector_observation.GramSchmidt()
        self.qMeas = Quaternion.nan()
        self.jointAccel = vectors.nan()

        self.gyroFIFO = collections.deque([], maxlen=3)
        self.accelFIFO = collections.deque([], maxlen=2)
        self.magFIFO = collections.deque([], maxlen=2)

        self.isRoot = not joint.hasParent
        self.children = joint.childJoints

        self.vectorObservation = TimeSeries()
        self.jointAcceleration = TimeSeries()

    def handleLinearAcceleration(self, jointAcceleration, dt):
        """
        Perform drift correction based on incoming joint acceleration estimate.

        @param jointAcceleration: Acceleration estimate (3x1 L{np.ndarray}).
        """
        self.jointAccel = jointAcceleration

        if len(self.gyroFIFO) < 3:
            return

        # Estimate linear acceleration
        o = self.offset
        w = self.gyroFIFO
        q = self.qHat_minus_1
        a = (w[0] - w[2]) / (2 * dt)
        lt = vectors.cross(a, o)
        lr = vectors.dot(w[1], o) * w[1] - o * vectors.norm(w[1])**2
        l = (q.rotateFrame(self.jointAccel) + lt + lr)

        # Apply drift correction
        self.qMeas = self._vectorObservation(-(self.accelFIFO[1]- l),
                self.magFIFO[1])
        if q.dot(self.qMeas) < 0:
            self.qMeas.negate()
        q = q + 1.0/self.k * dt * (self.qMeas - q)
        dotq = 0.5 * self.qHat * Quaternion(0,*w[0])
        self.qHat = q + dotq * dt
        self.qHat.normalise()

    def childAcceleration(self, o, dt):
        """
        Compute acceleration for child joint.

        @param o: Offset of child joint (3x1 L{np.ndarray}).
        """
        w = self.gyroFIFO
        if len(w) < 3:
            return None
        q = self.qHat_minus_1
        a = (w[0] - w[2]) / (2 * dt)
        lt = vectors.cross(a, o)
        lr = vectors.dot(w[1], o) * w[1] - o * vectors.norm(w[1])**2
        l = self.jointAccel + q.rotateVector(lt + lr)
        return l

    def _update(self, accel, mag, gyro, dt, t):
        # Store measurements in queues for processing
        self.gyroFIFO.appendleft(gyro)
        self.accelFIFO.appendleft(accel)
        self.magFIFO.appendleft(mag)

        # save current qHat for drift correction
        self.qHat_minus_1 = copy.copy(self.qHat)

        # Inertial update
        dotq = 0.5 * self.qHat * Quaternion(0,*gyro)
        self.qHat += dotq * dt
        self.qHat.normalise()

        if self.isRoot and len(self.accelFIFO) >= 2:
            # Root can't receive acceleration data so must estimate it.
            measuredAccel = self.qHat_minus_1.rotateVector(self.accelFIFO[1])
            # Assume root joint acceleration was zero.
            self.jointAccel = np.zeros((3,1))
            # Calculate tangential acceleration for root joint IMU offset.
            linAccel = self.childAcceleration(self.offset, dt)
            if linAccel is not None:
                # Calculate expected measurement.
                expectedAccel = linAccel - self.GRAVITY_VECTOR
                # Set root joint acceleration to the residual.
                self.jointAccel = measuredAccel - expectedAccel
                self.handleLinearAcceleration(self.jointAccel, dt)

        self.rotation.add(t, self.qHat)
        self.vectorObservation.add(t, self.qMeas)
        self.jointAcceleration.add(t, self.jointAccel)
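The lt and lr terms above are the tangential and centripetal components of acceleration at an offset o on a rigid body with angular velocity w and angular acceleration a. A standalone restatement with 3x1 numpy column vectors, illustrative only:

import numpy as np

def offset_linear_acceleration(w, a, o):
    """Tangential plus centripetal acceleration at offset o; all arguments are 3x1 arrays."""
    lt = np.cross(a, o, axis=0)                         # tangential: a x o
    lr = np.vdot(w, o) * w - o * np.linalg.norm(w)**2   # centripetal: (w.o)w - |w|^2 o
    return lt + lr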
Example #32
 def __init__(self):
     t = randomTimeSequence()
     sampled = SampledTrajectory(
         TimeSeries(t, randomPositionSequence(t)),
         TimeSeries(t, randomRotationSequence(t)))
     SplinedTrajectory.__init__(self, sampled)
Example #33
 def __init__(self, t):
     sampled = SampledTrajectory(None, TimeSeries(t, randomRotationSequence(t)))
     SplinedRotationTrajectory.__init__(self, sampled)
Example #34
class YunEKF(OrientationFilter):
    """
    Implementation of the extended Kalman filter proposed by Yun et al.

    See X. Yun and E. R. Bachmann. "Design, Implementation, and Experimental
    Results of a Quaternion-Based Kalman Filter for Human Body Motion
    Tracking". IEEE Transactions on Robotics, vol 22, no 6, pp 1216-1227, 2006.

    @ivar vectorObservation: L{TimeSeries} of vector observation results.
    @ivar rotationalVelocity: L{TimeSeries} of angular rate estimates.
    """
    @prepend_method_doc(OrientationFilter)
    def __init__(self, initialTime, initialRotation, initialRotationalVelocity,
                 initialCovariance, measurementCovariance, D, tau, **kwargs):
        """
        @param initialRotationalVelocity: Initial angular rate estimate
            (3x1 L{np.ndarray}).
        @param initialCovariance: Initial state covariance matrix (7x7).
        @param measurementCovariance: Measurement noise covariance (7x7).
        @param D: Variance of process noise model in rad^2/s^2 (float).
        @param tau: Time constant of process noise model in seconds (float).
        """
        OrientationFilter.__init__(self, initialTime, initialRotation)
        self.rotation.add(initialTime, initialRotation, initialCovariance[3:,
                                                                          3:])

        self.tau = float(tau)
        self.D = float(D)

        self._vectorObservation = vector_observation.FQA()

        # Process noise covariance matrix, updated on each call.
        self.Q = np.asmatrix(np.zeros((7, 7)))

        # Measurement noise covariance matrix
        self.R = measurementCovariance

        # Prior covariance estimate and state vector initialisation.
        self.P_minus = initialCovariance
        self.xHat_minus = np.vstack(
            (initialRotationalVelocity, np.vstack(initialRotation.components)))

        self.vectorObservation = TimeSeries()
        self.rotationalVelocity = TimeSeries()

    def _update(self, accel, mag, gyro, dt, t):
        # Note: Measurement matrix H is equivalent to the identity matrix
        # so is left out to simplify calculations

        # Calculate Kalman gain
        K = self.P_minus * np.linalg.inv(self.P_minus + self.R)

        # Construct measurement vector z
        self.vo = self._vectorObservation(-accel, mag)
        if self.vo.dot(Quaternion(*self.xHat_minus[3:])) < 0:
            self.vo.negate()
        z = np.vstack((gyro, np.vstack(self.vo.components)))

        # Calculate state update
        xHat = self.xHat_minus + K * (z - self.xHat_minus)
        P = (np.eye(7) - K) * self.P_minus

        # Normalise quaternion component of state vector
        qHat = Quaternion(*xHat[3:]).normalise()
        xHat[3:] = np.vstack(qHat.components)

        # Calculate prediction
        self.xHat_minus = xHat + self.dotx(xHat) * dt
        Phi = self.Phi(xHat, dt)
        self.Q[[0,1,2],[0,1,2]] = \
            (self.D / (2 * self.tau)) * (1 - math.e**((-2 * dt) / self.tau))
        self.P_minus = Phi * P * Phi.T + self.Q

        self.rotation.add(t, qHat, P[3:, 3:])
        self.vectorObservation.add(t, self.vo)
        self.rotationalVelocity.add(t, xHat[:3], P[:3, :3])

    def dotx(self, xHat):
        """
        Calculate state derivative vector.
        """
        dotx = np.empty((7, 1))
        dotx[:3] = -xHat[:3] / self.tau
        dotx[3] = -0.5 * (xHat[0] * xHat[4] + xHat[1] * xHat[5] +
                          xHat[2] * xHat[6])
        dotx[4] = 0.5 * (xHat[0] * xHat[3] - xHat[1] * xHat[6] +
                         xHat[2] * xHat[5])
        dotx[5] = 0.5 * (xHat[0] * xHat[6] + xHat[1] * xHat[3] -
                         xHat[2] * xHat[4])
        dotx[6] = 0.5 * (-xHat[0] * xHat[5] + xHat[1] * xHat[4] +
                         xHat[2] * xHat[3])
        return dotx

    def Phi(self, xHat, dt):
        """
        Calculate discrete state transition matrix Phi.
        """
        # Note: Yun paper matrix element indices are one-based so subtract 1
        # from all for zero-based indices.
        Phi = np.eye(7)
        Phi[0, 0] = math.e**(-dt / self.tau)
        Phi[1, 1] = math.e**(-dt / self.tau)
        Phi[2, 2] = math.e**(-dt / self.tau)

        Phi[3, 0] = -xHat[4] * dt / 2
        Phi[3, 1] = -xHat[5] * dt / 2
        Phi[3, 2] = -xHat[6] * dt / 2
        Phi[3, 4] = -xHat[0] * dt / 2
        Phi[3, 5] = -xHat[1] * dt / 2
        Phi[3, 6] = -xHat[2] * dt / 2

        Phi[4, 0] = xHat[3] * dt / 2
        Phi[4, 1] = -xHat[6] * dt / 2
        Phi[4, 2] = xHat[5] * dt / 2
        Phi[4, 3] = xHat[0] * dt / 2
        Phi[4, 5] = xHat[2] * dt / 2
        Phi[4, 6] = -xHat[1] * dt / 2

        Phi[5, 0] = xHat[6] * dt / 2
        Phi[5, 1] = xHat[3] * dt / 2
        Phi[5, 2] = -xHat[4] * dt / 2
        Phi[5, 3] = xHat[1] * dt / 2
        Phi[5, 4] = -xHat[2] * dt / 2
        Phi[5, 6] = xHat[0] * dt / 2

        Phi[6, 0] = -xHat[5] * dt / 2
        Phi[6, 1] = xHat[4] * dt / 2
        Phi[6, 2] = xHat[3] * dt / 2
        Phi[6, 3] = xHat[2] * dt / 2
        Phi[6, 4] = xHat[1] * dt / 2
        Phi[6, 5] = -xHat[0] * dt / 2

        return Phi
Example #35
class DistLinAccelCF(OrientationFilter):
    """
    Implementation of the multi-IMU orientation algorithm by Young & Ling.

    See A. D. Young, M. J. Ling and D. K. Arvind. "Distributed Estimation of
    Linear Acceleration for Improved Accuracy in Wireless Inertial Motion
    Capture", in Proc. 9th ACM/IEEE International Conference on Information
    Processing in Sensor Networks, ACM, 2010, pp. 256-267.

    @ivar qMeas: Result of latest vector observation (L{Quaternion}).
    @ivar vectorObservation: L{TimeSeries} of vector observation results.
    @ivar jointAccel: Latest estimate of joint acceleration
        (3x1 L{np.ndarray}).
    @ivar jointAcceleration: L{TimeSeries} of acceleration estimates.
    """
    GRAVITY_VECTOR = vectors.vector(0, 0, STANDARD_GRAVITY)

    @prepend_method_doc(BachmannCF)
    def __init__(self, initialTime, initialRotation, k, joint, offset,
                 **kwargs):
        """
        @param joint: The L{Joint} the IMU is attached to.
        @param offset: The offset of the IMU in the joint's co-ordinate frame
            (3x1 L{np.ndarray})
        """
        OrientationFilter.__init__(self, initialTime, initialRotation)
        self.rotation.add(initialTime, initialRotation)
        self.qHat = initialRotation.copy()
        self.k = k
        self.joint = joint
        self.offset = offset

        self._vectorObservation = vector_observation.GramSchmidt()
        self.qMeas = Quaternion.nan()
        self.jointAccel = vectors.nan()

        self.gyroFIFO = collections.deque([], maxlen=3)
        self.accelFIFO = collections.deque([], maxlen=2)
        self.magFIFO = collections.deque([], maxlen=2)

        self.isRoot = not joint.hasParent
        self.children = joint.childJoints

        self.vectorObservation = TimeSeries()
        self.jointAcceleration = TimeSeries()

    def handleLinearAcceleration(self, jointAcceleration, dt):
        """
        Perform drift correction based on incoming joint acceleration estimate.

        @param jointAcceleration: Acceleration estimate (3x1 L{np.ndarray}).
        """
        self.jointAccel = jointAcceleration

        if len(self.gyroFIFO) < 3:
            return

        # Estimate linear acceleration
        o = self.offset
        w = self.gyroFIFO
        q = self.qHat_minus_1
        a = (w[0] - w[2]) / (2 * dt)
        lt = vectors.cross(a, o)
        lr = vectors.dot(w[1], o) * w[1] - o * vectors.norm(w[1])**2
        l = (q.rotateFrame(self.jointAccel) + lt + lr)

        # Apply drift correction
        self.qMeas = self._vectorObservation(-(self.accelFIFO[1] - l),
                                             self.magFIFO[1])
        if q.dot(self.qMeas) < 0:
            self.qMeas.negate()
        q = q + 1.0 / self.k * dt * (self.qMeas - q)
        dotq = 0.5 * self.qHat * Quaternion(0, *w[0])
        self.qHat = q + dotq * dt
        self.qHat.normalise()

    def childAcceleration(self, o, dt):
        """
        Compute acceleration for child joint.

        @param o: Offset of child joint (3x1 L{np.ndarray}).
        """
        w = self.gyroFIFO
        if len(w) < 3:
            return None
        q = self.qHat_minus_1
        a = (w[0] - w[2]) / (2 * dt)
        lt = vectors.cross(a, o)
        lr = vectors.dot(w[1], o) * w[1] - o * vectors.norm(w[1])**2
        l = self.jointAccel + q.rotateVector(lt + lr)
        return l

    def _update(self, accel, mag, gyro, dt, t):
        # Store measurements in queues for processing
        self.gyroFIFO.appendleft(gyro)
        self.accelFIFO.appendleft(accel)
        self.magFIFO.appendleft(mag)

        # save current qHat for drift correction
        self.qHat_minus_1 = copy.copy(self.qHat)

        # Inertial update
        dotq = 0.5 * self.qHat * Quaternion(0, *gyro)
        self.qHat += dotq * dt
        self.qHat.normalise()

        if self.isRoot and len(self.accelFIFO) >= 2:
            # Root can't receive acceleration data so must estimate it.
            measuredAccel = self.qHat_minus_1.rotateVector(self.accelFIFO[1])
            # Assume root joint acceleration was zero.
            self.jointAccel = np.zeros((3, 1))
            # Calculate tangential acceleration for root joint IMU offset.
            linAccel = self.childAcceleration(self.offset, dt)
            if linAccel is not None:
                # Calculate expected measurement.
                expectedAccel = linAccel - self.GRAVITY_VECTOR
                # Set root joint acceleration to the residual.
                self.jointAccel = measuredAccel - expectedAccel
                self.handleLinearAcceleration(self.jointAccel, dt)

        self.rotation.add(t, self.qHat)
        self.vectorObservation.add(t, self.qMeas)
        self.jointAcceleration.add(t, self.jointAccel)