Example #1
    def test_dcm2angle(self):
        """
        Test deriving Euler angles from body -> nav transformation matrix
        (transpose of DCM).
        
        This test simply creates a DCM, transposes it (to get body -> nav) and
        attempts to recover the original angles.
        """
        # Define (expected) Euler angles and associated DCM (Rnav2body)
        yaw, pitch, roll = [-83, 2.3, 13]  # degrees

        Rnav2body = np.array([[0.121771, -0.991747, -0.040132],
                              [0.968207, 0.109785, 0.224770],
                              [-0.218509, -0.066226, 0.973585]])

        # Make sure matrix type is handled correctly
        for nptype in np.array, np.matrix:
            yaw_C, pitch_C, roll_C = navpy.dcm2angle(nptype(Rnav2body),
                                                     output_unit='deg')

            # Match won't be perfect because Rnav2body is rounded.
            np.testing.assert_almost_equal([yaw_C, pitch_C, roll_C],
                                           [yaw, pitch, roll],
                                           decimal=4)

        # Check output with multiple inputs
        out = navpy.dcm2angle(np.rollaxis(
            np.repeat(Rnav2body, 5).reshape(3, 3, -1), -1),
                              output_unit='deg')
        np.testing.assert_almost_equal(out,
                                       np.repeat([yaw, pitch, roll],
                                                 out[0].size).reshape(3, -1),
                                       decimal=4)
Example #2
    def test_dcm2angle(self):
        """
        Test deriving Euler angles from body -> nav transformation matrix
        (transpose of DCM).
        
        This test simply creates a DCM, transposes it (to get body -> nav) and
        attempts to recover the original angles.
        """
        # Define (expected) Euler angles and associated DCM (Rnav2body)
        yaw, pitch, roll = [-83, 2.3, 13] # degrees
        
        Rnav2body = np.array([[ 0.121771, -0.991747, -0.040132],
                              [ 0.968207,  0.109785,  0.224770],
                              [-0.218509, -0.066226,  0.973585]])

        # Make sure matrix type is handled correctly
        for nptype in np.array, np.matrix:
            yaw_C, pitch_C, roll_C = navpy.dcm2angle(nptype(Rnav2body), output_unit='deg')
                               
            # Match won't be perfect because Rnav2body is rounded.
            np.testing.assert_almost_equal([yaw_C, pitch_C, roll_C], [yaw, pitch, roll], decimal=4)

        # Check output with multiple inputs
        out = navpy.dcm2angle(np.rollaxis(
            np.repeat(Rnav2body, 5).reshape(3, 3, -1), -1), output_unit='deg')
        np.testing.assert_almost_equal(
            out, np.repeat([yaw, pitch, roll], out[0].size).reshape(3, -1), decimal=4)
Example #3
    def test_dcm2angle(self):
        """
        Test deriving Euler angles from body -> nav transformation matrix
        (transpose of DCM).

        This test simply creates a DCM, transposes it (to get body -> nav) and
        attempts to recover the original angles.
        """
        # Define (expected) Euler angles and associated DCM (Rnav2body)
        yaw, pitch, roll = [-83, 2.3, 13]  # degrees

        Rnav2body = np.array([[ 0.121771, -0.991747, -0.040132],
                              [ 0.968207,  0.109785,  0.224770],
                              [-0.218509, -0.066226,  0.973585]])

        yaw_C, pitch_C, roll_C = navpy.dcm2angle(Rnav2body, output_unit='deg')

        # Match won't be perfect because Rnav2body is rounded.
        np.testing.assert_almost_equal([yaw_C, pitch_C, roll_C], [yaw, pitch, roll], decimal=4)
Example #4
    def test_dcm2angle(self):
        """
        Test deriving Euler angles from body -> nav transformation matrix
        (transpose of DCM).
        
        This test simply creates a DCM, transposes it (to get body -> nav) and
        attempts to recover the original angles.
        """
        # Define (expected) Euler angles and associated DCM (Rnav2body)
        yaw, pitch, roll = [-83, 2.3, 13]  # degrees

        Rnav2body = np.array([[0.121771, -0.991747, -0.040132],
                              [0.968207, 0.109785, 0.224770],
                              [-0.218509, -0.066226, 0.973585]])

        yaw_C, pitch_C, roll_C = navpy.dcm2angle(Rnav2body, output_unit='deg')

        # Match won't be perfect because Rnav2body is rounded.
        np.testing.assert_almost_equal([yaw_C, pitch_C, roll_C],
                                       [yaw, pitch, roll],
                                       decimal=4)
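For reference, the hard-coded Rnav2body in these tests corresponds to the listed yaw/pitch/roll through the default 'ZYX' sequence. A minimal round-trip sketch (not part of the original test): regenerate the matrix with navpy.angle2dcm and recover the angles with dcm2angle. The input_unit='deg' keyword is assumed here, mirroring dcm2angle's output_unit.

import numpy as np
import navpy

yaw, pitch, roll = -83.0, 2.3, 13.0  # degrees

# Build the nav-to-body DCM from the angles, then recover the angles again.
Rnav2body = navpy.angle2dcm(yaw, pitch, roll, input_unit='deg')
yaw_C, pitch_C, roll_C = navpy.dcm2angle(Rnav2body, output_unit='deg')

np.testing.assert_almost_equal([yaw_C, pitch_C, roll_C], [yaw, pitch, roll])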
Example #5
    def get_angular(self, dt):
        return (np.array(self.get_orientation()) -
                np.array(navpy.dcm2angle(self.properties.prev_est_attitude)[::-1])) / dt
Example #6
    def get_orientation(self):
        return navpy.dcm2angle(  # CTM_to_Euler
            self.properties.estimated_attitude)[::-1]  # roll, pitch, yaw
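navpy.dcm2angle returns the angles in (yaw, pitch, roll) order for the default 'ZYX' sequence, so the [::-1] slice above flips the tuple into (roll, pitch, yaw). A minimal sketch of that reordering, with an identity matrix as a placeholder attitude:

import numpy as np
import navpy

estimated_attitude = np.eye(3)  # placeholder DCM

yaw, pitch, roll = navpy.dcm2angle(estimated_attitude)        # (yaw, pitch, roll), radians
roll, pitch, yaw = navpy.dcm2angle(estimated_attitude)[::-1]  # reversed, as in the example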
Example #7
    def test_dcm2angle(self):
        """
        Test deriving Euler angles from body -> nav transformation matrix
        (transpose of DCM).
        
        This test simply creates a DCM, transposes it (to get body -> nav) and
        attempts to recover the original angles.
        """
        # Define (expected) Euler angles and associated DCM (Rnav2body)
        yaw, pitch, roll = [-83, 2.3, 13] # degrees
        
        Rnav2body = np.array([[ 0.121771, -0.991747, -0.040132],
                              [ 0.968207,  0.109785,  0.224770],
                              [-0.218509, -0.066226,  0.973585]])

        checks_sequences = (
            (np.deg2rad([45, -5, 70]), 14, 'XYZ',
             np.matrix([[ 0.340718653421610,   0.643384864470459,   0.685541184306890],
                        [-0.936116806662859,   0.299756531066926,   0.183932994229025],
                        [-0.087155742747658,  -0.704416026402759,   0.704416026402759]])),
            (np.deg2rad([45, -5, 70]), 14, 'ZYX',
             np.matrix([[0.704416026402759,   0.704416026402759,   0.087155742747658 ],
                        [-0.299756531066926,   0.183932994229025,   0.936116806662859],
                        [ 0.643384864470459,  -0.685541184306890,   0.340718653421610]])),
            (np.deg2rad([-135, 5, -110]), 14, 'ZYZ',
             np.matrix([[-0.423538554077505,   0.905387494699844,   0.029809019626209],
                        [-0.903779304621979,  -0.420089779326028,  -0.081899608319089],
                        [-0.061628416716219,  -0.061628416716219,   0.996194698091746]])),
            (np.deg2rad([45, -5, 70]), 14, 'ZXY',
             np.matrix([[0.299756531066926,   0.183932994229025,  -0.936116806662859],
                        [-0.704416026402759,   0.704416026402759,  -0.087155742747658],
                        [0.643384864470459,   0.685541184306890,   0.340718653421610]])),
            (np.deg2rad([-135, 5, -110]), 14, 'ZXZ',
             np.matrix([[-0.420089779326028,   0.903779304621979,  -0.081899608319089],
                        [-0.905387494699844,  -0.423538554077505,  -0.029809019626209],
                        [-0.061628416716219,   0.061628416716219,   0.996194698091746]])),
            (np.deg2rad([45, -5, 70]), 14, 'YXZ',
             np.matrix([[0.183932994229025,   0.936116806662859,  -0.299756531066926],
                        [-0.685541184306890,   0.340718653421610,   0.643384864470459],
                        [0.704416026402759,   0.087155742747658,   0.704416026402759]])),
            (np.deg2rad([-135, 5, -110]), 14, 'YXY',
             np.matrix([[-0.420089779326028,  -0.081899608319089,  -0.903779304621979],
                        [-0.061628416716219,   0.996194698091746,  -0.061628416716219],
                        [0.905387494699844,   0.029809019626209,  -0.423538554077505]])),
            (np.deg2rad([45, -5, 70]), 14, 'YZX',
             np.matrix([[0.704416026402759,  -0.087155742747658,  -0.704416026402759],
                        [0.685541184306890,   0.340718653421610,   0.643384864470459],
                        [0.183932994229025,  -0.936116806662859,   0.299756531066926]])),
            (np.deg2rad([-135, 5, -110]), 14, 'YZY',
             np.matrix([[-0.423538554077505,  -0.029809019626209,  -0.905387494699844],
                        [0.061628416716219,   0.996194698091746,  -0.061628416716219],
                        [0.903779304621979,  -0.081899608319089,  -0.420089779326028]])),
            (np.deg2rad([-135, 5, -110]), 14, 'XYX',
             np.matrix([[0.996194698091746,  -0.061628416716219,   0.061628416716219],
                        [-0.081899608319089,  -0.420089779326028,   0.903779304621979],
                        [-0.029809019626209,  -0.905387494699844,  -0.423538554077505]])),
            (np.deg2rad([45, -5, 70]), 14, 'XZY',
             np.matrix([[0.340718653421610,   0.643384864470459,  -0.685541184306890],
                        [0.087155742747658,   0.704416026402759,   0.704416026402759],
                        [0.936116806662859,  -0.299756531066926,   0.183932994229025]])),
            (np.deg2rad([-135, 5, -110]), 14, 'XZX',
             np.matrix([[0.996194698091746,  -0.061628416716219,  -0.061628416716219],
                        [0.029809019626209,  -0.423538554077505,   0.905387494699844],
                        [-0.081899608319089,  -0.903779304621979,  -0.420089779326028]])),
        )
        
        # Make sure matrix type is handled correctly
        for nptype in np.array, np.matrix:
            yaw_C, pitch_C, roll_C = navpy.dcm2angle(nptype(Rnav2body), output_unit='deg')
                               
            # Match won't be perfect because Rnav2body is rounded.
            np.testing.assert_almost_equal([yaw_C, pitch_C, roll_C], [yaw, pitch, roll], decimal=4)

        for angles_expected, decimal, sequence, R in checks_sequences:
            angles_computed = navpy.dcm2angle(R, rotation_sequence=sequence)
            np.testing.assert_almost_equal(angles_expected, angles_computed, decimal=decimal, err_msg="Sequence " + sequence + " not equal")

        # Check output with multiple inputs
        out = navpy.dcm2angle(np.rollaxis(
            np.repeat(Rnav2body, 5).reshape(3, 3, -1), -1), output_unit='deg')
        np.testing.assert_almost_equal(
            out, np.repeat([yaw, pitch, roll], out[0].size).reshape(3, -1), decimal=4)
Example #8
    def test_dcm2angle(self):
        """
        Test deriving Euler angles from body -> nav transformation matrix
        (transpose of DCM).
        
        This test simply creates a DCM, transposes it (to get body -> nav) and
        attempts to recover the original angles.
        """
        # Define (expected) Euler angles and associated DCM (Rnav2body)
        yaw, pitch, roll = [-83, 2.3, 13]  # degrees

        Rnav2body = np.array([[0.121771, -0.991747, -0.040132],
                              [0.968207, 0.109785, 0.224770],
                              [-0.218509, -0.066226, 0.973585]])

        checks_sequences = (
            (np.deg2rad([45, -5, 70]), 14, 'XYZ',
             np.matrix(
                 [[0.340718653421610, 0.643384864470459, 0.685541184306890],
                  [-0.936116806662859, 0.299756531066926, 0.183932994229025],
                  [-0.087155742747658, -0.704416026402759,
                   0.704416026402759]])),
            (np.deg2rad([45, -5, 70]), 14, 'ZYX',
             np.matrix(
                 [[0.704416026402759, 0.704416026402759, 0.087155742747658],
                  [-0.299756531066926, 0.183932994229025, 0.936116806662859],
                  [0.643384864470459, -0.685541184306890,
                   0.340718653421610]])),
            (np.deg2rad([-135, 5, -110]), 14, 'ZYZ',
             np.matrix(
                 [[-0.423538554077505, 0.905387494699844, 0.029809019626209],
                  [-0.903779304621979, -0.420089779326028, -0.081899608319089],
                  [-0.061628416716219, -0.061628416716219,
                   0.996194698091746]])),
            (np.deg2rad([45, -5, 70]), 14, 'ZXY',
             np.matrix(
                 [[0.299756531066926, 0.183932994229025, -0.936116806662859],
                  [-0.704416026402759, 0.704416026402759, -0.087155742747658],
                  [0.643384864470459, 0.685541184306890, 0.340718653421610]])),
            (np.deg2rad([-135, 5, -110]), 14, 'ZXZ',
             np.matrix(
                 [[-0.420089779326028, 0.903779304621979, -0.081899608319089],
                  [-0.905387494699844, -0.423538554077505, -0.029809019626209],
                  [-0.061628416716219, 0.061628416716219,
                   0.996194698091746]])),
            (np.deg2rad([45, -5, 70]), 14, 'YXZ',
             np.matrix(
                 [[0.183932994229025, 0.936116806662859, -0.299756531066926],
                  [-0.685541184306890, 0.340718653421610, 0.643384864470459],
                  [0.704416026402759, 0.087155742747658, 0.704416026402759]])),
            (np.deg2rad([-135, 5, -110]), 14, 'YXY',
             np.matrix(
                 [[-0.420089779326028, -0.081899608319089, -0.903779304621979],
                  [-0.061628416716219, 0.996194698091746, -0.061628416716219],
                  [0.905387494699844, 0.029809019626209,
                   -0.423538554077505]])),
            (np.deg2rad([45, -5, 70]), 14, 'YZX',
             np.matrix(
                 [[0.704416026402759, -0.087155742747658, -0.704416026402759],
                  [0.685541184306890, 0.340718653421610, 0.643384864470459],
                  [0.183932994229025, -0.936116806662859,
                   0.299756531066926]])),
            (np.deg2rad([-135, 5, -110]), 14, 'YZY',
             np.matrix(
                 [[-0.423538554077505, -0.029809019626209, -0.905387494699844],
                  [0.061628416716219, 0.996194698091746, -0.061628416716219],
                  [0.903779304621979, -0.081899608319089,
                   -0.420089779326028]])),
            (np.deg2rad([-135, 5, -110]), 14, 'XYX',
             np.matrix(
                 [[0.996194698091746, -0.061628416716219, 0.061628416716219],
                  [-0.081899608319089, -0.420089779326028, 0.903779304621979],
                  [-0.029809019626209, -0.905387494699844,
                   -0.423538554077505]])),
            (np.deg2rad([45, -5, 70]), 14, 'XZY',
             np.matrix(
                 [[0.340718653421610, 0.643384864470459, -0.685541184306890],
                  [0.087155742747658, 0.704416026402759, 0.704416026402759],
                  [0.936116806662859, -0.299756531066926,
                   0.183932994229025]])),
            (np.deg2rad([-135, 5, -110]), 14, 'XZX',
             np.matrix(
                 [[0.996194698091746, -0.061628416716219, -0.061628416716219],
                  [0.029809019626209, -0.423538554077505, 0.905387494699844],
                  [-0.081899608319089, -0.903779304621979,
                   -0.420089779326028]])),
        )

        # Make sure matrix type is handled correctly
        for nptype in np.array, np.matrix:
            yaw_C, pitch_C, roll_C = navpy.dcm2angle(nptype(Rnav2body),
                                                     output_unit='deg')

            # Match won't be perfect because Rnav2body is rounded.
            np.testing.assert_almost_equal([yaw_C, pitch_C, roll_C],
                                           [yaw, pitch, roll],
                                           decimal=4)

        for angles_expected, decimal, sequence, R in checks_sequences:
            angles_computed = navpy.dcm2angle(R, rotation_sequence=sequence)
            np.testing.assert_almost_equal(angles_expected,
                                           angles_computed,
                                           decimal=decimal,
                                           err_msg="Sequence " + sequence +
                                           " not equal")

        # Check output with multiple inputs
        out = navpy.dcm2angle(np.rollaxis(
            np.repeat(Rnav2body, 5).reshape(3, 3, -1), -1),
                              output_unit='deg')
        np.testing.assert_almost_equal(out,
                                       np.repeat([yaw, pitch, roll],
                                                 out[0].size).reshape(3, -1),
                                       decimal=4)
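Each row of checks_sequences pairs expected angles (already in radians) with the DCM for one of the twelve rotation sequences, and the loop verifies dcm2angle's rotation_sequence keyword against them. A minimal sketch of a single such check, taking the 'XYZ' row from the table above and assuming the installed navpy build supports non-default sequences (as this test requires):

import numpy as np
import navpy

angles_expected = np.deg2rad([45, -5, 70])
R = np.array([[ 0.340718653421610,  0.643384864470459,  0.685541184306890],
              [-0.936116806662859,  0.299756531066926,  0.183932994229025],
              [-0.087155742747658, -0.704416026402759,  0.704416026402759]])

# Default output unit is radians, matching the deg2rad expectations above.
angles_computed = navpy.dcm2angle(R, rotation_sequence='XYZ')
np.testing.assert_almost_equal(angles_expected, angles_computed, decimal=14)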
Example #9
    def get_orientation(self):
        return navpy.dcm2angle(self.frame_properties.estimated_attitude)  # yaw, pitch, roll
Example #10
    def fitting(self):
        """ 
		Pose, Shape and Expression estimation module
		"""

        if (self.curr_frame_num == self.startingFrameNum):
            # calculate  R, t, s
            # execute this only on first frame
            # t, s are saved here purely for visualisation purposes
            self.R, self.t, self.s = self.POS(self.x_landmarks,
                                              self.meanShapeLandmarkPoints)
            tFitting = self.t
            sFitting = self.s
        else:
            # keep reestimating pose, but only keep R for visualisation purposes
            # tFitting and sFitting are only used to estimate shape and expression
            self.R, tFitting, sFitting = self.POS(self.x_landmarks,
                                                  self.meanShapeLandmarkPoints)

        R = self.R
        t = self.t
        s = self.s
        numsd = 3

        if (self.curr_frame_num <= self.midFrameNum):
            # only estimate shape for the first 25 frames
            # estimate shape coefficients
            b = self.estimateShape(self.orthogonalIDBasisForEstimation,
                                   self.meanShapeForEstimation,
                                   self.normalisedIDBasisForEstimation, R,
                                   tFitting, sFitting, numsd, self.ndims,
                                   self.x_landmarks)
            # for caricature puppet: modify one shape coefficient directly, as that affects the principal components of the shape vector as well
            if (self.shapeChoice == "2"):
                b[11] = (b[11] + 2) * 10  #11*10 6*10 1*20
            #identity_basis*coefficients
            identity = np.dot(self.shapePC, b)
            #add to the mean shape
            self.identity_shape = np.add(identity, self.shapeMU)
            # subselect vertices of the current identity shape for expression estimation
            self.shapeForExpressionEstimation = []
            for i in self.sortedfps:
                self.shapeForExpressionEstimation.append(
                    self.identity_shape[i - 1])
            self.shapeForExpressionEstimation = np.asarray(
                self.shapeForExpressionEstimation)

        #estimate shape coefficients
        e = self.estimateExpression(self.orthogonalExpBasisForEstimation,
                                    self.shapeForExpressionEstimation,
                                    self.normalisedExpBasisForEstimation, R,
                                    tFitting, sFitting, numsd, self.exp_ndims,
                                    self.x_landmarks)
        if (self.shapeChoice == "2"):
            #expression_basis*coefficients
            expression_mean = np.dot(
                self.expressionPC,
                1.5 * e)  # more pronounced expressions if caricature is chosen
        else:
            #expression_basis*coefficients
            expression_mean = np.dot(self.expressionPC, e)

        if (self.curr_frame_num > self.midFrameNum
                and self.shapeChoice == "2"):
            #subselect expression to combine with subselected shape
            expression_mean = np.reshape(expression_mean,
                                         (len(expression_mean) // 3, 3)).T
            expression_mean1 = expression_mean[0][::3]
            expression_mean2 = expression_mean[1][::3]
            expression_mean3 = expression_mean[2][::3]
            expression_mean = np.asarray(
                [expression_mean1, expression_mean2,
                 expression_mean3]).T.flatten()
            expression_mean = np.reshape(expression_mean,
                                         (len(expression_mean), 1))

        vertices = []
        vertices1, vertices2, vertices3 = [], [], []
        #construct the puppet
        vertices = np.add(self.identity_shape, expression_mean)
        #reshape the vertices matrix to add pose
        vertices = np.reshape(vertices, (len(vertices) // 3, 3))
        vertices = vertices.T
        vertices = vertices.tolist()
        vertices.append([1 for x in range(len(vertices[0]))])
        vertices = np.asarray(vertices)
        vertices = vertices.T

        #calculate current rotation by averaging it with last 3 known rotations - Temporal smoothing
        if (self.R_Av.size) > 0:
            currRinEuler = nv.dcm2angle(R)
            RinEuler = (currRinEuler + 3 * (self.R_Av)) / 4
            R = nv.angle2dcm(RinEuler[0], RinEuler[1], RinEuler[2])

        #Catching and amending inconsistencies because rotational matrix is distorted by the averaging algorithm.
        #the problem seems patched up for now
        #Note: Phase Unwrapping does not fix this!
        if -0.85 < R[1][1] < 0.85:
            Rr = np.negative(self.R)
            Rr[0] = np.negative(Rr[0])

        elif (R[1][1] < 0):
            Rr = np.negative(R)
            Rr[0] = np.negative(Rr[0])
            #print("Two!")

        else:
            Rr = np.negative(self.R)
            Rr[0] = np.negative(Rr[0])

        # reshape R, t and s in order to combine with vertices matrix
        Rr = Rr.tolist()
        Rr.append([0, 0, 0, 1])
        Rr[0].append(0)
        Rr[1].append(0)
        Rr[2].append(0)
        Sr = [[s, 0, 0, 0], [0, s, 0, 0], [0, 0, s, 0], [0, 0, 0, s]]
        Tr = [[1, 0, 0, t[0]], [0, 1, 0, t[1]], [0, 0, 1, 0], [0, 0, 0, 1]]
        T = np.dot(Sr, Tr)
        T = np.dot(T, Rr)
        M = T[0:3]
        M = np.transpose(M)

        #add pose to the current shape
        self.vertices = np.transpose(np.dot(vertices, M))

        vertices1 = self.vertices[0]  #x
        vertices2 = self.vertices[1]  #y
        vertices3 = self.vertices[2]  #z

        return vertices1, vertices2, vertices3
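The temporal-smoothing step in fitting() converts the current rotation matrix to Euler angles, blends them 1:3 with the running average R_Av, and converts back with angle2dcm. A minimal standalone sketch of that blend, with placeholder values for R and R_Av; componentwise Euler-angle averaging is also the likely source of the distortions the comments above work around.

import numpy as np
import navpy as nv

R = np.eye(3)                      # current rotation (placeholder)
R_Av = np.array([0.1, 0.0, 0.05])  # running average of recent Euler angles (placeholder)

curr = np.array(nv.dcm2angle(R))                 # (yaw, pitch, roll) in radians
blended = (curr + 3 * R_Av) / 4                  # 1:3 weighted blend, as in fitting()
R_smoothed = nv.angle2dcm(blended[0], blended[1], blended[2])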
Example #11
    def main(self, landmarks_first=False):
        #initialize fl and fitting modules
        processPhotos = PP.main(True)
        fitting = fit.MMFitting(self.mat, self.shapeChoice)

        #init variables
        shape = []
        curr_landmarks = []
        lastThreeRotationMatrices = []

        #for shape averaging purposes
        startingFrameNum = 1
        midFrameNum = startingFrameNum + 25

        #for frame tracking purposes
        i = 0

        #main loop
        while (self.cap.isOpened()):
            # capture frame-by-frame
            ret, frame = self.cap.read()

            #track frame num
            i += 1
            #check if camera captures
            if ret:
                self.frame = frame
                cv2.imshow('Frame', frame)
                # Press Q on keyboard to exit
                if cv2.waitKey(25) & 0xFF == ord('q'):
                    break
            # No frame was grabbed: skip this iteration and try again
            else:
                continue

            #make sure to maintain the size of the rotational matrices
            if (len(lastThreeRotationMatrices) == 3):
                lastThreeRotationMatrices.pop(0)
            # extract face landmarks
            temp_landmarks = processPhotos.processPhotos(self.frame, "Fast")
            #check if face has been found
            if temp_landmarks is not None:
                curr_landmarks = temp_landmarks
            else:
                # invoke Bulat et al.'s face-alignment algorithm to try to find a face
                # this works better under heavy occlusion, although it slows the system significantly
                print("Could not find your face. Executing back-up algorithm!")
                temp_landmarks = processPhotos.processPhotos(
                    self.frame, "Slow")
                if (temp_landmarks != []):
                    curr_landmarks = temp_landmarks
            #shape training phase
            if (i < midFrameNum):
                #perform fitting
                fitting.secInit(curr_landmarks, i)
                fitting.main()
                currentShape = fitting.identity_shape
                #record shape for averaging purposes
                shape.append(currentShape)

                #temporal smoothing
                #extract current rotational matrix and average it using Euler angles
                currentR = fitting.R
                rotAngles = nv.dcm2angle(currentR)
                lastThreeRotationMatrices.append(rotAngles)
                currentR = np.average(np.asarray(lastThreeRotationMatrices).T,
                                      axis=1)
                #output the averaged result back in the fitting module
                fitting.R_Av = currentR
            #average the shapes gathered so far
            elif (i == midFrameNum):
                #perform fitting
                fitting.secInit(curr_landmarks, i)
                fitting.main()
                #record shape for averaging purposes
                currentShape = fitting.identity_shape

                shape.append(currentShape)

                #start averaging process
                shape = np.asarray(shape).T
                shape = np.average(shape, axis=2).flatten()
                shape = np.reshape(shape, (len(shape), 1))
                #if user has chosen a puppet:
                if (self.shapeChoice == "2"):
                    # subsample vertices for puppet and construct new triangulation list with Delaunay
                    shape1 = np.reshape(shape, (len(shape) // 3, 3))
                    shape1 = shape1.T
                    puppet_vertices1 = shape1[0][::3]
                    puppet_vertices2 = shape1[1][::3]
                    puppet_vertices3 = shape1[2][::3]
                    puppet_vertices = np.asarray(
                        [puppet_vertices1, puppet_vertices2,
                         puppet_vertices3]).T.flatten()

                    puppet_vertices_forDelaunay = np.asarray(
                        [puppet_vertices1, puppet_vertices2])
                    puppet_vertices_forDelaunay = puppet_vertices_forDelaunay.T
                    from scipy.spatial import Delaunay
                    puppet_tri = Delaunay(puppet_vertices_forDelaunay)
                    puppet_vertices = np.reshape(puppet_vertices,
                                                 (len(puppet_vertices), 1))

                    #output the new shape and triangles list to the fitting and visualisation modules
                    fitting.identity_shape = puppet_vertices
                    fitting.sampled_triangles = puppet_tri.simplices

                else:
                    fitting.identity_shape = shape
                    fitting.sampled_triangles = fitting.faces[0:105600]

                #extract current rotational matrix and average it using Euler angles
                currentR = fitting.R
                rotAngles = nv.dcm2angle(currentR)
                lastThreeRotationMatrices.append(rotAngles)
                #output the averaged result back in the fitting module
                currentR = np.average(np.asarray(lastThreeRotationMatrices).T,
                                      axis=1)
                fitting.R_Av = currentR
            #continue outputing the averaged shape
            elif (i > midFrameNum):
                fitting.secInit(curr_landmarks, i)
                fitting.main()

                #extract current rotational matrix and average it using Euler angles
                currentR = fitting.R
                rotAngles = nv.dcm2angle(currentR)
                lastThreeRotationMatrices.append(rotAngles)
                #output the averaged result back in the fitting module
                currentR = np.average(np.asarray(lastThreeRotationMatrices).T,
                                      axis=1)
                fitting.R_Av = currentR
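Every branch of the main loop repeats the same bookkeeping: convert fitting.R to Euler angles, keep only the last three results, and write their componentwise average back to fitting.R_Av. A minimal sketch of that rolling average; collections.deque and the update_rotation_average helper are assumed conveniences, not part of the original code.

from collections import deque

import numpy as np
import navpy as nv

last_three = deque(maxlen=3)  # plays the role of lastThreeRotationMatrices

def update_rotation_average(current_R):
    """Record the current rotation's Euler angles and return their running average."""
    last_three.append(nv.dcm2angle(current_R))
    return np.mean(np.asarray(last_three), axis=0)

# Placeholder usage with an identity rotation:
R_Av = update_rotation_average(np.eye(3))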