Code example #1
def callback(msg, ar_tags):
    for i in range(0, len(msg.transforms)):

        # YOUR CODE HERE
        # The code should look at the transforms for each AR tag
        # Then compute the rigid body transform between AR0 and AR1, 
        # AR0 and ARZ, AR1 and ARZ
        #  hint: use the functions you wrote in exp_quat_func
        #  note: you can change anything in this function to get it working
        #  note: you may want to save the last known position of the AR tag

        # lkp and seen are assumed to be module-level containers shared across callbacks
        frame_id = msg.transforms[i].child_frame_id
        if frame_id not in ar_tags.values():
            continue
        seen.add(frame_id)
        quat = msg.transforms[i].transform.rotation
        trans = msg.transforms[i].transform.translation
        x = quat.x
        y = quat.y
        z = quat.z
        w = quat.w
        omega, theta = eqf.quaternion_to_exp(np.array([x,y,z,w]))
        g = eqf.create_rbt(omega, theta, np.array([trans.x, trans.y, trans.z]))

        lkp[frame_id] = g  # last known position / orientation of this tag
        
    print seen
    if len(lkp) == 3:
        for j1 in lkp:
            for j2 in lkp:
                if j1 != j2:
                    print((j1, j2))
                    print(eqf.compute_gab(lkp[j1], lkp[j2]))
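All of the examples on this page lean on the same small eqf (exp_quat_func) module from the EE106A lab assignments. The real module is course-provided; the sketch below reconstructs plausible implementations from the call sites, so treat the exact signatures and edge-case handling as assumptions.

import numpy as np

def quaternion_to_exp(q):
    """Convert a quaternion [x, y, z, w] into exponential coordinates
    (omega, theta): a unit rotation axis and an angle in radians."""
    theta = 2.0 * np.arccos(q[3])
    if np.isclose(theta, 0.0):
        omega = np.array([0.0, 0.0, 1.0])  # axis is arbitrary for a zero rotation
    else:
        omega = np.asarray(q[:3]) / np.sin(theta / 2.0)
    return omega, theta

def create_rbt(omega, theta, trans):
    """Build the 4x4 rigid body transform whose rotation is exp(omega^ theta)
    (Rodrigues' formula) and whose translation is trans."""
    w = np.array([[0.0, -omega[2], omega[1]],
                  [omega[2], 0.0, -omega[0]],
                  [-omega[1], omega[0], 0.0]])  # skew-symmetric omega^
    R = np.eye(3) + np.sin(theta) * w + (1.0 - np.cos(theta)) * np.dot(w, w)
    g = np.eye(4)
    g[:3, :3] = R
    g[:3, 3] = np.asarray(trans).ravel()
    return g

def compute_gab(g0a, g0b):
    """Relative transform between frames a and b, both given in frame 0:
    g_ab = inv(g_0a) . g_0b."""
    return np.dot(np.linalg.inv(g0a), g0b)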
Code example #2
    def Grab_Cube(self,
                  Handness,
                  AR_marker,
                  Accuracy=0.001,
                  Marker_z=0,
                  Base_Offsets=[0, 0, 0]):
        #The arm can grasp from two directions: one vertical, the other horizontal.

        if Handness == 'left':
            move_arm = self.MoveIt_left_arm
            arm = self.Left_Arm
            gripper = self.Left_Gripper
            self.LeftMarker = cube_table[AR_marker]
        elif Handness == 'right':
            move_arm = self.MoveIt_right_arm
            arm = self.Right_Arm
            gripper = self.Right_Gripper
            self.RightMarker = cube_table[AR_marker]
        else:
            print('Invalid input parameter for Grab_Cube function.')
            return  # bail out: move_arm, arm and gripper are undefined past this point

        self.Gripper_Control(gripper, 'open')
        _, orientation = self.TFlistener.lookupTransform(
            '/base', Handness + '_gripper', rospy.Time(0))
        marker_position, rot = self.TFlistener.lookupTransform(
            '/base', AR_marker, rospy.Time(0))
        start_position = self.Get_End_Point_Positon(Handness)
        omega, theta = eqf.quaternion_to_exp(rot)
        R1 = eqf.create_rbt(omega, theta, marker_position)

        while True:
            Marker_z = raw_input('Please input marker offset z: \n')
            Marker_z = float(Marker_z)
            Base_Offsets = raw_input('Please input Base offsets x, y, z: \n')
            Base_Offsets = map(float, Base_Offsets.split())
            if Marker_z == 999:
                break
            R2 = np.matrix([[1, 0, 0, 0], [0, 1, 0, 0], [0, 0, 1, Marker_z],
                            [0, 0, 0, 1]])
            R = np.matrix(R1) * R2
            end_position = [
                R[0, 3] + Base_Offsets[0], R[1, 3] + Base_Offsets[1],
                R[2, 3] + Base_Offsets[2]
            ]
            ## MoveIt
            print('start position:', start_position)
            print('end position: ', end_position)
            # raw_input('Start rotational!!!')
            # self.IK_MoveIt(move_arm,rot=orientation, StartPosition=start_position,EndPosition=start_position, Accuracy=Accuracy)
            raw_input('Start IK_MoveIt!!!')
            self.IK_MoveIt(move_arm,
                           rot=orientation,
                           StartPosition=start_position,
                           EndPosition=end_position,
                           Accuracy=Accuracy)  #MiddlePosition=middle_position,

        raw_input('End Start IK_MoveIt!!! Closing ....')
        rospy.sleep(0.5)
        self.Gripper_Control(gripper, 'close')
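The target computation above composes the marker pose with an offset along the marker's own z axis, then adds a hand-tuned offset in the /base frame. A minimal sketch of that composition, with hypothetical names:

import numpy as np

def grasp_target(g_base_marker, marker_z, base_offsets):
    """Grasp point = marker pose shifted by marker_z along the marker's z
    axis, then nudged by base_offsets in the /base frame."""
    offset = np.eye(4)
    offset[2, 3] = marker_z                      # translation along marker z
    g = np.dot(g_base_marker, offset)            # compose in the marker frame
    return g[:3, 3] + np.asarray(base_offsets)   # then offset in /base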
Code example #3
def callback(msg, ar_tags):
    for i in range(0, len(msg.transforms)):

        # YOUR CODE HERE
        # The code should look at the transforms for each AR tag
        # Then compute the rigid body transform between AR0 and AR1, 
        # AR0 and ARZ, AR1 and ARZ
        #  hint: use the functions you wrote in exp_quat_func
        #  note: you can change anything in this function to get it working
        #  note: you may want to save the last known position of the AR tag

        lkp[msg.transforms[i].child_frame_id] = msg.transforms[i].transform # position / orientation
       
        # print lkp
    
    # bail out until all three tags have been seen at least once
    if not all(name in lkp for name in ('ar_marker_0', 'ar_marker_1', 'ar_marker_17')):
        return

    marker1_pose = lkp['ar_marker_0']
    marker1_rot = np.array([marker1_pose.rotation.x, marker1_pose.rotation.y,marker1_pose.rotation.z, marker1_pose.rotation.w])
    marker1_trans = np.array([marker1_pose.translation.x, marker1_pose.translation.y,marker1_pose.translation.z])
    marker1_exp = eqf.quaternion_to_exp(marker1_rot)
    marker1_rbt = eqf.create_rbt(marker1_exp[0], marker1_exp[1], marker1_trans)

    marker2_pose = lkp['ar_marker_1']
    marker2_rot = np.array([marker2_pose.rotation.x, marker2_pose.rotation.y,marker2_pose.rotation.z, marker2_pose.rotation.w])
    marker2_trans = np.array([marker2_pose.translation.x, marker2_pose.translation.y,marker2_pose.translation.z])
    marker2_exp = eqf.quaternion_to_exp(marker2_rot)
    marker2_rbt = eqf.create_rbt(marker2_exp[0], marker2_exp[1], marker2_trans)

    marker3_pose = lkp['ar_marker_17']
    marker3_rot = np.array([marker3_pose.rotation.x, marker3_pose.rotation.y,marker3_pose.rotation.z, marker3_pose.rotation.w])
    marker3_trans = np.array([marker3_pose.translation.x, marker3_pose.translation.y,marker3_pose.translation.z])
    marker3_exp = eqf.quaternion_to_exp(marker3_rot)
    marker3_rbt = eqf.create_rbt(marker3_exp[0], marker3_exp[1], marker3_trans)



    g01 = eqf.compute_gab(marker1_rbt,marker2_rbt)
    g02 = eqf.compute_gab(marker1_rbt,marker3_rbt)
    g12 = eqf.compute_gab(marker2_rbt,marker3_rbt)
    print('transform between tag 0 and 1')
    print g01
    print('transform between tag 0 and 2')
    print g02
    print('transform between tag 1 and 2')
    print g12
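For context, a callback like this is typically registered against the /tf topic with the tag dictionary passed as callback_args; a sketch of the wiring (topic and message type follow the usual tf conventions, and the tag mapping here is hypothetical -- its direction varies between the examples on this page):

import rospy
from tf.msg import tfMessage

if __name__ == '__main__':
    rospy.init_node('ar_tag_transforms')
    lkp = {}  # shared last-known-pose store used by the callback
    ar_tags = {'ar0': 'ar_marker_0', 'ar1': 'ar_marker_1', 'arZ': 'ar_marker_17'}
    rospy.Subscriber('/tf', tfMessage, callback, ar_tags)
    rospy.spin()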
Code example #4
def trans_zumy(point):
    # getrawcoord and zumy_artag are project-specific globals; inv is numpy.linalg.inv
    trans, rot = getrawcoord(zumy_artag)
    (omega,theta) = eqf.quaternion_to_exp(rot)
    g0a = eqf.create_rbt(omega, theta, trans)
    g0a_inv = inv(g0a)
    (x,y) = point
    p0b = np.array([[x],[y],[0],[1]])
    pab = np.dot(g0a_inv,p0b)
    trans_z = (pab[0,0],pab[1,0])
    return trans_z
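A quick worked example of the frame change trans_zumy performs, with made-up numbers: a planar world point is mapped into the Zumy frame by the inverse of the world-to-Zumy transform.

import numpy as np
from numpy.linalg import inv

g0a = np.eye(4)
g0a[:3, 3] = [1.0, 2.0, 0.0]                  # pretend the Zumy sits at (1, 2)
p0b = np.array([[1.5], [2.0], [0.0], [1.0]])  # homogeneous point in the world frame
pab = np.dot(inv(g0a), p0b)                   # the same point in the Zumy frame
print(pab[0, 0], pab[1, 0])                   # -> 0.5 0.0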
Code example #5
def return_rbt(trans, rot):
    """
    Prints out the 4x4 rigid body transformation matrix from quaternions

    Input:
        (3,) array - translation ector
        (4,) array - rotation vector in quaternions
    """

    #YOUR CODE HERE
    omega, theta = eqf.quaternion_to_exp(rot)
    rbt = eqf.create_rbt(omega, theta, trans)
    return rbt
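A small usage check (a sketch; the expected output assumes the eqf conventions reconstructed under code example #1): a 90 degree rotation about z with a unit translation along x.

import numpy as np

q = np.array([0.0, 0.0, np.sin(np.pi / 4), np.cos(np.pi / 4)])  # 90 deg about z
g = return_rbt(np.array([1.0, 0.0, 0.0]), q)
# Expected, up to rounding:
# [[ 0. -1.  0.  1.]
#  [ 1.  0.  0.  0.]
#  [ 0.  0.  1.  0.]
#  [ 0.  0.  0.  1.]]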
Code example #6
def return_rbt(trans, rot):
    """
    Prints out the 4x4 rigid body transformation matrix from quaternions

    Input:
        (3,) array - translation ector
        (4,) array - rotation vector in quaternions
    """

    [omega, theta] = eqf.quaternion_to_exp(rot)

    g = eqf.create_rbt(omega, theta, trans)

    return g
Code example #7
def callback(msg, ar_tags):
    for i in range(0, len(msg.transforms)):
        # YOUR CODE HERE
        # The code should look at the transforms for each AR tag
        # Then compute the rigid body transform between AR0 and AR1,
        # AR0 and ARZ, AR1 and ARZ
        #  hint: use the functions you wrote in exp_quat_func

        #note:callback gets called for every message, not every packet of 3.

        #extract tag ID
        tag = msg.transforms[i].child_frame_id
        #extract rotation quaternion as an nd array
        rotation_msg = msg.transforms[i].transform.rotation
        rotation_quaternion = np.array(
            [rotation_msg.x, rotation_msg.y, rotation_msg.z, rotation_msg.w])
        #compute omega, theta, of the quaternion.
        omega, theta = eqf.quaternion_to_exp(rotation_quaternion)
        #extract translation as nd array
        translation_msg = msg.transforms[i].transform.translation
        translation = np.array(
            [translation_msg.x, translation_msg.y, translation_msg.z])
        #compute transform from camera to AR tag.
        tag_transform = eqf.create_rbt(omega, theta, translation)
        ar_tag_name = ar_tags[tag]
        most_recent_transforms[ar_tag_name] = tag_transform
        #insert tag_transform into the dictionary tags
        print("Read tag " + str(tag))
        print(tag)

    if len(most_recent_transforms) != 3:
        print("ERROR: expected transforms for exactly 3 tags")
    else:
        g0 = most_recent_transforms['ar0']
        g1 = most_recent_transforms['ar1']
        g2 = most_recent_transforms['ar2']

        g01 = eqf.compute_gab(g0, g1)
        g02 = eqf.compute_gab(g0, g2)
        g12 = eqf.compute_gab(g1, g2)

        print("g01 is ")
        print(str(g01))
        print("g02 is ")
        print(str(g02))
        print("g12 is ")
        print(str(g12))
        print("  ")
Code example #8
def return_rbt(trans, rot):
    """
    Prints out the 4x4 rigid body transformation matrix from quaternions

    Input:
        (3,) array - translation ector
        (4,) array - rotation vector in quaternions
    """

    #YOUR CODE HERE
    rot = np.array(list(rot))
    trans = np.array(list(trans))
    omega, theta = eqf.quaternion_to_exp(rot)
    ans = eqf.create_rbt(omega, theta, trans)
    # print ans
    return ans
Code example #9
def return_rbt(trans, rot):
    """
    Prints out the 4x4 rigid body transformation matrix from quaternions

    Input:
        (3,) array - translation ector
        (4,) array - rotation vector in quaternions
    """

    #YOUR CODE HERE
    trans = np.array(trans)
    rot = np.array(rot)
    exp_form = eqf.quaternion_to_exp(rot)
    # print exp_form
    omega = exp_form[0]
    theta = exp_form[1]
    g = eqf.create_rbt(omega, theta, trans)
    return g
Code example #10
def make_g_d(pos, omega, theta):
    """
    Constructs the g_d configuration, with the rotation the same as it already was

    Args:
    pos - (3,) ndarray: the desired end effector UR5
    omega - (3,) ndarray: the rotation axis
    theta: angle

    Returns:
    g_0 - (4, 4) ndarray: the UR5 starting condition
    """

    # g_d = np.array([[1, 0, 0, pos[0]], [0, 0, -1, pos[1]], [0, 1, 0, pos[2]], [0, 0, 0, 1]])

    g_d = eqf.create_rbt(omega, theta, pos)

    return g_d
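The commented-out hard-coded matrix above is exactly a +90 degree rotation about x, so the exponential form reproduces it; a quick check (a sketch):

import numpy as np

pos = np.array([0.3, 0.2, 0.1])                            # arbitrary target position
g_d = make_g_d(pos, np.array([1.0, 0.0, 0.0]), np.pi / 2)
# g_d[:3, :3] is approximately [[1, 0, 0], [0, 0, -1], [0, 1, 0]]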
Code example #11
def transform_z(p_gc, p_zc, r_zc):
    """
    Converts a point in the camera frame to a point in the zumy frame
        
    Args:
    p_cg - (3,) ndarray: a point in the camera frame
    p_cz - (3,) ndarray: the position of the zumy in the camera frame
    r_cz - (4,) ndarray: the rotation (quaternion) from the camera frame to the zumy frame

    Returns:
    position - (3,) ndarray: the position of the point in the zumy frame
    """

    [omega_zc, theta_zc] = eqf.quaternion_to_exp(r_zc)
    g_zc = np.round(eqf.create_rbt(omega_zc, theta_zc, p_zc), 5)
    g_zc = np.linalg.inv(g_zc)
    # print theta_zc
    position = np.dot(g_zc, np.append(p_gc, 1))[:3]
    # position = np.dot(g_zc, np.append(p_gc, 1))

    return position
Code example #12
File: calibrate.py  Project: Wes-Wang/Baxter-Speaks
def do_stuff():
    #jump is used for indexing our transformation matrix
    jump = [0, 13, 12, 11, 10, 9, 8, 7, 6, 5, 4, 3, 2, 1, 0]

    # Initialize the tf listener, wait for the user to press enter, and build an
    # all-zeros dictionary of stored transformation matrices (our "alphabet"),
    # to be filled with the transformations seen by the camera.
    listener = tf.TransformListener()
    raw_input("Press enter to begin: ")
    # sums and bindings refer to the same 106-entry list of [rotation, translation]
    # accumulators throughout this function.
    bindings = [[np.zeros((3, 3)), np.zeros((3, 1))] for _ in range(106)]
    sums = bindings
    print("CALIBRATING --> Please follow the directions :) ")

    letters = [
        'A', 'B', 'C', 'D', 'E', 'F', 'G', 'H', 'I', 'J', 'K', 'L', 'M', 'N',
        'O', 'P', 'Q', 'R', 'S', 'T', 'U', 'V', 'W', 'X', 'Y', 'Z', '_'
    ]
    # Every letter initially points at the same zeroed bindings list; each entry is
    # replaced with its own averaged bindings at the end of the calibration loop.
    alphabet = {letter: bindings for letter in letters}

    # trans_final records, for each letter, the alphabet-matrix indices of the
    # transformations that should be seen if the sign is done properly (all AR
    # tags recognized) during calibration; it is used below for quality control.
    tag_trans_eyeball = {
        'A': [0, 6, 7, 8, 9, 12],
        'B': [0, 1, 2, 3, 4, 12],
        'C': [5, 11],
        'D': [0, 4, 6, 7, 8, 12],
        'E': [0, 6, 7, 8, 9, 12],
        'F': [0, 1, 2, 3, 9, 12],
        'G': [9, 10],
        'H': [8, 9, 10],
        'I': [0, 1, 7, 8, 9, 12],
        'J': [0, 1, 7, 8, 9, 12],
        'K': [0, 3, 4, 6, 7, 12],
        'L': [0, 4, 5, 6, 7, 8],
        'M': [0, 6, 7, 8, 9],
        'N': [0, 6, 7, 8, 9],
        'O': [5, 11],
        'P': [13, 14],
        'Q': [13, 14],
        'R': [0, 3, 4, 12],
        'S': [0, 6, 7, 12],
        'T': [0, 6, 7, 8, 9],
        'U': [0, 3, 4, 12],
        'V': [0, 3, 4, 12],
        'W': [0, 2, 3, 4, 12],
        'X': [0, 6, 7, 8, 12],
        'Y': [0, 1, 5, 7, 8, 9],
        'Z': [0, 4, 6, 7, 8, 12],
        '_': [0, 1, 2, 3, 4, 5]
    }

    trans_final = {letter: [] for letter in letters}

    #make trans_final
    for i in letters:
        if len(tag_trans_eyeball[i]) == 1:
            trans_final[i] = [0]
        else:
            for n in tag_trans_eyeball[i]:
                for m in tag_trans_eyeball[i]:
                    if m > n:
                        trans_final[i].append(
                            str(n + sum(jump[0:(n + 1)]) + (m - n)))

    # The while loop below iterates through each letter (and space) and listens for
    # 3 seconds, recording the average transformation between every pair of AR tags
    # seen by the camera.
    loop = 1
    while loop <= 27:
        #initialize useful loop variables
        n = np.zeros(106)
        scale = s = 0
        bindings = [[np.zeros((3, 3)), np.zeros((3, 1))] for _ in range(106)]
        sums = bindings  # alias, as above
        listener.clear()
        t_end = time.time() + 3
        print("Calibrating " + letters[loop - 1])
        cal_check = []
        #this loop listens for each letter
        while time.time() < t_end:
            for i in range(15):
                for j in range(15):
                    if j > i:
                        #only store transformations that exist
                        if (listener.frameExists('ar_marker_' + str(i)) and
                                listener.frameExists('ar_marker_' + str(j))):
                            #Get translation and rotation from tf topic
                            try:
                                (trans, rot) = listener.lookupTransform(
                                    'ar_marker_' + str(i),
                                    'ar_marker_' + str(j), rospy.Time(0))
                            except:
                                continue
                            #Convert quaternion to an omega and theta
                            (omega, theta) = eqf.quaternion_to_exp(rot)
                            #Relative configuration between the two AR tags
                            g = eqf.create_rbt(omega, theta, trans)
                            #Flat index for the (i, j) pair; index 0 is unused
                            #so indices line up with AR tag numbers
                            idx = i + sum(jump[0:(i + 1)]) + (j - i)
                            n[idx] = n[idx] + 1  #count every sample, including the first
                            if np.sum(bindings[idx][0]) == 0:
                                #first sample for this pair
                                print(str(i) + "," + str(j))
                                cal_check.append(str(i) + str(j))
                                sums[idx][0] = bindings[idx][0] = g[0:3, 0:3]
                                sums[idx][1] = bindings[idx][1] = g[0:3, 3]
                            #Sum the values in the configuration matrix to later be averaged
                            else:
                                sums[idx][0] = sums[idx][0] + g[0:3, 0:3]
                                sums[idx][1] = sums[idx][1] + g[0:3, 3]

            # SCALE FACTOR: translation magnitudes depend on distance from the
            # camera, so we normalize by one of three reference tag pairs,
            # (0,17), (11,15) or (13,16), depending on hand configuration --
            # no single pair of tags is visible for all signs.
            for case, (a, b) in enumerate([('ar_marker_0', 'ar_marker_17'),
                                           ('ar_marker_11', 'ar_marker_15'),
                                           ('ar_marker_13', 'ar_marker_16')]):
                if listener.canTransform(a, b, rospy.Time(0)):
                    try:
                        (trans, rot) = listener.lookupTransform(a, b,
                                                                rospy.Time(0))
                    except:
                        continue
                    #Convert quaternion to an omega and theta
                    (omega, theta) = eqf.quaternion_to_exp(rot)
                    g_scale = eqf.create_rbt(omega, theta, trans)
                    scale = scale + np.linalg.norm(g_scale[0:3, 3])
                    s = s + 1

        #letting the user know if the scale tags were seen or not
        if s != 0:
            scale = scale / s
            print("SCALE HAPPY :)")
            #print(case)
        else:
            scale = 1
            print("WARNING: S = 0; if G or H was signed all is OK")
            #print(case)

        #averaging out all respective transformations seen during the 3 seconds
        for i in xrange(1, 106):
            #Perform calculation if tag was seen
            if (np.sum(bindings[i][0]) != 0):
                bindings[i][0] = sums[i][0] / n[i]
                bindings[i][1] = sums[i][1] / n[i]
        alphabet[letters[loop - 1]] = bindings
        print(str(loop) + " :loop number")
        print(letters[loop - 1])

        #Quality assurance, if proper tags weren't seen user is prompted to recalibrate that letter
        if len(trans_final[letters[loop - 1]]) == len(cal_check):
            raw_input(
                "Press enter when you're ready to move to the next letter.")
            for z in xrange(0, 106):
                alphabet[letters[loop - 1]][z].append(scale)
            loop = loop + 1  #successful calibration
        else:
            raw_input("Try calibration again for same letter")

    # below we write and save a timestamped file to be later read by the final translation code
    timeout = str(time.time())
    with open('alphabet.' + timeout + '.txt', 'wb') as f:
        pickle.dump(alphabet, f)
    with open('alphabet.' + timeout + '.txt', 'rb') as out_file:
        my_list = pickle.load(out_file)

    return my_list
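The expression i + sum(jump[0:(i + 1)]) + (j - i), repeated throughout this file and in the final translation code below, flattens an ordered tag pair (i, j) with i < j into a unique slot of the 106-entry bindings list. A sketch of it as a named helper, for readability:

JUMP = [0, 13, 12, 11, 10, 9, 8, 7, 6, 5, 4, 3, 2, 1, 0]

def pair_index(i, j):
    """Flat index for the transform between ar_marker_i and ar_marker_j, i < j."""
    return i + sum(JUMP[:i + 1]) + (j - i)

# With 15 tags (0..14) the indices run from pair_index(0, 1) == 1 up to
# pair_index(13, 14) == 105; slot 0 is never used.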
Code example #13
def quat_to_rbt(pos, rot):
    omega, theta = eqf.quaternion_to_exp(np.array([rot[0], rot[1], rot[2], rot[3]]))
    rbt = eqf.create_rbt(omega, theta, np.array([pos[0], pos[1], pos[2]]))
    return rbt
Code example #14
File: follow_ar_tag.py  Project: dHutchings/ee106a
def follow_ar_tag(zumy, ar_tags):
    """
    This function should obtain the rigid body transform between the Zumy and the AR ar_tag
    Then compute and send the twist required to drive the Zumy to the AR ar_tag
    """
    print(zumy)
    print(ar_tags)
    # YOUR CODE HERE

    #copy paste from lab2, change the proper nouns.  Plumbing to make a publisher

    # Create a timer object that will sleep long enough to result in
    # a 10Hz publishing rate
    r = rospy.Rate(10) # 10hz


    #plumbing to get transforms:
    listener = tf.TransformListener()

    # Loop until the node is killed with Ctrl-C
    while not rospy.is_shutdown():
        print("loop " + str(random.random()))
        #ok, need to compute the transform between tag[0] (stationary) and tag[1] (zumy)
        global last_seen_zumy  # declare before the assignment in the try block below
        try_success = False
        try:
            now = rospy.Time.now()
            
            listener.waitForTransform(ar_tags['ar1'],ar_tags['arZ'],now,rospy.Duration(1))
            (trans, rot) = listener.lookupTransform(ar_tags['ar1'], ar_tags['arZ'],now)# rospy.Time(0))
            
            rbt = ats.return_rbt(np.array(trans),np.array(rot))


            #print('gab between ' + ar_tags['ar1'] + ' and ' + ar_tags['arZ'])
            #print rbt
            
            twist = ats.compute_twist(rbt=rbt)
            #print('twist between ' + ar_tags['ar1'] + ' and ' + ar_tags['arZ'])
            #print twist

            #now, need to create a geometry_msgs Twist message.



            theta = math.degrees(math.atan2(rbt[1][3],rbt[0][3]))
            print(theta)
            #extract theta out of the RBT, because we want the zumy to move directly towards the target, not park on top of (and with the orientation of) the target
            
            last_seen_zumy = time.time()
            try_success = True      
        except:
            traceback.print_exc()
            #print("in Except")
            print("AR TAGS EXISTANCE IS AS FOLLOWS: ")
            print(listener.frameExists(ar_tags['ar1']))
            print(listener.frameExists(ar_tags['arZ']))
            print ''

        out = Twist()

        if (time.time() - last_seen_zumy > 2):
            #I haven't seen zumy for 2 seconds.  Issue E-stop
            #do publish here so i can be more selective on when to NOT publish stops.
            pub.publish(out)
            print("Stop!")

        elif try_success:

            out = Twist()
            
            #if abs(theta) < 5:
            #go forward!
            #print("FORWARD!")

            #ok, two parts: going forward, and turning.

            print(trans)

            dist = trans[0]
            print("dist is " + str(dist))
            print("Theta is " + str(theta))
            
            if abs(dist)>1:
                linear_gain = .13
            elif abs(dist) > .5:
                linear_gain = .12
            else:
                linear_gain = .10

            if dist > .12:
                out.linear.x =  linear_gain# * twist[0][0]
            else:
                
                #mission accomplished... i'm really close
                pass

            #else:
            #just rotate towards objective.
            #let's come up with a new RBT that achieves those goals... just rotation, no translation.
            omega = np.array([0,0,1]) #rotate about z axis?
            theta_rad = math.radians(theta)
            trans = np.array([0,0,0]) #no translation!
            rbt = eqf.create_rbt(omega,theta_rad,trans)
            twist = ats.compute_twist(rbt=rbt)

            #ok, so now I have a rotational axis.  I now need to scale it by something... ok, what about linear function?

            if abs(theta) > 20:
                rotational_gain = .18
            else:
                rotational_gain = .18*abs(theta/20)

            out.angular.z = rotational_gain*twist[1][2]
            

            print(out)

            pub.publish(out)
        #S IS STUFF TO PUBLISH
        #pub.publish(s,rospy.get_time())

        # Use our rate object to sleep until it is time to publish again
        r.sleep()
            
    exit_handler()
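Distilled, the loop above is a simple proportional controller: a distance-scheduled forward speed plus a turn rate proportional to the bearing error, saturated beyond 20 degrees. A sketch with the same constants (the sign handling here is an assumption; in the code above it comes out of compute_twist):

def zumy_cmd(dist, theta_deg):
    """Return (linear, angular) commands toward a tag dist metres ahead
    at bearing theta_deg."""
    linear = 0.13 if abs(dist) > 1 else 0.12 if abs(dist) > 0.5 else 0.10
    if dist <= 0.12:
        linear = 0.0  # close enough: stop advancing
    angular = 0.18 * min(abs(theta_deg) / 20.0, 1.0)
    if theta_deg < 0:
        angular = -angular
    return linear, angular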
Code example #15
def compute_target(target, zumy_cf):
    # print target, zumy_cf
    o1, t1 = eqf.quaternion_to_exp(np.array([zumy_cf.rotation.x, zumy_cf.rotation.y, zumy_cf.rotation.z, zumy_cf.rotation.w]))
    rb_cur = eqf.create_rbt(o1, t1, np.array([zumy_cf.translation.x, zumy_cf.translation.y, zumy_cf.translation.z]))
    # print np.dot(rb_cur, np.asarray([target, 0,0,1]).reshape(4,1))
    return np.dot(rb_cur, np.asarray([target, 0,0,1]).reshape(4,1))
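In words: compute_target takes a scalar offset target along the Zumy's own x axis and returns that point in the world frame. A quick check with made-up numbers (a sketch; the Transform message comes from geometry_msgs):

import numpy as np
from geometry_msgs.msg import Transform, Vector3, Quaternion

# Zumy at (1, 2), rotated 90 degrees about z:
zumy_cf = Transform(Vector3(1.0, 2.0, 0.0),
                    Quaternion(0.0, 0.0, np.sin(np.pi / 4), np.cos(np.pi / 4)))
p = compute_target(0.5, zumy_cf)  # 0.5 m ahead -> approximately [1.0, 2.5, 0.0, 1.0]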
Code example #16
try:
    # print ar_tags['ar0'], ar_tags['ar1']
    # twist = None
    # trans = np.array([[1],[0],[0]])
    # tnew = np.array([[0],[1],[0]])
    # rot = np.array([[0],[0],[0],[1]])
    # rw_cur = Transform(Vector3(trans[0], trans[1], trans[2]),Quaternion(rot[0], rot[1], rot[2], rot[3]))
    # print rw_cur
    # rw_des = Transform(Vector3(tnew[0], tnew[1], tnew[2]),Quaternion(rot[0], rot[1], rot[2], rot[3]))
    # rw_cur = np.array([[1,0,0,0],[0,1,0,1],[0,0,1,0],[0,0,0,1]])
    # rw_des = np.array([[1,0,0,1],[0,1,0,0],[0,0,1,0],[0,0,0,1]])
    o1, t1 = eqf.quaternion_to_exp(np.array([rw_cur.rotation.x, rw_cur.rotation.y, rw_cur.rotation.z, rw_cur.rotation.w]))
    o2, t2 = eqf.quaternion_to_exp(np.array([rw_des.rotation.x, rw_des.rotation.y, rw_des.rotation.z, rw_des.rotation.w]))
    print 'blah' + str([rw_des.rotation.x, rw_des.rotation.y, rw_des.rotation.z, rw_des.rotation.w])
    # print 'o2' + str(o2)
    rb_cur = eqf.create_rbt(o1, t1, np.array([rw_cur.translation.x, rw_cur.translation.y, rw_cur.translation.z]))
    rb_des = eqf.create_rbt(o2, t2, np.array([rw_des.translation.x, rw_des.translation.y, rw_des.translation.z]))
    homogeneous = eqf.compute_gab(rb_cur,rb_des)
    print 'homogeneous: ' + str(homogeneous)
    trans = homogeneous[0:3,3]
    # (omega,theta) = eqf.find_omega_theta(homogeneous[0:2,0:2])
    # try:
    #     (trans, rot) = listener.lookupTransform(rw_des, rw_cur, rospy.Time(0))
    # except:
    #     print "not enough"
    #     continue
    twist = [[0,0,0], [0,0,0]]
    unfound = True
    print 'trans ' + str(trans)
    if trans[0]**2 + trans[1]**2 + trans[2]**2 < .01:
        pub.publish(Twist(Vector3(0, 0, 0), Vector3(0, 0, 0)))
Code example #17
File: final_code.py  Project: Wes-Wang/Baxter-Speaks
def do_stuff():

    #Initialize values of arrays:
    jump = [0, 13, 12, 11, 10, 9, 8, 7, 6, 5, 4, 3, 2, 1, 0]
    bindings = [[np.zeros((3, 3)), np.zeros((3, 1))] for _ in range(106)]
    sums = bindings  # sums aliases bindings throughout, as in calibrate.py above

    tags = {
        'A': [0, 6, 7, 8, 9, 12],
        'B': [0, 1, 2, 3, 4, 12],
        'C': [5, 11],
        'D': [0, 4, 6, 7, 8, 12],
        'E': [0, 6, 7, 8, 9, 12],
        'F': [0, 1, 2, 3, 9, 12],
        'G': [9, 10],
        'H': [8, 9, 10],
        'I': [0, 1, 7, 8, 9, 12],
        'J': [0, 1, 7, 8, 9, 12],
        'K': [0, 3, 4, 6, 7, 12],
        'L': [0, 4, 5, 6, 7, 8],
        'M': [0, 6, 7, 8, 9],
        'N': [0, 6, 7, 8, 9],
        'O': [5, 11],
        'P': [13, 14],
        'Q': [13, 14],
        'R': [0, 3, 4, 12],
        'S': [0, 6, 7, 12],
        'T': [0, 6, 7, 8, 9],
        'U': [0, 3, 4, 12],
        'V': [0, 3, 4, 12],
        'W': [0, 2, 3, 4, 12],
        'X': [0, 6, 7, 8, 12],
        'Y': [0, 1, 5, 7, 8, 9],
        'Z': [0, 4, 6, 7, 8, 12],
        '_': [0, 1, 2, 3, 4, 5]
    }

    letters = [
        'A', 'B', 'C', 'D', 'E', 'F', 'G', 'H', 'I', 'J', 'K', 'L', 'M', 'N',
        'O', 'P', 'Q', 'R', 'S', 'T', 'U', 'V', 'W', 'X', 'Y', 'Z', '_'
    ]

    all_frames_rec = []
    all_bindings = []
    all_scales = []
    all_moves = []

    #Wait for user to press a key to begin
    raw_input("Press enter to begin: ")

    #Bring in calibration file
    with open('alphabet.txt', 'rb') as out_file:
        alphabet = pickle.load(out_file)
    print('Listening...')

    ###################LISTENING LOOP###########################
    listener = tf.TransformListener()
    stop = False
    k = 1

    ###################### ONE LETTER BEGIN ##################
    while not stop:
        n = np.zeros(106)  #counter for averaging
        s = scale = 0
        frames = []
        listener.clear()

        t_end = time.time() + 1.3
        print("Letter " + str(k))
        avg = True
        g_init = []
        g_final = []
        while time.time() < t_end:

            #Get translation and rotation of each tag relative to origin tag from tf topic if the frame exists
            #Check for stop signal
            markExist = np.zeros(15)

            for i in xrange(6, 10):
                if listener.canTransform('ar_marker_' + str(i),
                                         'ar_marker_10', rospy.Time(0)):
                    markExist[i] = 1

            if (markExist[6] and markExist[7] and markExist[8]
                    and markExist[9]):
                print("Stop signal received. Proceeding to translation.")
                stop = True
                avg = False
                break

            # SCALE FACTOR: pair (0,17) covers all letters except P,Q and C,O;
            # P,Q uses (13,16) and C,O uses (11,15)
            for a, b in [('ar_marker_0', 'ar_marker_17'),
                         ('ar_marker_11', 'ar_marker_15'),
                         ('ar_marker_13', 'ar_marker_16')]:
                if listener.canTransform(a, b, rospy.Time(0)):
                    try:
                        (trans, rot) = listener.lookupTransform(a, b,
                                                                rospy.Time(0))
                    except:
                        continue
                    #Convert quaternion to an omega and theta
                    (omega, theta) = eqf.quaternion_to_exp(rot)
                    g_scale = eqf.create_rbt(omega, theta, trans)
                    scale = scale + np.linalg.norm(g_scale[0:3, 3])
                    s = s + 1

            # Determine whether the sign moves by comparing the initial and final
            # transformation from the USB cam to AR tag 8.
            # Obtain the initial configuration here.
            if g_init == []:
                if listener.canTransform('usb_cam', 'ar_marker_8',
                                         rospy.Time(0)):
                    try:
                        (trans, rot) = listener.lookupTransform(
                            'usb_cam', 'ar_marker_8', rospy.Time(0))
                    except:
                        continue
                    (omega, theta) = eqf.quaternion_to_exp(rot)
                    g_init = eqf.create_rbt(omega, theta, trans)

            #Check all pairwise transformations
            if avg == True:
                for i in range(15):
                    for j in range(15):
                        if (i < j and listener.frameExists('ar_marker_' + str(i))
                                and listener.frameExists('ar_marker_' + str(j))):
                            #Get translation and rotation from tf topic
                            try:
                                (trans, rot) = listener.lookupTransform(
                                    'ar_marker_' + str(i),
                                    'ar_marker_' + str(j), rospy.Time(0))
                            except:
                                continue
                            (omega, theta) = eqf.quaternion_to_exp(rot)
                            #Relative configuration between the two AR tags
                            g = eqf.create_rbt(omega, theta, trans)
                            #Flat index for the (i, j) pair; index 0 is unused
                            idx = i + sum(jump[0:(i + 1)]) + (j - i)
                            n[idx] = n[idx] + 1
                            if np.sum(bindings[idx][0]) == 0:
                                #first sample for this pair
                                print(str(i) + "," + str(j))
                                frames.append(idx)
                                sums[idx][0] = bindings[idx][0] = g[0:3, 0:3]
                                sums[idx][1] = bindings[idx][1] = g[0:3, 3]
                            #Sum the values to be averaged later
                            else:
                                sums[idx][0] = sums[idx][0] + g[0:3, 0:3]
                                sums[idx][1] = sums[idx][1] + g[0:3, 3]

            #Obtain the final configuration from USB cam to AR Tag 8
            if (listener.canTransform('usb_cam', 'ar_marker_8',
                                      rospy.Time(0))):
                try:
                    (trans,
                     rot) = listener.lookupTransform('usb_cam', 'ar_marker_8',
                                                     rospy.Time(0))
                except:
                    continue
                (omega, theta) = eqf.quaternion_to_exp(rot)
                g_final = eqf.create_rbt(omega, theta, trans)

        #Determine if the sign is static. If the difference between the initial and final rotation of AR Tag 8 relative to the USB cam is large enough, consider the tag as moving.
        #The 0.3 threshold is arbitrary; for static signs the values of move_rot come out on the order of hundredths, so static and dynamic signs are easy to tell apart.
        move_rot = 0
        move = 0

        if g_init != [] and g_final != []:
            for i in xrange(0, 2):
                move_rot = move_rot + abs(g_init[i][i] - g_final[i][i])
            if move_rot > 0.3:
                move = 1

        #Perform calculation to obtain average and assign final value to bindings
        if avg == True:
            for i in xrange(1, 106):
                #Perform calculation if tag was seen
                if (np.sum(bindings[i][0]) != 0):
                    bindings[i][0] = sums[i][0] / n[i]
                    bindings[i][1] = sums[i][1] / n[i]
        if s != 0:
            scale = scale / s
        else:
            scale = 1

        if (s != 0) or (bindings[91][1][1] != 0):
            all_frames_rec.append(frames)
            all_bindings.append(bindings)
            all_scales.append(scale)
            all_moves.append(move)
            if avg == True:
                k = k + 1
                raw_input(
                    "Press enter when you're ready to move to the next letter."
                )
        else:
            if avg == True:
                raw_input(
                    "Something went wrong, please sign again (HINT: check if the scale tags are in view)"
                )

        #Reset bindings and sums to zero for the next letter.
        bindings = [[np.zeros((3, 3)), np.zeros((3, 1))] for _ in range(106)]
        sums = bindings  # alias, as above
        ###################### ONE LETTER END ##################
    ###################LISTENING LOOP###########################


    ##################### TRANSLATE ################################
    #Initialize the arrays used for translating.
    phrase_out = []
    tag_trans = {
        'A': [0, 6, 7, 8, 9, 12],
        'B': [0, 1, 2, 3, 4, 12],
        'C': [5, 11],
        'D': [0, 4, 6, 7, 8, 12],
        'E': [0, 6, 7, 8, 9, 12],
        'F': [0, 1, 2, 3, 9, 12],
        'G': [9, 10],
        'H': [8, 9, 10],
        'I': [0, 1, 7, 8, 9, 12],
        'J': [0, 1, 7, 8, 9, 12],
        'K': [0, 3, 4, 6, 7, 12],
        'L': [0, 4, 5, 6, 7, 8],
        'M': [0, 6, 7, 8, 9],
        'N': [0, 6, 7, 8, 9],
        'O': [5, 11],
        'P': [13, 14],
        'Q': [13, 14],
        'R': [0, 3, 4, 12],
        'S': [0, 6, 7, 12],
        'T': [0, 6, 7, 8, 9],
        'U': [0, 3, 4, 12],
        'V': [0, 3, 4, 12],
        'W': [0, 2, 3, 4, 12],
        'X': [0, 6, 7, 8, 12],
        'Y': [0, 1, 5, 7, 8, 9],
        'Z': [0, 4, 6, 7, 8, 12],
        '_': [0, 1, 2, 3, 4, 5]
    }
    trans_final = {letter: [] for letter in letters}

    #Initialize trans_final with the indices in which a transformation should be found for that letter.
    for i in letters:
        if len(tag_trans[i]) == 1:
            trans_final[i] = [0]
        else:
            for n in tag_trans[i]:
                for m in tag_trans[i]:
                    if m > n:
                        trans_final[i].append(
                            str(n + sum(jump[0:(n + 1)]) + (m - n)))

    #Start the translation process
    for k in xrange(0, len(all_frames_rec) - 1):
        possible = {letter: 0 for letter in letters}
        matches = {letter: 0 for letter in letters}
        error = {letter: 0 for letter in letters}
        delta = {letter: 0 for letter in letters}

        #Determine if AR tags seen in the signed letter are too many, exact, or too little for a certain letter
        exact = []
        over = []
        under = []
        for i in letters:
            for j in all_frames_rec[k]:
                if np.sum(alphabet[i][j][1]) != 0:
                    matches[i] = matches[i] + 1
                else:
                    continue

            delta[i] = matches[i] - len(trans_final[i])

            #Append the letter to the exact, over, and under matches for letter 'k' signed
            if (delta[i] == 0) and (len(all_frames_rec[k]) == len(
                    trans_final[i])):
                exact.append(i)

            elif (delta[i] >= 0) and (len(all_frames_rec[k]) > len(
                    trans_final[i])):
                over.append(i)

            elif delta[i] < 0:
                under.append(i)

        #Now that we have determined which letters are possible, narrow down the
        #possibilities by comparing against the calibrated letters
        error_rot = {letter: 0 for letter in letters}
        error_trans = {letter: 0 for letter in letters}

        #For exact matches, there are specific cases that we must specifically consider to increase the accuracy
        if exact != []:
            #Special case for M,N,T. The distinguishing factor is which two fingers the thumb is inserted between. The tag pair with the largest translational difference determines which letter was signed.
            #i.e. if the largest translation among the tag pairs (6,7),(7,8),(8,9) is between pair (6,7), then the thumb is between the pinky and ring finger, which is letter M.
            if exact == ['M', 'N', 'T']:
                em = np.linalg.norm(all_bindings[k][70][1])
                en = np.linalg.norm(all_bindings[k][78][1])
                tee = np.linalg.norm(all_bindings[k][85][1])
                if (em > en) and (em > tee):
                    phrase_out.append('M')
                if (em < en) and (en > tee):
                    phrase_out.append('N')
                if (em < tee) and (en < tee):
                    phrase_out.append('T')
            #Special case for R,U,V. R has the index and middle finger crossed, resulting in a negative translation between (3,4).
            #U and V are distinguished by the magnitude of the translation between (3,4): a larger translation means V, because in V the index and middle fingers are spread further apart.
            elif exact == ['R', 'U', 'V']:
                error_transU = 0
                error_transV = 0
                if all_bindings[k][40][1][0] < 0:
                    phrase_out.append('R')
                else:
                    for i in [0, 1, 2]:
                        for z in xrange(0, 106):
                            all_bindings[k][z][1] = all_bindings[k][z][
                                1] * all_scales[k] / alphabet['U'][1][2]
                        error_transU = error_transU + (
                            all_bindings[k][40][1][i] -
                            alphabet['U'][40][1][i])**2

                        for z in xrange(0, 106):
                            all_bindings[k][z][1] = all_bindings[k][z][
                                1] * all_scales[k] / alphabet['U'][1][2]
                        error_transV = error_transV + (
                            all_bindings[k][40][1][i] -
                            alphabet['V'][40][1][i])**2
                    if error_transV > error_transU:
                        phrase_out.append('U')
                    else:
                        phrase_out.append('V')
            #Special case for C,O. The rotation of tag 5 on the thumb changes depending on whether C or O is signed. In signing O, the axes of tag 5 align with the axes of tag 11 on the side of the hand near the pinky.
            #In signing C, the axes of tag 5 misalign with tag 11. The decision is made by determining if the signed letter is closer to the rotation of C or O.
            elif exact == ['C', 'O']:
                error_rot = error_rotC = error_rotO = error_transC = error_transO = 0
                for i in [0, 1]:
                    error_rot = error_rot + all_bindings[k][66][0][i, i]
                    error_rotC = error_rotC + alphabet['C'][66][0][i, i]
                    error_rotO = error_rotO + alphabet['O'][66][0][i, i]

                if abs(error_rot - error_rotC) > abs(error_rot - error_rotO):
                    phrase_out.append('O')
                else:
                    phrase_out.append('C')

            #Special case for A,E. The translation of tag 12 relative to tags 6,7,8, and 9 are larger in magnitude for E than in A.
            elif exact == ['A', 'E']:
                error_transA = 0
                error_transE = 0

                for i in [75, 82, 88, 93]:
                    error_transA = error_transA + (all_bindings[k][i][1][1] -
                                                   alphabet['A'][i][1][1])**2
                    error_transE = error_transE + (all_bindings[k][i][1][1] -
                                                   alphabet['E'][i][1][1])**2

                if error_transA > error_transE:
                    phrase_out.append('E')
                else:
                    phrase_out.append('A')

            #Special case for D,Z. If in the signing phase, the sign was determined to have moved, then the letter is Z. If the sign was a static sign, then the letter is D.
            elif exact == ['D', 'Z']:
                if all_moves[k] == 1:
                    phrase_out.append('Z')
                else:
                    phrase_out.append('D')

            #Special case for I,J. If in the signing phase, the sign was determined to have moved, then the letter is J. If the sign was a static sign, then the letter is I.
            elif exact == ['I', 'J']:
                if all_moves[k] == 1:
                    phrase_out.append('J')
                else:
                    phrase_out.append('I')
            #If letter matches none of these special cases then look purely at a least squares error calculation comparison with the calibrated letters. Select the letter with the smallest error.
            else:
                for m in exact:
                    for z in xrange(0, 106):
                        all_bindings[k][z][1] = all_bindings[k][z][
                            1] * all_scales[k] / alphabet[m][1][2]
                    for l in all_frames_rec[k]:
                        for i in [0, 1, 2]:
                            for j in [0, 1, 2]:
                                error_rot[m] = error_rot[m] + (
                                    all_bindings[k][l][0][i, j] -
                                    alphabet[m][l][0][i, j])**2

                        for i in [0, 1, 2]:
                            error_trans[m] = error_trans[m] + (
                                all_bindings[k][l][1][i] -
                                alphabet[m][l][1][i])**2

                    error[m] = error_trans[m] + error_rot[m]

                for i in letters:
                    if error[i] == 0:
                        error[i] = 100000000
                phrase_out.append(min(error, key=error.get))

        #If there were no exact matches and the camera saw more tags than a letter should have, do a least squares error calculation with the "over" letters.
        elif over != []:
            for m in over:
                for z in xrange(0, 106):
                    all_bindings[k][z][1] = all_bindings[k][z][1] * all_scales[
                        k] / alphabet[m][1][2]
                for l in all_frames_rec[k]:
                    for i in [0, 1, 2]:
                        for j in [0, 1, 2]:
                            error_rot[m] = error_rot[m] + (
                                all_bindings[k][l][0][i, j] -
                                alphabet[m][l][0][i, j])**2

                    for i in [0, 1, 2]:
                        error_trans[m] = error_trans[m] + (
                            all_bindings[k][l][1][i] - alphabet[m][l][1][i])**2
                error[m] = error_trans[m] + error_rot[m]

            for i in letters:
                if error[i] == 0:
                    error[i] = 100000000
            phrase_out.append(min(error, key=error.get))

        #If there were no exact matches and no over matches, then compute least squares error calculations with letters in which less tags were seen.
        else:
            for m in under:
                for z in xrange(0, 106):
                    all_bindings[k][z][1] = all_bindings[k][z][1] * all_scales[
                        k] / alphabet[m][1][2]
                for l in all_frames_rec[k]:
                    for i in [0, 1, 2]:
                        for j in [0, 1, 2]:
                            error_rot[m] = error_rot[m] + (
                                all_bindings[k][l][0][i, j] -
                                alphabet[m][l][0][i, j])**2

                    for i in [0, 1, 2]:
                        error_trans[m] = error_trans[m] + (
                            all_bindings[k][l][1][i] - alphabet[m][l][1][i])**2

                error[m] = error_trans[m] + error_rot[m]

            for i in letters:
                if error[i] == 0:
                    error[i] = 100000000
            phrase_out.append(min(error, key=error.get))
            print("Chose " + (min(error, key=error.get) + "from UNDER."))

    #Print the resulting signed phrase to the user
    if phrase_out == []:
        phrase_out = 'YOU DID NOT SIGN ANYTHING'
    else:
        print(phrase_out)

    #Convert underscore to space
    phrase_txt = []
    for x in xrange(0, len(phrase_out)):
        if phrase_out[x] == '_':
            phrase_txt.append(' ')
        else:
            phrase_txt.append(phrase_out[x])

    phrase_txt = ''.join(phrase_txt)
    print(phrase_txt)

    #Create a text file containing the signed phrase
    # file = open("/var/local/home/team23/Downloads/interpreted.txt","w")
    with open(
            "/home/cc/ee106a/fa16/class/ee106a-ada/Downloads/interpreted.txt",
            "w") as out_file:
        out_file.write(phrase_txt)
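The least-squares comparison repeated in the three matching branches above can be read as one scoring function: the squared Frobenius distance between recorded and calibrated rotations plus the squared distance between the translations, summed over the frames actually seen for the letter. A distilled sketch (the per-letter rescaling by all_scales[k] / alphabet[m][1][2] is left out):

import numpy as np

def letter_error(recorded, calibrated, frames):
    """Least-squares distance between a recorded sign and one calibrated letter."""
    err = 0.0
    for l in frames:
        err += np.sum((recorded[l][0] - calibrated[l][0]) ** 2)  # rotation term
        err += np.sum((np.asarray(recorded[l][1]) -
                       np.asarray(calibrated[l][1])) ** 2)       # translation term
    return err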