def arHandler(self, marker, armname):
        """Handle an AR-marker detection for one arm.

        In the CalibrateLeft state, records the offset between the AR-reported
        marker position and the TF-reported tool-frame translation, then moves
        to the Calibrated state. Once calibrated, publishes the offset-corrected
        gripper pose.

        marker  -- AR marker message (has .pose.pose and .header)
        armname -- 'left' or 'right'; any other value is ignored
        """
        pose = marker.pose.pose

        # Map the arm name to its tool frame. Bail out on unknown names --
        # previously tool_frame was left unbound and the lookup below raised
        # NameError for anything other than 'left'/'right'.
        if armname == 'left':
            tool_frame = ConstantsClass.ToolFrame.Left
        elif armname == 'right':
            tool_frame = ConstantsClass.ToolFrame.Right
        else:
            return

        time = self.listener.getLatestCommonTime(ConstantsClass.Camera, tool_frame)
        if time == 0:
            # No common transform timestamp available yet; try again next marker.
            return
        (trans, rot) = self.listener.lookupTransform(ConstantsClass.Camera, tool_frame, time)

        if self.state == ImageDetectionClass.State.CalibrateLeft:
            # Record the AR-vs-TF position offset once, then consider ourselves calibrated.
            offset = Point()
            offset.x = pose.position.x - trans[0]
            offset.y = pose.position.y - trans[1]
            offset.z = pose.position.z - trans[2]
            self.offset = offset
            self.state = ImageDetectionClass.State.Calibrated
        elif self.state == ImageDetectionClass.State.Calibrated:
            pos = Util.positionSubtract(pose.position, self.offset)
            gp = PoseStamped()
            gp.header.stamp = marker.header.stamp
            gp.header.frame_id = marker.header.frame_id
            gp.pose.position = pos
            # tf returns quaternions as (x, y, z, w); Util.makeQuaternion
            # apparently takes w first, hence the reordering.
            gp.pose.orientation = Util.makeQuaternion(rot[3], rot[0], rot[1], rot[2])
            # NOTE(review): this always stores to leftGripperPose even when
            # armname == 'right' -- looks unintended; confirm against callers.
            self.leftGripperPose = gp
            self.debugAr(gp)
def __init__(self, normal=None):
        """Initialize detection state and subscribe to camera/stereo topics.

        normal -- optional table-normal quaternion; defaults to a 'sideways'
                  orientation built from a -pi/2 yaw rotation.

        NOTE(review): a second __init__ for what appears to be the same class
        exists later in this file; in Python the later definition wins, which
        would make this one dead code -- confirm which is intended.
        NOTE(review): this version does not set self.listener, self.state,
        self.offset, or self.debugArCount, all of which arHandler reads.
        """
        # PointStamped TEMP!!!!!!!!!!!!!!
        self.objectPoint = None

        # gripper pose. Must both have frame_id of respective tool frame
        self.leftGripperPose = None
        self.rightGripperPose = None
        # receptacle point. Must have frame_id of global (or main camera) frame
        # is the exact place to drop off (i.e. don't need to do extra calcs to move away
        self.receptaclePoint = None
        # table normal. Must be according to global (or main camera) frame
        if normal != None:
            self.normal = normal
        else:
            # default to straight up
            # self.normal = Util.makeQuaternion(.5**.5, 0, -.5**.5, 0)

            # default to sideways
            quat = tft.quaternion_from_euler(0, 0, -math.pi / 2)
            # quaternion_from_euler returns (x, y, z, w); makeQuaternion takes w first
            self.normal = Util.makeQuaternion(quat[3], quat[0], quat[1], quat[2])

        # Lock so two arms can access one ImageDetectionClass
        # TEMP!!!!
        self.objectLock = Lock()

        # image processing to find object
        self.objectProcessing = ImageProcessingClass()

        # Temporary. Will eventually be placed with real image detection
        # Will subscribe to camera feeds eventually
        rospy.Subscriber("stereo_points_3d", PointStamped, self.stereoCallback)

        rospy.Subscriber("/wide_stereo/right/image_rect", Image, self.imageCallback)
      def __init__(self, normal=None):
            """Initialize detection state, TF listener, and topic subscriptions.

            normal -- optional table-normal quaternion; defaults to a 'sideways'
                      orientation built from a -pi/2 yaw rotation.

            NOTE(review): this is a second __init__ definition (another appears
            earlier in the file); if both are in the same class, this later one
            overrides the earlier -- confirm which is intended.
            NOTE(review): unlike the earlier version, this one does not set
            self.objectPoint or self.objectLock -- verify nothing reads them.
            """
            # gripper pose. Must both have frame_id of respective tool frame
            self.leftGripperPose = None
            self.rightGripperPose = None
            # receptacle point. Must have frame_id of global (or main camera) frame
            # is the exact place to drop off (i.e. don't need to do extra calcs to move away)
            self.receptaclePoint = None
            # table normal. Must be according to global (or main camera) frame
            if normal != None:
                  self.normal = normal
            else:
                  # default to straight up
                  #self.normal = Util.makeQuaternion(.5**.5, 0, -.5**.5, 0)

                  # default to sideways
                  quat = tft.quaternion_from_euler(0,0,-math.pi/2)
                  # quaternion_from_euler returns (x, y, z, w); makeQuaternion takes w first
                  self.normal = Util.makeQuaternion(quat[3], quat[0], quat[1], quat[2])


            # TF listener and calibration state used by arHandler
            self.listener = tf.TransformListener()
            self.state = ImageDetectionClass.State.Calibrating
            # image processing to find object
            self.objectProcessing = ImageProcessingClass()

            # Temporary. For finding the receptacle
            rospy.Subscriber('stereo_points_3d', PointStamped, self.stereoCallback)
            # Get grippers using AR
            self.debugArCount = 0
            rospy.Subscriber('/stereo_pose', ARMarkers, self.arCallback)