Example #1
    def __init__(self, target_link):
                       
        rospy.init_node('GetJointPose')
        tf_listener = TransformListener()
        base_link = "base_link"

        print "waiting for transform"
        tf_listener.waitForTransform (target_link, base_link, rospy.Time(), rospy.Duration(4.0))
        print "done waiting"

        if not tf_listener.frameExists("base_link"):
            print "ERROR NO FRAME base_link"
            return
        
        elif not tf_listener.frameExists(target_link):
            print "ERROR NO FRAME" +  target_link
            return
        
        else:
            t = tf_listener.getLatestCommonTime("/base_link", target_link)
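            # note: 't' above is never used; transformPose() below works from joint_pose.header.stamp, which is left at 0 (latest available)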
            joint_pose = geometry_msgs.msg.PoseStamped()
            joint_pose.header.frame_id = target_link
            joint_pose.pose.orientation.w = 1.0    # Neutral orientation
            pose_from_base = tf_listener.transformPose("/base_link", joint_pose)
            print "Position from " + base_link + " to " + target_link
            print pose_from_base
class FTSCalibSampler:
    def __init__(self, filename='poses.txt'):
        print('init')
        self.tf = TransformListener()
        self.base_link = 'base_link'
        self.tool_link = 'fts_toolside'
        self.pose_cnt = 0
        self.file = open(filename, 'w+')
        try:
            self.tf.waitForTransform(self.base_link, self.tool_link,
                                     rospy.Time(), rospy.Duration(5.0))
        except tf.Exception:  # likely a timeout
            print(
                "Timeout while waiting for the TF transformation with the map!"
                " Is someone publishing TF tansforms?\n ROS positions won't be available."
            )
            self.tf_running = False
        else:
            # the flag was previously only set in the failure path; set it on success too so it always exists
            self.tf_running = True

    def update(self):
        if self.tf.frameExists(self.base_link) and self.tf.frameExists(
                self.tool_link):
            t = self.tf.getLatestCommonTime(self.base_link, self.tool_link)
            position, quaternion = self.tf.lookupTransform(
                self.base_link, self.tool_link, t)
            self.new_pose = position + list(euler_from_quaternion(quaternion))
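            # new_pose is [x, y, z, roll, pitch, yaw] of the tool frame expressed in the base frame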
            print(self.new_pose)

    def write(self):
        self.file.write('pose{}: {}\n'.format(self.pose_cnt,
                                              str(self.new_pose)))
        self.pose_cnt += 1
Example #3
class myNode:
    def __init__(self, *args):
        self.tf = TransformListener()
#        rospy.Subscriber(...)
#        ...

    def some_method(self):
        if self.tf.frameExists(
                "left_fingertip_sensor_s0") and self.tf.frameExists("world"):
            t = self.tf.getLatestCommonTime("left_fingertip_sensor_s0",
                                            "world")
            position, quaternion = self.tf.lookupTransform(
                "left_fingertip_sensor_s0", "world", t)
            print position, quaternion
        else:
            print "not found frame"

    def fury(self):
        #        now = rospy.Time.now()
        self.tf.waitForTransform("pbase_link", "world", rospy.Time(0),
                                 rospy.Duration(10.0))
        print "waited"
        (trans, rot) = self.tf.lookupTransform("left_fingertip_sensor_s0",
                                               "world", rospy.Time(0))
        print trans, rot
        return trans

    def locations(self, frame1, frame2):
        self.tf.waitForTransform(frame1, frame2, rospy.Time(0),
                                 rospy.Duration(10.0))
        print "waited"
        (trans, rot) = self.tf.lookupTransform(frame1, frame2, rospy.Time(0))
        print trans, rot
Example #4
class SLAM(object):
    '''
    Adapted from L310 coursework
    '''
    def __init__(self, robot_id):
        self.robot_id = robot_id
        self._occupancy_grid = None
        rospy.Subscriber('/robot{}/map'.format(self.robot_id),
                         OccupancyGrid_msg, self.callback)
        self._tf = TransformListener()
        # self._occupancy_grid = None # NOTE why was this defined after calling callback?
        self._pose = np.array([np.nan, np.nan, np.nan], dtype=np.float32)

    def callback(self, msg):
        values = np.array(msg.data, dtype=np.int8).reshape(
            (msg.info.width, msg.info.height))
        processed = np.empty_like(values)
        processed[:] = FREE
        processed[values < 0] = UNKNOWN
        processed[values > 50] = OCCUPIED
        processed = processed.T
        origin = [msg.info.origin.position.x, msg.info.origin.position.y, 0.]
        resolution = msg.info.resolution

        # if self._occupancy_grid is not None: # then merge new map with old map

        self._occupancy_grid = OccupancyGrid(processed, origin, resolution,
                                             msg)

    def update(self):
        # Get pose w.r.t. map.
        a = 'robot{}/occupancy_grid'.format(self.robot_id)
        b = 'robot{}/base_link'.format(self.robot_id)
        if self._tf.frameExists(a) and self._tf.frameExists(b):
            try:
                t = rospy.Time(0)
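                # rospy.Time(0) requests the most recent transform available in the TF buffer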
                position, orientation = self._tf.lookupTransform(
                    '/' + a, '/' + b, t)
                self._pose[X] = position[X]
                self._pose[Y] = position[Y]
                _, _, self._pose[YAW] = euler_from_quaternion(orientation)
            except Exception as e:
                print(e)
        else:
            print('Unable to find:', self._tf.frameExists(a),
                  self._tf.frameExists(b))
        pass

    @property
    def ready(self):
        return self._occupancy_grid is not None and not np.isnan(self._pose[0])

    @property
    def pose(self):
        return self._pose

    @property
    def occupancy_grid(self):
        return self._occupancy_grid
class ChallengeProblemLogger(object):
  _knownObstacles = {}
  _placedObstacle = False
  _lastgzlog = 0.0
  _tf_listener = None
  
  def __init__(self,name):
    self._name = name;

    self._experiment_started = False
    self._tf_listener = TransformListener()

    # Subscribe to robot pose ground truth from gazebo
    rospy.Subscriber("/gazebo/model_states", ModelStates, callback=self.gazebo_model_monitor,
                     queue_size=1)

  # Whenever we get a report from Gazebo, map the gazebo coordinate to map coordinates and
  # log this
  # Only do this every second - this should be accurate enough
  # TODO: model is assumed to be the third in the list. Need to make this based
  #       on the array to account for obstacles (maybe)
  def gazebo_model_monitor(self, data):
    if len(data.pose) <= 2:
      return
    data_time = rospy.get_rostime().to_sec()
    if self._lastgzlog == 0.0 or (data_time - self._lastgzlog >= 1):
      # Only do this every second

      # Get the turtlebot model state information (assumed to be indexed at 2)    
      tb_pose = data.pose[2]
      tb_position = tb_pose.position
      self._lastgzlog = data_time

      # Do this only if the transform exists
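      # note: with tf2-backed listeners, frameExists() may not match frame names written with a leading '/'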
      if self._tf_listener.frameExists("/base_link") and self._tf_listener.frameExists("/map"):
        self._tf_listener.waitForTransform("/map", "/base_link", rospy.Time(0), rospy.Duration(1))
        (trans,rot) = self._tf_listener.lookupTransform("/map", "/base_link",rospy.Time(0))
        rospy.loginfo("BRASS | Turtlebot | {},{}".format(trans[0], trans[1]))
      
      # Log any obstacle information, but do it only once. This currently assumes one obstacle
      # TODO: test this
      if len(data.name) > 3:
        addedObstacles = {}
        removedObstacles = self._knownObstacles.copy()
        for obs in range(3, len(data.name)-1):
          if data.name[obs] not in self._knownObstacles:
            addedObstacles[data.name[obs]] = obs
          else:
            self._knownObstacles[data.name[obs]] = obs
            del removedObstacles[data.name[obs]]

        for key, value in removedObstacles.iteritems():
          rospy.loginfo("BRASS | Obstacle {} | removed".format(key))
          del self._knownObstacles[key]

        for key, value in addedObstacles.iteritems():
          obs_pos = data.pose[value].position
          rospy.loginfo("BRASS | Obstacle {} | {},{}".format(key, obs_pos.x, obs_pos.y))
          self._knownObstacles[key] = value
Example #6
class TfTest(RosTestCase):

    def setUpEnv(self):
        
        robot = ATRV()

        odometry = Odometry()
        robot.append(odometry)
        odometry.add_stream('ros')

        motion = Teleport()
        robot.append(motion)
        motion.add_stream('socket')
        
        robot2 = ATRV()
        robot2.translate(0,1,0)
        odometry2 = Odometry()
        robot2.append(odometry2)
        odometry2.add_stream('ros', frame_id="map", child_frame_id="robot2")

        env = Environment('empty', fastmode = True)
        env.add_service('socket')

    def _check_pose(self, frame1, frame2, position, quaternion, precision = 0.01):

        t = self.tf.getLatestCommonTime(frame1, frame2)
        observed_position, observed_quaternion = self.tf.lookupTransform(frame1, frame2, t)

        for a,b in zip(position, observed_position):
            self.assertAlmostEqual(a, b, delta = precision)
        for a,b in zip(quaternion, observed_quaternion):
            self.assertAlmostEqual(a, b, delta = precision)



    def test_tf(self):

        rospy.init_node('morse_ros_tf_test')

        self.tf = TransformListener()

        sleep(1)
        self.assertTrue(self.tf.frameExists("/odom"))
        self.assertTrue(self.tf.frameExists("/base_footprint"))
        self.assertTrue(self.tf.frameExists("/map"))
        self.assertTrue(self.tf.frameExists("/robot2"))

        self._check_pose("odom", "base_footprint", [0,0,0], [0,0,0,1])
        self._check_pose("map", "robot2", [0,0,0], [0,0,0,1])

        with Morse() as morse:
            teleport = morse.robot.motion

            teleport.publish({'x' : 2, 'y' : 0, 'z' : 0, \
                              'yaw' : 0, 'pitch' : 0.0, 'roll' : 0.0})
            morse.sleep(0.1)

        self._check_pose("odom", "base_footprint", [2,0,-0.1], [0,0,0,1])
Example #7
class TransformNode:
    def __init__(self, *args):
        self.tf = TransformListener()
        rospy.Subscriber("/tf", TFMessage, transform, queue_size=1)

    def transform(self, msg):
        if self.tf.frameExists("/base_link") and self.tf.frameExists("/map"):
            t = self.tf.getLatestCommonTime("/base_link", "/map")
            position, quaternion = self.tf.lookupTransform("/base_link", "/map", t)
            print position, quaternion
Example #8
class SLAM(object):
    def __init__(self):
        rospy.Subscriber('/map', OccupancyGrid, self.callback)
        self._tf = TransformListener()
        self._occupancy_grid = None
        self._pose = np.array([np.nan, np.nan, np.nan], dtype=np.float32)

    def callback(self, msg):
        values = np.array(msg.data, dtype=np.int8).reshape(
            (msg.info.width, msg.info.height))
        processed = np.empty_like(values)
        processed[:] = 0
        processed[values < 0] = 1
        processed[values > 50] = 2
        processed = processed.T
        origin = [msg.info.origin.position.x, msg.info.origin.position.y, 0.]
        resolution = msg.info.resolution
        # self._occupancy_grid = rrt.OccupancyGrid(processed, origin, resolution)

    def update(self):
        # Get pose w.r.t. map.
        a = 'occupancy_grid'
        # a = 'map'
        b = LEADER + '/base_link'
        if self._tf.frameExists(a) and self._tf.frameExists(b):
            try:
                t = rospy.Time(0)
                position, orientation = self._tf.lookupTransform(
                    '/' + a, '/' + b, t)
                self._pose[X] = position[X]
                self._pose[Y] = position[Y]
                _, _, self._pose[YAW] = euler_from_quaternion(orientation)

                print('pose:', self._pose)

            except Exception as e:
                print(e)
        else:
            print('Unable to finddd:', self._tf.frameExists(a),
                  self._tf.frameExists(b))
        pass

    @property
    def ready(self):
        return self._occupancy_grid is not None and not np.isnan(self._pose[0])

    @property
    def pose(self):
        return self._pose

    @property
    def occupancy_grid(self):
        return self._occupancy_grid
Example #9
def run():

    tt = tf.Transformer(True, rospy.Duration(10.0))

    tfl = TransformListener()
    #print tf.allFramesAsString()
    print tfl.getFrameStrings()
    if tfl.frameExists("/base_link") and tfl.frameExists("/base_footprint"):
        t = tfl.getLatestCommonTime("/base_link", "/base_footprint")
        position, quaternion = tfl.lookupTransform("/base_link",
                                                   "/base_footprint", t)
        print position, quaternion
    else:
        print "no"
Example #10
def get_object_pose(object_frame, reference_frame):
    pose = None
    tf_l = TransformListener()
    try:
        print object_frame, reference_frame
        tf_l.waitForTransform(object_frame, reference_frame, rospy.Time(0),
                              rospy.Duration(5.0))
        if (tf_l.frameExists(object_frame)
                and tf_l.frameExists(reference_frame)):
            p, q = tf_l.lookupTransform(reference_frame, object_frame,
                                        rospy.Time(0))
            pose = Pose(Point(*p), Quaternion(*q))
    except (tf.LookupException, tf.ConnectivityException):
        print "Unable to retrieve object pose!"
    del tf_l  # kill listener to prevent annoying warnings
    return pose
class KinectNiteHelp:
    def __init__(self, name="kinect_listener", user=1):
        # rospy.init_node(name, anonymous=True)
        self.tf = TransformListener()
        self.user = user

    def getLeftHandPos(self):
        if self.tf.frameExists(BASE_FRAME) and self.tf.frameExists("left_hand_1"):
            print " Inside TF IF Get LEft HAnd POS "
            t = self.tf.getLatestCommonTime(BASE_FRAME, "left_hand_1")
            (leftHandPos, quaternion) = self.tf.lookupTransform(BASE_FRAME, "left_hand_1", t)
            return leftHandPos

    def getRightHandPos(self):
        if self.tf.frameExists(BASE_FRAME) and self.tf.frameExists("right_hand_1"):
            t = self.tf.getLatestCommonTime(BASE_FRAME, "right_hand_1")
            (rightHandPos, quaternion) = self.tf.lookupTransform(BASE_FRAME, "right_hand_1", t)
            return rightHandPos
class Transformation():
	def __init__(self):
		self.pub = None  # NOTE: must be set to a rospy.Publisher for WrenchStamped before wrench_cb() runs
		self.tf = TransformListener()
		self.tf1 = TransformerROS()
		self.fdata = geometry_msgs.msg.TransformStamped()
		self.fdata_base = geometry_msgs.msg.TransformStamped()
		self.transform = tf.TransformBroadcaster()
		self.wrench = WrenchStamped()
		self.wrench_bl = WrenchStamped()
		
	def wrench_cb(self,msg):
		self.wrench = msg.wrench
		self.transform.sendTransform((0,0,-0.025),quaternion_from_euler(3.14, 0, 3.665195102, 'rxyz'),rospy.Time.now(),'/ft_debug_link','/arm_7_link')
		self.fdata.transform.translation.x = self.wrench.force.x
		self.fdata.transform.translation.y = self.wrench.force.y
		self.fdata.transform.translation.z = self.wrench.force.z
		
		try:
			if self.tf.frameExists("/dummy_link") and self.tf.frameExists("ft_debug_link"):
				t = self.tf.getLatestCommonTime("/dummy_link", "/ft_debug_link")
				(transform_ee_base_position,transform_ee_base_quaternion) = self.tf.lookupTransform("/dummy_link", '/ft_debug_link', t)
	   				#print transform_ee_base_position
	   				#print transform_ee_base_quaternion
	   				#print self.tf1.fromTranslationRotation(transform_ee_base_position,transform_ee_base_quaternion)
		except(tf.LookupException,tf.ConnectivityException):
			print("TRANSFORMATION ERROR")
			sss.say(["error"])	
			#return 'failed'
		
		# Build the 4x4 transform once instead of recomputing it for every matrix element
		T = self.tf1.fromTranslationRotation(transform_ee_base_position, transform_ee_base_quaternion)
		f = self.fdata.transform.translation
		self.fdata_base.transform.translation.x = T[0,0]*f.x + T[0,1]*f.y + T[0,2]*f.z + T[0,3]
		self.fdata_base.transform.translation.y = T[1,0]*f.x + T[1,1]*f.y + T[1,2]*f.z + T[1,3]
		self.fdata_base.transform.translation.z = T[2,0]*f.x + T[2,1]*f.y + T[2,2]*f.z + T[2,3]
		
		self.wrench_bl.wrench.force.y = self.fdata_base.transform.translation.y
		self.wrench_bl.wrench.force.x = self.fdata_base.transform.translation.x
		self.wrench_bl.wrench.force.z = self.fdata_base.transform.translation.z
		self.wrench_bl.header.stamp = rospy.Time.now();
		self.pub.publish(self.wrench_bl)
Example #13
class TfNode:
    def __init__(self, *args):
        self.tf = TransformListener()
        self.sub = rospy.Subscriber(
            "/detectedObjs_primitive",
            thesis_visualization.msg.objectLocalization,
            self.__subCb,
            queue_size=1)
        self.msg = thesis_visualization.msg.objectLocalization()
        self.thread_sub = Thread(target=self.start_sub)

    def __subCb(self, msg):
        self.msg = msg
        print self.msg.modelList

    def start_sub(self):
        rospy.spin()

    def subscribe(self):
        self.thread_sub.start()

    def unregister_sub(self):
        self.sub.unregister()
        rospy.signal_shutdown("close subcriber")

    def example_function(self):
        if self.tf.frameExists("/world") and self.tf.frameExists(
                "/kinect2_depth"):
            t = self.tf.getLatestCommonTime("/world", "/kinect2_depth")
            p1 = geometry_msgs.msg.PoseStamped()
            p1.header.frame_id = "kinect2_depth"
            p1.pose.orientation.w = 1.0  # Neutral orientation
            p_in_base = self.tf.transformPose("/world", p1_)
            print "Position of the fingertip in the robot base:"
            print p_in_base

    def pcd2worldFrame(self, pcd):
        self.tf.transformPointCloud("world", pcd)
Example #14
class GoToHuman():

    global lastknown_human
    global lastknown_RAS
    lastknown_human = (1.0, 1.0)
    lastknown_RAS = (0.0, 0.0)

    def __init__(self):


        rospy.init_node('nav_test_human', anonymous=False)

        #Listeners
        # rospy.Subscriber("tf", tf, setRAS(tf.lookupTransform("/base_link"))
        self.tf = TransformListener()
        self.setRAS()
        # rospy.Subscriber("human/human/human", Human, setHuman(data))

        locations = self.getLocation()
        human = locations[0]
        RAS = locations[1]


        if (human==RAS): print("Incorrect") #Shouldn't be same location, means it wasn't updated from listener

        msg = self.doMath(human[0], human[1], RAS[0], RAS[1])   #message from moving RAS

        #log stuff
        rospy.loginfo("Beginning goto_human service")

        #what to do if shut down (e.g. ctrl + C or failure)
        rospy.on_shutdown(self.shutdown)

        rospy.spin()


    def doMath(self, h_x, h_y, r_x, r_y):

        #Do math, given location A and B, straight line between the two with 2ft gap between

        rospy.wait_for_service('goto_xy') #wait for service to start, need to start yourself (run file)
        goto_human = rospy.ServiceProxy('goto_xy', Goto_xy)

        rospy.wait_for_service('rotate') #wait for service to start, need to start yourself (run file)
        rotate_to_human = rospy.ServiceProxy('rotate', Rotate)

        if ((h_x == r_x) or (h_y == r_y)):


            # Simple -- Just keep it two feet away

            if (h_x == r_x): #horizontal match, change this to be more forgiving within 5 degrees?
            
                #rosservice call goto_xy h_x, h_y-0.6096
                horizontal = goto_human(h_x, h_y-0.6096)
                rotate_to_human(r_x, r_y, h_x, h_y)
                return horizontal.response
            
            else:

                #vertical match, change this to be more forgiving within 5 degrees?
                #rosservice call goto_xy h_x-0.6096, h_y
                vert = goto_human(h_x-0.6096, h_y)
                rotate_to_human(r_x, r_y, h_x, h_y)
                return vert.response

        else:

            # triangle -- equilateral triangle with 0.6096m (2ft) longest side
            # and the legs would be 0.431m (1.41ft) each.
            #rosservice call goto_xy h_x-0.431, h_y-0.431
            triangle = goto_human(h_x-0.431, h_y-0.431)
            rotate_to_human(r_x, r_y, h_x, h_y)
            return triangle.response
        

    
    def getLocation(self):
        return [lastknown_human, lastknown_RAS]


    def setRAS(self): #get data from tf listener
        #wiki.ros.org/tf/TfUsingPython
        global lastknown_RAS  # without this, the assignment below only creates a local and never updates the shared value
        if self.tf.frameExists("/base_link") and self.tf.frameExists("/map"):
            t = self.tf.getLatestCommonTime("/base_link", "/map")
            position, quaternion = self.tf.lookupTransform("/base_link", "/map", t) #position is tuple (x, y, z), quat (x, y, z, w)

            lastknown_RAS = (position[0], position[1])
        

    def setHuman(self, data): #get data from Human listener, currently incorrect
        global lastknown_human
        # lastknown_human = (data.x, data.y)
        lastknown_human = (2.0, 2.0) #testing with human 2m away from origin
    

    def shutdown(self): 
        rospy.loginfo("Editing Service")
Example #15
class GripperMarkers:

    _offset = 0.09
    def __init__(self, side_prefix):

        
        self.side_prefix = side_prefix
        self._im_server = InteractiveMarkerServer('ik_request_markers_' + self.side_prefix)
        self._tf_listener = TransformListener()
        self._menu_handler = MenuHandler()

        self._menu_handler.insert('Move arm here', callback=self.move_to_pose_cb)

        self.r_joint_names = ['r_shoulder_pan_joint',
                              'r_shoulder_lift_joint',
                              'r_upper_arm_roll_joint',
                              'r_elbow_flex_joint',
                              'r_forearm_roll_joint',
                              'r_wrist_flex_joint',
                              'r_wrist_roll_joint']
        self.l_joint_names = ['l_shoulder_pan_joint',
                              'l_shoulder_lift_joint',
                              'l_upper_arm_roll_joint',
                              'l_elbow_flex_joint',
                              'l_forearm_roll_joint',
                              'l_wrist_flex_joint',
                              'l_wrist_roll_joint']

        # Create a trajectory action client
        r_traj_controller_name = None
        l_traj_controller_name = None
        if self.side_prefix == 'r':
                r_traj_controller_name = '/r_arm_controller/joint_trajectory_action'
                self.r_traj_action_client = SimpleActionClient(r_traj_controller_name,
                        JointTrajectoryAction)
                rospy.loginfo('Waiting for a response from the trajectory '
                        + 'action server for RIGHT arm...')
                self.r_traj_action_client.wait_for_server()
        
        else:
                l_traj_controller_name = '/l_arm_controller/joint_trajectory_action'
                self.l_traj_action_client = SimpleActionClient(l_traj_controller_name,
                        JointTrajectoryAction)
                rospy.loginfo('Waiting for a response from the trajectory '
                        + 'action server for LEFT arm...')
                self.l_traj_action_client.wait_for_server()

        self.is_control_visible = False
        self.ee_pose = self.get_ee_pose()
        self.ik = IK(side_prefix)
        self.update_viz()
        self._menu_handler.apply(self._im_server, 'ik_target_marker_' + self.side_prefix)
        self._im_server.applyChanges()
        print self.ik

    def get_ee_pose(self):
        from_frame = 'base_link'
        to_frame = self.side_prefix + '_wrist_roll_link'
        try:
            if self._tf_listener.frameExists(from_frame) and self._tf_listener.frameExists(to_frame):
                t = self._tf_listener.getLatestCommonTime(from_frame, to_frame)
                # t = rospy.Time.now()
                (pos, rot) = self._tf_listener.lookupTransform(to_frame, from_frame, t) # Note argument order :(
            else:
                rospy.logerr('TF frames do not exist; could not get end effector pose.')
        except Exception as err:
            rospy.logerr('Could not get end effector pose through TF.')
            rospy.logerr(err)
            pos = [1.0, 0.0, 1.0]
            rot = [0.0, 0.0, 0.0, 1.0]

        return Pose(Point(pos[0], pos[1], pos[2]),
                Quaternion(rot[0], rot[1], rot[2], rot[3]))

    def move_to_pose_cb(self, feedback):
        rospy.loginfo('You pressed the `Move arm here` button from the menu.')
        '''Moves the arm to the desired joints'''
        print feedback
        time_to_joint = 2.0
        positions = self.ik.get_ik_for_ee(feedback.pose)
        velocities = [0] * len(positions)
        traj_goal = JointTrajectoryGoal()
        traj_goal.trajectory.header.stamp = (rospy.Time.now() + rospy.Duration(0.1))
        traj_goal.trajectory.points.append(JointTrajectoryPoint(positions=positions,
                            velocities=velocities, time_from_start=rospy.Duration(time_to_joint)))
        
        if (self.side_prefix == 'r'):
            traj_goal.trajectory.joint_names = self.r_joint_names
            self.r_traj_action_client.send_goal(traj_goal)
        else:
            traj_goal.trajectory.joint_names = self.l_joint_names
            self.l_traj_action_client.send_goal(traj_goal)
        pass

    def marker_clicked_cb(self, feedback):
        if feedback.event_type == InteractiveMarkerFeedback.POSE_UPDATE:
            self.ee_pose = feedback.pose
            self.update_viz()
            self._menu_handler.reApply(self._im_server)
            self._im_server.applyChanges()

        elif feedback.event_type == InteractiveMarkerFeedback.BUTTON_CLICK:
            rospy.loginfo('Changing visibility of the pose controls.')
            if (self.is_control_visible):
                self.is_control_visible = False
            else:
                self.is_control_visible = True
        else:
            rospy.loginfo('Unhandled event: ' + str(feedback.event_type))

    def update_viz(self):
        menu_control = InteractiveMarkerControl()
        menu_control.interaction_mode = InteractiveMarkerControl.BUTTON
        menu_control.always_visible = True
        frame_id = 'base_link'
        pose = self.ee_pose
        
        menu_control = self._add_gripper_marker(menu_control)
        text_pos = Point()
        text_pos.x = pose.position.x
        text_pos.y = pose.position.y
        text_pos.z = pose.position.z + 0.1
        text = 'x=' + str(pose.position.x) + ' y=' + str(pose.position.y) + ' z=' + str(pose.position.z)
        menu_control.markers.append(Marker(type=Marker.TEXT_VIEW_FACING,
                                           id=0, scale=Vector3(0, 0, 0.03),
                                           text=text,
                                           color=ColorRGBA(0.0, 0.0, 0.0, 0.5),
                                           header=Header(frame_id=frame_id),
                                           pose=Pose(text_pos, Quaternion(0, 0, 0, 1))))
        int_marker = InteractiveMarker()
        int_marker.name = 'ik_target_marker_' + self.side_prefix
        int_marker.header.frame_id = frame_id
        int_marker.pose = pose
        int_marker.scale = 0.2
        self._add_6dof_marker(int_marker)
        int_marker.controls.append(menu_control)
        self._im_server.insert(int_marker, self.marker_clicked_cb)

    def _add_gripper_marker(self, control):
        is_hand_open=False
        if is_hand_open:
            angle = 28 * numpy.pi / 180.0
        else:
            angle = 0
        transform1 = tf.transformations.euler_matrix(0, 0, angle)
        transform1[:3, 3] = [0.07691 - GripperMarkers._offset, 0.01, 0]
        transform2 = tf.transformations.euler_matrix(0, 0, -angle)
        transform2[:3, 3] = [0.09137, 0.00495, 0]
        t_proximal = transform1
        t_distal = tf.transformations.concatenate_matrices(transform1, transform2)
        mesh1 = self._make_mesh_marker()
        mesh1.mesh_resource = ('package://pr2_description/meshes/gripper_v0/gripper_palm.dae')
        mesh1.pose.position.x = -GripperMarkers._offset
        mesh1.pose.orientation.w = 1
        mesh2 = self._make_mesh_marker()
        mesh2.mesh_resource = ('package://pr2_description/meshes/gripper_v0/l_finger.dae')
        mesh2.pose = GripperMarkers.get_pose_from_transform(t_proximal)
        mesh3 = self._make_mesh_marker()
        mesh3.mesh_resource = ('package://pr2_description/meshes/gripper_v0/l_finger_tip.dae')
        mesh3.pose = GripperMarkers.get_pose_from_transform(t_distal)
        quat = tf.transformations.quaternion_multiply(
            tf.transformations.quaternion_from_euler(numpy.pi, 0, 0),
            tf.transformations.quaternion_from_euler(0, 0, angle))
        transform1 = tf.transformations.quaternion_matrix(quat)
        transform1[:3, 3] = [0.07691 - GripperMarkers._offset, -0.01, 0]
        transform2 = tf.transformations.euler_matrix(0, 0, -angle)
        transform2[:3, 3] = [0.09137, 0.00495, 0]
        t_proximal = transform1
        t_distal = tf.transformations.concatenate_matrices(transform1,transform2)
        mesh4 = self._make_mesh_marker()
        mesh4.mesh_resource = ('package://pr2_description/meshes/gripper_v0/l_finger.dae')
        mesh4.pose = GripperMarkers.get_pose_from_transform(t_proximal)
        mesh5 = self._make_mesh_marker()
        mesh5.mesh_resource = ('package://pr2_description/meshes/gripper_v0/l_finger_tip.dae')
        mesh5.pose = GripperMarkers.get_pose_from_transform(t_distal)

        control.markers.append(mesh1)
        control.markers.append(mesh2)
        control.markers.append(mesh3)
        control.markers.append(mesh4)
        control.markers.append(mesh5)
        return control

    @staticmethod
    def get_pose_from_transform(transform):
        pos = transform[:3,3].copy()
        rot = tf.transformations.quaternion_from_matrix(transform)
        return Pose(Point(pos[0], pos[1], pos[2]),
                Quaternion(rot[0], rot[1], rot[2], rot[3]))

    def _make_mesh_marker(self):
        mesh = Marker()
        mesh.mesh_use_embedded_materials = False
        mesh.type = Marker.MESH_RESOURCE
        mesh.scale.x = 1.0
        mesh.scale.y = 1.0
        mesh.scale.z = 1.0
        print self
        ik_solution = self.ik.get_ik_for_ee(self.ee_pose)
        if (ik_solution is None):
            mesh.color = ColorRGBA(1.0, 0.0, 0.0, 0.6)
        else:
            mesh.color = ColorRGBA(0.0, 1.0, 0.0, 0.6)
        return mesh

    def _add_6dof_marker(self, int_marker):
        is_fixed = True
        control = self._make_6dof_control('rotate_x', Quaternion(1, 0, 0, 1), False, is_fixed)
        int_marker.controls.append(control)
        control = self._make_6dof_control('move_x', Quaternion(1, 0, 0, 1), True, is_fixed)
        int_marker.controls.append(control)
        control = self._make_6dof_control('rotate_z', Quaternion(0, 1, 0, 1), False, is_fixed)
        int_marker.controls.append(control)
        control = self._make_6dof_control('move_z', Quaternion(0, 1, 0, 1), True, is_fixed)
        int_marker.controls.append(control)
        control = self._make_6dof_control('rotate_y', Quaternion(0, 0, 1, 1), False, is_fixed)
        int_marker.controls.append(control)
        control = self._make_6dof_control('move_y', Quaternion(0, 0, 1, 1), True, is_fixed)
        int_marker.controls.append(control)

    def _make_6dof_control(self, name, orientation, is_move, is_fixed):
        control = InteractiveMarkerControl()
        control.name = name
        control.orientation = orientation
        control.always_visible = False
        if (self.is_control_visible):
            if is_move:
                control.interaction_mode = InteractiveMarkerControl.MOVE_AXIS
            else:
                control.interaction_mode = InteractiveMarkerControl.ROTATE_AXIS
        else:
            control.interaction_mode = InteractiveMarkerControl.NONE
        if is_fixed:
            control.orientation_mode = InteractiveMarkerControl.FIXED
        return control
#!/usr/bin/env python
import rospy
from tf import TransformListener
import time

rospy.init_node('tf_tester', anonymous=True)
time.sleep(1)
tf_listener = TransformListener()
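# note: a TransformListener needs a brief period after construction to fill its buffer;
# the sleep above runs before the listener exists, so it does not delay the lookups below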

while True:
    frames = tf_listener.allFramesAsString()
    if frames:
        #1/0
        pass
    if tf_listener.frameExists(
            "base_link"):  #and tf_listener.frameExists("map"):
        t = tf_listener.getLatestCommonTime("odom", "map")
        position, quaternion = tf_listener.lookupTransform("odom", "map", t)
        print "odom->map", position, quaternion
        t = tf_listener.getLatestCommonTime("base_link", "odom")
        position, quaternion = tf_listener.lookupTransform(
            "base_link", "odom", t)
        print "base_link->odom", position, quaternion
        position, quaternion = tf_listener.lookupTransform(
            "base_link", "map",
            rospy.Time.now())  # Problems with time in the past
        print "base_link->map", position, quaternion
        break
#    time.sleep(1)
Example #17
class AccurateDocking(RComponent):
    """
    A class used to make a simple docking process
    """
    def __init__(self):
        self.dock_action_name = "/rb1_base/pp_docker"
        self.move_action_name = "/rb1_base/move"

        self.robot_link = "rb1_base_base_footprint"
        self.pregoal_link = "laser_docking_pregoal_footprint"
        self.goal_link = "laser_docking_goal_footprint"

        RComponent.__init__(self)

    def ros_read_params(self):
        """Gets params from param server"""
        RComponent.ros_read_params(self)

        #self.command_name = rospy.get_param('~command_name', self.command_name)

    def ros_setup(self):
        """Creates and inits ROS components"""

        RComponent.ros_setup(self)

        self.tf = TransformListener()

        self.move_action_client = actionlib.SimpleActionClient(
            self.move_action_name, MoveAction)
        self.move_action_client.wait_for_server()

        self.dock_action_client = actionlib.SimpleActionClient(
            self.dock_action_name, DockAction)
        self.dock_action_client.wait_for_server()

        return 0

    def ready_state(self):
        """Actions performed in ready state"""

        # Docking
        dock_goal = MoveActionGoal()
        dock_goal.goal.dock_frame = self.pregoal_link
        dock_goal.goal.robot_dock_frame = self.robot_link
        #dock_goal.goal.dock_frame = self.goal_link
        #dock_goal.goal.dock_offset.x = -0.5
        self.dock_action_client.send_goal(dock_goal)
        self.dock_action_client.wait_for_result()

        if self.dock_action_client.get_result() == True:

            # Execute move
            if self.tf.frameExists(self.robot_link) and self.tf.frameExists(
                    self.goal_link):

                # Get relative goal position
                t = self.tf.getLatestCommonTime(self.robot_link,
                                                self.goal_link)
                (position,
                 quaternion) = self.tf.lookupTransform(self.robot_link,
                                                       self.goal_link, t)
                (_, _, orientation) = euler_from_quaternion(quaternion)
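                # euler_from_quaternion returns (roll, pitch, yaw); only the yaw is kept as 'orientation'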

                # Orientate robot
                move_goal = MoveActionGoal()
                move_goal.goal.goal.theta = orientation
                self.move_action_client.send_goal(move_goal)
                self.move_action_client.wait_for_result()

                if self.move_action_client.get_result() == True:

                    # Get relative goal position
                    t = self.tf.getLatestCommonTime(self.robot_link,
                                                    self.goal_link)
                    (position, quaternion) = self.tf.lookupTransform(
                        self.robot_link, self.goal_link, t)
                    (_, _, orientation) = euler_from_quaternion(quaternion)

                    # Move forward
                    move_goal = MoveActionGoal()
                    move_goal.goal.goal.x = position[0]
                    self.move_action_client.send_goal(move_goal)
                    self.move_action_client.wait_for_result()

            else:
                rospy.ERROR("Move failed. TF are not available")
        else:
            rospy.ERROR("Docking failed")

        self.switch_to_state(State.SHUTDOWN_STATE)

    def shutdown(self):

        return RComponent.shutdown(self)

    def switch_to_state(self, new_state):
        """Performs the change of state"""

        return RComponent.switch_to_state(self, new_state)
Example #18
class ttsconvert():
    def __init__(self):
        rospy.init_node('google', anonymous=False)
        rospy.loginfo('Initializing')
        #soundhandle = SoundClient(blocking=True)
        self.goal_sent = False
        self.tf = TransformListener()
        # What to do if shut down (e.g. Ctrl-C or failure)
        rospy.on_shutdown(self.shutdown)
        # Tell the action client that we want to spin a thread by default
        self.move_base = actionlib.SimpleActionClient("move_base",
                                                      MoveBaseAction)
        rospy.loginfo("Wait for the action server to come up")
        # Allow up to 5 seconds for the action server to come up
        self.move_base.wait_for_server(rospy.Duration(5))

    def speak(self):
        sound_client = SoundClient()
        G = nx.read_graphml(
            "/opt/ros/indigo/share/pocketsphinx/demo/Ourstuff/gSpeech.graphml",
            unicode)
        root = 'n1'
        current = root
        tracker = {'task': None, 'room': None}
        counter = 0
        output = None
        sound_client.playWave('/home/turtlebot/wavFiles/beep.wav',
                              blocking=False)
        while True:
            #counter = counter +1
            # THE USER SPEAKS
            subprocess.call('./speech-rec.sh')
            # ANALYZE WHAT THE USER SAID
            f = open("words.log", "r+")
            line = f.readline()
            #output = None
            if line:
                if line[0] in 't':
                    speech = line[12:].rstrip().lower()
                    print speech
                    if speech == "stop":
                        break
                    see = -1
                    for e in G.edges(current, data=True):
                        expected = str(e[2].values()[0])
                        match = re.search(expected, speech)
                        if match or expected == 'room':
                            see = 1
                            current = e[1]
                            output = G.node[current]['dialogue']
                            if (expected == 'guide'):
                                tracker['task'] = 'guide'
                                break  #out of for loop
                            elif (expected == 'deliver'):
                                tracker['task'] = 'deliver'
                                tts = gTTS(
                                    text=
                                    'Please place your item on top of my shell.',
                                    lang='en')
                                tts.save('/home/turtlebot/wavFiles/item.wav')
                                rospy.loginfo(
                                    'Turtlebot says place your item...')
                                sound_client.playWave(
                                    '/home/turtlebot/wavFiles/item.wav',
                                    blocking=True)
                                break  #out of for loop
                            elif (expected == 'task'):
                                if (tracker['task'] == 'deliver'):
                                    tracker['task'] = 'guide'
                                elif (tracker['task'] == 'guide'):
                                    tracker['task'] = 'deliver'
                                    tts = gTTS(
                                        text=
                                        'Please place your item on top of my shell.',
                                        lang='en')
                                    tts.save(
                                        '/home/turtlebot/wavFiles/item.wav')
                                    rospy.loginfo(
                                            'Turtlebot says place your item...')
                                    sound_client.playWave(
                                        '/home/turtlebot/wavFiles/item.wav',
                                        blocking=True)
                                tracker['room'] = str(room_number[0])
                                output = output.replace(
                                    '*', tracker['room'], 1)
                                #if (tracker['task'] is not None):
                                output = output.replace(
                                    '#', tracker['task'], 1)

                            elif (expected == 'room'):
                                if any(char.isdigit() for char in speech):
                                    room_number = [
                                        int(s) for s in speech.split()
                                        if s.isdigit()
                                    ]
                                    if (room_number[0] < 201
                                            or room_number[0] > 207):
                                        tts = gTTS(
                                            text=
                                            'The room is out of range. I can only go to room 201 to 207',
                                            lang='en')
                                        tts.save(
                                            '/home/turtlebot/wavFiles/oor.wav')
                                        rospy.loginfo(
                                            'Turtlebot says room out of range...'
                                        )
                                        sound_client.playWave(
                                            '/home/turtlebot/wavFiles/oor.wav',
                                            blocking=True)
                                        current = e[0]
                                        output = G.node[current]['dialogue']
                                    else:
                                        tracker['room'] = str(room_number[0])
                                        output = output.replace(
                                            '*', tracker['room'], 1)
                                        #if (tracker['task'] is not None):
                                        output = output.replace(
                                            '#', tracker['task'], 1)
                    print "out of the loop"
                    if see < 0:
                        #tts = gTTS(text = 'Sorry, I did not understand that.', lang = 'en')
                        #tts.save('/home/turtlebot/wavFiles/pardon.wav')
                        rospy.loginfo('Turtlebot says I do not understand...')
                        sound_client.playWave(
                            '/home/turtlebot/wavFiles/pardon.wav',
                            blocking=True)

            if output:
                tts = gTTS(text=output, lang='en')
                tts.save('/home/turtlebot/wavFiles/output.wav')
                rospy.loginfo('Turtlebot says I understood what you said...')
                sound_client.playWave('/home/turtlebot/wavFiles/output.wav',
                                      blocking=True)
            '''else:
	        #tts = gTTS(text = 'Sorry, I did not understand that.', lang = 'en')
		#tts.save('/home/turtlebot/wavFiles/pardon.wav')
		rospy.loginfo('Turtlebot says I do not understand...')
		sound_client.playWave('/home/turtlebot/wavFiles/pardon.wav')'''

            # CHECK IF YOU HAVE TO GO OUT OF LOOP
            if (output == "Hold on! Let me calculate the path. Here I go!"):
                tts = gTTS(text='Byeeee', lang='en')
                tts.save('/home/turtlebot/wavFiles/bye.wav')
                rospy.loginfo('Turtlebot says end of conversation...')
                sound_client.playWave('/home/turtlebot/wavFiles/bye.wav',
                                      blocking=True)
                break

        return tracker

    def goto(self, pos, quat):
        print "goto started"
        sound_client = SoundClient()
        #wait for sound_play to connect to publishers otherwise, it will miss first published psg
        rospy.sleep(3)  #3 seconds?
        tts = gTTS(text='Going to destination', lang='en')
        tts.save("/home/turtlebot/wavFiles/goingTo.wav")

        self.move_base.max_vel_x = 0.25

        # Send a goal
        self.goal_sent = True
        if self.tf.frameExists("/base_link") and self.tf.frameExists("/map"):
            t = self.tf.getLatestCommonTime("/base_link", "/map")
            position, quaternion = self.tf.lookupTransform(
                "/base_link", "/map", t)
            rospy.loginfo(position[0])

        goal = MoveBaseGoal()
        #if(os.path.isfile("/home/wavFiles/goingTo.wav") == False)
        #the path to gtts-cli.py might have to be changed based on setup

        #play the wav file
        rospy.loginfo("Playing goingTo.wav")
        sound_client.playWave("/home/turtlebot/wavFiles/goingTo.wav")
        goal.target_pose.header.frame_id = 'map'
        goal.target_pose.header.stamp = rospy.Time.now()
        goal.target_pose.pose = Pose(
            Point(pos['x'], pos['y'], 0.000),
            Quaternion(quat['r1'], quat['r2'], quat['r3'], quat['r4']))

        # Start moving
        self.move_base.send_goal(goal)

        # Allow TurtleBot up to 120 seconds to complete the task
        success = self.move_base.wait_for_result(rospy.Duration(120))

        state = self.move_base.get_state()
        result = False

        if success and state == GoalStatus.SUCCEEDED:
            # We made it!
            result = True
            tts = gTTS(text='Yay, reached destination', lang='en')
            tts.save("/home/turtlebot/wavFiles/reachedTo.wav")

            sound_client.playWave("/home/turtlebot/wavFiles/reachedTo.wav")

        else:
            self.move_base.cancel_goal()

        self.goal_sent = False

        return result

    def shutdown(self):
        if self.goal_sent:
            self.move_base.cancel_goal()
        rospy.loginfo("Stop")
        rospy.sleep(1)
class MapWhiteout:
    def map_callback(self, map):
        print("received map!")
        self.map = map
        if self.pose is None:  # uninitialized pose
            print("pose not received yet, passing on map...")
        else:
            grid = np.array(map.data)
            grid = np.resize(grid, (map.info.height, map.info.width))

            res = map.info.resolution
            origin = np.array(
                [map.info.origin.position.x, map.info.origin.position.y])

            m_center = self.pose
            #print m_center
            m_center_adj = m_center - origin
            #print m_center_adj
            cell_center = (m_center_adj / res)

            m_width = 0.4
            cell_width = (m_width / res)

            y_start = int(cell_center[0] - cell_width / 2)
            y_stop = int(cell_center[0] + cell_width / 2)

            x_start = int(cell_center[1] - cell_width / 2)
            x_stop = int(cell_center[1] + cell_width / 2)

            grid[x_start:x_stop, y_start:y_stop].fill(0)
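            # the fill above whites out (marks free) an m_width x m_width square of cells centred on the robot pose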
            #print "grid value: " + str(any(grid))

            # print type(np.resize(grid, (1, map.info.height*map.info.width)).astype(int).tolist()[0])
            map.data = np.resize(grid,
                                 (1, map.info.height * map.info.width)).astype(
                                     np.int8).tolist()[0]

    def __init__(self):
        rospy.init_node("map_whiteout")

        self.map = None
        self.pose = None

        self.pub = rospy.Publisher("/turtlebot/map",
                                   OccupancyGrid,
                                   latch=True,
                                   queue_size=1)

        self.sub1 = rospy.Subscriber("/map", OccupancyGrid, self.map_callback)
        self.tf = TransformListener()

        print("running map whiteout node...")
        r = rospy.Rate(1)
        while not rospy.is_shutdown():
            try:
                if self.tf.frameExists("/map") and self.tf.frameExists(
                        "/turtlebot/base_link"):
                    t = self.tf.getLatestCommonTime("/map",
                                                    "/turtlebot/base_link")
                    pos, quat = self.tf.lookupTransform(
                        "/map", "/turtlebot/base_link", t)
                    print("got map to base link transform")
                    self.pose = np.array([pos[0], pos[1]])
            except:
                pass

            if self.map is not None:
                self.pub.publish(self.map)
            r.sleep()
Example #20
class ttsconvert():
    def __init__(self):
        tracker = {'room': None}
        # initiliaze
        rospy.init_node('ttsconvert', anonymous=False)
        self.tf = TransformListener()
        #rospy.loginfo('Example: Playing sounds in *blocking* mode.')
        soundhandle = SoundClient(blocking=True)
        self.goal_sent = False
        #rospy.loginfo('Good Morning.')
        #soundhandle.playWave('/home/turtlebot/wavFiles/start.wav')
        self.move_base = actionlib.SimpleActionClient("move_base",
                                                      MoveBaseAction)
        rospy.loginfo("Wait for the action server to come up")
        # Allow up to 5 seconds for the action server to come up
        self.move_base.wait_for_server(rospy.Duration(5))
        # What to do you ctrl + c
        rospy.on_shutdown(self.shutdown)

        # Import the graphml to be read later
        G = nx.read_graphml("2nd_demo.graphml", unicode)
        root = 'n1'
        current = root

        # Clear words.log
        f = open("words.log", "r+")
        f.seek(0)
        f.truncate()

        # Clear clean.log
        with open("clean.log", "w"):
            pass

        # Constantly read from words.log for new lines
        while True:
            line = f.readline()

            # If the user said something
            if line:
                # The lines with dialogue all begin with a numerical value
                if line[0][:1] in '0123456789':
                    # remove 9 numbers, colon, and space from the beginning, and any whitespace from the end of the line
                    speech = line[11:].rstrip()
                    print speech

                    count = 0
                    # Search through all edges connected to the current node
                    for e in G.edges(current, data=True):

                        # If what was spoken matches the 'spoken' attribute of the edge
                        if str(speech) == str(e[2].values()[0]):
                            # Switch the current node to be the node the edge goes to
                            current = e[1]

                            # find '*' symbol in output string and and replace it with what user said stored in speech

                            # Get the dialogue stored in the new node and save it in 'output'
                            output = G.node[current]['dialogue']
                            if current == 'n7':
                                output = output.replace('*', str(speech))
                                tracker['room'] = str(speech).lower()
                            print 'OUTPUT: %s' % output

                            tts = gTTS(text=output, lang='en')
                            tts.save('/home/raeesa/Desktop/line.wav')

                            subprocess.Popen([
                                'gnome-terminal', '-x', 'bash', '-c',
                                'pacmd set-source-mute 2 1'
                            ])
                            #rospy.loginfo('playing output')
                            soundhandle.playWave(
                                '/home/raeesa/Desktop/line.wav', blocking=True)
                            #soundhandle.say(output)
                            subprocess.Popen([
                                'gnome-terminal', '-x', 'bash', '-c',
                                'pacmd set-source-mute 2 0'
                            ])

                            # Add 'output' to the top of clean.log
                            with open("clean.log", "r+") as g:
                                # Read everything from clean.log into 'content'
                                content = g.read()
                                # Go to the top of the file
                                g.seek(0, 0)
                                # Write 'output' with 'content' appended to the end back to clean.log
                                g.write(output.rstrip('\r\n') + '\n' + content)
                                g.close()

                            if current == 'n9':
                                room = tracker.get('room')
                                position = {
                                    'x': goals.get(room, "201")[0],
                                    'y': goals.get(room, "201")[1]
                                }
                                # Customize the following values so they are appropriate for your location
                                #position = {'x': 5.12, 'y' : 2.72}
                                quaternion = {
                                    'r1': 0.000,
                                    'r2': 0.000,
                                    'r3': 0.000,
                                    'r4': 1.000
                                }

                                rospy.loginfo("Go to (%s, %s) pose",
                                              position['x'], position['y'])
                                self.goto(position, quaternion)

                            # If there are no outgoing edges from the current node, go back to the root
                            if G.out_degree(current) == 0:
                                current = root

    def goto(self, pos, quat):
        print "goto started"
        sound_client = SoundClient()
        #wait for sound_play to connect to publishers otherwise, it will miss first published psg
        rospy.sleep(3)  #3 seconds?
        tts = gTTS(text='Going to destination', lang='en')
        tts.save("/home/turtlebot/wavFiles/goingTo.wav")
        # Send a goal
        self.goal_sent = True
        if self.tf.frameExists("/base_link") and self.tf.frameExists("/map"):
            t = self.tf.getLatestCommonTime("/base_link", "/map")
            position, quaternion = self.tf.lookupTransform(
                "/base_link", "/map", t)
            rospy.loginfo(position[0])

        goal = MoveBaseGoal()
        #if(os.path.isfile("/home/wavFiles/goingTo.wav") == False)
        #the path to gtts-cli.py might have to be changed based on setup

        #play the wav file
        rospy.loginfo("Playing goingTo.wav")
        sound_client.playWave("/home/turtlebot/wavFiles/goingTo.wav")
        goal.target_pose.header.frame_id = 'map'
        goal.target_pose.header.stamp = rospy.Time.now()
        goal.target_pose.pose = Pose(
            Point(pos['x'], pos['y'], 0.000),
            Quaternion(quat['r1'], quat['r2'], quat['r3'], quat['r4']))

        # Start moving
        self.move_base.send_goal(goal)

        # Allow the TurtleBot up to 120 seconds to complete the task
        success = self.move_base.wait_for_result(rospy.Duration(120))

        state = self.move_base.get_state()
        result = False

        if success and state == GoalStatus.SUCCEEDED:
            # We made it!
            result = True
            tts = gTTS(text='Yay, reached destination', lang='en')
            tts.save("/home/turtlebot/wavFiles/reachedTo.wav")

            sound_client.playWave("/home/turtlebot/wavFiles/reachedTo.wav")

        else:
            self.move_base.cancel_goal()

        self.goal_sent = False

        return result

    def shutdown(self):
        if self.goal_sent:
            self.move_base.cancel_goal()
        rospy.loginfo("Stop")
        rospy.sleep(1)
class i2oNode(object):
    def __init__(self, *args, **kwargs): #pylint: disable=unused-argument
        rospy.init_node('i2oNode', anonymous=True)

        self.tf = TransformListener()
        # print str(dir(self.tf))

        self.imu_to_baselink = None

        # PUB/SUB Setup
        self.pub = rospy.Publisher('imu2odom', Odometry, queue_size=10)
        self.init_sub = rospy.Subscriber('initialize_localization', Bool, self.init_cb, queue_size=10)
        self.sub = rospy.Subscriber('imu', Imu, self.imu_callback)

        # state model
        self.baselink_model = State2D(0,0,0)
        self.imu_model = State2D(0,0,0)
        self.imu_zero = State2D(0,0,0)
        self.init_state = False

    # MAIN FUNCTION

    def listal(self): # listen-talk

        # run the node
        rate = rospy.Rate(10) # 10hz
        while not rospy.is_shutdown():
            rate.sleep()
        return 0

    # === TRANSITION FUNCTION ===

    def update_state_imu(self, state, imu_msg):
        # update state with new imu_information
        
        # Renamed state vars
        #    (time - time) => duration
        dt = (imu_msg.header.stamp - state.timestamp()).to_sec()
        final_orientation = self.quat_to_euler(imu_msg.orientation)
        # jerk
        twerk = self.twerk(state.theta(), final_orientation, state.omega(), state.alpha(), dt)
        
        # intermediate vars
        avg_heading = self.theta_avg(twerk, state.theta(), state.omega(), state.alpha(), dt)
        ang_acc = self.ang_acc(twerk, state.alpha(), dt)
        # lin_vel((a_measured, v0, dt))
        lin_vel = self.lin_vel(imu_msg.linear_acceleration.x, state.v(), dt)

        # assign final state
        state.stamp = imu_msg.header.stamp
        state.point = self.linear_shift(state.x(), state.y(), avg_heading, state.v(), imu_msg.linear_acceleration.x, dt) # includes x, y
        state.heading = final_orientation # includes heading
        state.ang_acc = ang_acc # includes alpha
        state.ang_vel = Vector.from_Vector3(imu_msg.angular_velocity) # includes omega
        state.lin_vel = lin_vel # includes v
        state.lin_acc = Vector.from_Vector3(imu_msg.linear_acceleration) # includes a

        return state
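
    # Kinematic model assumed by update_state_imu() above, update_state_init()
    # below, and the helper functions at the end of the class (my reading of
    # the code, added for clarity): over one IMU interval dt the heading is
    # treated as a cubic in t,
    #     theta(t) = theta0 + omega0*t + alpha0*t**2/2 + jerk*t**3/6,
    # so twerk() recovers the angular jerk from the observed heading change,
    # theta_avg() is the time average of theta(t) over [0, dt], and
    # linear_shift() integrates the average linear velocity along that average
    # heading to shift x/y.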

    def update_state_init(self, state, imu_msg):
        # average state with new imu_information
        
        # Renamed state vars
        #    (time - time) => duration
        dt = (imu_msg.header.stamp - state.timestamp()).to_sec()
        final_orientation = self.quat_to_euler(imu_msg.orientation)
        # jerk
        twerk = self.twerk(state.theta(), final_orientation, state.omega(), state.alpha(), dt)
        
        # intermediate vars
        avg_heading = self.theta_avg(twerk, state.theta(), state.omega(), state.alpha(), dt)
        ang_acc = self.ang_acc(twerk, state.alpha(), dt)
        # lin_vel((a_measured, v0, dt))
        lin_vel = self.lin_vel(imu_msg.linear_acceleration.x, state.v(), dt)

        # average final state
        # so the values collected during the init are a moving average
        # this all should be pretty stable around 0, because the robot should be stopped
        # also, all state models should use this to zero-out their models
        state.point = Vector.average(state.point, self.linear_shift(state.x(), state.y(), avg_heading, state.v(), imu_msg.linear_acceleration.x, dt)) # includes x, y
        state.heading = state.heading/2 + final_orientation/2 # includes heading
        state.ang_acc = Vector.average(state.ang_acc, ang_acc) # includes alpha
        state.ang_vel = Vector.average(state.ang_vel, Vector.from_Vector3(imu_msg.angular_velocity)) # includes omega
        state.lin_vel = Vector.average(state.lin_vel, lin_vel) # includes v
        state.lin_acc = Vector.average(state.lin_acc, Vector.from_Vector3(imu_msg.linear_acceleration)) # includes a

        return state

    # DATA CALLBACK

    def imu_callback(self, imu_msg):
        # TODO(buckbaskin): implement initialization/zeroing, not necessarily here

        if self.init_state:
            # do initializations
            self.imu_zero = self.update_state_init(self.imu_zero, imu_msg)

        else:
            # lookup Transformation here because of unknown imu frame until callback
            if self.tf.frameExists('/base_link') and self.tf.frameExists(imu_msg.header.frame_id): #pylint: disable=no-member
                try:
                    t = self.tf.getLatestCommonTime('/base_link', imu_msg.header.frame_id) #pylint: disable=no-member
                    position, quaternion = self.tf.lookupTransform('/base_link', imu_msg.header.frame_id, t) #pylint: disable=no-member
                    self.imu_to_baselink = [position, quaternion]
                    rospy.loginfo(' >>> Transform Found')
                except (tf.LookupException, tf.ConnectivityException, tf.ExtrapolationException):
                    rospy.loginfo(' <<< Error finding transform')
            
            # update state models
            if self.imu_to_baselink and len(self.imu_to_baselink) == 2:
                # the imu first updates the statemodel for the imu frame
                self.imu_model = self.update_state_imu(self.imu_model, imu_msg)
                # baselink_imu is imu data in the baselink frame
                baselink_imu = self.convert_to_baselink(imu_msg, self.imu_model, self.imu_to_baselink)
                # then the baselink (robot) model is updated using the transformed data
                self.baselink_model = self.update_state_imu(self.baselink_model, baselink_imu)

                # immediately publish data
                self.pub.publish(self.baselink_model.to_msg())            
            else:
                rospy.loginfo(' <<< Imu data not used. Saved transform not valid')

    def init_cb(self, b):
        self.init_state = bool(b.data)

    # DATA TRANSFORMATION

    def convert_to_baselink(self, imu_msg, imu_model, transform):
        # convert imu_msg from its frame to baselink frame
        
        # Assumption! TODO: I'm going to assume that the axes are aligned between frames to start.
        # Come back to this later and rotate to align the axes first.
        #   proposed path:
        #       -> rotate-transform the data (orientation, angular velocity, linear acceleration) to align with base_link
        #       -> run the same code below
        '''
        [sensor_msgs/Imu]:
        std_msgs/Header header - DONE
          uint32 seq
          time stamp
          string frame_id
        geometry_msgs/Quaternion orientation
          float64 x
          float64 y
          float64 z
          float64 w
        float64[9] orientation_covariance
        geometry_msgs/Vector3 angular_velocity
          float64 x
          float64 y
          float64 z
        float64[9] angular_velocity_covariance
        geometry_msgs/Vector3 linear_acceleration - DONE
          float64 x
          float64 y
          float64 z
        float64[9] linear_acceleration_covariance - DONE
        '''
        new_msg = Imu()

        # Header
        new_msg.header = imu_msg.header
        new_msg.header.frame_id = '/base_link'
        
        # Orientation (same based on Assumption! above)
        new_msg.orientation = imu_msg.orientation
        #   including covariance, because same. will likely drop for rotation
        new_msg.orientation_covariance = imu_msg.orientation_covariance

        # Angular Velocity (same based on Assumption! above)
        new_msg.angular_velocity = imu_msg.angular_velocity
        #   including covariance, because same. will likely drop for rotation
        new_msg.angular_velocity_covariance = imu_msg.angular_velocity_covariance

        # Linear Acceleration (not the same, even with assumption)
        new_msg.linear_acceleration = self.solid_body_translate_lin_acc(imu_msg.linear_acceleration, imu_model, transform)

        return new_msg

    def solid_body_translate_lin_acc(self, lin_acc, state, transform):
        # TODO(buckbaskin): check dynamics notes

        # Free Moving Rigid Body Kinematics
        # A-p = A-o + w-dot x R_prime-p + w x (w x R_prime-p)
        # We have A-p (the linear acceleration from the Imu)
        # We are solving for A-o (the acceleration for the baselink frame)
        # angular velocity is consistent across the rigid body (measured at Imu)
        # angular acceleration (w-dot) is interpolated from velocity data
        # R-p is the vector between the two (transform)

        # known: A-p, w-dot, w, R-p
        # solving for: A-o
        # A-o = A-p - w-dot x R-p - w x ( w x R-p )
        Ap = Vector.from_Vector3(lin_acc)  # Ap is a state_model.Vector
        position = transform[0]
        r = Vector.from_tf_position(position)
        w_dot = state.alpha()
        w = state.omega()
        
        Ao = (Ap
                .minus(w_dot.cross(r))
                .minus(w.cross(w.cross(r))))
        return Ao.to_Vector3()
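
        # Worked numeric check of the relation above (illustrative values, not
        # from the original example): with w = (0, 0, 1) rad/s,
        # w_dot = (0, 0, 0) and R_p = (0.5, 0, 0) m, the centripetal term
        # w x (w x R_p) = (-0.5, 0, 0) m/s^2, so an IMU reading
        # A_p = (1.0, 0, 0) gives A_o = (1.0, 0, 0) - 0 - (-0.5, 0, 0)
        # = (1.5, 0, 0) m/s^2.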

    # HELPER FUNCTIONS

    def theta_avg(self, twerk, theta0, w0, a0, dt):
        # twerk aka twisting jerk
        return twerk*pow(dt,3)/24 + a0*pow(dt,2)/6 + w0*dt/2 + theta0

    def twerk(self, theta0, theta1, w0, a0, dt):
        # "twisting jerk": solve theta1 = theta0 + w0*dt + a0*dt**2/2 + j*dt**3/6
        # for the angular jerk j
        twerk = ((((((theta1 - theta0) / dt) - w0) / (dt / 2)) - a0) / (dt / 3))
        return twerk

    def ang_acc(self, twerk, a0, dt):
        a1 = a0 + twerk*dt
        return Vector(0,0,a1)

    def lin_vel(self, a_measured, v0, dt):
        v1 = v0 + a_measured*dt
        return Vector(v1, 0, 0)

    def linear_shift(self, x0, y0, theta, v0, a, dt):
        # average velocity over the interval, integrated over dt along the average heading
        v_avg = (v0 + (v0 + a*dt)) / 2.0
        dx = v_avg*math.cos(theta)*dt
        dy = v_avg*math.sin(theta)*dt
        return Vector(x0+dx, y0+dy, 0)

    def quat_to_euler(self, quaternion):
        # returns the heading from a given quaternion
        q = quaternion
        q_array = [q.x, q.y, q.z, q.w]
        return euler_from_quaternion(q_array)[2]

    def euler_to_quat(self, euler_heading):
        # returns Quaternion ros message
        q = quaternion_from_euler(0,0,euler_heading)
        return Quaternion(w=q[3], x=q[0], y=q[1], z=q[2])
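
# Usage sketch for i2oNode (an assumption about how the node is launched; the
# original script's entry point is not part of this example):
#
#   if __name__ == '__main__':
#       node = i2oNode()
#       node.listal()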
class KeyPointManager(object):
    def __init__(self):
        self.tf = TransformListener()
        self.keyPointList = list()
        
    def add(self, marker):
        for i in range(len(self.keyPointList)):
            if (self.keyPointList[i].id==marker.id):
                return
        position = self.transformMarkerToWorld(marker)
        k = KeyPoint(marker.id, Point(position[0], position[1], position[2]), Quaternion(0.,  0.,  0.,  1.))
        self.keyPointList.append(k)
        self.addWaypointMarker(k)
        rospy.loginfo('Marker added at the following position:')
        rospy.loginfo(k.pose)
    def getWaypoints(self):
        waypoints = list()
        for i in range(len(self.keyPointList)):
            waypoints.append(self.keyPointList[i].pose)
        return waypoints
            
    def keyPointListComplete(self):
        if (len(self.keyPointList)==5):
            self.keyPointList.sort(key=lambda x: x.id, reverse=True)
            return True
        return False
    def markerHasValidId(self, marker):
        if (marker.id>=61) and (marker.id<=65):
            return True
        return False
    def transformMarkerToWorld(self, marker):
        markerTag = "ar_marker_"+str(marker.id )
        rospy.loginfo(markerTag);
        if self.tf.frameExists("map") and self.tf.frameExists(markerTag):
            self.tf.waitForTransform("map", markerTag, rospy.Time(), rospy.Duration(4.0))
            t = self.tf.getLatestCommonTime("map", markerTag)
            position, quaternion = self.tf.lookupTransform("map", markerTag, t)
            return self.shiftPoint( position, quaternion)
    def shiftPoint(self, position, quaternion):
        try:
            euler = euler_from_quaternion(quaternion)
            yaw = euler[2]
            tf_mat = np.array([[np.cos(yaw), -np.sin(yaw), 0, position[0]],[np.sin(yaw), np.cos(yaw), 0, position[1]],[0, 0, 1, 0],[0, 0, 0, 1]])
            displacement = np.array([[1, 0, 0, 0],[0, 1, 0, 0.35],[0, 0, 1, 0],[0, 0, 0, 1]])
            point_map = np.dot(tf_mat, displacement)
            position = (point_map[0,3], point_map[1,3], 0)
        except Exception as inst:
            print type(inst)     # the exception instance
            print inst.args      # arguments stored in .args
            print inst
        return position
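    # Worked example of the shift above (illustrative values, not from the
    # original example): the marker pose is displaced 0.35 m along the marker's
    # local y-axis, so with yaw = 0 the point (x, y) becomes (x, y + 0.35), and
    # with yaw = pi/2 it becomes (x - 0.35, y).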
    def addWaypointMarker(self, keyPoint):
        rospy.loginfo('Publishing marker')
        # Set up our waypoint markers
        marker_scale = 0.2
        marker_lifetime = 0 # 0 is forever
        marker_ns = 'waypoints'
        marker_id = keyPoint.id
        marker_color = {'r': 0.0, 'g': 1.0, 'b': 0.0, 'a': 1.0}
        
        # Initialize the marker points list.
        self.waypoint_markers = Marker()
        self.waypoint_markers.ns = marker_ns
        self.waypoint_markers.id = marker_id
        self.waypoint_markers.type = Marker.CUBE_LIST
        self.waypoint_markers.action = Marker.ADD
        self.waypoint_markers.lifetime = rospy.Duration(marker_lifetime)
        self.waypoint_markers.scale.x = marker_scale
        self.waypoint_markers.scale.y = marker_scale
        # CUBE_LIST markers also need a non-zero z scale to be rendered
        self.waypoint_markers.scale.z = marker_scale
        self.waypoint_markers.color.r = marker_color['r']
        self.waypoint_markers.color.g = marker_color['g']
        self.waypoint_markers.color.b = marker_color['b']
        self.waypoint_markers.color.a = marker_color['a']
        
        self.waypoint_markers.header.frame_id = 'map'
        self.waypoint_markers.header.stamp = rospy.Time.now()
        self.waypoint_markers.points = list()
        p = Point(keyPoint.pose.position.x, keyPoint.pose.position.y, keyPoint.pose.position.z)
        self.waypoint_markers.points.append(p)

        # Publish the waypoint markers. Creating the publisher here, right before
        # publishing, can drop the first message; latching keeps it available to
        # late subscribers.
        self.marker_pub = rospy.Publisher('waypoint_markers', Marker,
                                          queue_size=10, latch=True)

        self.marker_pub.publish(self.waypoint_markers)
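
# Usage sketch for KeyPointManager (assumes the markers come from
# ar_track_alvar, which also publishes the "ar_marker_<id>" TF frames used
# above; the original driver code is not part of this example):
#
#   from ar_track_alvar_msgs.msg import AlvarMarkers
#
#   manager = KeyPointManager()
#
#   def marker_cb(msg):
#       for m in msg.markers:
#           if manager.markerHasValidId(m):
#               manager.add(m)
#
#   rospy.Subscriber('ar_pose_marker', AlvarMarkers, marker_cb)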
class Alexa:
    """Alexa Voice Control Module
    
    This ROS node connects to AWS IoT and sends commands to ROS.

    """

    # set up constants
    def __init__(self):
        # create a new node
        rospy.init_node('alexa', anonymous=True)
        self.action_client = actionlib.SimpleActionClient(
            'move_base', MoveBaseAction)
        self.tf = TransformListener()
        self.tag_id = rospy.get_param("~user_tag_id", '1001')

        self.FEET_TO_M = 0.3048
        self.DEGREES_TO_RAD = math.pi / 180

        root_dir = os.path.dirname(os.path.abspath(__file__))

        # set up AWS constants
        self.host = rospy.get_param(
            "~host", 'a1vgqh9vgvjzyh.iot.us-east-1.amazonaws.com')
        self.rootCAPath = root_dir + rospy.get_param(
            "~rootCAPath", '/Certificates/root-CA.crt')
        self.certificatePath = root_dir + rospy.get_param(
            "~certificatePath", '/Certificates/Pi.cert.pem')
        self.privateKeyPath = root_dir + rospy.get_param(
            "~privateKeyPath", '/Certificates/Pi.private.key')
        self.clientId = rospy.get_param("~clientId", "Pi")
        self.topic = rospy.get_param("~topic", '/Transnavigators/Pi')

    # callback for receiving AWS message
    def _callback(self, client, userdata, message):
        """This callback receives a JSON message and outputs a ROS move_base command
        
        """
        # extract data
        data_string = message.payload.decode("utf8").replace("'", '"')
        rospy.loginfo(data_string)
        data = json.loads(data_string)

        # publish data
        goal = MoveBaseGoal()
        goal.target_pose.header.frame_id = "base_footprint"
        goal.target_pose.header.stamp = rospy.Time.now()
        goal.target_pose.pose.position = Point(0, 0, 0)
        goal.target_pose.pose.orientation = Quaternion(0, 0, 0, 1)
        if 'type' not in data:
            return
        # Move forward
        if data['type'] == 'forward':
            if 'distance' in data and 'distanceUnit' in data:
                goal.target_pose.pose.position.x = float(data['distance'])
                if data['distanceUnit'] == 'feet':
                    goal.target_pose.pose.position.x *= self.FEET_TO_M
            else:
                return  # Remove this if it works
                #goal.target_pose.pose.position.x = 100000.0

        # Turn the wheelchair
        elif data['type'] == 'turn':
            if 'angle' in data:
                angle = float(data['angle'])
                if data.get('angleUnit') == 'degrees':
                    angle *= self.DEGREES_TO_RAD
            else:
                # default to a quarter turn if no angle is given
                angle = 90 * self.DEGREES_TO_RAD
            if 'direction' in data and data['direction'] == 'right':
                angle = -angle
            goal.target_pose.pose.orientation = Quaternion(
                0, 0, math.sin(angle / 2), math.cos(angle / 2))

        # Stop the wheelchair
        elif data['type'] == 'stop':
            goal.target_pose.pose.position = Point(0, 0, 0)
            goal.target_pose.pose.orientation = Quaternion(0, 0, 0, 1)

        # Go to the localino tag
        elif data['type'] == 'locateme' and self.tf.frameExists(
                "/base_link") and self.tf.frameExists("/tag_" + self.tag_id):
            t = self.tf.getLatestCommonTime("/base_link",
                                            '/tag_' + self.tag_id)
            pos, quat = self.tf.lookupTransform("/base_link",
                                                "/tag_" + self.tag_id, t)

            # Go to the target; rotation is the vector from here to the tag
            goal.target_pose.pose.position = Point(pos[0], pos[1], pos[2])
            dist = math.sqrt(pos[0]**2 + pos[1]**2)
            goal.target_pose.pose.orientation = Quaternion(
                0, 0, pos[1] / dist, pos[0] / dist)

        # Go to the static landmark
        elif data['type'] == 'moveto' and self.tf.frameExists(
                "/map") and self.tf.frameExists("/%s" % str(data['location'])):
            t = self.tf.getLatestCommonTime("/base_link",
                                            "/%s" % str(data['location']))
            pos, quat = self.tf.lookupTransform("/base_link",
                                                "/%s" % str(data['location']),
                                                t)

            # Go to the target; rotation is the vector from here to the location
            goal.target_pose.pose.position = Point(pos[0], pos[1], pos[2])
            dist = math.sqrt(pos[0]**2 + pos[1]**2)
            goal.target_pose.pose.orientation = Quaternion(
                0, 0, pos[1] / dist, pos[0] / dist)

        else:
            rospy.logerr("Could not find transform from base_link to map")
            return

        self.action_client.send_goal(goal)
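
        # Example payloads this callback is written to handle (illustrative
        # only; the exact schema comes from the Alexa skill, which is not shown
        # here):
        #   {"type": "forward", "distance": 3, "distanceUnit": "feet"}
        #   {"type": "turn", "angle": 90, "angleUnit": "degrees", "direction": "right"}
        #   {"type": "stop"}
        #   {"type": "locateme"}
        #   {"type": "moveto", "location": "kitchen"}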

    def begin(self):
        """This function sets up the AWS IoT MQTT client, connects, and waits until ROS closes
        
        """
        rospy.loginfo("Connecting to AWS")

        # Configure logging
        logger = logging.getLogger("AWSIoTPythonSDK.core")
        logger.setLevel(logging.WARN)
        stream_handler = logging.StreamHandler()
        formatter = logging.Formatter(
            '%(asctime)s - %(name)s - %(levelname)s - %(message)s')
        stream_handler.setFormatter(formatter)
        logger.addHandler(stream_handler)

        # MQTT Client documentation: https://s3.amazonaws.com/aws-iot-device-sdk-python-docs/sphinx/html/index.html
        aws_iot_mqtt_client = AWSIoTMQTTClient(self.clientId)
        aws_iot_mqtt_client.configureEndpoint(self.host, 8883)
        aws_iot_mqtt_client.configureCredentials(self.rootCAPath,
                                                 self.privateKeyPath,
                                                 self.certificatePath)

        # AWSIoTMQTTClient connection configuration
        aws_iot_mqtt_client.configureAutoReconnectBackoffTime(1, 32, 20)
        aws_iot_mqtt_client.configureOfflinePublishQueueing(
            -1)  # Infinite offline Publish queueing
        aws_iot_mqtt_client.configureDrainingFrequency(2)  # Draining: 2 Hz
        aws_iot_mqtt_client.configureConnectDisconnectTimeout(10)  # 10 sec
        aws_iot_mqtt_client.configureMQTTOperationTimeout(5)  # 5 sec

        # Connect and subscribe to AWS IoT
        aws_iot_mqtt_client.connect()
        aws_iot_mqtt_client.subscribe(self.topic, 0, self._callback)

        rospy.loginfo("Connected to " + self.topic)

        self.action_client.wait_for_server()

        rospy.loginfo("Connected to the action server")

        # wait
        rospy.spin()
class pcScan_to_pcWorld():
    def __init__(self):
        self.pcSub = rospy.Subscriber('/slam_cloud', pc, self.callback)
        self._tf = TransformListener()
        self.laser_pc = None
        self.mat_laser2world = None
        self.world_pc = None
        self.laser_pc_sync = None

    def callback(self, data):
        self.laser_pc = data

    def update(self):
        target_frame = "world"
        src_frame = "laser0_frame"
        if self._tf.frameExists(target_frame) and self._tf.frameExists(
                src_frame):
            try:
                self.mat_laser2world = self._tf.asMatrix(
                    target_frame, self.laser_pc.header)
            except Exception as e:
                print(e)
        else:
            print('Unable to find:', self._tf.frameExists(target_frame),
                  self._tf.frameExists(src_frame))

        if not isinstance(self.laser_pc, pc) or not isinstance(
                self.mat_laser2world, np.ndarray):
            return
        try:
            lpc = copy.deepcopy(self.laser_pc)
            mat = copy.deepcopy(self.mat_laser2world)
            wpc = copy.deepcopy(self.laser_pc)
            for idx, p in enumerate(np.array(lpc.points)):
                col = np.array([[float(p.x), float(p.y), float(p.z), 1.0]]).T
                tran_p = np.matmul(mat, col).reshape((4))
                wpc.points[idx].x = tran_p[X]
                wpc.points[idx].y = tran_p[Y]
                wpc.points[idx].z = tran_p[Z]
            self.world_pc = wpc
            self.laser_pc_sync = lpc
        except Exception as e:
            print(e)

    def get_Cell(self):
        thres = 0.05  #threshold to be considered as occupied
        pt = copy.deepcopy(self.world_pc.points)
        val = copy.deepcopy(self.world_pc.channels[0].values)  #'intensities'
        mat = copy.deepcopy(self.mat_laser2world)
        pt = list(pt)
        val = np.array(val)
        trans_pt = [[p.x, p.y, p.z] for p in pt]
        trans_pt = np.array(trans_pt)
        occupied_world = trans_pt[val < thres]

        laser_pt = copy.deepcopy(self.laser_pc_sync.points)
        laser_pt = list(laser_pt)
        trans_laser_pt = [[p.x, p.y, p.z] for p in laser_pt]
        trans_laser_pt = np.array(trans_laser_pt)
        occupied_laser = trans_laser_pt[val < thres]
        # free = trans_pt[val >=thres] #Higher intensity laser means obstacle doesnt exist

        #Calculate free space based on distance from obstacles
        free_world = []
        distance = 0.5
        for pt in occupied_laser:
            a = abs(pt[X]) / distance
            b = abs(pt[Y]) / distance
            c = abs(pt[Z]) / distance
            # np.linspace expects an integer sample count
            num_of_sample = max(int(max(a, b, c)), 2)

            x = np.linspace(0, pt[X], num_of_sample)
            y = np.linspace(0, pt[Y], num_of_sample)
            z = np.linspace(0, pt[Z], num_of_sample)

            coords = zip(x, y, z)
            for c in coords:
                #Translate back to World Coordinate
                col = np.array([[float(c[X]),
                                 float(c[Y]),
                                 float(c[Z]), 1.0]]).T
                tran_p = np.matmul(mat, col).reshape((4))
                wx = tran_p[X]
                wy = tran_p[Y]
                wz = tran_p[Z]

                free_world.append((wx, wy, wz))

        print("Occupied", len(occupied_world), "Free", len(free_world))

        return occupied_world, free_world

    def ready(self):
        return isinstance(self.world_pc, pc)
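
# Usage sketch for pcScan_to_pcWorld (an assumed driver loop; the original
# script's main block is not part of this example):
#
#   rospy.init_node('pc_scan_to_world')
#   converter = pcScan_to_pcWorld()
#   rate = rospy.Rate(5)
#   while not rospy.is_shutdown():
#       converter.update()
#       if converter.ready():
#           occupied, free = converter.get_Cell()
#       rate.sleep()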
Beispiel #25
0
class calib:
    def __init__(self, *args):
        self.tf = TransformListener()
        self.detector_service = rospy.ServiceProxy("/fiducials/get_fiducials", DetectObjects)

        self.frame_camera_mount = rospy.get_param('~frame_camera_mount')
        self.frame_marker_mount = rospy.get_param('~frame_marker_mount')
        self.frame_marker = rospy.get_param('~frame_marker', "/marker")

    def compute(self):
        
        x_values = []
        y_values = []
        z_values = []
        roll_values = []
        pitch_values = []
        yaw_values = []
        
        count_success = 0
        count_failed = 0
        while count_success <= 15 and count_failed <= 100:
            try: 
                rospy.wait_for_service("/fiducials/get_fiducials", 3.0)
                res = self.detector_service(DetectObjectsRequest())
                
                if self.tf.frameExists(self.frame_camera_mount) and self.tf.frameExists(self.frame_marker_mount):
                    t = self.tf.getLatestCommonTime(self.frame_camera_mount, self.frame_marker_mount)
                    position, quaternion = self.tf.lookupTransform(self.frame_camera_mount, self.frame_marker_mount, t)
                    euler = tf.transformations.euler_from_quaternion(quaternion)
                    print '<origin xyz="'+str(position[0])+" "+str(position[1])+" "+str(position[2])+'" rpy="'+str(euler[0])+" "+str(euler[1])+" "+str(euler[2])+'" />'
                    x_values.append(position[0])
                    y_values.append(position[1])
                    z_values.append(position[2])
                    roll_values.append(euler[0])
                    pitch_values.append(euler[1])
                    yaw_values.append(euler[2])
                else:
                    print "tf does not exist!", self.tf.frameExists(self.frame_camera_mount), self.tf.frameExists(self.frame_marker_mount)
            except:
                print "did not detect marker."
                print "count_success = ", count_success
                print "count_failed = ", count_failed
                count_failed += 1
                continue
            count_success += 1
        
        if len(x_values) < 5:
            print "to few detections, aborting"
            return
                
        x_avg_value = float(sum(x_values)) / len(x_values)
        y_avg_value = float(sum(y_values)) / len(y_values)
        z_avg_value = float(sum(z_values)) / len(z_values)
        roll_avg_value = float(sum(roll_values)) / len(roll_values)
        pitch_avg_value = float(sum(pitch_values)) / len(pitch_values)
        yaw_avg_value = float(sum(yaw_values)) / len(yaw_values)

        print "The average values (without hardcoding) <xyz> are : \n\n" + '<origin xyz="' + str(x_avg_value) +" "+ str(-y_avg_value) +" "+  str(z_avg_value) + '" rpy="' + str(roll_avg_value) + " " + str(pitch_avg_value) + " " + str(yaw_avg_value) + '" />\n'
        
        # HACK: set some DoF to fixed values
        z_avg_value = 1.80
        roll_avg_value = -3.1415926
        pitch_avg_value = 0
        yaw_avg_value = -3.1415926
        
        print "The average values<xyz> are : \n\n" + '<origin xyz="' + str(x_avg_value) +" "+ str(-y_avg_value) +" "+  str(z_avg_value) + '" rpy="' + str(roll_avg_value) + " " + str(pitch_avg_value) + " " + str(yaw_avg_value) + '" />\n'
Beispiel #26
0
class FetchClient(object):
    def __init__(self):
        self.bfp = True
        self.robot = robot_interface.Robot_Interface()
        self.url = 'http://172.31.76.30:80/ThinkingQ/'
        self.joint_names = ["shoulder_pan_joint",
                       "shoulder_lift_joint", "upperarm_roll_joint",
                       "elbow_flex_joint", "forearm_roll_joint",
                       "wrist_flex_joint", "wrist_roll_joint"]
        self.tf_listener = TransformListener()
        trfm = Transformer()
        self.r2b = trfm.transform_matrix_of_frames(
            'head_camera_rgb_optical_frame', 'base_link')
        self.model = load_model("./model/0.988.h5")
        self.br = TransformBroadcaster()
        self.move_group = MoveGroupInterface("arm", "base_link")
        # self.pose_group = moveit_commander.MoveGroupCommander("arm")
        self.cam = camera.RGBD()
        self.position_cloud = None
        while True:
            try:
                cam_info = self.cam.read_info_data()
                if cam_info is not None:
                    break
            except:
                rospy.logerr('camera info not received')
        self.pcm = PCM()
        self.pcm.fromCameraInfo(cam_info)

    def transform_matrix_of_frames(self, source_frame, target_frame):
        if self.tf_listener.frameExists(source_frame) and self.tf_listener.frameExists(target_frame):
            t = self.tf_listener.getLatestCommonTime(
                source_frame, target_frame)
            # Compute the transform from source to target
            position, quaternion = self.tf_listener.lookupTransform(
                target_frame, source_frame, t)

            transfrom_matrix = transformations.quaternion_matrix(quaternion)
            transfrom_matrix[0:3, 3] = position

            return transfrom_matrix
        else:
            return None

    def get_corner(self):
        print "get_corner"
        html = ""
        while html=="":
            image_time = time.time()
            img = self.cam.read_color_data()
            while img is None:
                img = self.cam.read_color_data()
            cv2.imwrite("image/fetch.jpg", img)
            files = {'IMAGE': (str(image_time) + '.jpg', open('image/fetch.jpg', 'rb'), 'image/png', {})}
            response = requests.request("POST", url=self.url, files=files, timeout=15000)
            html = response.text
        print html
        a = html.split("\n")[1:-1]
        # print a
        pl = np.array([int(i) for i in a])
        # print pl
        re = []
        i = 0
        while i < len(a):
            point = []
            # print "pl i", pl[i]
            point.append(pl[i])
            point.append(pl[i+1])
            re.append(point)
            i = i+2
        # print re
        return re

    def convert_camera_position_to_base_position(self, array):
        print 'r2b', self.r2b
        ret = np.dot(self.r2b[0:3, 0:3], array)  # Rotate
        ret = ret + self.r2b[0:3, 3]
        return ret
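
        # Equivalent homogeneous-coordinates form (a sketch of the same math):
        #   p_h = np.append(array, 1.0)
        #   ret = np.dot(self.r2b, p_h)[0:3]
        # since r2b is a 4x4 homogeneous transform matrix.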

    def update_position_cloud(self):
        pc = self.robot.camera.read_point_cloud()
        arr = pypcd.numpy_pc2.pointcloud2_to_array(pc, split_rgb=True)
        self.position_cloud = pypcd.numpy_pc2.get_xyz_points(arr, remove_nans=False)

    def get_position_by_pix(self, x, y):
        pc = self.robot.camera.read_point_cloud()
        arr = pypcd.numpy_pc2.pointcloud2_to_array(pc, split_rgb=True)
        po = pypcd.numpy_pc2.get_xyz_points(arr, remove_nans=False)
        return po[x, y]

    def broadcast_position(self, position, name):
        # while self.bfp:
        # print "111"
        a = transformations.quaternion_from_euler(
            ai=-2.355, aj=-3.14, ak=0.0)
        b = transformations.quaternion_from_euler(
            ai=0.0, aj=0.0, ak=1.57)
        base_rot = transformations.quaternion_multiply(a, b)
        self.br.sendTransform(position,
                              base_rot,
                              rospy.Time.now(),
                              name,
                              'head_camera_rgb_optical_frame')

    def get_position_of_baselink(self, name):
        listener = TransformListener()
        rate = rospy.Rate(10.0)
        while not rospy.is_shutdown():
            try:
                fetch_client.broadcast_position(cam_position, name)
                pose, rot = listener.lookupTransform('base_link', name, rospy.Time(0))
            except Exception as e:
                print e
                continue
            rate.sleep()
            if pose:
                return pose

    def show_image(self, im):
        plt.imshow(im)
        pylab.show()

    def move_to_position(self, position):
        position[2] -= 0.24
        x = np.round(np.array([position]), 4) * 100
        print self.model.predict(x)
        joints = np.round(self.model.predict(x)[0], 4) / 100
        print joints
        joint_3 = 0 - joints[1] - joints[2] + 1.5


        disco_poses = [
            [joints[0], joints[1], 0.0, joints[2], 0.0, joint_3, 1.5+joints[0]],# 1 close gripper
        ]

        for pose in disco_poses:
            if rospy.is_shutdown():
                break
            print pose
            # Plans the joints in joint_names to angles in pose
            self.move_group.moveToJointPosition(self.joint_names, pose, wait=False)

            # Since we passed in wait=False above we need to wait here
            self.move_group.get_move_action().wait_for_result()
            result = self.move_group.get_move_action().get_result()

            if result:
                # Checking the MoveItErrorCode
                if result.error_code.val == MoveItErrorCodes.SUCCESS:
                    rospy.loginfo("Reach success!")

                    # position = self.pose_group.get_current_pose("wrist_roll_link").pose.position
                    # print(position)
                else:
                    # If you get to this point please search for:
                    # moveit_msgs/MoveItErrorCodes.msg
                    rospy.logerr("Reach fail")
            else:
                rospy.logerr("MoveIt! failure no result returned.")
        # This stops all arm movement goals
        # It should be called when a program is exiting so movement stops
        # self.move_group.get_move_action().cancel_all_goals()

    def move_to_position_up(self, position):
        position[2] -= 0.24
        x = np.round(np.array([position]), 4) * 100
        print self.model.predict(x)
        joints = np.round(self.model.predict(x)[0], 4) / 100
        print joints
        joint_3 = 0 - joints[1] - joints[2] + 1.5


        disco_poses = [
            [joints[0], joints[1], 0.0, joints[2], 0.0, joint_3 , 0-joints[1]-joints[2]], #up
        ]

        for pose in disco_poses:
            if rospy.is_shutdown():
                break
            print pose
            # Plans the joints in joint_names to angles in pose
            self.move_group.moveToJointPosition(self.joint_names, pose, wait=False)

            # Since we passed in wait=False above we need to wait here
            self.move_group.get_move_action().wait_for_result()
            result = self.move_group.get_move_action().get_result()

            if result:
                # Checking the MoveItErrorCode
                if result.error_code.val == MoveItErrorCodes.SUCCESS:
                    rospy.loginfo("Reach success!")

                    # position = self.pose_group.get_current_pose("wrist_roll_link").pose.position
                    # print(position)
                else:
                    # If you get to this point please search for:
                    # moveit_msgs/MoveItErrorCodes.msg
                    rospy.logerr("Reach fail")
            else:
                rospy.logerr("MoveIt! failure no result returned.")

    def move_to_position_with_up(self, position):
        position[2] -= 0.24
        x = np.round(np.array([position]), 4) * 100
        print self.model.predict(x)
        joints = np.round(self.model.predict(x)[0], 4) / 100
        print joints
        joint_3 = 0 - joints[1] - joints[2] + 1.5


        disco_poses = [
            [joints[0], joints[1], 0.0, joints[2] - 0.6, 0.0, joint_3 + 0.6, 1.5+joints[0]], #up
            [joints[0], joints[1], 0.0, joints[2], 0.0, joint_3, 1.5+joints[0]],# 1 close gripper
        ]

        for pose in disco_poses:
            if rospy.is_shutdown():
                break
            print pose
            # Plans the joints in joint_names to angles in pose
            self.move_group.moveToJointPosition(self.joint_names, pose, wait=False)

            # Since we passed in wait=False above we need to wait here
            self.move_group.get_move_action().wait_for_result()
            result = self.move_group.get_move_action().get_result()

            if result:
                # Checking the MoveItErrorCode
                if result.error_code.val == MoveItErrorCodes.SUCCESS:
                    rospy.loginfo("Reach success!")

                    # position = self.pose_group.get_current_pose("wrist_roll_link").pose.position
                    # print(position)
                else:
                    # If you get to this point please search for:
                    # moveit_msgs/MoveItErrorCodes.msg
                    rospy.logerr("Reach fail")
            else:
                rospy.logerr("MoveIt! failure no result returned.")

    def stow(self):
        disco_poses = [
            [-0.0, -0.7, 0.0, 0.5, 0.0, 1.5, 1.3],
            [1.32, 0.7, 0.0, -2.0, 0.0, -0.57, 0.0]#stow
        ]

        for pose in disco_poses:
            if rospy.is_shutdown():
                break
            print pose
            # Plans the joints in joint_names to angles in pose
            self.move_group.moveToJointPosition(self.joint_names, pose, wait=False)

            # Since we passed in wait=False above we need to wait here
            self.move_group.get_move_action().wait_for_result()
            result = self.move_group.get_move_action().get_result()

            if result:
                # Checking the MoveItErrorCode
                if result.error_code.val == MoveItErrorCodes.SUCCESS:
                    rospy.loginfo("Reach success!")
                    if disco_poses.index(pose) == 1:
                        self.robot.open_gripper()
                    # position = self.pose_group.get_current_pose("wrist_roll_link").pose.position
                    # print(position)
                else:
                    # If you get to this point please search for:
                    # moveit_msgs/MoveItErrorCodes.msg
                    rospy.logerr("Reach fail")
            else:
                rospy.logerr("MoveIt! failure no result returned.")
Beispiel #27
0
class Bag2UosConverter:
    def __init__(self, rootdir, scale_factor, transform_pitch_angle):
        rospy.init_node('bad_to_uos_convert', anonymous=False)
        rospy.Subscriber('velotime_points', PointCloud2, self.processMsg)
        self.counter = 0
        self.rootdir = rootdir
        self.scale_factor = scale_factor
        self.tf = TransformListener()
        self.init = True
        self.transform_pitch_angle = math.radians(transform_pitch_angle)

    def transformPoint(self, x, y, z):
        yy = y
        xx = math.cos(self.transform_pitch_angle) * x + math.sin(self.transform_pitch_angle) * z
        zz = -math.sin(self.transform_pitch_angle) * x + math.cos(self.transform_pitch_angle) * z
        return xx, yy, zz

    def run(self):
        rospy.spin()

    def processMsg(self, msg):
        print 'data arrived'
        # print list(cloud)
        # print self.tf.frameExists("velodyne") , self.tf.frameExists("odom")

        if self.tf.frameExists("world") and self.tf.frameExists("odom"):
            t = self.tf.getLatestCommonTime("world", "odom")
            position, quat = self.tf.lookupTransform("world", "odom", t)
            print position, quat
            euler = tf.transformations.euler_from_quaternion(quat)
            # heading_rad = math.degrees(euler[2])
            heading_rad = euler[2]
            print heading_rad
            cloud = pc2.read_points(msg, skip_nans=True)
            self.saveData(list(cloud))
            self.savePose(position[0], position[1], 0, 0, 0, heading_rad)

            self.counter = self.counter + 1
        if self.init:
            self.init = False
            cloud = pc2.read_points(msg, skip_nans=True)
            self.saveData(list(cloud))
            self.savePose(0, 0, 0, 0, 0, 0)
            self.counter = self.counter + 1

    def saveData(self, cloud):
        file_string = 'scan' + format(self.counter, '03d')
        fullfilename_data = self.rootdir + file_string + '.3d'
        fh_data = open(fullfilename_data, 'w')
        for (x, y, z, intensity, ring) in cloud:
            # print x,y,z
            x, y, z = self.transformPoint(x, y, z)
            x = x * self.scale_factor
            y = y * self.scale_factor
            z = z * self.scale_factor
            str_ = format(x, '.1f') + ' ' + format(z, '.1f') + ' ' + format(y, '.1f') + '\n'
            fh_data.write(str_)
        fh_data.close()

    def savePose(self, x, y, z, roll, pitch, yaw):
        file_string = 'scan' + format(self.counter, '03d')
        fullfilename_pose = self.rootdir + file_string + '.pose'
        fh_pose = open(fullfilename_pose, 'w')
        x = x * self.scale_factor
        y = y * self.scale_factor
        z = z * self.scale_factor
        roll = roll * 180 / math.pi
        yaw = yaw * 180 / math.pi
        pitch = pitch * 180 / math.pi
        str_first_line = format(x, '.1f') + ' ' + format(-z, '.1f') + ' ' + format(y, '.1f') + '\n'
        str_2nd_line = format(roll, '.1f') + ' ' + format(-yaw, '.1f') + ' ' + format(pitch, '.1f') + '\n'
        print str_first_line + str_2nd_line
        fh_pose.writelines([str_first_line, str_2nd_line])
        fh_pose.close()
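
    # Example of the files written above (illustrative values only): for
    # counter 3, scan003.3d holds one "x z y" line per scaled point, e.g.
    #     123.4 -5.6 78.9
    # and scan003.pose holds two lines, "x -z y" followed by
    # "roll -yaw pitch" in degrees, e.g.
    #     120.0 -0.0 45.0
    #     0.0 -12.5 0.0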
Beispiel #28
0
class Movement:
    def __init__(self, main=None):
        self.main = main

        self.map_ = Map()
        self.tf = TransformListener()

        # Last n visited locations
        self.visited = deque(self.map_.locations, 3)

        # MoveBaseGoal
        self.pos = MoveBaseGoal()
        self.pos.target_pose.pose.orientation.w = 1.0
        self.pos.target_pose.header.frame_id = '/map'

        # SimpleActionClient
        self.client = actionlib.SimpleActionClient('move_base', MoveBaseAction)
        self.client.wait_for_server()
        self.longest_waiting_time = rospy.Duration(10)

        # Clear costmaps
        rospy.wait_for_service('/move_base/clear_costmaps')
        self.clear_costmaps = rospy.ServiceProxy('/move_base/clear_costmaps',
                Empty)

        # Register listener for motor availability and odometry.
        self.disabled = False
        rospy.wait_for_message("/mobile_base/commands/motor_power", MotorPower)
        rospy.Subscriber("/mobile_base/commands/motor_power", MotorPower,
                self._get_motor_info)
        self.odom = 0.0
        rospy.wait_for_message("/odom", Odometry)
        rospy.Subscriber('/odom', Odometry, self._get_odom_data)

        # Teleop
        self.teleop = rospy.Publisher('/cmd_vel_mux/input/teleop', Twist,
                queue_size=1)
        self.TELEOP_X_MAX = 0.4
        self.TELEOP_X_SCALE = 1.0
        self.TELEOP_Z_MAX = 1.3
        self.TELEOP_Z_SCALE = 0.6
        self.TELEOP_SPEED = 0.8

    def abort(self):
        self.client.cancel_all_goals()

    # goal: Point()
    def _move_to(self, goal):
        self.clear_costmaps()
        self.pos.target_pose.pose.position = goal
        self.pos.target_pose.header.stamp = rospy.Time.now()
        self.client.send_goal(self.pos)

    # portal: key of Map.points
    def _move_to_portal(self, portal):
        goal = self.map_.points[portal]
        self._move_to(goal)

    def explore(self):
        portal = random.choice(self.map_.locations)
        while portal in self.visited:
            portal = random.choice(self.map_.locations)
        print("Explore: " + portal)
        self._move_to_portal(portal)
        while self.client.get_state() == GoalStatus.PENDING or \
                self.client.get_state() == GoalStatus.ACTIVE:
            time.sleep(0.05)
        if self.client.get_state() != GoalStatus.SUCCEEDED:
            print("Failed to explore " + portal)
        else:
            self.visited.append(portal)
            # Do a 360-degree rotation.
            teleop_cmd = Twist()
            teleop_cmd.angular.z = 2.0
            self.teleop.publish(teleop_cmd)

    def follow(self, pos):
        teleop_cmd = Twist()
        # Don't rotate if the tag is in the middle.
        if abs(pos.x) < 0.1:
            teleop_cmd.angular.z = 0.0
        else:
            teleop_cmd.angular.z = -1 * pos.x / self.TELEOP_X_MAX * \
                    self.TELEOP_X_SCALE * self.TELEOP_SPEED
        # Brake if too close.
        if pos.z < 0.4:
            teleop_cmd.linear.x = 0.0
        else:
            teleop_cmd.linear.x = pos.z / self.TELEOP_Z_MAX * \
                    self.TELEOP_Z_SCALE * self.TELEOP_SPEED
        print("Follow: distance = " + str(pos.z))
        self.teleop.publish(teleop_cmd)
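
        # Worked example of the scaling above (illustrative numbers): with
        # pos.x = 0.2 and pos.z = 1.0,
        #   angular.z = -1 * 0.2 / 0.4 * 1.0 * 0.8 = -0.4
        #   linear.x  =  1.0 / 1.3 * 0.6 * 0.8 ~= 0.37
        # i.e. the robot turns at 0.4 rad/s and closes in at about 0.37 m/s.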

    def rotate_to(self, target, direction):
        print("Now at: " + str(self.odom) + ". Rotate to: " + str(target))
        teleop_cmd = Twist()
        teleop_cmd.angular.z = direction * 0.5
        while not self.main.detector.detected and abs(self.odom - target) > 0.01:
            time.sleep(0.2)
            self.teleop.publish(teleop_cmd)

    def get_position(self):
        if self.tf.frameExists("/base_link") and self.tf.frameExists("/map"):
            t = rospy.Time(0)
            self.tf.waitForTransform("/base_link", "/map", t,
                    rospy.Duration(4.0))
            position, rotation = self.tf.lookupTransform("/base_link", "/map",
                    t)
            return position
        else:
            print("Frame doesn't exist.")

    def _get_motor_info(self, data):
        """Callback function of subscriber.

        Args:
            data (MotorPower)

        """
        # print("Got motor info!")
        if data.state == MotorPower.ON:
            self.disabled = False
        else:
            self.disabled = True
        # print(data)

    def _get_odom_data(self, data):
        self.odom = data.pose.pose.orientation.z
Beispiel #29
0
    def run(self, aut_path=None):
        """Intialize all NL components."""
        # pylint: disable=W0603
        global WORLD_KNOWLEDGE

        # Start the NL pipeline
        if not aut_path:
            print "Starting NL pipeline..."
            init_pipes()
        else:
            print "Skipping loading nlpipeline because an automaton was loaded"

        # Start the ROS node
        print "Initializing ROS node..."
        rospy.init_node('nlproxy')

        # Set up the state mgr and its callbacks
        print "Starting state manager..."
        self.state_mgr = StateManager(self)
        self.state_mgr.set_basedir(LTLGEN_BASE_DIR)

        # Load the automaton if needed
        if aut_path:
            self.state_mgr.load_test_automaton(aut_path, False)

        # Create world knowledge
        print "Starting knowledge..."
        WORLD_KNOWLEDGE = Knowledge(self)
        self.state_mgr.world_knowledge = WORLD_KNOWLEDGE

        # Wait a little for ROS to avoid timing startup issues.
        print "Waiting for ROS node to settle..."
        time.sleep(1)

        # Set up ROS listening
        print "Subscribing to notifications..."
        # Set up state manager's sending and listening
        pub = add_ltl_publisher()
        self.state_mgr.set_publisher(pub)
        add_subscriber(LTL_ENVIRONMENT_TOPIC,
                       self.state_mgr.process_sensor_data)
        add_subscriber(LTL_ENVIRONMENT_TOPIC,
                       WORLD_KNOWLEDGE.process_sensor_data)

        # Create the input comm_proxy and iPad connections
        print "Starting comm_proxy..."
        self.comm_proxy = CallbackSocket(self.text_port)
        self.comm_proxy.register_callback(self.process_text)
        self.ipad_conn = iPadConnection(self.map_port)
        self.ipad_conn.register_rename_callback(rename_entity)
        self.ipad_conn.register_text_callback(self.process_text)
        # TODO Add highlight callback
        self.map_proxy = MapProxy(self.ipad_conn)
        add_subscriber(LTL_ENVIRONMENT_TOPIC, self.ipad_conn.add_icons)
        WORLD_KNOWLEDGE.map_proxy = self.map_proxy

        # Set up odometry forwarding to the ipad
        tf = TransformListener()
        while not tf.frameExists("/map"):
            rospy.logwarn("Not ready for transforms yet")
            rospy.sleep(1.0)
        position_proxy = RobotPositionProxy(self.ipad_conn, tf)
        rospy.Subscriber(ODOM_TOPIC, Odometry, position_proxy.forward_position)

        # Set up getting directions
        direction_proxy = DirectionProxy()
        rospy.Subscriber(LOCATION_TOPIC, String, direction_proxy.set_location)
        WORLD_KNOWLEDGE.direction_proxy = direction_proxy

        print "NLMaster startup complete!"
        print "*" * 80

        # Finally, wait for input before quitting
        try:
            while True:
                text = raw_input("").strip()
                if text == "q":
                    break
        except (KeyboardInterrupt, EOFError):
            pass
        except:
            raise
        finally:
            # Only shutdown the pipeline if we actually were taking language input
            if not aut_path:
                print "Shutting down NL pipeline..."
                close_pipes()
            self.comm_proxy.shutdown()
            self.ipad_conn.shutdown()

        sys.exit()
Beispiel #30
0
class ObjectMap:

    def __init__(self):
        self.result_pub_ = rospy.Publisher("object_map", MarkerArray, queue_size=10)

        # subscribe object detections from current frame
        self.convex_hull_sub_ = rospy.Subscriber("object_convex_hull", MarkerArray, self.convex_hull_callback, queue_size=10)
        
        self.people_sub_ = rospy.Subscriber("/hdl_people_tracking_nodelet/markers", MarkerArray, self.people_callback, queue_size=10)
        '''
        On the /hdl_people_tracking_nodelet/markers topic
            - the first marker is a CUBE_LIST
            - people poses are in marker_array.markers[0].points
            - following markers in marker_array are text
        '''


        self.ground_plane_height_ = -0.35

        self.object_map_ = []

        self.iou_thresh_ = 0.05 # IoU threshold for data association
        self.dist_thresh_ = 0.6 # distance from a person to an object for the object to be considered occupied
        self.need_clean_thresh_ = 80 # number of frames an object must be occupied before it needs to be cleaned
        self.duplicate_iou_thresh_ = 0.8 # threshold to treat two detections in the same frame as duplicates

        self.tf_listener_ = TransformListener()

    def publish_convex_hull_marker(self):
        ''' Publish the object map
        '''
        marker_array = MarkerArray()

        for i in range(len(self.object_map_)):
            line_strip = Marker()
            line_strip.header.frame_id = "map"
            line_strip.header.stamp = rospy.Time.now()
            line_strip.ns = "object_map"
            line_strip.action = Marker.ADD
            line_strip.pose.orientation.w = 1.0

            line_strip.id = i

            line_strip.type = Marker.LINE_STRIP

            line_strip.scale.x = 0.05

            line_strip.color.g = 1.0

            if self.object_map_[i].need_clean_ == True:
                line_strip.color.r = 1.0
            elif self.object_map_[i].occupied_ > 0:
                line_strip.color.r = self.object_map_[i].occupied_ / float(self.need_clean_thresh_)
            else:
                line_strip.color.b = 1.0
            
            line_strip.color.a = 1.0

            for j in range(self.object_map_[i].points_.shape[0]):
                point = Point()
                point.x = self.object_map_[i].points_[j, 0]
                point.y = self.object_map_[i].points_[j, 1]
                point.z = 0
                line_strip.points.append(point)

            point = Point()
            point.x = self.object_map_[i].points_[0, 0]
            point.y = self.object_map_[i].points_[0, 1]
            point.z = 0
            line_strip.points.append(point)

            line_strip.lifetime = rospy.Duration(0)

            marker_array.markers.append(line_strip)

            # publish class tag
            class_tag = copy.deepcopy(self.object_map_[i].class_text_)
            class_tag.header = copy.deepcopy(line_strip.header)
            class_tag.ns = "class_tag"
            class_tag.id = copy.deepcopy(line_strip.id)
            class_tag.lifetime = rospy.Duration(0)
            marker_array.markers.append(class_tag)

            # Add 'Need Disinfection' tag
            if self.object_map_[i].need_clean_ == True:
                text_marker = Marker()
                text_marker.header = copy.deepcopy(line_strip.header)
                text_marker.action = Marker.ADD
                text_marker.ns = "disinfection_tag"
                text_marker.id = copy.deepcopy(line_strip.id)
                text_marker.lifetime = rospy.Duration(0)
                text_marker.type = Marker.TEXT_VIEW_FACING
                text_marker.scale.z = 0.2
                text_marker.pose.position = copy.deepcopy(line_strip.points[0])
                text_marker.pose.position.z += 0.5
                text_marker.color.r = 1.0
                text_marker.color.g = 1.0
                text_marker.color.a = 1.0
                text_marker.text = "Need Disinfection"
                marker_array.markers.append(text_marker)
        
        self.result_pub_.publish(marker_array)


    def calculate_iou(self, polygon_1, polygon_2):
        ''' Calculate IoU of two polygons

        Args:
            polygon_1/polygon_2: polygons as 2D numpy array
        '''
        poly_1 = shapely.geometry.Polygon(polygon_1)
        poly_2 = shapely.geometry.Polygon(polygon_2)
        iou = poly_1.intersection(poly_2).area / poly_1.union(poly_2).area
        return iou
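
        # Worked example (illustrative): two axis-aligned unit squares offset by
        # 0.5 in x overlap in a 0.5 x 1 rectangle, so
        # IoU = 0.5 / (1 + 1 - 0.5) = 1/3.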

    def data_association(self, current_frame_detection_tuple):
        ''' Implement the data association pipeline
        '''
        # create a matrix of IoU scores
        # rows are detection list, columns are map list
        '''
        Example:
        Detection\Map     Object 0     Object 1     Object 2
        Detection A       IoU = 0         0            0 
        Detection B          0.56         0            0
        Detection C          0            0.77         0

        Object 2 in the map is not matched
        Detection A is a new object
        '''

        iou_matrix = np.zeros((len(current_frame_detection_tuple), len(self.object_map_)))

        for i in range(iou_matrix.shape[0]):
            for j in range(iou_matrix.shape[1]):
                # calculate iou and fill the matrix
                iou = self.calculate_iou(current_frame_detection_tuple[i][0], self.object_map_[j].points_)
                # it may be a new detection if the IoU is below the threshold
                if iou <= self.iou_thresh_:
                    iou = 0
                iou_matrix[i, j] = -iou # fill with negative value as cost

        new_detection_idx = []
        for i in range(iou_matrix.shape[0]):
            # find new detections
            if np.sum(iou_matrix[i, :]) == 0:
                # this is a new detection
                self.object_map_.append(ObjectMarker(current_frame_detection_tuple[i][0], \
                                                        current_frame_detection_tuple[i][1]))
                new_detection_idx.append(i)

        not_matched_objects = []
        for j in range(iou_matrix.shape[1]):
            # find not matched objects
            if np.sum(iou_matrix[:, j]) == 0:
                not_matched_objects.append(j)

        # run the hungarian algorithm
        row_ind, col_ind = linear_sum_assignment(iou_matrix)

        for idx in range(row_ind.shape[0]):
            if (row_ind[idx] in new_detection_idx) or (col_ind[idx] in not_matched_objects):
                # either no match or new detection case
                continue
            
            matched_detection = row_ind[idx]
            matched_object_in_map = col_ind[idx]

            # if the match iou is too small, do nothing
            # caveat: add negative sign here
            if -iou_matrix[matched_detection, matched_object_in_map] < self.iou_thresh_:
                continue

            # otherwise, get a new convex hull
            self.object_map_[matched_object_in_map].add_associated_times()
            new_points = np.append(self.object_map_[matched_object_in_map].points_, \
                                    current_frame_detection_tuple[matched_detection][0],\
                                    axis = 0)
            
            new_convex_hull = ConvexHull(new_points)
            # update the object
            self.object_map_[matched_object_in_map].update_points(new_points[new_convex_hull.vertices, :])

    def check_duplicate(self, polygon_1, polygon_2):
        ''' Check if two detections are duplicates by
                checking whether one convex hull is (almost) completely inside the other one

        Args:
            polygon_1/polygon_2: each polygon given as a 2D numpy array
        '''

        poly_1 = shapely.geometry.Polygon(polygon_1)
        poly_2 = shapely.geometry.Polygon(polygon_2)
        intersection = poly_1.intersection(poly_2).area
        if (intersection / poly_1.area) > self.duplicate_iou_thresh_ \
             or (intersection / poly_2.area) > self.duplicate_iou_thresh_:
            return True
        else:
            return False


    def remove_duplication_detection(self, current_frame_detection_tuple):
        ''' Remove duplicate detections and combine them into one convex hull
        '''
        output = []
        for i in range(len(current_frame_detection_tuple)):
            no_duplicate = True

            for j in range(i + 1, len(current_frame_detection_tuple)):
                if self.check_duplicate(current_frame_detection_tuple[i][0], current_frame_detection_tuple[j][0]):
                    no_duplicate = False
                    new_points = np.append(current_frame_detection_tuple[i][0], \
                                            current_frame_detection_tuple[j][0],\
                                            axis = 0)
                    new_convex_hull = ConvexHull(new_points)
                    # store the merged convex hull in the later detection;
                    # it will be added to the output when the loop reaches it
                    current_frame_detection_tuple[j][0] = new_points[new_convex_hull.vertices, :]
            
            if no_duplicate:
                output.append(current_frame_detection_tuple[i])
        
        return output


    def convex_hull_callback(self, data):
        # callback for all the objects (convex hulls) detected in the current frame

        current_frame_detection_tuple = []  # all objects in this frame as [convex-hull points, companion marker] pairs

        data_idx = 0
        while data_idx < len(data.markers):

            object_convex_hull =  np.empty((0,2), float)
            for point in data.markers[data_idx].points:
                # add each point into the list
                object_convex_hull = np.append(object_convex_hull, [[point.x, point.y]], axis=0)
            
            current_frame_detection_tuple.append([object_convex_hull, data.markers[data_idx + 1]])

            data_idx += 2

        if (len(current_frame_detection_tuple) == 0):
            return
        
        current_frame_detection_tuple = self.remove_duplication_detection(current_frame_detection_tuple)

        # do data association using the Hungarian algorithm
        if len(self.object_map_) == 0:
            # initialize the map
            for det in current_frame_detection_tuple:
                self.object_map_.append(ObjectMarker(det[0], det[1]))
            return
        else:
            self.data_association(current_frame_detection_tuple)


    def people_callback(self, data):
        # traverse all the objects in the map;
        # if a person is within a specific distance, increment the occupied count

        # if the map hasn't been initialized yet, return
        if len(self.object_map_) == 0:
            return

        if self.tf_listener_.frameExists("map") and self.tf_listener_.frameExists("velodyne"):
            t = self.tf_listener_.getLatestCommonTime("map", "velodyne")

        
        # store all the detected people as shapely points in the map frame
        person_list = []
        for person_marker in data.markers[0].points:

            # transform to map frame
            pos_in_velodyne = PoseStamped()
            pos_in_velodyne.pose.position.x = person_marker.x
            pos_in_velodyne.pose.position.y = person_marker.y
            pos_in_velodyne.header.frame_id = "velodyne"
            pos_in_velodyne.pose.orientation.w = 1.0
            pos_in_map = self.tf_listener_.transformPose("map", pos_in_velodyne)


            person = shapely.geometry.Point(pos_in_map.pose.position.x,\
                                            pos_in_map.pose.position.y)
            person_list.append(person)
        
        
        for obj_idx in range(len(self.object_map_)):
            polygon = shapely.geometry.Polygon(self.object_map_[obj_idx].points_)
            if_occupied = False
            for person in person_list:
                distance = person.distance(polygon)
                if distance < self.dist_thresh_:
                    if_occupied = True
            
            if if_occupied == True:
                self.object_map_[obj_idx].add_occupied_times()
            else:
                self.object_map_[obj_idx].reset_occupied_times()
        
        for idx in range(len(self.object_map_)):
            # if an object has been occupied for more than a certain number of frames
            # set need clean to be true
            if self.object_map_[idx].occupied_ > self.need_clean_thresh_:
                self.object_map_[idx].set_need_clean()
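The data_association pipeline above builds an IoU score for every detection/map pair, negates the scores to form a cost matrix, and lets scipy's Hungarian solver pick the best matching. A minimal standalone sketch of that matching step is shown below; the polygons and the 0.3 threshold are made up for illustration and only mirror the role of self.iou_thresh_.

import numpy as np
import shapely.geometry
from scipy.optimize import linear_sum_assignment

def iou(poly_a, poly_b):
    # Intersection-over-union of two convex polygons given as Nx2 arrays.
    a = shapely.geometry.Polygon(poly_a)
    b = shapely.geometry.Polygon(poly_b)
    return a.intersection(b).area / a.union(b).area

# Two detections and two map objects (unit squares, some shifted apart).
square = np.array([[0., 0.], [1., 0.], [1., 1.], [0., 1.]])
detections = [square, square + 5.0]           # the second detection overlaps nothing
map_objects = [square + 0.2, square + 10.0]   # the second map object overlaps nothing

cost = np.zeros((len(detections), len(map_objects)))
for i, det in enumerate(detections):
    for j, obj in enumerate(map_objects):
        cost[i, j] = -iou(det, obj)           # negate: the solver minimises cost

rows, cols = linear_sum_assignment(cost)
for i, j in zip(rows, cols):
    if -cost[i, j] > 0.3:                     # same role as self.iou_thresh_
        print('detection %d matched to map object %d (IoU %.2f)' % (i, j, -cost[i, j]))
    else:
        print('detection %d is treated as a new object' % i)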
class SLAM(object):
  def __init__(self):
    rospy.Subscriber('/map', OccupancyGrid, self.callback)
    self._tf = TransformListener()
    self._occupancy_grid = None
    self._pose = np.array([np.nan, np.nan, np.nan], dtype=np.float32)
    
  def callback(self, msg):
    pass

  def update(self):
    # Get pose w.r.t. map.
    r0_pose = self.get_pose(LEADER)
    r1_pose = self.get_pose(FOLLOWER_1)
    r2_pose = self.get_pose(FOLLOWER_2)

    print('leader')
    print('\t X: ', r0_pose[X])
    print('\t Y: ', r0_pose[Y])
    print('\t YAW: ', r0_pose[YAW])

    print()

    print('follower 1')
    print('\t X: ', r1_pose[X])
    print('\t Y: ', r1_pose[Y])
    print('\t YAW: ', r1_pose[YAW])

    print()

    print('follower 2')
    print('\t X: ', r2_pose[X])
    print('\t Y: ', r2_pose[Y])
    print('\t YAW: ', r2_pose[YAW])

    print()
    print()
    print()



  def get_pose(self, robot):
    a = 'occupancy_grid'
    b = robot + '/base_link'
    if self._tf.frameExists(a) and self._tf.frameExists(b):
      try:
        t = rospy.Time(0)
        position, orientation = self._tf.lookupTransform('/' + a, '/' + b, t)
        pose = np.zeros(3, dtype=np.float32)
        pose[X] = position[X]
        pose[Y] = position[Y]
        _, _, pose[YAW] = euler_from_quaternion(orientation)
        return pose

      except Exception as e:
        print(e)
    else:
      print('Unable to find:', self._tf.frameExists(a), self._tf.frameExists(b))


  @property
  def ready(self):
    return self._occupancy_grid is not None and not np.isnan(self._pose[0])

  @property
  def pose(self):
    return self._pose

  @property
  def occupancy_grid(self):
    return self._occupancy_grid
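get_pose above keeps only the yaw component of the TF orientation. A tiny self-contained sketch of that conversion, with no TF tree involved (the 30-degree value is arbitrary):

import numpy as np
from tf.transformations import quaternion_from_euler, euler_from_quaternion

# Build a quaternion for a pure 30-degree yaw, then recover the yaw the same
# way SLAM.get_pose does with the transform it looks up.
yaw_in = np.deg2rad(30.0)
q = quaternion_from_euler(0.0, 0.0, yaw_in)   # returned as (x, y, z, w)
_, _, yaw_out = euler_from_quaternion(q)
print('yaw in: %.3f rad, yaw out: %.3f rad' % (yaw_in, yaw_out))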
class Inchworm(object):
    """ Default class to subscribe and publish to orthosis robots """
    def __init__(self, namespace="/", timestep=0.01):
        """ Initialize the class with the namespace and timestep as params """

        super(Inchworm, self).__init__()
        self._default_pub_rate = 10000
        self._namespace = namespace
        self._timestep = timestep

        self.tf = TransformListener()

        self._is_set_point_ctrl = bool(
            rospy.get_param(namespace + "set_point_enable"))

        self._is_set_point_ctrl = True
        self._walk = bool(rospy.get_param(namespace + "default_conf"))

        self._n_joints = int(rospy.get_param(namespace + "n_joints"))

        # Movable joints
        self._m_joints = rospy.get_param(namespace +
                                         "parameters/control/joints")
        # self._n_joints = len(self._m_joints)

        self._m_joints_dict = {
            typ: rospy.get_param(namespace + "all_joints/{}".format(typ))
            for typ in urdf.Joint.TYPES
        }

        self._fixed_joint_names = []
        self._floating_and_unknown_joint_names = []
        for k, v in self._m_joints_dict.items():
            if len(v) < 1:
                del self._m_joints_dict[k]
            else:
                if k == 'fixed':
                    self._fixed_joint_names = v
                    del self._m_joints_dict[k]
                elif k == 'floating' or k == 'unknown':
                    self._floating_and_unknown_joint_names = v
                    del self._m_joints_dict[k]

        # del self._floating_and_unknown_joint_names[0]

        self._links = {}
        for typ, joints in self._m_joints_dict.items():
            for joint_i, joint in enumerate(joints):
                if joint not in self._links.keys():
                    self._links[joint] = rospy.get_param(
                        namespace + "joints/{}/{}".format(typ, joint_i))

        self._fixed_links_and_joints = {
            joint:
            rospy.get_param(namespace + "joints/fixed/{}".format(joint_i))
            for joint_i, joint in enumerate(self._fixed_joint_names)
        }

        self.initializeRBDLModel()

        self._M = np.zeros([self._n_joints, self._n_joints])
        self._C = np.zeros([self._n_joints, self._n_joints])

        self._scale = rospy.get_param(namespace + "scale")

        # Init joint states with placeholder values
        self._joint_states = {
            joint: [1.0, 2.0, 3.0]
            for joint_i, joint in enumerate(self._m_joints)
        }

        self._joint_limits = {
            joint: rospy.get_param(namespace + "joint/limits/{}".format(joint))
            for joint in self._m_joints
        }

        # print self._joint_limits

        self.subToJointStates()

        # Setup effort publishers
        self._effort_flag = bool(
            rospy.get_param(namespace + "is_effort_enabled"))
        if self._effort_flag:
            self._joint_effort_pubs = {
                joint: rospy.Publisher(
                    (namespace +
                     "control/config/joint_effort_controller_joint_{}/command".
                     format(joint_i)),
                    Float64,
                    queue_size=10)
                for joint_i, joint in enumerate(self._m_joints)
            }
        else:
            self._joint_effort_pubs = {
                joint: rospy.Publisher(
                    (namespace +
                     "control/config/joint_position_controller_joint{}/command"
                     .format(joint_i)),
                    Float64,
                    queue_size=10)
                for joint_i, joint in enumerate(self._m_joints)
            }

        # Publish initial efforts to reach home position?
        for k, v in self._joint_effort_pubs.items():
            cmd = 0
            self._joint_effort_pubs[k].publish(cmd)

        return

    def jointStatesCb(self, data):
        """ Joint States Callback """

        for joint_i, joint in enumerate(self._m_joints):
            index = data.name.index(joint)
            msg = [
                data.position[index], data.velocity[index], data.effort[index]
            ]
            self._joint_states[joint] = msg

        return

    def subToJointStates(self):
        """ Subscribe to joint state messages if published """

        self._joint_states_sub = rospy.Subscriber(
            self._namespace + "joint_states", JointState, self.jointStatesCb)

        return

    def publishJointEfforts(self, cmd=None, effort=True):
        """ Publish cmd[arr] values to joints """
        # if effort:
        if cmd is None:
            cmd = [0] * len(self._joint_effort_pubs)

            for i, (k, v) in enumerate(self._joint_effort_pubs.items()):
                cmd[i] = 0.01 * np.sin(rospy.get_time() * np.pi / 2) * 1
                # print str(i) + " : " + str(cmd[i])
                self._joint_effort_pubs[k].publish(cmd[i])

        else:
            for i, c in enumerate(cmd):
                k = self._states_map.keys()[self._states_map.values().index(i)]
                self._joint_effort_pubs[k].publish(cmd[i])

        return

    def initializeRBDLModel(self):
        """ Load the URDF model using RBDL """

        rospack = rospkg.RosPack()
        rospack.list()

        root_path = rospack.get_path('inchworm_description')
        model_path = root_path + "/urdf/inchworm_description.urdf"
        # Create a new model
        self._model = rbdl.loadModel(model_path)

        self._q = np.zeros(self._model.q_size)
        self._qdot = np.zeros(self._model.qdot_size)
        self._qddot = np.zeros(self._model.qdot_size)
        self._tau = np.zeros(self._model.qdot_size)
        self._G = np.zeros(self._model.qdot_size)

        self._states_map = {
            joint: self._model.mJoints[self._model.GetBodyId(link[1])].q_index
            for joint, link in self._links.items()
        }

        self._unique_links = list()
        for link in self._links.values():
            if len(self._unique_links) == 0:
                self._unique_links.append(link[0])
            else:
                for i in range(2):
                    if link[i] in self._unique_links:
                        continue
                    else:
                        self._unique_links.append(link[i])

        flajv = self._fixed_links_and_joints.values()
        flajv_ = []
        for i in flajv:
            flajv_.append(i[1])

        for i, link in enumerate(self._unique_links):
            if link in flajv_:
                del self._unique_links[i]

        self._bodies_map = {
            link: (self._model.GetBodyId(link),
                   self._model.mBodies[self._model.GetBodyId(link)])
            for link in self._unique_links
        }

        self._end_effector_positions = []
        for frames in self._fixed_links_and_joints.values():
            if self.tf.frameExists(frames[0]) and self.tf.frameExists(
                    frames[1]):
                t = self.tf.getLatestCommonTime(frames[0], frames[1])
                position, quaternion = self.tf.lookupTransform(
                    frames[0], frames[1], t)
                self._end_effector_positions.extend(position)

        # print self._end_effector_positions

        return
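Inchworm creates one Float64 effort publisher per movable joint and drives them with a small sinusoid in publishJointEfforts. A stripped-down sketch of that pattern follows; the node name, topic template and joint list here are placeholders rather than the real parameter-server values.

import numpy as np
import rospy
from std_msgs.msg import Float64

rospy.init_node('effort_sketch')
joints = ['joint_0', 'joint_1', 'joint_2']   # placeholder joint names
pubs = {
    joint: rospy.Publisher(
        'control/config/joint_effort_controller_joint_{}/command'.format(i),
        Float64, queue_size=10)
    for i, joint in enumerate(joints)
}

rate = rospy.Rate(100)
while not rospy.is_shutdown():
    # Same low-amplitude sinusoid used in publishJointEfforts.
    cmd = 0.01 * np.sin(rospy.get_time() * np.pi / 2)
    for pub in pubs.values():
        pub.publish(cmd)
    rate.sleep()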
class yoboticsOdometry:
    def __init__(self):
        rospy.Subscriber("/yobotics/gazebo/contacts", Contacts,
                         self.contacts_callback)
        self.odom_publisher = rospy.Publisher("odom/raw",
                                              Odometry,
                                              queue_size=50)

        self.odom_broadcaster = tf.TransformBroadcaster()
        self.tf = TransformListener()

        self.foot_links = [
            "lf_foot_link", "rf_foot_link", "lh_foot_link", "rh_foot_link"
        ]

        self.nominal_foot_positions = [[0, 0], [0, 0], [0, 0], [0, 0]]
        self.prev_foot_positions = [[0, 0], [0, 0], [0, 0], [0, 0]]
        self.prev_theta = [0, 0, 0, 0]
        self.prev_stance_angles = [0, 0, 0, 0]
        self.prev_time = 0

        self.pos_x = 0
        self.pos_y = 0
        self.theta = 0

        self.publish_odom_tf(0, 0, 0, 0)
        rospy.sleep(1)

        for i in range(4):
            self.nominal_foot_positions[i] = self.get_foot_position(i)
            self.prev_foot_positions[i] = self.nominal_foot_positions[i]
            self.prev_theta[i] = math.atan2(self.prev_foot_positions[i][1],
                                            self.prev_foot_positions[i][0])

    def publish_odom(self, x, y, z, theta, vx, vy, vth):
        current_time = rospy.Time.now()
        odom = Odometry()
        odom.header.stamp = current_time
        odom.header.frame_id = "odom"

        odom_quat = tf.transformations.quaternion_from_euler(0, 0, theta)
        odom.pose.pose = Pose(Point(x, y, z), Quaternion(*odom_quat))

        odom.child_frame_id = "base_footprint"
        odom.twist.twist = Twist(Vector3(vx, vy, 0), Vector3(0, 0, vth))

        # publish the message
        self.odom_publisher.publish(odom)

    def publish_odom_tf(self, x, y, z, theta):
        current_time = rospy.Time.now()

        odom_quat = quaternion_from_euler(0, 0, theta)

        self.odom_broadcaster.sendTransform((x, y, z), odom_quat, current_time,
                                            "base_footprint", "odom")

    def get_foot_position(self, leg_id):
        if self.tf.frameExists("base_link" and self.foot_links[leg_id]):
            t = self.tf.getLatestCommonTime("base_link",
                                            self.foot_links[leg_id])
            position, quaternion = self.tf.lookupTransform(
                "base_link", self.foot_links[leg_id], t)
            return position
        else:
            return 0, 0, 0

    def contacts_callback(self, data):
        self.leg_contact_states = data.contacts

    def is_almost_equal(self, a, b, max_rel_diff):
        diff = abs(a - b)
        a = abs(a)
        b = abs(b)
        largest = 0
        if b > a:
            largest = b
        else:
            largest = a

        if diff <= (largest * max_rel_diff):
            return True

        return False

    def run(self):
        while not rospy.is_shutdown():
            leg_contact_states = self.leg_contact_states
            current_foot_position = [[0, 0], [0, 0], [0, 0], [0, 0]]
            stance_angles = [0, 0, 0, 0]
            current_theta = [0, 0, 0, 0]
            delta_theta = 0
            in_xy = False
            total_contact = sum(leg_contact_states)
            x_sum = 0
            y_sum = 0
            theta_sum = 0

            for i in range(4):
                current_foot_position[i] = self.get_foot_position(i)

            for i in range(4):
                current_theta[i] = math.atan2(current_foot_position[i][1],
                                              current_foot_position[i][0])
                from_nominal_x = self.nominal_foot_positions[i][
                    0] - current_foot_position[i][0]
                from_nominal_y = self.nominal_foot_positions[i][
                    1] - current_foot_position[i][1]
                stance_angles[i] = math.atan2(from_nominal_y, from_nominal_x)
                # print stance_angles
                # check whether the foot is moving along the x or y axis
                if self.is_almost_equal(abs(stance_angles[i]), 1.5708,
                                        0.001) or self.is_almost_equal(
                                            abs(stance_angles[i]), 3.1416,
                                            0.001):
                    in_xy = True

                if current_foot_position[i] is not None and \
                        leg_contact_states[i] and total_contact == 2:
                    delta_x = (self.prev_foot_positions[i][0] -
                               current_foot_position[i][0]) / 2
                    delta_y = (self.prev_foot_positions[i][1] -
                               current_foot_position[i][1]) / 2

                    x = delta_x * math.cos(self.theta) - delta_y * math.sin(
                        self.theta)
                    y = delta_x * math.sin(self.theta) + delta_y * math.cos(
                        self.theta)

                    x_sum += delta_x
                    y_sum += delta_y

                    self.pos_x += x
                    self.pos_y += y

                    if not in_xy:
                        delta_theta = self.prev_theta[i] - current_theta[i]
                        theta_sum += delta_theta
                        self.theta += delta_theta / 2

            now = rospy.Time.now().to_sec()
            dt = now - self.prev_time

            vx = x_sum / dt
            vy = y_sum / dt
            vth = theta_sum / dt

            self.publish_odom(self.pos_x, self.pos_y, 0, self.theta, vx, vy,
                              vth)

            # self.publish_odom_tf(self.pos_x, self.pos_y, 0, self.theta)

            self.prev_foot_positions = current_foot_position
            self.prev_theta = current_theta
            self.prev_stance_angles = stance_angles

            self.prev_time = now
            rospy.sleep(0.01)
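A typical entry point for the odometry class above would look roughly like the sketch below (the node name is illustrative, and the class is assumed to live in the same script); run() blocks until shutdown and expects the contacts topic to be publishing.

if __name__ == '__main__':
    rospy.init_node('yobotics_odometry', anonymous=True)
    odometry = yoboticsOdometry()
    odometry.run()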
Beispiel #34
0
    def run(self, aut_path=None):
        """Intialize all NL components."""
        # pylint: disable=W0603
        global WORLD_KNOWLEDGE
    
        # Start the NL pipeline
        if not aut_path:
            print "Starting NL pipeline..."
            init_pipes()
        else:
            print "Skipping loading nlpipeline because an automaton was loaded"
     
        # Start the ROS node
        print "Initializing ROS node..."
        rospy.init_node('nlproxy')
    
        # Set up the state mgr and its callbacks
        print "Starting state manager..."
        self.state_mgr = StateManager(self)
        self.state_mgr.set_basedir(LTLGEN_BASE_DIR)
    
        # Load the automaton if needed
        if aut_path:
            self.state_mgr.load_test_automaton(aut_path, False)
    
        # Create world knowledge
        print "Starting knowledge..."
        WORLD_KNOWLEDGE = Knowledge.Knowledge(self)
        self.state_mgr.world_knowledge = WORLD_KNOWLEDGE
        
        # Wait a little for ROS to avoid timing startup issues.
        print "Waiting for ROS node to settle..."
        time.sleep(1)
        
        # Set up ROS listening
        print "Subscribing to notifications..."
        # Set up state manager's sending and listening
        pub = add_ltl_publisher()
        self.state_mgr.set_publisher(pub)
        add_subscriber(LTL_ENVIRONMENT_TOPIC, self.state_mgr.process_sensor_data)
        add_subscriber(LTL_ENVIRONMENT_TOPIC, WORLD_KNOWLEDGE.process_sensor_data)

        # Create the input comm_proxy and iPad connections
        print "Starting comm_proxy..."
        self.comm_proxy = CallbackSocket(self.text_port)
        self.comm_proxy.register_callback(self.process_text)
        self.ipad_conn = iPadConnection(self.map_port)
        self.ipad_conn.register_rename_callback(Knowledge.rename_entity)
        self.ipad_conn.register_text_callback(self.process_text)
        # TODO Add highlight callback
        self.map_proxy = MapProxy(self.ipad_conn)
        add_subscriber(LTL_ENVIRONMENT_TOPIC, self.ipad_conn.add_icons)        
        WORLD_KNOWLEDGE.map_proxy = self.map_proxy

        # Set up odometry forwarding to the ipad
        tf = TransformListener()
        while not tf.frameExists("/map"):
            rospy.logwarn("Not ready for transforms yet")
            rospy.sleep(1.0)
        position_proxy = RobotPositionProxy(self.ipad_conn,tf)
        rospy.Subscriber(ODOM_TOPIC, Odometry, position_proxy.forward_position)

        # Set up getting directions
        direction_proxy = DirectionProxy()
        rospy.Subscriber(LOCATION_TOPIC, String, direction_proxy.set_location)
        WORLD_KNOWLEDGE.direction_proxy = direction_proxy
        
        print "NLMaster startup complete!"
        print "*" * 80
    
        # Finally, wait for input before quitting
        try:
            while True:
                text = raw_input("").strip()
                if text == "q":
                    break
        except (KeyboardInterrupt, EOFError):
            pass
        except:
            raise
        finally:
            # Only shutdown the pipeline if we actually were taking language input
            if not aut_path:
                print "Shutting down NL pipeline..."
                close_pipes()
            self.comm_proxy.shutdown()
            self.ipad_conn.shutdown()
    
        sys.exit()
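The startup code above polls frameExists("/map") once per second before wiring up the odometry forwarding. An alternative sketch that blocks on the transform itself; '/base_link' and the 30-second timeout are placeholders, not values taken from the original node.

import rospy
import tf
from tf import TransformListener

rospy.init_node('tf_wait_sketch')
listener = TransformListener()
try:
    # Block until map -> base_link becomes available, or give up after 30 s.
    listener.waitForTransform('/map', '/base_link', rospy.Time(0),
                              rospy.Duration(30.0))
except tf.Exception:
    rospy.logwarn('No /map transform after 30 s; continuing without it')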
Beispiel #35
0
class KeyPointManager(object):
    def __init__(self):
        self.tf = TransformListener()
        self.keyPointList = list()

    def add(self, marker):
        for i in range(len(self.keyPointList)):
            if (self.keyPointList[i].id == marker.id):
                return
        position = self.transformMarkerToWorld(marker)
        k = KeyPoint(marker.id, Point(position[0], position[1], position[2]),
                     Quaternion(0., 0., 0., 1.))
        self.keyPointList.append(k)
        self.addWaypointMarker(k)
        rospy.loginfo('Marker is added to following position')
        rospy.loginfo(k.pose)
        pass

    def getWaypoints(self):
        waypoints = list()
        for i in range(len(self.keyPointList)):
            waypoints.append(self.keyPointList[i].pose)
        return waypoints

    def keyPointListComplete(self):
        if (len(self.keyPointList) == 5):
            self.keyPointList.sort(key=lambda x: x.id, reverse=True)
            return True
        return False

    def markerHasValidId(self, marker):
        if (marker.id >= 61) and (marker.id <= 65):
            return True
        return False

    def transformMarkerToWorld(self, marker):
        markerTag = "ar_marker_" + str(marker.id)
        rospy.loginfo(markerTag)
        if self.tf.frameExists("map") and self.tf.frameExists(markerTag):
            self.tf.waitForTransform("map", markerTag, rospy.Time(),
                                     rospy.Duration(4.0))
            t = self.tf.getLatestCommonTime("map", markerTag)
            position, quaternion = self.tf.lookupTransform("map", markerTag, t)
            return self.shiftPoint(position, quaternion)

    def shiftPoint(self, position, quaternion):
        try:
            euler = euler_from_quaternion(quaternion)
            yaw = euler[2]
            tf_mat = np.array([[np.cos(yaw), -np.sin(yaw), 0, position[0]],
                               [np.sin(yaw), np.cos(yaw), 0, position[1]],
                               [0, 0, 1, 0],
                               [0, 0, 0, 1]])
            displacement = np.array([[1, 0, 0, 0], [0, 1, 0, 0.35],
                                     [0, 0, 1, 0], [0, 0, 0, 1]])
            point_map = np.dot(tf_mat, displacement)
            position = (point_map[0, 3], point_map[1, 3], 0)
        except Exception as inst:
            print type(inst)  # the exception instance
            print inst.args  # arguments stored in .args
            print inst
        return position

    def addWaypointMarker(self, keyPoint):
        rospy.loginfo('Publishing marker')
        # Set up our waypoint markers
        marker_scale = 0.2
        marker_lifetime = 0  # 0 is forever
        marker_ns = 'waypoints'
        marker_id = keyPoint.id
        marker_color = {'r': 0.0, 'g': 1.0, 'b': 0.0, 'a': 1.0}

        # Initialize the marker points list.
        self.waypoint_markers = Marker()
        self.waypoint_markers.ns = marker_ns
        self.waypoint_markers.id = marker_id
        self.waypoint_markers.type = Marker.CUBE_LIST
        self.waypoint_markers.action = Marker.ADD
        self.waypoint_markers.lifetime = rospy.Duration(marker_lifetime)
        self.waypoint_markers.scale.x = marker_scale
        self.waypoint_markers.scale.y = marker_scale
        self.waypoint_markers.color.r = marker_color['r']
        self.waypoint_markers.color.g = marker_color['g']
        self.waypoint_markers.color.b = marker_color['b']
        self.waypoint_markers.color.a = marker_color['a']

        self.waypoint_markers.header.frame_id = 'map'
        self.waypoint_markers.header.stamp = rospy.Time.now()
        self.waypoint_markers.points = list()
        p = Point(keyPoint.pose.position.x, keyPoint.pose.position.y,
                  keyPoint.pose.position.z)
        self.waypoint_markers.points.append(p)

        # Publish the waypoint markers
        self.marker_pub = rospy.Publisher('waypoint_markers', Marker,
                                          queue_size=10)

        self.marker_pub.publish(self.waypoint_markers)
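shiftPoint composes a yaw-only transform of the marker with a fixed 0.35 m offset along the marker's local y axis. A numeric check of that composition outside ROS; the 90-degree yaw and the marker position are chosen only to make the result easy to verify by hand.

import numpy as np

yaw = np.pi / 2.0                      # marker rotated 90 degrees in the map
position = (1.0, 2.0, 0.0)             # marker position in the map frame

tf_mat = np.array([[np.cos(yaw), -np.sin(yaw), 0, position[0]],
                   [np.sin(yaw), np.cos(yaw), 0, position[1]],
                   [0, 0, 1, 0],
                   [0, 0, 0, 1]])
displacement = np.array([[1, 0, 0, 0.0],
                         [0, 1, 0, 0.35],   # 0.35 m along the marker's y axis
                         [0, 0, 1, 0.0],
                         [0, 0, 0, 1.0]])
point_map = np.dot(tf_mat, displacement)
# With a 90-degree yaw the marker's +y axis points along map -x,
# so the shifted waypoint ends up at (1.0 - 0.35, 2.0):
print(point_map[0, 3], point_map[1, 3])    # -> 0.65 2.0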
Beispiel #36
0
class GripperMarkers:

    _offset = 0.09

    def __init__(self, side_prefix):

        self.side_prefix = side_prefix
        self._im_server = InteractiveMarkerServer('ik_request_markers_' +
                                                  self.side_prefix)
        self._tf_listener = TransformListener()
        self._menu_handler = MenuHandler()

        self._menu_handler.insert('Move arm here',
                                  callback=self.move_to_pose_cb)

        self.r_joint_names = [
            'r_shoulder_pan_joint', 'r_shoulder_lift_joint',
            'r_upper_arm_roll_joint', 'r_elbow_flex_joint',
            'r_forearm_roll_joint', 'r_wrist_flex_joint', 'r_wrist_roll_joint'
        ]
        self.l_joint_names = [
            'l_shoulder_pan_joint', 'l_shoulder_lift_joint',
            'l_upper_arm_roll_joint', 'l_elbow_flex_joint',
            'l_forearm_roll_joint', 'l_wrist_flex_joint', 'l_wrist_roll_joint'
        ]

        # Create a trajectory action client
        r_traj_controller_name = None
        l_traj_controller_name = None
        if self.side_prefix == 'r':
            r_traj_controller_name = '/r_arm_controller/joint_trajectory_action'
            self.r_traj_action_client = SimpleActionClient(
                r_traj_controller_name, JointTrajectoryAction)
            rospy.loginfo('Waiting for a response from the trajectory ' +
                          'action server for RIGHT arm...')
            self.r_traj_action_client.wait_for_server()

        else:
            l_traj_controller_name = '/l_arm_controller/joint_trajectory_action'
            self.l_traj_action_client = SimpleActionClient(
                l_traj_controller_name, JointTrajectoryAction)
            rospy.loginfo('Waiting for a response from the trajectory ' +
                          'action server for LEFT arm...')
            self.l_traj_action_client.wait_for_server()

        self.is_control_visible = False
        self.ee_pose = self.get_ee_pose()
        self.ik = IK(side_prefix)
        self.update_viz()
        self._menu_handler.apply(self._im_server,
                                 'ik_target_marker_' + self.side_prefix)
        self._im_server.applyChanges()
        print self.ik

    def get_ee_pose(self):
        from_frame = 'base_link'
        to_frame = self.side_prefix + '_wrist_roll_link'
        try:
            if self._tf_listener.frameExists(
                    from_frame) and self._tf_listener.frameExists(to_frame):
                t = self._tf_listener.getLatestCommonTime(from_frame, to_frame)
                # t = rospy.Time.now()
                (pos, rot) = self._tf_listener.lookupTransform(
                    to_frame, from_frame, t)  # Note argument order :(
            else:
                rospy.logerr(
                    'TF frames do not exist; could not get end effector pose.')
                pos = [1.0, 0.0, 1.0]
                rot = [0.0, 0.0, 0.0, 1.0]
        except Exception as err:
            rospy.logerr('Could not get end effector pose through TF.')
            rospy.logerr(err)
            pos = [1.0, 0.0, 1.0]
            rot = [0.0, 0.0, 0.0, 1.0]

        return Pose(Point(pos[0], pos[1], pos[2]),
                    Quaternion(rot[0], rot[1], rot[2], rot[3]))

    def move_to_pose_cb(self, feedback):
        rospy.loginfo('You pressed the `Move arm here` button from the menu.')
        '''Moves the arm to the desired joints'''
        print feedback
        time_to_joint = 2.0
        positions = self.ik.get_ik_for_ee(feedback.pose)
        velocities = [0] * len(positions)
        traj_goal = JointTrajectoryGoal()
        traj_goal.trajectory.header.stamp = (rospy.Time.now() +
                                             rospy.Duration(0.1))
        traj_goal.trajectory.points.append(
            JointTrajectoryPoint(
                positions=positions,
                velocities=velocities,
                time_from_start=rospy.Duration(time_to_joint)))

        if (self.side_prefix == 'r'):
            traj_goal.trajectory.joint_names = self.r_joint_names
            self.r_traj_action_client.send_goal(traj_goal)
        else:
            traj_goal.trajectory.joint_names = self.l_joint_names
            self.l_traj_action_client.send_goal(traj_goal)
        pass

    def marker_clicked_cb(self, feedback):
        if feedback.event_type == InteractiveMarkerFeedback.POSE_UPDATE:
            self.ee_pose = feedback.pose
            self.update_viz()
            self._menu_handler.reApply(self._im_server)
            self._im_server.applyChanges()

        elif feedback.event_type == InteractiveMarkerFeedback.BUTTON_CLICK:
            rospy.loginfo('Changing visibility of the pose controls.')
            if (self.is_control_visible):
                self.is_control_visible = False
            else:
                self.is_control_visible = True
        else:
            rospy.loginfo('Unhandled event: ' + str(feedback.event_type))

    def update_viz(self):
        menu_control = InteractiveMarkerControl()
        menu_control.interaction_mode = InteractiveMarkerControl.BUTTON
        menu_control.always_visible = True
        frame_id = 'base_link'
        pose = self.ee_pose

        menu_control = self._add_gripper_marker(menu_control)
        text_pos = Point()
        text_pos.x = pose.position.x
        text_pos.y = pose.position.y
        text_pos.z = pose.position.z + 0.1
        text = 'x=' + str(pose.position.x) + ' y=' + str(
            pose.position.y) + ' z=' + str(pose.position.z)
        menu_control.markers.append(
            Marker(type=Marker.TEXT_VIEW_FACING,
                   id=0,
                   scale=Vector3(0, 0, 0.03),
                   text=text,
                   color=ColorRGBA(0.0, 0.0, 0.0, 0.5),
                   header=Header(frame_id=frame_id),
                   pose=Pose(text_pos, Quaternion(0, 0, 0, 1))))
        int_marker = InteractiveMarker()
        int_marker.name = 'ik_target_marker_' + self.side_prefix
        int_marker.header.frame_id = frame_id
        int_marker.pose = pose
        int_marker.scale = 0.2
        self._add_6dof_marker(int_marker)
        int_marker.controls.append(menu_control)
        self._im_server.insert(int_marker, self.marker_clicked_cb)

    def _add_gripper_marker(self, control):
        is_hand_open = False
        if is_hand_open:
            angle = 28 * numpy.pi / 180.0
        else:
            angle = 0
        transform1 = tf.transformations.euler_matrix(0, 0, angle)
        transform1[:3, 3] = [0.07691 - GripperMarkers._offset, 0.01, 0]
        transform2 = tf.transformations.euler_matrix(0, 0, -angle)
        transform2[:3, 3] = [0.09137, 0.00495, 0]
        t_proximal = transform1
        t_distal = tf.transformations.concatenate_matrices(
            transform1, transform2)
        mesh1 = self._make_mesh_marker()
        mesh1.mesh_resource = (
            'package://pr2_description/meshes/gripper_v0/gripper_palm.dae')
        mesh1.pose.position.x = -GripperMarkers._offset
        mesh1.pose.orientation.w = 1
        mesh2 = self._make_mesh_marker()
        mesh2.mesh_resource = (
            'package://pr2_description/meshes/gripper_v0/l_finger.dae')
        mesh2.pose = GripperMarkers.get_pose_from_transform(t_proximal)
        mesh3 = self._make_mesh_marker()
        mesh3.mesh_resource = (
            'package://pr2_description/meshes/gripper_v0/l_finger_tip.dae')
        mesh3.pose = GripperMarkers.get_pose_from_transform(t_distal)
        quat = tf.transformations.quaternion_multiply(
            tf.transformations.quaternion_from_euler(numpy.pi, 0, 0),
            tf.transformations.quaternion_from_euler(0, 0, angle))
        transform1 = tf.transformations.quaternion_matrix(quat)
        transform1[:3, 3] = [0.07691 - GripperMarkers._offset, -0.01, 0]
        transform2 = tf.transformations.euler_matrix(0, 0, -angle)
        transform2[:3, 3] = [0.09137, 0.00495, 0]
        t_proximal = transform1
        t_distal = tf.transformations.concatenate_matrices(
            transform1, transform2)
        mesh4 = self._make_mesh_marker()
        mesh4.mesh_resource = (
            'package://pr2_description/meshes/gripper_v0/l_finger.dae')
        mesh4.pose = GripperMarkers.get_pose_from_transform(t_proximal)
        mesh5 = self._make_mesh_marker()
        mesh5.mesh_resource = (
            'package://pr2_description/meshes/gripper_v0/l_finger_tip.dae')
        mesh5.pose = GripperMarkers.get_pose_from_transform(t_distal)

        control.markers.append(mesh1)
        control.markers.append(mesh2)
        control.markers.append(mesh3)
        control.markers.append(mesh4)
        control.markers.append(mesh5)
        return control

    @staticmethod
    def get_pose_from_transform(transform):
        pos = transform[:3, 3].copy()
        rot = tf.transformations.quaternion_from_matrix(transform)
        return Pose(Point(pos[0], pos[1], pos[2]),
                    Quaternion(rot[0], rot[1], rot[2], rot[3]))

    def _make_mesh_marker(self):
        mesh = Marker()
        mesh.mesh_use_embedded_materials = False
        mesh.type = Marker.MESH_RESOURCE
        mesh.scale.x = 1.0
        mesh.scale.y = 1.0
        mesh.scale.z = 1.0
        print self
        ik_solution = self.ik.get_ik_for_ee(self.ee_pose)
        if (ik_solution is None):
            mesh.color = ColorRGBA(1.0, 0.0, 0.0, 0.6)
        else:
            mesh.color = ColorRGBA(0.0, 1.0, 0.0, 0.6)
        return mesh

    def _add_6dof_marker(self, int_marker):
        is_fixed = True
        control = self._make_6dof_control('rotate_x', Quaternion(1, 0, 0, 1),
                                          False, is_fixed)
        int_marker.controls.append(control)
        control = self._make_6dof_control('move_x', Quaternion(1, 0, 0, 1),
                                          True, is_fixed)
        int_marker.controls.append(control)
        control = self._make_6dof_control('rotate_z', Quaternion(0, 1, 0, 1),
                                          False, is_fixed)
        int_marker.controls.append(control)
        control = self._make_6dof_control('move_z', Quaternion(0, 1, 0, 1),
                                          True, is_fixed)
        int_marker.controls.append(control)
        control = self._make_6dof_control('rotate_y', Quaternion(0, 0, 1, 1),
                                          False, is_fixed)
        int_marker.controls.append(control)
        control = self._make_6dof_control('move_y', Quaternion(0, 0, 1, 1),
                                          True, is_fixed)
        int_marker.controls.append(control)

    def _make_6dof_control(self, name, orientation, is_move, is_fixed):
        control = InteractiveMarkerControl()
        control.name = name
        control.orientation = orientation
        control.always_visible = False
        if (self.is_control_visible):
            if is_move:
                control.interaction_mode = InteractiveMarkerControl.MOVE_AXIS
            else:
                control.interaction_mode = InteractiveMarkerControl.ROTATE_AXIS
        else:
            control.interaction_mode = InteractiveMarkerControl.NONE
        if is_fixed:
            control.orientation_mode = InteractiveMarkerControl.FIXED
        return control
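A minimal entry point for the interactive marker server above might look like the following sketch (the node name is illustrative and the class is assumed to be in the same script); note that the constructor blocks until the corresponding arm trajectory action server responds.

if __name__ == '__main__':
    rospy.init_node('ik_request_markers')
    GripperMarkers('r')   # or 'l' for the left arm
    rospy.spin()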
Beispiel #37
0
class MapRelay(object):
  def __init__(self, master_relay, name, map_topic_name, relay_map_topic_suffix, max_dist):
    self._name = name
    self._namespace = '/' + name + '/'

    self.subscribe_topic = self._namespace + map_topic_name
    rospy.Subscriber(self.subscribe_topic, OccupancyGrid, self.callback)
    self.publish_topic = self._namespace + relay_map_topic_suffix
    self.publisher = rospy.Publisher(self.publish_topic, OccupancyGrid, queue_size = 5)

    self.master_relay = master_relay
    self.max_dist = max_dist
    self._tf = TransformListener()
    self._occupancy_grid = None
    self._occupancy_grid_pub = None
    self._pose = np.array([np.nan, np.nan, np.nan], dtype=np.float32)
    
  def callback(self, msg):
    # self.update_pose()
    self._occupancy_grid = msg
    # if self.master_relay is None:
    #   self._occupancy_grid_pub = msg
    # else:
    #   self.master_relay.update_pose()
    #   p1 = self.master_relay.pose
    #   # p2 = self.master_relay.get_pose(self._name)
    #   p2 = self.pose
    #   dist = np.linalg.norm([p1[X] - p2[X], p1[Y] - p2[Y]])
    #   print("Distance ", self._name, self.master_relay._name, dist)
    #   if dist <= self.max_dist:
    #     print("Map updated ", dist, self.max_dist)
    #     self._occupancy_grid = msg

  def publish_map(self):
    if self.master_relay is None:
      self._occupancy_grid_pub = self._occupancy_grid
    else:
      dist = self.get_distance(self.master_relay._name)
      if dist is not None and dist <= self.max_dist:
        # print("AAAAAAAAAAAAAAAA Map updated ", dist, self.max_dist)
        self._occupancy_grid_pub = self._occupancy_grid
      # self.master_relay.update_pose()
      # self.update_pose()
      # p1 = self.master_relay.pose
      # # p2 = self.master_relay.get_pose(self._name)
      # p2 = self.pose
      # if not np.isnan(p2[X]) and not np.isnan(p1[X]):
      #   dist = np.linalg.norm([p1[X] - p2[X], p1[Y] - p2[Y]])
      #   print("AAAAAAAAAAAAAAA Distance ", self.master_relay._name, self._name, p1, p2, dist)
      #   if dist <= self.max_dist:
      #     print("AAAAAAAAAAAAAAAA Map updated ", dist, self.max_dist)
      #     self._occupancy_grid_pub = self._occupancy_grid
      # else:
      #   print("AAAAAAAAAAAAAAAAA Nan ", self.master_relay._name, self._name, p1, p2)

    if self._occupancy_grid_pub is not None:
      self.publisher.publish(self._occupancy_grid_pub)
      # print("Published")
  
  def get_distance(self, robot_name):
    a = self._name + '/base_link'
    b = robot_name + '/base_link'
    dist = None
    try:
      t = rospy.Time(0)
      position, orientation = self._tf.lookupTransform(a, b, t)
      dist = np.linalg.norm([position[X], position[Y]])
      # print(position, dist)
    except Exception as e:
      print(e)
    return dist
    

  def update_pose(self):
    # Get pose w.r.t. map.
    a = self._name + '/map'
    b = self._name + '/base_link'
    if True: # self._tf.frameExists(a) and self._tf.frameExists(b):
      try:
        t = rospy.Time(0)
        position, orientation = self._tf.lookupTransform(a, b, t)
        self._pose[X] = position[X]
        self._pose[Y] = position[Y]
        _, _, self._pose[YAW] = euler_from_quaternion(orientation)
        # print('pose updated')
        # print(self._pose)
      except Exception as e:
        print(e)
        print('Unable to find:', a, self._tf.frameExists(a), b, self._tf.frameExists(b))
    else:
      print('Unable to find:', a, self._tf.frameExists(a), b, self._tf.frameExists(b))
    pass

  def get_pose(self, robot_name):
    # Get pose w.r.t. map.
    a = self._name + '/map'
    b = robot_name + '/base_link'
    if self._tf.frameExists(a) and self._tf.frameExists(b):
      try:
        t = rospy.Time(0)
        position, orientation = self._tf.lookupTransform(a, b, t)
        self._pose[X] = position[X]
        self._pose[Y] = position[Y]
        _, _, self._pose[YAW] = euler_from_quaternion(orientation)
        # print('pose updated')
        # print(self._pose)
      except Exception as e:
        print(e)
    else:
      print('Unable to find:', a, self._tf.frameExists(a), b, self._tf.frameExists(b))
    pass

  @property
  def ready(self):
    return self._occupancy_grid is not None # and not np.isnan(self._pose[0])

  @property
  def pose(self):
    return self._pose

  @property
  def occupancy_grid(self):
    return self._occupancy_grid
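A rough sketch of how the relay could be wired up and driven; the robot names, topic names and the 2 m range below are invented for the example rather than taken from an original launch file.

if __name__ == '__main__':
    rospy.init_node('map_relay')
    master = MapRelay(None, 'robot0', 'map', 'map_relay', max_dist=2.0)
    slave = MapRelay(master, 'robot1', 'map', 'map_relay', max_dist=2.0)
    rate = rospy.Rate(1)
    while not rospy.is_shutdown():
        master.publish_map()
        slave.publish_map()
        rate.sleep()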
class MergePoses:

    def __init__(self):

        self.avg_pose = None
        self.tl = TransformListener()

        self.topics = rospy.get_param('~poses',[])
        print self.topics
        if len(self.topics) == 0:
            rospy.logerr('Please provide a poses list as input parameter')
            return
        self.output_pose = rospy.get_param('~output_pose','ar_avg_pose')
        self.output_frame = rospy.get_param('~output_frame', 'rf_map')
        
        self.subscribers = []
        for i in self.topics:
            subi = rospy.Subscriber(i, PoseStamped, self.callback, queue_size=1)
            self.subscribers.append(subi)

        self.pub = rospy.Publisher(self.output_pose, PoseStamped, queue_size=1)
            
        self.mutex_avg = threading.Lock()
        self.mutex_t = threading.Lock()
        
        self.transformations = {}
        
    def callback(self, pose_msg):

        # get transformation to common frame
        position = None
        quaternion = None
        if self.transformations.has_key(pose_msg.header.frame_id):
            position, quaternion = self.transformations[pose_msg.header.frame_id]
        else:
            if self.tl.frameExists(pose_msg.header.frame_id) and \
            self.tl.frameExists(self.output_frame):
                t = self.tl.getLatestCommonTime(self.output_frame, pose_msg.header.frame_id)
                position, quaternion = self.tl.lookupTransform(self.output_frame,
                                                               pose_msg.header.frame_id, t)
                self.mutex_t.acquire()
                self.transformations[pose_msg.header.frame_id] = (position, quaternion)
                self.mutex_t.release()
                rospy.loginfo("Got static transform for %s" % pose_msg.header.frame_id)

        # transform pose
        framemat = self.tl.fromTranslationRotation(position, quaternion)

        posemat = self.tl.fromTranslationRotation([pose_msg.pose.position.x,
                                                   pose_msg.pose.position.y,
                                                   pose_msg.pose.position.z],
                                                  [pose_msg.pose.orientation.x,
                                                   pose_msg.pose.orientation.y,
                                                   pose_msg.pose.orientation.z,
                                                   pose_msg.pose.orientation.w])

        newmat = numpy.dot(framemat,posemat)
        
        xyz = tuple(translation_from_matrix(newmat))[:3]
        quat = tuple(quaternion_from_matrix(newmat))

        tmp_pose = PoseStamped()
        tmp_pose.header.stamp = pose_msg.header.stamp
        tmp_pose.header.frame_id = self.output_frame
        tmp_pose.pose = Pose(Point(*xyz),Quaternion(*quat))
        
        tmp_angles = euler_from_quaternion([tmp_pose.pose.orientation.x,
                                            tmp_pose.pose.orientation.y,
                                            tmp_pose.pose.orientation.z,
                                            tmp_pose.pose.orientation.w])
        
        # compute average
        self.mutex_avg.acquire()
        
        if self.avg_pose == None or (pose_msg.header.stamp - self.avg_pose.header.stamp).to_sec() > 0.5:
            self.avg_pose = tmp_pose
        else:            
            self.avg_pose.header.stamp = pose_msg.header.stamp
            a = 0.7
            b = 0.3
            self.avg_pose.pose.position.x = a*self.avg_pose.pose.position.x + b*tmp_pose.pose.position.x
            self.avg_pose.pose.position.y = a*self.avg_pose.pose.position.y + b*tmp_pose.pose.position.y
            self.avg_pose.pose.position.z = a*self.avg_pose.pose.position.z + b*tmp_pose.pose.position.z

            angles = euler_from_quaternion([self.avg_pose.pose.orientation.x,
                                            self.avg_pose.pose.orientation.y,
                                            self.avg_pose.pose.orientation.z,
                                            self.avg_pose.pose.orientation.w])
            angles = list(angles)
            angles[0] = avgAngles(angles[0], tmp_angles[0], 0.7, 0.3)
            angles[1] = avgAngles(angles[1], tmp_angles[1], 0.7, 0.3)
            angles[2] = avgAngles(angles[2], tmp_angles[2], 0.7, 0.3)

            q = quaternion_from_euler(angles[0], angles[1], angles[2])
            
            self.avg_pose.pose.orientation.x = q[0]
            self.avg_pose.pose.orientation.y = q[1]
            self.avg_pose.pose.orientation.z = q[2]
            self.avg_pose.pose.orientation.w = q[3]

        self.pub.publish(self.avg_pose)
            
        self.mutex_avg.release()
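The smoothing above leans on an avgAngles helper defined elsewhere in the file. A plausible drop-in with the same call signature is sketched below; it is an assumption about the helper's behaviour (a weighted circular average that survives the +/-pi wrap), not the original implementation.

import math

def avgAngles(angle_a, angle_b, weight_a, weight_b):
    # Assumed behaviour: weighted average of two angles computed on the unit
    # circle, so values on either side of the +/-pi wrap average correctly.
    x = weight_a * math.cos(angle_a) + weight_b * math.cos(angle_b)
    y = weight_a * math.sin(angle_a) + weight_b * math.sin(angle_b)
    return math.atan2(y, x)

# Angles just either side of the wrap average to a value near pi,
# instead of the meaningless ~1.24 rad a plain weighted mean would give.
print(avgAngles(3.1, -3.1, 0.7, 0.3))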
class Rotate(smach.State):
  #class handles the rotation until program is stopped
  def __init__(self):
    self.tf = TransformListener()
    smach.State.__init__(self,
      outcomes=['finished','failed'],
      input_keys=['base_pose','stop_rotating','id'],
      output_keys=['detected'])
    rospy.Subscriber("/cob_people_detection/detection_tracker/face_position_array",DetectionArray, self.callback)
    self.stop_rotating=False
    self.detections=      list()
    self.false_detections=list()

  def callback(self,msg):
    # go through list of detections and append them to detection list
    if len(msg.detections) >0:
      #clear detection list
      del self.detections[:]
      for i in xrange( len(msg.detections)):
        self.detections.append(msg.detections[i].label)
    return

  def execute(self, userdata):
    sss.say(["I am going to take a look around now."])

    # get position from tf
    if self.tf.frameExists("/base_link") and self.tf.frameExists("/map"):
        t = self.tf.getLatestCommonTime("/base_link", "/map")
        position, quaternion = self.tf.lookupTransform("/base_link", "/map", t)
  # calculate angles from quaternion
	[r,p,y]=euler_from_quaternion(quaternion)
	#print r
	#print p
	#print y
  #print position
    else:
        print "No transform available"
        return "failed"

    time.sleep(1)
    self.stop_rotating=False
    # create relative pose - x,y,theta
    curr_pose=list()
    curr_pose.append(0)
    curr_pose.append(0)
    curr_pose.append(0.1)

    while not rospy.is_shutdown() and self.stop_rotating==False and curr_pose[2]< 3.14:
      handle_base = sss.move_base_rel("base", curr_pose)

      #check in detection and react appropriately
      for det in self.detections:
        # right person is detected
        if det == userdata.id:
          self.stop_rotating=True
          sss.say(['I have found you, %s! Nice to see you.'%str(det)])
        elif det in self.false_detections:
          # false person detected again
          print "Already in false detections"
        elif det == "Unknown":
          # detected person is unknown - only react the first time
          print "Unknown face detected"
          sss.say(['Hi! Nice to meet you, but I am still searching for %s.'%str(userdata.id)])
          self.false_detections.append("Unknown")
        else:
          # wrong face is detected for the first time
          self.false_detections.append(det)
          print "known - wrong face detected"
          sss.say(['Hello %s! Have you seen %s.'%(str(det),str(userdata.id))])
      #clear detection list, so it is not checked twice
      del self.detections[:]
      time.sleep(2)

    print "-->stop rotating"
    return 'finished'
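The state above is meant to be plugged into a smach state machine. A rough wiring sketch follows; the outcome names and userdata keys match the ones declared in __init__, while the node name and the person id are made up.

import rospy
import smach

rospy.init_node('find_person_sketch')
sm = smach.StateMachine(outcomes=['done', 'aborted'])
sm.userdata.id = 'alice'             # person we are searching for (placeholder)
sm.userdata.base_pose = None
sm.userdata.stop_rotating = False
with sm:
    smach.StateMachine.add('ROTATE', Rotate(),
                           transitions={'finished': 'done',
                                        'failed': 'aborted'})
outcome = sm.execute()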
class Laser2PCWorld():
    #translate laser to pc and publish point cloud to /cloud_in
    def __init__(self):
        self.laserSub = rospy.Subscriber('/scan', LaserScan,
                                         self.laserCallBack)
        self.laser = [float('inf')]
        self.pc_laser = None
        self.pc_world = None
        self._tf = TransformListener()
        self.occupied_cells = []
        self.free_cells = []
        self.mat_laser2world = None

    def laserCallBack(self, data):
        self.laser = data

    def update(self):
        # Get the transformation matrix for laser to world
        target_frame = "world"
        src_frame = "laser0_frame"
        if self._tf.frameExists(target_frame) and self._tf.frameExists(
                src_frame):
            try:
                self.mat_laser2world = self._tf.asMatrix(
                    target_frame, self.laser.header)
            except Exception as e:
                print(e)
        else:
            print('Unable to find:', self._tf.frameExists(target_frame),
                  self._tf.frameExists(src_frame))

        # Transform laser data to world coordinate
        if not isinstance(self.laser, LaserScan) or not isinstance(
                self.mat_laser2world, np.ndarray):
            return
        try:
            l = copy.deepcopy(self.laser)
            mat = copy.deepcopy(self.mat_laser2world)

            free = []  # in world coordinates
            occupied = []  # in world coordinates
            pc_laser = []
            pc_world = []
            for r in range(len(l.ranges)):
                radius = l.ranges[r]
                encountered_obs = True
                if l.ranges[r] == float('inf'):
                    # no return: append free cells up to the maximum range
                    radius = l.range_max
                    encountered_obs = False

                # flip the beam angle due to differences in the ROS coordinate frame
                theta = np.pi - (l.angle_min + r * l.angle_increment)
                x = radius * np.cos(theta)
                y = radius * np.sin(theta)
                z = 0

                pc_laser.append([x, y, z])
                col = np.array([[x, y, z, 1]]).T
                tran = np.matmul(mat, col).reshape((4))
                pc_world.append([tran[X], tran[Y], tran[Z]])

                discretized_pt = self.append_free_space(
                    [tran[X], tran[Y], tran[Z]])
                # print("discretized_pt", len(discretized_pt))
                if encountered_obs:
                    # the last few samples along the beam belong to the obstacle hit
                    occupied.extend(discretized_pt[-10:])
                    free.extend(discretized_pt[:-10])
                else:
                    free.extend(discretized_pt)
                # print("encountered obs", encountered_obs)
                # print("occupied", len(occupied))
                # print("free", len(free))

            self.pc_laser = pc_laser
            self.pc_world = pc_world
            self.occupied_cells = occupied
            self.free_cells = free
        except Exception as e:
            print(e)

    def append_free_space(self, pt):
        distance = 0.05
        a = abs(pt[X]) / distance
        b = abs(pt[Y]) / distance
        c = abs(pt[Z]) / distance
        num_of_sample = int(max(a, b, c))

        x = np.linspace(0, pt[X], num_of_sample)
        y = np.linspace(0, pt[Y], num_of_sample)
        z = np.linspace(0, pt[Z], num_of_sample)
        coords = list(zip(x, y, z))
        return coords

    @property
    def ready(self):
        laser_ready = isinstance(self.laser, LaserScan)
        mat_ready = isinstance(self.mat_laser2world, np.ndarray)
        return laser_ready and mat_ready

    @property
    def measurements(self):
        return self._measurements
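update() above transforms each laser point with its own 4x1 matrix product; the same operation can be done for the whole scan in one multiplication. A small numpy sketch, under the assumption that mat is the 4x4 laser-to-world matrix returned by asMatrix and pts is an Nx3 array of laser-frame points:

import numpy as np

def transform_points(mat, pts):
    # mat: 4x4 homogeneous transform, pts: Nx3 points in the source frame.
    n = pts.shape[0]
    homogeneous = np.hstack([pts, np.ones((n, 1))])      # Nx4
    transformed = np.dot(mat, homogeneous.T).T           # Nx4 in the target frame
    return transformed[:, :3]

# The identity transform leaves the points untouched:
print(transform_points(np.eye(4), np.array([[1.0, 2.0, 0.0]])))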
Beispiel #41
0
class controller:

    def __init__(self):
        # self.kp=1.0*1
        self.moving=True
        self.robot_L = None
        self.robot_R= None
        self.theta= None
        self.goal =np.array([0.0 , 0.0 , 0.0])
        self.ball_pos =np.array([0.0 , 0.0 , 0.0])
        self.ball_vel =np.array([0.0 , 0.0 , 0.0])
        self.kp = 0.3

        self.kp_z = 1.0*0.8

        self.kd = 1.0*0
        # self.kd=1.0*0
        self.robot_R = [0.0,0.0,0.0,0.0,0.0,0.0]
        self.robot_L = [0.0,0.0,0.0,0.0,0.0,0.0]
        self.robot_pose = np.array([0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0])
        self.robot_vel = np.array([0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0]) # message order is right then left
        # self.goal = np.array([0.28637367486953735, -0.23328398168087006, 0.09796947240829468, 0.9996191263198853, 0.027586041018366814, -0.0007667283643968403 /
        #                       0.16970492899417877, 0.3836527466773987, 0.16571415960788727, 0.8278752565383911, -0.37586238980293274, -0.4163532853126526])

        # Subscribing to join_state_topic. It should contain message from YuMi Paul node
        # [right arm left arm - joints angles]
        self.tf = TransformListener()
        self.operational_pub = rospy.Publisher("/operational_velocity_command",Float32MultiArray,queue_size=1)

        self.operational_sub = rospy.Subscriber("/operational_position_R",Float32MultiArray,self.callback_operational_R,buff_size=1)
        # Subscribing to simulation from Sean with ball's position and velocity
        self.camera_coords_sub = rospy.Subscriber("/camera_coords",Float32MultiArray,self.callback_camera_coords,buff_size=1)
        self.ball_sub = rospy.Subscriber("/ball_position",Float32MultiArray,self.callback_ball,buff_size=1)
        self.ball_vel_sub = rospy.Subscriber("/ball_velocity",Float32MultiArray,self.callback_vel_ball,buff_size=1)
        self.joint_states_sub = rospy.Subscriber("/joint_state_R",Float32MultiArray,self.callback_joint_state_R,buff_size=1)
        self.joints_R =[0.0,0.0,0.0,0.0,0.0,0.0,0.0]


        # Publishing to the joint command topic, which is consumed by the Yumi Paul node.
        # This is what we actually send to the robot: joint velocities for each joint,
        # computed from the Cartesian command via the Jacobian matrix (a hedged sketch
        # of that mapping appears after this class).
        self.joint_command_pub = rospy.Publisher("/joint_command_topic",Float32MultiArray,queue_size=10)

        # Publishing to the yumi hand topic with a velocity message.
        # End-effector position (need to add the racket length) - [x y z] - obtained from the transform from frame 0 to frame 7.
        # Try to get this message from the already existing getVel function.
        self.yumi_hand_pub = rospy.Publisher("/yumi_hand_topic",Float32MultiArray,queue_size=10)


    def callback_joint_state_R(self,msg):
        if any(np.isnan(msg.data)):
            return
        self.joints_R = msg.data[:7]

    def callback_ball(self,msg):
        self.ball_pos[0] = msg.data[0]
        self.ball_pos[1] = msg.data[1]
        self.ball_pos[2] = msg.data[2]

    def callback_vel_ball(self,msg):
        self.ball_vel[0] = msg.data[0]
        self.ball_vel[1] = msg.data[1]
        self.ball_vel[2] = msg.data[2]

    def callback_camera_coords(self,msg):
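        # Interpretation: wrap the raw camera-frame coordinates in a PoseStamped and use
        # TransformListener.transformPose to express them in the yumi_body frame; the
        # transformed x/y become the controller's goal.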
        if self.tf.frameExists("yumi_body") and self.tf.frameExists("camera_depth_optical_frame"):
            t = self.tf.getLatestCommonTime("yumi_body", "camera_depth_optical_frame")
            #position, quaternion = self.tf.lookupTransform("yumi_body", "camera_depth_optical_frame", t)
            p1 = PoseStamped()
            p1.pose.position.x =msg.data[0]
            p1.pose.position.y =msg.data[1]
            p1.pose.position.z =msg.data[2]
            p1.header.frame_id = "camera_depth_optical_frame"
            p_in_base = self.tf.transformPose("yumi_body", p1)
            self.goal[0] = p_in_base.pose.position.x
            self.goal[1] = p_in_base.pose.position.y
            #print(p1)

    def callback_operational_R(self,msg):
        self.robot_L = [0.0,0.0,0.0,0.0,0.0,0.0]
        self.robot_R=list(msg.data)
        self.theta=self.robot_R[0:8]
        for i in range(0,len(self.robot_L)):
            self.robot_R.append(self.robot_L[i]) #combines two arrays
        self.robot_pose= np.array(self.robot_R)

        if self.moving==True:
            vel_msg=self.getVelMsg()
            self.operational_pub.publish(vel_msg)



            #velocity message for joints (14 values for each joint. Left can be 0).

            # self.joint_command_pub.publish(vel_Msg)
            # self.yumi_hand_pub.publish(hand_msg)


    def callback_goal(self,msg):
        self.goal = np.array(msg.data)
        self.moving=True

    def getVelMsg(self):
        #self.goal = np.array([0.88,  0.343, 0.3])
        try:
            serv= rospy.ServiceProxy('getLastTransformation', getM)
            resp = serv(self.joints_R)
        except rospy.ServiceException as e:
            print("Service call failed: %s" % e)
            return self.robot_vel

        Te = np.matrix(np.zeros((4,4)))
        Te[0:3,:] = np.reshape(resp.M,(3,4))
        Te[3,3] = 1.0
        #Te = np.linalg.inv(Te)
        Tep = np.matrix([[1.0, 0.0,0.0, 0.0],[0.0, 0.0, -1.0, 0.0],[0.0, 1.0, 0.0, 0.25],[0.0, 0.0, 0.0 ,1.0]])
        #Tep = np.matrix([[0.0, 0.0,1.0, 0.0],[0.0, 1.0, 0.0, 0.0],[-1.0, 0.0, 0.0, 0.05],[0.0, 0.0, 0.0 ,1.0]])
        #Tep = np.matrix([[0.0, -1.0 ,0.0, 0.0],[1.0, 0.0, 0.0, 0.0],[0.0, 0.0, 1.0, 0.05],[0.0, 0.0, 0.0 ,1.0]])
        # Tep = np.matrix([[1.0, 0.0 ,0.0, 0.0],[0.0, 1.0, 0.0, 0.0],[0.0, 0.0, 1.0, 0.05],[0.0, 0.0, 0.0 ,1.0]])
    #T0p = np.linalg.inv(np.matmul(Te,Tep))
        #Tep = np.linalg.inv(Tep)
        T0p = np.matmul(Te,Tep)
        tmp = T0p[0:3,3]
        self.ball_pos[2] = 0.25
        p = (self.ball_pos[0:3]-np.array(tmp.transpose()))*2
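        # Orientation handling below (interpretation, not from the original comments):
        # 'normal' is the tool x-axis expressed in the base frame, the cross products
        # give candidate rotation axes, and arccos of the clipped dot products gives
        # the angles an1/an2 used to build the angular part of robot_vel.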
        normal = np.matmul(T0p[0:3,0:3],np.array([1.0,0.0,0.0]))
        tmp[2] = 0.0
        self.ball_pos[2] = 0.0
        #save = np.array([0.0,0.0,1.0])-0.5*(self.ball_pos-np.array([0.2,-0.5,0.0]))
        save = np.array([0.0,0.0,1.0])-0.5*(self.robot_pose[0:3]-np.array([0.0,-0.2,0.0]))
        #save[1]*=-1
        save =save/np.linalg.norm(save)
        rot = np.cross(normal,np.array(save))
        rot = np.matrix(rot).transpose()
        tmp = normal
        tmp =tmp/np.linalg.norm(normal)
        an1 = np.arccos(np.clip(np.dot(tmp,np.array([-0.0,0.0,1.0])), -1.0, 1.0))
        #Tpe = np.linalg.inv(Tep)
        #rot = np.matmul(Tpe[0:3,0:3],np.matrix(rot).transpose())
        # d = self.goal[0:3]*0-self.robot_vel[0:3]
        direction = p/np.linalg.norm(p)
        val = direction*np.linalg.norm(p)*self.kp #+direction*d*self.kd
        val = p
        for i in range(0,2):
            #val[i] = np.sign(val[i])
            self.robot_vel[i] =12.0*val[0,i]+self.ball_vel[i]
        self.robot_vel[2] = 4.0*val[0,2]
        # p_z = self.goal[2]-self.robot_pose[2]
        # direction_z = p_z/np.linalg.norm(p_z)
        # val_z = -direction_z*np.linalg.norm(p_z)*self.kp_z

        #val_z = 0.0
        #if (self.ball_vel[2]<0.0):
        self.robot_vel[2] += -1*0.4*self.ball_vel[2]/(2*np.abs(val[0,2])+0.2)
        #else:
       #     self.robot_vel[2] += 0.0
        tmp = self.robot_pose[3:6]
        tmp =tmp/np.linalg.norm(self.robot_pose[3:6])
        rot2 =np.cross(tmp,np.array([1.0,0.0,0.0]))
        an2 = np.arccos(np.clip(np.dot(tmp,np.array([1.0,0.0,0.0])), -1.0, 1.0))
        # np.arccos(np.dot(self.robot_pose[3:6],np.array([1.0,0.0,0.0])))
        self.robot_vel[3:6] = (rot2*0.8*an2+an1*rot.transpose())*300
        #self.robot_vel[1]=0.0
        #self.robot_vel[0]=0.1

        vel_msg = Float32MultiArray()
        vel_msg.data = self.robot_vel.tolist()

        return vel_msg
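
The comments in controller.__init__ mention using the Jacobian matrix to turn the Cartesian velocity command from getVelMsg into joint velocities. A hedged sketch of that step, assuming a 6x7 Jacobian J for one arm is obtained elsewhere (e.g. from a kinematics service; it is not provided by this example):

import numpy as np

def cartesian_to_joint_velocities(J, x_dot, damping=1e-2):
    # Damped least-squares resolved-rate mapping:
    #   q_dot ~= J^T (J J^T + damping * I)^-1 x_dot
    # The damping term keeps the joint velocities bounded near singular configurations.
    J = np.asarray(J, dtype=float)          # expected shape (6, 7)
    x_dot = np.asarray(x_dot, dtype=float)  # [vx, vy, vz, wx, wy, wz]
    JT = J.T
    return JT.dot(np.linalg.solve(J.dot(JT) + damping * np.eye(J.shape[0]), x_dot))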
Beispiel #42
0
class DynamicTFBroadcaster:
    def __init__(self):
        self.pub_tf = rospy.Publisher("/denso_cube_tf",
                                      tf.msg.tfMessage,
                                      queue_size=10)
        self.tf = TransformListener()
        self.t = geometry_msgs.msg.TransformStamped()
        self.translation_array = np.zeros((3), dtype=np.float64)
        self.mainloop()

    """ def median(self,array):
			norma = np.sqrt(np.power(array, 2))
			median_array.pop(0)
			median_array.append(norma)
			median_array.sort()
			print(array)
			median = median_array(6)
			return median """

    def mainloop(self):
        values_array = np.zeros((7, 4), dtype=np.float64)
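        # Rows 0-2 collect the last four translation samples (x, y, z); rows 3-6 the
        # corresponding quaternion components (qx, qy, qz, qw). Column index i cycles 0..3.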

        i = 0
        while not rospy.is_shutdown():
            # Run this loop at about 2 Hz (rospy.sleep(0.5) per iteration)
            rospy.sleep(0.5)

            if self.tf.frameExists("denso") and self.tf.frameExists(
                    "ar_marker_0"):
                #t = self.tf.getLatestCommonTime("world", "ar_marker_7")
                position, quaternion = self.tf.lookupTransform(
                    "denso", "ar_marker_0", rospy.Time(0))
                self.t.header.frame_id = "denso"
                self.t.header.stamp = rospy.Time.now()
                self.t.child_frame_id = "ar_marker_0"
                self.t.transform.translation.x = position[0]
                self.t.transform.translation.y = position[1]
                self.t.transform.translation.z = position[2]

                self.t.transform.rotation.x = quaternion[0]
                self.t.transform.rotation.y = quaternion[1]
                self.t.transform.rotation.z = quaternion[2]
                self.t.transform.rotation.w = quaternion[3]
                tfm = tf.msg.tfMessage([self.t])
                self.pub_tf.publish(tfm)

                values_array[0][i] = position[0]
                values_array[1][i] = position[1]
                values_array[2][i] = position[2]
                values_array[3][i] = quaternion[0]
                values_array[4][i] = quaternion[1]
                values_array[5][i] = quaternion[2]
                values_array[6][i] = quaternion[3]

                #translation_array = [position[0], position[1], position[2]

                if (i == 3):
                    i = 0
                    translation_x = self.mean(values_array[0])
                    translation_y = self.mean(values_array[1])
                    translation_z = self.mean(values_array[2])

                    #rotation_x = self.median(values_array[3])
                    #rotation_y = self.median(values_array[4])
                    #rotation_z = self.median(values_array[5])
                    #rotation_w = self.median(values_array[6])

                    print("Translation x: %.3f " % (translation_x))
                    print("Translation y: %.3f " % (translation_y))
                    print("Translation z: %.3f " % (translation_z))

                    #print("Rotation x: %.3f " % (rotation_x))
                    #print("Rotation y: %.3f " % (rotation_y))
                    #print("Rotation z: %.3f " % (rotation_z))
                    #print("Rotation w: %.3f " % (rotation_w))
                else:
                    i = i + 1

                #print(values_array[0])

                rospy.loginfo(tfm)

    def mean(self, array):
        # Average of the four most recent samples stored in one row of values_array.
        return (array[0] + array[1] + array[2] + array[3]) / 4.0
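
The loop above averages every fourth batch of translation samples before printing. A minimal sketch (an assumption, not part of the original example) of the same smoothing idea with a rolling window, so a mean is available after every new lookupTransform result:

import numpy as np
from collections import deque

class TranslationSmoother(object):
    def __init__(self, window=4):
        # Keep only the most recent `window` translation samples.
        self.samples = deque(maxlen=window)

    def add(self, position):
        # `position` is the (x, y, z) tuple returned by TransformListener.lookupTransform().
        self.samples.append(np.asarray(position, dtype=float))

    def mean(self):
        # Element-wise mean of the stored samples, or None before the first one arrives.
        return np.mean(list(self.samples), axis=0) if self.samples else None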
Beispiel #43
0
class TurnTableOperator(object):
  def __init__(self, serial_port, marker_id, contactdb_recorder, hand_pose_recorder,
      step=3, motor_speed=150, motor_acceleration=200, marker_stable_thresh=30):
    """"
    :param serial_port:
    :param marker_id:
    :param {contactdb,hand_pose}_recorder: Object of the RosbagRecord class
    :param step:
    :param motor_speed:
    :param motor_acceleration:
    :param marker_stable_thresh: no. of times marker needs to be seen
    continuously
    """
    self.step = step
    self.marker_id = marker_id
    self.contactdb_recorder = contactdb_recorder
    self.hand_pose_recorder = hand_pose_recorder
    self.tt_base_t = []
    self.tt_base_pose = TransformStamped()
    self.tf_listener = TransformListener()
    self.marker_seen_count = 0
    self.marker_stable_thresh = marker_stable_thresh
    self.marker_pose_ready = False
    self.tt_angle_bcaster = tf2_ros.TransformBroadcaster()
    self.tt_center_bcaster = tf2_ros.TransformBroadcaster()

    # logging for the Arduino driver
    ch = logging.StreamHandler()
    ch.setLevel(logging.DEBUG)
    board.logger.addHandler(ch)

    # init and connect the driver
    self.arduino = board.Board()
    self.arduino.serial_name = serial_port
    try:
      self.arduino.connect()
    except Exception as e:
      rospy.logerr('Could not connect to turntable: {:s}'.format(e))
      raise e

    # init and configure the motor
    self.arduino.motor_invert(True)
    self.arduino.motor_enable()
    self.arduino.motor_reset_origin()
    self.arduino.motor_speed(motor_speed)
    self.arduino.motor_acceleration(motor_acceleration)

    # OpenCV window for clicking
    self.im_size = 50
    self.button_win_name = 'Marker Ready'
    self.button_im = self._make_im([0, 0, 255])
    cv2.imshow(self.button_win_name, self.button_im)
    cv2.waitKey(1)

    rospy.sleep(0.5)
    assert self.tf_listener.frameExists('turntable_base_tmp')
    assert self.tf_listener.frameExists('boson_frame')

  def _make_im(self, pixel):
    """
    :param pixel: (3, )
    :return:
    """
    im = np.asarray(pixel, dtype=np.uint8)[np.newaxis, np.newaxis, :]
    im = np.repeat(np.repeat(im, self.im_size, axis=0), self.im_size, axis=1)
    return im

  def publish_tfs(self, tt_angle):
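    # Two transforms are published: turntable_frame w.r.t. turntable_base (a pure
    # rotation of tt_angle degrees about z), and turntable_base w.r.t.
    # kinect2_rgb_optical_frame (translation averaged over all accumulated marker
    # samples in tt_base_t, rotation taken from the most recent marker sample).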
    t = TransformStamped()  # pose of turntable_frame w.r.t. turntable_base
    t.header.stamp = rospy.Time.now()
    t.header.frame_id = 'turntable_base'
    t.child_frame_id  = 'turntable_frame'
    t.transform.translation.x = 0
    t.transform.translation.y = 0
    t.transform.translation.z = 0
    q = tx.quaternion_from_euler(0, 0, np.deg2rad(tt_angle))
    t.transform.rotation.x = q[0]
    t.transform.rotation.y = q[1]
    t.transform.rotation.z = q[2]
    t.transform.rotation.w = q[3]
    self.tt_angle_bcaster.sendTransform(t)

    t.header.frame_id = 'kinect2_rgb_optical_frame'
    t.child_frame_id = 'turntable_base'
    tm = np.mean(self.tt_base_t, axis=0)
    t.transform.translation.x = tm[0]
    t.transform.translation.y = tm[1]
    t.transform.translation.z = tm[2]
    t.transform.rotation = self.tt_base_pose.transform.rotation
    self.tt_center_bcaster.sendTransform(t)

  def record_hand_pose(self, object_name):
    self.hand_pose_recorder.start_recording(object_name)

  def run_turntable(self, object_name):
    # first stop the hand pose recorder node if it is active
    self.hand_pose_recorder.stop_recording_handler()
    # start recording for contactdb
    angle = 0
    self.contactdb_recorder.start_recording(object_name)
    rospy.sleep(1.5)
    while angle < 360:
      rospy.loginfo('Angle = {:d}'.format(angle))
      self.publish_tfs(tt_angle=angle)
      rospy.sleep(0.5)
      self.arduino.motor_move(self.step)
      angle += self.step
      rospy.sleep(0.5)
      cv2.waitKey(1)
    self.contactdb_recorder.stop_recording_handler()

  def disconnect(self):
    self.arduino.disconnect()
    rospy.loginfo('Disconnected from Arduino')
    self.contactdb_recorder.stop_recording_handler()
    self.hand_pose_recorder.stop_recording_handler()

  def ar_tag_callback(self, markers):
    cv2.imshow(self.button_win_name, self.button_im)
    cv2.waitKey(1)

    if self.marker_pose_ready:
      return

    for m in markers.markers:
      if m.id != self.marker_id:
        continue
      if self.marker_seen_count > self.marker_stable_thresh:
        self.button_im = self._make_im([0, 255, 0])
        self.marker_pose_ready = True
        self.marker_seen_count = self.marker_stable_thresh
      else:
        self.button_im = self._make_im([0, 0, 255])
        pos, rotquat = self.tf_listener.lookupTransform(
          'kinect2_rgb_optical_frame', 'turntable_base_tmp', rospy.Time(0))
        if np.isnan(pos).any() or np.isnan(rotquat).any():
          rospy.logwarn('NaN pose')
          break
        self.marker_seen_count += 1
        self.tt_base_t.append(pos)
        self.tt_base_pose.transform.rotation.x = rotquat[0]
        self.tt_base_pose.transform.rotation.y = rotquat[1]
        self.tt_base_pose.transform.rotation.z = rotquat[2]
        self.tt_base_pose.transform.rotation.w = rotquat[3]
        rospy.loginfo('Marker {:d} seen {:d} times'.
                      format(self.marker_id, self.marker_seen_count))
        rospy.loginfo('Current pose of TT base w.r.t. Kinect = '
                      '[{:4.3f}, {:4.3f}, {:4.3f}]'.
                      format(pos[0], pos[1], pos[2]))
      break
    else:
      rospy.logwarn('Marker {:d} not seen, resetting counter'.
                    format(self.marker_id))
      self.marker_seen_count = 0
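
ar_tag_callback above accumulates one translation sample per valid marker sighting; publish_tfs then averages them with np.mean. A minimal standalone sketch (an assumption, not part of the original example) of that averaging step:

import numpy as np

def stable_base_translation(tt_base_t):
    # Mean of the accumulated (x, y, z) marker translations, ignoring NaN samples;
    # returns None until at least one valid sample has been collected.
    samples = [s for s in tt_base_t if not np.isnan(s).any()]
    if not samples:
        return None
    return np.mean(np.asarray(samples, dtype=float), axis=0)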