Example #1
    def __init__(self,
                 topic='motion/controller/look_at/in_pose_ref',
                 duration=120,
                 interval=[3, 5],
                 maxXYZ=[1, 0.3, 0.5],
                 minXYZ=[1.0, -0.3, 0.0],
                 frame_xyz='base_link',
                 frame_out='odom_combined'):
        super(RandPoseGenerator, self).__init__(outcomes=['done', 'failure'])

        # Check constructor parameters.
        if not isinstance(topic, str):
            raise ValueError("Topic name must be string.")
        if not isinstance(duration, (int, float)) or duration <= 0:
            raise ValueError("Duration must be positive number.")
        if len(interval) != 2 or any([
                not isinstance(t, (int, float)) for t in interval
        ]) or any([t < 0 for t in interval]) or interval[0] > interval[1]:
            raise ValueError("Interval must represent interval of time.")
        if len(minXYZ) != 3 or any(
            [not isinstance(v, (int, float)) for v in minXYZ]):
            raise ValueError("minXYZ: list of three numbers was expected.")
        if len(maxXYZ) != 3 or any(
            [not isinstance(v, (int, float)) for v in maxXYZ]):
            raise ValueError("maxXYZ: list of three numbers was expected.")
        if not isinstance(frame_xyz, str) or not isinstance(frame_out, str):
            raise ValueError("Frame names must be string.")

        self._topic = topic
        self._duration = Duration.from_sec(duration)
        self._interval = interval
        self._minXYZ = minXYZ
        self._maxXYZ = maxXYZ
        self._frame_in = frame_xyz
        self._frame_out = frame_out

        # create proxies
        self._publisher = ProxyPublisher({self._topic: PoseStamped})
        self._tf_listener = ProxyTransformListener().listener()
        self._tf_listener.waitForTransform(self._frame_out, self._frame_in,
                                           rospy.Time(), rospy.Duration(1))
        if not self._tf_listener.canTransform(self._frame_out, self._frame_in,
                                              rospy.Time(0)):
            raise ValueError(
                "Unable to perform transform between frames %s and %s." %
                (self._frame_out, self._frame_in))

        # state
        self._start_timestamp = None
        self._movement_timestamp = None
        self._movement_duration = Duration()

        # error in enter hook
        self._error = False
	def __init__(self):
		super(BrohoofSM, self).__init__()
		self.name = 'Brohoof'

		# parameters of this behavior
		self.add_parameter('wait_time', 5)
		self.add_parameter('neck_angle', 0.25)
		self.add_parameter('brohoof_cone', 0.78)
		self.add_parameter('brohoof_upper_limit', 0.30)

		# references to used behaviors

		# Additional initialization code can be added inside the following tags
		# [MANUAL_INIT]

		self._tf = ProxyTransformListener().listener()
Example #3
	def __init__(self, choice):
		'''
		Constructor
		'''
		super(GetPipePose, self).__init__(outcomes=['succeeded'], input_keys = ['centerpose'], output_keys = ['pipepose'])
		self._choice = choice
		
		self._tf = ProxyTransformListener().listener()
		self._succeeded = False
    def __init__(self):
        '''
        Get tf proxy and necessary transforms.
        :raises: any of the exceptions that :meth:`~tf.Transformer.lookupTransform` can raise
        '''
        # get transform listener singleton
        self._tf = ProxyTransformListener().listener()
        # wait for pose publication
        self._tf.waitForTransform('bone51', 'base_link', rospy.Time(),
                                  rospy.Duration(10.0))
        # get neck base translation vector in base_link frame
        (trans, rot) = self._tf.lookupTransform('base_link', 'bone51',
                                                rospy.Time())
        self._t5051 = trans
        # get translation from bone51 to bone52
        (trans, rot) = self._tf.lookupTransform('bone51', 'bone52',
                                                rospy.Time())
        self._t5152 = trans
	def __init__(self):
		super(DetectPipesSM, self).__init__()
		self.name = 'Detect Pipes'

		# parameters of this behavior
		self.add_parameter('pose_offset', 0.3)
		self.add_parameter('close_offset', 0.1)

		# references to used behaviors

		# Additional initialization code can be added inside the following tags
		# [MANUAL_INIT]
		self._tf = ProxyTransformListener().listener()
Example #6
class GetPipePose(EventState):
	'''
	># centerpose	PoseStamped()		Pose of centered pipe.
	#> pipepose	PoseStamped()		Pose of desired pipe.
	'''
	TOP_LEFT = 1
	TOP_RIGHT = 2
	CENTER = 3
	DOWN_LEFT = 4
	DOWN_RIGHT = 5

	def __init__(self, choice):
		'''
		Constructor
		'''
		super(GetPipePose, self).__init__(outcomes=['succeeded'], input_keys = ['centerpose'], output_keys = ['pipepose'])
		self._choice = choice
		
		self._tf = ProxyTransformListener().listener()
		self._succeeded = False
		
	def execute(self, userdata):
		'''
		Execute this state
		'''
		if self._succeeded:
			return 'succeeded'
		
			
	def on_enter(self, userdata):
		tempPose = PoseStamped()
		tempPose = self._tf.transformPose('base_link', userdata.centerpose)
		
		if self._choice == GetPipePose.TOP_LEFT:
			tempPose.pose.position.y = tempPose.pose.position.y + 0.105
			tempPose.pose.position.z = tempPose.pose.position.z + 0.105
		if self._choice == GetPipePose.TOP_RIGHT:
			tempPose.pose.position.y = tempPose.pose.position.y - 0.105
			tempPose.pose.position.z = tempPose.pose.position.z + 0.105
		if self._choice == GetPipePose.DOWN_RIGHT:
			tempPose.pose.position.y = tempPose.pose.position.y - 0.105
			tempPose.pose.position.z = tempPose.pose.position.z - 0.105
		if self._choice == GetPipePose.DOWN_LEFT:	
			tempPose.pose.position.y = tempPose.pose.position.y + 0.105
			tempPose.pose.position.z = tempPose.pose.position.z - 0.105

		userdata.pipepose = tempPose
		
		self._succeeded = True
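
A minimal standalone sketch of the transform-and-offset step used above (the helper name and offset values are illustrative, not part of the original state):

from flexbe_core.proxy import ProxyTransformListener

def offset_pipe_pose(center_pose, dy, dz):
    # transform the detected pose into base_link, then shift it toward the chosen pipe
    tf_listener = ProxyTransformListener().listener()  # shared tf listener singleton
    pose = tf_listener.transformPose('base_link', center_pose)
    pose.pose.position.y += dy
    pose.pose.position.z += dz
    return pose
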
	def __init__(self, move_group, ignore_collisions = False):
		'''
		Constructor
		'''
		super(CalculateCartesianPathState, self).__init__(outcomes=['planned', 'failed'],
											input_keys=['eef_pose'],
											output_keys=['joint_trajectory', 'plan_fraction'])
		
		self._topic = '/compute_cartesian_path'
		self._srv = ProxyServiceCaller({self._topic: GetCartesianPath})

		self._tf = ProxyTransformListener().listener()

		self._move_group = move_group
		self._result = None
		self._failed = False
		self._ignore_collisions = ignore_collisions
class CalculateCartesianPathState(EventState):
	'''
	Calculates the trajectory for a cartesian path to the given goal endeffector pose.

	-- move_group 		string 				Name of the move group to be used for planning.
	-- ignore_collisions bool 				Ignore collision, use carefully.

	># eef_pose			PoseStamped			Target pose of the endeffector.

	#> joint_trajectory	JointTrajectory 	Calculated joint trajectory.
	#> plan_fraction 	float 				Fraction of the planned path.

	<= planned 								Found a plan to the target.
	<= failed 								Failed to find a plan to the given target.

	'''


	def __init__(self, move_group, ignore_collisions = False):
		'''
		Constructor
		'''
		super(CalculateCartesianPathState, self).__init__(outcomes=['planned', 'failed'],
											input_keys=['eef_pose'],
											output_keys=['joint_trajectory', 'plan_fraction'])
		
		self._topic = '/compute_cartesian_path'
		self._srv = ProxyServiceCaller({self._topic: GetCartesianPath})

		self._tf = ProxyTransformListener().listener()

		self._move_group = move_group
		self._result = None
		self._failed = False
		self._ignore_collisions = ignore_collisions
		
		
	def execute(self, userdata):
		'''
		Execute this state
		'''
		if self._failed:
			return 'failed'

		if self._result.error_code.val == MoveItErrorCodes.SUCCESS and self._result.fraction > .2:
			userdata.plan_fraction = self._result.fraction
			userdata.joint_trajectory = self._result.solution.joint_trajectory
			Logger.loginfo('Successfully planned %.2f of the path' % self._result.fraction)
			return 'planned'
		elif self._result.error_code.val == MoveItErrorCodes.SUCCESS:
			Logger.logwarn('Only planned %.2f of the path' % self._result.fraction)
			self._failed = True
			return 'failed'
		else:
			Logger.logwarn('Failed to plan trajectory: %d' % self._result.error_code.val)
			self._failed = True
			return 'failed'

			
	def on_enter(self, userdata):
		self._failed = False
		request = GetCartesianPathRequest()
		request.group_name = self._move_group
		request.avoid_collisions = not self._ignore_collisions
		request.max_step = 0.05

		request.header = userdata.eef_pose.header
		request.waypoints.append(userdata.eef_pose.pose)

		now = rospy.Time.now()

		try:
			self._tf.waitForTransform('base_link', 'gripper_cam_link', now, rospy.Duration(1))
			(p,q) = self._tf.lookupTransform('gripper_cam_link', 'base_link', now)

			c = OrientationConstraint()
			c.header.frame_id = 'base_link'
			c.header.stamp = now
			c.link_name = 'gripper_cam_link'
			c.orientation.x = q[0]
			c.orientation.y = q[1]
			c.orientation.z = q[2]
			c.orientation.w = q[3]
			c.weight = 1
			c.absolute_x_axis_tolerance = 0.1
			c.absolute_y_axis_tolerance = 0.1
			c.absolute_z_axis_tolerance = 0.1

			#request.path_constraints.orientation_constraints.append(c)

			self._result = self._srv.call(self._topic, request)
		except Exception as e:
			Logger.logwarn('Exception while calculating path:\n%s' % str(e))
			self._failed = True
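
For reference, the same /compute_cartesian_path request can be built and sent with a plain rospy service proxy; a minimal sketch assuming a running MoveIt move_group node (helper name and group name are illustrative):

import rospy
from moveit_msgs.srv import GetCartesianPath, GetCartesianPathRequest

def compute_cartesian_path(eef_pose, group_name='arm_group', max_step=0.05):
    # eef_pose is a geometry_msgs/PoseStamped; group_name is an assumed example value
    rospy.wait_for_service('/compute_cartesian_path')
    srv = rospy.ServiceProxy('/compute_cartesian_path', GetCartesianPath)
    request = GetCartesianPathRequest()
    request.group_name = group_name
    request.header = eef_pose.header
    request.waypoints.append(eef_pose.pose)
    request.max_step = max_step
    request.avoid_collisions = True
    return srv(request)  # response carries solution.joint_trajectory and fraction
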
Example #9
    def __init__(self,
                 detection_topic='detection',
                 label='*',
                 type='*',
                 exit_states=['no_detections'],
                 pose_topic='',
                 pose_frame_id='odom_combined',
                 detection_period=1.0):
        super(ObjectDetectionMonitor, self).__init__(outcomes=[
            'no_detections', 'have_detections', 'detection_matches', 'failure'
        ],
                                                     output_keys=['pose'])

        # store state parameter for later use.
        self._detection_topic = detection_topic
        self._label = label
        self._type = type
        self._pose_topic = pose_topic
        self._frame_id = pose_frame_id
        self._exit_states = exit_states
        self._detection_period = detection_period

        # setup proxies
        self._detection_subscriber = ProxySubscriberCached(
            {self._detection_topic: DetectionArray})
        self._detection_subscriber.enable_buffer(self._detection_topic)
        if self._pose_topic:
            self._pose_publisher = ProxyPublisher(
                {self._pose_topic: PoseStamped})
        else:
            self._pose_publisher = None
        self._tf_listener = ProxyTransformListener().listener()

        # check parameters
        if not isinstance(
                self._frame_id,
                str):  # or not self._tf_listener.frameExists(self._frame_id):
            raise ValueError(
                'ObjectDetectionMonitor: %s is not a valid frame' %
                self._frame_id)
        if not isinstance(self._label, str) or not isinstance(self._type, str):
            raise ValueError(
                'ObjectDetectionMonitor: label and type parameters must be string.'
            )
        if not isinstance(self._detection_period,
                          float) or self._detection_period < 0.0:
            raise ValueError(
                'ObjectDetectionMonitor: detection_period must be float.')
        if not isinstance(self._exit_states, list) or not all([
                state
                in ['no_detections', 'have_detections', 'detection_matches']
                for state in self._exit_states
        ]):
            raise ValueError(
                'ObjectDetectionMonitor: incorrect list of exit states.')

        # pose buffers
        self._pose = None
        self._last_detection_stamp = None
        self._last_match_stamp = None
        self._last_match_id = None
Example #10
class ObjectDetectionMonitor(EventState):
    '''
    Receive a list of detected objects as cob_object_detection_msgs/DetectionArray messages and check whether any object
    matches the given (label, type) pair. Based on the presence of such an object the situation is classified as
    `no_detections`, `have_detections` (objects are detected, but none of them matches the given (label, type))
    or `detection_matches` (the desired object was found). In the last case its pose is published on the given topic.
    
    -- detection_topic                       string          DetectionArray message topic.
    -- label                                 string          Object label. Use '*' to match all labels.
    -- type                                  string          Object type. Use '*' to match all types.
    -- detection_period                      float           Detection timeout period (seconds).
    -- exit_states                           string[]        Stop monitoring and return an outcome if the detected situation is in this list.
    -- pose_topic                            string          Topic for publishing the matched object pose (may be empty).
    -- pose_frame_id                         string          Frame in which the pose should be published.

    #> pose                                  object          Object pose before exit.

    <= no_detections 	                     Nothing has been detected for detection_period.
    <= have_detections 	                     Objects were detected, but none of them matches (label, type).
    <= detection_matches	                     A matching object was detected. `pose` key contains its last pose.
    <= failure	                             Runtime error.

    '''
    def __init__(self,
                 detection_topic='detection',
                 label='*',
                 type='*',
                 exit_states=['no_detections'],
                 pose_topic='',
                 pose_frame_id='odom_combined',
                 detection_period=1.0):
        super(ObjectDetectionMonitor, self).__init__(outcomes=[
            'no_detections', 'have_detections', 'detection_matches', 'failure'
        ],
                                                     output_keys=['pose'])

        # store state parameter for later use.
        self._detection_topic = detection_topic
        self._label = label
        self._type = type
        self._pose_topic = pose_topic
        self._frame_id = pose_frame_id
        self._exit_states = exit_states
        self._detection_period = detection_period

        # setup proxies
        self._detection_subscriber = ProxySubscriberCached(
            {self._detection_topic: DetectionArray})
        self._detection_subscriber.enable_buffer(self._detection_topic)
        if self._pose_topic:
            self._pose_publisher = ProxyPublisher(
                {self._pose_topic: PoseStamped})
        else:
            self._pose_publisher = None
        self._tf_listener = ProxyTransformListener().listener()

        # check parameters
        if not isinstance(
                self._frame_id,
                str):  # or not self._tf_listener.frameExists(self._frame_id):
            raise ValueError(
                'ObjectDetectionMonitor: %s is not a valid frame' %
                self._frame_id)
        if not isinstance(self._label, str) or not isinstance(self._type, str):
            raise ValueError(
                'ObjectDetectionMonitor: label and type parameters must be string.'
            )
        if not isinstance(self._detection_period,
                          float) or self._detection_period < 0.0:
            raise ValueError(
                'ObjectDetectionMonitor: detection_period must be float.')
        if not isinstance(self._exit_states, list) or not all([
                state
                in ['no_detections', 'have_detections', 'detection_matches']
                for state in self._exit_states
        ]):
            raise ValueError(
                'ObjectDetectionMonitor: incorrect list of exit states.')

        # pose buffers
        self._pose = None
        self._last_detection_stamp = None
        self._last_match_stamp = None
        self._last_match_id = None

    def on_enter(self, userdata):
        # reset pose buffers
        self._pose = None
        stamp = rospy.Time.now()
        self._last_detection_stamp = stamp
        self._last_match_stamp = stamp
        self._last_match_id = None

        Logger.loginfo('ObjectDetectionMonitor: start monitoring.')

    def execute(self, userdata):
        # list of found matches
        have_detections = False
        best_match = None
        match = None
        # check if new message is available
        while self._detection_subscriber.has_buffered(self._detection_topic):
            # get detections msg
            detections_msg = self._detection_subscriber.get_from_buffer(
                self._detection_topic)
            # check if have detections
            if len(detections_msg.detections) > 0:
                have_detections = True
            # process detections
            for detection in detections_msg.detections:
                # check match
                is_match = True
                if self._label != '*' and detection.label != self._label:
                    is_match = False
                if self._type != '*' and detection.type != self._type:
                    is_match = False
                # register match
                if is_match:
                    match = detection
                    if match.id == self._last_match_id:
                        best_match = match

        if best_match:
            match = best_match

        # result
        stamp = rospy.Time.now()
        if not have_detections:
            # no object detected
            # check if detection period exceeded
            if (stamp - self._last_detection_stamp
                ).to_sec() > self._detection_period:
                # no detections state
                if 'no_detections' in self._exit_states:
                    Logger.loginfo('ObjectDetectionMonitor: no detections.')
                    return 'no_detections'
        elif not match:
            # objects detected, but none of them matches
            self._last_detection_stamp = stamp
            # check if detection period exceeded since the last match
            if (stamp - self._last_match_stamp
                ).to_sec() > self._detection_period:
                # have_detections state
                if 'have_detections' in self._exit_states:
                    Logger.loginfo('ObjectDetectionMonitor: detection.')
                    return 'have_detections'
        else:
            # appropriate match is found
            self._last_match_stamp = stamp
            self._last_match_id = match.id
            # publish
            if self._pose_publisher:
                # transform to frame_id
                try:
                    # transform
                    pose_stamped = PoseStamped(
                        pose=match.pose,
                        header=Header(frame_id=match.header.frame_id))
                    self._pose = self._tf_listener.transformPose(
                        self._frame_id, pose_stamped)
                except tf.Exception as e:
                    Logger.logwarn(
                        'Unable to transform from %s to %s: %s' %
                        (match.header.frame_id, self._frame_id, e))
                    return 'failure'
                # publish
                self._pose_publisher.publish(self._pose_topic, self._pose)
            # match_detection state
            if 'detection_matches' in self._exit_states:
                Logger.loginfo('ObjectDetectionMonitor: match.')
                return 'detection_matches'

        return None

    def on_exit(self, userdata):
        userdata.pose = self._pose
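
A hedged wiring sketch showing how this state could be added to a FlexBE state machine; the state machine, label, parameter values, and transition targets are illustrative, not taken from the original behaviors:

from flexbe_core import OperatableStateMachine, Autonomy

_sm = OperatableStateMachine(outcomes=['finished', 'failed'])
with _sm:
    OperatableStateMachine.add('Wait_For_Object',
                               ObjectDetectionMonitor(detection_topic='detection',
                                                      label='*', type='*',
                                                      exit_states=['detection_matches'],
                                                      pose_topic='object_pose',
                                                      pose_frame_id='odom_combined',
                                                      detection_period=1.0),
                               transitions={'no_detections': 'failed',
                                            'have_detections': 'failed',
                                            'detection_matches': 'finished',
                                            'failure': 'failed'},
                               autonomy={'no_detections': Autonomy.Off,
                                         'have_detections': Autonomy.Off,
                                         'detection_matches': Autonomy.Off,
                                         'failure': Autonomy.Off},
                               remapping={'pose': 'object_pose'})
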
class HeadIK:
    '''
    Implements SweetieBot head and eyes inverse kinematics. Meant to be run in the FlexBE state environment.

    '''
    def __init__(self):
        '''
        Get tf proxy and necessary transforms.
        :raises: any of the exceptions that :meth:`~tf.Transformer.lookupTransform` can raise
        '''
        # get transform listener singleton
        self._tf = ProxyTransformListener().listener()
        # wait for pose publication
        self._tf.waitForTransform('bone51', 'base_link', rospy.Time(),
                                  rospy.Duration(10.0))
        # get neck base translation vector in base_link frame
        (trans, rot) = self._tf.lookupTransform('base_link', 'bone51',
                                                rospy.Time())
        self._t5051 = trans
        # get translation from bone51 to bone52
        (trans, rot) = self._tf.lookupTransform('bone51', 'bone52',
                                                rospy.Time())
        self._t5152 = trans

    def pointDirectionToHeadPose(self,
                                 point_stamped,
                                 neck_angle,
                                 side_angle,
                                 limit_joints=True):
        '''
        Calculate Proto2 head pose so it is oriented toward the given point. 
        :param point_stamped: geometry_msg.msg.PointStamped point to look at.
        :param neck_angle: joint51 desired angle in radians.
        :param side_angle: joint54 desired angle in radians.
        :param limit_joints: limit joints positions to prevent self collisions.
        :return: sensor_msg.msg.JointState head pose (joint ranges are not checked). Return `None` if transform is not available.
        '''

        joints = JointState()
        joints.header = Header()
        joints.header.stamp = rospy.Time.now()
        # set known fields
        joints.name = ['joint51', 'joint52', 'joint53', 'joint54']
        joints.position = [neck_angle, 0.0, 0.0, side_angle]

        if limit_joints:
            joints.position = self.limitHeadJoints51to54(joints.position)
            neck_angle = joints.position[0]
            side_angle = joints.position[3]

        try:
            # convert point coordinates to base_link frame
            point_stamped = self._tf.transformPoint(
                'base_link',
                PointStamped(
                    header=Header(frame_id=point_stamped.header.frame_id),
                    point=point_stamped.point))
        except tf.Exception as e:
            Logger.logwarn('Cannot transform to base_link:\n%s' % str(e))
            return None

        # move to neck base
        direction = [
            point_stamped.point.x, point_stamped.point.y,
            point_stamped.point.z, 1.0
        ]
        upward = [0.0, 0.0, 1.0, 1.0]
        direction[:3] = numpy.subtract(direction[:3], self._t5051)
        # rotate by neck_angle around Oy to transform it into the desired bone51 frame.
        direction = numpy.dot(rotation_matrix(neck_angle, (0, 1, 0)),
                              direction)
        upward = numpy.dot(rotation_matrix(neck_angle, (0, 1, 0)), upward)
        # move origin to bone52
        direction[:3] = numpy.subtract(direction[:3], self._t5152)
        # calculate rotation angles
        print(upward)
        joints.position[1] = math.atan2(direction[1], direction[0])
        upward = numpy.dot(rotation_matrix(joints.position[1], (0, 0, 1)),
                           upward)
        print(upward)
        joints.position[2] = math.atan2(
            direction[2], math.sqrt(direction[0]**2 + direction[1]**2))
        upward = numpy.dot(rotation_matrix(joints.position[2], (0, 1, 0)),
                           upward)
        print(upward)
        joints.position[3] = math.atan2(upward[1], upward[2]) + side_angle
        # limit joints
        if limit_joints:
            joints.position = self.limitHeadJoints51to54(joints.position)

        return joints

    def pointDirectionToEyesPose(self, point_stamped):
        '''
        Calculate Proto2 eyes angles for deprecated eyes module.
        :param point_stamped: geometry_msg.msg.PointStamped point to look at.
        :return: sensor_msg.msg.JointState head pose (joint ranges are not checked). Return `None` if transform is not available.
        '''
        joints = JointState()
        joints.header = Header()
        joints.header.stamp = rospy.Time.now()
        # set known fields
        joints.name = ['eyes_pitch', 'eyes_yaw']
        joints.position = [0.0, 0.0]

        try:
            # convert point coordinates to base_link frame
            point_stamped = self._tf.transformPoint(
                'bone54',
                PointStamped(
                    header=Header(frame_id=point_stamped.header.frame_id),
                    point=point_stamped.point))
        except tf.Exception as e:
            Logger.logwarn('Cannot transform to bone54:\n%s' % str(e))
            return None
        # calculate angles
        direction = [
            point_stamped.point.x, point_stamped.point.y, point_stamped.point.z
        ]
        joints.position[0] = math.atan2(
            point_stamped.point.z,
            math.sqrt(point_stamped.point.x**2 + point_stamped.point.y**2))
        joints.position[1] = -math.atan2(
            point_stamped.point.y,
            math.sqrt(point_stamped.point.x**2 + point_stamped.point.z**2))
        # limit joints position
        joints.position = list(
            numpy.clip(joints.position, -numpy.pi / 2, numpy.pi / 2))

        return joints

    def limitHeadJoints51to54(self, position):
        '''
        Clip joint51, joint52, joint53, joint54 to a collision-safe range.
        :param position: array of joint positions in natural order (joint51,.. joint54)
        :return: clipped array.
        '''
        return list(
            numpy.clip(position, [-1.00, -1.57, -0.55, -0.65],
                       [0.25, 1.57, 1.0, 0.65]))
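
A brief usage sketch for HeadIK, assuming a running ROS node with the robot's bone51/bone52 frames being published (target point and angles are illustrative):

import rospy
from std_msgs.msg import Header
from geometry_msgs.msg import PointStamped, Point

ik = HeadIK()  # waits up to 10 s for the bone51 -> base_link transform
target = PointStamped(header=Header(frame_id='base_link'), point=Point(1.0, 0.0, 0.2))
joints = ik.pointDirectionToHeadPose(target, neck_angle=0.25, side_angle=0.0)
if joints is not None:
    rospy.loginfo('head joints: %s' % str(joints.position))
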
Example #12
class RandPoseGenerator(EventState):
    '''
    Periodically generate a geometry_msgs/PoseStamped message within a given parallelepiped.
    The frame in which the parallelepiped is defined and the frame in which the message is published can differ.

    -- topic        string          Topic where the PoseStamped message should be published.
    -- duration     float           How long this state will be executed (seconds).
    -- interval     float[2]        Minimal and maximal interval between movements (seconds).
    -- minXYZ       float[3]        Min values for x, y, z coordinates.
    -- maxXYZ       float[3]        Max values for x, y, z coordinates.
    -- frame_xyz    string          Coordinate frame of the parallelepiped.
    -- frame_out    string          Frame in which the pose is published.

    <= done 	                    Finished.
    <= failure 	                    Runtime error (e.g. transform failure).

    '''
    def __init__(self,
                 topic='motion/controller/look_at/in_pose_ref',
                 duration=120,
                 interval=[3, 5],
                 maxXYZ=[1, 0.3, 0.5],
                 minXYZ=[1.0, -0.3, 0.0],
                 frame_xyz='base_link',
                 frame_out='odom_combined'):
        super(RandPoseGenerator, self).__init__(outcomes=['done', 'failure'])

        # Check constructor parameters.
        if not isinstance(topic, str):
            raise ValueError("Topic name must be string.")
        if not isinstance(duration, (int, float)) or duration <= 0:
            raise ValueError("Duration must be positive number.")
        if len(interval) != 2 or any([
                not isinstance(t, (int, float)) for t in interval
        ]) or any([t < 0 for t in interval]) or interval[0] > interval[1]:
            raise ValueError("Interval must represent interval of time.")
        if len(minXYZ) != 3 or any(
            [not isinstance(v, (int, float)) for v in minXYZ]):
            raise ValueError("minXYZ: list of three numbers was expected.")
        if len(maxXYZ) != 3 or any(
            [not isinstance(v, (int, float)) for v in maxXYZ]):
            raise ValueError("maxXYZ: list of three numbers was expected.")
        if not isinstance(frame_xyz, str) or not isinstance(frame_out, str):
            raise ValueError("Frame names must be string.")

        self._topic = topic
        self._duration = Duration.from_sec(duration)
        self._interval = interval
        self._minXYZ = minXYZ
        self._maxXYZ = maxXYZ
        self._frame_in = frame_xyz
        self._frame_out = frame_out

        # create proxies
        self._publisher = ProxyPublisher({self._topic: PoseStamped})
        self._tf_listener = ProxyTransformListener().listener()
        self._tf_listener.waitForTransform(self._frame_out, self._frame_in,
                                           rospy.Time(), rospy.Duration(1))
        if not self._tf_listener.canTransform(self._frame_out, self._frame_in,
                                              rospy.Time(0)):
            raise ValueError(
                "Unable to perform transform between frames %s and %s." %
                (self._frame_out, self._frame_in))

        # state
        self._start_timestamp = None
        self._movement_timestamp = None
        self._movement_duration = Duration()

        # error in enter hook
        self._error = False

    def on_enter(self, userdata):
        self._error = False
        # set start timestamp
        self._start_timestamp = Time.now()
        self._movement_timestamp = Time.now()

        Logger.loginfo('Start random pose generation, topic `%s`.' %
                       self._topic)

    def execute(self, userdata):
        if self._error:
            return 'failure'

        time_now = Time.now()
        # check if time elapsed
        if time_now - self._start_timestamp > self._duration:
            return 'done'
        # check if we have to send a new point
        if time_now - self._movement_timestamp > self._movement_duration:
            # determine new interval
            self._movement_timestamp = time_now
            self._movement_duration = Duration.from_sec(
                random.uniform(*self._interval))
            # form message
            msg = PoseStamped()
            msg.header.frame_id = self._frame_in
            msg.header.stamp = Time(0.0)
            # random position
            msg.pose.position.x = random.uniform(self._minXYZ[0],
                                                 self._maxXYZ[0])
            msg.pose.position.y = random.uniform(self._minXYZ[1],
                                                 self._maxXYZ[1])
            msg.pose.position.z = random.uniform(self._minXYZ[2],
                                                 self._maxXYZ[2])
            # transform to output frame
            try:
                msg = self._tf_listener.transformPose(self._frame_out, msg)
            except tf.Exception as e:
                Logger.logwarn('Unable to transform from %s to %s: %s' %
                               (self._frame_in, self._frame_out, e))
                return 'failure'
            # publish pose
            msg.header.stamp = self._movement_timestamp
            self._publisher.publish(self._topic, msg)

    def on_exit(self, userdata):
        Logger.loginfo('Done random pose generation.')
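
The construction-time transform check above is a reusable pattern; a minimal standalone sketch with a hypothetical helper name:

import rospy
from flexbe_core.proxy import ProxyTransformListener

def assert_transform_available(frame_out, frame_in, wait=1.0):
    # wait briefly for the transform, then verify it can actually be performed
    listener = ProxyTransformListener().listener()
    listener.waitForTransform(frame_out, frame_in, rospy.Time(), rospy.Duration(wait))
    if not listener.canTransform(frame_out, frame_in, rospy.Time(0)):
        raise ValueError('Unable to transform between frames %s and %s.' % (frame_out, frame_in))
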
class DetectPipesSM(Behavior):
	'''
	Position as required and try different ways to detect the pipe positions
	'''


	def __init__(self):
		super(DetectPipesSM, self).__init__()
		self.name = 'Detect Pipes'

		# parameters of this behavior
		self.add_parameter('pose_offset', 0.3)
		self.add_parameter('close_offset', 0.1)

		# references to used behaviors

		# Additional initialization code can be added inside the following tags
		# [MANUAL_INIT]
		self._tf = ProxyTransformListener().listener()
		# [/MANUAL_INIT]

		# Behavior comments:

		# O 98 480 
		# Run in full autonomy to skip looking, run in high to decide



	def create(self):
		joints_arm_with_gripper = ['arm_joint_%d'%i for i in range(5)]
		srdf = "hector_tracker_robot_moveit_config/config/taurob_tracker.srdf"
		percept_class = 'pipes'
		move_group = "arm_with_gripper_group"
		move_group_no_eef = "arm_group"
		# x:268 y:162, x:472 y:46
		_state_machine = OperatableStateMachine(outcomes=['finished', 'failed'])
		_state_machine.userdata.object_pose = None
		_state_machine.userdata.object_type = percept_class
		_state_machine.userdata.test_pregrasp = [0, 1.57, 0.77, 0.38, 0]
		_state_machine.userdata.index = -1

		# Additional creation code can be added inside the following tags
		# [MANUAL_CREATE]
		
		# [/MANUAL_CREATE]

		# x:30 y:365, x:569 y:352, x:45 y:183
		_sm_gripper_action_0 = OperatableStateMachine(outcomes=['finished', 'failed', 'back'])

		with _sm_gripper_action_0:
			# x:168 y:53
			OperatableStateMachine.add('Decide_Close_Gripper',
										OperatorDecisionState(outcomes=['close_gripper', 'move_back'], hint="Grasp pipe?", suggestion='close_gripper'),
										transitions={'close_gripper': 'Close_Gripper', 'move_back': 'back'},
										autonomy={'close_gripper': Autonomy.Full, 'move_back': Autonomy.Full})

			# x:221 y:173
			OperatableStateMachine.add('Close_Gripper',
										GripperState(action=0.8, duration=2.0),
										transitions={'done': 'Manipulate_Pipe', 'failed': 'failed'},
										autonomy={'done': Autonomy.Low, 'failed': Autonomy.High})

			# x:229 y:391
			OperatableStateMachine.add('Decide_Open_Gripper',
										OperatorDecisionState(outcomes=['open_gripper', 'move_back'], hint="Open gripper or pull pipe?", suggestion='open_gripper'),
										transitions={'open_gripper': 'Open_Gripper', 'move_back': 'back'},
										autonomy={'open_gripper': Autonomy.High, 'move_back': Autonomy.Full})

			# x:241 y:290
			OperatableStateMachine.add('Manipulate_Pipe',
										LogState(text="Perform desired pipe manipulation", severity=Logger.REPORT_HINT),
										transitions={'done': 'Decide_Open_Gripper'},
										autonomy={'done': Autonomy.High})

			# x:215 y:511
			OperatableStateMachine.add('Open_Gripper',
										GripperState(action=GripperState.OPEN, duration=1.0),
										transitions={'done': 'finished', 'failed': 'failed'},
										autonomy={'done': Autonomy.Low, 'failed': Autonomy.High})


		# x:153 y:384, x:388 y:221
		_sm_move_back_out_1 = OperatableStateMachine(outcomes=['finished', 'failed'], input_keys=['pipes_out_pose'])

		with _sm_move_back_out_1:
			# x:118 y:87
			OperatableStateMachine.add('Get_Cartesian_Path_Back',
										CalculateCartesianPathState(move_group=move_group, ignore_collisions=True),
										transitions={'planned': 'Execute_Cartesian_Path_Back', 'failed': 'failed'},
										autonomy={'planned': Autonomy.Low, 'failed': Autonomy.High},
										remapping={'eef_pose': 'pipes_out_pose', 'joint_trajectory': 'joint_trajectory', 'plan_fraction': 'plan_fraction'})

			# x:116 y:216
			OperatableStateMachine.add('Execute_Cartesian_Path_Back',
										MoveitExecuteTrajectoryState(),
										transitions={'done': 'finished', 'failed': 'failed'},
										autonomy={'done': Autonomy.Low, 'failed': Autonomy.High},
										remapping={'joint_trajectory': 'joint_trajectory'})


		# x:230 y:517, x:550 y:229
		_sm_move_to_close_pose_2 = OperatableStateMachine(outcomes=['finished', 'failed'], input_keys=['pipes_pose'])

		with _sm_move_to_close_pose_2:
			# x:97 y:43
			OperatableStateMachine.add('Move_Pose_Cartesian',
										CalculationState(calculation=lambda p: self.update_pose(p, self.close_offset)),
										transitions={'done': 'Visualize_Pipes_Close_Pose'},
										autonomy={'done': Autonomy.Off},
										remapping={'input_value': 'pipes_pose', 'output_value': 'pipes_close_pose'})

			# x:113 y:140
			OperatableStateMachine.add('Visualize_Pipes_Close_Pose',
										PublishPoseState(topic='/pipes/close_pose'),
										transitions={'done': 'Get_Cartesian_Path'},
										autonomy={'done': Autonomy.Low},
										remapping={'pose': 'pipes_close_pose'})

			# x:147 y:371
			OperatableStateMachine.add('Execute_Cartesian_Path',
										MoveitExecuteTrajectoryState(),
										transitions={'done': 'finished', 'failed': 'failed'},
										autonomy={'done': Autonomy.Low, 'failed': Autonomy.High},
										remapping={'joint_trajectory': 'joint_trajectory'})

			# x:121 y:260
			OperatableStateMachine.add('Get_Cartesian_Path',
										CalculateCartesianPathState(move_group=move_group, ignore_collisions=True),
										transitions={'planned': 'Execute_Cartesian_Path', 'failed': 'failed'},
										autonomy={'planned': Autonomy.Low, 'failed': Autonomy.High},
										remapping={'eef_pose': 'pipes_close_pose', 'joint_trajectory': 'joint_trajectory', 'plan_fraction': 'plan_fraction'})


		# x:119 y:516, x:358 y:275
		_sm_move_to_out_pose_3 = OperatableStateMachine(outcomes=['finished', 'failed'], input_keys=['pipes_pose'], output_keys=['pipes_out_pose'])

		with _sm_move_to_out_pose_3:
			# x:83 y:35
			OperatableStateMachine.add('Move_Pose_Out',
										CalculationState(calculation=lambda p: self.update_pose(p, self.pose_offset)),
										transitions={'done': 'Visualize_Pipes_Out_Pose'},
										autonomy={'done': Autonomy.Off},
										remapping={'input_value': 'pipes_pose', 'output_value': 'pipes_out_pose'})

			# x:85 y:307
			OperatableStateMachine.add('Move_To_Joints',
										MoveitToJointsState(move_group=move_group, joint_names=joints_arm_with_gripper, action_topic='/move_group'),
										transitions={'reached': 'Correct_Gripper_Rotation', 'planning_failed': 'failed', 'control_failed': 'failed'},
										autonomy={'reached': Autonomy.Low, 'planning_failed': Autonomy.High, 'control_failed': Autonomy.High},
										remapping={'joint_config': 'joint_config'})

			# x:77 y:118
			OperatableStateMachine.add('Visualize_Pipes_Out_Pose',
										PublishPoseState(topic='/pipes/out_pose'),
										transitions={'done': 'Get_Joint_Target'},
										autonomy={'done': Autonomy.Low},
										remapping={'pose': 'pipes_out_pose'})

			# x:83 y:211
			OperatableStateMachine.add('Get_Joint_Target',
										CalculateIKState(move_group=move_group, ignore_collisions=False),
										transitions={'planned': 'Move_To_Joints', 'failed': 'failed'},
										autonomy={'planned': Autonomy.Low, 'failed': Autonomy.High},
										remapping={'eef_pose': 'pipes_out_pose', 'joint_config': 'joint_config'})

			# x:71 y:408
			OperatableStateMachine.add('Correct_Gripper_Rotation',
										GripperRollState(rotation=GripperRollState.HORIZONTAL, duration=1.0),
										transitions={'done': 'finished', 'failed': 'failed'},
										autonomy={'done': Autonomy.Low, 'failed': Autonomy.High})


		# x:272 y:264, x:55 y:574
		_sm_eef_to_detected_4 = OperatableStateMachine(outcomes=['finished', 'failed'], input_keys=['pipes_pose'], output_keys=['pipes_pose'])

		with _sm_eef_to_detected_4:
			# x:58 y:35
			OperatableStateMachine.add('Move_To_Out_Pose',
										_sm_move_to_out_pose_3,
										transitions={'finished': 'Decide_Move_Close', 'failed': 'failed'},
										autonomy={'finished': Autonomy.Inherit, 'failed': Autonomy.Inherit},
										remapping={'pipes_pose': 'pipes_pose', 'pipes_out_pose': 'pipes_out_pose'})

			# x:568 y:38
			OperatableStateMachine.add('Move_To_Close_Pose',
										_sm_move_to_close_pose_2,
										transitions={'finished': 'Gripper_Action', 'failed': 'failed'},
										autonomy={'finished': Autonomy.Inherit, 'failed': Autonomy.Inherit},
										remapping={'pipes_pose': 'pipes_pose'})

			# x:175 y:448
			OperatableStateMachine.add('Move_Back_Out',
										_sm_move_back_out_1,
										transitions={'finished': 'finished', 'failed': 'failed'},
										autonomy={'finished': Autonomy.Inherit, 'failed': Autonomy.Inherit},
										remapping={'pipes_out_pose': 'pipes_out_pose'})

			# x:326 y:34
			OperatableStateMachine.add('Decide_Move_Close',
										OperatorDecisionState(outcomes=['close', 'end'], hint="Move closer to pipe?", suggestion='close'),
										transitions={'close': 'Move_To_Close_Pose', 'end': 'finished'},
										autonomy={'close': Autonomy.High, 'end': Autonomy.Full})

			# x:626 y:219
			OperatableStateMachine.add('Gripper_Action',
										_sm_gripper_action_0,
										transitions={'finished': 'Decide_Move_Back', 'failed': 'failed', 'back': 'Move_Back_Out'},
										autonomy={'finished': Autonomy.Inherit, 'failed': Autonomy.Inherit, 'back': Autonomy.Inherit})

			# x:408 y:452
			OperatableStateMachine.add('Decide_Move_Back',
										OperatorDecisionState(outcomes=['move_back', 'end'], hint="Move back from pipe?", suggestion='move_back'),
										transitions={'move_back': 'Move_Back_Out', 'end': 'finished'},
										autonomy={'move_back': Autonomy.High, 'end': Autonomy.Full})


		# x:30 y:365, x:130 y:365
		_sm_select_pipe_5 = OperatableStateMachine(outcomes=['finished', 'next'], input_keys=['index', 'object_pose', 'object_data'], output_keys=['index', 'pipe_pose'])

		with _sm_select_pipe_5:
			# x:98 y:78
			OperatableStateMachine.add('Increment_Index',
										CalculationState(calculation=lambda x: x+1),
										transitions={'done': 'Select_Pipe'},
										autonomy={'done': Autonomy.Off},
										remapping={'input_value': 'index', 'output_value': 'index'})

			# x:102 y:178
			OperatableStateMachine.add('Select_Pipe',
										SelectPipePose(),
										transitions={'selected': 'next', 'out_of_range': 'finished'},
										autonomy={'selected': Autonomy.Off, 'out_of_range': Autonomy.Off},
										remapping={'object_pose': 'object_pose', 'object_data': 'object_data', 'pose_choice': 'index', 'pipe_pose': 'pipe_pose'})


		# x:427 y:433, x:453 y:131
		_sm_get_pipes_6 = OperatableStateMachine(outcomes=['finished', 'failed'], input_keys=['object_pose'], output_keys=['object_pose', 'object_data'])

		with _sm_get_pipes_6:
			# x:51 y:28
			OperatableStateMachine.add('Get_Observation_Arm_Config',
										GetJointsFromSrdfState(config_name="observation_pose", srdf_file=srdf, move_group="", robot_name=""),
										transitions={'retrieved': 'Move_To_Observation_Pose', 'file_error': 'failed'},
										autonomy={'retrieved': Autonomy.Off, 'file_error': Autonomy.Off},
										remapping={'joint_values': 'joints_observation_pose'})

			# x:53 y:111
			OperatableStateMachine.add('Move_To_Observation_Pose',
										MoveitToJointsState(move_group="arm_group", joint_names=joints_arm_with_gripper[0:4], action_topic='/move_group'),
										transitions={'reached': 'Detect_Pipes', 'planning_failed': 'failed', 'control_failed': 'failed'},
										autonomy={'reached': Autonomy.Low, 'planning_failed': Autonomy.High, 'control_failed': Autonomy.High},
										remapping={'joint_config': 'joints_observation_pose'})

			# x:67 y:303
			OperatableStateMachine.add('Get_Pipes_Pose',
										MonitorPerceptState(required_support=0, percept_class='pipes', track_percepts=False),
										transitions={'detected': 'Visualize_Pipes_Pose'},
										autonomy={'detected': Autonomy.Off},
										remapping={'object_id': 'object_id', 'object_pose': 'object_pose', 'object_data': 'object_data'})

			# x:59 y:396
			OperatableStateMachine.add('Visualize_Pipes_Pose',
										PublishPoseState(topic='/pipes/center_pose'),
										transitions={'done': 'finished'},
										autonomy={'done': Autonomy.Low},
										remapping={'pose': 'object_pose'})

			# x:72 y:210
			OperatableStateMachine.add('Detect_Pipes',
										PipeDetectionState(max_attempts=100),
										transitions={'found': 'Get_Pipes_Pose', 'unknown': 'failed'},
										autonomy={'found': Autonomy.Low, 'unknown': Autonomy.High})



		with _state_machine:
			# x:94 y:72
			OperatableStateMachine.add('Get_Pipes',
										_sm_get_pipes_6,
										transitions={'finished': 'Select_Pipe', 'failed': 'failed'},
										autonomy={'finished': Autonomy.Inherit, 'failed': Autonomy.Inherit},
										remapping={'object_pose': 'object_pose', 'object_data': 'object_data'})

			# x:492 y:328
			OperatableStateMachine.add('Move_To_Pregrasp',
										MoveitToJointsState(move_group="arm_group", joint_names=joints_arm_with_gripper[0:4], action_topic='/move_group'),
										transitions={'reached': 'EEF_To_Detected', 'planning_failed': 'failed', 'control_failed': 'failed'},
										autonomy={'reached': Autonomy.Low, 'planning_failed': Autonomy.High, 'control_failed': Autonomy.High},
										remapping={'joint_config': 'test_pregrasp'})

			# x:87 y:328
			OperatableStateMachine.add('Decide_Look_At',
										OperatorDecisionState(outcomes=['focus', 'skip'], hint="Should sensor head focus pipes?", suggestion='skip'),
										transitions={'focus': 'Look_At_Target', 'skip': 'Move_To_Pregrasp'},
										autonomy={'focus': Autonomy.Full, 'skip': Autonomy.High})

			# x:300 y:428
			OperatableStateMachine.add('Look_At_Target',
										LookAtWaypoint(),
										transitions={'reached': 'Move_To_Pregrasp', 'failed': 'failed'},
										autonomy={'reached': Autonomy.High, 'failed': Autonomy.Full},
										remapping={'waypoint': 'pipes_pose'})

			# x:94 y:222
			OperatableStateMachine.add('Select_Pipe',
										_sm_select_pipe_5,
										transitions={'finished': 'finished', 'next': 'Decide_Look_At'},
										autonomy={'finished': Autonomy.Inherit, 'next': Autonomy.Inherit},
										remapping={'index': 'index', 'object_pose': 'object_pose', 'object_data': 'object_data', 'pipe_pose': 'pipes_pose'})

			# x:783 y:322
			OperatableStateMachine.add('EEF_To_Detected',
										_sm_eef_to_detected_4,
										transitions={'finished': 'Select_Pipe', 'failed': 'Select_Pipe'},
										autonomy={'finished': Autonomy.Inherit, 'failed': Autonomy.Inherit},
										remapping={'pipes_pose': 'pipes_pose'})


		return _state_machine


	# Private functions can be added inside the following tags
	# [MANUAL_FUNC]
	def update_pose(self, pose, offset):
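		# Transform the detected pose into base_link, back off along x by `offset`,
		# and reset the orientation to the base_link axes (identity quaternion).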
		new_pose = self._tf.transformPose('base_link', pose)
		new_pose.pose.position.x -= offset
		new_pose.pose.orientation.x = 0
		new_pose.pose.orientation.y = 0
		new_pose.pose.orientation.z = 0
		new_pose.pose.orientation.w = 1
		return new_pose
class BrohoofSM(Behavior):
	'''
	Move leg1 up to the specified target, perform the greeting, wait, and put the leg down.

Robot is assumed to be standing on four legs.
	'''


	def __init__(self):
		super(BrohoofSM, self).__init__()
		self.name = 'Brohoof'

		# parameters of this behavior
		self.add_parameter('wait_time', 5)
		self.add_parameter('neck_angle', 0.25)
		self.add_parameter('brohoof_cone', 0.78)
		self.add_parameter('brohoof_upper_limit', 0.30)

		# references to used behaviors

		# Additional initialization code can be added inside the following tags
		# [MANUAL_INIT]

		self._tf = ProxyTransformListener().listener()
		
		# [/MANUAL_INIT]

		# Behavior comments:



	def create(self):
		# x:228 y:396, x:225 y:240, x:1010 y:659
		_state_machine = OperatableStateMachine(outcomes=['finished', 'unreachable', 'failed'], input_keys=['pose'])
		_state_machine.userdata.pose = PoseStamped(Header(frame_id = 'base_link'), Pose(Point(0.4, 0.0, -0.05), Quaternion(0, 0, 0, 1)))

		# Additional creation code can be added inside the following tags
		# [MANUAL_CREATE]
		
		# [/MANUAL_CREATE]


		with _state_machine:
			# x:118 y:85
			OperatableStateMachine.add('CheckHandPosition',
										DecisionState(outcomes=['good','bad'], conditions=self.checkHandPosition),
										transitions={'good': 'GetHeadPose', 'bad': 'unreachable'},
										autonomy={'good': Autonomy.Off, 'bad': Autonomy.Off},
										remapping={'input_value': 'pose'})

			# x:890 y:253
			OperatableStateMachine.add('SayHello',
										TextCommandState(type='voice/play_wav', command='hello_im_sweetie_bot_friedship_programms', topic='control'),
										transitions={'done': 'Wait'},
										autonomy={'done': Autonomy.Off})

			# x:663 y:69
			OperatableStateMachine.add('RaiseHead',
										MoveitToJointsState(move_group='head', joint_names=['joint51', 'joint52', 'joint53', 'joint54'], action_topic='move_group'),
										transitions={'reached': 'RaiseLegProgram', 'planning_failed': 'failed', 'control_failed': 'failed'},
										autonomy={'reached': Autonomy.Off, 'planning_failed': Autonomy.Off, 'control_failed': Autonomy.Off},
										remapping={'joint_config': 'head_pose'})

			# x:309 y:66
			OperatableStateMachine.add('GetHeadPose',
										GetJointValuesState(joints=['joint51', 'joint52', 'joint53', 'joint54']),
										transitions={'retrieved': 'CalcRaisedHeadPose'},
										autonomy={'retrieved': Autonomy.Off},
										remapping={'joint_values': 'head_pose'})

			# x:485 y:73
			OperatableStateMachine.add('CalcRaisedHeadPose',
										CalculationState(calculation=self.calculateHeadPoseFunc),
										transitions={'done': 'RaiseHead'},
										autonomy={'done': Autonomy.Off},
										remapping={'input_value': 'head_pose', 'output_value': 'head_pose'})

			# x:891 y:65
			OperatableStateMachine.add('RaiseLegProgram',
										ExecuteJointTrajectory(action_topic='motion/controller/joint_trajectory', trajectory_param='brohoof_begin', trajectory_ns='/saved_msgs/joint_trajectory'),
										transitions={'success': 'SayHello', 'partial_movement': 'failed', 'invalid_pose': 'failed', 'failure': 'failed'},
										autonomy={'success': Autonomy.Off, 'partial_movement': Autonomy.Off, 'invalid_pose': Autonomy.Off, 'failure': Autonomy.Off},
										remapping={'result': 'result'})

			# x:510 y:396
			OperatableStateMachine.add('ReturnLegProgrammed',
										ExecuteJointTrajectory(action_topic='motion/controller/joint_trajectory', trajectory_param='brohoof_end', trajectory_ns='/saved_msgs/joint_trajectory'),
                                                                                transitions={'success': 'finished', 'partial_movement': 'failed', 'invalid_pose': 'failed', 'failure': 'failed'},
										autonomy={'success': Autonomy.Off, 'partial_movement': Autonomy.Off, 'invalid_pose': Autonomy.Off, 'failure': Autonomy.Off},
										remapping={'result': 'result'})

			# x:851 y:399
			OperatableStateMachine.add('Wait',
										WaitState(wait_time=self.wait_time),
										transitions={'done': 'ReturnLegProgrammed'},
										autonomy={'done': Autonomy.Off})


		return _state_machine


	# Private functions can be added inside the following tags
	# [MANUAL_FUNC]
	def calculateHeadPoseFunc(self, joints):
		joints = list(joints)
		joints[0] = 0.0
		joints[2] = self.neck_angle
		return joints

	def checkHandPosition(self, input_pose):
		'''
		Check if the hand is in a good position for brohoof. Return 'good' or 'bad'.
		'''
		# check if frame header is present
		if not input_pose.header.frame_id:
			Logger.logerr('checkHandPosition: Header is missing.')
			return 'bad'
		# convert to base_link frame
		try:
			output_pose = self._tf.transformPose('base_link', PoseStamped(Header(frame_id = input_pose.header.frame_id), input_pose.pose))
		except tf.Exception as e:
			Logger.logerr('checkHandPosition: Cannot transform from `%s` to `base_link` frame.' % input_pose.header.frame_id)
			return 'bad'
		pos = output_pose.pose.position
		Logger.loginfo('checkHandPosition: object pos: %s' % str(pos))
		# Check boundaries
		if pos.z > self.brohoof_upper_limit or pos.z < -0.15:
			# too high or too low
			return 'bad'
		if pos.x < 0.25:
			# too close
			return 'bad'
		# Move to shoulder1 point
		pos.x -= 0.080
		pos.y -= 0.037
		pos.z += 0.027
		# Check if angle is out of cone
		if abs(math.atan2(pos.y, pos.x)) > self.brohoof_cone:
			# out of cone
			return 'bad'
		return 'good'
class CreatePurePursuitState(EventState):
    '''
    This state calculates the twist required to follow a constant curvature arc to the pure pursuit intersection point.
    The command is published as a TwistStamped command based on parameters.
    The state monitors the iRobot Create bumper status, and stops and returns a failed outcome if a bumper is activated.

    If arc motion is used, the arc should be less than or equal to pi/2 radians.  Use multiple targets for longer arcs.

       -- desired_velocity     float     Desired velocity in m/s (default: 0.2)
       -- max_rotation_rate    float     Maximum rotation rate radians/s (default: 10.0)
       -- target_frame:        string    Coordinate frame of target point (default: 'map')
       -- target_x:            float     Target point x-coordinate (m)
       -- target_y:            float     Target point y-coordinate (m)
       -- target_type:         string    Desired motion ('line','arc') (default: 'line')
       -- lookahead_distance:  float     Lookahead distance (m) (default:  0.25)
       -- timeout              float     Transform timeout (seconds) (default: 0.08)
       -- recover_mode         bool      Recover path (typically on startup) (default: False)
       -- center_x:            float     Center point x-coordinate for circle defining arc motion (default: 0.0)
       -- center_y:            float     Center point y-coordinate for circle defining arc motion (default: 0.0)
       -- cmd_topic            string    topic name of the robot command (default: '/create_node/cmd_vel')
       -- sensor_topic         string    topic of the iRobot Create sensor state (default: '/create_node/sensor_state')
       -- odometry_topic:      string    topic of the iRobot Create odometry (default: '/create_node/odom')
       -- marker_topic:        string    topic of the RViz marker used for visualization (default: '/pure_pursuit_marker')
       -- marker_size:         float     Size of RViz marker used for target (default: 0.05)
       ># prior_point          object    Prior waypoint at start of this segment
       #> target_point         object    Target point at end of this calculation (becomes next node's prior)
       <= done                 Reached the end of target relevance
       <= failed               A bumper was activated prior to completion
    '''
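    # Pure pursuit steering law (for reference): with the lookahead point expressed in the
    # robot frame as (x_l, y_l) and lookahead distance L, the commanded curvature is
    # kappa = 2 * y_l / L**2 and the angular rate is omega = desired_velocity * kappa,
    # clamped to +/- max_rotation_rate.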
    def __init__(self,
                 desired_velocity=0.2,
                 max_rotation_rate=10.0,
                 target_frame='map',
                 target_x=1.0,
                 target_y=0.1,
                 target_type='line',
                 lookahead_distance=0.25,
                 timeout=0.08,
                 recover_mode=False,
                 center_x=0.0,
                 center_y=0.0,
                 cmd_topic='/create_node/cmd_vel',
                 sensor_topic='/create_node/sensor_state',
                 odometry_topic='/create_node/odom',
                 marker_topic='/pure_pursuit_marker',
                 marker_size=0.05):
        # Declare outcomes, input_keys, and output_keys by calling the super constructor with the corresponding arguments.
        super(CreatePurePursuitState,
              self).__init__(outcomes=['done', 'failed'],
                             input_keys=['prior_point'],
                             output_keys=['target_point'])

        # Store state parameter for later use.
        self._twist = TwistStamped()
        self._twist.twist.linear.x = desired_velocity
        self._twist.twist.angular.z = 0.0

        self._desired_velocity = desired_velocity
        self._max_rotation_rate = max_rotation_rate  # Used to clamp the rotation calculations

        self._current_position = PointStamped()
        self._current_position.header.stamp = rospy.Time.now()
        self._current_position.header.frame_id = target_frame

        self._target = PointStamped()
        self._target.header.stamp = rospy.Time.now()
        self._target.header.frame_id = target_frame
        self._target.point = Point(target_x, target_y, 0.0)

        self._center = PointStamped()
        self._center.header = self._target.header
        self._center.point = Point(center_x, center_y, 0.0)

        self._lookahead = lookahead_distance
        self._recover_mode = recover_mode
        self._target_type = target_type
        self._target_frame = target_frame
        if (self._target_type == 'arc'):
            self._radius = np.sqrt((center_x - target_x)**2 +
                                   (center_y - target_y)**2)

            Logger.loginfo(
                'Using arc type with center=(%.3f, %.3f) target=(%.3f, %.3f) radius=%.3f'
                % (self._center.point.x, self._center.point.y,
                   self._target.point.x, self._target.point.y, self._radius))

        self._odom_frame = 'odom'  # Update with the first odometry message
        self._robot_frame = 'base_footprint'
        self._failed = False
        self._timeout = rospy.Duration(timeout)  # transform timeout

        # The constructor is called when building the state machine, not when actually starting the behavior.
        # Thus, we cannot save the starting time now and will do so later.
        self._start_time = None

        self._done = None  # Track the outcome so we can detect if transition is blocked

        self._odom_topic = odometry_topic
        self._sensor_topic = sensor_topic
        self._marker_topic = marker_topic
        self._cmd_topic = cmd_topic

        self._listener = ProxyTransformListener()

        self._sensor_sub = ProxySubscriberCached(
            {self._sensor_topic: CreateSensorState})
        self._odom_sub = ProxySubscriberCached({self._odom_topic: Odometry})
        self._pub = ProxyPublisher({self._cmd_topic: TwistStamped})

        if (self._marker_topic != ""):
            self._marker_pub = ProxyPublisher({self._marker_topic: Marker})
        else:
            self._marker_pub = None

        self._marker = Marker()
        self._marker.header.frame_id = self._target_frame
        self._marker.header.stamp = rospy.Time.now()
        self._marker.ns = "pure_pursuit_waypoints"
        self._marker.id = int(target_x * 1000000) + int(target_y * 1000)
        self._marker.type = Marker.SPHERE
        self._marker.action = Marker.ADD
        self._marker.pose.position.x = target_x
        self._marker.pose.position.y = target_y
        self._marker.pose.position.z = 0.0
        self._marker.pose.orientation.x = 0.0
        self._marker.pose.orientation.y = 0.0
        self._marker.pose.orientation.z = 0.0
        self._marker.pose.orientation.w = 1.0
        self._marker.scale.x = marker_size
        self._marker.scale.y = marker_size
        self._marker.scale.z = marker_size
        self._marker.color.a = 1.0  # Don't forget to set the alpha!
        self._marker.color.r = 0.0
        self._marker.color.g = 0.0
        self._marker.color.b = 1.0

        self._reference_marker = Marker()
        self._reference_marker.header.frame_id = self._target_frame
        self._reference_marker.header.stamp = rospy.Time.now()
        self._reference_marker.ns = "pure_pursuit_reference"
        self._reference_marker.id = 1
        self._reference_marker.type = Marker.SPHERE
        self._reference_marker.action = Marker.ADD
        self._reference_marker.pose.position.x = target_x
        self._reference_marker.pose.position.y = target_y
        self._reference_marker.pose.position.z = 0.0
        self._reference_marker.pose.orientation.x = 0.0
        self._reference_marker.pose.orientation.y = 0.0
        self._reference_marker.pose.orientation.z = 0.0
        self._reference_marker.pose.orientation.w = 1.0
        self._reference_marker.scale.x = marker_size * 0.75
        self._reference_marker.scale.y = marker_size * 0.75
        self._reference_marker.scale.z = marker_size * 0.75
        self._reference_marker.color.a = 0.0  # Add, but make invisible at first
        self._reference_marker.color.r = 1.0
        self._reference_marker.color.g = 0.0
        self._reference_marker.color.b = 1.0

        self._local_target_marker = Marker()
        self._local_target_marker.header.frame_id = self._target_frame
        self._local_target_marker.header.stamp = rospy.Time.now()
        self._local_target_marker.ns = "pure_pursuit_target"
        self._local_target_marker.id = 1
        self._local_target_marker.type = Marker.SPHERE
        self._local_target_marker.action = Marker.ADD
        self._local_target_marker.pose.position.x = target_x
        self._local_target_marker.pose.position.y = target_y
        self._local_target_marker.pose.position.z = 0.0
        self._local_target_marker.pose.orientation.x = 0.0
        self._local_target_marker.pose.orientation.y = 0.0
        self._local_target_marker.pose.orientation.z = 0.0
        self._local_target_marker.pose.orientation.w = 1.0
        self._local_target_marker.scale.x = marker_size
        self._local_target_marker.scale.y = marker_size
        self._local_target_marker.scale.z = marker_size
        self._local_target_marker.color.a = 0.0  # Add, but make invisible at first
        self._local_target_marker.color.r = 1.0
        self._local_target_marker.color.g = 0.0
        self._local_target_marker.color.b = 1.0

    # Transform point into odometry frame
    def transformOdom(self, point):
        try:
            # Get transform
            self._listener.listener().waitForTransform(self._odom_frame,
                                                       point.header.frame_id,
                                                       point.header.stamp,
                                                       self._timeout)
            return self._listener.listener().transformPoint(
                self._odom_frame, point)

        except (tf.LookupException, tf.ConnectivityException,
                tf.ExtrapolationException) as e:
            Logger.logwarn('Failed to get the transformation\n   %s' % str(e))
            self._failed = True
            return None
        except Exception as e:
            Logger.logwarn(
                'Failed to get the transformation due to unknown error\n    %s'
                % str(e))
            self._failed = True
            return None

    # Transform point into map frame
    def transformMap(self, odometry):

        odom_position = PointStamped()
        odom_position.header = odometry.header
        odom_position.point = odometry.pose.pose.position

        try:
            # Get transform
            self._listener.listener().waitForTransform(
                self._target_frame, odometry.header.frame_id,
                odometry.header.stamp, self._timeout)
            return self._listener.listener().transformPoint(
                self._target_frame, odom_position)

        except (tf.LookupException, tf.ConnectivityException,
                tf.ExtrapolationException) as e:
            Logger.logwarn(
                'Failed to get the transformation to target_frame\n   %s' %
                str(e))
            self._failed = True
            return None
        except Exception as e:
            Logger.logwarn(
                'Failed to get the transformation to target frame due to unknown error\n   %s'
                % str(e))
            self._failed = True
            return None

    # Transform point into robot body frame
    def transformBody(self, point):
        try:
            # Get transform
            self._listener.listener().waitForTransform(self._robot_frame,
                                                       point.header.frame_id,
                                                       point.header.stamp,
                                                       self._timeout)
            return self._listener.listener().transformPoint(
                self._robot_frame, point)

        except (tf.LookupException, tf.ConnectivityException,
                tf.ExtrapolationException) as e:
            Logger.logwarn('Failed to get the transformation\n   %s' % str(e))
            self._failed = True
            return None
        except Exception as e:
            Logger.logwarn(
                'Failed to get the transformation due to unknown error\n    %s'
                % str(e))
            self._failed = True
            return None

    def execute(self, userdata):
        # This method is called periodically while the state is active.
        # If no outcome is returned, the state will stay active.

        if (self._done):
            # We have completed the state, and therefore must be blocked by autonomy level
            # Stop the robot and return the prior outcome
            ts = TwistStamped()
            ts.header.stamp = rospy.Time.now()
            self._pub.publish(self._cmd_topic, ts)
            userdata.target_point = self._target  # Set the user data for passing to next node
            return self._done

        # Check bumpers to see if we hit something
        if (self._sensor_sub.has_msg(self._sensor_topic)):
            # check the status of the bumpers
            sensors = self._sensor_sub.get_last_msg(self._sensor_topic)
            if ((sensors.bumps_wheeldrops > 0) or sensors.cliff_left
                    or sensors.cliff_front_left or sensors.cliff_front_right
                    or sensors.cliff_right):
                ts = TwistStamped()
                ts.header.stamp = rospy.Time.now()
                self._pub.publish(self._cmd_topic, ts)
                userdata.target_point = self._target  # Set the user data for passing to next node
                Logger.logwarn(
                    '%s  Bumper contact = %d  cliff: left=%d front_left=%d front_right=%d right=%d'
                    % (self.name, sensors.bumps_wheeldrops, sensors.cliff_left,
                       sensors.cliff_front_left, sensors.cliff_front_right,
                       sensors.cliff_right))
                self._done = 'failed'
                return 'failed'

        # Get the latest odometry data
        if (self._odom_sub.has_msg(self._odom_topic)):
            self._last_odom = self._odom_sub.get_last_msg(self._odom_topic)

        # Update the current pose
        self._current_position = self.transformMap(self._last_odom)
        if (self._failed):
            userdata.target_point = self._target  # Set the user data for passing to next node
            self._done = 'failed'
            return 'failed'

        # Transform the target points into the current odometry frame
        self._target.header.stamp = self._last_odom.header.stamp
        local_target = self._target
        #self.transformOdom(self._target)

        # If the target point is within the lookahead distance then we are done
        dr = np.sqrt(
            (local_target.point.x - self._current_position.point.x)**2 +
            (local_target.point.y - self._current_position.point.y)**2)
        if (dr < self._lookahead):
            Logger.loginfo(
                ' %s  Lookahead circle is past target - done!  target=(%f, %f, %f) robot=(%f,%f, %f)  dr=%f lookahead=%f '
                % (self.name, local_target.point.x, local_target.point.y,
                   local_target.point.z, self._current_position.point.x,
                   self._current_position.point.y,
                   self._current_position.point.z, dr, self._lookahead))
            userdata.target_point = self._target  # Set the user data for passing to next node
            self._done = 'done'
            return 'done'

        # Transform the prior target point into the current odometry frame
        self._prior_point.header.stamp = self._last_odom.header.stamp
        local_prior = self._prior_point  #self.transformOdom(self._prior_point)
        if (self._failed):
            userdata.target_point = self._target  # Set the user data for passing to next node
            self._done = 'failed'
            return 'failed'

        # Assume we can go the desired velocity
        self._twist.twist.linear.x = self._desired_velocity

        lookahead = None
        if (self._target_type == 'arc'):
            lookahead = self.calculateArcTwist(local_prior, local_target)
        else:
            lookahead = self.calculateLineTwist(local_prior, local_target)

        if (lookahead is None):
            # Did not get a lookahead point so we either failed, completed segment, or are deliberately holding the prior velocity (to recover from minor perturbations)
            if (self._done is not None):
                userdata.target_point = self._target  # Set the user data for passing to next node
            return self._done  # return what was set (either 'failed' or 'done')

        # Sanity check the rotation rate
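        # Note: clamping the angular rate alone would change the commanded
        # curvature (angular rate / linear speed).  Scaling the linear speed by
        # max_rotation_rate / |angular rate| before saturating the angular rate
        # keeps the arc the robot actually drives consistent with the pure
        # pursuit solution.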
        if (math.fabs(self._twist.twist.angular.z) > self._max_rotation_rate):
            self._twist.twist.linear.x = self._desired_velocity * self._max_rotation_rate / math.fabs(
                self._twist.twist.angular.z)  # decrease the speed
            self._twist.twist.angular.z = math.copysign(
                self._max_rotation_rate, self._twist.twist.angular.z)

        # Normal operation - publish the latest calculated twist
        self._twist.header.stamp = rospy.Time.now()
        self._pub.publish(self._cmd_topic, self._twist)

        if (self._marker_pub):
            self._marker_pub.publish(self._marker_topic,
                                     self._reference_marker)
            self._marker_pub.publish(self._marker_topic,
                                     self._local_target_marker)
        return None

    def on_enter(self, userdata):
        # This method is called when the state becomes active, i.e. a transition from another state to this one is taken.
        self._start_time = rospy.Time.now()
        self._done = None  # reset the completion flag
        self._failed = False  # reset the failed flag

        self._prior_point = userdata.prior_point

        if (self._marker_pub):
            self._marker.action = Marker.MODIFY
            self._marker.color.a = 1.0
            self._marker.color.r = 0.0
            self._marker.color.g = 1.0  # Indicate this target is active
            self._marker.color.b = 0.0
            self._marker_pub.publish(self._marker_topic, self._marker)

        #Logger.logdebug("  Moving toward  target=%f,%f  from position=%f,%f" %
        #        (self._target.point.x, self._target.point.y,
        #         self._current_position.point.x, self._current_position.point.y))

    def on_exit(self, userdata):
        # This method is called when an outcome is returned and another state becomes active.
        elapsed_time = rospy.Time.now() - self._start_time
        userdata.target_point = self._target  # Set the user data for passing to next node

        #Logger.logdebug("  Spent %f seconds in this segment  target=%f,%f  position=%f,%f" %
        #        (elapsed_time.to_sec(),self._target.point.x, self._target.point.y,
        #         self._current_position.point.x, self._current_position.point.y))

        if (self._marker_pub):
            self._marker.color.a = 1.0  # Don't forget to set the alpha!
            self._marker.color.r = 0.8  # Indicate this target is no longer active
            self._marker.color.g = 0.0
            self._marker.color.b = 0.0
            self._marker_pub.publish(self._marker_topic, self._marker)

    def on_start(self):

        # Wait for sensor and odometry messages
        while (not self._sensor_sub.has_msg(self._sensor_topic)):
            Logger.logwarn('Waiting for sensor state message from the robot')
            rospy.sleep(0.05)

        while (not self._odom_sub.has_msg(self._odom_topic)):
            Logger.logwarn(
                'Waiting for odometry message to become available from the robot '
            )
            rospy.sleep(0.25)

        self._last_odom = self._odom_sub.get_last_msg(self._odom_topic)
        self._odom_frame = self._last_odom.header.frame_id
        #Logger.loginfo('   odometry frame id <%s>' % (self._odom_frame))

        # Update the target transformation
        self._target.header.stamp = self._last_odom.header.stamp
        while (self.transformOdom(self._target) is None):
            Logger.logwarn(
                'Waiting for tf transformations to odometry frame to become available from the robot '
            )
            rospy.sleep(0.25)
            self._target.header.stamp = rospy.Time.now()

        while (self.transformMap(self._last_odom) is None):
            Logger.logwarn(
                'Waiting for tf transformations to map frame become available from the robot '
            )
            rospy.sleep(0.25)
            self._last_odom = self._odom_sub.get_last_msg(self._odom_topic)

        self._current_position = self.transformMap(self._last_odom)

        # on_start is called once when the behavior is started, so publish the initial markers here.
        if (self._marker_pub):
            self._marker.action = Marker.ADD
            self._marker.color.a = 1.0  # Set alpha otherwise the marker is invisible
            self._marker.color.r = 0.0
            self._marker.color.g = 0.0
            self._marker.color.b = 1.0  # Indicate this target is planned
            self._marker_pub.publish(self._marker_topic, self._marker)
            self._marker_pub.publish(self._marker_topic,
                                     self._reference_marker)
            self._marker_pub.publish(self._marker_topic,
                                     self._local_target_marker)
            self._marker.action = Marker.MODIFY
            self._reference_marker.action = Marker.MODIFY
            self._reference_marker.color.a = 1.0  # Set alpha so it will become visible on next publish
            self._local_target_marker.action = Marker.MODIFY
            self._local_target_marker.color.a = 1.0  # Set alpha so it will become visible on next publish

    # Attempt to recover the path for large error
    def recoverPath(self, local_target, local_prior, pv, qv):
        pp = pv.x * pv.x + pv.y * pv.y
        qq = qv.x * qv.x + qv.y * qv.y
        qp = -qv.x * pv.x - qv.y * pv.y  # negate qv

        dp = qp / pp  # fractional distance along the pv vector
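        # In other words, dp is the normalized projection of the robot onto the
        # segment direction: dp = ((robot - prior) . pv) / |pv|^2, so the closest
        # point on the line is prior + dp * pv.  dp <= 0 puts that point before
        # the prior waypoint, dp >= 1 puts it beyond the target, and values in
        # between fall within the segment (handled by the cases below).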

        # Assume we steer toward the initial point
        control = PointStamped()
        control.header = local_prior.header
        control.point = deepcopy(local_prior.point)

        if (dp > 1.0):
            # Steer toward the target point
            control = local_target
        elif (dp > 0.0):
            # Steer toward the closest point
            control.point.x = control.point.x + dp * pv.x  # Control point in the odometry frame
            control.point.y = control.point.y + dp * pv.y  # Control point in the odometry frame
            control.point.z = control.point.z + dp * pv.z  # Control point in the odometry frame

        self._reference_marker.pose.position = deepcopy(control.point)
        self._local_target_marker.pose.position = deepcopy(local_target.point)

        control_robot = self.transformBody(control)

        if (control_robot.point.x <= 0.001):
            control_robot = self.transformBody(local_target)  # One last try
            if (control_robot.point.x <= 0.001):
                dist = control_robot.point.x * control_robot.point.x + control_robot.point.y * control_robot.point.y
                if (dist > 2.5):
                    Logger.loginfo(
                        "recovery control point is behind the robot and far away - abort recovery!"
                    )
                    return False
                else:
                    # Target is close enough - do a zero radius turn toward the target line until our closest point is ahead
                    self._twist.twist.linear.x = 0.0
                    self._twist.twist.angular.z = math.copysign(
                        self._desired_velocity / 0.13, control_robot.point.y)
                    return True
            else:
                # Target is ahead - do a zero radius turn toward the target line until our closest point is ahead
                self._twist.twist.linear.x = 0.0
                self._twist.twist.angular.z = math.copysign(
                    self._desired_velocity / 0.13, control_robot.point.y)
                return True

        # Assume lookahead tangent to the control point
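        # Pure pursuit relation: for an arc that starts at the robot, is tangent
        # to its heading, and passes through a control point with lateral offset
        # y at chord length L, the curvature is kappa = 2 * y / L^2 and the
        # commanded angular rate is kappa * v.  The recovery control point is not
        # necessarily at the nominal lookahead distance, so L^2 below is the
        # actual squared distance |dc|^2 rather than self._lookahead**2.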
        dc = Vector3(control.point.x - self._current_position.point.x,
                     control.point.y - self._current_position.point.y, 0.0)
        curvature = 2.0 * control_robot.point.y / (dc.x * dc.x + dc.y * dc.y)
        if (np.isnan(curvature)):
            Logger.logerr("invalid curvature calculation   abort recovery!")
            return False

        self._twist.twist.angular.z = curvature * self._desired_velocity
        return True

    # Method to calculate the lookahead point given line segment from prior to target
    def calculateLineTwist(self, local_prior, local_target):

        # Define a line segment from prior to the target (assume 2D)
        pv = Vector3(local_target.point.x - local_prior.point.x,
                     local_target.point.y - local_prior.point.y, 0.0)
        qv = Vector3(local_prior.point.x - self._current_position.point.x,
                     local_prior.point.y - self._current_position.point.y, 0.0)

        # Find intersection of line segment with lookahead circle centered at the  robot
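        # Points on the segment are parameterized as p(t) = prior + t * pv with
        # t in [0, 1].  Requiring |p(t) - robot|^2 = lookahead^2, and noting that
        # prior - robot = qv, gives the quadratic a*t^2 + b*t + c = 0 solved below.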
        a = pv.x * pv.x + pv.y * pv.y  #
        b = 2.0 * (qv.x * pv.x + qv.y * pv.y)
        c = (qv.x * qv.x + qv.y * qv.y) - self._lookahead * self._lookahead

        if (a < 0.001):
            Logger.logerr(
                ' %s  Invalid prior and target for line  target=(%f, %f, %f) prior=(%f, %f, %f) robot=(%f,%f, %f)  pv=(%f,%f) qv=(%f,%f) a=%f b=%f c=%f '
                % (self.name, local_target.point.x, local_target.point.y,
                   local_target.point.z, local_prior.point.x,
                   local_prior.point.y, local_prior.point.z,
                   self._current_position.point.x,
                   self._current_position.point.y,
                   self._current_position.point.z, pv.x, pv.y, qv.x, qv.y, a,
                   b, c))
            self._done = 'failed'
            return None

        discrim = b * b - 4 * a * c
        if (discrim < 0.0):
            # No intersection - this should not be unless bad start or localization perturbation
            if (self._recover_mode):
                # Attempt to regain path
                if (not self.recoverPath(local_target, local_prior, pv, qv)):
                    Logger.logwarn(
                        ' %s  Path recovery failed for line  target=(%f, %f, %f) prior=(%f, %f, %f) robot=(%f,%f, %f)  pv=(%f,%f) qv=(%f,%f) a=%f b=%f c=%f discrim=%f '
                        % (self.name, local_target.point.x,
                           local_target.point.y, local_target.point.z,
                           local_prior.point.x, local_prior.point.y,
                           local_prior.point.z, self._current_position.point.x,
                           self._current_position.point.y,
                           self._current_position.point.z, pv.x, pv.y, qv.x,
                           qv.y, a, b, c, discrim))
                    self._done = 'failed'
                    return None

            else:
                Logger.logwarn(
                    ' %s  No path recovery - no intersection for line  target=(%f, %f, %f) prior=(%f, %f, %f) robot=(%f,%f, %f)  pv=(%f,%f) qv=(%f,%f) a=%f b=%f c=%f discrim=%f '
                    % (self.name, local_target.point.x, local_target.point.y,
                       local_target.point.z, local_prior.point.x,
                       local_prior.point.y, local_prior.point.z,
                       self._current_position.point.x,
                       self._current_position.point.y,
                       self._current_position.point.z, pv.x, pv.y, qv.x, qv.y,
                       a, b, c, discrim))
                self._done = 'failed'
                return None
        else:
            # solve quadratic equation for intersection points
            sqd = math.sqrt(discrim)
            t1 = (-b - sqd) / (2 * a)  # min value
            t2 = (-b + sqd) / (2 * a)  # max value
            if (t2 < t1):
                Logger.logwarn(
                    ' %s  Say what!: t1=%f t2=%f sqd=%f target=(%f, %f, %f) prior=(%f, %f, %f) robot=(%f,%f, %f)  pv=(%f,%f) qv=(%f,%f) a=%f b=%f c=%f discrim=%f '
                    % (self.name, t1, t2, sqd, local_target.point.x,
                       local_target.point.y, local_target.point.z,
                       local_prior.point.x, local_prior.point.y,
                       local_prior.point.z, self._current_position.point.x,
                       self._current_position.point.y,
                       self._current_position.point.z, pv.x, pv.y, qv.x, qv.y,
                       a, b, c, discrim))
                self._done = 'failed'
                return None

            if (t2 < 0.0):
                # all intersections are behind the segment - this should not be unless bad start or localization perturbation
                if (self._recover_mode):
                    # Attempt to regain path
                    if (not self.recoverPath(local_target, local_prior, pv,
                                             qv)):
                        Logger.logwarn(
                            ' %s  Path recovery failed with circle before segment target=(%f, %f, %f) prior=(%f, %f, %f) robot=(%f,%f, %f)  pv=(%f,%f) qv=(%f,%f) a=%f b=%f c=%f discrim=%f '
                            % (self.name, local_target.point.x,
                               local_target.point.y, local_target.point.z,
                               local_prior.point.x, local_prior.point.y,
                               local_prior.point.z,
                               self._current_position.point.x,
                               self._current_position.point.y,
                               self._current_position.point.z, pv.x, pv.y,
                               qv.x, qv.y, a, b, c, discrim))
                        self._done = 'failed'
                        return None
                elif (t2 > -0.1):
                    # likely due to localization perturbation
                    Logger.logwarn(
                        ' %s  Circle is before segment - continue prior motion! t1=%f t2=%f sqd=%f target=(%f, %f, %f) prior=(%f, %f, %f) robot=(%f,%f, %f)  pv=(%f,%f) qv=(%f,%f) a=%f b=%f c=%f discrim=%f '
                        % (self.name, t1, t2, sqd, local_target.point.x,
                           local_target.point.y, local_target.point.z,
                           local_prior.point.x, local_prior.point.y,
                           local_prior.point.z, self._current_position.point.x,
                           self._current_position.point.y,
                           self._current_position.point.z, pv.x, pv.y, qv.x,
                           qv.y, a, b, c, discrim))
                    return None
                else:
                    Logger.logwarn(
                        ' %s  Circle is before segment! t1=%f t2=%f sqd=%f target=(%f, %f, %f) prior=(%f, %f, %f) robot=(%f,%f, %f)  pv=(%f,%f) qv=(%f,%f) a=%f b=%f c=%f discrim=%f '
                        % (self.name, t1, t2, sqd, local_target.point.x,
                           local_target.point.y, local_target.point.z,
                           local_prior.point.x, local_prior.point.y,
                           local_prior.point.z, self._current_position.point.x,
                           self._current_position.point.y,
                           self._current_position.point.z, pv.x, pv.y, qv.x,
                           qv.y, a, b, c, discrim))
                    self._done = 'failed'
                    return None
            elif (t1 > 1.0):
                # all intersections are past the segment
                Logger.loginfo(
                    ' %s  Circle is past segment - done! t1=%f t2=%f sqd=%f target=(%f, %f, %f) prior=(%f, %f, %f) robot=(%f,%f, %f)  pv=(%f,%f) qv=(%f,%f) a=%f b=%f c=%f discrim=%f '
                    % (self.name, t1, t2, sqd, local_target.point.x,
                       local_target.point.y, local_target.point.z,
                       local_prior.point.x, local_prior.point.y,
                       local_prior.point.z, self._current_position.point.x,
                       self._current_position.point.y,
                       self._current_position.point.z, pv.x, pv.y, qv.x, qv.y,
                       a, b, c, discrim))
                self._done = 'done'
                return None
            elif (t1 < 0.0 and t2 > 1.0):
                # Segment is contained inside the lookahead circle
                Logger.loginfo(
                    ' %s  Circle contains segment - move along!: t1=%f t2=%f sqd=%f target=(%f, %f, %f) prior=(%f, %f, %f) robot=(%f,%f, %f)  pv=(%f,%f) qv=(%f,%f) a=%f b=%f c=%f discrim=%f '
                    % (self.name, t1, t2, sqd, local_target.point.x,
                       local_target.point.y, local_target.point.z,
                       local_prior.point.x, local_prior.point.y,
                       local_prior.point.z, self._current_position.point.x,
                       self._current_position.point.y,
                       self._current_position.point.z, pv.x, pv.y, qv.x, qv.y,
                       a, b, c, discrim))
                self._done = 'done'
                return None
            elif (t2 > 1.0):
                # The lookahead circle extends beyond the target point - we are finished here
                Logger.loginfo(
                    ' %s  Circle extends past segment - done! t1=%f t2=%f sqd=%f target=(%f, %f, %f) prior=(%f, %f, %f) robot=(%f,%f, %f)  pv=(%f,%f) qv=(%f,%f) a=%f b=%f c=%f discrim=%f '
                    % (self.name, t1, t2, sqd, local_target.point.x,
                       local_target.point.y, local_target.point.z,
                       local_prior.point.x, local_prior.point.y,
                       local_prior.point.z, self._current_position.point.x,
                       self._current_position.point.y,
                       self._current_position.point.z, pv.x, pv.y, qv.x, qv.y,
                       a, b, c, discrim))
                self._done = 'done'
                return None
            else:
                # This is the normal case
                # Must be line segment
                control = deepcopy(local_prior)
                control.point.x = control.point.x + t2 * pv.x  # Control point in the odometry frame
                control.point.y = control.point.y + t2 * pv.y  # Control point in the odometry frame
                control.point.z = control.point.z + t2 * pv.z  # Control point in the odometry frame
                self._reference_marker.pose.position = control.point
                self._local_target_marker.pose.position = local_target.point

                control_robot = self.transformBody(control)
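                # Same pure pursuit relation as in recoverPath(): curvature =
                # 2 * y / L^2, with y = control_robot.point.y (lateral offset in
                # the robot frame) and L equal to the lookahead distance, since
                # the control point lies on the lookahead circle by construction.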

                curvature = 2.0 * control_robot.point.y / (self._lookahead *
                                                           self._lookahead)
                self._twist.twist.angular.z = curvature * self._desired_velocity
                return control_robot

        return "Recovery"  # Must have been in recovery mode

    # Method to calculate the lookahead point and twist given arc segment from prior to target
    def calculateArcTwist(self, local_prior, local_target):

        # Transform the relevant points into the odometry frame
        self._center.header.stamp = self._last_odom.header.stamp
        local_center = self._center  #self.transformOdom(self._center)
        if (self._failed):
            self._done = 'failed'
            return None

        # Format for the equations
        xa = local_center.point.x
        ya = local_center.point.y
        ra = self._radius

        xr = self._current_position.point.x
        yr = self._current_position.point.y
        rl = self._lookahead

        # Calculate the discriminant used in x- and y- calculations
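        # The lookahead point is an intersection of two circles: the arc circle
        # centered at (xa, ya) with radius ra, and the lookahead circle centered
        # at the robot (xr, yr) with radius rl.  xd and yd are the discriminants
        # of the closed-form x- and y-solutions of that pair of circle equations;
        # a negative value means the circles do not intersect.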
        xd = (-((ra - rl)**2 - (xa - xr)**2 - (ya - yr)**2) *
              ((ra + rl)**2 - (xa - xr)**2 - (ya - yr)**2) * (ya - yr)**2
              )  # discriminant for x solution

        yd = (-(ra**4 + (-rl**2 + (xa - xr)**2 + (ya - yr)**2)**2 - 2 * ra**2 *
                (rl**2 + (xa - xr)**2 + (ya - yr)**2)) * (ya - yr)**2)

        discrim = np.min((xd, yd))

        if (discrim < 0.0):
            # No intersection - this should not be unless bad start
            if (self._recover_mode):
                # Attempt to regain path by driving toward the chord defining the arc
                pv = Vector3(local_target.point.x - local_prior.point.x,
                             local_target.point.y - local_prior.point.y, 0.0)
                qv = Vector3(
                    local_prior.point.x - self._current_position.point.x,
                    local_prior.point.y - self._current_position.point.y, 0.0)
                if (not self.recoverPath(local_target, local_prior, pv, qv)):
                    Logger.logwarn(
                        ' Path recovery failed for arc  target=(%f, %f, %f) prior=(%f, %f, %f) robot=(%f,%f, %f) rrad=%f  center=(%f,%f) crad=%f discrim: xd=%f yd=%f  '
                        % (local_target.point.x, local_target.point.y,
                           local_target.point.z, local_prior.point.x,
                           local_prior.point.y, local_prior.point.z,
                           self._current_position.point.x,
                           self._current_position.point.y,
                           self._current_position.point.z, rl, xa, ya, ra, xd,
                           yd))
                    self._done = 'failed'
                    return None

            else:
                Logger.logwarn(
                    ' No path recovery - no intersection for arc  target=(%f, %f, %f) prior=(%f, %f, %f) robot=(%f,%f, %f)  rrad=%f  center=(%f,%f) crad=%f  discrim: xd=%f yd=%f   '
                    % (local_target.point.x, local_target.point.y,
                       local_target.point.z, local_prior.point.x,
                       local_prior.point.y, local_prior.point.z,
                       self._current_position.point.x,
                       self._current_position.point.y,
                       self._current_position.point.z, rl, xa, ya, ra, xd, yd))
                self._done = 'failed'
                return None
        else:
            # solve quadratic equation for intersection points
            xp = (1. / (2 * ((xa - xr)**2 + (ya - yr)**2))
                  )  # x solution prefix

            x1 = xp * (rl**2 * (xa - xr) + ra**2 * (-xa + xr) + (xa + xr) *
                       ((xa - xr)**2 + (ya - yr)**2) - np.sqrt(xd))
            x2 = xp * (rl**2 * (xa - xr) + ra**2 * (-xa + xr) + (xa + xr) *
                       ((xa - xr)**2 + (ya - yr)**2) + np.sqrt(xd))

            y1 = (-ra**2 * (ya - yr)**2 + (xa - xr) * np.sqrt(yd) + (ya - yr) *
                  (rl**2 * (ya - yr) + ((xa - xr)**2 + (ya - yr)**2) *
                   (ya + yr))) / (2 * ((xa - xr)**2 + (ya - yr)**2) *
                                  (ya - yr))
            y2 = (-ra**2 * (ya - yr)**2 + (xr - xa) * np.sqrt(yd) + (ya - yr) *
                  (rl**2 * (ya - yr) + ((xa - xr)**2 + (ya - yr)**2) *
                   (ya + yr))) / (2 * ((xa - xr)**2 + (ya - yr)**2) *
                                  (ya - yr))

            # Choose the intersection closest to the target point
            # All vectors on circle should have same radius from center.  A*B = |A||B|Cos(theta)
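            # Since all of these points lie on the arc circle, each dot product
            # equals ra^2 * cos(angle to the target as seen from the center); a
            # larger value therefore means a smaller central angle to the target,
            # i.e. the candidate that is farther along the arc.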
            dt = np.array(
                [local_target.point.x - xa, local_target.point.y - ya])
            dp = np.array([local_prior.point.x - xa, local_prior.point.y - ya])
            d1 = np.array([x1 - xa, y1 - ya])
            d2 = np.array([x2 - xa, y2 - ya])

            ap = np.dot(dt, dp)  # > implies smaller angle to target
            a1 = np.dot(dt, d1)
            a2 = np.dot(dt, d2)

            control = deepcopy(local_prior)
            if (ap > a1 and ap > a2):
                # Intersections must be before the prior point
                pv = Vector3(local_target.point.x - local_prior.point.x,
                             local_target.point.y - local_prior.point.y, 0.0)
                qv = Vector3(
                    local_prior.point.x - self._current_position.point.x,
                    local_prior.point.y - self._current_position.point.y, 0.0)
                if (self._recover_mode):
                    # Attempt to regain path by driving toward the chord defining the arc
                    if (not self.recoverPath(local_target, local_prior, pv,
                                             qv)):
                        Logger.logwarn(
                            ' Path recovery failed for arc target=(%f, %f, %f) prior=(%f, %f, %f) robot=(%f,%f, %f)  center=(%f,%f)  pv=(%f,%f) qv=(%f,%f) discrim: xd=%f yd=%f   '
                            % (local_target.point.x, local_target.point.y,
                               local_target.point.z, local_prior.point.x,
                               local_prior.point.y, local_prior.point.z,
                               self._current_position.point.x,
                               self._current_position.point.y,
                               self._current_position.point.z, xa, ya, pv.x,
                               pv.y, qv.x, qv.y, xd, yd))
                        Logger.logwarn(
                            "  dt=(%f, %f)  dp=(%f, %f) d1=(%f, %f) d2=(%f, %f)  ap=%f a1=%f a2=%f"
                            % (dt[0], dt[1], dp[0], dp[1], d1[0], d1[1], d2[0],
                               d2[1], ap, a1, a2))
                        self._done = 'failed'
                        return None
                else:
                    # Check to see if we are close to prior in case of perturbation due to localization
                    dp1 = np.dot(dp, d1)
                    dp1 = dp1 / (np.linalg.norm(dp) * np.linalg.norm(d1))
                    dp2 = np.dot(dp, d2)
                    dp2 = dp2 / (np.linalg.norm(dp) * np.linalg.norm(d2))

                    dpm = np.max((dp1, dp2))

                    if (dpm > 0.9):
                        Logger.logwarn(
                            'Before prior but close, hold current motion!  target=(%f, %f, %f) prior=(%f, %f, %f) robot=(%f,%f, %f)   center=(%f,%f)  pv=(%f,%f) qv=(%f,%f) discrim: xd=%f yd=%f '
                            % (local_target.point.x, local_target.point.y,
                               local_target.point.z, local_prior.point.x,
                               local_prior.point.y, local_prior.point.z,
                               self._current_position.point.x,
                               self._current_position.point.y,
                               self._current_position.point.z, xa, ya, pv.x,
                               pv.y, qv.x, qv.y, xd, yd))
                        Logger.logwarn(
                            "  dt=(%f, %f)  dp=(%f, %f) d1=(%f, %f) d2=(%f, %f)  ap=%f a1=%f a2=%f dp1=%f dp2=%f (%f)"
                            % (dt[0], dt[1], dp[0], dp[1], d1[0], d1[1], d2[0],
                               d2[1], ap, a1, a2, dp1, dp2, dpm))
                        return None
                    else:
                        Logger.logwarn(
                            ' No path recovery - no valid intersection for arc  target=(%f, %f, %f) prior=(%f, %f, %f) robot=(%f,%f, %f)   center=(%f,%f)  pv=(%f,%f) qv=(%f,%f) discrim: xd=%f yd=%f '
                            % (local_target.point.x, local_target.point.y,
                               local_target.point.z, local_prior.point.x,
                               local_prior.point.y, local_prior.point.z,
                               self._current_position.point.x,
                               self._current_position.point.y,
                               self._current_position.point.z, xa, ya, pv.x,
                               pv.y, qv.x, qv.y, xd, yd))
                        Logger.logwarn(
                            "  dt=(%f, %f)  dp=(%f, %f) d1=(%f, %f) d2=(%f, %f)  ap=%f a1=%f a2=%f dp1=%f dp2=%f (%f)"
                            % (dt[0], dt[1], dp[0], dp[1], d1[0], d1[1], d2[0],
                               d2[1], ap, a1, a2, dp1, dp2, dpm))
                        self._done = 'failed'
                        return None
            elif (a1 > a2):
                # Use intersection 1
                control.point.x = x1
                control.point.y = y1

            else:
                # Use intersection 2
                control.point.x = x2
                control.point.y = y2

            self._reference_marker.pose.position = deepcopy(control.point)
            self._local_target_marker.pose.position = deepcopy(
                local_target.point)

            control_robot = self.transformBody(control)

            curvature = 2.0 * control_robot.point.y / (self._lookahead *
                                                       self._lookahead)
            self._twist.twist.angular.z = curvature * self._desired_velocity
            return control_robot

        return "Recovery"  # Must have been in recovery mode
Ejemplo n.º 17
0
class PurePursuitState(EventState):
    """
    This state calculates the twist required to follow a constant curvature arc to the pure pursuit intersection point.
    The command is published as a TwistStamped command based on parameters.

    If arc motion is used, the arc should be less than or equal to pi/2 radians.  Use multiple targets for longer arcs.

       -- desired_velocity     float     Desired velocity in m/s (default: 0.2)
       -- max_rotation_rate    float     Maximum rotation rate radians/s (default: 10.0)
       -- target_frame:        string    Coordinate frame of target point (default: 'map')
       -- target_x:            float     Target point x-coordinate (m)
       -- target_y:            float     Target point y-coordinate (m)
       -- target_type:         string    Desired motion ('line','arc') (default: 'line')
       -- lookahead_distance:  float     Lookahead distance (m) (default:  0.25)
       -- timeout              float     Transform timeout (seconds) (default: 0.08)
       -- recover_mode         bool      Recover path (typically on startup) (default: False)
       -- center_x:            float     Center point x-coordinate for circle defining arc motion (default: 0.0)
       -- center_y:            float     Center point y-coordinate for circle defining arc motion (default: 0.0)
       -- cmd_topic            string    topic name of the robot command (default: 'cmd_vel')
       -- odometry_topic:      string    topic of the robot odometry (default: 'odom')
       -- marker_topic:        string    topic of the RViz marker used for visualization (default: 'pure_pursuit_marker')
       -- marker_size:         float     Size of RViz marker used for target (default: 0.05)

       ># indice               int      Index of the current target waypoint
       ># plan                 Path     The path being followed

       #> indice               int      The index + 1
       #> plan                 Path     The path

       <= done                 Reached the end of target relevance
       <= continue             Continue following the path
       <= failed               A bumper was activated prior to completion
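
       Illustrative wiring sketch (not from the original source): looping the
       'continue' outcome back to this state steps it along the plan one
       waypoint at a time (hypothetical machine and label names):

           _sm.add('FollowPath',
                   PurePursuitState(desired_velocity=0.2, lookahead_distance=0.25),
                   transitions={'continue': 'FollowPath', 'done': 'finished',
                                'failed': 'failed'},
                   autonomy={'continue': Autonomy.Off, 'done': Autonomy.Off,
                             'failed': Autonomy.Off},
                   remapping={'indice': 'indice', 'plan': 'plan'})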
    """
    def __init__(self,
                 desired_velocity=0.2,
                 max_rotation_rate=10.0,
                 target_frame='map',
                 target_x=1.0,
                 target_y=0.1,
                 target_type='line',
                 lookahead_distance=0.25,
                 timeout=0.08,
                 recover_mode=False,
                 center_x=0.0,
                 center_y=0.0,
                 cmd_topic='cmd_vel',
                 odometry_topic='odom',
                 marker_topic='pure_pursuit_marker',
                 marker_size=0.05):
        # Declare outcomes, input_keys, and output_keys by calling the super constructor with the corresponding arguments.
        super(PurePursuitState,
              self).__init__(outcomes=['done', 'continue', 'failed'],
                             input_keys=['indice', 'plan'],
                             output_keys=['indice', 'plan'])

        # Store state parameter for later use.
        self._twist = TwistStamped()
        self._twist.twist.linear.x = desired_velocity
        self._twist.twist.angular.z = 0.0

        self._desired_velocity = desired_velocity
        self._max_rotation_rate = max_rotation_rate  # Used to clamp the rotation calculations

        self._current_position = PointStamped()
        self._current_position.header.stamp = rospy.Time.now()
        self._current_position.header.frame_id = target_frame

        self._target = PointStamped()
        self._target.header.stamp = rospy.Time.now()
        self._target.header.frame_id = target_frame
        self._target.point = Point(target_x, target_y, 0.0)

        self._prior = PointStamped()
        self._prior.header.stamp = rospy.Time.now()
        self._prior.header.frame_id = target_frame

        self._lookahead = lookahead_distance
        self._recover_mode = recover_mode
        self._target_type = target_type
        self._target_frame = target_frame
        if (self._target_type == 'arc'):
            self._radius = np.sqrt((center_x - target_x)**2 +
                                   (center_y - target_y)**2)

            Logger.loginfo(
                'Using arc type with center=(%f, %f) target=(%f, %f) radius=%f'
                % (center_x, center_y, target_x, target_y, self._radius))

        self._odom_frame = 'odom'  # Update with the first odometry message
        self._robot_frame = 'base_footprint'
        self._failed = False
        self._timeout = rospy.Duration(timeout)  # transform timeout

        # The constructor is called when building the state machine, not when actually starting the behavior.
        # Thus, we cannot save the starting time now and will do so later.
        self._start_time = None

        self._return = None  # Track the outcome so we can detect if transition is blocked

        self._odom_topic = odometry_topic
        self._marker_topic = marker_topic
        self._cmd_topic = cmd_topic

        self._listener = ProxyTransformListener()

        self._odom_sub = ProxySubscriberCached({self._odom_topic: Odometry})
        self._pub = ProxyPublisher({self._cmd_topic: TwistStamped})

        if (self._marker_topic != ""):
            self._marker_pub = ProxyPublisher({self._marker_topic: Marker})
        else:
            self._marker_pub = None

        self._marker = Marker()
        self._marker.header.frame_id = self._target_frame
        self._marker.header.stamp = rospy.Time.now()
        self._marker.ns = "pure_pursuit_waypoints"
        self._marker.id = int(target_x * 1000000) + int(target_y * 1000)
        self._marker.type = Marker.SPHERE
        self._marker.action = Marker.ADD
        self._marker.pose.position.x = target_x
        self._marker.pose.position.y = target_y
        self._marker.pose.position.z = 0.0
        self._marker.pose.orientation.x = 0.0
        self._marker.pose.orientation.y = 0.0
        self._marker.pose.orientation.z = 0.0
        self._marker.pose.orientation.w = 1.0
        self._marker.scale.x = marker_size
        self._marker.scale.y = marker_size
        self._marker.scale.z = marker_size
        self._marker.color.a = 1.0  # Don't forget to set the alpha!
        self._marker.color.r = 0.0
        self._marker.color.g = 0.0
        self._marker.color.b = 1.0

        self._reference_marker = Marker()
        self._reference_marker.header.frame_id = self._target_frame
        self._reference_marker.header.stamp = rospy.Time.now()
        self._reference_marker.ns = "pure_pursuit_reference"
        self._reference_marker.id = 1
        self._reference_marker.type = Marker.SPHERE
        self._reference_marker.action = Marker.ADD
        self._reference_marker.pose.position.x = target_x
        self._reference_marker.pose.position.y = target_y
        self._reference_marker.pose.position.z = 0.0
        self._reference_marker.pose.orientation.x = 0.0
        self._reference_marker.pose.orientation.y = 0.0
        self._reference_marker.pose.orientation.z = 0.0
        self._reference_marker.pose.orientation.w = 1.0
        self._reference_marker.scale.x = marker_size * 0.75
        self._reference_marker.scale.y = marker_size * 0.75
        self._reference_marker.scale.z = marker_size * 0.75
        self._reference_marker.color.a = 0.0  # Add, but make invisible at first
        self._reference_marker.color.r = 1.0
        self._reference_marker.color.g = 0.0
        self._reference_marker.color.b = 1.0

        self._local_target_marker = Marker()
        self._local_target_marker.header.frame_id = self._target_frame
        self._local_target_marker.header.stamp = rospy.Time.now()
        self._local_target_marker.ns = "pure_pursuit_target"
        self._local_target_marker.id = 1
        self._local_target_marker.type = Marker.SPHERE
        self._local_target_marker.action = Marker.ADD
        self._local_target_marker.pose.position.x = target_x
        self._local_target_marker.pose.position.y = target_y
        self._local_target_marker.pose.position.z = 0.0
        self._local_target_marker.pose.orientation.x = 0.0
        self._local_target_marker.pose.orientation.y = 0.0
        self._local_target_marker.pose.orientation.z = 0.0
        self._local_target_marker.pose.orientation.w = 1.0
        self._local_target_marker.scale.x = marker_size
        self._local_target_marker.scale.y = marker_size
        self._local_target_marker.scale.z = marker_size
        self._local_target_marker.color.a = 0.0  # Add, but make invisible at first
        self._local_target_marker.color.r = 1.0
        self._local_target_marker.color.g = 0.0
        self._local_target_marker.color.b = 1.0

    # Transform point into odometry frame
    def transformOdom(self, point):
        try:
            # Get transform
            self._listener.listener().waitForTransform(self._odom_frame,
                                                       point.header.frame_id,
                                                       point.header.stamp,
                                                       self._timeout)
            return self._listener.listener().transformPoint(
                self._odom_frame, point)

        except (tf.LookupException, tf.ConnectivityException,
                tf.ExtrapolationException) as e:
            Logger.logwarn('Failed to get the transformation\n%s' % str(e))
            self._failed = True
            return None
        except Exception as e:
            Logger.logwarn(
                'Failed to get the transformation due to unknown error\n %s' %
                str(e))
            self._failed = True
            return None

    # Transform point into map frame
    def transformMap(self, odometry):

        odom_position = PointStamped()
        odom_position.header = odometry.header
        odom_position.point = odometry.pose.pose.position

        try:
            # Get transform
            self._listener.listener().waitForTransform(
                self._target_frame, odometry.header.frame_id,
                odometry.header.stamp, self._timeout)
            return self._listener.listener().transformPoint(
                self._target_frame, odom_position)

        except (tf.LookupException, tf.ConnectivityException,
                tf.ExtrapolationException) as e:
            Logger.logwarn(
                'Failed to get the transformation to target_frame\n%s' %
                str(e))
            self._failed = True
            return None
        except Exception as e:
            Logger.logwarn(
                'Failed to get the transformation to target frame due to unknown error\n %s'
                % str(e))
            self._failed = True
            return None

    # Transform point into robot body frame
    def transformBody(self, point):
        try:
            # Get transform
            self._listener.listener().waitForTransform(self._robot_frame,
                                                       point.header.frame_id,
                                                       point.header.stamp,
                                                       self._timeout)
            return self._listener.listener().transformPoint(
                self._robot_frame, point)

        except (tf.LookupException, tf.ConnectivityException,
                tf.ExtrapolationException) as e:
            Logger.logwarn('Failed to get the transformation\n%s' % str(e))
            self._failed = True
            return None
        except Exception as e:
            Logger.logwarn(
                'Failed to get the transformation due to unknown error\n %s' %
                str(e))
            self._failed = True
            return None
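
    # Note: each of the transform helpers above returns None and sets self._failed on
    # failure; callers are expected to check self._failed (or the None result) after
    # every call, as in execute() and on_start() below.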

    def execute(self, userdata):
        # This method is called periodically while the state is active.
        # If no outcome is returned, the state will stay active.

        if (self._return):
            # We have already returned an outcome, so we must be blocked by the autonomy level.
            # Keep the robot stopped and return the prior outcome.
            ts = TwistStamped()
            ts.header.stamp = rospy.Time.now()
            self._pub.publish(self._cmd_topic, ts)
            return self._return

        # Get the latest odometry data
        if (self._sub.has_msg(self._odom_topic)):
            self._last_odom = self._sub.get_last_msg(self._odom_topic)

        # Update the current pose
        self._current_position = self.transformMap(self._last_odom)
        if (self._failed):
            self._return = 'failed'
            return 'failed'

        # Transform the target points into the current odometry frame
        self._target.header.stamp = self._last_odom.header.stamp
        local_target = self._target  # self.transformOdom(self._target)

        # If the target point is within the lookahead distance, this segment is done
        dr = np.sqrt(
            (local_target.point.x - self._current_position.point.x)**2 +
            (local_target.point.y - self._current_position.point.y)**2)
        if (dr < self._lookahead):
            Logger.loginfo(
                ' %s  Lookahead circle is past target - done! target=(%f, %f, %f) robot=(%f,%f, %f)  dr=%f lookahead=%f '
                % (self.name, local_target.point.x, local_target.point.y,
                   local_target.point.z, self._current_position.point.x,
                   self._current_position.point.y,
                   self._current_position.point.z, dr, self._lookahead))
            if (userdata.indice == len(userdata.plan.poses) - 1):

                self._return = 'done'
                return 'done'
            else:
                self._return = 'continue'
                return 'continue'

        # Transform the prior target point into the current odometry frame
        self._prior.header.stamp = self._last_odom.header.stamp
        local_prior = self._prior  #self.transformOdom(self._prior)
        if (self._failed):
            self._return = 'failed'
            return 'failed'

        # Assume we can go the desired velocity
        self._twist.twist.linear.x = self._desired_velocity

        lookahead = self.calculateLineTwist(local_prior, local_target)

        if (lookahead is None):
            # No lookahead point: we either failed, completed the segment, or are deliberately
            # holding the prior velocity to recover from a minor perturbation
            if (self._return is not None):
                Logger.logerr("No lookahead!")
            return self._return  # return what was set (either 'failed' or 'done')

        # Sanity check the rotation rate
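        # If the commanded yaw rate exceeds the limit, reduce the forward speed by the same
        # ratio so the commanded curvature (angular.z / linear.x) is preserved, then clamp
        # the yaw rate to the maximum.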
        if (math.fabs(self._twist.twist.angular.z) > self._max_rotation_rate):
            self._twist.twist.linear.x = self._desired_velocity * self._max_rotation_rate / math.fabs(
                self._twist.twist.angular.z)  # decrease the speed
            self._twist.twist.angular.z = math.copysign(
                self._max_rotation_rate, self._twist.twist.angular.z)

        # Normal operation - publish the latest calculated twist
        self._twist.header.stamp = rospy.Time.now()
        self._pub.publish(self._cmd_topic, self._twist)

        if (self._marker_pub):
            self._marker_pub.publish(self._marker_topic,
                                     self._reference_marker)
            self._marker_pub.publish(self._marker_topic,
                                     self._local_target_marker)

        return None
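
    # Outcomes returned by execute(): 'continue' when the current segment is finished but
    # more plan poses remain, 'done' when the final pose has been reached, 'failed' on
    # transform or geometry errors, and None (stay active) while still tracking the segment.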

    def on_enter(self, userdata):

        # This method is called when the state becomes active, i.e. a transition from another state to this one is taken.
        self._start_time = rospy.Time.now()
        self._return = None  # reset the completion flag
        self._failed = False  # reset the failed flag

        if (self._marker_pub):
            self._marker.action = Marker.MODIFY
            self._marker.color.a = 1.0
            self._marker.color.r = 0.0
            self._marker.color.g = 1.0  # Indicate this target is active
            self._marker.color.b = 0.0
            self._marker_pub.publish(self._marker_topic, self._marker)

        if (userdata.indice > 0
                and userdata.indice < len(userdata.plan.poses)):
            Logger.loginfo("   Access data for index %d" % (userdata.indice))
            self._target.point = Point(
                userdata.plan.poses[userdata.indice].pose.position.x,
                userdata.plan.poses[userdata.indice].pose.position.y, 0.0)
            self._prior.point = Point(
                userdata.plan.poses[userdata.indice - 1].pose.position.x,
                userdata.plan.poses[userdata.indice - 1].pose.position.y, 0.0)
            Logger.loginfo(
                "  Moving toward  target %d =%f,%f  from prior=%f,%f" %
                (userdata.indice, self._target.point.x, self._target.point.y,
                 self._prior.point.x, self._prior.point.y))
        else:
            Logger.logerr(
                "   Invalid index %d - cannot access the path points!" %
                (userdata.indice))
            self._target.point = None
            self._prior.point = None
            self._failed = True
            self._return = 'failed'

        # Increment the index for the next segment
        userdata.indice += 1
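        # Incrementing here means that when this state is re-entered via the 'continue'
        # outcome, it will pursue the next segment of the plan.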

    def on_exit(self, userdata):
        # This method is called when an outcome is returned and another state becomes active.
        elapsed_time = rospy.Time.now() - self._start_time
        #        userdata.target_point = self._target   # Set the user data for passing to next node

        #Logger.logdebug("  Spent %f seconds in this segment  target=%f,%f  position=%f,%f" %
        #        (elapsed_time.to_sec(),self._target.point.x, self._target.point.y,
        #         self._current_position.point.x, self._current_position.point.y))

        if (self._marker_pub):
            self._marker.color.a = 1.0  # Don't forget to set the alpha!
            self._marker.color.r = 0.8  # Indicate this target is no longer active
            self._marker.color.g = 0.0
            self._marker.color.b = 0.0
            self._marker_pub.publish(self._marker_topic, self._marker)

    def on_start(self):

        self._return = None  # Clear completion flag

        # Wait for an odometry message to become available from the robot
        while (not self._sub.has_msg(self._odom_topic)):
            Logger.logwarn('Waiting for odometry message from the robot')
            rospy.sleep(0.25)

        self._last_odom = self._sub.get_last_msg(self._odom_topic)
        self._odom_frame = self._last_odom.header.frame_id
        #Logger.loginfo('   odometry frame id <%s>' % (self._odom_frame))

        # Update the target transformation
        self._target.header.stamp = self._last_odom.header.stamp
        while (self.transformOdom(self._target) is None):
            Logger.logwarn(
                'Waiting for tf transformations to the odometry frame to become available from the robot'
            )
            rospy.sleep(0.25)
            self._target.header.stamp = rospy.Time.now()

        while (self.transformMap(self._last_odom) is None):
            Logger.logwarn(
                'Waiting for tf transformations to the map frame to become available from the robot'
            )
            rospy.sleep(0.25)
            self._last_odom = self._sub.get_last_msg(self._odom_topic)

        self._current_position = self.transformMap(self._last_odom)

        # Publish the markers once at startup so they are available before the state first becomes active
        if (self._marker_pub):
            self._marker.action = Marker.ADD
            self._marker.color.a = 1.0  # Set alpha otherwise the marker is invisible
            self._marker.color.r = 0.0
            self._marker.color.g = 0.0
            self._marker.color.b = 1.0  # Indicate this target is planned
            self._marker_pub.publish(self._marker_topic, self._marker)
            self._marker_pub.publish(self._marker_topic,
                                     self._reference_marker)
            self._marker_pub.publish(self._marker_topic,
                                     self._local_target_marker)
            self._marker.action = Marker.MODIFY
            self._reference_marker.action = Marker.MODIFY
            self._reference_marker.color.a = 1.0  # Set alpha so it will become visible on next publish
            self._local_target_marker.action = Marker.MODIFY
            self._local_target_marker.color.a = 1.0  # Set alpha so it will become visible on next publish

    # Calculate the pure-pursuit control point (and corresponding twist) for the line segment from prior to target
    def calculateLineTwist(self, local_prior, local_target):

        # Define a line segment from prior to the target (assume 2D)
        pv = Vector3(local_target.point.x - local_prior.point.x,
                     local_target.point.y - local_prior.point.y, 0.0)
        qv = Vector3(local_prior.point.x - self._current_position.point.x,
                     local_prior.point.y - self._current_position.point.y, 0.0)

        # Find intersection of line segment with lookahead circle centered at the  robot
        a = pv.x * pv.x + pv.y * pv.y
        b = 2.0 * (qv.x * pv.x + qv.y * pv.y)
        c = (qv.x * qv.x + qv.y * qv.y) - self._lookahead * self._lookahead
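        # These coefficients come from requiring |qv + t*pv|^2 = lookahead^2 for a point
        # prior + t*pv on the line: a = |pv|^2, b = 2*(qv . pv), c = |qv|^2 - lookahead^2.
        # Since 'a' is the squared segment length, a near-zero value means the prior and
        # target points coincide and no line direction is defined.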

        if (a < 0.001):
            Logger.logerr(
                ' %s  Invalid prior and target for line: target=(%f, %f, %f) prior=(%f, %f, %f) robot=(%f,%f, %f)  pv=(%f,%f) qv=(%f,%f) a=%f b=%f c=%f '
                % (self.name, local_target.point.x, local_target.point.y,
                   local_target.point.z, local_prior.point.x,
                   local_prior.point.y, local_prior.point.z,
                   self._current_position.point.x,
                   self._current_position.point.y,
                   self._current_position.point.z, pv.x, pv.y, qv.x, qv.y, a,
                   b, c))
            self._return = 'failed'
            return None

        discrim = b * b - 4 * a * c
        if (discrim < 0.0):
            # No intersection - this should not happen unless there is a bad start or a localization perturbation
            Logger.logwarn(
                ' %s  No path recovery - no intersection for line: target=(%f, %f, %f) prior=(%f, %f, %f) robot=(%f,%f, %f)  pv=(%f,%f) qv=(%f,%f) a=%f b=%f c=%f discrim=%f '
                % (self.name, local_target.point.x, local_target.point.y,
                   local_target.point.z, local_prior.point.x,
                   local_prior.point.y, local_prior.point.z,
                   self._current_position.point.x,
                   self._current_position.point.y,
                   self._current_position.point.z, pv.x, pv.y, qv.x, qv.y, a,
                   b, c, discrim))
            self._return = 'failed'
            return None
        else:
            # solve quadratic equation for intersection points
            sqd = math.sqrt(discrim)
            t1 = (-b - sqd) / (2 * a)  # min value
            t2 = (-b + sqd) / (2 * a)  # max value
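            # t parameterizes points along the line as prior + t*pv: values in [0, 1]
            # lie on the segment itself, t < 0 is behind the prior point, and t > 1 is
            # beyond the target point.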
            if (t2 < t1):
                Logger.logwarn(
                    ' %s  Say what! t1=%f t2=%f sqd=%f target=(%f, %f, %f) prior=(%f, %f, %f) robot=(%f,%f, %f)  pv=(%f,%f) qv=(%f,%f) a=%f b=%f c=%f discrim=%f '
                    % (self.name, t1, t2, sqd, local_target.point.x,
                       local_target.point.y, local_target.point.z,
                       local_prior.point.x, local_prior.point.y,
                       local_prior.point.z, self._current_position.point.x,
                       self._current_position.point.y,
                       self._current_position.point.z, pv.x, pv.y, qv.x, qv.y,
                       a, b, c, discrim))
                self._return = 'failed'
                return None

            if (t2 < 0.0):
                # All intersections are behind the segment - this should not happen unless there is a bad start or a localization perturbation
                if (t2 > -0.1):
                    # likely due to localization perturbation
                    Logger.logwarn(
                        ' %s  Circle is before segment - continue prior motion! t1=%f t2=%f sqd=%f target=(%f, %f, %f) prior=(%f, %f, %f) robot=(%f,%f, %f)  pv=(%f,%f) qv=(%f,%f) a=%f b=%f c=%f discrim=%f '
                        % (self.name, t1, t2, sqd, local_target.point.x,
                           local_target.point.y, local_target.point.z,
                           local_prior.point.x, local_prior.point.y,
                           local_prior.point.z, self._current_position.point.x,
                           self._current_position.point.y,
                           self._current_position.point.z, pv.x, pv.y, qv.x,
                           qv.y, a, b, c, discrim))
                    return None
                else:
                    Logger.logwarn(
                        ' %s  Circle is before segment! t1=%f t2=%f sqd=%f target=(%f, %f, %f) prior=(%f, %f, %f) robot=(%f,%f, %f)  pv=(%f,%f) qv=(%f,%f) a=%f b=%f c=%f discrim=%f '
                        % (self.name, t1, t2, sqd, local_target.point.x,
                           local_target.point.y, local_target.point.z,
                           local_prior.point.x, local_prior.point.y,
                           local_prior.point.z, self._current_position.point.x,
                           self._current_position.point.y,
                           self._current_position.point.z, pv.x, pv.y, qv.x,
                           qv.y, a, b, c, discrim))
                    self._return = 'failed'
                    return None
            elif (t1 > 1.0):
                # all intersections are past the segment
                Logger.loginfo(
                    ' %s  Circle is past segment - done! t1=%f t2=%f sqd=%f target=(%f, %f, %f) prior=(%f, %f, %f) robot=(%f,%f, %f)  pv=(%f,%f) qv=(%f,%f) a=%f b=%f c=%f discrim=%f '
                    % (self.name, t1, t2, sqd, local_target.point.x,
                       local_target.point.y, local_target.point.z,
                       local_prior.point.x, local_prior.point.y,
                       local_prior.point.z, self._current_position.point.x,
                       self._current_position.point.y,
                       self._current_position.point.z, pv.x, pv.y, qv.x, qv.y,
                       a, b, c, discrim))
                self._return = 'done'
                return None
            elif (t1 < 0.0 and t2 > 1.0):
                # Segment is contained inside the lookahead circle
                Logger.loginfo(
                    ' %s  Circle contains segment - move along! t1=%f t2=%f sqd=%f target=(%f, %f, %f) prior=(%f, %f, %f) robot=(%f,%f, %f)  pv=(%f,%f) qv=(%f,%f) a=%f b=%f c=%f discrim=%f '
                    % (self.name, t1, t2, sqd, local_target.point.x,
                       local_target.point.y, local_target.point.z,
                       local_prior.point.x, local_prior.point.y,
                       local_prior.point.z, self._current_position.point.x,
                       self._current_position.point.y,
                       self._current_position.point.z, pv.x, pv.y, qv.x, qv.y,
                       a, b, c, discrim))
                self._return = 'done'
                return None
            elif (t2 > 1.0):
                # The lookahead circle extends beyond the target point - we are finished here
                Logger.loginfo(
                    ' %s  Circle extends past segment - done! t1=%f t2=%f sqd=%f target=(%f, %f, %f) prior=(%f, %f, %f) robot=(%f,%f, %f)  pv=(%f,%f) qv=(%f,%f) a=%f b=%f c=%f discrim=%f '
                    % (self.name, t1, t2, sqd, local_target.point.x,
                       local_target.point.y, local_target.point.z,
                       local_prior.point.x, local_prior.point.y,
                       local_prior.point.z, self._current_position.point.x,
                       self._current_position.point.y,
                       self._current_position.point.z, pv.x, pv.y, qv.x, qv.y,
                       a, b, c, discrim))
                self._return = 'done'
                return None
            else:
                # Normal case: the lookahead circle intersects the line segment, so take
                # the farther intersection (t2) as the control point
                control = deepcopy(local_prior)
                control.point.x = control.point.x + t2 * pv.x  # Control point along the segment
                control.point.y = control.point.y + t2 * pv.y
                control.point.z = control.point.z + t2 * pv.z
                self._reference_marker.pose.position = control.point
                self._local_target_marker.pose.position = local_target.point
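                # Pure-pursuit steering: for a control point (x, y) in the robot body frame
                # at lookahead distance L, the arc through the robot and that point has
                # curvature 2*y / L**2, so the commanded yaw rate is the forward velocity
                # times that curvature (computed below after transforming into the body frame).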

                control_robot = self.transformBody(control)
                if (control_robot is None):
                    # Transform into the body frame failed; self._failed is already set
                    self._return = 'failed'
                    return None

                curvature = 2.0 * control_robot.point.y / (self._lookahead *
                                                           self._lookahead)
                self._twist.twist.angular.z = curvature * self._desired_velocity
                return control_robot

        return None
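

# A minimal standalone sketch (not part of the state above) of the pure-pursuit steering
# relation used in calculateLineTwist(): given the control point's lateral offset y in the
# robot body frame and the lookahead distance L, the arc through the robot and that point
# has curvature 2*y / L**2, and the commanded yaw rate is the forward velocity times that
# curvature. Function and argument names here are illustrative only.
import math  # already imported by the original module; repeated so this sketch is self-contained


def pure_pursuit_command(control_y, lookahead, desired_velocity, max_rotation_rate):
    """Return (linear_x, angular_z) for a pure-pursuit controller."""
    curvature = 2.0 * control_y / (lookahead * lookahead)
    angular_z = curvature * desired_velocity
    linear_x = desired_velocity
    # Limit the yaw rate, scaling the forward speed to preserve the commanded curvature
    if math.fabs(angular_z) > max_rotation_rate:
        linear_x = desired_velocity * max_rotation_rate / math.fabs(angular_z)
        angular_z = math.copysign(max_rotation_rate, angular_z)
    return linear_x, angular_z


# Example: pure_pursuit_command(0.1, 0.5, 0.3, 1.0) returns (0.3, 0.24)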