Example #1
def goToMouth(robotConfig=None, binNum=0, isExecute=True, withPause=False):
    ## robotConfig: current robot joint configuration
    joint_topic = '/joint_states'

    ## initialize tf listener and broadcaster
    listener = tf.TransformListener()
    rospy.sleep(0.1)
    br = tf.TransformBroadcaster()
    rospy.sleep(0.1)

    # plan store
    plans = []

    ## initial variable and tcp definitions
    # set tcp
    l2 = 0.47
    tip_hand_transform = [
        0, 0, l2, 0, 0, 0
    ]  # to be updated when we have a hand design finalized
    # broadcast frame attached to tcp
    pubFrame(br,
             pose=tip_hand_transform,
             frame_id='tip',
             parent_frame_id='link_6',
             npub=5)
    # get position of the tcp in world frame
    pose_world = coordinateFrameTransform(tip_hand_transform[0:3], 'link_6',
                                          'map', listener)
    tcpPos = [
        pose_world.pose.position.x, pose_world.pose.position.y,
        pose_world.pose.position.z
    ]
    tcpPosHome = tcpPos
    # set home orientation
    gripperOri = [0, 0.7071, 0, 0.7071]

    # move to bin mouth
    distFromShelf = 0.15
    mouthPt, temp = getBinMouthAndFloor(distFromShelf, binNum)
    mouthPt = coordinateFrameTransform(mouthPt, 'shelf', 'map', listener)
    targetPosition = [
        mouthPt.pose.position.x, mouthPt.pose.position.y,
        mouthPt.pose.position.z
    ]
    q_initial = robotConfig
    planner = IK(q0=q_initial,
                 target_tip_pos=targetPosition,
                 target_tip_ori=gripperOri,
                 tip_hand_transform=tip_hand_transform,
                 joint_topic=joint_topic)
    plan = planner.plan()
    s = plan.success()
    if s:
        print '[goToMouth] move to bin mouth successful'
        plan.visualize()
        plans.append(plan)
        if isExecute:
            pauseFunc(withPause)
            plan.execute()
    else:
        print '[goToMouth] move to bin mouth fail'
        return None

    qf = plan.q_traj[-1]

    ## open gripper fully
    openGripper()

    return plan
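
# coordinateFrameTransform(), pubFrame(), getBinMouthAndFloor() and IK() are helpers
# from the project's own utility modules and are not shown in this example. As a
# rough illustration only, a frame-transform helper such as coordinateFrameTransform()
# typically wraps tf.TransformListener.transformPoint(); the implementation below is
# a hypothetical sketch, not the project's code.
import rospy
import tf
from geometry_msgs.msg import PointStamped, PoseStamped

def coordinateFrameTransform(point, from_frame, to_frame, listener):
    """Return `point` (x, y, z in from_frame) expressed in to_frame as a PoseStamped."""
    ps = PointStamped()
    ps.header.frame_id = from_frame
    ps.header.stamp = rospy.Time(0)
    ps.point.x, ps.point.y, ps.point.z = point[0:3]
    listener.waitForTransform(to_frame, from_frame, rospy.Time(0), rospy.Duration(1.0))
    transformed = listener.transformPoint(to_frame, ps)
    out = PoseStamped()
    out.header = transformed.header
    out.pose.position = transformed.point
    return out
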
    def __init__(self,
                 tf_listener=None,
                 advertise_service=False,
                 advertise_action=True):

        #init a TF transform listener
        if tf_listener is None:
            self.tf_listener = tf.TransformListener()
        else:
            self.tf_listener = tf_listener

        #point cloud processing action
        self._container_client = actionlib.SimpleActionClient(
            'find_container_action', FindContainerAction)
        rospy.loginfo("Waiting for find_container_action...")
        self._container_client.wait_for_server()

        #cluster grasp planning service
        service_name = "plan_point_cluster_grasp"
        rospy.loginfo("waiting for plan_point_cluster_grasp service")
        rospy.wait_for_service(service_name)
        rospy.loginfo("service found")
        self._cluster_grasp_srv = rospy.ServiceProxy(service_name,
                                                     GraspPlanning)

        #cluster grasp planning params
        service_name = "set_point_cluster_grasp_params"
        rospy.loginfo("waiting for set_point_cluster_grasp_params service")
        rospy.wait_for_service(service_name)
        rospy.loginfo("service found")
        self._cluster_grasp_params_srv = rospy.ServiceProxy(
            service_name, SetPointClusterGraspParams)

        #planning scene setting service
        planning_scene_srv_name = "environment_server/set_planning_scene_diff"
        rospy.loginfo("waiting for %s service" % planning_scene_srv_name)
        rospy.wait_for_service(planning_scene_srv_name)
        self._planning_scene_srv = rospy.ServiceProxy(planning_scene_srv_name,
                                                      SetPlanningSceneDiff)

        #what approximate directions should the grasps be pointing into (as with a container)?
        self._opening_dir_list = rospy.get_param("~opening_dir_list",
                                                 [[0, 0, 1], [-1, 0, 0]])

        #inserted for HRI studies--if the task number is set, change the opening_dir_list
        task_number = rospy.get_param("interactive_grasping/task_number", 0)
        if task_number == 3:
            rospy.loginfo("Task number is 3, doing front grasps only.")
            self._opening_dir_list = [[-1, 0, 0]]
        elif task_number > 0:
            rospy.loginfo("Task number is 1 or 2, doing overhead grasps only.")
            self._opening_dir_list = [[0, 0, 1]]

        #whether to make the pregrasp just outside the cluster bounding box, or let it be the default (10 cm)
        self._pregrasp_just_outside_box = rospy.get_param(
            "~pregrasp_just_outside_box", False)

        #name of the cluster planner service topic
        self._cluster_planner_name = rospy.get_param(
            "cluster_planner_name", "point_cluster_grasp_planner")

        #advertise the grasp planning service
        if advertise_service:
            rospy.Service('plan_segmented_clutter_grasps', GraspPlanning,
                          self.plan_segmented_clutter_grasps_callback)
            rospy.loginfo(
                "segmented clutter grasp planner is ready for queries")

        #advertise the grasp planning action
        if advertise_action:
            self._as = actionlib.SimpleActionServer(
                rospy.get_name(),
                GraspPlanningAction,
                execute_cb=self.segmented_clutter_grasps_execute_cb)
            self._as.start()
            rospy.loginfo(rospy.get_name() + " is ready for queries")
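
# The constructor above repeats the same "wait for a service, then build a proxy"
# pattern several times. A small helper capturing that pattern could look like the
# hypothetical sketch below (not part of the original class):
import rospy

def wait_for_service_proxy(service_name, service_type):
    """Block until service_name is available, then return a ServiceProxy for it."""
    rospy.loginfo("waiting for %s service" % service_name)
    rospy.wait_for_service(service_name)
    rospy.loginfo("service found")
    return rospy.ServiceProxy(service_name, service_type)
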
#!/usr/bin/env python  
import roslib
roslib.load_manifest('project1_ws')
import rospy
import math
import tf
import geometry_msgs.msg
import turtlesim.srv

if __name__ == '__main__':
    rospy.init_node('tf_listener2')

    listener = tf.TransformListener()

    rospy.wait_for_service('spawn')
    spawner = rospy.ServiceProxy('spawn', turtlesim.srv.Spawn)
    spawner(3, 2, 0, 'turtle2')
    turtle_vel = rospy.Publisher('turtle2/cmd_vel', geometry_msgs.msg.Twist,queue_size=1)

    rate = rospy.Rate(10.0)
    while not rospy.is_shutdown():
        try:
            (trans, rot) = listener.lookupTransform('/turtle2', '/turtle1',
                                                    rospy.Time(0))
        except (tf.LookupException, tf.ConnectivityException,
                tf.ExtrapolationException):
            continue

        # drive turtle2 toward turtle1 until it is within 0.1 m of it
        distance = math.sqrt(trans[0] ** 2 + trans[1] ** 2)
        if distance >= 0.1:
            cmd = geometry_msgs.msg.Twist()
            cmd.angular.z = 0.7 * math.atan2(trans[1], trans[0])
            cmd.linear.x = 0.1 * distance
            turtle_vel.publish(cmd)

        rate.sleep()
Example #4
def node():
    global mapData

    rospy.init_node('RRTexplorer', anonymous=False)
    rospy.Subscriber("/robot_1/map", OccupancyGrid, mapCallBack)
    pub = rospy.Publisher('shapes', Marker, queue_size=10)

    # Actionlib client
    client1 = actionlib.SimpleActionClient('/robot_1/move_base', MoveBaseAction)
    client1.wait_for_server()

    goal = MoveBaseGoal()
    goal.target_pose.header.stamp = rospy.Time.now()
    goal.target_pose.header.frame_id = "/robot_1/map"

    rate = rospy.Rate(100)

    listener = tf.TransformListener()
    listener.waitForTransform('/robot_1/map', '/robot_1/base_link',
                              rospy.Time(0), rospy.Duration(10.0))
    try:
        (trans, rot) = listener.lookupTransform('/robot_1/map',
                                                '/robot_1/base_link',
                                                rospy.Time(0))
    except (tf.LookupException, tf.ConnectivityException,
            tf.ExtrapolationException):
        trans = [0, 0]

    xinx = trans[0]
    xiny = trans[1]
    goal.target_pose.pose.position.x = xinx - 0.5
    goal.target_pose.pose.position.y = 0
    goal.target_pose.pose.position.z = 0
    goal.target_pose.pose.orientation.w = 1.0

    client1.send_goal(goal)
    client1.wait_for_result()
    client1.get_result()

    x_init = array([xinx, xiny])
    V = array([x_init])
    i = 1.0
    E = concatenate((x_init, x_init))

    points = Marker()
    line = Marker()
    # Set the frame ID and timestamp.  See the TF tutorials for information on these.
    points.header.frame_id = line.header.frame_id = "/robot_1/map"
    points.header.stamp = line.header.stamp = rospy.Time.now()

    points.ns = line.ns = "markers"
    points.id = 0
    line.id = 1

    points.type = Marker.POINTS
    line.type = Marker.LINE_LIST
    # Set the marker action.  Options are ADD, DELETE, and new in ROS Indigo: 3 (DELETEALL)
    points.action = line.action = Marker.ADD

    points.pose.orientation.w = line.pose.orientation.w = 1.0

    line.scale.x = 0.02
    points.scale.x = 0.2
    line.scale.y = 0.02
    points.scale.y = 0.2

    line.color.r = 9.0 / 255.0
    line.color.g = 91.0 / 255.0
    line.color.b = 236.0 / 255.0
    points.color.r = 255.0 / 255.0
    points.color.g = 0.0 / 255.0
    points.color.b = 0.0 / 255.0

    points.color.a = 1
    line.color.a = 0.6
    points.lifetime = line.lifetime = rospy.Duration()

    p = Point()
    p.x = x_init[0]
    p.y = x_init[1]
    p.z = 0

    pp = []
    pl = []
    pp.append(copy(p))

    frontiers = []
    xdim = mapData.info.width
    ydim = mapData.info.height
    resolution = mapData.info.resolution
    Xstartx = mapData.info.origin.position.x
    Xstarty = mapData.info.origin.position.y

    # -------------------------------RRT------------------------------------------
    while not rospy.is_shutdown():

        # Sample free
        xr = (random() * 20.0) - 10.0
        yr = (random() * 20.0) - 10.0
        x_rand = array([xr, yr])

        # Nearest
        x_nearest = V[Nearest(V, x_rand), :]

        # Steer
        x_new = Steer(x_nearest, x_rand, param.eta)

        # ObstacleFree
        checking = ObstacleFree2(x_nearest, x_new, mapData)

        if checking == -1:
            # x_new lies in unknown space: record it as a frontier and restart the tree
            if len(frontiers) > 0:
                frontiers = vstack((frontiers, x_new))
            else:
                frontiers = [x_new]

            (trans, rot) = listener.lookupTransform('/robot_1/map',
                                                    '/robot_1/base_link',
                                                    rospy.Time(0))
            xinx = trans[0]
            xiny = trans[1]
            x_init = array([xinx, xiny])
            V = array([x_init])
            E = concatenate((x_init, x_init))
            pp = []
            pl = []

        elif checking == 1:
            V = vstack((V, x_new))
            temp = concatenate((x_nearest, x_new))
            E = vstack((E, temp))

        # drop frontiers that are no longer in unknown space
        z = 0
        while z < len(frontiers):
            if gridValue(mapData, frontiers[z]) != -1:
                frontiers = delete(frontiers, (z), axis=0)
                z = z - 1
            z += 1

        frontiers = assigner1rrtfront(goal, frontiers, client1, listener)
        print rospy.Time.now(), "   ", len(frontiers)

        pp = []
        for q in range(0, len(frontiers)):
            p.x = frontiers[q][0]
            p.y = frontiers[q][1]
            pp.append(copy(p))

        # Plotting
        points.points = pp
        pl = prepEdges(E)
        line.points = pl
        pub.publish(line)
        pub.publish(points)

        rate.sleep()
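
# Nearest(), Steer(), ObstacleFree2(), gridValue(), prepEdges() and assigner1rrtfront()
# are defined elsewhere in this project. As a rough illustration of the two classic
# RRT primitives used above, a minimal numpy sketch might look like this (hypothetical,
# not the project's actual implementation):
import numpy as np

def Nearest(V, x_rand):
    """Index of the vertex in V that is closest to the sampled point x_rand."""
    return int(np.argmin(np.linalg.norm(np.atleast_2d(V) - x_rand, axis=1)))

def Steer(x_nearest, x_rand, eta):
    """Move from x_nearest toward x_rand by at most a step of length eta."""
    d = np.linalg.norm(x_rand - x_nearest)
    if d <= eta:
        return x_rand
    return x_nearest + eta * (x_rand - x_nearest) / d
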
    def __init__(self):
        """Initialize this _AutoRallyCtrlr."""

        rospy.init_node("autoRally_controller")

        # Parameters

        # Wheels
        (left_steer_link_name, left_steer_ctrlr_name,
         left_front_axle_ctrlr_name, self._left_front_inv_circ) = \
         self._get_front_wheel_params("left")
        (right_steer_link_name, right_steer_ctrlr_name,
         right_front_axle_ctrlr_name, self._right_front_inv_circ) = \
         self._get_front_wheel_params("right")
        (left_rear_link_name, left_rear_axle_ctrlr_name,
         self._left_rear_inv_circ) = \
         self._get_rear_wheel_params("left")
        (self._right_rear_link_name, right_rear_axle_ctrlr_name,
         self._right_rear_inv_circ) = \
         self._get_rear_wheel_params("right")

        list_ctrlrs = rospy.ServiceProxy("controller_manager/list_controllers",
                                         ListControllers)
        list_ctrlrs.wait_for_service()

        # Shock absorbers
        shock_param_list = rospy.get_param("~shock_absorbers", [])
        self._shock_pubs = []
        try:
            for shock_params in shock_param_list:
                try:
                    ctrlr_name = shock_params["controller_name"]
                    try:
                        eq_pos = shock_params["equilibrium_position"]
                    except:
                        eq_pos = self._DEF_EQ_POS
                    eq_pos = float(eq_pos)
                except:
                    rospy.logwarn("An invalid parameter was specified for a "
                                  "shock absorber. The shock absorber will "
                                  "not be used.")
                    continue

                pub = rospy.Publisher(ctrlr_name + "/command",
                                      Float64,
                                      latch=True,
                                      queue_size=1)
                _wait_for_ctrlr(list_ctrlrs, ctrlr_name)
                pub.publish(eq_pos)
                self._shock_pubs.append(pub)
        except:
            rospy.logwarn("The specified list of shock absorbers is invalid. "
                          "No shock absorbers will be used.")

        # Command timeout
        try:
            self._cmd_timeout = float(
                rospy.get_param("~cmd_timeout", self._DEF_CMD_TIMEOUT))
        except:
            rospy.logwarn("The specified command timeout value is invalid. "
                          "The default timeout value will be used instead.")
            self._cmd_timeout = self._DEF_CMD_TIMEOUT

        # Publishing frequency
        try:
            pub_freq = float(
                rospy.get_param("~publishing_frequency", self._DEF_PUB_FREQ))
            if pub_freq <= 0.0:
                raise ValueError()
        except:
            rospy.logwarn("The specified publishing frequency is invalid. "
                          "The default frequency will be used instead.")
            pub_freq = self._DEF_PUB_FREQ
        self._sleep_timer = rospy.Rate(pub_freq)

        # _last_cmd_time is the time at which the most recent Ackermann
        # driving command was received.
        self._last_cmd_time = rospy.get_time()

        # _ackermann_cmd_lock is used to control access to _steer_ang,
        # _steer_ang_vel, _speed, and _accel.
        #        self._ackermann_cmd_lock = threading.Lock()
        #        self._steer_ang = 0.0      # Steering angle
        #        self._steer_ang_vel = 0.0  # Steering angle velocity
        #        self._speed = 0.0
        #        self._accel = 0.0          # Acceleration

        self.lastCmdTime = rospy.get_time()
        self.servoCmds = dict()
        self.servoCmdLock = threading.Lock()

        self._last_steer_ang = 0.0  # Last steering angle
        self._theta_left = 0.0  # Left steering joint angle
        self._theta_right = 0.0  # Right steering joint angle

        self._last_speed = 0.0
        self._last_accel_limit = 0.0  # Last acceleration limit
        # Axle angular velocities
        self._left_front_ang_vel = 0.0
        self._right_front_ang_vel = 0.0
        self._left_rear_ang_vel = 0.0
        self._right_rear_ang_vel = 0.0

        # _joint_dist_div_2 is the distance between the steering joints,
        # divided by two.
        tfl = tf.TransformListener()
        ls_pos = self._get_link_pos(tfl, left_steer_link_name)
        rs_pos = self._get_link_pos(tfl, right_steer_link_name)
        self._joint_dist_div_2 = numpy.linalg.norm(ls_pos - rs_pos) / 2
        lrw_pos = self._get_link_pos(tfl, left_rear_link_name)
        rrw_pos = numpy.array([0.0] * 3)
        front_cent_pos = (ls_pos + rs_pos) / 2  # Front center position
        rear_cent_pos = (lrw_pos + rrw_pos) / 2  # Rear center position
        self._wheelbase = numpy.linalg.norm(front_cent_pos - rear_cent_pos)
        self._inv_wheelbase = 1 / self._wheelbase  # Inverse of _wheelbase
        self._wheelbase_sqr = self._wheelbase**2

        #load servo commander priorities
        self.commandPriorities = rospy.get_param("~servoCommandProirities", [])
        self.commandPriorities = sorted(self.commandPriorities.items(),
                                        key=operator.itemgetter(1))
        rospy.loginfo("AutoRallyGazeboController: Loaded %d servo commanders",
                      len(self.commandPriorities))

        # Publishers and subscribers
        self._left_steer_cmd_pub = \
            _create_cmd_pub(list_ctrlrs, left_steer_ctrlr_name)
        self._right_steer_cmd_pub = \
            _create_cmd_pub(list_ctrlrs, right_steer_ctrlr_name)

        self._left_front_axle_cmd_pub = \
            _create_axle_cmd_pub(list_ctrlrs, left_front_axle_ctrlr_name)
        self._right_front_axle_cmd_pub = \
            _create_axle_cmd_pub(list_ctrlrs, right_front_axle_ctrlr_name)
        self._left_rear_axle_cmd_pub = \
            _create_axle_cmd_pub(list_ctrlrs, left_rear_axle_ctrlr_name)
        self._right_rear_axle_cmd_pub = \
            _create_axle_cmd_pub(list_ctrlrs, right_rear_axle_ctrlr_name)

        self.servoCmdSub = \
            rospy.Subscriber("servoCommand", servoMSG,
                             self.servoCmdCb, queue_size=1)
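
# _get_link_pos() is a helper defined elsewhere in this file. Judging from the zero
# vector used for rrw_pos above, it presumably returns a link's position relative to
# the right rear wheel link; the method below is a hypothetical sketch of that idea,
# not the original code.
import numpy
import rospy
import tf

def _get_link_pos(self, tfl, link):
    """Position of `link` relative to the right rear wheel link, as a numpy array."""
    while not rospy.is_shutdown():
        try:
            trans, _ = tfl.lookupTransform(self._right_rear_link_name, link,
                                           rospy.Time(0))
            return numpy.array(trans)
        except (tf.LookupException, tf.ConnectivityException,
                tf.ExtrapolationException):
            rospy.sleep(0.1)
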
Example #6
    def __init__(self, robot_name, robot_config, robot_is_marsupial):
        rospy.init_node('cloudsim2osgar', log_level=rospy.DEBUG)
        self.bus = Bus()
        self.robot_name = robot_name
        self.robot_config = robot_config
        self.prev_gas_detected = None  # report on change including the first reading

        self.mapping_info_provider = MappingInformationProvider()

        # common topics
        topics = [
            ('/' + robot_name + '/battery_state', BatteryState, self.battery_state, ('battery_state',)),
            ('/subt/score', Int32, self.score, ('score',)),
            ('/' + robot_name + '/gas_detected', Bool, self.gas_detected, ('gas_detected', )),
        ]

        publishers = {}
        publishers['cmd_vel'] = (rospy.Publisher('/' + robot_name + '/cmd_vel', Twist, queue_size=1), twist)

        self.tf = tf.TransformListener()

        # configuration specific topics
        robot_description = rospy.get_param("/{}/robot_description".format(robot_name))
        if robot_config == "ROBOTIKA_X2_SENSOR_CONFIG_1":
            rospy.loginfo("robotika x2")
        elif robot_config == "SSCI_X4_SENSOR_CONFIG_2":
            rospy.loginfo("ssci drone")
            topics.append(('/' + robot_name + '/top_scan', LaserScan, self.top_scan, ('top_scan',)))
            topics.append(('/' + robot_name + '/bottom_scan', LaserScan, self.bottom_scan, ('bottom_scan',)))
            topics.append(('/' + robot_name + '/odom_fused', Odometry, self.odom_fused, ('pose3d',)))
            topics.append(('/' + robot_name + '/air_pressure', FluidPressure, self.air_pressure, ('air_pressure',)))
            topics.append(('/' + robot_name + '/front_scan', LaserScan, self.scan360, ('scan360',)))
            topics.append(('/rtabmap/rgbd/compressed', RGBDImage, self.rgbd_front, ('rgbd_front',)))
            if robot_name.endswith('XM'):
                topics.append(('/mapping/octomap_binary', Octomap, self.octomap, ('octomap',)))
            if robot_is_marsupial == 'true':
                rospy.loginfo("X4 is marsupial")
                publishers['detach'] = (rospy.Publisher('/' + robot_name + '/detach', Empty, queue_size=1), empty)
        elif robot_config == "CORO_PAM_SENSOR_CONFIG_1":
            rospy.loginfo("CoRo Pam drone")
            topics.append(('/' + robot_name + '/local_map/output/up', LaserScan, self.top_scan, ('top_scan',)))
            topics.append(('/' + robot_name + '/local_map/output/down', LaserScan, self.bottom_scan, ('bottom_scan',)))
            topics.append(('/' + robot_name + '/odom_fused', Odometry, self.odom_fused, ('pose3d',)))
            topics.append(('/' + robot_name + '/air_pressure', FluidPressure, self.air_pressure, ('air_pressure',)))
            topics.append(('/rtabmap/rgbd/front/compressed', RGBDImage, self.rgbd_front, ('rgbd_front',)))
            topics.append(('/rtabmap/rgbd/left/compressed_rotated', RGBDImage, self.rgbd_left, ('rgbd_left',)))
            topics.append(('/rtabmap/rgbd/right/compressed_rotated', RGBDImage, self.rgbd_right, ('rgbd_right',)))
            topics.append(('/' + robot_name + '/local_map/output/scan', LaserScan, self.scan360, ('scan360',)))
            if robot_name.endswith('XM'):
                topics.append(('/mapping/octomap_binary', Octomap, self.octomap, ('octomap',)))
            if robot_is_marsupial == 'true':
                rospy.loginfo("CoRo Pam is marsupial")
                publishers['detach'] = (rospy.Publisher('/' + robot_name + '/detach', Empty, queue_size=1), empty)
        elif robot_config == "TEAMBASE":
            rospy.loginfo("teambase")
        elif robot_config.startswith("ROBOTIKA_FREYJA_SENSOR_CONFIG"):
            if robot_config.endswith("_2"):
                rospy.loginfo("freya 2 (with comms beacons)")
                publishers['deploy'] = (rospy.Publisher('/' + robot_name + '/breadcrumb/deploy', Empty, queue_size=1), empty)
            else:
                rospy.loginfo("freya 1 (basic)")
            topics.append(('/' + robot_name + '/odom_fused', Odometry, self.odom_fused, ('pose3d',)))
            topics.append(('/' + robot_name + '/scan_front', LaserScan, self.scan_front, ('scan_front',)))
            topics.append(('/' + robot_name + '/scan_rear', LaserScan, self.scan_rear, ('scan_rear',)))
            topics.append(('/' + robot_name + '/local_map/output/scan', LaserScan, self.scan360, ('scan360',)))
            topics.append(('/rtabmap/rgbd/front/compressed', RGBDImage, self.rgbd_front, ('rgbd_front',)))
            topics.append(('/rtabmap/rgbd/rear/compressed', RGBDImage, self.rgbd_rear, ('rgbd_rear',)))
            topics.append(('/' + robot_name + '/camera_left/image_raw/compressed', CompressedImage, self.camera_left, ('camera_left',)))
            topics.append(('/' + robot_name + '/camera_right/image_raw/compressed', CompressedImage, self.camera_right, ('camera_right',)))
            if robot_name.endswith('XM'):
                topics.append(('/mapping/octomap_binary', Octomap, self.octomap, ('octomap',)))
        elif robot_config.startswith("ROBOTIKA_KLOUBAK_SENSOR_CONFIG"):
            if robot_config.endswith("_2"):
                rospy.loginfo("k2 2 (with comms beacons)")
                publishers['deploy'] = (rospy.Publisher('/' + robot_name + '/breadcrumb/deploy', Empty, queue_size=1), empty)
            else:
                rospy.loginfo("k2 1 (basic)")
            topics.append(('/' + robot_name + '/odom_fused', Odometry, self.odom_fused, ('pose3d',)))
            topics.append(('/' + robot_name + '/scan_front', LaserScan, self.scan_front, ('scan_front',)))
            topics.append(('/' + robot_name + '/scan_rear', LaserScan, self.scan_rear, ('scan_rear',)))
            topics.append(('/' + robot_name + '/local_map/output/scan', LaserScan, self.scan360, ('scan360',)))
            topics.append(('/rtabmap/rgbd/front/compressed', RGBDImage, self.rgbd_front, ('rgbd_front',)))
            topics.append(('/rtabmap/rgbd/rear/compressed', RGBDImage, self.rgbd_rear, ('rgbd_rear',)))
        else:
            rospy.logerr("unknown configuration")
            return

        if robot_config.startswith("ROBOTIKA_KLOUBAK_SENSOR_CONFIG"):
            topics.append(('/' + robot_name + '/imu/front/data', Imu, self.imu, ('acc',)))
        else:
            topics.append(('/' + robot_name + '/imu/data', Imu, self.imu, ('acc',)))

        outputs = functools.reduce(operator.add, (t[-1] for t in topics)) + ('robot_name', 'joint_angle')
        self.bus.register(outputs)

        for name, type, handler, _ in topics:
            rospy.loginfo("waiting for {}".format(name))
            rospy.wait_for_message(name, type)
            setattr(self, handler.__name__+"_count", 0)
            rospy.Subscriber(name, type, handler)

        self.bus.publish('robot_name', robot_name)

        # main thread receives data from osgar and sends it to ROS
        while True:
            channel, data = self.bus.listen()
            # switch on channel to feed different ROS publishers
            if channel in publishers:
                publisher, handle = publishers[channel]
                publisher.publish(handle(data))

                if channel != 'cmd_vel':
                    # for debugging breadcrumbs deployment and marsupial detachment
                    rospy.logdebug("receiving: {} {}".format(channel, data))
            elif channel == 'artf_xyz':
                for artf_type, artf_xyz, _, _ in data:
                    self.mapping_info_provider.add_artifact(artf_type, artf_xyz)
            else:
                rospy.loginfo("ignoring: {} {}".format(channel, data))
Example #7
    def __init__(self,
                 map_topic='/map',
                 paths=[
                     '/move_base/SBPLLatticePlanner/plan',
                     '/move_base/TrajectoryPlannerROS/local_plan'
                 ],
                 polygons=['/move_base/local_costmap/robot_footprint'],
                 tf_listener=None,
                 parent=None):
        super(NavView, self).__init__()
        self._parent = parent

        self._pose_mode = False
        self._goal_mode = False
        self.last_path = None

        self.map_changed.connect(self._update)
        self.destroyed.connect(self.close)

        #ScrollHandDrag
        self.setDragMode(QGraphicsView.ScrollHandDrag)

        self._map = None
        self._map_item = None

        self.w = 0
        self.h = 0

        self._paths = {}
        self._polygons = {}
        self.path_changed.connect(self._update_path)
        self.polygon_changed.connect(self._update_polygon)

        self._colors = [(238, 34, 116), (68, 134, 252), (236, 228, 46),
                        (102, 224, 18), (242, 156, 6), (240, 64, 10),
                        (196, 30, 250)]

        self._scene = QGraphicsScene()

        # use the provided listener, or create one; the parameter is named
        # tf_listener so that it does not shadow the imported tf module
        if tf_listener is None:
            self._tf = tf.TransformListener()
        else:
            self._tf = tf_listener
        self.map_sub = rospy.Subscriber(map_topic, OccupancyGrid, self.map_cb)

        for path in paths:
            self.add_path(path)

        for poly in polygons:
            self.add_polygon(poly)

        try:
            self._pose_pub = rospy.Publisher('/initialpose',
                                             PoseWithCovarianceStamped,
                                             queue_size=100)
            self._goal_pub = rospy.Publisher('/move_base_simple/goal',
                                             PoseStamped,
                                             queue_size=100)
        except TypeError:
            self._pose_pub = rospy.Publisher('/initialpose',
                                             PoseWithCovarianceStamped)
            self._goal_pub = rospy.Publisher('/move_base_simple/goal',
                                             PoseStamped)

        self.setScene(self._scene)
Example #8
    def __init__(self):
        # Real robot new positions
        # self.joint_positions_p1 = 1*np.array([0.313771815355006, 0.7874442301146876, -0.35008257875556276, -1.605427980371526, 0.39355274195809514, 0.7924178032543101, -1.4842731591265332])
        # self.joint_positions_p2 = 1*np.array([0.5628616530365289, 0.8266034640114479, -0.343280991826965, -1.548290132212003, 0.36961525944794194, 0.787609672383395, -1.4840846472048561])
        # self.joint_positions_p3 = 1*np.array([0.7087980537375754, 0.9364973682354464, -0.3175523589760338, -1.3048212555591447, 0.32881175402628704, 0.9519498877320673, -1.483692343658399])
        # self.joint_positions_home = 1*np.array([0.5569787767738359, 0.2807509613788765, -0.2521163338340084, -1.4935594952802898, 0.06308649381713503, 1.3422390616692874, -1.528839315708223])

        # FOR VIDEOS WITH SIMULATION
        # self.joint_positions_p1 = 1*np.array([0.313771815355006, 0.7874442301146876, -0.35008257875556276, -1.605427980371526, 0.39355274195809514, 0.7924178032543101, -1.4842731591265332])
        # self.joint_positions_p2 = 1*np.array([0.5628616530365289, 0.8266034640114479, -0.343280991826965, -1.548290132212003, 0.36961525944794194, 0.787609672383395, -1.4840846472048561])
        # self.joint_positions_p3 = 1*np.array([0.9087980537375754, 0.9364973682354464, -0.3175523589760338, -1.3048212555591447, 0.32881175402628704, 0.9519498877320673, -1.483692343658399])
        # self.joint_positions_home = 1*np.array([0.5569787767738359, 0.2807509613788765, -0.2521163338340084, -1.4935594952802898, 0.06308649381713503, 1.3422390616692874, -1.528839315708223])

        # self.joint_positions_home = 1*np.array([0.5569787767738359, 0.2807509613788765, -0.2521163338340084, -1.4935594952802898, 0.06308649381713503, 1.3422390616692874, -1.528839315708223])
        # self.joint_positions_p1 = 1*np.array([0.27099370506783615, 0.7950941203635832, -0.18290599687813244, -1.545624301259042, 0.19252302584488534, 0.8620693993694969, -0.023598616669335572])
        # self.joint_positions_p2 = 1*np.array([0.6446756195683863, 0.9211181322779567, -0.4819512971273034, -1.4284985165235762, 0.5240945070507964, 0.9734429360283877, 0.024398803334734404])
        # self.joint_positions_p3 = 1*np.array([1.0506115772018698, 1.2189853831695028, -1.0281253363138159, -1.1760928235711319, 0.9755587628795142, 1.2917956014430403, 0.11816475720439637])
        # self.joint_positions_handover = 1*np.array([0.5569787767738359, 0.2807509613788765, -0.2521163338340084, -1.4935594952802898, 0.06308649381713503, 0.2022390616692874, -1.528839315708223])

        self.joint_positions_home = 1 * np.array([
            0.20024075874350197, 0.3457841368224398, -0.39772640410707416,
            -1.5429807609967892, 0.1143476257008436, 1.2734894582565162,
            -5.7464311123443805e-05
        ])
        self.joint_positions_p1 = 1 * np.array([
            -0.24520071701864543, 0.8216742323795524, -0.2692404729649888,
            -1.497127918129015, 0.3290061859738872, 0.875714937494543,
            0.027934688991962303
        ])
        self.joint_positions_p2 = 1 * np.array([
            0.005690829067532814, 0.7855678600152904, -0.21038864090325585,
            -1.56908060519925, 0.14593324885458367, 0.8493683380970197,
            -0.2437001721577387
        ])
        self.joint_positions_p3 = 1 * np.array([
            0.15672118448444747, 0.7813835079646257, -0.12592376380188047,
            -1.560340569973171, 0.14926112433896319, 0.8510714761960086,
            -0.025095686034286177
        ])

        self.pub = rospy.Publisher('action_state', String, queue_size=10)
        # self.pub_for_gripper = rospy.Publisher('gripper_command', String, queue_size = 10)
        self.ctrl_freq = 200
        self.q_send = 1 * np.array([
            -0.2561309292711379, 0.48341762688080325, -0.07932016919260708,
            -1.0536090715454243, -0.017271089745345972, 1.5119789493870104,
            -1.5704001283280475
        ])
        self.q_old = 1 * np.array([
            -0.2561309292711379, 0.48341762688080325, -0.07932016919260708,
            -1.0536090715454243, -0.017271089745345972, 1.5119789493870104,
            -1.5704001283280475
        ])
        self.old_q_send = 1 * np.array([
            -0.2561309292711379, 0.48341762688080325, -0.07932016919260708,
            -1.0536090715454243, -0.017271089745345972, 1.5119789493870104,
            -1.5704001283280475
        ])
        self.joint_names = [
            "iiwa_joint_1", "iiwa_joint_2", "iiwa_joint_3", "iiwa_joint_4",
            "iiwa_joint_5", "iiwa_joint_6", "iiwa_joint_7"
        ]
        self.joint_cmd_pub = rospy.Publisher(
            '/iiwa/PositionController/command',
            numpy_msg(Float64MultiArray),
            queue_size=10)
        self.joint_state_sub = rospy.Subscriber('iiwa/joint_states',
                                                numpy_msg(JointState),
                                                self.get_joint_state)
        self.joint_current_states = self.joint_positions_home
        self.position_reached = 0
        self.current_action = "home"
        self.current_target = self.joint_positions_home
        self.gripper_service = rospy.ServiceProxy('gripper_command',
                                                  ControlGripper)

        #PLOTS
        self.commandJoints = [[] for _ in np.arange(7)]
        self.errorJoints = [[] for _ in np.arange(7)]
        self.speedJoints = [[] for _ in np.arange(7)]
        self.endEffxyz = [[] for _ in np.arange(3)]
        self.timeStart = 0
        self.timeVector = []
        self.listenerEndEff = tf.TransformListener()
        self.time = []

        self.subplotID = [331, 332, 333, 334, 335, 336, 337]
        self.titles = [
            'Joint 1', 'Joint 2', 'Joint 3', 'Joint 4', 'Joint 5', 'Joint 6',
            'Joint 7'
        ]
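
# get_joint_state() is the subscriber callback registered above; a minimal sketch of
# what it presumably does (hypothetical, the real method may also track errors):
import numpy as np

def get_joint_state(self, msg):
    """Store the most recently measured joint positions."""
    self.joint_current_states = np.array(msg.position)
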
Example #9
#!/usr/bin/env python
# import roslib
# roslib.load_manifest('learning_tf')
import rospy
import math
import tf
import geometry_msgs.msg
import turtlesim.srv

if __name__ == '__main__':
    rospy.init_node('turtle_tf_listener')  #create node

    listener = tf.TransformListener()  #create TransformListener instance

    rospy.wait_for_service('spawn')  #Blocks until service is available
    spawner = rospy.ServiceProxy(
        'spawn', turtlesim.srv.Spawn)  #Create a handle to a ROS service
    #for invoking calls

    spawner(4, 2, 0, 'turtle2')  # spawn a second turtle

    #create Publisher
    turtle_vel = rospy.Publisher('turtle2/cmd_vel',
                                 geometry_msgs.msg.Twist,
                                 queue_size=1)

    rate = rospy.Rate(10.0)  #Publish rate =10 Hz

    while not rospy.is_shutdown():
        try:
            # get the latest available transform from turtle1 to turtle2 frame
            (trans, rot) = listener.lookupTransform('/turtle2', '/turtle1',
                                                    rospy.Time(0))
        except (tf.LookupException, tf.ConnectivityException,
                tf.ExtrapolationException):
            continue

        # turn toward turtle1 and drive forward, proportional to the offset
        cmd = geometry_msgs.msg.Twist()
        cmd.angular.z = 4 * math.atan2(trans[1], trans[0])
        cmd.linear.x = 0.5 * math.sqrt(trans[0] ** 2 + trans[1] ** 2)
        turtle_vel.publish(cmd)

        rate.sleep()
Example #10
    def __init__(self):
        # Give the node a name
        rospy.init_node('calibrate_angular', anonymous=False)

        # Set rospy to execute a shutdown function when terminating the script
        rospy.on_shutdown(self.shutdown)

        # How fast will we check the odometry values?
        self.rate = 30
        r = rospy.Rate(self.rate)

        # The test angle is 360 degrees
        self.test_angle = radians(rospy.get_param('~test_angle', 360.0))

        self.speed = rospy.get_param('~speed', 0.7)  # radians per second
        self.tolerance = rospy.get_param(
            '~tolerance', radians(5))  # 5 degrees, converted to radians
        self.odom_angular_scale_correction = rospy.get_param(
            '~odom_angular_scale_correction', 1.0)
        self.start_test = rospy.get_param('~start_test', True)

        # Publisher to control the robot's speed
        self.cmd_vel = rospy.Publisher('/cmd_vel', Twist)

        # Fire up the dynamic_reconfigure server
        dyn_server = Server(CalibrateAngularConfig,
                            self.dynamic_reconfigure_callback)

        # Connect to the dynamic_reconfigure server
        dyn_client = dynamic_reconfigure.client.Client("calibrate_angular",
                                                       timeout=60)

        # The base frame is base_footprint for the TurtleBot but base_link for Pi Robot
        self.base_frame = rospy.get_param('~base_frame', '/base_link')

        # The odom frame is usually just /odom
        self.odom_frame = rospy.get_param('~odom_frame', '/odom')

        # Initialize the tf listener
        self.tf_listener = tf.TransformListener()

        # Give tf some time to fill its buffer
        rospy.sleep(2)

        # Make sure we see the odom and base frames
        self.tf_listener.waitForTransform(self.odom_frame, self.base_frame,
                                          rospy.Time(), rospy.Duration(60.0))

        rospy.loginfo("Bring up rqt_reconfigure to control the test.")

        reverse = 1

        while not rospy.is_shutdown():
            # Execute the rotation

            if self.start_test:
                # Get the current rotation angle from tf
                self.odom_angle = self.get_odom_angle()

                last_angle = self.odom_angle
                turn_angle = 0

                # Alternate directions between tests
                reverse = -reverse
                angular_speed = reverse * self.speed

                while abs(turn_angle) < abs(self.test_angle):
                    if rospy.is_shutdown():
                        return
                    move_cmd = Twist()
                    move_cmd.angular.z = angular_speed
                    self.cmd_vel.publish(move_cmd)
                    r.sleep()

                    # Get the current rotation angle from tf
                    self.odom_angle = self.get_odom_angle()

                    # Compute how far we have gone since the last measurement
                    delta_angle = self.odom_angular_scale_correction * normalize_angle(
                        self.odom_angle - last_angle)

                    # Add to our total angle so far
                    turn_angle += delta_angle
                    last_angle = self.odom_angle

                # Stop the robot
                self.cmd_vel.publish(Twist())

                # Update the status flag
                self.start_test = False
                params = {'start_test': False}
                dyn_client.update_configuration(params)

            rospy.sleep(0.5)

        # Stop the robot
        self.cmd_vel.publish(Twist())
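
# get_odom_angle() is defined elsewhere in the calibrate_angular script. A minimal
# sketch of the usual implementation: look up the odom->base transform with the tf
# listener created above and return the yaw component (sketch only, assuming
# tf.transformations in place of the script's own quaternion helper):
import rospy
import tf
from tf.transformations import euler_from_quaternion

def get_odom_angle(self):
    """Current rotation of the base frame in the odom frame, in radians."""
    try:
        (trans, rot) = self.tf_listener.lookupTransform(self.odom_frame,
                                                        self.base_frame,
                                                        rospy.Time(0))
    except (tf.Exception, tf.ConnectivityException, tf.LookupException):
        rospy.loginfo("TF Exception")
        return
    return euler_from_quaternion(rot)[2]
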
    def __init__(self, sim):
        """
        Subscribe to the configuration message
        """
        if (False == sim):
            self.config_updated = False
            rospy.Subscriber("/segway/feedback/configuration", Configuration,
                             self._update_configuration_limits)
            rospy.sleep(1.0)

            if (False == self.config_updated):
                rospy.logerr(
                    "Timed out waiting for RMP feedback topics make sure the driver is running"
                )
                sys.exit(0)
                return
        else:
            self.vel_limit_mps = 0.5
            self.yaw_rate_limit_rps = 0.5
            self.accel_lim = 1.0
            self.yaw_accel_lim = 1.0

        # create an interactive marker server on the topic namespace simple_marker
        self._server = InteractiveMarkerServer("segway_marker_ctrl")
        self.br = tf.TransformBroadcaster()
        self.listener = tf.TransformListener()
        self.last_marker_update = rospy.get_time()
        robot_name = rospy.get_param('~robot_name', "RMP_110")

        if ("RMP_OMNI" == robot_name):
            self.include_y = True
        else:
            self.include_y = False

        self.linear_scale = rospy.get_param('~linear_scale', 1.0)
        self.angular_scale = rospy.get_param('~angular_scale', 2.2)

        self.motion_cmd = Twist()
        self.motion_pub = rospy.Publisher('/segway/int_marker/cmd_vel',
                                          Twist,
                                          queue_size=10)

        # create an interactive marker for our server
        int_marker = InteractiveMarker()
        int_marker.header.frame_id = "base_link"
        int_marker.name = "segway_twist_ctrl"
        int_marker.description = "Segway Control Marker"

        control = InteractiveMarkerControl()
        control.orientation_mode = InteractiveMarkerControl.FIXED
        control.orientation.w = 0.707106781
        control.orientation.x = 0.707106781
        control.orientation.y = 0
        control.orientation.z = 0
        control.name = "move_x"
        control.interaction_mode = InteractiveMarkerControl.MOVE_AXIS
        int_marker.controls.append(control)

        if (True == self.include_y):
            control = InteractiveMarkerControl()
            control.orientation_mode = InteractiveMarkerControl.FIXED
            control.orientation.w = 0.707106781
            control.orientation.x = 0
            control.orientation.y = 0
            control.orientation.z = 0.707106781
            control.name = "move_y"
            control.interaction_mode = InteractiveMarkerControl.MOVE_AXIS
            int_marker.controls.append(control)

        control = InteractiveMarkerControl()
        control.orientation_mode = InteractiveMarkerControl.FIXED
        control.orientation.w = 0.707106781
        control.orientation.x = 0
        control.orientation.y = 0.707106781
        control.orientation.z = 0
        control.name = "rotate_z"
        control.interaction_mode = InteractiveMarkerControl.ROTATE_AXIS
        int_marker.controls.append(control)

        # add the interactive marker to our collection &
        # tell the server to call processFeedback() when feedback arrives for it
        self._server.insert(int_marker, self.processFeedback)

        # 'commit' changes and send to all clients
        self._server.applyChanges()

        SegwayMarkerMenu(self._server, sim)
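
# processFeedback() is the callback handed to the InteractiveMarkerServer above. A
# hypothetical sketch of a typical implementation (the scaling and event handling
# shown here are assumptions, not the original code):
import rospy
from visualization_msgs.msg import InteractiveMarkerFeedback
from tf.transformations import euler_from_quaternion

def processFeedback(self, feedback):
    """Convert marker drag feedback into a velocity command."""
    if feedback.event_type != InteractiveMarkerFeedback.POSE_UPDATE:
        return
    p = feedback.pose.position
    q = feedback.pose.orientation
    yaw = euler_from_quaternion([q.x, q.y, q.z, q.w])[2]
    self.motion_cmd.linear.x = self.linear_scale * p.x
    if self.include_y:
        self.motion_cmd.linear.y = self.linear_scale * p.y
    self.motion_cmd.angular.z = self.angular_scale * yaw
    self.motion_pub.publish(self.motion_cmd)
    self.last_marker_update = rospy.get_time()
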
Example #12
            pub1.publish(cloud)
        else:
            pub2.publish(cloud)
    return


def transform_cloud(cloud_array, transformation_matrix=np.eye(4)):
    '''
    Transforms all points in a cloud given a 4x4 homogeneous transformation matrix
    Input: Nx3 (or 3xN) array of points in the cloud
    Output: Nx3 array of transformed points
    '''
    # build 4xN homogeneous coordinates regardless of the input orientation
    if cloud_array.shape[0] > cloud_array.shape[1]:
        homogeneous_coordinates = np.vstack(
            (cloud_array.T, np.ones(cloud_array.shape[0])))
    else:
        homogeneous_coordinates = np.vstack(
            (cloud_array, np.ones(cloud_array.shape[1])))

    transformed_cloud = np.matmul(transformation_matrix,
                                  homogeneous_coordinates)
    return transformed_cloud[:3, :].T


if __name__ == '__main__':
    rospy.init_node('cloud_processing_node')
    tf_tree = tf.TransformListener()
    rospy.Subscriber('/sensor/side_camera/depth/color/points', PointCloud2,
                     cloud_sub_callback)
    rospy.spin()
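
# A minimal sketch of how transform_cloud() might be fed from the tf tree inside a
# callback: read the cloud into an Nx3 array, look up the transform into a target
# frame ('base_link' here is a hypothetical choice) and build the 4x4 matrix with
# fromTranslationRotation(), which tf.TransformListener inherits from TransformerROS.
# Relies on the module-level tf_tree listener and transform_cloud() defined above.
import numpy as np
import rospy
import sensor_msgs.point_cloud2 as pc2

def cloud_in_base_frame(cloud_msg):
    points = np.array(list(pc2.read_points(cloud_msg,
                                           field_names=('x', 'y', 'z'),
                                           skip_nans=True)))
    trans, rot = tf_tree.lookupTransform('base_link',
                                         cloud_msg.header.frame_id,
                                         rospy.Time(0))
    matrix = tf_tree.fromTranslationRotation(trans, rot)  # 4x4 homogeneous matrix
    return transform_cloud(points, matrix)
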
    def __init__(self):
        self._cv_bridge = CvBridge()
        self._laser_projector = LaserProjection()

        # # Camera rectification?? To be improved: read from .yaml file
        # Put here the calibration of the camera
        # self.DIM    = (1920, 1208)
        # self.K      = np.array([[693.506921, 0.000000, 955.729444], [0.000000, 694.129349, 641.732500], [0.0, 0.0, 1.0]])
        # self.D      = np.array([[-0.022636], [ -0.033855], [0.013493], [-0.001831]])
        # self.map1, self.map2    = cv2.fisheye.initUndistortRectifyMap(self.K, self.D, np.eye(3), self.K, self.DIM, cv2.CV_16SC2) # np.eye(3) here is actually the rotation matrix

        #   OR load it from a yaml file
        self.cameraModel = PinholeCameraModel()

        # See https://github.com/ros-perception/camera_info_manager_py/tree/master/tests
        camera_infomanager = CameraInfoManager(
            cname='truefisheye',
            url='package://ros_camera_lidar_calib/cfg/truefisheye800x503.yaml'
        )  # Select the calibration file
        camera_infomanager.loadCameraInfo()
        self.cameraInfo = camera_infomanager.getCameraInfo()
        # Create a camera model from the camera info
        self.cameraModel.fromCameraInfo(self.cameraInfo)  # Create camera model
        self.DIM = (self.cameraInfo.width, self.cameraInfo.height)
        # Get rectification maps
        self.map1, self.map2 = cv2.fisheye.initUndistortRectifyMap(
            self.cameraModel.intrinsicMatrix(),
            self.cameraModel.distortionCoeffs(), np.eye(3),
            self.cameraModel.intrinsicMatrix(),
            (self.cameraInfo.width, self.cameraInfo.height),
            cv2.CV_16SC2)  # np.eye(3) here is actually the rotation matrix

        # # Declare subscribers to get the latest data
        cam0_subs_topic = '/gmsl_camera/port_0/cam_0/image_raw'
        cam1_subs_topic = '/gmsl_camera/port_0/cam_1/image_raw'
        cam2_subs_topic = '/gmsl_camera/port_0/cam_2/image_raw'
        #cam3_subs_topic = '/gmsl_camera/port_0/cam_3/image_raw/compressed'
        lidar_subs_topic = '/Sensor/points'

        #self.cam0_img_sub   = rospy.Subscriber( cam0_subs_topic , Image, self.cam0_img_callback, queue_size=1)
        #self.cam1_img_sub   = rospy.Subscriber( cam1_subs_topic , Image, self.cam1_img_callback, queue_size=1)
        self.cam2_img_sub = rospy.Subscriber(cam2_subs_topic,
                                             Image,
                                             self.cam2_img_callback,
                                             queue_size=1)
        #self.cam0_img_sub   = rospy.Subscriber( cam0_subs_topic , CompressedImage, self.cam0_img_compre_callback, queue_size=1)
        #self.cam1_img_sub   = rospy.Subscriber( cam1_subs_topic , CompressedImage, self.cam1_img_compre_callback, queue_size=1)
        #self.cam2_img_sub   = rospy.Subscriber( cam2_subs_topic , CompressedImage, self.cam2_img_compre_callback, queue_size=1)
        #self.cam3_img_sub  = rospy.Subscriber( cam3_subs_topic , CompressedImage, self.cam3_img_compre_callback, queue_size=1)
        self.lidar_sub = rospy.Subscriber(lidar_subs_topic,
                                          PointCloud2,
                                          self.lidar_callback,
                                          queue_size=1)
        # Get the tfs
        self.tf_listener = tf.TransformListener()
        #self.lidar_time = rospy.Subscriber(lidar_subs_topic , PointCloud2, self.readtimestamp)
        #self.img0_time = rospy.Subscriber(cam0_subs_topic , CompressedImage, self.readtimestamp)

        # # Declare the global variables we will use to get the latest info
        self.cam0_image_np = np.empty((self.DIM[1], self.DIM[0], 3))
        self.cam0_undistorted_img = np.empty((self.DIM[1], self.DIM[0], 3))
        self.cam1_image_np = np.empty((self.DIM[1], self.DIM[0], 3))
        self.cam1_undistorted_img = np.empty((self.DIM[1], self.DIM[0], 3))
        self.cam2_image_np = np.empty((self.DIM[1], self.DIM[0], 3))
        self.cam2_undistorted_img = np.empty((self.DIM[1], self.DIM[0], 3))
        self.cam3_image_np = np.empty((self.DIM[1], self.DIM[0], 3))
        self.cam3_undistorted_img = np.empty((self.DIM[1], self.DIM[0], 3))
        self.pcl_cloud = np.empty(
            (500, 4)
        )  # the width of a scan is not known in advance and may vary

        self.now = rospy.Time.now()

        # # Main loop: data projection and alignment in real time
        self.lidar_angle_range_interest = [
            0, 180
        ]  # from -180 to 180 deg (front is 0 deg): range of angles to keep lidar points from, depending on the camera
        thread.start_new_thread(self.projection_calibrate, ())
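
# projection_calibrate() itself is not shown in this example. As a rough sketch of the
# core projection step it presumably performs, image_geometry's PinholeCameraModel can
# map a 3D point in the camera optical frame to pixel coordinates; the helper below is
# hypothetical and assumes the points have already been transformed into that frame.
import cv2

def draw_lidar_points(image, camera_model, points_in_camera_frame):
    """Overlay 3D points (camera optical frame) onto an image as green dots."""
    for x, y, z in points_in_camera_frame:
        if z <= 0:  # keep only points in front of the camera
            continue
        u, v = camera_model.project3dToPixel((x, y, z))
        cv2.circle(image, (int(u), int(v)), 2, (0, 255, 0), -1)
    return image
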
Example #14
    def __init__(self):
        rospy.init_node('tl_detector')

        self.pose = None
        self.waypoints = None
        self.waypoints_2d = None
        self.waypoints_tree = None
        self.has_image = None
        self.camera_image = None
        self.lights = []

        rospy.Subscriber('/current_pose', PoseStamped, self.pose_cb)
        rospy.Subscriber('/base_waypoints', Lane, self.waypoints_cb)
        '''
        /vehicle/traffic_lights provides you with the location of the traffic
        light in 3D map space and helps you acquire an accurate ground truth
        data source for the traffic light classifier by sending the current
        color state of all traffic lights in the simulator. When testing on
        the vehicle, the color state will not be available. You'll need to
        rely on the position of the light and the camera image to predict it.
        '''
        rospy.Subscriber('/vehicle/traffic_lights', TrafficLightArray,
                         self.traffic_cb)
        # TODO CAEd: consider other image formats to feed into classifier
        rospy.Subscriber('/image_color', Image, self.image_cb)

        config_string = rospy.get_param("/traffic_light_config")
        self.config = yaml.load(config_string)

        self.upcoming_red_light_pub =\
            rospy.Publisher('/traffic_waypoint', Int32, queue_size=1)

        self.bridge = CvBridge()
        # CAEd: set up classifier
        # full_param_name = rospy.search_param('model_location')
        # print("Full_Param Name: %s " % full_param_name)
        if IS_SIM:
            model_location = rospy.get_param(
                "/sim_model_path",
                '../../../models/ssd_sim/frozen_inference_graph.pb')
        else:
            model_location = rospy.get_param(
                "/real_model_path",
                '../../../models/ssd_real/frozen_inference_graph.pb')

        model_filter = rospy.get_param("/model_filter", 10)
        min_score = rospy.get_param("/min_score", 0.5)
        width = rospy.get_param("/width", 800)
        height = rospy.get_param("/height", 600)
        self.light_classifier =\
            TLClassifier(model_location, model_filter,
                         min_score, width, height)

        self.listener = tf.TransformListener()

        self.state = TrafficLight.UNKNOWN
        self.last_state = TrafficLight.UNKNOWN
        self.last_wp = -1
        self.state_count = 0

        self.loop()
    def __init__(self):
        ### MoveIt!
        moveit_commander.roscpp_initialize(sys.argv)
        br = tf.TransformBroadcaster()

        #rospy.init_node('move_group_planner',
        #                anonymous=True)

        self.robot = moveit_commander.RobotCommander()
        self.scene = moveit_commander.PlanningSceneInterface()

        self.group_left = moveit_commander.MoveGroupCommander("panda_left")
        self.group_right = moveit_commander.MoveGroupCommander("panda_right")
        self.hand_left = moveit_commander.MoveGroupCommander("hand_left")
        self.hand_right = moveit_commander.MoveGroupCommander("hand_right")
        self.display_trajectory_publisher = rospy.Publisher(
            '/move_group/display_planned_path',
            moveit_msgs.msg.DisplayTrajectory,
            queue_size=20)

        # We can get the name of the reference frame for this robot:
        self.planning_frame = self.group_left.get_planning_frame()
        print("============ Reference frame: %s" % self.planning_frame)

        # We can also print the name of the end-effector link for this group:
        self.eef_link = self.group_left.get_end_effector_link()
        print("============ End effector: %s" % self.eef_link)

        # We can get the name of the reference frame for this robot:
        self.planning_frame = self.group_right.get_planning_frame()
        print("============ Reference frame: %s" % self.planning_frame)

        # We can also print the name of the end-effector link for this group:
        self.eef_link = self.group_right.get_end_effector_link()
        print("============ End effector: %s" % self.eef_link)

        # We can get a list of all the groups in the robot:
        self.group_names = self.robot.get_group_names()
        print("============ Robot Groups:", self.robot.get_group_names())

        rospy.sleep(1)
        stefan_dir = "/home/jiyeong/STEFAN/stl/"
        self.stefan = SceneObject()

        self.scene.remove_attached_object(
            self.group_left.get_end_effector_link())
        self.scene.remove_attached_object(
            self.group_right.get_end_effector_link())
        self.scene.remove_world_object()
        for key, value in self.stefan.list.items():
            self.scene.add_mesh(key, value, stefan_dir + key + ".stl")

        print("============ Add Mesh:")
        ### Franka Collision
        self.set_collision_behavior = rospy.ServiceProxy(
            'franka_control/set_force_torque_collision_behavior',
            franka_control.srv.SetForceTorqueCollisionBehavior)
        #self.set_collision_behavior.wait_for_service()

        self.active_controllers = []

        self.listener = tf.TransformListener()
        self.tr = TransformerROS()
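
# A minimal sketch of how the listener and TransformerROS created above are commonly
# used: express a stamped pose in a move group's planning frame before sending it as
# a target. The helper name is hypothetical; transformPose() is part of
# tf.TransformListener.
import rospy

def to_planning_frame(self, pose_stamped):
    """Express pose_stamped in the left arm's planning frame."""
    frame = self.group_left.get_planning_frame()
    self.listener.waitForTransform(frame, pose_stamped.header.frame_id,
                                   rospy.Time(0), rospy.Duration(1.0))
    return self.listener.transformPose(frame, pose_stamped)
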
    def __init__(self):
        # Initialize the move_group API
        moveit_commander.roscpp_initialize(sys.argv)

        rospy.init_node('moveit_demo')
               
        self.gripper_opened = [rospy.get_param(GRIPPER_PARAM + "/max_opening") ]
        self.gripper_closed = [rospy.get_param(GRIPPER_PARAM + "/min_opening") ]
        self.gripper_neutral = [rospy.get_param(GRIPPER_PARAM + "/neutral") ]
        
        self.gripper_tighten = rospy.get_param(GRIPPER_PARAM + "/tighten") 

        # We need a tf listener to convert poses into arm reference base
        self.tf_listener = tf.TransformListener()
        
        # Use the planning scene object to add or remove objects
        scene = PlanningSceneInterface()

        # Create a scene publisher to push changes to the scene
        self.scene_pub = rospy.Publisher('planning_scene', PlanningScene, queue_size=10)

        # Create a publisher for displaying gripper poses
        self.gripper_pose_pub = rospy.Publisher('target_pose', PoseStamped, queue_size=10)

        # Create a dictionary to hold object colors
        self.colors = dict()

        # Initialize the move group for the right arm
        arm = MoveGroupCommander(GROUP_NAME_ARM)

        # Initialize the move group for the right gripper
        gripper = MoveGroupCommander(GROUP_NAME_GRIPPER)

        # Get the name of the end-effector link
        end_effector_link = arm.get_end_effector_link()

        # Allow some leeway in position (meters) and orientation (radians)
        arm.set_goal_position_tolerance(0.04)
        arm.set_goal_orientation_tolerance(1)

        # Allow replanning to increase the odds of a solution
        arm.allow_replanning(True)

        # Set the right arm reference frame
        arm.set_pose_reference_frame(REFERENCE_FRAME)

        # Allow 5 seconds per planning attempt
        arm.set_planning_time(5)

        # Set a limit on the number of pick attempts before bailing
        max_pick_attempts = 1

        # Set a limit on the number of place attempts
        max_place_attempts = 1
        rospy.loginfo("Scaling for MoveIt timeout=" + str(rospy.get_param('/move_group/trajectory_execution/allowed_execution_duration_scaling')))

        # Give the scene a chance to catch up
        rospy.sleep(2)

        # Give each of the scene objects a unique name
        table_id = 'table'
        box1_id = 'box1'
        box2_id = 'box2'
        target_id = 'target'
        tool_id = 'tool'

        # Remove leftover objects from a previous run
        scene.remove_world_object(table_id)
        scene.remove_world_object(box1_id)
        scene.remove_world_object(box2_id)
        scene.remove_world_object(target_id)
        scene.remove_world_object(tool_id)

        # Remove any attached objects from a previous session
        scene.remove_attached_object(GRIPPER_FRAME, target_id)

        # Give the scene a chance to catch up
        rospy.sleep(1)

        # Start the arm in the "arm_up" pose stored in the SRDF file
        rospy.loginfo("Set Arm: right_up")
        arm.set_named_target('right_up')
        if arm.go() != True:
            rospy.logwarn("  Go failed")
        rospy.sleep(2)

        # Move the gripper to the closed position
        rospy.loginfo("Set Gripper: Close " + str(self.gripper_closed ) )
        gripper.set_joint_value_target(self.gripper_closed)   
        if gripper.go() != True:
            rospy.logwarn("  Go failed")
        rospy.sleep(2)
        
        # Move the gripper to the neutral position
        rospy.loginfo("Set Gripper: Neutral " + str(self.gripper_neutral) )
        gripper.set_joint_value_target(self.gripper_neutral)
        if gripper.go() != True:
            rospy.logwarn("  Go failed")
        rospy.sleep(2)

        # Move the gripper to the open position
        rospy.loginfo("Set Gripper: Open " +  str(self.gripper_opened))
        gripper.set_joint_value_target(self.gripper_opened)
        if gripper.go() != True:
            rospy.logwarn("  Go failed")
        rospy.sleep(2)
            
        # Set the height of the table off the ground
        table_ground = 0.4

        # Set the dimensions of the scene objects [l, w, h]
        table_size = [0.2, 0.7, 0.01]
        box1_size = [0.1, 0.05, 0.05]
        box2_size = [0.05, 0.05, 0.15]

        # Set the target size [l, w, h]
        target_size = [0.07, 0.007, 0.10]

        # Add a table top and two boxes to the scene
        table_pose = PoseStamped()
        table_pose.header.frame_id = REFERENCE_FRAME
        table_pose.pose.position.x = 0.36
        table_pose.pose.position.y = 0.0
        table_pose.pose.position.z = (table_ground + table_size[2] / 2.0)-0.08
        table_pose.pose.orientation.w = 1.0
        scene.add_box(table_id, table_pose, table_size)

        box1_pose = PoseStamped()
        box1_pose.header.frame_id = REFERENCE_FRAME
        box1_pose.pose.position.x = table_pose.pose.position.x - 0.04
        box1_pose.pose.position.y = 0.0
        box1_pose.pose.position.z = (table_ground + table_size[2] + box1_size[2] / 2.0)-0.08
        box1_pose.pose.orientation.w = 1.0
        scene.add_box(box1_id, box1_pose, box1_size)

        box2_pose = PoseStamped()
        box2_pose.header.frame_id = REFERENCE_FRAME
        box2_pose.pose.position.x = table_pose.pose.position.x - 0.06
        box2_pose.pose.position.y = 0.2
        box2_pose.pose.position.z = (table_ground + table_size[2] + box2_size[2] / 2.0)-0.08
        box2_pose.pose.orientation.w = 1.0
        scene.add_box(box2_id, box2_pose, box2_size)

        # Set the target pose in between the boxes and on the table
        target_pose = PoseStamped()
        target_pose.header.frame_id = REFERENCE_FRAME
        target_pose.pose.position.x = table_pose.pose.position.x - 0.03
        target_pose.pose.position.y = 0.1
        target_pose.pose.position.z = (table_ground + table_size[2] + target_size[2] / 2.0)-0.075
        target_pose.pose.orientation.w = 1.0

        # Add the target object to the scene
        scene.add_box(target_id, target_pose, target_size)

        # Make the table red and the boxes orange
        self.setColor(table_id, 0.8, 0, 0, 1.0)
        self.setColor(box1_id, 0.8, 0.4, 0, 1.0)
        self.setColor(box2_id, 0.8, 0.4, 0, 1.0)

        # Make the target yellow
        self.setColor(target_id, 0.9, 0.9, 0, 1.0)

        # Send the colors to the planning scene
        self.sendColors()

        # Set the support surface name to the table object
        arm.set_support_surface_name(table_id)

        # Specify a pose to place the target after being picked up
        place_pose = PoseStamped()
        place_pose.header.frame_id = REFERENCE_FRAME
        place_pose.pose.position.x = table_pose.pose.position.x - 0.15
        place_pose.pose.position.y = -0.23
        place_pose.pose.position.z = table_ground + table_size[2] + target_size[2] / 2.0
        place_pose.pose.orientation.w = 1.0

        # Initialize the grasp pose to the target pose
        grasp_pose = target_pose

        # Shift the grasp pose by half the width of the target to center it
        grasp_pose.pose.position.y -= target_size[1] / 2.0

        # Generate a list of grasps
        grasps = self.make_grasps(grasp_pose, [target_id], [target_size[1] + self.gripper_tighten])

        # Track success/failure and number of attempts for pick operation
        result = MoveItErrorCodes.FAILURE
        n_attempts = 0

        # Repeat until we succeed or run out of attempts
        while result != MoveItErrorCodes.SUCCESS and n_attempts < max_pick_attempts:
            rospy.loginfo("Pick attempt #" + str(n_attempts))
            for grasp in grasps:
                # Publish the grasp poses so they can be viewed in RViz
                self.gripper_pose_pub.publish(grasp.grasp_pose)
                rospy.sleep(0.2)

                result = arm.pick(target_id, grasps)
                if result == MoveItErrorCodes.SUCCESS:
                    break

            n_attempts += 1
            rospy.sleep(0.2)

        # If the pick was successful, attempt the place operation
        if result == MoveItErrorCodes.SUCCESS:
            rospy.loginfo("  Pick: Done!")
            # Generate valid place poses
            places = self.make_places(place_pose)

            success = False
            n_attempts = 0

            # Repeat until we succeed or run out of attempts
            while not success and n_attempts < max_place_attempts:
                rospy.loginfo("Place attempt #" + str(n_attempts))
                for place in places:
                    # Publish the place poses so they can be viewed in RViz
                    self.gripper_pose_pub.publish(place)
                    rospy.sleep(0.2)

                    success = arm.place(target_id, place)
                    if success:
                        break
                
                n_attempts += 1
                rospy.sleep(0.2)

            if not success:
                rospy.logerr("Place operation failed after " + str(n_attempts) + " attempts.")
            else:
                rospy.loginfo("  Place: Done!")
        else:
            rospy.logerr("Pick operation failed after " + str(n_attempts) + " attempts.")

        # Return the arm to the "resting" pose stored in the SRDF file (passing through right_up)
        arm.set_named_target('right_up')
        arm.go()
        
        arm.set_named_target('resting')
        arm.go()

        # Open the gripper to the neutral position
        gripper.set_joint_value_target(self.gripper_neutral)
        gripper.go()

        rospy.sleep(1)

        # Shut down MoveIt cleanly
        moveit_commander.roscpp_shutdown()

        # Exit the script
        moveit_commander.os._exit(0)
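# A minimal sketch (not part of the example above) of replacing the fixed
# rospy.sleep() calls after scene.add_box() with a poll of the planning scene;
# `scene` is assumed to be the same moveit_commander.PlanningSceneInterface.
import rospy


def wait_for_scene_object(scene, object_id, timeout=5.0):
    """Return True once object_id is reported by the planning scene."""
    deadline = rospy.Time.now() + rospy.Duration(timeout)
    while not rospy.is_shutdown() and rospy.Time.now() < deadline:
        if object_id in scene.get_known_object_names():
            return True
        rospy.sleep(0.1)
    return False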
Example #17
0
    def __init__(self):
        rospy.loginfo("Initializing %s" % rospy.get_name())

        ## parameters
        self.map_frame = rospy.get_param("~map_frame", 'odom')
        self.bot_frame = 'base_link'
        self.switch_thred = 1.5  # switch to the next lane once the next node is reached

        self.pursuit = PurePursuit()
        self.lka = rospy.get_param("~lookahead", 0.5)
        self.pursuit.set_look_ahead_distance(self.lka)

        ## node path
        while not rospy.has_param("/guid_path") and not rospy.is_shutdown():
            rospy.loginfo("Wait for /guid_path")
            rospy.sleep(0.5)
        self.guid_path = rospy.get_param("/guid_path")
        self.tag_ang_init = rospy.get_param("/guid_path_ang_init")
        self.last_node = -1
        self.next_node_id = None
        self.all_anchor_ids = rospy.get_param("/all_anchor_ids")
        self.joy_lock = False

        self.get_goal = True
        self.joint_ang = None

        ## set first tracking lane
        self.pub_robot_speech = rospy.Publisher("speech_case",
                                                String,
                                                queue_size=1)
        self.pub_robot_go = rospy.Publisher("robot_go", Bool, queue_size=1)
        rospy.sleep(1)  # give the publishers time to register
        self.set_lane(True)

        # variable
        self.target_global = None
        self.listener = tf.TransformListener()

        # markers
        self.pub_goal_marker = rospy.Publisher("goal_marker",
                                               Marker,
                                               queue_size=1)

        self.pub_target_marker = rospy.Publisher("target_marker",
                                                 Marker,
                                                 queue_size=1)

        # publisher, subscriber
        self.pub_pid_goal = rospy.Publisher("pid_control/goal",
                                            PoseStamped,
                                            queue_size=1)

        self.req_path_srv = rospy.ServiceProxy("plan_service", GetPlan)

        self.sub_box = rospy.Subscriber("anchor_position",
                                        PoseDirectional,
                                        self.cb_goal,
                                        queue_size=1)

        sub_joy = rospy.Subscriber('joy_teleop/joy',
                                   Joy,
                                   self.cb_joy,
                                   queue_size=1)
        sub_fr = rospy.Subscriber('front_right/ranges',
                                  DeviceRangeArray,
                                  self.cb_range,
                                  queue_size=1)
        sub_joint = rospy.Subscriber('/dynamixel_workbench/joint_states',
                                     JointState,
                                     self.cb_joint,
                                     queue_size=1)

        #Don't update goal too often
        self.last_update_goal = None
        self.goal_update_thred = 0.001
        self.hist_goal = np.array([])

        self.normal = True
        self.get_goal = True
        self.timer = rospy.Timer(rospy.Duration(0.1), self.tracking)

        # print("init done")

        # prevent sudden stop
        self.last_plan = None
        # keep searching until find path or reach search end
        self.search_angle = 10 * math.pi / 180
        self.search_max = 5

        ### stupid range drawing
        # for using tf to draw range
        br1 = tf2_ros.StaticTransformBroadcaster()
        br2 = tf2_ros.StaticTransformBroadcaster()

        # stf.header.frame_id = "uwb_back_left"
        # stf.child_frame_id = "base_link"
        # stf.transform.translation.x = -1*r_tag_points[0, 0]
        # stf.transform.translation.y = -1*r_tag_points[0, 1]
        # br1.sendTransform(stf)

        # stf2 = stf
        # stf2.header.stamp = rospy.Time.now()
        # stf2.header.frame_id = "uwb_front_right"
        # stf2.transform.translation.x = -1*r_tag_points[1, 0]
        # stf2.transform.translation.y = -1*r_tag_points[1, 1]
        # br2.sendTransform(stf2)

        stf = TransformStamped()
        stf.header.stamp = rospy.Time.now()
        stf.transform.rotation.w = 1
        stf.header.frame_id = "base_link"
        stf.child_frame_id = "uwb_back_left"
        stf.transform.translation.x = r_tag_points[0, 0]
        stf.transform.translation.y = r_tag_points[0, 1]

        stf2 = TransformStamped()
        stf2.header.stamp = rospy.Time.now()
        stf2.transform.rotation.w = 1
        stf2.header.frame_id = "base_link"
        stf2.child_frame_id = "uwb_front_right"
        stf2.transform.translation.x = r_tag_points[1, 0]
        stf2.transform.translation.y = r_tag_points[1, 1]
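# The snippet above builds stf and stf2 but ends before broadcasting them; a
# minimal sketch of doing so is below. Recent tf2_ros releases accept a list of
# TransformStamped, so one StaticTransformBroadcaster can latch both frames;
# r_tag_points is assumed to be the same 2x2 array of tag offsets used above.
import rospy
import tf2_ros
from geometry_msgs.msg import TransformStamped


def broadcast_uwb_frames(r_tag_points):
    br = tf2_ros.StaticTransformBroadcaster()
    transforms = []
    for child, (x, y) in zip(["uwb_back_left", "uwb_front_right"], r_tag_points):
        stf = TransformStamped()
        stf.header.stamp = rospy.Time.now()
        stf.header.frame_id = "base_link"
        stf.child_frame_id = child
        stf.transform.translation.x = float(x)
        stf.transform.translation.y = float(y)
        stf.transform.rotation.w = 1.0
        transforms.append(stf)
    br.sendTransform(transforms)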
Example #18
0
    def __init__(self):
        rospy.init_node('tl_detector')

        self.base_waypoints = None
        self.curr_pose = None
        self.camera_image = None
        self.tree = None
        self.lights = []
        self.bridge = CvBridge()
        self.light_classifier = TLClassifier()
        self.listener = tf.TransformListener()

        self.state = TrafficLight.UNKNOWN
        self.last_state = TrafficLight.UNKNOWN
        self.last_wp = -1
        self.state_count = 0
        self.datasize = 0
        self.ignore_state = False

        # Subscribe to topic '/current_pose' to get the current position of the car
        rospy.Subscriber('/current_pose', PoseStamped, self.pose_cb)
        
        # Subscribe to topic '/image_color' to get the image from the cars front camera
        rospy.Subscriber('/image_color', Image, self.image_cb)

        # Subscribe to topic '/vehicle/traffic_lights' to get the location and state of the traffic lights
        # IMPORTANT: The state will not be available in real life testing
        rospy.Subscriber('/vehicle/traffic_lights', TrafficLightArray, self.traffic_lights_cb)
        
        config_string = rospy.get_param("/traffic_light_config")
        self.config = yaml.load(config_string)

        # Fetch info from '/base_waypoints', which provide a complete list of waypoints the car will be following
        wp = rospy.wait_for_message('/base_waypoints', Lane)
        
        # Set the waypoints only once
        if not self.base_waypoints:
            self.base_waypoints = wp.waypoints
            self.datasize = len(self.base_waypoints)
            if PRINT_DEBUG:
                rospy.logwarn('Got the base points for tl_detector.')        
        
        # Get the x/y coordinates of the base_waypoints
        b_xcor = []
        b_ycor = []
        
        for pt in self.base_waypoints:
            b_xcor.append(pt.pose.pose.position.x)
            b_ycor.append(pt.pose.pose.position.y)
        self.tree = KDTree(zip(b_xcor, b_ycor))


        # Create the publisher to write messages to topic '/traffic_waypoint'
        # The index of the waypoint which is closest to the next red traffic light has to be published
        self.upcoming_red_light_pub = rospy.Publisher('/traffic_waypoint', Int32, queue_size=1)

        # Create the publisher to write messages to topic '/traffic_waypoint_state'
        # The state of the closest red traffic light has to be published
        self.upcoming_red_light_state_pub = rospy.Publisher('/traffic_waypoint_state', Int32, queue_size=1)

        # Block until shutdown -> Tasks are handled with callbacks
        rospy.spin()
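# A minimal sketch of how the KDTree built above can be queried for the index
# of the waypoint closest to the car; `tree` is assumed to be the same
# scipy.spatial.KDTree of (x, y) waypoint coordinates.
def closest_waypoint_index(tree, x, y):
    """Return the index of the base waypoint nearest to (x, y)."""
    _dist, idx = tree.query([x, y], k=1)
    return idx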
Example #19
0
def follower_node():

    rospy.init_node('follower')

    global robotX
    global robotY
    global robotZ

    global motor_command_publisher
    motor_command_publisher = rospy.Publisher('/cmd_vel', Twist, queue_size = 10)

    waypoint_subscriber = rospy.Subscriber("/waypoint_cmd", Transform, waypoint_callback)

    action_subscriber = rospy.Subscriber("/incoming", Transform, avoid_object)

    map_subscriber = rospy.Subscriber("/map", OccupancyGrid, map_callback, queue_size = 1000)

    listener = tf.TransformListener()
    step=1
    size=0.7

    dx=0
    dy=0
    dz=0

    print "Starting, dont forget to enable motors"

    delay = rospy.Rate(50.0);
    while not rospy.is_shutdown():
        motor_command = Twist()
        objectDetected=False
        error = False
        stopped = False
        if control:
            try:
                # grab the latest transform from base_link to base_footprint; its z component is used below as the robot's height
                (translationZ, orientationZ) = listener.lookupTransform("/base_link", "/base_footprint", rospy.Time(0))
            except  (tf.LookupException, tf.ConnectivityException, tf.ExtrapolationException) as e:
                print("EXCEPTION:",e)
                #if something goes wrong with this just go to bed for a second or so and wake up hopefully refreshed.
                delay.sleep()
                continue

            try:
                # grab the latest transform from the world frame to the robot's base_footprint (the robot's position in the world)
                (translation, orientation) = listener.lookupTransform("/world", "/base_footprint", rospy.Time(0))
            except  (tf.LookupException, tf.ConnectivityException, tf.ExtrapolationException) as e:
                print("EXCEPTION:",e)
                delay.sleep()
                continue

            #print "\n---------------------\ntranslation: ", translation, "\norientation: ", orientation, "\nwaypoint:\n", waypoint

            robotX=translation[0]
            robotY=translation[1]
            robotZ=translationZ[2]
            print "Robot coordinates: ", robotX, "-", robotY, "-", robotZ

            if slamMap is not None:
                xMax = round((robotX-slamMap.info.origin.position.x+size)/slamMap.info.resolution)
                yMax = round((robotY-slamMap.info.origin.position.y+size)/slamMap.info.resolution)
                xMin = round((robotX-slamMap.info.origin.position.x-size)/slamMap.info.resolution)
                yMin = round((robotY-slamMap.info.origin.position.y-size)/slamMap.info.resolution)
                xRob = round((robotX-slamMap.info.origin.position.x)/slamMap.info.resolution)
                yRob = round((robotY-slamMap.info.origin.position.y)/slamMap.info.resolution)

                print "Map data: ", slamMap.info.width, "-", slamMap.info.height, "-", slamMap.info.resolution, "-", slamMap.info.origin.position.x, "-", slamMap.info.origin.position.y
                print "Checking map: ", xMax, "-", xMin, "-", yMax, "-", yMin, " Robot on map: ", int(xRob), "-", int(yRob)

                #sys.stdout.write('\n')

                for y in range(int(yMin),int(yMax),1):
                    for x in range(int(xMin),int(xMax),1):
                        index = x+y*slamMap.info.width
                        if abs(xRob-x)<1 and abs(yRob-y)<1:
                            if slamMap.data[index] > 90:
                                error = True
                            #sys.stdout.write('R')
                        elif slamMap.data[index] > 90:
                            ## This square is occupied
                            objectDetected = True
                            #sys.stdout.write('X')
                        #elif slamMap.data[index] >= 0:
                            ## This square is unoccupied
                            #sys.stdout.write(" ")
                        #else:
                            #sys.stdout.write('?')
                    #sys.stdout.write('\n')
            else:
                print "Map data not found! Check if slam works"


            if not error and objectDetected:
                print("Object detected, stop!")
                if not stopped and dx!=0 and dy!=0 and dz!=0:
                    motor_command.linear.x = -dx
                    motor_command.linear.y = -dy
                    motor_command.linear.z = -dz

                    #print "command!\n", motor_command.linear.x, "-", motor_command.linear.y, "-", motor_command.linear.z, "-k: ", k

                    motor_command_publisher.publish(motor_command)
                stopped=True
            else:
                if(abs(robotZ)<0.3): #prevent collisions
                     motor_command.linear.z = 0.2
                     motor_command_publisher.publish(motor_command)
                     rospy.sleep(0.2)


                dx=waypoint.translation.x-robotX
                dy=waypoint.translation.y-robotY
                dz=waypoint.translation.z+robotZ#base link has negative z

                #print "moving!\n", dx, "-", dy, "-", dz

                k=1
                if(abs(dx)>step or abs(dy)>step or abs(dz)>step):
                    maxd = max(abs(dx),abs(dy),abs(dz))
                    k = step / maxd
                    dx = dx*k
                    dy = dy*k
                    dz = dz*k

                motor_command.linear.x = dx
                motor_command.linear.y = dy
                motor_command.linear.z = dz

                #print "command!\n", motor_command.linear.x, "-", motor_command.linear.y, "-", motor_command.linear.z, "-k: ", k

                motor_command_publisher.publish(motor_command)
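# A sketch of the occupancy test used in the loop above, factored into a
# helper: world coordinates are mapped to grid indices via the map origin and
# resolution, and cells with occupancy values above 90 count as obstacles.
def cell_is_occupied(grid_msg, world_x, world_y, occupied_threshold=90):
    res = grid_msg.info.resolution
    gx = int(round((world_x - grid_msg.info.origin.position.x) / res))
    gy = int(round((world_y - grid_msg.info.origin.position.y) / res))
    if gx < 0 or gy < 0 or gx >= grid_msg.info.width or gy >= grid_msg.info.height:
        return False  # outside the known map
    return grid_msg.data[gx + gy * grid_msg.info.width] > occupied_threshold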
Example #20
0
    def __init__(self, sim=False):
        self.scene = PlanningSceneInterface("base_link")
        self.move_group = MoveGroupInterface("upper_body", "base_link")
        self.lmove_group = MoveGroupInterface("left_arm", "base_link")
        self.rmove_group = MoveGroupInterface("right_arm", "base_link")
        self.move_group.setPlannerId("RRTConnectkConfigDefault")
        self.lmove_group.setPlannerId("RRTConnectkConfigDefault")
        self.rmove_group.setPlannerId("RRTConnectkConfigDefault")
        self._upper_body_joints = [
            "right_elbow_joint", "right_shoulder_lift_joint",
            "right_shoulder_pan_joint", "right_wrist_1_joint",
            "right_wrist_2_joint", "right_wrist_3_joint", "left_elbow_joint",
            "left_shoulder_lift_joint", "left_shoulder_pan_joint",
            "left_wrist_1_joint", "left_wrist_2_joint", "left_wrist_3_joint",
            "linear_joint", "pan_joint", "tilt_joint"
        ]
        self._right_arm_joints = [
            "right_elbow_joint", "right_shoulder_lift_joint",
            "right_shoulder_pan_joint", "right_wrist_1_joint",
            "right_wrist_2_joint", "right_wrist_3_joint"
        ]
        self._left_arm_joints = [
            "left_elbow_joint", "left_shoulder_lift_joint",
            "left_shoulder_pan_joint", "left_wrist_1_joint",
            "left_wrist_2_joint", "left_wrist_3_joint"
        ]

        self.pickplace = [None] * 2
        self.pickplace[0] = PickPlaceInterface("left_side",
                                               "left_gripper",
                                               verbose=True)
        self.pickplace[0].planner_id = "RRTConnectkConfigDefault"
        self.pickplace[1] = PickPlaceInterface("right_side",
                                               "right_gripper",
                                               verbose=True)
        self.pickplace[1].planner_id = "RRTConnectkConfigDefault"
        self.pick_result = [None] * 2
        self._last_gripper_picked = 0
        self.place_result = [None] * 2
        self._last_gripper_placed = 0

        self._objs_to_keep = []

        self._listener = tf.TransformListener()

        self._lgripper = GripperActionClient('left')
        self._rgripper = GripperActionClient('right')

        find_topic = "basic_grasping_perception/find_objects"
        rospy.loginfo("Waiting for %s..." % find_topic)
        self.find_client = actionlib.SimpleActionClient(
            find_topic, FindGraspableObjectsAction)
        self.find_client.wait_for_server()

        self.scene.clear()

        # This is a simulation so need to adjust gripper parameters
        if sim:
            self._gripper_closed = 0.96
            self._gripper_open = 0.0
        else:
            self._gripper_closed = 0.0
            self._gripper_open = 0.165
Example #21
0
    def __init__(self):
        rospy.init_node('tl_detector')

        self.pose = None
        self.waypoints = None
        self.camera_image = None
        self.lights = []

        sub1 = rospy.Subscriber('/current_pose', PoseStamped, self.pose_cb)
        sub2 = rospy.Subscriber('/base_waypoints', Lane, self.waypoints_cb)
        '''
        /vehicle/traffic_lights provides you with the location of the traffic light in 3D map space and
        helps you acquire an accurate ground truth data source for the traffic light
        classifier by sending the current color state of all traffic lights in the
        simulator. When testing on the vehicle, the color state will not be available. You'll need to
        rely on the position of the light and the camera image to predict it.
        '''
        sub3 = rospy.Subscriber('/vehicle/traffic_lights', TrafficLightArray,
                                self.traffic_cb)
        sub6 = rospy.Subscriber('/image_color', Image, self.image_cb)

        config_string = rospy.get_param("/traffic_light_config")
        self.config = yaml.load(config_string)

        self.is_site = self.config['is_site']

        #TODO Remove hack to force site mode or ground_truth for testing
        # self.is_site = True
        self.ground_truth = False

        self.upcoming_red_light_pub = rospy.Publisher('/traffic_waypoint',
                                                      Int32,
                                                      queue_size=1)

        self.bridge = CvBridge()
        self.light_classifier = TLClassifier()
        self.listener = tf.TransformListener()

        self.state = TrafficLight.UNKNOWN
        self.last_state = TrafficLight.UNKNOWN
        self.last_wp = -1
        self.state_count = 0

        self.waypoints_2d = None
        self.waypoint_tree = None
        self.vgg_model = None
        self.graph = None
        self.sess = None
        self.initialized = False

        if self.is_site:
            # Detector Stuff
            self.model_image_size = None
            model_path = os.path.expanduser('./weights/parking_lot.h5')
            anchors_path = os.path.expanduser('./model_data/lisa_anchors.txt')
            classes_path = os.path.expanduser('./model_data/lisa_classes.txt')

            self.class_names = utils.get_classes(classes_path)

            anchors = utils.get_anchors(anchors_path)
            if SHALLOW_DETECTOR:
                anchors = anchors * 2

            self.yolo_model, _ = create_model(
                anchors,
                self.class_names,
                load_pretrained=True,
                feature_extractor=FEATURE_EXTRACTOR,
                pretrained_path=model_path,
                freeze_body=True)

            # Check if model is fully convolutional, assuming channel last order.
            self.model_image_size = self.yolo_model.layers[0].input_shape[1:3]

            self.sess = K.get_session()

            # Generate output tensor targets for filtered bounding boxes.
            self.yolo_outputs = decode_yolo_output(self.yolo_model.output,
                                                   anchors,
                                                   len(self.class_names))

            self.input_image_shape = K.placeholder(shape=(2, ))
            self.boxes, self.scores, self.classes = yolo_eval(
                self.yolo_outputs,
                self.input_image_shape,
                score_threshold=.6,
                iou_threshold=.6)

            self.graph = tensorflow.get_default_graph()
        else:
            try:
                model_path = os.path.expanduser('./weights/vgg16_1.h5')
                self.vgg_model = load_model(model_path)
                self.graph = tensorflow.get_default_graph()
            except:
                rospy.logerr(
                    "Could not load model. Have you downloaded the vgg16_1.h5 file to the weights folder? You can download it here: https://s3-eu-west-1.amazonaws.com/sdcnddata/vgg16_1.h5"
                )

        self.initialized = True

        rospy.spin()
Пример #22
0
    def __init__(self):
        # Initialize the move_group API
        moveit_commander.roscpp_initialize(sys.argv)

        # Initialize the ROS node
        rospy.init_node('moveit_cartesian_demo', anonymous=True)

        # Initialize the arm group of the manipulator controlled through move_group
        arm = MoveGroupCommander('arm')
        robot = moveit_commander.RobotCommander()

        # Allow replanning after a failed motion plan (disabled here)
        arm.allow_replanning(False)

        # Set the reference frame used for target poses
        reference_frame = 'base_link'
        arm.set_pose_reference_frame(reference_frame)

        # Set the allowed error in position (meters) and orientation (radians)
        arm.set_goal_position_tolerance(0.001)
        arm.set_goal_orientation_tolerance(0.01)

        arm.set_max_velocity_scaling_factor(0.5)
        arm.set_max_acceleration_scaling_factor(0.5)

        arm.set_planning_time(0.08)  # limit the planning time per attempt (seconds)
        # arm.set_num_planning_attempts(1)  # plan only once

        # Get the name of the end-effector link
        eef_link = arm.get_end_effector_link()

        scene = moveit_commander.PlanningSceneInterface()
        scene_pub = rospy.Publisher('planning_scene',
                                    PlanningScene,
                                    queue_size=10)

        print "[INFO] Current pose:\n", arm.get_current_pose().pose

        self.scene = scene
        self.scene_pub = scene_pub
        self.colors = dict()

        self.group = arm
        self.robot = robot
        self.eef_link = eef_link
        self.reference_frame = reference_frame
        self.moveit_commander = moveit_commander

        self.broadcaster = tf.TransformBroadcaster()
        self.listener = tf.TransformListener()

        self.gripper_len = 0.095  # real gripper length 0.165 m, virtual gripper depth 0.075 m; 0.16 - 0.065 = 0.095 m
        self.approach_distance = 0.06
        self.back_distance = 0.05

        # sub and pub point cloud
        self.point_cloud = None
        self.update_cloud_flag = False
        rospy.Subscriber('/camera/depth/color/points', PointCloud2,
                         self.callback_pointcloud)
        thread.start_new_thread(self.publish_pointcloud, ())
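# callback_pointcloud is referenced above but not shown; a plausible sketch
# (the helper name and behavior are assumptions) uses sensor_msgs.point_cloud2
# to extract xyz points from the PointCloud2 message.
import sensor_msgs.point_cloud2 as pc2


def extract_xyz_points(cloud_msg):
    """Return a list of (x, y, z) tuples in the camera frame, NaNs skipped."""
    return list(pc2.read_points(cloud_msg, field_names=("x", "y", "z"),
                                skip_nans=True))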
Example #23
0
    def __init__(self):
        rospy.init_node('turtlebot3_sandbot',
                        anonymous=False,
                        disable_signals=True)
        rospy.on_shutdown(self.shutdown)
        self.cmd_vel = rospy.Publisher('/cmd_vel', Twist, queue_size=5)
        position = Point()
        move_cmd = Twist()  #make empty twist message
        r = rospy.Rate(10)
        self.tf_listener = tf.TransformListener()
        self.odom_frame = '/odom'
        self.isFirst = True
        print("isFirst initialized")
        self.offset_x = 0
        self.offset_y = 0
        self.offset_rot = 0
        try:
            self.tf_listener.waitForTransform(self.odom_frame,
                                              '/base_footprint', rospy.Time(),
                                              rospy.Duration(1.0))
            self.base_frame = '/base_footprint'
        except (tf.Exception, tf.ConnectivityException, tf.LookupException):
            try:
                self.tf_listener.waitForTransform(self.odom_frame,
                                                  '/base_link', rospy.Time(),
                                                  rospy.Duration(1.0))
                self.base_frame = '/base_link'
            except (tf.Exception, tf.ConnectivityException,
                    tf.LookupException):
                rospy.loginfo(
                    "Cannot find transform between /odom and /base_link or /base_footprint"
                )
                rospy.signal_shutdown("tf Exception")

        (position, rotation) = self.get_odom()
        if self.isFirst:
            self.offset_x = position.x
            self.offset_y = position.y
            self.offset_rot = rotation
            self.isFirst = False
            print("offset initialized")
        (position, rotation) = self.get_odom()
        print("x, y, rotation", position.x, position.y, np.rad2deg(rotation))

        last_rotation = 0
        last_distance = 10
        linear_speed = 1
        angular_speed = 1
        # (goal_x, goal_y, goal_z) = self.getkey()

        # go through path array
        ind = 1
        length = len(arr_path_B)
        distanceIncreasing = False
        init_goal = arr_path_B[0]
        goal_x = arr_path_B[ind][0] - init_goal[0]
        goal_y = arr_path_B[ind][1] - init_goal[1]

        while ind < length:
            # if goal_z > 180 or goal_z < -180:
            #     print("you input worng z range.")
            #     self.shutdown()
            # goal_z = np.deg2rad(goal_z)
            # (position,rotation) = self.get_odom()

            distance = sqrt(
                pow(goal_x - position.x, 2) + pow(goal_y - position.y, 2))

            while distance > 0.1:
                try:
                    (position, rotation) = self.get_odom()
                    distance = sqrt(
                        pow((goal_x - position.x), 2) +
                        pow((goal_y - position.y), 2))
                    path_angle = atan2(goal_y - position.y,
                                       goal_x - position.x)

                    # if last_distance<distance:
                    # ind = ind - increment
                    # distanceIncreasing = True

                    #Normalization of path_angle
                    if path_angle < -pi / 4 or path_angle > pi / 4:
                        if goal_y < 0 and position.y < goal_y:
                            path_angle = -2 * pi + path_angle
                        elif goal_y >= 0 and position.y > goal_y:
                            path_angle = 2 * pi + path_angle

                    #Normalization of rotation
                    if last_rotation > pi - 0.1 and rotation <= 0:
                        rotation = 2 * pi + rotation
                    elif last_rotation < -pi + 0.1 and rotation > 0:
                        rotation = -2 * pi + rotation

                    rot_angle = path_angle - rotation
                    if rot_angle > pi or (rot_angle < 0 and rot_angle > -pi):
                        diff_sign = -1.0
                    else:
                        diff_sign = 1.0

                    diff_magnitude = pi - abs(abs(path_angle - rotation) - pi)
                    diff = diff_sign * diff_magnitude

                    print("CURRENT ", position.x, position.y,
                          np.rad2deg(rotation))
                    print("GOAL", goal_x, goal_y, np.rad2deg(path_angle))
                    print("DISTANCE(m) ", distance)

                    # move_cmd.angular.z = angular_speed * diff
                    move_cmd.angular.z = angular_speed * rot_angle
                    move_cmd.linear.x = min(linear_speed * distance, 0.1)

                    if move_cmd.angular.z > 0:
                        move_cmd.angular.z = min(move_cmd.angular.z, 0.2)
                    else:
                        move_cmd.angular.z = max(move_cmd.angular.z, -0.2)

                    #if heading angle is over limit (while driving)
                    if diff_magnitude > pi / 4.0:
                        move_cmd.linear.x = 0
                        # move_cmd.angular.z = move_cmd.angular.z * (-1.0)
                        # move_cmd.angular.z = move_cmd.angular.z

                    last_rotation = rotation
                    last_distance = distance

                    # if distanceIncreasing == True:
                    #     print("distance is increasing!!!")
                    #     self.cmd_vel.publish(Twist())
                    #     r.sleep()
                    #     continue

                    print("linear.x   angular.z")
                    print(move_cmd.linear.x, move_cmd.angular.z)

                    self.cmd_vel.publish(move_cmd)
                    r.sleep()

                except KeyboardInterrupt:
                    rospy.signal_shutdown("KeboardInterrupt")
                    break

            if rospy.is_shutdown():
                break

            #self.cmd_vel.publish(Twist())

            print("Now at Waypoint No.", ind)

            if ind < length - increment:
                ind = ind + increment
                goal_x = arr_path_B[ind][0] - init_goal[0]
                goal_y = arr_path_B[ind][1] - init_goal[1]
            else:
                #arrived at the final destination
                print("Robot at the final destination")
                rospy.signal_shutdown("Robot Task Done")
                break

            (position, rotation) = self.get_odom()
            path_angle = atan2(goal_y - position.y, goal_x - position.x)
            rot_angle = path_angle - rotation
            while True:
                try:
                    print("rotation", np.rad2deg(rotation))
                    print("path_angle", np.rad2deg(path_angle))

                    move_cmd.linear.x = 0

                    #diff is always positive
                    diff = pi - abs(abs(rot_angle) - pi)
                    print("diff", np.rad2deg(diff))

                    rot_angle = path_angle - rotation
                    if rot_angle > pi or (rot_angle < 0 and rot_angle > -pi):
                        diff_sign = -1.0
                    else:
                        diff_sign = 1.0

                    diff_magnitude = pi - abs(abs(path_angle - rotation) - pi)
                    diff = diff_sign * diff_magnitude

                    if diff_sign < 0:
                        if diff_magnitude < np.deg2rad(20):
                            move_cmd.angular.z = -0.1
                        else:
                            move_cmd.angular.z = -0.2
                    else:
                        if diff_magnitude < np.deg2rad(20):
                            move_cmd.angular.z = 0.1
                        else:
                            move_cmd.angular.z = 0.2

                    (position, rotation) = self.get_odom()
                    path_angle = atan2(goal_y - position.y,
                                       goal_x - position.x)
                    rot_angle = path_angle - rotation

                    # 8 too small
                    if abs(rot_angle) < np.deg2rad(8):
                        r.sleep()
                        break

                    self.cmd_vel.publish(move_cmd)
                    r.sleep()
                except KeyboardInterrupt:
                    rospy.signal_shutdown("KeboardInterrupt")
                    break

            #self.cmd_vel.publish(Twist())

            if rospy.is_shutdown():
                break
                # if goal_z >= 0:
                #     if rotation <= goal_z and rotation >= goal_z - pi:
                #         move_cmd.linear.x = 0.00
                #         move_cmd.angular.z = 0.2
                #     else:
                #         move_cmd.linear.x = 0.00
                #         move_cmd.angular.z = -0.2
                # else:
                #     if rotation <= goal_z + pi and rotation > goal_z:
                #         move_cmd.linear.x = 0.00
                #         move_cmd.angular.z = -0.2
                #     else:
                #         move_cmd.linear.x = 0.00
                #         move_cmd.angular.z = 0.2

        rospy.loginfo("Stopping the robot...")
        self.cmd_vel.publish(Twist())
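# The diff_sign / diff_magnitude logic above computes the shortest signed
# rotation from `rotation` to `path_angle`; a compact alternative (a sketch,
# not taken from the example) wraps the difference into (-pi, pi] with atan2.
from math import sin, cos, atan2


def shortest_angular_distance(from_angle, to_angle):
    """Signed rotation taking from_angle to to_angle, wrapped to (-pi, pi]."""
    return atan2(sin(to_angle - from_angle), cos(to_angle - from_angle))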
Example #24
0
    def __init__(self):
        rospy.init_node('turtlebot_navigator', anonymous=True)
        self.mode = Mode.IDLE

        # WE CHANGED THIS STUFF
        # Time to stop at a stop sign
        self.stop_time = rospy.get_param("~stop_time", 3.)

        # Minimum distance from a stop sign to obey it (originally 0.5)
        self.stop_min_dist = rospy.get_param("~stop_min_dist", 0.7)

        # Time taken to cross an intersection
        self.crossing_time = rospy.get_param("~crossing_time", 3.)

        # Number of stop sign detections in a row
        self.stop_detections = 0

        # Stop sign detector
        rospy.Subscriber('/detector/stop_sign', DetectedObject,
                         self.stop_sign_detected_callback)

        # END OF WE CHANGED THIS STUFF

        # current state
        self.x = 0.0
        self.y = 0.0
        self.theta = 0.0

        # goal state
        self.x_g = None
        self.y_g = None
        self.theta_g = None

        self.th_init = 0.0

        # map parameters
        self.map_width = 0
        self.map_height = 0
        self.map_resolution = 0
        self.map_origin = [0, 0]
        self.map_probs = []
        self.occupancy = None
        self.occupancy_updated = False

        # plan parameters
        self.plan_resolution = 0.1
        self.plan_horizon = 15

        # time when we started following the plan
        self.current_plan_start_time = rospy.get_rostime()
        self.current_plan_duration = 0
        self.plan_start = [0., 0.]

        # Robot limits
        self.v_max = 0.2  # maximum velocity
        self.om_max = 0.4  # maximum angular velocity

        self.v_des = 0.12  # desired cruising velocity
        #self.v_des = 0.2
        self.theta_start_thresh = 0.05  # threshold in theta to start moving forward when path-following
        self.start_pos_thresh = 0.2  # threshold to be far enough into the plan to recompute it

        # threshold at which navigator switches from trajectory to pose control
        self.near_thresh = 0.2

        # goal state threshold
        self.at_thresh = 0.09  #0.05
        self.at_thresh_theta = 0.2  #0.1

        # trajectory smoothing
        self.spline_alpha = 0.005  #0.01   #0.05 # 0.15
        self.traj_dt = 0.1

        # trajectory tracking controller parameters
        self.kpx = 0.5  # 0.5
        self.kpy = 0.5  # 0.5
        self.kdx = 1.5  # 1.5
        self.kdy = 1.5  # 1.5

        # heading controller parameters
        self.kp_th = 2.

        self.traj_controller = TrajectoryTracker(self.kpx, self.kpy, self.kdx,
                                                 self.kdy, self.v_max,
                                                 self.om_max)
        self.pose_controller = PoseController(0., 0., 0., self.v_max,
                                              self.om_max)
        self.heading_controller = HeadingController(self.kp_th, self.om_max)

        self.nav_planned_path_pub = rospy.Publisher('/planned_path',
                                                    Path,
                                                    queue_size=10)
        self.nav_smoothed_path_pub = rospy.Publisher('/cmd_smoothed_path',
                                                     Path,
                                                     queue_size=10)
        self.nav_smoothed_path_rej_pub = rospy.Publisher(
            '/cmd_smoothed_path_rejected', Path, queue_size=10)
        self.nav_vel_pub = rospy.Publisher('/cmd_vel', Twist, queue_size=10)

        self.trans_listener = tf.TransformListener()

        self.cfg_srv = Server(NavigatorConfig, self.dyn_cfg_callback)

        rospy.Subscriber('/map', OccupancyGrid, self.map_callback)
        rospy.Subscriber('/map_metadata', MapMetaData, self.map_md_callback)
        rospy.Subscriber('/cmd_nav', Pose2D, self.cmd_nav_callback)

        # ADD CURRENT STATE PUBLISHER (11/15/20 DEM)
        self.current_state_pub = rospy.Publisher('/current_state',
                                                 Pose2D,
                                                 queue_size=10)

        print "finished init"
Example #25
0
    def __init__(self):
        # Give the node a name
        rospy.init_node('out_and_back', anonymous=False)

        # Set rospy to execute a shutdown function when exiting
        rospy.on_shutdown(self.shutdown)

        # Publisher to control the robot's speed
        #self.cmd_vel = rospy.Publisher('/cmd_vel', Twist, queue_size=5)
        self.cmd_vel = rospy.Publisher('/mobile_base/commands/velocity',
                                       Twist,
                                       queue_size=5)

        # How fast will we update the robot's movement?
        rate = 20

        # Set the equivalent ROS rate variable
        r = rospy.Rate(rate)

        # Set the forward linear speed to 0.15 meters per second
        linear_speed = 0.15

        # Set the travel distance in meters
        goal_distance = 1.0

        # Set the rotation speed in radians per second
        angular_speed = 0.5

        # Set the angular tolerance in degrees converted to radians
        angular_tolerance = radians(1.0)

        # Set the rotation angle to Pi radians (180 degrees)
        goal_angle = pi

        # Initialize the tf listener
        self.tf_listener = tf.TransformListener()

        # Give tf some time to fill its buffer
        rospy.sleep(2)

        # Set the odom frame
        self.odom_frame = '/odom'

        # Find out if the robot uses /base_link or /base_footprint
        try:
            self.tf_listener.waitForTransform(self.odom_frame,
                                              '/base_footprint', rospy.Time(),
                                              rospy.Duration(1.0))
            self.base_frame = '/base_footprint'
        except (tf.Exception, tf.ConnectivityException, tf.LookupException):
            try:
                self.tf_listener.waitForTransform(self.odom_frame,
                                                  '/base_link', rospy.Time(),
                                                  rospy.Duration(1.0))
                self.base_frame = '/base_link'
            except (tf.Exception, tf.ConnectivityException,
                    tf.LookupException):
                rospy.loginfo(
                    "Cannot find transform between /odom and /base_link or /base_footprint"
                )
                rospy.signal_shutdown("tf Exception")

        # Initialize the position variable as a Point type
        position = Point()

        # Loop once for each leg of the trip
        for i in range(2):
            # Initialize the movement command
            move_cmd = Twist()

            # Set the movement command to forward motion
            move_cmd.linear.x = linear_speed

            # Get the starting position values
            (position, rotation) = self.get_odom()

            x_start = position.x
            y_start = position.y

            # Keep track of the distance traveled
            distance = 0

            # Enter the loop to move along a side
            while distance < goal_distance and not rospy.is_shutdown():
                # Publish the Twist message and sleep 1 cycle
                self.cmd_vel.publish(move_cmd)

                r.sleep()

                # Get the current position
                (position, rotation) = self.get_odom()

                # Compute the Euclidean distance from the start
                distance = sqrt(
                    pow((position.x - x_start), 2) +
                    pow((position.y - y_start), 2))

            # Stop the robot before the rotation
            move_cmd = Twist()
            self.cmd_vel.publish(move_cmd)
            rospy.sleep(1)

            # Set the movement command to a rotation
            move_cmd.angular.z = angular_speed

            # Track the last angle measured
            last_angle = rotation

            # Track how far we have turned
            turn_angle = 0

            while abs(turn_angle + angular_tolerance) < abs(
                    goal_angle) and not rospy.is_shutdown():
                # Publish the Twist message and sleep 1 cycle
                self.cmd_vel.publish(move_cmd)
                r.sleep()

                # Get the current rotation
                (position, rotation) = self.get_odom()

                # Compute the amount of rotation since the last loop
                delta_angle = normalize_angle(rotation - last_angle)

                # Add to the running total
                turn_angle += delta_angle
                last_angle = rotation

            # Stop the robot before the next leg
            move_cmd = Twist()
            self.cmd_vel.publish(move_cmd)
            rospy.sleep(1)

        # Stop the robot for good
        self.cmd_vel.publish(Twist())
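# get_odom() is called above but not shown; a typical implementation (a sketch
# assuming the tf frames set up in __init__) returns the current position and
# yaw from the odom-to-base transform.
import rospy
import tf
from geometry_msgs.msg import Point
from tf.transformations import euler_from_quaternion


def get_odom(self):
    try:
        (trans, rot) = self.tf_listener.lookupTransform(self.odom_frame,
                                                        self.base_frame,
                                                        rospy.Time(0))
    except (tf.Exception, tf.ConnectivityException, tf.LookupException):
        rospy.loginfo("TF Exception")
        return
    return (Point(*trans), euler_from_quaternion(rot)[2])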
Example #26
0
def main(argv):
    # prepare the proxy, listener
    global listener
    global vizpub
    rospy.init_node('contour_follow', anonymous=True)
    listener = tf.TransformListener()
    vizpub = rospy.Publisher("visualization_marker", Marker, queue_size=10)
    br = TransformBroadcaster()

    setSpeed(tcp=100, ori=30)
    setZone(0)
    # set the parameters
    limit = 10000  # number of data points to be collected
    ori = [0, 0.7071, 0.7071, 0]
    threshold = 0.3  # the threshold force for contact, need to be tuned
    z = 0.218  # the height above the table probe1: 0.29 probe2: 0.218
    probe_radis = 0.004745  # probe1: 0.00626/2 probe2: 0.004745
    step_size = 0.0002
    obj_des_wrt_vicon = [
        0, 0, -(9.40 / 2 / 1000 + 14.15 / 2 / 1000), 0, 0, 0, 1
    ]

    # visualize the block
    vizBlock(obj_des_wrt_vicon)
    rospy.sleep(0.1)
    vizBlock(obj_des_wrt_vicon)
    rospy.sleep(0.1)
    vizBlock(obj_des_wrt_vicon)
    rospy.sleep(0.1)
    vizBlock(obj_des_wrt_vicon)
    rospy.sleep(0.1)
    vizBlock(obj_des_wrt_vicon)
    rospy.sleep(0.1)
    vizBlock(obj_des_wrt_vicon)
    rospy.sleep(0.1)

    # 0. move to startPos
    start_pos = [0.3, 0.06, z + 0.05]
    setCart(start_pos, ori)

    start_pos = [0.3, 0.06, z]
    setCart(start_pos, ori)
    curr_pos = start_pos
    # 0.1 zero the ft reading
    rospy.sleep(1)
    setZero()
    rospy.sleep(3)

    # 1. move in -y direction till contact -> normal
    setSpeed(tcp=30, ori=30)
    direc = np.array([0, -step_size, 0])
    normal = [0, 0, 0]
    while True:
        curr_pos = np.array(curr_pos) + direc
        setCart(curr_pos, ori)
        # let the ft reads the force in static, not while pushing
        rospy.sleep(0.1)
        ft = ftmsg2list(
            ROS_Wait_For_Msg('/netft_data',
                             geometry_msgs.msg.WrenchStamped).getmsg())
        print '[ft explore]', ft
        # get box pose from vicon
        (box_pos, box_quat) = lookupTransform('base_link',
                                              'vicon/SteelBlock/SteelBlock',
                                              listener)
        # correct box_pose
        box_pose_des_global = mat2poselist(
            np.dot(poselist2mat(list(box_pos) + list(box_quat)),
                   poselist2mat(obj_des_wrt_vicon)))
        print 'box_pose', box_pose_des_global

        # If in contact, break
        if norm(ft[0:2]) > threshold:
            # transform ft data to global frame
            ft_global = transformFt2Global(ft)
            ft_global[2] = 0  # we don't want noise from z
            normal = ft_global[0:3] / norm(ft_global)
            break
    #pause()

    # 2. use the normal to move along the block
    setSpeed(tcp=20, ori=30)
    index = 0
    contact_pts = []
    contact_nms = []
    all_contact = []
    while True:
        # 2.1 move
        direc = np.dot(tfm.euler_matrix(0, 0, 2), normal.tolist() + [1])[0:3]
        curr_pos = np.array(curr_pos) + direc * step_size
        setCart(curr_pos, ori)

        # let the ft reads the force in static, not while pushing
        rospy.sleep(0.1)
        ft = getAveragedFT()
        print index  #, '[ft explore]', ft
        # get box pose from vicon
        (box_pos, box_quat) = lookupTransform('base_link',
                                              'vicon/SteelBlock/SteelBlock',
                                              listener)
        # correct box_pose
        box_pose_des_global = mat2poselist(
            np.dot(poselist2mat(list(box_pos) + list(box_quat)),
                   poselist2mat(obj_des_wrt_vicon)))
        #box_pose_des_global = list(box_pos) + list(box_quat)
        #print 'box_pose', box_pose_des_global

        vizBlock(obj_des_wrt_vicon)
        br.sendTransform(box_pose_des_global[0:3], box_pose_des_global[3:7],
                         rospy.Time.now(), "SteelBlockDesired", "map")
        #print 'box_pos', box_pos, 'box_quat', box_quat

        if norm(ft[0:2]) > threshold:
            # transform ft data to global frame
            ft_global = transformFt2Global(ft)
            ft_global[2] = 0  # we don't want noise from z
            normal = ft_global[0:3] / norm(ft_global)
            contact_nms.append(normal.tolist())
            contact_pt = curr_pos - normal * probe_radis
            contact_pts.append(contact_pt.tolist())
            contact_pt[2] = 0.01
            vizPoint(contact_pt.tolist())
            vizArrow(contact_pt.tolist(), (contact_pt + normal * 0.1).tolist())
            # caution: MATLAB uses the other quaternion order (w x y z); the stored normal points toward the object
            all_contact.append(contact_pt.tolist()[0:2] + [0] +
                               (-normal).tolist()[0:2] + [0] +
                               box_pose_des_global[0:3] +
                               box_pose_des_global[6:7] +
                               box_pose_des_global[3:6] + curr_pos.tolist())
            index += 1

        if len(contact_pts) > limit:
            break

        if len(contact_pts) % 500 == 0:  # zero the ft offset: move away from block, zero it, then come back
            move_away_size = 0.01
            overshoot_size = 0  #0.0005
            setSpeed(tcp=5, ori=30)
            setCart(curr_pos + normal * move_away_size, ori)
            rospy.sleep(1)
            print 'bad ft:', getAveragedFT()
            setZero()
            rospy.sleep(3)
            setCart(curr_pos - normal * overshoot_size, ori)
            setSpeed(tcp=20, ori=30)

    #all_contact(1:2,:);  % x,y of contact position
    #all_contact(4:5,:);  % x,y contact normal
    #all_contact(7:9,:);  % box x,y
    #all_contact(10:13,:);  % box quaternion
    #all_contact(14:16,:);  % pusher position

    # save contact_nm and contact_pt as json file
    with open(os.environ['PNPUSHDATA_BASE'] + '/all_contact_real.json',
              'w') as outfile:
        json.dump({
            'contact_pts': contact_pts,
            'contact_nms': contact_nms
        }, outfile)

    # save all_contact as mat file
    sio.savemat(os.environ['PNPUSHDATA_BASE'] + '/all_contact_real.mat',
                {'all_contact': all_contact})

    setSpeed(tcp=100, ori=30)
    # 3. move back to startPos
    start_pos = [0.3, 0.06, z + 0.05]
    setCart(start_pos, ori)
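# getAveragedFT() is used above but not shown; a plausible sketch (the sample
# count is an assumption) averages several wrench readings from the /netft_data
# topic named in the example.
import numpy as np
import rospy
import geometry_msgs.msg


def getAveragedFT(n_samples=10):
    samples = []
    for _ in range(n_samples):
        msg = rospy.wait_for_message('/netft_data',
                                     geometry_msgs.msg.WrenchStamped)
        samples.append([msg.wrench.force.x, msg.wrench.force.y,
                        msg.wrench.force.z, msg.wrench.torque.x,
                        msg.wrench.torque.y, msg.wrench.torque.z])
    return np.mean(samples, axis=0).tolist()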
Example #27
0
    def __init__(self):

        # Set the shutdown function rospy calls when the node exits
        rospy.on_shutdown(self.shutdown)

        # Get parameters
        self.goal_distance = rospy.get_param("~goal_distance", 1.0)  # meters
        self.goal_angle = radians(rospy.get_param(
            "~goal_angle", 90))  # degrees converted to radians
        self.linear_speed = rospy.get_param("~linear_speed",
                                            0.2)  # meters per second
        self.angular_speed = rospy.get_param("~angular_speed",
                                             0.7)  # radians per second
        self.angular_tolerance = radians(
            rospy.get_param("~angular_tolerance", 2))  # degrees to radians
        self.velocity_topic = rospy.get_param(
            '~vel_topic', '/mobile_base/mobile_base_controller/cmd_vel')
        self.rate_pub = rospy.get_param('~velocity_pub_rate', 20)
        self.rate = rospy.Rate(self.rate_pub)

        # Initialize the publisher for velocity commands
        self.cmd_vel = rospy.Publisher(self.velocity_topic,
                                       Twist,
                                       queue_size=1)

        self.base_frame = rospy.get_param('~base_frame',
                                          '/base_link')  #set the base_frame
        self.odom_frame = rospy.get_param('~odom_frame',
                                          '/odom')  #set the odom_frame
        # Initialize tf
        self.tf_listener = tf.TransformListener()

        rospy.sleep(2)

        try:
            self.tf_listener.waitForTransform(self.base_frame, self.odom_frame,
                                              rospy.Time(0),
                                              rospy.Duration(5.0))
        except (tf.LookupException, tf.ConnectivityException,
                tf.ExtrapolationException):
            rospy.logerr(
                'tf caught an exception while waiting for the transform')
            exit(-1)

        # Loop four times to trace out a square
        for i in range(4):

            self.Linear_motion()

            # Stop the robot before rotating
            move_cmd = Twist()
            self.cmd_vel.publish(move_cmd)
            rospy.sleep(1.0)

            self.Rotational_motion()

            move_cmd = Twist()
            self.cmd_vel.publish(move_cmd)
            rospy.sleep(1.0)

        # Stop the robot when finished
        self.cmd_vel.publish(Twist())
Example #28
0
    def __init__(self):
        rospy.init_node('tl_detector')

        self.pose = None
        self.waypoints = None
        self.camera_image = None
        self.lights = []
        self.waypoints_2d = None
        self.closest_waypoint_id = 0
        self.last_car_position = 0
        self.last_light_pos_wp = []
        self.waypoints_tree = None

        sub1 = rospy.Subscriber('/current_pose', PoseStamped, self.pose_cb)
        sub2 = rospy.Subscriber('/base_waypoints', Lane, self.waypoints_cb)
        '''
        /vehicle/traffic_lights provides you with the location of the traffic light in 3D map space and
        helps you acquire an accurate ground truth data source for the traffic light
        classifier by sending the current color state of all traffic lights in the
        simulator. When testing on the vehicle, the color state will not be available. You'll need to
        rely on the position of the light and the camera image to predict it.
        '''
        sub3 = rospy.Subscriber('/vehicle/traffic_lights', TrafficLightArray,
                                self.traffic_cb)
        sub6 = rospy.Subscriber('/image_color', Image, self.image_cb)

        config_string = rospy.get_param("/traffic_light_config")
        self.config = yaml.load(config_string)

        self.environment_param = rospy.get_param("/traffic_light_environment")
        self.model_path = rospy.get_param("/traffic_light_model_name")
        self.detection_threshhold = rospy.get_param(
            "/traffic_light_detection_threshhold")
        self.max_light_distance = rospy.get_param(
            "/traffic_light_max_distance")
        self.yellow_light_max_braking_distance = rospy.get_param(
            "/yellow_max_braking_distance")
        self.yellow_light_min_braking_distance = rospy.get_param(
            "/yellow_min_braking_distance")

        self.upcoming_red_light_pub = rospy.Publisher('/traffic_waypoint',
                                                      Int32,
                                                      queue_size=1)

        self.brake_on_yellow = False

        self.bridge = CvBridge()

        self.listener = tf.TransformListener()

        self.state = TrafficLight.UNKNOWN

        self.last_state = TrafficLight.UNKNOWN
        self.last_wp = -1
        self.state_count = 0

        self.TrafficLightState = rospy.Publisher('/traffic_light_state',
                                                 Int32,
                                                 queue_size=1)

        self.light_classifier = TLClassifier(self.environment_param,
                                             self.model_path,
                                             self.detection_threshhold)

        rospy.spin()
Example #29
0
def main():
    global settings
    settings = termios.tcgetattr(sys.stdin)

    rospy.init_node('controller')
    controller = Controller()
    PID_controller = Pid_Controller()
    cfg = Config()
    udp = sub_udp()
    states = States()

    pose_dict = mt.get_csv()
    cx = pose_dict['x']
    cy = pose_dict['y']
    target_course = TargetCourse(cx, cy)
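    # mt.get_csv() presumably loads the reference path as x/y waypoint columns from a
    # CSV file; TargetCourse wraps them for look-ahead target-index search.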

    udp.GearNo = -9
    udp.VC_SwitchOn = 1
    controller.loop = 0
    v_dt = 0
    distance_old = 0
    count = 0

    y_line_state = 9

    tar_vel_ = 0
    Lf_k = 1.6
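    # Lf_k is assumed to be the pure-pursuit look-ahead gain: search_target_index()
    # scales the look-ahead distance Ld with it (a larger Lf_k gives smoother but
    # less tight path tracking).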
    listener = tf.TransformListener()

    # start -----------------------------------------------
    while True:
        # cal dt
        if controller.car_lasted_time == 0:
            controller.car_lasted_time = controller.car_now_time
        controller.car_dt_time = controller.car_now_time - controller.car_lasted_time
        controller.car_lasted_time = controller.car_now_time
        key = getKey()

        try:
            (trans, rot) = listener.lookupTransform('localization',
                                                    'Wheel_Rear',
                                                    rospy.Time(0))
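            # rospy.Time(0) requests the latest available transform; on tf errors the
            # except clause below skips this control cycle and retries.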
            print(trans)
            print(rot)

        except (tf.LookupException, tf.ConnectivityException,
                tf.ExtrapolationException):
            continue

        try:
            # use localization
            state = State(x=controller.car_local_point_x,
                          y=controller.car_local_point_y,
                          yaw=controller.car_local_point_theta,
                          v=controller.cur_vel)

            last_index = len(cx) - 1
            target_ind, Ld, now_ind, test_mode, test_ind_1, test_ind_2 = target_course.search_target_index(
                state, Lf_k)

            cur_vel_ = controller.cur_vel
            cur_dt = controller.car_dt_time

        except (AttributeError, IndexError) as set_err:
            print(set_err)
            print("*-*-*-L-O-D-I-N-G-*-*-*")
            continue

        if last_index > target_ind:

            Ks = 0.08
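            # Ks is the steering gain handed to pure_pursuit_steer_control() below;
            # the (1, 0, 1) arguments to accel_control() are assumed to be the PID
            # gains (Kp, Ki, Kd) for longitudinal speed control.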

            mov_vel = PID_controller.accel_control(tar_vel_, cur_vel_, cur_dt,
                                                   1, 0, 1)
            udp.Ax = mov_vel

            nx = target_course.cx[now_ind]
            zx = target_course.cx[0]
            ny = target_course.cy[now_ind]
            zy = target_course.cy[0]

            # pure mode
            # delta_gain, tx, ty = pure_pursuit_steer_control(state, target_course, target_ind, Ks)
            delta_gain, tx, ty, alpha, delta_rad, delta, alpha_m, delta_m_rad, delta_m, Ld_m, car_alpha_m, car_pr, car_fr, pu_ind, pu_pind = pure_pursuit_steer_control(
                state, target_course, target_ind, Ks)
            udp.SteeringWheel = delta_gain
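            # delta_gain becomes the steering-wheel command; the remaining values
            # returned by pure_pursuit_steer_control() appear to be diagnostics that
            # are only printed below.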

            print(
                "------------------------------------------------------------------------------"
            )
            print("yaw : ", (controller.cur_yaw) % (np.pi * 2))
            print("car v : ", state.v)
            print("delta_gain : ", delta_gain)
            print("")

            print("tx - : ", tx, "ty - : ", ty)
            print("Ld _ origin  : ", Ld, "Ld _ cal  : ", Ld_m)
            print("alpha _ origin  : ", alpha, "alpha _ cal  : ", alpha_m)
            print("delta rad _ origin  : ", delta_rad, "delta rad _ cal  : ",
                  delta_m_rad)
            print("delta _ origin  : ", delta, "delta _ cal  : ", delta_m)

            print("yaw _ origin  : ", controller.cur_yaw)
            print("yaw _ 360 ", (controller.cur_yaw) % (np.pi * 2))
            print("yaw _ - tar  : ", car_pr, "yaw _ - cal  : ", car_fr,
                  "yaw - ", car_pr - car_fr)
            print("yaw _ - cal  : ", car_alpha_m)

            tx_tar = target_course.cx[target_ind]
            tx_now = target_course.cx[now_ind]
            ty_tar = target_course.cy[target_ind]
            ty_now = target_course.cy[now_ind]

            tx_tar_1 = target_course.cx[now_ind + 1]
            ty_tar_1 = target_course.cy[now_ind + 1]
            tx_tar_2 = target_course.cx[now_ind + 2]
            ty_tar_2 = target_course.cy[now_ind + 2]
            tx_tar_3 = target_course.cx[now_ind + 3]
            ty_tar_3 = target_course.cy[now_ind + 3]

            print("test mode", test_mode, "\n test ind 1", test_ind_1,
                  "test in 2", test_ind_2)

            print(" tx_tar : ", tx_tar, "  ty_tar : ", ty_tar)
            print(" tx_tar_1 : ", tx_tar_1, "  ty_tar_1 : ", ty_tar_1)
            print(" tx_tar_2 : ", tx_tar_2, "  ty_tar_2 : ", ty_tar_2)
            print(" tx_now : ", tx_now, "  ty_now : ", ty_now)
            print(target_ind - now_ind, "", last_index)
            print("")

            print("localization1 x : ", controller.car_local_point_x,
                  "localization1 y : ", controller.car_local_point_y)
            print("localization2 x : ", controller.car_local_2_point_x,
                  "localization2 y : ", controller.car_local_2_point_y)
            print("rear x : ", state.rear_x, "rear y : ", state.rear_y)
            print("front x : ", state.front_x, "front y : ", state.front_y)

            print("lo_1 dx : ", controller.car_local_point_x - tx,
                  "lo_1 dy : ", controller.car_local_point_y - ty)
            print("lo_2 dx : ", controller.car_local_2_point_x - tx,
                  "lo_2 dy : ", controller.car_local_2_point_y - ty)

            dlo_1 = np.hypot(tx - controller.car_local_point_x,
                             ty - controller.car_local_point_y)
            dlo_1_0 = np.hypot(tx_tar - controller.car_local_point_x,
                               ty_tar - controller.car_local_point_y)
            dlo_1_1 = np.hypot(tx_tar_1 - controller.car_local_point_x,
                               ty_tar_1 - controller.car_local_point_y)
            dlo_1_2 = np.hypot(tx_tar_2 - controller.car_local_point_x,
                               ty_tar_2 - controller.car_local_point_y)
            dlo_1_3 = np.hypot(tx_tar_3 - controller.car_local_point_x,
                               ty_tar_3 - controller.car_local_point_y)
            dlo_1_now = np.hypot(tx_now - controller.car_local_point_x,
                                 ty_now - controller.car_local_point_y)

            dlo_2 = np.hypot(tx - controller.car_local_2_point_x,
                             ty - controller.car_local_2_point_y)
            dlo_2_0 = np.hypot(tx_tar - controller.car_local_2_point_x,
                               ty_tar - controller.car_local_2_point_y)
            dlo_2_1 = np.hypot(tx_tar_1 - controller.car_local_2_point_x,
                               ty_tar_1 - controller.car_local_2_point_y)
            dlo_2_2 = np.hypot(tx_tar_2 - controller.car_local_2_point_x,
                               ty_tar_2 - controller.car_local_2_point_y)
            dlo_2_3 = np.hypot(tx_tar_3 - controller.car_local_2_point_x,
                               ty_tar_3 - controller.car_local_2_point_y)
            dlo_2_now = np.hypot(tx_now - controller.car_local_2_point_x,
                                 ty_now - controller.car_local_2_point_y)

            print("lo_1 d : ", dlo_1_now, dlo_1_0, dlo_1_1, dlo_1_2, dlo_1_3)
            print("lo_2 d : ", dlo_2_now, dlo_2_0, dlo_2_1, dlo_2_2, dlo_2_3)
            print(" target ind : ", target_ind, " | ", last_index)
            print(" now ind : ", now_ind, " | ", last_index)
            print(" pure_ cal ind : ", pu_ind, "pu ind : ", pu_pind)

            print("")
            print("tx      : ", tx, "ty      : ", ty)
            print("nx      : ", nx, "ny      : ", ny)
            print("state x : ", state.x, "state y : ", state.y)
            print("distance target x : ", tx - state.x, "distance target y : ",
                  ty - state.y)
            dtar = np.hypot(state.x - tx, state.y - ty)
            dnow = np.hypot(state.x - nx, state.y - ny)
            print("")
            print("distance target : ", dtar, "distance now : ", dnow)
            print("Ld _ origin  : ", Ld, "Ld _ cal  : ", Ld_m)

            # ----------------------------------------------------------------------------------
            controller.loop += 1

            # draw plot
            v_dt += cur_dt
            states.append(v_dt, state)

            # -------------------------------------------------------------------------------------
            # drawing plot
            if cfg.show_animation:  # pragma: no cover
                plt.cla()
                # for stopping simulation with the esc key.

                plt.gcf().canvas.mpl_connect(
                    'key_release_event',
                    lambda event: [exit(0) if event.key == 'escape' else None])
                plot_arrow(state.x, state.y, state.yaw)
                plt.plot(cx, cy, "-r", label="course")
                plt.plot(states.x, states.y, "-b", label="trajectory")
                plt.plot(cx[target_ind], cy[target_ind], "xg", label="target")
                plt.axis("equal")
                plt.grid(True)
                plt.title("Speed[km/h]:" + str(state.v)[:4])
                plt.pause(0.001)

            if cfg.show_animation_gain:
                plt.cla()
                plt.gcf().canvas.mpl_connect(
                    'key_release_event',
                    lambda event: [exit(0) if event.key == 'escape' else None])
                plt.grid(True)
                plt.plot(states.t, [iv for iv in states.v], "-r")
                plt.legend()
                plt.xlabel("Time[s]")
                plt.ylabel("Speed[km/h]")
                plt.axis("equal")
                plt.title("Speed[km/h]:" + str(state.v)[:4])
                plt.grid(True)
                plt.pause(0.00001)
            #

        else:
            print("lastIndex < target_ind")
            print("*-*-*-*-*-*-END-*-*-*-*-*-*")
        # ----------------------------------------------------

        # set target V
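        # Keyboard teleop summary: '0' clears VC_SwitchOn, '1'-'5' select target
        # speeds of 10-50, '9'/'8'/'7' select creep speeds of 1/3/5, 'w'/'s' nudge
        # the commanded acceleration, 'a'/'d'/'e' adjust or reset the steering wheel,
        # 'c'/'x' stop the vehicle, and Ctrl-C exits the loop.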
        if key == '0':
            udp.VC_SwitchOn = 0
            print('maneuver')

        if key == '1':
            udp.VC_SwitchOn = 1
            udp.GearNo = 1
            tar_vel_ = 10

        if key == '2':
            udp.VC_SwitchOn = 1
            udp.GearNo = 1
            tar_vel_ = 20

        if key == '3':
            udp.VC_SwitchOn = 1
            udp.GearNo = 1
            tar_vel_ = 30

        if key == '4':
            udp.VC_SwitchOn = 1
            udp.GearNo = 1
            tar_vel_ = 40

        if key == '5':
            udp.VC_SwitchOn = 1
            udp.GearNo = 1
            tar_vel_ = 50

        if key == '9':
            udp.VC_SwitchOn = 1
            udp.GearNo = 1
            tar_vel_ = 1
        if key == '8':
            udp.VC_SwitchOn = 1
            udp.GearNo = 1
            tar_vel_ = 3
        if key == '7':
            udp.VC_SwitchOn = 1
            udp.GearNo = 1
            tar_vel_ = 5

        if key == 'w':
            udp.VC_SwitchOn = 1
            if udp.Ax >= 0:
                udp.GearNo = 1
            else:
                udp.GearNo = -1
            udp.Ax += 1

        if key == 'a':
            udp.VC_SwitchOn = 1
            udp.SteeringWheel += 0.1
            # mode_pub.publish(udp)
            print('SteeringWheel +')

        if key == 'd':
            udp.VC_SwitchOn = 1
            udp.SteeringWheel -= 0.1
            # mode_pub.publish(udp)
            print('SteeringWheel -')

        if key == 'e':
            udp.VC_SwitchOn = 1
            udp.SteeringWheel = 0.0
            # mode_pub.publish(udp)
            print('SteeringWheel 0')

        if key == 's':
            udp.VC_SwitchOn = 1
            if udp.Ax >= 0:
                udp.GearNo = 1
            else:
                udp.GearNo = -1
            udp.Ax -= 1
            # mode_pub.publish(udp)
            print('Ax -')

        # Brake
        if key == 'c':
            udp.VC_SwitchOn = 1
            udp.GearNo = 0
            udp.Ax = 0
            udp.SteeringWheel = 0
            print('stop - 0.5')

        if key == 'x':
            udp.VC_SwitchOn = 1
            udp.GearNo = -9
            udp.Ax = 0
            udp.SteeringWheel = 0
            print('stop - 1')

        if key == '\x03':
            break

        controller.car_pub.publish(udp)

    # draw plot
    try:
        if controller.show_animation:  # pragma: no cover
            plt.cla()
            plt.plot(cx, cy, ".r", label="course")
            plt.plot(states.x, states.y, "-b", label="trajectory")
            plt.legend()
            plt.xlabel("x[m]")
            plt.ylabel("y[m]")
            plt.axis("equal")
            plt.grid(True)

            plt.subplots(1)
            plt.plot(states.t, [iv for iv in states.v], "-r")
            plt.xlabel("Time[s]")
            plt.ylabel("Speed[km/h]")
            plt.grid(True)
            plt.show()

    except AttributeError:
        print("*-*-*-n-o-d-a-t-a-*-*-*")
Example #30
    def __init__(self):
        rospy.init_node('tl_detector')

        self.pose = None
        self.waypoints = None
        self.camera_image = None
        self.lights = []
        '''
        /vehicle/traffic_lights provides you with the location of the traffic light in 3D map space and
        helps you acquire an accurate ground truth data source for the traffic light
        classifier by sending the current color state of all traffic lights in the
        simulator. When testing on the vehicle, the color state will not be available. You'll need to
        rely on the position of the light and the camera image to predict it.
        '''

        config_string = rospy.get_param("/traffic_light_config")

        # Folder of where images are stored.
        self.dirData = rospy.core.rospkg.environment.get_test_results_dir()
        self.config = yaml.load(config_string)

        self.stop_line_positions = self.config['stop_line_positions']
        self.stop_line_waypoints = []

        self.bridge = CvBridge()
        self.listener = tf.TransformListener()
        self.idx = 0
        self.waypointlist = np.array([])
        self.state = TrafficLight.UNKNOWN
        self.last_state = TrafficLight.UNKNOWN
        self.last_wp = -1
        self.state_count = 0

        self.ego_x = None
        self.ego_y = None
        self.closest_wp_index = None

        self.num_waypoints = 0
        self.light_wp = -1
        self.state_red_count = -1

        self.tree = []
        self.image_count = 0
        self.waypoints_prepared = False
        self.idx_traffic_light_found = False
        self.last_pose = None

        sub1 = rospy.Subscriber('/current_pose', PoseStamped, self.pose_cb)
        sub2 = rospy.Subscriber('/base_waypoints', Lane, self.waypoints_cb)
        sub3 = rospy.Subscriber('/vehicle/traffic_lights', TrafficLightArray,
                                self.traffic_cb)
        self.upcoming_red_light_pub = rospy.Publisher('/traffic_waypoint',
                                                      Int32,
                                                      queue_size=1)
        self.gpu_ready_pub = rospy.Publisher('/gpu_ready', Int32, queue_size=1)

        # Philippe
        #        sub6 = rospy.Subscriber('/image_color', Image, self.image_callback)
        #        self.light_classifier = TLClassifier(deep_learning=False)
        #        self.gpu_ready_pub.publish(Int32(1))
        #        self.loop()

        # Peter and Mikkel
        sub6 = rospy.Subscriber('/image_color', Image, self.image_callback)
        self.light_classifier = TLClassifier(deep_learning=True)
        self.gpu_ready_pub.publish(Int32(1))
        self.loop2()
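
    # Sketch (assumed): loop()/loop2() and process_traffic_lights() are not part of
    # this excerpt.  A typical loop2 throttles classification to a fixed rate and
    # works on the most recent camera frame instead of every incoming image:
    def loop2(self):
        rate = rospy.Rate(10)  # classify at most 10 frames per second
        while not rospy.is_shutdown():
            if self.camera_image is not None:
                light_wp, state = self.process_traffic_lights()  # assumed helper
                if state == TrafficLight.RED:
                    self.upcoming_red_light_pub.publish(Int32(light_wp))
                else:
                    self.upcoming_red_light_pub.publish(Int32(-1))
            rate.sleep()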