Code Example #1
def main():
    # Declare the global variables
    global result
    global count

    # Set the initial values
    count = 1
    result = {"detection": []}

    # Use message_filters to subscribe to multiple topics
    depth_image_sub = message_filters.Subscriber('/camera/aligned_depth_to_color/image_raw', Image)
    bb_sub = message_filters.Subscriber('/darknet_ros/bounding_boxes', BoundingBoxes)
    target_image = message_filters.Subscriber('/darknet_ros/detection_image', Image)
    joy_sub = message_filters.Subscriber('/joy', Joy)

    # Synchronize the topics and pass them to the callback
    ts = message_filters.ApproximateTimeSynchronizer([depth_image_sub, bb_sub, target_image, joy_sub], 1, 100)
    ts.registerCallback(callback)

    rospy.spin()

    # Write out the JSON file once the node has shut down
    with open("result.json", 'w') as output:
         json.dump(result, output)
Code Example #2
    def __init__(self):
        super(ProjectionNode, self).__init__()

        # init the node
        rospy.init_node('projection_node', anonymous=False)

        self._bridge = CvBridge()

        (depth_topic, face_topic, output_topic, f, cx, cy) = \
            self.get_parameters()

        # Subscribe to the face positions
        sub_obj = message_filters.Subscriber(face_topic,\
            DetectionArray)

        sub_depth = message_filters.Subscriber(depth_topic,\
            Image)

        # Advertise the result of Face Depths
        self.pub = rospy.Publisher(output_topic, \
            DetectionArray, queue_size=1)

        # Create the message filter
        ts = message_filters.ApproximateTimeSynchronizer(\
            [sub_obj, sub_depth], \
            2, \
            0.9)

        ts.registerCallback(self.detection_callback)

        self.f = f
        self.cx = cx
        self.cy = cy

        # spin
        rospy.spin()
Code Example #3
def listener():

    # In ROS, nodes are uniquely named. If two nodes with the same
    # name are launched, the previous one is kicked off. The
    # anonymous=True flag means that rospy will choose a unique
    # name for our 'listener' node so that multiple listeners can
    # run simultaneously.

    rospy.init_node('listener', anonymous=True)

    symbol = message_filters.Subscriber('/scan', LaserScan)
    odom = message_filters.Subscriber('/odom', Odometry)
    #odom  = message_filters.Subscriber('/hsrb/odom',Odometry)
    #pose  = message_filters.Subscriber('/navigation/localization/amcl_pose',PoseWithCovarianceStamped)#TAKESHI REAL
    #odom= message_filters.Subscriber("/hsrb/wheel_odom",Odometry)

    #ats= message_filters.ApproximateTimeSynchronizer([symbol,odom,twist],queue_size=5,slop=.1,allow_headerless=True)
    ats = message_filters.ApproximateTimeSynchronizer([symbol, odom],
                                                      queue_size=5,
                                                      slop=.1,
                                                      allow_headerless=True)
    ats.registerCallback(callback)
    # spin() simply keeps python from exiting until this node is stopped
    rospy.spin()
Code Example #4
    def on_start(self, args, time):
        print("Start estimator for {}".format(self._camera))

        topic = "/{}".format(self._camera)
        self._pubImage = rospy.Publisher(topic + "/marker_image",
                                         ImageMsg,
                                         queue_size=1)
        self._pubMarker = rospy.Publisher(topic + "/marker_pose",
                                          PoseMsg,
                                          queue_size=1)

        sub1 = mf.Subscriber(topic + "/rgb/camera_info",
                             CameraInfoMsg,
                             queue_size=3)
        sub2 = mf.Subscriber(topic + "/rgb/image_raw", ImageMsg, queue_size=3)
        sub3 = mf.Subscriber(topic + "/depth_registered/image_raw",
                             ImageMsg,
                             queue_size=3)
        sub4 = mf.Subscriber(topic + "/depth_registered/points",
                             PointCloud2Msg,
                             queue_size=3)
        self._subCamera = [sub1, sub2, sub3, sub4]
        self._sync = mf.ApproximateTimeSynchronizer(self._subCamera, 3, 0.01)
        self._sync.registerCallback(self.__callback)
Code Example #5
	def __init__(self):
		#rospy.loginfo("[%s] Initializing " %(self.node_name))
		self.bridge = CvBridge()
		#msg = rospy.wait_for_message('/X1/rgbd_camera/rgb/camera_info', CameraInfo, timeout=None)
		msg = rospy.wait_for_message('/camera/color/camera_info', CameraInfo, timeout=None)
		self.fx = msg.P[0]
		self.fy = msg.P[5]
		self.cx = msg.P[2]
		self.cy = msg.P[6]

		#-------point cloud without color-------
		#self.depth_sub = rospy.Subscriber("/camera/depth/image_rect_raw", Image, self.img_cb, queue_size = 1, buff_size = 2**24)
		#self.depth_sub = rospy.Subscriber("/X1/rgbd_camera/depth/image_raw", Image, self.img_cb, queue_size = 1, buff_size = 2**24)
		#------------------------------------

		#-------point cloud with color-------
		self.depth_sub = message_filters.Subscriber("/camera/aligned_depth_to_color/image_raw", Image)
		self.image_sub = message_filters.Subscriber("/camera/color/image_raw", Image)
		self.ts = message_filters.ApproximateTimeSynchronizer([self.image_sub, self.depth_sub], 1, 0.3)
		self.ts.registerCallback(self.img_cb)
		#------------------------------------

		self.pc_pub = rospy.Publisher("/pointcloud2_transformed", PointCloud2, queue_size=1)
		self.points = []
Code Example #6
    def __init__(self, node):

        self._logger = node.get_logger()
        self._node = node

        self._logger.info("Init ObjectFilter()")
        self._pub_object_topic = self._node.create_publisher(
            Objects, '/object_map/filtered_object', 10)

        sub_detection = message_filters.Subscriber(
            self._node, ObjectsInBoxes,
            '/movidius_ncs_stream/detected_objects')
        sub_tracking = message_filters.Subscriber(
            self._node, TrackedObjects, '/object_analytics/tracking')
        sub_localization = message_filters.Subscriber(
            self._node, ObjectsInBoxes3D, '/object_analytics/localization')

        object_filter = message_filters.ApproximateTimeSynchronizer(
            [sub_detection, sub_tracking, sub_localization],
            10,
            0.01,
            allow_headerless=False)

        object_filter.registerCallback(self._callback)
Code Example #7
    def __init__(self):
        rospy.init_node('object_subscriber', anonymous=True)
        self.object_sub1 = message_filters.Subscriber("/object_coord1",
                                                      Float64MultiArray)
        self.object_sub2 = message_filters.Subscriber("/object_coord2",
                                                      Float64MultiArray)

        self.object_true_x = message_filters.Subscriber(
            "/target2/x2_position_controller/command", Float64)
        self.object_true_y = message_filters.Subscriber(
            "/target2/y2_position_controller/command", Float64)
        self.object_true_z = message_filters.Subscriber(
            "/target2/z2_position_controller/command", Float64)

        self.object_x = rospy.Publisher(
            "/target2/x_mea_position_controller/command",
            Float64,
            queue_size=10)
        self.object_y = rospy.Publisher(
            "/target2/y_mea_position_controller/command",
            Float64,
            queue_size=10)
        self.object_z = rospy.Publisher(
            "/target2/z_mea_position_controller/command",
            Float64,
            queue_size=10)

        self.object_ts = message_filters.ApproximateTimeSynchronizer(
            [
                self.object_sub1, self.object_sub2, self.object_true_x,
                self.object_true_y, self.object_true_z
            ],
            10,
            slop=0.1,
            allow_headerless=True)
        self.object_ts.registerCallback(self.object_callback)
Code Example #8
    def __init__(self, folder):
        if folder is not None:
            self.folder = folder
        self.bridge = CvBridge()
        self.string_pub_ = rospy.Publisher('sync_msg', String, queue_size=2)
        # command subscriber
        if rospy.has_param('twist_topic'):
            self.twist_topic = rospy.get_param('twist_topic')
            rospy.loginfo('Parameter %s has value %s',
                          rospy.resolve_name('twist_topic'), self.twist_topic)

        self.twist_sub_ = message_filters.Subscriber(self.twist_topic,
                                                     TwistStamped)
        # image subscriber
        if rospy.has_param('/robot/img_topic'):
            self.img_topic = rospy.get_param('/robot/img_topic')
            rospy.loginfo('Parameter %s has value %s',
                          rospy.resolve_name('img_topic'), self.img_topic)

        self.img_sub_ = message_filters.Subscriber(self.img_topic,
                                                   CompressedImage)

        # sync topics
        self.ts = message_filters.ApproximateTimeSynchronizer(
            [self.img_sub_, self.twist_sub_], 5, 0.5)
        print('registering callback...')
        self.ts.registerCallback(self.callback)
        #self.grayscale = rospy.get_param("grayscale")

        print('opening file for targets...')
        # init table for targets and ids
        with open(self.target_file, 'w') as f:
            writer = csv.writer(f)
            writer.writerow(['id', 'linear', 'angular', 'target'])

        print('topics: ', self.img_topic, ' -- ', self.twist_topic)
Code Example #9
    rospy.loginfo('Calculated z %f, phi %f and theta %f', u.thrust,
                  u.orientation.x, u.orientation.y)


if __name__ == '__main__':
    rospy.init_node('altitude_pid', anonymous=True)
    rate = rospy.Rate(20)

    ref = rospy.get_param('~alt_ref')
    name = rospy.get_param('~name')

    pub = rospy.Publisher('/mambo_con/' + name + '/zd_input',
                          AttitudeTarget,
                          queue_size=10)
    posesub = message_filters.Subscriber('/vrpn_client_node/' + name + '/pose',
                                         PoseStamped)
    twistsub = message_filters.Subscriber(
        '/vrpn_client_node/' + name + '/twist', TwistStamped)
    # dsub = message_filters.Subscriber('/vrpn_client_node/' + name + '/accel', TwistStamped)

    ats = message_filters.ApproximateTimeSynchronizer([posesub, twistsub],
                                                      queue_size=10,
                                                      slop=0.1)
    ats.registerCallback(mocapcb)

    rospy.loginfo('Tracking altitude: %f' % ref)
    rospy.spin()
    # while not rospy.is_shutdown():
    # 	## Add the computation of u.
    # 	rospy.loginfo('Computation at %s' % rospy.get_time())
    # 	rospy.spin()
Code Example #10
    def __init__(self):
        # Subscriber
        # self.pc_sub = rospy.Subscriber("/kitti/oxts/gps/vel",TwistStamped, self.IMUcallback, queue_size=1)
        # self.imu_sub = rospy.Subscriber("/kitti/oxts/gps/vel",TwistStamped, self.IMUcallback, queue_size=1)
        self.imu_sub = message_filters.Subscriber("/kitti/oxts/gps/vel",
                                                  TwistStamped)
        self.det_front_sub = message_filters.Subscriber(
            "/detection/lidar_detector/objects_markers_front", MarkerArray)
        self.det_back_sub = message_filters.Subscriber(
            "/detection/lidar_detector/objects_markers", MarkerArray)

        # Publisher
        self.pub_det_markerarray = rospy.Publisher('pillar_marker_array',
                                                   BoundingBoxArray,
                                                   queue_size=1)

        self.pub_frame_seq = rospy.Publisher('kitti_frame_seq',
                                             OverlayText,
                                             queue_size=1)
        self.pub_boxes = rospy.Publisher('kitti_box_track',
                                         BoundingBoxArray,
                                         queue_size=1)
        self.pub_pictograms = rospy.Publisher('kitti_box_pictogram_track',
                                              PictogramArray,
                                              queue_size=1)
        self.pub_selfvelo_text = rospy.Publisher('kitti_selfvelo_text_track',
                                                 Marker,
                                                 queue_size=1)
        self.pub_selfveloDirection = rospy.Publisher(
            'kitti_selfvelo_direction_track', Marker, queue_size=1)
        self.pub_objs_ori = rospy.Publisher('kitti_objs_ori_track',
                                            MarkerArray,
                                            queue_size=3)
        self.pub_objs_velo = rospy.Publisher('kitti_objs_velo_track',
                                             MarkerArray,
                                             queue_size=1)
        self.pub_path = rospy.Publisher('kitti_objs_path_track',
                                        MarkerArray,
                                        queue_size=1)
        self.pub_warning_lines = rospy.Publisher('kitti_warning_lines_track',
                                                 MarkerArray,
                                                 queue_size=1)
        self.pub_ego_outCircle = rospy.Publisher('kitti_ego_outCircle_track',
                                                 Marker,
                                                 queue_size=1)
        self.pub_ego_innerCircle = rospy.Publisher(
            'kitti_ego_innerCircle_track', Marker, queue_size=1)
        self.pub_ego_car = rospy.Publisher('kitti_ego_car_track',
                                           Marker,
                                           queue_size=1)

        # Publisher & Subscriber Wrapper
        pub_list = [self.det_front_sub, self.det_back_sub, self.imu_sub]
        # pub_list = [self.det_front_sub, self.det_back_sub]
        # pub_list = [self.det_front_sub]

        self.ts = message_filters.ApproximateTimeSynchronizer(
            pub_list, 1, 0.1, allow_headerless=True)
        # self.ts = message_filters.TimeSynchronizer(pub_list, 10)
        # self.ts = message_filters.Cache(self.det_front_sub, 10, allow_headerless=False)
        # self.ts = message_filters.Cache(self.det_back_sub, 10, allow_headerless=False)
        self.ts.registerCallback(self.callback)
        self.tf2 = TransformListener()  # for callback function

        # Multi-Objects tracking instance
        self.mot_tracker = AB3DMOT()
Code Example #11
with open(config_file, 'r') as f:
    f.readline()
    config = yaml.safe_load(f)
    lens = config['lens']
    fx = float(config['fx'])
    fy = float(config['fy'])
    cx = float(config['cx'])
    cy = float(config['cy'])
    k1 = float(config['k1'])
    k2 = float(config['k2'])
    p1 = float(config['p1/k3'])
    p2 = float(config['p2/k4'])  
K = np.matrix([[fx, 0.0, cx],
               [0.0, fy, cy],
               [0.0, 0.0, 1.0]])
D = np.array([k1, k2, p1, p2])
print("Camera parameters")
print("Lens = %s" % lens)
print("K =")
print(K)
print("D =")
print(D)

pub = rospy.Publisher("/reprojection", Image, queue_size=1)
scan_sub = message_filters.Subscriber(scan_topic, LaserScan, queue_size=1)
image_sub = message_filters.Subscriber(image_topic, Image, queue_size=1)
ts = message_filters.ApproximateTimeSynchronizer([scan_sub, image_sub], 10, time_diff)
ts.registerCallback(callback)

rospy.spin()
Code Example #12
File: rrt.py  Project: championway/gazebo_test
    route.reverse()
    waypoint = WayPoint()
    waypointlist = WayPointList()
    print "------------------------Start--------------------------"
    for i in route:
        print "x = ", i[0], ", y = ", i[1]
        waypoint = WayPoint()
        waypoint.x = i[0]
        waypoint.y = i[1]
        waypointlist.list.append(waypoint)
    print "-----------------------Arrive--------------------------"
    pub_waypoint.publish(waypointlist)
    start = None
    nodes = []


# if python says run, then we should run
if __name__ == '__main__':
    rospy.init_node('rrt', anonymous=False)
    pub_waypoint = rospy.Publisher('/waypoint_list',
                                   WayPointList,
                                   queue_size=1)
    obs_sub = message_filters.Subscriber("/obstacle_list", ObstaclePoseList)
    pos_sub = message_filters.Subscriber("/pose", PoseStamped)

    ats = message_filters.ApproximateTimeSynchronizer([obs_sub, pos_sub],
                                                      queue_size=1,
                                                      slop=1)
    ats.registerCallback(call_back)
    print "f**k"
    rospy.spin()
Code Example #13
File: rgb_detection.py  Project: eleboss/robo
        print(im.dtype, im[0,0,0])
        Im_to_ros = Image()
        try:
            Im_to_ros = bridge.cv2_to_imgmsg(im, "bgr8")
        except CvBridgeError as e:
            print (e)
        Im_to_ros.header.stamp = rospy.Time.now()
        Im_to_ros.header.frame_id = 'camera_link'
        pub_dr.publish(Im_to_ros)
        print ('Image detection output saved to {}'.format(out_file_name))


rospy.init_node('rgb_detection')
DetectInit()

rgb_sub = message_filters.Subscriber('camera/color/image_raw', Image)
#rgb_sub = message_filters.Subscriber('camera/infra1/image_rect_raw', Image)
pc_sub =  message_filters.Subscriber('/camera/depth_registered/points', PointCloud2)
#depth_sub = message_filters.Subscriber('camera/depth/image_rect_raw', Image)
pub = rospy.Publisher('rgb_detection/enemy_position', ObjectList, queue_size=1)
pub_dr = rospy.Publisher('rgb_detection/detection_result', Image, queue_size=1)

# TODO: tune the slop value in later experiments
TsDet = message_filters.ApproximateTimeSynchronizer([rgb_sub,pc_sub],queue_size=5, slop=0.1, allow_headerless=False)
TsDet.registerCallback(TsDet_callback)

rospy.spin()
if mc.DRAW_Video:
    video.release()
cv2.destroyAllWindows()
Code Example #14
    def __init__(self,
                 image_namespace,
                 depth_namespace=None,
                 score_thresh=0.5,
                 service_name=default_service_name,
                 vis=False):
        self._service_name = service_name
        stem = image_namespace.split('/')[-1]

        self.namespace = "rasberry_perception/" + stem + "/"

        self.score_thresh = score_thresh

        # Wait for connection to detection service
        self.detector = Client()
        self.vis = vis
        if self.vis:
            # Discard frame if worker busy
            self.publisher_tasks = WorkerTaskQueue(num_workers=1,
                                                   max_size=1,
                                                   discard=True)
            rospy.on_shutdown(self.on_shutdown)

        # Initialise publishers
        self.detections_pub = rospy.Publisher(self.namespace + "detections",
                                              Detections,
                                              queue_size=1)
        self.image_pub = rospy.Publisher(self.namespace + "colour/image_raw",
                                         Image,
                                         queue_size=1)
        self.image_info_pub = rospy.Publisher(self.namespace +
                                              "colour/camera_info",
                                              CameraInfo,
                                              queue_size=1)
        self.detections_vis_pub = rospy.Publisher(self.namespace +
                                                  "colour/vis_raw",
                                                  Image,
                                                  queue_size=1)

        # Initialise subscribers
        subscribers = [
            message_filters.Subscriber(image_namespace + "/image_raw", Image),
            message_filters.Subscriber(image_namespace + "/camera_info",
                                       CameraInfo),
        ]

        self.depth_enabled = depth_namespace is not None
        if self.depth_enabled:
            self.depth_pub = rospy.Publisher(self.namespace +
                                             "depth/image_raw",
                                             Image,
                                             queue_size=1)
            self.depth_info_pub = rospy.Publisher(self.namespace +
                                                  "depth/camera_info",
                                                  CameraInfo,
                                                  queue_size=1)
            self.depth_bbox_detections_pub = rospy.Publisher(self.namespace +
                                                             "bbox_detections",
                                                             PoseArray,
                                                             queue_size=1)
            self.depth_segm_detections_pub = rospy.Publisher(self.namespace +
                                                             "segm_detections",
                                                             PoseArray,
                                                             queue_size=1)

            subscribers.extend([
                message_filters.Subscriber(depth_namespace + "/image_raw",
                                           Image),
                message_filters.Subscriber(depth_namespace + "/camera_info",
                                           CameraInfo),
            ])

        # Start subscription
        sync_queue, sync_thresh = 1, 0.1
        rospy.loginfo(
            "Waiting for topics with time synchroniser (queue {}, {}s tolerance) on '{}'"
            .format(sync_queue, sync_thresh,
                    ', '.join([s.topic for s in subscribers])))
        self.ts = message_filters.ApproximateTimeSynchronizer(
            subscribers, sync_queue, sync_thresh)
        self.ts.registerCallback(self.run_detector)
Code Example #15
def main():
	global _ultrasound_image, _robot_pose, _new_pose_available
	rospy.init_node('z_wire_to_pose', anonymous=True)
	marker_pub = rospy.Publisher('z_wire_pose', Marker, queue_size = 1)
	marker_pub_phantom = rospy.Publisher('z_wire_phantom', Marker, queue_size = 1)
	robo_sub = message_filters.Subscriber('/pose_ee', TransformStamped)
	ultrasound_sub = message_filters.Subscriber('/camera/image_raw', Image)
	ts = message_filters.ApproximateTimeSynchronizer([robo_sub, ultrasound_sub], 10, 1/15)
	ts.registerCallback(_sync_cb)

	last_im_points = np.zeros((9,2))
	# last_im_points = [[521, 261],
	# 				  [722, 261],
	# 				  [797, 262],
	# 				  [523, 366],
	# 				  [603, 365],
	# 				  [797, 363],
	# 				  [592, 471],
	# 				  [727, 468],
	# 				  [794, 469]]
	points_tracked = [False] * 9
	rate = rospy.Rate(30)
	centroids = []


	def on_click(event, x, y, p1, p2):
		n_tracked_points =  np.sum(np.equal(points_tracked, True))
		no_free_centroids = len(centroids) <= n_tracked_points
		all_points_tracked = n_tracked_points == len(points_tracked)
		if event != cv2.EVENT_LBUTTONDBLCLK or no_free_centroids or all_points_tracked:
			return
		print("Clicked %d, %d" % (x,y))
		try:
			idx = points_tracked.index(False)
		except ValueError:
			return
		points_tracked[idx] = True
		last_im_points[idx] = [x,y]

	print("Starting")
	while not rospy.is_shutdown():
		marker_pub_phantom.publish(make_wire_marker(np.eye(4), 10))
		rate.sleep()
		if not _new_pose_available:
			continue

		centroids = find_centroids(_ultrasound_image)

		for pt in centroids:
			cv2.circle(_ultrasound_image, tuple(pt), 3, color=(255,0,0), thickness=1)

		for idx, pt in enumerate(last_im_points):
			if points_tracked[idx]:
				new_pt = find_closest_point(centroids, pt)
				if np.linalg.norm(new_pt - pt) < 10:
					last_im_points[idx] = new_pt
				else:
					points_tracked[idx] = False
		if np.sum(np.equal(points_tracked, True))  == len(points_tracked):
			im_points = centralize_and_scale(np.array(last_im_points), np.array([388,134]))
			coords_3d = img_3d_coords_zFrame(im_points)
			print(last_im_points, im_points, coords_3d)
			if coords_3d is not None:
				pos = find_img_frame_position(im_points, coords_3d)
				rot = find_img_frame_axis(im_points, coords_3d)
				T = np.eye(4)
				T[0:3,0:3] = rot
				T[0:3, 3] = pos
				marker_pub.publish(make_pose_marker(T, 10))
		colors = [np.random.rand(3,)*255 for x in last_im_points]
		for pt, tracked in zip(last_im_points, points_tracked):
			if tracked:
				color = (0,255,0)
			else:
				color = (0,0,255)
			cv2.circle(_ultrasound_image, tuple([int(x) for x in pt]), 5, color=color,thickness=2)

		cv2.imshow('Ultrasound Image',_ultrasound_image)
		cv2.setMouseCallback('Ultrasound Image', on_click)
		k = cv2.waitKey(1) & 0xFF
		if k == 27:
			break

		_new_pose_available = False
Code Example #16
    # Finding the grid coordinates corresponding to world coordinate
    for key, value in grid_world_coor.items():
        if value['world_coor'] == bot_goal[:2]:
            grid_goal= key
        if value['world_coor'] == bot_init[:2]:
            grid_init= key

    # Initiating the A* algorithm
    a_star(grid= grid, init= grid_init[::-1], goal= grid_goal[::-1])

    if len(grid_path)>0:
        # Printing the calculated path
        for each in grid_path:
            grid[each[0]][each[1]] = '_'
        print(np.array(grid))

        # Converting grid coordinates into world coordinates
        world_path= [grid_world_coor[(y, x)]['world_coor'] if (grid_world_coor[(y, x)]['world_coor'] != (-1, -4)) else (-0.5,-4) for (x, y) in grid_path[::-1]]

        # Initiating subscribers for topics /base_scan & /odom
        scan_sub= message_filters.Subscriber('/base_scan', LaserScan)
        odom_sub= message_filters.Subscriber('/odom', Odometry)

        # Initiating a synchronizer to get the messages in sync
        sub_sync= message_filters.ApproximateTimeSynchronizer([scan_sub, odom_sub], 10, 0.1, True)
        sub_sync.registerCallback(sync_callback)

        # Runs the session infinitely
        rospy.spin()
Code Example #17
    def __init__(self):

        super().__init__("trajectory_visualizer")

        self.fig = plt.figure()

        self.max_vel = 0.0
        self.min_vel = 0.0
        self.min_accel = 0.0
        self.max_accel = 0.0
        self.min_jerk = 0.0
        self.max_jerk = 0.0

        # update flag
        self.update_ex_vel_lim = False
        self.update_lat_acc_fil = False
        self.update_traj_raw = False
        self.update_traj_resample = False
        self.update_traj_final = False
        self.update_lanechange_path = False
        self.update_behavior_path = False
        self.update_traj_ob_avoid = False
        self.update_traj_ob_stop = False

        self.tf_buffer = Buffer(node=self)
        self.tf_listener = TransformListener(self.tf_buffer,
                                             self,
                                             spin_thread=True)

        self.self_pose = Pose()
        self.self_pose_received = False
        self.localization_vx = 0.0
        self.vehicle_vx = 0.0

        self.trajectory_external_velocity_limited = Trajectory()
        self.trajectory_lateral_acc_filtered = Trajectory()
        self.trajectory_raw = Trajectory()
        self.trajectory_time_resampled = Trajectory()
        self.trajectory_final = Trajectory()

        self.lane_change_path = PathWithLaneId()
        self.behavior_path = Path()
        self.obstacle_avoid_traj = Trajectory()
        self.obstacle_stop_traj = Trajectory()

        self.plotted = [False] * 9
        self.sub_localization_twist = self.create_subscription(
            Odometry, "/localization/kinematic_state",
            self.CallbackLocalizationTwist, 1)
        self.sub_vehicle_twist = self.create_subscription(
            VelocityReport, "/vehicle/status/velocity_status",
            self.CallbackVehicleTwist, 1)

        # BUFFER_SIZE = 65536*100
        optimizer_debug = "/planning/scenario_planning/motion_velocity_smoother/debug/"
        self.sub1 = message_filters.Subscriber(
            self, Trajectory,
            optimizer_debug + "trajectory_external_velocity_limited")
        self.sub2 = message_filters.Subscriber(
            self, Trajectory,
            optimizer_debug + "trajectory_lateral_acc_filtered")
        self.sub3 = message_filters.Subscriber(
            self, Trajectory, optimizer_debug + "trajectory_raw")
        self.sub4 = message_filters.Subscriber(
            self, Trajectory, optimizer_debug + "trajectory_time_resampled")
        self.sub5 = message_filters.Subscriber(
            self, Trajectory, "/planning/scenario_planning/trajectory")

        lane_driving = "/planning/scenario_planning/lane_driving"
        self.sub6 = message_filters.Subscriber(
            self, PathWithLaneId,
            lane_driving + "/behavior_planning/path_with_lane_id")
        self.sub7 = message_filters.Subscriber(
            self, Path, lane_driving + "/behavior_planning/path")
        self.sub8 = message_filters.Subscriber(
            self,
            Trajectory,
            lane_driving +
            "/motion_planning/obstacle_avoidance_planner/trajectory",
        )
        self.sub9 = message_filters.Subscriber(
            self, Trajectory, "/planning/scenario_planning/trajectory")

        self.ts1 = message_filters.ApproximateTimeSynchronizer(
            [self.sub1, self.sub2, self.sub3, self.sub4, self.sub5], 30, 0.5)
        self.ts1.registerCallback(self.CallbackMotionVelOptTraj)

        self.ts2 = message_filters.ApproximateTimeSynchronizer(
            [self.sub6, self.sub7, self.sub8, self.sub9], 30, 1, 0)
        self.ts2.registerCallback(self.CallBackLaneDrivingTraj)

        # main process
        if PLOT_TYPE == "VEL_ACC_JERK":
            self.ani = animation.FuncAnimation(self.fig,
                                               self.plotTrajectory,
                                               interval=100,
                                               blit=True)
            self.setPlotTrajectory()
        else:
            self.ani = animation.FuncAnimation(self.fig,
                                               self.plotTrajectoryVelocity,
                                               interval=100,
                                               blit=True)
            self.setPlotTrajectoryVelocity()

        plt.show()

        return
Code Example #18
    def calibrate_triple_azure(self, msg):

        self.is_finish = False
        rospy.loginfo("Initial calibration between azures")
        calibrate_1 = rospy.ServiceProxy('/azure1/extrinsic_calibration',
                                         ExtrinsicCalibrate)
        calibrate_2 = rospy.ServiceProxy('/azure2/extrinsic_calibration',
                                         ExtrinsicCalibrate)
        calibrate_3 = rospy.ServiceProxy('/azure3/extrinsic_calibration',
                                         ExtrinsicCalibrate)

        # initial calibration
        all_sucess = False
        while not all_sucess:
            is_sucess_1 = calibrate_1(self.fiducial_ids)
            is_sucess_2 = calibrate_2(self.fiducial_ids)
            is_sucess_3 = calibrate_3(self.fiducial_ids)
            all_sucess = is_sucess_1 or is_sucess_2 or is_sucess_3
        rospy.sleep(1.0)

        # refine the calibration using ICP b/w markers
        rospy.loginfo("ICP b/w markers for calibration refinement ")
        points_sub_1 = message_filters.Subscriber('/azure1/points2',
                                                  PointCloud2,
                                                  buff_size=1280 * 720 * 3)
        points_sub_2 = message_filters.Subscriber('/azure2/points2',
                                                  PointCloud2,
                                                  buff_size=1280 * 720 * 3)
        points_sub_3 = message_filters.Subscriber('/azure3/points2',
                                                  PointCloud2,
                                                  buff_size=1280 * 720 * 3)
        fidvertices_sub_1 = message_filters.Subscriber(
            '/azure1/fiducial_vertices', FiducialArray)
        fidvertices_sub_2 = message_filters.Subscriber(
            '/azure2/fiducial_vertices', FiducialArray)
        fidvertices_sub_3 = message_filters.Subscriber(
            '/azure3/fiducial_vertices', FiducialArray)
        self.icp_subs = [
            points_sub_1, points_sub_2, points_sub_3, fidvertices_sub_1,
            fidvertices_sub_2, fidvertices_sub_3
        ]
        self.ts = message_filters.ApproximateTimeSynchronizer([
            points_sub_1, points_sub_2, points_sub_3, fidvertices_sub_1,
            fidvertices_sub_2, fidvertices_sub_3
        ],
                                                              queue_size=1,
                                                              slop=2)
        self.ts.registerCallback(self.icp_with_markers)

        # wait for ICP results
        while not self.is_finish:
            rospy.sleep(0.1)
        rospy.loginfo("Extrinsic calibration finished")

        if rospy.get_param('~publish_cloud'):
            # merge pointclouds and publish it
            rospy.loginfo("Merging pointclouds according to the ICP results")
            points_sub_1 = message_filters.Subscriber('/azure1/points2',
                                                      PointCloud2,
                                                      buff_size=1280 * 720 * 3)
            points_sub_2 = message_filters.Subscriber('/azure2/points2',
                                                      PointCloud2,
                                                      buff_size=1280 * 720 * 3)
            points_sub_3 = message_filters.Subscriber('/azure3/points2',
                                                      PointCloud2,
                                                      buff_size=1280 * 720 * 3)
            self.merge_subscribers = [points_sub_1, points_sub_2, points_sub_3]
            self.ts = message_filters.ApproximateTimeSynchronizer(
                [points_sub_1, points_sub_2, points_sub_3],
                queue_size=1,
                slop=1)
            self.ts.registerCallback(self.merge_pointcloud)

        return self.is_finish
Code Example #19
            a = [1]
            return a

        total = sum(a)
        for i in np.arange(np.shape(a)[0]):
            a[i] = a[i] / total
        return a

    def normalize2(self, a):
        if min(a) < 0:
            for i in np.arange(len(a)):
                a[i] = (a[i] - min(a)) / (max(a) - min(a))
        return a


if __name__ == "__main__":
    MulTraj_ge = MulTraj_Gene()
    rospy.loginfo("The class has been initialized!")
    ts = message_filters.ApproximateTimeSynchronizer(
        [MulTraj_ge.info_sub, MulTraj_ge.odom_sub], 10,
        0.1)  #,allow_headerless=True)
    rospy.loginfo("Approximate Time!")
    ts.registerCallback(MulTraj_ge.info_odom_callback)
    rospy.loginfo("Callback function!")
    #rospy.logerr(MulTraj_ge.pose.header.frame_id)
    #MulTraj_ge.listener.waitForTransform("map","thermal_camera",rospy.Time(),rospy.Duration(2.0))
    # while not rospy.is_shutdown():
    # while not rospy.is_shutdown():

    rospy.spin()
Code Example #20
def image_callback(img, pose):
    #rospy.loginfo(img.header)
    rospy.loginfo(pose.pose.position)
    try:
        cv_image = bridge.imgmsg_to_cv2(img, "passthrough")
    except CvBridgeError as e:
        rospy.logerr("CvBridge Error: {0}".format(e))
    #Pose_estimate(cv_image)
    RGB_img = cv2.cvtColor(cv_image, cv2.COLOR_BGR2RGB)
    Pose_estimate(RGB_img)
    #cv2.waitKey(1)


#def pose_callback(pose):
#	rospy.loginfo(pose.pose.position.x)
#sub_image=rospy.Subscriber("/downward_cam/camera/image",Image,image_callback)
#sub_pose=rospy.Subscriber("/ground_truth_to_tf/pose",PoseStamped,pose_callback)

#createWindow()

rospy.init_node('info')

sub_image = message_filters.Subscriber("/downward_cam/camera/image", Image)
sub_pose = message_filters.Subscriber("/ground_truth_to_tf/pose", PoseStamped)
sync = message_filters.ApproximateTimeSynchronizer([sub_image, sub_pose], 10,
                                                   3)
sync.registerCallback(image_callback)

rospy.spin()
Code Example #21
File: giadog_gym.py  Project: eduardo98m/open-blacky
    def __init__(self, sim: Optional[simulation] = None):
        self.observation_space = spaces.Dict({
            # Non-priviliged Space
            'target_dir':
            spaces.Box(low=-np.inf * np.ones((2, )),
                       high=np.inf * np.ones((2, )),
                       dtype=np.float32),
            'turn_dir':
            spaces.Discrete(3),
            'gravity_vector':
            spaces.Box(low=-np.inf * np.ones((3, )),
                       high=np.inf * np.ones((3, )),
                       dtype=np.float32),
            'angular_vel':
            spaces.Box(low=-np.inf * np.ones((3, )),
                       high=np.inf * np.ones((3, )),
                       dtype=np.float32),
            'linear_vel':
            spaces.Box(low=-np.inf * np.ones((3, )),
                       high=np.inf * np.ones((3, )),
                       dtype=np.float32),
            'joint_angles':
            spaces.Box(low=np.zeros((12, )),
                       high=2 * np.pi * np.ones((12, )),
                       dtype=np.float32),
            'joint_vels':
            spaces.Box(low=-np.inf * np.ones((12, )),
                       high=np.inf * np.ones((12, )),
                       dtype=np.float32),
            'ftg_phases':
            spaces.Box(low=-np.ones((8, )),
                       high=np.ones((8, )),
                       dtype=np.float32),
            'ftg_freqs':
            spaces.Box(low=-np.inf * np.ones((4, )),
                       high=np.inf * np.ones((4, )),
                       dtype=np.float32),
            'base_freq':
            spaces.Box(low=-np.inf, high=np.inf, shape=(1, ), dtype=float),
            'joint_err_hist':
            spaces.Box(low=np.zeros((self.JOINT_ERR_HISTORY_LEN, 12)),
                       high=2 * np.pi * np.ones(
                           (self.JOINT_ERR_HISTORY_LEN, 12)),
                       dtype=float),
            'joint_vel_hist':
            spaces.Box(low=-np.inf * np.ones((self.JOINT_VEL_HISTORY_LEN, 12)),
                       high=np.inf * np.ones((self.JOINT_VEL_HISTORY_LEN, 12)),
                       dtype=float),
            'foot_target_hist':
            spaces.Box(low=-np.inf * np.ones((self.FOOT_HISTORY_LEN, 4, 3)),
                       high=np.inf * np.ones((self.FOOT_HISTORY_LEN, 4, 3)),
                       dtype=float),
            'toes_contact':
            spaces.MultiBinary(4),
            'thighs_contact':
            spaces.MultiBinary(4),
            'shanks_contact':
            spaces.MultiBinary(4),

            # Priviliged Space
            'normal_foot':
            spaces.Box(low=-np.inf * np.ones((4, 3)),
                       high=np.inf * np.ones((4, 3)),
                       dtype=np.float32),
            'height_scan':
            spaces.Box(low=-np.inf * np.ones((4, 9)),
                       high=np.inf * np.ones((4, 9)),
                       dtype=np.float32),
            'foot_forces':
            spaces.Box(low=-np.inf * np.ones((4, )),
                       high=np.inf * np.ones((4, )),
                       dtype=np.float32),
            'foot_friction':
            spaces.Box(low=-np.inf * np.ones((4, )),
                       high=np.inf * np.ones((4, )),
                       dtype=np.float32),
            'foot_friction':
            spaces.Box(low=-np.inf * np.ones((3, )),
                       high=np.inf * np.ones((3, )),
                       dtype=np.float32)
        })
        self.action_space = spaces.Box(low=-float('inf') * np.ones((16, )),
                                       high=float('inf') * np.ones((16, )),
                                       dtype=np.float32)

        self.H = np.zeros(
            (HISTORY_LEN, controller_neural_network.NORMAL_DATA_SHAPE))
        self.foot_target_hist = np.zeros((self.FOOT_HISTORY_LEN, 4, 3))
        self.joint_vel_hist = np.zeros((self.JOINT_VEL_HISTORY_LEN, 12))
        self.joint_err_hist = np.zeros((self.JOINT_ERR_HISTORY_LEN, 12))
        self.joint_velocities = np.zeros((12, ))
        self.transf_matrix = np.zeros((4, 4, 4))
        self.external_force = np.zeros((3, ))
        self.ftg_phases = np.zeros((8, ))
        self.ftg_freqs = np.zeros((4, ))
        self.base_freq = BASE_FREQ
        self.gravity_vector = GRAVITY_VECTOR
        self.target_dir = np.zeros((2, ))
        self.timestep = 0
        self.E_v = []

        self.sim = sim
        if self.sim == None:
            # ROS publisher node that update the spot mini joints
            self.reset_pub = rospy.Publisher('reset_simulation',
                                             text,
                                             queue_size=QUEUE_SIZE)
            # ROS publisher node that update the spot mini joints
            self.joints_pub = rospy.Publisher('spot_joints',
                                              joint_angles,
                                              queue_size=QUEUE_SIZE)
            normal_data_sub = message_filters.Subscriber(
                'normal_data', normal_data)
            priviliged_data_sub = message_filters.Subscriber(
                'priviliged_data', priviliged_data)
            timestep_sub = message_filters.Subscriber('timestep', timestep)
            ts = message_filters.ApproximateTimeSynchronizer(
                [timestep_sub, normal_data_sub, priviliged_data_sub],
                queue_size=QUEUE_SIZE,
                slop=0.1,
                allow_headerless=True)
            ts.registerCallback(self.__update_obs_ros)
        else:
            self.count = 0
            self.begin_time = time()
            self.__update_obs_sim()
Code Example #22
import os
import sys

rospy.init_node('sync_odom_with_clock')

if len(sys.argv) < 3:
    print("usage: sync_odom_with_clock.py filename_out topic")
    exit(1)
filename_out = os.path.join(sys.path[0], sys.argv[1])
topic = sys.argv[2]
rospy.loginfo("filename_out  : %s", filename_out)
rospy.loginfo("topic : %s", topic)

outbag = rosbag.Bag(filename_out, 'w')

def callback(clk_msg, odom_msg):
    odom_msg.header.stamp = clk_msg.header.stamp
    outbag.write(topic, odom_msg, clk_msg.header.stamp)
    
queue_size = 1000 # because of 200 Hz Vicon
slop = 1.
msg_clock_sub = message_filters.Subscriber("/stamp", Odometry, queue_size = 1000)

msg_odom_sub = message_filters.Subscriber(topic, Odometry, queue_size = 1000)
ts = message_filters.ApproximateTimeSynchronizer([msg_clock_sub, msg_odom_sub], queue_size, slop)
ts.registerCallback(callback)

rospy.loginfo("Start spining")
rospy.spin()
outbag.close()
exit(0)
Code Example #23
    def __init__(self):
        self._cv_bridge = CvBridge()
        self._captured_img_l = None
        self._captured_img_r = None
        self._captured_img_width = 1
        self._captured_img_height = 1
        self._left_camera_image_topic = "/camera_l/image_rect_color"
        self._right_camera_image_topic = "/camera_r/image_rect_color"
        sub_img_l = message_filters.Subscriber(self._left_camera_image_topic,
                                               Image)
        sub_img_r = message_filters.Subscriber(self._right_camera_image_topic,
                                               Image)
        self.mf = message_filters.ApproximateTimeSynchronizer(
            [sub_img_l, sub_img_r], 100, 10.0)
        self.mf.registerCallback(self._img_callback)
        self._pub_depth_img = rospy.Publisher("/depth/image_rect",
                                              Image,
                                              queue_size=1)
        self._is_debug = rospy.get_param("~debug")
        self.img_scale = 0.5

        rospy.loginfo("waiting for left camera image")
        rospy.wait_for_message(self._left_camera_image_topic, Image)
        rospy.loginfo("waiting for right camera image")
        rospy.wait_for_message(self._right_camera_image_topic, Image)
        rospy.loginfo("camera image topic received")

        rospy.loginfo("loading camera parameter")
        camera_param = np.load('{}/config/camera_param_fisheye.npz'.format(
            rospkg.RosPack().get_path('jnmouse_ros_examples')))
        mtx_l, dist_l, mtx_r, dist_r, R, T = [
            camera_param[i]
            for i in ["mtx_l", "dist_l", "mtx_r", "dist_r", "R", "T"]
        ]

        newmtx_r = cv2.fisheye.estimateNewCameraMatrixForUndistortRectify(
            mtx_r,
            dist_r, (self._captured_img_width, self._captured_img_height),
            None,
            balance=1.0)
        newmtx_l = cv2.fisheye.estimateNewCameraMatrixForUndistortRectify(
            mtx_l,
            dist_l, (self._captured_img_width, self._captured_img_height),
            None,
            balance=1.0)
        Rp_l, Rp_r, Pp_l, Pp_r, self.Q = cv2.fisheye.stereoRectify(
            newmtx_l, dist_l, newmtx_r, dist_r,
            (self._captured_img_width, self._captured_img_height), R, T, 0)

        min_disp = 16
        window_size = 9
        self.stereo = cv2.StereoSGBM_create(
            minDisparity=min_disp,
            numDisparities=16 * 3,
            blockSize=window_size,
            P1=8 * 1 * window_size**2,
            P2=32 * 1 * window_size**2,
            disp12MaxDiff=1,
            uniquenessRatio=5,
            speckleWindowSize=50,
            speckleRange=2,
            mode=cv2.STEREO_SGBM_MODE_SGBM_3WAY)
Code Example #24
    def __init__(self,
                 left_image_topic,
                 right_image_topic,
                 callback=None,
                 slop=None,
                 encoding="bgr8",
                 queue_size=10):
        '''
        Construct a StereoImageSubscriber

        @param left_image_topic: ROS topic to subscribe for the left camera
                                 ex: /camera/front/left/image_rect_color
        @param right_image_topic: ROS topic to subscribe to for the right camera
                                 ex: /camera/front/right/image_rect_color
        @param callback: Function with signature foo(left_img, right_img) to call when a synchronized pair is ready.
               If left as None, the latest synced images are stored as self.last_image_left and self.last_image_right
        @param slop: Maximum time in seconds between left and right images to be considered synced.
               If left as None, will only consider synced if left and right images have exact same header time.
        @param encoding: String to pass to CvBridge to encode ROS image message to numpy array
        @param queue_size: Integer, the number of images to store in a buffer for each camera to find synced images
        '''
        if callback is None:  # Set default callback to just set image_left and image_right

            def callback(image_left, image_right):
                setattr(self, 'last_image_left', image_left)
                setattr(self, 'last_image_right', image_right)

        self.bridge = CvBridge()
        self.encoding = encoding
        self.callback = callback
        self.camera_info_left = None
        self.camera_info_right = None
        self.last_image_left = None
        self.last_image_time_left = None
        self.last_image_right = None
        self.last_image_time_right = None

        # Subscribe to image and camera info topics for both cameras
        root_topic_left, image_subtopic_left = path.split(left_image_topic)
        self._info_sub_left = rospy.Subscriber(
            root_topic_left + '/camera_info',
            CameraInfo,
            lambda info: setattr(self, 'camera_info_left', info),
            queue_size=queue_size)
        image_sub_left = message_filters.Subscriber(left_image_topic, Image)
        root_topic_right, image_subtopic_right = path.split(right_image_topic)
        self._info_sub_right = rospy.Subscriber(
            root_topic_right + '/camera_info',
            CameraInfo,
            lambda info: setattr(self, 'camera_info_right', info),
            queue_size=queue_size)
        image_sub_right = message_filters.Subscriber(right_image_topic, Image)

        # Use message_filters library to set up synchronized subscriber to both image topics
        if slop is None:
            self._image_sub = message_filters.TimeSynchronizer(
                [image_sub_left, image_sub_right], queue_size)
        else:
            self._image_sub = message_filters.ApproximateTimeSynchronizer(
                [image_sub_left, image_sub_right], queue_size, slop)
        self._image_sub.registerCallback(self._image_callback)
Code Example #25
        "Median Y Error:",
        np.median(print_array[:, 1]),
        "Median Theta Error:",
        np.median(print_array[:, 2]),
    )
    true_array.append([true_pose.pose.position.x, true_pose.pose.position.y])
    pf_array.append([pf_pose.pose.position.x, pf_pose.pose.position.y])

    # Plot trajectories
    if plot and time.time() - start_time > 40 and not plotted:
        plt.xlabel("x")
        plt.ylabel("y")
        plt.plot(np.array(true_array)[:, 0], np.array(true_array)[:, 1], c="r")
        plt.plot(np.array(pf_array)[:, 0], np.array(pf_array)[:, 1], c="g")
        plt.show()
        plotted = True


if __name__ == "__main__":
    rospy.init_node("testpf", anonymous=True)
    gt_topic = str(rospy.get_param("~gt_topic", "/pf/ta/viz/inferred_pose"))
    plot = bool(rospy.get_param("~plot", False))

    # Time Synchronizer
    pose_sub = message_filters.Subscriber("/pf/viz/inferred_pose", PoseStamped)
    pf_sub = message_filters.Subscriber(gt_topic, PoseStamped)

    ts = message_filters.ApproximateTimeSynchronizer([pose_sub, pf_sub], 10, 0.1)
    ts.registerCallback(callback)
    rospy.spin()
Code Example #26
    print ("threshould larm set {}".format(threshould_l))
    print ("threshould rarm set {}".format(threshould_r))
    print ("threshould larm cover set {}".format(threshould_cover_l))
    print ("threshould rarm cover set {}".format(threshould_cover_r))    
    
if __name__ == '__main__':
    rospy.init_node('count_points')

    pub_num_l = rospy.Publisher('/larm/num', Int32, queue_size=1)
    pub_num_r = rospy.Publisher('/rarm/num', Int32, queue_size=1)    
    # pub_drop_flag_l = rospy.Publisher('/drop_flag_larm', Bool, queue_size=1)
    # pub_drop_flag_r = rospy.Publisher('/drop_flag_rarm', Bool,  queue_size=1)
    pub_drop_flag = rospy.Publisher('/drop_flag_list', Int32MultiArray, queue_size=1)
    sub1 = message_filters.Subscriber("/in_hand_point/attention_clipper_larm/output/point_indices",PointIndices)
    sub2 = message_filters.Subscriber("/in_hand_point/attention_clipper_rarm/output/point_indices",PointIndices)
    sub3 = message_filters.Subscriber("/in_hand_point/attention_clipper_cover_larm/output/point_indices",PointIndices)
    sub4 = message_filters.Subscriber("/in_hand_point/attention_clipper_cover_rarm/output/point_indices",PointIndices)    

    rospy.Subscriber("/robot_to_mask_image_rarm/output",Image,count_rarm)
    # rospy.Subscriber("/mask_image_to_label/output",Image,count_rarm)    
    rospy.Subscriber("/robot_to_mask_image_larm/output",Image,count_larm)    
    fps = 0.1
    delay = 1 / fps * 0.5

    s = rospy.Service('count_points_threshould', CPthre, get_trigger)

    mf = message_filters.ApproximateTimeSynchronizer([sub1, sub2, sub3, sub4], 2, delay)
    mf.registerCallback(timer_cb)

    rospy.spin()
Code Example #27
    #Get output link
    output_link = agent.GetOutputLink()

    #-- ROS SOAR NODE INITIALIZATION-----------------------------------
    print("Initializing ROS SOAR node")
    rospy.init_node("soar_ros_node", anonymous=True)  #Always first

    # Creates a subscriber object for each topic
    # The messages to be synced must have the 'header' field or
    #  use the 'allow_headerless=True' in the TimeSynchronizer() function
    #  if this field is not present
    odom_sub = message_filters.Subscriber('/turtlebot2i/odom', Odometry)
    scan_sub = message_filters.Subscriber('/turtlebot2i/lidar/scan', LaserScan)

    # Make the topics sync through ApproximateTimeSynchronizer with 0.1s of tolerance
    ts = message_filters.ApproximateTimeSynchronizer([odom_sub, scan_sub], 10,
                                                     0.1)

    # Associate the synchronizer with a callback
    ts.registerCallback(topic_callback)

    pub = rospy.Publisher("soar_pub_topic", String, queue_size=10)

    dummy_pub = rospy.Publisher(
        "soar_sub_topic", String,
        queue_size=10)  # used for inputting debug data to soar_sub_topic

    #tasks_list_topic=rospy.Subscriber("soar_tasks_list_topic", String, topic_callback)
    dummy_tasks_list_topic = rospy.Publisher(
        "soar_tasks_list_topic", String,
        queue_size=10)  # used for inputting debug data to soar_tasks_list_topic
Code Example #28
                        'phone', 'world')


#################code_update_for Gyro incorporation#########################

if __name__ == "__main__":
    try:
        mag_sub = message_filters.Subscriber("/Magneto_topic_stamped",
                                             SensorMsgStamped)
        accel_sub = message_filters.Subscriber("/Accel_topic_stamped",
                                               SensorMsgStamped)
        #############added__code###########################################################
        gyro_sub = message_filters.Subscriber("/Gyro_topic_stamped",
                                              SensorMsgStamped)
        #############added__code###########################################################
        combined_sub = message_filters.ApproximateTimeSynchronizer(
            [accel_sub, mag_sub], queue_size=5, slop=0.1)
        combined_sub.registerCallback(got_accel_mag)
        rospy.spin()
    except rospy.ROSInterruptException:
        pass

########################
# TO UNDERSTAND FUSION BETWEEN ACCELEROMETER AND MAGNETOMETER
# Author : Kartik Prakash
# Date   : 7/Mar/2020
# STEPS:
# 1. Get the raw accel values and the magnetometer values
# 2. Try combining resultant accel and magnetometer to figure out RPY
#
#
########################
Code Example #29
File: synchronizer.py  Project: truder/info
#!/usr/bin/env python

import rospy
import message_filters
from geometry_msgs.msg import PoseStamped
from sensor_msgs.msg import CompressedImage

def callback(msg_in_pose, msg_in_image):
	rospy.loginfo("Synced Measurements:")
	rospy.loginfo(msg_in_pose)
	rospy.loginfo(msg_in_image.format)

if __name__ == '__main__':
	# Initialize
	rospy.init_node('synchronizer_py')

	sub_pose = message_filters.Subscriber('/emulated_uav/pose', PoseStamped, queue_size=10)
	sub_image = message_filters.Subscriber('/emulated_uav/image/compressed', CompressedImage, queue_size=10)
	ts = message_filters.ApproximateTimeSynchronizer([sub_pose, sub_image], 10, 0.1, allow_headerless=True)
	ts.registerCallback(callback)

	# Loop here until quit
	try:
		rospy.loginfo("Started synchronizer node...")
		rospy.spin()
	except rospy.ROSInterruptException:
		# Shutdown
		rospy.loginfo("Shutting down synchronizer!")
Code Example #30
    def reset_time(self, msg):
        print('reset time')
        self.ts = message_filters.ApproximateTimeSynchronizer(
            self.subscribers, 5, 0.001)
        self.ts.registerCallback(self.callback)