def execute(self,userdata):
    """
    - move head to the right place
    - get a point cloud
    returns: success, failure
    """
    global HANDLES
    HANDLES = []
    # Move both arms clear of the camera view before acquiring the cloud.
    Globals.pr2.larm.goto_posture('side')
    Globals.pr2.rarm.goto_posture('side')
    Globals.pr2.join_all()
    if HUMAN_GET_ROPE:
        # A human supplies the rope points; bring them into the robot base frame,
        # visualize them in rviz, then resample to a fixed-length curve.
        cloud, cloud_frame = human_get_rope()
        cloud = ros_utils.transform_points(
            cloud, Globals.pr2.tf_listener, "base_footprint", cloud_frame)
        pose_array = conversions.array_to_pose_array(cloud, "base_footprint")
        HANDLES.append(
            Globals.rviz.draw_curve(pose_array, id=3250864, rgba=(0,0,1,1)))
        cloud = curves.unif_resample(cloud, 100, tol=.01)
    else:
        # Grab one preprocessed point cloud and express it in base_footprint.
        msg = rospy.wait_for_message("/preprocessor/points",
                                     sensor_msgs.msg.PointCloud2)
        cloud, _rgb = ros_utils.pc2xyzrgb(msg)
        cloud = cloud.reshape(-1,3)
        cloud = ros_utils.transform_points(
            cloud, Globals.pr2.tf_listener, "base_footprint",
            msg.header.frame_id)
    userdata.points = cloud
    return "success"
def load_cloud_from_sensor(input_topic, frame):
    """Block for one PointCloud2 on `input_topic`; return its points as an
    Nx3 array expressed in `frame`."""
    import sensor_msgs.msg
    cloud_msg = rospy.wait_for_message(input_topic, sensor_msgs.msg.PointCloud2)
    pts, _rgb = ros_utils.pc2xyzrgb(cloud_msg)
    pts = pts.reshape(-1,3)
    return ros_utils.transform_points(
        pts, Globals.tf_listener, frame, cloud_msg.header.frame_id)
def load_cloud_from_sensor(input_topic, frame):
    """Wait for a single PointCloud2 message on `input_topic` and return an
    Nx3 point array transformed into the target `frame`."""
    import sensor_msgs.msg
    msg = rospy.wait_for_message(input_topic, sensor_msgs.msg.PointCloud2)
    points, _ = ros_utils.pc2xyzrgb(msg)
    points = ros_utils.transform_points(
        points.reshape(-1, 3), Globals.tf_listener, frame, msg.header.frame_id)
    return points
def read_from_ros(cloud_topic, frame='base_footprint'):
    '''format: ros:/my/topic/points(:frame)

    Block for one PointCloud2 on `cloud_topic` and return its points as an
    Nx3 array expressed in `frame`.
    '''
    import rospy
    from brett2 import ros_utils
    # fix: was `import sensor_msgs`, which does not load the generated
    # `sensor_msgs.msg` submodule used below (siblings in this file import
    # `sensor_msgs.msg` explicitly).
    import sensor_msgs.msg
    # A node must exist before any ROS communication can happen.
    if rospy.get_name() == '/unnamed':
        rospy.init_node('cloud_reader', disable_signals=True)
    msg = rospy.wait_for_message(cloud_topic, sensor_msgs.msg.PointCloud2)
    xyz, rgb = ros_utils.pc2xyzrgb(msg)
    xyz = xyz.reshape(-1, 3)
    tf_listener = ros_utils.get_tf_listener()
    return ros_utils.transform_points(xyz, tf_listener, frame, msg.header.frame_id)
def main(): import argparse parser = argparse.ArgumentParser() parser.add_argument('--dataset', default='overhand_knot', help='name of dataset') parser.add_argument('--method', choices=['geodesic_dist', 'shape_context', 'geodesic_dist+shape_context'], default='geodesic_dist', help='matching algorithm') parser.add_argument('--input_mode', choices=['from_dataset', 'kinect', 'file'], default='kinect', help='input cloud acquisition method') parser.add_argument('--cloud_topic', default='/preprocessor/kinect1/points', help='ros topic for kinect input mode') parser.add_argument('--input_file', help='input file for --input_mode=file') parser.add_argument('--show_shape_context', action='store_true') args = parser.parse_args() dataset = recognition.DataSet.LoadFromTaskDemos(args.dataset) # read input to match input_xyz = None if args.input_mode == 'from_dataset': key = select_from_list(sorted(dataset.keys())) input_xyz = np.squeeze(np.asarray(dataset[key]['cloud_xyz'])) elif args.input_mode == 'kinect': import rospy from brett2 import ros_utils import sensor_msgs import time if rospy.get_name() == '/unnamed': rospy.init_node('matcher', disable_signals=True) msg = rospy.wait_for_message(args.cloud_topic, sensor_msgs.msg.PointCloud2) xyz, rgb = ros_utils.pc2xyzrgb(msg) xyz = xyz.reshape(-1, 3) tf_listener = ros_utils.get_tf_listener() time.sleep(1) # so tf works input_xyz = ros_utils.transform_points(xyz, tf_listener, "ground", msg.header.frame_id) elif args.input_mode == 'file': input_xyz = np.load(args.input_file)['cloud_xyz'] else: raise NotImplementedError # create the matcher matcher = None if args.method == 'geodesic_dist': matcher = recognition.GeodesicDistMatcher(dataset) elif args.method == 'shape_context': matcher = recognition.ShapeContextMatcher(dataset) elif args.method == 'geodesic_dist+shape_context': matcher = recognition.CombinedNNMatcher(dataset, [recognition.GeodesicDistMatcher, recognition.ShapeContextMatcher], [1, 0.1]) else: raise NotImplementedError # do 
the matching best_match_name, best_match_cost = matcher.match(input_xyz) print 'Best match name:', best_match_name, 'with cost:', best_match_cost draw_comparison(left_cloud=input_xyz, right_cloud=dataset[best_match_name], show_shape_context=args.show_shape_context)
def execute(self, userdata): """ - move head to the right place - get a point cloud returns: success, failure """ Globals.handles = [] draw_table() Globals.pr2.rgrip.set_angle(.08) Globals.pr2.lgrip.set_angle(.08) Globals.pr2.join_all() if not args.use_tracking: Globals.pr2.larm.goto_posture('side') Globals.pr2.rarm.goto_posture('side') else: try: increment_pose(Globals.pr2.rarm, [0, 0, .02]) except PR2.IKFail: print "couldn't raise right arm" try: increment_pose(Globals.pr2.larm, [0, 0, .02]) except PR2.IKFail: print "couldn't raise left arm" Globals.pr2.rgrip.set_angle(.08) Globals.pr2.lgrip.set_angle(.08) Globals.pr2.join_all() if args.delay_before_look > 0: rospy.loginfo('sleeping for %f secs before looking', args.delay_before_look) rospy.sleep(args.delay_before_look) if args.test: xyz = np.squeeze( np.asarray(demos[select_from_list(demos.keys())]["cloud_xyz"])) elif args.use_tracking: msg = rospy.wait_for_message("/tracker/object", TrackedObject) xyz = [(pt.x, pt.y, pt.z) for pt in msg.rope.nodes] else: msg = rospy.wait_for_message(args.cloud_topic, sensor_msgs.msg.PointCloud2) xyz, rgb = ros_utils.pc2xyzrgb(msg) xyz = xyz.reshape(-1, 3) xyz = ros_utils.transform_points(xyz, Globals.pr2.tf_listener, "base_footprint", msg.header.frame_id) userdata.points = xyz return "success"
def read_from_ros(cloud_topic, frame='ground'):
    '''format: ros:/my/topic/points(:frame)

    Block for one PointCloud2 on `cloud_topic` and return its points as an
    Nx3 array expressed in `frame`.
    '''
    import rospy
    from brett2 import ros_utils
    # fix: was `import sensor_msgs`, which does not load the generated
    # `sensor_msgs.msg` submodule used below.
    import sensor_msgs.msg
    # A node must exist before any ROS communication can happen.
    if rospy.get_name() == '/unnamed':
        rospy.init_node('cloud_reader', disable_signals=True)
    msg = rospy.wait_for_message(cloud_topic, sensor_msgs.msg.PointCloud2)
    xyz, rgb = ros_utils.pc2xyzrgb(msg)
    xyz = xyz.reshape(-1, 3)
    tf_listener = ros_utils.get_tf_listener()
    return ros_utils.transform_points(xyz, tf_listener, frame, msg.header.frame_id)
# Clone the OpenRAVE environment and populate it with collision obstacles
# (a table box and, per tabletop cluster, a cylinder) built from one
# PointCloud2 read off /drop/points and transformed into base_footprint.
# NOTE(review): the source formatting was mangled; it is ambiguous whether
# `#print` comments out only a debug print or the entire
# `kinbodies.create_cylinder(...)` call that follows it — confirm against
# the original file before relying on cylinder obstacles being created.
def create_obstacle_env(env): clone=env.CloneSelf(1) point_cloud = rospy.wait_for_message("/drop/points", sm.PointCloud2) xyz, bgr = ros_utils.pc2xyzrgb(point_cloud) xyz = ros_utils.transform_points(xyz, Globals.pr2.tf_listener, "base_footprint", point_cloud.header.frame_id) #xyz = xyz[250:, 250:] clusters = tabletop.segment_tabletop_scene(xyz,'base_footprint') bounds = tabletop.get_table_dimensions(xyz) kinbodies.create_box_from_bounds(clone,bounds,"table") print len(clusters),"clusters" for (i_clu,clu) in enumerate(clusters): x,y,z,r,h = tabletop.get_cylinder(clu) #print kinbodies.create_cylinder(clone, (x,y,z),r,h, name="cylinder%i"%i_clu) return clone
def execute(self,userdata): """ - move head to the right place - get a point cloud returns: success, failure """ Globals.handles = [] draw_table() Globals.pr2.rgrip.set_angle(.08) Globals.pr2.lgrip.set_angle(.08) Globals.pr2.join_all() if not args.use_tracking: Globals.pr2.larm.goto_posture('side') Globals.pr2.rarm.goto_posture('side') else: try: increment_pose(Globals.pr2.rarm, [0,0,.02]) except PR2.IKFail: print "couldn't raise right arm" try: increment_pose(Globals.pr2.larm, [0,0,.02]) except PR2.IKFail: print "couldn't raise left arm" Globals.pr2.rgrip.set_angle(.08) Globals.pr2.lgrip.set_angle(.08) Globals.pr2.join_all() if args.delay_before_look > 0: rospy.loginfo('sleeping for %f secs before looking', args.delay_before_look) rospy.sleep(args.delay_before_look) if args.test: xyz = np.squeeze(np.asarray(demos[select_from_list(demos.keys())]["cloud_xyz"])) elif args.use_tracking: msg = rospy.wait_for_message("/tracker/object", TrackedObject) xyz = [(pt.x, pt.y, pt.z) for pt in msg.rope.nodes] else: msg = rospy.wait_for_message(args.cloud_topic, sensor_msgs.msg.PointCloud2) xyz, rgb = ros_utils.pc2xyzrgb(msg) xyz = xyz.reshape(-1,3) xyz = ros_utils.transform_points(xyz, Globals.pr2.tf_listener, "base_footprint", msg.header.frame_id) ELOG.log('LookAtObject', 'xyz', xyz) userdata.points = xyz return "success"
def main(): import argparse parser = argparse.ArgumentParser() parser.add_argument('--dataset', default='overhand_knot', help='name of dataset') parser.add_argument('--method', choices=[ 'geodesic_dist', 'shape_context', 'geodesic_dist+shape_context' ], default='geodesic_dist', help='matching algorithm') parser.add_argument('--input_mode', choices=['from_dataset', 'kinect', 'file'], default='kinect', help='input cloud acquisition method') parser.add_argument('--cloud_topic', default='/preprocessor/kinect1/points', help='ros topic for kinect input mode') parser.add_argument('--input_file', help='input file for --input_mode=file') parser.add_argument('--show_shape_context', action='store_true') args = parser.parse_args() dataset = recognition.DataSet.LoadFromTaskDemos(args.dataset) # read input to match input_xyz = None if args.input_mode == 'from_dataset': key = select_from_list(sorted(dataset.keys())) input_xyz = np.squeeze(np.asarray(dataset[key]['cloud_xyz'])) elif args.input_mode == 'kinect': import rospy from brett2 import ros_utils import sensor_msgs import time if rospy.get_name() == '/unnamed': rospy.init_node('matcher', disable_signals=True) msg = rospy.wait_for_message(args.cloud_topic, sensor_msgs.msg.PointCloud2) xyz, rgb = ros_utils.pc2xyzrgb(msg) xyz = xyz.reshape(-1, 3) tf_listener = ros_utils.get_tf_listener() time.sleep(1) # so tf works input_xyz = ros_utils.transform_points(xyz, tf_listener, "ground", msg.header.frame_id) elif args.input_mode == 'file': input_xyz = np.load(args.input_file)['cloud_xyz'] else: raise NotImplementedError # create the matcher matcher = None if args.method == 'geodesic_dist': matcher = recognition.GeodesicDistMatcher(dataset) elif args.method == 'shape_context': matcher = recognition.ShapeContextMatcher(dataset) elif args.method == 'geodesic_dist+shape_context': matcher = recognition.CombinedNNMatcher( dataset, [recognition.GeodesicDistMatcher, recognition.ShapeContextMatcher], [1, 0.1]) else: raise NotImplementedError # 
do the matching best_match_name, best_match_cost = matcher.match(input_xyz) print 'Best match name:', best_match_name, 'with cost:', best_match_cost draw_comparison(left_cloud=input_xyz, right_cloud=dataset[best_match_name], show_shape_context=args.show_shape_context)
# Manual test script: segment the tabletop scene from one /drop/points cloud
# (transformed into base_footprint) with 2d/3d plotting enabled; the
# `if False:` sections are disabled service-call experiments (push).
# NOTE(review): this chunk is cut off mid-way — the trailing `if False:` has
# no body here; the rest lives outside this view. Code left verbatim.
roslib.load_manifest("verb_msgs") from verb_msgs.srv import * from brett2.ros_utils import xyzrgb2pc import geometry_msgs.msg as gm import sensor_msgs.msg as sm import numpy as np if __name__ == "__main__": if rospy.get_name() == "/unnamed": rospy.init_node("test_tabletop", disable_signals=True) pr2 = PR2.create() rviz = ros_utils.RvizWrapper.create() rospy.sleep(1) point_cloud = rospy.wait_for_message("/drop/points", sm.PointCloud2) xyz, bgr = ros_utils.pc2xyzrgb(point_cloud) xyz = ros_utils.transform_points(xyz, pr2.tf_listener, "base_footprint", point_cloud.header.frame_id) clusters = tabletop.segment_tabletop_scene(xyz, "base_footprint", plotting2d=True, plotting3d=True) if False: push_svc = rospy.ServiceProxy("push", Push) req = PushRequest() xyz0 = clusters[np.argmax(map(len, clusters))].reshape(1, -1, 3) rgb0 = np.zeros(xyz0.shape, 'uint8') point_cloud = xyzrgb2pc(xyz0, rgb0, "base_footprint") req.point_cloud = point_cloud push_svc.call(req) if False:
def grip_to_world_transform_tf(point):
    """Transform a single point between the base_footprint and gripper frames
    using tf, returning the transformed 3-vector.

    NOTE(review): despite the name, the target frame passed here is
    `gripper_frame_name` and the source frame is "base_footprint" — confirm
    the intended direction against `transform_points`' argument order.
    """
    listener = ru.get_tf_listener()
    transformed = ru.transform_points(
        np.array([point]), listener, gripper_frame_name, "base_footprint")
    return transformed[0]
def to_gripper_frame_tf_listener(pc, gripper_frame_name):
    """Express the point cloud `pc` (given in base_footprint) in the frame
    named by `gripper_frame_name`, using a tf listener."""
    listener = ru.get_tf_listener()
    return ru.transform_points(pc, listener, gripper_frame_name, "base_footprint")
# Fragment of a bag-processing script: replays a rosbag, feeding /tf messages
# to a TransformListener and transforming each point cloud (detected via the
# `row_step` attribute) into the trajectory's reference frame, collecting
# per-cloud timestamps; then splits the trajectory into named segments in an
# h5 output file. Relies on names defined outside this view
# (`bagfile`, `yamlfile`, `traj`, `h5out`, `args`). Code left verbatim.
# NOTE(review): `if seg_name in h5out: del traj[seg_name]` tests membership in
# `h5out` but deletes from `traj` — looks like it should delete from `h5out`;
# confirm against the full file before changing.
bag = rosbag.Bag(bagfile) seg_dicts = yamlfile["segments"] listener = mytf.TransformListener() t_start_bag = bag.read_messages().next()[2].to_sec() cloud_times, cloud_xyzs, cloud_bgrs = [],[],[] for (topic, msg, t) in bag.read_messages(): if topic=="/tf": listener.callback(msg) elif hasattr(msg, "row_step"): xyz, bgr = pc2xyzrgb(msg) try: xyz_tf = transform_points(xyz, listener, traj["ref_frame"].value, args.kinect_frame) cloud_xyzs.append(xyz_tf) cloud_bgrs.append(bgr) cloud_times.append(t.to_sec()) except Exception: print "failed to transform points" traceback.print_exc() cloud_times = np.array(cloud_times) cloud_times -= t_start_bag traj_times = np.asarray(traj["times"])-t_start_bag for (i_seg,seg_dict) in enumerate(seg_dicts): i_start, i_stop = np.searchsorted(traj_times, (seg_dict["start"],seg_dict["stop"])) seg_name = "%s.%.2i"%(traj.name,i_seg) if seg_name in h5out: del traj[seg_name] seg = h5out.create_group(seg_name)
# Manual test script (variant of the one above): segment the tabletop scene
# from one /drop/points cloud in base_footprint with plotting enabled; the
# `if False:` sections are disabled service-call experiments (push, pour).
# NOTE(review): this chunk is cut off mid-statement after `sort_inds = ...`;
# the remainder lives outside this view. Code left verbatim.
roslib.load_manifest("verb_msgs") from verb_msgs.srv import * from brett2.ros_utils import xyzrgb2pc import geometry_msgs.msg as gm import sensor_msgs.msg as sm import numpy as np if __name__ == "__main__": if rospy.get_name() == "/unnamed": rospy.init_node("test_tabletop", disable_signals=True) pr2 = PR2.create() rviz = ros_utils.RvizWrapper.create() rospy.sleep(1) point_cloud = rospy.wait_for_message("/drop/points", sm.PointCloud2) xyz, bgr = ros_utils.pc2xyzrgb(point_cloud) xyz = ros_utils.transform_points(xyz, pr2.tf_listener, "base_footprint", point_cloud.header.frame_id) clusters = tabletop.segment_tabletop_scene(xyz,"base_footprint",plotting2d=True, plotting3d=True) if False: push_svc = rospy.ServiceProxy("push",Push) req = PushRequest() xyz0 = clusters[np.argmax(map(len,clusters))].reshape(1,-1,3) rgb0 = np.zeros(xyz0.shape,'uint8') point_cloud = xyzrgb2pc(xyz0, rgb0, "base_footprint") req.point_cloud = point_cloud push_svc.call(req) if False: pour_srv = rospy.ServiceProxy("pour", Pour) req = PourRequest() sort_inds = np.argsort(map(len,clusters))