def run (self): """ Publishes the transforms stored. """ while True: if self.publish_pc and self.cameras is not None: data=self.cameras.get_RGBD() for i,rgbd in data.items(): if rgbd['depth'] is None or rgbd['rgb'] is None: print 'wut' continue camera_frame = 'camera%d_depth_optical_frame'%(i+1) xyz = clouds.depth_to_xyz(rgbd['depth'], asus_xtion_pro_f) pc = ru.xyzrgb2pc(xyz, rgbd['rgb'], camera_frame) ar_cam = gmt.get_ar_marker_poses(None, None, pc=pc) self.pc_pubs[i].publish(pc) marker_frame = 'ar_marker%d_camera%d' for marker in ar_cam: trans, rot = conversions.hmat_to_trans_rot(ar_cam[marker]) self.tf_broadcaster.sendTransform(trans, rot, rospy.Time.now(), marker_frame%(marker,i+1), camera_frame) if self.ready: for parent, child in self.transforms: trans, rot = self.transforms[parent, child] self.tf_broadcaster.sendTransform(trans, rot, rospy.Time.now(), child, parent) time.sleep(1/self.rate)
def get_ar_marker_poses (rgb, depth, pc=None):
    """
    Query the AR-marker service for marker poses in a point cloud.

    In order to run this, ar_marker_service needs to be running.
    If `pc` is not supplied, one is built from `rgb` and `depth`.
    Returns {marker_id: 4x4 homogeneous transform}.
    """
    global getMarkers, req, ar_initialized
    # Lazy one-time setup of the node and the service proxy.
    if not ar_initialized:
        if rospy.get_name() == '/unnamed':
            rospy.init_node('ar_tfm')
        getMarkers = rospy.ServiceProxy("getMarkers", MarkerPositions)
        req = MarkerPositionsRequest()
        ar_initialized = True
    # Build the cloud only when the caller did not provide one.
    if pc is None:
        pc = ru.xyzrgb2pc(clouds.depth_to_xyz(depth, asus_xtion_pro_f),
                          rgb, '/camera_frame')
    req.pc = pc
    response = getMarkers(req)
    return {m.id: conversions.pose_to_hmat(m.pose.pose)
            for m in response.markers.markers}
def visualize_pointcloud (camera_frame="camera_depth_optical_frame", device_id="#1"): """ Visualize point clouds from openni grabber through ROS. """ if rospy.get_name() == '/unnamed': rospy.init_node("visualize_pointcloud") pc_pub = rospy.Publisher("camera_points", PointCloud2) sleeper = rospy.Rate(30) grabber = cpr.CloudGrabber(device_id) grabber.startRGBD() print "Streaming now from frame %s: Pointclouds only."%camera_frame while True: try: r, d = grabber.getRGBD() pc = ru.xyzrgb2pc(clouds.depth_to_xyz(d, asus_xtion_pro_f), r, camera_frame) pc_pub.publish(pc) sleeper.sleep() except KeyboardInterrupt: print "Keyboard interrupt. Exiting." break
def publish_transform_markers(grabber, marker, T, from_frame, to_frame, rate=100): from visualization_msgs.msg import Marker from sensor_msgs.msg import PointCloud2 print colorize("Transform : ", "yellow", True) print T marker_frame = "ar_frame" tf_pub = tf.TransformBroadcaster() pc_pub = rospy.Publisher("camera_points", PointCloud2) trans = T[0:3,3] rot = tf.transformations.quaternion_from_matrix(T) sleeper = rospy.Rate(10) while True: try: r, d = grabber.getRGBD() ar_tfm = get_ar_transform_id (d, r, marker) pc = ru.xyzrgb2pc(clouds.depth_to_xyz(d, asus_xtion_pro_f), r, from_frame) pc_pub.publish(pc) tf_pub.sendTransform(trans, rot, rospy.Time.now(), to_frame, from_frame) try: trans_m, rot_m = conversions.hmat_to_trans_rot(ar_tfm) tf_pub.sendTransform(trans_m, rot_m, rospy.Time.now(), marker_frame, from_frame) except: pass sleeper.sleep() except KeyboardInterrupt: break
def get_pc(self, index=None):
    """
    Load the RGB and depth images at `index` and return them as a point
    cloud in self.frame_id.

    index : frame index into self.rgbs_fnames / self.depth_fnames;
            defaults to self.index when omitted.
    """
    # BUG FIX: was `if not index:`, which also triggered for index == 0
    # and silently substituted self.index for the first frame.
    if index is None:
        index = self.index
    rgb = cv2.imread(self.rgbs_fnames[index])
    # flag 2 == cv2.IMREAD_ANYDEPTH: keep the 16-bit depth values intact.
    depth = cv2.imread(self.depth_fnames[index], 2)
    xyz = clouds.depth_to_xyz(depth, asus_xtion_pro_f)
    pc = ru.xyzrgb2pc(xyz, rgb, frame_id=self.frame_id, use_time_now=False)
    return pc
def get_ar_transform_id (depth, rgb, idm=None):
    """
    In order to run this, ar_marker_service needs to be running.

    Builds a point cloud from depth/rgb, queries the marker service, and
    returns {marker_id: hmat} -- or, when `idm` is given, just that
    marker's hmat (None if it was not seen).
    Uses the module-level `req` and `getMarkers` service proxy.
    """
    req.pc = ru.xyzrgb2pc(clouds.depth_to_xyz(depth, asus_xtion_pro_f),
                          rgb, '/camera_link')
    res = getMarkers(req)
    marker_tfm = {marker.id: conversions.pose_to_hmat(marker.pose.pose)
                  for marker in res.markers.markers}
    # BUG FIX: was `if not idm:`, which treated marker id 0 like "no id
    # requested" and returned the whole dict instead of marker 0's pose.
    if idm is None:
        return marker_tfm
    if idm not in marker_tfm:
        return None
    return marker_tfm[idm]
def get_tfm_of_camera(ind1, ind2):
    """
    Estimate the rigid transform between camera 1 and camera 2 from one
    pair of recorded frames, using AR marker 1 seen by both.

    ind1, ind2 : frame indices into camera 1's and camera 2's recordings.
    Returns ar1 . inv(ar2) (camera1 <- camera2), or None when marker 1
    is missing in either frame.
    """
    rgbd1_dir = '/home/sibi/sandbox/human_demos/hd_data/demos/sibi_demo/camera_#1'
    rgbd2_dir = '/home/sibi/sandbox/human_demos/hd_data/demos/sibi_demo/camera_#2'
    rgbs1fnames, depths1fnames, stamps1 = eu.get_rgbd_names_times(rgbd1_dir)
    rgbs2fnames, depths2fnames, stamps2 = eu.get_rgbd_names_times(rgbd2_dir)
    winname = 'abcd'
    ar1, ar2 = None, None
    rgb1 = cv2.imread(rgbs1fnames[ind1])
    assert rgb1 is not None
    depth1 = cv2.imread(depths1fnames[ind1], 2)
    assert depth1 is not None
    # BUG FIX: original was `cv2.imshow(winname,)` -- a TypeError (image
    # argument missing). Show the frame and pump the GUI event loop.
    cv2.imshow(winname, rgb1)
    cv2.waitKey(1)
    xyz1 = clouds.depth_to_xyz(depth1, asus_xtion_pro_f)
    pc1 = ru.xyzrgb2pc(xyz1, rgb1, frame_id='', use_time_now=False)
    ar_tfms1 = get_ar_marker_poses(pc1)
    if ar_tfms1 and 1 in ar_tfms1:
        blueprint("Got markers " + str(ar_tfms1.keys()) + " at time %f" % stamps1[ind1])
        ar1 = ar_tfms1[1]
    rgb2 = cv2.imread(rgbs2fnames[ind2])
    assert rgb2 is not None
    depth2 = cv2.imread(depths2fnames[ind2], 2)
    assert depth2 is not None
    xyz2 = clouds.depth_to_xyz(depth2, asus_xtion_pro_f)
    pc2 = ru.xyzrgb2pc(xyz2, rgb2, frame_id='', use_time_now=False)
    ar_tfms2 = get_ar_marker_poses(pc2)
    if ar_tfms2 and 1 in ar_tfms2:
        # BUG FIX: log line used stamps1[ind2]; camera 2's timestamps live
        # in stamps2.
        blueprint("Got markers " + str(ar_tfms2.keys()) + " at time %f" % stamps2[ind2])
        ar2 = ar_tfms2[1]
    if ar1 is None or ar2 is None:
        return None
    return ar1.dot(nlg.inv(ar2))
def checkerboard_loop (): rospy.init_node("checkerboard_tracker") grabber = cpr.CloudGrabber() grabber.startRGBD() pc_pub = rospy.Publisher("cb_cloud", PointCloud2) pose_pub = rospy.Publisher("cb_pose", PoseStamped) rate = rospy.Rate(10) while True: try: rgb, depth = grabber.getRGBD() xyz = clouds.depth_to_xyz(depth, asus_xtion_pro_f) rtn, corners = get_corners_rgb (rgb) points = get_xyz_from_corners(corners, xyz) print "Shape of points: ", points.shape if points.any(): try: tfm = get_svd_tfm_from_points(points) except: continue print tfm pc = ros_utils.xyzrgb2pc(xyz, rgb, "/map") pose = PoseStamped() pose.pose = conversions.hmat_to_pose(tfm) pc.header.frame_id = pose.header.frame_id = "/map" pose.header.stamp = pose.header.stamp = rospy.Time.now() pose_pub.publish(pose) pc_pub.publish(pc) rate.sleep() except KeyboardInterrupt: break
def visualize_ar (): """ Visualize point clouds from openni grabber and AR marker transforms through ROS. """ global getMarkers if rospy.get_name() == '/unnamed': rospy.init_node("visualize_ar") camera_frame = "camera_depth_optical_frame" tf_pub = tf.TransformBroadcaster() pc_pub = rospy.Publisher("camera_points", PointCloud2) sleeper = rospy.Rate(30) getMarkers = rospy.ServiceProxy("getMarkers", MarkerPositions) grabber= cpr.CloudGrabber() grabber.startRGBD() print "Streaming now." while True: try: r, d = grabber.getRGBD() ar_tfms = get_ar_transform_id (d, r) pc = ru.xyzrgb2pc(clouds.depth_to_xyz(d, asus_xtion_pro_f), r, camera_frame) pc_pub.publish(pc) for i in ar_tfms: try: trans, rot = conversions.hmat_to_trans_rot(ar_tfms[i]) tf_pub.sendTransform(trans, rot, rospy.Time.now(), "ar_marker_%d"%i, camera_frame) except: print "Warning: Problem with AR transform." pass sleeper.sleep() except KeyboardInterrupt: print "Keyboard interrupt. Exiting." break
def get_ar_transform_id_unorganized (xyz, rgb, idm=None): """ In order to run this, ar_marker_service needs to be running. """ print xyz print rgb xyz = xyz.reshape(xyz.shape[0]*xyz.shape[1], xyz.shape[2]) rgb = rgb.reshape(rgb.shape[0]*rgb.shape[1], rgb.shape[2]) print xyz print rgb req.pc = ru.xyzrgb2pc(xyz, rgb, '/camera_link') res = getMarkers(req) marker_tfm = {marker.id:conversions.pose_to_hmat(marker.pose.pose) for marker in res.markers.markers} if not idm: return marker_tfm if idm not in marker_tfm: return None return marker_tfm[idm]
def get_ar_marker_poses (rgb, depth):
    """
    In order to run this, ar_marker_service needs to be running.

    Builds a '/base_footprint'-framed point cloud from rgb/depth, queries
    the marker service, and returns {marker_id: 4x4 homogeneous transform}.
    """
    if rospy.get_name() == '/unnamed':
        rospy.init_node('keypoints')
    # Fresh proxy and request per call (no module-level caching here).
    getMarkers = rospy.ServiceProxy("getMarkers", MarkerPositions)
    cloud = ru.xyzrgb2pc(clouds.depth_to_xyz(depth, asus_xtion_pro_f),
                         rgb, '/base_footprint')
    request = MarkerPositionsRequest()
    request.pc = cloud
    response = getMarkers(request)
    return {m.id: conversions.pose_to_hmat(m.pose.pose)
            for m in response.markers.markers}
import numpy as np, numpy.linalg as nlg import rospy import cyni import roslib; roslib.load_manifest('ar_track_service') from ar_track_service.srv import * from hd_utils import ros_utils as ru, conversions np.set_printoptions(precision=5, suppress=True) rospy.init_node('test_pcd') xyzrgb1 = cyni.readPCD('/home/sibi/sandbox/pcd_service/clouds/cloud1.pcd') pc1 = ru.xyzrgb2pc(xyzrgb1[:,:,0:3],xyzrgb1[:,:,3:6],'') xyzrgb2 = cyni.readPCD('/home/sibi/sandbox/pcd_service/clouds/cloud2.pcd') pc2 = ru.xyzrgb2pc(xyzrgb2[:,:,0:3],xyzrgb2[:,:,3:6],'') mserv = rospy.ServiceProxy('getMarkers', MarkerPositions) req = MarkerPositionsRequest() req.pc = pc1 res1 = mserv(req) req.pc = pc2 res2 = mserv(req) m1 = res1.markers.markers[1] m2 = res2.markers.markers[1] tfm1 = conversions.pose_to_hmat(m1.pose.pose) tfm2 = conversions.pose_to_hmat(m2.pose.pose) bb = np.array([[4.807, -1.239, -4.11,3.495],
def save_observations_rgbd(demo_type, demo_name, save_file=None):
    """
    Extract data from all sensors and store into demo.data file.

    For each gripper ('l'/'r') gathers:
      - tool-tip transforms observed by every camera (via AR markers),
      - tool-tip transforms from the hydra (from the demo rosbag),
      - potentiometer angle readings,
    all expressed relative to camera 1, then pickles the result to
    `save_file` (default demo_names.data_name) inside the demo directory.
    Relies on module-level config: demo_files_dir, demo_names, verbose,
    displayImages, tfm_link_rof, gripper_trans_marker_tooltip.
    """
    global setCalib
    # Temp file to show that data is already being extracted
    demo_dir = osp.join(demo_files_dir, demo_type, demo_name)
    with open(osp.join(demo_dir, demo_names.extract_data_temp), 'w') as fh:
        fh.write('Extracting...')

    calib_file_path = osp.join(demo_dir, demo_names.calib_name)
    bag_file = osp.join(demo_dir, demo_names.bag_name)

    # Per-camera metadata: type ("rgbd" vs. rgb-only) and camera model.
    # NOTE(review): yaml.load without an explicit Loader executes arbitrary
    # tags on untrusted input -- fine only if these files are trusted.
    with open(osp.join(demo_dir, demo_names.camera_types_name)) as fh:
        camera_types = yaml.load(fh)
    with open(osp.join(demo_dir, demo_names.camera_models_name)) as fh:
        camera_models = yaml.load(fh)
    num_cameras = len(camera_types)

    video_dirs = {}
    for i in range(1, num_cameras + 1):
        video_dirs[i] = osp.join(demo_dir, demo_names.video_dir % i)

    c_frames = {}
    for i in range(1, num_cameras + 1):
        c_frames[i] = 'camera%i_link' % (i)
    hydra_frame = 'hydra_base'

    # tfm_c1[i]: camera_i -> camera_1 (optical frames); camera 1 is identity.
    tfm_c1 = {i: None for i in range(1, num_cameras + 1)}
    tfm_c1[1] = np.eye(4)
    tfm_c1_h = None  # camera_1 -> hydra_base

    with open(calib_file_path, 'r') as fh:
        calib_data = cPickle.load(fh)
    bag = rosbag.Bag(bag_file)

    # Pull the needed transforms out of the calibration data; frame names
    # may appear with or without a leading '/'.
    for tfm in calib_data['transforms']:
        if tfm['parent'] == c_frames[1] or tfm['parent'] == '/' + c_frames[1]:
            if tfm['child'] == hydra_frame or tfm['child'] == '/' + hydra_frame:
                # link-frame -> optical-frame correction applied on the left only.
                tfm_c1_h = nlg.inv(tfm_link_rof).dot(tfm['tfm'])
            else:
                for i in range(2, num_cameras + 1):
                    if tfm['child'] == c_frames[i] or tfm['child'] == '/' + c_frames[i]:
                        tfm_c1[i] = nlg.inv(tfm_link_rof).dot(tfm['tfm']).dot(tfm_link_rof)

    # NOTE(review): `tfm_c1[s] != None` on a numpy array is elementwise on
    # modern numpy (ambiguous truth value); this relied on old-numpy
    # scalar semantics -- TODO confirm the numpy version in use.
    if tfm_c1_h is None or not all([tfm_c1[s] != None for s in tfm_c1]):
        redprint("Calibration does not have required transforms")
        return

    if not calib_data.get('grippers'):
        redprint("Gripper not found.")
        return

    grippers = {}
    data = {}
    data['T_cam2hbase'] = tfm_c1_h
    # Build one GripperLite per side and the per-sensor result containers.
    for lr, gdata in calib_data['grippers'].items():
        gr = gripper_lite.GripperLite(lr, gdata['ar'],
                                      trans_marker_tooltip=gripper_trans_marker_tooltip[lr])
        gr.reset_gripper(lr, gdata['tfms'], gdata['ar'], gdata['hydra'])
        grippers[lr] = gr
        data[lr] = {'hydra': [],
                    'pot_angles': [],
                    'T_tt2hy': gr.get_rel_transform('tool_tip', gr.hydra_marker)}
        ## place-holder for AR marker transforms:
        for i in xrange(num_cameras):
            data[lr]['camera%d' % (i + 1)] = []

    winname = 'cam_image'
    cam_counts = []  # number of recorded frames per camera, for the summary below
    for i in range(1, num_cameras + 1):
        if verbose:
            yellowprint('Camera%i' % i)
        if camera_types[i] == "rgbd":
            rgb_fnames, depth_fnames, stamps = eu.get_rgbd_names_times(video_dirs[i])
        else:
            rgb_fnames, stamps = eu.get_rgbd_names_times(video_dirs[i], depth=False)
        cam_count = len(stamps)
        cam_counts.append(cam_count)

        if camera_types[i] == "rgbd":
            # RGBD path: build a point cloud per frame and detect markers in it.
            for ind in rgb_fnames:
                rgb = cv2.imread(rgb_fnames[ind])
                if displayImages:
                    cv2.imshow(winname, rgb)
                    cv2.waitKey(1)
                assert rgb is not None
                # flag 2 == IMREAD_ANYDEPTH: keep 16-bit depth values intact.
                depth = cv2.imread(depth_fnames[ind], 2)
                assert depth is not None
                xyz = clouds.depth_to_xyz(depth, asus_xtion_pro_f)
                pc = ru.xyzrgb2pc(xyz, rgb, frame_id='', use_time_now=False)
                ar_tfms = get_ar_marker_poses(pc, track=True)
                if ar_tfms:
                    if verbose:
                        blueprint("Got markers " + str(ar_tfms.keys()) + " at time %f" % stamps[ind])
                    # Convert each gripper's marker pose to its tool tip,
                    # expressed in camera 1's frame.
                    for lr, gr in grippers.items():
                        ar = gr.get_ar_marker()
                        if ar in ar_tfms:
                            tt_tfm = gr.get_tooltip_transform(ar, np.asarray(ar_tfms[ar]))
                            data[lr]['camera%i' % i].append((tfm_c1[i].dot(tt_tfm), stamps[ind]))
        else:
            # RGB-only path: marker poses straight from the image.
            # (Calibration-model switching disabled -- kept for reference.)
            # if setCalib is None:
            #     setCalib = rospy.ServiceProxy("setCalibInfo", SetCalibInfo)
            # reqCalib.camera_model = camera_models[i]
            # setCalib(reqCalib)
            #
            # if verbose:
            #     yellowprint("Changed camera calibration parameters to model %s"%camera_models[i])
            for ind in rgb_fnames:
                rgb = cv.LoadImage(rgb_fnames[ind])
                if displayImages:
                    cv.ShowImage(winname, rgb)
                    cv.WaitKey(1)
                assert rgb is not None
                ar_tfms = get_ar_marker_poses(rgb, use_pc_service=False, model=camera_models[i], track=True)
                if ar_tfms:
                    if verbose:
                        blueprint("Got markers " + str(ar_tfms.keys()) + " at time %f" % stamps[ind])
                    for lr, gr in grippers.items():
                        ar = gr.get_ar_marker()
                        if ar in ar_tfms:
                            tt_tfm = gr.get_tooltip_transform(ar, np.asarray(ar_tfms[ar]))
                            data[lr]['camera%i' % i].append((tfm_c1[i].dot(tt_tfm), stamps[ind]))

    found_hydra_data = False
    found_pot_data = False

    # If hydra_only.data file already exists, don't do extra work.
    hydra_data_file = osp.join(demo_dir, demo_names.hydra_data_name)
    if osp.isfile(hydra_data_file):
        yellowprint("%s already exists for %s. Getting hydra data from this info." % (demo_names.hydra_data_name, demo_name))
        with open(hydra_data_file, 'r') as fh:
            hdata = cPickle.load(fh)
        for lr in hdata:
            if lr in 'lr':
                if hdata[lr].get('hydra'):
                    data[lr]['hydra'] = hdata[lr]['hydra']
                    found_hydra_data = True
                if hdata[lr].get('pot_angles'):
                    data[lr]['pot_angles'] = hdata[lr]['pot_angles']
                    found_pot_data = True

    if not found_hydra_data:
        if verbose:
            yellowprint('Hydra')
        lr_long = {'l': 'left', 'r': 'right'}
        # Scan /tf messages in the bag for the hydra paddle transforms;
        # `found` accumulates which paddles were already handled in this
        # message, so each paddle contributes at most once per message.
        for (_, msg, _) in bag.read_messages(topics=['/tf']):
            hyd_tfm = {}
            found = ''
            for tfm in msg.transforms:
                if found in ['lr', 'rl']:
                    break
                for lr in grippers:
                    if lr in found:
                        continue
                    elif tfm.header.frame_id == '/' + hydra_frame and tfm.child_frame_id == '/hydra_' + lr_long[lr]:
                        t, r = tfm.transform.translation, tfm.transform.rotation
                        trans = (t.x, t.y, t.z)
                        rot = (r.x, r.y, r.z, r.w)
                        # Express the hydra reading in camera 1's frame,
                        # then convert to the tool-tip transform.
                        hyd_tfm = tfm_c1_h.dot(conversions.trans_rot_to_hmat(trans, rot))
                        stamp = tfm.header.stamp.to_sec()
                        tt_tfm = grippers[lr].get_tooltip_transform(lr_long[lr], hyd_tfm)
                        data[lr]['hydra'].append((tt_tfm, stamp))
                        found += lr
            if found and verbose:
                blueprint("Got hydra readings %s at time %f" % (found, stamp))

    if not found_pot_data:
        if verbose:
            yellowprint('Potentiometer readings')
        for lr in grippers:
            for (_, msg, ts) in bag.read_messages(topics=['/%s_pot_angle' % lr]):
                angle = msg.data
                stamp = ts.to_sec()
                data[lr]['pot_angles'].append((angle, stamp))
                if verbose:
                    blueprint("Got a %s potentiometer angle of %f at time %f" % (lr, angle, stamp))

    # Summary of how much data was recovered per sensor.
    if verbose:
        for lr in 'lr':
            yellowprint("Gripper %s:" % lr)
            for i in range(num_cameras):
                yellowprint("Found %i transforms out of %i rgb/rgbd images from camera%i" % (len(data[lr]['camera%i' % (i + 1)]), cam_counts[i], i + 1))
            yellowprint("Found %i transforms from hydra" % len(data[lr]['hydra']))
            yellowprint("Found %i potentiometer readings" % len(data[lr]['pot_angles']))

    if save_file is None:
        save_file = demo_names.data_name
    save_filename = osp.join(demo_dir, save_file)
    with open(save_filename, 'w') as sfh:
        cPickle.dump(data, sfh)
    yellowprint("Saved %s." % demo_names.data_name)

    # Remove the "extraction in progress" sentinel created at the top.
    os.remove(osp.join(demo_dir, demo_names.extract_data_temp))
def save_init_ar(demo_type, demo_name, ar_marker, save_in_data_dir=True):
    """
    Extracts the initializing ar marker transform and stores it.

    For every camera, averages all observed poses of `ar_marker` over the
    recorded frames (expressed in camera 1's frame) and pickles
    {'tfms': {cam_index: avg_tfm_or_None}, 'marker': ar_marker} to the
    demo directory and optionally to the shared ar-init data directory.
    """
    # Temp file to show that data is already being extracted
    global setCalib
    demo_dir = osp.join(demo_files_dir, demo_type, demo_name)
    calib_file_path = osp.join(demo_dir, demo_names.calib_name)

    # NOTE(review): yaml.load without an explicit Loader -- safe only for
    # trusted files.
    with open(osp.join(demo_dir, demo_names.camera_types_name)) as fh:
        camera_types = yaml.load(fh)
    with open(osp.join(demo_dir, demo_names.camera_models_name)) as fh:
        camera_models = yaml.load(fh)

    cameras = range(1, len(camera_types) + 1)

    c_frames = {}
    for i in cameras:
        c_frames[i] = 'camera%i_link' % (i)

    video_dirs = {}
    for i in cameras:
        video_dirs[i] = osp.join(demo_dir, demo_names.video_dir % i)

    # tfm_c1[i]: camera_i -> camera_1; camera 1 is identity.
    tfm_c1 = {i: None for i in cameras}
    tfm_c1[1] = np.eye(4)

    with open(calib_file_path, 'r') as fh:
        calib_data = cPickle.load(fh)

    # Frame names in the calibration may carry a leading '/'.
    for tfm in calib_data['transforms']:
        if tfm['parent'] == c_frames[1] or tfm['parent'] == '/' + c_frames[1]:
            for i in cameras:
                if tfm['child'] == c_frames[i] or tfm['child'] == '/' + c_frames[i]:
                    tfm_c1[i] = nlg.inv(tfm_link_rof).dot(tfm['tfm']).dot(tfm_link_rof)

    # NOTE(review): `!= None` on numpy arrays is elementwise on modern
    # numpy; this relied on old-numpy scalar semantics -- TODO confirm.
    if not all([tfm_c1[s] != None for s in tfm_c1]):
        redprint("Calibration does not have required transforms")
        return

    # We want the ar marker transform from all the relevant cameras
    data = {'tfms': {i: None for i in cameras}, 'marker': ar_marker}

    for i in cameras:
        if verbose:
            yellowprint('Camera%i' % i)
        if camera_types[i] == "rgbd":
            rgb_fnames, depth_fnames, stamps = eu.get_rgbd_names_times(video_dirs[i])
        else:
            rgb_fnames, stamps = eu.get_rgbd_names_times(video_dirs[i], depth=False)
        all_tfms = []
        cam_count = 0

        if camera_types[i] == "rgbd":
            # RGBD path: detect the marker in a per-frame point cloud.
            for ind in rgb_fnames:
                rgb = cv2.imread(rgb_fnames[ind])
                if displayImages:
                    # NOTE(review): `winname` is not defined in this
                    # function -- presumably a module-level constant;
                    # otherwise this raises NameError. TODO confirm.
                    cv2.imshow(winname, rgb)
                    cv2.waitKey(1)
                assert rgb is not None
                # flag 2 == IMREAD_ANYDEPTH: keep 16-bit depth values intact.
                depth = cv2.imread(depth_fnames[ind], 2)
                assert depth is not None
                xyz = clouds.depth_to_xyz(depth, asus_xtion_pro_f)
                pc = ru.xyzrgb2pc(xyz, rgb, frame_id='', use_time_now=False)
                ar_tfms = get_ar_marker_poses(pc, track=True)
                if ar_tfms and ar_marker in ar_tfms:
                    # Express the marker pose in camera 1's frame.
                    all_tfms.append(tfm_c1[i].dot(ar_tfms[ar_marker]))
                    cam_count += 1
        else:
            # RGB-only path: point the AR service at this camera's model
            # first, then detect markers straight from the images.
            if setCalib is None:
                setCalib = rospy.ServiceProxy("setCalibInfo", SetCalibInfo)
            # NOTE(review): `reqCalib` is not defined here -- presumably a
            # module-level request object. TODO confirm.
            reqCalib.camera_model = camera_models[i]
            setCalib(reqCalib)
            if verbose:
                yellowprint("Changed camera calibration parameters to model %s" % camera_models[i])
            for ind in rgb_fnames:
                rgb = cv.LoadImage(rgb_fnames[ind])
                if displayImages:
                    cv.ShowImage(winname, rgb)
                    cv.WaitKey(1)
                assert rgb is not None
                ar_tfms = get_ar_marker_poses(rgb, use_pc_service=False, track=True)
                if ar_tfms and ar_marker in ar_tfms:
                    all_tfms.append(tfm_c1[i].dot(ar_tfms[ar_marker]))
                    cam_count += 1

        if verbose:
            blueprint("Got %i values for AR Marker %i from camera %i." % (cam_count, ar_marker, i))
        if cam_count > 0:
            data['tfms'][i] = utils.avg_transform(all_tfms)

    # NOTE(review): `save_file` is not a parameter of this function and is
    # not defined locally -- presumably a module-level name; otherwise this
    # raises NameError. TODO confirm (compare save_observations_rgbd,
    # which takes save_file as a parameter).
    save_filename1 = osp.join(demo_dir, save_file)
    with open(save_filename1, 'w') as sfh:
        cPickle.dump(data, sfh)
    if save_in_data_dir:
        save_filename2 = osp.join(ar_init_dir, ar_init_demo_name)
        with open(save_filename2, 'w') as sfh:
            cPickle.dump(data, sfh)
    yellowprint("Saved %s." % save_file)