def do_icp(surface_points):
    # Random point set, scaled up.
    p1 = surface_points
    n = p1.shape[0]

    # Transform the points by some random transform.
    tfm = make_transform(trans_SCALE=1)
    p2 = np.c_[p1, np.ones((n, 1))].dot(tfm.T)[:, 0:3]
    print tfm

    # Perturb the true transform slightly to use as the initial guess.
    noise = make_transform(0.01, 0.1)
    req = ICPTransformRequest()
    req.pc1 = ru.xyz2pc(p1, 'a')
    req.pc2 = ru.xyz2pc(p2, 'b')
    req.guess = conversions.hmat_to_pose(tfm.dot(noise))

    res = findTransform(req)
    ntfm = conversions.pose_to_hmat(res.pose)
    print ntfm
    print ntfm.dot(np.linalg.inv(tfm))
    print np.linalg.norm(ntfm.dot(np.linalg.inv(tfm)) - np.eye(4))

    # Translation error between the recovered and true transforms.
    return np.linalg.norm(ntfm[0:3, 3] - tfm[0:3, 3])
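# A minimal usage sketch for do_icp, assuming the ICP service is running and
# that a random point cloud is an acceptable stand-in for real surface points.
# The helper name and the 500-point cube below are illustrative, not part of
# the original module.
def _demo_do_icp():
    pts = np.random.rand(500, 3)  # 500 random "surface" points in a unit cube
    err = do_icp(pts)
    print "ICP translation error (m):", err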
def hmats_to_pose_array(hmats, frame_id):
    """Converts a list of 4x4 transforms into a stamped PoseArray in frame_id."""
    pose_array = gm.PoseArray()
    for hmat in hmats:
        pose = conv.hmat_to_pose(hmat)
        pose_array.poses.append(pose)
    pose_array.header.frame_id = frame_id
    pose_array.header.stamp = rospy.Time.now()
    return pose_array
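# Usage sketch for hmats_to_pose_array (hypothetical topic name and identity
# poses, purely for illustration): publish a PoseArray of two gripper poses
# expressed in the first camera's optical frame.
def _demo_pose_array():
    pub = rospy.Publisher('/gripper_poses', gm.PoseArray)
    hmats = [np.eye(4), np.eye(4)]  # placeholder 4x4 transforms
    pub.publish(hmats_to_pose_array(hmats, '/camera1_rgb_optical_frame'))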
def checkerboard_loop():
    rospy.init_node("checkerboard_tracker")

    grabber = cpr.CloudGrabber()
    grabber.startRGBD()

    pc_pub = rospy.Publisher("cb_cloud", PointCloud2)
    pose_pub = rospy.Publisher("cb_pose", PoseStamped)
    rate = rospy.Rate(10)

    while True:
        try:
            rgb, depth = grabber.getRGBD()
            xyz = clouds.depth_to_xyz(depth, asus_xtion_pro_f)
            rtn, corners = get_corners_rgb(rgb)
            points = get_xyz_from_corners(corners, xyz)
            print "Shape of points: ", points.shape
            if points.any():
                try:
                    tfm = get_svd_tfm_from_points(points)
                except Exception:
                    # Corner set was degenerate; skip this frame.
                    continue
                print tfm
                pc = ros_utils.xyzrgb2pc(xyz, rgb, "/map")
                pose = PoseStamped()
                pose.pose = conversions.hmat_to_pose(tfm)
                pc.header.frame_id = pose.header.frame_id = "/map"
                pc.header.stamp = pose.header.stamp = rospy.Time.now()
                pose_pub.publish(pose)
                pc_pub.publish(pc)
            rate.sleep()
        except KeyboardInterrupt:
            break
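# checkerboard_loop is self-contained (it calls rospy.init_node itself), so a
# plausible entry point is simply (hypothetical; the original file's entry
# point is not shown here):
#
#   if __name__ == '__main__':
#       checkerboard_loop()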
def view_hydra_demo_on_rviz(demo_type, demo_name, freq, speed, prompt=False, verbose=False):
    """
    Uses hydra_only.data for the segment to quickly visualize the demo.
    @demo_type, @demo_name: demo identification.
    @freq: basically a measure of the fineness of the time steps.
    @speed: how fast to replay the demo.
    @prompt: does the user hit enter after each time step?
    """
    demo_dir = osp.join(demo_files_dir, demo_type, demo_name)
    bag_file = osp.join(demo_dir, demo_names.bag_name)
    data_file = osp.join(demo_dir, demo_names.hydra_data_name)
    calib_file = osp.join(demo_dir, demo_names.calib_name)
    with open(osp.join(demo_dir, demo_names.camera_types_name), 'r') as fh:
        cam_types = yaml.load(fh)

    if not osp.isfile(data_file):
        yellowprint("%s does not exist for this demo. Extracting now." % demo_names.hydra_data_name)
        ed.save_hydra_only(demo_type, demo_name)

    with open(data_file, 'r') as fh:
        dat = cp.load(fh)

    # Get the grippers used.
    grippers = [key for key in dat.keys() if key in 'lr']

    # Data directories and frames for the RGBD cameras.
    rgbd_dirs = {cam: osp.join(demo_dir, demo_names.video_dir % cam) for cam in cam_types if cam_types[cam] == 'rgbd'}
    cam_frames = {cam: '/camera%i_rgb_optical_frame' % cam for cam in rgbd_dirs}

    tfm_pubs = {}
    hydra_dat = {}
    pot_dat = {}
    _, hydra_dat['l'], pot_dat['l'] = load_data(data_file, 'l', freq, speed, hydra_only=True)
    _, hydra_dat['r'], pot_dat['r'] = load_data(data_file, 'r', freq, speed, hydra_only=True)

    tmin, _, nsteps = relative_time_streams(hydra_dat.values() + pot_dat.values(), freq, speed)

    if rospy.get_name() == "/unnamed":
        rospy.init_node("visualize_demo")

    ## Publishers for unfiltered data:
    for lr in grippers:
        tfm_pubs[lr] = rospy.Publisher('/%s_hydra_estimate' % lr, PoseStamped)

    ## Get the point-cloud streams.
    pc_strms = {cam: streamize_rgbd_pc(rgbd_dirs[cam], cam_frames[cam], freq, tstart=tmin, speed=speed, verbose=verbose) for cam in rgbd_dirs}
    pc_pubs = {cam: rospy.Publisher('/point_cloud%i' % cam, PointCloud2) for cam in rgbd_dirs}

    cam_tfms = get_cam_transforms(calib_file, len(cam_types))
    for cam in rgbd_dirs:
        if cam != 1:
            publish_static_tfm(cam_frames[1], cam_frames[cam], cam_tfms[cam])

    sleeper = rospy.Rate(freq)
    T_far = np.eye(4)
    T_far[0:3, 3] = [10, 10, 10]

    handles = []
    dat_snext = {lr: {} for lr in grippers}
    for lr in grippers:
        dat_snext[lr]['h'] = stream_soft_next(hydra_dat[lr])
        dat_snext[lr]['pot'] = stream_soft_next(pot_dat[lr])
    prev_ang = {'l': 0, 'r': 0}

    for i in xrange(nsteps):
        if prompt:
            raw_input("Hit enter when ready.")
        if verbose:
            print "Time stamp: ", tmin + (0.0 + i * speed) / freq

        ## Show the point clouds:
        for cam in pc_strms:
            try:
                pc = pc_strms[cam].next()
                if pc is not None:
                    if verbose:
                        print "pc%i ts:" % cam, pc.header.stamp.to_sec()
                    pc.header.stamp = rospy.Time.now()
                    pc_pubs[cam].publish(pc)
                elif verbose:
                    print "pc%i ts:" % cam, None
            except StopIteration:
                pass

        ests = {}
        tfms = []
        ang_vals = []
        for lr in grippers:
            ests[lr] = dat_snext[lr]['h']()
            ang_val = dat_snext[lr]['pot']()
            # Fall back on the previous angle if the potentiometer dropped out.
            if ang_val is not None and not np.isnan(ang_val):
                prev_ang[lr] = ang_val
            else:
                ang_val = prev_ang[lr]
            ang_val *= 2
            # If there is no hydra estimate, park the marker far away.
            if ests[lr] is None:
                tfms.append(T_far)
            else:
                tfms.append(ests[lr])
            ang_vals.append(rad_angle(ang_val))

        handles = draw_trajectory(cam_frames[1], tfms, color=(1, 1, 0, 1), open_fracs=ang_vals)

        for lr in grippers:
            if ests[lr] is not None:
                tfm_pubs[lr].publish(conversions.pose_to_stamped_pose(conversions.hmat_to_pose(ests[lr]), cam_frames[1]))

        sleeper.sleep()

    # Clear the point-cloud displays once the replay is done.
    empty_cloud = PointCloud2()
    for cam in pc_pubs:
        pc_pubs[cam].publish(empty_cloud)
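# Example invocation (hypothetical demo identifiers): replay the hydra-only
# data of a demo at 30 Hz, at double speed, pausing for the user at each step.
#
#   view_hydra_demo_on_rviz('overhand_knot', 'demo00001', freq=30.0,
#                           speed=2.0, prompt=True)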
def view_demo_on_rviz(demo_type, demo_name, freq, speed=1.0, main='h', prompt=False, verbose=False):
    """
    Visualizes a recorded demo on rviz (without kalman filter/smoother data).
    @demo_type, @demo_name: demo identification.
    @freq: basically a measure of the fineness of the time steps.
    @speed: how fast to replay the demo.
    @main: which sensor to display the marker for.
    @prompt: does the user hit enter after each time step?
    """
    demo_dir = osp.join(demo_files_dir, demo_type, demo_name)
    bag_file = osp.join(demo_dir, demo_names.bag_name)
    data_file = osp.join(demo_dir, demo_names.data_name)
    calib_file = osp.join(demo_dir, demo_names.calib_name)
    with open(osp.join(demo_dir, demo_names.camera_types_name), 'r') as fh:
        cam_types = yaml.load(fh)

    if not osp.isfile(data_file):
        yellowprint("%s does not exist for this demo. Extracting now." % demo_names.data_name)
        ed.save_observations_rgbd(demo_type, demo_name)

    with open(data_file, 'r') as fh:
        dat = cp.load(fh)

    # Get the grippers used.
    grippers = [key for key in dat.keys() if key in 'lr']

    # Data directories and frames for the RGBD cameras.
    rgbd_dirs = {cam: osp.join(demo_dir, demo_names.video_dir % cam) for cam in cam_types if cam_types[cam] == 'rgbd'}
    cam_frames = {cam: '/camera%i_rgb_optical_frame' % cam for cam in rgbd_dirs}

    tfm_pubs = {}
    cam_dat = {}
    hydra_dat = {}
    pot_dat = {}
    _, cam_dat['l'], hydra_dat['l'], pot_dat['l'] = load_data(data_file, 'l', freq, speed)
    _, cam_dat['r'], hydra_dat['r'], pot_dat['r'] = load_data(data_file, 'r', freq, speed)

    all_cam_strms = []
    for lr in 'lr':
        for cam in cam_dat[lr].keys():
            all_cam_strms.append(cam_dat[lr][cam]['stream'])

    tmin, _, nsteps = relative_time_streams(hydra_dat.values() + pot_dat.values() + all_cam_strms, freq, speed)

    if rospy.get_name() == "/unnamed":
        rospy.init_node("visualize_demo")

    ## Publishers for unfiltered data:
    for lr in grippers:
        tfm_pubs[lr] = {}
        for cam in cam_types:
            tfm_pubs[lr][cam] = rospy.Publisher('/%s_ar%i_estimate' % (lr, cam), PoseStamped)
        tfm_pubs[lr]['h'] = rospy.Publisher('/%s_hydra_estimate' % lr, PoseStamped)

    ## Get the point-cloud streams.
    pc_strms = {cam: streamize_rgbd_pc(rgbd_dirs[cam], cam_frames[cam], freq, tstart=tmin, speed=speed, verbose=verbose) for cam in rgbd_dirs}
    pc_pubs = {cam: rospy.Publisher('/point_cloud%i' % cam, PointCloud2) for cam in rgbd_dirs}

    cam_tfms = get_cam_transforms(calib_file, len(cam_types))
    for cam in rgbd_dirs:
        if cam != 1:
            publish_static_tfm(cam_frames[1], cam_frames[cam], cam_tfms[cam])

    sleeper = rospy.Rate(freq)
    T_far = np.eye(4)
    T_far[0:3, 3] = [10, 10, 10]

    handles = []
    prev_ang = {'l': 0, 'r': 0}
    dat_snext = {lr: {} for lr in grippers}
    for lr in grippers:
        dat_snext[lr]['h'] = stream_soft_next(hydra_dat[lr])
        dat_snext[lr]['pot'] = stream_soft_next(pot_dat[lr])
        for cam in cam_types:
            dat_snext[lr][cam] = stream_soft_next(cam_dat[lr][cam]['stream'])

    # 'h' selects the hydra estimate; anything else is a camera index.
    if main != 'h':
        main = int(main)

    for i in xrange(nsteps):
        if prompt:
            raw_input("Hit enter when ready.")
        if verbose:
            print "Time stamp: ", tmin + (0.0 + i * speed) / freq

        ## Show the point clouds:
        for cam in pc_strms:
            try:
                pc = pc_strms[cam].next()
                if pc is not None:
                    if verbose:
                        print "pc%i ts:" % cam, pc.header.stamp.to_sec()
                    pc.header.stamp = rospy.Time.now()
                    pc_pubs[cam].publish(pc)
                elif verbose:
                    print "pc%i ts:" % cam, None
            except StopIteration:
                pass

        next_est = {lr: {} for lr in grippers}
        tfms = []
        ang_vals = []
        for lr in grippers:
            next_est[lr]['h'] = dat_snext[lr]['h']()
            for cam in cam_types:
                next_est[lr][cam] = dat_snext[lr][cam]()
            ang_val = dat_snext[lr]['pot']()
            # Fall back on the previous angle if the potentiometer dropped out.
            if ang_val is not None and not np.isnan(ang_val):
                prev_ang[lr] = ang_val
            else:
                ang_val = prev_ang[lr]
            ang_val *= 2
            # If the main sensor has no estimate, park the marker far away.
            tfm = next_est[lr][main]
            if tfm is None:
                tfms.append(T_far)
            else:
                tfms.append(tfm)
            ang_vals.append(rad_angle(ang_val))

        handles = draw_trajectory(cam_frames[1], tfms, color=(1, 1, 0, 1), open_fracs=ang_vals)

        for lr in grippers:
            for m, est in next_est[lr].items():
                if est is not None:
                    tfm_pubs[lr][m].publish(conversions.pose_to_stamped_pose(conversions.hmat_to_pose(est), cam_frames[1]))
                else:
                    tfm_pubs[lr][m].publish(conversions.pose_to_stamped_pose(conversions.hmat_to_pose(T_far), cam_frames[1]))

        sleeper.sleep()
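# Example invocation (hypothetical identifiers): display the hydra estimate as
# the main gripper marker; passing a camera index instead (e.g. main='1')
# would display that camera's AR-marker estimate.
#
#   view_demo_on_rviz('overhand_knot', 'demo00001', freq=30.0, speed=1.0,
#                     main='h', verbose=True)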