def calibrate_hydras():
    global cameras, tfm_pub

    greenprint('Step 2. Calibrating hydra and kinect.')
    CALIB_HYDRA = \
        raw_input('Enter the gripper you are using for calibration.')
    HYDRA_AR_MARKER = \
        int(raw_input('Enter the ar_marker you are using for calibration.'))
    hydra_calib = HydraCalibrator(cameras, ar_marker=HYDRA_AR_MARKER,
                                  calib_hydra=CALIB_HYDRA,
                                  calib_camera=CALIB_CAMERA)

    done = False
    while not done:
        hydra_calib.calibrate('camera', HYDRA_N_OBS, HYDRA_N_AVG)
        if not hydra_calib.calibrated:
            redprint('Hydra calibration failed.')
            hydra_calib.reset_calibration()
        else:
            tfm_pub.add_transforms(hydra_calib.get_transforms('camera'))
            if yes_or_no('Are you happy with the calibration? Check RVIZ.'):
                done = True
            else:
                yellowprint('Calibrating hydras.')
                hydra_calib.reset_calibration()

    greenprint('Hydra base calibrated.')
Example 2
def delete_demo(demo_type, demo_name):
    
    assert demo_type != '', 'DEMO TYPE needs to be valid.'
    assert demo_name != '', 'DEMO NAME needs to be valid.'
    
    demo_type_dir = osp.join(demo_files_dir, demo_type)
    demo_dir= osp.join(demo_type_dir, demo_name)
    
    if not osp.exists(demo_dir):
        redprint('%s does not exist! Invalid demo.'%demo_dir)
        return
    
    yellowprint('This cannot be undone!')
    if not yes_or_no('Are you still sure you want to delete %s?'%demo_name):
        return
    
    # If it was the last demo recorded, update the latest_demo.txt file
    # Otherwise don't bother
    latest_demo_file = osp.join(demo_type_dir, latest_demo_name)
    with open(latest_demo_file,'r') as fh: demo_num = int(fh.read())
    if demo_name == demo_names.base_name%demo_num:
        with open(latest_demo_file,'w') as fh: fh.write(str(demo_num-1))
    
    # Delete contents of demo_dir
    shutil.rmtree(demo_dir)
    
    # Update master_file
    master_file = osp.join(demo_type_dir, master_name)
    with open(master_file,"r") as fh: master_lines = fh.readlines()
    
    with open(master_file,"w") as fh:
        for line in master_lines:
            if demo_name not in line: fh.write(line)
    def calibrate (self, min_obs=5, n_avg=5):
        self.initialize_calibration()

        while True:
            self.process_observation(n_avg)
            if is_ready(self.masterGraph, min_obs):
                if not yes_or_no("Enough data has been gathered. Would you like to gather more data anyway?"):
                    break

        assert is_ready (self.masterGraph, min_obs)
        self.calibrated = self.finish_calibration()
def visualize_pointcloud():
    """
    Visualize point clouds from cyni data.
    """
    global getMarkers
    if rospy.get_name() == '/unnamed':
        rospy.init_node("visualize_pointcloud")
    
    camera_frame="camera_depth_optical_frame"
    getMarkers = rospy.ServiceProxy("getMarkers", MarkerPositions)
    
    pc_pubs = []
    sleeper = rospy.Rate(30)
    
    streams = []
    for i in xrange(1, NUM_CAMERAS+1):
        cam_streams = get_streams("#%d"%i)
        if cam_streams is not None:
            streams.append(cam_streams)
            cam_streams["depth"].start()
            cam_streams["depth"].setEmitterState(False)
            cam_streams["color"].start()
            pc_pubs.append(rospy.Publisher("camera/depth_registered/points", PointCloud2))
        else: break
    
    indiv_freq = TOGGLE_FREQ/NUM_CAMERAS
    
    print "Streaming now: Pointclouds only."
    try:
        while True:
            print "Publishing data..."
            for (i,stream) in enumerate(streams):
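                # Turn each camera's IR emitter on only while reading its frame
                # (presumably to avoid cross-talk between the depth cameras).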
                stream["depth"].setEmitterState(True)
                depth = stream["depth"].readFrame().data
                rgb = cv2.cvtColor(stream["color"].readFrame().data, cv2.COLOR_RGB2BGR)
                
                cv2.imshow("images", rgb)
                cv2.waitKey()
                
                ar_tfms = get_ar_transform_id(depth, rgb)
                print ar_tfms
                               
#                pc = ru.xyzrgb2pc(clouds.depth_to_xyz(depth, asus_xtion_pro_f), rgb, camera_frame)
#                pc_pubs[i].publish(pc)
                time.sleep(1/indiv_freq*0.5)
                stream["depth"].setEmitterState(False)
                time.sleep(1/indiv_freq*0.5)
                
            if yes_or_no("Done?"): break
            #wait for a bit?
    except KeyboardInterrupt:
        print "Keyboard interrupt. Exiting."
    def calibrate (self, min_obs=5, n_avg=5):
        self.initialize_calibration()

        while True:
            worked = self.process_observation(n_avg)
            if not worked:
                yellowprint("Something went wrong. Try again.")
                self.iterations -= 1
            if is_ready(self.masterGraph, min_obs):
                if yes_or_no("Enough data has been gathered. Proceed with transform optimization?"):
                    break

        assert is_ready (self.masterGraph, min_obs)
        self.calibrated = self.finish_calibration()
def calibrate_gripper_lite(lr):
    global cameras, tfm_pub

    greenprint('Step 3.1 Calibrate %s gripper.'%lr)
        
    gr = gripper_lite.GripperLite(lr, marker=gripper_marker_id[lr], 
                                  cameras=cameras,
                                  trans_marker_tooltip=gripper_trans_marker_tooltip[lr])     
        
    tfm_pub.add_gripper(gr)
    tfm_pub.set_publish_grippers(True)

    lr_long = {'l':'left','r':'right'}[lr]

    if yes_or_no(colorize('Do you want to add hydra to %s gripper?'%lr, 'yellow')):
        gr.add_hydra(hydra_marker=lr_long, tfm=None, 
                     ntfm=GRIPPER_LITE_N_OBS, 
                     navg=GRIPPER_LITE_N_AVG)
def calibrate_gripper(lr):
    global cameras, tfm_pub


    if lr == 'l':
        greenprint('Step 3.1 Calibrate l gripper.')
    else:
        greenprint('Step 3.2 Calibrate r gripper.')
    
    gripper_calib = GripperCalibrator(cameras, lr)

    # create calib_info based on gripper here

    calib_info = {'master': {'ar_markers': [gripper_marker_id[lr]],
                             'angle_scale': 0,
                             'master_marker': gripper_marker_id[lr]},
                    'l': {'ar_markers': [gripper_finger_markers[lr]['l']],
                          'angle_scale': 1},
                    'r': {'ar_markers': [gripper_finger_markers[lr]['r']],
                          'angle_scale': -1}}

    gripper_calib.update_calib_info(calib_info)

    done = False
    while not done:
        gripper_calib.calibrate(GRIPPER_MIN_OBS, GRIPPER_N_AVG)
        if not gripper_calib.calibrated:
            redprint('Gripper calibration failed.')
            gripper_calib.reset_calibration()
            gripper_calib.update_calib_info(calib_info)
            continue

        tfm_pub.add_gripper(gripper_calib.get_gripper())
        tfm_pub.set_publish_grippers(True)

        if yes_or_no('Are you happy with the calibration?'):
            done = True
        else:
            yellowprint('Calibrating ' + lr + ' gripper again.')
            gripper_calib.reset_calibration()
            gripper_calib.update_calib_info(calib_info)
            tfm_pub.set_publish_grippers(False)

    greenprint('Done with ' + lr + ' gripper calibration.')
Example 8
def test_cam_calib ():

    yellowprint("Beginning calibration sequence.")

    NUM_CAMERAS = 2
    cameras = cyni_cameras(NUM_CAMERAS)
    cameras.initialize_cameras()
    
    tfm_pub = transform_publisher(cameras)
    tfm_pub.start()
        
    greenprint("Step 1. Calibrating mutliple cameras.")
    CAM_N_OBS = 10
    CAM_N_AVG = 5
    cam_calib = camera_calibrator(cameras)

    done = False
    while not done:
        cam_calib.calibrate(CAM_N_OBS, CAM_N_AVG, tfm_pub)
        if not cam_calib.calibrated:
            redprint("Camera calibration failed.")
            cam_calib.reset_calibration()
        else:
            tfm_pub.add_transforms(cam_calib.get_transforms())
            if yes_or_no("Are you happy with the calibration? Check RVIZ."):
                done = True
            else:
                yellowprint("Calibrating cameras again.")
                cam_calib.reset_calibration()
    
    tfm_pub.set_publish_pc(True)
    greenprint("Mutliple cameras calibrated.")
    print tfm_pub.publish_pc
    
    import time
    try:
        while True:
            time.sleep(0.1)
    except KeyboardInterrupt:
        pass
def calibrate_cameras():
    global cameras, tfm_pub

    greenprint('Step 1. Calibrating multiple cameras.')
    cam_calib = CameraCalibrator(cameras)

    done = False
    while not done:
        cam_calib.calibrate(n_obs=CAM_N_OBS, n_avg=CAM_N_AVG)
        if cameras.num_cameras == 1:
            break
        if not cam_calib.calibrated:
            redprint('Camera calibration failed.')
            cam_calib.reset_calibration()
        else:
            tfm_pub.add_transforms(cam_calib.get_transforms())
            if yes_or_no('Are you happy with the calibration? Check RVIZ.'):
                done = True
            else:
                yellowprint('Calibrating cameras again.')
                cam_calib.reset_calibration()

    greenprint('Cameras calibrated.')
def run_calibration_sequence ():
    
    rospy.init_node('calibration')
    
    yellowprint("Beginning calibration sequence.")
    
    tfm_pub = transform_publisher()
    tfm_pub.start()
    
    NUM_CAMERAS = 1
    cameras = cyni_cameras(NUM_CAMERAS)
    cameras.initialize_cameras()
        
    greenprint("Step 1. Calibrating mutliple cameras.")
    CAM_N_OBS = 10
    CAM_N_AVG = 5
    cam_calib = camera_calibrator(cameras)

    done = False
    while not done:
        cam_calib.calibrate(CAM_N_OBS, CAM_N_AVG)
        if not cam_calib.calibrated:
            redprint("Camera calibration failed.")
            cam_calib.reset_calibration()
        else:
            tfm_pub.add_transforms(cam_calib.get_transforms())
            if yes_or_no("Are you happy with the calibration? Check RVIZ."):
                done = True
            else:
                yellowprint("Calibrating cameras again.")
                cam_calib.reset_calibration()

    greenprint("Mutliple cameras calibrated.")

    greenprint("Step 2. Calibrating hydra and kinect.")
    HYDRA_N_OBS = 10
    HYDRA_N_AVG = 5
    HYDRA_AR_MARKER = 0

    hydra_calib = hydra_calibrator(cameras, ar_marker = HYDRA_AR_MARKER)

    done = False
    while not done:
        hydra_calib.calibrate(HYDRA_N_OBS, HYDRA_N_AVG)
        if not hydra_calib.calibrated:
            redprint("Camera calibration failed.")
            hydra_calib.reset_calibration()
        else:
            tfm_pub.add_transforms(hydra_calib.get_transforms())
            if yes_or_no("Are you happy with the calibration? Check RVIZ."):
                done = True
            else:
                yellowprint("Calibrating hydras.")
                hydra_calib.reset_calibration()

    greenprint("Hydra base calibrated.")

    greenprint("Step 3. Calibrating relative transforms of markers on gripper.")

    GRIPPER_MIN_OBS = 5
    GRIPPER_N_AVG = 5

    greenprint("Step 3.1 Calibrate l gripper.")
    l_gripper_calib = gripper_calibrator(cameras)
    
    # create calib_info based on gripper here
    calib_info = {'master':{'ar_markers':[0,1],
                            'hydras':['left'],
                            'angle_scale':0,
                            'master_group':1},
                  'l': {'ar_markers':[2,3,4],
                            'angle_scale':1},
                  'r': {'ar_markers':[5,6,7],
                            'angle_scale':-1}}
    
    l_gripper_calib.update_calib_info(calib_info)
    
    done = False
    while not done:
        l_gripper_calib.calibrate(GRIPPER_MIN_OBS, GRIPPER_N_AVG)
        if not l_gripper_calib.calibrated:
            redprint("Gripper calibration failed.")
            l_gripper_calib.reset_calibration()
            continue
        if yes_or_no("Are you happy with the calibration?"):
            done = True
        else:
            yellowprint("Calibrating l gripper again.")
            l_gripper_calib.reset_calibration()
            
    greenprint("Done with l gripper calibration.")

    greenprint("Step 3.2 Calibrate r gripper.")
    r_gripper_calib = gripper_calibrator(cameras)
    
    # create calib_info based on gripper here
    calib_info = {'master':{'ar_markers':[0,1],
                            'hydras':['left'],
                            'angle_scale':0,
                            'master_group':1},
                  'l': {'ar_markers':[2,3,4],
                            'angle_scale':1},
                  'r': {'ar_markers':[5,6,7],
                            'angle_scale':-1}}
    
    r_gripper_calib.update_calib_info(calib_info)
    
    done = False
    while not done:
        r_gripper_calib.calibrate(GRIPPER_MIN_OBS, GRIPPER_N_AVG)
        if not r_gripper_calib.calibrated:
            redprint("Gripper calibration failed.")
            r_gripper_calib.reset_calibration()
            continue
        if yes_or_no("Are you happy with the calibration?"):
            done = True
        else:
            yellowprint("Calibrating r gripper again.")
            r_gripper_calib.reset_calibration()

    greenprint("Done with r gripper calibration.")
    
    greenprint("Done with all the calibration.")
    
    while True:
        # stall
        time.sleep(0.1)
Example 11
                yellowprint("Another node seems to be running kf/ks already for %s."%demo["demo_name"]) 
                continue
            # Check if traj already exists
            if not osp.isfile(osp.join(demo_type_dir, demo["demo_name"], demo_names.traj_name)) or args.overwrite:
                yellowprint("Saving %s."%demo["demo_name"])
                filter_traj(demo_dir, tps_model_fname=args.tps_fname, save_tps=args.save_tps, do_smooth=args.do_smooth, plot=args.plot, block=args.block)
            else:
                yellowprint("Trajectory file exists for %s. Not overwriting."%demo["demo_name"])

    else:
        demo_dir = osp.join(demo_type_dir, args.demo_name)
        if osp.exists(demo_dir):
            # Wait until extraction is done for current demo.
            while osp.isfile(osp.join(demo_dir, demo_names.extract_data_temp)):
                time.sleep(1)
            # Check if some other node is running kf/ks currently.
            if not osp.isfile(osp.join(demo_dir, demo_names.run_kalman_temp)):
                # Check if .traj file already exists
                if osp.isfile(osp.join(demo_type_dir, args.demo_name, demo_names.traj_name)):
                    if args.overwrite or yes_or_no('Trajectory file already exists for this demo. Overwrite?'):
                        yellowprint("Saving %s."%args.demo_name)
                        filter_traj(demo_dir, tps_model_fname=args.tps_fname, save_tps=args.save_tps, do_smooth=args.do_smooth, plot=args.plot, block=args.block)
                else:
                    yellowprint("Saving %s."%args.demo_name)
                    filter_traj(demo_dir, tps_model_fname=args.tps_fname, save_tps=args.save_tps, do_smooth=args.do_smooth, plot=args.plot, block=args.block)
            else:
                yellowprint("Another node seems to be running kf/ks already for %s."%args.demo_name)

    if args.plot and not args.block:
        raw_input()
def record_demo (demo_name, use_voice):
    """
    Record a single demo.
    Returns True/False depending on whether demo needs to be saved.
    
    @demo_name: name of the demo being recorded.
    @use_voice: bool on whether to use voice for demo or not.
    """
    global cmd_checker, topic_writer, demo_type_dir
    # Start here. Change to voice command.
    sleeper = rospy.Rate(30)
    demo_dir = osp.join(demo_type_dir, demo_name)

    started_video = {cam:False for cam in camera_types}

    print
    yellowprint("Starting bag file recording.")
    bag_file = osp.join(demo_dir,demo_names.bag_name)
    topic_writer.start_saving(bag_file)
    started_bag = True

    for cam in camera_types:
        yellowprint("Calling saveImagecamera%i service."%cam)
        save_image_services[cam](cam_save_requests[cam])
        started_video[cam] = True
    
    # Change to voice command
    time.sleep(1.2)
    subprocess.call("espeak -v en 'Recording.'", stdout=devnull, stderr=devnull, shell=True)
    time_start = time.time()
    if use_voice:
        while True:
            status = cmd_checker.get_latest_msg()
            if status in ["cancel recording","finish recording","check demo"]:
                break
            sleeper.sleep()
    else:
        raw_input(colorize("Press any key when done.",'y',True))
    
    time_finish = time.time()
    
    for cam in started_video:
        if started_video[cam]:
            save_image_services[cam](cam_stop_request)
            yellowprint("Stopped video%i."%cam)
    if started_bag:
        topic_writer.stop_saving()
        yellowprint("Stopped bag.")
    
    
    if use_voice:
        while status == "check demo":
            subprocess.call("espeak -v en 'Visualizing.'", stdout=devnull, stderr=devnull, shell=True)
            view_hydra_demo_on_rviz(demo_type, demo_name, freq, speed, prompt=False, verbose=False)
            time.sleep(0.5)
            status = cmd_checker.get_latest_msg()
        greenprint("Time taken to record demo: %02f s"%(time_finish-time_start))
        return status == "finish recording"
    elif yes_or_no("Save demo?"):
        greenprint("Time taken to record demo: %02f s"%(time_finish-time_start))
        return True
    else:
        return False
            while osp.isfile(osp.join(demo_dir, demo_names.record_demo_temp)):
                time.sleep(1)
            # Some other node is extracting data currently.
            if osp.isfile(osp.join(demo_dir, demo_names.extract_hydra_data_temp)):
                yellowprint("Another node seems to be extracting hydra data already for %s."%demo["demo_name"]) 
                continue
            # Check if data file already exists
            if not osp.isfile(osp.join(demo_dir, demo_names.hydra_data_name)):
                ed.save_hydra_only(args.demo_type, demo["demo_name"])                
            else:
                yellowprint("Hydra data file exists for %s. Not overwriting."%demo["demo_name"])
            

    else:
        demo_dir = osp.join(demo_type_dir, args.demo_name)
        if osp.exists(demo_dir):
            # Wait until current demo is done recording, if so.
            while osp.isfile(osp.join(demo_dir, demo_names.record_demo_temp)):
                time.sleep(1)
            # Check if some other node is extracting data currently.
            if not osp.isfile(osp.join(demo_dir, demo_names.extract_hydra_data_temp)):
                # Check if data file already exists
                if osp.isfile(osp.join(demo_dir, demo_names.hydra_data_name)):
                    if yes_or_no('Hydra data file already exists for this demo. Overwrite?'):
                        ed.save_hydra_only(args.demo_type, args.demo_name)
                else:
                    ed.save_hydra_only(args.demo_type, args.demo_name)
            else:
                yellowprint("Another node seems to be extracting hydra data already for %s."%args.demo_name)

    print "Done extracting hydra data."
def initialize_ik (calib_file, n_tfm=5, n_avg=30):
    """
    Initializes the file to load from for ik feedback.
    
    First calibrates between PR2 and workspace camera.
    Finally ends up with a calibration from the PR2 base with the hydra_base.
    Can use hydra_info to reach tool tip from movement data. 
    """
    
    if rospy.get_name() == '/unnamed':
        rospy.init_node('ik_feedback_init')
    
    cameras = RosCameras()
    tfm_pub = CalibratedTransformPublisher(cameras)

    yellowprint("Start up the overhead camera as camera1 and PR2's camera as camera2.")
    raw_input(colorize("Hit enter when the PR2 is ready at the workstation.",'yellow',True))

    c1_frame = 'camera1_link'
    h_frame = 'hydra_base'

    # Find tfm from bf to camera1_link    
    greenprint("Calibrating cameras.")
    cam_calib = CameraCalibrator(cameras)
    done = False
    while not done:
        cam_calib.calibrate(n_obs=n_tfm, n_avg=n_avg)
        if not cam_calib.calibrated:
            redprint("Camera calibration failed.")
            cam_calib.reset_calibration()
        else:
            # Add camera to camera tfm and camera to PR2 tfm
            tfm_cam = cam_calib.get_transforms()[0]
            tfm_pr2 = {'parent':'base_footprint', 'child':c1_frame}
            tfm_bf_c2l = get_transform('base_footprint','camera_link')
            tfm_pr2['tfm'] = tfm_bf_c2l.dot(np.linalg.inv(tfm_cam['tfm']))

#             tfm_pub.add_transforms(cam_calib.get_transforms())
            tfm_pub.add_transforms([tfm_cam, tfm_pr2])
            print tfm_pub.transforms
            
            if yes_or_no("Are you happy with the calibration? Check RVIZ."):
                done = True
                tfm_pub.save_calibration(cam_pr2_calib_name)
                greenprint("Cameras calibrated.")
            else:
                yellowprint("Calibrating cameras again.")
                cam_calib.reset_calibration()        

    tfm_c1_h = None
    # Now, find transform between base_footprint and hydra base
    with open(calib_file,'r') as fh: calib_data = cPickle.load(fh)
    for tfm in calib_data['transforms']:
        if tfm['parent'] == c1_frame or tfm['parent'] == '/' + c1_frame:
            if tfm['child'] == h_frame or tfm['child'] == '/' + h_frame:
                tfm_c1_h = tfm['tfm']

    if tfm_c1_h is None:
        redprint("Hydra calib info not found in %s."%calib_file)
        return
    
    grippers = calib_data['grippers']
    assert 'l' in grippers.keys() and 'r' in grippers.keys(), 'Calibration does not have both grippers.'
    
    gprs = {}
    for lr,gdata in grippers.items():
        gr = gripper_lite.GripperLite(lr, gdata['ar'], trans_marker_tooltip=gripper_trans_marker_tooltip[lr])
        gr.reset_gripper(lr, gdata['tfms'], gdata['ar'], gdata['hydra'])
        gprs[lr] = gr
    
    ik_file_data = {}
    ik_file_data['pr2'] = tfm_pr2['tfm'].dot(tfm_c1_h)
    lr_long = {'l':'left','r':'right'}
    for lr in gprs:
        ik_file_data[lr_long[lr]] = gprs[lr].get_rel_transform(lr_long[lr], 'tool_tip')    

    
    yellowprint("Saving IK feedback data in file...")
    ik_file = osp.join(feedback_dir, ik_feedback_name)
    with open(ik_file,'w') as fh: cPickle.dump(ik_file_data, fh)
    yellowprint("Saved!")
    if args.demo_name != '':
        if args.use_traj:
            view_tracking_on_rviz(demo_type=args.demo_type, demo_name=args.demo_name, tps_model_fname=args.tps_fname,
                                  freq=args.freq, speed=args.speed, 
                                  use_smoother=args.use_smoother, prompt=args.prompt, verbose=args.verbose)
        else:
            if args.hydra_only:
                view_hydra_demo_on_rviz(demo_type=args.demo_type, demo_name=args.demo_name, 
                                        freq=args.freq, speed=args.speed, prompt=args.prompt, verbose=args.verbose)
            else:
                view_demo_on_rviz(demo_type=args.demo_type, demo_name=args.demo_name, 
                                  freq=args.freq, speed=args.speed, 
                                  main=args.main, prompt=args.prompt, verbose=args.verbose)
    
        if args.prompt_delete and yes_or_no('Do you want to delete %s?'%args.demo_name):
            delete_demo(args.demo_type, args.demo_name)
    else:
        first = args.first
        last = args.last
        
        demo_type_dir = osp.join(demo_files_dir, args.demo_type)
        
        if args.last == -1:
            latest_demo_file = osp.join(demo_type_dir, latest_demo_name)
            if osp.isfile(latest_demo_file):
                with open(latest_demo_file,'r') as fh:
                    last = int(fh.read()) + 1
            else:
                redprint("No demos!")
    demos[-1]["description"] = "final frame"
    demos[-1]["name"] = "done"
            
    print "writing to %s"%ann_file
    with open(ann_file, "w") as fh:
        yaml.dump(demos, fh)

if __name__=='__main__':
    parser = argparse.ArgumentParser()
    parser.add_argument("--demo_type",help="Type of demonstration")
    parser.add_argument("--demo_name",help="Name of demo", default='', type=str)
    args = parser.parse_args()
    
    if args.demo_name != '':
        if osp.isfile(osp.join(demo_type_dir, args.demo_name, demo_names.ann_name)):
            if yes_or_no('Annotation file already exists for this demo. Overwrite?'):
                generate_annotation(args.demo_type, args.demo_name)
        else:
            generate_annotation(args.demo_type, args.demo_name)
    else: 
        # if args.demo_name == '', run generate annotation for all the demos in the directory
        demo_type_dir = osp.join(demo_files_dir, args.demo_type)
        demo_master_file = osp.join(demo_type_dir, master_name)
        
        with open(demo_master_file, 'r') as fh:
            demos_info = yaml.load(fh)

        for demo in demos_info["demos"]:
            if not osp.isfile(osp.join(demo_type_dir, demo["demo_name"], demo_names.ann_name)):
                generate_annotation(args.demo_type, demo["demo_name"])
            else:
Example 17
def calibrate_cameras ():
    global tfm_pub, cameras, tf_listener
    
    tfm_base_kinect = get_robot_kinect_transform()
    if yes_or_no('Calibrate again?'):
        greenprint("Step 1. Calibrating multiple cameras.")
        cam_calib = CameraCalibrator(cameras)
        done = False
        while not done:
            cam_calib.calibrate(CAM_N_OBS, CAM_N_AVG)
            if cameras.num_cameras == 1:
                break
            if not cam_calib.calibrated:
                redprint("Camera calibration failed.")
                cam_calib.reset_calibration()
            else:
                tfm_reference_camera = cam_calib.get_transforms()[0]
                tfm_reference_camera['child'] = 'camera_link'
    
                tfm_base_reference = {}
                tfm_base_reference['child'] = tfm_reference_camera['parent']
                tfm_base_reference['parent'] = 'base_footprint'
                tfm_base_reference['tfm'] = nlg.inv(tfm_reference_camera['tfm'].dot(nlg.inv(tfm_base_kinect)))
                tfm_pub.add_transforms([tfm_base_reference])
                if yes_or_no("Are you happy with the calibration? Check RVIZ."):
                    done = True
                else:
                    yellowprint("Calibrating cameras again.")
                    cam_calib.reset_calibration()
    
        greenprint("Cameras calibrated.")
    else:
        tfm_pub.load_calibration('cam_pr2')

    
    if yes_or_no('Get PR2 Gripper?'):
        # Finding transform between PR2's gripper and the tool_tip
        marker_id = 1
        camera_id = 1
        n_avg = 100
        tfms_camera_marker = []
        for i in xrange(n_avg):
            tfms_camera_marker.append(cameras.get_ar_markers([marker_id], camera=camera_id)[marker_id])
            time.sleep(0.03)
        tfm_camera_marker = utils.avg_transform(tfms_camera_marker)
    
        calib_file = 'calib_cam_hydra_gripper1'
        file_name = osp.join(calib_files_dir, calib_file)
        with open(file_name, 'r') as fh: calib_data = cPickle.load(fh)
    
        lr, graph = calib_data['grippers'].items()[0]
        gr = gripper.Gripper(lr, graph)
        assert 'tool_tip' in gr.mmarkers
        gr.tt_calculated = True
        
        tfm_marker_tooltip = gr.get_rel_transform(marker_id, 'tool_tip', 0)
    
        i = 10
        tfm_base_gripper = None
        while i > 0:
            try:
                now = rospy.Time.now()
                tf_listener.waitForTransform('base_footprint', 'l_gripper_tool_frame', now, rospy.Duration(5.0))
                (trans, rot) = tf_listener.lookupTransform('base_footprint', 'l_gripper_tool_frame', rospy.Time(0))
                tfm_base_gripper = conversions.trans_rot_to_hmat(trans, rot)
                break
            except (tf.Exception, tf.LookupException, tf.ConnectivityException):
                yellowprint("Failed attempt.")            
                i -= 1
                pass
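        # Compose the tool-tip pose in the gripper frame below:
        # inv(base->gripper) . (base->kinect) . (link->optical frame, presumably tfm_link_rof)
        # . (camera->AR marker) . (marker->tool_tip)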
        if tfm_base_gripper is not None:
            tfm_gripper_tooltip = {'parent':'l_gripper_tool_frame',
                                   'child':'pr2_lgripper_tooltip',
                                   'tfm':nlg.inv(tfm_base_gripper).dot(tfm_base_kinect).dot(tfm_link_rof).dot(tfm_camera_marker).dot(tfm_marker_tooltip)
                                   }
            tfm_pub.add_transforms([tfm_gripper_tooltip])
            greenprint("Gripper to marker found.")
        else:
            redprint("Gripper to marker not found.")
from hd_utils.colorize import yellowprint
from hd_utils.yes_or_no import yes_or_no

from hd_extract import extract_data as ed


if __name__ == "__main__":
    
    parser = argparse.ArgumentParser()
    parser.add_argument("--demo_type", help="type of demonstration", action='store', type=str)
    parser.add_argument("--demo_name", help="name of demonstration", action='store', type=str)
    parser.add_argument("--ar_marker", help="number of ar marker to find", action='store', type=int)
    args = parser.parse_args()


    demo_type_dir = osp.join(demo_files_dir, args.demo_type)
    demo_master_file = osp.join(demo_type_dir, master_name)
    demo_dir = osp.join(demo_type_dir, args.demo_name)
    
    with open(demo_master_file, 'r') as fh:
        demos_info = yaml.load(fh)
            
    if args.demo_name in (demo["demo_name"] for demo in demos_info["demos"]):
        # Check if data file already exists
        if osp.isfile(osp.join(demo_dir, demo_names.init_ar_marker_name)):
            if yes_or_no('Init ar file already exists for this demo. Overwrite?'):
                ed.save_init_ar(args.demo_type, args.demo_name, args.ar_marker)
        else:
            ed.save_init_ar(args.demo_type, args.demo_name, args.ar_marker)

    print "Done extracting init ar marker."
def create_graph_from_observations(parent_frame, calib_info, min_obs=5, n_avg=5, freq=None):
    """
    Runs a loop until the graph has enough data and the user is happy with it,
    or, if a frequency is specified, collects data at that rate until enough has been gathered.

    @parent_frame -- frame to get observations in.
    @calib_info -- dict describing which groups and markers (AR, hydra, phasespace) to search for;
                   also specifies which group is the master group and how the potentiometer angle
                   scales each group.
    @min_obs -- minimum number of observations required for a transform after finding it once.
    @n_avg -- number of times to average transforms per observation.
    @freq -- if given, collect observations at this frequency without prompting.
    """
    global tf_listener
    if rospy.get_name() == "/unnamed":
        rospy.init_node("gripper_marker_calibration")
    tf_listener = tf.TransformListener()
    tf_listener.clear()

    masterGraph = nx.DiGraph()
    ar_markers = []
    hydras = []
    ps_markers = []

    # Setup the groups
    for group in calib_info:
        masterGraph.add_node(group)
        masterGraph.node[group]["graph"] = nx.Graph()
        masterGraph.node[group]["angle_scale"] = calib_info[group]["angle_scale"]

        if calib_info[group].get("ar_markers") is None:
            calib_info[group]["ar_markers"] = []
        if calib_info[group].get("hydras") is None:
            calib_info[group]["hydras"] = []
        if calib_info[group].get("ps_markers") is None:
            calib_info[group]["ps_markers"] = []

        if calib_info[group].get("master_group") is not None:
            masterGraph.node[group]["master"] = 1
            masterGraph.node[group]["angle_scale"] = 0

        masterGraph.node[group]["markers"] = (
            calib_info[group]["ar_markers"] + calib_info[group]["hydras"] + calib_info[group]["ps_markers"]
        )
        masterGraph.node[group]["calib_info"] = calib_info[group]
        ar_markers.extend(calib_info[group]["ar_markers"])
        hydras.extend(calib_info[group]["hydras"])
        ps_markers.extend(calib_info[group]["ps_markers"])

    if freq is not None:
        wait_time = 5
        print "Waiting for %f seconds before collecting data." % wait_time
        time.sleep(wait_time)

    sleeper = rospy.Rate(30)
    count = 0
    while True:
        if freq is None:
            raw_input(colorize("Iteration %d: Press return when ready to capture transforms." % count, "red", True))
        else:
            print colorize("Iteration %d" % count, "red", True)

        avg_tfms = {}
        for j in xrange(n_avg):
            print colorize("\tGetting averaging transform : %d of %d ..." % (j, n_avg - 1), "blue", True)

            tfms = {}
            tfms.update(get_ar_transforms(ar_markers, parent_frame))
            tfms.update(get_hydra_transforms(hydras, parent_frame))
            tfms.update(get_phasespace_transforms(ps_markers, parent_frame))

            pot_angle = get_potentiometer_angle()

            for marker in tfms:
                if marker not in avg_tfms:
                    avg_tfms[marker] = []
                avg_tfms[marker].append(tfms[marker])

            sleeper.sleep()

        for marker in avg_tfms:
            avg_tfms[marker] = utils.avg_transform(avg_tfms[marker])

        update_groups_from_observations(masterGraph, avg_tfms, pot_angle)

        count += 1
        if is_ready(masterGraph, min_obs):
            if freq:
                break
            elif not yes_or_no("Enough data has been gathered. Would you like to gather more data anyway?"):
                break

    print "Finished gathering data in %d iterations." % count
    print "Calibrating for optimal transforms between markers..."
    G_opt = compute_relative_transforms(masterGraph, min_obs=min_obs)
    print "Finished calibrating."
    return G_opt
    hitch_xyz = None
    hitch_pos = None
    if args.has_hitch:
        hitch_normal = clouds.clouds_plane(object_xyz)
        hitch_xyz, hitch_pos = hitch_proc_func(rgb_imgs[0], depth_imgs[0], np.eye(4), hitch_normal)
        hitch_xyz = clouds.downsample(hitch_xyz, .01)
        xyz = np.r_[object_xyz, hitch_xyz]
    else:
        xyz = object_xyz
    
    mlab.figure(0)
    mlab.clf()
    mlab.points3d(xyz[:,0], xyz[:,1], xyz[:,2], color=(1,0,0), scale_factor=.005)
    
    if yes_or_no("Do you want to add this original demo?"):
        perturb_name = str(n_perturb_existed)
        perturb_group = demo_group.create_group(perturb_name)
        perturb_group['cloud_xyz'] = xyz

        if args.has_hitch:
            perturb_group['hitch_pos'] = hitch_pos

        print "add perturb demo %d"%(n_perturb_existed)
        n_perturb_existed += 1

    
    if yes_or_no("Do you want to skip perturbing this demo?"):
        print "skip perturbing demo"
Example 21
def cluster_demos (demofile, costs, n_clusters, visualize=False, save_images=False):
    seg_num = 0
    keys = {}
    for demo_name in demofile:
        if demo_name != "ar_demo":
            for seg_name in demofile[demo_name]:
                if seg_name != 'done':
                    keys[seg_num] = (str(demo_name), str(seg_name))
                    seg_num += 1
    
    print "Generating sim matrix."
    sm = generate_sim_matrix(costs, keys)
    
    print "Getting the cluster rankings"
    cdata = cluster_and_rank_demos(sm, n_clusters)
    
    if visualize:
        names = {i:[] for i in xrange(n_clusters)}
        images = {i:[] for i in xrange(n_clusters)}
        
        for i in cdata:
            names[i] = [get_name(keys[j]) for j in cdata[i]]
            images[i] = [np.asarray(demofile[keys[j][0]][keys[j][1]]["rgb"]) for j in cdata[i]]

        i = 0
        inc = True
        print "Press q to exit, left/right arrow keys to navigate"
        while True:
            if len(images[i]) == 0:
                if i == n_clusters-1: inc = False
                elif i == 0: inc = True
                if inc: i = min(i+1,n_clusters-1)
                else: i = max(i-1,0)                
                continue

            print "Label %i"%(i+1)
            print names[i]
            import math
            ncols = 7
            nrows = int(math.ceil(1.0*len(images[i])/ncols))
            row = cpu.tile_images(images[i], nrows, ncols)
            cv2.imshow("clustering result", row)
            kb = cv2.waitKey()
            if kb == 1113939 or kb == 65363:
                i = min(i+1,n_clusters-1)
                inc = True
            elif kb == 1113937 or kb == 65361:
                i = max(i-1,0)
                inc = False
            elif kb == 1048689 or kb == 113:
                break
        
    if save_images and yes_or_no("Do you want to save images?"):
        cluster_img_dir = '/home/sibi/cluster_image_dir'
        if not osp.exists(cluster_img_dir):
            os.mkdir(cluster_img_dir)
            
        idx = 1
        for i in range(n_clusters):
            if len(images[i]) == 0:                
                continue

            print "Label %i"%(i+1)
            print names[i]
            ncols = 7
            nrows = int(math.ceil(1.0*len(images[i])/ncols))
            row = cpu.tile_images(images[i], nrows, ncols)
            # cv2.imwrite needs a file extension to pick a format; .jpg is assumed here.
            cv2.imwrite(osp.join(cluster_img_dir,'image%02i.jpg'%idx), row)
            idx += 1

    
    return cdata, keys
def main(demo_type, n_base, n_perts, load_sm = False):
	demofile = h5py.File(osp.join(demo_files_dir, demo_type, demo_type+'.h5'), 'r')
	if load_sm:
		sm_file = osp.join(hd_data_dir, similarity_costs_dir, matrix_file%demo_type)
		with open(sm_file, 'r') as f: sm = pickle.load(f)
	else:
		sm = extract_init(demo_type)

	seg_num = 0
	keys = {}
	for demo_name in demofile:
		if demo_name != "ar_demo":
			for seg_name in demofile[demo_name]:
				if seg_name == 'seg00':
					keys[seg_num] = (demo_name, seg_name)
					#print demo_name, seg_name
					seg_num += 1
	
	n_clusters = n_base - len(BASIC_DEMOS)
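	# Presumably n_base demos are kept in total: the hand-picked BASIC_DEMOS plus
	# one representative per remaining cluster (BASIC_DEMOS are prepended to bests below).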
	labels = spectral_clustering(sm, n_clusters = n_clusters, eigen_solver='arpack',assign_labels='discretize')
	names = {i:[] for i in xrange(n_clusters)}
	images = {i:[] for i in xrange(n_clusters)}
	demos = {i:[] for i in xrange(n_clusters)}
	
	for i in xrange(len(labels)):
		label = labels[i]
		names[label].append(keys[i])
		images[label].append(np.asarray(demofile[keys[i][0]][keys[i][1]]["rgb"]))
		demos[label].append(i)

	rows = []
	i = 0
	inc = True
	
	print "Press q to exit, left/right arrow keys to navigate"
	while True:
		if len(images[i]) == 0:
			if i == n_clusters-1: inc = False
			elif i == 0: inc = True
				
			if inc: i = min(i+1,n_clusters-1)
			else: i = max(i-1,0)
			
			continue

		print "Label %i"%(i+1)
		print names[i]
		import math
		ncols = 7
		nrows = int(math.ceil(1.0*len(images[i])/ncols))

		row = cpu.tile_images(images[i], nrows, ncols)
		rows.append(np.asarray(row))
		cv2.imshow("clustering result", row)
		kb = cv2.waitKey()
		if kb == 1113939 or kb == 65363:
			i = min(i+1,n_clusters-1)
			inc = True
		elif kb == 1113937 or kb == 65361:
			i = max(i-1,0)
			inc = False
		elif kb == 1048689 or kb == 113:
			break
	
	bests = find_best(demos, sm)
	for i in BASIC_DEMOS:
		if i in bests:
			bests.remove(i)
	# add basic demos
	bests = BASIC_DEMOS+bests
	print bests

	best_xyz = []
	best_images = {i:None for i in bests}
	for best in bests:
		xyz = np.asarray(demofile[keys[best][0]][keys[best][1]]["cloud_xyz"])
		best_xyz.append(xyz)
		best_images[best] = np.asarray(demofile[keys[best][0]][keys[best][1]]["rgb"])
		

	print"Found %i clouds."%len(bests)
	
	print "These are the demos being saved."

	ncols = 10
	nrows = int(math.ceil(1.0*len(bests)/ncols))
	row = cpu.tile_images(best_images.values(), nrows, ncols)
	cv2.imshow("best", row)
	kb = cv2.waitKey()
	
	if not yes_or_no("Do these look fine to you?"):
		return
	
	remaining = n_base-len(bests)
	print "remaining:", remaining
	while remaining > 0:
		fig = pylab.figure()
		xyz = best_xyz[remaining-1]
		while True:
			fig.clf()
			perturbed_xyz = sample_random_rope(xyz, True)
			ax = fig.gca(projection='3d')
			ax.set_autoscale_on(False)
			ax.plot(perturbed_xyz[:,0], perturbed_xyz[:,1], perturbed_xyz[:,2], 'o')   
			fig.show()
			cv2.imshow("pert", best_images[bests[remaining-1]])
			kb = cv2.waitKey()
			if yes_or_no("Does this pert. look fine to you?"):
				best_xyz.append(perturbed_xyz)
				remaining -= 1
				break
		
	
	if n_perts != 0:		
		fig = pylab.figure()
		fig2 = pylab.figure()
		for i in xrange(len(best_xyz)):
			xyz = best_xyz[i]
			n_p = n_perts
			while n_p > 0:
				perturbed_xyz = sample_random_rope(xyz, True)
				ax = fig.gca(projection='3d')
				ax.set_autoscale_on(False)
				ax.plot(perturbed_xyz[:,0], perturbed_xyz[:,1], perturbed_xyz[:,2], 'o')   
				fig.show()
				if i < len(bests):
					cv2.imshow("pert", best_images[bests[i]])
					kb = cv2.waitKey()
				else:
					ax = fig2.gca(projection='3d')
					ax.set_autoscale_on(False)
					ax.plot(xyz[:,0], xyz[:,1], xyz[:,2], 'o')   
					fig2.show()
				if not yes_or_no("Does this pert. look fine to you?"):
					best_xyz.append(perturbed_xyz)
					n_p -= 1
			
	pickle.dump(best_xyz, open(osp.join(demo_files_dir, perturbation_file), 'wb'))

	return