Code Example #1
File: handeyecalibration.py  Project: rcory/spartan
    def saveCalibrationData(self, filename=None):
        if filename is None:
            filename = os.path.join(spartanUtils.getSpartanSourceDir(),
                                    'sandbox',
                                    'hand_eye_calibration_robot_data.yaml')

        spartanUtils.saveToYaml(self.calibrationData, filename)
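
Every example on this page funnels its result through spartanUtils.saveToYaml (and reads YAML back with getDictFromYamlFilename). Those helpers are not part of this listing; as a rough mental model they are presumably thin wrappers around PyYAML, along the lines of the sketch below. Only the call signatures come from the examples on this page; the bodies are an assumption.

import yaml

# Hedged sketch of the two helpers used throughout these examples.
# Only the signatures saveToYaml(data, filename) and
# getDictFromYamlFilename(filename) are taken from the examples;
# the implementations here are assumptions, not spartan's actual code.

def saveToYaml(data, filename):
    # serialize a (possibly nested) dict to a YAML file
    with open(filename, 'w') as f:
        yaml.safe_dump(data, f, default_flow_style=False)

def getDictFromYamlFilename(filename):
    # load a YAML file back into a Python dict
    with open(filename, 'r') as f:
        return yaml.safe_load(f)
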
Code Example #2
    def downsample_by_pose_difference_threshold(images_dir_full_path, threshold):
        pose_yaml = os.path.join(images_dir_full_path, "pose_data.yaml")
        pose_dict = spartanUtils.getDictFromYamlFilename(pose_yaml)

        images_dir_temp_path = os.path.join(os.path.dirname(images_dir_full_path), 'images_temp')
        if not os.path.isdir(images_dir_temp_path):
            os.makedirs(images_dir_temp_path)
        
        previous_pose = FusionServer.get_numpy_position_from_pose(pose_dict[0])

        print "Using downsampling by pose difference threshold... "
        print "Previously: ", len(pose_dict), " images"

        num_kept_images    = 0
        num_deleted_images = 0

        for i in range(0,len(pose_dict)):
            single_frame_data = pose_dict[i]
            this_pose = FusionServer.get_numpy_position_from_pose(pose_dict[i])

            if i == 0:
                keep_image = True
                num_kept_images += 1
            elif np.linalg.norm(previous_pose - this_pose) > threshold:
                previous_pose = this_pose
                num_kept_images += 1
            else:
                # delete pose from forward kinematics
                del pose_dict[i]
                continue

            # if we have gotten here, then move the images over to the new directory

            rgb_filename = os.path.join(images_dir_full_path, single_frame_data['rgb_image_filename'])
            rgb_filename_temp = os.path.join(images_dir_temp_path, single_frame_data['rgb_image_filename'])

            shutil.move(rgb_filename, rgb_filename_temp)

            depth_filename = os.path.join(images_dir_full_path, single_frame_data['depth_image_filename'])
            depth_filename_temp = os.path.join(images_dir_temp_path, single_frame_data['depth_image_filename'])
            shutil.move(depth_filename, depth_filename_temp)
            # # delete pose from posegraph
            # del posegraph_list[i-num_deleted_images]
            # num_deleted_images += 1

        # write downsampled pose_data.yaml (forward kinematics)
        spartanUtils.saveToYaml(pose_dict, os.path.join(images_dir_temp_path,'pose_data.yaml'))

        # remove old images
        shutil.move(os.path.join(images_dir_full_path, 'camera_info.yaml'), os.path.join(images_dir_temp_path, 'camera_info.yaml'))
        shutil.rmtree(images_dir_full_path)

        print "renaming %s to %s " %(images_dir_temp_path, images_dir_full_path)

        # rename temp images to images
        os.rename(images_dir_temp_path, images_dir_full_path)

        print "After: ", num_kept_images, " images"
Code Example #3
def extract_intrinsics(path_to_tar_file, path_to_camera_info_yaml_file,
                       camera_name):
    cmd = 'mkdir -p /tmp/calibrationdata'
    os.system(cmd)
    cmd = 'tar xvf ' + path_to_tar_file + ' -C ' + '/tmp/calibrationdata' + ' > /dev/null 2>&1'
    os.system(cmd)

    camera_info_dict = spartanUtils.getDictFromYamlFilename(
        '/tmp/calibrationdata/ost.yaml')

    print camera_info_dict

    camera_info_dict['camera_name'] = camera_name

    spartanUtils.saveToYaml(camera_info_dict, path_to_camera_info_yaml_file)
Code Example #4
def extract_and_fuse_single_scene(
        log_full_path,
        downsample=True,
        linear_distance_threshold=LINEAR_DISTANCE_THRESHOLD,
        angle_distance_threshold=ANGLE_DISTANCE_THRESHOLD):
    """
    Extracts all the images from the rosbag.
    :param log_full_path:
    :param downsample: whether or not to downsample
    :param linear_distance_threshold: threshold used in downsampling
    :param angle_distance_threshold: threshold used in downsampling
    :return:
    """
    global fs
    if fs is None:
        fs = FusionServer()

    print "extracting and fusing scene:", log_full_path
    log = os.path.split(log_full_path)[-1]

    mem()

    processed_dir = os.path.join(log_full_path, 'processed')
    images_dir = os.path.join(processed_dir, 'images')
    raw_dir = os.path.join(log_full_path, 'raw')
    raw_close_up_dir = os.path.join(log_full_path, 'raw_close_up')
    bag_filepath = os.path.join(log_full_path, 'raw', 'fusion_' + log + '.bag')
    bag_close_up_filepath = os.path.join(raw_close_up_dir,
                                         'fusion_' + log + '.bag')
    pose_data_filename = os.path.join(images_dir, 'pose_data.yaml')
    metadata_filename = os.path.join(images_dir, 'metadata.yaml')

    if not already_extracted_rosbag(log_full_path):
        print "extracting", bag_filepath

        # run this memory intensive step in a process, to allow
        # it to release memory back to the operating system
        # when finished
        proc = mp.Process(target=extract_data_from_rosbag,
                          args=(bag_filepath, images_dir))
        proc.start()
        proc.join()

        print "finished extracting", bag_filepath
        # counter_new_extracted += 1
    else:
        print "already extracted", bag_filepath

    # we need to free some memory
    gc.collect()
    mem()

    if not already_ran_tsdf_fusion(log_full_path):
        print "preparing for tsdf_fusion for", log_full_path
        tsdf_fusion.format_data_for_tsdf(images_dir)
        gc.collect()

        print "running tsdf fusion"
        fusion_params = FUSION_CONFIG[FUSION_LOCATION]
        tsdf_fusion.run_tsdf_fusion_cuda(
            images_dir,
            bbox_min=fusion_params['bbox_min'],
            bbox_max=fusion_params['bbox_max'],
            voxel_size=fusion_params['voxel_size'])
        gc.collect()

        print "converting tsdf to ply"
        tsdf_bin_filename = os.path.join(processed_dir, 'tsdf.bin')
        tsdf_mesh_filename = os.path.join(processed_dir, 'fusion_mesh.ply')
        tsdf_fusion.convert_tsdf_to_ply(tsdf_bin_filename, tsdf_mesh_filename)
        # counter_new_fused += 1
    else:
        print "already ran tsdf_fusion for", log_full_path

    if not already_downsampled(log_full_path) and downsample:
        print "downsampling image folder"

        # fs = FusionServer()
        fs.downsample_by_pose_difference_threshold(images_dir,
                                                   linear_distance_threshold,
                                                   angle_distance_threshold)
        # counter_new_downsampled += 1

        # create metadata.yaml file if it doesn't yet exist
        if not os.path.exists(metadata_filename):
            pose_data = spartanUtils.getDictFromYamlFilename(
                pose_data_filename)
            original_image_indices = pose_data.keys()

            metadata = dict()
            metadata['original_image_indices'] = original_image_indices
            metadata['close_up_image_indices'] = []
            spartanUtils.saveToYaml(metadata, metadata_filename)

    else:
        print "already downsampled for", log_full_path

    # extract the close-up log if it exists
    if os.path.exists(
            bag_close_up_filepath
    ) and not already_extracted_close_up_log(metadata_filename):
        print "\n\n-----------extracting close up images----------\n\n"
        print "extracting", bag_close_up_filepath
        close_up_temp_dir = os.path.join(processed_dir, 'images_close_up')

        # run this memory intensive step in a process, to allow
        # it to release memory back to the operating system
        # when finished
        proc = mp.Process(target=extract_data_from_rosbag,
                          args=(bag_close_up_filepath, close_up_temp_dir))
        proc.start()
        proc.join()

        print "downsampling image folder %s" % (close_up_temp_dir)
        if downsample:
            fs.downsample_by_pose_difference_threshold(
                close_up_temp_dir, linear_distance_threshold,
                angle_distance_threshold)

        pose_data_close_up_filename = os.path.join(close_up_temp_dir,
                                                   'pose_data.yaml')
        pose_data_close_up = spartanUtils.getDictFromYamlFilename(
            pose_data_close_up_filename)

        pose_data = spartanUtils.getDictFromYamlFilename(pose_data_filename)
        original_image_indices = pose_data.keys()
        largest_original_image_idx = max(original_image_indices)

        close_up_image_indices = []

        print "largest_original_image_idx", largest_original_image_idx

        for img_idx, data in pose_data_close_up.iteritems():
            # just to give some buffer and avoid edge cases in some checks
            img_idx_new = img_idx + largest_original_image_idx + 4
            rgb_filename = "%06i_%s.png" % (img_idx_new, "rgb")
            depth_filename = "%06i_%s.png" % (img_idx_new, "depth")

            rgb_filename_prev = data['rgb_image_filename']
            depth_filename_prev = data['depth_image_filename']

            data['rgb_image_filename'] = rgb_filename
            data['depth_image_filename'] = depth_filename

            pose_data[img_idx_new] = data
            close_up_image_indices.append(img_idx_new)

            # move images around
            shutil.move(os.path.join(close_up_temp_dir, rgb_filename_prev),
                        os.path.join(images_dir, rgb_filename))

            shutil.move(os.path.join(close_up_temp_dir, depth_filename_prev),
                        os.path.join(images_dir, depth_filename))

        shutil.rmtree(close_up_temp_dir)

        original_image_indices.sort()
        close_up_image_indices.sort()
        spartanUtils.saveToYaml(pose_data, pose_data_filename)

        metadata = dict()
        metadata['normal_image_indices'] = original_image_indices
        metadata['close_up_image_indices'] = close_up_image_indices
        spartanUtils.saveToYaml(metadata, metadata_filename)

    else:
        if os.path.exists(bag_close_up_filepath):
            print "already extracted close up log, skipping"
        else:
            print "raw_close_up doesn't exist, skipping"
Code Example #5
File: handeyecalibration.py  Project: rcory/spartan
def processCalibrationData(posegraph_file, robot_data_filename,
                           save_data_filename):
    cameraPoses = CameraPoses(posegraph_file)
    calibrationData = spartanUtils.getDictFromYamlFilename(robot_data_filename)
    calibrationData = addCameraPosesToDataFile(calibrationData, cameraPoses)
    spartanUtils.saveToYaml(calibrationData, save_data_filename)
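
For context, a call to processCalibrationData might look like the following; the file paths are purely hypothetical and only illustrate which artifact goes into which argument.

# Hypothetical usage; these paths are illustrative, not taken from the repository.
processCalibrationData(
    posegraph_file='calibration_data/20180101-120000_rgb/posegraph.posegraph',
    robot_data_filename='calibration_data/20180101-120000_rgb/robot_data.yaml',
    save_data_filename='calibration_data/20180101-120000_rgb/robot_data_processed.yaml')
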
Code Example #6
File: handeyecalibration.py  Project: rcory/spartan
    def runROSCalibration(self, headerData):

        headerData['target'][
            'location_estimate_in_robot_base_frame'] = self.calibrationPosesConfig[
                'target_location']
        calibrationRunData = dict()
        calibrationRunData['header'] = headerData

        # set up a passive subscriber for the IR or RGB data
        topicName = None
        self.passiveSubscriber = None
        if self.captureRGB:
            topicName = self.config['rgb_raw_topic']

        if self.captureIR:
            topicName = self.config['ir_raw_topic']

        if topicName:
            self.passiveSubscriber = spartanROSUtils.SimpleSubscriber(
                topicName, sensor_msgs.msg.Image)
            self.passiveSubscriber.start()

        unique_name = time.strftime(
            "%Y%m%d-%H%M%S") + "_" + self.calibrationType
        self.calibrationFolderName = os.path.join(
            spartanUtils.getSpartanSourceDir(), 'calibration_data',
            unique_name)
        os.system("mkdir -p " + self.calibrationFolderName)
        os.chdir(self.calibrationFolderName)

        poseDict = self.computeCalibrationPoses()
        self.drawResult(poseDict)
        rospy.loginfo("finished making calibration poses")
        rospy.loginfo("starting calibration run")

        calibrationData = []

        # topic = self.config['rgb_raw_topic']
        # msgType = sensor_msgs.msg.Image
        # self.subscribers = dict()
        # self.subscribers[topic] = spartanROSUtils.SimpleSubscriber(topic, msgType)
        # self.subscribers[topic].start()

        for pose in poseDict['feasiblePoses']:
            # move robot to that joint position
            rospy.loginfo("\n moving to pose")
            self.robotService.moveToJointPosition(pose['joint_angles'])

            rospy.loginfo("capturing images and robot data")
            data = self.captureCurrentRobotAndImageData(
                captureRGB=self.captureRGB, captureIR=self.captureIR)
            calibrationData.append(data)

        rospy.loginfo("finished calibration routine, saving data to file")

        calibrationRunData['data_list'] = calibrationData

        spartanUtils.saveToYaml(
            calibrationRunData,
            os.path.join(self.calibrationFolderName, 'robot_data.yaml'))

        if self.passiveSubscriber:
            self.passiveSubscriber.stop()

        return calibrationRunData
Code Example #7
data_in = json.load(open(stored_poses_json_source))
#pprint(data_in)

stored_poses_yaml_destination = os.path.join(
    spartanUtils.getSpartanSourceDir(), 'src', 'catkin_projects',
    'station_config', 'RLG_iiwa_1', 'stored_poses.yaml')
data_out = dict()

joint_names = [
    'iiwa_joint_1', 'iiwa_joint_2', 'iiwa_joint_3', 'iiwa_joint_4',
    'iiwa_joint_5', 'iiwa_joint_6', 'iiwa_joint_7'
]
data_out['header'] = dict()
data_out['header']['robot'] = 'kuka_iiwa'
data_out['header']['num_joints'] = 7
data_out['header']['joint_names'] = joint_names

for group in data_in:
    data_out[str(group)] = dict()
    print "Group is", group
    for pose in data_in[group]:
        pose_name = pose['name']
        print "pose_name is", pose_name
        data_out[str(group)][str(pose_name)] = []
        for k in joint_names:
            joint_angle = pose['joints'][k]
            data_out[group][str(pose_name)].append(joint_angle)

print stored_poses_yaml_destination
spartanUtils.saveToYaml(data_out, stored_poses_yaml_destination)
Code Example #8
    def downsample_by_pose_difference_threshold(images_dir_full_path,
                                                linear_distance_threshold,
                                                rotation_angle_threshold):
        """
        Downsamples poses and keeps only those that are sufficiently apart
        :param images_dir_full_path:
        :type images_dir_full_path:
        :param linear_distance_threshold: threshold on the translation, in meters
        :type linear_distance_threshold:
        :param rotation_angle_threshold: threshold on the angle between the rotations, in degrees
        :type rotation_angle_threshold:
        :return:
        :rtype:
        """
        pose_yaml = os.path.join(images_dir_full_path, "pose_data.yaml")
        pose_dict = spartanUtils.getDictFromYamlFilename(pose_yaml)

        images_dir_temp_path = os.path.join(
            os.path.dirname(images_dir_full_path), 'images_temp')
        if not os.path.isdir(images_dir_temp_path):
            os.makedirs(images_dir_temp_path)

        previous_pose_pos = FusionServer.get_numpy_position_from_pose(
            pose_dict[0])
        previous_pose_quat = FusionServer.get_quaternion_from_pose(
            pose_dict[0])

        print "Using downsampling by pose difference threshold... "
        print "Previously: ", len(pose_dict), " images"

        num_kept_images = 0
        num_deleted_images = 0

        for i in range(0, len(pose_dict)):
            single_frame_data = pose_dict[i]
            this_pose_pos = FusionServer.get_numpy_position_from_pose(
                pose_dict[i])
            this_pose_quat = FusionServer.get_quaternion_from_pose(
                pose_dict[i])

            linear_distance = np.linalg.norm(this_pose_pos - previous_pose_pos)

            rotation_distance = spartanUtils.compute_angle_between_quaternions(
                this_pose_quat, previous_pose_quat)

            if i == 0:
                keep_image = True
                num_kept_images += 1
            elif (linear_distance > linear_distance_threshold) or (
                    np.rad2deg(rotation_distance) > rotation_angle_threshold):
                previous_pose_pos = this_pose_pos
                previous_pose_quat = this_pose_quat
                num_kept_images += 1
            else:
                # delete pose from forward kinematics
                del pose_dict[i]
                continue

            # if we have gotten here, then move the images over to the new directory

            rgb_filename = os.path.join(
                images_dir_full_path, single_frame_data['rgb_image_filename'])
            rgb_filename_temp = os.path.join(
                images_dir_temp_path, single_frame_data['rgb_image_filename'])

            shutil.move(rgb_filename, rgb_filename_temp)

            depth_filename = os.path.join(
                images_dir_full_path,
                single_frame_data['depth_image_filename'])
            depth_filename_temp = os.path.join(
                images_dir_temp_path,
                single_frame_data['depth_image_filename'])
            shutil.move(depth_filename, depth_filename_temp)
            # # delete pose from posegraph
            # del posegraph_list[i-num_deleted_images]
            # num_deleted_images += 1

        # write downsampled pose_data.yaml (forward kinematics)
        spartanUtils.saveToYaml(
            pose_dict, os.path.join(images_dir_temp_path, 'pose_data.yaml'))

        # remove old images
        shutil.move(os.path.join(images_dir_full_path, 'camera_info.yaml'),
                    os.path.join(images_dir_temp_path, 'camera_info.yaml'))
        shutil.rmtree(images_dir_full_path)

        print "renaming %s to %s " % (images_dir_temp_path,
                                      images_dir_full_path)

        # rename temp images to images
        os.rename(images_dir_temp_path, images_dir_full_path)

        print "After: ", num_kept_images, " images"
Code Example #9
    def process_ros_bag(self, bag, output_dir, rgb_only=False):

        image_topics = []
        for key, topic in self.topics_dict.iteritems():
            image_topics.append(topic)

        # load all the images
        rgb_data = dict()
        rgb_data['msgs'] = []
        rgb_data['cv_img'] = []
        rgb_data['timestamps'] = []

        depth_data = dict()
        depth_data['msgs'] = []
        depth_data['cv_img'] = []
        depth_data['timestamps'] = []
        depth_data['camera_to_world'] = []

        print "image_topics: ", image_topics

        # extract TF information
        tf_t = rosUtils.setup_tf_transformer_from_ros_bag(bag,
                                                          cache_time_secs=3600)

        log_rate = 100

        counter = 0
        for topic, msg, t in bag.read_messages(topics=image_topics):
            counter += 1

            # skip the first 30 images due to transform errors . . .
            # if counter < 30:
            #     continue

            if counter % log_rate == 0:
                print "processing image message %d" % (counter)

            data = None
            if "rgb" in topic:
                cv_img = self.cv_bridge.imgmsg_to_cv2(
                    msg, desired_encoding=self.rgb_encoding)
                data = rgb_data
            elif "depth" in topic:
                cv_img = rosUtils.depth_image_to_cv2_uint16(
                    msg, bridge=self.cv_bridge)
                data = depth_data

                try:
                    # rot is (x, y, z, w)
                    (trans,
                     rot) = tf_t.lookupTransform(self.world_frame,
                                                 self.camera_frame,
                                                 msg.header.stamp)
                except:
                    print "wasn't able to get transform for image message %d, skipping" % (
                        counter)
                    continue

                depth_data['camera_to_world'].append((trans, rot))

            # save the relevant data
            data['msgs'].append(msg)
            data['cv_img'].append(cv_img)
            data['timestamps'].append(msg.header.stamp.to_nsec())

        print "Extracted %d rgb images" % (len(rgb_data['msgs']))
        print "Extracted %d depth images" % (len(depth_data['msgs']))

        rgb_data['timestamps'] = np.array(rgb_data['timestamps'])

        # synchronize the images
        synchronized_rgb_imgs = []
        for idx, stamp in enumerate(depth_data['timestamps']):
            rgb_idx = ImageCapture.lookup_synchronized_image(
                stamp, rgb_data['timestamps'])
            synchronized_rgb_imgs.append(rgb_data['cv_img'][rgb_idx])
            if idx % log_rate == 0:
                print "depth image %d matched to rgb image %d" % (idx, rgb_idx)

        # save to a file
        if not os.path.isdir(output_dir):
            os.makedirs(output_dir)

        pose_data = dict()

        for idx, depth_img in enumerate(depth_data['cv_img']):
            rgb_img = synchronized_rgb_imgs[idx]

            rgb_filename = "%06i_%s.png" % (idx, "rgb")
            rgb_filename_full = os.path.join(output_dir, rgb_filename)

            depth_filename = "%06i_%s.png" % (idx, "depth")
            depth_filename_full = os.path.join(output_dir, depth_filename)

            if idx % log_rate == 0:
                print "writing image %d to file %s" % (idx, rgb_filename)

            cv2.imwrite(rgb_filename_full, rgb_img)
            if not rgb_only:
                cv2.imwrite(depth_filename_full, depth_img)

            pose_data[idx] = dict()
            d = pose_data[idx]
            trans, rot = depth_data['camera_to_world'][idx]
            quat_wxyz = [rot[3], rot[0], rot[1], rot[2]]
            transform_dict = spartanUtils.dictFromPosQuat(trans, quat_wxyz)
            d['camera_to_world'] = transform_dict
            d['timestamp'] = depth_data['timestamps'][idx]
            d['rgb_image_filename'] = rgb_filename
            d['depth_image_filename'] = depth_filename

        spartanUtils.saveToYaml(pose_data,
                                os.path.join(output_dir, 'pose_data.yaml'))

        # extract the camera info msg

        camera_info_msg = None
        for topic, msg, t in bag.read_messages(topics=self.camera_info_topic):
            camera_info_msg = msg
            break

        camera_info_dict = rosUtils.camera_info_dict_from_camera_info_msg(
            camera_info_msg)

        # NOTE: currently the batch_extract_and_fuse_all_scenes.py
        # script checks for the existence of this file (camera_info.yaml)
        # to determine if the extraction process was completed.

        spartanUtils.saveToYaml(camera_info_dict,
                                os.path.join(output_dir, 'camera_info.yaml'))
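
The RGB/depth pairing above goes through ImageCapture.lookup_synchronized_image, which is not shown in this listing. A plausible minimal version, assuming it simply returns the index of the RGB frame whose timestamp is closest to the query depth timestamp:

import numpy as np

def lookup_synchronized_image(query_stamp, timestamps):
    # Assumed behavior: nearest-timestamp lookup. timestamps is the 1-D
    # numpy array built from rgb_data['timestamps'] in the example above.
    return int(np.argmin(np.abs(timestamps - query_stamp)))
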
Code Example #10
def do_main():
    rospy.init_node('simple_teleop', anonymous=True)

    # setup listener for tf2s (used for ee and controller poses)
    tfBuffer = tf2_ros.Buffer()
    listener = tf2_ros.TransformListener(tfBuffer)
    # subscribe to joint states and wait until the full robot state is found
    robotSubscriber = ros_utils.JointStateSubscriber("/joint_states")
    print("Waiting for full kuka state...")
    while len(robotSubscriber.joint_positions.keys()) < 3:
        rospy.sleep(0.1)
    print("got full state")

    # init gripper
    handDriver = SchunkDriver()
    
    # init mouse manager
    mouse_manager = TeleopMouseManager()
    roll_goal = 0.0
    yaw_goal = 0.0
    pitch_goal = 0.0

    # Start by moving to an above-table pregrasp pose that we know
    # EE control will work well from (i.e. far from singularity)

    stored_poses_dict = spartan_utils.getDictFromYamlFilename("../station_config/RLG_iiwa_1/stored_poses.yaml")
    above_table_pre_grasp = stored_poses_dict["Grasping"]["above_table_pre_grasp"]
    
    robotService = ros_utils.RobotService.makeKukaRobotService()
    success = robotService.moveToJointPosition(above_table_pre_grasp, timeout=5)
    print("Moved to position")

    gripper_goal_pos = 0.0
    handDriver.sendGripperCommand(gripper_goal_pos, speed=0.1, timeout=0.01)
    print("sent close goal to gripper")
    time.sleep(2)
    gripper_goal_pos = 0.1
    handDriver.sendGripperCommand(gripper_goal_pos, speed=0.1, timeout=0.01)
    print("sent open goal to gripper")

    frame_name = "iiwa_link_ee" # end effector frame name

    for i in range(10):
        if i == 9:
            print "Couldn't find robot pose"
            sys.exit(0)
        try:
            ee_pose_above_table = ros_utils.poseFromROSTransformMsg(
                tfBuffer.lookup_transform("base", frame_name, rospy.Time()).transform)
            ee_tf_above_table = tf_matrix_from_pose(ee_pose_above_table)
            break
        except (tf2_ros.LookupException, tf2_ros.ConnectivityException, tf2_ros.ExtrapolationException):
            print("Troubling looking up robot pose...")
            time.sleep(0.1)

    # Then kick off task space streaming
    sp = rospy.ServiceProxy('plan_runner/init_task_space_streaming',
        robot_msgs.srv.StartStreamingPlan)
    init = robot_msgs.srv.StartStreamingPlanRequest()
    init.force_guard.append(make_force_guard_msg(20.))
    res = sp(init)
    
    print("Started task space streaming")
    pub = rospy.Publisher('plan_runner/task_space_streaming_setpoint',
        robot_msgs.msg.CartesianGoalPoint, queue_size=1)


    def cleanup():
        rospy.wait_for_service("plan_runner/stop_plan")
        sp = rospy.ServiceProxy('plan_runner/stop_plan',
            std_srvs.srv.Trigger)
        init = std_srvs.srv.TriggerRequest()
        print sp(init)
        print("Done cleaning up and stopping streaming plan")

    rospy.on_shutdown(cleanup)
    br = tf.TransformBroadcaster()
    rate = rospy.Rate(100) # max rate at which control should happen


    ee_tf_last_commanded = np.zeros((4,4))
    def get_initial_pose():
        while not rospy.is_shutdown():
            # get current tf from ros world to ee
            try:
                ee_pose_current = ros_utils.poseFromROSTransformMsg(
                    tfBuffer.lookup_transform("base", frame_name, rospy.Time()).transform)
                ee_tf_last_commanded = tf_matrix_from_pose(ee_pose_current)
                return ee_tf_last_commanded
            except (tf2_ros.LookupException, tf2_ros.ConnectivityException, tf2_ros.ExtrapolationException):
                print("Troubling looking up robot pose...")
                rate.sleep()
                continue

    print ee_tf_last_commanded
    ee_tf_last_commanded = get_initial_pose()
    print ee_tf_last_commanded

    sys.path.append("../imitation_tools/scripts")
    from capture_imitation_data_client import start_bagging_imitation_data_client, stop_bagging_imitation_data_client
    start_bagging_imitation_data_client()
    
    pose_save_counter = 0
    saved_pose_dict = dict()
    saved_pose_counter = 0

    try:

        # control loop
        while not rospy.is_shutdown():
          
            # get teleop mouse events
            events = mouse_manager.get_events()

            if events["r"]:
                print above_table_pre_grasp
                print "that was above_table_pre_grasp"
                success = robotService.moveToJointPosition(above_table_pre_grasp, timeout=3)
                roll_goal = 0.0
                yaw_goal = 0.0
                pitch_goal = 0.0
                ee_tf_last_commanded = get_initial_pose()
                continue

            pose_save_counter += 1
            if events["o"] and pose_save_counter >= 100: # this make it not happen to much
                joint_name_list = ['iiwa_joint_1', 'iiwa_joint_2', 'iiwa_joint_3', 'iiwa_joint_4', 'iiwa_joint_5', 'iiwa_joint_6', 'iiwa_joint_7']
                joint_position_vector = robotSubscriber.get_position_vector_from_joint_names(joint_name_list)
                print joint_position_vector
                print "joint positions saved"
                new_pose_name = "pose_"+str(saved_pose_counter).zfill(3)
                saved_pose_counter += 1
                saved_pose_dict[new_pose_name] = joint_position_vector.tolist()
                pose_save_counter = 0

            if events["escape"]:
                stop_bagging_imitation_data_client()
                if len(saved_pose_dict) > 0:
                    spartan_utils.saveToYaml(saved_pose_dict, "saved_poses.yaml")
                sys.exit(0)
            
            scale_down = 0.0001
            delta_x = events["delta_x"]*scale_down
            delta_y = events["delta_y"]*-scale_down

            delta_forward = 0.0
            forward_scale = 0.001
            if events["w"]:
                delta_forward -= forward_scale
            if events["s"]:
                delta_forward += forward_scale

            # extract and normalize quat from tf
            if events["rotate_left"]:
                roll_goal += 0.01
            if events["rotate_right"]:
                roll_goal -= 0.01
            roll_goal = np.clip(roll_goal, a_min = -0.9, a_max = 0.9)
            
            if events["side_button_back"]:
                yaw_goal += 0.01
                print("side button back")
            if events["side_button_forward"]:
                yaw_goal -= 0.01
                print("side side_button_forward")
            yaw_goal = np.clip(yaw_goal, a_min = -1.314, a_max = 1.314)

            if events["d"]:
                pitch_goal += 0.01
            if events["a"]:
                pitch_goal -= 0.01
            pitch_goal = np.clip(pitch_goal, a_min = -1.314, a_max = 1.314)


            R = transformations.euler_matrix(pitch_goal, roll_goal, yaw_goal, 'syxz')
            # third is "yaw", when in above table pre-grasp
            # second is "roll", ''
            # first must be "pitch"

            above_table_quat_ee = transformations.quaternion_from_matrix(R.dot(ee_tf_above_table))
            above_table_quat_ee = np.array(above_table_quat_ee) / np.linalg.norm(above_table_quat_ee)

            # calculate controller position delta and add to start position to get target ee position
            target_translation = np.asarray([delta_forward, delta_x, delta_y])
            empty_matrix = np.zeros_like(ee_tf_last_commanded)
            empty_matrix[:3, 3] = target_translation
            ee_tf_last_commanded += empty_matrix
            target_trans_ee = ee_tf_last_commanded[:3, 3]
            

            # publish target pose as cartesian goal point
            new_msg = robot_msgs.msg.CartesianGoalPoint()
            new_msg.xyz_point.header.stamp = rospy.Time.now()
            new_msg.xyz_point.header.frame_id = "world"
            new_msg.xyz_point.point.x = target_trans_ee[0]
            new_msg.xyz_point.point.y = target_trans_ee[1]
            new_msg.xyz_point.point.z = target_trans_ee[2]
            new_msg.xyz_d_point.x = 0.0
            new_msg.xyz_d_point.y = 0.0
            new_msg.xyz_d_point.z = 0.0
            new_msg.quaternion.w = above_table_quat_ee[0]
            new_msg.quaternion.x = above_table_quat_ee[1]
            new_msg.quaternion.y = above_table_quat_ee[2]
            new_msg.quaternion.z = above_table_quat_ee[3]
            new_msg.roll = roll_goal 
            new_msg.pitch = pitch_goal
            new_msg.yaw = yaw_goal
            new_msg.gain = make_cartesian_gains_msg(5., 10.)
            new_msg.ee_frame_id = frame_name
            pub.publish(new_msg)

            #gripper
            if events["mouse_wheel_up"]:
                gripper_goal_pos += 0.006
            if events["mouse_wheel_down"]:
                gripper_goal_pos -= 0.006
            if gripper_goal_pos < 0:
                gripper_goal_pos = 0.0
            if gripper_goal_pos > 0.1:
                gripper_goal_pos = 0.1
            
            handDriver.sendGripperCommand(gripper_goal_pos, speed=0.2, stream=True)

            rate.sleep()



    except Exception as e:
        print "Suffered exception ", e
Code Example #11
    def saveData(self):
        filename = os.path.join(self.folderName, 'data.yaml')
        spartanUtils.saveToYaml(self.data, filename)
Code Example #12
    def runROSCalibrationFixedMount(self, headerData, topic_name, poses_file):

        headerData['target']['transform_to_hand_frame'] = dict()
        headerData['target']['transform_to_hand_frame']['translation'] = dict()
        headerData['target']['transform_to_hand_frame']['translation'][
            'x'] = 0.0
        headerData['target']['transform_to_hand_frame']['translation'][
            'y'] = 0.0
        headerData['target']['transform_to_hand_frame']['translation'][
            'z'] = 0.0931
        headerData['target']['transform_to_hand_frame']['quaternion'] = dict()
        headerData['target']['transform_to_hand_frame']['quaternion'][
            'x'] = 0.49894425
        headerData['target']['transform_to_hand_frame']['quaternion'][
            'y'] = 0.50080633
        headerData['target']['transform_to_hand_frame']['quaternion'][
            'z'] = 0.51116578
        headerData['target']['transform_to_hand_frame']['quaternion'][
            'w'] = -0.48883249

        self.calibrationData = dict()
        calibrationRunData = self.calibrationData
        calibrationRunData['header'] = headerData

        self.passiveSubscriber = spartanROSUtils.SimpleSubscriber(
            topic_name, sensor_msgs.msg.Image)
        self.passiveSubscriber.start()

        unique_name = time.strftime(
            "%Y%m%d-%H%M%S") + "_" + self.calibrationType
        self.calibrationFolderName = os.path.join(
            spartanUtils.getSpartanSourceDir(), 'calibration_data',
            unique_name)
        os.system("mkdir -p " + self.calibrationFolderName)
        os.chdir(self.calibrationFolderName)

        rospy.loginfo("starting calibration run")

        calibrationData = []
        poses = spartanUtils.getDictFromYamlFilename(poses_file)
        num_poses = len(poses)

        for index, pose_name in enumerate(sorted(poses.keys())):
            # move robot to that joint position
            info_msg = "\n moving to pose " + str(index) + " of " + str(
                num_poses) + " named " + pose_name
            rospy.loginfo(info_msg)
            self.robotService.moveToJointPosition(poses[pose_name])

            rospy.loginfo("capturing images and robot data")
            data = self.captureCurrentRobotAndImageData(
                captureRGB=self.captureRGB,
                captureIR=self.captureIR,
                prefix=str(index))
            calibrationData.append(data)

        rospy.loginfo("finished calibration routine, saving data to file")

        calibrationRunData['data_list'] = calibrationData

        spartanUtils.saveToYaml(
            calibrationRunData,
            os.path.join(self.calibrationFolderName, 'robot_data.yaml'))

        if self.passiveSubscriber:
            self.passiveSubscriber.stop()

        self.moveHome()

        return calibrationRunData
Code Example #13
def save_transform_to_file(transform):
    filename = os.path.join(spartan_utils.get_sandbox_dir(), "transform.yaml")
    d = poseFromTransform(transform)
    spartan_utils.saveToYaml(d, filename)
    print("saved transform to %s" % (filename))