Example #1
def main(config_file, task_config_file, output_file, run_name):
    # Cluster the mesh objects listed in the task config using the initial policies it
    # defines, periodically dumping results to XML. (utils, np, MeshObject, and
    # PolicyCompressor are assumed to be imported by the surrounding module; only
    # main() is shown here.)
    config = utils.config_from_yaml_file(config_file)
    # NOTE: an earlier (commented-out) variant built the initial policies from
    # config["initial_policies"]; this version reads them from the task config's
    # "clusters" section below instead.

    # init all objects
    objs_config = utils.config_from_yaml_file(task_config_file)

    initial_policies = []
    for policy_name in objs_config["clusters"]:
        policy_param = objs_config["clusters"][policy_name]

        if policy_param['params'] is None:
            policy_param['params'] = dict()
        # add name to the parameter
        policy_param['params']["policy_name"] = policy_name
        policy_class, policy_config = utils.import_class_from_config(
            policy_param)
        policy = policy_class(policy_config)
        initial_policies.append(policy)
    print(f"find {len(initial_policies)} initial policies")

    # shuffle the object order before clustering
    obj_list = list(objs_config['objs'].items())
    np.random.shuffle(obj_list)

    objects_to_cluster = []
    for obj_name, obj_config in obj_list:
        obj_config_class = MeshObject.Config().update(obj_config)
        obj = MeshObject(obj_config_class, obj_name)
        objects_to_cluster.append(obj)
    print(f"find {len(objects_to_cluster)} objects to cluster")

    updated_config = PolicyCompressor.Config().update(config)
    compressor = PolicyCompressor(updated_config,
                                  initial_policies=initial_policies,
                                  output_file=output_file,
                                  run_name=run_name)

    nmeshes = len(objects_to_cluster)
    for mesh_id, meshobj in enumerate(objects_to_cluster):
        print(
            f"============ {run_name} processing object {mesh_id}/{nmeshes} ============ "
        )
        compressor.add_object(meshobj)
        if (mesh_id + 1) % 5 == 0:
            compressor.output_xml()
    compressor.output_xml()
    compressor.print_policy_summary()
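
The loops above touch only two top-level sections of the task config: "clusters" (one entry per initial policy, each with a 'params' block that gets the policy name injected) and "objs" (one entry per mesh object). A plausible shape of that file, written here as the Python dict that utils.config_from_yaml_file would return, is sketched below; the "class" key and every concrete value are illustrative assumptions, not taken from the source.

# Hypothetical task config for Example #1, shown as the dict a YAML loader would return.
# Only "clusters", "objs", and "params" are visible in the code; the "class" key and all
# values below are assumptions for illustration.
example_task_config = {
    "clusters": {
        "push_policy": {
            "class": "policies.push.PushPolicy",   # assumed dotted class path
            "params": {"max_steps": 50},           # "policy_name" is injected by the loop
        },
    },
    "objs": {
        "mug_0": {
            "mesh_file": "meshes/mug_0.stl",       # assumed MeshObject.Config fields
            "scale": 1.0,
        },
    },
}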
Example #2
def main(config_file, task_config_file, output_file, run_name):
    # Same clustering pipeline as Example #1, but optionally builds a detector from the
    # main config and passes it to every policy constructor. (utils, MeshObject, and
    # PolicyCompressor are assumed to be imported by the surrounding module.)
    config = utils.config_from_yaml_file(config_file)
    # build detector
    if "use_detector" in config and config["use_detector"]:
        detector_param = config["detector"]

        detector_class, detector_config = utils.import_class_from_config(
            detector_param)
        detector = detector_class(detector_config)
    else:
        detector = None

    # init all the policy
    initial_policies = []
    for policy_name in config["initial_policies"]:
        policy_param = config["initial_policies"][policy_name]

        if policy_param['params'] is None:
            policy_param['params'] = dict()
        # add name to the parameter
        policy_param['params']["policy_name"] = policy_name
        policy_class, policy_config = utils.import_class_from_config(
            policy_param)
        policy = policy_class(policy_config, detector)
        initial_policies.append(policy)
    print(f"find {len(initial_policies)} initial policies")

    # init all objects
    objs_config = utils.config_from_yaml_file(task_config_file)
    objects_to_cluster = []
    for obj_name, obj_config in objs_config['objs'].items():
        obj_config_class = MeshObject.Config().update(obj_config)
        obj = MeshObject(obj_config_class, obj_name)
        objects_to_cluster.append(obj)
    print(f"find {len(objects_to_cluster)} objects to cluster")

    updated_config = PolicyCompressor.Config().update(config)
    compressor = PolicyCompressor(updated_config,
                                  initial_policies=initial_policies,
                                  output_file=output_file,
                                  run_name=run_name)

    nmeshes = len(objects_to_cluster)
    for mesh_id, meshobj in enumerate(objects_to_cluster):
        print(
            f"============ {run_name} processing object {mesh_id}/{nmeshes} ============ "
        )
        compressor.add_object(meshobj)
        if (mesh_id + 1) % 5 == 0:
            compressor.output_xml()
    compressor.output_xml()
    compressor.print_policy_summary()
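
utils.import_class_from_config itself is not shown in any of these examples; all that is visible is that it takes one policy or detector entry from the config and returns a class together with a config for that class. Under the assumption that each entry stores a dotted class path under a "class" key, a minimal stand-in could look like the sketch below (this is not the project's actual helper).

import importlib

def import_class_from_config(param):
    # Minimal sketch, assuming param looks like
    # {"class": "policies.push.PushPolicy", "params": {...}}.
    module_path, class_name = param["class"].rsplit(".", 1)
    cls = getattr(importlib.import_module(module_path), class_name)
    return cls, param.get("params") or {}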
Example #3
def main(config_file, object_config_file, data_name):
    # Generate an .npz scene file for every object in the object config, then write the
    # train/val/test split lists. (pcp_utils, random, MeshObject, and DataGen are assumed
    # to be imported by the surrounding module.)
    random.seed(1)

    config = pcp_utils.utils.config_from_yaml_file(config_file)

    objs_config = pcp_utils.utils.config_from_yaml_file(object_config_file)
    objects_to_cluster = []
    for obj_name, obj_config in objs_config['objs'].items():
        updated_obj_config = MeshObject.Config().update(obj_config)
        obj = MeshObject(updated_obj_config, obj_name)
        objects_to_cluster.append(obj)
    print(f"find {len(objects_to_cluster)} objects to cluster")

    data_gen = DataGen(config, data_name=data_name)

    for meshobj in objects_to_cluster:
        print(f'working on {meshobj.name}')
        data_gen.scene_to_npz(meshobj)

    # write the list train_file and test_file
    data_gen.write_train_val_test_files()
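
Every example builds configs the same way: SomeClass.Config().update(some_dict), with the result passed straight to the constructor. The Config classes are not shown here, but that call pattern works if update() copies dict entries onto the instance and returns self, roughly as in the hypothetical sketch below (the field names are illustrative).

class Config:
    # Hypothetical minimal Config: defaults live as class attributes, update() overwrites
    # them from a dict and returns self, which is what allows the one-liner
    # MeshObject.Config().update(obj_config) used throughout these examples.
    mesh_file = None     # illustrative fields only
    scale = 1.0

    def update(self, config_dict):
        for key, value in (config_dict or {}).items():
            setattr(self, key, value)
        return self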
Example #4
def main(config_file, object_config_file, data_name, run_name):
    # Same data generation as Example #3, but with an optional detector and config.repeat
    # passes over the object set. (pcp_utils, random, MeshObject, and DataGen are assumed
    # to be imported by the surrounding module.)
    random.seed(1)

    config = pcp_utils.utils.config_from_yaml_file(config_file)

    if "use_detector" in config and config["use_detector"]:
        detector_param = config["detector"]

        detector_class, detector_config = pcp_utils.utils.import_class_from_config(
            detector_param)
        detector = detector_class(detector_config)
    else:
        detector = None

    objs_config = pcp_utils.utils.config_from_yaml_file(object_config_file)
    objects_to_cluster = []
    for obj_name, obj_config in objs_config['objs'].items():
        updated_obj_config = MeshObject.Config().update(obj_config)
        obj = MeshObject(updated_obj_config, obj_name)
        objects_to_cluster.append(obj)
    print(f"find {len(objects_to_cluster)} objects to cluster")

    data_gen = DataGen(config,
                       data_name=data_name,
                       detector=detector,
                       run_name=run_name)

    nmeshes = len(objects_to_cluster)
    for repeat_id in range(config.repeat):
        for mesh_id, meshobj in enumerate(objects_to_cluster):
            print(
                f'working on {meshobj.name}',
                f"============ run {repeat_id}/{config.repeat}, processing object {mesh_id}/{nmeshes} ============"
            )
            data_gen.scene_to_npz(meshobj, repeat_id)

    # write the list train_file and test_file
    data_gen.write_train_val_test_files()
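
This example and Example #2 both gate the detector on a "use_detector" flag and read its constructor entry from config["detector"], and the repeat loop reads config.repeat as an attribute, so the real config object evidently supports attribute-style access as well as indexing. A dict-level view of those fields might look like the following; the detector class path and all values are illustrative assumptions.

# Hypothetical top-level config fields read by Example #4 (values are illustrative).
example_config = {
    "use_detector": True,
    "detector": {
        "class": "detectors.rgbd.RGBDDetector",  # assumed dotted class path
        "params": {"score_threshold": 0.5},
    },
    "repeat": 3,  # read as config.repeat, so the real config also allows attribute access
}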
Example #5
def main(config_file, task_config_file, output_file):
    # Sample 6-DOF grasps for every object with the 6dof-graspnet estimator and save them,
    # together with the point clouds and environment state, to per-object .npy files.
    # (utils, os, np, tf, grasp_estimator, MeshObject, and Grasp6d_graspnet are assumed to
    # be imported by the surrounding module.)
    config = utils.config_from_yaml_file(config_file)

    # object configurations
    objs_config = utils.config_from_yaml_file(task_config_file)
    objects_to_get_grasps_for = list()
    for obj_name, obj_config in objs_config['objs'].items():
        obj_config_class = MeshObject.Config().update(obj_config)
        obj = MeshObject(obj_config_class, obj_name)
        objects_to_get_grasps_for.append(obj)

    print(f'found {len(objects_to_get_grasps_for)} objects to find grasps for')
    updated_config = Grasp6d_graspnet.Config().update(config)
    grasper = Grasp6d_graspnet(updated_config)

    ##### Prepare the 6dof graspnet network for forward pass ######
    cfg = grasp_estimator.joint_config(
        grasper.vae_checkpoint_folder,
        grasper.evaluator_checkpoint_folder,
    )
    cfg['threshold'] = grasper.grasp_conf_threshold
    cfg['sample_based_improvement'] = 1 - int(
        grasper.gradient_based_refinement)
    cfg['num_refine_steps'] = 10 if grasper.gradient_based_refinement else 20
    estimator = grasp_estimator.GraspEstimator(cfg)
    sess = tf.Session()
    estimator.build_network()
    estimator.load_weights(sess)
    ##### End of 6dof graspnet network preparation ################

    nmeshes = len(objects_to_get_grasps_for)
    for mesh_id, meshobj in enumerate(objects_to_get_grasps_for):
        print(f'---- processing {mesh_id}/{nmeshes} ------')
        obj_pc, pc, pc_colors, env_state = grasper.add_object(meshobj)

        # the object is added, data is collected, processed and returned
        # now sample the grasps, and save them
        latents = estimator.sample_latents()
        generated_grasps, generated_scores, _ = estimator.predict_grasps(
            sess,
            obj_pc,
            latents,
            num_refine_steps=cfg.num_refine_steps,
        )

        print(
            f'------ number of generated grasps are: {len(generated_grasps)} ---------'
        )
        save_file_path = os.path.join(grasper.output_grasps_dir,
                                      f'grasps_{meshobj.name}.npy')
        print(f'---- saving to {save_file_path} -----')

        save_dict = {
            'generated_grasps': generated_grasps,
            'generated_scores': generated_scores,
            'pcd': pc,
            'pcd_color': pc_colors,
            'obj_pcd': obj_pc,
            'env_state': env_state,
        }

        np.save(save_file_path, save_dict)
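
Because np.save is handed a plain Python dict, NumPy stores it as a pickled 0-d object array, so reading a grasp file back later requires allow_pickle=True followed by .item(); the snippet below shows that round trip (the file path is illustrative).

import numpy as np

# Load a grasp file written by the loop above; the path is illustrative.
data = np.load("output_grasps/grasps_mug_0.npy", allow_pickle=True).item()
grasps = data["generated_grasps"]   # grasp poses from the estimator
scores = data["generated_scores"]   # one confidence score per grasp
print(f"loaded {len(grasps)} grasps")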