Ejemplo n.º 1
0
def prepare_config_train(cfg):
    """Compose a descriptive run name from the config and create its log folder.

    The run folder name encodes timestamp, render mode, model settings,
    dataset, view count, fine-tuning flag and solver settings. Mutates
    ``cfg.log.root_path`` to point at the newly created folder.
    """
    tokens = [uio.new_log_folder(cfg.log.root_path, cfg.log.identifier),
              time.strftime('%m_%d-%H_%M')]
    # Record the active rendering modality, if any.
    if cfg.render.draw_color:
        tokens.append('color')
    elif cfg.render.draw_depth:
        tokens.append('depth')
    tokens.append('{}'.format(cfg.model.type))
    tokens.append('c{}'.format(cfg.model.cnn_out_channels))
    tokens.append('{}'.format(cfg.model.fusion_type))
    if cfg.model.fusion_type == 'soft_pool':
        tokens.append('k{}'.format(cfg.view_pool.kernel))
    tokens.append('d{}'.format(cfg.model.desc_dim))
    tokens.append('{}'.format(cfg.train.dataset.name))
    if cfg.render.trainable:
        tokens.append('tr')
    # Effective view count after optional rotation augmentation.
    view_num = cfg.render.view_num
    if cfg.render.augment_rotations:
        view_num *= 4
    elif cfg.render.rotation_num > 0:
        view_num *= cfg.render.rotation_num
    tokens.append('v{}'.format(view_num))
    # Mark runs that fine-tune from an existing checkpoint.
    if eu.is_not_empty(cfg.train.general.ckpt_path):
        tokens.append('ft')
    tokens.append('e{}'.format(cfg.train.solver.epochs))
    tokens.append('{}'.format(cfg.train.solver.optim))

    cfg.log.root_path = osp.join(cfg.log.root_path, '-'.join(tokens))
    uio.may_create_folder(cfg.log.root_path)
Ejemplo n.º 2
0
def run_seq(cfg, scene, seq):
    """Split one sequence's RGB-D frames into fragments and process each.

    The fragment count is derived from the number of depth frames and
    ``cfg.frames_per_frag``; fragments are processed in parallel when
    ``cfg.threads > 1``.
    """
    print("    Start {}".format(seq))

    seq_folder = osp.join(cfg.dataset_root, scene, seq)
    color_names = uio.list_files(seq_folder, '*.color.png')
    color_paths = [osp.join(seq_folder, cf) for cf in color_names]
    depth_names = uio.list_files(seq_folder, '*.depth.png')
    depth_paths = [osp.join(seq_folder, df) for df in depth_names]

    # Fragment count is based on the number of depth frames.
    n_frames = len(depth_paths)
    n_frags = int(math.ceil(float(n_frames) / cfg.frames_per_frag))

    out_folder = osp.join(cfg.out_root, scene, seq)
    uio.may_create_folder(out_folder)

    # Per-scene camera intrinsics shared by every fragment of the sequence.
    intrinsic_path = osp.join(cfg.dataset_root, scene, 'camera-intrinsics.txt')

    if cfg.threads > 1:
        # Fix: dropped the unused `import multiprocessing` the original had.
        from joblib import Parallel, delayed

        Parallel(n_jobs=cfg.threads)(
            delayed(process_single_fragment)(cfg, color_paths, depth_paths,
                                             frag_id, n_frags, intrinsic_path,
                                             out_folder)
            for frag_id in range(n_frags))
    else:
        for frag_id in range(n_frags):
            process_single_fragment(cfg, color_paths, depth_paths, frag_id,
                                    n_frags, intrinsic_path, out_folder)

    print("    Finished {}".format(seq))
Ejemplo n.º 3
0
def step_eval_geomreg(engine,
                      batch,
                      render_model=None,
                      desc_model=None,
                      device=None,
                      cfg=None):
    """Compute one descriptor per keypoint of a cloud and save them to disk.

    For each keypoint center/normal pair, renders views of the cloud,
    runs the descriptor network, and stacks the per-keypoint descriptors
    into a float32 array saved as ``<name>.desc.npy``. Returns the
    output folder path.
    """
    cloud = batch['cloud']
    cloud.to(device)

    all_descs = []
    # Inference only — disable autograd for the whole loop.
    with torch.no_grad():
        for idx in range(len(cloud.at_centers)):
            views = render_model(cloud.points, cloud.radii, cloud.colors,
                                 cloud.at_centers[[idx], :],
                                 cloud.at_normals[[idx], :])
            desc = desc_model(views).cpu().numpy()
            # One keypoint in, one descriptor out.
            assert desc.shape[0] == 1
            all_descs.append(desc[0, :])
    descs = np.asarray(all_descs, dtype=np.float32)

    out_folder = osp.join(cfg.log.root_path, batch['scene'], batch['seq'])
    uio.may_create_folder(out_folder)

    np.save(osp.join(out_folder, batch['name'] + '.desc.npy'), descs)
    return out_folder
Ejemplo n.º 4
0
def run(cfg):
    """Process every scene folder found under the dataset root."""
    print("Start iterating dataset")

    uio.may_create_folder(cfg.out_root)

    # Keep the raw directory order (alphanumeric sorting disabled).
    scene_names = uio.list_folders(cfg.dataset_root, alphanum_sort=False)
    print("{} scenes".format(len(scene_names)))
    for scene_id, scene_name in enumerate(scene_names):
        run_scene(cfg, scene_id, scene_name)

    print("Finished iterating dataset")
Ejemplo n.º 5
0
def run(cfg):
    """Build fragments for every scene found under the dataset root."""
    print("Start making fragments")

    uio.may_create_folder(cfg.out_root)

    # Scene folders are processed in raw filesystem order (no sorting).
    scene_names = uio.list_folders(cfg.dataset_root, sort=False)
    print("{} scenes".format(len(scene_names)))
    for scene_name in scene_names:
        run_scene(cfg, scene_name)

    print("Finished making fragments")
Ejemplo n.º 6
0
def prepare_config_eval(cfg):
    """Resolve which experiment/checkpoint to evaluate and set up its log dir.

    If ``cfg.eval.general.ckpt_path`` is given, the experiment name is its
    parent folder; otherwise the most recent log folder and its latest
    checkpoint are used. Mutates ``cfg.eval.general.ckpt_path`` and
    ``cfg.log.root_path``.
    """
    if eu.is_not_empty(cfg.eval.general.ckpt_path):
        # Explicit checkpoint: the experiment is its containing folder.
        exp_name = str(Path(cfg.eval.general.ckpt_path).parent)
    else:
        # No checkpoint given: fall back to the newest experiment folder
        # and its last saved checkpoint.
        _, exp_name = uio.last_log_folder(cfg.log.root_path,
                                          cfg.log.identifier)
        exp_folder = osp.join(cfg.log.root_path, exp_name)
        ckpt_name = uio.last_checkpoint(exp_folder, eu.PTH_PREFIX)
        cfg.eval.general.ckpt_path = osp.join(exp_folder, ckpt_name)

    assert eu.is_not_empty(exp_name)
    cfg.log.root_path = osp.join(cfg.log.root_path, exp_name)
    uio.may_create_folder(cfg.log.root_path)
Ejemplo n.º 7
0
def downsample_pcds(in_root, out_root, max_points):
    """Transform and downsample every fragment cloud under ``in_root``.

    Each ``cloud_bin_*.ply`` is moved by its matching ``.pose.npy``
    transform, capped at ``max_points`` points, and saved as an ``.npz``
    in ``out_root``. Returns the list of fragment name stems.
    """
    import open3d as o3d
    o3d.utility.set_verbosity_level(o3d.utility.VerbosityLevel.Error)

    uio.may_create_folder(out_root)

    stems = []
    for fname in uio.list_files(in_root, 'cloud_bin_*.ply',
                                alphanum_sort=True):
        stem = fname[:-4]  # drop the '.ply' suffix
        pcd = o3d.io.read_point_cloud(osp.join(in_root, fname))
        # Apply the stored pose so all fragments share one coordinate frame.
        pose = np.load(osp.join(in_root, stem + '.pose.npy'))
        pcd.transform(pose)

        down = Cloud.downsample_from(pcd, max_points)
        down.save(osp.join(out_root, stem + '.npz'))

        stems.append(stem)

    return stems
Ejemplo n.º 8
0
def run_seq(cfg, scene, seq):
    """Compute pairwise overlaps between all fragment clouds of a sequence.

    Skips the whole sequence when its output folder already exists;
    otherwise downsamples every fragment into a temp folder first, then
    computes per-fragment overlaps (parallel when ``cfg.threads > 1``).
    """
    print("    Start {}".format(seq))

    out_folder = osp.join(cfg.out_root, scene, seq)
    # Fix: check for existing results BEFORE the expensive downsampling
    # step. The original downsampled first and only then skipped, wasting
    # the whole downsampling pass on already-finished sequences.
    if osp.exists(out_folder):
        print('    Skip...')
        return
    uio.may_create_folder(out_folder)

    # Downsample each fragment into the temp folder; returns fragment stems.
    pcd_names = downsample_pcds(osp.join(cfg.dataset_root, scene, seq),
                                osp.join(cfg.temp_root, scene, seq),
                                cfg.max_points)
    n_pcds = len(pcd_names)

    if cfg.threads > 1:
        # Fix: dropped the unused `import multiprocessing` the original had.
        from joblib import Parallel, delayed

        Parallel(n_jobs=cfg.threads)(
            delayed(compute_overlap)(cfg, scene, seq, pcd_names, i)
            for i in range(n_pcds))
    else:
        for i in range(n_pcds):
            compute_overlap(cfg, scene, seq, pcd_names, i)

    print("    Finished {}".format(seq))
Ejemplo n.º 9
0
def run_scene_matching(scene_name,
                       seq_name,
                       desc_type,
                       pcloud_root,
                       desc_root,
                       out_root,
                       inlier_thresh=0.1,
                       n_threads=1):
    """Register every fragment pair of one sequence and persist the results.

    Writes a pickle with the full per-pair registration results plus a
    plain-text summary, and returns the output path stem (no extension).
    Skips computation if the pickle already exists.
    """
    out_folder = osp.join(out_root, desc_type)
    uio.may_create_folder(out_folder)

    out_filename = '{}-{}-{:.2f}'.format(scene_name, seq_name, inlier_thresh)
    out_stem = osp.join(out_folder, out_filename)
    # Resume support: a finished sequence already has its pickle on disk.
    if Path(out_stem + '.pkl').is_file():
        print('[*] {} already exists. Skip computation.'.format(out_filename))
        return out_stem

    fragment_names = [
        fn[:-4] for fn in uio.list_files(
            osp.join(pcloud_root, scene_name, seq_name),
            '*.ply',
            alphanum_sort=True)
    ]
    n_fragments = len(fragment_names)

    # One entry per unordered fragment pair (i < j); metrics filled in below.
    register_results = [
        RegisterResult(
            frag1_name=fragment_names[i],
            frag2_name=fragment_names[j],
            num_inliers=None,
            inlier_ratio=None,
            gt_flag=None,
        ) for i in range(n_fragments) for j in range(i + 1, n_fragments)
    ]
    poses = read_log(osp.join(pcloud_root, scene_name, seq_name, 'gt.log'))

    if n_threads > 1:
        from joblib import Parallel, delayed
        import multiprocessing

        pair_metrics = Parallel(n_jobs=n_threads)(
            delayed(register_fragment_pair)(
                scene_name, seq_name, rr.frag1_name, rr.frag2_name,
                desc_type, poses, pcloud_root, desc_root, inlier_thresh)
            for rr in register_results)
        for rr, (num_inliers, inlier_ratio, gt_flag) in zip(
                register_results, pair_metrics):
            rr.num_inliers = num_inliers
            rr.inlier_ratio = inlier_ratio
            rr.gt_flag = gt_flag
    else:
        for rr in register_results:
            rr.num_inliers, rr.inlier_ratio, rr.gt_flag = \
                register_fragment_pair(scene_name, seq_name, rr.frag1_name,
                                       rr.frag2_name, desc_type, poses,
                                       pcloud_root, desc_root, inlier_thresh)

    with open(out_stem + '.pkl', 'wb') as fh:
        pickle.dump(
            {
                'register_results': register_results,
                'scene_name': scene_name,
                'seq_name': seq_name,
                'desc_type': desc_type,
                'inlier_thresh': inlier_thresh,
                'n_threads': n_threads,
            }, fh, protocol=pickle.HIGHEST_PROTOCOL)
    with open(out_stem + '.txt', 'w') as fh:
        for rr in register_results:
            fh.write('{} {} {} {:.8f} {}\n'.format(rr.frag1_name,
                                                   rr.frag2_name,
                                                   rr.num_inliers,
                                                   rr.inlier_ratio,
                                                   rr.gt_flag))

    return out_stem