    config_folder = sys.argv[1] + '/'
    assert os.path.exists(
        config_folder), 'directory %s does not exist' % config_folder

    config_files = ['test.json']
    for config_file in config_files:
        config = read_json(config_folder + config_file)

        height = config['height']
        width = config['width']
        depth = config['depth']
        scale = 1. / max(height, depth, width)

        n_observations = config['n_observations']

        off_directory = common.dirname(config, 'reconstructed_dir')
        txt_directory = common.dirname(config, 'txt_gt_dir')

        import itertools
        off_files = read_ordered_directory(off_directory)
        n_files = len(off_files)
        # Repeat each reconstructed mesh once per observation so that it lines
        # up with the per-observation ground truth point clouds listed below.
        off_files = list(
            itertools.chain.from_iterable(
                itertools.repeat(x, n_observations) for x in off_files))

        txt_files = []
        for n in range(n_files):
            for k in range(n_observations):
                txt_files.append(txt_directory + '/%d/%d.txt' % (k, n))

        vis_directory = common.dirname(config, 'vis_dir')

    config_files = os.listdir(from_config_folder)
    for config_file in config_files:
        assert os.path.exists(to_config_folder + config_file)

    for config_file in config_files:
        from_config = utils.read_json(from_config_folder + config_file)
        to_config = utils.read_json(to_config_folder + config_file)

        for key in hdf5_file_keys:
            from_file = common.filename(from_config, key)
            to_file = common.filename(to_config, key)

            print('[Data] copying')
            print('  ' + from_file)
            print('  ' + to_file)
            shutil.copy(from_file, to_file)

        for key in dir_keys:
            from_dir = common.dirname(from_config, key)
            to_dir = common.dirname(to_config, key)

            # shutil.copytree requires that the destination does not exist yet.
            assert not os.path.exists(to_dir)

            print('[Data] copying')
            print('  ' + from_dir)
            print('  ' + to_dir)
            shutil.copytree(from_dir, to_dir)

    config_folder = sys.argv[1] + '/'
    assert os.path.exists(
        config_folder), 'directory %s does not exist' % config_folder

    config_files = ['test.json']
    for config_file in config_files:
        config = read_json(config_folder + config_file)

        height = config['height']
        width = config['width']
        depth = config['depth']
        scale = 1. / max(height, depth, width)

        multiplier = config['multiplier']
        n_observations = config['n_observations']

        txt_directory = common.dirname(config, 'txt_gt_dir')
        n_files = len(read_ordered_directory(txt_directory + '/0/'))

        txt_files = []
        for n in range(n_files):
            for k in range(n_observations):
                txt_files.append(txt_directory + '/%d/%d.txt' % (k, n))

        vis_directory = common.dirname(config, 'vis_dir')
        if not os.path.isdir(vis_directory):
            os.makedirs(vis_directory)

        N = 30
        log('[Data] %d samples' % len(txt_files))
        for i in range(N):
            n = i * (len(txt_files) // N)
Example 7
import os
import sys

import utils
import common
import binvox_rw

if __name__ == '__main__':
    if len(sys.argv) < 2:
        print('[Data] Usage python 10_reconstruct.py config_folder')
        exit(1)

    config_folder = sys.argv[1] + '/'
    assert os.path.exists(
        config_folder), 'directory %s does not exist' % config_folder

    for config_file in os.listdir(config_folder):
        print('[Data] reading ' + config_folder + config_file)
        config = utils.read_json(config_folder + config_file)

        filled_file = common.filename(config, 'filled_file')
        assert os.path.exists(
            filled_file), 'file %s does not exist' % filled_file

        filled = utils.read_hdf5(filled_file)
        filled = filled.squeeze()

        binvox_directory = common.dirname(config, 'binvox_dir')
        utils.makedir(binvox_directory)

        for n in range(filled.shape[0]):
            model = binvox_rw.Voxels(filled[n] > 0.5, filled[n].shape,
                                     (0, 0, 0), 1)
            binvox_file = binvox_directory + str(n) + '.binvox'
            with open(binvox_file, 'w') as fp:
                model.write(fp)
                print('[Validation] wrote ' + binvox_file)
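
A hypothetical read-back check for the files written above (not part of the original script): it assumes binvox_rw is the standard binvox-rw-py module, whose read_as_3d_array parses a .binvox file into a boolean grid.

import numpy as np
import binvox_rw


def check_binvox(binvox_file, occupancy):
    # Parse the .binvox file back and compare it against the boolean grid it
    # was written from, e.g. check_binvox(binvox_file, filled[n] > 0.5).
    with open(binvox_file, 'rb') as fp:
        model = binvox_rw.read_as_3d_array(fp)
    assert model.data.shape == occupancy.shape
    assert np.array_equal(model.data, occupancy)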
Example 8

            targets = utils.read_hdf5(common.filename(config, 'output_file'))
            print('[Data] read ' + common.filename(config, 'output_file'))

            inputs = utils.read_hdf5(common.filename(config, 'input_file'))
            inputs = inputs.reshape(
                (inputs.shape[0] * n_observations, 1, inputs.shape[2],
                 inputs.shape[3], inputs.shape[4]))
            print('[Data] read ' + common.filename(config, 'input_file'))

            space = utils.read_hdf5(common.filename(config, 'space_file'))
            space = space.reshape(
                (space.shape[0] * n_observations, 1, space.shape[2],
                 space.shape[3], space.shape[4]))
            print('[Data] read ' + common.filename(config, 'space_file'))

            points = []
            point_dir = common.dirname(config, 'txt_gt_dir') + '/'

            for k in range(n_observations):
                k_point_dir = point_dir + '/%d/' % k
                for txt_file in os.listdir(k_point_dir):
                    point_cloud = PointCloud.from_txt(k_point_dir + txt_file)
                    points.append(point_cloud.points.shape[0])

            points = np.array(points)
            # Collapse each (1, H, W, D) volume to a single per-sample value
            # by summing over the spatial axes.
            occupied = np.squeeze(
                np.sum(np.sum(np.sum(targets, axis=4), axis=3), axis=2))
            observed_points = np.squeeze(
                np.sum(np.sum(np.sum(inputs, axis=4), axis=3), axis=2))
            observed_space = np.squeeze(
                np.sum(np.sum(np.sum(space, axis=4), axis=3), axis=2))
Example 9
def test_dirname_win_file():
    win_file = 'A:\\b\\c\\d.txt'
    result = common.dirname(win_file)
    assert result == 'A:\\b\\c'
Example 10
def test_dirname_win_path():
    win_dir = 'A:\\b\\c\\d\\'
    result = common.dirname(win_dir)
    assert result == 'A:\\b\\c\\d'
Example 11
def test_dirname_linux_path():
    linux_dir = '/a/b/c/d/'
    result = common.dirname(linux_dir)
    assert result == '/a/b/c/d'
Example 12
def test_dirname_linux_file():
    linux_file = '/a/b/c/d.txt'
    result = common.dirname(linux_file)
    assert result == '/a/b/c'
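
Taken together, Examples 9 to 12 pin down the expected behaviour of the one-argument form: a trailing separator is stripped from a directory path, and a file path is reduced to its containing directory, for both Windows and Linux flavours. Below is a minimal sketch that satisfies exactly these four cases; it is an illustration only and does not cover the two-argument common.dirname(config, key) form used by the scripts in the other examples.

import ntpath
import posixpath


def dirname(path):
    # Choose the path flavour from the separator that occurs in the string.
    flavour = ntpath if '\\' in path else posixpath
    if path.endswith('\\') or path.endswith('/'):
        # Directory path: only drop the trailing separator.
        return path.rstrip('\\/')
    # File path: return the containing directory.
    return flavour.dirname(path)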
Example 13
import os
import sys

import utils
import common
from point_cloud import PointCloud

if __name__ == '__main__':
    if len(sys.argv) < 2:
        print('[Data] Usage python 13_ply_observations.py config_folder')
        exit(1)

    config_folder = sys.argv[1] + '/'
    assert os.path.exists(
        config_folder), 'directory %s does not exist' % config_folder

    config_files = [
        config_file for config_file in os.listdir(config_folder)
        if not (config_file.find('prior') > 0)
    ]
    for config_file in config_files:
        print('[Data] reading ' + config_folder + config_file)
        config = utils.read_json(config_folder + config_file)

        for k in range(config['n_observations']):
            txt_directory = common.dirname(config, 'txt_gt_dir') + str(k) + '/'
            assert os.path.exists(txt_directory)

            ply_directory = common.dirname(config, 'ply_gt_dir') + str(k) + '/'
            if not os.path.exists(ply_directory):
                os.makedirs(ply_directory)

            for filename in os.listdir(txt_directory):
                point_cloud = PointCloud.from_txt(txt_directory + filename)
                point_cloud.to_ply(ply_directory + filename[:-4] + '.ply')
                print('[Data] wrote ' + ply_directory + filename[:-4] + '.ply')
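
A hypothetical one-file smoke test for the same conversion, using only the calls that appear above; the paths are placeholders and the txt format itself is defined by point_cloud.PointCloud, not shown here.

from point_cloud import PointCloud

# Placeholder paths; substitute any of the ground truth files from above.
txt_file = 'examples/0.txt'
ply_file = 'examples/0.ply'

point_cloud = PointCloud.from_txt(txt_file)
print('[Data] %d points in %s' % (point_cloud.points.shape[0], txt_file))
point_cloud.to_ply(ply_file)
print('[Data] wrote ' + ply_file)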
Example 14
    config_files = ['test.json']
    for config_file in config_files:
        config = read_json(config_folder + config_file)

        height = config['height']
        width = config['width']
        depth = config['depth']
        scale = 1. / max(height, width, depth)

        n_observations = config['n_observations']

        space = read_hdf5(common.filename(config, 'space_file'))
        space = space.reshape((space.shape[0] * n_observations, 1,
                               space.shape[2], space.shape[3], space.shape[4]))

        vis_directory = common.dirname(config, 'vis_dir')
        if not os.path.isdir(vis_directory):
            os.makedirs(vis_directory)

        voxel_size = 0.007
        if height >= 32:
            voxel_size = 0.0055
        if height >= 48:
            voxel_size = 0.004
        log('[Data] voxel size ' + str(voxel_size))

        N = 30
        log('[Data] %d samples' % space.shape[0])
        for i in range(N):
            n = i * (space.shape[0] // N)
import os
import sys

import utils
import common
import mcubes

if __name__ == '__main__':
    if len(sys.argv) < 2:
        print('[Data] Usage python 10_reconstruct.py config_folder')
        exit(1)

    config_folder = sys.argv[1] + '/'
    assert os.path.exists(
        config_folder), 'directory %s does not exist' % config_folder

    for config_file in os.listdir(config_folder):
        print('[Data] reading ' + config_folder + config_file)
        config = utils.read_json(config_folder + config_file)

        sdf_file = common.filename(config, 'sdf_file')
        assert os.path.exists(sdf_file), 'file %s does not exist' % sdf_file

        sdfs = utils.read_hdf5(sdf_file)
        sdfs = sdfs.squeeze()

        reconstructed_directory = common.dirname(config, 'reconstructed_dir')
        utils.makedir(reconstructed_directory)

        for n in range(sdfs.shape[0]):
            vertices, triangles = mcubes.marching_cubes(
                -sdfs[n].transpose(1, 0, 2), 0)

            off_file = reconstructed_directory + '/%d.off' % n
            mcubes.export_off(vertices, triangles, off_file)
            print('[Data] wrote %s' % off_file)
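
The marching cubes call can also be exercised on a synthetic signed distance field, independent of the HDF5 data; the sphere below is only a standalone sketch. Negating the field, as the script does, does not move the zero level set, it only flips the implied inside/outside orientation.

import numpy as np
import mcubes

# Signed distance field of a sphere of radius 16 in a 64^3 grid
# (negative inside, positive outside).
x, y, z = np.mgrid[:64, :64, :64]
sdf = np.sqrt((x - 32.)**2 + (y - 32.)**2 + (z - 32.)**2) - 16.

# Extract the zero level set, negating the field as in the script above.
vertices, triangles = mcubes.marching_cubes(-sdf, 0)
print('[Data] %d vertices, %d triangles' % (vertices.shape[0], triangles.shape[0]))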
Example 17
                perm = np.random.permutation(n_observations)
                perm = perm[:i]
                print('[Data] perm ' + ', '.join(map(str, perm)))
                i_inputs[k] = np.sum(inputs[perm], axis=0)
                i_inputs[k] = np.clip(i_inputs[k], 0, 1)

                i_space[k] = np.sum(space[perm], axis=0)
                i_space[k] = np.clip(i_space[k], 0, 1)

                i_sdf_inputs[k] = np.min(sdf_inputs[perm], axis=0)
                i_tsdf_inputs[k] = np.min(tsdf_inputs[perm], axis=0)
                i_ltsdf_inputs[k] = np.min(ltsdf_inputs[perm], axis=0)

                # Also fuse the actual point clouds!
                txt_directories = utils.read_ordered_directory(
                    common.dirname(config, 'txt_gt_dir'))
                txt_directory = common.dirname(config, 'txt_gt_dir',
                                               i) + '%d/' % k
                utils.makedir(txt_directory)

                for n in range(N):
                    point_cloud = PointCloud()
                    print('[Data] +')
                    for j in range(perm.shape[0]):
                        txt_file = txt_directories[perm[j]] + '/%d.txt' % n
                        print('[Data] | read ' + txt_file)
                        point_cloud_j = PointCloud.from_txt(txt_file)
                        point_cloud.points = np.concatenate(
                            (point_cloud.points, point_cloud_j.points), axis=0)

                    txt_file = txt_directory + '%d.txt' % n