Example #1
def create_archive(cat_desc, shape=(192, 256), n_images=8, example_ids=None):
    import os
    from shapenet.core import cat_desc_to_id
    from shapenet.core import get_example_ids
    from shapenet.core.blender_renderings.config import RenderConfig
    from progress.bar import IncrementalBar
    import zipfile
    cat_id = cat_desc_to_id(cat_desc)
    if example_ids is None or len(example_ids) == 0:
        example_ids = get_example_ids(cat_id)
    config = RenderConfig(shape=shape, n_images=n_images)
    zip_path = config.get_zip_path(cat_id)
    with zipfile.ZipFile(zip_path, mode='a', allowZip64=True) as zf:
        bar = IncrementalBar(max=len(example_ids))
        for example_id in example_ids:
            example_dir = config.get_example_dir(cat_id, example_id)
            if not os.path.isdir(example_dir):
                print('No directory at %s' % example_dir)
            else:
                for fn in os.listdir(example_dir):
                    src = os.path.join(example_dir, fn)
                    dst = os.path.join(cat_id, example_id, fn)
                    zf.write(src, dst)
            bar.next()
        bar.finish()
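A minimal invocation sketch for the function above, assuming the per-example render directories already exist on disk; the 'chair' category and the (192, 256) / 8-view settings are illustrative only.

if __name__ == '__main__':
    # Archive every rendered 'chair' example; passing example_ids=None makes
    # create_archive look up all ids for the category via get_example_ids.
    create_archive('chair', shape=(192, 256), n_images=8)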
Example #2
 def create_from(self, src=None, overwrite=False):
     import h5py
     from progress.bar import IncrementalBar
     if src is None:
         src = self.get_default_src()
     with src.get_dataset(id_keys=True) as src:
         example_ids = get_example_ids(self.cat_id)
         n = len(example_ids)
         values = []
         print('Getting pre-padded encodings for %s...' % self.format_key)
         bar = IncrementalBar(max=n)
         encode = self.encoder.to_numpy
         for i in example_ids:
             val = encode(src[i])
             values.append(val)
             bar.next()
         bar.finish()
         m = max(len(v) for v in values)
         print('Saving...')
         path = self.path
         dn = os.path.dirname(path)
         if not os.path.isdir(dn):
             os.makedirs(dn)
         # if overwrite and os.path.isfile(path):
         #     os.remove(path)
         with h5py.File(path, mode='w' if overwrite else 'a') as dst:
             dst_group = dst.require_dataset(GROUP_KEY,
                                             dtype=np.uint8,
                                             shape=(n, m),
                                             compression=self.compression)
             print('Saving data to %s' % self.path)
             for i, val in enumerate(values):
                 dst_group[i, :len(val)] = val
Example #3
def render_cat(
        config, cat_id, overwrite, reverse=False, debug=False,
        example_ids=None, use_fixed_meshes=False, blender_path='blender',
        verbose=False):
    import zipfile
    from progress.bar import IncrementalBar
    call_kwargs = {} if debug else dict(
        stdout=_FNULL, stderr=subprocess.STDOUT)
    if example_ids is None or len(example_ids) == 0:
        example_ids = get_example_ids(cat_id)
    if reverse:
        example_ids = example_ids[-1::-1]
    print('Rendering %d images for cat %s' % (len(example_ids), cat_id))
    bar = IncrementalBar(max=len(example_ids))
    if use_fixed_meshes:
        zip_path = get_fixed_meshes_zip_path(cat_id)
    else:
        zip_path = get_zip_path(cat_id)
    with zipfile.ZipFile(zip_path) as zip_file:
        file_index = get_file_index(zip_file)
        for example_id in example_ids:
            bar.next()
            render_example(
                config, cat_id, example_id, zip_file,
                overwrite, call_kwargs, blender_path=blender_path,
                verbose=verbose, file_index=file_index)
    bar.finish()
Example #4
def main(cat_descs, voxel_dim, overwrite):
    from shapenet.core import cat_desc_to_id, get_cat_ids
    from shapenet.core import get_example_ids
    if len(cat_descs) == 0:
        cat_ids = get_cat_ids()
    else:
        cat_ids = [cat_desc_to_id(cat_desc) for cat_desc in cat_descs]
    for i, cat_id in enumerate(cat_ids):
        print('Processing cat_id %s, %d / %d' % (cat_id, i + 1, len(cat_ids)))
        example_ids = get_example_ids(cat_id)
        convert_multi(cat_id,
                      example_ids,
                      overwrite=overwrite,
                      voxel_dim=voxel_dim)
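A rough usage sketch (the voxel_dim value is illustrative); an empty cat_descs list falls back to every category returned by get_cat_ids.

if __name__ == '__main__':
    # Convert two named categories at 32^3 resolution without overwriting.
    main(['plane', 'chair'], voxel_dim=32, overwrite=False)
    # Or convert every available category:
    # main([], voxel_dim=32, overwrite=False)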
Example #5
def _map_binvox_dataset(dataset, cat_id, id_keys=True, prefix=''):
    dataset = dataset.map(
        bv.Voxels.from_file,
        inverse_map_fn=lambda vox: _as_readable(vox.save_to_file))
    pl = len(prefix)
    if id_keys:
        # keys have the form '<prefix><example_id>.binvox'; the inverse map
        # strips the prefix and the 7-character '.binvox' extension
        dataset = dataset.map_keys(lambda x: '%s%s.binvox' % (prefix, x),
                                   lambda x: x[pl:-7])
    else:
        example_ids = get_example_ids(cat_id)
        indices = {k: i for i, k in enumerate(example_ids)}
        dataset = dataset.map_keys(
            lambda i: '%s%s.binvox' % (prefix, example_ids[i]),
            lambda k: indices[k[pl:-7]])
    return dataset
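The two map_keys lambdas form an inverse pair: the forward map builds '<prefix><example_id>.binvox' and the slice [pl:-7] drops the prefix and the 7-character '.binvox' suffix. A self-contained sketch of that round trip, with an invented prefix and example id:

prefix = 'voxels/'      # hypothetical prefix
example_id = '1a2b3c'   # hypothetical example id
pl = len(prefix)
key = '%s%s.binvox' % (prefix, example_id)  # forward map: 'voxels/1a2b3c.binvox'
recovered = key[pl:-7]                      # inverse map strips prefix and '.binvox'
assert recovered == example_id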
Example #6
def vis(cat, n_images, view_index=5, example_ids=None):
    import matplotlib.pyplot as plt
    from shapenet.core import cat_desc_to_id, get_example_ids
    from shapenet.core.blender_renderings.config import RenderConfig
    cat_id = cat_desc_to_id(cat)
    config = RenderConfig(n_images=n_images)
    dataset = config.get_dataset(cat_id, view_index)
    if example_ids is not None and len(example_ids) > 0:
        dataset = dataset.subset(example_ids)
    else:
        example_ids = get_example_ids(cat_id)
    with dataset:
        for example_id in example_ids:
            plt.imshow(dataset[example_id])
            plt.title(example_id)
            plt.show()
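A possible call, assuming the 'chair' renderings with 8 views per model have already been generated:

if __name__ == '__main__':
    # Show view 5 of every rendered chair, one matplotlib figure at a time.
    vis('chair', n_images=8, view_index=5)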
Example #7
def check_mesh_data(cat_desc):
    from shapenet.core import cat_desc_to_id, get_example_ids
    from shapenet.core.meshes import get_mesh_dataset
    cat_id = cat_desc_to_id(cat_desc)
    example_ids = get_example_ids(cat_id)
    n_absent = 0
    with get_mesh_dataset(cat_id) as ds:
        for example_id in example_ids:
            if example_id not in ds:
                n_absent += 1

    n = len(example_ids)
    if n_absent == 0:
        print('All %d %s meshes present!' % (n, cat_desc))
    else:
        print('%d / %d %s meshes absent' % (n_absent, n, cat_desc))
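Usage is a single call with a category description (the category name here is illustrative):

if __name__ == '__main__':
    check_mesh_data('plane')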
Example #8
def main(_):
    from shapenet.core.voxels.config import get_config
    from shapenet.core import to_cat_id
    from shapenet.core import get_example_ids
    config = get_config(FLAGS.voxel_dim, alt=FLAGS.alt)
    if FLAGS.fill is not None:
        config = config.filled(FLAGS.fill)
    if FLAGS.cat is None:
        raise ValueError('Must provide at least one cat to convert.')
    cat_id = to_cat_id(FLAGS.cat)
    example_ids = FLAGS.example_id
    if example_ids is None:
        example_ids = get_example_ids(cat_id)
    config.create_voxel_data(cat_id, example_ids)
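The function reads absl/tf.app-style FLAGS; a hedged sketch of the flag definitions it appears to expect (flag names are inferred from the body, defaults are invented):

from absl import app, flags

flags.DEFINE_integer('voxel_dim', 32, 'voxel grid resolution')
flags.DEFINE_boolean('alt', False, 'use the alternative voxel config')
flags.DEFINE_string('fill', None, 'optional fill algorithm, e.g. "orthographic"')
flags.DEFINE_string('cat', None, 'category description or id to convert')
flags.DEFINE_list('example_id', None, 'optional explicit example ids')
FLAGS = flags.FLAGS

if __name__ == '__main__':
    app.run(main)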
Example #9
    def create_voxel_data(self, cat_id, example_ids=None, overwrite=False):
        import shutil
        from progress.bar import IncrementalBar
        from util3d.voxel.convert import obj_to_binvox
        from shapenet.core.path import get_zip_file, get_obj_subpath, \
            get_example_subdir
        from shapenet.core.voxels.path import get_binvox_path
        if example_ids is None:
            from shapenet.core import get_example_ids
            example_ids = get_example_ids(cat_id)
        tmp_dir = '/tmp'

        kwargs = dict(
            voxel_dim=self.voxel_dim,
            exact=self.exact,
            dc=self.dc,
            aw=self.aw)
        voxel_id = self.voxel_id

        print('Creating voxel data.')
        with get_zip_file(cat_id) as zf:
            bar = IncrementalBar(max=len(example_ids))
            for example_id in example_ids:
                bar.next()
                binvox_path = get_binvox_path(voxel_id, cat_id, example_id)
                if os.path.isfile(binvox_path):
                    if overwrite:
                        os.remove(binvox_path)
                    else:
                        continue
                subdir = os.path.dirname(binvox_path)
                if not os.path.isdir(subdir):
                    os.makedirs(subdir)
                subpath = get_obj_subpath(cat_id, example_id)
                zf.extract(subpath, tmp_dir)
                obj_path = os.path.join(tmp_dir, subpath)
                extraction_dir = os.path.join(
                    tmp_dir, get_example_subdir(cat_id, example_id))
                try:
                    obj_to_binvox(obj_path, binvox_path, **kwargs)
                except IOError:
                    print('Error generating %s/%s' % (cat_id, example_id))
                shutil.rmtree(extraction_dir)
            bar.finish()
Example #10
 def create_split(self, cat_id, overwrite=False):
     import random
     from shapenet.core import get_example_ids
     from template_ffd.templates.ids import get_template_ids
     if not overwrite and self.has_split(cat_id):
         return
     template_ids = set(get_template_ids(cat_id))
     example_ids = get_example_ids(cat_id)
     example_ids = [i for i in example_ids if i not in template_ids]
     example_ids.sort()
     random.seed(self._seed)
     random.shuffle(example_ids)
     train_ids, eval_ids = _train_eval_partition(example_ids,
                                                 self._train_prop)
     train_ids.sort()
     eval_ids.sort()
     for mode, ids in (('train', train_ids), ('eval', eval_ids)):
         with open(self.get_txt_path(cat_id, mode), 'w') as fp:
             fp.writelines(('%s\n' % i for i in ids))
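_train_eval_partition is not shown in this example; a hypothetical stand-in, with its behaviour assumed from the call site, would split the shuffled id list by a training proportion:

def _train_eval_partition(example_ids, train_prop):
    # Hypothetical helper: the first train_prop of the (shuffled) ids go to
    # training, the remainder to evaluation.
    n_train = int(len(example_ids) * train_prop)
    return example_ids[:n_train], example_ids[n_train:]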
Example #11
def check_archive(cat_desc, voxel_dim, example_ids=None):
    from shapenet.core import cat_desc_to_id, get_example_ids
    from shapenet.core.voxels.config import VoxelConfig
    cat_id = cat_desc_to_id(cat_desc)
    if example_ids is None or len(example_ids) == 0:
        example_ids = get_example_ids(cat_id)
    config = VoxelConfig(voxel_dim=voxel_dim)
    n_absent = 0
    with config.get_zip_file(cat_id) as zf:
        namelist = set(zf.namelist())
    for example_id in example_ids:
        if config.get_binvox_subpath(cat_id, example_id) not in namelist:
            n_absent += 1

    if n_absent == 0:
        print('All %d %s voxels present' % (len(example_ids), cat_desc))
    else:
        print('%d / %d voxel files missing from %s' %
              (n_absent, len(example_ids), cat_desc))
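Typical usage is one call per category and resolution (both values illustrative):

if __name__ == '__main__':
    # Check the 32^3 voxel archive for the 'chair' category.
    check_archive('chair', voxel_dim=32)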
Example #12
 def create_from(self, src=None, overwrite=False):
     import h5py
     from progress.bar import IncrementalBar
     if src is None:
         src = self.get_default_src()
     with src.get_dataset(id_keys=True) as src:
         example_ids = get_example_ids(self.cat_id)
         n = len(example_ids)
         dtype = h5py.special_dtype(vlen=np.dtype(np.uint8))
         with h5py.File(self.path, 'a') as dst:
             dst = dst.require_dataset(GROUP_KEY,
                                       dtype=dtype,
                                       shape=(n, ),
                                       compression=self.compression)
             print('Saving data to %s' % self.path)
             bar = IncrementalBar(max=n)
             encode = self.encoder.to_numpy
             for i, example_id in enumerate(example_ids):
                 if example_id in src and (overwrite or len(dst[i]) == 0):
                     dst[i] = encode(src[example_id])
                 bar.next()
             bar.finish()
Example #13
def check_zip(cat_desc, shape, n_images):
    import zipfile
    from shapenet.core.blender_renderings.config import RenderConfig
    from shapenet.core import cat_desc_to_id, get_example_ids
    cat_id = cat_desc_to_id(cat_desc)

    config = RenderConfig(shape=shape, n_images=n_images)
    rendered_ids = set()
    with zipfile.ZipFile(config.get_zip_path(cat_id)) as zf:
        for name in zf.namelist():
            rendered_ids.add(name.split('/')[1])

    not_rendered_count = 0
    example_ids = get_example_ids(cat_id)
    for example_id in example_ids:
        if example_id not in rendered_ids:
            print(example_id)
            not_rendered_count += 1

    if not_rendered_count > 0:
        print('%d / %d not rendered' % (not_rendered_count, len(example_ids)))
    else:
        print('All %d %ss rendered!' % (len(example_ids), cat_desc))
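A possible call, mirroring the render settings assumed in Example #1:

if __name__ == '__main__':
    check_zip('chair', shape=(192, 256), n_images=8)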
Example #14
def create_archive(cat_desc, voxel_dim, example_ids=None, overwrite=False):
    import os
    import zipfile
    from shapenet.core.voxels.config import VoxelConfig
    from shapenet.core import get_example_ids, cat_desc_to_id
    cat_id = cat_desc_to_id(cat_desc)

    if example_ids is None or len(example_ids) == 0:
        example_ids = get_example_ids(cat_id)
    config = VoxelConfig(voxel_dim)
    with zipfile.ZipFile(config.get_zip_path(cat_id), 'a') as zf:
        if not overwrite:
            namelist = set(zf.namelist())
        for example_id in example_ids:
            dst = config.get_binvox_subpath(cat_id, example_id)
            if not overwrite and dst in namelist:
                continue
            src = config.get_binvox_path(cat_id, example_id)
            if os.path.isfile(src):
                zf.write(src, dst)
            else:
                print('No file at %s for %s/%s: skipping' %
                      (src, cat_id, example_id))
Example #15
 def create_from(self, src=None, overwrite=False):
     import h5py
     from progress.bar import IncrementalBar
     from util3d.voxel.binvox import rle
     if src is None:
         src = self.get_default_src()
     with src.get_dataset(id_keys=True) as src_ds:
         example_ids = get_example_ids(self.cat_id)
         n = len(example_ids)
         values = []
         print('Getting jagged encodings for %s...' % self.format_key)
         bar = IncrementalBar(max=n)
         encode = self.encoder.to_numpy
         starts = np.empty(shape=(n + 1, ), dtype=np.int32)
         curr = 0
         starts[0] = curr
         for i, example_id in enumerate(example_ids):
             data = src_ds[example_id]
             val = encode(data)
             val = rle.remove_length_padding(val)
             curr += len(val)
             starts[i + 1] = curr
             values.append(val)
             bar.next()
         bar.finish()
         print('Saving...')
         path = self.path
         dn = os.path.dirname(path)
         if not os.path.isdir(dn):
             os.makedirs(dn)
         # if overwrite and os.path.isfile(path):
         #     os.remove(path)
         with h5py.File(path, mode='w' if overwrite else 'a') as dst:
             dst.create_dataset('values',
                                data=np.concatenate(values, axis=0),
                                compression=self.compression)
             dst.create_dataset('starts', data=starts, compression=None)
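The 'starts' array makes it possible to recover the i-th jagged row from the flat 'values' dataset; a minimal read-side sketch, assuming only the layout written above and the standard h5py API:

import h5py
import numpy as np

def read_jagged(path, i):
    # Row i of the jagged encoding occupies values[starts[i]:starts[i + 1]].
    with h5py.File(path, 'r') as f:
        starts = f['starts'][:]
        return np.asarray(f['values'][starts[i]:starts[i + 1]])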
Example #16
 def has_dataset(self):
     dd = self.data_dir
     return os.path.isdir(dd) and len(os.listdir(dd)) == len(
         get_example_ids(self.cat_id))
Example #17
def _map_indices_to_ids(dataset, cat_id):
    example_ids = get_example_ids(cat_id)
    indices = {k: i for i, k in enumerate(example_ids)}
    dataset = dataset.map_keys(lambda k: indices[k], lambda i: example_ids[i])
    return dataset
Example #18

cat = 'plane'
voxel_dim = 64
ray_shape = (32,)*3
view_index = 0
cat_id = to_cat_id(cat)
config = get_config(voxel_dim, alt=False).filled('orthographic')
voxel_dataset = get_voxel_dataset(
    config, cat_id, id_keys=True, key='rle', compression='lzf')
image_manager = get_base_manager(dim=256)
n_renderings = image_manager.get_render_params()['n_renderings']
f = 32 / 35


example_ids = get_example_ids(cat_id)
with voxel_dataset:
    for example_id in example_ids:
        start = time.time()
        dense_data = voxel_dataset[example_id].dense_data()
        dense_data = dense_data[:, -1::-1]

        key = (cat_id, example_id)
        eyes = image_manager.get_camera_positions(key)
        for vi in range(n_renderings):
            eye = eyes[vi]
            n = np.linalg.norm(eye)
            R, t = get_eye_to_world_transform(eye)
            z_near = n - 0.5
            z_far = z_near + 1
Example #19
    def map_np(self, example_id):
        points = np.array(self._dataset[example_id], dtype=np.float32)
        return sample_points(points, self._n_resamples, axis=0)


def get_sampled_point_cloud_dataset(
        cat_id, example_ids, n_samples, n_resamples):
    manager = SampledPointCloudManager(cat_id, n_samples, n_resamples)
    base = base_dataset(example_ids)
    return base.map(manager.map_tf)


if __name__ == '__main__':
    import tensorflow as tf
    from mayavi import mlab
    from util3d.mayavi_vis import vis_point_cloud
    from shapenet.core import cat_desc_to_id, get_example_ids
    cat_desc = 'plane'
    n_samples = 16384
    # n_resamples = None
    n_resamples = 1024
    cat_id = cat_desc_to_id(cat_desc)
    example_ids = get_example_ids(cat_id)
    dataset = get_sampled_point_cloud_dataset(
        cat_id, example_ids, n_samples, n_resamples)
    pc = dataset.make_one_shot_iterator().get_next()
    with tf.train.MonitoredSession() as sess:
        while not sess.should_stop():
            cloud = sess.run(pc)
            vis_point_cloud(cloud, color=(0, 0, 1), scale_factor=0.01)
            mlab.show()