def get_data(model_id, example_ids=None):
    edge_length_threshold = 0.02
    builder = get_builder(model_id)
    cat_id = builder.cat_id
    # per-template FFD decomposition: Bernstein basis (b) and control points (p)
    with get_ffd_dataset(cat_id, edge_length_threshold=0.02) as ffd_ds:
        template_ids, bs, ps = zip(*builder.get_ffd_data(ffd_ds))
    with get_template_mesh_dataset(cat_id, edge_length_threshold) as mesh_ds:
        faces = [np.array(mesh_ds[e]['faces']) for e in template_ids]
    predictions_ds = get_predictions_dataset(model_id)
    mesh_ds = get_mesh_dataset(cat_id)
    image_ds = RenderConfig().get_dataset(cat_id, builder.view_index)
    zipped = Dataset.zip(predictions_ds, mesh_ds, image_ds)
    with zipped:
        if example_ids is None:
            example_ids = list(predictions_ds.keys())
            random.shuffle(example_ids)
        for example_id in example_ids:
            print(example_id)
            pred, mesh, image = zipped[example_id]
            # select the template with the highest predicted probability
            i = np.argmax(pred['probs'])
            dp = np.array(pred['dp'][i])
            b = bs[i]
            p = ps[i]
            yield example_id, b, p, dp, faces[i], mesh, image
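# Usage sketch for get_data: visualise each yielded deformation by applying
# the perturbed control points to the Bernstein basis. Assumptions: the
# 'e_chair' model_id follows the '%s_%s' % (regime, cat_desc) convention used
# in the snippets below, and b @ (p + dp) recovers the deformed vertices.
def _vis_deformed_templates(model_id='e_chair'):
    import numpy as np
    from mayavi import mlab
    from util3d.mayavi_vis import vis_mesh
    for example_id, b, p, dp, faces, mesh, image in get_data(model_id):
        # FFD reconstruction: basis matrix times (control points + offsets)
        vertices = np.matmul(b, p + dp)
        vis_mesh(vertices, faces, color=(0, 1, 0))
        mlab.show()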
def get_ds(cat_desc, regime='e'):
    view_index = 5
    edge_length_threshold = 0.02
    cat_id = cat_desc_to_id(cat_desc)
    model_id = '%s_%s' % (regime, cat_desc)
    image_ds = RenderConfig().get_dataset(cat_id, view_index)
    # n_samples is expected to be defined at module scope
    cloud_ds = get_cloud_manager(
        model_id, pre_sampled=True, n_samples=n_samples).get_lazy_dataset()
    mesh_ds = get_inferred_mesh_dataset(
        model_id, edge_length_threshold=edge_length_threshold)
    gt_mesh_ds = get_mesh_dataset(cat_id)
    voxel_ds = get_voxel_dataset(
        model_id, edge_length_threshold=edge_length_threshold, filled=False)
    selected_template_ds = get_selected_template_idx_dataset(model_id)
    # eagerly load the template meshes so they can be indexed by template idx
    template_meshes = []
    with gt_mesh_ds:
        for template_id in get_template_ids(cat_id):
            mesh = gt_mesh_ds[template_id]
            template_meshes.append(
                {k: np.array(mesh[k]) for k in ('vertices', 'faces')})
    template_mesh_ds = selected_template_ds.map(lambda i: template_meshes[i])
    return Dataset.zip(
        image_ds, gt_mesh_ds, cloud_ds, mesh_ds, voxel_ds, template_mesh_ds)
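# Usage sketch for get_ds: index the zipped dataset by example_id to get the
# image, ground-truth mesh, inferred cloud/mesh/voxels and the selected
# template mesh together. 'chair' is an arbitrary cat_desc and n_samples must
# be defined at module scope, as noted in get_ds.
def _inspect_first_example(cat_desc='chair'):
    import numpy as np
    cat_id = cat_desc_to_id(cat_desc)
    ds = get_ds(cat_desc)
    with ds:
        for example_id in get_example_ids(cat_id, 'eval'):
            image, gt_mesh, cloud, mesh, voxels, template_mesh = ds[example_id]
            print(example_id, np.array(cloud).shape)
            break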
def _main(cat_id, example_id):
    from dids import Dataset
    import matplotlib.pyplot as plt
    from util3d.mayavi_vis import vis_point_cloud
    from mayavi import mlab
    colors = (
        (1, 0, 0),
        (0, 1, 0),
        (0, 0, 1),
        (1, 1, 1),
    )
    image_ds = SegmentedImageDataset(cat_id)
    pc_ds = PointCloudDataset(cat_id)
    s_ds = SegmentationDataset(cat_id)
    ds = Dataset.zip(image_ds, pc_ds, s_ds)
    with ds:
        image, pc, s = ds[example_id]
        print(np.min(s))
        print(np.max(s))
        ns = np.max(s) + 1
        plt.imshow(image)
        for i in range(ns - 1):
            cloud = pc[s == i + 1]
            color = colors[i % len(colors)]
            vis_point_cloud(cloud, color=color, scale_factor=0.02)
        plt.show(block=False)
        mlab.show()
def _main():
    # from path import get_zip_file
    from dids import Dataset
    import matplotlib.pyplot as plt
    cat_id = '02691156'
    example_id = '1a04e3eab45ca15dd86060f189eb133'
    ds = Dataset.zip(
        SegmentedImageDataset(cat_id), SegmentedPointCloudDataset(cat_id))
    with ds:
        image, cloud = ds[example_id]
        print(cloud.shape)
        print(cloud.dtype)
        plt.imshow(image)
        plt.show()
def vis_clouds(model_id, pre_sampled=True, n_samples=1024,
               edge_length_threshold=0.1, shuffle=False):
    import random
    import numpy as np
    from mayavi import mlab
    import matplotlib.pyplot as plt
    from dids import Dataset
    from shapenet.core.blender_renderings.config import RenderConfig
    from shapenet.core.meshes import get_mesh_dataset
    from util3d.mayavi_vis import vis_point_cloud
    from util3d.mayavi_vis import vis_mesh
    from template_ffd.data.ids import get_example_ids
    from template_ffd.inference.clouds import get_inferred_cloud_dataset
    from template_ffd.model import get_builder
    builder = get_builder(model_id)
    cat_id = builder.cat_id
    kwargs = dict(model_id=model_id, n_samples=n_samples)
    if not pre_sampled:
        kwargs['edge_length_threshold'] = edge_length_threshold
    cloud_dataset = get_inferred_cloud_dataset(
        pre_sampled=pre_sampled, **kwargs)
    image_dataset = RenderConfig().get_dataset(cat_id, builder.view_index)
    example_ids = get_example_ids(cat_id, 'eval')
    if shuffle:
        example_ids = list(example_ids)
        random.shuffle(example_ids)
    mesh_dataset = get_mesh_dataset(cat_id)
    zipped_dataset = Dataset.zip(image_dataset, cloud_dataset, mesh_dataset)
    # zipped_dataset = Dataset.zip(image_dataset, cloud_dataset)
    with zipped_dataset:
        for example_id in example_ids:
            image, cloud, mesh = zipped_dataset[example_id]
            # image, cloud = zipped_dataset[example_id]
            plt.imshow(image)
            vis_point_cloud(
                np.array(cloud), color=(0, 1, 0), scale_factor=0.01)
            v, f = (np.array(mesh[k]) for k in ('vertices', 'faces'))
            vis_mesh(
                v, f, color=(0, 0, 1), opacity=0.1, include_wireframe=False)
            plt.show(block=False)
            mlab.show()
            plt.close()
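# Minimal invocation sketch for vis_clouds. The 'e_chair' model_id is an
# assumption based on the '%s_%s' % (regime, cat_desc) naming used in the
# other snippets.
if __name__ == '__main__':
    vis_clouds('e_chair', pre_sampled=True, n_samples=1024, shuffle=True)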
def get_lazy_dataset(self):
    cat_id = cat_desc_to_id(load_params(self._model_id)['cat_desc'])
    example_ids = get_example_ids(cat_id, 'eval')
    inferred_dataset = get_voxel_dataset(
        self._model_id, self._edge_length_threshold, self._voxel_config,
        filled=self._filled, example_ids=example_ids)
    gt_dataset = get_gt_voxel_dataset(cat_id, filled=self._filled)
    voxel_datasets = Dataset.zip(inferred_dataset, gt_dataset)
    voxel_datasets = voxel_datasets.subset(example_ids)

    def map_fn(v):
        return intersection_over_union(v[0].data, v[1].data)

    iou_dataset = voxel_datasets.map(map_fn)
    return iou_dataset
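# Aggregation sketch for the IoU dataset built above: open the lazy dataset
# and reduce the per-example values to a mean. `manager` stands for whatever
# object provides this get_lazy_dataset method and is a hypothetical name, as
# is passing cat_id explicitly.
def mean_iou(manager, cat_id):
    import numpy as np
    from template_ffd.data.ids import get_example_ids
    ds = manager.get_lazy_dataset()
    with ds:
        ious = [ds[k] for k in get_example_ids(cat_id, 'eval')]
    return np.mean(ious)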
def get_lazy_evaluation_dataset(inf_cloud_ds, cat_id, n_samples, eval_fn):
    example_ids = get_example_ids(cat_id, 'eval')

    def sample_fn(cloud):
        return sample_points(np.array(cloud), n_samples)

    normalization_ds = get_normalization_params_dataset(cat_id)
    gt_cloud_ds = get_point_cloud_dataset(
        cat_id, n_samples, example_ids=example_ids).map(sample_fn)
    zipped = Dataset.zip(inf_cloud_ds, gt_cloud_ds, normalization_ds)

    def map_fn(data):
        inf_cloud, gt_cloud, norm_params = data
        inf_cloud = normalized(inf_cloud, **norm_params)
        gt_cloud = normalized(gt_cloud, **norm_params)
        return eval_fn(inf_cloud, gt_cloud)

    return zipped.map(map_fn)
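# Usage sketch for get_lazy_evaluation_dataset: any point-set metric with the
# signature eval_fn(inferred_cloud, gt_cloud) -> float can be reduced to a
# mean this way. eval_fn is supplied by the caller; nothing here assumes a
# particular metric implementation.
def mean_cloud_metric(inf_cloud_ds, cat_id, n_samples, eval_fn):
    import numpy as np
    from template_ffd.data.ids import get_example_ids
    ds = get_lazy_evaluation_dataset(inf_cloud_ds, cat_id, n_samples, eval_fn)
    with ds:
        values = [ds[k] for k in get_example_ids(cat_id, 'eval')]
    return np.mean(values)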
def get_lazy_dataset(self):
    from template_ffd.inference.predictions import \
        get_selected_template_idx_dataset
    builder = get_builder(self._model_id)
    template_ids = builder.template_ids
    gt_ds = get_gt_voxel_dataset(
        builder.cat_id, filled=self._filled, auto_save=True,
        example_ids=template_ids)
    gt_ds = gt_ds.map(lambda v: v.data)
    with gt_ds:
        template_voxels = tuple(gt_ds[tid] for tid in template_ids)
    selected_ds = get_selected_template_idx_dataset(self._model_id)
    selected_ds = selected_ds.map(lambda i: template_voxels[i])
    return Dataset.zip(selected_ds, gt_ds).map(
        lambda v: intersection_over_union(*v))
def get_lazy_dataset(self):
    import numpy as np
    from shapenet.core.meshes import get_mesh_dataset
    from shapenet.core.annotations.datasets import PointCloudDataset
    from dids import Dataset
    vertices_dataset = get_mesh_dataset(self._cat_id).map(
        lambda mesh: np.array(mesh['vertices']))
    points_dataset = PointCloudDataset(self._cat_id)
    zipped = Dataset.zip(vertices_dataset, points_dataset)

    def map_fn(inputs):
        vertices, points = inputs
        b, p = _calculate_ffd(self._n, vertices, points)
        return dict(b=b, p=p)

    with points_dataset:
        keys = [k for k in get_template_ids(self._cat_id)
                if k in points_dataset]
    return zipped.map(map_fn).subset(keys)
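# Sketch of how the (b, p) pair computed above is typically consumed: in a
# Bernstein-basis FFD parameterisation the points are recovered, up to fitting
# error, as the product of the basis matrix b with the control points p. The
# dict layout follows map_fn above; the shapes in the comments are assumptions.
def reconstruct_points(ffd_entry):
    import numpy as np
    b = np.asarray(ffd_entry['b'])  # (n_points, n_control_points) basis
    p = np.asarray(ffd_entry['p'])  # (n_control_points, 3) control points
    return np.matmul(b, p)          # approximate original point positions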
def get_lazy_dataset(self):
    cat_id = cat_desc_to_id(load_params(self._model_id)['cat_desc'])
    if not isinstance(cat_id, (list, tuple)):
        cat_id = [cat_id]
    inferred_dataset = get_voxel_dataset(
        self._model_id, self._edge_length_threshold, self._voxel_config,
        filled=self._filled)
    gt_dataset = get_gt_voxel_dataset(cat_id, filled=self._filled)
    gt_dataset = gt_dataset.map_keys(lambda key: key[:2])
    with inferred_dataset:
        keys = tuple(inferred_dataset.keys())
    voxel_datasets = Dataset.zip(inferred_dataset, gt_dataset)
    voxel_datasets = voxel_datasets.subset(keys)

    def map_fn(v):
        return intersection_over_union(v[0].dense_data(), v[1].dense_data())

    iou_dataset = voxel_datasets.map(map_fn)
    return iou_dataset
shuffle = True
k = 3
cat_id = cat_desc_to_id(cat_desc)
model_id = '%s_%s' % (regime, cat_desc)
builder = get_builder(model_id)
image_ds = RenderConfig().get_dataset(cat_id, view_index)
gt_mesh_ds = get_mesh_dataset(cat_id)
predictions_ds = get_predictions_dataset(model_id)
top_k_mesh_fn = builder.get_prediction_to_top_k_mesh_fn(
    edge_length_threshold, k)
all_ds = Dataset.zip(image_ds, gt_mesh_ds, predictions_ds)


def vis():
    def vis_mesh(mesh, include_wireframe=False, **kwargs):
        from util3d.mayavi_vis import vis_mesh as vm
        v, f = (np.array(mesh[k]) for k in ('vertices', 'faces'))
        vm(v, f, include_wireframe=include_wireframe, **kwargs)

    example_ids = list(get_example_ids(cat_id, 'eval'))
    random.shuffle(example_ids)
    with all_ds:
        for example_id in example_ids:
            print(example_id)
            image, gt_mesh, predictions = all_ds[example_id]