def __getitem__(self, index):
    """Load, augment, and voxelize a single point cloud sample.

    Returns a tuple ``(coords, feats, labels)`` and, when
    ``self.return_transformation`` is set, additionally the raw point cloud
    (float32) and the 1 x K voxelization transformation (float32).
    """
    if self.explicit_rotation > 1:
        # Quantize the rotation augmentation: the low bits of `index` select
        # one of `explicit_rotation` evenly spaced angles in [-pi, pi].
        rotation_space = np.linspace(-np.pi, np.pi, self.explicit_rotation + 1)
        rotation_angle = rotation_space[index % self.explicit_rotation]
        index //= self.explicit_rotation
    else:
        rotation_angle = None
    pointcloud, center = self.load_ply(index)
    if self.PREVOXELIZE_VOXEL_SIZE is not None:
        # Pre-voxelize at a coarse resolution to subsample the cloud;
        # `inds` keeps one representative point per occupied voxel.
        inds = ME.SparseVoxelize(
            pointcloud[:, :3] / self.PREVOXELIZE_VOXEL_SIZE, return_index=True)
        pointcloud = pointcloud[inds]
    if self.elastic_distortion:
        pointcloud = self._augment_elastic_distortion(pointcloud)
    coords, feats, labels = self.convert_mat2cfl(pointcloud)
    outs = self.sparse_voxelizer.voxelize(
        coords, feats, labels,
        center=center,
        rotation_angle=rotation_angle,
        return_transformation=self.return_transformation)
    if self.return_transformation:
        coords, feats, labels, transformation = outs
        # Add a leading batch axis so transformations can be stacked later.
        transformation = np.expand_dims(transformation, 0)
    else:
        coords, feats, labels = outs
    if self.input_transform is not None:
        coords, feats, labels = self.input_transform(coords, feats, labels)
    if self.target_transform is not None:
        coords, feats, labels = self.target_transform(coords, feats, labels)
    if self.IGNORE_LABELS is not None:
        # Map labels not used for evaluation to the ignore label.
        # NOTE: `dtype=np.int` was removed in NumPy 1.24; `int` is equivalent.
        labels = np.array([self.label_map[x] for x in labels], dtype=int)
    return_args = [coords, feats, labels]
    if self.return_transformation:
        return_args.extend([pointcloud.astype(np.float32),
                            transformation.astype(np.float32)])
    return tuple(return_args)
def __getitem__(self, index):
    """Load a temporal window of consecutive scans and voxelize them jointly.

    Returns ``(joint_coords, joint_feats, joint_labels)`` where the last
    coordinate column is the frame index within the window; when
    ``self.return_transformation`` is set, the stacked raw point clouds and
    per-frame transformations (both float32) are appended.
    """
    # Map the flat dataset index to (sequence index, frame index in sequence).
    for seq_idx, numel in enumerate(self.numels):
        if index >= numel:
            index -= numel
        else:
            break
    numseq = self.temporal_numseq
    if self.augment_data and self.config.temporal_rand_numseq:
        numseq = random.randrange(1, self.temporal_numseq + 1)
    # Frame gaps between consecutive scans in the window (fixed or randomized).
    dilations = [self.temporal_dilation for _ in range(numseq - 1)]
    if self.augment_data and self.config.temporal_rand_dilation:
        dilations = [random.randrange(1, self.temporal_dilation + 1)
                     for _ in range(numseq - 1)]
    # NOTE(review): index + sum(dilations) can exceed the sequence length near
    # its end — presumably `numels` already accounts for the window; confirm.
    files = [self.data_paths[seq_idx][index + sum(dilations[:i])]
             for i in range(numseq)]
    world_pointclouds = [self.load_world_pointcloud(f) for f in files]
    ptcs, centers = zip(*world_pointclouds)
    if self.PREVOXELIZE_VOXEL_SIZE is not None:
        # Coarse pre-voxelization subsamples each frame independently.
        new_ptcs = []
        for ptc in ptcs:
            inds = ME.SparseVoxelize(
                ptc[:, :3] / self.PREVOXELIZE_VOXEL_SIZE, return_index=True)
            new_ptcs.append(ptc[inds])
        ptcs = new_ptcs
    if self.elastic_distortion:
        ptcs = [self._augment_elastic_distortion(ptc) for ptc in ptcs]
    ptcs = [self.convert_mat2cfl(ptc) for ptc in ptcs]
    coords, feats, labels = zip(*ptcs)
    outs = self.sparse_voxelizer.voxelize_temporal(
        coords, feats, labels,
        centers=centers,
        return_transformation=self.return_transformation)
    if self.return_transformation:
        coords_t, feats_t, labels_t, transformation_t = outs
    else:
        coords_t, feats_t, labels_t = outs
    # Concatenate all frames; append the frame index as an extra coord column.
    joint_coords = np.vstack([
        np.hstack((coords, np.ones((coords.shape[0], 1)) * i))
        for i, coords in enumerate(coords_t)])
    joint_feats = np.vstack(feats_t)
    joint_labels = np.hstack(labels_t)
    if self.input_transform is not None:
        joint_coords, joint_feats, joint_labels = self.input_transform(
            joint_coords, joint_feats, joint_labels)
    if self.target_transform is not None:
        joint_coords, joint_feats, joint_labels = self.target_transform(
            joint_coords, joint_feats, joint_labels)
    if self.IGNORE_LABELS is not None:
        # Map labels not used for evaluation to the ignore label.
        # NOTE: `dtype=np.int` was removed in NumPy 1.24; `int` is equivalent.
        joint_labels = np.array(
            [self.label_map[x] for x in joint_labels], dtype=int)
    return_args = [joint_coords, joint_feats, joint_labels]
    if self.return_transformation:
        # Tag raw points and transformations with their frame index as well.
        pointclouds = np.vstack([
            np.hstack((pointcloud[0][:, :6],
                       np.ones((pointcloud[0].shape[0], 1)) * i))
            for i, pointcloud in enumerate(world_pointclouds)])
        transformations = np.vstack([
            np.hstack((transformation, [i]))
            for i, transformation in enumerate(transformation_t)])
        return_args.extend([pointclouds.astype(np.float32),
                            transformations.astype(np.float32)])
    return tuple(return_args)
def __getitem__(self, index):
    """Load, augment, and voxelize one sample, also returning voxel maps.

    Returns ``(coords, feats, labels, unique_map, inverse_map)`` and, when
    ``self.return_transformation`` is set, additionally the raw point cloud
    (float32) and the 1 x K voxelization transformation (float32).  When
    ``config.use_aux`` is on, ``labels`` is an (N, 2) stack of
    ``[labels, aux]``.
    """
    if self.explicit_rotation > 1:
        # Quantize the rotation augmentation: the low bits of `index` select
        # one of `explicit_rotation` evenly spaced angles in [-pi, pi].
        rotation_space = np.linspace(-np.pi, np.pi, self.explicit_rotation + 1)
        rotation_angle = rotation_space[index % self.explicit_rotation]
        index //= self.explicit_rotation
    else:
        rotation_angle = None
    pointcloud, center = self.load_ply(index)
    # When load_whole is used, data loaded from the .pth file stays float64.
    pointcloud = pointcloud.astype('float32')
    if self.PREVOXELIZE_VOXEL_SIZE is not None:
        # Coarse pre-voxelization subsamples the cloud before augmentation.
        inds = ME.SparseVoxelize(
            pointcloud[:, :3] / self.PREVOXELIZE_VOXEL_SIZE, return_index=True)
        pointcloud = pointcloud[inds]
    if self.elastic_distortion:
        pointcloud = self._augment_elastic_distortion(pointcloud)
    coords, feats, labels = self.convert_mat2cfl(pointcloud)
    outs = self.sparse_voxelizer.voxelize(
        coords, feats, labels,
        center=center,
        rotation_angle=rotation_angle,
        return_transformation=self.return_transformation)
    if self.return_transformation:
        coords, feats, labels, unique_map, inverse_map, transformation = outs
        # Add a leading batch axis so transformations can be stacked later.
        transformation = np.expand_dims(transformation, 0)
    else:
        coords, feats, labels, unique_map, inverse_map = outs
    if self.config.use_aux:
        aux = self.aux_data[index]
        # Saved aux data may be a torch.Tensor or a numpy array; subsample it
        # with the same unique_map used for the main point cloud.
        if isinstance(aux, torch.Tensor):
            aux = aux[unique_map].numpy()
        else:
            aux = aux[unique_map]
        aux = aux + 1  # shift to align aux values with prediction labels
    if self.config.is_export:
        # Export mode: disable train-time transforms for this dataset.
        self.input_transform = None
        self.target_transform = None
    if self.config.use_aux:
        # Concatenate aux into labels as a second column.
        labels = np.stack([labels, aux], axis=1)
    if self.input_transform is not None:
        coords, feats, labels = self.input_transform(coords, feats, labels)
    if self.target_transform is not None:
        coords, feats, labels = self.target_transform(coords, feats, labels)
    if self.IGNORE_LABELS is not None:
        if self.load_whole:
            # load_whole labels are stored shifted by one; realign to [-1, 20].
            labels = labels - 1
        else:
            # Map labels not used for evaluation to the ignore label.
            # NOTE: `dtype=np.int` was removed in NumPy 1.24; `int` works.
            labels = np.array([self.label_map[x] for x in labels], dtype=int)
    return_args = [coords, feats, labels, unique_map, inverse_map]
    if self.return_transformation:
        return_args.extend([pointcloud.astype(np.float32),
                            transformation.astype(np.float32)])
    return tuple(return_args)