def _test_2d(self, matrix, out_file=None, sigma=None, **kwargs):
    """Check transform_subvolume_affine on 2d data against a full scipy transform.

    Compares several sub-bounding-boxes of the subvolume transformation with the
    corresponding crop of the full ``affine_transform`` result. If ``out_file``
    is given, the input is round-tripped through a chunked on-disk dataset
    first. If ``sigma`` is given the result is pre-smoothed, so only a
    non-triviality check (result not all zeros) is performed.
    """
    from elf.transformation import transform_subvolume_affine
    shape = (512, 512)
    x = np.random.rand(*shape)
    # reference: transform the full volume with scipy
    exp = affine_transform(x, matrix, **kwargs)

    if out_file is not None:
        # write to a chunked dataset and re-open it read-only, so the
        # subvolume transformation runs against a lazy on-disk source
        with open_file(out_file) as f:
            x = f.create_dataset('tmp', data=x, chunks=(64, 64))
        f = open_file(out_file, 'r')
        x = f['tmp']

    bbs = [
        np.s_[:, :], np.s_[:256, :256], np.s_[37:115, 226:503],
        np.s_[:200, :], np.s_[:, 10:115]
    ]
    for bb in bbs:
        bb, _ = normalize_index(bb, shape)
        res = transform_subvolume_affine(x, matrix, bb, sigma=sigma, **kwargs)

        exp_bb = exp[bb]
        self.assertEqual(res.shape, exp_bb.shape)
        if sigma is None:
            self.assertTrue(np.allclose(res, exp_bb))
        else:
            # FIX: the original used `self.assertTrue(~np.allclose(res, 0))`.
            # `~` on a bool is bitwise inversion (~True == -2, ~False == -1),
            # both truthy, so that assertion could never fail.
            self.assertFalse(np.allclose(res, 0))

    if out_file is not None:
        f.close()
def _test_3d(self, matrix, out_file=None, **kwargs):
    """Check transform_subvolume_affine on 3d data against a full scipy transform.

    Several sub-bounding-boxes of the subvolume transformation are compared with
    the corresponding crop of the full ``affine_transform`` result; if
    ``out_file`` is given, the input is round-tripped through a chunked
    on-disk dataset first.
    """
    from elf.transformation import transform_subvolume_affine
    vol_shape = (64, 64, 64)
    data = np.random.rand(*vol_shape)
    expected = affine_transform(data, matrix, **kwargs)

    if out_file is not None:
        # write the volume to a chunked dataset, then read it back lazily
        with open_file(out_file) as tmp_f:
            data = tmp_f.create_dataset('tmp', data=data, chunks=(16, 16, 16))
        f = open_file(out_file, 'r')
        data = f['tmp']

    bounding_boxes = (
        np.s_[:, :, :],
        np.s_[:32, :32, :32],
        np.s_[1:31, 5:27, 3:13],
        np.s_[4:19, :, 22:],
        np.s_[1:29],
        np.s_[:, 15:27, :],
        np.s_[:, 1:3, 4:14],
    )
    for bbox in bounding_boxes:
        bbox, _ = normalize_index(bbox, vol_shape)
        result = transform_subvolume_affine(data, matrix, bbox, **kwargs)
        expected_crop = expected[bbox]
        self.assertEqual(result.shape, expected_crop.shape)
        self.assertTrue(np.allclose(result, expected_crop))

    if out_file is not None:
        f.close()
def paths_from_ds(self, dataset):
    """Collect sample count, rois and raw/label paths from a (possibly nested) dataset.

    Supports ``ConcatDataset`` (recursing into its children),
    ``ImageCollectionDataset`` and ``SegmentationDataset``.

    Returns:
        tuple: ``(n_samples, load_2d_from_3d, rois, raw_paths, raw_key,
        label_paths, label_key)`` where ``rois`` has one entry per path.

    Raises:
        RuntimeError: for an unsupported dataset type, or when the dataset
            dimensionality is incompatible with ``self.ndim``.
    """
    if isinstance(dataset, ConcatDataset):
        datasets = dataset.datasets
        (n_samples, load_2d_from_3d, rois,
         raw_paths, raw_key,
         label_paths, label_key) = self.paths_from_ds(datasets[0])

        for ds in datasets[1:]:
            ns, l2d3d, bb, rp, rk, lp, lk = self.paths_from_ds(ds)
            # all children must agree on keys and on the 2d-from-3d mode
            assert rk == raw_key
            assert lk == label_key
            assert l2d3d == load_2d_from_3d
            raw_paths.extend(rp)
            label_paths.extend(lp)
            # FIX: `bb` is itself a list of rois; the original `rois.append(bb)`
            # nested that list, so `rois` no longer lined up one-to-one with
            # `raw_paths` / `label_paths`.
            rois.extend(bb)
            n_samples += ns

    elif isinstance(dataset, ImageCollectionDataset):
        raw_paths, label_paths = dataset.raw_images, dataset.label_images
        raw_key, label_key = None, None
        n_samples = len(raw_paths)
        load_2d_from_3d = False
        rois = [None] * n_samples

    elif isinstance(dataset, SegmentationDataset):
        raw_paths, label_paths = [dataset.raw_path], [dataset.label_path]
        raw_key, label_key = dataset.raw_key, dataset.label_key

        shape = open_file(raw_paths[0], 'r')[raw_key].shape

        roi = getattr(dataset, 'roi', None)
        if roi is not None:
            # FIX: normalize_index returns a (index, to_squeeze) pair (see its
            # other call sites); keeping the whole tuple, as the original did,
            # breaks the `r.stop - r.start` computation below.
            # NOTE(review): confirm this normalize_index is the two-value
            # variant from elf.util.
            roi, _ = normalize_index(roi, shape)
            shape = tuple(r.stop - r.start for r in roi)
        rois = [roi]

        if self.ndim == len(shape):
            n_samples = len(raw_paths)
            load_2d_from_3d = False
        elif self.ndim == 2 and len(shape) == 3:
            # a 3d volume served as a stack of 2d slices
            n_samples = shape[0]
            load_2d_from_3d = True
        else:
            raise RuntimeError

    else:
        raise RuntimeError(
            f"No support for dataset of type {type(dataset)}")

    return (n_samples, load_2d_from_3d, rois,
            raw_paths, raw_key,
            label_paths, label_key)
def test_normalize_index(self):
    """normalize_index + squeeze_singletons must reproduce plain numpy indexing."""
    from elf.util import normalize_index, squeeze_singletons
    shape = (128, 128, 128)
    data = np.random.rand(*shape)

    # is something important missing?
    test_indices = (
        np.s_[10:25, 30:60, 100:103],  # full index
        np.s_[:],                      # everything
        np.s_[..., 10:25],             # ellipsis
        np.s_[0, :, 10],               # singletons
    )
    for idx in test_indices:
        expected = data[idx]
        normalized, to_squeeze = normalize_index(idx, shape)
        actual = squeeze_singletons(data[normalized], to_squeeze)
        self.assertEqual(expected.shape, actual.shape)
        self.assertTrue(np.allclose(expected, actual))
def __getitem__(self, key):
    """Index into the roi-restricted view of ``self.source``.

    The key is normalized against this wrapper's shape, shifted by the roi
    start so it addresses the underlying source, and singleton dimensions
    produced by integer indices are squeezed from the result.
    """
    normalized, to_squeeze = normalize_index(key, self.shape)
    # translate each slice from roi-local to source coordinates
    offset_key = tuple(
        slice(sl.start + off, sl.stop + off)
        for sl, off in zip(normalized, self.roi_start)
    )
    return squeeze_singletons(self.source[offset_key], to_squeeze)