Example #1
    def __init__(self, keys: KeysCollection) -> None:
        """
        Args:
            keys: keys of the corresponding items to be transformed.
                See also: :py:class:`monai.transforms.compose.MapTransform`
        """
        super().__init__(keys)
        self.converter = ToNumpy()
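This constructor appears to come from MONAI's dictionary-based ToNumpyd transform: MapTransform stores the keys, and the array-level ToNumpy converter is applied to each keyed item when the transform is called. A minimal usage sketch, assuming the snippet belongs to monai.transforms.ToNumpyd (the __call__ loop over keys is not shown above):

    import numpy as np
    import torch
    from monai.transforms import ToNumpyd

    data = {"image": torch.ones(1, 4, 4), "label": 0}
    out = ToNumpyd(keys="image")(data)            # only the "image" entry is converted
    assert isinstance(out["image"], np.ndarray)   # tensor -> numpy array
    assert out["label"] == 0                      # not listed in keys, left untouched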
Example #2
    def __init__(self, keys: KeysCollection, allow_missing_keys: bool = False) -> None:
        """
        Args:
            keys: keys of the corresponding items to be transformed.
                See also: :py:class:`monai.transforms.compose.MapTransform`
            allow_missing_keys: don't raise exception if key is missing.
        """
        super().__init__(keys, allow_missing_keys)
        self.converter = ToNumpy()
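The only difference from Example #1 is the allow_missing_keys flag, which later MONAI versions pass through to MapTransform so that absent keys are skipped instead of raising an error. A short sketch of that behaviour, again assuming monai.transforms.ToNumpyd:

    from monai.transforms import ToNumpyd

    data = {"image": [[1.0, 2.0], [3.0, 4.0]]}      # no "mask" entry
    lenient = ToNumpyd(keys=["image", "mask"], allow_missing_keys=True)
    lenient(data)                                    # missing "mask" is silently skipped
    # ToNumpyd(keys=["image", "mask"])(data)         # default allow_missing_keys=False would raise KeyError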
Example #3
    def inverse(self, data: Mapping[Hashable, Any]) -> Dict[Hashable, Any]:
        d = deepcopy(dict(data))
        for key in self.key_iterator(d):
            # Create inverse transform
            inverse_transform = ToNumpy()
            # Apply inverse
            d[key] = inverse_transform(d[key])
            # Remove the applied transform
            self.pop_transform(d, key)
        return d
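This is the inverse half of MONAI's InvertibleTransform pattern and looks like ToTensord.inverse: the forward call converts each keyed item to a tensor and records the applied transform, while inverse converts the item back to a NumPy array and pops the recorded entry. A minimal round-trip sketch, assuming the method belongs to monai.transforms.ToTensord:

    import numpy as np
    from monai.transforms import ToTensord

    t = ToTensord(keys="image")
    data = {"image": np.zeros((1, 4, 4), dtype=np.float32)}

    forward = t(data)               # numpy -> torch.Tensor, transform info is recorded
    restored = t.inverse(forward)   # torch.Tensor -> numpy, recorded info is popped
    assert isinstance(restored["image"], np.ndarray)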
Example #4
    def test_dataset(self):
        with tempfile.TemporaryDirectory() as tempdir:
            full_names, ref_data = [], []
            for filename in FILENAMES:
                test_image = np.random.randint(0, 2, size=(4, 4, 4))
                ref_data.append(test_image)
                save_path = os.path.join(tempdir, filename)
                full_names.append(save_path)
                nib.save(nib.Nifti1Image(test_image, np.eye(4)), save_path)

            # default loading no meta
            dataset = ImageDataset(full_names)
            for d, ref in zip(dataset, ref_data):
                np.testing.assert_allclose(d, ref, atol=1e-3)

            # loading no meta, dtype cast to float16
            dataset = ImageDataset(full_names, dtype=np.float16)
            for d, _ in zip(dataset, ref_data):
                self.assertEqual(d.dtype, np.float16)

            # loading with meta, no transform
            dataset = ImageDataset(full_names, image_only=False)
            for d_tuple, ref in zip(dataset, ref_data):
                d, meta = d_tuple
                np.testing.assert_allclose(d, ref, atol=1e-3)
                np.testing.assert_allclose(meta["original_affine"], np.eye(4))

            # loading image/label, no meta
            dataset = ImageDataset(full_names, seg_files=full_names, image_only=True)
            for d_tuple, ref in zip(dataset, ref_data):
                img, seg = d_tuple
                np.testing.assert_allclose(img, ref, atol=1e-3)
                np.testing.assert_allclose(seg, ref, atol=1e-3)

            # loading image with a plain transform, no meta
            dataset = ImageDataset(full_names, transform=lambda x: x + 1, image_only=True)
            for d, ref in zip(dataset, ref_data):
                np.testing.assert_allclose(d, ref + 1, atol=1e-3)

            # loading image/label, with meta
            dataset = ImageDataset(
                full_names,
                transform=lambda x: x + 1,
                seg_files=full_names,
                seg_transform=lambda x: x + 2,
                image_only=False,
            )
            for d_tuple, ref in zip(dataset, ref_data):
                img, seg, meta, seg_meta = d_tuple
                np.testing.assert_allclose(img, ref + 1, atol=1e-3)
                np.testing.assert_allclose(seg, ref + 2, atol=1e-3)
                np.testing.assert_allclose(meta["original_affine"], np.eye(4), atol=1e-3)
                np.testing.assert_allclose(seg_meta["original_affine"], np.eye(4), atol=1e-3)

            # loading image/label plus classification labels, with meta and label_transform
            dataset = ImageDataset(
                image_files=full_names,
                seg_files=full_names,
                labels=[1, 2, 3],
                transform=lambda x: x + 1,
                label_transform=Compose(
                    [
                        ToNumpy(),
                        MapLabelValue(orig_labels=[1, 2, 3], target_labels=[30.0, 20.0, 10.0], dtype=np.float32),
                    ]
                ),
                image_only=False,
            )
            for idx, (d_tuple, ref) in enumerate(zip(dataset, ref_data)):
                img, seg, label, meta, seg_meta = d_tuple
                np.testing.assert_allclose(img, ref + 1, atol=1e-3)
                np.testing.assert_allclose(seg, ref, atol=1e-3)
                # test label_transform
                np.testing.assert_allclose((3 - idx) * 10.0, label)
                self.assertTrue(isinstance(label, np.ndarray))
                self.assertEqual(label.dtype, np.float32)
                np.testing.assert_allclose(meta["original_affine"], np.eye(4), atol=1e-3)
                np.testing.assert_allclose(seg_meta["original_affine"], np.eye(4), atol=1e-3)

            # loading image/label, with sync. transform
            dataset = ImageDataset(
                full_names, transform=RandTest(), seg_files=full_names, seg_transform=RandTest(), image_only=False
            )
            for d_tuple, ref in zip(dataset, ref_data):
                img, seg, meta, seg_meta = d_tuple
                np.testing.assert_allclose(img, seg, atol=1e-3)
                self.assertFalse(np.allclose(img, ref))
                np.testing.assert_allclose(meta["original_affine"], np.eye(4), atol=1e-3)
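The test method references names defined elsewhere in its test module (FILENAMES, RandTest) and assumes the usual imports. A hedged sketch of that scaffolding so the method above could run; the exact file names and the RandTest body are assumptions, but RandTest needs to be a Randomizable transform for the final synchronized-transform check (img equal to seg) to hold, because ImageDataset applies the same per-item random seed to the image and segmentation transforms when they are Randomizable:

    import os
    import tempfile
    import unittest

    import nibabel as nib
    import numpy as np

    from monai.data import ImageDataset
    from monai.transforms import Compose, MapLabelValue, Randomizable, ToNumpy

    FILENAMES = ["test1.nii.gz", "test2.nii", "test3.nii.gz"]  # assumed: three files, matching labels=[1, 2, 3]


    class RandTest(Randomizable):
        """Test transform that adds a random offset, re-randomized on every call."""

        def randomize(self, data=None):
            self._offset = self.R.uniform(low=0, high=100)

        def __call__(self, data):
            self.randomize()
            return data + self._offset


    class TestImageDataset(unittest.TestCase):
        # the test_dataset method shown above would live here
        pass


    if __name__ == "__main__":
        unittest.main()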