Example #1
0
 def test_pad_kwargs(self):
     """Verify DivisiblePad forwards backend-specific constant-padding kwargs."""
     for arr_ctor in TEST_NDARRAYS:
         img = arr_ctor(np.zeros((3, 8, 4)))
         if isinstance(img, np.ndarray):
             # numpy backend: per-axis (before, after) constant_values are supported
             pad = DivisiblePad(k=5, mode="constant", constant_values=((0, 0), (1, 1), (2, 2)))
             result = pad(img)
             np.testing.assert_allclose(result[:, :1, :4], np.ones((3, 1, 4)), rtol=1e-7, atol=0)
         else:
             # torch backend: a single scalar fill value
             result = DivisiblePad(k=5, mode="constant", value=2)(img).cpu()
     # NOTE(review): this assert sits outside the loop, so it only checks the
     # result from the last entry of TEST_NDARRAYS — confirm that is intended.
     torch.testing.assert_allclose(result[:, :, 4:5], np.ones((3, 10, 1)) + 1, rtol=1e-7, atol=0)
Example #2
0
    def test_epistemic_scoring(self):
        """Briefly fit a small UNet, then sanity-check EpistemicScoring's 3D entropy volume."""
        input_size = (20, 20, 20)
        device = "cuda" if torch.cuda.is_available() else "cpu"
        keys = ["image", "label"]
        num_training_ims = 10
        train_data = self.get_data(num_training_ims, input_size)
        test_data = self.get_data(1, input_size)

        # dictionary-based preprocessing for the training samples
        transforms = Compose([AddChanneld(keys), CropForegroundd(keys, source_key="image"), DivisiblePadd(keys, 4)])

        # array-based preprocessing used at inference time
        infer_transforms = Compose([AddChannel(), CropForeground(), DivisiblePad(4)])

        train_ds = CacheDataset(train_data, transforms)
        # cropped outputs can differ in size, so pad inside the collate so batches stack
        train_loader = DataLoader(train_ds, batch_size=2, collate_fn=pad_list_data_collate)

        model = UNet(3, 1, 1, channels=(6, 6), strides=(2, 2)).to(device)
        loss_function = DiceLoss(sigmoid=True)
        optimizer = torch.optim.Adam(model.parameters(), 1e-3)

        num_epochs = 10
        for _ in trange(num_epochs):
            epoch_loss = 0
            for batch in train_loader:
                images = batch["image"].to(device)
                labels = batch["label"].to(device)
                optimizer.zero_grad()
                loss = loss_function(model(images), labels)
                loss.backward()
                optimizer.step()
                epoch_loss += loss.item()
            epoch_loss /= len(train_loader)

        entropy_score = EpistemicScoring(
            model=model, transforms=infer_transforms, roi_size=[20, 20, 20], num_samples=10
        )
        # exercise the individual-inference path of epistemic scoring on a
        # stack of three identical test volumes
        ip_stack = np.array([test_data["image"]] * 3)
        score_3d = entropy_score.entropy_3d_volume(ip_stack)
        score_3d_sum = np.sum(score_3d)
        # the entropy volume matches the input size and its sum is a sensible float
        self.assertEqual(score_3d.shape, input_size)
        self.assertIsInstance(score_3d_sum, np.float32)
        self.assertGreater(score_3d_sum, 3.0)
Example #3
0
def load_data_and_mask(data, mask_data):
    """
    Load an image file and its mask files into a single training item.

    Args:
        data: path to a ``.npy`` image volume, stored as (z, y, x).
        mask_data: list of paths to mask volumes; ``None`` entries become
            all-zero masks of the (padded) image shape. Exactly 9 entries
            are expected.

    Returns:
        dict of {"image": padded array, "label": list of padded mask arrays,
        "name": the image path as str}.

    Raises:
        ValueError: if a mask is not binary (min 0 and max 1), or if the
            number of masks is not 9.
    """
    pad_xform = DivisiblePad(k=32)  # pad spatial dims up to multiples of 32
    img = np.load(data)  # z y x
    # add a channel dim for the transform, then drop it again
    img = pad_xform(img[None])[0]
    item = dict(image=img, label=[])
    for maskfnm in mask_data:
        if maskfnm is None:
            ms = np.zeros(img.shape, np.uint8)
        else:
            ms = np.load(maskfnm).astype(np.uint8)
            # explicit check instead of `assert`: asserts are stripped under -O
            if ms.min() != 0 or ms.max() != 1:
                raise ValueError(
                    f"mask {maskfnm} must be binary, got min={ms.min()} max={ms.max()}"
                )
        mask = pad_xform(ms[None])[0]
        item["label"].append(mask)
    if len(item["label"]) != 9:
        raise ValueError(f"expected 9 masks, got {len(item['label'])}")
    item["name"] = str(data)
    return item
Example #4
0
 def __init__(self,
              keys: KeysCollection,
              k: Union[Sequence[int], int],
              mode: NumpyPadModeSequence = NumpyPadMode.CONSTANT) -> None:
     """
     Dictionary-based padding: each selected item is padded so its spatial
     dimensions become divisible by ``k``.

     Args:
         keys: keys of the corresponding items to be transformed.
             See also: :py:class:`monai.transforms.compose.MapTransform`
         k: the target k for each spatial dimension.
             if `k` is negative or 0, the original size is preserved.
             if `k` is an int, the same `k` is applied to every input spatial dimension.
         mode: {``"constant"``, ``"edge"``, ``"linear_ramp"``, ``"maximum"``, ``"mean"``,
             ``"median"``, ``"minimum"``, ``"reflect"``, ``"symmetric"``, ``"wrap"``, ``"empty"``}
             One of the listed string values or a user supplied function. Defaults to ``"constant"``.
             See also: https://numpy.org/doc/1.18/reference/generated/numpy.pad.html
             A sequence of strings may also be given, one per key in ``keys``.
     See also :py:class:`monai.transforms.SpatialPad`
     """
     super().__init__(keys)
     self.k = k
     self.padder = DivisiblePad(k=k)
     # expand to exactly one padding mode per key
     self.mode = ensure_tuple_rep(mode, len(self.keys))
Example #5
0
 def test_pad_shape(self, input_param, input_data, expected_val):
     """Padded output shape matches the expected array, with and without a runtime mode override."""
     padder = DivisiblePad(**input_param)
     # use assertEqual, not assertAlmostEqual: shapes are tuples of ints, and
     # assertAlmostEqual only "works" on equal tuples via its == fast-path —
     # on a mismatch it raises a confusing TypeError instead of a clear diff.
     result = padder(input_data)
     self.assertEqual(result.shape, expected_val.shape)
     # the pad mode can also be overridden at call time
     result = padder(input_data, mode=input_param["mode"])
     self.assertEqual(result.shape, expected_val.shape)