Example No. 1
 def __init__(self,
              keys,
              subtrahend=None,
              divisor=None,
              nonzero=False,
              channel_wise=False):
     super().__init__(keys)
     self.normalizer = NormalizeIntensity(subtrahend, divisor, nonzero,
                                          channel_wise)
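
For context, here is a minimal usage sketch of the underlying array transform that these constructors wrap (this assumes MONAI's NormalizeIntensity; the data is purely illustrative):

import numpy as np
from monai.transforms import NormalizeIntensity

img = np.random.rand(2, 16, 16).astype(np.float32)  # channel-first image, shape (C, H, W)

# default behaviour: subtract the mean and divide by the std of the whole array
normalizer = NormalizeIntensity()
out = normalizer(img)
print(out.mean(), out.std())  # approximately 0.0 and 1.0
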
Example No. 2
 def __init__(
     self,
     keys: KeysCollection,
     subtrahend: Optional[np.ndarray] = None,
     divisor: Optional[np.ndarray] = None,
     nonzero: bool = False,
     channel_wise: bool = False,
 ) -> None:
     super().__init__(keys)
     self.normalizer = NormalizeIntensity(subtrahend, divisor, nonzero, channel_wise)
Example No. 3
 def __init__(
     self,
     keys: KeysCollection,
     subtrahend: Optional[np.ndarray] = None,
     divisor: Optional[np.ndarray] = None,
     nonzero: bool = False,
     channel_wise: bool = False,
     dtype: DtypeLike = np.float32,
     allow_missing_keys: bool = False,
 ) -> None:
     super().__init__(keys, allow_missing_keys)
     self.normalizer = NormalizeIntensity(subtrahend, divisor, nonzero, channel_wise, dtype)
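
A small, hedged usage sketch of the dictionary-based wrapper built by these constructors (assuming MONAI's NormalizeIntensityd, which matches the signature above in recent MONAI releases):

import numpy as np
from monai.transforms import NormalizeIntensityd

data = {"image": np.random.rand(1, 32, 32).astype(np.float32)}

# normalize only the "image" entry; missing keys are tolerated when allow_missing_keys=True
norm = NormalizeIntensityd(keys="image", nonzero=True, channel_wise=True, allow_missing_keys=True)
out = norm(data)
print(out["image"].mean())  # close to 0.0
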
Example No. 4
 def __init__(
     self,
     min_perc: float,
     max_perc: float,
     minmax: bool = False,
 ) -> None:
     super().__init__()
     self.min_perc = min_perc
     self.max_perc = max_perc
     if minmax:
         self.converter = ScaleIntensity(minv=0.0, maxv=1.0)
     else:
         self.converter = NormalizeIntensity()
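
A hedged sketch of the two converter branches chosen above (both are MONAI transforms; the wrapper class itself is not named in the snippet, so only the converters are shown):

import numpy as np
from monai.transforms import NormalizeIntensity, ScaleIntensity

img = np.random.rand(1, 8, 8).astype(np.float32) * 100

minmax_converter = ScaleIntensity(minv=0.0, maxv=1.0)  # minmax=True branch: rescale to [0, 1]
zscore_converter = NormalizeIntensity()                 # default branch: (x - mean) / std

print(minmax_converter(img).min(), minmax_converter(img).max())  # ~0.0 and ~1.0
print(zscore_converter(img).mean())                              # ~0.0
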
Example No. 5
    def __call__(self, data: Mapping[Hashable, np.ndarray]) -> Dict[Hashable, np.ndarray]:
        d = dict(data)
        self.randomize()
        if self.width_value is None or self.level_value is None:
            raise AssertionError("width_value and level_value must be set by randomize() before __call__")
        if not self._do_transform:
            return d

        lower, upper = _calc_grey_levels(self.width_value, self.level_value)
        normalizer = NormalizeIntensity(subtrahend=lower,
                                        divisor=(upper - lower),
                                        nonzero=self.nonzero,
                                        dtype=self.dtype)

        for key in self.key_iterator(d):
            d[key] = normalizer(d[key])
        return d
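
The rescaling step can be sketched as below; note that the body of _calc_grey_levels is not shown above, so the usual windowing convention (lower = level - width / 2, upper = level + width / 2) is only an assumption here:

import numpy as np
from monai.transforms import NormalizeIntensity

width, level = 400.0, 50.0  # illustrative window width/level values
# assumption: _calc_grey_levels returns the window bounds in the conventional way
lower, upper = level - width / 2, level + width / 2

img = np.linspace(-200, 300, 16, dtype=np.float32).reshape(1, 4, 4)
normalizer = NormalizeIntensity(subtrahend=lower, divisor=(upper - lower))
out = normalizer(img)  # values at `lower` map to 0, values at `upper` map to 1 (no clipping)
print(out.min(), out.max())
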
Example No. 6
    def __call__(self, data: Mapping[Hashable, NdarrayOrTensor]) -> Dict[Hashable, NdarrayOrTensor]:
        """
        This transform can normalize N-dimensional spatial (channel-first) data.
        It also supports pseudo N-D spatial data (e.g., a (C, H, W) array is a pseudo-3D
        data point where C is the number of slices).

        Args:
            data: a dictionary containing (key, value) pairs from
                the loaded dataset

        Returns:
            the new data dictionary
        """
        d = dict(data)

        # prepare the normalizer based on self.ref_key
        if self.default_normalizer.channel_wise:
            # perform channel-wise normalization
            # compute mean of each channel in the input for mean-std normalization
            # subtrahend holds one mean per channel, i.e. shape (C,) for a channel-first input
            if self.default_normalizer.subtrahend is None:
                subtrahend = np.array(
                    [val.mean() if isinstance(val, ndarray) else val.float().mean().item() for val in d[self.ref_key]]
                )
            # users can define default values instead of mean
            else:
                subtrahend = self.default_normalizer.subtrahend  # type: ignore

            # compute std of each channel in the input for mean-std normalization
            # will have the same shape as subtrahend
            if self.default_normalizer.divisor is None:
                divisor = np.array(
                    [
                        val.std() if isinstance(val, ndarray) else val.float().std(unbiased=False).item()
                        for val in d[self.ref_key]
                    ]
                )
            else:
                # users can define default values instead of std
                divisor = self.default_normalizer.divisor  # type: ignore
        else:
            # perform ordinary normalization (not channel-wise)
            # subtrahend will be a scalar and is the mean of d[self.ref_key], unless user specifies another value
            if self.default_normalizer.subtrahend is None:
                if isinstance(d[self.ref_key], ndarray):
                    subtrahend = d[self.ref_key].mean()  # type: ignore
                else:
                    subtrahend = d[self.ref_key].float().mean().item()  # type: ignore
            # users can define default values instead of mean
            else:
                subtrahend = self.default_normalizer.subtrahend  # type: ignore

            # divisor will be a scalar and is the std of d[self.ref_key], unless user specifies another value
            if self.default_normalizer.divisor is None:
                if isinstance(d[self.ref_key], ndarray):
                    divisor = d[self.ref_key].std()  # type: ignore
                else:
                    divisor = d[self.ref_key].float().std(unbiased=False).item()  # type: ignore
            else:
                # users can define default values instead of std
                divisor = self.default_normalizer.divisor  # type: ignore

        # this creates a new normalizer instance based on self.ref_key
        normalizer = NormalizeIntensity(
            subtrahend,
            divisor,
            self.default_normalizer.nonzero,
            self.default_normalizer.channel_wise,
            self.default_normalizer.dtype,
        )

        # save mean and std
        d["mean"] = subtrahend
        d["std"] = divisor

        # perform normalization
        for key in self.key_iterator(d):
            d[key] = normalizer(d[key])

        return d
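
The idea above (take the normalization statistics from a reference entry and apply them to the target keys) can be sketched at the array level; the enclosing dictionary transform's name and constructor are not shown in the snippet, so this only reproduces the statistics logic with NormalizeIntensity:

import numpy as np
from monai.transforms import NormalizeIntensity

ref = np.random.rand(2, 16, 16).astype(np.float32)     # channel-first reference, shape (C, H, W)
target = np.random.rand(2, 16, 16).astype(np.float32)  # image normalized with the reference statistics

# channel-wise statistics of the reference: one mean/std per channel, shape (C,)
subtrahend = np.array([c.mean() for c in ref])
divisor = np.array([c.std() for c in ref])

normalizer = NormalizeIntensity(subtrahend=subtrahend, divisor=divisor, channel_wise=True)
out = normalizer(target)
print(out.shape, float(out.mean()))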