Example 1
    def __calc_quant_vals__(self, quant_map: MedicalVolume,
                            map_type: QuantitativeValueType):
        """Helper method to get quantitative values for tissue - implemented per tissue.

        Different tissues should override this as they see fit.

        Args:
            quant_map (MedicalVolume): 3D map of pixel-wise quantitative measures
                (T2, T2*, T1-rho, etc.). Volume should have ``np.nan`` values for
                all pixels unable to be calculated.
            map_type (QuantitativeValueType): Type of quantitative value to analyze.

        Raises:
            TypeError: If `quant_map` is not of type `MedicalVolume` or `map_type` is not of type
                `QuantitativeValueType`.
            ValueError: If no mask is found for tissue.
        """
        if not isinstance(quant_map, MedicalVolume):
            raise TypeError("Expected type 'MedicalVolume' for `quant_map`")
        if not isinstance(map_type, QuantitativeValueType):
            raise TypeError(
                "Expected type 'QuantitativeValueType' for `map_type`")

        if self.__mask__ is None:
            raise ValueError("Please initialize mask for {}".format(
                self.FULL_NAME))

        quant_map.reformat(self.__mask__.orientation, inplace=True)
        pass
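A hedged sketch of how a tissue class might build on this helper. The subclass name `ExampleTissue`, the assumption that the enclosing base class is called `Tissue`, and the mean-value computation are all illustrative, not the library's actual implementation.

import numpy as np

class ExampleTissue(Tissue):  # `Tissue` assumed to be the class defining the helper above
    FULL_NAME = "example tissue"

    def __calc_quant_vals__(self, quant_map, map_type):
        # The base implementation validates argument types, checks that a mask exists,
        # and reformats `quant_map` to the mask's orientation.
        super().__calc_quant_vals__(quant_map, map_type)
        # Hypothetical per-tissue analysis: mean quantitative value inside the mask,
        # ignoring voxels that could not be computed (np.nan).
        region = quant_map.volume[self.__mask__.volume.astype(bool)]
        return float(np.nanmean(region))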
Example 2
    def set_mask(self, mask: MedicalVolume):
        """Set mask for tissue.

        Args:
            mask (MedicalVolume): Binary mask of segmented tissue.
        """
        assert type(
            mask
        ) is MedicalVolume, "mask for tissue must be of type MedicalVolume"
        mask.reformat(SAGITTAL, inplace=True)
        self.__mask__ = mask
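A minimal usage sketch for `set_mask`. The import path, the value of the `SAGITTAL` constant, and the `tissue` instance are assumptions for illustration.

import numpy as np
from dosma import MedicalVolume  # assumed import path

SAGITTAL = ("SI", "AP", "LR")    # assumed value of the SAGITTAL constant used above

# Build a binary mask in an arbitrary orientation (identity affine for brevity).
mask = MedicalVolume((np.random.rand(10, 20, 30) > 0.5).astype(np.uint8), np.eye(4))

# `tissue` is any instance of the class defining `set_mask`. The mask is reformatted
# to the sagittal orientation before being stored, which is why Example 1 can later
# reformat quantitative maps to match the mask's orientation.
tissue.set_mask(mask)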
Example 3
def _format_volume_to_header(volume: MedicalVolume) -> MedicalVolume:
    """Reformats the volume according to its header.

    Args:
        volume (MedicalVolume): The volume to reformat.
            Must be 3D and have headers of shape (1, 1, volume.shape[2]).

    Returns:
        MedicalVolume: The reformatted volume.
    """
    headers = volume.headers()
    assert headers.shape == (1, 1, volume.shape[2])

    affine = to_RAS_affine(headers.flatten())
    orientation = stdo.orientation_nib_to_standard(nib.aff2axcodes(affine))

    # Currently do not support mismatch in scanner_origin.
    if tuple(affine[:3, 3]) != volume.scanner_origin:
        raise ValueError(
            "Scanner origin mismatch. "
            "Currently we do not handle mismatch in scanner origin "
            "(i.e. cannot flip across axis)")

    volume = volume.reformat(orientation)
    assert volume.headers().shape == (1, 1, volume.shape[2])
    return volume
Example 4
    def test_reformat_header(self):
        volume = np.random.rand(10, 20, 30, 40)
        headers = ututils.build_dummy_headers(volume.shape[2:])
        mv = MedicalVolume(volume, self._AFFINE, headers=headers)
        new_orientation = tuple(x[::-1] for x in mv.orientation[::-1])

        mv2 = mv.reformat(new_orientation)
        assert mv2._headers.shape == (30, 1, 1, 40)

        mv2 = mv.clone()
        mv2.reformat(new_orientation, inplace=True)
        assert mv2._headers.shape == (30, 1, 1, 40)

        volume = np.random.rand(10, 20, 30, 40)
        headers = ututils.build_dummy_headers((volume.shape[2], 1))
        mv = MedicalVolume(volume, self._AFFINE, headers=headers)
        new_orientation = tuple(x[::-1] for x in mv.orientation[::-1])

        mv2 = mv.reformat(new_orientation)
        assert mv2._headers.shape == (30, 1, 1, 1)
Example 5
    def test_4d(self):
        vol = np.stack([np.ones((10, 20, 30)), 2 * np.ones((10, 20, 30))],
                       axis=-1)
        mv = MedicalVolume(vol, self._AFFINE)
        assert mv.orientation == ("SI", "AP", "LR")
        assert mv.shape == (10, 20, 30, 2)

        assert np.all(mv[..., 0].volume == 1)
        assert np.all(mv[..., 1].volume == 2)

        ornt = ("AP", "IS", "RL")
        mv2 = mv.reformat(ornt)
        assert mv2.orientation == ornt
        assert mv2.shape == (20, 10, 30, 2)

        mv2 = mv.reformat(ornt).reformat(mv.orientation)
        assert mv2.is_identical(mv)

        fp = os.path.join(self._TEMP_PATH, "test_4d.nii.gz")
        mv.save_volume(fp)
        mv2 = NiftiReader().load(fp)
        assert mv2.is_identical(mv)
Example 6
    def check_orientations(self, mv: MedicalVolume, orientations):
        """
        Apply each orientation specified in orientations to the Medical Volume mv
        Assert if mv --> apply orientation --> apply original orientation != mv original
        position coordinates.

        Args:
            mv: a Medical Volume
            orientations: a list or tuple of orientation tuples
        """
        o_base, so_base, ps_base = mv.orientation, mv.scanner_origin, mv.pixel_spacing
        ps_affine = np.array(mv.affine)

        for o in orientations:
            # Reorient to some orientation
            mv.reformat(o, inplace=True)

            # Reorient to original orientation
            mv.reformat(o_base, inplace=True)

            assert mv.orientation == o_base, "Orientation mismatch: Expected %s, got %s" % (
                str(o_base),
                str(mv.orientation),
            )
            assert mv.scanner_origin == so_base, "Scanner Origin mismatch: Expected %s, got %s" % (
                str(so_base),
                str(mv.scanner_origin),
            )
            assert mv.pixel_spacing == ps_base, "Pixel Spacing mismatch: Expected %s, got %s" % (
                str(ps_base),
                str(mv.pixel_spacing),
            )

            assert (mv.affine == ps_affine).all(
            ), "Affine matrix mismatch: Expected\n%s\ngot\n%s" % (
                str(ps_affine),
                str(mv.affine),
            )
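An illustrative invocation from inside a test method of the same class; `self._AFFINE` is the affine used by the other tests above, and the orientation tuples follow the two-letter convention seen throughout these examples.

# Inside a test method of the same test class:
mv = MedicalVolume(np.random.rand(10, 20, 30), self._AFFINE)
self.check_orientations(mv, [("AP", "IS", "RL"), ("LR", "AP", "SI"), ("SI", "AP", "LR")])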
Example 7
    def test_reformat(self):
        mv = MedicalVolume(np.random.rand(10, 20, 30), self._AFFINE)
        new_orientation = tuple(x[::-1] for x in mv.orientation[::-1])

        mv2 = mv.reformat(new_orientation)
        assert mv2.orientation == new_orientation
        assert id(mv2) != id(mv)
        assert np.shares_memory(mv2._volume, mv._volume)

        mv2 = mv.reformat(new_orientation, inplace=True)
        assert mv2.orientation == new_orientation
        assert id(mv2) == id(mv)
        assert np.shares_memory(mv2._volume, mv._volume)

        mv2 = mv.reformat(mv.orientation)
        assert id(mv2) != id(mv)
        assert np.shares_memory(mv2._volume, mv._volume)

        mv2 = mv.reformat(mv.orientation, inplace=True)
        assert id(mv2) == id(mv)
        assert np.shares_memory(mv2._volume, mv._volume)

        mv2 = mv.reformat(new_orientation).reformat(mv.orientation)
        assert mv2.is_identical(mv)
Example 8
    def save(
        self,
        volume: MedicalVolume,
        dir_path: str,
        fname_fmt: str = np._NoValue,
        sort_by: Union[str, int, Sequence[Union[str, int]]] = np._NoValue,
    ):
        """Save `medical volume` in dicom format.

        This function assumes headers for the volume (``volume.headers()``) exist
        for one spatial dimension. Headers for non-spatial dimensions are optional, but
        highly recommended. If provided, they will be used to write the volume. If not,
        headers will be appropriately broadcast to these dimensions. Note that this means
        multiple files will have the same header information and will not be able to be
        loaded automatically.

        Currently, header spatial information (orientation, origin, spacing between slices,
        etc.) is neither overwritten nor validated. All data must correspond to the same
        spatial information as specified in the headers to produce valid DICOM files.

        Args:
            volume (MedicalVolume): Volume to save.
            dir_path: Directory path to store dicom files. Dicoms are stored in directories,
                as multiple files are needed to store the volume.
            fname_fmt (str, optional): Formatting string for filenames. Must contain ``%d``,
                which corresponds to slice number. Defaults to ``self.fname_fmt``.
            sort_by (``str``(s) or ``int``(s), optional): DICOM attribute(s) used
                to define ordering of slices prior to writing. If ``None``, this ordering
                will be defined by the order of blocks in ``volume``. Defaults to
                ``self.sort_by``.

        Raises:
            ValueError: If `volume` does not have initialized headers, or if `volume` was
                flipped across any axis. Flipping changes the scanner origin, which is
                currently not handled.
        """
        fname_fmt = fname_fmt if fname_fmt != np._NoValue else self.fname_fmt
        sort_by = sort_by if sort_by != np._NoValue else self.sort_by

        # Get orientation indicated by headers.
        headers = volume.headers()
        if headers is None:
            raise ValueError(
                "MedicalVolume headers must be initialized to save as a dicom")

        sort_by = _wrap_as_tuple(sort_by, default=())

        # Reformat to put headers in last dimensions.
        single_dim = []
        full_dim = []
        for i, dim in enumerate(headers.shape[:3]):
            if dim == 1:
                single_dim.append(i)
            else:
                full_dim.append(i)
        if len(full_dim) > 1:
            raise ValueError(
                f"Only one spatial dimension can have headers. Got {len(full_dim)} - "
                f"headers.shape={headers.shape[:3]}")
        new_orientation = tuple(volume.orientation[x]
                                for x in single_dim + full_dim)

        volume = volume.reformat(new_orientation)
        assert volume.headers().shape[:3] == (1, 1, volume.shape[2])

        # Reformat medical volume to expected orientation specified by dicom headers.
        # NOTE: This is temporary. Future fixes will allow us to modify header
        # data to match affine matrix.
        if len(volume.shape) > 3:
            shape = volume.shape[3:]
            multi_volumes = np.empty(shape, dtype=object)
            for dims in itertools.product(
                    *[list(range(0, x)) for x in multi_volumes.shape]):
                multi_volumes[dims] = _format_volume_to_header(
                    volume[(Ellipsis, ) + dims])
            multi_volumes = multi_volumes.flatten()
            volume_arr = np.concatenate([v.volume for v in multi_volumes],
                                        axis=-1)
            headers = np.concatenate(
                [v.headers(flatten=True) for v in multi_volumes], axis=-1)
        else:
            volume = _format_volume_to_header(volume)
            volume_arr = volume.volume
            headers = volume.headers(flatten=True)

        assert headers.ndim == 1
        assert volume_arr.shape[2] == len(
            headers
        ), "Dimension mismatch - {:d} slices but {:d} headers".format(
            volume_arr.shape[-1], len(headers))

        if sort_by:
            idxs = np.asarray(
                index_natsorted(
                    headers,
                    key=lambda h: tuple(
                        _unpack_dicom_attr(h, k, required=True)
                        for k in sort_by),
                ))
            headers = headers[idxs]
            volume_arr = volume_arr[..., idxs]

        # Check if dir_path exists.
        os.makedirs(dir_path, exist_ok=True)

        num_slices = len(headers)
        if not fname_fmt:
            filename_format = "I%0" + str(max(4, ceil(
                log10(num_slices)))) + "d.dcm"
        else:
            filename_format = fname_fmt

        filepaths = [
            os.path.join(dir_path, filename_format % (s + 1))
            for s in range(num_slices)
        ]
        if self.num_workers:
            slices = [volume_arr[..., s] for s in range(num_slices)]
            if self.verbose:
                process_map(_write_dicom_file, slices, headers, filepaths)
            else:
                with mp.Pool(self.num_workers) as p:
                    out = p.starmap_async(_write_dicom_file,
                                          zip(slices, headers, filepaths))
                    out.wait()
        else:
            for s in tqdm(range(num_slices), disable=not self.verbose):
                _write_dicom_file(volume_arr[..., s], headers[s], filepaths[s])
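A hedged usage sketch for this writer. The class name `DicomWriter`, its constructor arguments, and the import path are assumptions inferred from the attributes referenced above (`self.fname_fmt`, `self.sort_by`, `self.num_workers`, `self.verbose`); the directory paths are placeholders.

from dosma import DicomReader, DicomWriter  # assumed import path

# Load a volume whose headers were populated by the reader.
mv = DicomReader().load("/path/to/input_dicoms")[0]

# Write one DICOM file per slice, ordered by the InstanceNumber attribute.
writer = DicomWriter(num_workers=4, verbose=True)
writer.save(mv, "/path/to/output_dicoms", fname_fmt="I%04d.dcm", sort_by="InstanceNumber")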
Example 9
    def to_metrics(
        self,
        mask: MedicalVolume = None,
        labels: Dict[int, str] = None,
        bounds: Tuple[float, float] = None,
        closed: str = "right",
    ) -> pd.DataFrame:
        """Compute scalar metrics for quantitative values.

        Metrics include mean, median, standard deviation, and number of voxels.
        Valid voxels are defined as finite valued voxels within the interval
        `bounds` (if specified).

        Args:
            mask (MedicalVolume, optional): Label mask. Labels should be unsigned ints (uint).
                Metrics will be computed for each non-zero label.
                If `labels` is specified, metrics are computed only for the keys in the
                `labels` dictionary.
                If not specified, metrics will be calculated over all valid voxels.
            labels (Dict[int, str], optional): Mapping from label to label name.
                If specified, only labels in this argument will be computed.
            bounds (Tuple[float, float], optional): The (left, right) bounds
                for computing metrics. By default, the bounds are the
                open interval `(-inf, inf)`.
            closed (str, optional): If `bounds` specified, whether the bounds are closed
                on the left-side, right-side, both or neither.
                One of {'right', 'left', 'both', 'neither'}.

        Returns:
            metrics (pd.DataFrame): Metrics for quantitative value. Columns include:
                * "Region" (str): The label name.
                * "Mean" (float): Average quantitative value in a region.
                * "Median" (float): Median quantitative value in a region.
                * "Std" (float): Standard deviation of quantitative value in a region.
                * "# Voxels" (int): The number of valid voxels in the region.
        """
        volume = self.volumetric_map.volume
        valid_mask = np.isfinite(volume)
        if bounds:
            assert len(bounds) == 2, len(bounds)  # Expected (left,right) bound
            lb, ub = bounds[0], bounds[1]
            assert lb <= ub, f"lower:{lb}, upper: {ub}"  # Expected left bound <= right bound
            assert closed in ("right", "left", "both", "neither"), closed
            lb_mask = volume >= lb if closed in ("left", "both") else volume > lb
            ub_mask = volume <= ub if closed in ("right", "both") else volume < ub
            valid_mask &= lb_mask & ub_mask

        if mask is not None:
            mask = mask.reformat(self.volumetric_map.orientation)
            mask = mask.volume

            if labels is None:
                unique_vals = [x for x in np.unique(mask) if x > 0]
                labels = {int(i): f"label_{int(i)}" for i in unique_vals}
            labels.update({-1: "total"})
        else:
            labels = {-2: "total"}

        metrics = defaultdict(list)
        for label, name in labels.items():
            if label == -2:
                qv_region_vals = volume[valid_mask]  # Entire volume.
            elif label == -1:
                qv_region_vals = volume[np.isin(mask, list(labels.keys())) & valid_mask]  # noqa
            else:
                qv_region_vals = volume[(mask == label) & valid_mask]
            num_voxels = np.prod(qv_region_vals.shape)

            metrics["Region"].append(name)
            metrics["Mean"].append(np.nanmean(qv_region_vals))
            metrics["Std"].append(np.nanstd(qv_region_vals))
            metrics["Median"].append(np.nanmedian(qv_region_vals))
            metrics["# Voxels"].append(num_voxels)

        return pd.DataFrame(metrics)
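An illustrative call, assuming `qv` is an object exposing `volumetric_map` and the `to_metrics` method above (e.g., a quantitative T2 map) and `label_mask` is a MedicalVolume of unsigned-integer labels; the label names and bounds are made up for this example.

metrics = qv.to_metrics(
    mask=label_mask,
    labels={1: "femoral cartilage", 2: "tibial cartilage"},
    bounds=(0, 100),   # keep voxels in [0, 100): closed on the left, open on the right
    closed="left",
)
print(metrics[["Region", "Mean", "Median", "Std", "# Voxels"]])

In addition to one row per requested label, the returned frame includes a "total" row computed over the union of those labels.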
Example 10
    def fit(self, x=None, y: Sequence[MedicalVolume] = None, mask=None):
        """Perform monoexponential fitting.

        Returns:
            Tuple[MedicalVolume, MedicalVolume]:

                time_constant_volume (MedicalVolume): The per-voxel tc fit.
                rsquared_volume (MedicalVolume): The per-voxel r2 goodness of fit.
        """
        x = self.x if x is None else x
        y = self.y if y is None else y
        mask = self.mask if mask is None else mask

        self._check_y(x, y)
        orientation = y[0].orientation
        y = [sv.reformat(orientation) for sv in y]

        # Convert ndarray masks to MedicalVolume and align the mask with the fitting orientation.
        if isinstance(mask, np.ndarray):
            mask = MedicalVolume(mask, affine=y[0].affine)
        if mask is not None and not isinstance(mask, MedicalVolume):
            raise TypeError("`mask` must be a MedicalVolume or np.ndarray")
        mask = mask.reformat(orientation) if mask is not None else None

        if self.tc0 == "polyfit":
            polyfitter = PolyFitter(
                1,
                r2_threshold=0,
                num_workers=None,
                nan_to_num=0.0,
                chunksize=self.chunksize,
                verbose=self.verbose,
            )
            vols = [
                sv.astype(np.float32)
                if np.issubdtype(sv.dtype, np.integer) else sv for sv in y
            ]
            vols = [sv + self._eps * (sv == 0) for sv in vols]
            assert all(np.all(v != 0) for v in vols)
            vols = [np.log(v) for v in vols]
            params, _ = polyfitter.fit(x, vols, mask=mask, copy_headers=False)
            p0 = {"a": np.exp(params[..., 1]), "b": params[..., 0]}
            del vols  # begin garbage collection for large arrays sooner
        else:
            p0 = {"a": 1.0, "b": -1 / self.tc0}

        curve_fitter = CurveFitter(
            monoexponential,
            y_bounds=None,
            out_ufuncs=(None, lambda _x: 1 / np.abs(_x)),
            out_bounds=((-np.inf, np.inf), self.bounds),
            r2_threshold=self.r2_threshold,
            num_workers=self.num_workers,
            chunksize=self.chunksize,
            verbose=self.verbose,
            nan_to_num=0.0,
        )
        popt, r_squared = curve_fitter.fit(x, y, mask=mask, p0=p0)
        tc_map = popt[..., 1]

        if self.decimal_precision is not None:
            tc_map = np.around(tc_map, self.decimal_precision)

        return tc_map, r_squared
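A hedged usage sketch for this fitter. The enclosing class name (`MonoExponentialFit` here) and its constructor signature are assumptions inferred from the attributes referenced above (`self.x`, `self.y`, `self.tc0`, `self.bounds`, `self.r2_threshold`, ...); the echo times and volumes are placeholders.

echo_times = [10.0, 20.0, 30.0, 40.0]   # x: e.g., echo times in ms
# echo_volumes: list of MedicalVolume, one per echo time, all spatially aligned.
fitter = MonoExponentialFit(
    x=echo_times,
    y=echo_volumes,
    tc0="polyfit",       # seed the fit with the log-linear polyfit branch shown above
    bounds=(0, 100),     # assumed valid range for the fitted time constant
    r2_threshold=0.9,
)
t2_map, r_squared = fitter.fit()   # per-voxel time constant and r^2 goodness-of-fit maps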