Beispiel #1
0
    def load_data(self, base_load_dirpath: str):
        """Load data from disk.

        Data will be loaded from the directory ``base_load_dirpath/NAME``
        (e.g. ``base_load_dirpath/dess/``).

        Override this method to load additional information such as volumes,
        subvolumes, quantitative maps, etc. In the override, call this function
        (``super().load_data(base_load_dirpath)``) before adding code to
        override this method.

        Args:
            base_load_dirpath (str): Directory path where all data is stored.

        Raises:
            NotADirectoryError: If ``base_load_dirpath/NAME/`` does not exist.
        """
        load_dirpath = self.__save_dir__(base_load_dirpath, create_dir=False)

        if not os.path.isdir(load_dirpath):
            raise NotADirectoryError("{} does not exist".format(load_dirpath))

        file_path = os.path.join(load_dirpath, "{}.data".format(self.NAME))

        # Restore only attributes that already exist on this instance; unknown
        # keys in the serialized metadata are silently ignored.
        metadata = io_utils.load_pik(file_path)
        for key, value in metadata.items():
            if hasattr(self, key):
                setattr(self, key, value)

        # Reloading dicoms is best-effort: the original dicom directory may not
        # exist on this machine. Catch Exception (not a bare ``except:``, which
        # would also swallow KeyboardInterrupt/SystemExit) and log instead.
        try:
            self.__load_dicom__()
        except Exception:
            logging.info(
                "Dicom directory {} not found. Will try to load from {}".format(
                    self.dicom_path, base_load_dirpath
                )
            )
Beispiel #2
0
    def test_pik(self):
        """Round-trip a dict of numpy arrays through ``save_pik``/``load_pik``."""
        filepath = os.path.join(IO_UTILS_DATA, "sample.pik")
        original = {
            "type": np.random.rand(10, 45, 2),
            "type2": np.random.rand(13, 95, 4),
        }

        io_utils.save_pik(filepath, original)
        reloaded = io_utils.load_pik(filepath)

        # Every array must come back bit-for-bit identical.
        for key, arr in original.items():
            assert (arr == reloaded[key]).all()
Beispiel #3
0
    def test_save_load(self):
        """Saving a Cones scan and reloading it — by directory, by file path, or
        via ``from_dict`` — must reproduce the volumes and echo times."""
        ys, _, _, _ = self._generate_mock_data()
        scan = Cones(ys)

        save_dir = os.path.join(self.data_dirpath, "test-save")
        save_path = scan.save(save_dir,
                              save_custom=True,
                              image_data_format=ImageDataFormat.nifti)
        assert set(os.listdir(save_dir)) == {"volumes", f"{scan.NAME}.data"}

        def verify(reloaded):
            # Volumes and echo times must survive the save/load round trip.
            for expected, actual in zip(scan.volumes, reloaded.volumes):
                assert expected.is_identical(actual)
            assert scan.echo_times == reloaded.echo_times

        verify(Cones.load(save_dir))
        verify(Cones.load(save_path))
        verify(Cones.from_dict(io_utils.load_pik(save_path)))
Beispiel #4
0
    def load(cls, path_or_data: Union[str, Dict], num_workers: int = 0):
        """Load scan.

        This method overloads the :func:`from_dict` method by supporting loading from a file
        in addition to the data dictionary. If loading and constructing a scan using
        :func:`from_dict` fails, defaults to loading data from original dicoms
        (if ``self._from_file_args`` is initialized).

        Args:
            path_or_data (Union[str, Dict]): Pickle file to load or data dictionary.
            num_workers (int, optional): Number of workers to use for loading.

        Returns:
            ScanSequence: Of type ``cls``.

        Raises:
            FileNotFoundError: If ``path_or_data`` is a path and the file does not exist.
            ValueError: If ``scan`` cannot be constructed.
        """
        if isinstance(path_or_data, (str, Path, os.PathLike)):
            # A directory means "use the conventional '<NAME>.data' file inside it".
            if os.path.isdir(path_or_data):
                path_or_data = os.path.join(path_or_data, f"{cls.NAME}.data")

            if not os.path.isfile(path_or_data):
                raise FileNotFoundError(f"File {path_or_data} does not exist")
            data = io_utils.load_pik(path_or_data)
        else:
            data = path_or_data

        # Fast path: the serialized dict is sufficient to rebuild the scan.
        try:
            return cls.from_dict(data)
        except Exception:
            warnings.warn(
                f"Failed to load {cls.__name__} from data. Trying to load from dicom file."
            )

        data = cls._convert_attr_name(data)
        data = cls.load_custom_data(data, num_workers=num_workers)

        scan = None
        if "_from_file_args" in data:
            dicom_args = data.pop("_from_file_args")
            assert dicom_args.pop("_type") == "dicom"
            scan = cls.from_dicom(**dicom_args, num_workers=num_workers)
        elif "dicom_path" in data:
            # Backwards compatibility with scans serialized before
            # ``_from_file_args`` existed.
            dicom_path = data.pop("dicom_path")
            ignore_ext = data.pop("ignore_ext", False)
            group_by = data.pop("split_by", cls.__DEFAULT_SPLIT_BY__)
            scan = cls.from_dicom(dicom_path,
                                  ignore_ext=ignore_ext,
                                  group_by=group_by,
                                  num_workers=num_workers)

        if scan is None:
            raise ValueError(
                f"Data is insufficient to construct {cls.__name__}")

        # Restore any remaining serialized attributes onto the rebuilt scan;
        # unknown keys only produce a warning so old pickles keep loading.
        for k, v in data.items():
            if not hasattr(scan, k):
                warnings.warn(
                    f"{cls.__name__} does not have attribute {k}. Skipping...")
                continue
            setattr(scan, k, v)

        return scan