Example 1
    def _isValidFile(cls, file: str) -> bool:
        """
        Checks whether a file is a NIfTI file
        accompanied by a valid dump of the DICOM header

        Parameters
        ----------
        file: str
            path to file to test

        Returns
        -------
        bool:
            True if the file is identified as NIfTI
        """

        if os.path.isfile(file):
            if os.path.basename(file).startswith('.'):
                logger.warning('{}: file {} is hidden'.format(
                    cls.formatIdentity(), file))
                return False
            path, base = os.path.split(file)

            header = os.path.join(
                path, "header_dump_" + tools.change_ext(base, "json"))
            if os.path.isfile(header):
                return True
        return False
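
All of the examples rely on a tools.change_ext helper that swaps a file's extension; its implementation is not part of this excerpt. A minimal sketch of the assumed behaviour (treating compound extensions such as .nii.gz as out of scope) could look like this:

import os


def change_ext(path: str, ext: str) -> str:
    # Sketch of the assumed tools.change_ext behaviour:
    # replace the last extension of `path` with `ext`.
    root, _ = os.path.splitext(path)
    return root + "." + ext
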
Example 2
 def copyRawFile(self, destination: str) -> str:
     if os.path.isfile(os.path.join(destination, self.currentFile(True))):
         logger.warning("{}: File {} exists at destination".format(
             self.recIdentity(), self.currentFile(True)))
     shutil.copy2(self.currentFile(), destination)
     shutil.copy2(tools.change_ext(self.currentFile(), "json"), destination)
     return os.path.join(destination, self.currentFile(True))
Example 3
 def copyRawFile(self, destination: str) -> str:
     if os.path.isfile(os.path.join(destination, self.currentFile(True))):
         logger.warning("{}: File {} exists at destination".format(
             self.recIdentity(), self.currentFile(True)))
     shutil.copy2(self.currentFile(), destination)
     if self._nii_type == "ni1":
         data_file = tools.change_ext(self.currentFile(), "img")
         shutil.copy2(data_file, destination)
     return os.path.join(destination, self.currentFile(True))
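
The _nii_type check reflects the two NIfTI-1 on-disk variants: "n+1" keeps header and data in a single .nii file, while "ni1" splits them into a .hdr/.img pair, which is why the .img data file must be copied as well. How _nii_type is set is not shown in the excerpt; a hedged sketch of one way to detect it from the NIfTI-1 magic string at byte offset 344 is:

def guess_nii_type(header_file: str) -> str:
    # Illustrative only: read the 4-byte NIfTI-1 magic string at
    # offset 344, which is b"n+1\x00" for a single-file .nii and
    # b"ni1\x00" for a separate .hdr/.img pair.
    with open(header_file, "rb") as f:
        f.seek(344)
        magic = f.read(4)
    return magic.rstrip(b"\x00").decode("ascii", errors="replace")
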
Example 4
    def _post_copy_bidsified(self,
                             directory: str,
                             bidsname: str,
                             ext: str) -> None:
        """
        Copies bidsified data files to its destinattion.

        Additionally, if modality is dwi (diffusion MRI),
        look for file of same name and extentions bvec and
        bval, and copies it. Will show a warning if such files
        not found.

        Parameters
        ----------
        directory: str
            destination directory where files should be copies,
            including modality folder. Assured to exists.
        bidsname: str
            bidsified name without extention
        ext: str
            extention of the data file
        """
        bids_base = os.path.join(directory, bidsname)

        if self.Modality() == "dwi":
            bvec = tools.change_ext(self.currentFile(), "bvec")
            if os.path.isfile(bvec):
                shutil.copy2(bvec,
                             bids_base + ".bvec")
            else:
                logger.warning("{} missing bvec file for diffusion recording"
                               .format(self.recIdentity()))
            bval = tools.change_ext(self.currentFile(), "bval")
            if os.path.isfile(bval):
                shutil.copy2(bval,
                             bids_base + ".bval")
            else:
                logger.warning("{} missing bval file for diffusion recording"
                               .format(self.recIdentity()))
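
For a source file named, say, recording.nii (an illustrative name), this step looks for recording.bvec and recording.bval next to it and copies them to <directory>/<bidsname>.bvec and <bidsname>.bval; missing gradient tables only trigger a warning, so the bidsified dataset may silently lack them.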
Example 5
 def _loadFile(self, path: str) -> None:
     if path != self._FILE_CACHE:
         # The DICM tag may be missing for anonymized DICOM files
         header = tools.change_ext(path, "json")
         try:
             with open(header, "r") as f:
                 dicomdict = json.load(f)
         except json.JSONDecodeError:
             logger.error("{}: corrupted header {}".format(
                 self.formatIdentity(), header))
             raise
         self._FILE_CACHE = path
         self._HEADER_CACHE = dicomdict
         self._header_file = header
Example 6
 def _copy_bidsified(self, directory: str, bidsname: str, ext: str) -> None:
     if self._nii_type == "ni1":
         shutil.copy2(self.currentFile(),
                      os.path.join(directory, bidsname + ext))
         data_file = tools.change_ext(self.currentFile(), "img")
         shutil.copy2(data_file, os.path.join(directory, bidsname + ".img"))
     else:
         out_fname = os.path.join(directory, bidsname + ext)
         if self.zip:
             with open(self.currentFile(), 'rb') as f_in:
                 with gzip.open(out_fname, 'wb') as f_out:
                     shutil.copyfileobj(f_in, f_out)
         else:
             shutil.copy2(self.currentFile(),
                          os.path.join(directory, bidsname + ext))
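
Two design notes on the else branch, as far as the excerpt shows: streaming through gzip with shutil.copyfileobj keeps memory use constant for large volumes, but unlike shutil.copy2 it does not preserve timestamps or permissions; and the output name reuses ext unchanged, which presumably already carries the ".gz" suffix when self.zip is set (an assumption not visible here).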
Example 7
    def _loadFile(self, path: str) -> None:
        if path != self._FILE_CACHE:
            # The DICM tag may be missing for anonymized DICOM files
            path_dir, base = os.path.split(path)
            header = os.path.join(
                path_dir, "header_dump_" + tools.change_ext(base, "json"))
            try:
                with open(header, "r") as f:
                    dicomdict = json.load(f)
                    self._headerData = {
                        "format": dicomdict["format"],
                        "acqDateTime": dicomdict["acqDateTime"],
                        "manufacturer": dicomdict["manufacturer"],
                    }
                    dicomdict["header"]
                    self.custom = dicomdict["custom"]
            except json.JSONDecodeError:
                logger.error("{}: corrupted header {}".format(
                    self.formatIdentity(), header))
                raise
            except KeyError as e:
                logger.error("{}: missing {} key in {}".format(
                    self.formatIdentity(), e, header))
                raise
            self._FILE_CACHE = path
            self._HEADER_CACHE = dicomdict["header"]
            self._header_file = header
            form = dicomdict["format"].split("/")
            if form[0] != self._module:
                logger.error("{}: format is not {}".format(
                    self.recIdentity, self._module))
                raise Exception("Wrong format")
            if form[1] == "DICOM":
                mod = _DICOM
            else:
                logger.error("{}: unknown format {}".format(
                    self.recIdentity, form[1]))
                raise Exception("Wrong format")

            if self.setManufacturer(dicomdict["manufacturer"],
                                    mod.manufacturers):
                self.resetMetaFields()
                self.setupMetaFields(mod.metafields)
                self.testMetaFields()
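
From the keys accessed above, the header_dump_<name>.json file is expected to contain at least the following structure (the values, including the "MRI" module name, are purely illustrative):

example_header_dump = {
    "format": "MRI/DICOM",            # "<module>/<file format>"
    "acqDateTime": "1900-01-01T00:00:00",
    "manufacturer": "Siemens",
    "header": {},                     # full dump of the original DICOM header
    "custom": {},                     # user-defined additional fields
}
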
Example 8
    def __init__(self, table: str,
                 index: str = "",
                 definitionsFile: str = "",
                 duplicatedFile: str = "",
                 checkDefinitions: bool = True):
        """
        Load tsv bids table into dataframe

        Parameters:
        -----------
        table: str
            path to tsv file, if not existing, an empty Dataframe
            will be created using given definitions
        index: str
            name of index column
        definitionsFile: str
            path to json file with column definitions, if not given,
            the default one based on table path is used
        duplicatedFile: str
            name of file containing duplicates, if not given
            default __<name>.tsv is used.
            If such file is found an exception will be raised
        checkDefinitions: bool
            if True raises keyError if definitions mismatch table columns,
            if False adapt table columns to match definitions
        """

        self._path, self._name = os.path.split(table)
        self._def_name = change_ext(self._name, "json")

        if not duplicatedFile:
            self._dupl_name = "__" + self._name
        else:
            self._dupl_name = duplicatedFile

        if os.path.isfile(os.path.join(self._path, self._dupl_name)):
            logger.error("{}: Found unmerged file with duplicated values"
                         .format(self._name))
            raise FileExistsError(self._dupl_name)

        if not definitionsFile:
            definitionsFile = os.path.join(self._path, self._def_name)
            if not os.path.isfile(definitionsFile):
                logger.error("{}: Unable to find definitions file"
                             .format(self._name))
                raise FileNotFoundError(definitionsFile)

        with open(definitionsFile, "r") as f:
            self._definitions = json.load(f)

        # loading table
        self.df = None
        self.index = index
        if os.path.isfile(table):
            self.df = pandas.read_csv(table, sep="\t", header=0,
                                      na_values="n/a")

            if self.index:
                if self.index not in self.df.columns:
                    logger.error("{}: Index column {} not found in table"
                                 .format(self._name, self.index))
                    raise KeyError(self.index)

            # columns in table but not in definitions
            mismatch = [c for c in self.df.columns
                        if c not in self._definitions
                        and c != self.index]
            if mismatch:
                if checkDefinitions:
                    logger.error("{}: Extra columns {} in table"
                                 .format(self._name, mismatch))
                    raise KeyError(mismatch)
                else:
                    self.df.drop(mismatch, axis="columns", inplace=True)

            # columns in definition but not in table
            mismatch = [c for c in self._definitions
                        if c not in self.df.columns]
            if mismatch:
                if checkDefinitions:
                    logger.error("{}: Columns {} not found in table"
                                 .format(self._name, mismatch))
                    raise KeyError(mismatch)
                else:
                    for c in mismatch:
                        self.df[c] = None
        else:
            columns = self._definitions.keys()
            if index and index not in columns:
                columns = [index] + list(columns)
            self.df = pandas.DataFrame(columns=columns)
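
A minimal sketch of the on-disk layout this constructor expects, using illustrative file names and a hypothetical BidsTable class name (the real class name is not part of the excerpt):

import json

import pandas

# Column definitions live next to the table, with the same base name;
# the structure of the definition values shown here is illustrative.
definitions = {
    "age": {"LongName": "Age", "Units": "years"},
    "sex": {"LongName": "Sex"},
}
with open("participants.json", "w") as f:
    json.dump(definitions, f, indent=2)

# The table itself: tab-separated, with "n/a" marking missing values.
pandas.DataFrame(
    {"participant_id": ["sub-001", "sub-002"],
     "age": [34, None],
     "sex": ["F", "M"]}
).to_csv("participants.tsv", sep="\t", index=False, na_rep="n/a")

# table = BidsTable("participants.tsv", index="participant_id")  # hypothetical class name
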
Example 9
 def __loadJsonDump(file: str) -> dict:
     json_dump = tools.change_ext(file, "json")
     with open(json_dump, "r") as f:
         return json.load(f)["acqpar"][0]