def generate_readme(self):
    """
    The BIDS specification recommends having a README file at the root of
    the dataset, so we copy a template one for further customization.
    """
    bids_dir = get_bids_dir()
    bids_dir.mkdir(exist_ok=True, parents=True)
    readme = bids_dir / "README"
    if not readme.is_file():
        readme_template = TEMPLATES_DIR / "README"
        shutil.copy(str(readme_template), str(readme))
def generate_bidsignore(self):
    """
    Some acquisitions do not conform to the BIDS specification (mainly
    localizers), so we generate a .bidsignore file pointing to them.

    References
    ----------
    * `BIDS validator specifications`_

    .. _BIDS validator specifications:
       https://neuroimaging-core-docs.readthedocs.io/en/latest/pages/bids-validator.html
    """
    bids_dir = get_bids_dir()
    bids_dir.mkdir(exist_ok=True, parents=True)
    bidsignore = bids_dir / ".bidsignore"
    with open(bidsignore, "w+") as bidsignore_file:
        bidsignore_file.write("**/*ignore-bids*\n")
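# A rough sketch of what the pattern written above is meant to exclude. The
# BIDS validator interprets .bidsignore entries as .gitignore-style globs;
# ``fnmatch`` below is only an approximation of that matching, and both
# example paths are hypothetical:
#
#     from fnmatch import fnmatch
#
#     ignored = "sub-1/anat/sub-1_acq-localizer_ignore-bids.nii.gz"
#     kept = "sub-1/anat/sub-1_T1w.nii.gz"
#     assert fnmatch(ignored, "**/*ignore-bids*")
#     assert not fnmatch(kept, "**/*ignore-bids*")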
def set_participant_tsv_and_json(self, scan):
    """
    Generates the recommended "participants.tsv" and "participants.json"
    files by either copying the templates from TEMPLATES_DIR or appending
    the scan's subject to an existing "participants.tsv" under the BIDS
    root directory.

    Parameters
    ----------
    scan : Scan
        Scan whose subject's parameters ("participant_id", "handedness",
        "age" and "sex") should be added to "participants.tsv"

    References
    ----------
    * `BIDS complementary files`_

    .. _BIDS complementary files:
       https://bids-specification.readthedocs.io/en/stable/03-modality-agnostic-files.html
    """
    bids_dir = get_bids_dir()
    bids_dir.mkdir(exist_ok=True, parents=True)
    subject_dict = self.get_subject_data(scan)
    participants_tsv = bids_dir / self.PARTICIPANTS_FILE_NAME
    participants_json = participants_tsv.with_suffix(".json")
    for participants_file in [participants_tsv, participants_json]:
        if not participants_file.is_file():
            participants_template = TEMPLATES_DIR / participants_file.name
            shutil.copy(str(participants_template), str(participants_file))
    participants_df = pd.read_csv(participants_tsv, sep="\t")
    subject_dict["participant_id"] = f"sub-{subject_dict['participant_id']}"
    if (
        subject_dict["participant_id"]
        not in participants_df["participant_id"].values
    ):
        participants_df = pd.concat(
            [participants_df, pd.DataFrame(subject_dict, index=[0])],
            ignore_index=True,
        )
        participants_df.to_csv(participants_tsv, sep="\t", index=False)
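# A small self-contained sketch of the row-appending behaviour above, with
# made-up column names and values (the real columns come from the
# participants.tsv template, which is not shown in this excerpt):
#
#     import pandas as pd
#
#     participants_df = pd.DataFrame(
#         {"participant_id": ["sub-1"], "age": [32], "sex": ["M"]}
#     )
#     subject_dict = {"participant_id": "sub-2", "age": 27, "sex": "F"}
#     if (
#         subject_dict["participant_id"]
#         not in participants_df["participant_id"].values
#     ):
#         participants_df = pd.concat(
#             [participants_df, pd.DataFrame(subject_dict, index=[0])],
#             ignore_index=True,
#         )
#     # participants_df now holds two rows: sub-1 and sub-2.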
def set_description_json(self):
    """
    Generates the "dataset_description.json" file required by the BIDS
    specification.

    References
    ----------
    * `BIDS complementary files`_

    .. _BIDS complementary files:
       https://bids-specification.readthedocs.io/en/stable/03-modality-agnostic-files.html
    """
    bids_dir = get_bids_dir()
    bids_dir.mkdir(exist_ok=True, parents=True)
    description_file = bids_dir / self.DATASET_DESCRIPTION_FILE_NAME
    if not description_file.is_file():
        description_template = TEMPLATES_DIR / description_file.name
        description_file.parent.mkdir(exist_ok=True)
        shutil.copy(str(description_template), str(description_file))
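# The copied template is not shown in this excerpt, but a minimal
# dataset_description.json only needs the two fields the BIDS specification
# marks as required ("Name" and "BIDSVersion"); the values and output path
# below are placeholders (in practice the file lives at the BIDS root):
#
#     import json
#     from pathlib import Path
#
#     description = {"Name": "My dataset", "BIDSVersion": "1.6.0"}
#     Path("dataset_description.json").write_text(
#         json.dumps(description, indent=4)
#     )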
def build_bids_path(self, scan):
    """
    Updates the BIDS-compatible file path derived by *dicom_parser* from a
    sample header, replacing the DICOM Patient ID with the subject's
    primary key and adding a run label if required.

    Parameters
    ----------
    scan : Scan
        Scan to build a BIDS-compatible path for

    Returns
    -------
    pathlib.Path
        Full path to an updated BIDS-compatible file, according to the
        scan's parameters
    """
    self._logger.debug(
        f"Building BIDS-compatible path for scan #{scan.id}"
    )
    subject_id = scan.session.subject.id
    sample_header = scan.dicom.get_sample_header()
    default_bids_path = sample_header.build_bids_path()
    if default_bids_path is None:
        raise ValueError(
            "BIDS path could not be derived from scan parameters."
        )
    self._logger.debug(
        f"Default relative path generated by dicom_parser based on DICOM metadata:\n{default_bids_path}"  # noqa: E501
    )
    self._logger.debug("Replacing Patient ID with subject primary key...")
    default_subject_id = sample_header.get("PatientID")
    fixed_relative_path = default_bids_path.replace(
        f"sub-{default_subject_id}", f"sub-{subject_id}"
    )
    self._logger.debug(f"Result: {fixed_relative_path}")
    bids_path = get_bids_dir() / fixed_relative_path
    self._logger.debug(f"Checking for an existing file at {bids_path}")
    path_with_suffix = {
        bids_path.with_suffix(".nii"),
        bids_path.with_suffix(".nii.gz"),
    }
    try:
        existing = NIfTI.objects.get(path__in=path_with_suffix)
    except NIfTI.DoesNotExist:
        self._logger.debug("No existing NIfTI file found! All done.")
        return bids_path
    else:
        self._logger.debug(f"Existing NIfTI (#{existing.id}) found!")
        if scan._nifti == existing:
            self._logger.debug(
                "Existing NIfTI instance belongs to queried scan!"
            )
            path = Path(existing.path)
            return path.parent / path.name.split(".")[0]
        self._logger.debug(
            f"Checking for an existing run label in scan #{scan.id}."
        )
        try:
            existing_run_label = re.findall(
                self.RUN_PATTERN, str(bids_path)
            )[0]
        except IndexError:
            self._logger.debug("No existing run label found.")
            self._logger.debug(
                f"Renaming NIfTI #{existing.id} to include a run label."
            )
            existing_run_label = self.RUN_LABEL_TEMPLATE.format(index=1)
            name_parts = Path(existing.path).name.split("_")
            name_parts.insert(-1, existing_run_label)
            name_with_run = "_".join(name_parts)
            updated_path = bids_path.parent / name_with_run
            existing.rename(updated_path)
            self._logger.debug(
                f"NIfTI #{existing.id} successfully moved to {updated_path}"
            )
            new_run_label = self.RUN_LABEL_TEMPLATE.format(index=2)
            bids_path = (
                updated_path.parent / updated_path.name.split(".")[0]
            )
        else:
            self._logger.debug(
                f"Existing run label found: {existing_run_label}"
            )
            existing_run_index = int(existing_run_label.split("-")[-1])
            index = existing_run_index + 1
            new_run_label = self.RUN_LABEL_TEMPLATE.format(index=index)
        self._logger.debug(f"New run label: {new_run_label}")
        return Path(
            str(bids_path).replace(existing_run_label, new_run_label)
        )
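# A sketch of the run-label bumping handled above, assuming RUN_PATTERN is
# something like r"run-\d+" and RUN_LABEL_TEMPLATE something like
# "run-{index}" (the real class attributes and the example path are not
# shown in this excerpt and are assumptions):
#
#     import re
#     from pathlib import Path
#
#     RUN_PATTERN = r"run-\d+"
#     RUN_LABEL_TEMPLATE = "run-{index}"
#
#     bids_path = Path("sub-1/anat/sub-1_run-1_T1w")
#     existing_run_label = re.findall(RUN_PATTERN, str(bids_path))[0]
#     index = int(existing_run_label.split("-")[-1]) + 1
#     new_run_label = RUN_LABEL_TEMPLATE.format(index=index)
#     bumped = Path(str(bids_path).replace(existing_run_label, new_run_label))
#     # bumped == Path("sub-1/anat/sub-1_run-2_T1w")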