Example #1
import os

import numpy as np
import pandas as pd
from bids import BIDSLayout


def main(**args):

    path = "/mnt/DATA_4Tera/Dati_Sherlock/bids/"

    derivatives = os.path.join(path, "derivatives", "afniproc")
    print("mkdir -p {}".format(derivatives))
    os.system("mkdir -p {}".format(derivatives))

    subj_deriv = os.path.join(derivatives, 'sub-{subject}', "ses-{session}")

    layout = BIDSLayout(path)

    subjects = layout.get_subjects()
    sessions = layout.get_sessions()

    for session in sessions:
        for subj in subjects:

            deriv_dir = subj_deriv.format(session=session, subject=subj)
            print("mkdir -p {}".format(deriv_dir))
            os.system("mkdir -p {}".format(deriv_dir))

            # Create anat and func derivative directories
            anat_dir = os.path.join(subj_deriv,
                                    "{datatype}").format(session=session,
                                                         subject=subj,
                                                         datatype='anat')
            func_dir = os.path.join(subj_deriv,
                                    "{datatype}").format(session=session,
                                                         subject=subj,
                                                         datatype='func')

            print("mkdir -p {}".format(anat_dir))
            os.system("mkdir -p {}".format(anat_dir))

            print("mkdir -p {}".format(func_dir))
            os.system("mkdir -p {}".format(func_dir))

            # Check the T1 and fix it for FreeSurfer segmentation (check_dset_for_fs.py)
            t1 = layout.get(subject=subj, session=session, suffix='T1w')[0]

            entities = t1.get_entities()
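            # In pybids path patterns, entities wrapped in square brackets are optional and are dropped when missing.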
            pattern = os.path.join(
                subj_deriv, "{datatype}",
                "sub-{subject}[_ses-{session}][_desc-{desc}]_{suffix}.{extension}"
            )
            entities['desc'] = 'fsprep'
            t1_fs = layout.build_path(entities, pattern, validate=False)

            entities['extension'] = 'txt'
            t1_log = layout.build_path(entities, pattern, validate=False)

            command = "check_dset_for_fs.py -input %s -fix_all -fix_out_prefix %s -fix_out_vox_dim 1 -verb > %s"
            command = command % (t1.path, t1_fs, t1_log)
            print(command)
            os.system(command)

            # check_fs is assumed to be a helper defined elsewhere in this module.
            check_fs(layout, subj, session, subj_deriv)

            ### T1 to MNI space coreg
            entities = t1.get_entities()
            entities['desc'] = 'fsprep'
            entities['space'] = 'MNI152'

            pattern = os.path.join(
                subj_deriv, "{datatype}",
                "sub-{subject}[_ses-{session}][_space-{space}][_desc-{desc}]_{suffix}.{extension}"
            )
            t1_mni = layout.build_path(entities, pattern, validate=False)

            command = "@auto_tlrc -base MNI152_2009_template.nii.gz -pad_base 35 -prefix {prefix} -input {input}"
            command = command.format(prefix=t1_mni, input=t1_fs)
            print(command)
            os.system(command)

            runs = layout.get_runs()
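            # Gather this subject/session's BOLD files run by run, skipping runs that do not exist.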
            ordered_bold = []
            for run in runs:
                fname = layout.get(session=session,
                                   subject=subj,
                                   run=run,
                                   suffix='bold')
                if len(fname) != 0:
                    ordered_bold.append(fname[0])

            # Slice time correction - motion correction - align EPI to Anat to MNI
            bold = layout.get(subject=subj,
                              session=session,
                              suffix='bold',
                              extension='nii.gz')

            # Write the slice timings to a 1D text file for AFNI's -tpattern option
            slice_timing = np.array(bold[0].get_metadata()['SliceTiming'])
            slice_fname = os.path.join(path, "slice_timing.txt")
            np.savetxt(slice_fname,
                       slice_timing,
                       delimiter=' ',
                       newline=' ',
                       fmt='%.5f')

            epi = layout.get(subject=subj,
                             session=session,
                             suffix='bold',
                             extension='nii.gz',
                             run=1)[0].path
            child_epi = " ".join([b.path for b in ordered_bold])
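            # align_epi_anat.py does slice-timing correction, EPI-to-anat registration and, via -tlrc_apar,
            # transformation to MNI space; -child_epi applies the same transforms to every run.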

            command = "align_epi_anat.py -anat {anat} -epi {epi} -child_epi {child_epi}"+\
                      " -epi_base 0 -tshift_opts -tpattern {tpattern} -epi2anat -giant_move"+\
                      " -tlrc_apar {tlrc_apar}"
            command = command.format(anat=t1_fs,
                                     epi=epi,
                                     child_epi=child_epi,
                                     tpattern=slice_fname,
                                     tlrc_apar=t1_mni)
            print(command)
            os.system(command)

            # Create mask
            automask = " ".join(
                [b.filename[:-7] + '_tlrc_al+tlrc.HEAD' for b in bold])
            mean_mask_prefix = 'mean_mni.nii.gz'
            command = '3dTstat -prefix {prefix} {input}'.format(
                prefix=mean_mask_prefix, input=automask)
            print(command)
            os.system(command)

            entities = bold[0].get_entities()
            entities['suffix'] = 'mask'
            entities['space'] = 'MNI152'

            pattern = os.path.join(
                subj_deriv, "{datatype}",
                "sub-{subject}[_ses-{session}][_space-{space}][_desc-{desc}]_{suffix}.{extension}"
            )
            mask_prefix = layout.build_path(entities, pattern, validate=False)
            command = '3dAutomask -prefix {prefix} {input}'.format(
                input=mean_mask_prefix, prefix=mask_prefix)
            print(command)
            os.system(command)

            # Clean files
            removed = " ".join([b.filename[:-7] + '_al+orig.*' for b in bold])
            command = "rm " + removed
            print(command)
            os.system(command)

            command = "rm __tt_*.*"
            print(command)
            os.system(command)

            command = "rm malldump.*"
            print(command)
            os.system(command)

            # Rename outputs into BIDS-style derivative filenames
            header = [
                'trans_x', 'trans_y', 'trans_z', 'rot_x', 'rot_y', 'rot_z'
            ]
            motion_ordered = list()
            for bold in ordered_bold:

                motion_fname = bold.filename[:-7] + "_vr_motion.1D"
                motion = np.genfromtxt(motion_fname)

                motion_ordered.append(motion_fname)

                entities = bold.get_entities()
                entities['suffix'] = 'motion'
                entities['desc'] = 'volreg'
                entities['extension'] = 'tsv'

                pattern = os.path.join(
                    subj_deriv, "{datatype}",
                    "sub-{subject}_ses-{session}_task-{task}_run-{run:02d}_desc-{desc}_{suffix}.{extension}"
                )
                motion_bids = pattern.format(**entities)

                # Reorder 3dvolreg columns (roll, pitch, yaw, dS, dL, dP) so translations come first, matching the header.
                motion = motion[:, [3, 4, 5, 0, 1, 2]]
                np.savetxt(motion_bids,
                           motion,
                           fmt="%f",
                           delimiter="\t",
                           header="\t".join(header))

                print("rm " + motion_fname)
                os.system("rm " + motion_fname)

                for desc in ['mat', "reg_mat", "tlrc_mat"]:
                    affine_fname = bold.filename[:-7] + "_al_" + desc + ".aff12.1D"
                    entities = bold.get_entities()
                    entities['suffix'] = 'affine'
                    entities['desc'] = desc.replace("_", "")
                    entities['extension'] = 'tsv'

                    pattern = os.path.join(
                        subj_deriv, "{datatype}",
                        "sub-{subject}_ses-{session}_task-{task}_run-{run:02d}_desc-{desc}_{suffix}.{extension}"
                    )
                    affine_bids = pattern.format(**entities)

                    command = "mv {0} {1}".format(affine_fname, affine_bids)
                    print(command)
                    os.system(command)

                afni_bold = bold.filename[:-7] + "_tlrc_al+tlrc"
                entities = bold.get_entities()
                entities['desc'] = "afniproc"
                entities['extension'] = 'nii.gz'
                entities['space'] = 'MNI152'
                pattern = os.path.join(
                    subj_deriv, "{datatype}",
                    "sub-{subject}_ses-{session}_task-{task}_run-{run:02d}_space-{space}_desc-{desc}_{suffix}.{extension}"
                )

                afni_bids = pattern.format(**entities)
                command = "3dcopy {0} {1}".format(afni_bold, afni_bids)
                print(command)
                os.system(command)

                print("rm {}*".format(afni_bold))
                os.system("rm {}*".format(afni_bold))

            # Create confound regressors
            # Motion

            motion_files = list()
            for run in runs:
                f = layout.get(subject=subj,
                               session=session,
                               task=session,
                               run=run,
                               suffix='motion')
                if len(f) != 0:
                    motion_files.append(f[0])

            motion_df = [
                pd.read_csv(m.path, delimiter="\t") for m in motion_files
            ]

            motion_demean = [m - m.mean(0) for m in motion_df]
            motion_demean = pd.concat(motion_demean)

            entities = motion_files[0].get_entities()
            entities['desc'] = 'demean'

            pattern = os.path.join(
                subj_deriv, "{datatype}",
                "sub-{subject}_ses-{session}_desc-{desc}_{suffix}.{extension}")
            demean_fname = pattern.format(**entities)
            motion_demean.to_csv(demean_fname,
                                 header=False,
                                 index=False,
                                 sep="\t")

            motion_deriv = [m.diff() for m in motion_df]
            motion_deriv = [m.fillna(0) for m in motion_deriv]
            motion_deriv = [m - m.mean(0) for m in motion_deriv]
            motion_deriv = pd.concat(motion_deriv)

            entities['desc'] = 'deriv'
            deriv_fname = pattern.format(**entities)
            motion_deriv.to_csv(deriv_fname,
                                header=False,
                                index=False,
                                sep="\t")

            tr_counts = [m.shape[0] for m in motion_df]
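            # Number of TRs per run, used to build band-pass regressors padded across runs.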

            for j, t in enumerate(tr_counts):
                command = "1dBport -nodata {ntr} 1 -band 0.01 999 -invert -nozero > bpass.1D".format(
                    ntr=t)
                print(command)
                os.system(command)

                command = "1d_tool.py -infile bpass.1D -pad_into_many_runs {run:1d} {n_runs}"+\
                          " -set_run_lengths {tr_counts} -write bpass.{run:02d}.1D"
                command = command.format(run=j + 1,
                                         n_runs=str(len(tr_counts)),
                                         tr_counts=" ".join(
                                             [str(t) for t in tr_counts]))
                print(command)
                os.system(command)

            entities['desc'] = 'bpass'
            entities['suffix'] = 'timeseries'
            entities['extension'] = '1D'
            bpass_fname = pattern.format(**entities)

            command = "1dcat bpass.*.1D > {}".format(bpass_fname)
            print(command)
            os.system(command)

            command = "rm bpass*"
            print(command)
            os.system(command)
Example #2
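                        # Fragment: labels, entities, sourcefile, sourcejson, niftidir, LOGFILE and logtext
                        # are defined in the enclosing code (not shown).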
                        acquisition = list(filter(lambda x: "acq-" in x, labels))
                        if acquisition:
                            acquisitionValue = acquisition[0].split('-')[1]
                            entities['acquisition'] = acquisitionValue
                        else:
                            entities.pop('acquisition', None)

                        run = list(filter(lambda x: "run-" in x, labels))
                        if run:
                            runValue = run[0].split('-')[1]
                            entities['run'] = runValue
                        else:
                            entities.pop('run', None)

                        entities['extension'] = 'nii.gz'
                        outputfile = os.path.join(niftidir, layout.build_path(entities))
                        if os.path.exists(sourcefile):
                            logtext(LOGFILE, "copying %s to %s" % (sourcefile, outputfile))
                            subprocess.check_output(['cp', sourcefile, outputfile])
                        else:
                            logtext(LOGFILE, "ERROR: %s cannot be found. Check bidsaction file logic." % sourcefile)

                        entities['extension'] = 'json'
                        outputjson = os.path.join(niftidir, layout.build_path(entities))
                        if os.path.exists(sourcejson):
                            logtext(LOGFILE, "copying %s to %s" % (sourcejson, outputjson))
                            subprocess.check_output(['cp', sourcejson, outputjson])
                        else:
                            logtext(LOGFILE, "ERROR: %s cannot be found. Check bidsaction file logic." % sourcejson)
Example #3
import os

from bids import BIDSLayout


def main(**args):

    outfiles = ['fitts', 'errts', 'stats', 'betas']

    path = args['path']
    pipeline = args['pipeline']

    command = '3dDeconvolve -input {files} -jobs {n_jobs} -polort {polort} -float {confounds} {events_string} '+ \
              ' -mask {mask} -allzero_OK -fout -tout -x1D {design_matrix_txt} -xjpeg {design_matrix_jpg} -xsave '+\
              '-fitts {fitts} -errts {errts} -bucket {stats} -cbucket {betas} -rout -gltsym "SYM: RESP+L -RESP+R" -glt_label 1 RespLvsRespR'
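    # The placeholders above are filled from the args dict via command.format(**args) at the end of the subject loop.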

    # process_extraevent_arg, bids2afni_events, write_afni and stims_times are assumed to be helpers defined elsewhere in this module.
    extra_event = process_extraevent_arg(args['extra_event'])

    derivatives = os.path.join(path, "derivatives", pipeline)
    print("mkdir -p {}".format(derivatives))
    os.system("mkdir -p {}".format(derivatives))

    derivatives_pattern = os.path.join(derivatives, 'sub-{subject}',
                                       "ses-{session}")
    pattern = os.path.join(
        derivatives_pattern, "{datatype}",
        "sub-{subject}[_ses-{session}][_space-{space}][_desc-{desc}]_{suffix}.{extension}"
    )

    layout = BIDSLayout(path, derivatives=True)

    subjects = layout.get_subjects()
    subjects.remove('lormat')
    sessions = layout.get_sessions()

    # TODO: Check if there are sessions
    for session in sessions:
        for subj in subjects:

            deriv_dir = derivatives_pattern.format(session=session,
                                                   subject=subj)
            print("mkdir -p {}".format(deriv_dir))
            os.system("mkdir -p {}".format(deriv_dir))

            # Create func
            func_dir = os.path.join(derivatives_pattern,
                                    "{datatype}").format(session=session,
                                                         subject=subj,
                                                         datatype='func')

            print("mkdir -p {}".format(func_dir))
            os.system("mkdir -p {}".format(func_dir))

            # Main command
            files = layout.get(subject=subj,
                               session=session,
                               task=session,
                               desc='afniproc',
                               extension='nii.gz')
            entities = files[0].get_entities()
            files = " ".join(f.path for f in files)

            args['files'] = files

            confounds = ''
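            # Each confound file is passed to 3dDeconvolve as an -ortvec nuisance regressor matrix.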
            for desc in ['bpass', 'demean']:
                ort_files = layout.get(subject=subj,
                                       session=session,
                                       desc=desc)
                confounds += '-ortvec {} {} '.format(ort_files[0].path, desc)

            args['confounds'] = confounds

            # Stimuli
            stims = bids2afni_events(subj,
                                     session,
                                     layout,
                                     pattern,
                                     extra_event=extra_event)

            write_afni(stims)
            args['events_string'] = stims_times(stims)

            # Mask
            mask = layout.get(subject=subj,
                              session=session,
                              suffix='mask',
                              extension='nii.gz')
            args['mask'] = mask[0].path

            # Buckets
            for desc in outfiles:
                entities['desc'] = pipeline
                entities['suffix'] = desc
                args[desc] = layout.build_path(entities,
                                               pattern,
                                               validate=False)

            for extension in ['jpg', 'txt']:
                entities['suffix'] = 'dmatrix'
                entities['extension'] = extension
                args['design_matrix_' + extension] = layout.build_path(
                    entities, pattern, validate=False)

            print(command.format(**args))
            os.system(command.format(**args))
Example #4
File: convert.py  Project: j1c/hcp2bids
import json
from pathlib import Path

from bids import BIDSLayout


def convert(input_path, output_path, include_ses=False):
    in_path = Path(input_path)
    if not in_path.is_dir():
        msg = f"{input_path} is not a valid directory."
        raise ValueError(msg)

    # Make output path
    out_path = Path(output_path)
    if not out_path.is_dir():
        out_path.mkdir(parents=True)

    # Generate dataset_description.json
    data = dict(Name="hcp", BIDSVersion="1.4.0", DatasetType="raw")
    with open(out_path / "dataset_description.json", "w") as f:
        json.dump(data, f)

    layout = BIDSLayout(out_path.absolute())
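    # pattern, modality_dict, run_dict and _mkdir are assumed to be module-level definitions in convert.py (not shown here).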

    # Iterate through each subject folder
    subject_folders = [x for x in in_path.iterdir() if x.is_dir()]
    for subject_folder in subject_folders:
        if not (subject_folder / "unprocessed/3T/").is_dir():
            continue

        # 6 digit sub id in str form
        subject = subject_folder.name

        modality_folders = [
            x for x in (subject_folder / "unprocessed/3T/").iterdir()
            if x.is_dir()
        ]
        for modality_folder in modality_folders:
            modality = modality_folder.name

            # Make bids output folders
            _mkdir(layout, subject, modality, include_ses)

            if modality == "T1w_MPR1":
                entities = dict(
                    subject=subject,
                    datatype=modality_dict[modality],
                    extension=".nii.gz",
                    suffix="T1w",
                )
                if include_ses:
                    entities["session"] = "1"
                new_fname = layout.build_path(entities, pattern)

                # Rename old files
                old_fname = list(modality_folder.iterdir())[0]
                old_fname.rename(new_fname)

            elif modality == "Diffusion":
                for fname in modality_folder.iterdir():
                    splits = fname.name.split(".")
                    extension = "." + splits[-1]  # Get extension
                    if extension == ".gz":
                        extension = ".nii.gz"
                    splits = splits[0].split("_")
                    direction = splits[-1]  # Direction. RL or LR
                    run = run_dict[splits[-2]]  # Run number

                    entities = dict(
                        subject=subject,
                        datatype=modality_dict[modality],
                        direction=direction,
                        run=run,
                        extension=extension,
                        suffix="dwi",
                    )
                    if include_ses:
                        entities["session"] = "1"

                    new_fname = layout.build_path(entities, pattern)
                    Path(fname).rename(new_fname)

                    # Make json sidecar
                    if extension == ".nii.gz":
                        entities["extension"] = ".json"

                        if direction == "LR":
                            phase = "i-"
                        elif direction == "RL":
                            phase = "i"

                        # TotalReadoutTime = EffectiveEchoSpacing * (EPI factor - 1) (which is 144)
                        sidecar = dict(
                            EffectiveEchoSpacing=0.00078,
                            TotalReadoutTime=0.11154,
                            PhaseEncodingDirection=phase,
                        )
                        with open(layout.build_path(entities, pattern),
                                  "w") as f:
                            json.dump(sidecar, f)

            # Remove all folders
            modality_folder.rmdir()

        for folder in list(subject_folder.rglob("*"))[::-1]:
            folder.rmdir()
        subject_folder.rmdir()

    if not input_path == output_path:
        in_path.rmdir()
Example #5
                        if acquisition:
                            acquisitionValue = acquisition[0].split('-')[1]
                            entities['acquisition'] = acquisitionValue
                        else:
                            entities.pop('acquisition', None)

                        run = list(filter(lambda x: "run-" in x, labels))
                        if run:
                            runValue = run[0].split('-')[1]
                            entities['run'] = runValue
                        else:
                            entities.pop('run', None)

                        entities['extension'] = 'nii.gz'
                        outputfile = os.path.join(niftidir,
                                                  layout.build_path(entities))
                        if os.path.exists(sourcefile):
                            logtext(
                                LOGFILE,
                                "copying %s to %s" % (sourcefile, outputfile))
                            subprocess.check_output(
                                ['cp', sourcefile, outputfile])
                        else:
                            logtext(
                                LOGFILE,
                                "ERROR: %s cannot be found. Check bidsaction file logic."
                                % sourcefile)

                        entities['extension'] = 'json'
                        outputjson = os.path.join(niftidir,
                                                  layout.build_path(entities))
Example #6
File: dm_deface.py  Project: jerdra/datman
import json
import logging
from argparse import ArgumentDefaultsHelpFormatter, ArgumentParser
from pathlib import Path

import datman.config
from bids import BIDSLayout
from pydeface.utils import deface_image  # assumed import path for pydeface's defacing function

# logger and _is_dir are assumed to be defined at module level in dm_deface.py (not shown).


def main():
    parser = ArgumentParser(
        description="Defaces anatomical data in a BIDS dataset",
        formatter_class=ArgumentDefaultsHelpFormatter,
    )

    g_required = parser.add_mutually_exclusive_group(required=True)
    g_required.add_argument("--study",
                            action="store",
                            help="Nickname of the study to process")
    g_required.add_argument(
        "--bids-dir",
        action="store",
        metavar="DIR",
        type=lambda x: _is_dir(x, parser),
        help="The root directory of the BIDS dataset to process",
    )

    g_bids = parser.add_argument_group("Options for filtering BIDS queries")
    g_bids.add_argument(
        "-s",
        "--suffix-id",
        action="store",
        nargs="+",
        default=["T1w"],
        help="Select a specific BIDS suffix to be processed",
    )
    g_bids.add_argument(
        "--skip-bids-validation",
        action="store_true",
        default=False,
        help="Assume the input dataset is BIDS compatible and skip validation",
    )
    g_bids.add_argument(
        "--bids-database-dir",
        action="store",
        type=lambda x: _is_dir(x, parser),
        help="Path to a PyBIDS database directory for faster indexing",
    )

    g_perfm = parser.add_argument_group("Options for logging and debugging")
    g_perfm.add_argument("--quiet",
                         action="store_true",
                         default=False,
                         help="Minimal logging")
    g_perfm.add_argument("--verbose",
                         action="store_true",
                         default=False,
                         help="Maximal logging")
    g_perfm.add_argument("--dry-run",
                         action="store_true",
                         default=False,
                         help="Do nothing")

    args = parser.parse_args()

    if args.verbose:
        logger.setLevel(logging.INFO)
    if args.quiet:
        logger.setLevel(logging.ERROR)

    if args.study:
        config = datman.config.config(study=args.study)
        bids_dir = config.get_path("bids")
    else:
        bids_dir = args.bids_dir

    layout = BIDSLayout(
        bids_dir,
        validate=not args.skip_bids_validation,
        database_path=args.bids_database_dir,
    )

    anat_list = layout.get(suffix=args.suffix_id,
                           extension=[".nii", ".nii.gz"])
    keys_to_extract = [
        "subject",
        "session",
        "acquisition",
        "ceagent",
        "reconstruction",
        "run",
        "suffix",
    ]

    for anat in anat_list:

        entities = {
            key: anat.entities.get(key, None)
            for key in keys_to_extract
        }
        if (entities["acquisition"] is not None
                and "defaced" in entities["acquisition"]):
            continue
        if entities["acquisition"] is not None:
            entities["acquisition"] = entities["acquisition"] + "defaced"
        else:
            entities["acquisition"] = "defaced"

        output_file = Path(bids_dir, layout.build_path(entities))
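        # With no explicit pattern argument, build_path falls back to the layout's default BIDS path patterns.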

        if not output_file.exists():
            if args.dry_run:
                logger.info(
                    f"DRYRUN would have executed defacing on <{anat.path}> "
                    f"and output to <{output_file}>")
                continue

            try:
                deface_image(infile=anat.path, outfile=str(output_file))
            except Exception as e:
                logger.error(f"Defacing failed to run on <{anat.path}> for "
                             f"reason {e}")
                return

            anat_metadata = anat.get_metadata()
            anat_metadata["DefaceSoftware"] = "pydeface"
            with open(str(output_file).replace(".nii.gz", ".json"), "w+") as f:
                json.dump(anat_metadata, f, indent=4)