Example 1
def make_w_coreg_7T_3T(ttype=''):
    n_in = Node(IdentityInterface(fields=[
        'T1w_7T',
        'T1w_3T',
    ]), name='input')

    n_out = Node(IdentityInterface(fields=[
        'mat_t1w3t_to_t1w7t',
        'mat_t1w7t_to_t1w3t',
    ]),
                 name='output')

    n_7T = Node(Reorient2Std(), '7T')
    n_3T = Node(Reorient2Std(), '3T')

    n_coarse = Node(FLIRT(), 'coarse')
    n_coarse.inputs.no_search = True
    n_coarse.inputs.schedule = environ[
        'FSLDIR'] + '/etc/flirtsch/sch3Dtrans_3dof'
    n_coarse.inputs.dof = 6

    n_fine = Node(FLIRT(), 'fine')
    n_fine.inputs.no_search = True
    # n_fine.inputs.cost = 'normcorr'
    n_fine.inputs.dof = 7

    n_3t_7t = Node(ConvertXFM(), name='t1w3t_to_t1w7t')
    n_3t_7t.inputs.concat_xfm = True
    n_3t_7t.inputs.out_file = 'mat_t1w3t_to_t1w7t.mat'

    n_7t_3t = Node(ConvertXFM(), name='t1w7t_to_t1w3t')
    n_7t_3t.inputs.invert_xfm = True
    n_7t_3t.inputs.out_file = 'mat_t1w7t_to_t1w3t.mat'

    w = Workflow('coreg_3T_7T' + ttype)
    w.connect(n_in, 'T1w_7T', n_7T, 'in_file')
    w.connect(n_in, 'T1w_3T', n_3T, 'in_file')
    w.connect(n_7T, 'out_file', n_coarse, 'reference')
    w.connect(n_3T, 'out_file', n_coarse, 'in_file')
    w.connect(n_7T, 'out_file', n_fine, 'reference')
    w.connect(n_coarse, 'out_file', n_fine, 'in_file')
    w.connect(n_coarse, 'out_matrix_file', n_3t_7t, 'in_file')
    w.connect(n_fine, 'out_matrix_file', n_3t_7t, 'in_file2')
    w.connect(n_3t_7t, 'out_file', n_7t_3t, 'in_file')
    w.connect(n_3t_7t, 'out_file', n_out, 'mat_t1w3t_to_t1w7t')
    w.connect(n_7t_3t, 'out_file', n_out, 'mat_t1w7t_to_t1w3t')

    return w
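A minimal usage sketch for the factory above, assuming the usual nipype/FSL imports are in scope. The input paths and base_dir are hypothetical placeholders; FSL must be installed and FSLDIR set, since the function reads environ['FSLDIR'].

w = make_w_coreg_7T_3T()
w.base_dir = '/tmp/coreg_work'                        # hypothetical scratch dir
w.inputs.input.T1w_7T = '/data/sub-01_T1w_7T.nii.gz'  # hypothetical input
w.inputs.input.T1w_3T = '/data/sub-01_T1w_3T.nii.gz'  # hypothetical input
w.run()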
Example 2
def chopper(in_file):
    # An earlier revision of this function shelled out to robustfov and
    # fslroi via subprocess and parsed the printed FOV from stdout; the
    # nipype interfaces below replace that approach entirely.

    from nipype.interfaces.fsl import RobustFOV, Reorient2Std
    rfov = RobustFOV()
    rfov.inputs.in_file = in_file
    res = rfov.run()

    reo = Reorient2Std()
    reo.inputs.in_file = res.outputs.out_roi
    res = reo.run()
    out_file = res.outputs.out_file

    return out_file
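Because chopper does all of its imports inside the function body, it can be wrapped directly in a nipype Function node. A hedged sketch (the input path is a placeholder):

from nipype import Node
from nipype.interfaces.utility import Function

chop = Node(Function(input_names=['in_file'],
                     output_names=['out_file'],
                     function=chopper),
            name='chop')
chop.inputs.in_file = '/data/sub-01_T1w.nii.gz'  # hypothetical input
result = chop.run()
print(result.outputs.out_file)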
Example 3
def convert_modality(old_subject_id,
                     old_ses_id,
                     output_dir,
                     info_out_dir,
                     bids_name,
                     bids_modality,
                     search_str,
                     bvecs_from_scanner_file=None,
                     public_sub_id=None,
                     public_output=True,
                     reorient2std=True,
                     task=None,
                     direction=None,
                     acq=None,
                     only_use_last=False,
                     deface=False,
                     physio=False,
                     add_info={},
                     dry_run=False,
                     post_glob_filter=None):
    """
    Runs conversion for one subject and one modality.
    public_output: if True, strips all info about the original subject_id,
    file, and date.
    """
    if (public_output and bids_modality == "anat" and not deface):
        raise Exception(
            "Public output requested, but anatomical images not defaced. exit. %s %s %s"
            % (old_subject_id, old_ses_id, bids_name))

    new_ses_id = get_clean_ses_id(old_ses_id)
    bids_ses = "ses-" + new_ses_id
    if public_sub_id:
        bids_sub = "sub-" + public_sub_id
    else:
        bids_sub = "sub-" + get_clean_subject_id(old_subject_id)

    if isinstance(search_str, str):
        search_str = [search_str]

    par_file_list = []
    for s_str in search_str:
        par_file_list += sorted(glob("*" + s_str + "*.par"))

    # remove double entries
    par_file_list = list(set(par_file_list))

    if post_glob_filter:
        par_file_list = list(filter(post_glob_filter, par_file_list))

    physio_in_file_list = []

    mapping = []
    if par_file_list:
        sub_output_dir = os.path.join(output_dir, bids_sub)
        nii_output_dir = os.path.join(sub_output_dir, bids_ses, bids_modality)

        if not os.path.exists(nii_output_dir):
            os.makedirs(nii_output_dir)

        if only_use_last:
            par_file_list = par_file_list[-1:]

        # sort files by acquisition number
        par_acq_nr = np.array([
            get_par_info(par_file, "acquisition_nr")["acquisition_nr"]
            for par_file in par_file_list
        ])
        sort_index = np.argsort(par_acq_nr)

        for run_id, par_file in enumerate(
                np.array(par_file_list)[sort_index].tolist(), 1):
            # put together bids file name
            # bids run
            bids_run = "run-" + str(run_id)
            out_components = [bids_sub, bids_ses]

            # bids acq
            if acq:
                out_components += ["acq-%s" % acq]

            # bids task
            if task:
                out_components += ["task-%s" % task]

            # bids acq. direction
            if direction:
                out_components += ["dir-%s" % direction]

            out_components += [bids_run, bids_name]
            out_filename = "_".join(out_components)
            out_filename_wo_name = "_".join(out_components[:-1])
            nii_file = os.path.join(nii_output_dir, out_filename + ".nii.gz")
            if not dry_run:
                assert not os.path.exists(
                    nii_file), "file exists. STOP. %s" % nii_file

                bids_file, converter_results, mapping_ = run_dcm2niix(
                    bids_name, bids_modality, bvecs_from_scanner_file,
                    info_out_dir, nii_file, nii_output_dir, out_filename,
                    par_file, task)
                mapping.append(mapping_)

                if reorient2std:
                    reorient = Reorient2Std()
                    reorient.inputs.in_file = converter_results.outputs.converted_files
                    reorient.inputs.out_file = converter_results.outputs.converted_files
                    reorient_results = reorient.run()

                if deface:
                    deface_data(nii_file, nii_output_dir, out_filename)
                add_info_to_json(bids_file, {"Defaced": deface})

                add_info_to_json(bids_file, add_info)

                # finally as a sanity check, check that converted nii exists
                assert os.path.exists(nii_file), "Something went wrong: " \
                                                 "converted file does not exist. STOP. %s" % nii_file
            physio_in_file_list = []
            if physio:  # convert physiological data
                physio_search_str_list = [
                    ".".join(par_file.split(".")[:-1]) + "_*phys*.log",
                    "SCANPHYSLOG_" + ".".join(par_file.split(".")[:-1]) +
                    ".log"
                ]
                physio_in_file_list = []
                for physio_search_str in physio_search_str_list:
                    physio_in_file_list += glob(physio_search_str)
                assert len(
                    physio_in_file_list
                ) < 2, "more than 1 physio file found for %s" % physio_search_str

                if physio_in_file_list and not dry_run:
                    physio_out_file_base = os.path.join(
                        nii_output_dir, out_filename_wo_name + "_physio")
                    meta_data, physio_data = parse_physio(
                        physio_in_file_list[0])
                    save_physio(physio_out_file_base, meta_data, physio_data)

    return par_file_list, physio_in_file_list, mapping
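The BIDS filename assembly above is plain string joining; a standalone sketch with made-up entity values (all hypothetical):

out_components = ['sub-01', 'ses-01']
acq, task, direction = 'highres', 'rest', None
if acq:
    out_components += ["acq-%s" % acq]
if task:
    out_components += ["task-%s" % task]
if direction:
    out_components += ["dir-%s" % direction]
out_components += ['run-1', 'bold']
print("_".join(out_components))  # sub-01_ses-01_acq-highres_task-rest_run-1_bold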
Example 4
        par_list = glob(os.path.join(raw_dir, sub_id, "*t1w*.par"))

        if not par_list:
            raise Exception("No t1w images for {}".format(sub_id))
        for par in par_list:
            out_filename = os.path.basename(par).split(".par")[0]
            full_out_path = os.path.join(subject_out_dir,
                                         out_filename + ".nii.gz")
            print(full_out_path)
            if not os.path.isfile(full_out_path):
                converter = Dcm2niix()
                converter.inputs.source_names = [par]
                converter.inputs.bids_format = True
                converter.inputs.compress = 'i'
                converter.inputs.has_private = True
                converter.inputs.output_dir = subject_out_dir
                converter.inputs.out_filename = out_filename

                print("XXXXXXX running dcm2niix command")
                print(converter.cmdline)

                converter_results = converter.run()

                reorient = Reorient2Std()
                reorient.inputs.in_file = converter_results.outputs.converted_files
                reorient.inputs.out_file = converter_results.outputs.converted_files
                reorient_results = reorient.run()

    print("\n\n\n\nDONE.\nConverted %d subjects." % len(sub_id_list))
    print(sub_id_list)
Example 5
def create_workflow(subject_id, outdir, file_url):
    """Create a workflow for a single participant"""

    sink_directory = os.path.join(outdir, subject_id)

    wf = Workflow(name=subject_id)

    getter = Node(Function(input_names=['url'],
                           output_names=['localfile'],
                           function=download_file),
                  name="download_url")
    getter.inputs.url = file_url

    orienter = Node(Reorient2Std(), name='reorient_brain')
    wf.connect(getter, 'localfile', orienter, 'in_file')

    better = Node(BET(), name='extract_brain')
    wf.connect(orienter, 'out_file', better, 'in_file')

    faster = Node(FAST(), name='segment_brain')
    wf.connect(better, 'out_file', faster, 'in_files')

    firster = Node(FIRST(), name='parcellate_brain')
    structures = [
        'L_Hipp', 'R_Hipp', 'L_Accu', 'R_Accu', 'L_Amyg', 'R_Amyg', 'L_Caud',
        'R_Caud', 'L_Pall', 'R_Pall', 'L_Puta', 'R_Puta', 'L_Thal', 'R_Thal'
    ]
    firster.inputs.list_of_specific_structures = structures
    wf.connect(orienter, 'out_file', firster, 'in_file')

    fslstatser = MapNode(ImageStats(),
                         iterfield=['op_string'],
                         name="compute_segment_stats")
    fslstatser.inputs.op_string = [
        '-l {thr1} -u {thr2} -v'.format(thr1=val + 0.5, thr2=val + 1.5)
        for val in range(3)
    ]
    wf.connect(faster, 'partial_volume_map', fslstatser, 'in_file')

    jsonfiler = Node(Function(
        input_names=['stats', 'seg_file', 'structure_map', 'struct_file'],
        output_names=['out_file'],
        function=toJSON),
                     name='save_json')
    structure_map = [('Background', 0), ('Left-Thalamus-Proper', 10),
                     ('Left-Caudate', 11), ('Left-Putamen', 12),
                     ('Left-Pallidum', 13), ('Left-Hippocampus', 17),
                     ('Left-Amygdala', 18), ('Left-Accumbens-area', 26),
                     ('Right-Thalamus-Proper', 49), ('Right-Caudate', 50),
                     ('Right-Putamen', 51), ('Right-Pallidum', 52),
                     ('Right-Hippocampus', 53), ('Right-Amygdala', 54),
                     ('Right-Accumbens-area', 58)]
    jsonfiler.inputs.structure_map = structure_map
    wf.connect(fslstatser, 'out_stat', jsonfiler, 'stats')
    wf.connect(firster, 'segmentation_file', jsonfiler, 'seg_file')

    sinker = Node(DataSink(), name='store_results')
    sinker.inputs.base_directory = sink_directory
    wf.connect(better, 'out_file', sinker, 'brain')
    wf.connect(faster, 'bias_field', sinker, 'segs.@bias_field')
    wf.connect(faster, 'partial_volume_files', sinker, 'segs.@partial_files')
    wf.connect(faster, 'partial_volume_map', sinker, 'segs.@partial_map')
    wf.connect(faster, 'probability_maps', sinker, 'segs.@prob_maps')
    wf.connect(faster, 'restored_image', sinker, 'segs.@restored')
    wf.connect(faster, 'tissue_class_files', sinker, 'segs.@tissue_files')
    wf.connect(faster, 'tissue_class_map', sinker, 'segs.@tissue_map')
    wf.connect(firster, 'bvars', sinker, 'parcels.@bvars')
    wf.connect(firster, 'original_segmentations', sinker, 'parcels.@origsegs')
    wf.connect(firster, 'segmentation_file', sinker, 'parcels.@segfile')
    wf.connect(firster, 'vtk_surfaces', sinker, 'parcels.@vtk')
    wf.connect(jsonfiler, 'out_file', sinker, '@stats')

    return wf
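A hedged invocation sketch: download_file and toJSON must already be defined in the surrounding module, and the subject id, output directory, and URL below are placeholders.

wf = create_workflow('sub-01', '/data/out', 'https://example.com/sub-01_T1w.nii.gz')
wf.base_dir = '/tmp/work'
wf.run(plugin='MultiProc', plugin_args={'n_procs': 2})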
Example 6
    def _fnirt_to_tmpl_pipeline(self, **name_maps):
        """
        Registers an MR scan to a reference MR scan using FSL's nonlinear
        FNIRT command

        Parameters
        ----------
        template : Which template to use, can be one of 'mni_nl6'
        """
        pipeline = self.new_pipeline(
            name='mag_coreg_to_tmpl',
            name_maps=name_maps,
            desc=("Nonlinearly registers a MR scan to a standard space,"
                  "e.g. MNI-space"),
            citations=[fsl_cite])

        # Basic reorientation to standard MNI space
        reorient = pipeline.add(
            'reorient',
            Reorient2Std(
                output_type='NIFTI_GZ'),
            inputs={
                'in_file': ('mag_preproc', nifti_gz_format)},
            requirements=[fsl_req.v('5.0.8')])

        reorient_mask = pipeline.add(
            'reorient_mask',
            Reorient2Std(
                output_type='NIFTI_GZ'),
            inputs={
                'in_file': ('brain_mask', nifti_gz_format)},
            requirements=[fsl_req.v('5.0.8')])

        reorient_brain = pipeline.add(
            'reorient_brain',
            Reorient2Std(
                output_type='NIFTI_GZ'),
            inputs={
                'in_file': ('brain', nifti_gz_format)},
            requirements=[fsl_req.v('5.0.8')])

        # Affine transformation to MNI space
        flirt = pipeline.add(
            'flirt',
            interface=FLIRT(
                dof=12,
                output_type='NIFTI_GZ'),
            inputs={
                'reference': ('template_brain', nifti_gz_format),
                'in_file': (reorient_brain, 'out_file')},
            requirements=[fsl_req.v('5.0.8')],
            wall_time=5)

        # Apply mask if corresponding subsampling scheme is 1
        # (i.e. 1-to-1 resolution) otherwise don't.
        apply_mask = [int(s == 1)
                      for s in self.parameter('fnirt_subsampling')]
        # Nonlinear transformation to MNI space
        pipeline.add(
            'fnirt',
            interface=FNIRT(
                output_type='NIFTI_GZ',
                intensity_mapping_model=(
                    self.parameter('fnirt_intensity_model')
                    if self.parameter('fnirt_intensity_model') is not None else
                    'none'),
                subsampling_scheme=self.parameter('fnirt_subsampling'),
                fieldcoeff_file=True,
                in_fwhm=[8, 6, 5, 4, 3, 2],  # 4.5 in the original list threw an error (floats not accepted)  @IgnorePep8
                ref_fwhm=[8, 6, 5, 4, 2, 0],
                regularization_lambda=[300, 150, 100, 50, 40, 30],
                apply_intensity_mapping=[1, 1, 1, 1, 1, 0],
                max_nonlin_iter=[5, 5, 5, 5, 5, 10],
                apply_inmask=apply_mask,
                apply_refmask=apply_mask),
            inputs={
                'ref_file': ('template', nifti_gz_format),
                'refmask_file': ('template_mask', nifti_gz_format),
                'in_file': (reorient, 'out_file'),
                'inmask_file': (reorient_mask, 'out_file'),
                'affine_file': (flirt, 'out_matrix_file')},
            outputs={
                'mag_coreg_to_tmpl': ('warped_file', nifti_gz_format),
                'coreg_to_tmpl_fsl_coeff': ('fieldcoeff_file',
                                             nifti_gz_format)},
            requirements=[fsl_req.v('5.0.8')],
            wall_time=60)
        # Set registration parameters
        # TODO: Need to work out which parameters to use
        return pipeline
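How the apply_mask flags above are derived, as a standalone sketch (the subsampling value is hypothetical): a mask is applied only at the refinement levels where the FNIRT subsampling scheme reaches full 1-to-1 resolution.

subsampling = [4, 2, 1, 1]
apply_mask = [int(s == 1) for s in subsampling]
print(apply_mask)  # [0, 0, 1, 1]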
Example 7
    DWIBiasCorrect,
    DWIDenoise,
    Generate5tt,
    MRDeGibbs,
    ResponseSD,
)

#: A dictionary that should be imported in the project's settings and included
#: within the *ANALYSIS_INTERFACES* setting.
interfaces = {
    "apply_topup": {ApplyTOPUP().version: ApplyTOPUP},
    "binary_maths": {BinaryMaths().version: BinaryMaths},
    "BET": {BET().version: BET},
    "CAT12 Segmentation": {"12.7": Cat12Segmentation},
    "fslmerge": {Merge().version: Merge},
    "fslreorient2std": {Reorient2Std().version: Reorient2Std},
    "fslroi": {ExtractROI().version: ExtractROI},
    "FAST": {FastWrapper.version: FastWrapper},
    "FLIRT": {FLIRT().version: FLIRT},
    "FNIRT": {FNIRT().version: FNIRT},
    "FSL Anatomical Processing Script": {FslAnat.__version__: FslAnat},
    "mean_image": {MeanImage().version: MeanImage},
    "robustfov": {RobustFOV().version: RobustFOV},
    "ReconAll": {ReconAll().version: ReconAll},
    "SUSAN": {SUSAN().version: SUSAN},
    "topup": {TopupWrapper.version: TopupWrapper},
    "eddy": {Eddy().version: Eddy},
    "denoise": {DWIDenoise().version: DWIDenoise},
    "degibbs": {MRDeGibbs().version: MRDeGibbs},
    "bias_correct": {DWIBiasCorrect().version: DWIBiasCorrect},
    "dwifslpreproc": {DwiFslPreproc.__version__: DwiFslPreproc},
Example 8
def convert_modality(old_subject_id, old_ses_id, output_dir, bids_name, bids_modality,
                     search_str, bvecs_from_scanner_file=None, public_sub_id=None, public_output=True,
                     reorient2std=True, task=None, direction=None, acq=None,
                     only_use_last=False, deface=False, physio=False, add_info={}):
    """
    Runs conversion for one subject and one modality.
    public_output: if True, strips all info about the original subject_id, file, and date.
    """
    if (public_output and bids_modality == "anat" and not deface):
        raise Exception("Public output requested, but anatomical images not defaced. exit. %s %s %s" % (
            old_subject_id, old_ses_id, bids_name))

    new_ses_id = get_clean_ses_id(old_ses_id)
    bids_ses = "ses-" + new_ses_id
    if public_sub_id:
        bids_sub = "sub-" + public_sub_id
    else:
        bids_sub = "sub-" + get_clean_subject_id(old_subject_id)
    mapping_file = os.path.join(output_dir, bids_sub, "par2nii_mapping.txt")

    par_file_list = glob("*" + search_str + "*.par")
    if par_file_list:
        sub_output_dir = os.path.join(output_dir, bids_sub)
        nii_output_dir = os.path.join(sub_output_dir, bids_ses, bids_modality)

        if not os.path.exists(nii_output_dir):
            os.makedirs(nii_output_dir)

        if only_use_last:
            par_file_list = par_file_list[-1:]

        for run_id, par_file in enumerate(par_file_list, 1):
            # put together bids file name
            # bids run
            bids_run = "run-" + str(run_id)
            out_components = [bids_sub, bids_ses]

            # bids acq
            if acq:
                out_components += ["acq-%s" % acq]

            # bids task
            if task:
                out_components += ["task-%s" % task]

            # bids acq. direction
            if direction:
                out_components += ["dir-%s" % direction]

            out_components += [bids_run, bids_name]
            out_filename = "_".join(out_components)
            nii_file = os.path.join(nii_output_dir, out_filename + ".nii.gz")
            assert not os.path.exists(nii_file), "file exists. STOP. %s" % nii_file

            bids_file, converter_results = run_dcm2niix(bids_name, bids_modality, bvecs_from_scanner_file,
                                                        mapping_file, nii_file, nii_output_dir, out_filename, par_file,
                                                        public_output, task)

            if reorient2std:
                reorient = Reorient2Std()
                reorient.inputs.in_file = converter_results.outputs.converted_files
                reorient.inputs.out_file = converter_results.outputs.converted_files
                reorient_results = reorient.run()

            if deface:
                deface_data(nii_file, nii_output_dir, out_filename)
            add_info_to_json(bids_file, {"Defaced": deface})

            add_info_to_json(bids_file, add_info)

            update_sub_scans_file(output_dir, bids_sub, bids_ses, bids_modality, out_filename, par_file, public_output)

            # finally as a sanity check, check that converted nii exists
            assert os.path.exists(nii_file), "Something went wrong: " \
                                             "converted file does not exist. STOP. %s" % nii_file

            if physio:  # convert physiological data
                physio_search_str = ".".join(par_file.split(".")[:-1]) + "_physio.log"
                physio_in_file_list = glob(physio_search_str)
                assert len(physio_in_file_list) < 2, "more than 1 physio file found for %s" % physio_search_str

                if physio_in_file_list:
                    physio_out_file_base = os.path.join(nii_output_dir, out_filename + "_physio")
                    meta_data, physio_data = parse_physio(physio_in_file_list[0])
                    save_physio(physio_out_file_base, meta_data, physio_data)
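A hedged call sketch mirroring the signature above; every argument value is a placeholder, and the helpers it relies on (get_clean_ses_id, run_dcm2niix, and friends) must already be in scope.

convert_modality(old_subject_id='XYZ01',
                 old_ses_id='20190101',
                 output_dir='/data/bids',
                 bids_name='T1w',
                 bids_modality='anat',
                 search_str='t1w',
                 deface=True)  # required here: public_output defaults to True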
     "versions": [{
         "title": MeanImage().version or "1.0",
         "description":
         f"Default MeanImage version for nipype {_NIPYPE_VERSION}.",  # noqa: E501
         "input": MEAN_IMAGE_INPUT_SPECIFICATION,
         "output": MEAN_IMAGE_OUTPUT_SPECIFICATION,
         "nested_results_attribute": "outputs.get_traitsfree",
     }],
 },
 {
     "title":
     "fslreorient2std",
     "description":
     "This is a simple and safe tool designed to reorient an image to match the orientation of the standard template images (MNI152) so that they appear 'the same way around' in FSLView. It requires that the image labels are correct in FSLView before this is run. It is also not a registration tool, so it will not align the image to standard space, it will only apply 90, 180 or 270 degree rotations about the different axes as necessary to get the labels in the same position as the standard template.",  # noqa: E501
     "versions": [{
         "title": Reorient2Std().version or "1.0",
         "description":
         f"Default fslorient2std version for nipype {_NIPYPE_VERSION}.",  # noqa: E501
         "input": REORIENT2STD_INPUT_SPECIFICATION,
         "output": REORIENT2STD_OUTPUT_SPECIFICATION,
         "nested_results_attribute": "outputs.get_traitsfree",
     }],
 },
 {
     "title":
     "robustfov",
     "description":
     "Automatically crops an image removing lower head and neck.",  # noqa: E501
     "versions": [{
         "title": RobustFOV().version or "1.0",
         "description":
Example 10
def create_tlc_workflow(config, t1_file, freesurf_parc, flair_lesion):
    """
    Inputs::
        config: Dictionary with PBR configuration options. See config.py
        t1_file: full path of t1 image
        freesurf_parc: full path of aparc+aseg.mgz from freesurfer
        flair_lesion: edited binary lesion mask based on FLAIR image (can also be labeled)
    Outputs::
        nipype.pipeline.engine.Workflow object
    """

    import nipype.interfaces.ants as ants
    from nipype.pipeline.engine import Node, Workflow, MapNode
    from nipype.interfaces.io import DataSink, DataGrabber
    from nipype.interfaces.utility import IdentityInterface, Function
    import nipype.interfaces.fsl as fsl
    from nipype.utils.filemanip import load_json
    import os
    import numpy as np
    from nipype.interfaces.freesurfer import Binarize, MRIConvert
    from nipype.interfaces.slicer.filtering import n4itkbiasfieldcorrection as n4
    from nipype.interfaces.fsl import Reorient2Std
    from nipype.interfaces.freesurfer import SegStats


    mse = get_mseid(t1_file)
    msid = get_msid(t1_file)
    working_dir = "tlc_{0}_{1}".format(msid, mse)

    register = Workflow(name=working_dir)
    register.base_dir = config["working_directory"]

    inputnode = Node(IdentityInterface(fields=["t1_image", "parc", "flair_lesion", "mse"]),
                     name="inputspec")
    inputnode.inputs.t1_image = t1_file
    inputnode.inputs.parc = freesurf_parc
    inputnode.inputs.flair_lesion = flair_lesion
    inputnode.inputs.mse = mse

    bin_math = Node(fsl.BinaryMaths(), name="Convert_to_binary")
    bin_math.inputs.operand_value = 1
    bin_math.inputs.operation = 'min'
    register.connect(inputnode, "flair_lesion", bin_math, "in_file")

    binvol1 = Node(Binarize(), name="binary_ventricle")
    binvol1.inputs.match = [4, 5, 11, 14, 15, 24, 43, 44, 50, 72, 213, 31, 63]
    #binvol1.inputs.match = [4, 5, 14, 15, 24, 43, 44, 72, 213]
    # each of these labels corresponds to ventricular CSF
    #binvol1.inputs.mask_thresh = 0.5
    binvol1.inputs.binary_file = os.path.join(config["working_directory"],
                                              working_dir, "binary_ventricle", "binarize_ventricle.nii.gz")
    register.connect(inputnode, "parc", binvol1, "in_file")

    binvol2 = Node(Binarize(), name="binary_gray_matter")
    binvol2.inputs.match = [3, 8, 42, 47, 169, 220, 702,
                            1878, 1915, 1979, 1993, 2000, 2001, 2002, 2003, 2005, 2006, 2007, 2008, 2009, 2010, 2011,
                            2012, 2013, 2014, 2015, 2016, 2017, 2018, 2019, 2020, 2021, 2022, 2023, 2024, 2025, 2026,
                            2027, 2028, 2029, 2030, 2031, 2032, 2033, 2034, 2035,
                            772, 833, 835, 896, 925, 936, 1001, 1002, 1003, 1005, 1006, 1007, 1008, 1009, 1010, 1011,
                            1012, 1013, 1014, 1015, 1016, 1017, 1018, 1019, 1020, 1021, 1022, 1023, 1024, 1025, 1026,
                            1027, 1028, 1029, 1030, 1031, 1032, 1033, 1034, 1035]
    binvol2.inputs.binary_file = os.path.join(config["working_directory"], working_dir,
                                              "binary_gray_matter", "binarize_cortex.nii.gz")
    #binvol2.inputs.mask_thresh = 0.5
    register.connect(inputnode, "parc", binvol2, "in_file")

    bias_corr = Node(n4.N4ITKBiasFieldCorrection(), name="BiasFieldCorrection")
    bias_corr.inputs.outputimage = os.path.join(config["working_directory"], working_dir,
                                                "BiasFieldCorrection", "bias_corrected.nii.gz")
    register.connect(inputnode, "t1_image", bias_corr, "inputimage")

    reo1 = Node(Reorient2Std(), name="reorient1")
    reo2 = Node(Reorient2Std(), name="reorient2")
    register.connect(binvol1, "binary_file", reo1, "in_file")
    register.connect(binvol2, "binary_file", reo2, "in_file")

    mri_convert1 = Node(Function(input_names=['t1_image', 'reorient_mask', 'working_dir'],
                                 output_names=['output_file'],
                                 function=mri_convert_like), name="mri_convert1")
    mri_convert2 = Node(Function(input_names=['t1_image', 'reorient_mask', 'working_dir'],
                                 output_names=['output_file'],
                                 function=mri_convert_like), name="mri_convert2")
    mri_convert1.inputs.working_dir = os.path.join(config["working_directory"], working_dir, 'mri_convert1')
    register.connect(bias_corr, "outputimage", mri_convert1, "t1_image")
    register.connect(reo1, "out_file", mri_convert1, "reorient_mask")
    mri_convert2.inputs.working_dir = os.path.join(config["working_directory"], working_dir, 'mri_convert2')
    register.connect(bias_corr, "outputimage", mri_convert2, "t1_image")
    register.connect(reo2, "out_file", mri_convert2, "reorient_mask")

    binvol3 = Node(Binarize(), name="binary_white_matter")
    binvol3.inputs.match = [2, 7, 16, 28, 41, 46, 60, 77, 78, 79, 251, 252, 253, 254, 255]
    #binvol3.inputs.match = [2, 7, 41, 46, 77, 78, 79]
    #binvol3.inputs.mask_thresh = 0.5
    binvol3.inputs.binary_file = os.path.join(config["working_directory"], working_dir,
                                              "binary_white_matter", "binarize_white_matter.nii.gz")
    register.connect(inputnode, "parc", binvol3, "in_file")
    reo3 = Node(Reorient2Std(), name="reorient3")
    register.connect(binvol3, "binary_file", reo3, "in_file")

    mri_convert3 = Node(Function(input_names=['t1_image', 'reorient_mask', 'working_dir'],
                                 output_names=['output_file'],
                                 function=mri_convert_like), name="mri_convert3")
    mri_convert3.inputs.working_dir = os.path.join(config["working_directory"], working_dir, 'mri_convert3')
    register.connect(reo3, "out_file", mri_convert3, "reorient_mask")
    register.connect(bias_corr, "outputimage", mri_convert3, "t1_image")

    get_new_lesion = Node(Function(input_names=['t1_image', 'ventricle', 'cortex', 'flair_lesion', 'white_matter',
                                                'working_dir'],
                                   output_names=['out_path85', 'out_path90', 'out_path95', 'out_path100', 'out_path_combined'],
                                   function=matrix_operation), name='get_new_lesion')
    get_new_lesion.inputs.working_dir = os.path.join(config["working_directory"], working_dir, 'get_new_lesion')
    register.connect(bias_corr, "outputimage", get_new_lesion, "t1_image")
    register.connect(mri_convert1, "output_file", get_new_lesion, "ventricle")
    register.connect(mri_convert2, "output_file", get_new_lesion, "cortex")
    register.connect(bin_math, "out_file", get_new_lesion, "flair_lesion")
    register.connect(mri_convert3, "output_file", get_new_lesion, "white_matter")


    cluster85 = Node(fsl.Cluster(threshold=0.0001,
                                 out_index_file=True,
                                 use_mm=True),
                     name="cluster85")
    register.connect(get_new_lesion, "out_path85", cluster85, "in_file")
    segstats85 = Node(SegStats(), name="segstats85")
    register.connect(cluster85, "index_file", segstats85, "segmentation_file")

    cluster90 = Node(fsl.Cluster(threshold=0.0001,
                                 out_index_file=True,
                                 use_mm=True),
                     name="cluster90")
    register.connect(get_new_lesion, "out_path90", cluster90, "in_file")
    segstats90 = Node(SegStats(), name="segstats90")
    register.connect(cluster90, "index_file", segstats90, "segmentation_file")

    cluster95 = Node(fsl.Cluster(threshold=0.0001,
                                 out_index_file=True,
                                 use_mm=True),
                     name="cluster95")
    register.connect(get_new_lesion, "out_path95", cluster95, "in_file")
    segstats95 = Node(SegStats(), name="segstats95")
    register.connect(cluster95, "index_file", segstats95, "segmentation_file")

    cluster100 = Node(fsl.Cluster(threshold=0.0001,
                                  out_index_file=True,
                                  use_mm=True),
                      name="cluster100")
    register.connect(get_new_lesion, "out_path100", cluster100, "in_file")
    segstats100 = Node(SegStats(), name="segstats100")
    register.connect(cluster100, "index_file", segstats100, "segmentation_file")

    get_new_lesion2 = Node(Function(input_names=['t1_image', 'ventricle', 'cortex', 'flair_lesion', 'white_matter',
                                                'working_dir'],
                                   output_names=['out_path90', 'out_path95', 'out_path100'],
                                   function=matrix_operation2), name='get_new_lesion2')
    get_new_lesion2.inputs.working_dir = os.path.join(config["working_directory"], working_dir, 'get_new_lesion2')
    register.connect(bias_corr, "outputimage", get_new_lesion2, "t1_image")
    register.connect(mri_convert1, "output_file", get_new_lesion2, "ventricle")
    register.connect(mri_convert2, "output_file", get_new_lesion2, "cortex")
    register.connect(bin_math, "out_file", get_new_lesion2, "flair_lesion")
    register.connect(mri_convert3, "output_file", get_new_lesion2, "white_matter")
    cluster_intersection90 = Node(fsl.Cluster(threshold=0.0001,
                                              out_index_file=True,
                                              use_mm=True),
                                  name="cluster_intersection90")
    register.connect(get_new_lesion2, "out_path90", cluster_intersection90, "in_file")
    segstats_intersection90 = Node(SegStats(), name="segstats_intersection90")
    register.connect(cluster_intersection90, "index_file", segstats_intersection90, "segmentation_file")

    cluster_intersection95 = Node(fsl.Cluster(threshold=0.0001,
                                              out_index_file=True,
                                              use_mm=True),
                                  name="cluster_intersection95")
    register.connect(get_new_lesion2, "out_path95", cluster_intersection95, "in_file")
    segstats_intersection95 = Node(SegStats(), name="segstats_intersection95")
    register.connect(cluster_intersection95, "index_file", segstats_intersection95, "segmentation_file")

    cluster_intersection100 = Node(fsl.Cluster(threshold=0.0001,
                                               out_index_file=True,
                                               use_mm=True),
                                   name="cluster_intersection100")
    register.connect(get_new_lesion2, "out_path100", cluster_intersection100, "in_file")
    segstats_intersection100 = Node(SegStats(), name="segstats_intersection100")
    register.connect(cluster_intersection100, "index_file", segstats_intersection100, "segmentation_file")

    sinker = Node(DataSink(), name="sinker")
    sinker.inputs.base_directory = os.path.join(config["output_directory"], mse, "tlc")
    sinker.inputs.container = '.'
    sinker.inputs.substitutions = []

    register.connect(get_new_lesion, "out_path85", sinker, "85.@lesion85")
    register.connect(get_new_lesion, "out_path90", sinker, "90.@lesion90")
    register.connect(get_new_lesion, "out_path95", sinker, "95.@lesion95")
    register.connect(get_new_lesion, "out_path100", sinker, "100.@lesion100")
    register.connect(get_new_lesion, "out_path_combined", sinker, "@WhiteMatterCombined")
    register.connect(get_new_lesion2, "out_path90", sinker, "intersection90.@lesion90")
    register.connect(get_new_lesion2, "out_path95", sinker, "intersection95.@lesion95")
    register.connect(get_new_lesion2, "out_path100", sinker, "intersection100.@lesion100")

    register.connect(segstats85, "summary_file", sinker, "85.@summaryfile85")
    register.connect(segstats90, "summary_file", sinker, "90.@summaryfile90")
    register.connect(segstats95, "summary_file", sinker, "95.@summaryfile95")
    register.connect(segstats100, "summary_file", sinker, "100.@summaryfile100")
    register.connect(segstats_intersection90, "summary_file", sinker, "intersection90.@summaryfile90")
    register.connect(segstats_intersection95, "summary_file", sinker, "intersection95.@summaryfile95")
    register.connect(segstats_intersection100, "summary_file", sinker, "intersection100.@summaryfile100")

    register.connect(cluster85, "index_file", sinker, "85.@index_file85")
    register.connect(cluster90, "index_file", sinker, "90.@index_file90")
    register.connect(cluster95, "index_file", sinker, "95.@index_file95")
    register.connect(cluster100, "index_file", sinker, "100.@index_file100")
    register.connect(cluster_intersection90, "index_file", sinker, "intersection90.@index_file90")
    register.connect(cluster_intersection95, "index_file", sinker, "intersection95.@index_file95")
    register.connect(cluster_intersection100, "index_file", sinker, "intersection100.@index_file100")

    return register
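A hedged invocation sketch: the config keys consumed inside the function are "working_directory" and "output_directory", the three paths below are placeholders, and get_mseid/get_msid plus the matrix_operation helpers must be importable from the surrounding module.

config = {"working_directory": "/scratch/tlc",
          "output_directory": "/data/out"}
wf = create_tlc_workflow(config,
                         t1_file="/data/ms01-mse01-t1w.nii.gz",
                         freesurf_parc="/data/fs/mri/aparc+aseg.mgz",
                         flair_lesion="/data/lesion_mask.nii.gz")
wf.run()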
Example 11
    def _fsl_fnirt_to_atlas_pipeline(self, **kwargs):
        """
        Registers an MR scan to a reference MR scan using FSL's nonlinear
        FNIRT command

        Parameters
        ----------
        atlas : Which atlas to use, can be one of 'mni_nl6'
        """
        pipeline = self.create_pipeline(
            name='coregister_to_atlas',
            inputs=[
                DatasetSpec('preproc', nifti_gz_format),
                DatasetSpec('brain_mask', nifti_gz_format),
                DatasetSpec('brain', nifti_gz_format)
            ],
            outputs=[
                DatasetSpec('coreg_to_atlas', nifti_gz_format),
                DatasetSpec('coreg_to_atlas_coeff', nifti_gz_format)
            ],
            desc=("Nonlinearly registers a MR scan to a standard space,"
                  "e.g. MNI-space"),
            version=1,
            citations=[fsl_cite],
            **kwargs)
        # Get the reference atlas from FSL directory
        ref_atlas = get_atlas_path(self.parameter('fnirt_atlas'),
                                   'image',
                                   resolution=self.parameter('resolution'))
        ref_mask = get_atlas_path(self.parameter('fnirt_atlas'),
                                  'mask_dilated',
                                  resolution=self.parameter('resolution'))
        ref_brain = get_atlas_path(self.parameter('fnirt_atlas'),
                                   'brain',
                                   resolution=self.parameter('resolution'))
        # Basic reorientation to standard MNI space
        reorient = pipeline.create_node(Reorient2Std(),
                                        name='reorient',
                                        requirements=[fsl5_req])
        reorient.inputs.output_type = 'NIFTI_GZ'
        reorient_mask = pipeline.create_node(Reorient2Std(),
                                             name='reorient_mask',
                                             requirements=[fsl5_req])
        reorient_mask.inputs.output_type = 'NIFTI_GZ'
        reorient_brain = pipeline.create_node(Reorient2Std(),
                                              name='reorient_brain',
                                              requirements=[fsl5_req])
        reorient_brain.inputs.output_type = 'NIFTI_GZ'
        # Affine transformation to MNI space
        flirt = pipeline.create_node(interface=FLIRT(),
                                     name='flirt',
                                     requirements=[fsl5_req],
                                     wall_time=5)
        flirt.inputs.reference = ref_brain
        flirt.inputs.dof = 12
        flirt.inputs.output_type = 'NIFTI_GZ'
        # Nonlinear transformation to MNI space
        fnirt = pipeline.create_node(interface=FNIRT(),
                                     name='fnirt',
                                     requirements=[fsl5_req],
                                     wall_time=60)
        fnirt.inputs.ref_file = ref_atlas
        fnirt.inputs.refmask_file = ref_mask
        fnirt.inputs.output_type = 'NIFTI_GZ'
        intensity_model = self.parameter('fnirt_intensity_model')
        if intensity_model is None:
            intensity_model = 'none'
        fnirt.inputs.intensity_mapping_model = intensity_model
        fnirt.inputs.subsampling_scheme = self.parameter('fnirt_subsampling')
        fnirt.inputs.fieldcoeff_file = True
        fnirt.inputs.in_fwhm = [8, 6, 5, 4.5, 3, 2]
        fnirt.inputs.ref_fwhm = [8, 6, 5, 4, 2, 0]
        fnirt.inputs.regularization_lambda = [300, 150, 100, 50, 40, 30]
        fnirt.inputs.apply_intensity_mapping = [1, 1, 1, 1, 1, 0]
        fnirt.inputs.max_nonlin_iter = [5, 5, 5, 5, 5, 10]
        # Apply mask if corresponding subsampling scheme is 1
        # (i.e. 1-to-1 resolution) otherwise don't.
        apply_mask = [int(s == 1) for s in self.parameter('fnirt_subsampling')]
        fnirt.inputs.apply_inmask = apply_mask
        fnirt.inputs.apply_refmask = apply_mask
        # Connect nodes
        pipeline.connect(reorient_brain, 'out_file', flirt, 'in_file')
        pipeline.connect(reorient, 'out_file', fnirt, 'in_file')
        pipeline.connect(reorient_mask, 'out_file', fnirt, 'inmask_file')
        pipeline.connect(flirt, 'out_matrix_file', fnirt, 'affine_file')
        # Set registration parameters
        # TODO: Need to work out which parameters to use
        # Connect inputs
        pipeline.connect_input('preproc', reorient, 'in_file')
        pipeline.connect_input('brain_mask', reorient_mask, 'in_file')
        pipeline.connect_input('brain', reorient_brain, 'in_file')
        # Connect outputs
        pipeline.connect_output('coreg_to_atlas', fnirt, 'warped_file')
        pipeline.connect_output('coreg_to_atlas_coeff', fnirt,
                                'fieldcoeff_file')
        return pipeline
Example 12
        d = results.outputs.get_traitsfree()
        for i, pv_file in enumerate(d["partial_volume_files"]):
            d[f"partial_volume_{i}"] = pv_file
        del d["partial_volume_files"]
        return d


interfaces = {
    "BET": {
        BET().version: BET
    },
    "CAT12 Segmentation": {
        "12.6": Cat12Segmentation
    },
    "fslreorient2std": {
        Reorient2Std().version: Reorient2Std
    },
    "FAST": {
        FAST().version: FastWrapper
    },
    "FLIRT": {
        FLIRT().version: FLIRT
    },
    "FNIRT": {
        FNIRT().version: FNIRT
    },
    "FSL Anatomical Processing Script": {
        FslAnat.__version__: FslAnat
    },
    "SUSAN": {
        SUSAN().version: SUSAN