Code Example #1
File: cortex_thickness.py Project: haanme/FinnBrain
def freesurfer_create_subject_dir(basedir, T1dir, output_prefix):

    from nipype.interfaces.io import FreeSurferSource
    fs = FreeSurferSource()
    fs.inputs.subjects_dir = basedir
    fs.inputs.subject_id = output_prefix
    res = fs.run()
    print(dir(res.outputs))  # list the available output traits
    print(res.outputs)
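A minimal standalone sketch of the same pattern, with placeholder paths; running it requires an existing recon-all directory:

from nipype.interfaces.io import FreeSurferSource

fs_src = FreeSurferSource(subjects_dir='/data/freesurfer',  # placeholder
                          subject_id='sub-01')              # placeholder
res = fs_src.run()
print(res.outputs.T1)         # <subjects_dir>/sub-01/mri/T1.mgz
print(res.outputs.thickness)  # lh./rh. thickness files (hemi defaults to 'both')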
Code Example #2
import nipype.interfaces.freesurfer as fs
from nipype.interfaces.io import FreeSurferSource


def test_eulernumber(tmpdir):
    # grab a surface from fsaverage
    fssrc = FreeSurferSource(subjects_dir=fs.Info.subjectsdir(),
                             subject_id="fsaverage",
                             hemi="lh")
    pial = fssrc.run().outputs.pial
    assert isinstance(pial, str), "Problem when fetching surface file"

    eu = fs.EulerNumber()
    eu.inputs.in_file = pial
    res = eu.run()
    assert res.outputs.defects == 0
    assert res.outputs.euler == 2
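The two assertions encode the Euler characteristic: for a closed surface that is topologically a sphere, V - E + F = 2, and topological defects drive that number away from 2. A minimal sketch of the same quantity computed directly from the mesh, assuming nibabel is installed and the placeholder path is replaced:

import numpy as np
import nibabel as nib

verts, faces = nib.freesurfer.read_geometry('/path/to/lh.pial')  # placeholder path
# Every triangle contributes three undirected edges; deduplicate them.
edges = np.sort(faces[:, [0, 1, 1, 2, 2, 0]].reshape(-1, 2), axis=1)
n_edges = len(np.unique(edges, axis=0))
euler = len(verts) - n_edges + len(faces)
print(euler)  # 2 for a defect-free, sphere-like surface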
Code Example #3
def fs_segment(name="segment"):
    from nipype.interfaces.io import FreeSurferSource
    from nipype.interfaces.utility import IdentityInterface
    import nipype.interfaces.freesurfer as fs
    import nipype.pipeline.engine as pe
    import os
    wf = pe.Workflow(name=name)
    inputspec = pe.Node(
        IdentityInterface(fields=['subject_id', 'subjects_dir']),
        name="inputspec")
    fssource = pe.Node(FreeSurferSource(), name="fssource")
    wf.connect(inputspec, "subject_id", fssource, "subject_id")
    wf.connect(inputspec, "subjects_dir", fssource, "subjects_dir")
    bin_wm = pe.Node(fs.Binarize(), name="get_wm")
    bin_wm.inputs.out_type = 'nii.gz'
    bin_wm.inputs.match = [2, 41]
    bin_gm = bin_wm.clone("get_gm")
    bin_gm.inputs.out_type = 'nii.gz'
    bin_gm.inputs.match = [3, 42]
    bin_csf = bin_wm.clone("get_csf")
    bin_csf.inputs.out_type = 'nii.gz'
    bin_csf.inputs.match = [4, 5, 14, 15, 24, 31, 43, 44, 63]
    wf.connect(fssource, ("ribbon", pick_file, 'ribbon.mgz'), bin_wm,
               "in_file")
    wf.connect(fssource, ("ribbon", pick_file, 'ribbon.mgz'), bin_gm,
               "in_file")
    wf.connect(fssource, ("aparc_aseg", pick_file, 'aparc+aseg.mgz'), bin_csf,
               "in_file")
    outputspec = pe.Node(IdentityInterface(fields=["gm", "wm", "csf"]),
                         name='outputspec')
    wf.connect(bin_wm, "binary_file", outputspec, "wm")
    wf.connect(bin_gm, "binary_file", outputspec, "gm")
    wf.connect(bin_csf, "binary_file", outputspec, "csf")
    return wf
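fs_segment relies on a pick_file helper defined elsewhere in the project. A plausible implementation, assuming it simply selects the path whose basename matches (FreeSurferSource returns ribbon as a list such as [lh.ribbon.mgz, rh.ribbon.mgz, ribbon.mgz]); the import sits inside the function because nipype serializes connection functions and runs them in a fresh namespace:

def pick_file(files, name):
    """Hypothetical helper: return the entry of `files` whose basename is `name`."""
    import os
    if not isinstance(files, list):
        files = [files]
    for f in files:
        if os.path.basename(f) == name:
            return f
    raise ValueError('%s not found in %s' % (name, files))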
Code Example #4
import os
import os.path as op

import nipype.pipeline.engine as pe
import nipype.interfaces.freesurfer as fs
from nipype.interfaces.io import FreeSurferSource


def test_mrisexpand(tmpdir):
    fssrc = FreeSurferSource(subjects_dir=fs.Info.subjectsdir(),
                             subject_id='fsaverage', hemi='lh')

    fsavginfo = fssrc.run().outputs.get()

    # dt=60 to ensure very short runtime
    expand_if = fs.MRIsExpand(in_file=fsavginfo['smoothwm'],
                              out_name='expandtmp',
                              distance=1,
                              dt=60)

    expand_nd = pe.Node(
        fs.MRIsExpand(in_file=fsavginfo['smoothwm'],
                      out_name='expandtmp',
                      distance=1,
                      dt=60),
        name='expand_node')

    # Interfaces should have same command line at instantiation
    orig_cmdline = 'mris_expand -T 60 {} 1 expandtmp'.format(fsavginfo['smoothwm'])
    assert expand_if.cmdline == orig_cmdline
    assert expand_nd.interface.cmdline == orig_cmdline

    # Run both interfaces
    if_res = expand_if.run()
    nd_res = expand_nd.run()

    # Commandlines differ
    node_cmdline = 'mris_expand -T 60 -pial {cwd}/lh.pial {cwd}/lh.smoothwm ' \
        '1 expandtmp'.format(cwd=nd_res.runtime.cwd)
    assert if_res.runtime.cmdline == orig_cmdline
    assert nd_res.runtime.cmdline == node_cmdline

    # Check output
    if_out_file = if_res.outputs.get()['out_file']
    nd_out_file = nd_res.outputs.get()['out_file']
    # Same filename
    assert op.basename(if_out_file) == op.basename(nd_out_file)
    # Interface places output in source directory
    assert op.dirname(if_out_file) == op.dirname(fsavginfo['smoothwm'])
    # Node places output in working directory
    assert op.dirname(nd_out_file) == nd_res.runtime.cwd

    # Remove test surface
    os.unlink(if_out_file)
Code Example #5
import os.path as op

import nipype.pipeline.engine as pe
import nipype.interfaces.freesurfer as fs
from nipype.interfaces.io import FreeSurferSource


def test_mrisexpand(tmpdir):
    fssrc = FreeSurferSource(subjects_dir=fs.Info.subjectsdir(),
                             subject_id="fsaverage",
                             hemi="lh")

    fsavginfo = fssrc.run().outputs.get()

    # dt=60 to ensure very short runtime
    expand_if = fs.MRIsExpand(in_file=fsavginfo["smoothwm"],
                              out_name="expandtmp",
                              distance=1,
                              dt=60)

    expand_nd = pe.Node(
        fs.MRIsExpand(in_file=fsavginfo["smoothwm"],
                      out_name="expandtmp",
                      distance=1,
                      dt=60),
        name="expand_node",
    )

    # Interfaces should have same command line at instantiation
    orig_cmdline = "mris_expand -T 60 {} 1 expandtmp".format(
        fsavginfo["smoothwm"])
    assert expand_if.cmdline == orig_cmdline
    assert expand_nd.interface.cmdline == orig_cmdline

    # Run Node interface
    nd_res = expand_nd.run()

    # Commandlines differ
    node_cmdline = ("mris_expand -T 60 -pial {cwd}/lh.pial {cwd}/lh.smoothwm "
                    "1 expandtmp".format(cwd=nd_res.runtime.cwd))
    assert nd_res.runtime.cmdline == node_cmdline

    # Check output
    if_out_file = expand_if._list_outputs()["out_file"]
    nd_out_file = nd_res.outputs.get()["out_file"]
    # Same filename
    assert op.basename(if_out_file) == op.basename(nd_out_file)
    # Interface places output in source directory
    assert op.dirname(if_out_file) == op.dirname(fsavginfo["smoothwm"])
    # Node places output in working directory
    assert op.dirname(nd_out_file) == nd_res.runtime.cwd
Code Example #6
def test_FreeSurferSource_inputs():
    input_map = dict(
        hemi=dict(usedefault=True),
        ignore_exception=dict(nohash=True, usedefault=True),
        subject_id=dict(mandatory=True),
        subjects_dir=dict(mandatory=True),
    )
    inputs = FreeSurferSource.input_spec()

    for key, metadata in input_map.items():
        for metakey, value in metadata.items():
            yield assert_equal, getattr(inputs.traits()[key], metakey), value
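The yield-based style above targets nose; nipype later migrated these generated tests to plain pytest asserts. An equivalent spot-check in that style might look like:

from nipype.interfaces.io import FreeSurferSource


def test_freesurfersource_inputs_pytest():
    inputs = FreeSurferSource.input_spec()
    assert inputs.traits()['hemi'].usedefault is True
    assert inputs.traits()['subject_id'].mandatory is True
    assert inputs.traits()['subjects_dir'].mandatory is True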
Code Example #7
    def create_workflow(self, flow, inputnode, outputnode):
        """Create the stage workflow.

        Parameters
        ----------
        flow : nipype.pipeline.engine.Workflow
            The nipype.pipeline.engine.Workflow instance of the anatomical pipeline

        inputnode : nipype.interfaces.utility.IdentityInterface
            Identity interface describing the inputs of the segmentation stage

        outputnode : nipype.interfaces.utility.IdentityInterface
            Identity interface describing the outputs of the segmentation stage
        """
        if self.config.seg_tool == "Freesurfer":

            def correct_freesurfer_subjectid_path(path):
                if os.path.exists('/output_dir') and '/output_dir' not in path:
                    subject_id = path.split(f"{__freesurfer_directory__}/")[-1]
                    path = os.path.abspath(f'/output_dir/{__freesurfer_directory__}/{subject_id}')
                return path

            def correct_freesurfer_subjects_path(path):
                if os.path.exists('/output_dir') and '/output_dir' not in path:
                    path = os.path.abspath(f'/output_dir/{__freesurfer_directory__}')
                return path

            orig_dir = os.path.join(
                correct_freesurfer_subjectid_path(self.config.freesurfer_subject_id), "mri", "orig"
            )
            print(f'INFO : orig_dir = {orig_dir}')
            # Skip Freesurfer recon-all if 001.mgz exists, which typically means it has already been run
            self.config.use_existing_freesurfer_data = os.path.exists(orig_dir)
            print(f'INFO : orig_dir exists? {self.config.use_existing_freesurfer_data}')

            if self.config.use_existing_freesurfer_data is False:
                # Converting to .mgz format
                fs_mriconvert = pe.Node(
                    interface=fs.MRIConvert(out_type="mgz", out_file="T1.mgz"),
                    name="mgzConvert",
                )

                if self.config.make_isotropic:
                    fs_mriconvert.inputs.vox_size = (
                        self.config.isotropic_vox_size,
                        self.config.isotropic_vox_size,
                        self.config.isotropic_vox_size,
                    )
                    fs_mriconvert.inputs.resample_type = (
                        self.config.isotropic_interpolation
                    )

                rename = pe.Node(Rename001(), name="copyOrig")

                if not os.path.exists(orig_dir):
                    print(f'INFO : Create folder: {orig_dir}')
                    os.makedirs(orig_dir)

                rename.inputs.format_string = os.path.join(orig_dir, "001.mgz")

                if self.config.brain_mask_extraction_tool == "Freesurfer":
                    # ReconAll => named outputnode as we don't want to select a specific output....
                    fs_reconall = pe.Node(
                        interface=fs.ReconAll(
                            flags=f'-no-isrunning -parallel -openmp {self.config.number_of_threads}'
                        ),
                        name="reconall",
                    )
                    fs_reconall.inputs.directive = "all"
                    fs_reconall.inputs.args = self.config.freesurfer_args

                    # fs_reconall.inputs.subjects_dir and fs_reconall.inputs.subject_id set
                    # in cmp/pipelines/diffusion/diffusion.py
                    fs_reconall.inputs.subjects_dir = (
                        correct_freesurfer_subjects_path(self.config.freesurfer_subjects_dir)
                    )
                    # fmt: off
                    flow.connect(
                        [
                            (inputnode, fs_mriconvert, [(("T1", isavailable), "in_file")]),
                            (fs_mriconvert, rename, [("out_file", "in_file")]),
                            (rename, fs_reconall, [(("out_file", extract_reconall_base_dir), "subject_id")]),
                            (fs_reconall, outputnode, [("subjects_dir", "subjects_dir"), ("subject_id", "subject_id")]),
                        ]
                    )
                    # fmt: on
                else:
                    # ReconAll => named outputnode as we don't want to select a specific output....
                    fs_autorecon1 = pe.Node(
                        interface=fs.ReconAll(
                            flags="-no-isrunning -parallel -openmp {}".format(
                                self.config.number_of_threads
                            )
                        ),
                        name="autorecon1",
                    )
                    fs_autorecon1.inputs.directive = "autorecon1"

                    if self.config.brain_mask_extraction_tool == "ANTs":
                        fs_autorecon1.inputs.flags = (
                            "-no-isrunning -noskullstrip -parallel -openmp {}".format(
                                self.config.number_of_threads
                            )
                        )
                    fs_autorecon1.inputs.args = self.config.freesurfer_args

                    # fs_reconall.inputs.subjects_dir and fs_reconall.inputs.subject_id set
                    # in cmp/pipelines/diffusion/diffusion.py
                    fs_autorecon1.inputs.subjects_dir = (
                        correct_freesurfer_subjects_path(self.config.freesurfer_subjects_dir)
                    )
                    # fmt: off
                    flow.connect(
                        [
                            (inputnode, fs_mriconvert, [(("T1", isavailable), "in_file")]),
                            (fs_mriconvert, rename, [("out_file", "in_file")]),
                            (rename, fs_autorecon1, [(("out_file", extract_reconall_base_dir), "subject_id")]),
                        ]
                    )
                    # fmt: on

                    fs_source = pe.Node(interface=FreeSurferSource(), name="fsSource")

                    fs_mriconvert_nu = pe.Node(
                        interface=fs.MRIConvert(out_type="niigz", out_file="nu.nii.gz"),
                        name="niigzConvert",
                    )
                    # fmt: off
                    flow.connect(
                        [
                            (fs_autorecon1, fs_source, [("subjects_dir", "subjects_dir"),
                                                        ("subject_id", "subject_id")]),
                            (fs_source, fs_mriconvert_nu, [("nu", "in_file")]),
                        ]
                    )
                    # fmt: on
                    fs_mriconvert_brainmask = pe.Node(
                        interface=fs.MRIConvert(
                            out_type="mgz", out_file="brainmask.mgz"
                        ),
                        name="fsMriconvertBETbrainmask",
                    )

                    if self.config.brain_mask_extraction_tool == "BET":
                        fsl_bet = pe.Node(
                            interface=fsl.BET(
                                out_file="brain.nii.gz",
                                mask=True,
                                skull=True,
                                robust=True,
                            ),
                            name="fsl_bet",
                        )
                        # fmt: off
                        flow.connect(
                            [
                                (fs_mriconvert_nu, fsl_bet, [("out_file", "in_file")]),
                                (fsl_bet, fs_mriconvert_brainmask, [("out_file", "in_file")]),
                            ]
                        )
                        # fmt: on

                    elif self.config.brain_mask_extraction_tool == "ANTs":
                        ants_bet = pe.Node(
                            interface=ants.BrainExtraction(out_prefix="ants_bet_"),
                            name="antsBET",
                        )
                        ants_bet.inputs.brain_template = self.config.ants_templatefile
                        ants_bet.inputs.brain_probability_mask = (
                            self.config.ants_probmaskfile
                        )
                        ants_bet.inputs.extraction_registration_mask = (
                            self.config.ants_regmaskfile
                        )
                        ants_bet.inputs.num_threads = self.config.number_of_threads
                        # fmt: off
                        flow.connect(
                            [
                                (fs_mriconvert_nu, ants_bet, [("out_file", "anatomical_image")]),
                                (ants_bet, fs_mriconvert_brainmask, [("BrainExtractionBrain", "in_file")]),
                            ]
                        )
                        # fmt: on

                    copy_brainmask_to_fs = pe.Node(
                        interface=copyBrainMaskToFreesurfer(), name="copyBrainmaskTofs"
                    )
                    # fmt: off
                    flow.connect(
                        [
                            (rename, copy_brainmask_to_fs, [(("out_file", extract_reconall_base_dir), "subject_dir")]),
                            (fs_mriconvert_brainmask, copy_brainmask_to_fs, [("out_file", "in_file")]),
                        ]
                    )
                    # fmt: on

                    fs_reconall23 = pe.Node(
                        interface=fs.ReconAll(
                            flags="-no-isrunning -parallel -openmp {}".format(
                                self.config.number_of_threads
                            )
                        ),
                        name="reconall23",
                    )
                    fs_reconall23.inputs.directive = "autorecon2"
                    fs_reconall23.inputs.args = self.config.freesurfer_args
                    fs_reconall23.inputs.flags = "-autorecon3"

                    fs_reconall23.inputs.subjects_dir = (
                        correct_freesurfer_subjects_path(self.config.freesurfer_subjects_dir)
                    )
                    # fmt: off
                    flow.connect(
                        [
                            (copy_brainmask_to_fs, fs_reconall23, [(("out_brainmask_file", get_freesurfer_subject_id), "subject_id")]),
                            (fs_reconall23, outputnode, [("subjects_dir", "subjects_dir"),
                                                         ("subject_id", "subject_id")]),
                        ]
                    )
                    # fmt: on

            else:
                outputnode.inputs.subjects_dir = correct_freesurfer_subjects_path(self.config.freesurfer_subjects_dir)
                outputnode.inputs.subject_id = correct_freesurfer_subjectid_path(self.config.freesurfer_subject_id)
                print(f'INFO : Found existing {os.path.join(orig_dir, "001.mgz")} -> Skip Freesurfer recon-all')
                print(f'       - outputnode.inputs.subjects_dir: {outputnode.inputs.subjects_dir}')
                print(f'       - outputnode.inputs.subject_id: {outputnode.inputs.subject_id}')
        elif self.config.seg_tool == "Custom segmentation":
            self.create_workflow_custom(flow, inputnode, outputnode)
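For reference, a minimal standalone sketch of the ReconAll call this stage assembles; subject ID, paths, and thread count are placeholders, and the flags string mirrors the one built above:

import nipype.interfaces.freesurfer as fs

reconall = fs.ReconAll()
reconall.inputs.subject_id = 'sub-01'                    # placeholder
reconall.inputs.directive = 'all'
reconall.inputs.subjects_dir = '/output_dir/freesurfer'  # placeholder
reconall.inputs.T1_files = '/data/sub-01_T1w.nii.gz'     # placeholder
reconall.inputs.flags = '-no-isrunning -parallel -openmp 4'
print(reconall.cmdline)  # inspect the recon-all command line before running
# reconall.run()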
Code Example #8
def create_workflow(files,
                    subject_id,
                    n_vol=0,
                    despike=True,
                    TR=None,
                    slice_times=None,
                    slice_thickness=None,
                    fieldmap_images=[],
                    norm_threshold=1,
                    num_components=6,
                    vol_fwhm=None,
                    surf_fwhm=None,
                    lowpass_freq=-1,
                    highpass_freq=-1,
                    sink_directory=os.getcwd(),
                    FM_TEdiff=2.46,
                    FM_sigma=2,
                    FM_echo_spacing=.7,
                    target_subject=['fsaverage3', 'fsaverage4'],
                    name='resting'):

    wf = Workflow(name=name)

    # Skip starting volumes
    remove_vol = MapNode(fsl.ExtractROI(t_min=n_vol, t_size=-1),
                         iterfield=['in_file'],
                         name="remove_volumes")
    remove_vol.inputs.in_file = files

    # Run AFNI's despike. This is always run; whether its output is fed to
    # realign depends on the input configuration
    despiker = MapNode(afni.Despike(outputtype='NIFTI_GZ'),
                       iterfield=['in_file'],
                       name='despike')
    #despiker.plugin_args = {'qsub_args': '-l nodes=1:ppn='}

    wf.connect(remove_vol, 'roi_file', despiker, 'in_file')

    # Run Nipy joint slice timing and realignment algorithm
    realign = Node(nipy.SpaceTimeRealigner(), name='realign')
    realign.inputs.tr = TR
    realign.inputs.slice_times = slice_times
    realign.inputs.slice_info = 2

    if despike:
        wf.connect(despiker, 'out_file', realign, 'in_file')
    else:
        wf.connect(remove_vol, 'roi_file', realign, 'in_file')

    # Compute TSNR on realigned data, regressing out polynomials up to order 2
    tsnr = MapNode(TSNR(regress_poly=2), iterfield=['in_file'], name='tsnr')
    wf.connect(realign, 'out_file', tsnr, 'in_file')

    # Compute the median image across runs
    calc_median = Node(Function(input_names=['in_files'],
                                output_names=['median_file'],
                                function=median,
                                imports=imports),
                       name='median')
    wf.connect(tsnr, 'detrended_file', calc_median, 'in_files')

    # Coregister the median to the surface
    register = Node(freesurfer.BBRegister(), name='bbregister')
    register.inputs.subject_id = subject_id
    register.inputs.init = 'fsl'
    register.inputs.contrast_type = 't2'
    register.inputs.out_fsl_file = True
    register.inputs.epi_mask = True

    # Compute fieldmaps and unwarp using them
    if fieldmap_images:
        fieldmap = Node(interface=EPIDeWarp(), name='fieldmap_unwarp')
        fieldmap.inputs.tediff = FM_TEdiff
        fieldmap.inputs.esp = FM_echo_spacing
        fieldmap.inputs.sigma = FM_sigma
        fieldmap.inputs.mag_file = fieldmap_images[0]
        fieldmap.inputs.dph_file = fieldmap_images[1]
        wf.connect(calc_median, 'median_file', fieldmap, 'exf_file')

        dewarper = MapNode(interface=fsl.FUGUE(),
                           iterfield=['in_file'],
                           name='dewarper')
        wf.connect(tsnr, 'detrended_file', dewarper, 'in_file')
        wf.connect(fieldmap, 'exf_mask', dewarper, 'mask_file')
        wf.connect(fieldmap, 'vsm_file', dewarper, 'shift_in_file')
        wf.connect(fieldmap, 'exfdw', register, 'source_file')
    else:
        wf.connect(calc_median, 'median_file', register, 'source_file')

    # Get the subject's freesurfer source directory
    fssource = Node(FreeSurferSource(), name='fssource')
    fssource.inputs.subject_id = subject_id
    fssource.inputs.subjects_dir = os.environ['SUBJECTS_DIR']

    # Extract wm+csf and brain masks by eroding freesurfer labels, then
    # transform the masks into the space of the median
    wmcsf = Node(freesurfer.Binarize(), name='wmcsfmask')
    mask = wmcsf.clone('anatmask')
    wmcsftransform = Node(freesurfer.ApplyVolTransform(inverse=True,
                                                       interp='nearest'),
                          name='wmcsftransform')
    wmcsftransform.inputs.subjects_dir = os.environ['SUBJECTS_DIR']
    wmcsf.inputs.wm_ven_csf = True
    wmcsf.inputs.match = [4, 5, 14, 15, 24, 31, 43, 44, 63]
    wmcsf.inputs.binary_file = 'wmcsf.nii.gz'
    wmcsf.inputs.erode = int(np.ceil(slice_thickness))
    wf.connect(fssource, ('aparc_aseg', get_aparc_aseg), wmcsf, 'in_file')
    if fieldmap_images:
        wf.connect(fieldmap, 'exf_mask', wmcsftransform, 'source_file')
    else:
        wf.connect(calc_median, 'median_file', wmcsftransform, 'source_file')
    wf.connect(register, 'out_reg_file', wmcsftransform, 'reg_file')
    wf.connect(wmcsf, 'binary_file', wmcsftransform, 'target_file')

    mask.inputs.binary_file = 'mask.nii.gz'
    mask.inputs.dilate = int(np.ceil(slice_thickness)) + 1
    mask.inputs.erode = int(np.ceil(slice_thickness))
    mask.inputs.min = 0.5
    wf.connect(fssource, ('aparc_aseg', get_aparc_aseg), mask, 'in_file')
    masktransform = wmcsftransform.clone("masktransform")
    if fieldmap_images:
        wf.connect(fieldmap, 'exf_mask', masktransform, 'source_file')
    else:
        wf.connect(calc_median, 'median_file', masktransform, 'source_file')
    wf.connect(register, 'out_reg_file', masktransform, 'reg_file')
    wf.connect(mask, 'binary_file', masktransform, 'target_file')

    # Compute Art outliers
    art = Node(interface=ArtifactDetect(use_differences=[True, False],
                                        use_norm=True,
                                        norm_threshold=norm_threshold,
                                        zintensity_threshold=3,
                                        parameter_source='NiPy',
                                        bound_by_brainmask=True,
                                        save_plot=False,
                                        mask_type='file'),
               name="art")
    if fieldmap_images:
        wf.connect(dewarper, 'unwarped_file', art, 'realigned_files')
    else:
        wf.connect(tsnr, 'detrended_file', art, 'realigned_files')
    wf.connect(realign, 'par_file', art, 'realignment_parameters')
    wf.connect(masktransform, 'transformed_file', art, 'mask_file')

    # Compute motion regressors
    motreg = Node(Function(
        input_names=['motion_params', 'order', 'derivatives'],
        output_names=['out_files'],
        function=motion_regressors,
        imports=imports),
                  name='getmotionregress')
    wf.connect(realign, 'par_file', motreg, 'motion_params')

    # Create a filter to remove motion and art confounds
    createfilter1 = Node(Function(
        input_names=['motion_params', 'comp_norm', 'outliers'],
        output_names=['out_files'],
        function=build_filter1,
        imports=imports),
                         name='makemotionbasedfilter')
    wf.connect(motreg, 'out_files', createfilter1, 'motion_params')
    wf.connect(art, 'norm_files', createfilter1, 'comp_norm')
    wf.connect(art, 'outlier_files', createfilter1, 'outliers')

    # Filter the motion and art confounds
    filter1 = MapNode(fsl.GLM(out_res_name='timeseries.nii.gz', demean=True),
                      iterfield=['in_file', 'design'],
                      name='filtermotion')
    if fieldmap_images:
        wf.connect(dewarper, 'unwarped_file', filter1, 'in_file')
    else:
        wf.connect(tsnr, 'detrended_file', filter1, 'in_file')
    wf.connect(createfilter1, 'out_files', filter1, 'design')
    wf.connect(masktransform, 'transformed_file', filter1, 'mask')

    # Create a filter to remove noise components based on white matter and CSF
    createfilter2 = MapNode(Function(
        input_names=['realigned_file', 'mask_file', 'num_components'],
        output_names=['out_files'],
        function=extract_noise_components,
        imports=imports),
                            iterfield=['realigned_file'],
                            name='makecompcorrfilter')
    createfilter2.inputs.num_components = num_components
    wf.connect(filter1, 'out_res', createfilter2, 'realigned_file')
    wf.connect(masktransform, 'transformed_file', createfilter2, 'mask_file')

    # Filter noise components
    filter2 = MapNode(fsl.GLM(out_res_name='timeseries_cleaned.nii.gz',
                              demean=True),
                      iterfield=['in_file', 'design'],
                      name='filtercompcorr')
    wf.connect(filter1, 'out_res', filter2, 'in_file')
    wf.connect(createfilter2, 'out_files', filter2, 'design')
    wf.connect(masktransform, 'transformed_file', filter2, 'mask')

    # Smoothing using surface and volume smoothing
    smooth = MapNode(freesurfer.Smooth(), iterfield=['in_file'], name='smooth')
    smooth.inputs.proj_frac_avg = (0.1, 0.9, 0.1)
    if surf_fwhm is None:
        surf_fwhm = 5 * slice_thickness
    smooth.inputs.surface_fwhm = surf_fwhm
    if vol_fwhm is None:
        vol_fwhm = 2 * slice_thickness
    smooth.inputs.vol_fwhm = vol_fwhm
    wf.connect(filter2, 'out_res', smooth, 'in_file')
    wf.connect(register, 'out_reg_file', smooth, 'reg_file')

    # Bandpass filter the data
    bandpass = MapNode(fsl.TemporalFilter(),
                       iterfield=['in_file'],
                       name='bandpassfilter')
    if highpass_freq < 0:
        bandpass.inputs.highpass_sigma = -1
    else:
        bandpass.inputs.highpass_sigma = 1. / (2 * TR * highpass_freq)
    if lowpass_freq < 0:
        bandpass.inputs.lowpass_sigma = -1
    else:
        bandpass.inputs.lowpass_sigma = 1. / (2 * TR * lowpass_freq)
    wf.connect(smooth, 'smoothed_file', bandpass, 'in_file')

    # Convert aparc to subject functional space
    aparctransform = wmcsftransform.clone("aparctransform")
    if fieldmap_images:
        wf.connect(fieldmap, 'exf_mask', aparctransform, 'source_file')
    else:
        wf.connect(calc_median, 'median_file', aparctransform, 'source_file')
    wf.connect(register, 'out_reg_file', aparctransform, 'reg_file')
    wf.connect(fssource, ('aparc_aseg', get_aparc_aseg), aparctransform,
               'target_file')

    # Sample the average time series in aparc ROIs
    sampleaparc = MapNode(freesurfer.SegStats(avgwf_txt_file=True,
                                              default_color_table=True),
                          iterfield=['in_file'],
                          name='aparc_ts')
    sampleaparc.inputs.segment_id = ([8] + list(range(10, 14)) + [17, 18, 26, 47] +
                                     list(range(49, 55)) + [58] + list(range(1001, 1036)) +
                                     list(range(2001, 2036)))

    wf.connect(aparctransform, 'transformed_file', sampleaparc,
               'segmentation_file')
    wf.connect(bandpass, 'out_file', sampleaparc, 'in_file')

    # Sample the time series onto the surface of the target surface. Performs
    # sampling into left and right hemisphere
    target = Node(IdentityInterface(fields=['target_subject']), name='target')
    target.iterables = ('target_subject', filename_to_list(target_subject))

    samplerlh = MapNode(freesurfer.SampleToSurface(),
                        iterfield=['source_file'],
                        name='sampler_lh')
    samplerlh.inputs.sampling_method = "average"
    samplerlh.inputs.sampling_range = (0.1, 0.9, 0.1)
    samplerlh.inputs.sampling_units = "frac"
    samplerlh.inputs.interp_method = "trilinear"
    #samplerlh.inputs.cortex_mask = True
    samplerlh.inputs.out_type = 'niigz'
    samplerlh.inputs.subjects_dir = os.environ['SUBJECTS_DIR']

    samplerrh = samplerlh.clone('sampler_rh')

    samplerlh.inputs.hemi = 'lh'
    wf.connect(bandpass, 'out_file', samplerlh, 'source_file')
    wf.connect(register, 'out_reg_file', samplerlh, 'reg_file')
    wf.connect(target, 'target_subject', samplerlh, 'target_subject')

    samplerrh.set_input('hemi', 'rh')
    wf.connect(bandpass, 'out_file', samplerrh, 'source_file')
    wf.connect(register, 'out_reg_file', samplerrh, 'reg_file')
    wf.connect(target, 'target_subject', samplerrh, 'target_subject')

    # Combine left and right hemisphere to text file
    combiner = MapNode(Function(input_names=['left', 'right'],
                                output_names=['out_file'],
                                function=combine_hemi,
                                imports=imports),
                       iterfield=['left', 'right'],
                       name="combiner")
    wf.connect(samplerlh, 'out_file', combiner, 'left')
    wf.connect(samplerrh, 'out_file', combiner, 'right')

    # Compute registration between the subject's structural and MNI template
    # This is currently set to perform a very quick registration. However, the
    # registration can be made significantly more accurate for cortical
    # structures by increasing the number of iterations
    # All parameters are set using the example from:
    # https://github.com/stnava/ANTs/blob/master/Scripts/newAntsExample.sh
    reg = Node(ants.Registration(), name='antsRegister')
    reg.inputs.output_transform_prefix = "output_"
    reg.inputs.transforms = ['Translation', 'Rigid', 'Affine', 'SyN']
    reg.inputs.transform_parameters = [(0.1, ), (0.1, ), (0.1, ),
                                       (0.2, 3.0, 0.0)]
    # reg.inputs.number_of_iterations = ([[10000, 111110, 11110]]*3 +
    #                                    [[100, 50, 30]])
    reg.inputs.number_of_iterations = [[100, 100, 100]] * 3 + [[100, 20, 10]]
    reg.inputs.dimension = 3
    reg.inputs.write_composite_transform = True
    reg.inputs.collapse_output_transforms = False
    reg.inputs.metric = ['Mattes'] * 3 + [['Mattes', 'CC']]
    reg.inputs.metric_weight = [1] * 3 + [[0.5, 0.5]]
    reg.inputs.radius_or_number_of_bins = [32] * 3 + [[32, 4]]
    reg.inputs.sampling_strategy = ['Regular'] * 3 + [[None, None]]
    reg.inputs.sampling_percentage = [0.3] * 3 + [[None, None]]
    reg.inputs.convergence_threshold = [1.e-8] * 3 + [-0.01]
    reg.inputs.convergence_window_size = [20] * 3 + [5]
    reg.inputs.smoothing_sigmas = [[4, 2, 1]] * 3 + [[1, 0.5, 0]]
    reg.inputs.sigma_units = ['vox'] * 4
    reg.inputs.shrink_factors = [[6, 4, 2]] + [[3, 2, 1]] * 2 + [[4, 2, 1]]
    reg.inputs.use_estimate_learning_rate_once = [True] * 4
    reg.inputs.use_histogram_matching = [False] * 3 + [True]
    reg.inputs.output_warped_image = 'output_warped_image.nii.gz'
    reg.inputs.fixed_image = \
        os.path.abspath('OASIS-30_Atropos_template_in_MNI152_2mm.nii.gz')
    reg.inputs.num_threads = 4
    reg.plugin_args = {'qsub_args': '-l nodes=1:ppn=4'}

    # Convert T1.mgz to nifti for using with ANTS
    convert = Node(freesurfer.MRIConvert(out_type='niigz'), name='convert2nii')
    wf.connect(fssource, 'T1', convert, 'in_file')

    # Mask the T1.mgz file with the brain mask computed earlier
    maskT1 = Node(fsl.BinaryMaths(operation='mul'), name='maskT1')
    wf.connect(mask, 'binary_file', maskT1, 'operand_file')
    wf.connect(convert, 'out_file', maskT1, 'in_file')
    wf.connect(maskT1, 'out_file', reg, 'moving_image')

    # Convert the BBRegister transformation to ANTS ITK format
    convert2itk = MapNode(C3dAffineTool(),
                          iterfield=['transform_file', 'source_file'],
                          name='convert2itk')
    convert2itk.inputs.fsl2ras = True
    convert2itk.inputs.itk_transform = True
    wf.connect(register, 'out_fsl_file', convert2itk, 'transform_file')
    if fieldmap_images:
        wf.connect(fieldmap, 'exf_mask', convert2itk, 'source_file')
    else:
        wf.connect(calc_median, 'median_file', convert2itk, 'source_file')
    wf.connect(convert, 'out_file', convert2itk, 'reference_file')

    # Concatenate the affine and ants transforms into a list
    pickfirst = lambda x: x[0]
    merge = MapNode(Merge(2), iterfield=['in2'], name='mergexfm')
    wf.connect(convert2itk, 'itk_transform', merge, 'in2')
    wf.connect(reg, ('composite_transform', pickfirst), merge, 'in1')

    # Apply the combined transform to the time series file
    sample2mni = MapNode(ants.ApplyTransforms(),
                         iterfield=['input_image', 'transforms'],
                         name='sample2mni')
    sample2mni.inputs.input_image_type = 3
    sample2mni.inputs.interpolation = 'BSpline'
    sample2mni.inputs.invert_transform_flags = [False, False]
    sample2mni.inputs.reference_image = \
        os.path.abspath('OASIS-30_Atropos_template_in_MNI152_2mm.nii.gz')
    sample2mni.inputs.terminal_output = 'file'
    wf.connect(bandpass, 'out_file', sample2mni, 'input_image')
    wf.connect(merge, 'out', sample2mni, 'transforms')

    # Sample the time series file for each subcortical roi
    ts2txt = MapNode(Function(
        input_names=['timeseries_file', 'label_file', 'indices'],
        output_names=['out_file'],
        function=extract_subrois,
        imports=imports),
                     iterfield=['timeseries_file'],
                     name='getsubcortts')
    ts2txt.inputs.indices = ([8] + list(range(10, 14)) + [17, 18, 26, 47] +
                             list(range(49, 55)) + [58])
    ts2txt.inputs.label_file = \
        os.path.abspath(('OASIS-TRT-20_jointfusion_DKT31_CMA_labels_in_MNI152_'
                         '2mm.nii.gz'))
    wf.connect(sample2mni, 'output_image', ts2txt, 'timeseries_file')

    # Save the relevant data into an output directory
    datasink = Node(interface=DataSink(), name="datasink")
    datasink.inputs.base_directory = sink_directory
    datasink.inputs.container = subject_id
    datasink.inputs.substitutions = [('_target_subject_', '')]
    datasink.inputs.regexp_substitutions = (r'(/_.*(\d+/))', r'/run\2')
    wf.connect(despiker, 'out_file', datasink, 'resting.qa.despike')
    wf.connect(realign, 'par_file', datasink, 'resting.qa.motion')
    wf.connect(tsnr, 'tsnr_file', datasink, 'resting.qa.tsnr')
    wf.connect(tsnr, 'mean_file', datasink, 'resting.qa.tsnr.@mean')
    wf.connect(tsnr, 'stddev_file', datasink, 'resting.qa.@tsnr_stddev')
    if fieldmap_images:
        wf.connect(fieldmap, 'exf_mask', datasink, 'resting.reference')
    else:
        wf.connect(calc_median, 'median_file', datasink, 'resting.reference')
    wf.connect(art, 'norm_files', datasink, 'resting.qa.art.@norm')
    wf.connect(art, 'intensity_files', datasink, 'resting.qa.art.@intensity')
    wf.connect(art, 'outlier_files', datasink, 'resting.qa.art.@outlier_files')
    wf.connect(mask, 'binary_file', datasink, 'resting.mask')
    wf.connect(masktransform, 'transformed_file', datasink,
               'resting.mask.@transformed_file')
    wf.connect(register, 'out_reg_file', datasink,
               'resting.registration.bbreg')
    wf.connect(reg, ('composite_transform', pickfirst), datasink,
               'resting.registration.ants')
    wf.connect(register, 'min_cost_file', datasink,
               'resting.qa.bbreg.@mincost')
    wf.connect(smooth, 'smoothed_file', datasink,
               'resting.timeseries.fullpass')
    wf.connect(bandpass, 'out_file', datasink, 'resting.timeseries.bandpassed')
    wf.connect(sample2mni, 'output_image', datasink, 'resting.timeseries.mni')
    wf.connect(createfilter1, 'out_files', datasink,
               'resting.regress.@regressors')
    wf.connect(createfilter2, 'out_files', datasink,
               'resting.regress.@compcorr')
    wf.connect(sampleaparc, 'summary_file', datasink,
               'resting.parcellations.aparc')
    wf.connect(sampleaparc, 'avgwf_txt_file', datasink,
               'resting.parcellations.aparc.@avgwf')
    wf.connect(ts2txt, 'out_file', datasink,
               'resting.parcellations.grayo.@subcortical')
    datasink2 = Node(interface=DataSink(), name="datasink2")
    datasink2.inputs.base_directory = sink_directory
    datasink2.inputs.container = subject_id
    datasink2.inputs.substitutions = [('_target_subject_', '')]
    datasink2.inputs.regexp_substitutions = (r'(/_.*(\d+/))', r'/run\2')
    wf.connect(combiner, 'out_file', datasink2,
               'resting.parcellations.grayo.@surface')
    return wf
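A hedged usage sketch; every value is a placeholder, SUBJECTS_DIR must be set in the environment, and the OASIS template files referenced above must sit in the run directory:

wf = create_workflow(files=['/data/sub-01/rest_run1.nii.gz'],       # placeholder
                     subject_id='sub-01',                           # placeholder
                     n_vol=4,                                       # drop 4 dummy volumes
                     TR=2.0,
                     slice_times=[i * 2.0 / 32 for i in range(32)], # illustrative ascending order
                     slice_thickness=4.0,
                     sink_directory='/output')                      # placeholder
wf.base_dir = '/scratch/work'
wf.run(plugin='MultiProc', plugin_args={'n_procs': 4})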
Code Example #9
def test_FreeSurferSource_outputs():
    output_map = dict(
        BA_stats=dict(altkey='BA', loc='stats'),
        T1=dict(loc='mri'),
        annot=dict(altkey='*annot', loc='label'),
        aparc_a2009s_stats=dict(altkey='aparc.a2009s', loc='stats'),
        aparc_aseg=dict(altkey='aparc*aseg', loc='mri'),
        aparc_stats=dict(altkey='aparc', loc='stats'),
        aseg=dict(loc='mri'),
        aseg_stats=dict(altkey='aseg', loc='stats'),
        brain=dict(loc='mri'),
        brainmask=dict(loc='mri'),
        curv=dict(loc='surf'),
        curv_stats=dict(altkey='curv', loc='stats'),
        entorhinal_exvivo_stats=dict(altkey='entorhinal_exvivo', loc='stats'),
        filled=dict(loc='mri'),
        inflated=dict(loc='surf'),
        label=dict(altkey='*label', loc='label'),
        norm=dict(loc='mri'),
        nu=dict(loc='mri'),
        orig=dict(loc='mri'),
        pial=dict(loc='surf'),
        rawavg=dict(loc='mri'),
        ribbon=dict(altkey='*ribbon', loc='mri'),
        smoothwm=dict(loc='surf'),
        sphere=dict(loc='surf'),
        sphere_reg=dict(altkey='sphere.reg', loc='surf'),
        sulc=dict(loc='surf'),
        thickness=dict(loc='surf'),
        volume=dict(loc='surf'),
        white=dict(loc='surf'),
        wm=dict(loc='mri'),
        wmparc=dict(loc='mri'),
        wmparc_stats=dict(altkey='wmparc', loc='stats'),
    )
    outputs = FreeSurferSource.output_spec()

    for key, metadata in output_map.items():
        for metakey, value in metadata.items():
            yield assert_equal, getattr(outputs.traits()[key], metakey), value
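The loc/altkey metadata verified above describe how FreeSurferSource resolves each output: loc is the subdirectory under <subjects_dir>/<subject_id>, and altkey replaces the trait name in the filename lookup. Roughly, the resulting paths are:

# T1         -> <subjects_dir>/<subject_id>/mri/T1.mgz           (loc='mri')
# aseg_stats -> <subjects_dir>/<subject_id>/stats/aseg.stats     (loc='stats', altkey='aseg')
# sphere_reg -> <subjects_dir>/<subject_id>/surf/lh.sphere.reg   (loc='surf', altkey='sphere.reg')
# ribbon     -> lh.ribbon.mgz, rh.ribbon.mgz, ribbon.mgz         (altkey='*ribbon' matches by glob)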
Code Example #10
def test_fs(c, name='test_fs'):
    """Constructs a workflow to test FreeSurfer.

    Inputs
    ------
    inputspec.subject_id : FreeSurfer subject id
    inputspec.sd : FreeSurfer SUBJECTS_DIR

    Outputs
    -------
    outputspec.outfile : brainmask.mgz

    Returns
    -------
    a nipype workflow
    """
    import nipype.pipeline.engine as pe
    import nipype.interfaces.utility as util
    import nipype.interfaces.io as nio
    import nipype.interfaces.freesurfer as fs
    from nipype.interfaces.io import FreeSurferSource

    workflow = pe.Workflow(name=name)

    # Define Nodes
    inputspec = pe.Node(
        interface=util.IdentityInterface(fields=['subject_id', 'sd']),
        name='inputspec')

    inputnode = pe.Node(
        interface=util.IdentityInterface(fields=["subject_id"]),
        name="subject_names")
    inputnode.iterables = ("subject_id", c.subjects)
    workflow.connect(inputnode, "subject_id", inputspec, "subject_id")

    fssource = pe.Node(interface=FreeSurferSource(), name='fssource')

    convert1 = pe.Node(interface=fs.MRIConvert(), name='converter1')

    convert2 = pe.Node(interface=fs.MRIConvert(), name='converter2')

    convert1.inputs.out_type = 'niigz'
    convert1.inputs.in_type = 'mgz'

    convert2.inputs.out_type = 'mgz'
    convert2.inputs.in_type = 'niigz'

    outputspec = pe.Node(interface=util.IdentityInterface(fields=['outfile']),
                         name='outputspec')

    # Connect Nodes
    workflow.connect(inputspec, 'subject_id', fssource, 'subject_id')
    workflow.connect(inputspec, 'sd', fssource, 'subjects_dir')

    workflow.connect(fssource, 'brainmask', convert1, 'in_file')
    workflow.connect(convert1, 'out_file', convert2, 'in_file')
    workflow.connect(convert2, 'out_file', outputspec, 'outfile')

    workflow.base_dir = c.working_dir
    workflow.inputs.inputspec.sd = c.surf_dir
    sinker = pe.Node(nio.DataSink(), name='sinker')
    sinker.inputs.base_directory = c.sink_dir
    workflow.connect(inputnode, "subject_id", sinker, "container")
    workflow.connect(outputspec, 'outfile', sinker, 'test_fs.result')
    workflow.config = {'execution': {'crashdump_dir': c.crash_dir}}

    return workflow
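A hedged usage sketch: c stands in for the project's config object, with only the attributes test_fs actually reads, all paths being placeholders:

from types import SimpleNamespace

c = SimpleNamespace(subjects=['sub-01'],           # iterated by the subject_names node
                    working_dir='/scratch/work',
                    surf_dir='/data/freesurfer',
                    sink_dir='/output',
                    crash_dir='/scratch/crash')
wf = test_fs(c)
# wf.run()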
Code Example #11
# (snippet truncated at the top: the keyword arguments below belong to an
# ants.Registration interface wrapped in a Node, closed by name='antsreg')
                            number_of_iterations=[[1000, 500, 250, 100],
                                                  [1000, 500, 250, 100],
                                                  [100, 70, 50, 20]],
                            radius_or_number_of_bins=[32, 32, 4],
                            sampling_percentage=[0.25, 0.25, 1],
                            sampling_strategy=['Regular', 'Regular', 'None'],
                            shrink_factors=[[8, 4, 2, 1]] * 3,
                            smoothing_sigmas=[[3, 2, 1, 0]] * 3,
                            transform_parameters=[(0.1, ), (0.1, ),
                                                  (0.1, 3.0, 0.0)],
                            use_histogram_matching=True,
                            write_composite_transform=True),
               name='antsreg')

# FreeSurferSource - Data grabber specific for FreeSurfer data
fssource = Node(FreeSurferSource(subjects_dir=fs_dir),
                run_without_submitting=True,
                name='fssource')

# Convert FreeSurfer's MGZ format into NIfTI format
convert2nii = Node(MRIConvert(out_type='nii'), name='convert2nii')

# Coregister the median to the surface
bbregister = Node(BBRegister(init='fsl', contrast_type='t2',
                             out_fsl_file=True),
                  name='bbregister')

# Convert the BBRegister transformation to ANTS ITK format
convert2itk = Node(C3dAffineTool(fsl2ras=True, itk_transform=True),
                   name='convert2itk')
Code Example #12
subj_proc = project_home + '/proc/subject'
group_proc = project_home + '/proc/group'
template_proc = project_home + '/proc/template'
#subject_info = project_home + '/misc/subjects.csv'
#template_sub = ['011-T1']
template_sub = listdir(fs_subjdir)

#set default FreeSurfer subjects dir
FSCommand.set_default_subjects_dir(fs_subjdir)

# In[ ]:

######### File handling #########

#Pass in list to freesurfer source node (subs)
fs_source = Node(FreeSurferSource(subjects_dir=fs_subjdir), name='fs_source')
fs_source.iterables = ('subject_id', template_sub)

#set up datasink
substitutions = [('_subject_id_', '')]
datasink = Node(DataSink(base_directory=template_proc,
                         substitutions=substitutions),
                name='datasink')

# ## Template Creation Workflow
# Below are the cells associated with template creation:
# * Unique functions
#     - make3DTemplate wraps the ANTs antsMultivariateTemplateConstruction2 script
# * Template creation Nodes
# * Template creation workflow steps
#     - Convert template subjects' FreeSurfer T1 images to nifti
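A minimal sketch of the first step those bullets describe, converting each template subject's FreeSurfer T1 to NIfTI and sinking the result; the node wiring and sink label are illustrative, not from the original script:

from nipype import Node, Workflow
from nipype.interfaces.freesurfer import MRIConvert

convertT1 = Node(MRIConvert(out_type='niigz'), name='convertT1')

template_wf = Workflow(name='template_creation', base_dir=template_proc)
template_wf.connect([(fs_source, convertT1, [('T1', 'in_file')]),
                     (convertT1, datasink, [('out_file', 'template_T1.@nii')])])
# template_wf.run()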
Code Example #13
def combine_report(c, first_c=foo0, prep_c=foo1, fx_c=None, thr=2.326, csize=30, fx=False):
    import os

    from nipype.interfaces import fsl
    from nipype.interfaces.io import FreeSurferSource
    import nipype.pipeline.engine as pe
    import nipype.interfaces.utility as util
    import nipype.interfaces.io as nio

    if not fx:
        workflow = pe.Workflow(name='first_level_report')
        #dataflow = get_data(first_c)
    else:
        workflow = pe.Workflow(name='fixedfx_report')
        #dataflow =  get_fx_data(fx_c)
    
    infosource = pe.Node(util.IdentityInterface(fields=['subject_id']),
                         name='subject_names')

    """
    if c.test_mode:
        infosource.iterables = ('subject_id', [c.subjects[0]])
    else:
        infosource.iterables = ('subject_id', c.subjects)
    
    infosource1 = pe.Node(util.IdentityInterface(fields=['fwhm']),
                         name='fwhms')
    infosource1.iterables = ('fwhm', prep_c.fwhm)
    """

    dataflow = c.datagrabber.create_dataflow()

    fssource = pe.Node(interface=FreeSurferSource(), name='fssource')
    
    #workflow.connect(infosource, 'subject_id', dataflow, 'subject_id')
    #workflow.connect(infosource1, 'fwhm', dataflow, 'fwhm')

    infosource = dataflow.get_node("subject_id_iterable")

    workflow.connect(infosource, 'subject_id', fssource, 'subject_id')
    fssource.inputs.subjects_dir = prep_c.surf_dir
    
    imgflow = img_wkflw(thr=thr,csize=csize)
    
    # adding cluster correction before sending to imgflow
    
    smoothest = pe.MapNode(fsl.SmoothEstimate(), name='smooth_estimate',
                           iterfield=['zstat_file'])
    workflow.connect(dataflow, 'datagrabber.func', smoothest, 'zstat_file')
    workflow.connect(dataflow, 'datagrabber.mask', smoothest, 'mask_file')

    cluster = pe.MapNode(fsl.Cluster(), name='cluster',
                         iterfield=['in_file', 'dlh', 'volume'])
    workflow.connect(smoothest, 'dlh', cluster, 'dlh')
    workflow.connect(smoothest, 'volume', cluster, 'volume')
    cluster.inputs.connectivity = csize
    cluster.inputs.threshold = thr
    cluster.inputs.out_threshold_file = True
    workflow.connect(dataflow, 'datagrabber.func', cluster, 'in_file')
    
    workflow.connect(cluster, 'threshold_file', imgflow, 'inputspec.in_file')
    #workflow.connect(dataflow,'func',imgflow, 'inputspec.in_file')
    workflow.connect(dataflow, 'datagrabber.mask', imgflow, 'inputspec.mask_file')
    workflow.connect(dataflow, 'datagrabber.reg', imgflow, 'inputspec.reg_file')

    workflow.connect(fssource, 'brain', imgflow, 'inputspec.anat_file')
    
    workflow.connect(infosource, 'subject_id', imgflow, 'inputspec.subject_id')
    imgflow.inputs.inputspec.fsdir = prep_c.surf_dir
    
    writereport = pe.Node(util.Function(input_names=["cs",
                                                     "locations",
                                                     "percents",
                                                     "in_files",
                                                     "des_mat_cov",
                                                     "des_mat",
                                                     "subjects",
                                                     "meanval",
                                                     "imagefiles",
                                                     "surface_ims",
                                                     "thr",
                                                     "csize",
                                                     "fwhm",
                                                     "onset_images"],
                                        output_names=["report", "elements"],
                                        function=write_report),
                          name="writereport")
    
    
    # add plot detrended timeseries with onsets if block
    if c.is_block_design:
        plottseries = tsnr_roi(plot=True, onsets=True)
        plottseries.inputs.inputspec.TR = prep_c.TR
        workflow.connect(dataflow, 'datagrabber.reg', plottseries, 'inputspec.reg_file')
        workflow.connect(fssource, ('aparc_aseg', pickfirst), plottseries, 'inputspec.aparc_aseg')
        workflow.connect(infosource, 'subject_id', plottseries, 'inputspec.subject')
        workflow.connect(dataflow, 'datagrabber.detrended', plottseries, 'inputspec.tsnr_file')

        subjectinfo = pe.Node(util.Function(input_names=['subject_id'], output_names=['output']), name='subjectinfo')
        subjectinfo.inputs.function_str = first_c.subjectinfo

        workflow.connect(infosource, 'subject_id', subjectinfo, 'subject_id')
        workflow.connect(subjectinfo, 'output', plottseries, 'inputspec.onsets')
        plottseries.inputs.inputspec.input_units = first_c.input_units
        workflow.connect(plottseries, 'outputspec.out_file', writereport, 'onset_images')
    else:
        writereport.inputs.onset_images = None
    
    
    
    #writereport = pe.Node(interface=ReportSink(),name='reportsink')
    #writereport.inputs.base_directory = os.path.join(c.sink_dir,'analyses','func')
    
    workflow.connect(infosource, 'subject_id', writereport, 'subjects')
    #workflow.connect(infosource, 'subject_id', writereport, 'container')
    try:
        infosource1 = dataflow.get_node('fwhm_iterable')
        workflow.connect(infosource1, 'fwhm', writereport, 'fwhm')
    except:
        writereport.inputs.fwhm = prep_c.fwhm[0]

    writereport.inputs.thr = thr
    writereport.inputs.csize = csize
    
    makesurfaceplots = pe.Node(util.Function(input_names=['con_image',
                                                          'reg_file',
                                                          'subject_id',
                                                          'thr',
                                                          'sd'],
                                             output_names=['surface_ims',
                                                           'surface_mgzs'],
                                             function=make_surface_plots),
                               name='make_surface_plots')
    
    workflow.connect(infosource, 'subject_id', makesurfaceplots, 'subject_id')
    
    makesurfaceplots.inputs.thr = thr
    makesurfaceplots.inputs.sd = prep_c.surf_dir
    
    sinker = pe.Node(nio.DataSink(), name='sinker')
    sinker.inputs.base_directory = os.path.join(c.sink_dir)

    workflow.connect(infosource, 'subject_id', sinker, 'container')
    workflow.connect(dataflow, 'datagrabber.func', makesurfaceplots, 'con_image')
    workflow.connect(dataflow, 'datagrabber.reg', makesurfaceplots, 'reg_file')

    workflow.connect(dataflow, 'datagrabber.des_mat', writereport, 'des_mat')
    workflow.connect(dataflow, 'datagrabber.des_mat_cov', writereport, 'des_mat_cov')
    workflow.connect(imgflow, 'outputspec.cs', writereport, 'cs')
    workflow.connect(imgflow, 'outputspec.locations', writereport, 'locations')
    workflow.connect(imgflow, 'outputspec.percents', writereport, 'percents')
    workflow.connect(imgflow, 'outputspec.meanval', writereport, 'meanval')
    workflow.connect(imgflow, 'outputspec.imagefiles', writereport, 'imagefiles')

    workflow.connect(dataflow, 'datagrabber.func', writereport, 'in_files')
    workflow.connect(makesurfaceplots, 'surface_ims', writereport, 'surface_ims')
    if not fx:
        workflow.connect(writereport, "report", sinker, "first_level_report")
    else:
        workflow.connect(writereport, "report", sinker, "fixed_fx_report")
    
    
    return workflow
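combine_report references several helpers defined elsewhere in the project (img_wkflw, tsnr_roi, write_report, make_surface_plots, pickfirst). The first four are project-specific; pickfirst is presumably the same one-liner used in Code Example #8:

# Assumed definition, matching Code Example #8:
pickfirst = lambda x: x[0]  # select the first file from a list output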
Code Example #14
File: resampling.py Project: butellyn/aslprep
def init_asl_surf_wf(mem_gb,
                     surface_spaces,
                     medial_surface_nan,
                     name='asl_surf_wf'):
    """
    Sample functional images to FreeSurfer surfaces.

    For each vertex, the cortical ribbon is sampled at six points (spaced 20% of thickness apart)
    and averaged.
    Outputs are in GIFTI format.

    Workflow Graph
        .. workflow::
            :graph2use: colored
            :simple_form: yes

            from aslprep.workflows.asl import init_asl_surf_wf
            wf = init_asl_surf_wf(mem_gb=0.1,
                                   surface_spaces=['fsnative', 'fsaverage5'],
                                   medial_surface_nan=False)

    Parameters
    ----------
    surface_spaces : :obj:`list`
        List of FreeSurfer surface-spaces (either ``fsaverage{3,4,5,6,}`` or ``fsnative``)
        the functional images are to be resampled to.
        For ``fsnative``, images will be resampled to the individual subject's
        native surface.
    medial_surface_nan : :obj:`bool`
        Replace medial wall values with NaNs on functional GIFTI files

    Inputs
    ------
    source_file
        Motion-corrected ASL series in T1 space
    subjects_dir
        FreeSurfer SUBJECTS_DIR
    subject_id
        FreeSurfer subject ID
    t1w2fsnative_xfm
        LTA-style affine matrix translating from T1w to FreeSurfer-conformed subject space

    Outputs
    -------
    surfaces
        ASL series, resampled to FreeSurfer surfaces

    """
    from nipype.interfaces.io import FreeSurferSource
    from ...niworkflows.engine.workflows import LiterateWorkflow as Workflow
    from ...niworkflows.interfaces.surf import GiftiSetAnatomicalStructure

    workflow = Workflow(name=name)
    workflow.__desc__ = """\
The ASL time-series were resampled onto the following surfaces
(FreeSurfer reconstruction nomenclature):
{out_spaces}.
""".format(out_spaces=', '.join(['*%s*' % s for s in surface_spaces]))

    inputnode = pe.Node(niu.IdentityInterface(fields=[
        'source_file', 'subject_id', 'subjects_dir', 't1w2fsnative_xfm'
    ]),
                        name='inputnode')
    itersource = pe.Node(niu.IdentityInterface(fields=['target']),
                         name='itersource')
    itersource.iterables = [('target', surface_spaces)]

    get_fsnative = pe.Node(FreeSurferSource(),
                           name='get_fsnative',
                           run_without_submitting=True)

    def select_target(subject_id, space):
        """Get the target subject ID, given a source subject ID and a target space."""
        return subject_id if space == 'fsnative' else space

    targets = pe.Node(niu.Function(function=select_target),
                      name='targets',
                      run_without_submitting=True,
                      mem_gb=DEFAULT_MEMORY_MIN_GB)

    # Rename the source file to the output space to simplify naming later
    rename_src = pe.Node(niu.Rename(format_string='%(subject)s',
                                    keep_ext=True),
                         name='rename_src',
                         run_without_submitting=True,
                         mem_gb=DEFAULT_MEMORY_MIN_GB)
    itk2lta = pe.Node(niu.Function(function=_itk2lta),
                      name="itk2lta",
                      run_without_submitting=True)
    sampler = pe.MapNode(fs.SampleToSurface(
        cortex_mask=True,
        interp_method='trilinear',
        out_type='gii',
        override_reg_subj=True,
        sampling_method='average',
        sampling_range=(0, 1, 0.2),
        sampling_units='frac',
    ),
                         iterfield=['hemi'],
                         name='sampler',
                         mem_gb=mem_gb * 3)
    sampler.inputs.hemi = ['lh', 'rh']
    update_metadata = pe.MapNode(GiftiSetAnatomicalStructure(),
                                 iterfield=['in_file'],
                                 name='update_metadata',
                                 mem_gb=DEFAULT_MEMORY_MIN_GB)

    outputnode = pe.JoinNode(
        niu.IdentityInterface(fields=['surfaces', 'target']),
        joinsource='itersource',
        name='outputnode')

    workflow.connect([
        (inputnode, get_fsnative, [('subject_id', 'subject_id'),
                                   ('subjects_dir', 'subjects_dir')]),
        (inputnode, targets, [('subject_id', 'subject_id')]),
        (inputnode, rename_src, [('source_file', 'in_file')]),
        (inputnode, itk2lta, [('source_file', 'src_file'),
                              ('t1w2fsnative_xfm', 'in_file')]),
        (get_fsnative, itk2lta, [('T1', 'dst_file')]),
        (inputnode, sampler, [('subjects_dir', 'subjects_dir'),
                              ('subject_id', 'subject_id')]),
        (itersource, targets, [('target', 'space')]),
        (itersource, rename_src, [('target', 'subject')]),
        (itk2lta, sampler, [('out', 'reg_file')]),
        (targets, sampler, [('out', 'target_subject')]),
        (rename_src, sampler, [('out_file', 'source_file')]),
        (update_metadata, outputnode, [('out_file', 'surfaces')]),
        (itersource, outputnode, [('target', 'target')]),
    ])

    if not medial_surface_nan:
        workflow.connect(sampler, 'out_file', update_metadata, 'in_file')
        return workflow

    from ...niworkflows.interfaces.freesurfer import MedialNaNs
    # Refine if medial vertices should be NaNs
    medial_nans = pe.MapNode(MedialNaNs(),
                             iterfield=['in_file'],
                             name='medial_nans',
                             mem_gb=DEFAULT_MEMORY_MIN_GB)

    workflow.connect([
        (inputnode, medial_nans, [('subjects_dir', 'subjects_dir')]),
        (sampler, medial_nans, [('out_file', 'in_file')]),
        (medial_nans, update_metadata, [('out_file', 'in_file')]),
    ])
    return workflow
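For orientation, here is a minimal usage sketch of this workflow factory. The factory name `init_asl_surf_wf` and every path below are assumptions inferred from the parameters referenced in the body above (`mem_gb`, `surface_spaces`, `medial_surface_nan`, `name`), not confirmed by the source.

# Hypothetical invocation -- factory name and paths are placeholders.
wf = init_asl_surf_wf(
    mem_gb=1.0,
    surface_spaces=['fsnative', 'fsaverage5'],
    medial_surface_nan=True,
    name='asl_surf_wf',
)
wf.inputs.inputnode.source_file = 'sub-01_asl.nii.gz'
wf.inputs.inputnode.subject_id = 'sub-01'
wf.inputs.inputnode.subjects_dir = '/data/freesurfer'
wf.inputs.inputnode.t1w2fsnative_xfm = 'from-T1w_to-fsnative_xfm.txt'
wf.run()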
コード例 #15
from nipype.interfaces.freesurfer import ApplyVolTransform, Tkregister2
from nipype.interfaces.fsl import FLIRT, ConvertXFM
from nipype.interfaces.io import FreeSurferSource
from nipype.interfaces.utility import IdentityInterface
from nipype.pipeline.engine import Node, Workflow


def make_w_freesurfer2func():
    n_in = Node(IdentityInterface(fields=[
        'T1w',
        'mean',
        'subject',  # without sub-
        ]), name='input')

    n_out = Node(IdentityInterface(fields=[
        'brain',
        'func2struct',
        'struct2func',
        'freesurfer2func',
        'func2freesurfer',
        ]), name='output')

    freesurfer = Node(FreeSurferSource(), name='freesurfer')
    freesurfer.inputs.subjects_dir = '/Fridge/R01_BAIR/freesurfer'

    n_fs2s = Node(Tkregister2(), name='freesurfer2struct')
    n_fs2s.inputs.reg_header = True
    n_fs2s.inputs.fsl_out = 'mat_freesurfer2struct.mat'
    n_fs2s.inputs.noedit = True

    n_s2fs = Node(ConvertXFM(), name='struct2freesurfer')
    n_s2fs.inputs.invert_xfm = True
    n_s2fs.inputs.out_file = 'mat_struct2freesurfer.mat'

    n_vol = Node(ApplyVolTransform(), name='vol2vol')
    n_vol.inputs.reg_header = True
    n_vol.inputs.transformed_file = 'brain.nii.gz'

    n_f2s = Node(FLIRT(), name='func2struct')
    n_f2s.inputs.cost = 'corratio'
    n_f2s.inputs.dof = 6
    n_f2s.inputs.no_search = True
    n_f2s.inputs.output_type = 'NIFTI_GZ'
    n_f2s.inputs.out_matrix_file = 'mat_func2struct.mat'

    n_s2f = Node(ConvertXFM(), name='struct2func')
    n_s2f.inputs.invert_xfm = True
    n_s2f.inputs.out_file = 'mat_struct2func.mat'

    n_f2fs = Node(ConvertXFM(), name='func2freesurfer')
    n_f2fs.inputs.concat_xfm = True
    n_f2fs.inputs.out_file = 'mat_func2freesurfer.mat'

    n_fs2f = Node(ConvertXFM(), name='freesurfer2func')
    n_fs2f.inputs.invert_xfm = True
    n_fs2f.inputs.out_file = 'mat_freesurfer2func.mat'

    w = Workflow('coreg_3T_fs')
    w.connect(n_in, 'subject', freesurfer, 'subject_id')
    w.connect(freesurfer, 'orig', n_fs2s, 'moving_image')
    w.connect(freesurfer, 'rawavg', n_fs2s, 'target_image')
    w.connect(freesurfer, 'brain', n_vol, 'source_file')
    w.connect(freesurfer, 'rawavg', n_vol, 'target_file')
    w.connect(n_in, 'mean', n_f2s, 'in_file')
    w.connect(n_vol, 'transformed_file', n_f2s, 'reference')
    w.connect(n_f2s, 'out_matrix_file', n_s2f, 'in_file')
    w.connect(n_fs2s, 'fsl_file', n_s2fs, 'in_file')
    w.connect(n_f2s, 'out_matrix_file', n_f2fs, 'in_file')
    w.connect(n_s2fs, 'out_file', n_f2fs, 'in_file2')
    w.connect(n_f2fs, 'out_file', n_fs2f, 'in_file')
    w.connect(n_f2s, 'out_matrix_file', n_out, 'func2struct')
    w.connect(n_s2f, 'out_file', n_out, 'struct2func')
    w.connect(n_fs2f, 'out_file', n_out, 'freesurfer2func')
    w.connect(n_f2fs, 'out_file', n_out, 'func2freesurfer')
    w.connect(freesurfer, 'brain', n_out, 'brain')

    return w
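A hedged usage sketch for the factory above; the scratch directory and file names are placeholders (note that `subjects_dir` is hard-coded inside the workflow).

w = make_w_freesurfer2func()
w.base_dir = '/tmp/coreg_work'  # placeholder scratch directory
w.inputs.input.T1w = 'sub-001_T1w.nii.gz'
w.inputs.input.mean = 'mean_bold.nii.gz'
w.inputs.input.subject = '001'  # without the "sub-" prefix, per the field comment
w.run()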
コード例 #16
    def create_workflow(self, flow, inputnode, outputnode):
        if self.config.seg_tool == "Freesurfer":
            if self.config.use_existing_freesurfer_data is False:
                # Converting to .mgz format
                fs_mriconvert = pe.Node(interface=fs.MRIConvert(
                    out_type="mgz", out_file="T1.mgz"),
                                        name="mgzConvert")

                if self.config.make_isotropic:
                    fs_mriconvert.inputs.vox_size = (
                        self.config.isotropic_vox_size,
                        self.config.isotropic_vox_size,
                        self.config.isotropic_vox_size)
                    fs_mriconvert.inputs.resample_type = self.config.isotropic_interpolation

                rename = pe.Node(util.Rename(), name='copyOrig')
                orig_dir = os.path.join(self.config.freesurfer_subject_id,
                                        "mri", "orig")
                if not os.path.exists(orig_dir):
                    os.makedirs(orig_dir)
                    print("INFO : Folder not existing; %s created!" % orig_dir)
                rename.inputs.format_string = os.path.join(orig_dir, "001.mgz")

                if self.config.brain_mask_extraction_tool == "Freesurfer":
                    # ReconAll => named outputnode as we don't want to select a specific output....
                    fs_reconall = pe.Node(interface=fs.ReconAll(
                        flags='-no-isrunning -parallel -openmp {}'.format(
                            self.config.number_of_threads)),
                                          name='reconall')
                    fs_reconall.inputs.directive = 'all'
                    fs_reconall.inputs.args = self.config.freesurfer_args

                    # fs_reconall.inputs.subjects_dir and fs_reconall.inputs.subject_id set in cmp/pipelines/diffusion/diffusion.py
                    fs_reconall.inputs.subjects_dir = self.config.freesurfer_subjects_dir

                    # fs_reconall.inputs.hippocampal_subfields_T1 = self.config.segment_hippocampal_subfields
                    # fs_reconall.inputs.brainstem = self.config.segment_brainstem

                    def isavailable(file):
                        # print "T1 is available"
                        return file

                    flow.connect([
                        (inputnode, fs_mriconvert, [(('T1', isavailable),
                                                     'in_file')]),
                        (fs_mriconvert, rename, [('out_file', 'in_file')]),
                        (rename, fs_reconall,
                         [(("out_file", extract_base_directory), "subject_id")
                          ]),
                        (fs_reconall, outputnode,
                         [('subjects_dir', 'subjects_dir'),
                          ('subject_id', 'subject_id')]),
                    ])
                else:
                    # ReconAll => named outputnode as we don't want to select a specific output....
                    fs_autorecon1 = pe.Node(interface=fs.ReconAll(
                        flags='-no-isrunning -parallel -openmp {}'.format(
                            self.config.number_of_threads)),
                                            name='autorecon1')
                    fs_autorecon1.inputs.directive = 'autorecon1'

                    # if self.config.brain_mask_extraction_tool == "Custom" or self.config.brain_mask_extraction_tool == "ANTs":
                    if self.config.brain_mask_extraction_tool == "ANTs":
                        fs_autorecon1.inputs.flags = '-no-isrunning -noskullstrip -parallel -openmp {}'.format(
                            self.config.number_of_threads)
                    fs_autorecon1.inputs.args = self.config.freesurfer_args

                    # fs_reconall.inputs.subjects_dir and fs_reconall.inputs.subject_id set in cmp/pipelines/diffusion/diffusion.py
                    fs_autorecon1.inputs.subjects_dir = self.config.freesurfer_subjects_dir

                    def isavailable(file):
                        # print "Is available"
                        return file

                    flow.connect([(inputnode, fs_mriconvert, [
                        (('T1', isavailable), 'in_file')
                    ]), (fs_mriconvert, rename, [('out_file', 'in_file')]),
                                  (rename, fs_autorecon1,
                                   [(("out_file", extract_base_directory),
                                     "subject_id")])])

                    fs_source = pe.Node(interface=FreeSurferSource(),
                                        name='fsSource')

                    fs_mriconvert_nu = pe.Node(interface=fs.MRIConvert(
                        out_type="niigz", out_file="nu.nii.gz"),
                                               name='niigzConvert')

                    flow.connect([(fs_autorecon1, fs_source, [
                        ('subjects_dir', 'subjects_dir'),
                        ('subject_id', 'subject_id')
                    ]), (fs_source, fs_mriconvert_nu, [('nu', 'in_file')])])

                    fs_mriconvert_brainmask = pe.Node(
                        interface=fs.MRIConvert(out_type="mgz",
                                                out_file="brainmask.mgz"),
                        name='fsMriconvertBETbrainmask')

                    if self.config.brain_mask_extraction_tool == "BET":
                        fsl_bet = pe.Node(interface=fsl.BET(
                            out_file='brain.nii.gz',
                            mask=True,
                            skull=True,
                            robust=True),
                                          name='fsl_bet')

                        flow.connect([(fs_mriconvert_nu, fsl_bet,
                                       [('out_file', 'in_file')]),
                                      (fsl_bet, fs_mriconvert_brainmask,
                                       [('out_file', 'in_file')])])

                    elif self.config.brain_mask_extraction_tool == "ANTs":
                        # templatefile =
                        #    pkg_resources.resource_filename('cmtklib', os.path.join('data', 'segmentation',
                        #                                    'ants_template_IXI', 'T_template2_BrainCerebellum.nii.gz'))
                        # probmaskfile = pkg_resources.resource_filename('cmtklib',
                        #     os.path.join('data', 'segmentation', 'ants_template_IXI',
                        #     'T_template_BrainCerebellumProbabilityMask.nii.gz'))

                        ants_bet = pe.Node(interface=ants.BrainExtraction(
                            out_prefix='ants_bet_'),
                                           name='antsBET')
                        ants_bet.inputs.brain_template = self.config.ants_templatefile
                        ants_bet.inputs.brain_probability_mask = self.config.ants_probmaskfile
                        ants_bet.inputs.extraction_registration_mask = self.config.ants_regmaskfile
                        ants_bet.inputs.num_threads = self.config.number_of_threads

                        flow.connect([(fs_mriconvert_nu, ants_bet,
                                       [('out_file', 'anatomical_image')]),
                                      (ants_bet, fs_mriconvert_brainmask,
                                       [('BrainExtractionBrain', 'in_file')])])
                    # elif self.config.brain_mask_extraction_tool == "Custom":
                    #     fs_mriconvert_brainmask.inputs.in_file = os.path.abspath(
                    #         self.config.brain_mask_path)

                    # copy_brainmask_to_fs = pe.Node(interface=copyFileToFreesurfer(),name='copy_brainmask_to_fs')
                    # copy_brainmask_to_fs.inputs.out_file =
                    #    os.path.join(self.config.freesurfer_subject_id,"mri","brainmask.mgz")

                    # copy_brainmaskauto_to_fs = pe.Node(interface=copyFileToFreesurfer(),name='copy_brainmaskauto_to_fs')
                    # copy_brainmaskauto_to_fs.inputs.out_file =
                    #    os.path.join(self.config.freesurfer_subject_id,"mri","brainmask.auto.mgz")

                    # flow.connect([
                    #             (fs_mriconvert_brainmask,copy_brainmask_to_fs,[('out_file','in_file')]),
                    #             (fs_mriconvert_brainmask,copy_brainmaskauto_to_fs,[('out_file','in_file')])
                    #             ])

                    copy_brainmask_to_fs = pe.Node(
                        interface=copyBrainMaskToFreesurfer(),
                        name='copyBrainmaskTofs')

                    flow.connect([(rename, copy_brainmask_to_fs, [
                        (("out_file", extract_base_directory), "subject_dir")
                    ]),
                                  (fs_mriconvert_brainmask,
                                   copy_brainmask_to_fs, [('out_file',
                                                           'in_file')])])

                    # flow.connect([
                    #             (fs_source,fs_mriconvert_nu,[('nu','in_file')])
                    #             ])

                    def get_freesurfer_subject_id(file):
                        # print("Extract reconall base dir : %s" % file[:-18])
                        return file[:-18]

                    fs_reconall23 = pe.Node(interface=fs.ReconAll(
                        flags='-no-isrunning -parallel -openmp {}'.format(
                            self.config.number_of_threads)),
                                            name='reconall23')
                    fs_reconall23.inputs.directive = 'autorecon2'
                    fs_reconall23.inputs.args = self.config.freesurfer_args
                    fs_reconall23.inputs.flags = '-autorecon3'

                    # fs_reconall.inputs.subjects_dir and fs_reconall.inputs.subject_id set in cmp/pipelines/diffusion/diffusion.py
                    fs_reconall23.inputs.subjects_dir = self.config.freesurfer_subjects_dir

                    # fs_reconall.inputs.hippocampal_subfields_T1 = self.config.segment_hippocampal_subfields
                    # fs_reconall.inputs.brainstem = self.config.segment_brainstem

                    flow.connect([(copy_brainmask_to_fs, fs_reconall23, [
                        (("out_brainmask_file", get_freesurfer_subject_id),
                         "subject_id")
                    ]),
                                  (fs_reconall23, outputnode,
                                   [('subjects_dir', 'subjects_dir'),
                                    ('subject_id', 'subject_id')])])

            else:
                outputnode.inputs.subjects_dir = self.config.freesurfer_subjects_dir
                outputnode.inputs.subject_id = self.config.freesurfer_subject_id
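To make the branching above explicit, here is a hedged sketch of the configuration attributes this stage reads; the attribute names come from the method body, while the values are illustrative only.

# Illustrative values only -- each attribute mirrors a lookup in the method.
config.seg_tool = "Freesurfer"
config.use_existing_freesurfer_data = False  # True bypasses recon entirely
config.brain_mask_extraction_tool = "Freesurfer"  # single recon-all node
# With "BET" or "ANTs" instead, the stage runs autorecon1, builds the brain
# mask externally, copies it into the subject's mri/ folder, and resumes
# with autorecon2/autorecon3.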
コード例 #17
from nipype.interfaces.ants import ApplyTransforms, Registration
from nipype.interfaces.freesurfer import MRIConvert
from nipype.interfaces.fsl import BinaryMaths, Split, Threshold
from nipype.interfaces.io import DataSink, FreeSurferSource
from nipype.interfaces.utility import IdentityInterface, Rename
from nipype.pipeline.engine import Node, Workflow


def select_file(in_files, index):
    """Pick one volume from the fslsplit output (assumed helper; the
    original defines it outside this snippet)."""
    return in_files[index]


def mk_w_angio(freesurfer_dir, angiogram, out_dir):

    n_input = Node(IdentityInterface(fields=[
        'fs_dir',
        'fs_subj',
        'angiogram',
        'out_dir',
    ]),
                   name='input')

    n_input.inputs.fs_dir = str(freesurfer_dir.parent)
    n_input.inputs.fs_subj = freesurfer_dir.name
    n_input.inputs.angiogram = str(angiogram)
    n_input.inputs.out_dir = str(out_dir)

    n_coreg = Node(Registration(), name='antsReg')
    n_coreg.inputs.num_threads = 40
    n_coreg.inputs.use_histogram_matching = False
    n_coreg.inputs.dimension = 3
    n_coreg.inputs.winsorize_lower_quantile = 0.001
    n_coreg.inputs.winsorize_upper_quantile = 0.999
    n_coreg.inputs.float = True
    n_coreg.inputs.interpolation = 'Linear'
    n_coreg.inputs.transforms = ['Rigid']
    n_coreg.inputs.transform_parameters = [[0.1]]
    n_coreg.inputs.metric = ['MI']
    n_coreg.inputs.metric_weight = [1]
    n_coreg.inputs.radius_or_number_of_bins = [32]
    n_coreg.inputs.sampling_strategy = ['Regular']
    n_coreg.inputs.sampling_percentage = [0.5]
    n_coreg.inputs.sigma_units = ['mm']
    n_coreg.inputs.convergence_threshold = [1e-6]
    n_coreg.inputs.smoothing_sigmas = [[1, 0]]
    n_coreg.inputs.shrink_factors = [[1, 1]]
    n_coreg.inputs.convergence_window_size = [10]
    n_coreg.inputs.number_of_iterations = [[250, 100]]
    n_coreg.inputs.output_warped_image = True
    n_coreg.inputs.output_inverse_warped_image = True
    n_coreg.inputs.output_transform_prefix = 'angio_to_struct'

    n_apply = Node(ApplyTransforms(), name='ants_apply')
    n_apply.inputs.dimension = 3
    n_apply.inputs.interpolation = 'Linear'
    n_apply.inputs.default_value = 0

    n_convert = Node(MRIConvert(), 'convert')
    n_convert.inputs.out_type = 'niigz'

    n_binarize = Node(Threshold(), 'make_mask')
    n_binarize.inputs.thresh = .1
    n_binarize.inputs.args = '-bin'

    n_mask = Node(BinaryMaths(), 'mask')
    n_mask.inputs.operation = 'mul'

    n_veins = Node(Rename(), 'rename_veins')
    n_veins.inputs.format_string = 'angiogram.nii.gz'

    n_sink = Node(DataSink(), 'sink')
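    # Note: this hard-coded path is overridden at graph-build time by the
    # connection from n_input.out_dir below.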
    n_sink.inputs.base_directory = '/Fridge/users/giovanni/projects/intraop/loenen/angiogram'
    n_sink.inputs.remove_dest_dir = True

    fs = Node(FreeSurferSource(), 'freesurfer')

    n_split = Node(Split(), 'split_pca')
    n_split.inputs.dimension = 't'

    w = Workflow('tmp_angiogram')
    w.base_dir = str(out_dir)

    w.connect(n_input, 'fs_dir', fs, 'subjects_dir')
    w.connect(n_input, 'fs_subj', fs, 'subject_id')
    w.connect(n_input, 'angiogram', n_split, 'in_file')
    w.connect(n_split, ('out_files', select_file, 0), n_coreg, 'moving_image')
    w.connect(fs, 'T1', n_coreg, 'fixed_image')

    w.connect(n_coreg, 'forward_transforms', n_apply, 'transforms')
    w.connect(n_split, ('out_files', select_file, 1), n_apply, 'input_image')
    w.connect(fs, 'T1', n_apply, 'reference_image')
    w.connect(fs, 'brain', n_convert, 'in_file')
    w.connect(n_convert, 'out_file', n_binarize, 'in_file')
    w.connect(n_apply, 'output_image', n_mask, 'in_file')
    w.connect(n_binarize, 'out_file', n_mask, 'operand_file')
    w.connect(n_mask, 'out_file', n_veins, 'in_file')
    w.connect(n_input, 'out_dir', n_sink, 'base_directory')
    w.connect(n_veins, 'out_file', n_sink, '@angiogram')
    w.connect(n_convert, 'out_file', n_sink, '@brain')

    return w
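A hedged invocation sketch; `mk_w_angio` expects `pathlib.Path` objects (the body calls `.parent` and `.name` on `freesurfer_dir`), and all paths here are placeholders.

from pathlib import Path

w = mk_w_angio(
    freesurfer_dir=Path('/data/freesurfer/sub-01'),
    angiogram=Path('/data/raw/sub-01_angio4D.nii.gz'),
    out_dir=Path('/data/derivatives/angio/sub-01'),
)
w.run()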
コード例 #18
import nipype.pipeline.engine as pe
from nipype.interfaces.io import DataGrabber, FreeSurferSource
from nipype.interfaces.utility import Merge


# Note: create_autorecon1/2/3 are factory functions defined elsewhere in the
# source module; they are not part of this snippet.
def create_reconall(config):
    """
    This function...

    :param config:
    :return:
    """
    ar1_wf = create_autorecon1(config)
    ar2_wf, ar2_lh, ar2_rh = create_autorecon2(config)
    ar3_wf = create_autorecon3(config)

    # Connect workflows
    reconall = pe.Workflow(name="recon-all")
    if config["longitudinal"]:
        # grab files from the initial single session run
        grab_inittp_files = pe.Node(
            DataGrabber(),
            name="Grab_Initial_Files",
            infields=["subject_id"],
            outfileds=["inputvols", "iscales", "ltas"],
        )
        grab_inittp_files.inputs.template = "*"
        grab_inittp_files.inputs.base_directory = config["subjects_dir"]
        grab_inittp_files.inputs.field_template = dict(
            inputvols="%s/mri/orig/0*.mgz",
            iscales="%s/mri/orig/0*-iscale.txt",
            ltas="%s/mri/orig/0*.lta",
        )

        grab_inittp_files.inputs.template_args = dict(
            inputvols=[["subject_id"]],
            iscales=[["subject_id"]],
            ltas=[["subject_id"]])

        reconall.connect([(
            grab_inittp_files,
            ar1_wf,
            [
                ("inputvols", "AutoRecon1_Inputs.in_T1s"),
                ("iscales", "AutoRecon1_Inputs.iscales"),
                ("ltas", "AutoRecon1_Inputs.ltas"),
            ],
        )])

        merge_norms = pe.Node(Merge(len(config["timepoints"])),
                              name="Merge_Norms")
        merge_segs = pe.Node(Merge(len(config["timepoints"])),
                             name="Merge_Segmentations")
        merge_segs_noCC = pe.Node(Merge(len(config["timepoints"])),
                                  name="Merge_Segmentations_noCC")
        merge_template_ltas = pe.Node(Merge(len(config["timepoints"])),
                                      name="Merge_Template_ltas")

        for i, tp in enumerate(config["timepoints"]):
            # datasource timepoint files
            tp_data_source = pe.Node(FreeSurferSource(),
                                     name="{0}_DataSource".format(tp))
            tp_data_source.inputs.subject_id = tp
            tp_data_source.inputs.subjects_dir = config["subjects_dir"]

            tp_data_grabber = pe.Node(
                DataGrabber(),
                name="{0}_DataGrabber".format(tp),
                infields=["tp", "long_tempate"],
                outfileds=["subj_to_template_lta", "seg_noCC", "seg_presurf"],
            )
            tp_data_grabber.inputs.template = "*"
            tp_data_grabber.inputs.base_directory = config["subjects_dir"]
            tp_data_grabber.inputs.field_template = dict(
                subj_to_template_lta="%s/mri/transforms/%s_to_%s.lta",
                seg_noCC="%s/mri/aseg.auto_noCCseg.mgz",
                seg_presurf="%s/mri/aseg.presurf.mgz",
            )

            tp_data_grabber.inputs.template_args = dict(
                subj_to_template_lta=[["long_template", "tp",
                                       "long_template"]],
                seg_noCC=[["tp"]],
                seg_presurf=[["tp"]],
            )

            reconall.connect([
                (tp_data_source, merge_norms, [("norm", "in{0}".format(i))]),
                (tp_data_grabber, merge_segs, [("seg_presurf",
                                                "in{0}".format(i))]),
                (
                    tp_data_grabber,
                    merge_segs_noCC,
                    [("seg_noCC", "in{0}".format(i))],
                ),
                (
                    tp_data_grabber,
                    merge_template_ltas,
                    [("subj_to_template_lta", "in{0}".format(i))],
                ),
            ])

            if tp == config["subject_id"]:
                reconall.connect([
                    (tp_data_source, ar2_wf, [("wm",
                                               "AutoRecon2_Inputs.init_wm")]),
                    (
                        tp_data_grabber,
                        ar2_wf,
                        [(
                            "subj_to_template_lta",
                            "AutoRecon2_Inputs.subj_to_template_lta",
                        )],
                    ),
                    (
                        tp_data_grabber,
                        ar2_wf,
                        [(
                            "subj_to_template_lta",
                            "AutoRecon1_Inputs.subj_to_template_lta",
                        )],
                    ),
                ])

        reconall.connect([
            (merge_norms, ar2_wf, [("out", "AutoRecon2_Inputs.alltps_norms")]),
            (merge_segs, ar2_wf, [("out", "AutoRecon2_Inputs.alltps_segs")]),
            (
                merge_template_ltas,
                ar2_wf,
                [("out", "AutoRecon2_Inputs.alltps_to_template_ltas")],
            ),
            (
                merge_segs_noCC,
                ar2_wf,
                [("out", "AutoRecon2_Inputs.alltps_segs_noCC")],
            ),
        ])

        # datasource files from the template run
        ds_template_files = pe.Node(FreeSurferSource(),
                                    name="Datasource_Template_Files")
        ds_template_files.inputs.subject_id = config["subject_id"]
        ds_template_files.inputs.subjects_dir = config["subjects_dir"]

        reconall.connect([
            (
                ds_template_files,
                ar1_wf,
                [("brainmask", "AutoRecon1_Inputs.template_brainmask")],
            ),
            (
                ds_template_files,
                ar2_wf,
                [("aseg", "AutoRecon2_Inputs.template_aseg")],
            ),
        ])

        # grab files from template run
        grab_template_files = pe.Node(
            DataGrabber(),
            name="Grab_Template_Files",
            infields=["subject_id", "long_template"],
            outfields=[
                "template_talairach_xfm",
                "template_talairach_lta",
                "template_talairach_m3z",
                "template_label_intensities",
                "template_lh_white",
                "template_rh_white",
                "template_lh_pial",
                "template_rh_pial",
            ],
        )
        grab_template_files.inputs.template = "*"
        grab_template_files.inputs.base_directory = config["subjects_dir"]
        grab_template_files.inputs.subject_id = config["subject_id"]
        grab_template_files.inputs.long_template = config["long_template"]
        grab_template_files.inputs.field_template = dict(
            template_talairach_xfm="%s/mri/transfroms/talairach.xfm",
            template_talairach_lta="%s/mri/transfroms/talairach.lta",
            template_talairach_m3z="%s/mri/transfroms/talairach.m3z",
            template_label_intensities=
            "%s/mri/aseg.auto_noCCseg.label_intensities.txt",
            template_lh_white="%s/surf/lh.white",
            template_rh_white="%s/surf/rh.white",
            template_lh_pial="%s/surf/lh.pial",
            template_rh_pial="%s/surf/rh.pial",
        )

        grab_template_files.inputs.template_args = dict(
            template_talairach_xfm=[["long_template"]],
            template_talairach_lta=[["long_template"]],
            template_talairach_m3z=[["long_template"]],
            template_label_intensities=[["long_template"]],
            template_lh_white=[["long_template"]],
            template_rh_white=[["long_template"]],
            template_lh_pial=[["long_template"]],
            template_rh_pial=[["long_template"]],
        )
        reconall.connect([
            (
                grab_template_files,
                ar1_wf,
                [(
                    "template_talairach_xfm",
                    "AutoRecon1_Inputs.template_talairach_xfm",
                )],
            ),
            (
                grab_template_files,
                ar2_wf,
                [
                    (
                        "template_talairach_lta",
                        "AutoRecon2_Inputs.template_talairach_lta",
                    ),
                    (
                        "template_talairach_m3z",
                        "AutoRecon2_Inputs.template_talairach_m3z",
                    ),
                    (
                        "template_label_intensities",
                        "AutoRecon2_Inputs.template_label_intensities",
                    ),
                    ("template_lh_white",
                     "AutoRecon2_Inputs.template_lh_white"),
                    ("template_rh_white",
                     "AutoRecon2_Inputs.template_rh_white"),
                    ("template_lh_pial", "AutoRecon2_Inputs.template_lh_pial"),
                    ("template_rh_pial", "AutoRecon2_Inputs.template_rh_pial"),
                ],
            ),
        ])
        # end longitudinal data collection

    # connect autorecon 1 - 3
    reconall.connect([
        (
            ar1_wf,
            ar3_wf,
            [
                ("AutoRecon1_Inputs.subject_id",
                 "AutoRecon3_Inputs.subject_id"),
                (
                    "AutoRecon1_Inputs.subjects_dir",
                    "AutoRecon3_Inputs.subjects_dir",
                ),
                ("Copy_Brainmask.out_file", "AutoRecon3_Inputs.brainmask"),
                ("Copy_Transform.out_file", "AutoRecon3_Inputs.transform"),
                ("Add_Transform_to_Header.out_file",
                 "AutoRecon3_Inputs.orig_mgz"),
                ("Robust_Template.out_file", "AutoRecon3_Inputs.rawavg"),
            ],
        ),
        (
            ar1_wf,
            ar2_wf,
            [
                ("Copy_Brainmask.out_file", "AutoRecon2_Inputs.brainmask"),
                ("Copy_Transform.out_file", "AutoRecon2_Inputs.transform"),
                ("Add_Transform_to_Header.out_file", "AutoRecon2_Inputs.orig"),
                ("AutoRecon1_Inputs.subject_id",
                 "AutoRecon2_Inputs.subject_id"),
                (
                    "AutoRecon1_Inputs.subjects_dir",
                    "AutoRecon2_Inputs.subjects_dir",
                ),
            ],
        ),
        (
            ar2_lh,
            ar3_wf,
            [
                ("inflate2.out_file", "AutoRecon3_Inputs.lh_inflated"),
                ("Smooth2.surface", "AutoRecon3_Inputs.lh_smoothwm"),
                ("Make_Surfaces.out_white", "AutoRecon3_Inputs.lh_white"),
                ("Make_Surfaces.out_cortex",
                 "AutoRecon3_Inputs.lh_cortex_label"),
                ("Make_Surfaces.out_area", "AutoRecon3_Inputs.lh_area"),
                ("Make_Surfaces.out_curv", "AutoRecon3_Inputs.lh_curv"),
                ("inflate2.out_sulc", "AutoRecon3_Inputs.lh_sulc"),
                (
                    "Extract_Main_Component.out_file",
                    "AutoRecon3_Inputs.lh_orig_nofix",
                ),
                ("Remove_Intersection.out_file", "AutoRecon3_Inputs.lh_orig"),
                ("Curvature1.out_mean", "AutoRecon3_Inputs.lh_white_H"),
                ("Curvature1.out_gauss", "AutoRecon3_Inputs.lh_white_K"),
            ],
        ),
        (
            ar2_rh,
            ar3_wf,
            [
                ("inflate2.out_file", "AutoRecon3_Inputs.rh_inflated"),
                ("Smooth2.surface", "AutoRecon3_Inputs.rh_smoothwm"),
                ("Make_Surfaces.out_white", "AutoRecon3_Inputs.rh_white"),
                ("Make_Surfaces.out_cortex",
                 "AutoRecon3_Inputs.rh_cortex_label"),
                ("Make_Surfaces.out_area", "AutoRecon3_Inputs.rh_area"),
                ("Make_Surfaces.out_curv", "AutoRecon3_Inputs.rh_curv"),
                ("inflate2.out_sulc", "AutoRecon3_Inputs.rh_sulc"),
                (
                    "Extract_Main_Component.out_file",
                    "AutoRecon3_Inputs.rh_orig_nofix",
                ),
                ("Remove_Intersection.out_file", "AutoRecon3_Inputs.rh_orig"),
                ("Curvature1.out_mean", "AutoRecon3_Inputs.rh_white_H"),
                ("Curvature1.out_gauss", "AutoRecon3_Inputs.rh_white_K"),
            ],
        ),
        (
            ar2_wf,
            ar3_wf,
            [
                ("Copy_CCSegmentation.out_file",
                 "AutoRecon3_Inputs.aseg_presurf"),
                (
                    "Mask_Brain_Final_Surface.out_file",
                    "AutoRecon3_Inputs.brain_finalsurfs",
                ),
                ("MRI_Pretess.out_file", "AutoRecon3_Inputs.wm"),
                ("Fill.out_file", "AutoRecon3_Inputs.filled"),
                ("CA_Normalize.out_file", "AutoRecon3_Inputs.norm"),
            ],
        ),
    ])

    return reconall
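A hedged sketch of the `config` dictionary this factory expects; only the keys read in the snippet above are shown, all values are placeholders, and the `create_autorecon*` helpers may read additional keys.

config = {
    "longitudinal": True,
    "subjects_dir": "/data/freesurfer",
    "subject_id": "sub-01_template",  # the longitudinal template ID
    "timepoints": ["sub-01_ses-01", "sub-01_ses-02"],
    "long_template": "sub-01_template",
}
reconall = create_reconall(config)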
コード例 #19
def init_templateflow_wf(
    bids_dir,
    output_dir,
    participant_label,
    mov_template,
    ref_template='MNI152NLin2009cAsym',
    use_float=True,
    omp_nthreads=None,
    mem_gb=3.0,
    modality='T1w',
    normalization_quality='precise',
    name='templateflow_wf',
    fs_subjects_dir=None,
):
    """
    A Nipype workflow to perform image registration between two templates
    *R* and *M*. *R* is the *reference template*, selected by a templateflow
    identifier such as ``MNI152NLin2009cAsym``, and *M* is the *moving
    template* (e.g., ``MNI152Lin``). This workflow maps data defined in
    template-*M* space onto template-*R* space.

    1. Run the surrogate images through ``antsBrainExtraction``.
    2. Recompute :abbr:`INU (intensity non-uniformity)` correction using
       the mask obtained in 1).
    3. Independently, run spatial normalization of every
       :abbr:`INU (intensity non-uniformity)` corrected image
       (supplied via ``in_files``) to both templates.
    4. Calculate an initialization between both templates, using them directly.
    5. Run multi-channel image registration of the images resulting from
       3). Both sets of images (one registered to *R* and another to *M*)
       are then used as reference and moving images in the registration
       framework.

    **Parameters**

    participant_label: list of str
        participant labels; their anatomical images are used as surrogates
    mov_template: str
        a templateflow identifier for template-*M*
    ref_template: str
        a templateflow identifier for template-*R* (default: ``MNI152NLin2009cAsym``).


    """
    # number of participants
    ninputs = len(participant_label)
    ants_env = {
        'NSLOTS': '%d' % omp_nthreads,
        'ITK_GLOBAL_DEFAULT_NUMBER_OF_THREADS': '%d' % omp_nthreads,
        'OMP_NUM_THREADS': '%d' % omp_nthreads,
    }

    # Get path to templates
    tpl_ref = str(
        get_template(ref_template, suffix=modality, desc=None, resolution=1))
    tpl_ref_mask = str(
        get_template(ref_template, suffix='mask', desc='brain', resolution=1))
    tpl_mov = str(
        get_template(mov_template, suffix=modality, desc=None, resolution=1))
    tpl_mov_mask = str(
        get_template(mov_template, suffix='mask', desc='brain', resolution=1))

    wf = pe.Workflow(name)
    inputnode = pe.Node(niu.IdentityInterface(fields=['participant_label']),
                        name='inputnode')
    inputnode.iterables = ('participant_label',
                           sorted(list(participant_label)))

    pick_file = pe.Node(niu.Function(function=_bids_pick),
                        name='pick_file',
                        run_without_submitting=True)
    pick_file.inputs.bids_root = bids_dir

    ref_bex = init_brain_extraction_wf(
        in_template=ref_template,
        omp_nthreads=omp_nthreads,
        mem_gb=mem_gb,
        bids_suffix=modality,
        name='reference_bex',
    )

    mov_bex = init_brain_extraction_wf(
        in_template=mov_template,
        omp_nthreads=omp_nthreads,
        mem_gb=mem_gb,
        bids_suffix=modality,
        name='moving_bex',
    )

    ref_norm = pe.Node(Registration(from_file=pkgr.resource_filename(
        'niworkflows.data', 't1w-mni_registration_%s_000.json' %
        normalization_quality)),
                       name='ref_norm',
                       n_procs=omp_nthreads)
    ref_norm.inputs.fixed_image = tpl_ref
    ref_norm.inputs.fixed_image_masks = tpl_ref_mask
    ref_norm.inputs.environ = ants_env

    # Register the INU-corrected image to the other template
    mov_norm = pe.Node(Registration(from_file=pkgr.resource_filename(
        'niworkflows.data', 't1w-mni_registration_%s_000.json' %
        normalization_quality)),
                       name='mov_norm',
                       n_procs=omp_nthreads)
    mov_norm.inputs.fixed_image = tpl_mov
    mov_norm.inputs.fixed_image_masks = tpl_mov_mask
    mov_norm.inputs.environ = ants_env

    # Initialize between-templates transform with antsAI
    init_aff = pe.Node(AI(
        metric=('Mattes', 32, 'Regular', 0.2),
        transform=('Affine', 0.1),
        search_factor=(20, 0.12),
        principal_axes=False,
        convergence=(10, 1e-6, 10),
        verbose=True,
        fixed_image=tpl_ref,
        fixed_image_mask=tpl_ref_mask,
        moving_image=tpl_mov,
        moving_image_mask=tpl_mov_mask,
        environ=ants_env,
    ),
                       name='init_aff',
                       n_procs=omp_nthreads)

    ref_buffer = pe.JoinNode(niu.IdentityInterface(fields=['fixed_image']),
                             joinsource='inputnode',
                             joinfield='fixed_image',
                             name='ref_buffer')

    mov_buffer = pe.JoinNode(niu.IdentityInterface(fields=['moving_image']),
                             joinsource='inputnode',
                             joinfield='moving_image',
                             name='mov_buffer')

    flow = pe.Node(
        Registration(from_file=pkgr.resource_filename(
            'niworkflows.data', 't1w-mni_registration_%s_000.json' %
            normalization_quality)),
        name='flow_norm',
        n_procs=omp_nthreads,
    )
    flow.inputs.fixed_image_masks = tpl_ref_mask
    flow.inputs.moving_image_masks = tpl_mov_mask
    flow.inputs.metric = [[v] * ninputs for v in flow.inputs.metric]
    flow.inputs.metric_weight = [[1 / ninputs] * ninputs
                                 for _ in flow.inputs.metric_weight]
    flow.inputs.radius_or_number_of_bins = [
        [v] * ninputs for v in flow.inputs.radius_or_number_of_bins
    ]
    flow.inputs.sampling_percentage = [[v] * ninputs
                                       for v in flow.inputs.sampling_percentage
                                       ]
    flow.inputs.sampling_strategy = [[v] * ninputs
                                     for v in flow.inputs.sampling_strategy]
    flow.inputs.environ = ants_env

    # Datasinking
    ref_norm_ds = pe.Node(DerivativesDataSink(base_directory=str(
        output_dir.parent),
                                              out_path_base=output_dir.name,
                                              space=ref_template,
                                              desc='preproc',
                                              keep_dtype=True),
                          name='ref_norm_ds',
                          run_without_submitting=True)

    mov_norm_ds = pe.Node(DerivativesDataSink(base_directory=str(
        output_dir.parent),
                                              out_path_base=output_dir.name,
                                              space=mov_template,
                                              desc='preproc',
                                              keep_dtype=True),
                          name='mov_norm_ds',
                          run_without_submitting=True)

    xfm_ds = pe.Node(DerivativesDataSink(
        base_directory=str(output_dir.parent),
        out_path_base=output_dir.name,
        allowed_entities=['from', 'mode'],
        mode='image',
        suffix='xfm',
        source_file='group/tpl-{0}_T1w.nii.gz'.format(ref_template),
        **{'from': mov_template}),
                     name='xfm_ds',
                     run_without_submitting=True)

    wf.connect([
        (inputnode, pick_file, [('participant_label', 'participant_label')]),
        (pick_file, ref_bex, [('out', 'inputnode.in_files')]),
        (pick_file, mov_bex, [('out', 'inputnode.in_files')]),
        (ref_bex, ref_norm, [('outputnode.bias_corrected', 'moving_image'),
                             ('outputnode.out_mask', 'moving_image_masks'),
                             ('norm.forward_transforms',
                              'initial_moving_transform')]),
        (ref_bex, mov_norm, [('outputnode.bias_corrected', 'moving_image')]),
        (mov_bex, mov_norm, [('outputnode.out_mask', 'moving_image_masks'),
                             ('norm.forward_transforms',
                              'initial_moving_transform')]),
        (init_aff, flow, [('output_transform', 'initial_moving_transform')]),
        (ref_norm, ref_buffer, [('warped_image', 'fixed_image')]),
        (mov_norm, mov_buffer, [('warped_image', 'moving_image')]),
        (ref_buffer, flow, [('fixed_image', 'fixed_image')]),
        (mov_buffer, flow, [('moving_image', 'moving_image')]),
        (pick_file, ref_norm_ds, [('out', 'source_file')]),
        (ref_norm, ref_norm_ds, [('warped_image', 'in_file')]),
        (pick_file, mov_norm_ds, [('out', 'source_file')]),
        (mov_norm, mov_norm_ds, [('warped_image', 'in_file')]),
        (flow, xfm_ds, [('composite_transform', 'in_file')]),
    ])

    if fs_subjects_dir:
        fssource = pe.Node(FreeSurferSource(subjects_dir=str(fs_subjects_dir)),
                           name='fssource',
                           run_without_submitting=True)
        tonative = pe.Node(fs.Label2Vol(subjects_dir=str(fs_subjects_dir)),
                           name='tonative')
        tonii = pe.Node(fs.MRIConvert(out_type='niigz',
                                      resample_type='nearest'),
                        name='tonii')

        ref_aparc = pe.Node(ApplyTransforms(interpolation='MultiLabel',
                                            float=True,
                                            reference_image=tpl_ref,
                                            environ=ants_env),
                            name='ref_aparc',
                            mem_gb=1,
                            n_procs=omp_nthreads)

        mov_aparc = pe.Node(ApplyTransforms(interpolation='MultiLabel',
                                            float=True,
                                            reference_image=tpl_mov,
                                            environ=ants_env),
                            name='mov_aparc',
                            mem_gb=1,
                            n_procs=omp_nthreads)

        ref_aparc_buffer = pe.JoinNode(niu.IdentityInterface(fields=['aparc']),
                                       joinsource='inputnode',
                                       joinfield='aparc',
                                       name='ref_aparc_buffer')

        ref_join_labels = pe.Node(AntsJointFusion(
            target_image=[tpl_ref],
            out_label_fusion='merged_aparc.nii.gz',
            out_intensity_fusion_name_format='merged_aparc_intensity_%d.nii.gz',
            out_label_post_prob_name_format='merged_aparc_posterior_%d.nii.gz',
            out_atlas_voting_weight_name_format='merged_aparc_weight_%d.nii.gz',
            environ=ants_env,
        ),
                                  name='ref_join_labels',
                                  n_procs=omp_nthreads)

        ref_join_labels_ds = pe.Node(DerivativesDataSink(
            base_directory=str(output_dir.parent),
            out_path_base=output_dir.name,
            suffix='dtissue',
            desc='aparc',
            keep_dtype=False,
            source_file='group/tpl-{0}_T1w.nii.gz'.format(ref_template)),
                                     name='ref_join_labels_ds',
                                     run_without_submitting=True)

        ref_join_probs_ds = pe.Node(DerivativesDataSink(
            base_directory=str(output_dir.parent),
            out_path_base=output_dir.name,
            suffix='probtissue',
            desc='aparc',
            keep_dtype=False,
            source_file='group/tpl-{0}_T1w.nii.gz'.format(ref_template)),
                                    name='ref_join_probs_ds',
                                    run_without_submitting=True)

        # ref_join_voting_ds = pe.Node(
        #     DerivativesDataSink(
        #         base_directory=str(output_dir.parent),
        #         out_path_base=output_dir.name, space=ref_template,
        #         suffix='probtissue', desc='aparcvoting', keep_dtype=False,
        #         source_file='group/tpl-{0}_T1w.nii.gz'.format(ref_template)),
        #     name='ref_join_voting_ds', run_without_submitting=True)

        mov_aparc_buffer = pe.JoinNode(niu.IdentityInterface(fields=['aparc']),
                                       joinsource='inputnode',
                                       joinfield='aparc',
                                       name='mov_aparc_buffer')

        mov_join_labels = pe.Node(AntsJointFusion(
            target_image=[tpl_mov],
            out_label_fusion='merged_aparc.nii.gz',
            out_intensity_fusion_name_format='merged_aparc_intensity_%d.nii.gz',
            out_label_post_prob_name_format='merged_aparc_posterior_%d.nii.gz',
            out_atlas_voting_weight_name_format='merged_aparc_weight_%d.nii.gz',
            environ=ants_env,
        ),
                                  name='mov_join_labels',
                                  n_procs=omp_nthreads)

        mov_join_labels_ds = pe.Node(DerivativesDataSink(
            base_directory=str(output_dir.parent),
            out_path_base=output_dir.name,
            suffix='dtissue',
            desc='aparc',
            keep_dtype=False,
            source_file='group/tpl-{0}_T1w.nii.gz'.format(mov_template)),
                                     name='mov_join_labels_ds',
                                     run_without_submitting=True)

        mov_join_probs_ds = pe.Node(DerivativesDataSink(
            base_directory=str(output_dir.parent),
            out_path_base=output_dir.name,
            suffix='probtissue',
            desc='aparc',
            keep_dtype=False,
            source_file='group/tpl-{0}_T1w.nii.gz'.format(mov_template)),
                                    name='mov_join_probs_ds',
                                    run_without_submitting=True)

        ref_aparc_ds = pe.Node(DerivativesDataSink(
            base_directory=str(output_dir.parent),
            out_path_base=output_dir.name,
            space=ref_template,
            suffix='dtissue',
            desc='aparc',
            keep_dtype=False),
                               name='ref_aparc_ds',
                               run_without_submitting=True)

        mov_aparc_ds = pe.Node(DerivativesDataSink(
            base_directory=str(output_dir.parent),
            out_path_base=output_dir.name,
            space=mov_template,
            suffix='dtissue',
            desc='aparc',
            keep_dtype=False),
                               name='mov_aparc_ds',
                               run_without_submitting=True)
        # Extract surfaces
        cifti_wf = init_gifti_surface_wf(name='cifti_surfaces',
                                         subjects_dir=str(fs_subjects_dir))

        # Move surfaces to template spaces
        gii2csv = pe.MapNode(GiftiToCSV(itk_lps=True),
                             iterfield=['in_file'],
                             name='gii2csv')
        ref_map_surf = pe.MapNode(ApplyTransformsToPoints(dimension=3,
                                                          environ=ants_env),
                                  n_procs=omp_nthreads,
                                  name='ref_map_surf',
                                  iterfield=['input_file'])
        ref_csv2gii = pe.MapNode(CSVToGifti(itk_lps=True),
                                 name='ref_csv2gii',
                                 iterfield=['in_file', 'gii_file'])
        ref_surfs_buffer = pe.JoinNode(
            niu.IdentityInterface(fields=['surfaces']),
            joinsource='inputnode',
            joinfield='surfaces',
            name='ref_surfs_buffer')
        ref_surfs_unzip = pe.Node(UnzipJoinedSurfaces(),
                                  name='ref_surfs_unzip',
                                  run_without_submitting=True)
        ref_ply = pe.MapNode(SurfacesToPointCloud(),
                             name='ref_ply',
                             iterfield=['in_files'])
        ref_recon = pe.MapNode(PoissonRecon(),
                               name='ref_recon',
                               iterfield=['in_file'])
        ref_avggii = pe.MapNode(PLYtoGifti(),
                                name='ref_avggii',
                                iterfield=['in_file', 'surf_key'])
        ref_smooth = pe.MapNode(fs.SmoothTessellation(),
                                name='ref_smooth',
                                iterfield=['in_file'])

        ref_surfs_ds = pe.Node(DerivativesDataSink(
            base_directory=str(output_dir.parent),
            out_path_base=output_dir.name,
            space=ref_template,
            keep_dtype=False,
            compress=False),
                               name='ref_surfs_ds',
                               run_without_submitting=True)
        ref_avg_ds = pe.Node(DerivativesDataSink(
            base_directory=str(output_dir.parent),
            out_path_base=output_dir.name,
            space=ref_template,
            keep_dtype=False,
            compress=False,
            source_file='group/tpl-{0}_T1w.nii.gz'.format(ref_template)),
                             name='ref_avg_ds',
                             run_without_submitting=True)

        mov_map_surf = pe.MapNode(ApplyTransformsToPoints(dimension=3,
                                                          environ=ants_env),
                                  n_procs=omp_nthreads,
                                  name='mov_map_surf',
                                  iterfield=['input_file'])
        mov_csv2gii = pe.MapNode(CSVToGifti(itk_lps=True),
                                 name='mov_csv2gii',
                                 iterfield=['in_file', 'gii_file'])
        mov_surfs_buffer = pe.JoinNode(
            niu.IdentityInterface(fields=['surfaces']),
            joinsource='inputnode',
            joinfield='surfaces',
            name='mov_surfs_buffer')
        mov_surfs_unzip = pe.Node(UnzipJoinedSurfaces(),
                                  name='mov_surfs_unzip',
                                  run_without_submitting=True)
        mov_ply = pe.MapNode(SurfacesToPointCloud(),
                             name='mov_ply',
                             iterfield=['in_files'])
        mov_recon = pe.MapNode(PoissonRecon(),
                               name='mov_recon',
                               iterfield=['in_file'])
        mov_avggii = pe.MapNode(PLYtoGifti(),
                                name='mov_avggii',
                                iterfield=['in_file', 'surf_key'])
        mov_smooth = pe.MapNode(fs.SmoothTessellation(),
                                name='mov_smooth',
                                iterfield=['in_file'])

        mov_surfs_ds = pe.Node(DerivativesDataSink(
            base_directory=str(output_dir.parent),
            out_path_base=output_dir.name,
            space=mov_template,
            keep_dtype=False,
            compress=False),
                               name='mov_surfs_ds',
                               run_without_submitting=True)
        mov_avg_ds = pe.Node(DerivativesDataSink(
            base_directory=str(output_dir.parent),
            out_path_base=output_dir.name,
            space=mov_template,
            keep_dtype=False,
            compress=False,
            source_file='group/tpl-{0}_T1w.nii.gz'.format(mov_template)),
                             name='mov_avg_ds',
                             run_without_submitting=True)

        wf.connect([
            (inputnode, fssource, [(('participant_label', _sub_decorate),
                                    'subject_id')]),
            (inputnode, cifti_wf, [(('participant_label', _sub_decorate),
                                    'inputnode.subject_id')]),
            (pick_file, cifti_wf, [('out', 'inputnode.in_t1w')]),
            (pick_file, tonii, [('out', 'reslice_like')]),
            # Select DKT aparc
            (fssource, tonative, [(('aparc_aseg', _last), 'seg_file'),
                                  ('rawavg', 'template_file'),
                                  ('aseg', 'reg_header')]),
            (tonative, tonii, [('vol_label_file', 'in_file')]),
            (tonii, ref_aparc, [('out_file', 'input_image')]),
            (tonii, mov_aparc, [('out_file', 'input_image')]),
            (ref_norm, ref_aparc, [('composite_transform', 'transforms')]),
            (mov_norm, mov_aparc, [('composite_transform', 'transforms')]),
            (ref_buffer, ref_join_labels, [('fixed_image', 'atlas_image')]),
            (ref_aparc, ref_aparc_buffer, [('output_image', 'aparc')]),
            (ref_aparc_buffer, ref_join_labels,
             [('aparc', 'atlas_segmentation_image')]),
            (mov_buffer, mov_join_labels, [('moving_image', 'atlas_image')]),
            (mov_aparc, mov_aparc_buffer, [('output_image', 'aparc')]),
            (mov_aparc_buffer, mov_join_labels,
             [('aparc', 'atlas_segmentation_image')]),
            # Datasinks
            (ref_join_labels, ref_join_labels_ds, [('out_label_fusion',
                                                    'in_file')]),
            (ref_join_labels, ref_join_probs_ds,
             [('out_label_post_prob', 'in_file'),
              (('out_label_post_prob', _get_extra), 'extra_values')]),
            # (ref_join_labels, ref_join_voting_ds, [
            #     ('out_atlas_voting_weight_name_format', 'in_file')]),
            (mov_join_labels, mov_join_labels_ds, [('out_label_fusion',
                                                    'in_file')]),
            (mov_join_labels, mov_join_probs_ds,
             [('out_label_post_prob', 'in_file'),
              (('out_label_post_prob', _get_extra), 'extra_values')]),
            (pick_file, ref_aparc_ds, [('out', 'source_file')]),
            (ref_aparc, ref_aparc_ds, [('output_image', 'in_file')]),
            (pick_file, mov_aparc_ds, [('out', 'source_file')]),
            (mov_aparc, mov_aparc_ds, [('output_image', 'in_file')]),
            # Mapping ref surfaces
            (cifti_wf, gii2csv, [(('outputnode.surf_norm', _discard_inflated),
                                  'in_file')]),
            (gii2csv, ref_map_surf, [('out_file', 'input_file')]),
            (ref_norm, ref_map_surf, [(('inverse_composite_transform',
                                        _ensure_list), 'transforms')]),
            (ref_map_surf, ref_csv2gii, [('output_file', 'in_file')]),
            (cifti_wf, ref_csv2gii, [(('outputnode.surf_norm',
                                       _discard_inflated), 'gii_file')]),
            (pick_file, ref_surfs_ds, [('out', 'source_file')]),
            (ref_csv2gii, ref_surfs_ds, [('out_file', 'in_file'),
                                         (('out_file', _get_surf_extra),
                                          'extra_values')]),
            (ref_csv2gii, ref_surfs_buffer, [('out_file', 'surfaces')]),
            (ref_surfs_buffer, ref_surfs_unzip, [('surfaces', 'in_files')]),
            (ref_surfs_unzip, ref_ply, [('out_files', 'in_files')]),
            (ref_ply, ref_recon, [('out_file', 'in_file')]),
            (ref_recon, ref_avggii, [('out_file', 'in_file')]),
            (ref_surfs_unzip, ref_avggii, [('surf_keys', 'surf_key')]),
            (ref_avggii, ref_smooth, [('out_file', 'in_file')]),
            (ref_smooth, ref_avg_ds, [('surface', 'in_file'),
                                      (('surface', _get_surf_extra),
                                       'extra_values')]),

            # Mapping mov surfaces
            (gii2csv, mov_map_surf, [('out_file', 'input_file')]),
            (mov_norm, mov_map_surf, [(('inverse_composite_transform',
                                        _ensure_list), 'transforms')]),
            (mov_map_surf, mov_csv2gii, [('output_file', 'in_file')]),
            (cifti_wf, mov_csv2gii, [(('outputnode.surf_norm',
                                       _discard_inflated), 'gii_file')]),
            (pick_file, mov_surfs_ds, [('out', 'source_file')]),
            (mov_csv2gii, mov_surfs_ds, [('out_file', 'in_file'),
                                         (('out_file', _get_surf_extra),
                                          'extra_values')]),
            (mov_csv2gii, mov_surfs_buffer, [('out_file', 'surfaces')]),
            (mov_surfs_buffer, mov_surfs_unzip, [('surfaces', 'in_files')]),
            (mov_surfs_unzip, mov_ply, [('out_files', 'in_files')]),
            (mov_ply, mov_recon, [('out_file', 'in_file')]),
            (mov_recon, mov_avggii, [('out_file', 'in_file')]),
            (mov_surfs_unzip, mov_avggii, [('surf_keys', 'surf_key')]),
            (mov_avggii, mov_smooth, [('out_file', 'in_file')]),
            (mov_smooth, mov_avg_ds, [('surface', 'in_file'),
                                      (('surface', _get_surf_extra),
                                       'extra_values')]),
        ])

    return wf
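A hedged invocation sketch; `output_dir` and `fs_subjects_dir` must be path-like objects (the body calls `output_dir.parent` and `output_dir.name`), `omp_nthreads` must be set (it is formatted with `%d`), and every path is a placeholder.

from pathlib import Path

wf = init_templateflow_wf(
    bids_dir='/data/bids',
    output_dir=Path('/data/derivatives/tf'),
    participant_label=['01', '02'],
    mov_template='MNI152Lin',
    omp_nthreads=8,
    fs_subjects_dir=Path('/data/freesurfer'),
)
wf.run(plugin='MultiProc')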
コード例 #20
        })
    logb.inputs.inputspec.rawavg = t1_file
    logb.inputs.inputspec.t2_raw = t2_file
    logb.inputs.inputspec.aseg_presurf = os.path.join(subject_directory,
                                                      "FreeSurfer", "mri",
                                                      "aseg.presurf.mgz")
    if not os.path.isfile(logb.inputs.inputspec.aseg_presurf):
        print("could not find aseg")
        import sys
        sys.exit()
    logb.inputs.inputspec.hncma_atlas = hncma_atlas

    datasink = Node(DataSink(), name="DataSink")
    datasink.inputs.base_directory = recon_all.inputs.subjects_dir
    for hemisphere in ("lh", "rh"):
        fssource = Node(FreeSurferSource(), "{0}FSSource".format(hemisphere))
        fssource.inputs.hemi = hemisphere
        wf.connect([(recon_all, fssource, [("subject_id", "subject_id"),
                                           ("subjects_dir", "subjects_dir")]),
                    (fssource, logb,
                     [("white", "inputspec.{0}_white".format(hemisphere))])])

        for matter in ("gm", "wm"):
            wf.connect(
                logb,
                "outputspec.{0}_{1}_surf_file".format(hemisphere,
                                                      matter), datasink,
                "LOGISMOSB.FreeSurfer.@{0}_{1}".format(hemisphere, matter))

    wf.base_dir = base_dir
    wf.config['execution']['job_finished_timeout'] = 120
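    # Illustrative addition (not in the original snippet): once base_dir and
    # the execution config are set, the workflow would typically be run with
    # one of Nipype's plugins; plugin choice and process count are assumptions.
    wf.run(plugin="MultiProc", plugin_args={"n_procs": 4})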
Code example #21
import nipype.pipeline.engine as pe
from nipype.interfaces.io import DataGrabber, FreeSurferSource
from nipype.interfaces.utility import Merge

# create_AutoRecon1/2/3 are assumed to be defined elsewhere in this module.


def create_reconall(config):
    ar1_wf = create_AutoRecon1(config)
    ar2_wf, ar2_lh, ar2_rh = create_AutoRecon2(config)
    ar3_wf = create_AutoRecon3(config)

    # Connect workflows
    reconall = pe.Workflow(name="recon-all")
    if config['longitudinal']:
        # grab files from the initial single session run
        grab_inittp_files = pe.Node(DataGrabber(),
                                    name="Grab_Initial_Files",
                                    infields=['subject_id'],
                                    outfields=['inputvols', 'iscales', 'ltas'])
        grab_inittp_files.inputs.template = '*'
        grab_inittp_files.inputs.base_directory = config['subjects_dir']
        grab_inittp_files.inputs.field_template = dict(
            inputvols='%s/mri/orig/0*.mgz',
            iscales='%s/mri/orig/0*-iscale.txt',
            ltas='%s/mri/orig/0*.lta')

        grab_inittp_files.inputs.template_args = dict(
            inputvols=[['subject_id']],
            iscales=[['subject_id']],
            ltas=[['subject_id']])

        reconall.connect([(grab_inittp_files, ar1_wf,
                           [('inputvols', 'AutoRecon1_Inputs.in_T1s'),
                            ('iscales', 'AutoRecon1_Inputs.iscales'),
                            ('ltas', 'AutoRecon1_Inputs.ltas')])])

        merge_norms = pe.Node(Merge(len(config['timepoints'])),
                              name="Merge_Norms")
        merge_segs = pe.Node(Merge(len(config['timepoints'])),
                             name="Merge_Segmentations")
        merge_segs_noCC = pe.Node(Merge(len(config['timepoints'])),
                                  name="Merge_Segmentations_noCC")
        merge_template_ltas = pe.Node(Merge(len(config['timepoints'])),
                                      name="Merge_Template_ltas")

        for i, tp in enumerate(config['timepoints']):
            # datasource timepoint files
            tp_data_source = pe.Node(FreeSurferSource(),
                                     name="{0}_DataSource".format(tp))
            tp_data_source.inputs.subject_id = tp
            tp_data_source.inputs.subjects_dir = config['subjects_dir']

            tp_data_grabber = pe.Node(
                DataGrabber(),
                name="{0}_DataGrabber".format(tp),
                infields=['tp', 'long_template'],
                outfields=['subj_to_template_lta', 'seg_noCC', 'seg_presurf'])
            tp_data_grabber.inputs.template = '*'
            tp_data_grabber.inputs.base_directory = config['subjects_dir']
            tp_data_grabber.inputs.field_template = dict(
                subj_to_template_lta='%s/mri/transforms/%s_to_%s.lta',
                seg_noCC='%s/mri/aseg.auto_noCCseg.mgz',
                seg_presurf='%s/mri/aseg.presurf.mgz',
            )

            tp_data_grabber.inputs.template_args = dict(
                subj_to_template_lta=[['long_template', 'tp',
                                       'long_template']],
                seg_noCC=[['tp']],
                seg_presurf=[['tp']])
            # Without these inputs, the '%s' templates above cannot be filled.
            tp_data_grabber.inputs.tp = tp
            tp_data_grabber.inputs.long_template = config['long_template']

            reconall.connect([(tp_data_source, merge_norms,
                               [('norm', 'in{0}'.format(i + 1))]),
                              (tp_data_grabber, merge_segs,
                               [('seg_presurf', 'in{0}'.format(i + 1))]),
                              (tp_data_grabber, merge_segs_noCC,
                               [('seg_noCC', 'in{0}'.format(i + 1))]),
                              (tp_data_grabber, merge_template_ltas,
                               [('subj_to_template_lta',
                                 'in{0}'.format(i + 1))])])

            if tp == config['subject_id']:
                reconall.connect([
                    (tp_data_source, ar2_wf, [('wm',
                                               'AutoRecon2_Inputs.init_wm')]),
                    (tp_data_grabber, ar2_wf,
                     [('subj_to_template_lta',
                       'AutoRecon2_Inputs.subj_to_template_lta')]),
                    (tp_data_grabber, ar2_wf,
                     [('subj_to_template_lta',
                       'AutoRecon1_Inputs.subj_to_template_lta')])
                ])

        reconall.connect([
            (merge_norms, ar2_wf, [('out', 'AutoRecon2_Inputs.alltps_norms')]),
            (merge_segs, ar2_wf, [('out', 'AutoRecon2_Inputs.alltps_segs')]),
            (merge_template_ltas, ar2_wf,
             [('out', 'AutoRecon2_Inputs.alltps_to_template_ltas')]),
            (merge_segs_noCC, ar2_wf, [('out',
                                        'AutoRecon2_Inputs.alltps_segs_noCC')])
        ])

        # datasource files from the template run
        ds_template_files = pe.Node(FreeSurferSource(),
                                    name="Datasource_Template_Files")
        ds_template_files.inputs.subject_id = config['subject_id']
        ds_template_files.inputs.subjects_dir = config['subjects_dir']

        reconall.connect([
            (ds_template_files, ar1_wf,
             [('brainmask', 'AutoRecon1_Inputs.template_brainmask')]),
            (ds_template_files, ar2_wf,
             [('aseg', 'AutoRecon2_Inputs.template_aseg')]),
        ])

        # grab files from template run
        grab_template_files = pe.Node(
            DataGrabber(),
            name="Grab_Template_Files",
            infields=['subject_id', 'long_template'],
            outfields=[
                'template_talairach_xfm', 'template_talairach_lta',
                'template_talairach_m3z', 'template_label_intensities',
                'template_lh_white', 'template_rh_white', 'template_lh_pial',
                'template_rh_pial'
            ])
        grab_template_files.inputs.template = '*'
        grab_template_files.inputs.base_directory = config['subjects_dir']
        grab_template_files.inputs.subject_id = config['subject_id']
        grab_template_files.inputs.long_template = config['long_template']
        grab_template_files.inputs.field_template = dict(
            template_talairach_xfm='%s/mri/transforms/talairach.xfm',
            template_talairach_lta='%s/mri/transforms/talairach.lta',
            template_talairach_m3z='%s/mri/transforms/talairach.m3z',
            template_label_intensities=(
                '%s/mri/aseg.auto_noCCseg.label_intensities.txt'),
            template_lh_white='%s/surf/lh.white',
            template_rh_white='%s/surf/rh.white',
            template_lh_pial='%s/surf/lh.pial',
            template_rh_pial='%s/surf/rh.pial')

        grab_template_files.inputs.template_args = dict(
            template_talairach_xfm=[['long_template']],
            template_talairach_lta=[['long_template']],
            template_talairach_m3z=[['long_template']],
            template_label_intensities=[['long_template']],
            template_lh_white=[['long_template']],
            template_rh_white=[['long_template']],
            template_lh_pial=[['long_template']],
            template_rh_pial=[['long_template']])
        reconall.connect([
            (grab_template_files, ar1_wf,
             [('template_talairach_xfm',
               'AutoRecon1_Inputs.template_talairach_xfm')]),
            (grab_template_files, ar2_wf, [
                ('template_talairach_lta',
                 'AutoRecon2_Inputs.template_talairach_lta'),
                ('template_talairach_m3z',
                 'AutoRecon2_Inputs.template_talairach_m3z'),
                ('template_label_intensities',
                 'AutoRecon2_Inputs.template_label_intensities'),
                ('template_lh_white', 'AutoRecon2_Inputs.template_lh_white'),
                ('template_rh_white', 'AutoRecon2_Inputs.template_rh_white'),
                ('template_lh_pial', 'AutoRecon2_Inputs.template_lh_pial'),
                ('template_rh_pial', 'AutoRecon2_Inputs.template_rh_pial'),
            ])
        ])
        # end longitudinal data collection

    # connect autorecon 1 - 3
    reconall.connect([
        (ar1_wf, ar3_wf, [
            ('AutoRecon1_Inputs.subject_id', 'AutoRecon3_Inputs.subject_id'),
            ('AutoRecon1_Inputs.subjects_dir',
             'AutoRecon3_Inputs.subjects_dir'),
            ('Copy_Brainmask.out_file', 'AutoRecon3_Inputs.brainmask'),
            ('Copy_Transform.out_file', 'AutoRecon3_Inputs.transform'),
            ('Add_Transform_to_Header.out_file', 'AutoRecon3_Inputs.orig_mgz'),
            ('Robust_Template.out_file', 'AutoRecon3_Inputs.rawavg'),
        ]),
        (ar1_wf, ar2_wf, [
            ('Copy_Brainmask.out_file', 'AutoRecon2_Inputs.brainmask'),
            ('Copy_Transform.out_file', 'AutoRecon2_Inputs.transform'),
            ('Add_Transform_to_Header.out_file', 'AutoRecon2_Inputs.orig'),
            ('AutoRecon1_Inputs.subject_id', 'AutoRecon2_Inputs.subject_id'),
            ('AutoRecon1_Inputs.subjects_dir',
             'AutoRecon2_Inputs.subjects_dir'),
        ]),
        (ar2_lh, ar3_wf, [
            ('inflate2.out_file', 'AutoRecon3_Inputs.lh_inflated'),
            ('Smooth2.surface', 'AutoRecon3_Inputs.lh_smoothwm'),
            ('Make_Surfaces.out_white', 'AutoRecon3_Inputs.lh_white'),
            ('Make_Surfaces.out_cortex', 'AutoRecon3_Inputs.lh_cortex_label'),
            ('Make_Surfaces.out_area', 'AutoRecon3_Inputs.lh_area'),
            ('Make_Surfaces.out_curv', 'AutoRecon3_Inputs.lh_curv'),
            ('inflate2.out_sulc', 'AutoRecon3_Inputs.lh_sulc'),
            ('Extract_Main_Component.out_file',
             'AutoRecon3_Inputs.lh_orig_nofix'),
            ('Remove_Intersection.out_file', 'AutoRecon3_Inputs.lh_orig'),
            ('Curvature1.out_mean', 'AutoRecon3_Inputs.lh_white_H'),
            ('Curvature1.out_gauss', 'AutoRecon3_Inputs.lh_white_K'),
        ]),
        (ar2_rh, ar3_wf, [
            ('inflate2.out_file', 'AutoRecon3_Inputs.rh_inflated'),
            ('Smooth2.surface', 'AutoRecon3_Inputs.rh_smoothwm'),
            ('Make_Surfaces.out_white', 'AutoRecon3_Inputs.rh_white'),
            ('Make_Surfaces.out_cortex', 'AutoRecon3_Inputs.rh_cortex_label'),
            ('Make_Surfaces.out_area', 'AutoRecon3_Inputs.rh_area'),
            ('Make_Surfaces.out_curv', 'AutoRecon3_Inputs.rh_curv'),
            ('inflate2.out_sulc', 'AutoRecon3_Inputs.rh_sulc'),
            ('Extract_Main_Component.out_file',
             'AutoRecon3_Inputs.rh_orig_nofix'),
            ('Remove_Intersection.out_file', 'AutoRecon3_Inputs.rh_orig'),
            ('Curvature1.out_mean', 'AutoRecon3_Inputs.rh_white_H'),
            ('Curvature1.out_gauss', 'AutoRecon3_Inputs.rh_white_K'),
        ]),
        (ar2_wf, ar3_wf, [
            ('Copy_CCSegmentation.out_file', 'AutoRecon3_Inputs.aseg_presurf'),
            ('Mask_Brain_Final_Surface.out_file',
             'AutoRecon3_Inputs.brain_finalsurfs'),
            ('MRI_Pretess.out_file', 'AutoRecon3_Inputs.wm'),
            ('Fill.out_file', 'AutoRecon3_Inputs.filled'),
            ('CA_Normalize.out_file', 'AutoRecon3_Inputs.norm'),
        ]),
    ])

    return reconall
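
# A hypothetical invocation of create_reconall for a longitudinal run. The
# config mapping is not shown in this excerpt; only the keys read in the
# function above are sketched, with placeholder paths and IDs, and the
# create_AutoRecon1-3 builders presumably read further keys.
config = {
    'longitudinal': True,
    'subjects_dir': '/data/freesurfer_subjects',
    'subject_id': 'sub01_template',
    'long_template': 'sub01_template',
    'timepoints': ['sub01_tp1', 'sub01_tp2'],
}
reconall_wf = create_reconall(config)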
Code example #22
def normalize_workflow(c):
    import nipype.pipeline.engine as pe
    import nipype.interfaces.utility as util
    from nipype.interfaces.io import FreeSurferSource
    import nipype.interfaces.io as nio
    if not c.do_anatomical_only:
        norm = get_full_norm_workflow()
    else:
        norm = get_struct_norm_workflow()

    fssource = pe.Node(interface=FreeSurferSource(), name='fssource')
    fssource.inputs.subjects_dir = c.surf_dir

    infosource = pe.Node(util.IdentityInterface(fields=['subject_id']),
                         name='subject_names')
    infosource.iterables = ('subject_id', c.subjects)

    infofwhm = pe.Node(util.IdentityInterface(fields=['fwhm']), name='fwhm')
    infofwhm.iterables = ('fwhm', c.fwhm)

    inputspec = norm.get_node('inputspec')

    norm.connect(infosource, 'subject_id', fssource, 'subject_id')
    norm.connect(fssource, ('aparc_aseg', pickfirst), inputspec,
                 'segmentation')
    norm.connect(fssource, 'orig', inputspec, 'brain')

    if not c.do_anatomical_only:
        datagrab = func_datagrabber(c)
        norm.connect(infosource, 'subject_id', datagrab, 'subject_id')
        norm.connect(infofwhm, 'fwhm', datagrab, 'fwhm')
        norm.connect(datagrab, 'fsl_mat', inputspec, 'out_fsl_file')
        norm.connect(datagrab, 'inputs', inputspec, 'moving_image')
        norm.connect(datagrab, 'meanfunc', inputspec, 'mean_func')

    norm.inputs.inputspec.template_file = c.norm_template

    sinkd = pe.Node(nio.DataSink(), name='sinkd')
    sinkd.inputs.base_directory = c.sink_dir

    outputspec = norm.get_node('outputspec')
    norm.connect(infosource, 'subject_id', sinkd, 'container')
    norm.connect(outputspec, 'warped_brain', sinkd, 'smri.warped_brain')
    norm.connect(outputspec, 'warp_field', sinkd, 'smri.warped_field')
    norm.connect(outputspec, 'affine_transformation', sinkd,
                 'smri.affine_transformation')
    norm.connect(outputspec, 'inverse_warp', sinkd, 'smri.inverse_warp')
    norm.connect(outputspec, 'unwarped_brain', sinkd, 'smri.unwarped_brain')
    norm.connect(infosource, ('subject_id', getsubstitutions), sinkd,
                 'substitutions')

    if not c.do_anatomical_only:
        norm.connect(outputspec, 'warped_image', sinkd, 'smri.warped_image')

    if c.do_segment:
        seg = warp_segments()
        norm.connect(infosource, 'subject_id', seg, 'inputspec.subject_id')
        seg.inputs.inputspec.subjects_dir = c.surf_dir
        norm.connect(outputspec, 'warp_field', seg, 'inputspec.warp_file')
        norm.connect(outputspec, 'affine_transformation', seg,
                     "inputspec.ants_affine")
        norm.connect(inputspec, 'template_file', seg, "inputspec.warped_brain")
        norm.connect(seg, "outputspec.out_files", sinkd, "smri.segments")

    return norm
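
# A hypothetical stand-in for the configuration object `c`. The attribute
# names match exactly those read in normalize_workflow above; the real
# project presumably supplies a richer configuration class.
from types import SimpleNamespace

c = SimpleNamespace(
    do_anatomical_only=True,   # build only the structural branch
    do_segment=False,          # skip warping of FreeSurfer segments
    surf_dir='/data/freesurfer_subjects',
    subjects=['sub01', 'sub02'],
    fwhm=[0, 6],               # smoothing kernels iterated over
    norm_template='/templates/MNI152_T1_2mm_brain.nii.gz',
    sink_dir='/data/outputs',
)
norm_wf = normalize_workflow(c)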
Code example #23
def QA_workflow(QAc, c=foo, name='QA'):
    """ Workflow that generates a Quality Assurance Report
    
    Parameters
    ----------
    name : name of workflow
    
    Inputs
    ------
    inputspec.subject_id : Subject id
    inputspec.config_params : configuration parameters to print in PDF (in the form of a 2D List)
    inputspec.in_file : original functional run
    inputspec.art_file : art outlier file
    inputspec.reg_file : bbregister file
    inputspec.tsnr_detrended : detrended image
    inputspec.tsnr : temporal signal-to-noise ratio image
    inputspec.tsnr_stddev : standard deviation image
    inputspec.ADnorm : norm components file from art
    inputspec.TR : repetition time of acquisition
    inputspec.sd : freesurfer subjects directory
    
    """
    import os

    import nipype.pipeline.engine as pe
    import nipype.interfaces.utility as util

    from nipype.interfaces.freesurfer import ApplyVolTransform
    from nipype.interfaces import freesurfer as fs
    from nipype.interfaces.io import FreeSurferSource

    from ...scripts.QA_utils import (plot_ADnorm, tsdiffana, tsnr_roi,
                                     combine_table, reduce_table, art_output,
                                     plot_motion, plot_ribbon, plot_anat,
                                     overlay_new, overlay_dB,
                                     spectrum_ts_table)

    from ......utils.reportsink.io import ReportSink
    # Define Workflow

    workflow = pe.Workflow(name=name)

    inputspec = pe.Node(interface=util.IdentityInterface(fields=[
        'subject_id', 'config_params', 'in_file', 'art_file', 'motion_plots',
        'reg_file', 'tsnr', 'tsnr_detrended', 'tsnr_stddev', 'ADnorm', 'TR',
        'sd'
    ]),
                        name='inputspec')

    infosource = pe.Node(util.IdentityInterface(fields=['subject_id']),
                         name='subject_names')
    if QAc.test_mode:
        infosource.iterables = ('subject_id', [QAc.subjects[0]])
    else:
        infosource.iterables = ('subject_id', QAc.subjects)

    datagrabber = preproc_datagrabber(c)

    datagrabber.inputs.node_type = c.motion_correct_node

    orig_datagrabber = get_dataflow(c)

    workflow.connect(infosource, 'subject_id', datagrabber, 'subject_id')

    workflow.connect(infosource, 'subject_id', orig_datagrabber, 'subject_id')

    workflow.connect(orig_datagrabber, 'func', inputspec, 'in_file')
    workflow.connect(infosource, 'subject_id', inputspec, 'subject_id')

    workflow.connect(datagrabber, ('outlier_files', sort), inputspec,
                     'art_file')
    workflow.connect(datagrabber, ('reg_file', sort), inputspec, 'reg_file')
    workflow.connect(datagrabber, ('tsnr', sort), inputspec, 'tsnr')
    workflow.connect(datagrabber, ('tsnr_stddev', sort), inputspec,
                     'tsnr_stddev')
    workflow.connect(datagrabber, ('tsnr_detrended', sort), inputspec,
                     'tsnr_detrended')
    workflow.connect(datagrabber, ('art_norm', sort), inputspec, 'ADnorm')

    if not c.use_metadata:
        inputspec.inputs.TR = c.TR
    else:
        from .....base import load_json
        metadata_file = os.path.join(c.sink_dir, QAc.subjects[0],
                                     'preproc/metadata/metadata.json')
        meta_tr = load_json(metadata_file)["tr"]
        inputspec.inputs.TR = meta_tr

    inputspec.inputs.sd = c.surf_dir

    # Define Nodes

    plot_m = pe.MapNode(util.Function(input_names=['motion_parameters'],
                                      output_names=['fname_t', 'fname_r'],
                                      function=plot_motion),
                        name="motion_plots",
                        iterfield=['motion_parameters'])

    workflow.connect(datagrabber, ('motion_parameters', sort), plot_m,
                     'motion_parameters')
    #workflow.connect(plot_m, 'fname',inputspec,'motion_plots')

    tsdiff = pe.MapNode(util.Function(input_names=['img'],
                                      output_names=['out_file'],
                                      function=tsdiffana),
                        name='tsdiffana',
                        iterfield=["img"])

    art_info = pe.MapNode(
        util.Function(input_names=['art_file', 'intensity_file', 'stats_file'],
                      output_names=['table', 'out', 'intensity_plot'],
                      function=art_output),
        name='art_output',
        iterfield=["art_file", "intensity_file", "stats_file"])

    fssource = pe.Node(interface=FreeSurferSource(), name='fssource')

    plotribbon = pe.Node(util.Function(input_names=['Brain'],
                                       output_names=['images'],
                                       function=plot_ribbon),
                         name="plot_ribbon")

    workflow.connect(fssource, 'ribbon', plotribbon, 'Brain')

    plotanat = pe.Node(util.Function(input_names=['brain'],
                                     output_names=['images'],
                                     function=plot_anat),
                       name="plot_anat")
    plotmask = plotanat.clone('plot_mask')
    workflow.connect(datagrabber, 'mask', plotmask, 'brain')
    roidevplot = tsnr_roi(plot=False,
                          name='tsnr_stddev_roi',
                          roi=['all'],
                          onsets=False)

    if not c.use_metadata:
        roidevplot.inputs.inputspec.TR = c.TR
    else:
        from .....base import load_json
        metadata_file = os.path.join(c.sink_dir, QAc.subjects[0],
                                     'preproc/metadata/metadata.json')
        meta_tr = load_json(metadata_file)["tr"]
        roidevplot.inputs.inputspec.TR = meta_tr

    roisnrplot = tsnr_roi(plot=False,
                          name='SNR_roi',
                          roi=['all'],
                          onsets=False)

    if not c.use_metadata:
        roisnrplot.inputs.inputspec.TR = c.TR
    else:
        from .....base import load_json
        metadata_file = os.path.join(c.sink_dir, QAc.subjects[0],
                                     'preproc/metadata/metadata.json')
        meta_tr = load_json(metadata_file)["tr"]
        roisnrplot.inputs.inputspec.TR = meta_tr

    workflow.connect(fssource, ('aparc_aseg', pickfirst), roisnrplot,
                     'inputspec.aparc_aseg')
    workflow.connect(fssource, ('aparc_aseg', pickfirst), roidevplot,
                     'inputspec.aparc_aseg')

    workflow.connect(infosource, 'subject_id', roidevplot, 'inputspec.subject')
    workflow.connect(infosource, 'subject_id', roisnrplot, 'inputspec.subject')

    tablecombine = pe.MapNode(util.Function(
        input_names=['roidev', 'roisnr', 'imagetable'],
        output_names=['imagetable'],
        function=combine_table),
                              name='combinetable',
                              iterfield=['roidev', 'roisnr', 'imagetable'])

    tablereduce = pe.MapNode(util.Function(
        input_names=['imagetable', 'custom_LUT_file'],
        output_names=['reduced_imagetable'],
        function=reduce_table),
                             name='reducetable',
                             iterfield=['imagetable'])

    adnormplot = pe.MapNode(util.Function(
        input_names=['ADnorm', 'TR', 'norm_thresh', 'out'],
        output_names=['plot'],
        function=plot_ADnorm),
                            name='ADnormplot',
                            iterfield=['ADnorm', 'out'])
    adnormplot.inputs.norm_thresh = c.norm_thresh
    workflow.connect(art_info, 'out', adnormplot, 'out')

    convert = pe.Node(interface=fs.MRIConvert(), name='converter')

    voltransform = pe.MapNode(interface=ApplyVolTransform(),
                              name='register',
                              iterfield=['source_file'])

    overlaynew = pe.MapNode(util.Function(
        input_names=['stat_image', 'background_image', 'threshold', "dB"],
        output_names=['fnames'],
        function=overlay_dB),
                            name='overlay_new',
                            iterfield=['stat_image'])
    overlaynew.inputs.dB = False
    overlaynew.inputs.threshold = 20

    overlaymask = pe.MapNode(util.Function(
        input_names=['stat_image', 'background_image', 'threshold'],
        output_names=['fnames'],
        function=overlay_new),
                             name='overlay_mask',
                             iterfield=['stat_image'])
    overlaymask.inputs.threshold = 0.5
    workflow.connect(convert, 'out_file', overlaymask, 'background_image')
    overlaymask2 = overlaymask.clone('acompcor_image')
    workflow.connect(convert, 'out_file', overlaymask2, 'background_image')
    workflow.connect(datagrabber, 'tcompcor', overlaymask, 'stat_image')
    workflow.connect(datagrabber, 'acompcor', overlaymask2, 'stat_image')

    workflow.connect(datagrabber, ('mean_image', sort), plotanat, 'brain')

    ts_and_spectra = spectrum_ts_table()

    timeseries_segstats = tsnr_roi(plot=False,
                                   name='timeseries_roi',
                                   roi=['all'],
                                   onsets=False)
    workflow.connect(inputspec, 'tsnr_detrended', timeseries_segstats,
                     'inputspec.tsnr_file')
    workflow.connect(inputspec, 'reg_file', timeseries_segstats,
                     'inputspec.reg_file')
    workflow.connect(infosource, 'subject_id', timeseries_segstats,
                     'inputspec.subject')
    workflow.connect(fssource, ('aparc_aseg', pickfirst), timeseries_segstats,
                     'inputspec.aparc_aseg')

    if not c.use_metadata:
        timeseries_segstats.inputs.inputspec.TR = c.TR
        ts_and_spectra.inputs.inputspec.tr = c.TR
    else:
        from .....base import load_json
        metadata_file = os.path.join(c.sink_dir, QAc.subjects[0],
                                     'preproc/metadata/metadata.json')
        meta_tr = load_json(metadata_file)["tr"]
        timeseries_segstats.inputs.inputspec.TR = meta_tr
        ts_and_spectra.inputs.inputspec.tr = meta_tr

    workflow.connect(timeseries_segstats, 'outputspec.roi_file',
                     ts_and_spectra, 'inputspec.stats_file')

    write_rep = pe.Node(interface=ReportSink(orderfields=[
        'Introduction', 'in_file', 'config_params', 'Art_Detect',
        'Global_Intensity', 'Mean_Functional', 'Ribbon', 'Mask',
        'motion_plot_translations', 'motion_plot_rotations', 'tsdiffana',
        'ADnorm', 'A_CompCor', 'T_CompCor', 'TSNR_Images', 'tsnr_roi_table'
    ]),
                        name='report_sink')
    write_rep.inputs.Introduction = "Quality Assurance Report for fMRI preprocessing."
    write_rep.inputs.base_directory = QAc.sink_dir
    write_rep.inputs.report_name = "Preprocessing_Report"
    write_rep.inputs.json_sink = QAc.json_sink
    workflow.connect(infosource, 'subject_id', write_rep, 'container')
    workflow.connect(plotanat, 'images', write_rep, "Mean_Functional")
    write_rep.inputs.table_as_para = False
    # Define Inputs

    convert.inputs.out_type = 'niigz'
    convert.inputs.in_type = 'mgz'

    # Define Connections

    workflow.connect(inputspec, 'TR', adnormplot, 'TR')
    workflow.connect(inputspec, 'subject_id', fssource, 'subject_id')
    workflow.connect(inputspec, 'sd', fssource, 'subjects_dir')
    workflow.connect(inputspec, 'in_file', write_rep, 'in_file')
    workflow.connect(datagrabber, 'art_intensity', art_info, 'intensity_file')
    workflow.connect(datagrabber, ('art_stats', sort), art_info, 'stats_file')
    workflow.connect(inputspec, 'art_file', art_info, 'art_file')
    workflow.connect(art_info, ('table', to1table), write_rep, 'Art_Detect')
    workflow.connect(ts_and_spectra, 'outputspec.imagetable', tablecombine,
                     'imagetable')
    workflow.connect(art_info, 'intensity_plot', write_rep, 'Global_Intensity')
    workflow.connect(plot_m, 'fname_t', write_rep, 'motion_plot_translations')
    workflow.connect(plot_m, 'fname_r', write_rep, 'motion_plot_rotations')
    workflow.connect(inputspec, 'in_file', tsdiff, 'img')
    workflow.connect(tsdiff, "out_file", write_rep, "tsdiffana")
    workflow.connect(inputspec, ('config_params', totable), write_rep,
                     'config_params')
    workflow.connect(inputspec, 'reg_file', roidevplot, 'inputspec.reg_file')
    workflow.connect(inputspec, 'tsnr_stddev', roidevplot,
                     'inputspec.tsnr_file')
    workflow.connect(roidevplot, 'outputspec.roi_table', tablecombine,
                     'roidev')
    workflow.connect(inputspec, 'reg_file', roisnrplot, 'inputspec.reg_file')
    workflow.connect(inputspec, 'tsnr', roisnrplot, 'inputspec.tsnr_file')
    workflow.connect(roisnrplot, 'outputspec.roi_table', tablecombine,
                     'roisnr')

    if QAc.use_custom_ROI_list_file:
        workflow.connect(tablecombine, 'imagetable', tablereduce, 'imagetable')
        tablereduce.inputs.custom_LUT_file = QAc.custom_ROI_list_file
        workflow.connect(tablereduce, ('reduced_imagetable', to1table),
                         write_rep, 'tsnr_roi_table')
    else:
        workflow.connect(tablecombine, ('imagetable', to1table), write_rep,
                         'tsnr_roi_table')

    workflow.connect(inputspec, 'ADnorm', adnormplot, 'ADnorm')
    workflow.connect(adnormplot, 'plot', write_rep, 'ADnorm')
    workflow.connect(fssource, 'orig', convert, 'in_file')
    workflow.connect(convert, 'out_file', voltransform, 'target_file')
    workflow.connect(inputspec, 'reg_file', voltransform, 'reg_file')
    workflow.connect(inputspec, 'tsnr', voltransform, 'source_file')
    workflow.connect(plotribbon, 'images', write_rep, 'Ribbon')
    workflow.connect(voltransform, 'transformed_file', overlaynew,
                     'stat_image')
    workflow.connect(convert, 'out_file', overlaynew, 'background_image')

    workflow.connect(overlaynew, 'fnames', write_rep, 'TSNR_Images')
    workflow.connect(overlaymask, 'fnames', write_rep, 'T_CompCor')
    workflow.connect(overlaymask2, 'fnames', write_rep, 'A_CompCor')
    workflow.connect(plotmask, 'images', write_rep, 'Mask')

    workflow.write_graph()
    return workflow
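
# The helper functions referenced above (sort, pickfirst, and friends) are
# defined elsewhere in the package; these are plausible minimal versions,
# shown only to make the connections readable.
def sort(in_files):
    # Return file lists in a deterministic order so that iterations of the
    # MapNodes stay aligned across the different datagrabber outputs.
    if isinstance(in_files, list):
        return sorted(in_files)
    return in_files


def pickfirst(files):
    # FreeSurferSource outputs such as aparc_aseg may be lists; use the
    # first entry.
    if isinstance(files, list):
        return files[0]
    return files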
Code example #24
from os import listdir

from nipype import MapNode, Node
from nipype.interfaces.freesurfer import FSCommand
from nipype.interfaces.io import DataSink, FreeSurferSource

# project_home and fs_subjdir are defined earlier in the original script.
template_proc = project_home + '/proc/template'
#subject_info = project_home + '/misc/subjects.csv' 
#template_sub = ['011-T1']
template_sub = listdir(fs_subjdir)

#set default FreeSurfer subjects dir
FSCommand.set_default_subjects_dir(fs_subjdir)


# In[2]:


######### File handling #########

#Pass in list to freesurfer source node (subs) 
fs_source = MapNode(FreeSurferSource(subjects_dir = fs_subjdir), 
                    name = 'fs_source', iterfield = ['subject_id'])
fs_source.inputs.subject_id = template_sub

#set up datasink
datasink = Node(DataSink(base_directory = template_proc),
                name = 'datasink')
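
# The source and sink nodes are never joined in this excerpt; a minimal
# sketch of a connection, assuming the workflow name and the DataSink folder
# (neither is from the original script). Because fs_source is a MapNode,
# its 'T1' output is a list with one entry per subject.
from nipype import Workflow

template_wf = Workflow(name='template_prep', base_dir=template_proc)
template_wf.connect(fs_source, 'T1', datasink, 'template_T1s')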


# In[3]:


######### Template creation functions #########
def make3DTemplate(subject_T1s, num_proc, output_prefix):
    from nipype import config, logging
    config.enable_debug_mode()
Code example #25
def create_reg_workflow(name='registration'):
    """Create a FEAT preprocessing workflow together with freesurfer

    Parameters
    ----------

        name : name of workflow (default: 'registration')

    Inputs::

        inputspec.source_files : files (filename or list of filenames to register)
        inputspec.mean_image : reference image to use
        inputspec.subject_id : FreeSurfer subject id
        inputspec.subjects_dir : FreeSurfer subjects directory
        inputspec.target_image : registration target

    Outputs::

        outputspec.func2anat_transform : FLIRT-format transform (from BBRegister)
        outputspec.anat2target_transform : ANTs composite transform
        outputspec.transformed_mean : mean image in target space
        outputspec.segmentation_files : tissue segmentations in functional space
    """

    register = Workflow(name=name)

    inputnode = Node(interface=IdentityInterface(fields=[
        'source_files', 'mean_image', 'subject_id', 'subjects_dir',
        'target_image'
    ]),
                     name='inputspec')

    outputnode = Node(interface=IdentityInterface(fields=[
        'func2anat_transform', 'out_reg_file', 'anat2target_transform',
        'transforms', 'transformed_mean', 'segmentation_files', 'anat2target',
        'aparc'
    ]),
                      name='outputspec')

    # Get the subject's freesurfer source directory
    fssource = Node(FreeSurferSource(), name='fssource')
    fssource.run_without_submitting = True
    register.connect(inputnode, 'subject_id', fssource, 'subject_id')
    register.connect(inputnode, 'subjects_dir', fssource, 'subjects_dir')

    convert = Node(freesurfer.MRIConvert(out_type='nii'), name="convert")
    register.connect(fssource, 'T1', convert, 'in_file')

    # Coregister the median to the surface
    bbregister = Node(freesurfer.BBRegister(), name='bbregister')
    bbregister.inputs.init = 'fsl'
    bbregister.inputs.contrast_type = 't2'
    bbregister.inputs.out_fsl_file = True
    bbregister.inputs.epi_mask = True
    register.connect(inputnode, 'subject_id', bbregister, 'subject_id')
    register.connect(inputnode, 'mean_image', bbregister, 'source_file')
    register.connect(inputnode, 'subjects_dir', bbregister, 'subjects_dir')
    """
    Estimate the tissue classes from the anatomical image. But use spm's segment
    as FSL appears to be breaking.
    """

    stripper = Node(fsl.BET(), name='stripper')
    register.connect(convert, 'out_file', stripper, 'in_file')
    fast = Node(fsl.FAST(), name='fast')
    register.connect(stripper, 'out_file', fast, 'in_files')
    """
    Binarize the segmentation
    """

    binarize = MapNode(fsl.ImageMaths(op_string='-nan -thr 0.9 -ero -bin'),
                       iterfield=['in_file'],
                       name='binarize')
    register.connect(fast, 'partial_volume_files', binarize, 'in_file')
    """
    Apply inverse transform to take segmentations to functional space
    """

    applyxfm = MapNode(freesurfer.ApplyVolTransform(inverse=True,
                                                    interp='nearest'),
                       iterfield=['target_file'],
                       name='inverse_transform')
    register.connect(inputnode, 'subjects_dir', applyxfm, 'subjects_dir')
    register.connect(bbregister, 'out_reg_file', applyxfm, 'reg_file')
    register.connect(binarize, 'out_file', applyxfm, 'target_file')
    register.connect(inputnode, 'mean_image', applyxfm, 'source_file')
    """
    Apply inverse transform to aparc file
    """

    aparcxfm = Node(freesurfer.ApplyVolTransform(inverse=True,
                                                 interp='nearest'),
                    name='aparc_inverse_transform')
    register.connect(inputnode, 'subjects_dir', aparcxfm, 'subjects_dir')
    register.connect(bbregister, 'out_reg_file', aparcxfm, 'reg_file')
    register.connect(fssource, ('aparc_aseg', get_aparc_aseg), aparcxfm,
                     'target_file')
    register.connect(inputnode, 'mean_image', aparcxfm, 'source_file')
    """
    Convert the BBRegister transformation to ANTS ITK format
    """

    convert2itk = Node(C3dAffineTool(), name='convert2itk')
    convert2itk.inputs.fsl2ras = True
    convert2itk.inputs.itk_transform = True
    register.connect(bbregister, 'out_fsl_file', convert2itk, 'transform_file')
    register.connect(inputnode, 'mean_image', convert2itk, 'source_file')
    register.connect(stripper, 'out_file', convert2itk, 'reference_file')
    """
    Compute registration between the subject's structural and MNI template
    This is currently set to perform a very quick registration. However, the
    registration can be made significantly more accurate for cortical
    structures by increasing the number of iterations
    All parameters are set using the example from:
    #https://github.com/stnava/ANTs/blob/master/Scripts/newAntsExample.sh
    """

    reg = Node(ants.Registration(), name='antsRegister')
    reg.inputs.output_transform_prefix = "output_"
    reg.inputs.transforms = ['Rigid', 'Affine', 'SyN']
    reg.inputs.transform_parameters = [(0.1, ), (0.1, ), (0.2, 3.0, 0.0)]
    reg.inputs.number_of_iterations = ([[10000, 11110, 11110]] * 2 +
                                       [[100, 30, 20]])
    reg.inputs.dimension = 3
    reg.inputs.write_composite_transform = True
    reg.inputs.collapse_output_transforms = True
    reg.inputs.initial_moving_transform_com = True
    reg.inputs.metric = ['Mattes'] * 2 + [['Mattes', 'CC']]
    reg.inputs.metric_weight = [1] * 2 + [[0.5, 0.5]]
    reg.inputs.radius_or_number_of_bins = [32] * 2 + [[32, 4]]
    reg.inputs.sampling_strategy = ['Regular'] * 2 + [[None, None]]
    reg.inputs.sampling_percentage = [0.3] * 2 + [[None, None]]
    reg.inputs.convergence_threshold = [1.e-8] * 2 + [-0.01]
    reg.inputs.convergence_window_size = [20] * 2 + [5]
    reg.inputs.smoothing_sigmas = [[4, 2, 1]] * 2 + [[1, 0.5, 0]]
    reg.inputs.sigma_units = ['vox'] * 3
    reg.inputs.shrink_factors = [[3, 2, 1]] * 2 + [[4, 2, 1]]
    reg.inputs.use_estimate_learning_rate_once = [True] * 3
    reg.inputs.use_histogram_matching = [False] * 2 + [True]
    reg.inputs.winsorize_lower_quantile = 0.005
    reg.inputs.winsorize_upper_quantile = 0.995
    reg.inputs.float = True
    reg.inputs.output_warped_image = 'output_warped_image.nii.gz'
    reg.inputs.num_threads = 4
    reg.plugin_args = {'qsub_args': '-l nodes=1:ppn=4'}
    register.connect(stripper, 'out_file', reg, 'moving_image')
    register.connect(inputnode, 'target_image', reg, 'fixed_image')
    """
    Concatenate the affine and ants transforms into a list
    """

    merge = Node(Merge(2), name='mergexfm')
    register.connect(convert2itk, 'itk_transform', merge, 'in2')
    register.connect(reg, 'composite_transform', merge, 'in1')
    """
    Transform the mean image. First to anatomical and then to target
    """

    warpmean = Node(ants.ApplyTransforms(), name='warpmean')
    warpmean.inputs.input_image_type = 3
    warpmean.inputs.interpolation = 'Linear'
    warpmean.inputs.invert_transform_flags = [False, False]
    warpmean.inputs.terminal_output = 'file'
    warpmean.inputs.args = '--float'
    warpmean.inputs.num_threads = 4

    register.connect(inputnode, 'target_image', warpmean, 'reference_image')
    register.connect(inputnode, 'mean_image', warpmean, 'input_image')
    register.connect(merge, 'out', warpmean, 'transforms')
    """
    Assign all the output files
    """

    register.connect(reg, 'warped_image', outputnode, 'anat2target')
    register.connect(warpmean, 'output_image', outputnode, 'transformed_mean')
    register.connect(applyxfm, 'transformed_file', outputnode,
                     'segmentation_files')
    register.connect(aparcxfm, 'transformed_file', outputnode, 'aparc')
    register.connect(bbregister, 'out_fsl_file', outputnode,
                     'func2anat_transform')
    register.connect(bbregister, 'out_reg_file', outputnode, 'out_reg_file')
    register.connect(reg, 'composite_transform', outputnode,
                     'anat2target_transform')
    register.connect(merge, 'out', outputnode, 'transforms')

    return register
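
# get_aparc_aseg is referenced above but defined elsewhere in the original
# example; a minimal version simply picks the aparc+aseg volume out of the
# FreeSurferSource output.
def get_aparc_aseg(files):
    if not isinstance(files, list):
        files = [files]
    for fname in files:
        if 'aparc+aseg' in fname:
            return fname
    raise ValueError('aparc+aseg.mgz not found')


# Hypothetical wiring of the workflow inputs; all paths are placeholders.
reg_wf = create_reg_workflow()
reg_wf.inputs.inputspec.subject_id = 'sub01'
reg_wf.inputs.inputspec.subjects_dir = '/data/freesurfer_subjects'
reg_wf.inputs.inputspec.mean_image = 'mean_func.nii.gz'
reg_wf.inputs.inputspec.target_image = 'MNI152_T1_2mm_brain.nii.gz'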
Code example #26
working_dir = 'workingdir_inverse_transform_ROIs_ALPACA'  # name of norm working directory

##### Create & specify nodes to be used and connected during the normalization pipeline #####

# Concatenate BBRegister's and ANTS' transforms into a list
merge = Node(Merge(2), name='mergexfm')

# Binarize node - binarizes mask again after transformation
binarize_post2ant = MapNode(Binarize(min=0.1),
                            iterfield=['in_file'],
                            name='binarize_post2ant')

binarize_pt2pp = binarize_post2ant.clone('binarize_pt2pp')

# FreeSurferSource - Data grabber specific for FreeSurfer data
fssource_lh = Node(FreeSurferSource(subjects_dir=fs_dir, hemi='lh'),
                   run_without_submitting=True,
                   name='fssource_lh')

fssource_rh = Node(FreeSurferSource(subjects_dir=fs_dir, hemi='rh'),
                   run_without_submitting=True,
                   name='fssource_rh')

# Transform the volumetric ROIs to the target space
inverse_transform_mni_volume_post2ant = MapNode(
    ApplyTransforms(args='--float',
                    input_image_type=3,
                    interpolation='Linear',
                    invert_transform_flags=[False, False],
                    num_threads=1,
                    terminal_output='file'),