Example No. 1
def nipype_mri_vol2surf(t1path, hemi):
    splitpath = t1path.split(os.sep)
    fsdir = os.path.join(BIDS_DATA_DIR, "derivatives",
                         "freesurfer_{0}".format(splitpath[-3]))
    outdir = os.path.join(BIDS_DATA_DIR, "derivatives",
                          "freesurfer_projection_{0}".format(splitpath[-3]),
                          splitpath[-4])
    funcpath = os.path.join(
        BIDS_DATA_DIR, "derivatives", "spmpreproc_{0}".format(splitpath[-3]),
        splitpath[-4],
        "wrr{0}_{1}_task-localizer_bold.nii.gz".format(splitpath[-4],
                                                       splitpath[-3]))
    assert os.path.isfile(funcpath), funcpath
    basename = os.path.basename(funcpath).replace(".nii.gz", "")
    regfile = os.path.join(outdir, basename + ".reg.dat")
    assert os.path.isfile(regfile), regfile
    #if not os.path.isfile(regfile):
    #    open(regfile, "wt").close()
    vol2surf = freesurfer.SampleToSurface(
        hemi=hemi,
        source_file=funcpath,
        reg_file=regfile,
        sampling_method="average",
        sampling_units="frac",
        sampling_range=(0.2, 0.8, 0.1),
        out_type="gii",
        out_file=os.path.join(outdir, basename + ".{0}.gii".format(hemi)),
        subjects_dir=fsdir)
    print(vol2surf.cmdline)
    if PROCESS:
        vol2surf.run()
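
A minimal call sketch for the helper above. BIDS_DATA_DIR, PROCESS, and the paths are assumptions standing in for the script's module-level globals, and the asserts expect the derivatives tree to already exist:

import os
from nipype.interfaces import freesurfer

BIDS_DATA_DIR = "/data/bids"  # hypothetical BIDS root
PROCESS = False               # only print the mri_vol2surf command line

# splitpath[-4] is the subject and splitpath[-3] the session, so the T1 path
# must look like <root>/<subject>/<session>/anat/<file>:
nipype_mri_vol2surf("/data/bids/sub-01/ses-01/anat/T1w.nii.gz", hemi="lh")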
Example No. 2
def create_surface_projection_workflow(name="surfproj", exp_info=None):
    """Project the group mask and thresholded zstat file onto the surface."""
    if exp_info is None:
        exp_info = lyman.default_experiment_parameters()

    inputnode = Node(IdentityInterface(["zstat_file", "mask_file"]), "inputs")

    # Sample the zstat image to the surface
    hemisource = Node(IdentityInterface(["mni_hemi"]), "hemisource")
    hemisource.iterables = ("mni_hemi", ["lh", "rh"])

    zstatproj = Node(
        freesurfer.SampleToSurface(sampling_method=exp_info["sampling_method"],
                                   sampling_range=exp_info["sampling_range"],
                                   sampling_units=exp_info["sampling_units"],
                                   smooth_surf=exp_info["surf_smooth"],
                                   subject_id="fsaverage",
                                   mni152reg=True,
                                   target_subject="fsaverage"), "zstatproj")

    # Sample the mask to the surface
    maskproj = Node(
        freesurfer.SampleToSurface(sampling_range=exp_info["sampling_range"],
                                   sampling_units=exp_info["sampling_units"],
                                   subject_id="fsaverage",
                                   mni152reg=True,
                                   target_subject="fsaverage"), "maskproj")
    if exp_info["sampling_method"] == "point":
        maskproj.inputs.sampling_method = "point"
    else:
        maskproj.inputs.sampling_method = "max"

    outputnode = Node(IdentityInterface(["surf_zstat", "surf_mask"]),
                      "outputs")

    # Define and connect the workflow
    proj = Workflow(name)
    proj.connect([
        (inputnode, zstatproj, [("zstat_file", "source_file")]),
        (inputnode, maskproj, [("mask_file", "source_file")]),
        (hemisource, zstatproj, [("mni_hemi", "hemi")]),
        (hemisource, maskproj, [("mni_hemi", "hemi")]),
        (zstatproj, outputnode, [("out_file", "surf_zstat")]),
        (maskproj, outputnode, [("out_file", "surf_mask")]),
    ])

    return proj
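
A minimal sketch of driving this factory, following the lyman conventions above; the input file names are illustrative:

proj = create_surface_projection_workflow(name="surfproj")
proj.inputs.inputs.zstat_file = "zstat1.nii.gz"    # fields of the "inputs" node
proj.inputs.inputs.mask_file = "group_mask.nii.gz"
proj.run()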
Example No. 3
def main(derivatives, subject, session, workflow_folder, n_procs=8):
    fn = op.join(
        derivatives, 'modelfitting', 'glm7', 'sub-{subject}', 'ses-{session}',
        'func',
        'sub-{subject}_ses-{session}_left_over_right_zmap.nii.gz').format(
            subject=subject, session=session)

    os.environ['SUBJECTS_DIR'] = op.join(derivatives, 'freesurfer')

    wf = pe.Workflow(name='sample_fs_{}_{}'.format(subject, session),
                     base_dir=workflow_folder)

    input_node = pe.Node(niu.IdentityInterface(fields=['source_file']),
                         name='input_node')
    input_node.inputs.source_file = fn

    config_node = pe.Node(
        niu.IdentityInterface(fields=['depth', 'hemisphere']),
        name='config_node')
    config_node.iterables = [('depth', np.arange(1, 7)),
                             ('hemisphere', ['lh', 'rh'])]

    def get_surf_name(depth, n_surfs=8):
        # Map an integer depth index to an equivolumetric surface name,
        # e.g. depth=1 with n_surfs=8 gives 'equi0.14285714285714285.pial'.
        return 'equi{}.pial'.format(str(float(depth) / (n_surfs - 1)))

    sampler = pe.Node(fs.SampleToSurface(subjects_dir=os.path.join(
        derivatives, 'freesurfer'),
                                         override_reg_subj=True,
                                         reg_header=True,
                                         subject_id='sub-{}'.format(subject),
                                         interp_method='trilinear',
                                         projection_stem='',
                                         out_type='gii'),
                      name='sampler')

    wf.connect(input_node, 'source_file', sampler, 'source_file')
    wf.connect(config_node, ('depth', get_surf_name), sampler, 'surface')
    wf.connect(config_node, 'hemisphere', sampler, 'hemi')

    def get_desc(depth, n_surfs=8):
        return 'zmap-depth-{:.03f}'.format(float(depth) / (n_surfs - 1))

    def get_extra_values(hemi):
        return ['hemi-{}'.format(hemi)]

    ds = pe.MapNode(DerivativesDataSink(
        base_directory=derivatives,
        out_path_base='sampled_giis',
    ),
                    iterfield=['in_file', 'source_file'],
                    name='datasink')

    wf.connect(input_node, 'source_file', ds, 'source_file')
    wf.connect(sampler, 'out_file', ds, 'in_file')
    wf.connect(config_node, ('depth', get_desc), ds, 'desc')
    wf.connect(config_node, ('hemisphere', get_extra_values), ds,
               'extra_values')

    wf.run(plugin='MultiProc', plugin_args={'n_procs': n_procs})
Example No. 4
def test_sample2surf():

    s2s = fs.SampleToSurface()
    # Test underlying command
    yield assert_equal, s2s.cmd, 'mri_vol2surf'

    # Test mandatory args exception
    yield assert_raises, ValueError, s2s.run

    # Create testing files
    files, cwd, oldwd = create_files_in_directory()

    # Test input settings
    s2s.inputs.source_file = files[0]
    s2s.inputs.reference_file = files[1]
    s2s.inputs.hemi = "lh"
    s2s.inputs.reg_file = files[2]
    s2s.inputs.sampling_range = .5
    s2s.inputs.sampling_units = "frac"
    s2s.inputs.sampling_method = "point"

    # Test a basic command line
    yield assert_equal, s2s.cmdline, (
        "mri_vol2surf "
        "--hemi lh --o %s --ref %s --reg reg.dat --projfrac 0.500 --mov %s" %
        (os.path.join(cwd, "lh.a.mgz"), files[1], files[0]))

    # Test identity
    s2sish = fs.SampleToSurface(source_file=files[1],
                                reference_file=files[0],
                                hemi="rh")
    yield assert_not_equal, s2s, s2sish

    # Test hits file name creation
    s2s.inputs.hits_file = True
    yield assert_equal, s2s._get_outfilename("hits_file"), os.path.join(
        cwd, "lh.a_hits.mgz")

    # Test that a 2-tuple range raises an error
    def set_illegal_range():
        s2s.inputs.sampling_range = (.2, .5)

    yield assert_raises, TraitError, set_illegal_range

    # Clean up our mess
    clean_directory(cwd, oldwd)
Example No. 5
def test_sample2surf(create_files_in_directory_plus_dummy_file):

    s2s = fs.SampleToSurface()
    # Test underlying command
    assert s2s.cmd == 'mri_vol2surf'

    # Test mandatory args exception
    with pytest.raises(ValueError):
        s2s.run()

    # Create testing files
    files, cwd = create_files_in_directory_plus_dummy_file

    # Test input settings
    s2s.inputs.source_file = files[0]
    s2s.inputs.reference_file = files[1]
    s2s.inputs.hemi = "lh"
    s2s.inputs.reg_file = files[2]
    s2s.inputs.sampling_range = .5
    s2s.inputs.sampling_units = "frac"
    s2s.inputs.sampling_method = "point"

    # Test a basic command line
    assert s2s.cmdline == (
        "mri_vol2surf "
        "--hemi lh --o %s --ref %s --reg reg.dat --projfrac 0.500 --mov %s" %
        (os.path.join(cwd, "lh.a.mgz"), files[1], files[0]))

    # Test identity
    s2sish = fs.SampleToSurface(source_file=files[1],
                                reference_file=files[0],
                                hemi="rh")
    assert s2s != s2sish

    # Test hits file name creation
    s2s.inputs.hits_file = True
    assert s2s._get_outfilename("hits_file") == os.path.join(
        cwd, "lh.a_hits.mgz")

    # Test that a 2-tuple range raises an error
    def set_illegal_range():
        s2s.inputs.sampling_range = (.2, .5)

    with pytest.raises(TraitError):
        set_illegal_range()
Example No. 6
def mri_vol2surf(infile, outfile, hemi, interp='trilinear'):
    sampler = fs.SampleToSurface(hemi=hemi)
    sampler.inputs.source_file = infile
    sampler.inputs.mni152reg = True
    sampler.inputs.sampling_method = "max"
    sampler.inputs.sampling_range = (-3, 2, 0.1)
    sampler.inputs.sampling_units = "mm"
    sampler.inputs.interp_method = interp
    sampler.inputs.out_file = outfile
    sampler.inputs.out_type = 'mgz'
    sampler.inputs.reshape = True
    res = sampler.run()
    return res
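
A hedged usage sketch for the wrapper above; the file names are illustrative, and mni152reg=True means the input volume must already be in MNI152 space:

from nipype.interfaces import freesurfer as fs

res = mri_vol2surf("zstat_mni.nii.gz", "lh.zstat.mgz", hemi="lh")
print(res.outputs.out_file)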
Example No. 7
def create_surface_registration(wf_name='surface_registration'):
    """
    Workflow to generate surfaces from anatomical data, register the
    structural data to the FreeSurfer anatomical, and assign values to
    surface vertices.
    
    Parameters
    ----------
    wf_name : string
        name of the workflow
    
    Returns 
    -------
    wflow : workflow object
        workflow object
        
    Notes
    -----
    `Source <https://github.com/FCP-INDI/C-PAC/blob/master/CPAC/timeseries/timeseries_analysis.py>`_
        
    Workflow Inputs::
        
        inputspec.rest : string  (nifti file)
            path to input functional data
        inputspec.brain : string (nifti file)
            path to skull stripped anatomical image
        inputspec.recon_subjects : string
            path to subjects directory
        inputspec.subject_id : string
            subject id 
            
    Workflow Outputs::
    
        outputspec.reconall_subjects_dir : string
            freesurfer subjects directory
        outputspec.reconall_subjects_id : string
            subject id for which anatomical data is taken
        outputspec.out_reg_file : string
            path to bbregister output registration file
        outputspec.lh_surface_file : string (mgz file)
            path to left hemisphere cortical surface file 
        outputspec.rh_surface_file : string (mgz file)
            path to right hemisphere cortical surface file 
    
    Order of commands:
     
    - Generate surfaces and ROIs of structural data from the T1 anatomical image using FreeSurfer's recon-all. For details see `ReconAll <https://surfer.nmr.mgh.harvard.edu/fswiki/recon-all>`_::

        recon-all -all -subjid 0010001 -sd working_dir/SurfaceRegistration/anat_reconall
        
    - Register input volume to the FreeSurfer anatomical using FreeSurfer's bbregister. The input is the output of the recon-all command. For details see `BBRegister <http://surfer.nmr.mgh.harvard.edu/fswiki/bbregister>`_::
        
        bbregister --t2 --init-fsl --reg structural_bbreg_me.dat --mov structural.nii --s 0010001
        
    - Assign values from a volume to each surface vertex using FreeSurfer's mri_vol2surf. For details see `mri_vol2surf <http://surfer.nmr.mgh.harvard.edu/fswiki/mri_vol2surf>`_::
       
        For left hemisphere
        mri_vol2surf --mov structural.nii --reg structural_bbreg_me.dat --interp trilin --projfrac 0.5 --hemi lh --o surface_file.nii.gz 

        For right hemisphere        
        mri_vol2surf --mov structural.nii --reg structural_bbreg_me.dat --interp trilin --projfrac 0.5 --hemi rh --o surface_file.nii.gz
    
    
    High Level Workflow Graph:
    
    .. image:: ../images/surface_registration.dot.png
       :width: 1000
    
    
    Detailed Workflow Graph:
    
    .. image:: ../images/surface_registration_detailed.dot.png
       :width: 1000
       
    Example
    -------
    >>> import CPAC.timeseries.timeseries_analysis as t
    >>> wf = t.create_surface_registration()
    >>> wf.inputs.inputspec.rest = '/home/data/sub001/rest.nii.gz'
    >>> wf.inputs.inputspec.brain = '/home/data/sub001/anat.nii.gz'
    >>> wf.inputs.inputspec.recon_subjects = '/home/data/working_dir/SurfaceRegistration/anat_reconall'
    >>> wf.inputs.inputspec.subject_id = 'sub001'
    >>> wf.base_dir = './'
    >>> wf.run() 
        
    """
    wflow = pe.Workflow(name=wf_name)

    inputNode = pe.Node(util.IdentityInterface(fields=['recon_subjects',
                                                       'brain',
                                                       'subject_id',
                                                       'rest']),
                        name='inputspec')

    outputNode = pe.Node(util.IdentityInterface(fields=['reconall_subjects_dir',
                                                         'reconall_subjects_id',
                                                         'out_reg_file',
                                                         'lh_surface_file',
                                                         'rh_surface_file']),
                         name='outputspec')


    reconall = pe.Node(interface=fs.ReconAll(),
                       name="reconall")
    reconall.inputs.directive = 'all'


    wflow.connect(inputNode, 'brain',
                  reconall, 'T1_files')
    wflow.connect(inputNode, 'subject_id',
                  reconall, 'subject_id')
    wflow.connect(inputNode, 'recon_subjects',
                  reconall, 'subjects_dir')

    wflow.connect(reconall, 'subjects_dir',
                  outputNode, 'reconall_subjects_dir')
    wflow.connect(reconall, 'subject_id',
                  outputNode, 'reconall_subjects_id')


    bbregister = pe.Node(interface=fs.BBRegister(init='fsl',
                                                 contrast_type='t2',
                                                 registered_file=True,
                                                 out_fsl_file=True),
                        name='bbregister')

    wflow.connect(inputNode, 'rest',
                  bbregister, 'source_file')
    wflow.connect(reconall, 'subjects_dir',
                  bbregister, 'subjects_dir')
    wflow.connect(reconall, 'subject_id',
                  bbregister, 'subject_id')

    wflow.connect(bbregister, 'out_reg_file',
                  outputNode, 'out_reg_file')


    sample_to_surface_lh = pe.Node(interface=fs.SampleToSurface(hemi="lh"),
                                    name='sample_to_surface_lh')
    sample_to_surface_lh.inputs.no_reshape = True
    sample_to_surface_lh.inputs.interp_method = 'trilinear'
    sample_to_surface_lh.inputs.sampling_method = "point"
    sample_to_surface_lh.inputs.sampling_range = 0.5
    sample_to_surface_lh.inputs.sampling_units = "frac"

    wflow.connect(bbregister, 'out_reg_file',
                  sample_to_surface_lh, 'reg_file')
    wflow.connect(inputNode, 'rest',
                  sample_to_surface_lh, 'source_file')

    wflow.connect(sample_to_surface_lh, 'out_file',
                  outputNode, 'lh_surface_file')


    sample_to_surface_rh = pe.Node(interface=fs.SampleToSurface(hemi="rh"),
                                    name='sample_to_surface_rh')
    sample_to_surface_rh.inputs.no_reshape = True
    sample_to_surface_rh.inputs.interp_method = 'trilinear'
    sample_to_surface_rh.inputs.sampling_method = "point"
    sample_to_surface_rh.inputs.sampling_range = 0.5
    sample_to_surface_rh.inputs.sampling_units = "frac"

    wflow.connect(bbregister, 'out_reg_file',
                  sample_to_surface_rh, 'reg_file')
    wflow.connect(inputNode, 'rest',
                  sample_to_surface_rh, 'source_file')

    wflow.connect(sample_to_surface_rh, 'out_file',
                  outputNode, 'rh_surface_file')

    return wflow
Example No. 8
def test_neuro(change_dir, plugin):

    # wf = Workflow(name, mem_gb_node=DEFAULT_MEMORY_MIN_GB,
    #               inputs=['source_file', 't1_preproc', 'subject_id',
    #                       'subjects_dir', 't1_2_fsnative_forward_transform',
    #                       'mem_gb', 'output_spaces', 'medial_surface_nan'],
    #               outputs='surfaces')
    #
    # dj: why do I need outputs?

    wf = Workflow(
        name=Name,
        inputs=Inputs,
        workingdir="test_neuro_{}".format(plugin),
        write_state=False,
        wf_output_names=[
            ("sampler", "out_file", "sampler_out"),
            ("targets", "out", "target_out"),
        ],
    )

    # @interface
    # def select_target(subject_id, space):
    #     """ Given a source subject ID and a target space, get the target subject ID """
    #     return subject_id if space == 'fsnative' else space

    # wf.add('targets', select_target(subject_id=wf.inputs.subject_id))
    #   .split('space', space=[space for space in wf.inputs.output_spaces
    #                        if space.startswith('fs')])

    # dj: don't have option in split to connect with wf input

    wf.add(
        runnable=select_target,
        name="targets",
        subject_id="subject_id",
        input_names=["subject_id", "space"],
        output_names=["out"],
        write_state=False,
    ).split_node(
        splitter="space",
        inputs={
            "space": [
                space for space in Inputs["output_spaces"]
                if space.startswith("fs")
            ]
        },
    )

    # wf.add('rename_src', Rename(format_string='%(subject)s',
    #                             keep_ext=True,
    #                             in_file=wf.inputs.source_file))
    #   .split('subject')

    wf.add(
        name="rename_src",
        runnable=Rename(format_string="%(subject)s", keep_ext=True),
        in_file="source_file",
        output_names=["out_file"],
        write_state=False,
    ).split_node(
        "subject",
        inputs={
            "subject": [
                space for space in Inputs["output_spaces"]
                if space.startswith("fs")
            ]
        },
    )  # TODO: now it's only one subject

    # wf.add('resampling_xfm',
    #        fs.utils.LTAConvert(in_lta='identity.nofile',
    #                            out_lta=True,
    #                            source_file=wf.inputs.source_file,
    #                            target_file=wf.inputs.t1_preproc)
    #   .add('set_xfm_source', ConcatenateLTA(out_type='RAS2RAS',
    #                                         in_lta2=wf.inputs.t1_2_fsnative_forward_transform,
    #                                         in_lta1=wf.resampling_xfm.out_lta))

    wf.add(
        name="resampling_xfm",
        runnable=fs.utils.LTAConvert(in_lta="identity.nofile", out_lta=True),
        source_file="source_file",
        target_file="t1_preproc",
        output_names=["out_lta"],
        write_state=False,
    ).add(
        name="set_xfm_source",
        runnable=ConcatenateLTA(out_type="RAS2RAS"),
        in_lta2="t1_2_fsnative_forward_transform",
        in_lta1="resampling_xfm.out_lta",
        output_names=["out_file"],
        write_state=False,
    )

    # wf.add('sampler',
    #        fs.SampleToSurface(sampling_method='average', sampling_range=(0, 1, 0.2),
    #                           sampling_units='frac', interp_method='trilinear',
    #                           cortex_mask=True, override_reg_subj=True,
    #                           out_type='gii',
    #                           subjects_dir=wf.inputs.subjects_dir,
    #                           subject_id=wf.inputs.subject_id,
    #                           reg_file=wf.set_xfm_source.out_file,
    #                           target_subject=wf.targets.out,
    #                           source_file=wf.rename_src.out_file),
    #         mem_gb=mem_gb * 3)
    #        .split([('source_file', 'target_subject'), 'hemi'], hemi=['lh', 'rh'])

    wf.add(
        name="sampler",
        runnable=fs.SampleToSurface(
            sampling_method="average",
            sampling_range=(0, 1, 0.2),
            sampling_units="frac",
            interp_method="trilinear",
            cortex_mask=True,
            override_reg_subj=True,
            out_type="gii",
        ),
        write_state=False,
        subjects_dir="subjects_dir",
        subject_id="subject_id",
        reg_file="set_xfm_source.out_file",
        target_subject="targets.out",
        source_file="rename_src.out_file",
        output_names=["out_file"],
    ).split_node(splitter=[("_targets", "_rename_src"), "hemi"],
                 inputs={"hemi": ["lh", "rh"]})

    # dj: adding combiner to the last node
    wf.combine_node(combiner="hemi")

    sub = Submitter(plugin=plugin, runnable=wf)
    sub.run()
    sub.close()

    assert "target_out" in wf.output.keys()
    assert len(list(wf.output["target_out"].keys())) == 2
    assert "targets.space" in list(wf.output["target_out"].keys())[0]

    assert "sampler_out" in wf.output.keys()
    # length is 2 because of the combiner
    assert len(list(wf.output["sampler_out"].keys())) == 2
    assert "rename_src.subject" in list(wf.output["sampler_out"].keys())[0]
    assert "targets.space" in list(wf.output["sampler_out"].keys())[0]
    # hemi is eliminated from the state inputs after combiner
    assert "sampler.hemi" not in list(wf.output["sampler_out"].keys())[0]
Example No. 9
def init_asl_surf_wf(mem_gb,
                     surface_spaces,
                     medial_surface_nan,
                     name='asl_surf_wf'):
    """
    Sample functional images to FreeSurfer surfaces.

    For each vertex, the cortical ribbon is sampled at six points (spaced 20% of thickness apart)
    and averaged.
    Outputs are in GIFTI format.

    Workflow Graph
        .. workflow::
            :graph2use: colored
            :simple_form: yes

            from aslprep.workflows.asl import init_asl_surf_wf
            wf = init_asl_surf_wf(mem_gb=0.1,
                                   surface_spaces=['fsnative', 'fsaverage5'],
                                   medial_surface_nan=False)

    Parameters
    ----------
    surface_spaces : :obj:`list`
        List of FreeSurfer surface-spaces (either ``fsaverage{3,4,5,6,}`` or ``fsnative``)
        the functional images are to be resampled to.
        For ``fsnative``, images will be resampled to the individual subject's
        native surface.
    medial_surface_nan : :obj:`bool`
        Replace medial wall values with NaNs on functional GIFTI files

    Inputs
    ------
    source_file
        Motion-corrected ASL series in T1 space
    t1w_preproc
        Bias-corrected structural template image
    subjects_dir
        FreeSurfer SUBJECTS_DIR
    subject_id
        FreeSurfer subject ID
    t1w2fsnative_xfm
        LTA-style affine matrix translating from T1w to FreeSurfer-conformed subject space

    Outputs
    -------
    surfaces
        ASL series, resampled to FreeSurfer surfaces

    """
    from nipype.interfaces.io import FreeSurferSource
    from ...niworkflows.engine.workflows import LiterateWorkflow as Workflow
    from ...niworkflows.interfaces.surf import GiftiSetAnatomicalStructure

    workflow = Workflow(name=name)
    workflow.__desc__ = """\
The ASL time-series were resampled onto the following surfaces
(FreeSurfer reconstruction nomenclature):
{out_spaces}.
""".format(out_spaces=', '.join(['*%s*' % s for s in surface_spaces]))

    inputnode = pe.Node(niu.IdentityInterface(fields=[
        'source_file', 'subject_id', 'subjects_dir', 't1w2fsnative_xfm'
    ]),
                        name='inputnode')
    itersource = pe.Node(niu.IdentityInterface(fields=['target']),
                         name='itersource')
    itersource.iterables = [('target', surface_spaces)]

    get_fsnative = pe.Node(FreeSurferSource(),
                           name='get_fsnative',
                           run_without_submitting=True)

    def select_target(subject_id, space):
        """Get the target subject ID, given a source subject ID and a target space."""
        return subject_id if space == 'fsnative' else space

    targets = pe.Node(niu.Function(function=select_target),
                      name='targets',
                      run_without_submitting=True,
                      mem_gb=DEFAULT_MEMORY_MIN_GB)

    # Rename the source file to the output space to simplify naming later
    rename_src = pe.Node(niu.Rename(format_string='%(subject)s',
                                    keep_ext=True),
                         name='rename_src',
                         run_without_submitting=True,
                         mem_gb=DEFAULT_MEMORY_MIN_GB)
    itk2lta = pe.Node(niu.Function(function=_itk2lta),
                      name="itk2lta",
                      run_without_submitting=True)
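    # sampling_range=(0, 1, 0.2) below realizes the "six points" from the
    # docstring: ribbon fractions 0, 0.2, ..., 1.0, averaged per vertex by
    # sampling_method='average'.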
    sampler = pe.MapNode(fs.SampleToSurface(
        cortex_mask=True,
        interp_method='trilinear',
        out_type='gii',
        override_reg_subj=True,
        sampling_method='average',
        sampling_range=(0, 1, 0.2),
        sampling_units='frac',
    ),
                         iterfield=['hemi'],
                         name='sampler',
                         mem_gb=mem_gb * 3)
    sampler.inputs.hemi = ['lh', 'rh']
    update_metadata = pe.MapNode(GiftiSetAnatomicalStructure(),
                                 iterfield=['in_file'],
                                 name='update_metadata',
                                 mem_gb=DEFAULT_MEMORY_MIN_GB)

    outputnode = pe.JoinNode(
        niu.IdentityInterface(fields=['surfaces', 'target']),
        joinsource='itersource',
        name='outputnode')

    workflow.connect([
        (inputnode, get_fsnative, [('subject_id', 'subject_id'),
                                   ('subjects_dir', 'subjects_dir')]),
        (inputnode, targets, [('subject_id', 'subject_id')]),
        (inputnode, rename_src, [('source_file', 'in_file')]),
        (inputnode, itk2lta, [('source_file', 'src_file'),
                              ('t1w2fsnative_xfm', 'in_file')]),
        (get_fsnative, itk2lta, [('T1', 'dst_file')]),
        (inputnode, sampler, [('subjects_dir', 'subjects_dir'),
                              ('subject_id', 'subject_id')]),
        (itersource, targets, [('target', 'space')]),
        (itersource, rename_src, [('target', 'subject')]),
        (itk2lta, sampler, [('out', 'reg_file')]),
        (targets, sampler, [('out', 'target_subject')]),
        (rename_src, sampler, [('out_file', 'source_file')]),
        (update_metadata, outputnode, [('out_file', 'surfaces')]),
        (itersource, outputnode, [('target', 'target')]),
    ])

    if not medial_surface_nan:
        workflow.connect(sampler, 'out_file', update_metadata, 'in_file')
        return workflow

    from ...niworkflows.interfaces.freesurfer import MedialNaNs
    # Refine if medial vertices should be NaNs
    medial_nans = pe.MapNode(MedialNaNs(),
                             iterfield=['in_file'],
                             name='medial_nans',
                             mem_gb=DEFAULT_MEMORY_MIN_GB)

    workflow.connect([
        (inputnode, medial_nans, [('subjects_dir', 'subjects_dir')]),
        (sampler, medial_nans, [('out_file', 'in_file')]),
        (medial_nans, update_metadata, [('out_file', 'in_file')]),
    ])
    return workflow
Example No. 10
def init_bold_surf_wf(mem_gb,
                      output_spaces,
                      medial_surface_nan,
                      fslr_density=None,
                      name='bold_surf_wf'):
    """
    Sample functional images to FreeSurfer surfaces.

    For each vertex, the cortical ribbon is sampled at six points (spaced 20% of thickness apart)
    and averaged.
    Outputs are in GIFTI format.

    Workflow Graph
        .. workflow::
            :graph2use: colored
            :simple_form: yes

            from fmriprep.workflows.bold import init_bold_surf_wf
            wf = init_bold_surf_wf(mem_gb=0.1,
                                   output_spaces=['T1w', 'fsnative',
                                                 'template', 'fsaverage5'],
                                   medial_surface_nan=False)

    Parameters
    ----------
    output_spaces : list
        List of output spaces functional images are to be resampled to
        Target spaces beginning with ``fs`` will be selected for resampling,
        such as ``fsaverage`` or related template spaces
        If the list contains ``fsnative``, images will be resampled to the
        individual subject's native surface
        If the list contains ``fsLR``, images will be resampled twice;
        first to ``fsaverage`` and then to ``fsLR``.
    medial_surface_nan : bool
        Replace medial wall values with NaNs on functional GIFTI files
    fslr_density : str, optional
        Density of fsLR surface (32k or 59k)


    Inputs
    ------
    source_file
        Motion-corrected BOLD series in T1 space
    t1w_preproc
        Bias-corrected structural template image
    subjects_dir
        FreeSurfer SUBJECTS_DIR
    subject_id
        FreeSurfer subject ID
    t1w2fsnative_xfm
        LTA-style affine matrix translating from T1w to FreeSurfer-conformed subject space

    Outputs
    -------
    surfaces
        BOLD series, resampled to FreeSurfer surfaces

    """
    # Ensure volumetric spaces do not sneak into this workflow
    spaces = [space for space in output_spaces if space.startswith('fs')]

    workflow = Workflow(name=name)

    if spaces:
        workflow.__desc__ = """\
The BOLD time-series were resampled to surfaces on the following
spaces: {out_spaces}.
""".format(out_spaces=', '.join(['*%s*' % s for s in spaces]))
    inputnode = pe.Node(niu.IdentityInterface(fields=[
        'source_file', 't1w_preproc', 'subject_id', 'subjects_dir',
        't1w2fsnative_xfm'
    ]),
                        name='inputnode')

    to_fslr = False
    if 'fsLR' in output_spaces:
        to_fslr = 'fsaverage' in output_spaces and fslr_density
        spaces.pop(spaces.index('fsLR'))

    outputnode = pe.Node(niu.IdentityInterface(fields=['surfaces']),
                         name='outputnode')

    def select_target(subject_id, space):
        """Get the target subject ID, given a source subject ID and a target space."""
        return subject_id if space == 'fsnative' else space

    targets = pe.MapNode(niu.Function(function=select_target),
                         iterfield=['space'],
                         name='targets',
                         mem_gb=DEFAULT_MEMORY_MIN_GB)
    targets.inputs.space = spaces

    # Rename the source file to the output space to simplify naming later
    rename_src = pe.MapNode(niu.Rename(format_string='%(subject)s',
                                       keep_ext=True),
                            iterfield='subject',
                            name='rename_src',
                            run_without_submitting=True,
                            mem_gb=DEFAULT_MEMORY_MIN_GB)
    rename_src.inputs.subject = spaces

    resampling_xfm = pe.Node(LTAConvert(in_lta='identity.nofile',
                                        out_lta=True),
                             name='resampling_xfm')
    set_xfm_source = pe.Node(ConcatenateLTA(out_type='RAS2RAS'),
                             name='set_xfm_source')

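    # As in the docstring: six samples through the cortical ribbon
    # (projfrac 0 to 1 in steps of 0.2), averaged per vertex.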
    sampler = pe.MapNode(fs.SampleToSurface(sampling_method='average',
                                            sampling_range=(0, 1, 0.2),
                                            sampling_units='frac',
                                            interp_method='trilinear',
                                            cortex_mask=True,
                                            override_reg_subj=True,
                                            out_type='gii'),
                         iterfield=['source_file', 'target_subject'],
                         iterables=('hemi', ['lh', 'rh']),
                         name='sampler',
                         mem_gb=mem_gb * 3)

    if to_fslr:
        filter_fsavg = pe.Node(niu.Function(
            function=_select_fsaverage_hemi,
            output_names=['fsaverage_bold', 'hemi']),
                               name='filter_fsavg',
                               mem_gb=DEFAULT_MEMORY_MIN_GB,
                               run_without_submitting=True)

        rename_fslr = pe.Node(niu.Rename(format_string="%(hemi)s.fsLR",
                                         keep_ext=True,
                                         parse_string=r'^(?P<hemi>\w+)'),
                              name='rename_fslr',
                              mem_gb=DEFAULT_MEMORY_MIN_GB,
                              run_without_submitting=True)

        fetch_fslr_tpls = pe.Node(niu.Function(function=_fetch_fslr_templates,
                                               output_names=[
                                                   'fsaverage_sphere',
                                                   'fslr_sphere',
                                                   'fsaverage_midthick',
                                                   'fslr_midthick'
                                               ]),
                                  name='fetch_fslr_tpls',
                                  mem_gb=DEFAULT_MEMORY_MIN_GB,
                                  overwrite=True)
        fetch_fslr_tpls.inputs.den = fslr_density

        resample_fslr = pe.Node(wb.MetricResample(method='ADAP_BARY_AREA',
                                                  area_metrics=True),
                                name='resample_fslr')

        merge_fslr = pe.Node(niu.Merge(2),
                             name='merge_fslr',
                             mem_gb=DEFAULT_MEMORY_MIN_GB,
                             run_without_submitting=True)

        def _basename(in_file):
            import os
            return os.path.basename(in_file)

        workflow.connect([
            (sampler, filter_fsavg, [('out_file', 'in_files')]),
            (filter_fsavg, fetch_fslr_tpls, [('hemi', 'hemi')]),
            (filter_fsavg, rename_fslr, [('fsaverage_bold', 'in_file')]),
            (rename_fslr, resample_fslr, [('out_file', 'in_file')]),
            (rename_fslr, resample_fslr, [(('out_file', _basename),
                                           'out_file')]),
            (fetch_fslr_tpls, resample_fslr,
             [('fsaverage_sphere', 'current_sphere'),
              ('fslr_sphere', 'new_sphere'),
              ('fsaverage_midthick', 'current_area'),
              ('fslr_midthick', 'new_area')]),
            (sampler, merge_fslr, [('out_file', 'in1')]),
            (resample_fslr, merge_fslr, [('out_file', 'in2')]),
        ])

    merger = pe.JoinNode(niu.Merge(1, ravel_inputs=True),
                         name='merger',
                         joinsource='sampler',
                         joinfield=['in1'],
                         run_without_submitting=True,
                         mem_gb=DEFAULT_MEMORY_MIN_GB)

    if medial_surface_nan:
        medial_nans = pe.MapNode(MedialNaNs(),
                                 iterfield=['in_file'],
                                 name='medial_nans',
                                 mem_gb=DEFAULT_MEMORY_MIN_GB)

        workflow.connect([
            (inputnode, medial_nans, [('subjects_dir', 'subjects_dir')]),
            (medial_nans, merger, [('out_file', 'in1')]),
        ])

    update_metadata = pe.MapNode(GiftiSetAnatomicalStructure(),
                                 iterfield='in_file',
                                 name='update_metadata',
                                 mem_gb=DEFAULT_MEMORY_MIN_GB)

    workflow.connect([
        (inputnode, targets, [('subject_id', 'subject_id')]),
        (inputnode, rename_src, [('source_file', 'in_file')]),
        (inputnode, resampling_xfm, [('source_file', 'source_file'),
                                     ('t1w_preproc', 'target_file')]),
        (inputnode, set_xfm_source, [('t1w2fsnative_xfm', 'in_lta2')]),
        (resampling_xfm, set_xfm_source, [('out_lta', 'in_lta1')]),
        (inputnode, sampler, [('subjects_dir', 'subjects_dir'),
                              ('subject_id', 'subject_id')]),
        (set_xfm_source, sampler, [('out_file', 'reg_file')]),
        (targets, sampler, [('out', 'target_subject')]),
        (rename_src, sampler, [('out_file', 'source_file')]),
        (merger, update_metadata, [('out', 'in_file')]),
        (update_metadata, outputnode, [('out_file', 'surfaces')]),
    ])

    if to_fslr and medial_surface_nan:
        medial_nans.inputs.density = fslr_density
        workflow.connect(merge_fslr, 'out', medial_nans, 'in_file')
    elif to_fslr:
        workflow.connect(merge_fslr, 'out', merger, 'in1')
    elif medial_surface_nan:
        workflow.connect(sampler, 'out_file', medial_nans, 'in_file')
    else:
        workflow.connect(sampler, 'out_file', merger, 'in1')

    return workflow
Example No. 11
def create_surface_projection_workflow(name="surface_projection"):

    # Define the workflow inputs
    inputnode = pe.Node(util.IdentityInterface(
        fields=["subject_id", "timeseries", "tkreg_affine", "smooth_fwhm"]),
                        name="inputs")

    # Set up a hemisphere iterable
    hemisource = pe.Node(util.IdentityInterface(fields=["hemi"]),
                         iterables=("hemi", ["lh", "rh"]),
                         name="hemisource")

    # Project data onto the surface mesh
    surfproject = pe.MapNode(fs.SampleToSurface(sampling_range=(0, 1, .1),
                                                sampling_units="frac",
                                                cortex_mask=True),
                             iterfield=["source_file", "reg_file"],
                             name="surfproject")
    surfproject.inputs.sampling_method = "average"

    # Apply the spherical warp to the data to bring into fsaverage space
    surftransform = pe.MapNode(fs.SurfaceTransform(target_subject="fsaverage",
                                                   reshape=True),
                               iterfield=["source_file"],
                               name="surftransform")

    # Smooth the data along the surface
    smoothnormsurf = pe.MapNode(fs.SurfaceSmooth(subject_id="fsaverage",
                                                 reshape=True),
                                iterfield=["in_file"],
                                name="smoothnormsurf")

    # Convert the fsaverage surface to nifti
    cvtnormsurf = pe.MapNode(fs.MRIConvert(out_type="niigz"),
                             iterfield=["in_file"],
                             name="convertnormsurf")

    # Rename the timeseries
    rename = pe.MapNode(util.Rename(
        format_string="%(hemi)s.timeseries.fsaverage", keep_ext=True),
                        iterfield=["in_file"],
                        name="rename")

    # Define the outputs
    outputnode = pe.Node(util.IdentityInterface(fields=["timeseries"]),
                         name="outputs")

    # Define and connect the workflow
    tosurf = pe.Workflow(name=name)
    tosurf.connect([
        (inputnode, surfproject, [("timeseries", "source_file"),
                                  ("subject_id", "subject_id"),
                                  ("tkreg_affine", "reg_file")]),
        (hemisource, surfproject, [("hemi", "hemi")]),
        (surfproject, surftransform, [("out_file", "source_file")]),
        (inputnode, surftransform, [("subject_id", "source_subject")]),
        (hemisource, surftransform, [("hemi", "hemi")]),
        (surftransform, smoothnormsurf, [("out_file", "in_file")]),
        (hemisource, smoothnormsurf, [("hemi", "hemi")]),
        (inputnode, smoothnormsurf, [("smooth_fwhm", "fwhm")]),
        (smoothnormsurf, cvtnormsurf, [("out_file", "in_file")]),
        (cvtnormsurf, rename, [("out_file", "in_file")]),
        (hemisource, rename, [("hemi", "hemi")]),
        (rename, outputnode, [("out_file", "timeseries")]),
    ])

    return tosurf
Example No. 12
def main(derivatives,
         subject,
         session,
         task,
         acquisition,
         run,
         workflow_folder='/tmp/workflow_folders'):

    os.environ['SUBJECTS_DIR'] = os.path.join(derivatives, 'freesurfer')

    preproc_bold = get_derivative(derivatives,
                                  'spynoza',
                                  'func',
                                  subject=subject,
                                  session=session,
                                  suffix='preproc',
                                  acquisition=acquisition,
                                  run=run,
                                  task=task)

    registration = get_derivative(derivatives,
                                  'manual_registrations',
                                  'func',
                                  subject=subject,
                                  session=session,
                                  description='spynoza2t1w',
                                  suffix='transform',
                                  extension='lta',
                                  check_exists=False)

    wf = pe.Workflow(name='sample_fs', base_dir=workflow_folder)

    inputnode = pe.Node(niu.IdentityInterface(
        fields=['preproc_bold', 'registration', 'subject']),
                        name='inputnode')
    inputnode.inputs.preproc_bold = preproc_bold
    inputnode.inputs.subject = 'sub-{}'.format(subject)
    inputnode.inputs.registration = registration

    sampler = pe.Node(fs.SampleToSurface(sampling_method='average',
                                         sampling_range=(0, 1, 0.2),
                                         sampling_units='frac',
                                         interp_method='trilinear',
                                         cortex_mask=True,
                                         subjects_dir=os.path.join(
                                             derivatives, 'freesurfer'),
                                         override_reg_subj=True,
                                         out_type='gii'),
                      iterables=('hemi', ['lh', 'rh']),
                      name='sampler')

    wf.connect(inputnode, 'preproc_bold', sampler, 'source_file')
    if registration is not None:
        wf.connect(inputnode, 'registration', sampler, 'reg_file')
    else:
        sampler.inputs.reg_header = True
    wf.connect(inputnode, 'subject', sampler, 'subject_id')

    merger = pe.JoinNode(niu.Merge(1, ravel_inputs=True),
                         name='merger',
                         joinsource='sampler',
                         joinfield=['in1'])
    wf.connect(sampler, 'out_file', merger, 'in1')

    ds = pe.MapNode(DerivativesDataSink(
        base_directory=derivatives,
        out_path_base='sampled_giis',
    ),
                    iterfield=['in_file', 'suffix'],
                    name='datasink')

    ds.inputs.suffix = ['bold.lh', 'bold.rh']

    wf.connect(merger, 'out', ds, 'in_file')
    wf.connect(inputnode, 'preproc_bold', ds, 'source_file')

    wf.run()
Example No. 13
def get_wf():
    import numpy as np

    wf = pe.Workflow(name="main_workflow")
    wf.base_dir = os.path.join(workingdir, "rs_preprocessing/")
    wf.config['execution']['crashdump_dir'] = wf.base_dir + "crash_files/"

    ##Infosource##
    subject_id_infosource = pe.Node(
        util.IdentityInterface(fields=['subject_id']),
        name="subject_id_infosource")
    subject_id_infosource.iterables = ('subject_id', subjects)

    session_infosource = pe.Node(util.IdentityInterface(fields=['session']),
                                 name="session_infosource")
    session_infosource.iterables = ('session', sessions)

    hemi_infosource = pe.Node(util.IdentityInterface(fields=['hemi']),
                              name="hemi_infosource")
    hemi_infosource.iterables = ('hemi', hemispheres)

    fs_infosource = pe.Node(util.IdentityInterface(fields=['fs']),
                            name="fs_infosource")
    fs_infosource.iterables = ('fs', fsaverage)

    ##Datagrabber##
    datagrabber = pe.Node(nio.DataGrabber(
        infields=['subject_id'],
        outfields=['resting_nifti', 't1_nifti', 'dicoms']),
                          name="datagrabber",
                          overwrite=True)
    datagrabber.inputs.base_directory = '/'
    datagrabber.inputs.template = '*'
    datagrabber.inputs.field_template = rs_preprocessing_dg_template
    datagrabber.inputs.template_args = rs_preprocessing_dg_args
    datagrabber.inputs.sort_filelist = True

    wf.connect(subject_id_infosource, 'subject_id', datagrabber, 'subject_id')
    wf.connect(session_infosource, 'session', datagrabber, 'session')

    ##DcmStack & MetaData##
    dicom_filedir = pe.Node(name='dicom_filedir',
                            interface=util.Function(
                                input_names=['subject_id', 'session'],
                                output_names=['filedir'],
                                function=construct_dicomfiledir))

    stack = pe.Node(dcm.DcmStack(), name='stack')
    stack.inputs.embed_meta = True

    tr_lookup = pe.Node(dcm.LookupMeta(), name='tr_lookup')
    tr_lookup.inputs.meta_keys = {'RepetitionTime': 'TR'}

    wf.connect(subject_id_infosource, 'subject_id', dicom_filedir,
               'subject_id')
    wf.connect(session_infosource, 'session', dicom_filedir, 'session')

    wf.connect(dicom_filedir, 'filedir', stack, 'dicom_files')
    wf.connect(stack, 'out_file', tr_lookup, 'in_file')

    ##Preproc from BIPs##
    preproc = create_rest_prep(name="bips_resting_preproc", fieldmap=False)
    zscore = preproc.get_node('z_score')
    preproc.remove_nodes([zscore])
    mod_realign = preproc.get_node('mod_realign')
    mod_realign.plugin_args = {'submit_specs': 'request_memory=4000\n'}
    #workaround for realignment crashing in multiproc environment
    mod_realign.run_without_submitting = True

    # inputs
    preproc.inputs.inputspec.motion_correct_node = 'nipy'
    ad = preproc.get_node('artifactdetect')
    preproc.disconnect(mod_realign, 'parameter_source', ad, 'parameter_source')
    ad.inputs.parameter_source = 'NiPy'
    preproc.inputs.inputspec.realign_parameters = {
        "loops": [5],
        "speedup": [5]
    }
    preproc.inputs.inputspec.do_whitening = False
    preproc.inputs.inputspec.timepoints_to_remove = 4
    preproc.inputs.inputspec.smooth_type = 'susan'
    preproc.inputs.inputspec.do_despike = False
    preproc.inputs.inputspec.surface_fwhm = 0.0
    preproc.inputs.inputspec.num_noise_components = 6
    preproc.inputs.inputspec.regress_before_PCA = False
    preproc.get_node('fwhm_input').iterables = ('fwhm', [0, 5])
    preproc.get_node('take_mean_art').get_node(
        'strict_artifact_detect').inputs.save_plot = True
    preproc.inputs.inputspec.ad_normthresh = 1
    preproc.inputs.inputspec.ad_zthresh = 3
    preproc.inputs.inputspec.do_slicetime = True
    preproc.inputs.inputspec.compcor_select = [True, True]
    preproc.inputs.inputspec.filter_type = 'fsl'
    preproc.get_node('bandpass_filter').iterables = [('highpass_freq', [0.01]),
                                                     ('lowpass_freq', [0.1])]
    preproc.inputs.inputspec.reg_params = [
        True, True, True, False, True, False
    ]
    preproc.inputs.inputspec.fssubject_dir = freesurferdir

    #preproc.inputs.inputspec.tr = 1400/1000
    #preproc.inputs.inputspec.motion_correct_node = 'afni'
    #preproc.inputs.inputspec.sliceorder = slicetime_file
    #preproc.inputs.inputspec.sliceorder = list(np.linspace(0,1.4,64))

    def convert_units(tr):
        # The DICOM RepetitionTime is in milliseconds; convert to seconds.
        return tr * .001

    def get_sliceorder(in_file):
        import nipype.interfaces.dcmstack as dcm
        import numpy as np
        nii_wrp = dcm.NiftiWrapper.from_filename(in_file)
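        # argsort of argsort converts the per-slice mosaic acquisition times
        # into ranks, i.e. the temporal order in which slices were acquired.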
        sliceorder = np.argsort(
            np.argsort(
                nii_wrp.meta_ext.get_values('CsaImage.MosaicRefAcqTimes')
                [0])).tolist()
        return sliceorder

    wf.connect(tr_lookup, ("TR", convert_units), preproc, "inputspec.tr")
    wf.connect(stack, ('out_file', get_sliceorder), preproc,
               "inputspec.sliceorder")
    wf.connect(subject_id_infosource, 'subject_id', preproc,
               "inputspec.fssubject_id")
    wf.connect(datagrabber, "resting_nifti", preproc, "inputspec.func")

    ##Sampler##
    sampler = pe.Node(fs.SampleToSurface(), name='sampler')
    sampler.inputs.sampling_method = 'average'
    sampler.inputs.sampling_range = (0.2, 0.8, 0.1)
    sampler.inputs.sampling_units = 'frac'
    sampler.inputs.interp_method = 'nearest'
    sampler.inputs.out_type = 'nii'

    wf.connect(preproc, ('bandpass_filter.out_file', list_to_filename),
               sampler, 'source_file')
    wf.connect(preproc, ('getmask.register.out_reg_file', list_to_filename),
               sampler, 'reg_file')
    wf.connect(hemi_infosource, 'hemi', sampler, 'hemi')

    ##SXFM##
    sxfm = pe.Node(fs.SurfaceTransform(), name='sxfm')
    sxfm.inputs.args = '--cortex --fwhm-src 5 --noreshape'
    sxfm.inputs.target_type = 'nii'

    wf.connect(sampler, 'out_file', sxfm, 'source_file')
    wf.connect(subject_id_infosource, 'subject_id', sxfm, 'source_subject')
    wf.connect(hemi_infosource, 'hemi', sxfm, 'hemi')
    wf.connect(fs_infosource, 'fs', sxfm, 'target_subject')

    ###########

    #report_wf = create_preproc_report_wf(os.path.join(preprocdir, "reports"))
    #report_wf.inputs.inputspec.fssubjects_dir = preproc.inputs.inputspec.fssubject_dir

    def pick_full_brain_ribbon(l):
        import os
        for path in l:
            if os.path.split(path)[1] == "ribbon.mgz":
                return path

    #wf.connect(preproc,"artifactdetect.plot_files", report_wf, "inputspec.art_detect_plot")
    #wf.connect(preproc,"take_mean_art.weighted_mean.mean_image", report_wf, "inputspec.mean_epi")
    #wf.connect(preproc,("getmask.register.out_reg_file", list_to_filename), report_wf, "inputspec.reg_file")
    #wf.connect(preproc,("getmask.fssource.ribbon",pick_full_brain_ribbon), report_wf, "inputspec.ribbon")
    #wf.connect(preproc,("CompCor.tsnr.tsnr_file", list_to_filename), report_wf, "inputspec.tsnr_file")
    #wf.connect(subject_id_infosource, 'subject_id', report_wf, "inputspec.subject_id")


    ##Datasink##
    ds = pe.Node(nio.DataSink(), name="datasink")
    ds.inputs.base_directory = os.path.join(preprocdir, "aimivolumes")
    wf.connect(preproc, 'bandpass_filter.out_file', ds, "preprocessed_resting")
    wf.connect(preproc, 'getmask.register.out_fsl_file', ds,
               "func2anat_transform")
    wf.connect(sampler, 'out_file', ds, 'sampledtosurf')
    wf.connect(sxfm, 'out_file', ds, 'sxfmout')
    #wf.write_graph()
    return wf
Example No. 14
func_volreg = pe.MapNode(interface=e_afni.Threedvolreg(),
                         name='func_volreg',
                         iterfield=["in_file", "basefile"])
func_volreg.inputs.other = '-Fourier -twopass'
func_volreg.inputs.zpad = '4'
func_volreg.inputs.oned_file = 'rest_mc.1D'
func_volreg.inputs.out_file = 'rest_mc.nii.gz'

func_bbreg = pe.MapNode(interface=fs.BBRegister(init='fsl',
                                                contrast_type='t2',
                                                registered_file=True,
                                                out_fsl_file=True),
                        name='func_bbreg',
                        iterfield=["source_file"])

func_sampler_lh = pe.MapNode(interface=fs.SampleToSurface(hemi="lh"),
                             name='func_sampler_lh',
                             iterfield=["source_file", "reg_file"])
func_sampler_lh.inputs.no_reshape = True
func_sampler_lh.inputs.interp_method = 'trilinear'
func_sampler_lh.inputs.sampling_method = "point"
func_sampler_lh.inputs.sampling_range = 0.5
func_sampler_lh.inputs.sampling_units = "frac"

func_sampler_rh = pe.MapNode(interface=fs.SampleToSurface(hemi="rh"),
                             name='func_sampler_rh',
                             iterfield=["source_file", "reg_file"])
func_sampler_rh.inputs.no_reshape = True
func_sampler_rh.inputs.interp_method = 'trilinear'
func_sampler_rh.inputs.sampling_method = "point"
func_sampler_rh.inputs.sampling_range = 0.5
func_sampler_rh.inputs.sampling_units = "frac"
Example No. 15
def init_epi_surf_wf(output_spaces, name='epi_surf_wf'):
    """ Sample functional images to FreeSurfer surfaces

    For each vertex, the cortical ribbon is sampled at six points (spaced 20% of thickness apart)
    and averaged.

    Outputs are in GIFTI format.

    output_spaces : set of structural spaces to sample functional series to
    """
    workflow = pe.Workflow(name=name)
    inputnode = pe.Node(
        niu.IdentityInterface(fields=['source_file', 'subject_id', 'subjects_dir']),
        name='inputnode')

    outputnode = pe.Node(niu.IdentityInterface(fields=['surfaces']), name='outputnode')

    spaces = [space for space in output_spaces if space.startswith('fs')]

    def select_target(subject_id, space):
        """ Given a source subject ID and a target space, get the target subject ID """
        return subject_id if space == 'fsnative' else space

    targets = pe.MapNode(niu.Function(function=select_target),
                         iterfield=['space'], name='targets', run_without_submitting=True)
    targets.inputs.space = spaces

    # Rename the source file to the output space to simplify naming later
    rename_src = pe.MapNode(niu.Rename(format_string='%(subject)s', keep_ext=True),
                            iterfield='subject', name='rename_src', run_without_submitting=True)
    rename_src.inputs.subject = spaces

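    # Six evenly spaced samples through the cortical ribbon per vertex
    # (projfrac 0, 0.2, ..., 1.0), averaged, as described in the docstring.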
    sampler = pe.MapNode(
        fs.SampleToSurface(sampling_method='average', sampling_range=(0, 1, 0.2),
                           sampling_units='frac', reg_header=True,
                           interp_method='trilinear', cortex_mask=True,
                           out_type='gii'),
        iterfield=['source_file', 'target_subject'],
        iterables=('hemi', ['lh', 'rh']),
        name='sampler')

    merger = pe.JoinNode(niu.Merge(1, ravel_inputs=True), name='merger',
                         joinsource='sampler', joinfield=['in1'], run_without_submitting=True)

    def update_gifti_metadata(in_file):
        import os
        import nibabel as nib
        img = nib.load(in_file)
        fname = os.path.basename(in_file)
        if fname[:3] in ('lh.', 'rh.'):
            asp = 'CortexLeft' if fname[0] == 'l' else 'CortexRight'
        else:
            raise ValueError(
                "AnatomicalStructurePrimary cannot be derived from filename")
        primary = nib.gifti.GiftiNVPairs('AnatomicalStructurePrimary', asp)
        if not any(nvpair.name == primary.name for nvpair in img.meta.data):
            img.meta.data.insert(0, primary)
        img.to_filename(fname)
        return os.path.abspath(fname)

    update_metadata = pe.MapNode(niu.Function(function=update_gifti_metadata),
                                 iterfield='in_file', name='update_metadata')

    workflow.connect([
        (inputnode, targets, [('subject_id', 'subject_id')]),
        (inputnode, rename_src, [('source_file', 'in_file')]),
        (inputnode, sampler, [('subjects_dir', 'subjects_dir'),
                              ('subject_id', 'subject_id')]),
        (targets, sampler, [('out', 'target_subject')]),
        (rename_src, sampler, [('out_file', 'source_file')]),
        (sampler, merger, [('out_file', 'in1')]),
        (merger, update_metadata, [('out', 'in_file')]),
        (update_metadata, outputnode, [('out', 'surfaces')]),
        ])

    return workflow
Example No. 16
def create_surface_ols_workflow(name="surface_group",
                                subject_list=None,
                                exp_info=None):
    """Workflow to project ffx copes onto surface and run ols."""
    if subject_list is None:
        subject_list = []
    if exp_info is None:
        exp_info = lyman.default_experiment_parameters()

    inputnode = Node(
        IdentityInterface(["l1_contrast", "copes", "reg_file", "subject_id"]),
        "inputnode")

    hemisource = Node(IdentityInterface(["hemi"]), "hemisource")
    hemisource.iterables = ("hemi", ["lh", "rh"])

    # Sample the volume-encoded native data onto the fsaverage surface
    # manifold with projection + spherical transform
    surfsample = MapNode(
        fs.SampleToSurface(sampling_method=exp_info["sampling_method"],
                           sampling_range=exp_info["sampling_range"],
                           sampling_units=exp_info["sampling_units"],
                           smooth_surf=exp_info["surf_smooth"],
                           target_subject="fsaverage"),
        ["subject_id", "reg_file", "source_file"], "surfsample")

    # Remove subjects with completely empty images
    removeempty = Node(RemoveEmpty(), "removeempty")

    # Concatenate the subject files into a 4D image
    mergecope = Node(fs.Concatenate(), "mergecope")

    # Run the one-sample OLS model
    glmfit = Node(
        fs.GLMFit(one_sample=True,
                  surf=True,
                  cortex=True,
                  glm_dir="_glm_results",
                  subject_id="fsaverage"), "glmfit")

    # Use the cached Monte-Carlo simulations for correction
    cluster = Node(
        Function(["y_file", "glm_dir", "sign", "cluster_zthresh", "p_thresh"],
                 ["glm_dir", "thresholded_file"], glm_corrections, imports),
        "cluster")
    cluster.inputs.cluster_zthresh = exp_info["cluster_zthresh"]
    cluster.inputs.p_thresh = exp_info["grf_pthresh"]
    cluster.inputs.sign = exp_info["surf_corr_sign"]

    # Return the outputs
    outputnode = Node(IdentityInterface(["glm_dir", "sig_file"]), "outputnode")

    # Define and connect the workflow
    group = Workflow(name)
    group.connect([
        (inputnode, surfsample, [("copes", "source_file"),
                                 ("reg_file", "reg_file"),
                                 ("subject_id", "subject_id")]),
        (hemisource, surfsample, [("hemi", "hemi")]),
        (surfsample, removeempty, [("out_file", "in_files")]),
        (removeempty, mergecope, [("out_files", "in_files")]),
        (mergecope, glmfit, [("concatenated_file", "in_file")]),
        (hemisource, glmfit, [("hemi", "hemi")]),
        (mergecope, cluster, [("concatenated_file", "y_file")]),
        (glmfit, cluster, [("glm_dir", "glm_dir")]),
        (glmfit, outputnode, [("glm_dir", "glm_dir")]),
        (cluster, outputnode, [("thresholded_file", "sig_file")]),
    ])

    return group, inputnode, outputnode
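A minimal sketch of driving this workflow, with hypothetical subject labels
and paths; exp_info falls back to lyman's defaults exactly as in the function
above:

group, inputnode, outputnode = create_surface_ols_workflow(
    subject_list=["s01", "s02"])
inputnode.inputs.copes = ["/analysis/s01/cope1.nii.gz",
                          "/analysis/s02/cope1.nii.gz"]
inputnode.inputs.reg_file = ["/analysis/s01/register.dat",
                             "/analysis/s02/register.dat"]
inputnode.inputs.subject_id = ["s01", "s02"]
group.run()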
Exemplo n.º 17
0
def create_workflow(files,
                    target_file,
                    subject_id,
                    TR,
                    slice_times,
                    norm_threshold=1,
                    num_components=5,
                    vol_fwhm=None,
                    surf_fwhm=None,
                    lowpass_freq=-1,
                    highpass_freq=-1,
                    subjects_dir=None,
                    sink_directory=os.getcwd(),
                    target_subject=['fsaverage3', 'fsaverage4'],
                    name='resting'):

    wf = Workflow(name=name)

    # Rename files in case they are named identically
    name_unique = MapNode(Rename(format_string='rest_%(run)02d'),
                          iterfield=['in_file', 'run'],
                          name='rename')
    name_unique.inputs.keep_ext = True
    name_unique.inputs.run = list(range(1, len(files) + 1))
    name_unique.inputs.in_file = files

    realign = Node(interface=spm.Realign(), name="realign")
    realign.inputs.jobtype = 'estwrite'

    num_slices = len(slice_times)
    slice_timing = Node(interface=spm.SliceTiming(), name="slice_timing")
    slice_timing.inputs.num_slices = num_slices
    slice_timing.inputs.time_repetition = TR
    slice_timing.inputs.time_acquisition = TR - TR / float(num_slices)
    slice_timing.inputs.slice_order = (np.argsort(slice_times) + 1).tolist()
    slice_timing.inputs.ref_slice = int(num_slices / 2)

    # Compute TSNR on the slice-time-corrected data, regressing out polynomials up to order 2
    tsnr = MapNode(TSNR(regress_poly=2), iterfield=['in_file'], name='tsnr')
    wf.connect(slice_timing, 'timecorrected_files', tsnr, 'in_file')

    # Compute the median image across runs
    calc_median = Node(Function(input_names=['in_files'],
                                output_names=['median_file'],
                                function=median,
                                imports=imports),
                       name='median')
    wf.connect(tsnr, 'detrended_file', calc_median, 'in_files')
    """Segment and Register
    """

    registration = create_reg_workflow(name='registration')
    wf.connect(calc_median, 'median_file', registration,
               'inputspec.mean_image')
    registration.inputs.inputspec.subject_id = subject_id
    registration.inputs.inputspec.subjects_dir = subjects_dir
    registration.inputs.inputspec.target_image = target_file
    """Use :class:`nipype.algorithms.rapidart` to determine which of the
    images in the functional series are outliers based on deviations in
    intensity or movement.
    """

    art = Node(interface=ArtifactDetect(), name="art")
    art.inputs.use_differences = [True, True]
    art.inputs.use_norm = True
    art.inputs.norm_threshold = norm_threshold
    art.inputs.zintensity_threshold = 9
    art.inputs.mask_type = 'spm_global'
    art.inputs.parameter_source = 'SPM'
    """Here we are connecting all the nodes together. Notice that we add the merge node only if you choose
    to use 4D. Also `get_vox_dims` function is passed along the input volume of normalise to set the optimal
    voxel sizes.
    """

    wf.connect([
        (name_unique, realign, [('out_file', 'in_files')]),
        (realign, slice_timing, [('realigned_files', 'in_files')]),
        (slice_timing, art, [('timecorrected_files', 'realigned_files')]),
        (realign, art, [('realignment_parameters', 'realignment_parameters')]),
    ])

    def selectindex(files, idx):
        import numpy as np
        from nipype.utils.filemanip import filename_to_list, list_to_filename
        return list_to_filename(
            np.array(filename_to_list(files))[idx].tolist())
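    # For example, selectindex(['c1.nii', 'c2.nii', 'c3.nii'], [0, 2]) returns
    # ['c1.nii', 'c3.nii'] (filenames hypothetical); below it picks two of the
    # segmentation maps to serve as the CompCor noise masks.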

    mask = Node(fsl.BET(), name='getmask')
    mask.inputs.mask = True
    wf.connect(calc_median, 'median_file', mask, 'in_file')

    # get segmentation in normalized functional space

    def merge_files(in1, in2):
        out_files = filename_to_list(in1)
        out_files.extend(filename_to_list(in2))
        return out_files

    # Nuisance filtering: motion/artifact regressors first, then CompCor components

    # Compute motion regressors
    motreg = Node(Function(
        input_names=['motion_params', 'order', 'derivatives'],
        output_names=['out_files'],
        function=motion_regressors,
        imports=imports),
                  name='getmotionregress')
    wf.connect(realign, 'realignment_parameters', motreg, 'motion_params')

    # Create a filter to remove motion and art confounds
    createfilter1 = Node(Function(
        input_names=['motion_params', 'comp_norm', 'outliers', 'detrend_poly'],
        output_names=['out_files'],
        function=build_filter1,
        imports=imports),
                         name='makemotionbasedfilter')
    createfilter1.inputs.detrend_poly = 2
    wf.connect(motreg, 'out_files', createfilter1, 'motion_params')
    wf.connect(art, 'norm_files', createfilter1, 'comp_norm')
    wf.connect(art, 'outlier_files', createfilter1, 'outliers')

    filter1 = MapNode(fsl.GLM(out_f_name='F_mcart.nii',
                              out_pf_name='pF_mcart.nii',
                              demean=True),
                      iterfield=['in_file', 'design', 'out_res_name'],
                      name='filtermotion')

    wf.connect(slice_timing, 'timecorrected_files', filter1, 'in_file')
    wf.connect(slice_timing, ('timecorrected_files', rename, '_filtermotart'),
               filter1, 'out_res_name')
    wf.connect(createfilter1, 'out_files', filter1, 'design')

    createfilter2 = MapNode(Function(input_names=[
        'realigned_file', 'mask_file', 'num_components', 'extra_regressors'
    ],
                                     output_names=['out_files'],
                                     function=extract_noise_components,
                                     imports=imports),
                            iterfield=['realigned_file', 'extra_regressors'],
                            name='makecompcorrfilter')
    createfilter2.inputs.num_components = num_components

    wf.connect(createfilter1, 'out_files', createfilter2, 'extra_regressors')
    wf.connect(filter1, 'out_res', createfilter2, 'realigned_file')
    wf.connect(registration,
               ('outputspec.segmentation_files', selectindex, [0, 2]),
               createfilter2, 'mask_file')

    filter2 = MapNode(fsl.GLM(out_f_name='F.nii',
                              out_pf_name='pF.nii',
                              demean=True),
                      iterfield=['in_file', 'design', 'out_res_name'],
                      name='filter_noise_nosmooth')
    wf.connect(filter1, 'out_res', filter2, 'in_file')
    wf.connect(filter1, ('out_res', rename, '_cleaned'), filter2,
               'out_res_name')
    wf.connect(createfilter2, 'out_files', filter2, 'design')
    wf.connect(mask, 'mask_file', filter2, 'mask')

    bandpass = Node(Function(
        input_names=['files', 'lowpass_freq', 'highpass_freq', 'fs'],
        output_names=['out_files'],
        function=bandpass_filter,
        imports=imports),
                    name='bandpass_unsmooth')
    bandpass.inputs.fs = 1. / TR
    bandpass.inputs.highpass_freq = highpass_freq
    bandpass.inputs.lowpass_freq = lowpass_freq
    wf.connect(filter2, 'out_res', bandpass, 'files')
    """Smooth the functional data using
    :class:`nipype.interfaces.spm.Smooth`.
    """

    smooth = Node(interface=spm.Smooth(), name="smooth")
    smooth.inputs.fwhm = vol_fwhm

    wf.connect(bandpass, 'out_files', smooth, 'in_files')

    collector = Node(Merge(2), name='collect_streams')
    wf.connect(smooth, 'smoothed_files', collector, 'in1')
    wf.connect(bandpass, 'out_files', collector, 'in2')
    """
    Transform the remaining images. First to anatomical and then to target
    """

    warpall = MapNode(ants.ApplyTransforms(),
                      iterfield=['input_image'],
                      name='warpall')
    warpall.inputs.input_image_type = 3
    warpall.inputs.interpolation = 'Linear'
    warpall.inputs.invert_transform_flags = [False, False]
    warpall.inputs.terminal_output = 'file'
    warpall.inputs.reference_image = target_file
    warpall.inputs.args = '--float'
    warpall.inputs.num_threads = 1

    # transform to target
    wf.connect(collector, 'out', warpall, 'input_image')
    wf.connect(registration, 'outputspec.transforms', warpall, 'transforms')

    mask_target = Node(fsl.ImageMaths(op_string='-bin'), name='target_mask')

    wf.connect(registration, 'outputspec.anat2target', mask_target, 'in_file')

    maskts = MapNode(fsl.ApplyMask(), iterfield=['in_file'], name='ts_masker')
    wf.connect(warpall, 'output_image', maskts, 'in_file')
    wf.connect(mask_target, 'out_file', maskts, 'mask_file')

    # map to surface
    # extract aparc+aseg ROIs
    # extract subcortical ROIs
    # extract target space ROIs
    # combine subcortical and cortical rois into a single cifti file

    #######
    # Convert aparc to subject functional space

    # Sample the average time series in aparc ROIs
    sampleaparc = MapNode(
        freesurfer.SegStats(default_color_table=True),
        iterfield=['in_file', 'summary_file', 'avgwf_txt_file'],
        name='aparc_ts')
    sampleaparc.inputs.segment_id = ([8] + list(range(10, 14)) +
                                     [17, 18, 26, 47] + list(range(49, 55)) +
                                     [58] + list(range(1001, 1036)) +
                                     list(range(2001, 2036)))

    wf.connect(registration, 'outputspec.aparc', sampleaparc,
               'segmentation_file')
    wf.connect(collector, 'out', sampleaparc, 'in_file')

    def get_names(files, suffix):
        """Generate appropriate names for output files
        """
        from nipype.utils.filemanip import (split_filename, filename_to_list,
                                            list_to_filename)
        out_names = []
        for filename in files:
            _, name, _ = split_filename(filename)
            out_names.append(name + suffix)
        return list_to_filename(out_names)
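    # For example, get_names(['/tmp/rest_01.nii.gz'], '_avgwf.txt') returns
    # 'rest_01_avgwf.txt' (path hypothetical); split_filename strips the full
    # .nii.gz extension before the suffix is appended.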

    wf.connect(collector, ('out', get_names, '_avgwf.txt'), sampleaparc,
               'avgwf_txt_file')
    wf.connect(collector, ('out', get_names, '_summary.stats'), sampleaparc,
               'summary_file')

    # Sample the time series onto the surface of the target surface. Performs
    # sampling into left and right hemisphere
    target = Node(IdentityInterface(fields=['target_subject']), name='target')
    target.iterables = ('target_subject', filename_to_list(target_subject))

    samplerlh = MapNode(freesurfer.SampleToSurface(),
                        iterfield=['source_file'],
                        name='sampler_lh')
    samplerlh.inputs.sampling_method = "average"
    samplerlh.inputs.sampling_range = (0.1, 0.9, 0.1)
    samplerlh.inputs.sampling_units = "frac"
    samplerlh.inputs.interp_method = "trilinear"
    samplerlh.inputs.smooth_surf = surf_fwhm
    # samplerlh.inputs.cortex_mask = True
    samplerlh.inputs.out_type = 'niigz'
    samplerlh.inputs.subjects_dir = subjects_dir

    samplerrh = samplerlh.clone('sampler_rh')

    samplerlh.inputs.hemi = 'lh'
    wf.connect(collector, 'out', samplerlh, 'source_file')
    wf.connect(registration, 'outputspec.out_reg_file', samplerlh, 'reg_file')
    wf.connect(target, 'target_subject', samplerlh, 'target_subject')

    samplerrh.set_input('hemi', 'rh')
    wf.connect(collector, 'out', samplerrh, 'source_file')
    wf.connect(registration, 'outputspec.out_reg_file', samplerrh, 'reg_file')
    wf.connect(target, 'target_subject', samplerrh, 'target_subject')

    # Combine left and right hemisphere to text file
    combiner = MapNode(Function(input_names=['left', 'right'],
                                output_names=['out_file'],
                                function=combine_hemi,
                                imports=imports),
                       iterfield=['left', 'right'],
                       name="combiner")
    wf.connect(samplerlh, 'out_file', combiner, 'left')
    wf.connect(samplerrh, 'out_file', combiner, 'right')

    # Sample the time series file for each subcortical roi
    ts2txt = MapNode(Function(
        input_names=['timeseries_file', 'label_file', 'indices'],
        output_names=['out_file'],
        function=extract_subrois,
        imports=imports),
                     iterfield=['timeseries_file'],
                     name='getsubcortts')
    ts2txt.inputs.indices = [8] + list(range(10, 14)) + [17, 18, 26, 47] +\
        list(range(49, 55)) + [58]
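    # These indices are FreeSurfer aseg label IDs for subcortical structures
    # (e.g. 17 and 53 are the left and right hippocampus).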
    ts2txt.inputs.label_file = \
        os.path.abspath(('OASIS-TRT-20_jointfusion_DKT31_CMA_labels_in_MNI152_'
                         '2mm_v2.nii.gz'))
    wf.connect(maskts, 'out_file', ts2txt, 'timeseries_file')

    ######

    substitutions = [('_target_subject_', ''),
                     ('_filtermotart_cleaned_bp_trans_masked', ''),
                     ('_filtermotart_cleaned_bp', '')]
    regex_subs = [
        ('_ts_masker.*/sar', '/smooth/'),
        ('_ts_masker.*/ar', '/unsmooth/'),
        ('_combiner.*/sar', '/smooth/'),
        ('_combiner.*/ar', '/unsmooth/'),
        ('_aparc_ts.*/sar', '/smooth/'),
        ('_aparc_ts.*/ar', '/unsmooth/'),
        ('_getsubcortts.*/sar', '/smooth/'),
        ('_getsubcortts.*/ar', '/unsmooth/'),
        ('series/sar', 'series/smooth/'),
        ('series/ar', 'series/unsmooth/'),
        ('_inverse_transform./', ''),
    ]
    # Save the relevant data into an output directory
    datasink = Node(interface=DataSink(), name="datasink")
    datasink.inputs.base_directory = sink_directory
    datasink.inputs.container = subject_id
    datasink.inputs.substitutions = substitutions
    datasink.inputs.regexp_substitutions = regex_subs  # (r'(/_.*(\d+/))', r'/run\2')
    wf.connect(realign, 'realignment_parameters', datasink,
               'resting.qa.motion')
    wf.connect(art, 'norm_files', datasink, 'resting.qa.art.@norm')
    wf.connect(art, 'intensity_files', datasink, 'resting.qa.art.@intensity')
    wf.connect(art, 'outlier_files', datasink, 'resting.qa.art.@outlier_files')
    wf.connect(registration, 'outputspec.segmentation_files', datasink,
               'resting.mask_files')
    wf.connect(registration, 'outputspec.anat2target', datasink,
               'resting.qa.ants')
    wf.connect(mask, 'mask_file', datasink, 'resting.mask_files.@brainmask')
    wf.connect(mask_target, 'out_file', datasink, 'resting.mask_files.target')
    wf.connect(filter1, 'out_f', datasink, 'resting.qa.compmaps.@mc_F')
    wf.connect(filter1, 'out_pf', datasink, 'resting.qa.compmaps.@mc_pF')
    wf.connect(filter2, 'out_f', datasink, 'resting.qa.compmaps')
    wf.connect(filter2, 'out_pf', datasink, 'resting.qa.compmaps.@p')
    wf.connect(bandpass, 'out_files', datasink,
               'resting.timeseries.@bandpassed')
    wf.connect(smooth, 'smoothed_files', datasink,
               'resting.timeseries.@smoothed')
    wf.connect(createfilter1, 'out_files', datasink,
               'resting.regress.@regressors')
    wf.connect(createfilter2, 'out_files', datasink,
               'resting.regress.@compcorr')
    wf.connect(maskts, 'out_file', datasink, 'resting.timeseries.target')
    wf.connect(sampleaparc, 'summary_file', datasink,
               'resting.parcellations.aparc')
    wf.connect(sampleaparc, 'avgwf_txt_file', datasink,
               'resting.parcellations.aparc.@avgwf')
    wf.connect(ts2txt, 'out_file', datasink,
               'resting.parcellations.grayo.@subcortical')

    datasink2 = Node(interface=DataSink(), name="datasink2")
    datasink2.inputs.base_directory = sink_directory
    datasink2.inputs.container = subject_id
    datasink2.inputs.substitutions = substitutions
    datasink2.inputs.regexp_substitutions = regex_subs  # (r'(/_.*(\d+/))', r'/run\2')
    wf.connect(combiner, 'out_file', datasink2,
               'resting.parcellations.grayo.@surface')
    return wf
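A minimal sketch of calling the function above; every path and acquisition
parameter here is hypothetical, and TR/slice_times must match the actual
acquisition:

wf = create_workflow(files=['/data/s01/rest_run1.nii',
                            '/data/s01/rest_run2.nii'],
                     target_file='/templates/mni152_2mm.nii.gz',
                     subject_id='s01',
                     TR=2.0,
                     slice_times=[0.0, 1.0, 0.5, 1.5],
                     vol_fwhm=6.0,
                     surf_fwhm=5.0,
                     lowpass_freq=0.1,
                     highpass_freq=0.01,
                     subjects_dir='/data/freesurfer',
                     sink_directory='/output/resting')
wf.base_dir = '/scratch/nipype_work'
wf.run(plugin='MultiProc', plugin_args={'n_procs': 4})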
Exemplo n.º 18
0
def do_pipe3_projection(subject_ID,
                        freesurfer_dir,
                        workflow_dir,
                        output_dir,
                        tract_number,
                        use_sample=False):
    """
    Packages and Data Setup
    =======================
    Import necessary modules from nipype.
    """

    import nipype.interfaces.io as io  # Data i/o
    import nipype.interfaces.utility as util  # utility
    import nipype.pipeline.engine as pe  # pipeline engine
    import nipype.interfaces.fsl as fsl
    import nipype.interfaces.freesurfer as fsurf  # freesurfer
    import nipype.interfaces.ants as ants
    import os.path as op  # system functions

    from nipype.interfaces.utility import Function

    from dmri_pipe_aux import get_connectivity_matrix
    from dmri_pipe_aux import surf2file
    from dmri_pipe_aux import voxels2nii
    from dmri_pipe_aux import normalize_matrix
    from dmri_pipe_aux import interface2surf
    from dmri_pipe_aux import read_voxels
    from dmri_pipe_aux import downsample_matrix
    from dmri_pipe_aux import merge_matrices
    """""" """""" """""" """""" """""" """""" """""" """""" """""" """""" """""" """""" """""" """
    Point to the freesurfer subjects directory (Recon-all must have been run on the subjects)
    """ """""" """""" """""" """""" """""" """""" """""" """""" """""" """""" """""" """""" """"""

    subjects_dir = op.abspath(freesurfer_dir)
    fsurf.FSCommand.set_default_subjects_dir(subjects_dir)
    fsl.FSLCommand.set_default_output_type('NIFTI')
    """""" """""" """""" """""" """""" """""" """""" """""" """""" """""" """""" """""" """""" """
    define the workflow
    """ """""" """""" """""" """""" """""" """""" """""" """""" """""" """""" """""" """""" """"""

    dmripipeline = pe.Workflow(name='pipe3_projection')
    """""" """""" """""" """""" """""" """""" """""" """""" """""" """""" """""" """""" """""" """
    Use datasource node to perform the actual data grabbing.
    Templates for the associated images are used to obtain the correct images.
    """ """""" """""" """""" """""" """""" """""" """""" """""" """""" """""" """""" """""" """"""

    data_template = subject_ID + "/%s/" + "%s" + "%s"

    info = dict(
        wm=[['fa_masking', subject_ID, '_mask_wm.nii']],
        seeds_left=[['fa_masking', subject_ID, '_interface_left_voxels.txt']],
        seeds_right=[['fa_masking', subject_ID,
                      '_interface_right_voxels.txt']],
        index_left=[['fa_masking', subject_ID, '_interface_left_index.nii']],
        index_right=[['fa_masking', subject_ID, '_interface_right_index.nii']],
        fa=[['fa_masking', subject_ID, '_fa_masked.nii']],
        t1=[['anatomy', subject_ID, '_t1_masked.nii']],
        inv_flirt_mat=[['anatomy', '', 'flirt_t1_2_fa_inv.mat']],
        warp=[['anatomy', '', 'ants_fa_2_regt1_Warp.nii.gz']])
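    # With data_template above, e.g. the 'wm' entry resolves to
    # <output_dir>/<subject_ID>/fa_masking/<subject_ID>_mask_wm.nii;
    # DataGrabber substitutes each template_args triple into the template.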

    datasource = pe.Node(interface=io.DataGrabber(outfields=info.keys()),
                         name='datasource')
    datasource.inputs.template = data_template
    datasource.inputs.base_directory = output_dir
    datasource.inputs.template_args = info
    datasource.inputs.sort_filelist = True
    datasource.run_without_submitting = True

    tracts_left_source = pe.Node(
        interface=io.DataGrabber(outfields=['tracts_left']),
        name='tracts_left_source')
    tracts_left_source.inputs.template = subject_ID + '/raw_tracts/lh/probtract_*.nii'
    tracts_left_source.inputs.base_directory = output_dir
    tracts_left_source.inputs.sort_filelist = True
    tracts_left_source.run_without_submitting = True

    tracts_right_source = pe.Node(
        interface=io.DataGrabber(outfields=['tracts_right']),
        name='tracts_right_source')
    tracts_right_source.inputs.template = subject_ID + '/raw_tracts/rh/probtract_*.nii'
    tracts_right_source.inputs.base_directory = output_dir
    tracts_right_source.inputs.sort_filelist = True
    tracts_right_source.run_without_submitting = True
    """""" """""" """""" """""" """""" """""" """""" """""" """""" """""" """""" """""" """""" """
    The input node declared here will be the main
    conduits for the raw data to the rest of the processing pipeline.
    """ """""" """""" """""" """""" """""" """""" """""" """""" """""" """""" """""" """""" """"""

    inputnode = pe.Node(interface=util.IdentityInterface(fields=[
        "wm", "seeds_left", "seeds_right", "index_left", "index_right", "fa",
        "t1", "inv_flirt_mat", "warp", "tracts_left", "tracts_right"
    ]),
                        name="inputnode")
    """
    read seed coordinates
    """

    interface_voxels_left = pe.Node(interface=Function(
        input_names=["seed_file", "use_sample"],
        output_names=["seed_list"],
        function=read_voxels),
                                    name='70_interface_voxels_left')
    interface_voxels_left.inputs.use_sample = use_sample
    dmripipeline.connect(inputnode, "seeds_left", interface_voxels_left,
                         "seed_file")

    interface_voxels_right = interface_voxels_left.clone(
        name='70_interface_voxels_right')
    dmripipeline.connect(inputnode, "seeds_right", interface_voxels_right,
                         "seed_file")
    """
    Get the direct connectivity matrix
    """

    connectivity_matrix = pe.Node(interface=Function(
        input_names=[
            "tract_list_left", "tract_list_right", "voxel_list_left",
            "voxel_list_right", "max_value"
        ],
        output_names=[
            "submatrix_left_left", "submatrix_left_right",
            "submatrix_right_left", "submatrix_right_right", "exclusion_list"
        ],
        function=get_connectivity_matrix),
                                  name='71_direct_connect_array')
    connectivity_matrix.inputs.max_value = tract_number
    connectivity_matrix.run_without_submitting = True
    #    connectivity_matrix.plugin_args={'override_specs': 'requirements = Machine == "kalifornien.cbs.mpg.de"'}
    dmripipeline.connect(inputnode, "tracts_left", connectivity_matrix,
                         "tract_list_left")
    dmripipeline.connect(inputnode, "tracts_right", connectivity_matrix,
                         "tract_list_right")
    dmripipeline.connect(interface_voxels_left, "seed_list",
                         connectivity_matrix, "voxel_list_left")
    dmripipeline.connect(interface_voxels_right, "seed_list",
                         connectivity_matrix, "voxel_list_right")

    tract_exclusion_mask = pe.Node(interface=Function(
        input_names=["voxel_list", "ref_image", "outfile"],
        output_names=["outfile"],
        function=voxels2nii),
                                   name='72_tract_exclusion_mask')
    tract_exclusion_mask.inputs.outfile = subject_ID + '_tractseed_exclusion_mask.nii'
    dmripipeline.connect(inputnode, "wm", tract_exclusion_mask, "ref_image")
    dmripipeline.connect(connectivity_matrix, "exclusion_list",
                         tract_exclusion_mask, "voxel_list")

    submatrix_left_left = pe.Node(interface=Function(
        input_names=["in_array", "max_value", "outfile_prefix"],
        output_names=[
            "mat_matrix_nat", "mat_matrix_log", "nii_matrix_nat",
            "nii_matrix_log"
        ],
        function=normalize_matrix),
                                  name='73_submatrix_left_left')
    submatrix_left_left.run_without_submitting = True
    submatrix_left_left.inputs.max_value = tract_number
    submatrix_left_left.inputs.outfile_prefix = 'directconnect_left_left'
    dmripipeline.connect(connectivity_matrix, "submatrix_left_left",
                         submatrix_left_left, "in_array")

    submatrix_left_right = submatrix_left_left.clone(
        name='73_submatrix_left_right')
    submatrix_left_right.inputs.outfile_prefix = 'directconnect_left_right'
    dmripipeline.connect(connectivity_matrix, "submatrix_left_right",
                         submatrix_left_right, "in_array")

    submatrix_right_left = submatrix_left_left.clone(
        name='73_submatrix_right_left')
    submatrix_right_left.inputs.outfile_prefix = 'directconnect_right_left'
    dmripipeline.connect(connectivity_matrix, "submatrix_right_left",
                         submatrix_right_left, "in_array")

    submatrix_right_right = submatrix_left_left.clone(
        name='73_submatrix_right_right')
    submatrix_right_right.inputs.outfile_prefix = 'directconnect_right_right'
    dmripipeline.connect(connectivity_matrix, "submatrix_right_right",
                         submatrix_right_right, "in_array")

    #     full_matrix_nat = pe.Node(interface=Function(input_names=["sm_left_left", "sm_left_right","sm_right_left", "sm_right_right", "out_filename"], output_names=["out_file"], function=merge_matrices), name='73_full_matrix_nat')
    #     full_matrix_nat.inputs.out_filename = 'directconnect_full_nat.mat'
    #     full_matrix_nat.run_without_submitting = True
    #     dmripipeline.connect(submatrix_left_left, "nii_matrix_nat", full_matrix_nat, "sm_left_left")
    #     dmripipeline.connect(submatrix_left_right, "nii_matrix_nat", full_matrix_nat, "sm_left_right")
    #     dmripipeline.connect(submatrix_right_left, "nii_matrix_nat", full_matrix_nat, "sm_right_left")
    #     dmripipeline.connect(submatrix_right_right, "nii_matrix_nat", full_matrix_nat, "sm_right_right")
    #
    #     full_matrix_log = full_matrix_nat.clone(name='73_full_matrix_log')
    #     full_matrix_log.inputs.out_filename = 'directconnect_full_log.mat'
    #     full_matrix_log.run_without_submitting = True
    #     dmripipeline.connect(submatrix_left_left, "nii_matrix_log", full_matrix_log, "sm_left_left")
    #     dmripipeline.connect(submatrix_left_right, "nii_matrix_log", full_matrix_log, "sm_left_right")
    #     dmripipeline.connect(submatrix_right_left, "nii_matrix_log", full_matrix_log, "sm_right_left")
    #     dmripipeline.connect(submatrix_right_right, "nii_matrix_log", full_matrix_log, "sm_right_right")
    """
    # invert and binarize tract exclusion mask and remove those voxels from the index interfaces
    """
    tract_denoise_mask = pe.Node(interface=fsl.maths.MathsCommand(),
                                 name='74_tract_denoise_mask')
    tract_denoise_mask.inputs.args = '-binv'
    tract_denoise_mask.run_without_submitting = True
    dmripipeline.connect(tract_exclusion_mask, "outfile", tract_denoise_mask,
                         "in_file")

    index_pruned_left = pe.Node(interface=fsl.maths.ApplyMask(),
                                name='75_interface_pruned_left')
    index_pruned_left.inputs.out_file = subject_ID + '_interface_pruned_left.nii'
    index_pruned_left.run_without_submitting = True
    dmripipeline.connect(inputnode, "index_left", index_pruned_left, "in_file")
    dmripipeline.connect(tract_denoise_mask, "out_file", index_pruned_left,
                         "mask_file")

    index_pruned_right = index_pruned_left.clone(
        name='75_interface_pruned_right')
    index_pruned_right.inputs.out_file = subject_ID + '_interface_pruned_right.nii'
    dmripipeline.connect(inputnode, "index_right", index_pruned_right,
                         "in_file")
    dmripipeline.connect(tract_denoise_mask, "out_file", index_pruned_right,
                         "mask_file")
    """
    # warp index image to t1 space
    """
    index_warped_2_t1_left = pe.Node(interface=ants.WarpImageMultiTransform(),
                                     name='76_index_warped_2_t1_left')
    index_warped_2_t1_left.inputs.use_nearest = True
    index_warped_2_t1_left.run_without_submitting = True
    dmripipeline.connect([(index_pruned_left, index_warped_2_t1_left,
                           [('out_file', 'input_image')])])
    dmripipeline.connect([(inputnode, index_warped_2_t1_left,
                           [('fa', 'reference_image')])])
    dmripipeline.connect([(inputnode, index_warped_2_t1_left,
                           [('warp', 'transformation_series')])])

    index_warped_2_t1_right = index_warped_2_t1_left.clone(
        name='76_index_warped_2_t1_right')
    dmripipeline.connect([(index_pruned_right, index_warped_2_t1_right,
                           [('out_file', 'input_image')])])
    dmripipeline.connect([(inputnode, index_warped_2_t1_right,
                           [('fa', 'reference_image')])])
    dmripipeline.connect([(inputnode, index_warped_2_t1_right,
                           [('warp', 'transformation_series')])])

    index_final_2_t1_left = pe.Node(interface=fsl.ApplyXfm(),
                                    name='77_index_final_2_t1_left')
    index_final_2_t1_left.inputs.apply_xfm = True
    index_final_2_t1_left.run_without_submitting = True
    index_final_2_t1_left.inputs.interp = 'nearestneighbour'
    index_final_2_t1_left.inputs.out_file = subject_ID + '_index_seedt1_left.nii'
    dmripipeline.connect([(index_warped_2_t1_left, index_final_2_t1_left,
                           [("output_image", "in_file")])])
    dmripipeline.connect([(inputnode, index_final_2_t1_left,
                           [("inv_flirt_mat", "in_matrix_file")])])
    dmripipeline.connect([(inputnode, index_final_2_t1_left, [("t1",
                                                               "reference")])])

    index_final_2_t1_right = index_final_2_t1_left.clone(
        name='77_index_final_2_t1_right')
    index_final_2_t1_right.inputs.out_file = subject_ID + '_index_seedt1_right.nii'
    dmripipeline.connect([(index_warped_2_t1_right, index_final_2_t1_right,
                           [("output_image", "in_file")])])
    dmripipeline.connect([(inputnode, index_final_2_t1_right,
                           [("inv_flirt_mat", "in_matrix_file")])])
    dmripipeline.connect([(inputnode, index_final_2_t1_right,
                           [("t1", "reference")])])
    """
    extra processing
    """

    index_vol2surf_left = pe.Node(interface=fsurf.SampleToSurface(),
                                  name='78_index_vol2surf_left')
    index_vol2surf_left.inputs.hemi = 'lh'
    index_vol2surf_left.inputs.subject_id = subject_ID
    index_vol2surf_left.inputs.reg_header = True
    index_vol2surf_left.inputs.interp_method = 'nearest'
    index_vol2surf_left.inputs.sampling_method = 'point'
    index_vol2surf_left.inputs.sampling_range = 0
    index_vol2surf_left.inputs.sampling_units = 'frac'
    index_vol2surf_left.inputs.surface = 'orig'
    #index_vol2surf_left.inputs.cortex_mask = True
    index_vol2surf_left.inputs.terminal_output = 'file'
    index_vol2surf_left.inputs.out_file = subject_ID + '_index_seedt1_2surf_left.mgz'
    index_vol2surf_left.run_without_submitting = True
    dmripipeline.connect([(index_final_2_t1_left, index_vol2surf_left,
                           [('out_file', 'source_file')])])

    index_vol2surf_right = index_vol2surf_left.clone(
        name='78_index_vol2surf_right')
    index_vol2surf_right.inputs.hemi = 'rh'
    index_vol2surf_right.inputs.out_file = subject_ID + '_index_seedt1_2surf_right.mgz'
    dmripipeline.connect([(index_final_2_t1_right, index_vol2surf_right,
                           [('out_file', 'source_file')])])

    index_2_t1_reorient_left = pe.Node(interface=fsl.Reorient2Std(),
                                       name='79_index_2_t1_reorient_left')
    index_2_t1_reorient_left.inputs.out_file = subject_ID + '_index_seedt1_reorient_left.nii'
    index_2_t1_reorient_left.run_without_submitting = True
    dmripipeline.connect(index_final_2_t1_left, 'out_file',
                         index_2_t1_reorient_left, 'in_file')

    index_2_t1_reorient_right = index_2_t1_reorient_left.clone(
        name='79_index_2_t1_reorient_right')
    index_2_t1_reorient_right.inputs.out_file = subject_ID + '_index_seedt1_reorient_right.nii'
    dmripipeline.connect(index_final_2_t1_right, 'out_file',
                         index_2_t1_reorient_right, 'in_file')

    index_interface2surf_left = pe.Node(interface=Function(
        input_names=[
            "interface_image", "surface_file", "cortex_label", "ref_mgz",
            "out_file"
        ],
        output_names=["out_file"],
        function=interface2surf),
                                        name='80_index_interface2surf_left')
    index_interface2surf_left.inputs.surface_file = subjects_dir + '/' + subject_ID + '/surf/lh.orig'
    index_interface2surf_left.inputs.cortex_label = subjects_dir + '/' + subject_ID + '/label/lh.cortex.label'
    index_interface2surf_left.inputs.out_file = subject_ID + '_index_seedt1_2surf_left.mgz'
    dmripipeline.connect(index_2_t1_reorient_left, 'out_file',
                         index_interface2surf_left, 'interface_image')
    dmripipeline.connect(index_vol2surf_left, 'out_file',
                         index_interface2surf_left, 'ref_mgz')

    index_interface2surf_right = index_interface2surf_left.clone(
        name='80_index_interface2surf_right')
    index_interface2surf_right.inputs.surface_file = subjects_dir + '/' + subject_ID + '/surf/rh.orig'
    index_interface2surf_right.inputs.cortex_label = subjects_dir + '/' + subject_ID + '/label/rh.cortex.label'
    index_interface2surf_right.inputs.out_file = subject_ID + '_index_seedt1_2surf_right.mgz'
    dmripipeline.connect(index_2_t1_reorient_right, 'out_file',
                         index_interface2surf_right, 'interface_image')
    dmripipeline.connect(index_vol2surf_right, 'out_file',
                         index_interface2surf_right, 'ref_mgz')

    fs_indexlist_left = pe.Node(interface=Function(
        input_names=["in_surface_values", "cortex_label", "out_file"],
        output_names=["out_file"],
        function=surf2file),
                                name='81_index_fsnative_left')
    fs_indexlist_left.inputs.cortex_label = op.join(
        freesurfer_dir, subject_ID + '/label/lh.cortex.label')
    fs_indexlist_left.inputs.out_file = subject_ID + '_seed_index_fsnative_left.txt'
    fs_indexlist_left.run_without_submitting = True
    dmripipeline.connect([(index_interface2surf_left, fs_indexlist_left,
                           [("out_file", "in_surface_values")])])

    fs_indexlist_right = fs_indexlist_left.clone(
        name='81_index_fsnative_right')
    fs_indexlist_right.inputs.cortex_label = op.join(
        freesurfer_dir, subject_ID + '/label/rh.cortex.label')
    fs_indexlist_right.inputs.out_file = subject_ID + '_seed_index_fsnative_right.txt'
    dmripipeline.connect([(index_interface2surf_right, fs_indexlist_right,
                           [("out_file", "in_surface_values")])])
    """""" """""" """""" """""" """
    """ """""" """""" """""" """"""

    index_fsaverage5_left = pe.Node(interface=fsurf.SurfaceTransform(),
                                    name='81_index_fsaverage5_left')
    index_fsaverage5_left.inputs.hemi = 'lh'
    index_fsaverage5_left.inputs.source_subject = subject_ID
    index_fsaverage5_left.inputs.target_subject = 'fsaverage5'
    index_fsaverage5_left.inputs.args = '--mapmethod nnf --label-src lh.cortex.label --label-trg lh.cortex.label'
    index_fsaverage5_left.inputs.out_file = subject_ID + '_index_seedt1_fsaverage5_left.mgz'
    #index_fsaverage5_left.run_without_submitting = True
    dmripipeline.connect([(index_interface2surf_left, index_fsaverage5_left,
                           [('out_file', 'source_file')])])

    index_fsaverage5_right = index_fsaverage5_left.clone(
        name='81_index_fsaverage5_right')
    index_fsaverage5_right.inputs.hemi = 'rh'
    index_fsaverage5_right.inputs.args = '--mapmethod nnf --label-src rh.cortex.label --label-trg rh.cortex.label'
    index_fsaverage5_right.inputs.out_file = subject_ID + '_index_seedt1_fsaverage5_right.mgz'
    dmripipeline.connect([(index_interface2surf_right, index_fsaverage5_right,
                           [('out_file', 'source_file')])])

    fs5_indexlist_left = pe.Node(interface=Function(
        input_names=["in_surface_values", "cortex_label", "out_file"],
        output_names=["out_file"],
        function=surf2file),
                                 name='82_index_fsav5_left')
    fs5_indexlist_left.inputs.cortex_label = op.join(
        freesurfer_dir, 'fsaverage5/label/lh.cortex.label')
    fs5_indexlist_left.inputs.out_file = subject_ID + '_seed_index_fs5_left.txt'
    #fs5_indexlist_left.run_without_submitting = True
    dmripipeline.connect([(index_fsaverage5_left, fs5_indexlist_left,
                           [("out_file", "in_surface_values")])])

    fs5_indexlist_right = fs5_indexlist_left.clone(name='82_index_fsav5_right')
    fs5_indexlist_right.inputs.cortex_label = op.join(
        freesurfer_dir, 'fsaverage5/label/rh.cortex.label')
    fs5_indexlist_right.inputs.out_file = subject_ID + '_seed_index_fs5_right.txt'
    dmripipeline.connect([(index_fsaverage5_right, fs5_indexlist_right,
                           [("out_file", "in_surface_values")])])

    index_fsaverage4_left = pe.Node(interface=fsurf.SurfaceTransform(),
                                    name='81_index_fsaverage4_left')
    index_fsaverage4_left.inputs.hemi = 'lh'
    index_fsaverage4_left.inputs.source_subject = subject_ID
    index_fsaverage4_left.inputs.target_subject = 'fsaverage4'
    index_fsaverage4_left.inputs.args = '--mapmethod nnf --label-src lh.cortex.label --label-trg lh.cortex.label'
    index_fsaverage4_left.inputs.out_file = subject_ID + '_index_seedt1_fsaverage4_left.mgz'
    #index_fsaverage4_left.run_without_submitting = True
    dmripipeline.connect([(index_interface2surf_left, index_fsaverage4_left,
                           [('out_file', 'source_file')])])

    index_fsaverage4_right = index_fsaverage4_left.clone(
        name='81_index_fsaverage4_right')
    index_fsaverage4_right.inputs.hemi = 'rh'
    index_fsaverage4_right.inputs.args = '--mapmethod nnf --label-src rh.cortex.label --label-trg rh.cortex.label'
    index_fsaverage4_right.inputs.out_file = subject_ID + '_index_seedt1_fsaverage4_right.mgz'
    dmripipeline.connect([(index_interface2surf_right, index_fsaverage4_right,
                           [('out_file', 'source_file')])])

    fs4_indexlist_left = pe.Node(interface=Function(
        input_names=["in_surface_values", "cortex_label", "out_file"],
        output_names=["out_file"],
        function=surf2file),
                                 name='82_index_fsav4_left')
    fs4_indexlist_left.inputs.cortex_label = op.join(
        freesurfer_dir, 'fsaverage4/label/lh.cortex.label')
    fs4_indexlist_left.inputs.out_file = subject_ID + '_seed_index_fs4_left.txt'
    #fs4_indexlist_left.run_without_submitting = True
    dmripipeline.connect([(index_fsaverage4_left, fs4_indexlist_left,
                           [("out_file", "in_surface_values")])])

    fs4_indexlist_right = fs4_indexlist_left.clone(name='82_index_fsav4_right')
    fs4_indexlist_right.inputs.cortex_label = op.join(
        freesurfer_dir, 'fsaverage4/label/rh.cortex.label')
    fs4_indexlist_right.inputs.out_file = subject_ID + '_seed_index_fs4_right.txt'
    dmripipeline.connect([(index_fsaverage4_right, fs4_indexlist_right,
                           [("out_file", "in_surface_values")])])
    """
    downsample matrices according to fsaverage projections
    """
    if not use_sample:
        connect_mat_fs4_nat_left_left = pe.Node(
            interface=Function(input_names=[
                "index_row_file", "index_col_file", "matrix_file",
                "out_prefix", "dist2sim"
            ],
                               output_names=["out_mat", "out_nii"],
                               function=downsample_matrix),
            name='83_connect_mat_fs4_nat_left_left')
        connect_mat_fs4_nat_left_left.inputs.out_prefix = subject_ID + '_connect_fs4_nat_left_left'
        connect_mat_fs4_nat_left_left.inputs.dist2sim = False
        dmripipeline.connect(fs4_indexlist_left, "out_file",
                             connect_mat_fs4_nat_left_left, "index_row_file")
        dmripipeline.connect(fs4_indexlist_left, "out_file",
                             connect_mat_fs4_nat_left_left, "index_col_file")
        dmripipeline.connect(submatrix_left_left, "mat_matrix_nat",
                             connect_mat_fs4_nat_left_left, "matrix_file")

        connect_mat_fs4_nat_left_right = connect_mat_fs4_nat_left_left.clone(
            name='83_connect_mat_fs4_nat_left_right')
        connect_mat_fs4_nat_left_right.inputs.out_prefix = subject_ID + '_connect_fs4_nat_left_right'
        dmripipeline.connect(fs4_indexlist_left, "out_file",
                             connect_mat_fs4_nat_left_right, "index_row_file")
        dmripipeline.connect(fs4_indexlist_right, "out_file",
                             connect_mat_fs4_nat_left_right, "index_col_file")
        dmripipeline.connect(submatrix_left_right, "mat_matrix_nat",
                             connect_mat_fs4_nat_left_right, "matrix_file")

        connect_mat_fs4_nat_right_left = connect_mat_fs4_nat_left_left.clone(
            name='83_connect_mat_fs4_nat_right_left')
        connect_mat_fs4_nat_right_left.inputs.out_prefix = subject_ID + '_connect_fs4_nat_right_left'
        dmripipeline.connect(fs4_indexlist_right, "out_file",
                             connect_mat_fs4_nat_right_left, "index_row_file")
        dmripipeline.connect(fs4_indexlist_left, "out_file",
                             connect_mat_fs4_nat_right_left, "index_col_file")
        dmripipeline.connect(submatrix_right_left, "mat_matrix_nat",
                             connect_mat_fs4_nat_right_left, "matrix_file")

        connect_mat_fs4_nat_right_right = connect_mat_fs4_nat_left_left.clone(
            name='83_connect_mat_fs4_nat_right_right')
        connect_mat_fs4_nat_right_right.inputs.out_prefix = subject_ID + '_connect_fs4_nat_right_right'
        dmripipeline.connect(fs4_indexlist_right, "out_file",
                             connect_mat_fs4_nat_right_right, "index_row_file")
        dmripipeline.connect(fs4_indexlist_right, "out_file",
                             connect_mat_fs4_nat_right_right, "index_col_file")
        dmripipeline.connect(submatrix_right_right, "mat_matrix_nat",
                             connect_mat_fs4_nat_right_right, "matrix_file")

        connect_mat_fs4_log_left_left = connect_mat_fs4_nat_left_left.clone(
            name='83_connect_mat_fs4_log_left_left')
        connect_mat_fs4_log_left_left.inputs.out_prefix = subject_ID + '_connect_fs4_log_left_left'
        dmripipeline.connect(fs4_indexlist_left, "out_file",
                             connect_mat_fs4_log_left_left, "index_row_file")
        dmripipeline.connect(fs4_indexlist_left, "out_file",
                             connect_mat_fs4_log_left_left, "index_col_file")
        dmripipeline.connect(submatrix_left_left, "mat_matrix_log",
                             connect_mat_fs4_log_left_left, "matrix_file")

        connect_mat_fs4_log_left_right = connect_mat_fs4_log_left_left.clone(
            name='83_connect_mat_fs4_log_left_right')
        connect_mat_fs4_log_left_right.inputs.out_prefix = subject_ID + '_connect_fs4_log_left_right'
        dmripipeline.connect(fs4_indexlist_left, "out_file",
                             connect_mat_fs4_log_left_right, "index_row_file")
        dmripipeline.connect(fs4_indexlist_right, "out_file",
                             connect_mat_fs4_log_left_right, "index_col_file")
        dmripipeline.connect(submatrix_left_right, "mat_matrix_log",
                             connect_mat_fs4_log_left_right, "matrix_file")

        connect_mat_fs4_log_right_left = connect_mat_fs4_log_left_left.clone(
            name='83_connect_mat_fs4_log_right_left')
        connect_mat_fs4_log_right_left.inputs.out_prefix = subject_ID + '_connect_fs4_log_right_left'
        dmripipeline.connect(fs4_indexlist_right, "out_file",
                             connect_mat_fs4_log_right_left, "index_row_file")
        dmripipeline.connect(fs4_indexlist_left, "out_file",
                             connect_mat_fs4_log_right_left, "index_col_file")
        dmripipeline.connect(submatrix_right_left, "mat_matrix_log",
                             connect_mat_fs4_log_right_left, "matrix_file")

        connect_mat_fs4_log_right_right = connect_mat_fs4_log_left_left.clone(
            name='83_connect_mat_fs4_log_right_right')
        connect_mat_fs4_log_right_right.inputs.out_prefix = subject_ID + '_connect_fs4_log_right_right'
        dmripipeline.connect(fs4_indexlist_right, "out_file",
                             connect_mat_fs4_log_right_right, "index_row_file")
        dmripipeline.connect(fs4_indexlist_right, "out_file",
                             connect_mat_fs4_log_right_right, "index_col_file")
        dmripipeline.connect(submatrix_right_right, "mat_matrix_log",
                             connect_mat_fs4_log_right_right, "matrix_file")

        #         connect_mat_fs4_nat_full = pe.Node(interface=Function(input_names=["sm_left_left", "sm_left_right","sm_right_left", "sm_right_right", "out_filename"], output_names=["out_file"], function=merge_matrices), name='83_connect_mat_fs4_nat_full')
        #         connect_mat_fs4_nat_full.inputs.out_filename = subject_ID + '_connect_fs4_nat_full.mat'
        #         connect_mat_fs4_nat_full.run_without_submitting = True
        #         dmripipeline.connect(connect_mat_fs4_nat_left_left, "out_nii", connect_mat_fs4_nat_full, "sm_left_left")
        #         dmripipeline.connect(connect_mat_fs4_nat_left_right, "out_nii", connect_mat_fs4_nat_full, "sm_left_right")
        #         dmripipeline.connect(connect_mat_fs4_nat_right_left, "out_nii", connect_mat_fs4_nat_full, "sm_right_left")
        #         dmripipeline.connect(connect_mat_fs4_nat_right_right, "out_nii", connect_mat_fs4_nat_full, "sm_right_right")
        #
        #         connect_mat_fs4_log_full = connect_mat_fs4_nat_full.clone(name='83_connect_mat_fs4_log_full')
        #         connect_mat_fs4_log_full.inputs.outfile_prefix = subject_ID + '_connect_fs4_log_full.mat'
        #         connect_mat_fs4_log_full.run_without_submitting = True
        #         dmripipeline.connect(connect_mat_fs4_log_left_left, "out_nii", connect_mat_fs4_log_full, "sm_left_left")
        #         dmripipeline.connect(connect_mat_fs4_log_left_right, "out_nii", connect_mat_fs4_log_full, "sm_left_right")
        #         dmripipeline.connect(connect_mat_fs4_log_right_left, "out_nii", connect_mat_fs4_log_full, "sm_right_left")
        #         dmripipeline.connect(connect_mat_fs4_log_right_right, "out_nii", connect_mat_fs4_log_full, "sm_right_right")

        connect_mat_fs5_nat_left_left = connect_mat_fs4_nat_left_left.clone(
            name='83_connect_mat_fs5_nat_left_left')
        connect_mat_fs5_nat_left_left.inputs.out_prefix = subject_ID + '_connect_fs5_nat_left_left'
        dmripipeline.connect(fs5_indexlist_left, "out_file",
                             connect_mat_fs5_nat_left_left, "index_row_file")
        dmripipeline.connect(fs5_indexlist_left, "out_file",
                             connect_mat_fs5_nat_left_left, "index_col_file")
        dmripipeline.connect(submatrix_left_left, "mat_matrix_nat",
                             connect_mat_fs5_nat_left_left, "matrix_file")

        connect_mat_fs5_nat_left_right = connect_mat_fs5_nat_left_left.clone(
            name='83_connect_mat_fs5_nat_left_right')
        connect_mat_fs5_nat_left_right.inputs.out_prefix = subject_ID + '_connect_fs5_nat_left_right'
        dmripipeline.connect(fs5_indexlist_left, "out_file",
                             connect_mat_fs5_nat_left_right, "index_row_file")
        dmripipeline.connect(fs5_indexlist_right, "out_file",
                             connect_mat_fs5_nat_left_right, "index_col_file")
        dmripipeline.connect(submatrix_left_right, "mat_matrix_nat",
                             connect_mat_fs5_nat_left_right, "matrix_file")

        connect_mat_fs5_nat_right_left = connect_mat_fs5_nat_left_left.clone(
            name='83_connect_mat_fs5_nat_right_left')
        connect_mat_fs5_nat_right_left.inputs.out_prefix = subject_ID + '_connect_fs5_nat_right_left'
        dmripipeline.connect(fs5_indexlist_right, "out_file",
                             connect_mat_fs5_nat_right_left, "index_row_file")
        dmripipeline.connect(fs5_indexlist_left, "out_file",
                             connect_mat_fs5_nat_right_left, "index_col_file")
        dmripipeline.connect(submatrix_right_left, "mat_matrix_nat",
                             connect_mat_fs5_nat_right_left, "matrix_file")

        connect_mat_fs5_nat_right_right = connect_mat_fs5_nat_left_left.clone(
            name='83_connect_mat_fs5_nat_right_right')
        connect_mat_fs5_nat_right_right.inputs.out_prefix = subject_ID + '_connect_fs5_nat_right_right'
        dmripipeline.connect(fs5_indexlist_right, "out_file",
                             connect_mat_fs5_nat_right_right, "index_row_file")
        dmripipeline.connect(fs5_indexlist_right, "out_file",
                             connect_mat_fs5_nat_right_right, "index_col_file")
        dmripipeline.connect(submatrix_right_right, "mat_matrix_nat",
                             connect_mat_fs5_nat_right_right, "matrix_file")

        connect_mat_fs5_log_left_left = connect_mat_fs5_nat_left_left.clone(
            name='83_connect_mat_fs5_log_left_left')
        connect_mat_fs5_log_left_left.inputs.out_prefix = subject_ID + '_connect_fs5_log_left_left'
        dmripipeline.connect(fs5_indexlist_left, "out_file",
                             connect_mat_fs5_log_left_left, "index_row_file")
        dmripipeline.connect(fs5_indexlist_left, "out_file",
                             connect_mat_fs5_log_left_left, "index_col_file")
        dmripipeline.connect(submatrix_left_left, "mat_matrix_log",
                             connect_mat_fs5_log_left_left, "matrix_file")

        connect_mat_fs5_log_left_right = connect_mat_fs5_log_left_left.clone(
            name='83_connect_mat_fs5_log_left_right')
        connect_mat_fs5_log_left_right.inputs.out_prefix = subject_ID + '_connect_fs5_log_left_right'
        dmripipeline.connect(fs5_indexlist_left, "out_file",
                             connect_mat_fs5_log_left_right, "index_row_file")
        dmripipeline.connect(fs5_indexlist_right, "out_file",
                             connect_mat_fs5_log_left_right, "index_col_file")
        dmripipeline.connect(submatrix_left_right, "mat_matrix_log",
                             connect_mat_fs5_log_left_right, "matrix_file")

        connect_mat_fs5_log_right_left = connect_mat_fs5_log_left_left.clone(
            name='83_connect_mat_fs5_log_right_left')
        connect_mat_fs5_log_right_left.inputs.out_prefix = subject_ID + '_connect_fs5_log_right_left'
        dmripipeline.connect(fs5_indexlist_right, "out_file",
                             connect_mat_fs5_log_right_left, "index_row_file")
        dmripipeline.connect(fs5_indexlist_left, "out_file",
                             connect_mat_fs5_log_right_left, "index_col_file")
        dmripipeline.connect(submatrix_right_left, "mat_matrix_log",
                             connect_mat_fs5_log_right_left, "matrix_file")

        connect_mat_fs5_log_right_right = connect_mat_fs5_log_left_left.clone(
            name='83_connect_mat_fs5_log_right_right')
        connect_mat_fs5_log_right_right.inputs.out_prefix = subject_ID + '_connect_fs5_log_right_right'
        dmripipeline.connect(fs5_indexlist_right, "out_file",
                             connect_mat_fs5_log_right_right, "index_row_file")
        dmripipeline.connect(fs5_indexlist_right, "out_file",
                             connect_mat_fs5_log_right_right, "index_col_file")
        dmripipeline.connect(submatrix_right_right, "mat_matrix_log",
                             connect_mat_fs5_log_right_right, "matrix_file")

#         connect_mat_fs5_nat_full = connect_mat_fs4_nat_full.clone(name='83_connect_mat_fs5_nat_full')
#         connect_mat_fs5_nat_full.inputs.outfile_prefix = subject_ID + '_connect_fs5_nat_full.mat'
#         connect_mat_fs5_nat_full.run_without_submitting = True
#         dmripipeline.connect(connect_mat_fs5_nat_left_left, "out_nii", connect_mat_fs5_nat_full, "sm_left_left")
#         dmripipeline.connect(connect_mat_fs5_nat_left_right, "out_nii", connect_mat_fs5_nat_full, "sm_left_right")
#         dmripipeline.connect(connect_mat_fs5_nat_right_left, "out_nii", connect_mat_fs5_nat_full, "sm_right_left")
#         dmripipeline.connect(connect_mat_fs5_nat_right_right, "out_nii", connect_mat_fs5_nat_full, "sm_right_right")
#
#         connect_mat_fs5_log_full = connect_mat_fs5_nat_full.clone(name='83_connect_mat_fs5_log_full')
#         connect_mat_fs5_log_full.inputs.out_filename = subject_ID + '_connect_fs5_log_full.mat'
#         connect_mat_fs5_log_full.run_without_submitting = True
#         dmripipeline.connect(connect_mat_fs5_log_left_left, "out_nii", connect_mat_fs5_log_full, "sm_left_left")
#         dmripipeline.connect(connect_mat_fs5_log_left_right, "out_nii", connect_mat_fs5_log_full, "sm_left_right")
#         dmripipeline.connect(connect_mat_fs5_log_right_left, "out_nii", connect_mat_fs5_log_full, "sm_right_left")
#         dmripipeline.connect(connect_mat_fs5_log_right_right, "out_nii", connect_mat_fs5_log_full, "sm_right_right")
#
    """
    use a sink to save outputs
    """

    datasink = pe.Node(io.DataSink(), name='99_datasink')
    datasink.inputs.base_directory = output_dir
    datasink.inputs.container = subject_ID
    datasink.inputs.parameterization = True
    #datasink.run_without_submitting = True

    dmripipeline.connect(index_pruned_left, 'out_file', datasink,
                         'interface_index.@3')
    dmripipeline.connect(index_pruned_right, 'out_file', datasink,
                         'interface_index.@4')
    dmripipeline.connect(index_final_2_t1_left, 'out_file', datasink,
                         'interface_index.@5')
    dmripipeline.connect(index_final_2_t1_right, 'out_file', datasink,
                         'interface_index.@6')
    dmripipeline.connect(index_interface2surf_left, 'out_file', datasink,
                         'interface_index.@7')
    dmripipeline.connect(index_interface2surf_right, 'out_file', datasink,
                         'interface_index.@8')
    dmripipeline.connect(index_fsaverage5_left, 'out_file', datasink,
                         'interface_index.@9')
    dmripipeline.connect(index_fsaverage5_right, 'out_file', datasink,
                         'interface_index.@10')
    dmripipeline.connect(fs5_indexlist_left, 'out_file', datasink,
                         'interface_index.@11')
    dmripipeline.connect(fs5_indexlist_right, 'out_file', datasink,
                         'interface_index.@12')
    dmripipeline.connect(index_fsaverage4_left, 'out_file', datasink,
                         'interface_index.@13')
    dmripipeline.connect(index_fsaverage4_right, 'out_file', datasink,
                         'interface_index.@14')
    dmripipeline.connect(fs4_indexlist_left, 'out_file', datasink,
                         'interface_index.@15')
    dmripipeline.connect(fs4_indexlist_right, 'out_file', datasink,
                         'interface_index.@16')
    dmripipeline.connect(fs_indexlist_left, 'out_file', datasink,
                         'interface_index.@17')
    dmripipeline.connect(fs_indexlist_right, 'out_file', datasink,
                         'interface_index.@18')
    dmripipeline.connect(tract_exclusion_mask, 'outfile', datasink,
                         'interface_index.@19')

    #    dmripipeline.connect(submatrix_left_left, 'mat_matrix_nat', datasink, 'connect_matrix.native.mat')
    #    dmripipeline.connect(submatrix_left_left, 'mat_matrix_log', datasink, 'connect_matrix.native.mat.@2')
    dmripipeline.connect(submatrix_left_left, 'nii_matrix_nat', datasink,
                         'connect_matrix.native.@3')
    dmripipeline.connect(submatrix_left_left, 'nii_matrix_log', datasink,
                         'connect_matrix.native.@4')
    #    dmripipeline.connect(submatrix_right_right, 'mat_matrix_nat', datasink, 'connect_matrix.native.mat.@5')
    #    dmripipeline.connect(submatrix_right_right, 'mat_matrix_log', datasink, 'connect_matrix.native.mat.@6')
    dmripipeline.connect(submatrix_right_right, 'nii_matrix_nat', datasink,
                         'connect_matrix.native.@7')
    dmripipeline.connect(submatrix_right_right, 'nii_matrix_log', datasink,
                         'connect_matrix.native.@8')
    #    dmripipeline.connect(submatrix_left_right, 'mat_matrix_nat', datasink, 'connect_matrix.native.mat')
    #    dmripipeline.connect(submatrix_left_right, 'mat_matrix_log', datasink, 'connect_matrix.native.mat.@2')
    dmripipeline.connect(submatrix_left_right, 'nii_matrix_nat', datasink,
                         'connect_matrix.native.@9')
    dmripipeline.connect(submatrix_left_right, 'nii_matrix_log', datasink,
                         'connect_matrix.native.@10')
    #    dmripipeline.connect(submatrix_right_left, 'mat_matrix_nat', datasink, 'connect_matrix.native.mat')
    #    dmripipeline.connect(submatrix_right_left, 'mat_matrix_log', datasink, 'connect_matrix.native.mat.@2')
    dmripipeline.connect(submatrix_right_left, 'nii_matrix_nat', datasink,
                         'connect_matrix.native.@11')
    dmripipeline.connect(submatrix_right_left, 'nii_matrix_log', datasink,
                         'connect_matrix.native.@12')

    #     dmripipeline.connect(full_matrix_nat, 'out_file', datasink, 'connect_matrix.native.@9')
    #     dmripipeline.connect(full_matrix_log, 'out_file', datasink, 'connect_matrix.native.@11')

    if not use_sample:
        #        dmripipeline.connect(connect_mat_fs4_nat_left_left, 'out_mat', datasink, 'connect_matrix.fs4.mat.@1')
        #        dmripipeline.connect(connect_mat_fs4_log_left_left, 'out_mat', datasink, 'connect_matrix.fs4.mat.@2')
        #        dmripipeline.connect(connect_mat_fs4_nat_right_right, 'out_mat', datasink, 'connect_matrix.fs4.mat.@3')
        #        dmripipeline.connect(connect_mat_fs4_log_right_right, 'out_mat', datasink, 'connect_matrix.fs4.mat.@4')
        #        dmripipeline.connect(connect_mat_fs4_nat_left_right, 'out_mat', datasink, 'connect_matrix.fs4.mat.@5')
        #        dmripipeline.connect(connect_mat_fs4_log_left_right, 'out_mat', datasink, 'connect_matrix.fs4.mat.@6')
        #        dmripipeline.connect(connect_mat_fs4_nat_right_left, 'out_mat', datasink, 'connect_matrix.fs4.mat.@7')
        #        dmripipeline.connect(connect_mat_fs4_log_right_left, 'out_mat', datasink, 'connect_matrix.fs4.mat.@8')

        dmripipeline.connect(connect_mat_fs4_nat_left_left, 'out_nii',
                             datasink, 'connect_matrix.fs4.@1')
        dmripipeline.connect(connect_mat_fs4_log_left_left, 'out_nii',
                             datasink, 'connect_matrix.fs4.@2')
        dmripipeline.connect(connect_mat_fs4_nat_right_right, 'out_nii',
                             datasink, 'connect_matrix.fs4.@3')
        dmripipeline.connect(connect_mat_fs4_log_right_right, 'out_nii',
                             datasink, 'connect_matrix.fs4.@4')
        dmripipeline.connect(connect_mat_fs4_nat_left_right, 'out_nii',
                             datasink, 'connect_matrix.fs4.@5')
        dmripipeline.connect(connect_mat_fs4_log_left_right, 'out_nii',
                             datasink, 'connect_matrix.fs4.@6')
        dmripipeline.connect(connect_mat_fs4_nat_right_left, 'out_nii',
                             datasink, 'connect_matrix.fs4.@7')
        dmripipeline.connect(connect_mat_fs4_log_right_left, 'out_nii',
                             datasink, 'connect_matrix.fs4.@8')

        #         dmripipeline.connect(connect_mat_fs4_nat_full, 'out_file', datasink, 'connect_matrix.@28')
        #         dmripipeline.connect(connect_mat_fs4_log_full, 'out_file', datasink, 'connect_matrix.@30')

        #        dmripipeline.connect(connect_mat_fs5_nat_left_left, 'out_mat', datasink, 'connect_matrix.fs5.mat.@1')
        #        dmripipeline.connect(connect_mat_fs5_log_left_left, 'out_mat', datasink, 'connect_matrix.fs5.mat.@2')
        #        dmripipeline.connect(connect_mat_fs5_nat_right_right, 'out_mat', datasink, 'connect_matrix.fs5.mat.@3')
        #        dmripipeline.connect(connect_mat_fs5_log_right_right, 'out_mat', datasink, 'connect_matrix.fs5.mat.@4')
        #        dmripipeline.connect(connect_mat_fs5_nat_left_right, 'out_mat', datasink, 'connect_matrix.fs5.mat.@5')
        #        dmripipeline.connect(connect_mat_fs5_log_left_right, 'out_mat', datasink, 'connect_matrix.fs5.mat.@6')
        #        dmripipeline.connect(connect_mat_fs5_nat_right_left, 'out_mat', datasink, 'connect_matrix.fs5.mat.@7')
        #        dmripipeline.connect(connect_mat_fs5_log_right_left, 'out_mat', datasink, 'connect_matrix.fs5.mat.@8')

        dmripipeline.connect(connect_mat_fs5_nat_left_left, 'out_nii',
                             datasink, 'connect_matrix.fs5.@1')
        dmripipeline.connect(connect_mat_fs5_log_left_left, 'out_nii',
                             datasink, 'connect_matrix.fs5.@2')
        dmripipeline.connect(connect_mat_fs5_nat_right_right, 'out_nii',
                             datasink, 'connect_matrix.fs5.@3')
        dmripipeline.connect(connect_mat_fs5_log_right_right, 'out_nii',
                             datasink, 'connect_matrix.fs5.@4')
        dmripipeline.connect(connect_mat_fs5_nat_left_right, 'out_nii',
                             datasink, 'connect_matrix.fs5.@5')
        dmripipeline.connect(connect_mat_fs5_log_left_right, 'out_nii',
                             datasink, 'connect_matrix.fs5.@6')
        dmripipeline.connect(connect_mat_fs5_nat_right_left, 'out_nii',
                             datasink, 'connect_matrix.fs5.@7')
        dmripipeline.connect(connect_mat_fs5_log_right_left, 'out_nii',
                             datasink, 'connect_matrix.fs5.@8')

#         dmripipeline.connect(connect_mat_fs5_nat_full, 'out_file', datasink, 'connect_matrix.@40')
#         dmripipeline.connect(connect_mat_fs5_log_full, 'out_file', datasink, 'connect_matrix.@42')
    """""" """""" """""" """""" """""" """""" """""" """""" """""" """""" """""" """""" """""" """
    ===============================================================================
    Connecting the workflow
    ===============================================================================
    """ """""" """""" """""" """""" """""" """""" """""" """""" """""" """""" """""" """""" """"""
    """
    Create a higher-level workflow
    ------------------------------
    Finally, we create another higher-level workflow that connects the dmripipeline workflow with the info and
    data-grabbing nodes declared at the beginning. The pipeline is extensible to an arbitrary number of subjects:
    simply add their names to the subject list and their data to the proper folders.
    """

    connectprepro = pe.Workflow(name="dmri_pipe3_projection")

    connectprepro.base_dir = op.abspath(workflow_dir + "/workflow_" +
                                        subject_ID)
    connectprepro.connect([(datasource, dmripipeline,
                            [('wm', 'inputnode.wm'),
                             ('seeds_left', 'inputnode.seeds_left'),
                             ('seeds_right', 'inputnode.seeds_right'),
                             ('t1', 'inputnode.t1'),
                             ('warp', 'inputnode.warp'),
                             ('inv_flirt_mat', 'inputnode.inv_flirt_mat'),
                             ('fa', 'inputnode.fa'),
                             ('index_left', 'inputnode.index_left'),
                             ('index_right', 'inputnode.index_right')]),
                           (tracts_left_source, dmripipeline,
                            [('tracts_left', 'inputnode.tracts_left')]),
                           (tracts_right_source, dmripipeline,
                            [('tracts_right', 'inputnode.tracts_right')])])

    return connectprepro
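
The builder that assembles this pipeline opens earlier in the file, outside this excerpt, so its real name and arguments are not shown here. As a minimal sketch (hypothetical builder name and settings), the returned workflow runs like any other nipype workflow:

# Hypothetical driver: 'build_projection_pipeline' stands in for the real
# builder defined earlier in the file.
wf = build_projection_pipeline()
wf.write_graph(graph2use='colored')  # optional: render the node wiring
wf.run(plugin='MultiProc', plugin_args={'n_procs': 4})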
Example no. 19
def init_bold_surf_wf(mem_gb,
                      output_spaces,
                      medial_surface_nan,
                      name='bold_surf_wf'):
    """
    This workflow samples functional images to FreeSurfer surfaces

    For each vertex, the cortical ribbon is sampled at six points (spaced 20% of thickness apart)
    and averaged.

    Outputs are in GIFTI format.

    .. workflow::
        :graph2use: colored
        :simple_form: yes

        from fmriprep.workflows.bold import init_bold_surf_wf
        wf = init_bold_surf_wf(mem_gb=0.1,
                               output_spaces=['T1w', 'fsnative',
                                             'template', 'fsaverage5'],
                               medial_surface_nan=False)

    **Parameters**

        output_spaces : list
            List of output spaces functional images are to be resampled to
            Target spaces beginning with ``fs`` will be selected for resampling,
            such as ``fsaverage`` or related template spaces
            If the list contains ``fsnative``, images will be resampled to the
            individual subject's native surface
        medial_surface_nan : bool
            Replace medial wall values with NaNs on functional GIFTI files

    **Inputs**

        source_file
            Motion-corrected BOLD series in T1 space
        t1_preproc
            Bias-corrected structural template image
        subjects_dir
            FreeSurfer SUBJECTS_DIR
        subject_id
            FreeSurfer subject ID
        t1_2_fsnative_forward_transform
            LTA-style affine matrix translating from T1w to FreeSurfer-conformed subject space

    **Outputs**

        surfaces
            BOLD series, resampled to FreeSurfer surfaces

    """
    # Ensure volumetric spaces do not sneak into this workflow
    spaces = [space for space in output_spaces if space.startswith('fs')]

    workflow = Workflow(name=name)

    if spaces:
        workflow.__desc__ = """\
The BOLD time-series, were resampled to surfaces on the following
spaces: {out_spaces}.
""".format(out_spaces=', '.join(['*%s*' % s for s in spaces]))
    inputnode = pe.Node(niu.IdentityInterface(fields=[
        'source_file', 't1_preproc', 'subject_id', 'subjects_dir',
        't1_2_fsnative_forward_transform'
    ]),
                        name='inputnode')

    outputnode = pe.Node(niu.IdentityInterface(fields=['surfaces']),
                         name='outputnode')

    def select_target(subject_id, space):
        """ Given a source subject ID and a target space, get the target subject ID """
        return subject_id if space == 'fsnative' else space
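    # e.g. (hypothetical subject ID): select_target('sub-01', 'fsaverage5')
    # returns 'fsaverage5', while select_target('sub-01', 'fsnative')
    # returns 'sub-01'.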

    targets = pe.MapNode(niu.Function(function=select_target),
                         iterfield=['space'],
                         name='targets',
                         mem_gb=DEFAULT_MEMORY_MIN_GB)
    targets.inputs.space = spaces

    # Rename the source file to the output space to simplify naming later
    rename_src = pe.MapNode(niu.Rename(format_string='%(subject)s',
                                       keep_ext=True),
                            iterfield='subject',
                            name='rename_src',
                            run_without_submitting=True,
                            mem_gb=DEFAULT_MEMORY_MIN_GB)
    rename_src.inputs.subject = spaces

    resampling_xfm = pe.Node(LTAConvert(in_lta='identity.nofile',
                                        out_lta=True),
                             name='resampling_xfm')
    set_xfm_source = pe.Node(ConcatenateLTA(out_type='RAS2RAS'),
                             name='set_xfm_source')
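    # 'identity.nofile' makes LTAConvert emit an identity transform that still
    # records the BOLD and T1w volume geometries; concatenated with the
    # T1w-to-fsnative affine below, it gives the sampler a single registration
    # file from the BOLD grid into FreeSurfer's conformed subject space.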

    sampler = pe.MapNode(fs.SampleToSurface(sampling_method='average',
                                            sampling_range=(0, 1, 0.2),
                                            sampling_units='frac',
                                            interp_method='trilinear',
                                            cortex_mask=True,
                                            override_reg_subj=True,
                                            out_type='gii'),
                         iterfield=['source_file', 'target_subject'],
                         iterables=('hemi', ['lh', 'rh']),
                         name='sampler',
                         mem_gb=mem_gb * 3)
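    # sampling_range=(0, 1, 0.2) with 'frac' units samples each vertex at 0%,
    # 20%, ..., 100% of cortical thickness and averages the six values, per
    # the docstring above.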

    medial_nans = pe.MapNode(MedialNaNs(),
                             iterfield=['in_file', 'target_subject'],
                             name='medial_nans',
                             mem_gb=DEFAULT_MEMORY_MIN_GB)

    merger = pe.JoinNode(niu.Merge(1, ravel_inputs=True),
                         name='merger',
                         joinsource='sampler',
                         joinfield=['in1'],
                         run_without_submitting=True,
                         mem_gb=DEFAULT_MEMORY_MIN_GB)
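    # The JoinNode gathers the sampler's outputs across the 'hemi' iterable;
    # ravel_inputs=True flattens the per-space MapNode lists into one flat
    # list of GIFTI files.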

    update_metadata = pe.MapNode(GiftiSetAnatomicalStructure(),
                                 iterfield='in_file',
                                 name='update_metadata',
                                 mem_gb=DEFAULT_MEMORY_MIN_GB)

    workflow.connect([
        (inputnode, targets, [('subject_id', 'subject_id')]),
        (inputnode, rename_src, [('source_file', 'in_file')]),
        (inputnode, resampling_xfm, [('source_file', 'source_file'),
                                     ('t1_preproc', 'target_file')]),
        (inputnode, set_xfm_source, [('t1_2_fsnative_forward_transform',
                                      'in_lta2')]),
        (resampling_xfm, set_xfm_source, [('out_lta', 'in_lta1')]),
        (inputnode, sampler, [('subjects_dir', 'subjects_dir'),
                              ('subject_id', 'subject_id')]),
        (set_xfm_source, sampler, [('out_file', 'reg_file')]),
        (targets, sampler, [('out', 'target_subject')]),
        (rename_src, sampler, [('out_file', 'source_file')]),
        (merger, update_metadata, [('out', 'in_file')]),
        (update_metadata, outputnode, [('out_file', 'surfaces')]),
    ])

    if medial_surface_nan:
        workflow.connect([
            (inputnode, medial_nans, [('subjects_dir', 'subjects_dir')]),
            (sampler, medial_nans, [('out_file', 'in_file')]),
            (targets, medial_nans, [('out', 'target_subject')]),
            (medial_nans, merger, [('out_file', 'in1')]),
        ])
    else:
        workflow.connect(sampler, 'out_file', merger, 'in1')

    return workflow
Example no. 20
def create_ml_preprocess_workflow(
        name,
        project_dir,
        work_dir,
        sessions_file,
        session_template,
        fs_dir,
        annot_template='{subject_id}/label/{hemi}.aparc.a2009s.annot',
        fwhm_vals=[2],
        ico_order_vals=[4],
        do_save_vol_ds=False,
        do_save_smooth_vol_ds=False,
        do_save_surface_smooth_vol_ds=False,
        do_save_surface_ds=False,
        do_save_smooth_surface_ds=False,
        do_save_sphere_nifti=False,
        do_save_sphere_ds=True,
        do_save_join_sessions_ds=True,
        do_save_join_subjects_ds=True):

    # Initialize the workflow
    workflow = pe.Workflow(name=name)
    workflow.base_dir = work_dir

    sessions_info = ColumnData(sessions_file, dtype=str)
    subject_ids = set(sessions_info['subject_id'])
    session_map = [
        (sid, [s for i, s, r in zip(*sessions_info.values()) if i == sid])
        for sid in subject_ids
    ]
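    # Assumes sessions_file holds exactly three columns, subject_id first and
    # the session directory second (the third is unused here), and that
    # ColumnData preserves column order; each subject is mapped to the list
    # of its session directories.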

    # For each subject
    subjects = pe.Node(interface=util.IdentityInterface(fields=['subject_id']),
                       name='subjects')
    subjects.iterables = [('subject_id', subject_ids)]

    # For each session
    sessions = pe.Node(
        interface=util.IdentityInterface(fields=['subject_id', 'session_dir']),
        name='sessions')
    sessions.itersource = ('subjects', 'subject_id')
    sessions.iterables = [('session_dir', dict(session_map))]
    workflow.connect(subjects, 'subject_id', sessions, 'subject_id')

    # Get the session directory
    get_session_dir = pe.Node(interface=nio.SelectFiles(session_template),
                              name='get_session_dir')
    workflow.connect(sessions, 'session_dir', get_session_dir, 'session_dir')

    # Save outputs
    datasink = pe.Node(nio.DataSink(), name='datasink')
    datasink.inputs.parameterization = False
    workflow.connect(get_session_dir, 'session_dir', datasink,
                     'base_directory')

    template = {
        'nifti_file': 'mri/f.nii.gz',
        'attributes_file': 'attributes.txt',
        'reg_file': 'mri/transforms/functional_to_anatomy.dat'
    }
    get_files = pe.Node(nio.SelectFiles(template), name='get_files')
    workflow.connect(get_session_dir, 'session_dir', get_files,
                     'base_directory')

    vol_to_ds = pe.Node(nmutil.NiftiToDataset(), name='vol_to_ds')
    vol_to_ds.inputs.ds_file = 'vol.hdf5'
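    # NiftiToDataset comes from the custom nmutil module (not shown here);
    # judging by its inputs, it presumably packs the functional NIfTI and the
    # attributes file into an HDF5 (PyMVPA-style) dataset tagged with subject
    # and session IDs.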

    workflow.connect(get_files, 'nifti_file', vol_to_ds, 'nifti_file')
    workflow.connect(get_files, 'attributes_file', vol_to_ds,
                     'attributes_file')
    workflow.connect(subjects, 'subject_id', vol_to_ds, 'subject_id')
    workflow.connect(sessions, 'session_dir', vol_to_ds, 'session_id')

    if do_save_vol_ds:
        workflow.connect(vol_to_ds, 'ds_file', datasink, 'ml.@vol')

    fwhm = pe.Node(util.IdentityInterface(fields=['fwhm']), name='fwhm')
    fwhm.iterables = [('fwhm', fwhm_vals)]

    if do_save_smooth_vol_ds:
        smooth_vol = pe.Node(interface=fs.MRIConvert(), name='smooth_vol')
        workflow.connect(get_files, 'nifti_file', smooth_vol, 'in_file')
        workflow.connect(fwhm, 'fwhm', smooth_vol, 'fwhm')

        smooth_vol_to_ds = pe.Node(nmutil.NiftiToDataset(),
                                   name='smooth_vol_to_ds')
        smooth_vol_to_ds.inputs.ds_file = 'smooth_vol.hdf5'

        workflow.connect(smooth_vol, 'out_file', smooth_vol_to_ds,
                         'nifti_file')
        workflow.connect(get_files, 'attributes_file', smooth_vol_to_ds,
                         'attributes_file')
        workflow.connect(subjects, 'subject_id', smooth_vol_to_ds,
                         'subject_id')
        workflow.connect(sessions, 'session_dir', smooth_vol_to_ds,
                         'session_id')

        workflow.connect(smooth_vol_to_ds, 'ds_file', datasink,
                         'ml.@smooth_vol')

    if do_save_surface_smooth_vol_ds:
        surface_smooth_vol = pe.Node(interface=fs.Smooth(),
                                     name='surface_smooth_vol')
        workflow.connect(get_files, 'reg_file', surface_smooth_vol, 'reg_file')
        workflow.connect(get_files, 'nifti_file', surface_smooth_vol,
                         'in_file')
        workflow.connect(fwhm, 'fwhm', surface_smooth_vol, 'surface_fwhm')

        surface_smooth_vol_to_ds = pe.Node(nmutil.NiftiToDataset(),
                                           name='surface_smooth_vol_to_ds')
        surface_smooth_vol_to_ds.inputs.ds_file = 'surface_smooth_vol.hdf5'

        workflow.connect(surface_smooth_vol, 'out_file',
                         surface_smooth_vol_to_ds, 'nifti_file')
        workflow.connect(get_files, 'attributes_file',
                         surface_smooth_vol_to_ds, 'attributes_file')
        workflow.connect(subjects, 'subject_id', surface_smooth_vol_to_ds,
                         'subject_id')
        workflow.connect(sessions, 'session_dir', surface_smooth_vol_to_ds,
                         'session_id')

        workflow.connect(surface_smooth_vol_to_ds, 'ds_file', datasink,
                         'ml.@surface_smooth_vol')

    hemi = pe.Node(util.IdentityInterface(fields=['hemi']), name='hemi')
    hemi.iterables = [('hemi', ['lh', 'rh'])]

    to_surface = pe.Node(fs.SampleToSurface(), name='to_surface')
    to_surface.inputs.sampling_method = 'average'
    to_surface.inputs.sampling_range = (0., 1., 0.1)
    to_surface.inputs.sampling_units = 'frac'
    to_surface.inputs.subjects_dir = fs_dir
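    # With 'frac' units, sampling_range=(0., 1., 0.1) averages eleven samples
    # per vertex, taken every 10% of cortical thickness from the white surface
    # (0.0) to the pial surface (1.0).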
    workflow.connect(hemi, 'hemi', to_surface, 'hemi')
    workflow.connect(get_files, 'nifti_file', to_surface, 'source_file')
    workflow.connect(get_files, 'reg_file', to_surface, 'reg_file')

    if do_save_surface_ds:
        surface_to_ds = pe.Node(nmutil.NiftiToDataset(), name='surface_to_ds')
        workflow.connect(to_surface, 'out_file', surface_to_ds, 'nifti_file')
        workflow.connect(get_files, 'attributes_file', surface_to_ds,
                         'attributes_file')
        workflow.connect(subjects, 'subject_id', surface_to_ds, 'subject_id')
        workflow.connect(sessions, 'session_dir', surface_to_ds, 'session_id')

        join_surfaces = pe.JoinNode(nmutil.JoinDatasets(),
                                    name='join_surfaces',
                                    joinsource='hemi',
                                    joinfield='input_datasets')
        join_surfaces.inputs.joined_dataset = 'surface.hdf5'
        join_surfaces.inputs.join_hemispheres = True
        workflow.connect(surface_to_ds, 'ds_file', join_surfaces,
                         'input_datasets')

        workflow.connect(join_surfaces, 'joined_dataset', datasink,
                         'ml.@surface')

    smooth_surface = pe.Node(fs.SurfaceSmooth(), name='smooth_surface')
    smooth_surface.inputs.subjects_dir = fs_dir
    workflow.connect(to_surface, 'out_file', smooth_surface, 'in_file')
    workflow.connect(sessions, 'subject_id', smooth_surface, 'subject_id')
    workflow.connect(hemi, 'hemi', smooth_surface, 'hemi')
    workflow.connect(fwhm, 'fwhm', smooth_surface, 'fwhm')

    if do_save_smooth_surface_ds:
        smooth_surface_to_ds = pe.Node(nmutil.NiftiToDataset(),
                                       name='smooth_surface_to_ds')
        workflow.connect(smooth_surface, 'out_file', smooth_surface_to_ds,
                         'nifti_file')
        workflow.connect(get_files, 'attributes_file', smooth_surface_to_ds,
                         'attributes_file')
        workflow.connect(subjects, 'subject_id', smooth_surface_to_ds,
                         'subject_id')
        workflow.connect(sessions, 'session_dir', smooth_surface_to_ds,
                         'session_id')

        join_smooth_surfaces = pe.JoinNode(nmutil.JoinDatasets(),
                                           name='join_smooth_surfaces',
                                           joinsource='hemi',
                                           joinfield='input_datasets')
        join_smooth_surfaces.inputs.joined_dataset = 'smooth_surface.hdf5'
        join_smooth_surfaces.inputs.join_hemispheres = True
        workflow.connect(smooth_surface_to_ds, 'ds_file', join_smooth_surfaces,
                         'input_datasets')

        workflow.connect(join_smooth_surfaces, 'joined_dataset', datasink,
                         'ml.@smooth_surface')

    ico_order = pe.Node(util.IdentityInterface(fields=['ico_order']),
                        name='ico_order')
    ico_order.iterables = [('ico_order', ico_order_vals)]

    to_sphere = pe.Node(fs.SurfaceTransform(), name='to_sphere')
    to_sphere.inputs.target_subject = 'ico'
    to_sphere.inputs.subjects_dir = fs_dir
    workflow.connect(hemi, 'hemi', to_sphere, 'hemi')
    workflow.connect(smooth_surface, 'out_file', to_sphere, 'source_file')
    workflow.connect(subjects, 'subject_id', to_sphere, 'source_subject')
    workflow.connect(ico_order, 'ico_order', to_sphere, 'target_ico_order')
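    # Resampling to the 'ico' target at a fixed target_ico_order puts every
    # subject on the same icosahedral mesh, so vertex counts line up across
    # subjects in the ML datasets assembled below.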

    if do_save_sphere_nifti:
        workflow.connect(to_sphere, 'out_file', datasink, 'surf.@sphere')

    template = {'annot_file': annot_template}
    get_annot_file = pe.Node(nio.SelectFiles(template), name='get_annot_file')
    get_annot_file.inputs.base_directory = fs_dir
    get_annot_file.inputs.subject_id = 'fsaverage'
    workflow.connect(hemi, 'hemi', get_annot_file, 'hemi')

    transform_annot = pe.Node(fs.SurfaceTransform(), name='transform_annot')
    transform_annot.inputs.source_subject = 'fsaverage'
    transform_annot.inputs.target_subject = 'ico'
    transform_annot.inputs.subjects_dir = fs_dir
    workflow.connect(hemi, 'hemi', transform_annot, 'hemi')
    workflow.connect(get_annot_file, 'annot_file', transform_annot,
                     'source_annot_file')
    workflow.connect(ico_order, 'ico_order', transform_annot,
                     'target_ico_order')

    sphere_to_ds = pe.Node(nmutil.NiftiToDataset(), name='sphere_to_ds')
    workflow.connect(to_sphere, 'out_file', sphere_to_ds, 'nifti_file')
    workflow.connect(get_files, 'attributes_file', sphere_to_ds,
                     'attributes_file')
    workflow.connect(transform_annot, 'out_file', sphere_to_ds, 'annot_file')
    workflow.connect(subjects, 'subject_id', sphere_to_ds, 'subject_id')
    workflow.connect(sessions, 'session_dir', sphere_to_ds, 'session_id')

    join_hemispheres = pe.JoinNode(nmutil.JoinDatasets(),
                                   name='join_hemispheres',
                                   joinsource='hemi',
                                   joinfield='input_datasets')
    join_hemispheres.inputs.joined_dataset = 'sphere.hdf5'
    join_hemispheres.inputs.join_hemispheres = True

    workflow.connect(sphere_to_ds, 'ds_file', join_hemispheres,
                     'input_datasets')

    if do_save_sphere_ds:
        workflow.connect(join_hemispheres, 'joined_dataset', datasink,
                         'ml.@sphere')

    join_sessions = pe.JoinNode(nmutil.JoinDatasets(),
                                name='join_sessions',
                                joinsource='sessions',
                                joinfield='input_datasets')
    workflow.connect(join_hemispheres, 'joined_dataset', join_sessions,
                     'input_datasets')

    if do_save_join_sessions_ds:
        join_sessions_sink = pe.Node(nio.DataSink(), name='join_sessions_sink')
        join_sessions_sink.inputs.parameterization = False
        join_sessions_sink.inputs.base_directory = os.path.join(
            project_dir, 'ml')
        workflow.connect(subjects, 'subject_id', join_sessions_sink,
                         'container')
        workflow.connect(join_sessions, 'joined_dataset', join_sessions_sink,
                         '@join_sessions')

    join_subjects = pe.JoinNode(nmutil.JoinDatasets(),
                                name='join_subjects',
                                joinsource='subjects',
                                joinfield='input_datasets')
    workflow.connect(join_sessions, 'joined_dataset', join_subjects,
                     'input_datasets')

    if do_save_join_subjects_ds:
        join_subjects_sink = pe.Node(nio.DataSink(), name='join_subjects_sink')
        join_subjects_sink.inputs.parameterization = False
        join_subjects_sink.inputs.base_directory = os.path.join(
            project_dir, 'ml')
        workflow.connect(join_subjects, 'joined_dataset', join_subjects_sink,
                         '@join_subjects')

    return workflow
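
As with the other examples, the returned object is a plain nipype workflow. A minimal invocation sketch follows; every path and the sessions file below are hypothetical stand-ins, and session_template is guessed to be a one-entry SelectFiles template dict matching how get_session_dir is wired above:

wf = create_ml_preprocess_workflow(
    name='ml_preprocess',
    project_dir='/data/project',                      # hypothetical
    work_dir='/scratch/ml_work',                      # hypothetical
    sessions_file='/data/project/sessions.csv',       # hypothetical
    session_template={'session_dir': '/data/project/{session_dir}'},  # guessed shape
    fs_dir='/data/project/freesurfer',                # hypothetical
    fwhm_vals=[2, 4],
    ico_order_vals=[4],
)
wf.run(plugin='MultiProc', plugin_args={'n_procs': 4})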