def labelvol(subjects_dir, template_file, reg_file, label_file):
    """Convert FreeSurfer surface labels into a volume on the template grid.

    Parameters
    ----------
    subjects_dir : str
        FreeSurfer SUBJECTS_DIR.
    template_file : str
        Volume defining the output grid (passed to mri_label2vol --temp).
    reg_file : str
        Registration file mapping labels into the template space.
    label_file : list
        Label file paths; entries may be None for hemispheres with no label.

    Returns
    -------
    str
        Path to the labelled volume, or to an all-zero ``empty.nii.gz`` in
        the current working directory when no label files were provided.
    """
    import nipype.interfaces.freesurfer as fs
    import nibabel as nib
    import numpy as np
    import os

    # Drop missing labels (a hemisphere may contribute nothing).
    labelfiles = [l for l in label_file if l is not None]
    # print() call form is valid on both Python 2 and 3; the old print
    # statement was a SyntaxError under Python 3.
    print(labelfiles)
    if labelfiles:
        node = fs.Label2Vol()
        node.inputs.subjects_dir = subjects_dir
        node.inputs.template_file = template_file
        node.inputs.reg_file = reg_file
        node.inputs.label_file = labelfiles
        res = node.run()
        vol_label_file = res.outputs.vol_label_file

    else:  # totally empty in L and R hemis
        print("making empty volume")
        img = nib.load(template_file)
        # .affine replaces the deprecated get_affine(), which was removed
        # from recent nibabel releases.
        aff = img.affine
        empty = np.zeros(img.shape)
        out = nib.Nifti1Image(empty, affine=aff)
        vol_label_file = os.path.abspath('empty.nii.gz')
        out.to_filename(vol_label_file)

    return vol_label_file
Exemplo n.º 2
0
    def freesurfer_to_anat(cls):
        """Function which removes freesurfer padding and transforms freesurfer segmentation to native space."""
        if config.verbose:
            print('Aligning freesurfer file to anatomical native space.')
        # rawavg.mgz lives in native anatomical space; resampling the
        # segmentation onto it undoes FreeSurfer's conformed-space padding.
        # vol_label_file sets the output file name.
        align = freesurfer.Label2Vol(seg_file='freesurfer/mri/aseg.auto_noCCseg.mgz',
                                     template_file='freesurfer/mri/rawavg.mgz',
                                     vol_label_file='freesurfer/mri/native_segmented_brain.mgz',
                                     reg_header='freesurfer/mri/aseg.auto_noCCseg.mgz',
                                     terminal_output='none')
        align.run()

        # Convert the resampled .mgz segmentation to NIfTI for downstream tools.
        converter = freesurfer.MRIConvert(in_file='freesurfer/mri/native_segmented_brain.mgz',
                                          out_file='freesurfer/mri/native_segmented_brain.nii',
                                          out_type='nii',
                                          terminal_output='none')
        converter.run()
Exemplo n.º 3
0
def get_regions(name='get_regions'):
    """Build a workflow that turns per-hemisphere annotations into
    binarized, individually named ROI volumes.

    Relies on the module-level helpers ``pickfile``, ``binarize_and_name``
    and ``merge`` used as connection modifiers / Function bodies.
    """
    import nipype.pipeline.engine as pe
    import nipype.interfaces.utility as niu
    import nipype.interfaces.io as nio
    import nipype.interfaces.freesurfer as fs

    wf = pe.Workflow(name=name)

    inputspec = pe.Node(
        niu.IdentityInterface(
            fields=["surf_dir", "subject_id", "surface", "reg_file", "mean"]),
        name='inputspec')

    # Project each hemisphere's annotation into the mean-functional volume.
    l2v = pe.MapNode(fs.Label2Vol(), name="label2vol",
                     iterfield=['hemi', 'annot_file'])
    l2v.inputs.hemi = ['lh', 'rh']

    # Fetch the subject's FreeSurfer outputs, one node instance per hemisphere.
    fssource = pe.MapNode(nio.FreeSurferSource(), name='fssource',
                          iterfield=['hemi'])
    fssource.inputs.hemi = ['lh', 'rh']

    # Binarize each labelled volume and give the ROIs readable names.
    binarizer = pe.MapNode(
        niu.Function(input_names=["in_file", "subject_id", "surf_dir", "hemi"],
                     output_names=["out_files"],
                     function=binarize_and_name),
        name="binarize_and_name",
        iterfield=['hemi', "in_file"])
    binarizer.inputs.hemi = ['lh', 'rh']

    outputspec = pe.Node(niu.IdentityInterface(fields=["ROIs"]),
                         name='outputspec')

    wf.connect([
        (inputspec, l2v, [('surf_dir', 'subjects_dir'),
                          ('subject_id', 'subject_id'),
                          ('reg_file', 'reg_file'),
                          ('mean', 'template_file')]),
        (inputspec, fssource, [('surf_dir', 'subjects_dir'),
                               ('subject_id', 'subject_id')]),
        (fssource, l2v, [(('annot', pickfile), 'annot_file')]),
        (inputspec, binarizer, [('subject_id', 'subject_id'),
                                ('surf_dir', 'surf_dir')]),
        (l2v, binarizer, [('vol_label_file', 'in_file')]),
        (binarizer, outputspec, [(('out_files', merge), 'ROIs')]),
    ])

    return wf
Exemplo n.º 4
0
def init_segs_to_native_wf(name='segs_to_native', segmentation='aseg'):
    """
    Get a segmentation from FreeSurfer conformed space into native T1w space.

    .. workflow::
        :graph2use: orig
        :simple_form: yes

        from smriprep.workflows.surfaces import init_segs_to_native_wf
        wf = init_segs_to_native_wf()


    **Parameters**
        segmentation
            The name of a segmentation ('aseg' or 'aparc_aseg' or 'wmparc')

    **Inputs**

        in_file
            Anatomical, merged T1w image after INU correction
        subjects_dir
            FreeSurfer SUBJECTS_DIR
        subject_id
            FreeSurfer subject ID


    **Outputs**

        out_file
            The selected segmentation, after resampling in native space

    **Raises**

        ValueError
            If ``segmentation`` starts with ``'aparc'`` but is not one of
            ``'aparc_aseg'``, ``'aparc_a2009s'`` or ``'aparc_dkt'``.
    """
    workflow = Workflow(name='%s_%s' % (name, segmentation))
    inputnode = pe.Node(niu.IdentityInterface(
        ['in_file', 'subjects_dir', 'subject_id']),
                        name='inputnode')
    outputnode = pe.Node(niu.IdentityInterface(['out_file']),
                         name='outputnode')
    # Extract the aseg and aparc+aseg outputs
    fssource = pe.Node(nio.FreeSurferSource(), name='fs_datasource')
    tonative = pe.Node(fs.Label2Vol(), name='tonative')
    tonii = pe.Node(fs.MRIConvert(out_type='niigz', resample_type='nearest'),
                    name='tonii')

    if segmentation.startswith('aparc'):
        # Pick the matching parcellation file out of FreeSurferSource's
        # ``aparc_aseg`` output list via a connection-modifier function.
        if segmentation == 'aparc_aseg':

            def _sel(x):
                return [parc for parc in x if 'aparc+' in parc][0]
        elif segmentation == 'aparc_a2009s':

            def _sel(x):
                return [parc for parc in x if 'a2009s+' in parc][0]
        elif segmentation == 'aparc_dkt':

            def _sel(x):
                return [parc for parc in x if 'DKTatlas+' in parc][0]
        else:
            # Previously this fell through and raised a confusing NameError
            # on the ``_sel`` reference below; fail fast with a clear message.
            raise ValueError(
                "Unknown 'aparc' segmentation: %s" % segmentation)

        segmentation = (segmentation, _sel)

    workflow.connect([
        (inputnode, fssource, [('subjects_dir', 'subjects_dir'),
                               ('subject_id', 'subject_id')]),
        (inputnode, tonii, [('in_file', 'reslice_like')]),
        (fssource, tonative, [(segmentation, 'seg_file'),
                              ('rawavg', 'template_file'),
                              ('aseg', 'reg_header')]),
        (tonative, tonii, [('vol_label_file', 'in_file')]),
        (tonii, outputnode, [('out_file', 'out_file')]),
    ])
    return workflow
Exemplo n.º 5
0
def segstats_workflow(c, name='segstats'):
    """Build a FreeSurfer segstats workflow driven by the configuration ``c``.

    Depending on ``c``, statistics are extracted using a standard-space
    annotation (``c.use_annotation``), a subject-space annotation
    (``c.use_subject_annotation``), or a labelled volume / standard label.
    Results are routed to a DataSink rooted at ``c.sink_dir``.
    """
    import nipype.interfaces.fsl as fsl
    import nipype.interfaces.freesurfer as fs
    import nipype.interfaces.io as nio
    import nipype.pipeline.engine as pe
    from .scripts.u0a14c5b5899911e1bca80023dfa375f2.modified_nipype_workflows import create_get_stats_flow
    from .scripts.u0a14c5b5899911e1bca80023dfa375f2.utils import tolist, pickidx
    # Registration is handled inside the stats flow only when no
    # annotation-based path is used.
    if not c.use_annotation:
        workflow = create_get_stats_flow(name='segstats', withreg=c.use_reg)
    else:
        workflow = create_get_stats_flow(name='segstats')

    workflow.inputs.segstats.avgwf_txt_file = True
    datagrabber = c.datagrabber.create_dataflow()
    merge = pe.Node(fsl.Merge(dimension='t'), name='merge_files')
    inputspec = workflow.get_node('inputspec')
    subject_iterable = datagrabber.get_node("subject_id_iterable")
    # merge files grabbed
    stats = workflow.get_node('segstats')
    # print() call form keeps this importable under both Python 2 and 3;
    # the old print statement was a SyntaxError on Python 3.
    print("colortablefile: " + str(c.color_table_file))
    if c.color_table_file == "Default":
        stats.inputs.default_color_table = True
    elif c.color_table_file == "Color_Table":
        stats.inputs.color_table_file = c.color_file
    elif c.color_table_file == "GCA_color_table":
        stats.inputs.gca_color_table = c.color_file

    workflow.connect(datagrabber, ('datagrabber.in_files', tolist), merge,
                     'in_files')
    # Connection modifier duplicating one file path for both hemispheres.
    doubler = lambda x: [x, x]
    # This means you're using an annotation from a standard surface
    if c.use_annotation:

        surf2surf = pe.MapNode(fs.SurfaceTransform(
            source_subject=c.annot_space, subjects_dir=c.surf_dir),
                               name="surf2surf",
                               iterfield=['hemi', 'source_annot_file'])
        surf2surf.inputs.source_annot_file = [c.lh_annotation, c.rh_annotation]
        workflow.connect(subject_iterable, "subject_id", surf2surf,
                         "target_subject")
        surf2surf.inputs.hemi = ['lh', 'rh']
        #add = pe.Node(fsl.BinaryMaths(operation='add'),name="add")
        #workflow.connect(add,'out_file',inputspec,"label_file")
        label2vol = pe.MapNode(fs.Label2Vol(subjects_dir=c.surf_dir,
                                            proj=c.proj),
                               name='label2vol',
                               iterfield=["hemi", "annot_file"])
        workflow.connect(surf2surf, "out_file", label2vol, "annot_file")
        workflow.connect(subject_iterable, "subject_id", label2vol,
                         "subject_id")
        workflow.connect(merge, "merged_file", label2vol, "template_file")
        label2vol.inputs.hemi = ['lh', 'rh']
        workflow.connect(datagrabber, 'datagrabber.reg_file', label2vol,
                         'reg_file')
        if c.inverse_reg:
            label2vol.inputs.invert_mtx = c.inverse_reg
        workflow.connect(label2vol, 'vol_label_file', inputspec, 'label_file')
        workflow.connect(merge, ('merged_file', doubler), inputspec,
                         'source_file')

    #This means you're using annotations on the subjects surface
    if c.use_subject_annotation:
        label2vol = pe.MapNode(fs.Label2Vol(subjects_dir=c.surf_dir,
                                            proj=c.proj),
                               name='label2vol',
                               iterfield=["hemi", "annot_file"])
        label2vol.inputs.hemi = ['lh', 'rh']
        workflow.connect(datagrabber, "datagrabber.label_file", label2vol,
                         "annot_file")
        workflow.connect(subject_iterable, "subject_id", label2vol,
                         "subject_id")
        workflow.connect(merge, "merged_file", label2vol, "template_file")
        workflow.connect(label2vol, 'vol_label_file', inputspec, "label_file")
        workflow.connect(merge, ('merged_file', doubler), inputspec,
                         'source_file')
        workflow.connect(datagrabber, 'datagrabber.reg_file', label2vol,
                         'reg_file')
        if c.inverse_reg:
            label2vol.inputs.invert_mtx = c.inverse_reg

    if not c.use_subject_annotation and not c.use_annotation:
        workflow.connect(merge, 'merged_file', inputspec, 'source_file')

    # This means you're using a labeled volume like aparc+aseg
    if c.use_reg and not c.use_annotation and not c.use_subject_annotation:
        workflow.connect(datagrabber, 'datagrabber.reg_file', inputspec,
                         'reg_file')
        workflow.inputs.inputspec.subjects_dir = c.surf_dir
        workflow.inputs.inputspec.inverse = c.inverse_reg

    if c.use_standard_label and not c.use_annotation and not c.use_subject_annotation:
        workflow.inputs.inputspec.label_file = c.label_file
    elif not c.use_standard_label and not c.use_annotation and not c.use_subject_annotation:
        workflow.connect(datagrabber, 'datagrabber.label_file', inputspec,
                         "label_file")

    sinker = pe.Node(nio.DataSink(), name='sinker')
    # os.path.join with a single argument was a no-op (and ``os`` was never
    # imported in this function); use the configured path directly.
    sinker.inputs.base_directory = c.sink_dir

    workflow.connect(subject_iterable, 'subject_id', sinker, 'container')

    def get_subs(subject_id, subject_annot):
        """DataSink substitutions: strip iterable prefixes and, for
        per-hemisphere annotations, rename the two segstats outputs."""
        subs = [('_subject_id_%s' % subject_id, '')]
        if subject_annot:
            subs.append(('_segstats0/summary.stats', 'lh.summary.stats'))
            subs.append(('_segstats1/summary.stats', 'rh.summary.stats'))
            subs.append(('_segstats0', ''))
            subs.append(('_segstats1', ''))
        else:
            subs.append(('_segstats0', ''))
        return subs

    workflow.connect(
        subject_iterable,
        ('subject_id', get_subs, c.use_subject_annotation or c.use_annotation),
        sinker, 'substitutions')
    outputspec = workflow.get_node('outputspec')
    workflow.connect(outputspec, 'stats_file', sinker,
                     'segstats.%s.@stats' % c.statname)
    workflow.connect(stats, "avgwf_txt_file", sinker,
                     'segstats.%s.@avg' % c.statname)

    return workflow
Exemplo n.º 6
0
def localizer(name='localizer'):
    """Build a workflow that localizes ROIs from a statistical overlay.

    The overlay is (optionally) masked, peak vertices are found per
    hemisphere, surface labels are grown around them, projected into the
    mean-functional volume, binarized and cast to int16.  Relies on the
    module-level helpers ``get_surface_label``, ``mask_overlay``,
    ``background``, ``get_vertices`` and ``study_ref``.
    """
    import nipype.interfaces.freesurfer as fs
    import nipype.interfaces.fsl as fsl
    import nipype.pipeline.engine as pe
    import nipype.interfaces.utility as niu

    wf = pe.Workflow(name=name)

    inputspec = pe.Node(
        niu.IdentityInterface(fields=[
            "subject_id", "subjects_dir", "overlay", 'reg', 'mean', 'thresh',
            'roi', "mask_overlay", "use_mask_overlay", "uthresh"
        ]),
        name='inputspec')

    # Grow a surface label around each seed vertex, one per hemisphere.
    labeler = pe.MapNode(
        niu.Function(
            input_names=['vertex', 'hemi', 'subject', 'overlay', 'reg', 'sd',
                         'thresh'],
            output_names=['filename'],
            function=get_surface_label),
        name='get_surface_label',
        iterfield=['hemi', 'vertex'])
    labeler.inputs.hemi = ['lh', 'rh']
    #surf_label.inputs.vertex = [61091, 60437]
    #surf_label.inputs.thresh = 1.5

    # Optionally mask the overlay before vertex/label extraction.
    masknode = pe.Node(
        niu.Function(
            input_names=['mask', 'overlay', 'use_mask_overlay', 'thresh'],
            output_names=['outfile'],
            function=mask_overlay),
        name='mask_overlay')

    # Upper-threshold the overlay to produce the background reference.
    refnode = pe.Node(
        niu.Function(input_names=['overlay', 'uthresh'],
                     output_names=['outfile'],
                     function=background),
        name='background')

    # Project the grown surface labels into the mean-functional volume.
    vol_node = pe.Node(fs.Label2Vol(), name='labels2vol')

    # Locate peak vertices for the requested ROI, per hemisphere.
    vert_node = pe.MapNode(
        niu.Function(
            input_names=['sub', 'sd', 'overlay', 'reg', 'mean', 'hemi', 'roi',
                         'thresh'],
            output_names=['vertex'],
            function=get_vertices),
        name='get_verts',
        iterfield=['hemi'])
    vert_node.inputs.hemi = ['lh', 'rh']

    # Study-specific reference image derived from the mean functional.
    ref_img = pe.Node(
        niu.Function(input_names=['mean'],
                     output_names=['study_ref'],
                     function=study_ref),
        name='studyref')

    binarize = pe.Node(fsl.ImageMaths(op_string='-bin'), name="binarize_roi")
    to_short = pe.Node(fsl.ChangeDataType(output_datatype='short'),
                       name='to_short')

    outputspec = pe.Node(
        niu.IdentityInterface(fields=['rois', 'reference', 'study_ref']),
        name='outputspec')

    wf.connect([
        (inputspec, refnode, [('overlay', 'overlay'),
                              ('uthresh', 'uthresh')]),
        (inputspec, masknode, [('overlay', 'overlay'),
                               ('mask_overlay', 'mask'),
                               ('use_mask_overlay', 'use_mask_overlay'),
                               ('thresh', 'thresh')]),
        (masknode, labeler, [('outfile', 'overlay')]),
        (inputspec, labeler, [('subject_id', 'subject'),
                              ('subjects_dir', 'sd'),
                              ('reg', 'reg'),
                              ('thresh', 'thresh')]),
        (inputspec, vol_node, [('subjects_dir', 'subjects_dir'),
                               ('mean', 'template_file'),
                               ('reg', 'reg_file')]),
        (labeler, vol_node, [('filename', 'label_file')]),
        (inputspec, vert_node, [('subject_id', 'sub'),
                                ('subjects_dir', 'sd'),
                                ('reg', 'reg'),
                                ('mean', 'mean'),
                                ('thresh', 'thresh'),
                                ('roi', 'roi')]),
        (masknode, vert_node, [('outfile', 'overlay')]),
        (vert_node, labeler, [('vertex', 'vertex')]),
        (inputspec, ref_img, [('mean', 'mean')]),
        (ref_img, outputspec, [('study_ref', 'study_ref')]),
        (refnode, outputspec, [('outfile', 'reference')]),
        (vol_node, binarize, [('vol_label_file', 'in_file')]),
        (binarize, to_short, [('out_file', 'in_file')]),
        (to_short, outputspec, [('out_file', 'rois')]),
    ])
    return wf
def localizer(name='localizer'):
    """Build the ROI-localizer workflow (variant with aparc+aseg background masking).

    NOTE(review): this redefines the ``localizer`` function declared directly
    above it in this file; only this later definition takes effect at import
    time.  Unlike the earlier variant it also exposes the surface ``labels``
    output and masks the background reference with an eroded WM/ventricle/CSF
    mask derived from aparc+aseg.
    """
    import nipype.interfaces.freesurfer as fs
    import nipype.interfaces.fsl as fsl
    import nipype.pipeline.engine as pe
    import nipype.interfaces.utility as niu
    import nipype.interfaces.io as nio
    wf = pe.Workflow(name=name)
    # Single entry point for all externally supplied inputs.
    inputspec = pe.Node(niu.IdentityInterface(fields=["subject_id",
                                                      "subjects_dir",
                                                      "overlay",
                                                      'reg',
                                                      'mean',
                                                      'thresh',
                                                      'roi',
                                                      "mask_overlay",
                                                      "use_mask_overlay","uthresh"]),name='inputspec')
    # Grow a surface label around each seed vertex (one iteration per hemisphere).
    surf_label = pe.MapNode(niu.Function(input_names=['vertex',
                                                   'hemi',
                                                   'subject',
                                                   'overlay',
                                                   'reg',
                                                   'sd',
                                                   'thresh'],
                                      output_names=['filename','labels'],
                                      function=get_surface_label),
        name='get_surface_label', iterfield=['hemi','vertex'])
    surf_label.inputs.hemi=['lh','rh']
    #surf_label.inputs.vertex = [61091, 60437]
    #surf_label.inputs.thresh = 1.5

    # Optionally mask the statistical overlay before vertex/label extraction.
    masker = pe.Node(niu.Function(input_names=['mask',
                                               'overlay',
                                               'use_mask_overlay',
                                               'thresh'],
                                  output_names=['outfile'],function=mask_overlay),
        name='mask_overlay')

    # Upper-threshold the overlay to produce a background reference image.
    bg = pe.Node(niu.Function(input_names=['overlay','uthresh'],output_names=['outfile'],function=background),name='background')
    wf.connect(inputspec,'overlay',bg,'overlay')
    wf.connect(inputspec,'uthresh',bg,'uthresh')
    wf.connect(inputspec,'overlay',masker,'overlay')
    wf.connect(inputspec,'mask_overlay',masker,'mask')
    wf.connect(inputspec,'use_mask_overlay',masker,'use_mask_overlay')
    wf.connect(inputspec,'thresh',masker,'thresh')
    wf.connect(masker,'outfile',surf_label,'overlay')

    wf.connect(inputspec,"subject_id",surf_label,"subject")
    wf.connect(inputspec,"subjects_dir",surf_label,"sd")
    #wf.connect(inputspec,"overlay",surf_label,"overlay")
    wf.connect(inputspec,"reg",surf_label,"reg")

    # Project the grown surface labels into the mean-functional volume.
    label2vol = pe.Node(fs.Label2Vol(),name='labels2vol')
    wf.connect(inputspec,'subjects_dir',label2vol,'subjects_dir')
    wf.connect(inputspec,'mean',label2vol,'template_file')
    wf.connect(inputspec,'reg',label2vol,'reg_file')
    wf.connect(surf_label,'filename',label2vol,'label_file')

    # Locate the seed vertices for the requested ROI, per hemisphere.
    verts = pe.MapNode(niu.Function(input_names=['sub',
                                              'sd',
                                              'overlay',
                                              'reg',
                                              'mean',
                                              'hemi',
                                              'roi',
                                              'thresh'],
                                 output_names=['vertex'],
                                 function=get_vertices),
        name='get_verts',iterfield=['hemi'])
    verts.inputs.hemi = ['lh','rh']
    wf.connect(inputspec,'subject_id',verts,'sub')
    wf.connect(inputspec,'subjects_dir',verts,'sd')
    #wf.connect(inputspec,'overlay',verts,'overlay')
    wf.connect(masker,'outfile',verts,'overlay')
    wf.connect(inputspec,'reg',verts,'reg')
    wf.connect(inputspec,'mean',verts,'mean')
    wf.connect(inputspec,'thresh',verts,'thresh')
    wf.connect(inputspec,'roi',verts,'roi')
    wf.connect(verts,'vertex',surf_label,'vertex')
    wf.connect(inputspec,'thresh',surf_label,'thresh')

    from ...smri.freesurfer_brain_masks import pickaparc

    # Fetch the subject's FreeSurfer outputs so aparc+aseg can be selected.
    fssource = pe.Node(nio.FreeSurferSource(),name='fssource')
    wf.connect(inputspec,"subjects_dir",fssource,"subjects_dir")
    wf.connect(inputspec,"subject_id", fssource,"subject_id")

    # Eroded white-matter + ventricular-CSF mask used to suppress background.
    bg_mask = pe.Node(fs.Binarize(wm_ven_csf=True, erode=2),name="bg_mask")

    wf.connect(fssource,("aparc_aseg",pickaparc),bg_mask,"in_file")

    # Resample the anatomical mask into functional space (inverse transform,
    # nearest-neighbour to keep it binary).
    warp_mask = pe.Node(fs.ApplyVolTransform(inverse=True,interp='nearest'),name="warp_to_func")
    wf.connect(inputspec,"mean",warp_mask,"source_file")
    wf.connect(bg_mask,"binary_file",warp_mask,"target_file")
    wf.connect(inputspec,"reg", warp_mask,"reg_file")
    

    # Apply the warped mask to the background reference image.
    do_bg_mask = pe.Node(fs.ApplyMask(),name="do_bg_mask")
    wf.connect(warp_mask,"transformed_file",do_bg_mask,"mask_file")

    # Study-specific reference image derived from the mean functional.
    studyref = pe.Node(niu.Function(input_names=['mean'],output_names=['study_ref'], function=study_ref),name='studyref')
    wf.connect(inputspec,'mean',studyref,'mean')

    outputspec = pe.Node(niu.IdentityInterface(fields=['rois','reference','study_ref','labels']),name='outputspec')

    wf.connect(studyref,'study_ref', outputspec, 'study_ref')
    bin = pe.Node(fsl.ImageMaths(op_string = '-bin'),name="binarize_roi")
    changetype = pe.Node(fsl.ChangeDataType(output_datatype='short'),name='to_short')

    wf.connect(bg,'outfile',do_bg_mask,"in_file")
    # NOTE(review): ``shorty`` is a module-level connection modifier not
    # visible here — presumably casts the masked reference; confirm upstream.
    wf.connect(do_bg_mask,("out_file",shorty), outputspec,'reference')
    wf.connect(label2vol,'vol_label_file',bin,'in_file')
    wf.connect(bin,'out_file', changetype, 'in_file')
    wf.connect(changetype, 'out_file', outputspec, 'rois')
    wf.connect(surf_label,'labels',outputspec,'labels')
    return wf
Exemplo n.º 8
0
def test_label2vol():
    """Verify that each Label2Vol input trait carries the expected metadata
    (argstr, mandatory/xor/requires relationships, copyfile flags)."""
    expected = dict(
        annot_file=dict(
            copyfile=False,
            mandatory=True,
            xor=('label_file', 'annot_file', 'seg_file', 'aparc_aseg'),
            requires=('subject_id', 'hemi'),
            argstr='--annot %s',
        ),
        aparc_aseg=dict(
            xor=('label_file', 'annot_file', 'seg_file', 'aparc_aseg'),
            argstr='--aparc+aseg',
            mandatory=True,
        ),
        args=dict(argstr='%s', ),
        environ=dict(),
        fill_thresh=dict(argstr='--fillthresh %.f', ),
        hemi=dict(argstr='--hemi %s', ),
        identity=dict(
            xor=('reg_file', 'reg_header', 'identity'),
            argstr='--identity',
        ),
        invert_mtx=dict(argstr='--invertmtx', ),
        label_file=dict(
            copyfile=False,
            xor=('label_file', 'annot_file', 'seg_file', 'aparc_aseg'),
            argstr='--label %s...',
            mandatory=True,
        ),
        label_hit_file=dict(argstr='--hits %s', ),
        label_voxel_volume=dict(argstr='--labvoxvol %f', ),
        map_label_stat=dict(argstr='--label-stat %s', ),
        native_vox2ras=dict(argstr='--native-vox2ras', ),
        proj=dict(argstr='--proj %s %f %f %f', ),
        reg_file=dict(
            xor=('reg_file', 'reg_header', 'identity'),
            argstr='--reg %s',
        ),
        reg_header=dict(
            xor=('reg_file', 'reg_header', 'identity'),
            argstr='--regheader %s',
        ),
        seg_file=dict(
            copyfile=False,
            mandatory=True,
            xor=('label_file', 'annot_file', 'seg_file', 'aparc_aseg'),
            argstr='--seg %s',
        ),
        subject_id=dict(argstr='--subject %s', ),
        subjects_dir=dict(),
        surface=dict(argstr='--surf %s', ),
        template_file=dict(
            mandatory=True,
            argstr='--temp %s',
        ),
        vol_label_file=dict(argstr='--o %s', ),
    )
    iface = freesurfer.Label2Vol()
    # nose-style generative test: one yielded assertion per metadata entry.
    for trait_name, spec in expected.items():
        trait = iface.inputs.traits()[trait_name]
        for meta_name, expected_value in spec.items():
            yield assert_equal, getattr(trait, meta_name), expected_value
Exemplo n.º 9
0
def segstats_workflow(c, name='segstats'):
    """Build a FreeSurfer segstats workflow configured by ``c``.

    When ``c.use_annotation`` is set, standard-space annotations are
    transformed to the subject surface, converted to per-hemisphere volumes,
    and summed into a single label file; otherwise a label volume or
    standard label is used directly.  Outputs are sunk to ``c.sink_dir``.
    """
    import nipype.interfaces.fsl as fsl
    import nipype.interfaces.freesurfer as fs
    import nipype.interfaces.io as nio
    import nipype.pipeline.engine as pe
    from .scripts.u0a14c5b5899911e1bca80023dfa375f2.modified_nipype_workflows import create_get_stats_flow
    from .scripts.u0a14c5b5899911e1bca80023dfa375f2.utils import tolist, pickidx
    # Registration is handled inside the stats flow only when no
    # annotation-based path is used.
    if not c.use_annotation:
        workflow = create_get_stats_flow(name='segstats', withreg=c.use_reg)
    else:
        workflow = create_get_stats_flow(name='segstats')

    workflow.inputs.segstats.avgwf_txt_file = True
    datagrabber = c.datagrabber.create_dataflow()
    merge = pe.Node(fsl.Merge(dimension='t'), name='merge_files')
    inputspec = workflow.get_node('inputspec')
    subject_iterable = datagrabber.get_node("subject_id_iterable")
    # merge files grabbed
    stats = workflow.get_node('segstats')
    # print() call form keeps this importable under both Python 2 and 3;
    # the old print statement was a SyntaxError on Python 3.
    print("colortablefile: " + str(c.color_table_file))
    if c.color_table_file == "Default":
        stats.inputs.default_color_table = True
    elif c.color_table_file == "Color_Table":
        stats.inputs.color_table_file = c.color_file
    elif c.color_table_file == "GCA_color_table":
        stats.inputs.gca_color_table = c.color_file

    workflow.connect(datagrabber, ('datagrabber.in_files', tolist), merge,
                     'in_files')

    if c.use_annotation:

        surf2surf = pe.MapNode(fs.SurfaceTransform(
            source_subject=c.annot_space, subjects_dir=c.surf_dir),
                               name="surf2surf",
                               iterfield=['hemi', 'source_annot_file'])
        surf2surf.inputs.source_annot_file = [c.lh_annotation, c.rh_annotation]
        workflow.connect(subject_iterable, "subject_id", surf2surf,
                         "target_subject")
        surf2surf.inputs.hemi = ['lh', 'rh']

        # Sum the lh/rh label volumes into one combined label file.
        add = pe.Node(fsl.BinaryMaths(operation='add'), name="add")
        workflow.connect(add, 'out_file', inputspec, "label_file")
        label2vol = pe.MapNode(fs.Label2Vol(subjects_dir=c.surf_dir,
                                            proj=c.proj),
                               name='label2vol',
                               iterfield=["hemi", "annot_file"])
        workflow.connect(surf2surf, "out_file", label2vol, "annot_file")
        workflow.connect(subject_iterable, "subject_id", label2vol,
                         "subject_id")
        #fssource = pe.Node(nio.FreeSurferSource(subjects_dir = c.surf_dir),name='fssource')
        #workflow.connect(subject_iterable,"subject_id",fssource,"subject_id")
        #workflow.connect(subject_iterable,"subject_id",label2vol,"reg_header")
        #workflow.connect(fssource,"orig",label2vol,"template_file")
        workflow.connect(merge, "merged_file", label2vol, "template_file")
        label2vol.inputs.hemi = ['lh', 'rh']
        workflow.connect(datagrabber, 'datagrabber.reg_file', label2vol,
                         'reg_file')
        if c.inverse_reg:
            label2vol.inputs.invert_mtx = c.inverse_reg
        workflow.connect(label2vol, ('vol_label_file', pickidx, 0), add,
                         'in_file')
        workflow.connect(label2vol, ('vol_label_file', pickidx, 1), add,
                         'operand_file')

    workflow.connect(merge, 'merged_file', inputspec, 'source_file')

    if c.use_reg and not c.use_annotation:
        workflow.connect(datagrabber, 'datagrabber.reg_file', inputspec,
                         'reg_file')
        workflow.inputs.inputspec.subjects_dir = c.surf_dir
        workflow.inputs.inputspec.inverse = c.inverse_reg

    if c.use_standard_label and not c.use_annotation:
        workflow.inputs.inputspec.label_file = c.label_file
    elif not c.use_standard_label and not c.use_annotation:
        workflow.connect(datagrabber, 'datagrabber.label_file', inputspec,
                         "label_file")

    sinker = pe.Node(nio.DataSink(), name='sinker')
    sinker.inputs.base_directory = c.sink_dir

    workflow.connect(subject_iterable, 'subject_id', sinker, 'container')

    def get_subs(subject_id):
        """DataSink substitutions stripping the subject-id iterable prefix."""
        subs = [('_subject_id_%s' % subject_id, '')]
        return subs

    workflow.connect(subject_iterable, ('subject_id', get_subs), sinker,
                     'substitutions')
    outputspec = workflow.get_node('outputspec')
    workflow.connect(outputspec, 'stats_file', sinker, 'segstats.@stats')
    workflow.connect(stats, "avgwf_txt_file", sinker, 'segstats.@avg')

    return workflow
Exemplo n.º 10
0
def init_templateflow_wf(
    bids_dir,
    output_dir,
    participant_label,
    mov_template,
    ref_template='MNI152NLin2009cAsym',
    use_float=True,
    omp_nthreads=None,
    mem_gb=3.0,
    modality='T1w',
    normalization_quality='precise',
    name='templateflow_wf',
    fs_subjects_dir=None,
):
    """
    A Nipype workflow to perform image registration between two templates
    *R* and *M*. *R* is the *reference template*, selected by a templateflow
    identifier such as ``MNI152NLin2009cAsym``, and *M* is the *moving
    template* (e.g., ``MNI152Lin``). This workflow maps data defined on
    template-*M* space onto template-*R* space.


    1. Run the surrogate images through ``antsBrainExtraction``.
    2. Recompute :abbr:`INU (intensity non-uniformity)` correction using
        the mask obtained in 1).
    3. Independently, run spatial normalization of every
       :abbr:`INU (intensity non-uniformity)` corrected image
       (supplied via ``in_files``) to both templates.
    4. Calculate an initialization between both templates, using them directly.
    5. Run multi-channel image registration of the images resulting from
        3). Both sets of images (one registered to *R* and another to *M*)
        are then used as reference and moving images in the registration
        framework.

    **Parameters**

    in_files: list of files
        a list of paths pointing to the images that will be used as surrogates
    mov_template: str
        a templateflow identifier for template-*M*
    ref_template: str
        a templateflow identifier for template-*R* (default: ``MNI152NLin2009cAsym``).


    """
    # number of participants
    ninputs = len(participant_label)
    # Cap ANTs/ITK thread usage to the requested number of threads
    ants_env = {
        'NSLOTS': '%d' % omp_nthreads,
        'ITK_GLOBAL_DEFAULT_NUMBER_OF_THREADS': '%d' % omp_nthreads,
        'OMP_NUM_THREADS': '%d' % omp_nthreads,
    }

    # Get path to templates
    tpl_ref = str(
        get_template(ref_template, suffix=modality, desc=None, resolution=1))
    tpl_ref_mask = str(
        get_template(ref_template, suffix='mask', desc='brain', resolution=1))
    tpl_mov = str(
        get_template(mov_template, suffix=modality, desc=None, resolution=1))
    tpl_mov_mask = str(
        get_template(mov_template, suffix='mask', desc='brain', resolution=1))

    # The workflow iterates over the requested participant labels
    wf = pe.Workflow(name)
    inputnode = pe.Node(niu.IdentityInterface(fields=['participant_label']),
                        name='inputnode')
    inputnode.iterables = ('participant_label',
                           sorted(list(participant_label)))

    # Resolve each participant label to an input file within the BIDS tree
    pick_file = pe.Node(niu.Function(function=_bids_pick),
                        name='pick_file',
                        run_without_submitting=True)
    pick_file.inputs.bids_root = bids_dir

    # Brain-extract the surrogate images once per template (step 1)
    ref_bex = init_brain_extraction_wf(
        in_template=ref_template,
        omp_nthreads=omp_nthreads,
        mem_gb=mem_gb,
        bids_suffix=modality,
        name='reference_bex',
    )

    mov_bex = init_brain_extraction_wf(
        in_template=mov_template,
        omp_nthreads=omp_nthreads,
        mem_gb=mem_gb,
        bids_suffix=modality,
        name='moving_bex',
    )

    # Register the INU-corrected image to the reference template (step 3)
    ref_norm = pe.Node(Registration(from_file=pkgr.resource_filename(
        'niworkflows.data', 't1w-mni_registration_%s_000.json' %
        normalization_quality)),
                       name='ref_norm',
                       n_procs=omp_nthreads)
    ref_norm.inputs.fixed_image = tpl_ref
    ref_norm.inputs.fixed_image_masks = tpl_ref_mask
    ref_norm.inputs.environ = ants_env

    # Register the INU-corrected image to the other template
    mov_norm = pe.Node(Registration(from_file=pkgr.resource_filename(
        'niworkflows.data', 't1w-mni_registration_%s_000.json' %
        normalization_quality)),
                       name='mov_norm',
                       n_procs=omp_nthreads)
    mov_norm.inputs.fixed_image = tpl_mov
    mov_norm.inputs.fixed_image_masks = tpl_mov_mask
    mov_norm.inputs.environ = ants_env

    # Initialize between-templates transform with antsAI
    init_aff = pe.Node(AI(
        metric=('Mattes', 32, 'Regular', 0.2),
        transform=('Affine', 0.1),
        search_factor=(20, 0.12),
        principal_axes=False,
        convergence=(10, 1e-6, 10),
        verbose=True,
        fixed_image=tpl_ref,
        fixed_image_mask=tpl_ref_mask,
        moving_image=tpl_mov,
        moving_image_mask=tpl_mov_mask,
        environ=ants_env,
    ),
                       name='init_aff',
                       n_procs=omp_nthreads)

    # JoinNodes collect the per-participant normalized images across the
    # ``inputnode`` iterable so the final registration sees all of them
    ref_buffer = pe.JoinNode(niu.IdentityInterface(fields=['fixed_image']),
                             joinsource='inputnode',
                             joinfield='fixed_image',
                             name='ref_buffer')

    mov_buffer = pe.JoinNode(niu.IdentityInterface(fields=['moving_image']),
                             joinsource='inputnode',
                             joinfield='moving_image',
                             name='mov_buffer')

    # Multi-channel registration between the two template spaces (step 5):
    # every metric entry is replicated once per participant, so each subject
    # contributes one channel to the cost function
    flow = pe.Node(
        Registration(from_file=pkgr.resource_filename(
            'niworkflows.data', 't1w-mni_registration_%s_000.json' %
            normalization_quality)),
        name='flow_norm',
        n_procs=omp_nthreads,
    )
    flow.inputs.fixed_image_masks = tpl_ref_mask
    flow.inputs.moving_image_masks = tpl_mov_mask
    flow.inputs.metric = [[v] * ninputs for v in flow.inputs.metric]
    # equal weights across participants (each stage's weights sum to 1)
    flow.inputs.metric_weight = [[1 / ninputs] * ninputs
                                 for _ in flow.inputs.metric_weight]
    flow.inputs.radius_or_number_of_bins = [
        [v] * ninputs for v in flow.inputs.radius_or_number_of_bins
    ]
    flow.inputs.sampling_percentage = [[v] * ninputs
                                       for v in flow.inputs.sampling_percentage
                                       ]
    flow.inputs.sampling_strategy = [[v] * ninputs
                                     for v in flow.inputs.sampling_strategy]
    flow.inputs.environ = ants_env

    # Datasinking
    ref_norm_ds = pe.Node(DerivativesDataSink(base_directory=str(
        output_dir.parent),
                                              out_path_base=output_dir.name,
                                              space=ref_template,
                                              desc='preproc',
                                              keep_dtype=True),
                          name='ref_norm_ds',
                          run_without_submitting=True)

    mov_norm_ds = pe.Node(DerivativesDataSink(base_directory=str(
        output_dir.parent),
                                              out_path_base=output_dir.name,
                                              space=mov_template,
                                              desc='preproc',
                                              keep_dtype=True),
                          name='mov_norm_ds',
                          run_without_submitting=True)

    # Sink the final between-templates transform (mov -> ref)
    xfm_ds = pe.Node(DerivativesDataSink(
        base_directory=str(output_dir.parent),
        out_path_base=output_dir.name,
        allowed_entities=['from', 'mode'],
        mode='image',
        suffix='xfm',
        source_file='group/tpl-{0}_T1w.nii.gz'.format(ref_template),
        **{'from': mov_template}),
                     name='xfm_ds',
                     run_without_submitting=True)

    wf.connect([
        (inputnode, pick_file, [('participant_label', 'participant_label')]),
        (pick_file, ref_bex, [('out', 'inputnode.in_files')]),
        (pick_file, mov_bex, [('out', 'inputnode.in_files')]),
        (ref_bex, ref_norm, [('outputnode.bias_corrected', 'moving_image'),
                             ('outputnode.out_mask', 'moving_image_masks'),
                             ('norm.forward_transforms',
                              'initial_moving_transform')]),
        (ref_bex, mov_norm, [('outputnode.bias_corrected', 'moving_image')]),
        (mov_bex, mov_norm, [('outputnode.out_mask', 'moving_image_masks'),
                             ('norm.forward_transforms',
                              'initial_moving_transform')]),
        (init_aff, flow, [('output_transform', 'initial_moving_transform')]),
        (ref_norm, ref_buffer, [('warped_image', 'fixed_image')]),
        (mov_norm, mov_buffer, [('warped_image', 'moving_image')]),
        (ref_buffer, flow, [('fixed_image', 'fixed_image')]),
        (mov_buffer, flow, [('moving_image', 'moving_image')]),
        (pick_file, ref_norm_ds, [('out', 'source_file')]),
        (ref_norm, ref_norm_ds, [('warped_image', 'in_file')]),
        (pick_file, mov_norm_ds, [('out', 'source_file')]),
        (mov_norm, mov_norm_ds, [('warped_image', 'in_file')]),
        (flow, xfm_ds, [('composite_transform', 'in_file')]),
    ])

    # Optional FreeSurfer-driven outputs: project each subject's aparc
    # segmentation and surfaces into both template spaces, then fuse them
    # across subjects (requires a populated FreeSurfer subjects directory)
    if fs_subjects_dir:
        fssource = pe.Node(FreeSurferSource(subjects_dir=str(fs_subjects_dir)),
                           name='fssource',
                           run_without_submitting=True)
        # Resample the aparc into the subject's native anatomical grid
        tonative = pe.Node(fs.Label2Vol(subjects_dir=str(fs_subjects_dir)),
                           name='tonative')
        tonii = pe.Node(fs.MRIConvert(out_type='niigz',
                                      resample_type='nearest'),
                        name='tonii')

        # Warp the native aparc into each template space (MultiLabel keeps
        # discrete label values intact)
        ref_aparc = pe.Node(ApplyTransforms(interpolation='MultiLabel',
                                            float=True,
                                            reference_image=tpl_ref,
                                            environ=ants_env),
                            name='ref_aparc',
                            mem_gb=1,
                            n_procs=omp_nthreads)

        mov_aparc = pe.Node(ApplyTransforms(interpolation='MultiLabel',
                                            float=True,
                                            reference_image=tpl_mov,
                                            environ=ants_env),
                            name='mov_aparc',
                            mem_gb=1,
                            n_procs=omp_nthreads)

        ref_aparc_buffer = pe.JoinNode(niu.IdentityInterface(fields=['aparc']),
                                       joinsource='inputnode',
                                       joinfield='aparc',
                                       name='ref_aparc_buffer')

        # Joint label fusion of the per-participant aparcs in reference space
        ref_join_labels = pe.Node(AntsJointFusion(
            target_image=[tpl_ref],
            out_label_fusion='merged_aparc.nii.gz',
            out_intensity_fusion_name_format='merged_aparc_intensity_%d.nii.gz',
            out_label_post_prob_name_format='merged_aparc_posterior_%d.nii.gz',
            out_atlas_voting_weight_name_format='merged_aparc_weight_%d.nii.gz',
            environ=ants_env,
        ),
                                  name='ref_join_labels',
                                  n_procs=omp_nthreads)

        ref_join_labels_ds = pe.Node(DerivativesDataSink(
            base_directory=str(output_dir.parent),
            out_path_base=output_dir.name,
            suffix='dtissue',
            desc='aparc',
            keep_dtype=False,
            source_file='group/tpl-{0}_T1w.nii.gz'.format(ref_template)),
                                     name='ref_join_labels_ds',
                                     run_without_submitting=True)

        ref_join_probs_ds = pe.Node(DerivativesDataSink(
            base_directory=str(output_dir.parent),
            out_path_base=output_dir.name,
            suffix='probtissue',
            desc='aparc',
            keep_dtype=False,
            source_file='group/tpl-{0}_T1w.nii.gz'.format(ref_template)),
                                    name='ref_join_probs_ds',
                                    run_without_submitting=True)

        # ref_join_voting_ds = pe.Node(
        #     DerivativesDataSink(
        #         base_directory=str(output_dir.parent),
        #         out_path_base=output_dir.name, space=ref_template,
        #         suffix='probtissue', desc='aparcvoting', keep_dtype=False,
        #         source_file='group/tpl-{0}_T1w.nii.gz'.format(ref_template)),
        #     name='ref_join_voting_ds', run_without_submitting=True)

        mov_aparc_buffer = pe.JoinNode(niu.IdentityInterface(fields=['aparc']),
                                       joinsource='inputnode',
                                       joinfield='aparc',
                                       name='mov_aparc_buffer')

        # Same fusion, but in the moving-template space
        mov_join_labels = pe.Node(AntsJointFusion(
            target_image=[tpl_mov],
            out_label_fusion='merged_aparc.nii.gz',
            out_intensity_fusion_name_format='merged_aparc_intensity_%d.nii.gz',
            out_label_post_prob_name_format='merged_aparc_posterior_%d.nii.gz',
            out_atlas_voting_weight_name_format='merged_aparc_weight_%d.nii.gz',
            environ=ants_env,
        ),
                                  name='mov_join_labels',
                                  n_procs=omp_nthreads)

        mov_join_labels_ds = pe.Node(DerivativesDataSink(
            base_directory=str(output_dir.parent),
            out_path_base=output_dir.name,
            suffix='dtissue',
            desc='aparc',
            keep_dtype=False,
            source_file='group/tpl-{0}_T1w.nii.gz'.format(mov_template)),
                                     name='mov_join_labels_ds',
                                     run_without_submitting=True)

        mov_join_probs_ds = pe.Node(DerivativesDataSink(
            base_directory=str(output_dir.parent),
            out_path_base=output_dir.name,
            suffix='probtissue',
            desc='aparc',
            keep_dtype=False,
            source_file='group/tpl-{0}_T1w.nii.gz'.format(mov_template)),
                                    name='mov_join_probs_ds',
                                    run_without_submitting=True)

        ref_aparc_ds = pe.Node(DerivativesDataSink(
            base_directory=str(output_dir.parent),
            out_path_base=output_dir.name,
            space=ref_template,
            suffix='dtissue',
            desc='aparc',
            keep_dtype=False),
                               name='ref_aparc_ds',
                               run_without_submitting=True)

        mov_aparc_ds = pe.Node(DerivativesDataSink(
            base_directory=str(output_dir.parent),
            out_path_base=output_dir.name,
            space=mov_template,
            suffix='dtissue',
            desc='aparc',
            keep_dtype=False),
                               name='mov_aparc_ds',
                               run_without_submitting=True)
        # Extract surfaces
        cifti_wf = init_gifti_surface_wf(name='cifti_surfaces',
                                         subjects_dir=str(fs_subjects_dir))

        # Move surfaces to template spaces
        gii2csv = pe.MapNode(GiftiToCSV(itk_lps=True),
                             iterfield=['in_file'],
                             name='gii2csv')
        ref_map_surf = pe.MapNode(ApplyTransformsToPoints(dimension=3,
                                                          environ=ants_env),
                                  n_procs=omp_nthreads,
                                  name='ref_map_surf',
                                  iterfield=['input_file'])
        ref_csv2gii = pe.MapNode(CSVToGifti(itk_lps=True),
                                 name='ref_csv2gii',
                                 iterfield=['in_file', 'gii_file'])
        ref_surfs_buffer = pe.JoinNode(
            niu.IdentityInterface(fields=['surfaces']),
            joinsource='inputnode',
            joinfield='surfaces',
            name='ref_surfs_buffer')
        ref_surfs_unzip = pe.Node(UnzipJoinedSurfaces(),
                                  name='ref_surfs_unzip',
                                  run_without_submitting=True)
        # Build average surfaces: point clouds -> Poisson reconstruction ->
        # GIFTI -> smoothing
        ref_ply = pe.MapNode(SurfacesToPointCloud(),
                             name='ref_ply',
                             iterfield=['in_files'])
        ref_recon = pe.MapNode(PoissonRecon(),
                               name='ref_recon',
                               iterfield=['in_file'])
        ref_avggii = pe.MapNode(PLYtoGifti(),
                                name='ref_avggii',
                                iterfield=['in_file', 'surf_key'])
        ref_smooth = pe.MapNode(fs.SmoothTessellation(),
                                name='ref_smooth',
                                iterfield=['in_file'])

        ref_surfs_ds = pe.Node(DerivativesDataSink(
            base_directory=str(output_dir.parent),
            out_path_base=output_dir.name,
            space=ref_template,
            keep_dtype=False,
            compress=False),
                               name='ref_surfs_ds',
                               run_without_submitting=True)
        ref_avg_ds = pe.Node(DerivativesDataSink(
            base_directory=str(output_dir.parent),
            out_path_base=output_dir.name,
            space=ref_template,
            keep_dtype=False,
            compress=False,
            source_file='group/tpl-{0}_T1w.nii.gz'.format(ref_template)),
                             name='ref_avg_ds',
                             run_without_submitting=True)

        mov_map_surf = pe.MapNode(ApplyTransformsToPoints(dimension=3,
                                                          environ=ants_env),
                                  n_procs=omp_nthreads,
                                  name='mov_map_surf',
                                  iterfield=['input_file'])
        mov_csv2gii = pe.MapNode(CSVToGifti(itk_lps=True),
                                 name='mov_csv2gii',
                                 iterfield=['in_file', 'gii_file'])
        mov_surfs_buffer = pe.JoinNode(
            niu.IdentityInterface(fields=['surfaces']),
            joinsource='inputnode',
            joinfield='surfaces',
            name='mov_surfs_buffer')
        mov_surfs_unzip = pe.Node(UnzipJoinedSurfaces(),
                                  name='mov_surfs_unzip',
                                  run_without_submitting=True)
        mov_ply = pe.MapNode(SurfacesToPointCloud(),
                             name='mov_ply',
                             iterfield=['in_files'])
        mov_recon = pe.MapNode(PoissonRecon(),
                               name='mov_recon',
                               iterfield=['in_file'])
        mov_avggii = pe.MapNode(PLYtoGifti(),
                                name='mov_avggii',
                                iterfield=['in_file', 'surf_key'])
        mov_smooth = pe.MapNode(fs.SmoothTessellation(),
                                name='mov_smooth',
                                iterfield=['in_file'])

        mov_surfs_ds = pe.Node(DerivativesDataSink(
            base_directory=str(output_dir.parent),
            out_path_base=output_dir.name,
            space=mov_template,
            keep_dtype=False,
            compress=False),
                               name='mov_surfs_ds',
                               run_without_submitting=True)
        mov_avg_ds = pe.Node(DerivativesDataSink(
            base_directory=str(output_dir.parent),
            out_path_base=output_dir.name,
            space=mov_template,
            keep_dtype=False,
            compress=False,
            source_file='group/tpl-{0}_T1w.nii.gz'.format(mov_template)),
                             name='mov_avg_ds',
                             run_without_submitting=True)

        wf.connect([
            (inputnode, fssource, [(('participant_label', _sub_decorate),
                                    'subject_id')]),
            (inputnode, cifti_wf, [(('participant_label', _sub_decorate),
                                    'inputnode.subject_id')]),
            (pick_file, cifti_wf, [('out', 'inputnode.in_t1w')]),
            (pick_file, tonii, [('out', 'reslice_like')]),
            # Select DKT aparc
            (fssource, tonative, [(('aparc_aseg', _last), 'seg_file'),
                                  ('rawavg', 'template_file'),
                                  ('aseg', 'reg_header')]),
            (tonative, tonii, [('vol_label_file', 'in_file')]),
            (tonii, ref_aparc, [('out_file', 'input_image')]),
            (tonii, mov_aparc, [('out_file', 'input_image')]),
            (ref_norm, ref_aparc, [('composite_transform', 'transforms')]),
            (mov_norm, mov_aparc, [('composite_transform', 'transforms')]),
            (ref_buffer, ref_join_labels, [('fixed_image', 'atlas_image')]),
            (ref_aparc, ref_aparc_buffer, [('output_image', 'aparc')]),
            (ref_aparc_buffer, ref_join_labels,
             [('aparc', 'atlas_segmentation_image')]),
            (mov_buffer, mov_join_labels, [('moving_image', 'atlas_image')]),
            (mov_aparc, mov_aparc_buffer, [('output_image', 'aparc')]),
            (mov_aparc_buffer, mov_join_labels,
             [('aparc', 'atlas_segmentation_image')]),
            # Datasinks
            (ref_join_labels, ref_join_labels_ds, [('out_label_fusion',
                                                    'in_file')]),
            (ref_join_labels, ref_join_probs_ds,
             [('out_label_post_prob', 'in_file'),
              (('out_label_post_prob', _get_extra), 'extra_values')]),
            # (ref_join_labels, ref_join_voting_ds, [
            #     ('out_atlas_voting_weight_name_format', 'in_file')]),
            (mov_join_labels, mov_join_labels_ds, [('out_label_fusion',
                                                    'in_file')]),
            (mov_join_labels, mov_join_probs_ds,
             [('out_label_post_prob', 'in_file'),
              (('out_label_post_prob', _get_extra), 'extra_values')]),
            (pick_file, ref_aparc_ds, [('out', 'source_file')]),
            (ref_aparc, ref_aparc_ds, [('output_image', 'in_file')]),
            (pick_file, mov_aparc_ds, [('out', 'source_file')]),
            (mov_aparc, mov_aparc_ds, [('output_image', 'in_file')]),
            # Mapping ref surfaces
            (cifti_wf, gii2csv, [(('outputnode.surf_norm', _discard_inflated),
                                  'in_file')]),
            (gii2csv, ref_map_surf, [('out_file', 'input_file')]),
            (ref_norm, ref_map_surf, [(('inverse_composite_transform',
                                        _ensure_list), 'transforms')]),
            (ref_map_surf, ref_csv2gii, [('output_file', 'in_file')]),
            (cifti_wf, ref_csv2gii, [(('outputnode.surf_norm',
                                       _discard_inflated), 'gii_file')]),
            (pick_file, ref_surfs_ds, [('out', 'source_file')]),
            (ref_csv2gii, ref_surfs_ds, [('out_file', 'in_file'),
                                         (('out_file', _get_surf_extra),
                                          'extra_values')]),
            (ref_csv2gii, ref_surfs_buffer, [('out_file', 'surfaces')]),
            (ref_surfs_buffer, ref_surfs_unzip, [('surfaces', 'in_files')]),
            (ref_surfs_unzip, ref_ply, [('out_files', 'in_files')]),
            (ref_ply, ref_recon, [('out_file', 'in_file')]),
            (ref_recon, ref_avggii, [('out_file', 'in_file')]),
            (ref_surfs_unzip, ref_avggii, [('surf_keys', 'surf_key')]),
            (ref_avggii, ref_smooth, [('out_file', 'in_file')]),
            (ref_smooth, ref_avg_ds, [('surface', 'in_file'),
                                      (('surface', _get_surf_extra),
                                       'extra_values')]),

            # Mapping mov surfaces
            (gii2csv, mov_map_surf, [('out_file', 'input_file')]),
            (mov_norm, mov_map_surf, [(('inverse_composite_transform',
                                        _ensure_list), 'transforms')]),
            (mov_map_surf, mov_csv2gii, [('output_file', 'in_file')]),
            (cifti_wf, mov_csv2gii, [(('outputnode.surf_norm',
                                       _discard_inflated), 'gii_file')]),
            (pick_file, mov_surfs_ds, [('out', 'source_file')]),
            (mov_csv2gii, mov_surfs_ds, [('out_file', 'in_file'),
                                         (('out_file', _get_surf_extra),
                                          'extra_values')]),
            (mov_csv2gii, mov_surfs_buffer, [('out_file', 'surfaces')]),
            (mov_surfs_buffer, mov_surfs_unzip, [('surfaces', 'in_files')]),
            (mov_surfs_unzip, mov_ply, [('out_files', 'in_files')]),
            (mov_ply, mov_recon, [('out_file', 'in_file')]),
            (mov_recon, mov_avggii, [('out_file', 'in_file')]),
            (mov_surfs_unzip, mov_avggii, [('surf_keys', 'surf_key')]),
            (mov_avggii, mov_smooth, [('out_file', 'in_file')]),
            (mov_smooth, mov_avg_ds, [('surface', 'in_file'),
                                      (('surface', _get_surf_extra),
                                       'extra_values')]),
        ])

    return wf
# Exemplo n.º 11
def create_masks_from_surface_workflow(name='masks_from_surface'):
    """Creates EPI space masks from surface labels.

    Requires FSL and FreeSurfer tools.

    Parameters
    ----------
    name : string
        name of workflow

    Example
    -------
    >>> masks_from_surface = create_masks_from_surface_workflow('masks_from_surface')
    >>> masks_from_surface.inputs.inputspec.EPI_space_file = 'example_func.nii.gz'
    >>> masks_from_surface.inputs.inputspec.label_directory = 'retmap'
    >>> masks_from_surface.inputs.inputspec.freesurfer_subject_ID = 'sub_01'
    >>> masks_from_surface.inputs.inputspec.freesurfer_subject_dir = '$SUBJECTS_DIR'

    from spynoza.workflows.sub_workflows.masks import create_masks_from_surface_workflow
    mfs = create_masks_from_surface_workflow(name = 'mfs')
    mfs.inputs.inputspec.freesurfer_subject_dir = '/home/raw_data/-2014/reward/human_reward/data/FS_SJID'
    mfs.inputs.inputspec.label_directory = 'retmap'
    mfs.inputs.inputspec.EPI_space_file = '/home/shared/-2014/reward/new/sub-002/reg/example_func.nii.gz'
    mfs.inputs.inputspec.output_directory = '/home/shared/-2014/reward/new/sub-002/masks/'
    mfs.inputs.inputspec.freesurfer_subject_ID = 'sub-002'
    mfs.inputs.inputspec.reg_file = '/home/shared/-2014/reward/new/sub-002/reg/register.dat'
    mfs.inputs.inputspec.fill_thresh = 0.01
    mfs.inputs.inputspec.re = '*.label'
    mfs.run('MultiProc', plugin_args={'n_procs': 32})


    Inputs::
          inputspec.EPI_space_file : EPI session file
          inputspec.reg_file : EPI session registration file
          inputspec.re : regular expression for selecting the label files
          inputspec.fill_thresh :  label2vol fill threshold argument
          inputspec.freesurfer_subject_ID : FS subject ID
          inputspec.freesurfer_subject_dir : $SUBJECTS_DIR
          inputspec.label_directory : directory that contains the labels in 'label'
          inputspec.output_directory : output directory in which a subfolder
                                        with the name of label_directory is placed.
    Outputs::
           outputspec.masks : the output masks that are created.
    """
    ### NODES
    import nipype.pipeline as pe
    from nipype.interfaces.utility import IdentityInterface
    import nipype.interfaces.io as nio
    # freesurfer interfaces are needed for Label2Vol below; the original
    # function omitted this import and referenced an undefined name.
    import nipype.interfaces.freesurfer as freesurfer
    from .nodes import FS_label_list_glob_node

    input_node = pe.Node(IdentityInterface(
        fields=['EPI_space_file',
                'output_directory',
                'freesurfer_subject_ID',
                'freesurfer_subject_dir',
                'label_directory',
                'reg_file',
                'fill_thresh',
                're']), name='inputspec')
    output_node = pe.Node(IdentityInterface(fields=['masks']),
                          name='outputspec')

    # housekeeping function for finding label files in the FS directory
    # (use the imported node -- the original referenced an undefined
    # ``FS_LabelNode`` here, which raised NameError at call time)
    FS_label_list_node = pe.Node(interface=FS_label_list_glob_node,
                                 name='FS_label_list_node')

    # one Label2Vol execution per label file found
    label_2_vol_node = pe.MapNode(interface=freesurfer.Label2Vol(),
                                  name='l2v',
                                  iterfield=['label_file'])

    ########################################################################################
    # actual workflow
    ########################################################################################

    masks_from_surface_workflow = pe.Workflow(name=name)

    masks_from_surface_workflow.connect(input_node, 'freesurfer_subject_ID', FS_label_list_node, 'freesurfer_subject_ID')
    masks_from_surface_workflow.connect(input_node, 'freesurfer_subject_dir', FS_label_list_node, 'freesurfer_subject_dir')
    masks_from_surface_workflow.connect(input_node, 'label_directory', FS_label_list_node, 'label_directory')
    masks_from_surface_workflow.connect(input_node, 're', FS_label_list_node, 're')

    masks_from_surface_workflow.connect(input_node, 'reg_file', label_2_vol_node, 'reg_file')
    masks_from_surface_workflow.connect(input_node, 'EPI_space_file', label_2_vol_node, 'template_file')
    masks_from_surface_workflow.connect(input_node, 'fill_thresh', label_2_vol_node, 'fill_thresh')

    masks_from_surface_workflow.connect(input_node, 'freesurfer_subject_dir', label_2_vol_node, 'subjects_dir')
    masks_from_surface_workflow.connect(input_node, 'freesurfer_subject_ID', label_2_vol_node, 'subject_id')

    # and the iter field filled in from the label collection node
    masks_from_surface_workflow.connect(FS_label_list_node, 'label_list', label_2_vol_node, 'label_file')

    ########################################################################################
    # outputs via datasink
    ########################################################################################
    datasink = pe.Node(nio.DataSink(), name='sinker')
    datasink.inputs.parameterization = False

    # first link the workflow's output_directory into the datasink.
    masks_from_surface_workflow.connect(input_node, 'output_directory', datasink, 'base_directory')

    masks_from_surface_workflow.connect(label_2_vol_node, 'vol_label_file', datasink, 'roi')
    masks_from_surface_workflow.connect(label_2_vol_node, 'vol_label_file', output_node, 'masks')

    return masks_from_surface_workflow