Example #1
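A minimal sketch of the imports this example assumes (nipype, fmriprep's surface-sampling workflow, and a small flattening helper); the exact import path for init_bold_surf_wf and the definition of flatten depend on the surrounding project.

import os.path as op
from itertools import product

import nipype.pipeline.engine as pe
import nipype.interfaces.utility as niu
import nipype.interfaces.fsl as fsl
from nipype.interfaces.io import ExportFile

# init_bold_surf_wf comes from fmriprep (import path varies across releases), e.g.:
# from fmriprep.workflows.bold.resampling import init_bold_surf_wf

# flatten is assumed to be a small user-defined helper, e.g.:
# def flatten(nested):
#     return [item for sublist in nested for item in sublist]
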
def main(subject, session, bids_folder):

    wf = pe.Workflow(name=f'sample_com_to_fsnative_{subject}_{session}',
                     base_dir='/tmp')

    spaces = ['fsnative', 'fsaverage']

    tms_dir = op.join(bids_folder, 'derivatives', 'tms_targets',
                      f'sub-{subject}', f'ses-{session}', 'func')

    surf_wf = init_bold_surf_wf(4, spaces, True, name='sample2surf')

    inputnode = pe.Node(niu.IdentityInterface(fields=[
        'source_file', 'subjects_dir', 'subject_id', 't1w2fsnative_xfm'
    ]),
                        name='inputnode')

    source_file = op.join(tms_dir,
                          f'sub-{subject}_desc-r2smoothnpc_mask.nii.gz')
    print(source_file)
    inputnode.inputs.source_file = source_file

    to_float = pe.Node(fsl.ChangeDataType(output_datatype='float'),
                       name='to_float')

    inputnode.inputs.subjects_dir = op.join(bids_folder, 'derivatives',
                                            'freesurfer')
    inputnode.inputs.subject_id = f'sub-{subject}'
    inputnode.inputs.t1w2fsnative_xfm = op.join(
        bids_folder,
        f'derivatives/fmriprep/sub-{subject}/anat/sub-{subject}_from-T1w_to-fsnative_mode-image_xfm.txt'
    )

    wf.connect(inputnode, 'source_file', to_float, 'in_file')
    wf.connect(to_float, 'out_file', surf_wf, 'inputnode.source_file')

    wf.connect([(inputnode, surf_wf,
                 [('subjects_dir', 'inputnode.subjects_dir'),
                  ('subject_id', 'inputnode.subject_id'),
                  ('t1w2fsnative_xfm', 'inputnode.t1w2fsnative_xfm')])])

    export_file = pe.MapNode(ExportFile(clobber=True),
                             iterfield=['in_file', 'out_file'],
                             name='exporter')

    export_file.inputs.out_file = [
        op.join(
            bids_folder,
            f'derivatives/tms_targets/sub-{subject}/ses-{session}/func/sub-{subject}_ses-{session}_desc-r2com.volume.optim_space-{space}_hemi-{hemi}.func.gii'
        ) for space, hemi in product(spaces, ['L', 'R'])
    ]

    wf.connect(surf_wf, ('outputnode.surfaces', flatten), export_file,
               'in_file')

    wf.run(plugin='MultiProc', plugin_args={'n_procs': 4})
Example #2
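A minimal sketch of what this snippet assumes: the nipype/FSL/ANTs imports below, plus getSRGS and combine_stats as user-defined functions, and a parameters mapping whose values support .tolist() (e.g. numpy arrays).

import nipype.pipeline.engine as pe
import nipype.interfaces.utility as niu
import nipype.interfaces.fsl as fsl
import nipype.interfaces.ants as ants

# getSRGS (the region-growing function) and combine_stats are assumed to be
# defined in the calling module; `parameters` is expected to map the iterable
# parameter names to numpy arrays, hence the .tolist() calls below.
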
def get_workflow(parameters, name=0):
    wf = pe.Workflow(name="%04d" % name + "regionGrowing")
    wf.base_dir = "/scratch/henry_temp/keshavan/region_growing_test"
    n = pe.Node(niu.Function(input_names=[
        "inputv", "seeds", "multiplier", "nbhd", "iterations", "timestep",
        "smoothingiterations"
    ],
                             output_names=["outfile"],
                             function=getSRGS),
                name="srgs")
    inputspec = pe.Node(niu.IdentityInterface(fields=["seeds", "in_file"]),
                        name="inputspec")
    n.iterables = [(q, parameters[q].tolist()) for q in [
        "multiplier", "nbhd", "iterations", "timestep", "smoothingiterations"
    ]]
    n.synchronize = True
    wf.connect(inputspec, "seeds", n, "seeds")
    wf.connect(inputspec, "in_file", n, "inputv")

    dt = pe.Node(fsl.ChangeDataType(output_datatype="short"), name="changedt")
    wf.connect(n, "outfile", dt, "in_file")

    stats = pe.Node(fsl.ImageStats(op_string="-c -w"), name="stats")
    wf.connect(dt, "out_file", stats, "in_file")

    avg = pe.JoinNode(ants.AverageImages(dimension=3, normalize=False),
                      name="average",
                      joinsource="srgs",
                      joinfield=["images"])
    wf.connect(dt, "out_file", avg, "images")

    st = pe.JoinNode(niu.Function(input_names=["out_stats", "parameters"],
                                  output_names=["outfile"],
                                  function=combine_stats),
                     name="combiner",
                     joinsource="srgs",
                     joinfield=["out_stats"])
    #wf.connect(dt, "out_file", st, "out_files")
    wf.connect(stats, "out_stat", st, "out_stats")
    st.inputs.parameters = parameters

    outputspec = pe.Node(niu.IdentityInterface(fields=["avg_image", "stats"]),
                         name="outputspec")
    wf.connect(avg, "output_average_image", outputspec, "avg_image")
    wf.connect(st, "outfile", outputspec, "stats")
    return wf, inputspec, outputspec
Example #3
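A minimal sketch of the names this fragment assumes; datasink and substitutions are created earlier in the same script.

from nipype import Node
import nipype.interfaces.fsl as fsl

# `datasink` (a DataSink node) and `substitutions` are assumed to have been
# defined earlier in the script.
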
datasink.inputs.substitutions = substitutions

#-----------------------------------------------------------------------------------------------------
# In[6]:
Bmatrix = '/media/amr/Amr_4TB/Work/October_Acquistion/Bmatrix_ExploreDTI.txt'
Wax_FA_Template = '/media/amr/HDD/Work/standard/FMRIB58_FA_2mm.nii.gz'
Study_Template = '/media/amr/HDD/Work/October_Acquistion/FA_Template_Cluster.nii.gz'
#The AND and NOT masks are added together to facilitate the transformations
CC_mask_AND_Study = '/media/amr/Amr_4TB/Work/October_Acquistion/Standard_Diffusion/CC_FA_Study_Template.nii'
CC_mask_NOT_Study = '/media/amr/Amr_4TB/Work/October_Acquistion/Standard_Diffusion/CC_Exclusion_FA_Study_Template_mask.nii'

#-----------------------------------------------------------------------------------------------------
# In[7]:
#We need to change from .nii.gz to .nii
decompress = Node(fsl.ChangeDataType(), name='decompress')
decompress.inputs.output_datatype = 'float'
decompress.inputs.output_type = 'NIFTI'


#-----------------------------------------------------------------------------------------------------
# In[8]:
def ExploreDTI_sort(eddy_file):
    import nipype.interfaces.matlab as Matlab
    import os
    import re
    from shutil import copy

    matlab = Matlab.MatlabCommand()

    # below is where you add paths that MATLAB might require; this is equivalent to addpath()
Example #4
def localizer(name='localizer'):
    import nipype.interfaces.freesurfer as fs
    import nipype.interfaces.fsl as fsl
    import nipype.pipeline.engine as pe
    import nipype.interfaces.utility as niu

    wf = pe.Workflow(name=name)
    inputspec = pe.Node(niu.IdentityInterface(fields=[
        "subject_id", "subjects_dir", "overlay", 'reg', 'mean', 'thresh',
        'roi', "mask_overlay", "use_mask_overlay", "uthresh"
    ]),
                        name='inputspec')
    surf_label = pe.MapNode(niu.Function(input_names=[
        'vertex', 'hemi', 'subject', 'overlay', 'reg', 'sd', 'thresh'
    ],
                                         output_names=['filename'],
                                         function=get_surface_label),
                            name='get_surface_label',
                            iterfield=['hemi', 'vertex'])
    surf_label.inputs.hemi = ['lh', 'rh']
    #surf_label.inputs.vertex = [61091, 60437]
    #surf_label.inputs.thresh = 1.5

    masker = pe.Node(niu.Function(
        input_names=['mask', 'overlay', 'use_mask_overlay', 'thresh'],
        output_names=['outfile'],
        function=mask_overlay),
                     name='mask_overlay')

    bg = pe.Node(niu.Function(input_names=['overlay', 'uthresh'],
                              output_names=['outfile'],
                              function=background),
                 name='background')
    wf.connect(inputspec, 'overlay', bg, 'overlay')
    wf.connect(inputspec, 'uthresh', bg, 'uthresh')
    wf.connect(inputspec, 'overlay', masker, 'overlay')
    wf.connect(inputspec, 'mask_overlay', masker, 'mask')
    wf.connect(inputspec, 'use_mask_overlay', masker, 'use_mask_overlay')
    wf.connect(inputspec, 'thresh', masker, 'thresh')
    wf.connect(masker, 'outfile', surf_label, 'overlay')

    wf.connect(inputspec, "subject_id", surf_label, "subject")
    wf.connect(inputspec, "subjects_dir", surf_label, "sd")
    #wf.connect(inputspec,"overlay",surf_label,"overlay")
    wf.connect(inputspec, "reg", surf_label, "reg")

    label2vol = pe.Node(fs.Label2Vol(), name='labels2vol')
    wf.connect(inputspec, 'subjects_dir', label2vol, 'subjects_dir')
    wf.connect(inputspec, 'mean', label2vol, 'template_file')
    wf.connect(inputspec, 'reg', label2vol, 'reg_file')
    wf.connect(surf_label, 'filename', label2vol, 'label_file')

    verts = pe.MapNode(niu.Function(input_names=[
        'sub', 'sd', 'overlay', 'reg', 'mean', 'hemi', 'roi', 'thresh'
    ],
                                    output_names=['vertex'],
                                    function=get_vertices),
                       name='get_verts',
                       iterfield=['hemi'])
    verts.inputs.hemi = ['lh', 'rh']
    wf.connect(inputspec, 'subject_id', verts, 'sub')
    wf.connect(inputspec, 'subjects_dir', verts, 'sd')
    #wf.connect(inputspec,'overlay',verts,'overlay')
    wf.connect(masker, 'outfile', verts, 'overlay')
    wf.connect(inputspec, 'reg', verts, 'reg')
    wf.connect(inputspec, 'mean', verts, 'mean')
    wf.connect(inputspec, 'thresh', verts, 'thresh')
    wf.connect(inputspec, 'roi', verts, 'roi')
    wf.connect(verts, 'vertex', surf_label, 'vertex')
    wf.connect(inputspec, 'thresh', surf_label, 'thresh')

    studyref = pe.Node(niu.Function(input_names=['mean'],
                                    output_names=['study_ref'],
                                    function=study_ref),
                       name='studyref')
    wf.connect(inputspec, 'mean', studyref, 'mean')

    outputspec = pe.Node(
        niu.IdentityInterface(fields=['rois', 'reference', 'study_ref']),
        name='outputspec')

    wf.connect(studyref, 'study_ref', outputspec, 'study_ref')
    bin = pe.Node(fsl.ImageMaths(op_string='-bin'), name="binarize_roi")
    changetype = pe.Node(fsl.ChangeDataType(output_datatype='short'),
                         name='to_short')

    wf.connect(bg, 'outfile', outputspec, 'reference')
    wf.connect(label2vol, 'vol_label_file', bin, 'in_file')
    wf.connect(bin, 'out_file', changetype, 'in_file')
    wf.connect(changetype, 'out_file', outputspec, 'rois')
    return wf
Example #5
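A minimal sketch of the imports and helpers this workflow assumes; the create_* sub-workflow factories and get_trimmed_length are defined elsewhere in the author's project, and create_susan_smooth historically shipped with nipype's FSL fMRI workflows (import path varies by version).

import nipype.pipeline.engine as pe
import nipype.interfaces.utility as util
import nipype.interfaces.fsl as fsl

# create_realignment_workflow, create_skullstrip_workflow, create_art_workflow,
# create_bbregister_workflow, create_confound_removal_workflow,
# create_normalize_workflow, create_surface_projection_workflow and
# get_trimmed_length are assumed to be defined elsewhere in the same project.
# create_susan_smooth is the SUSAN smoothing workflow that shipped with nipype, e.g.:
# from nipype.workflows.fmri.fsl import create_susan_smooth
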
def create_resting_workflow(name="resting_state"):
    """Return a preprocessing workflow.

    Input spec node takes these inputs:
        - Timeseries (image files)
        - FWHM of the smoothing kernel (in mm)
        - FNIRT warp coefficient image
        - Freesurfer Subject ID
 
    Output node returns these files:
        - Smoothed timeseries (fully preprocessed and smoothed timeseries in native space)
        - Unsmoothed timeseries (identical steps except no smoothing in the volume)
        - Example func (target volume for MCFLIRT realignment)
        - Mean func (unsmoothed mean functional)
        - Functional mask (binary dilated brainmask in functional space)
        - Realignment parameters (text files from MCFLIRT)
        - Outlier Files (outlier text files from ART)
        - Plotted estimated rotations from MCFLIRT
        - Plotted estimated translations from MCFLIRT
        - Plotted estimated relative and absolute displacement from MCFLIRT
        - Plotted global mean intensity value
        - Sliced png of the example func (MCFLIRT target)
        - Sliced png of the unsmoothed mean functional volume
        - Tkregister-style affine matrix
        - FSL-style affine matrix
        - Sliced png summarizing the functional to anatomical transform
        - Optimization cost file quantitatively summarizing the transformation

    """
    resting = pe.Workflow(name=name)

    # Define the inputs for the preprocessing workflow
    inputnode = pe.Node(util.IdentityInterface(
        fields=["timeseries", "subject_id", "warpfield", "smooth_fwhm"]),
                        name="inputspec")

    # Remove the first six frames to account for T1 stabilization
    trimmer = pe.MapNode(fsl.ExtractROI(t_min=6),
                         iterfield=["in_file"],
                         name="trimmer")

    # Convert functional images to float representation
    img2float = pe.MapNode(fsl.ChangeDataType(output_datatype="float"),
                           iterfield=["in_file"],
                           name="img2float")

    # Perform slice-timing correction
    slicetime = pe.MapNode(fsl.SliceTimer(interleaved=True, time_repetition=6),
                           iterfield=["in_file"],
                           name="slicetime")

    # Motion correct
    realign = create_realignment_workflow()

    skullstrip = create_skullstrip_workflow()

    art = create_art_workflow(make_movie=False)

    func2anat = create_bbregister_workflow()

    confounds = create_confound_removal_workflow()

    susan = create_susan_smooth()

    normalize = create_normalize_workflow()

    tosurf = create_surface_projection_workflow()

    rename = pe.MapNode(util.Rename(format_string="timeseries", keep_ext=True),
                        iterfield=["in_file"],
                        name="rename")

    resting.connect([
        (inputnode, trimmer, [("timeseries", "in_file"),
                              (("timeseries", get_trimmed_length), "t_size")]),
        (trimmer, img2float, [("roi_file", "in_file")]),
        (img2float, slicetime, [("out_file", "in_file")]),
        (slicetime, realign, [("slice_time_corrected_file",
                               "inputs.timeseries")]),
        (realign, skullstrip, [("outputs.timeseries", "inputs.timeseries")]),
        (realign, art, [("outputs.realign_parameters",
                         "inputs.realignment_parameters")]),
        (img2float, art, [("out_file", "inputs.raw_timeseries")]),
        (skullstrip, art, [("outputs.timeseries",
                            "inputs.realigned_timeseries"),
                           ("outputs.mask_file", "inputs.mask_file")]),
        (skullstrip, func2anat, [("outputs.mean_func", "inputs.source_file")]),
        (inputnode, func2anat, [("subject_id", "inputs.subject_id")]),
        (inputnode, confounds, [("subject_id", "inputs.subject_id")]),
        (skullstrip, confounds, [("outputs.timeseries", "inputs.timeseries")]),
        (realign, confounds, [("outputs.realign_parameters",
                               "inputs.motion_parameters")]),
        (func2anat, confounds, [("outputs.tkreg_mat", "inputs.reg_file")]),
        (confounds, susan, [("outputs.timeseries", "inputnode.in_files")]),
        (skullstrip, susan, [("outputs.mask_file", "inputnode.mask_file")]),
        (inputnode, susan, [("smooth_fwhm", "inputnode.fwhm")]),
        (susan, rename, [("outputnode.smoothed_files", "in_file")]),
        (susan, normalize, [("outputnode.smoothed_files", "inputs.timeseries")
                            ]),
        (inputnode, normalize, [("warpfield", "inputs.warpfield")]),
        (func2anat, normalize, [("outputs.flirt_mat", "inputs.flirt_affine")]),
        (confounds, tosurf, [("outputs.timeseries", "inputs.timeseries")]),
        (func2anat, tosurf, [("outputs.tkreg_mat", "inputs.tkreg_affine")]),
        (inputnode, tosurf, [("subject_id", "inputs.subject_id"),
                             ("smooth_fwhm", "inputs.smooth_fwhm")]),
    ])

    # Define the outputs of the top-level workflow
    output_fields = [
        "volume_timeseries", "surface_timeseries", "native_timeseries",
        "example_func", "mean_func", "functional_mask", "realign_parameters",
        "mean_func_slices", "intensity_plot", "outlier_volumes",
        "realign_report", "flirt_affine", "tkreg_affine", "coreg_report",
        "confound_sources"
    ]

    outputnode = pe.Node(util.IdentityInterface(fields=output_fields),
                         name="outputspec")

    resting.connect([
        (realign, outputnode, [("outputs.realign_report", "realign_report"),
                               ("outputs.realign_parameters",
                                "realign_parameters"),
                               ("outputs.example_func", "example_func")]),
        (skullstrip, outputnode, [("outputs.mean_func", "mean_func"),
                                  ("outputs.mask_file", "functional_mask"),
                                  ("outputs.report_png", "mean_func_slices")]),
        (art, outputnode, [("outputs.intensity_plot", "intensity_plot"),
                           ("outputs.outlier_volumes", "outlier_volumes")]),
        (func2anat, outputnode, [("outputs.tkreg_mat", "tkreg_affine"),
                                 ("outputs.flirt_mat", "flirt_affine"),
                                 ("outputs.report", "coreg_report")]),
        (confounds, outputnode, [("outputs.confound_sources",
                                  "confound_sources")]),
        (tosurf, outputnode, [("outputs.timeseries", "surface_timeseries")]),
        (normalize, outputnode, [("outputs.timeseries", "volume_timeseries")]),
        (rename, outputnode, [("out_file", "native_timeseries")]),
    ])

    return resting, inputnode, outputnode
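Example #6

This extended variant of the localizer workflow additionally derives a background mask from the FreeSurfer aparc+aseg segmentation and applies it to the reference image. The helper functions get_surface_label, mask_overlay, background, get_vertices, study_ref and shorty are assumed to be defined in the surrounding module; pickaparc is imported from the package's freesurfer_brain_masks module.
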
def localizer(name='localizer'):
    import nipype.interfaces.freesurfer as fs
    import nipype.interfaces.fsl as fsl
    import nipype.pipeline.engine as pe
    import nipype.interfaces.utility as niu
    import nipype.interfaces.io as nio
    wf = pe.Workflow(name=name)
    inputspec = pe.Node(niu.IdentityInterface(fields=["subject_id",
                                                      "subjects_dir",
                                                      "overlay",
                                                      'reg',
                                                      'mean',
                                                      'thresh',
                                                      'roi',
                                                      "mask_overlay",
                                                      "use_mask_overlay","uthresh"]),name='inputspec')
    surf_label = pe.MapNode(niu.Function(input_names=['vertex',
                                                   'hemi',
                                                   'subject',
                                                   'overlay',
                                                   'reg',
                                                   'sd',
                                                   'thresh'],
                                      output_names=['filename','labels'],
                                      function=get_surface_label),
        name='get_surface_label', iterfield=['hemi','vertex'])
    surf_label.inputs.hemi=['lh','rh']
    #surf_label.inputs.vertex = [61091, 60437]
    #surf_label.inputs.thresh = 1.5

    masker = pe.Node(niu.Function(input_names=['mask',
                                               'overlay',
                                               'use_mask_overlay',
                                               'thresh'],
                                  output_names=['outfile'],function=mask_overlay),
        name='mask_overlay')

    bg = pe.Node(niu.Function(input_names=['overlay','uthresh'],
                              output_names=['outfile'],
                              function=background),
                 name='background')
    wf.connect(inputspec,'overlay',bg,'overlay')
    wf.connect(inputspec,'uthresh',bg,'uthresh')
    wf.connect(inputspec,'overlay',masker,'overlay')
    wf.connect(inputspec,'mask_overlay',masker,'mask')
    wf.connect(inputspec,'use_mask_overlay',masker,'use_mask_overlay')
    wf.connect(inputspec,'thresh',masker,'thresh')
    wf.connect(masker,'outfile',surf_label,'overlay')

    wf.connect(inputspec,"subject_id",surf_label,"subject")
    wf.connect(inputspec,"subjects_dir",surf_label,"sd")
    #wf.connect(inputspec,"overlay",surf_label,"overlay")
    wf.connect(inputspec,"reg",surf_label,"reg")

    label2vol = pe.Node(fs.Label2Vol(),name='labels2vol')
    wf.connect(inputspec,'subjects_dir',label2vol,'subjects_dir')
    wf.connect(inputspec,'mean',label2vol,'template_file')
    wf.connect(inputspec,'reg',label2vol,'reg_file')
    wf.connect(surf_label,'filename',label2vol,'label_file')

    verts = pe.MapNode(niu.Function(input_names=['sub',
                                              'sd',
                                              'overlay',
                                              'reg',
                                              'mean',
                                              'hemi',
                                              'roi',
                                              'thresh'],
                                 output_names=['vertex'],
                                 function=get_vertices),
        name='get_verts',iterfield=['hemi'])
    verts.inputs.hemi = ['lh','rh']
    wf.connect(inputspec,'subject_id',verts,'sub')
    wf.connect(inputspec,'subjects_dir',verts,'sd')
    #wf.connect(inputspec,'overlay',verts,'overlay')
    wf.connect(masker,'outfile',verts,'overlay')
    wf.connect(inputspec,'reg',verts,'reg')
    wf.connect(inputspec,'mean',verts,'mean')
    wf.connect(inputspec,'thresh',verts,'thresh')
    wf.connect(inputspec,'roi',verts,'roi')
    wf.connect(verts,'vertex',surf_label,'vertex')
    wf.connect(inputspec,'thresh',surf_label,'thresh')

    from ...smri.freesurfer_brain_masks import pickaparc

    fssource = pe.Node(nio.FreeSurferSource(),name='fssource')
    wf.connect(inputspec,"subjects_dir",fssource,"subjects_dir")
    wf.connect(inputspec,"subject_id", fssource,"subject_id")

    bg_mask = pe.Node(fs.Binarize(wm_ven_csf=True, erode=2),name="bg_mask")

    wf.connect(fssource,("aparc_aseg",pickaparc),bg_mask,"in_file")

    warp_mask = pe.Node(fs.ApplyVolTransform(inverse=True,interp='nearest'),name="warp_to_func")
    wf.connect(inputspec,"mean",warp_mask,"source_file")
    wf.connect(bg_mask,"binary_file",warp_mask,"target_file")
    wf.connect(inputspec,"reg", warp_mask,"reg_file")
    

    do_bg_mask = pe.Node(fs.ApplyMask(),name="do_bg_mask")
    wf.connect(warp_mask,"transformed_file",do_bg_mask,"mask_file")

    studyref = pe.Node(niu.Function(input_names=['mean'],
                                    output_names=['study_ref'],
                                    function=study_ref),
                       name='studyref')
    wf.connect(inputspec,'mean',studyref,'mean')

    outputspec = pe.Node(niu.IdentityInterface(fields=['rois','reference','study_ref','labels']),
                         name='outputspec')

    wf.connect(studyref,'study_ref', outputspec, 'study_ref')
    bin = pe.Node(fsl.ImageMaths(op_string = '-bin'),name="binarize_roi")
    changetype = pe.Node(fsl.ChangeDataType(output_datatype='short'),name='to_short')

    wf.connect(bg,'outfile',do_bg_mask,"in_file")
    wf.connect(do_bg_mask,("out_file",shorty), outputspec,'reference')
    wf.connect(label2vol,'vol_label_file',bin,'in_file')
    wf.connect(bin,'out_file', changetype, 'in_file')
    wf.connect(changetype, 'out_file', outputspec, 'rois')
    wf.connect(surf_label,'labels',outputspec,'labels')
    return wf
Example #7
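A minimal sketch of the imports this fragment assumes; firstlevelhelpers is a local module providing the Getpercentthresh interface, and the NodeHash_* nodes that are referenced but not defined here come from earlier parts of the auto-generated script.

import nipype.pipeline.engine as pe
import nipype.interfaces.fsl as fsl

# firstlevelhelpers is assumed to be a local module exposing the
# Getpercentthresh interface used by the MapNodes below.
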
NodeHash_1cecfcd0.inputs.percentage = 0.75

#Custom interface wrapping function Getpercentthresh
NodeHash_20695ac0 = pe.MapNode(interface = firstlevelhelpers.Getpercentthresh, name = 'NodeName_20695ac0', iterfield = ['value'])
NodeHash_20695ac0.inputs.percentage = 0.1

#Wraps command **fslmaths**
NodeHash_10ff9c60 = pe.MapNode(interface = fsl.Threshold(), name = 'NodeName_10ff9c60', iterfield = ['in_file', 'thresh'])

#Wraps command **fslmaths**
NodeHash_843b4a0 = pe.MapNode(interface = fsl.MinImage(), name = 'NodeName_843b4a0', iterfield = ['in_file'])
NodeHash_843b4a0.inputs.args = '-bin'
NodeHash_843b4a0.inputs.dimension = 'T'

#Wraps command **fslmaths**
NodeHash_2b6977b0 = pe.MapNode(interface = fsl.ChangeDataType(), name = 'NodeName_2b6977b0', iterfield = ['in_file'])
NodeHash_2b6977b0.inputs.output_datatype = 'char'

#Wraps command **fslmaths**
NodeHash_258767c0 = pe.MapNode(interface = fsl.DilateImage(), name = 'NodeName_258767c0', iterfield = ['in_file'])
NodeHash_258767c0.inputs.operation = 'max'

#Wraps command **fslstats**
NodeHash_2fd0bda0 = pe.MapNode(interface = fsl.ImageStats(), name = 'NodeName_2fd0bda0', iterfield = ['in_file', 'mask_file'])
NodeHash_2fd0bda0.inputs.op_string = '-p 50'

#Wraps command **fslmaths**
NodeHash_ffd7a90 = pe.MapNode(interface = fsl.ApplyMask(), name = 'NodeName_ffd7a90', iterfield = ['in_file', 'mask_file'])

#Wraps command **fslmaths**
NodeHash_255ee520 = pe.MapNode(interface = fsl.MeanImage(), name = 'NodeName_255ee520', iterfield = ['in_file'])
NodeHash_16878250.inputs.percentage = 0.1

#Wraps command **fslmaths**
NodeHash_171fdb30 = pe.MapNode(interface=fsl.Threshold(),
                               name='NodeName_171fdb30',
                               iterfield=['in_file', 'thresh'])

#Wraps command **fslmaths**
NodeHash_179e1da0 = pe.MapNode(interface=fsl.MinImage(),
                               name='NodeName_179e1da0',
                               iterfield=['in_file'])
NodeHash_179e1da0.inputs.args = '-bin'
NodeHash_179e1da0.inputs.dimension = 'T'

#Wraps command **fslmaths**
NodeHash_1879dad0 = pe.MapNode(interface=fsl.ChangeDataType(),
                               name='NodeName_1879dad0',
                               iterfield=['in_file'])
NodeHash_1879dad0.inputs.output_datatype = 'char'

#Wraps command **fslmaths**
NodeHash_188ac400 = pe.MapNode(interface=fsl.DilateImage(),
                               name='NodeName_188ac400',
                               iterfield=['in_file'])
NodeHash_188ac400.inputs.operation = 'max'

#Wraps command **fslstats**
NodeHash_1a202720 = pe.MapNode(interface=fsl.ImageStats(),
                               name='NodeName_1a202720',
                               iterfield=['in_file', 'mask_file'])
NodeHash_1a202720.inputs.op_string = '-p 50'
NodeHash_14e113c0.inputs.percentage = 0.75

#Custom interface wrapping function Getpercentthresh
NodeHash_14e3d7c0 = pe.MapNode(interface = firstlevelhelpers.Getpercentthresh, name = 'NodeName_14e3d7c0', iterfield = ['value'])
NodeHash_14e3d7c0.inputs.percentage = 0.1

#Wraps command **fslmaths**
NodeHash_157bd2b0 = pe.MapNode(interface = fsl.Threshold(), name = 'NodeName_157bd2b0', iterfield = ['in_file', 'thresh'])

#Wraps command **fslmaths**
NodeHash_15f74220 = pe.MapNode(interface = fsl.MinImage(), name = 'NodeName_15f74220', iterfield = ['in_file'])
NodeHash_15f74220.inputs.args = '-bin'
NodeHash_15f74220.inputs.dimension = 'T'

#Wraps command **fslmaths**
NodeHash_16d421a0 = pe.MapNode(interface = fsl.ChangeDataType(), name = 'NodeName_16d421a0', iterfield = ['in_file'])
NodeHash_16d421a0.inputs.output_datatype = 'char'

#Wraps command **fslmaths**
NodeHash_1801caa0 = pe.MapNode(interface = fsl.DilateImage(), name = 'NodeName_1801caa0', iterfield = ['in_file'])
NodeHash_1801caa0.inputs.operation = 'max'

#Wraps command **fslstats**
NodeHash_18508680 = pe.MapNode(interface = fsl.ImageStats(), name = 'NodeName_18508680', iterfield = ['in_file', 'mask_file'])
NodeHash_18508680.inputs.op_string = '-p 50'

#Wraps command **fslmaths**
NodeHash_181e69e0 = pe.MapNode(interface = fsl.ApplyMask(), name = 'NodeName_181e69e0', iterfield = ['in_file', 'mask_file'])

#Wraps command **fslmaths**
NodeHash_182b4bf0 = pe.MapNode(interface = fsl.MeanImage(), name = 'NodeName_182b4bf0', iterfield = ['in_file'])
Example #10
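A minimal sketch of the imports and external names this fragment assumes; the mni workflow, selectfiles, translist, subject_infosource, template and out_dir are created earlier in the same script.

from nipype import Node
import nipype.interfaces.fsl as fsl
import nipype.interfaces.ants as ants
import nipype.interfaces.io as nio

# `mni` (the Workflow), `selectfiles`, `translist`, `subject_infosource`,
# `template` and `out_dir` are assumed to be defined earlier in the script.
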
# apply all transforms
applytransform = Node(
    ants.ApplyTransforms(
        input_image_type=3,
        #output_image='rest_preprocessed2mni.nii.gz',
        interpolation='BSpline',
        invert_transform_flags=[False, False]),
    name='applytransform')

applytransform.inputs.reference_image = template
applytransform.plugin_args = {'submit_specs': 'request_memory = 30000'}
mni.connect([(selectfiles, applytransform, [('rest', 'input_image')]),
             (translist, applytransform, [('out', 'transforms')])])

# downcast the image to float
changedt = Node(fsl.ChangeDataType(output_datatype='float',
                                   out_file='rest_preprocessed2mni.nii.gz'),
                name='changedt')
changedt.plugin_args = {'submit_specs': 'request_memory = 30000'}
mni.connect([(applytransform, changedt, [('output_image', 'in_file')])])


# build the subject-specific base directory path
def makebase(subject_id, out_dir):
    return out_dir % subject_id


# sink
sink = Node(nio.DataSink(base_directory=out_dir, parameterization=False),
            name='sink')

mni.connect([(subject_infosource, sink, [(('subject_id', makebase, out_dir),